Dataset columns (per-row fields, with value ranges from the viewer):

  body                    string, lengths 26 to 98.2k
  body_hash               int64, -9,222,864,604,528,158,000 to 9,221,803,474B
  docstring               string, lengths 1 to 16.8k
  path                    string, lengths 5 to 230
  name                    string, lengths 1 to 96
  repository_name         string, lengths 7 to 89
  lang                    string, 1 distinct class
  body_without_docstring  string, lengths 20 to 98.2k
def is_callback(func: Callable[(..., Any)]) -> bool: 'Check if function is safe to be called in the event loop.' return (getattr(func, '_edge_callback', False) is True)
-7,553,714,258,499,277,000
Check if function is safe to be called in the event loop.
merceedge/util/async_util.py
is_callback
hobo0cn/MerceEdge
python
def is_callback(func: Callable[(..., Any)]) -> bool: return (getattr(func, '_edge_callback', False) is True)
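A minimal usage sketch for the row above, assuming `is_callback` is importable from merceedge/util/async_util.py; the `callback` decorator below is a hypothetical helper that sets the `_edge_callback` marker attribute the function checks.

from typing import Any, Callable

def callback(func: Callable[..., Any]) -> Callable[..., Any]:
    # Hypothetical marker decorator: set the attribute is_callback() looks for.
    setattr(func, '_edge_callback', True)
    return func

@callback
def on_packet(payload: dict) -> None:
    print('handling', payload)

assert is_callback(on_packet)   # marked, safe for the event loop
assert not is_callback(len)     # unmarked callables fail the check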
def _set_result_unless_cancelled(fut: Future, result: Any) -> None: 'Set the result only if the Future was not cancelled.' if fut.cancelled(): return fut.set_result(result)
-1,647,970,271,901,832,000
Set the result only if the Future was not cancelled.
merceedge/util/async_util.py
_set_result_unless_cancelled
hobo0cn/MerceEdge
python
def _set_result_unless_cancelled(fut: Future, result: Any) -> None: if fut.cancelled(): return fut.set_result(result)
def _set_concurrent_future_state(concurr: concurrent.futures.Future, source: Union[(concurrent.futures.Future, Future)]) -> None: 'Copy state from a future to a concurrent.futures.Future.' assert source.done() if source.cancelled(): concurr.cancel() if (not concurr.set_running_or_notify_cancel()): return exception = source.exception() if (exception is not None): concurr.set_exception(exception) else: result = source.result() concurr.set_result(result)
-3,110,793,826,072,417,000
Copy state from a future to a concurrent.futures.Future.
merceedge/util/async_util.py
_set_concurrent_future_state
hobo0cn/MerceEdge
python
def _set_concurrent_future_state(concurr: concurrent.futures.Future, source: Union[(concurrent.futures.Future, Future)]) -> None: assert source.done() if source.cancelled(): concurr.cancel() if (not concurr.set_running_or_notify_cancel()): return exception = source.exception() if (exception is not None): concurr.set_exception(exception) else: result = source.result() concurr.set_result(result)
def _copy_future_state(source: Union[(concurrent.futures.Future, Future)], dest: Union[(concurrent.futures.Future, Future)]) -> None: 'Copy state from another Future.\n\n The other Future may be a concurrent.futures.Future.\n ' assert source.done() if dest.cancelled(): return assert (not dest.done()) if source.cancelled(): dest.cancel() else: exception = source.exception() if (exception is not None): dest.set_exception(exception) else: result = source.result() dest.set_result(result)
-7,860,038,271,336,070,000
Copy state from another Future. The other Future may be a concurrent.futures.Future.
merceedge/util/async_util.py
_copy_future_state
hobo0cn/MerceEdge
python
def _copy_future_state(source: Union[(concurrent.futures.Future, Future)], dest: Union[(concurrent.futures.Future, Future)]) -> None: 'Copy state from another Future.\n\n The other Future may be a concurrent.futures.Future.\n ' assert source.done() if dest.cancelled(): return assert (not dest.done()) if source.cancelled(): dest.cancel() else: exception = source.exception() if (exception is not None): dest.set_exception(exception) else: result = source.result() dest.set_result(result)
def _chain_future(source: Union[(concurrent.futures.Future, Future)], destination: Union[(concurrent.futures.Future, Future)]) -> None: 'Chain two futures so that when one completes, so does the other.\n\n The result (or exception) of source will be copied to destination.\n If destination is cancelled, source gets cancelled too.\n Compatible with both asyncio.Future and concurrent.futures.Future.\n ' if (not isinstance(source, (Future, concurrent.futures.Future))): raise TypeError('A future is required for source argument') if (not isinstance(destination, (Future, concurrent.futures.Future))): raise TypeError('A future is required for destination argument') if isinstance(source, Future): source_loop = source._loop else: source_loop = None if isinstance(destination, Future): dest_loop = destination._loop else: dest_loop = None def _set_state(future: Union[(concurrent.futures.Future, Future)], other: Union[(concurrent.futures.Future, Future)]) -> None: if isinstance(future, Future): _copy_future_state(other, future) else: _set_concurrent_future_state(future, other) def _call_check_cancel(destination: Union[(concurrent.futures.Future, Future)]) -> None: if destination.cancelled(): if ((source_loop is None) or (source_loop is dest_loop)): source.cancel() else: source_loop.call_soon_threadsafe(source.cancel) def _call_set_state(source: Union[(concurrent.futures.Future, Future)]) -> None: if ((dest_loop is None) or (dest_loop is source_loop)): _set_state(destination, source) else: dest_loop.call_soon_threadsafe(_set_state, destination, source) destination.add_done_callback(_call_check_cancel) source.add_done_callback(_call_set_state)
-464,260,178,128,809,500
Chain two futures so that when one completes, so does the other. The result (or exception) of source will be copied to destination. If destination is cancelled, source gets cancelled too. Compatible with both asyncio.Future and concurrent.futures.Future.
merceedge/util/async_util.py
_chain_future
hobo0cn/MerceEdge
python
def _chain_future(source: Union[(concurrent.futures.Future, Future)], destination: Union[(concurrent.futures.Future, Future)]) -> None: 'Chain two futures so that when one completes, so does the other.\n\n The result (or exception) of source will be copied to destination.\n If destination is cancelled, source gets cancelled too.\n Compatible with both asyncio.Future and concurrent.futures.Future.\n ' if (not isinstance(source, (Future, concurrent.futures.Future))): raise TypeError('A future is required for source argument') if (not isinstance(destination, (Future, concurrent.futures.Future))): raise TypeError('A future is required for destination argument') if isinstance(source, Future): source_loop = source._loop else: source_loop = None if isinstance(destination, Future): dest_loop = destination._loop else: dest_loop = None def _set_state(future: Union[(concurrent.futures.Future, Future)], other: Union[(concurrent.futures.Future, Future)]) -> None: if isinstance(future, Future): _copy_future_state(other, future) else: _set_concurrent_future_state(future, other) def _call_check_cancel(destination: Union[(concurrent.futures.Future, Future)]) -> None: if destination.cancelled(): if ((source_loop is None) or (source_loop is dest_loop)): source.cancel() else: source_loop.call_soon_threadsafe(source.cancel) def _call_set_state(source: Union[(concurrent.futures.Future, Future)]) -> None: if ((dest_loop is None) or (dest_loop is source_loop)): _set_state(destination, source) else: dest_loop.call_soon_threadsafe(_set_state, destination, source) destination.add_done_callback(_call_check_cancel) source.add_done_callback(_call_set_state)
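A sketch of the chaining behavior described above, under the module's own imports (`asyncio`, `concurrent.futures`): resolving the asyncio source future propagates its value into a thread-safe `concurrent.futures.Future`.

import asyncio
import concurrent.futures

async def demo() -> None:
    loop = asyncio.get_running_loop()
    source = loop.create_future()               # asyncio.Future
    destination = concurrent.futures.Future()   # consumable from any thread
    _chain_future(source, destination)
    source.set_result(42)                       # complete the source...
    await asyncio.sleep(0)                      # ...let done-callbacks run
    print(destination.result(timeout=1))        # 42

asyncio.run(demo())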
def run_coroutine_threadsafe(coro: Union[(Coroutine, Generator)], loop: AbstractEventLoop) -> concurrent.futures.Future: 'Submit a coroutine object to a given event loop.\n\n Return a concurrent.futures.Future to access the result.\n ' ident = loop.__dict__.get('_thread_ident') if ((ident is not None) and (ident == threading.get_ident())): raise RuntimeError('Cannot be called from within the event loop') if (not coroutines.iscoroutine(coro)): raise TypeError('A coroutine object is required') future = concurrent.futures.Future() def callback() -> None: 'Handle the call to the coroutine.' try: _chain_future(ensure_future(coro, loop=loop), future) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True) loop.call_soon_threadsafe(callback) return future
-8,966,409,045,454,131,000
Submit a coroutine object to a given event loop. Return a concurrent.futures.Future to access the result.
merceedge/util/async_util.py
run_coroutine_threadsafe
hobo0cn/MerceEdge
python
def run_coroutine_threadsafe(coro: Union[(Coroutine, Generator)], loop: AbstractEventLoop) -> concurrent.futures.Future: 'Submit a coroutine object to a given event loop.\n\n Return a concurrent.futures.Future to access the result.\n ' ident = loop.__dict__.get('_thread_ident') if ((ident is not None) and (ident == threading.get_ident())): raise RuntimeError('Cannot be called from within the event loop') if (not coroutines.iscoroutine(coro)): raise TypeError('A coroutine object is required') future = concurrent.futures.Future() def callback() -> None: 'Handle the call to the coroutine.' try: _chain_future(ensure_future(coro, loop=loop), future) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True) loop.call_soon_threadsafe(callback) return future
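Usage sketch for the row above, which mirrors the stdlib `asyncio.run_coroutine_threadsafe`: run a loop in a background thread, submit a coroutine from the calling thread, and block on the returned `concurrent.futures.Future`.

import asyncio
import threading

async def add(a: int, b: int) -> int:
    await asyncio.sleep(0.1)
    return a + b

loop = asyncio.new_event_loop()
threading.Thread(target=loop.run_forever, daemon=True).start()

future = run_coroutine_threadsafe(add(2, 3), loop)
print(future.result(timeout=5))   # 5, computed on the loop thread

loop.call_soon_threadsafe(loop.stop)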
def fire_coroutine_threadsafe(coro: Coroutine, loop: AbstractEventLoop) -> None: 'Submit a coroutine object to a given event loop.\n\n This method does not provide a way to retrieve the result and\n is intended for fire-and-forget use. This reduces the\n work involved to fire the function on the loop.\n ' ident = loop.__dict__.get('_thread_ident') if ((ident is not None) and (ident == threading.get_ident())): raise RuntimeError('Cannot be called from within the event loop') if (not coroutines.iscoroutine(coro)): raise TypeError(('A coroutine object is required: %s' % coro)) def callback() -> None: 'Handle the firing of a coroutine.' ensure_future(coro, loop=loop) loop.call_soon_threadsafe(callback)
5,010,027,783,280,364,000
Submit a coroutine object to a given event loop. This method does not provide a way to retrieve the result and is intended for fire-and-forget use. This reduces the work involved to fire the function on the loop.
merceedge/util/async_util.py
fire_coroutine_threadsafe
hobo0cn/MerceEdge
python
def fire_coroutine_threadsafe(coro: Coroutine, loop: AbstractEventLoop) -> None: 'Submit a coroutine object to a given event loop.\n\n This method does not provide a way to retrieve the result and\n is intended for fire-and-forget use. This reduces the\n work involved to fire the function on the loop.\n ' ident = loop.__dict__.get('_thread_ident') if ((ident is not None) and (ident == threading.get_ident())): raise RuntimeError('Cannot be called from within the event loop') if (not coroutines.iscoroutine(coro)): raise TypeError(('A coroutine object is required: %s' % coro)) def callback() -> None: 'Handle the firing of a coroutine.' ensure_future(coro, loop=loop) loop.call_soon_threadsafe(callback)
def run_callback_threadsafe(loop: AbstractEventLoop, callback: Callable, *args: Any) -> concurrent.futures.Future: 'Submit a callback object to a given event loop.\n\n Return a concurrent.futures.Future to access the result.\n\n NOTE: This code references home-assistant.\n ' ident = loop.__dict__.get('_thread_ident') if ((ident is not None) and (ident == threading.get_ident())): raise RuntimeError('Cannot be called from within the event loop') future = concurrent.futures.Future() def run_callback() -> None: 'Run callback and store result.' try: future.set_result(callback(*args)) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True) loop.call_soon_threadsafe(run_callback) return future
1,413,469,190,202,463,000
Submit a callback object to a given event loop. Return a concurrent.futures.Future to access the result. NOTE: This code references home-assistant.
merceedge/util/async_util.py
run_callback_threadsafe
hobo0cn/MerceEdge
python
def run_callback_threadsafe(loop: AbstractEventLoop, callback: Callable, *args: Any) -> concurrent.futures.Future: 'Submit a callback object to a given event loop.\n\n Return a concurrent.futures.Future to access the result.\n\n NOTE: This code references home-assistant.\n ' ident = loop.__dict__.get('_thread_ident') if ((ident is not None) and (ident == threading.get_ident())): raise RuntimeError('Cannot be called from within the event loop') future = concurrent.futures.Future() def run_callback() -> None: 'Run callback and store result.' try: future.set_result(callback(*args)) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True) loop.call_soon_threadsafe(run_callback) return future
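Reusing the background-loop setup from the run_coroutine_threadsafe sketch above, the callback variant submits a plain callable and returns its value the same way:

# Runs sorted([3, 1, 2]) on the loop thread; result() blocks the caller.
future = run_callback_threadsafe(loop, sorted, [3, 1, 2])
print(future.result(timeout=5))   # [1, 2, 3]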
def as_dict(self) -> dict: 'Return a dictionary representation of the context.' return {'id': self.id, 'user_id': self.user_id}
-5,950,560,538,002,860,000
Return a dictionary representation of the context.
merceedge/util/async_util.py
as_dict
hobo0cn/MerceEdge
python
def as_dict(self) -> dict: return {'id': self.id, 'user_id': self.user_id}
def callback() -> None: 'Handle the call to the coroutine.' try: _chain_future(ensure_future(coro, loop=loop), future) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True)
6,314,385,836,826,053,000
Handle the call to the coroutine.
merceedge/util/async_util.py
callback
hobo0cn/MerceEdge
python
def callback() -> None: try: _chain_future(ensure_future(coro, loop=loop), future) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True)
def callback() -> None: 'Handle the firing of a coroutine.' ensure_future(coro, loop=loop)
7,684,422,278,126,684,000
Handle the firing of a coroutine.
merceedge/util/async_util.py
callback
hobo0cn/MerceEdge
python
def callback() -> None: ensure_future(coro, loop=loop)
def run_callback() -> None: 'Run callback and store result.' try: future.set_result(callback(*args)) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True)
-6,252,598,045,921,766,000
Run callback and store result.
merceedge/util/async_util.py
run_callback
hobo0cn/MerceEdge
python
def run_callback() -> None: try: future.set_result(callback(*args)) except Exception as exc: if future.set_running_or_notify_cancel(): future.set_exception(exc) else: _LOGGER.warning('Exception on lost future: ', exc_info=True)
def asyncio_run(main: Awaitable[_T], *, debug: bool=False) -> _T: 'Minimal re-implementation of asyncio.run (since 3.7).' loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.set_debug(debug) try: return loop.run_until_complete(main) finally: print('loop final') asyncio.set_event_loop(None) loop.close()
5,285,841,334,932,499,000
Minimal re-implementation of asyncio.run (since 3.7).
merceedge/util/async_util.py
asyncio_run
hobo0cn/MerceEdge
python
def asyncio_run(main: Awaitable[_T], *, debug: bool=False) -> _T: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.set_debug(debug) try: return loop.run_until_complete(main) finally: print('loop final') asyncio.set_event_loop(None) loop.close()
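Minimal usage of the re-implementation above; note that the stray `print('loop final')` in the row fires before the result is returned.

import asyncio

async def greet() -> str:
    await asyncio.sleep(0)
    return 'hello'

print(asyncio_run(greet(), debug=True))   # prints 'loop final', then 'hello'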
def __init__(self, str_subject, template, template_params): 'string template과 딕셔너리형 template_params받아 MIME 메시지를 만든다' assert isinstance(template, Template) assert isinstance(template_params, dict) self.msg = MIMEMultipart() self.msg['Subject'] = str_subject str_msg = template.safe_substitute(**template_params) mime_msg = MIMEText(str_msg, 'html') self.msg.attach(mime_msg)
-4,368,756,389,393,821,000
Builds a MIME message from a string Template and a dict of template_params
app/user/example.py
__init__
Einsicht1/recipe-app-api
python
def __init__(self, str_subject, template, template_params): assert isinstance(template, Template) assert isinstance(template_params, dict) self.msg = MIMEMultipart() self.msg['Subject'] = str_subject str_msg = template.safe_substitute(**template_params) mime_msg = MIMEText(str_msg, 'html') self.msg.attach(mime_msg)
def get_message(self, str_from_email_addr, str_to_email_addr): '발신자, 수신자리스트를 이용하여 보낼메시지를 만든다 ' send_msg = copy.deepcopy(self.msg) send_msg['From'] = str_from_email_addr send_msg['To'] = str_to_email_addr return send_msg
3,965,988,359,583,012,400
Builds the message to send using the sender address and recipient list
app/user/example.py
get_message
Einsicht1/recipe-app-api
python
def get_message(self, str_from_email_addr, str_to_email_addr): send_msg = copy.deepcopy(self.msg) send_msg['From'] = str_from_email_addr send_msg['To'] = str_to_email_addr return send_msg
def __init__(self, str_host, num_port): '호스트와 포트번호로 SMTP로 연결한다 ' self.str_host = str_host self.num_port = num_port self.smtp_connect = smtplib.SMTP(host=str_host, port=num_port) self.smtp_connect.starttls() self.smtp_connect.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
7,523,257,613,259,052,000
Connects over SMTP using the given host and port number
app/user/example.py
__init__
Einsicht1/recipe-app-api
python
def __init__(self, str_host, num_port): self.str_host = str_host self.num_port = num_port self.smtp_connect = smtplib.SMTP(host=str_host, port=num_port) self.smtp_connect.starttls() self.smtp_connect.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
def send_message(self, emailContent, str_from_email_addr, str_to_email_addr): 'e메일을 발송한다 ' contents = emailContent.get_message(str_from_email_addr, str_to_email_addr) self.smtp_connect.send_message(contents, from_addr=str_from_email_addr, to_addrs=str_to_email_addr) del contents
-3,186,268,996,533,305,300
Sends the e-mail
app/user/example.py
send_message
Einsicht1/recipe-app-api
python
def send_message(self, emailContent, str_from_email_addr, str_to_email_addr): contents = emailContent.get_message(str_from_email_addr, str_to_email_addr) self.smtp_connect.send_message(contents, from_addr=str_from_email_addr, to_addrs=str_to_email_addr) del contents
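The four email rows above are methods of two classes whose names the dump omits; a usage sketch with hypothetical names `EmailContent` and `EmailSender` (with `EMAIL_ADDRESS`/`EMAIL_PASSWORD` assumed to be configured in the module):

from string import Template

# EmailContent/EmailSender are hypothetical names for the unnamed classes.
content = EmailContent(
    str_subject='Welcome',
    template=Template('<h1>Hello $name</h1>'),
    template_params={'name': 'Jin'},
)
sender = EmailSender(str_host='smtp.gmail.com', num_port=587)
sender.send_message(content, 'me@example.com', 'you@example.com')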
def __init__(self, clip_min: float=(- 5.0), clip_max: float=5.0, name='observation_stats'): '\n :param clip_min: The minimum value to allow after normalizing the observation\n :param clip_max: The maximum value to allow after normalizing the observation\n ' super().__init__() self.clip_min = clip_min self.clip_max = clip_max self.running_observation_stats = None self.name = name self.supports_batching = True self.observation_space = None
-3,725,733,070,058,041,000
:param clip_min: The minimum value to allow after normalizing the observation :param clip_max: The maximum value to allow after normalizing the observation
rl_coach/filters/observation/observation_normalization_filter.py
__init__
AustinDeric/coach
python
def __init__(self, clip_min: float=(- 5.0), clip_max: float=5.0, name='observation_stats'): '\n :param clip_min: The minimum value to allow after normalizing the observation\n :param clip_max: The maximum value to allow after normalizing the observation\n ' super().__init__() self.clip_min = clip_min self.clip_max = clip_max self.running_observation_stats = None self.name = name self.supports_batching = True self.observation_space = None
def set_device(self, device, memory_backend_params=None, mode='numpy') -> None: "\n An optional function that allows the filter to get the device if it is required to use tensorflow ops\n :param device: the device to use\n :memory_backend_params: if not None, holds params for a memory backend for sharing data (e.g. Redis)\n :param mode: the arithmetic module to use {'tf' | 'numpy'}\n :return: None\n " if (mode == 'tf'): from rl_coach.architectures.tensorflow_components.shared_variables import TFSharedRunningStats self.running_observation_stats = TFSharedRunningStats(device, name=self.name, create_ops=False, pubsub_params=memory_backend_params) elif (mode == 'numpy'): self.running_observation_stats = NumpySharedRunningStats(name=self.name, pubsub_params=memory_backend_params)
5,497,590,193,474,922,000
An optional function that allows the filter to get the device if it is required to use tensorflow ops :param device: the device to use :memory_backend_params: if not None, holds params for a memory backend for sharing data (e.g. Redis) :param mode: the arithmetic module to use {'tf' | 'numpy'} :return: None
rl_coach/filters/observation/observation_normalization_filter.py
set_device
AustinDeric/coach
python
def set_device(self, device, memory_backend_params=None, mode='numpy') -> None: "\n An optional function that allows the filter to get the device if it is required to use tensorflow ops\n :param device: the device to use\n :memory_backend_params: if not None, holds params for a memory backend for sharing data (e.g. Redis)\n :param mode: the arithmetic module to use {'tf' | 'numpy'}\n :return: None\n " if (mode == 'tf'): from rl_coach.architectures.tensorflow_components.shared_variables import TFSharedRunningStats self.running_observation_stats = TFSharedRunningStats(device, name=self.name, create_ops=False, pubsub_params=memory_backend_params) elif (mode == 'numpy'): self.running_observation_stats = NumpySharedRunningStats(name=self.name, pubsub_params=memory_backend_params)
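A sketch of wiring the filter's statistics backend; the class name `ObservationNormalizationFilter` is assumed from the file name, since the dump omits it.

# numpy mode ignores the device and needs no TensorFlow session.
filt = ObservationNormalizationFilter(clip_min=-5.0, clip_max=5.0)
filt.set_device(device=None, mode='numpy')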
def set_session(self, sess) -> None: '\n An optional function that allows the filter to get the session if it is required to use tensorflow ops\n :param sess: the session\n :return: None\n ' self.running_observation_stats.set_session(sess)
6,684,206,919,324,875,000
An optional function that allows the filter to get the session if it is required to use tensorflow ops :param sess: the session :return: None
rl_coach/filters/observation/observation_normalization_filter.py
set_session
AustinDeric/coach
python
def set_session(self, sess) -> None: '\n An optional function that allows the filter to get the session if it is required to use tensorflow ops\n :param sess: the session\n :return: None\n ' self.running_observation_stats.set_session(sess)
def changed(self): 'all draft objects that have not been published yet' return self.filter(Publishable.Q_CHANGED)
-2,284,062,380,157,862,400
all draft objects that have not been published yet
publish/models.py
changed
team-35/django-publish
python
def changed(self): return self.filter(Publishable.Q_CHANGED)
def deleted(self): 'public objects that need deleting' return self.filter(Publishable.Q_DELETED)
-8,045,970,957,518,090,000
public objects that need deleting
publish/models.py
deleted
team-35/django-publish
python
def deleted(self): return self.filter(Publishable.Q_DELETED)
def draft(self): 'all draft objects' return self.filter(Publishable.Q_DRAFT)
2,715,094,798,063,293,000
all draft objects
publish/models.py
draft
team-35/django-publish
python
def draft(self): return self.filter(Publishable.Q_DRAFT)
def published(self): 'all public/published objects' return self.filter(Publishable.Q_PUBLISHED)
-5,068,784,696,173,575,000
all public/published objects
publish/models.py
published
team-35/django-publish
python
def published(self): return self.filter(Publishable.Q_PUBLISHED)
def publish(self, all_published=None): 'publish all models in this queryset' if (all_published is None): all_published = NestedSet() for p in self: p.publish(all_published=all_published)
2,130,694,998,165,031,400
publish all models in this queryset
publish/models.py
publish
team-35/django-publish
python
def publish(self, all_published=None): if (all_published is None): all_published = NestedSet() for p in self: p.publish(all_published=all_published)
def delete(self, mark_for_deletion=True): '\n override delete so that we call delete on each object separately, as delete needs\n to set some flags etc\n ' for p in self: p.delete(mark_for_deletion=mark_for_deletion)
5,955,300,016,194,922,000
override delete so that we call delete on each object separately, as delete needs to set some flags etc
publish/models.py
delete
team-35/django-publish
python
def delete(self, mark_for_deletion=True): '\n override delete so that we call delete on each object separately, as delete needs\n to set some flags etc\n ' for p in self: p.delete(mark_for_deletion=mark_for_deletion)
def changed(self): 'all draft objects that have not been published yet' return self.get_query_set().changed()
5,233,656,181,255,264,000
all draft objects that have not been published yet
publish/models.py
changed
team-35/django-publish
python
def changed(self): return self.get_query_set().changed()
def deleted(self): 'public objects that need deleting' return self.get_query_set().deleted()
191,633,847,673,881,540
public objects that need deleting
publish/models.py
deleted
team-35/django-publish
python
def deleted(self): return self.get_query_set().deleted()
def draft(self): 'all draft objects' return self.get_query_set().draft()
328,280,275,420,383,740
all draft objects
publish/models.py
draft
team-35/django-publish
python
def draft(self): return self.get_query_set().draft()
def published(self): 'all public/published objects' return self.get_query_set().published()
-2,110,702,919,340,123,000
all public/published objects
publish/models.py
published
team-35/django-publish
python
def published(self): return self.get_query_set().published()
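The queryset and manager rows above expose the same four filters at both levels. A sketch against a hypothetical `Page(Publishable)` model:

drafts = Page.objects.draft()        # everything editable
pending = Page.objects.changed()     # drafts not yet pushed live
live = Page.objects.published()      # public copies
doomed = Page.objects.deleted()      # public copies awaiting deletion

pending.publish()                    # copy each changed draft to its public version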
def publish(self, dry_run=False, all_published=None, parent=None): '\n either publish changes or deletions, depending on\n whether this model is public or draft.\n \n public models will be examined to see if they need deleting\n and deleted if so.\n ' if self.is_public: raise PublishException('Cannot publish public model - publish should be called from draft model') if (self.pk is None): raise PublishException('Please save model before publishing') if (self.publish_state == Publishable.PUBLISH_DELETE): self.publish_deletions(dry_run=dry_run, all_published=all_published, parent=parent) return None else: return self.publish_changes(dry_run=dry_run, all_published=all_published, parent=parent)
5,369,444,120,397,917,000
either publish changes or deletions, depending on whether this model is public or draft. public models will be examined to see if they need deleting and deleted if so.
publish/models.py
publish
team-35/django-publish
python
def publish(self, dry_run=False, all_published=None, parent=None): '\n either publish changes or deletions, depending on\n whether this model is public or draft.\n \n public models will be examined to see if they need deleting\n and deleted if so.\n ' if self.is_public: raise PublishException('Cannot publish public model - publish should be called from draft model') if (self.pk is None): raise PublishException('Please save model before publishing') if (self.publish_state == Publishable.PUBLISH_DELETE): self.publish_deletions(dry_run=dry_run, all_published=all_published, parent=parent) return None else: return self.publish_changes(dry_run=dry_run, all_published=all_published, parent=parent)
def unpublish(self, dry_run=False): '\n unpublish models by deleting public model\n ' if self.is_public: raise UnpublishException('Cannot unpublish a public model - unpublish should be called from draft model') if (self.pk is None): raise UnpublishException('Please save the model before unpublishing') public_model = self.public if (public_model and (not dry_run)): self.public = None self.save() public_model.delete(mark_for_deletion=False) return public_model
-8,653,058,200,630,890,000
unpublish models by deleting public model
publish/models.py
unpublish
team-35/django-publish
python
def unpublish(self, dry_run=False): if self.is_public: raise UnpublishException('Cannot unpublish a public model - unpublish should be called from draft model') if (self.pk is None): raise UnpublishException('Please save the model before unpublishing') public_model = self.public if (public_model and (not dry_run)): self.public = None self.save() public_model.delete(mark_for_deletion=False) return public_model
def _get_through_model(self, field_object): '\n Get the "through" model associated with this field.\n Need to handle things differently for Django1.1 vs Django1.2\n In 1.1 through is a string and through_model has class\n In 1.2 through is the class\n ' through = field_object.rel.through if through: if isinstance(through, basestring): return field_object.rel.through_model return through return None
6,883,625,740,727,871,000
Get the "through" model associated with this field. Need to handle things differently for Django1.1 vs Django1.2 In 1.1 through is a string and through_model has class In 1.2 through is the class
publish/models.py
_get_through_model
team-35/django-publish
python
def _get_through_model(self, field_object): '\n Get the "through" model associated with this field.\n Need to handle things differently for Django1.1 vs Django1.2\n In 1.1 through is a string and through_model has class\n In 1.2 through is the class\n ' through = field_object.rel.through if through: if isinstance(through, basestring): return field_object.rel.through_model return through return None
def publish_changes(self, dry_run=False, all_published=None, parent=None): "\n publish changes to the model - basically copy all of it's content to another copy in the \n database.\n if you set dry_run=True nothing will be written to the database. combined with\n the all_published value one can therefore get information about what other models\n would be affected by this function\n " assert (not self.is_public), 'Cannot publish public model - publish should be called from draft model' assert (self.pk is not None), 'Please save model before publishing' if (all_published is None): all_published = NestedSet() if (self in all_published): return all_published.original(self).public all_published.add(self, parent=parent) self._pre_publish(dry_run, all_published) public_version = self.public if (not public_version): public_version = self.__class__(is_public=True) excluded_fields = self.PublishMeta.excluded_fields() reverse_fields_to_publish = self.PublishMeta.reverse_fields_to_publish() if self._changes_need_publishing(): for field in self._meta.fields: if (field.name in excluded_fields): continue value = getattr(self, field.name) if isinstance(field, RelatedField): related = field.rel.to if issubclass(related, Publishable): if (value is not None): value = value._get_public_or_publish(dry_run=dry_run, all_published=all_published, parent=self) if (not dry_run): publish_function = self.PublishMeta.find_publish_function(field.name, setattr) publish_function(public_version, field.name, value) if (not dry_run): public_version.save() self.public = public_version self.publish_state = Publishable.PUBLISH_DEFAULT self.save(mark_changed=False) for field in self._meta.many_to_many: name = field.name if (name in excluded_fields): continue m2m_manager = getattr(self, name) public_objs = list(m2m_manager.all()) (field_object, model, direct, m2m) = self._meta.get_field_by_name(name) through_model = self._get_through_model(field_object) if through_model: if issubclass(through_model, Publishable): m2m_reverse_name = field_object.m2m_reverse_name() for reverse_field in through_model._meta.fields: if (reverse_field.column == m2m_reverse_name): related_name = reverse_field.name related_field = getattr(through_model, related_name).field reverse_name = related_field.related.get_accessor_name() reverse_fields_to_publish.append(reverse_name) break continue related = field_object.rel.to if issubclass(related, Publishable): public_objs = [p._get_public_or_publish(dry_run=dry_run, all_published=all_published, parent=self) for p in public_objs] if (not dry_run): public_m2m_manager = getattr(public_version, name) old_objs = public_m2m_manager.exclude(pk__in=[p.pk for p in public_objs]) public_m2m_manager.remove(*old_objs) public_m2m_manager.add(*public_objs) for obj in self._meta.get_all_related_objects(): if issubclass(obj.model, Publishable): name = obj.get_accessor_name() if (name in excluded_fields): continue if (name not in reverse_fields_to_publish): continue if obj.field.rel.multiple: related_items = getattr(self, name).all() else: try: related_items = [getattr(self, name)] except obj.model.DoesNotExist: related_items = [] for related_item in related_items: related_item.publish(dry_run=dry_run, all_published=all_published, parent=self) if (self.public and (not dry_run)): if obj.field.rel.multiple: public_ids = [r.public_id for r in related_items] deleted_items = getattr(self.public, name).exclude(pk__in=public_ids) deleted_items.delete(mark_for_deletion=False) self._post_publish(dry_run, all_published) return public_version
4,687,598,719,083,719,000
publish changes to the model - basically copy all of it's content to another copy in the database. if you set dry_run=True nothing will be written to the database. combined with the all_published value one can therefore get information about what other models would be affected by this function
publish/models.py
publish_changes
team-35/django-publish
python
def publish_changes(self, dry_run=False, all_published=None, parent=None): "\n publish changes to the model - basically copy all of it's content to another copy in the \n database.\n if you set dry_run=True nothing will be written to the database. combined with\n the all_published value one can therefore get information about what other models\n would be affected by this function\n " assert (not self.is_public), 'Cannot publish public model - publish should be called from draft model' assert (self.pk is not None), 'Please save model before publishing' if (all_published is None): all_published = NestedSet() if (self in all_published): return all_published.original(self).public all_published.add(self, parent=parent) self._pre_publish(dry_run, all_published) public_version = self.public if (not public_version): public_version = self.__class__(is_public=True) excluded_fields = self.PublishMeta.excluded_fields() reverse_fields_to_publish = self.PublishMeta.reverse_fields_to_publish() if self._changes_need_publishing(): for field in self._meta.fields: if (field.name in excluded_fields): continue value = getattr(self, field.name) if isinstance(field, RelatedField): related = field.rel.to if issubclass(related, Publishable): if (value is not None): value = value._get_public_or_publish(dry_run=dry_run, all_published=all_published, parent=self) if (not dry_run): publish_function = self.PublishMeta.find_publish_function(field.name, setattr) publish_function(public_version, field.name, value) if (not dry_run): public_version.save() self.public = public_version self.publish_state = Publishable.PUBLISH_DEFAULT self.save(mark_changed=False) for field in self._meta.many_to_many: name = field.name if (name in excluded_fields): continue m2m_manager = getattr(self, name) public_objs = list(m2m_manager.all()) (field_object, model, direct, m2m) = self._meta.get_field_by_name(name) through_model = self._get_through_model(field_object) if through_model: if issubclass(through_model, Publishable): m2m_reverse_name = field_object.m2m_reverse_name() for reverse_field in through_model._meta.fields: if (reverse_field.column == m2m_reverse_name): related_name = reverse_field.name related_field = getattr(through_model, related_name).field reverse_name = related_field.related.get_accessor_name() reverse_fields_to_publish.append(reverse_name) break continue related = field_object.rel.to if issubclass(related, Publishable): public_objs = [p._get_public_or_publish(dry_run=dry_run, all_published=all_published, parent=self) for p in public_objs] if (not dry_run): public_m2m_manager = getattr(public_version, name) old_objs = public_m2m_manager.exclude(pk__in=[p.pk for p in public_objs]) public_m2m_manager.remove(*old_objs) public_m2m_manager.add(*public_objs) for obj in self._meta.get_all_related_objects(): if issubclass(obj.model, Publishable): name = obj.get_accessor_name() if (name in excluded_fields): continue if (name not in reverse_fields_to_publish): continue if obj.field.rel.multiple: related_items = getattr(self, name).all() else: try: related_items = [getattr(self, name)] except obj.model.DoesNotExist: related_items = [] for related_item in related_items: related_item.publish(dry_run=dry_run, all_published=all_published, parent=self) if (self.public and (not dry_run)): if obj.field.rel.multiple: public_ids = [r.public_id for r in related_items] deleted_items = getattr(self.public, name).exclude(pk__in=public_ids) deleted_items.delete(mark_for_deletion=False) self._post_publish(dry_run, all_published) return public_version
def publish_deletions(self, all_published=None, parent=None, dry_run=False): '\n actually delete models that have been marked for deletion\n ' if (self.publish_state != Publishable.PUBLISH_DELETE): return if (all_published is None): all_published = NestedSet() if (self in all_published): return all_published.add(self, parent=parent) self._pre_publish(dry_run, all_published, deleted=True) for related in self._meta.get_all_related_objects(): if (not issubclass(related.model, Publishable)): continue name = related.get_accessor_name() if (name in self.PublishMeta.excluded_fields()): continue try: instances = getattr(self, name).all() except AttributeError: instances = [getattr(self, name)] for instance in instances: instance.publish_deletions(all_published=all_published, parent=self, dry_run=dry_run) if (not dry_run): public = self.public self.delete(mark_for_deletion=False) if public: public.delete(mark_for_deletion=False) self._post_publish(dry_run, all_published, deleted=True)
2,755,846,888,489,863,000
actually delete models that have been marked for deletion
publish/models.py
publish_deletions
team-35/django-publish
python
def publish_deletions(self, all_published=None, parent=None, dry_run=False): if (self.publish_state != Publishable.PUBLISH_DELETE): return if (all_published is None): all_published = NestedSet() if (self in all_published): return all_published.add(self, parent=parent) self._pre_publish(dry_run, all_published, deleted=True) for related in self._meta.get_all_related_objects(): if (not issubclass(related.model, Publishable)): continue name = related.get_accessor_name() if (name in self.PublishMeta.excluded_fields()): continue try: instances = getattr(self, name).all() except AttributeError: instances = [getattr(self, name)] for instance in instances: instance.publish_deletions(all_published=all_published, parent=self, dry_run=dry_run) if (not dry_run): public = self.public self.delete(mark_for_deletion=False) if public: public.delete(mark_for_deletion=False) self._post_publish(dry_run, all_published, deleted=True)
@classmethod def find_publish_function(cls, field_name, default_function): '\n Search to see if there is a function to copy the given field over.\n Function should take same params as setattr()\n ' for clazz in cls.__mro__: publish_functions = getattr(clazz, 'publish_functions', {}) fn = publish_functions.get(field_name, None) if fn: return fn return default_function
6,173,340,320,263,965,000
Search to see if there is a function to copy the given field over. Function should take same params as setattr()
publish/models.py
find_publish_function
team-35/django-publish
python
@classmethod def find_publish_function(cls, field_name, default_function): '\n Search to see if there is a function to copy the given field over.\n Function should take same params as setattr()\n ' for clazz in cls.__mro__: publish_functions = getattr(clazz, 'publish_functions', {}) fn = publish_functions.get(field_name, None) if fn: return fn return default_function
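A sketch of registering a per-field copier that `find_publish_function` resolves through the class MRO; the `publish_functions` dict is implied by the row, while the `Page`/`slug` names are hypothetical.

def copy_slug(public_obj, field_name, value):
    # setattr-compatible: same three arguments as the default copier
    setattr(public_obj, field_name, value or 'untitled')

class Page(Publishable):
    class PublishMeta(Publishable.PublishMeta):
        publish_functions = {'slug': copy_slug}

# publish_changes() then calls copy_slug(public_version, 'slug', value)
# for that field instead of plain setattr.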
@app.route('/verifyconn', methods=['GET']) def verify_db_conn(): '\n Verifies the connection to the db.\n ' try: labs_conn = psycopg2.connect(user=os.environ.get('aws_db_user'), password=os.environ.get('aws_db_password'), host=os.environ.get('aws_db_host'), port=os.environ.get('aws_db_port'), database=os.environ.get('aws_db_name')) return 'Connection verified.' except: return 'Connection failed.' finally: if labs_conn: labs_conn.close()
19,852,801,566,805,988
Verifies the connection to the db.
routes.py
verify_db_conn
Minaramzey/Sauti-Africa-Market-Monitoring-DS
python
@app.route('/verifyconn', methods=['GET']) def verify_db_conn(): try: labs_conn = psycopg2.connect(user=os.environ.get('aws_db_user'), password=os.environ.get('aws_db_password'), host=os.environ.get('aws_db_host'), port=os.environ.get('aws_db_port'), database=os.environ.get('aws_db_name')) return 'Connection verified.' except: return 'Connection failed.' finally: if labs_conn: labs_conn.close()
def yield_result(value, output_name='result'): "Explicitly yield a Output.\n\n Args:\n value (Any): The value of the Output to yield.\n output_name (Optional[str]): The name of the Output to yield. Default: 'result'.\n\n " return MANAGER_FOR_NOTEBOOK_INSTANCE.yield_result(value, output_name)
6,880,240,434,030,309,000
Explicitly yield a Output. Args: value (Any): The value of the Output to yield. output_name (Optional[str]): The name of the Output to yield. Default: 'result'.
python_modules/dagstermill/dagstermill/__init__.py
yield_result
atsuhiro/dagster
python
def yield_result(value, output_name='result'): "Explicitly yield a Output.\n\n Args:\n value (Any): The value of the Output to yield.\n output_name (Optional[str]): The name of the Output to yield. Default: 'result'.\n\n " return MANAGER_FOR_NOTEBOOK_INSTANCE.yield_result(value, output_name)
def yield_event(dagster_event): 'Explicitly yield a dagster event such as a Materialization or ExpectationResult\n ' return MANAGER_FOR_NOTEBOOK_INSTANCE.yield_event(dagster_event)
-8,816,253,065,816,656,000
Explicitly yield a dagster event such as a Materialization or ExpectationResult
python_modules/dagstermill/dagstermill/__init__.py
yield_event
atsuhiro/dagster
python
def yield_event(dagster_event): return MANAGER_FOR_NOTEBOOK_INSTANCE.yield_event(dagster_event)
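Inside a dagstermill-executed notebook cell, the two rows above are the public entry points; a minimal sketch (the event type follows the docstring's mention of ExpectationResult):

import dagstermill
from dagster import ExpectationResult

dagstermill.yield_result(42, output_name='answer')
dagstermill.yield_event(ExpectationResult(success=True))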
def get_undelete_queryset(self, layouts): '\n Version trackers that have no references and whose content type is\n allowed by our field can be restored.\n ' VersionTracker = self.site.get_version_tracker_model() return VersionTracker.objects.orphan().filter(working_copy__content_type_id__in=layouts)
4,946,959,688,917,315,000
Version trackers that have no references and whose content type is allowed by our field can be restored.
widgy/contrib/widgy_mezzanine/admin.py
get_undelete_queryset
fusionbox/django-widgy
python
def get_undelete_queryset(self, layouts): '\n Version trackers that have no references and whose content type is\n allowed by our field can be restored.\n ' VersionTracker = self.site.get_version_tracker_model() return VersionTracker.objects.orphan().filter(working_copy__content_type_id__in=layouts)
def initialize_locale(): 'Set the locale to the users default setting\n and set ``_LOCALE_INITIALIZED`` to indicate whether\n ``get_text_width`` may run into trouble\n ' global _LOCALE_INITIALIZED, _LOCALE_INITIALIZATION_ERR if (_LOCALE_INITIALIZED is False): try: locale.setlocale(locale.LC_ALL, '') except locale.Error as e: _LOCALE_INITIALIZATION_ERR = e else: _LOCALE_INITIALIZED = True
-8,210,179,648,525,412,000
Set the locale to the users default setting and set ``_LOCALE_INITIALIZED`` to indicate whether ``get_text_width`` may run into trouble
dev/ref/display.py
initialize_locale
nrser/nansi.collections
python
def initialize_locale(): 'Set the locale to the users default setting\n and set ``_LOCALE_INITIALIZED`` to indicate whether\n ``get_text_width`` may run into trouble\n ' global _LOCALE_INITIALIZED, _LOCALE_INITIALIZATION_ERR if (_LOCALE_INITIALIZED is False): try: locale.setlocale(locale.LC_ALL, '') except locale.Error as e: _LOCALE_INITIALIZATION_ERR = e else: _LOCALE_INITIALIZED = True
def get_text_width(text): "Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the\n number of columns used to display a text string.\n\n We try first with ``wcswidth``, and fallback to iterating each\n character and using wcwidth individually, falling back to a value of 0\n for non-printable wide characters\n\n On Py2, this depends on ``locale.setlocale(locale.LC_ALL, '')``,\n that in the case of Ansible is done in ``bin/ansible``\n " if (not isinstance(text, text_type)): raise TypeError(('get_text_width requires text, not %s' % type(text))) if _LOCALE_INITIALIZATION_ERR: Display().warning(('An error occurred while calling ansible.utils.display.initialize_locale (%s). This may result in incorrectly calculated text widths that can cause Display to print incorrect line lengths' % _LOCALE_INITIALIZATION_ERR)) elif (not _LOCALE_INITIALIZED): Display().warning('ansible.utils.display.initialize_locale has not been called, this may result in incorrectly calculated text widths that can cause Display to print incorrect line lengths') try: width = _LIBC.wcswidth(text, _MAX_INT) except ctypes.ArgumentError: width = (- 1) if (width != (- 1)): return width width = 0 counter = 0 for c in text: counter += 1 if (c in (u'\x08', u'\x7f', u'\x94', u'\x1b')): width -= 1 counter -= 1 continue try: w = _LIBC.wcwidth(c) except ctypes.ArgumentError: w = (- 1) if (w == (- 1)): w = 0 width += w if ((width == 0) and counter and (not _LOCALE_INITIALIZED)): raise EnvironmentError(('ansible.utils.display.initialize_locale has not been called, and get_text_width could not calculate text width of %r' % text)) return (width if (width >= 0) else 0)
-3,609,822,956,789,150,000
Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the number of columns used to display a text string. We try first with ``wcswidth``, and fallback to iterating each character and using wcwidth individually, falling back to a value of 0 for non-printable wide characters On Py2, this depends on ``locale.setlocale(locale.LC_ALL, '')``, that in the case of Ansible is done in ``bin/ansible``
dev/ref/display.py
get_text_width
nrser/nansi.collections
python
def get_text_width(text): "Function that utilizes ``wcswidth`` or ``wcwidth`` to determine the\n number of columns used to display a text string.\n\n We try first with ``wcswidth``, and fallback to iterating each\n character and using wcwidth individually, falling back to a value of 0\n for non-printable wide characters\n\n On Py2, this depends on ``locale.setlocale(locale.LC_ALL, '')``,\n that in the case of Ansible is done in ``bin/ansible``\n " if (not isinstance(text, text_type)): raise TypeError(('get_text_width requires text, not %s' % type(text))) if _LOCALE_INITIALIZATION_ERR: Display().warning(('An error occurred while calling ansible.utils.display.initialize_locale (%s). This may result in incorrectly calculated text widths that can cause Display to print incorrect line lengths' % _LOCALE_INITIALIZATION_ERR)) elif (not _LOCALE_INITIALIZED): Display().warning('ansible.utils.display.initialize_locale has not been called, this may result in incorrectly calculated text widths that can cause Display to print incorrect line lengths') try: width = _LIBC.wcswidth(text, _MAX_INT) except ctypes.ArgumentError: width = (- 1) if (width != (- 1)): return width width = 0 counter = 0 for c in text: counter += 1 if (c in (u'\x08', u'\x7f', u'\x94', u'\x1b')): width -= 1 counter -= 1 continue try: w = _LIBC.wcwidth(c) except ctypes.ArgumentError: w = (- 1) if (w == (- 1)): w = 0 width += w if ((width == 0) and counter and (not _LOCALE_INITIALIZED)): raise EnvironmentError(('ansible.utils.display.initialize_locale has not been called, and get_text_width could not calculate text width of %r' % text)) return (width if (width >= 0) else 0)
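Assuming the module's libc bindings (`_LIBC`, `_MAX_INT`) are in place, column-width measurement from the two rows above looks like:

initialize_locale()                  # best effort; warns later if it failed
print(get_text_width(u'hello'))      # 5: one column per ASCII character
print(get_text_width(u'漢字'))       # 4: CJK characters are two columns wide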
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False, newline=True): ' Display a message to the user\n\n Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.\n ' nocolor = msg if (not log_only): has_newline = msg.endswith(u'\n') if has_newline: msg2 = msg[:(- 1)] else: msg2 = msg if color: msg2 = stringc(msg2, color) if (has_newline or newline): msg2 = (msg2 + u'\n') msg2 = to_bytes(msg2, encoding=self._output_encoding(stderr=stderr)) if (sys.version_info >= (3,)): msg2 = to_text(msg2, self._output_encoding(stderr=stderr), errors='replace') if (not stderr): fileobj = sys.stdout else: fileobj = sys.stderr fileobj.write(msg2) try: fileobj.flush() except IOError as e: if (e.errno != errno.EPIPE): raise if (logger and (not screen_only)): msg2 = to_bytes(nocolor.lstrip(u'\n')) if (sys.version_info >= (3,)): msg2 = to_text(msg2, self._output_encoding(stderr=stderr)) lvl = logging.INFO if color: try: lvl = color_to_log_level[color] except KeyError: raise AnsibleAssertionError(('Invalid color supplied to display: %s' % color)) logger.log(lvl, msg2)
3,703,657,802,151,350,000
Display a message to the user Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.
dev/ref/display.py
display
nrser/nansi.collections
python
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False, newline=True): ' Display a message to the user\n\n Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.\n ' nocolor = msg if (not log_only): has_newline = msg.endswith(u'\n') if has_newline: msg2 = msg[:(- 1)] else: msg2 = msg if color: msg2 = stringc(msg2, color) if (has_newline or newline): msg2 = (msg2 + u'\n') msg2 = to_bytes(msg2, encoding=self._output_encoding(stderr=stderr)) if (sys.version_info >= (3,)): msg2 = to_text(msg2, self._output_encoding(stderr=stderr), errors='replace') if (not stderr): fileobj = sys.stdout else: fileobj = sys.stderr fileobj.write(msg2) try: fileobj.flush() except IOError as e: if (e.errno != errno.EPIPE): raise if (logger and (not screen_only)): msg2 = to_bytes(nocolor.lstrip(u'\n')) if (sys.version_info >= (3,)): msg2 = to_text(msg2, self._output_encoding(stderr=stderr)) lvl = logging.INFO if color: try: lvl = color_to_log_level[color] except KeyError: raise AnsibleAssertionError(('Invalid color supplied to display: %s' % color)) logger.log(lvl, msg2)
def get_deprecation_message(self, msg, version=None, removed=False, date=None, collection_name=None): ' used to print out a deprecation message.' msg = msg.strip() if (msg and (msg[(- 1)] not in ['!', '?', '.'])): msg += '.' if (collection_name == 'ansible.builtin'): collection_name = 'ansible-base' if removed: header = '[DEPRECATED]: {0}'.format(msg) removal_fragment = 'This feature was removed' help_text = 'Please update your playbooks.' else: header = '[DEPRECATION WARNING]: {0}'.format(msg) removal_fragment = 'This feature will be removed' help_text = 'Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.' if collection_name: from_fragment = 'from {0}'.format(collection_name) else: from_fragment = '' if date: when = 'in a release after {0}.'.format(date) elif version: when = 'in version {0}.'.format(version) else: when = 'in a future release.' message_text = ' '.join((f for f in [header, removal_fragment, from_fragment, when, help_text] if f)) return message_text
7,637,081,892,293,506,000
used to print out a deprecation message.
dev/ref/display.py
get_deprecation_message
nrser/nansi.collections
python
def get_deprecation_message(self, msg, version=None, removed=False, date=None, collection_name=None): msg = msg.strip() if (msg and (msg[(- 1)] not in ['!', '?', '.'])): msg += '.' if (collection_name == 'ansible.builtin'): collection_name = 'ansible-base' if removed: header = '[DEPRECATED]: {0}'.format(msg) removal_fragment = 'This feature was removed' help_text = 'Please update your playbooks.' else: header = '[DEPRECATION WARNING]: {0}'.format(msg) removal_fragment = 'This feature will be removed' help_text = 'Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg.' if collection_name: from_fragment = 'from {0}'.format(collection_name) else: from_fragment = '' if date: when = 'in a release after {0}.'.format(date) elif version: when = 'in version {0}.'.format(version) else: when = 'in a future release.' message_text = ' '.join((f for f in [header, removal_fragment, from_fragment, when, help_text] if f)) return message_text
def banner(self, msg, color=None, cows=True): '\n Prints a header-looking line with cowsay or stars with length depending on terminal width (3 minimum)\n ' msg = to_text(msg) if (self.b_cowsay and cows): try: self.banner_cowsay(msg) return except OSError: self.warning('somebody cleverly deleted cowsay or something during the PB run. heh.') msg = msg.strip() try: star_len = (self.columns - get_text_width(msg)) except EnvironmentError: star_len = (self.columns - len(msg)) if (star_len <= 3): star_len = 3 stars = (u'*' * star_len) self.display((u'\n%s %s' % (msg, stars)), color=color)
-4,591,131,243,943,156,700
Prints a header-looking line with cowsay or stars with length depending on terminal width (3 minimum)
dev/ref/display.py
banner
nrser/nansi.collections
python
def banner(self, msg, color=None, cows=True): msg = to_text(msg) if (self.b_cowsay and cows): try: self.banner_cowsay(msg) return except OSError: self.warning('somebody cleverly deleted cowsay or something during the PB run. heh.') msg = msg.strip() try: star_len = (self.columns - get_text_width(msg)) except EnvironmentError: star_len = (self.columns - len(msg)) if (star_len <= 3): star_len = 3 stars = (u'*' * star_len) self.display((u'\n%s %s' % (msg, stars)), color=color)
def load_data(filename: str): '\n Load house prices dataset and preprocess data.\n Parameters\n ----------\n filename: str\n Path to house prices dataset\n\n Returns\n -------\n Design matrix and response vector (prices) - either as a single\n DataFrame or a Tuple[DataFrame, Series]\n ' df = pd.read_csv(filename) df = df.dropna() df = df[(df['price'] > 0)] df = df[(df['yr_built'] > 0)] df = df[(df['bedrooms'] < 20)] df['date'] = df['date'].apply((lambda x: int(str(x)[:4]))) df = df[(df['sqft_living'] <= df['sqft_lot'])] labels_to_drop = ['zipcode', 'lat', 'long', 'sqft_living15', 'sqft_lot15'] df.drop(columns=labels_to_drop, inplace=True) series = df.pop('price') return (df, series)
-5,054,370,289,392,005,000
Load house prices dataset and preprocess data. Parameters ---------- filename: str Path to house prices dataset Returns ------- Design matrix and response vector (prices) - either as a single DataFrame or a Tuple[DataFrame, Series]
exercises/house_price_prediction.py
load_data
shahaf-shafirstein/IML.HUJI
python
def load_data(filename: str): '\n Load house prices dataset and preprocess data.\n Parameters\n ----------\n filename: str\n Path to house prices dataset\n\n Returns\n -------\n Design matrix and response vector (prices) - either as a single\n DataFrame or a Tuple[DataFrame, Series]\n ' df = pd.read_csv(filename) df = df.dropna() df = df[(df['price'] > 0)] df = df[(df['yr_built'] > 0)] df = df[(df['bedrooms'] < 20)] df['date'] = df['date'].apply((lambda x: int(str(x)[:4]))) df = df[(df['sqft_living'] <= df['sqft_lot'])] labels_to_drop = ['zipcode', 'lat', 'long', 'sqft_living15', 'sqft_lot15'] df.drop(columns=labels_to_drop, inplace=True) series = df.pop('price') return (df, series)
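Usage sketch for the row above; the CSV path is hypothetical.

df, prices = load_data('datasets/house_prices.csv')   # hypothetical path
print(df.shape, prices.mean())                        # design matrix and mean price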
def feature_evaluation(X: pd.DataFrame, y: pd.Series, output_path: str='.') -> NoReturn: '\n Create scatter plot between each feature and the response.\n - Plot title specifies feature name\n - Plot title specifies Pearson Correlation between feature and response\n - Plot saved under given folder with file name including feature name\n Parameters\n ----------\n X : DataFrame of shape (n_samples, n_features)\n Design matrix of regression problem\n\n y : array-like of shape (n_samples, )\n Response vector to evaluate against\n\n output_path: str (default ".")\n Path to folder in which plots are saved\n ' correlations = np.array(y.size) features = list(X) for feature in features: cov = np.cov(y, X[feature]) std = (np.std(X[feature]) * np.std(y)) pearson_correlation = (cov[0][1] / std) np.append(correlations, pearson_correlation) fig = go.Figure() fig.add_trace(go.Scatter(x=X[feature], y=y, mode='markers')) fig.update_layout(title=((feature + ' - Pearson Correlation = ') + str(pearson_correlation)), xaxis_title=(feature + ' Feature values'), yaxis_title="House's Price") fig.write_image(f'{output_path}\{feature}.png', format='png')
-5,545,909,993,239,813,000
Create scatter plot between each feature and the response. - Plot title specifies feature name - Plot title specifies Pearson Correlation between feature and response - Plot saved under given folder with file name including feature name Parameters ---------- X : DataFrame of shape (n_samples, n_features) Design matrix of regression problem y : array-like of shape (n_samples, ) Response vector to evaluate against output_path: str (default ".") Path to folder in which plots are saved
exercises/house_price_prediction.py
feature_evaluation
shahaf-shafirstein/IML.HUJI
python
def feature_evaluation(X: pd.DataFrame, y: pd.Series, output_path: str='.') -> NoReturn: '\n Create scatter plot between each feature and the response.\n - Plot title specifies feature name\n - Plot title specifies Pearson Correlation between feature and response\n - Plot saved under given folder with file name including feature name\n Parameters\n ----------\n X : DataFrame of shape (n_samples, n_features)\n Design matrix of regression problem\n\n y : array-like of shape (n_samples, )\n Response vector to evaluate against\n\n output_path: str (default ".")\n Path to folder in which plots are saved\n ' correlations = np.array(y.size) features = list(X) for feature in features: cov = np.cov(y, X[feature]) std = (np.std(X[feature]) * np.std(y)) pearson_correlation = (cov[0][1] / std) np.append(correlations, pearson_correlation) fig = go.Figure() fig.add_trace(go.Scatter(x=X[feature], y=y, mode='markers')) fig.update_layout(title=((feature + ' - Pearson Correlation = ') + str(pearson_correlation)), xaxis_title=(feature + ' Feature values'), yaxis_title="House's Price") fig.write_image(f'{output_path}\{feature}.png', format='png')
@router.post('/wage_trade_transport_viz/') async def wage_trade_transport_viz(user_queried_citystates: list): '\n ### Path Parameter (POST from front-end)\n list: A list of city-states the user queried in this format: ["Albany, NY", "San Francisco, CA", "Chicago, IL"]\n\n ### Response\n JSON string of all figures to render with [react-plotly.js](https://plotly.com/javascript/react/)\n ' def create_db_uri(): env_path = '.env' load_dotenv(dotenv_path=env_path, verbose=True) DB_FLAVOR = os.getenv('DB_FLAVOR') DB_PYTHON_LIBRARY = os.getenv('DB_PYTHON_LIBRARY') DB_HOST = os.getenv('DB_HOST') DB_NAME = os.getenv('DB_NAME') DB_USER = os.getenv('DB_USER') DB_PASS = os.getenv('DB_PASS') DB_PORT = os.getenv('DB_PORT') DB_URI = ((((((((((((DB_FLAVOR + '+') + DB_PYTHON_LIBRARY) + '://') + DB_USER) + ':') + DB_PASS) + '@') + DB_HOST) + ':') + DB_PORT) + '/') + DB_NAME) return DB_URI DB_URI = create_db_uri() engine = create_engine(DB_URI, echo=True) def cc_json(): '\n Opens city-county.json file, converts it to a JSON object and returns it\n ' with open(join_path('app', 'db', 'city-county.json')) as f: data_to_encode = json.load(f) encoded_json = jsonable_encoder(data_to_encode) county_city_json = json.dumps(encoded_json) return county_city_json cc = cc_json() cc = json.loads(cc) def get_county_from_city(city_states_list): county_list = [] for city_state in city_states_list: county_list.append(cc[city_state]) return county_list county_list = get_county_from_city(user_queried_citystates) def sql_query(county_list): "\n Create a SQL query to grab only the user queried cities' data from the jobs table in the DB.\n Output: subset grouped DF by month and city with only queried cities\n " list_length = len(county_list) if (list_length == 1): county1 = county_list[0] query1 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s)' subsetJ = pd.read_sql(sql=query1, columns='county_state', params={'county1': county1}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 2): county1 = county_list[0] county2 = county_list[1] query2 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s, %(county2)s)' subsetJ = pd.read_sql(sql=query2, columns='county_state', params={'county1': county1, 'county2': county2}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 3): county1 = county_list[0] county2 = county_list[1] county3 = county_list[2] query3 = 'SELECT * FROM jobs WHERE "county_state" IN (%(county1)s, %(county2)s, %(county3)s)' subsetJ = pd.read_sql(sql=query3, columns='county_state', params={'county1': county1, 'county2': county2, 'county3': county3}, con=engine, parse_dates=['created_at', 'updated_at']) else: raise Exception('Please pass a list of 1-3 City-States') return subsetJ subsetJ = sql_query(county_list) industry_list = ['Goods-producing', 'Natural resources and mining', 'Construction', 'Manufacturing', 'Service-providing', 'Trade, transportation, and utilities', 'Information', 'Financial activities', 'Professional and business services', 'Education and health services', 'Leisure and hospitality', 'Other services', 'Unclassified'] def create_wage_plots(df, industry_list, industry_name): subsetJ['County, State'] = subsetJ['county_state'] subsetJ['date'] = pd.PeriodIndex(year=subsetJ['Year'], quarter=subsetJ['Qtr']).to_timestamp() industry = subsetJ[(subsetJ['Industry'] == industry_name)] industry = industry.sort_values('date') fig = px.line(industry, x='date', y='Average Weekly Wage', labels={'Average Weekly Wage': 'Average Weekly Wage \n($)', 'date': 'Date'}, color='County, State', title=f'{industry_name}: Average Weekly Wage').for_each_trace((lambda t: t.update(name=t.name.split('=')[(- 1)]))) fig.update_layout(legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1), xaxis=dict(tickmode='array', tick0=1, dtick=1, tickvals=[2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020], ticktext=['2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020'])) fig.write_image('fig1.png') jobs_json = fig.to_json() return jobs_json wage_json = create_wage_plots(subsetJ, industry_list, industry_list[5]) return wage_json
2,200,486,331,255,627,000
### Path Parameter (POST from front-end) list: A list of city-states the user queried in this format: ["Albany, NY", "San Francisco, CA", "Chicago, IL"] ### Response JSON string of all figures to render with [react-plotly.js](https://plotly.com/javascript/react/)
project/app/api/wage_trade_transport_viz.py
wage_trade_transport_viz
Lambda-School-Labs/Labs26-Citrics-DS-TeamC
python
@router.post('/wage_trade_transport_viz/') async def wage_trade_transport_viz(user_queried_citystates: list): '\n ### Path Parameter (POST from front-end)\n list: A list of city-states the user queried in this format: ["Albany, NY", "San Francisco, CA", "Chicago, IL"]\n\n ### Response\n JSON string of all figures to render with [react-plotly.js](https://plotly.com/javascript/react/)\n ' def create_db_uri(): env_path = '.env' load_dotenv(dotenv_path=env_path, verbose=True) DB_FLAVOR = os.getenv('DB_FLAVOR') DB_PYTHON_LIBRARY = os.getenv('DB_PYTHON_LIBRARY') DB_HOST = os.getenv('DB_HOST') DB_NAME = os.getenv('DB_NAME') DB_USER = os.getenv('DB_USER') DB_PASS = os.getenv('DB_PASS') DB_PORT = os.getenv('DB_PORT') DB_URI = ((((((((((((DB_FLAVOR + '+') + DB_PYTHON_LIBRARY) + '://') + DB_USER) + ':') + DB_PASS) + '@') + DB_HOST) + ':') + DB_PORT) + '/') + DB_NAME) return DB_URI DB_URI = create_db_uri() engine = create_engine(DB_URI, echo=True) def cc_json(): '\n Opens city-county.json file, converts it to a JSON object and returns it\n ' with open(join_path('app', 'db', 'city-county.json')) as f: data_to_encode = json.load(f) encoded_json = jsonable_encoder(data_to_encode) county_city_json = json.dumps(encoded_json) return county_city_json cc = cc_json() cc = json.loads(cc) def get_county_from_city(city_states_list): county_list = [] for city_state in city_states_list: county_list.append(cc[city_state]) return county_list county_list = get_county_from_city(user_queried_citystates) def sql_query(county_list): "\n Create a SQL query to grab only the user queried cities' data from the jobs table in the DB.\n Output: subset grouped DF by month and city with only queried cities\n " list_length = len(county_list) if (list_length == 1): county1 = county_list[0] query1 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s)' subsetJ = pd.read_sql(sql=query1, columns='county_state', params={'county1': county1}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 2): county1 = county_list[0] county2 = county_list[1] query2 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s, %(county2)s)' subsetJ = pd.read_sql(sql=query2, columns='county_state', params={'county1': county1, 'county2': county2}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 3): county1 = county_list[0] county2 = county_list[1] county3 = county_list[2] query3 = 'SELECT * FROM jobs WHERE "county_state" IN (%(county1)s, %(county2)s, %(county3)s)' subsetJ = pd.read_sql(sql=query3, columns='county_state', params={'county1': county1, 'county2': county2, 'county3': county3}, con=engine, parse_dates=['created_at', 'updated_at']) else: raise Exception('Please pass a list of 1-3 City-States') return subsetJ subsetJ = sql_query(county_list) industry_list = ['Goods-producing', 'Natural resources and mining', 'Construction', 'Manufacturing', 'Service-providing', 'Trade, transportation, and utilities', 'Information', 'Financial activities', 'Professional and business services', 'Education and health services', 'Leisure and hospitality', 'Other services', 'Unclassified'] def create_wage_plots(df, industry_list, industry_name): subsetJ['County, State'] = subsetJ['county_state'] subsetJ['date'] = pd.PeriodIndex(year=subsetJ['Year'], quarter=subsetJ['Qtr']).to_timestamp() industry = subsetJ[(subsetJ['Industry'] == industry_name)] industry = industry.sort_values('date') fig = px.line(industry, x='date', y='Average Weekly Wage', labels={'Average Weekly Wage': 'Average Weekly Wage \n($)', 'date': 'Date'}, color='County, State', title=f'{industry_name}: Average Weekly Wage').for_each_trace((lambda t: t.update(name=t.name.split('=')[(- 1)]))) fig.update_layout(legend=dict(orientation='h', yanchor='bottom', y=1.02, xanchor='right', x=1), xaxis=dict(tickmode='array', tick0=1, dtick=1, tickvals=[2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020], ticktext=['2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020'])) fig.write_image('fig1.png') jobs_json = fig.to_json() return jobs_json wage_json = create_wage_plots(subsetJ, industry_list, industry_list[5]) return wage_json
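The twelve-term concatenation in create_db_uri is fragile: a single missing separator breaks the URI, and special characters in the password are not escaped. A sketch of an equivalent construction, assuming SQLAlchemy 1.4+ where sqlalchemy.engine.URL.create is available and the same environment variables are set:

import os
from sqlalchemy.engine import URL

# Builds the same flavor+driver://user:pass@host:port/name URI, with escaping handled by SQLAlchemy.
db_url = URL.create(
    drivername=f"{os.getenv('DB_FLAVOR')}+{os.getenv('DB_PYTHON_LIBRARY')}",
    username=os.getenv('DB_USER'),
    password=os.getenv('DB_PASS'),
    host=os.getenv('DB_HOST'),
    port=int(os.getenv('DB_PORT')),  # assumes DB_PORT is set to a numeric string
    database=os.getenv('DB_NAME'),
)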
def cc_json(): '\n Opens city-county.json file, converts it to a JSON object and returns it\n ' with open(join_path('app', 'db', 'city-county.json')) as f: data_to_encode = json.load(f) encoded_json = jsonable_encoder(data_to_encode) county_city_json = json.dumps(encoded_json) return county_city_json
8,053,087,424,007,477,000
Opens city-county.json file, converts it to a JSON object and returns it
project/app/api/wage_trade_transport_viz.py
cc_json
Lambda-School-Labs/Labs26-Citrics-DS-TeamC
python
def cc_json(): '\n \n ' with open(join_path('app', 'db', 'city-county.json')) as f: data_to_encode = json.load(f) encoded_json = jsonable_encoder(data_to_encode) county_city_json = json.dumps(encoded_json) return county_city_json
def sql_query(county_list): "\n Create a SQL query to grab only the user queried cities' data from the jobs table in the DB.\n Output: subset grouped DF by month and city with only queried cities\n " list_length = len(county_list) if (list_length == 1): county1 = county_list[0] query1 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s)' subsetJ = pd.read_sql(sql=query1, columns='county_state', params={'county1': county1}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 2): county1 = county_list[0] county2 = county_list[1] query2 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s, %(county2)s)' subsetJ = pd.read_sql(sql=query2, columns='county_state', params={'county1': county1, 'county2': county2}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 3): county1 = county_list[0] county2 = county_list[1] county3 = county_list[2] query3 = 'SELECT * FROM jobs WHERE "county_state" IN (%(county1)s, %(county2)s, %(county3)s)' subsetJ = pd.read_sql(sql=query3, columns='county_state', params={'county1': county1, 'county2': county2, 'county3': county3}, con=engine, parse_dates=['created_at', 'updated_at']) else: raise Exception('Please pass a list of 1-3 City-States') return subsetJ
1,893,549,275,785,273,600
Create a SQL query to grab only the user queried cities' data from the jobs table in the DB. Output: subset grouped DF by month and city with only queried cities
project/app/api/wage_trade_transport_viz.py
sql_query
Lambda-School-Labs/Labs26-Citrics-DS-TeamC
python
def sql_query(county_list): "\n Create a SQL query to grab only the user queried cities' data from the jobs table in the DB.\n Output: subset grouped DF by month and city with only queried cities\n " list_length = len(county_list) if (list_length == 1): county1 = county_list[0] query1 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s)' subsetJ = pd.read_sql(sql=query1, columns='county_state', params={'county1': county1}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 2): county1 = county_list[0] county2 = county_list[1] query2 = 'SELECT * FROM jobs WHERE county_state IN (%(county1)s, %(county2)s)' subsetJ = pd.read_sql(sql=query2, columns='county_state', params={'county1': county1, 'county2': county2}, con=engine, parse_dates=['created_at', 'updated_at']) elif (list_length == 3): county1 = county_list[0] county2 = county_list[1] county3 = county_list[2] query3 = 'SELECT * FROM jobs WHERE "county_state" IN (%(county1)s, %(county2)s, %(county3)s)' subsetJ = pd.read_sql(sql=query3, columns='county_state', params={'county1': county1, 'county2': county2, 'county3': county3}, con=engine, parse_dates=['created_at', 'updated_at']) else: raise Exception('Please pass a list of 1-3 City-States') return subsetJ
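The three branches of sql_query differ only in the number of IN-clause placeholders; a sketch of one way to build the clause dynamically while keeping the query parameterized (the name sql_query_dynamic is illustrative, not from the source):

import pandas as pd

def sql_query_dynamic(county_list, engine):
    # One named pyformat placeholder per county: %(county0)s, %(county1)s, ...
    if not 1 <= len(county_list) <= 3:
        raise Exception('Please pass a list of 1-3 City-States')
    placeholders = ', '.join(f'%(county{i})s' for i in range(len(county_list)))
    query = f'SELECT * FROM jobs WHERE county_state IN ({placeholders})'
    params = {f'county{i}': county for i, county in enumerate(county_list)}
    return pd.read_sql(sql=query, params=params, con=engine,
                       parse_dates=['created_at', 'updated_at'])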
def route(self, rule, **options): "A decorator that is used to register a view function for a\n given URL rule. This does the same thing as :meth:`add_url_rule`\n but is intended for decorator usage::\n @app.route('/')\n def index():\n return 'Hello World'\n For more information refer to :ref:`url-route-registrations`.\n :param rule: the URL rule as string\n :param endpoint: the endpoint for the registered URL rule. Flask\n itself assumes the name of the view function as\n endpoint\n :param options: the options to be forwarded to the underlying\n :class:`~werkzeug.routing.Rule` object. A change\n to Werkzeug is handling of method options. methods\n is a list of methods this rule should be limited\n to (``GET``, ``POST`` etc.). By default a rule\n just listens for ``GET`` (and implicitly ``HEAD``).\n Starting with Flask 0.6, ``OPTIONS`` is implicitly\n added and handled by the standard request handling.\n " def decorator(f): endpoint = options.pop('endpoint', None) self.add_update_rule(rule, endpoint, f, **options) return f return decorator
-9,106,071,299,253,898,000
A decorator that is used to register a view function for a given URL rule. This does the same thing as :meth:`add_url_rule` but is intended for decorator usage:: @app.route('/') def index(): return 'Hello World' For more information refer to :ref:`url-route-registrations`. :param rule: the URL rule as string :param endpoint: the endpoint for the registered URL rule. Flask itself assumes the name of the view function as endpoint :param options: the options to be forwarded to the underlying :class:`~werkzeug.routing.Rule` object. A change to Werkzeug is handling of method options. methods is a list of methods this rule should be limited to (``GET``, ``POST`` etc.). By default a rule just listens for ``GET`` (and implicitly ``HEAD``). Starting with Flask 0.6, ``OPTIONS`` is implicitly added and handled by the standard request handling.
telebot/__init__.py
route
KyleJamesWalker/telebot
python
def route(self, rule, **options): "A decorator that is used to register a view function for a\n given URL rule. This does the same thing as :meth:`add_url_rule`\n but is intended for decorator usage::\n @app.route('/')\n def index():\n return 'Hello World'\n For more information refer to :ref:`url-route-registrations`.\n :param rule: the URL rule as string\n :param endpoint: the endpoint for the registered URL rule. Flask\n itself assumes the name of the view function as\n endpoint\n :param options: the options to be forwarded to the underlying\n :class:`~werkzeug.routing.Rule` object. A change\n to Werkzeug is handling of method options. methods\n is a list of methods this rule should be limited\n to (``GET``, ``POST`` etc.). By default a rule\n just listens for ``GET`` (and implicitly ``HEAD``).\n Starting with Flask 0.6, ``OPTIONS`` is implicitly\n added and handled by the standard request handling.\n " def decorator(f): endpoint = options.pop('endpoint', None) self.add_update_rule(rule, endpoint, f, **options) return f return decorator
def _start(self): 'Requests bot information based on current api_key, and sets\n self.whoami to a dictionary with the username, first_name, and id of the\n configured bot.\n\n ' if (self.whoami is None): me = self.get_me() if me.get('ok', False): self.whoami = me['result'] else: raise ValueError('Bot Cannot request information, check api_key')
843,249,728,321,361,500
Requests bot information based on current api_key, and sets self.whoami to a dictionary with the username, first_name, and id of the configured bot.
telebot/__init__.py
_start
KyleJamesWalker/telebot
python
def _start(self): 'Requests bot information based on current api_key, and sets\n self.whoami to a dictionary with the username, first_name, and id of the\n configured bot.\n\n ' if (self.whoami is None): me = self.get_me() if me.get('ok', False): self.whoami = me['result'] else: raise ValueError('Bot Cannot request information, check api_key')
def poll(self, offset=None, poll_timeout=600, cooldown=60, debug=False): 'These should also be in the config section, but some are here for\n overrides\n\n ' if (self.config['api_key'] is None): raise ValueError('config api_key is undefined') if (offset or self.config.get('offset', None)): self.offset = (offset or self.config.get('offset', None)) self._start() while True: try: response = self.get_updates(poll_timeout, self.offset) if (response.get('ok', False) is False): raise ValueError(response['error']) else: self.process_updates(response) except Exception as e: print('Error: Unknown Exception') print(e) if debug: raise e else: time.sleep(cooldown)
-5,501,767,654,285,255,000
These should also be in the config section, but some are here for overrides
telebot/__init__.py
poll
KyleJamesWalker/telebot
python
def poll(self, offset=None, poll_timeout=600, cooldown=60, debug=False): 'These should also be in the config section, but some are here for\n overrides\n\n ' if (self.config['api_key'] is None): raise ValueError('config api_key is undefined') if (offset or self.config.get('offset', None)): self.offset = (offset or self.config.get('offset', None)) self._start() while True: try: response = self.get_updates(poll_timeout, self.offset) if (response.get('ok', False) is False): raise ValueError(response['error']) else: self.process_updates(response) except Exception as e: print('Error: Unknown Exception') print(e) if debug: raise e else: time.sleep(cooldown)
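A hypothetical sketch of wiring these methods together; the TeleBot class name, its config argument, and the handler signature are assumptions inferred from the records above, not confirmed API:

# Hypothetical: assumes a TeleBot class exposing the route()/poll() methods shown above.
bot = TeleBot({'api_key': 'your-telegram-api-key', 'offset': None})  # placeholder config

@bot.route('/start')
def start(message, *args):  # handler signature is an assumption
    return 'Hello!'

bot.poll(poll_timeout=120, cooldown=30, debug=True)  # blocks, looping on get_updates()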
def get_me(self): "A simple method for testing your bot's auth token. Requires no\n parameters. Returns basic information about the bot in the form of a\n `User` object.\n\n " return self._bot_cmd(requests.get, 'getMe')
6,949,463,330,401,890,000
A simple method for testing your bot's auth token. Requires no parameters. Returns basic information about the bot in the form of a `User` object.
telebot/__init__.py
get_me
KyleJamesWalker/telebot
python
def get_me(self): "A simple method for testing your bot's auth token. Requires no\n parameters. Returns basic information about the bot in the form of a\n `User` object.\n\n " return self._bot_cmd(requests.get, 'getMe')
def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, end_ip_address: Optional[pulumi.Input[str]]=None, firewall_rule_name: Optional[pulumi.Input[str]]=None, resource_group_name: Optional[pulumi.Input[str]]=None, server_name: Optional[pulumi.Input[str]]=None, start_ip_address: Optional[pulumi.Input[str]]=None, __props__=None, __name__=None, __opts__=None): "\n Represents a server firewall rule.\n API Version: 2014-04-01.\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] end_ip_address: The end IP address of the firewall rule. Must be IPv4 format. Must be greater than or equal to startIpAddress. Use value '0.0.0.0' to represent all Azure-internal IP addresses.\n :param pulumi.Input[str] firewall_rule_name: The name of the firewall rule.\n :param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.\n :param pulumi.Input[str] server_name: The name of the server.\n :param pulumi.Input[str] start_ip_address: The start IP address of the firewall rule. Must be IPv4 format. Use value '0.0.0.0' to represent all Azure-internal IP addresses.\n " if (__name__ is not None): warnings.warn('explicit use of __name__ is deprecated', DeprecationWarning) resource_name = __name__ if (__opts__ is not None): warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if (opts is None): opts = pulumi.ResourceOptions() if (not isinstance(opts, pulumi.ResourceOptions)): raise TypeError('Expected resource options to be a ResourceOptions instance') if (opts.version is None): opts.version = _utilities.get_version() if (opts.id is None): if (__props__ is not None): raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() if ((end_ip_address is None) and (not opts.urn)): raise TypeError("Missing required property 'end_ip_address'") __props__['end_ip_address'] = end_ip_address __props__['firewall_rule_name'] = firewall_rule_name if ((resource_group_name is None) and (not opts.urn)): raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name if ((server_name is None) and (not opts.urn)): raise TypeError("Missing required property 'server_name'") __props__['server_name'] = server_name if ((start_ip_address is None) and (not opts.urn)): raise TypeError("Missing required property 'start_ip_address'") __props__['start_ip_address'] = start_ip_address __props__['kind'] = None __props__['location'] = None __props__['name'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_='azure-nextgen:sql/latest:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20140401:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20150501preview:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20200202preview:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20200801preview:FirewallRule')]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(FirewallRule, __self__).__init__('azure-nextgen:sql:FirewallRule', resource_name, __props__, opts)
7,559,605,642,026,564,000
Represents a server firewall rule. API Version: 2014-04-01. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] end_ip_address: The end IP address of the firewall rule. Must be IPv4 format. Must be greater than or equal to startIpAddress. Use value '0.0.0.0' to represent all Azure-internal IP addresses. :param pulumi.Input[str] firewall_rule_name: The name of the firewall rule. :param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. :param pulumi.Input[str] server_name: The name of the server. :param pulumi.Input[str] start_ip_address: The start IP address of the firewall rule. Must be IPv4 format. Use value '0.0.0.0' to represent all Azure-internal IP addresses.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
__init__
pulumi/pulumi-azure-nextgen
python
def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions]=None, end_ip_address: Optional[pulumi.Input[str]]=None, firewall_rule_name: Optional[pulumi.Input[str]]=None, resource_group_name: Optional[pulumi.Input[str]]=None, server_name: Optional[pulumi.Input[str]]=None, start_ip_address: Optional[pulumi.Input[str]]=None, __props__=None, __name__=None, __opts__=None): "\n Represents a server firewall rule.\n API Version: 2014-04-01.\n\n :param str resource_name: The name of the resource.\n :param pulumi.ResourceOptions opts: Options for the resource.\n :param pulumi.Input[str] end_ip_address: The end IP address of the firewall rule. Must be IPv4 format. Must be greater than or equal to startIpAddress. Use value '0.0.0.0' to represent all Azure-internal IP addresses.\n :param pulumi.Input[str] firewall_rule_name: The name of the firewall rule.\n :param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.\n :param pulumi.Input[str] server_name: The name of the server.\n :param pulumi.Input[str] start_ip_address: The start IP address of the firewall rule. Must be IPv4 format. Use value '0.0.0.0' to represent all Azure-internal IP addresses.\n " if (__name__ is not None): warnings.warn('explicit use of __name__ is deprecated', DeprecationWarning) resource_name = __name__ if (__opts__ is not None): warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if (opts is None): opts = pulumi.ResourceOptions() if (not isinstance(opts, pulumi.ResourceOptions)): raise TypeError('Expected resource options to be a ResourceOptions instance') if (opts.version is None): opts.version = _utilities.get_version() if (opts.id is None): if (__props__ is not None): raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() if ((end_ip_address is None) and (not opts.urn)): raise TypeError("Missing required property 'end_ip_address'") __props__['end_ip_address'] = end_ip_address __props__['firewall_rule_name'] = firewall_rule_name if ((resource_group_name is None) and (not opts.urn)): raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name if ((server_name is None) and (not opts.urn)): raise TypeError("Missing required property 'server_name'") __props__['server_name'] = server_name if ((start_ip_address is None) and (not opts.urn)): raise TypeError("Missing required property 'start_ip_address'") __props__['start_ip_address'] = start_ip_address __props__['kind'] = None __props__['location'] = None __props__['name'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_='azure-nextgen:sql/latest:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20140401:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20150501preview:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20200202preview:FirewallRule'), pulumi.Alias(type_='azure-nextgen:sql/v20200801preview:FirewallRule')]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(FirewallRule, __self__).__init__('azure-nextgen:sql:FirewallRule', resource_name, __props__, opts)
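A minimal Pulumi program sketch using the constructor above; the resource names, resource group, server, and IP range are all placeholders:

import pulumi
import pulumi_azure_nextgen.sql as sql

# Placeholders: the resource group and server are assumed to exist already.
rule = sql.FirewallRule(
    'office-rule',
    resource_group_name='my-resource-group',
    server_name='my-sql-server',
    firewall_rule_name='office-rule',
    start_ip_address='203.0.113.0',
    end_ip_address='203.0.113.255',
)
pulumi.export('firewall_rule_name', rule.name)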
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None) -> 'FirewallRule': "\n Get an existing FirewallRule resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n " opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() return FirewallRule(resource_name, opts=opts, __props__=__props__)
780,077,911,701,839,000
Get an existing FirewallRule resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
get
pulumi/pulumi-azure-nextgen
python
@staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions]=None) -> 'FirewallRule': "\n Get an existing FirewallRule resource's state with the given name, id, and optional extra\n properties used to qualify the lookup.\n\n :param str resource_name: The unique name of the resulting resource.\n :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.\n :param pulumi.ResourceOptions opts: Options for the resource.\n " opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() return FirewallRule(resource_name, opts=opts, __props__=__props__)
@property @pulumi.getter(name='endIpAddress') def end_ip_address(self) -> pulumi.Output[str]: "\n The end IP address of the firewall rule. Must be IPv4 format. Must be greater than or equal to startIpAddress. Use value '0.0.0.0' to represent all Azure-internal IP addresses.\n " return pulumi.get(self, 'end_ip_address')
-6,011,145,057,549,834,000
The end IP address of the firewall rule. Must be IPv4 format. Must be greater than or equal to startIpAddress. Use value '0.0.0.0' to represent all Azure-internal IP addresses.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
end_ip_address
pulumi/pulumi-azure-nextgen
python
@property @pulumi.getter(name='endIpAddress') def end_ip_address(self) -> pulumi.Output[str]: "\n \n " return pulumi.get(self, 'end_ip_address')
@property @pulumi.getter def kind(self) -> pulumi.Output[str]: '\n Kind of server that contains this firewall rule.\n ' return pulumi.get(self, 'kind')
-2,483,314,717,807,780,400
Kind of server that contains this firewall rule.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
kind
pulumi/pulumi-azure-nextgen
python
@property @pulumi.getter def kind(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'kind')
@property @pulumi.getter def location(self) -> pulumi.Output[str]: '\n Location of the server that contains this firewall rule.\n ' return pulumi.get(self, 'location')
-5,386,521,884,093,667,000
Location of the server that contains this firewall rule.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
location
pulumi/pulumi-azure-nextgen
python
@property @pulumi.getter def location(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'location')
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n Resource name.\n ' return pulumi.get(self, 'name')
4,695,236,134,441,039,000
Resource name.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
name
pulumi/pulumi-azure-nextgen
python
@property @pulumi.getter def name(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'name')
@property @pulumi.getter(name='startIpAddress') def start_ip_address(self) -> pulumi.Output[str]: "\n The start IP address of the firewall rule. Must be IPv4 format. Use value '0.0.0.0' to represent all Azure-internal IP addresses.\n " return pulumi.get(self, 'start_ip_address')
5,789,904,896,928,772,000
The start IP address of the firewall rule. Must be IPv4 format. Use value '0.0.0.0' to represent all Azure-internal IP addresses.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
start_ip_address
pulumi/pulumi-azure-nextgen
python
@property @pulumi.getter(name='startIpAddress') def start_ip_address(self) -> pulumi.Output[str]: "\n \n " return pulumi.get(self, 'start_ip_address')
@property @pulumi.getter def type(self) -> pulumi.Output[str]: '\n Resource type.\n ' return pulumi.get(self, 'type')
2,132,950,812,122,862,800
Resource type.
sdk/python/pulumi_azure_nextgen/sql/firewall_rule.py
type
pulumi/pulumi-azure-nextgen
python
@property @pulumi.getter def type(self) -> pulumi.Output[str]: '\n \n ' return pulumi.get(self, 'type')
def test_reader_macro_error(): 'Check if we get the correct error with a wrong dispatch character' try: macroexpand(tokenize("(dispatch_reader_macro '- '())")[0], __name__) except HyTypeError as e: assert ('with the character `-`' in str(e))
2,269,334,320,623,377,400
Check if we get the correct error with a wrong dispatch character
tests/macros/test_reader_macros.py
test_reader_macro_error
ALSchwalm/hy
python
def test_reader_macro_error(): try: macroexpand(tokenize("(dispatch_reader_macro '- '())")[0], __name__) except HyTypeError as e: assert ('with the character `-`' in str(e))
def testSize(self): 'StringStream.size()' stream = antlr3.StringStream('foo') self.failUnlessEqual(stream.size(), 3)
-322,479,714,288,370,940
StringStream.size()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testSize
MichaelReiter/CSC435
python
def testSize(self): stream = antlr3.StringStream('foo') self.failUnlessEqual(stream.size(), 3)
def testIndex(self): 'StringStream.index()' stream = antlr3.StringStream('foo') self.failUnlessEqual(stream.index(), 0)
-5,824,447,261,540,887,000
StringStream.index()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testIndex
MichaelReiter/CSC435
python
def testIndex(self): stream = antlr3.StringStream('foo') self.failUnlessEqual(stream.index(), 0)
def testConsume(self): 'StringStream.consume()' stream = antlr3.StringStream('foo\nbar') stream.consume() self.failUnlessEqual(stream.index(), 1) self.failUnlessEqual(stream.charPositionInLine, 1) self.failUnlessEqual(stream.line, 1) stream.consume() self.failUnlessEqual(stream.index(), 2) self.failUnlessEqual(stream.charPositionInLine, 2) self.failUnlessEqual(stream.line, 1) stream.consume() self.failUnlessEqual(stream.index(), 3) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 1) stream.consume() self.failUnlessEqual(stream.index(), 4) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 5) self.failUnlessEqual(stream.charPositionInLine, 1) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 6) self.failUnlessEqual(stream.charPositionInLine, 2) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 7) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 7) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 7) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 2)
5,337,832,459,246,330,000
StringStream.consume()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testConsume
MichaelReiter/CSC435
python
def testConsume(self): stream = antlr3.StringStream('foo\nbar') stream.consume() self.failUnlessEqual(stream.index(), 1) self.failUnlessEqual(stream.charPositionInLine, 1) self.failUnlessEqual(stream.line, 1) stream.consume() self.failUnlessEqual(stream.index(), 2) self.failUnlessEqual(stream.charPositionInLine, 2) self.failUnlessEqual(stream.line, 1) stream.consume() self.failUnlessEqual(stream.index(), 3) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 1) stream.consume() self.failUnlessEqual(stream.index(), 4) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 5) self.failUnlessEqual(stream.charPositionInLine, 1) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 6) self.failUnlessEqual(stream.charPositionInLine, 2) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 7) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 7) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 2) stream.consume() self.failUnlessEqual(stream.index(), 7) self.failUnlessEqual(stream.charPositionInLine, 3) self.failUnlessEqual(stream.line, 2)
def testReset(self): 'StringStream.reset()' stream = antlr3.StringStream('foo') stream.consume() stream.consume() stream.reset() self.failUnlessEqual(stream.index(), 0) self.failUnlessEqual(stream.line, 1) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.LA(1), 'f')
2,610,206,022,317,359,000
StringStream.reset()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testReset
MichaelReiter/CSC435
python
def testReset(self): stream = antlr3.StringStream('foo') stream.consume() stream.consume() stream.reset() self.failUnlessEqual(stream.index(), 0) self.failUnlessEqual(stream.line, 1) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.LA(1), 'f')
def testLA(self): 'StringStream.LA()' stream = antlr3.StringStream('foo') self.failUnlessEqual(stream.LA(1), 'f') self.failUnlessEqual(stream.LA(2), 'o') self.failUnlessEqual(stream.LA(3), 'o') stream.consume() stream.consume() self.failUnlessEqual(stream.LA(1), 'o') self.failUnlessEqual(stream.LA(2), antlr3.EOF) self.failUnlessEqual(stream.LA(3), antlr3.EOF)
-1,285,289,739,290,805,000
StringStream.LA()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLA
MichaelReiter/CSC435
python
def testLA(self): stream = antlr3.StringStream('foo') self.failUnlessEqual(stream.LA(1), 'f') self.failUnlessEqual(stream.LA(2), 'o') self.failUnlessEqual(stream.LA(3), 'o') stream.consume() stream.consume() self.failUnlessEqual(stream.LA(1), 'o') self.failUnlessEqual(stream.LA(2), antlr3.EOF) self.failUnlessEqual(stream.LA(3), antlr3.EOF)
def testSubstring(self): 'StringStream.substring()' stream = antlr3.StringStream('foobar') self.failUnlessEqual(stream.substring(0, 0), 'f') self.failUnlessEqual(stream.substring(0, 1), 'fo') self.failUnlessEqual(stream.substring(0, 5), 'foobar') self.failUnlessEqual(stream.substring(3, 5), 'bar')
1,467,413,615,876,884,200
StringStream.substring()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testSubstring
MichaelReiter/CSC435
python
def testSubstring(self): stream = antlr3.StringStream('foobar') self.failUnlessEqual(stream.substring(0, 0), 'f') self.failUnlessEqual(stream.substring(0, 1), 'fo') self.failUnlessEqual(stream.substring(0, 5), 'foobar') self.failUnlessEqual(stream.substring(3, 5), 'bar')
def testSeekForward(self): 'StringStream.seek(): forward' stream = antlr3.StringStream('foo\nbar') stream.seek(4) self.failUnlessEqual(stream.index(), 4) self.failUnlessEqual(stream.line, 2) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.LA(1), 'b')
-6,447,231,333,538,386,000
StringStream.seek(): forward
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testSeekForward
MichaelReiter/CSC435
python
def testSeekForward(self): stream = antlr3.StringStream('foo\nbar') stream.seek(4) self.failUnlessEqual(stream.index(), 4) self.failUnlessEqual(stream.line, 2) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.LA(1), 'b')
def testMark(self): 'StringStream.mark()' stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker = stream.mark() self.failUnlessEqual(marker, 1) self.failUnlessEqual(stream.markDepth, 1) stream.consume() marker = stream.mark() self.failUnlessEqual(marker, 2) self.failUnlessEqual(stream.markDepth, 2)
6,075,229,385,085,667,000
StringStream.mark()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testMark
MichaelReiter/CSC435
python
def testMark(self): stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker = stream.mark() self.failUnlessEqual(marker, 1) self.failUnlessEqual(stream.markDepth, 1) stream.consume() marker = stream.mark() self.failUnlessEqual(marker, 2) self.failUnlessEqual(stream.markDepth, 2)
def testReleaseLast(self): 'StringStream.release(): last marker' stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker1 = stream.mark() stream.consume() marker2 = stream.mark() stream.release() self.failUnlessEqual(stream.markDepth, 1) stream.release() self.failUnlessEqual(stream.markDepth, 1)
7,445,008,543,421,783,000
StringStream.release(): last marker
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testReleaseLast
MichaelReiter/CSC435
python
def testReleaseLast(self): stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker1 = stream.mark() stream.consume() marker2 = stream.mark() stream.release() self.failUnlessEqual(stream.markDepth, 1) stream.release() self.failUnlessEqual(stream.markDepth, 1)
def testReleaseNested(self): 'StringStream.release(): nested' stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker1 = stream.mark() stream.consume() marker2 = stream.mark() stream.consume() marker3 = stream.mark() stream.release(marker2) self.failUnlessEqual(stream.markDepth, 1)
-5,503,462,820,693,028,000
StringStream.release(): nested
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testReleaseNested
MichaelReiter/CSC435
python
def testReleaseNested(self): stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker1 = stream.mark() stream.consume() marker2 = stream.mark() stream.consume() marker3 = stream.mark() stream.release(marker2) self.failUnlessEqual(stream.markDepth, 1)
def testRewindLast(self): 'StringStream.rewind(): last marker' stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker = stream.mark() stream.consume() stream.consume() stream.rewind() self.failUnlessEqual(stream.markDepth, 0) self.failUnlessEqual(stream.index(), 4) self.failUnlessEqual(stream.line, 2) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.LA(1), 'b')
1,729,413,339,423,644,400
StringStream.rewind(): last marker
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testRewindLast
MichaelReiter/CSC435
python
def testRewindLast(self): stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker = stream.mark() stream.consume() stream.consume() stream.rewind() self.failUnlessEqual(stream.markDepth, 0) self.failUnlessEqual(stream.index(), 4) self.failUnlessEqual(stream.line, 2) self.failUnlessEqual(stream.charPositionInLine, 0) self.failUnlessEqual(stream.LA(1), 'b')
def testRewindNested(self): 'StringStream.rewind(): nested' stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker1 = stream.mark() stream.consume() marker2 = stream.mark() stream.consume() marker3 = stream.mark() stream.rewind(marker2) self.failUnlessEqual(stream.markDepth, 1) self.failUnlessEqual(stream.index(), 5) self.failUnlessEqual(stream.line, 2) self.failUnlessEqual(stream.charPositionInLine, 1) self.failUnlessEqual(stream.LA(1), 'a')
-7,967,589,814,892,422,000
StringStream.rewind(): nested
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testRewindNested
MichaelReiter/CSC435
python
def testRewindNested(self): stream = antlr3.StringStream('foo\nbar') stream.seek(4) marker1 = stream.mark() stream.consume() marker2 = stream.mark() stream.consume() marker3 = stream.mark() stream.rewind(marker2) self.failUnlessEqual(stream.markDepth, 1) self.failUnlessEqual(stream.index(), 5) self.failUnlessEqual(stream.line, 2) self.failUnlessEqual(stream.charPositionInLine, 1) self.failUnlessEqual(stream.LA(1), 'a')
def setUp(self): 'Setup test fixure\n\n The constructor of CommonTokenStream needs a token source. This\n is a simple mock class providing just the nextToken() method.\n\n ' class MockSource(object): def __init__(self): self.tokens = [] def nextToken(self): try: return self.tokens.pop(0) except IndexError: return None self.source = MockSource()
-2,530,157,666,490,650,600
Setup test fixture The constructor of CommonTokenStream needs a token source. This is a simple mock class providing just the nextToken() method.
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
setUp
MichaelReiter/CSC435
python
def setUp(self): 'Setup test fixture\n\n The constructor of CommonTokenStream needs a token source. This\n is a simple mock class providing just the nextToken() method.\n\n ' class MockSource(object): def __init__(self): self.tokens = [] def nextToken(self): try: return self.tokens.pop(0) except IndexError: return None self.source = MockSource()
def testInit(self): 'CommonTokenStream.__init__()' stream = antlr3.CommonTokenStream(self.source) self.failUnlessEqual(stream.index(), (- 1))
-4,199,622,006,587,378,000
CommonTokenStream.__init__()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testInit
MichaelReiter/CSC435
python
def testInit(self): stream = antlr3.CommonTokenStream(self.source) self.failUnlessEqual(stream.index(), (- 1))
def testSetTokenSource(self): 'CommonTokenStream.setTokenSource()' stream = antlr3.CommonTokenStream(None) stream.setTokenSource(self.source) self.failUnlessEqual(stream.index(), (- 1)) self.failUnlessEqual(stream.channel, antlr3.DEFAULT_CHANNEL)
8,845,780,895,028,450,000
CommonTokenStream.setTokenSource()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testSetTokenSource
MichaelReiter/CSC435
python
def testSetTokenSource(self): stream = antlr3.CommonTokenStream(None) stream.setTokenSource(self.source) self.failUnlessEqual(stream.index(), (- 1)) self.failUnlessEqual(stream.channel, antlr3.DEFAULT_CHANNEL)
def testLTEmptySource(self): 'CommonTokenStream.LT(): EOF (empty source)' stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(1) self.failUnlessEqual(lt1.type, antlr3.EOF)
-1,308,497,428,755,781,000
CommonTokenStream.LT(): EOF (empty source)
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLTEmptySource
MichaelReiter/CSC435
python
def testLTEmptySource(self): stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(1) self.failUnlessEqual(lt1.type, antlr3.EOF)
def testLT1(self): 'CommonTokenStream.LT(1)' self.source.tokens.append(antlr3.CommonToken(type=12)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(1) self.failUnlessEqual(lt1.type, 12)
4,769,722,576,293,040,000
CommonTokenStream.LT(1)
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLT1
MichaelReiter/CSC435
python
def testLT1(self): self.source.tokens.append(antlr3.CommonToken(type=12)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(1) self.failUnlessEqual(lt1.type, 12)
def testLT1WithHidden(self): 'CommonTokenStream.LT(1): with hidden tokens' self.source.tokens.append(antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(1) self.failUnlessEqual(lt1.type, 13)
-1,167,620,246,653,067,800
CommonTokenStream.LT(1): with hidden tokens
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLT1WithHidden
MichaelReiter/CSC435
python
def testLT1WithHidden(self): self.source.tokens.append(antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(1) self.failUnlessEqual(lt1.type, 13)
def testLT2BeyondEnd(self): 'CommonTokenStream.LT(2): beyond end' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13, channel=antlr3.HIDDEN_CHANNEL)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(2) self.failUnlessEqual(lt1.type, antlr3.EOF)
5,829,866,680,724,612,000
CommonTokenStream.LT(2): beyond end
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLT2BeyondEnd
MichaelReiter/CSC435
python
def testLT2BeyondEnd(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13, channel=antlr3.HIDDEN_CHANNEL)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(2) self.failUnlessEqual(lt1.type, antlr3.EOF)
def testLTNegative(self): 'CommonTokenStream.LT(-1): look back' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() stream.consume() lt1 = stream.LT((- 1)) self.failUnlessEqual(lt1.type, 12)
8,833,826,822,500,224,000
CommonTokenStream.LT(-1): look back
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLTNegative
MichaelReiter/CSC435
python
def testLTNegative(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() stream.consume() lt1 = stream.LT((- 1)) self.failUnlessEqual(lt1.type, 12)
def testLB1(self): 'CommonTokenStream.LB(1)' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() stream.consume() self.failUnlessEqual(stream.LB(1).type, 12)
-3,394,678,491,536,926,700
CommonTokenStream.LB(1)
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLB1
MichaelReiter/CSC435
python
def testLB1(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() stream.consume() self.failUnlessEqual(stream.LB(1).type, 12)
def testLTZero(self): 'CommonTokenStream.LT(0)' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(0) self.failUnless((lt1 is None))
-3,495,325,699,316,676,000
CommonTokenStream.LT(0)
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLTZero
MichaelReiter/CSC435
python
def testLTZero(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) lt1 = stream.LT(0) self.failUnless((lt1 is None))
def testLBBeyondBegin(self): 'CommonTokenStream.LB(-1): beyond begin' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)) self.source.tokens.append(antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) self.failUnless((stream.LB(1) is None)) stream.consume() stream.consume() self.failUnless((stream.LB(3) is None))
8,140,744,220,851,901,000
CommonTokenStream.LB(-1): beyond begin
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testLBBeyondBegin
MichaelReiter/CSC435
python
def testLBBeyondBegin(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)) self.source.tokens.append(antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)) self.source.tokens.append(antlr3.CommonToken(type=13)) stream = antlr3.CommonTokenStream(self.source) self.failUnless((stream.LB(1) is None)) stream.consume() stream.consume() self.failUnless((stream.LB(3) is None))
def testFillBuffer(self): 'CommonTokenStream.fillBuffer()' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=14)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() self.failUnlessEqual(len(stream.tokens), 3) self.failUnlessEqual(stream.tokens[0].type, 12) self.failUnlessEqual(stream.tokens[1].type, 13) self.failUnlessEqual(stream.tokens[2].type, 14)
-3,472,367,152,560,268,000
CommonTokenStream.fillBuffer()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testFillBuffer
MichaelReiter/CSC435
python
def testFillBuffer(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=14)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() self.failUnlessEqual(len(stream.tokens), 3) self.failUnlessEqual(stream.tokens[0].type, 12) self.failUnlessEqual(stream.tokens[1].type, 13) self.failUnlessEqual(stream.tokens[2].type, 14)
def testConsume(self): 'CommonTokenStream.consume()' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) self.failUnlessEqual(stream.LA(1), 12) stream.consume() self.failUnlessEqual(stream.LA(1), 13) stream.consume() self.failUnlessEqual(stream.LA(1), antlr3.EOF) stream.consume() self.failUnlessEqual(stream.LA(1), antlr3.EOF)
6,766,146,186,820,755,000
CommonTokenStream.consume()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testConsume
MichaelReiter/CSC435
python
def testConsume(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) self.failUnlessEqual(stream.LA(1), 12) stream.consume() self.failUnlessEqual(stream.LA(1), 13) stream.consume() self.failUnlessEqual(stream.LA(1), antlr3.EOF) stream.consume() self.failUnlessEqual(stream.LA(1), antlr3.EOF)
def testSeek(self): 'CommonTokenStream.seek()' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) self.failUnlessEqual(stream.LA(1), 12) stream.seek(2) self.failUnlessEqual(stream.LA(1), antlr3.EOF) stream.seek(0) self.failUnlessEqual(stream.LA(1), 12)
-4,544,802,292,615,620,000
CommonTokenStream.seek()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testSeek
MichaelReiter/CSC435
python
def testSeek(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) self.failUnlessEqual(stream.LA(1), 12) stream.seek(2) self.failUnlessEqual(stream.LA(1), antlr3.EOF) stream.seek(0) self.failUnlessEqual(stream.LA(1), 12)
def testMarkRewind(self): 'CommonTokenStream.mark()/rewind()' self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() stream.consume() marker = stream.mark() stream.consume() stream.rewind(marker) self.failUnlessEqual(stream.LA(1), 13)
-8,019,132,124,511,811,000
CommonTokenStream.mark()/rewind()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testMarkRewind
MichaelReiter/CSC435
python
def testMarkRewind(self): self.source.tokens.append(antlr3.CommonToken(type=12)) self.source.tokens.append(antlr3.CommonToken(type=13)) self.source.tokens.append(antlr3.CommonToken(type=antlr3.EOF)) stream = antlr3.CommonTokenStream(self.source) stream.fillBuffer() stream.consume() marker = stream.mark() stream.consume() stream.rewind(marker) self.failUnlessEqual(stream.LA(1), 13)
def testToString(self): 'CommonTokenStream.toString()' self.source.tokens.append(antlr3.CommonToken(type=12, text='foo')) self.source.tokens.append(antlr3.CommonToken(type=13, text='bar')) self.source.tokens.append(antlr3.CommonToken(type=14, text='gnurz')) self.source.tokens.append(antlr3.CommonToken(type=15, text='blarz')) stream = antlr3.CommonTokenStream(self.source) assert (stream.toString() == 'foobargnurzblarz') assert (stream.toString(1, 2) == 'bargnurz') assert (stream.toString(stream.tokens[1], stream.tokens[(- 2)]) == 'bargnurz')
-7,940,443,880,603,264,000
CommonTokenStream.toString()
libs/antlr-3.0.1/runtime/Python/unittests/teststreams.py
testToString
MichaelReiter/CSC435
python
def testToString(self): self.source.tokens.append(antlr3.CommonToken(type=12, text='foo')) self.source.tokens.append(antlr3.CommonToken(type=13, text='bar')) self.source.tokens.append(antlr3.CommonToken(type=14, text='gnurz')) self.source.tokens.append(antlr3.CommonToken(type=15, text='blarz')) stream = antlr3.CommonTokenStream(self.source) assert (stream.toString() == 'foobargnurzblarz') assert (stream.toString(1, 2) == 'bargnurz') assert (stream.toString(stream.tokens[1], stream.tokens[(- 2)]) == 'bargnurz')
def __init__(self, version, account_sid, call_sid): '\n Initialize the FeedbackList\n\n :param Version version: Version that contains the resource\n :param account_sid: The account_sid\n :param call_sid: A 34 character string that uniquely identifies this resource.\n\n :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackList\n :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackList\n ' super(FeedbackList, self).__init__(version) self._solution = {'account_sid': account_sid, 'call_sid': call_sid}
-6,445,122,197,132,084,000
Initialize the FeedbackList :param Version version: Version that contains the resource :param account_sid: The account_sid :param call_sid: A 34 character string that uniquely identifies this resource. :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackList :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackList
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
__init__
Jason-Paprocki/hacknjit
python
def __init__(self, version, account_sid, call_sid): '\n Initialize the FeedbackList\n\n :param Version version: Version that contains the resource\n :param account_sid: The account_sid\n :param call_sid: A 34 character string that uniquely identifies this resource.\n\n :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackList\n :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackList\n ' super(FeedbackList, self).__init__(version) self._solution = {'account_sid': account_sid, 'call_sid': call_sid}
def get(self):
    """
    Constructs a FeedbackContext

    :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    """
    return FeedbackContext(self._version, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
-5,516,110,987,726,009,000
Constructs a FeedbackContext

:returns: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
:rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
get
Jason-Paprocki/hacknjit
python
def get(self):
    return FeedbackContext(self._version, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
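A usage sketch for the accessor above. Here feedback_list is assumed to come from the public client (e.g. client.calls('CA...').feedback in the twilio-python 6.x layout, an assumption rather than something this file shows), and create() is the context method documented in the final record of this section; the __call__ overload in the next record makes feedback_list() an equivalent spelling.

context = feedback_list.get()               # or equivalently: feedback_list()
feedback = context.create(quality_score=5)  # issues the POST shown in create() below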
def __call__(self):
    """
    Constructs a FeedbackContext

    :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    """
    return FeedbackContext(self._version, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
8,107,067,688,638,152,000
Constructs a FeedbackContext

:returns: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
:rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
__call__
Jason-Paprocki/hacknjit
python
def __call__(self):
    return FeedbackContext(self._version, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
def __repr__(self):
    """
    Provide a friendly representation

    :returns: Machine friendly representation
    :rtype: str
    """
    return '<Twilio.Api.V2010.FeedbackList>'
8,539,110,798,930,971,000
Provide a friendly representation

:returns: Machine friendly representation
:rtype: str
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
__repr__
Jason-Paprocki/hacknjit
python
def __repr__(self):
    return '<Twilio.Api.V2010.FeedbackList>'
def __init__(self, version, response, solution):
    """
    Initialize the FeedbackPage

    :param Version version: Version that contains the resource
    :param Response response: Response from the API
    :param account_sid: The account_sid
    :param call_sid: A 34 character string that uniquely identifies this resource.

    :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackPage
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackPage
    """
    super(FeedbackPage, self).__init__(version, response)
    self._solution = solution
-2,614,892,443,884,998,000
Initialize the FeedbackPage

:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The account_sid
:param call_sid: A 34 character string that uniquely identifies this resource.

:returns: twilio.rest.api.v2010.account.call.feedback.FeedbackPage
:rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackPage
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
__init__
Jason-Paprocki/hacknjit
python
def __init__(self, version, response, solution):
    super(FeedbackPage, self).__init__(version, response)
    self._solution = solution
def get_instance(self, payload):
    """
    Build an instance of FeedbackInstance

    :param dict payload: Payload response from the API

    :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    return FeedbackInstance(self._version, payload, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
4,555,122,512,957,802,500
Build an instance of FeedbackInstance

:param dict payload: Payload response from the API

:returns: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
:rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
get_instance
Jason-Paprocki/hacknjit
python
def get_instance(self, payload):
    return FeedbackInstance(self._version, payload, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
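A sketch of what the paging layer does with get_instance(): each raw JSON record from a list response is wrapped in a FeedbackInstance bound to the page's SIDs. The payload keys below are illustrative stand-ins for the API's response fields, not names confirmed by this file.

payload = {'quality_score': 5, 'issues': []}   # illustrative keys only
feedback = page.get_instance(payload)          # -> FeedbackInstance carrying account_sid/call_sid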
def __repr__(self):
    """
    Provide a friendly representation

    :returns: Machine friendly representation
    :rtype: str
    """
    return '<Twilio.Api.V2010.FeedbackPage>'
2,946,648,061,124,807,000
Provide a friendly representation

:returns: Machine friendly representation
:rtype: str
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
__repr__
Jason-Paprocki/hacknjit
python
def __repr__(self):
    return '<Twilio.Api.V2010.FeedbackPage>'
def __init__(self, version, account_sid, call_sid):
    """
    Initialize the FeedbackContext

    :param Version version: Version that contains the resource
    :param account_sid: The account_sid
    :param call_sid: The call sid that uniquely identifies the call

    :returns: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
    """
    super(FeedbackContext, self).__init__(version)
    self._solution = {'account_sid': account_sid, 'call_sid': call_sid}
    self._uri = '/Accounts/{account_sid}/Calls/{call_sid}/Feedback.json'.format(**self._solution)
-7,519,640,124,503,953,000
Initialize the FeedbackContext

:param Version version: Version that contains the resource
:param account_sid: The account_sid
:param call_sid: The call sid that uniquely identifies the call

:returns: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
:rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackContext
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
__init__
Jason-Paprocki/hacknjit
python
def __init__(self, version, account_sid, call_sid):
    super(FeedbackContext, self).__init__(version)
    self._solution = {'account_sid': account_sid, 'call_sid': call_sid}
    self._uri = '/Accounts/{account_sid}/Calls/{call_sid}/Feedback.json'.format(**self._solution)
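The .format(**self._solution) call above expands the two placeholders into the request path the context will POST to. A standalone sketch with placeholder SIDs:

solution = {'account_sid': 'ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
            'call_sid': 'CAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'}
uri = '/Accounts/{account_sid}/Calls/{call_sid}/Feedback.json'.format(**solution)
# uri == '/Accounts/ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/Calls/CAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx/Feedback.json'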
def create(self, quality_score, issue=values.unset):
    """
    Create a new FeedbackInstance

    :param unicode quality_score: The quality_score
    :param FeedbackInstance.Issues issue: The issue

    :returns: Newly created FeedbackInstance
    :rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
    """
    data = values.of({'QualityScore': quality_score, 'Issue': issue})
    payload = self._version.create('POST', self._uri, data=data)
    return FeedbackInstance(self._version, payload, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
6,177,282,813,554,628,000
Create a new FeedbackInstance

:param unicode quality_score: The quality_score
:param FeedbackInstance.Issues issue: The issue

:returns: Newly created FeedbackInstance
:rtype: twilio.rest.api.v2010.account.call.feedback.FeedbackInstance
lib/python2.7/site-packages/twilio/rest/api/v2010/account/call/feedback.py
create
Jason-Paprocki/hacknjit
python
def create(self, quality_score, issue=values.unset):
    data = values.of({'QualityScore': quality_score, 'Issue': issue})
    payload = self._version.create('POST', self._uri, data=data)
    return FeedbackInstance(self._version, payload, account_sid=self._solution['account_sid'], call_sid=self._solution['call_sid'])
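An end-to-end sketch of reaching create() through the public client. This is a sketch under assumptions: the credential strings are placeholders, the calls(...).feedback() chain assumes the twilio-python 6.x resource layout, and 'post-dial-delay' is offered as a plausible FeedbackInstance.Issues value rather than a constant confirmed by this file.

from twilio.rest import Client

client = Client('ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', 'your_auth_token')
feedback = client.calls('CAxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx').feedback().create(
    quality_score=5,           # required; serialized as 'QualityScore'
    issue='post-dial-delay',   # optional; omitted -> values.unset
)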