from electrum_cesc.i18n import _
fullname = _('Email')
description = _("Send and receive payment request with an email account")
available_for = ['qt']
| {
"content_hash": "15848de736948398eecfeadcf19da415",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 73,
"avg_line_length": 30.6,
"alnum_prop": 0.7189542483660131,
"repo_name": "Marcdnd/electrum-cesc",
"id": "7db681d09ef8c71abf8bf4cdc5db22ee03f73a24",
"size": "153",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/email_requests/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3536"
},
{
"name": "GLSL",
"bytes": "289"
},
{
"name": "HTML",
"bytes": "3354"
},
{
"name": "Makefile",
"bytes": "849"
},
{
"name": "NSIS",
"bytes": "6970"
},
{
"name": "PHP",
"bytes": "404"
},
{
"name": "Protocol Buffer",
"bytes": "2354"
},
{
"name": "Python",
"bytes": "2163404"
},
{
"name": "Shell",
"bytes": "7908"
}
],
"symlink_target": ""
} |
import unittest
from mxl.note import Note
from mxl import util
class TestNote(unittest.TestCase):
def test_subtraction_same(self):
a = Note(rhythm_type='half', step='C', octave=1, staff=1)
b = Note(rhythm_type='half', step='C', octave=1, staff=1)
self.assertEqual(a - b, 0)
def test_subtraction_same_octave(self):
a = Note(rhythm_type='half', step='C', octave=1, staff=1)
b = Note(rhythm_type='half', step='A', octave=1, staff=1)
self.assertEqual(a - b, 2)
def test_subtraction_same_octave_neg(self):
a = Note(rhythm_type='half', step='C', octave=1, staff=1)
b = Note(rhythm_type='half', step='A', octave=1, staff=1)
self.assertEqual(b - a, -2)
def test_subtraction_diff_octave(self):
a = Note(rhythm_type='half', step='A', octave=2, staff=1)
b = Note(rhythm_type='half', step='A', octave=1, staff=1)
self.assertEqual(a - b, 7)
def test_subtraction_diff_octave_diff_pitch(self):
a = Note(rhythm_type='half', step='C', octave=2, staff=1)
b = Note(rhythm_type='half', step='A', octave=1, staff=1)
self.assertEqual(a - b, 9)
def test_subtraction_diff_octave_neg(self):
a = Note(rhythm_type='half', step='A', octave=2, staff=1)
b = Note(rhythm_type='half', step='A', octave=1, staff=1)
self.assertEqual(b - a, -7)
def test_subtraction_diff_octave_diff_pitch_neg(self):
a = Note(rhythm_type='half', step='C', octave=2, staff=1)
b = Note(rhythm_type='half', step='A', octave=1, staff=1)
self.assertEqual(b - a, -9)
def test_rhythm_subtraction_pos(self):
a = Note(rhythm_type='half')
b = Note(rhythm_type='quarter')
self.assertEqual(a.sub_rhythm(b), -2)
def test_rhythm_subtraction_neg(self):
a = Note(rhythm_type='half')
b = Note(rhythm_type='quarter')
self.assertEqual(b.sub_rhythm(a), 2)
def test_time_sig_normalize(self):
self.assertEqual(util.time_signature_normalizer(3, 8), 1.5)
self.assertEqual(util.time_signature_normalizer(4, 4), 4)
self.assertEqual(util.time_signature_normalizer(3, 4), 3)
def test_time_sig_normalize_unicode_input(self):
self.assertEqual(util.time_signature_normalizer(u'3', u'8'), 1.5)
self.assertEqual(util.time_signature_normalizer(u'4', u'4'), 4)
self.assertEqual(util.time_signature_normalizer(u'3', u'4'), 3)
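# Illustrative sketch only (not part of the original test module): a helper
# consistent with the expectations above, assuming Note exposes its `step` and
# `octave` constructor arguments as attributes and that steps are indexed
# alphabetically (A..G -> 0..6) with 7 diatonic steps per octave.
_STEP_INDEX = {step: idx for idx, step in enumerate('ABCDEFG')}

def _diatonic_distance(a, b):
    # Signed distance a - b, matching e.g. C1 - A1 == 2 and A2 - A1 == 7 above.
    return ((_STEP_INDEX[a.step] + 7 * a.octave)
            - (_STEP_INDEX[b.step] + 7 * b.octave))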
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "32049e3497f9f82aff3c1fb4b05eb791",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 73,
"avg_line_length": 39.171875,
"alnum_prop": 0.6118867171918628,
"repo_name": "themichaellai/musicxml-stats",
"id": "c8d261770150ddcf5f18ebaef6385e5d0c1fddef",
"size": "2507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24174"
}
],
"symlink_target": ""
} |
import os
import robot.utils
from robot.errors import DataError
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from browsercache import BrowserCache
from keywordgroup import KeywordGroup
from BJRobot.utilities import System
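# Maps user-facing browser names to the factory-method names that are resolved
# via getattr() in _get_browser_creation_function() below.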
BROWSER_NAMES = {'ff': "_make_ff",
'firefox': "_make_ff",
'ie': "_make_ie",
'internetexplorer': "_make_ie",
'googlechrome': "_make_chrome",
'gc': "_make_chrome",
'chrome': "_make_chrome",
'opera': "_make_opera",
'phantomjs': "_make_phantomjs",
'htmlunit': "_make_htmlunit",
'htmlunitwithjs': "_make_htmlunitwithjs",
'android': "_make_android",
'iphone': "_make_iphone",
'safari': "_make_safari",
'edge': "_make_edge"
}
class BrowserManager(KeywordGroup):
chrome_driver_version = "2.27"
edge_driver_version = "14393"
firefox_driver_version = "0.12"
ie_driver_version = "2.53"
def __init__(self):
self._cache = BrowserCache()
self._default_script_timeout_in_secs = 10
self._default_implicit_wait_in_secs = 20
def open_browser(self, url, browser_name="chrome", proxy=None, alias=None):
"""Open a browser and go to expected url address, set the browser and the proxy, as well as the alias,
1. The url is mandatory, otherwise the keyword will fail to execute.
2. Browser name is by default set to chrome, the available browser name are
ff, firefox
ie, internetexplorer
googlechrome, gc, chrome,
edge
3. Proxy support manual and direct
If manual, please give a proxy url, if direct, just leave empty
4. Set browser alias for further browser switching purpose.
Example:
| open browser | chrome | http://proxy:8083/ | browserA |
| open browser | ie | | browserB |
| switch browser | browserA |
| set value by id | kw | test |
| click element by id | su |
| open browser | chrome | http://proxy:8083/ | browserA |
| switch browser | browserB |
| do something... | |
"""
try:
driver_instance = self._make_browser(browser_name.strip(), proxy)
driver_instance.get(url)
except:
raise
self._cache.register(driver_instance, alias)
def execute_script(self, js, *args):
"""
        Let the current browser window execute the specified script
Example:
| ${window_title}= | Execute Script | return document.title |
"""
        return self._current_browser().execute_script(js, *args)
def execute_async_script(self, javascript, *args):
"""
        Let the current browser window execute the specified script asynchronously
        """
        self._current_browser().execute_async_script(javascript, *args)
def open_new_window(self, url):
"""
        Open a new window in the current browser; an existing browser session is required.
:param url: the target url to be open and navigate to.
:return: no return
Example:
| open new window | http://www.baidu.com |
"""
js = "window.open('%s')" % url
self._current_browser().execute_script(js)
def close_window(self):
"""
Close current window
:return: none
"""
js = "window.close()"
self._current_browser().execute_script(js)
def close_all_browsers(self):
"""Closes the all open browsers in current session.
Example:
| close all browsers |
"""
self._cache.close_all()
def close_browser(self):
"""Closes the current browser.
Example:
| close browser |
"""
if self._cache.current:
self._cache.close()
def switch_window(self, title_or_url):
"""When click on a link opening a new window such as www.baidu.com
we need to switch to the new window and continue the operation
Example:
| switch window | 百度一下, 你就知道 |
| set value by id | kw | test |
| click element by id | su |
"""
all_windows = self._current_browser().window_handles
for window in all_windows:
self._current_browser().switch_to.window(window)
url = self.get_url()
title = self.get_title()
if title.lower() == title_or_url.lower() or url.lower() == title_or_url.lower():
break
def switch_window_contains(self, title_or_url):
"""When click on a link opening a new window such as www.baidu.com
we need to switch to the new window and continue the operation
Example:
| switch window contains | 百度一下, 你就 |
| set value by id | kw | test |
| click element by id | su |
"""
all_windows = self._current_browser().window_handles
for window in all_windows:
self._current_browser().switch_to.window(window)
url = self.get_url()
title = self.get_title()
if title_or_url.lower() in url.lower() or title_or_url in title.lower():
break
def switch_browser(self, index_or_alias):
"""Switches between active browsers using index or alias.
Index is returned from `Open Browser` and alias can be given to it.
Example:
| Open Browser | http://google.com | ff |
| URL Should Be | http://google.com | |
| Open Browser | http://yahoo.com | ie | 2nd conn |
| URL Should Be | http://yahoo.com | |
| Switch Browser | 1 | # index |
| Switch Browser | 2nd conn | # alias |
| Close All Browsers | | |
Above example expects that there was no other open browsers when
opening the first one because it used index '1' when switching to it
later. If you aren't sure about that you can store the index into
a variable as below.
| ${id} = | Open Browser | http://google.com | *firefox |
| # Do something ... |
| Switch Browser | ${id} | | |
"""
try:
self._cache.switch(index_or_alias)
except (RuntimeError, DataError): # RF 2.6 uses RE, earlier DE
raise RuntimeError("No browser with index or alias '%s' found." % index_or_alias)
def maximize_browser_window(self):
"""Maximizes current browser window.
Example:
| maximize browser window |
"""
self._current_browser().maximize_window()
def set_window_size(self, width, height):
"""Sets the `width` and `height` of the current window to the specified values.
Example:
| Set Window Size | ${800} | ${600} |
| ${width} | ${height}= | Get Window Size |
| Should Be Equal | ${width} | ${800} |
| Should Be Equal | ${height} | ${600} |
"""
return self._current_browser().set_window_size(width, height)
def get_window_size(self):
"""Returns current window size as `width` then `height`.
Example:
| ${width} | ${height}= | Get Window Size |
"""
size = self._current_browser().get_window_size()
return size['width'], size['height']
def get_url(self):
"""Returns the current location.
Example:
|${url}=| get url |
"""
return self._current_browser().current_url
def url_should_be(self, url):
"""Verifies that current URL is exactly `url`.
Example:
| url should be | ${url} |
"""
actual = self.get_url()
if actual != url:
raise AssertionError("URL should have been '%s' but was '%s'"
% (url, actual))
def url_should_contain(self, expected):
"""Verifies that current URL contains `expected`.
Example:
| url should contain | ${expected} |
"""
actual = self.get_url()
if expected not in actual:
raise AssertionError("URL should have contained '%s' "
"but it was '%s'." % (expected, actual))
def get_title(self):
"""Returns title of current page.
Example:
|${title}=| get title |
"""
return self._current_browser().title
def title_should_be(self, title):
"""Verifies that current page title equals `title`.
Example:
        | title should be | 百度一下, 你就知道 |
"""
actual = self.get_title().strip()
if actual != title.strip():
raise AssertionError("Title should have been '%s' but was '%s'"
% (title, actual))
def title_should_contain(self, expected):
"""Verifies that current page title equals `title`.
Example:
| title should contain | 百度一下 |
"""
actual = self.get_title()
if expected not in actual:
raise AssertionError("Title should have contained '%s' but was '%s'"
% (expected, actual))
def go_back(self):
"""Simulates the user clicking the "back" button on their browser.
Example:
| go back |
"""
self._current_browser().back()
def go_to_url(self, url):
"""Navigates the active browser instance to the provided URL.
| Open Application | http://localhost:4755/wd/hub | platformName=iOS | platformVersion=7.0 |
deviceName='iPhone Simulator' | browserName=Safari |
| Go To URL | http://m.webapp.com |
"""
self._current_browser().get(url)
def reload_page(self):
"""Simulates user reloading page.
Example:
| accept alert |
"""
self._current_browser().refresh()
def accept_alert(self):
"""
Accept the alert available.
Example:
| accept alert |
"""
        self._current_browser().switch_to.alert.accept()
def dismiss_alert(self):
"""
Dismisses the alert available.
Example:
| dismiss alert |
"""
        self._current_browser().switch_to.alert.dismiss()
def authenticate_alert(self, username, password):
"""
Send the username / password to an Authenticated dialog (like with Basic HTTP Auth).
Implicitly 'clicks ok'
:Args:
-username: string to be set in the username section of the dialog
-password: string to be set in the password section of the dialog
Example:
| authenticate alert | username | password |
"""
self._current_browser().switch_to.alert.authenticate(username, password)
def set_browser_implicit_wait(self, seconds):
"""All the window running selenium will by impacted by this setting.
Example:
| Set Browser Implicit Wait | 15 seconds |
| Open page that loads slowly |
| Set Browser Implicit Wait | 20s |
"""
implicit_wait_in_secs = robot.utils.timestr_to_secs(seconds)
self._current_browser().implicitly_wait(implicit_wait_in_secs)
def set_global_implicit_wait(self, seconds):
"""This function will set global selenium implicit wait time out,
it will impact all the running test. So consider your purpose before you use it.
Example:
| ${orig timeout} = | Set global Implicit Wait | 10 seconds |
| Perform AJAX call that is slow |
| Set Global Implicit Wait | ${orig timeout} |
"""
old_wait = self._default_implicit_wait_in_secs
self._default_implicit_wait_in_secs = robot.utils.timestr_to_secs(seconds)
for driver_instance in self._cache.get_open_browsers():
driver_instance.implicitly_wait(self._default_implicit_wait_in_secs)
return old_wait
def set_browser_script_timeout(self, seconds):
"""All the window running selenium will by impacted by this setting.
Example:
| Set Browser Script Timeout | 15 seconds |
| Open page that loads slowly |
| Set Browser Script Timeout | 20s |
"""
_timeout_in_secs = robot.utils.timestr_to_secs(seconds)
self._current_browser().set_script_timeout(_timeout_in_secs)
def set_global_script_timeout(self, seconds):
"""This function will set global javascript wait time out,
it will impact all the running test. So consider your purpose before you use it.
Example:
| ${orig timeout} = | Set Global Script Timeout | 15 seconds |
| Open page that loads slowly |
| Set Selenium Timeout | ${orig timeout} |
"""
old_timeout = self._default_script_timeout_in_secs
self._default_script_timeout_in_secs = robot.utils.timestr_to_secs(seconds)
for driver_instance in self._cache.get_open_browsers():
driver_instance.set_script_timeout(self._default_script_timeout_in_secs)
return old_timeout
def _current_browser(self):
if not self._cache.current:
raise RuntimeError('No browser is open')
return self._cache.current
def _get_browser_creation_function(self, browser_name):
func_name = BROWSER_NAMES.get(browser_name.lower().replace(' ', ''))
return getattr(self, func_name) if func_name else None
def _make_browser(self, browser_name, proxy=None):
creation_func = self._get_browser_creation_function(browser_name)
if not creation_func:
raise ValueError(browser_name + " is not a supported browser.")
driver_instance = creation_func(proxy)
driver_instance.set_script_timeout(self._default_script_timeout_in_secs)
driver_instance.implicitly_wait(self._default_implicit_wait_in_secs)
return driver_instance
def _make_ff(self, proxy=None):
cur_path = os.path.dirname(os.path.realpath(__file__))
cur_path = cur_path + os.sep + ".." + os.sep + 'log' + os.sep + 'geckodriver.log'
fp = None
if System.get_os_name().lower() == 'windows':
target = os.path.expanduser('~') \
+ os.sep + 'AppData' + os.sep + 'Roaming' + os.sep + 'Mozilla' + os.sep + 'Firefox' \
+ os.sep + 'Profiles'
fp = System.search_file_contains(target, '.default')
firefox_profile = webdriver.FirefoxProfile(fp)
if proxy is not None:
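            # Split a proxy url of the form scheme://host:port into host and port.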
host, port = proxy.split(r'//')[1].split(':')[0], proxy.split(r'//')[1].split(':')[1]
firefox_profile.set_preference("network.proxy.http", host)
firefox_profile.set_preference("network.proxy.http_port", int(port))
firefox_profile.set_preference("network.proxy.ssl", host)
firefox_profile.set_preference("network.proxy.ssl_port", int(port))
firefox_profile.set_preference("network.proxy.socks", host)
firefox_profile.set_preference("network.proxy.socks_port", int(port))
firefox_profile.set_preference("network.proxy.ftp", host)
firefox_profile.set_preference("network.proxy.ftp_port", int(port))
firefox_profile.set_preference("network.proxy.no_proxies_on", 'localhost')
firefox_profile.set_preference("network.proxy.type", 1)
else:
firefox_profile.set_preference("network.proxy.type", 0)
firefox_capabilities = DesiredCapabilities.FIREFOX
firefox_capabilities['acceptInsecureCerts'] = True
firefox_capabilities['marionette'] = True
binary = FirefoxBinary()
return webdriver.Firefox(executable_path=self.__get_driver_path("firefox"), capabilities=firefox_capabilities,
firefox_binary=binary, firefox_profile=firefox_profile, log_path=cur_path)
def _make_ie(self, proxy=None):
cur_path = os.path.dirname(os.path.realpath(__file__))
cur_path = cur_path + os.sep + ".." + os.sep + 'log' + os.sep + 'ie.log'
ie_capabilities = DesiredCapabilities.INTERNETEXPLORER
ie_capabilities['ignoreProtectedModeSettings'] = True
ie_capabilities['INTRODUCE_FLAKINESS_BY_IGNORING_SECURITY_DOMAINS'] = True
ie_capabilities['requireWindowFocus'] = False
ie_capabilities['enableElementCacheCleanup'] = True
ie_capabilities['ie.usePerProcessProxy'] = True
ie_capabilities['proxy'] = System.set_proxy(proxy)
return webdriver.Ie(executable_path=self.__get_driver_path("ie"),
capabilities=ie_capabilities, log_file=cur_path, log_level='INFO')
def _make_chrome(self, proxy=None):
cur_path = os.path.dirname(os.path.realpath(__file__))
cur_path = cur_path + os.sep + ".." + os.sep + 'log' + os.sep + 'chrome.log'
chrome_capabilities = webdriver.DesiredCapabilities.CHROME
chrome_capabilities['chromeOptions'] = {"args": ["--disable-extensions"], "extensions": []}
chrome_capabilities['proxy'] = System.set_proxy(proxy)
return webdriver.Chrome(executable_path=self.__get_driver_path("chrome"),
desired_capabilities=chrome_capabilities, service_log_path=cur_path)
def _make_edge(self, proxy=None):
if hasattr(webdriver, 'Edge'):
cur_path = os.path.dirname(os.path.realpath(__file__))
cur_path = cur_path + os.sep + ".." + os.sep + 'log' + os.sep + 'edge.log'
edge_capabilities = DesiredCapabilities.EDGE
edge_capabilities['edge.usePerProcessProxy'] = True
edge_capabilities['proxy'] = System.set_proxy(proxy)
# edge_options = Options()
return webdriver.Edge(executable_path=self.__get_driver_path("edge"),
capabilities=edge_capabilities, log_path=cur_path, verbose=True)
else:
raise ValueError("Edge is not a supported browser with your version of Selenium python library."
" Please, upgrade to minimum required version 2.47.0.")
def __get_driver_path(self, browser):
default = os.path.split(os.path.realpath(__file__))[0]
default = default + os.sep + '..' + os.sep + "resource" + os.sep + "driver"
_browser = browser.lower()
if _browser == "chrome":
default = default + os.sep + _browser + os.sep + self.chrome_driver_version
if System.get_os_name() == "linux":
if System.is64bit():
default = default + os.sep + "linux64"
else:
default = default + os.sep + "linux32"
default = default + os.sep + "chromedriver"
elif System.get_os_name() == "windows":
default = default + os.sep + "win32" + os.sep + "chromedriver.exe"
elif System.get_os_name() == "macos":
default = default + os.sep + "mac64" + os.sep + "chromedriver"
elif _browser == "edge":
default = default + os.sep + _browser + os.sep + self.edge_driver_version \
+ os.sep + "MicrosoftWebDriver.exe"
elif _browser == "firefox":
default = default + os.sep + _browser + os.sep + self.firefox_driver_version
if System.get_os_name() == "linux":
if System.is64bit():
default = default + os.sep + "linux64"
else:
default = default + os.sep + "linux32"
default = default + os.sep + "geckodriver"
elif System.get_os_name() == "windows":
if System.is64bit():
default = default + os.sep + "win64"
else:
default = default + os.sep + "win32"
default = default + os.sep + "geckodriver.exe"
elif _browser == "ie":
default = default + os.sep + _browser + os.sep + self.ie_driver_version
# Use win32 for IE driver only because of performance issue.
# if (self.__is64bit()):
# default = default + os.path.sep + "win64"
# else:
# default = default + os.path.sep + "win32"
# default = default + os.path.sep + "IEDriverServer.exe"
default = default + os.sep + "win32" + os.sep + "IEDriverServer.exe"
return default
def _test(self, by, value):
driver = webdriver.Chrome()
try:
driver.implicitly_wait(1)
element_list = driver.find_elements(by, value)
for element in element_list:
if element.is_displayed():
driver.implicitly_wait(self._default_implicit_wait_in_secs)
return element
except:
raise
| {
"content_hash": "3d07df7b61539d222b55cb6ce062b99b",
"timestamp": "",
"source": "github",
"line_count": 504,
"max_line_length": 118,
"avg_line_length": 42.88690476190476,
"alnum_prop": 0.561647004395096,
"repo_name": "overfly83/bjrobot",
"id": "795c8c9a829fad3405d3b55b42a474a09f7f42c5",
"size": "21715",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/BJRobot/keywords/browsermanager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "92610"
}
],
"symlink_target": ""
} |
import requests
import time
import random
CLIENT_ID = 'XXX' # Create an Instagram app to get this
ACCESS_TOKEN = "XXX" # ...and this
MY_USER_NAME = 'XXX' # Your name on instagram
USER_NAME_TO_GIVE_LOTS_OF_LIKES = '<3<3' # The name of the person you want to
# like
MAX_PHOTOS_TO_LIKE_PER_EXECUTION = 3
def wait(min_time, max_time):
wait_time = random.randint(min_time, max_time)
print 'Waiting for %d seconds..' % wait_time
time.sleep(wait_time)
print 'Back to work'
def iphone_web(url, action='GET', params={}):
default_params = {'access_token': ACCESS_TOKEN, 'client_id': CLIENT_ID}
call = {'GET': requests.get, 'POST': requests.post}
# Rate limit the requests
wait(1, 5)
return call[action](
url,
params=dict(default_params.items() + params.items()),
headers={
'User-Agent': 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7',
"Content-type": "application/x-www-form-urlencoded"
}
).json()
def like_photo(pictureId):
like_url = "https://api.instagram.com/v1/media/%s/likes" % pictureId
if [d for d in iphone_web(like_url)['data'] if d['username'] == MY_USER_NAME]:
# We already liked this photo, skip
return False
else:
# Like the photo
iphone_web(like_url, action='POST')
wait(10, 90)
return True
def like_user_photos(user_id):
    user_media = iphone_web(
        "https://api.instagram.com/v1/users/%s/media/recent/" % user_id)
liked_photos = 0
for picture in user_media['data']:
liked_photos += int(like_photo(picture['id']))
if liked_photos >= MAX_PHOTOS_TO_LIKE_PER_EXECUTION:
break
def get_user_id_from_name(name):
return [
d['id']
for d in iphone_web(
'https://api.instagram.com/v1/users/search',
params={'q': name}
)['data']
if d['username'] == USER_NAME_TO_GIVE_LOTS_OF_LIKES][0]
def enable_requests_logging():
# This is just to see what's going on on the wire.
import logging
import httplib
httplib.HTTPConnection.debuglevel = 1
# you need to initialize logging, otherwise you will not see anything from
# requests
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
enable_requests_logging()
user_id = get_user_id_from_name(USER_NAME_TO_GIVE_LOTS_OF_LIKES)
like_user_photos(user_id)
| {
"content_hash": "30396d81ad02a6f79c89a376b093a7f1",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 174,
"avg_line_length": 32.42168674698795,
"alnum_prop": 0.6257896692679301,
"repo_name": "invernizzi/instagram-autolike",
"id": "934c1b21e469ffb7d104d10582b3f2edf5b39e69",
"size": "2691",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "autolike.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2603"
}
],
"symlink_target": ""
} |
import urllib, urllib2
from HTMLParser import HTMLParser
csrf = ""
class Parser(HTMLParser):
def handle_startendtag(self, tag, attrs):
if tag == 'input':
attr_dict = dict(attrs)
if attr_dict['name'] == 'csrf_token':
global csrf
csrf = attr_dict['value']
# Get a copy of the original page
url = 'http://www.wsb.com/Assignment2/case09.php'
req = urllib2.Request(url)
res = urllib2.urlopen(req)
content = res.read()
# Feed the content to the parser
parser = Parser()
parser.feed(content)
# Construct the new request with the required info
post_values = dict(data='Test Data', csrf_token=csrf)
data = urllib.urlencode(post_values)
post_req = urllib2.Request(url, data)
post_res = urllib2.urlopen(post_req)
with open('results.html', 'w') as f:
f.write(post_res.read())
import webbrowser
new = 2
webbrowser.open('results.html', new=new)
| {
"content_hash": "29b50206369beaf4ad980e8d0700360b",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 53,
"avg_line_length": 23.805555555555557,
"alnum_prop": 0.7094515752625438,
"repo_name": "vhazali/cs5331",
"id": "41d33ee46aea6601b433cc64da9799933a1feaae",
"size": "857",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assignment2/scripts/exploit09/exploit09.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "490"
},
{
"name": "CSS",
"bytes": "136542"
},
{
"name": "HTML",
"bytes": "373214"
},
{
"name": "JavaScript",
"bytes": "723067"
},
{
"name": "Makefile",
"bytes": "2506"
},
{
"name": "PHP",
"bytes": "4883616"
},
{
"name": "Python",
"bytes": "3495970"
},
{
"name": "Shell",
"bytes": "6243"
},
{
"name": "Smarty",
"bytes": "2452936"
}
],
"symlink_target": ""
} |
"""
Routines for fitting source profiles (currently just 2d-Gaussian)
We borrow from the Astropy modeling routines, since they provide a tested
implementation of both the 2d Gaussian and its Jacobian
(first partial derivatives).
We add routines for estimation of initial fitting parameters (method of moments,
using the TraP (https://github.com/transientskp/tkp routines for reference),
and for processing the fits to determine which is the semimajor/minor
axis, and constrain the rotation angle to within 180 degrees.
To do:
If time allows, understand, document and re-implement the error-estimation
routines currently implemented in the TraP (Condon 1995, Spreeuw's Thesis).
Initial implementation will have to make do with naive errors.
"""
import math
import astropy.units as u
import numpy as np
import pytest
from attr import attrib, attrs
from fastimgproto.coords import rotate_basis
def _valid_semimajor(instance, attribute, value):
if not value > 0.:
raise ValueError("Semimajor axis value must be positive.")
def _valid_semiminor(instance, attribute, value):
"""
Check if the semiminor axis is smaller than semimajor.
We leave a little bit of wiggle room (`rel_tol`) to ignore values
that are almost within numerical precision limits
"""
if not value > 0.:
raise ValueError("Semiminor axis value must be positive.")
rel_tol = 1e-12
tol_factor = 1. + rel_tol
if value > instance.semimajor * tol_factor:
raise ValueError("Semiminor axis should be smaller than semimajor.")
def _valid_theta(instance, attribute, value):
"""
Check if theta lies in the range (-pi/2,pi/2].
"""
half_pi = np.pi / 2.
if (value <= -half_pi) or (value > half_pi):
raise ValueError("Theta should lie in the range (-pi/2,pi/2].")
@attrs(frozen=True)
class Gaussian2dParams(object):
"""
Data structure for representing a 2d Gaussian profile.
Similar to an astropy Gaussian2d parameter set, but we refer to
semimajor/semiminor axis length rather than x_std_dev / y_std_dev.
Otherwise all values have the same meaning - we just always assume
that `x_std_dev > y_std_dev`, or equivalently, that theta describes the
rotation in the counterclockwise sense of the semimajor axis from
the positive x-direction. (For fits returned where this does not happen
to be true, we swap semi-major/minor and adjust theta accordingly.)
NB - we don't use astropy.units / astropy.units.Quantity parameters here,
to ease interfacing with the scipy fitting routines. Most of the parameters
are obvious anyway - pixels - but take care that theta is in radians.
All values are in units of pixels, except for theta which has units of
radians.
The data-structure is 'frozen' to avoid inadvertent modification of values,
we don't expect to need to modify a returned fit often.
"""
x_centre = attrib(convert=float)
y_centre = attrib(convert=float)
amplitude = attrib(convert=float)
semimajor = attrib(convert=float, validator=_valid_semimajor)
semiminor = attrib(convert=float, validator=_valid_semiminor)
theta = attrib(convert=float, validator=_valid_theta)
@property
def covariance(self):
"""
Reference covariance matrix
Returns:
numpy.ndarray: 2x2 matrix representing covariance matrix in the
reference x-y frame.
"""
rotated_cov = np.array([[self.semimajor ** 2, 0],
[0, self.semiminor ** 2]],
dtype=np.float_)
ref_cov = rotate_basis(rotated_cov, -self.theta * u.rad)
return ref_cov
@property
def correlation(self):
"""
Correlation co-efficient between x and y
(This is effectively a proxy for rotation angle - much easier to
compare fits with since it does not suffer from degeneracies that
are inherent to rotation angle.)
Returns:
float: Correlation coefficient in the range (-1,1).
"""
cov_matrix = self.covariance
rho = cov_matrix[0, 1] / np.sqrt(cov_matrix[0, 0] * cov_matrix[1, 1])
return rho
@staticmethod
def from_unconstrained_parameters(x_centre, y_centre, amplitude, semimajor,
semiminor, theta):
"""
Construct from unconstrained parameters, e.g. from a fitting routine.
If necessary this will swap semimajor / semiminor so that
semimajor is always the larger of the two, and shift the rotation
angle appropriately. Also shifts theta to lie within
(-pi/2,pi/2].
Args:
x_centre:
y_centre:
amplitude:
semimajor:
semiminor:
theta:
Returns:
Gaussian2dParams
"""
# Semimajor / minor are only evaluated as squares, so unconstrained
# fits can easily stray into negative values:
semimajor = np.fabs(semimajor)
semiminor = np.fabs(semiminor)
half_pi = np.pi / 2.
if semimajor < semiminor:
semimajor, semiminor = semiminor, semimajor
theta = theta + half_pi
mod_theta = math.fmod(theta, np.pi) # Rotations by pi are degeneracies
# This gets us to the range (-pi,pi). Now we add/subtract an additional
# pi as required to get down to (-pi/2, pi/2).
if mod_theta <= -half_pi:
mod_theta += np.pi
elif mod_theta > half_pi:
mod_theta -= np.pi
return Gaussian2dParams(x_centre, y_centre, amplitude, semimajor,
semiminor, mod_theta)
@property
def comparable_params(self):
"""
A tuple of values for easy comparison - replace theta with correlation.
"""
return (self.x_centre,
self.y_centre,
self.amplitude,
self.semimajor,
self.semiminor,
self.correlation,
)
def approx_equal_to(self, other, rel_tol=1e-8, abs_tol=1e-12):
"""
Determine if two Gaussian fits are approximately equivalent.
"""
return self.comparable_params == pytest.approx(other.comparable_params,
rel=rel_tol, abs=abs_tol)
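def _example_normalise_unconstrained_fit():
    """Illustrative sketch only (not part of the original module): normalise a
    raw fit where the "major" axis came out smaller than the "minor" one.
    The numbers below are arbitrary assumptions."""
    raw = Gaussian2dParams.from_unconstrained_parameters(
        x_centre=10.2, y_centre=5.7, amplitude=3.1,
        semimajor=1.0, semiminor=2.5, theta=2.0)
    assert raw.semimajor >= raw.semiminor
    assert -np.pi / 2 < raw.theta <= np.pi / 2
    return raw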
def gaussian2d(x, y, x_centre, y_centre, amplitude, x_stddev, y_stddev, theta):
"""
Two dimensional Gaussian function for use in source-fitting
A tested implementation of a 2d Gaussian function, with rotation of
axes. Original Source code:
https://github.com/astropy/astropy/blob/3b1de6ee3165d176c3e2901028f86be60b4b0f4d/astropy/modeling/functional_models.py#L446
Wikipedia article on the formula:
https://en.wikipedia.org/wiki/Gaussian_function#Two-dimensional_Gaussian_function
Args:
x (numpy.ndarray): Datatype int. X-Pixel indices to calculate
Gaussian values for.
y (numpy.ndarray): Datatype int. Y-Pixel indices to calculate
Gaussian values for.
x_centre (float): Mean of the Gaussian in x.
y_centre (float): Mean of the Gaussian in y.
amplitude(float): Amplitude of the Gaussian.
x_stddev(float): Standard deviation of the Gaussian in x before rotating
by theta.
y_stddev(float): Standard deviation of the Gaussian in y before rotating
by theta.
theta(float): Rotation angle in radians. The rotation angle increases
counterclockwise.
Returns:
numpy.ndarray: Datatype np.float_. m values, one for each pixel fitted.
"""
cost2 = np.cos(theta) ** 2
sint2 = np.sin(theta) ** 2
sin2t = np.sin(2. * theta)
xstd2 = x_stddev ** 2
ystd2 = y_stddev ** 2
xdiff = x - x_centre
ydiff = y - y_centre
a = 0.5 * ((cost2 / xstd2) + (sint2 / ystd2))
b = 0.5 * ((sin2t / xstd2) - (sin2t / ystd2))
c = 0.5 * ((sint2 / xstd2) + (cost2 / ystd2))
return amplitude * np.exp(-((a * xdiff ** 2) + (b * xdiff * ydiff) +
(c * ydiff ** 2)))
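def _example_gaussian2d_image():
    """Illustrative sketch only (not part of the original module): evaluate the
    profile on an arbitrary 64x64 pixel grid, e.g. to eyeball a candidate fit."""
    y_idx, x_idx = np.mgrid[0:64, 0:64]
    return gaussian2d(x_idx, y_idx, x_centre=32.0, y_centre=30.0, amplitude=1.0,
                      x_stddev=3.0, y_stddev=1.5, theta=0.3)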
def gaussian2d_jac(x, y, x_centre, y_centre, amplitude, x_stddev, y_stddev,
theta):
"""
Jacobian of Gaussian2d.
(Two dimensional Gaussian function derivative with respect to parameters)
See :ref:`.gaussian2d` for arg details
"""
cost = np.cos(theta)
sint = np.sin(theta)
cost2 = np.cos(theta) ** 2
sint2 = np.sin(theta) ** 2
cos2t = np.cos(2. * theta)
sin2t = np.sin(2. * theta)
xstd2 = x_stddev ** 2
ystd2 = y_stddev ** 2
xstd3 = x_stddev ** 3
ystd3 = y_stddev ** 3
xdiff = x - x_centre
ydiff = y - y_centre
xdiff2 = xdiff ** 2
ydiff2 = ydiff ** 2
a = 0.5 * ((cost2 / xstd2) + (sint2 / ystd2))
b = 0.5 * ((sin2t / xstd2) - (sin2t / ystd2))
c = 0.5 * ((sint2 / xstd2) + (cost2 / ystd2))
g = amplitude * np.exp(-((a * xdiff2) + (b * xdiff * ydiff) +
(c * ydiff2)))
da_dtheta = (sint * cost * ((1. / ystd2) - (1. / xstd2)))
da_dx_stddev = -cost2 / xstd3
da_dy_stddev = -sint2 / ystd3
db_dtheta = (cos2t / xstd2) - (cos2t / ystd2)
db_dx_stddev = -sin2t / xstd3
db_dy_stddev = sin2t / ystd3
dc_dtheta = -da_dtheta
dc_dx_stddev = -sint2 / xstd3
dc_dy_stddev = -cost2 / ystd3
dg_dA = g / amplitude
dg_dx_mean = g * ((2. * a * xdiff) + (b * ydiff))
dg_dy_mean = g * ((b * xdiff) + (2. * c * ydiff))
dg_dx_stddev = g * (-(da_dx_stddev * xdiff2 +
db_dx_stddev * xdiff * ydiff +
dc_dx_stddev * ydiff2))
dg_dy_stddev = g * (-(da_dy_stddev * xdiff2 +
db_dy_stddev * xdiff * ydiff +
dc_dy_stddev * ydiff2))
dg_dtheta = g * (-(da_dtheta * xdiff2 +
db_dtheta * xdiff * ydiff +
dc_dtheta * ydiff2))
return np.array([dg_dx_mean,
dg_dy_mean,
dg_dA,
dg_dx_stddev,
dg_dy_stddev,
dg_dtheta]).T
| {
"content_hash": "0b18ea7b32fd6bd45aae8bad5a08d644",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 127,
"avg_line_length": 36.017605633802816,
"alnum_prop": 0.6026982109688142,
"repo_name": "SKA-ScienceDataProcessor/FastImaging-Python",
"id": "d7ff0b44650574c89aa4b7255039cc257e037674",
"size": "10229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/fastimgproto/sourcefind/fit.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "303819"
},
{
"name": "Shell",
"bytes": "470"
}
],
"symlink_target": ""
} |
import mimetypes
import os
import random
import time
from email import Charset, Encoders
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.Header import Header
from email.Utils import formatdate, getaddresses, formataddr
from django.conf import settings
from django.core.mail.utils import DNS_NAME
from django.utils.encoding import smart_str, force_unicode
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters.
Charset.add_charset('utf-8', Charset.SHORTEST, Charset.QP, 'utf-8')
# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
class BadHeaderError(ValueError):
pass
# Copied from Python standard library, with the following modifications:
# * Used cached hostname for performance.
# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
"""Returns a string suitable for RFC 2822 compliant Message-ID, e.g:
<[email protected]>
Optional idstring if given is a string used to strengthen the
uniqueness of the message id.
"""
timeval = time.time()
utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval))
try:
pid = os.getpid()
except AttributeError:
# No getpid() in Jython, for example.
pid = 1
randint = random.randrange(100000)
if idstring is None:
idstring = ''
else:
idstring = '.' + idstring
idhost = DNS_NAME
msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, idhost)
return msgid
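# Note (illustrative, not original code): the ids produced above follow the
# '<utcdate.pid.random[.idstring]@DNS_NAME>' pattern, e.g. make_msgid('reg')
# yields an id whose local part ends in '.reg'.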
def forbid_multi_line_headers(name, val, encoding):
"""Forbids multi-line headers, to prevent header injection."""
encoding = encoding or settings.DEFAULT_CHARSET
val = force_unicode(val)
if '\n' in val or '\r' in val:
raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
try:
val = val.encode('ascii')
except UnicodeEncodeError:
if name.lower() in ('to', 'from', 'cc'):
result = []
for nm, addr in getaddresses((val,)):
nm = str(Header(nm.encode(encoding), encoding))
try:
addr = addr.encode('ascii')
except UnicodeEncodeError: # IDN
addr = str(Header(addr.encode(encoding), encoding))
result.append(formataddr((nm, addr)))
val = ', '.join(result)
else:
val = Header(val.encode(encoding), encoding)
else:
if name.lower() == 'subject':
val = Header(val)
return name, val
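def _example_header_injection_blocked():
    """Illustrative sketch only (not part of the original module): a value
    containing a newline is rejected, which blocks header-injection attempts."""
    try:
        forbid_multi_line_headers('Subject', 'Hi\nBcc: victim@example.com', 'utf-8')
    except BadHeaderError:
        return True
    return False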
class SafeMIMEText(MIMEText):
def __init__(self, text, subtype, charset):
self.encoding = charset
MIMEText.__init__(self, text, subtype, charset)
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val, self.encoding)
MIMEText.__setitem__(self, name, val)
class SafeMIMEMultipart(MIMEMultipart):
def __init__(self, _subtype='mixed', boundary=None, _subparts=None, encoding=None, **_params):
self.encoding = encoding
MIMEMultipart.__init__(self, _subtype, boundary, _subparts, **_params)
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val, self.encoding)
MIMEMultipart.__setitem__(self, name, val)
class EmailMessage(object):
"""
A container for email information.
"""
content_subtype = 'plain'
mixed_subtype = 'mixed'
encoding = None # None => use settings default
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None, cc=None):
"""
Initialize a single email message (which can be sent to multiple
recipients).
All strings used to create the message can be unicode strings
(or UTF-8 bytestrings). The SafeMIMEText class will handle any
necessary encoding conversions.
"""
if to:
assert not isinstance(to, basestring), '"to" argument must be a list or tuple'
self.to = list(to)
else:
self.to = []
if cc:
assert not isinstance(cc, basestring), '"cc" argument must be a list or tuple'
self.cc = list(cc)
else:
self.cc = []
if bcc:
assert not isinstance(bcc, basestring), '"bcc" argument must be a list or tuple'
self.bcc = list(bcc)
else:
self.bcc = []
self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
self.subject = subject
self.body = body
self.attachments = attachments or []
self.extra_headers = headers or {}
self.connection = connection
def get_connection(self, fail_silently=False):
from django.core.mail import get_connection
if not self.connection:
self.connection = get_connection(fail_silently=fail_silently)
return self.connection
def message(self):
encoding = self.encoding or settings.DEFAULT_CHARSET
msg = SafeMIMEText(smart_str(self.body, encoding),
self.content_subtype, encoding)
msg = self._create_message(msg)
msg['Subject'] = self.subject
msg['From'] = self.extra_headers.get('From', self.from_email)
msg['To'] = ', '.join(self.to)
if self.cc:
msg['Cc'] = ', '.join(self.cc)
# Email header names are case-insensitive (RFC 2045), so we have to
# accommodate that when doing comparisons.
header_names = [key.lower() for key in self.extra_headers]
if 'date' not in header_names:
msg['Date'] = formatdate()
if 'message-id' not in header_names:
msg['Message-ID'] = make_msgid()
for name, value in self.extra_headers.items():
if name.lower() == 'from': # From is already handled
continue
msg[name] = value
return msg
def recipients(self):
"""
Returns a list of all recipients of the email (includes direct
addressees as well as Cc and Bcc entries).
"""
return self.to + self.cc + self.bcc
def send(self, fail_silently=False):
"""Sends the email message."""
if not self.recipients():
# Don't bother creating the network connection if there's nobody to
# send to.
return 0
return self.get_connection(fail_silently).send_messages([self])
def attach(self, filename=None, content=None, mimetype=None):
"""
Attaches a file with the given filename and content. The filename can
be omitted and the mimetype is guessed, if not provided.
If the first parameter is a MIMEBase subclass it is inserted directly
into the resulting message attachments.
"""
if isinstance(filename, MIMEBase):
assert content == mimetype == None
self.attachments.append(filename)
else:
assert content is not None
self.attachments.append((filename, content, mimetype))
def attach_file(self, path, mimetype=None):
"""Attaches a file from the filesystem."""
filename = os.path.basename(path)
content = open(path, 'rb').read()
self.attach(filename, content, mimetype)
def _create_message(self, msg):
return self._create_attachments(msg)
def _create_attachments(self, msg):
if self.attachments:
encoding = self.encoding or settings.DEFAULT_CHARSET
body_msg = msg
msg = SafeMIMEMultipart(_subtype=self.mixed_subtype, encoding=encoding)
if self.body:
msg.attach(body_msg)
for attachment in self.attachments:
if isinstance(attachment, MIMEBase):
msg.attach(attachment)
else:
msg.attach(self._create_attachment(*attachment))
return msg
def _create_mime_attachment(self, content, mimetype):
"""
Converts the content, mimetype pair into a MIME attachment object.
"""
basetype, subtype = mimetype.split('/', 1)
if basetype == 'text':
encoding = self.encoding or settings.DEFAULT_CHARSET
attachment = SafeMIMEText(smart_str(content, encoding), subtype, encoding)
else:
# Encode non-text attachments with base64.
attachment = MIMEBase(basetype, subtype)
attachment.set_payload(content)
Encoders.encode_base64(attachment)
return attachment
def _create_attachment(self, filename, content, mimetype=None):
"""
Converts the filename, content, mimetype triple into a MIME attachment
object.
"""
if mimetype is None:
mimetype, _ = mimetypes.guess_type(filename)
if mimetype is None:
mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
attachment = self._create_mime_attachment(content, mimetype)
if filename:
attachment.add_header('Content-Disposition', 'attachment',
filename=filename)
return attachment
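def _example_email_message():
    """Illustrative sketch only (not part of the original module); the addresses
    and attachment content below are arbitrary assumptions."""
    msg = EmailMessage(subject='Hello', body='Plain text body',
                       from_email='from@example.com', to=['to@example.com'])
    # Either attach raw content with an explicit mimetype...
    msg.attach('notes.txt', 'Some notes.', 'text/plain')
    # ...or pass a ready-made MIMEBase instance as the first argument.
    # Calling msg.send() would deliver it via get_connection().
    return msg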
class EmailMultiAlternatives(EmailMessage):
"""
A version of EmailMessage that makes it easy to send multipart/alternative
messages. For example, including text and HTML versions of the text is
made easier.
"""
alternative_subtype = 'alternative'
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None, alternatives=None,
cc=None):
"""
Initialize a single email message (which can be sent to multiple
recipients).
All strings used to create the message can be unicode strings (or UTF-8
bytestrings). The SafeMIMEText class will handle any necessary encoding
conversions.
"""
super(EmailMultiAlternatives, self).__init__(subject, body, from_email, to, bcc, connection, attachments, headers, cc)
self.alternatives=alternatives or []
def attach_alternative(self, content, mimetype):
"""Attach an alternative content representation."""
assert content is not None
assert mimetype is not None
self.alternatives.append((content, mimetype))
def _create_message(self, msg):
return self._create_attachments(self._create_alternatives(msg))
def _create_alternatives(self, msg):
encoding = self.encoding or settings.DEFAULT_CHARSET
if self.alternatives:
body_msg = msg
msg = SafeMIMEMultipart(_subtype=self.alternative_subtype, encoding=encoding)
if self.body:
msg.attach(body_msg)
for alternative in self.alternatives:
msg.attach(self._create_mime_attachment(*alternative))
return msg
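def _example_text_and_html_email():
    """Illustrative sketch only (not part of the original module); the addresses
    and markup below are arbitrary assumptions."""
    msg = EmailMultiAlternatives(subject='Hello', body='Plain text body',
                                 from_email='from@example.com',
                                 to=['to@example.com'])
    msg.attach_alternative('<p>HTML body</p>', 'text/html')
    return msg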
| {
"content_hash": "a66ad22e95c55614a2997aa8acfabc7a",
"timestamp": "",
"source": "github",
"line_count": 297,
"max_line_length": 126,
"avg_line_length": 38.77777777777778,
"alnum_prop": 0.6020665103759659,
"repo_name": "pombredanne/algos-urv",
"id": "2bf5a82eeb28287babc9b3312aea4a9277742e82",
"size": "11517",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "django/core/mail/message.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import ctypes
import os
import sys
import sysconfig
import warnings
from ..util import galpyWarning, galpyWarningVerbose
PY3= sys.version > '3'
if PY3:
_ext_suffix= sysconfig.get_config_var('EXT_SUFFIX')
else: #pragma: no cover
_ext_suffix= '.so'
_libgalpy= None
_libgalpy_loaded= None
_libgalpy_actionAngleTorus= None
_libgalpy_actionAngleTorus_loaded= None
def load_libgalpy():
global _libgalpy
global _libgalpy_loaded
if _libgalpy_loaded is False or not _libgalpy is None:
return (_libgalpy,_libgalpy_loaded)
outerr= None
for path in sys.path:
if not os.path.isdir(path): continue
try:
if sys.platform == 'win32' and sys.version_info >= (3,8): # pragma: no cover
# winmode=0x008 is easy-going way to call LoadLibraryExA
_lib = ctypes.CDLL(os.path.join(path,'libgalpy%s' % _ext_suffix),winmode=0x008)
else:
_lib = ctypes.CDLL(os.path.join(path,'libgalpy%s' % _ext_suffix))
except OSError as e:
if os.path.exists(os.path.join(path,'libgalpy%s' % _ext_suffix)): #pragma: no cover
outerr= e
_lib = None
else:
break
if _lib is None: #pragma: no cover
if not outerr is None:
warnings.warn("libgalpy C extension module not loaded, because of error '%s' " % outerr,
galpyWarning)
else:
warnings.warn("libgalpy C extension module not loaded, because libgalpy%s image was not found" % _ext_suffix,
galpyWarning)
_libgalpy_loaded= False
else:
_libgalpy_loaded= True
_libgalpy= _lib
return (_libgalpy,_libgalpy_loaded)
def load_libgalpy_actionAngleTorus():
global _libgalpy_actionAngleTorus
global _libgalpy_actionAngleTorus_loaded
if _libgalpy_actionAngleTorus_loaded is False \
or not _libgalpy_actionAngleTorus is None:
return (_libgalpy_actionAngleTorus,_libgalpy_actionAngleTorus_loaded)
outerr= None
for path in sys.path:
if not os.path.isdir(path): continue
try:
if sys.platform == 'win32' and sys.version_info >= (3,8): # pragma: no cover
# winmode=0x008 is easy-going way to call LoadLibraryExA
_lib = ctypes.CDLL(os.path.join(path,'libgalpy_actionAngleTorus%s' % _ext_suffix),winmode=0x008)
else:
_lib = ctypes.CDLL(os.path.join(path,'libgalpy_actionAngleTorus%s' % _ext_suffix))
except OSError as e:
if os.path.exists(os.path.join(path,'libgalpy_actionAngleTorus%s' % _ext_suffix)): #pragma: no cover
outerr= e
_lib = None
else:
break
if _lib is None: #pragma: no cover
if not outerr is None:
warnings.warn("libgalpy_actionAngleTorus C extension module not loaded, because of error '%s' " % outerr,
galpyWarningVerbose)
else:
warnings.warn("libgalpy_actionAngleTorus C extension module not loaded, because libgalpy%s image was not found" % _ext_suffix,
galpyWarningVerbose)
_libgalpy_actionAngleTorus_loaded= False
else:
_libgalpy_actionAngleTorus_loaded= True
_libgalpy_actionAngleTorus= _lib
return (_libgalpy_actionAngleTorus,_libgalpy_actionAngleTorus_loaded)
| {
"content_hash": "1ce81383661fb94944d05ec52f5428fc",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 138,
"avg_line_length": 39.627906976744185,
"alnum_prop": 0.6223591549295775,
"repo_name": "jobovy/galpy",
"id": "2247f772a83010c020f1cff7d7b228cae5ff22e8",
"size": "3478",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "galpy/util/_load_extension_libs.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "479"
},
{
"name": "C",
"bytes": "527986"
},
{
"name": "C++",
"bytes": "16627"
},
{
"name": "Makefile",
"bytes": "423"
},
{
"name": "Python",
"bytes": "4970864"
},
{
"name": "Shell",
"bytes": "1873"
}
],
"symlink_target": ""
} |
import unittest
import trace
import sys
import os
import urllib.request
import json
from ast import literal_eval
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
from rasblite import engine
BASE_URL='/rest/api/1.0/'
SERVER_PORT = 8080
DEFAULT_MODEL = \
"""[Base]
url = /rest/api/1.0/
[Model]
structure =
GET,POST users/
GET,PUT,DELETE users/:userID/
GET,PUT users/:userID/name
GET,POST users/:userID/addresses/
GET,PUT,DELETE users/:userID/addresses/:address/
GET,PUT users/:userID/addresses/:address/address_lines
GET,PUT users/:userID/addresses/:address/post_code
GET,PUT users/:userID/age
"""
DEFAULT_MODEL_STR = \
"""BASE URL = /rest/api/1.0/
STRUCTURE = {'users': {':userID': {'METHODS': 'GET,PUT,DELETE',
'addresses': {':address': {'METHODS': 'GET,PUT,DELETE',
'address_lines': {'METHODS': 'GET,PUT'},
'post_code': {'METHODS': 'GET,PUT'}},
'METHODS': 'GET,POST'},
'age': {'METHODS': 'GET,PUT'},
'name': {'METHODS': 'GET,PUT'}},
'METHODS': 'GET,POST'}}"""
DEFAULT_STARTING_DATA = "{'users': [{'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}], 'age': '21', 'name': 'Bob'}, {'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}], 'age': '60', 'name': 'Frank'}]}"
class TestModelParser(unittest.TestCase):
def setUp(self):
self.model_parser = engine.ModelParser()
self.assertIsNotNone(self.model_parser, 'ModelParser did not initialise')
def tearDown(self):
pass
def test_parse_default_starting_data(self):
model = self.model_parser.parse(DEFAULT_MODEL, DEFAULT_STARTING_DATA)
self.assertMultiLineEqual(str(model), DEFAULT_MODEL_STR, 'Model parsed from config seems different to model loaded')
result = model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
### User Bob
result = model.action_path('GET', BASE_URL + 'users/0/')
expected = literal_eval("{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}")
self.assertDictEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/0/name')
expected = 'Bob'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/0/age')
expected = '21'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/0/addresses')
expected = literal_eval("[{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
### User Bob - Address 0
result = model.action_path('GET', BASE_URL + 'users/0/addresses/0')
expected = literal_eval("{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}")
self.assertDictEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/0/addresses/0/address_lines')
expected = '123 Fake Street'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/0/addresses/0/post_code')
expected = 'AB12 3CD'
self.assertEqual(result, expected, 'Starting data is different to expected')
### User Frank
result = model.action_path('GET', BASE_URL + 'users/1/')
expected = literal_eval("{'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}")
self.assertDictEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/name')
expected = 'Frank'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/age')
expected = '60'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/addresses')
expected = literal_eval("[{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
### User Frank - Address 0
result = model.action_path('GET', BASE_URL + 'users/1/addresses/0')
expected = literal_eval("{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}")
self.assertDictEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/addresses/0/address_lines')
expected = '456 My Street'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/addresses/0/post_code')
expected = 'EF45 6GH'
self.assertEqual(result, expected, 'Starting data is different to expected')
### User Frank - Address 1
result = model.action_path('GET', BASE_URL + 'users/1/addresses/1')
expected = literal_eval("{'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}")
self.assertDictEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/addresses/1/address_lines')
expected = '789 Other Street'
self.assertEqual(result, expected, 'Starting data is different to expected')
result = model.action_path('GET', BASE_URL + 'users/1/addresses/1/post_code')
expected = 'IJ12 3KL'
self.assertEqual(result, expected, 'Starting data is different to expected')
class TestModelData(unittest.TestCase):
def setUp(self):
self.model_parser = engine.ModelParser()
self.model = self.model_parser.parse(DEFAULT_MODEL, DEFAULT_STARTING_DATA)
self.assertMultiLineEqual(str(self.model), DEFAULT_MODEL_STR, 'Model parsed from config seems different to model loaded')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
def tearDown(self):
pass
def test_action_path_GET(self):
# Check GET fails correctly
result = self.model.action_path('GET', BASE_URL + 'users/999')
expected = engine.ModelData.ModelError(error_type='BaseError')
self.assertIsInstance(result, expected.__class__, 'model_object.action_path(GET,...) should have returned an error because this user id does not exist.')
self.assertEqual(result.error_type, expected.error_type, 'model_object.action_path(GET,...) returned an error as expected but the error type was different.')
result = self.model.action_path('GET', BASE_URL + 'users/0/this_does_not_exist')
expected = engine.ModelData.ModelError(error_type='BadRequestError')
self.assertIsInstance(result, expected.__class__, 'model_object.action_path(GET,...) should have returned an error because the path does not exist.')
self.assertEqual(result.error_type, expected.error_type, 'model_object.action_path(GET,...) returned an error as expected but the error type was different.')
result = self.model.action_path('GET', BASE_URL + 'users/ten')
expected = engine.ModelData.ModelError(error_type='BadRequestError')
self.assertIsInstance(result, expected.__class__, 'model_object.action_path(GET,...) should have returned an error because a string was given instead of a number.')
self.assertEqual(result.error_type, expected.error_type, 'model_object.action_path(GET,...) returned an error as expected but the error type was different.')
# Exercise normal test routine
### User Bob
result = self.model.action_path('GET', BASE_URL + 'users/0/')
expected = literal_eval("{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}")
self.assertDictEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/0/name')
expected = 'Bob'
self.assertEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/0/age')
expected = '21'
self.assertEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/0/addresses')
expected = literal_eval("[{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
### User Bob - Address 0
result = self.model.action_path('GET', BASE_URL + 'users/0/addresses/0')
expected = literal_eval("{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}")
self.assertDictEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/0/addresses/0/address_lines')
expected = '123 Fake Street'
self.assertEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/0/addresses/0/post_code')
expected = 'AB12 3CD'
self.assertEqual(result, expected, 'model_object.action_path(GET,...) returned unexpected result')
def test_action_path_POST(self):
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
# Check POST fails correctly
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.model.action_path('POST', BASE_URL + 'users/1/', message_body)
expected = engine.ModelData.ModelError(error_type='BadRequestError')
self.assertIsInstance(result, expected.__class__, 'model_object.action_path(POST,...) should have returned an error because the test data disallows POST here but ModelError was not returned.')
self.assertEqual(result.error_type, expected.error_type, 'model_object.action_path(POST,...) returned an error as expected but the error type was different.')
# Exercise normal test routine
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.model.action_path('POST', BASE_URL + 'users/', message_body)
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'model_object.action_path(POST,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently added user')
message_body = literal_eval("{'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}")
result = self.model.action_path('POST', BASE_URL + 'users/0/addresses/', message_body)
expected = literal_eval("[{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}]")
self.assertListEqual(result, expected, 'model_object.action_path(POST,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently added address')
def test_action_path_PUT(self):
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
# Check PUT fails correctly
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.model.action_path('PUT', BASE_URL + 'users/', message_body)
expected = engine.ModelData.ModelError(error_type='BadRequestError')
self.assertIsInstance(result, expected.__class__, 'model_object.action_path(PUT,...) should have returned an error because the test data disallows PUT here but ModelError was not returned.')
self.assertEqual(result.error_type, expected.error_type, 'model_object.action_path(PUT,...) returned an error as expected but the error type was different.')
# Exercise normal test routine
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}")
result = self.model.action_path('PUT', BASE_URL + 'users/1/', message_body)
expected = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}")
self.assertDictEqual(result, expected, 'model_object.action_path(PUT,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently updated details')
message_body = 'Sarah'
result = self.model.action_path('PUT', BASE_URL + 'users/0/name', message_body)
expected = literal_eval("{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}")
self.assertDictEqual(result, expected, 'model_object.action_path(PUT,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently updated details')
message_body = '44'
result = self.model.action_path('PUT', BASE_URL + 'users/1/age', message_body)
expected = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '44'}")
self.assertDictEqual(result, expected, 'model_object.action_path(PUT,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently updated details')
message_body = literal_eval("{'post_code': 'AB55 9TT', 'address_lines': '46 Test Road'}")
result = self.model.action_path('PUT', BASE_URL + 'users/1/addresses/0/', message_body)
expected = literal_eval("{'post_code': 'AB55 9TT', 'address_lines': '46 Test Road'}")
self.assertDictEqual(result, expected, 'model_object.action_path(PUT,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'address_lines': '46 Test Road', 'post_code': 'AB55 9TT'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently updated details')
message_body = '77 Roundabout Street'
result = self.model.action_path('PUT', BASE_URL + 'users/1/addresses/1/address_lines/', message_body)
expected = literal_eval("{'post_code': 'CC44 3YY', 'address_lines': '77 Roundabout Street'}")
self.assertDictEqual(result, expected, 'model_object.action_path(PUT,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'address_lines': '46 Test Road', 'post_code': 'AB55 9TT'}, {'post_code': 'CC44 3YY', 'address_lines': '77 Roundabout Street'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently updated details')
message_body = 'ZY99 8XR'
result = self.model.action_path('PUT', BASE_URL + 'users/1/addresses/0/post_code/', message_body)
expected = literal_eval("{'address_lines': '46 Test Road', 'post_code': 'ZY99 8XR'}")
self.assertDictEqual(result, expected, 'model_object.action_path(PUT,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'address_lines': '46 Test Road', 'post_code': 'ZY99 8XR'}, {'post_code': 'CC44 3YY', 'address_lines': '77 Roundabout Street'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently updated details')
def test_action_path_DELETE(self):
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'Starting data is different to expected')
# Check DELETE fails correctly
result = self.model.action_path('DELETE', BASE_URL + 'users/0/name')
expected = engine.ModelData.ModelError(error_type='BadRequestError')
self.assertIsInstance(result, expected.__class__, 'model_object.action_path(DELETE,...) should have returned an error because the test data disallows DELETE here but ModelError was not returned.')
self.assertEqual(result.error_type, expected.error_type, 'model_object.action_path(DELETE,...) returned an error as expected but the error type was different.')
# Exercise normal test routine
result = self.model.action_path('DELETE', BASE_URL + 'users/0/')
expected = literal_eval("[{'name': '', 'addresses': [], 'age': ''}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'model_object.action_path(DELETE,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': '', 'addresses': [], 'age': ''}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently deleted user')
result = self.model.action_path('DELETE', BASE_URL + 'users/1/addresses/1')
expected = literal_eval("[{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': '', 'address_lines': ''}]")
self.assertListEqual(result, expected, 'model_object.action_path(DELETE,...) returned unexpected result')
result = self.model.action_path('GET', BASE_URL + 'users/')
expected = literal_eval("[{'name': '', 'addresses': [], 'age': ''}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': '', 'address_lines': ''}], 'age': '60'}]")
self.assertListEqual(result, expected, 'model_object.action_path(GET,...) didn\'t show the recently deleted address')
class TestRequestHandler(unittest.TestCase):
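    """Exercises request handling end-to-end by issuing real HTTP requests
    against a Controller running on localhost."""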
def setUp(self):
self.controller = engine.Controller(DEFAULT_MODEL, DEFAULT_STARTING_DATA, SERVER_PORT)
self.controller.start()
def tearDown(self):
self.controller.stop()
def server_request(self, method, url_path, data=None):
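        """Issue an HTTP request against the running engine server.

        ``data``, when given, is JSON-encoded into the request body. The parsed
        response body is returned on success; any urllib.error.URLError
        (including HTTPError) is caught and returned instead, so tests can
        assert on error codes as well as payloads.
        """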
full_url = 'http://localhost:' + str(SERVER_PORT) + BASE_URL + url_path
if data:
data = json.dumps(data).encode('utf8')
request = urllib.request.Request(method=method,
url=full_url,
data=data,
headers={'Content-Type': 'application/json'})
try:
raw_response = urllib.request.urlopen(request).read()
return self.controller.parse_response(raw_response)
except urllib.error.URLError as error:
# This will catch urllib.error.HTTPError too
return error
def test_set_controller(self):
        # Have to use getattr to suppress warnings about RequestHandler.controller
self.assertEqual(getattr(engine.RequestHandler, 'controller'), self.controller, "RequestHandler should have a reference to this controller. Did the member variable name change?")
def test_request_GET(self):
# Verify starting data
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertNotIsInstance(result, urllib.error.URLError, 'server_request failed with error: ' + str(result))
self.assertListEqual(result, expected, 'Starting data appears to be different to what was expected. This will cause the subsequent tests to fail')
# Check GET fails correctly
result = self.server_request('GET', 'users/999')
self.assertIsInstance(result, urllib.error.URLError, 'self.server_request(GET,...) should have returned an error because this user id does not exist.')
self.assertEqual(result.code, 404, 'self.server_request(GET,...) returned an error as expected but the error code was different.')
result = self.server_request('GET', 'users/0/this_does_not_exist')
self.assertIsInstance(result, urllib.error.URLError, 'self.server_request(GET,...) should have returned an error because the path does not exist.')
self.assertEqual(result.code, 400, 'self.server_request(GET,...) returned an error as expected but the error code was different.')
result = self.server_request('GET', 'users/ten')
self.assertIsInstance(result, urllib.error.URLError, 'self.server_request(GET,...) should have returned an error because a string was given instead of a number.')
self.assertEqual(result.code, 400, 'self.server_request(GET,...) returned an error as expected but the error code was different.')
# Exercise normal test routine
### User Bob
result = self.server_request('GET', 'users/0/')
expected = literal_eval("{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}")
self.assertDictEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
result = self.server_request('GET', 'users/0/name')
expected = 'Bob'
self.assertEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
result = self.server_request('GET', 'users/0/age')
expected = '21'
self.assertEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
result = self.server_request('GET', 'users/0/addresses')
expected = literal_eval("[{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
### User Bob - Address 0
result = self.server_request('GET', 'users/0/addresses/0')
expected = literal_eval("{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}")
self.assertDictEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
result = self.server_request('GET', 'users/0/addresses/0/address_lines')
expected = '123 Fake Street'
self.assertEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
result = self.server_request('GET', 'users/0/addresses/0/post_code')
expected = 'AB12 3CD'
self.assertEqual(result, expected, 'self.server_request(GET,...) returned unexpected result')
def test_request_POST(self):
# Verify starting data
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertNotIsInstance(result, urllib.error.URLError, 'server_request failed with error: ' + str(result))
self.assertListEqual(result, expected, 'Starting data appears to be different to what was expected. This will cause the subsequent tests to fail')
# Check POST fails correctly
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.server_request('POST', 'users/1/', message_body)
self.assertIsInstance(result, urllib.error.URLError, 'self.server_request(POST,...) should have returned an error because the test data disallows POST here.')
self.assertEqual(result.code, 400, 'self.server_request(POST,...) returned an error as expected but the error code was different.')
# Exercise normal test routine
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.server_request('POST', 'users/', message_body)
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'self.server_request(POST,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently added user')
message_body = literal_eval("{'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}")
result = self.server_request('POST', 'users/0/addresses/', message_body)
expected = literal_eval("[{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}]")
self.assertListEqual(result, expected, 'self.server_request(POST,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently added address')
def test_request_PUT(self):
# Verify starting data
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertNotIsInstance(result, urllib.error.URLError, 'server_request failed with error: ' + str(result))
self.assertListEqual(result, expected, 'Starting data appears to be different to what was expected. This will cause the subsequent tests to fail')
# Check PUT fails correctly
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.server_request('PUT', 'users/', message_body)
self.assertIsInstance(result, urllib.error.URLError, 'self.server_request(PUT,...) should have returned an error because the test data disallows PUT here.')
self.assertEqual(result.code, 400, 'self.server_request(PUT,...) returned an error as expected but the error code was different.')
# Exercise normal test routine
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}")
result = self.server_request('PUT', 'users/1/', message_body)
expected = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = 'Sarah'
result = self.server_request('PUT', 'users/0/name', message_body)
expected = literal_eval("{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '18'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = '44'
result = self.server_request('PUT', 'users/1/age', message_body)
expected = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '44'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = literal_eval("{'post_code': 'AB55 9TT', 'address_lines': '46 Test Road'}")
result = self.server_request('PUT', 'users/1/addresses/0/', message_body)
expected = literal_eval("{'post_code': 'AB55 9TT', 'address_lines': '46 Test Road'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'address_lines': '46 Test Road', 'post_code': 'AB55 9TT'}, {'post_code': 'CC44 3YY', 'address_lines': '11 Testing Avenue'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = '77 Roundabout Street'
result = self.server_request('PUT', 'users/1/addresses/1/address_lines/', message_body)
expected = literal_eval("{'post_code': 'CC44 3YY', 'address_lines': '77 Roundabout Street'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'address_lines': '46 Test Road', 'post_code': 'AB55 9TT'}, {'post_code': 'CC44 3YY', 'address_lines': '77 Roundabout Street'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = 'ZY99 8XR'
result = self.server_request('PUT', 'users/1/addresses/0/post_code/', message_body)
expected = literal_eval("{'address_lines': '46 Test Road', 'post_code': 'ZY99 8XR'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Jim', 'addresses': [{'address_lines': '46 Test Road', 'post_code': 'ZY99 8XR'}, {'post_code': 'CC44 3YY', 'address_lines': '77 Roundabout Street'}], 'age': '44'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
def test_request_DELETE(self):
# Verify starting data
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertNotIsInstance(result, urllib.error.URLError, 'server_request failed with error: ' + str(result))
self.assertListEqual(result, expected, 'Starting data appears to be different to what was expected. This will cause the subsequent tests to fail')
# Check DELETE fails correctly
result = self.server_request('DELETE', 'users/0/name')
self.assertIsInstance(result, urllib.error.URLError, 'self.server_request(DELETE,...) should have returned an error because the test data disallows DELETE here.')
self.assertEqual(result.code, 400, 'self.server_request(DELETE,...) returned an error as expected but the error code was different.')
# Exercise normal test routine
result = self.server_request('DELETE', 'users/0/')
expected = literal_eval("[{'name': '', 'addresses': [], 'age': ''}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'self.server_request(DELETE,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': '', 'addresses': [], 'age': ''}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently deleted user')
result = self.server_request('DELETE', 'users/1/addresses/1')
expected = literal_eval("[{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': '', 'address_lines': ''}]")
self.assertListEqual(result, expected, 'self.server_request(DELETE,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': '', 'addresses': [], 'age': ''}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': '', 'address_lines': ''}], 'age': '60'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently deleted address')
def test_request_ALL(self):
# Verify starting data
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Bob', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'name': 'Frank', 'addresses': [{'post_code': 'EF45 6GH', 'address_lines': '456 My Street'}, {'post_code': 'IJ12 3KL', 'address_lines': '789 Other Street'}], 'age': '60'}]")
self.assertNotIsInstance(result, urllib.error.URLError, 'server_request failed with error: ' + str(result))
self.assertListEqual(result, expected, 'Starting data appears to be different to what was expected. This will cause the subsequent tests to fail')
# Exercise normal test routine
message_body = literal_eval("{'name': 'Jim', 'addresses': [{'post_code': 'UR98 7ST', 'address_lines': '30 Flat Road'}], 'age': '18'}")
result = self.server_request('POST', 'users/', message_body)
expected = literal_eval("[{'age': '21', 'name': 'Bob', 'addresses': [{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}]}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'self.server_request(POST,...) returned unexpected result')
message_body = 'Sarah'
result = self.server_request('PUT', 'users/0/name', message_body)
expected = literal_eval("{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'age': '18', 'name': 'Jim', 'addresses': [{'address_lines': '30 Flat Road', 'post_code': 'UR98 7ST'}]}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = literal_eval("{'name': 'Dan', 'addresses': [{'post_code': 'GT58 8WW', 'address_lines': '3 Shape Road'}], 'age': '26'}")
result = self.server_request('PUT', 'users/2/', message_body)
expected = literal_eval("{'name': 'Dan', 'addresses': [{'post_code': 'GT58 8WW', 'address_lines': '3 Shape Road'}], 'age': '26'}")
self.assertDictEqual(result, expected, 'self.server_request(PUT,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}], 'age': '21'}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'name': 'Dan', 'addresses': [{'post_code': 'GT58 8WW', 'address_lines': '3 Shape Road'}], 'age': '26'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently updated details')
message_body = literal_eval("{'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}")
result = self.server_request('POST', 'users/0/addresses/', message_body)
expected = literal_eval("[{'address_lines': '123 Fake Street', 'post_code': 'AB12 3CD'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}]")
self.assertListEqual(result, expected, 'self.server_request(POST,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}], 'age': '21'}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '456 My Street', 'post_code': 'EF45 6GH'}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'name': 'Dan', 'addresses': [{'post_code': 'GT58 8WW', 'address_lines': '3 Shape Road'}], 'age': '26'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently added address')
result = self.server_request('DELETE', 'users/1/addresses/0')
expected = literal_eval("[{'address_lines': '', 'post_code': ''}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]")
self.assertListEqual(result, expected, 'self.server_request(DELETE,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}], 'age': '21'}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '', 'post_code': ''}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'name': 'Dan', 'addresses': [{'post_code': 'GT58 8WW', 'address_lines': '3 Shape Road'}], 'age': '26'}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently deleted address')
result = self.server_request('DELETE', 'users/2/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}], 'age': '21'}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '', 'post_code': ''}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'name': '', 'addresses': [], 'age': ''}]")
self.assertListEqual(result, expected, 'self.server_request(DELETE,...) returned unexpected result')
result = self.server_request('GET', 'users/')
expected = literal_eval("[{'name': 'Sarah', 'addresses': [{'post_code': 'AB12 3CD', 'address_lines': '123 Fake Street'}, {'post_code': 'EE55 1FF', 'address_lines': '99 Oak Avenue'}], 'age': '21'}, {'age': '60', 'name': 'Frank', 'addresses': [{'address_lines': '', 'post_code': ''}, {'address_lines': '789 Other Street', 'post_code': 'IJ12 3KL'}]}, {'name': '', 'addresses': [], 'age': ''}]")
self.assertListEqual(result, expected, 'self.server_request(GET,...) didn\'t show the recently deleted user')
if __name__ == '__main__':
traceObj = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix], count=1, trace=0)
traceObj.runfunc(unittest.main, exit=False)
results = traceObj.results()
results.write_results(summary=True, coverdir='.')
| {
"content_hash": "9b70936ea27f92dcea4d3a5fa8c2a121",
"timestamp": "",
"source": "github",
"line_count": 604,
"max_line_length": 483,
"avg_line_length": 83.6771523178808,
"alnum_prop": 0.6149462812370154,
"repo_name": "timrainbow/rasblite",
"id": "dc4e9add4c93387f4bda3d2017efc8116129004c",
"size": "50565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_rasblite.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "87238"
}
],
"symlink_target": ""
} |
from .image import img_stretch, img_tile, conv_filter_tile, to_bc01, to_b01c
| {
"content_hash": "8fa0f9a6834cb771254967150aac4dcf",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 76,
"avg_line_length": 77,
"alnum_prop": 0.7532467532467533,
"repo_name": "andersbll/deeppy",
"id": "60f336daa2c7d0a441089415103319aee73a6623",
"size": "77",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deeppy/misc/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "170995"
}
],
"symlink_target": ""
} |
from tastypie.resources import ModelResource
from tastypie import fields
from models import CityBorder
class CityBorderDetailResource(ModelResource):
'''API resource for the details of the city border'''
class Meta:
queryset = CityBorder.objects.all()
resource_name = 'cityborder-detail'
filtering = {
"name": "exact",
}
excludes = ['geom']
class CityBorderResource(ModelResource):
'''API resource for the geographical information of the city border'''
geojson = fields.CharField(attribute='geojson', readonly=True)
center = fields.CharField(attribute='center', readonly=True)
box = fields.DictField(attribute='box', readonly=True)
class Meta:
queryset = CityBorder.objects.all()
resource_name = 'cityborder'
filtering = {
"name": "exact",
}
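# Usage sketch (added comment; the URL prefix depends on how these resources are
# registered with a tastypie Api elsewhere in the project, which is not shown
# here): a request like GET <api_prefix>/cityborder/?name=<city_name> filters on
# the exact city name and returns the computed read-only geojson, center and box
# fields, while the cityborder-detail resource returns the plain model fields
# with geom excluded.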
| {
"content_hash": "9c524552033f47673d39075bee8cd326",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 74,
"avg_line_length": 30.06896551724138,
"alnum_prop": 0.658256880733945,
"repo_name": "jayArnel/crimemapping",
"id": "150d2f95cf891a1ce058f79b1ff71fc41ff9b19a",
"size": "872",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "map/api.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "7772"
},
{
"name": "CSS",
"bytes": "152276"
},
{
"name": "HTML",
"bytes": "12541"
},
{
"name": "JavaScript",
"bytes": "61256"
},
{
"name": "Makefile",
"bytes": "7686"
},
{
"name": "Python",
"bytes": "64964"
}
],
"symlink_target": ""
} |
import _plotly_utils.basevalidators
class TemplateitemnameValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name="templateitemname",
parent_name="funnel.marker.colorbar.tickformatstop",
**kwargs,
):
super(TemplateitemnameValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
**kwargs,
)
| {
"content_hash": "879a7facb2135e865c35873143d8f41a",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 31.125,
"alnum_prop": 0.6144578313253012,
"repo_name": "plotly/plotly.py",
"id": "a61b9d7c6db9caf0cb1f98da6be56668b6b8ce2d",
"size": "498",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/funnel/marker/colorbar/tickformatstop/_templateitemname.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function
from datetime import timedelta
from django.conf import settings
from django.db import models
from django.utils import timezone
from sentry.db.models import EncryptedJsonField, FlexibleForeignKey, Model, sane_repr
class AuthIdentity(Model):
__core__ = True
user = FlexibleForeignKey(settings.AUTH_USER_MODEL)
auth_provider = FlexibleForeignKey("sentry.AuthProvider")
ident = models.CharField(max_length=128)
data = EncryptedJsonField()
last_verified = models.DateTimeField(default=timezone.now)
last_synced = models.DateTimeField(default=timezone.now)
date_added = models.DateTimeField(default=timezone.now)
class Meta:
app_label = "sentry"
db_table = "sentry_authidentity"
unique_together = (("auth_provider", "ident"), ("auth_provider", "user"))
__repr__ = sane_repr("user_id", "auth_provider_id")
def __unicode__(self):
return self.ident
def get_audit_log_data(self):
return {"user_id": self.user_id, "data": self.data}
# TODO(dcramer): we'd like to abstract this so there's a central Role object
    # and it doesn't require two composite db objects to talk to each other
def is_valid(self, member):
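        """Return True only while the SSO link is healthy and recently verified.

        The identity is rejected when the member is flagged ``sso:invalid`` or
        not ``sso:linked``, and when ``last_verified`` is missing or older than
        24 hours.
        """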
if getattr(member.flags, "sso:invalid"):
return False
if not getattr(member.flags, "sso:linked"):
return False
if not self.last_verified:
return False
if self.last_verified < timezone.now() - timedelta(hours=24):
return False
return True
def get_display_name(self):
return self.user.get_display_name()
def get_label(self):
return self.user.get_label()
| {
"content_hash": "3fa1d66187d57b92657a9620670989c7",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 85,
"avg_line_length": 32.64150943396226,
"alnum_prop": 0.6676300578034682,
"repo_name": "mvaled/sentry",
"id": "cb648e011adc3ce12c7b1535680920d8c642c764",
"size": "1730",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/sentry/models/authidentity.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "226439"
},
{
"name": "Dockerfile",
"bytes": "6431"
},
{
"name": "HTML",
"bytes": "173429"
},
{
"name": "JavaScript",
"bytes": "9314175"
},
{
"name": "Lua",
"bytes": "65885"
},
{
"name": "Makefile",
"bytes": "9225"
},
{
"name": "Python",
"bytes": "50385401"
},
{
"name": "Ruby",
"bytes": "168"
},
{
"name": "Shell",
"bytes": "5685"
},
{
"name": "TypeScript",
"bytes": "773664"
}
],
"symlink_target": ""
} |
"""Support for Tasmota binary sensors."""
from homeassistant.components import binary_sensor
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.helpers.event as evt
from .const import DATA_REMOVE_DISCOVER_COMPONENT
from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW
from .mixins import TasmotaAvailability, TasmotaDiscoveryUpdate
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Tasmota binary sensor dynamically through discovery."""
@callback
def async_discover(tasmota_entity, discovery_hash):
"""Discover and add a Tasmota binary sensor."""
async_add_entities(
[
TasmotaBinarySensor(
tasmota_entity=tasmota_entity, discovery_hash=discovery_hash
)
]
)
hass.data[
DATA_REMOVE_DISCOVER_COMPONENT.format(binary_sensor.DOMAIN)
] = async_dispatcher_connect(
hass,
TASMOTA_DISCOVERY_ENTITY_NEW.format(binary_sensor.DOMAIN),
async_discover,
)
class TasmotaBinarySensor(
TasmotaAvailability,
TasmotaDiscoveryUpdate,
BinarySensorEntity,
):
"""Representation a Tasmota binary sensor."""
def __init__(self, **kwds):
"""Initialize the Tasmota binary sensor."""
self._delay_listener = None
self._state = None
super().__init__(
**kwds,
)
@callback
def off_delay_listener(self, now):
"""Switch device off after a delay."""
self._delay_listener = None
self._state = False
self.async_write_ha_state()
@callback
def state_updated(self, state, **kwargs):
"""Handle state updates."""
self._state = state
if self._delay_listener is not None:
self._delay_listener()
self._delay_listener = None
off_delay = self._tasmota_entity.off_delay
if self._state and off_delay is not None:
self._delay_listener = evt.async_call_later(
self.hass, off_delay, self.off_delay_listener
)
self.async_write_ha_state()
@property
def force_update(self):
"""Force update."""
return True
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
| {
"content_hash": "e1e6a9cc629d5657110dae99cbd7e1cd",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 80,
"avg_line_length": 29.094117647058823,
"alnum_prop": 0.6336433481601293,
"repo_name": "turbokongen/home-assistant",
"id": "feaafa72b29c99b563fe85d4602b98d3f3d4fab7",
"size": "2473",
"binary": false,
"copies": "7",
"ref": "refs/heads/dev",
"path": "homeassistant/components/tasmota/binary_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "30405146"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
} |
import unittest
import numpy as np
import sys
sys.path.append("../")
from op_test import OpTest
import paddle.fluid as fluid
import paddle.fluid.core as core
class TestSequencePadOp(OpTest):
def set_attr(self):
self.x_shape = [12, 10]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = [1.0]
self.padded_length = -1
self.dtype = 'float64'
def set_data(self):
x_data = np.random.uniform(0.1, 0.5, self.x_shape).astype(self.dtype)
pad_value_data = np.array(self.pad_value).astype(self.dtype)
self.inputs = {
'X': (x_data, self.x_len_lod),
'PadValue': pad_value_data,
}
self.attrs = {'padded_length': self.padded_length}
def compute(self):
# get padded length
padded_length = self.padded_length
x_len_lod_0 = self.x_len_lod[0]
if padded_length == -1:
max_seq_len = 0
for l in x_len_lod_0:
max_seq_len = max(max_seq_len, l)
padded_length = max_seq_len
# do padding
x_data = self.inputs['X'][0]
pad_value_data = self.inputs['PadValue']
if pad_value_data.shape == (1,):
pad_value_data = np.broadcast_to(
pad_value_data, shape=x_data.shape[1:]
)
padded_sequences = []
start_idx = 0
for l in x_len_lod_0:
end_idx = start_idx + l
seq = x_data[start_idx:end_idx]
to_pad_len = padded_length - l
for _ in range(to_pad_len):
seq = np.append(seq, pad_value_data[np.newaxis, :], axis=0)
padded_sequences.append(seq)
start_idx = end_idx
out_data = np.array(padded_sequences)
length = np.array(self.x_len_lod[0]).reshape((-1))
self.outputs = {'Out': out_data, 'Length': length}
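        # Worked example (added for clarity): with the default attrs
        # (x_len_lod [[2, 3, 4, 3]], pad_value [1.0], padded_length -1) the
        # padded length resolves to the longest sequence (4), shorter sequences
        # are padded with rows of 1.0, so Out has shape (4, 4, 10) and
        # Length == [2, 3, 4, 3].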
def setUp(self):
self.op_type = 'sequence_pad'
self.set_attr()
self.set_data()
self.compute()
def test_check_output(self):
self.check_output(check_dygraph=False)
def test_check_grad(self):
self.check_grad(["X"], "Out", check_dygraph=False)
class TestSequencePadOp2(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 10]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = np.random.random((10))
self.padded_length = -1
self.dtype = 'float64'
class TestSequencePadOp3(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 10]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = [1.0]
self.padded_length = 7
self.dtype = 'float64'
class TestSequencePadOp4(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 10]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = np.random.random((10))
self.padded_length = 7
self.dtype = 'float64'
class TestSequencePadOp5(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 2, 5]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = [1.0]
self.padded_length = -1
self.dtype = 'float64'
class TestSequencePadOp6(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 2, 5]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = np.random.random((2, 5))
self.padded_length = -1
self.dtype = 'float64'
class TestSequencePadOp7(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 2, 5]
self.x_len_lod = [[2, 3, 4, 3]]
self.pad_value = [1.0]
self.padded_length = 7
self.dtype = 'float64'
class TestSequencePadOp8(TestSequencePadOp):
def set_attr(self):
self.x_shape = [12, 2, 5]
self.x_len_lod = [[0, 8, 0, 4, 0]]
self.pad_value = [1.0]
self.padded_length = 10
self.dtype = 'float64'
class TestSequencePadOpError(unittest.TestCase):
def test_error(self):
def test_x_variable():
# the input x type must be Variable
x = np.random.random((2, 4)).astype("float32")
pad_value = fluid.layers.assign(
input=np.array([0.0], dtype=np.float32)
)
fluid.layers.sequence_pad(x=x, pad_value=pad_value)
self.assertRaises(TypeError, test_x_variable)
def test_pad_value_variable():
x1 = fluid.layers.data(
name='x1', shape=[10, 5], dtype='float32', lod_level=1
)
pad_value1 = np.array([0.0], dtype=np.float32)
fluid.layers.sequence_pad(x=x1, pad_value=pad_value1)
self.assertRaises(TypeError, test_pad_value_variable)
def test_dtype():
x2 = fluid.layers.data(
name='x2', shape=[10, 5], dtype='int16', lod_level=1
)
pad_value2 = fluid.layers.assign(
input=np.array([0.0], dtype=np.int32)
)
fluid.layers.sequence_pad(x=x2, pad_value=pad_value2)
self.assertRaises(TypeError, test_dtype)
def test_length_dtype(self):
x = fluid.data(name='x', shape=[10, 5], dtype='float32', lod_level=1)
pad_value = fluid.layers.assign(input=np.array([0.0], dtype=np.float32))
out, length = fluid.layers.sequence_pad(x=x, pad_value=pad_value)
        # check that the dtype of length is int64 at compile time
self.assertEqual(length.dtype, core.VarDesc.VarType.INT64)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "85f89b88156324cf5d1a8668e9e116aa",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 80,
"avg_line_length": 31.00561797752809,
"alnum_prop": 0.555716615328864,
"repo_name": "luotao1/Paddle",
"id": "d04091d9332b9f09157961e7829b1c2e10ae5db6",
"size": "6130",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/sequence/test_sequence_pad_op.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
} |
"""
A driver wrapping the Ironic API, such that Nova may provision
bare metal resources.
"""
import base64
from distutils import version
import gzip
import shutil
import tempfile
import time
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_service import loopingcall
from oslo_utils import excutils
from oslo_utils import importutils
import six
import six.moves.urllib.parse as urlparse
from tooz import hashring as hash_ring
from nova.api.metadata import base as instance_metadata
from nova import block_device
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_states
import nova.conf
from nova.console import type as console_type
from nova import context as nova_context
from nova import exception
from nova.i18n import _
from nova import objects
from nova.objects import fields as obj_fields
from nova import rc_fields
from nova import servicegroup
from nova import utils
from nova.virt import configdrive
from nova.virt import driver as virt_driver
from nova.virt import firewall
from nova.virt import hardware
from nova.virt.ironic import client_wrapper
from nova.virt.ironic import ironic_states
from nova.virt.ironic import patcher
from nova.virt import netutils
ironic = None
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
_POWER_STATE_MAP = {
ironic_states.POWER_ON: power_state.RUNNING,
ironic_states.NOSTATE: power_state.NOSTATE,
ironic_states.POWER_OFF: power_state.SHUTDOWN,
}
_UNPROVISION_STATES = (ironic_states.ACTIVE, ironic_states.DEPLOYFAIL,
ironic_states.ERROR, ironic_states.DEPLOYWAIT,
ironic_states.DEPLOYING, ironic_states.RESCUE,
ironic_states.RESCUING, ironic_states.RESCUEWAIT,
ironic_states.RESCUEFAIL, ironic_states.UNRESCUING,
ironic_states.UNRESCUEFAIL)
_NODE_FIELDS = ('uuid', 'power_state', 'target_power_state', 'provision_state',
'target_provision_state', 'last_error', 'maintenance',
'properties', 'instance_uuid', 'traits', 'resource_class')
# Console state checking interval in seconds
_CONSOLE_STATE_CHECKING_INTERVAL = 1
# Number of hash ring partitions per service
# 2 ** 5 = 32 partitions should be fine for most deployments, as an experimental feature.
_HASH_RING_PARTITIONS = 2 ** 5
def map_power_state(state):
try:
return _POWER_STATE_MAP[state]
except KeyError:
LOG.warning("Power state %s not found.", state)
return power_state.NOSTATE
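# For example (illustrative, added comment):
#   map_power_state(ironic_states.POWER_ON)  -> power_state.RUNNING
#   map_power_state(ironic_states.POWER_OFF) -> power_state.SHUTDOWN
#   map_power_state('unknown-state')         -> power_state.NOSTATE (with a warning logged)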
def _get_nodes_supported_instances(cpu_arch=None):
"""Return supported instances for a node."""
if not cpu_arch:
return []
return [(cpu_arch,
obj_fields.HVType.BAREMETAL,
obj_fields.VMMode.HVM)]
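# For example (illustrative, added comment): a node whose cpu_arch is 'x86_64'
# yields [('x86_64', obj_fields.HVType.BAREMETAL, obj_fields.VMMode.HVM)];
# a node without a cpu_arch advertises no supported instances.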
def _log_ironic_polling(what, node, instance):
power_state = (None if node.power_state is None else
'"%s"' % node.power_state)
tgt_power_state = (None if node.target_power_state is None else
'"%s"' % node.target_power_state)
prov_state = (None if node.provision_state is None else
'"%s"' % node.provision_state)
tgt_prov_state = (None if node.target_provision_state is None else
'"%s"' % node.target_provision_state)
LOG.debug('Still waiting for ironic node %(node)s to %(what)s: '
'power_state=%(power_state)s, '
'target_power_state=%(tgt_power_state)s, '
'provision_state=%(prov_state)s, '
'target_provision_state=%(tgt_prov_state)s',
dict(what=what,
node=node.uuid,
power_state=power_state,
tgt_power_state=tgt_power_state,
prov_state=prov_state,
tgt_prov_state=tgt_prov_state),
instance=instance)
class IronicDriver(virt_driver.ComputeDriver):
"""Hypervisor driver for Ironic - bare metal provisioning."""
capabilities = {"has_imagecache": False,
"supports_evacuate": False,
"supports_migrate_to_same_host": False,
"supports_attach_interface": True,
"supports_multiattach": False,
"supports_trusted_certs": False,
}
# This driver is capable of rebalancing nodes between computes.
rebalances_nodes = True
def __init__(self, virtapi, read_only=False):
super(IronicDriver, self).__init__(virtapi)
global ironic
if ironic is None:
ironic = importutils.import_module('ironicclient')
# NOTE(deva): work around a lack of symbols in the current version.
if not hasattr(ironic, 'exc'):
ironic.exc = importutils.import_module('ironicclient.exc')
if not hasattr(ironic, 'client'):
ironic.client = importutils.import_module(
'ironicclient.client')
self.firewall_driver = firewall.load_driver(
default='nova.virt.firewall.NoopFirewallDriver')
self.node_cache = {}
self.node_cache_time = 0
self.servicegroup_api = servicegroup.API()
self.ironicclient = client_wrapper.IronicClientWrapper()
# This is needed for the instance flavor migration in Pike, and should
# be removed in Queens. Since this will run several times in the life
# of the driver, track the instances that have already been migrated.
self._migrated_instance_uuids = set()
def _get_node(self, node_uuid):
"""Get a node by its UUID.
        Some methods pass in variables named nodename, but they are
        actually UUIDs.
"""
return self.ironicclient.call('node.get', node_uuid,
fields=_NODE_FIELDS)
def _validate_instance_and_node(self, instance):
"""Get the node associated with the instance.
Check with the Ironic service that this instance is associated with a
node, and return the node.
"""
try:
return self.ironicclient.call('node.get_by_instance_uuid',
instance.uuid, fields=_NODE_FIELDS)
except ironic.exc.NotFound:
raise exception.InstanceNotFound(instance_id=instance.uuid)
def _node_resources_unavailable(self, node_obj):
"""Determine whether the node's resources are in an acceptable state.
Determines whether the node's resources should be presented
to Nova for use based on the current power, provision and maintenance
state. This is called after _node_resources_used, so any node that
is not used and not in AVAILABLE should be considered in a 'bad' state,
and unavailable for scheduling. Returns True if unacceptable.
"""
bad_power_states = [
ironic_states.ERROR, ironic_states.NOSTATE]
# keep NOSTATE around for compatibility
good_provision_states = [
ironic_states.AVAILABLE, ironic_states.NOSTATE]
return (node_obj.maintenance or
node_obj.power_state in bad_power_states or
node_obj.provision_state not in good_provision_states)
def _node_resources_used(self, node_obj):
"""Determine whether the node's resources are currently used.
Determines whether the node's resources should be considered used
or not. A node is used when it is either in the process of putting
a new instance on the node, has an instance on the node, or is in
the process of cleaning up from a deleted instance. Returns True if
used.
If we report resources as consumed for a node that does not have an
        instance on it, the resource tracker will notice there are no
        instances consuming resources and try to correct us. So only nodes
        with an instance attached should report as consumed here.
"""
return node_obj.instance_uuid is not None
def _parse_node_properties(self, node):
"""Helper method to parse the node's properties."""
properties = {}
for prop in ('cpus', 'memory_mb', 'local_gb'):
try:
properties[prop] = int(node.properties.get(prop, 0))
except (TypeError, ValueError):
LOG.warning('Node %(uuid)s has a malformed "%(prop)s". '
'It should be an integer.',
{'uuid': node.uuid, 'prop': prop})
properties[prop] = 0
raw_cpu_arch = node.properties.get('cpu_arch', None)
try:
cpu_arch = obj_fields.Architecture.canonicalize(raw_cpu_arch)
except exception.InvalidArchitectureName:
cpu_arch = None
if not cpu_arch:
LOG.warning("cpu_arch not defined for node '%s'", node.uuid)
properties['cpu_arch'] = cpu_arch
properties['raw_cpu_arch'] = raw_cpu_arch
properties['capabilities'] = node.properties.get('capabilities')
return properties
def _node_resource(self, node):
"""Helper method to create resource dict from node stats."""
properties = self._parse_node_properties(node)
raw_cpu_arch = properties['raw_cpu_arch']
cpu_arch = properties['cpu_arch']
nodes_extra_specs = {}
# NOTE(deva): In Havana and Icehouse, the flavor was required to link
# to an arch-specific deploy kernel and ramdisk pair, and so the flavor
# also had to have extra_specs['cpu_arch'], which was matched against
# the ironic node.properties['cpu_arch'].
# With Juno, the deploy image(s) may be referenced directly by the
# node.driver_info, and a flavor no longer needs to contain any of
# these three extra specs, though the cpu_arch may still be used
# in a heterogeneous environment, if so desired.
# NOTE(dprince): we use the raw cpu_arch here because extra_specs
# filters aren't canonicalized
nodes_extra_specs['cpu_arch'] = raw_cpu_arch
# NOTE(gilliard): To assist with more precise scheduling, if the
# node.properties contains a key 'capabilities', we expect the value
# to be of the form "k1:v1,k2:v2,etc.." which we add directly as
# key/value pairs into the node_extra_specs to be used by the
# ComputeCapabilitiesFilter
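        # For example, a capabilities value of
        # "boot_mode:uefi,boot_option:local" (illustrative values only)
        # yields extra specs {'boot_mode': 'uefi', 'boot_option': 'local'}.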
capabilities = properties['capabilities']
if capabilities:
for capability in str(capabilities).split(','):
parts = capability.split(':')
if len(parts) == 2 and parts[0] and parts[1]:
nodes_extra_specs[parts[0].strip()] = parts[1]
else:
LOG.warning("Ignoring malformed capability '%s'. "
"Format should be 'key:val'.", capability)
vcpus = vcpus_used = 0
memory_mb = memory_mb_used = 0
local_gb = local_gb_used = 0
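        # These are reported as zero on purpose: scheduling for ironic nodes
        # relies on the node's custom resource class (see
        # update_provider_tree) rather than VCPU/MEMORY_MB/DISK_GB inventory.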
dic = {
'uuid': str(node.uuid),
'hypervisor_hostname': str(node.uuid),
'hypervisor_type': self._get_hypervisor_type(),
'hypervisor_version': self._get_hypervisor_version(),
'resource_class': node.resource_class,
# The Ironic driver manages multiple hosts, so there are
# likely many different CPU models in use. As such it is
# impossible to provide any meaningful info on the CPU
# model of the "host"
'cpu_info': None,
'vcpus': vcpus,
'vcpus_used': vcpus_used,
'local_gb': local_gb,
'local_gb_used': local_gb_used,
'disk_available_least': local_gb - local_gb_used,
'memory_mb': memory_mb,
'memory_mb_used': memory_mb_used,
'supported_instances': _get_nodes_supported_instances(cpu_arch),
'stats': nodes_extra_specs,
'numa_topology': None,
}
return dic
def _start_firewall(self, instance, network_info):
self.firewall_driver.setup_basic_filtering(instance, network_info)
self.firewall_driver.prepare_instance_filter(instance, network_info)
self.firewall_driver.apply_instance_filter(instance, network_info)
def _stop_firewall(self, instance, network_info):
self.firewall_driver.unfilter_instance(instance, network_info)
def _set_instance_uuid(self, node, instance):
patch = [{'path': '/instance_uuid', 'op': 'add',
'value': instance.uuid}]
try:
# NOTE(TheJulia): Assert an instance UUID to lock the node
# from other deployment attempts while configuration is
# being set.
self.ironicclient.call('node.update', node.uuid, patch,
retry_on_conflict=False)
except ironic.exc.BadRequest:
msg = (_("Failed to reserve node %(node)s "
"when provisioning the instance %(instance)s")
% {'node': node.uuid, 'instance': instance.uuid})
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
def prepare_for_spawn(self, instance):
LOG.debug('Preparing to spawn instance %s.', instance.uuid)
node_uuid = instance.get('node')
if not node_uuid:
raise ironic.exc.BadRequest(
_("Ironic node uuid not supplied to "
"driver for instance %s.") % instance.uuid)
node = self._get_node(node_uuid)
self._set_instance_uuid(node, instance)
def failed_spawn_cleanup(self, instance):
LOG.debug('Failed spawn cleanup called for instance',
instance=instance)
try:
node = self._validate_instance_and_node(instance)
except exception.InstanceNotFound:
LOG.warning('Attempt to clean-up from failed spawn of '
'instance %s failed due to no instance_uuid '
'present on the node.', instance.uuid)
return
self._cleanup_deploy(node, instance)
def _add_instance_info_to_node(self, node, instance, image_meta, flavor,
preserve_ephemeral=None,
block_device_info=None):
root_bdm = block_device.get_root_bdm(
virt_driver.block_device_info_get_mapping(block_device_info))
boot_from_volume = root_bdm is not None
patch = patcher.create(node).get_deploy_patch(instance,
image_meta,
flavor,
preserve_ephemeral,
boot_from_volume)
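        # The resulting patch is a list of JSON-patch style operations that
        # populate the node's instance_info (image source, root partition
        # size, etc.) for this deployment.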
try:
# FIXME(lucasagomes): The "retry_on_conflict" parameter was added
            # to basically cause the deployment to fail faster in case the
            # node picked by the scheduler is already associated with another
            # instance due to bug #1341420.
self.ironicclient.call('node.update', node.uuid, patch,
retry_on_conflict=False)
except ironic.exc.BadRequest:
msg = (_("Failed to add deploy parameters on node %(node)s "
"when provisioning the instance %(instance)s")
% {'node': node.uuid, 'instance': instance.uuid})
LOG.error(msg)
raise exception.InstanceDeployFailure(msg)
def _remove_instance_info_from_node(self, node, instance):
patch = [{'path': '/instance_info', 'op': 'remove'},
{'path': '/instance_uuid', 'op': 'remove'}]
try:
self.ironicclient.call('node.update', node.uuid, patch)
except ironic.exc.BadRequest as e:
LOG.warning("Failed to remove deploy parameters from node "
"%(node)s when unprovisioning the instance "
"%(instance)s: %(reason)s",
{'node': node.uuid, 'instance': instance.uuid,
'reason': six.text_type(e)})
def _add_volume_target_info(self, context, instance, block_device_info):
bdms = virt_driver.block_device_info_get_mapping(block_device_info)
for bdm in bdms:
if not bdm.is_volume:
continue
connection_info = jsonutils.loads(bdm._bdm_obj.connection_info)
target_properties = connection_info['data']
driver_volume_type = connection_info['driver_volume_type']
try:
self.ironicclient.call('volume_target.create',
node_uuid=instance.node,
volume_type=driver_volume_type,
properties=target_properties,
boot_index=bdm._bdm_obj.boot_index,
volume_id=bdm._bdm_obj.volume_id)
except (ironic.exc.BadRequest, ironic.exc.Conflict):
msg = (_("Failed to add volume target information of "
"volume %(volume)s on node %(node)s when "
"provisioning the instance")
% {'volume': bdm._bdm_obj.volume_id,
'node': instance.node})
LOG.error(msg, instance=instance)
raise exception.InstanceDeployFailure(msg)
def _cleanup_volume_target_info(self, instance):
targets = self.ironicclient.call('node.list_volume_targets',
instance.node, detail=True)
for target in targets:
volume_target_id = target.uuid
try:
self.ironicclient.call('volume_target.delete',
volume_target_id)
except ironic.exc.NotFound:
LOG.debug("Volume target information %(target)s of volume "
"%(volume)s is already removed from node %(node)s",
{'target': volume_target_id,
'volume': target.volume_id,
'node': instance.node},
instance=instance)
except ironic.exc.ClientException as e:
LOG.warning("Failed to remove volume target information "
"%(target)s of volume %(volume)s from node "
"%(node)s when unprovisioning the instance: "
"%(reason)s",
{'target': volume_target_id,
'volume': target.volume_id,
'node': instance.node,
'reason': e},
instance=instance)
def _cleanup_deploy(self, node, instance, network_info=None):
self._cleanup_volume_target_info(instance)
self._unplug_vifs(node, instance, network_info)
self._stop_firewall(instance, network_info)
self._remove_instance_info_from_node(node, instance)
def _wait_for_active(self, instance):
"""Wait for the node to be marked as ACTIVE in Ironic."""
instance.refresh()
if (instance.task_state == task_states.DELETING or
instance.vm_state in (vm_states.ERROR, vm_states.DELETED)):
raise exception.InstanceDeployFailure(
_("Instance %s provisioning was aborted") % instance.uuid)
node = self._validate_instance_and_node(instance)
if node.provision_state == ironic_states.ACTIVE:
# job is done
LOG.debug("Ironic node %(node)s is now ACTIVE",
dict(node=node.uuid), instance=instance)
raise loopingcall.LoopingCallDone()
if node.target_provision_state in (ironic_states.DELETED,
ironic_states.AVAILABLE):
# ironic is trying to delete it now
raise exception.InstanceNotFound(instance_id=instance.uuid)
if node.provision_state in (ironic_states.NOSTATE,
ironic_states.AVAILABLE):
# ironic already deleted it
raise exception.InstanceNotFound(instance_id=instance.uuid)
if node.provision_state == ironic_states.DEPLOYFAIL:
# ironic failed to deploy
msg = (_("Failed to provision instance %(inst)s: %(reason)s")
% {'inst': instance.uuid, 'reason': node.last_error})
raise exception.InstanceDeployFailure(msg)
_log_ironic_polling('become ACTIVE', node, instance)
def _wait_for_power_state(self, instance, message):
"""Wait for the node to complete a power state change."""
node = self._validate_instance_and_node(instance)
if node.target_power_state == ironic_states.NOSTATE:
raise loopingcall.LoopingCallDone()
_log_ironic_polling(message, node, instance)
def init_host(self, host):
"""Initialize anything that is necessary for the driver to function.
:param host: the hostname of the compute host.
"""
self._refresh_hash_ring(nova_context.get_admin_context())
@staticmethod
def _pike_flavor_migration_for_node(ctx, node_rc, instance_uuid):
normalized_rc = rc_fields.ResourceClass.normalize_name(node_rc)
instance = objects.Instance.get_by_uuid(ctx, instance_uuid,
expected_attrs=["flavor"])
specs = instance.flavor.extra_specs
resource_key = "resources:%s" % normalized_rc
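        # For example, a node resource_class of "baremetal-gold"
        # (illustrative) normalizes to "CUSTOM_BAREMETAL_GOLD", so the flavor
        # gains extra_specs['resources:CUSTOM_BAREMETAL_GOLD'] = '1'.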
if resource_key in specs:
# The compute must have been restarted, and the instance.flavor
# has already been migrated
return False
specs[resource_key] = "1"
instance.save()
return True
def _pike_flavor_migration(self, node_uuids):
"""This code is needed in Pike to prevent problems where an operator
has already adjusted their flavors to add the custom resource class to
extra_specs. Since existing ironic instances will not have this in
their extra_specs, they will only have allocations against
VCPU/RAM/disk. By adding just the custom RC to the existing flavor
extra_specs, the periodic call to update_available_resources() will add
an allocation against the custom resource class, and prevent placement
from thinking that that node is available. This code can be removed in
Queens, and will need to be updated to also alter extra_specs to
zero-out the old-style standard resource classes of VCPU, MEMORY_MB,
and DISK_GB.
"""
ctx = nova_context.get_admin_context()
for node_uuid in node_uuids:
node = self._node_from_cache(node_uuid)
if not node:
continue
node_rc = node.resource_class
if not node_rc:
LOG.warning("Node %(node)s does not have its resource_class "
"set.", {"node": node.uuid})
continue
if node.instance_uuid in self._migrated_instance_uuids:
continue
self._pike_flavor_migration_for_node(ctx, node_rc,
node.instance_uuid)
self._migrated_instance_uuids.add(node.instance_uuid)
LOG.debug("The flavor extra_specs for Ironic instance %(inst)s "
"have been updated for custom resource class '%(rc)s'.",
{"inst": node.instance_uuid, "rc": node_rc})
return
def _get_hypervisor_type(self):
"""Get hypervisor type."""
return 'ironic'
def _get_hypervisor_version(self):
"""Returns the version of the Ironic API service endpoint."""
return client_wrapper.IRONIC_API_VERSION[0]
def instance_exists(self, instance):
"""Checks the existence of an instance.
Checks the existence of an instance. This is an override of the
base method for efficiency.
:param instance: The instance object.
:returns: True if the instance exists. False if not.
"""
try:
self._validate_instance_and_node(instance)
return True
except exception.InstanceNotFound:
return False
def _get_node_list(self, **kwargs):
"""Helper function to return the list of nodes.
        If unable to connect to the Ironic server, VirtDriverNotReady is
        raised.
        :returns: a list of raw nodes from ironic
:raises: VirtDriverNotReady
"""
node_list = []
try:
node_list = self.ironicclient.call("node.list", **kwargs)
except exception.NovaException as e:
LOG.error("Failed to get the list of nodes from the Ironic "
"inventory. Error: %s", e)
raise exception.VirtDriverNotReady()
except Exception as e:
LOG.error("An unknown error has occurred when trying to get the "
"list of nodes from the Ironic inventory. Error: %s", e)
raise exception.VirtDriverNotReady()
return node_list
def list_instances(self):
"""Return the names of all the instances provisioned.
:returns: a list of instance names.
:raises: VirtDriverNotReady
"""
# NOTE(lucasagomes): limit == 0 is an indicator to continue
# pagination until there're no more values to be returned.
node_list = self._get_node_list(associated=True,
fields=['instance_uuid'], limit=0)
context = nova_context.get_admin_context()
return [objects.Instance.get_by_uuid(context,
i.instance_uuid).name
for i in node_list]
def list_instance_uuids(self):
"""Return the UUIDs of all the instances provisioned.
:returns: a list of instance UUIDs.
:raises: VirtDriverNotReady
"""
# NOTE(lucasagomes): limit == 0 is an indicator to continue
# pagination until there're no more values to be returned.
node_list = self._get_node_list(associated=True,
fields=['instance_uuid'], limit=0)
return list(n.instance_uuid for n in node_list)
def node_is_available(self, nodename):
"""Confirms a Nova hypervisor node exists in the Ironic inventory.
:param nodename: The UUID of the node. Parameter is called nodename
even though it is a UUID to keep method signature
the same as inherited class.
:returns: True if the node exists, False if not.
"""
# NOTE(comstud): We can cheat and use caching here. This method
# just needs to return True for nodes that exist. It doesn't
# matter if the data is stale. Sure, it's possible that removing
# node from Ironic will cause this method to return True until
# the next call to 'get_available_nodes', but there shouldn't
# be much harm. There's already somewhat of a race.
if not self.node_cache:
# Empty cache, try to populate it.
self._refresh_cache()
# nodename is the ironic node's UUID.
if nodename in self.node_cache:
return True
# NOTE(comstud): Fallback and check Ironic. This case should be
# rare.
try:
# nodename is the ironic node's UUID.
self._get_node(nodename)
return True
except ironic.exc.NotFound:
return False
def _refresh_hash_ring(self, ctxt):
service_list = objects.ServiceList.get_all_computes_by_hv_type(
ctxt, self._get_hypervisor_type())
services = set()
for svc in service_list:
is_up = self.servicegroup_api.service_is_up(svc)
if is_up:
services.add(svc.host)
# NOTE(jroll): always make sure this service is in the list, because
# only services that have something registered in the compute_nodes
# table will be here so far, and we might be brand new.
services.add(CONF.host)
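        # The resulting ring maps each node UUID onto a subset of these
        # service hostnames; _refresh_cache() checks whether CONF.host is in
        # that subset to decide which nodes this compute service manages.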
self.hash_ring = hash_ring.HashRing(services,
partitions=_HASH_RING_PARTITIONS)
def _refresh_cache(self):
# NOTE(lucasagomes): limit == 0 is an indicator to continue
# pagination until there're no more values to be returned.
ctxt = nova_context.get_admin_context()
self._refresh_hash_ring(ctxt)
instances = objects.InstanceList.get_uuids_by_host(ctxt, CONF.host)
node_cache = {}
for node in self._get_node_list(fields=_NODE_FIELDS, limit=0):
# NOTE(jroll): we always manage the nodes for instances we manage
if node.instance_uuid in instances:
node_cache[node.uuid] = node
# NOTE(jroll): check if the node matches us in the hash ring, and
# does not have an instance_uuid (which would imply the node has
# an instance managed by another compute service).
# Note that this means nodes with an instance that was deleted in
# nova while the service was down, and not yet reaped, will not be
# reported until the periodic task cleans it up.
elif (node.instance_uuid is None and
CONF.host in
self.hash_ring.get_nodes(node.uuid.encode('utf-8'))):
node_cache[node.uuid] = node
self.node_cache = node_cache
self.node_cache_time = time.time()
# For Pike, we need to ensure that all instances have their flavor
# migrated to include the resource_class. Since there could be many,
# many instances controlled by this host, spawn this asynchronously so
# as not to block this service.
node_uuids = [node.uuid for node in self.node_cache.values()
if node.instance_uuid and
node.instance_uuid not in self._migrated_instance_uuids]
if node_uuids:
# No need to run unless something has changed
utils.spawn_n(self._pike_flavor_migration, node_uuids)
def get_available_nodes(self, refresh=False):
"""Returns the UUIDs of Ironic nodes managed by this compute service.
We use consistent hashing to distribute Ironic nodes between all
available compute services. The subset of nodes managed by a given
compute service is determined by the following rules:
* any node with an instance managed by the compute service
* any node that is mapped to the compute service on the hash ring
* no nodes with instances managed by another compute service
The ring is rebalanced as nova-compute services are brought up and
down. Note that this rebalance does not happen at the same time for
all compute services, so a node may be managed by multiple compute
services for a small amount of time.
:param refresh: Boolean value; If True run update first. Ignored by
this driver.
:returns: a list of UUIDs
"""
# NOTE(jroll) we refresh the cache every time this is called
# because it needs to happen in the resource tracker
# periodic task. This task doesn't pass refresh=True,
# unfortunately.
self._refresh_cache()
node_uuids = list(self.node_cache.keys())
LOG.debug("Returning %(num_nodes)s available node(s)",
dict(num_nodes=len(node_uuids)))
return node_uuids
def update_provider_tree(self, provider_tree, nodename, allocations=None):
"""Update a ProviderTree object with current resource provider and
inventory information.
:param nova.compute.provider_tree.ProviderTree provider_tree:
A nova.compute.provider_tree.ProviderTree object representing all
the providers in the tree associated with the compute node, and any
sharing providers (those with the ``MISC_SHARES_VIA_AGGREGATE``
trait) associated via aggregate with any of those providers (but
not *their* tree- or aggregate-associated providers), as currently
known by placement.
:param nodename:
String name of the compute node (i.e.
ComputeNode.hypervisor_hostname) for which the caller is requesting
updated provider information.
:param allocations:
Dict of allocation data of the form:
{ $CONSUMER_UUID: {
# The shape of each "allocations" dict below is identical
# to the return from GET /allocations/{consumer_uuid}
"allocations": {
$RP_UUID: {
"generation": $RP_GEN,
"resources": {
$RESOURCE_CLASS: $AMOUNT,
...
},
},
...
},
"project_id": $PROJ_ID,
"user_id": $USER_ID,
"consumer_generation": $CONSUMER_GEN,
},
...
}
If None, and the method determines that any inventory needs to be
moved (from one provider to another and/or to a different resource
class), the ReshapeNeeded exception must be raised. Otherwise, this
dict must be edited in place to indicate the desired final state of
allocations.
:raises ReshapeNeeded: If allocations is None and any inventory needs
to be moved from one provider to another and/or to a different
resource class.
"""
# nodename is the ironic node's UUID.
node = self._node_from_cache(nodename)
reserved = False
if (not self._node_resources_used(node) and
self._node_resources_unavailable(node)):
LOG.debug('Node %(node)s is not ready for a deployment, '
'reporting resources as reserved for it. Node\'s '
'provision state is %(prov)s, power state is '
'%(power)s and maintenance is %(maint)s.',
{'node': node.uuid, 'prov': node.provision_state,
'power': node.power_state, 'maint': node.maintenance})
reserved = True
info = self._node_resource(node)
result = {}
rc_name = info.get('resource_class')
if rc_name is None:
raise exception.NoResourceClass(node=nodename)
norm_name = rc_fields.ResourceClass.normalize_name(rc_name)
if norm_name is not None:
result[norm_name] = {
'total': 1,
'reserved': int(reserved),
'min_unit': 1,
'max_unit': 1,
'step_size': 1,
'allocation_ratio': 1.0,
}
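        # Placement sees a single unit of the node's custom resource class;
        # marking it reserved (reserved == total == 1) effectively makes the
        # node unschedulable while it is unavailable or in maintenance.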
provider_tree.update_inventory(nodename, result)
# TODO(efried): *Unset* (remove_traits) if "owned" by ironic virt but
# not set on the node object, and *set* (add_traits) only those both
# owned by ironic virt and set on the node object.
provider_tree.update_traits(nodename, node.traits)
def get_available_resource(self, nodename):
"""Retrieve resource information.
This method is called when nova-compute launches, and
as part of a periodic task that records the results in the DB.
:param nodename: the UUID of the node.
:returns: a dictionary describing resources.
"""
# NOTE(comstud): We can cheat and use caching here. This method is
# only called from a periodic task and right after the above
# get_available_nodes() call is called.
if not self.node_cache:
# Well, it's also called from init_host(), so if we have empty
# cache, let's try to populate it.
self._refresh_cache()
# nodename is the ironic node's UUID.
node = self._node_from_cache(nodename)
return self._node_resource(node)
def _node_from_cache(self, node_uuid):
"""Returns a node from the cache, retrieving the node from Ironic API
if the node doesn't yet exist in the cache.
"""
# NOTE(vdrok): node_cache might also be modified during instance
# _unprovision call, hence this function is synchronized
@utils.synchronized('ironic-node-%s' % node_uuid)
def _sync_node_from_cache():
cache_age = time.time() - self.node_cache_time
if node_uuid in self.node_cache:
LOG.debug("Using cache for node %(node)s, age: %(age)s",
{'node': node_uuid, 'age': cache_age})
return self.node_cache[node_uuid]
else:
LOG.debug("Node %(node)s not found in cache, age: %(age)s",
{'node': node_uuid, 'age': cache_age})
node = self._get_node(node_uuid)
self.node_cache[node_uuid] = node
return node
return _sync_node_from_cache()
def get_info(self, instance):
"""Get the current state and resource usage for this instance.
        If the instance is not found this method returns an InstanceInfo
        with state NOSTATE.
:param instance: the instance object.
:returns: an InstanceInfo object
"""
# we should already have a cache for our nodes, refreshed on every
# RT loop. but if we don't have a cache, generate it.
if not self.node_cache:
self._refresh_cache()
for node in self.node_cache.values():
if instance.uuid == node.instance_uuid:
break
else:
# if we can't find the instance, fall back to ironic
try:
node = self._validate_instance_and_node(instance)
except exception.InstanceNotFound:
return hardware.InstanceInfo(
state=map_power_state(ironic_states.NOSTATE))
return hardware.InstanceInfo(state=map_power_state(node.power_state))
def deallocate_networks_on_reschedule(self, instance):
"""Does the driver want networks deallocated on reschedule?
:param instance: the instance object.
:returns: Boolean value. If True deallocate networks on reschedule.
"""
return True
def _get_network_metadata(self, node, network_info):
"""Gets a more complete representation of the instance network info.
This data is exposed as network_data.json in the metadata service and
the config drive.
:param node: The node object.
:param network_info: Instance network information.
"""
base_metadata = netutils.get_network_metadata(network_info)
# TODO(vdrok): change to doing a single "detailed vif list" call,
# when added to ironic API, response to that will contain all
# necessary information. Then we will be able to avoid looking at
# internal_info/extra fields.
ports = self.ironicclient.call("node.list_ports",
node.uuid, detail=True)
portgroups = self.ironicclient.call("portgroup.list", node=node.uuid,
detail=True)
vif_id_to_objects = {'ports': {}, 'portgroups': {}}
for collection, name in ((ports, 'ports'), (portgroups, 'portgroups')):
for p in collection:
vif_id = (p.internal_info.get('tenant_vif_port_id') or
p.extra.get('vif_port_id'))
if vif_id:
vif_id_to_objects[name][vif_id] = p
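        # vif_id_to_objects now maps each neutron port (VIF) UUID to the
        # ironic port or portgroup it is attached to; this is used below to
        # enrich the generated links with bond and MAC address details.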
additional_links = []
for link in base_metadata['links']:
vif_id = link['vif_id']
if vif_id in vif_id_to_objects['portgroups']:
pg = vif_id_to_objects['portgroups'][vif_id]
pg_ports = [p for p in ports if p.portgroup_uuid == pg.uuid]
link.update({'type': 'bond', 'bond_mode': pg.mode,
'bond_links': []})
# If address is set on the portgroup, an (ironic) vif-attach
# call has already updated neutron with the port address;
# reflect it here. Otherwise, an address generated by neutron
# will be used instead (code is elsewhere to handle this case).
if pg.address:
link.update({'ethernet_mac_address': pg.address})
for prop in pg.properties:
# These properties are the bonding driver options described
# at https://www.kernel.org/doc/Documentation/networking/bonding.txt # noqa
# cloud-init checks the same way, parameter name has to
# start with bond
key = prop if prop.startswith('bond') else 'bond_%s' % prop
link[key] = pg.properties[prop]
for port in pg_ports:
# This won't cause any duplicates to be added. A port
# cannot be in more than one port group for the same
# node.
additional_links.append({
'id': port.uuid,
'type': 'phy', 'ethernet_mac_address': port.address,
})
link['bond_links'].append(port.uuid)
elif vif_id in vif_id_to_objects['ports']:
p = vif_id_to_objects['ports'][vif_id]
# Ironic updates neutron port's address during attachment
link.update({'ethernet_mac_address': p.address,
'type': 'phy'})
base_metadata['links'].extend(additional_links)
return base_metadata
def _generate_configdrive(self, context, instance, node, network_info,
extra_md=None, files=None):
"""Generate a config drive.
:param instance: The instance object.
:param node: The node object.
:param network_info: Instance network information.
:param extra_md: Optional, extra metadata to be added to the
configdrive.
:param files: Optional, a list of paths to files to be added to
the configdrive.
"""
if not extra_md:
extra_md = {}
i_meta = instance_metadata.InstanceMetadata(instance,
content=files, extra_md=extra_md, network_info=network_info,
network_metadata=self._get_network_metadata(node, network_info),
request_context=context)
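        # Ironic expects the configdrive as a base64-encoded gzipped image,
        # so build the drive into a temporary file, compress it and encode
        # it before passing it to node.set_provision_state.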
with tempfile.NamedTemporaryFile() as uncompressed:
with configdrive.ConfigDriveBuilder(instance_md=i_meta) as cdb:
cdb.make_drive(uncompressed.name)
with tempfile.NamedTemporaryFile() as compressed:
# compress config drive
with gzip.GzipFile(fileobj=compressed, mode='wb') as gzipped:
uncompressed.seek(0)
shutil.copyfileobj(uncompressed, gzipped)
# base64 encode config drive
compressed.seek(0)
return base64.b64encode(compressed.read())
def spawn(self, context, instance, image_meta, injected_files,
admin_password, allocations, network_info=None,
block_device_info=None):
"""Deploy an instance.
:param context: The security context.
:param instance: The instance object.
:param image_meta: Image dict returned by nova.image.glance
that defines the image from which to boot this instance.
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in
instance.
:param allocations: Information about resources allocated to the
instance via placement, of the form returned by
SchedulerReportClient.get_allocations_for_consumer.
Ignored by this driver.
:param network_info: Instance network information.
:param block_device_info: Instance block device
information.
"""
LOG.debug('Spawn called for instance', instance=instance)
# The compute manager is meant to know the node uuid, so missing uuid
# is a significant issue. It may mean we've been passed the wrong data.
node_uuid = instance.get('node')
if not node_uuid:
raise ironic.exc.BadRequest(
_("Ironic node uuid not supplied to "
"driver for instance %s.") % instance.uuid)
node = self._get_node(node_uuid)
flavor = instance.flavor
self._add_instance_info_to_node(node, instance, image_meta, flavor,
block_device_info=block_device_info)
try:
self._add_volume_target_info(context, instance, block_device_info)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error("Error preparing deploy for instance "
"on baremetal node %(node)s.",
{'node': node_uuid},
instance=instance)
self._cleanup_deploy(node, instance, network_info)
# NOTE(Shrews): The default ephemeral device needs to be set for
# services (like cloud-init) that depend on it being returned by the
# metadata server. Addresses bug https://launchpad.net/bugs/1324286.
if flavor.ephemeral_gb:
instance.default_ephemeral_device = '/dev/sda1'
instance.save()
# validate we are ready to do the deploy
validate_chk = self.ironicclient.call("node.validate", node_uuid)
if (not validate_chk.deploy.get('result')
or not validate_chk.power.get('result')
or not validate_chk.storage.get('result')):
# something is wrong. undo what we have done
self._cleanup_deploy(node, instance, network_info)
raise exception.ValidationError(_(
"Ironic node: %(id)s failed to validate."
" (deploy: %(deploy)s, power: %(power)s,"
" storage: %(storage)s)")
% {'id': node.uuid,
'deploy': validate_chk.deploy,
'power': validate_chk.power,
'storage': validate_chk.storage})
# prepare for the deploy
try:
self._start_firewall(instance, network_info)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error("Error preparing deploy for instance "
"%(instance)s on baremetal node %(node)s.",
{'instance': instance.uuid,
'node': node_uuid})
self._cleanup_deploy(node, instance, network_info)
# Config drive
configdrive_value = None
if configdrive.required_by(instance):
extra_md = {}
if admin_password:
extra_md['admin_pass'] = admin_password
try:
configdrive_value = self._generate_configdrive(
context, instance, node, network_info, extra_md=extra_md,
files=injected_files)
except Exception as e:
with excutils.save_and_reraise_exception():
msg = ("Failed to build configdrive: %s" %
six.text_type(e))
LOG.error(msg, instance=instance)
self._cleanup_deploy(node, instance, network_info)
LOG.info("Config drive for instance %(instance)s on "
"baremetal node %(node)s created.",
{'instance': instance['uuid'], 'node': node_uuid})
# trigger the node deploy
try:
self.ironicclient.call("node.set_provision_state", node_uuid,
ironic_states.ACTIVE,
configdrive=configdrive_value)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error("Failed to request Ironic to provision instance "
"%(inst)s: %(reason)s",
{'inst': instance.uuid,
'reason': six.text_type(e)})
self._cleanup_deploy(node, instance, network_info)
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_active,
instance)
try:
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Successfully provisioned Ironic node %s',
node.uuid, instance=instance)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error("Error deploying instance %(instance)s on "
"baremetal node %(node)s.",
{'instance': instance.uuid,
'node': node_uuid})
def _unprovision(self, instance, node):
"""This method is called from destroy() to unprovision
        an already provisioned node after the required checks.
"""
try:
self.ironicclient.call("node.set_provision_state", node.uuid,
"deleted")
except Exception as e:
# if the node is already in a deprovisioned state, continue
# This should be fixed in Ironic.
# TODO(deva): This exception should be added to
# python-ironicclient and matched directly,
# rather than via __name__.
if getattr(e, '__name__', None) != 'InstanceDeployFailure':
raise
# using a dict because this is modified in the local method
data = {'tries': 0}
def _wait_for_provision_state():
try:
node = self._validate_instance_and_node(instance)
except exception.InstanceNotFound:
LOG.debug("Instance already removed from Ironic",
instance=instance)
raise loopingcall.LoopingCallDone()
if node.provision_state in (ironic_states.NOSTATE,
ironic_states.CLEANING,
ironic_states.CLEANWAIT,
ironic_states.CLEANFAIL,
ironic_states.AVAILABLE):
# From a user standpoint, the node is unprovisioned. If a node
# gets into CLEANFAIL state, it must be fixed in Ironic, but we
# can consider the instance unprovisioned.
LOG.debug("Ironic node %(node)s is in state %(state)s, "
"instance is now unprovisioned.",
dict(node=node.uuid, state=node.provision_state),
instance=instance)
raise loopingcall.LoopingCallDone()
if data['tries'] >= CONF.ironic.api_max_retries + 1:
msg = (_("Error destroying the instance on node %(node)s. "
"Provision state still '%(state)s'.")
% {'state': node.provision_state,
'node': node.uuid})
LOG.error(msg)
raise exception.NovaException(msg)
else:
data['tries'] += 1
_log_ironic_polling('unprovision', node, instance)
# wait for the state transition to finish
timer = loopingcall.FixedIntervalLoopingCall(_wait_for_provision_state)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
# NOTE(vdrok): synchronize this function so that get_available_resource
# has up-to-date view of node_cache.
@utils.synchronized('ironic-node-%s' % node.uuid)
def _sync_remove_cache_entry():
# NOTE(vdrok): Force the cache update, so that
# update_usages resource tracker call that will happen next
# has the up-to-date node view.
self.node_cache.pop(node.uuid, None)
LOG.debug('Removed node %(uuid)s from node cache.',
{'uuid': node.uuid})
_sync_remove_cache_entry()
def destroy(self, context, instance, network_info,
block_device_info=None, destroy_disks=True):
"""Destroy the specified instance, if it can be found.
:param context: The security context.
:param instance: The instance object.
:param network_info: Instance network information.
:param block_device_info: Instance block device
information. Ignored by this driver.
:param destroy_disks: Indicates if disks should be
destroyed. Ignored by this driver.
"""
LOG.debug('Destroy called for instance', instance=instance)
try:
node = self._validate_instance_and_node(instance)
except exception.InstanceNotFound:
LOG.warning("Destroy called on non-existing instance %s.",
instance.uuid)
# NOTE(deva): if nova.compute.ComputeManager._delete_instance()
# is called on a non-existing instance, the only way
# to delete it is to return from this method
# without raising any exceptions.
return
try:
if node.provision_state in _UNPROVISION_STATES:
self._unprovision(instance, node)
else:
# NOTE(hshiina): if spawn() fails before ironic starts
# provisioning, instance information should be
# removed from ironic node.
self._remove_instance_info_from_node(node, instance)
finally:
self._cleanup_deploy(node, instance, network_info)
LOG.info('Successfully unprovisioned Ironic node %s',
node.uuid, instance=instance)
def reboot(self, context, instance, network_info, reboot_type,
block_device_info=None, bad_volumes_callback=None):
"""Reboot the specified instance.
NOTE: Unlike the libvirt driver, this method does not delete
and recreate the instance; it preserves local state.
:param context: The security context.
:param instance: The instance object.
:param network_info: Instance network information. Ignored by
this driver.
:param reboot_type: Either a HARD or SOFT reboot.
:param block_device_info: Info pertaining to attached volumes.
Ignored by this driver.
:param bad_volumes_callback: Function to handle any bad volumes
encountered. Ignored by this driver.
"""
LOG.debug('Reboot(type %s) called for instance',
reboot_type, instance=instance)
node = self._validate_instance_and_node(instance)
hard = True
if reboot_type == 'SOFT':
try:
self.ironicclient.call("node.set_power_state", node.uuid,
'reboot', soft=True)
hard = False
except ironic.exc.BadRequest as exc:
LOG.info('Soft reboot is not supported by ironic hardware '
'driver. Falling back to hard reboot: %s',
exc,
instance=instance)
if hard:
self.ironicclient.call("node.set_power_state", node.uuid, 'reboot')
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state, instance, 'reboot')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Successfully rebooted(type %(type)s) Ironic node %(node)s',
{'type': ('HARD' if hard else 'SOFT'),
'node': node.uuid},
instance=instance)
def power_off(self, instance, timeout=0, retry_interval=0):
"""Power off the specified instance.
NOTE: Unlike the libvirt driver, this method does not delete
and recreate the instance; it preserves local state.
:param instance: The instance object.
:param timeout: time to wait for node to shutdown. If it is set,
soft power off is attempted before hard power off.
:param retry_interval: How often to signal node while waiting
for it to shutdown. Ignored by this driver. Retrying depends on
Ironic hardware driver.
"""
LOG.debug('Power off called for instance', instance=instance)
node = self._validate_instance_and_node(instance)
if timeout:
try:
self.ironicclient.call("node.set_power_state", node.uuid,
'off', soft=True, timeout=timeout)
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state, instance, 'soft power off')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
node = self._validate_instance_and_node(instance)
if node.power_state == ironic_states.POWER_OFF:
LOG.info('Successfully soft powered off Ironic node %s',
node.uuid, instance=instance)
return
LOG.info("Failed to soft power off instance "
"%(instance)s on baremetal node %(node)s "
"within the required timeout %(timeout)d "
"seconds due to error: %(reason)s. "
"Attempting hard power off.",
{'instance': instance.uuid,
'timeout': timeout,
'node': node.uuid,
'reason': node.last_error},
instance=instance)
except ironic.exc.ClientException as e:
LOG.info("Failed to soft power off instance "
"%(instance)s on baremetal node %(node)s "
"due to error: %(reason)s. "
"Attempting hard power off.",
{'instance': instance.uuid,
'node': node.uuid,
'reason': e},
instance=instance)
self.ironicclient.call("node.set_power_state", node.uuid, 'off')
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state, instance, 'power off')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Successfully hard powered off Ironic node %s',
node.uuid, instance=instance)
def power_on(self, context, instance, network_info,
block_device_info=None):
"""Power on the specified instance.
NOTE: Unlike the libvirt driver, this method does not delete
and recreate the instance; it preserves local state.
:param context: The security context.
:param instance: The instance object.
:param network_info: Instance network information. Ignored by
this driver.
:param block_device_info: Instance block device
information. Ignored by this driver.
"""
LOG.debug('Power on called for instance', instance=instance)
node = self._validate_instance_and_node(instance)
self.ironicclient.call("node.set_power_state", node.uuid, 'on')
timer = loopingcall.FixedIntervalLoopingCall(
self._wait_for_power_state, instance, 'power on')
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Successfully powered on Ironic node %s',
node.uuid, instance=instance)
def trigger_crash_dump(self, instance):
"""Trigger crash dump mechanism on the given instance.
        Stalled instances can be triggered to dump their crash data. How the
        guest OS reacts in detail depends on its configuration.
:param instance: The instance where the crash dump should be triggered.
:return: None
"""
LOG.debug('Trigger crash dump called for instance', instance=instance)
node = self._validate_instance_and_node(instance)
self.ironicclient.call("node.inject_nmi", node.uuid)
LOG.info('Successfully triggered crash dump into Ironic node %s',
node.uuid, instance=instance)
def refresh_security_group_rules(self, security_group_id):
"""Refresh security group rules from data store.
Invoked when security group rules are updated.
:param security_group_id: The security group id.
"""
self.firewall_driver.refresh_security_group_rules(security_group_id)
def refresh_instance_security_rules(self, instance):
"""Refresh security group rules from data store.
Gets called when an instance gets added to or removed from
the security group the instance is a member of or if the
group gains or loses a rule.
:param instance: The instance object.
"""
self.firewall_driver.refresh_instance_security_rules(instance)
def ensure_filtering_rules_for_instance(self, instance, network_info):
"""Set up filtering rules.
:param instance: The instance object.
:param network_info: Instance network information.
"""
self.firewall_driver.setup_basic_filtering(instance, network_info)
self.firewall_driver.prepare_instance_filter(instance, network_info)
def unfilter_instance(self, instance, network_info):
"""Stop filtering instance.
:param instance: The instance object.
:param network_info: Instance network information.
"""
self.firewall_driver.unfilter_instance(instance, network_info)
def _plug_vif(self, node, port_id):
last_attempt = 5
for attempt in range(0, last_attempt + 1):
try:
self.ironicclient.call("node.vif_attach", node.uuid,
port_id, retry_on_conflict=False)
except ironic.exc.BadRequest as e:
# NOTE(danms): If we race with ironic startup, there
# will be no ironic-conductor running, which will
# give us a failure to do this plug operation. So,
# be graceful in that case and wait/retry.
# NOTE(mdbooth): This will be fixed in ironic by
# change I2c21baae. This will ensure ironic returns a 503 here,
# which will cause ironicclient to automatically retry for us.
# We can remove this workaround once we are confident that we
# are only running against ironic containing this fix.
if ('No conductor' in six.text_type(e) and
attempt < last_attempt):
LOG.warning('No ironic conductor is running; '
'waiting...')
time.sleep(10)
continue
msg = (_("Cannot attach VIF %(vif)s to the node %(node)s "
"due to error: %(err)s") % {
'vif': port_id,
'node': node.uuid, 'err': e})
LOG.error(msg)
raise exception.VirtualInterfacePlugException(msg)
except ironic.exc.Conflict:
# NOTE (vsaienko) Return since the VIF is already attached.
return
# Success, so don't retry
return
def _plug_vifs(self, node, instance, network_info):
# NOTE(PhilDay): Accessing network_info will block if the thread
# it wraps hasn't finished, so do this ahead of time so that we
# don't block while holding the logging lock.
network_info_str = str(network_info)
LOG.debug("plug: instance_uuid=%(uuid)s vif=%(network_info)s",
{'uuid': instance.uuid,
'network_info': network_info_str})
for vif in network_info:
port_id = six.text_type(vif['id'])
self._plug_vif(node, port_id)
def _unplug_vifs(self, node, instance, network_info):
# NOTE(PhilDay): Accessing network_info will block if the thread
# it wraps hasn't finished, so do this ahead of time so that we
# don't block while holding the logging lock.
network_info_str = str(network_info)
LOG.debug("unplug: instance_uuid=%(uuid)s vif=%(network_info)s",
{'uuid': instance.uuid,
'network_info': network_info_str})
if not network_info:
return
for vif in network_info:
port_id = six.text_type(vif['id'])
try:
self.ironicclient.call("node.vif_detach", node.uuid,
port_id)
except ironic.exc.BadRequest:
LOG.debug("VIF %(vif)s isn't attached to Ironic node %(node)s",
{'vif': port_id, 'node': node.uuid})
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks.
:param instance: The instance object.
:param network_info: Instance network information.
"""
# instance.node is the ironic node's UUID.
node = self._get_node(instance.node)
self._plug_vifs(node, instance, network_info)
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks.
:param instance: The instance object.
:param network_info: Instance network information.
"""
# instance.node is the ironic node's UUID.
node = self._get_node(instance.node)
self._unplug_vifs(node, instance, network_info)
def attach_interface(self, context, instance, image_meta, vif):
"""Use hotplug to add a network interface to a running instance.
The counter action to this is :func:`detach_interface`.
:param context: The request context.
:param nova.objects.instance.Instance instance:
The instance which will get an additional network interface.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance.
:param nova.network.model.VIF vif:
The object which has the information about the interface to attach.
:raise nova.exception.NovaException: If the attach fails.
:returns: None
"""
# NOTE(vdrok): instance info cache gets updated by the network-changed
# event from neutron or by _heal_instance_info_cache periodic task. In
# both cases, this is done asynchronously, so the cache may not be up
# to date immediately after attachment.
self.plug_vifs(instance, [vif])
def detach_interface(self, context, instance, vif):
"""Use hotunplug to remove a network interface from a running instance.
The counter action to this is :func:`attach_interface`.
:param context: The request context.
:param nova.objects.instance.Instance instance:
The instance which gets a network interface removed.
:param nova.network.model.VIF vif:
The object which has the information about the interface to detach.
:raise nova.exception.NovaException: If the detach fails.
:returns: None
"""
# NOTE(vdrok): instance info cache gets updated by the network-changed
# event from neutron or by _heal_instance_info_cache periodic task. In
# both cases, this is done asynchronously, so the cache may not be up
# to date immediately after detachment.
self.unplug_vifs(instance, [vif])
def rebuild(self, context, instance, image_meta, injected_files,
admin_password, allocations, bdms, detach_block_devices,
attach_block_devices, network_info=None,
evacuate=False, block_device_info=None,
preserve_ephemeral=False):
"""Rebuild/redeploy an instance.
This version of rebuild() allows for supporting the option to
preserve the ephemeral partition. We cannot call spawn() from
here because it will attempt to set the instance_uuid value
again, which is not allowed by the Ironic API. It also requires
the instance to not have an 'active' provision state, but we
cannot safely change that. Given that, we implement only the
portions of spawn() we need within rebuild().
:param context: The security context.
:param instance: The instance object.
:param image_meta: Image object returned by nova.image.glance
that defines the image from which to boot this instance. Ignored
by this driver.
:param injected_files: User files to inject into instance.
:param admin_password: Administrator password to set in
instance. Ignored by this driver.
:param allocations: Information about resources allocated to the
instance via placement, of the form returned by
SchedulerReportClient.get_allocations_for_consumer.
Ignored by this driver.
:param bdms: block-device-mappings to use for rebuild. Ignored
by this driver.
:param detach_block_devices: function to detach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage. Ignored by this driver.
:param attach_block_devices: function to attach block devices. See
nova.compute.manager.ComputeManager:_rebuild_default_impl for
usage. Ignored by this driver.
:param network_info: Instance network information. Ignored by
this driver.
:param evacuate: Boolean value; if True the instance is
recreated on a new hypervisor - all the cleanup of old state is
skipped. Ignored by this driver.
:param block_device_info: Instance block device
information. Ignored by this driver.
:param preserve_ephemeral: Boolean value; if True the ephemeral
must be preserved on rebuild.
"""
LOG.debug('Rebuild called for instance', instance=instance)
instance.task_state = task_states.REBUILD_SPAWNING
instance.save(expected_task_state=[task_states.REBUILDING])
node_uuid = instance.node
node = self._get_node(node_uuid)
self._add_instance_info_to_node(node, instance, image_meta,
instance.flavor, preserve_ephemeral)
# Config drive
configdrive_value = None
if configdrive.required_by(instance):
extra_md = {}
if admin_password:
extra_md['admin_pass'] = admin_password
try:
configdrive_value = self._generate_configdrive(
context, instance, node, network_info, extra_md=extra_md,
files=injected_files)
except Exception as e:
with excutils.save_and_reraise_exception():
msg = ("Failed to build configdrive: %s" %
six.text_type(e))
LOG.error(msg, instance=instance)
raise exception.InstanceDeployFailure(msg)
LOG.info("Config drive for instance %(instance)s on "
"baremetal node %(node)s created.",
{'instance': instance['uuid'], 'node': node_uuid})
# Trigger the node rebuild/redeploy.
try:
self.ironicclient.call("node.set_provision_state",
node_uuid, ironic_states.REBUILD,
configdrive=configdrive_value)
except (exception.NovaException, # Retry failed
ironic.exc.InternalServerError, # Validations
ironic.exc.BadRequest) as e: # Maintenance
msg = (_("Failed to request Ironic to rebuild instance "
"%(inst)s: %(reason)s") % {'inst': instance.uuid,
'reason': six.text_type(e)})
raise exception.InstanceDeployFailure(msg)
# Although the target provision state is REBUILD, it will actually go
# to ACTIVE once the redeploy is finished.
timer = loopingcall.FixedIntervalLoopingCall(self._wait_for_active,
instance)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Instance was successfully rebuilt', instance=instance)
def network_binding_host_id(self, context, instance):
"""Get host ID to associate with network ports.
This defines the binding:host_id parameter to the port-create calls for
Neutron. If using the neutron network interface (separate networks for
the control plane and tenants), return None here to indicate that the
port should not yet be bound; Ironic will make a port-update call to
Neutron later to tell Neutron to bind the port.
NOTE: the late binding is important for security. If an ML2 mechanism
manages to connect the tenant network to the baremetal machine before
deployment is done (e.g. port-create time), then the tenant potentially
has access to the deploy agent, which may contain firmware blobs or
secrets. ML2 mechanisms may be able to connect the port without the
switchport info that comes from ironic, if they store that switchport
info for some reason. As such, we should *never* pass binding:host_id
in the port-create call when using the 'neutron' network_interface,
because a null binding:host_id indicates to Neutron that it should
not connect the port yet.
:param context: request context
:param instance: nova.objects.instance.Instance that the network
ports will be associated with
:returns: None
"""
# NOTE(vsaienko) Ironic will set binding:host_id later with port-update
# call when updating mac address or setting binding:profile
# to tell Neutron to bind the port.
return None
def _get_node_console_with_reset(self, instance):
"""Acquire console information for an instance.
If the console is enabled, the console will be re-enabled
before returning.
:param instance: nova instance
        :return: a dictionary with the values below
{ 'node': ironic node
'console_info': node console info }
:raise ConsoleNotAvailable: if console is unavailable
for the instance
"""
node = self._validate_instance_and_node(instance)
node_uuid = node.uuid
def _get_console():
"""Request ironicclient to acquire node console."""
try:
return self.ironicclient.call('node.get_console', node_uuid)
except (exception.NovaException, # Retry failed
ironic.exc.InternalServerError, # Validations
ironic.exc.BadRequest) as e: # Maintenance
LOG.error('Failed to acquire console information for '
'instance %(inst)s: %(reason)s',
{'inst': instance.uuid, 'reason': e})
raise exception.ConsoleNotAvailable()
def _wait_state(state):
"""Wait for the expected console mode to be set on node."""
console = _get_console()
if console['console_enabled'] == state:
raise loopingcall.LoopingCallDone(retvalue=console)
_log_ironic_polling('set console mode', node, instance)
# Return False to start backing off
return False
def _enable_console(mode):
"""Request ironicclient to enable/disable node console."""
try:
self.ironicclient.call('node.set_console_mode', node_uuid,
mode)
except (exception.NovaException, # Retry failed
ironic.exc.InternalServerError, # Validations
ironic.exc.BadRequest) as e: # Maintenance
LOG.error('Failed to set console mode to "%(mode)s" '
'for instance %(inst)s: %(reason)s',
{'mode': mode,
'inst': instance.uuid,
'reason': e})
raise exception.ConsoleNotAvailable()
# Waiting for the console state to change (disabled/enabled)
try:
timer = loopingcall.BackOffLoopingCall(_wait_state, state=mode)
return timer.start(
starting_interval=_CONSOLE_STATE_CHECKING_INTERVAL,
timeout=CONF.ironic.serial_console_state_timeout,
jitter=0.5).wait()
except loopingcall.LoopingCallTimeOut:
LOG.error('Timeout while waiting for console mode to be '
'set to "%(mode)s" on node %(node)s',
{'mode': mode,
'node': node_uuid})
raise exception.ConsoleNotAvailable()
# Acquire the console
console = _get_console()
        # NOTE: Resetting the console is a workaround to force acquiring
        # the console when it has already been acquired by another
        # user/operator. The IPMI serial console does not support multiple
        # sessions, so resetting the console will deactivate any active one
        # without warning the operator.
if console['console_enabled']:
try:
# Disable console
_enable_console(False)
# Then re-enable it
console = _enable_console(True)
except exception.ConsoleNotAvailable:
                # NOTE: We try to recover on failure.
                # But if recovery fails, the console may remain in the
                # "disabled" state and any new connection will be refused.
console = _enable_console(True)
if console['console_enabled']:
return {'node': node,
'console_info': console['console_info']}
else:
LOG.debug('Console is disabled for instance %s',
instance.uuid)
raise exception.ConsoleNotAvailable()
def get_serial_console(self, context, instance):
"""Acquire serial console information.
:param context: request context
:param instance: nova instance
:return: ConsoleSerial object
:raise ConsoleTypeUnavailable: if serial console is unavailable
for the instance
"""
LOG.debug('Getting serial console', instance=instance)
try:
result = self._get_node_console_with_reset(instance)
except exception.ConsoleNotAvailable:
raise exception.ConsoleTypeUnavailable(console_type='serial')
node = result['node']
console_info = result['console_info']
if console_info["type"] != "socat":
LOG.warning('Console type "%(type)s" (of ironic node '
'%(node)s) does not support Nova serial console',
{'type': console_info["type"],
'node': node.uuid},
instance=instance)
raise exception.ConsoleTypeUnavailable(console_type='serial')
# Parse and check the console url
url = urlparse.urlparse(console_info["url"])
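        # A usable socat console URL is expected to look like
        # "tcp://<host>:<port>"; anything malformed or with a scheme other
        # than tcp is rejected below.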
try:
scheme = url.scheme
hostname = url.hostname
port = url.port
if not (scheme and hostname and port):
raise AssertionError()
except (ValueError, AssertionError):
LOG.error('Invalid Socat console URL "%(url)s" '
'(ironic node %(node)s)',
{'url': console_info["url"],
'node': node.uuid},
instance=instance)
raise exception.ConsoleTypeUnavailable(console_type='serial')
if scheme == "tcp":
return console_type.ConsoleSerial(host=hostname,
port=port)
else:
LOG.warning('Socat serial console only supports "tcp". '
'This URL is "%(url)s" (ironic node %(node)s).',
{'url': console_info["url"],
'node': node.uuid},
instance=instance)
raise exception.ConsoleTypeUnavailable(console_type='serial')
@property
def need_legacy_block_device_info(self):
return False
def prepare_networks_before_block_device_mapping(self, instance,
network_info):
"""Prepare networks before the block devices are mapped to instance.
Plug VIFs before block device preparation. In case where storage
network is managed by neutron and a MAC address is specified as a
volume connector to a node, we can get the IP address assigned to
the connector. An IP address of volume connector may be required by
some volume backend drivers. For getting the IP address, VIFs need to
be plugged before block device preparation so that a VIF is assigned to
a MAC address.
"""
try:
self.plug_vifs(instance, network_info)
except Exception:
with excutils.save_and_reraise_exception():
LOG.error("Error preparing deploy for instance "
"%(instance)s on baremetal node %(node)s.",
{'instance': instance.uuid,
'node': instance.node},
instance=instance)
def clean_networks_preparation(self, instance, network_info):
"""Clean networks preparation when block device mapping is failed.
Unplug VIFs when block device preparation is failed.
"""
try:
self.unplug_vifs(instance, network_info)
except Exception as e:
LOG.warning('Error detaching VIF from node %(node)s '
'after deploy failed; %(reason)s',
{'node': instance.node,
'reason': six.text_type(e)},
instance=instance)
def get_volume_connector(self, instance):
"""Get connector information for the instance for attaching to volumes.
Connector information is a dictionary representing the hardware
information that will be making the connection. This information
consists of properties for protocols supported by the hardware.
If the hardware supports iSCSI protocol, iSCSI initiator IQN is
included as follows::
{
'ip': ip,
'initiator': initiator,
'host': hostname
}
An IP address is set if a volume connector with type ip is assigned to
a node. An IP address is also set if a node has a volume connector with
        type mac. The IP address is obtained from a VIF attached to an ironic
        port or portgroup with that MAC address. Otherwise, an IP address of
        one of the VIFs is used.
:param instance: nova instance
:return: A connector information dictionary
"""
node = self.ironicclient.call("node.get", instance.node)
properties = self._parse_node_properties(node)
connectors = self.ironicclient.call("node.list_volume_connectors",
instance.node, detail=True)
values = {}
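        # Group connector IDs by connector type, e.g. 'ip', 'mac', 'iqn',
        # 'wwpn' and 'wwnn'.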
for conn in connectors:
values.setdefault(conn.type, []).append(conn.connector_id)
props = {}
ip = self._get_volume_connector_ip(instance, node, values)
if ip:
LOG.debug('Volume connector IP address for node %(node)s is '
'%(ip)s.',
{'node': node.uuid, 'ip': ip},
instance=instance)
props['ip'] = props['host'] = ip
if values.get('iqn'):
props['initiator'] = values['iqn'][0]
if values.get('wwpn'):
props['wwpns'] = values['wwpn']
if values.get('wwnn'):
props['wwnns'] = values['wwnn']
props['platform'] = properties.get('cpu_arch')
props['os_type'] = 'baremetal'
# NOTE(TheJulia): The host field is important to cinder connectors
# as it is used in some drivers for logging purposes, and we presently
# only otherwise set it when an IP address is used.
if 'host' not in props:
props['host'] = instance.hostname
# Eventually it would be nice to be able to do multipath, but for now
# we should at least set the value to False.
props['multipath'] = False
return props
def _get_volume_connector_ip(self, instance, node, values):
if values.get('ip'):
LOG.debug('Node %s has an IP address for volume connector',
node.uuid, instance=instance)
return values['ip'][0]
vif_id = self._get_vif_from_macs(node, values.get('mac', []), instance)
# retrieve VIF and get the IP address
nw_info = instance.get_network_info()
if vif_id:
fixed_ips = [ip for vif in nw_info if vif['id'] == vif_id
for ip in vif.fixed_ips()]
else:
fixed_ips = [ip for vif in nw_info for ip in vif.fixed_ips()]
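        # Prefer an IPv4 fixed IP; fall back to any remaining address
        # (typically IPv6) if no IPv4 address is assigned.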
fixed_ips_v4 = [ip for ip in fixed_ips if ip['version'] == 4]
if fixed_ips_v4:
return fixed_ips_v4[0]['address']
elif fixed_ips:
return fixed_ips[0]['address']
return None
def _get_vif_from_macs(self, node, macs, instance):
"""Get a VIF from specified MACs.
Retrieve ports and portgroups which have specified MAC addresses and
return a UUID of a VIF attached to a port or a portgroup found first.
:param node: The node object.
        :param macs: A list of MAC addresses of volume connectors.
:param instance: nova instance, used for logging.
:return: A UUID of a VIF assigned to one of the MAC addresses.
"""
for mac in macs:
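            # Check portgroups first, then ports; the VIF ID may be stored in
            # internal_info or in the 'extra' field.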
for method in ['portgroup.list', 'port.list']:
ports = self.ironicclient.call(method,
node=node.uuid,
address=mac,
detail=True)
for p in ports:
vif_id = (p.internal_info.get('tenant_vif_port_id') or
p.extra.get('vif_port_id'))
if vif_id:
LOG.debug('VIF %(vif)s for volume connector is '
'retrieved with MAC %(mac)s of node '
'%(node)s',
{'vif': vif_id,
'mac': mac,
'node': node.uuid},
instance=instance)
return vif_id
return None
def _can_send_version(self, min_version=None, max_version=None):
"""Validate if the suppplied version is available in the API."""
# NOTE(TheJulia): This will effectively just be a pass if no
        # version negotiation has occurred, since there is no way for
        # us to know without explicitly requesting that back-end
        # negotiation occur. This is a capability that is
# present in python-ironicclient, however it may not be needed
# in this case.
if self.ironicclient.is_api_version_negotiated:
current_api_version = self.ironicclient.current_api_version
if (min_version and
version.StrictVersion(current_api_version) <
version.StrictVersion(min_version)):
raise exception.IronicAPIVersionNotAvailable(
version=min_version)
if (max_version and
version.StrictVersion(current_api_version) >
version.StrictVersion(max_version)):
raise exception.IronicAPIVersionNotAvailable(
version=max_version)
def rescue(self, context, instance, network_info, image_meta,
rescue_password):
"""Rescue the specified instance.
:param nova.context.RequestContext context:
The context for the rescue.
:param nova.objects.instance.Instance instance:
The instance being rescued.
:param nova.network.model.NetworkInfo network_info:
Necessary network information for the rescue. Ignored by this
driver.
:param nova.objects.ImageMeta image_meta:
The metadata of the image of the instance. Ignored by this driver.
:param rescue_password: new root password to set for rescue.
:raise InstanceRescueFailure if rescue fails.
"""
LOG.debug('Rescue called for instance', instance=instance)
node_uuid = instance.node
def _wait_for_rescue():
try:
node = self._validate_instance_and_node(instance)
except exception.InstanceNotFound as e:
raise exception.InstanceRescueFailure(reason=six.text_type(e))
if node.provision_state == ironic_states.RESCUE:
raise loopingcall.LoopingCallDone()
if node.provision_state == ironic_states.RESCUEFAIL:
raise exception.InstanceRescueFailure(
reason=node.last_error)
try:
self._can_send_version(min_version='1.38')
self.ironicclient.call("node.set_provision_state",
node_uuid, ironic_states.RESCUE,
rescue_password=rescue_password)
except exception.IronicAPIVersionNotAvailable as e:
LOG.error('Required Ironic API version %(version)s is not '
'available for rescuing.',
version='1.38', instance=instance)
raise exception.InstanceRescueFailure(reason=six.text_type(e))
except Exception as e:
raise exception.InstanceRescueFailure(reason=six.text_type(e))
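        # Poll the node until it reaches the RESCUE (success) or RESCUEFAIL
        # (error) provision state.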
timer = loopingcall.FixedIntervalLoopingCall(_wait_for_rescue)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Successfully rescued Ironic node %(node)s',
{'node': node_uuid}, instance=instance)
def unrescue(self, instance, network_info):
"""Unrescue the specified instance.
:param instance: nova.objects.instance.Instance
:param nova.network.model.NetworkInfo network_info:
Necessary network information for the unrescue. Ignored by this
driver.
"""
LOG.debug('Unrescue called for instance', instance=instance)
node_uuid = instance.node
def _wait_for_unrescue():
try:
node = self._validate_instance_and_node(instance)
except exception.InstanceNotFound as e:
raise exception.InstanceUnRescueFailure(
reason=six.text_type(e))
if node.provision_state == ironic_states.ACTIVE:
raise loopingcall.LoopingCallDone()
if node.provision_state == ironic_states.UNRESCUEFAIL:
raise exception.InstanceUnRescueFailure(
reason=node.last_error)
try:
self._can_send_version(min_version='1.38')
self.ironicclient.call("node.set_provision_state",
node_uuid, ironic_states.UNRESCUE)
except exception.IronicAPIVersionNotAvailable as e:
LOG.error('Required Ironic API version %(version)s is not '
'available for unrescuing.',
version='1.38', instance=instance)
raise exception.InstanceUnRescueFailure(reason=six.text_type(e))
except Exception as e:
raise exception.InstanceUnRescueFailure(reason=six.text_type(e))
timer = loopingcall.FixedIntervalLoopingCall(_wait_for_unrescue)
timer.start(interval=CONF.ironic.api_retry_interval).wait()
LOG.info('Successfully unrescued Ironic node %(node)s',
{'node': node_uuid}, instance=instance)
| {
"content_hash": "8e28191f5e254d3e5ca97d29f2525153",
"timestamp": "",
"source": "github",
"line_count": 2092,
"max_line_length": 96,
"avg_line_length": 45.08699808795411,
"alnum_prop": 0.5771824176756218,
"repo_name": "mikalstill/nova",
"id": "4aecc864d8932d4fbe54aa76145fcfb344f423cf",
"size": "95010",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/virt/ironic/driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "3325"
},
{
"name": "Python",
"bytes": "22797282"
},
{
"name": "Shell",
"bytes": "32969"
},
{
"name": "Smarty",
"bytes": "418399"
}
],
"symlink_target": ""
} |
from django import template
import unicodedata
register = template.Library()
@register.filter("truncate")
def truncate(value, size):
if len(value) > size:
return value[0:size]
else:
return value
@register.filter("truncate_dot")
def truncate_dot(value, size):
if value:
if len(value) > size and size > 3:
return value[0:(size - 3)] + '...'
elif len(value) > size:
return value[0:size]
else:
return value
else:
return value
@register.filter("strip_accents")
def strip_accents(value, encoding='ASCII'):
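    # Decompose accented characters (NFD) and drop the combining marks
    # (Unicode category 'Mn'), leaving only the base characters.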
try:
return ''.join(
(c for c in unicodedata.normalize('NFD', unicode(value))
if unicodedata.category(c) != 'Mn'))
except:
return value
@register.filter("zero2unlimited")
def zero2unlimited(value, unit=''):
try:
if value == 0:
return "unlimited"
else:
return "{0}{1}{2}".format(value, " ", unit)
except:
return "{0}{1}{2}".format('(', value, ')')
| {
"content_hash": "2935a314bcaa3583ce017b8b45421abf",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 68,
"avg_line_length": 23.065217391304348,
"alnum_prop": 0.5617342130065975,
"repo_name": "fretscha/django-postfix-admin",
"id": "cceaf20cae788d73c4f9af56f58ea21a89c30e2a",
"size": "1061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "postfixadmin/pfa/templatetags/filter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "117110"
},
{
"name": "Erlang",
"bytes": "1060"
},
{
"name": "HTML",
"bytes": "339518"
},
{
"name": "JavaScript",
"bytes": "2364437"
},
{
"name": "Python",
"bytes": "30044"
},
{
"name": "Shell",
"bytes": "1383"
}
],
"symlink_target": ""
} |
USE_PYTHON3 = True
_IGNORE_FREEZE_FOOTER = 'Ignore-Freeze'
# The time module's handling of timezones is abysmal, so the boundaries are
# precomputed in UNIX time
_FREEZE_START = 1639641600 # 2021/12/16 00:00 -0800
_FREEZE_END = 1641196800 # 2022/01/03 00:00 -0800
def CheckFreeze(input_api, output_api):
if _FREEZE_START <= input_api.time.time() < _FREEZE_END:
footers = input_api.change.GitFootersFromDescription()
if _IGNORE_FREEZE_FOOTER not in footers:
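      # Render the freeze boundaries as local-time strings for the error
      # message below.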
def convert(t):
ts = input_api.time.localtime(t)
return input_api.time.strftime('%Y/%m/%d %H:%M %z', ts)
return [
output_api.PresubmitError(
'There is a prod freeze in effect from {} until {},'
' files in //tools/mb cannot be modified'.format(
convert(_FREEZE_START), convert(_FREEZE_END)))
]
return []
def CheckTests(input_api, output_api):
glob = input_api.os_path.join(input_api.PresubmitLocalPath(), '*_test.py')
tests = input_api.canned_checks.GetUnitTests(input_api,
output_api,
input_api.glob(glob),
run_on_python2=False,
run_on_python3=True,
skip_shebang_check=True)
return input_api.RunTests(tests)
def _CommonChecks(input_api, output_api):
results = []
# Run Pylint over the files in the directory.
pylint_checks = input_api.canned_checks.GetPylint(
input_api,
output_api,
version='2.7',
      # pylint complains about CheckFreeze not being defined; it's probably
      # finding a different PRESUBMIT.py
files_to_skip=['PRESUBMIT_test.py'],
# Disabling certain python3-specific warnings until the conversion
# is complete.
disabled_warnings=[
'super-with-arguments',
'raise-missing-from',
'useless-object-inheritance',
],
)
results.extend(input_api.RunTests(pylint_checks))
# Run the MB unittests.
results.extend(
input_api.canned_checks.RunUnitTestsInDirectory(input_api,
output_api,
'.',
[r'^.+_unittest\.py$'],
skip_shebang_check=True))
# Validate the format of the mb_config.pyl file.
cmd = [input_api.python_executable, 'mb.py', 'validate']
kwargs = {'cwd': input_api.PresubmitLocalPath()}
results.extend(input_api.RunTests([
input_api.Command(name='mb_validate',
cmd=cmd, kwargs=kwargs,
message=output_api.PresubmitError)]))
results.extend(CheckFreeze(input_api, output_api))
results.extend(CheckTests(input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
return _CommonChecks(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _CommonChecks(input_api, output_api)
| {
"content_hash": "549ebef00bb6d10974279ad4e8b485f7",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 79,
"avg_line_length": 35,
"alnum_prop": 0.5717460317460318,
"repo_name": "ric2b/Vivaldi-browser",
"id": "6d378b30c7116e611476a9d0484a8faf6e1eb42d",
"size": "3313",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chromium/tools/mb/PRESUBMIT.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
A module for writing SkE user interaction log to a Redis queue
How to use (CNC-specific stuff) -
Please note that this script expects a modified version of
run.cgi (with provided database connection).
-----------------------------
import redislog
# ...
if __name__ == '__main__':
t1 = time.time()
# ... orig code ...
redislog.log_action(conn, time.time() - t1)
conn.close()
-----------------------------
"""
import json
import redis
import os
import datetime
import urllib
import urlparse
KLOGPROC_CONF_PATH = '/home/tomas/work/go/src/klogproc/conf.json'
DEFAULT_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
ERROR_LOG_PATH = '/var/log/klogproc/ske_errors.txt'
if KLOGPROC_CONF_PATH:
with open(KLOGPROC_CONF_PATH) as fr:
data = json.load(fr)
rc = data['logRedis']
REDIS_SERVER, REDIS_PORT = rc['address'].split(':')
REDIS_PORT = int(REDIS_PORT)
REDIS_DB = rc['database']
REDIS_QUEUE_KEY = rc['queueKey']
else:
REDIS_SERVER = '127.0.0.1'
REDIS_PORT = 6379
REDIS_DB = 1
REDIS_QUEUE_KEY = 'ske_log_queue'
class QueryValues(object):
def __init__(self, url):
self._action = None
self._args = {}
if url:
action, tmp = urllib.splitquery(url)
self._args = urlparse.parse_qs(tmp if tmp else '')
if 'run.cgi/' in action:
_, self._action = action.rsplit('/', 1)
@property
def action(self):
return self._action
@property
def args(self):
ans = {}
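        # parse_qs always returns lists; collapse single-value parameters
        # to plain strings for convenience.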
for k, v in self._args.items():
if len(v) == 1:
ans[k] = v[0]
elif len(v) > 1:
ans[k] = v
return ans
def get_env(s):
return os.environ.get(s, None)
def find_user_id(conn, username):
cur = conn.cursor()
cur.execute('SELECT id FROM user WHERE user = %s', (username,))
ans = cur.fetchone()
return ans[0] if ans else None
def store_log_to_redis(rec):
conn = redis.StrictRedis(host=REDIS_SERVER, port=REDIS_PORT, db=REDIS_DB)
conn.rpush(REDIS_QUEUE_KEY, json.dumps(rec))
def create_log_record(mysql_conn, proc_time, log_date):
log_data = {}
log_data['user_id'] = find_user_id(mysql_conn, get_env('REMOTE_USER'))
log_data['proc_time'] = round(proc_time, 3)
log_data['settings'] = {}
log_data['date'] = log_date
log_data['request'] = {
'HTTP_USER_AGENT': get_env('HTTP_USER_AGENT'),
'REMOTE_ADDR': get_env('REMOTE_ADDR')
}
qv = QueryValues(get_env('REQUEST_URI'))
log_data['params'] = qv.args
log_data['action'] = qv.action
return log_data
def log_action(mysql_conn, proc_time):
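    # Append microseconds to the configured format (producing e.g.
    # '%Y-%m-%d %H:%M:%S.%f') before formatting the current timestamp.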
log_date = datetime.datetime.today().strftime('%s.%%f' % DEFAULT_DATETIME_FORMAT)
try:
data = create_log_record(mysql_conn, proc_time, log_date)
store_log_to_redis(data)
except Exception as ex:
if ERROR_LOG_PATH:
with open(ERROR_LOG_PATH, 'a') as fw:
fw.write('{0} ERROR: {1}'.format(log_date, ex))
| {
"content_hash": "7ff652aecd8f3231d13d777a37cbb97d",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 85,
"avg_line_length": 26.47826086956522,
"alnum_prop": 0.5773399014778325,
"repo_name": "czcorpus/klogproc",
"id": "988092453be29d53985af6aa27d266062e1c8fa0",
"size": "3647",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/redislog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "361344"
},
{
"name": "Makefile",
"bytes": "379"
},
{
"name": "Python",
"bytes": "10324"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
import sys
import shutil
import zipfile
import shlex
from zipfile import ZipFile
from urllib.request import urlretrieve
from subprocess import Popen, PIPE
from distutils.cmd import Command
def zip_directory(dir, zip_file):
zip = ZipFile(zip_file, 'w', compression=zipfile.ZIP_DEFLATED)
root_len = len(os.path.abspath(dir))
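    # root_len is used to strip the absolute prefix so that archive entries
    # are stored relative to the zipped directory.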
for root, dirs, files in os.walk(dir):
archive_root = os.path.abspath(root)[root_len:]
for f in files:
fullpath = os.path.join(root, f)
archive_name = os.path.join(archive_root, f)
zip.write(fullpath, archive_name, zipfile.ZIP_DEFLATED)
zip.close()
class WindowsPortableBuild(Command):
description = "custom build command that builds portable win32 package"
user_options = [
('dist-dir=', None,
"path of dist directory to use for building portable kivy, the end result will be output to this driectory. default to cwd."),
('deps-url=', None,
"url of binary dependancies for portable kivy package default: http://kivy.googlecode.com/files/portable-deps-win32.zip"),
('no-cext', None,
"flag to disable building of c extensions"),
('no-mingw', None,
"flag to disable bundling of mingw compiler for compiling c/cython extensions")]
def initialize_options(self):
self.dist_dir = None
self.deps_url = None
self.no_cext = None
self.no_mingw = None
def finalize_options(self):
if not self.deps_url:
self.deps_url = 'http://kivy.googlecode.com/files/portable-deps-win32.zip'
if not self.dist_dir:
self.dist_dir = os.getcwd()
self.src_dir = os.path.dirname(sys.modules['__main__'].__file__)
        self.dist_name = self.distribution.get_fullname() # e.g. Kivy-0.5 (name and version passed to setup())
self.build_dir = os.path.join(self.dist_dir, self.dist_name+'-w32')
def run(self):
width = 30
print("-" * width)
print("Building Kivy Portable for Win 32")
print("-" * width)
print("\nPreparing Build...")
print("-" * width)
if os.path.exists(self.build_dir):
print("*Cleaning old build dir")
shutil.rmtree(self.build_dir, ignore_errors=True)
print("*Creating build directory:", self.build_dir)
os.makedirs(self.build_dir)
print("\nGetting binary dependencies...")
print("---------------------------------------")
print("*Downloading:", self.deps_url)
        #report_hook is called every time a piece of the file is downloaded to print progress
def report_hook(block_count, block_size, total_size):
p = block_count * block_size * 100.0 / total_size
print("\b\b\b\b\b\b\b\b\b", "%06.2f" % p + "%", end=' ')
print(" Progress: 000.00%", end=' ')
urlretrieve(self.deps_url, # location of binary dependencies needed for portable kivy
os.path.join(self.build_dir, 'deps.zip'), # tmp file to store the archive
reporthook = report_hook)
print(" [Done]")
print("*Extracting binary dependencies...")
zf = ZipFile(os.path.join(self.build_dir, 'deps.zip'))
zf.extractall(self.build_dir)
zf.close()
if self.no_mingw:
print("*Excluding MinGW from portable distribution (--no-mingw option is set)")
shutil.rmtree(os.path.join(self.build_dir, 'MinGW'), ignore_errors=True)
print("\nPutting kivy into portable environment")
print("---------------------------------------")
print("*Building kivy source distribution")
sdist_cmd = [sys.executable, #path to python.exe
os.path.join(self.src_dir, 'setup.py'), #path to setup.py
'sdist', #make setup.py create a src distribution
'--dist-dir=%s'%self.build_dir] #put it into build folder
Popen(sdist_cmd, stdout=PIPE, stderr=PIPE).communicate()
print("*Placing kivy source distribution in portable context")
src_dist = os.path.join(self.build_dir, self.dist_name)
zf = ZipFile(src_dist+'.zip')
zf.extractall(self.build_dir)
zf.close()
if self.no_mingw or self.no_cext:
print("*Skipping C Extension build (either --no_cext or --no_mingw option set)")
else:
print("*Compiling C Extensions inplace for portable distribution")
cext_cmd = [sys.executable, #path to python.exe
'setup.py',
'build_ext', #make setup.py create a src distribution
'--inplace'] #do it inplace
            #this time it runs the setup.py inside the source distribution
            #that has been generated inside the build dir (to generate ext
            #for the target, instead of the source we're building from)
Popen(cext_cmd, cwd=src_dist).communicate()
print("\nFinalizing kivy portable distribution...")
print("---------------------------------------")
print("*Copying scripts and resources")
#copy launcher script and readme to portable root dir/build dir
kivy_bat = os.path.join(src_dist, 'kivy', 'tools', 'packaging', 'win32', 'kivy.bat')
shutil.copy(kivy_bat, os.path.join(self.build_dir, 'kivy.bat'))
kivyenv_sh = os.path.join(src_dist, 'kivy', 'tools', 'packaging', 'win32', 'kivyenv.sh')
shutil.copy(kivyenv_sh, os.path.join(self.build_dir, 'kivyenv.sh'))
readme = os.path.join(src_dist, 'kivy', 'tools', 'packaging', 'win32', 'README.txt')
shutil.copy(readme, os.path.join(self.build_dir, 'README.txt'))
#rename kivy directory to "kivy"
os.rename(src_dist, os.path.join(self.build_dir, 'kivy'))
print("*Removing intermediate file")
os.remove(os.path.join(self.build_dir, 'deps.zip'))
os.remove(os.path.join(self.build_dir, src_dist + '.zip'))
print("*Compressing portable distribution target")
target = os.path.join(self.dist_dir, self.dist_name + "-w32.zip")
zip_directory(self.build_dir, target)
print("*Writing target:", target)
print("*Removing build dir")
shutil.rmtree(self.build_dir, ignore_errors=True)
print("*Upload to google code")
sys.path += [os.path.join(self.src_dir, 'kivy', 'tools', 'packaging')]
import googlecode_upload
version = self.dist_name.replace("Kivy-", "")
status, reason, url = googlecode_upload.upload_find_auth(
target, 'kivy',
'Kivy {}, Windows portable version (Python 2.7, '
'32 and 64 bits, bundled dependencies)'.format(version),
['Featured', 'OsSys-Windows'])
if url:
print('The file was uploaded successfully.')
print('URL: %s' % url)
else:
print('An error occurred. Your file was not uploaded.')
print('Google Code upload server said: %s (%s)' % (reason,
status))
| {
"content_hash": "10418d2959d0fbd9a06d14b90e7a1cad",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 135,
"avg_line_length": 44.9811320754717,
"alnum_prop": 0.594099552572707,
"repo_name": "kivatu/kivy_old",
"id": "ed22ff229db777199f11bfbc9d808c9fea1270df",
"size": "7152",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "kivy/tools/packaging/win32/build.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "154026"
},
{
"name": "CSS",
"bytes": "6729"
},
{
"name": "Emacs Lisp",
"bytes": "9603"
},
{
"name": "F#",
"bytes": "289"
},
{
"name": "JavaScript",
"bytes": "11300"
},
{
"name": "Python",
"bytes": "2887512"
},
{
"name": "Shell",
"bytes": "6010"
},
{
"name": "TeX",
"bytes": "4271"
},
{
"name": "VimL",
"bytes": "1123"
}
],
"symlink_target": ""
} |
"""Sensor from an SQL Query."""
from __future__ import annotations
from datetime import date
import decimal
import logging
import sqlalchemy
from sqlalchemy.engine import Result
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import scoped_session, sessionmaker
import voluptuous as vol
from homeassistant.components.recorder import CONF_DB_URL, DEFAULT_DB_FILE, DEFAULT_URL
from homeassistant.components.sensor import (
PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA,
SensorEntity,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import CONF_COLUMN_NAME, CONF_QUERIES, CONF_QUERY, DB_URL_RE, DOMAIN
_LOGGER = logging.getLogger(__name__)
def redact_credentials(data: str) -> str:
"""Redact credentials from string data."""
return DB_URL_RE.sub("//****:****@", data)
_QUERY_SCHEME = vol.Schema(
{
vol.Required(CONF_COLUMN_NAME): cv.string,
vol.Required(CONF_NAME): cv.string,
vol.Required(CONF_QUERY): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.string,
}
)
PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(
{vol.Required(CONF_QUERIES): [_QUERY_SCHEME], vol.Optional(CONF_DB_URL): cv.string}
)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the SQL sensor platform."""
_LOGGER.warning(
# SQL config flow added in 2022.4 and should be removed in 2022.6
"Configuration of the SQL sensor platform in YAML is deprecated and "
"will be removed in Home Assistant 2022.6; Your existing configuration "
"has been imported into the UI automatically and can be safely removed "
"from your configuration.yaml file"
)
default_db_url = DEFAULT_URL.format(
hass_config_path=hass.config.path(DEFAULT_DB_FILE)
)
for query in config[CONF_QUERIES]:
new_config = {
CONF_DB_URL: config.get(CONF_DB_URL, default_db_url),
CONF_NAME: query[CONF_NAME],
CONF_QUERY: query[CONF_QUERY],
CONF_UNIT_OF_MEASUREMENT: query.get(CONF_UNIT_OF_MEASUREMENT),
CONF_VALUE_TEMPLATE: query.get(CONF_VALUE_TEMPLATE),
CONF_COLUMN_NAME: query[CONF_COLUMN_NAME],
}
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=new_config,
)
)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the SQL sensor entry."""
db_url: str = entry.options[CONF_DB_URL]
name: str = entry.options[CONF_NAME]
query_str: str = entry.options[CONF_QUERY]
unit: str | None = entry.options.get(CONF_UNIT_OF_MEASUREMENT)
template: str | None = entry.options.get(CONF_VALUE_TEMPLATE)
column_name: str = entry.options[CONF_COLUMN_NAME]
value_template: Template | None = None
if template is not None:
try:
value_template = Template(template)
value_template.ensure_valid()
except TemplateError:
value_template = None
if value_template is not None:
value_template.hass = hass
try:
engine = sqlalchemy.create_engine(db_url, future=True)
sessmaker = scoped_session(sessionmaker(bind=engine, future=True))
except SQLAlchemyError as err:
_LOGGER.error("Can not open database %s", {redact_credentials(str(err))})
return
# MSSQL uses TOP and not LIMIT
if not ("LIMIT" in query_str.upper() or "SELECT TOP" in query_str.upper()):
if "mssql" in db_url:
query_str = query_str.upper().replace("SELECT", "SELECT TOP 1")
else:
query_str = query_str.replace(";", "") + " LIMIT 1;"
async_add_entities(
[
SQLSensor(
name,
sessmaker,
query_str,
column_name,
unit,
value_template,
entry.entry_id,
)
],
True,
)
class SQLSensor(SensorEntity):
"""Representation of an SQL sensor."""
_attr_icon = "mdi:database-search"
_attr_has_entity_name = True
def __init__(
self,
name: str,
sessmaker: scoped_session,
query: str,
column: str,
unit: str | None,
value_template: Template | None,
entry_id: str,
) -> None:
"""Initialize the SQL sensor."""
self._query = query
self._attr_native_unit_of_measurement = unit
self._template = value_template
self._column_name = column
self.sessionmaker = sessmaker
self._attr_extra_state_attributes = {}
self._attr_unique_id = entry_id
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, entry_id)},
manufacturer="SQL",
name=name,
)
def update(self) -> None:
"""Retrieve sensor data from the query."""
data = None
self._attr_extra_state_attributes = {}
sess: scoped_session = self.sessionmaker()
try:
result: Result = sess.execute(sqlalchemy.text(self._query))
except SQLAlchemyError as err:
_LOGGER.error(
"Error executing query %s: %s",
self._query,
redact_credentials(str(err)),
)
return
for res in result.mappings():
_LOGGER.debug("Query %s result in %s", self._query, res.items())
data = res[self._column_name]
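            # Expose every column of the row as a state attribute; Decimal
            # and date values are not JSON serializable, so coerce them first.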
for key, value in res.items():
if isinstance(value, decimal.Decimal):
value = float(value)
if isinstance(value, date):
value = value.isoformat()
self._attr_extra_state_attributes[key] = value
if data is not None and self._template is not None:
self._attr_native_value = (
self._template.async_render_with_possible_json_value(data, None)
)
else:
self._attr_native_value = data
if data is None:
_LOGGER.warning("%s returned no results", self._query)
sess.close()
| {
"content_hash": "5e16920541a5030691b6343e3919baa7",
"timestamp": "",
"source": "github",
"line_count": 211,
"max_line_length": 88,
"avg_line_length": 33.60663507109005,
"alnum_prop": 0.6192356508249894,
"repo_name": "mezz64/home-assistant",
"id": "dfb1e15f0525adadb95003f44aad00858438387a",
"size": "7091",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/sql/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as DjangoUserAdmin
from django.utils.translation import ugettext_lazy as _
from .models import User
@admin.register(User)
class UserAdmin(DjangoUserAdmin):
fieldsets = (
(None, {'fields': ('username', 'password')}),
(_('Personal info'), {'fields': ('first_name', 'last_name', 'email', 'domain')}),
(_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
'groups', 'user_permissions')}),
(_('Important dates'), {'fields': ('last_login', 'date_joined')}),
)
| {
"content_hash": "3c37121d19c85654542691d96ff4ec39",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 89,
"avg_line_length": 39.6875,
"alnum_prop": 0.6015748031496063,
"repo_name": "relekang/photos",
"id": "b124a68787475d9d1886ea3e1e1f9f6c238eccd1",
"size": "635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "photos/users/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4514"
},
{
"name": "HTML",
"bytes": "2468"
},
{
"name": "Makefile",
"bytes": "711"
},
{
"name": "Python",
"bytes": "23039"
}
],
"symlink_target": ""
} |
import os
from .ansible import organization_path
def services(organization, env='production'):
service_list = {}
if not organization:
return service_list
service_file = os.path.join(organization_path,
organization,
'env_%s.yml' % env)
with open(service_file) as f:
service = None
for line in f.readlines():
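            # The line that follows a '# service:' comment defines that
            # service; the text after its first ':' is used as the name.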
if service:
service_name = line[line.find(':') + 1:].strip()
service_list[service_name] = service
service = None
elif line.startswith('# service:'):
service = {
'description': line[line.find(':') + 1:].strip(),
'default': False
}
elif line.startswith('# default service:'):
service = {
'description': line[line.find(':') + 1:].strip(),
'default': True
}
return service_list
| {
"content_hash": "518c8df9f0f6e45cdc81cc0c3b3ce164",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 69,
"avg_line_length": 30.11764705882353,
"alnum_prop": 0.46875,
"repo_name": "AerisCloud/AerisCloud",
"id": "4cfd6428fd5ad043c07b073ddc8735070b5629d4",
"size": "1024",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "aeriscloud/services.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2404"
},
{
"name": "Python",
"bytes": "213590"
},
{
"name": "Ruby",
"bytes": "6901"
},
{
"name": "Shell",
"bytes": "15381"
}
],
"symlink_target": ""
} |
from libcloud.common.types import LibcloudError
__all__ = [
'Provider',
'RecordType',
'ZoneError',
'ZoneDoesNotExistError',
'ZoneAlreadyExistsError',
'RecordError',
'RecordDoesNotExistError',
'RecordAlreadyExistsError'
]
class Provider(object):
DUMMY = 'dummy'
LINODE = 'linode'
RACKSPACE = 'rackspace'
ZERIGO = 'zerigo'
ROUTE53 = 'route53'
HOSTVIRTUAL = 'hostvirtual'
GANDI = 'gandi'
GOOGLE = 'google'
SOFTLAYER = 'softlayer'
DIGITAL_OCEAN = 'digitalocean'
AURORADNS = 'auroradns'
# Deprecated
RACKSPACE_US = 'rackspace_us'
RACKSPACE_UK = 'rackspace_uk'
class RecordType(object):
"""
DNS record type.
"""
A = 'A'
AAAA = 'AAAA'
MX = 'MX'
NS = 'NS'
CNAME = 'CNAME'
DNAME = 'DNAME'
TXT = 'TXT'
PTR = 'PTR'
SOA = 'SOA'
SPF = 'SPF'
SRV = 'SRV'
NAPTR = 'NAPTR'
REDIRECT = 'REDIRECT'
GEO = 'GEO'
URL = 'URL'
WKS = 'WKS'
LOC = 'LOC'
class ZoneError(LibcloudError):
error_type = 'ZoneError'
kwargs = ('zone_id', )
def __init__(self, value, driver, zone_id):
self.zone_id = zone_id
super(ZoneError, self).__init__(value=value, driver=driver)
def __str__(self):
return self.__repr__()
def __repr__(self):
return ('<%s in %s, zone_id=%s, value=%s>' %
(self.error_type, repr(self.driver),
self.zone_id, self.value))
class ZoneDoesNotExistError(ZoneError):
error_type = 'ZoneDoesNotExistError'
class ZoneAlreadyExistsError(ZoneError):
error_type = 'ZoneAlreadyExistsError'
class RecordError(LibcloudError):
error_type = 'RecordError'
def __init__(self, value, driver, record_id):
self.record_id = record_id
super(RecordError, self).__init__(value=value, driver=driver)
def __str__(self):
return self.__repr__()
def __repr__(self):
return ('<%s in %s, record_id=%s, value=%s>' %
(self.error_type, repr(self.driver),
self.record_id, self.value))
class RecordDoesNotExistError(RecordError):
error_type = 'RecordDoesNotExistError'
class RecordAlreadyExistsError(RecordError):
error_type = 'RecordAlreadyExistsError'
| {
"content_hash": "54aa22f59afae5ca466ee9da254349da",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 69,
"avg_line_length": 22.223300970873787,
"alnum_prop": 0.5906509392747925,
"repo_name": "pantheon-systems/libcloud",
"id": "ad2b34a6139cfea77c5d14de2fddd35283de00ee",
"size": "3071",
"binary": false,
"copies": "7",
"ref": "refs/heads/trunk",
"path": "libcloud/dns/types.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "Python",
"bytes": "3891700"
},
{
"name": "Shell",
"bytes": "13868"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.db.models import TextField
from ckeditor.widgets import CKEditorWidget
from mptt.admin import MPTTModelAdmin
from modeltranslation.admin import TranslationAdmin
import models
class PageForm(FlatpageForm):
class Meta:
model = models.Page
class PageAdmin(MPTTModelAdmin, TranslationAdmin, FlatPageAdmin):
form = PageForm
formfield_overrides = {TextField: {'widget': CKEditorWidget(config_name='default')}, }
filter_horizontal = ('related', 'groups',)
list_display = ('url', 'title', 'order', 'group_list')
fieldsets = (
(None, {
'fields': ('parent', 'url', 'title', 'title_menu', 'order', 'introduction',
'content', 'sites')
}),
('Advanced options', {
'classes': ('collapse',),
'fields': ('related', 'enable_comments', 'is_container',
'registration_required', 'groups', 'template_name')
}),
)
admin.site.unregister(FlatPage)
admin.site.register(models.Page, PageAdmin)
| {
"content_hash": "625f6f4f6a6e2c65a62699b5b656c73d",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 90,
"avg_line_length": 35.029411764705884,
"alnum_prop": 0.6582703610411419,
"repo_name": "socib/grumers",
"id": "30ec285727cf9c7d717a69abd3e1cdf31bf7d646",
"size": "1191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "grumers/apps/web/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7006"
},
{
"name": "CSS",
"bytes": "19047"
},
{
"name": "HTML",
"bytes": "784061"
},
{
"name": "JavaScript",
"bytes": "3178710"
},
{
"name": "Python",
"bytes": "383891"
},
{
"name": "Shell",
"bytes": "4945"
}
],
"symlink_target": ""
} |
"""Installs standard values for :mod:`lino_xl.lib.cal`, including a
demo set of holidays. (TODO: make them more configurable.)
See also :ref:`xl.specs.holidays`.
"""
from __future__ import unicode_literals
import datetime
from dateutil.relativedelta import relativedelta
from dateutil.easter import easter
from django.conf import settings
from lino.utils.instantiator import Instantiator
from lino_xl.lib.cal.choicelists import DurationUnits, Recurrencies, WORKDAYS
from lino.api import dd, rt, _
def objects():
cal = rt.models.cal
PlannerColumns = cal.PlannerColumns
add = Instantiator('cal.Priority', 'ref').build
yield add('1', **dd.babel_values('name', en=u"very urgent", de=u"sehr dringend", fr=u"très urgent", et=u"väga kiire"))
yield add('3', **dd.babel_values('name', en=u"urgent", de=u"dringend", fr=u"urgent", et="kiire"))
yield add('5', **dd.babel_values('name', en=u"normal", de=u"normal", fr=u"normal", et="keskmine"))
yield add('9', **dd.babel_values('name', en=u"not urgent", de=u"nicht dringend", fr=u"pas urgent", et="mitte kiire"))
calendar = Instantiator('cal.Calendar').build
general = calendar(**dd.str2kw('name', _("General")))
yield general
# settings.SITE.site_config.site_calendar = general
d = dd.demo_date()
if d.month > 4:
d = d.replace(month=4, day=1)
else:
d = d.replace(month=4, day=1, year=d.year-1)
settings.SITE.site_config.update(
site_calendar=general, hide_events_before=d)
# yield settings.SITE.site_config
event_type = Instantiator('cal.EventType').build
holidays = event_type(
planner_column=PlannerColumns.external,
is_appointment=False,
all_rooms=True, **dd.str2kw('name', _("Holidays")))
yield holidays
meeting = event_type(
planner_column=PlannerColumns.external,
**dd.str2kw('name', _("Meeting")))
yield meeting
yield event_type(
planner_column=PlannerColumns.internal,
transparent=True, **dd.str2kw('name', _("Internal")))
RecurrentEvent = rt.models.cal.RecurrentEvent
add = Instantiator(RecurrentEvent, event_type=holidays).build
def holiday(month, day, en, de, fr, et=None):
if et is None:
et = en
return add(
every_unit=Recurrencies.yearly,
monday=True, tuesday=True, wednesday=True, thursday=True,
friday=True, saturday=True, sunday=True,
every=1,
start_date=datetime.date(
year=cal.DEMO_START_YEAR,
month=month, day=day),
**dd.babelkw('name', en=en, de=de, fr=fr, et=et))
yield holiday(1, 1, "New Year's Day", "Neujahr", "Jour de l'an", "Uusaasta")
yield holiday(5, 1, "International Workers' Day", "Tag der Arbeit", "Premier Mai", "kevadpüha")
yield holiday(7, 21, "National Day", "Nationalfeiertag", "Fête nationale", "Belgia riigipüha")
yield holiday(8, 15, "Assumption of Mary", "Mariä Himmelfahrt", "Assomption de Marie")
yield holiday(10, 31, "All Souls' Day", "Allerseelen", "Commémoration des fidèles défunts")
yield holiday(11, 1, "All Saints' Day", "Allerheiligen", "Toussaint")
yield holiday(11, 11, "Armistice with Germany", "Waffenstillstand", "Armistice")
yield holiday(12, 25, "Christmas", "Weihnachten", "Noël", "Esimene Jõulupüha")
easter1 = easter(cal.DEMO_START_YEAR)
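    # Movable feasts are expressed as day offsets relative to Easter Sunday
    # of the demo start year.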
def relative_holiday(offset, name):
return add(
every_unit=Recurrencies.easter, every=1,
start_date=easter1+relativedelta(days=offset),
**dd.str2kw('name', name))
yield relative_holiday(0, _("Easter sunday"))
yield relative_holiday(1, _("Easter monday"))
yield relative_holiday(39, _("Ascension of Jesus"))
yield relative_holiday(50, _("Pentecost"))
yield relative_holiday(-2, _("Good Friday"))
yield relative_holiday(-46, _("Ash Wednesday"))
yield relative_holiday(-48, _("Rosenmontag"))
ar = settings.SITE.login()
for obj in RecurrentEvent.objects.all():
if not obj.update_reminders(ar):
raise Exception("Oops, %s generated no events" % obj)
# event policies
kw = dict()
for wd in WORKDAYS:
kw[wd.name] = True
kw.update(event_type=meeting)
exam_policy = Instantiator(
'cal.EventPolicy', 'every',
every_unit=DurationUnits.months, **kw).build
yield exam_policy(
1, start_time="9:00",
**dd.str2kw('name', _("Every month")))
yield exam_policy(
2, start_time="9:00",
**dd.str2kw('name', _("Every 2 months")))
yield exam_policy(
3, **dd.str2kw('name', _("Every 3 months")))
exam_policy = Instantiator(
'cal.EventPolicy', 'every',
every_unit=DurationUnits.weeks, **kw).build
yield exam_policy(
2, start_time="9:00",
**dd.str2kw('name', _("Every 2 weeks")))
exam_policy = Instantiator(
'cal.EventPolicy', 'every',
every_unit=DurationUnits.days, **kw).build
yield exam_policy(
10, max_events=1, start_time="9:00",
**dd.str2kw('name', _("Once after 10 days")))
exam_policy = Instantiator('cal.EventPolicy').build
yield exam_policy(**dd.str2kw('name', _("Other")))
DPR = rt.models.cal.DailyPlannerRow
yield DPR(end_time="12:00", **dd.str2kw('designation', _("AM")))
yield DPR(start_time="12:00", **dd.str2kw('designation', _("PM")))
yield DPR(**dd.str2kw('designation', _("All day")))
| {
"content_hash": "231d2fd2fde1706aeef856f7c4cbf467",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 124,
"avg_line_length": 37.82876712328767,
"alnum_prop": 0.6284627919608908,
"repo_name": "khchine5/xl",
"id": "10ee0324cfbb556162d485c59bb0b9697164bd59",
"size": "5643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lino_xl/lib/cal/fixtures/std.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "186625"
},
{
"name": "HTML",
"bytes": "1412921"
},
{
"name": "JavaScript",
"bytes": "1630816"
},
{
"name": "PHP",
"bytes": "40437"
},
{
"name": "Python",
"bytes": "2113065"
}
],
"symlink_target": ""
} |
import os
import sys
import unittest
import object_store
from in_memory_object_store import InMemoryObjectStore
from file_system import FileSystem, StatInfo
from future import Future
from local_file_system import LocalFileSystem
from memcache_file_system import MemcacheFileSystem
class _FakeFileSystem(FileSystem):
def __init__(self):
self.stat_value = 0
self._stat_count = 0
self._read_count = 0
def CheckAndReset(self, read_count=0, stat_count=0):
try:
return (self._read_count == read_count and
self._stat_count == stat_count)
finally:
self._read_count = 0
self._stat_count = 0
def Stat(self, path):
self._stat_count += 1
children = dict((path.strip('/') + str(i), self.stat_value)
for i in range(5))
if not path.endswith('/'):
children[path.rsplit('/', 1)[-1]] = self.stat_value
return StatInfo(self.stat_value, children)
def Read(self, paths, binary=False):
self._read_count += 1
return Future(value=dict((path, path) for path in paths))
class MemcacheFileSystemTest(unittest.TestCase):
def setUp(self):
self._object_store = InMemoryObjectStore('')
self._local_fs = LocalFileSystem(os.path.join(sys.path[0],
'test_data',
'file_system'))
def _SetReadCacheItem(self, key, value, stat):
self._object_store.Set(key, (value, stat), object_store.FILE_SYSTEM_READ)
def _SetStatCacheItem(self, key, value):
self._object_store.Set(key, value, object_store.FILE_SYSTEM_STAT)
def _DeleteReadCacheItem(self, key):
self._object_store.Delete(key, object_store.FILE_SYSTEM_READ)
def _DeleteStatCacheItem(self, key):
self._object_store.Delete(key, object_store.FILE_SYSTEM_STAT)
def testReadFiles(self):
file_system = MemcacheFileSystem(self._local_fs, self._object_store)
expected = {
'./test1.txt': 'test1\n',
'./test2.txt': 'test2\n',
'./test3.txt': 'test3\n',
}
self.assertEqual(
expected,
file_system.Read(['./test1.txt', './test2.txt', './test3.txt']).Get())
def testListDir(self):
file_system = MemcacheFileSystem(self._local_fs, self._object_store)
expected = ['dir/']
for i in range(7):
expected.append('file%d.html' % i)
self._SetReadCacheItem('list/', expected, file_system.Stat('list/').version)
self.assertEqual(expected,
sorted(file_system.ReadSingle('list/')))
expected.remove('file0.html')
self._SetReadCacheItem('list/', expected, file_system.Stat('list/').version)
self.assertEqual(expected,
sorted(file_system.ReadSingle('list/')))
def testCaching(self):
fake_fs = _FakeFileSystem()
file_system = MemcacheFileSystem(fake_fs, self._object_store)
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
# Resource has been cached, so test resource is not re-fetched.
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset())
# Test if the Stat version is the same the resource is not re-fetched.
self._DeleteStatCacheItem('bob/bob0')
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset(stat_count=1))
# Test if there is a newer version, the resource is re-fetched.
self._DeleteStatCacheItem('bob/bob0')
fake_fs.stat_value += 1
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
# Test directory and subdirectory stats are cached.
self._DeleteStatCacheItem('bob/bob0')
self._DeleteReadCacheItem('bob/bob0')
self._DeleteStatCacheItem('bob/bob1')
self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset(read_count=2, stat_count=1))
self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
self.assertTrue(fake_fs.CheckAndReset())
# Test a more recent parent directory doesn't force a refetch of children.
self._DeleteReadCacheItem('bob/bob0')
self._DeleteReadCacheItem('bob/bob1')
self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
self.assertEqual('bob/bob2', file_system.ReadSingle('bob/bob2'))
self.assertEqual('bob/bob3', file_system.ReadSingle('bob/bob3'))
self.assertTrue(fake_fs.CheckAndReset(read_count=3))
self._SetStatCacheItem('bob/', 10)
self.assertEqual('bob/bob1', file_system.ReadSingle('bob/bob1'))
self.assertEqual('bob/bob2', file_system.ReadSingle('bob/bob2'))
self.assertEqual('bob/bob3', file_system.ReadSingle('bob/bob3'))
self.assertTrue(fake_fs.CheckAndReset())
self._DeleteStatCacheItem('bob/bob0')
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset(read_count=1, stat_count=1))
self.assertEqual('bob/bob0', file_system.ReadSingle('bob/bob0'))
self.assertTrue(fake_fs.CheckAndReset())
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "59898cfb8178511ed100e192c03a9feb",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 80,
"avg_line_length": 39.916666666666664,
"alnum_prop": 0.6716644524577718,
"repo_name": "junmin-zhu/chromium-rivertrail",
"id": "85b26e9d721c4b9df3f5e024c70df00c5a2260fe",
"size": "5458",
"binary": false,
"copies": "9",
"ref": "refs/heads/v8-binding",
"path": "chrome/common/extensions/docs/server2/memcache_file_system_test.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "1172794"
},
{
"name": "Awk",
"bytes": "9519"
},
{
"name": "C",
"bytes": "75806807"
},
{
"name": "C#",
"bytes": "1132"
},
{
"name": "C++",
"bytes": "145161929"
},
{
"name": "DOT",
"bytes": "1559"
},
{
"name": "F#",
"bytes": "381"
},
{
"name": "Java",
"bytes": "1546515"
},
{
"name": "JavaScript",
"bytes": "18675242"
},
{
"name": "Logos",
"bytes": "4517"
},
{
"name": "Matlab",
"bytes": "5234"
},
{
"name": "Objective-C",
"bytes": "6981387"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "Perl",
"bytes": "926245"
},
{
"name": "Python",
"bytes": "8088373"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3239"
},
{
"name": "Shell",
"bytes": "1513486"
},
{
"name": "Tcl",
"bytes": "277077"
},
{
"name": "XML",
"bytes": "13493"
}
],
"symlink_target": ""
} |
"""
Settings and configuration for Django.
Values will be read from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global settings file for
a list of all possible variables.
"""
import os
import re
import time # Needed for Windows
import warnings
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import LazyObject, empty
from django.utils import importlib
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self):
"""
Load the settings module pointed to by the environment variable. This
is used the first time we need any settings at all, if the user has not
previously configured the settings manually.
"""
try:
settings_module = os.environ[ENVIRONMENT_VARIABLE]
if not settings_module: # If it's set but is an empty string.
raise KeyError
except KeyError:
# NOTE: This is arguably an EnvironmentError, but that causes
# problems with Python's interactive help.
raise ImportError("Settings cannot be imported, because environment variable %s is undefined." % ENVIRONMENT_VARIABLE)
self._wrapped = Settings(settings_module)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError('Settings already configured.')
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
setattr(holder, name, value)
self._wrapped = holder
@property
def configured(self):
"""
Returns True if the settings have already been configured.
"""
return self._wrapped is not empty
class BaseSettings(object):
"""
Common logic for settings whether set by a module or by the user.
"""
def __setattr__(self, name, value):
if name in ("MEDIA_URL", "STATIC_URL") and value and not value.endswith('/'):
raise ImproperlyConfigured("If set, %s must end with a slash" % name)
elif name == "ADMIN_MEDIA_PREFIX":
warnings.warn("The ADMIN_MEDIA_PREFIX setting has been removed; "
"use STATIC_URL instead.", DeprecationWarning)
elif name == "ALLOWED_INCLUDE_ROOTS" and isinstance(value, basestring):
raise ValueError("The ALLOWED_INCLUDE_ROOTS setting must be set "
"to a tuple, not a string.")
object.__setattr__(self, name, value)
class Settings(BaseSettings):
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting == setting.upper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
try:
mod = importlib.import_module(self.SETTINGS_MODULE)
except ImportError as e:
raise ImportError("Could not import settings '%s' (Is it on sys.path?): %s" % (self.SETTINGS_MODULE, e))
# Settings that should be converted into tuples if they're mistakenly entered
# as strings.
tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")
for setting in dir(mod):
if setting == setting.upper():
setting_value = getattr(mod, setting)
if setting in tuple_settings and \
isinstance(setting_value, basestring):
setting_value = (setting_value,) # In case the user forgot the comma.
setattr(self, setting, setting_value)
if not self.SECRET_KEY:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
if hasattr(time, 'tzset') and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = '/usr/share/zoneinfo'
if (os.path.exists(zoneinfo_root) and not
os.path.exists(os.path.join(zoneinfo_root, *(self.TIME_ZONE.split('/'))))):
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = self.TIME_ZONE
time.tzset()
# Settings are configured, so we can set up the logger if required
if self.LOGGING_CONFIG:
# First find the logging configuration function ...
logging_config_path, logging_config_func_name = self.LOGGING_CONFIG.rsplit('.', 1)
logging_config_module = importlib.import_module(logging_config_path)
logging_config_func = getattr(logging_config_module, logging_config_func_name)
# Backwards-compatibility shim for #16288 fix
compat_patch_logging_config(self.LOGGING)
# ... then invoke it with the logging settings
logging_config_func(self.LOGGING)
class UserSettingsHolder(BaseSettings):
"""
Holder for user configured settings.
"""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.default_settings = default_settings
def __getattr__(self, name):
return getattr(self.default_settings, name)
def __dir__(self):
return self.__dict__.keys() + dir(self.default_settings)
# For Python < 2.6:
__members__ = property(lambda self: self.__dir__())
settings = LazySettings()
def compat_patch_logging_config(logging_config):
"""
Backwards-compatibility shim for #16288 fix. Takes initial value of
``LOGGING`` setting and patches it in-place (issuing deprecation warning)
if "mail_admins" logging handler is configured but has no filters.
"""
# Shim only if LOGGING["handlers"]["mail_admins"] exists,
# but has no "filters" key
if "filters" not in logging_config.get(
"handlers", {}).get(
"mail_admins", {"filters": []}):
warnings.warn(
"You have no filters defined on the 'mail_admins' logging "
"handler: adding implicit debug-false-only filter. "
"See http://docs.djangoproject.com/en/dev/releases/1.4/"
"#request-exceptions-are-now-always-logged",
PendingDeprecationWarning)
filter_name = "require_debug_false"
filters = logging_config.setdefault("filters", {})
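        # Pick a filter name that does not clash with any user-defined filter.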
while filter_name in filters:
filter_name = filter_name + "_"
filters[filter_name] = {
"()": "django.utils.log.RequireDebugFalse",
}
logging_config["handlers"]["mail_admins"]["filters"] = [filter_name]
| {
"content_hash": "76d89e80e1ac1b52b304e22e9c33ccb2",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 130,
"avg_line_length": 39.704081632653065,
"alnum_prop": 0.6373682857877152,
"repo_name": "chrishas35/django-travis-ci",
"id": "6a2485bd4bffe8d13dd3c4ea39535b4aa47b8591",
"size": "7782",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "django/conf/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "89027"
},
{
"name": "Python",
"bytes": "8037393"
},
{
"name": "Shell",
"bytes": "4241"
}
],
"symlink_target": ""
} |
from django.http import Http404
from django.test import RequestFactory, TestCase
from ..templatetags.pagination import paginate, pagination, pagination_url
class Object(object):
paginator = None
class PaginationTest(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.request = self.factory.get('/')
def test_paginate(self):
paginate_response = paginate({'request': self.request}, [])
self.assertEqual(repr(paginate_response), '<Page 1 of 1>')
with self.assertRaises(Http404):
self.request = self.factory.get('/?page=2')
paginate({'request': self.request}, [])
def test_pagination(self):
obj = Object()
pagination_response = pagination({'request': self.request}, obj)
self.assertDictEqual(pagination_response, {
'paginator': None,
'pagination_key': 'page',
'page_obj': obj,
'request': self.request,
})
def test_pagination_url(self):
self.assertEqual(pagination_url({'request': self.request}, 1), '/')
self.assertEqual(pagination_url({'request': self.request}, 2), '/?page=2')
| {
"content_hash": "dde03e735fac8b0863c5233fd26c9f8d",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 82,
"avg_line_length": 31.05263157894737,
"alnum_prop": 0.6228813559322034,
"repo_name": "lewiscollard/cms",
"id": "220ad475b25f60c7af0336d45ec33d66ccdc5a59",
"size": "1180",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cms/tests/test_templatetags_pagination.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "81790"
},
{
"name": "HTML",
"bytes": "67876"
},
{
"name": "JavaScript",
"bytes": "277617"
},
{
"name": "Python",
"bytes": "386804"
}
],
"symlink_target": ""
} |
def find_channel_id(channels, req_channel):
"""
Find channel ID from a human readable name.
:param req_channel: channel name
:return: channel id string.
"""
for channel in channels:
if channel["name"] == req_channel:
return channel["id"]
def filter_out_message_subtypes(messages):
"""
Filter out all message that have subtype. Only plain messages by users are used.
:param messages: total messages
:return: list of messages
"""
filtered_messages = []
for message in messages:
if "subtype" not in message:
filtered_messages.append(message)
    return filtered_messages
| {
"content_hash": "59ea54d626e8220f9266052a8271a862",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 84,
"avg_line_length": 30.045454545454547,
"alnum_prop": 0.6475037821482602,
"repo_name": "haukurk/pyslaquery",
"id": "a8c988e0afd055e9d60b26c1fb242222cfa953ce",
"size": "661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyslaquery/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6907"
}
],
"symlink_target": ""
} |
import testtools
from tempest.common import waiters
from tempest import config
import tempest.api.compute.volumes.test_attach_volume as VolumeAttachTest
CONF = config.CONF
class HybridVCloudAttachVolumeTestJSON(VolumeAttachTest.AttachVolumeTestJSON):
def _create_and_attach(self):
# Start a server and wait for it to become ready
self.admin_pass = self.image_ssh_password
self.server = self.create_test_server(
validatable=True,
wait_until='ACTIVE',
adminPass=self.admin_pass, availability_zone=CONF.compute.vcloud_availability_zone)
# Record addresses so that we can ssh later
self.server['addresses'] = self.servers_client.list_addresses(
self.server['id'])['addresses']
# Create a volume and wait for it to become ready
self.volume = self.volumes_client.create_volume(
size=CONF.volume.volume_size, display_name='test', availability_zone=CONF.compute.vcloud_availability_zone)['volume']
self.addCleanup(self._delete_volume)
waiters.wait_for_volume_status(self.volumes_client,
self.volume['id'], 'available')
# Attach the volume to the server
self.attachment = self.servers_client.attach_volume(
self.server['id'],
volumeId=self.volume['id'],
device='/dev/%s' % self.device)['volumeAttachment']
waiters.wait_for_volume_status(self.volumes_client,
self.volume['id'], 'in-use')
self.addCleanup(self._detach, self.server['id'], self.volume['id'])
class HybridAWSAttachVolumeTestJSON(VolumeAttachTest.AttachVolumeTestJSON):
def _create_and_attach(self):
# Start a server and wait for it to become ready
self.admin_pass = self.image_ssh_password
self.server = self.create_test_server(
validatable=True,
wait_until='ACTIVE',
adminPass=self.admin_pass, availability_zone=CONF.compute.aws_availability_zone)
# Record addresses so that we can ssh later
self.server['addresses'] = self.servers_client.list_addresses(
self.server['id'])['addresses']
# Create a volume and wait for it to become ready
self.volume = self.volumes_client.create_volume(
size=CONF.volume.volume_size, display_name='test', availability_zone=CONF.compute.aws_availability_zone)['volume']
self.addCleanup(self._delete_volume)
waiters.wait_for_volume_status(self.volumes_client,
self.volume['id'], 'available')
# Attach the volume to the server
self.attachment = self.servers_client.attach_volume(
self.server['id'],
volumeId=self.volume['id'],
device='/dev/%s' % self.device)['volumeAttachment']
waiters.wait_for_volume_status(self.volumes_client,
self.volume['id'], 'in-use')
self.addCleanup(self._detach, self.server['id'], self.volume['id'])
| {
"content_hash": "264c4750dcf60dda969a3d4c66d9841f",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 129,
"avg_line_length": 43.53521126760563,
"alnum_prop": 0.6331284373989,
"repo_name": "HybridF5/tempest",
"id": "9afeb8ed07903fab0e8c6f6e789d6e583d1fe3de",
"size": "3716",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/api/hybrid_cloud/compute/volumes/test_attach_volume.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3636851"
},
{
"name": "Shell",
"bytes": "8175"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import contextlib
import io
import os.path
import traceback
from pre_commit.errors import FatalError
from pre_commit.store import Store
# For testing purposes
class PreCommitSystemExit(SystemExit):
pass
def _log_and_exit(msg, exc, formatted, print_fn=print):
error_msg = '{0}: {1}: {2}'.format(msg, type(exc).__name__, exc)
print_fn(error_msg)
print_fn('Check the log at ~/.pre-commit/pre-commit.log')
store = Store()
store.require_created()
with io.open(os.path.join(store.directory, 'pre-commit.log'), 'w') as log:
log.write(error_msg + '\n')
log.write(formatted + '\n')
raise PreCommitSystemExit(1)
@contextlib.contextmanager
def error_handler():
try:
yield
except FatalError as e:
_log_and_exit('An error has occurred', e, traceback.format_exc())
except Exception as e:
_log_and_exit(
'An unexpected error has occurred',
e,
traceback.format_exc(),
)
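# Sketch of how a caller might wrap its entry point with the context manager
# above (hypothetical usage, not part of this module): fatal and unexpected
# errors are logged to ~/.pre-commit/pre-commit.log and surface as
# PreCommitSystemExit(1) instead of a raw traceback.
#
#   def main(argv=None):
#       with error_handler():
#           return run_command(argv)  # `run_command` is a stand-in name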
| {
"content_hash": "9b7c15217801b9c547cdaa02f726f44d",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 78,
"avg_line_length": 26.357142857142858,
"alnum_prop": 0.6504065040650406,
"repo_name": "chriskuehl/pre-commit-1",
"id": "c8d2bfc8eb3f71792462d6259486f5df3dd2000f",
"size": "1107",
"binary": false,
"copies": "4",
"ref": "refs/heads/nonexec_pre-push_tmpl",
"path": "pre_commit/error_handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "128"
},
{
"name": "Makefile",
"bytes": "385"
},
{
"name": "Python",
"bytes": "188634"
},
{
"name": "Ruby",
"bytes": "655"
},
{
"name": "Shell",
"bytes": "1257"
}
],
"symlink_target": ""
} |
import lldb
from lldbsuite.test.decorators import *
import lldbsuite.test.lldbtest as lldbtest
import lldbsuite.test.lldbutil as lldbutil
import os
import unittest2
class TestSwiftUnknownReference(lldbtest.TestBase):
mydir = lldbtest.TestBase.compute_mydir(__file__)
def check_class(self, var_self):
lldbutil.check_variable(self, var_self, num_children=2)
m_base_string = var_self.GetChildMemberWithName("base_string")
m_string = var_self.GetChildMemberWithName("string")
lldbutil.check_variable(self, m_base_string, summary='"hello"')
lldbutil.check_variable(self, m_string, summary='"world"')
@swiftTest
def test_unknown_objc_ref(self):
"""Test unknown references to Objective-C objects."""
self.build()
target, process, thread, bkpt = lldbutil.run_to_source_breakpoint(
self, 'break here', lldb.SBFileSpec('main.swift'))
frame = thread.frames[0]
var_self = frame.FindVariable("self")
m_pure_ref = var_self.GetChildMemberWithName("pure_ref")
self.check_class(m_pure_ref)
m_objc_ref = var_self.GetChildMemberWithName("objc_ref")
self.check_class(m_objc_ref)
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lldb.SBDebugger.Terminate)
unittest2.main()
| {
"content_hash": "87a1b322dca2a8195a145ce6e089e9f7",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 74,
"avg_line_length": 33.95,
"alnum_prop": 0.6774668630338734,
"repo_name": "apple/swift-lldb",
"id": "41ef074771239bdb9687d1654bb105fae6f8d430",
"size": "1818",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "packages/Python/lldbsuite/test/lang/swift/unknown_reference/TestSwiftUnknownReference.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "130449"
},
{
"name": "C",
"bytes": "198536"
},
{
"name": "C++",
"bytes": "27687071"
},
{
"name": "CMake",
"bytes": "172176"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "106804"
},
{
"name": "Objective-C",
"bytes": "106821"
},
{
"name": "Objective-C++",
"bytes": "25658"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "4680483"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Swift",
"bytes": "260786"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
} |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'PolicyStateEnum' : _MetaInfoEnum('PolicyStateEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper',
{
'active':'ACTIVE',
'suspended':'SUSPENDED',
}, 'Cisco-IOS-XR-pbr-oper', _yang_ns._namespaces['Cisco-IOS-XR-pbr-oper']),
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats',
False,
[
_MetaInfoClassMember('match-data-rate', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Incoming matched data rate in kbps
''',
'match_data_rate',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('pre-policy-matched-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Matched bytes before applying policy
''',
'pre_policy_matched_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('pre-policy-matched-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Matched pkts before applying policy
''',
'pre_policy_matched_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-drop-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped bytes (packets/bytes)
''',
'total_drop_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-drop-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped packets (packets/bytes)
''',
'total_drop_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-drop-rate', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Total drop rate (packets/bytes)
''',
'total_drop_rate',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('total-transmit-rate', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' Total transmit rate in kbps
''',
'total_transmit_rate',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('transmit-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Transmitted bytes (packets/bytes)
''',
'transmit_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('transmit-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Transmitted packets (packets/bytes)
''',
'transmit_packets',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'general-stats',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats',
False,
[
_MetaInfoClassMember('drop-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped bytes
''',
'drop_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('drop-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Dropped packets
''',
'drop_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('resp-sent-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of Bytes HTTPR response sent
''',
'resp_sent_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('resp-sent-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of pkts HTTPR response sent
''',
'resp_sent_packets',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('rqst-rcvd-bytes', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of Bytes HTTP request received
''',
'rqst_rcvd_bytes',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('rqst-rcvd-packets', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' TotalNum of pkts HTTP request received
''',
'rqst_rcvd_packets',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'httpr-stats',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat',
False,
[
_MetaInfoClassMember('class-id', ATTRIBUTE, 'int' , None, None,
[(0, 4294967295)], [],
''' ClassId
''',
'class_id',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('class-name', ATTRIBUTE, 'str' , None, None,
[(0, 65)], [],
''' ClassName
''',
'class_name',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('counter-validity-bitmask', ATTRIBUTE, 'long' , None, None,
[(0, 18446744073709551615L)], [],
''' Bitmask to indicate which counter or counters
are undetermined. Counters will be marked
undetermined when one or more classes share
queues with class-default because in such cases
the value of counters for each class is invalid.
Based on the flag(s) set, the following counters
will be marked undetermined. For example, if
value of this object returned is 0x00000101,
counters
TransmitPackets/TransmitBytes/TotalTransmitRate
and DropPackets/DropBytes are undetermined
.0x00000001 - Transmit
(TransmitPackets/TransmitBytes/TotalTransmitRate
), 0x00000002 - Drop
(TotalDropPackets/TotalDropBytes/TotalDropRate),
0x00000004 - Httpr
(HttprTransmitPackets/HttprTransmitBytes),
''',
'counter_validity_bitmask',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('general-stats', REFERENCE_CLASS, 'GeneralStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats',
[], [],
''' general stats
''',
'general_stats',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('httpr-stats', REFERENCE_CLASS, 'HttprStats' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats',
[], [],
''' HTTPR stats
''',
'httpr_stats',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'class-stat',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input',
False,
[
_MetaInfoClassMember('class-stat', REFERENCE_LIST, 'ClassStat' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat',
[], [],
''' Array of classes contained in policy
''',
'class_stat',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('node-name', ATTRIBUTE, 'str' , None, None,
[(0, 42)], [],
''' NodeName
''',
'node_name',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('policy-name', ATTRIBUTE, 'str' , None, None,
[(0, 65)], [],
''' PolicyName
''',
'policy_name',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('state', REFERENCE_ENUM_CLASS, 'PolicyStateEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'PolicyStateEnum',
[], [],
''' State
''',
'state',
'Cisco-IOS-XR-pbr-oper', False),
_MetaInfoClassMember('state-description', ATTRIBUTE, 'str' , None, None,
[(0, 128)], [],
''' StateDescription
''',
'state_description',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'input',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction',
False,
[
_MetaInfoClassMember('input', REFERENCE_CLASS, 'Input' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input',
[], [],
''' PBR policy statistics
''',
'input',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'direction',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces.Interface',
False,
[
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], ['(([a-zA-Z0-9_]*\\d+/){3}\\d+)|(([a-zA-Z0-9_]*\\d+/){4}\\d+)|(([a-zA-Z0-9_]*\\d+/){3}\\d+\\.\\d+)|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]*\\d+))|(([a-zA-Z0-9_]*\\d+/){2}([a-zA-Z0-9_]+))|([a-zA-Z0-9_-]*\\d+)|([a-zA-Z0-9_-]*\\d+\\.\\d+)|(mpls)|(dwdm)'],
''' Name of the interface
''',
'interface_name',
'Cisco-IOS-XR-pbr-oper', True),
_MetaInfoClassMember('direction', REFERENCE_CLASS, 'Direction' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction',
[], [],
''' PBR direction
''',
'direction',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'interface',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap.Interfaces' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap.Interfaces',
False,
[
_MetaInfoClassMember('interface', REFERENCE_LIST, 'Interface' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces.Interface',
[], [],
''' PBR action data for a particular interface
''',
'interface',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'interfaces',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node.PolicyMap' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node.PolicyMap',
False,
[
_MetaInfoClassMember('interfaces', REFERENCE_CLASS, 'Interfaces' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap.Interfaces',
[], [],
''' Operational data for all interfaces
''',
'interfaces',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'policy-map',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes.Node' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes.Node',
False,
[
_MetaInfoClassMember('node-name', ATTRIBUTE, 'str' , None, None,
[], ['([a-zA-Z0-9_]*\\d+/){1,2}([a-zA-Z0-9_]*\\d+)'],
''' The node
''',
'node_name',
'Cisco-IOS-XR-pbr-oper', True),
_MetaInfoClassMember('policy-map', REFERENCE_CLASS, 'PolicyMap' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node.PolicyMap',
[], [],
''' Operational data for policymaps
''',
'policy_map',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'node',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr.Nodes' : {
'meta_info' : _MetaInfoClass('Pbr.Nodes',
False,
[
_MetaInfoClassMember('node', REFERENCE_LIST, 'Node' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes.Node',
[], [],
''' PBR operational data for a particular node
''',
'node',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'nodes',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
'Pbr' : {
'meta_info' : _MetaInfoClass('Pbr',
False,
[
_MetaInfoClassMember('nodes', REFERENCE_CLASS, 'Nodes' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper', 'Pbr.Nodes',
[], [],
''' Node-specific PBR operational data
''',
'nodes',
'Cisco-IOS-XR-pbr-oper', False),
],
'Cisco-IOS-XR-pbr-oper',
'pbr',
_yang_ns._namespaces['Cisco-IOS-XR-pbr-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_pbr_oper'
),
},
}
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.GeneralStats']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat.HttprStats']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input.ClassStat']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction.Input']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface.Direction']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces.Interface']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap.Interfaces']['meta_info'].parent =_meta_table['Pbr.Nodes.Node.PolicyMap']['meta_info']
_meta_table['Pbr.Nodes.Node.PolicyMap']['meta_info'].parent =_meta_table['Pbr.Nodes.Node']['meta_info']
_meta_table['Pbr.Nodes.Node']['meta_info'].parent =_meta_table['Pbr.Nodes']['meta_info']
_meta_table['Pbr.Nodes']['meta_info'].parent =_meta_table['Pbr']['meta_info']
| {
"content_hash": "1394346202ea85be95ba9320259ef164",
"timestamp": "",
"source": "github",
"line_count": 370,
"max_line_length": 271,
"avg_line_length": 49.88918918918919,
"alnum_prop": 0.5093992090579121,
"repo_name": "abhikeshav/ydk-py",
"id": "6855a04a645ab28ba6b89039da5b27d2e4fcfc56",
"size": "18462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_pbr_oper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "7226"
},
{
"name": "Python",
"bytes": "446117934"
}
],
"symlink_target": ""
} |
import os
import os_milestone
| {
"content_hash": "0911e40c47e874b0f9784348e18f7967",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 19,
"avg_line_length": 14.5,
"alnum_prop": 0.8620689655172413,
"repo_name": "pedrolegold/uforge-cli",
"id": "78ae798987ee87bc59958ddbdfeb0e581a203765",
"size": "29",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/uforgecli/commands/os/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "201"
},
{
"name": "Python",
"bytes": "382973"
}
],
"symlink_target": ""
} |
from merge_sort import *
def binary_search_recur(A, x, l, r):
# Try to find x in sorted list A recursively
# l is the left-most idx, r is the right-most idx
# Return the index of x if found, return False otherwise
if l > r:
return False
mid = int(l + (r-l)/2)
if A[mid] < x:
return binary_search_recur(A, x, mid+1, r)
elif A[mid] == x:
return mid
else:
return binary_search_recur(A, x, l, mid-1)
def binary_search_iter(A, x):
# Iterative version of binary search
l = 0
r = len(A) - 1
while l <= r:
mid = int(l + (r-l)/2)
if A[mid] < x:
l = mid + 1
elif A[mid] == x:
return mid
else:
r = mid - 1
return False
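# Small usage sketch (hypothetical values): both variants expect an already
# sorted list and return the matching index, or False when x is absent.
#
#   A = [1, 3, 5, 7, 11]
#   binary_search_iter(A, 7)                  # -> 3
#   binary_search_iter(A, 4)                  # -> False
#   binary_search_recur(A, 7, 0, len(A) - 1)  # -> 3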
def one_test(n = 1000):
# Generate a single test for binary search
    max_num = 2000  # keep this an int so random.randint() accepts it on newer Python versions
A = gen_test(n, max_num)
merge_sort(A, 0, n-1)
a = random.randint(-max_num, max_num)
#idx = binary_search_recur(A, a, 0, n-1)
idx = binary_search_iter(A, a)
#print(a, idx)
if not idx and a not in A:
#print("Not found and correct")
return True
elif A[idx] == a:
#print("Found!")
return True
else:
#print("Something went wrong!")
return False
def many_tests(count = 100):
    # Repeatedly calls one_test to verify binary search
list_len = 1000
for i in range(count):
res = one_test(list_len)
if not res:
print("Test failed")
return
print("Test passed!" )
| {
"content_hash": "6c0e5bb254551b2b16f1dabf10f3bb4b",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 60,
"avg_line_length": 26.25423728813559,
"alnum_prop": 0.5351839896707553,
"repo_name": "JasonVann/CLRS",
"id": "499a6bd0c1cc7423fa4f23d7fd492c9576a7a986",
"size": "1559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "S1_Foundation/C2_GettingStarted/binary_search.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "60634"
}
],
"symlink_target": ""
} |
"""Gets all available campaign criterion bid modifier landscapes for a campaign.
To get campaigns, run basic_operations/get_campaigns.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
PAGE_SIZE = 100
def main(client, campaign_id):
# Initialize appropriate service.
data_service = client.GetService('DataService', version='v201802')
# Get all the campaigns for this account.
selector = {
'fields': ['CampaignId', 'CriterionId', 'StartDate', 'EndDate',
'BidModifier', 'LocalClicks', 'LocalCost', 'LocalImpressions',
'TotalLocalClicks', 'TotalLocalCost', 'TotalLocalImpressions',
'RequiredBudget'],
'paging': {
'startIndex': 0,
'numberResults': PAGE_SIZE
},
'predicates': [{
'field': 'CampaignId', 'operator': 'IN', 'values': [campaign_id]
}]
}
# Set initial values.
offset = 0
more_pages = True
while more_pages is True:
num_landscape_points = 0
page = data_service.getCampaignCriterionBidLandscape(selector)
# Display results.
if 'entries' in page:
for bid_modifier_landscape in page['entries']:
print ('Found campaign-level criterion bid modifier landscapes for '
'criterion with ID "%d", start date "%s", end date "%s", and '
'landscape points:') % (bid_modifier_landscape['criterionId'],
bid_modifier_landscape['startDate'],
bid_modifier_landscape['endDate'])
for landscape_point in bid_modifier_landscape['landscapePoints']:
num_landscape_points += 1
print ('\tbid modifier: %f, clicks: %d, cost: %d, impressions: %d, '
'total clicks: %d, total cost: %d, total impressions: %d, '
'and required budget: %f') % (
landscape_point['bidModifier'], landscape_point['clicks'],
landscape_point['cost']['microAmount'],
landscape_point['impressions'],
landscape_point['totalLocalClicks'],
landscape_point['totalLocalCost']['microAmount'],
landscape_point['totalLocalImpressions'],
landscape_point['requiredBudget']['microAmount'])
else:
print 'No bid modifier landscapes found.'
# Need to increment by the total # of landscape points within the page,
# NOT the number of entries (bid landscapes) in the page.
offset += num_landscape_points
selector['paging']['startIndex'] = str(offset)
more_pages = num_landscape_points >= PAGE_SIZE
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID)
| {
"content_hash": "31fa194d030ee0ebdc001c9852d21cb1",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 80,
"avg_line_length": 38.34567901234568,
"alnum_prop": 0.6188023180940115,
"repo_name": "Aloomaio/googleads-python-lib",
"id": "ec07bb7987e71d5106bb7e436bf52359089e3f2c",
"size": "3728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/adwords/v201802/optimization/get_campaign_criterion_bid_modifier_simulations.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "491015"
}
],
"symlink_target": ""
} |
from functional.tests.identity.v2 import test_identity
class EndpointTests(test_identity.IdentityTests):
def test_endpoint_create(self):
self._create_dummy_endpoint()
def test_endpoint_delete(self):
endpoint_id = self._create_dummy_endpoint(add_clean_up=False)
raw_output = self.openstack(
'endpoint delete %s' % endpoint_id)
self.assertEqual(0, len(raw_output))
def test_endpoint_list(self):
endpoint_id = self._create_dummy_endpoint()
raw_output = self.openstack('endpoint list')
self.assertIn(endpoint_id, raw_output)
items = self.parse_listing(raw_output)
self.assert_table_structure(items, self.ENDPOINT_LIST_HEADERS)
def test_endpoint_show(self):
endpoint_id = self._create_dummy_endpoint()
raw_output = self.openstack('endpoint show %s' % endpoint_id)
items = self.parse_show(raw_output)
self.assert_show_fields(items, self.ENDPOINT_FIELDS)
| {
"content_hash": "2d4f8a6f656f2bf1dd65279db5ecd79f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 70,
"avg_line_length": 37.92307692307692,
"alnum_prop": 0.6693711967545639,
"repo_name": "redhat-openstack/python-openstackclient",
"id": "8064365e016ddd2ce7349125a765034453bc04ed",
"size": "1559",
"binary": false,
"copies": "1",
"ref": "refs/heads/master-patches",
"path": "functional/tests/identity/v2/test_endpoint.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2229284"
},
{
"name": "Shell",
"bytes": "591"
}
],
"symlink_target": ""
} |
"""Tests for certbot.plugins.standalone."""
import argparse
import socket
import unittest
import mock
import six
from acme import challenges
from acme import jose
from acme import standalone as acme_standalone
from certbot import achallenges
from certbot import errors
from certbot import interfaces
from certbot.tests import acme_util
from certbot.tests import test_util
class ServerManagerTest(unittest.TestCase):
"""Tests for certbot.plugins.standalone.ServerManager."""
def setUp(self):
from certbot.plugins.standalone import ServerManager
self.certs = {}
self.http_01_resources = {}
self.mgr = ServerManager(self.certs, self.http_01_resources)
def test_init(self):
self.assertTrue(self.mgr.certs is self.certs)
self.assertTrue(
self.mgr.http_01_resources is self.http_01_resources)
def _test_run_stop(self, challenge_type):
server = self.mgr.run(port=0, challenge_type=challenge_type)
port = server.socket.getsockname()[1] # pylint: disable=no-member
self.assertEqual(self.mgr.running(), {port: server})
self.mgr.stop(port=port)
self.assertEqual(self.mgr.running(), {})
def test_run_stop_tls_sni_01(self):
self._test_run_stop(challenges.TLSSNI01)
def test_run_stop_http_01(self):
self._test_run_stop(challenges.HTTP01)
def test_run_idempotent(self):
server = self.mgr.run(port=0, challenge_type=challenges.HTTP01)
port = server.socket.getsockname()[1] # pylint: disable=no-member
server2 = self.mgr.run(port=port, challenge_type=challenges.HTTP01)
self.assertEqual(self.mgr.running(), {port: server})
self.assertTrue(server is server2)
self.mgr.stop(port)
self.assertEqual(self.mgr.running(), {})
def test_run_bind_error(self):
some_server = socket.socket()
some_server.bind(("", 0))
port = some_server.getsockname()[1]
self.assertRaises(
errors.StandaloneBindError, self.mgr.run, port,
challenge_type=challenges.HTTP01)
self.assertEqual(self.mgr.running(), {})
class SupportedChallengesValidatorTest(unittest.TestCase):
"""Tests for plugins.standalone.supported_challenges_validator."""
def _call(self, data):
from certbot.plugins.standalone import (
supported_challenges_validator)
return supported_challenges_validator(data)
def test_correct(self):
self.assertEqual("tls-sni-01", self._call("tls-sni-01"))
self.assertEqual("http-01", self._call("http-01"))
self.assertEqual("tls-sni-01,http-01", self._call("tls-sni-01,http-01"))
self.assertEqual("http-01,tls-sni-01", self._call("http-01,tls-sni-01"))
def test_unrecognized(self):
assert "foo" not in challenges.Challenge.TYPES
self.assertRaises(argparse.ArgumentTypeError, self._call, "foo")
def test_not_subset(self):
self.assertRaises(argparse.ArgumentTypeError, self._call, "dns")
class AuthenticatorTest(unittest.TestCase):
"""Tests for certbot.plugins.standalone.Authenticator."""
def setUp(self):
from certbot.plugins.standalone import Authenticator
self.config = mock.MagicMock(
tls_sni_01_port=1234, http01_port=4321,
standalone_supported_challenges="tls-sni-01,http-01")
self.auth = Authenticator(self.config, name="standalone")
def test_supported_challenges(self):
self.assertEqual(self.auth.supported_challenges,
[challenges.TLSSNI01, challenges.HTTP01])
def test_supported_challenges_configured(self):
self.config.standalone_supported_challenges = "tls-sni-01"
self.assertEqual(self.auth.supported_challenges,
[challenges.TLSSNI01])
def test_more_info(self):
self.assertTrue(isinstance(self.auth.more_info(), six.string_types))
def test_get_chall_pref(self):
self.assertEqual(self.auth.get_chall_pref(domain=None),
[challenges.TLSSNI01, challenges.HTTP01])
def test_get_chall_pref_configured(self):
self.config.standalone_supported_challenges = "tls-sni-01"
self.assertEqual(self.auth.get_chall_pref(domain=None),
[challenges.TLSSNI01])
@mock.patch("certbot.plugins.standalone.util")
def test_perform_already_listening(self, mock_util):
for chall, port in ((challenges.TLSSNI01.typ, 1234),
(challenges.HTTP01.typ, 4321)):
mock_util.already_listening.return_value = True
self.config.standalone_supported_challenges = chall
self.assertRaises(
errors.MisconfigurationError, self.auth.perform, [])
mock_util.already_listening.assert_called_once_with(port, False)
mock_util.already_listening.reset_mock()
@mock.patch("certbot.plugins.standalone.zope.component.getUtility")
def test_perform(self, unused_mock_get_utility):
achalls = [1, 2, 3]
self.auth.perform2 = mock.Mock(return_value=mock.sentinel.responses)
self.assertEqual(mock.sentinel.responses, self.auth.perform(achalls))
self.auth.perform2.assert_called_once_with(achalls)
@mock.patch("certbot.plugins.standalone.zope.component.getUtility")
def _test_perform_bind_errors(self, errno, achalls, mock_get_utility):
def _perform2(unused_achalls):
raise errors.StandaloneBindError(mock.Mock(errno=errno), 1234)
self.auth.perform2 = mock.MagicMock(side_effect=_perform2)
self.auth.perform(achalls)
mock_get_utility.assert_called_once_with(interfaces.IDisplay)
notification = mock_get_utility.return_value.notification
self.assertEqual(1, notification.call_count)
self.assertTrue("1234" in notification.call_args[0][0])
def test_perform_eacces(self):
# pylint: disable=no-value-for-parameter
self._test_perform_bind_errors(socket.errno.EACCES, [])
def test_perform_eaddrinuse(self):
# pylint: disable=no-value-for-parameter
self._test_perform_bind_errors(socket.errno.EADDRINUSE, [])
def test_perfom_unknown_bind_error(self):
self.assertRaises(
errors.StandaloneBindError, self._test_perform_bind_errors,
socket.errno.ENOTCONN, [])
def test_perform2(self):
domain = b'localhost'
key = jose.JWK.load(test_util.load_vector('rsa512_key.pem'))
http_01 = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.HTTP01_P, domain=domain, account_key=key)
tls_sni_01 = achallenges.KeyAuthorizationAnnotatedChallenge(
challb=acme_util.TLSSNI01_P, domain=domain, account_key=key)
self.auth.servers = mock.MagicMock()
def _run(port, tls): # pylint: disable=unused-argument
return "server{0}".format(port)
self.auth.servers.run.side_effect = _run
responses = self.auth.perform2([http_01, tls_sni_01])
self.assertTrue(isinstance(responses, list))
self.assertEqual(2, len(responses))
self.assertTrue(isinstance(responses[0], challenges.HTTP01Response))
self.assertTrue(isinstance(responses[1], challenges.TLSSNI01Response))
self.assertEqual(self.auth.servers.run.mock_calls, [
mock.call(4321, challenges.HTTP01),
mock.call(1234, challenges.TLSSNI01),
])
self.assertEqual(self.auth.served, {
"server1234": set([tls_sni_01]),
"server4321": set([http_01]),
})
self.assertEqual(1, len(self.auth.http_01_resources))
self.assertEqual(1, len(self.auth.certs))
self.assertEqual(list(self.auth.http_01_resources), [
acme_standalone.HTTP01RequestHandler.HTTP01Resource(
acme_util.HTTP01, responses[0], mock.ANY)])
def test_cleanup(self):
self.auth.servers = mock.Mock()
self.auth.servers.running.return_value = {
1: "server1",
2: "server2",
}
self.auth.served["server1"].add("chall1")
self.auth.served["server2"].update(["chall2", "chall3"])
self.auth.cleanup(["chall1"])
self.assertEqual(self.auth.served, {
"server1": set(), "server2": set(["chall2", "chall3"])})
self.auth.servers.stop.assert_called_once_with(1)
self.auth.servers.running.return_value = {
2: "server2",
}
self.auth.cleanup(["chall2"])
self.assertEqual(self.auth.served, {
"server1": set(), "server2": set(["chall3"])})
self.assertEqual(1, self.auth.servers.stop.call_count)
self.auth.cleanup(["chall3"])
self.assertEqual(self.auth.served, {
"server1": set(), "server2": set([])})
self.auth.servers.stop.assert_called_with(2)
if __name__ == "__main__":
unittest.main() # pragma: no cover
| {
"content_hash": "eac2c1a31884a237eded37a4b790b777",
"timestamp": "",
"source": "github",
"line_count": 226,
"max_line_length": 80,
"avg_line_length": 39.836283185840706,
"alnum_prop": 0.6511162945684772,
"repo_name": "DavidGarciaCat/letsencrypt",
"id": "9f5b1459168658ebdd6de21cd4be7d4633714ad2",
"size": "9003",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "certbot/plugins/standalone_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "50680"
},
{
"name": "Augeas",
"bytes": "5062"
},
{
"name": "Batchfile",
"bytes": "35005"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37245"
},
{
"name": "Nginx",
"bytes": "4274"
},
{
"name": "Python",
"bytes": "1384952"
},
{
"name": "Shell",
"bytes": "123322"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, unicode_literals
import os
from django import VERSION as DJANGO_VERSION
from django.utils.translation import ugettext_lazy as _
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for conveniently
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost",
# "generic.ThreadedComment", (_("Media Library"), "media-library"),)),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
# )
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
# PAGE_MENU_TEMPLATES = (
# (1, _("Top navigation bar"), "pages/menus/dropdown.html"),
# (2, _("Left-hand tree"), "pages/menus/tree.html"),
# (3, _("Footer"), "pages/menus/footer.html"),
# )
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.models.db.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# (_("Image"),),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# (_("Another name"),),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, the django-modeltranslation will be added to the
# INSTALLED_APPS setting.
USE_MODELTRANSLATION = False
########################
# MAIN DJANGO SETTINGS #
########################
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'UTC'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "en"
# Supported languages
LANGUAGES = (
('en', _('English')),
)
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
#############
# DATABASES #
#############
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.",
# DB name or path to database file if using sqlite3.
"NAME": "",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#########
# PATHS #
#########
# Full filesystem path to the project.
PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_APP = os.path.basename(PROJECT_APP_PATH)
PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = STATIC_URL + "media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_APP
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [
os.path.join(PROJECT_ROOT, "templates")
],
"OPTIONS": {
"context_processors": [
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.static",
"django.template.context_processors.media",
"django.template.context_processors.request",
"django.template.context_processors.tz",
"mezzanine.conf.context_processors.settings",
"mezzanine.pages.context_processors.page",
],
"builtins": [
"mezzanine.template.loader_tags",
],
"loaders": [
"mezzanine.template.loaders.host_themes.Loader",
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
]
},
},
]
if DJANGO_VERSION < (1, 9):
del TEMPLATES[0]["OPTIONS"]["builtins"]
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.galleries",
"mezzanine.twitter",
# "mezzanine.accounts",
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
'django.contrib.sessions.middleware.SessionMiddleware',
# Uncomment if using internationalisation or localisation
# 'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
)
if DJANGO_VERSION < (1, 10):
MIDDLEWARE_CLASSES = MIDDLEWARE
del MIDDLEWARE
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
# Instead of doing "from .local_settings import *", we use exec so that
# local_settings has full access to everything defined in this module.
# Also force into sys.modules so it's visible to Django's autoreload.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
import sys
import imp
module_name = "%s.local_settings" % PROJECT_APP
module = imp.new_module(module_name)
module.__file__ = f
sys.modules[module_name] = module
exec(open(f, "rb").read())
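# A minimal local_settings.py might look like this (hypothetical per-machine
# values; the real file should stay out of version control):
#
#   DEBUG = True
#   SECRET_KEY = "replace-me"
#   ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
#   DATABASES["default"].update({
#       "ENGINE": "django.db.backends.sqlite3",
#       "NAME": "dev.db",
#   })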
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where
# applicable. We also allow this settings module to be imported
# without Mezzanine installed, as the case may be when using the
# fabfile, where setting the dynamic settings below isn't strictly
# required.
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
| {
"content_hash": "de15213875bc35cf9099fb6849119f74",
"timestamp": "",
"source": "github",
"line_count": 341,
"max_line_length": 79,
"avg_line_length": 34.5366568914956,
"alnum_prop": 0.6731765305256008,
"repo_name": "BQLQ/BQLQ",
"id": "5625f12c99e6eb5b71ed6fced99c70e9fac2c424",
"size": "11778",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mezzanine-4.2.3/mezzanine/project_template/project_name/settings.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "162055"
},
{
"name": "HTML",
"bytes": "111705"
},
{
"name": "JavaScript",
"bytes": "464963"
},
{
"name": "Python",
"bytes": "769964"
}
],
"symlink_target": ""
} |
import os.path
current_dir = os.path.dirname(os.path.abspath(__file__))
import cherrypy
from mako.template import Template
from mako.lookup import TemplateLookup
from api import API
from rpc_connections import rpc_connections
lookup = TemplateLookup(directories=['html'])
class Root(object):
api = API()
@cherrypy.expose
def index(self):
raise cherrypy.HTTPRedirect('/balance')
@cherrypy.expose
def balance(self):
tmpl = lookup.get_template("balance.html")
out = tmpl.render()
return out
@cherrypy.expose
def info(self):
tmpl = lookup.get_template("info.html")
out = tmpl.render()
return out
@cherrypy.expose
def transactions(self):
tmpl = lookup.get_template("transactions.html")
out = tmpl.render()
return out
@cherrypy.expose
def send(self):
tmpl = lookup.get_template("send.html")
out = tmpl.render(rpc_connections=rpc_connections.keys())
return out
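# This class is presumably mounted elsewhere; a minimal way to serve it locally
# would be something like (hypothetical entry point, not shown in this file):
#
#   if __name__ == '__main__':
#       cherrypy.quickstart(Root(), '/')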
| {
"content_hash": "afd0e263757d94b741bc7b701c782199",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 65,
"avg_line_length": 27.18918918918919,
"alnum_prop": 0.6520874751491054,
"repo_name": "hank/coin-gateway",
"id": "741ac3ba8e94c2b1797abc387d341619c03f3adc",
"size": "1006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "root.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "135"
},
{
"name": "Python",
"bytes": "4508"
}
],
"symlink_target": ""
} |
try:
from django.core.cache import caches
except ImportError:
# Django <1.7
from django.core.cache import get_cache
else:
def get_cache(alias):
return caches[alias]
def get_redis_connection(alias='default', write=True):
"""
    Helper used to obtain a raw redis client.
"""
cache = get_cache(alias)
if not hasattr(cache.client, 'get_client'):
raise NotImplementedError("This backend does not supports this feature")
return cache.client.get_client(write)
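# Usage sketch (assumes a cache alias named "default" backed by django-redis in
# settings.CACHES): the object returned is the underlying redis-py client, so
# raw Redis commands become available.
#
#   from django_redis import get_redis_connection
#   con = get_redis_connection("default")
#   con.set("greeting", "hello")
#   con.get("greeting")  # -> b'hello'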
| {
"content_hash": "fbeb9203aba7112cc5bfc5a6b6fa37ff",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 80,
"avg_line_length": 25.55,
"alnum_prop": 0.6790606653620352,
"repo_name": "zl352773277/django-redis",
"id": "a1f07fd462ed70fa04020662e906b45cae23b25c",
"size": "536",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "django_redis/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "79781"
}
],
"symlink_target": ""
} |
import json, sys, errno, codecs, datetime
import numpy as np
from heapq import heappop, heappush
from pprint import pprint
from datetime import datetime
import time
def num_pageviews(session):
return len([r for r in session if 'is_pageview' in r and r['is_pageview'] == 'true'])
def get_session_length(session):
if len(session) < 2:
return 0
else:
d1 = session[0]['ts']
d2 = session[-1]['ts']
# convert to unix timestamp
d1_ts = time.mktime(d1.timetuple())
d2_ts = time.mktime(d2.timetuple())
# they are now in seconds, subtract and then divide by 60 to get minutes.
return (d2_ts-d1_ts) / 60
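# Tiny worked example (hypothetical records): two requests ten minutes apart
# give a session length of 10.0 minutes; sessions with fewer than two records
# report 0.
#
#   from datetime import datetime
#   session = [{'ts': datetime(2016, 1, 1, 12, 0)},
#              {'ts': datetime(2016, 1, 1, 12, 10)}]
#   get_session_length(session)  # -> 10.0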
def external_searches(session):
return len([r for r in session if 'referer_class' in r and r['referer_class'] == 'external (search engine)'])
# This function measures the "DFS-likeness" of a tree:
# If a tree has a structure for which all valid traversals are identical, it returns NaN.
# If a tree allows for several distinct traversals, the function returns a value between 0 and 1
# capturing from where in the open-list (a.k.a. fringe) of nodes the navigator picked the next node
# to be visited. A BFS will have a value of 0, a DFS, a value of 1. Intermediate values arise when
# the navigator behaves sometimes in a BFS, and sometimes in a DFS, way.
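# Worked example (hypothetical timestamps, not taken from the data): for the
# tree R -> {A, B} and A -> {C}, with 'ts' recording when each page was
# opened, visiting C before B (order R, A, C, B) is a pure depth-first
# traversal and scores 1.0, while visiting B before C (order R, A, B, C) is a
# pure breadth-first traversal and scores 0.0.
#
#   from datetime import datetime, timedelta
#   t = [datetime(2016, 1, 1) + timedelta(minutes=i) for i in range(4)]
#   dfs_root = {'ts': t[0], 'children': [
#       {'ts': t[1], 'children': [{'ts': t[2]}]},  # child A, then its child C
#       {'ts': t[3]},                              # sibling B visited last
#   ]}
#   bfs_root = {'ts': t[0], 'children': [
#       {'ts': t[1], 'children': [{'ts': t[3]}]},  # C visited last
#       {'ts': t[2]},                              # sibling B visited before C
#   ]}
#   dfsness(dfs_root)  # -> 1.0
#   dfsness(bfs_root)  # -> 0.0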
def dfsness(root):
heap = [(root['ts'], 0, 0, root)]
pos = []
while len(heap) > 0:
unique_phases = sorted(set([x[1] for x in heap]))
phase_dict = dict(zip(unique_phases, range(len(unique_phases))))
heap = [(x[0], phase_dict[x[1]], x[2], x[3]) for x in heap]
#print [(x[1],x[3]['uri_path']) for x in heap]
(time_next, phase_next, tb_next, node_next) = heappop(heap)
phase = len(unique_phases)
pos += [phase_next / (phase - 1.0) if phase > 1 else np.nan]
# tb stands for 'tie-breaker'.
tb = 0
if 'children' in node_next:
for ch in node_next['children']:
heappush(heap, (ch['ts'], phase, tb, ch))
tb += 1
return np.nansum(pos) / (len(pos) - sum(np.isnan(pos)))
def get_tree_metrics_helper(tree):
metrics = dict()
if 'children' not in tree:
metrics['size'] = 1
metrics['size_sum'] = 1
metrics['size_sq_sum'] = 1
metrics['num_leafs'] = 1
metrics['depth_max'] = 0
metrics['depth_sum'] = 0
metrics['degree_max'] = 0
metrics['degree_sum'] = 0
metrics['timediff_sum'] = 0
metrics['timediff_min'] = np.nan
metrics['timediff_max'] = np.nan
metrics['ambiguous_max'] = 1 if 'parent_ambiguous' in tree.keys() else 0
metrics['ambiguous_sum'] = 1 if 'parent_ambiguous' in tree.keys() else 0
else:
k = len(tree['children'])
dt = tree['ts']
metrics['size'] = 0
metrics['size_sum'] = 0
metrics['size_sq_sum'] = 0
metrics['num_leafs'] = 0
metrics['depth_max'] = 0
metrics['depth_sum'] = 0
metrics['degree_max'] = 0
metrics['degree_sum'] = 0
metrics['timediff_sum'] = 0
metrics['timediff_min'] = np.inf
metrics['timediff_max'] = -np.inf
metrics['ambiguous_max'] = 0
metrics['ambiguous_sum'] = 0
for ch in tree['children']:
child_metrics = get_tree_metrics_helper(ch)
dt_child = ch['ts']
timediff = (dt_child - dt).total_seconds()
metrics['size'] += child_metrics['size']
metrics['size_sum'] += child_metrics['size_sum']
metrics['size_sq_sum'] += child_metrics['size_sq_sum']
metrics['num_leafs'] += child_metrics['num_leafs']
metrics['depth_max'] = max(metrics['depth_max'], child_metrics['depth_max'])
metrics['depth_sum'] += child_metrics['depth_sum']
metrics['degree_max'] = max(metrics['degree_max'], child_metrics['degree_max'])
metrics['degree_sum'] += child_metrics['degree_sum']
metrics['timediff_sum'] += child_metrics['timediff_sum'] + timediff
metrics['timediff_min'] = min(metrics['timediff_min'], timediff)
metrics['timediff_max'] = max(metrics['timediff_min'], timediff)
metrics['ambiguous_max'] = max(metrics['ambiguous_max'], child_metrics['ambiguous_max'])
metrics['ambiguous_sum'] += child_metrics['ambiguous_sum']
metrics['size'] += 1
metrics['size_sum'] += metrics['size']
metrics['size_sq_sum'] += metrics['size'] * metrics['size']
metrics['depth_max'] += 1
metrics['depth_sum'] += metrics['depth_max']
metrics['degree_sum'] += k
metrics['degree_max'] = max(metrics['degree_max'], k)
metrics['ambiguous_max'] = max(metrics['ambiguous_max'], 1 if 'parent_ambiguous' in tree.keys() else 0)
metrics['ambiguous_sum'] += 1 if 'parent_ambiguous' in tree.keys() else 0
return metrics
def get_tree_metrics(tree):
metrics = get_tree_metrics_helper(tree)
metrics['mean_time_diff'] = metrics['timediff_sum'] / max(1, metrics['degree_sum'])
metrics['dfsness'] = dfsness(tree)
return metrics
| {
"content_hash": "de68d13379ba6ac2439f2e9a0091b85d",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 111,
"avg_line_length": 39.34146341463415,
"alnum_prop": 0.6336019838809671,
"repo_name": "ewulczyn/readers",
"id": "410c7d8e3683247c2777d96b63b55cf5a463dfab",
"size": "4839",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/analysis/ml_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "228234"
},
{
"name": "Python",
"bytes": "13126"
}
],
"symlink_target": ""
} |
"""
Effects module.
"""
from __future__ import absolute_import, unicode_literals
import logging
from psd_tools.constants import Tag, Resource
from psd_tools.terminology import Klass, Key
from psd_tools.utils import new_registry
from psd_tools.psd.descriptor import Descriptor, List
logger = logging.getLogger(__name__)
_TYPES, register = new_registry()
class Effects(object):
"""
List-like effects.
"""
def __init__(self, layer):
self._data = None
for tag in (
Tag.OBJECT_BASED_EFFECTS_LAYER_INFO,
Tag.OBJECT_BASED_EFFECTS_LAYER_INFO_V0,
Tag.OBJECT_BASED_EFFECTS_LAYER_INFO_V1,
):
if tag in layer.tagged_blocks:
self._data = layer.tagged_blocks.get_data(tag)
break
self._items = []
for key in (self._data or []):
value = self._data[key]
if not isinstance(value, List):
value = [value]
for item in value:
if not (
isinstance(item, Descriptor) and item.get(Key.Enabled)
):
continue
kls = _TYPES.get(item.classID)
assert kls is not None, 'kls not found for %r' % item.classID
self._items.append(kls(item, layer._psd.image_resources))
@property
def scale(self):
"""Scale value."""
return self._data.get(Key.Scale).value if self._data else None
@property
def enabled(self):
"""Whether if all the effects are enabled.
:rtype: bool
"""
return bool(self._data.get(b'masterFXSwitch')) if self._data else False
@property
def items(self):
return self._items
def find(self, name):
"""Iterate effect items by name."""
if not self.enabled:
return
KLASS = {kls.__name__.lower(): kls for kls in _TYPES.values()}
for item in self:
if isinstance(item, KLASS.get(name.lower(), None)):
yield item
def __len__(self):
return self._items.__len__()
def __iter__(self):
return self._items.__iter__()
def __getitem__(self, key):
return self._items.__getitem__(key)
# def __setitem__(self, key, value):
# return self._items.__setitem__(key, value)
# def __delitem__(self, key):
# return self._items.__delitem__(key)
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
' '.join(x.__class__.__name__.lower()
for x in self) if self._data else ''
)
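# Illustrative usage sketch (the variable `layer` is hypothetical; it stands
# for any layer whose tagged blocks carry object-based effects, so that
# `Effects(layer)` can be constructed as above):
#   effects = Effects(layer)
#   if effects.enabled:
#       for shadow in effects.find('dropshadow'):
#           print(shadow.distance, shadow.angle, shadow.color)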
class _Effect(object):
"""Base Effect class."""
def __init__(self, value, image_resources):
self.value = value
self._image_resources = image_resources
@property
def enabled(self):
"""Whether if the effect is enabled."""
return bool(self.value.get(Key.Enabled))
@property
def present(self):
"""Whether if the effect is present in Photoshop UI."""
return bool(self.value.get(b'present'))
@property
def shown(self):
"""Whether if the effect is shown in dialog."""
return bool(self.value.get(b'showInDialog'))
@property
def opacity(self):
"""Layer effect opacity in percentage."""
return self.value.get(Key.Opacity).value
def __repr__(self):
return self.__class__.__name__
def _repr_pretty_(self, p, cycle):
if cycle:
return self.__repr__()
p.text(self.__repr__())
class _ColorMixin(object):
@property
def color(self):
"""Color."""
return self.value.get(Key.Color)
@property
def blend_mode(self):
"""Effect blending mode."""
return self.value.get(Key.Mode).enum
class _ChokeNoiseMixin(_ColorMixin):
@property
def choke(self):
"""Choke level."""
return self.value.get(Key.ChokeMatte).value
@property
def size(self):
"""Size in pixels."""
return self.value.get(Key.Blur).value
@property
def noise(self):
"""Noise level."""
return self.value.get(Key.Noise).value
@property
def anti_aliased(self):
"""Angi-aliased."""
return bool(self.value.get(Key.AntiAlias))
@property
def contour(self):
"""Contour configuration."""
return self.value.get(Key.TransferSpec)
class _AngleMixin(object):
@property
def use_global_light(self):
"""Using global light."""
return bool(self.value.get(Key.UseGlobalAngle))
@property
def angle(self):
"""Angle value."""
if self.use_global_light:
return self._image_resources.get_data(Resource.GLOBAL_ANGLE, 30.0)
return self.value.get(Key.LocalLightingAngle).value
class _GradientMixin(object):
@property
def gradient(self):
"""Gradient configuration."""
return self.value.get(Key.Gradient)
@property
def angle(self):
"""Angle value."""
return self.value.get(Key.Angle).value
@property
def type(self):
"""
Gradient type, one of `linear`, `radial`, `angle`, `reflected`, or
`diamond`.
"""
return self.value.get(Key.Type).enum
@property
def reversed(self):
"""Reverse flag."""
return bool(self.value.get(Key.Reverse))
@property
def dithered(self):
"""Dither flag."""
return bool(self.value.get(Key.Dither))
@property
def offset(self):
"""Offset value."""
return self.value.get(Key.Offset)
class _PatternMixin(object):
@property
def pattern(self):
"""Pattern config."""
# TODO: Expose nested property.
return self.value.get(b'Ptrn') # Enum.Pattern. Seems a bug.
@property
def linked(self):
"""Linked."""
return self.value.get(b'Lnkd') # Enum.Linked. Seems a bug.
@property
def angle(self):
"""Angle value."""
return self.value.get(Key.Angle).value
@property
def phase(self):
"""Phase value in Point."""
return self.value.get(b'phase')
class _ShadowEffect(_Effect, _ChokeNoiseMixin, _AngleMixin):
"""Base class for shadow effect."""
@property
def distance(self):
"""Distance."""
return self.value.get(Key.Distance).value
class _GlowEffect(_Effect, _ChokeNoiseMixin, _GradientMixin):
"""Base class for glow effect."""
@property
def glow_type(self):
"""Glow type."""
return self.value.get(Key.GlowTechnique).enum
@property
def quality_range(self):
"""Quality range."""
return self.value.get(Key.InputRange).value
@property
def quality_jitter(self):
"""Quality jitter"""
return self.value.get(Key.ShadingNoise).value
class _OverlayEffect(_Effect):
pass
class _AlignScaleMixin(object):
@property
def blend_mode(self):
"""Effect blending mode."""
return self.value.get(Key.Mode).enum
@property
def scale(self):
"""Scale value."""
return self.value.get(Key.Scale).value
@property
def aligned(self):
"""Aligned."""
return bool(self.value.get(Key.Alignment))
@register(Klass.DropShadow.value)
class DropShadow(_ShadowEffect):
@property
def layer_knocks_out(self):
"""Layers are knocking out."""
return bool(self.value.get(b'layerConceals'))
@register(Klass.InnerShadow.value)
class InnerShadow(_ShadowEffect):
pass
@register(Klass.OuterGlow.value)
class OuterGlow(_GlowEffect):
@property
    def spread(self):
        """Spread value."""
        return self.value.get(Key.ShadingNoise).value
@register(Klass.InnerGlow.value)
class InnerGlow(_GlowEffect):
@property
def glow_source(self):
"""Elements source."""
return self.value.get(Key.InnerGlowSource).enum
@register(Klass.SolidFill.value)
class ColorOverlay(_OverlayEffect, _ColorMixin):
pass
@register(b'GrFl') # Equal to Enum.GradientFill. This seems a bug.
class GradientOverlay(_OverlayEffect, _AlignScaleMixin, _GradientMixin):
pass
@register(b'patternFill')
class PatternOverlay(_OverlayEffect, _AlignScaleMixin, _PatternMixin):
pass
@register(Klass.FrameFX.value)
class Stroke(_Effect, _ColorMixin, _PatternMixin, _GradientMixin):
@property
def position(self):
"""
Position of the stroke, InsetFrame, OutsetFrame, or CenteredFrame.
"""
return self.value.get(Key.Style).enum
@property
def fill_type(self):
"""Fill type, SolidColor, Gradient, or Pattern."""
return self.value.get(Key.PaintType).enum
@property
def size(self):
"""Size value."""
return self.value.get(Key.SizeKey).value
@property
def overprint(self):
"""Overprint flag."""
return bool(self.value.get(b'overprint'))
@register(Klass.BevelEmboss.value)
class BevelEmboss(_Effect, _AngleMixin):
@property
def highlight_mode(self):
"""Highlight blending mode."""
return self.value.get(Key.HighlightMode).enum
@property
def highlight_color(self):
"""Highlight color value."""
return self.value.get(Key.HighlightColor)
@property
def highlight_opacity(self):
"""Highlight opacity value."""
return self.value.get(Key.HighlightOpacity).value
@property
def shadow_mode(self):
"""Shadow blending mode."""
return self.value.get(Key.ShadowMode).enum
@property
def shadow_color(self):
"""Shadow color value."""
return self.value.get(Key.ShadowColor)
@property
def shadow_opacity(self):
"""Shadow opacity value."""
return self.value.get(Key.ShadowOpacity).value
@property
def bevel_type(self):
"""Bevel type, one of `SoftMatte`, `HardLight`, `SoftLight`."""
return self.value.get(Key.BevelTechnique).enum
@property
def bevel_style(self):
"""
Bevel style, one of `OuterBevel`, `InnerBevel`, `Emboss`,
`PillowEmboss`, or `StrokeEmboss`.
"""
return self.value.get(Key.BevelStyle).enum
@property
def altitude(self):
"""Altitude value."""
return self.value.get(Key.LocalLightingAltitude).value
@property
def depth(self):
"""Depth value."""
return self.value.get(Key.StrengthRatio).value
@property
def size(self):
"""Size value in pixel."""
return self.value.get(Key.Blur).value
@property
def direction(self):
"""Direction, either `StampIn` or `StampOut`."""
return self.value.get(Key.BevelDirection).enum
@property
def contour(self):
"""Contour configuration."""
return self.value.get(Key.TransferSpec)
@property
def anti_aliased(self):
"""Anti-aliased."""
return bool(self.value.get(b'antialiasGloss'))
@property
def soften(self):
"""Soften value."""
return self.value.get(Key.Softness).value
@property
def use_shape(self):
"""Using shape."""
return bool(self.value.get(b'useShape'))
@property
def use_texture(self):
"""Using texture."""
return bool(self.value.get(b'useTexture'))
@register(Klass.ChromeFX.value)
class Satin(_Effect, _ColorMixin):
""" Satin effect """
@property
def anti_aliased(self):
"""Anti-aliased."""
return bool(self.value.get(Key.AntiAlias))
@property
def inverted(self):
"""Inverted."""
return bool(self.value.get(Key.Invert))
@property
def angle(self):
"""Angle value."""
return self.value.get(Key.LocalLightingAngle).value
@property
def distance(self):
"""Distance value."""
return self.value.get(Key.Distance).value
@property
def size(self):
"""Size value in pixel."""
return self.value.get(Key.Blur).value
@property
def contour(self):
"""Contour configuration."""
return self.value.get(Key.MappingShape)
| {
"content_hash": "af3771e88b5cf42e5cb00c8bbd2c4337",
"timestamp": "",
"source": "github",
"line_count": 483,
"max_line_length": 79,
"avg_line_length": 25.169772256728777,
"alnum_prop": 0.5932384634367032,
"repo_name": "psd-tools/psd-tools",
"id": "ea5069d13aab265440da1cbdec0f31ffae051181",
"size": "12157",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "src/psd_tools/api/effects.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "3590"
},
{
"name": "Python",
"bytes": "572455"
}
],
"symlink_target": ""
} |
from flask.ext import wtf
import flask
import wtforms
import auth
import config
import model
import util
from main import app
###############################################################################
# Admin Stuff
###############################################################################
@app.route('/admin/')
@auth.admin_required
def admin():
return flask.render_template(
'admin/admin.html',
title='Admin',
html_class='admin',
)
###############################################################################
# Config Stuff
###############################################################################
class ConfigUpdateForm(wtf.Form):
analytics_id = wtforms.StringField('Tracking ID', filters=[util.strip_filter])
announcement_html = wtforms.TextAreaField('Announcement HTML', filters=[util.strip_filter])
announcement_type = wtforms.SelectField('Announcement Type',
choices=[(t, t.title()) for t in model.Config.announcement_type._choices])
anonymous_recaptcha = wtforms.BooleanField('Use reCAPTCHA in forms for unauthorized users')
brand_name = wtforms.StringField('Brand Name', [wtforms.validators.required()], filters=[util.strip_filter])
check_unique_email = wtforms.BooleanField('Check for uniqueness of the verified emails')
email_authentication = wtforms.BooleanField('Email authentication for sign in/sign up')
feedback_email = wtforms.StringField('Feedback Email', [wtforms.validators.optional(), wtforms.validators.email()],
filters=[util.email_filter])
flask_secret_key = wtforms.StringField('Flask Secret Key', [wtforms.validators.optional()],
filters=[util.strip_filter])
notify_on_new_user = wtforms.BooleanField('Send an email notification when a user signs up')
recaptcha_private_key = wtforms.StringField('Private Key', filters=[util.strip_filter])
recaptcha_public_key = wtforms.StringField('Public Key', filters=[util.strip_filter])
salt = wtforms.StringField('Salt', [wtforms.validators.optional()], filters=[util.strip_filter])
verify_email = wtforms.BooleanField('Verify user emails')
@app.route('/admin/config/', methods=['GET', 'POST'])
@auth.admin_required
def admin_config():
config_db = model.Config.get_master_db()
form = ConfigUpdateForm(obj=config_db)
if form.validate_on_submit():
form.populate_obj(config_db)
if not config_db.flask_secret_key:
config_db.flask_secret_key = util.uuid()
if not config_db.salt:
config_db.salt = util.uuid()
config_db.put()
reload(config)
app.config.update(CONFIG_DB=config_db)
return flask.redirect(flask.url_for('admin'))
return flask.render_template(
'admin/admin_config.html',
title='App Config',
html_class='admin-config',
form=form,
api_url=flask.url_for('api.config'),
)
###############################################################################
# Auth Stuff
###############################################################################
class AuthUpdateForm(wtf.Form):
bitbucket_key = wtforms.StringField('Key', filters=[util.strip_filter])
bitbucket_secret = wtforms.StringField('Secret', filters=[util.strip_filter])
dropbox_app_key = wtforms.StringField('App Key', filters=[util.strip_filter])
dropbox_app_secret = wtforms.StringField('App Secret', filters=[util.strip_filter])
facebook_app_id = wtforms.StringField('App ID', filters=[util.strip_filter])
facebook_app_secret = wtforms.StringField('App Secret', filters=[util.strip_filter])
github_client_id = wtforms.StringField('Client ID', filters=[util.strip_filter])
github_client_secret = wtforms.StringField('Client Secret', filters=[util.strip_filter])
instagram_client_id = wtforms.StringField('Client ID', filters=[util.strip_filter])
instagram_client_secret = wtforms.StringField('Client Secret', filters=[util.strip_filter])
linkedin_api_key = wtforms.StringField('API Key', filters=[util.strip_filter])
linkedin_secret_key = wtforms.StringField('Secret Key', filters=[util.strip_filter])
microsoft_client_id = wtforms.StringField('Client ID', filters=[util.strip_filter])
microsoft_client_secret = wtforms.StringField('Client Secret', filters=[util.strip_filter])
twitter_consumer_key = wtforms.StringField('Consumer Key', filters=[util.strip_filter])
twitter_consumer_secret = wtforms.StringField('Consumer Secret', filters=[util.strip_filter])
vk_app_id = wtforms.StringField('App ID', filters=[util.strip_filter])
vk_app_secret = wtforms.StringField('App Secret', filters=[util.strip_filter])
yahoo_consumer_key = wtforms.StringField('Consumer Key', filters=[util.strip_filter])
yahoo_consumer_secret = wtforms.StringField('Consumer Secret', filters=[util.strip_filter])
@app.route('/admin/auth/', methods=['GET', 'POST'])
@auth.admin_required
def admin_auth():
config_db = model.Config.get_master_db()
form = AuthUpdateForm(obj=config_db)
if form.validate_on_submit():
form.populate_obj(config_db)
config_db.put()
reload(config)
app.config.update(CONFIG_DB=config_db)
return flask.redirect(flask.url_for('admin'))
return flask.render_template(
'admin/admin_auth.html',
title='Auth Config',
html_class='admin-auth',
form=form,
api_url=flask.url_for('api.config'),
)
| {
"content_hash": "1e72c994243d6137b53b7d4c15bb1ef4",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 119,
"avg_line_length": 47.26271186440678,
"alnum_prop": 0.6256051640667025,
"repo_name": "ssxenon01/music-app",
"id": "0646f11b389fb407b1b746aaa0b76a74f23e652f",
"size": "5594",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/control/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "96943"
},
{
"name": "CoffeeScript",
"bytes": "9306"
},
{
"name": "HTML",
"bytes": "136403"
},
{
"name": "JavaScript",
"bytes": "49507"
},
{
"name": "PHP",
"bytes": "92328"
},
{
"name": "Python",
"bytes": "182431"
}
],
"symlink_target": ""
} |
import os
import sys
import numpy as np
import tensorflow as tf
# Allow import of top level python files
import inspect
currentdir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe()))
)
parentdir = os.path.dirname(currentdir)
parentdir = os.path.dirname(parentdir)
sys.path.insert(0, parentdir)
from benchmark_args import BaseCommandLineAPI
from benchmark_runner import BaseBenchmarkRunner
class CommandLineAPI(BaseCommandLineAPI):
def __init__(self):
super(CommandLineAPI, self).__init__()
self._parser.add_argument(
"--frame_length",
type=int,
default=1,
help="Input audio frame length in seconds."
)
class BenchmarkRunner(BaseBenchmarkRunner):
def get_dataset_batches(self):
"""Returns a list of batches of input samples.
Each batch should be in the form [x, y], where
x is a numpy array of the input samples for the batch, and
y is a numpy array of the expected model outputs for the batch
Returns:
- dataset: a TF Dataset object
- bypass_data_to_eval: any object type that will be passed unmodified to
          `evaluate_model()`. If not necessary: `None`
Note: script arguments can be accessed using `self._args.attr`
"""
# Input is a numpy array of arbitrary length
# generates a wave of 16kHz for frame_length number of seconds
wave = np.array(
np.sin(np.linspace(-np.pi, np.pi, 16000 * self._args.frame_length)),
dtype=np.float32
)
waves = np.expand_dims(wave, axis=0)
dataset = tf.data.Dataset.from_tensor_slices(waves)
dataset = dataset.repeat()
dataset = dataset.prefetch(tf.data.AUTOTUNE)
return dataset, None
def preprocess_model_inputs(self, data_batch):
"""This function prepare the `data_batch` generated from the dataset.
Returns:
x: input of the model
y: data to be used for model evaluation
Note: script arguments can be accessed using `self._args.attr`
"""
x = data_batch
return x, None
def postprocess_model_outputs(self, predictions, expected):
"""Post process if needed the predictions and expected tensors. At the
minimum, this function transforms all TF Tensors into a numpy arrays.
Most models will not need to modify this function.
Note: script arguments can be accessed using `self._args.attr`
"""
# NOTE : DO NOT MODIFY FOR NOW => We do not measure accuracy right now
return predictions.numpy(), expected.numpy()
def evaluate_model(self, predictions, expected, bypass_data_to_eval):
"""Evaluate result predictions for entire dataset.
This computes overall accuracy, mAP, etc. Returns the
metric value and a metric_units string naming the metric.
Note: script arguments can be accessed using `self._args.attr`
"""
return None, "Top-10 Accuracy"
if __name__ == '__main__':
cmdline_api = CommandLineAPI()
args = cmdline_api.parse_args()
runner = BenchmarkRunner(args)
runner.execute_benchmark()
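# Illustrative sketch of the synthetic input built above: a 16 kHz sine wave
# lasting --frame_length seconds, with a batch dimension prepended.
#   np.sin(np.linspace(-np.pi, np.pi, 16000 * 2)).shape                     # (32000,)
#   np.expand_dims(np.sin(np.linspace(-np.pi, np.pi, 16000 * 2)), 0).shape  # (1, 32000)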
| {
"content_hash": "1eb177d23a7d3c1c9d8eb0bb93ee567b",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 80,
"avg_line_length": 30.14814814814815,
"alnum_prop": 0.6446560196560197,
"repo_name": "tensorflow/tensorrt",
"id": "e1aad59afe125c644098d597dd49ddf664e72311",
"size": "4012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tftrt/benchmarking-python/tf_hub/yamnet/infer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "46221"
},
{
"name": "CMake",
"bytes": "4340"
},
{
"name": "Jupyter Notebook",
"bytes": "2748791"
},
{
"name": "Python",
"bytes": "588488"
},
{
"name": "Shell",
"bytes": "103938"
},
{
"name": "Starlark",
"bytes": "1478"
}
],
"symlink_target": ""
} |
"""Utility methods for accessing and operating on test data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow as tf
from google.protobuf import text_format
import input as seq2species_input
from protos import seq2label_pb2
FLAGS = flags.FLAGS
# Target names included in the example inputs.
TEST_TARGETS = ['test_target_1', 'test_target_2']
def _as_bytes_feature(in_string):
"""Converts the given string to a tf.train.BytesList feature.
Args:
in_string: string to be converted to BytesList Feature.
Returns:
The TF BytesList Feature representing the given string.
"""
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[in_string]))
def create_tmp_train_file(num_examples,
read_len,
characters=seq2species_input.DNA_BASES,
name='test.tfrecord'):
"""Write a test TFRecord of input examples to temporary test directory.
The generated input examples are test tf.train.Example protos, each comprised
of a toy sequence of length read_len and non-meaningful labels for targets in
TEST_TARGETS.
Args:
num_examples: int; number of examples to write to test input file.
read_len: int; length of test read sequences.
characters: string; set of characters from which to construct test reads.
Defaults to canonical DNA bases.
name: string; filename for the test input file.
Returns:
Full path to the generated temporary test input file.
"""
tmp_path = os.path.join(FLAGS.test_tmpdir, name)
with tf.python_io.TFRecordWriter(tmp_path) as writer:
for i in xrange(num_examples):
char = characters[i % len(characters)]
features_dict = {'sequence': _as_bytes_feature(char * read_len)}
for target_name in TEST_TARGETS:
nonsense_label = _as_bytes_feature(str(i))
features_dict[target_name] = nonsense_label
tf_features = tf.train.Features(feature=features_dict)
example = tf.train.Example(features=tf_features)
writer.write(example.SerializeToString())
return tmp_path
def create_tmp_metadata(num_examples, read_len):
"""Write a test Seq2LabelDatasetInfo test proto to temporary test directory.
Args:
num_examples: int; number of example labels to write into test metadata.
read_len: int; length of test read sequences.
Returns:
Full path to the generated temporary test file containing the
Seq2LabelDatasetInfo text proto.
"""
dataset_info = seq2label_pb2.Seq2LabelDatasetInfo(
read_length=read_len,
num_examples=num_examples,
read_stride=1,
dataset_path='test.tfrecord')
for target in TEST_TARGETS:
dataset_info.labels.add(
name=target, values=[str(i) for i in xrange(num_examples)])
tmp_path = os.path.join(FLAGS.test_tmpdir, 'test.pbtxt')
with tf.gfile.GFile(tmp_path, 'w') as f:
f.write(text_format.MessageToString(dataset_info))
return tmp_path
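# Illustrative usage sketch (inside an absl/tf test so FLAGS.test_tmpdir is
# set; the example counts are arbitrary):
#   records_path = create_tmp_train_file(num_examples=4, read_len=100)
#   metadata_path = create_tmp_metadata(num_examples=4, read_len=100)
#   # records_path points at a TFRecord of toy reads, metadata_path at a
#   # Seq2LabelDatasetInfo text proto describing them.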
| {
"content_hash": "9c54c72a3922772143105194b024fbf8",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 79,
"avg_line_length": 33.46153846153846,
"alnum_prop": 0.7050903119868637,
"repo_name": "tombstone/models",
"id": "f02798fb533c99c8545e3a51f56de31b629ca2f4",
"size": "3734",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "research/seq2species/test_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "1365199"
},
{
"name": "GLSL",
"bytes": "976"
},
{
"name": "HTML",
"bytes": "147010"
},
{
"name": "JavaScript",
"bytes": "33208"
},
{
"name": "Jupyter Notebook",
"bytes": "1858048"
},
{
"name": "Makefile",
"bytes": "4763"
},
{
"name": "Python",
"bytes": "7241242"
},
{
"name": "Shell",
"bytes": "102270"
},
{
"name": "TypeScript",
"bytes": "6515"
}
],
"symlink_target": ""
} |
port = 6713
import socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if 0 == sock.connect_ex(('localhost', port)):
sock.close()
raise Exception("port %d is in use, aborting" % (port,))
# Run bitmessage
from subprocess import Popen, DEVNULL
from os import path
curpath = path.dirname(path.abspath(__file__))
bitmessageprocess = Popen(('python', curpath + '/PyBitmessage/src/bitmessagemain.py'), stdout=DEVNULL)
# Connect to its XML-RPC server
import xmlrpc.client
bitmessage = xmlrpc.client.ServerProxy("http://admin:[email protected]:%d" % (port,))
# Implement required functionality
# UTF-8 to base64 and back
from base64 import b64encode, b64decode
def b64enc(s): return b64encode(bytes(s, 'utf-8')).decode('utf-8')
def b64dec(b): return b64decode(b).decode('utf-8')
# Send message to chan
from json import dumps as jsdumps
# FIXME This, obviously, should not be hardcoded.
toaddress = '2cUEpXgRYZAmVqqWd8tzAFLr8UXtCbZy4g'
fromaddress = '2cW9hFSVrVs2AcqwKgkcx6QtL6fgXNa4AP'
def send(subject, body):
if not type(body) is str: body = jsdumps(body)
subject, body = map(b64enc, (subject, body))
return bitmessage.sendMessage(toaddress, fromaddress, subject, body)
# Get a list of all new messages
from json import loads as jsloads
def receive(trash=True):
messages = []
inbox = jsloads(bitmessage.getAllInboxMessages())['inboxMessages']
for msgid in (m['msgid'] for m in inbox):
message = jsloads(bitmessage.getInboxMessageByID(msgid))['inboxMessage'][0]
if 'BM-' + toaddress == message['toAddress']:
fromaddress = message['fromAddress']
subject, body = map(b64dec, (message['subject'], message['message']))
try: body = jsloads(body)
except ValueError: pass
messages.append({'subject': subject, 'body': body, 'fromaddress': fromaddress})
if trash:
bitmessage.trashMessage(msgid)
    if len(messages) > 0: print('transferred incoming messages: ', messages)
return messages
def close():
bitmessageprocess.terminate()
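# Illustrative usage sketch (the package name `message` comes from this file's
# path; the chan addresses above must already be known to PyBitmessage):
#   import message
#   message.send('status', {'uptime': 42})      # dict bodies are JSON-encoded
#   for msg in message.receive(trash=False):
#       print(msg['subject'], msg['body'], msg['fromaddress'])
#   message.close()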
| {
"content_hash": "7d8100aa689c9826eda53ed549ceb248",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 102,
"avg_line_length": 38.24074074074074,
"alnum_prop": 0.699273607748184,
"repo_name": "owlen/iridis",
"id": "27f90d84d9bbbd2651a3218c75826b690f80ad1c",
"size": "2244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "message/__init__.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1883"
},
{
"name": "JavaScript",
"bytes": "16562"
},
{
"name": "Python",
"bytes": "8283"
}
],
"symlink_target": ""
} |
import numpy as np
import numpy_weld_impl
from lazy_op import LazyOpResult
from weld.weldobject import *
class NumpyArrayWeld(LazyOpResult):
"""Summary
Attributes:
dim (TYPE): Description
expr (TYPE): Description
weld_type (TYPE): Description
"""
def __init__(self, expr, weld_type, dim=1):
"""Summary
Args:
expr (TYPE): Description
weld_type (TYPE): Description
dim (int, optional): Description
"""
self.expr = expr
self.weld_type = weld_type
self.dim = dim
def __div__(self, other):
"""Summary
Args:
other (TYPE): Description
Returns:
TYPE: Description
"""
if isinstance(other, LazyOpResult):
other = other.expr
return NumpyArrayWeld(
numpy_weld_impl.div(
self.expr,
other,
self.weld_type
),
self.weld_type
)
def sum(self):
"""Summary
Returns:
TYPE: Description
"""
return NumpyArrayWeld(
numpy_weld_impl.aggr(
self.expr,
"+",
0,
self.weld_type
),
self.weld_type,
0
)
def dot(matrix, vector):
"""
Computes the dot product between a matrix and a vector.
TODO: Make this more generic
Args:
matrix (TYPE): Description
vector (TYPE): Description
"""
matrix_weld_type = None
vector_weld_type = None
if isinstance(matrix, LazyOpResult):
matrix_weld_type = matrix.weld_type
matrix = matrix.expr
elif isinstance(matrix, np.ndarray):
matrix_weld_type = numpy_weld_impl.numpy_to_weld_type_mapping[
str(matrix.dtype)]
if isinstance(vector, LazyOpResult):
vector_weld_type = vector.weld_type
vector = vector.expr
elif isinstance(vector, np.ndarray):
vector_weld_type = numpy_weld_impl.numpy_to_weld_type_mapping[
str(vector.dtype)]
return NumpyArrayWeld(
numpy_weld_impl.dot(
matrix,
vector,
matrix_weld_type,
vector_weld_type),
WeldDouble())
def exp(vector):
"""
Computes a per-element exponent of the passed-in vector.
Args:
vector (TYPE): Description
"""
weld_type = None
if isinstance(vector, LazyOpResult):
weld_type = vector.weld_type
vector = vector.expr
elif isinstance(vector, np.ndarray):
weld_type = numpy_weld_impl.numpy_to_weld_type_mapping[
str(vector.dtype)]
return NumpyArrayWeld(numpy_weld_impl.exp(vector, weld_type), WeldDouble())
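# Illustrative usage sketch (assumes the returned LazyOpResult exposes an
# `evaluate()` method, as elsewhere in grizzly; inputs are plain numpy arrays):
#   m = np.random.rand(4, 3)
#   v = np.random.rand(3)
#   lazy = exp(dot(m, v))       # builds a Weld expression; nothing runs yet
#   # result = lazy.evaluate()  # would materialize the computation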
| {
"content_hash": "2f116b987e43884ccee6684ec4ccf8af",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 79,
"avg_line_length": 24.20689655172414,
"alnum_prop": 0.5448717948717948,
"repo_name": "sppalkia/weld",
"id": "faf0c9cb105bbb4999e3ee080f79d47c14176415",
"size": "2808",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "python/grizzly/grizzly/numpy_weld.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "799"
},
{
"name": "C",
"bytes": "660"
},
{
"name": "C++",
"bytes": "27987"
},
{
"name": "Makefile",
"bytes": "2660"
},
{
"name": "Python",
"bytes": "301176"
},
{
"name": "Rust",
"bytes": "1127035"
},
{
"name": "Shell",
"bytes": "2090"
}
],
"symlink_target": ""
} |
"""
Import utilities
Exported classes:
ImportManager Manage the import process
Importer Base class for replacing standard import functions
BuiltinImporter Emulate the import mechanism for builtin and frozen modules
DynLoadSuffixImporter
"""
from warnings import warnpy3k
warnpy3k("the imputil module has been removed in Python 3.0", stacklevel=2)
del warnpy3k
# note: avoid importing non-builtin modules
import imp ### not available in Jython?
import sys
import __builtin__
# for the DirectoryImporter
import struct
import marshal
__all__ = ["ImportManager","Importer","BuiltinImporter"]
_StringType = type('')
_ModuleType = type(sys) ### doesn't work in Jython...
class ImportManager:
"Manage the import process."
def install(self, namespace=vars(__builtin__)):
"Install this ImportManager into the specified namespace."
if isinstance(namespace, _ModuleType):
namespace = vars(namespace)
# Note: we have no notion of "chaining"
# Record the previous import hook, then install our own.
self.previous_importer = namespace['__import__']
self.namespace = namespace
namespace['__import__'] = self._import_hook
### fix this
#namespace['reload'] = self._reload_hook
def uninstall(self):
"Restore the previous import mechanism."
self.namespace['__import__'] = self.previous_importer
def add_suffix(self, suffix, importFunc):
assert callable(importFunc)
self.fs_imp.add_suffix(suffix, importFunc)
######################################################################
#
# PRIVATE METHODS
#
clsFilesystemImporter = None
def __init__(self, fs_imp=None):
# we're definitely going to be importing something in the future,
# so let's just load the OS-related facilities.
if not _os_stat:
_os_bootstrap()
# This is the Importer that we use for grabbing stuff from the
# filesystem. It defines one more method (import_from_dir) for our use.
if fs_imp is None:
cls = self.clsFilesystemImporter or _FilesystemImporter
fs_imp = cls()
self.fs_imp = fs_imp
# Initialize the set of suffixes that we recognize and import.
# The default will import dynamic-load modules first, followed by
# .py files (or a .py file's cached bytecode)
for desc in imp.get_suffixes():
if desc[2] == imp.C_EXTENSION:
self.add_suffix(desc[0],
DynLoadSuffixImporter(desc).import_file)
self.add_suffix('.py', py_suffix_importer)
def _import_hook(self, fqname, globals=None, locals=None, fromlist=None):
"""Python calls this hook to locate and import a module."""
parts = fqname.split('.')
# determine the context of this import
parent = self._determine_import_context(globals)
# if there is a parent, then its importer should manage this import
if parent:
module = parent.__importer__._do_import(parent, parts, fromlist)
if module:
return module
# has the top module already been imported?
try:
top_module = sys.modules[parts[0]]
except KeyError:
# look for the topmost module
top_module = self._import_top_module(parts[0])
if not top_module:
# the topmost module wasn't found at all.
raise ImportError, 'No module named ' + fqname
# fast-path simple imports
if len(parts) == 1:
if not fromlist:
return top_module
if not top_module.__dict__.get('__ispkg__'):
# __ispkg__ isn't defined (the module was not imported by us),
# or it is zero.
#
# In the former case, there is no way that we could import
# sub-modules that occur in the fromlist (but we can't raise an
# error because it may just be names) because we don't know how
# to deal with packages that were imported by other systems.
#
# In the latter case (__ispkg__ == 0), there can't be any sub-
# modules present, so we can just return.
#
# In both cases, since len(parts) == 1, the top_module is also
# the "bottom" which is the defined return when a fromlist
# exists.
return top_module
importer = top_module.__dict__.get('__importer__')
if importer:
return importer._finish_import(top_module, parts[1:], fromlist)
# Grrr, some people "import os.path" or do "from os.path import ..."
if len(parts) == 2 and hasattr(top_module, parts[1]):
if fromlist:
return getattr(top_module, parts[1])
else:
return top_module
# If the importer does not exist, then we have to bail. A missing
# importer means that something else imported the module, and we have
# no knowledge of how to get sub-modules out of the thing.
raise ImportError, 'No module named ' + fqname
def _determine_import_context(self, globals):
"""Returns the context in which a module should be imported.
The context could be a loaded (package) module and the imported module
will be looked for within that package. The context could also be None,
meaning there is no context -- the module should be looked for as a
"top-level" module.
"""
if not globals or not globals.get('__importer__'):
# globals does not refer to one of our modules or packages. That
# implies there is no relative import context (as far as we are
# concerned), and it should just pick it off the standard path.
return None
# The globals refer to a module or package of ours. It will define
# the context of the new import. Get the module/package fqname.
parent_fqname = globals['__name__']
# if a package is performing the import, then return itself (imports
# refer to pkg contents)
if globals['__ispkg__']:
parent = sys.modules[parent_fqname]
assert globals is parent.__dict__
return parent
i = parent_fqname.rfind('.')
# a module outside of a package has no particular import context
if i == -1:
return None
# if a module in a package is performing the import, then return the
# package (imports refer to siblings)
parent_fqname = parent_fqname[:i]
parent = sys.modules[parent_fqname]
assert parent.__name__ == parent_fqname
return parent
def _import_top_module(self, name):
# scan sys.path looking for a location in the filesystem that contains
# the module, or an Importer object that can import the module.
for item in sys.path:
if isinstance(item, _StringType):
module = self.fs_imp.import_from_dir(item, name)
else:
module = item.import_top(name)
if module:
return module
return None
def _reload_hook(self, module):
"Python calls this hook to reload a module."
# reloading of a module may or may not be possible (depending on the
# importer), but at least we can validate that it's ours to reload
importer = module.__dict__.get('__importer__')
if not importer:
### oops. now what...
pass
# okay. it is using the imputil system, and we must delegate it, but
# we don't know what to do (yet)
### we should blast the module dict and do another get_code(). need to
### flesh this out and add proper docco...
raise SystemError, "reload not yet implemented"
class Importer:
"Base class for replacing standard import functions."
def import_top(self, name):
"Import a top-level module."
return self._import_one(None, name, name)
######################################################################
#
# PRIVATE METHODS
#
def _finish_import(self, top, parts, fromlist):
# if "a.b.c" was provided, then load the ".b.c" portion down from
# below the top-level module.
bottom = self._load_tail(top, parts)
# if the form is "import a.b.c", then return "a"
if not fromlist:
# no fromlist: return the top of the import tree
return top
# the top module was imported by self.
#
# this means that the bottom module was also imported by self (just
# now, or in the past and we fetched it from sys.modules).
#
# since we imported/handled the bottom module, this means that we can
# also handle its fromlist (and reliably use __ispkg__).
# if the bottom node is a package, then (potentially) import some
# modules.
#
# note: if it is not a package, then "fromlist" refers to names in
# the bottom module rather than modules.
# note: for a mix of names and modules in the fromlist, we will
# import all modules and insert those into the namespace of
# the package module. Python will pick up all fromlist names
# from the bottom (package) module; some will be modules that
# we imported and stored in the namespace, others are expected
# to be present already.
if bottom.__ispkg__:
self._import_fromlist(bottom, fromlist)
# if the form is "from a.b import c, d" then return "b"
return bottom
def _import_one(self, parent, modname, fqname):
"Import a single module."
# has the module already been imported?
try:
return sys.modules[fqname]
except KeyError:
pass
# load the module's code, or fetch the module itself
result = self.get_code(parent, modname, fqname)
if result is None:
return None
module = self._process_result(result, fqname)
# insert the module into its parent
if parent:
setattr(parent, modname, module)
return module
def _process_result(self, result, fqname):
ispkg, code, values = result
# did get_code() return an actual module? (rather than a code object)
is_module = isinstance(code, _ModuleType)
# use the returned module, or create a new one to exec code into
if is_module:
module = code
else:
module = imp.new_module(fqname)
### record packages a bit differently??
module.__importer__ = self
module.__ispkg__ = ispkg
# insert additional values into the module (before executing the code)
module.__dict__.update(values)
# the module is almost ready... make it visible
sys.modules[fqname] = module
# execute the code within the module's namespace
if not is_module:
try:
exec code in module.__dict__
except:
if fqname in sys.modules:
del sys.modules[fqname]
raise
# fetch from sys.modules instead of returning module directly.
# also make module's __name__ agree with fqname, in case
# the "exec code in module.__dict__" played games on us.
module = sys.modules[fqname]
module.__name__ = fqname
return module
def _load_tail(self, m, parts):
"""Import the rest of the modules, down from the top-level module.
Returns the last module in the dotted list of modules.
"""
for part in parts:
fqname = "%s.%s" % (m.__name__, part)
m = self._import_one(m, part, fqname)
if not m:
raise ImportError, "No module named " + fqname
return m
def _import_fromlist(self, package, fromlist):
'Import any sub-modules in the "from" list.'
# if '*' is present in the fromlist, then look for the '__all__'
# variable to find additional items (modules) to import.
if '*' in fromlist:
fromlist = list(fromlist) + \
list(package.__dict__.get('__all__', []))
for sub in fromlist:
# if the name is already present, then don't try to import it (it
# might not be a module!).
if sub != '*' and not hasattr(package, sub):
subname = "%s.%s" % (package.__name__, sub)
submod = self._import_one(package, sub, subname)
if not submod:
raise ImportError, "cannot import name " + subname
def _do_import(self, parent, parts, fromlist):
"""Attempt to import the module relative to parent.
This method is used when the import context specifies that <self>
imported the parent module.
"""
top_name = parts[0]
top_fqname = parent.__name__ + '.' + top_name
top_module = self._import_one(parent, top_name, top_fqname)
if not top_module:
# this importer and parent could not find the module (relatively)
return None
return self._finish_import(top_module, parts[1:], fromlist)
######################################################################
#
# METHODS TO OVERRIDE
#
def get_code(self, parent, modname, fqname):
"""Find and retrieve the code for the given module.
parent specifies a parent module to define a context for importing. It
may be None, indicating no particular context for the search.
modname specifies a single module (not dotted) within the parent.
fqname specifies the fully-qualified module name. This is a
(potentially) dotted name from the "root" of the module namespace
down to the modname.
If there is no parent, then modname==fqname.
This method should return None, or a 3-tuple.
* If the module was not found, then None should be returned.
* The first item of the 2- or 3-tuple should be the integer 0 or 1,
specifying whether the module that was found is a package or not.
* The second item is the code object for the module (it will be
executed within the new module's namespace). This item can also
be a fully-loaded module object (e.g. loaded from a shared lib).
* The third item is a dictionary of name/value pairs that will be
inserted into new module before the code object is executed. This
is provided in case the module's code expects certain values (such
as where the module was found). When the second item is a module
object, then these names/values will be inserted *after* the module
has been loaded/initialized.
"""
raise RuntimeError, "get_code not implemented"
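# Illustrative sketch of the get_code() contract described above (a toy
# importer serving one hard-coded module; the name HelloImporter is made up):
#   class HelloImporter(Importer):
#       def get_code(self, parent, modname, fqname):
#           if parent is None and modname == 'hello':
#               code = __builtin__.compile("greeting = 'hi'", '<hello>', 'exec')
#               return 0, code, {'__file__': '<hello>'}
#           return None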
######################################################################
#
# Some handy stuff for the Importers
#
# byte-compiled file suffix character
_suffix_char = __debug__ and 'c' or 'o'
# byte-compiled file suffix
_suffix = '.py' + _suffix_char
def _compile(pathname, timestamp):
"""Compile (and cache) a Python source file.
The file specified by <pathname> is compiled to a code object and
returned.
Presuming the appropriate privileges exist, the bytecodes will be
saved back to the filesystem for future imports. The source file's
modification timestamp must be provided as a Long value.
"""
codestring = open(pathname, 'rU').read()
if codestring and codestring[-1] != '\n':
codestring = codestring + '\n'
code = __builtin__.compile(codestring, pathname, 'exec')
# try to cache the compiled code
try:
f = open(pathname + _suffix_char, 'wb')
except IOError:
pass
else:
f.write('\0\0\0\0')
f.write(struct.pack('<I', timestamp))
marshal.dump(code, f)
f.flush()
f.seek(0, 0)
f.write(imp.get_magic())
f.close()
return code
_os_stat = _os_path_join = None
def _os_bootstrap():
"Set up 'os' module replacement functions for use during import bootstrap."
names = sys.builtin_module_names
join = None
if 'posix' in names:
sep = '/'
from posix import stat
elif 'nt' in names:
sep = '\\'
from nt import stat
elif 'dos' in names:
sep = '\\'
from dos import stat
elif 'os2' in names:
sep = '\\'
from os2 import stat
elif 'mac' in names:
from mac import stat
def join(a, b):
if a == '':
return b
if ':' not in a:
a = ':' + a
if a[-1:] != ':':
a = a + ':'
return a + b
else:
raise ImportError, 'no os specific module found'
if join is None:
def join(a, b, sep=sep):
if a == '':
return b
lastchar = a[-1:]
if lastchar == '/' or lastchar == sep:
return a + b
return a + sep + b
global _os_stat
_os_stat = stat
global _os_path_join
_os_path_join = join
def _os_path_isdir(pathname):
"Local replacement for os.path.isdir()."
try:
s = _os_stat(pathname)
except OSError:
return None
return (s.st_mode & 0170000) == 0040000
def _timestamp(pathname):
"Return the file modification time as a Long."
try:
s = _os_stat(pathname)
except OSError:
return None
return long(s.st_mtime)
######################################################################
#
# Emulate the import mechanism for builtin and frozen modules
#
class BuiltinImporter(Importer):
def get_code(self, parent, modname, fqname):
if parent:
# these modules definitely do not occur within a package context
return None
# look for the module
if imp.is_builtin(modname):
type = imp.C_BUILTIN
elif imp.is_frozen(modname):
type = imp.PY_FROZEN
else:
# not found
return None
# got it. now load and return it.
module = imp.load_module(modname, None, modname, ('', '', type))
return 0, module, { }
######################################################################
#
# Internal importer used for importing from the filesystem
#
class _FilesystemImporter(Importer):
def __init__(self):
self.suffixes = [ ]
def add_suffix(self, suffix, importFunc):
assert callable(importFunc)
self.suffixes.append((suffix, importFunc))
def import_from_dir(self, dir, fqname):
result = self._import_pathname(_os_path_join(dir, fqname), fqname)
if result:
return self._process_result(result, fqname)
return None
def get_code(self, parent, modname, fqname):
# This importer is never used with an empty parent. Its existence is
# private to the ImportManager. The ImportManager uses the
# import_from_dir() method to import top-level modules/packages.
# This method is only used when we look for a module within a package.
assert parent
for submodule_path in parent.__path__:
code = self._import_pathname(_os_path_join(submodule_path, modname), fqname)
if code is not None:
return code
return self._import_pathname(_os_path_join(parent.__pkgdir__, modname),
fqname)
def _import_pathname(self, pathname, fqname):
if _os_path_isdir(pathname):
result = self._import_pathname(_os_path_join(pathname, '__init__'),
fqname)
if result:
values = result[2]
values['__pkgdir__'] = pathname
values['__path__'] = [ pathname ]
return 1, result[1], values
return None
for suffix, importFunc in self.suffixes:
filename = pathname + suffix
try:
finfo = _os_stat(filename)
except OSError:
pass
else:
return importFunc(filename, finfo, fqname)
return None
######################################################################
#
# SUFFIX-BASED IMPORTERS
#
def py_suffix_importer(filename, finfo, fqname):
file = filename[:-3] + _suffix
t_py = long(finfo[8])
t_pyc = _timestamp(file)
code = None
if t_pyc is not None and t_pyc >= t_py:
f = open(file, 'rb')
if f.read(4) == imp.get_magic():
t = struct.unpack('<I', f.read(4))[0]
if t == t_py:
code = marshal.load(f)
f.close()
if code is None:
file = filename
code = _compile(file, t_py)
return 0, code, { '__file__' : file }
class DynLoadSuffixImporter:
def __init__(self, desc):
self.desc = desc
def import_file(self, filename, finfo, fqname):
fp = open(filename, self.desc[1])
module = imp.load_module(fqname, fp, filename, self.desc)
module.__file__ = filename
return 0, module, { }
######################################################################
def _print_importers():
items = sys.modules.items()
items.sort()
for name, module in items:
if module:
print name, module.__dict__.get('__importer__', '-- no importer')
else:
print name, '-- non-existent module'
def _test_revamp():
ImportManager().install()
sys.path.insert(0, BuiltinImporter())
######################################################################
#
# TODO
#
# from Finn Bock:
# type(sys) is not a module in Jython. what to use instead?
# imp.C_EXTENSION is not in Jython. same for get_suffixes and new_module
#
# given foo.py of:
# import sys
# sys.modules['foo'] = sys
#
# ---- standard import mechanism
# >>> import foo
# >>> foo
# <module 'sys' (built-in)>
#
# ---- revamped import mechanism
# >>> import imputil
# >>> imputil._test_revamp()
# >>> import foo
# >>> foo
# <module 'foo' from 'foo.py'>
#
#
# from MAL:
# should BuiltinImporter exist in sys.path or hard-wired in ImportManager?
# need __path__ processing
# performance
# move chaining to a subclass [gjs: it's been nuked]
# deinstall should be possible
# query mechanism needed: is a specific Importer installed?
# py/pyc/pyo piping hooks to filter/process these files
# wish list:
# distutils importer hooked to list of standard Internet repositories
# module->file location mapper to speed FS-based imports
# relative imports
# keep chaining so that it can play nice with other import hooks
#
# from Gordon:
# push MAL's mapper into sys.path[0] as a cache (hard-coded for apps)
#
# from Guido:
# need to change sys.* references for rexec environs
# need hook for MAL's walk-me-up import strategy, or Tim's absolute strategy
# watch out for sys.modules[...] is None
# flag to force absolute imports? (speeds _determine_import_context and
# checking for a relative module)
# insert names of archives into sys.path (see quote below)
# note: reload does NOT blast module dict
# shift import mechanisms and policies around; provide for hooks, overrides
# (see quote below)
# add get_source stuff
# get_topcode and get_subcode
# CRLF handling in _compile
# race condition in _compile
# refactoring of os.py to deal with _os_bootstrap problem
# any special handling to do for importing a module with a SyntaxError?
# (e.g. clean up the traceback)
# implement "domain" for path-type functionality using pkg namespace
# (rather than FS-names like __path__)
# don't use the word "private"... maybe "internal"
#
#
# Guido's comments on sys.path caching:
#
# We could cache this in a dictionary: the ImportManager can have a
# cache dict mapping pathnames to importer objects, and a separate
# method for coming up with an importer given a pathname that's not yet
# in the cache. The method should do a stat and/or look at the
# extension to decide which importer class to use; you can register new
# importer classes by registering a suffix or a Boolean function, plus a
# class. If you register a new importer class, the cache is zapped.
# The cache is independent from sys.path (but maintained per
# ImportManager instance) so that rearrangements of sys.path do the
# right thing. If a path is dropped from sys.path the corresponding
# cache entry is simply no longer used.
#
# My/Guido's comments on factoring ImportManager and Importer:
#
# > However, we still have a tension occurring here:
# >
# > 1) implementing policy in ImportManager assists in single-point policy
# > changes for app/rexec situations
# > 2) implementing policy in Importer assists in package-private policy
# > changes for normal, operating conditions
# >
# > I'll see if I can sort out a way to do this. Maybe the Importer class will
# > implement the methods (which can be overridden to change policy) by
# > delegating to ImportManager.
#
# Maybe also think about what kind of policies an Importer would be
# likely to want to change. I have a feeling that a lot of the code
# there is actually not so much policy but a *necessity* to get things
# working given the calling conventions for the __import__ hook: whether
# to return the head or tail of a dotted name, or when to do the "finish
# fromlist" stuff.
#
| {
"content_hash": "281b6a590da8ad71b2656ceb9ddae6a3",
"timestamp": "",
"source": "github",
"line_count": 735,
"max_line_length": 88,
"avg_line_length": 35.38639455782313,
"alnum_prop": 0.5883732554115882,
"repo_name": "DecipherOne/Troglodyte",
"id": "e1d0a8222aee550dbe41393b6e9d364c841c78c8",
"size": "26009",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Trog Build Dependencies/Python26/Lib/imputil.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "586396"
},
{
"name": "C++",
"bytes": "697696"
},
{
"name": "CSS",
"bytes": "837"
},
{
"name": "Python",
"bytes": "14516232"
},
{
"name": "Shell",
"bytes": "127"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .AuditModel import AuditModel
from django.db import models
class WellActivityCode(AuditModel):
"""
Types of Well Activity.
"""
well_activity_type_code = models.CharField(primary_key=True, max_length=10, editable=False)
description = models.CharField(max_length=100)
display_order = models.PositiveIntegerField()
effective_date = models.DateTimeField(blank=True, null=True)
expiry_date = models.DateTimeField(blank=True, null=True)
class Meta:
db_table = 'well_activity_code'
ordering = ['display_order', 'description']
def __str__(self):
return self.description
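# Illustrative query sketch (hypothetical data; ordering comes from Meta above):
#   active = WellActivityCode.objects.filter(expiry_date__isnull=True)
#   [code.description for code in active]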
| {
"content_hash": "59182e16938d9e7170573a1f2cbd45d1",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 96,
"avg_line_length": 36.57575757575758,
"alnum_prop": 0.7108533554266777,
"repo_name": "rstens/gwells",
"id": "1d7ccd8ee379b883f66278b5558af563be8f4294",
"size": "1207",
"binary": false,
"copies": "1",
"ref": "refs/heads/developer",
"path": "gwells/models/WellActivityCode.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1707"
},
{
"name": "CSS",
"bytes": "71007"
},
{
"name": "Groovy",
"bytes": "91669"
},
{
"name": "HTML",
"bytes": "136711"
},
{
"name": "JavaScript",
"bytes": "193917"
},
{
"name": "PLpgSQL",
"bytes": "49465"
},
{
"name": "Python",
"bytes": "481602"
},
{
"name": "Shell",
"bytes": "52420"
},
{
"name": "TSQL",
"bytes": "3727"
},
{
"name": "Vue",
"bytes": "65417"
}
],
"symlink_target": ""
} |
import pymongo
from pymongo import MongoClient
from collections import defaultdict
import datetime as dt
import pygal
from pygal.style import BlueStyle
from datetime import timedelta
# Used for storage collection
def first_date(collection): # pragma: no cover
"""Returns the first date in the collection"""
for doc in collection.find({}, {'time': 1, '_id': 0}
).sort('time', 1).limit(1):
begin_date = doc['time']
return begin_date
def last_date(collection): # pragma: no cover
"""Returns the first date in the collection"""
for doc in collection.find({}, {'time': 1, '_id': 0}
).sort('time', -1).limit(1):
last_date = doc['time']
return last_date
def min_farmers_date(collection, date): # pragma: no cover
"""Returns the minimum number of farmers online during
the specified date."""
next_date = date + timedelta(days=1)
min_farmers = 0
for doc in collection.find({'time': {'$gte': date, '$lt': next_date}},
{'total_farmers': 1}
).sort('total_farmers', 1).limit(1):
min_farmers = doc['total_farmers']
return min_farmers
def max_farmers_date(collection, date): # pragma: no cover
    """Returns the maximum number of farmers online during
    the specified date."""
next_date = date + timedelta(days=1)
max_farmers = 0
for doc in collection.find({'time': {'$gte': date, '$lt': next_date}},
{'total_farmers': 1}
).sort('total_farmers', -1).limit(1):
max_farmers = doc['total_farmers']
return max_farmers
def min_max_farmers(collection):
"""
Returns a dictionary of the max and min number of
farmers (values) on each day (keys)
:param collection: MongoDB "storage" collection
:return: dictionary with days (keys) and max/min number
of farmers (values)
"""
minmax_dict = defaultdict(list)
begin_date = first_date(collection)
end_date = last_date(collection)
day_count = (end_date - begin_date).days + 1
for single_date in (begin_date + timedelta(days=n)
for n in range(day_count)):
min_farmers = min_farmers_date(collection, single_date)
max_farmers = max_farmers_date(collection, single_date)
minmax_dict[single_date].append(min_farmers)
minmax_dict[single_date].append(max_farmers)
return minmax_dict
def minmax_chart(collection):
"""
Returns a line graph showing the maximum and minimum
number of farmers over time on an hourly basis.
:param collection: MongoDB "storage" collection
:return: Pygal line chart showing max/min number of
farmers over time
"""
minmax_title = 'Min/Max Number of Farmers Over Time'
minmax_chart = pygal.Line(width=1000, height=600, explicit_size=True,
title=minmax_title, x_label_rotation=45,
style=BlueStyle, disable_xml_declaration=True)
minmax_dict = min_max_farmers(collection)
dates = []
min_counts = []
max_counts = []
for key in minmax_dict.keys():
dates.append(key)
dates.sort()
for date in dates:
min_counts.append(minmax_dict[date][0])
max_counts.append(minmax_dict[date][1])
minmax_chart.x_labels = map(lambda d: d.strftime('%Y-%m-%d'), dates)
minmax_chart.add('min online farmers', min_counts)
minmax_chart.add('max online farmers', max_counts)
return minmax_chart
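# Illustrative usage sketch (assumes a MongoDB "storage" collection holding
# 'time' and 'total_farmers' fields, as the queries above expect; the database
# and collection names are made up):
#   client = MongoClient()
#   chart = minmax_chart(client['driveshare']['storage'])
#   svg = chart.render()  # pygal returns the SVG markup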
| {
"content_hash": "8718f47a0facd30402ff96ee94601bf0",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 75,
"avg_line_length": 36.25252525252525,
"alnum_prop": 0.6132627472833658,
"repo_name": "Storj/driveshare-graph",
"id": "d35e4fa6332d6267bcaf5ddc42597edf9e7a7bf6",
"size": "3589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "driveshare_graph/minmax.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1270"
},
{
"name": "Python",
"bytes": "44486"
},
{
"name": "Shell",
"bytes": "215"
}
],
"symlink_target": ""
} |
"""Module that defines the Atom Models that back the Search Views"""
import six
from collections import deque
from atom.api import Atom, Typed, List, Range, Dict, observe, Str, Bool, Int
from dataportal.broker import DataBroker
from metadatastore.api import Document
import metadatastore
from mongoengine.connection import ConnectionError
from pymongo.errors import AutoReconnect
from functools import wraps
import replay
from ..persist import History
import logging
logger = logging.getLogger(__name__)
class WatchForHeadersModel(Atom):
""" Class that defines the model for a UI component that watches the
databroker for new scans
Attributes
----------
    auto_update : atom.Bool
        True: automatically watch the data broker for new scans
    update_rate : atom.Int
        The rate at which the current header will be checked on the data broker
    header : metadatastore.api.Document
        The most recent header found on the data broker
"""
auto_update = Bool(False)
update_rate = Int(1000)
header = Typed(Document)
search_info = Str("No search performed")
history = Typed(History)
def __init__(self, history, **kwargs):
super(WatchForHeadersModel, self).__init__()
self.history = history
try:
state = history.get('WatchForHeadersModel')
except IndexError:
# no entries for 'WatchForHeadersModel' yet
state = {}
else:
state.pop('history', None)
if state:
self.__setstate__(state)
@observe('update_rate')
def save_state(self, changed):
logger.debug('history in WatchForHeadersModel.save_state: '
'{}'.format(self.history))
replay.core.save_state(self.history, 'WatchForHeadersModel',
self.__getstate__())
def check_header(self):
try:
header = DataBroker[-1]
except IndexError:
self.search_info = "No runs found."
header = None
return
else:
self.search_info = "Run Found."
if (not self.header or self.header.run_start_uid != header.run_start_uid):
self.header = header
class DisplayHeaderModel(Atom):
"""Class that defines the model for displaying header information
Attributes
----------
selected : metadatastore.api.Document
"""
header = Typed(Document)
header_as_dict = Dict()
header_keys = List()
def new_run_header(self, changed):
"""Observer function for a new run header"""
self.header = changed['value']
@observe('header')
def header_changed(self, changed):
self.header_as_dict = {}
self.header_keys = []
if self.header is None:
return
key_labels = [['KEY NAME', 'DATA LOCATION', 'PV NAME']]
header_dict = dict(self.header.items())
event_descriptors = header_dict.pop('event_descriptors', [])
data_keys = self._format_for_enaml(event_descriptors)
sample = header_dict.pop('sample', {})
beamline_config = header_dict.pop('beamline_config', {})
# # unpack the 'ids' fields
# ids = header_dict.pop('ids', {})
# for id_name, id_val in ids.items():
# header_dict[id_name] = id_val
data_keys = sorted(data_keys, key=lambda x: x[0].lower())
header_keys = key_labels + data_keys
print('header_dict = {}'.format(header_dict))
# set the summary dictionary
self.header_as_dict = header_dict
# set the keys dictionary
self.header_keys = header_keys
def _format_for_enaml(self, event_descriptors):
"""
Format the data keys into a single list that enaml will unpack into a
grid of N rows by 3 columns
"""
data_keys = []
for evd in event_descriptors:
dk = evd.data_keys
for data_key, data_key_dict in six.iteritems(dk):
print('data_key = {}\ndata_key_dict = {}'.format(
data_key, data_key_dict))
while data_key in data_keys:
data_key += '_1'
name = data_key
src = data_key_dict['source']
loc = data_key_dict.get('external', 'metadatastore')
data_keys.append([name, loc, src])
return data_keys
def _catch_connection_issues(func):
@wraps(func)
def inner(self, *args, **kwargs):
try:
func(self, *args, **kwargs)
except ConnectionError:
self.search_info = (
"Database {} not available at {} on port {}").format(
metadatastore.conf.connection_config['database'],
metadatastore.conf.connection_config['host'],
metadatastore.conf.connection_config['port'])
except AutoReconnect:
self.search_info = (
"Connection to database [[{}]] on [[{}]] was lost".format(
metadatastore.conf.connection_config['database'],
metadatastore.conf.connection_config['host']))
return inner
class _BrokerSearch(Atom):
"""ABC for broker searching with Atom classes
Attributes
----------
headers : atom.List
The list of headers returned from the DataBroker
header : metadatastore.api.Document
The currently selected header
connection_is_active : atom.Bool
True: Connection to the DataBroker is active
search_info : atom.Str
Potentially informative string that gets displayed on the UI regarding
the most recently performed DataBroker search
"""
search_info = Str()
headers = List()
header = Typed(Document)
history = Typed(History)
def __init__(self):
with self.suppress_notifications():
self.header = None
class GetLastModel(_BrokerSearch):
"""Class that defines the model for the 'get last N datasets view'
Attributes
----------
num_to_retrieve : range, min=1
"""
num_to_retrieve = Range(low=1)
def __init__(self, history):
super(GetLastModel, self).__init__()
self.header = None
self.history = history
try:
state = history.get('GetLastModel')
except IndexError:
            # no entries for 'GetLastModel' yet
state = {}
else:
state.pop('history', None)
if state:
self.__setstate__(state)
@observe('num_to_retrieve')
@_catch_connection_issues
def num_changed(self, changed):
self.headers = DataBroker[-self.num_to_retrieve:]
self.search_info = "Requested: {}. Found: {}".format(
self.num_to_retrieve, len(self.headers))
        logger.debug('history in GetLastModel.num_changed: '
                     '{}'.format(self.history))
replay.core.save_state(self.history, 'GetLastModel',
{'num_to_retrieve': self.num_to_retrieve})
class ScanIDSearchModel(_BrokerSearch):
"""
Class that defines the model for a UI component that searches the
databroker for a specific scan
Attributes
----------
scan_id : atom.Int
"""
scan_id = Int(1)
def __init__(self, history):
super(ScanIDSearchModel, self).__init__()
self.header = None
self.search_info = "Searching by Scan ID"
self.history = history
try:
state = history.get('ScanIDSearchModel')
except IndexError:
            # no entries for 'ScanIDSearchModel' yet
state = {}
else:
state.pop('history', None)
if state:
self.__setstate__(state)
@observe('scan_id')
@_catch_connection_issues
def scan_id_changed(self, changed):
self.headers = DataBroker.find_headers(scan_id=self.scan_id)
self.search_info = "Requested scan id: {}. Found: {}".format(
self.scan_id, len(self.headers))
@observe('scan_id')
def save_state(self, changed):
logger.debug('history in ScanIDSearchModel.save_state: '
'{}'.format(self.history))
replay.core.save_state(self.history, 'ScanIDSearchModel',
{'scan_id': self.scan_id})
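# Illustrative sketch (not part of the original module): how an application might
# drive these models, given an already-constructed replay History instance.
def _example_poll(history):  # hypothetical helper for demonstration only
    last_five = GetLastModel(history)
    last_five.num_to_retrieve = 5      # triggers num_changed via @observe
    watcher = WatchForHeadersModel(history)
    watcher.check_header()             # poll the DataBroker once
    return last_five.headers, watcher.header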
| {
"content_hash": "e756331791003341621691161ef81e3b",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 82,
"avg_line_length": 32.98795180722892,
"alnum_prop": 0.584855125395666,
"repo_name": "ericdill/replay",
"id": "c5e9b57dc422835a9a80f6479c0bc389c0bd05b5",
"size": "8214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "replay/search/model.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "103665"
},
{
"name": "Shell",
"bytes": "743"
}
],
"symlink_target": ""
} |
from distutils.core import setup
setup(name="geographiclib",
version="1.40",
description=
"A translation of the GeographicLib::Geodesic class to Python",
author="Charles Karney",
author_email="[email protected]",
url="http://geographiclib.sourceforge.net/",
packages=["geographiclib"],
data_files=[],
license="MIT",
keywords="gis geographical earth distance geodesic",
classifiers=["Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: GIS",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
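# Usage sketch (comments only, so nothing executes during packaging): once
# installed, the geodesic solver can be exercised like this:
#
#   from geographiclib.geodesic import Geodesic
#   g = Geodesic.WGS84.Inverse(-41.32, 174.81, 40.96, -5.50)
#   print(g['s12'])  # geodesic distance in metres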
| {
"content_hash": "c157ad9664064b94bf5c926099ded917",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 82,
"avg_line_length": 41.65217391304348,
"alnum_prop": 0.5678496868475992,
"repo_name": "Navendis/geographiclib",
"id": "8cdad36695dc126745c411353269e97d5df5734e",
"size": "1307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "102640"
},
{
"name": "C#",
"bytes": "208230"
},
{
"name": "C++",
"bytes": "2443489"
},
{
"name": "FORTRAN",
"bytes": "114295"
},
{
"name": "Java",
"bytes": "142011"
},
{
"name": "Makefile",
"bytes": "9051"
},
{
"name": "Matlab",
"bytes": "133472"
},
{
"name": "Objective-C",
"bytes": "171"
},
{
"name": "Python",
"bytes": "82390"
},
{
"name": "Shell",
"bytes": "327910"
},
{
"name": "Visual Basic",
"bytes": "36914"
}
],
"symlink_target": ""
} |
'''chromium apport hook
/usr/share/apport/package-hooks/chromium-browser.py
Copyright (c) 2010, Fabien Tassin <[email protected]>
Copyright (c) 2014, Canonical
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version. See http://www.gnu.org/copyleft/gpl.html for
the full text of the license.
'''
# pylint: disable=line-too-long, missing-docstring, invalid-name
from __future__ import print_function
import os, sys, getopt, codecs
import time
import apport.hookutils
import subprocess
import json
HOME = os.getenv("HOME")
PACKAGE = 'chromium-browser'
RELATED_PACKAGES = [
PACKAGE,
'chromium-browser-l10n',
'chromium-browser-inspector',
'chromium-browser-dbg',
'chromium-codecs-ffmpeg',
'chromium-codecs-ffmpeg-extra',
'libvpx0',
'libgtk2.0-0',
'nspluginwrapper',
# various plugins
'adobe-flash-player',
'chromiumflashplugin',
'pepperflashplugin-nonfree',
'pepflashplugin-nonfree',
'flashplugin-installer',
'rhythmbox-plugins',
'totem-mozilla',
'icedtea6-plugin',
'moonlight-plugin-chromium',
'sun-java6-bin',
'acroread',
'google-talkplugin',
# chrome
'google-chrome-unstable',
'google-chrome-beta',
'google-chrome-stable',
]
def installed_version(report, pkgs):
report['RelatedPackagesPolicy'] = ''
for pkg in pkgs:
script = subprocess.Popen(['apt-cache', 'policy', pkg], stdout=subprocess.PIPE)
report['RelatedPackagesPolicy'] += str(script.communicate()[0]) + "\n"
def gconf_values(report, keys):
report['gconf-keys'] = ''
for key in keys:
try:
script = subprocess.Popen(['gconftool-2', '--get', key], stdout=subprocess.PIPE)
except OSError:
report['gconf-keys'] = "gconftool-2 not found"
return
out = str(script.communicate()[0])
if out == "":
out = "**unset**\n"
report['gconf-keys'] += key + " = " + out
def loadavg_processes_running_percent():
with open("/proc/loadavg") as loadavg:
for line in loadavg:
l1, l5, l10, runfrac, maxpid = line.split()
running_count, total_count = map(int, runfrac.split("/"))
percent = 100.0 * running_count / total_count
return "#" * int(percent) + " %0.1f%%" % (percent,)
def get_user_profile_dir():
profiledir = HOME + "/.config/chromium/Default"
# see if report['ProcCmdline'] contains a --user-data-dir=xxx and use it if it still exists
return profiledir
def user_prefs(report, filename):
with open(filename, 'r') as f:
entry = json.load(f)
report['ChromiumPrefs'] = ''
if 'browser' in entry and 'check_default_browser' in entry['browser']:
report['ChromiumPrefs'] += "browser/check_default_browser = " + \
str(entry['browser']['check_default_browser']) + "\n"
else:
report['ChromiumPrefs'] += "browser/check_default_browser = **unset** (no such key yet)\n"
if 'theme' in entry['extensions']:
if 'use_system' in entry['extensions']['theme']:
report['ChromiumPrefs'] += "extensions/theme/use_system = " + \
str(entry['extensions']['theme']['use_system']) + "\n"
else:
report['ChromiumPrefs'] += "extensions/theme/use_system = **unset** (no such key)\n"
    # list extensions and their versions
report['ChromiumPrefs'] += "extensions/settings =\n"
if 'settings' in entry['extensions']:
for ext in list(entry['extensions']['settings'].keys()):
report['ChromiumPrefs'] += " - '" + ext + "'\n"
if 'manifest' in entry['extensions']['settings'][ext]:
for k in ['name', 'description', 'version', 'update_url']:
report['ChromiumPrefs'] += " manifest/%s = %s\n" % (k, "'" + entry['extensions']['settings'][ext]['manifest'][k] + "'" if k in entry['extensions']['settings'][ext]['manifest'] else "*undef*")
else:
report['ChromiumPrefs'] += " manifest/* = *undef*\n"
for k in ['blacklist', 'state']:
if k in entry['extensions']['settings'][ext]:
report['ChromiumPrefs'] += " %s = %s\n" % (k, repr(entry['extensions']['settings'][ext][k]))
else:
report['ChromiumPrefs'] += " (no entry found in the Preferences file)"
# list plugins
# (for some reason, this key is not populated until something is (manually?)
# changed in about:plugins)
report['DetectedPlugins'] = ""
if 'plugins' in entry and 'plugins_list' in entry['plugins']:
for plugin in entry['plugins']['plugins_list']:
report['DetectedPlugins'] += "=> " + plugin['name'] + "\n" + \
" - enabled = " + str(plugin['enabled']) + "\n"
if 'path' in plugin:
report['DetectedPlugins'] += " - path = " + plugin['path'] + "\n"
else:
report['DetectedPlugins'] += "(no entry found in the Preferences file)"
def list_installed_plugins(report):
# $SRC/webkit/glue/plugins/plugin_list_posix.cc
# 1/ MOZ_PLUGIN_PATH env variable
# 2/ ~/.mozilla/plugins
# 3/ /usr/lib/browser-plugins /usr/lib/mozilla/plugins /usr/lib/firefox/plugins
# /usr/lib/xulrunner-addons/plugins
# and on x64 systems (unless /usr/lib64 is a symlink to /usr/lib):
# /usr/lib64/browser-plugins /usr/lib64/mozilla/plugins /usr/lib64/firefox/plugins
# /usr/lib64/xulrunner-addons/plugins
# It's not clear if $SRC/third_party/WebKit/WebCore/plugins/PluginDatabase.cpp
# is also used (if it is, we need the zillions dirs from PluginDatabase::defaultPluginDirectories()
report['InstalledPlugins'] = ''
dirs = {}
for plugindir in (HOME + "/.mozilla/plugins", "/usr/lib/browser-plugins", "/usr/lib/mozilla/plugins", "/usr/lib/firefox/plugins", "/usr/lib/xulrunner-addons/plugins", "/usr/lib64/browser-plugins", "/usr/lib64/mozilla/plugins", "/usr/lib64/firefox/plugins", "/usr/lib64/xulrunner-addons/plugins"):
if os.path.exists(plugindir):
d = os.path.realpath(plugindir)
if d not in dirs:
dirs[d] = True
report['InstalledPlugins'] += plugindir + ":\n"
for ent in os.listdir(d):
filename = os.path.join(d, ent)
report['InstalledPlugins'] += " => " + ent + "\n"
while os.path.islink(filename):
filename2 = os.readlink(filename)
if filename2 == filename:
report['InstalledPlugins'] += " - (symlink loop, abort)\n"
break
if not os.path.exists(filename):
report['InstalledPlugins'] += " - broken symlink to " + filename2 + "\n"
break
filename = os.path.normpath(os.path.join(os.path.dirname(filename), filename2))
report['InstalledPlugins'] += " - symlink to " + filename + "\n"
if os.path.exists(filename):
st = os.stat(filename)
report['InstalledPlugins'] += " (size: " + \
str(st.st_size) + " bytes, mtime: " + time.ctime(st.st_mtime) + ")\n"
report['InstalledPlugins'] += "\n"
def get_envs(envs):
return "\n".join(repr(os.getenv(env) or "None") for env in envs) + "\n"
def add_info(report, hookui, userdir=None):
apport.hookutils.attach_related_packages(report, RELATED_PACKAGES)
installed_version(report, RELATED_PACKAGES)
# Allow reports from PPAs
if not apport.packaging.is_distro_package(PACKAGE):
report['ThirdParty'] = 'True'
report['CrashDB'] = 'ubuntu'
customizations_dir = '/etc/chromium-browser/customizations'
for filename in os.listdir(customizations_dir):
apport.hookutils.attach_file_if_exists(report, os.path.join(customizations_dir, filename), key='etcconfigc'+filename.replace("-", "").replace("_", "").replace(".", ""))
apport.hookutils.attach_file_if_exists(report, os.path.join(customizations_dir, 'default'), key='etcconfigdefault')
    apport.hookutils.attach_file_if_exists(report, os.path.join(HOME, '.local/share/applications', PACKAGE + '.desktop'))
gconf_values(report, ['/desktop/gnome/applications/browser/exec', '/desktop/gnome/url-handlers/https/command', '/desktop/gnome/url-handlers/https/enabled', '/desktop/gnome/url-handlers/http/command', '/desktop/gnome/url-handlers/http/enabled', '/desktop/gnome/session/required_components/windowmanager', '/apps/metacity/general/compositing_manager', '/desktop/gnome/interface/icon_theme', '/desktop/gnome/interface/gtk_theme'])
if userdir:
user_dir = userdir
else:
user_dir = get_user_profile_dir()
user_prefs(report, user_dir + "/Preferences")
list_installed_plugins(report)
# PCI video
report['Lspci'] = apport.hookutils.command_output(["lspci", "-mmkv"])
report['Load-Avg-1min'] = apport.hookutils.command_output(["cut", "-d ", "-f1", "/proc/loadavg"])
report['Load-Processes-Running-Percent'] = loadavg_processes_running_percent()
# DE
report['Desktop-Session'] = get_envs(['DESKTOP_SESSION', 'XDG_CONFIG_DIRS', 'XDG_DATA_DIRS'])
# Env
report['Env'] = get_envs(['MOZ_PLUGIN_PATH', 'LD_LIBRARY_PATH'])
# Disk usage
report['DiskUsage'] = apport.hookutils.command_output(['df', '-Th', '/home', '/tmp', '/run/shm', '/etc/chromium-browser', user_dir])
apport.hookutils.attach_hardware(report)
try:
apport.hookutils.attach_drm_info(report)
except TypeError as exc:
print("buggy hookutils", exc)
apport.hookutils.attach_dmesg(report)
## DEBUGING ##
def main():
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
try:
opts, _ = getopt.getopt(sys.argv[1:], "-u:", ['user-dir='])
except getopt.GetoptError as err:
print(str(err))
sys.exit(2)
userdir = None
for o, a in opts:
if o in ("-u", "--user-dir"):
userdir = a
else:
assert False, "unhandled option"
report = {}
add_info(report, None, userdir=userdir)
for key in report:
print('[%s]\n%s\n' % (key, report[key]))
if __name__ == '__main__':
main()
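# Manual debugging sketch (comments only): the hook can be exercised outside of
# apport with an explicit profile directory, for example
#   python chromium-browser.py --user-dir=$HOME/.config/chromium/Default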
| {
"content_hash": "8fd9d279c029b22d00e328be87f2b632",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 431,
"avg_line_length": 43.02448979591837,
"alnum_prop": 0.6032634474907504,
"repo_name": "mxOBS/deb-pkg_trusty_chromium-browser",
"id": "91ee1d3110fd73f77e60b4f7530d209d188383fe",
"size": "10541",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "debian/apport/chromium-browser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "8402"
},
{
"name": "Assembly",
"bytes": "230130"
},
{
"name": "Batchfile",
"bytes": "34966"
},
{
"name": "C",
"bytes": "12435900"
},
{
"name": "C++",
"bytes": "264378706"
},
{
"name": "CMake",
"bytes": "27829"
},
{
"name": "CSS",
"bytes": "795726"
},
{
"name": "Dart",
"bytes": "74976"
},
{
"name": "Emacs Lisp",
"bytes": "2360"
},
{
"name": "Go",
"bytes": "31783"
},
{
"name": "Groff",
"bytes": "5283"
},
{
"name": "HTML",
"bytes": "19491230"
},
{
"name": "Java",
"bytes": "7637875"
},
{
"name": "JavaScript",
"bytes": "12723911"
},
{
"name": "LLVM",
"bytes": "1169"
},
{
"name": "Logos",
"bytes": "6893"
},
{
"name": "Lua",
"bytes": "14392"
},
{
"name": "Makefile",
"bytes": "208315"
},
{
"name": "Objective-C",
"bytes": "1460032"
},
{
"name": "Objective-C++",
"bytes": "7760068"
},
{
"name": "PLpgSQL",
"bytes": "175360"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "427212"
},
{
"name": "Python",
"bytes": "11447382"
},
{
"name": "Ragel in Ruby Host",
"bytes": "104846"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "1208350"
},
{
"name": "Standard ML",
"bytes": "4965"
},
{
"name": "nesC",
"bytes": "18335"
}
],
"symlink_target": ""
} |
import os
import common
import subprocess
import re
import action
import logging
import time
# Global
sys_logger = None
iostat_file_ext = "_iostat_block_devices.plt"
network_io_file_ext = "_network_devices.plt"
system_metrics_interval = '5'
docker_stat_header = "NAME CONTAINER CPU % MEM %"
# Bash Commands
date_cmd = ['date', '-u', '+%Y-%m-%dT%H:%M:%SZ']
top_cmd = ['top', '-b', '-i', '-d', system_metrics_interval]
top_get_header = ['top', '-b', '-n', '1', '-i']
iostat_cmd = ['iostat', '-dtx', system_metrics_interval]
iostat_get_header = ['iostat', '-dtx']
sar_get_header = {'cpu': ['sar', '-u', '1', '1'],
'task': ['sar', '-w', '1', '1'],
'nfs': ['sar', '-n', 'NFS', '1', '1'],
'mem': ['sar', '-r', '1', '1'],
'network_io': ['sar', '-n', 'DEV', '1', '1']
}
docker_version = ['docker', '-v']
docker_command = "( date -u +'%Y-%m-%dT%H:%M:%SZ' && docker stats -a --format " \
"'table {{.Name}}\t{{.Container}}\t{{.CPUPerc}}\t{{.MemPerc}}\t' --no-stream )"
sar_cmd = ['sar', '-n', 'DEV', '-n', 'NFS', '-u', '-r', '-w', system_metrics_interval]
get_pid = ["ps", "-eo", "pid,cmd,%cpu", "--sort=-%cpu"]
grep2 = ["grep", "-v", "grep"]
awk = ["awk", "FNR == 1 {print $1}"]
def loggersetup(filename):
"""
Logger object setup for capturing system metric gather activity in a given debug file filename
"""
if os.path.isfile(filename):
os.remove(filename)
logger = logging.getLogger("system_metrics")
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(filename)
fh.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
formatter = logging.Formatter('%(asctime)s %(levelname)-6s {%(filename)s %(lineno)d} %(message)-100s',
'%Y-%m-%d %H:%M:%S')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
logger.addHandler(fh)
logger.addHandler(ch)
logger.propagate = False
return logger
def top_gather(self):
"""
    This method implements the thread routine for querying TOP output at a fixed interval. If any test is in
    running state on this agent, this routine appends the new stat values to that test's top_output.txt
"""
running_queue = {}
# execute top batch command
p1 = subprocess.Popen(top_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while True:
output = p1.stdout.readline()
if output == '' and p1.poll() is not None:
break
if output:
# Read new output
output = output.rstrip()
# if output line starts with "top" then it need to dump current timestamp value. It also dump list of test
# currently in running state in seperate list. As this is the new output sequence, we want to start writing
# subsequent logs for currently running tests. Hence it won't check running test list until new output
# sequence
if output.startswith('top'):
p2 = subprocess.Popen(date_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timestamp = p2.communicate()[0].strip()
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
top_file = test.statsdir + "top_output.txt"
if os.path.isfile(top_file):
                        # If the file exists, append this new output sequence to it with the current TS
with open(top_file, 'a') as fh:
fh.write("\n" + timestamp + "\n")
fh.write(output + "\n")
sys_logger.debug("Generating top output for test : " + str(testid))
else:
                        # If the file doesn't exist, this is a new test just started on the agent; create
                        # top_output.txt and dump this new output sequence into it with the current TS
with open(top_file, 'w') as fh:
fh.write(timestamp + "\n")
fh.write(output + "\n")
sys_logger.debug("Starting top output for test : " + str(testid))
continue
            # Continue writing the output sequence to the files of the running tests captured at the start of this sequence
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
top_file = test.statsdir + "top_output.txt"
if os.path.isfile(top_file):
with open(top_file, 'a') as fh:
fh.write(output + "\n")
def iostat_gather(self):
"""
    This method implements the thread routine for querying IOSTAT output at a fixed interval. If any test is in
    running state on this agent, this routine appends to the separate file it creates for each IO device of that test
"""
iostat_header = None
device_header = 0
device_list = []
p1 = subprocess.Popen(iostat_get_header, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = p1.communicate()[0].strip()
output = output.split("\n")
# Check header and device list from iostat output
for header in output:
header = header.strip()
if header.startswith("Device"):
header = re.sub(' +', ' ', header)
header = header.replace(' ', ',')
header = header.replace("Device:", "Time")
iostat_header = header
device_header = 1
continue
if device_header:
header = re.sub(' +', ' ', header)
header = header.split(' ')
device_list.append(header[0])
# Start IOSTAT batch command for continued output
p2 = subprocess.Popen(iostat_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
running_queue = {}
timestamp = 0
try:
while True:
output = p2.stdout.readline()
if output == '' and p2.poll() is not None:
break
if output:
output = output.strip()
output = re.sub(' +', ' ', output)
output = output.replace(' ', ',')
# if output line starts with "Device" then it need to dump current timestamp value. It also dump list
# of test currently in running state in seperate list. As this is the new output sequence, we want to
# start writing subsequent logs for currently running tests. Hence it won't check running test list
# until new output sequence
if output.startswith("Device"):
p3 = subprocess.Popen(date_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timestamp = p3.communicate()[0].strip()
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
continue
output = output.split(",")
output_device = output[0]
output[0] = str(timestamp)
output = ",".join(output)
                # Continue writing the output sequence to the files of the running tests captured at the start of this sequence
if output_device in device_list:
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
iostat_file_name = output_device + iostat_file_ext
iostat_file = test.statsdir + iostat_file_name
if os.path.isfile(iostat_file):
                            # If the file exists, append this new output sequence to it with the current TS
sys_logger.debug("Generating iostat output in " + iostat_file_name + " for test : "
+ str(testid))
with open(iostat_file, 'a') as fh:
fh.write(output + "\n")
else:
                            # If the file doesn't exist, this is a new test just started on the agent; create the
                            # file and dump the IOSTAT header into it with the current TS
with open(iostat_file, 'w') as fh:
sys_logger.debug("Starting " + iostat_file_name + " for test : " + str(testid))
fh.write(iostat_header + "\n")
fh.write(output + "\n")
except Exception as e:
sys_logger.error(e)
def sar_gather(self):
header_row = 2 # In SAR output header is in 2nd row, modify accordingly
# getting cpu.plt header
p = subprocess.Popen(sar_get_header['cpu'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:3]
cpu_plt_header = ",".join(output)
# getting task.plt header
p = subprocess.Popen(sar_get_header['task'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:2]
task_plt_header = ",".join(output)
# getting mem.plt header
p = subprocess.Popen(sar_get_header['mem'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:2]
mem_plt_header = ",".join(output)
# getting nfs.plt header
p = subprocess.Popen(sar_get_header['nfs'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
output = output.split("\n")[header_row]
output = re.sub(' +', ' ', output)
output = output.split(" ")
del output[:2]
nfs_plt_header = ",".join(output)
# getting network_io.plt header
p = subprocess.Popen(sar_get_header['network_io'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
output = p.communicate()[0].strip()
header = output.split("\n")[header_row]
header = re.sub(' +', ' ', header)
header = header.split(" ")
del header[:3]
net_io_plt_header = ",".join(header)
# starting SAR gather
p = subprocess.Popen(sar_cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Flags for marking the output type based on header in output sequence
print_cpu_plt = 0
print_mem_plt = 0
print_task_plt = 0
print_net_io_plt = 0
print_nfs_plt = 0
while True:
output = p.stdout.readline()
if output == '' and p.poll() is not None:
break
if output:
output = output.strip()
output = re.sub(' +', ' ', output)
output = output.replace(' ', ',')
if cpu_plt_header in output:
                # Set the CPU usage output flag to print subsequent lines to cpu.plt. This is also the start of a
                # new output sequence, so dump the current timestamp value
print_cpu_plt = 1
p3 = subprocess.Popen(date_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
timestamp = p3.communicate()[0].strip()
continue
elif task_plt_header in output:
                # Set the task/context-switch output flag to print subsequent lines to task.plt
print_task_plt = 1
continue
elif nfs_plt_header in output:
                # Set the NFS activity output flag to print subsequent lines to nfs.plt
print_nfs_plt = 1
continue
elif mem_plt_header in output:
                # Set the memory utilization output flag to print subsequent lines to mem.plt
print_mem_plt = 1
continue
elif net_io_plt_header in output:
                # Set the network IO activity output flag to print subsequent lines to a separate file for each IO device
print_net_io_plt = 1
continue
elif output == "":
                # Set all flags to zero when a blank line occurs; this marks the end of the previously set flag
print_cpu_plt = 0
print_mem_plt = 0
print_task_plt = 0
print_net_io_plt = 0
print_nfs_plt = 0
continue
            # Dump the list of tests currently running on the agent into running_queue
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
# Print appropriate plt files based on output flags
if print_cpu_plt:
output = output.split(",")
del output[:3]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
cpu_plt_file = test.statsdir + "cpu.plt"
if os.path.isfile(cpu_plt_file):
sys_logger.debug("Generating cpu.plt for test : " + str(testid))
with open(cpu_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting cpu.plt for test : " + str(testid))
with open(cpu_plt_file, 'w') as fh:
header = "Time," + cpu_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_task_plt:
output = output.split(",")
del output[:2]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
task_plt_file = test.statsdir + "task.plt"
if os.path.isfile(task_plt_file):
sys_logger.debug("Generating task.plt for test : " + str(testid))
with open(task_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting task.plt for test : " + str(testid))
with open(task_plt_file, 'w') as fh:
header = "Time," + task_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_mem_plt:
output = output.split(",")
del output[:2]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
mem_plt_file = test.statsdir + "mem.plt"
if os.path.isfile(mem_plt_file):
sys_logger.debug("Generating mem.plt for test : " + str(testid))
with open(mem_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting mem.plt for test : " + str(testid))
with open(mem_plt_file, 'w') as fh:
header = "Time," + mem_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_nfs_plt:
output = output.split(",")
del output[:2]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
nfs_plt_file = test.statsdir + "nfs.plt"
if os.path.isfile(nfs_plt_file):
sys_logger.debug("Generating nfs.plt for test : " + str(testid))
with open(nfs_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting nfs.plt for test : " + str(testid))
with open(nfs_plt_file, 'w') as fh:
header = "Time," + nfs_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
if print_net_io_plt:
output = output.split(",")
del output[:2]
device = output[0]
del output[:1]
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
net_io_plt_file_name = device + network_io_file_ext
net_io_plt_file = test.statsdir + net_io_plt_file_name
if os.path.isfile(net_io_plt_file):
sys_logger.debug("Generating " + net_io_plt_file_name + " for test : " + str(testid))
with open(net_io_plt_file, 'a') as fh:
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
else:
sys_logger.debug("Starting " + net_io_plt_file_name + " for test : " + str(testid))
with open(net_io_plt_file, 'w') as fh:
header = "Time," + net_io_plt_header
fh.write(header + "\n")
plt_row = [timestamp] + output
plt_row = ",".join(plt_row)
fh.write(plt_row + "\n")
def docker_stat_gather(self):
# Checking docker version
try:
p1 = subprocess.Popen(docker_version, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
version = p1.communicate()[0].strip()
version = re.findall("\d+\.\d+", version)[0]
version = float(version)
if version < 10.0:
# Docker version less than 10 is not supported
sys_logger.error("Docker version less than 10, not supported !! ")
sys_logger.error("Aborting docker stat gather thread !! ")
quit()
except Exception:
# Docker is not installed, abort this thread
sys_logger.error("Docker not installed !! ")
sys_logger.error("Aborting docker stat gather thread !! ")
quit()
# Starting docker stats
    # Spawn a separate thread for collecting docker stats, as collection takes some time
while True:
thread = common.FuncThread(collect_docker_stats, True)
thread.start()
time.sleep(float(system_metrics_interval))
def collect_docker_stats(self):
p1 = subprocess.Popen(docker_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(output, err) = p1.communicate()
action.action_lock.acquire()
running_queue = action.running_tests
action.action_lock.release()
if err:
sys_logger.error("Not able to collect docker stats")
sys_logger.error(str(err.strip()))
quit()
output = output.strip()
output = output.split("\n")
for testid, test in running_queue.iteritems():
if test.status == "RUNNING":
docker_stat_file = test.statsdir + "docker_stat.txt"
if os.path.isfile(docker_stat_file):
sys_logger.debug("Generating docker_stat.txt for test : " + str(testid))
with open(docker_stat_file, 'a') as fh:
for line in output:
if line.startswith("NAME"):
continue
line = line.strip()
# line = re.sub(' +', ' ', line)
# line = line.replace(' ', ',')
fh.write(line + "\n")
fh.write("\n")
else:
sys_logger.debug("Starting docker_stat.txt for test : " + str(testid))
with open(docker_stat_file, 'w') as fh:
fh.write(docker_stat_header + "\n")
for line in output:
if line.startswith("NAME"):
continue
line = line.strip()
# line = re.sub(' +', ' ', line)
# line = line.replace(' ', ',')
fh.write(line + "\n")
fh.write("\n")
def strace_gather(self, testid, strace_config):
"""
STRACE profiler collector based on configuration provided in strace_config for a given testid
"""
delay = float(strace_config['delay'])
duration = strace_config['duration']
process = strace_config['process']
sys_logger.debug("Starting STRACE for Test " + str(testid) + " in " + str(delay) + " secs")
# Start STRACE collection after delay time provided by user
time.sleep(delay)
test = action.get_test(testid)
strace_output_file = test.statsdir + "strace_output.txt"
    # PID selection is based on the process name provided by the user; if there are multiple PIDs for the same
    # process, the most active one in terms of CPU usage is chosen
sys_logger.debug("Setting up STRACE for process : " + process)
grep1 = ["grep", process]
p1 = subprocess.Popen(get_pid, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p2 = subprocess.Popen(grep1, stdin=p1.stdout, stdout=subprocess.PIPE)
p3 = subprocess.Popen(grep2, stdin=p2.stdout, stdout=subprocess.PIPE)
p4 = subprocess.Popen(awk, stdin=p3.stdout, stdout=subprocess.PIPE)
pid = p4.communicate()[0].strip()
if not pid:
msg = "No active PID found for given process : " + process
sys_logger.debug(msg)
if test.status == "RUNNING":
with open(strace_output_file, 'w') as fh:
fh.write(msg + "\n")
else:
sys_logger.debug("PID selected for process " + process + " : " + pid)
strace_cmd = ["timeout", duration, "strace", "-p", pid, "-c", "-S", "time", "-o", strace_output_file]
sys_logger.debug("Executing Strace for test " + str(testid))
sys_logger.debug("Strace command : " + str(strace_cmd))
p5 = subprocess.Popen(strace_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p5.wait()
sys_logger.debug("Appending PID information in output file")
perl_cmd = ['perl', '-pi', '-e',
'print "Strace Process : ' + process + ' | PID : ' + pid + ' \\n\\n" if $. == 1',
strace_output_file]
subprocess.Popen(perl_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
sys_logger.debug("Strace complete for test : " + str(testid))
def perf_gather(self, testid, perf_config):
"""
PERF profiler collector based on configuration provided in perf_config for a given testid
"""
delay = float(perf_config['delay'])
duration = perf_config['duration']
sys_logger.debug("Starting PERF for Test " + str(testid) + " in " + str(delay) + " secs")
time.sleep(delay)
test = action.get_test(testid)
perf_output_file = test.statsdir + "perf_output.txt"
# Starting system wide perf data collection
perf_system_wide_cmd = ['perf', 'stat', '-e',
'cycles,instructions,LLC-load-misses,LLC-prefetch-misses,LLC-store-misses', '-a', '-o',
perf_output_file, "sleep", duration]
if test.status == "RUNNING":
sys_logger.debug("Executing system-wide PERF")
sys_logger.debug("PERF command : " + str(perf_system_wide_cmd))
p = subprocess.Popen(perf_system_wide_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.wait()
sys_logger.debug("Finished system-wide PERF")
error = p.communicate()[1].strip()
if error:
sys_logger.debug(error)
with open(perf_output_file, 'w') as fh:
fh.write(error + "\n")
return
# Configure perf for process level data collection, if process name is provided
if "process" in perf_config:
process = perf_config['process']
sys_logger.debug("Setting up PERF for process : " + process)
grep1 = ["grep", process]
p1 = subprocess.Popen(get_pid, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p2 = subprocess.Popen(grep1, stdin=p1.stdout, stdout=subprocess.PIPE)
p3 = subprocess.Popen(grep2, stdin=p2.stdout, stdout=subprocess.PIPE)
p4 = subprocess.Popen(awk, stdin=p3.stdout, stdout=subprocess.PIPE)
pid = p4.communicate()[0].strip()
if not pid:
msg = "No active PID found for given process : " + process
sys_logger.debug(msg)
if os.path.isfile(perf_output_file):
with open(perf_output_file, 'a') as fh:
fh.write(msg + "\n")
else:
msg = "PID selected for process " + process + " : " + pid
sys_logger.debug(msg)
perf_process_cmd = ['perf', 'stat', '-e', 'cycles:u,instructions:u', '-a', '-p', pid, '-o',
perf_output_file, '--append', 'sleep', duration]
sys_logger.debug("Executing PERF for process " + process)
sys_logger.debug("PERF command : " + str(perf_process_cmd))
p5 = subprocess.Popen(perf_process_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p5.wait()
error = p5.communicate()[1].strip()
if error:
sys_logger.debug(error)
sys_logger.debug("Finished PERF on process")
sys_logger.debug("PERF complete for test : " + str(testid))
def init_sar_iostat_top():
"""
    The agent process invokes this method on startup. It spawns 4 threads for system metrics collection. Below are
thread details:
1. top_gather - For TOP output collection
2. iostat_gather - For iostat output collection
3. sar_gather - For SAR data collection
4. docker_stat_gather - For docker stat of all active containers
"""
global sys_logger
logger_file = os.getcwd() + "/system_metrics_gather_debug.out"
sys_logger = loggersetup(logger_file)
sys_logger.debug("Starting system metrics gather threads")
sys_logger.debug("Starting top gather")
t1 = common.FuncThread(top_gather, True)
t1.start()
sys_logger.debug("Starting iostat gather")
t2 = common.FuncThread(iostat_gather, True)
t2.start()
sys_logger.debug("Starting SAR gather")
t3 = common.FuncThread(sar_gather, True)
t3.start()
sys_logger.debug("Starting docker stat gather")
t4 = common.FuncThread(docker_stat_gather, True)
t4.start()
def perf_strace_gather(testid, perf_config=None, strace_config=None):
"""
    The agent invokes this procedure on test startup to configure the profiler information provided in the test details
"""
sys_logger.debug("Starting Profilers setup for test ID : " + str(testid))
sys_logger.debug("Perf configuration details")
if "process" in perf_config:
sys_logger.debug(
"Delay - " + perf_config['delay'] + " Duration - " + perf_config['duration'] + " Process - " + perf_config[
'process'])
else:
sys_logger.debug("Delay - " + perf_config['delay'] + " Duration - " + perf_config['duration'])
t1 = common.FuncThread(perf_gather, True, testid, perf_config)
t1.start()
if strace_config is not None:
sys_logger.debug("Strace configuration details")
sys_logger.debug(
"Delay - " + strace_config['delay'] + " Duration - " + strace_config['duration'] + " Process - " +
strace_config['process'])
t2 = common.FuncThread(strace_gather, True, testid, strace_config)
t2.start()
else:
sys_logger.debug("Strace not configured ")
| {
"content_hash": "bf19cd2826c3441518b5257c905fdbeb",
"timestamp": "",
"source": "github",
"line_count": 651,
"max_line_length": 120,
"avg_line_length": 45.54531490015361,
"alnum_prop": 0.5240809443507588,
"repo_name": "deepeshmittal/daytona",
"id": "56dedfbfad9900ce3d0eee206780f0a2f6ca810a",
"size": "29943",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Scheduler+Agent/system_metrics_gather.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "1355"
},
{
"name": "CSS",
"bytes": "35168"
},
{
"name": "Gnuplot",
"bytes": "32727"
},
{
"name": "JavaScript",
"bytes": "107806"
},
{
"name": "PHP",
"bytes": "286286"
},
{
"name": "Python",
"bytes": "212287"
},
{
"name": "Shell",
"bytes": "38383"
}
],
"symlink_target": ""
} |
from struct import pack, unpack, calcsize
class Error(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
fmt_checktextformatrequest = 'I'
fmt_checktextformatresponse = 'IIII'
TUI_MSGTYPE_CHECKTEXTFORMATREQUEST = 0x01
TUI_MSGTYPE_CHECKTEXTFORMATRESPONSE = 0x02
TUI_MSGTYPE_GETSCREENINFOREQUEST = 0x03
TUI_MSGTYPE_GETSCREENINFORESPONSE = 0x04
TUI_MSGTYPE_INITSESSIONREQUEST = 0x05
TUI_MSGTYPE_INITSESSIONRESPONSE = 0x06
TUI_MSGTYPE_CLOSESESSIONREQUEST = 0x07
TUI_MSGTYPE_CLOSESESSIONRESPONSE = 0x08
TUI_MSGTYPE_DISPLAYSCREENREQEUST = 0x09
TUI_MSGTYPE_DISPLAYSCREENRESPONSE = 0x0A
def generate_lengthvalue(data):
return pack('I', len(data)) + data
def parse_lengthvalue(msg):
length = unpack('I', msg[0:4])[0]
return length, msg[4:4+length]
def generate_message(msg_type, data):
return pack('II', msg_type, len(data)) + data
def parse_message(message):
# Parse message type and length from binary data
msg_type, msg_len = unpack('II', message[0:8])
data = message[8:]
# Verify size of packet is valid
if len(data) != msg_len:
raise Error('Data length not same as given in message header')
return (msg_type, data)
def parse_message_of_type(msg_type, message):
parsed_msg_type, data = parse_message(message)
if parsed_msg_type != msg_type:
raise(Error('Invalid message type parsed: %02x expected: %02x' %
(parsed_msg_type, msg_type)))
return data
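# Round-trip sketch added for illustration (not part of the original API): the
# framing helpers above simply prepend a type/length header. The payload and
# message type chosen here are arbitrary example values.
def _example_message_roundtrip():
    payload = 'hello'
    wire = generate_message(TUI_MSGTYPE_INITSESSIONREQUEST, payload)
    assert parse_message_of_type(TUI_MSGTYPE_INITSESSIONREQUEST, wire) == payload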
def generate_checktextformatrequest(text):
return generate_message(TUI_MSGTYPE_CHECKTEXTFORMATREQUEST,
pack('I', len(text)) + text)
def parse_checktextformatrequest(msg):
parsed_msg = parse_message_of_type(TUI_MSGTYPE_CHECKTEXTFORMATREQUEST, msg)
text_length = unpack('I', parsed_msg[0:4])[0]
text = parsed_msg[4:]
if text_length != len(text):
raise Error('Parsed text length %i not what expected %i' %
(text_length, len(text)))
return text
def generate_checktextformatresponse(ret, width, height, last_index):
return generate_message(TUI_MSGTYPE_CHECKTEXTFORMATRESPONSE,
pack('IIII', ret, width, height, last_index))
def parse_checktextformatresponse(msg):
return unpack('IIII',
parse_message_of_type(TUI_MSGTYPE_CHECKTEXTFORMATRESPONSE, msg))
def generate_getscreeninforequest(orientation, numberofentryfields):
return generate_message(TUI_MSGTYPE_GETSCREENINFOREQUEST,
pack('II', orientation, numberofentryfields))
def parse_getscreeninforequest(msg):
return unpack('II',
parse_message_of_type(TUI_MSGTYPE_GETSCREENINFOREQUEST, msg))
fmt_getscreeninforesponse_first_segment = 'IIIIIIIIBBBII'
fmt_getscreeninforesponse_buttoninfo = 'IIBB'
def generate_getscreeninforesponse(ret,
grayscalebitsdepth,
redbitsdepth,
greenbitsdepth,
bluebitsdepth,
widthinch,
heightinch,
maxentryfields,
labelcolor,
labelwidth,
labelheight,
buttoninfos):
first_segment = pack(fmt_getscreeninforesponse_first_segment,
ret,
grayscalebitsdepth,
redbitsdepth,
greenbitsdepth,
bluebitsdepth,
widthinch,
heightinch,
maxentryfields,
labelcolor[0],
labelcolor[1],
labelcolor[2],
labelwidth,
labelheight)
if len(buttoninfos) != 6:
raise Error('Need 6 button infos')
buttoninfo_segments = map(lambda x: generate_lengthvalue(x[0]) +
pack(fmt_getscreeninforesponse_buttoninfo,
x[1],
x[2],
int(x[3]),
int(x[4])),
buttoninfos)
return generate_message(TUI_MSGTYPE_GETSCREENINFORESPONSE,
first_segment + ''.join(buttoninfo_segments))
def parse_getscreeninforesponse(msg):
parsed_msg = parse_message_of_type(TUI_MSGTYPE_GETSCREENINFORESPONSE, msg)
first_segment_size = calcsize(fmt_getscreeninforesponse_first_segment)
# Unpack values from the first segment
v = unpack(fmt_getscreeninforesponse_first_segment,
parsed_msg[0:first_segment_size])
def parse_buttoninfo_segments(seg):
button_text_length, button_text = parse_lengthvalue(seg)
buttoninfo_size = calcsize('I') + button_text_length + \
calcsize(fmt_getscreeninforesponse_buttoninfo)
seg_next = seg[buttoninfo_size:]
buttonwidth, buttonheight, buttontextcustom, buttonimagecustom = \
unpack(fmt_getscreeninforesponse_buttoninfo,
seg[calcsize('I') + button_text_length:buttoninfo_size])
buttoninfo = (button_text,
buttonwidth,
buttonheight,
buttontextcustom != 0,
buttonimagecustom != 0)
if len(seg_next) == 0:
return (buttoninfo,)
else:
return (buttoninfo,) + parse_buttoninfo_segments(seg_next)
return v[0:8] + (v[8:11],) + v[11:] + \
(parse_buttoninfo_segments(parsed_msg[first_segment_size:]),)
def generate_initsessionrequest():
return generate_message(TUI_MSGTYPE_INITSESSIONREQUEST, '')
def parse_initsessionrequest(msg):
parse_message_of_type(TUI_MSGTYPE_INITSESSIONREQUEST, msg)
def generate_initsessionresponse(ret):
return generate_message(TUI_MSGTYPE_INITSESSIONRESPONSE,
pack('I', ret))
def parse_initsessionresponse(msg):
parsed_msg = parse_message_of_type(TUI_MSGTYPE_INITSESSIONRESPONSE, msg)
return unpack('I', parsed_msg)[0]
def generate_closesessionrequest():
return generate_message(TUI_MSGTYPE_CLOSESESSIONREQUEST, '')
def parse_closesessionrequest(msg):
parse_message_of_type(TUI_MSGTYPE_CLOSESESSIONREQUEST, msg)
def generate_closesessionresponse(ret):
return generate_message(TUI_MSGTYPE_CLOSESESSIONRESPONSE,
pack('I', ret))
def parse_closesessionresponse(msg):
parsed_msg = parse_message_of_type(TUI_MSGTYPE_CLOSESESSIONRESPONSE, msg)
return unpack('I', parsed_msg)[0]
def generate_displayscreenrequest(screenorientation,
label,
buttons,
requestedbuttons,
closetuisession,
entryfields):
def generate_image(img):
return generate_lengthvalue(img[0]) + \
pack('II', img[1], img[2])
def generate_button(btn):
return generate_lengthvalue(btn[0]) + \
generate_image(btn[1])
def generate_entryfield(ef):
return generate_lengthvalue(ef[0]) + \
pack('IIII', *ef[1:5])
label_segment = generate_lengthvalue(label[0]) + \
pack('IIBBB',
label[1],
label[2],
label[3][0],
label[3][1],
label[3][2]) + \
generate_image(label[4]) + \
pack('II', label[5], label[6])
buttons_segment = map(generate_button, buttons)
reqbuttons_segment = pack('IIIIII', *map(int, requestedbuttons))
entryfields_segment = map(generate_entryfield, entryfields)
msg = pack('I', screenorientation) + \
label_segment + \
''.join(buttons_segment) + \
reqbuttons_segment + \
pack('I', int(closetuisession)) + \
pack('I', len(entryfields)) + \
''.join(entryfields_segment)
return generate_message(TUI_MSGTYPE_DISPLAYSCREENREQEUST, msg)
def parse_displayscreenrequest(msg):
parsed_msg = parse_message_of_type(TUI_MSGTYPE_DISPLAYSCREENREQEUST, msg)
screenorientation = unpack('I', parsed_msg[0:calcsize('I')])
index = calcsize('I')
def parse_image(imgmsg):
img_size, img_data = parse_lengthvalue(imgmsg)
img_size += calcsize('I')
img_vals = unpack('II', imgmsg[img_size:img_size + calcsize('II')])
return (img_data,) + img_vals, img_size + calcsize('II')
def parse_label(lblmsg):
text_length, text = parse_lengthvalue(lblmsg)
index = text_length + calcsize('I')
lbl1 = unpack('IIBBB', lblmsg[index:index + calcsize('IIBBB')])
index += calcsize('IIBBB')
lblimage, imgsize = parse_image(lblmsg[index:])
index += imgsize
lbl2 = unpack('II', lblmsg[index:index + calcsize('II')])
index += calcsize('II')
return (text, lbl1[0], lbl1[1], (lbl1[2], lbl1[3], lbl1[4])) + \
(lblimage,) + lbl2, \
index
def parse_buttons(btnmsg):
index = 0
buttons = []
for i in xrange(0,6):
text_length, text = parse_lengthvalue(btnmsg[index:])
index += text_length + calcsize('I')
img, imgsize = parse_image(btnmsg[index:])
index += imgsize
buttons += ((text,) + (img,),)
return tuple(buttons), index
def parse_entryfields(efmsg, count):
index = 0
efs = []
for i in xrange(0, count):
text_length, text = parse_lengthvalue(efmsg[index:])
index += text_length + calcsize('I')
values = unpack('IIII', efmsg[index:index + calcsize('IIII')])
index += calcsize('IIII')
efs += ((text,) + values,)
return tuple(efs)
label, label_size = parse_label(parsed_msg[index:])
index += label_size
buttons, buttons_size = parse_buttons(parsed_msg[index:])
index += buttons_size
reqbuttons = map(lambda x: x != 0,
unpack('IIIIII',
parsed_msg[index:index + calcsize('IIIIII')]))
index += calcsize('IIIIII')
closetuisession = \
unpack('I', parsed_msg[index:index + calcsize('I')])[0] != 0
index += calcsize('I')
entryfieldcount = unpack('I', parsed_msg[index:index + calcsize('I')])[0]
index += calcsize('I')
entryfields = parse_entryfields(parsed_msg[index:], entryfieldcount)
return screenorientation + \
(label,) + \
(buttons,) + \
(tuple(reqbuttons),) + \
(closetuisession,) + \
(entryfields,)
def generate_displayscreenresponse(ret, button, entryfields):
first_segment = pack('III', ret, button, len(entryfields))
entryfield_segments = map(lambda x: generate_lengthvalue(x), entryfields)
return generate_message(TUI_MSGTYPE_DISPLAYSCREENRESPONSE,
first_segment + ''.join(entryfield_segments))
def parse_displayscreenresponse(msg):
parsed_msg = parse_message_of_type(TUI_MSGTYPE_DISPLAYSCREENRESPONSE, msg)
first_segment_size = calcsize('III')
first_segment = parsed_msg[0:first_segment_size]
entryfield_segments = parsed_msg[first_segment_size:]
def parse_entryfields(seg):
bufferlen = unpack('I', seg[0:4])[0]
data = seg[4:4+bufferlen]
seg_next = seg[4+bufferlen:]
if len(seg_next) == 0:
return (data,)
else:
return (data,) + parse_entryfields(seg_next)
ret, button, entryfield_count = unpack('III', first_segment)
entryfields = parse_entryfields(entryfield_segments)
if len(entryfields) != entryfield_count:
raise Error('entryfield_count differs from actual entry fields')
return (ret, button, entryfields)
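# Round-trip sketch added for illustration (not part of the original API): build a
# display-screen response with example values and parse it back.
def _example_displayscreen_roundtrip():
    msg = generate_displayscreenresponse(0, 2, ['alice', '1234'])
    ret, button, fields = parse_displayscreenresponse(msg)
    assert (ret, button, fields) == (0, 2, ('alice', '1234'))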
| {
"content_hash": "7fc45cb4879b5b50211f29e1f8521563",
"timestamp": "",
"source": "github",
"line_count": 388,
"max_line_length": 82,
"avg_line_length": 31.670103092783506,
"alnum_prop": 0.5753580729166666,
"repo_name": "mikatammi/tee-tui-virtual-display",
"id": "7eeb952c90c1064e7749d7600ddc8612154f4b35",
"size": "12288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytui/tui/protocol.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6886"
},
{
"name": "C++",
"bytes": "2531"
},
{
"name": "Prolog",
"bytes": "498"
},
{
"name": "Python",
"bytes": "16862"
},
{
"name": "Shell",
"bytes": "67"
},
{
"name": "TeX",
"bytes": "8392"
}
],
"symlink_target": ""
} |
from .forms import TicketForm
from .lib import format_time_spent, process_attachments
from .models import CustomField, FollowUp, FollowUpAttachment, Ticket
from .user import HelpdeskUser
from django.contrib.auth.models import User
from django.contrib.humanize.templatetags import humanize
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
class DatatablesTicketSerializer(serializers.ModelSerializer):
"""
A serializer for the Ticket model, returns data in the format as required by
datatables for ticket_list.html. Called from staff.datatables_ticket_list.
"""
ticket = serializers.SerializerMethodField()
assigned_to = serializers.SerializerMethodField()
submitter = serializers.SerializerMethodField()
created = serializers.SerializerMethodField()
due_date = serializers.SerializerMethodField()
status = serializers.SerializerMethodField()
row_class = serializers.SerializerMethodField()
time_spent = serializers.SerializerMethodField()
queue = serializers.SerializerMethodField()
kbitem = serializers.SerializerMethodField()
class Meta:
model = Ticket
# fields = '__all__'
fields = ('ticket', 'id', 'priority', 'title', 'queue', 'status',
'created', 'due_date', 'assigned_to', 'submitter', 'row_class',
'time_spent', 'kbitem')
def get_queue(self, obj):
return {"title": obj.queue.title, "id": obj.queue.id}
def get_ticket(self, obj):
return str(obj.id) + " " + obj.ticket
def get_status(self, obj):
return obj.get_status
def get_created(self, obj):
return humanize.naturaltime(obj.created)
def get_due_date(self, obj):
return humanize.naturaltime(obj.due_date)
def get_assigned_to(self, obj):
if obj.assigned_to:
if obj.assigned_to.get_full_name():
return obj.assigned_to.get_full_name()
elif obj.assigned_to.email:
return obj.assigned_to.email
else:
return obj.assigned_to.username
else:
return "None"
def get_submitter(self, obj):
return obj.submitter_email
def get_time_spent(self, obj):
return format_time_spent(obj.time_spent)
def get_row_class(self, obj):
return obj.get_priority_css_class
def get_kbitem(self, obj):
return obj.kbitem.title if obj.kbitem else ""
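# Usage sketch (comment only): the staff datatables view serializes a queryset
# roughly as
#   DatatablesTicketSerializer(ticket_queryset, many=True).data
# yielding one dict per ticket with the fields listed in Meta above.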
class FollowUpAttachmentSerializer(serializers.ModelSerializer):
class Meta:
model = FollowUpAttachment
fields = ('id', 'followup', 'file', 'filename', 'mime_type', 'size')
class FollowUpSerializer(serializers.ModelSerializer):
followupattachment_set = FollowUpAttachmentSerializer(
many=True, read_only=True)
attachments = serializers.ListField(
child=serializers.FileField(),
write_only=True,
required=False
)
class Meta:
model = FollowUp
fields = (
'id', 'ticket', 'date', 'title', 'comment', 'public', 'user', 'new_status', 'message_id',
'time_spent', 'followupattachment_set', 'attachments'
)
def create(self, validated_data):
attachments = validated_data.pop('attachments', None)
followup = super().create(validated_data)
if attachments:
process_attachments(followup, attachments)
return followup
class TicketSerializer(serializers.ModelSerializer):
followup_set = FollowUpSerializer(many=True, read_only=True)
attachment = serializers.FileField(write_only=True, required=False)
class Meta:
model = Ticket
fields = (
'id', 'queue', 'title', 'description', 'resolution', 'submitter_email', 'assigned_to', 'status', 'on_hold',
'priority', 'due_date', 'merged_to', 'attachment', 'followup_set'
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Add custom fields
for field in CustomField.objects.all():
self.fields['custom_%s' % field.name] = field.build_api_field()
def create(self, validated_data):
""" Use TicketForm to validate and create ticket """
queues = HelpdeskUser(self.context['request'].user).get_queues()
queue_choices = [(q.id, q.title) for q in queues]
data = validated_data.copy()
data['body'] = data['description']
# TicketForm needs id for ForeignKey (not the instance themselves)
data['queue'] = data['queue'].id
if data.get('assigned_to'):
data['assigned_to'] = data['assigned_to'].id
if data.get('merged_to'):
data['merged_to'] = data['merged_to'].id
files = {'attachment': data.pop('attachment', None)}
ticket_form = TicketForm(
data=data, files=files, queue_choices=queue_choices)
if ticket_form.is_valid():
ticket = ticket_form.save(user=self.context['request'].user)
ticket.set_custom_field_values()
return ticket
raise ValidationError(ticket_form.errors)
def update(self, instance, validated_data):
instance = super().update(instance, validated_data)
instance.save_custom_field_values(validated_data)
return instance
class UserSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True)
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email', 'password')
def create(self, validated_data):
user = super(UserSerializer, self).create(validated_data)
user.is_active = True
user.set_password(validated_data['password'])
user.save()
return user
| {
"content_hash": "d21697cb8f67b825969b03fd425c8c2d",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 119,
"avg_line_length": 35.484662576687114,
"alnum_prop": 0.640387275242047,
"repo_name": "django-helpdesk/django-helpdesk",
"id": "6474060d4eab2d8aa700f42817c1bbe8fe7873e8",
"size": "5784",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "helpdesk/serializers.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "13474"
},
{
"name": "HTML",
"bytes": "179798"
},
{
"name": "JavaScript",
"bytes": "44436"
},
{
"name": "Makefile",
"bytes": "2973"
},
{
"name": "Python",
"bytes": "539762"
},
{
"name": "SCSS",
"bytes": "7910"
},
{
"name": "Shell",
"bytes": "718"
}
],
"symlink_target": ""
} |
"""
Set attributes used for nearly all views
"""
from __future__ import print_function
from django.conf import settings
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
from gc_apps.geo_utils.geoconnect_step_names import GEOCONNECT_STEPS,\
STEP1_EXAMINE, STEP2_STYLE
from gc_apps.geo_utils.git_info import get_branch_info
from gc_apps.geo_utils.time_util import get_last_microsecond
CACHE_KEY_BRANCH_INFO = 'CACHE_KEY_BRANCH_INFO'
def get_common_lookup(request, **kwargs):
"""
Return dict with attributes common to nearly all views
"""
if request is None:
return {}
if request.user and request.user.is_active and request.user.is_staff:
is_staff = True
else:
is_staff = False
is_superuser = False
if request.user and request.user.is_authenticated():
is_logged_in = True
if request.user.is_superuser:
is_superuser = True
else:
is_logged_in = False
current_url = '{0}{1}'.format(request.get_host(), request.get_full_path())
common_dict = dict(\
DATAVERSE_SERVER_URL=settings.DATAVERSE_SERVER_URL,
current_url=current_url,
DEBUG_MODE=settings.DEBUG,
GEOCONNECT_STEPS=GEOCONNECT_STEPS,
STEP1_EXAMINE=STEP1_EXAMINE,
STEP2_STYLE=STEP2_STYLE,
is_logged_in=is_logged_in,
is_staff=is_staff,
is_superuser=is_superuser,
last_microsecond=get_last_microsecond())
branch_info_dict = None #get_git_branch_info_dict(request)
if branch_info_dict is not None:
common_dict.update(branch_info_dict)
if kwargs:
common_dict.update(kwargs)
return common_dict
def get_git_branch_info_dict(request):
"""Return a dict containing git branch info--if available
If not, returns an empty dict
"""
branch_info = cache.get(CACHE_KEY_BRANCH_INFO)
if branch_info is None:
branch_info = get_branch_info()
cache.set(CACHE_KEY_BRANCH_INFO, branch_info, 7200) # 2 hour cache
return branch_info
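# Illustrative sketch (not part of the original module): how a view might fold the
# common lookup values into page-specific context before rendering. The view name,
# template path, and extra keyword are made-up examples.
def example_view_using_common_lookup(request):
    """Render a page with the shared context values plus one extra key (illustration only)."""
    from django.shortcuts import render  # local import keeps the sketch self-contained
    context = get_common_lookup(request, page_title='Examine your file')
    return render(request, 'gis_shapefiles/view_01_examine.html', context)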
| {
"content_hash": "730a2511923f8f702722c8d975062149",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 78,
"avg_line_length": 29.123287671232877,
"alnum_prop": 0.6561618062088429,
"repo_name": "IQSS/geoconnect",
"id": "c4bd1bf330ebc21260e7dc6ed881708a2168120c",
"size": "2126",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gc_apps/geo_utils/view_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "35936"
},
{
"name": "HTML",
"bytes": "83429"
},
{
"name": "JavaScript",
"bytes": "17942"
},
{
"name": "Python",
"bytes": "539011"
},
{
"name": "Shell",
"bytes": "1224"
}
],
"symlink_target": ""
} |
import sqlite3
from syncless.best_stackless import stackless
import sys
def ProgressHandler():
sys.stderr.write('.')
db_conn = sqlite3.connect(':memory:')
# SUXX: Specifying 700 (or 1000) instead of 600 here would suppress the dot
# in the first few rows.
db_conn.set_progress_handler(ProgressHandler, 600)
cursor = db_conn.cursor()
cursor.execute('PRAGMA journal_mode = off')
cursor.execute('CREATE TABLE t (i integer)')
for i in xrange(200):
cursor.execute('INSERT INTO t (i) VALUES (?)', (i,))
sys.stderr.write('I')
query = ('SELECT t1.i, t2.i, t3.i FROM t AS t1, t AS t2, t AS T3 '
'WHERE t1.i < t2.i AND t2.i < t3.i')
for row in cursor.execute(query):
if row[1] == 198 and row[2] == 199: sys.stderr.write('/')
sys.stderr.write('S')
sys.stderr.write('\n')
| {
"content_hash": "45ad054694c586a7ee7e6bf61b837b6a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 75,
"avg_line_length": 28.925925925925927,
"alnum_prop": 0.6798975672215108,
"repo_name": "breezechen/syncless",
"id": "03f29affe03a75e85b966a53b6272a691c884ed7",
"size": "1018",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "examples/demo_sqlite_progress_handler.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1120576"
},
{
"name": "JavaScript",
"bytes": "2560"
},
{
"name": "Python",
"bytes": "641895"
}
],
"symlink_target": ""
} |
"""Tests for certbot._internal.cert_manager."""
# pylint: disable=protected-access
import re
import shutil
import tempfile
import unittest
import configobj
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock
from certbot import errors, configuration
from certbot._internal.storage import ALL_FOUR
from certbot.compat import filesystem
from certbot.compat import os
from certbot.display import util as display_util
from certbot.tests import util as test_util
import storage_test
class BaseCertManagerTest(test_util.ConfigTestCase):
"""Base class for setting up Cert Manager tests.
"""
def setUp(self):
super().setUp()
self.config.quiet = False
filesystem.makedirs(self.config.renewal_configs_dir)
self.domains = {
"example.org": None,
"other.com": os.path.join(self.config.config_dir, "specialarchive")
}
self.config_files = {domain: self._set_up_config(domain, self.domains[domain])
for domain in self.domains}
# We also create a file that isn't a renewal config in the same
# location to test that logic that reads in all-and-only renewal
# configs will ignore it and NOT attempt to parse it.
with open(os.path.join(self.config.renewal_configs_dir, "IGNORE.THIS"), "w") as junk:
junk.write("This file should be ignored!")
def _set_up_config(self, domain, custom_archive):
# TODO: maybe provide NamespaceConfig.make_dirs?
# TODO: main() should create those dirs, c.f. #902
filesystem.makedirs(os.path.join(self.config.live_dir, domain))
config_file = configobj.ConfigObj()
if custom_archive is not None:
filesystem.makedirs(custom_archive)
config_file["archive_dir"] = custom_archive
else:
filesystem.makedirs(os.path.join(self.config.default_archive_dir, domain))
for kind in ALL_FOUR:
config_file[kind] = os.path.join(self.config.live_dir, domain,
kind + ".pem")
config_file.filename = os.path.join(self.config.renewal_configs_dir,
domain + ".conf")
config_file.write()
return config_file
class UpdateLiveSymlinksTest(BaseCertManagerTest):
"""Tests for certbot._internal.cert_manager.update_live_symlinks
"""
def test_update_live_symlinks(self):
"""Test update_live_symlinks"""
# create files with incorrect symlinks
from certbot._internal import cert_manager
archive_paths = {}
for domain in self.domains:
custom_archive = self.domains[domain]
if custom_archive is not None:
archive_dir_path = custom_archive
else:
archive_dir_path = os.path.join(self.config.default_archive_dir, domain)
archive_paths[domain] = {kind:
os.path.join(archive_dir_path, kind + "1.pem") for kind in ALL_FOUR}
for kind in ALL_FOUR:
live_path = self.config_files[domain][kind]
archive_path = archive_paths[domain][kind]
open(archive_path, 'a').close()
# path is incorrect but base must be correct
os.symlink(os.path.join(self.config.config_dir, kind + "1.pem"), live_path)
# run update symlinks
cert_manager.update_live_symlinks(self.config)
# check that symlinks go where they should
prev_dir = os.getcwd()
try:
for domain in self.domains:
for kind in ALL_FOUR:
os.chdir(os.path.dirname(self.config_files[domain][kind]))
self.assertEqual(
filesystem.realpath(filesystem.readlink(self.config_files[domain][kind])),
filesystem.realpath(archive_paths[domain][kind]))
finally:
os.chdir(prev_dir)
class DeleteTest(storage_test.BaseRenewableCertTest):
"""Tests for certbot._internal.cert_manager.delete
"""
def _call(self):
from certbot._internal import cert_manager
cert_manager.delete(self.config)
@test_util.patch_display_util()
@mock.patch('certbot.display.util.notify')
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
@mock.patch('certbot._internal.storage.delete_files')
def test_delete_from_config_yes(self, mock_delete_files, mock_lineage_for_certname,
mock_notify, mock_util):
"""Test delete"""
mock_lineage_for_certname.return_value = self.test_rc
mock_util().yesno.return_value = True
self.config.certname = "example.org"
self._call()
mock_delete_files.assert_called_once_with(self.config, "example.org")
mock_notify.assert_called_once_with(
"Deleted all files relating to certificate example.org."
)
@test_util.patch_display_util()
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
@mock.patch('certbot._internal.storage.delete_files')
def test_delete_from_config_no(self, mock_delete_files, mock_lineage_for_certname,
mock_util):
"""Test delete"""
mock_lineage_for_certname.return_value = self.test_rc
mock_util().yesno.return_value = False
self.config.certname = "example.org"
self._call()
self.assertEqual(mock_delete_files.call_count, 0)
@test_util.patch_display_util()
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
@mock.patch('certbot._internal.storage.delete_files')
def test_delete_interactive_single_yes(self, mock_delete_files, mock_lineage_for_certname,
mock_util):
"""Test delete"""
mock_lineage_for_certname.return_value = self.test_rc
mock_util().checklist.return_value = (display_util.OK, ["example.org"])
mock_util().yesno.return_value = True
self._call()
mock_delete_files.assert_called_once_with(self.config, "example.org")
@test_util.patch_display_util()
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
@mock.patch('certbot._internal.storage.delete_files')
def test_delete_interactive_single_no(self, mock_delete_files, mock_lineage_for_certname,
mock_util):
"""Test delete"""
mock_lineage_for_certname.return_value = self.test_rc
mock_util().checklist.return_value = (display_util.OK, ["example.org"])
mock_util().yesno.return_value = False
self._call()
self.assertEqual(mock_delete_files.call_count, 0)
@test_util.patch_display_util()
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
@mock.patch('certbot._internal.storage.delete_files')
def test_delete_interactive_multiple_yes(self, mock_delete_files, mock_lineage_for_certname,
mock_util):
"""Test delete"""
mock_lineage_for_certname.return_value = self.test_rc
mock_util().checklist.return_value = (display_util.OK, ["example.org", "other.org"])
mock_util().yesno.return_value = True
self._call()
mock_delete_files.assert_any_call(self.config, "example.org")
mock_delete_files.assert_any_call(self.config, "other.org")
self.assertEqual(mock_delete_files.call_count, 2)
@test_util.patch_display_util()
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
@mock.patch('certbot._internal.storage.delete_files')
def test_delete_interactive_multiple_no(self, mock_delete_files, mock_lineage_for_certname,
mock_util):
"""Test delete"""
mock_lineage_for_certname.return_value = self.test_rc
mock_util().checklist.return_value = (display_util.OK, ["example.org", "other.org"])
mock_util().yesno.return_value = False
self._call()
self.assertEqual(mock_delete_files.call_count, 0)
class CertificatesTest(BaseCertManagerTest):
"""Tests for certbot._internal.cert_manager.certificates
"""
def _certificates(self, *args, **kwargs):
from certbot._internal.cert_manager import certificates
return certificates(*args, **kwargs)
@mock.patch('certbot._internal.cert_manager.logger')
@test_util.patch_display_util()
def test_certificates_parse_fail(self, mock_utility, mock_logger):
self._certificates(self.config)
self.assertTrue(mock_logger.warning.called) #pylint: disable=no-member
self.assertTrue(mock_utility.called)
@mock.patch('certbot._internal.cert_manager.logger')
@test_util.patch_display_util()
def test_certificates_quiet(self, mock_utility, mock_logger):
self.config.quiet = True
self._certificates(self.config)
self.assertIs(mock_utility.notification.called, False)
self.assertTrue(mock_logger.warning.called) #pylint: disable=no-member
@mock.patch('certbot.crypto_util.verify_renewable_cert')
@mock.patch('certbot._internal.cert_manager.logger')
@test_util.patch_display_util()
@mock.patch("certbot._internal.storage.RenewableCert")
@mock.patch('certbot._internal.cert_manager._report_human_readable')
def test_certificates_parse_success(self, mock_report, mock_renewable_cert,
mock_utility, mock_logger, mock_verifier):
mock_verifier.return_value = None
mock_report.return_value = ""
self._certificates(self.config)
self.assertIs(mock_logger.warning.called, False)
self.assertTrue(mock_report.called)
self.assertTrue(mock_utility.called)
self.assertTrue(mock_renewable_cert.called)
@mock.patch('certbot._internal.cert_manager.logger')
@test_util.patch_display_util()
def test_certificates_no_files(self, mock_utility, mock_logger):
empty_tempdir = tempfile.mkdtemp()
empty_config = configuration.NamespaceConfig(mock.MagicMock(
config_dir=os.path.join(empty_tempdir, "config"),
work_dir=os.path.join(empty_tempdir, "work"),
logs_dir=os.path.join(empty_tempdir, "logs"),
quiet=False
))
filesystem.makedirs(empty_config.renewal_configs_dir)
self._certificates(empty_config)
self.assertIs(mock_logger.warning.called, False)
self.assertTrue(mock_utility.called)
shutil.rmtree(empty_tempdir)
@mock.patch('certbot.crypto_util.get_serial_from_cert')
@mock.patch('certbot._internal.cert_manager.ocsp.RevocationChecker.ocsp_revoked')
def test_report_human_readable(self, mock_revoked, mock_serial):
mock_revoked.return_value = None
mock_serial.return_value = 1234567890
from certbot._internal import cert_manager
import datetime
import pytz
expiry = pytz.UTC.fromutc(datetime.datetime.utcnow())
cert = mock.MagicMock(lineagename="nameone")
cert.target_expiry = expiry
cert.names.return_value = ["nameone", "nametwo"]
cert.is_test_cert = False
parsed_certs = [cert]
mock_config = mock.MagicMock(certname=None, lineagename=None)
# pylint: disable=protected-access
get_report = lambda: cert_manager._report_human_readable(mock_config, parsed_certs)
out = get_report()
self.assertIn("INVALID: EXPIRED", out)
cert.target_expiry += datetime.timedelta(hours=2)
# pylint: disable=protected-access
out = get_report()
self.assertIs('1 hour' in out or '2 hour(s)' in out, True)
self.assertIn('VALID', out)
self.assertNotIn('INVALID', out)
cert.target_expiry += datetime.timedelta(days=1)
# pylint: disable=protected-access
out = get_report()
self.assertIn('1 day', out)
self.assertNotIn('under', out)
self.assertIn('VALID', out)
self.assertNotIn('INVALID', out)
cert.target_expiry += datetime.timedelta(days=2)
# pylint: disable=protected-access
out = get_report()
self.assertIn('3 days', out)
self.assertIn('VALID', out)
self.assertNotIn('INVALID', out)
cert.is_test_cert = True
mock_revoked.return_value = True
out = get_report()
self.assertIn('INVALID: TEST_CERT, REVOKED', out)
cert = mock.MagicMock(lineagename="indescribable")
cert.target_expiry = expiry
cert.names.return_value = ["nameone", "thrice.named"]
cert.is_test_cert = True
parsed_certs.append(cert)
out = get_report()
self.assertEqual(len(re.findall("INVALID:", out)), 2)
mock_config.domains = ["thrice.named"]
out = get_report()
self.assertEqual(len(re.findall("INVALID:", out)), 1)
mock_config.domains = ["nameone"]
out = get_report()
self.assertEqual(len(re.findall("INVALID:", out)), 2)
mock_config.certname = "indescribable"
out = get_report()
self.assertEqual(len(re.findall("INVALID:", out)), 1)
mock_config.certname = "horror"
out = get_report()
self.assertEqual(len(re.findall("INVALID:", out)), 0)
class SearchLineagesTest(BaseCertManagerTest):
"""Tests for certbot._internal.cert_manager._search_lineages."""
@mock.patch('certbot.util.make_or_verify_dir')
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.RenewableCert')
def test_cert_storage_error(self, mock_renewable_cert, mock_renewal_conf_files,
mock_make_or_verify_dir):
mock_renewal_conf_files.return_value = ["badfile"]
mock_renewable_cert.side_effect = errors.CertStorageError
from certbot._internal import cert_manager
# pylint: disable=protected-access
self.assertEqual(cert_manager._search_lineages(self.config, lambda x: x, "check"), "check")
self.assertTrue(mock_make_or_verify_dir.called)
class LineageForCertnameTest(BaseCertManagerTest):
"""Tests for certbot._internal.cert_manager.lineage_for_certname"""
@mock.patch('certbot.util.make_or_verify_dir')
@mock.patch('certbot._internal.storage.renewal_file_for_certname')
@mock.patch('certbot._internal.storage.RenewableCert')
def test_found_match(self, mock_renewable_cert, mock_renewal_conf_file,
mock_make_or_verify_dir):
mock_renewal_conf_file.return_value = "somefile.conf"
mock_match = mock.Mock(lineagename="example.com")
mock_renewable_cert.return_value = mock_match
from certbot._internal import cert_manager
self.assertEqual(cert_manager.lineage_for_certname(self.config, "example.com"), mock_match)
self.assertTrue(mock_make_or_verify_dir.called)
@mock.patch('certbot.util.make_or_verify_dir')
@mock.patch('certbot._internal.storage.renewal_file_for_certname')
def test_no_match(self, mock_renewal_conf_file, mock_make_or_verify_dir):
mock_renewal_conf_file.return_value = "other.com.conf"
from certbot._internal import cert_manager
self.assertIsNone(cert_manager.lineage_for_certname(self.config, "example.com"))
self.assertTrue(mock_make_or_verify_dir.called)
@mock.patch('certbot.util.make_or_verify_dir')
@mock.patch('certbot._internal.storage.renewal_file_for_certname')
def test_no_renewal_file(self, mock_renewal_conf_file, mock_make_or_verify_dir):
mock_renewal_conf_file.side_effect = errors.CertStorageError()
from certbot._internal import cert_manager
self.assertIsNone(cert_manager.lineage_for_certname(self.config, "example.com"))
self.assertTrue(mock_make_or_verify_dir.called)
class DomainsForCertnameTest(BaseCertManagerTest):
"""Tests for certbot._internal.cert_manager.domains_for_certname"""
@mock.patch('certbot.util.make_or_verify_dir')
@mock.patch('certbot._internal.storage.renewal_file_for_certname')
@mock.patch('certbot._internal.storage.RenewableCert')
def test_found_match(self, mock_renewable_cert, mock_renewal_conf_file,
mock_make_or_verify_dir):
mock_renewal_conf_file.return_value = "somefile.conf"
mock_match = mock.Mock(lineagename="example.com")
domains = ["example.com", "example.org"]
mock_match.names.return_value = domains
mock_renewable_cert.return_value = mock_match
from certbot._internal import cert_manager
self.assertEqual(cert_manager.domains_for_certname(self.config, "example.com"),
domains)
self.assertTrue(mock_make_or_verify_dir.called)
@mock.patch('certbot.util.make_or_verify_dir')
@mock.patch('certbot._internal.storage.renewal_file_for_certname')
def test_no_match(self, mock_renewal_conf_file, mock_make_or_verify_dir):
mock_renewal_conf_file.return_value = "somefile.conf"
from certbot._internal import cert_manager
self.assertIsNone(cert_manager.domains_for_certname(self.config, "other.com"))
self.assertTrue(mock_make_or_verify_dir.called)
class RenameLineageTest(BaseCertManagerTest):
"""Tests for certbot._internal.cert_manager.rename_lineage"""
def setUp(self):
super().setUp()
self.config.certname = "example.org"
self.config.new_certname = "after"
def _call(self, *args, **kwargs):
from certbot._internal import cert_manager
return cert_manager.rename_lineage(*args, **kwargs)
@mock.patch('certbot._internal.storage.renewal_conf_files')
@test_util.patch_display_util()
def test_no_certname(self, mock_get_utility, mock_renewal_conf_files):
self.config.certname = None
self.config.new_certname = "two"
# if not choices
mock_renewal_conf_files.return_value = []
self.assertRaises(errors.Error, self._call, self.config)
mock_renewal_conf_files.return_value = ["one.conf"]
util_mock = mock_get_utility()
util_mock.menu.return_value = (display_util.CANCEL, 0)
self.assertRaises(errors.Error, self._call, self.config)
util_mock.menu.return_value = (display_util.OK, -1)
self.assertRaises(errors.Error, self._call, self.config)
@test_util.patch_display_util()
def test_no_new_certname(self, mock_get_utility):
self.config.certname = "one"
self.config.new_certname = None
util_mock = mock_get_utility()
util_mock.input.return_value = (display_util.CANCEL, "name")
self.assertRaises(errors.Error, self._call, self.config)
util_mock.input.return_value = (display_util.OK, None)
self.assertRaises(errors.Error, self._call, self.config)
@test_util.patch_display_util()
@mock.patch('certbot._internal.cert_manager.lineage_for_certname')
def test_no_existing_certname(self, mock_lineage_for_certname, unused_get_utility):
self.config.certname = "one"
self.config.new_certname = "two"
mock_lineage_for_certname.return_value = None
self.assertRaises(errors.ConfigurationError,
self._call, self.config)
@test_util.patch_display_util()
@mock.patch("certbot._internal.storage.RenewableCert._check_symlinks")
def test_rename_cert(self, mock_check, unused_get_utility):
mock_check.return_value = True
self._call(self.config)
from certbot._internal import cert_manager
updated_lineage = cert_manager.lineage_for_certname(self.config, self.config.new_certname)
self.assertIsNotNone(updated_lineage)
self.assertEqual(updated_lineage.lineagename, self.config.new_certname)
@test_util.patch_display_util()
@mock.patch("certbot._internal.storage.RenewableCert._check_symlinks")
def test_rename_cert_interactive_certname(self, mock_check, mock_get_utility):
mock_check.return_value = True
self.config.certname = None
util_mock = mock_get_utility()
util_mock.menu.return_value = (display_util.OK, 0)
self._call(self.config)
from certbot._internal import cert_manager
updated_lineage = cert_manager.lineage_for_certname(self.config, self.config.new_certname)
self.assertIsNotNone(updated_lineage)
self.assertEqual(updated_lineage.lineagename, self.config.new_certname)
@test_util.patch_display_util()
@mock.patch("certbot._internal.storage.RenewableCert._check_symlinks")
def test_rename_cert_bad_new_certname(self, mock_check, unused_get_utility):
mock_check.return_value = True
# for example, don't rename to existing certname
self.config.new_certname = "example.org"
self.assertRaises(errors.ConfigurationError, self._call, self.config)
self.config.new_certname = "one{0}two".format(os.path.sep)
self.assertRaises(errors.ConfigurationError, self._call, self.config)
class DuplicativeCertsTest(storage_test.BaseRenewableCertTest):
"""Test to avoid duplicate lineages."""
def setUp(self):
super().setUp()
self.config_file.write()
self._write_out_ex_kinds()
@mock.patch('certbot.util.make_or_verify_dir')
def test_find_duplicative_names(self, unused_makedir):
from certbot._internal.cert_manager import find_duplicative_certs
test_cert = test_util.load_vector('cert-san_512.pem')
with open(self.test_rc.cert, 'wb') as f:
f.write(test_cert)
# No overlap at all
result = find_duplicative_certs(
self.config, ['wow.net', 'hooray.org'])
self.assertEqual(result, (None, None))
# Totally identical
result = find_duplicative_certs(
self.config, ['example.com', 'www.example.com'])
self.assertTrue(result[0].configfile.filename.endswith('example.org.conf'))
self.assertIsNone(result[1])
# Superset
result = find_duplicative_certs(
self.config, ['example.com', 'www.example.com', 'something.new'])
self.assertIsNone(result[0])
self.assertTrue(result[1].configfile.filename.endswith('example.org.conf'))
# Partial overlap doesn't count
result = find_duplicative_certs(
self.config, ['example.com', 'something.new'])
self.assertEqual(result, (None, None))
class CertPathToLineageTest(storage_test.BaseRenewableCertTest):
"""Tests for certbot._internal.cert_manager.cert_path_to_lineage"""
def setUp(self):
super().setUp()
self.config_file.write()
self._write_out_ex_kinds()
self.fullchain = os.path.join(self.config.config_dir, 'live', 'example.org',
'fullchain.pem')
self.config.cert_path = self.fullchain
def _call(self, cli_config):
from certbot._internal.cert_manager import cert_path_to_lineage
return cert_path_to_lineage(cli_config)
def _archive_files(self, cli_config, filetype):
from certbot._internal.cert_manager import _archive_files
return _archive_files(cli_config, filetype)
def test_basic_match(self):
self.assertEqual('example.org', self._call(self.config))
def test_no_match_exists(self):
bad_test_config = self.config
bad_test_config.cert_path = os.path.join(self.config.config_dir, 'live',
'SailorMoon', 'fullchain.pem')
self.assertRaises(errors.Error, self._call, bad_test_config)
@mock.patch('certbot._internal.cert_manager._acceptable_matches')
def test_options_fullchain(self, mock_acceptable_matches):
mock_acceptable_matches.return_value = [lambda x: x.fullchain_path]
self.config.fullchain_path = self.fullchain
self.assertEqual('example.org', self._call(self.config))
@mock.patch('certbot._internal.cert_manager._acceptable_matches')
def test_options_cert_path(self, mock_acceptable_matches):
mock_acceptable_matches.return_value = [lambda x: x.cert_path]
test_cert_path = os.path.join(self.config.config_dir, 'live', 'example.org',
'cert.pem')
self.config.cert_path = test_cert_path
self.assertEqual('example.org', self._call(self.config))
@mock.patch('certbot._internal.cert_manager._acceptable_matches')
def test_options_archive_cert(self, mock_acceptable_matches):
# Also this and the next test check that the regex of _archive_files is working.
self.config.cert_path = os.path.join(self.config.config_dir, 'archive', 'example.org',
'cert11.pem')
mock_acceptable_matches.return_value = [lambda x: self._archive_files(x, 'cert')]
self.assertEqual('example.org', self._call(self.config))
@mock.patch('certbot._internal.cert_manager._acceptable_matches')
def test_options_archive_fullchain(self, mock_acceptable_matches):
self.config.cert_path = os.path.join(self.config.config_dir, 'archive',
'example.org', 'fullchain11.pem')
mock_acceptable_matches.return_value = [lambda x:
self._archive_files(x, 'fullchain')]
self.assertEqual('example.org', self._call(self.config))
def test_only_path(self):
self.config.cert_path = self.fullchain
self.assertEqual('example.org', self._call(self.config))
class MatchAndCheckOverlaps(storage_test.BaseRenewableCertTest):
"""Tests for certbot._internal.cert_manager.match_and_check_overlaps w/o overlapping
archive dirs."""
# A test with real overlapping archive dirs can be found in tests/boulder_integration.sh
def setUp(self):
super().setUp()
self.config_file.write()
self._write_out_ex_kinds()
self.fullchain = os.path.join(self.config.config_dir, 'live', 'example.org',
'fullchain.pem')
self.config.cert_path = self.fullchain
def _call(self, cli_config, acceptable_matches, match_func, rv_func):
from certbot._internal.cert_manager import match_and_check_overlaps
return match_and_check_overlaps(cli_config, acceptable_matches, match_func, rv_func)
def test_basic_match(self):
from certbot._internal.cert_manager import _acceptable_matches
self.assertEqual(['example.org'], self._call(self.config, _acceptable_matches(),
lambda x: self.config.cert_path, lambda x: x.lineagename))
@mock.patch('certbot._internal.cert_manager._search_lineages')
def test_no_matches(self, mock_search_lineages):
mock_search_lineages.return_value = []
self.assertRaises(errors.Error, self._call, self.config, None, None, None)
@mock.patch('certbot._internal.cert_manager._search_lineages')
def test_too_many_matches(self, mock_search_lineages):
mock_search_lineages.return_value = ['spider', 'dance']
self.assertRaises(errors.OverlappingMatchFound, self._call, self.config, None, None, None)
class GetCertnameTest(unittest.TestCase):
"""Tests for certbot._internal.cert_manager."""
def setUp(self):
get_utility_patch = test_util.patch_display_util()
self.mock_get_utility = get_utility_patch.start()
self.addCleanup(get_utility_patch.stop)
self.config = mock.MagicMock()
self.config.certname = None
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.lineagename_for_filename')
def test_get_certnames(self, mock_name, mock_files):
mock_files.return_value = ['example.com.conf']
mock_name.return_value = 'example.com'
from certbot._internal import cert_manager
prompt = "Which certificate would you"
self.mock_get_utility().menu.return_value = (display_util.OK, 0)
self.assertEqual(
cert_manager.get_certnames(
self.config, "verb", allow_multiple=False), ['example.com'])
self.assertIn(prompt, self.mock_get_utility().menu.call_args[0][0])
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.lineagename_for_filename')
def test_get_certnames_custom_prompt(self, mock_name, mock_files):
mock_files.return_value = ['example.com.conf']
mock_name.return_value = 'example.com'
from certbot._internal import cert_manager
prompt = "custom prompt"
self.mock_get_utility().menu.return_value = (display_util.OK, 0)
self.assertEqual(
cert_manager.get_certnames(
self.config, "verb", allow_multiple=False, custom_prompt=prompt),
['example.com'])
self.assertEqual(self.mock_get_utility().menu.call_args[0][0],
prompt)
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.lineagename_for_filename')
def test_get_certnames_user_abort(self, mock_name, mock_files):
mock_files.return_value = ['example.com.conf']
mock_name.return_value = 'example.com'
from certbot._internal import cert_manager
self.mock_get_utility().menu.return_value = (display_util.CANCEL, 0)
self.assertRaises(
errors.Error,
cert_manager.get_certnames,
self.config, "erroring_anyway", allow_multiple=False)
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.lineagename_for_filename')
def test_get_certnames_allow_multiple(self, mock_name, mock_files):
mock_files.return_value = ['example.com.conf']
mock_name.return_value = 'example.com'
from certbot._internal import cert_manager
prompt = "Which certificate(s) would you"
self.mock_get_utility().checklist.return_value = (display_util.OK,
['example.com'])
self.assertEqual(
cert_manager.get_certnames(
self.config, "verb", allow_multiple=True), ['example.com'])
self.assertIn(prompt, self.mock_get_utility().checklist.call_args[0][0])
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.lineagename_for_filename')
def test_get_certnames_allow_multiple_custom_prompt(self, mock_name, mock_files):
mock_files.return_value = ['example.com.conf']
mock_name.return_value = 'example.com'
from certbot._internal import cert_manager
prompt = "custom prompt"
self.mock_get_utility().checklist.return_value = (display_util.OK,
['example.com'])
self.assertEqual(
cert_manager.get_certnames(
self.config, "verb", allow_multiple=True, custom_prompt=prompt),
['example.com'])
self.assertEqual(
self.mock_get_utility().checklist.call_args[0][0],
prompt)
@mock.patch('certbot._internal.storage.renewal_conf_files')
@mock.patch('certbot._internal.storage.lineagename_for_filename')
def test_get_certnames_allow_multiple_user_abort(self, mock_name, mock_files):
mock_files.return_value = ['example.com.conf']
mock_name.return_value = 'example.com'
from certbot._internal import cert_manager
self.mock_get_utility().checklist.return_value = (display_util.CANCEL, [])
self.assertRaises(
errors.Error,
cert_manager.get_certnames,
self.config, "erroring_anyway", allow_multiple=True)
if __name__ == "__main__":
unittest.main() # pragma: no cover
| {
"content_hash": "9ca05d775ed0b87ecc705cc3d3c2d880",
"timestamp": "",
"source": "github",
"line_count": 711,
"max_line_length": 99,
"avg_line_length": 44.47116736990155,
"alnum_prop": 0.6563774945444195,
"repo_name": "letsencrypt/letsencrypt",
"id": "0ed09eccddb0138fbf3000618fd3833ab772e5f0",
"size": "31620",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "certbot/tests/cert_manager_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "50702"
},
{
"name": "Augeas",
"bytes": "5062"
},
{
"name": "Batchfile",
"bytes": "35037"
},
{
"name": "DIGITAL Command Language",
"bytes": "133"
},
{
"name": "Groff",
"bytes": "222"
},
{
"name": "Makefile",
"bytes": "37309"
},
{
"name": "Nginx",
"bytes": "4274"
},
{
"name": "Python",
"bytes": "1336185"
},
{
"name": "Shell",
"bytes": "147823"
}
],
"symlink_target": ""
} |
""" Test for assert_deallocated context manager and gc utilities
"""
from __future__ import division, print_function, absolute_import
import gc
from scipy._lib._gcutils import (set_gc_state, gc_state, assert_deallocated,
ReferenceError, IS_PYPY)
from numpy.testing import assert_equal
import pytest
def test_set_gc_state():
gc_status = gc.isenabled()
try:
for state in (True, False):
gc.enable()
set_gc_state(state)
assert_equal(gc.isenabled(), state)
gc.disable()
set_gc_state(state)
assert_equal(gc.isenabled(), state)
finally:
if gc_status:
gc.enable()
def test_gc_state():
# Test gc_state context manager
gc_status = gc.isenabled()
try:
for pre_state in (True, False):
set_gc_state(pre_state)
for with_state in (True, False):
# Check the gc state is with_state in with block
with gc_state(with_state):
assert_equal(gc.isenabled(), with_state)
# And returns to previous state outside block
assert_equal(gc.isenabled(), pre_state)
# Even if the gc state is set explicitly within the block
with gc_state(with_state):
assert_equal(gc.isenabled(), with_state)
set_gc_state(not with_state)
assert_equal(gc.isenabled(), pre_state)
finally:
if gc_status:
gc.enable()
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated():
# Ordinary use
class C(object):
def __init__(self, arg0, arg1, name='myname'):
self.name = name
for gc_current in (True, False):
with gc_state(gc_current):
# We are deleting from with-block context, so that's OK
with assert_deallocated(C, 0, 2, 'another name') as c:
assert_equal(c.name, 'another name')
del c
# Or not using the thing in with-block context, also OK
with assert_deallocated(C, 0, 2, name='third name'):
pass
assert_equal(gc.isenabled(), gc_current)
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated_nodel():
class C(object):
pass
with pytest.raises(ReferenceError):
# Need to delete after using if in with-block context
with assert_deallocated(C) as c:
pass
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated_circular():
class C(object):
def __init__(self):
self._circular = self
with pytest.raises(ReferenceError):
# Circular reference, no automatic garbage collection
with assert_deallocated(C) as c:
del c
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated_circular2():
class C(object):
def __init__(self):
self._circular = self
with pytest.raises(ReferenceError):
# Still circular reference, no automatic garbage collection
with assert_deallocated(C):
pass
| {
"content_hash": "ce3fcec96f8d82a4af5e23dbcb9e0c68",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 76,
"avg_line_length": 33.09090909090909,
"alnum_prop": 0.5891330891330891,
"repo_name": "lhilt/scipy",
"id": "d82b81f35d5cb11176c92208d1c8f5cbde14e726",
"size": "3276",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "scipy/_lib/tests/test__gcutils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4396416"
},
{
"name": "C++",
"bytes": "643592"
},
{
"name": "Fortran",
"bytes": "5368331"
},
{
"name": "MATLAB",
"bytes": "4346"
},
{
"name": "Makefile",
"bytes": "778"
},
{
"name": "Python",
"bytes": "12378541"
},
{
"name": "Shell",
"bytes": "538"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
} |
from flask.ext.script import Manager
from david.app import app
manager = Manager(app)
if __name__ == '__main__':
manager.run()
| {
"content_hash": "a4018b23ef07d95bf1b409c6aa7530a0",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 36,
"avg_line_length": 16.75,
"alnum_prop": 0.664179104477612,
"repo_name": "ktmud/david",
"id": "d822455c8e122f352fa054d4f799c0c705be62d2",
"size": "158",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manage.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "83881"
},
{
"name": "JavaScript",
"bytes": "281633"
},
{
"name": "PHP",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "82385"
}
],
"symlink_target": ""
} |
import sys
sys.path.insert(1, "../../../")
import h2o, tests
def offset_tweedie(ip,port):
# Connect to a pre-existing cluster
insurance = h2o.import_file(h2o.locate("smalldata/glm_test/insurance.csv"))
insurance["offset"] = insurance["Holders"].log()
gbm = h2o.gbm(x=insurance[0:3], y=insurance["Claims"], distribution="tweedie", ntrees=600, max_depth=1, min_rows=1,
learn_rate=.1, offset_column="offset", training_frame=insurance)
predictions = gbm.predict(insurance)
# Comparison result generated from harrysouthworth's gbm:
# fit2 = gbm(Claims ~ District + Group + Age+ offset(log(Holders)) , interaction.depth = 1,n.minobsinnode = 1,shrinkage = .1,bag.fraction = 1,train.fraction = 1,
# data = Insurance, distribution ="tweedie", n.trees = 600)
# pr = predict(fit2, Insurance)
# pr = exp(pr+log(Insurance$Holders))
assert abs(-1.869702 - gbm._model_json['output']['init_f']) < 1e-5, "expected init_f to be {0}, but got {1}".\
format(-1.869702, gbm._model_json['output']['init_f'])
assert abs(49.21591 - predictions.mean()) < 1e-3, "expected prediction mean to be {0}, but got {1}". \
format(49.21591, predictions.mean())
assert abs(1.0258 - predictions.min()) < 1e-4, "expected prediction min to be {0}, but got {1}". \
format(1.0258, predictions.min())
assert abs(392.4651 - predictions.max()) < 1e-2, "expected prediction max to be {0}, but got {1}". \
format(392.4651, predictions.max())
if __name__ == "__main__":
tests.run_test(sys.argv, offset_tweedie)
| {
"content_hash": "fe135f51f5b899599548f487fc7de193",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 165,
"avg_line_length": 48.333333333333336,
"alnum_prop": 0.6357366771159875,
"repo_name": "bospetersen/h2o-3",
"id": "c3a15e85b18d6e5779499057ec84b1f5fd670aab",
"size": "1595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_algos/gbm/pyunit_offset_tweedieGBM.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5090"
},
{
"name": "CSS",
"bytes": "163561"
},
{
"name": "CoffeeScript",
"bytes": "262107"
},
{
"name": "Emacs Lisp",
"bytes": "8914"
},
{
"name": "Groovy",
"bytes": "78"
},
{
"name": "HTML",
"bytes": "146874"
},
{
"name": "Java",
"bytes": "5441396"
},
{
"name": "JavaScript",
"bytes": "88331"
},
{
"name": "Makefile",
"bytes": "31513"
},
{
"name": "Python",
"bytes": "2021301"
},
{
"name": "R",
"bytes": "1829960"
},
{
"name": "Rebol",
"bytes": "3997"
},
{
"name": "Ruby",
"bytes": "299"
},
{
"name": "Scala",
"bytes": "16336"
},
{
"name": "Shell",
"bytes": "44718"
},
{
"name": "TeX",
"bytes": "470617"
}
],
"symlink_target": ""
} |
"""This module defines an abstract class for Hotword Detection. Any Hotword Detection Engine
implemented in the app must inherit from this class.
HotwordDetector subclasses from threading.Thread since Hotword Detection will run in a separate thread for non-blocking
operation.
"""
from abc import ABC, abstractmethod
from threading import Thread
from rx.subjects import Subject
class HotwordDetector(ABC, Thread):
""" This is an abstract class for a Hotword Detector. Any hotword detector implemented
in the app must inherit this. It subclasses from threading.Thread allowing Hotword Detection to
run on a separate thread.
    :attributes
        subject: An Rx Subject that emits an event each time a hotword is detected. Components
        interested in detections register callbacks by subscribing to this subject.
    """
def __init__(self) -> None:
Thread.__init__(self)
self.subject = Subject()
    @abstractmethod
def run(self):
"""This method is executed on the start of the thread. You may initialize parameters for Hotword Detection
here and start the recognition in a busy/wait loop since operation is being run on background thread.
On detecting a hotword, it should call on_detected.
"""
pass
def on_detected(self):
"""This callback is fired when a Hotword Detector detects a hotword.
:return: None
"""
self.subject.on_next("Hotword")
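# Illustrative sketch (not part of the original module): a minimal concrete detector
# showing how subclasses are expected to implement run() and report detections via
# on_detected(). The timer-based trigger is a made-up placeholder, not a real
# recognition engine.
class ExampleTimerHotwordDetector(HotwordDetector):
    """Emit a fake hotword event every few seconds (illustration only)."""
    def run(self):
        import time  # local import keeps the sketch self-contained
        while True:
            time.sleep(5)        # stand-in for listening to the microphone
            self.on_detected()   # notify subscribers on the Rx subject
# Typical wiring (illustration only):
#   detector = ExampleTimerHotwordDetector()
#   detector.subject.subscribe(lambda _: print("hotword detected"))
#   detector.start()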
| {
"content_hash": "19e8f212e279b779a86a8cfc2bf2b55a",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 119,
"avg_line_length": 41.92307692307692,
"alnum_prop": 0.7143730886850153,
"repo_name": "betterclever/susi_hardware",
"id": "34c0c62d11d1c1867ac11c4ce99ce4254cc1ce34",
"size": "1635",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "main/hotword_engine/hotword_detector.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15119"
},
{
"name": "Shell",
"bytes": "4286"
}
],
"symlink_target": ""
} |
from sympy.functions import adjoint, conjugate, transpose
from sympy.matrices.expressions import MatrixSymbol, Adjoint, Trace, Transpose
from sympy.matrices import eye, Matrix
from sympy import symbols, S
n, m, l, k, p = symbols('n m l k p', integer=True)
A = MatrixSymbol('A', n, m)
B = MatrixSymbol('B', m, l)
C = MatrixSymbol('C', n, n)
def test_transpose():
Sq = MatrixSymbol('Sq', n, n)
assert Transpose(A).shape == (m, n)
assert Transpose(A*B).shape == (l, n)
assert transpose(Transpose(A)) == A
assert isinstance(Transpose(Transpose(A)), Transpose)
assert adjoint(Transpose(A)) == Adjoint(Transpose(A))
assert conjugate(Transpose(A)) == Adjoint(A)
assert Transpose(eye(3)).doit() == eye(3)
assert Transpose(S(5)).doit() == S(5)
assert Transpose(Matrix([[1, 2], [3, 4]])).doit() == Matrix([[1, 3], [2, 4]])
assert transpose(Trace(Sq)) == Trace(Sq)
assert Trace(Transpose(Sq)) == Trace(Sq)
assert Transpose(Sq)[0, 1] == Sq[1, 0]
assert Transpose(A*B).doit() == Transpose(B) * Transpose(A)
| {
"content_hash": "03a5a276d2f2eb59e063c3d14d4be8aa",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 81,
"avg_line_length": 31.205882352941178,
"alnum_prop": 0.6427898209236569,
"repo_name": "amitjamadagni/sympy",
"id": "af6d6bbcc0eeab680378dad685bfc1da561eb2c7",
"size": "1061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/matrices/expressions/tests/test_transpose.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12199014"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "287"
},
{
"name": "TeX",
"bytes": "8789"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
} |
"""Support for Australian BOM (Bureau of Meteorology) weather service."""
import logging
import voluptuous as vol
from homeassistant.components.weather import PLATFORM_SCHEMA, WeatherEntity
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, TEMP_CELSIUS
from homeassistant.helpers import config_validation as cv
# Reuse data and API logic from the sensor implementation
from .sensor import CONF_STATION, BOMCurrentData, closest_station, validate_station
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_STATION): validate_station}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the BOM weather platform."""
station = config.get(CONF_STATION) or closest_station(
config.get(CONF_LATITUDE), config.get(CONF_LONGITUDE), hass.config.config_dir
)
if station is None:
_LOGGER.error("Could not get BOM weather station from lat/lon")
return False
bom_data = BOMCurrentData(station)
try:
bom_data.update()
except ValueError as err:
_LOGGER.error("Received error from BOM_Current: %s", err)
return False
add_entities([BOMWeather(bom_data, config.get(CONF_NAME))], True)
class BOMWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, bom_data, stationname=None):
"""Initialise the platform with a data instance and station name."""
self.bom_data = bom_data
self.stationname = stationname or self.bom_data.latest_data.get("name")
def update(self):
"""Update current conditions."""
self.bom_data.update()
@property
def name(self):
"""Return the name of the sensor."""
return f"BOM {self.stationname or '(unknown station)'}"
@property
def condition(self):
"""Return the current condition."""
return self.bom_data.get_reading("weather")
# Now implement the WeatherEntity interface
@property
def temperature(self):
"""Return the platform temperature."""
return self.bom_data.get_reading("air_temp")
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def pressure(self):
"""Return the mean sea-level pressure."""
return self.bom_data.get_reading("press_msl")
@property
def humidity(self):
"""Return the relative humidity."""
return self.bom_data.get_reading("rel_hum")
@property
def wind_speed(self):
"""Return the wind speed."""
return self.bom_data.get_reading("wind_spd_kmh")
@property
def wind_bearing(self):
"""Return the wind bearing."""
directions = [
"N",
"NNE",
"NE",
"ENE",
"E",
"ESE",
"SE",
"SSE",
"S",
"SSW",
"SW",
"WSW",
"W",
"WNW",
"NW",
"NNW",
]
wind = {name: idx * 360 / 16 for idx, name in enumerate(directions)}
return wind.get(self.bom_data.get_reading("wind_dir"))
@property
def attribution(self):
"""Return the attribution."""
return "Data provided by the Australian Bureau of Meteorology"
| {
"content_hash": "dd827cf404afb9d9e490a4845df46990",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 86,
"avg_line_length": 30.256637168141594,
"alnum_prop": 0.6171395144779175,
"repo_name": "pschmitt/home-assistant",
"id": "94b9960c851bce6a16adaa18519e169c00a3b884",
"size": "3419",
"binary": false,
"copies": "6",
"ref": "refs/heads/dev",
"path": "homeassistant/components/bom/weather.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1522"
},
{
"name": "Python",
"bytes": "24807200"
},
{
"name": "Shell",
"bytes": "4342"
}
],
"symlink_target": ""
} |
"""
Convert RST discourse tree bank into gold standard sequence of actions.
This script converts the RST discourse treebank into a gold standard
sequence of shift reduce parsing actions.
This is based on Perl code (``trees2actionseq.pl``) from Kenji Sagae.
For each tree in the input, this will output a line representing the
shift-reduce action sequence for that tree. The actions in a sequence
will be space-separated.
:author: Michael Heilman
:author: Nitin Madnani
:organization: ETS
"""
import argparse
from nltk.tree import ParentedTree
from .discourse_parsing import ShiftReduceAction
def extract_parse_actions(tree):
"""
Extract a list of ``ShiftReduceAction`` objects for the given tree.
Parameters
----------
tree : nltk.tree.ParentedTree
The RST tree from which to extract the actions.
Returns
-------
actseq : list
List of ``ShiftReduceAction`` objects extracted from the tree.
"""
if tree.label() == '':
tree.set_label("ROOT")
assert tree.label() == "ROOT"
stack = []
cstack = [ParentedTree.fromstring("(DUMMY0 (DUMMY1 DUMMY3))")]
actseq = []
_extract_parse_actions_helper(tree, stack, cstack, actseq)
actseq = _merge_constituent_end_shifts(actseq)
return actseq
def _merge_constituent_end_shifts(actseq):
"""
Remove unnecessary unary reduce action.
The ``_extract_parse_actions_helper()`` function below always puts a '*'
on binary reduce actions, and then puts a unary reduce after a sequence of
binary reduce actions for the same constituent. This method will remove
the unary reduce and make the last binary reduce not have a '*', indicating
that the constituent is complete.
Parameters
----------
actseq : list
List of ``ShiftReduceAction`` objects.
Returns
-------
res : list
Updated list of ``ShiftReduceAction`` objects.
"""
res = []
for act in actseq:
if act.type == 'U' and res and res[-1].type == 'B':
assert f"{act.label}*" == res[-1].label
tmp_act = res.pop()
res.append(ShiftReduceAction(type=tmp_act.type, label=act.label))
else:
res.append(act)
return res
def _is_head_of(node1, node2):
"""
Check if ``node1`` is the head of ``node2``.
Parameters
----------
node1 : nltk.tree.ParentedTree
The first node.
node2 : nltk.tree.ParentedTree
The second node.
Returns
-------
is_head : bool
``True`` if ``node1`` is the head of ``node2``, ``False`` otherwise.
"""
node1parent = node1.parent()
if node2.parent() != node1parent:
return False
if node1.label().startswith("nucleus:"):
# TODO: simplify using or
if node2.label().startswith("satellite:"):
return True
elif node1parent.index(node1) < node1parent.index(node2):
return True
return False
def _extract_parse_actions_helper(node, stack, cstack, actseq):
"""
Helper function for ``extract_parse_actions()``.
Parameters
----------
node : nltk.tree.ParentedTree
The input node.
stack : list
The complete stack.
cstack : list
The current stack pointer.
actseq : list
List of ``ShiftReduceAction`` objects where the extracted actions
for this node will be stored.
"""
stack.append(node)
for child in node:
if isinstance(child, str):
continue
_extract_parse_actions_helper(child, stack, cstack, actseq)
nt = stack.pop()
# If the current node is a preterminal, add a shift action.
tmp_parent = cstack[-1].parent()
if isinstance(nt[0], str):
actseq.append(ShiftReduceAction(type='S', label="text"))
cstack.append(nt)
# Otherwise, we have visited all the children of a nonterminal node,
# and we should add a unary reduce
else:
actseq.append(ShiftReduceAction(type='U', label=tmp_parent.label()))
cstack.pop()
cstack.append(nt)
# Check to see if there should be any binary reduce actions.
chflg = True
while chflg and stack:
chflg = False
# If the two most recently visited nodes have the same parent,
# then add a ``binary_reduce`` action.
# Note that this approach will still work if there are multiple
# satellite children because the ones nearest to the nucleus will be
# reduced first, and eventually all the satellites will be binary
# reduced with the nucleus.
headR = _is_head_of(cstack[-1], cstack[-2])
headL = _is_head_of(cstack[-2], cstack[-1])
if headL or headR:
tmpRc = cstack.pop()
tmpLc = cstack.pop()
if headR:
# reduce left (right node becomes head)
cstack.append(tmpRc)
new_label = tmpRc.parent().label()
else:
# reduce right (left node becomes head)
cstack.append(tmpLc)
new_label = tmpLc.parent().label()
actseq.append(ShiftReduceAction(type='B',
label=f"{new_label}*"))
chflg = True
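# Illustrative sketch (not part of the original module): calling extract_parse_actions()
# on a tiny hand-written tree. The relation labels are made-up stand-ins for real RST
# relation names, and no particular output sequence is claimed here.
def example_action_sequence():
    """Print the shift-reduce actions for a toy two-EDU tree (illustration only)."""
    toy = ParentedTree.fromstring(
        "(ROOT (nucleus:span (text edu1)) (satellite:elaboration (text edu2)))")
    for act in extract_parse_actions(toy):
        print(f"{act.type}:{act.label}")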
def main(): # noqa: D103
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("mrg_path",
help="A file with constituent trees in ``mrg`` format.")
args = parser.parse_args()
with open(args.mrg_path) as constituent_file:
for line in constituent_file:
tree = ParentedTree.fromstring(line.strip())
actseq = extract_parse_actions(tree)
print(" ".join([f"{act.type}:{act.label}" for act in actseq]))
if __name__ == "__main__":
main()
| {
"content_hash": "61927a9648695a8afe8b19b0fc36a417",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 92,
"avg_line_length": 29.984615384615385,
"alnum_prop": 0.6114246622199419,
"repo_name": "EducationalTestingService/discourse-parsing",
"id": "b9454af0b3d320da7b3a5137e5fc351954da22ac",
"size": "5870",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "rstfinder/extract_actions_from_trees.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4830"
},
{
"name": "Python",
"bytes": "152502"
}
],
"symlink_target": ""
} |
import syslog
import random
import time
class OneLiners(object):
def __init__(self, lines=12, delay=5):
self._lines = lines
self._delay = delay
self._one_liners = [
"I wrote a few children's books... Not on purpose. - Steven Wright",
"I looked up my family tree and found out I was the sap. - Rodney Dangerfield",
"Any closet is a walk-in closet if you try hard enough. - Steven Wright",
"I think it's wrong that only one company makes the game Monopoly. - Steven Wright",
"I have a lot of growing up to do. I realized that the other day inside my fort. - Zach Galifianakis",
"I never forget a face, but in your case I'd be glad to make an exception. - Groucho Marx",
"Two cannibals were eating a clown - one said to the other, 'Does he taste funny to you?' - Tommy Cooper",
"I like to play chess with old men in the park, although it's hard to find 32 of them. - Emo Phillips",
"Room service? Send up a larger room. - Groucho Marx",
"Toughest job I ever had: selling doors, door to door. - Bill Bailey",
"How do you tell when you're out of invisible ink? - Steven Wright",
"The quickest way to a man's heart is through his chest. - Roseanne Barr",
"Men don't care what's on TV. They only care what else is on TV. - Jerry Seinfeld",
"She said she was approaching forty, and I couldn't help wondering from what direction - Bob Hope",
"Where there's a will - there's a relative! - Ricky Gervais",
"Who discovered we could get milk from cows, and what did he think he was doing at the time? - Billy Connolly",
"Did you hear about the shrimp that went to the prawn's cocktail party? He pulled a mussel. - Ken Dodd",
"I needed a password eight characters long so I picked Snow White and the Seven Dwarves. - Nick Helm",
"I'm so ugly. My father carries around the picture of the kid who came with his wallet. - Rodney Dangerfield",
"Whoever said nothing is impossible obviously hasn't tried nailing jelly to a tree. - John Candy",
"I have kleptomania. But when it gets bad, I take something for it. - Ken Dodd",
"Age is an issue of mind over matter. If you don't mind, it doesn't matter. - Mark Twain",
"Don't sweat the petty things and don't pet the sweaty things. - George Carlin",
"Well, here's another nice mess you've gotten me into. - Oliver Hardy"
]
def give_me_one(self):
syslog.openlog(logoption=syslog.LOG_PID, facility=syslog.LOG_USER)
for i in range(1, self._lines + 1):
line = self._one_liners[random.randrange(0, len(self._one_liners))]
print(line)
syslog.syslog(line)
time.sleep(self._delay)
if __name__ == '__main__':
one_liner = OneLiners()
one_liner.give_me_one()
| {
"content_hash": "f1dca69933224b6f22326f6d048db097",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 123,
"avg_line_length": 61.520833333333336,
"alnum_prop": 0.6356247883508297,
"repo_name": "jdgwartney/tsi-lab",
"id": "6c1fc14fc835c1617d1774a397bc06165fa39625",
"size": "3561",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "manifests/bin/one-liners.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "384"
},
{
"name": "PHP",
"bytes": "410"
},
{
"name": "Puppet",
"bytes": "6209"
},
{
"name": "Python",
"bytes": "58294"
},
{
"name": "Shell",
"bytes": "21773"
},
{
"name": "VimL",
"bytes": "30"
}
],
"symlink_target": ""
} |
import socket
import mock
import msgpack
from oslo_config import fixture as fixture_config
from oslo_context import context
import oslo_messaging
from oslo_utils import timeutils
from oslotest import mockpatch
from stevedore import extension
from ceilometer import collector
from ceilometer import dispatcher
from ceilometer import messaging
from ceilometer.publisher import utils
from ceilometer import sample
from ceilometer.tests import base as tests_base
class FakeException(Exception):
pass
class FakeConnection(object):
def create_worker(self, topic, proxy, pool_name):
pass
class TestCollector(tests_base.BaseTestCase):
def setUp(self):
super(TestCollector, self).setUp()
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF.import_opt("connection", "oslo_db.options", group="database")
self.CONF.set_override("connection", "log://", group='database')
self.CONF.set_override('telemetry_secret', 'not-so-secret',
group='publisher')
self._setup_messaging()
self.counter = sample.Sample(
name='foobar',
type='bad',
unit='F',
volume=1,
user_id='jd',
project_id='ceilometer',
resource_id='cat',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={},
).as_dict()
self.utf8_msg = utils.meter_message_from_counter(
sample.Sample(
name=u'test',
type=sample.TYPE_CUMULATIVE,
unit=u'',
volume=1,
user_id=u'test',
project_id=u'test',
resource_id=u'test_run_tasks',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={u'name': [([u'TestPublish'])]},
source=u'testsource',
),
'not-so-secret')
self.srv = collector.CollectorService()
self.useFixture(mockpatch.PatchObject(
self.srv.tg, 'add_thread',
side_effect=self._dummy_thread_group_add_thread))
@staticmethod
def _dummy_thread_group_add_thread(method):
method()
def _setup_messaging(self, enabled=True):
if enabled:
self.setup_messaging(self.CONF)
else:
self.useFixture(mockpatch.Patch(
'ceilometer.messaging.get_transport',
return_value=None))
def _setup_fake_dispatcher(self):
plugin = mock.MagicMock()
fake_dispatcher = extension.ExtensionManager.make_test_instance([
extension.Extension('test', None, None, plugin,),
], propagate_map_exceptions=True)
self.useFixture(mockpatch.Patch(
'ceilometer.dispatcher.load_dispatcher_manager',
return_value=fake_dispatcher))
return plugin
def _make_fake_socket(self, sample):
def recvfrom(size):
# Make the loop stop
self.srv.stop()
return msgpack.dumps(sample), ('127.0.0.1', 12345)
sock = mock.Mock()
sock.recvfrom = recvfrom
return sock
def _verify_udp_socket(self, udp_socket):
conf = self.CONF.collector
udp_socket.setsockopt.assert_called_once_with(socket.SOL_SOCKET,
socket.SO_REUSEADDR, 1)
udp_socket.bind.assert_called_once_with((conf.udp_address,
conf.udp_port))
def test_record_metering_data(self):
mock_dispatcher = self._setup_fake_dispatcher()
self.srv.dispatcher_manager = dispatcher.load_dispatcher_manager()
self.srv.record_metering_data(None, self.counter)
mock_dispatcher.record_metering_data.assert_called_once_with(
data=self.counter)
def test_udp_receive_base(self):
self._setup_messaging(False)
mock_dispatcher = self._setup_fake_dispatcher()
self.counter['source'] = 'mysource'
self.counter['counter_name'] = self.counter['name']
self.counter['counter_volume'] = self.counter['volume']
self.counter['counter_type'] = self.counter['type']
self.counter['counter_unit'] = self.counter['unit']
udp_socket = self._make_fake_socket(self.counter)
with mock.patch('socket.socket') as mock_socket:
mock_socket.return_value = udp_socket
self.srv.start()
mock_socket.assert_called_with(socket.AF_INET, socket.SOCK_DGRAM)
self._verify_udp_socket(udp_socket)
mock_dispatcher.record_metering_data.assert_called_once_with(
self.counter)
def test_udp_socket_ipv6(self):
self._setup_messaging(False)
self.CONF.set_override('udp_address', '::1', group='collector')
self._setup_fake_dispatcher()
sock = self._make_fake_socket('data')
with mock.patch.object(socket, 'socket') as mock_socket:
mock_socket.return_value = sock
self.srv.start()
mock_socket.assert_called_with(socket.AF_INET6, socket.SOCK_DGRAM)
def test_udp_receive_storage_error(self):
self._setup_messaging(False)
mock_dispatcher = self._setup_fake_dispatcher()
mock_dispatcher.record_metering_data.side_effect = self._raise_error
self.counter['source'] = 'mysource'
self.counter['counter_name'] = self.counter['name']
self.counter['counter_volume'] = self.counter['volume']
self.counter['counter_type'] = self.counter['type']
self.counter['counter_unit'] = self.counter['unit']
udp_socket = self._make_fake_socket(self.counter)
with mock.patch('socket.socket', return_value=udp_socket):
self.srv.start()
self._verify_udp_socket(udp_socket)
mock_dispatcher.record_metering_data.assert_called_once_with(
self.counter)
@staticmethod
def _raise_error(*args, **kwargs):
raise Exception
def test_udp_receive_bad_decoding(self):
self._setup_messaging(False)
udp_socket = self._make_fake_socket(self.counter)
with mock.patch('socket.socket', return_value=udp_socket):
with mock.patch('msgpack.loads', self._raise_error):
self.srv.start()
self._verify_udp_socket(udp_socket)
@mock.patch.object(oslo_messaging.MessageHandlingServer, 'start')
@mock.patch.object(collector.CollectorService, 'start_udp')
def test_only_udp(self, udp_start, rpc_start):
"""Check that only UDP is started if messaging transport is unset."""
self._setup_messaging(False)
udp_socket = self._make_fake_socket(self.counter)
with mock.patch('socket.socket', return_value=udp_socket):
self.srv.start()
self.assertEqual(0, rpc_start.call_count)
self.assertEqual(1, udp_start.call_count)
@mock.patch.object(oslo_messaging.MessageHandlingServer, 'start')
@mock.patch.object(collector.CollectorService, 'start_udp')
def test_only_rpc(self, udp_start, rpc_start):
"""Check that only RPC is started if udp_address is empty."""
self.CONF.set_override('enable_rpc', True, group='collector')
self.CONF.set_override('udp_address', '', group='collector')
self.srv.start()
# two calls because two servers (notification and rpc)
self.assertEqual(2, rpc_start.call_count)
self.assertEqual(0, udp_start.call_count)
def test_udp_receive_valid_encoding(self):
self._setup_messaging(False)
mock_dispatcher = self._setup_fake_dispatcher()
self.data_sent = []
with mock.patch('socket.socket',
return_value=self._make_fake_socket(self.utf8_msg)):
self.srv.start()
self.assertTrue(utils.verify_signature(
mock_dispatcher.method_calls[0][1][0],
"not-so-secret"))
@mock.patch('ceilometer.storage.impl_log.LOG')
def test_collector_no_mock(self, mylog):
self.CONF.set_override('enable_rpc', True, group='collector')
self.CONF.set_override('udp_address', '', group='collector')
self.srv.start()
mylog.info.side_effect = lambda *args: self.srv.stop()
client = messaging.get_rpc_client(self.transport, version='1.0')
cclient = client.prepare(topic='metering')
cclient.cast(context.RequestContext(),
'record_metering_data', data=[self.utf8_msg])
self.srv.rpc_server.wait()
mylog.info.assert_called_once_with(
'metering data test for test_run_tasks: 1')
def _test_collector_requeue(self, listener):
mock_dispatcher = self._setup_fake_dispatcher()
self.srv.dispatcher_manager = dispatcher.load_dispatcher_manager()
mock_dispatcher.record_metering_data.side_effect = Exception('boom')
mock_dispatcher.record_events.side_effect = Exception('boom')
self.srv.start()
endp = getattr(self.srv, listener).dispatcher.endpoints[0]
ret = endp.sample({}, 'pub_id', 'event', {}, {})
self.assertEqual(oslo_messaging.NotificationResult.REQUEUE,
ret)
@mock.patch.object(oslo_messaging.MessageHandlingServer, 'start',
mock.Mock())
@mock.patch.object(collector.CollectorService, 'start_udp', mock.Mock())
def test_collector_sample_requeue(self):
self.CONF.set_override('requeue_sample_on_dispatcher_error', True,
group='collector')
self._test_collector_requeue('sample_listener')
@mock.patch.object(oslo_messaging.MessageHandlingServer, 'start',
mock.Mock())
@mock.patch.object(collector.CollectorService, 'start_udp', mock.Mock())
def test_collector_event_requeue(self):
self.CONF.set_override('requeue_event_on_dispatcher_error', True,
group='collector')
self.CONF.set_override('store_events', True, group='notification')
self._test_collector_requeue('event_listener')
def _test_collector_no_requeue(self, listener):
mock_dispatcher = self._setup_fake_dispatcher()
self.srv.dispatcher_manager = dispatcher.load_dispatcher_manager()
mock_dispatcher.record_metering_data.side_effect = (FakeException
('boom'))
mock_dispatcher.record_events.side_effect = (FakeException
('boom'))
self.srv.start()
endp = getattr(self.srv, listener).dispatcher.endpoints[0]
self.assertRaises(FakeException, endp.sample, {}, 'pub_id',
'event', {}, {})
@mock.patch.object(oslo_messaging.MessageHandlingServer, 'start',
mock.Mock())
@mock.patch.object(collector.CollectorService, 'start_udp', mock.Mock())
def test_collector_sample_no_requeue(self):
self.CONF.set_override('requeue_sample_on_dispatcher_error', False,
group='collector')
self._test_collector_no_requeue('sample_listener')
@mock.patch.object(oslo_messaging.MessageHandlingServer, 'start',
mock.Mock())
@mock.patch.object(collector.CollectorService, 'start_udp', mock.Mock())
def test_collector_event_no_requeue(self):
self.CONF.set_override('requeue_event_on_dispatcher_error', False,
group='collector')
self.CONF.set_override('store_events', True, group='notification')
self._test_collector_no_requeue('event_listener')
| {
"content_hash": "c76e62b5086490db69aade1c57595053",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 79,
"avg_line_length": 40.50344827586207,
"alnum_prop": 0.6123786821045463,
"repo_name": "eayunstack/ceilometer",
"id": "9bc0661a0c9cd24fa53c8a519194da16924b6feb",
"size": "12347",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "ceilometer/tests/functional/test_collector.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2889032"
},
{
"name": "Shell",
"bytes": "31289"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from django.conf import settings
from django.core.management.base import BaseCommand
from hemres import models
from janeus import Janeus
class Command(BaseCommand):
help = 'Remove subscription of Janeus user'
args = 'member_id list_label'
def handle(self, *args, **kwargs):
if not hasattr(settings, 'JANEUS_SERVER'):
print("Janeus is not configured!")
return
if len(args) != 2:
print("Please provide two arguments")
return
# get member_id and label from args
member_id = int(args[0])
label = str(args[1])
self.unsubscribe(member_id, label)
@staticmethod
def unsubscribe(member_id, label):
# retrieve MailingList to add
qs = models.MailingList.objects.filter(label=label)
if len(qs):
ml = qs[0]
else:
print("Mailing list not found!")
return
# find member from Janeus
res = Janeus().by_lidnummer(member_id)
if res is None:
print("Janeus user not found!")
return
# retrieve Janeus subscriber
s = models.JaneusSubscriber.objects.filter(member_id=int(member_id))
if len(s):
s = s[0]
else:
print("Janeus subscriber not found!")
return
# remove mailing list
s.subscriptions.remove(ml)
# update for required attributes
s.update_janeus_newsletters()
# save!
s.save()
| {
"content_hash": "f5ba2cbf90ab02af803858d38b377958",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 76,
"avg_line_length": 27.280701754385966,
"alnum_prop": 0.5826366559485531,
"repo_name": "jonge-democraten/hemres",
"id": "b91ab304185d4c4292205a7e1abe07a5f781ec06",
"size": "1555",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hemres/management/commands/janeus_unsubscribe.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5207"
},
{
"name": "HTML",
"bytes": "11547"
},
{
"name": "Python",
"bytes": "74995"
},
{
"name": "Shell",
"bytes": "136"
}
],
"symlink_target": ""
} |
"""
Picarto.TV API Documentation
The Picarto.TV API documentation Note, for fixed access tokens, the header that needs to be sent is of the format: `Authorization: Bearer yourTokenHere` This can be generated at https://oauth.picarto.tv/ For chat API, see https://docs.picarto.tv/chat/chat.proto - contact via the email below for implementation details
OpenAPI spec version: 1.2.5
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Notification1(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'type': 'str',
'uuid': 'str',
'body': 'str',
'channel': 'str',
'uri': 'str',
'has_icon': 'str',
'timestamp': 'int',
'unread': 'bool'
}
attribute_map = {
'type': 'type',
'uuid': 'uuid',
'body': 'body',
'channel': 'channel',
'uri': 'uri',
'has_icon': 'hasIcon',
'timestamp': 'timestamp',
'unread': 'unread'
}
def __init__(self, type=None, uuid=None, body=None, channel=None, uri=None, has_icon=None, timestamp=None, unread=None):
"""
Notification1 - a model defined in Swagger
"""
self._type = None
self._uuid = None
self._body = None
self._channel = None
self._uri = None
self._has_icon = None
self._timestamp = None
self._unread = None
if type is not None:
self.type = type
if uuid is not None:
self.uuid = uuid
if body is not None:
self.body = body
if channel is not None:
self.channel = channel
if uri is not None:
self.uri = uri
if has_icon is not None:
self.has_icon = has_icon
if timestamp is not None:
self.timestamp = timestamp
if unread is not None:
self.unread = unread
@property
def type(self):
"""
Gets the type of this Notification1.
The notification type
:return: The type of this Notification1.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this Notification1.
The notification type
:param type: The type of this Notification1.
:type: str
"""
allowed_values = ["pinned", "live", "follow", "subscribe", "eventCreate", "eventStart", "recordingCreate", "multiInvite", "multiRevoke", "multiRemove", "multiLeave", "multiAccept"]
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}"
.format(type, allowed_values)
)
self._type = type
@property
def uuid(self):
"""
Gets the uuid of this Notification1.
UUID of the notification
:return: The uuid of this Notification1.
:rtype: str
"""
return self._uuid
@uuid.setter
def uuid(self, uuid):
"""
Sets the uuid of this Notification1.
UUID of the notification
:param uuid: The uuid of this Notification1.
:type: str
"""
self._uuid = uuid
@property
def body(self):
"""
Gets the body of this Notification1.
For global notifications, this contains the HTML body of the notification. Won't appear on non-pinned notifications
:return: The body of this Notification1.
:rtype: str
"""
return self._body
@body.setter
def body(self, body):
"""
Sets the body of this Notification1.
For global notifications, this contains the HTML body of the notification. Won't appear on non-pinned notifications
:param body: The body of this Notification1.
:type: str
"""
self._body = body
@property
def channel(self):
"""
Gets the channel of this Notification1.
The channel that this notification applies to. Won't appear on pinned notifications
:return: The channel of this Notification1.
:rtype: str
"""
return self._channel
@channel.setter
def channel(self, channel):
"""
Sets the channel of this Notification1.
The channel that this notification applies to. Won't appear on pinned notifications
:param channel: The channel of this Notification1.
:type: str
"""
self._channel = channel
@property
def uri(self):
"""
Gets the uri of this Notification1.
Link to go to when clicking the notification
:return: The uri of this Notification1.
:rtype: str
"""
return self._uri
@uri.setter
def uri(self, uri):
"""
Sets the uri of this Notification1.
Link to go to when clicking the notification
:param uri: The uri of this Notification1.
:type: str
"""
self._uri = uri
@property
def has_icon(self):
"""
Gets the has_icon of this Notification1.
If the channel attached to the notification has a user icon set
:return: The has_icon of this Notification1.
:rtype: str
"""
return self._has_icon
@has_icon.setter
def has_icon(self, has_icon):
"""
Sets the has_icon of this Notification1.
If the channel attached to the notification has a user icon set
:param has_icon: The has_icon of this Notification1.
:type: str
"""
self._has_icon = has_icon
@property
def timestamp(self):
"""
Gets the timestamp of this Notification1.
Unix timestamp of notification
:return: The timestamp of this Notification1.
:rtype: int
"""
return self._timestamp
@timestamp.setter
def timestamp(self, timestamp):
"""
Sets the timestamp of this Notification1.
Unix timestamp of notification
:param timestamp: The timestamp of this Notification1.
:type: int
"""
self._timestamp = timestamp
@property
def unread(self):
"""
Gets the unread of this Notification1.
Whether this notification is unread
:return: The unread of this Notification1.
:rtype: bool
"""
return self._unread
@unread.setter
def unread(self, unread):
"""
Sets the unread of this Notification1.
Whether this notification is unread
:param unread: The unread of this Notification1.
:type: bool
"""
self._unread = unread
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, Notification1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| {
"content_hash": "e289fc9214ea3d6fa4398766fe34b5ad",
"timestamp": "",
"source": "github",
"line_count": 325,
"max_line_length": 326,
"avg_line_length": 26.627692307692307,
"alnum_prop": 0.5507279870580079,
"repo_name": "Sythelux/Picarto.bundle",
"id": "614f061297d305e9ea187244d924af6cf3855bd0",
"size": "8671",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Contents/Libraries/Shared/PicartoClientAPI/models/notification_1.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "505"
},
{
"name": "Python",
"bytes": "927803"
}
],
"symlink_target": ""
} |
import unittest, time, sys, random
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_browse as h2b, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
h2o.init(3, use_hdfs=True, hdfs_version='cdh4', hdfs_name_node='mr-0x6')
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_B_hdfs_files(self):
# larger set in my local dir
# fails because classes aren't integers
# "allstate_claim_prediction_train_set.zip",
csvFilenameAll = [
"3G_poker_shuffle",
"TEST-poker1000.csv",
# corrupt zip file?
# "allstate_claim_prediction_train_set.zip",
"and-testing.data",
"arcene2_train.both",
"arcene_train.both",
"bestbuy_test.csv",
"bestbuy_train.csv",
"billion_rows.csv.gz",
"covtype.13x.data",
"covtype.13x.shuffle.data",
"covtype.169x.data",
"covtype.4x.shuffle.data",
"covtype.data",
"covtype4x.shuffle.data",
"hhp.unbalanced.012.1x11.data.gz",
"hhp.unbalanced.012.data.gz",
"hhp.unbalanced.data.gz",
"hhp2.os.noisy.0_1.data",
"hhp2.os.noisy.9_4.data",
"hhp_9_14_12.data",
"leads.csv",
"prostate_long_1G.csv",
]
# pick 8 randomly!
if (1==0):
csvFilenameList = random.sample(csvFilenameAll,8)
# Alternatively: do the list in order! Note the order is easy to hard
else:
csvFilenameList = csvFilenameAll
# pop open a browser on the cloud
h2b.browseTheCloud()
timeoutSecs = 1000
        # save the first, for all comparisons, to avoid slow drift with each iteration
firstglm = {}
for csvFilename in csvFilenameList:
# creates csvFilename.hex from file in hdfs dir
start = time.time()
print 'Parsing', csvFilename
csvPathname = "datasets/" + csvFilename
parseResult = h2i.import_parse(path=csvPathname, schema='hdfs', header=0,
timeoutSecs=timeoutSecs, retryDelaySecs=1.0)
print csvFilename, '\nparse time (python)', time.time() - start, 'seconds'
### print h2o.dump_json(parseResult['response'])
print "parse result:", parseResult['destination_key']
            # I use this if I want the larger set in my local dir
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
h2o_cmd.infoFromInspect(inspect, csvPathname)
# h2b.browseJsonHistoryAsUrlLastMatch("Inspect")
print "\n" + csvFilename
if __name__ == '__main__':
h2o.unit_main()
| {
"content_hash": "5a644662c71a7d6f29e5df2027ea46fc",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 87,
"avg_line_length": 35.80246913580247,
"alnum_prop": 0.5665517241379311,
"repo_name": "vbelakov/h2o",
"id": "7e3fbca59465b242d529bb53797451a4f0d482c2",
"size": "2900",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "py/testdir_0xdata_only/test_from_hdfs_hosts.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "7065"
},
{
"name": "C",
"bytes": "2461"
},
{
"name": "CSS",
"bytes": "216906"
},
{
"name": "CoffeeScript",
"bytes": "205094"
},
{
"name": "Emacs Lisp",
"bytes": "7446"
},
{
"name": "Groovy",
"bytes": "518"
},
{
"name": "HTML",
"bytes": "177967"
},
{
"name": "Java",
"bytes": "5177683"
},
{
"name": "JavaScript",
"bytes": "42958"
},
{
"name": "Makefile",
"bytes": "50927"
},
{
"name": "PHP",
"bytes": "8490"
},
{
"name": "Perl",
"bytes": "22594"
},
{
"name": "Python",
"bytes": "3244626"
},
{
"name": "R",
"bytes": "1631216"
},
{
"name": "Ruby",
"bytes": "299"
},
{
"name": "Scala",
"bytes": "39365"
},
{
"name": "Shell",
"bytes": "189829"
}
],
"symlink_target": ""
} |
'''
Datalink / Network / Transport layer headers
'''
import string
import socket
import struct
import vstruct
from vstruct.primitives import *
ETH_P_IP = 0x0800
ETH_P_ARP = 0x0806
ETH_P_IPv6 = 0x86dd
ETH_P_VLAN = 0x8100
IPPROTO_ICMP = 1
IPPROTO_TCP = 6
IPPROTO_UDP = 17
IPPROTO_IPV6 = 41
IPPROTO_GRE = 47
IPPROTO_ICMP6 = 58
TCP_F_FIN = 0x01
TCP_F_SYN = 0x02
TCP_F_RST = 0x04
TCP_F_PUSH = 0x08
TCP_F_ACK = 0x10
TCP_F_URG = 0x20
TCP_F_ECE = 0x40
TCP_F_CWR = 0x80
# Useful combinations...
TCP_F_SYNACK = (TCP_F_SYN | TCP_F_ACK)
ICMP_ECHOREPLY = 0
ICMP_DEST_UNREACH = 3
ICMP_SOURCE_QUENCH = 4
ICMP_REDIRECT = 5
ICMP_ECHO = 8
ICMP_TIME_EXCEEDED = 11
ICMP_PARAMETERPROB = 12
ICMP_TIMESTAMP = 13
ICMP_TIMESTAMPREPLY = 14
ICMP_INFO_REQUEST = 15
ICMP_INFO_REPLY = 16
ICMP_ADDRESS = 17
ICMP_ADDRESSREPLY = 18
GREPROTO_PPTP = 0x880b
def reprIPv4Addr(addr):
bytes = struct.pack('>I', addr)
return socket.inet_ntoa(bytes)
def decIPv4Addr(addrstr):
bytes = socket.inet_aton(addrstr)
return struct.unpack('>I', bytes)[0]
def reprIPv6Addr(addr):
return socket.inet_ntop(socket.AF_INET6, addr)
class IPv4Address(v_uint32):
def __init__(self, value=0):
v_uint32.__init__(self, value=value, bigend=True)
def __repr__(self):
bytes = struct.pack('>I', self._vs_value)
return socket.inet_ntop(socket.AF_INET, bytes)
class IPv6Address(v_bytes):
def __init__(self, value=0):
v_bytes.__init__(self, size=16)
def __repr__(self):
return socket.inet_ntop(socket.AF_INET6, self._vs_value)
def vsSetValue(self, val):
if all(c in string.printable for c in val):
val = socket.inet_pton(socket.AF_INET6, val)
if len(val) != self._vs_length:
raise Exception('v_bytes field set to wrong length!')
self._vs_value = val
class ETHERII(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.destmac = v_bytes(size=6)
self.srcmac = v_bytes(size=6)
self.etype = v_uint16(bigend=True)
def vsParse(self, sbytes, offset=0, fast=False):
if fast:
return vstruct.VStruct.vsParse(self, sbytes, offset=offset, fast=fast)
# If we end up with a vlan tag, reparse
ret = vstruct.VStruct.vsParse(self, sbytes, offset=offset)
if self.etype == ETH_P_VLAN:
self.vsInsertField('vtag', v_uint16(bigend=True), 'etype')
self.vsInsertField('vlan', v_uint16(bigend=True), 'etype')
ret = vstruct.VStruct.vsParse(self, sbytes, offset=offset)
return ret
class ETHERIIVLAN(ETHERII):
def __init__(self):
ETHERII.__init__(self)
self.vsInsertField('vtag', v_uint16(bigend=True), 'etype')
self.vsInsertField('vlan', v_uint16(bigend=True), 'etype')
class IPv4(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.veriphl = v_uint8()
self.tos = v_uint8()
self.totlen = v_uint16(bigend=True)
self.ipid = v_uint16(bigend=True)
self.flagfrag = v_uint16(bigend=True)
self.ttl = v_uint8()
self.proto = v_uint8()
self.cksum = v_uint16(bigend=True)
self.srcaddr = IPv4Address()
self.dstaddr = IPv4Address()
# Make our len over-ride
def __len__(self):
if self.veriphl == 0:
return vstruct.VStruct.__len__(self)
return (self.veriphl & 0x0f) * 4
class IPv6(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.verclsflowl= v_uint32(bigend=True)
self.totlen = v_uint16(bigend=True)
self.nexthdr = v_uint8()
self.hoplimit = v_uint8()
self.srcaddr = IPv6Address()
self.dstaddr = IPv6Address()
class TCP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.srcport = v_uint16(bigend=True)
self.dstport = v_uint16(bigend=True)
self.sequence = v_uint32(bigend=True)
self.ackseq = v_uint32(bigend=True)
self.doff = v_uint8()
self.flags = v_uint8()
self.window = v_uint16(bigend=True)
self.checksum = v_uint16(bigend=True)
self.urgent = v_uint16(bigend=True)
def __len__(self):
if self.doff == 0:
return vstruct.VStruct.__len__(self)
return self.doff >> 2
class UDP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.srcport = v_uint16(bigend=True)
self.dstport = v_uint16(bigend=True)
self.udplen = v_uint16(bigend=True)
self.checksum = v_uint16(bigend=True)
class ICMP(vstruct.VStruct):
def __init__(self):
vstruct.VStruct.__init__(self)
self.type = v_uint8()
self.code = v_uint8()
self.checksum = v_uint16(bigend=True)
#union field starting at offset 4 not included here
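# Illustrative usage sketch (added for clarity, not part of the original
# module): these structures are filled in with vstruct's vsParse(). The frame
# below is a zero-filled placeholder; real frames would come from a capture
# file or a raw socket.
if __name__ == '__main__':
    frame = b'\x00' * 64
    eth = ETHERII()
    offset = eth.vsParse(frame)      # returns the offset just past the header
    if eth.etype == ETH_P_IP:        # same check ETHERII.vsParse uses for VLAN tags
        ip = IPv4()
        ip.vsParse(frame, offset=offset)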
| {
"content_hash": "70a6b9e68e23a85f94dd10d7caeda5d9",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 82,
"avg_line_length": 29.511494252873565,
"alnum_prop": 0.5877312560856864,
"repo_name": "bat-serjo/vivisect",
"id": "d6d7a5ec7ce9d6c337b6610b3999b10928a985a9",
"size": "5135",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "vstruct/defs/inet.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "167795"
},
{
"name": "CSS",
"bytes": "15980"
},
{
"name": "Makefile",
"bytes": "355"
},
{
"name": "Python",
"bytes": "11662904"
},
{
"name": "Shell",
"bytes": "476"
}
],
"symlink_target": ""
} |
"""Support for BME680 Sensor over SMBus."""
import importlib
import logging
from time import time, sleep
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
TEMP_FAHRENHEIT, CONF_NAME, CONF_MONITORED_CONDITIONS)
from homeassistant.helpers.entity import Entity
from homeassistant.util.temperature import celsius_to_fahrenheit
_LOGGER = logging.getLogger(__name__)
CONF_I2C_ADDRESS = 'i2c_address'
CONF_I2C_BUS = 'i2c_bus'
CONF_OVERSAMPLING_TEMP = 'oversampling_temperature'
CONF_OVERSAMPLING_PRES = 'oversampling_pressure'
CONF_OVERSAMPLING_HUM = 'oversampling_humidity'
CONF_FILTER_SIZE = 'filter_size'
CONF_GAS_HEATER_TEMP = 'gas_heater_temperature'
CONF_GAS_HEATER_DURATION = 'gas_heater_duration'
CONF_AQ_BURN_IN_TIME = 'aq_burn_in_time'
CONF_AQ_HUM_BASELINE = 'aq_humidity_baseline'
CONF_AQ_HUM_WEIGHTING = 'aq_humidity_bias'
CONF_TEMP_OFFSET = 'temp_offset'
DEFAULT_NAME = 'BME680 Sensor'
DEFAULT_I2C_ADDRESS = 0x77
DEFAULT_I2C_BUS = 1
DEFAULT_OVERSAMPLING_TEMP = 8 # Temperature oversampling x 8
DEFAULT_OVERSAMPLING_PRES = 4 # Pressure oversampling x 4
DEFAULT_OVERSAMPLING_HUM = 2 # Humidity oversampling x 2
DEFAULT_FILTER_SIZE = 3 # IIR Filter Size
DEFAULT_GAS_HEATER_TEMP = 320 # Temperature in celsius 200 - 400
DEFAULT_GAS_HEATER_DURATION = 150 # Heater duration in ms 1 - 4032
DEFAULT_AQ_BURN_IN_TIME = 300 # 300 second burn in time for AQ gas measurement
DEFAULT_AQ_HUM_BASELINE = 40 # 40%, an optimal indoor humidity.
DEFAULT_AQ_HUM_WEIGHTING = 25 # 25% Weighting of humidity to gas in AQ score
DEFAULT_TEMP_OFFSET = 0 # No calibration out of the box.
SENSOR_TEMP = 'temperature'
SENSOR_HUMID = 'humidity'
SENSOR_PRESS = 'pressure'
SENSOR_GAS = 'gas'
SENSOR_AQ = 'airquality'
SENSOR_TYPES = {
SENSOR_TEMP: ['Temperature', None],
SENSOR_HUMID: ['Humidity', '%'],
SENSOR_PRESS: ['Pressure', 'mb'],
SENSOR_GAS: ['Gas Resistance', 'Ohms'],
SENSOR_AQ: ['Air Quality', '%']
}
DEFAULT_MONITORED = [SENSOR_TEMP, SENSOR_HUMID, SENSOR_PRESS, SENSOR_AQ]
OVERSAMPLING_VALUES = set([0, 1, 2, 4, 8, 16])
FILTER_VALUES = set([0, 1, 3, 7, 15, 31, 63, 127])
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS):
cv.positive_int,
vol.Optional(CONF_MONITORED_CONDITIONS, default=DEFAULT_MONITORED):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
vol.Optional(CONF_I2C_BUS, default=DEFAULT_I2C_BUS): cv.positive_int,
vol.Optional(CONF_OVERSAMPLING_TEMP, default=DEFAULT_OVERSAMPLING_TEMP):
vol.All(vol.Coerce(int), vol.In(OVERSAMPLING_VALUES)),
vol.Optional(CONF_OVERSAMPLING_PRES, default=DEFAULT_OVERSAMPLING_PRES):
vol.All(vol.Coerce(int), vol.In(OVERSAMPLING_VALUES)),
vol.Optional(CONF_OVERSAMPLING_HUM, default=DEFAULT_OVERSAMPLING_HUM):
vol.All(vol.Coerce(int), vol.In(OVERSAMPLING_VALUES)),
vol.Optional(CONF_FILTER_SIZE, default=DEFAULT_FILTER_SIZE):
vol.All(vol.Coerce(int), vol.In(FILTER_VALUES)),
vol.Optional(CONF_GAS_HEATER_TEMP, default=DEFAULT_GAS_HEATER_TEMP):
vol.All(vol.Coerce(int), vol.Range(200, 400)),
vol.Optional(CONF_GAS_HEATER_DURATION,
default=DEFAULT_GAS_HEATER_DURATION):
vol.All(vol.Coerce(int), vol.Range(1, 4032)),
vol.Optional(CONF_AQ_BURN_IN_TIME, default=DEFAULT_AQ_BURN_IN_TIME):
cv.positive_int,
vol.Optional(CONF_AQ_HUM_BASELINE, default=DEFAULT_AQ_HUM_BASELINE):
vol.All(vol.Coerce(int), vol.Range(1, 100)),
vol.Optional(CONF_AQ_HUM_WEIGHTING, default=DEFAULT_AQ_HUM_WEIGHTING):
vol.All(vol.Coerce(int), vol.Range(1, 100)),
vol.Optional(CONF_TEMP_OFFSET, default=DEFAULT_TEMP_OFFSET):
vol.All(vol.Coerce(float), vol.Range(-100.0, 100.0)),
})
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Set up the BME680 sensor."""
SENSOR_TYPES[SENSOR_TEMP][1] = hass.config.units.temperature_unit
name = config.get(CONF_NAME)
sensor_handler = await hass.async_add_job(_setup_bme680, config)
if sensor_handler is None:
return
dev = []
for variable in config[CONF_MONITORED_CONDITIONS]:
dev.append(BME680Sensor(
sensor_handler, variable, SENSOR_TYPES[variable][1], name))
async_add_entities(dev)
return
def _setup_bme680(config):
"""Set up and configure the BME680 sensor."""
from smbus import SMBus # pylint: disable=import-error
bme680 = importlib.import_module('bme680')
sensor_handler = None
sensor = None
try:
# pylint: disable=no-member
i2c_address = config.get(CONF_I2C_ADDRESS)
bus = SMBus(config.get(CONF_I2C_BUS))
sensor = bme680.BME680(i2c_address, bus)
# Configure Oversampling
os_lookup = {
0: bme680.OS_NONE,
1: bme680.OS_1X,
2: bme680.OS_2X,
4: bme680.OS_4X,
8: bme680.OS_8X,
16: bme680.OS_16X
}
sensor.set_temperature_oversample(
os_lookup[config.get(CONF_OVERSAMPLING_TEMP)]
)
sensor.set_temp_offset(
config.get(CONF_TEMP_OFFSET)
)
sensor.set_humidity_oversample(
os_lookup[config.get(CONF_OVERSAMPLING_HUM)]
)
sensor.set_pressure_oversample(
os_lookup[config.get(CONF_OVERSAMPLING_PRES)]
)
# Configure IIR Filter
filter_lookup = {
0: bme680.FILTER_SIZE_0,
1: bme680.FILTER_SIZE_1,
3: bme680.FILTER_SIZE_3,
7: bme680.FILTER_SIZE_7,
15: bme680.FILTER_SIZE_15,
31: bme680.FILTER_SIZE_31,
63: bme680.FILTER_SIZE_63,
127: bme680.FILTER_SIZE_127
}
sensor.set_filter(
filter_lookup[config.get(CONF_FILTER_SIZE)]
)
# Configure the Gas Heater
if (
SENSOR_GAS in config[CONF_MONITORED_CONDITIONS] or
SENSOR_AQ in config[CONF_MONITORED_CONDITIONS]
):
sensor.set_gas_status(bme680.ENABLE_GAS_MEAS)
sensor.set_gas_heater_duration(config[CONF_GAS_HEATER_DURATION])
sensor.set_gas_heater_temperature(config[CONF_GAS_HEATER_TEMP])
sensor.select_gas_heater_profile(0)
else:
sensor.set_gas_status(bme680.DISABLE_GAS_MEAS)
except (RuntimeError, IOError):
_LOGGER.error("BME680 sensor not detected at 0x%02x", i2c_address)
return None
sensor_handler = BME680Handler(
sensor,
(SENSOR_GAS in config[CONF_MONITORED_CONDITIONS] or
SENSOR_AQ in config[CONF_MONITORED_CONDITIONS]),
config[CONF_AQ_BURN_IN_TIME],
config[CONF_AQ_HUM_BASELINE],
config[CONF_AQ_HUM_WEIGHTING]
)
sleep(0.5) # Wait for device to stabilize
if not sensor_handler.sensor_data.temperature:
_LOGGER.error("BME680 sensor failed to Initialize")
return None
return sensor_handler
class BME680Handler:
"""BME680 sensor working in i2C bus."""
class SensorData:
"""Sensor data representation."""
def __init__(self):
"""Initialize the sensor data object."""
self.temperature = None
self.humidity = None
self.pressure = None
self.gas_resistance = None
self.air_quality = None
def __init__(
self, sensor, gas_measurement=False,
burn_in_time=300, hum_baseline=40, hum_weighting=25
):
"""Initialize the sensor handler."""
self.sensor_data = BME680Handler.SensorData()
self._sensor = sensor
self._gas_sensor_running = False
self._hum_baseline = hum_baseline
self._hum_weighting = hum_weighting
self._gas_baseline = None
if gas_measurement:
import threading
threading.Thread(
target=self._run_gas_sensor,
kwargs={'burn_in_time': burn_in_time},
name='BME680Handler_run_gas_sensor'
).start()
self.update(first_read=True)
def _run_gas_sensor(self, burn_in_time):
"""Calibrate the Air Quality Gas Baseline."""
if self._gas_sensor_running:
return
self._gas_sensor_running = True
# Pause to allow initial data read for device validation.
sleep(1)
start_time = time()
curr_time = time()
burn_in_data = []
_LOGGER.info("Beginning %d second gas sensor burn in for Air Quality",
burn_in_time)
while curr_time - start_time < burn_in_time:
curr_time = time()
if (
self._sensor.get_sensor_data() and
self._sensor.data.heat_stable
):
gas_resistance = self._sensor.data.gas_resistance
burn_in_data.append(gas_resistance)
self.sensor_data.gas_resistance = gas_resistance
_LOGGER.debug("AQ Gas Resistance Baseline reading %2f Ohms",
gas_resistance)
sleep(1)
_LOGGER.debug("AQ Gas Resistance Burn In Data (Size: %d): \n\t%s",
len(burn_in_data), burn_in_data)
self._gas_baseline = sum(burn_in_data[-50:]) / 50.0
_LOGGER.info("Completed gas sensor burn in for Air Quality")
_LOGGER.info("AQ Gas Resistance Baseline: %f", self._gas_baseline)
while True:
if (
self._sensor.get_sensor_data() and
self._sensor.data.heat_stable
):
self.sensor_data.gas_resistance = (
self._sensor.data.gas_resistance
)
self.sensor_data.air_quality = self._calculate_aq_score()
sleep(1)
def update(self, first_read=False):
"""Read sensor data."""
if first_read:
# Attempt first read, it almost always fails first attempt
self._sensor.get_sensor_data()
if self._sensor.get_sensor_data():
self.sensor_data.temperature = self._sensor.data.temperature
self.sensor_data.humidity = self._sensor.data.humidity
self.sensor_data.pressure = self._sensor.data.pressure
def _calculate_aq_score(self):
"""Calculate the Air Quality Score."""
hum_baseline = self._hum_baseline
hum_weighting = self._hum_weighting
gas_baseline = self._gas_baseline
gas_resistance = self.sensor_data.gas_resistance
gas_offset = gas_baseline - gas_resistance
hum = self.sensor_data.humidity
hum_offset = hum - hum_baseline
# Calculate hum_score as the distance from the hum_baseline.
if hum_offset > 0:
hum_score = (
(100 - hum_baseline - hum_offset) /
(100 - hum_baseline) *
hum_weighting
)
else:
hum_score = (
(hum_baseline + hum_offset) /
hum_baseline *
hum_weighting
)
# Calculate gas_score as the distance from the gas_baseline.
if gas_offset > 0:
gas_score = (gas_resistance / gas_baseline) * (100 - hum_weighting)
else:
gas_score = 100 - hum_weighting
# Calculate air quality score.
return hum_score + gas_score
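    # Worked example (added for illustration, not in the original source):
    # with the defaults hum_baseline=40, hum_weighting=25 and a burned-in
    # gas_baseline of 250000 Ohms, a reading of 45% humidity and 200000 Ohms
    # gives hum_score = (100 - 40 - 5) / (100 - 40) * 25 ~= 22.9 and
    # gas_score = 200000 / 250000 * (100 - 25) = 60.0, so the resulting air
    # quality score is roughly 82.9%.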
class BME680Sensor(Entity):
"""Implementation of the BME680 sensor."""
def __init__(self, bme680_client, sensor_type, temp_unit, name):
"""Initialize the sensor."""
self.client_name = name
self._name = SENSOR_TYPES[sensor_type][0]
self.bme680_client = bme680_client
self.temp_unit = temp_unit
self.type = sensor_type
self._state = None
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
@property
def name(self):
"""Return the name of the sensor."""
return '{} {}'.format(self.client_name, self._name)
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of the sensor."""
return self._unit_of_measurement
async def async_update(self):
"""Get the latest data from the BME680 and update the states."""
await self.hass.async_add_job(self.bme680_client.update)
if self.type == SENSOR_TEMP:
temperature = round(self.bme680_client.sensor_data.temperature, 1)
if self.temp_unit == TEMP_FAHRENHEIT:
temperature = round(celsius_to_fahrenheit(temperature), 1)
self._state = temperature
elif self.type == SENSOR_HUMID:
self._state = round(self.bme680_client.sensor_data.humidity, 1)
elif self.type == SENSOR_PRESS:
self._state = round(self.bme680_client.sensor_data.pressure, 1)
elif self.type == SENSOR_GAS:
self._state = int(
round(self.bme680_client.sensor_data.gas_resistance, 0)
)
elif self.type == SENSOR_AQ:
aq_score = self.bme680_client.sensor_data.air_quality
if aq_score is not None:
self._state = round(aq_score, 1)
| {
"content_hash": "83d6fd3c4f79c7aa8e24e2c946b05a98",
"timestamp": "",
"source": "github",
"line_count": 366,
"max_line_length": 79,
"avg_line_length": 37.01912568306011,
"alnum_prop": 0.6117056609343863,
"repo_name": "auduny/home-assistant",
"id": "73fe827be6ba202865e73845ad0c33f54d37db32",
"size": "13549",
"binary": false,
"copies": "7",
"ref": "refs/heads/dev",
"path": "homeassistant/components/bme680/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "407"
},
{
"name": "Python",
"bytes": "15129018"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17609"
}
],
"symlink_target": ""
} |
from copy import copy
import inspect
import logging
import pickle
import random
import os.path
import numpy
from sklearn.cross_validation import StratifiedKFold
from sklearn.metrics import precision_recall_curve
from iepy import defaults
from iepy.extraction.relation_extraction_classifier import RelationExtractionClassifier
logger = logging.getLogger(__name__)
HIPREC = (10, 1) # Precision is 10x more important than recall
HIREC = (1, 2) # Recall is 2x more important than precision
class ActiveLearningCore:
"""
IEPY's main class. Implements an active learning information extraction
pipeline.
From the user's point of view this class is meant to be used like this::
extractor = ActiveLearningCore(relation, lbl_evidences)
extractor.start() # blocking
while UserIsNotTired and extractor.questions:
question = extractor.questions[0]
answer = ask_user(question)
extractor.add_answer(question, answer)
extractor.process()
predictions = extractor.predict() # profit
"""
#
# IEPY User API
#
def __init__(self, relation, labeled_evidences, extractor_config=None,
tradeoff=None, extractor=None, classifier=None):
if extractor is None:
extractor = RelationExtractionClassifier
self.extractor = extractor
self.relation = relation
self.classifier = classifier
self._setup_labeled_evidences(labeled_evidences)
self._questions = list(self.candidate_evidence)
if extractor_config is None:
extractor_config = defaults.extractor_config
self.extractor_config = extractor_config
self.tradeoff = tradeoff
self.aimed_tradeoff = None
self.threshold = None
_DUMPED_ATTRS = ['relation', 'extractor', 'extractor_config', 'classifier',
'tradeoff', 'aimed_tradeoff', 'threshold']
def save(self, file_path):
if os.path.exists(file_path):
raise ValueError("Output file path already exists")
to_dump = [getattr(self, attr) for attr in self._DUMPED_ATTRS]
with open(file_path, 'wb') as filehandler:
pickle.dump(to_dump, filehandler)
@classmethod
def load(cls, file_path, **kwargs):
if not os.path.exists(file_path):
raise ValueError("File does not exists")
with open(file_path, 'rb') as filehandler:
data = pickle.load(filehandler)
loading_kwargs = copy(kwargs)
if 'labeled_evidences' not in kwargs:
loading_kwargs['labeled_evidences'] = {}
after = {}
specs = inspect.getargspec(cls)
for attr, value in zip(cls._DUMPED_ATTRS, data):
if attr in specs.args:
loading_kwargs[attr] = value
else:
after[attr] = value
self = cls(**loading_kwargs)
for after_attr, value in after.items():
            print('Setting ' + after_attr)
setattr(self, after_attr, value)
return self
def start(self):
"""
Organizes the internal information, and prepares the first "questions" that
need to be answered.
"""
        # API compliance. Nothing is done in the current implementation.
pass
@property
def questions(self):
"""Returns a list of candidate evidences that would be good to have
labels for.
Order is important: labels for evidences listed firsts are more valuable.
"""
return self._questions
def add_answer(self, evidence, answer):
"""
Not blocking.
Informs to the Core the evidence label (True or False) decided
from the outside.
"""
assert answer in (True, False)
self.labeled_evidence[evidence] = answer
for list_ in (self._questions, self.candidate_evidence): # TODO: Check performance. Should use set?
list_.remove(evidence)
def process(self):
"""
Blocking.
With all the labeled evidences, new questions are generated, optimizing the
future gain of having those evidences labeled.
After calling this method the values returned by `questions`
and `predict` will change.
"""
yesno = set(self.labeled_evidence.values())
if len(yesno) > 2:
msg = "Evidence is not binary! Can't proceed."
logger.error(msg)
raise ValueError(msg)
if len(yesno) < 2:
logger.debug("Not enough labels to train.")
return
if self.tradeoff:
self.estimate_threshold()
self.train_relation_classifier()
self.rank_candidate_evidence()
self.choose_questions()
def predict(self, candidates):
"""
        Using the internally trained classifier, all candidate evidence is automatically
labeled.
Returns a dict {evidence: True/False}, where the boolean label indicates if
the relation is present on that evidence or not.
"""
if not self.classifier:
logger.info("There is no trained classifier. Can't predict")
return {}
# for every already labeled candidate, instead of asking the classifier we'll use
# the actual label
knowns = copy(self.labeled_evidence)
to_predict = [c for c in candidates if c not in knowns]
if self.threshold is None:
labels = self.classifier.predict(to_predict)
else:
scores = self.classifier.decision_function(to_predict)
labels = scores >= self.threshold
prediction = dict(zip(to_predict, map(bool, labels)))
prediction.update(knowns)
return prediction
def estimate_threshold(self):
scores, y_true = self.get_kfold_data()
if scores is None:
return
prec, rec, thres = precision_recall_curve(y_true, scores)
prec[-1] = 0.0 # To avoid choosing the last phony value
c_prec, c_rec = self.tradeoff
# Below is a linear function on precision and recall, expressed using
# numpy notation, we're maximizing it.
i = (prec * c_prec + rec * c_rec).argmax() # Index of the maximum
assert i < len(thres) # Because prec[-1] is 0.0
self.aimed_tradeoff = (prec[i], rec[i])
self.threshold = thres[i]
s = "Using {} samples, threshold aiming at precision={:.4f} and recall={:.4f}"
logger.debug(s.format(len(scores), prec[i], rec[i]))
# Instance attributes:
# questions: A list of evidence
# ranked_candidate_evidence: A dict candidate_evidence -> float
# aimed_tradeoff: A (prec, rec) tuple with the precision/recall tradeoff
# that the threshold aims to achieve.
#
# Private methods
#
def _setup_labeled_evidences(self, labeled_evidences):
self.candidate_evidence = []
self.labeled_evidence = {}
for e, lbl in labeled_evidences.items():
e.relation = self.relation
if lbl is None:
self.candidate_evidence.append(e)
else:
self.labeled_evidence[e] = bool(lbl)
if not self.candidate_evidence:
raise ValueError("Cannot start core without candidate evidence")
logger.info("Loaded {} candidate evidence and {} labeled evidence".format(
len(self.candidate_evidence), len(self.labeled_evidence)))
def train_relation_classifier(self):
X = []
y = []
for evidence, score in self.labeled_evidence.items():
X.append(evidence)
y.append(int(score))
assert y[-1] in (True, False)
self.classifier = self.extractor(**self.extractor_config)
self.classifier.fit(X, y)
def rank_candidate_evidence(self):
if not self.candidate_evidence:
self.ranked_candidate_evidence = {}
logger.info("No evidence left to rank.")
return
N = min(10 * len(self.labeled_evidence), len(self.candidate_evidence))
logger.info("Ranking a sample of {} candidate evidence".format(N))
sample = random.sample(self.candidate_evidence, N)
ranks = self.classifier.decision_function(sample)
self.ranked_candidate_evidence = dict(zip(sample, ranks))
ranks = [abs(x) for x in ranks]
logger.info("Ranking completed, lowest absolute rank={}, "
"highest absolute rank={}".format(min(ranks), max(ranks)))
def choose_questions(self):
# Criteria: Answer first candidates with decision function near 0
# because they are the most uncertain for the classifier.
self._questions = sorted(self.ranked_candidate_evidence,
key=lambda x: abs(self.ranked_candidate_evidence[x]))
def get_kfold_data(self):
"""
Perform k-fold cross validation and return (scores, y_true) where
scores is a numpy array with decision function scores and y_true
is a numpy array with the true label for that evidence.
"""
allX = []
ally = []
for evidence, score in self.labeled_evidence.items():
allX.append(evidence)
ally.append(int(score))
assert ally[-1] in (True, False)
allX = numpy.array(allX)
ally = numpy.array(ally)
if numpy.bincount(ally).min() < 5:
return None, None # Too little data to do 5-fold cross validation
logger.debug("Performing 5-fold cross validation")
scores = []
y_true = []
for train_index, test_index in StratifiedKFold(ally, 5):
X = allX[train_index]
y = ally[train_index]
c = self.extractor(**self.extractor_config)
c.fit(X, y)
y_true.append(ally[test_index])
scores.append(c.decision_function(allX[test_index]))
return numpy.hstack(scores), numpy.hstack(y_true)
| {
"content_hash": "b8bb718055d7b1ad53012fea7329d059",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 108,
"avg_line_length": 38.39312977099237,
"alnum_prop": 0.6125857441097524,
"repo_name": "machinalis/iepy",
"id": "a366dff32f32e1cb3287557ac5c1182f58c4e31d",
"size": "10059",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "iepy/extraction/active_learning_core.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "25531"
},
{
"name": "HTML",
"bytes": "26374"
},
{
"name": "JavaScript",
"bytes": "26234"
},
{
"name": "Python",
"bytes": "401781"
}
],
"symlink_target": ""
} |
class ZiplineError(Exception):
msg = None
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
self.message = str(self)
def __str__(self):
msg = self.msg.format(**self.kwargs)
return msg
__unicode__ = __str__
__repr__ = __str__
class WrongDataForTransform(ZiplineError):
"""
Raised whenever a rolling transform is called on an event that
does not have the necessary properties.
"""
msg = "{transform} requires {fields}. Event cannot be processed."
class UnsupportedSlippageModel(ZiplineError):
"""
Raised if a user script calls the override_slippage magic
    with a slippage object that isn't a VolumeShareSlippage or
    FixedSlippage
"""
msg = """
You attempted to override slippage with an unsupported class. \
Please use VolumeShareSlippage or FixedSlippage.
""".strip()
class OverrideSlippagePostInit(ZiplineError):
    # Raised if a user's script calls override_slippage magic
# after the initialize method has returned.
msg = """
You attempted to override slippage outside of `initialize`. \
You may only call override_slippage in your initialize method.
""".strip()
class RegisterTradingControlPostInit(ZiplineError):
    # Raised if a user's script registers a trading control after initialize
# has been run.
msg = """
You attempted to set a trading control outside of `initialize`. \
Trading controls may only be set in your initialize method.
""".strip()
class RegisterAccountControlPostInit(ZiplineError):
    # Raised if a user's script registers an account control after initialize
# has been run.
msg = """
You attempted to set an account control outside of `initialize`. \
Account controls may only be set in your initialize method.
""".strip()
class UnsupportedCommissionModel(ZiplineError):
"""
Raised if a user script calls the override_commission magic
with a commission object that isn't a PerShare, PerTrade or
PerDollar commission
"""
msg = """
You attempted to override commission with an unsupported class. \
Please use PerShare or PerTrade.
""".strip()
class OverrideCommissionPostInit(ZiplineError):
"""
    Raised if a user's script calls override_commission magic
after the initialize method has returned.
"""
msg = """
You attempted to override commission outside of `initialize`. \
You may only call override_commission in your initialize method.
""".strip()
class TransactionWithNoVolume(ZiplineError):
"""
Raised if a transact call returns a transaction with zero volume.
"""
msg = """
Transaction {txn} has a volume of zero.
""".strip()
class TransactionWithWrongDirection(ZiplineError):
"""
Raised if a transact call returns a transaction with a direction that
does not match the order.
"""
msg = """
Transaction {txn} not in same direction as corresponding order {order}.
""".strip()
class TransactionWithNoAmount(ZiplineError):
"""
Raised if a transact call returns a transaction with zero amount.
"""
msg = """
Transaction {txn} has an amount of zero.
""".strip()
class TransactionVolumeExceedsOrder(ZiplineError):
"""
Raised if a transact call returns a transaction with a volume greater than
the corresponding order.
"""
msg = """
Transaction volume of {txn} exceeds the order volume of {order}.
""".strip()
class UnsupportedOrderParameters(ZiplineError):
"""
Raised if a set of mutually exclusive parameters are passed to an order
call.
"""
msg = "{msg}"
class BadOrderParameters(ZiplineError):
"""
Raised if any impossible parameters (nan, negative limit/stop)
are passed to an order call.
"""
msg = "{msg}"
class OrderDuringInitialize(ZiplineError):
"""
Raised if order is called during initialize()
"""
msg = "{msg}"
class AccountControlViolation(ZiplineError):
"""
Raised if the account violates a constraint set by a AccountControl.
"""
msg = """
Account violates account constraint {constraint}.
""".strip()
class TradingControlViolation(ZiplineError):
"""
Raised if an order would violate a constraint set by a TradingControl.
"""
msg = """
Order for {amount} shares of {asset} at {datetime} violates trading constraint
{constraint}.
""".strip()
class IncompatibleHistoryFrequency(ZiplineError):
"""
Raised when a frequency is given to history which is not supported.
At least, not yet.
"""
msg = """
Requested history at frequency '{frequency}' cannot be created with data
at frequency '{data_frequency}'.
""".strip()
class MultipleSymbolsFound(ZiplineError):
"""
Raised when a symbol() call contains a symbol that changed over
time and is thus not resolvable without additional information
provided via as_of_date.
"""
msg = """
Multiple symbols with the name '{symbol}' found. Use the
    'as_of_date' argument to specify when the date symbol-lookup
should be valid.
Possible options:{options}
""".strip()
class SymbolNotFound(ZiplineError):
"""
    Raised when a symbol() call contains a non-existent symbol.
"""
msg = """
Symbol '{symbol}' was not found.
""".strip()
class RootSymbolNotFound(ZiplineError):
"""
    Raised when a lookup_future_chain() call contains a non-existent symbol.
"""
msg = """
Root symbol '{root_symbol}' was not found.
""".strip()
class SidNotFound(ZiplineError):
"""
Raised when a retrieve_asset() call contains a non-existent sid.
"""
msg = """
Asset with sid '{sid}' was not found.
""".strip()
class InvalidAssetType(ZiplineError):
"""
Raised when an AssetFinder tries to build an Asset with an invalid
AssetType.
"""
msg = """
AssetMetaData contained an invalid Asset type: '{asset_type}'.
""".strip()
class ConsumeAssetMetaDataError(ZiplineError):
"""
Raised when AssetFinder.consume() is called on an invalid object.
"""
msg = """
AssetFinder can not consume metadata of type {obj}. Metadata must be a dict, a
DataFrame, or a tables.Table. If the provided metadata is a Table, the rows
must contain both or one of 'sid' or 'symbol'.
""".strip()
class MapAssetIdentifierIndexError(ZiplineError):
"""
Raised when AssetMetaData.map_identifier_index_to_sids() is called on an
index of invalid objects.
"""
msg = """
AssetFinder can not map an index with values of type {obj}. Asset indices of
DataFrames or Panels must be integer sids, string symbols, or Asset objects.
""".strip()
class SidAssignmentError(ZiplineError):
"""
Raised when an AssetFinder tries to build an Asset that does not have a sid
and that AssetFinder is not permitted to assign sids.
"""
msg = """
AssetFinder metadata is missing a SID for identifier '{identifier}'.
""".strip()
class NoSourceError(ZiplineError):
"""
Raised when no source is given to the pipeline
"""
msg = """
No data source given.
""".strip()
class PipelineDateError(ZiplineError):
"""
Raised when only one date is passed to the pipeline
"""
msg = """
Only one simulation date given. Please specify both the 'start' and 'end' for
the simulation, or neither. If neither is given, the start and end of the
DataSource will be used. Given start = '{start}', end = '{end}'
""".strip()
class WindowLengthTooLong(ZiplineError):
"""
Raised when a trailing window is instantiated with a lookback greater than
the length of the underlying array.
"""
msg = (
"Can't construct a rolling window of length "
"{window_length} on an array of length {nrows}."
).strip()
class WindowLengthNotPositive(ZiplineError):
"""
Raised when a trailing window would be instantiated with a length less than
1.
"""
msg = (
"Expected a window_length greater than 0, got {window_length}."
).strip()
class InputTermNotAtomic(ZiplineError):
"""
Raised when a non-atomic term is specified as an input to an FFC term with
a lookback window.
"""
msg = (
"Can't compute {parent} with non-atomic input {child}."
)
class TermInputsNotSpecified(ZiplineError):
"""
Raised if a user attempts to construct a term without specifying inputs and
that term does not have class-level default inputs.
"""
msg = "{termname} requires inputs, but no inputs list was passed."
class WindowLengthNotSpecified(ZiplineError):
"""
    Raised if a user attempts to construct a term without specifying a
    window_length and that term does not have a class-level default
    window_length.
"""
msg = (
"{termname} requires a window_length, but no window_length was passed."
)
class DTypeNotSpecified(ZiplineError):
"""
Raised if a user attempts to construct a term without specifying dtype and
that term does not have class-level default dtype.
"""
msg = (
"{termname} requires a dtype, but no dtype was passed."
)
class BadPercentileBounds(ZiplineError):
"""
Raised by API functions accepting percentile bounds when the passed bounds
are invalid.
"""
msg = (
"Percentile bounds must fall between 0.0 and 100.0, and min must be "
"less than max."
"\nInputs were min={min_percentile}, max={max_percentile}."
)
class UnknownRankMethod(ZiplineError):
"""
Raised during construction of a Rank factor when supplied a bad Rank
method.
"""
msg = (
"Unknown ranking method: '{method}'. "
"`method` must be one of {choices}"
)
class AddTermPostInit(ZiplineError):
"""
Raised when a user tries to call add_{filter,factor,classifier}
outside of initialize.
"""
msg = (
"Attempted to add a new filter, factor, or classifier "
"outside of initialize.\n"
"New FFC terms may only be added during initialize."
)
class UnsupportedDataType(ZiplineError):
"""
Raised by FFC CustomFactors with unsupported dtypes.
"""
msg = "CustomFactors with dtype {dtype} are not supported."
class NoFurtherDataError(ZiplineError):
"""
Raised by calendar operations that would ask for dates beyond the extent of
our known data.
"""
# This accepts an arbitrary message string because it's used in more places
# that can be usefully templated.
msg = '{msg}'
class UnsupportedDatetimeFormat(ZiplineError):
"""
Raised when an unsupported datetime is passed to an API method.
"""
msg = ("The input '{input}' passed to '{method}' is not "
"coercible to a pandas.Timestamp object.")
class PositionTrackerMissingAssetFinder(ZiplineError):
"""
Raised by a PositionTracker if it is asked to update an Asset but does not
have an AssetFinder
"""
msg = (
"PositionTracker attempted to update its Asset information but does "
"not have an AssetFinder. This may be caused by a failure to properly "
"de-serialize a TradingAlgorithm."
)
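# Illustrative sketch (added for clarity, not part of the original module):
# every subclass formats its class-level `msg` template with the keyword
# arguments passed at raise time, e.g.
#
#     try:
#         raise SymbolNotFound(symbol='AAPL')
#     except ZiplineError as e:
#         assert str(e) == "Symbol 'AAPL' was not found."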
| {
"content_hash": "87c58ad49538cd52c90c9f8c3901e733",
"timestamp": "",
"source": "github",
"line_count": 410,
"max_line_length": 79,
"avg_line_length": 27.021951219512196,
"alnum_prop": 0.6786713602310678,
"repo_name": "YuepengGuo/zipline",
"id": "ca4c99e31bcb6f27799d51fdba7e8177bff7b118",
"size": "11663",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "zipline/errors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "564"
},
{
"name": "Emacs Lisp",
"bytes": "138"
},
{
"name": "Python",
"bytes": "1402009"
},
{
"name": "Shell",
"bytes": "4065"
}
],
"symlink_target": ""
} |
"""
All exceptions used in OVH SDK derives from `APIError`
"""
class APIError(Exception):
"""Base OVH API exception, all specific exceptions inherits from it."""
class HTTPError(APIError):
"""Raised when the request fails at a low level (DNS, network, ...)"""
class InvalidKey(APIError):
"""Raised when trying to sign request with invalid key"""
class InvalidCredential(APIError):
"""Raised when trying to sign request with invalid consumer key"""
class InvalidResponse(APIError):
"""Raised when api response is not valid json"""
class InvalidRegion(APIError):
"""Raised when region is not in `REGIONS`."""
class ReadOnlyError(APIError):
"""Raised when attempting to modify readonly data."""
class ResourceNotFoundError(APIError):
"""Raised when requested resource does not exist."""
class BadParametersError(APIError):
"""Raised when request contains bad parameters."""
class ResourceConflictError(APIError):
"""Raised when trying to create an already existing resource."""
class NetworkError(APIError):
"""Raised when there is an error from network layer."""
class NotGrantedCall(APIError):
"""Raised when there is an error from network layer."""
class NotCredential(APIError):
"""Raised when there is an error from network layer."""
class Forbidden(APIError):
"""Raised when there is an error from network layer."""
| {
"content_hash": "0fb2dcec77a678deabd0cd5386d0b240",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 75,
"avg_line_length": 30.866666666666667,
"alnum_prop": 0.7235421166306696,
"repo_name": "VincentCasse/python-ovh",
"id": "53e7b2063a6b6494905cf8ebbc8678ecada55cc8",
"size": "2925",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ovh/exceptions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "46395"
}
],
"symlink_target": ""
} |
__author__ = 'Nick Hirakawa'
from invdx import build_data_structures
from rank import score_BM25
import operator
class QueryProcessor:
def __init__(self, queries, corpus):
self.queries = queries
self.index, self.dlt = build_data_structures(corpus)
def run(self):
results = []
for query in self.queries:
results.append(self.run_query(query))
return results
def run_query(self, query):
query_result = dict()
for term in query:
if term in self.index:
doc_dict = self.index[term] # retrieve index entry
for docid, freq in doc_dict.items(): #for each document and its word frequency
score = score_BM25(n=len(doc_dict), f=freq, qf=1, r=0, N=len(self.dlt),
dl=self.dlt.get_length(docid), avdl=self.dlt.get_average_length()) # calculate score
if docid in query_result: #this document has already been scored once
query_result[docid] += score
else:
query_result[docid] = score
return query_result
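# Usage sketch (illustrative, not part of the original module): each query is a list of
# terms, and `corpus` is whatever mapping build_data_structures() expects (assumed here
# to be {docid: [term, ...]}); run() returns one {docid: BM25 score} dict per query.
def _example_query_run(corpus):  # hypothetical helper, for illustration only
    queries = [['information', 'retrieval'], ['okapi', 'bm25']]
    processor = QueryProcessor(queries, corpus)
    results = processor.run()
    # Rank documents for the first query, best score first.
    return sorted(results[0].items(), key=lambda item: item[1], reverse=True)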
| {
"content_hash": "2108a7a2e183a1ac6650cc55ce14f6df",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 96,
"avg_line_length": 31.129032258064516,
"alnum_prop": 0.689119170984456,
"repo_name": "nh0815/BM25",
"id": "fd000b47728eb0975f23fe59426e863ecf32443f",
"size": "965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/query.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6925"
}
],
"symlink_target": ""
} |
from toee import *
def OnBeginSpellCast( spell ):
print "Acid Fog OnBeginSpellCast"
print "spell.target_list=", spell.target_list
print "spell.caster=", spell.caster, " caster.level= ", spell.caster_level
game.particles( "sp-conjuration-conjure", spell.caster )
def OnSpellEffect( spell ):
print "Acid Fog OnSpellEffect"
spell.duration = 10 * spell.caster_level
# spawn one spell_object object
spell_obj = game.obj_create( OBJECT_SPELL_GENERIC, spell.target_loc, spell.target_loc_off_x, spell.target_loc_off_y )
# add to d20initiative
caster_init_value = spell.caster.get_initiative()
spell_obj.d20_status_init()
spell_obj.set_initiative( caster_init_value )
# put sp-Solid Fog condition on obj
spell_obj_partsys_id = game.particles( 'sp-Solid Fog', spell_obj )
spell_obj.condition_add_with_args( 'sp-Solid Fog', spell.id, spell.duration, 0, spell_obj_partsys_id )
#spell_obj.condition_add_arg_x( 3, spell_obj_partsys_id )
#objectevent_id = spell_obj.condition_get_arg_x( 2 )
# add monster to target list
spell.num_of_targets = 1
spell.target_list[0].obj = spell_obj
def OnBeginRound( spell ):
print "Acid Fog OnBeginRound"
damage_dice = dice_new( '2d6' )
for obj in game.obj_list_vicinity(spell.target_list[0].obj.location,OLC_NPC):
if (spell.target_list[0].obj.distance_to(obj) <= 20):
obj.spell_damage( spell.caster, D20DT_ACID, damage_dice, D20DAP_UNSPECIFIED, D20A_CAST_SPELL, spell.id )
def OnEndSpellCast( spell ):
print "Acid Fog OnEndSpellCast"
def OnAreaOfEffectHit( spell ):
print "Acid Fog OnAreaOfEffectHit"
def OnSpellStruck( spell ):
print "Acid Fog OnSpellStruck" | {
"content_hash": "e876e505f4cf6f716e472116fb30a0fe",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 118,
"avg_line_length": 34.51063829787234,
"alnum_prop": 0.7342786683107275,
"repo_name": "GrognardsFromHell/TemplePlus",
"id": "7d7dd021b1ef2fd5c2bc44bf4861b246c835fe51",
"size": "1622",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tpdatasrc/kotbfixes/scr/Spell000 - Acid Fog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "683"
},
{
"name": "C",
"bytes": "629718"
},
{
"name": "C#",
"bytes": "167885"
},
{
"name": "C++",
"bytes": "10018792"
},
{
"name": "CMake",
"bytes": "91980"
},
{
"name": "CSS",
"bytes": "1292"
},
{
"name": "HLSL",
"bytes": "18884"
},
{
"name": "HTML",
"bytes": "433942"
},
{
"name": "PowerShell",
"bytes": "5374"
},
{
"name": "Python",
"bytes": "2850350"
}
],
"symlink_target": ""
} |
"""Utility functions for resolving file paths in histograms scripts."""
import os.path
def GetInputFile(src_relative_file_path):
"""Converts a src/-relative file path into a path that can be opened."""
depth = [os.path.dirname(__file__), '..', '..', '..']
path = os.path.join(*(depth + src_relative_file_path.split('/')))
return os.path.abspath(path)
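# Usage sketch (illustrative, not part of the original module): the argument is always
# given relative to the Chromium src/ root, regardless of the caller's working directory.
def _example_get_input_file():  # hypothetical helper, for illustration only
    # 'tools/metrics/histograms/histograms.xml' is a representative src/-relative path.
    return GetInputFile('tools/metrics/histograms/histograms.xml')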
| {
"content_hash": "6725784bee8a2aefcfffbc60c96744f3",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 74,
"avg_line_length": 36.2,
"alnum_prop": 0.6685082872928176,
"repo_name": "nwjs/chromium.src",
"id": "02d04c80137f61a21732338311fe371512780213",
"size": "503",
"binary": false,
"copies": "7",
"ref": "refs/heads/nw70",
"path": "tools/metrics/common/path_util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
from sqlobject import col
from sqlobject.dbconnection import DBAPI
from sqlobject.dberrors import *
class ErrorMessage(str):
def __new__(cls, e, append_msg=''):
obj = str.__new__(cls, e[1] + append_msg)
obj.code = int(e[0])
obj.module = e.__module__
obj.exception = e.__class__.__name__
return obj
class MySQLConnection(DBAPI):
supportTransactions = False
dbName = 'mysql'
schemes = [dbName]
def __init__(self, db, user, password='', host='localhost', port=0, **kw):
import MySQLdb, MySQLdb.constants.CR, MySQLdb.constants.ER
self.module = MySQLdb
self.host = host
self.port = port
self.db = db
self.user = user
self.password = password
self.kw = {}
for key in ("unix_socket", "init_command",
"read_default_file", "read_default_group", "conv"):
if key in kw:
self.kw[key] = kw.pop(key)
for key in ("connect_timeout", "compress", "named_pipe", "use_unicode",
"client_flag", "local_infile"):
if key in kw:
self.kw[key] = int(kw.pop(key))
for key in ("ssl_key", "ssl_cert", "ssl_ca", "ssl_capath"):
if key in kw:
if "ssl" not in self.kw:
self.kw["ssl"] = {}
self.kw["ssl"][key[4:]] = kw.pop(key)
if "charset" in kw:
self.dbEncoding = self.kw["charset"] = kw.pop("charset")
else:
self.dbEncoding = None
# MySQLdb < 1.2.1: only ascii
# MySQLdb = 1.2.1: only unicode
# MySQLdb > 1.2.1: both ascii and unicode
self.need_unicode = (self.module.version_info[:3] >= (1, 2, 1)) and (self.module.version_info[:3] < (1, 2, 2))
self._server_version = None
self._can_use_microseconds = None
DBAPI.__init__(self, **kw)
@classmethod
def _connectionFromParams(cls, user, password, host, port, path, args):
return cls(db=path.strip('/'), user=user or '', password=password or '',
host=host or 'localhost', port=port or 0, **args)
def makeConnection(self):
dbEncoding = self.dbEncoding
if dbEncoding:
from MySQLdb.connections import Connection
if not hasattr(Connection, 'set_character_set'):
# monkeypatch pre MySQLdb 1.2.1
def character_set_name(self):
return dbEncoding + '_' + dbEncoding
Connection.character_set_name = character_set_name
try:
conn = self.module.connect(host=self.host, port=self.port,
db=self.db, user=self.user, passwd=self.password, **self.kw)
if self.module.version_info[:3] >= (1, 2, 2):
conn.ping(True) # Attempt to reconnect. This setting is persistent.
except self.module.OperationalError, e:
conninfo = "; used connection string: host=%(host)s, port=%(port)s, db=%(db)s, user=%(user)s" % self.__dict__
raise OperationalError(ErrorMessage(e, conninfo))
if hasattr(conn, 'autocommit'):
conn.autocommit(bool(self.autoCommit))
if dbEncoding:
if hasattr(conn, 'set_character_set'): # MySQLdb 1.2.1 and later
conn.set_character_set(dbEncoding)
else: # pre MySQLdb 1.2.1
# works along with monkeypatching code above
conn.query("SET NAMES %s" % dbEncoding)
return conn
def _setAutoCommit(self, conn, auto):
if hasattr(conn, 'autocommit'):
conn.autocommit(auto)
def _executeRetry(self, conn, cursor, query):
if self.need_unicode and not isinstance(query, unicode):
try:
query = unicode(query, self.dbEncoding)
except UnicodeError:
pass
# When a server connection is lost and a query is attempted, most of
# the time the query will raise a SERVER_LOST exception, then at the
# second attempt to execute it, the mysql lib will reconnect and
# succeed. However is a few cases, the first attempt raises the
# SERVER_GONE exception, the second attempt the SERVER_LOST exception
# and only the third succeeds. Thus the 3 in the loop count.
# If it doesn't reconnect even after 3 attempts, while the database is
# up and running, it is because a 5.0.3 (or newer) server is used
# which no longer permits autoreconnects by default. In that case a
# reconnect flag must be set when making the connection to indicate
# that autoreconnecting is desired. In MySQLdb 1.2.2 or newer this is
# done by calling ping(True) on the connection.
for count in range(3):
try:
return cursor.execute(query)
except self.module.OperationalError, e:
if e.args[0] in (self.module.constants.CR.SERVER_GONE_ERROR, self.module.constants.CR.SERVER_LOST):
if count == 2:
raise OperationalError(ErrorMessage(e))
if self.debug:
self.printDebug(conn, str(e), 'ERROR')
else:
raise OperationalError(ErrorMessage(e))
except self.module.IntegrityError, e:
msg = ErrorMessage(e)
if e.args[0] == self.module.constants.ER.DUP_ENTRY:
raise DuplicateEntryError(msg)
else:
raise IntegrityError(msg)
except self.module.InternalError, e:
raise InternalError(ErrorMessage(e))
except self.module.ProgrammingError, e:
raise ProgrammingError(ErrorMessage(e))
except self.module.DataError, e:
raise DataError(ErrorMessage(e))
except self.module.NotSupportedError, e:
raise NotSupportedError(ErrorMessage(e))
except self.module.DatabaseError, e:
raise DatabaseError(ErrorMessage(e))
except self.module.InterfaceError, e:
raise InterfaceError(ErrorMessage(e))
except self.module.Warning, e:
raise Warning(ErrorMessage(e))
except self.module.Error, e:
raise Error(ErrorMessage(e))
def _queryInsertID(self, conn, soInstance, id, names, values):
table = soInstance.sqlmeta.table
idName = soInstance.sqlmeta.idName
c = conn.cursor()
if id is not None:
names = [idName] + names
values = [id] + values
q = self._insertSQL(table, names, values)
if self.debug:
self.printDebug(conn, q, 'QueryIns')
self._executeRetry(conn, c, q)
if id is None:
try:
id = c.lastrowid
except AttributeError:
id = c.insert_id()
if self.debugOutput:
self.printDebug(conn, id, 'QueryIns', 'result')
return id
@classmethod
def _queryAddLimitOffset(cls, query, start, end):
if not start:
return "%s LIMIT %i" % (query, end)
if not end:
return "%s LIMIT %i, -1" % (query, start)
return "%s LIMIT %i, %i" % (query, start, end-start)
def createReferenceConstraint(self, soClass, col):
return col.mysqlCreateReferenceConstraint()
def createColumn(self, soClass, col):
return col.mysqlCreateSQL(self)
def createIndexSQL(self, soClass, index):
return index.mysqlCreateIndexSQL(soClass)
def createIDColumn(self, soClass):
if soClass.sqlmeta.idType == str:
return '%s TEXT PRIMARY KEY' % soClass.sqlmeta.idName
return '%s INT PRIMARY KEY AUTO_INCREMENT' % soClass.sqlmeta.idName
def joinSQLType(self, join):
return 'INT NOT NULL'
def tableExists(self, tableName):
try:
# Use DESCRIBE instead of SHOW TABLES because SHOW TABLES
# assumes there is a default database selected
# which is not always True (for an embedded application, e.g.)
self.query('DESCRIBE %s' % (tableName))
return True
except ProgrammingError, e:
if e[0].code == 1146: # ER_NO_SUCH_TABLE
return False
raise
def addColumn(self, tableName, column):
self.query('ALTER TABLE %s ADD COLUMN %s' %
(tableName,
column.mysqlCreateSQL(self)))
def delColumn(self, sqlmeta, column):
self.query('ALTER TABLE %s DROP COLUMN %s' % (sqlmeta.table, column.dbName))
def columnsFromSchema(self, tableName, soClass):
colData = self.queryAll("SHOW COLUMNS FROM %s"
% tableName)
results = []
for field, t, nullAllowed, key, default, extra in colData:
if field == soClass.sqlmeta.idName:
continue
colClass, kw = self.guessClass(t)
if self.kw.get('use_unicode') and colClass is col.StringCol:
colClass = col.UnicodeCol
if self.dbEncoding: kw['dbEncoding'] = self.dbEncoding
kw['name'] = soClass.sqlmeta.style.dbColumnToPythonAttr(field)
kw['dbName'] = field
# Since MySQL 5.0, 'NO' is returned in the NULL column (SQLObject expected '')
kw['notNone'] = (nullAllowed.upper() != 'YES' and True or False)
if default and t.startswith('int'):
kw['default'] = int(default)
elif default and t.startswith('float'):
kw['default'] = float(default)
elif default == 'CURRENT_TIMESTAMP' and t == 'timestamp':
kw['default'] = None
elif default and colClass is col.BoolCol:
kw['default'] = int(default) and True or False
else:
kw['default'] = default
# @@ skip key...
# @@ skip extra...
results.append(colClass(**kw))
return results
def guessClass(self, t):
if t.startswith('int'):
return col.IntCol, {}
elif t.startswith('enum'):
values = []
for i in t[5:-1].split(','): # take the enum() off and split
values.append(i[1:-1]) # remove the surrounding \'
return col.EnumCol, {'enumValues': values}
elif t.startswith('double'):
return col.FloatCol, {}
elif t.startswith('varchar'):
colType = col.StringCol
if self.kw.get('use_unicode', False):
colType = col.UnicodeCol
if t.endswith('binary'):
return colType, {'length': int(t[8:-8]),
'char_binary': True}
else:
return colType, {'length': int(t[8:-1])}
elif t.startswith('char'):
if t.endswith('binary'):
return col.StringCol, {'length': int(t[5:-8]),
'varchar': False,
'char_binary': True}
else:
return col.StringCol, {'length': int(t[5:-1]),
'varchar': False}
elif t.startswith('datetime'):
return col.DateTimeCol, {}
elif t.startswith('date'):
return col.DateCol, {}
elif t.startswith('time'):
return col.TimeCol, {}
elif t.startswith('timestamp'):
return col.TimestampCol, {}
elif t.startswith('bool'):
return col.BoolCol, {}
elif t.startswith('tinyblob'):
return col.BLOBCol, {"length": 2**8-1}
elif t.startswith('tinytext'):
return col.StringCol, {"length": 2**8-1, "varchar": True}
elif t.startswith('blob'):
return col.BLOBCol, {"length": 2**16-1}
elif t.startswith('text'):
return col.StringCol, {"length": 2**16-1, "varchar": True}
elif t.startswith('mediumblob'):
return col.BLOBCol, {"length": 2**24-1}
elif t.startswith('mediumtext'):
return col.StringCol, {"length": 2**24-1, "varchar": True}
elif t.startswith('longblob'):
return col.BLOBCol, {"length": 2**32}
elif t.startswith('longtext'):
return col.StringCol, {"length": 2**32, "varchar": True}
else:
return col.Col, {}
def listTables(self):
return [v[0] for v in self.queryAll("SHOW TABLES")]
def listDatabases(self):
return [v[0] for v in self.queryAll("SHOW DATABASES")]
def _createOrDropDatabase(self, op="CREATE"):
self.query('%s DATABASE %s' % (op, self.db))
def createEmptyDatabase(self):
self._createOrDropDatabase()
def dropDatabase(self):
self._createOrDropDatabase(op="DROP")
def server_version(self):
if self._server_version is not None:
return self._server_version
try:
server_version = self.queryOne("SELECT VERSION()")[0]
server_version = server_version.split('-', 1)
db_tag = "MySQL"
if len(server_version) == 2:
if "MariaDB" in server_version[1]:
db_tag = "MariaDB"
server_version = server_version[0]
server_version = tuple(int(v) for v in server_version.split('.'))
server_version = (server_version, db_tag)
except:
server_version = None # unknown
self._server_version = server_version
return server_version
def can_use_microseconds(self):
if self._can_use_microseconds is not None:
return self._can_use_microseconds
server_version = self.server_version()
if server_version is None:
return None
server_version, db_tag = server_version
if db_tag == "MariaDB":
can_use_microseconds = (server_version >= (5, 3, 0))
else: # MySQL
can_use_microseconds = (server_version >= (5, 6, 4))
self._can_use_microseconds = can_use_microseconds
return can_use_microseconds
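# Usage sketch (illustrative, not part of the original module): SQLObject normally
# constructs this class from a database URI rather than by direct instantiation; the
# 'mysql' scheme is routed here via the `schemes` attribute. Host, credentials and the
# charset below are placeholder values.
def _example_connection_from_uri():  # hypothetical helper, for illustration only
    from sqlobject import connectionForURI
    return connectionForURI('mysql://user:password@localhost/mydb?charset=utf8')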
| {
"content_hash": "922f74913153422ed48a00ecec98f548",
"timestamp": "",
"source": "github",
"line_count": 345,
"max_line_length": 121,
"avg_line_length": 41.36231884057971,
"alnum_prop": 0.5538892782060266,
"repo_name": "Kussie/HTPC-Manager",
"id": "f8e92262c380c9825b15f59827caae7169fcfb76",
"size": "14270",
"binary": false,
"copies": "6",
"ref": "refs/heads/master2",
"path": "libs/sqlobject/mysql/mysqlconnection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "352"
},
{
"name": "CSS",
"bytes": "48317"
},
{
"name": "HTML",
"bytes": "159076"
},
{
"name": "JavaScript",
"bytes": "453145"
},
{
"name": "Python",
"bytes": "4671335"
},
{
"name": "R",
"bytes": "2187"
},
{
"name": "Shell",
"bytes": "3746"
}
],
"symlink_target": ""
} |
from mock import patch
import unittest
import yoda
from click.testing import CliRunner
class TestSuggestRecipes(unittest.TestCase):
"""
Test for the following commands:
| Module: food
| command: suggest_recipes
"""
RANDOM_RECIPE = {
'meals': [{
'strMeal': 'Honey Teriyaki Salmon',
'strInstructions': 'Mix all the ingredients in the Honey Teriyaki Glaze together. Whisk to blend well. Combine the salmon and the Glaze together.\r\n\r\nHeat up a skillet on medium-low heat. Add the oil, Pan-fry the salmon on both sides until it\u2019s completely cooked inside and the glaze thickens.\r\n\r\nGarnish with sesame and serve immediately.',
'strIngredient1': 'Salmon', 'strIngredient2': 'Olive oil',
'strIngredient3': 'Soy Sauce',
'strIngredient4': 'Sake', 'strIngredient5': 'Sesame',
'strMeasure1': '1 lb', 'strMeasure2': '1 tablespoon',
'strMeasure3': '2 tablespoons', 'strMeasure4': '2 tablespoons',
'strMeasure5': '4 tablespoons'
}]
}
def __init__(self, methodName="runTest"):
        super(TestSuggestRecipes, self).__init__(methodName)
self.runner = CliRunner()
@patch('modules.food.requests')
def runTest(self, requests):
        requests.get.return_value.json.return_value = self.RANDOM_RECIPE
result = self.runner.invoke(yoda.cli, ["food", "suggest_recipes"], input='random')
self.assertIsNone(result.exception)
result = self.runner.invoke(yoda.cli, ["food", "suggest_recipes"], input='american')
self.assertIsNone(result.exception) | {
"content_hash": "74c206d18fab03d3bc0f37384a7d0ccc",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 365,
"avg_line_length": 43.810810810810814,
"alnum_prop": 0.6452806909315237,
"repo_name": "dude-pa/dude",
"id": "0e88d687f4d3488d7d40d1457b539ed30f78a2a9",
"size": "1621",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/food/test_suggest_recipes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "49746"
}
],
"symlink_target": ""
} |
"""
django-tinymce4-lite
--------------------
This application provides a rich-text WYSIWYG `TinyMCE 4`_ widget
for Django forms and models.
.. _TinyMCE 4: https://www.tinymce.com/
"""
from __future__ import absolute_import
from .models import HTMLField
from .widgets import TinyMCE, AdminTinyMCE
__all__ = ['HTMLField', 'TinyMCE', 'AdminTinyMCE']
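# Usage sketch (illustrative, not part of the original package code): HTMLField can be
# declared directly on a Django model, and TinyMCE used as the widget for an ordinary
# form field. The form name below is a placeholder.
def _example_form():  # hypothetical helper, for illustration only
    from django import forms

    class ArticleForm(forms.Form):
        # Rendered with the TinyMCE rich-text editor instead of a plain textarea.
        body = forms.CharField(widget=TinyMCE())

    return ArticleForm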
| {
"content_hash": "527d80a9a42b4b773c62f05accee3b27",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 65,
"avg_line_length": 23.466666666666665,
"alnum_prop": 0.6875,
"repo_name": "webtweakers/django-tinymce4-widget",
"id": "b797ec2fa8c3a2f3ebc8d0f1705c20507154b462",
"size": "400",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tinymce/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "165"
},
{
"name": "HTML",
"bytes": "322"
},
{
"name": "JavaScript",
"bytes": "1165"
},
{
"name": "Python",
"bytes": "24087"
}
],
"symlink_target": ""
} |
"""An example tensorflow model."""
import tensorflow as tf
class Model:
def __init__(self):
self.weights = tf.ones((3, 1000))
def __call__(self, features):
images = tf.reshape(features["image"], (-1, 224, 224, 3))
means = tf.reduce_mean(images, axis=(1, 2))
logits = tf.matmul(means, self.weights)
return tf.nn.softmax(logits, axis=-1)
def create():
return Model(), None
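# Usage sketch (illustrative, not part of the original module): the model expects a
# features dict whose "image" entry can be reshaped to (batch, 224, 224, 3).
def _example_predict():  # hypothetical helper, for illustration only
    model, _ = create()
    features = {"image": tf.zeros((2, 224 * 224 * 3))}  # two dummy flattened images
    probs = model(features)  # shape (2, 1000); each row sums to 1 via softmax
    return probs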
| {
"content_hash": "728176a62e05af24fa401e5ed930e9d1",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 61,
"avg_line_length": 22.38888888888889,
"alnum_prop": 0.6277915632754343,
"repo_name": "google-research/robustness_metrics",
"id": "df544244ad02a7751bcbe29f9bd64f50a7c1fcb8",
"size": "1014",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "robustness_metrics/models/uniform_imagenet_tensorflow.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "857586"
},
{
"name": "Python",
"bytes": "557042"
}
],
"symlink_target": ""
} |
import re
import logging
from django.apps import AppConfig
from django.db.models.signals import pre_migrate, post_migrate
from django.db import connection
default_app_config = 'postgresviews.ViewConfig'
logger = logging.getLogger('postgresviews')
class ViewConfig(AppConfig):
name = 'postgresviews'
verbose_name = 'postgresviews'
def ready(self):
from .models import ViewBase, MaterializedViewBase
from django.apps import apps
tables = {
model._meta.db_table: model for model in apps.get_models()
}
for model in ViewBase.view_models:
self.validate_view_model(model, tables)
def validate_view_model(self, view_model, tables):
create_view_sql = view_model.view()
from_models = view_model._view_meta.from_models
missing = []
for table, model in tables.items():
if (re.search(table + "[^_a-zA-Z0-9]", create_view_sql)
and table != view_model._meta.db_table):
_meta = model._meta
if (_meta.label not in from_models
and _meta.db_table not in from_models):
missing.append(model)
if missing:
labels = [model._meta.label for model in missing]
logger.warning(("%s.ViewMeta.from_models might be missing the following models: %s. "
"The correct from_models definition might be:"
"\n from_models = %s") % (
view_model._meta.label,
", ".join(labels),
str(list(sorted(from_models + labels)))))
def create_views():
from .models import ViewBase, MaterializedViewBase, View
from django.apps import apps
with connection.cursor() as cursor:
created = set()
for model in ViewBase.view_models:
model._create_view(cursor, created)
for view_model in MaterializedViewBase.materialized_view_models:
view_model.refresh()
for from_table, view_models in MaterializedViewBase.refresh_triggers.items():
cursor.execute(MaterializedViewBase._create_refresh_table_sql(from_table, view_models))
cursor.execute(MaterializedViewBase._create_constraint_trigger_sql(from_table))
def drop_views():
from .models import ViewBase
with connection.cursor() as cursor:
for model in ViewBase.view_models:
model._drop_view(cursor)
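# Wiring sketch (illustrative, not part of the original module): a project could keep
# its database views in sync by recreating them after every migration run, e.g. via a
# post_migrate receiver like the one below. Registering it is left to the project:
#     post_migrate.connect(_example_recreate_views_receiver)
def _example_recreate_views_receiver(sender, **kwargs):  # hypothetical, illustration only
    create_views()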
| {
"content_hash": "7e7d5480e8e13f5312dbddc708389103",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 99,
"avg_line_length": 32.78666666666667,
"alnum_prop": 0.6209841398942659,
"repo_name": "Metamako/django-mysqlviews",
"id": "42375efef92cb7c66076ceb28e37d5ce4ad8f32b",
"size": "2459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "postgresviews/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "16451"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
packages = find_packages()
desc = open("README.md").read()
setup(
name='cooperhewitt.roboteyes.atkinson',
namespace_packages=['cooperhewitt', 'cooperhewitt.roboteyes'],
version='0.3',
    description='',
    long_description=desc,
author='Cooper Hewitt Smithsonian Design Museum',
url='https://github.com/cooperhewitt/py-cooperhewitt-roboteyes-atkinson',
dependency_links=[
'https://github.com/migurski/atkinson/tarball/master#egg=atk-0.1',
],
install_requires=[
'atk',
'Pillow'
],
packages=packages,
scripts=[],
download_url='https://github.com/cooperhewitt/py-cooperhewitt-roboteyes-atkinson/tarball/master',
license='BSD')
| {
"content_hash": "27b60a4346bd05c35e88ab1c58f96482",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 101,
"avg_line_length": 31.47826086956522,
"alnum_prop": 0.6767955801104972,
"repo_name": "cooperhewitt/py-cooperhewitt-roboteyes-atkinson",
"id": "4de668b586960feeba007c7b422974620c792f59",
"size": "747",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "2837"
}
],
"symlink_target": ""
} |
from flask import current_app, Blueprint, render_template
from lytics.db.models import Expenditure
from lytics.db.queries import QueryConn
main = Blueprint('main', __name__, url_prefix='/')
@main.route("/")
def index():
return render_template('index.jinja2')
| {
"content_hash": "afed2bfdda916a96f37499d932272b99",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 57,
"avg_line_length": 33.5,
"alnum_prop": 0.7276119402985075,
"repo_name": "patrickspencer/lytics",
"id": "cd385cf172d3bc89c0ef064b05cef50fd4432fef",
"size": "268",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lytics/routes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "46200"
},
{
"name": "HTML",
"bytes": "2569"
},
{
"name": "JavaScript",
"bytes": "1734"
},
{
"name": "Python",
"bytes": "15537"
},
{
"name": "Shell",
"bytes": "167"
}
],
"symlink_target": ""
} |
from ..multivariate import plot
from run import get_matrix_dimensions
import config, os
config.sink_dir = '%s/sink' % os.path.dirname(__file__)
config.axis_x = {
'name' : r'$s$',
'column' : 'config_etah_slack'
}
config.axis_y = {
'name' : r'$\phi_{max}$',
'column' : 'config_phi_max'
}
config.output_nx, config.output_ny = get_matrix_dimensions()
def run():
plot.run() | {
"content_hash": "bc81f8d23db630e0627f6b158214b72b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 60,
"avg_line_length": 20.63157894736842,
"alnum_prop": 0.625,
"repo_name": "mauzeh/formation-flight",
"id": "c67c23f063785322ad7ad5a4f7f7e6e90a47e440",
"size": "392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runs/multihub/s_phi/plot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "209702"
}
],
"symlink_target": ""
} |
from oslo_log import helpers as log
from gbpservice.neutron.services.grouppolicy import (
group_policy_driver_api as api)
class NoopDriver(api.PolicyDriver):
@log.log_method_call
def initialize(self):
pass
@log.log_method_call
def create_policy_target_precommit(self, context):
pass
@log.log_method_call
def create_policy_target_postcommit(self, context):
pass
@log.log_method_call
def update_policy_target_precommit(self, context):
pass
@log.log_method_call
def update_policy_target_postcommit(self, context):
pass
@log.log_method_call
def delete_policy_target_precommit(self, context):
pass
@log.log_method_call
def delete_policy_target_postcommit(self, context):
pass
@log.log_method_call
def create_policy_target_group_precommit(self, context):
pass
@log.log_method_call
def create_policy_target_group_postcommit(self, context):
pass
@log.log_method_call
def update_policy_target_group_precommit(self, context):
pass
@log.log_method_call
def update_policy_target_group_postcommit(self, context):
pass
@log.log_method_call
def delete_policy_target_group_precommit(self, context):
pass
@log.log_method_call
def delete_policy_target_group_postcommit(self, context):
pass
@log.log_method_call
def create_l2_policy_precommit(self, context):
pass
@log.log_method_call
def create_l2_policy_postcommit(self, context):
pass
@log.log_method_call
def update_l2_policy_precommit(self, context):
pass
@log.log_method_call
def update_l2_policy_postcommit(self, context):
pass
@log.log_method_call
def delete_l2_policy_precommit(self, context):
pass
@log.log_method_call
def delete_l2_policy_postcommit(self, context):
pass
@log.log_method_call
def create_l3_policy_precommit(self, context):
pass
@log.log_method_call
def create_l3_policy_postcommit(self, context):
pass
@log.log_method_call
def update_l3_policy_precommit(self, context):
pass
@log.log_method_call
def update_l3_policy_postcommit(self, context):
pass
@log.log_method_call
def delete_l3_policy_precommit(self, context):
pass
@log.log_method_call
def delete_l3_policy_postcommit(self, context):
pass
@log.log_method_call
def create_network_service_policy_precommit(self, context):
pass
@log.log_method_call
def create_network_service_policy_postcommit(self, context):
pass
@log.log_method_call
def update_network_service_policy_precommit(self, context):
pass
@log.log_method_call
def update_network_service_policy_postcommit(self, context):
pass
@log.log_method_call
def delete_network_service_policy_precommit(self, context):
pass
@log.log_method_call
def delete_network_service_policy_postcommit(self, context):
pass
@log.log_method_call
def create_policy_classifier_precommit(self, context):
pass
@log.log_method_call
def create_policy_classifier_postcommit(self, context):
pass
@log.log_method_call
def update_policy_classifier_precommit(self, context):
pass
@log.log_method_call
def update_policy_classifier_postcommit(self, context):
pass
@log.log_method_call
def delete_policy_classifier_precommit(self, context):
pass
@log.log_method_call
def delete_policy_classifier_postcommit(self, context):
pass
@log.log_method_call
def create_policy_action_precommit(self, context):
pass
@log.log_method_call
def create_policy_action_postcommit(self, context):
pass
@log.log_method_call
def update_policy_action_precommit(self, context):
pass
@log.log_method_call
def update_policy_action_postcommit(self, context):
pass
@log.log_method_call
def delete_policy_action_precommit(self, context):
pass
@log.log_method_call
def delete_policy_action_postcommit(self, context):
pass
@log.log_method_call
def create_policy_rule_precommit(self, context):
pass
@log.log_method_call
def create_policy_rule_postcommit(self, context):
pass
@log.log_method_call
def update_policy_rule_precommit(self, context):
pass
@log.log_method_call
def update_policy_rule_postcommit(self, context):
pass
@log.log_method_call
def delete_policy_rule_precommit(self, context):
pass
@log.log_method_call
def delete_policy_rule_postcommit(self, context):
pass
@log.log_method_call
def create_policy_rule_set_precommit(self, context):
pass
@log.log_method_call
def create_policy_rule_set_postcommit(self, context):
pass
@log.log_method_call
def update_policy_rule_set_precommit(self, context):
pass
@log.log_method_call
def update_policy_rule_set_postcommit(self, context):
pass
@log.log_method_call
def delete_policy_rule_set_precommit(self, context):
pass
@log.log_method_call
def delete_policy_rule_set_postcommit(self, context):
pass
| {
"content_hash": "d9ae58c3a4aab9fadf3dbaa012ed1b52",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 64,
"avg_line_length": 23.784140969162994,
"alnum_prop": 0.654936099277644,
"repo_name": "jiahaoliang/group-based-policy",
"id": "6409494e92e57365ef50bf8f5b0349d43f23044f",
"size": "5972",
"binary": false,
"copies": "1",
"ref": "refs/heads/lbaasv2-mitaka-pull-request",
"path": "gbpservice/neutron/services/grouppolicy/drivers/dummy_driver.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "1741199"
},
{
"name": "Shell",
"bytes": "27976"
}
],
"symlink_target": ""
} |
import torch
import torch.nn as nn
try:
from torch.hub import load_state_dict_from_url
except ImportError:
from torch.utils.model_zoo import load_url as load_state_dict_from_url
import torchvision.models.resnet as torch_resnet
from torchvision.models.resnet import BasicBlock, Bottleneck
model_urls = {'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
}
class ResNet(torch_resnet.ResNet):
def __init__(self, *args, **kwargs):
super(ResNet, self).__init__(*args, **kwargs)
    def modify(self, remove_layers=None, padding=''):
        # Copy to avoid mutating a shared default list (remove_layers is extended below).
        remove_layers = list(remove_layers) if remove_layers else []
# Set stride of layer3 and layer 4 to 1 (from 2)
filter_layers = lambda x: [l for l in x if getattr(self, l) is not None]
for layer in filter_layers(['layer3', 'layer4']):
for m in getattr(self, layer).modules():
if isinstance(m, torch.nn.Conv2d):
m.stride = tuple(1 for _ in m.stride)
print('stride', m)
# Set padding (zeros or reflect, doesn't change much;
# zeros requires lower temperature)
if padding != '':
for m in self.modules():
if isinstance(m, torch.nn.Conv2d) and sum(m.padding) > 0:
m.padding_mode = padding
print('padding', m)
# Remove extraneous layers
remove_layers += ['fc', 'avgpool']
for layer in filter_layers(remove_layers):
setattr(self, layer, None)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = x if self.maxpool is None else self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = x if self.layer3 is None else self.layer3(x)
x = x if self.layer4 is None else self.layer4(x)
return x
def _resnet(arch, block, layers, pretrained, progress, **kwargs):
model = ResNet(block, layers, **kwargs)
if pretrained:
state_dict = load_state_dict_from_url(model_urls[arch],
progress=progress)
model.load_state_dict(state_dict)
return model
def resnet18(pretrained=False, progress=True, **kwargs):
return _resnet('resnet18', BasicBlock, [2, 2, 2, 2], pretrained, progress,
**kwargs)
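# Usage sketch (illustrative, not part of the original module): build a backbone, strip
# the classifier head and force stride-1 layer3/layer4 so the output keeps a higher
# spatial resolution. The input tensor below is a placeholder.
def _example_backbone():  # hypothetical helper, for illustration only
    net = resnet18(pretrained=False)
    net.modify(remove_layers=['layer4'], padding='reflect')
    feats = net(torch.randn(1, 3, 224, 224))  # spatial feature map; no pooling or fc
    return feats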
def resnet50(pretrained=False, progress=True, **kwargs) -> ResNet:
return _resnet('resnet50', Bottleneck, [3, 4, 6, 3], pretrained, progress,
**kwargs) | {
"content_hash": "0924a3bcfbf031b0cf5831bd957931a7",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 86,
"avg_line_length": 38.30434782608695,
"alnum_prop": 0.5966704502459327,
"repo_name": "ajabri/videowalk",
"id": "6a5b870ad8917fba686f87229b5440450e898e10",
"size": "2643",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/resnet.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "11364"
},
{
"name": "Python",
"bytes": "113887"
}
],
"symlink_target": ""
} |
'''
Nearly all of the functions of this library require key indexing, which means they deal with dictionaries internally. This module handles loading dictionaries and automatically converts python-style dictionaries to condensed (no excess whitespace) JSON-style dictionaries.
'''
import sys
import json
import re
import string
class Dictionary(object):
'''
This class represents a JSON-style, key-indexable dictionary of data. It contains the attribute *alldata* and the method *dictload*.
'''
def __init__(self):
self.alldata = None
'''An internal representation of a key-indexable dictionary.'''
def dictload(self, path):
'''
Load a dictionary from a JSON-like text in a text file located at *path* into the attribute *alldata*.
In order for this function to execute successfully, the text file must have the proper formatting, particularly with regard to quotation marks. See :doc:`unittestdict` for an example. Specifically, the function can get rid of excess whitespace, convert ``.x`` to ``0.x`` in decimals, and convert ``None`` to ``null``, but nothing else.
Arguments:
1. *path* -- Path to the text file (e.g. "mydictionary.txt")
Attributes modified:
1. *alldata* -- The entire loaded dictionary.
The function also returns an error if nothing was loaded into *alldata*.
'''
f = open(path, 'r')
ftext = f.read()
assert (ftext and isinstance(ftext, str)), "Input file is empty or could not be read."
# alter for json input, if necessary
loaded = False
try:
self.alldata = json.loads(ftext)
loaded = True
except ValueError:
pass
if not loaded:
try:
ftext = ftext.translate(None, '\t\n ')
ftext = ftext.replace(':', ': ')
ftext = ftext.replace(',', ', ')
ftext = ftext.replace('None', 'null')
                # Prepend a leading zero only to bare decimals such as .5 -> 0.5,
                # without corrupting numbers that already have an integer part.
                ftext = re.sub(r'(?<!\d)\.(\d)', r'0.\1', ftext)
self.alldata = json.loads(ftext)
except ValueError:
raise ValueError, "Convert to JSON from input file failed. Check formatting."
f.close()
assert isinstance(self.alldata, dict), "In method dictload, path did not direct to a proper text file."
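# Usage sketch (illustrative, not part of the original module): load a JSON-like text
# file and inspect the resulting dictionary. The file name below is a placeholder.
def _example_dictload():  # hypothetical helper, for illustration only
    d = Dictionary()
    d.dictload("unittestdict.txt")  # a properly formatted dictionary file
    return d.alldata.keys()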
| {
"content_hash": "6cc5af6cbcc50e5ab18181341c7ee4b5",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 343,
"avg_line_length": 38.888888888888886,
"alnum_prop": 0.6016326530612245,
"repo_name": "ririw/libpgm",
"id": "a3ff7d1a52f5b31d0f31fe2cace01306e2293f07",
"size": "4040",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "libpgm/dictionary.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "184395"
},
{
"name": "TeX",
"bytes": "13255"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from celery.utils.imports import symbol_by_name
ALIASES = {
"processes": "celery.concurrency.processes:TaskPool",
"eventlet": "celery.concurrency.eventlet:TaskPool",
"gevent": "celery.concurrency.gevent:TaskPool",
"threads": "celery.concurrency.threads:TaskPool",
"solo": "celery.concurrency.solo:TaskPool",
}
def get_implementation(cls):
return symbol_by_name(cls, ALIASES)
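# Usage sketch (illustrative, not part of the original module): the argument may be an
# alias from ALIASES or a fully qualified "module:attribute" path.
def _example_get_pool_classes():  # hypothetical helper, for illustration only
    solo_pool = get_implementation('solo')
    explicit_pool = get_implementation('celery.concurrency.solo:TaskPool')
    return solo_pool, explicit_pool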
| {
"content_hash": "cb0f424a03a4b854a6cdeb35dd185e66",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 57,
"avg_line_length": 29.466666666666665,
"alnum_prop": 0.7262443438914027,
"repo_name": "ask/celery",
"id": "f7831fa2fea4c9095bd3de2af6c15746735c1210",
"size": "466",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "celery/concurrency/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "1651"
},
{
"name": "Python",
"bytes": "1369873"
},
{
"name": "Racket",
"bytes": "732"
},
{
"name": "Shell",
"bytes": "38949"
}
],
"symlink_target": ""
} |
import copy
import datetime
import hashlib
import os
import posixpath
import subprocess
import sys
import tempfile
import unittest
import urlparse
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(BUILD_TOOLS_DIR)
import manifest_util
import update_nacl_manifest
from update_nacl_manifest import CANARY_BUNDLE_NAME
HTTPS_BASE_URL = 'https://commondatastorage.googleapis.com' \
'/nativeclient_mirror/nacl/nacl_sdk/'
OS_CR = ('cros',)
OS_M = ('mac',)
OS_ML = ('mac', 'linux')
OS_MW = ('mac', 'win')
OS_MLW = ('mac', 'linux', 'win')
POST_STABLE = 'post_stable'
STABLE = 'stable'
BETA = 'beta'
DEV = 'dev'
CANARY = 'canary'
def GetArchiveUrl(host_os, version):
basename = 'naclsdk_%s.tar.bz2' % (host_os,)
return urlparse.urljoin(HTTPS_BASE_URL, posixpath.join(version, basename))
def MakeGsUrl(rel_path):
return update_nacl_manifest.GS_BUCKET_PATH + rel_path
def GetPathFromGsUrl(url):
assert url.startswith(update_nacl_manifest.GS_BUCKET_PATH)
return url[len(update_nacl_manifest.GS_BUCKET_PATH):]
def GetPathFromHttpsUrl(url):
assert url.startswith(HTTPS_BASE_URL)
return url[len(HTTPS_BASE_URL):]
def MakeArchive(host_os, version):
archive = manifest_util.Archive(host_os)
archive.url = GetArchiveUrl(host_os, version)
# dummy values that won't succeed if we ever use them, but will pass
# validation. :)
archive.checksum = {'sha1': 'foobar'}
archive.size = 1
return archive
def MakeNonPepperBundle(name, with_archives=False):
bundle = manifest_util.Bundle(name)
bundle.version = 1
bundle.revision = 1
bundle.description = 'Dummy bundle'
bundle.recommended = 'yes'
bundle.stability = 'stable'
if with_archives:
for host_os in OS_MLW:
archive = manifest_util.Archive(host_os)
archive.url = 'http://example.com'
archive.checksum = {'sha1': 'blah'}
archive.size = 2
bundle.AddArchive(archive)
return bundle
def MakeBundle(major_version, revision=0, version=None, host_oses=None,
stability='dev'):
assert (version is None or
version.split('.')[0] == 'trunk' or
version.split('.')[0] == str(major_version))
if stability == CANARY:
bundle_name = CANARY_BUNDLE_NAME
else:
bundle_name = 'pepper_' + str(major_version)
bundle = manifest_util.Bundle(bundle_name)
bundle.version = major_version
bundle.revision = revision
bundle.description = 'Chrome %s bundle, revision %s' % (major_version,
revision)
bundle.repath = 'pepper_' + str(major_version)
bundle.recommended = 'no'
bundle.stability = stability
if host_oses:
for host_os in host_oses:
bundle.AddArchive(MakeArchive(host_os, version))
return bundle
class MakeManifest(manifest_util.SDKManifest):
def __init__(self, *args):
manifest_util.SDKManifest.__init__(self)
for bundle in args:
self.AddBundle(bundle)
def AddBundle(self, bundle):
self.MergeBundle(bundle, allow_existing=False)
class MakeHistory(object):
def __init__(self):
# used for a dummy timestamp
self.datetime = datetime.datetime.utcnow()
self.history = []
def Add(self, host_oses, channel, version):
for host_os in host_oses:
timestamp = self.datetime.strftime('%Y-%m-%d %H:%M:%S.%f')
self.history.append((host_os, channel, version, timestamp))
self.datetime += datetime.timedelta(0, -3600) # one hour earlier
self.datetime += datetime.timedelta(-1) # one day earlier
class MakeFiles(dict):
def Add(self, bundle, add_archive_for_os=OS_MLW, add_json_for_os=OS_MLW):
for archive in bundle.GetArchives():
if not archive.host_os in add_archive_for_os:
continue
# add a dummy file for each archive
path = GetPathFromHttpsUrl(archive.url)
self[path] = 'My Dummy Archive'
if archive.host_os in add_json_for_os:
# add .json manifest snippet, it should look like a normal Bundle, but
# only has one archive.
new_bundle = manifest_util.Bundle('')
new_bundle.CopyFrom(bundle)
del new_bundle.archives[:]
new_bundle.AddArchive(archive)
self[path + '.json'] = new_bundle.GetDataAsString()
class TestDelegate(update_nacl_manifest.Delegate):
def __init__(self, manifest, history, files, version_mapping):
self.manifest = manifest
self.history = history
self.files = files
self.version_mapping = version_mapping
self.dryrun = 0
def GetRepoManifest(self):
return self.manifest
def GetHistory(self):
return self.history
def GetTrunkRevision(self, version):
return self.version_mapping[version]
def GsUtil_ls(self, url):
path = GetPathFromGsUrl(url)
result = []
for filename, _ in self.files.iteritems():
if filename.startswith(path):
result.append(MakeGsUrl(filename))
return result
def GsUtil_cat(self, url):
path = GetPathFromGsUrl(url)
if path not in self.files:
raise subprocess.CalledProcessError(1, 'gsutil cat %s' % (url,))
return self.files[path]
def GsUtil_cp(self, src, dest, stdin=None):
dest_path = GetPathFromGsUrl(dest)
if src == '-':
self.files[dest_path] = stdin
else:
src_path = GetPathFromGsUrl(src)
if src_path not in self.files:
raise subprocess.CalledProcessError(1, 'gsutil cp %s %s' % (src, dest))
self.files[dest_path] = self.files[src_path]
def Print(self, *args):
# eat all informational messages
pass
# Shorthand for premade bundles/versions
V18_0_1025_163 = '18.0.1025.163'
V18_0_1025_175 = '18.0.1025.175'
V18_0_1025_184 = '18.0.1025.184'
V19_0_1084_41 = '19.0.1084.41'
V19_0_1084_67 = '19.0.1084.67'
V21_0_1145_0 = '21.0.1145.0'
V21_0_1166_0 = '21.0.1166.0'
VTRUNK_138079 = 'trunk.138079'
B18_0_1025_163_R1_MLW = MakeBundle(18, 1, V18_0_1025_163, OS_MLW)
B18_0_1025_184_R1_MLW = MakeBundle(18, 1, V18_0_1025_184, OS_MLW)
B18_R1_NONE = MakeBundle(18)
B19_0_1084_41_R1_MLW = MakeBundle(19, 1, V19_0_1084_41, OS_MLW)
B19_0_1084_67_R1_MLW = MakeBundle(19, 1, V19_0_1084_67, OS_MLW)
B19_R1_NONE = MakeBundle(19)
BCANARY_R1_NONE = MakeBundle(0, stability=CANARY)
B21_0_1145_0_R1_MLW = MakeBundle(21, 1, V21_0_1145_0, OS_MLW)
B21_0_1166_0_R1_MW = MakeBundle(21, 1, V21_0_1166_0, OS_MW)
BTRUNK_138079_R1_MLW = MakeBundle(21, 1, VTRUNK_138079, OS_MLW)
NON_PEPPER_BUNDLE_NOARCHIVES = MakeNonPepperBundle('foo')
NON_PEPPER_BUNDLE_ARCHIVES = MakeNonPepperBundle('bar', with_archives=True)
class TestUpdateManifest(unittest.TestCase):
def setUp(self):
self.history = MakeHistory()
self.files = MakeFiles()
self.version_mapping = {}
self.delegate = None
self.uploaded_manifest = None
self.manifest = None
def _MakeDelegate(self):
self.delegate = TestDelegate(self.manifest, self.history.history,
self.files, self.version_mapping)
def _Run(self, host_oses):
update_nacl_manifest.Run(self.delegate, host_oses)
def _HasUploadedManifest(self):
return 'naclsdk_manifest2.json' in self.files
def _ReadUploadedManifest(self):
self.uploaded_manifest = manifest_util.SDKManifest()
self.uploaded_manifest.LoadDataFromString(
self.files['naclsdk_manifest2.json'])
def _AssertUploadedManifestHasBundle(self, bundle, stability):
if stability == CANARY:
bundle_name = CANARY_BUNDLE_NAME
else:
bundle_name = bundle.name
uploaded_manifest_bundle = self.uploaded_manifest.GetBundle(bundle_name)
# Bundles that we create in the test (and in the manifest snippets) have
# their stability set to "dev". update_nacl_manifest correctly updates it.
# So we have to force the stability of |bundle| so they compare equal.
test_bundle = copy.copy(bundle)
test_bundle.stability = stability
if stability == CANARY:
test_bundle.name = CANARY_BUNDLE_NAME
self.assertEqual(uploaded_manifest_bundle, test_bundle)
def _AddCsvHistory(self, history):
import csv
import cStringIO
history_stream = cStringIO.StringIO(history)
self.history.history = [(platform, channel, version, date)
for platform, channel, version, date in csv.reader(history_stream)]
def testNoUpdateNeeded(self):
self.manifest = MakeManifest(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self.assertEqual(self._HasUploadedManifest(), False)
# Add another bundle, make sure it still doesn't update
self.manifest.AddBundle(B19_0_1084_41_R1_MLW)
self._Run(OS_MLW)
self.assertEqual(self._HasUploadedManifest(), False)
def testSimpleUpdate(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testOnePlatformHasNewerRelease(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_M, BETA, V18_0_1025_175) # Mac has newer version
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testMultipleMissingPlatformsInHistory(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_ML, BETA, V18_0_1025_184)
self.history.Add(OS_M, BETA, V18_0_1025_175)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testUpdateOnlyOneBundle(self):
self.manifest = MakeManifest(B18_R1_NONE, B19_0_1084_41_R1_MLW)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self._AssertUploadedManifestHasBundle(B19_0_1084_41_R1_MLW, DEV)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 2)
def testUpdateTwoBundles(self):
self.manifest = MakeManifest(B18_R1_NONE, B19_R1_NONE)
self.history.Add(OS_MLW, DEV, V19_0_1084_41)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self.files.Add(B19_0_1084_41_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self._AssertUploadedManifestHasBundle(B19_0_1084_41_R1_MLW, DEV)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 2)
def testUpdateWithMissingPlatformsInArchives(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_184)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_184_R1_MLW, add_archive_for_os=OS_M)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testUpdateWithMissingManifestSnippets(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_184)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_184_R1_MLW, add_json_for_os=OS_ML)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testRecommendedIsStable(self):
for channel in STABLE, BETA, DEV, CANARY:
self.setUp()
bundle = copy.deepcopy(B18_R1_NONE)
self.manifest = MakeManifest(bundle)
self.history.Add(OS_MLW, channel, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
uploaded_bundle = self.uploaded_manifest.GetBundle('pepper_18')
if channel == STABLE:
self.assertEqual(uploaded_bundle.recommended, 'yes')
else:
self.assertEqual(uploaded_bundle.recommended, 'no')
def testNoUpdateWithNonPepperBundle(self):
self.manifest = MakeManifest(NON_PEPPER_BUNDLE_NOARCHIVES,
B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self.assertEqual(self._HasUploadedManifest(), False)
def testUpdateWithHistoryWithExtraneousPlatforms(self):
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_ML, BETA, V18_0_1025_184)
self.history.Add(OS_CR, BETA, V18_0_1025_184)
self.history.Add(OS_CR, BETA, V18_0_1025_175)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
self.files.Add(B18_0_1025_163_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B18_0_1025_163_R1_MLW, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testSnippetWithStringRevisionAndVersion(self):
# This test exists because some manifest snippets were uploaded with
# strings for their revisions and versions. I want to make sure the
# resulting manifest is still consistent with the old format.
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
bundle_string_revision = MakeBundle('18', '1234', V18_0_1025_163, OS_MLW)
self.files.Add(bundle_string_revision)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
uploaded_bundle = self.uploaded_manifest.GetBundle(
bundle_string_revision.name)
self.assertEqual(uploaded_bundle.revision, 1234)
self.assertEqual(uploaded_bundle.version, 18)
def testUpdateCanary(self):
# Note that the bundle in naclsdk_manifest2.json will be called
# CANARY_BUNDLE_NAME, whereas the bundle in the manifest "snippet" will be
# called "pepper_21".
canary_bundle = copy.deepcopy(BCANARY_R1_NONE)
self.manifest = MakeManifest(canary_bundle)
self.history.Add(OS_MW, CANARY, V21_0_1145_0)
self.files.Add(B21_0_1145_0_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(B21_0_1145_0_R1_MLW, CANARY)
def testUpdateCanaryUseTrunkArchives(self):
canary_bundle = copy.deepcopy(BCANARY_R1_NONE)
self.manifest = MakeManifest(canary_bundle)
self.history.Add(OS_MW, CANARY, V21_0_1166_0)
self.files.Add(B21_0_1166_0_R1_MW)
self.files.Add(BTRUNK_138079_R1_MLW)
self.version_mapping[V21_0_1166_0] = VTRUNK_138079
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
test_bundle = copy.deepcopy(B21_0_1166_0_R1_MW)
test_bundle.AddArchive(BTRUNK_138079_R1_MLW.GetArchive('linux'))
self._AssertUploadedManifestHasBundle(test_bundle, CANARY)
def testCanaryUseOnlyTrunkArchives(self):
self.manifest = MakeManifest(copy.deepcopy(BCANARY_R1_NONE))
history = """win,canary,21.0.1163.0,2012-06-04 12:35:44.784446
mac,canary,21.0.1163.0,2012-06-04 11:54:09.433166"""
self._AddCsvHistory(history)
self.version_mapping['21.0.1163.0'] = 'trunk.140240'
my_bundle = MakeBundle(21, 140240, '21.0.1163.0', OS_MLW)
self.files.Add(my_bundle)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(my_bundle, CANARY)
def testCanaryShouldOnlyUseCanaryVersions(self):
canary_bundle = copy.deepcopy(BCANARY_R1_NONE)
self.manifest = MakeManifest(canary_bundle)
self.history.Add(OS_MW, CANARY, V21_0_1166_0)
self.history.Add(OS_MW, BETA, V19_0_1084_41)
self.files.Add(B19_0_1084_41_R1_MLW)
self.version_mapping[V21_0_1166_0] = VTRUNK_138079
self._MakeDelegate()
self.assertRaises(Exception, self._Run, OS_MLW)
def testMissingCanaryFollowedByStableShouldWork(self):
history = """win,canary,21.0.1160.0,2012-06-01 19:44:35.936109
mac,canary,21.0.1160.0,2012-06-01 18:20:02.003123
mac,stable,19.0.1084.52,2012-06-01 17:59:21.559710
win,canary,21.0.1159.2,2012-06-01 02:31:43.877688
mac,stable,19.0.1084.53,2012-06-01 01:39:57.549149
win,canary,21.0.1158.0,2012-05-31 20:16:55.615236
win,canary,21.0.1157.0,2012-05-31 17:41:29.516013
mac,canary,21.0.1158.0,2012-05-31 17:41:27.591354
mac,beta,20.0.1132.21,2012-05-30 23:45:38.535586
linux,beta,20.0.1132.21,2012-05-30 23:45:37.025015
cf,beta,20.0.1132.21,2012-05-30 23:45:36.767529
win,beta,20.0.1132.21,2012-05-30 23:44:56.675123
win,canary,21.0.1156.1,2012-05-30 22:28:01.872056
mac,canary,21.0.1156.1,2012-05-30 21:20:29.920390
win,canary,21.0.1156.0,2012-05-30 12:46:48.046627
mac,canary,21.0.1156.0,2012-05-30 12:14:21.305090"""
self.manifest = MakeManifest(copy.deepcopy(BCANARY_R1_NONE))
self._AddCsvHistory(history)
self.version_mapping = {
'21.0.1160.0': 'trunk.139984',
'21.0.1159.2': 'trunk.139890',
'21.0.1158.0': 'trunk.139740',
'21.0.1157.0': 'unknown',
'21.0.1156.1': 'trunk.139576',
'21.0.1156.0': 'trunk.139984'}
self.files.Add(MakeBundle(21, 139890, '21.0.1159.2', OS_MLW))
self.files.Add(MakeBundle(21, 0, '21.0.1157.1', ('linux', 'win')))
my_bundle = MakeBundle(21, 139576, '21.0.1156.1', OS_MLW)
self.files.Add(my_bundle)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(my_bundle, CANARY)
def testExtensionWorksAsBz2(self):
# Allow old bundles with just .bz2 extension to work
self.manifest = MakeManifest(B18_R1_NONE)
self.history.Add(OS_MLW, BETA, V18_0_1025_163)
bundle = copy.deepcopy(B18_0_1025_163_R1_MLW)
archive_url = bundle.GetArchive('mac').url
bundle.GetArchive('mac').url = archive_url.replace('.tar', '')
self.files.Add(bundle)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
self._AssertUploadedManifestHasBundle(bundle, BETA)
self.assertEqual(len(self.uploaded_manifest.GetBundles()), 1)
def testOnlyOneStableBundle(self):
self.manifest = MakeManifest(B18_R1_NONE, B19_R1_NONE)
self.history.Add(OS_MLW, STABLE, V18_0_1025_163)
self.history.Add(OS_MLW, STABLE, V19_0_1084_41)
self.files.Add(B18_0_1025_163_R1_MLW)
self.files.Add(B19_0_1084_41_R1_MLW)
self._MakeDelegate()
self._Run(OS_MLW)
self._ReadUploadedManifest()
p18_bundle = self.uploaded_manifest.GetBundle(B18_R1_NONE.name)
self.assertEqual(p18_bundle.stability, POST_STABLE)
p19_bundle = self.uploaded_manifest.GetBundle(B19_R1_NONE.name)
self.assertEqual(p19_bundle.stability, STABLE)
class TestUpdateVitals(unittest.TestCase):
def setUp(self):
f = tempfile.NamedTemporaryFile('w', prefix="test_update_nacl_manifest")
self.test_file = f.name
f.close()
test_data = "Some test data"
self.sha1 = hashlib.sha1(test_data).hexdigest()
self.data_len = len(test_data)
with open(self.test_file, 'w') as f:
f.write(test_data)
def tearDown(self):
os.remove(self.test_file)
def testUpdateVitals(self):
archive = manifest_util.Archive(manifest_util.GetHostOS())
path = os.path.abspath(self.test_file)
if sys.platform == 'win32':
# On Windows, the path must start with three slashes, i.e.
# (file:///C:\whatever)
path = '/' + path
archive.url = 'file://' + path
bundle = MakeBundle(18)
bundle.AddArchive(archive)
manifest = MakeManifest(bundle)
archive = manifest.GetBundles()[0]['archives'][0]
self.assertTrue('size' not in archive)
self.assertTrue('checksum' not in archive)
self.assertRaises(manifest_util.Error, manifest.Validate)
manifest.Validate(add_missing_info=True)
self.assertEqual(archive['size'], self.data_len)
self.assertEqual(archive['checksum']['sha1'], self.sha1)
class TestRealDelegate(unittest.TestCase):
def setUp(self):
self.delegate = update_nacl_manifest.RealDelegate()
def testGetTrunkRevision(self):
revision_dict = {
'21.0.1180.80': '151582',
'23.0.1271.89': '167132',
'24.0.1305.4': '164971',
}
for version, revision in revision_dict.iteritems():
self.assertEqual('trunk.%s' % revision,
self.delegate.GetTrunkRevision(version))
if __name__ == '__main__':
sys.exit(unittest.main())
| {
"content_hash": "c25b52490aed4c5fa8dd812ff1eb66be",
"timestamp": "",
"source": "github",
"line_count": 583,
"max_line_length": 79,
"avg_line_length": 35.82161234991424,
"alnum_prop": 0.694167783949435,
"repo_name": "leighpauls/k2cro4",
"id": "e69f30756dcf61e22e81a67e14b8a305b4f9237c",
"size": "21073",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "native_client_sdk/src/build_tools/tests/test_update_nacl_manifest.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "3062"
},
{
"name": "AppleScript",
"bytes": "25392"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "68131038"
},
{
"name": "C",
"bytes": "242794338"
},
{
"name": "C#",
"bytes": "11024"
},
{
"name": "C++",
"bytes": "353525184"
},
{
"name": "Common Lisp",
"bytes": "3721"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Emacs Lisp",
"bytes": "1639"
},
{
"name": "F#",
"bytes": "4992"
},
{
"name": "FORTRAN",
"bytes": "10404"
},
{
"name": "Java",
"bytes": "3845159"
},
{
"name": "JavaScript",
"bytes": "39146656"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "Matlab",
"bytes": "22373"
},
{
"name": "Objective-C",
"bytes": "21887598"
},
{
"name": "PHP",
"bytes": "2344144"
},
{
"name": "Perl",
"bytes": "49033099"
},
{
"name": "Prolog",
"bytes": "2926122"
},
{
"name": "Python",
"bytes": "39863959"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Racket",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "304063"
},
{
"name": "Scheme",
"bytes": "14853"
},
{
"name": "Shell",
"bytes": "9195117"
},
{
"name": "Tcl",
"bytes": "1919771"
},
{
"name": "Verilog",
"bytes": "3092"
},
{
"name": "Visual Basic",
"bytes": "1430"
},
{
"name": "eC",
"bytes": "5079"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from nltk.compat import PY3
from nltk.corpus import teardown_module
def setup_module(module):
from nose import SkipTest
raise SkipTest("portuguese_en.doctest imports nltk.examples.pt which doesn't exist!")
if not PY3:
raise SkipTest(
"portuguese_en.doctest was skipped because non-ascii doctests are not supported under Python 2.x"
)
| {
"content_hash": "1fe92c2d9f11356c31091f38c7632f1f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 109,
"avg_line_length": 27.866666666666667,
"alnum_prop": 0.715311004784689,
"repo_name": "Edu-Glez/Bank_sentiment_analysis",
"id": "a33953b5bb73e061cbf73b7cb0b32d85e028da88",
"size": "442",
"binary": false,
"copies": "24",
"ref": "refs/heads/master",
"path": "env/lib/python3.6/site-packages/nltk/test/portuguese_en_fixt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lex",
"bytes": "101463"
},
{
"name": "Python",
"bytes": "29876"
},
{
"name": "Shell",
"bytes": "1509"
}
],
"symlink_target": ""
} |
from fairseq.models import FairseqEncoder
class CompositeEncoder(FairseqEncoder):
"""
A wrapper around a dictionary of :class:`FairseqEncoder` objects.
We run forward on each encoder and return a dictionary of outputs. The first
encoder's dictionary is used for initialization.
Args:
encoders (dict): a dictionary of :class:`FairseqEncoder` objects.
"""
def __init__(self, encoders):
super().__init__(next(iter(encoders.values())).dictionary)
self.encoders = encoders
for key in self.encoders:
self.add_module(key, self.encoders[key])
def forward(self, src_tokens, src_lengths):
"""
Args:
src_tokens (LongTensor): tokens in the source language of shape
`(batch, src_len)`
src_lengths (LongTensor): lengths of each source sentence of shape
`(batch)`
Returns:
dict:
the outputs from each Encoder
"""
encoder_out = {}
for key in self.encoders:
encoder_out[key] = self.encoders[key](src_tokens, src_lengths)
return encoder_out
def reorder_encoder_out(self, encoder_out, new_order):
"""Reorder encoder output according to new_order."""
for key in self.encoders:
encoder_out[key] = self.encoders[key].reorder_encoder_out(encoder_out[key], new_order)
return encoder_out
def max_positions(self):
return min(self.encoders[key].max_positions() for key in self.encoders)
def upgrade_state_dict(self, state_dict):
for key in self.encoders:
self.encoders[key].upgrade_state_dict(state_dict)
return state_dict
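# --- Hedged usage sketch (added for illustration; not part of the original fairseq file) ---
# The class docstring above describes wrapping a dict of FairseqEncoder objects and running
# forward on each. The snippet below only shows that wiring; ``TextEncoder``, ``dictionary``
# and the forward return value are hypothetical stand-ins, not fairseq API.
#
#   class TextEncoder(FairseqEncoder):
#       def forward(self, src_tokens, src_lengths):
#           return {'encoder_out': src_tokens}
#
#   composite = CompositeEncoder({'text': TextEncoder(dictionary),
#                                 'audio': TextEncoder(dictionary)})
#   out = composite(src_tokens, src_lengths)  # dict of outputs keyed by 'text' and 'audio'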
| {
"content_hash": "904a95d7965dc62da1a78ebd6f81c814",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 98,
"avg_line_length": 34.36,
"alnum_prop": 0.6204889406286379,
"repo_name": "hfp/libxsmm",
"id": "afef248cdcc5e657350ce37c8ba434bc01d70558",
"size": "1896",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/deeplearning/sparse_training/fairseq/fairseq/models/composite_encoder.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3115"
},
{
"name": "C",
"bytes": "8335143"
},
{
"name": "C++",
"bytes": "84416"
},
{
"name": "CSS",
"bytes": "242"
},
{
"name": "Fortran",
"bytes": "102021"
},
{
"name": "HTML",
"bytes": "390"
},
{
"name": "JavaScript",
"bytes": "1062"
},
{
"name": "Makefile",
"bytes": "158870"
},
{
"name": "Python",
"bytes": "36612"
},
{
"name": "Shell",
"bytes": "84205"
},
{
"name": "Starlark",
"bytes": "882"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0020_translatable_page_content'),
]
operations = [
migrations.AddField(
model_name='feedback',
name='sent_from',
field=models.CharField(blank=True, max_length=500, null=True, verbose_name='Sent from'),
),
]
| {
"content_hash": "b9765518c4d7bef1cebddb918d84db17",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 100,
"avg_line_length": 24.11111111111111,
"alnum_prop": 0.6105990783410138,
"repo_name": "andersinno/kuvaselaamo",
"id": "fa84e0b2e8ea7a64c6a28f4ea1df7f70f38d975b",
"size": "507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hkm/migrations/0021_page_ref.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "247603"
},
{
"name": "HTML",
"bytes": "103994"
},
{
"name": "JavaScript",
"bytes": "449098"
},
{
"name": "Python",
"bytes": "263106"
}
],
"symlink_target": ""
} |
"""
Module containing the SmartCommands.
SmartCommands are commands (inherited from the Command class)
but they handle features to help parsing, through the "params"
class attribute. See the class documentation for more information.
"""
from commands.command import Command
from commands.parameters.pipe import ParamPipe
class SmartCommand(Command):
"""Base class for a SmartCommand.
A SmartCommand is an evennia Command with additional
    features. Its main purpose is to help with the parsing of parameters,
    using an extended definition of the expected arguments. For
instance, the 'pk' command, to set the player-kill flag,
could define a single parameter: either 'on' or 'off'.
More complex commands, like 'get', could require additional
parameters: first (optionally) a number, then an object
name, then, optionally, the FROM keyword and a different
object name. This would result in more complex parameters
that the SmartCommand will more easily interpret.
"""
params = None
def __init__(self, **kwargs):
Command.__init__(self, **kwargs)
if type(self).params is None:
type(self).params = ParamPipe()
self.setup()
def setup(self):
"""Create the parameters using methods of self.params.
        (See 'commands.parameters.pipe.ParamPipe' for details.)
"""
pass
def func(self):
"""Actual function, parsing of parameters.
DO NOT OVERRIDE this function in an inherited class.
This method has two purposes:
1. To parse the parameters and display an error message if needed.
2. To call the 'execute' method where the command mechanism is.
Therefore, you should override the 'execute' method,
not the 'func' one.
The parameters are parsed from self.args. The result
is then stored in attributes of 'self'. For instance:
class Roll(SmartCommand):
'''Roll a dice X times.
Usage:
roll [number]
Roll a dice 1 or more times. You can specify the
number as an argument. If no argument is specified,
roll the dice only once.
'''
key = "roll"
def setup(self):
self.params.add("number", default=1)
def execute(self):
msg = "You roll a dice {} times.".format(self.number)
self.caller.msg(msg)
In the MUD client:
> roll 5
You roll the dice 5 times.
> roll
You roll the dice 1 times.
> roll ok
This is not a valid number.
"""
try:
self.params.parse(self)
except ValueError, err:
self.caller.msg("|r{}|n".format(err))
else:
self.execute()
def execute(self):
"""Execute the actual command.
This method is called after parsing of parameters
(if it occurred without error). You can (and should)
override this method in command classes.
"""
pass
| {
"content_hash": "584dbdc59146a7da085000f7956664c9",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 75,
"avg_line_length": 31.225490196078432,
"alnum_prop": 0.5990580847723704,
"repo_name": "vlegoff/mud",
"id": "d8f2e14fe7ee03fc49e9f7015e2efe3afdbd5ac5",
"size": "3185",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "commands/smart.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "98485"
}
],
"symlink_target": ""
} |
import unittest
import os
import mock
from gzip import GzipFile
from shutil import rmtree
import cPickle as pickle
import time
import tempfile
from contextlib import contextmanager, closing
from errno import ENOENT, ENOTEMPTY, ENOTDIR
from eventlet.green import subprocess
from eventlet import Timeout, tpool
from test.unit import debug_logger, patch_policies
from swift.common import utils
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
storage_directory
from swift.common import ring
from swift.obj import diskfile, replicator as object_replicator
from swift.common.storage_policy import StoragePolicy, POLICIES
def _ips():
return ['127.0.0.0']
object_replicator.whataremyips = _ips
def mock_http_connect(status):
class FakeConn(object):
def __init__(self, status, *args, **kwargs):
self.status = status
self.reason = 'Fake'
self.host = args[0]
self.port = args[1]
self.method = args[4]
self.path = args[5]
self.with_exc = False
self.headers = kwargs.get('headers', {})
def getresponse(self):
if self.with_exc:
raise Exception('test')
return self
def getheader(self, header):
return self.headers[header]
def read(self, amt=None):
return pickle.dumps({})
def close(self):
return
return lambda *args, **kwargs: FakeConn(status, *args, **kwargs)
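# --- Hedged usage note (added for illustration; not part of the original test module) ---
# mock_http_connect(status) returns a factory that the tests swap in for
# object_replicator.http_connect; every REPLICATE request then receives a FakeConn
# whose getresponse() reports the given status.
#
#   object_replicator.http_connect = mock_http_connect(200)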
process_errors = []
class MockProcess(object):
ret_code = None
ret_log = None
check_args = None
class Stream(object):
def read(self):
return MockProcess.ret_log.next()
def __init__(self, *args, **kwargs):
targs = MockProcess.check_args.next()
for targ in targs:
# Allow more than 2 candidate targs
            # (e.g. a case where either node is fine when the nodes are shuffled)
if isinstance(targ, tuple):
allowed = False
for target in targ:
if target in args[0]:
allowed = True
if not allowed:
process_errors.append("Invalid: %s not in %s" % (targ,
args))
else:
if targ not in args[0]:
process_errors.append("Invalid: %s not in %s" % (targ,
args))
self.stdout = self.Stream()
def wait(self):
return self.ret_code.next()
@contextmanager
def _mock_process(ret):
orig_process = subprocess.Popen
MockProcess.ret_code = (i[0] for i in ret)
MockProcess.ret_log = (i[1] for i in ret)
MockProcess.check_args = (i[2] for i in ret)
object_replicator.subprocess.Popen = MockProcess
yield
object_replicator.subprocess.Popen = orig_process
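# --- Hedged usage note (added for illustration; not part of the original test module) ---
# MockProcess/_mock_process expect an iterable of (return_code, log_output, expected_args)
# tuples, one per Popen invocation; each entry in expected_args must appear as a substring
# of the spawned command line (or be a tuple of acceptable alternatives). The path used
# below is illustrative only.
#
#   with _mock_process([(0, '', ['rsync', '/srv/node/sda/objects/1/abc'])]):
#       replicator.run_once()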
def _create_test_rings(path, devs=None):
testgz = os.path.join(path, 'object.ring.gz')
intended_replica2part2dev_id = [
[0, 1, 2, 3, 4, 5, 6],
[1, 2, 3, 0, 5, 6, 4],
[2, 3, 0, 1, 6, 4, 5],
]
intended_devs = devs or [
{'id': 0, 'device': 'sda', 'zone': 0,
'region': 1, 'ip': '127.0.0.0', 'port': 6000},
{'id': 1, 'device': 'sda', 'zone': 1,
'region': 2, 'ip': '127.0.0.1', 'port': 6000},
{'id': 2, 'device': 'sda', 'zone': 2,
'region': 1, 'ip': '127.0.0.2', 'port': 6000},
{'id': 3, 'device': 'sda', 'zone': 4,
'region': 2, 'ip': '127.0.0.3', 'port': 6000},
{'id': 4, 'device': 'sda', 'zone': 5,
'region': 1, 'ip': '127.0.0.4', 'port': 6000},
{'id': 5, 'device': 'sda', 'zone': 6,
'region': 2, 'ip': 'fe80::202:b3ff:fe1e:8329', 'port': 6000},
{'id': 6, 'device': 'sda', 'zone': 7, 'region': 1,
'ip': '2001:0db8:85a3:0000:0000:8a2e:0370:7334', 'port': 6000},
]
intended_part_shift = 30
with closing(GzipFile(testgz, 'wb')) as f:
pickle.dump(
ring.RingData(intended_replica2part2dev_id,
intended_devs, intended_part_shift),
f)
testgz = os.path.join(path, 'object-1.ring.gz')
with closing(GzipFile(testgz, 'wb')) as f:
pickle.dump(
ring.RingData(intended_replica2part2dev_id,
intended_devs, intended_part_shift),
f)
for policy in POLICIES:
policy.object_ring = None # force reload
return
@patch_policies([StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', True)])
class TestObjectReplicator(unittest.TestCase):
def setUp(self):
utils.HASH_PATH_SUFFIX = 'endcap'
utils.HASH_PATH_PREFIX = ''
# Setup a test ring (stolen from common/test_ring.py)
self.testdir = tempfile.mkdtemp()
self.devices = os.path.join(self.testdir, 'node')
rmtree(self.testdir, ignore_errors=1)
os.mkdir(self.testdir)
os.mkdir(self.devices)
os.mkdir(os.path.join(self.devices, 'sda'))
self.objects = os.path.join(self.devices, 'sda',
diskfile.get_data_dir(POLICIES[0]))
self.objects_1 = os.path.join(self.devices, 'sda',
diskfile.get_data_dir(POLICIES[1]))
os.mkdir(self.objects)
os.mkdir(self.objects_1)
self.parts = {}
self.parts_1 = {}
for part in ['0', '1', '2', '3']:
self.parts[part] = os.path.join(self.objects, part)
os.mkdir(self.parts[part])
self.parts_1[part] = os.path.join(self.objects_1, part)
os.mkdir(self.parts_1[part])
_create_test_rings(self.testdir)
self.conf = dict(
swift_dir=self.testdir, devices=self.devices, mount_check='false',
timeout='300', stats_interval='1', sync_method='rsync')
self.replicator = object_replicator.ObjectReplicator(self.conf)
self.logger = self.replicator.logger = debug_logger('test-replicator')
self.df_mgr = diskfile.DiskFileManager(self.conf,
self.replicator.logger)
def tearDown(self):
rmtree(self.testdir, ignore_errors=1)
def test_run_once(self):
conf = dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1')
replicator = object_replicator.ObjectReplicator(conf)
was_connector = object_replicator.http_connect
object_replicator.http_connect = mock_http_connect(200)
cur_part = '0'
df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
policy=POLICIES[0])
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, cur_part, data_dir)
process_arg_checker = []
ring = replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
rsync_mods = tuple(['%s::object/sda/objects/%s' %
(node['ip'], cur_part) for node in nodes])
for node in nodes:
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mods]))
with _mock_process(process_arg_checker):
replicator.run_once()
self.assertFalse(process_errors)
object_replicator.http_connect = was_connector
# policy 1
def test_run_once_1(self):
conf = dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1')
replicator = object_replicator.ObjectReplicator(conf)
was_connector = object_replicator.http_connect
object_replicator.http_connect = mock_http_connect(200)
cur_part = '0'
df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
policy=POLICIES[1])
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects_1, cur_part, data_dir)
process_arg_checker = []
ring = replicator.load_object_ring(POLICIES[1])
nodes = [node for node in
ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
rsync_mods = tuple(['%s::object/sda/objects-1/%s' %
(node['ip'], cur_part) for node in nodes])
for node in nodes:
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mods]))
with _mock_process(process_arg_checker):
replicator.run_once()
self.assertFalse(process_errors)
object_replicator.http_connect = was_connector
def test_check_ring(self):
for pol in POLICIES:
obj_ring = self.replicator.load_object_ring(pol)
self.assertTrue(self.replicator.check_ring(obj_ring))
orig_check = self.replicator.next_check
self.replicator.next_check = orig_check - 30
self.assertTrue(self.replicator.check_ring(obj_ring))
self.replicator.next_check = orig_check
orig_ring_time = obj_ring._mtime
obj_ring._mtime = orig_ring_time - 30
self.assertTrue(self.replicator.check_ring(obj_ring))
self.replicator.next_check = orig_check - 30
self.assertFalse(self.replicator.check_ring(obj_ring))
def test_collect_jobs_mkdirs_error(self):
non_local = {}
def blowup_mkdirs(path):
non_local['path'] = path
raise OSError('Ow!')
with mock.patch.object(object_replicator, 'mkdirs', blowup_mkdirs):
rmtree(self.objects, ignore_errors=1)
object_replicator.mkdirs = blowup_mkdirs
self.replicator.collect_jobs()
self.assertEqual(self.logger.get_lines_for_level('error'), [
'ERROR creating %s: ' % non_local['path']])
log_args, log_kwargs = self.logger.log_dict['error'][0]
self.assertEqual(str(log_kwargs['exc_info'][1]), 'Ow!')
def test_collect_jobs(self):
jobs = self.replicator.collect_jobs()
jobs_to_delete = [j for j in jobs if j['delete']]
jobs_by_pol_part = {}
for job in jobs:
jobs_by_pol_part[str(int(job['policy'])) + job['partition']] = job
self.assertEquals(len(jobs_to_delete), 2)
self.assertTrue('1', jobs_to_delete[0]['partition'])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['00']['nodes']], [1, 2])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['01']['nodes']],
[1, 2, 3])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['02']['nodes']], [2, 3])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['03']['nodes']], [3, 1])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['10']['nodes']], [1, 2])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['11']['nodes']],
[1, 2, 3])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['12']['nodes']], [2, 3])
self.assertEquals(
[node['id'] for node in jobs_by_pol_part['13']['nodes']], [3, 1])
for part in ['00', '01', '02', '03', ]:
for node in jobs_by_pol_part[part]['nodes']:
self.assertEquals(node['device'], 'sda')
self.assertEquals(jobs_by_pol_part[part]['path'],
os.path.join(self.objects, part[1:]))
for part in ['10', '11', '12', '13', ]:
for node in jobs_by_pol_part[part]['nodes']:
self.assertEquals(node['device'], 'sda')
self.assertEquals(jobs_by_pol_part[part]['path'],
os.path.join(self.objects_1, part[1:]))
def test_collect_jobs_handoffs_first(self):
self.replicator.handoffs_first = True
jobs = self.replicator.collect_jobs()
self.assertTrue(jobs[0]['delete'])
self.assertEquals('1', jobs[0]['partition'])
def test_replicator_skips_bogus_partition_dirs(self):
# A directory in the wrong place shouldn't crash the replicator
rmtree(self.objects)
rmtree(self.objects_1)
os.mkdir(self.objects)
os.mkdir(self.objects_1)
os.mkdir(os.path.join(self.objects, "burrito"))
jobs = self.replicator.collect_jobs()
self.assertEqual(len(jobs), 0)
def test_replicator_removes_zbf(self):
# After running xfs_repair, a partition directory could become a
# zero-byte file. If this happens, the replicator should clean it
# up, log something, and move on to the next partition.
# Surprise! Partition dir 1 is actually a zero-byte file.
pol_0_part_1_path = os.path.join(self.objects, '1')
rmtree(pol_0_part_1_path)
with open(pol_0_part_1_path, 'w'):
pass
self.assertTrue(os.path.isfile(pol_0_part_1_path)) # sanity check
# Policy 1's partition dir 1 is also a zero-byte file.
pol_1_part_1_path = os.path.join(self.objects_1, '1')
rmtree(pol_1_part_1_path)
with open(pol_1_part_1_path, 'w'):
pass
self.assertTrue(os.path.isfile(pol_1_part_1_path)) # sanity check
# Don't delete things in collect_jobs(); all the stat() calls would
# make replicator startup really slow.
self.replicator.collect_jobs()
self.assertTrue(os.path.exists(pol_0_part_1_path))
self.assertTrue(os.path.exists(pol_1_part_1_path))
# After a replication pass, the files should be gone
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.run_once()
self.assertFalse(os.path.exists(pol_0_part_1_path))
self.assertFalse(os.path.exists(pol_1_part_1_path))
self.assertEqual(
sorted(self.logger.get_lines_for_level('warning')), [
('Removing partition directory which was a file: %s'
% pol_1_part_1_path),
('Removing partition directory which was a file: %s'
% pol_0_part_1_path),
])
def test_delete_partition(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_partition_default_sync_method(self):
self.replicator.conf.pop('sync_method')
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_partition_ssync_single_region(self):
devs = [
{'id': 0, 'device': 'sda', 'zone': 0,
'region': 1, 'ip': '127.0.0.0', 'port': 6000},
{'id': 1, 'device': 'sda', 'zone': 1,
'region': 1, 'ip': '127.0.0.1', 'port': 6000},
{'id': 2, 'device': 'sda', 'zone': 2,
'region': 1, 'ip': '127.0.0.2', 'port': 6000},
{'id': 3, 'device': 'sda', 'zone': 4,
'region': 1, 'ip': '127.0.0.3', 'port': 6000},
{'id': 4, 'device': 'sda', 'zone': 5,
'region': 1, 'ip': '127.0.0.4', 'port': 6000},
{'id': 5, 'device': 'sda', 'zone': 6,
'region': 1, 'ip': 'fe80::202:b3ff:fe1e:8329', 'port': 6000},
{'id': 6, 'device': 'sda', 'zone': 7, 'region': 1,
'ip': '2001:0db8:85a3:0000:0000:8a2e:0370:7334', 'port': 6000},
]
_create_test_rings(self.testdir, devs=devs)
self.conf['sync_method'] = 'ssync'
self.replicator = object_replicator.ObjectReplicator(self.conf)
self.replicator.logger = debug_logger()
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
ts = normalize_timestamp(time.time())
f = open(os.path.join(df._datadir, ts + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
whole_path_from = storage_directory(self.objects, 1, ohash)
suffix_dir_path = os.path.dirname(whole_path_from)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
def _fake_ssync(node, job, suffixes, **kwargs):
return True, {ohash: ts}
self.replicator.sync_method = _fake_ssync
self.replicator.replicate()
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertFalse(os.access(suffix_dir_path, os.F_OK))
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_partition_1(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES[1])
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects_1, '1', data_dir)
part_path = os.path.join(self.objects_1, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[1])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for node in nodes:
rsync_mod = '%s::object/sda/objects-1/%s' % (node['ip'], 1)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_partition_with_failures(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for i, node in enumerate(nodes):
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
if i == 0:
# force one of the rsync calls to fail
ret_code = 1
else:
ret_code = 0
process_arg_checker.append(
(ret_code, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
# The path should still exist
self.assertTrue(os.access(part_path, os.F_OK))
def test_delete_partition_with_handoff_delete(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.handoff_delete = 2
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for i, node in enumerate(nodes):
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
if i == 0:
# force one of the rsync calls to fail
ret_code = 1
else:
ret_code = 0
process_arg_checker.append(
(ret_code, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_partition_with_handoff_delete_failures(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.handoff_delete = 2
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for i, node in enumerate(nodes):
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
if i in (0, 1):
# force two of the rsync calls to fail
ret_code = 1
else:
ret_code = 0
process_arg_checker.append(
(ret_code, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(part_path, os.F_OK))
def test_delete_partition_with_handoff_delete_fail_in_other_region(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
if node['region'] != 1:
                    # force the rsync calls for the other region to fail
ret_code = 1
else:
ret_code = 0
process_arg_checker.append(
(ret_code, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(part_path, os.F_OK))
def test_delete_partition_override_params(self):
df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate(override_devices=['sdb'])
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate(override_partitions=['9'])
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate(override_devices=['sda'],
override_partitions=['1'])
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_policy_override_params(self):
df0 = self.df_mgr.get_diskfile('sda', '99', 'a', 'c', 'o',
policy=POLICIES.legacy)
df1 = self.df_mgr.get_diskfile('sda', '99', 'a', 'c', 'o',
policy=POLICIES[1])
mkdirs(df0._datadir)
mkdirs(df1._datadir)
pol0_part_path = os.path.join(self.objects, '99')
pol1_part_path = os.path.join(self.objects_1, '99')
# sanity checks
self.assertTrue(os.access(pol0_part_path, os.F_OK))
self.assertTrue(os.access(pol1_part_path, os.F_OK))
# a bogus policy index doesn't bother the replicator any more than a
# bogus device or partition does
self.replicator.run_once(policies='1,2,5')
self.assertFalse(os.access(pol1_part_path, os.F_OK))
self.assertTrue(os.access(pol0_part_path, os.F_OK))
def test_delete_partition_ssync(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
ts = normalize_timestamp(time.time())
f = open(os.path.join(df._datadir, ts + '.data'),
'wb')
f.write('0')
f.close()
ohash = hash_path('a', 'c', 'o')
whole_path_from = storage_directory(self.objects, 1, ohash)
suffix_dir_path = os.path.dirname(whole_path_from)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
self.call_nums = 0
self.conf['sync_method'] = 'ssync'
def _fake_ssync(node, job, suffixes, **kwargs):
success = True
ret_val = {ohash: ts}
if self.call_nums == 2:
# ssync should return (True, []) only when the second
                    # candidate node has not got the replica yet.
success = False
ret_val = {}
self.call_nums += 1
return success, ret_val
self.replicator.sync_method = _fake_ssync
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate()
# The file should be deleted at the second replicate call
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertFalse(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate()
# The partition should be deleted at the third replicate call
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertFalse(os.access(suffix_dir_path, os.F_OK))
self.assertFalse(os.access(part_path, os.F_OK))
del self.call_nums
def test_delete_partition_ssync_with_sync_failure(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
ts = normalize_timestamp(time.time())
mkdirs(df._datadir)
f = open(os.path.join(df._datadir, ts + '.data'), 'wb')
f.write('0')
f.close()
ohash = hash_path('a', 'c', 'o')
whole_path_from = storage_directory(self.objects, 1, ohash)
suffix_dir_path = os.path.dirname(whole_path_from)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
self.call_nums = 0
self.conf['sync_method'] = 'ssync'
            def _fake_ssync(node, job, suffixes, **kwargs):
success = False
ret_val = {}
if self.call_nums == 2:
# ssync should return (True, []) only when the second
                    # candidate node has not got the replica yet.
success = True
ret_val = {ohash: ts}
self.call_nums += 1
return success, ret_val
self.replicator.sync_method = _fake_ssync
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
del self.call_nums
def test_delete_objs_ssync_only_when_in_sync(self):
self.replicator.logger = debug_logger('test-replicator')
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
ts = normalize_timestamp(time.time())
f = open(os.path.join(df._datadir, ts + '.data'), 'wb')
f.write('0')
f.close()
ohash = hash_path('a', 'c', 'o')
whole_path_from = storage_directory(self.objects, 1, ohash)
suffix_dir_path = os.path.dirname(whole_path_from)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
self.call_nums = 0
self.conf['sync_method'] = 'ssync'
in_sync_objs = {}
def _fake_ssync(node, job, suffixes, remote_check_objs=None):
self.call_nums += 1
if remote_check_objs is None:
# sync job
ret_val = {ohash: ts}
else:
ret_val = in_sync_objs
return True, ret_val
self.replicator.sync_method = _fake_ssync
self.replicator.replicate()
self.assertEqual(3, self.call_nums)
# The file should still exist
self.assertTrue(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
del self.call_nums
def test_delete_partition_ssync_with_cleanup_failure(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.logger = mock_logger = \
debug_logger('test-replicator')
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
ts = normalize_timestamp(time.time())
f = open(os.path.join(df._datadir, ts + '.data'), 'wb')
f.write('0')
f.close()
ohash = hash_path('a', 'c', 'o')
whole_path_from = storage_directory(self.objects, 1, ohash)
suffix_dir_path = os.path.dirname(whole_path_from)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
self.call_nums = 0
self.conf['sync_method'] = 'ssync'
def _fake_ssync(node, job, suffixes, **kwargs):
success = True
ret_val = {ohash: ts}
if self.call_nums == 2:
# ssync should return (True, []) only when the second
                    # candidate node has not got the replica yet.
success = False
ret_val = {}
self.call_nums += 1
return success, ret_val
rmdir_func = os.rmdir
def raise_exception_rmdir(exception_class, error_no):
instance = exception_class()
instance.errno = error_no
def func(directory):
if directory == suffix_dir_path:
raise instance
else:
rmdir_func(directory)
return func
self.replicator.sync_method = _fake_ssync
self.replicator.replicate()
# The file should still exist
self.assertTrue(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
# Fail with ENOENT
with mock.patch('os.rmdir',
raise_exception_rmdir(OSError, ENOENT)):
self.replicator.replicate()
self.assertFalse(mock_logger.get_lines_for_level('error'))
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
# Fail with ENOTEMPTY
with mock.patch('os.rmdir',
raise_exception_rmdir(OSError, ENOTEMPTY)):
self.replicator.replicate()
self.assertFalse(mock_logger.get_lines_for_level('error'))
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
# Fail with ENOTDIR
with mock.patch('os.rmdir',
raise_exception_rmdir(OSError, ENOTDIR)):
self.replicator.replicate()
self.assertEqual(len(mock_logger.get_lines_for_level('error')), 1)
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertTrue(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
# Finally we can cleanup everything
self.replicator.replicate()
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertFalse(os.access(suffix_dir_path, os.F_OK))
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate()
self.assertFalse(os.access(whole_path_from, os.F_OK))
self.assertFalse(os.access(suffix_dir_path, os.F_OK))
self.assertFalse(os.access(part_path, os.F_OK))
def test_run_once_recover_from_failure(self):
conf = dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1')
replicator = object_replicator.ObjectReplicator(conf)
was_connector = object_replicator.http_connect
try:
object_replicator.http_connect = mock_http_connect(200)
            # Write some files into '1' and run replicate - they should be moved
            # to the other partitions and then the node should get deleted.
cur_part = '1'
df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, cur_part, data_dir)
ring = replicator.load_object_ring(POLICIES[0])
process_arg_checker = []
nodes = [node for node in
ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
cur_part)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
self.assertTrue(os.access(os.path.join(self.objects,
'1', data_dir, ohash),
os.F_OK))
with _mock_process(process_arg_checker):
replicator.run_once()
self.assertFalse(process_errors)
for i, result in [('0', True), ('1', False),
('2', True), ('3', True)]:
self.assertEquals(os.access(
os.path.join(self.objects,
i, diskfile.HASH_FILE),
os.F_OK), result)
finally:
object_replicator.http_connect = was_connector
def test_run_once_recover_from_timeout(self):
conf = dict(swift_dir=self.testdir, devices=self.devices,
mount_check='false', timeout='300', stats_interval='1')
replicator = object_replicator.ObjectReplicator(conf)
was_connector = object_replicator.http_connect
was_get_hashes = object_replicator.get_hashes
was_execute = tpool.execute
self.get_hash_count = 0
try:
def fake_get_hashes(*args, **kwargs):
self.get_hash_count += 1
if self.get_hash_count == 3:
# raise timeout on last call to get hashes
raise Timeout()
return 2, {'abc': 'def'}
def fake_exc(tester, *args, **kwargs):
if 'Error syncing partition' in args[0]:
tester.i_failed = True
self.i_failed = False
object_replicator.http_connect = mock_http_connect(200)
object_replicator.get_hashes = fake_get_hashes
replicator.logger.exception = \
lambda *args, **kwargs: fake_exc(self, *args, **kwargs)
            # Write some files into '1' and run replicate - they should be moved
            # to the other partitions and then the node should get deleted.
cur_part = '1'
df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, cur_part, data_dir)
process_arg_checker = []
ring = replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
cur_part)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
self.assertTrue(os.access(os.path.join(self.objects,
'1', data_dir, ohash),
os.F_OK))
with _mock_process(process_arg_checker):
replicator.run_once()
self.assertFalse(process_errors)
self.assertFalse(self.i_failed)
finally:
object_replicator.http_connect = was_connector
object_replicator.get_hashes = was_get_hashes
tpool.execute = was_execute
def test_run(self):
with _mock_process([(0, '')] * 100):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.replicate()
def test_run_withlog(self):
with _mock_process([(0, "stuff in log")] * 100):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.replicate()
def test_sync_just_calls_sync_method(self):
self.replicator.sync_method = mock.MagicMock()
self.replicator.sync('node', 'job', 'suffixes')
self.replicator.sync_method.assert_called_once_with(
'node', 'job', 'suffixes')
@mock.patch('swift.obj.replicator.tpool_reraise', autospec=True)
@mock.patch('swift.obj.replicator.http_connect', autospec=True)
def test_update(self, mock_http, mock_tpool_reraise):
def set_default(self):
self.replicator.suffix_count = 0
self.replicator.suffix_sync = 0
self.replicator.suffix_hash = 0
self.replicator.replication_count = 0
self.replicator.partition_times = []
self.headers = {'Content-Length': '0',
'user-agent': 'object-replicator %s' % os.getpid()}
self.replicator.logger = mock_logger = mock.MagicMock()
mock_tpool_reraise.return_value = (0, {})
all_jobs = self.replicator.collect_jobs()
jobs = [job for job in all_jobs if not job['delete']]
mock_http.return_value = answer = mock.MagicMock()
answer.getresponse.return_value = resp = mock.MagicMock()
        # Check incorrect http_connect with status 507 and
# count of attempts and call args
resp.status = 507
error = '%(ip)s/%(device)s responded as unmounted'
expect = 'Error syncing partition'
for job in jobs:
set_default(self)
ring = job['policy'].object_ring
self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy'])
self.replicator.update(job)
self.assertTrue(error in mock_logger.error.call_args[0][0])
self.assertTrue(expect in mock_logger.exception.call_args[0][0])
self.assertEquals(len(self.replicator.partition_times), 1)
self.assertEquals(mock_http.call_count, len(ring._devs) - 1)
reqs = []
for node in job['nodes']:
reqs.append(mock.call(node['ip'], node['port'], node['device'],
job['partition'], 'REPLICATE', '',
headers=self.headers))
if job['partition'] == '0':
self.assertEquals(self.replicator.suffix_hash, 0)
mock_http.assert_has_calls(reqs, any_order=True)
mock_http.reset_mock()
mock_logger.reset_mock()
        # Check incorrect http_connect with status 400 != HTTP_OK
resp.status = 400
error = 'Invalid response %(resp)s from %(ip)s'
for job in jobs:
set_default(self)
self.replicator.update(job)
self.assertTrue(error in mock_logger.error.call_args[0][0])
self.assertEquals(len(self.replicator.partition_times), 1)
mock_logger.reset_mock()
# Check successful http_connection and exception with
        # incorrect pickle.loads(resp.read())
resp.status = 200
expect = 'Error syncing with node:'
for job in jobs:
set_default(self)
self.replicator.update(job)
self.assertTrue(expect in mock_logger.exception.call_args[0][0])
self.assertEquals(len(self.replicator.partition_times), 1)
mock_logger.reset_mock()
# Check successful http_connection and correct
# pickle.loads(resp.read()) for non local node
resp.status = 200
local_job = None
resp.read.return_value = pickle.dumps({})
for job in jobs:
set_default(self)
# limit local job to policy 0 for simplicity
if job['partition'] == '0' and int(job['policy']) == 0:
local_job = job.copy()
continue
self.replicator.update(job)
self.assertEquals(mock_logger.exception.call_count, 0)
self.assertEquals(mock_logger.error.call_count, 0)
self.assertEquals(len(self.replicator.partition_times), 1)
self.assertEquals(self.replicator.suffix_hash, 0)
self.assertEquals(self.replicator.suffix_sync, 0)
self.assertEquals(self.replicator.suffix_count, 0)
mock_logger.reset_mock()
# Check successful http_connect and sync for local node
mock_tpool_reraise.return_value = (1, {'a83': 'ba47fd314242ec8c'
'7efb91f5d57336e4'})
resp.read.return_value = pickle.dumps({'a83': 'c130a2c17ed45102a'
'ada0f4eee69494ff'})
set_default(self)
self.replicator.sync = fake_func = \
mock.MagicMock(return_value=(True, []))
self.replicator.update(local_job)
reqs = []
for node in local_job['nodes']:
reqs.append(mock.call(node, local_job, ['a83']))
fake_func.assert_has_calls(reqs, any_order=True)
self.assertEquals(fake_func.call_count, 2)
self.assertEquals(self.replicator.replication_count, 1)
self.assertEquals(self.replicator.suffix_sync, 2)
self.assertEquals(self.replicator.suffix_hash, 1)
self.assertEquals(self.replicator.suffix_count, 1)
# Efficient Replication Case
set_default(self)
self.replicator.sync = fake_func = \
mock.MagicMock(return_value=(True, []))
all_jobs = self.replicator.collect_jobs()
job = None
for tmp in all_jobs:
if tmp['partition'] == '3':
job = tmp
break
# The candidate nodes to replicate (i.e. dev1 and dev3)
# belong to another region
self.replicator.update(job)
self.assertEquals(fake_func.call_count, 1)
self.assertEquals(self.replicator.replication_count, 1)
self.assertEquals(self.replicator.suffix_sync, 1)
self.assertEquals(self.replicator.suffix_hash, 1)
self.assertEquals(self.replicator.suffix_count, 1)
mock_http.reset_mock()
mock_logger.reset_mock()
# test for replication params on policy 0 only
repl_job = local_job.copy()
for node in repl_job['nodes']:
node['replication_ip'] = '127.0.0.11'
node['replication_port'] = '6011'
set_default(self)
        # with only one set of headers, make sure we specify index 0 here
# as otherwise it may be different from earlier tests
self.headers['X-Backend-Storage-Policy-Index'] = 0
self.replicator.update(repl_job)
reqs = []
for node in repl_job['nodes']:
reqs.append(mock.call(node['replication_ip'],
node['replication_port'], node['device'],
repl_job['partition'], 'REPLICATE',
'', headers=self.headers))
reqs.append(mock.call(node['replication_ip'],
node['replication_port'], node['device'],
repl_job['partition'], 'REPLICATE',
'/a83', headers=self.headers))
mock_http.assert_has_calls(reqs, any_order=True)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "30413e20ec7222facab673abac720f3c",
"timestamp": "",
"source": "github",
"line_count": 1194,
"max_line_length": 79,
"avg_line_length": 44.56867671691792,
"alnum_prop": 0.524588931692192,
"repo_name": "jungle90/Openstack-Swift-I-O-throttler",
"id": "f169e52dd8103b0cb07485f1c3bcfb0e508643d1",
"size": "53810",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "test/unit/obj/test_replicator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "673"
},
{
"name": "Python",
"bytes": "8142183"
}
],
"symlink_target": ""
} |
"""
@name: Modules/House/Lighting/Outlets/__init__.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2020-2020 by D. Brian Kimmel
@license: MIT License
@note: Created on Feb 9, 2020
"""
__updated__ = '2020-02-09'
__version_info__ = (20, 2, 9)
__version__ = '.'.join(map(str, __version_info__))
CONFIG_NAME = 'outlets'
class OutletInformation:
    """ This is the information that the user needs to enter to uniquely define an Outlet.
"""
def __init__(self) -> None:
self.Name = None
self.Comment = None # Optional
self.DeviceType = 'Lighting'
self.DeviceSubType = 'Outlet'
self.LastUpdate = None # Not user entered but maintained
self.Uuid = None # Not user entered but maintained
self.Family = None
self.Room = None
# ## END DBK
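# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# OutletInformation is a plain data container: a caller fills in the user-entered fields,
# while LastUpdate and Uuid are maintained by the framework. All values below are made up.
#
#   outlet = OutletInformation()
#   outlet.Name = 'Porch outlet'
#   outlet.Family = 'Insteon'
#   outlet.Room = 'Porch'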
| {
"content_hash": "d69e72392519d02819b3bd203c64e685",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 89,
"avg_line_length": 26.78125,
"alnum_prop": 0.6137689614935823,
"repo_name": "DBrianKimmel/PyHouse",
"id": "7482be6aacfde367f9ff0c8c9feb36ef04990d10",
"size": "857",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "Project/src/Modules/House/Lighting/Outlets/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "114778"
},
{
"name": "HTML",
"bytes": "15398"
},
{
"name": "JavaScript",
"bytes": "220171"
},
{
"name": "Python",
"bytes": "1491784"
},
{
"name": "Shell",
"bytes": "2131"
}
],
"symlink_target": ""
} |