| rem (string, 0 to 322k chars) | add (string, 0 to 2.05M chars) | context (string, 8 to 228k chars) |
| --- | --- | --- |
| DateProperty | DateTimeField | date | skipped if |
|
| DateProperty | DateField | date | skipped if |
|
def convert_RatingProperty(model, prop, kwargs):
    """Returns a form field for a ``db.RatingProperty``."""
    kwargs['validators'].append(validators.NumberRange(min=0, max=100))
    return f.IntegerField(**kwargs)
|
:param description:
    A description for the field, typically used for help text. It is
    available through the `description` property after construction.
:param id:
    An id to use for the field. A reasonable default is set by the form,
    and you shouldn't need to set this manually.
|
def __init__(self, label=u'', validators=None, filters=tuple(),
             description=u'', id=None, default=None, widget=None,
             _form=None, _name=None, _prefix=''):
    """
    Construct a new field.
|
|
The default value to assign to the field, if one is not provided by the form. May be a callable.
|
The default value to assign to the field, if no form or object input is provided. May be a callable.
|
def __init__(self, label=u'', validators=None, filters=tuple(),
             description=u'', id=None, default=None, widget=None,
             _form=None, _name=None, _prefix=''):
    """
    Construct a new field.
|
The ``value=`` HTML attribute by default is 'y' unless otherwise specified by `value=` at rendering. The ``checked`` HTML attribute is set if the field's data is a non-false value.
|
The ``checked`` HTML attribute is set if the field's data is a non-false value.
|
def __call__(self, field, **kwargs):
    if self.hide_value:
        kwargs['value'] = ''
    return super(PasswordInput, self).__call__(field, **kwargs)
|
if field.raw_data is None or not field.raw_data or not field.raw_data[0].strip():
|
if not field.raw_data or isinstance(field.raw_data[0], basestring) and not field.raw_data[0].strip():
|
def __call__(self, form, field):
    if field.raw_data is None or not field.raw_data or not field.raw_data[0].strip():
        field.errors[:] = []
        raise StopValidation()
|
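The change above makes WTForms' `Optional`-style validator tolerate non-string `raw_data` (a file upload object, for instance) before calling `.strip()`. A minimal self-contained sketch of the corrected check; `FakeField` and the local `StopValidation` are stand-ins invented here for illustration, not WTForms' real classes:

```python
class StopValidation(Exception):
    pass

class FakeField(object):
    # Stand-in for a WTForms field: just raw_data and errors.
    def __init__(self, raw_data):
        self.raw_data = raw_data
        self.errors = []

def optional_check(form, field):
    # Only strings are stripped; non-string raw data passes through.
    if not field.raw_data or (isinstance(field.raw_data[0], basestring)
                              and not field.raw_data[0].strip()):
        field.errors[:] = []
        raise StopValidation()

for raw in ([u'   '], [42]):
    try:
        optional_check(None, FakeField(raw))
        print raw, '-> validated normally'
    except StopValidation:
        print raw, '-> treated as empty, validation stopped'
```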
if line[-1].isspace() or line == "":
|
if line == "" or line[-1].isspace():
|
def main(self):
    i = 0
    line = ""
    first_line = True
|
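This reordering is a short-circuit fix: `or` evaluates left to right, so testing `line == ""` first guarantees `line[-1]` is never reached on an empty string, where it would raise IndexError. A tiny demonstration:

```python
line = ""
try:
    if line[-1].isspace() or line == "":   # old order
        pass
except IndexError:
    print "old order blows up on empty input"

if line == "" or line[-1].isspace():       # fixed order
    print "new order short-circuits safely"
```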
categories = [(operators, 2), (keywords, 3), (quotedStrings, 5)]
|
categories = [(operators, 2, curses.A_BOLD), (keywords, 3, curses.A_NORMAL), (quotedStrings, 5, curses.A_NORMAL)]
|
def init_colors(self):
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)     # errors
    curses.init_pair(2, curses.COLOR_MAGENTA, curses.COLOR_BLACK) # keywords
    curses.init_pair(3, curses.COLOR_CYAN, curses.COLOR_BLACK)
    curses.init_pair(4, curses.COLOR_MAGENTA, curses.COLOR_WHITE)
    curses.init_pair(5, curses.COLOR_GREEN, curses.COLOR_BLACK)
|
for category, colorNumber in categories:
|
for category, colorNumber, attr in categories:
|
def init_colors(self):
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)     # errors
    curses.init_pair(2, curses.COLOR_MAGENTA, curses.COLOR_BLACK) # keywords
    curses.init_pair(3, curses.COLOR_CYAN, curses.COLOR_BLACK)
    curses.init_pair(4, curses.COLOR_MAGENTA, curses.COLOR_WHITE)
    curses.init_pair(5, curses.COLOR_GREEN, curses.COLOR_BLACK)
|
self.colorMap[tokenCode] = colorNumber
|
self.colorMap[tokenCode] = (colorNumber, attr)
|
def init_colors(self):
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)     # errors
    curses.init_pair(2, curses.COLOR_MAGENTA, curses.COLOR_BLACK) # keywords
    curses.init_pair(3, curses.COLOR_CYAN, curses.COLOR_BLACK)
    curses.init_pair(4, curses.COLOR_MAGENTA, curses.COLOR_WHITE)
    curses.init_pair(5, curses.COLOR_GREEN, curses.COLOR_BLACK)
|
color = self.colorMap.get(code, 0)
stringColorPairs.append((string, color))
|
color, attr = self.colorMap.get(code, (0, curses.A_NORMAL))
stringColorPairs.append((string, color, attr))
|
def updateCurrentLine(self, s):
    suggestions = {}
    try:
        lineTokens = self.cs164bparser.tokenize(s)
        if lineTokens:
            suggestions = dict(interpreter.complete(lineTokens[-1]))
    except NameError, e:
        lineTokens = []
        # TODO color line red
|
for string, colorNumber in stringColorPairs:
|
for string, colorNumber, attr in stringColorPairs:
|
def updateCurrentLine(self, s):
    suggestions = {}
    try:
        lineTokens = self.cs164bparser.tokenize(s)
        if lineTokens:
            suggestions = dict(interpreter.complete(lineTokens[-1]))
    except NameError, e:
        lineTokens = []
        # TODO color line red
|
self.screen.addstr(self.curLineNumber, x_pos, string, curses.color_pair(colorNumber))
|
self.screen.addstr(self.curLineNumber, x_pos, string, curses.color_pair(colorNumber) | attr)
|
def updateCurrentLine(self, s):
    suggestions = {}
    try:
        lineTokens = self.cs164bparser.tokenize(s)
        if lineTokens:
            suggestions = dict(interpreter.complete(lineTokens[-1]))
    except NameError, e:
        lineTokens = []
        # TODO color line red
|
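The three rows above thread a curses text attribute (bold or normal) through the highlighter next to each color number; the final `addstr` call composes them with a bitwise OR, which is how curses attributes are combined. A standalone sketch, assuming a color-capable terminal (`curses.wrapper` initializes colors itself):

```python
import curses

def demo(screen):
    curses.init_pair(2, curses.COLOR_MAGENTA, curses.COLOR_BLACK)
    pairs = [("bold operator", 2, curses.A_BOLD),
             ("plain keyword", 2, curses.A_NORMAL)]
    for y, (text, color_number, attr) in enumerate(pairs):
        # color_pair() returns a bitmask; OR in the extra attribute.
        screen.addstr(y, 0, text, curses.color_pair(color_number) | attr)
    screen.getch()

curses.wrapper(demo)
```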
self.updateBox(self.curLineNumber+1, str(lineTokens), self.screen, self.infoBox)
|
def main(self):
    i = 0
    line = ""
|
|
if line[-1].isspace() or line == "" or not self.getSuggestions(lineTokens):
|
if line == "" or line[-1].isspace() or not self.getSuggestions(lineTokens):
|
def main(self):
    i = 0
    line = ""
    first_line = True
|
self.parsedepth = self.parsedepth + 1
|
self.parsedepth = 1
|
def doSDT(edge):
    children = [x for x in getChildren(edge) if x]
    if len(children) == 1 and len(children[0]) == 2:  # terminal
        term = children[0]
        return [saObject(None, [], term[1])]
|
history = [""]
|
first_line = True
history = []
|
def main(self):
    i = 0
    line = ""
|
self.parse_line(line[:-1])
if hist_ptr != 0:
    hist_ptr = 0
history.insert(hist_ptr, line[:-1])
|
if not first_line:
    to_parse = '\n' + line[:-1]
else:
    to_parse = line[:-1]
first_line = False
if self.parse_line(to_parse):
    first_line = True
    hist_ptr = 0
    history[hist_ptr] = line[:-1]
|
def main(self):
    i = 0
    line = ""
|
suggestions[k] = "function(" + reduce(lambda x,y: x+","+y, v.fun.argList) + ")"
|
if v.fun.argList:
    suggestions[k] = "function(" + reduce(lambda x,y: x+","+y, v.fun.argList) + ")"
else:
    suggestions[k] = "function()"
|
def showSuggestions(self, suggestions):
    if suggestions:
        width = self.screen.getmaxyx()[1] - 6
        sugList = []
        for k, v in suggestions.iteritems():
            # pretty representation of functions - add others as needed
            if isinstance(v, interpreter.FunVal):
                suggestions[k] = "function(" + reduce(lambda x,y: x+","+y, v.fun.argList) + ")"
|
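The guard above is needed because `reduce` with no initializer raises TypeError on an empty sequence; `','.join(...)` handles both cases in one expression:

```python
def format_args(arg_list):
    # join() degrades gracefully to "" for an empty list,
    # which bare reduce() cannot do.
    return "function(" + ",".join(arg_list) + ")"

print format_args(["x", "y"])   # function(x,y)
print format_args([])           # function()
```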
define(e[1], Resume(co.fun.body, co.env, fun=co))
|
define(e[1], Resume(co.fun.body, co.env, fun=co, REPL=REPL))
|
funcdef = lookup(e[2])
|
print e
|
funcdef = lookup(e[2])
|
|
lineTokens = []
|
lineTokens = []
self.screen.addstr(self.curLineNumber, len(PROMPTSTR), s, curses.color_pair(1))
self.screen.addstr(self.curLineNumber, len(s)+len(PROMPTSTR), padding * ' ')
self.clearBox(self.infoBox)
self.screen.move(self.curLineNumber, len(s)+len(PROMPTSTR))
return

if (s and s[-1].isspace()):
    suggestions = {}
|
def updateCurrentLine(self, s, tab=False, stringCompletion=False):
|
width = self.screen.getmaxyx()[1] - 6
padding = width - len(PROMPTSTR)
|
def updateCurrentLine(self, s, tab=False, stringCompletion=False):
|
|
if tokens[i+1][0] == self.dot_tkn[0]:
|
if tokens[i+1][0] in (self.dot_tkn[0], self.colon_tkn[0]):
|
def findFunctionalUnit(tokens):
    if not tokens:  # can't fill the hole in your heart, I mean, code
        return None
|
categories = [(operators, 2, curses.A_BOLD), (keywords, 3, curses.A_NORMAL), (quotedStrings, 5, curses.A_NORMAL)]
|
number = ["5"]
categories = [(operators, 2, curses.A_BOLD), (keywords, 3, curses.A_NORMAL), (quotedStrings, 5, curses.A_NORMAL), (number, 4, curses.A_NORMAL)]
|
def init_colors(self):
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)     # errors
    curses.init_pair(2, curses.COLOR_MAGENTA, curses.COLOR_BLACK) # keywords
    curses.init_pair(3, curses.COLOR_CYAN, curses.COLOR_BLACK)
    curses.init_pair(4, curses.COLOR_YELLOW, curses.COLOR_BLACK)
    curses.init_pair(5, curses.COLOR_GREEN, curses.COLOR_BLACK)
|
self.screen.addstr(self.curLineNumber, x_pos, comment, curses.color_pair(0))
|
self.screen.addstr(self.curLineNumber, x_pos, comment, curses.color_pair(6))
|
def updateCurrentLine(self, s, tab=False, stringCompletion=False, interruptFlag=False):
|
self.id_tkn = self.cs164bparser.tokenize('a')
self.dot_tkn = self.cs164bparser.tokenize('.')
self.comma_tkn = self.cs164bparser.tokenize(',')
self.open_tkn = self.cs164bparser.tokenize('(')
self.close_tkn = self.cs164bparser.tokenize(')')
|
self.id_tkn = self.cs164bparser.tokenize('a')[0]
self.dot_tkn = self.cs164bparser.tokenize('.')[0]
self.comma_tkn = self.cs164bparser.tokenize(',')[0]
self.open_tkn = self.cs164bparser.tokenize('(')[0]
self.close_tkn = self.cs164bparser.tokenize(')')[0]
|
def __init__(self):
    # initialize parser
    cs164grammarFile = './cs164b.grm'
    self.cs164bparser = parser_generator.makeParser(grammar_parser.parse(open(cs164grammarFile).read()))
|
self.printLine("Error while tokenizing line: " + line, 1)
|
self.printLine("Error while tokenizing line: " + line, 1, curses.A_BOLD)
|
def parse_line(self, line):
    try:
        tokens = self.cs164bparser.tokenize(line)
        if tokens:                                # no need to consume non-code lines
            input_ast = self.parser.send(tokens)  # parse this line
            if type(input_ast) == tuple:          # parsing completed on this line; execute result
                interpreter.ExecGlobalStmt(input_ast, self)
|
self.printLine("Error while parsing line: " + line, 1)
|
self.printLine("Error while parsing line: " + line, 1, curses.A_BOLD)
|
def parse_line(self, line):
    try:
        tokens = self.cs164bparser.tokenize(line)
        if tokens:                                # no need to consume non-code lines
            input_ast = self.parser.send(tokens)  # parse this line
            if type(input_ast) == tuple:          # parsing completed on this line; execute result
                interpreter.ExecGlobalStmt(input_ast, self)
|
if line[-1].isspace() or line == "":
|
if line[-1].isspace() or line == "" or not self.getSuggestions(lineTokens):
|
def main(self):
    i = 0
    line = ""
|
interpreter.ExecGlobalStmt(input_ast)
|
interpreter.ExecGlobalStmt(input_ast, self)
|
def loadProgram(self, p_file):
    # message to return
    message = ""
|
self.parser = parser_generator.makeParser(grammar_parser.parse(open(cs164grammarFile).read()))
self.terminals = self.parser.terminals
self.newline = self.parser.tokenize("\n")
|
self.cs164bparser = parser_generator.makeParser(grammar_parser.parse(open(cs164grammarFile).read()))
self.terminals = self.cs164bparser.terminals
self.newline = self.cs164bparser.tokenize("\n")
|
def __init__(self):
    # initialize curses
    self.screen = curses.initscr()
    curses.start_color()
    self.init_colors()
    curses.noecho()
    self.screen.keypad(1)
    curses.curs_set(1)
    curses.cbreak()
    self.screen.clear()
    self.screen.leaveok(False)
    self.infoBox = 0
|
parser = self.parser.parse()
parser.next()
|
def parse_line(self, line):
    parser = self.parser.parse()
    parser.next()
    try:
        tokens = self.parser.tokenize(line)
        if tokens:                            # no need to consume non-code lines
            input_ast = parser.send(tokens)   # parse this line
            if type(input_ast) == tuple:      # parsing completed on this line; execute result
                interpreter.ExecGlobalStmt(input_ast, self)
|
|
tokens = self.parser.tokenize(line)
|
tokens = self.cs164bparser.tokenize(line)
|
def parse_line(self, line):
    parser = self.parser.parse()
    parser.next()
    try:
        tokens = self.parser.tokenize(line)
        if tokens:                            # no need to consume non-code lines
            input_ast = parser.send(tokens)   # parse this line
            if type(input_ast) == tuple:      # parsing completed on this line; execute result
                interpreter.ExecGlobalStmt(input_ast, self)
|
input_ast = parser.send(tokens)
|
input_ast = self.parser.send(tokens)
|
def parse_line(self, line):
    parser = self.parser.parse()
    parser.next()
    try:
        tokens = self.parser.tokenize(line)
        if tokens:                            # no need to consume non-code lines
            input_ast = parser.send(tokens)   # parse this line
            if type(input_ast) == tuple:      # parsing completed on this line; execute result
                interpreter.ExecGlobalStmt(input_ast, self)
|
parser = self.parser.parse()
parser.next()
|
self.parser = self.cs164bparser.parse()
self.parser.next()
|
def parse_line(self, line):
    parser = self.parser.parse()
    parser.next()
    try:
        tokens = self.parser.tokenize(line)
        if tokens:                            # no need to consume non-code lines
            input_ast = parser.send(tokens)   # parse this line
            if type(input_ast) == tuple:      # parsing completed on this line; execute result
                interpreter.ExecGlobalStmt(input_ast, self)
|
lineTokens = self.tokenize(line)
|
lineTokens = self.cs164bparser.tokenize(line)
|
def main(self):
    i = 0
    line = ""
|
except IndexError:
|
except (AttributeError, IndexError):
|
def get_title_from_google(movie_name, interactive=False):
    try:
        query = urllib.quote("site:www.imdb.com/title/ " + movie_name)
        url = 'http://ajax.googleapis.com/ajax/services/search/web?v=1.0&q=' + query
        data_string = urllib2.urlopen(url).read()
        first_result = json.loads(data_string)["responseData"]["results"][0]
        m = re.match("http://www.imdb.com/title/tt(.*)/", first_result["url"])
        imdb_id = m.groups()[0]
    except IndexError:
        raise NoMovieFound("Unable to find movie for '%s'" % movie_name)
    return ia.get_movie(imdb_id)
|
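Widening the except clause matters because two distinct failures can occur in the lookup above: an empty result list raises IndexError on `[0]`, while a URL the regex does not match makes `re.match` return None, so `m.groups()` raises AttributeError. A reduced illustration of the second case:

```python
import re

def imdb_id_from_url(url):
    m = re.match("http://www.imdb.com/title/tt(.*)/", url)
    # m is None when the URL does not match; .groups() then
    # raises AttributeError, which the widened clause catches.
    return m.groups()[0]

for url in ("http://www.imdb.com/title/tt0133093/", "http://example.com/"):
    try:
        print imdb_id_from_url(url)
    except (AttributeError, IndexError), e:
        print "no match:", type(e).__name__
```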
return get_data_for_movie(s_result[0])
|
return s_result[0]
|
def get_title_from_imdb(movie_name, interactive=False):
    """
    Searches for a film on IMDb.

    * If only one result is found, return that.
    * If no films are found, raise an error.
    * If more than one film is found, either raise an error when not in
      interactive mode, or prompt for a selection.
    """
    s_result = ia.search_movie(movie_name)
    logging.debug("Found %s results for film '%s'" % (len(s_result), movie_name))
    if len(s_result) == 1:
        return get_data_for_movie(s_result[0])
    elif len(s_result) == 0:
        raise NoMovieFound("Unable to find movie for '%s'" % movie_name)
    else:
        if interactive:
            for i, movie in enumerate(s_result):
                print u"\t%s: %s (%s) (%s)" % (i+1, movie, movie.get('year', 'Unknown'), movie.get('kind', 'unknown'))
            result_num = get_input(len(s_result))
            print
            return s_result[result_num]
        else:
            raise NoMovieFound("Found %s results for '%s'" % (len(s_result), movie_name))
|
'imdb_url': 'http://www.imdb.com/title/tt%s/' % movie.getID(),
|
'imdb_id': 'tt%s' % movie.getID(),
|
def get_data_for_movie(movie):
    """
    Takes an IMDbPY movie object, updates it and formats it into a
    stagfs friendly dict.
    """
    # Not all data was retrieved so update the movie object
    ia.update(movie)
    ia.update(movie, 'keywords')

    output = {
        'title': movie.get('title', []),
        'canonical_title': movie.get('canonical title', []),
        'year': movie.get('year', []),
        'genre': movie.get('genres', []),
        'director': [x['name'] for x in movie.get('director', [])],
        'writer': [x['name'] for x in movie.get('writer', [])],
        'cast': [x['name'] for x in movie.get('cast', [])],
        'keywords': [x.replace(u'\xa0', ' ') for x in movie.get('keywords', [])],
        'languages': movie.get('languages', []),
        'countries': movie.get('countries', []),
        'imdb_url': 'http://www.imdb.com/title/tt%s/' % movie.getID(),
    }

    from math import floor, ceil
    rating = movie.get('rating')
    output['rating (exact)'] = str(rating)
    output['rating (range)'] = '%g.0-%g.9' % (floor(rating), floor(rating))
    return output
|
def __init__(self):
    self.throw_error = False
|
throw_error = False
|
def __init__(self):
    self.throw_error = False
|
self.closed = False
|
def __init__(self):
    global mcid
    self.id = mcid
    self.closed = False
    mcid += 1
|
|
mock_dbapi = MockDBAPI()
|
def close(self):
    pass
|
|
manager = pool.manage(mock_dbapi, use_threadlocal=True)
|
manager = pool.manage(MockDBAPI(), use_threadlocal=True)
|
def testmanager(self):
    manager = pool.manage(mock_dbapi, use_threadlocal=True)
|
manager = pool.manage(mock_dbapi)
|
manager = pool.manage(MockDBAPI())
|
def testbadargs(self):
    manager = pool.manage(mock_dbapi)
|
manager = pool.manage(mock_dbapi, use_threadlocal = False)
|
manager = pool.manage(MockDBAPI(), use_threadlocal = False)
|
def testnonthreadlocalmanager(self):
    manager = pool.manage(mock_dbapi, use_threadlocal=False)
|
for p in pool.QueuePool(creator=mock_dbapi.connect,
|
dbapi = MockDBAPI()
for p in pool.QueuePool(creator=dbapi.connect,
|
def _do_testthreadlocal(self, useclose=False):
    for p in pool.QueuePool(creator=mock_dbapi.connect,
                            pool_size=3, max_overflow=-1,
                            use_threadlocal=True), \
            pool.SingletonThreadPool(creator=mock_dbapi.connect,
                                     use_threadlocal=True):
        c1 = p.connect()
        c2 = p.connect()
        self.assert_(c1 is c2)
        c3 = p.unique_connection()
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
        c2 = p.connect()
        self.assert_(c1 is c2)
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
            lazy_gc()
        if useclose:
            c1 = p.connect()
            c2 = p.connect()
            c3 = p.connect()
            c3.close()
            c2.close()
            self.assert_(c1.connection is not None)
            c1.close()
        c1 = c2 = c3 = None
|
pool.SingletonThreadPool(creator=mock_dbapi.connect,
|
pool.SingletonThreadPool(creator=dbapi.connect,
|
def _do_testthreadlocal(self, useclose=False):
    for p in pool.QueuePool(creator=mock_dbapi.connect,
                            pool_size=3, max_overflow=-1,
                            use_threadlocal=True), \
            pool.SingletonThreadPool(creator=mock_dbapi.connect,
                                     use_threadlocal=True):
        c1 = p.connect()
        c2 = p.connect()
        self.assert_(c1 is c2)
        c3 = p.unique_connection()
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
        c2 = p.connect()
        self.assert_(c1 is c2)
        self.assert_(c3 is not c1)
        if useclose:
            c2.close()
        else:
            c2 = None
            lazy_gc()
        if useclose:
            c1 = p.connect()
            c2 = p.connect()
            c3 = p.connect()
            c3.close()
            c2.close()
            self.assert_(c1.connection is not None)
            c1.close()
        c1 = c2 = c3 = None
|
dbapi = MockDBAPI()
p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                   pool_size=1, max_overflow=0, use_threadlocal=False)
|
p = self._queuepool_fixture(pool_size=1, max_overflow=0)
|
def test_properties(self):
    dbapi = MockDBAPI()
    p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                       pool_size=1, max_overflow=0, use_threadlocal=False)
|
dbapi = MockDBAPI()
|
def test_listeners(self):
    dbapi = MockDBAPI()
|
|
def _pool(**kw):
    return pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                          use_threadlocal=False, **kw)
|
def _pool(**kw):
    return pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                          use_threadlocal=False, **kw)
|
|
p = _pool()
|
p = self._queuepool_fixture()
|
def assert_listeners(p, total, conn, fconn, cout, cin):
    for instance in (p, p.recreate()):
        self.assert_(len(instance.dispatch.on_connect) == conn)
        self.assert_(len(instance.dispatch.on_first_connect) == fconn)
        self.assert_(len(instance.dispatch.on_checkout) == cout)
        self.assert_(len(instance.dispatch.on_checkin) == cin)
|
p = _pool(listeners=[snoop])
|
p = self._queuepool_fixture(listeners=[snoop])
|
def assert_listeners(p, total, conn, fconn, cout, cin):
    for instance in (p, p.recreate()):
        self.assert_(len(instance.dispatch.on_connect) == conn)
        self.assert_(len(instance.dispatch.on_first_connect) == fconn)
        self.assert_(len(instance.dispatch.on_checkout) == cout)
        self.assert_(len(instance.dispatch.on_checkin) == cin)
|
dbapi = MockDBAPI()
|
def test_listeners_callables(self):
    dbapi = MockDBAPI()
|
|
def _pool(**kw):
    return cls(creator=lambda: dbapi.connect('foo.db'),
               use_threadlocal=False, **kw)
|
def _pool(**kw):
    return cls(creator=lambda: dbapi.connect('foo.db'),
               use_threadlocal=False, **kw)
|
|
p = _pool()
|
p = self._queuepool_fixture()
|
def assert_listeners(p, total, conn, cout, cin):
    for instance in (p, p.recreate()):
        eq_(len(instance.dispatch.on_connect), conn)
        eq_(len(instance.dispatch.on_checkout), cout)
        eq_(len(instance.dispatch.on_checkin), cin)
|
p = _pool(listeners=[i_all])
|
p = self._queuepool_fixture(listeners=[i_all])
|
def assert_listeners(p, total, conn, cout, cin):
    for instance in (p, p.recreate()):
        eq_(len(instance.dispatch.on_connect), conn)
        eq_(len(instance.dispatch.on_checkout), cout)
        eq_(len(instance.dispatch.on_checkin), cin)
|
def test_listener_after_oninit(self):
    """Test that listeners are called after OnInit is removed"""
    called = []
    def listener(*args):
        called.append(True)
    engine = create_engine(testing.db.url)
    event.listen(listener, 'on_connect', engine.pool)
    engine.execute(select([1])).close()
    assert called, "Listener not called on connect"

def test_targets(self):
    canary = []
    def listen_one(*args):
        canary.append("listen_one")
    def listen_two(*args):
        canary.append("listen_two")
    def listen_three(*args):
        canary.append("listen_three")
    def listen_four(*args):
        canary.append("listen_four")

    engine = create_engine(testing.db.url)
    event.listen(listen_one, 'on_connect', pool.Pool)
    event.listen(listen_two, 'on_connect', engine.pool)
    event.listen(listen_three, 'on_connect', engine)
    event.listen(listen_four, 'on_connect', engine.__class__)

    engine.execute(select([1])).close()
    eq_(
        canary,
        ["listen_one", "listen_four", "listen_two", "listen_three"]
    )

def teardown(self):
    pool.Pool.dispatch.clear()
|
def test_listener_after_oninit(self):
    """Test that listeners are called after OnInit is removed"""
    called = []
    def listener(*args):
        called.append(True)
    engine = create_engine(testing.db.url)
    event.listen(listener, 'on_connect', engine.pool)
    engine.execute(select([1])).close()
    assert called, "Listener not called on connect"
|
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3, max_overflow=-1, use_threadlocal=False)
|
p = self._queuepool_fixture(pool_size=3, max_overflow=-1)
|
def _do_testqueuepool(self, useclose=False):
    p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
                       max_overflow=-1, use_threadlocal=False)
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3, max_overflow=0, use_threadlocal=False,
|
p = self._queuepool_fixture(pool_size=3, max_overflow=0,
|
def test_timeout(self):
    p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
                       max_overflow=0, use_threadlocal=False,
                       timeout=2)
    c1 = p.connect()
    c2 = p.connect()
    c3 = p.connect()
    now = time.time()
    try:
        c4 = p.connect()
        assert False
    except tsa.exc.TimeoutError, e:
        assert int(time.time() - now) == 2
|
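The fixture rewrite above keeps the same semantics: a QueuePool with `max_overflow=0` hands out at most `pool_size` connections and raises TimeoutError after `timeout` seconds when exhausted. A sketch against SQLAlchemy's public pool API, using a trivial stand-in DBAPI connection invented here for illustration:

```python
import time
from sqlalchemy import pool, exc

class DummyConnection(object):
    # Minimal DBAPI-ish connection: enough for pool bookkeeping.
    def close(self):
        pass
    def rollback(self):
        pass

p = pool.QueuePool(creator=DummyConnection, pool_size=1,
                   max_overflow=0, timeout=2)
c1 = p.connect()              # exhausts the pool
start = time.time()
try:
    p.connect()               # blocks, then times out
except exc.TimeoutError:
    print "timed out after ~%d seconds" % int(time.time() - start)
```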
creator = lambda: mock_dbapi.connect(delay=.05),
|
creator = lambda: dbapi.connect(delay=.05),
|
def test_timeout_race(self):
    # test a race condition where the initial connecting threads all race
    # to queue.Empty, then block on the mutex.  each thread consumes a
    # connection as they go in.  when the limit is reached, the remaining
    # threads go in, and get TimeoutError; even though they never got to
    # wait for the timeout on queue.get().  the fix involves checking the
    # timeout again within the mutex, and if so, unlocking and throwing
    # them back to the start of do_get()
    p = pool.QueuePool(
            creator=lambda: mock_dbapi.connect(delay=.05),
            pool_size=2,
            max_overflow=1, use_threadlocal=False, timeout=3)
    timeouts = []
    def checkout():
        for x in xrange(1):
            now = time.time()
            try:
                c1 = p.connect()
            except tsa.exc.TimeoutError, e:
                timeouts.append(time.time() - now)
                continue
            time.sleep(4)
            c1.close()
|
return mock_dbapi.connect()
|
return dbapi.connect()
|
def creator():
    time.sleep(.05)
    return mock_dbapi.connect()
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3, max_overflow=-1, use_threadlocal=True)
|
p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
|
def test_mixed_close(self):
    p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
                       max_overflow=-1, use_threadlocal=True)
    c1 = p.connect()
    c2 = p.connect()
    assert c1 is c2
    c1.close()
    c2 = None
    assert p.checkedout() == 1
    c1 = None
    lazy_gc()
    assert p.checkedout() == 0
    lazy_gc()
    assert not pool._refs
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
|
p = self._queuepool_fixture(pool_size=3,
|
def test_weakref_kaboom(self):
    p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
                       max_overflow=-1, use_threadlocal=True)
    c1 = p.connect()
    c2 = p.connect()
    c1.close()
    c2 = None
    del c1
    del c2
    gc_collect()
    assert p.checkedout() == 0
    c3 = p.connect()
    assert c3 is not None
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
|
p = self._queuepool_fixture(pool_size=3,
|
def test_trick_the_counter(self):
    """this is a "flaw" in the connection pool; since threadlocal
    uses a single ConnectionFairy per thread with an open/close
    counter, you can fool the counter into giving you a
    ConnectionFairy with an ambiguous counter.  i.e. its not true
    reference counting."""
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=1, max_overflow=0, use_threadlocal=False,
|
p = self._queuepool_fixture(pool_size=1, max_overflow=0,
|
def test_recycle(self):
    p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=1,
                       max_overflow=0, use_threadlocal=False,
                       recycle=3)
    c1 = p.connect()
    c_id = id(c1.connection)
    c1.close()
    c2 = p.connect()
    assert id(c2.connection) == c_id
    c2.close()
    time.sleep(4)
    c3 = p.connect()
    assert id(c3.connection) != c_id
|
dbapi = MockDBAPI()
p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                   pool_size=1, max_overflow=0, use_threadlocal=False)
|
p = self._queuepool_fixture(pool_size=1, max_overflow=0)
|
def test_invalidate(self):
    dbapi = MockDBAPI()
    p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                       pool_size=1, max_overflow=0, use_threadlocal=False)
    c1 = p.connect()
    c_id = c1.connection.id
    c1.close()
    c1 = None
    c1 = p.connect()
    assert c1.connection.id == c_id
    c1.invalidate()
    c1 = None
    c1 = p.connect()
    assert c1.connection.id != c_id
|
dbapi = MockDBAPI()
p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                   pool_size=1, max_overflow=0, use_threadlocal=False)
|
p = self._queuepool_fixture(pool_size=1, max_overflow=0)
|
def test_recreate(self):
    dbapi = MockDBAPI()
    p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                       pool_size=1, max_overflow=0, use_threadlocal=False)
    p2 = p.recreate()
    assert p2.size() == 1
    assert p2._use_threadlocal is False
    assert p2._max_overflow == 0
|
dbapi = MockDBAPI()
p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                   pool_size=1, max_overflow=0, use_threadlocal=False)
|
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=0)
|
def test_reconnect(self):
    """tests reconnect operations at the pool level.  SA's
    engine/dialect includes another layer of reconnect support for
    'database was lost' errors."""
|
dbapi = MockDBAPI()
p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                   pool_size=1, max_overflow=0, use_threadlocal=False)
|
dbapi, p = self._queuepool_dbapi_fixture(pool_size=1, max_overflow=0)
|
def test_detach(self):
    dbapi = MockDBAPI()
    p = pool.QueuePool(creator=lambda: dbapi.connect('foo.db'),
                       pool_size=1, max_overflow=0, use_threadlocal=False)
    c1 = p.connect()
    c1.detach()
    c_id = c1.connection.id
    c2 = p.connect()
    assert c2.connection.id != c1.connection.id
    dbapi.raise_error = True
    c2.invalidate()
    c2 = None
    c2 = p.connect()
    assert c2.connection.id != c1.connection.id
    con = c1.connection
    assert not con.closed
    c1.close()
    assert con.closed
|
p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3, max_overflow=-1, use_threadlocal=True)
|
p = self._queuepool_fixture(pool_size=3, max_overflow=-1, use_threadlocal=True)
|
def test_threadfairy(self):
    p = pool.QueuePool(creator=mock_dbapi.connect, pool_size=3,
                       max_overflow=-1, use_threadlocal=True)
    c1 = p.connect()
    c1.close()
    c2 = p.connect()
    assert c2.connection is not None
|
p = pool.SingletonThreadPool(creator=mock_dbapi.connect,
|
dbapi = MockDBAPI()
p = pool.SingletonThreadPool(creator=dbapi.connect,
|
def test_cleanup(self):
    """test that the pool's connections are OK after cleanup() has
    been called."""
|
__only_on__ = 'oracle+cx-oracle'
|
__only_on__ = 'oracle+cx_oracle'
|
def test_raw_lobs(self):
    engine = testing_engine(options=dict(auto_convert_lobs=False))
    metadata = MetaData()
    t = Table("z_test", metadata,
              Column('id', Integer, primary_key=True),
              Column('data', Text),
              Column('bindata', LargeBinary))
    t.create(engine)
    try:
        engine.execute(t.insert(), id=1,
                       data='this is text',
                       bindata='this is binary')
        row = engine.execute(t.select()).first()
        eq_(row['data'].read(), 'this is text')
        eq_(row['bindata'].read(), 'this is binary')
    finally:
        t.drop(engine)
|
raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' function " "to indicate a SQL expression literal, or 'literal()' to indicate a bound value." % element)
|
raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' " "function to indicate a SQL expression " "literal, or 'literal()' to indicate a " "bound value." % element)
|
def _no_literals(element):
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' function "
                                "to indicate a SQL expression literal, or 'literal()' "
                                "to indicate a bound value." % element)
    else:
        return element
|
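The reworded error message points at the two explicit escapes it recommends: `text()` wraps a raw SQL fragment, while `literal()` wraps a bound Python value. Roughly:

```python
from sqlalchemy.sql import select, text, literal

print select([text("count(*)")])   # SELECT count(*)  -- raw SQL fragment
print select([literal(42)])        # SELECT :param_1  -- bound value
```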
prop = mapper.get_property(key, raiseerr=False)
if prop is None:
|
if not mapper.has_property(key):
|
def __getattr__(self, key):
    mapper = class_mapper(self.cls, compile=False)
    if mapper:
        prop = mapper.get_property(key, raiseerr=False)
        if prop is None:
            raise exceptions.InvalidRequestError(
                "Class %r does not have a mapped column named %r"
                % (self.cls, key))
        elif not isinstance(prop, ColumnProperty):
            raise exceptions.InvalidRequestError(
                "Property %r is not an instance of"
                " ColumnProperty (i.e. does not correspond"
                " directly to a Column)." % key)
    return getattr(self.cls, key)
|
elif not isinstance(prop, ColumnProperty):
|
prop = mapper.get_property(key)
if not isinstance(prop, ColumnProperty):
|
def __getattr__(self, key):
    mapper = class_mapper(self.cls, compile=False)
    if mapper:
        prop = mapper.get_property(key, raiseerr=False)
        if prop is None:
            raise exceptions.InvalidRequestError(
                "Class %r does not have a mapped column named %r"
                % (self.cls, key))
        elif not isinstance(prop, ColumnProperty):
            raise exceptions.InvalidRequestError(
                "Property %r is not an instance of"
                " ColumnProperty (i.e. does not correspond"
                " directly to a Column)." % key)
    return getattr(self.cls, key)
|
:mod:~sqlalchemy.engine_TwoPhaseTransaction. During a ``commit()``, after
|
:mod:`~sqlalchemy.engine_TwoPhaseTransaction`. During a ``commit()``, after
|
def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
                 expire_on_commit=True, **kwargs):
    """Generate a custom-configured :class:`~sqlalchemy.orm.session.Session` class.

    The returned object is a subclass of ``Session``, which, when instantiated
    with no arguments, uses the keyword arguments configured here as its
    constructor arguments.

    It is intended that the `sessionmaker()` function be called within the
    global scope of an application, and the returned class be made available
    to the rest of the application as the single class used to instantiate
    sessions.

    e.g.::

        # global scope
        Session = sessionmaker(autoflush=False)

        # later, in a local scope, create and use a session:
        sess = Session()

    Any keyword arguments sent to the constructor itself will override the
    "configured" keywords::

        Session = sessionmaker()

        # bind an individual session to a connection
        sess = Session(bind=connection)

    The class also includes a special classmethod ``configure()``, which
    allows additional configurational options to take place after the custom
    ``Session`` class has been generated.  This is useful particularly for
    defining the specific ``Engine`` (or engines) to which new instances of
    ``Session`` should be bound::

        Session = sessionmaker()
        Session.configure(bind=create_engine('sqlite:///foo.db'))

        sess = Session()

    Options:

    autocommit
      Defaults to ``False``. When ``True``, the ``Session`` does not keep a
      persistent transaction running, and will acquire connections from the
      engine on an as-needed basis, returning them immediately after their
      use. Flushes will begin and commit (or possibly rollback) their own
      transaction if no transaction is present. When using this mode, the
      `session.begin()` method may be used to begin a transaction explicitly.
      Leaving it on its default value of ``False`` means that the ``Session``
      will acquire a connection and begin a transaction the first time it is
      used, which it will maintain persistently until ``rollback()``,
      ``commit()``, or ``close()`` is called. When the transaction is
      released by any of these methods, the ``Session`` is ready for the next
      usage, which will again acquire and maintain a new
      connection/transaction.

    autoflush
      When ``True``, all query operations will issue a ``flush()`` call to
      this ``Session`` before proceeding. This is a convenience feature so
      that ``flush()`` need not be called repeatedly in order for database
      queries to retrieve results. It's typical that ``autoflush`` is used in
      conjunction with ``autocommit=False``. In this scenario, explicit calls
      to ``flush()`` are rarely needed; you usually only need to call
      ``commit()`` (which flushes) to finalize changes.

    bind
      An optional ``Engine`` or ``Connection`` to which this ``Session``
      should be bound. When specified, all SQL operations performed by this
      session will execute via this connectable.

    binds
      An optional dictionary, which contains more granular "bind" information
      than the ``bind`` parameter provides. This dictionary can map
      individual ``Table`` instances as well as ``Mapper`` instances to
      individual ``Engine`` or ``Connection`` objects. Operations which
      proceed relative to a particular ``Mapper`` will consult this
      dictionary for the direct ``Mapper`` instance as well as the mapper's
      ``mapped_table`` attribute in order to locate an connectable to use.
      The full resolution is described in the ``get_bind()`` method of
      ``Session``. Usage looks like::

        sess = Session(binds={
            SomeMappedClass: create_engine('postgresql://engine1'),
            somemapper: create_engine('postgresql://engine2'),
            some_table: create_engine('postgresql://engine3'),
        })

      Also see the ``bind_mapper()`` and ``bind_table()`` methods.

    \class_
      Specify an alternate class other than
      ``sqlalchemy.orm.session.Session`` which should be used by the returned
      class. This is the only argument that is local to the
      ``sessionmaker()`` function, and is not sent directly to the
      constructor for ``Session``.

    _enable_transaction_accounting
      Defaults to ``True``. A legacy-only flag which when ``False`` disables
      *all* 0.5-style object accounting on transaction boundaries, including
      auto-expiry of instances on rollback and commit, maintenance of the
      "new" and "deleted" lists upon rollback, and autoflush of pending
      changes upon begin(), all of which are interdependent.

    expire_on_commit
      Defaults to ``True``. When ``True``, all instances will be fully
      expired after each ``commit()``, so that all attribute/object access
      subsequent to a completed transaction will load from the most recent
      database state.

    extension
      An optional :class:`~sqlalchemy.orm.session.SessionExtension` instance,
      or a list of such instances, which will receive pre- and post- commit
      and flush events, as well as a post-rollback event. User- defined code
      may be placed within these hooks using a user-defined subclass of
      ``SessionExtension``.

    query_cls
      Class which should be used to create new Query objects, as returned by
      the ``query()`` method. Defaults to
      :class:`~sqlalchemy.orm.query.Query`.

    twophase
      When ``True``, all transactions will be started using
      :mod:~sqlalchemy.engine_TwoPhaseTransaction. During a ``commit()``,
      after ``flush()`` has been issued for all attached databases, the
      ``prepare()`` method on each database's ``TwoPhaseTransaction`` will
      be called. This allows each database to roll back the entire
      transaction, before each transaction is committed.

    weak_identity_map
      When set to the default value of ``True``, a weak-referencing map is
      used; instances which are not externally referenced will be garbage
      collected immediately. For dereferenced instances which have pending
      changes present, the attribute management system will create a
      temporary strong-reference to the object which lasts until the changes
      are flushed to the database, at which point it's again dereferenced.
      Alternatively, when using the value ``False``, the identity map uses a
      regular Python dictionary to store instances. The session will maintain
      all instances present until they are removed using expunge(), clear(),
      or purge().

    """
    kwargs['bind'] = bind
    kwargs['autoflush'] = autoflush
    kwargs['autocommit'] = autocommit
    kwargs['expire_on_commit'] = expire_on_commit

    if class_ is None:
        class_ = Session

    class Sess(object):
        def __init__(self, **local_kwargs):
            for k in kwargs:
                local_kwargs.setdefault(k, kwargs[k])
            super(Sess, self).__init__(**local_kwargs)

        def configure(self, **new_kwargs):
            """(Re)configure the arguments for this sessionmaker.

            e.g.::

                Session = sessionmaker()
                Session.configure(bind=create_engine('sqlite://'))
            """
            kwargs.update(new_kwargs)
        configure = classmethod(configure)
    s = type.__new__(type, "Session", (Sess, class_), {})
    return s
|
def adapt(self, impltype, **kw):
    return impltype(as_uuid=self.as_uuid, **kw)
|
def process(value):
    if value is not None:
        value = _python_UUID(value)
    return value
|
|
def dialect_impl(self, dialect, **kwargs):
    impl = super(ARRAY, self).dialect_impl(dialect, **kwargs)
    if impl is self:
        impl = self.__class__.__new__(self.__class__)
        impl.__dict__.update(self.__dict__)
    impl.item_type = self.item_type.dialect_impl(dialect)
    return impl
|
def dialect_impl(self, dialect, **kwargs):
    impl = super(ARRAY, self).dialect_impl(dialect, **kwargs)
    if impl is self:
        impl = self.__class__.__new__(self.__class__)
        impl.__dict__.update(self.__dict__)
    impl.item_type = self.item_type.dialect_impl(dialect)
    return impl
|
|
item_proc = self.item_type.bind_processor(dialect)
|
item_proc = self.item_type.dialect_impl(dialect).bind_processor(dialect)
|
def bind_processor(self, dialect):
    item_proc = self.item_type.bind_processor(dialect)
    if item_proc:
        def convert_item(item):
            if isinstance(item, (list, tuple)):
                return [convert_item(child) for child in item]
            else:
                return item_proc(item)
    else:
        def convert_item(item):
            if isinstance(item, (list, tuple)):
                return [convert_item(child) for child in item]
            else:
                return item
    def process(value):
        if value is None:
            return value
        return [convert_item(item) for item in value]
    return process
|
item_proc = self.item_type.result_processor(dialect, coltype)
|
item_proc = self.item_type.dialect_impl(dialect).result_processor(dialect, coltype)
|
def result_processor(self, dialect, coltype):
    item_proc = self.item_type.result_processor(dialect, coltype)
    if item_proc:
        def convert_item(item):
            if isinstance(item, list):
                r = [convert_item(child) for child in item]
                if self.as_tuple:
                    r = tuple(r)
                return r
            else:
                return item_proc(item)
    else:
        def convert_item(item):
            if isinstance(item, list):
                r = [convert_item(child) for child in item]
                if self.as_tuple:
                    r = tuple(r)
                return r
            else:
                return item
    def process(value):
        if value is None:
            return value
        r = [convert_item(item) for item in value]
        if self.as_tuple:
            r = tuple(r)
        return r
    return process
|
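Both processors apply the item type's own processor recursively, so multidimensional arrays are converted element-wise at every depth. The underlying pattern, stripped of SQLAlchemy specifics:

```python
def make_converter(item_proc):
    # Recurse into nested sequences; apply item_proc only at the leaves.
    def convert_item(item):
        if isinstance(item, (list, tuple)):
            return [convert_item(child) for child in item]
        return item_proc(item)
    return convert_item

convert = make_converter(str)
print convert([1, [2, 3], [[4]]])   # ['1', ['2', '3'], [['4']]]
```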
Column('id', Integer, primary_key=True),
|
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
|
def test_many_updates(self):
    metadata = MetaData(testing.db)

    wide_table = Table('t', metadata,
        Column('id', Integer, primary_key=True),
        *[Column('col%d' % i, Integer) for i in range(10)]
    )

    class Wide(object):
        pass

    mapper(Wide, wide_table, _compiled_cache_size=10)

    metadata.create_all()
    session = create_session()
    w1 = Wide()
    session.add(w1)
    session.flush()
    session.close()
    del session
    counter = [1]

    @profile_memory
    def go():
        session = create_session()
        w1 = session.query(Wide).first()
        x = counter[0]
        dec = 10
        while dec > 0:
            # trying to count in binary here,
            # works enough to trip the test case
            if pow(2, dec) < x:
                setattr(w1, 'col%d' % dec, counter[0])
                x -= pow(2, dec)
            dec -= 1
        session.flush()
        session.close()
        counter[0] += 1

    try:
        go()
    finally:
        metadata.drop_all()
|
Column('user_id', INT, primary_key = True),
|
Column('user_id', INT, primary_key=True, test_needs_autoincrement=True),
|
def setup_class(cls):
    global users, metadata
    metadata = MetaData(testing.db)
    users = Table('users', metadata,
        Column('user_id', INT, primary_key=True),
        Column('user_name', VARCHAR(20)),
    )
    metadata.create_all()
|
@profiling.function_call_count(95, variance=0.001)
|
@profiling.function_call_count(95, variance=0.001, versions={'2.4':67})
|
def test_merge_no_load(self):
    sess = sessionmaker()()
    sess2 = sessionmaker()()
    p1 = sess.query(Parent).get(1)
    p1.children

    # down from 185 on this
    # this is a small slice of a usually bigger
    # operation so using a small variance
    @profiling.function_call_count(95, variance=0.001)
    def go():
        return sess2.merge(p1, load=False)
    p2 = go()
|
The preferred method to construct an :class:`Inspector` is via the
|
The preferred method to construct an :class:`.Inspector` is via the
|
def cache(fn, self, con, *args, **kw):
    info_cache = kw.get('info_cache', None)
    if info_cache is None:
        return fn(self, con, *args, **kw)
    key = (
        fn.__name__,
        tuple(a for a in args if isinstance(a, basestring)),
        tuple((k, v) for k, v in kw.iteritems()
              if isinstance(v, (basestring, int, float)))
    )
    ret = info_cache.get(key)
    if ret is None:
        ret = fn(self, con, *args, **kw)
        info_cache[key] = ret
    return ret
|
to return an :class:`Inspector` subclass that provides additional
|
to return an :class:`.Inspector` subclass that provides additional
|
def cache(fn, self, con, *args, **kw):
    info_cache = kw.get('info_cache', None)
    if info_cache is None:
        return fn(self, con, *args, **kw)
    key = (
        fn.__name__,
        tuple(a for a in args if isinstance(a, basestring)),
        tuple((k, v) for k, v in kw.iteritems()
              if isinstance(v, (basestring, int, float)))
    )
    ret = info_cache.get(key)
    if ret is None:
        ret = fn(self, con, *args, **kw)
        info_cache[key] = ret
    return ret
|
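The decorator keys the memo on the function name plus its hashable string/number arguments, and the caller supplies the memo dict as `info_cache`, so one dict can batch many reflection calls. A reduced sketch of the same pattern (all names hypothetical):

```python
def cached(fn):
    def go(self, con, *args, **kw):
        info_cache = kw.get('info_cache', None)
        if info_cache is None:
            return fn(self, con, *args, **kw)
        key = (fn.__name__, args)
        if key not in info_cache:
            info_cache[key] = fn(self, con, *args, **kw)
        return info_cache[key]
    return go

class Dialect(object):
    calls = 0
    @cached
    def get_table_names(self, con, **kw):
        self.calls += 1
        return ['users', 'addresses']

d, memo = Dialect(), {}
d.get_table_names(None, info_cache=memo)
d.get_table_names(None, info_cache=memo)
print d.calls   # 1 -- the second call was served from info_cache
```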
"""Initialize a new :class:`Inspector`.
|
"""Initialize a new :class:`.Inspector`.
|
def __init__(self, bind):
    """Initialize a new :class:`Inspector`.
|
For a dialect-specific instance of :class:`Inspector`, see
|
For a dialect-specific instance of :class:`.Inspector`, see
|
def __init__(self, bind):
    """Initialize a new :class:`Inspector`.
|
This method differs from a direct constructor call of :class:`Inspector`
|
This method differs from a direct constructor call of :class:`.Inspector`
|
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.
|
a dialect-specific :class:`Inspector` instance, which may provide additional
|
a dialect-specific :class:`.Inspector` instance, which may provide additional
|
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.
|
See the example at :class:`Inspector`.
|
See the example at :class:`.Inspector`.
|
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.
|
cursor.execute(select([1], bind=testing.db))
|
cursor.execute(str(select([1], bind=testing.db)))
|
def test_cursor_iterable(self):
    conn = testing.db.raw_connection()
    cursor = conn.cursor()
    cursor.execute(select([1], bind=testing.db))
    expected = [(1,)]
    for row in cursor:
        eq_(row, expected.pop(0))
|
super(MSSQLDialect_pymssql, self).__init__(**params)
|
super(MSDialect_pymssql, self).__init__(**params)
|
def __init__(self, **params):
    super(MSSQLDialect_pymssql, self).__init__(**params)
    self.use_scope_identity = True
|
arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def'])
|
arrtable.insert().execute(intarr=[1,2,3], strarr=[u'abc', u'def'])
|
def test_insert_array(self):
    arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def'])
    results = arrtable.select().execute().fetchall()
    eq_(len(results), 1)
    eq_(results[0]['intarr'], [1,2,3])
    eq_(results[0]['strarr'], ['abc','def'])
|
arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def'])
arrtable.insert().execute(intarr=[4,5,6], strarr='ABC')
|
arrtable.insert().execute(intarr=[1,2,3], strarr=[u'abc', u'def'])
arrtable.insert().execute(intarr=[4,5,6], strarr=u'ABC')
|
def test_array_where(self):
    arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def'])
    arrtable.insert().execute(intarr=[4,5,6], strarr='ABC')
    results = arrtable.select().where(arrtable.c.intarr == [1,2,3]).execute().fetchall()
    eq_(len(results), 1)
    eq_(results[0]['intarr'], [1,2,3])
|
arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def'])
|
arrtable.insert().execute(intarr=[1,2,3], strarr=[u'abc', u'def'])
|
def test_array_concat(self):
    arrtable.insert().execute(intarr=[1,2,3], strarr=['abc', 'def'])
    results = select([arrtable.c.intarr + [4,5,6]]).execute().fetchall()
    eq_(len(results), 1)
    eq_(results[0][0], [1,2,3,4,5,6])
|
insert2_params.append(ReturningParam(12))
|
insert2_params += (ReturningParam(12),)
|
def assert_stmts(expected, received):
    for stmt, params, posn in expected:
        if not received:
            assert False
        while received:
            teststmt, testparams, testmultiparams = received.pop(0)
            teststmt = re.compile(r'[\n\t ]+', re.M).sub(' ', teststmt).strip()
            if teststmt.startswith(stmt) and (testparams == params or testparams == posn):
                break
|
return bind.dialect.inspector(engine)
|
return bind.dialect.inspector(bind)
|
def from_engine(cls, bind):
    """Construct a new dialect-specific Inspector object from the given
    engine or connection.
|
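Passing `bind` (rather than the undefined `engine` name) is the whole fix here; `from_engine` then returns whatever Inspector subclass the bound dialect advertises, so callers get dialect-specific reflection methods transparently. Typical usage, assuming the SQLAlchemy 0.6-era module layout and an in-memory SQLite database:

```python
from sqlalchemy import create_engine
from sqlalchemy.engine import reflection

engine = create_engine('sqlite://')
insp = reflection.Inspector.from_engine(engine)
print insp.get_table_names()   # [] for a fresh in-memory database
```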
tups = []
|
def _save_obj(self, states, uowtransaction, postupdate=False,
              post_update_cols=None, single=False):
    """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects.
|
|
connection_callable = uowtransaction.mapper_flush_opts['connection_callable']
for state in _sort_states(states):
    m = _state_mapper(state)
    tups.append(
        (
            state,
            m,
            connection_callable(self, state.obj()),
            _state_has_identity(state),
            state.key or m._identity_key_from_state(state)
        )
    )
|
connection_callable = \
    uowtransaction.mapper_flush_opts['connection_callable']
|
def _save_obj(self, states, uowtransaction, postupdate=False,
              post_update_cols=None, single=False):
    """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects.
|
for state in _sort_states(states):
    m = _state_mapper(state)
    tups.append(
        (
            state,
            m,
            connection,
            _state_has_identity(state),
            state.key or m._identity_key_from_state(state)
        )
    )

if not postupdate:
    for state, mapper, connection, has_identity, instance_key in tups:
|
connection_callable = None

tups = []
for state in _sort_states(states):
    conn = connection_callable and \
        connection_callable(self, state.obj()) or \
        connection

    has_identity = _state_has_identity(state)
    mapper = _state_mapper(state)
    instance_key = state.key or mapper._identity_key_from_state(state)

    row_switch = None
    if not postupdate:
|
def _save_obj(self, states, uowtransaction, postupdate=False,
              post_update_cols=None, single=False):
    """Issue ``INSERT`` and/or ``UPDATE`` statements for a list of objects.
|