Columns: text (string, lengths 17-1.47k), positive (string, lengths 673-4.43k), negative (string, lengths 677-2.81k)
Add auth flow onEnter states
angular.module('smartCampUZApp', ['ui.router', 'base64', 'angular-jwt']) .config(function ($stateProvider, $urlRouterProvider) { $stateProvider //starter screen .state('starter', { url: "/starter", templateUrl: "templates/starter.html", controller: "starterCtrl", onEnter: function ($state, auth) { if (auth.isAuthenticated() && (auth.getType() == 'admin')) { $state.go('admin'); } else if (auth.isAuthenticated() && (auth.getType() == 'maintenance')) { $state.go('maintenance'); } } }) //starter screen .state('admin', { url: "/admin", templateUrl: "templates/admin.html", controller: "adminCtrl", onEnter: function ($state, auth) { if (!auth.isAuthenticated() || auth.isAuthenticated() && (auth.getType() == 'profesor')) { $state.go('starter'); } else if (auth.isAuthenticated() && (auth.getType() == 'maintenance')) { $state.go('maintenance'); } } }); $urlRouterProvider.otherwise('starter'); });
angular.module('smartCampUZApp', ['ui.router', 'base64', 'angular-jwt']) .config(function ($stateProvider, $urlRouterProvider) { $stateProvider //starter screen .state('starter', { url: "/starter", templateUrl: "templates/starter.html", controller: "starterCtrl", onEnter: function ($state, auth) { if (auth.isAuthenticated()) { $state.go('admin'); } } }) //starter screen .state('admin', { url: "/admin", templateUrl: "templates/admin.html", controller: "adminCtrl", onEnter: function ($state, auth) { if (!auth.isAuthenticated()) { $state.go('starter'); } } }); $urlRouterProvider.otherwise('starter'); });
Declare vars before using them. If there are no tests, an exception occurs: ``` PHP Notice: Undefined variable: methods in PROJECT/vendor/jolicode/asynit/src/Parser/TestsFinder.php on line 56 PHP Fatal error: Uncaught TypeError: Return value of Asynit\Parser\TestsFinder::doFindTests() must be of the type array, null returned in PROJECT/vendor/jolicode/asynit/src/Parser/TestsFinder.php:56 Stack trace: #0 PROJECT/vendor/jolicode/asynit/src/Parser/TestsFinder.php(14): Asynit\Parser\TestsFinder->doFindTests(Array) #1 PROJECT/vendor/jolicode/asynit/src/Command/AsynitCommand.php(62): Asynit\Parser\TestsFinder->findTests('Api/UserOrganiz...') #2 PROJECT/vendor/symfony/console/Command/Command.php(252): Asynit\Command\AsynitCommand->execute(Object(Symfony\Component\Console\Input\ArgvInput), Object(Symfony\Component\Console\Output\ConsoleOutput)) #3 PROJECT/vendor/symfony/console/Application.php(938): Symfony\Component\Console\Command\Command->run(Object(Symfony\Component\Console\Input\ArgvInput), Object(Symfony\Component\Console\Output\ConsoleOutput)) #4 PROJECT/vendor/symfony/console/Application.php(240): Symfony\Component\Console\Application->doRunCo in PROJECT/vendor/jolicode/asynit/src/Parser/TestsFinder.php on line 56 ```
<?php namespace Asynit\Parser; use Asynit\Test; use Asynit\TestCase; use Symfony\Component\Finder\Finder; class TestsFinder { public function findTests(string $path): array { if (\is_file($path)) { return $this->doFindTests([$path]); } $finder = Finder::create() ->files() ->name('*.php') ->in($path) ; return $this->doFindTests($finder); } private function doFindTests($files): array { $methods = []; foreach ($files as $file) { $existingClasses = get_declared_classes(); $path = $file; if ($path instanceof \SplFileInfo) { $path = $path->getRealPath(); } require_once $path; $newClasses = array_diff(get_declared_classes(), $existingClasses); foreach ($newClasses as $class) { if (!is_subclass_of($class, TestCase::class)) { continue; } foreach (get_class_methods($class) as $method) { if (!preg_match('/^test(.+)$/', $method)) { continue; } $test = new Test(new \ReflectionMethod($class, $method)); $methods[$test->getIdentifier()] = $test; } } } return $methods; } }
<?php namespace Asynit\Parser; use Asynit\Test; use Asynit\TestCase; use Symfony\Component\Finder\Finder; class TestsFinder { public function findTests(string $path): array { if (\is_file($path)) { return $this->doFindTests([$path]); } $finder = Finder::create() ->files() ->name('*.php') ->in($path) ; return $this->doFindTests($finder); } private function doFindTests($files): array { foreach ($files as $file) { $existingClasses = get_declared_classes(); $path = $file; if ($path instanceof \SplFileInfo) { $path = $path->getRealPath(); } require_once $path; $newClasses = array_diff(get_declared_classes(), $existingClasses); foreach ($newClasses as $class) { if (!is_subclass_of($class, TestCase::class)) { continue; } foreach (get_class_methods($class) as $method) { if (!preg_match('/^test(.+)$/', $method)) { continue; } $test = new Test(new \ReflectionMethod($class, $method)); $methods[$test->getIdentifier()] = $test; } } } return $methods; } }
Use the HTMLParser module instead of htmllib
import plugin import urllib2 import logging import re from httplib import HTTPException import HTMLParser class Plugin(plugin.baseplugin): def __init__(self, bot): self._regex = re.compile("(https?://.+)\s?") self._ua = "Mozilla/5.0 (X11; Linux x86_64; rv:2.0b8) Gecko/20100101 Firefox/4.0b8" bot.register_regex(self._regex, self.title) def unescape(self, s): p = HTMLParser.HTMLParser(None) p.save_bgn() p.feed(s) return p.save_end() def title(self, bot, user, channel, text, match_obj): url = match_obj.groups()[0] req = urllib2.Request(url) req.add_header("User-Agent", self._ua) try: content = urllib2.urlopen(req).read() except (urllib2.URLError, HTTPException), e: logging.debug("%s - %s" % (e, url)) return beg = content.find("<title>") if beg != -1: title = content[beg+7:content.find("</title>")].replace("\n","") try: title = self.unescape(title) except HTMLParser.HTMLParseError, e: logging.info("%s - %s" % (e.msg, url)) bot.privmsg(channel, "Title: %s" % unicode(title, "utf-8"))
import plugin import urllib2 import logging import re from httplib import HTTPException from htmllib import HTMLParser class Plugin(plugin.baseplugin): def __init__(self, bot): self._regex = re.compile("(https?://.+)\s?") self._ua = "Mozilla/5.0 (X11; Linux x86_64; rv:2.0b8) Gecko/20100101 Firefox/4.0b8" bot.register_regex(self._regex, self.title) def unescape(self, s): p = HTMLParser(None) p.save_bgn() p.feed(s) return p.save_end() def title(self, bot, user, channel, text, match_obj): url = match_obj.groups()[0] req = urllib2.Request(url) req.add_header("User-Agent", self._ua) try: content = urllib2.urlopen(req).read() except (urllib2.URLError, HTTPException), e: logging.debug("%s - %s" % (e, url)) return beg = content.find("<title>") if beg != -1: title = content[beg+7:content.find("</title>")].replace("\n","") title = self.unescape(title) bot.privmsg(channel, "Title: %s" % unicode(title, "utf-8"))
Fix misspelling of lecturerService when logging out
(function() { 'use strict'; angular .module('lecturer') .factory('lecturerFactory', lecturerFactory); /* @ngInject */ function lecturerFactory(lecturerService, $location) { var username = ''; var observerCallbacks = []; var Login = false; var service = { login: login, checkUserToken: checkUserToken, logout: logout, registerObserverCallback: registerObserverCallback }; return service; function login(credentials) { if (!Login) { Login = lecturerService.login; } var SHA256 = new Hashes.SHA256(); SHA256.setUTF8(true); // Prevent binding the hashed password into the input. var form = { email: credentials.email, password: SHA256.hex(credentials.password) }; Login.save(form, loginSuccess, logoutSuccess); } function checkUserToken(onError) { if (!Login) { Login = lectureService.login; } Login.get(loginSuccess, function() { logoutSuccess(); onError(); }); } function logout() { lecturerService.logout .save(success); function success() { logoutSuccess(); $location.path('/'); } } function registerObserverCallback(callback) { observerCallbacks.push(callback); } function loginSuccess(response) { angular .forEach(observerCallbacks, function(callback) { callback(true, response.username); }); } function logoutSuccess() { angular .forEach(observerCallbacks, function(callback) { callback(false); }); } } })();
(function() { 'use strict'; angular .module('lecturer') .factory('lecturerFactory', lecturerFactory); /* @ngInject */ function lecturerFactory(lecturerService, $location) { var username = ''; var observerCallbacks = []; var Login = false; var service = { login: login, checkUserToken: checkUserToken, logout: logout, registerObserverCallback: registerObserverCallback }; return service; function login(credentials) { if (!Login) { Login = lecturerService.login; } var SHA256 = new Hashes.SHA256(); SHA256.setUTF8(true); // Prevent binding the hashed password into the input. var form = { email: credentials.email, password: SHA256.hex(credentials.password) }; Login.save(form, loginSuccess, logoutSuccess); } function checkUserToken(onError) { if (!Login) { Login = lectureService.login; } Login.get(loginSuccess, function() { logoutSuccess(); onError(); }); } function logout() { lectureService.logout .save(success); function success() { logoutSuccess(); $location.path('/'); } } function registerObserverCallback(callback) { observerCallbacks.push(callback); } function loginSuccess(response) { angular .forEach(observerCallbacks, function(callback) { callback(true, response.username); }); } function logoutSuccess() { angular .forEach(observerCallbacks, function(callback) { callback(false); }); } } })();
Use PickleSerializer to serialize test sessions. Use PickleSerializer so that we can serialize datetimes in sessions (this is necessary to set expired sessions in automated tests). See https://docs.djangoproject.com/en/2.0/topics/http/sessions/#write-your-own-serializer.
import datetime from importlib import import_module from django.conf import settings from django.test import TestCase from django.test.utils import override_settings from django.utils import timezone from session_cleanup.tasks import cleanup class CleanupTest(TestCase): @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file") @override_settings(SESSION_SERIALIZER="django.contrib.sessions.serializers.PickleSerializer") # noqa: E501 def test_session_cleanup(self): """ Tests that sessions are deleted by the task """ engine = import_module(settings.SESSION_ENGINE) SessionStore = engine.SessionStore now = timezone.now() last_week = now - datetime.timedelta(days=7) stores = [] unexpired_stores = [] expired_stores = [] # create unexpired sessions for i in range(20): store = SessionStore() store.save() stores.append(store) for store in stores: self.assertEqual(store.exists(store.session_key), True, "Session store could not be created.") unexpired_stores = stores[:10] expired_stores = stores[10:] # expire some sessions for store in expired_stores: store.set_expiry(last_week) store.save() cleanup() for store in unexpired_stores: self.assertEqual(store.exists(store.session_key), True, "Unexpired session was deleted by cleanup.") for store in expired_stores: self.assertEqual(store.exists(store.session_key), False, "Expired session was not deleted by cleanup.")
import datetime from importlib import import_module from django.conf import settings from django.test import TestCase from django.test.utils import override_settings from django.utils import timezone from session_cleanup.tasks import cleanup class CleanupTest(TestCase): @override_settings(SESSION_ENGINE="django.contrib.sessions.backends.file") def test_session_cleanup(self): """ Tests that sessions are deleted by the task """ engine = import_module(settings.SESSION_ENGINE) SessionStore = engine.SessionStore now = timezone.now() last_week = now - datetime.timedelta(days=7) stores = [] unexpired_stores = [] expired_stores = [] # create unexpired sessions for i in range(20): store = SessionStore() store.save() stores.append(store) for store in stores: self.assertEqual(store.exists(store.session_key), True, "Session store could not be created.") unexpired_stores = stores[:10] expired_stores = stores[10:] # expire some sessions for store in expired_stores: store.set_expiry(last_week) store.save() cleanup() for store in unexpired_stores: self.assertEqual(store.exists(store.session_key), True, "Unexpired session was deleted by cleanup.") for store in expired_stores: self.assertEqual(store.exists(store.session_key), False, "Expired session was not deleted by cleanup.")
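A note on the PickleSerializer entry above: the test uses a per-test `@override_settings` decorator, but the same serializer can also be configured project-wide. A minimal sketch, assuming a standard Django settings module (the setting name and class path are taken from the test code above):

```python
# settings.py -- project-wide equivalent of the @override_settings
# decorator used in the test above; lets sessions hold datetime values.
SESSION_SERIALIZER = "django.contrib.sessions.serializers.PickleSerializer"
```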
Add DELETE example to CURL job
"""A job to send a HTTP GET periodically.""" import logging import requests from ndscheduler import job logger = logging.getLogger(__name__) class CurlJob(job.JobBase): TIMEOUT = 10 @classmethod def meta_info(cls): return { 'job_class_string': '%s.%s' % (cls.__module__, cls.__name__), 'notes': ('This sends a HTTP request to a particular URL'), 'arguments': [ # url {'type': 'string', 'description': 'What URL you want to make a GET call?'}, # Request Type {'type': 'string', 'description': 'What request type do you want? ' '(currently supported: GET/DELETE)'}, ], 'example_arguments': ('["http://localhost:8888/api/v1/jobs", "GET"]' '["http://localhost:8888/api/v1/jobs/ba12e", "DELETE"]') } def run(self, url, request_type, *args, **kwargs): print 'Calling GET on url: %s' % (url) session = requests.Session() result = session.request(request_type, url, timeout=self.TIMEOUT, headers=None, data=None) print(result.text) if __name__ == "__main__": job = CurlJob.create_test_instance() job.run('http://localhost:888/api/v1/jobs')
"""A job to send a HTTP GET periodically.""" import logging import requests from ndscheduler import job logger = logging.getLogger(__name__) class CurlJob(job.JobBase): TIMEOUT = 10 @classmethod def meta_info(cls): return { 'job_class_string': '%s.%s' % (cls.__module__, cls.__name__), 'notes': ('This sends a HTTP GET to a particular URL'), 'arguments': [ # url {'type': 'string', 'description': 'What URL you want to make a GET call?'}, ], 'example_arguments': ('["http://localhost:8888/api/v1/jobs"]') } def run(self, url, *args, **kwargs): print 'Calling GET on url: %s' % (url) session = requests.Session() session.request('GET', url, timeout=self.TIMEOUT, headers=None, data=None) if __name__ == "__main__": job = CurlJob.create_test_instance() job.run('http://localhost:888/api/v1/jobs')
Fix use of userID instead of id
var Blog = React.createClass({ navigate: function(interval) { alert(interval) }, navBtn: function(forwards) { return ( <div className={"ui " + (forwards?"right":"left") + " floated segment basic"}> <button className="circular ui button" onClick={()=>this.navigate(forwards?1:-1)}> {forwards?"next":""}<i className={"chevron " + (forwards?"right":"left") + " icon"}></i>{forwards?"":"back"} </button> </div> ); }, render: function() { return ( <div> {this.props.entries.map(function(entry) { return ( <div className="blogEntry" key={entry.id}> <h1 className="header">{entry.title}</h1> <h3 className="author">by <a href={"/users?u=" + entry.author.id}>{entry.author.name}</a></h3> <h4 className="date">{entry.date}</h4> <div className="content" dangerouslySetInnerHTML={{__html: entry.body}}></div> </div> ); })} { this.props.page>1?this.navBtn(false):""} { this.props.page<this.props.pages?this.navBtn(true):""} </div> ); } }); $.get('http://' + location.hostname + ':5000/blogs', function (result) { ReactDOM.render( <Blog pages={1} page={1} entries={result} />, document.getElementById('blogBox') ); });
var Blog = React.createClass({ navigate: function(interval) { alert(interval) }, navBtn: function(forwards) { return ( <div className={"ui " + (forwards?"right":"left") + " floated segment basic"}> <button className="circular ui button" onClick={()=>this.navigate(forwards?1:-1)}> {forwards?"next":""}<i className={"chevron " + (forwards?"right":"left") + " icon"}></i>{forwards?"":"back"} </button> </div> ); }, render: function() { return ( <div> {this.props.entries.map(function(entry) { return ( <div className="blogEntry" key={entry.id}> <h1 className="header">{entry.title}</h1> <h3 className="author">by <a href={"/users?u=" + entry.author.userID}>{entry.author.name}</a></h3> <h4 className="date">{entry.date}</h4> <div className="content" dangerouslySetInnerHTML={{__html: entry.body}}></div> </div> ); })} { this.props.page>1?this.navBtn(false):""} { this.props.page<this.props.pages?this.navBtn(true):""} </div> ); } }); $.get('http://' + location.hostname + ':5000/blogs', function (result) { ReactDOM.render( <Blog pages={1} page={1} entries={result} />, document.getElementById('blogBox') ); });
Use i_latest_revision to ensure we get the latest revision.
"""Name output plugin """ import optparse from pyang import plugin def pyang_plugin_init(): plugin.register_plugin(NamePlugin()) class NamePlugin(plugin.PyangPlugin): def add_output_format(self, fmts): self.multiple_modules = True fmts['name'] = self def add_opts(self, optparser): optlist = [ optparse.make_option("--name-print-revision", dest="print_revision", action="store_true", help="Print the name and revision in name@revision format"), ] g = optparser.add_option_group("Name output specific options") g.add_options(optlist) def setup_fmt(self, ctx): ctx.implicit_errors = False def emit(self, ctx, modules, fd): emit_name(ctx, modules, fd) def emit_name(ctx, modules, fd): for module in modules: bstr = "" rstr = "" if ctx.opts.print_revision: rs = module.i_latest_revision if rs is None: r = module.search_one('revision') if r is not None: rs = r.arg if rs is not None: rstr = '@%s' % rs b = module.search_one('belongs-to') if b is not None: bstr = " (belongs-to %s)" % b.arg fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
"""Name output plugin """ import optparse from pyang import plugin def pyang_plugin_init(): plugin.register_plugin(NamePlugin()) class NamePlugin(plugin.PyangPlugin): def add_output_format(self, fmts): self.multiple_modules = True fmts['name'] = self def add_opts(self, optparser): optlist = [ optparse.make_option("--name-print-revision", dest="print_revision", action="store_true", help="Print the name and revision in name@revision format"), ] g = optparser.add_option_group("Name output specific options") g.add_options(optlist) def setup_fmt(self, ctx): ctx.implicit_errors = False def emit(self, ctx, modules, fd): emit_name(ctx, modules, fd) def emit_name(ctx, modules, fd): for module in modules: bstr = "" rstr = "" if ctx.opts.print_revision: r = module.search_one('revision') if r is not None: rstr = '@%s' % r.arg b = module.search_one('belongs-to') if b is not None: bstr = " (belongs-to %s)" % b.arg fd.write("%s%s%s\n" % (module.arg, rstr, bstr))
Use BSA's API to get URL of latest prescribing data
import os import requests from django.conf import settings from django.core.management import BaseCommand from openprescribing.utils import mkdir_p class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("year", type=int) parser.add_argument("month", type=int) def handle(self, year, month, **kwargs): rsp = requests.get( "https://opendata.nhsbsa.net/api/3/action/package_show?id=english-prescribing-data-epd" ) resources = rsp.json()["result"]["resources"] urls = [ r["url"] for r in resources if r["name"] == "EPD_{year}{month:02d}".format(year=year, month=month) ] assert len(urls) == 1, urls rsp = requests.get(urls[0], stream=True) assert rsp.ok dir_path = os.path.join( settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", "{year}{month:02d}".format(year=year, month=month), ) mkdir_p(dir_path) filename = "epd_{year}{month:02d}.csv".format(year=year, month=month) with open(os.path.join(dir_path, filename), "wb") as f: for block in rsp.iter_content(32 * 1024): f.write(block)
import os import requests from bs4 import BeautifulSoup from django.conf import settings from django.core.management import BaseCommand from openprescribing.utils import mkdir_p class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("year", type=int) parser.add_argument("month", type=int) def handle(self, year, month, **kwargs): year_and_month = "{year}_{month:02d}".format(year=year, month=month) dir_path = os.path.join( settings.PIPELINE_DATA_BASEDIR, "prescribing_v2", year_and_month ) mkdir_p(dir_path) rsp = requests.get( "https://opendata.nhsbsa.net/dataset/english-prescribing-data-epd" ) doc = BeautifulSoup(rsp.text, "html.parser") filename = "epd_{year}{month:02d}.csv".format(year=year, month=month) urls = [a["href"] for a in doc.find_all("a") if filename in a["href"]] assert len(urls) == 1, urls rsp = requests.get(urls[0], stream=True) assert rsp.ok with open(os.path.join(dir_path, filename), "wb") as f: for block in rsp.iter_content(32 * 1024): f.write(block)
Remove hard exits and event logging from buildInfo example
<?php use Jmikola\React\MongoDB\Connection; use Jmikola\React\MongoDB\ConnectionFactory; use Jmikola\React\MongoDB\Protocol\Query; use Jmikola\React\MongoDB\Protocol\Reply; require __DIR__ . '/../vendor/autoload.php'; $loop = React\EventLoop\Factory::create(); $factory = new ConnectionFactory($loop); $connection = $factory->create('127.0.0.1', 27017)->then( function (Connection $connection) { $query = new Query('admin.$cmd', ['buildInfo' => 1], null, 0, 1); $connection->send($query)->then( function(Reply $reply) { printf("# query executed successfully!\n"); foreach ($reply as $document) { var_dump($document); } }, function (Exception $e) { printf("# query error: %s\n", $e->getMessage()); printf("%s\n", $e->getTraceAsString()); } ); $connection->end(); }, function (Exception $e) { printf("# connection error: %s\n", $e->getMessage()); printf("%s\n", $e->getTraceAsString()); } ); $loop->run();
<?php use Jmikola\React\MongoDB\Connection; use Jmikola\React\MongoDB\ConnectionFactory; use Jmikola\React\MongoDB\Protocol\Query; use Jmikola\React\MongoDB\Protocol\Reply; require __DIR__ . '/../vendor/autoload.php'; $loop = React\EventLoop\Factory::create(); $factory = new ConnectionFactory($loop); $connection = $factory->create('127.0.0.1', 27017)->then( function (Connection $connection) { $query = new Query('admin.$cmd', array('buildInfo' => 1), null, 0, 1); $connection->on('message', function(Reply $reply) { printf("# received reply with message length: %d\n", $reply->getMessageLength()); // Note: this only works because a single document is returned var_dump(bson_decode($reply->getDocumentsData())); }); $connection->on('close', function() { printf("# connection closed!\n"); }); $connection->send($query)->then( function(Reply $reply) { printf("# query executed successfully!\n"); }, function (Exception $e) { printf("# query error: %s\n", $e->getMessage()); printf("%s\n", $e->getTraceAsString()); exit(1); } ); }, function (Exception $e) { printf("# connection error: %s\n", $e->getMessage()); printf("%s\n", $e->getTraceAsString()); exit(1); } ); $loop->run();
Remove a not-too-useful comment hating on Django Pipelines
require('js-yaml'); var path = require('path'); module.exports = function (grunt) { grunt.loadNpmTasks('grunt-contrib-jst'); grunt.loadNpmTasks('grunt-mocha-test'); grunt.loadNpmTasks('grunt-karma'); grunt.initConfig({ paths: require('js_paths.yml'), mochaTest: { jsbox_apps: { src: ['<%= paths.tests.jsbox_apps.spec %>'], } }, jst: { options: { processName: function(filename) { var dir = path.dirname(filename); dir = path.relative('go/base/static/templates', dir); var parts = dir.split('/'); parts.push(path.basename(filename, '.jst')); return parts.join('_'); } }, templates: { files: { "<%= paths.client.templates.dest %>": [ "<%= paths.client.templates.src %>" ] } }, }, karma: { dev: { singleRun: true, reporters: ['dots'], configFile: 'karma.conf.js' } } }); grunt.registerTask('test:jsbox_apps', [ 'mochaTest:jsbox_apps' ]); grunt.registerTask('test:client', [ 'jst:templates', 'karma:dev' ]); grunt.registerTask('test', [ 'test:jsbox_apps', 'test:client' ]); grunt.registerTask('default', [ 'test' ]); };
require('js-yaml'); var path = require('path'); module.exports = function (grunt) { grunt.loadNpmTasks('grunt-contrib-jst'); grunt.loadNpmTasks('grunt-mocha-test'); grunt.loadNpmTasks('grunt-karma'); grunt.initConfig({ paths: require('js_paths.yml'), mochaTest: { jsbox_apps: { src: ['<%= paths.tests.jsbox_apps.spec %>'], } }, jst: { options: { processName: function(filename) { var dir = path.dirname(filename); dir = path.relative('go/base/static/templates', dir); var parts = dir.split('/'); parts.push(path.basename(filename, '.jst')); // process the template names the arb Django Pipelines way return parts.join('_'); } }, templates: { files: { "<%= paths.client.templates.dest %>": [ "<%= paths.client.templates.src %>" ] } }, }, karma: { dev: { singleRun: true, reporters: ['dots'], configFile: 'karma.conf.js' } } }); grunt.registerTask('test:jsbox_apps', [ 'mochaTest:jsbox_apps' ]); grunt.registerTask('test:client', [ 'jst:templates', 'karma:dev' ]); grunt.registerTask('test', [ 'test:jsbox_apps', 'test:client' ]); grunt.registerTask('default', [ 'test' ]); };
Remove reference to old UUIDfield in django migration
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import jsonfield.fields import uuid from django.db.models import UUIDField class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Job', fields=[ ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)), ('created', models.DateTimeField(db_index=True, auto_now_add=True)), ('modified', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=100)), ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])), ('next_task', models.CharField(max_length=100, blank=True)), ('workspace', jsonfield.fields.JSONField(null=True)), ('queue_name', models.CharField(db_index=True, max_length=20, default='default')), ], options={ 'ordering': ['-created'], }, ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import jsonfield.fields import uuid try: from django.db.models import UUIDField except ImportError: from django_dbq.fields import UUIDField class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Job', fields=[ ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)), ('created', models.DateTimeField(db_index=True, auto_now_add=True)), ('modified', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=100)), ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])), ('next_task', models.CharField(max_length=100, blank=True)), ('workspace', jsonfield.fields.JSONField(null=True)), ('queue_name', models.CharField(db_index=True, max_length=20, default='default')), ], options={ 'ordering': ['-created'], }, ), ]
Test for HOST_NAME or HOSTNAME env var
from cla_public.config.common import * DEBUG = os.environ.get('SET_DEBUG', False) == 'True' SECRET_KEY = os.environ['SECRET_KEY'] # TODO - change this to True when serving over HTTPS SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging'] HOST_NAME = os.environ.get('HOST_NAME') or os.environ.get('HOSTNAME') BACKEND_API = { 'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/' } if DEBUG: LOGGING['handlers']['debug_file'] = { 'level': 'DEBUG', 'class': 'logging.handlers.RotatingFileHandler', 'filename': '/var/log/wsgi/debug.log', 'maxBytes': 1024 * 1024 * 5, # 5MB 'backupCount': 7, 'formatter': 'verbose'} LOGGING['loggers'] = { '': { 'handlers': ['debug_file'], 'level': 'DEBUG' } } else: LOGGING['handlers']['production_file'] = { 'level': 'INFO', 'class': 'logging.handlers.RotatingFileHandler', 'filename': '/var/log/wsgi/app.log', 'maxBytes': 1024 * 1024 * 5, # 5MB 'backupCount': 7, 'formatter': 'logstash'} LOGGING['loggers'] = { '': { 'handlers': ['production_file'], 'level': 'DEBUG' } }
from cla_public.config.common import * DEBUG = os.environ.get('SET_DEBUG', False) == 'True' SECRET_KEY = os.environ['SECRET_KEY'] # TODO - change this to True when serving over HTTPS SESSION_COOKIE_SECURE = os.environ.get('CLA_ENV', '') in ['prod', 'staging'] HOST_NAME = os.environ['HOST_NAME'] BACKEND_API = { 'url': os.environ['BACKEND_BASE_URI'] + '/checker/api/v1/' } if DEBUG: LOGGING['handlers']['debug_file'] = { 'level': 'DEBUG', 'class': 'logging.handlers.RotatingFileHandler', 'filename': '/var/log/wsgi/debug.log', 'maxBytes': 1024 * 1024 * 5, # 5MB 'backupCount': 7, 'formatter': 'verbose'} LOGGING['loggers'] = { '': { 'handlers': ['debug_file'], 'level': 'DEBUG' } } else: LOGGING['handlers']['production_file'] = { 'level': 'INFO', 'class': 'logging.handlers.RotatingFileHandler', 'filename': '/var/log/wsgi/app.log', 'maxBytes': 1024 * 1024 * 5, # 5MB 'backupCount': 7, 'formatter': 'logstash'} LOGGING['loggers'] = { '': { 'handlers': ['production_file'], 'level': 'DEBUG' } }
Fix python 3 use of iteritems
import imp import json import os import sys class Dot(dict): def __init__(self, d): super(dict, self).__init__() for k, v in iter(d.items()): if isinstance(v, dict): self[k] = Dot(v) else: self[k] = v def __getattr__(self, attr): try: return self[attr] except KeyError: raise AttributeError("'{}'".format(attr)) __setattr__ = dict.__setitem__ __delattr__ = dict.__delitem__ class SempaiLoader(object): def find_module(self, name, path=None): for d in sys.path: self.json_path = os.path.join(d, '{}.json'.format(name)) if os.path.isfile(self.json_path): return self return None def load_module(self, name): mod = imp.new_module(name) mod.__file__ = self.json_path mod.__loader__ = self try: with open(self.json_path) as f: d = json.load(f) except ValueError: raise ImportError( '"{}" does not contain valid json.'.format(self.json_path)) except: raise ImportError( 'Could not open "{}".'.format(self.json_path)) mod.__dict__.update(d) for k, i in mod.__dict__.items(): if isinstance(i, dict): mod.__dict__[k] = Dot(i) return mod sys.meta_path.append(SempaiLoader())
import imp import json import os import sys class Dot(dict): def __init__(self, d): super(dict, self).__init__() for k, v in d.iteritems(): if isinstance(v, dict): self[k] = Dot(v) else: self[k] = v def __getattr__(self, attr): try: return self[attr] except KeyError: raise AttributeError("'{}'".format(attr)) __setattr__ = dict.__setitem__ __delattr__ = dict.__delitem__ class SempaiLoader(object): def find_module(self, name, path=None): for d in sys.path: self.json_path = os.path.join(d, '{}.json'.format(name)) if os.path.isfile(self.json_path): return self return None def load_module(self, name): mod = imp.new_module(name) mod.__file__ = self.json_path mod.__loader__ = self try: with open(self.json_path) as f: d = json.load(f) except ValueError: raise ImportError( '"{}" does not contain valid json.'.format(self.json_path)) except: raise ImportError( 'Could not open "{}".'.format(self.json_path)) mod.__dict__.update(d) for k, i in mod.__dict__.items(): if isinstance(i, dict): mod.__dict__[k] = Dot(i) return mod sys.meta_path.append(SempaiLoader())
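A note on the iteritems entry above: `dict.iteritems()` exists only in Python 2, which is why the fixed version iterates over `d.items()` instead. A minimal sketch of the portable pattern (the sample dictionary is illustrative only):

```python
# Illustrative only: dict.iteritems() was removed in Python 3,
# so the fixed code iterates over d.items() instead.
d = {"a": 1, "b": 2}

# Works on both Python 2 and Python 3 (on Python 2 items() builds a
# list, which is fine for small dictionaries like this one).
pairs = [(key, value) for key, value in d.items()]
assert all(d[key] == value for key, value in pairs)
```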
Switch capmetrics script to etl function.
#!/usr/bin/env python3 import os from setuptools import setup, find_packages def get_readme(): return open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() setup( author="Julio Gonzalez Altamirano", author_email='[email protected]', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', ], description="ETL for CapMetro raw data.", entry_points={ 'console_scripts': [ 'capmetrics=capmetrics_etl.cli:etl', 'capmetrics-tables=capmetrics_etl.cli.tables' ], }, install_requires=['click', 'pytz', 'sqlalchemy', 'xlrd'], keywords="python etl transit", license="MIT", long_description=get_readme(), name='capmetrics-etl', package_data={ 'capmetrics_etl': ['templates/*.html'], }, packages=find_packages(include=['capmetrics_etl', 'capmetrics_etl.*'], exclude=['tests', 'tests.*']), platforms=['any'], url='https://github.com/jga/capmetrics-etl', version='0.1.0' )
#!/usr/bin/env python3 import os from setuptools import setup, find_packages def get_readme(): return open(os.path.join(os.path.dirname(__file__), 'README.rst')).read() setup( author="Julio Gonzalez Altamirano", author_email='[email protected]', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3 :: Only', ], description="ETL for CapMetro raw data.", entry_points={ 'console_scripts': [ 'capmetrics=capmetrics_etl.cli:run', 'capmetrics-tables=capmetrics_etl.cli.tables' ], }, install_requires=['click', 'pytz', 'sqlalchemy', 'xlrd'], keywords="python etl transit", license="MIT", long_description=get_readme(), name='capmetrics-etl', package_data={ 'capmetrics_etl': ['templates/*.html'], }, packages=find_packages(include=['capmetrics_etl', 'capmetrics_etl.*'], exclude=['tests', 'tests.*']), platforms=['any'], url='https://github.com/jga/capmetrics-etl', version='0.1.0' )
Remove deprecated usage of TreeBuilder on Symfony 4
<?php /* * This file is part of the EzCoreExtraBundle package. * * (c) Jérôme Vieilledent <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Lolautruche\EzCoreExtraBundle\DependencyInjection; use eZ\Bundle\EzPublishCoreBundle\DependencyInjection\Configuration\SiteAccessAware\Configuration as SiteAccessConfiguration; use Symfony\Component\Config\Definition\Builder\TreeBuilder; class Configuration extends SiteAccessConfiguration { public function getConfigTreeBuilder() { $treeBuilder = new TreeBuilder('ez_core_extra'); // Keep compatibility with symfony/config < 4.2 if (!method_exists($treeBuilder, 'getRootNode')) { $rootNode = $treeBuilder->root('ez_core_extra'); } else { $rootNode = $treeBuilder->getRootNode(); } $systemNode = $this->generateScopeBaseNode($rootNode); $systemNode ->arrayNode('twig_globals') ->info('Variables available in all Twig templates for current SiteAccess.') ->normalizeKeys(false) ->useAttributeAsKey('variable_name') ->example(array('foo' => '"bar"', 'pi' => 3.14)) ->prototype('variable')->end() ->end() ->booleanNode('enable_email_authentication') ->info('Whether eZ users can authenticate against their e-mail or not.') ->defaultFalse() ->end(); return $treeBuilder; } }
<?php /* * This file is part of the EzCoreExtraBundle package. * * (c) Jérôme Vieilledent <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Lolautruche\EzCoreExtraBundle\DependencyInjection; use eZ\Bundle\EzPublishCoreBundle\DependencyInjection\Configuration\SiteAccessAware\Configuration as SiteAccessConfiguration; use Symfony\Component\Config\Definition\Builder\TreeBuilder; class Configuration extends SiteAccessConfiguration { public function getConfigTreeBuilder() { $treeBuilder = new TreeBuilder(); $rootNode = $treeBuilder->root('ez_core_extra'); $systemNode = $this->generateScopeBaseNode($rootNode); $systemNode ->arrayNode('twig_globals') ->info('Variables available in all Twig templates for current SiteAccess.') ->normalizeKeys(false) ->useAttributeAsKey('variable_name') ->example(array('foo' => '"bar"', 'pi' => 3.14)) ->prototype('variable')->end() ->end() ->booleanNode('enable_email_authentication') ->info('Whether eZ users can authenticate against their e-mail or not.') ->defaultFalse() ->end(); return $treeBuilder; } }
Use pyaccess as the package name.
from setuptools import setup, Extension import numpy as np import os extension_name = '_pyaccess' extension_version = '.1' include_dirs = [ 'ann_1.1.2/include', 'sparsehash-2.0.2/src', np.get_include(), '.' ] library_dirs = [ 'ann_1.1.2/lib', 'contraction_hierarchies' ] packages = ['pyaccess'] libraries = [ 'ANN', 'ch', 'gomp'] source_files = [ 'pyaccess/accessibility.cpp', 'pyaccess/graphalg.cpp', 'pyaccess/nearestneighbor.cpp', 'pyaccess/pyaccesswrap.cpp' ] extra_compile_args = [ '-shared', '-DMACOSX', '-DLINUX', '-w', '-std=gnu++0x', '-O3', '-fopenmp', '-fpic', '-g', '-Wno-deprecated', ] py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess'] setup( packages=packages, py_modules=py_modules, name='pyaccess', version=extension_version, ext_modules=[ Extension( extension_name, source_files, include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_compile_args=extra_compile_args ) ] )
from setuptools import setup, Extension import numpy as np import os extension_name = '_pyaccess' extension_version = '.1' include_dirs = [ 'ann_1.1.2/include', 'sparsehash-2.0.2/src', np.get_include(), '.' ] library_dirs = [ 'ann_1.1.2/lib', 'contraction_hierarchies' ] packages = ['pyaccess'] libraries = [ 'ANN', 'ch', 'gomp'] source_files = [ 'pyaccess/accessibility.cpp', 'pyaccess/graphalg.cpp', 'pyaccess/nearestneighbor.cpp', 'pyaccess/pyaccesswrap.cpp' ] extra_compile_args = [ '-shared', '-DMACOSX', '-DLINUX', '-w', '-std=gnu++0x', '-O3', '-fopenmp', '-fpic', '-g', '-Wno-deprecated', # '-ferror-limit=1' ] py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess'] setup( packages=packages, py_modules=py_modules, name=extension_name, version=extension_version, ext_modules=[ Extension( extension_name, source_files, include_dirs=include_dirs, library_dirs=library_dirs, libraries=libraries, extra_compile_args=extra_compile_args ) ] )
Fix incorrect global comment in include path script
#!/usr/bin/env python3 """ Parses .vscode/.cmaketools.json to obtain a list of include paths. These can then be subsequently pasted into .vscode/c_cpp_properties.json to make intellisense work. This is script exists purely for convenience and only needs to be used when the include paths change (e.g. when a new dependency is added). """ import json import os import sys def iterate_over(dict_or_list, result): """ Iterates recursively over nested lists and dictionaries keeping track of all "path" values with the key "includePath" within nested dictionaries. """ if isinstance(dict_or_list, list): for child in dict_or_list: iterate_over(child, result) elif isinstance(dict_or_list, dict): for key, value in dict_or_list.items(): if key == "includePath": for child in value: result.add(child["path"]) else: iterate_over(value, result) def main(arguments): """Main function of this program.""" workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir)) print("Workspace root: '{}'".format(workspace)) with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f: data = json.loads(f.read()) result = set() iterate_over(data, result) result = [x.replace(workspace, "${workspaceRoot}") for x in result] print(json.dumps(result, indent=0)) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python3 """ Deploys the website to a target directory supplied as an argument. """ import json import os import sys def iterate_over(dict_or_list, result): """ Iterates recursively over nested lists and dictionaries keeping track of all "path" values with the key "includePath" within nested dictionaries. """ if isinstance(dict_or_list, list): for child in dict_or_list: iterate_over(child, result) elif isinstance(dict_or_list, dict): for key, value in dict_or_list.items(): if key == "includePath": for child in value: result.add(child["path"]) else: iterate_over(value, result) def main(arguments): """Main function of this program.""" workspace = os.path.realpath(os.path.join(__file__, os.pardir, os.pardir)) print("Workspace root: '{}'".format(workspace)) with open(os.path.join(workspace, ".vscode", ".cmaketools.json")) as f: data = json.loads(f.read()) result = set() iterate_over(data, result) result = [x.replace(workspace, "${workspaceRoot}") for x in result] print(json.dumps(result, indent=0)) if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
Remove unnecessary explicit index check.
// Place your library's code here // // If you add additional files, be sure to // load them in order in ./wrapper.js // import _ from 'underscore'; import moment from 'moment'; var ConsecutiveSegments = { // Segment an array of events by scale group(segments, scale='weeks') { if (_.isEmpty(segments)) { return []; } let currentGroup = 0, currentMoment, prevMoment; return _.chain(segments) .map((events, timestamp) => { return { timestamp, events }; }) .reduce((memo, s, index, segments) => { // Check to see if the current group is the same // as the previous group by computing the difference // in their timestamps. They aren't consecutive when // the difference is > 1 if (index) { currentMoment = moment.unix(s.timestamp).utc(); prevMoment = moment.unix(segments[index - 1].timestamp).utc(); if (currentMoment.diff(prevMoment, scale) > 1) { currentGroup++; } } // Ensure that the group exists, then push to it if (!memo[currentGroup]) { memo[currentGroup] = []; } memo[currentGroup].push(_.clone(s)); return memo; }, []) .value(); } }; export default ConsecutiveSegments;
// Place your library's code here // // If you add additional files, be sure to // load them in order in ./wrapper.js // import _ from 'underscore'; import moment from 'moment'; var ConsecutiveSegments = { // Segment an array of events by scale group(segments, scale='weeks') { if (_.isEmpty(segments)) { return []; } let currentGroup = 0, currentMoment, prevMoment; return _.chain(segments) .map((events, timestamp) => { return { timestamp, events }; }) .reduce((memo, s, index, segments) => { // Check to see if the current group is the same // as the previous group by computing the difference // in their timestamps. They aren't consecutive when // the difference is > 1 if (index !== 0) { currentMoment = moment.unix(s.timestamp).utc(); prevMoment = moment.unix(segments[index - 1].timestamp).utc(); if (currentMoment.diff(prevMoment, scale) > 1) { currentGroup++; } } // Ensure that the group exists, then push to it if (!memo[currentGroup]) { memo[currentGroup] = []; } memo[currentGroup].push(_.clone(s)); return memo; }, []) .value(); } }; export default ConsecutiveSegments;
Change artifact location to suit CircleCI
module.exports = function(grunt) { grunt.loadNpmTasks("grunt-mocha-test"); grunt.loadNpmTasks("grunt-mocha-istanbul"); var testOutputLocation = process.env.CIRCLE_TEST_REPORTS || "test_output"; var artifactsLocation = process.env.CIRCLE_ARTIFACTS || "build_artifacts"; grunt.initConfig({ mochaTest: { test: { src: ["test/**/*.js"] }, ci: { src: ["test/**/*.js"], options: { reporter: "xunit", captureFile: testOutputLocation + "/mocha/results.xml", quiet: true } } }, mocha_istanbul: { coverage: { src: ["test/**/*.js"], options: { coverageFolder: artifactsLocation, check: { lines: 100, statements: 100, branches: 100, functions: 100 }, reportFormats: ["lcov"] } } } }); grunt.registerTask("test", ["mochaTest:test", "mocha_istanbul"]); grunt.registerTask("ci-test", ["mochaTest:ci", "mocha_istanbul"]); grunt.registerTask("default", "test"); };
module.exports = function(grunt) { grunt.loadNpmTasks("grunt-mocha-test"); grunt.loadNpmTasks("grunt-mocha-istanbul"); var testOutputLocation = process.env.CIRCLE_TEST_REPORTS || "test_output"; var artifactsLocation = process.env.CIRCLE_ARTIFACTS || "build_artifacts"; grunt.initConfig({ mochaTest: { test: { src: ["test/**/*.js"] }, ci: { src: ["test/**/*.js"], options: { reporter: "xunit", captureFile: testOutputLocation + "/mocha/results.xml", quiet: true } } }, mocha_istanbul: { coverage: { src: ["test/**/*.js"], options: { coverageFolder: artifactsLocation + "/coverage", check: { lines: 100, statements: 100, branches: 100, functions: 100 }, reportFormats: ["lcov"] } } } }); grunt.registerTask("test", ["mochaTest:test", "mocha_istanbul"]); grunt.registerTask("ci-test", ["mochaTest:ci", "mocha_istanbul"]); grunt.registerTask("default", "test"); };
Change compile CLI entry point name to urbansim_compile.
from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='urbansim', version='0.2dev', description='Tool for modeling metropolitan real estate markets', author='Synthicity', author_email='[email protected]', license='AGPL', url='https://github.com/synthicity/urbansim', classifiers=[ 'Development Status :: 4 - Beta', 'Programming Language :: Python :: 2.7', 'License :: OSI Approved :: GNU Affero General Public License v3' ], packages=find_packages(exclude=['urbansimd', '*.tests']), package_data={'synthicity.urbansim': ['templates/*.template']}, install_requires=[ 'Django>=1.6.2', 'jinja2>=2.7.2', 'numpy>=1.8.0', 'pandas>=0.13.1', 'patsy>=0.2.1', 'scipy>=0.13.3', 'shapely>=1.3.0', 'simplejson>=3.3.3', 'statsmodels>=0.5.0', 'tables>=3.1.0' ], entry_points={ 'console_scripts': [ 'urbansim_compile = synthicity.urbansim.compilecli:main' ] } )
from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='urbansim', version='0.2dev', description='Tool for modeling metropolitan real estate markets', author='Synthicity', author_email='[email protected]', license='AGPL', url='https://github.com/synthicity/urbansim', classifiers=[ 'Development Status :: 4 - Beta', 'Programming Language :: Python :: 2.7', 'License :: OSI Approved :: GNU Affero General Public License v3' ], packages=find_packages(exclude=['urbansimd', '*.tests']), package_data={'synthicity.urbansim': ['templates/*.template']}, install_requires=[ 'Django>=1.6.2', 'jinja2>=2.7.2', 'numpy>=1.8.0', 'pandas>=0.13.1', 'patsy>=0.2.1', 'scipy>=0.13.3', 'shapely>=1.3.0', 'simplejson>=3.3.3', 'statsmodels>=0.5.0', 'tables>=3.1.0' ], entry_points={ 'console_scripts': [ 'usimcompile = synthicity.urbansim.compilecli:main' ] } )
Fix sort order for refreshed movies list
import {Meteor} from 'meteor/meteor'; import {autorun, toJS} from 'mobx'; import Movies from '../../api/documents/documents' export default class Mongo2Mobx { constructor(store) { this.moviesSubscription = null; this.moviesObserver = null; let moviesDataSync = autorun(() => { let refreshMovies = (store) => { let latestMovies = Movies.find({},{sort:{released:-1}}).fetch(); store.updateMovies(latestMovies); }; if (this.moviesSubscription) { this.moviesSubscription.stop(); } if (this.moviesObserver) { this.moviesObserver.stop(); } store.setMoviesLoading (true); this.moviesSubscription = Meteor.subscribe("documents.list", { onReady: () => { this.moviesObserver = Movies.find().observe({ added: () => { refreshMovies(store); }, changed: () => { refreshMovies(store); }, removed: () => { refreshMovies(store); } }); store.setMoviesLoading(false); } }); }) } }
import {Meteor} from 'meteor/meteor'; import {autorun, toJS} from 'mobx'; import Movies from '../../api/documents/documents' export default class Mongo2Mobx { constructor(store) { this.moviesSubscription = null; this.moviesObserver = null; let moviesDataSync = autorun(() => { let refreshMovies = (store) => { let latestMovies = Movies.find().fetch(); store.updateMovies(latestMovies); }; if (this.moviesSubscription) { this.moviesSubscription.stop(); } if (this.moviesObserver) { this.moviesObserver.stop(); } store.setMoviesLoading (true); this.moviesSubscription = Meteor.subscribe("documents.list", { onReady: () => { this.moviesObserver = Movies.find().observe({ added: () => { refreshMovies(store); }, changed: () => { refreshMovies(store); }, removed: () => { refreshMovies(store); } }); store.setMoviesLoading(false); } }); }) } }
Remove entry point for parse payload documentation
from setuptools import setup, find_packages setup( name='zeit.push', version='1.21.0.dev0', author='gocept, Zeit Online', author_email='[email protected]', url='http://www.zeit.de/', description="Sending push notifications through various providers", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'fb', 'gocept.testing', 'grokcore.component', 'mock', 'pytz', 'requests', 'setuptools', 'tweepy', 'urbanairship >= 1.0', 'zc.sourcefactory', 'zeit.cms >= 2.102.0.dev0', 'zeit.content.article', 'zeit.content.image', 'zeit.objectlog', 'zope.app.appsetup', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.schema', ], entry_points={ 'console_scripts': [ 'facebook-access-token = zeit.push.facebook:create_access_token', 'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation', ], 'fanstatic.libraries': [ 'zeit_push=zeit.push.browser.resources:lib', ], }, )
from setuptools import setup, find_packages setup( name='zeit.push', version='1.21.0.dev0', author='gocept, Zeit Online', author_email='[email protected]', url='http://www.zeit.de/', description="Sending push notifications through various providers", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'fb', 'gocept.testing', 'grokcore.component', 'mock', 'pytz', 'requests', 'setuptools', 'tweepy', 'urbanairship >= 1.0', 'zc.sourcefactory', 'zeit.cms >= 2.102.0.dev0', 'zeit.content.article', 'zeit.content.image', 'zeit.objectlog', 'zope.app.appsetup', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.schema', ], entry_points={ 'console_scripts': [ 'facebook-access-token = zeit.push.facebook:create_access_token', 'parse-payload-doc = zeit.push.parse:print_payload_documentation', 'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation', ], 'fanstatic.libraries': [ 'zeit_push=zeit.push.browser.resources:lib', ], }, )
Handle fields without validation objects attached
import Ember from 'ember'; import layout from '../templates/components/smd-form'; export default Ember.Component.extend({ // Services toaster: Ember.inject.service('smd-toaster'), // Attributes layout, tagName: 'form', model: null, errorMessage: 'Please correct the errors in the form', // Computed childFormControls: Ember.computed('childViews', function() { var childViews = this.get('childViews'); if (childViews) { return childViews.filter(function(currentChildView) { return currentChildView.constructor.toString().indexOf('smd-form-control') !== -1; }); } }), // Events submit(e) { e.preventDefault(); var errorMessages = []; var formControls = this.get('childFormControls'); formControls.forEach( function(formControl) { if (formControl.get('validation') && !formControl.get('validation.isValid')) { errorMessages.addObjects(formControl.get('validation.errors').mapBy('message')); } formControl.set('didValidate', true); } ); if (errorMessages.length !== 0) { var errorMessage = errorMessages.join(', '); Ember.Logger.log("Errors: " + errorMessage); this.get('toaster').setMessage(this.get('errorMessage')); return; } this.sendAction('action', this.get('model')); }, didInsertElement() { this._super(...arguments); } });
import Ember from 'ember'; import layout from '../templates/components/smd-form'; export default Ember.Component.extend({ // Services toaster: Ember.inject.service('smd-toaster'), // Attributes layout, tagName: 'form', model: null, errorMessage: 'Please correct the errors in the form', // Computed childFormControls: Ember.computed('childViews', function() { var childViews = this.get('childViews'); if (childViews) { return childViews.filter(function(currentChildView) { return currentChildView.constructor.toString().indexOf('smd-form-control') !== -1; }); } }), // Events submit(e) { e.preventDefault(); var errorMessages = []; var formControls = this.get('childFormControls'); formControls.forEach( function(formControl) { if (!formControl.get('validation.isValid')) { errorMessages.addObjects(formControl.get('validation.errors').mapBy('message')); } formControl.set('didValidate', true); } ); if (errorMessages.length !== 0) { var errorMessage = errorMessages.join(', '); Ember.Logger.log("Errors: " + errorMessage); this.get('toaster').setMessage(this.get('errorMessage')); return; } this.sendAction('action', this.get('model')); }, didInsertElement() { this._super(...arguments); } });
Remove trailing newlines from test cases
import unittest from BKKCrypt import BKKCrypt import urllib.request class TestEncodeStrings(unittest.TestCase): def test_simple_strings(self): self.assertEqual(BKKCrypt('adminadmin'), 'adminadmin') self.assertEqual(BKKCrypt('hunter2'), 'hunter2') self.assertEqual(BKKCrypt('password'), 'password') def test_PIN_numbers(self): self.assertEqual(BKKCrypt(1234), 1234) self.assertEqual(BKKCrypt('1234'), '1234') self.assertEqual(BKKCrypt(6969), 6969) self.assertEqual(BKKCrypt('6969'), '6969') def test_empty_passwords(self): self.assertEqual(BKKCrypt(''), '') class TestBLNS(unittest.TestCase): """Test strings from https://github.com/minimaxir/big-list-of-naughty-strings/""" blns_request = urllib.request.urlretrieve("https://raw.githubusercontent.com/minimaxir/big-list-of-naughty-strings/master/blns.txt") with open(blns_request[0]) as blns_file: blns = blns_file.readlines() blns = list(map(lambda x: x.strip(), blns)) def test_blns_lines(self): for line in self.blns: with self.subTest(line=line): self.assertEqual(BKKCrypt(line), line) if __name__ == '__main__': unittest.main()
import unittest from BKKCrypt import BKKCrypt import urllib.request class TestEncodeStrings(unittest.TestCase): def test_simple_strings(self): self.assertEqual(BKKCrypt('adminadmin'), 'adminadmin') self.assertEqual(BKKCrypt('hunter2'), 'hunter2') self.assertEqual(BKKCrypt('password'), 'password') def test_PIN_numbers(self): self.assertEqual(BKKCrypt(1234), 1234) self.assertEqual(BKKCrypt('1234'), '1234') self.assertEqual(BKKCrypt(6969), 6969) self.assertEqual(BKKCrypt('6969'), '6969') def test_empty_passwords(self): self.assertEqual(BKKCrypt(''), '') class TestBLNS(unittest.TestCase): """Test strings from https://github.com/minimaxir/big-list-of-naughty-strings/""" blns_request = urllib.request.urlretrieve("https://raw.githubusercontent.com/minimaxir/big-list-of-naughty-strings/master/blns.txt") with open(blns_request[0]) as blns_file: blns = blns_file.readlines() def test_blns_lines(self): for line in self.blns: with self.subTest(line=line): self.assertEqual(BKKCrypt(line), line) if __name__ == '__main__': unittest.main()
Allow get specs to everyone
<?php namespace Application\Controller\Api; use Zend\Db\Sql; use Zend\Db\TableGateway\TableGateway; use Zend\Mvc\Controller\AbstractRestfulController; use Zend\View\Model\JsonModel; class SpecController extends AbstractRestfulController { /** * @var TableGateway */ private $table; public function __construct(TableGateway $table) { $this->table = $table; } private function getSpecOptions(int $parentId = 0): array { $select = new Sql\Select($this->table->getTable()); $select->order('name'); if ($parentId) { $select->where([ 'parent_id' => $parentId ]); } else { $select->where(['parent_id is null']); } $rows = $this->table->selectWith($select); $result = []; foreach ($rows as $row) { $result[] = [ 'id' => (int)$row['id'], 'name' => $row['name'], 'short_name' => $row['short_name'], 'childs' => $this->getSpecOptions($row['id']) ]; } return $result; } public function indexAction() { return new JsonModel([ 'items' => $this->getSpecOptions(0), ]); } }
<?php namespace Application\Controller\Api; use Zend\Db\Sql; use Zend\Db\TableGateway\TableGateway; use Zend\Mvc\Controller\AbstractRestfulController; use Zend\View\Model\JsonModel; class SpecController extends AbstractRestfulController { /** * @var TableGateway */ private $table; public function __construct(TableGateway $table) { $this->table = $table; } private function getSpecOptions(int $parentId = 0): array { $select = new Sql\Select($this->table->getTable()); $select->order('name'); if ($parentId) { $select->where([ 'parent_id' => $parentId ]); } else { $select->where(['parent_id is null']); } $rows = $this->table->selectWith($select); $result = []; foreach ($rows as $row) { $result[] = [ 'id' => (int)$row['id'], 'name' => $row['name'], 'short_name' => $row['short_name'], 'childs' => $this->getSpecOptions($row['id']) ]; } return $result; } public function indexAction() { if (! $this->user()->inheritsRole('moder')) { return $this->forbiddenAction(); } return new JsonModel([ 'items' => $this->getSpecOptions(0), ]); } }
Make current request available to `ListingPage`s item methods This is an awful hack. Hopefully we can find a better way. See ICEKit ticket #154 in Assembla
from django.conf.urls import patterns, url from django.http import Http404 from django.template.response import TemplateResponse from fluent_pages.extensions import page_type_pool from icekit.page_types.layout_page.admin import LayoutPageAdmin from icekit.plugins import ICEkitFluentContentsPagePlugin class ListingPagePlugin(ICEkitFluentContentsPagePlugin): render_template = 'icekit/layouts/listing.html' model_admin = LayoutPageAdmin # TODO Awful hack to make request available to listing page class as # `_request` class attribute. There must be a better way... def get_response(self, request, page, **kwargs): page._plugin_request = request return super(ListingPagePlugin, self).get_response( request, page, **kwargs) def get_view_response(self, request, page, view_func, view_args, view_kwargs): """ Render the custom view that was exposed by the extra plugin URL patterns. This gives the ability to add extra middleware logic. """ return view_func(request, page, *view_args, **view_kwargs) def _detail_view(request, parent, slug): try: page = parent.get_visible_items().get(slug=slug) except: raise Http404 # If the article defines its own response, use that. if hasattr(page, 'get_response'): return page.get_response(request, parent=parent) raise AttributeError("Make sure to define `get_response()` in your item's model, or set `detail_view' on your Listing Page plugin.") detail_view = _detail_view urls = patterns('', url( '^(?P<slug>[-\w]+)/$', detail_view, ), )
from django.conf.urls import patterns, url from django.http import Http404 from django.template.response import TemplateResponse from fluent_pages.extensions import page_type_pool from icekit.page_types.layout_page.admin import LayoutPageAdmin from icekit.plugins import ICEkitFluentContentsPagePlugin class ListingPagePlugin(ICEkitFluentContentsPagePlugin): render_template = 'icekit/layouts/listing.html' model_admin = LayoutPageAdmin def get_view_response(self, request, page, view_func, view_args, view_kwargs): """ Render the custom view that was exposed by the extra plugin URL patterns. This gives the ability to add extra middleware logic. """ return view_func(request, page, *view_args, **view_kwargs) def _detail_view(request, parent, slug): try: page = parent.get_visible_items().get(slug=slug) except: raise Http404 # If the article defines its own response, use that. if hasattr(page, 'get_response'): return page.get_response(request, parent=parent) raise AttributeError("Make sure to define `get_response()` in your item's model, or set `detail_view' on your Listing Page plugin.") detail_view = _detail_view urls = patterns('', url( '^(?P<slug>[-\w]+)/$', detail_view, ), )
Update shortcut keys for new signature
$(document).ready(function() { $(document).keyup(function(e) { var tag = e.target.tagName.toLowerCase(); if (tag != 'input' && tag != 'textarea' && tag != 'select' && !e.ctrlKey) { if (e.keyCode==78 || e.keyCode==77) { $('.nav-menu-icon').trigger('click'); } else if (e.keyCode==65) { $('.search-menu-icon').trigger('click'); } else if (e.keyCode==73 && e.shiftKey) { $( "#new-ip" ).click(); } else if (e.keyCode==73) { $( "#new-indicator" ).click(); } else if (e.keyCode==69 && e.shiftKey) { $( "#new-event" ).click(); } else if (e.keyCode==69) { $( "#new-email-yaml" ).click(); } else if (e.keyCode==68) { $( "#new-domain" ).click(); } else if (e.keyCode==80) { $( "#new-pcap" ).click(); } else if (e.keyCode==83 && e.shiftKey) { $( "#new-signature" ).click(); } else if (e.keyCode==83) { $( "#new-sample" ).click(); } else if (e.keyCode==27) { $( ".mm-opened").trigger('close'); } else if (e.shiftKey && e.keyCode==191) { $( "#shortcut-keys").click(); } } }); });
$(document).ready(function() { $(document).keyup(function(e) { var tag = e.target.tagName.toLowerCase(); if (tag != 'input' && tag != 'textarea' && tag != 'select' && !e.ctrlKey) { if (e.keyCode==78 || e.keyCode==77) { $('.nav-menu-icon').trigger('click'); } else if (e.keyCode==65) { $('.search-menu-icon').trigger('click'); } else if (e.keyCode==73 && e.shiftKey) { $( "#new-ip" ).click(); } else if (e.keyCode==73) { $( "#new-indicator" ).click(); } else if (e.keyCode==69 && e.shiftKey) { $( "#new-event" ).click(); } else if (e.keyCode==69) { $( "#new-email-yaml" ).click(); } else if (e.keyCode==68) { $( "#new-domain" ).click(); } else if (e.keyCode==80) { $( "#new-pcap" ).click(); } else if (e.keyCode==83) { $( "#new-sample" ).click(); } else if (e.keyCode==71) { $( "#new-signature" ).click(); } else if (e.keyCode==27) { $( ".mm-opened").trigger('close'); } else if (e.shiftKey && e.keyCode==191) { $( "#shortcut-keys").click(); } } }); });
telemetry: Fix an import path in the Android screen recorder Review URL: https://codereview.chromium.org/1301613004 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import subprocess from telemetry.core import util from telemetry.internal.platform import profiler from telemetry.internal.backends.chrome import android_browser_finder class AndroidScreenRecordingProfiler(profiler.Profiler): """Captures a screen recording on Android.""" def __init__(self, browser_backend, platform_backend, output_path, state): super(AndroidScreenRecordingProfiler, self).__init__( browser_backend, platform_backend, output_path, state) self._output_path = output_path + '.mp4' self._recorder = subprocess.Popen( [os.path.join(util.GetChromiumSrcDir(), 'build', 'android', 'screenshot.py'), '--video', '--file', self._output_path, '--device', browser_backend.device.adb.GetDeviceSerial()], stdin=subprocess.PIPE, stdout=subprocess.PIPE) @classmethod def name(cls): return 'android-screen-recorder' @classmethod def is_supported(cls, browser_type): if browser_type == 'any': return android_browser_finder.CanFindAvailableBrowsers() return browser_type.startswith('android') def CollectProfile(self): self._recorder.communicate(input='\n') print 'Screen recording saved as %s' % self._output_path print 'To view, open in Chrome or a video player' return [self._output_path]
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import subprocess from telemetry.internal.platform import profiler from telemetry.internal import util from telemetry.internal.backends.chrome import android_browser_finder class AndroidScreenRecordingProfiler(profiler.Profiler): """Captures a screen recording on Android.""" def __init__(self, browser_backend, platform_backend, output_path, state): super(AndroidScreenRecordingProfiler, self).__init__( browser_backend, platform_backend, output_path, state) self._output_path = output_path + '.mp4' self._recorder = subprocess.Popen( [os.path.join(util.GetChromiumSrcDir(), 'build', 'android', 'screenshot.py'), '--video', '--file', self._output_path, '--device', browser_backend.device.adb.GetDeviceSerial()], stdin=subprocess.PIPE, stdout=subprocess.PIPE) @classmethod def name(cls): return 'android-screen-recorder' @classmethod def is_supported(cls, browser_type): if browser_type == 'any': return android_browser_finder.CanFindAvailableBrowsers() return browser_type.startswith('android') def CollectProfile(self): self._recorder.communicate(input='\n') print 'Screen recording saved as %s' % self._output_path print 'To view, open in Chrome or a video player' return [self._output_path]
Change default baud rate to 230400 to match cantranslator.
"""A virtual serial port data source.""" from __future__ import absolute_import import logging from .base import BytestreamDataSource, DataSourceError LOG = logging.getLogger(__name__) try: import serial except ImportError: LOG.debug("serial library not installed, can't use serial interface") class SerialDataSource(BytestreamDataSource): """A data source reading from a serial port, which could be implemented with a USB to Serial or Bluetooth adapter. """ DEFAULT_PORT = "/dev/ttyUSB0" DEFAULT_BAUDRATE = 230400 def __init__(self, callback=None, port=None, baudrate=None): """Initialize a connection to the serial device. Kwargs: port - optionally override the default virtual COM port baudrate - optionally override the default baudrate Raises: DataSourceError if the serial device cannot be opened. """ super(SerialDataSource, self).__init__(callback) port = port or self.DEFAULT_PORT baudrate = baudrate or self.DEFAULT_BAUDRATE try: self.device = serial.Serial(port, baudrate, rtscts=True) except serial.SerialException as e: raise DataSourceError("Unable to open serial device at port " "%s: %s" % (port, e)) else: LOG.debug("Opened serial device at %s", port) def _read(self): return self.device.readline()
"""A virtual serial port data source.""" from __future__ import absolute_import import logging from .base import BytestreamDataSource, DataSourceError LOG = logging.getLogger(__name__) try: import serial except ImportError: LOG.debug("serial library not installed, can't use serial interface") class SerialDataSource(BytestreamDataSource): """A data source reading from a serial port, which could be implemented with a USB to Serial or Bluetooth adapter. """ DEFAULT_PORT = "/dev/ttyUSB0" DEFAULT_BAUDRATE = 115200 def __init__(self, callback=None, port=None, baudrate=None): """Initialize a connection to the serial device. Kwargs: port - optionally override the default virtual COM port baudrate - optionally override the default baudrate Raises: DataSourceError if the serial device cannot be opened. """ super(SerialDataSource, self).__init__(callback) port = port or self.DEFAULT_PORT baudrate = baudrate or self.DEFAULT_BAUDRATE try: self.device = serial.Serial(port, baudrate, rtscts=True) except serial.SerialException as e: raise DataSourceError("Unable to open serial device at port " "%s: %s" % (port, e)) else: LOG.debug("Opened serial device at %s", port) def _read(self): return self.device.readline()
Drop support for EOL Python 2.6 and 3.3
#!/usr/bin/env python import os from setuptools import setup ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ 'six >= 1.4.0', ] version = None exec(open('dockerpycreds/version.py').read()) with open('./test-requirements.txt') as test_reqs_txt: test_requirements = [line for line in test_reqs_txt] setup( name="docker-pycreds", version=version, description="Python bindings for the docker credentials store API", url='https://github.com/shin-/dockerpy-creds', license='Apache License 2.0', packages=[ 'dockerpycreds', ], install_requires=requirements, tests_require=test_requirements, zip_safe=False, test_suite='tests', python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Other Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Utilities', 'License :: OSI Approved :: Apache Software License', ], )
#!/usr/bin/env python import os from setuptools import setup ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ 'six >= 1.4.0', ] version = None exec(open('dockerpycreds/version.py').read()) with open('./test-requirements.txt') as test_reqs_txt: test_requirements = [line for line in test_reqs_txt] setup( name="docker-pycreds", version=version, description="Python bindings for the docker credentials store API", url='https://github.com/shin-/dockerpy-creds', license='Apache License 2.0', packages=[ 'dockerpycreds', ], install_requires=requirements, tests_require=test_requirements, zip_safe=False, test_suite='tests', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Other Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Utilities', 'License :: OSI Approved :: Apache Software License', ], )
Add support for `skipNested` field option. + The `skip` option removes `N` elements from the top-level list. This works fine for a simple list but does not work for nested lists, e.g. for `Classification` which is a list of hierarchies. + Now `skipNested` will remove `N` items from the second-level (nested) lists, which is good for lists of hierarchies or lists of lists.
(function () { 'use strict'; define( [ 'lodash', 'knockout', 'util/safelyParseJson' ], function (_, ko, parse) { var idField = 'object_id'; return function (result, fields) { result = ko.unwrap(result); this.id = ko.pureComputed(function () { return result[idField]; }); this.data = ko.pureComputed(function () { return _(ko.unwrap(fields)) .mapKeys('key') .mapValues(function (field) { var value = result[field.key] || ''; if (field.parse) { value = parse(value); if (field.skip) { value = _.drop(value, field.skip); } if (field.skipNested) { value = _.map(value, function (item) { return _.drop(item, field.skip); }); } if (field.filter === true) { value = _.filter(value); } else if (_.isString(field.filter)) { value = _.filter(value, field.filter); } } return value; }.bind(this)) .value(); }); }; } ); }());
(function () { 'use strict'; define( [ 'lodash', 'knockout', 'util/safelyParseJson' ], function (_, ko, parse) { var idField = 'object_id'; return function (result, fields) { result = ko.unwrap(result); this.id = ko.pureComputed(function () { return result[idField]; }); this.data = ko.pureComputed(function () { return _(ko.unwrap(fields)) .mapKeys('key') .mapValues(function (field) { var value = result[field.key] || ''; if (field.parse) { value = parse(value); if (field.skip) { value = _.drop(value, field.skip); } if (field.filter === true) { value = _.filter(value); } else if (_.isString(field.filter)) { value = _.filter(value, field.filter); } } return value; }.bind(this)) .value(); }); }; } ); }());
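A minimal sketch of the `skip` vs `skipNested` behaviour described in the entry above, assuming lodash; the sample arrays and variable names are illustrative only.

var _ = require('lodash');

var flat = ['header', 'a', 'b', 'c'];
var nested = [['header', 'x1', 'x2'], ['header', 'y1', 'y2']];

// `skip: N` drops N elements from the top-level list.
var skipped = _.drop(flat, 1); // ['a', 'b', 'c']

// `skipNested` with `skip: N` drops N elements from each second-level list instead.
var skippedNested = _.map(nested, function (item) {
  return _.drop(item, 1);
}); // [['x1', 'x2'], ['y1', 'y2']]

console.log(skipped, skippedNested);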
Add forgotten fields to Location schema
module.exports = function(config) { ['MONGO_URL', 'MONGO_USERNAME', 'MONGO_PASSWORD'].forEach(function(envvar) { if (config[envvar] === undefined) { throw new Error('Mongo is missing ' + envvar); } }); var mongoose = require('mongoose'); var db = mongoose.connection; var schemas = {}; var eventSchema = new mongoose.Schema({ 'category': String, 'coordinates': { 'latitude': Number, 'longitude': Number }, 'name': String, 'originalId': String, 'origin': String, 'url': String }); var locationSchema = new mongoose.Schema({ 'category': String, 'coordinates': { 'latitude': Number, 'longitude': Number }, 'title': String, 'url': String }); var userSchema = new mongoose.Schema({ 'coordinates': { 'latitude': Number, 'longitude': Number }, 'dateOfBirth': Date, 'email': String, 'familyName': String, 'givenName': String, 'phoneNumber': String, 'tags': [String] }); schemas.Event = mongoose.model('Event', eventSchema); schemas.Location = mongoose.model('Location', locationSchema); schemas.User = mongoose.model('User', userSchema); mongoose.connect(config.MONGO_URL, { user: config.MONGO_USERNAME, password: config.MONGO_PASSWORD }); return { db: db, schemas: schemas }; };
module.exports = function(config) { ['MONGO_URL', 'MONGO_USERNAME', 'MONGO_PASSWORD'].forEach(function(envvar) { if (config[envvar] === undefined) { throw new Error('Mongo is missing ' + envvar); } }); var mongoose = require('mongoose'); var db = mongoose.connection; var schemas = {}; var eventSchema = new mongoose.Schema({ 'category': String, 'coordinates': { 'latitude': Number, 'longitude': Number }, 'name': String, 'originalId': String, 'origin': String, 'url': String }); var locationSchema = new mongoose.Schema({ 'category': String, 'coordinates': { 'latitude': Number, 'longitude': Number } }); var userSchema = new mongoose.Schema({ 'coordinates': { 'latitude': Number, 'longitude': Number }, 'dateOfBirth': Date, 'email': String, 'familyName': String, 'givenName': String, 'phoneNumber': String, 'tags': [String] }); schemas.Event = mongoose.model('Event', eventSchema); schemas.Location = mongoose.model('Location', locationSchema); schemas.User = mongoose.model('User', userSchema); mongoose.connect(config.MONGO_URL, { user: config.MONGO_USERNAME, password: config.MONGO_PASSWORD }); return { db: db, schemas: schemas }; };
Add import option to nav
<nav class="navigation -white -floating"> <a class="navigation__logo" href="/"><span>DoSomething.org</span></a> <div class="navigation__menu"> @if (Auth::user()) <ul class="navigation__primary"> <li> <a href="/campaigns"> <strong class="navigation__title">Campaign Overview</strong> </a> </li> <li> <a href="/users"> <strong class="navigation__title">User Search</strong> </a> </li> <li> <a href="/import"> <strong class="navigation__title">Import CSV</strong> </a> </li> <li> <a href="/faq"> <strong class="navigation__title">FAQ</strong> </a> </li> </ul> <ul class="navigation__secondary"> <li> <a href="/logout">Log Out</a> </li> </ul> @endif </div> </nav>
<nav class="navigation -white -floating"> <a class="navigation__logo" href="/"><span>DoSomething.org</span></a> <div class="navigation__menu"> @if (Auth::user()) <ul class="navigation__primary"> <li> <a href="/campaigns"> <strong class="navigation__title">Campaign Overview</strong> </a> </li> <li> <a href="/users"> <strong class="navigation__title">User Search</strong> </a> </li> <li> <a href="/faq"> <strong class="navigation__title">FAQ</strong> </a> </li> </ul> <ul class="navigation__secondary"> <li> <a href="/logout">Log Out</a> </li> </ul> @endif </div> </nav>
Fix typo GYP_DEF_target_arch v GPP_DEF_target_arch BUG= Review URL: https://codereview.chromium.org/218623005 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@260590 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A module to add gyp support to cr.""" import cr import os GYP_DEFINE_PREFIX = 'GYP_DEF_' class GypPrepareOut(cr.PrepareOut): """A prepare action that runs gyp whenever you select an output directory.""" ENABLED = cr.Config.From( GYP_GENERATORS='ninja', GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}', GYP_DEF_target_arch='{CR_ENVSETUP_ARCH}', ) def Prepare(self): # Collapse GYP_DEFINES from all GYP_DEF prefixes gyp_defines = cr.context.Find('GYP_DEFINES') or '' for key, value in cr.context.exported.items(): if key.startswith(GYP_DEFINE_PREFIX): gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value) cr.context['GYP_DEFINES'] = gyp_defines.strip() if cr.context.verbose >= 1: print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}') print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}') cr.Host.Execute( '{CR_SRC}/build/gyp_chromium', '--depth={CR_SRC}', '--check' )
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """A module to add gyp support to cr.""" import cr import os GYP_DEFINE_PREFIX = 'GYP_DEF_' class GypPrepareOut(cr.PrepareOut): """A prepare action that runs gyp whenever you select an output directory.""" ENABLED = cr.Config.From( GYP_GENERATORS='ninja', GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}', GPP_DEF_target_arch='{CR_ENVSETUP_ARCH}', ) def Prepare(self): # Collapse GYP_DEFINES from all GYP_DEF prefixes gyp_defines = cr.context.Find('GYP_DEFINES') or '' for key, value in cr.context.exported.items(): if key.startswith(GYP_DEFINE_PREFIX): gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value) cr.context['GYP_DEFINES'] = gyp_defines.strip() if cr.context.verbose >= 1: print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}') print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}') cr.Host.Execute( '{CR_SRC}/build/gyp_chromium', '--depth={CR_SRC}', '--check' )
Add post process stage for stray mixin calls
# -*- coding: utf8 -*- """ .. module:: lesscpy.plib.deferred :synopsis: Deferred mixin call. Copyright (c) See LICENSE for details. .. moduleauthor:: Jóhann T. Maríusson <[email protected]> """ from .node import Node class Deferred(Node): def __init__(self, mixin, args): """This node represents mixin calls within the body of other mixins. The calls to these mixins are deferred until the parent mixin is called. args: mixin (Mixin): Mixin object args (list): Call arguments """ self.mixin = mixin self.args = args def parse(self, scope): """ Parse function. args: scope (Scope): Current scope returns: mixed """ if self.args: args = [p.parse(scope) if hasattr(p, 'parse') else p for p in self.args] else: args = [] if hasattr(self.mixin, 'call'): return self.mixin.call(scope, args) mixins = scope.mixins(self.mixin.raw()) if not mixins: return self for mixin in mixins: res = mixin.call(scope, args) if res: return res return False
# -*- coding: utf8 -*- """ .. module:: lesscpy.plib.deferred :synopsis: Deferred mixin call. Copyright (c) See LICENSE for details. .. moduleauthor:: Jóhann T. Maríusson <[email protected]> """ from .node import Node class Deferred(Node): def __init__(self, mixin, args): """This node represents mixin calls within the body of other mixins. The calls to these mixins are deferred until the parent mixin is called. args: mixin (Mixin): Mixin object args (list): Call arguments """ self.mixin = mixin self.args = args def parse(self, scope): """ Parse function. args: scope (Scope): Current scope returns: mixed """ if self.args: args = [p.parse(scope) if hasattr(p, 'parse') else p for p in self.args] else: args = [] if hasattr(self.mixin, 'call'): return self.mixin.call(scope, args) mixins = scope.mixins(self.mixin.raw()) if not mixins: return self #return mixins for mixin in mixins: res = mixin.call(scope, args) if res: return res return self # def fmt(self, fills): # """ # """ # return ''
Set up login component to log the user in correctly; still need to set up an alert to the user
import React, {Component} from 'react'; import loginUser from '../../utils/loginUser'; class Home extends Component { handleSubmit(event) { event.preventDefault(); loginUser({ email: document.getElementById('email').value, password: document.getElementById('password').value }); } render() { return ( <div> <form className='col s12 m12 l12' onSubmit={this.handleSubmit}> <div className='row'> <div className='input-field col s12 m12 l12'> <label htmlFor='email'>Email</label> <input id='email' ref='email' type='text' required/> </div> </div> <div className='row'> <div className='input-field col s12 m12 l12'> <label htmlFor='password'>Password</label> <input id='password' ref='password' type='password' required/> </div> </div> <button className='btn waves-effect waves-light' type='submit' name='login'> Login </button> </form> </div> ); } } export default Home;
import React, {Component} from 'react'; class Home extends Component { handleSubmit(event) { event.preventDefault(); signupUser({ email: document.getElementById('email').value, password: document.getElementById('password').value }); } render() { return ( <div> <form className='col s12 m12 l12' onSubmit={this.handleSubmit}> <div className='row'> <div className='input-field col s12 m12 l12'> <label htmlFor='email'>Email</label> <input id='email' ref='email' type='text' required/> </div> </div> <div className='row'> <div className='input-field col s12 m12 l12'> <label htmlFor='password'>Password</label> <input id='password' ref='password' type='password' required/> </div> </div> <button className='btn waves-effect waves-light' type='submit' name='login'> Login </button> </form> </div> ); } } export default Home;
Set locale on first request
<?php namespace AppBundle\EventListener; use Symfony\Component\HttpKernel\Event\GetResponseEvent; use Symfony\Component\HttpKernel\KernelEvents; use Symfony\Component\EventDispatcher\EventSubscriberInterface; class LocaleListener implements EventSubscriberInterface { /** * @var string */ private $defaultLocale; /** * @param string $defaultLocale */ public function __construct($defaultLocale = 'sv') { $this->defaultLocale = $defaultLocale; } /** * @param GetResponseEvent $event */ public function onKernelRequest(GetResponseEvent $event) { $request = $event->getRequest(); if ($locale = $request->get('_locale')) { $request->getSession()->set('_locale', $locale); $request->setLocale($locale); } else { // if no explicit locale has been set on this request, use one from the session $request->setLocale($request->getSession()->get('_locale', $this->defaultLocale)); } } /** * @return array */ public static function getSubscribedEvents() { return [ // must be registered before the default Locale listener KernelEvents::REQUEST => array(array('onKernelRequest', 17)), ]; } }
<?php namespace AppBundle\EventListener; use Symfony\Component\HttpKernel\Event\GetResponseEvent; use Symfony\Component\HttpKernel\KernelEvents; use Symfony\Component\EventDispatcher\EventSubscriberInterface; class LocaleListener implements EventSubscriberInterface { /** * @var string */ private $defaultLocale; /** * @param string $defaultLocale */ public function __construct($defaultLocale = 'sv') { $this->defaultLocale = $defaultLocale; } /** * @param GetResponseEvent $event */ public function onKernelRequest(GetResponseEvent $event) { $request = $event->getRequest(); if (!$request->hasPreviousSession()) { return; } if ($locale = $request->get('_locale')) { $request->getSession()->set('_locale', $locale); $request->setLocale($locale); } else { // if no explicit locale has been set on this request, use one from the session $request->setLocale($request->getSession()->get('_locale', $this->defaultLocale)); } } /** * @return array */ public static function getSubscribedEvents() { return [ // must be registered before the default Locale listener KernelEvents::REQUEST => array(array('onKernelRequest', 17)), ]; } }
Fix typo in variable name
## Copyright (C) 2011 Aldebaran Robotics """Small set of tools to interact with the user """ #TODO: color! def ask_choice(choices, input_text): """Ask the user to choose from a list of choices """ print "::", input_text for i, choice in enumerate(choices): print " ", (i+1), choice keep_asking = True res = None while keep_asking: answer = raw_input("> ") if not answer: return choices[0] try: index = int(answer) except ValueError: print "Please enter number" continue if index not in range(1, len(choices)+1): print "%i is out of range" % index continue res = choices[index-1] keep_asking = False return res def ask_yes_no(question): """Ask the user to answer by yes or no""" print "::", question, "(y/n)?" answer = raw_input("> ") return answer == "y" def ask_string(question, default=None): """Ask the user to enter something. Returns what the user entered """ if default: question += " (%s)" % default print "::", question answer = raw_input("> ") if not answer: return default return answer
## Copyright (C) 2011 Aldebaran Robotics """Small set of tools to interact with the user """ #TODO: color! def ask_choice(choices, input_text): """Ask the user to choose from a list of choices """ print "::", input_text for i, choice in enumerate(choices): print " ", (i+1), choice keep_asking = True res = None while keep_asking: answer = raw_input("> ") if not answer: return choices[0] try: index = int(answer) except ValueError: print "Please enter number" continue if index not in range(1, len(choices)+1): print "%i is out of range" % index continue res = choices[index-1] keep_asking = False return res def ask_yes_no(question): """Ask the user to answer by yes or no""" print "::", question, "(y/n)?" anwer = raw_input("> ") return anwer == "y" def ask_string(question, default=None): """Ask the user to enter something. Returns what the user entered """ if default: question += " (%s)" % default print "::", question answer = raw_input("> ") if not answer: return default return answer
Fix internal server error at URL /account/recover. Fixed a 500 error at /account/recover when trying to reset password on the login page. Testing Done: Verified that the server no longer returns a 500 error when loading the form. Reviewed at https://reviews.reviewboard.org/r/5431/
from __future__ import unicode_literals from django.conf.urls import patterns, url urlpatterns = patterns( "reviewboard.accounts.views", url(r'^register/$', 'account_register', {'next_url': 'dashboard'}, name="register"), url(r'^preferences/$', 'user_preferences', name="user-preferences"), ) urlpatterns += patterns( "django.contrib.auth.views", url(r'^login/$', 'login', {'template_name': 'accounts/login.html'}, name='login'), url(r'^logout/$', 'logout_then_login', name='logout'), url(r'^recover/$', 'password_reset', { 'template_name': 'accounts/password_reset.html', 'email_template_name': 'accounts/password_reset_email.txt' }, name='recover'), url(r'^recover/done/$', 'password_reset_done', {'template_name': 'accounts/password_reset_done.html'}, name='password_reset_done'), url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', 'password_reset_confirm', {'template_name': 'accounts/password_reset_confirm.html'}, name='password-reset-confirm'), url(r'^reset/done/$', 'password_reset_complete', {'template_name': 'accounts/password_reset_complete.html'}), )
from __future__ import unicode_literals from django.conf.urls import patterns, url urlpatterns = patterns( "reviewboard.accounts.views", url(r'^register/$', 'account_register', {'next_url': 'dashboard'}, name="register"), url(r'^preferences/$', 'user_preferences', name="user-preferences"), ) urlpatterns += patterns( "django.contrib.auth.views", url(r'^login/$', 'login', {'template_name': 'accounts/login.html'}, name='login'), url(r'^logout/$', 'logout_then_login', name='logout'), url(r'^recover/$', 'password_reset', { 'template_name': 'accounts/password_reset.html', 'email_template_name': 'accounts/password_reset_email.txt' }, name='recover'), url(r'^recover/done/$', 'password_reset_done', {'template_name': 'accounts/password_reset_done.html'}), url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', 'password_reset_confirm', {'template_name': 'accounts/password_reset_confirm.html'}, name='password-reset-confirm'), url(r'^reset/done/$', 'password_reset_complete', {'template_name': 'accounts/password_reset_complete.html'}), )
Work better with identified Ably client. This avoids warnings when using an Ably "Realtime" client instance that uses "identified" authentication.
// TODO: // - end-to-end test // - extract update code, inject it as a function? function createAblyHandler(options) { var ably = options.ably var operations = options.operations var fetchOperation = options.fetchOperation return function (operation, variables, cacheConfig, observer) { var channelName, channel // POST the subscription like a normal query fetchOperation(operation, variables, cacheConfig).then(function(response) { channelName = response.headers.get("X-Subscription-ID") channel = ably.channels.get(channelName) // Register presence, so that we can detect empty channels and clean them up server-side if (ably.auth.clientId) { channel.presence.enter("subscribed") } else { channel.presence.enterClient("graphql-subscriber", "subscribed") } // When you get an update from ably, give it to Relay channel.subscribe("update", function(message) { // TODO Extract this code // When we get a response, send the update to `observer` var payload = message.data var result = payload.result if (result && result.errors) { // What kind of error stuff belongs here? observer.onError(result.errors) } else if (result) { observer.onNext({data: result.data}) } if (!payload.more) { // Subscription is finished observer.onCompleted() } }) }) return { dispose: function() { channel.presence.leaveClient() channel.unsubscribe() } } } } module.exports = createAblyHandler
// TODO: // - end-to-end test // - extract update code, inject it as a function? function createAblyHandler(options) { var ably = options.ably var operations = options.operations var fetchOperation = options.fetchOperation return function (operation, variables, cacheConfig, observer) { var channelName, channel // POST the subscription like a normal query fetchOperation(operation, variables, cacheConfig).then(function(response) { channelName = response.headers.get("X-Subscription-ID") channel = ably.channels.get(channelName) // Register presence, so that we can detect empty channels and clean them up server-side channel.presence.enterClient("graphql-subscriber", "subscribed") // When you get an update from ably, give it to Relay channel.subscribe("update", function(message) { // TODO Extract this code // When we get a response, send the update to `observer` var payload = message.data var result = payload.result if (result && result.errors) { // What kind of error stuff belongs here? observer.onError(result.errors) } else if (result) { observer.onNext({data: result.data}) } if (!payload.more) { // Subscription is finished observer.onCompleted() } }) }) return { dispose: function() { channel.presence.leaveClient() channel.unsubscribe() } } } } module.exports = createAblyHandler
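A minimal sketch of the presence branch in the entry above, assuming an Ably Realtime client; the function form and the client-id string are illustrative, while the presence calls mirror the snippet.

// Identified clients already carry a clientId, so presence.enter() is enough;
// calling enterClient() with an explicit id on such a client triggers warnings.
// Anonymous clients fall back to enterClient() with a synthetic id.
function registerPresence(ably, channel) {
  if (ably.auth.clientId) {
    channel.presence.enter('subscribed');
  } else {
    channel.presence.enterClient('graphql-subscriber', 'subscribed');
  }
}

// Usage, assuming an Ably Realtime instance `ably` and a known channel name:
// registerPresence(ably, ably.channels.get('graphql-subscription:example'));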
Check whether user already exists before creating it.
var bcrypt = require('bcrypt'), db = require('./db'); module.exports = { /** * Insert user supplied in `user` propety. * `password` field required for bcrypt to work. * Checks that `username` is unique. */ insert: function(options, callback) { var users = db.coll('users'), data = options.user, password = options.password; bcrypt.hash(password, 10, function(err, hash) { if (err) { return callback(err); } data.hash = hash; data._id = data.username; module.exports.exists(data.username, function(err, exists) { if (err) { return callback(err); } if (exists) { callback("User with id '" + data.username + "' already exists."); } users.insert(data, function(err, users) { var user; if (err) { return callback(err); } user = users[0]; callback(null, user); }); }); }); }, /** * Whether a user with a given id exists */ exists: function(id, callback) { module.exports.getUser(id, function(err, user) { callback(err, !!user); }); }, /** * Gets the user with the given id (i.e. username) */ getUser: function(id, callback) { var users = db.coll('users'); users.findOne({ _id: id }, callback); }, /** * Callback called with null, true if database has no users. */ isEmpty: function(callback) { var users = db.coll('users'); users.count(function(err, count) { callback(err, count === 0); }); } };
var bcrypt = require('bcrypt'), db = require('./db'); module.exports = { insert: function(options, callback) { var users = db.coll('users'), data = options.user, password = options.password; bcrypt.hash(password, 10, function(err, hash) { if (err) { callback(err); } else { data.hash = hash; data._id = data.username; users.insert(data, function(err, users) { var user; if (err) { callback(err); } else { user = users[0]; callback(null, user); } }); } }); }, isEmpty: function(callback) { var users = db.coll('users'); users.count(function(err, count) { callback(err, count === 0); }); } };
Improve example to showcase forceLogin
import React, { Component } from 'react'; import TwitterLogin from 'react-twitter-auth/lib/react-twitter-auth-component.js'; class App extends Component { constructor() { super(); this.onFailed = this.onFailed.bind(this); this.onSuccess = this.onSuccess.bind(this); } onSuccess(response) { response.json().then(body => { alert(JSON.stringify(body)); }); } onFailed(error) { alert(error); } render() { const customHeader = {}; customHeader['Test'] = 'test-header'; return ( <div> <TwitterLogin loginUrl="http://localhost:4000/api/v1/auth/twitter" onFailure={this.onFailed} onSuccess={this.onSuccess} requestTokenUrl="http://localhost:4000/api/v1/auth/twitter/reverse" showIcon={true} customHeaders={customHeader} forceLogin={true}/> <TwitterLogin loginUrl="http://localhost:4000/api/v1/auth/twitter" onFailure={this.onFailed} onSuccess={this.onSuccess} requestTokenUrl="http://localhost:4000/api/v1/auth/twitter/reverse" showIcon={true} customHeaders={customHeader}> <b>Custom</b> Twitter <i>Login</i> content </TwitterLogin> </div> ); } } export default App;
import React, { Component } from 'react'; import TwitterLogin from 'react-twitter-auth/lib/react-twitter-auth-component.js'; class App extends Component { constructor() { super(); this.onFailed = this.onFailed.bind(this); this.onSuccess = this.onSuccess.bind(this); } onSuccess(response) { response.json().then(body => { alert(JSON.stringify(body)); }); } onFailed(error) { alert(error); } render() { const customHeader = {}; customHeader['Test'] = 'test-header'; return ( <div> <TwitterLogin loginUrl="http://localhost:4000/api/v1/auth/twitter" onFailure={this.onFailed} onSuccess={this.onSuccess} requestTokenUrl="http://localhost:4000/api/v1/auth/twitter/reverse" showIcon={true} customHeaders={customHeader} forceLogin={false}/> <TwitterLogin loginUrl="http://localhost:4000/api/v1/auth/twitter" onFailure={this.onFailed} onSuccess={this.onSuccess} requestTokenUrl="http://localhost:4000/api/v1/auth/twitter/reverse" showIcon={true} customHeaders={customHeader}> <b>Custom</b> Twitter <i>Login</i> content </TwitterLogin> </div> ); } } export default App;
Fix - Common list on press function
import React from 'react'; import { View, ScrollView, Image, TouchableHighlight, Text } from 'react-native'; import styles from './styles'; const nextImage = require('assets/next.png'); function onPress(data) { if (data && data.functionOnPress) { data.functionOnPress(data.url); } } function List(props) { return ( <ScrollView> { props.content.map((data, key) => ( <TouchableHighlight key={`link-${key}`} onPress={() => onPress(data)} activeOpacity={1} underlayColor="transparent" > <View key={key} style={props.bigSeparation ? [styles.container, styles.containerBigSeparation] : [styles.container]} > <View style={data.text ? styles.containerImageText : styles.containerOnlyImage}> {data.image && <Image style={styles.imageList} source={data.image} /> } {data.text && <Text style={styles.text}>{data.text}</Text> } </View> <Image style={styles.nextIcon} source={nextImage} /> </View> </TouchableHighlight> ))} </ScrollView> ); } List.propTypes = { content: React.PropTypes.array, bigSeparation: React.PropTypes.bool }; export default List;
import React from 'react'; import { View, ScrollView, Image, TouchableHighlight, Text } from 'react-native'; import styles from './styles'; const nextImage = require('assets/next.png'); function List(props) { return ( <ScrollView> { props.content.map((data, key) => ( <TouchableHighlight key={`link-${key}`} onPress={() => data.functionOnPress.bind(this, data.url)()} activeOpacity={1} underlayColor="transparent" > <View key={key} style={props.bigSeparation ? [styles.container, styles.containerBigSeparation] : [styles.container]} > <View style={data.text ? styles.containerImageText : styles.containerOnlyImage}> {data.image && <Image style={styles.imageList} source={data.image} /> } {data.text && <Text style={styles.text}>{data.text}</Text> } </View> <Image style={styles.nextIcon} source={nextImage} /> </View> </TouchableHighlight> ))} </ScrollView> ); } List.propTypes = { content: React.PropTypes.array, bigSeparation: React.PropTypes.bool }; export default List;
Set minCloudCover to zero if it's absent from scene query
export default (app) => { class SceneService { constructor($resource) { 'ngInject'; this.Scene = $resource( '/api/scenes/:id/', { id: '@properties.id' }, { query: { method: 'GET', cache: false }, get: { method: 'GET', cache: false } } ); } query(params = {}) { let validParams = Object.assign( params, {minCloudCover: params.minCloudCover ? params.minCloudCover : 0} ); return this.Scene.query(validParams).$promise; } getSceneBounds(scene) { let boundsGeoJson = L.geoJSON(); boundsGeoJson.addData(scene.dataFootprint); return boundsGeoJson.getBounds(); } } app.service('sceneService', SceneService); };
export default (app) => { class SceneService { constructor($resource) { 'ngInject'; this.Scene = $resource( '/api/scenes/:id/', { id: '@properties.id' }, { query: { method: 'GET', cache: false }, get: { method: 'GET', cache: false } } ); } query(params = {}) { return this.Scene.query(params).$promise; } getSceneBounds(scene) { let boundsGeoJson = L.geoJSON(); boundsGeoJson.addData(scene.dataFootprint); return boundsGeoJson.getBounds(); } } app.service('sceneService', SceneService); };
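A minimal sketch of the defaulting shown in the entry above; the parameter object is illustrative and only the `minCloudCover` handling mirrors the snippet.

// Fill in minCloudCover with 0 when the caller's query params omit it,
// leaving the remaining params untouched.
function withDefaultCloudCover(params) {
  return Object.assign({}, params, {
    minCloudCover: params.minCloudCover ? params.minCloudCover : 0
  });
}

console.log(withDefaultCloudCover({ maxCloudCover: 10 }));
// -> { maxCloudCover: 10, minCloudCover: 0 }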
Allow passing in an OkHttpClient from the application. + Interceptors must now be defined within the application!
package info.metadude.java.library.brockman; import com.squareup.moshi.Moshi; import com.squareup.okhttp.Interceptor; import com.squareup.okhttp.OkHttpClient; import info.metadude.java.library.brockman.adapters.StreamAdapter; import info.metadude.java.library.brockman.adapters.StreamTypeAdapter; import info.metadude.java.library.brockman.adapters.VideoSizeAdapter; import retrofit.MoshiConverterFactory; import retrofit.Retrofit; import java.util.List; public final class ApiModule { public static StreamsService provideStreamsService(final String baseUrl) { return provideStreamsService(baseUrl, null); } public static StreamsService provideStreamsService( final String baseUrl, final OkHttpClient okHttpClient) { return createRetrofit(baseUrl, okHttpClient) .create(StreamsService.class); } private static Retrofit createRetrofit(String baseUrl, OkHttpClient okHttpClient) { if (okHttpClient == null) { okHttpClient = new OkHttpClient(); } Moshi moshi = new Moshi.Builder() .add(new StreamTypeAdapter()) .add(new StreamAdapter()) .add(new VideoSizeAdapter()) .build(); return new Retrofit.Builder() .baseUrl(baseUrl) .addConverterFactory(MoshiConverterFactory.create(moshi)) .client(okHttpClient) .build(); } }
package info.metadude.java.library.brockman; import com.squareup.moshi.Moshi; import com.squareup.okhttp.Interceptor; import com.squareup.okhttp.OkHttpClient; import info.metadude.java.library.brockman.adapters.StreamAdapter; import info.metadude.java.library.brockman.adapters.StreamTypeAdapter; import info.metadude.java.library.brockman.adapters.VideoSizeAdapter; import retrofit.MoshiConverterFactory; import retrofit.Retrofit; import java.util.List; public final class ApiModule { public static StreamsService provideStreamsService(final String baseUrl) { return provideStreamsService(baseUrl, null); } public static StreamsService provideStreamsService( final String baseUrl, final List<Interceptor> httpClientInterceptors) { return createRetrofit(baseUrl, httpClientInterceptors) .create(StreamsService.class); } private static Retrofit createRetrofit(String baseUrl, List<Interceptor> httpClientInterceptors) { OkHttpClient httpClient = new OkHttpClient(); if (httpClientInterceptors != null) { httpClient.interceptors().addAll(httpClientInterceptors); } Moshi moshi = new Moshi.Builder() .add(new StreamTypeAdapter()) .add(new StreamAdapter()) .add(new VideoSizeAdapter()) .build(); return new Retrofit.Builder() .baseUrl(baseUrl) .addConverterFactory(MoshiConverterFactory.create(moshi)) .client(httpClient) .build(); } }
Fix camera entity picture 401
import Polymer from '../polymer'; import hass from '../util/home-assistant-js-instance'; const { moreInfoActions } = hass; const UPDATE_INTERVAL = 10000; // ms export default new Polymer({ is: 'ha-camera-card', properties: { stateObj: { type: Object, observer: 'updateCameraFeedSrc', }, cameraFeedSrc: { type: String, }, imageLoaded: { type: Boolean, value: true, }, /** * The z-depth of the card, from 0-5. */ elevation: { type: Number, value: 1, reflectToAttribute: true, }, }, listeners: { tap: 'cardTapped', }, attached() { this.timer = setInterval(() => this.updateCameraFeedSrc(this.stateObj), UPDATE_INTERVAL); }, detached() { clearInterval(this.timer); }, cardTapped() { this.async(() => moreInfoActions.selectEntity(this.stateObj.entityId), 1); }, updateCameraFeedSrc(stateObj) { const attr = stateObj.attributes; const time = (new Date()).getTime(); this.cameraFeedSrc = `${attr.entity_picture}&time=${time}`; }, imageLoadSuccess() { this.imageLoaded = true; }, imageLoadFail() { this.imageLoaded = false; }, });
import Polymer from '../polymer'; import hass from '../util/home-assistant-js-instance'; const { moreInfoActions } = hass; const UPDATE_INTERVAL = 10000; // ms export default new Polymer({ is: 'ha-camera-card', properties: { stateObj: { type: Object, observer: 'updateCameraFeedSrc', }, cameraFeedSrc: { type: String, }, imageLoaded: { type: Boolean, value: true, }, /** * The z-depth of the card, from 0-5. */ elevation: { type: Number, value: 1, reflectToAttribute: true, }, }, listeners: { tap: 'cardTapped', }, attached() { this.timer = setInterval(() => this.updateCameraFeedSrc(this.stateObj), UPDATE_INTERVAL); }, detached() { clearInterval(this.timer); }, cardTapped() { this.async(() => moreInfoActions.selectEntity(this.stateObj.entityId), 1); }, updateCameraFeedSrc(stateObj) { const attr = stateObj.attributes; const time = (new Date()).getTime(); this.cameraFeedSrc = `${attr.entity_picture}?token=${attr.access_token}&time=${time}`; }, imageLoadSuccess() { this.imageLoaded = true; }, imageLoadFail() { this.imageLoaded = false; }, });
Split auth test into success and failure
package core.auth; import controllers.routes; import static org.junit.Assert.*; import org.junit.*; import java.util.*; import play.mvc.*; import play.libs.*; import play.test.*; import static play.test.Helpers.*; import com.avaje.ebean.Ebean; import com.google.common.collect.ImmutableMap; import java.util.List; public class AuthenticationTest extends WithApplication { @Before public void setUp() throws Exception { start(fakeApplication(inMemoryDatabase(), fakeGlobal())); Ebean.save((List) Yaml.load("initial-data.yml")); } @Test public void testValidateSuccess() throws Exception { Result result = callAction( routes.ref.Application.authenticate(), fakeRequest().withFormUrlEncodedBody( ImmutableMap.of("username", "Karin", "password", "password") ) ); assertEquals(303, status(result)); assertEquals("Karin", session(result).get("username")); } @Test public void testValidateFailure() throws Exception { Result result = callAction( routes.ref.Application.authenticate(), fakeRequest().withFormUrlEncodedBody( ImmutableMap.of("username", "Karin", "password", "wrongpassword") ) ); assertEquals(400, status(result)); assertNull(session(result).get("username")); } }
package core.auth; import controllers.routes; import static org.junit.Assert.*; import org.junit.*; import java.util.*; import play.mvc.*; import play.libs.*; import play.test.*; import static play.test.Helpers.*; import com.avaje.ebean.Ebean; import com.google.common.collect.ImmutableMap; import java.util.List; public class AuthenticationTest extends WithApplication { @Before public void setUp() throws Exception { start(fakeApplication(inMemoryDatabase(), fakeGlobal())); Ebean.save((List) Yaml.load("initial-data.yml")); } @Test public void testValidate() throws Exception { Result result = callAction( routes.ref.Application.authenticate(), fakeRequest().withFormUrlEncodedBody( ImmutableMap.of("username", "Karin", "password", "password") ) ); assertEquals(303, status(result)); assertEquals("Karin", session(result).get("username")); result = callAction( routes.ref.Application.authenticate(), fakeRequest().withFormUrlEncodedBody( ImmutableMap.of("username", "Karin", "password", "wrongpassword") ) ); assertEquals(400, status(result)); assertNull(session(result).get("username")); } }
11804: Use async/await for ajax request
// Handles getting info about bulk media thresholds ELMO.Views.ExportCsvView = class ExportCsvView extends ELMO.Views.ApplicationView { get events() { return { 'click #response_csv_export_options_download_media': 'calculateMediaSize' }; } initialize(params) { $(".calculating-info").hide(); $(".error-info").hide(); $(".media-info").hide(); } async calculateMediaSize(event) { if (($(event.target)).is(':checked')) { $("input[type=submit]").prop("disabled", true); await this.spaceLeft(); $(".media-info").show(); } else { $("input[type=submit]").removeAttr("disabled"); $(".media-info").hide(); $(".error-info").hide(); } } async spaceLeft() { $(".calculating-info").show(); return $.ajax({ url: ELMO.app.url_builder.build("media-size"), method: "get", data: "", success: (data) => { $(".calculating-info").hide(); $("#media-size").html(data.media_size + " MB"); if (data.space_on_disk) { $("input[type=submit]").removeAttr("disabled"); } else { $("#export-error").html(I18n.t("response.export_options.no_space")); $(".error-info").show(); } return data; }, error: (xhr, error) => { $("#export-error").html(I18n.t("response.export_options.try_again")); } }); } };
// Handles getting info about bulk media thresholds ELMO.Views.ExportCsvView = class ExportCsvView extends ELMO.Views.ApplicationView { get events() { return { 'click #response_csv_export_options_download_media': 'calculateMediaSize' }; } initialize(params) { $(".calculating-info").hide(); $(".error-info").hide(); $(".media-info").hide(); } calculateMediaSize(event) { if (($(event.target)).is(':checked')) { $("input[type=submit]").prop("disabled", true); this.spaceLeft(); $(".media-info").show(); } else { $("input[type=submit]").removeAttr("disabled"); $(".media-info").hide(); $(".error-info").hide(); } } spaceLeft() { $(".calculating-info").show(); $.ajax({ url: ELMO.app.url_builder.build("media-size"), method: "get", data: "", success: (data) => { $(".calculating-info").hide(); $("#media-size").html(data.media_size + " MB"); if (data.space_on_disk) { $("input[type=submit]").removeAttr("disabled"); } else { $("#export-error").html(I18n.t("response.export_options.no_space")); $(".error-info").show(); } return data; }, error: (xhr, error) => { $("#export-error").html(I18n.t("response.export_options.try_again")); } }); } };
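A minimal sketch of why the `await` in the entry above works, assuming jQuery 3 or later, where the jqXHR returned by `$.ajax` is Promises/A+ compatible; the URL and field names are illustrative.

async function fetchMediaSize() {
  try {
    // The thenable jqXHR can be awaited directly.
    const data = await $.ajax({ url: '/media-size', method: 'get' });
    return data.media_size;
  } catch (xhr) {
    // A failed request rejects, so errors can be handled with try/catch.
    return null;
  }
}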
Fix get_current_context typo in docstring
from threading import local _local = local() def get_current_context(silent=False): """Returns the current click context. This can be used as a way to access the current context object from anywhere. This is a more implicit alternative to the :func:`pass_context` decorator. This function is primarily useful for helpers such as :func:`echo` which might be interested in changing its behavior based on the current context. To push the current context, :meth:`Context.scope` can be used. .. versionadded:: 5.0 :param silent: if set to `True` the return value is `None` if no context is available. The default behavior is to raise a :exc:`RuntimeError`. """ try: return getattr(_local, 'stack')[-1] except (AttributeError, IndexError): if not silent: raise RuntimeError('There is no active click context.') def push_context(ctx): """Pushes a new context to the current stack.""" _local.__dict__.setdefault('stack', []).append(ctx) def pop_context(): """Removes the top level from the stack.""" _local.stack.pop() def resolve_color_default(color=None): """"Internal helper to get the default value of the color flag. If a value is passed it's returned unchanged, otherwise it's looked up from the current context. """ if color is not None: return color ctx = get_current_context(silent=True) if ctx is not None: return ctx.color
from threading import local _local = local() def get_current_context(silent=False): """Returns the current click context. This can be used as a way to access the current context object from anywhere. This is a more implicit alternative to the :func:`pass_context` decorator. This function is primarily useful for helpers such as :func:`echo` which might be interested in changing its behavior based on the current context. To push the current context, :meth:`Context.scope` can be used. .. versionadded:: 5.0 :param silent: is set to `True` the return value is `None` if no context is available. The default behavior is to raise a :exc:`RuntimeError`. """ try: return getattr(_local, 'stack')[-1] except (AttributeError, IndexError): if not silent: raise RuntimeError('There is no active click context.') def push_context(ctx): """Pushes a new context to the current stack.""" _local.__dict__.setdefault('stack', []).append(ctx) def pop_context(): """Removes the top level from the stack.""" _local.stack.pop() def resolve_color_default(color=None): """"Internal helper to get the default value of the color flag. If a value is passed it's returned unchanged, otherwise it's looked up from the current context. """ if color is not None: return color ctx = get_current_context(silent=True) if ctx is not None: return ctx.color
CRM-2894: Modify sync email body. Sync at once.
<?php namespace Oro\Bundle\EmailBundle\Command; use Symfony\Bundle\FrameworkBundle\Command\ContainerAwareCommand; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Input\InputOption; use Symfony\Component\Console\Output\OutputInterface; use Oro\Bundle\EmailBundle\Cache\EmailCacheManager; use Oro\Bundle\EmailBundle\Exception\LoadEmailBodyException; use Oro\Component\Log\OutputLogger; class EmailBodySyncCommand extends ContainerAwareCommand { /** * {@internaldoc} */ protected function configure() { $this ->setName('oro:email:body-sync') ->setDescription('Synchronization email body') ->addOption( 'id', null, InputOption::VALUE_REQUIRED, 'The identifier of email to be synchronized.' ); } /** * {@internaldoc} */ protected function execute(InputInterface $input, OutputInterface $output) { $logger = new OutputLogger($output); /** @var EmailCacheManager $emailCacheManager */ $emailCacheManager = $this->getContainer()->get('oro_email.email.cache.manager'); $emailId = $input->getOption('id'); $email = $this->getContainer()->get("doctrine")->getRepository('OroEmailBundle:Email')->find($emailId); if ($email) { try { $emailCacheManager->ensureEmailBodyCached($email); $output->writeln(sprintf('<info>Email body synced for email - %s</info>', $email->getId())); } catch (LoadEmailBodyException $e) { $warn = sprintf('Email body cannot be loaded for email - %s', $email->getId()); $output->writeln('<info>' . $warn . '</info>'); $logger->warning($warn); } } } }
<?php namespace Oro\Bundle\EmailBundle\Command; use Symfony\Bundle\FrameworkBundle\Command\ContainerAwareCommand; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Input\InputOption; use Symfony\Component\Console\Output\OutputInterface; use Oro\Bundle\EmailBundle\Cache\EmailCacheManager; use Oro\Bundle\EmailBundle\Exception\LoadEmailBodyException; class EmailBodySyncCommand extends ContainerAwareCommand { /** * {@internaldoc} */ protected function configure() { $this ->setName('oro:email:body-sync') ->setDescription('Synchronization email body') ->addOption( 'id', null, InputOption::VALUE_REQUIRED, 'The identifier of email to be synchronized.' ); } /** * {@internaldoc} */ protected function execute(InputInterface $input, OutputInterface $output) { /** @var EmailCacheManager $emailCacheManager */ $emailCacheManager = $this->getContainer()->get('oro_email.email.cache.manager'); $emailId = $input->getOption('id'); $email = $this->getContainer()->get("doctrine")->getRepository('OroEmailBundle:Email')->find($emailId); if ($email) { try { $emailCacheManager->ensureEmailBodyCached($email); } catch (LoadEmailBodyException $e) { // log } } else { // s } } }
Add delay to field focus to ensure it will work
import React, {Component} from 'react' import Icon from 'react-fa' import {EditorState, Editor, ContentState} from 'draft-js' export class Input extends Component{ state = {editorState: EditorState.createWithContent(ContentState.createFromText(''))} componentDidMount() { if (this.props.isFirst) { setTimeout(() => {this.refs.editor.focus()}, 1) } } onChange(editorState) { this.props.onChange(editorState.getCurrentContent().getPlainText('')) return this.setState({editorState}) } hasValidContent() { const content = this.state.editorState.getCurrentContent().getPlainText('') return !_.isEmpty(content) && _.isEmpty(this.props.errors) } render () { const {name, errors} = this.props return ( <div className='input'> <div className='row'> <div className='col-md-7'><div className='name'>{name}</div></div> <div className='col-md-5'> <div className='editor'> <Editor ref='editor' editorState={this.state.editorState} onChange={this.onChange.bind(this)} handleReturn={() => true} /> {!_.isEmpty(errors) && <div className='error-alert'><Icon name='warning' /></div>} </div> </div> </div> </div> ) } }
import React, {Component} from 'react' import Icon from 'react-fa' import {EditorState, Editor, ContentState} from 'draft-js' export class Input extends Component{ state = {editorState: EditorState.createWithContent(ContentState.createFromText(''))} componentDidMount() { if (this.props.isFirst) { // this.refs.editor.focus() window.thiswillwork = this.refs.editor } } onChange(editorState) { this.props.onChange(editorState.getCurrentContent().getPlainText('')) return this.setState({editorState}) } hasValidContent() { const content = this.state.editorState.getCurrentContent().getPlainText('') return !_.isEmpty(content) && _.isEmpty(this.props.errors) } render () { const {name, errors} = this.props return ( <div className='input'> <div className='row'> <div className='col-md-7'><div className='name'>{name}</div></div> <div className='col-md-5'> <div className='editor'> <Editor ref='editor' editorState={this.state.editorState} onChange={this.onChange.bind(this)} handleReturn={() => true} /> {!_.isEmpty(errors) && <div className='error-alert'><Icon name='warning' /></div>} </div> </div> </div> </div> ) } }
Fix backward compatibility in dynamodb model
const smash = require("../../smash.js"); const Console = require("./console.js"); const logger = smash.logger("DynamodbModel"); const DYNAMODB_TABLE_SUFFIX = "dynamodb.tableSuffix"; const UNDERSCORE = "_"; class DynamodbModel extends Console {// FIX ME remove extends console in next major, only for compatibility constructor(table, env = null) { super(); if (this.constructor === DynamodbModel) { throw new Error("DynamodbModel is an abstract class, you must extend it from another class like class Foobar extends DynamodbModel {}"); } if (typeof table !== 'string' || table.length === 0) { throw new Error("First parameter of constructor(table) must be a string, " + logger.typeOf(table)); } this._table = table; this._env = env; this.namespace("Table " + this.table); } set env(env) { this._env = env; return this; } get env() { if (!this._env) { this._env = smash.getEnv("ENV"); if (!this._env) { logger.info("Missing ENV variable in environment"); } } return this._env; } get table() { const env = this.env; const tables = smash.config.get(DYNAMODB_TABLE_SUFFIX); if (env && tables && tables[env]) { return this._table + tables[env]; } else if (env) { return this._table + UNDERSCORE + env; } else { return this._table; } } } module.exports = DynamodbModel;
const smash = require("../../smash.js"); const logger = smash.logger("DynamodbModel"); const DYNAMODB_TABLE_SUFFIX = "dynamodb.tableSuffix"; const UNDERSCORE = "_"; class DynamodbModel { constructor(table, env = null) { if (this.constructor === DynamodbModel) { throw new Error("DynamodbModel is an abstract class, you must extend it from another class like class Foobar extends DynamodbModel {}"); } if (typeof table !== 'string' || table.length === 0) { throw new Error("First parameter of constructor(table) must be a string, " + logger.typeOf(table)); } this._table = table; this._env = env; } set env(env) { this._env = env; return this; } get env() { if (!this._env) { this._env = smash.getEnv("ENV"); if (!this._env) { logger.info("Missing ENV variable in environment"); } } return this._env; } get table() { const env = this.env; const tables = smash.config.get(DYNAMODB_TABLE_SUFFIX); if (env && tables && tables[env]) { return this._table + tables[env]; } else if (env) { return this._table + UNDERSCORE + env; } else { return this._table; } } } module.exports = DynamodbModel;
Add a TODO for the case where no expectations are defined
package com.github.liucijus.jinsist.expectations; import com.github.liucijus.jinsist.matchers.Arguments; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; public class OrderedExpectations implements Expectations { private List<Expectation> expectations = new ArrayList<>(); private boolean wasUnexpectedInvocation = false; @Override public <ReturnType, MockType> void recordStub( Class<MockType> classToMock, MockType instance, Method method, Arguments arguments, ReturnType result ) { ExpectedInvocation<MockType> invocation = new ExpectedInvocation<>(classToMock, instance, method, arguments); expectations.add(new Expectation<>(invocation, result)); } @Override public <MockType> Object execute( Class<MockType> classToMock, MockType instance, Method method, Object[] arguments ) { // todo handle unexpected call when expectations are empty Expectation expectation = expectations.remove(0); Invocation<MockType> invocation = new Invocation<>(classToMock, instance, method, arguments); verifyExpectationMatchesInvocation(expectation, invocation); return expectation.getResult(); } private <MockType> void verifyExpectationMatchesInvocation( Expectation expectation, Invocation<MockType> invocation ) { if (!expectation.isFor(invocation)) { wasUnexpectedInvocation = true; throw new UnexpectedInvocation(expectation, invocation); } } @Override public void verify() { if (!expectations.isEmpty() || wasUnexpectedInvocation) { throw new UnmetExpectations(); } } }
package com.github.liucijus.jinsist.expectations; import com.github.liucijus.jinsist.matchers.Arguments; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; public class OrderedExpectations implements Expectations { private List<Expectation> expectations = new ArrayList<>(); private boolean wasUnexpectedInvocation = false; @Override public <ReturnType, MockType> void recordStub( Class<MockType> classToMock, MockType instance, Method method, Arguments arguments, ReturnType result ) { ExpectedInvocation<MockType> invocation = new ExpectedInvocation<>(classToMock, instance, method, arguments); expectations.add(new Expectation<>(invocation, result)); } @Override public <MockType> Object execute( Class<MockType> classToMock, MockType instance, Method method, Object[] arguments ) { Expectation expectation = expectations.remove(0); Invocation<MockType> invocation = new Invocation<>(classToMock, instance, method, arguments); verifyExpectationMatchesInvocation(expectation, invocation); return expectation.getResult(); } private <MockType> void verifyExpectationMatchesInvocation( Expectation expectation, Invocation<MockType> invocation ) { if (!expectation.isFor(invocation)) { wasUnexpectedInvocation = true; throw new UnexpectedInvocation(expectation, invocation); } } @Override public void verify() { if (!expectations.isEmpty() || wasUnexpectedInvocation) { throw new UnmetExpectations(); } } }
Fix the client mock API (specify page_context as described in the documentation)
<?php namespace OpsWay\ZohoBooks\Tests\Api; use OpsWay\ZohoBooks\Api\BaseApi; use OpsWay\ZohoBooks\Client; use PHPUnit\Framework\TestCase; class BaseApiTest extends TestCase { const ORG_ID = 1; /** * @var \PHPUnit_Framework_MockObject_MockObject */ private $client; /** * @var BaseApi */ private $baseApi; public function setUp() { $this->client = $this->createMock(Client::class); $this->baseApi = new BaseApi($this->client, self::ORG_ID); } public function testGetList() { $filter = ['test' => '123']; $this->client->expects($this->once()) ->method('getList') ->with('', self::ORG_ID, $filter) ->willReturn([ 's' => [ // List of item ['id' => 1], ['id' => 2], ['id' => 3] ], 'page_context' => [ 'page' => 1, ] ]); $list = $this->baseApi->getList($filter); // Test iterable list $this->assertTrue(is_array($list) || $list instanceof \Traversable); // Test foreach list foreach ($list as $key => $item) { $this->assertEquals($key+1, $item['id']); } // Test directly access list by key $this->assertEquals(1, $list[0]['id']); } }
<?php namespace OpsWay\ZohoBooks\Tests\Api; use OpsWay\ZohoBooks\Api\BaseApi; use OpsWay\ZohoBooks\Client; use PHPUnit\Framework\TestCase; class BaseApiTest extends TestCase { const ORG_ID = 1; /** * @var \PHPUnit_Framework_MockObject_MockObject */ private $client; /** * @var BaseApi */ private $baseApi; public function setUp() { $this->client = $this->createMock(Client::class); $this->baseApi = new BaseApi($this->client, self::ORG_ID); } public function testGetList() { $filter = ['test' => '123']; $this->client->expects($this->once()) ->method('getList') ->with('', self::ORG_ID, $filter) ->willReturn([ 's' => [ // List of item ['id' => 1], ['id' => 2], ['id' => 3] ], ]); $list = $this->baseApi->getList($filter); // Test iterable list $this->assertTrue(is_array($list) || $list instanceof \Traversable); // Test foreach list foreach ($list as $key => $item) { $this->assertEquals($key+1, $item['id']); } // Test directly access list by key $this->assertEquals(1, $list[0]['id']); } }
Add whitespace to version spec?
# -*- coding: utf-8 -*- from os import path from setuptools import find_packages, setup README_rst = path.join(path.abspath(path.dirname(__file__)), 'README.rst') with open(README_rst, 'r') as f: long_description = f.read() setup( name="pyee", vcversioner={}, packages=find_packages(), setup_requires=[ 'pytest-runner', 'pytest-asyncio; python_version > "3.4"', 'vcversioner' ], tests_require=['twisted'], include_package_data=True, description="A port of node.js's EventEmitter to python.", long_description=long_description, author="Joshua Holbrook", author_email="[email protected]", url="https://github.com/jfhbrook/pyee", keywords=[ "events", "emitter", "node.js", "node", "eventemitter", "event_emitter" ], classifiers=[ "Programming Language :: Python", "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Topic :: Other/Nonlisted Topic" ] )
# -*- coding: utf-8 -*- from os import path from setuptools import find_packages, setup README_rst = path.join(path.abspath(path.dirname(__file__)), 'README.rst') with open(README_rst, 'r') as f: long_description = f.read() setup( name="pyee", vcversioner={}, packages=find_packages(), setup_requires=[ 'pytest-runner', 'pytest-asyncio;python_version>"3.4"', 'vcversioner' ], tests_require=['twisted'], include_package_data=True, description="A port of node.js's EventEmitter to python.", long_description=long_description, author="Joshua Holbrook", author_email="[email protected]", url="https://github.com/jfhbrook/pyee", keywords=[ "events", "emitter", "node.js", "node", "eventemitter", "event_emitter" ], classifiers=[ "Programming Language :: Python", "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Topic :: Other/Nonlisted Topic" ] )
Add a MAPPING table to remove duplicated loop logic
import logging from promgen.models import Project, Service logger = logging.getLogger(__name__) class SenderBase(object): MAPPING = [ ('project', Project), ('service', Service), ] def send(self, data): sent = 0 for alert in data['alerts']: for label, klass in self.MAPPING: logger.debug('Checking for %s', label) if label in alert['labels']: logger.debug('Checking for %s %s', label, klass) for obj in klass.objects.filter(name=alert['labels'][label]): for sender in obj.sender.filter(sender=self.__module__): logger.debug('Sending to %s', sender) if self._send(sender.value, alert, data): sent += 1 if sent == 0: logger.debug('No senders configured for project or service') return sent def test(self, target, alert): logger.debug('Sending test message to %s', target) self._send(target, alert, {'externalURL': ''})
import logging from promgen.models import Project, Service logger = logging.getLogger(__name__) class SenderBase(object): def send(self, data): sent = 0 for alert in data['alerts']: if 'project' in alert['labels']: logger.debug('Checking for projects') for project in Project.objects.filter(name=alert['labels']['project']): logger.debug('Checking %s', project) for sender in project.sender.all(): logger.debug('Sending to %s', sender) if self._send(sender.value, alert, data): sent += 1 if 'service' in alert['labels']: logger.debug('Checking for service') for service in Service.objects.filter(name=alert['labels']['service']): logger.debug('Checking %s', service) for sender in service.sender.all(): logger.debug('Sending to %s', sender) if self._send(sender.value, alert, data): sent += 1 if sent == 0: logger.debug('No senders configured for project or service %s', alert['labels']['project']) return sent def test(self, target, alert): logger.debug('Sending test message to %s', target) self._send(target, alert, {'externalURL': ''})
Remove hash & path from dev webpack CSS module names. This will make bok-choy testing easier (see the sketch after this entry).
/* eslint-env node */ 'use strict'; var Merge = require('webpack-merge'); var path = require('path'); var webpack = require('webpack'); var commonConfig = require('./webpack.common.config.js'); module.exports = Merge.smart(commonConfig, { output: { filename: '[name].js' }, devtool: 'source-map', plugins: [ new webpack.LoaderOptionsPlugin({ debug: true }), new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify('development') }) ], module: { rules: [ { test: /(.scss|.css)$/, include: [ /studio-frontend/, /paragon/, /font-awesome/ ], use: [ 'style-loader', { loader: 'css-loader', options: { sourceMap: true, modules: true, localIdentName: '[name]__[local]' } }, { loader: 'sass-loader', options: { data: '$base-rem-size: 0.625; @import "paragon-reset";', includePaths: [ path.join(__dirname, './node_modules/@edx/paragon/src/utils'), path.join(__dirname, './node_modules/') ], sourceMap: true } } ] } ] }, watchOptions: { ignored: [/node_modules/, /\.git/] } });
/* eslint-env node */ 'use strict'; var Merge = require('webpack-merge'); var path = require('path'); var webpack = require('webpack'); var commonConfig = require('./webpack.common.config.js'); module.exports = Merge.smart(commonConfig, { output: { filename: '[name].js' }, devtool: 'source-map', plugins: [ new webpack.LoaderOptionsPlugin({ debug: true }), new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify('development') }) ], module: { rules: [ { test: /(.scss|.css)$/, include: [ /studio-frontend/, /paragon/, /font-awesome/ ], use: [ 'style-loader', { loader: 'css-loader', options: { sourceMap: true, modules: true, localIdentName: '[path][name]__[local]--[hash:base64:5]' } }, { loader: 'sass-loader', options: { data: '$base-rem-size: 0.625; @import "paragon-reset";', includePaths: [ path.join(__dirname, './node_modules/@edx/paragon/src/utils'), path.join(__dirname, './node_modules/') ], sourceMap: true } } ] } ] }, watchOptions: { ignored: [/node_modules/, /\.git/] } });
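For reference, a trimmed sketch of what the localIdentName change means for generated class names; the rule below follows the config above, while the file and class names in the comments are made up.

// With '[path][name]__[local]--[hash:base64:5]', a .title class in
// src/components/Card.scss comes out roughly as
// 'src-components-Card__title--1a2b3', which is hard to target from
// bok-choy/Selenium selectors. '[name]__[local]' yields a stable 'Card__title'.
module.exports = {
  module: {
    rules: [
      {
        test: /(\.scss|\.css)$/,
        use: [
          'style-loader',
          {
            loader: 'css-loader',
            options: {
              modules: true,
              localIdentName: '[name]__[local]', // dev: predictable, hash-free names
            },
          },
        ],
      },
    ],
  },
};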
Fix wrong phpdoc type name
<?php /** * We made this code. * By pH7 (Pierre-Henry SORIA). */ namespace PFBC\Validation; use PH7\ExistsCoreModel; use PH7\Framework\Security\Ban\Ban; class BankAccount extends \PFBC\Validation { protected $sTable; /** * Constructor of class. * * @param string $sTable */ public function __construct($sTable = 'Affiliates') { parent::__construct(); $this->sTable = $sTable; } /** * @param string $sValue * * @return bool */ public function isValid($sValue) { if ($this->isNotApplicable($sValue) || $this->oValidate->email($sValue)) { if (!Ban::isBankAccount($sValue)) { if (!(new ExistsCoreModel)->bankAccount($sValue, $this->sTable)) { return true; } else { $this->message = t('Error: Another account with the same bank account already exists. Please choose another one.'); } } else { $this->message = t('Error: This bank account is not supported by our payment system.'); } } else { $this->message = t('Error: Your bank account is incorrect.'); } return false; } }
<?php /** * We made this code. * By pH7 (Pierre-Henry SORIA). */ namespace PFBC\Validation; use PH7\ExistsCoreModel; use PH7\Framework\Security\Ban\Ban; class BankAccount extends \PFBC\Validation { protected $sTable; /** * Constructor of class. * * @param $sTable Default 'Affiliates' */ public function __construct($sTable = 'Affiliates') { parent::__construct(); $this->sTable = $sTable; } /** * @param string $sValue * @return boolean */ public function isValid($sValue) { if ($this->isNotApplicable($sValue) || $this->oValidate->email($sValue)) { if (!Ban::isBankAccount($sValue)) { if (!(new ExistsCoreModel)->bankAccount($sValue, $this->sTable)) { return true; } else { $this->message = t('Error: Another account with the same bank account already exists. Please choose another one.'); } } else { $this->message = t('Error: This bank account is not supported by our payment system.'); } } else { $this->message = t('Error: Your bank account is incorrect.'); } return false; } }
:art: Fix mixins-before-declarations rule to work with the latest gonzales (see the sketch after this entry)
'use strict'; var helpers = require('../helpers'); module.exports = { 'name': 'mixins-before-declarations', 'defaults': { 'exclude': [ 'breakpoint', 'mq' ] }, 'detect': function (ast, parser) { var result = [], error; ast.traverseByType('include', function (node, i, parent) { var depth = 0, declarationCount = [depth]; parent.forEach(function (item) { if (item.is('ruleset')) { depth++; declarationCount[depth] = 0; } else if (item.is('declaration')) { if (item.first().is('property')) { var prop = item.first(); if (prop.first().is('ident')) { declarationCount[depth]++; } } } else if (item.is('include')) { item.forEach('ident', function (name) { if (parser.options.exclude.indexOf(name.content) === -1 && declarationCount[depth] > 0) { error = { 'ruleId': parser.rule.name, 'line': item.start.line, 'column': item.start.column, 'message': 'Mixins should come before declarations', 'severity': parser.severity }; result = helpers.addUnique(result, error); } }); } }); }); return result; } };
'use strict'; var helpers = require('../helpers'); module.exports = { 'name': 'mixins-before-declarations', 'defaults': { 'exclude': [ 'breakpoint', 'mq' ] }, 'detect': function (ast, parser) { var result = [], error; ast.traverseByType('include', function (node, i, parent) { var depth = 0, declarationCount = [depth]; parent.forEach( function (item) { if (item.type === 'ruleset') { depth++; declarationCount[depth] = 0; } else if (item.type === 'declaration') { if (item.first().is('property')) { var prop = item.first(); if (prop.first().is('ident')) { declarationCount[depth]++; } } } else if (item.type === 'include') { item.forEach('simpleSelector', function (name) { if (parser.options.exclude.indexOf(name.content[0].content) === -1 && declarationCount[depth] > 0) { error = { 'ruleId': parser.rule.name, 'line': item.start.line, 'column': item.start.column, 'message': 'Mixins should come before declarations', 'severity': parser.severity }; result = helpers.addUnique(result, error); } }); } }); }); return result; } };
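A tiny sketch of the node-type check that changed between the two versions; the object below is a stand-in with the same shape the rule relies on, not a real gonzales AST node.

// The older rule compared the raw .type property; the updated one calls the
// .is() helper. Both answer the same question for this stand-in node.
const node = {
  type: 'include',
  is(type) {
    return this.type === type;
  },
};

console.log(node.type === 'include'); // true – old-style comparison
console.log(node.is('include'));      // true – helper used by the fixed rule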
Improve regexp to match the output from Babel. Babel converts CommonJS require calls such as var bar = require('bar'); bar.foo('test'); into _bar2.default.foo('test'), so the regexp needs to treat the leading _ and the 2.default suffix as optional (see the sketch after this entry).
<?php class Kwf_Assets_Util_Trl { //returns replacement used for js trl strings //used by Kwf_Assets_Dependency_File_Js and Kwf_Assets_CommonJs_Underscore_TemplateDependency public static function getJsReplacement($trlElement) { $b = $trlElement['before']; $fn = substr($b, 0, strpos($b, '(')); $key = $trlElement['type'].'.'.$trlElement['source']; if (isset($trlElement['context'])) $key .= '.'.$trlElement['context']; $key .= '.'.str_replace("'", "\\'", $trlElement['text']); $replace = ''; if (preg_match('#^(_?([a-z]+(2\.default)?\.))trl#i', $b, $m)) { $replace = substr($b, 0, strlen($m[1])); } if ($trlElement['type'] == 'trlp' || $trlElement['type'] == 'trlcp') { $replace .= "_kwfTrlp"; } else { $replace .= "_kwfTrl"; } $replace .= "('$key', ".substr($b, strpos($b, '(')+1); unset($trlElement['before']); unset($trlElement['linenr']); unset($trlElement['error_short']); return array( 'before' => $b, 'replace' => $replace, 'trlElement' => (object)$trlElement ); } }
<?php class Kwf_Assets_Util_Trl { //returns replacement used for js trl strings //used by Kwf_Assets_Dependency_File_Js and Kwf_Assets_CommonJs_Underscore_TemplateDependency public static function getJsReplacement($trlElement) { $b = $trlElement['before']; $fn = substr($b, 0, strpos($b, '(')); $key = $trlElement['type'].'.'.$trlElement['source']; if (isset($trlElement['context'])) $key .= '.'.$trlElement['context']; $key .= '.'.str_replace("'", "\\'", $trlElement['text']); $replace = ''; if (preg_match('#^([a-z]+\.)trl#i', $b, $m)) { $replace = substr($b, 0, strlen($m[1])); } if ($trlElement['type'] == 'trlp' || $trlElement['type'] == 'trlcp') { $replace .= "_kwfTrlp"; } else { $replace .= "_kwfTrl"; } $replace .= "('$key', ".substr($b, strpos($b, '(')+1); unset($trlElement['before']); unset($trlElement['linenr']); unset($trlElement['error_short']); return array( 'before' => $b, 'replace' => $replace, 'trlElement' => (object)$trlElement ); } }
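A quick JavaScript check of what the widened pattern has to accept; the module name bar comes from the commit message, and the two regexps mirror the old and new PHP ones above.

const oldPattern = /^([a-z]+\.)trl/i;                  // before
const newPattern = /^(_?([a-z]+(2\.default)?\.))trl/i; // after

console.log(oldPattern.test("bar.trl('Hello')"));            // true  – plain CommonJS
console.log(oldPattern.test("_bar2.default.trl('Hello')"));  // false – missed after Babel
console.log(newPattern.test("bar.trl('Hello')"));            // true
console.log(newPattern.test("_bar2.default.trl('Hello')"));  // true  – Babel output now matches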
Fix the labeled-statement error in the setState updater (see the sketch after this entry)
import React, { Component } from 'react' import PropTypes from 'prop-types' import TableRow from './TableRow/index.js' import Button from '../../Components/Button/index.js' export default class Table extends Component { static propTypes = { tableName: PropTypes.string, columnTypes: PropTypes.object.isRequired } state = { addedRows: [] } addRow = () => { const defaultValues = Object.keys(this.props.columnTypes).map(() => {return ""}) const newRow = <TableRow key={this.state.addedRows.length + 1} editing cellTypes={Object.values(this.props.columnTypes)} cellValues={defaultValues} /> this.setState((prevState) => ({ addedRows: prevState.addedRows.concat([newRow]) })) } render(){ if(this.props.readOnly){ return( <div> {this.props.children} </div> ) }else{ return( <div> <TableRow readOnly cellValues={Object.keys(this.props.columnTypes)} cellTypes={Object.values(this.props.columnTypes)} /> {this.props.children} {this.state.addedRows} <Button className="round" buttonText="+" onClick={this.addRow} /> </div> ) } } }
import React, { Component } from 'react' import PropTypes from 'prop-types' import TableRow from './TableRow/index.js' import Button from '../../Components/Button/index.js' export default class Table extends Component { static propTypes = { tableName: PropTypes.string, columnTypes: PropTypes.object.isRequired } state = { addedRows: [] } addRow = () => { const defaultValues = Object.keys(this.props.columnTypes).map(() => {return ""}) const newRow = <TableRow key={this.state.addedRows.length + 1} editing cellTypes={Object.values(this.props.columnTypes)} cellValues={defaultValues} /> this.setState((prevState) => { addedRows: prevState.addedRows.push(newRow) }) } render(){ if(this.props.readOnly){ return( <div> {this.props.children} </div> ) }else{ return( <div> <TableRow readOnly cellValues={Object.keys(this.props.columnTypes)} cellTypes={Object.values(this.props.columnTypes)} /> {this.props.children} {this.state.addedRows} <Button className="round" buttonText="+" onClick={this.addRow} /> </div> ) } } }
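A short sketch of the parsing pitfall behind the error: an arrow function whose body starts with a brace is a block, so the intended object literal becomes a labeled statement and the updater returns undefined. The state shape below is simplified from the component above.

const prevState = { addedRows: [1, 2] };

// Block body: 'addedRows:' is parsed as a label, nothing is returned.
const broken = (state) => { addedRows: state.addedRows.concat(3) };
console.log(broken(prevState)); // undefined

// Wrapping the literal in parentheses (as the fix does) returns the object.
const fixed = (state) => ({ addedRows: state.addedRows.concat(3) });
console.log(fixed(prevState)); // { addedRows: [ 1, 2, 3 ] }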
Add max-length indicator to textarea fields
import Ember from 'ember'; const formObject = Ember.Object.create({ "schema": { "type": "object", "properties": { "activity": { "type": "string", "title": "What were you doing yesterday at 10am/7pm?", "maxLength": 75 }, "location": { "type": "string", "title": "Where were you?", "maxLength": 75 }, "peoplePresent": { "type": "string", "title": "Who else was present? (If you were alone, please write \"alone\").", "maxLength": 75 } } }, "options": { "form": { "buttons": { "submit": { "title": "Continue", "styles": "btn btn-primary" } } }, "fields": { "activity": { "type": "textarea", "constrainMaxLength": true, "showMaxLengthIndicator": true }, "location": { "type": "textarea", "constrainMaxLength": true, "showMaxLengthIndicator": true }, "peoplePresent": { "type": "textarea", "constrainMaxLength": true, "showMaxLengthIndicator": true } } } }); const formActions = { submit: function() { var data = this.getValue(); console.log(data); //TODO: POST data } }; export default Ember.Component.extend({ formSchema: formObject, formActions: formActions });
import Ember from 'ember'; const formObject = { "schema": { "type": "object", "properties": { "activity": { "type": "string", "title": "What were you doing yesterday at 10am/7pm?", "maxLength": 75 }, "location": { "type": "string", "title": "Where were you?", "maxLength": 75 }, "peoplePresent": { "type": "string", "title": "Who else was present? (If you were alone, please write \"alone\").", "maxLength": 75 } } }, "options": { "form": { "buttons": { "submit": { "title": "Continue", "styles": "btn btn-primary" } } }, "fields": { "activity": { "type": "textarea" }, "location": { "type": "textarea" }, "peoplePresent": { "type": "textarea" } } } }; const formActions = { submit: function() { var data = this.getValue(); console.log(data); //TODO: POST data } }; export default Ember.Component.extend({ formSchema: formObject, formActions: formActions });
Upgrade to the improved sequencing mechanism (use the current sequencer provider)
'use strict'; module.exports = { requestProvider: { parent: 'danf:dependencyInjection.contextProvider', properties: { interface: 'danf:http.request' } }, responseProvider: { parent: 'danf:dependencyInjection.contextProvider', properties: { interface: 'danf:http.response' } }, errorHandler: { class: '%danf:http.classes.errorHandler%', properties: { debug: '%danf:context.debug%' } }, sessionHandler: { class: '%danf:http.classes.sessionHandler%', properties: { requestProvider: '#danf:http.requestProvider#', sequencerProvider: '#danf:event.currentSequencerProvider#' } }, notifier: { parent: 'danf:event.notifier', children: { request: { class: '%danf:http.classes.notifier.request%', properties: { app: '#danf:app#', renderer: '#danf:rendering.renderer#', errorHandler: '#danf:http.errorHandler#', requestProvider: '#danf:http.requestProvider#', responseProvider: '#danf:http.responseProvider#' } } } } };
'use strict'; module.exports = { requestProvider: { parent: 'danf:dependencyInjection.contextProvider', properties: { interface: 'danf:http.request' } }, responseProvider: { parent: 'danf:dependencyInjection.contextProvider', properties: { interface: 'danf:http.response' } }, errorHandler: { class: '%danf:http.classes.errorHandler%', properties: { debug: '%danf:context.debug%' } }, sessionHandler: { class: '%danf:http.classes.sessionHandler%', properties: { requestProvider: '#danf:http.requestProvider#', sequencerProvider: '#danf:event.sequencerProvider#' } }, notifier: { parent: 'danf:event.notifier', children: { request: { class: '%danf:http.classes.notifier.request%', properties: { app: '#danf:app#', renderer: '#danf:rendering.renderer#', errorHandler: '#danf:http.errorHandler#', requestProvider: '#danf:http.requestProvider#', responseProvider: '#danf:http.responseProvider#' } } } } };
Clean up the definition loader
<?php namespace Watson\Industrie; use RecursiveDirectoryIterator; use RecursiveIteratorIterator; class DefinitionLoader { /** * Locations to search for model definitions. * * @var array */ protected $directories = [ 'app/spec/factories', 'app/tests/factories', 'spec/factories', 'tests/factories' ]; /** * Find the directory that contains the factory definitions. * * @return mixed */ public function getDefinitionDirectory() { foreach ($this->directories as $directory) { if (is_dir(base_path() . "/{$directory}")) { return base_path(). "/{$directory}"; } } throw new FactoryDirectoryNotFoundException; } /** * Get a recursive iterator for all files in the given directory. * * @param string $directory * @return RecursiveIteratorIterator */ public function getDirectoryIterator($directory) { return new RecursiveIteratorIterator( new RecursiveDirectoryIterator($directory) ); } /** * Collect all the files from the factory definitions directory. * * @return array */ public function getDefinitionFiles() { $directory = $this->getDefinitionDirectory(); $filenames = []; foreach ($this->getDirectoryIterator($directory) as $file) { if ($file->isFile()) { $filenames[] = $file->getRealPath(); } } return $filenames; } /** * Load all the definition files. * * @return void */ public function loadDefinitions() { foreach ($this->getDefinitionFiles() as $file) { require $file; } } }
<?php namespace Watson\Industrie; use RecursiveDirectoryIterator; use RecursiveIteratorIterator; class DefinitionLoader { /** * Locations to search for model definitions. * * @var array */ protected $directories = [ 'app/spec/factories', 'app/tests/factories', 'spec/factories', 'tests/factories' ]; /** * Find the directory that contains the factory definitions. * * @return mixed */ public function getFactoryDirectory() { foreach ($this->directories as $directory) { if (is_dir(base_path() . "/{$directory}")) { return base_path(). "/{$directory}"; } } throw new FactoryDirectoryNotFoundException; } /** * Collect all the files from the factory definitions directory. * * @return array */ public function getDefinitionFiles() { $directory = $this->getFactoryDirectory(); $filenames = []; foreach ($this->getDirectoryIterator($directory) as $file) { if ($file->isFile()) { $filenames[] = $file->getRealPath(); } } return $filenames; } public function getDirectoryIterator($directory) { return new RecursiveIteratorIterator( new RecursiveDirectoryIterator($directory) ); } /** * Load all the definition files. * * @return void */ public function loadDefinitions() { foreach ($this->getDefinitionFiles() as $file) { require $file; } } }
Fix the missing cls variable in setUpClass and add some docstring info
# -*- coding: utf-8 -*- # Import Python Libs import shutil import subprocess import tempfile # Import Salt Testing libs from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import salt libs import integration class GitModuleTest(integration.ModuleCase): ''' Integration tests for the git module ''' @classmethod def setUpClass(cls): ''' Check if git is installed. If it isn't, skip everything in this class. ''' from salt.utils import which git = which('git') if not git: cls.skipTest('The git binary is not available') def setUp(self): self.repos = tempfile.mkdtemp(dir=integration.TMP) self.addCleanup(shutil.rmtree, self.repos, ignore_errors=True) subprocess.check_call(['git', 'init', '--quiet', self.repos]) def test_config_set_value_has_space_characters(self): ''' Tests the git.config_set function ''' config_key = "user.name" config_value = "foo bar" ret = self.run_function( 'git.config_set', cwd=self.repos, setting_name=config_key, setting_value=config_value, ) self.assertEqual("", ret) output = subprocess.check_output( ['git', 'config', '--local', config_key], cwd=self.repos) self.assertEqual(config_value + "\n", output)
# -*- coding: utf-8 -*- import shutil import subprocess import tempfile # Import Salt Testing libs from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import salt libs import integration class GitModuleTest(integration.ModuleCase): @classmethod def setUpClass(cls): from salt.utils import which git = which('git') if not git: self.skipTest('The git binary is not available') def setUp(self): self.repos = tempfile.mkdtemp(dir=integration.TMP) self.addCleanup(shutil.rmtree, self.repos, ignore_errors=True) subprocess.check_call(['git', 'init', '--quiet', self.repos]) def test_config_set_value_has_space_characters(self): ''' git.config_set ''' config_key = "user.name" config_value = "foo bar" ret = self.run_function( 'git.config_set', cwd=self.repos, setting_name=config_key, setting_value=config_value, ) self.assertEqual("", ret) output = subprocess.check_output( ['git', 'config', '--local', config_key], cwd=self.repos) self.assertEqual(config_value + "\n", output)
Fix team permissions backend not pulling out manager_permissions. Something like request.user.has_perm('reviews.can_manage_%s' % proposal.kind.section.slug) will always return False, because the backend looks up team membership (member or manager) but only grabs the 'permissions' field and not the 'manager_permissions' field.
from django.db.models import Q from .models import Team class TeamPermissionsBackend(object): def authenticate(self, username=None, password=None): return None def get_team_permissions(self, user_obj, obj=None): """ Returns a set of permission strings that this user has through his/her team memberships. """ if user_obj.is_anonymous() or obj is not None: return set() if not hasattr(user_obj, "_team_perm_cache"): # Member permissions memberships = Team.objects.filter( Q(memberships__user=user_obj), Q(memberships__state="member"), ) perms = memberships.values_list( "permissions__content_type__app_label", "permissions__codename" ).order_by() permissions = ["%s.%s" % (ct, name) for ct, name in perms] # Manager permissions memberships = Team.objects.filter( Q(memberships__user=user_obj), Q(memberships__state="manager"), ) perms = memberships.values_list( "manager_permissions__content_type__app_label", "manager_permissions__codename" ).order_by() permissions += ["%s.%s" % (ct, name) for ct, name in perms] user_obj._team_perm_cache = set(permissions) return user_obj._team_perm_cache def has_perm(self, user_obj, perm, obj=None): if not user_obj.is_active: return False return perm in self.get_team_permissions(user_obj, obj)
from django.db.models import Q from .models import Team class TeamPermissionsBackend(object): def authenticate(self, username=None, password=None): return None def get_team_permissions(self, user_obj, obj=None): """ Returns a set of permission strings that this user has through his/her team memberships. """ if user_obj.is_anonymous() or obj is not None: return set() if not hasattr(user_obj, "_team_perm_cache"): memberships = Team.objects.filter( Q(memberships__user=user_obj), Q(memberships__state="manager") | Q(memberships__state="member"), ) perms = memberships.values_list( "permissions__content_type__app_label", "permissions__codename" ).order_by() user_obj._team_perm_cache = set(["%s.%s" % (ct, name) for ct, name in perms]) return user_obj._team_perm_cache def has_perm(self, user_obj, perm, obj=None): if not user_obj.is_active: return False return perm in self.get_team_permissions(user_obj, obj)
Fix broken locale copying mechanism
const fs = require('fs-extra'); const path = require('path'); const { createFilePath } = require('gatsby-source-filesystem'); exports.onCreateNode = ({ node, getNode, boundActionCreators }) => { const { createNodeField } = boundActionCreators; if (node.internal.type === 'MarkdownRemark') { const slug = createFilePath({ node, getNode, basePath: 'pages' }); createNodeField({ node, name: 'slug', value: slug, }); } }; exports.onPreBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console fs.copySync(path.join(__dirname, '/src/locales'), path.join(__dirname, '/public/locales')); }; exports.createPages = ({ graphql, boundActionCreators }) => { const { createPage } = boundActionCreators; return new Promise((resolve) => { graphql(` { allMarkdownRemark { edges { node { fields { slug } } } } } `).then(({ data }) => { data.allMarkdownRemark.edges.forEach(({ node }) => { createPage({ path: node.fields.slug, component: path.resolve('./src/templates/blog-post.jsx'), context: { // Data passed to context is available in page queries as GraphQL variables slug: node.fields.slug, }, }); }); resolve(); }); }); };
const fs = require('fs-extra'); const path = require('path'); const { createFilePath } = require('gatsby-source-filesystem'); exports.onCreateNode = ({ node, getNode, boundActionCreators }) => { const { createNodeField } = boundActionCreators; if (node.internal.type === 'MarkdownRemark') { const slug = createFilePath({ node, getNode, basePath: 'pages' }); createNodeField({ node, name: 'slug', value: slug, }); } }; exports.onPostBootstrap = () => { console.log('Copying locales'); // eslint-disable-line no-console fs.copySync(path.join(__dirname, '/src/locales'), path.join(__dirname, '/public/locales')); }; exports.createPages = ({ graphql, boundActionCreators }) => { const { createPage } = boundActionCreators; return new Promise((resolve) => { graphql(` { allMarkdownRemark { edges { node { fields { slug } } } } } `).then(({ data }) => { data.allMarkdownRemark.edges.forEach(({ node }) => { createPage({ path: node.fields.slug, component: path.resolve('./src/templates/blog-post.jsx'), context: { // Data passed to context is available in page queries as GraphQL variables slug: node.fields.slug, }, }); }); resolve(); }); }); };
Restructure major projects for spring evals 👷
from flask import Blueprint from flask import render_template from flask import request spring_evals_bp = Blueprint('spring_evals_bp', __name__) @spring_evals_bp.route('/spring_evals/') def display_spring_evals(): # get user data user_name = request.headers.get('x-webauth-user') members = [ { 'name': "Liam Middlebrook", 'uid': 'loothelion', 'committee_meetings': 24, 'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}], 'major_projects': [ { 'name': "open container", 'status': "Passed", 'description': "Riding With A Flask" }], 'major_projects_len': 1, 'major_project_passed': True, 'social_events': "", 'comments': "please don't fail me", 'result': 'Pending' }, { 'name': "Julien Eid", 'uid': 'jeid', 'committee_meetings': 69, 'house_meetings_missed': [], 'major_projects': [ { 'name': "wii-u shit", 'status': "Failed", 'description': "Rot 3 Encryption" }], 'major_projects_len': 1, 'major_project_passed': False, 'social_events': "Manipulation and Opportunism", 'comments': "imdabes", 'result': 'Passed' } ] # return names in 'first last (username)' format return render_template('spring_evals.html', username = user_name, members = members)
from flask import Blueprint from flask import render_template from flask import request spring_evals_bp = Blueprint('spring_evals_bp', __name__) @spring_evals_bp.route('/spring_evals/') def display_spring_evals(): # get user data user_name = request.headers.get('x-webauth-user') members = [ { 'name': "Liam Middlebrook", 'uid': 'loothelion', 'committee_meetings': 24, 'house_meetings_missed': [{'date': "aprial fools fayas ads", 'reason': "I was playing videogames"}], 'major_project': 'open_container', 'major_project_passed': True, 'social_events': "", 'comments': "please don't fail me", 'result': 'Pending' }, { 'name': "Julien Eid", 'uid': 'jeid', 'committee_meetings': 69, 'house_meetings_missed': [], 'major_project': 'wii-u shit', 'major_project_passed': True, 'social_events': "Manipulation and Opportunism", 'comments': "imdabes", 'result': 'Passed' } ] # return names in 'first last (username)' format return render_template('spring_evals.html', username = user_name, members = members)
Set up loggers after the configuration file is loaded
# coding: utf-8 ################################################################### # Copyright (c) 2016-2020 European Synchrotron Radiation Facility # # # # Author: Marius Retegan # # # # This work is licensed under the terms of the MIT license. # # For further information, see https://github.com/mretegan/crispy # ################################################################### """This module is the entry point to the application.""" import logging import sys import warnings from PyQt5.QtCore import QLocale from PyQt5.QtWidgets import QApplication from crispy.config import Config from crispy.gui.main import MainWindow from crispy.loggers import setUpLoggers logger = logging.getLogger("crispy.main") warnings.filterwarnings("ignore", category=UserWarning) def main(): app = QApplication([]) # This must be done after the application is instantiated. locale = QLocale(QLocale.C) locale.setNumberOptions(QLocale.OmitGroupSeparator) QLocale.setDefault(locale) config = Config() config.removeOldFiles() settings = config.read() # Set default values if the config file is empty or was not created. if not settings.allKeys(): logger.debug("Loading default settings.") config.loadDefaults() setUpLoggers() logger.info("Starting the application.") window = MainWindow() window.show() logger.info("Ready.") sys.exit(app.exec_()) if __name__ == "__main__": main()
# coding: utf-8 ################################################################### # Copyright (c) 2016-2020 European Synchrotron Radiation Facility # # # # Author: Marius Retegan # # # # This work is licensed under the terms of the MIT license. # # For further information, see https://github.com/mretegan/crispy # ################################################################### """This module is the entry point to the application.""" import logging import sys import warnings from PyQt5.QtCore import QLocale from PyQt5.QtWidgets import QApplication from crispy.config import Config from crispy.gui.main import MainWindow from crispy.loggers import setUpLoggers logger = logging.getLogger("crispy.main") warnings.filterwarnings("ignore", category=UserWarning) def main(): setUpLoggers() app = QApplication([]) # This must be done after the application is instantiated. locale = QLocale(QLocale.C) locale.setNumberOptions(QLocale.OmitGroupSeparator) QLocale.setDefault(locale) config = Config() config.removeOldFiles() settings = config.read() # Set default values if the config file is empty or was not created. if not settings.allKeys(): logger.debug("Loading default settings.") config.loadDefaults() logger.info("Starting the application.") window = MainWindow() window.show() logger.info("Ready.") sys.exit(app.exec_()) if __name__ == "__main__": main()
Convert the snapshot date to a format complying with ISO 8601 (see the sketch after this entry)
var PHRAGILE = PHRAGILE || {}; (function (PHRAGILE) { /** * Same as Graph but also renders graph areas under the line. * ProgressGraph will be limited to dates <= today. * @param {Object[]} data * @param {string} cssID - Its CSS identifier (used as class or id) * @param {string} label - Description text for the graph which will show in the label that appears when hovering * @constructor */ PHRAGILE.ProgressGraph = function (data, cssID, label) { data = data.filter(function (d) { var $snapshotDate = $('#snapshot-date'), filterDate = $snapshotDate.length > 0 ? Date.parse($snapshotDate.text().replace(' ', 'T')) : new Date(); return d.day <= filterDate; }); PHRAGILE.Graph.call(this, data, cssID, label); this.addGraphArea = function () { this.plane.append('path') .datum(this.data) .attr('class', 'graph-area') .attr('d', d3.svg.area() .x(PHRAGILE.Helpers.xOfDay) .y0(PHRAGILE.coordinateSystem.getY()(0)) .y1(PHRAGILE.Helpers.yOfPoints)); } }; PHRAGILE.ProgressGraph.prototype = new PHRAGILE.Graph; PHRAGILE.ProgressGraph.prototype.render = function () { PHRAGILE.Graph.prototype.render.call(this); this.addGraphArea(); }; })(PHRAGILE);
var PHRAGILE = PHRAGILE || {}; (function (PHRAGILE) { /** * Same as Graph but also renders graph areas under the line. * ProgressGraph will be limited to dates <= today. * @param {Object[]} data * @param {string} cssID - Its CSS identifier (used as class or id) * @param {string} label - Description text for the graph which will show in the label that appears when hovering * @constructor */ PHRAGILE.ProgressGraph = function (data, cssID, label) { data = data.filter(function (d) { var $snapshotDate = $('#snapshot-date'), filterDate = $snapshotDate.length > 0 ? Date.parse($snapshotDate.text()) : new Date(); return d.day <= filterDate; }); PHRAGILE.Graph.call(this, data, cssID, label); this.addGraphArea = function () { this.plane.append('path') .datum(this.data) .attr('class', 'graph-area') .attr('d', d3.svg.area() .x(PHRAGILE.Helpers.xOfDay) .y0(PHRAGILE.coordinateSystem.getY()(0)) .y1(PHRAGILE.Helpers.yOfPoints)); } }; PHRAGILE.ProgressGraph.prototype = new PHRAGILE.Graph; PHRAGILE.ProgressGraph.prototype.render = function () { PHRAGILE.Graph.prototype.render.call(this); this.addGraphArea(); }; })(PHRAGILE);
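A small check of why the replace(' ', 'T') is needed before Date.parse; the example timestamp is made up, and the NaN result depends on the JavaScript engine, which is exactly why the ISO 8601 form is safer.

const raw = '2016-03-01 10:00:00';      // space-separated – not ISO 8601

// Only the ISO 8601 form is guaranteed to parse; the space-separated form is
// implementation-defined and returns NaN in some engines.
console.log(Date.parse(raw));                    // engine-dependent, possibly NaN
console.log(Date.parse(raw.replace(' ', 'T')));  // a numeric timestamp everywhere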
Add a new engagement to a household only if one has not already been created (see the sketch after this entry)
export default (state = [], action) => { switch (action.type) { case "FETCH_HOUSEHOLDS_SUCCESS": { return action.households } case "ADD_HOUSEHOLD_SUCCESS": { return [...state, action.household] } case "CREATE_NOTE_SUCCESS": { return state.map( h => h.id === parseInt(action.note.household_id, 10) ? Object.assign({}, h, { notes: h.notes.concat(action.note) }) : h ) } case "ADD_MEAL_TO_HOUSEHOLD": { const newState = state.map(h => { if (h.id === action.householdId) { const updatedHousehold = Object.assign({}, h, { meal_ids: h.meal_ids.concat(action.mealId) }) return updatedHousehold } else { return h } }) return newState } case "CONVERT_LEAD_COMPLETE": { const newState = state.map( h => (h.id === action.client.id ? action.client : h) ) return newState } case "CREATE_ENGAGEMENT_SUCCESS": { return state.map(h => { if (h.id == action.engagement.household_id) { if (!h.engagement) { return Object.assign({}, h, { engagement: action.engagement }) } return h } else { return h } }) } default: { return state } } }
export default (state = [], action) => { switch (action.type) { case "FETCH_HOUSEHOLDS_SUCCESS": { return action.households } case "ADD_HOUSEHOLD_SUCCESS": { return [...state, action.household] } case "CREATE_NOTE_SUCCESS": { return state.map( h => h.id === parseInt(action.note.household_id, 10) ? Object.assign({}, h, { notes: h.notes.concat(action.note) }) : h ) } case "ADD_MEAL_TO_HOUSEHOLD": { const newState = state.map(h => { if (h.id === action.householdId) { const updatedHousehold = Object.assign({}, h, { meal_ids: h.meal_ids.concat(action.mealId) }) return updatedHousehold } else { return h } }) return newState } case "CONVERT_LEAD_COMPLETE": { const newState = state.map( h => (h.id === action.client.id ? action.client : h) ) return newState } case "CREATE_ENGAGEMENT_SUCCESS": { return state.map(h => { if (h.id == action.engagement.household_id) { return Object.assign({}, h, { engagement: action.engagement }) } else { return h } }) } default: { return state } } }
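A reduced sketch of the guard added in the reducer, pulled out as a plain function so the effect is easy to see; the household and engagement objects are made up.

// Only attach the incoming engagement when the household does not already
// have one, so a second CREATE_ENGAGEMENT_SUCCESS cannot clobber the first.
const attachEngagement = (household, engagement) =>
  household.id === engagement.household_id && !household.engagement
    ? Object.assign({}, household, { engagement })
    : household;

const first = attachEngagement({ id: 1 }, { id: 10, household_id: 1 });
const second = attachEngagement(first, { id: 11, household_id: 1 });
console.log(second.engagement.id); // 10 – the original engagement is kept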
Use TextReporter when in CLI mode.
<?php require_once('simpletest/unit_tester.php'); require_once('simpletest/reporter.php'); require_once(dirname(__FILE__).'/acceptance_test.php'); require_once(dirname(__FILE__).'/annotation_test.php'); require_once(dirname(__FILE__).'/constrained_annotation_test.php'); require_once(dirname(__FILE__).'/annotation_parser_test.php'); require_once(dirname(__FILE__).'/doc_comment_test.php'); class AllTests extends GroupTest { function __construct($title = false) { parent::__construct($title); $this->addTestClass('TestOfAnnotations'); $this->addTestClass('TestOfPerformanceFeatures'); $this->addTestClass('TestOfSupportingFeatures'); $this->addTestClass('TestOfAnnotation'); $this->addTestClass('TestOfConstrainedAnnotation'); $this->addTestClass('TestOfMatchers'); $this->addTestClass('TestOfAnnotationMatchers'); $this->addTestClass('TestOfDocComment'); } } $reporter = TextReporter::inCli() ? new TextReporter() : new HtmlReporter(); Addendum::setRawMode(false); $test = new AllTests('All tests in reflection mode'); $test->run($reporter); Addendum::setRawMode(true); $test = new AllTests('All tests in raw mode'); $test->run($reporter); ?>
<?php require_once('simpletest/unit_tester.php'); require_once('simpletest/reporter.php'); require_once(dirname(__FILE__).'/acceptance_test.php'); require_once(dirname(__FILE__).'/annotation_test.php'); require_once(dirname(__FILE__).'/constrained_annotation_test.php'); require_once(dirname(__FILE__).'/annotation_parser_test.php'); require_once(dirname(__FILE__).'/doc_comment_test.php'); class AllTests extends GroupTest { function __construct($title = false) { parent::__construct($title); $this->addTestClass('TestOfAnnotations'); $this->addTestClass('TestOfPerformanceFeatures'); $this->addTestClass('TestOfSupportingFeatures'); $this->addTestClass('TestOfAnnotation'); $this->addTestClass('TestOfConstrainedAnnotation'); $this->addTestClass('TestOfMatchers'); $this->addTestClass('TestOfAnnotationMatchers'); $this->addTestClass('TestOfDocComment'); } } Addendum::setRawMode(false); $test = new AllTests('All tests in reflection mode'); $test->run(new HtmlReporter()); Addendum::setRawMode(true); $test = new AllTests('All tests in raw mode'); $test->run(new HtmlReporter()); ?>
Set default database migration setting to 'safe'
/** * Default model configuration * (sails.config.models) * * Unless you override them, the following properties will be included * in each of your models. * * For more info on Sails models, see: * http://sailsjs.org/#/documentation/concepts/ORM */ module.exports.models = { /*************************************************************************** * * * Your app's default connection. i.e. the name of one of your app's * * connections (see `config/connections.js`) * * * ***************************************************************************/ // connection: 'localDiskDb', /*************************************************************************** * * * How and whether Sails will attempt to automatically rebuild the * * tables/collections/etc. in your schema. * * * * See http://sailsjs.org/#/documentation/concepts/ORM/model-settings.html * * * ***************************************************************************/ migrate: 'safe' };
/** * Default model configuration * (sails.config.models) * * Unless you override them, the following properties will be included * in each of your models. * * For more info on Sails models, see: * http://sailsjs.org/#/documentation/concepts/ORM */ module.exports.models = { /*************************************************************************** * * * Your app's default connection. i.e. the name of one of your app's * * connections (see `config/connections.js`) * * * ***************************************************************************/ // connection: 'localDiskDb', /*************************************************************************** * * * How and whether Sails will attempt to automatically rebuild the * * tables/collections/etc. in your schema. * * * * See http://sailsjs.org/#/documentation/concepts/ORM/model-settings.html * * * ***************************************************************************/ // migrate: 'alter' };
Switch to getting the API key from the URL instead of a config file. This allows other people to get their WaniKani calendar if they want.
from __future__ import absolute_import from django.http import HttpResponse from django.views.generic.base import View from icalendar import Calendar, Event from wanikani.core import WaniKani, Radical, Kanji class WaniKaniView(View): def get(self, request, **kwargs): client = WaniKani(kwargs['api_key']) level = client.profile()['level'] queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice']) cal = Calendar() cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//') cal.add('version', '2.0') for ts in sorted(queue): if not len(queue[ts]): continue counts = { Radical: 0, Kanji: 0, } for obj in queue[ts]: counts[obj.__class__] += 1 event = Event() event.add('summary', 'R: {0} K: {1}'.format( counts[Radical], counts[Kanji] )) event.add('dtstart', ts) event.add('dtend', ts) event['uid'] = str(ts) cal.add_component(event) return HttpResponse( content=cal.to_ical(), content_type='text/plain; charset=utf-8' )
from __future__ import absolute_import import os import logging from django.http import HttpResponse from django.views.generic.base import View from icalendar import Calendar, Event from wanikani.core import WaniKani, Radical, Kanji CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.wanikani') with open(CONFIG_PATH) as fp: API_KEY = fp.read() logger = logging.getLogger(__name__) class WaniKaniView(View): def get(self, request, *args, **kwargs): client = WaniKani(API_KEY) level = client.profile()['level'] queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice']) cal = Calendar() cal.add('prodid', '-//My calendar product//mxm.dk//') cal.add('version', '2.0') for ts in sorted(queue): if not len(queue[ts]): continue counts = { Radical: 0, Kanji: 0, } for obj in queue[ts]: counts[obj.__class__] += 1 event = Event() event.add('summary', 'R: {0} K: {1}'.format( counts[Radical], counts[Kanji] )) event.add('dtstart', ts) event.add('dtend', ts) event['uid'] = str(ts) cal.add_component(event) return HttpResponse( content=cal.to_ical(), content_type='text/plain; charset=utf-8' )
Fix handling of errors that are not Error instances (see the sketch after this entry)
var domain = require('domain'); var _ = require('lodash'); module.exports = function (app, templateVarService, appUtil) { return { setup: function () { app.use(function (req, res) { templateVarService.setupLocals(req, res); res.status(404).render('not-found'); }); app.use(function (err, req, res, next) { templateVarService.setupLocals(req, res); if (err instanceof appUtil.ValidationError) { res.status(403).json(err.fieldNameToMessage); } else if (err) { console.error(err.stack); res.locals.error = err; if (req.url.indexOf('/api/') === 0) { if (_.isError(err)) { res.status(500).send(err.stack.toString()); } else { res.status(500).send(err); } } else { res.status(500).render('error'); } } }) } }; };
var domain = require('domain'); module.exports = function (app, templateVarService, appUtil) { return { setup: function () { app.use(function (req, res) { templateVarService.setupLocals(req, res); res.status(404).render('not-found'); }); app.use(function (err, req, res, next) { templateVarService.setupLocals(req, res); if (err instanceof appUtil.ValidationError) { res.status(403).json(err.fieldNameToMessage); } else if (err) { console.error(err.stack); res.locals.error = err; if (req.url.indexOf('/api/') === 0) { res.status(500).send(err.stack.toString()); } else { res.status(500).render('error'); } } }) } }; };
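A minimal sketch of the branch the fix adds for API requests: a thrown string or plain object has no .stack, so the old err.stack.toString() call would itself throw. The error values below are made up.

const _ = require('lodash');

// Real Error instances are reported with their stack; anything else is sent as-is.
const describe = (err) => (_.isError(err) ? err.stack.toString() : err);

console.log(describe(new Error('boom')).split('\n')[0]); // 'Error: boom'
console.log(describe('plain string failure'));           // 'plain string failure'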
Trim pasted URL before submitting. Closes #14
function(context) { var app = $$(this).app; var url = $("#pasted_url").val(); if (url.length > 0) { idPos = url.indexOf("id="); if (idPos > -1) { url = url.substr(idPos + 3); } lastSlash = url.lastIndexOf("/"); if (lastSlash > -1) { url = url.substr(lastSlash + 1); } ampPos = url.indexOf("&"); if (ampPos > -1) { url = url.substr(0, ampPos); } guid = url.replace(/^\s+|\s+$/g,""); if (guid.length == 32) { app.db.openDoc(guid, { success: function(doc) { if (doc.type) { if (doc.type == 'Veteran') { window.open("vet_edit.html?vetid=" + doc._id, '_blank') } else if (doc.type == 'Guardian') { window.open("grd_edit.html?grdid=" + doc._id, '_blank') } else if (doc.type == 'Volunteer') { window.open("vol_edit.html?volid=" + doc._id, '_blank') } } } }); } else { alert('Invalid URL'); } } return false; }; //@ sourceURL=/finder/submit_pasted_url.js
function(context) { var app = $$(this).app; var url = $("#pasted_url").val(); if (url.length > 0) { idPos = url.indexOf("id="); if (idPos > -1) { url = url.substr(idPos + 3); } lastSlash = url.lastIndexOf("/"); if (lastSlash > -1) { url = url.substr(lastSlash + 1); } ampPos = url.indexOf("&"); if (ampPos > -1) { url = url.substr(0, ampPos); } if (url.length == 32) { app.db.openDoc(url, { success: function(doc) { if (doc.type) { if (doc.type == 'Veteran') { window.open("vet_edit.html?vetid=" + doc._id, '_blank') } else if (doc.type == 'Guardian') { window.open("grd_edit.html?grdid=" + doc._id, '_blank') } else if (doc.type == 'Volunteer') { window.open("vol_edit.html?volid=" + doc._id, '_blank') } } } }); } else { alert('Invalid URL'); } } return false; }; //@ sourceURL=/finder/submit_pasted_url.js
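A quick illustration of why the trim matters: a GUID pasted with stray whitespace fails the 32-character length check until the ends are stripped. The GUID below is made up, and the regexp is the same pre-trim() idiom used in the fix.

const pasted = ' 0123456789abcdef0123456789abcdef\n';

console.log(pasted.length);                       // 34 – rejected by the length check
const guid = pasted.replace(/^\s+|\s+$/g, '');
console.log(guid.length);                         // 32 – accepted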
Tweak for when SQS message is missing the eTag from a bucket notification.
from boto.sqs.message import Message import json from s3_notification_info import S3NotificationInfo class S3SQSMessage(Message): def __init__(self, queue=None, body='', xml_attrs=None): Message.__init__(self, queue, body) self.payload = None self.notification_type = 'S3Info' def event_name(self): return self.payload['Records'][0]['eventName'] def event_time(self): return self.payload['Records'][0]['eventTime'] def bucket_name(self): return self.payload['Records'][0]['s3']['bucket']['name'] def file_name(self): return self.payload['Records'][0]['s3']['object']['key'] def file_etag(self): if 'eTag' in self.payload['Records'][0]['s3']['object']: return self.payload['Records'][0]['s3']['object']['eTag'] else: return None def file_size(self): return self.payload['Records'][0]['s3']['object']['size'] def set_body(self, body): """ Override set_body to construct json payload Note Boto JSONMessage seemed to have encoding issues with S3 notification messages """ if body is not None and len(body) > 0: self.payload = json.loads(body) if body and 'Records' in self.payload.keys(): self.notification_type = 'S3Event' super(Message, self).set_body(body)
from boto.sqs.message import Message import json from s3_notification_info import S3NotificationInfo class S3SQSMessage(Message): def __init__(self, queue=None, body='', xml_attrs=None): Message.__init__(self, queue, body) self.payload = None self.notification_type = 'S3Info' def event_name(self): return self.payload['Records'][0]['eventName'] def event_time(self): return self.payload['Records'][0]['eventTime'] def bucket_name(self): return self.payload['Records'][0]['s3']['bucket']['name'] def file_name(self): return self.payload['Records'][0]['s3']['object']['key'] def file_etag(self): return self.payload['Records'][0]['s3']['object']['eTag'] def file_size(self): return self.payload['Records'][0]['s3']['object']['size'] def set_body(self, body): """ Override set_body to construct json payload Note Boto JSONMessage seemed to have encoding issues with S3 notification messages """ if body is not None and len(body) > 0: self.payload = json.loads(body) if body and 'Records' in self.payload.keys(): self.notification_type = 'S3Event' super(Message, self).set_body(body)
Use HTTPS URLs for MaxMind resources
from ooni.settings import config from ooni.utils import unzip, gunzip from ooni.deckgen.processors import citizenlab_test_lists from ooni.deckgen.processors import namebench_dns_servers config.read_config_file() __version__ = "0.0.1" inputs = { "namebench-dns-servers.csv": { "url": "https://namebench.googlecode.com/svn/trunk/config/servers.csv", "action": None, "action_args": [], "processor": namebench_dns_servers, }, "citizenlab-test-lists.zip": { "url": "https://github.com/citizenlab/test-lists/archive/master.zip", "action": unzip, "action_args": [config.resources_directory], "processor": citizenlab_test_lists } } geoip = { "GeoIPASNum.dat.gz": { "url": "https://www.maxmind.com/download/" "geoip/database/asnum/GeoIPASNum.dat.gz", "action": gunzip, "action_args": [config.advanced.geoip_data_dir], "processor": None }, "GeoIP.dat.gz": { "url": "https://geolite.maxmind.com/" "download/geoip/database/GeoLiteCountry/GeoIP.dat.gz", "action": gunzip, "action_args": [config.advanced.geoip_data_dir], "processor": None } }
from ooni.settings import config from ooni.utils import unzip, gunzip from ooni.deckgen.processors import citizenlab_test_lists from ooni.deckgen.processors import namebench_dns_servers config.read_config_file() __version__ = "0.0.1" inputs = { "namebench-dns-servers.csv": { "url": "https://namebench.googlecode.com/svn/trunk/config/servers.csv", "action": None, "action_args": [], "processor": namebench_dns_servers, }, "citizenlab-test-lists.zip": { "url": "https://github.com/citizenlab/test-lists/archive/master.zip", "action": unzip, "action_args": [config.resources_directory], "processor": citizenlab_test_lists } } geoip = { "GeoIPASNum.dat.gz": { "url": "http://www.maxmind.com/download/" "geoip/database/asnum/GeoIPASNum.dat.gz", "action": gunzip, "action_args": [config.advanced.geoip_data_dir], "processor": None }, "GeoIP.dat.gz": { "url": "http://geolite.maxmind.com/" "download/geoip/database/GeoLiteCountry/GeoIP.dat.gz", "action": gunzip, "action_args": [config.advanced.geoip_data_dir], "processor": None } }
Add license to the packaging
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='fabtools', version='0.1', description='Tools for writing awesome Fabric files', author='Ronan Amicel', author_email='[email protected]', url='http://github.com/ronnix/fabtools', license='BSD', install_requires=[ "fabric>=1.2.0", ], setup_requires=[], tests_require=[ "unittest2", "mock", ], packages=find_packages(exclude=['ez_setup', 'tests']), include_package_data=True, zip_safe=False, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: BSD License', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Software Distribution', 'Topic :: System :: Systems Administration', ], )
try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages setup( name='fabtools', version='0.1', description='Tools for writing awesome Fabric files', author='Ronan Amicel', author_email='[email protected]', url='http://github.com/ronnix/fabtools', install_requires=[ "fabric>=1.2.0", ], setup_requires=[], tests_require=[ "unittest2", "mock", ], packages=find_packages(exclude=['ez_setup', 'tests']), include_package_data=True, zip_safe=False, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: BSD License', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Unix', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Libraries', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Software Distribution', 'Topic :: System :: Systems Administration', ], )
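Once a package built from the setup above is installed, the new `license` field is visible through standard metadata; the quick check below is only illustrative and assumes Python 3.8+ plus an installed `fabtools` distribution.

```python
# Assumes Python 3.8+ and that a package built from the setup above is
# installed in the current environment.
from importlib.metadata import metadata

print(metadata("fabtools")["License"])  # expected: BSD
```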
Use the sync version of unlink to try to resolve spurious Travis failures

Travis can fail random tests with:

    Error: ENOENT: no such file or directory, rename 'spec-db/users.db~' -> 'spec-db/users.db'
        at Error (native)

This isn't due to double `unlink()` calls (else the operation would be "unlink" and not "rename"). We don't ever call `fs.rename`. It looks like Travis is doing something weird, or maybe a later version of Node is? A quick look through the Node source didn't show anything interesting. We don't ever use the tilde suffix either.

The error is **impossible** to catch because it's on a different turn of the event loop and we didn't call `rename()`, so we can't make it not throw by adding an `errback`, nor can we `try/catch` because it's async.

Swap to using `unlinkSync` in the vain hope that it will mean that `rename` will also be called synchronously and therefore can be wrapped in `try/catch`.
/* * Helper class for cleaning nedb state */ "use strict"; var Promise = require("bluebird"); var fs = require("fs"); /** * Reset the database, wiping all data. * @param {String} databaseUri : The database URI to wipe all data from. * @return {Promise} Which is resolved when the database has been cleared. */ module.exports._reset = function(databaseUri) { if (databaseUri.indexOf("nedb://") !== 0) { return Promise.reject("Must be nedb:// URI"); } var baseDbName = databaseUri.substring("nedb://".length); function delDatabase(name) { var dbPath = baseDbName + name; return new Promise(function(resolve, reject) { try { // nuke the world fs.unlinkSync(dbPath); resolve(); } catch (err) { if (err.code === "ENOENT") { resolve(); // already deleted } else { reject(err); } } }); } return Promise.all([ delDatabase("/rooms.db"), delDatabase("/users.db"), ]); };
/* * Helper class for cleaning nedb state */ "use strict"; var Promise = require("bluebird"); var fs = require("fs"); /** * Reset the database, wiping all data. * @param {String} databaseUri : The database URI to wipe all data from. * @return {Promise} Which is resolved when the database has been cleared. */ module.exports._reset = function(databaseUri) { if (databaseUri.indexOf("nedb://") !== 0) { return Promise.reject("Must be nedb:// URI"); } var baseDbName = databaseUri.substring("nedb://".length); function delDatabase(name) { var dbPath = baseDbName + name; return new Promise(function(resolve, reject) { // nuke the world fs.unlink(dbPath, function(err) { if (err) { if (err.code == "ENOENT") { // already deleted resolve(); } else { reject(err); } } else { resolve(); } }); }); } return Promise.all([ delDatabase("/rooms.db"), delDatabase("/users.db"), ]); };
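The reasoning above hinges on synchronous failures being catchable at the call site; the same ENOENT-tolerant pattern looks like this in Python, shown only as a language-neutral sketch of the idea rather than anything the original project uses.

```python
import os

def delete_if_exists(path):
    # Synchronous delete: a missing file is treated as already deleted,
    # mirroring the ENOENT handling in the unlinkSync version above.
    try:
        os.unlink(path)
    except FileNotFoundError:
        pass

delete_if_exists("/tmp/does-not-exist.db")
print("missing file ignored")
```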
Update to newer Asserts to eliminate deprecation warning
package io.hawt.keystore; import org.junit.Test; import java.io.IOException; import static org.junit.Assert.*; /** * */ public class KeystoreServiceTest { @Test public void test() throws IOException { KeystoreService service = new KeystoreService(); SecurityProviderDTO info = service.getSecurityProviderInfo(); System.out.println(info); assertNotNull(info); assertTrue(info.supportedKeyAlgorithms.length > 0); assertTrue(info.supportedKeyStoreTypes.length > 0); CreateKeyStoreRequestDTO request = new CreateKeyStoreRequestDTO(); request.storePassword = "password"; request.storeType = "JKS"; request.createPrivateKey = true; request.keyCommonName = "localhost"; request.keyLength = 1024; request.keyAlgorithm = "RSA"; request.keyValidity = 365; request.keyPassword = "password"; byte[] data = service.createKeyStore(request); assertNotNull(data); assertTrue(data.length > 0); request = new CreateKeyStoreRequestDTO(); request.storePassword = "password"; request.storeType = "JKS"; request.createPrivateKey = false; data = service.createKeyStore(request); assertNotNull(data); assertTrue(data.length > 0); } }
package io.hawt.keystore; import org.junit.Test; import java.io.IOException; import static junit.framework.Assert.assertNotNull; import static junit.framework.Assert.assertTrue; /** * */ public class KeystoreServiceTest { @Test public void test() throws IOException { KeystoreService service = new KeystoreService(); SecurityProviderDTO info = service.getSecurityProviderInfo(); System.out.println(info); assertNotNull(info); assertTrue(info.supportedKeyAlgorithms.length > 0); assertTrue(info.supportedKeyStoreTypes.length > 0); CreateKeyStoreRequestDTO request = new CreateKeyStoreRequestDTO(); request.storePassword = "password"; request.storeType = "JKS"; request.createPrivateKey = true; request.keyCommonName = "localhost"; request.keyLength = 1024; request.keyAlgorithm = "RSA"; request.keyValidity = 365; request.keyPassword = "password"; byte[] data = service.createKeyStore(request); assertNotNull(data); assertTrue(data.length > 0); request = new CreateKeyStoreRequestDTO(); request.storePassword = "password"; request.storeType = "JKS"; request.createPrivateKey = false; data = service.createKeyStore(request); assertNotNull(data); assertTrue(data.length > 0); } }
Fix multiple bugs in download block
<div class="felix-item-title felix-item-title felix-item-title-generic"> <h3>Download the latest <i>Felix</i></h3> </div> <br> <?php $link = new ArchiveLink(); $issue = $link->getLatestForPublication(1); if($issue): try { // prime issue $issue->getThumbnailURL(); ?> <div class="row"> <div class="small-4 columns"> <center> <a href="<?php echo $issue->getDownloadURL(); ?>" class="thumbLink"> <img src="<?php echo $issue->getThumbnailURL();?>" alt="<?php echo $issue->getId();?>"/> </a> </center> </div> <div class="small-8 columns"> <p> <b><?php echo date("l jS F", strtotime($issue->getPubDate())); ?></b> <br>Issue <?php echo $issue->getIssueNo(); ?><br> <a href="<?php echo STANDARD_URL.'archive'; ?>">More issues</a> </p> </div> </div> <?php } catch(\FelixOnline\Exceptions\InternalException $e) { echo '<p><b>Sorry, we are having some trouble loading this issue '.$issue->getIssueNo().'. Please try again later.</b></p>'; } else: echo '<p>No issues found.</p>'; endif; ?> <br>
<div class="felix-item-title felix-item-title felix-item-title-generic"> <h3>Download the latest <i>Felix</i></h3> </div> <br> <?php $link = new ArchiveLink(); $issue = $link->getLatestForPublication(1); if($issue): try { // prime issue $issue->getThumbnailURL(); ?> <div class="row"> <div class="small-4 columns"> <center> <a href="<a href="<?php echo $issue->getDownloadURL(); ?>" class="thumbLink"> <img src="<?php echo $issue->getThumbnailURL();?>" alt="<?php echo $issue->getId();?>"/> </a> </center> </div> <div class="small-8 columns"> <p> <b><?php echo date("l jS F", strtotime($issue->getPubDate())); ?></b> <br>Issue <?php echo $issue->getIssueNo(); ?><br> <a href="<?php echo STANDARD_URL.'/archive'; ?>">More issues</a> </p> </div> </div> <?php } catch(\FelixOnline\Exceptions\InternalException $e) { echo '<p><b>Sorry, we are having some trouble loading this issue '.$issue->getIssueNo().'. Please try again later.</b></p>'; } else: echo '<p>No issues found.</p>'; endif; ?>
Debug print each health check
# -*- coding: utf-8 -*- import requests class Healthcheck: def __init__(self): pass def _result(self, site, health, response=None, message=None): result = { "name": site["name"], "health": health } if message: result["message"] = message if response is not None: result["status"] = response.status_code result["response_time_ms"] = int(response.elapsed.total_seconds() * 1000) return result def check_site(self, site): response = None try: print(f"Checking site {site['name']}") response = requests.get(site["url"]) if response.status_code not in site["acceptable_statuses"]: print("Bad status code: {}".format(response.status_code)) return self._result(site, "DOWN", response, "Unacceptable status code") for mandatory_string in site.get("mandatory_strings", []): if mandatory_string not in response.text: print("String not found in response: " + mandatory_string) return self._result(site, "DOWN", response, "String not found in response: {}".format(mandatory_string)) return self._result(site, "UP", response) except Exception as err: print(err) return self._result(site, "UNKNOWN", response, "Exception while trying to check site health: {}".format(err))
# -*- coding: utf-8 -*- import requests class Healthcheck: def __init__(self): pass def _result(self, site, health, response=None, message=None): result = { "name": site["name"], "health": health } if message: result["message"] = message if response is not None: result["status"] = response.status_code result["response_time_ms"] = int(response.elapsed.total_seconds() * 1000) return result def check_site(self, site): response = None try: response = requests.get(site["url"]) if response.status_code not in site["acceptable_statuses"]: print("Bad status code: {}".format(response.status_code)) return self._result(site, "DOWN", response, "Unacceptable status code") for mandatory_string in site.get("mandatory_strings", []): if mandatory_string not in response.text: print("String not found in response: " + mandatory_string) return self._result(site, "DOWN", response, "String not found in response: {}".format(mandatory_string)) return self._result(site, "UP", response) except Exception as err: print(err) return self._result(site, "UNKNOWN", response, "Exception while trying to check site health: {}".format(err))
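The only change in the pair above is the per-site debug line; if levelled output were ever preferred over a bare print, the standard logging module gives the same one-liner. This is sketched below as an alternative, not as what Healthcheck actually does.

```python
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
log = logging.getLogger("healthcheck")

site = {"name": "example"}  # hypothetical site entry
log.debug("Checking site %s", site["name"])
```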
Call CourseInstructorPanel only when there are instructors
var CourseCardContentPanel = { render: function (c_section, fetched_eval) { var eval_data = (fetched_eval? WSData.iasystem_data(): null); var index = c_section.index; // Determine if we have valid eval data for the section var has_valid_eval = (eval_data && eval_data.sections && eval_data.sections.length > 0 && eval_data.sections[index] && eval_data.sections[index].evaluation_data && eval_data.sections[index].evaluation_data.length > 0 ? true : false); c_section.has_eval = has_valid_eval; c_section.evals = (has_valid_eval ? eval_data.sections[index].evaluation_data : null); // Determine if there was an err when fetching the section eval data var eval_data_err = (fetched_eval && !has_valid_eval && eval_data && eval_data.sections[index].evaluation_data === null ? true :false); c_section.eval_data_err = eval_data_err; var source = $("#course_card_content_panel").html(); var template = Handlebars.compile(source); var raw = template(c_section); $('#course_card_content' + index).html(raw); if (has_valid_eval) { CourseEvalPanel.render(c_section); } CourseSchePanel.render(c_section); CourseResourcePanel.render(c_section); if (c_section.instructors) { CourseInstructorPanel.render(c_section); } } };
var CourseCardContentPanel = { render: function (c_section, fetched_eval) { var eval_data = (fetched_eval? WSData.iasystem_data(): null); var index = c_section.index; // Determine if we have valid eval data for the section var has_valid_eval = (eval_data && eval_data.sections && eval_data.sections.length > 0 && eval_data.sections[index] && eval_data.sections[index].evaluation_data && eval_data.sections[index].evaluation_data.length > 0 ? true : false); c_section.has_eval = has_valid_eval; c_section.evals = (has_valid_eval ? eval_data.sections[index].evaluation_data : null); // Determine if there was an err when fetching the section eval data var eval_data_err = (fetched_eval && !has_valid_eval && eval_data && eval_data.sections[index].evaluation_data === null ? true :false); c_section.eval_data_err = eval_data_err; var source = $("#course_card_content_panel").html(); var template = Handlebars.compile(source); var raw = template(c_section); $('#course_card_content' + index).html(raw); if (has_valid_eval) { CourseEvalPanel.render(c_section); } CourseSchePanel.render(c_section); CourseResourcePanel.render(c_section); CourseInstructorPanel.render(c_section); } };
Fix base url for swagger
<?php namespace OParl\Website\Api\Controllers; use function Swagger\scan; /** * @SWG\Swagger( * schemes={"https"}, * host=SWAGGER_API_HOST, * basePath="/api/", * @SWG\Info( * title="OParl Developer Platform API", * description="Meta information concerning the OParl ecosystem", * version="0", * @SWG\License( * name="CC-4.0-BY", * url="https://creativecommons.org/licenses/by/4.0/" * ) * ), * produces={ "application/json" } * ) */ class ApiController { /** * Return the dynamically updated swagger.json for the meta endpoints. * * @return \Symfony\Component\HttpFoundation\Response */ public function swaggerJson() { define('SWAGGER_API_HOST', 'dev.' . config('app.url')); $swagger = scan([base_path('lib/Api/Controllers'), app_path('Model')]); return response($swagger, 200, [ 'Content-Type' => 'application/json', 'Access-Control-Allow-Origin' => '*', ]); } /** * Index page for the api. * * @return \Symfony\Component\HttpFoundation\Response */ public function index() { return view('api.index'); } }
<?php namespace OParl\Website\Api\Controllers; use function Swagger\scan; /** * @SWG\Swagger( * schemes={"https"}, * host="dev.oparl.org", * basePath="/api/", * @SWG\Info( * title="OParl Developer Platform API", * description="Meta information concerning the OParl ecosystem", * version="0", * @SWG\License( * name="CC-4.0-BY", * url="https://creativecommons.org/licenses/by/4.0/" * ) * ), * produces={ "application/json" } * ) */ class ApiController { /** * Return the dynamically updated swagger.json for the meta endpoints. * * @return \Symfony\Component\HttpFoundation\Response */ public function swaggerJson() { $swagger = scan([base_path('lib/Api/Controllers'), app_path('Model')]); return response($swagger, 200, [ 'Content-Type' => 'application/json', 'Access-Control-Allow-Origin' => '*', ]); } /** * Index page for the api. * * @return \Symfony\Component\HttpFoundation\Response */ public function index() { return view('api.index'); } }
Use same versions as requirements.pip to prevent unexpected upgrades of dependencies
# -*- coding: utf-8 -*- """ pyxform - Python library that converts XLSForms to XForms. """ from setuptools import find_packages, setup REQUIRES = [ "xlrd==1.2.0", "unicodecsv==0.14.1", "formencode==1.3.1", "unittest2==1.1.0", 'functools32==3.2.3.post2 ; python_version < "3.2"', ] setup( name="pyxform", version="1.3.2", author="github.com/xlsform", author_email="[email protected]", packages=find_packages(), package_data={ "pyxform.validators.odk_validate": ["bin/*.*"], "pyxform.tests": [ "example_xls/*.*", "fixtures/strings.ini", "bug_example_xls/*.*", "test_output/*.*", "test_expected_output/*.*", "validators/.*", "validators/data/*.*", "validators/data/.*", ], "pyxform": ["iana_subtags.txt"], }, url="http://pypi.python.org/pypi/pyxform/", description="A Python package to create XForms for ODK Collect.", long_description=open("README.rst", "rt").read(), install_requires=REQUIRES, entry_points={ "console_scripts": [ "xls2xform=pyxform.xls2xform:main_cli", "pyxform_validator_update=pyxform.validators.updater:main_cli", ] }, )
# -*- coding: utf-8 -*- """ pyxform - Python library that converts XLSForms to XForms. """ from setuptools import find_packages, setup REQUIRES = [ "xlrd>=1.1.0", "unicodecsv>=0.14.1", "formencode", "unittest2", 'functools32==3.2.3.post2 ; python_version < "3.2"', ] setup( name="pyxform", version="1.3.2", author="github.com/xlsform", author_email="[email protected]", packages=find_packages(), package_data={ "pyxform.validators.odk_validate": ["bin/*.*"], "pyxform.tests": [ "example_xls/*.*", "fixtures/strings.ini", "bug_example_xls/*.*", "test_output/*.*", "test_expected_output/*.*", "validators/.*", "validators/data/*.*", "validators/data/.*", ], "pyxform": ["iana_subtags.txt"], }, url="http://pypi.python.org/pypi/pyxform/", description="A Python package to create XForms for ODK Collect.", long_description=open("README.rst", "rt").read(), install_requires=REQUIRES, entry_points={ "console_scripts": [ "xls2xform=pyxform.xls2xform:main_cli", "pyxform_validator_update=pyxform.validators.updater:main_cli", ] }, )
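The motivation for pinning shows up directly in version specifiers: an open-ended range accepts future releases, an exact pin does not. The snippet assumes the third-party `packaging` library is available; setup.py itself does not use it.

```python
from packaging.specifiers import SpecifierSet

range_spec = SpecifierSet(">=1.1.0")   # old, open-ended requirement
pinned_spec = SpecifierSet("==1.2.0")  # new, pinned requirement

print(range_spec.contains("2.0.0"))    # True: a future 2.0.0 would slip in
print(pinned_spec.contains("2.0.0"))   # False: only 1.2.0 satisfies the pin
```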
Add pytest to surveymonkey dependencies
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup with open('README.rst') as readme_file: readme = readme_file.read() requirements = [ 'furl==0.5.6', 'six==1.10.0', 'pytest==3.0.3' ] test_requirements = [ # TODO: put package test requirements here ] setup( name='surveymonkey', version='0.4.2', description="Python wrapper for the Survey Monkey v3 API", long_description=readme, author="Aaron Bassett", author_email='[email protected]', url='https://github.com/Administrate/surveymonkey', packages=[ 'surveymonkey', 'surveymonkey.collectors', 'surveymonkey.webhooks', 'surveymonkey.messages' ], package_dir={'surveymonkey': 'surveymonkey'}, include_package_data=True, install_requires=requirements, license="MIT license", zip_safe=False, keywords='surveymonkey', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], test_suite='tests', tests_require=test_requirements )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup with open('README.rst') as readme_file: readme = readme_file.read() requirements = [ 'furl==0.5.6', 'six==1.10.0' ] test_requirements = [ # TODO: put package test requirements here ] setup( name='surveymonkey', version='0.4.2', description="Python wrapper for the Survey Monkey v3 API", long_description=readme, author="Aaron Bassett", author_email='[email protected]', url='https://github.com/Administrate/surveymonkey', packages=[ 'surveymonkey', 'surveymonkey.collectors', 'surveymonkey.webhooks', 'surveymonkey.messages' ], package_dir={'surveymonkey': 'surveymonkey'}, include_package_data=True, install_requires=requirements, license="MIT license", zip_safe=False, keywords='surveymonkey', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], test_suite='tests', tests_require=test_requirements )
Remove through field check, as we now check the entire through chain
<?php namespace AlgoWeb\PODataLaravel\Models\ObjectMap\Entities\Associations; class AssociationStubMonomorphic extends AssociationStubBase { /** * @param \AlgoWeb\PODataLaravel\Models\ObjectMap\Entities\Associations\AssociationStubBase $otherStub * * @return bool */ public function isCompatible(AssociationStubBase $otherStub) { if (!parent::isCompatible($otherStub)) { return false; } $thisChain = $this->getThroughFieldChain(); $otherChain = $otherStub->getThroughFieldChain(); $thisThroughCount = count($thisChain) - 1; $otherThroughCount = count($otherChain) - 1; if ($thisThroughCount !== $otherThroughCount) { return false; } for ($i=0; $i <= $thisThroughCount;++$i) { if ($thisChain[$i] !== $otherChain[$otherThroughCount -$i]) { return false; } } return ($this->getTargType() === $otherStub->getBaseType()) && ($this->getBaseType() === $otherStub->getTargType()) && ($this->getForeignField() === $otherStub->getKeyField()) && ($this->getKeyField() === $otherStub->getForeignField()); } /** * {@inheritdoc} */ public function morphicType() { return 'monomorphic'; } }
<?php namespace AlgoWeb\PODataLaravel\Models\ObjectMap\Entities\Associations; class AssociationStubMonomorphic extends AssociationStubBase { /** * @param \AlgoWeb\PODataLaravel\Models\ObjectMap\Entities\Associations\AssociationStubBase $otherStub * * @return bool */ public function isCompatible(AssociationStubBase $otherStub) { if (!parent::isCompatible($otherStub)) { return false; } $isNull = null == $this->getThroughField(); $otherNull = null == $otherStub->getThroughField(); if ($isNull != $otherNull) { return false; } $thisChain = $this->getThroughFieldChain(); $otherChain = $otherStub->getThroughFieldChain(); $thisThroughCount = count($thisChain) - 1; $otherThroughCount = count($otherChain) - 1; if ($thisThroughCount !== $otherThroughCount) { return false; } for ($i=0; $i <= $thisThroughCount;++$i) { if ($thisChain[$i] !== $otherChain[$otherThroughCount -$i]) { return false; } } return ($this->getTargType() === $otherStub->getBaseType()) && ($this->getBaseType() === $otherStub->getTargType()) && ($this->getForeignField() === $otherStub->getKeyField()) && ($this->getKeyField() === $otherStub->getForeignField()); } /** * {@inheritdoc} */ public function morphicType() { return 'monomorphic'; } }
Disable Postgres cache by default
/** * Base class for Postgres repositories * @module arpen/repositories/postgres */ const path = require('path'); const BaseRepository = require('./base'); /** * Repository base class */ class PostgresRepository extends BaseRepository { /** * Create repository * @param {App} app The application * @param {Postgres} postgres Postgres service * @param {Cacher} cacher Cacher service * @param {Util} util Util service */ constructor(app, postgres, cacher, util) { super(app, util); this._postgres = postgres; this._cacher = cacher; this._enableCache = false; this._loadMethods(path.join(__dirname, 'postgres')); } /** * Service name is 'repositories.postgres' * @type {string} */ static get provides() { return 'repositories.postgres'; } /** * Dependencies as constructor arguments * @type {string[]} */ static get requires() { return [ 'app', 'postgres', 'cacher', 'util' ]; } } module.exports = PostgresRepository;
/** * Base class for Postgres repositories * @module arpen/repositories/postgres */ const path = require('path'); const BaseRepository = require('./base'); /** * Repository base class */ class PostgresRepository extends BaseRepository { /** * Create repository * @param {App} app The application * @param {Postgres} postgres Postgres service * @param {Cacher} cacher Cacher service * @param {Util} util Util service */ constructor(app, postgres, cacher, util) { super(app, util); this._postgres = postgres; this._cacher = cacher; this._enableCache = true; this._loadMethods(path.join(__dirname, 'postgres')); } /** * Service name is 'repositories.postgres' * @type {string} */ static get provides() { return 'repositories.postgres'; } /** * Dependencies as constructor arguments * @type {string[]} */ static get requires() { return [ 'app', 'postgres', 'cacher', 'util' ]; } } module.exports = PostgresRepository;
Improve selection of the Recipe
package com.sb.elsinore.html; import com.sb.elsinore.BrewServer; import org.rendersnake.HtmlCanvas; import org.rendersnake.Renderable; import java.io.IOException; import java.util.ArrayList; import static org.rendersnake.HtmlAttributesFactory.id; import static org.rendersnake.HtmlAttributesFactory.name; import static org.rendersnake.HtmlAttributesFactory.value; /** * Render a form allowing selection of the recipe. * Created by Doug Edey on 07/02/15. */ public class RecipeListForm implements Renderable { @Override public void renderOn(HtmlCanvas htmlCanvas) throws IOException { ArrayList<String> recipeList = BrewServer.getRecipeList(); htmlCanvas.div(id("selectRecipeForm")) .form() .select(name("name").class_("holo-spinner") .onClick("setRecipe(this);")); htmlCanvas.option(value("").selected_if(true)) .write("Select Recipe") ._option(); for (String entry: recipeList) { htmlCanvas.option(value(entry)) .write(entry) ._option(); } htmlCanvas._select() ._form() ._div() .div(id("recipeContent"))._div(); } }
package com.sb.elsinore.html; import com.sb.elsinore.BrewServer; import org.rendersnake.HtmlCanvas; import org.rendersnake.Renderable; import java.io.IOException; import java.util.ArrayList; import java.util.Map; import static org.rendersnake.HtmlAttributesFactory.id; import static org.rendersnake.HtmlAttributesFactory.name; import static org.rendersnake.HtmlAttributesFactory.value; /** * Created by doug on 07/02/15. */ public class RecipeListForm implements Renderable { @Override public void renderOn(HtmlCanvas htmlCanvas) throws IOException { ArrayList<String> recipeList = BrewServer.getRecipeList(); htmlCanvas.div(id("newTriggersForm")) .form() .select(name("type").class_("holo-spinner") .onClick("setRecipe(this);")); htmlCanvas.option(value("").selected_if(true)) .write("Select Recipe Type") ._option(); for (String entry: recipeList) { htmlCanvas.option(value(entry)) .write(entry) ._option(); } htmlCanvas._select() ._form() ._div() .div(id("recipeContent"))._div(); } }
Handle non-existent files in the database.
#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) if datafile is None: continue df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
#!/usr/bin/env python import rethinkdb as r import optparse if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option("-p", "--port", dest="port", help="rethinkdb port", default=30815) (options, args) = parser.parse_args() conn = r.connect('localhost', int(options.port), db='materialscommons') selection = list(r.table('datadirs').run(conn)) for datadir in selection: print "Updating datadir %s" % (datadir['name']) ddir = {} ddir['id'] = datadir['id'] ddir['name'] = datadir['name'] ddir['owner'] = datadir['owner'] ddir['birthtime'] = datadir['birthtime'] ddir['datafiles'] = [] for dfid in datadir['datafiles']: datafile = r.table('datafiles').get(dfid).run(conn) df = {} df['id'] = datafile['id'] df['name'] = datafile['name'] df['owner'] = datafile['owner'] df['birthtime'] = datafile['birthtime'] df['size'] = datafile['size'] df['checksum'] = datafile['checksum'] ddir['datafiles'].append(df) r.table('datadirs_denorm').insert(ddir).run(conn)
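The fix above boils down to tolerating dangling datafile ids; the standalone sketch below reproduces that guard with a plain dict standing in for the RethinkDB table, so the ids and records are made up for illustration.

```python
# Plain-dict stand-in for r.table('datafiles'); ids and records are made up.
datafiles = {"df1": {"name": "a.txt", "size": 10}}
referenced_ids = ["df1", "df-missing"]

resolved = []
for dfid in referenced_ids:
    datafile = datafiles.get(dfid)   # returns None for a missing id, like the driver
    if datafile is None:
        continue                     # skip references to deleted files
    resolved.append(datafile["name"])

print(resolved)  # ['a.txt']
```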
Remove active flag from all options before setting current. Rapidly changing presets could result in multiple options selected simultaneously.
function updateColor(input) { var value = input.val(); console.log("Updating to " + value + ".\n"); input.data('updating', true); $.post("/pixel/" + input.attr('id'), { color: value, immediate: "false" }) .always(function () { input.data('updating', false); var next_value = input.val(); if (next_value !== value) { console.log("Updating to " + next_value + " after POST.\n"); updateColor(input); } }); } $( document ).ready(function () { $(".rgb").change(function () { var input = $( this ); if (input.data('updating')) { console.log("Waiting...\n"); return; } updateColor(input); }); $(".preset-btn").click(function () { var preset = $( this ); $.post("/preset/apply", { preset: preset.attr('id') }) .always(function () { $(".preset-btn").removeClass("active"); preset.addClass("active"); }); }); $(".preset-save-btn").click(function () { var name = prompt("Preset name:"); if (name != null) { $.post("/preset/save", { name: name }); } }); $(".preset-remove-btn").click(function () { var name = $( this ).parent().attr("id"); if (confirm("Are you sure you want to delete \"" + name + "\"?")) { $.post("/preset/delete", { name: name }); } }); });
function updateColor(input) { var value = input.val(); console.log("Updating to " + value + ".\n"); input.data('updating', true); $.post("/pixel/" + input.attr('id'), { color: value, immediate: "false" }) .always(function () { input.data('updating', false); var next_value = input.val(); if (next_value !== value) { console.log("Updating to " + next_value + " after POST.\n"); updateColor(input); } }); } $( document ).ready(function () { $(".rgb").change(function () { var input = $( this ); if (input.data('updating')) { console.log("Waiting...\n"); return; } updateColor(input); }); $(".preset-btn").click(function () { var preset = $( this ); var oldPreset = $(".preset-btn.active"); $.post("/preset/apply", { preset: preset.attr('id') }) .always(function () { preset.addClass("active"); oldPreset.removeClass("active"); }); }); $(".preset-save-btn").click(function () { var name = prompt("Preset name:"); if (name != null) { $.post("/preset/save", { name: name }); } }); $(".preset-remove-btn").click(function () { var name = $( this ).parent().attr("id"); if (confirm("Are you sure you want to delete \"" + name + "\"?")) { $.post("/preset/delete", { name: name }); } }); });
Add all versions of conversion doctests
import pytest def pytest_collection_modifyitems(config, items): try: import pandas except ImportError: pandas = None try: import cesium except ImportError: cesium = None if pandas is None: skip_marker = pytest.mark.skip(reason="pandas not installed!") for item in items: if item.name in [ "tslearn.utils.from_tsfresh_dataset", "tslearn.utils.to_tsfresh_dataset", "tslearn.utils.from_sktime_dataset", "tslearn.utils.to_sktime_dataset", "tslearn.utils.from_pyflux_dataset", "tslearn.utils.to_pyflux_dataset", "tslearn.utils.from_cesium_dataset", "tslearn.utils.to_cesium_dataset", ]: item.add_marker(skip_marker) elif cesium is None: skip_marker = pytest.mark.skip(reason="cesium not installed!") for item in items: if item.name in [ "tslearn.utils.to_cesium_dataset", "tslearn.utils.from_cesium_dataset", ]: item.add_marker(skip_marker)
import pytest def pytest_collection_modifyitems(config, items): try: import pandas except ImportError: pandas = None try: import cesium except ImportError: cesium = None if pandas is None: skip_marker = pytest.mark.skip(reason="pandas not installed!") for item in items: if item.name in [ "tslearn.utils.from_tsfresh_dataset", "tslearn.utils.from_sktime_dataset", "tslearn.utils.from_pyflux_dataset", "tslearn.utils.to_cesium_dataset", "tslearn.utils.from_cesium_dataset", ]: item.add_marker(skip_marker) elif cesium is None: skip_marker = pytest.mark.skip(reason="cesium not installed!") for item in items: if item.name in [ "tslearn.utils.to_cesium_dataset", "tslearn.utils.from_cesium_dataset", ]: item.add_marker(skip_marker)
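The conftest above skips doctests by name; when an optional dependency governs ordinary test functions instead, the same idea is usually written with `skipif`, shown here as a related pattern rather than a drop-in replacement for the collection hook.

```python
import pytest

try:
    import pandas  # noqa: F401
except ImportError:
    pandas = None

requires_pandas = pytest.mark.skipif(pandas is None, reason="pandas not installed!")

@requires_pandas
def test_uses_pandas():
    assert pandas is not None
```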
Define the attributes that aren't mass assignable
<?php namespace App\Models\Items; use Illuminate\Database\Eloquent\Model; class Vehicle extends Model { /** * The table associated with the model. * * @var string */ protected $table = 'vehicles'; /** * The primary key column. * * @var string */ protected $primaryKey = 'code'; /** * Indicate if the model should be timestamped. * * @var bool */ public $timestamps = true; /** * The attributes that aren't mass assignable. * * @var array */ protected $guarded = ['created_at', 'updated_at']; /** * Get vehicles with pagination. * * @param int $perPage * * @return array */ public static function withPagination($perPage) { $results = Item::join('vehicles', 'vehicles.code', '=', 'items.code') ->select('items.price', 'items.images') ->paginate($perPage); $data = []; if (!$results->isEmpty()) { $data['from'] = $results->firstItem(); $data['to'] = $results->lastItem(); $data['total'] = $results->total(); $data['limit'] = $results->perPage(); foreach ($results as $result) { $item = new self(); $item->price = $result->price; $item->image = json_decode($result->images)[0]; $data['items'][] = $item; } } return $data; } }
<?php namespace App\Models\Items; use Illuminate\Database\Eloquent\Model; class Vehicle extends Model { /** * The table associated with the model. * * @var string */ protected $table = 'vehicles'; /** * The primary key column. * * @var string */ protected $primaryKey = 'code'; /** * Indicate if the model should be timestamped. * * @var bool */ public $timestamps = true; /** * Get vehicles with pagination. * * @param int $perPage * * @return array */ public static function withPagination($perPage) { $results = Item::join('vehicles', 'vehicles.code', '=', 'items.code') ->select('items.price', 'items.images') ->paginate($perPage); $data = []; if (!$results->isEmpty()) { $data['from'] = $results->firstItem(); $data['to'] = $results->lastItem(); $data['total'] = $results->total(); $data['limit'] = $results->perPage(); foreach ($results as $result) { $item = new self(); $item->price = $result->price; $item->image = json_decode($result->images)[0]; $data['items'][] = $item; } } return $data; } }
Fix simple typo: occured -> occurred
# -*- coding: utf-8 -*- import os import sys from contextlib import contextmanager from click._compat import isatty WIN = sys.platform.startswith("win") env = os.environ @contextmanager def raw_mode(): """ Enables terminal raw mode during the context. Note: Currently noop for Windows systems. Usage: :: with raw_mode(): do_some_stuff() """ if WIN: # No implementation for windows yet. yield # needed for the empty context manager to work else: # imports are placed here because this will fail under Windows import tty import termios if not isatty(sys.stdin): f = open("/dev/tty") fd = f.fileno() else: fd = sys.stdin.fileno() f = None try: old_settings = termios.tcgetattr(fd) tty.setraw(fd) except termios.error: pass try: yield finally: # this block sets the terminal to sane mode again, # also in case an exception occurred in the context manager try: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) # sys.stdout.flush() # not needed I think. if f is not None: f.close() except termios.error: pass def get_default_shell(): return env.get("DOITLIVE_INTERPRETER") or env.get("SHELL") or "/bin/bash"
# -*- coding: utf-8 -*- import os import sys from contextlib import contextmanager from click._compat import isatty WIN = sys.platform.startswith("win") env = os.environ @contextmanager def raw_mode(): """ Enables terminal raw mode during the context. Note: Currently noop for Windows systems. Usage: :: with raw_mode(): do_some_stuff() """ if WIN: # No implementation for windows yet. yield # needed for the empty context manager to work else: # imports are placed here because this will fail under Windows import tty import termios if not isatty(sys.stdin): f = open("/dev/tty") fd = f.fileno() else: fd = sys.stdin.fileno() f = None try: old_settings = termios.tcgetattr(fd) tty.setraw(fd) except termios.error: pass try: yield finally: # this block sets the terminal to sane mode again, # also in case an exception occured in the context manager try: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) # sys.stdout.flush() # not needed I think. if f is not None: f.close() except termios.error: pass def get_default_shell(): return env.get("DOITLIVE_INTERPRETER") or env.get("SHELL") or "/bin/bash"
Include exempt groups in search
import { inject as service } from '@ember/service'; import Controller from '@ember/controller'; import { task, timeout } from 'ember-concurrency'; export default Controller.extend({ flashMessages: service(), searchGroup: task(function* (term){ yield timeout(600); let kindOptions = { 'Chorus': 32, 'Quartet': 41, }; let kindModel = this.get('model.session.kind'); let kindInt = kindOptions[kindModel]; let groups = yield this.get('store').query('group', { 'nomen__icontains': term, 'status__gt': 0, 'page_size': 1000, 'kind': kindInt, }); return groups }), searchPerson: task(function* (term){ yield timeout(600); return this.get('store').query('person', { 'nomen__icontains': term, 'page_size': 1000 }) .then((data) => data); }), actions: { saveEntry(entry){ entry.save() .then(() => { this.get('flashMessages').success('Saved'); this.transitionToRoute('dashboard.session-manager.session.entries.entry', entry); }); }, cancelEntry(){ this.get('model').deleteRecord(); this.transitionToRoute('dashboard.session-manager.session.entries.index'); }, willTransition() { this._super(...arguments); const record = this.get('model'); record.rollbackAttributes(); }, }, });
import { inject as service } from '@ember/service'; import Controller from '@ember/controller'; import { task, timeout } from 'ember-concurrency'; export default Controller.extend({ flashMessages: service(), searchGroup: task(function* (term){ yield timeout(600); let kindOptions = { 'Chorus': 32, 'Quartet': 41, }; let kindModel = this.get('model.session.kind'); let kindInt = kindOptions[kindModel]; let groups = yield this.get('store').query('group', { 'nomen__icontains': term, 'status': 10, 'page_size': 1000, 'kind': kindInt, }); return groups }), searchPerson: task(function* (term){ yield timeout(600); return this.get('store').query('person', { 'nomen__icontains': term, 'page_size': 1000 }) .then((data) => data); }), actions: { saveEntry(entry){ entry.save() .then(() => { this.get('flashMessages').success('Saved'); this.transitionToRoute('dashboard.session-manager.session.entries.entry', entry); }); }, cancelEntry(){ this.get('model').deleteRecord(); this.transitionToRoute('dashboard.session-manager.session.entries.index'); }, willTransition() { this._super(...arguments); const record = this.get('model'); record.rollbackAttributes(); }, }, });
HBO: Add options argument to get()
class Hbo(): def handle(self, url): return "hbo.com" in url def get(self, options, url): parse = urlparse(url) try: other = parse[5] except KeyError: log.error("Something wrong with that url") sys.exit(2) match = re.search("^/(.*).html", other) if not match: log.error("Cant find video file") sys.exit(2) url = "http://www.hbo.com/data/content/%s.xml" % match.group(1) data = get_http_data(url) xml = ET.XML(data) videoid = xml.find("content")[1].find("videoId").text url = "http://render.cdn.hbo.com/data/content/global/videos/data/%s.xml" % videoid data = get_http_data(url) xml = ET.XML(data) ss = xml.find("videos") if sys.version_info < (2, 7): sa = list(ss.getiterator("size")) else: sa = list(ss.iter("size")) streams = {} for i in sa: stream = {} stream["path"] = i.find("tv14").find("path").text streams[int(i.attrib["width"])] = stream test = select_quality(options, streams) download_rtmp(options, test["path"])
class Hbo(): def handle(self, url): return "hbo.com" in url def get(self, url): parse = urlparse(url) try: other = parse[5] except KeyError: log.error("Something wrong with that url") sys.exit(2) match = re.search("^/(.*).html", other) if not match: log.error("Cant find video file") sys.exit(2) url = "http://www.hbo.com/data/content/%s.xml" % match.group(1) data = get_http_data(url) xml = ET.XML(data) videoid = xml.find("content")[1].find("videoId").text url = "http://render.cdn.hbo.com/data/content/global/videos/data/%s.xml" % videoid data = get_http_data(url) xml = ET.XML(data) ss = xml.find("videos") if sys.version_info < (2, 7): sa = list(ss.getiterator("size")) else: sa = list(ss.iter("size")) streams = {} for i in sa: stream = {} stream["path"] = i.find("tv14").find("path").text streams[int(i.attrib["width"])] = stream test = select_quality(options, streams) download_rtmp(options, test["path"])
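The change above is only about threading `options` through the call instead of relying on a name defined elsewhere; stripped of the scraping details, the shape of the fix is just this, with a simplified stand-in object since the real options type isn't shown here.

```python
class Options:
    # Simplified stand-in for the real options object; the field name is assumed.
    def __init__(self, quality=None):
        self.quality = quality

class Hbo:
    def get(self, options, url):
        # options now arrives as an argument, so the method no longer depends
        # on a module-level variable being set somewhere else.
        return f"fetching {url} (quality={options.quality})"

print(Hbo().get(Options(quality=720), "http://www.hbo.com/video.html"))
```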
Increase results to 25 per page to make page look nicer
from django.views.generic import ListView, DetailView from .forms import OrganisationSearchForm from .models import Organisation from .search_indexes import OrganisationIndex from .search_utils import SearchPaginator class OrganisationSearchView(ListView): template_name = 'justizgelder/search.html' paginate_by = 25 paginator_class = SearchPaginator def get_queryset(self): self.form = OrganisationSearchForm(self.request.GET) self.result = self.form.search(size=self.paginate_by) return self.result def get_context_data(self, **kwargs): context = super(OrganisationSearchView, self).get_context_data(**kwargs) context['result'] = self.result context['query'] = self.request.GET.get('q') context['form'] = self.form context['base_template'] = 'justizgelder/search_base.html' if self.request.GET.get('embed'): context['base_template'] = 'justizgelder/embed_base.html' return context class OrganisationDetail(DetailView): template_name = 'justizgelder/organisation_detail.html' model = Organisation def get_context_data(self, **kwargs): context = super(OrganisationDetail, self).get_context_data(**kwargs) idx = OrganisationIndex() context['mlt'] = idx.search(self.object.name, size=15, sort=False, aggregations=False ) return context
from django.views.generic import ListView, DetailView from .forms import OrganisationSearchForm from .models import Organisation from .search_indexes import OrganisationIndex from .search_utils import SearchPaginator class OrganisationSearchView(ListView): template_name = 'justizgelder/search.html' paginate_by = 15 paginator_class = SearchPaginator def get_queryset(self): self.form = OrganisationSearchForm(self.request.GET) self.result = self.form.search(size=self.paginate_by) return self.result def get_context_data(self, **kwargs): context = super(OrganisationSearchView, self).get_context_data(**kwargs) context['result'] = self.result context['query'] = self.request.GET.get('q') context['form'] = self.form context['base_template'] = 'justizgelder/search_base.html' if self.request.GET.get('embed'): context['base_template'] = 'justizgelder/embed_base.html' return context class OrganisationDetail(DetailView): template_name = 'justizgelder/organisation_detail.html' model = Organisation def get_context_data(self, **kwargs): context = super(OrganisationDetail, self).get_context_data(**kwargs) idx = OrganisationIndex() context['mlt'] = idx.search(self.object.name, size=15, sort=False, aggregations=False ) return context
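The effect of bumping `paginate_by` is easiest to see on the paginator itself; the sketch assumes Django is installed and swaps the search results for a plain list.

```python
from django.core.paginator import Paginator

results = list(range(100))               # stand-in for the search results
print(Paginator(results, 15).num_pages)  # 7 pages under the old setting
print(Paginator(results, 25).num_pages)  # 4 pages under the new setting
```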
Add created field to DataPoint model
import numpy import ast from django.db import models class DataPoint(models.Model): name = models.CharField(max_length=600) exact_name = models.CharField(max_length=1000, null=True, blank=True) decay_feature = models.CharField(max_length=1000, null=True, blank=True) created = models.DateTimeField(auto_now_add=True) options = models.CharField(max_length=100) homo = models.FloatField() lumo = models.FloatField() homo_orbital = models.IntegerField() energy = models.FloatField() dipole = models.FloatField() band_gap = models.FloatField(null=True, blank=True) def __unicode__(self): return self.exact_name @classmethod def get_all_data(cls): data = DataPoint.objects.filter(band_gap__isnull=False, exact_name__isnull=False, decay_feature__isnull=False) M = len(data) HOMO = numpy.zeros((M, 1)) LUMO = numpy.zeros((M, 1)) GAP = numpy.zeros((M, 1)) vectors = [] for i, x in enumerate(data): HOMO[i] = x.homo LUMO[i] = x.lumo GAP[i] = x.band_gap vectors.append(ast.literal_eval(x.decay_feature)) FEATURE = numpy.matrix(vectors) return FEATURE, HOMO, LUMO, GAP
import numpy import ast from django.db import models class DataPoint(models.Model): name = models.CharField(max_length=600) exact_name = models.CharField(max_length=1000, null=True, blank=True) decay_feature = models.CharField(max_length=1000, null=True, blank=True) options = models.CharField(max_length=100) homo = models.FloatField() lumo = models.FloatField() homo_orbital = models.IntegerField() energy = models.FloatField() dipole = models.FloatField() band_gap = models.FloatField(null=True, blank=True) def __unicode__(self): return self.exact_name @classmethod def get_all_data(cls): data = DataPoint.objects.filter(band_gap__isnull=False, exact_name__isnull=False, decay_feature__isnull=False) M = len(data) HOMO = numpy.zeros((M, 1)) LUMO = numpy.zeros((M, 1)) GAP = numpy.zeros((M, 1)) vectors = [] for i, x in enumerate(data): HOMO[i] = x.homo LUMO[i] = x.lumo GAP[i] = x.band_gap vectors.append(ast.literal_eval(x.decay_feature)) FEATURE = numpy.matrix(vectors) return FEATURE, HOMO, LUMO, GAP