Dataset columns: text (string, 17 to 1.47k chars), positive (string, 673 to 4.43k chars), negative (string, 677 to 2.81k chars)
Fix assertion to check all permission groups
<?php namespace Eye4web\Zf2BoardAbac\Assertion; use Eye4web\Zf2Abac\Assertion\AssertionInterface; use Eye4web\Zf2Abac\Provider\ProviderInterface; use Eye4web\Zf2Abac\Exception; use Zend\Validator\ValidatorPluginManager; class BaseAssertion implements AssertionInterface { /** @var ProviderInterface */ protected $provider; protected $permissionName = false; public function __construct(ProviderInterface $provider) { $this->provider = $provider; } /** * @param string $value * @param array $attributes * @return bool * @throws \Eye4web\Zf2Abac\Exception\RuntimeException */ public function hasPermission($value, array $attributes) { if (!$this->permissionName || empty($this->permissionName)) { throw new \Exception('Please provide a permission name'); } $permissionGroups = $this->provider->getPermissions($this->permissionName, $value); if (!count($permissionGroups)) { return true; } foreach ($permissionGroups as $group) { foreach ($group as $permission) { if (!isset($attributes[$permission->getValueId()])) { continue; } $validator = $this->provider->getValidator($permission); if (!$validator->isValid($attributes[$permission->getValueId()])) { break 2; } } // if we reach this, all rules in this group have passed, so we are allowed return true; } return false; } }
<?php namespace Eye4web\Zf2BoardAbac\Assertion; use Eye4web\Zf2Abac\Assertion\AssertionInterface; use Eye4web\Zf2Abac\Provider\ProviderInterface; use Eye4web\Zf2Abac\Exception; use Zend\Validator\ValidatorPluginManager; class BaseAssertion implements AssertionInterface { /** @var ProviderInterface */ protected $provider; protected $permissionName = false; public function __construct(ProviderInterface $provider) { $this->provider = $provider; } /** * @param string $value * @param array $attributes * @return bool * @throws \Eye4web\Zf2Abac\Exception\RuntimeException */ public function hasPermission($value, array $attributes) { if (!$this->permissionName || empty($this->permissionName)) { throw new \Exception('Please provide a permission name'); } $permissionGroups = $this->provider->getPermissions($this->permissionName, $value); if (!count($permissionGroups)) { return true; } foreach ($permissionGroups as $group) { foreach ($group as $permission) { if (!isset($attributes[$permission->getValueId()])) { return false; } $validator = $this->provider->getValidator($permission); if (!$validator->isValid($attributes[$permission->getValueId()])) { return false; } } } return true; } }
Use tanh activation for word C2W embeddings
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, Activation, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') #s_Eout = Activation('tanh')(s_E) return Model(input=one_hots, output=s_E, name='W2C')
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') return Model(input=one_hots, output=s_E, name='W2C')
:bug: Fix no-duplicate-properties interpolation issue
'use strict'; var helpers = require('../helpers'); var selectorHelpers = require('../selector-helpers'); module.exports = { 'name': 'no-duplicate-properties', 'defaults': { 'exclude': [] }, 'detect': function (ast, parser) { var result = []; ast.traverseByType('block', function (block) { var properties = [], items = [], warnMessage = false; block.eachFor('declaration', function (declaration) { items.push(declaration); }); items.reverse(); items.forEach(function (declaration) { warnMessage = false; declaration.eachFor('property', function (item) { var property = ''; item.content.forEach(function (subItem) { // Although not a selector the method here helps us construct the proper property name // taking into account any interpolation etc property += selectorHelpers.constructSelector(subItem); }); if (properties.indexOf(property) !== -1 && properties.length >= 1) { if (parser.options.exclude.indexOf(property) !== -1 && properties[properties.length - 1] !== property) { warnMessage = 'Excluded duplicate properties must directly follow each other.'; } else if (parser.options.exclude.indexOf(property) === -1) { warnMessage = 'Duplicate properties are not allowed within a block'; } } properties.push(property); if (warnMessage) { result = helpers.addUnique(result, { 'ruleId': parser.rule.name, 'line': item.start.line, 'column': item.start.column, 'message': warnMessage, 'severity': parser.severity }); } }); }); }); return result; } };
'use strict'; var helpers = require('../helpers'); module.exports = { 'name': 'no-duplicate-properties', 'defaults': { 'exclude': [] }, 'detect': function (ast, parser) { var result = []; ast.traverseByType('block', function (block) { var properties = [], items = [], warnMessage = false; block.eachFor('declaration', function (declaration) { items.push(declaration); }); items.reverse(); items.forEach(function (declaration) { warnMessage = false; declaration.eachFor('property', function (item) { var property = item.content[0].content; if (properties.indexOf(property) !== -1 && properties.length >= 1) { if (parser.options.exclude.indexOf(property) !== -1 && properties[properties.length - 1] !== property) { warnMessage = 'Excluded duplicate properties must directly follow each other.'; } else if (parser.options.exclude.indexOf(property) === -1) { warnMessage = 'Duplicate properties are not allowed within a block'; } } properties.push(property); if (warnMessage) { result = helpers.addUnique(result, { 'ruleId': parser.rule.name, 'line': item.start.line, 'column': item.start.column, 'message': warnMessage, 'severity': parser.severity }); } }); }); }); return result; } };
Add callback to pretty print yAxes; retab whole file so it's actually understandable
function traffic_data(traffic_levels) { var ctx = document.getElementById("traffic-chart"); var timeFormat = 'DD/MM/YYYY'; var myChart = new Chart(ctx, { type: 'line', data: { datasets: [{ data: traffic_levels, borderWidth: 1, borderColor: '#209cee', backgroundColor: 'rgba(32,156,238,0.4)' }] }, options: { title: { display: false, text: "Last 60 days of traffic" }, responsive: true, scales:{ xAxes: [{ type: "time", time: { parser: timeFormat, tooltipFormat: 'll' }, scaleLabel: { display: false, labelString: 'Date' } }], yAxes: [{ scaleLabel: { display: true, labelString: 'Requests/day (in millions)' }, ticks: { callback: function(label, index, labels) { return label/1000000+'m'; } } }] }, legend: { display: false }, tooltips: { "mode": "index", "backgroundColor": "#9AA7B4", "intersect": false, "cornerRadius": 3, "footerFontStyle": "normal", "titleSpacing": 0, "bodySpacing": 0, "footerSpacing": 0, "titleMarginBottom": 5, "footerMarginTop": 0, "yPadding": 5, "caretPadding": 10, "caretSize": 0 } } }); }
function traffic_data(traffic_levels) { var ctx = document.getElementById("traffic-chart"); var timeFormat = 'DD/MM/YYYY'; var myChart = new Chart(ctx, { type: 'line', data: { datasets: [{ data: traffic_levels, borderWidth: 1, borderColor: '#209cee', backgroundColor: 'rgba(32,156,238,0.4)' }] }, options: { title: { display: false, text: "Last 60 days of traffic" }, responsive: true, scales: { xAxes: [{ type: "time", time: { parser: timeFormat, tooltipFormat: 'll' }, scaleLabel: { display: false, labelString: 'Date' } }], yAxes: [{ scaleLabel: { display: true, labelString: 'Requests/day' } }] }, "legend": { "display": false }, "tooltips": { "mode": "index", "backgroundColor": "#9AA7B4", "intersect": false, "cornerRadius": 3, "footerFontStyle": "normal", "titleSpacing": 0, "bodySpacing": 0, "footerSpacing": 0, "titleMarginBottom": 5, "footerMarginTop": 0, "yPadding": 5, "caretPadding": 10, "caretSize": 0 } } }); }
Update front end error message
$("#submit-all").attr("disabled", true); $("#submit-all").hide(); Dropzone.options.uploadDropzone = { autoProcessQueue: false, init: function() { var submitButton = document.querySelector("#submit-all") myDropzone = this; submitButton.addEventListener("click", function() { myDropzone.processQueue(); }); this.on("error", function(file, errorMessage) { $("#upload-file-validation").text("Please upload jpg, png or jpeg file less than 20 MB"); $("#upload-file-validation").addClass("alert"); this.removeAllFiles(file); $("#submit-all").hide(); }); this.on("addedfile", function() { $("#submit-all").show(); $("#upload-file-validation").text("Upload the scanned template"); $("#upload-file-validation").removeClass("alert"); }); this.on("success", function(file, response) { window.location="/finish?key=" + response.key + "&fontname=" + response.font_name; }); this.on("maxfilesexceeded", function(file) { $("#upload-file-validation").text("Upload the scanned template"); $("#upload-file-validation").removeClass("alert"); this.removeAllFiles(); this.addFile(file); $("#submit-all").show(); }); }, paramName: "file", // The name that will be used to transfer the file maxFilesize: 20, // MB maxFiles: 1, acceptedFiles: ".png, .jpg, .jpeg", dictDefaultMessage: "Drop image here<br>or click to upload" };
$("#submit-all").attr("disabled", true); $("#submit-all").hide(); Dropzone.options.uploadDropzone = { autoProcessQueue: false, init: function() { var submitButton = document.querySelector("#submit-all") myDropzone = this; submitButton.addEventListener("click", function() { myDropzone.processQueue(); }); this.on("error", function(file, errorMessage) { $("#upload-file-validation").text("Please upload only jpg, png or jpeg file"); $("#upload-file-validation").addClass("alert"); this.removeAllFiles(file); $("#submit-all").hide(); }); this.on("addedfile", function() { $("#submit-all").show(); $("#upload-file-validation").text("Upload the scanned template"); $("#upload-file-validation").removeClass("alert"); }); this.on("success", function(file, response) { window.location="/finish?key=" + response.key + "&fontname=" + response.font_name; }); this.on("maxfilesexceeded", function(file) { $("#upload-file-validation").text("Upload the scanned template"); $("#upload-file-validation").removeClass("alert"); this.removeAllFiles(); this.addFile(file); $("#submit-all").show(); }); }, paramName: "file", // The name that will be used to transfer the file maxFilesize: 20, // MB maxFiles: 1, acceptedFiles: ".png, .jpg, .jpeg", dictDefaultMessage: "Drop image here<br>or click to upload" };
Fix render() kwargs for Django 2.1
import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, *args, **kwargs): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, *args, **kwargs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, })
import mimetypes from django.contrib.admin.widgets import AdminFileWidget from django.template.loader import render_to_string class ImagePreviewWidget(AdminFileWidget): """ An :class:`~django.forms.FileInput` widget that also displays a preview of the image. """ template_with_initial = u'%(clear_template)s</p><p>%(input_text)s: %(input)s' def render(self, name, value, attrs=None): is_image = False if value: if hasattr(value, 'path'): (mime_type, encoding) = mimetypes.guess_type(value.path) else: # Try to guess mime_type from name alone, for remote FileSystems (S3, etc...) (mime_type, encoding) = mimetypes.guess_type(value.name) is_image = mime_type and mime_type.startswith('image/') # Render different field for replacing input_field = super(ImagePreviewWidget, self).render(name, value, attrs) if not value: return input_field else: return render_to_string("any_imagefield/imagepreviewwidget/update.html", { 'value': value, 'is_image': is_image, 'input_field': input_field, 'input_text': self.input_text, })
fix($hmr): Support arbitrary origins in publicPath. If the Webpack publicPath contains "http:" or "https:", treat it as the base URL for newly-loaded assets. Otherwise, continue to use the current window's location. Fixes #25.
module.exports = function(publicPath, outputFilename) { if (document) { var newHref = publicPath.match(/https?:/g) ? new URL(outputFilename, publicPath) : new URL(outputFilename, window.location); var links = document.getElementsByTagName('link'); //update the stylesheet corresponding to `outputFilename` for (var i = 0; i < links.length; i++) { if (links[i].href) { var oldChunk = new URL(links[i].href); if (oldChunk.pathname === newHref.pathname) { var oldSheet = links[i] var url = newHref.href + '?' + (+new Date) var head = document.getElementsByTagName('head')[0] var link = document.createElement('link') // date insures sheets update when [contenthash] is not used in file names link.href = url link.charset = 'utf-8' link.type = 'text/css' link.rel = 'stylesheet' head.insertBefore(link, oldSheet.nextSibling) // remove the old sheet only after the old one loads so it's seamless // we gotta do it this way since link.onload basically doesn't work var img = document.createElement('img') img.onerror = function() { oldSheet.remove() console.log('[HMR]', 'Reload css: ', url); } img.src = url break; } } } } }
module.exports = function(publicPath, outputFilename) { if (document) { var origin = document.location.protocol + '//' + document.location.hostname + (document.location.port ? ':' + document.location.port: ''); var newHref = origin + publicPath + outputFilename var links = document.getElementsByTagName('link'); //update the stylesheet corresponding to `outputFilename` for (var i = 0; i < links.length; i++) { if (links[i].href) { var oldChunk = new URL(links[i].href); var newChunk = new URL(newHref); if (oldChunk.pathname === newChunk.pathname) { var oldSheet = links[i] var url = newHref + '?' + (+new Date) var head = document.getElementsByTagName('head')[0] var link = document.createElement('link') // date insures sheets update when [contenthash] is not used in file names link.href = url link.charset = 'utf-8' link.type = 'text/css' link.rel = 'stylesheet' head.insertBefore(link, oldSheet.nextSibling) // remove the old sheet only after the old one loads so it's seamless // we gotta do it this way since link.onload basically doesn't work var img = document.createElement('img') img.onerror = function() { oldSheet.remove() console.log('[HMR]', 'Reload css: ', url); } img.src = url break; } } } } }
Hide fields if "Consumption items" is selected
$(document).ready(function () { if (!id_category_0.checked && !id_category_1.checked){ $("#id_institution").parents('.row').hide(); $("#id_department").parents('.row').hide(); } $("#id_category_0").click(function () { $("#id_institution").parents('.row').show(); $("#id_department").parents('.row').show(); }); $("#id_category_1").click(function () { $("#id_institution").parents('.row').hide(); $("#id_department").parents('.row').hide(); }); if (id_category_1.checked){ $("#id_institution").parents('.row').hide(); $("#id_department").parents('.row').hide(); } }); function ajax_filter_departments(institution_id) { $("#id_department").html('<option value="">Loading...</option>'); $.ajax({ type: "GET", url: "/order/show_department", dataType: "json", data: {'institution':institution_id}, success: function(retorno) { $("#id_department").empty(); $("#id_department").append('<option value="">--------</option>'); $.each(retorno, function(i, item){ $("#id_department").append('<option value="'+item.pk+'">'+item.valor+'</option>'); }); }, error: function(error) { alert('Error: No request return.'); } }); }
$(document).ready(function () { if (!id_category_0.checked && !id_category_1.checked){ $("#id_institution").parents('.row').hide(); $("#id_department").parents('.row').hide(); } $("#id_category_0").click(function () { $("#id_institution").parents('.row').show(); $("#id_department").parents('.row').show(); }); $("#id_category_1").click(function () { $("#id_institution").parents('.row').hide(); $("#id_department").parents('.row').hide(); }); }); function ajax_filter_departments(institution_id) { $("#id_department").html('<option value="">Loading...</option>'); $.ajax({ type: "GET", url: "/order/show_department", dataType: "json", data: {'institution':institution_id}, success: function(retorno) { $("#id_department").empty(); $("#id_department").append('<option value="">--------</option>'); $.each(retorno, function(i, item){ $("#id_department").append('<option value="'+item.pk+'">'+item.valor+'</option>'); }); }, error: function(error) { alert('Error: No request return.'); } }); }
Determine which sass package to install
let Preprocessor = require('./Preprocessor'); class Sass extends Preprocessor { /** * Required dependencies for the component. */ dependencies() { this.requiresReload = true; let dependencies = ['sass-loader@7.*']; try { require.resolve('node-sass'); dependencies.push('node-sass'); } catch (e) { dependencies.push('sass'); } if (Config.processCssUrls) { dependencies.push('[email protected]'); } return dependencies; } /** * Register the component. * * @param {*} src * @param {string} output * @param {Object} pluginOptions * @param {Array} postCssPlugins */ register(src, output, pluginOptions = {}, postCssPlugins = []) { return this.preprocess( 'sass', src, output, this.pluginOptions(pluginOptions), postCssPlugins ); } /** * Build the plugin options for sass-loader. * * @param {Object} pluginOptions * @returns {Object} */ pluginOptions(pluginOptions) { return Object.assign( { precision: 8, outputStyle: 'expanded', implementation: () => require('sass') }, pluginOptions, { sourceMap: true } ); } } module.exports = Sass;
let Preprocessor = require('./Preprocessor'); class Sass extends Preprocessor { /** * Required dependencies for the component. */ dependencies() { this.requiresReload = true; let dependencies = ['sass', 'sass-loader@7.*']; if (Config.processCssUrls) { dependencies.push('[email protected]'); } return dependencies; } /** * Register the component. * * @param {*} src * @param {string} output * @param {Object} pluginOptions * @param {Array} postCssPlugins */ register(src, output, pluginOptions = {}, postCssPlugins = []) { return this.preprocess( 'sass', src, output, this.pluginOptions(pluginOptions), postCssPlugins ); } /** * Build the plugin options for sass-loader. * * @param {Object} pluginOptions * @returns {Object} */ pluginOptions(pluginOptions) { return Object.assign( { precision: 8, outputStyle: 'expanded', implementation: () => require('sass') }, pluginOptions, { sourceMap: true } ); } } module.exports = Sass;
Add repeats to items that should be checked for child elements
Question = function(questionData) { this.name = questionData.name; this.type = questionData.type; this.label = questionData.label; } Question.prototype.getLabel = function(language) { /// if plain string, return if(typeof(this.label) == "string") return this.label; else if(typeof(this.label) == "object") { if(language && this.label.hasOwnProperty(language)) return this.label[language]; else { var label = null; for(key in this.label) { label = this.label[key]; break;// break at first instance and return that } return label; } } // return raw name return this.name; } function parseQuestions(children, prefix, cleanReplacement) { var idx; cleanReplacement = typeof cleanReplacement !== 'undefined' ? cleanReplacement : '_'; for(idx in children) { var question = children[idx]; //@TODO: do we just want to add anything with children, concern could be it item has children and is alos avalid question - if thats possible if(question.hasOwnProperty('children') && (question.type == "group" || question.type == "note" || question.type == "repeat")) { parseQuestions(question.children, ((prefix?prefix:'') + question.name + cleanReplacement)); } else { // TODO: question class that has accessor mesthods for type, label, language etc questions[((prefix?prefix:'') + question.name)] = new Question(question); } } }
Question = function(questionData) { this.name = questionData.name; this.type = questionData.type; this.label = questionData.label; } Question.prototype.getLabel = function(language) { /// if plain string, return if(typeof(this.label) == "string") return this.label; else if(typeof(this.label) == "object") { if(language && this.label.hasOwnProperty(language)) return this.label[language]; else { var label = null; for(key in this.label) { label = this.label[key]; break;// break at first instance and return that } return label; } } // return raw name return this.name; } function parseQuestions(children, prefix, cleanReplacement) { var idx; cleanReplacement = typeof cleanReplacement !== 'undefined' ? cleanReplacement : '_'; for(idx in children) { var question = children[idx]; if(question.hasOwnProperty('children') && (question.type == "group" || question.type == "note")) { parseQuestions(question.children, ((prefix?prefix:'') + question.name + cleanReplacement)); } else { // TODO: question class that has accessor mesthods for type, label, language etc questions[((prefix?prefix:'') + question.name)] = new Question(question); } } }
Change the example module, fixing the scroll problem.
package com.padakeji.android.ui.autowraplayoutmanager.example; import android.content.Context; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.ViewConfiguration; import android.widget.ScrollView; /** * 继承于ScrollView,禁止了滑动事件向下传递。 * * Created by atanl on 2015/10/20. */ public class MyScrollView extends ScrollView { public interface OnScrollListener { void onScroll(); } private int downX; private int downY; private int mTouchSlop; public MyScrollView(Context context) { super(context); mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop(); } public MyScrollView(Context context, AttributeSet attrs) { super(context, attrs); mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop(); } public MyScrollView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop(); } @Override protected void onScrollChanged(int l, int t, int oldl, int oldt) { super.onScrollChanged(l, t, oldl, oldt); } @Override public boolean onInterceptTouchEvent(MotionEvent e) { int action = e.getAction(); switch (action) { case MotionEvent.ACTION_DOWN: downX = (int) e.getRawX(); downY = (int) e.getRawY(); break; case MotionEvent.ACTION_MOVE: int moveY = (int) e.getRawY(); if (Math.abs(moveY - downY) > mTouchSlop) { return true; } } return super.onInterceptTouchEvent(e); } }
package com.padakeji.android.ui.autowraplayoutmanager.example; import android.content.Context; import android.util.AttributeSet; import android.view.MotionEvent; import android.widget.ScrollView; /** * Created by atanl on 2016/3/5. */ public class MyScrollView extends ScrollView { private int downX; private int downY; private int mTouchSlop; public MyScrollView(Context context) { super(context); } public MyScrollView(Context context, AttributeSet attrs) { super(context, attrs); } public MyScrollView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); } @Override public boolean onInterceptTouchEvent(MotionEvent e) { int action = e.getAction(); switch (action) { case MotionEvent.ACTION_DOWN: downX = (int) e.getRawX(); downY = (int) e.getRawY(); break; case MotionEvent.ACTION_MOVE: int moveY = (int) e.getRawY(); if (Math.abs(moveY - downY) > mTouchSlop) { return true; } } return super.onInterceptTouchEvent(e); } }
Fix bug with clock roll over.
<?php namespace MillerVein\Component\Form\DataTransformer; use DateTime; use Symfony\Component\Form\DataTransformerInterface; use Symfony\Component\Validator\Exception\UnexpectedTypeException; /** * * @author Nick Fenzan <[email protected]> */ class TwentyFourToTwelveHourArray implements DataTransformerInterface{ public function reverseTransform($value) { if (null === $value) { return null; } if (!is_array($value)) { throw new UnexpectedTypeException($value, 'array'); } if($value['ampm'] == 'AM'){ if($value['hour'] == 12){ $value['hour'] = 0; } }else{ if($value['hour'] != 12){ $value['hour'] = $value['hour'] + 12; } } unset($value['ampm']); return $value; } public function transform($value) { if (null === $value) { return null; } if (!is_array($value)) { throw new UnexpectedTypeException($value, 'array'); } if($value['hour'] === 0){ $value['hour'] = 12; $value['ampm'] = 'AM'; }elseif($value['hour']<12){ $value['ampm'] = 'AM'; }else{ $value['hour'] = $value['hour'] - 12; $value['ampm'] = 'PM'; } return $value; } }
<?php namespace MillerVein\Component\Form\DataTransformer; use DateTime; use Symfony\Component\Form\DataTransformerInterface; use Symfony\Component\Validator\Exception\UnexpectedTypeException; /** * * @author Nick Fenzan <[email protected]> */ class TwentyFourToTwelveHourArray implements DataTransformerInterface{ public function reverseTransform($value) { if (null === $value) { return null; } if (!is_array($value)) { throw new UnexpectedTypeException($value, 'array'); } if($value['ampm'] == 'PM' && $value['hour']>0){ $value['hour'] = $value['hour'] + 12; } unset($value['ampm']); return $value; } public function transform($value) { if (null === $value) { return null; } if (!is_array($value)) { throw new UnexpectedTypeException($value, 'array'); } if($value['hour'] === 0){ $value['hour'] = 12; $value['ampm'] = 'AM'; }elseif($value['hour']<12){ $value['ampm'] = 'AM'; }else{ $value['hour'] = $value['hour'] - 12; $value['ampm'] = 'PM'; } return $value; } }
Divide test into single-it tests
'use strict'; describe('SimulationService', function () { describe('sortDroits', function() { var DROITS_DESCRIPTION = { acs: { id: 'acs' }, apl: { id: 'apl' }, ass: { id: 'ass'} }; var service, droits, openfiscaResult; beforeEach(function() { module('ddsApp'); module(function($provide) { $provide.constant('droitsDescription', DROITS_DESCRIPTION); }); inject(function(SimulationService) { service = SimulationService; }); openfiscaResult = { acs: 10, apl: null }; droits = service.sortDroits(openfiscaResult); }); it('should extract eligibles droits from openfisca result', function() { expect(droits.droitsEligibles).toEqual({ acs: { id: 'acs', montant: 10 } }); }); it('should extract injected droits', function() { expect(droits.droitsInjectes).toEqual({ apl: { id: 'apl' } }); }); it('should extract non eligibles droits', function() { expect(droits.droitsNonEligibles).toEqual({ ass: { id: 'ass' } }); }); }); });
'use strict'; describe('SimulationService', function () { describe('sortDroits', function() { var DROITS_DESCRIPTION = { acs: { id: 'acs' }, apl: { id: 'apl' }}; var service; beforeEach(function() { module('ddsApp'); module(function($provide) { $provide.constant('droitsDescription', DROITS_DESCRIPTION); }); inject(function(SimulationService) { service = SimulationService; }); }); it('should sort eligible and non eligible droits', function() { var openfiscaResult = { acs: 10 }; var droits = service.sortDroits(openfiscaResult); expect(droits.droitsEligibles).toEqual({ acs: { id: 'acs', montant: 10 } }); expect(droits.droitsNonEligibles).toEqual({ apl: { id: 'apl' } }); expect(droits.droitsInjectes).toEqual({}); }); it('should sort eligible and injected droits', function() { var openfiscaResult = { acs: 10, apl: null }; var droits = service.sortDroits(openfiscaResult); expect(droits.droitsEligibles).toEqual({ acs: { id: 'acs', montant: 10 } }); expect(droits.droitsNonEligibles).toEqual({}); expect(droits.droitsInjectes).toEqual({ apl: { id: 'apl' } }); }); }); });
Add rtfeldman to user list.
module.exports = { users: [ // ADD YOUR USERNAME AT THE TOP 'gwillen', 'isaaczafuta', 'bluetidepro', 'julbaxter', 'eik3', 'crandles', 'bobnisco', 'jeromegn', 'mattclaw', 'crankeye', 'cgroner', 'tejohnso', 'sp1d3rx', 'evsie001', 'msied', 'd7p', 'kasperlewau', 'kennethrapp', 'briansoule', 'qguv', 'ianwalter', 'williamle8300', 'jeffawang', 'sorpaas', 'dylnclrk', 'dcancel', 'theprofessor117', 'roryokane', 'shedd', 'LeandroLovisolo', 'euank', 'humd', 'bwlang', 'barretts', 'pksjce', 'tito0224', 'hemanth', 'treygriffith', 'megamattron', 'pents90', 'michaelnovakjr', 'markbao', 'kalmi', 'lablayers', 'rtfeldman', 'dfjones' ] };
module.exports = { users: [ // ADD YOUR USERNAME AT THE TOP 'gwillen', 'isaaczafuta', 'bluetidepro', 'julbaxter', 'eik3', 'crandles', 'bobnisco', 'jeromegn', 'mattclaw', 'crankeye', 'cgroner', 'tejohnso', 'sp1d3rx', 'evsie001', 'msied', 'd7p', 'kasperlewau', 'kennethrapp', 'briansoule', 'qguv', 'ianwalter', 'williamle8300', 'jeffawang', 'sorpaas', 'dylnclrk', 'dcancel', 'theprofessor117', 'roryokane', 'shedd', 'LeandroLovisolo', 'euank', 'humd', 'bwlang', 'barretts', 'pksjce', 'tito0224', 'hemanth', 'treygriffith', 'megamattron', 'pents90', 'michaelnovakjr', 'markbao', 'kalmi', 'lablayers', 'dfjones' ] };
Reformat code and add @todo
<?php App::uses('AppController', 'Controller'); /** * Description of PluginsController * * @todo complete phpDoc * * @author mohammad */ class PluginsController extends AppController { public $uses = array(); public function admin_index() { $this->set('title_for_layout', __('Plugins')); $plugins = HuradPlugin::getPluginData(); $this->set(compact('plugins')); } public function admin_toggle($alias) { $this->autoRender = false; if (HuradPlugin::isActive($alias)) { if (HuradPlugin::deactivate($alias)) { $this->Session->setFlash(__('Plugin deactivate'), 'success'); $this->redirect(array('action' => 'index')); } else { $this->Session->setFlash(__('The plugin could not be deactivate. Please, try again.'), 'error'); $this->redirect(array('action' => 'index')); } } else { if (HuradPlugin::activate($alias)) { $this->Session->setFlash(__('Plugin activate'), 'success'); $this->redirect(array('action' => 'index')); } else { $this->Session->setFlash(__('The plugin could not be activate. Please, try again.'), 'error'); $this->redirect(array('action' => 'index')); } } } /* * @todo add admin_delete method */ }
<?php App::uses('AppController', 'Controller'); /** * Description of PluginsController * * @author mohammad */ class PluginsController extends AppController { public $uses = array(); public function admin_index() { $this->set('title_for_layout', __('Plugins')); $plugins = HuradPlugin::getPluginData(); $this->set(compact('plugins')); } public function admin_toggle($alias) { $this->autoRender = FALSE; if (HuradPlugin::isActive($alias)) { if (HuradPlugin::deactivate($alias)) { $this->Session->setFlash(__('Plugin deactivate'), 'success'); $this->redirect(array('action' => 'index')); } else { $this->Session->setFlash(__('The plugin could not be deactivate. Please, try again.'), 'error'); $this->redirect(array('action' => 'index')); } } else { if (HuradPlugin::activate($alias)) { $this->Session->setFlash(__('Plugin activate'), 'success'); $this->redirect(array('action' => 'index')); } else { $this->Session->setFlash(__('The plugin could not be activate. Please, try again.'), 'error'); $this->redirect(array('action' => 'index')); } } } }
Fix bug that caused error when nothing is before a decimal duration input
from __future__ import division from django.utils.duration import _get_duration_components from datetime import timedelta from decimal import Decimal def parse_duration(duration): hours = None minutes = None if duration.isdigit(): hours = int(duration) elif ':' in duration: duration_split = duration.split(':') hours = int(duration_split[0]) minutes = int(duration_split[1]) elif '.' in duration: if duration.index('.') == 0: duration = '0' + duration duration_split = duration.split('.') # TODO: Fix error here when not appending a 0, ex .5 instead of 0.5 hours = int(duration_split[0]) minutes = int(60 * float('.' + duration_split[1])) if minutes is None: minutes = 0 if hours or minutes: return timedelta(hours=hours, minutes=minutes) else: return None def duration_string(duration): if duration is not None: days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501 hours += days * 24 string = '{}:{:02d}'.format(hours, minutes) else: string = '0:00' return string def duration_decimal(duration): if duration is not None: days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501 hours += days * 24 decimal = Decimal(hours) + Decimal(minutes/60) else: decimal = Decimal(0) return decimal
from __future__ import division from django.utils.duration import _get_duration_components from datetime import timedelta from decimal import Decimal def parse_duration(duration): hours = None minutes = None if duration.isdigit(): hours = int(duration) elif ':' in duration: duration_split = duration.split(':') hours = int(duration_split[0]) minutes = int(duration_split[1]) elif '.' in duration: duration_split = duration.split('.') # TODO: Fix error here when not appending a 0, ex .5 instead of 0.5 hours = int(duration_split[0]) minutes = int(60 * float('.' + duration_split[1])) if minutes is None: minutes = 0 if hours or minutes: return timedelta(hours=hours, minutes=minutes) else: return None def duration_string(duration): if duration is not None: days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501 hours += days * 24 string = '{}:{:02d}'.format(hours, minutes) else: string = '0:00' return string def duration_decimal(duration): if duration is not None: days, hours, minutes, seconds, microseconds = _get_duration_components(duration) # noqa: E501 hours += days * 24 decimal = Decimal(hours) + Decimal(minutes/60) else: decimal = Decimal(0) return decimal
Add default value for default parameter
<?php namespace Onion\Framework\Router\Strategy; use function Onion\Framework\merge; class TreeStrategy { private $routes = []; public function __construct(array $routes) { $this->routes = $routes; } public function resolve(string $method, string $path) { $parts = explode('/', trim($path, '/')); $params = []; $match = $this->match($this->routes, $parts, $params); if ($match === null) { throw new \RuntimeException('No match found'); } $params = array_filter($params, function ($key) { return !is_integer($key); }, ARRAY_FILTER_USE_KEY); if (isset($match['methods']) && !in_array($method, $match['methods'])) { throw new \BadMethodCallException("Method {$method} not allowed"); } return [$match, $params]; } private function match(array $routes, array $parts, &$params = []): ?array { $part = array_shift($parts); if ($part === null) { return $routes; } foreach ($routes as $segment => $remaining) { if (preg_match("/^{$segment}$/i", $part, $matches) > 0) { $params = merge($params ?? [], $matches); return $this->match($remaining, $parts, $params); } } return null; } }
<?php namespace Onion\Framework\Router\Strategy; use function Onion\Framework\merge; class TreeStrategy { private $routes; public function __construct(array $routes) { $this->routes = $routes; } public function resolve(string $method, string $path) { $parts = explode('/', trim($path, '/')); $params = []; $match = $this->match($this->routes, $parts, $params); if ($match === null) { throw new \RuntimeException('No match found'); } $params = array_filter($params, function ($key) { return !is_integer($key); }, ARRAY_FILTER_USE_KEY); if (isset($match['methods']) && !in_array($method, $match['methods'])) { throw new \BadMethodCallException("Method {$method} not allowed"); } return [$match, $params]; } private function match(array $routes, array $parts, &$params = []): ?array { $part = array_shift($parts); if ($part === null) { return $routes; } foreach ($routes as $segment => $remaining) { if (preg_match("/^{$segment}$/i", $part, $matches) > 0) { $params = merge($params ?? [], $matches); return $this->match($remaining, $parts, $params); } } return null; } }
Add long description content type
#!/usr/bin/env python from setuptools import setup with open('README.rst') as file: long_description = file.read() setup(name='parmap', version='1.5.1.9000', description=('map and starmap implementations passing additional ' 'arguments and parallelizing if possible'), long_description=long_description, long_description_content_type = "text/x-rst", author='Sergio Oller', license='APACHE-2.0', author_email='[email protected]', url='https://github.com/zeehio/parmap', packages=['parmap'], extras_require = { 'progress_bar': ["tqdm>=4.8.4"], }, test_suite = "test_parmap", classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], )
#!/usr/bin/env python from setuptools import setup with open('README.rst') as file: long_description = file.read() setup(name='parmap', version='1.5.1.9000', description=('map and starmap implementations passing additional ' 'arguments and parallelizing if possible'), long_description=long_description, author='Sergio Oller', license='APACHE-2.0', author_email='[email protected]', url='https://github.com/zeehio/parmap', packages=['parmap'], extras_require = { 'progress_bar': ["tqdm>=4.8.4"], }, test_suite = "test_parmap", classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6' ], )
Use new onCreate callback for buster.testRunner
/*jslint onevar: false, eqeqeq: false*/ /*global require*/ if (typeof require != "undefined") { var sinon = require("sinon"); var buster = { assert: require("buster-assert"), testRunner: require("buster-test").testRunner }; try { buster.test = require("buster-test/util"); } catch (e) {} } if (buster.test && buster.test.stackFilters) { buster.test.stackFilters.push("lib/sinon"); } buster.testRunner.onCreate(function (runner) { runner.on("test:setUp", function (test) { var config = sinon.getConfig(sinon.config); config.useFakeServer = false; var sandbox = sinon.sandbox.create(); sandbox.inject(test.testCase); test.testCase.useFakeTimers = function () { return sandbox.useFakeTimers.apply(sandbox, arguments); }; test.testCase.sandbox = sandbox; }); runner.on("test:tearDown", function (test) { test.testCase.sandbox.verifyAndRestore(); }); }); for (var prop in sinon.assert) { if (typeof sinon.assert[prop] == "function" && prop != "fail" && prop != "pass") { buster.assert[prop] = sinon.assert[prop]; } } sinon.assert.fail = buster.assert.fail; sinon.assert.pass = buster.assert.pass;
/*jslint onevar: false, eqeqeq: false*/ /*global require*/ if (typeof require != "undefined") { var sinon = require("sinon"); var buster = { assert: require("buster-assert"), testRunner: require("buster-test").testRunner }; try { buster.test = require("buster-test/util"); } catch (e) {} } if (buster.test && buster.test.stackFilters) { buster.test.stackFilters.push("lib/sinon"); } buster.testRunner.onCreate = function (runner) { runner.on("test:setUp", function (test) { var config = sinon.getConfig(sinon.config); config.useFakeServer = false; var sandbox = sinon.sandbox.create(); sandbox.inject(test.testCase); test.testCase.useFakeTimers = function () { return sandbox.useFakeTimers.apply(sandbox, arguments); }; test.testCase.sandbox = sandbox; }); runner.on("test:tearDown", function (test) { test.testCase.sandbox.verifyAndRestore(); }); }; for (var prop in sinon.assert) { if (typeof sinon.assert[prop] == "function" && prop != "fail" && prop != "pass") { buster.assert[prop] = sinon.assert[prop]; } } sinon.assert.fail = buster.assert.fail; sinon.assert.pass = buster.assert.pass;
Use sample values from the documentation.
'use strict'; // Load external modules var Cloudinary = require('cloudinary'); var Code = require('code'); var Hapi = require('hapi'); var Lab = require('lab'); // Load internal modules var HapiCloudinaryConnector = require('../lib'); // Test shortcuts var lab = exports.lab = Lab.script(); var describe = lab.describe; var it = lab.it; var expect = Code.expect; describe('Connector', function () { it('configures cloudinary', function (done) { var server = new Hapi.Server(); /* eslint-disable */ var plugin = { plugin: HapiCloudinaryConnector, options: { cloud_name: 'sample', api_key: '874837483274837', api_secret: 'a676b67565c6767a6767d6767f676fe1' } }; /* eslint-enable */ server.pack.register(plugin, function (err) { expect(Cloudinary.config('api_key')).to.equal('874837483274837'); done(); }); }); it('throws an error when configuration is invalid', function (done) { var server = new Hapi.Server(); /* eslint-disable */ var plugin = { plugin: HapiCloudinaryConnector, options: { cloud_name: 'sample' } }; /* eslint-enable */ server.pack.register(plugin, function (err) { expect(err).to.exist(); expect(err.message).to.equal('api_key is required'); done(); }); }); });
'use strict'; // Load external modules var Cloudinary = require('cloudinary'); var Code = require('code'); var Hapi = require('hapi'); var Lab = require('lab'); // Load internal modules var HapiCloudinaryConnector = require('../lib'); // Test shortcuts var lab = exports.lab = Lab.script(); var describe = lab.describe; var it = lab.it; var expect = Code.expect; describe('Connector', function () { it('configures cloudinary', function (done) { var server = new Hapi.Server(); /* eslint-disable */ var plugin = { plugin: HapiCloudinaryConnector, options: { cloud_name: 'test', api_key: 'test', api_secret: 'test' } }; /* eslint-enable */ server.pack.register(plugin, function (err) { expect(Cloudinary.config('api_key')).to.equal('test'); done(); }); }); it('throws an error when configuration is invalid', function (done) { var server = new Hapi.Server(); /* eslint-disable */ var plugin = { plugin: HapiCloudinaryConnector, options: { cloud_name: 'test' } }; /* eslint-enable */ server.pack.register(plugin, function (err) { expect(err).to.exist(); expect(err.message).to.equal('api_key is required'); done(); }); }); });
Add support for linkage in b2 projects
import os import subprocess from .. import project class BoostBuildProject(project.Project): @staticmethod def identifier(): return 'boostbuild' @staticmethod def is_valid_project(definition, needy): if not definition.target.platform.is_host(): return False if not os.path.isfile('Jamroot'): return False if os.path.isfile('b2'): return True try: needy.command_output(['b2', '-v']) return True except subprocess.CalledProcessError: return False except OSError: return False @staticmethod def configuration_keys(): return ['b2-args'] def get_build_concurrency_args(self): concurrency = self.build_concurrency() if concurrency > 1: return ['-j', str(concurrency)] elif concurrency == 0: return ['-j'] return [] def build(self, output_directory): b2 = './b2' if os.path.isfile('b2') else 'b2' b2_args = self.evaluate(self.configuration('b2-args')) b2_args += self.get_build_concurrency_args() if self.configuration('linkage') in ['static']: b2_args += ['link=static'] elif self.configuration('linkage') in ['dynamic', 'shared']: b2_args += ['link=shared'] self.command([b2] + b2_args) self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
import os import subprocess from .. import project class BoostBuildProject(project.Project): @staticmethod def identifier(): return 'boostbuild' @staticmethod def is_valid_project(definition, needy): if not definition.target.platform.is_host(): return False if not os.path.isfile('Jamroot'): return False if os.path.isfile('b2'): return True try: needy.command_output(['b2', '-v']) return True except subprocess.CalledProcessError: return False except OSError: return False @staticmethod def configuration_keys(): return ['b2-args'] def get_build_concurrency_args(self): concurrency = self.build_concurrency() if concurrency > 1: return ['-j', str(concurrency)] elif concurrency == 0: return ['-j'] return [] def build(self, output_directory): b2 = './b2' if os.path.isfile('b2') else 'b2' b2_args = self.evaluate(self.configuration('b2-args')) b2_args += self.get_build_concurrency_args() self.command([b2] + b2_args) self.command([b2, 'install', '--prefix=%s' % output_directory] + b2_args)
Make car colors easier to read
import pygame class Graphic: car_color = (255, 50, 50) car_width = 3 road_color = (255, 255, 255) road_width = 6 draw_methods = { 'Car': 'draw_car', 'Road': 'draw_road', } def __init__(self, surface): self.surface = surface def draw(self, obj): object_class = obj.__class__.__name__ method_name = self.draw_methods.get(object_class, None) if method_name: method = getattr(self, method_name) method(obj) def draw_car(self, car): coord = car.coordinates acceleration_rate = car.acceleration_rate rect = pygame.Rect(coord.x, coord.y, self.car_width, self.car_width) # Change car color depending on acceleration if acceleration_rate > 0: rate = min(1, acceleration_rate) color = (0, 0, int(rate * 255)) else: rate = max(-1, acceleration_rate) color = (int(-rate * 255), 0, 0) pygame.draw.rect(self.surface, color, rect, 0) def draw_road(self, road): pointlist = road.pointlist() closed = False pygame.draw.lines(self.surface, self.road_color, closed, pointlist, self.road_width)
import pygame class Graphic: car_color = (255, 50, 50) car_width = 3 road_color = (255, 255, 255) road_width = 6 draw_methods = { 'Car': 'draw_car', 'Road': 'draw_road', } def __init__(self, surface): self.surface = surface def draw(self, obj): object_class = obj.__class__.__name__ method_name = self.draw_methods.get(object_class, None) if method_name: method = getattr(self, method_name) method(obj) def draw_car(self, car): coord = car.coordinates acceleration_rate = car.acceleration_rate rect = pygame.Rect(coord.x, coord.y, self.car_width, self.car_width) # Change car color depending on acceleration if acceleration_rate > 0: rate = min(1, acceleration_rate) color = (50, 50, int(rate * 255)) else: rate = max(-1, acceleration_rate) color = (int(-rate * 255), 50, 50) pygame.draw.rect(self.surface, color, rect, 0) def draw_road(self, road): pointlist = road.pointlist() closed = False pygame.draw.lines(self.surface, self.road_color, closed, pointlist, self.road_width)
Fix unresolved data with huge result sets. In some cases with many data / many services (mainly due to load activity on the client side), the $resource query is resolved but the data are not yet attached and mapped to the resolved object. Adding .$promise guarantees that the service is done and the data are fully mapped. You can reproduce the case by displaying a search result on the main page and doing a clean cache refresh.
'use strict'; <%= angularAppName %> .config(function ($routeProvider, $httpProvider, $translateProvider, USER_ROLES) { $routeProvider .when('/<%= entityInstance %>', { templateUrl: 'views/<%= entityInstance %>s.html', controller: '<%= entityClass %>Controller', resolve:{ resolved<%= entityClass %>: ['<%= entityClass %>', function (<%= entityClass %>) { return <%= entityClass %>.query().$promise; }]<% for (relationshipId in relationships) { var relationshipClass = relationships[relationshipId].otherEntityNameCapitalized;%>, resolved<%=relationshipClass%>: ['<%=relationshipClass%>', function (<%=relationshipClass%>) { return <%=relationshipClass%>.query().$promise; }]<% } %> }, access: { authorizedRoles: [USER_ROLES.all] } }) });
'use strict'; <%= angularAppName %> .config(function ($routeProvider, $httpProvider, $translateProvider, USER_ROLES) { $routeProvider .when('/<%= entityInstance %>', { templateUrl: 'views/<%= entityInstance %>s.html', controller: '<%= entityClass %>Controller', resolve:{ resolved<%= entityClass %>: ['<%= entityClass %>', function (<%= entityClass %>) { return <%= entityClass %>.query(); }]<% for (relationshipId in relationships) { var relationshipClass = relationships[relationshipId].otherEntityNameCapitalized;%>, resolved<%=relationshipClass%>: ['<%=relationshipClass%>', function (<%=relationshipClass%>) { return <%=relationshipClass%>.query(); }]<% } %> }, access: { authorizedRoles: [USER_ROLES.all] } }) });
Call the function vs return the function
'use strict' let Promise = require('bluebird') let Request = Promise.promisify(require('request')) let _ = require('underscore') let VerifyAlexaSignature = require('verify_alexa_signature') let LightingIntent = require('./intents/lighting_intent') let RemotesIntent = require('./intents/remotes_intent') let TasksIntent = require('./intents/tasks_intent') class RosieAlexa { constructor(server, options) { this.config = options this.setupIntentHandlers() } handleRequest(request) { VerifyAlexaSignature.verify(request.headers.signature, request.headers.signaturecertchainurl, request.payload) .then((valid) => { console.warn("ALEXA SIG", valid) let request = request.payload.request if (request.type === 'IntentRequest') { let name = request.intent let intent = this.intents[name] if (intent) { let requestParams = intent.handle(request) Request(requestParams).spread((response, body) => { return body }) } else { return null } } }) .catch((err) => { console.warn("ALEXA INVALID SIG", err) }) } setupIntentHandlers() { this.intents = { [LightingIntent.intentName()]: new LightingIntent(this.config), [RemotesIntent.intentName()]: new RemotesIntent(this.config), [TasksIntent.intentName()]: new TasksIntent(this.config) } } } module.exports = RosieAlexa
'use strict' let Promise = require('bluebird') let Request = Promise.promisify(require('request')) let _ = require('underscore') let VerifyAlexaSignature = require('verify_alexa_signature') let LightingIntent = require('./intents/lighting_intent') let RemotesIntent = require('./intents/remotes_intent') let TasksIntent = require('./intents/tasks_intent') class RosieAlexa { constructor(server, options) { this.config = options this.setupIntentHandlers() } handleRequest(request) { VerifyAlexaSignature.verify(request.headers.signature, request.headers.signaturecertchainurl, request.payload) .then((valid) => { console.warn("ALEXA SIG", valid) let request = request.payload.request if (request.type === 'IntentRequest') { let name = request.intent let intent = this.intents[name] if (intent) { let requestParams = intent.handle(request) Request(requestParams).spread((response, body) => { return body }) } else { return null } } }) .catch((err) => { console.warn("ALEXA INVALID SIG", err) }) } setupIntentHandlers () { this.intents = { [LightingIntent.intentName]: new LightingIntent(this.config), [RemotesIntent.intentName]: new RemotesIntent(this.config), [TasksIntent.intentName]: new TasksIntent(this.config) } } } module.exports = RosieAlexa
Add Python script to find unused linux-headers packages
#!/usr/bin/env python from __future__ import print_function import sys import os import re from subprocess import check_output IMAGE_PATTERN = re.compile( 'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic') HEADER_PATTERN = re.compile( 'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic') def get_all_packages(): for line in check_output(['dpkg', '-l']).split('\n'): if line.startswith('ii'): # print(line.split(' ')) yield line.split()[1] def find_group(pattern, text): matched = pattern.match(text) if matched: return '{version}-{rev}'.format( version=matched.group('version'), rev=matched.group('rev')) return None def main(): packages = list(get_all_packages()) header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages) image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages) header_versions = dict(map( lambda x: (find_group(HEADER_PATTERN, x), x), header_pkgs)) image_versions = dict(map( lambda x: (find_group(IMAGE_PATTERN, x), x), image_pkgs)) results = [] for version, pkg in header_versions.items(): if version not in image_versions: results.append(pkg) print(' '.join(results)) if __name__ == "__main__": main()
#!/usr/bin/env python from __future__ import print_function import sys import os import re from subprocess import check_output IMAGE_PATTERN = re.compile( 'linux-image-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic') HEADER_PATTERN = re.compile( 'linux-headers-(?P<version>[0-9\.]+)-(?P<rev>[0-9]{2})-generic') def get_all_packages(): for line in check_output(['dpkg', '-l']).split('\n'): if line.startswith('ii'): # print(line.split(' ')) yield line.split()[1] def find_group(pattern, text): matched = pattern.match(text) if matched: return '{version}-{rev}'.format( version=matched.group('version'), rev=matched.group('rev')) return None def main(): packages = list(get_all_packages()) header_pkgs = filter(lambda x: HEADER_PATTERN.match(x), packages) image_pkgs = filter(lambda x: IMAGE_PATTERN.match(x), packages) header_versions = map(lambda x: find_group(HEADER_PATTERN, x), header_pkgs) image_versions = map(lambda x: find_group(IMAGE_PATTERN, x), image_pkgs) print(header_pkgs) print(image_pkgs) print(header_versions) print(image_versions) if __name__ == "__main__": main()
Add default filters to theme templating engine
var path = require('path'); var nunjucks = require('nunjucks'); var DoExtension = require('nunjucks-do')(nunjucks); var TEMPLATES_FOLDER = require('../../constants/templatesFolder'); var Templating = require('../../templating'); var TemplateEngine = require('../../models/templateEngine'); var defaultFilters = require('../../constants/defaultFilters'); var listSearchPaths = require('./listSearchPaths'); /** Directory for a theme with the templates */ function templateFolder(dir) { return path.join(dir, TEMPLATES_FOLDER); } /** Create templating engine to render themes @param {Output} output @param {String} currentFile @return {TemplateEngine} */ function createTemplateEngine(output, currentFile) { var searchPaths = listSearchPaths(output); // Search paths for templates var tplSearchPaths = searchPaths.map(templateFolder); var loader = new Templating.ThemesLoader(tplSearchPaths); return TemplateEngine.create({ loader: loader, filters: defaultFilters.merge({ t: function(s) { return s; }, resolveFile: function(s) { return s; }, resolveAsset: function(s) { return s; }, fileExists: function() { return false; }, contentURL: function(s) { return s; }, getArticleByPath: function(s) { return undefined; } }), extensions: { 'DoExtension': new DoExtension() } }); } module.exports = createTemplateEngine;
var path = require('path'); var nunjucks = require('nunjucks'); var DoExtension = require('nunjucks-do')(nunjucks); var TEMPLATES_FOLDER = require('../../constants/templatesFolder'); var Templating = require('../../templating'); var TemplateEngine = require('../../models/templateEngine'); var listSearchPaths = require('./listSearchPaths'); /** Directory for a theme with the templates */ function templateFolder(dir) { return path.join(dir, TEMPLATES_FOLDER); } /** Create templating engine to render themes @param {Output} output @param {String} currentFile @return {TemplateEngine} */ function createTemplateEngine(output, currentFile) { var searchPaths = listSearchPaths(output); // Search paths for templates var tplSearchPaths = searchPaths.map(templateFolder); var loader = new Templating.ThemesLoader(tplSearchPaths); return TemplateEngine.create({ loader: loader, filters: { t: function(s) { return s; }, resolveFile: function(s) { return s; }, resolveAsset: function(s) { return s; }, fileExists: function() { return false; }, contentURL: function(s) { return s; }, getArticleByPath: function(s) { return undefined; } }, extensions: { 'DoExtension': new DoExtension() } }); } module.exports = createTemplateEngine;
Watch and run tests on lib change too.
'use strict'; module.exports = function (grunt) { grunt.initConfig({ mochacli: { options: { ui: 'bdd', reporter: 'spec', require: [ 'espower_loader_helper.js' ] }, all: ['test/*Test.js'] }, jshint: { options: { jshintrc: '.jshintrc' }, gruntfile: { src: 'Gruntfile.js' }, lib: { src: ['lib/**/*.js'] }, test: { src: ['test/**/*.js'] } }, watch: { gruntfile: { files: '<%= jshint.gruntfile.src %>', tasks: ['jshint:gruntfile'] }, lib: { files: '<%= jshint.lib.src %>', tasks: ['jshint:lib', 'test'] }, test: { files: '<%= jshint.test.src %>', tasks: ['test'] } } }); grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-mocha-cli'); grunt.registerTask('test', ['mochacli']); // Default task. grunt.registerTask('default', ['test']); };
'use strict'; module.exports = function (grunt) { grunt.initConfig({ mochacli: { options: { ui: 'bdd', reporter: 'spec', require: [ 'espower_loader_helper.js' ] }, all: ['test/*Test.js'] }, jshint: { options: { jshintrc: '.jshintrc' }, gruntfile: { src: 'Gruntfile.js' }, lib: { src: ['lib/**/*.js'] }, test: { src: ['test/**/*.js'] } }, watch: { gruntfile: { files: '<%= jshint.gruntfile.src %>', tasks: ['jshint:gruntfile'] }, lib: { files: '<%= jshint.lib.src %>', tasks: ['jshint:lib'] }, test: { files: '<%= jshint.test.src %>', tasks: ['test'] } } }); grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-watch'); grunt.loadNpmTasks('grunt-mocha-cli'); grunt.registerTask('test', ['mochacli']); // Default task. grunt.registerTask('default', ['test']); };
Update PostFeedItem to use path instead of href.
/* @flow */ import React from 'react'; import Box from 'grommet/components/Box'; import Anchor from 'grommet/components/Anchor'; import Heading from 'grommet/components/Heading'; import Section from 'grommet/components/Section'; // $FlowFixMe grommet-cms required module not found import { formatPrettyDate } from 'grommet-cms/utils'; export default function PostFeedItem(props: { post: { title: string, image: Object, slug: string, createdAt: string }, colorIndex: string, postPath: string, }) { const { post, postPath, colorIndex } = props; return ( <Anchor className="post-feed-item--anchor" path={`${postPath}${post.slug}`}> <Section full colorIndex={colorIndex} texture={post.image.path} pad="large" > <Box align="center" justify="start" pad={{ vertical: 'large' }} full > <Heading strong align="center"> {post.title} </Heading> <Heading align="center" tag="h3"> {`Posted on ${formatPrettyDate(post.createdAt)}`} </Heading> </Box> </Section> </Anchor> ); }
/* @flow */ import React from 'react'; import Box from 'grommet/components/Box'; import Anchor from 'grommet/components/Anchor'; import Heading from 'grommet/components/Heading'; import Section from 'grommet/components/Section'; // $FlowFixMe grommet-cms required module not found import { formatPrettyDate } from 'grommet-cms/utils'; export default function PostFeedItem(props: { post: { title: string, image: Object, slug: string, createdAt: string }, colorIndex: string, postPath: string, }) { const { post, postPath, colorIndex } = props; console.log('image', post.image.path, post); return ( <Anchor className="post-feed-item--anchor" href={`${postPath}${post.slug}`}> <Section full colorIndex={colorIndex} texture={post.image.path} pad="large" > <Box align="center" justify="start" pad={{ vertical: 'large' }} full > <Heading strong align="center"> {post.title} </Heading> <Heading align="center" tag="h3"> {`Posted on ${formatPrettyDate(post.createdAt)}`} </Heading> </Box> </Section> </Anchor> ); }
Add: Write CHANGES at the end of README in PyPI
from setuptools import setup from wsinfo import __version__ with open("README.rst", "r") as f: long_description = f.read() long_description += "\n" with open("CHANGES", "r") as f: long_description += f.read() setup(name="wsinfo", packages=["wsinfo"], version=__version__, description="Python package for simply retrieving information about a specific website.", long_description=long_description, author="Linus Groh", license="MIT", author_email="[email protected]", url="https://github.com/linusg/wsinfo", download_url="https://pypi.python.org/pypi/wsinfo", keywords=["website", "http", "url", "internet", "online", "information"], classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Python Modules"], )
from setuptools import setup from wsinfo import __version__ with open("README.rst", "r") as f: long_description = f.read() setup(name="wsinfo", packages=["wsinfo"], version=__version__, description="Python package for simply retrieving information about a specific website.", long_description=long_description, author="Linus Groh", license="MIT", author_email="[email protected]", url="https://github.com/linusg/wsinfo", download_url="https://pypi.python.org/pypi/wsinfo", keywords=["website", "http", "url", "internet", "online", "information"], classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Internet", "Topic :: Internet :: WWW/HTTP", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Python Modules"], )
test: Use new test action function
import UserStore from 'new-dashboard/store/user'; import { testAction2 } from '../helpers'; jest.mock('carto-node'); const mutations = UserStore.mutations; const actions = UserStore.actions; describe('UserStore', () => { describe('mutations', () => { it('setUserData', () => { const state = { email: '[email protected]', username: 'example', website: 'https://carto.com' }; const userData = { email: '[email protected]', username: 'carto' }; mutations.setUserData(state, userData); expect(state).toEqual({ email: '[email protected]', username: 'carto', website: 'https://carto.com' }); }); }); describe('actions', () => { describe('updateData', () => { let state; beforeEach(() => { state = { email: '[email protected]', username: 'example', website: 'https://carto.com' }; }); it('success', done => { const newConfigData = { email: '[email protected]', username: 'carto' }; const expectedMutations = [ { type: 'setUserData', payload: newConfigData } ]; testAction2({ action: actions.updateData, state, expectedMutations, done }); }); }); }); });
import UserStore from 'new-dashboard/store/user'; import { testAction } from '../helpers'; jest.mock('carto-node'); const mutations = UserStore.mutations; const actions = UserStore.actions; describe('UserStore', () => { describe('mutations', () => { it('setUserData', () => { const state = { email: '[email protected]', username: 'example', website: 'https://carto.com' }; const userData = { email: '[email protected]', username: 'carto' }; mutations.setUserData(state, userData); expect(state).toEqual({ email: '[email protected]', username: 'carto', website: 'https://carto.com' }); }); }); describe('actions', () => { describe('updateData', () => { let state; beforeEach(() => { state = { email: '[email protected]', username: 'example', website: 'https://carto.com' }; }); it('success', done => { const newConfigData = { email: '[email protected]', username: 'carto' }; testAction(actions.updateData, null, state, [ { type: 'setUserData', payload: newConfigData } ], [], done); }); }); }); });
Use new namespace for ClassLookup
<?php namespace Kunstmaan\FormBundle\Entity\PageParts; use Kunstmaan\UtilitiesBundle\Helper\ClassLookup; use Kunstmaan\FormBundle\Entity\FormAdaptorInterface; use Kunstmaan\PagePartBundle\Entity\AbstractPagePart; use Doctrine\ORM\Mapping as ORM; /** * Abstract version of a form page part */ abstract class AbstractFormPagePart extends AbstractPagePart implements FormAdaptorInterface { const ERROR_REQUIRED_FIELD = "field.required"; /** * The label * * @ORM\Column(type="string", nullable=true) */ protected $label; /** * Returns a unique id for the current page part * * @return string */ public function getUniqueId() { return str_replace('\\', ':', ClassLookup::getClass($this)) . $this->id; //TODO } /** * Set the label used for this page part * * @param int $label */ public function setLabel($label) { $this->label = $label; } /** * Get the label used for this page part * * @return string */ public function getLabel() { return $this->label; } /** * Returns the view used in the backend * * @return string */ public function getAdminView() { return "KunstmaanFormBundle:AbstractFormPagePart:admin-view.html.twig"; } }
<?php namespace Kunstmaan\FormBundle\Entity\PageParts; use Kunstmaan\AdminBundle\Helper\ClassLookup; use Kunstmaan\FormBundle\Entity\FormAdaptorInterface; use Kunstmaan\PagePartBundle\Entity\AbstractPagePart; use Doctrine\ORM\Mapping as ORM; /** * Abstract version of a form page part */ abstract class AbstractFormPagePart extends AbstractPagePart implements FormAdaptorInterface { const ERROR_REQUIRED_FIELD = "field.required"; /** * The label * * @ORM\Column(type="string", nullable=true) */ protected $label; /** * Returns a unique id for the current page part * * @return string */ public function getUniqueId() { return str_replace('\\', ':', ClassLookup::getClass($this)) . $this->id; //TODO } /** * Set the label used for this page part * * @param int $label */ public function setLabel($label) { $this->label = $label; } /** * Get the label used for this page part * * @return string */ public function getLabel() { return $this->label; } /** * Returns the view used in the backend * * @return string */ public function getAdminView() { return "KunstmaanFormBundle:AbstractFormPagePart:admin-view.html.twig"; } }
Update dependencies for node and less
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-oscar-fancypages', version=":versiontools:fancypages:", url='https://github.com/tangentlabs/django-oscar-fancypages', author="Sebastian Vetter", author_email="[email protected]", description="Adding fancy CMS-style pages to Oscar", long_description=open('README.rst').read(), keywords="django, oscar, e-commerce, cms, pages, flatpages", license='BSD', platforms=['linux'], packages=find_packages(exclude=["sandbox*", "tests*"]), include_package_data=True, install_requires=[ 'versiontools>=1.9.1', 'Django>=1.4.1', 'django-oscar>=0.4', 'django-model-utils>=1.1.0', 'django-compressor>=1.2', 'virtual-node>=0.0.1', 'virtual-less>=0.0.1', ], dependency_links=[ 'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4' ], # See http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', #'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python' ] )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-oscar-fancypages', version=":versiontools:fancypages:", url='https://github.com/tangentlabs/django-oscar-fancypages', author="Sebastian Vetter", author_email="[email protected]", description="Adding fancy CMS-style pages to Oscar", long_description=open('README.rst').read(), keywords="django, oscar, e-commerce, cms, pages, flatpages", license='BSD', platforms=['linux'], packages=find_packages(exclude=["sandbox*", "tests*"]), include_package_data=True, install_requires=[ 'versiontools>=1.9.1', 'Django>=1.4.1', #'django-oscar>=0.3.3', 'django-model-utils>=1.1.0', 'django-compressor>=1.2', ], dependency_links=[ 'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar' ], # See http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', #'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python' ] )
Fix additional "!" when push ignore list
/** * Configuration module */ var path = require('path'); var fs = require('fs'); var lodash = require('lodash'); var jsonlint = require('json-lint'); var print = require('./print.js'); module.exports = { read: read }; var defaultConfig = { 'source': './', 'destination': './situs', 'ignore': [ 'node_modules/**/*' ], 'port': 4000, 'noConfig': false, 'global': {} }; function read(filePath, callback) { fs.exists(filePath, function (exists) { if (!exists) { // Add compiled dir in ignore list defaultConfig.ignore.push('situs/**/*'); defaultConfig.noConfig = true; return callback(null, defaultConfig); } fs.readFile(filePath, 'utf8', function (error, data) { if (error) { return callback(error); } var lint = jsonlint(data, {comments:false}); if (lint.error) { error = 'Syntax error on situs.json:'+lint.line+'.\n'+lint.error; return callback(error); } var obj = lodash.extend(defaultConfig, JSON.parse(data)); // Add compiled dir in ignore list obj.ignore.push(path.normalize(obj.destination)+'/**/*'); return callback(null, obj); }); }); }
/** * Configuration module */ var path = require('path'); var fs = require('fs'); var lodash = require('lodash'); var jsonlint = require('json-lint'); var print = require('./print.js'); module.exports = { read: read }; var defaultConfig = { 'source': './', 'destination': './situs', 'ignore': [ 'node_modules/**/*' ], 'port': 4000, 'noConfig': false, 'global': {} }; function read(filePath, callback) { fs.exists(filePath, function (exists) { if (!exists) { // Add compiled dir in ignore list defaultConfig.ignore.push('situs/**/*'); defaultConfig.noConfig = true; return callback(null, defaultConfig); } fs.readFile(filePath, 'utf8', function (error, data) { if (error) { return callback(error); } var lint = jsonlint(data, {comments:false}); if (lint.error) { error = 'Syntax error on situs.json:'+lint.line+'.\n'+lint.error; return callback(error); } var obj = lodash.extend(defaultConfig, JSON.parse(data)); // Add compiled dir in ignore list obj.ignore.push('!'+path.normalize(obj.destination)+'/**/*'); return callback(null, obj); }); }); }
Edit default list to avoid violations of content security policy
var defaultFilterList = [ { name: "Default - China", isEnabled: true, isEditable: false, activePage: null, rules: [ { src: "ajax.googleapis.com/ajax/*", dest: "cdnjs.cloudflare.com/ajax/*", }, { src: "fonts.googleapis.com/*", dest: "fonts.useso.com/*", }, { src: "https://*fastly.net*", dest: "http://*fastly.net*", }, { src: "*google.com*", dest: "", }, { src: "*googleapis.*", dest: "", }, { src: "*googlecode.*", dest: "", }, { src: "*facebook.*", dest: "", }, { src: "*twitter.*", dest: "", }, { src: "*youtube.*", dest: "", }, { src: "*youtu.be*", dest: "", }, ] } ];
var defaultFilterList = [ { name: "Default - China", isEnabled: true, isEditable: false, activePage: null, rules: [ { src: "ajax.googleapis.com/ajax/*", dest: "http://ajax.useso.com/ajax/*", }, { src: "fonts.googleapis.com/*", dest: "http://fonts.useso.com/*", }, { src: "https://*fastly.net*", dest: "http://*fastly.net*", }, { src: "*google.com*", dest: "", }, { src: "*googleapis.*", dest: "", }, { src: "*googlecode.*", dest: "", }, { src: "*facebook.*", dest: "", }, { src: "*twitter.*", dest: "", }, { src: "*youtube.*", dest: "", }, { src: "*youtu.be*", dest: "", }, ] } ];
Check for xhr.upload before using it.
var extend = require("extend"); var Q = require("q"); var reqwest = require("reqwest"); module.exports = { request: function(options) { var defer = Q.defer(); var r = reqwest(extend({ contentType: "application/json", crossOrigin: (typeof document == "object"), before: function(xhr) { if(xhr && xhr.upload) { if(xhr.upload.addEventListener) { xhr.upload.addEventListener("progress", progress); } else { xhr.upload.onprogress = progress; } } function progress(event) { defer.notify({ percent: 100 * event.loaded / event.total, loaded: event.loaded, total: event.total }); }; } }, options)) .then(function(response) { if(response && typeof response.responseText == "string") { response = response.responseText; } try { response = JSON.parse(response); } catch(e) {} defer.notify({percent: 100}); defer.resolve({ response: response, request: r.request }); }) .fail(function(xhr) { defer.reject(errorFromXhr(xhr)); }); return defer.promise; } } function errorFromXhr(xhr) { try { var result = JSON.parse(xhr.responseText); if(typeof result.errorMessage == "string") { return new Error(result.errorMessage); } else { return result; } } catch(e) {} return new Error(xhr.responseText); }
var extend = require("extend"); var Q = require("q"); var reqwest = require("reqwest"); module.exports = { request: function(options) { var defer = Q.defer(); var r = reqwest(extend({ contentType: "application/json", crossOrigin: (typeof document == "object"), before: function(xhr) { if(xhr.upload && xhr.upload.addEventListener) { xhr.upload.addEventListener("progress", progress); } else { xhr.upload.onprogress = progress; } function progress(event) { defer.notify({ percent: 100 * event.loaded / event.total, loaded: event.loaded, total: event.total }); }; } }, options)) .then(function(response) { if(response && typeof response.responseText == "string") { response = response.responseText; } try { response = JSON.parse(response); } catch(e) {} defer.notify({percent: 100}); defer.resolve({ response: response, request: r.request }); }) .fail(function(xhr) { defer.reject(errorFromXhr(xhr)); }); return defer.promise; } } function errorFromXhr(xhr) { try { var result = JSON.parse(xhr.responseText); if(typeof result.errorMessage == "string") { return new Error(result.errorMessage); } else { return result; } } catch(e) {} return new Error(xhr.responseText); }
Add a test for site percolation - probably needs some better tests on percolation thresholds
import OpenPNM as op import scipy as sp mgr = op.Base.Workspace() mgr.loglevel = 60 class OrdinaryPercolationTest: def setup_class(self): self.net = op.Network.Cubic(shape=[5, 5, 5]) self.geo = op.Geometry.Toray090(network=self.net, pores=self.net.Ps, throats=self.net.Ts) self.phase = op.Phases.Water(network=self.net) self.phys = op.Physics.Standard(network=self.net, phase=self.phase, pores=self.net.Ps, throats=self.net.Ts) self.OP1 = op.Algorithms.OrdinaryPercolation(network=self.net, invading_phase=self.phase) Ps = self.net.pores(labels=['bottom']) self.OP1.run(inlets=Ps) self.OP1.return_results(Pc=7000) lpf = self.OP1.evaluate_late_pore_filling(Pc=8000) assert sp.size(lpf) == self.net.Np def test_site_percolation(self): self.OP2 = op.Algorithms.OrdinaryPercolation(network=self.net, invading_phase=self.phase, percolation_type='site') Ps = self.net.pores(labels=['bottom']) self.OP2.run(inlets=Ps) self.OP2.return_results(Pc=7000) lpf = self.OP2.evaluate_late_pore_filling(Pc=8000) assert sp.size(lpf) == self.net.Np
import OpenPNM as op import scipy as sp mgr = op.Base.Workspace() mgr.loglevel = 60 class OrdinaryPercolationTest: def setup_test(self): self.net = op.Network.Cubic(shape=[5, 5, 5]) self.geo = op.Geometry.Toray090(network=self.net, pores=self.net.Ps, throats=self.net.Ts) self.phase = op.Phases.Water(network=self.net) self.phys = op.Physics.Standard(network=self.net, pores=self.net.Ps, throats=self.net.Ts) self.OP = op.Algorithms.OrdinaryPercolation(network=self.net, invading_phase=self.phase) Ps = self.net.pores(labels=['bottom_boundary']) self.OP.run(inlets=Ps) self.OP.return_results(Pc=7000) lpf = self.OP.evaluate_late_pore_filling(Pc=8000) assert sp.size(lpf) == self.net.Np
Fix a logic error in path normalization
'use strict'; var _ = require( 'lodash' ); var path = require( 'path' ); var gutil = require( 'gulp-util' ); var through = require( 'through2' ); var File = gutil.File; var PluginError = gutil.PluginError; function _normalizePath ( input ) { var input = input || '/'; var len = input.length; var end = input[ len - 1 ]; return input + (end === '/' ? '' : '/'); } module.exports = function ( includePath ) { var includePath = _normalizePath( includePath ); return through.obj(function ( file, enc, cb ) { if ( file.isNull() ) { this.push( file ); return cb(); } if ( file.isStream() ) { throw new PluginError({ plugin: 'gulp-include-generator', message: 'Streaming not supported' }); return cb(); } var fileName = path.basename( file.path ); var fileType = path.extname( fileName ); var htmlName = fileName + '.html'; var template = fileType === '.js' ? '<script src="<%= path %>"></script>' : '<link href="<%= path %>" rel="stylesheet" />'; var includeContents = _.template( template, { path: includePath + fileName }); this.push(new File({ contents: new Buffer( includeContents ), path: htmlName })); cb(); }); };
'use strict'; var _ = require( 'lodash' ); var path = require( 'path' ); var gutil = require( 'gulp-util' ); var through = require( 'through2' ); var File = gutil.File; var PluginError = gutil.PluginError; function _normalizePath ( input ) { var input = input || '/'; var len = input.length; var end = input[ len - 1 ]; return input + end === '/' ? '' : '/'; } module.exports = function ( includePath ) { var includePath = _normalizePath( includePath ); return through.obj(function ( file, enc, cb ) { if ( file.isNull() ) { this.push( file ); return cb(); } if ( file.isStream() ) { throw new PluginError({ plugin: 'gulp-include-generator', message: 'Streaming not supported' }); return cb(); } var fileName = path.basename( file.path ); var fileType = path.extname( fileName ); var htmlName = fileName + '.html'; var template = fileType === '.js' ? '<script src="<%= path %>"></script>' : '<link href="<%= path %>" rel="stylesheet" />'; var includeContents = _.template( template, { path: includePath + fileName }); this.push(new File({ contents: new Buffer( includeContents ), path: htmlName })); cb(); }); };
Fix injected file path pointing to root.
'use strict'; require('dotenv').load(); var pkg = require('./package.json'), path = require('path'); var gulp = require('gulp'), gutil = require('gulp-util'), inject = require('gulp-inject'), plumber = require('gulp-plumber'), pgbuild = require('gulp-phonegap-build'), bowerFiles = require('main-bower-files'); var dir = { out: path.join(__dirname, './out'), dist: path.join(__dirname, './dist'), assets: path.join(__dirname, './public'), }; gulp.task('default', ['inject', 'build'], function () { gutil.log('Building... This might take a while.'); }); gulp.task('build', function () { gulp.src(dir.assets + '/**') .pipe(plumber()) .pipe(pgbuild({ appId: process.env.PG_BUILD_APP_ID, download: { android: dir.out + '/onesquare-' + pkg.version + '.apk' }, user: { token: process.env.PG_BUILD_AUTH_TOKEN } })); }); gulp.task('inject', function () { var sources = [ dir.assets + '/lib/**/*.html' ]; return gulp.src(dir.assets + '/index.html') .pipe(inject(gulp.src(sources, {read: false}), {addRootSlash: false})) .pipe(inject(gulp.src(bowerFiles(), {read: false}), {name: 'bower', addRootSlash: false})) .pipe(gulp.dest(dir.dist)); });
'use strict'; require('dotenv').load(); var pkg = require('./package.json'), path = require('path'); var gulp = require('gulp'), gutil = require('gulp-util'), inject = require('gulp-inject'), plumber = require('gulp-plumber'), pgbuild = require('gulp-phonegap-build'), bowerFiles = require('main-bower-files'); var dir = { out: path.join(__dirname, './out'), dist: path.join(__dirname, './dist'), assets: path.join(__dirname, './public'), }; gulp.task('default', ['inject', 'build'], function () { gutil.log('Building... This might take a while.'); }); gulp.task('build', function () { gulp.src(dir.assets + '/**') .pipe(plumber()) .pipe(pgbuild({ appId: process.env.PG_BUILD_APP_ID, download: { android: dir.out + '/onesquare-' + pkg.version + '.apk' }, user: { token: process.env.PG_BUILD_AUTH_TOKEN } })); }); gulp.task('inject', function () { var sources = [ dir.assets + '/lib/**/*.html' ]; return gulp.src(dir.assets + '/index.html') .pipe(inject(gulp.src(sources, {read: false}))) .pipe(inject(gulp.src(bowerFiles(), {read: false}), {name: 'bower'})) .pipe(gulp.dest(dir.dist)); });
Fix Python 3 string type checking
import six from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] if isinstance(backend_class, six.string_types): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend
from six.moves.urllib.parse import urlparse from dache.backends.base import CacheKeyWarning # noqa from dache.backends.filebased import FileBasedCache from dache.backends.locmem import LocMemCache from dache.backends.redis import RedisCache from dache.utils.module_loading import import_string __version__ = '0.0.1' __all__ = ('register_backend', 'Cache', 'CacheKeyWarning') _BACKENDS = { 'file': FileBasedCache, 'locmem': LocMemCache, 'redis': RedisCache, } def register_backend(url_scheme, backend_class): """Register a cache backend.""" _BACKENDS[url_scheme] = backend_class class Cache(object): def __init__(self, url, **options): # Create cache backend result = urlparse(url) backend_class = _BACKENDS[result.scheme] if isinstance(backend_class, basestring): backend_class = import_string(backend_class) self._backend = backend_class(result, **options) public_methods = ('add', 'get', 'set', 'delete', 'get_many', 'has_key', 'incr', 'decr', 'set_many', 'delete_many', 'clear', 'validate_key', 'incr_version', 'decr_version', 'close') for method in public_methods: setattr(self, method, getattr(self._backend, method)) def __contains__(self, item): return item in self._backend
Use on, off instead of bind, unbind
/** * Event * * @author Rakume Hayashi<[email protected]> */ Faiash.ise.extend({ on: function() { var args = $.toArr(arguments); if (typeof args[0] === 'string') { if (!args[0] || !args[1]) { return this; } this.each(function(el) { el.addEventListener(args[0], args[1], (args[2] ? args[2] : false)); }); } else if (typeof args[0] === 'object') { Faiash.each(args, function(obj) { this.each(function(el) { for (var event in obj) { if (object.hasOwnProperty(event)) { el.addEventListener(event, obj[event][1]); } } }); }); } }, off: function() { var args = $.toArr(arguments); if (typeof args[0] === 'string') { this.each(function(el) { el.removeEventListener(args[0], args[1], args[2]); }); } else if (typeof args[0] === 'object' && args.length === 1) { Faiash.each(args, function(obj) { this.each(function(el) { for (var event in obj) { if (object.hasOwnProperty(event)) { el.removeEventListener(event, obj[event][1]); } } }); }); } } });
/** * Event * * @author Rakume Hayashi<[email protected]> */ Faiash.ise.extend({ bind: function() { var args = $.toArr(arguments); if (typeof args[0] === 'string') { if (!args[0] || !args[1]) { return this; } this.each(function(el) { el.addEventListener(args[0], args[1], (args[2] ? args[2] : false)); }); } else if (typeof args[0] === 'object') { Faiash.each(args, function(obj) { this.each(function(el) { for (var event in obj) { if (object.hasOwnProperty(event)) { el.addEventListener(event, obj[event][1]); } } }); }); } }, unbind: function() { var args = $.toArr(arguments); if (typeof args[0] === 'string') { this.each(function(el) { el.removeEventListener(args[0], args[1], args[2]); }); } else if (typeof args[0] === 'object' && args.length === 1) { Faiash.each(args, function(obj) { this.each(function(el) { for (var event in obj) { if (object.hasOwnProperty(event)) { el.removeEventListener(event, obj[event][1]); } } }); }); } } });
Revert "Fixes failed to find module in react-native 0.36.0"
var React = require('react'); import ReactNativePropRegistry from 'react-native/Libraries/Renderer/src/renderers/native/ReactNativePropRegistry'; var _ = require('lodash'); module.exports = function(incomingProps, defaultProps) { // External props has a higher precedence var computedProps = {}; incomingProps = _.clone(incomingProps); delete incomingProps.children; var incomingPropsStyle = incomingProps.style; delete incomingProps.style; // console.log(defaultProps, incomingProps); if(incomingProps) _.merge(computedProps, defaultProps, incomingProps); else computedProps = defaultProps; // Pass the merged Style Object instead if(incomingPropsStyle) { var computedPropsStyle = {}; computedProps.style = {}; if (Array.isArray(incomingPropsStyle)) { _.forEach(incomingPropsStyle, (style)=>{ if(typeof style == 'number') { _.merge(computedPropsStyle, ReactNativePropRegistry.getByID(style)); } else { _.merge(computedPropsStyle, style); } }) } else { if(typeof incomingPropsStyle == 'number') { computedPropsStyle = ReactNativePropRegistry.getByID(incomingPropsStyle); } else { computedPropsStyle = incomingPropsStyle; } } _.merge(computedProps.style, defaultProps.style, computedPropsStyle); } // console.log("computedProps ", computedProps); return computedProps; }
var React = require('react'); import { ReactNativePropRegistry } from 'react-native'; var _ = require('lodash'); module.exports = function(incomingProps, defaultProps) { // External props has a higher precedence var computedProps = {}; incomingProps = _.clone(incomingProps); delete incomingProps.children; var incomingPropsStyle = incomingProps.style; delete incomingProps.style; // console.log(defaultProps, incomingProps); if(incomingProps) _.merge(computedProps, defaultProps, incomingProps); else computedProps = defaultProps; // Pass the merged Style Object instead if(incomingPropsStyle) { var computedPropsStyle = {}; computedProps.style = {}; if (Array.isArray(incomingPropsStyle)) { _.forEach(incomingPropsStyle, (style)=>{ if(typeof style == 'number') { _.merge(computedPropsStyle, ReactNativePropRegistry.getByID(style)); } else { _.merge(computedPropsStyle, style); } }) } else { if(typeof incomingPropsStyle == 'number') { computedPropsStyle = ReactNativePropRegistry.getByID(incomingPropsStyle); } else { computedPropsStyle = incomingPropsStyle; } } _.merge(computedProps.style, defaultProps.style, computedPropsStyle); } // console.log("computedProps ", computedProps); return computedProps; }
Revert "Reorder dashboard time filter buttons"
import React from 'react'; import PropTypes from 'prop-types'; import DashButton from './DashButton'; //Custom all purpose dashboard button class DashRangeButtons extends React.Component { constructor(props) { super(props); this.state={selectedButton: 'schoolYear'}; } onClick(filterFunc, button) { filterFunc(); this.setState({selectedButton: button}); } render() { return ( <div className="DashRangeButtons"> Filter: <DashButton onClick={() => this.onClick(this.props.schoolYearFilter, 'schoolYear')} isSelected={this.state.selectedButton === 'schoolYear'} buttonText='School Year' /> <DashButton onClick={() => this.onClick(this.props.ninetyDayFilter, 'ninetyDays')} isSelected={this.state.selectedButton === 'ninetyDays'} buttonText='Past 90 Days' /> <DashButton onClick={() => this.onClick(this.props.fortyFiveDayFilter, 'fortyFiveDays')} isSelected={this.state.selectedButton === 'fortyFiveDays'} buttonText='Past 45 Days' /> </div> ); } } DashRangeButtons.propTypes = { schoolYearFilter: PropTypes.func.isRequired, ninetyDayFilter: PropTypes.func.isRequired, fortyFiveDayFilter: PropTypes.func.isRequired }; export default DashRangeButtons;
import React from 'react'; import PropTypes from 'prop-types'; import DashButton from './DashButton'; //Custom all purpose dashboard button class DashRangeButtons extends React.Component { constructor(props) { super(props); this.state={selectedButton: 'fortyFiveDays'}; } onClick(filterFunc, button) { filterFunc(); this.setState({selectedButton: button}); } render() { return ( <div className="DashRangeButtons"> Filter: <DashButton onClick={() => this.onClick(this.props.fortyFiveDayFilter, 'fortyFiveDays')} isSelected={this.state.selectedButton === 'fortyFiveDays'} buttonText='Past 45 Days' /> <DashButton onClick={() => this.onClick(this.props.ninetyDayFilter, 'ninetyDays')} isSelected={this.state.selectedButton === 'ninetyDays'} buttonText='Past 90 Days' /> <DashButton onClick={() => this.onClick(this.props.schoolYearFilter, 'schoolYear')} isSelected={this.state.selectedButton === 'schoolYear'} buttonText='School Year' /> </div> ); } } DashRangeButtons.propTypes = { schoolYearFilter: PropTypes.func.isRequired, ninetyDayFilter: PropTypes.func.isRequired, fortyFiveDayFilter: PropTypes.func.isRequired }; export default DashRangeButtons;
Fix fatal in rendering Phriction "Moved Away" stories Summary: Ref T13202. See PHI881. These stories have bad rendering methods, but they didn't previously render into the timeline (since Phriction documents didn't have a timeline). Update the rendering to work. The rendered outcome isn't great (it isn't very clear or explicit about exactly what moved where), but I'll fix that in a followup. This is a net improvement since it doesn't fatal the page, at least. Test Plan: - Moved page "X" to "Y". - Viewed the old page "X". - Before patch: bad timeline story would fatal rendering. - After patch: story renders, at least, just not great. Reviewers: amckinley Maniphest Tasks: T13202 Differential Revision: https://secure.phabricator.com/D19682
<?php final class PhrictionDocumentMoveAwayTransaction extends PhrictionDocumentVersionTransaction { const TRANSACTIONTYPE = 'move-away'; public function generateOldValue($object) { return null; } public function generateNewValue($object, $value) { $document = $value; $dict = array( 'id' => $document->getID(), 'phid' => $document->getPHID(), 'content' => $document->getContent()->getContent(), 'title' => $document->getContent()->getTitle(), ); return $dict; } public function applyInternalEffects($object, $value) { $object->setStatus(PhrictionDocumentStatus::STATUS_MOVED); $content = $this->getNewDocumentContent($object); $content->setContent(''); $content->setChangeType(PhrictionChangeType::CHANGE_MOVE_AWAY); $content->setChangeRef($value['id']); } public function getActionName() { return pht('Moved Away'); } public function getTitle() { $new = $this->getNewValue(); return pht( '%s moved this document to %s.', $this->renderAuthor(), $this->renderObject($new['phid'])); } public function getTitleForFeed() { $new = $this->getNewValue(); return pht( '%s moved %s to %s.', $this->renderAuthor(), $this->renderObject(), $this->renderObject($new['phid'])); } public function getIcon() { return 'fa-arrows'; } public function shouldHideForFeed() { return true; } }
<?php final class PhrictionDocumentMoveAwayTransaction extends PhrictionDocumentVersionTransaction { const TRANSACTIONTYPE = 'move-away'; public function generateOldValue($object) { return null; } public function generateNewValue($object, $value) { $document = $value; $dict = array( 'id' => $document->getID(), 'phid' => $document->getPHID(), 'content' => $document->getContent()->getContent(), 'title' => $document->getContent()->getTitle(), ); return $dict; } public function applyInternalEffects($object, $value) { $object->setStatus(PhrictionDocumentStatus::STATUS_MOVED); $content = $this->getNewDocumentContent($object); $content->setContent(''); $content->setChangeType(PhrictionChangeType::CHANGE_MOVE_AWAY); $content->setChangeRef($value['id']); } public function getActionName() { return pht('Moved Away'); } public function getTitle() { $new = $this->getNewValue(); return pht( '%s moved this document to %s', $this->renderAuthor(), $this->renderHandleLink($new['phid'])); } public function getTitleForFeed() { $new = $this->getNewValue(); return pht( '%s moved %s to %s', $this->renderAuthor(), $this->renderObject(), $this->renderHandleLink($new['phid'])); } public function getIcon() { return 'fa-arrows'; } public function shouldHideForFeed() { return true; } }
Comment out test that doesn't pass on Windows It appears to be assuming unix paths, so I'm going on the assumption that it's not a valid test case on Windows.
import os import tempfile import unittest from jupyter_core.paths import jupyter_config_dir from ..nbsetuptools import NBSetup class NBSetupTestCase(unittest.TestCase): def setUp(self): self.prefix = tempfile.mkdtemp() self.params = { 'prefix': self.prefix, 'static': os.path.join(os.path.dirname(__file__), 'support'), } def test_initialize(self): assert NBSetup('name').path == jupyter_config_dir() # assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter" def test_install(self): nb_setup = NBSetup('name', **self.params) nb_setup.install() assert os.path.exists( os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name')) def test_enable(self): nb_setup = NBSetup('name', **self.params) nb_setup.enable() for f in ['notebook.json', 'tree.json', 'edit.json']: assert os.path.exists( os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f) ) if __name__ == '__main__': unittest.main()
import os import tempfile import unittest from jupyter_core.paths import jupyter_config_dir from ..nbsetuptools import NBSetup class NBSetupTestCase(unittest.TestCase): def setUp(self): self.prefix = tempfile.mkdtemp() self.params = { 'prefix': self.prefix, 'static': os.path.join(os.path.dirname(__file__), 'support'), } def test_initialize(self): assert NBSetup('name').path == jupyter_config_dir() assert NBSetup('name', prefix="/tmp").path == "/tmp/etc/jupyter" def test_install(self): nb_setup = NBSetup('name', **self.params) nb_setup.install() assert os.path.exists( os.path.join(self.prefix, 'share', 'jupyter', 'nbextensions', 'name')) def test_enable(self): nb_setup = NBSetup('name', **self.params) nb_setup.enable() for f in ['notebook.json', 'tree.json', 'edit.json']: assert os.path.exists( os.path.join(self.prefix, 'etc', 'jupyter', 'nbconfig', f) ) if __name__ == '__main__': unittest.main()
Add '*.json' file extensions to test pattern list.
from __future__ import absolute_import import fnmatch import os import unittest from . import validate_json_format class TestSettings(unittest.TestCase): def _get_json_files(self, file_pattern, folder='.'): for root, dirnames, filenames in os.walk(folder): for filename in fnmatch.filter(filenames, file_pattern): yield os.path.join(root, filename) for dirname in [d for d in dirnames if d not in ('.git', '.tox')]: for f in self._get_json_files( file_pattern, os.path.join(root, dirname)): yield f def test_json_settings(self): """Test each JSON file.""" file_patterns = ( '*.sublime-settings', '*.sublime-commands', '*.sublime-menu', '*.json' ) for file_pattern in file_patterns: for f in self._get_json_files(file_pattern): print(f) self.assertFalse( validate_json_format.CheckJsonFormat( False, True).check_format(f), "%s does not comform to expected format!" % f)
from __future__ import absolute_import import fnmatch import os import unittest from . import validate_json_format class TestSettings(unittest.TestCase): def _get_json_files(self, file_pattern, folder='.'): for root, dirnames, filenames in os.walk(folder): for filename in fnmatch.filter(filenames, file_pattern): yield os.path.join(root, filename) for dirname in [d for d in dirnames if d not in ('.git', '.tox')]: for f in self._get_json_files( file_pattern, os.path.join(root, dirname)): yield f def test_json_settings(self): """Test each JSON file.""" file_patterns = ( '*.sublime-settings', '*.sublime-commands', '*.sublime-menu', ) for file_pattern in file_patterns: for f in self._get_json_files(file_pattern): print(f) self.assertFalse( validate_json_format.CheckJsonFormat( False, True).check_format(f), "%s does not comform to expected format!" % f)
Throw a critical Exception if maintenance mode could not be disabled
<?php /** * @link https://github.com/brussens/yii2-maintenance-mode * @copyright Copyright (c) since 2015 Dmitry Brusensky * @license http://opensource.org/licenses/MIT MIT */ namespace brussens\maintenance\states; use Yii; use brussens\maintenance\StateInterface; use yii\base\BaseObject; /** * Class FileState * * @package brussens\maintenance\states */ class FileState extends BaseObject implements StateInterface { /** * @var string */ public $fileName = '.enable'; /** * @var string */ public $directory = '@runtime'; /** * Turn on mode. * * @since 0.2.5 */ public function enable() { file_put_contents($this->getStatusFilePath(), ' '); } /** * Turn off mode. * * @since 0.2.5 */ public function disable() { $path = $this->getStatusFilePath(); if (file_exists($path)) { if (! unlink($path)) { throw new \Exception( "Attention: the maintenance mode could not be disabled because $path could not be removed." ); }; } } /** * @return bool will return true if the file exists */ public function isEnabled() { return file_exists($this->getStatusFilePath()); } /** * Return status file path. * * @return bool|string * @since 0.2.5 */ protected function getStatusFilePath() { return Yii::getAlias($this->directory . '/' . $this->fileName); } }
<?php /** * @link https://github.com/brussens/yii2-maintenance-mode * @copyright Copyright (c) since 2015 Dmitry Brusensky * @license http://opensource.org/licenses/MIT MIT */ namespace brussens\maintenance\states; use Yii; use brussens\maintenance\StateInterface; use yii\base\BaseObject; /** * Class FileState * @package brussens\maintenance\states */ class FileState extends BaseObject implements StateInterface { /** * @var string */ public $fileName = '.enable'; /** * @var string */ public $directory = '@runtime'; /** * Turn on mode. * @since 0.2.5 */ public function enable() { file_put_contents($this->getStatusFilePath(), ' '); } /** * Turn off mode. * @since 0.2.5 */ public function disable() { $path = $this->getStatusFilePath(); if (file_exists($path)) { unlink($path); } } /** * @return bool */ public function isEnabled() { return file_exists($this->getStatusFilePath()); } /** * Return status file path. * @since 0.2.5 * @return bool|string */ protected function getStatusFilePath() { return Yii::getAlias($this->directory . '/' . $this->fileName); } }
Rename parameter filter_package to a more appropriate transform_package
from contextlib import closing from operator import contains from functools import partial from minerva.util import k, identity from minerva.directory import EntityType from minerva.storage import Engine from minerva.storage.trend import TableTrendStore class TrendEngine(Engine): @staticmethod def store_cmd(package, transform_package=k(identity)): """ Return a function to bind a data source to the store command. :param package: A DataPackageBase subclass instance :param transform_package: (TableTrendStore) -> (DataPackage) -> DataPackage :return: function that binds a data source to the store command :rtype: (data_source) -> (conn) -> None """ def bind_data_source(data_source): def execute(conn): entity_type_name = package.entity_type_name() with closing(conn.cursor()) as cursor: entity_type = EntityType.get_by_name(entity_type_name)( cursor ) trend_store = TableTrendStore.get( data_source, entity_type, package.granularity )(cursor) trend_store.store( transform_package(trend_store)(package) ).run(conn) return execute return bind_data_source def filter_existing_trends(trend_store): existing_trend_names = {trend.name for trend in trend_store.trends} def f(package): return package.filter_trends(partial(contains, existing_trend_names)) return f
from contextlib import closing from operator import contains from functools import partial from minerva.util import k, identity from minerva.directory import EntityType from minerva.storage import Engine from minerva.storage.trend import TableTrendStore class TrendEngine(Engine): @staticmethod def store_cmd(package, filter_package=k(identity)): """ Return a function to bind a data source to the store command. :param package: A DataPackageBase subclass instance :param filter_package: (TableTrendStore) -> (DataPackage) -> DataPackage :return: function that binds a data source to the store command :rtype: (data_source) -> (conn) -> None """ def bind_data_source(data_source): def execute(conn): entity_type_name = package.entity_type_name() with closing(conn.cursor()) as cursor: entity_type = EntityType.get_by_name(entity_type_name)( cursor ) trend_store = TableTrendStore.get( data_source, entity_type, package.granularity )(cursor) trend_store.store( filter_package(trend_store)(package) ).run(conn) return execute return bind_data_source def filter_existing_trends(trend_store): existing_trend_names = {trend.name for trend in trend_store.trends} def f(package): return package.filter_trends(partial(contains, existing_trend_names)) return f
Work around null TfL common names
""" Usage: ./manage.py import_tfl_stops < data/tfl/bus-stops.csv """ import requests from titlecase import titlecase from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned from busstops.management.import_from_csv import ImportFromCSVCommand from busstops.models import StopPoint class Command(ImportFromCSVCommand): @staticmethod def get_name(atco_code): """ Given a stop's ATCO code, returns the best-formatted version of its common name from the TfL API """ data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json() return data.get('commonName') def handle_row(self, row): if row['Naptan_Atco'] in (None, '', 'NONE'): return None try: stop = StopPoint.objects.get(pk=row['Naptan_Atco']) except ObjectDoesNotExist: try: stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco']) except (ObjectDoesNotExist, MultipleObjectsReturned) as e: print e, row return None if row['Heading'] != '': stop.heading = row['Heading'] stop.common_name = self.get_name(stop.atco_code) or stop.common_name stop.tfl = True if stop.street.isupper(): stop.street = titlecase(stop.street) if stop.landmark.isupper(): stop.landmark = titlecase(stop.landmark) stop.save()
""" Usage: ./manage.py import_tfl_stops < data/tfl/bus-stops.csv """ import requests from titlecase import titlecase from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned from busstops.management.import_from_csv import ImportFromCSVCommand from busstops.models import StopPoint class Command(ImportFromCSVCommand): @staticmethod def get_name(atco_code): """ Given a stop's ATCO code, returns the best-formatted version of its common name from the TfL API """ data = requests.get('https://api.tfl.gov.uk/StopPoint/%s' % atco_code).json() return data.get('commonName') def handle_row(self, row): if row['Naptan_Atco'] in (None, '', 'NONE'): return None try: stop = StopPoint.objects.get(pk=row['Naptan_Atco']) except ObjectDoesNotExist: try: stop = StopPoint.objects.get(pk__contains=row['Naptan_Atco']) except (ObjectDoesNotExist, MultipleObjectsReturned) as e: print e, row return None if row['Heading'] != '': stop.heading = row['Heading'] stop.common_name = self.get_name(stop.atco_code) stop.tfl = True if stop.street.isupper(): stop.street = titlecase(stop.street) if stop.landmark.isupper(): stop.landmark = titlecase(stop.landmark) stop.save()
Use io.open with encoding='utf-8' and flake8 compliance
import io from setuptools import setup, find_packages long_description = '\n'.join(( io.open('README.rst', encoding='utf-8').read(), io.open('CHANGES.txt', encoding='utf-8').read() )) setup( name='importscan', version='0.2.dev0', description='Recursively import modules and sub-packages', long_description=long_description, classifiers=[ "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], keywords='decorator import package', author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools' ], extras_require=dict( test=['pytest >= 2.5.2', 'py >= 1.4.20', 'pytest-cov', 'pytest-remove-stale-bytecode'], ) )
import os from setuptools import setup from setuptools import find_packages here = os.path.abspath(os.path.dirname(__file__)) try: with open(os.path.join(here, 'README.rst')) as f: README = f.read() with open(os.path.join(here, 'CHANGES.rst')) as f: CHANGES = f.read() except: README = '' CHANGES = '' setup( name='importscan', version='0.2.dev0', description='Recursively import modules and sub-packages', long_description=README + '\n\n' + CHANGES, classifiers=[ "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], keywords='decorator import package', author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools' ], extras_require=dict( test=['pytest >= 2.5.2', 'py >= 1.4.20', 'pytest-cov', 'pytest-remove-stale-bytecode'], ) )
Patron-client: Add fallback on filter names
import React, { PropTypes } from 'react' import SearchFilter from './SearchFilter' import Labels from '../constants/Labels' export default React.createClass({ propTypes: { filters: PropTypes.array.isRequired, locationQuery: PropTypes.object.isRequired, setFilter: PropTypes.func.isRequired }, getDefaultProps () { return { filters: [] } }, renderEmpty () { return <div data-automation-id='empty'></div> }, render () { let groupedFilters = {} if (this.props.locationQuery.query && this.props.filters) { this.props.filters.forEach(filter => { groupedFilters[ filter.aggregation ] = groupedFilters[ filter.aggregation ] || [] groupedFilters[ filter.aggregation ].push(filter) }) return ( <aside className='col filters'> <h3>Avgrens søket ditt</h3> <div data-automation-id='search_filters'> {Object.keys(groupedFilters).map(aggregation => { let filtersByAggregation = groupedFilters[ aggregation ] return ( <SearchFilter key={aggregation} title={Labels[aggregation] || aggregation} aggregation={aggregation} filters={filtersByAggregation} locationQuery={this.props.locationQuery} setFilter={this.props.setFilter}/> ) })} </div> </aside> ) } else { return this.renderEmpty() } } })
import React, { PropTypes } from 'react' import SearchFilter from './SearchFilter' import Labels from '../constants/Labels' export default React.createClass({ propTypes: { filters: PropTypes.array.isRequired, locationQuery: PropTypes.object.isRequired, setFilter: PropTypes.func.isRequired }, getDefaultProps () { return { filters: [] } }, renderEmpty () { return <div data-automation-id='empty'></div> }, render () { let groupedFilters = {} if (this.props.locationQuery.query && this.props.filters) { this.props.filters.forEach(filter => { groupedFilters[ filter.aggregation ] = groupedFilters[ filter.aggregation ] || [] groupedFilters[ filter.aggregation ].push(filter) }) return ( <aside className='col filters'> <h3>Avgrens søket ditt</h3> <div data-automation-id='search_filters'> {Object.keys(groupedFilters).map(aggregation => { let filtersByAggregation = groupedFilters[ aggregation ] return ( <SearchFilter key={aggregation} title={Labels[aggregation]} aggregation={aggregation} filters={filtersByAggregation} locationQuery={this.props.locationQuery} setFilter={this.props.setFilter}/> ) })} </div> </aside> ) } else { return this.renderEmpty() } } })
Switch to random scoring for ES query
<?php namespace CascadeEnergy\DistributedOperations\Elasticsearch; use CascadeEnergy\DistributedOperations\Elasticsearch\Interfaces\ReadOnlyInterface; use CascadeEnergy\DistributedOperations\Elasticsearch\Traits\ReadOnlyTrait; use CascadeEnergy\DistributedOperations\Utility\ProviderInterface; use Elasticsearch\Helper\Iterators\SearchResponseIterator; class Provider implements ProviderInterface, ReadOnlyInterface { use ReadOnlyTrait; /** @var string */ private $scrollTime = '1m'; /** @var string */ private $type; public function setScrollTime($scrollTime) { $this->scrollTime = $scrollTime; } public function setType($type) { $this->type = $type; } public function begin() { $searchParams = [ 'index' => $this->indexName, 'scroll' => $this->scrollTime, 'body' => [ 'query' => [ 'function_score' => [ 'query' => ['term' => ['state' => 'new']], 'random_score' => [] ] ] ] ]; if (!empty($this->type)) { $searchParams['type'] = $this->type; } $responseIterator = new SearchResponseIterator($this->client, $searchParams); return new ProviderIterator($responseIterator); } public function end(\Traversable $providerIterator) { if ($providerIterator instanceof ProviderIterator) { $providerIterator->end(); } } }
<?php namespace CascadeEnergy\DistributedOperations\Elasticsearch; use CascadeEnergy\DistributedOperations\Elasticsearch\Interfaces\ReadOnlyInterface; use CascadeEnergy\DistributedOperations\Elasticsearch\Traits\ReadOnlyTrait; use CascadeEnergy\DistributedOperations\Utility\ProviderInterface; use Elasticsearch\Helper\Iterators\SearchResponseIterator; class Provider implements ProviderInterface, ReadOnlyInterface { use ReadOnlyTrait; /** @var string */ private $scrollTime = '1m'; /** @var string */ private $type; public function setScrollTime($scrollTime) { $this->scrollTime = $scrollTime; } public function setType($type) { $this->type = $type; } public function begin() { $searchParams = [ 'index' => $this->indexName, 'scroll' => $this->scrollTime, 'body' => ['query' => ['bool' => ['must' => ['term' => ['state' => 'new']]]]] ]; if (!empty($this->type)) { $searchParams['type'] = $this->type; } $responseIterator = new SearchResponseIterator($this->client, $searchParams); return new ProviderIterator($responseIterator); } public function end(\Traversable $providerIterator) { if ($providerIterator instanceof ProviderIterator) { $providerIterator->end(); } } }
Add response to commander executor
package com.decker.Essentials; import java.io.IOException; import java.io.OutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.decker.Essentials.Category.Category; import com.decker.Essentials.User.User; public class Center extends org.sipc.se.plugin.PluginImpl { Category category; public Center() { super(); this.category=Category.getInstance(); } @Override public void getResponse(HttpServletRequest request, HttpServletResponse response) { String url = request.getRequestURI(); String target = url.substring(url.indexOf("Essentials") + "Essentials".length() + 1); OutputStream stream; try { stream = response.getOutputStream(); byte[] resource=null; if (target.matches(".*.exec")) { //dynamic generate User current=new User(request.getCookies()); resource = Command.Execute(current,request,response); } else { //static resource resource = ResourceManager.getInstance().getResource("Content/"+target); } if (resource.length == 0) { response.setStatus(404); } stream.write(resource); } catch (IOException e) { } } @Override public String getUrl() { return "Essentials"; } @Override public boolean onEnable() { // TODO Auto-generated method stub return true; } }
package com.decker.Essentials; import java.io.IOException; import java.io.OutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.decker.Essentials.Category.Category; import com.decker.Essentials.User.User; public class Center extends org.sipc.se.plugin.PluginImpl { Category category; public Center() { super(); this.category=Category.getInstance(); } @Override public void getResponse(HttpServletRequest request, HttpServletResponse response) { String url = request.getRequestURI(); String target = url.substring(url.indexOf("Essentials") + "Essentials".length() + 1); OutputStream stream; try { stream = response.getOutputStream(); byte[] resource=null; if (target.matches(".*.exec")) { //dynamic generate User current=new User(request.getCookies()); resource = Command.Execute(current,request); } else { //static resource resource = ResourceManager.getInstance().getResource(target); } if (resource.length == 0) { response.setStatus(404); } stream.write(resource); } catch (IOException e) { } } @Override public String getUrl() { return "Essentials"; } @Override public boolean onEnable() { // TODO Auto-generated method stub return true; } }
Normalize for nulling out on IE at the ends
var _create = global.document.createNodeIterator; export function createNodeIterator(root, whatToShow, filter = null) { var iter = _create.call(global.document, root, whatToShow, filter, false); return typeof(iter.referenceNode) === 'undefined' ? shim(iter, root) : iter; } export function install() { global.document.createNodeIterator = createNodeIterator; } function shim(iter, root) { var _referenceNode = root; var _pointerBeforeReferenceNode = true; return Object.create(NodeIterator.prototype, { root: { get: function () { return iter.root; } }, whatToShow: { get: function () { return iter.whatToShow; } }, filter: { get: function () { return iter.filter; } }, referenceNode: { get: function () { return _referenceNode; } }, pointerBeforeReferenceNode: { get: function () { return _pointerBeforeReferenceNode; } }, nextNode: { value: function () { let result = iter.nextNode(); if (result !== null) { _referenceNode = result; } _pointerBeforeReferenceNode = false; return _referenceNode; } }, previousNode: { value: function () { let result = iter.previousNode(); if (result !== null) { _referenceNode = result; } _pointerBeforeReferenceNode = true; return _referenceNode; } } }); }
var _create = global.document.createNodeIterator; export function createNodeIterator(root, whatToShow, filter = null) { var iter = _create.call(global.document, root, whatToShow, filter, false); return typeof(iter.referenceNode) === 'undefined' ? shim(iter, root) : iter; } export function install() { global.document.createNodeIterator = createNodeIterator; } function shim(iter, root) { var _referenceNode = root; var _pointerBeforeReferenceNode = true; return Object.create(NodeIterator.prototype, { root: { get: function () { return iter.root; } }, whatToShow: { get: function () { return iter.whatToShow; } }, filter: { get: function () { return iter.filter; } }, referenceNode: { get: function () { return _referenceNode; } }, pointerBeforeReferenceNode: { get: function () { return _pointerBeforeReferenceNode; } }, nextNode: { value: function () { _referenceNode = iter.nextNode(); _pointerBeforeReferenceNode = false; return _referenceNode; } }, previousNode: { value: function () { _referenceNode = iter.previousNode(); _pointerBeforeReferenceNode = true; return _referenceNode; } } }); }
Add missing build sequence to gulp package command.
// Modules. let gulp = require('gulp'), requireDir = require('require-dir'), runSequence = require('gulp-run-sequence'); // Require all tasks within tasks directory, including subfolders. requireDir('./build', { recurse: true }); requireDir('./copy', { recurse: true }); requireDir('./test', { recurse: true }); requireDir('./develop', { recurse: true }); requireDir('./package', { recurse: true }); // gulp command. gulp.task('default', () => { 'use strict'; runSequence( 'build' ); }); // gulp build command. gulp.task('build', () => { 'use strict'; runSequence( 'copy:lib', 'copy:fonts', 'build:less', 'build:js' ); }); // gulp test command. gulp.task('test', () => { 'use strict'; runSequence( 'test:jscs', 'test:jshint', 'test:karma' ); }); // gulp package command. gulp.task('package', () => { 'use strict'; runSequence( 'build', 'package:clean', 'package:css', 'package:fonts', 'package:js', 'package:images', 'package:partials', 'package:templates', 'package:zip' ); }); // gulp watch command. gulp.task('watch', () => { 'use strict'; runSequence( 'develop:watch' ); });
// Modules. let gulp = require('gulp'), requireDir = require('require-dir'), runSequence = require('gulp-run-sequence'); // Require all tasks within tasks directory, including subfolders. requireDir('./build', { recurse: true }); requireDir('./copy', { recurse: true }); requireDir('./test', { recurse: true }); requireDir('./develop', { recurse: true }); requireDir('./package', { recurse: true }); // gulp command. gulp.task('default', () => { 'use strict'; runSequence( 'build' ); }); // gulp build command. gulp.task('build', () => { 'use strict'; runSequence( 'copy:lib', 'copy:fonts', 'build:less', 'build:js' ); }); // gulp test command. gulp.task('test', () => { 'use strict'; runSequence( 'test:jscs', 'test:jshint', 'test:karma' ); }); // gulp package command. gulp.task('package', () => { 'use strict'; runSequence( 'package:clean', 'package:css', 'package:fonts', 'package:js', 'package:images', 'package:partials', 'package:templates', 'package:zip' ); }); // gulp watch command. gulp.task('watch', () => { 'use strict'; runSequence( 'develop:watch' ); });
Handle errors in sync job
import datetime from cucoslib.models import Analysis, Package, Version, Ecosystem from cucoslib.workers import GraphImporterTask from .base import BaseHandler class SyncToGraph(BaseHandler): """ Sync all finished analyses to Graph DB """ def execute(self): start = 0 while True: results = self.postgres.session.query(Analysis).\ join(Version).\ join(Package).\ join(Ecosystem).\ filter(Analysis.finished_at != None).\ slice(start, start + 100).all() if not results: self.log.info("Syncing to GraphDB finished") break self.log.info("Updating results, slice offset is %s", start) start += 100 for entry in results: arguments = {'ecosystem': entry.version.package.ecosystem.name, 'name': entry.version.package.name, 'version': entry.version.identifier} try: self.log.info('Synchronizing {ecosystem}/{name}/{version} ...'.format(**arguments)) GraphImporterTask.create_test_instance().execute(arguments) except Exception as e: self.log.exception('Failed to synchronize {ecosystem}/{name}/{version}'. format(**arguments)) del entry
import datetime from cucoslib.models import Analysis, Package, Version, Ecosystem from cucoslib.workers import GraphImporterTask from .base import BaseHandler class SyncToGraph(BaseHandler): """ Sync all finished analyses to Graph DB """ def execute(self): start = 0 while True: results = self.postgres.session.query(Analysis).\ join(Version).\ join(Package).\ join(Ecosystem).\ filter(Analysis.finished_at != None).\ slice(start, start + 100).all() if not results: self.log.info("Syncing to GraphDB finished") break self.log.info("Updating results, slice offset is %s", start) start += 100 for entry in results: arguments = {'ecosystem': entry.version.package.ecosystem.name, 'name': entry.version.package.name, 'version': entry.version.identifier} GraphImporterTask.create_test_instance().execute(arguments) del entry
Delete fk_pools in games table
<?php use Illuminate\Support\Facades\Schema; use Illuminate\Database\Schema\Blueprint; use Illuminate\Database\Migrations\Migration; class CreateGamesTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('games', function (Blueprint $table) { $table->increments('id'); $table->integer('score_team1'); $table->integer('score_team2'); $table->date('date'); $table->time('start_time'); $table->string('type', 45); $table->integer('teams1_id')->unsigned(); $table->integer('teams2_id')->unsigned(); $table->integer('courts_id')->unsigned(); $table->foreign('teams1_id')->references('id')->on('teams'); $table->foreign('teams2_id')->references('id')->on('teams'); $table->foreign('courts_id')->references('id')->on('courts'); }); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::dropIfExists('games'); } }
<?php use Illuminate\Support\Facades\Schema; use Illuminate\Database\Schema\Blueprint; use Illuminate\Database\Migrations\Migration; class CreateGamesTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('games', function (Blueprint $table) { $table->increments('id'); $table->integer('score_team1'); $table->integer('score_team2'); $table->date('date'); $table->time('start_time'); $table->string('type', 45); $table->integer('teams1_id')->unsigned(); $table->integer('teams2_id')->unsigned(); $table->integer('pools_id')->unsigned(); $table->integer('courts_id')->unsigned(); $table->foreign('teams1_id')->references('id')->on('teams'); $table->foreign('teams2_id')->references('id')->on('teams'); $table->foreign('pools_id')->references('id')->on('pools'); $table->foreign('courts_id')->references('id')->on('courts'); }); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::dropIfExists('games'); } }
Revert "Downgraded SSL version to TLS 1 until we have the requests library fixed" - TLS1.2 does not work on older pythons (like on MacOSX) but it works as intended on current linux versions so enable it if possible This reverts commit 666630f2a01bdeb7becc3cd3b324c076eaf1567d. Conflicts: pixelated/common/__init__.py
import ssl import logging from logging.handlers import SysLogHandler from threading import Timer logger = logging.getLogger('pixelated.startup') def init_logging(name, level=logging.INFO, config_file=None): global logger logger_name = 'pixelated.%s' % name logging.basicConfig(level=level) if config_file: logging.config.fileConfig(config_file) else: formatter = logging.Formatter('%(asctime)s %(name)s: %(levelname)s %(message)s', '%b %e %H:%M:%S') syslog = SysLogHandler(address='/dev/log', facility=SysLogHandler.LOG_DAEMON) syslog.setFormatter(formatter) logger.addHandler(syslog) logger.name = logger_name logger.info('Initialized logging') def latest_available_ssl_version(): try: return ssl.PROTOCOL_TLSv1_2 except AttributeError: return ssl.PROTOCOL_TLSv1 class Watchdog: def __init__(self, timeout, userHandler=None, args=[]): self.timeout = timeout self.handler = userHandler if userHandler is not None else self.defaultHandler self.timer = Timer(self.timeout, self.handler, args=args) self.timer.daemon = True self.timer.start() def reset(self): self.timer.cancel() self.timer = Timer(self.timeout, self.handler) def stop(self): self.timer.cancel() def defaultHandler(self): raise self
import ssl import logging from logging.handlers import SysLogHandler from threading import Timer logger = logging.getLogger('pixelated.startup') def init_logging(name, level=logging.INFO, config_file=None): global logger logger_name = 'pixelated.%s' % name logging.basicConfig(level=level) if config_file: logging.config.fileConfig(config_file) else: formatter = logging.Formatter('%(asctime)s %(name)s: %(levelname)s %(message)s', '%b %e %H:%M:%S') syslog = SysLogHandler(address='/dev/log', facility=SysLogHandler.LOG_DAEMON) syslog.setFormatter(formatter) logger.addHandler(syslog) logger.name = logger_name logger.info('Initialized logging') def latest_available_ssl_version(): return ssl.PROTOCOL_TLSv1 class Watchdog: def __init__(self, timeout, userHandler=None, args=[]): self.timeout = timeout self.handler = userHandler if userHandler is not None else self.defaultHandler self.timer = Timer(self.timeout, self.handler, args=args) self.timer.daemon = True self.timer.start() def reset(self): self.timer.cancel() self.timer = Timer(self.timeout, self.handler) def stop(self): self.timer.cancel() def defaultHandler(self): raise self
Refactor to follow style guides
import Ember from 'ember'; import TooltipEnabled from '../mixins/sl-tooltip-enabled'; /** @module sl-components/components/sl-drop-button */ export default Ember.Component.extend( TooltipEnabled, { /** * Class names for the div element * * @property {array} classNames * @type {Ember.Array} */ classNames: [ 'btn-group', 'dropdown', 'sl-drop-button' ], /** * Class attribute bindings for the button * * @property {array} classNameBindings * @type {Ember.Array} */ classNameBindings: [ 'themeClass' ], /** * Component actions hash * * @property {object} actions * @type {Ember.Object} */ actions: { /** * Used to trigger specific option-bound action * * @function click * @argument {string} action to trigger * @return {void} */ click: function( action ) { this.triggerAction({ action: action }); } }, /** * Class string for the button's icon * * @property {string} iconClass * @type {Ember.String} * @default "caret" */ iconClass: 'caret', /** * The string name of the style theme for the button * * @property {string} theme * @type {Ember.String} * @default "default" */ theme: 'default', /** * The class value for the drop-button based on the current "theme" * * @function themeClass * @observes 'theme' * @return {string} */ themeClass: function() { return 'dropdown-' + this.get( 'theme' ); }.property( 'theme' ) });
import Ember from 'ember'; import TooltipEnabled from '../mixins/sl-tooltip-enabled'; /** * @module components * @class sl-drop-button */ export default Ember.Component.extend( TooltipEnabled, { /** * Component actions hash * * @property {object} actions */ actions: { /** * Used to trigger specific option-bound action * * @method click */ click: function( action ) { this.triggerAction({ action: action }); } }, /** * Class attribute bindings for the button * * @property {array} classNameBindings */ classNameBindings: [ 'themeClass' ], /** * Class names for the div element * * @property {array} classNames */ classNames: [ 'btn-group', 'dropdown', 'sl-drop-button' ], /** * Class string for the button's icon * * @property {string} iconClass * @default "caret" */ iconClass: 'caret', /** * The string name of the style theme for the button * * @property {string} theme * @default "default" */ theme: 'default', /** * The class value for the drop-button based on the current "theme" * * @property {string} themeClass */ themeClass: function() { return 'dropdown-' + this.get( 'theme' ); }.property( 'theme' ) });
Document python script to extract program sizes from make output.
#!/usr/bin/python # encoding: utf-8 # In order to use this script from shell: # > make CONF=UNO clean-examples # > make CONF=UNO examples >tempsizes # > cat tempsizes | ./stats.py >sizes # > rm tempsizes # Then sizes file can be opened in LibreOffice Calc from __future__ import with_statement import argparse, re, sys def filter(args): bytes_extractor = re.compile(r"([0-9]+) bytes") with args.output: with args.input: for line in args.input: if line.find("avr-size") >= 0: # Find example name (everything after last /) example = line[line.rfind("/") + 1:-1] elif line.startswith("Program:"): # Find number of bytes of flash matcher = bytes_extractor.search(line) program = matcher.group(1) elif line.startswith("Data:"): # Find number of bytes of SRAM matcher = bytes_extractor.search(line) data = matcher.group(1) # Write new line to output args.output.write("%s\t%s\t%s\n" % (example, program, data)) if __name__ == "__main__": parser = argparse.ArgumentParser(description = 'XXXXXXXX') parser.add_argument('input', nargs='?', type=argparse.FileType('r'), default=sys.stdin) parser.add_argument('output', nargs='?', type=argparse.FileType('w'), default=sys.stdout) args = parser.parse_args() filter(args)
#!/usr/bin/python # encoding: utf-8 from __future__ import with_statement import argparse, re, sys def filter(args): bytes_extractor = re.compile(r"([0-9]+) bytes") with args.output: with args.input: for line in args.input: if line.find("avr-size") >= 0: # Find example name (everything after last /) example = line[line.rfind("/") + 1:-1] elif line.startswith("Program:"): # Find number of bytes of flash matcher = bytes_extractor.search(line) program = matcher.group(1) elif line.startswith("Data:"): # Find number of bytes of SRAM matcher = bytes_extractor.search(line) data = matcher.group(1) # Write new line to output args.output.write("%s\t%s\t%s\n" % (example, program, data)) if __name__ == "__main__": parser = argparse.ArgumentParser(description = 'XXXXXXXX') parser.add_argument('input', nargs='?', type=argparse.FileType('r'), default=sys.stdin) parser.add_argument('output', nargs='?', type=argparse.FileType('w'), default=sys.stdout) args = parser.parse_args() filter(args)
Change structure of the redirect
from django import forms from django.contrib.auth.forms import UserCreationForm from django.http import HttpResponseRedirect, HttpResponse from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.conf import settings import os import logging log=logging.getLogger(__name__) def register(request): if request.method == 'POST': form = UserCreationForm(request.POST) if form.is_valid(): new_user = form.save() return HttpResponseRedirect("/essay_site/api/v1/?format=json") else: form = UserCreationForm() return render_to_response("registration/register.html", RequestContext(request,{ 'form': form, })) @login_required def protected_data(request, **params): path = params.get("path", None) if path is None: path = request.GET.get('path', None) response = HttpResponse() filename_suffix = path.split('.')[-1] response['Content-Type'] = 'application/{0}'.format(filename_suffix) response['Content-Disposition'] = 'attachment; filename={0}'.format(path) log.debug("{0}{1}".format(settings.NGINX_PROTECTED_DATA_URL, path)) response['X-Accel-Redirect'] = "{0}{1}".format(settings.NGINX_PROTECTED_DATA_URL, path) return response
from django import forms from django.contrib.auth.forms import UserCreationForm from django.http import HttpResponseRedirect, HttpResponse from django.shortcuts import render_to_response from django.template import RequestContext from django.contrib.auth.decorators import login_required from django.conf import settings import os import logging log=logging.getLogger(__name__) def register(request): if request.method == 'POST': form = UserCreationForm(request.POST) if form.is_valid(): new_user = form.save() return HttpResponseRedirect("/essay_site/api/v1/?format=json") else: form = UserCreationForm() return render_to_response("registration/register.html", RequestContext(request,{ 'form': form, })) @login_required def protected_data(request, **params): path = params.get("path", None) if path is None: path = request.GET.get('path', None) response = HttpResponse() path = params.get("path", None) if path is None: path = request.GET.get('path', None) response = HttpResponse() filename_suffix = path.split('.')[-1] response['Content-Type'] = 'application/{0}'.format(filename_suffix) response['Content-Disposition'] = 'attachment; filename={0}'.format(path) response['X-Accel-Redirect'] = str(os.path.join(settings.NGINX_PROTECTED_DATA_URL, path)) return response
Remove backref on GUID; factor out _ensure_guid
from framework import StoredObject, fields class Guid(StoredObject): _id = fields.StringField() referent = fields.AbstractForeignField() _meta = { 'optimistic': True, } class GuidStoredObject(StoredObject): # Redirect to content using URL redirect by default redirect_mode = 'redirect' def _ensure_guid(self): """Create GUID record if current record doesn't already have one, then point GUID to self. """ # Create GUID with specified ID if ID provided if self._primary_key: # Done if GUID already exists guid = Guid.load(self._primary_key) if guid is not None: return # Create GUID guid = Guid( _id=self._primary_key, referent=self ) guid.save() # Else create GUID optimistically else: # Create GUID guid = Guid() guid.save() guid.referent = (guid._primary_key, self._name) guid.save() # Set primary key to GUID key self._primary_key = guid._primary_key def __init__(self, *args, **kwargs): """ Ensure GUID after initialization. """ super(GuidStoredObject, self).__init__(*args, **kwargs) self._ensure_guid() @property def annotations(self): """ Get meta-data annotations associated with object. """ return self.metadata__annotated
from framework import StoredObject, fields class Guid(StoredObject): _id = fields.StringField() referent = fields.AbstractForeignField(backref='guid') _meta = { 'optimistic': True } class GuidStoredObject(StoredObject): # Redirect to content using URL redirect by default redirect_mode = 'redirect' def __init__(self, *args, **kwargs): """Overridden constructor. When a GuidStoredObject is instantiated, create a new Guid if the object doesn't already have one, then attach the Guid to the StoredObject. Note: This requires saving the StoredObject once and the Guid twice to ensure correct back-references; this could be made more efficient if modular-odm could handle back-references of objects that have not been saved. """ # Call superclass constructor super(GuidStoredObject, self).__init__(*args, **kwargs) # Create GUID with specified ID if ID provided if self._primary_key: # Done if GUID already exists guid = Guid.load(self._primary_key) if guid is not None: return # Create GUID guid = Guid( _id=self._primary_key, referent=self ) guid.save() # Else create GUID optimistically else: # Create GUID guid = Guid() guid.save() # Set primary key to GUID key self._primary_key = guid._primary_key self.save() # Add self to GUID guid.referent = self guid.save() @property def annotations(self): """ Get meta-data annotations associated with object. """ return self.metadata__annotated
Fix screenshot generation for english rights
'use strict'; module.exports = exports = function(app) { const gt = app.utility.gettext; return { rights: [ { _id: '577225e3f3c65dd800257bdc', special: 'annualleave', name: gt.gettext("Paid annual leave"), type: '5740adf51cf1a569643cc508', quantity: 28, rules: [ { type: "entry_date", title: gt.gettext("The creation of the request can be made in the period of renewal, with 30 days of tolerance"), min: 30, max: 30 }, { type: "request_period", title: gt.gettext("The absence must be within the renewal period"), min: 0, max: 0 } ], renewal: { start: { month:0, day:1 } } }, { name: gt.gettext("Ordinary Maternity Leave"), type: '5740adf51cf1a569643cc50e', quantity: 130, // 26 weeks activeFor: { account: false } }, { name: gt.gettext("Additional Maternity Leave"), type: '5740adf51cf1a569643cc50e', quantity: 130, // 26 weeks activeFor: { account: false } } ] }; };
'use strict'; module.exports = exports = function(app) { const gt = app.utility.gettext; return { rights: [ { special: 'annualleave', name: gt.gettext("Paid annual leave"), type: '5740adf51cf1a569643cc508', quantity: 28, rules: [ { type: "entry_date", title: gt.gettext("The creation of the request can be made in the period of renewal, with 30 days of tolerance"), min: 30, max: 30 }, { type: "request_period", title: gt.gettext("The absence must be within the renewal period"), min: 0, max: 0 } ], renewal: { start: { month:0, day:1 } } }, { name: gt.gettext("Ordinary Maternity Leave"), type: '5740adf51cf1a569643cc50e', quantity: 130, // 26 weeks activeFor: { account: false } }, { name: gt.gettext("Additional Maternity Leave"), type: '5740adf51cf1a569643cc50e', quantity: 130, // 26 weeks activeFor: { account: false } } ] }; };
Remove unused import from gruntfile
module.exports = function(grunt) { require('load-grunt-tasks')(grunt); grunt.initConfig({ secret: grunt.file.readJSON('secret.json'), clean: { build: ['build/'] }, webpack: { production: require('./webpack/production') }, sftp: { options: { path: '<%= secret.path %>', host: '<%= secret.host %>', username: '<%= secret.username %>', agent: process.env.SSH_AUTH_SOCK, showProgress: true, srcBasePath: 'build/', createDirectories: true }, code: { files: { './': ['build/**', '!build/assets/**'] } }, assets: { files: { './': ['build/assets/**'] } } }, copy: { index: { src: 'index.html', dest: 'build/index.html' } }, zip: { itch: { cwd: 'build/', src: ['build/**/*'], dest: 'build/itch' + Date.now() + '.zip' } } }); grunt.registerTask('dist', ['clean:build', 'webpack:production', 'copy:index']); grunt.registerTask('deploy:code', ['dist', 'sftp:code']); grunt.registerTask('deploy', ['dist', 'sftp']); grunt.registerTask('itch', ['dist', 'zip']); };
var webpack = require('webpack'); module.exports = function(grunt) { require('load-grunt-tasks')(grunt); grunt.initConfig({ secret: grunt.file.readJSON('secret.json'), clean: { build: ['build/'] }, webpack: { production: require('./webpack/production') }, sftp: { options: { path: '<%= secret.path %>', host: '<%= secret.host %>', username: '<%= secret.username %>', agent: process.env.SSH_AUTH_SOCK, showProgress: true, srcBasePath: 'build/', createDirectories: true }, code: { files: { './': ['build/**', '!build/assets/**'] } }, assets: { files: { './': ['build/assets/**'] } } }, copy: { index: { src: 'index.html', dest: 'build/index.html' } }, zip: { itch: { cwd: 'build/', src: ['build/**/*'], dest: 'build/itch' + Date.now() + '.zip' } } }); grunt.registerTask('dist', ['clean:build', 'webpack:production', 'copy:index']); grunt.registerTask('deploy:code', ['dist', 'sftp:code']); grunt.registerTask('deploy', ['dist', 'sftp']); grunt.registerTask('itch', ['dist', 'zip']); };
Fix typo in constructor method name
<?php namespace FlexModel\FlexModelElasticsearchBundle\EventListener; use Doctrine\Common\EventSubscriber; use Doctrine\ORM\Event\LifecycleEventArgs; use FlexModel\FlexModelElasticsearchBundle\Elasticsearch\Indexer; use FlexModel\FlexModelElasticsearchBundle\Elasticsearch\Model\IndexableObjectInterface; /** * ObjectIndexSubscriber. * * @author Niels Nijens <[email protected]> */ class ObjectIndexerSubscriber implements EventSubscriber { /** * The Indexer instance. * * @var Indexer */ private $indexer; /** * Constructs a new ObjectIndexerSubscriber instance. * * @param Indexer $indexer */ public function __construct(Indexer $indexer) { $this->indexer = $indexer; } /** * {@inheritdoc} */ public function getSubscribedEvents() { return array( 'postUpdate', 'postPersist', ); } /** * Calls the Elasticsearch Indexer to index the object. * * @param LifecycleEventArgs $args */ public function postUpdate(LifecycleEventArgs $args) { $this->postPersist($args); } /** * Calls the Elasticsearch Indexer to index the object. * * @param LifecycleEventArgs $args */ public function postPersist(LifecycleEventArgs $args) { $object = $args->getObject(); $objectChangeset = $args->getObjectManager()->getUnitOfWork()->getEntityChangeSet($object); if ($object instanceof IndexableObjectInterface && count($objectChangeset) > 1) { $this->indexer->indexObject($object); } } }
<?php namespace FlexModel\FlexModelElasticsearchBundle\EventListener; use Doctrine\Common\EventSubscriber; use Doctrine\ORM\Event\LifecycleEventArgs; use FlexModel\FlexModelElasticsearchBundle\Elasticsearch\Indexer; use FlexModel\FlexModelElasticsearchBundle\Elasticsearch\Model\IndexableObjectInterface; /** * ObjectIndexSubscriber. * * @author Niels Nijens <[email protected]> */ class ObjectIndexerSubscriber implements EventSubscriber { /** * The Indexer instance. * * @var Indexer */ private $indexer; /** * Constructs a new ObjectIndexerSubscriber instance. * * @param Indexer $indexer */ public function __constuct(Indexer $indexer) { $this->indexer = $indexer; } /** * {@inheritdoc} */ public function getSubscribedEvents() { return array( 'postUpdate', 'postPersist', ); } /** * Calls the Elasticsearch Indexer to index the object. * * @param LifecycleEventArgs $args */ public function postUpdate(LifecycleEventArgs $args) { $this->postPersist($args); } /** * Calls the Elasticsearch Indexer to index the object. * * @param LifecycleEventArgs $args */ public function postPersist(LifecycleEventArgs $args) { $object = $args->getObject(); $objectChangeset = $args->getObjectManager()->getUnitOfWork()->getEntityChangeSet($object); if ($object instanceof IndexableObjectInterface && count($objectChangeset) > 1) { $this->indexer->indexObject($object); } } }
Fix tests: pypugly has two packages: pypugly and zerotk.
#!/bin/env python from setuptools import setup setup( name='pypugly', use_scm_version=True, author='Alexandre Andrade', author_email='[email protected]', url='https://github.com/zerotk/pypugly', description='Another HTML generator based on JADE.', long_description='''Another HTML generator based on JADE.''', classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 3 - Alpha', # Indicate who your project is intended for 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: MIT License', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], include_package_data=True, packages=['pypugly', 'zerotk'], keywords=['generator', 'html', 'jade'], install_requires=['pypeg2', 'zerotk.easyfs'], setup_requires=['setuptools_scm', 'pytest-runner'], tests_require=['coverage', 'pytest'], )
#!/bin/env python from setuptools import setup setup( name='pypugly', use_scm_version=True, author='Alexandre Andrade', author_email='[email protected]', url='https://github.com/zerotk/pypugly', description='Another HTML generator based on JADE.', long_description='''Another HTML generator based on JADE.''', classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 3 - Alpha', # Indicate who your project is intended for 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: MIT License', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ], include_package_data=True, packages=['pypugly'], keywords=['generator', 'html', 'jade'], install_requires=['pypeg2', 'zerotk.easyfs'], setup_requires=['setuptools_scm', 'pytest-runner'], tests_require=['coverage', 'pytest'], )
Fix unnecessary encoding in poll function
"use strict"; var utils = require("../utils"); var log = require("npmlog"); module.exports = function(defaultFuncs, api, ctx) { return function createPoll(title, threadID, options, callback) { if(!callback) { if(utils.getType(options) == "Function") { callback = options; } else { callback = function() {}; } } if(!options) { options = {}; // Initial poll options are optional } var form = { 'target_id' : threadID, 'question_text' : title }; // Set fields for options (and whether they are selected initially by the posting user) var ind = 0; for(var opt in options) { if(options.hasOwnProperty(opt)) { form['option_text_array[' + ind + ']'] = opt; form['option_is_selected_array[' + ind + ']'] = (options[opt] ? '1' : '0'); ind++; } } defaultFuncs .post("https://www.messenger.com/messaging/group_polling/create_poll/?dpr=1", ctx.jar, form) .then(utils.parseAndCheckLogin(ctx.jar, defaultFuncs)) .then(function(resData) { if (resData.payload.status != 'success') { throw resData; } return callback(); }) .catch(function(err) { log.error("createPoll", err); return callback(err); }); }; };
"use strict"; var utils = require("../utils"); var log = require("npmlog"); module.exports = function(defaultFuncs, api, ctx) { return function createPoll(title, threadID, options, callback) { if(!callback) { if(utils.getType(options) == "Function") { callback = options; } else { callback = function() {}; } } if(!options) { options = {}; // Initial poll options are optional } var form = { 'target_id' : threadID, 'question_text' : title }; // Set fields for options (and whether they are selected initially by the posting user) var ind = 0; for(var opt in options) { if(options.hasOwnProperty(opt)) { form['option_text_array[' + ind + ']'] = encodeURIComponent(opt); form['option_is_selected_array[' + ind + ']'] = (options[opt] ? '1' : '0'); ind++; } } defaultFuncs .post("https://www.messenger.com/messaging/group_polling/create_poll/?dpr=1", ctx.jar, form) .then(utils.parseAndCheckLogin(ctx.jar, defaultFuncs)) .then(function(resData) { if (resData.payload.status != 'success') { throw resData; } return callback(); }) .catch(function(err) { log.error("createPoll", err); return callback(err); }); }; };
Update treeherder-client dependency from * to >=2.0.1 To ensure deprecated versions of TreeherderClient aren't being used if the virtualenv is reused. Notably 2.0.1 includes an API URL fix that will prevent 404s once non-canonical URLs are disabled in bug 1234233.
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup readme = open('README.rst').read() requirements = [ "Jinja2", "taskcluster>=0.0.24", "arrow", "requests>=2.4.3,<=2.7.0", "PyYAML", "chunkify", "treeherder-client>=2.0.1", "PGPy", "buildtools", "python-jose", ] test_requirements = [ "pytest", "pytest-cov", "flake8", "mock", ] setup( name='releasetasks', version='0.3.3', description="""Mozilla Release Promotion Tasks contains code to generate release-related Taskcluster graphs.""", long_description=readme, author="Rail Aliiev", author_email='[email protected]', url='https://github.com/rail/releasetasks', packages=[ 'releasetasks', ], package_dir={'releasetasks': 'releasetasks'}, include_package_data=True, install_requires=requirements, license="MPL", zip_safe=False, keywords='releasetasks', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Programming Language :: Python :: 2.7', ], test_suite='tests', tests_require=test_requirements, )
#!/usr/bin/env python # -*- coding: utf-8 -*- try: from setuptools import setup except ImportError: from distutils.core import setup readme = open('README.rst').read() requirements = [ "Jinja2", "taskcluster>=0.0.24", "arrow", "requests>=2.4.3,<=2.7.0", "PyYAML", "chunkify", "treeherder-client", "PGPy", "buildtools", "python-jose", ] test_requirements = [ "pytest", "pytest-cov", "flake8", "mock", ] setup( name='releasetasks', version='0.3.3', description="""Mozilla Release Promotion Tasks contains code to generate release-related Taskcluster graphs.""", long_description=readme, author="Rail Aliiev", author_email='[email protected]', url='https://github.com/rail/releasetasks', packages=[ 'releasetasks', ], package_dir={'releasetasks': 'releasetasks'}, include_package_data=True, install_requires=requirements, license="MPL", zip_safe=False, keywords='releasetasks', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', 'Programming Language :: Python :: 2.7', ], test_suite='tests', tests_require=test_requirements, )
Use redrawProp to refresh item card class
import m from 'mithril'; import _ from 'underscore'; import h from '../h'; const adminItem = { controller(args) { const displayDetailBox = h.toggleProp(false, true), alertClassToggle = h.toggleProp(false, true); return { displayDetailBox, alertClassToggle }; }, view(ctrl, args) { const item = args.item; if(args.listWrapper) { if(_.isFunction(args.listWrapper.redrawProp)) { args.listWrapper.redrawProp(); } if( _.isFunction(args.listWrapper.isSelected) && args.listWrapper.isSelected(item.id)) { ctrl.alertClassToggle(true); } } return m('.w-clearfix.card.u-radius.u-marginbottom-20.results-admin-items', { class: (ctrl.alertClassToggle() ? 'card-alert' : '' ) },[ m.component(args.listItem, { item, listWrapper: args.listWrapper, alertClassToggle: ctrl.alertClassToggle, key: args.key }), m('button.w-inline-block.arrow-admin.fa.fa-chevron-down.fontcolor-secondary', { onclick: ctrl.displayDetailBox.toggle }), ctrl.displayDetailBox() ? m.component(args.listDetail, { item, key: args.key }) : '' ]); } }; export default adminItem;
import m from 'mithril'; import _ from 'underscore'; import h from '../h'; const adminItem = { controller(args) { const displayDetailBox = h.toggleProp(false, true), alertClassToggle = h.toggleProp(false, true); if(args.listWrapper && _.isFunction(args.listWrapper.isSelected) && args.listWrapper.isSelected(args.item.id)) { alertClassToggle(true); } return { displayDetailBox, alertClassToggle }; }, view(ctrl, args) { const item = args.item; return m('.w-clearfix.card.u-radius.u-marginbottom-20.results-admin-items', { class: (ctrl.alertClassToggle() ? 'card-alert' : '' ) },[ m.component(args.listItem, { item, listWrapper: args.listWrapper, alertClassToggle: ctrl.alertClassToggle, key: args.key }), m('button.w-inline-block.arrow-admin.fa.fa-chevron-down.fontcolor-secondary', { onclick: ctrl.displayDetailBox.toggle }), ctrl.displayDetailBox() ? m.component(args.listDetail, { item, key: args.key }) : '' ]); } }; export default adminItem;
Make MpdFrontend a subclass of BaseFrontend
import logging from mopidy.frontends.base import BaseFrontend from mopidy.frontends.mpd.dispatcher import MpdDispatcher from mopidy.frontends.mpd.process import MpdProcess from mopidy.utils.process import unpickle_connection logger = logging.getLogger('mopidy.frontends.mpd') class MpdFrontend(BaseFrontend): """ The MPD frontend. **Settings:** - :attr:`mopidy.settings.MPD_SERVER_HOSTNAME` - :attr:`mopidy.settings.MPD_SERVER_PORT` """ def __init__(self, *args, **kwargs): super(MpdFrontend, self).__init__(*args, **kwargs) self.process = None self.dispatcher = MpdDispatcher(self.backend) def start(self): """Starts the MPD server.""" self.process = MpdProcess(self.core_queue) self.process.start() def destroy(self): """Destroys the MPD server.""" self.process.destroy() def process_message(self, message): """ Processes messages with the MPD frontend as destination. :param message: the message :type message: dict """ assert message['to'] == 'frontend', \ u'Message recipient must be "frontend".' if message['command'] == 'mpd_request': response = self.dispatcher.handle_request(message['request']) connection = unpickle_connection(message['reply_to']) connection.send(response) else: logger.warning(u'Cannot handle message: %s', message)
import logging from mopidy.frontends.mpd.dispatcher import MpdDispatcher from mopidy.frontends.mpd.process import MpdProcess from mopidy.utils.process import unpickle_connection logger = logging.getLogger('mopidy.frontends.mpd') class MpdFrontend(object): """ The MPD frontend. **Settings:** - :attr:`mopidy.settings.MPD_SERVER_HOSTNAME` - :attr:`mopidy.settings.MPD_SERVER_PORT` """ def __init__(self, core_queue, backend): self.core_queue = core_queue self.process = None self.dispatcher = MpdDispatcher(backend) def start(self): """Starts the MPD server.""" self.process = MpdProcess(self.core_queue) self.process.start() def process_message(self, message): """ Processes messages with the MPD frontend as destination. :param message: the message :type message: dict """ assert message['to'] == 'frontend', \ u'Message recipient must be "frontend".' if message['command'] == 'mpd_request': response = self.dispatcher.handle_request(message['request']) connection = unpickle_connection(message['reply_to']) connection.send(response) else: logger.warning(u'Cannot handle message: %s', message)
Fix bug with matrices with >= 10 columns
# -*- coding: utf-8 -*- """ Created on Sat Jul 26 12:04:49 2014 @author: pgao """ """ Read the CSV NOTE: Manually edited csv file twice to match FIELD NAME format """ import csv datareader = csv.DictReader(open("sf1_data_field_descriptors_2010.csv")) data = [] entry = None current_table = "" for line in datareader: new_table_number = line['TABLE NUMBER'] if new_table_number != current_table: # save the old one if entry != None: data.append(entry) entry = {} current_table = new_table_number entry['Matrix Number'] = line['TABLE NUMBER'] entry['File Name'] = line['SEGMENT'] next_line = datareader.next() entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip()) entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1] entry['Cell Count'] = 0 # Increment the cell count iff there's actually data, rather than this being a descriptive row if len(line['FIELD CODE']) > 0: entry['Cell Count'] += 1 # Write the tsv file datawriter = csv.DictWriter(open("sf1_2010.tsv", "w"), ['File Name', 'Matrix Number', 'Cell Count', 'Name', 'Universe'], dialect = 'excel-tab' ) datawriter.writeheader() datawriter.writerows(data)
# -*- coding: utf-8 -*- """ Created on Sat Jul 26 12:04:49 2014 @author: pgao """ """ Read the CSV NOTE: Manually edited csv file twice to match FIELD NAME format """ import csv datareader = csv.DictReader(open("C:/Users/pgao/Documents/DATA_FIELD_DESCRIPTORS.csv")) data = [] entry = {} current_table = "" for line in datareader: new_table_number = line['TABLE NUMBER'] if new_table_number != current_table: entry = {} current_table = new_table_number entry['Matrix Number'] = line['TABLE NUMBER'] entry['File Name'] = line['SEGMENT'] next_line = datareader.next() entry['Universe'] = (next_line['FIELD NAME'][9:].lstrip()) try: entry['Name'] = line['FIELD NAME'][:line['FIELD NAME'].index('[')-1] entry['Cell Count'] = line['FIELD NAME'][line['FIELD NAME'].index('[')+1] except ValueError: print line data.append(entry) #Write the tsv file datawriter = csv.DictWriter(open("C:/Users/pgao/Documents/SF1.tsv", "w"), ['File Name', 'Matrix Number', 'Cell Count', 'Name', 'Universe'], delimiter = '\t', lineterminator='\n') datawriter.writeheader() datawriter.writerows(data)
Change record output to strict GeoJSON. Meaning features in a FeatureCollection.
# The Fiona data tool. if __name__ == '__main__': import argparse import fiona import json import pprint import sys parser = argparse.ArgumentParser( description="Serialize a file to GeoJSON or view its description") parser.add_argument('-i', '--info', action='store_true', help='View pretty printed description information only') parser.add_argument('-j', '--json', action='store_true', help='Output description as indented JSON') parser.add_argument('filename', help="data file name") args = parser.parse_args() with fiona.open(args.filename, 'r') as col: if args.info: if args.json: meta = col.meta.copy() meta.update(name=args.filename) print(json.dumps(meta, indent=2)) else: print("\nDescription of: %r" % col) print("\nCoordinate reference system (col.crs):") pprint.pprint(col.crs) print("\nFormat driver (col.driver):") pprint.pprint(col.driver) print("\nData description (col.schema):") pprint.pprint(col.schema) else: collection = {'type': 'FeatureCollection'} collection['features'] = list(col) print(json.dumps(collection, indent=2))
# The Fiona data tool. if __name__ == '__main__': import argparse import fiona import json import pprint import sys parser = argparse.ArgumentParser( description="Serialize a file to GeoJSON or view its description") parser.add_argument('-i', '--info', action='store_true', help='View pretty printed description information only') parser.add_argument('-j', '--json', action='store_true', help='Output description as indented JSON') parser.add_argument('filename', help="data file name") args = parser.parse_args() with fiona.open(args.filename, 'r') as col: if args.info: if args.json: meta = col.meta.copy() meta.update(name=args.filename) print(json.dumps(meta, indent=2)) else: print("\nDescription of: %r" % col) print("\nCoordinate reference system (col.crs):") pprint.pprint(meta['crs']) print("\nFormat driver (col.driver):") pprint.pprint(meta['driver']) print("\nData description (col.schema):") pprint.pprint(meta['schema']) else: print(json.dumps(list(col), indent=2))
Add call static helper method to service context
<?php /** * */ namespace Mvc5\Test\Service; use Mvc5\App; use Mvc5\Service\Context; use Mvc5\Test\Test\TestCase; /** * @runTestsInSeparateProcesses */ class ContextTest extends TestCase { /** * */ function test_bind() { $app = new App; Context::bind($app); $this->assertEquals($app, ServiceFacade::service()); } /** * */ function test_bind_exception() { $app = new App; Context::bind($app); $this->expectExceptionMessage('Service already exists'); Context::bind($app); } /** * */ function test_instantiate_with_service() { $app = new App; $context = new Context($app); $this->assertEquals($app, $context->service()); } /** * */ function test_call_static() { new Context(new App); $this->assertEquals(phpversion(), Context::{'@phpversion'}()); } /** * */ function test_invoke() { $app = new App; $context = new Context; $context($app); $this->assertEquals($app, $context->service()); } /** * */ function test_service_does_not_exist() { $this->expectExceptionMessage('Service does not exist'); Context::service(); } }
<?php /** * */ namespace Mvc5\Test\Service; use Mvc5\App; use Mvc5\Service\Context; use Mvc5\Test\Test\TestCase; /** * @runTestsInSeparateProcesses */ class ContextTest extends TestCase { /** * */ function test_bind() { $app = new App; Context::bind($app); $this->assertEquals($app, ServiceFacade::service()); } /** * */ function test_bind_exception() { $app = new App; Context::bind($app); $this->expectExceptionMessage('Service already exists'); Context::bind($app); } /** * */ function test_instantiate_with_service() { $app = new App; $context = new Context($app); $this->assertEquals($app, $context->service()); } /** * */ function test_invoke() { $app = new App; $context = new Context; $context($app); $this->assertEquals($app, $context->service()); } /** * */ function test_service_does_not_exist() { $this->expectExceptionMessage('Service does not exist'); Context::service(); } }
Use real fragment behavior for gh-pages manual.
// Application bootstrap. $(function() { // Convert any markdown sections to HTML. $('.md').each(function() { var html = $('<div></div>') .html((new Showdown.converter()).makeHtml($(this).html())) .attr('class', $(this).attr('class')) .attr('id', $(this).attr('id')); $(this).hide().after(html); $('h1, h2, h3, h4, h5, h6', html).each(function() { var heading = this; var cleaned = $(this).text().replace(/[\s\W]+/g, '-').toLowerCase(); $(this).attr('id', cleaned); this.className = this.nodeName; if (!$(this).is('h1')) { $('.navigation ul').append( $('<li></li>') .addClass(heading.nodeName) .append($('<a></a>') .text($(heading).text()) .attr('href', '#' + cleaned) ) ); } }); }); $('a.video').click(function() { $('.banner').addClass('video'); return false; }); $('#video .close').click(function() { $('.banner').removeClass('video'); return false; }); });
// Application bootstrap. $(function() { // Convert any markdown sections to HTML. $('.md').each(function() { var html = $('<div></div>') .html((new Showdown.converter()).makeHtml($(this).html())) .attr('class', $(this).attr('class')) .attr('id', $(this).attr('id')); $(this).hide().after(html); $('h1, h2, h3, h4, h5, h6', html).each(function() { var heading = this; var cleaned = $(this).text().replace(/[\s\W]+/g, '-').toLowerCase(); $(this).attr('id', cleaned); this.className = this.nodeName; if (!$(this).is('h1')) { $('.navigation ul').append( $('<li></li>') .addClass(heading.nodeName) .append($('<a></a>') .text($(heading).text()) .attr('href', '#') .click(function() { window.scroll(0, $(heading).offset().top - 60); console.log($(heading).offset().top); return false; }) ) ); } }); }); $('a.video').click(function() { $('.banner').addClass('video'); return false; }); $('#video .close').click(function() { $('.banner').removeClass('video'); return false; }); });
Revert "Added some debug message for FactionChat/issues/12" This reverts commit 3c86b96f6cdb8848773450f741353a754a8dd90a.
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package nz.co.lolnet.james137137.FactionChat; import org.bukkit.entity.Player; /** * * @author James */ class AuthMeAPI { static boolean enable; public AuthMeAPI(boolean aThis) { enable = aThis; } private static boolean isLoggedIn(Player player) { if (enable) { boolean result = false; try { result = fr.xephi.authme.api.NewAPI.getInstance().isAuthenticated(player) && fr.xephi.authme.api.NewAPI.getInstance().isRegistered(player.getName()); } catch (Exception e) { result = fr.xephi.authme.api.API.isAuthenticated(player) && fr.xephi.authme.api.API.isRegistered(player.getName()); } return result; } return true; } public static boolean isAllowToChat(Player player) { if (!enable) { return true; } return isLoggedIn(player); } }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package nz.co.lolnet.james137137.FactionChat; import org.bukkit.entity.Player; /** * * @author James */ class AuthMeAPI { static boolean enable; public AuthMeAPI(boolean aThis) { enable = aThis; } private static boolean isLoggedIn(Player player) { if (enable) { boolean result; try { System.out.println("FactionChat debug: Authenticated: " + fr.xephi.authme.api.NewAPI.getInstance().isAuthenticated(player)); System.out.println("FactionChat debug: Registered: " + fr.xephi.authme.api.NewAPI.getInstance().isRegistered(player.getName())); result = fr.xephi.authme.api.NewAPI.getInstance().isAuthenticated(player) && fr.xephi.authme.api.NewAPI.getInstance().isRegistered(player.getName()); } catch (Exception e) { result = fr.xephi.authme.api.API.isAuthenticated(player) && fr.xephi.authme.api.API.isRegistered(player.getName()); } return result; } return true; } public static boolean isAllowToChat(Player player) { System.out.println("FactionChat debug: " + player.getDisplayName()); if (!enable) { System.out.println("FactionChat debug: AuthMeAPI Enabled = false"); return true; } boolean result = isLoggedIn(player); System.out.println("FactionChat debug: AuthMeAPI isLoggedIn = " + result); System.out.println("FactionChat debug End: " + player.getDisplayName()); return result; } }
Add test to ensure power spectrum slope is same w/ transposed array
# Licensed under an MIT open source license - see LICENSE ''' Test functions for PSpec ''' from unittest import TestCase import numpy as np import numpy.testing as npt from ..statistics import PowerSpectrum, PSpec_Distance from ._testing_data import \ dataset1, dataset2, computed_data, computed_distances class testPSpec(TestCase): def setUp(self): self.dataset1 = dataset1 self.dataset2 = dataset2 def test_PSpec_method(self): self.tester = \ PowerSpectrum(dataset1["moment0"], weights=dataset1["moment0_error"][0] ** 2.) self.tester.run() npt.assert_allclose(self.tester.ps1D, computed_data['pspec_val']) def test_PSpec_distance(self): self.tester_dist = \ PSpec_Distance(dataset1["moment0"], dataset2["moment0"], weights1=dataset1["moment0_error"][0] ** 2., weights2=dataset2["moment0_error"][0] ** 2.) self.tester_dist.distance_metric() npt.assert_almost_equal(self.tester_dist.distance, computed_distances['pspec_distance']) def test_pspec_nonequal_shape(): mom0_sliced = dataset1["moment0"][0][:16, :] mom0_hdr = dataset1["moment0"][1] test = PowerSpectrum((mom0_sliced, mom0_hdr)).run() test_T = PowerSpectrum((mom0_sliced.T, mom0_hdr)).run() npt.assert_almost_equal(test.slope, test_T.slope, decimal=7)
# Licensed under an MIT open source license - see LICENSE ''' Test functions for PSpec ''' from unittest import TestCase import numpy as np import numpy.testing as npt from ..statistics import PowerSpectrum, PSpec_Distance from ._testing_data import \ dataset1, dataset2, computed_data, computed_distances class testPSpec(TestCase): def setUp(self): self.dataset1 = dataset1 self.dataset2 = dataset2 def test_PSpec_method(self): self.tester = \ PowerSpectrum(dataset1["moment0"], weights=dataset1["moment0_error"][0] ** 2.) self.tester.run() npt.assert_allclose(self.tester.ps1D, computed_data['pspec_val']) def test_PSpec_distance(self): self.tester_dist = \ PSpec_Distance(dataset1["moment0"], dataset2["moment0"], weights1=dataset1["moment0_error"][0] ** 2., weights2=dataset2["moment0_error"][0] ** 2.) self.tester_dist.distance_metric() npt.assert_almost_equal(self.tester_dist.distance, computed_distances['pspec_distance'])
Disable start/limit params on recipients request
/*jslint browser: true, undef: true, white: false, laxbreak: true *//*global Ext,Slate*/ Ext.define('SlateAdmin.model.person.progress.NoteRecipient', { extend: 'Ext.data.Model', idProperty: 'ID', groupField: 'RelationshipGroup', fields: [ 'FullName', 'Email', 'Label', 'Status', { name: 'selected', type: 'boolean', convert: function (v, record) { var selected = !Ext.isEmpty(record.get('Status')); return selected; } }, { name: 'PersonID', type: 'integer' }, { name: 'RelationshipGroup', convert: function (v) { return v ? v : 'Other'; } }, { name: 'ID', type: 'integer' } ], proxy: { type: 'slaterecords', startParam: null, limitParam: null, api: { read: '/notes/progress/recipients', update: '/notes/save', create: '/notes/save', destory: '/notes/save' }, reader: { type: 'json', rootProperty: 'data' }, writer: { type: 'json', rootProperty: 'data', writeAllFields: false, allowSingle: false } } });
/*jslint browser: true, undef: true, white: false, laxbreak: true *//*global Ext,Slate*/ Ext.define('SlateAdmin.model.person.progress.NoteRecipient', { extend: 'Ext.data.Model', idProperty: 'ID', groupField: 'RelationshipGroup', fields: [ 'FullName', 'Email', 'Label', 'Status', { name: 'selected', type: 'boolean', convert: function (v, record) { var selected = !Ext.isEmpty(record.get('Status')); return selected; } }, { name: 'PersonID', type: 'integer' }, { name: 'RelationshipGroup', convert: function (v) { return v ? v : 'Other'; } }, { name: 'ID', type: 'integer' } ], proxy: { type: 'slaterecords', api: { read: '/notes/progress/recipients', update: '/notes/save', create: '/notes/save', destory: '/notes/save' }, reader: { type: 'json', rootProperty: 'data' }, writer: { type: 'json', rootProperty: 'data', writeAllFields: false, allowSingle: false } } });
Github: Add a max option in Github Jobs.
function ddg_spice_github_jobs(api_result) { if (api_result.length == 0) return; var jobs = api_result; var query = DDG.get_query(); var re = /(?:\s*(?:i\s+|we\s+)?(?:need|want|deserve|seek|get)\s+(?:an?\s+)?)?(?:(.+)\s+)(?:jobs?|work|employment)(?:\s+(?:in\s+)?(.+))?/; jobs['description'] = query.replace(re, "$1"); jobs['location'] = query.replace(re, "$2"); Spice.render({ data : api_result, header1 : query + " (GitHub Jobs)", source_url : 'https://jobs.github.com/positions?description=' + encodeURIComponent(jobs['description']) + "&location=" + encodeURIComponent(jobs['location']), source_name : 'GitHub', spice_name : 'github', template_frame : 'list', template_options: { items: api_result, template_item: "github_jobs", show: 3, max: 10, type: 'ul' }, force_big_header : true, force_no_fold : true }); }
function ddg_spice_github_jobs(api_result) { if (api_result.length == 0) return; var jobs = api_result; var query = DDG.get_query(); var re = /(?:\s*(?:i\s+|we\s+)?(?:need|want|deserve|seek|get)\s+(?:an?\s+)?)?(?:(.+)\s+)(?:jobs?|work|employment)(?:\s+(?:in\s+)?(.+))?/; jobs['description'] = query.replace(re, "$1"); jobs['location'] = query.replace(re, "$2"); Spice.render({ data : api_result, header1 : query + " (GitHub Jobs)", source_url : 'https://jobs.github.com/positions?description=' + encodeURIComponent(jobs['description']) + "&location=" + encodeURIComponent(jobs['location']), source_name : 'GitHub', spice_name : 'github', template_frame : 'list', template_options: { items: api_result, template_item: "github_jobs", show: 3, type: 'ul', }, force_big_header : true, force_no_fold : true }); }
Add local site-packages to PYTHONPATH. To pick up the local version of PyOpenSSL.
#!/usr/bin/env python import os from optparse import OptionParser from jinja2 import Template TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read() def main(): parser = OptionParser() parser.add_option("-k", "--kitsune", help="Location of kitsune (required)") parser.add_option("-u", "--user", help=("Prefix cron with this user. " "Only define for cron.d style crontabs")) parser.add_option("-p", "--python", default="/usr/bin/python2.6", help="Python interpreter to use") (opts, args) = parser.parse_args() if not opts.kitsune: parser.error("-k must be defined") # To pick up the right PyOpenSSL: python_path = 'PYTHONPATH=/usr/local/lib64/python2.6/site-packages' ctx = {'django': 'cd %s; %s %s manage.py' % ( opts.kitsune, python_path, opts.python),} ctx['cron'] = '%s cron' % ctx['django'] if opts.user: for k, v in ctx.iteritems(): ctx[k] = '%s %s' % (opts.user, v) # Needs to stay below the opts.user injection. ctx['python'] = opts.python print Template(TEMPLATE).render(**ctx) if __name__ == "__main__": main()
#!/usr/bin/env python import os from optparse import OptionParser from jinja2 import Template TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read() def main(): parser = OptionParser() parser.add_option("-k", "--kitsune", help="Location of kitsune (required)") parser.add_option("-u", "--user", help=("Prefix cron with this user. " "Only define for cron.d style crontabs")) parser.add_option("-p", "--python", default="/usr/bin/python2.6", help="Python interpreter to use") (opts, args) = parser.parse_args() if not opts.kitsune: parser.error("-k must be defined") ctx = {'django': 'cd %s; %s manage.py' % (opts.kitsune, opts.python),} ctx['cron'] = '%s cron' % ctx['django'] if opts.user: for k, v in ctx.iteritems(): ctx[k] = '%s %s' % (opts.user, v) # Needs to stay below the opts.user injection. ctx['python'] = opts.python print Template(TEMPLATE).render(**ctx) if __name__ == "__main__": main()
Remove monitoring only after database quarantine
from dbaas_cloudstack.provider import CloudStackProvider from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider from integrations.monitoring.manager import MonitoringManager import logging LOG = logging.getLogger(__name__) class IaaSManager(): @classmethod def destroy_instance(cls, database, *args, **kwargs): plan = database.plan provider = plan.provider if provider == plan.PREPROVISIONED: LOG.info("Destroying pre provisioned database...") PreProvisionedProvider().destroy_instance(database, *args, **kwargs) elif provider == plan.CLOUDSTACK: LOG.info("Destroying cloud stack instance...") if database.is_in_quarantine: MonitoringManager.remove_monitoring(database.databaseinfra) CloudStackProvider().destroy_instance(database, *args, **kwargs) @classmethod def create_instance(cls, plan, environment, name): if plan.provider == plan.PREPROVISIONED: LOG.info("Creating pre provisioned instance...") return PreProvisionedProvider().create_instance(plan, environment) elif plan.provider == plan.CLOUDSTACK: LOG.info("Creating cloud stack instance...") databaseinfra = CloudStackProvider().create_instance(plan, environment, name) if databaseinfra is not None: MonitoringManager.create_monitoring(databaseinfra) return databaseinfra
from dbaas_cloudstack.provider import CloudStackProvider from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider from integrations.monitoring.manager import MonitoringManager import logging LOG = logging.getLogger(__name__) class IaaSManager(): @classmethod def destroy_instance(cls, database, *args, **kwargs): plan = database.plan provider = plan.provider if provider == plan.PREPROVISIONED: LOG.info("Destroying pre provisioned database...") PreProvisionedProvider().destroy_instance(database, *args, **kwargs) elif provider == plan.CLOUDSTACK: LOG.info("Destroying cloud stack instance...") MonitoringManager.remove_monitoring(database.databaseinfra) CloudStackProvider().destroy_instance(database, *args, **kwargs) @classmethod def create_instance(cls, plan, environment, name): if plan.provider == plan.PREPROVISIONED: LOG.info("Creating pre provisioned instance...") return PreProvisionedProvider().create_instance(plan, environment) elif plan.provider == plan.CLOUDSTACK: LOG.info("Creating cloud stack instance...") databaseinfra = CloudStackProvider().create_instance(plan, environment, name) if databaseinfra is not None: MonitoringManager.create_monitoring(databaseinfra) return databaseinfra
Add right namespace for Acl
<?php namespace Tabster\Application; use Tabster\Application\Acl\Acl; use Tabster\Application\Auth\Auth; use Tabster\Application\Config\ConfigCollection; use Tabster\Application\Cookie\Cookie; use Tabster\Application\Database\Database; use Tabster\Application\Session\Session; use Tabster\Models\Users; use \AltoRouter as Router; class Application { private $auth; private $acl; private $config; private $cookie; private $database; private $session; private $router; private $user; public function __construct(ConfigCollection $config) { $this->config = $config; $this->database = new Database($this->config->database); $this->session = new Session('TabsterSession'); $this->cookie = new Cookie($this->config->cookie); $this->router = new Router($this->config->routes->getConfig()); $this->auth = new Auth($this->database, $this->session, $this->cookie); $this->user = $this->auth->getCurrentUser(); $this->acl = new Acl($this->database); $this->session->auth = [ 'id' => $this->user->id, 'banned' => $this->user->banned, 'suspended' => $this->user->suspended, 'active' => $this->user->active, 'role' => $this->user->role ]; } }
<?php namespace Tabster\Application; use Tabster\Application\Auth\Auth; use Tabster\Application\Config\ConfigCollection; use Tabster\Application\Cookie\Cookie; use Tabster\Application\Database\Database; use Tabster\Application\Session\Session; use Tabster\Models\Users; use \AltoRouter as Router; class Application { private $auth; private $acl; private $config; private $cookie; private $database; private $session; private $router; private $user; public function __construct(ConfigCollection $config) { $this->config = $config; $this->database = new Database($this->config->database); $this->session = new Session('TabsterSession'); $this->cookie = new Cookie($this->config->cookie); $this->router = new Router($this->config->routes->getConfig()); $this->auth = new Auth($this->database, $this->session, $this->cookie); $this->user = $this->auth->getCurrentUser(); $this->acl = new Acl($this->database); $this->session->auth = [ 'id' => $this->user->id, 'banned' => $this->user->banned, 'suspended' => $this->user->suspended, 'active' => $this->user->active, 'role' => $this->user->role ]; } }
Allow format function to be provided to Formatter constructor
import json from collections import OrderedDict from .command import Command from .settings import FormatterSettings class Formatter(): def __init__(self, name, command=None, args=None, formatter=None): self.__name = name self.__format = formatter self.__settings = FormatterSettings(name.lower()) if not formatter: command = command.split(' ') if command else [] options = self.__settings.options args = args.split(' ') if args else [] shell_command = Command(command + options + args) def external_format(input): return shell_command.run(input) self.__format = external_format @property def name(self): return self.__name @property def sources(self): return self.__settings.sources @property def format_on_save(self): return self.__settings.format_on_save @format_on_save.setter def format_on_save(self, value): self.__settings.format_on_save = value def format(self, input): return self.__format(input) class JsonFormatter(Formatter): def __init__(self): def format_json(input): try: data = json.loads(input, object_pairs_hook=OrderedDict) return json.dumps(data, indent=4), None except ValueError: return None, 'Invalid JSON' super().__init__(name='JSON', formatter=format_json)
import json from collections import OrderedDict from .command import Command from .settings import FormatterSettings class Formatter(): def __init__(self, name, command='', args=''): self.__name = name self.__command = command.split(' ') if command else [] self.__args = args.split(' ') if args else [] self.__settings = FormatterSettings(name.lower()) @property def name(self): return self.__name @property def sources(self): return self.__settings.sources @property def options(self): return self.__settings.options @property def format_on_save(self): return self.__settings.format_on_save @format_on_save.setter def format_on_save(self, value): self.__settings.format_on_save = value def format(self, input): command = self.__command options = self.options args = self.__args return Command(command + options + args).run(input) class JsonFormatter(Formatter): def __init__(self): super().__init__(name='JSON') def format(self, input): try: data = json.loads(input, object_pairs_hook=OrderedDict) return json.dumps(data, indent=4), None except ValueError: return None, 'Invalid JSON'
Implement eq(), normalize with get()
Rye.extend(function(exports){ var util = Rye.require('util') , _slice = Array.prototype.slice , _concat = Array.prototype.concat exports.get = function (index) { if (index == null) { return this.elements.slice() } return this.elements[index < 0 ? this.elements.length + index : index] } exports.eq = function (index) { if (index == null) { return new Rye() } return new Rye(this.get(index)) } var arrayMethods = [ 'forEach' , 'map' , 'reduce' , 'reduceRight' , 'sort' , 'indexOf' ] arrayMethods.forEach(function(method){ exports[method] = function(a,b,c,d){ return this.elements[method](a,b,c,d) } }) exports.concat = function () { var args = _slice.call(arguments).map(function(arr){ return arr instanceof Rye ? arr.elements : arr }) var res = util.unique(_concat.apply(this.elements, args)) return Rye(res) } exports.push = function (item) { if (item.nodeType === 1 || item.nodeType === 9){ this.elements.push(item) this.update() return this.length - 1 } else { return -1 } } exports.each = function(fn){ return this.forEach(function(element, i){ fn.call(element, i, element) }) } exports.update = function() { this.length = this.elements.length } })
Rye.extend(function(exports){ var util = Rye.require('util') , _slice = Array.prototype.slice , _concat = Array.prototype.concat exports.get = function(num){ if (num === undefined) { return this.elements.slice() } return this.elements[num < 0 ? this.elements.length + num : num] } var arrayMethods = [ 'forEach' , 'map' , 'reduce' , 'reduceRight' , 'sort' , 'indexOf' ] arrayMethods.forEach(function(method){ exports[method] = function(a,b,c,d){ return this.elements[method](a,b,c,d) } }) exports.concat = function () { var args = _slice.call(arguments).map(function(arr){ return arr instanceof Rye ? arr.elements : arr }) var res = util.unique(_concat.apply(this.elements, args)) return Rye(res) } exports.push = function (item) { if (item.nodeType === 1 || item.nodeType === 9){ this.elements.push(item) this.update() return this.length - 1 } else { return -1 } } exports.each = function(fn){ return this.forEach(function(element, i){ fn.call(element, i, element) }) } exports.update = function() { this.length = this.elements.length } })
Optimize Performance for long lines (use mb_strcut() instead of preg_split())
<?php

/*
 * This file is part of the eluceo/iCal package.
 *
 * (c) Markus Poerschke <[email protected]>
 *
 * This source file is subject to the MIT license that is bundled
 * with this source code in the file LICENSE.
 */

namespace Eluceo\iCal\Util;

class ComponentUtil
{
    /**
     * Folds a single line.
     *
     * According to RFC 5545, all lines longer than 75 characters should be folded
     *
     * @see https://tools.ietf.org/html/rfc5545#section-5
     * @see https://tools.ietf.org/html/rfc5545#section-3.1
     *
     * @param $string
     *
     * @return array
     */
    public static function fold($string)
    {
        $lines = [];

        while (strlen($string) > 0) {
            if (strlen($string) > 75) {
                $lines[] = mb_strcut($string, 0, 75, 'utf-8') . "\r\n";
                $string = ' ' . mb_strcut($string, 75, strlen($string), 'utf-8');
            } else {
                $lines[] = $string;
                $string = '';
                break;
            }
        }

        return $lines;
    }
}
<?php /* * This file is part of the eluceo/iCal package. * * (c) Markus Poerschke <[email protected]> * * This source file is subject to the MIT license that is bundled * with this source code in the file LICENSE. */ namespace Eluceo\iCal\Util; class ComponentUtil { /** * Folds a single line. * * According to RFC 5545, all lines longer than 75 characters should be folded * * @see https://tools.ietf.org/html/rfc5545#section-5 * @see https://tools.ietf.org/html/rfc5545#section-3.1 * * @param $string * * @return array */ public static function fold($string) { $lines = []; $array = preg_split('/(?<!^)(?!$)/u', $string); $line = ''; $lineNo = 0; foreach ($array as $char) { $charLen = strlen($char); $lineLen = strlen($line); if ($lineLen + $charLen > 75) { $line = ' ' . $char; ++$lineNo; } else { $line .= $char; } $lines[$lineNo] = $line; } return $lines; } }
Use gallery only for more than one image
import React from "react"; import ImageGallery from "react-image-gallery"; class ItemGallery extends React.Component { constructor(props) { super(props); this.defineImage = this.defineImage.bind(this); this.getWidth = this.getWidth.bind(this); this.renderItem = this.renderItem.bind(this); } defineImage(url) { if (url) { let width = this.getWidth(url); if (width < 500) { return "portrait"; } else { return "landscape"; } } else { return; } } getWidth(url) { let img = new Image(); img.src = url; return img.naturalWidth; } renderItem(item) { const imageClass = this.defineImage(item.original); return ( <div className="image-gallery-image"> <img className={imageClass} src={item.original} alt={item.originalAlt} srcSet={item.srcSet} sizes={item.sizes} /> </div> ); } render() { const images = this.props.items.map(photo => { const src = photo; return { original: src, thumbnail: src }; }); if (images.length > 1) { return ( <ImageGallery items={images} showFullscreenButton={false} renderItem={this.renderItem} slideInterval={2000} /> ); } else { return this.renderItem(images[0]); } } } export default ItemGallery;
import React from "react"; import ImageGallery from "react-image-gallery"; class ItemGallery extends React.Component { constructor(props) { super(props); this.defineImage = this.defineImage.bind(this); this.getWidth = this.getWidth.bind(this); this.renderItem = this.renderItem.bind(this); } defineImage(url) { if (url) { let width = this.getWidth(url); if (width < 500) { return "portrait"; } else { return "landscape"; } } else { return; } } getWidth(url) { let img = new Image(); img.src = url; return img.naturalWidth; } renderItem(item) { const imageClass = this.defineImage(item.original); return ( <div className="image-gallery-image"> <img className={imageClass} src={item.original} alt={item.originalAlt} srcSet={item.srcSet} sizes={item.sizes} /> </div> ); } render() { const images = this.props.items.map(photo => { const src = photo; return { original: src, thumbnail: src }; }); return ( <ImageGallery items={images} showFullscreenButton={false} renderItem={this.renderItem} slideInterval={2000} /> ); } } export default ItemGallery;
Use the more obvious linspace instead of arange
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Circular Layout
===============

This module contains several graph layouts which rely heavily on circles.
"""

import numpy as np

from ..util import _straight_line_vertices


def circular(adjacency_mat, directed=False):
    """Places all nodes on a single circle.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields
        a single time, and has no builtin animation
    """
    num_nodes = adjacency_mat.shape[0]

    t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False, dtype=np.float32)

    # Visual coordinate system is between 0 and 1, so generate a circle with
    # radius 0.5 and center it at the point (0.5, 0.5).
    node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)

    yield node_coords, line_vertices, arrows
# -*- coding: utf-8 -*- # Copyright (c) 2015, Vispy Development Team. # Distributed under the (new) BSD License. See LICENSE.txt for more info. """ Circular Layout =============== This module contains several graph layouts which rely heavily on circles. """ import numpy as np from ..util import _straight_line_vertices def circular(adjacency_mat, directed=False): """Places all nodes on a single circle. Parameters ---------- adjacency_mat : matrix or sparse The graph adjacency matrix directed : bool Whether the graph is directed. If this is True, is will also generate the vertices for arrows, which can be passed to an ArrowVisual. Yields ------ (node_vertices, line_vertices, arrow_vertices) : tuple Yields the node and line vertices in a tuple. This layout only yields a single time, and has no builtin animation """ num_nodes = adjacency_mat.shape[0] t = np.arange(0, 2.0*np.pi, 2.0*np.pi/num_nodes, dtype=np.float32) # Visual coordinate system is between 0 and 1, so generate a circle with # radius 0.5 and center it at the point (0.5, 0.5). node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T line_vertices, arrows = _straight_line_vertices(adjacency_mat, node_coords, directed) yield node_coords, line_vertices, arrows
Fix typeahead component ignoring search results.
<script type="text/javascript"> $(function () { let baseUrl = '{{ \URL::to('/manga') }}'; $('#searchbar').typeahead({ delay: 250, followLinkOnSelect: true, minLength: 3, selectOnBlur: false, theme: "bootstrap4", /* Required in order for results not to be ignored. */ matcher: function (data) { return true; }, itemLink: function (manga) { return baseUrl + '/' + manga.id; }, source: function (query, process) { return $.getJSON('{{ \URL::to('/search/autocomplete') }}', { query : query}, function (data) { return process(data); }); } }); $('#searchbar-small').typeahead({ delay: 250, followLinkOnSelect: true, minLength: 3, selectOnBlur: false, theme: "bootstrap4", matcher: function (data) { return true; }, itemLink: function (manga) { return baseUrl + '/' + manga.id; }, source: function (query, process) { return $.getJSON('{{ \URL::to('/search/autocomplete') }}', { query : query}, function (data) { return process(data); }); } }); }); </script>
<script type="text/javascript"> $(function () { let baseUrl = '{{ \URL::to('/manga') }}'; $('#searchbar').typeahead({ delay: 250, followLinkOnSelect: true, minLength: 3, selectOnBlur: false, theme: "bootstrap4", itemLink: function (manga) { return baseUrl + '/' + manga.id; }, source: function (query, process) { return $.getJSON('{{ \URL::to('/search/autocomplete') }}', { query : query}, function (data) { return process(data); }); } }); $('#searchbar-small').typeahead({ delay: 250, followLinkOnSelect: true, minLength: 3, selectOnBlur: false, theme: "bootstrap4", itemLink: function (manga) { return baseUrl + '/' + manga.id; }, source: function (query, process) { return $.getJSON('{{ \URL::to('/search/autocomplete') }}', { query : query}, function (data) { return process(data); }); } }); }); </script>
Allow relative paths for corenlp deps
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: proc = None """ When the JSON segments return from the CoreNLP library, they separate the data acquired from each word into their own element. For readability's sake, it would be nice to pair all of the information for a given word with that word, making a list of words with their part of speech tags """ def jsonCleanup(self, data, analysisTypes): for corpus in data: res = StanfordCoreNLP.proc.parse_doc(corpus.contents) print(str(res)); for sentence in res["sentences"]: words = [] for index, token in enumerate(sentence["tokens"]): word = {} word["token"] = sentence["tokens"][index] for atype in analysisTypes: word[atype] = sentence[atype][index] words.append(word) return words def __init__(self, analysisType): self.analysisType = analysisType if StanfordCoreNLP.proc == None: StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')]) def run(self, data): return self.jsonCleanup(data, self.analysisType)
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: proc = None """ When the JSON segments return from the CoreNLP library, they separate the data acquired from each word into their own element. For readability's sake, it would be nice to pair all of the information for a given word with that word, making a list of words with their part of speech tags """ def jsonCleanup(self, data, analysisTypes): for corpus in data: res = StanfordCoreNLP.proc.parse_doc(corpus.contents) print(str(res)); for sentence in res["sentences"]: words = [] for index, token in enumerate(sentence["tokens"]): word = {} word["token"] = sentence["tokens"][index] for atype in analysisTypes: word[atype] = sentence[atype][index] words.append(word) return words def __init__(self, analysisType): self.analysisType = analysisType coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar') if StanfordCoreNLP.proc == None: StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[coreNLPPath, coreNLPModelsPath]) def run(self, data): return self.jsonCleanup(data, self.analysisType)
Fix HTML escaping in renderer crashing on non-string values
var DOM = (function(){ var entityMap = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': '&quot;', "'": '&#39;', "/": '&#x2F;' }; var entityRegex = /[&<>"'\/]/g; return { /* * Returns a child element by its ID. Parent defaults to the entire document. */ id: function(id, parent){ return (parent || document).getElementById(id); }, /* * Returns an array of all child elements containing the specified class. Parent defaults to the entire document. */ cls: function(cls, parent){ return Array.prototype.slice.call((parent || document).getElementsByClassName(cls)); }, /* * Returns an array of all child elements that have the specified tag. Parent defaults to the entire document. */ tag: function(tag, parent){ return Array.prototype.slice.call((parent || document).getElementsByTagName(tag)); }, /* * Creates an element, adds it to the DOM, and returns it. */ createElement: function(tag, parent){ var ele = document.createElement(tag); parent.appendChild(ele); return ele; }, /* * Removes an element from the DOM. */ removeElement: function(ele){ ele.parentNode.removeChild(ele); }, /* * Converts characters to their HTML entity form. */ escapeHTML: function(html){ return String(html).replace(entityRegex, s => entityMap[s]); } }; })();
var DOM = (function(){ var entityMap = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': '&quot;', "'": '&#39;', "/": '&#x2F;' }; var entityRegex = /[&<>"'\/]/g; return { /* * Returns a child element by its ID. Parent defaults to the entire document. */ id: function(id, parent){ return (parent || document).getElementById(id); }, /* * Returns an array of all child elements containing the specified class. Parent defaults to the entire document. */ cls: function(cls, parent){ return Array.prototype.slice.call((parent || document).getElementsByClassName(cls)); }, /* * Returns an array of all child elements that have the specified tag. Parent defaults to the entire document. */ tag: function(tag, parent){ return Array.prototype.slice.call((parent || document).getElementsByTagName(tag)); }, /* * Creates an element, adds it to the DOM, and returns it. */ createElement: function(tag, parent){ var ele = document.createElement(tag); parent.appendChild(ele); return ele; }, /* * Removes an element from the DOM. */ removeElement: function(ele){ ele.parentNode.removeChild(ele); }, /* * Converts characters to their HTML entity form. */ escapeHTML: function(html){ return html.replace(entityRegex, s => entityMap[s]); } }; })();
Add debug mode to hide logs.
var Generator = require('generate-js'),
    Joi = require('joi'),
    async = require('async');

var EndPoint = Generator.generate(function EndPoint(options) {
    var _ = this;
    _.defineProperties(options);
    _.debug = false;
});

EndPoint.Joi = Joi;

EndPoint.definePrototype({
    incomingSchema: Joi.object(),
    outgoingSchema: Joi.object(),

    run: function run(data, done) {
        var _ = this;

        function validateIncoming(next) {
            _.debug && console.log("validateIncoming", data);
            Joi.validate(data, _.incomingSchema, _.validateOptions, next);
        }

        function runIncoming(data, next) {
            _.debug && console.log("runIncoming", data);
            _.incoming(data, next);
        }

        function validateOutgoing(data, next) {
            _.debug && console.log("validateOutgoing", data);
            Joi.validate(data, _.outgoingSchema, _.validateOptions, next);
        }

        function runOutgoing(data, next) {
            _.debug && console.log("runOutgoing", data);
            _.outgoing(data, next);
        }

        async.waterfall([
            validateIncoming,
            runIncoming,
            validateOutgoing,
            runOutgoing
        ], done);
    },

    incoming: function incoming(data, done) {
        var _ = this;
        done(null, data);
    },

    outgoing: function outgoing(data, done) {
        var _ = this;
        done(null, data);
    }
});

module.exports = EndPoint;
var Generator = require('generate-js'), Joi = require('joi'), async = require('async'); var EndPoint = Generator.generate(function EndPoint(options) { var _ = this; _.defineProperties(options); }); EndPoint.Joi = Joi; EndPoint.definePrototype({ incomingSchema: Joi.object(), outgoingSchema: Joi.object(), run: function run(data, done) { var _ = this; function validateIncoming(next) { console.log("validateIncoming", data); Joi.validate(data, _.incomingSchema, _.validateOptions, next); } function runIncoming(data, next) { console.log("runIncoming", data); _.incoming(data, next); } function validateOutgoing(data, next) { console.log("validateOutgoing", data); Joi.validate(data, _.outgoingSchema, _.validateOptions, next); } function runOutgoing(data, next) { console.log("runOutgoing", data); _.outgoing(data, next); } async.waterfall([ validateIncoming, runIncoming, validateOutgoing, runOutgoing ], done); }, incoming: function incoming(data, done) { var _ = this; done(null, data); }, outgoing: function outgoing(data, done) { var _ = this; done(null, data); } }); module.exports = EndPoint;
Enable access to expense through permalink
// Filename: router.js define([ 'Backbone', 'models/expense', 'views/home' ], function(Backbone, Expense, homeview){ var AppRouter = Backbone.Router.extend({ routes: { // Define some URL routes "expense/new":"new_expense", "expense/:id":"edit_expense", // Default "*other" : "home" }, home: function(){ homeview.refresh(); }, edit_expense: function(id){ require(['views/expense-details'],function(ExpenseDetailsView){ var expense = homeview.model.get(id); if(!expense){ expense = new Expense({id:id}); expense.fetch(); } new ExpenseDetailsView({model:expense, slot: $('#content')[0]}).render(); }); }, new_expense: function(){ require(['views/expense-details'],function(ExpenseDetailsView){ new ExpenseDetailsView({model:new Expense(), slot: $('#content')[0]}).render(); }); } }); var initialize = function(){ var app_router = new AppRouter; Backbone.history.start(); homeview.slot=$("#content"); }; return { initialize: initialize }; });
// Filename: router.js define([ 'Backbone', 'models/expense', 'views/home' ], function(Backbone, Expense, homeview){ var AppRouter = Backbone.Router.extend({ routes: { // Define some URL routes "expense/new":"new_expense", "expense/:id":"edit_expense", // Default "*other" : "home" }, home: function(){ homeview.refresh(); }, edit_expense: function(id){ require(['views/expense-details'],function(ExpenseDetailsView){ var expense = homeview.model.get(id); new ExpenseDetailsView({model:expense, slot: $('#content')[0]}).render(); }); }, new_expense: function(){ require(['views/expense-details'],function(ExpenseDetailsView){ new ExpenseDetailsView({model:new Expense(), slot: $('#content')[0]}).render(); }); } }); var initialize = function(){ var app_router = new AppRouter; Backbone.history.start(); homeview.slot=$("#content"); }; return { initialize: initialize }; });
Update indentation to 4 spaces
#!/usr/bin/env python import os import re import time import sys from threading import Thread class testit(Thread): def __init__ (self,ip): Thread.__init__(self) self.ip = ip self.status = -1 self.responsetime = -1 def run(self): pingaling = os.popen("ping -q -c2 "+self.ip,"r") while 1: line = pingaling.readline() if not line: break igot = re.findall(testit.lifeline,line) if igot: self.status = int(igot[0]) line = pingaling.readline() restime = re.search(testit.response, line) if restime: self.responsetime = restime.group(1) testit.lifeline = re.compile(r"(\d) received") testit.response = re.compile(r'((\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?) ms') report = ("No response","Partial Response","Alive") print time.ctime() pinglist = [] for host in range(1,10): ip = "192.168.11."+str(host) current = testit(ip) pinglist.append(current) current.start() for pingle in pinglist: pingle.join() print "Status from ",pingle.ip,"is",report[pingle.status],"time:",pingle.responsetime print time.ctime()
#!/usr/bin/env python import os import re import time import sys from threading import Thread class testit(Thread): def __init__ (self,ip): Thread.__init__(self) self.ip = ip self.status = -1 self.responsetime = -1 def run(self): pingaling = os.popen("ping -q -c2 "+self.ip,"r") while 1: line = pingaling.readline() if not line: break igot = re.findall(testit.lifeline,line) if igot: self.status = int(igot[0]) line = pingaling.readline() restime = re.search(testit.response, line) if restime: self.responsetime = restime.group(1) testit.lifeline = re.compile(r"(\d) received") testit.response = re.compile(r'((\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?) ms') report = ("No response","Partial Response","Alive") print time.ctime() pinglist = [] for host in range(1,10): ip = "192.168.11."+str(host) current = testit(ip) pinglist.append(current) current.start() for pingle in pinglist: pingle.join() print "Status from ",pingle.ip,"is",report[pingle.status],"time:",pingle.responsetime print time.ctime()
Use default parameters for default suffixes
import isPromise from './isPromise'; const defaultTypes = ['PENDING', 'FULFILLED', 'REJECTED']; export default function promiseMiddleware(config={}) { const promiseTypeSuffixes = config.promiseTypeSuffixes || defaultTypes; return (_ref) => { const dispatch = _ref.dispatch; return next => action => { if (!isPromise(action.payload)) { return next(action); } const { type, payload, meta } = action; const { promise, data } = payload; const [ PENDING, FULFILLED, REJECTED ] = (meta || {}).promiseTypeSuffixes || promiseTypeSuffixes; /** * Dispatch the first async handler. This tells the * reducer that an async action has been dispatched. */ next({ type: `${type}_${PENDING}`, ...data && { payload: data }, ...meta && { meta } }); /** * Return either the fulfilled action object or the rejected * action object. */ return promise.then( (resolved={}) => dispatch({ type: `${type}_${FULFILLED}`, ...resolved.meta || resolved.payload ? resolved : { ...resolved && { payload: resolved }, ...meta && { meta } } }), error => dispatch({ type: `${type}_${REJECTED}`, payload: error, error: true, ...meta && { meta } }) ); }; }; }
import isPromise from './isPromise'; const defaultTypes = ['PENDING', 'FULFILLED', 'REJECTED']; export default function promiseMiddleware(config={}) { const promiseTypeSuffixes = config.promiseTypeSuffixes || defaultTypes; return (_ref) => { const dispatch = _ref.dispatch; return next => action => { if (!isPromise(action.payload)) { return next(action); } const { type, payload, meta } = action; const { promise, data } = payload; const [ PENDING, FULFILLED, REJECTED ] = (meta || {}).promiseTypeSuffixes || promiseTypeSuffixes; /** * Dispatch the first async handler. This tells the * reducer that an async action has been dispatched. */ next({ type: `${type}_${PENDING}`, ...data && { payload: data }, ...meta && { meta } }); /** * Return either the fulfilled action object or the rejected * action object. */ return promise.then( (resolved={}) => dispatch({ type: `${type}_${FULFILLED}`, ...resolved.meta || resolved.payload ? resolved : { ...resolved && { payload: resolved }, ...meta && { meta } } }), error => dispatch({ type: `${type}_${REJECTED}`, payload: error, error: true, ...meta && { meta } }) ); }; }; }
[Process] Add default xampp path to the list of possible paths to check
<?php /* * This file is part of the Symfony package. * * (c) Fabien Potencier <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Symfony\Component\Process; /** * An executable finder specifically designed for the PHP executable. * * @author Fabien Potencier <[email protected]> * @author Johannes M. Schmitt <[email protected]> */ class PhpExecutableFinder { private $executableFinder; public function __construct() { $this->executableFinder = new ExecutableFinder(); } /** * Finds The PHP executable. * * @return string|false The PHP executable path or false if it cannot be found */ public function find() { // PHP_BINARY return the current sapi executable if (defined('PHP_BINARY') && PHP_BINARY && ('cli' === PHP_SAPI)) { return PHP_BINARY; } if ($php = getenv('PHP_PATH')) { if (!is_executable($php)) { return false; } return $php; } if ($php = getenv('PHP_PEAR_PHP_BIN')) { if (is_executable($php)) { return $php; } } $dirs = array(PHP_BINDIR); if (defined('PHP_WINDOWS_VERSION_BUILD')) { $dirs[] = 'C:\xampp\php\\'; } return $this->executableFinder->find('php', false, $dirs); } }
<?php /* * This file is part of the Symfony package. * * (c) Fabien Potencier <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Symfony\Component\Process; /** * An executable finder specifically designed for the PHP executable. * * @author Fabien Potencier <[email protected]> * @author Johannes M. Schmitt <[email protected]> */ class PhpExecutableFinder { private $executableFinder; public function __construct() { $this->executableFinder = new ExecutableFinder(); } /** * Finds The PHP executable. * * @return string|false The PHP executable path or false if it cannot be found */ public function find() { // PHP_BINARY return the current sapi executable if (defined('PHP_BINARY') && PHP_BINARY && ('cli' === PHP_SAPI)) { return PHP_BINARY; } if ($php = getenv('PHP_PATH')) { if (!is_executable($php)) { return false; } return $php; } if ($php = getenv('PHP_PEAR_PHP_BIN')) { if (is_executable($php)) { return $php; } } $dirs = array(PHP_BINDIR); return $this->executableFinder->find('php', false, $dirs); } }
Move reader() generator out of file hasher
"""File information""" import os import pwd import hashlib from .config import CONFIG def reader(fo): """Generator which feeds bytes to the md5 hasher""" while True: b = fo.read(128) if len(b) > 0: yield b else: raise StopIteration() def get_file_hash(path): """Get md5 hash of a file""" hasher = hashlib.new('md5') if os.path.isdir(path): return else: try: with open(path, mode='br') as infile: for b in reader(infile): hasher.update(b) digest = hasher.hexdigest() return digest except PermissionError: return '' def FileInfo(fd, link=False, important=False): """Hold information about a file""" stats = os.stat(fd, follow_symlinks=link) return { 'path': os.path.abspath(fd), 'mode': stats.st_mode, 'uid': stats.st_uid, 'username': pwd.getpwuid(stats.st_uid).pw_name, 'size': stats.st_size, 'lastmod': int(stats.st_ctime), 'lastcheck': 0, 'isfile': os.path.isfile(fd), 'isdir': not os.path.isfile(fd), 'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'), 'md5sum': get_file_hash(fd), 'problems': set() }
"""File information""" import os import time import pwd import stat import hashlib from .config import CONFIG def get_file_hash(path): """Get md5 hash of a file""" def reader(fo): """Generator which feeds bytes to the md5 hasher""" while True: b = fo.read(128) if len(b) > 0: yield b else: raise StopIteration() hasher = hashlib.new('md5') if os.path.isdir(path): return else: try: with open(path, mode='br') as infile: for b in reader(infile): hasher.update(b) digest = hasher.hexdigest() return digest except PermissionError: return '' def FileInfo(fd, link=False, important=False): """Hold information about a file""" stats = os.stat(fd, follow_symlinks=link) return { 'path': os.path.abspath(fd), 'mode': stats.st_mode, 'uid': stats.st_uid, 'username': pwd.getpwuid(stats.st_uid).pw_name, 'size': stats.st_size, 'lastmod': int(stats.st_ctime), 'lastcheck': 0, 'isfile': os.path.isfile(fd), 'isdir': not os.path.isfile(fd), 'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'), 'md5sum': get_file_hash(fd), 'problems': set() }
Fix check for missing behavior

- The method exists on a behavior, and is proxied, so therefore `method_exists` is a bad check.
- You can't simply check for `hasBehavior('Search')` as a user can alias a behavior to another one.
- Since we are calling `filterParams` and that is the only dependency, simply checking if the method is in the `BehaviorRegistry::$methodMap` is enough.
<?php namespace Crud\Listener; use Cake\Core\Plugin; use Cake\Event\Event; use RuntimeException; class SearchListener extends BaseListener { /** * Settings * * @var array */ protected $_defaultConfig = [ 'enabled' => [ 'Crud.beforeLookup', 'Crud.beforePaginate' ] ]; /** * Returns a list of all events that will fire in the controller during its lifecycle. * You can override this function to add your own listener callbacks * * @return array */ public function implementedEvents() { return [ 'Crud.beforeLookup' => ['callable' => 'injectSearch'], 'Crud.beforePaginate' => ['callable' => 'injectSearch'] ]; } /** * Inject search conditions to the qeury object. * * @param \Cake\Event\Event $event Event * @return void */ public function injectSearch(Event $event) { if (!Plugin::loaded('Search')) { throw new RuntimeException( 'You need to load the Search plugin in order to use the SearchListener.' ); } if (!in_array($event->name, $this->config('enabled'))) { return; } $table = $this->_table(); if (!$table->behaviors()->hasMethod('filterParams')) { throw new RuntimeException(sprintf( 'Missing Search.Search behavior on %s', get_class($table) )); } $filterParams = $table->filterParams($this->_request()->query); $event->subject->query->find('search', $filterParams); } }
<?php namespace Crud\Listener; use Cake\Core\Plugin; use Cake\Event\Event; use RuntimeException; class SearchListener extends BaseListener { /** * Settings * * @var array */ protected $_defaultConfig = [ 'enabled' => [ 'Crud.beforeLookup', 'Crud.beforePaginate' ] ]; /** * Returns a list of all events that will fire in the controller during its lifecycle. * You can override this function to add your own listener callbacks * * @return array */ public function implementedEvents() { return [ 'Crud.beforeLookup' => ['callable' => 'injectSearch'], 'Crud.beforePaginate' => ['callable' => 'injectSearch'] ]; } /** * Inject search conditions to the qeury object. * * @param \Cake\Event\Event $event Event * @return void */ public function injectSearch(Event $event) { if (!Plugin::loaded('Search')) { throw new RuntimeException( 'You need to load the Search plugin in order to use the SearchListener.' ); } if (!in_array($event->name, $this->config('enabled'))) { return; } $table = $this->_table(); if (!method_exists($table, 'filterParams')) { throw new RuntimeException(sprintf( 'Missing Search.Search behavior on %s', get_class($table) )); } $filterParams = $table->filterParams($this->_request()->query); $event->subject->query->find('search', $filterParams); } }
Fix Python3 support: remove iteritems
# vim:ts=4 sw=4 expandtab softtabstop=4 class JSONValue(object): def __init__(self, val=None, ref='#', undef=False): assert not isinstance(val, JSONValue) self.val = val self.ref = ref self.undef = undef def is_undef(self): return self.undef def _subval(self, key, **kwargs): return JSONValue(ref=self.ref+'/'+str(key), **kwargs) def __getitem__(self, key): return self._subval(key, val=self.val[key]) def get(self, key, *args): r = self.val.get(key, *args) if r is None: return self._subval(key, undef=True) else: return self._subval(key, val=r) def __repr__(self): if self.is_undef(): return 'JSONValue(undef=True)' else: return 'JSONValue(%r,%r)' % (self.val, self.ref) def items(self): for k, v in self.val.items(): yield (k, self._subval(k, val=v)) def __iter__(self): assert isinstance(self.val, list) for i, v in enumerate(self.val): yield self._subval(i, val=v)
# vim:ts=4 sw=4 expandtab softtabstop=4 class JSONValue(object): def __init__(self, val=None, ref='#', undef=False): assert not isinstance(val, JSONValue) self.val = val self.ref = ref self.undef = undef def is_undef(self): return self.undef def _subval(self, key, **kwargs): return JSONValue(ref=self.ref+'/'+str(key), **kwargs) def __getitem__(self, key): return self._subval(key, val=self.val[key]) def get(self, key, *args): r = self.val.get(key, *args) if r is None: return self._subval(key, undef=True) else: return self._subval(key, val=r) def __repr__(self): if self.is_undef(): return 'JSONValue(undef=True)' else: return 'JSONValue(%r,%r)' % (self.val, self.ref) def iteritems(self): for k, v in self.val.iteritems(): yield (k, self._subval(k, val=v)) def items(self): return list(self.iteritems()) def __iter__(self): assert isinstance(self.val, list) for i, v in enumerate(self.val): yield self._subval(i, val=v)
Add commissioners to league admin
<?php namespace SofaChamps\Bundle\BowlPickemBundle\Admin; use Sonata\AdminBundle\Admin\Admin; use Sonata\AdminBundle\Datagrid\ListMapper; use Sonata\AdminBundle\Datagrid\DatagridMapper; use Sonata\AdminBundle\Form\FormMapper; class LeagueAdmin extends Admin { protected function configureFormFields(FormMapper $formMapper) { $formMapper ->with('General') ->add('id') ->add('season') ->add('name') ->add('motto') ->add('password', null, array('required' => false)) ->add('locked', null, array('required' => false)) ->end() ->with('Members') ->add('users') ->add('commissioners') ->end() ; } protected function configureDatagridFilters(DatagridMapper $datagridMapper) { $datagridMapper ->add('id') ->add('season') ->add('name') ->add('motto') ->add('password') ->add('locked') ; } protected function configureListFields(ListMapper $listMapper) { $listMapper ->addIdentifier('id') ->add('season') ->add('name') ; } }
<?php namespace SofaChamps\Bundle\BowlPickemBundle\Admin; use Sonata\AdminBundle\Admin\Admin; use Sonata\AdminBundle\Datagrid\ListMapper; use Sonata\AdminBundle\Datagrid\DatagridMapper; use Sonata\AdminBundle\Form\FormMapper; class LeagueAdmin extends Admin { protected function configureFormFields(FormMapper $formMapper) { $formMapper ->add('id') ->add('season') ->add('name') ->add('motto') ->add('password', null, array('required' => false)) ->add('locked', null, array('required' => false)) ; } protected function configureDatagridFilters(DatagridMapper $datagridMapper) { $datagridMapper ->add('id') ->add('season') ->add('name') ->add('motto') ->add('password') ->add('locked') ; } protected function configureListFields(ListMapper $listMapper) { $listMapper ->addIdentifier('id') ->add('season') ->add('name') ; } }
Add xmltodict and dicttoxml to install_requires
import sys sys.path.insert(0, 'src') try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='ibmiotf', version="0.2.7", author='David Parker', author_email='[email protected]', package_dir={'': 'src'}, packages=['ibmiotf', 'ibmiotf.codecs'], package_data={'ibmiotf': ['*.pem']}, url='https://github.com/ibm-watson-iot/iot-python', license=open('LICENSE').read(), description='Python Client for IBM Watson IoT Platform', long_description=open('README.rst').read(), install_requires=[ "iso8601 >= 0.1.10", "paho-mqtt >= 1.1", "pytz >= 2014.7", "requests >= 2.5.0", "requests_toolbelt >= 0.7.0", "dicttoxml >= 1.7.4", "xmltodict >= 0.10.2" ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
import sys sys.path.insert(0, 'src') try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='ibmiotf', version="0.2.7", author='David Parker', author_email='[email protected]', package_dir={'': 'src'}, packages=['ibmiotf', 'ibmiotf.codecs'], package_data={'ibmiotf': ['*.pem']}, url='https://github.com/ibm-watson-iot/iot-python', license=open('LICENSE').read(), description='IBM Watson IoT Platform Client for Python', long_description=open('README.rst').read(), install_requires=[ "iso8601 >= 0.1.10", "paho-mqtt >= 1.1", "pytz >= 2014.7", "requests >= 2.5.0", "requests_toolbelt >= 0.7.0" ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Update chosen directive to work with newer AngularJS versions.
angular.module("bmComponents").directive("bmChosen", ["$timeout", function ($timeout) { return { restrict : "A", link : function(scope, element, attrs) { element.chosen(); scope.$watch(attrs.bmChosen, function (data) { if (data) { $timeout(function () { element.trigger("chosen:updated"); }); } }); scope.$watch(attrs.ngDisabled, function (data) { if (data === false || data === true) { $timeout(function () { element.trigger("chosen:updated"); }); } }); scope.$on("switchMode", function() { $timeout(function () { element.next(".chosen-container").css("width", element.parent().width() + "px"); }); }); } } } ]);
angular.module("bmComponents").directive("bmChosen", ["$timeout", function ($timeout) { return { restrict : "A", link : function(scope, element, attrs) { element.chosen(); scope.$watch(attrs.bmChosen, function (data) { if (data) element.trigger("chosen:updated"); }); scope.$watch(attrs.ngDisabled, function (data) { if (data === false || data === true) element.trigger("chosen:updated"); }); scope.$on("switchMode", function() { var classes = element.attr("class").split(/\s+/), spanClass = classes.filter(function (className) { return className.indexOf("span") > -1; })[0]; element.before("<p class='" + spanClass + " hidden'></p>"); element.next(".chosen-container").css("width", element.prev().css("width")); }); } } } ]);
Fix params handling in {{s}}. Former-commit-id: 15eae70c91cd08f9028944f8b6a3990d3170aa28
#-*- encoding: utf-8 -*- import base def handle_date(template): year = None if len(template.params) >= 3: try: year = int(unicode(template.params[2])) except ValueError: pass if isinstance(year, int): # assume {{date|d|m|y|...}} return ' '.join(map(unicode, template.params[:3])) else: # assume {{date|d m y|...}} return unicode(template.params[0]) def handle_s(template): ret = unicode(template.params[0]) if len(template.params) == 2: ret += unicode(template.params[1]) if template.name.matches('-s'): ret += ' av. J.-C' return ret class SnippetParser(base.SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('unité'): return ' '.join(map(unicode, template.params[:2])) elif template.name.matches('date'): return handle_date(template) elif template.name.matches('s') or template.name.matches('-s'): return handle_s(template) elif self.is_citation_needed(template): repl = [base.CITATION_NEEDED_MARKER] if template.params: repl = [template.params[0].value.strip_code()] + repl return ''.join(repl) return ''
#-*- encoding: utf-8 -*- import base def handle_date(template): year = None if len(template.params) >= 3: try: year = int(unicode(template.params[2])) except ValueError: pass if isinstance(year, int): # assume {{date|d|m|y|...}} return ' '.join(map(unicode, template.params[:3])) else: # assume {{date|d m y|...}} return unicode(template.params[0]) def handle_s(template): ret = template.params[0] if len(template.params) == 2: ret += template.params[1] if template.name.matches('-s'): ret += ' av. J.-C' return ret class SnippetParser(base.SnippetParserBase): def strip_template(self, template, normalize, collapse): if template.name.matches('unité'): return ' '.join(map(unicode, template.params[:2])) elif template.name.matches('date'): return handle_date(template) elif template.name.matches('s') or template.name.matches('-s'): return handle_s(template) elif self.is_citation_needed(template): repl = [base.CITATION_NEEDED_MARKER] if template.params: repl = [template.params[0].value.strip_code()] + repl return ''.join(repl) return ''