text
stringlengths 2
6.14k
|
|---|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nsx_mappings
Revision ID: 50e86cb2637a
Revises: 1fcfc149aca4
Create Date: 2013-10-26 14:37:30.012149
"""
# revision identifiers, used by Alembic.
revision = '50e86cb2637a'
down_revision = '1fcfc149aca4'
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade():
    """Create the ``neutron_nsx_port_mappings`` table.

    Any rows found in the legacy ``quantum_nvp_port_mapping`` table are
    copied into the new table (with a NULL switch id, which the old schema
    did not track) before the legacy table is dropped.
    """
    op.create_table(
        'neutron_nsx_port_mappings',
        sa.Column('neutron_id', sa.String(length=36), nullable=False),
        sa.Column('nsx_port_id', sa.String(length=36), nullable=False),
        sa.Column('nsx_switch_id', sa.String(length=36), nullable=True),
        sa.ForeignKeyConstraint(['neutron_id'], ['ports.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('neutron_id'))
    if migration.schema_has_table('quantum_nvp_port_mapping'):
        # Preserve pre-rename (NVP-era) mappings under the new NSX names.
        op.execute(
            "INSERT INTO neutron_nsx_port_mappings SELECT quantum_id as "
            "neutron_id, nvp_id as nsx_port_id, null as nsx_switch_id from"
            " quantum_nvp_port_mapping")
        op.drop_table('quantum_nvp_port_mapping')
|
import sys
import numpy as np
from mpi4py import MPI
from pySDC.helpers.stats_helper import filter_stats, sort_stats
from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right
from pySDC.implementations.controller_classes.controller_MPI import controller_MPI
from pySDC.implementations.problem_classes.HeatEquation_2D_PETSc_forced import heat2d_petsc_forced
from pySDC.implementations.sweeper_classes.imex_1st_order import imex_1st_order
from pySDC.implementations.transfer_classes.TransferPETScDMDA import mesh_to_mesh_petsc_dmda
def main():
    """
    Program to demonstrate usage of PETSc data structures and spatial parallelization,
    combined with parallelization in time.

    Command line:
        argv[1] (optional): number of processes per space-communicator (default 1)
        argv[2] (optional): output file name (default 'step_7_C_out.txt')
    """
    # set MPI communicator
    comm = MPI.COMM_WORLD

    world_rank = comm.Get_rank()
    world_size = comm.Get_size()

    # split world communicator to create space-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank / int(sys.argv[1]))
    else:
        color = int(world_rank / 1)
    space_comm = comm.Split(color=color)
    space_rank = space_comm.Get_rank()

    # split world communicator to create time-communicators
    if len(sys.argv) >= 2:
        color = int(world_rank % int(sys.argv[1]))
    else:
        color = int(world_rank / world_size)
    time_comm = comm.Split(color=color)
    time_rank = time_comm.Get_rank()

    # initialize level parameters
    level_params = dict()
    level_params['restol'] = 1E-08
    level_params['dt'] = 0.125
    level_params['nsweeps'] = [1]

    # initialize sweeper parameters
    sweeper_params = dict()
    sweeper_params['collocation_class'] = CollGaussRadau_Right
    sweeper_params['num_nodes'] = [3]
    sweeper_params['QI'] = ['LU']  # For the IMEX sweeper, the LU-trick can be activated for the implicit part
    sweeper_params['initial_guess'] = 'zero'

    # initialize problem parameters
    problem_params = dict()
    problem_params['nu'] = 1.0  # diffusion coefficient
    problem_params['freq'] = 2  # frequency for the test value
    problem_params['cnvars'] = [(65, 65)]  # number of degrees of freedom for the coarsest level
    problem_params['refine'] = [1, 0]  # number of refinements
    problem_params['comm'] = space_comm  # pass space-communicator to problem class
    problem_params['sol_tol'] = 1E-12  # set tolerance to PETSc' linear solver

    # initialize step parameters
    step_params = dict()
    step_params['maxiter'] = 50

    # initialize space transfer parameters
    space_transfer_params = dict()
    space_transfer_params['rorder'] = 2
    space_transfer_params['iorder'] = 2
    space_transfer_params['periodic'] = False

    # initialize controller parameters
    controller_params = dict()
    controller_params['logger_level'] = 20 if space_rank == 0 else 99  # set level depending on rank
    controller_params['dump_setup'] = False

    # fill description dictionary for easy step instantiation
    description = dict()
    description['problem_class'] = heat2d_petsc_forced  # pass problem class
    description['problem_params'] = problem_params  # pass problem parameters
    description['sweeper_class'] = imex_1st_order  # pass sweeper (see part B)
    description['sweeper_params'] = sweeper_params  # pass sweeper parameters
    description['level_params'] = level_params  # pass level parameters
    description['step_params'] = step_params  # pass step parameters
    description['space_transfer_class'] = mesh_to_mesh_petsc_dmda  # pass spatial transfer class
    description['space_transfer_params'] = space_transfer_params  # pass parameters for spatial transfer

    # set time parameters
    t0 = 0.0
    Tend = 0.25

    # instantiate controller
    controller = controller_MPI(controller_params=controller_params, description=description, comm=time_comm)

    # get initial values on finest level
    P = controller.S.levels[0].prob
    uinit = P.u_exact(t0)

    # call main function to get things done...
    uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)

    # compute exact solution and compare
    uex = P.u_exact(Tend)
    err = abs(uex - uend)

    # filter statistics by type (number of iterations)
    filtered_stats = filter_stats(stats, type='niter')

    # convert filtered statistics to list of iterations count, sorted by process
    iter_counts = sort_stats(filtered_stats, sortby='time')
    niters = np.array([item[1] for item in iter_counts])

    # limit output to space-rank 0 (as before when setting the logger level)
    if space_rank == 0:
        if len(sys.argv) == 3:
            fname = str(sys.argv[2])
        else:
            fname = 'step_7_C_out.txt'

        # Use a context manager so the file is closed even if one of the
        # asserts below fires (the original open()/close() pair leaked the
        # handle in that case).
        with open(fname, 'a+') as f:
            out = 'This is time-rank %i...' % time_rank
            f.write(out + '\n')
            print(out)

            # compute and print statistics
            for item in iter_counts:
                out = 'Number of iterations for time %4.2f: %2i' % item
                f.write(out + '\n')
                print(out)

            out = ' Mean number of iterations: %4.2f' % np.mean(niters)
            f.write(out + '\n')
            print(out)
            out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
            f.write(out + '\n')
            print(out)
            out = ' Position of max/min number of iterations: %2i -- %2i' % \
                (int(np.argmax(niters)), int(np.argmin(niters)))
            f.write(out + '\n')
            print(out)
            out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(np.std(niters)), float(np.var(niters)))
            f.write(out + '\n')
            print(out)

            timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
            out = 'Time to solution: %6.4f sec.' % timing[0][1]
            f.write(out + '\n')
            print(out)
            out = 'Error vs. PDE solution: %6.4e' % err
            f.write(out + '\n')
            print(out)

    assert err < 2E-04, 'ERROR: did not match error tolerance, got %s' % err
    assert np.mean(niters) <= 12, 'ERROR: number of iterations is too high, got %s' % np.mean(niters)


if __name__ == "__main__":
    main()
|
'use strict';
/* global config:true */
/* exported config */

// Root folders used throughout the gulp build. Exposed as globals so every
// gulp task file can reference them without requiring a config module.
global.SRC_FOLDER = 'src';
global.SCRIPTS_FOLDER = SRC_FOLDER + '/modules';
global.BUILD_FOLDER = 'build';
global.RELEASE_FOLDER = 'phonegap/www';
global.TMP_FOLDER = 'tmp';

// Central build configuration: source globs, destination folders for the
// plain build and the PhoneGap release, and bundled output file names.
global.config = {
  paths: {
    src: {
      index: SRC_FOLDER + '/index.html',
      mainStyles: SRC_FOLDER + '/styles/app.scss',
      styles: SRC_FOLDER + '/styles/*.scss',
      scripts: [
        // Compiled templates come first so the template-cache module is
        // registered before the app modules that use it.
        TMP_FOLDER + '/templates/templates.js',
        SRC_FOLDER + '/modules/**/*.js'
      ],
      vendor: [
        'bower_components/angular/angular.min.js',
        'bower_components/angular-ui-router/release/angular-ui-router.min.js'
      ],
      fonts: [
        'bower_components/bootstrap-sass-official/assets/fonts/bootstrap/**',
        'bower_components/fontawesome/fonts/**'
      ],
      images: SRC_FOLDER + '/images/**/*',
      templates: SRC_FOLDER + '/templates/**/*.html',
      templatesCompiled: TMP_FOLDER + '/templates',
      dev: [
        'karma.conf.js',
        'protractor.conf.js',
        'gulp/**/*.js'
      ],
      unit: [SRC_FOLDER + '/tests/unit/**/*.js'],
      e2e: [SRC_FOLDER + '/tests/e2e/**/*.js']
    },
    dest: {
      build: {
        index: BUILD_FOLDER,
        styles: BUILD_FOLDER + '/styles',
        scripts: BUILD_FOLDER + '/scripts',
        fonts: BUILD_FOLDER + '/fonts',
        images: BUILD_FOLDER + '/images'
      },
      phonegap: {
        index: RELEASE_FOLDER,
        styles: RELEASE_FOLDER + '/styles',
        scripts: RELEASE_FOLDER + '/scripts',
        fonts: RELEASE_FOLDER + '/fonts',
        images: RELEASE_FOLDER + '/images'
      }
    }
  },
  filenames: {
    styles: 'bundle.css',
    vendor: 'vendor.js',
    scripts: 'scripts.js'
  }
};
|
<?php
// Message catalogue for the group-management UI.
// Keys are the English source strings; an empty value means the string is
// not yet translated and the English original is shown instead. Only
// 'Save' has a translation ('儲存', Traditional Chinese) so far.
return array (
  '<strong>Create</strong> new group' => '',
  '<strong>Edit</strong> group' => '',
  'Delete' => '',
  'Description' => '',
  'Group name' => '',
  'Ldap DN' => '',
  'Save' => '儲存',
);
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Lmdb(MakefilePackage):
    """Symas LMDB is an extraordinarily fast, memory-efficient database we
    developed for the Symas OpenLDAP Project. With memory-mapped files, it
    has the read performance of a pure in-memory database while retaining
    the persistence of standard disk-based databases."""

    homepage = "https://lmdb.tech/"
    url = "https://github.com/LMDB/lmdb/archive/LMDB_0.9.21.tar.gz"

    version('0.9.24', sha256='44602436c52c29d4f301f55f6fd8115f945469b868348e3cddaf91ab2473ea26')
    version('0.9.22', sha256='f3927859882eb608868c8c31586bb7eb84562a40a6bf5cc3e13b6b564641ea28')
    version('0.9.21', sha256='1187b635a4cc415bb6972bba346121f81edd996e99b8f0816151d4090f90b559')
    version('0.9.16', sha256='49d7b40949f2ced9bc8b23ea6a89e75471a1c9126537a8b268c318a00b84322b')

    # The LMDB library sources live in this subdirectory of the repository.
    build_directory = 'libraries/liblmdb'

    @property
    def install_targets(self):
        """Make targets for the install phase: 'make prefix=<prefix> install'."""
        return ['prefix={0}'.format(self.prefix), 'install']

    @run_after('install')
    def install_pkgconfig(self):
        """Generate an lmdb.pc pkg-config file, which upstream does not ship."""
        mkdirp(self.prefix.lib.pkgconfig)

        pc_lines = [
            'prefix={0}'.format(self.prefix),
            'exec_prefix=${prefix}',
            'libdir={0}'.format(self.prefix.lib),
            'includedir={0}'.format(self.prefix.include),
            '',
            'Name: LMDB',
            'Description: Symas LMDB is an extraordinarily fast, '
            'memory-efficient database.',
            'Version: {0}'.format(self.spec.version),
            'Cflags: -I${includedir}',
            'Libs: -L${libdir} -llmdb',
        ]
        with open(join_path(self.prefix.lib.pkgconfig, 'lmdb.pc'), 'w') as f:
            f.write('\n'.join(pc_lines) + '\n')
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Batch.Protocol.Models
{
using System.Linq;
/// <summary>
/// A range of task IDs that a task can depend on. All tasks with IDs in
/// the range must complete successfully before the dependent task can be
/// scheduled.
/// </summary>
/// <remarks>
/// The start and end of the range are inclusive. For example, if a range
/// has start 9 and end 12, then it represents tasks '9', '10', '11' and
/// '12'.
/// </remarks>
public partial class TaskIdRange
{
    /// <summary>
    /// Initializes a new instance of the TaskIdRange class.
    /// </summary>
    public TaskIdRange() { }

    /// <summary>
    /// Initializes a new instance of the TaskIdRange class.
    /// </summary>
    /// <param name="start">The first task ID in the range.</param>
    /// <param name="end">The last task ID in the range.</param>
    public TaskIdRange(int start, int end)
    {
        // Both bounds are inclusive; no normalization is performed here.
        Start = start;
        End = end;
    }

    /// <summary>
    /// Gets or sets the first task ID in the range.
    /// </summary>
    [Newtonsoft.Json.JsonProperty(PropertyName = "start")]
    public int Start { get; set; }

    /// <summary>
    /// Gets or sets the last task ID in the range.
    /// </summary>
    [Newtonsoft.Json.JsonProperty(PropertyName = "end")]
    public int End { get; set; }

    /// <summary>
    /// Validate the object.
    /// </summary>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown if validation fails
    /// </exception>
    public virtual void Validate()
    {
        // A plain ID range imposes no constraints to enforce.
    }
}
}
|
import numpy as np
import matplotlib.pyplot as plt


def _load(name):
    """Read one single-column CSV trace produced by the HH dynamic-clamp runs."""
    return np.genfromtxt(name, delimiter=',')


t = _load('time.csv')

# (hot, cold) membrane-voltage trace pairs for cells b, d, f and h.
trace_pairs = [
    (_load('v_%s_hot_HHdclamp.csv' % cell), _load('v_%s_cold_HHdclamp.csv' % cell))
    for cell in ('b', 'd', 'f', 'h')
]

# One panel per cell: hot trace dotted red, cold trace solid blue,
# identical window/limits, all ticks removed, square aspect ratio.
# (Previously four copy-pasted stanzas; behavior is unchanged.)
for row, (v_hot, v_cold) in enumerate(trace_pairs, start=1):
    ax = plt.subplot(4, 1, row)
    plt.plot(t, v_hot, lw=2, color='indianred', linestyle=':')
    plt.plot(t, v_cold, lw=1, color='blue')
    plt.xlim(100, 120)
    plt.ylim(-80, 60)
    ax.set_yticklabels([])
    ax.set_yticks([])
    ax.set_xticklabels([])
    ax.set_xticks([])
    x0, x1 = ax.get_xlim()
    y0, y1 = ax.get_ylim()
    ax.set_aspect(abs(x1 - x0) / abs(y1 - y0))

# plt.savefig('fig_left_HH.png', format='png', dpi=600, bbox_inches='tight')
plt.show()
|
"""Support for Essent API."""
from __future__ import annotations
from datetime import timedelta
from pyessent import PyEssent
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, ENERGY_KILO_WATT_HOUR
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
SCAN_INTERVAL = timedelta(hours=1)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Essent platform."""
    essent = EssentBase(config[CONF_USERNAME], config[CONF_PASSWORD])

    meters = []
    for meter in essent.retrieve_meters():
        meter_data = essent.retrieve_meter_data(meter)
        # One sensor entity per tariff reported for this meter.
        meters.extend(
            EssentMeter(essent, meter, meter_data["type"], tariff, info["unit"])
            for tariff, info in meter_data["values"]["LVR"].items()
        )

    if not meters:
        hass.components.persistent_notification.create(
            "Couldn't find any meter readings. "
            "Please ensure Verbruiks Manager is enabled in Mijn Essent "
            "and at least one reading has been logged to Meterstanden.",
            title="Essent",
            notification_id="essent_notification",
        )
        return

    add_devices(meters, True)
class EssentBase:
    """Essent Base: caches the latest meter data per EAN."""

    def __init__(self, username, password):
        """Store credentials and fetch an initial data snapshot."""
        self._username = username
        self._password = password
        self._meter_data = {}
        self.update()

    def retrieve_meters(self):
        """Return the identifiers (EANs) of all known meters."""
        return self._meter_data.keys()

    def retrieve_meter_data(self, meter):
        """Return the cached data for a single meter."""
        return self._meter_data[meter]

    @Throttle(timedelta(minutes=30))
    def update(self):
        """Retrieve the latest meter data from Essent."""
        session = PyEssent(self._username, self._password)
        for ean in set(session.get_EANs()):
            # Skip EANs with no reading; keep any previously cached data.
            reading = session.read_meter(ean, only_last_meter_reading=True)
            if reading:
                self._meter_data[ean] = reading
class EssentMeter(SensorEntity):
    """Representation of Essent measurements."""

    def __init__(self, essent_base, meter, meter_type, tariff, unit):
        """Initialize the sensor."""
        self._essent_base = essent_base
        self._meter = meter
        self._type = meter_type
        self._tariff = tariff
        self._unit = unit
        self._state = None

    @property
    def unique_id(self) -> str | None:
        """Return a unique ID."""
        return f"{self._meter}-{self._type}-{self._tariff}"

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"Essent {self._type} ({self._tariff})"

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return ENERGY_KILO_WATT_HOUR if self._unit.lower() == "kwh" else self._unit

    def update(self):
        """Fetch the energy usage."""
        # Refresh the shared cache first so the reading is not stale.
        self._essent_base.update()

        meter_data = self._essent_base.retrieve_meter_data(self._meter)
        records = meter_data["values"]["LVR"][self._tariff]["records"]
        self._state = next(iter(records.values()))
|
<?php
/* TwigBundle:Exception:traces.html.twig */
// Auto-generated Twig template cache for "TwigBundle:Exception:traces.html.twig"
// (the stack-trace section of the Symfony exception screen). Do not edit by
// hand; regenerate from the .html.twig source instead.
class __TwigTemplate_c072ffd039c744f24a1ed3c0c72c5754227b7808a879688e9675dfe6cc942b40 extends Twig_Template
{
    public function __construct(Twig_Environment $env)
    {
        parent::__construct($env);

        // This template neither extends another template nor defines blocks.
        $this->parent = false;

        $this->blocks = array(
        );
    }

    // Renders the template. The "// line N" markers refer to line numbers of
    // the original .html.twig source (see getDebugInfo()).
    protected function doDisplay(array $context, array $blocks = array())
    {
        // line 1
        echo "<div class=\"block\">
";
        // line 2
        if (((isset($context["count"]) ? $context["count"] : null) > 0)) {
            // line 3
            echo " <h2>
<span><small>[";
            // line 4
            echo twig_escape_filter($this->env, (((isset($context["count"]) ? $context["count"] : null) - (isset($context["position"]) ? $context["position"] : null)) + 1), "html", null, true);
            echo "/";
            echo twig_escape_filter($this->env, ((isset($context["count"]) ? $context["count"] : null) + 1), "html", null, true);
            echo "]</small></span>
";
            // line 5
            echo $this->env->getExtension('code')->abbrClass($this->getAttribute((isset($context["exception"]) ? $context["exception"] : null), "class", array()));
            echo ": ";
            echo $this->env->getExtension('code')->formatFileFromText(nl2br(twig_escape_filter($this->env, $this->getAttribute((isset($context["exception"]) ? $context["exception"] : null), "message", array()), "html", null, true)));
            echo "
";
            // line 6: buffer the toggle-link markup so its inter-tag
            // whitespace can be collapsed below.
            ob_start();
            // line 7
            echo " <a href=\"#\" onclick=\"toggle('traces-";
            echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
            echo "', 'traces'); switchIcons('icon-traces-";
            echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
            echo "-open', 'icon-traces-";
            echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
            echo "-close'); return false;\">
<img class=\"toggle\" id=\"icon-traces-";
            // line 8
            echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
            echo "-close\" alt=\"-\" src=\"data:image/gif;base64,R0lGODlhEgASAMQSANft94TG57Hb8GS44ez1+mC24IvK6ePx+Wa44dXs92+942e54o3L6W2844/M6dnu+P/+/l614P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAABIALAAAAAASABIAQAVCoCQBTBOd6Kk4gJhGBCTPxysJb44K0qD/ER/wlxjmisZkMqBEBW5NHrMZmVKvv9hMVsO+hE0EoNAstEYGxG9heIhCADs=\" style=\"display: ";
            echo (((0 == (isset($context["count"]) ? $context["count"] : null))) ? ("inline") : ("none"));
            echo "\" />
<img class=\"toggle\" id=\"icon-traces-";
            // line 9
            echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
            echo "-open\" alt=\"+\" src=\"data:image/gif;base64,R0lGODlhEgASAMQTANft99/v+Ga44bHb8ITG52S44dXs9+z1+uPx+YvK6WC24G+944/M6W28443L6dnu+Ge54v/+/l614P///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAEAABMALAAAAAASABIAQAVS4DQBTiOd6LkwgJgeUSzHSDoNaZ4PU6FLgYBA5/vFID/DbylRGiNIZu74I0h1hNsVxbNuUV4d9SsZM2EzWe1qThVzwWFOAFCQFa1RQq6DJB4iIQA7\" style=\"display: ";
            echo (((0 == (isset($context["count"]) ? $context["count"] : null))) ? ("none") : ("inline"));
            echo "\" />
</a>
";
            // Collapse whitespace between tags in the buffered markup.
            echo trim(preg_replace('/>\s+</', '><', ob_get_clean()));
            // line 12
            echo " </h2>
";
        } else {
            // line 14
            echo " <h2>Stack Trace</h2>
";
        }
        // line 16
        echo "
<a id=\"traces-link-";
        // line 17
        echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
        echo "\"></a>
<ol class=\"traces list-exception\" id=\"traces-";
        // line 18
        echo twig_escape_filter($this->env, (isset($context["position"]) ? $context["position"] : null), "html", null, true);
        echo "\" style=\"display: ";
        echo (((0 == (isset($context["count"]) ? $context["count"] : null))) ? ("block") : ("none"));
        echo "\">
";
        // line 19: iterate over the exception's stack frames.
        $context['_parent'] = (array) $context;
        $context['_seq'] = twig_ensure_traversable($this->getAttribute((isset($context["exception"]) ? $context["exception"] : null), "trace", array()));
        foreach ($context['_seq'] as $context["i"] => $context["trace"]) {
            // line 20
            echo " <li>
";
            // line 21: each frame is rendered by the trace sub-template.
            $this->env->loadTemplate("TwigBundle:Exception:trace.html.twig")->display(array("prefix" => (isset($context["position"]) ? $context["position"] : null), "i" => $context["i"], "trace" => $context["trace"]));
            // line 22
            echo " </li>
";
        }
        // Standard compiled-for-loop epilogue: restore the pre-loop context.
        $_parent = $context['_parent'];
        unset($context['_seq'], $context['_iterated'], $context['i'], $context['trace'], $context['_parent'], $context['loop']);
        $context = array_intersect_key($context, $_parent) + $_parent;
        // line 24
        echo " </ol>
</div>
";
    }

    public function getTemplateName()
    {
        return "TwigBundle:Exception:traces.html.twig";
    }

    // This template cannot be used as a trait.
    public function isTraitable()
    {
        return false;
    }

    // Maps compiled PHP line numbers to source .html.twig line numbers.
    public function getDebugInfo()
    {
        return array ( 101 => 24, 94 => 22, 92 => 21, 89 => 20, 85 => 19, 79 => 18, 75 => 17, 72 => 16, 68 => 14, 64 => 12, 56 => 9, 50 => 8, 41 => 7, 39 => 6, 33 => 5, 27 => 4, 24 => 3, 22 => 2, 19 => 1,);
    }
}
|
#! /usr/bin/python
import sys;
import url_errors
DONE = False
DONE_string = "[-- DONE --]"
'''
Assumes that we are at the current line where that starts with '*'. The format
is follows the following example:
--------------------------
* 1:2009-09-09:12:56:19 http://physics.nist.gov
e796febb7593fde042a7511a9761da8ce6d84299
http://physics.nist.gov/Divisions/Div842/div842.html
http://physics.nist.gov/MajResProj/Nanotech/nanotech.html
* 2:2009-09-09:12:56:20 http://physics.nist.gov/Divisions/Div842/div842.html
68e24912e780a2076f29b76b64066003a0f05fd0
http://physics.nist.gov/Divisions/Div842/Gp4/group4.html
http://physics.nist.gov/Divisions/Div842/Gp5/index.html
http://www.doc.gov
--------------------------
will process current page to visited_links (along with its hash-code)
and add outgoing links to frontier.
current line: string
frontier: set of strings (URLs) to visit
visited_links: set of strings (URLs) already processed
hash_codes: set of strings (alphanumeric hash codes) for visited pages
'''
'''
read a URL (with possible spaces) in the current line
'''
def get_url(line):
    """Return the URL starting at the first 'http' in *line*, or '' if none.

    URLs in the dump may contain spaces, so everything from 'http' to the
    end of the line is treated as part of the URL.
    """
    pos = line.find('http')
    if pos < 0:
        return ''
    # Reuse the position already found instead of searching a second time.
    return line[pos:]
# The last letter in readline is a '\n', so let's not include it
#
def get_next_line(file):
    """Read one line from *file*, stripping the trailing newline.

    Returns '' only at end of input (callers use '' as the EOF sentinel).
    A blank line in the file is returned as a single space so it is not
    mistaken for EOF.

    Bug fix: the old code removed the last character unconditionally from
    any line longer than one character, which truncated the final line of
    a file that lacks a trailing newline.
    """
    line = file.readline()
    if line == '':
        return ''  # true EOF
    if line.endswith('\n'):
        line = line[:-1]  # drop only the newline, never a data character
    return line if line else ' '
def scroll_to_next_webpage(file):
    """Advance *file* to the next page-header line (one starting with '*').

    Returns that header line, or '' when the dump is exhausted; in the
    latter case the module-level DONE flag is also set.
    """
    global DONE
    if not file:
        DONE = True
    while file and not DONE:
        line = get_next_line(file)
        if line == DONE_string or line == '':
            # End-of-dump marker or EOF: stop all further processing.
            DONE = True
            return ''
        if line.startswith('*'):
            return line
    return ''
'''
file text input file (the webcrawl dump)
current_line string (current line of webcrawl dump)
frontier set of strings (URLs to visit)
visited_links set of strings (URLs already visited)
sha_codes set of strings (hexadecimal hash codes for vistied URLs)
'''
def process_page(file, current_line, frontier, visited_links, sha_codes):
    """Process one crawled-page record from the dump.

    Adds the page's URL to *visited_links*, records its hash code in
    *sha_codes*, and adds previously unseen outgoing links to *frontier*.

    file           -- text input file (the webcrawl dump)
    current_line   -- the page header line (starts with '*')
    frontier       -- set of URLs still to visit
    visited_links  -- set of URLs already processed
    sha_codes      -- set of hex hash codes for visited pages
    """
    if DONE:
        return
    home_url = get_url(current_line)
    if len(home_url) < 1:
        return
    visited_links.add(home_url)
    # discard() rather than remove(): the page may never have been queued.
    frontier.discard(home_url)

    # 'digest' instead of 'hash' -- the old name shadowed the builtin.
    digest = get_next_line(file)
    if digest in url_errors.URL_errors:  # fetch failed with a known error
        return
    if not digest:
        # Truncated dump: the old code crashed on digest[0] here.
        return
    if digest[0] == '#':
        digest = get_next_line(file)
    elif digest[0] == '!':  # contents already processed under another URL
        return
    sha_codes.add(digest)

    # Remaining lines up to the next non-URL line are outgoing links.
    while True:
        url = get_url(get_next_line(file))
        if url == "":
            break
        if url not in visited_links:
            frontier.add(url)
def process_wg_file(file, visited_links, hash_codes, frontier):
    """Consume the whole webcrawl dump, one page record at a time."""
    while file and not DONE:
        header = scroll_to_next_webpage(file)
        process_page(file, header, frontier, visited_links, hash_codes)
# ***** MAIN *********
def main():
    """Read a webcrawl dump from stdin, then report the three result sets."""
    frontier = set()
    visited_links = set()
    hash_codes = set()
    process_wg_file(sys.stdin, visited_links, hash_codes, frontier)

    # now print out the results, one titled section per set
    sections = (
        ("Visited Links: ", "------------- ", visited_links),
        ("Frontier: ", "-------- ", frontier),
        ("Hash Codes: ", "----------- ", hash_codes),
    )
    for title, rule, urls in sections:
        print(" ")
        print(title)
        print(rule)
        for url in urls:
            print(url)
    print(" ")


if __name__ == "__main__":
    main()
|
"""SCons.Tool.g++
Tool-specific initialization for g++.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/g++.py 5134 2010/08/16 23:02:40 bdeegan"
import os.path
import re
import subprocess
import SCons.Tool
import SCons.Util
cplusplus = __import__('c++', globals(), locals(), [])
compilers = ['g++']
def generate(env):
    """Add Builders and construction variables for g++ to an Environment.

    Extends the generic C++ tool settings, applies platform-specific
    shared-object flags, and records the detected compiler version in
    env['CXXVERSION'].
    """
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Start from the generic C++ tool configuration.
    cplusplus.generate(env)

    env['CXX'] = env.Detect(compilers)

    # platform specific settings
    if env['PLATFORM'] == 'aix':
        env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS -mminimal-toc')
        env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
        env['SHOBJSUFFIX'] = '$OBJSUFFIX'
    elif env['PLATFORM'] == 'hpux':
        env['SHOBJSUFFIX'] = '.pic.o'
    elif env['PLATFORM'] == 'sunos':
        env['SHOBJSUFFIX'] = '.pic.o'

    # determine compiler version
    if env['CXX']:
        # -dumpversion was added in GCC 3.0; as long as older GCC versions
        # are supported, use --version plus a regular expression instead.
        pipe = SCons.Action._subproc(env, [env['CXX'], '--version'],
                                     stdin='devnull',
                                     stderr='devnull',
                                     stdout=subprocess.PIPE)
        if pipe.wait() != 0:
            return
        line = pipe.stdout.readline()
        match = re.search(r'[0-9]+(\.[0-9]+)+', line)
        if match:
            env['CXXVERSION'] = match.group(0)
def exists(env):
    """Detect a usable g++ compiler for SCons tool selection."""
    detected = env.Detect(compilers)
    return detected
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
// ========================================================================================
// GRSFramework
// Copyright (C) 2016 by Gabriel Nützi <gnuetzi (at) gmail (døt) com>
//
// This Source Code Form is subject to the terms of the GNU General Public License as
// published by the Free Software Foundation; either version 3 of the License,
// or (at your option) any later version. If a copy of the GPL was not distributed with
// this file, you can obtain one at http://www.gnu.org/licenses/gpl-3.0.html.
// ========================================================================================
#ifndef GRSF_dynamics_collision_ContactTag_hpp
#define GRSF_dynamics_collision_ContactTag_hpp
#include "GRSF/common/Asserts.hpp"
#include "GRSF/common/TypeDefs.hpp"
#include "GRSF/common/TupleHelper.hpp"
#include "GRSF/dynamics/general/RigidBodyId.hpp"
/**
* @ingroup Contact
* @brief This is the ContactTag class which is used for the hash table boost::unordered_map.
*/
/** @{ */
class ContactTag
{
public:
    DEFINE_RIGIDBODY_CONFIG_TYPES

    /// Tuple of (body id, feature type, feature id) for both contact bodies;
    /// see the documentation of m_tag below for the exact field semantics.
    using ContactTagTuple =
        std::tuple<RigidBodyIdType, unsigned char, unsigned int, RigidBodyIdType, unsigned char, unsigned int>;

    /// Fill the tag from both bodies' ids and their contacting features.
    /// Defined out of line; per the m_tag note below, the body with the
    /// smaller id is stored first.
    void set(const RigidBodyIdType& b1,
             unsigned char type1,
             unsigned int id1,
             const RigidBodyIdType& b2,
             unsigned char type2,
             unsigned int id2);

    /// Equality comparison against another tag (implemented out of line).
    bool operator==(ContactTag const& c2) const;

    friend class ContactTagHash;  // needs access to m_tag for hashing

private:
    /**
     * This tuple builds up the hash. It consists of:
     * - Body with smaller id is always first!
     * - uint64_t: Body1 Ptr
     * - unsigned char: Type1: None = 0, Face = 1, Edge = 2, Vertex = 3
     * - unsigned int: some id, e.g. face idx, edge idx, vertex idx
     * - uint64_t: Body2 Ptr
     * - unsigned char: Type2: None = 0, Face = 1, Edge = 2, Vertex = 3
     * - unsigned int: some id, e.g. face idx, edge idx, vertex idx
     */
    ContactTagTuple m_tag{};  // value initialized
};
/**
* @ingroup Contact
* @brief This is the ContactTag functor which hashs a ContactTag!
*/
class ContactTagHash : std::unary_function<ContactTag, std::size_t>
{
public:
std::size_t operator()(ContactTag const& c) const
{
TupleHash<ContactTag::ContactTagTuple> hasher;
return hasher(c.m_tag);
}
};
#endif
|
/*
* Copyright (C) 2012 Joshua Hollenbeck
*
* This file is part of c-arel, distributed under the MIT-LICENSE.
* For full terms see the included MIT-LICENSE file.
*/
#ifndef include_c_arel_predications_h
#define include_c_arel_predications_h
namespace c_arel {
// Mixin that builds c-arel predicate AST nodes (equality, IN, LIKE, range
// comparisons, BETWEEN) from the deriving attribute/expression type.
// The *_any / *_all variants group a predicate per element of `others`
// combined with OR / AND respectively.
class Predications : virtual public Derived {
public:
    // (in)equality
    nodes::NotEqual not_equal(variant other);
    nodes::Grouping not_equal_any(std::vector<variant> others);
    nodes::Grouping not_equal_all(std::vector<variant> others);
    nodes::Equality equal(variant other);
    nodes::Grouping equal_any(std::vector<variant> others);
    nodes::Grouping equal_all(std::vector<variant> others);
    // IN / NOT IN — either against a subquery (SelectManager) or a value
    nodes::In in(SelectManager & other);
    nodes::In in(variant other);
    nodes::Grouping in_any(std::vector<variant> others);
    nodes::Grouping in_all(std::vector<variant> others);
    nodes::NotIn not_in(SelectManager & other);
    nodes::NotIn not_in(variant other);
    nodes::Grouping not_in_any(std::vector<variant> others);
    nodes::Grouping not_in_all(std::vector<variant> others);
    // pattern matching (LIKE / NOT LIKE)
    nodes::Matches matches(variant other);
    nodes::Grouping matches_any(std::vector<variant> others);
    nodes::Grouping matches_all(std::vector<variant> others);
    nodes::DoesNotMatch does_not_match(variant other);
    nodes::Grouping does_not_match_any(std::vector<variant> others);
    nodes::Grouping does_not_match_all(std::vector<variant> others);
    // ordering comparisons
    nodes::GreaterThanOrEqual gteq(variant right);
    nodes::Grouping gteq_any(std::vector<variant> others);
    nodes::Grouping gteq_all(std::vector<variant> others);
    nodes::GreaterThan gt(variant right);
    nodes::Grouping gt_any(std::vector<variant> others);
    nodes::Grouping gt_all(std::vector<variant> others);
    nodes::LessThan lt(variant right);
    nodes::Grouping lt_any(std::vector<variant> others);
    nodes::Grouping lt_all(std::vector<variant> others);
    nodes::LessThanOrEqual lteq(variant right);
    nodes::Grouping lteq_any(std::vector<variant> others);
    nodes::Grouping lteq_all(std::vector<variant> others);
    // BETWEEN — as a 2-element vector or explicit bounds
    nodes::Between between(std::vector<variant> expr);
    nodes::Between between(variant left, variant right);
    nodes::Grouping between_any(std::vector<variant> others);
    nodes::Grouping between_all(std::vector<variant> others);
};
}
#endif
|
import yaml
from op.commit import *
from jnpr.junos import Device
from datetime import datetime
import logging
import sys
import argparse
from optparse import OptionParser
from logging.handlers import RotatingFileHandler
import re
### Function to connect to device and then collect data from PhyPort Table
def get_data(router, options):
    """Connect to *router* over NETCONF and return its commit-history table.

    router  -- hostname or IP address of the device
    options -- parsed CLI arguments providing .username and .password
    """
    jdev = Device(host=router, user=options.username, password=options.password)
    jdev.open()
    try:
        data = CommitTableJNPR(jdev).get()
    finally:
        # Always release the NETCONF session, even if the table fetch fails
        # (the original never closed the connection).
        jdev.close()
    return data
def main(options):
### Open list of devices
my_list_of_devices=open(options.lab).read()
my_list_of_routers=yaml.load(my_list_of_devices)
global_netconf = 0
global_cli = 0
for router in my_list_of_routers:
cli = 0
netconf = 0
print " * Start checking router "+ router
logger.info("Analyzing router %s",router)
data = get_data(router,options)
for item in data:
if item.commit_method == "cli" :
cli += 1
elif item.commit_method == "netconf":
netconf += 1
print " - Number of NETCONF commit: "+ str(netconf)
global_netconf += netconf
print " - Number of CLI commit: "+ str(cli)
global_cli += cli
print "\n----------------------------------------"
print "* # of NETCONF commit: "+str(global_netconf)
print "* # of CLI commit: "+str(global_cli)
print "----------------------------------------\n"
# ----------------------------------------------------------------- #
# MAIN Section
# ----------------------------------------------------------------- #
if __name__ == "__main__":
    # Default Username and Password. Could be updated through CLI Parameters
    version = "1.1"
    gUser = 'root'
    gPass = 'Poclab123'
    gFile = '../lab-poc.yml'
    ### CLI Option parser:
    parser = argparse.ArgumentParser(description="Python & Junos demo -- version " + version)
    parser.add_argument('-u', '--username', help='Username required to connect to devices', default=gUser)
    parser.add_argument('-p', '--password', help='User password to connect to devices', default=gPass)
    parser.add_argument('-l', '--lab', help='Files containing device IP address', default=gFile)
    options = parser.parse_args()
    ### Activate logging to keep trace in log file
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(funcName)s :: %(message)s')
    ### Display log with CRITICAL level and higher on the console
    steam_handler = logging.StreamHandler()
    steam_handler.setLevel(logging.CRITICAL)
    steam_handler.setFormatter(formatter)
    ### Write log with DEBUG level and higher to the log file
    file_handler = logging.FileHandler("junos-python-l4-commit-state.log")
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    ### Add each handler to the logger exactly once.
    ### (Bug fix: the original called addHandler(steam_handler) twice, which
    ### duplicated every CRITICAL record on the console.)
    logger.addHandler(steam_handler)
    logger.addHandler(file_handler)
    logger.info('Start to analyze routers')
    main(options)
|
from io import BytesIO
from typing import List
import cle
from ...errors import AngrCorruptDBError
from ..models import DbObject
class LoaderSerializer:
    """
    Serialize/unserialize a CLE Loader object into/from an angr DB.
    """

    # Map each CLE backend class to its registered backend name.
    backend2name = {v: k for k, v in cle.ALL_BACKENDS.items()}

    @staticmethod
    def dump(session, loader):
        """Store every on-disk binary of *loader* into the DB session.

        Dynamically created objects (externs, TLS, kernel) are skipped, as
        are binaries already present in the database.
        """
        for obj in loader.all_objects:
            if isinstance(obj, (cle.ExternObject,
                                cle.backends.tls.elf_tls.ELFTLSObject,
                                cle.KernelObject,)):
                # skip dynamically created objects
                continue
            # does the object exist?
            exists = session.query(DbObject.id).filter_by(path=obj.binary).scalar() is not None
            if exists:
                # it exists. skip.
                continue
            # FIXME: We assume the binary and its libraries all still exist on the disk
            # Read the binary content; the with-block closes the file handle
            # (the original leaked it).
            with open(obj.binary, "rb") as binary_file:
                content = binary_file.read()
            # save the object
            o = DbObject(
                main_object=loader.main_object is obj,
                path=obj.binary,
                content=content,
                backend=LoaderSerializer.backend2name.get(obj.__class__),
                backend_args="",  # TODO: We will need support from CLE to store loader arguments
            )
            session.add(o)

    @staticmethod
    def load(session):
        """Recreate a cle.Loader from the objects stored in the DB session.

        Raises AngrCorruptDBError if no object is marked as the main object.
        """
        all_objects = {}  # path -> DbObject
        main_object = None
        db_objects = session.query(DbObject)  # type: List[DbObject]
        for db_o in db_objects:
            all_objects[db_o.path] = db_o
            if db_o.main_object:
                main_object = db_o
        if main_object is None:
            raise AngrCorruptDBError("Corrupt database: No main object.")
        # build params
        # FIXME: Load other objects
        loader = cle.Loader(
            BytesIO(main_object.content),
        )
        # fix the binary name of the main binary
        loader._main_binary_path = main_object.path
        loader.main_object.binary = main_object.path
        return loader
|
#!/usr/bin/env python3
from direction import Direction
import math
# Shell logic
class Shell:
    """A projectile with a grid position and a travel direction.

    All state lives on the instance and is set in __init__. (Fix: removed the
    redundant class-level `direction`/`x`/`y` attributes that merely
    duplicated the __init__ defaults and were always shadowed by instance
    attributes.)
    """

    def __init__(self, x=0, y=0, direction=Direction.UP):
        self.direction = direction
        self.x = x
        self.y = y

    # Simple getters
    def get_direction(self):
        return self.direction

    def get_x(self):
        return self.x

    def get_y(self):
        return self.y

    # Move the shell by its speed (one unit per tick).
    # The game logic will have to stop the shell from running into walls.
    def move(self):
        if self.direction == Direction.UP:
            self.y -= 1.0
        elif self.direction == Direction.RIGHT:
            self.x += 1.0
        elif self.direction == Direction.DOWN:
            self.y += 1.0
        else:  # Direction.LEFT
            self.x -= 1.0
# Free functions
def add_key(player: Shell, key):
    # NOTE(review): Shell defines no add_key method, so this delegation raises
    # AttributeError at runtime. It looks copied from a Player-style API —
    # confirm the intended receiver type.
    player.add_key(key)
def get_keys_pressed(player: Shell):
    # NOTE(review): Shell defines no get_keys_pressed method, so this raises
    # AttributeError at runtime — presumably meant for a Player type; confirm.
    return player.get_keys_pressed()
def is_driving(player):
    """Return True while the player has any non-zero velocity component."""
    dx = player.get_dx()
    dy = player.get_dy()
    # De Morgan of the original: moving unless BOTH components are ~0.
    return not (math.isclose(dx, 0.0) and math.isclose(dy, 0.0))
def is_driving_up(player):
    """Return True when velocity is exactly straight up (dy == -1, dx == 0)."""
    moving_up = math.isclose(player.get_dy(), -1.0)
    no_sideways = math.isclose(player.get_dx(), 0.0)
    return moving_up and no_sideways
def is_driving_right(player):
    """Return True when velocity is exactly rightward (dx == 1, dy == 0)."""
    moving_right = math.isclose(player.get_dx(), 1.0)
    no_vertical = math.isclose(player.get_dy(), 0.0)
    return moving_right and no_vertical
def is_driving_down(player):
    """Return True when velocity is exactly downward (dy == 1, dx == 0)."""
    moving_down = math.isclose(player.get_dy(), 1.0)
    no_sideways = math.isclose(player.get_dx(), 0.0)
    return moving_down and no_sideways
def is_driving_left(player):
    """Return True when velocity is exactly leftward (dx == -1, dy == 0)."""
    moving_left = math.isclose(player.get_dx(), -1.0)
    no_vertical = math.isclose(player.get_dy(), 0.0)
    return moving_left and no_vertical
def is_stopped(player):
    """Return True when both velocity components are (approximately) zero."""
    return all(math.isclose(component, 0.0)
               for component in (player.get_dx(), player.get_dy()))
|
//
// PIRReceivePaymentsViewController.h
//
// Created by Kenny Tang on 12/9/13.
// Copyright (c) 2013 Kenny Tang. All rights reserved.
//
#import <UIKit/UIKit.h>
// View controller for the "receive payments" screen. No public API beyond
// what UIViewController provides; all behaviour lives in the implementation.
@interface PIRReceivePaymentsViewController : UIViewController
@end
|
//
// cpp14/can_query_not_applicable_static.cpp
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2021 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#include <boost/asio/query.hpp>
#include <cassert>
// Property with a static_query_v member but (deliberately) no is_applicable
// trait for `object`, so boost::asio::query must not be callable.
struct prop
{
    template <typename> static constexpr int static_query_v = 123;
};
// Plain type with no property support at all.
struct object
{
};
int main()
{
    // Compile-time check: prop is not applicable to object, so can_query_v
    // must be false for both const and non-const object.
    static_assert(!boost::asio::can_query_v<object, prop>, "");
    static_assert(!boost::asio::can_query_v<const object, prop>, "");
}
|
using Cumulus.Configuration.BusBuilderConfiguration;
using Cumulus.Contracts;
using Cumulus.DryIoc;
using Cumulus.Extensions;
using Cumulus.Pipelines;
namespace Cumulus.Configuration
{
public class BusBuilder
{
    // Shared DryIoc container, reused across Build() calls; duplicate
    // registrations are skipped via IfAlreadyRegistered.Keep throughout.
    private static Container _container;

    // Entry point of the fluent configuration API.
    public Config Configure()
    {
        return new Config();
    }

    // Registers infrastructure, handler/message types and the three
    // pipelines into the container, then resolves the IBus singleton.
    internal static IBus Build(Config configuration)
    {
        var logger = configuration.Logger;
        logger.Debug("Constructing bus...");
        _container = _container ?? new Container();
        // Core services (registered by the configured instances' concrete types).
        _container.Register(configuration.Logger.GetType(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.Register(configuration.Serializer.GetType(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.Register(configuration.Compressor.GetType(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.Register(configuration.Transport.GetType(), Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        // Command pipeline: handlers are singletons; messages and hooks are transient.
        _container.RegisterMany(configuration.CommandPipeline.GetCommandHandlerTypes(), Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.CommandPipeline.GetCommandTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.CommandPipeline.GetPreHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.CommandPipeline.GetPostHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.CommandPipeline.GetErrorHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        // Event pipeline.
        _container.RegisterMany(configuration.EventPipeline.GetCompetingEventHandlerTypes(), Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.EventPipeline.GetMulticastEventHandlerTypes(), Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.EventPipeline.GetEventTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.EventPipeline.GetPreHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.EventPipeline.GetPostHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.EventPipeline.GetErrorHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        // Request/response pipeline.
        _container.RegisterMany(configuration.RequestResponsePipeline.GetRequestHandlerTypes(), Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.RequestResponsePipeline.GetMulticastRequestHandlerTypes(), Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.RequestResponsePipeline.GetRequestTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.RequestResponsePipeline.GetResponseTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.RequestResponsePipeline.GetPreHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.RequestResponsePipeline.GetPostHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.RegisterMany(configuration.RequestResponsePipeline.GetErrorHookTypes(), ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        // Pipeline implementations and the bus itself, all singletons.
        _container.Register<IPipeline, CommandPipeline>(Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.Register<IPipeline, EventPipeline>(Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.Register<IPipeline, RequestResponsePipeline>(Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        _container.Register<IBus, Bus>(Reuse.Singleton, ifAlreadyRegistered: IfAlreadyRegistered.Keep);
        return _container.Resolve<IBus>();
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = companiesResponse;
// Normalize the compact wire payload into descriptive field names:
// data.hr -> informationDate, data.e -> companiesByOperationArea,
// and for each area: a -> operationCode, e -> companies
// (a -> operationAreaCode, c -> referenceCode, n -> name).
function companiesResponse(data) {
  var mapCompany = function (company) {
    return {
      operationAreaCode: company.a,
      referenceCode: company.c,
      name: company.n
    };
  };

  var mapArea = function (area) {
    return {
      operationCode: area.a,
      companies: area.e.map(mapCompany)
    };
  };

  return {
    informationDate: data.hr,
    companiesByOperationArea: data.e.map(mapArea)
  };
}
|
#include "../juwnd.h"
#pragma usexpstyle
// Global source / destination resource folders used by setPng()/setSrcPng();
// reassigned in WINMAIN before use.
LPCWSTR src = L"D:/Develop/android/English/res/drawable-mdpi";
LPCWSTR dst = L"D:/Develop/android/English/res/drawable-hdpi";
// Recolor one PNG: load `name` from the global `src` folder, overwrite every
// pixel with non-zero alpha to RGB 0xff/0x99/0x33, and save it under the same
// name into the global `dst` folder.
void setPng(LPCWSTR name){
    ju::LocalString file = src;
    ju::FPLinkPath(file,name);
    ju::Bitmap png;
    if(!png.Load(file)){
        ju::LogfE(L"load failed: %s",file.Handle());
        return;
    }
    ju::Memory<byte> pix;
    // Requests 32-bit pixel data; return value is used as the image height.
    int h = png.GetPixelData(&pix,32);
    // NOTE(review): both loops run to `h` and the row stride is h*4, which
    // only holds for square images (width == height) — confirm inputs.
    for(int i=0;i<h;i++){
        for(int j=0;j<h;j++){
            int index = i*h*4 + j*4;
            if(pix[index+3]){  // alpha channel non-zero: pixel is visible
                pix[index] = 0xff;
                pix[index+1] = 0x99;
                pix[index+2] = 0x33;
            }
        }
    }
    png.SetPixelData(&pix);
    file = dst;
    ju::FPLinkPath(file,name);
    png.Save(file);
    ju::LogfI(L"success: %s",name);
}
// FileSearch callback: recolor each file found in the search.
void __stdcall onSearch(ju::ListData* fsd){
    setPng(fsd->data->cFileName);
}
// Walk the global `src` folder and recolor every file via onSearch/setPng.
void setSrcPng(){
    ju::FileSearch search;
    search.OnList.BindStd(&onSearch);
    search.Search(src);
}
// Generate the differently sized launcher icons Android needs.
// `src` is the source image file; `dst` is the Android project root.
// (Refactored: four copy-pasted resize/save blocks collapsed into one
// data-driven loop over the dpi buckets.)
void createAndroidIcon(LPCWSTR src,LPCWSTR dst){
    ju::Bitmap bmp;
    if(!bmp.Load(src)) return;
    ju::Twin sz = bmp.Size();
    ju::LogfI(L"³É¹¦¼ÓÔØÔ´Í¼Ïñ: %d X %d",sz.x,sz.y);
    // Icon edge length and target path per dpi bucket.
    static const int sizes[4] = {144, 96, 72, 48};
    static const LPCWSTR paths[4] = {
        L"res\\drawable-xxhdpi\\ic_launcher.png",
        L"res\\drawable-xhdpi\\ic_launcher.png",
        L"res\\drawable-hdpi\\ic_launcher.png",
        L"res\\drawable-mdpi\\ic_launcher.png",
    };
    ju::Bitmap newBmp;
    ju::String dstfn;
    for(int k=0;k<4;k++){
        int w = sizes[k];
        newBmp = bmp.GetClip();
        newBmp.Resize(w,w);
        dstfn = dst;
        ju::FPLinkPath(dstfn,paths[k]);
        if(newBmp.Save(dstfn)) ju::LogfI(L"±£´æ³É¹¦£º%d X %d",w,w);
    }
}
// Entry point: show the log window, point src/dst at the SportsCounter
// resources, generate the JuQr launcher icons, then run the message loop.
WINMAIN{
    ju::LogShowWindow();
    // NOTE(review): src and dst both point at drawable-hdpi here; this only
    // matters if setSrcPng()/setPng() below are re-enabled — confirm intent.
    src = L"D:\\Develop\\android\\SportsCounter\\res\\drawable-hdpi";
    dst = L"D:\\Develop\\android\\SportsCounter\\res\\drawable-hdpi";
    //setSrcPng();
    //setPng(L"ic_help_48.png");
    createAndroidIcon(L"D:\\Develop\\android\\JuQr\\png\\qr.png",L"D:\\Develop\\android\\JuQr");
    ju::MsgLoop::Start();
}
|
/*
* Copyright 2002-2015 Drew Noakes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.lang;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
/**
* @author Drew Noakes https://drewnoakes.com
*/
/**
 * @author Drew Noakes https://drewnoakes.com
 */
public class ByteTrieTest
{
    @Test
    public void testBasics()
    {
        final String[] words = {"HELLO", "HELLO WORLD", "HERBERT"};

        ByteTrie<String> trie = new ByteTrie<String>();
        for (String word : words) {
            trie.addPath(word, word.getBytes());
        }

        // Stored entries come back as the very same object instance.
        for (String word : words) {
            assertSame(word, trie.find(word.getBytes()));
        }

        // Unknown inputs and strict prefixes of entries are not found...
        assertNull(trie.find("Not Included".getBytes()));
        assertNull(trie.find("HELL".getBytes()));
        // ...but the longest stored prefix of a longer input is returned.
        assertEquals("HELLO", trie.find("HELLO MUM".getBytes()));

        assertEquals("HELLO WORLD".length(), trie.getMaxDepth());

        // Once a default value is set, misses return it instead of null.
        trie.setDefaultValue("DEFAULT");
        assertEquals("DEFAULT", trie.find("Also Not Included".getBytes()));
    }
}
|
from django.shortcuts import render
from rest_framework.decorators import api_view
from rest_framework.response import Response
from ..userprofile.models import Application
from .serializers import AuthenticationSessionSerializer
from .models import AuthenticationSession
from rest_framework import generics
class AuthenticationSessionList(generics.ListAPIView):
    """Read-only list endpoint for all authentication sessions."""
    queryset = AuthenticationSession.objects.all()
    serializer_class = AuthenticationSessionSerializer
class CreateAuthenticationSession(generics.CreateAPIView):
    """POST endpoint that creates a new authentication session."""
    queryset = AuthenticationSession.objects.all()
    serializer_class = AuthenticationSessionSerializer
@api_view(['GET'])
def verify_authentication_session(request, external_session_id):
    """Verify a single-use authentication session.

    Returns 403 for anonymous users or users without access to the session's
    application, 404 for unknown sessions, 429 for sessions that were already
    consumed (flag 2 or 3), and 200 on first successful verification — which
    also marks the session as consumed (flag = 2).
    """
    user = request.user
    response = Response()
    if not user.is_authenticated():
        response.status_code = 403
        return response
    if not AuthenticationSession.objects.filter(external_session_id=external_session_id).exists():
        response.status_code = 404
        return response
    authentication_session = AuthenticationSession.objects.get(external_session_id=external_session_id)
    application = authentication_session.application
    if not application.has_access(user, application.get_user_uri()):
        response.status_code = 403
        return response
    if authentication_session.flag == 2 or authentication_session.flag == 3:
        # Already consumed: reject. (Bug fix: the original fell through and
        # unconditionally overwrote the 429 with a 200 before returning.)
        response.status_code = 429
        return response
    # First successful verification: mark the session as consumed.
    authentication_session.flag = 2
    authentication_session.save()
    response.status_code = 200
    return response
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
from decimal import Decimal, InvalidOperation
from mantid import api
from mantid.api import ITableWorkspace
class InstrumentWidgetModel(object):
    """
    The model holds the muon context and interacts with it, only able to modify the pre-processing parts of each
    run.
    The model should not take care of processing data, it should only interact with and modify the muon context data
    so that when processing is done from elsewhere the parameters of the pre-processing are up-to-date with the
    GUI.
    """

    def __init__(self, context=None):
        self._data = context.data_context
        self._context = context
        # Seed GUI defaults for rebinning and double-pulse handling.
        self._context.gui_context['RebinType'] = 'None'
        self._context.gui_context['DoublePulseTime'] = 0.33
        self._context.gui_context['DoublePulseEnabled'] = False

    def clear_data(self):
        """When e.g. instrument changed"""
        self._data.clear()

    def get_file_time_zero(self):
        """Time zero as stored in the currently loaded data file."""
        return self._data.current_data["TimeZero"]

    def get_user_time_zero(self):
        """User-set time zero; defaults to (and records) the file value."""
        if "TimeZero" in self._context.gui_context.keys():
            time_zero = self._context.gui_context["TimeZero"]
        else:
            # default to loaded value, keep a record of the data value
            self._context.gui_context["TimeZero"] = self._data.current_data["TimeZero"]
            time_zero = self._context.gui_context["TimeZero"]
        return time_zero

    def set_time_zero_from_file(self, state):
        self._context.gui_context.update_and_send_signal(TimeZeroFromFile=state)

    def set_first_good_data_source(self, state):
        self._context.gui_context.update_and_send_signal(FirstGoodDataFromFile=state)

    def set_last_good_data_source(self, state):
        self._context.gui_context.update_and_send_signal(LastGoodDataFromFile=state)

    def get_file_first_good_data(self):
        """First-good-data value as stored in the loaded data file."""
        return self._data.current_data["FirstGoodData"]

    def get_user_first_good_data(self):
        """User-set first good data; defaults to (and records) the file value."""
        if "FirstGoodData" in self._context.gui_context.keys():
            first_good_data = self._context.gui_context["FirstGoodData"]
        else:
            # Default to loaded value
            self._context.gui_context["FirstGoodData"] = self._data.current_data["FirstGoodData"]
            first_good_data = self._context.gui_context["FirstGoodData"]
        return first_good_data

    def get_file_last_good_data(self):
        """Last good data of the first current run, or 0.0 if nothing loaded."""
        if self._data.current_runs:
            run = self._data.current_runs[0]
            return self._context.last_good_data(run)
        else:
            return 0.0

    def get_last_good_data(self):
        """User-set last good data, or 0.0 if none recorded yet."""
        if "LastGoodData" in self._context.gui_context.keys():
            return self._context.gui_context["LastGoodData"]
        else:
            return 0.0

    def set_user_time_zero(self, time_zero):
        self._context.gui_context.update_and_send_signal(TimeZero=time_zero)

    def set_user_first_good_data(self, first_good_data):
        self._context.gui_context.update_and_send_signal(FirstGoodData=first_good_data)

    def set_user_last_good_data(self, last_good_data):
        self._context.gui_context.update_and_send_signal(LastGoodData=last_good_data)

    def set_double_pulse_time(self, double_pulse_time):
        self._context.gui_context.update_and_send_non_calculation_signal(DoublePulseTime=double_pulse_time)

    def set_double_pulse_enabled(self, enabled):
        self._context.gui_context.update_and_send_non_calculation_signal(DoublePulseEnabled=enabled)

    def add_fixed_binning(self, fixed_bin_size):
        self._context.gui_context.update_and_send_signal(RebinFixed=str(fixed_bin_size))

    def add_variable_binning(self, rebin_params):
        self._context.gui_context.update_and_send_signal(RebinVariable=str(rebin_params))

    def get_variable_binning(self):
        if 'RebinVariable' in self._context.gui_context:
            return self._context.gui_context['RebinVariable']
        else:
            return ''

    def update_binning_type(self, rebin_type):
        self._context.gui_context.update_and_send_signal(RebinType=rebin_type)

    def validate_variable_rebin_string(self, variable_rebin_string):
        """Validate a comma-separated variable-rebin string.

        Accepted forms: a single bin width, a "start,stop" pair, or chained
        "start,step,stop" triplets where each stop doubles as the next start.
        A non-positive step is treated as logarithmic binning and accepted
        without further checks. Returns (is_valid, error_message).
        """
        variable_rebin_list = variable_rebin_string.split(',')
        try:
            # Decimal avoids float round-off in the bin-alignment modulo below.
            variable_rebin_list = [Decimal(x) for x in variable_rebin_list]
        except (ValueError, InvalidOperation):
            return (False, 'Rebin entries must be numbers')
        if len(variable_rebin_list) == 0:
            # Defensive: str.split(',') never returns an empty list.
            return (False, 'Rebin list must be non-empty')
        if len(variable_rebin_list) == 1:
            return (True, '')
        if len(variable_rebin_list) == 2:
            if variable_rebin_list[1] > variable_rebin_list[0]:
                return (True, '')
            else:
                return (False, 'End of range must be greater than start of range')
        while len(variable_rebin_list) >= 3:
            # We don't do any additional checking of logarithmic binning so just return true in this instance
            if variable_rebin_list[1] <= 0:
                return (True, '')
            if (variable_rebin_list[2] - variable_rebin_list[0]) % variable_rebin_list[1] != 0:
                return (False, 'Step and bin boundaries must line up')
            variable_rebin_list = variable_rebin_list[2:]
        if len(variable_rebin_list) == 1:
            return (True, '')
        else:
            # Typo fix: message previously read "entires".
            return (False, 'Variable rebin string must have 2 or an odd number of entries')
|
"""
Support for Minut Point.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.point/
"""
import logging
from homeassistant.components.point import MinutPointEntity
from homeassistant.components.point.const import (
DOMAIN as POINT_DOMAIN, POINT_DISCOVERY_NEW)
from homeassistant.components.sensor import DOMAIN
from homeassistant.const import (
DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_PRESSURE, DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.dt import parse_datetime
_LOGGER = logging.getLogger(__name__)

# Sound level has no core device class constant, so define one locally.
DEVICE_CLASS_SOUND = 'sound_level'

# Per device class: (icon override or None for the class default,
#                    rounding precision for the state, unit of measurement).
SENSOR_TYPES = {
    DEVICE_CLASS_TEMPERATURE: (None, 1, TEMP_CELSIUS),
    DEVICE_CLASS_PRESSURE: (None, 0, 'hPa'),
    DEVICE_CLASS_HUMIDITY: (None, 1, '%'),
    DEVICE_CLASS_SOUND: ('mdi:ear-hearing', 1, 'dBa'),
}
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up a Point's sensors based on a config entry."""
    async def async_discover_sensor(device_id):
        """Discover and add a discovered sensor."""
        client = hass.data[POINT_DOMAIN][config_entry.entry_id]
        # One entity per sensor type; True requests an immediate first update.
        async_add_entities((MinutPointSensor(client, device_id, sensor_type)
                            for sensor_type in SENSOR_TYPES), True)

    # Subscribe to new-device discovery signals for this platform.
    async_dispatcher_connect(
        hass, POINT_DISCOVERY_NEW.format(DOMAIN, POINT_DOMAIN),
        async_discover_sensor)
class MinutPointSensor(MinutPointEntity):
    """The platform class required by Home Assistant."""

    def __init__(self, point_client, device_id, device_class):
        """Initialize the entity."""
        super().__init__(point_client, device_id, device_class)
        # (icon, precision, unit) tuple for this device class.
        self._device_prop = SENSOR_TYPES[device_class]

    @callback
    def _update_callback(self):
        """Update the value of the sensor."""
        if self.is_updated:
            _LOGGER.debug('Update sensor value for %s', self)
            # Pull the latest reading for our device class from the device.
            self._value = self.device.sensor(self.device_class)
            self._updated = parse_datetime(self.device.last_update)
        self.async_schedule_update_ha_state()

    @property
    def icon(self):
        """Return the icon representation."""
        return self._device_prop[0]

    @property
    def state(self):
        """Return the state of the sensor, rounded per device class."""
        return round(self.value, self._device_prop[1])

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._device_prop[2]
|
# -*- coding: utf-8 -*-
from unittest import TestCase, TestSuite, TextTestRunner
from webpowerpy.client import *
# Test credentials come from a local, uncommitted config module; fall back to
# empty strings so this module still imports without it (tests then fail at
# connect time instead of import time). Removed the dead `pass` that followed
# the fallback assignment.
try:
    from webpower.tests.test_conf import WSDL, USERNAME, PASSWORD
except ImportError:
    WSDL, PASSWORD, USERNAME = '', '', ''
class WebpowerTest(TestCase):
    """Integration tests for WebPowerClient against a live WebPower campaign.

    Requires valid WSDL/USERNAME/PASSWORD (see test_conf import above) and a
    reachable SOAP endpoint; these are not isolated unit tests.
    """
    # Campaign and recipient groups used by all tests.
    campaign_id = 178
    groups = [83]

    def setUp(self):
        self.client = WebPowerClient(WSDL, USERNAME, PASSWORD)

    def addRecipient_test(self,):
        # Adds a single recipient and checks the call did not error.
        print '*addRecipient_test:'
        user_data = {
            'email': u'[email protected]',
            'lang': u'es',
            'nombre': u'Pasqual',
            'apellidos': u'Guerrero Menéndez',
        }
        result = self.client.addRecipient(self.campaign_id, self.groups, user_data)
        print '\tResult status:%s Id:%d' % (result.status, result.id)
        self.assertNotEqual(result.status, 'ERROR')
        self.assertIsNotNone(result.id)

    def addRecipients_test(self):
        # Adds several recipients in one batch call.
        print '*addRecipients_test:'
        user_data1 = {
            'email': u'[email protected]',
            'lang': u'es',
            'nombre': u'Pasqual',
            'apellidos': u'Guerrero Menéndez',
        }
        user_data2 = {
            'email': u'[email protected]',
            'lang': u'es',
            'nombre': u'Pasqual2',
            'apellidos': u'Guerrero Menéndez',
        }
        user_data3 = {
            'email': u'[email protected]',
            'lang': u'es',
            'nombre': u'Pasqual3',
            'apellidos': u'Guerrero Menéndez',
        }
        users = [user_data1, user_data2, user_data3]
        result = self.client.addRecipients(self.campaign_id, self.groups, users)
        print '\tResult status:%s' % (result.status)
        self.assertNotEqual(result.status, 'ERROR')

    def editRecipient_test(self):
        # Edits an existing recipient in place.
        # NOTE(review): recipient_id 4 is hard-coded and must exist in the
        # campaign for this test to pass — confirm the fixture.
        print '*editRecipient_test:'
        user_data1 = {
            'email': u'[email protected]',
            'lang': u'es',
            'nombre': u'Sergio',
            'apellidos': u'Sánchez',
        }
        recipient_id = 4
        result = self.client.editRecipient(self.campaign_id, recipient_id, user_data1)
        print '\tResult status:%s Id:%d' % (result.status, result.id)
        self.assertNotEqual(result.status, 'ERROR')

    def getRecipientFields_test(self):
        # Smoke test: fetch the recipient field definitions; no assertions.
        print '*getRecipientFields_test:'
        result = self.client.getRecipientFields(178)
        #print '\tResult status:%s' % (result)
def suite():
    # Build an explicit suite: the method names don't follow the default
    # "test*" pattern, so auto-discovery would not pick them up.
    tests = ['addRecipient_test', 'addRecipients_test', 'editRecipient_test',
             'getRecipientFields_test']
    return TestSuite(map(WebpowerTest, tests))
if __name__ == '__main__':
    # Run the hand-built suite with the plain text runner.
    runner = TextTestRunner()
    test_suite = suite()
    runner.run(test_suite)
|
import {Component} from '@angular/core';
// Placeholder component for the hero-management route.
@Component({
  template: `
    <p>manage your heroes here</p>
  `
  // Typo fix: displayed text previously read "mange your heroes here".
})
export class ManageHeroesComponent {
}
|
package org.yflyud.projects.websearch.engine.config;
/**
 * Checked exception signalling an invalid or unreadable engine configuration.
 * Provides the four conventional constructors mirroring {@link Exception}.
 */
public class ConfigurationException extends Exception {
    private static final long serialVersionUID = 7141646545440764761L;

    public ConfigurationException() {
    }

    public ConfigurationException(String paramString) {
        super(paramString);
    }

    public ConfigurationException(Throwable paramThrowable) {
        super(paramThrowable);
    }

    public ConfigurationException(String paramString, Throwable paramThrowable) {
        super(paramString, paramThrowable);
    }
}
|
import avango
import avango.gua
import avango.script
from importlib import machinery
def create_new_script(json, app, filepath):
    """Load a script class from a serialized description and register it.

    json     -- dict with 'module', 'name', 'field_connections' and 'values'
    app      -- application object receiving the new field container
    filepath -- project root; the module file is read from <filepath>/tmp/
    """
    # SECURITY NOTE(review): module and class names from `json` are passed to
    # exec()/eval() — only load descriptions from trusted project files.
    module = json['module']
    classname = json['name']
    filepath = filepath + "tmp/" + module + ".py"
    # Import the module directly from its file path.
    loader = machinery.SourceFileLoader(module, filepath)
    loader.load_module()
    exec("from " + module + " import " + classname)
    new_script = eval(classname + "()", globals(), locals())
    new_script.Name.value = classname
    app.add_field_container(new_script)
    # Re-establish the serialized field connections (resolved later by app).
    for connection in json["field_connections"]:
        app.plan_field_connection(
            new_script.Name.value,
            connection["from_field"],
            connection["to_node"],
            connection["to_field"]
        )
    # Restore the serialized field values.
    for field_name in json["values"]:
        field = new_script.get_field(field_name)
        field.value = load_field_value(field, json["values"][field_name])
def load_field_value(field, json_value):
    """Convert a JSON value to the native value type of *field*.

    Scalars pass through; vectors and matrices are rebuilt. Returns None
    (implicitly) for unrecognized field types.
    """
    if isinstance(field, avango.SFFloat):
        return json_value
    elif isinstance(field, avango.SFBool):
        return json_value
    elif isinstance(field, avango.SFInt):
        return json_value
    elif isinstance(field, avango.gua.SFVec3):
        return load_vec3_value(json_value)
    elif isinstance(field, avango.gua.SFMatrix4):
        return load_mat4_value(json_value)
def load_vec3_value(json_value):
    """Rebuild an avango Vec3 from a 3-element JSON list [x, y, z]."""
    return avango.gua.Vec3(json_value[0], json_value[1], json_value[2])
def load_mat4_value(json_value):
    """Rebuild a 4x4 avango matrix from a flat row-major list of 16 numbers.

    Bug fix: the original iterated range(15) and never wrote element 15
    (row 3, column 3), silently leaving it at its identity value.
    """
    matrix = avango.gua.make_identity_mat()
    for element in range(16):
        matrix.set_element(
            int(element / 4),  # row index
            element % 4,       # column index
            json_value[element],
        )
    return matrix
|
from django.conf.urls import url
from features.views import FeatureListView, FeatureSamplesView, DatasetListView, \
FeatureHistogramView, FeatureSlicesView, TargetDetailView, DatasetViewUploadView, \
ExperimentListView, FeatureRelevancyResultsView, ExperimentDetailView, TargetRedundancyResults, \
ConditionalDistributionsView, FeatureDensityView, FeatureSpectrogramView, FixedFeatureSetHicsView, \
CalculationListView, CurrentExperimentView, SetCurrentExperimentView
urlpatterns = [
    # Experiments
    url(r'experiments$', ExperimentListView.as_view(), name='experiment-list'),
    url(r'experiments/current$', CurrentExperimentView.as_view(), name='current-experiment-detail'),
    url(r'experiments/current/(?P<experiment_id>[a-zA-Z0-9-]+)$', SetCurrentExperimentView.as_view(),
        name='set-current-experiment'),
    url(r'experiments/(?P<experiment_id>[a-zA-Z0-9-]+)$', ExperimentDetailView.as_view(),
        name='experiment-detail'),
    url(r'experiments/(?P<experiment_id>[a-zA-Z0-9-]+)/target$', TargetDetailView.as_view(),
        name='experiment-targets-detail'),
    # Datasets
    url(r'datasets$', DatasetListView.as_view(), name='dataset-list'),
    url(r'datasets/upload$', DatasetViewUploadView.as_view(), name='dataset-upload'),
    url(r'datasets/(?P<dataset_id>[a-zA-Z0-9-]+)/features$', FeatureListView.as_view(),
        name='dataset-features-list'),
    # Features
    url(r'features/(?P<feature_id>[a-zA-Z0-9-]+)/samples(?:/(?P<max_samples>[0-9]+))?$', FeatureSamplesView.as_view(),
        name='feature-samples'),
    # Consistency fix: added the missing `$` anchor (all sibling routes are
    # anchored; without it this pattern also matched longer paths).
    url(r'features/(?P<feature_id>[a-zA-Z0-9-]+)/spectrogram$', FeatureSpectrogramView.as_view(),
        name='feature-spectrogram'),
    url(r'features/(?P<feature_id>[a-zA-Z0-9-]+)/histogram$', FeatureHistogramView.as_view(),
        name='feature-histogram'),
    url(r'features/(?P<feature_id>[a-zA-Z0-9-]+)/density/(?P<target_id>[a-zA-Z0-9-]+)$',
        FeatureDensityView.as_view(), name='feature-density'),
    # Results
    url(r'targets/(?P<target_id>[a-zA-Z0-9-]+)/slices$',
        FeatureSlicesView.as_view(), name='target-feature-slices'),
    url(r'targets/(?P<target_id>[a-zA-Z0-9-]+)/relevancy_results$',
        FeatureRelevancyResultsView.as_view(),
        name='target-feature-relevancy_results'),
    url(r'targets/(?P<target_id>[a-zA-Z0-9-]+)/redundancy_results$',
        TargetRedundancyResults.as_view(),
        name='feature-redundancy_results'),
    # Consistency fix: added the missing `$` anchor here as well.
    url(r'targets/(?P<target_id>[a-zA-Z0-9-]+)/hics$',
        FixedFeatureSetHicsView.as_view(),
        name='fixed-feature-set-hics'),
    # Distributions
    url(r'targets/(?P<target_id>[a-zA-Z0-9-]+)/distributions(?:/(?P<max_samples>[0-9]+))?$',
        ConditionalDistributionsView.as_view(), name='target-conditional-distributions'),
    # Calculations
    url(r'calculations$', CalculationListView.as_view(), name='calculation-list'),
]
|
/**
 * Animate watch/unwatch links to use asynchronous API requests to
 * watch pages, rather than navigating to a different URI.
 *
 * @class mw.page.watch.ajax
 */
( function ( mw, $ ) {
	// The name of the page to watch or unwatch.
	// NOTE(review): wgRelevantPageName (rather than wgPageName) presumably
	// keeps (un)watch working from special pages tied to another page.
	var title = mw.config.get( 'wgRelevantPageName' );

	/**
	 * Update the link text, link href attribute and (if applicable)
	 * "loading" class.
	 *
	 * @param {jQuery} $link Anchor tag of (un)watch link
	 * @param {string} action One of 'watch', 'unwatch'
	 * @param {string} [state="idle"] 'idle' or 'loading'. Default is 'idle'
	 */
function updateWatchLink( $link, action, state ) {
var msgKey, $li, otherAction;
// A valid but empty jQuery object shouldn't throw a TypeError
if ( !$link.length ) {
return;
}
// Invalid actions shouldn't silently turn the page in an unrecoverable state
if ( action !== 'watch' && action !== 'unwatch' ) {
throw new Error( 'Invalid action' );
}
// message keys 'watch', 'watching', 'unwatch' or 'unwatching'.
msgKey = state === 'loading' ? action + 'ing' : action;
otherAction = action === 'watch' ? 'unwatch' : 'watch';
$li = $link.closest( 'li' );
// Trigger a 'watchpage' event for this List item.
// Announce the otherAction value as the first param.
// Used to monitor the state of watch link.
// TODO: Revise when system wide hooks are implemented
if ( state === undefined ) {
$li.trigger( 'watchpage.mw', otherAction );
}
$link
.text( mw.msg( msgKey ) )
.attr( 'title', mw.msg( 'tooltip-ca-' + action ) )
.updateTooltipAccessKeys()
.attr( 'href', mw.util.wikiScript() + '?' + $.param( {
title: title,
action: action
} )
);
// Most common ID style
if ( $li.prop( 'id' ) === 'ca-' + otherAction ) {
$li.prop( 'id', 'ca-' + action );
}
if ( state === 'loading' ) {
$link.addClass( 'loading' );
} else {
$link.removeClass( 'loading' );
}
}
/**
* TODO: This should be moved somewhere more accessible.
*
* @private
* @param {string} url
* @return {string} The extracted action, defaults to 'view'
*/
function mwUriGetAction( url ) {
var action, actionPaths, key, i, m, parts;
// TODO: Does MediaWiki give action path or query param
// precedence? If the former, move this to the bottom
action = mw.util.getParamValue( 'action', url );
if ( action !== null ) {
return action;
}
actionPaths = mw.config.get( 'wgActionPaths' );
for ( key in actionPaths ) {
if ( actionPaths.hasOwnProperty( key ) ) {
parts = actionPaths[ key ].split( '$1' );
for ( i = 0; i < parts.length; i++ ) {
parts[ i ] = mw.RegExp.escape( parts[ i ] );
}
m = new RegExp( parts.join( '(.+)' ) ).exec( url );
if ( m && m[ 1 ] ) {
return key;
}
}
}
return 'view';
}
	// Expose public methods
	mw.page.watch = {
		updateWatchLink: updateWatchLink
	};

	$( function () {
		// Candidate (un)watch links in tabs / personal tools.
		var $links = $( '.mw-watchlink a, a.mw-watchlink, ' +
			'#ca-watch a, #ca-unwatch a, #mw-unwatch-link1, ' +
			'#mw-unwatch-link2, #mw-watch-link2, #mw-watch-link1' );
		// Allowing people to add inline animated links is a little scary
		$links = $links.filter( ':not( #bodyContent *, #content * )' );

		$links.click( function ( e ) {
			var action, api, $link;

			// Start preloading the notification module (normally loaded by mw.notify())
			mw.loader.load( 'mediawiki.notification' );

			action = mwUriGetAction( this.href );

			if ( action !== 'watch' && action !== 'unwatch' ) {
				// Could not extract target action from link url,
				// let native browsing handle it further
				return true;
			}
			e.preventDefault();
			e.stopPropagation();

			$link = $( this );

			// Ignore re-clicks while a request is already in flight.
			if ( $link.hasClass( 'loading' ) ) {
				return;
			}

			updateWatchLink( $link, action, 'loading' );

			api = new mw.Api();
			api[ action ]( title )
				.done( function ( watchResponse ) {
					var otherAction = action === 'watch' ? 'unwatch' : 'watch';

					mw.notify( $.parseHTML( watchResponse.message ), {
						tag: 'watch-self'
					} );

					// Set link to opposite
					updateWatchLink( $link, otherAction );

					// Update the "Watch this page" checkbox on action=edit when the
					// page is watched or unwatched via the tab (bug 12395).
					$( '#wpWatchthis' ).prop( 'checked', watchResponse.watched !== undefined );
				} )
				.fail( function () {
					var cleanTitle, msg, link;

					// Reset link to non-loading mode
					updateWatchLink( $link, action );

					// Format error message
					cleanTitle = title.replace( /_/g, ' ' );
					link = mw.html.element(
						'a', {
							href: mw.util.getUrl( title ),
							title: cleanTitle
						}, cleanTitle
					);
					msg = mw.message( 'watcherrortext', link );

					// Report to user about the error
					mw.notify( msg, {
						tag: 'watch-self',
						type: 'error'
					} );
				} );
		} );
	} );

}( mediaWiki, jQuery ) );
|
import { LineResults } from "../LineResults";
import { CommandNames } from "../Names/CommandNames";
import { Command } from "./Command";
import { CommandMetadata } from "./Metadata/CommandMetadata";
/**
 * Ends a group of standalone function declarations.
 */
export class StandaloneFunctionsDeclareEndCommand extends Command {
    /**
     * Metadata on the command.
     */
    private static metadata: CommandMetadata = new CommandMetadata(CommandNames.StandaloneFunctionsDeclareEnd)
        .withDescription("Ends a group of standalone function declarations")
        .withIndentation([-1]);

    /**
     * @returns Metadata on the command.
     */
    public getMetadata(): CommandMetadata {
        return StandaloneFunctionsDeclareEndCommand.metadata;
    }

    /**
     * Renders the command for a language with the given parameters.
     *
     * @param parameters   The command's name, followed by any parameters.
     * @returns Line(s) of code in the language.
     */
    public render(parameters: string[]): LineResults {
        // Languages that wrap standalone functions inside a static class need
        // that class closed here; all other languages emit nothing.
        if (this.language.syntax.standaloneFunctions.withinStaticClass) {
            return this.context.convertParsed([CommandNames.ClassEnd]);
        }

        return new LineResults([]);
    }
}
|
using Recipes.ArbitraryTableRead;
using System.Collections.Generic;
using System.Data;
using Tortuga.Chain;
namespace Recipes.Chain.ArbitraryTableRead
{
public class ArbitraryTableReadScenario : IArbitraryTableReadScenario<DataTable>
{
readonly SqlServerDataSource m_DataSource;
public ArbitraryTableReadScenario(SqlServerDataSource dataSource)
{
m_DataSource = dataSource;
}
public DataTable GetAll(string schemaName, string tableName)
{
return m_DataSource.From(schemaName + "." + tableName).ToDataTable().Execute();
}
}
public class ArbitraryTableReadScenario2 : IArbitraryTableReadScenario<IReadOnlyList<IReadOnlyDictionary<string, object?>>>
{
readonly SqlServerDataSource m_DataSource;
public ArbitraryTableReadScenario2(SqlServerDataSource dataSource)
{
m_DataSource = dataSource;
}
//This version returns a lightweight object known as a "Table". It is an alternative to .NET's DataTable.
public IReadOnlyList<IReadOnlyDictionary<string, object?>> GetAll(string schemaName, string tableName)
{
return m_DataSource.From(schemaName + "." + tableName).ToTable().Execute().Rows;
}
}
}
|
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace Territorium.Tests.Kademlia
{
public static class EnumerableExtensions
{
public static IEnumerable<IEnumerable<T>> Partition<T>
(this IEnumerable<T> source, int size)
{
T[] array = null;
var count = 0;
foreach (var item in source)
{
if (array == null)
{
array = new T[size];
}
array[count] = item;
count++;
if (count == size)
{
yield return new ReadOnlyCollection<T>(array);
array = null;
count = 0;
}
}
if (array != null)
{
Array.Resize(ref array, count);
yield return new ReadOnlyCollection<T>(array);
}
}
}
}
|
import {Observable} from '../../Observable';
import {startWith, StartWithSignature} from '../../operator/startWith';
// Attach the startWith operator to Observable.prototype so that importing
// this module for its side effects makes `observable.startWith(...)` usable.
Observable.prototype.startWith = startWith;

// Module augmentation: tell the type system about the method added above.
declare module '../../Observable' {
  interface Observable<T> {
    startWith: StartWithSignature<T>;
  }
}
|
#
# Copyright 2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import pytest
from vdsm.common import time
class FakeTime(object):
    """Deterministic stand-in for a monotonic clock.

    Tests advance the clock by mutating the ``time`` attribute directly;
    calling the instance returns the current value, mimicking
    ``monotonic_time()``.
    """

    def __init__(self, value=0):
        # Current fake timestamp, fully controlled by the test.
        self.time = value

    def __call__(self):
        return self.time
@pytest.fixture
def fake_time(monkeypatch):
    """Replace vdsm's monotonic_time with a FakeTime the test can advance."""
    fake_time = FakeTime()
    monkeypatch.setattr(time, "monotonic_time", fake_time)
    return fake_time
class TestClock:
    """Tests for vdsm.common.time.Clock, driven by the fake_time fixture."""

    def test_no_timers(self):
        c = time.Clock()
        assert str(c) == "<Clock()>"

    # Correct usage

    def test_start_and_stop(self, fake_time):
        c = time.Clock()
        c.start("total")
        c.start("step1")
        fake_time.time += 3
        c.stop("step1")
        c.start("step2")
        fake_time.time += 4
        c.stop("step2")
        c.stop("total")
        assert str(c) == "<Clock(total=7.00, step1=3.00, step2=4.00)>"

    def test_running(self, fake_time):
        # A still-running timer is rendered with a trailing "*".
        c = time.Clock()
        c.start("foo")
        fake_time.time += 3
        c.start("bar")
        fake_time.time += 4
        c.stop("foo")
        assert str(c) == "<Clock(foo=7.00, bar=4.00*)>"

    def test_run(self, fake_time):
        # run() is the context-manager form of start()/stop().
        c = time.Clock()
        with c.run("foo"):
            fake_time.time += 3
        assert str(c) == "<Clock(foo=3.00)>"

    def test_run_nested(self, fake_time):
        c = time.Clock()
        with c.run("outer"):
            fake_time.time += 3
            with c.run("inner"):
                fake_time.time += 4
        assert str(c) == "<Clock(outer=7.00, inner=4.00)>"

    # Incorrect usage

    def test_start_started_clock(self):
        c = time.Clock()
        c.start("started")
        with pytest.raises(RuntimeError):
            c.start("started")

    def test_stop_stooped_clock(self):
        # Stopping an already-stopped timer must fail.
        c = time.Clock()
        c.start("stopped")
        c.stop("stopped")
        with pytest.raises(RuntimeError):
            c.stop("stopped")

    def test_stop_missing_clock(self):
        c = time.Clock()
        with pytest.raises(RuntimeError):
            c.stop("foo")

    def test_run_started(self):
        c = time.Clock()
        c.start("started")
        with pytest.raises(RuntimeError):
            with c.run("started"):
                pass

    def test_run_stopped(self):
        c = time.Clock()
        with c.run("stopped"):
            pass
        with pytest.raises(RuntimeError):
            with c.run("stopped"):
                pass
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le masque <canal>."""
from primaires.interpreteur.masque.masque import Masque
from primaires.interpreteur.masque.fonctions import *
from primaires.interpreteur.masque.exceptions.erreur_validation \
import ErreurValidation
class Canal(Masque):

    """Mask <canal>.

    Expects a communication channel name as its parameter.

    """

    nom = "canal"
    nom_complet = "canal"

    def init(self):
        """Initialize the mask's attributes."""
        self.nom_canal = ""
        self.canal = None
        # Set to False by valider() when the named channel does not exist.
        self.canal_existe = True

    def repartir(self, personnage, masques, commande):
        """Consume the channel name (first word) from the command string."""
        nom_canal = liste_vers_chaine(commande)

        if not nom_canal:
            raise ErreurValidation( \
                "Précisez le nom d'un canal.")

        # Only the first word is the channel name; the rest stays in commande.
        nom_canal = nom_canal.split(" ")[0]
        self.a_interpreter = nom_canal
        commande[:] = commande[len(nom_canal):]
        masques.append(self)
        return True

    def valider(self, personnage, dic_masques):
        """Resolve the stored name into a channel object.

        Always returns True; callers are expected to check canal_existe
        to find out whether the channel was actually found.
        """
        Masque.valider(self, personnage, dic_masques)
        nom_canal = self.a_interpreter
        canaux = type(self).importeur.communication.canaux
        if not nom_canal in canaux:
            self.canal_existe = False
            return True

        self.nom_canal = nom_canal
        self.canal = canaux[nom_canal]

        return True
|
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.scheduledexecutor;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.config.Config;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.scheduledexecutor.IScheduledExecutorService;
import com.hazelcast.scheduledexecutor.ScheduledExecutorServiceSlowTest;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.SlowTest;
import org.junit.After;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
@RunWith(HazelcastParallelClassRunner.class)
@Category({SlowTest.class, ParallelTest.class})
public class ClientScheduledExecutorServiceSlowTest extends ScheduledExecutorServiceSlowTest {

    // Factory owning the member instances and any clients; torn down after each test.
    private TestHazelcastFactory factory;

    @After
    public void teardown() {
        // Guard against tests that never created a cluster.
        if (factory != null) {
            factory.terminateAll();
        }
    }

    /** Creates a cluster with a default config. */
    @Override
    protected HazelcastInstance[] createClusterWithCount(int count) {
        return createClusterWithCount(count, new Config());
    }

    @Override
    protected HazelcastInstance[] createClusterWithCount(int count, Config config) {
        factory = new TestHazelcastFactory();
        HazelcastInstance[] instances = factory.newInstances(config, count);
        // Wait until partitions are safe so scheduled tasks land on stable owners.
        waitAllForSafeState(instances);
        return instances;
    }

    /**
     * Runs the inherited (member-side) tests through a client proxy instead.
     * NOTE(review): a fresh client is created on every call; presumably cleaned
     * up by terminateAll() in teardown() - confirm.
     */
    @Override
    public IScheduledExecutorService getScheduledExecutor(HazelcastInstance[] instances, String name) {
        return factory.newHazelcastClient().getScheduledExecutorService(name);
    }
}
|
// Module dependencies.
// FIX: this declaration list was previously terminated with a ';' after
// body-parser, which silently turned https, fs and cookieParser into
// implicit globals. All bindings now live in the single `var` statement.
var express = require('express'),
    // routes = require('./routes/router'),
    path = require('path'),
    hotel = require('./routes/hotels'),
    user = require('./routes/users'),
    http = require('http'),
    bodyParser = require('body-parser'),
    https = require('https'),
    fs = require('fs'),
    cookieParser = require('cookie-parser');
// _ = require('underscore');
// bodyParser = require('body-parser'),
// _ = require('underscore');

var app = express();

// HACK: API key committed in source - move it to an environment variable.
var mandrill = require('mandrill-api/mandrill');
var mandrill_client = new mandrill.Mandrill('9cUjvz7KRIT5o_YSoInj0g');

// Development-only middleware and static file serving.
var env = process.env.NODE_ENV || 'development';
if ('development' === env) {
    app.use(bodyParser());
    app.use(cookieParser('hozodo'));
    //app.set('port', process.env.PORT || 8639);
    app.set('port', process.env.PORT || 8080);
    app.use(express.static(path.join(__dirname, 'public')));
}

app.get('/', function(req, res) {
    // FIX: was `res.render(index)`, which threw a ReferenceError because
    // `index` was never defined; the view must be named as a string.
    // NOTE(review): no view engine is configured in this file - confirm one
    // is set elsewhere, otherwise serve a static file here instead.
    res.render('index');
});

// Hotel routes.
app.get('/hotels', hotel.findAll);
app.get('/hotels/:id', hotel.findById);
app.post('/hotels', hotel.addHotels);
app.put('/hotels/:id', hotel.updateHotels);
app.delete('/hotels/:id', hotel.deleteHotels);
app.post('/hotels/hotelRequest', hotel.saveHotelRequest);
app.post('/hotels/hotelApiResponse', hotel.saveApiResponse);
app.get('/trendingDestination', hotel.trendingDestination);
app.post('/hotels/name', hotel.findHotelByName);

// User / auth routes.
app.post('/user/oauth', user.auth);
app.post('/user/signin', user.signin);
app.post('/user/logout', user.signout);
app.put('/user/signup', user.signup);
app.post('/user/trace', user.trace);
app.get('/oauth2callback', user.oauth);

http.createServer(app).listen(app.get('port'), function() {
    console.log("Express server listening on port " + app.get('port'));
});

/* For test
var options = {
    key: fs.readFileSync('../privatekey.pem'),
    cert: fs.readFileSync('../certificate.pem')
};

https.createServer(options, app).listen(443, function () {
    console.log('Https server listening on port ' + 443);
});
*/
|
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
** Modified to support SQLite extensions by the SQLite developers:
** [email protected].
*/
package org.sqlite.database.sqlite;
/**
 * An exception that indicates that the SQLite database is full.
 */
public class SQLiteFullException extends SQLiteException {
    /** Constructs the exception with no detail message. */
    public SQLiteFullException() {}

    /**
     * Constructs the exception with the given detail message.
     *
     * @param error detail message describing the failure
     */
    public SQLiteFullException(String error) {
        super(error);
    }
}
|
"""
Registers the CCX feature for the edX platform.
"""
from django.conf import settings
from django.utils.translation import ugettext_noop
from courseware.access import has_access
from student.roles import CourseCcxCoachRole
from xmodule.tabs import CourseTab
class CcxCourseTab(CourseTab):
    """
    The representation of the CCX course tab
    """

    type = "ccx_coach"
    title = ugettext_noop("CCX Coach")
    view_name = "ccx_coach_dashboard"
    is_dynamic = True    # The CCX view is dynamically added to the set of tabs when it is enabled

    @classmethod
    def is_enabled(cls, course, user=None):
        """
        Returns true if CCX has been enabled and the specified user is a coach
        """
        if not settings.FEATURES.get('CUSTOM_COURSES_EDX', False) or not course.enable_ccx:
            # If CCX is not enabled, do not show the CCX coach tab.
            return False

        is_staff_or_instructor = has_access(user, 'staff', course) or has_access(user, 'instructor', course)
        # NOTE(review): hasattr(course.id, 'ccx') presumably detects a CCX
        # course key (CCXLocator) - confirm against the opaque-keys types.
        if hasattr(course.id, 'ccx') and is_staff_or_instructor:
            # Staff and instructors of a CCX course can always see the tab.
            return True

        # Otherwise the tab is only visible to users with the coach role.
        role = CourseCcxCoachRole(course.id)
        return role.has_user(user)
|
/**
* Project Wonderland
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., All Rights Reserved
*
* Redistributions in source code form must reproduce the above
* copyright and this condition.
*
* The contents of this file are subject to the GNU General Public
* License, Version 2 (the "License"); you may not use this file
* except in compliance with the License. A copy of the License is
* available at http://www.opensource.org/licenses/gpl-license.php.
*
* Sun designates this particular file as subject to the "Classpath"
* exception as provided by Sun in the License file that accompanied
* this code.
*/
package org.jdesktop.wonderland.server.spatial.impl;
import com.jme.bounding.BoundingVolume;
import com.sun.sgs.auth.Identity;
import org.jdesktop.wonderland.common.cell.CellTransform;
import org.jdesktop.wonderland.server.spatial.ViewUpdateListener;
/**
 * A cell in the server-side spatial index. Implementations track local and
 * world bounds, transforms, parent/child structure, and notify view-related
 * listeners of changes.
 *
 * @author paulby
 */
public interface SpatialCell {

    /**
     * Return the bounds of the object in the local coordinate system
     * @return the localBounds
     */
    public BoundingVolume getLocalBounds();

    /**
     * Set the bounds of this object in the local coordinate system.
     * Note the system guarantees that
     * the bounds of a parent fully enclose the bounds of a child, so this
     * call may cause the world bounds of parent cells to be updated.
     *
     * @param localBounds the localBounds to set
     */
    public void setLocalBounds(BoundingVolume localBounds);

    /**
     * Get the bounds of this object in world coordinates
     * @return the world bounds
     */
    public BoundingVolume getWorldBounds();

    /**
     * Return the local transform of this object
     * @return the transform
     */
    public CellTransform getLocalTransform();

    /**
     * Set the local transform for this object. Note the system guarantees that
     * the bounds of a parent fully enclose the bounds of a child, so this
     * call may cause the world bounds of parent cells to be updated.
     *
     * @param transform the transform to set
     * @param identity the identity performing the change
     */
    public void setLocalTransform(CellTransform transform, Identity identity);

    /**
     * Add the supplied object as a child of this.
     * @param child the cell to add
     * @param identity the identity performing the change
     */
    public void addChild(SpatialCell child, Identity identity);

    /**
     * Get the set of children for this SpatialCell
     * @return the children
     */
//    public Iterable getChildren();

    /**
     * Remove the specified child from this SpatialCell
     *
     * @param child the cell to remove
     */
    public void removeChild(SpatialCell child);

    /**
     * Set the current state of an attribute
     * @param attr the attr state
     */
    public void setAttribute(Object attr);

    /**
     * Revalidate this cell, causing each cache to decide whether or not
     * to reload the cell.
     */
    public void revalidate();

    /**
     * Destroy this cell, removing it from all the view caches
     */
    public void destroy();

    /**
     * Add a ViewUpdateListener to this cell. This listener will be called
     * whenever the view of a ViewCache that contains this cell is updated
     *
     * @param viewUpdateListener listener to add
     */
    public void addViewUpdateListener(ViewUpdateListener viewUpdateListener);

    /**
     * Remove the specified ViewUpdateListener
     * @param viewUpdateListener listener to remove
     */
    public void removeViewUpdateListener(ViewUpdateListener viewUpdateListener);

    /**
     * Re-notify each listener of the position of this cell
     * @param identity the identity performing the change
     */
    public void revalidateListeners(Identity identity);
}
|
import os
from app.backend.task.task import Task
from app.backend.core.dataset.dataset import Dataset
from app.backend.core.dataset.input import Input
from app.backend.api import app_flask
class BuildDatasetTask(Task):
    """Task that builds a dataset from the CSV files described in params."""

    def __init__(self, params):
        Task.__init__(self)
        # Raw request parameters; CSV paths in here are rewritten to absolute
        # filesystem paths (under the file-manager root) by perform().
        self.params = params
        self.type = 'build_dataset'
        self.basetype = 'dataset'
        self.icon = "/frontend/assets/icon/img/img-dataset1.png"

    def perform(self):
        """Resolve CSV paths, run the dataset builder, and mark completion."""
        fm_base_dir = app_flask.config['DLS_FILEMANAGER_BASE_PATH']
        # Rewrite user-supplied, filemanager-relative paths to absolute paths.
        # NOTE(review): 'validation_scv_file_path' looks like a typo for
        # 'csv', but the key must match what the frontend sends - confirm
        # before renaming it.
        for key in ("csv_file_path", "train_csv_file_path",
                    "validation_scv_file_path"):
            if key in self.params:
                self.params[key] = os.path.join(
                    fm_base_dir, self.params[key].strip("/"))
        # Named input_schema (not 'input') to avoid shadowing the builtin.
        input_schema = Input.from_schema(schema=self.params)
        Dataset.Builder(input=input_schema,
                        name=self.params['name'],
                        root_dir=app_flask.config['DATASETS_BASE_PATH'],
                        test_dataset_percentage=self.params["test_dataset_percentage"],
                        parallelism_level=self.params['parallelism_level']).build(self)
        # Leave failure states (set by build) untouched; only promote a
        # still-running task to finished.
        if self.state == 'running':
            self.state = 'finished'
|
#!/usr/bin/python
# Example using a character LCD connected to a Raspberry Pi or BeagleBone Black.
import time
import Adafruit_CharLCD as LCD


# Raspberry Pi pin configuration (BCM numbering):
lcd_rs        = 27  # Note this might need to be changed to 21 for older revision Pi's.
lcd_en        = 22
lcd_d4        = 25
lcd_d5        = 24
lcd_d6        = 23
lcd_d7        = 18
lcd_backlight = 4

# BeagleBone Black configuration:
# lcd_rs        = 'P8_8'
# lcd_en        = 'P8_10'
# lcd_d4        = 'P8_18'
# lcd_d5        = 'P8_16'
# lcd_d6        = 'P8_14'
# lcd_d7        = 'P8_12'
# lcd_backlight = 'P8_7'

# Define LCD column and row size for 16x2 LCD.
lcd_columns = 16
lcd_rows    = 2

# Alternatively specify a 20x4 LCD.
# lcd_columns = 20
# lcd_rows    = 4

# Initialize the LCD using the pins above.
lcd = LCD.Adafruit_CharLCD(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6, lcd_d7,
                           lcd_columns, lcd_rows, lcd_backlight)

# Print a two line message ('\n' moves to the second row).
lcd.message('Hello\nworld!')

# Wait 5 seconds
time.sleep(5.0)

# Demo showing the cursor.
lcd.clear()
lcd.show_cursor(True)
lcd.message('Show cursor')

time.sleep(5.0)

# Demo showing the blinking cursor.
lcd.clear()
lcd.blink(True)
lcd.message('Blink cursor')

time.sleep(5.0)

# Stop blinking and showing cursor.
lcd.show_cursor(False)
lcd.blink(False)

# Demo scrolling message right/left: shift only as far as the free columns
# so the text never scrolls off the display.
lcd.clear()
message = 'Scroll'
lcd.message(message)
for i in range(lcd_columns-len(message)):
    time.sleep(0.5)
    lcd.move_right()
for i in range(lcd_columns-len(message)):
    time.sleep(0.5)
    lcd.move_left()

# Demo turning backlight off and on.
lcd.clear()
lcd.message('Flash backlight\nin 5 seconds...')
time.sleep(5.0)
# Turn backlight off.
lcd.set_backlight(0)
time.sleep(2.0)
# Change message.
lcd.clear()
lcd.message('Goodbye!')
# Turn backlight on.
lcd.set_backlight(1)
|
# Enthought library imports.
from enthought.pyface.action.api import Action, ActionItem, Group
from enthought.traits.api import Any, Bool, List, Instance, Property, Unicode
# Local imports.
from enthought.pyface.tasks.task import Task
from enthought.pyface.tasks.task_window import TaskWindow
class TaskToggleAction(Action):
    """ An action for activating a task.
    """

    #### 'Action' interface ###################################################

    # Checked exactly while this action's task is the window's active task.
    checked = Property(Bool, depends_on='task.window.active_task')
    name = Property(Unicode, depends_on='task.name')
    style = 'toggle'
    tooltip = Property(Unicode, depends_on='name')

    #### 'TaskActivateAction' interface #######################################

    # The task this action activates.
    task = Instance(Task)

    ###########################################################################
    # 'Action' interface.
    ###########################################################################

    def perform(self, event=None):
        """Activate this action's task in its window."""
        window = self.task.window
        window.activate_task(self.task)

    ###########################################################################
    # Private interface.
    ###########################################################################

    def _get_checked(self):
        # Property getter for 'checked'.
        window = self.task.window
        return window is not None and window.active_task == self.task

    def _get_name(self):
        # Property getter for 'name': mirror the task's own name.
        return self.task.name

    def _get_tooltip(self):
        # Property getter for 'tooltip'.
        return u'Switch to the %s task.' % self.name
class TaskToggleGroup(Group):
    """ A menu for changing the active task in a task window.
    """

    #### 'ActionManager' interface ############################################

    id = 'TaskToggleGroup'
    items = List

    #### 'TaskChangeMenuManager' interface ####################################

    # The ActionManager to which the group belongs.
    manager = Any

    # The window that contains the group.
    window = Instance(TaskWindow)

    ###########################################################################
    # Private interface.
    ###########################################################################

    def _get_items(self):
        # Build one toggle action per task currently in the window.
        items = []
        for task in self.window.tasks:
            action = TaskToggleAction(task=task)
            items.append(ActionItem(action=action))
        return items

    def _rebuild(self):
        # Clear out the old group, then build the new one.
        self.destroy()
        self.items = self._get_items()

        # Inform our manager that it needs to be rebuilt.
        self.manager.changed = True

    #### Trait initializers ###################################################

    def _items_default(self):
        # Rebuild whenever tasks are added to or removed from the window.
        self.window.on_trait_change(self._rebuild, 'tasks[]')
        return self._get_items()

    def _manager_default(self):
        # Walk up the parent chain past nested Groups to the owning manager.
        manager = self
        while isinstance(manager, Group):
            manager = manager.parent
        return manager

    def _window_default(self):
        return self.manager.controller.task.window
|
from PIL import ImageFont, Image, ImageDraw
import random
class YZMInfo:
    """Value object pairing a rendered captcha image with its answer text."""

    def __init__(self, img, code):
        # The upper-cased answer string and the PIL image it was drawn on.
        self.code = code
        self.img = img
def ygm(font_size=20, count_min=4, count_max=10, code_height=30,
        string='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789',
        font_color=['black', 'darkblue', 'darkred', 'darkgreen'],
        font_family='arial.ttf'):
    """Generate a random captcha and return it as YZMInfo(img, code).

    NOTE(review): Python 2 module (see the 'print' statement at file end);
    the '/' divisions below rely on Python 2 integer division and would need
    '//' under Python 3.
    """
    if count_max < count_min:
        count_max = count_min
    # Number of characters drawn on this captcha.
    code_count = random.randrange(count_min, count_max)
    # Near-white background so the dark glyphs stay readable.
    background = (random.randrange(230, 255),
                  random.randrange(230, 255),
                  random.randrange(230, 255))
    # Three random colours for the decoy lines.
    line_color = [(random.randrange(0, 255),
                   random.randrange(0, 255),
                   random.randrange(0, 255)),
                  (random.randrange(0, 255),
                   random.randrange(0, 255),
                   random.randrange(0, 255)),
                  (random.randrange(0, 255),
                   random.randrange(0, 255),
                   random.randrange(0, 255))]
    img_width = (font_size + 1) * code_count
    img_height = code_height + font_size
    verify = ''
    im = Image.new('RGB', (img_width, img_height), background)
    draw = ImageDraw.Draw(im)
    # Characters are sampled without replacement from 'string'.
    code = random.sample(string, code_count)
    # NOTE(review): Draw(im) is created twice; the second call is redundant.
    draw = ImageDraw.Draw(im)
    # Draw random decoy line segments across the image.
    for i in range(random.randrange(code_count / 2, code_count)):
        xy = (random.randrange(0, img_width), random.randrange(0, img_height),
              random.randrange(0, img_width), random.randrange(0, img_height))
        draw.line(xy, fill=random.choice(line_color), width=1)
    x = font_size / 2
    # Draw each glyph at a random vertical offset and slightly random size.
    for i in code:
        y = random.randrange(0, code_height)
        font = ImageFont.truetype(
            font_family, font_size + random.randrange(-font_size/3, font_size/3))
        draw.text((x, y), i, font=font, fill=random.choice(font_color))
        x += font_size
        verify += i
    return YZMInfo(img=im, code=verify.upper())
# Demo: generate one captcha from an alphabet without ambiguous glyphs
# (no 0/O, 1/l/I) and save it to disk.
info = ygm(font_size=16,
           code_height=10,
           string='#@%&$abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789')
info.img.save("0010.GIF")
print info.code  # Python 2 print statement
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Address objects for network connections.
"""
import warnings, os
from zope.interface import implements
from twisted.internet.interfaces import IAddress
from twisted.python import util
class _IPAddress(object, util.FancyEqMixin):
    """
    An L{_IPAddress} represents the address of an IP socket endpoint, providing
    common behavior for IPv4 and IPv6.

    @ivar type: A string describing the type of transport, either 'TCP' or
        'UDP'.

    @ivar host: A string containing the presentation format of the IP address;
        for example, "127.0.0.1" or "::1".
    @type host: C{str}

    @ivar port: An integer representing the port number.
    @type port: C{int}
    """
    # NOTE(review): inheriting (object, FancyEqMixin) in this order only
    # resolves if FancyEqMixin is an old-style class - confirm against the
    # targeted Twisted/Python 2 versions.

    implements(IAddress)

    compareAttributes = ('type', 'host', 'port')

    def __init__(self, type, host, port):
        # 'type' deliberately shadows the builtin to match the public API name.
        assert type in ('TCP', 'UDP')
        self.type = type
        self.host = host
        self.port = port

    def __repr__(self):
        return '%s(%s, %r, %d)' % (
            self.__class__.__name__, self.type, self.host, self.port)

    def __hash__(self):
        # Must agree with FancyEqMixin equality over compareAttributes.
        return hash((self.type, self.host, self.port))
class IPv4Address(_IPAddress):
    """
    An L{IPv4Address} represents the address of an IPv4 socket endpoint.

    @ivar host: A string containing a dotted-quad IPv4 address; for example,
        "127.0.0.1".
    @type host: C{str}
    """
    def __init__(self, type, host, port, _bwHack=None):
        _IPAddress.__init__(self, type, host, port)
        # _bwHack is accepted only for backwards compatibility; passing any
        # non-None value is deprecated.
        if _bwHack is not None:
            warnings.warn("twisted.internet.address.IPv4Address._bwHack "
                          "is deprecated since Twisted 11.0",
                          DeprecationWarning, stacklevel=2)
class IPv6Address(_IPAddress):
    """
    An L{IPv6Address} represents the address of an IPv6 socket endpoint.

    All behavior (equality, repr, hashing) is inherited from L{_IPAddress}.

    @ivar host: A string containing a colon-separated, hexadecimal formatted
        IPv6 address; for example, "::1".
    @type host: C{str}
    """
class UNIXAddress(object, util.FancyEqMixin):
    """
    Object representing a UNIX socket endpoint.

    @ivar name: The filename associated with this socket.
    @type name: C{str}
    """

    implements(IAddress)

    compareAttributes = ('name', )

    def __init__(self, name, _bwHack = None):
        self.name = name
        # _bwHack is accepted only for backwards compatibility; passing any
        # non-None value is deprecated.
        if _bwHack is not None:
            warnings.warn("twisted.internet.address.UNIXAddress._bwHack is deprecated since Twisted 11.0",
                          DeprecationWarning, stacklevel=2)

    # __eq__ is defined only on platforms where os.path.samefile exists, so
    # that two different paths referring to the same socket file compare equal.
    if getattr(os.path, 'samefile', None) is not None:
        def __eq__(self, other):
            """
            overriding L{util.FancyEqMixin} to ensure the os level samefile
            check is done if the name attributes do not match.
            """
            res = super(UNIXAddress, self).__eq__(other)
            if not res and self.name and other.name:
                try:
                    return os.path.samefile(self.name, other.name)
                except OSError:
                    # Either path may not exist; fall back to name comparison.
                    pass
            return res

    def __repr__(self):
        return 'UNIXAddress(%r)' % (self.name,)

    def __hash__(self):
        if self.name is None:
            return hash((self.__class__, None))
        try:
            # Hash by inode/device so paths that are the same file (per the
            # samefile-based __eq__ above) hash alike.
            s1 = os.stat(self.name)
            return hash((s1.st_ino, s1.st_dev))
        except OSError:
            # File does not exist (yet); fall back to hashing the name.
            return hash(self.name)
# These are for buildFactory backwards compatability due to
# stupidity-induced inconsistency.
class _ServerFactoryIPv4Address(IPv4Address):
    """Backwards compatability hack. Just like IPv4Address in practice."""
    def __eq__(self, other):
        # Legacy callers compared addresses directly against (host, port)
        # tuples; support that with a deprecation warning.
        if isinstance(other, tuple):
            warnings.warn("IPv4Address.__getitem__ is deprecated. Use attributes instead.",
                          category=DeprecationWarning, stacklevel=2)
            return (self.host, self.port) == other
        if isinstance(other, IPv4Address):
            return (self.type, self.host, self.port) == \
                   (other.type, other.host, other.port)
        return False
|
// NOTE(review): auto-generated Entity Framework 6 code-first migration.
// Only comments were added here so the operations stay byte-for-byte in
// sync with the generated model snapshot.
namespace Education.DAL.Migrations
{
    using System;
    using System.Data.Entity.Migrations;

    /// <summary>
    /// Initial migration: creates the quiz tables (Questions/Answers), the
    /// user, role and user-details tables with the UserRoles join table,
    /// score logging, and the ApplicationLogs/ServerInfoes tables.
    /// </summary>
    public partial class Initial : DbMigration
    {
        /// <summary>Applies the migration: creates all tables, keys and indexes.</summary>
        public override void Up()
        {
            // Answers: answer options with a score, optionally linked to a Question.
            CreateTable(
                "dbo.Answers",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        Content = c.String(),
                        Score = c.Int(nullable: false),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                        Question_ID = c.Int(),
                    })
                .PrimaryKey(t => t.ID)
                .ForeignKey("dbo.Questions", t => t.Question_ID)
                .Index(t => t.Question_ID);

            // ApplicationLogs: free-form error/diagnostic records.
            CreateTable(
                "dbo.ApplicationLogs",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        Name = c.String(),
                        Description = c.String(),
                        StackTrace = c.String(),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID);

            // Questions referenced by Answers above.
            CreateTable(
                "dbo.Questions",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        Content = c.String(),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID);

            CreateTable(
                "dbo.Roles",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        Name = c.String(),
                        Description = c.String(),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID);

            // Users: credentials live in UserDetails (cascade-deleted with the user).
            CreateTable(
                "dbo.Users",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        UserDetailsID = c.Int(nullable: false),
                        FirstName = c.String(),
                        LastName = c.String(),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID)
                .ForeignKey("dbo.UserDetails", t => t.UserDetailsID, cascadeDelete: true)
                .Index(t => t.UserDetailsID);

            CreateTable(
                "dbo.UserDetails",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        Password = c.String(),
                        PasswordSalt = c.String(),
                        Email = c.String(),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID);

            CreateTable(
                "dbo.ScoreLogs",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        UserID = c.Int(nullable: false),
                        TotalScore = c.Decimal(nullable: false, precision: 18, scale: 2),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID)
                .ForeignKey("dbo.Users", t => t.UserID, cascadeDelete: true)
                .Index(t => t.UserID);

            CreateTable(
                "dbo.ServerInfoes",
                c => new
                    {
                        ID = c.Int(nullable: false, identity: true),
                        IpAddress = c.String(),
                        DateCreated = c.DateTime(),
                        DateModified = c.DateTime(),
                    })
                .PrimaryKey(t => t.ID);

            // Many-to-many join table between Users and Roles (composite key).
            CreateTable(
                "dbo.UserRoles",
                c => new
                    {
                        User_ID = c.Int(nullable: false),
                        Role_ID = c.Int(nullable: false),
                    })
                .PrimaryKey(t => new { t.User_ID, t.Role_ID })
                .ForeignKey("dbo.Users", t => t.User_ID, cascadeDelete: true)
                .ForeignKey("dbo.Roles", t => t.Role_ID, cascadeDelete: true)
                .Index(t => t.User_ID)
                .Index(t => t.Role_ID);
        }

        /// <summary>
        /// Reverts the migration: drops constraints and indexes first, then
        /// tables in reverse dependency order.
        /// </summary>
        public override void Down()
        {
            DropForeignKey("dbo.ScoreLogs", "UserID", "dbo.Users");
            DropForeignKey("dbo.Users", "UserDetailsID", "dbo.UserDetails");
            DropForeignKey("dbo.UserRoles", "Role_ID", "dbo.Roles");
            DropForeignKey("dbo.UserRoles", "User_ID", "dbo.Users");
            DropForeignKey("dbo.Answers", "Question_ID", "dbo.Questions");
            DropIndex("dbo.UserRoles", new[] { "Role_ID" });
            DropIndex("dbo.UserRoles", new[] { "User_ID" });
            DropIndex("dbo.ScoreLogs", new[] { "UserID" });
            DropIndex("dbo.Users", new[] { "UserDetailsID" });
            DropIndex("dbo.Answers", new[] { "Question_ID" });
            DropTable("dbo.UserRoles");
            DropTable("dbo.ServerInfoes");
            DropTable("dbo.ScoreLogs");
            DropTable("dbo.UserDetails");
            DropTable("dbo.Users");
            DropTable("dbo.Roles");
            DropTable("dbo.Questions");
            DropTable("dbo.ApplicationLogs");
            DropTable("dbo.Answers");
        }
    }
}
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>CRAN - Package seasonal</title>
<link rel="stylesheet" type="text/css" href="../../CRAN_web.css" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<style type="text/css">
table td { vertical-align: top; }
</style>
</head>
<body>
<h2>seasonal: R Interface to X-13-ARIMA-SEATS</h2>
<p>Easy-to-use interface to X-13-ARIMA-SEATS, the seasonal adjustment
software by the US Census Bureau. It offers full access to almost all
options and outputs of X-13, including X-11 and SEATS, automatic ARIMA model
search, outlier detection and support for user defined holiday variables,
such as Chinese New Year or Indian Diwali. A graphical user interface can be
used through the 'seasonalview' package. Uses the X-13-binaries from the
'x13binary' package.</p>
<table summary="Package seasonal summary">
<tr>
<td>Version:</td>
<td>1.5.1</td>
</tr>
<tr>
<td>Depends:</td>
<td>R (≥ 2.15)</td>
</tr>
<tr>
<td>Imports:</td>
<td><a href="../x13binary/index.html">x13binary</a></td>
</tr>
<tr>
<td>Suggests:</td>
<td><a href="../seasonalview/index.html">seasonalview</a> (≥ 0.1.3)</td>
</tr>
<tr>
<td>Published:</td>
<td>2017-02-12</td>
</tr>
<tr>
<td>Author:</td>
<td>Christoph Sax</td>
</tr>
<tr>
<td>Maintainer:</td>
<td>Christoph Sax &lt;christoph.sax at gmail.com&gt;</td>
</tr>
<tr>
<td>BugReports:</td>
<td><a href="https://github.com/christophsax/seasonal">https://github.com/christophsax/seasonal</a></td>
</tr>
<tr>
<td>License:</td>
<td><a href="../../licenses/GPL-3">GPL-3</a></td>
</tr>
<tr>
<td>URL:</td>
<td><a href="http://www.seasonal.website">http://www.seasonal.website</a></td>
</tr>
<tr>
<td>NeedsCompilation:</td>
<td>no</td>
</tr>
<tr>
<td>Materials:</td>
<td><a href="README.html">README</a> <a href="NEWS">NEWS</a> </td>
</tr>
<tr>
<td>In views:</td>
<td><a href="../../views/OfficialStatistics.html">OfficialStatistics</a>, <a href="../../views/TimeSeries.html">TimeSeries</a></td>
</tr>
<tr>
<td>CRAN checks:</td>
<td><a href="../../checks/check_results_seasonal.html">seasonal results</a></td>
</tr>
</table>
<h4>Downloads:</h4>
<table summary="Package seasonal downloads">
<tr>
<td> Reference manual: </td>
<td> <a href="seasonal.pdf"> seasonal.pdf </a> </td>
</tr>
<tr>
<td>Vignettes:</td>
<td>
<a href="vignettes/seas.pdf">Introduction to seasonal: R interface to X-13ARIMA-SEATS</a><br/>
</td>
</tr>
<tr>
<td> Package source: </td>
<td> <a href="../../../src/contrib/seasonal_1.5.1.tar.gz"> seasonal_1.5.1.tar.gz </a> </td>
</tr>
<tr>
<td> Windows binaries: </td>
<td> r-devel: <a href="../../../bin/windows/contrib/3.5/seasonal_1.5.1.zip">seasonal_1.5.1.zip</a>, r-release: <a href="../../../bin/windows/contrib/3.4/seasonal_1.5.1.zip">seasonal_1.5.1.zip</a>, r-oldrel: <a href="../../../bin/windows/contrib/3.3/seasonal_1.5.1.zip">seasonal_1.5.1.zip</a> </td>
</tr>
<tr>
<td> OS X El Capitan binaries: </td>
<td> r-release: <a href="../../../bin/macosx/el-capitan/contrib/3.4/seasonal_1.5.1.tgz">seasonal_1.5.1.tgz</a> </td>
</tr>
<tr>
<td> OS X Mavericks binaries: </td>
<td> r-oldrel: <a href="../../../bin/macosx/mavericks/contrib/3.3/seasonal_1.5.1.tgz">seasonal_1.5.1.tgz</a> </td>
</tr>
<tr>
<td> Old sources: </td>
<td> <a href="../../../src/contrib/Archive/seasonal"> seasonal archive </a> </td>
</tr>
</table>
<h4>Reverse dependencies:</h4>
<table summary="Package seasonal reverse dependencies">
<tr>
<td>Reverse depends:</td>
<td><a href="../ggseas/index.html">ggseas</a>, <a href="../seasonalview/index.html">seasonalview</a></td>
</tr>
<tr>
<td>Reverse imports:</td>
<td><a href="../BETS/index.html">BETS</a>, <a href="../gunsales/index.html">gunsales</a></td>
</tr>
<tr>
<td>Reverse suggests:</td>
<td><a href="../stR/index.html">stR</a></td>
</tr>
</table>
<h4>Linking:</h4>
<p>Please use the canonical form
<a href="https://CRAN.R-project.org/package=seasonal"><samp>https://CRAN.R-project.org/package=seasonal</samp></a>
to link to this page.</p>
</body>
</html>
|
"""
Copyright (c) 2013, XLAB D.O.O.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
- Neither the name of the XLAB D.O.O. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import logging
import logging.handlers
# Presumably file descriptors that must stay open across daemonization;
# populated elsewhere -- TODO confirm against the daemon setup code.
keep_fds = []

# Module-wide logger for the ebadge service.
logger = logging.getLogger("ebadge")
##ebadge related modules
"""
from ebadge_msg.comm import *
from ebadge_msg.common import *
from ebadge_msg.heh_level import *
from ebadge_msg.market_level import *
"""
|
/**
 * @class NetProfile.documents.button.DocumentButton
 * @extends Ext.button.Button
 *
 * Button that asks the server to render a document template for the
 * current record and sends the result to a printable pop-up window.
 */
Ext.define('NetProfile.documents.button.DocumentButton', {
    extend: 'Ext.button.Button',
    alias: 'widget.docbutton',
    requires: [
        'Ext.XTemplate'
    ],
    // Entity type name sent to the server so it can locate the record.
    objectType: null,
    handler: function()
    {
        var me = this,
            gen_panel = me.up('panel'),
            rec_panel = gen_panel.up('panel[cls~=record-tab]'),
            doc_box, obj_id, doc_id;

        // The record ID comes from the enclosing record tab, if present.
        if(rec_panel && rec_panel.record)
            obj_id = rec_panel.record.getId();
        // The chosen template ID comes from the sibling 'docid' component.
        doc_box = gen_panel.getComponent('docid');
        if(doc_box)
            doc_id = parseInt(doc_box.getValue());
        // Nothing to do unless both the object and the document are known.
        if(!obj_id || !doc_id)
            return;
        NetProfile.api.Document.prepare_template({
            'objid'   : obj_id,
            'objtype' : me.objectType,
            'docid'   : doc_id
        }, me.onPrepareTemplate, me);
    },
    onPrepareTemplate: function(data, res)
    {
        var doc, body, tpl, win, el;

        if(!res.result || !res.result.success)
            return;
        doc = data.doc;
        body = doc.body;
        // 'html-ext' templates are XTemplates: expand them with the
        // variables the server returned before printing.
        if(doc.type == 'html-ext')
        {
            tpl = new Ext.XTemplate(body);
            body = tpl.apply(data.vars);
        }
        if(body && Ext.Array.contains(['html-plain', 'html-ext'], doc.type))
        {
            win = window.open(
                '', 'doc_print',
                'menubar=no,location=no,resizable=yes,scrollbars=yes,status=yes'
            );
            if(!win)
                return; // TODO: alert user about blocked window
            // \074 and \076 are octal escapes for '<' and '>' so the markup
            // below cannot prematurely terminate an enclosing script tag.
            win.document.write('\074!DOCTYPE html\076\
\074html xmlns="http://www.w3.org/1999/xhtml"\076\
\074head\076\
\074meta charset="UTF-8"\076\
\074meta http-equiv="X-UA-Compatible" content="IE=edge;chrome=1" /\076\
\074title\076' + doc.name + '\074/title\076\
\074/head\076\
\074body\076\
\074/body\076\
\074/html\076\
');
            el = win.document.getElementsByTagName('body');
            if(el && el.length)
            {
                // Inject the rendered body, then hand off to the browser's
                // print dialog.
                el[0].innerHTML = body;
                win.focus();
                win.print();
            }
        }
    }
});
|
import * as React from 'react'
import * as AWS from 'utils/AWS'
import AsyncResult from 'utils/AsyncResult'
import * as Config from 'utils/Config'
import type { S3HandleBase } from 'utils/s3paths'
import { PreviewData } from '../types'
import * as utils from './utils'
// File extensions this previewer accepts for transcoding to browser video.
export const detect = utils.extIn(['.m2t', '.m2ts', '.mp4', '.webm'])

// Props for the render-prop Loader component below.
interface VideoLoaderProps {
  children: (result: $TSFixMe) => React.ReactNode
  handle: S3HandleBase
}
// Builds the transcode-endpoint URL for an S3 object: the object URL is
// signed, then passed to the binary API gateway's /transcode route.
function useVideoSrc(handle: S3HandleBase): string {
  const { binaryApiGatewayEndpoint: endpoint } = Config.use()
  const sign = AWS.Signer.useS3Signer()
  const url = React.useMemo(() => sign(handle), [handle, sign])
  const search = new URLSearchParams({ format: 'video/webm', url })
  return `${endpoint}/transcode?${search.toString()}`
}
export const Loader = function VideoLoader({ handle, children }: VideoLoaderProps) {
  // Resolve immediately: the computed source is wrapped in a successful
  // AsyncResult and handed to the render prop.
  const src = useVideoSrc(handle)
  const data = PreviewData.Video({ src })
  return children(AsyncResult.Ok(data))
}
|
// Closure-style goog.require overloads: each module-name string literal
// resolves to the type of the formatter class it provides.
declare module goog {
    function require(name: 'goog.debug.Formatter'): typeof goog.debug.Formatter;
    function require(name: 'goog.debug.HtmlFormatter'): typeof goog.debug.HtmlFormatter;
    function require(name: 'goog.debug.TextFormatter'): typeof goog.debug.TextFormatter;
}
// Type declarations for Closure Library's goog.debug log-record formatters:
// the Formatter base class plus its HTML and plain-text subclasses.
declare module goog.debug {

    /**
     * Base class for Formatters. A Formatter is used to format a LogRecord into
     * something that can be displayed to the user.
     *
     * @param {string=} opt_prefix The prefix to place before text records.
     * @constructor
     */
    class Formatter {
        constructor(opt_prefix?: string);

        /**
         * Whether to append newlines to the end of formatted log records.
         * @type {boolean}
         */
        appendNewline: boolean;

        /**
         * Whether to show absolute time in the DebugWindow.
         * @type {boolean}
         */
        showAbsoluteTime: boolean;

        /**
         * Whether to show relative time in the DebugWindow.
         * @type {boolean}
         */
        showRelativeTime: boolean;

        /**
         * Whether to show the logger name in the DebugWindow.
         * @type {boolean}
         */
        showLoggerName: boolean;

        /**
         * Whether to show the logger exception text.
         * @type {boolean}
         */
        showExceptionText: boolean;

        /**
         * Whether to show the severity level.
         * @type {boolean}
         */
        showSeverityLevel: boolean;

        /**
         * Formats a record.
         * @param {goog.debug.LogRecord} logRecord the logRecord to format.
         * @return {string} The formatted string.
         */
        formatRecord(logRecord: goog.debug.LogRecord): string;

        /**
         * Formats a record as SafeHtml.
         * @param {goog.debug.LogRecord} logRecord the logRecord to format.
         * @return {!goog.html.SafeHtml} The formatted string as SafeHtml.
         */
        formatRecordAsHtml(logRecord: goog.debug.LogRecord): goog.html.SafeHtml;

        /**
         * Sets the start time provider. By default, this is the default instance
         * but can be changed.
         * @param {goog.debug.RelativeTimeProvider} provider The provider to use.
         */
        setStartTimeProvider(provider: goog.debug.RelativeTimeProvider): void;

        /**
         * Returns the start time provider. By default, this is the default instance
         * but can be changed.
         * @return {goog.debug.RelativeTimeProvider} The start time provider.
         */
        getStartTimeProvider(): goog.debug.RelativeTimeProvider;

        /**
         * Resets the start relative time.
         */
        resetRelativeTimeStart(): void;
    }

    /**
     * Formatter that returns formatted html. See formatRecord for the classes
     * it uses for various types of formatted output.
     *
     * @param {string=} opt_prefix The prefix to place before text records.
     * @constructor
     * @extends {goog.debug.Formatter}
     */
    class HtmlFormatter extends goog.debug.Formatter {
        constructor(opt_prefix?: string);

        /**
         * Whether to show the logger exception text
         * @type {boolean}
         * @override
         */
        showExceptionText: boolean;

        /**
         * Formats a record
         * @param {goog.debug.LogRecord} logRecord the logRecord to format.
         * @return {string} The formatted string as html.
         * @override
         */
        formatRecord(logRecord: goog.debug.LogRecord): string;

        /**
         * Formats a record.
         * @param {goog.debug.LogRecord} logRecord the logRecord to format.
         * @return {!goog.html.SafeHtml} The formatted string as SafeHtml.
         * @override
         */
        formatRecordAsHtml(logRecord: goog.debug.LogRecord): goog.html.SafeHtml;
    }

    /**
     * Formatter that returns formatted plain text
     *
     * @param {string=} opt_prefix The prefix to place before text records.
     * @constructor
     * @extends {goog.debug.Formatter}
     * @final
     */
    class TextFormatter extends goog.debug.Formatter {
        constructor(opt_prefix?: string);

        /**
         * Formats a record as text
         * @param {goog.debug.LogRecord} logRecord the logRecord to format.
         * @return {string} The formatted string.
         * @override
         */
        formatRecord(logRecord: goog.debug.LogRecord): string;

        /**
         * Formats a record as text
         * @param {goog.debug.LogRecord} logRecord the logRecord to format.
         * @return {!goog.html.SafeHtml} The formatted string as SafeHtml. This is
         *     just an HTML-escaped version of the text obtained from formatRecord().
         * @override
         */
        formatRecordAsHtml(logRecord: goog.debug.LogRecord): goog.html.SafeHtml;
    }
}
|
// Variables forwarded to the preloader SWF: which movie to load and whether
// to run it in debug mode.
var flashvars = {
    file: "assets/swf/main.swf",
    debug: "true"
};

// Player embed parameters (context menu off, no scaling, fullscreen and
// script access enabled, transparent window mode).
var params = {
    menu: "false",
    scale: "noScale",
    allowFullscreen: "true",
    wmode: "transparent",
    allowScriptAccess: "always"
};

// HTML attributes applied to the generated <object> element.
var attributes = {
    id: "flash",
    allownetworking: "all"
};

// Random query suffix defeats browser caching of the preloader.
var cacheBust = parseInt(Math.random() * 1000);
swfobject.embedSWF("assets/swf/preloader.swf?n=" + cacheBust, "content_flash", "500", "500", "11.1", "", flashvars, params, attributes);
|
# -*- coding: utf-8 -*-
'''
Created on 18 de abr. de 2016
@author: david
'''
from sensors.pycomms.hmc5883l import HMC5883L
from math import sqrt, atan2, degrees, asin
import time
def getSpheric(mag):
    """
    Converts a cartesian magnetometer reading into spherical coordinates.

    @param mag: dict with the 'x', 'y' and 'z' components of the field.
    @return: [mod, theta, phi] where mod is the vector modulus, theta the
        heading angle in degrees (atan2 of y over x, range -180..180) and
        phi the elevation angle in degrees out of the x-y plane.
    """
    mod = sqrt(mag['x']*mag['x'] + mag['y']*mag['y'] + mag['z']*mag['z'])
    if mod == 0.0:
        # BUGFIX: a null reading previously crashed with ZeroDivisionError
        # in the asin() below; report a null orientation instead.
        return [0.0, 0.0, 0.0]
    theta = degrees(atan2(mag['y'], mag['x']))
    phi = degrees(asin(float(mag['z'])/float(mod)))
    return [mod, theta, phi]
def main():
    """Poll the HMC5883L magnetometer and print spherical coordinates."""
    sensor = HMC5883L()
    sensor.initialize()
    try:
        while True:
            data = sensor.getHeading()
            spher = getSpheric(data)
            # Raw cartesian reading, then [mod, theta, phi] to 3 decimals.
            print data, ["{0:.3f}".format(val) for val in spher]
            time.sleep(0.5)
    except KeyboardInterrupt:
        # Ctrl+C cleanly terminates the polling loop.
        print "[Ctrl+C] -> stop"
# Run the demo loop when executed directly.
if __name__ == '__main__':
    main()
|
package algol;
/**
* Created by Gabriel on 31/03/2016.
*/
import static algol.CLConstants.*;
/**
 * AST node for a two-way conditional: a boolean condition, a mandatory
 * "then" branch and an optional "else" branch.
 *
 * NOTE(review): despite the "Switch" name, the semantics implemented here
 * are those of an if/else statement -- confirm the intended naming.
 */
class SPSwitchStatement extends SPStatement {

    private SPExpression condition;
    private SPStatement thenPart;
    private SPStatement elsePart;

    /**
     * @param line     source line of the statement (for diagnostics).
     * @param condition boolean controlling expression.
     * @param thenPart  branch executed when the condition holds.
     * @param elsePart  branch executed otherwise; may be null.
     */
    public SPSwitchStatement(int line, SPExpression condition, SPStatement thenPart,
                             SPStatement elsePart) {
        super(line);
        this.condition = condition;
        this.thenPart = thenPart;
        this.elsePart = elsePart;
    }

    /** Type-checks the condition as boolean, then analyzes both branches. */
    public SPStatement analyze(Context context) {
        condition = (SPExpression) condition.analyze(context);
        condition.type().mustMatchExpected(line(), Type.BOOLEAN);
        thenPart = (SPStatement) thenPart.analyze(context);
        if (elsePart != null) {
            elsePart = (SPStatement) elsePart.analyze(context);
        }
        return this;
    }

    /** Emits bytecode: branch to elseLabel when the condition is false. */
    public void codegen(CLEmitter output) {
        // Both labels are created up front so label numbering stays stable
        // whether or not an else branch exists.
        String elseLabel = output.createLabel();
        String endLabel = output.createLabel();
        condition.codegen(output, elseLabel, false);
        thenPart.codegen(output);
        if (elsePart == null) {
            output.addLabel(elseLabel);
        } else {
            // Jump over the else branch once the then branch completes.
            output.addBranchInstruction(GOTO, endLabel);
            output.addLabel(elseLabel);
            elsePart.codegen(output);
            output.addLabel(endLabel);
        }
    }
}
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2011-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# Author : Guillaume Boulant (EDF)
import VISU
import MEDCoupling
# Translation table: MEDCoupling field-support type -> VISU entity type.
__mapMedType2VisuType={
    MEDCoupling.ON_CELLS:VISU.CELL,
    MEDCoupling.ON_NODES:VISU.NODE,
    MEDCoupling.ON_GAUSS_PT:VISU.TGAUSSPOINTS
    }
def visu_typeOfField(medTypeOfField, mapping=None):
    """
    This function gives the visu type corresponding to the specified
    med type.

    @param medTypeOfField: a MEDCoupling field-support constant
        (e.g. MEDCoupling.ON_CELLS).
    @param mapping: optional substitute translation table (mainly for
        testing); defaults to the module-level med->visu table.
    @return: the matching VISU entity type, or the string "UNCKNOWN"
        when the med type has no known mapping.
    """
    if mapping is None:
        mapping = __mapMedType2VisuType
    try:
        return mapping[medTypeOfField]
    except KeyError:
        # BUGFIX: a missing dict key raises KeyError, not IndexError, so the
        # original handler never fired and the lookup crashed the caller.
        # (Also replaced the Python-2-only "except X, e" syntax.)
        # NOTE(review): "UNCKNOWN" spelling kept -- callers may compare it.
        return "UNCKNOWN"
import salome
from libSALOME_Swig import SALOMEGUI_Swig
def visu_scalarmap(filename,meshname,fieldname,typeOfField,iteration=-1):
    """
    This is the minimalist function to render a scalar map on a field
    load from a med file using the VISU module.

    @param filename: path of the med file to load.
    @param meshname: name of the mesh holding the field.
    @param fieldname: name of the field to render.
    @param typeOfField: MEDCoupling support type (e.g. ON_CELLS, ON_NODES).
    @param iteration: field iteration to display (-1 by default).
    @return: True on success, False when the med data or the scalar map
        could not be created.
    """
    # We first have to prepare a pointer to the VISU component engine.
    visuComp = salome.lcc.FindOrLoadComponent("FactoryServer", "VISU")
    visuComp.SetCurrentStudy(salome.myStudy)
    # Load the med file, building both groups and fields from it.
    visumed = visuComp.CreateResult(filename)
    visumed.SetBuildGroups(True)
    visumed.SetBuildFields(True, True)
    visumed.Build(False, True)
    if not visumed.IsDone() :
        print "ERR: can't create a representation of med data"
        return False
    visuComp.RenameEntityInStudy(visumed, meshname, VISU.NODE, 'onNodes')
    visuComp.RenameEntityInStudy(visumed, meshname, VISU.CELL, 'onCells')
    # Translate the med support type into the matching VISU entity type.
    visuType = visu_typeOfField(typeOfField)
    scalarmap = visuComp.ScalarMapOnField(visumed,
                                          meshname,
                                          visuType,
                                          fieldname,
                                          iteration)
    if scalarmap is None:
        print "ERR: can't create a scalar map"
        return False
    # __GBO__ maybe it could be appreciated to select the component to
    # display. In this interface, the modulus of the field is
    # considered.
    component = 1
    scalarmap.SetScalarMode(component)
    scalarmap.SetSourceRange()
    scalarmap.SetScaling(VISU.LINEAR)
    scalarmap.SetTitle(fieldname)
    # This final part is to automatically display the scalar map in a
    # VISU viewer.
    sg = SALOMEGUI_Swig()
    sg.updateObjBrowser(1)
    # Display the scalar map in the viewer
    myViewManager = visuComp.GetViewManager()
    myView = myViewManager.Create3DView()
    myView.Maximize()
    myView.Display(scalarmap);
    myView.SetFocalPoint([0,0,0]);
    myView.FitAll();
    return True
def TEST_scalarmap():
    """Demo driver: renders a scalar map from the bundled cell test data."""
    import os
    from xmed import properties
    #properties.setup(properties.testdata_02) # test with nodes
    properties.setup(properties.testdata_03) # test with cells
    # __GBO__: WARN due to a specific feature of VISU, when only one
    # field timestamps exists in the med file, we have to specify an
    # iteration number of 1, whatever the iteration value is in the
    # med file.
    #iteration = properties.testFieldIt
    iteration = 1
    visu_scalarmap(properties.testFilePath,
                   properties.testMeshName,
                   properties.testFieldName,
                   properties.testTypeOfField,
                   iteration)
# Run the scalar-map demo when executed as a script.
if __name__ == "__main__":
    TEST_scalarmap()
|
"""SCons.Tool.masm
Tool-specific initialization for the Microsoft Assembler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/masm.py rel_2.4.1:3453:73fefd3ea0b0 2015/11/09 03:25:05 bdbaddog"
import SCons.Defaults
import SCons.Tool
import SCons.Util
ASSuffixes = ['.s', '.asm', '.ASM']
ASPPSuffixes = ['.spp', '.SPP', '.sx']
# On case-sensitive platforms '.S' means pre-processed assembly; on
# case-insensitive ones it is indistinguishable from plain '.s'.
if SCons.Util.case_sensitive_suffixes('.s', '.S'):
    ASPPSuffixes.append('.S')
else:
    ASSuffixes.append('.S')
def generate(env):
    """Add Builders and construction variables for masm to an Environment."""
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)

    # Hoist the shared action/emitter lookups out of the suffix loops.
    as_action = SCons.Defaults.ASAction
    aspp_action = SCons.Defaults.ASPPAction
    static_emitter = SCons.Defaults.StaticObjectEmitter
    shared_emitter = SCons.Defaults.SharedObjectEmitter

    for suffix in ASSuffixes:
        static_obj.add_action(suffix, as_action)
        shared_obj.add_action(suffix, as_action)
        static_obj.add_emitter(suffix, static_emitter)
        shared_obj.add_emitter(suffix, shared_emitter)

    for suffix in ASPPSuffixes:
        static_obj.add_action(suffix, aspp_action)
        shared_obj.add_action(suffix, aspp_action)
        static_obj.add_emitter(suffix, static_emitter)
        shared_obj.add_emitter(suffix, shared_emitter)

    # Microsoft-assembler-specific construction variables.
    env['AS'] = 'ml'
    env['ASFLAGS'] = SCons.Util.CLVar('/nologo')
    env['ASPPFLAGS'] = '$ASFLAGS'
    env['ASCOM'] = '$AS $ASFLAGS /c /Fo$TARGET $SOURCES'
    env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c /Fo$TARGET $SOURCES'
    env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
def exists(env):
    """Return a truthy value when the 'ml' assembler can be found."""
    return env.Detect('ml')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
#include "runtime/allocators/MallocAllocator.h"
#include <catch2/catch.hpp>
using namespace Shiny;
// Test double: disables free-on-reset so blocks that were never explicitly
// destroy()ed stay allocated and can surface as leaks.
class MallocTestableAllocator : public MallocAllocator {
public:
  MallocTestableAllocator() {
    setFreeOnReset(false); // Catches leaks.
  }
};
// Verifies allocate() returns distinct non-null blocks and that the
// allocator tracks both the allocation count and total requested bytes.
TEST_CASE("Can allocate blocks of memory 2", "[MallocAllocator]") {
  MallocTestableAllocator alloc;
  auto a1 = alloc.allocate(2);
  REQUIRE(nullptr != a1);
  REQUIRE(1 == alloc.allocationCount());
  REQUIRE(2 == alloc.requestedByteCount());
  auto a2 = alloc.allocate(5);
  REQUIRE(nullptr != a2);
  REQUIRE(a1 != a2);
  REQUIRE(2 == alloc.allocationCount());
  REQUIRE(7 == alloc.requestedByteCount());
  // Release in reverse allocation order.
  alloc.destroy(a2);
  alloc.destroy(a1);
}
// Smoke test: destroy() accepts multiple live allocations, released here in
// allocation order (the opposite order of the test above).
TEST_CASE("Can destroy allocation", "[MallocAllocator]") {
  MallocTestableAllocator alloc;
  auto a1 = alloc.allocate(2);
  auto a2 = alloc.allocate(5);
  alloc.destroy(a1);
  alloc.destroy(a2);
}
|
#ifndef SCENE_LUA_HPP
#define SCENE_LUA_HPP
#include <string>
#include "scene.hpp"
// Executes the given Lua scene-description file.
// NOTE(review): presumably returns true on success -- confirm in the
// implementation file.
bool run_lua(const std::string& filename);
#endif
|
version https://git-lfs.github.com/spec/v1
oid sha256:4330796a9aaa535ec60f3e03a86b330b0656058f50446b709c8d0df5aafd3302
size 98179
|
"""
simple.py
"""
# Copyright (c) 2012-20 G. Peter Lepage.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version (see <http://www.gnu.org/licenses/>).
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
from __future__ import print_function # makes this work for python2 and 3
try:
from collections import OrderedDict
except ImportError:
OrderedDict = dict
import numpy as np
import gvar as gv
import lsqfit
# Fit inputs: two correlated datasets plus a direct constraint on b/a.
y = gv.BufferDict() # data for the dependent variable
y["data1"] = gv.gvar([1.376,2.010],[[ 0.0047,0.01],[ 0.01,0.056]])
y["data2"] = gv.gvar([1.329,1.582],[[ 0.0047,0.0067],[0.0067,0.0136]])
y["b/a" ] = gv.gvar(2.0,0.5)
x = gv.BufferDict() # independent variable
x["data1"] = np.array([0.1,1.0])
x["data2"] = np.array([0.1,0.5])
prior = gv.BufferDict() # a priori values for fit parameters
prior['a'] = gv.gvar(0.5,0.5)
prior['b'] = gv.gvar(0.5,0.5)
# print(y["data1"][0].mean,"+-",y["data1"][0].sdev)
# print(gv.evalcov(y["data1"]))
def fcn(x, p):
    """Fit function: exponential model exp(a + x*b) per dataset, plus b/a."""
    out = {key: gv.exp(p['a'] + x[key] * p['b']) for key in ('data1', 'data2')}
    out['b/a'] = p['b'] / p['a']
    return out
# do the fit
fit = lsqfit.nonlinear_fit(data=(x,y),prior=prior,fcn=fcn)
print(fit.format(100)) # print standard summary of fit
p = fit.p # best-fit values for parameters
# Collect the derived quantities that feed the error budget below.
outputs = gv.BufferDict()
outputs['a'] = p['a']
outputs['b/a'] = p['b']/p['a']
outputs['b'] = p['b']
inputs = OrderedDict()
inputs['y'] = y
inputs['prior'] =prior
print(gv.fmt_values(outputs)) # tabulate outputs
print(gv.fmt_errorbudget(outputs,inputs)) # print error budget for outputs
# save best-fit values in file "outputfile.p" for later use
import pickle
pickle.dump(fit.p,open("outputfile.p","wb"))
|
# coding: utf-8
'''
Copyright (c) 2010, Alexandru Dancu
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the project nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
class Config(object):
    """Mutable container for table-related configuration state."""

    def __init__(self):
        # No table collection has been assigned yet.
        self.tables = None
        # Tables queued for deletion; each instance gets its own fresh list.
        self.tablesToDelete = []
|
#!/usr/bin/env python3
# Copyright (c) 2015-2018 TurboCoin
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from argparse import ArgumentParser
from base64 import urlsafe_b64encode
from binascii import hexlify
from getpass import getpass
from os import urandom
import hmac
def generate_salt(size):
    """Create size byte hex salt (2*size hex characters)."""
    raw = urandom(size)
    return hexlify(raw).decode()
def generate_password():
    """Create 32 byte b64 password (44 url-safe base64 characters)."""
    token = urlsafe_b64encode(urandom(32))
    return token.decode('utf-8')
def password_to_hmac(salt, password):
    """Return the hex HMAC-SHA256 of *password* keyed with *salt*."""
    digest = hmac.new(salt.encode('utf-8'), password.encode('utf-8'), 'SHA256')
    return digest.hexdigest()
def main():
    """Parse CLI arguments and print a ready-to-paste rpcauth line.

    The password is generated (or prompted for), salted with 16 random
    bytes, and HMAC-SHA256 hashed; only the salt and hash end up in the
    config file.
    """
    parser = ArgumentParser(description='Create login credentials for a JSON-RPC user')
    parser.add_argument('username', help='the username for authentication')
    parser.add_argument('password', help='leave empty to generate a random password or specify "-" to prompt for password', nargs='?')
    opts = parser.parse_args()

    password = opts.password
    if not password:
        password = generate_password()
    elif password == '-':
        password = getpass()

    # Create 16 byte hex salt
    salt = generate_salt(16)
    digest = password_to_hmac(salt, password)

    print('String to be appended to turbocoin.conf:')
    print(f'rpcauth={opts.username}:{salt}${digest}')
    print(f'Your password:\n{password}')
# Entry point when run as a script.
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python -OO
# encoding: utf-8
###########
# ORP - Open Robotics Platform
#
# Copyright (c) 2010 John Harrison, William Woodall
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
##########
"""
servies.py - This module allows for easy, safe threading in control code.
The @service decorator is included in control code file by default.
"""
__author__ = "John Harrison"
__copyright__ = "Copyright (c) 2010 John Harrison, William Woodall"
### Imports ###
import sys
import traceback
import re
import logging
import threading
# Setup logging
log = logging.getLogger('ORPD')
from lib import importspecial
def logError(exc_info, log_func, msg, line_no_delta=0):
    """Log *msg* followed by a formatted traceback through *log_func*.

    exc_info      -- the (type, value, traceback) triple from sys.exc_info()
    log_func      -- logging callable taking a single string (e.g. log.error)
    msg           -- prefix line for the log entry
    line_no_delta -- amount subtracted from the reported line number so the
                     traceback points at the user's original control-code
                     line (see importspecial.MAGIC_LINENO)
    """
    exc_type, exc_value, exc_traceback = exc_info
    tb_message = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback))
    # Rewrite the last "line N" occurrence to compensate for preprocessing.
    # BUG FIX: previously match.group() was called unguarded, which raised
    # AttributeError (or int('') -> ValueError) when the formatted traceback
    # contained no "line <digits>" text.
    match = re.search(r'(.*)line\s(\d*)(.*)', tb_message, re.M | re.S)
    if match and match.group(2):
        tb_message = (match.group(1) + 'line '
                      + str(int(match.group(2)) - line_no_delta)
                      + match.group(3))
    log_func(msg + '\n' + tb_message)
class Service(object):
    """Runs a callable in a background-thread loop for control code.

    The wrapped callable is invoked repeatedly on its own thread until
    stop() clears the running flag; any exception stops the loop and is
    logged with a corrected line number.
    """

    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.running = False

    def __call__(self, *args, **kwargs):
        # Calling the instance is shorthand for start().
        return self.start(*args, **kwargs)

    def __go(self):
        # Thread body: one call to func per iteration until stop() is seen.
        while self.running:
            try:
                self.func(*self.args, **self.kwargs)
            except Exception:
                self.stop()
                logError(sys.exc_info(), log.error,
                         'Control Code Service Error:',
                         importspecial.MAGIC_LINENO)

    def start(self, *args, **kwargs):
        """Begin looping func on a fresh thread; non-empty args/kwargs
        replace the ones captured at construction time."""
        self.running = True
        self.args = args if args else self.args
        self.kwargs = kwargs if kwargs else self.kwargs
        self.proc = threading.Thread(target=self.__go)
        self.proc.start()

    def toggle(self, *args, **kwargs):
        """Start the loop if stopped, otherwise request it to stop."""
        if not self.running:
            return self.start(*args, **kwargs)
        self.running = False

    def stop(self):
        """Ask the loop to finish after the current iteration."""
        self.running = False
def service(func, *args, **kwargs):
    """Decorator providing safe threading in control code.

    Wraps *func* in a Service so it can be looped indefinitely on a
    background thread and started or stopped at any point.

    Example control code which will print 'hi' every 10 seconds:
    @service
    def foo():
        log.info('hi')
        sleep(10)
    def main():
        foo()  # or foo.start(); stop it again with foo.stop()
        handleEvents()
    """
    return Service(func, *args, **kwargs)
|
using System.Xml;
using System.Xml.Linq;
using Autofac;
using Orchard.ContentManagement.Records;
namespace Orchard.ContentManagement.FieldStorage.InfosetStorage {
public class InfosetPart : ContentPart {
    public InfosetPart() {
        Infoset = new Infoset();
        VersionInfoset = new Infoset();
    }

    // XML store shared across versions.
    public Infoset Infoset { get; set; }
    // XML store for per-version data (used when TPart derives from ContentItemVersionRecord).
    public Infoset VersionInfoset { get; set; }

    public string Get<TPart>(string fieldName) {
        return Get<TPart>(fieldName, null);
    }

    public string Get<TPart>(string fieldName, string valueName) {
        return Get(typeof(TPart).Name, fieldName, valueName, typeof(TPart).IsAssignableTo<ContentItemVersionRecord>());
    }

    public string Get(string partName, string fieldName) {
        return Get(partName, fieldName, null, false);
    }

    public string GetVersioned(string partName, string fieldName) {
        return Get(partName, fieldName, null, true);
    }

    // Reads partName/fieldName from the infoset selected by 'versionable';
    // returns the element text, or the named attribute when valueName is given,
    // or null when any level of the path is missing.
    public string Get(string partName, string fieldName, string valueName, bool versionable = false) {
        var element = versionable ? VersionInfoset.Element : Infoset.Element;
        var partElement = element.Element(XmlConvert.EncodeName(partName));
        if (partElement == null) {
            return null;
        }
        var fieldElement = partElement.Element(XmlConvert.EncodeName(fieldName));
        if (fieldElement == null) {
            return null;
        }
        if (string.IsNullOrEmpty(valueName)) {
            return fieldElement.Value;
        }
        var valueAttribute = fieldElement.Attribute(XmlConvert.EncodeName(valueName));
        if (valueAttribute == null) {
            return null;
        }
        return valueAttribute.Value;
    }

    public void Set<TPart>(string fieldName, string valueName, string value) {
        // BUG FIX: this overload previously delegated to Set<TPart>(fieldName, value),
        // silently discarding valueName; forward it so named attribute values are stored.
        Set(typeof(TPart).Name, fieldName, valueName, value, typeof(TPart).IsAssignableTo<ContentItemVersionRecord>());
    }

    public void Set<TPart>(string fieldName, string value) {
        Set(typeof(TPart).Name, fieldName, null, value, typeof(TPart).IsAssignableTo<ContentItemVersionRecord>());
    }

    public void Set(string partName, string fieldName, string value) {
        Set(partName, fieldName, null, value, false);
    }

    public void SetVersioned(string partName, string fieldName, string value) {
        Set(partName, fieldName, null, value, true);
    }

    // Writes value under partName/fieldName (as element text, or as the valueName
    // attribute) into the infoset selected by 'versionable', creating missing nodes.
    public void Set(string partName, string fieldName, string valueName, string value, bool versionable = false) {
        InfosetHelper.ThrowIfContainsInvalidXmlCharacter(value);
        var element = versionable ? VersionInfoset.Element : Infoset.Element;
        var encodedPartName = XmlConvert.EncodeName(partName);
        var partElement = element.Element(encodedPartName);
        if (partElement == null) {
            partElement = new XElement(encodedPartName);
            // BUG FIX: add to the tree selected above; previously this always
            // added to Infoset.Element, so versionable writes created the part
            // element in the non-versioned document.
            element.Add(partElement);
        }
        var encodedFieldName = XmlConvert.EncodeName(fieldName);
        var fieldElement = partElement.Element(encodedFieldName);
        if (fieldElement == null) {
            fieldElement = new XElement(encodedFieldName);
            partElement.Add(fieldElement);
        }
        if (string.IsNullOrEmpty(valueName)) {
            fieldElement.Value = value ?? "";
        }
        else {
            fieldElement.SetAttributeValue(XmlConvert.EncodeName(valueName), value);
        }
    }
}
}
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drops the Event.end_time column
    (reinstated with null=True, blank=True on rollback)."""

    def forwards(self, orm):
        # Deleting field 'Event.end_time'
        db.delete_column(u'djangocms_calendar_event', 'end_time')

    def backwards(self, orm):
        # Adding field 'Event.end_time'
        db.add_column(u'djangocms_calendar_event', 'end_time',
                      self.gf('django.db.models.fields.DateField')(null=True, blank=True),
                      keep_default=False)

    # Frozen ORM snapshot auto-generated by South; reflects the model state
    # AFTER this migration. Do not edit by hand.
    models = {
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'djangocms_calendar.event': {
            'Meta': {'ordering': "('start_date', 'start_time')", 'object_name': 'Event'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['djangocms_calendar.EventCategory']", 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'start_date': ('django.db.models.fields.DateField', [], {}),
            'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'djangocms_calendar.eventcalendarpluginmodel': {
            'Meta': {'object_name': 'EventCalendarPluginModel', '_ormbases': ['cms.CMSPlugin']},
            u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'selected_categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['djangocms_calendar.EventCategory']", 'symmetrical': 'False'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'view_mode': ('django.db.models.fields.CharField', [], {'max_length': '20'})
        },
        u'djangocms_calendar.eventcategory': {
            'Meta': {'object_name': 'EventCategory'},
            'color': ('colors.fields.ColorField', [], {'default': "'FF0000'", 'max_length': '7'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['djangocms_calendar.EventCategory']", 'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        }
    }

    complete_apps = ['djangocms_calendar']
|
######################################################################
##
## Copyright (C) 2006, Blekinge Institute of Technology
##
## Author: Andrew May <acmay [at] acmay [dot] homeip [dot] net>
## Description: PPC arch specific stuff
##
## Licensed under the terms of GNU General Public License version 2
## (or later, at your option). See COPYING file distributed with Dissy
## for full text of the license.
##
######################################################################
import sys, architecture
from dissy.architecture import Architecture
# Branch mnemonics (including +/- static branch-prediction hints) that dissy
# should treat as jumps when drawing control-flow arrows.
ppc_jumps = [
    'b',
    'b-',
    'bdnz+',
    'bdnz-',
    'beq',
    'beq+',
    'beq-',
    'bge',
    'bge+',
    'bge-',
    'bgt+',
    'bgt-',
    'bl',
    'ble+',
    'ble-',
    'blt+',
    'blt-',
    'bne',
    'bne+',
    'bne-',
    'bye-',  # NOTE(review): 'bye-' is not a PowerPC branch mnemonic -- likely a typo; verify against objdump output
    ]
# 'bl' (branch-and-link) is additionally classified as a function call.
ppc_calls = ['bl']
class PpcArchitecture(Architecture):
    """PowerPC-specific architecture description for dissy."""

    def __init__(self):
        Architecture.__init__(self, ppc_jumps, ppc_calls)

    def getJumpDestination(self, address, insn, args):
        """Resolve a branch target; conditional PPC branches carry the
        target address as the last comma-separated operand."""
        operands = args.split(",")
        if len(operands) > 1:
            return Architecture.getJumpDestination(self, address, insn, operands[-1])
        return Architecture.getJumpDestination(self, address, insn, args)
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
lasgrid.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
---------------------
Date : September 2013
Copyright : (C) 2013 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from .LAStoolsUtils import LAStoolsUtils
from .LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
class lasgrid(LAStoolsAlgorithm):
    """QGIS Processing wrapper for the LAStools 'lasgrid' executable, which
    grids LiDAR points into a raster using a chosen attribute and method."""

    # Parameter identifiers.
    ATTRIBUTE = "ATTRIBUTE"
    METHOD = "METHOD"
    # Selection lists; index 0 is the tool's default and is therefore not
    # passed explicitly on the command line (see processAlgorithm).
    ATTRIBUTES = ["elevation", "intensity", "rgb", "classification"]
    METHODS = ["lowest", "highest", "average", "stddev"]
    USE_TILE_BB = "USE_TILE_BB"

    def defineCharacteristics(self):
        """Declare the algorithm name, group and GUI parameters.
        Note: the order of the add* calls determines the GUI layout."""
        self.name, self.i18n_name = self.trAlgorithm('lasgrid')
        self.group, self.i18n_group = self.trAlgorithm('LAStools')
        self.addParametersVerboseGUI()
        self.addParametersPointInputGUI()
        self.addParametersFilter1ReturnClassFlagsGUI()
        self.addParametersStepGUI()
        self.addParameter(ParameterSelection(lasgrid.ATTRIBUTE,
                                             self.tr("Attribute"), lasgrid.ATTRIBUTES, 0))
        self.addParameter(ParameterSelection(lasgrid.METHOD,
                                             self.tr("Method"), lasgrid.METHODS, 0))
        self.addParameter(ParameterBoolean(lasgrid.USE_TILE_BB,
                                           self.tr("use tile bounding box (after tiling with buffer)"), False))
        self.addParametersRasterOutputGUI()
        self.addParametersAdditionalGUI()

    def processAlgorithm(self, progress):
        """Assemble the lasgrid command line from the parameter values and run it."""
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasgrid")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        self.addParametersFilter1ReturnClassFlagsCommands(commands)
        self.addParametersStepCommands(commands)
        attribute = self.getParameterValue(lasgrid.ATTRIBUTE)
        if attribute != 0:
            # Non-default attribute selected: pass e.g. "-intensity".
            commands.append("-" + lasgrid.ATTRIBUTES[attribute])
        method = self.getParameterValue(lasgrid.METHOD)
        if method != 0:
            # Non-default method selected: pass e.g. "-average".
            commands.append("-" + lasgrid.METHODS[method])
        if (self.getParameterValue(lasgrid.USE_TILE_BB)):
            commands.append("-use_tile_bb")
        self.addParametersRasterOutputCommands(commands)
        self.addParametersAdditionalCommands(commands)
        LAStoolsUtils.runLAStools(commands, progress)
|
define(['backbone', 'models/Person'], function (Backbone, Person) {
    // Collection of Person models backed by the /people endpoint.
    var People = Backbone.Collection.extend({
        model: Person,
        url: '/people',

        // Case-insensitive lookup by nickname.
        // BUG FIX: the stored nickname was lowercased but the query argument
        // was not, so any query containing an uppercase letter could never match.
        findByNickName: function (nickName) {
            var needle = nickName.toLowerCase();
            return this.find(function (person) {
                var nick = person.get('nickName');
                return (nick && nick.toLowerCase() === needle);
            });
        },

        // Strict lookup by model id (skips models without an id).
        findById: function (id) {
            return this.find(function (person) {
                return (person.id && person.id === id);
            });
        }
    });

    return People;
});
|
<?php
/*
* This file is part of the Slack API library.
*
* (c) Cas Leentfaar <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace CL\Slack\Tests\Payload;
use CL\Slack\Payload\ChannelsInfoPayloadResponse;
use CL\Slack\Payload\PayloadResponseInterface;
/**
* @author Cas Leentfaar <[email protected]>
*/
class ChannelsInfoPayloadResponseTest extends AbstractPayloadResponseTestCase
{
    /**
     * {@inheritdoc}
     */
    public function createResponseData()
    {
        $channel = $this->createChannel();

        return ['channel' => $channel];
    }

    /**
     * {@inheritdoc}
     *
     * @param array                       $responseData
     * @param ChannelsInfoPayloadResponse $payloadResponse
     */
    protected function assertResponse(array $responseData, PayloadResponseInterface $payloadResponse)
    {
        $actualChannel = $payloadResponse->getChannel();

        $this->assertChannel($responseData['channel'], $actualChannel);
    }
}
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 02 08:41:08 2014
@author: Acer
"""
import sys
import pycd3
class NodeFactory(pycd3.INodeFactory):
    """Factory producing instances of one pycd3.Node subclass.

    Presumably invoked by the pycd3 runtime after being registered via
    register() below -- confirm against the pycd3 API.
    """
    def __init__(self, node):
        pycd3.INodeFactory.__init__(self)
        # 'node' is the Node subclass itself (a class object), not an instance.
        self.node = node
        print "NodeFactory.__init__"

    def getNodeName(self):
        # Report the node type's name (the Python class name).
        print "NodeFactory.getName"
        return self.node.__name__

    def createNode(self):
        # Instantiate the node and hand ownership to the SWIG/C++ side.
        print "NodeFactory.createNode"
        n = self.node()
        n.__disown__()
        print "NodeFactory.disowned"
        return n

    def getSource(self):
        # Source file this node type is defined in, as reported to pycd3.
        print "NodeFactory.getSource"
        return "Practice.py"
class Demand_Model (pycd3.Node):
def __init__(self):
pycd3.Node.__init__(self)
self.Flow1 = pycd3.Flow()
self.Flow1 = pycd3.Flow()
self.Flow1 = pycd3.Flow()
# Inport and Outports
print "init node"
self.addOutPort("out", self.out)
def init(self, start, stop, dt):
print start
print stop
print dt
return True
def f(self, current, dt):
Outport=
return dt
def getClassName(self):
print "getClassName"
return "Demand_Model"
def register(nr):
    """Create and register a NodeFactory for every pycd3.Node subclass."""
    for node_class in pycd3.Node.__subclasses__():
        factory = NodeFactory(node_class)
        # Hand ownership of the factory over to the registry (SWIG-managed).
        factory.__disown__()
        nr.addNodeFactory(factory)
|
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import shutil
import wlauto
from wlauto import Command, settings
from wlauto.core.agenda import Agenda
from wlauto.core.execution import Executor
from wlauto.utils.log import add_log_file
class RunCommand(Command):
    """The 'run' subcommand: executes an agenda (or a single workload name)
    on the target device and collects the resulting output."""

    name = 'run'
    description = 'Execute automated workloads on a remote device and process the resulting output.'

    def initialize(self, context):
        """Register this subcommand's command-line arguments on the parser
        provided by the Command base class."""
        self.parser.add_argument('agenda', metavar='AGENDA',
                                 help="""
                                 Agenda for this workload automation run. This defines which
                                 workloads will be executed, how many times, with which
                                 tunables, etc. See example agendas in {} for an example of
                                 how this file should be structured.
                                 """.format(os.path.dirname(wlauto.__file__)))
        self.parser.add_argument('-d', '--output-directory', metavar='DIR', default=None,
                                 help="""
                                 Specify a directory where the output will be generated. If
                                 the directory already exists, the script will abort unless -f
                                 option (see below) is used, in which case the contents of the
                                 directory will be overwritten. If this option is not specified,
                                 then {} will be used instead.
                                 """.format(settings.output_directory))
        self.parser.add_argument('-f', '--force', action='store_true',
                                 help="""
                                 Overwrite output directory if it exists. By default, the script
                                 will abort in this situation to prevent accidental data loss.
                                 """)
        self.parser.add_argument('-i', '--id', action='append', dest='only_run_ids', metavar='ID',
                                 help="""
                                 Specify a workload spec ID from an agenda to run. If this is
                                 specified, only that particular spec will be run, and other
                                 workloads in the agenda will be ignored. This option may be
                                 used to specify multiple IDs.
                                 """)
        self.parser.add_argument('--disable', action='append', dest='instruments_to_disable',
                                 metavar='INSTRUMENT', help="""
                                 Specify an instrument to disable from the command line. This
                                 equivalent to adding "~{metavar}" to the instrumentation list in
                                 the agenda. This can be used to temporarily disable a troublesome
                                 instrument for a particular run without introducing permanent
                                 change to the config (which one might then forget to revert).
                                 This option may be specified multiple times.
                                 """)

    def execute(self, args):  # NOQA
        """Run the workloads described by *args*: set up the output directory,
        load (or synthesize) the agenda, apply --disable overrides, archive
        the effective config files, then hand over to the Executor."""
        self.set_up_output_directory(args)
        add_log_file(settings.log_file)

        if os.path.isfile(args.agenda):
            agenda = Agenda(args.agenda)
            settings.agenda = args.agenda
            # Keep a copy of the agenda with the run metadata for reproducibility.
            shutil.copy(args.agenda, settings.meta_directory)
        else:
            # Argument is not a file: treat it as a bare workload name.
            self.logger.debug('{} is not a file; assuming workload name.'.format(args.agenda))
            agenda = Agenda()
            agenda.add_workload_entry(args.agenda)

        if args.instruments_to_disable:
            # '~name' in the instrumentation list means "disable name".
            if 'instrumentation' not in agenda.config:
                agenda.config['instrumentation'] = []
            for itd in args.instruments_to_disable:
                self.logger.debug('Updating agenda to disable {}'.format(itd))
                agenda.config['instrumentation'].append('~{}'.format(itd))

        # Snapshot every loaded config file into the meta directory as
        # config_1.<ext>, config_2.<ext>, ...
        basename = 'config_'
        for file_number, path in enumerate(settings.get_config_paths(), 1):
            file_ext = os.path.splitext(path)[1]
            shutil.copy(path, os.path.join(settings.meta_directory,
                                           basename + str(file_number) + file_ext))

        executor = Executor()
        executor.execute(agenda, selectors={'ids': args.only_run_ids})

    def set_up_output_directory(self, args):
        """Create (or, with --force, replace) the run's output directory;
        exits the process if the directory exists and --force was not given."""
        if args.output_directory:
            settings.output_directory = args.output_directory
        self.logger.debug('Using output directory: {}'.format(settings.output_directory))
        if os.path.exists(settings.output_directory):
            if args.force:
                self.logger.info('Removing existing output directory.')
                shutil.rmtree(settings.output_directory)
            else:
                self.logger.error('Output directory {} exists.'.format(settings.output_directory))
                self.logger.error('Please specify another location, or use -f option to overwrite.\n')
                sys.exit(1)
        self.logger.info('Creating output directory.')
        os.makedirs(settings.output_directory)
        os.makedirs(settings.meta_directory)
|
#include <lib.h>
#define fork _fork
#include <unistd.h>
/* fork() - POSIX wrapper: sends a FORK request to the memory manager (MM)
 * through _syscall() and returns its reply value.
 */
PUBLIC pid_t fork()
{
message m;  /* request/reply buffer filled in by _syscall() */
return(_syscall(MM, FORK, &m));
}
|
import aiohttp
import asyncio
from bs4 import BeautifulSoup
import motor.motor_asyncio
import json
import os
import time
import sys
import configparser
from contextlib import contextmanager
"""
https://www.lagou.com/gongsi/70.html
https://www.lagou.com/gongsi/interviewExperiences.html?companyId=70
<script id="interviewExperiencesData" type="text/html">
<script id="companyInfoData" type="text/html">
https://www.lagou.com/gongsi/j70.html
https://www.lagou.com/gongsi/searchPosition.json
companyId: 70
positionFirstType: 全部
schoolJob: false
pageNo: 2
pageSize: 10
companyId: 70
positionFirstType: 全部
schoolJob: true
pageNo: 1
pageSize: 10
"""
# Platform-specific asyncio setup, performed at import time:
# on Linux, install uvloop's faster event-loop policy.
if sys.platform == 'linux':
    import uvloop
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
# On Windows, use the proactor (IOCP-based) event loop.
if sys.platform == 'win32':
    loop = asyncio.ProactorEventLoop()
    asyncio.set_event_loop(loop)
async def get_company(collection, company_id):
    """Fetch one Lagou company page and report whether the id is valid.

    Detection: Lagou serves a page containing <div class="page404"> for
    nonexistent company ids rather than an HTTP error status.

    NOTE(review): *collection* (a Motor collection) is accepted but nothing
    is persisted yet -- kept for interface compatibility with get_info().
    """
    headers = { 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36' }
    link = f'https://www.lagou.com/gongsi/{company_id}.html'
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(link, headers=headers, timeout=10) as resp:
                html = await resp.text()
                soup = BeautifulSoup(html, 'lxml')
                if soup.select('div.page404'):
                    print('Invalid link -> ', company_id)
                else:
                    print(link)
    except Exception as e:
        # Best-effort crawl: report the failure and move on to the next id.
        print(e)
async def get_info(collection, from_id=0, to_id=0):
    """Crawl company pages for every id in [from_id, to_id], throttled."""
    for company_id in range(from_id, to_id + 1):
        # Rate-limit: half a second between consecutive requests.
        await asyncio.sleep(0.5)
        await get_company(collection, company_id)
def main():
    """Entry point: connect to the local MongoDB and crawl company ids 62-70."""
    client = motor.motor_asyncio.AsyncIOMotorClient('mongodb://localhost:32768')
    company_collection = client['lagou_data']['lagou_company']
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(get_info(company_collection, from_id=62, to_id=70))


if __name__ == '__main__':
    main()
|
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript">var scriptRelativePath = "../";</script>
<script type="application/javascript" src="../pc.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript">
createHTML({
title: 'RTCPeerConnection identity with login',
bug: '1153314'
});
// Resolves once the IdP login iframe/window posts 'LOGINDONE' back to us,
// also asserting the message came from the expected origin.
function waitForLoginDone() {
  return new Promise(resolve => {
    window.addEventListener('message', function listener(e) {
      is(e.origin, 'https://example.com', 'got the right message origin');
      is(e.data, 'LOGINDONE', 'got the right message');
      // One-shot: detach before resolving so later messages are ignored.
      window.removeEventListener('message', listener);
      resolve();
    });
  });
}
// Drives one login round-trip: the first getIdentityAssertion() must fail
// with IdpLoginError and expose idpLoginUrl; onLoginNeeded then opens that
// URL, and after the login page reports completion a second assertion
// request must succeed.
function checkLogin(t, name, onLoginNeeded) {
  t.pcLocal.setIdentityProvider('example.com', 'idp.js#login:' + name);
  return t.pcLocal._pc.getIdentityAssertion()
    .then(a => ok(false, 'should request login'),
          e => {
            is(e.name, 'IdpLoginError', 'name is IdpLoginError');
            // Fragment may vary; compare only the URL before '#'.
            is(t.pcLocal._pc.idpLoginUrl.split('#')[0],
               'https://example.com/.well-known/idp-proxy/login.html',
               'got the right login URL from the IdP');
            return t.pcLocal._pc.idpLoginUrl;
          })
    .then(onLoginNeeded)
    .then(waitForLoginDone)
    .then(() => t.pcLocal._pc.getIdentityAssertion())
    .then(a => ok(a, 'got assertion'));
}
// Runs the login flow twice on the same local peer connection: once with the
// login page hosted in an iframe, once in a popup window.
function theTest() {
  var test = new PeerConnectionTest();
  test.setMediaConstraints([{audio: true}], [{audio: true}]);
  // Truncate the standard chain; only the identity steps are exercised here.
  test.chain.removeAfter('PC_REMOTE_CHECK_INITIAL_SIGNALINGSTATE');
  test.chain.append([
    function PC_LOCAL_IDENTITY_ASSERTION_WITH_IFRAME_LOGIN(t) {
      return checkLogin(t, 'iframe', loginUrl => {
        var iframe = document.createElement('iframe');
        iframe.setAttribute('src', loginUrl);
        iframe.frameBorder = 0;
        iframe.width = 400;
        iframe.height = 60;
        document.getElementById('display').appendChild(iframe);
      });
    },
    function PC_LOCAL_IDENTITY_ASSERTION_WITH_WINDOW_LOGIN(t) {
      return checkLogin(t, 'openwin', loginUrl => {
        window.open(loginUrl, 'login', 'width=400,height=60');
      });
    }
  ]);
  test.run();
}
runNetworkTest(theTest);
</script>
</pre>
</body>
</html>
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.ui;
import com.intellij.codeInspection.InspectionProfileEntry;
import org.jetbrains.annotations.NonNls;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
/**
* @author Bas Leijdekkers
*/
public class MultipleCheckboxOptionsPanel extends JPanel {

    private final OptionAccessor myOptionAccessor;

    public MultipleCheckboxOptionsPanel(final InspectionProfileEntry owner) {
        this(new OptionAccessor.Default(owner));
    }

    public MultipleCheckboxOptionsPanel(final OptionAccessor optionAccessor) {
        super(new GridBagLayout());
        myOptionAccessor = optionAccessor;
    }

    /**
     * Adds a checkbox bound to the named boolean option: the initial state is
     * read through the accessor and every change is written straight back.
     */
    public void addCheckbox(String label, @NonNls String property) {
        final JCheckBox checkBox = new JCheckBox(label, myOptionAccessor.getOption(property));
        configureCheckbox(myOptionAccessor, property, checkBox);
        addComponent(checkBox);
    }

    /**
     * Appends a component below all previously added ones. The layout is
     * rebuilt so that only the last (new) component carries vertical weight
     * and absorbs the remaining space, keeping earlier rows packed at the top.
     */
    public void addComponent(JComponent component) {
        final GridBagConstraints constraints = new GridBagConstraints();
        constraints.anchor = GridBagConstraints.FIRST_LINE_START;
        constraints.gridx = 0;
        constraints.gridy = 0;
        constraints.weightx = 1.0;
        final Component[] previous = getComponents();
        removeAll();
        for (final Component each : previous) {
            add(each, constraints);
            constraints.gridy++;
        }
        constraints.weighty = 1.0;
        add(component, constraints);
    }

    private static void configureCheckbox(OptionAccessor accessor, String property, JCheckBox checkBox) {
        final ButtonModel model = checkBox.getModel();
        model.addChangeListener(new CheckboxChangeListener(accessor, property, model));
    }

    /** Binds an existing checkbox to the owner's option without adding it to a panel. */
    public static void initAndConfigureCheckbox(InspectionProfileEntry owner, String property, JCheckBox checkBox) {
        final OptionAccessor accessor = new OptionAccessor.Default(owner);
        checkBox.setSelected(accessor.getOption(property));
        configureCheckbox(accessor, property, checkBox);
    }

    /** Writes the checkbox state back into the option whenever the button model changes. */
    private static class CheckboxChangeListener implements ChangeListener {

        private final OptionAccessor accessor;
        private final String optionProperty;
        private final ButtonModel buttonModel;

        CheckboxChangeListener(OptionAccessor accessor, String optionProperty, ButtonModel buttonModel) {
            this.accessor = accessor;
            this.optionProperty = optionProperty;
            this.buttonModel = buttonModel;
        }

        @Override
        public void stateChanged(ChangeEvent e) {
            accessor.setOption(optionProperty, buttonModel.isSelected());
        }
    }
}
|
#include <iostream>
#include <boost/graph/grid_graph.hpp>
#include "NearestNeighbor/metric_space_search.hpp"
// Smoke test for dvp_tree nearest-neighbour search over a boost::grid_graph.
int main(int argc, char *argv[])
{
  typedef boost::grid_graph<2> GraphType;
  // Side length of the 2-D grid; also reused below as the point dimension.
  const unsigned int dimension = 5;
  boost::array<std::size_t, 2> lengths = { { dimension, dimension } };
  GraphType graph(lengths);
  typedef boost::graph_traits<GraphType>::vertex_descriptor VertexDescriptor;
  VertexDescriptor v = { { 0, 1 } };
  // NOTE(review): the topology is 6-dimensional, but the fill and query loops
  // below iterate only 'dimension' (5) coordinates, leaving the 6th
  // uninitialized -- verify whether <6> or the loop bound is intended.
  typedef boost::hypercube_topology<6, boost::minstd_rand> TopologyType;
  TopologyType myTopology;
  typedef TopologyType::point_type PointType;
  std::vector<PointType> vertexData(dimension * dimension);
  // This is an "exterior property" of the grid_graph
  typedef boost::property_map<GraphType, boost::vertex_index_t>::const_type IndexMapType;
  IndexMapType indexMap(get(boost::vertex_index, graph));
  typedef boost::iterator_property_map<std::vector<PointType>::iterator, IndexMapType> MapType;
  MapType myMap(vertexData.begin(), indexMap);
  typedef dvp_tree<VertexDescriptor, TopologyType, MapType> TreeType;
  // Add vertices to the graph and corresponding increasing integer points to the tree.
  // The experiment here is to query the nearest neighbor of a point like (5.2, 5.2, 5.1, 5.3, 5.2, 5.1)
  // and ensure we get back (5,5,5,5,5,5)
  // NOTE(review): every iteration writes the point to the same fixed vertex
  // 'v', so only the last point survives, and numberOfVertices (100) exceeds
  // vertexData's size (25) -- this loop likely should iterate the graph's
  // vertices instead; confirm intent before relying on the result.
  unsigned int numberOfVertices = 100;
  for(unsigned int vertexId = 0; vertexId < numberOfVertices; ++vertexId)
  {
    PointType p;
    for(unsigned int dim = 0; dim < dimension; ++dim)
    {
      p[dim] = vertexId;
    }
    boost::put(myMap, v, p);
  };
  // Prefer to initialize the DVP-tree with a filled graph, this way, the entire DVP-tree will be initialized at once (gets best results).
  TreeType tree(graph, myTopology, myMap);
  multi_dvp_tree_search<GraphType, TreeType> nearestNeighborFinder;
  nearestNeighborFinder.graph_tree_map[&graph] = &tree;
  // Build the query point (all coordinates 5.2 in the first 'dimension' axes).
  PointType queryPoint;
  for(unsigned int dim = 0; dim < dimension; ++dim)
  {
    queryPoint[dim] = 5.2;
  }
  VertexDescriptor nearestNeighbor = nearestNeighborFinder(queryPoint, graph, myTopology, myMap);
  std::cout << "nearestNeighbor[0]: " << nearestNeighbor[0] << std::endl;
  return 0;
}
|
// Helpers for the "messages" template.
var placeholderMessages = [
    { title : "This feature is coming soon!" }
];

Template.messages.helpers({
    // Static placeholder list shown until real message data is wired up.
    messages: placeholderMessages
});
|
import unittest
"""
Problem Statement:
Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction.
Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN).
Output Format:
Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third.
There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fractions of the positive numbers, negative numbers and zeroes are 3/6=0.500000, 2/6=0.333333 and 1/6=0.166667, respectively.
"""
def plusMinus(arr):
    """Return the fractions of positive, negative and zero elements of *arr*.

    Each fraction is rounded to 6 decimal places; the result is the list
    [positive_fraction, negative_fraction, zero_fraction].

    ROBUSTNESS FIX: an empty array now yields [0.0, 0.0, 0.0] instead of
    raising ZeroDivisionError.
    """
    n = len(arr)
    if n == 0:
        return [0.0, 0.0, 0.0]
    positives = sum(1 for x in arr if x > 0)
    negatives = sum(1 for x in arr if x < 0)
    # Everything that is neither positive nor negative is zero.
    zeroes = n - positives - negatives
    return [round(count / n, 6) for count in (positives, negatives, zeroes)]
class TestPlusMinus(unittest.TestCase):
    """Unit test covering the sample case from the problem statement."""

    def test_plus_minus(self):
        # 3 positive, 2 negative, 1 zero out of 6 elements.
        arr = [-4, 3, -9, 0, 4, 1]
        self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
|
# -*- Mode: python; coding: utf-8; tab-width: 8; indent-tabs-mode: t; -*-
#
# Copyright (C) 2006 Adam Zimmerman <[email protected]>
# Copyright (C) 2006 James Livingston <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# The Rhythmbox authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and Rhythmbox. This permission is above and beyond the permissions granted
# by the GPL license by which Rhythmbox is covered. If you modify this code
# you may extend this exception to your version of the code, but you are not
# obligated to do so. If you do not wish to do so, delete this exception
# statement from your version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import xml.sax, xml.sax.handler
class BuyAlbumHandler(xml.sax.handler.ContentHandler):  # Class to download the track, etc.
    """SAX content handler for Magnatune's album-purchase response.

    Accumulates character data per element; when the element matching the
    requested audio format closes, its text (the download URL) is stored in
    self.url. An <ERROR> element raises MagnatunePurchaseError carrying the
    server-supplied message.
    """

    # Maps the user-facing format name to the response tag carrying its URL.
    format_map = {
        'ogg'     : 'URL_OGGZIP',
        'flac'    : 'URL_FLACZIP',
        'wav'     : 'URL_WAVZIP',
        'mp3-cbr' : 'URL_128KMP3ZIP',
        'mp3-vbr' : 'URL_VBRZIP'
    }

    def __init__(self, format):
        xml.sax.handler.ContentHandler.__init__(self)
        # Tag name whose text content is the download URL for this format.
        self._format_tag = self.format_map[format]

    def startElement(self, name, attrs):
        # Start of any element resets the text accumulator.
        self._text = ""

    def characters(self, content):
        self._text += content

    def endElement(self, name):
        if name == "ERROR":
            # Something went wrong server-side; surface the message to the user.
            raise MagnatunePurchaseError(self._text)
        if name == self._format_tag:
            self.url = self._text
        # The response also contains DL_MSG (message to the user, promo stuff)
        # and DL_PAGE (URL for manually downloading the album), ignored here.
class MagnatunePurchaseError(Exception):
    """Raised when Magnatune's purchase response contains an <ERROR> element."""
|
<?php
/**
* @package hubzero-cms
* @copyright Copyright 2005-2019 HUBzero Foundation, LLC.
* @license http://opensource.org/licenses/MIT MIT
*/
namespace Components\Resources\Models\MediaTracking;
use Hubzero\Database\Relational;
/**
 * Detailed media tracking model
 *
 * @uses \Hubzero\Database\Relational
 */
class Detailed extends Relational
{
	/**
	 * The table namespace
	 *
	 * @var string
	 */
	protected $namespace = 'media_tracking';
	/**
	 * The table to which the class pertains
	 *
	 * This will default to #__{namespace}_{modelName} unless otherwise
	 * overwritten by a given subclass. Definition of this property likely
	 * indicates some derivation from standard naming conventions.
	 *
	 * @var string
	 */
	protected $table = '#__media_tracking_detailed';
	/**
	 * Default order by for model
	 *
	 * @var string
	 */
	public $orderBy = 'id';
	/**
	 * Default order direction for select queries
	 *
	 * @var string
	 */
	public $orderDir = 'asc';
	/**
	 * Fields and their validation criteria
	 *
	 * @var array
	 */
	protected $rules = array(
		'object_id'   => 'positive|nonzero',
		'object_type' => 'notempty',
		'session_id'  => 'notempty',
		'ip_address'  => 'notempty'
	);
	/**
	 * Get a single tracking record by user and tracked object
	 *
	 * @param   integer  $user_id      User ID
	 * @param   integer  $object_id    Object ID
	 * @param   string   $object_type  Object type
	 * @return  object
	 */
	public static function oneByUserAndResource($user_id, $object_id, $object_type = 'resource')
	{
		$row = self::all()
			->whereEquals('user_id', $user_id)
			->whereEquals('object_id', $object_id)
			->whereEquals('object_type', $object_type)
			->row();
		return $row;
	}
}
|
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Drawer from '@material-ui/core/Drawer';
import CssBaseline from '@material-ui/core/CssBaseline';
import AppBar from '@material-ui/core/AppBar';
import Toolbar from '@material-ui/core/Toolbar';
import List from '@material-ui/core/List';
import Typography from '@material-ui/core/Typography';
import Divider from '@material-ui/core/Divider';
import ListItem from '@material-ui/core/ListItem';
import ListItemIcon from '@material-ui/core/ListItemIcon';
import ListItemText from '@material-ui/core/ListItemText';
import InboxIcon from '@material-ui/icons/MoveToInbox';
import MailIcon from '@material-ui/icons/Mail';
// Fixed width of the permanent navigation drawer, in pixels.
const drawerWidth = 240;
const useStyles = makeStyles(theme => ({
  root: {
    display: 'flex',
  },
  appBar: {
    // Shrink the app bar so it sits beside (not on top of) the drawer.
    width: `calc(100% - ${drawerWidth}px)`,
    marginLeft: drawerWidth,
  },
  drawer: {
    width: drawerWidth,
    flexShrink: 0,
  },
  drawerPaper: {
    width: drawerWidth,
  },
  // Spacer matching the AppBar height (Material-UI theme mixin).
  toolbar: theme.mixins.toolbar,
  content: {
    flexGrow: 1,
    backgroundColor: theme.palette.background.default,
    padding: theme.spacing(3),
  },
}));
/**
 * Demo layout with a permanently visible left-hand drawer:
 * a fixed AppBar, two icon/text list sections in the drawer,
 * and a scrollable content area offset by the drawer width.
 */
function PermanentDrawerLeft() {
  // Hook-generated CSS classes (see useStyles above).
  const classes = useStyles();
  return (
    <div className={classes.root}>
      <CssBaseline />
      <AppBar position="fixed" className={classes.appBar}>
        <Toolbar>
          <Typography variant="h6" noWrap>
            Permanent drawer
          </Typography>
        </Toolbar>
      </AppBar>
      <Drawer
        className={classes.drawer}
        variant="permanent"
        classes={{
          paper: classes.drawerPaper,
        }}
        anchor="left"
      >
        <div className={classes.toolbar} />
        <Divider />
        <List>
          {['Inbox', 'Starred', 'Send email', 'Drafts'].map((text, index) => (
            <ListItem button key={text}>
              <ListItemIcon>{index % 2 === 0 ? <InboxIcon /> : <MailIcon />}</ListItemIcon>
              <ListItemText primary={text} />
            </ListItem>
          ))}
        </List>
        <Divider />
        <List>
          {['All mail', 'Trash', 'Spam'].map((text, index) => (
            <ListItem button key={text}>
              <ListItemIcon>{index % 2 === 0 ? <InboxIcon /> : <MailIcon />}</ListItemIcon>
              <ListItemText primary={text} />
            </ListItem>
          ))}
        </List>
      </Drawer>
      <main className={classes.content}>
        <div className={classes.toolbar} />
        <Typography paragraph>
          Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt
          ut labore et dolore magna aliqua. Rhoncus dolor purus non enim praesent elementum
          facilisis leo vel. Risus at ultrices mi tempus imperdiet. Semper risus in hendrerit
          gravida rutrum quisque non tellus. Convallis convallis tellus id interdum velit laoreet id
          donec ultrices. Odio morbi quis commodo odio aenean sed adipiscing. Amet nisl suscipit
          adipiscing bibendum est ultricies integer quis. Cursus euismod quis viverra nibh cras.
          Metus vulputate eu scelerisque felis imperdiet proin fermentum leo. Mauris commodo quis
          imperdiet massa tincidunt. Cras tincidunt lobortis feugiat vivamus at augue. At augue eget
          arcu dictum varius duis at consectetur lorem. Velit sed ullamcorper morbi tincidunt. Lorem
          donec massa sapien faucibus et molestie ac.
        </Typography>
        <Typography paragraph>
          Consequat mauris nunc congue nisi vitae suscipit. Fringilla est ullamcorper eget nulla
          facilisi etiam dignissim diam. Pulvinar elementum integer enim neque volutpat ac
          tincidunt. Ornare suspendisse sed nisi lacus sed viverra tellus. Purus sit amet volutpat
          consequat mauris. Elementum eu facilisis sed odio morbi. Euismod lacinia at quis risus sed
          vulputate odio. Morbi tincidunt ornare massa eget egestas purus viverra accumsan in. In
          hendrerit gravida rutrum quisque non tellus orci ac. Pellentesque nec nam aliquam sem et
          tortor. Habitant morbi tristique senectus et. Adipiscing elit duis tristique sollicitudin
          nibh sit. Ornare aenean euismod elementum nisi quis eleifend. Commodo viverra maecenas
          accumsan lacus vel facilisis. Nulla posuere sollicitudin aliquam ultrices sagittis orci a.
        </Typography>
      </main>
    </div>
  );
}
export default PermanentDrawerLeft;
|
'use strict';
const path = require('path');
const fs = require('fs');
const url = require('url');
// Make sure any symlinks in the project folder are resolved:
// https://github.com/facebookincubator/create-react-app/issues/637
const appDirectory = fs.realpathSync(process.cwd());
// Resolve a path relative to the (symlink-resolved) project root.
const resolveApp = function (relativePath) { return path.resolve(appDirectory, relativePath); };
// Optional override for the public URL, e.g. when serving from a CDN.
const envPublicUrl = process.env.PUBLIC_URL;
/**
 * Normalise the trailing slash of a URL path.
 * @param {String} inputPath Path to normalise.
 * @param {Boolean} needsSlash Whether the result must end with '/'.
 * @returns {String} The path with exactly the requested trailing slash.
 */
function ensureSlash(inputPath, needsSlash) {
  const hasSlash = inputPath.endsWith('/');
  if (hasSlash && !needsSlash) {
    // Fix: the original called substr(path, len - 1) — passing the string
    // itself as the start index only "worked" because it coerced NaN -> 0.
    return inputPath.substring(0, inputPath.length - 1);
  } else if (!hasSlash && needsSlash) {
    return `${inputPath}/`;
  } else {
    return inputPath;
  }
}
// PUBLIC_URL env var wins; otherwise fall back to package.json "homepage".
const getPublicUrl = function (appPackageJson) {
  if (envPublicUrl) {
    return envPublicUrl;
  }
  return require(appPackageJson).homepage;
};
// We use `PUBLIC_URL` environment variable or "homepage" field to infer
// "public path" at which the app is served.
// Webpack needs to know it to put the right <script> hrefs into HTML even in
// single-page apps that may serve index.html for nested URLs like /todos/42.
// We can't use a relative path in HTML because we don't want to load something
// like /todos/42/static/js/bundle.7289d.js. We have to know the root.
function getServedPath(appPackageJson) {
  const publicUrl = getPublicUrl(appPackageJson);
  // Serve from the domain root when neither PUBLIC_URL nor homepage is set.
  const servedUrl =
    envPublicUrl || (publicUrl ? url.parse(publicUrl).pathname : '/');
  return ensureSlash(servedUrl, true);
}
// config after eject: we're in ./config/
// Central map of every filesystem path the build/test tooling needs.
module.exports = {
  dotenv: resolveApp('.env'),
  // Build output directory.
  appBuild: resolveApp('build/openchat-app'),
  appPublic: resolveApp('src/app/public'),
  appHtml: resolveApp('src/app/public/index.html'),
  // Application entry point.
  appIndexJs: resolveApp('src/app/index.tsx'),
  appPackageJson: resolveApp('package.json'),
  appSrc: resolveApp('src'),
  yarnLockFile: resolveApp('yarn.lock'),
  testsSetup: resolveApp('src/setupTests.ts'),
  appNodeModules: resolveApp('node_modules'),
  appTsConfig: resolveApp('tsconfig.json'),
  publicUrl: getPublicUrl(resolveApp('package.json')),
  servedPath: getServedPath(resolveApp('package.json'))
};
|
import codecs
import os.path
import re
from setuptools import setup
from setuptools import find_packages
def find_version(*file_paths):
    """Extract ``__version__`` from the file at *file_paths*.

    The path components are joined relative to this file's directory.
    Raises RuntimeError when no version assignment is found.
    """
    here = os.path.abspath(os.path.dirname(__file__))
    with codecs.open(os.path.join(here, *file_paths), 'r') as fp:
        contents = fp.read()
    match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M)
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
# Long description for PyPI comes straight from the README.
with open("README.rst") as f:
    long_desc = f.read()
# Runtime dependencies.
requirements = [
    "logbook",
    "docopt",
    "fluent-logger",
    "six",
]
setup(
    name="flosculus",
    # Single-source the version from flosculus/__init__.py.
    version=find_version("flosculus", "__init__.py"),
    description="Tail your log, extract the data, and send it to Fluentd",
    long_description=long_desc,
    author="Isman Firmansyah",
    author_email="[email protected]",
    url="https://github.com/iromli/flosculus",
    packages=find_packages(),
    license="MIT",
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.4",
    ],
    # Installs the `flosculusd` daemon command.
    entry_points={
        "console_scripts": ["flosculusd=flosculus.cli:main"],
    },
    zip_safe=False,
    install_requires=requirements,
)
|
'use strict';
let angular = require('angular');
require('./rollups.less');
module.exports = angular.module('spinnaker.core.cluster.allClusters.controller', [
  require('../cluster/filter/clusterFilter.service.js'),
  require('../cluster/filter/clusterFilter.model.js'),
  require('./filter/clusterFilter.controller.js'),
  require('./clusterPod.directive.js'),
  require('../account/account.module.js'),
  require('../cloudProvider/providerSelection/providerSelection.service.js'),
  require('../serverGroup/configure/common/serverGroupCommandBuilder.js'),
  require('../filterModel/filter.tags.directive.js'),
  require('../utils/waypoints/waypointContainer.directive.js'),
  require('angular-ui-bootstrap'),
  require('../cloudProvider/cloudProvider.registry.js'),
])
  .controller('AllClustersCtrl', function($scope, app, $uibModal, $timeout, providerSelectionService, _, clusterFilterService,
                                          ClusterFilterModel, serverGroupCommandBuilder, cloudProviderRegistry) {
    ClusterFilterModel.activate();
    this.initialized = false;
    $scope.sortFilter = ClusterFilterModel.sortFilter;
    this.groupingsTemplate = require('./groupings.html');
    // Precompute a lowercase free-text search string on each server group
    // (region, name, account, Jenkins build info, LB/instance ids) so that
    // the cluster filter can match cheaply. Only set once per server group.
    function addSearchFields() {
      app.serverGroups.forEach(function(serverGroup) {
        var buildInfo = '';
        if (serverGroup.buildInfo && serverGroup.buildInfo.jenkins) {
          buildInfo = [
            '#' + serverGroup.buildInfo.jenkins.number,
            serverGroup.buildInfo.jenkins.host,
            serverGroup.buildInfo.jenkins.name].join(' ').toLowerCase();
        }
        if (!serverGroup.searchField) {
          serverGroup.searchField = [
            serverGroup.region.toLowerCase(),
            serverGroup.name.toLowerCase(),
            serverGroup.account.toLowerCase(),
            buildInfo,
            _.pluck(serverGroup.loadBalancers, 'name').join(' '),
            _.pluck(serverGroup.instances, 'id').join(' ')
          ].join(' ');
        }
      });
    }
    // Re-applies the current filters and publishes groups/tags to the scope.
    let updateClusterGroups = () => {
      ClusterFilterModel.applyParamsToUrl();
      $scope.$evalAsync(() => {
        clusterFilterService.updateClusterGroups(app);
        $scope.groups = ClusterFilterModel.groups;
        $scope.tags = ClusterFilterModel.tags;
        // Timeout because the updateClusterGroups method is debounced by 25ms
        $timeout(() => { this.initialized = true; }, 50);
      }
      );
    };
    this.clearFilters = function() {
      clusterFilterService.clearFilters();
      updateClusterGroups();
    };
    // Opens the provider-specific "create server group" modal.
    this.createServerGroup = function createServerGroup() {
      providerSelectionService.selectProvider(app, 'serverGroup').then(function(selectedProvider) {
        let provider = cloudProviderRegistry.getValue(selectedProvider, 'serverGroup');
        $uibModal.open({
          templateUrl: provider.cloneServerGroupTemplateUrl,
          controller: `${provider.cloneServerGroupController} as ctrl`,
          resolve: {
            title: function() { return 'Create New Server Group'; },
            application: function() { return app; },
            serverGroup: function() { return null; },
            serverGroupCommand: function() { return serverGroupCommandBuilder.buildNewServerGroupCommand(app, selectedProvider); },
            provider: function() { return selectedProvider; }
          }
        });
      });
    };
    // Debounced variant exposed to the template.
    this.updateClusterGroups = _.debounce(updateClusterGroups, 200);
    // Rebuild search fields and groups on every application auto-refresh.
    function autoRefreshHandler() {
      addSearchFields();
      updateClusterGroups();
    }
    autoRefreshHandler();
    app.registerAutoRefreshHandler(autoRefreshHandler, $scope);
  })
  .name;
|
// This file is a part of the IncludeOS unikernel - www.includeos.org
//
// Copyright 2015 Oslo and Akershus University College of Applied Sciences
// and Alfred Bratterud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <fs/path.hpp>
#include <string>
#include <cerrno>
namespace fs
{
  static const char PATH_SEPARATOR = '/';
  // Default constructor: delegates to Path("/"), i.e. the filesystem root.
  // NOTE(review): the old comment said "uses current directory", but the
  // delegation below clearly initializes to root.
  Path::Path()
    : Path("/")
  {
  }
  Path::Path(const std::string& path)
  {
    // parse full path; parse_add returns 0 or -EINVAL, stored as state_
    this->state_ = parse_add(path);
  } // Path::Path(std::string)
  // Build a path from individual components.
  // NOTE(review): unlike the string constructor, this one never assigns
  // state_, leaving it in whatever state the default member init gives it
  // (not visible here) — confirm against path.hpp.
  Path::Path(std::initializer_list<std::string> parts)
  {
    for (auto part : parts)
      parse_add(part);
  }
  // Render the component stack as "/a/b/c/" — always absolute, always
  // with a trailing separator (just "/" for an empty stack).
  std::string Path::to_string() const
  {
    // build path
    std::string ss;
    for (const auto& p : this->stk)
    {
      ss += PATH_SEPARATOR + p;
    }
    // append path/ to end
    ss += PATH_SEPARATOR;
    return ss;
  }
  // Tokenize @path on '/' and push each component via name_added().
  // Returns 0 on success, -EINVAL on a doubled separator ("//").
  int Path::parse_add(const std::string& path)
  {
    if (path.empty())
    {
      // empty input: nothing to add, not an error
      return 0;
    }
    // scratch buffer for the component being assembled (same size as input)
    std::string buffer(path.size(), 0);
    char lastChar = 0;
    int bufi = 0;
    for (size_t i = 0; i < path.size(); i++)
    {
      if (path[i] == PATH_SEPARATOR)
      {
        if (lastChar == PATH_SEPARATOR)
        { // invalid path containing // (more than one forw-slash)
          return -EINVAL;
        }
        if (bufi)
        {
          // separator terminates a pending component
          name_added(std::string(buffer, 0, bufi));
          bufi = 0;
        }
        else if (i == 0)
        {
          // if the first character is / separator,
          // the path is relative to root, so clear stack
          stk.clear();
        }
      }
      else
      {
        buffer[bufi] = path[i];
        bufi++;
      }
      lastChar = path[i];
    } // parse path
    if (bufi)
    {
      // flush trailing component (path did not end with '/')
      name_added(std::string(buffer, 0, bufi));
    }
    return 0;
  }
  // Push a parsed component, ignoring the no-op "." entry.
  // NOTE(review): ".." is pushed verbatim here — presumably collapsed
  // elsewhere (e.g. an up()/normalize member); confirm in path.hpp.
  void Path::name_added(const std::string& name)
  {
    if (name == ".")
      return;
    stk.push_back(name);
  }
}
|
import { Pipe, PipeTransform } from '@angular/core';
/*
 * Rounds a decimal to a certain amount of points
 */
@Pipe({
  name: 'round'
})
export class RoundPipe implements PipeTransform {
  /** Round `value` to `places` decimal places (default 2). */
  transform(value: number, places = 2): number {
    const fixed = value.toFixed(places);
    return Number(fixed);
  }
}
|
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { SprkStackComponent } from './sprk-stack.component';
// Test titles below include the tested input value: previously five specs
// shared the title "should set the correct class for itemSpacing" and six
// shared "should set the correct class for splitAt", which made failures
// indistinguishable in test reports.
describe('SprkStackComponent', () => {
  let component: SprkStackComponent;
  let fixture: ComponentFixture<SprkStackComponent>;
  let element: HTMLElement;
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [SprkStackComponent],
    }).compileComponents();
  }));
  beforeEach(() => {
    fixture = TestBed.createComponent(SprkStackComponent);
    component = fixture.componentInstance;
    element = fixture.nativeElement.querySelector('div');
  });
  it('should create itself', () => {
    expect(component).toBeTruthy();
  });
  it('getClasses should match what gets set on the element', () => {
    fixture.detectChanges();
    expect(element.classList.toString()).toEqual(component.getClasses());
  });
  it('should set the correct class for itemSpacing tiny', () => {
    component.itemSpacing = 'tiny';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--tiny',
    );
  });
  it('should set the correct class for itemSpacing small', () => {
    component.itemSpacing = 'small';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--small',
    );
  });
  it('should add the stack item class if isStackItem is true', () => {
    component.isStackItem = true;
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack__item',
    );
  });
  it('should not add the stack item class if isStackItem is false', () => {
    component.isStackItem = false;
    fixture.detectChanges();
    expect(element.classList.toString()).toBe('sprk-o-Stack');
  });
  it('should set the correct class for itemSpacing medium', () => {
    component.itemSpacing = 'medium';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--medium',
    );
  });
  it('should set the correct class for itemSpacing large', () => {
    component.itemSpacing = 'large';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--large',
    );
  });
  it('should set the correct class for itemSpacing huge', () => {
    component.itemSpacing = 'huge';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--huge',
    );
  });
  it('should set the correct class for splitAt extraTiny', () => {
    component.splitAt = 'extraTiny';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--split@xxs',
    );
  });
  it('should set the correct class for splitAt tiny', () => {
    component.splitAt = 'tiny';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--split@xs',
    );
  });
  it('should set the correct class for splitAt small', () => {
    component.splitAt = 'small';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--split@s',
    );
  });
  it('should set the correct class for splitAt medium', () => {
    component.splitAt = 'medium';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--split@m',
    );
  });
  it('should set the correct class for splitAt large', () => {
    component.splitAt = 'large';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--split@l',
    );
  });
  it('should set the correct class for splitAt huge', () => {
    component.splitAt = 'huge';
    fixture.detectChanges();
    expect(element.classList.toString()).toBe(
      'sprk-o-Stack sprk-o-Stack--split@xl',
    );
  });
  it('should add the correct classes if additionalClasses have values', () => {
    component.additionalClasses = 'sprk-u-pam sprk-u-man';
    fixture.detectChanges();
    expect(component.getClasses()).toEqual(
      'sprk-o-Stack sprk-u-pam sprk-u-man',
    );
  });
  it(
    'should set the data-analytics attribute' +
      ' given a value in the analyticsString Input',
    () => {
      component.analyticsString = 'Stack 1';
      fixture.detectChanges();
      expect(element.hasAttribute('data-analytics')).toEqual(true);
      expect(element.getAttribute('data-analytics')).toEqual('Stack 1');
    },
  );
  it('should correctly apply data-id', () => {
    component.idString = 'Stack 1';
    fixture.detectChanges();
    expect(element.hasAttribute('data-id')).toEqual(true);
    expect(element.getAttribute('data-id')).toEqual('Stack 1');
  });
});
|
/**
* Handle a modal
* @param {String} action Action to perform on the modal (accept, dismiss,
* text)
* @param {String} modalType Type of modal (alertbox, confirmbox, prompt)
* @param {Function} done Function to execute when finished
*/
module.exports = (action, modalType, done) => {
/**
* The command to perform on the browser object
* @type {String}
*/
let command = `alert${action.slice(0, 1).toUpperCase()}${action.slice(1)}`;
/**
* Alert boxes can't be dismissed, this causes Chrome to crash during tests
*/
if (modalType === 'alertbox') {
command = 'alertAccept';
}
browser[command]();
done();
};
|
import React from 'react';
import { storiesOf, action } from '@kadira/storybook';
import { Toggle } from '../components';
import Container from './Container';
// Storybook showcase for the checkbox-style Toggle component.
// NOTE(review): the story names look swapped — 'with a text' renders a Toggle
// without a label, while 'with no text' passes label='hello'; confirm intent.
storiesOf('Checkbox Button', module)
  // Render every story inside a Container wired to the storybook action logger.
  .addDecorator((story) => <Container action={action} value={false}>{story()}</Container>)
  .add('with a text', () => (
    <Toggle
      name='basic'
      type='checkbox'
    />
  ))
  .add('with no text', () => (
    <Toggle
      name='no-text'
      label='hello'
      count={6}
      disabled
      type='checkbox'
    />
  ));
|
using Abp.Authorization;
using LittleSurvey.Authorization.Roles;
using LittleSurvey.MultiTenancy;
using LittleSurvey.Users;
namespace LittleSurvey.Authorization
{
    /// <summary>
    /// Application permission checker bound to this project's
    /// <see cref="Role"/> and <see cref="User"/> types; ABP resolves
    /// permission requirements through this class.
    /// </summary>
    public class PermissionChecker : PermissionChecker<Role, User>
    {
        /// <param name="userManager">User manager the base checker uses to look up users.</param>
        public PermissionChecker(UserManager userManager)
            : base(userManager)
        {
        }
    }
}
|
package de.zib.scalaris.executor;
import com.ericsson.otp.erlang.OtpErlangException;
import de.zib.scalaris.RequestList;
import de.zib.scalaris.ResultList;
import de.zib.scalaris.UnknownException;
import de.zib.scalaris.operations.AddOnNrOp;
/**
* Implements a list change operation using the append operation of Scalaris.
* Supports an (optional) list counter key which is updated accordingly.
*
* Sub-classes need to override {@link #changeList(RequestList)} to perform the
* changes and issue a <em>single</em> {@link AddOnNrOp} operation and
* (optionally) a second {@link AddOnNrOp} for a list counter key!
*
* @author Nico Kruber, [email protected]
* @version 3.18
* @since 3.18
*/
public abstract class ScalarisChangeListOp2 implements ScalarisOp {
    /**
     * Key used to store the list.
     */
    protected final String key;
    /**
     * Key used to store the list counter.
     */
    protected final String countKey;
    /**
     * Creates a new list change operation.
     *
     * @param key the key to change the list at
     * @param countKey the key for the counter of the entries in the list
     * (may be <tt>null</tt>)
     */
    public ScalarisChangeListOp2(final String key, final String countKey) {
        this.key = key;
        this.countKey = countKey;
    }
    /**
     * @return the number of work phases — a single phase that issues the
     *         change requests (phase 1 below only verifies results)
     */
    public int workPhases() {
        return 1;
    }
    /**
     * Executes one phase of the operation: phase 0 issues the list change
     * via {@link #changeList(RequestList)}, phase 1 verifies the results
     * via {@link #checkChange(int, ResultList)}.
     *
     * @param phase    the phase to execute (0 or 1)
     * @param firstOp  the first operation of this op inside the result list
     * @param results  results of the previous phase (unused in phase 0)
     * @param requests request list to add new requests to (phase 0 only)
     *
     * @return number of processed operations in this phase
     *
     * @throws IllegalArgumentException if an unknown phase is given
     */
    public final int doPhase(final int phase, final int firstOp,
            final ResultList results, final RequestList requests)
            throws OtpErlangException, UnknownException,
            IllegalArgumentException {
        switch (phase) {
            case 0: return changeList(requests);
            case 1: return checkChange(firstOp, results);
            default:
                throw new IllegalArgumentException("No phase " + phase);
        }
    }
    /**
     * Changes the given page list and its counter (if present).
     *
     * Sub-classes overriding this method need to perform the changes and issue
     * a <em>single</em> {@link AddOnNrOp} operation and (optionally) a second
     * {@link AddOnNrOp} for a list counter key!
     *
     * @param requests
     * the request list
     *
     * @return number of processed operations (should be <tt>0</tt>)
     */
    protected abstract int changeList(final RequestList requests);
    /**
     * Verifies the list change operation.
     *
     * @param firstOp the first operation to process inside the result list
     * @param results the result list
     *
     * @return number of processed operations (<tt>1</tt> or <tt>2</tt>)
     */
    protected int checkChange(final int firstOp, final ResultList results)
            throws OtpErlangException, UnknownException {
        assert results != null;
        int checkedOps = 0;
        // NOTE(review): the first result is checked as an add/del-on-list
        // operation although the class doc speaks of AddOnNrOp — confirm
        // which operation sub-classes actually issue.
        results.processAddDelOnListAt(firstOp + checkedOps);
        ++checkedOps;
        if (countKey != null) {
            // optional second op: the list-counter update
            results.processAddOnNrAt(firstOp + checkedOps);
            ++checkedOps;
        }
        return checkedOps;
    }
}
|
namespace XmlRpcLight.DataTypes {
    /// <summary>
    /// Immutable wrapper for an XML-RPC integer value with value-based
    /// equality and implicit conversions to/from <see cref="int"/>.
    /// </summary>
    public class XmlRpcInt {
        private readonly int _value;
        /// <summary>Creates a wrapper holding zero.</summary>
        public XmlRpcInt() {
            _value = 0;
        }
        /// <summary>Creates a wrapper holding <paramref name="val"/>.</summary>
        public XmlRpcInt(int val) {
            _value = val;
        }
        public override string ToString() {
            return _value.ToString();
        }
        public override int GetHashCode() {
            return _value.GetHashCode();
        }
        /// <summary>Value equality: true only for another XmlRpcInt with the same value.</summary>
        public override bool Equals(
            object o) {
            if (o == null || !(o is XmlRpcInt))
                return false;
            var dbl = o as XmlRpcInt;
            return (dbl._value == _value);
        }
        // Null-safe: two null references compare equal, null != non-null.
        public static bool operator ==(
            XmlRpcInt xi,
            XmlRpcInt xj) {
            if (((object) xi) == null && ((object) xj) == null)
                return true;
            if (((object) xi) == null || ((object) xj) == null)
                return false;
            return xi._value == xj._value;
        }
        public static bool operator !=(
            XmlRpcInt xi,
            XmlRpcInt xj) {
            return !(xi == xj);
        }
        // NOTE(review): throws NullReferenceException when x is null —
        // callers must not convert a null XmlRpcInt to int.
        public static implicit operator int(XmlRpcInt x) {
            return x._value;
        }
        public static implicit operator XmlRpcInt(int x) {
            return new XmlRpcInt(x);
        }
    }
}
|
# Copyright 2014 - Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Get tempUrl for DU stored in swift."""
import hashlib
import hmac
import sys
import time
def make_temp_url(storage_host, container, app_name, account, secret, ttl,
                  protocol, method='GET', timestamp=None):
    """Build a Swift TempURL for the DU object.

    :param ttl: lifetime in seconds (string or int)
    :param timestamp: base time for the expiry; defaults to ``time.time()``
                      (parameter added so callers/tests can be deterministic)
    :returns: full URL including temp_url_sig and temp_url_expires
    """
    if timestamp is None:
        timestamp = time.time()
    expires = int(timestamp + int(ttl))
    path = '/v1/%s/%s/%s' % (account, container, app_name)
    hmac_body = '%s\n%s\n%s' % (method, expires, path)
    # hmac.new() requires bytes key/message on Python 3; the original
    # passed str and raised TypeError.
    sig = hmac.new(secret.encode('utf-8'), hmac_body.encode('utf-8'),
                   hashlib.sha1).hexdigest()
    base = '%s://%s' % (protocol, storage_host)
    return '%s%s?temp_url_sig=%s&temp_url_expires=%s' % (base, path, sig, expires)


def main(argv):
    """CLI entry point; returns a process exit code."""
    if len(argv) < 8:
        print('USAGE: python get-temp-url.py storage_host container'
              ' app_name account secret ttl protocol')
        return 1
    print(make_temp_url(*argv[1:8]))
    return 0


# Guarded so the module is importable (the original exited on import
# whenever fewer than 8 argv entries were present).
if __name__ == '__main__':
    sys.exit(main(sys.argv))
|
from .context import log, queue_job
import datetime
import ujson as json
def _hash_task(task):
""" Returns a unique hash for identify a task and its params """
params = task.get("params")
if params:
params = json.dumps(sorted(task["params"].items(), key=lambda x: x[0])) # pylint: disable=no-member
full = [str(task.get(x)) for x in ["path", "interval", "dailytime", "queue"]]
print full.extend([str(params)])
return " ".join(full)
class Scheduler(object):
    """Keeps MongoDB-stored scheduled tasks in sync and queues them when due.

    Note: this module targets Python 2 (``iteritems`` below and the print
    statement in ``_hash_task``).
    """
    def __init__(self, collection):
        # MongoDB collection holding one document per scheduled task.
        self.collection = collection
        self.all_tasks = []
        self.refresh()
    def refresh(self):
        """Reload the in-memory task cache from the collection."""
        self.all_tasks = list(self.collection.find())
    def sync_tasks(self, tasks):
        """ Performs the first sync of a list of tasks, often defined in the config file. """
        tasks_by_hash = {_hash_task(t): t for t in tasks}
        # Drop stored tasks that are no longer configured; keep matches.
        for task in self.all_tasks:
            if tasks_by_hash.get(task["hash"]):
                del tasks_by_hash[task["hash"]]
            else:
                self.collection.remove({"_id": task["_id"]})
                log.debug("Scheduler: deleted %s" % task["hash"])
        # Whatever remains is new and must be inserted/updated.
        for h, task in tasks_by_hash.iteritems():
            task["hash"] = h
            # Epoch sentinel so the task is considered due immediately.
            task["datelastqueued"] = datetime.datetime.fromtimestamp(0)
            if task.get("dailytime"):
                # Because MongoDB can store datetimes but not times,
                # we add today's date to the dailytime.
                # The date part will be discarded in check()
                task["dailytime"] = datetime.datetime.combine(
                    datetime.datetime.utcnow(), task["dailytime"])
                task["interval"] = 3600 * 24
            self.collection.find_one_and_update({"hash": task["hash"]}, {"$set": task}, upsert=True)
            log.debug("Scheduler: added %s" % task["hash"])
        self.refresh()
    def check(self):
        """Queue every task whose interval has elapsed (or whose dailytime is due)."""
        log.debug(
            "Scheduler checking for out-of-date scheduled tasks (%s scheduled)..." %
            len(
                self.all_tasks))
        for task in self.all_tasks:
            now = datetime.datetime.utcnow()
            interval = datetime.timedelta(seconds=task["interval"])
            last_time = now - interval
            if task.get("dailytime"):
                # Compare only the HH:MM:SS portion (dates were padded on
                # in sync_tasks and are meaningless here).
                dailytime = task.get("dailytime").time()
                time_datelastqueued = task.get(
                    "datelastqueued").time().isoformat()[0:8]
                time_dailytime = dailytime.isoformat()[0:8]
                if task.get(
                        "datelastqueued") and time_datelastqueued != time_dailytime:
                    log.debug(
                        "Adjusting the time of scheduled task %s from %s to %s" %
                        (task["_id"], time_datelastqueued, time_dailytime))
                    # Make sure we don't queue the task in a loop by adjusting
                    # the time
                    if time_datelastqueued < time_dailytime:
                        adjusted_datelastqueued = datetime.datetime.combine(
                            task.get("datelastqueued").date() -
                            datetime.timedelta(days=1),
                            dailytime)
                    else:
                        adjusted_datelastqueued = datetime.datetime.combine(
                            task.get("datelastqueued").date(), dailytime)
                    # We do find_and_modify and not update() because several check()
                    # may be happening at the same time.
                    self.collection.find_and_modify(
                        {
                            "_id": task["_id"],
                            "datelastqueued": task.get("datelastqueued")
                        },
                        {"$set": {
                            "datelastqueued": adjusted_datelastqueued
                        }}
                    )
                    self.refresh()
            # Atomically claim the task: only one concurrent check() wins
            # the datelastqueued compare-and-set below.
            task_data = self.collection.find_and_modify(
                {
                    "_id": task["_id"],
                    "datelastqueued": {"$lt": last_time}
                },
                {"$set": {
                    "datelastqueued": now
                }}
            )
            if task_data:
                queue_job(
                    task_data["path"],
                    task_data["params"],
                    queue=task.get("queue"))
                log.debug("Scheduler: queued %s" % task_data)
                self.refresh()
|
#include "if_true_else_if.h"
/*!
 * \brief foo
 *
 * Mandatory function comment.
 *
 * NOTE(review): x and y are both initialised to nullptr, so the guard
 * below is always false, the inner branches are dead code, and foo()
 * always returns nullptr; the `new ibp::Boolean(true)` would leak if it
 * ever ran. Given the file name (if_true_else_if.h) this looks like a
 * formatter/analysis test fixture, so the dead code is presumably
 * intentional — confirm before "fixing".
 *
 * \return ibp::Boolean*
 */
ibp::Boolean*
foo()
{
    ibp::Boolean* x = nullptr;
    ibp::Boolean* y = nullptr;
    if(( x != nullptr ) && ( y != nullptr ))
    {
        if( *( x ) == ibp::Boolean( true ) )
        {
            *( x ) = false;
        }
        else if( *( x ) == *( y ) )
        {
            y = new ibp::Boolean( true );
        }
    }
    return x;
}
|
# coding=utf-8
# Copyright (c) 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from unittest import TestCase
from hamcrest import assert_that, equal_to, only_contains, instance_of, raises
from storops.exception import UnityOneDnsPerNasServerError, \
UnityResourceNotFoundError
from storops.unity.resource.dns_server import UnityFileDnsServerList, \
UnityFileDnsServer
from storops.unity.resource.nas_server import UnityNasServer
from storops_test.unity.rest_mock import t_rest, patch_rest
__author__ = 'Cedric Zhuang'
class UnityFileDnsServerTest(TestCase):
    """Unit tests for UnityFileDnsServer / UnityFileDnsServerList.

    All REST traffic is served from canned responses via @patch_rest;
    no real array is contacted.
    """
    @patch_rest
    def test_get_properties(self):
        server = UnityFileDnsServer('dns_2', cli=t_rest())
        assert_that(server.existed, equal_to(True))
        assert_that(server.addresses, only_contains('10.244.209.72'))
        assert_that(server.domain, equal_to('win2012.dev'))
        assert_that(server.nas_server, instance_of(UnityNasServer))
    @patch_rest
    def test_get_all(self):
        servers = UnityFileDnsServerList(cli=t_rest())
        assert_that(len(servers), equal_to(1))
    @patch_rest
    def test_create_one_dns_each_nas_server(self):
        # A NAS server may only have one DNS server; creating a second
        # must raise.
        def f():
            UnityFileDnsServer.create(t_rest(), 'nas_2', 'emc.dev',
                                      ['2.2.2.2', '3.3.3.3'])
        assert_that(f, raises(UnityOneDnsPerNasServerError, 'Only one DNS'))
    @patch_rest
    def test_create_success(self):
        server = UnityNasServer.get(t_rest(), 'nas_4')
        dns = UnityFileDnsServer.create(t_rest(), server, 'emc.dev',
                                        ['2.2.2.2', '3.3.3.3'])
        assert_that(dns.addresses, only_contains('2.2.2.2', '3.3.3.3'))
    @patch_rest
    def test_delete_not_found(self):
        def f():
            UnityFileDnsServer.get(t_rest(), 'dns_30').delete()
        assert_that(f, raises(UnityResourceNotFoundError, 'does not exist'))
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer MaxHeight and MaxWidth Override Options.
From build dir, run: ctest -R PyQgsServerWMSGetMapSizeServer -V
.. note:: This test needs env vars to be set before the server is
configured for the first time, for this
reason it cannot run as a test case of another server
test.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Marco Bernasocchi'
__date__ = '01/04/2019'
__copyright__ = 'Copyright 2019, The QGIS Project'
import os
# Needed on Qt 5 so that the serialization of XML is consistent among all
# executions
os.environ['QT_HASH_SEED'] = '1'
from qgis.testing import unittest
from test_qgsserver import QgsServerTestBase
from test_qgsserver_wms_getmap_size_project import make_request
class TestQgsServerWMSGetMapSizeServer(QgsServerTestBase):
    """QGIS Server WMS tests for the server-level GetMap max size override."""
    # Set to True to re-generate reference files for this class
    regenerate_reference = False
    def setUp(self):
        # Server-level limits must be in the environment before the server
        # is configured for the first time (see module docstring).
        os.environ['QGIS_SERVER_WMS_MAX_WIDTH'] = '3000'
        os.environ['QGIS_SERVER_WMS_MAX_HEIGHT'] = '3000'
        super(TestQgsServerWMSGetMapSizeServer, self).setUp()
        self.project = os.path.join(self.testdata_path, "test_project_with_size.qgs")
        self.expected_too_big = self.strip_version_xmlns(b'<ServiceExceptionReport version="1.3.0" xmlns="http://www.opengis.net/ogc">\n <ServiceException code="InvalidParameterValue">The requested map size is too large</ServiceException>\n</ServiceExceptionReport>\n')
    def test_wms_getmap_invalid_size_server(self):
        # test the 3000 limit from server is overriding the less conservative 5000 in the project
        r = make_request(self, 3001, 3000)
        self.assertEqual(self.strip_version_xmlns(r), self.expected_too_big)
# Allow running this module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
//----------------------------------------------------------------------------
// XC program; finite element analysis code
// for structural analysis and design.
//
// Copyright (C) Luis Claudio Pérez Tato
//
// This program derives from OpenSees <http://opensees.berkeley.edu>
// developed by the «Pacific earthquake engineering research center».
//
// Except for the restrictions that may arise from the copyright
// of the original program (see copyright_opensees.txt)
// XC is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This software is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
//
// You should have received a copy of the GNU General Public License
// along with this program.
// If not, see <http://www.gnu.org/licenses/>.
//----------------------------------------------------------------------------
/* ****************************************************************** **
** OpenSees - Open System for Earthquake Engineering Simulation **
** Pacific Earthquake Engineering Research Center **
** **
** **
** (C) Copyright 1999, The Regents of the University of California **
** All Rights Reserved. **
** **
** Commercial use of this program without express permission of the **
** University of California, Berkeley, is strictly prohibited. See **
** file 'COPYRIGHT' in main directory for information on usage and **
** redistribution, and for a DISCLAIMER OF ALL WARRANTIES. **
** **
** Developed by: **
** Frank McKenna ([email protected]) **
** Gregory L. Fenves ([email protected]) **
** Filip C. Filippou ([email protected]) **
** **
** ****************************************************************** */
// $Revision: 1.1.1.1 $
// $Date: 2000/09/15 08:23:30 $
// $Source: /usr/local/cvs/OpenSees/SRC/utility/tagged/storage/TaggedObjectIter.h,v $
// File: ~/utility/tagged/storage/TaggedObjectIter.h
//
// Written: fmk
// Created: Fri Sep 20 15:27:47: 1996
// Revision: A
//
// Description: This file contains the class definition for TaggedObjectIter.
// TaggedObjectIter is an abstract base class.
#ifndef TaggedObjectIter_h
#define TaggedObjectIter_h
namespace XC {
class TaggedObject;
//! @ingroup Tagged
//
//! @brief An TaggedObjectIter is an iter for returning the Components
//! of an object of class TaggedObjectStorage.
//! It must be written for each subclass of TaggedObjectStorage (this is done
//! for efficiency reasons), hence the abstract base class.
class TaggedObjectIter
  {
  public:
    //! @brief Default constructor.
    TaggedObjectIter(void) {}
    //! @brief Virtual destructor (base class of concrete iterators).
    virtual ~TaggedObjectIter(void) {}
    //! @brief Reset the iterator so iteration starts over (pure virtual).
    virtual void reset(void)= 0;
    //! @brief Return a pointer to the next component (pure virtual).
    virtual TaggedObject *operator()(void)= 0;
  };
} // end of XC namespace
#endif
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.