content
stringlengths 10
4.9M
|
---|
package flag
import (
"github.com/urfave/cli"
"time"
)
// Value wraps a cli.Context and provides typed lookup of command-line
// flag values, both in the local command scope and the global (app)
// scope. Each lookup returns the type's zero value (0, "", nil, false)
// when the named flag is not found.
type Value interface {
	// Kernel returns the underlying cli.Context.
	Kernel() *cli.Context

	// Numeric flag lookups; Global* variants read the app-level flag set.
	Int(name string) int
	GlobalInt(name string) int
	Int64(name string) int64
	GlobalInt64(name string) int64
	Uint(name string) uint
	GlobalUint(name string) uint
	Uint64(name string) uint64
	GlobalUint64(name string) uint64
	IntSlice(name string) []int
	GlobalIntSlice(name string) []int
	Int64Slice(name string) []int64
	GlobalInt64Slice(name string) []int64

	// String, duration and boolean flag lookups.
	String(name string) string
	GlobalString(name string) string
	StringSlice(name string) []string
	GlobalStringSlice(name string) []string
	Duration(name string) time.Duration
	GlobalDuration(name string) time.Duration
	Bool(name string) bool
	GlobalBool(name string) bool

	// NumFlags reports how many flags were set.
	NumFlags() int

	// Set / IsSet mutate and query flags by name; Global* variants act
	// on the global flag set.
	Set(name, value string) error
	GlobalSet(name, value string) error
	IsSet(name string) bool
	GlobalIsSet(name string) bool
	FlagNames() (names []string)
	GlobalFlagNames() (names []string)

	// NArg and Args expose the positional command-line arguments.
	NArg() int
	Args() Args
}
// value is the concrete Value implementation backed by a cli.Context.
type value struct {
	ctx *cli.Context
}

// NewValue wraps the given cli.Context in a Value.
func NewValue(ctx *cli.Context) Value {
	return &value{ctx: ctx}
}

// Kernel returns the underlying cli.Context.
func (v *value) Kernel() *cli.Context {
	return v.ctx
}
// Int looks up the flag of a local IntFlag, returns
// 0 if not found
func (v *value) Int(name string) int {
return v.ctx.Int(name)
}
// GlobalInt looks up the flag of a global IntFlag, returns
// 0 if not found
func (v *value) GlobalInt(name string) int {
return v.ctx.GlobalInt(name)
}
// Int64 looks up the flag of a local Int64Flag, returns
// 0 if not found
func (v *value) Int64(name string) int64 {
return v.ctx.Int64(name)
}
// GlobalInt64 looks up the flag of a global Int64Flag, returns
// 0 if not found
func (v *value) GlobalInt64(name string) int64 {
return v.ctx.GlobalInt64(name)
}
// Uint looks up the flag of a local UintFlag, returns
// 0 if not found
func (v *value) Uint(name string) uint {
return v.ctx.Uint(name)
}
// GlobalUint looks up the flag of a global UintFlag, returns
// 0 if not found
func (v *value) GlobalUint(name string) uint {
return v.ctx.GlobalUint(name)
}
// Uint64 looks up the flag of a local Uint64Flag, returns
// 0 if not found
func (v *value) Uint64(name string) uint64 {
return v.ctx.Uint64(name)
}
// GlobalUint64 looks up the flag of a global Uint64Flag, returns
// 0 if not found
func (v *value) GlobalUint64(name string) uint64 {
return v.ctx.GlobalUint64(name)
}
// IntSlice looks up the flag of a local IntSliceFlag, returns
// nil if not found
func (v *value) IntSlice(name string) []int {
return v.ctx.IntSlice(name)
}
// GlobalIntSlice looks up the flag of a global IntSliceFlag, returns
// nil if not found
func (v *value) GlobalIntSlice(name string) []int {
return v.ctx.GlobalIntSlice(name)
}
// Int64Slice looks up the flag of a local Int64SliceFlag, returns
// nil if not found
func (v *value) Int64Slice(name string) []int64 {
return v.ctx.Int64Slice(name)
}
// GlobalInt64Slice looks up the flag of a global Int64SliceFlag, returns
// nil if not found
func (v *value) GlobalInt64Slice(name string) []int64 {
return v.ctx.GlobalInt64Slice(name)
}
// String looks up the flag of a local StringFlag, returns
// "" if not found
func (v *value) String(name string) string {
return v.ctx.String(name)
}
// GlobalString looks up the flag of a global StringFlag, returns
// "" if not found
func (v *value) GlobalString(name string) string {
return v.ctx.GlobalString(name)
}
// StringSlice looks up the flag of a local StringSliceFlag, returns
// nil if not found
func (v *value) StringSlice(name string) []string {
return v.ctx.StringSlice(name)
}
// GlobalStringSlice looks up the flag of a global StringSliceFlag, returns
// nil if not found
func (v *value) GlobalStringSlice(name string) []string {
return v.ctx.GlobalStringSlice(name)
}
// Duration looks up the flag of a local DurationFlag, returns
// 0 if not found
func (v *value) Duration(name string) time.Duration {
return v.ctx.Duration(name)
}
// GlobalDuration looks up the flag of a global DurationFlag, returns
// 0 if not found
func (v *value) GlobalDuration(name string) time.Duration {
return v.ctx.GlobalDuration(name)
}
// Bool looks up the flag of a local BoolFlag, returns
// false if not found
func (v *value) Bool(name string) bool {
	return v.ctx.Bool(name)
}

// GlobalBool looks up the flag of a global BoolFlag, returns
// false if not found
func (v *value) GlobalBool(name string) bool {
	return v.ctx.GlobalBool(name)
}
// NumFlags returns the number of flags set.
func (v *value) NumFlags() int {
	return v.ctx.NumFlags()
}

// Set sets a local context flag to the given value.
func (v *value) Set(name, value string) error {
	return v.ctx.Set(name, value)
}

// GlobalSet sets a flag to the given value on the global flag set.
func (v *value) GlobalSet(name, value string) error {
	return v.ctx.GlobalSet(name, value)
}

// IsSet determines if the local flag was actually set.
func (v *value) IsSet(name string) bool {
	return v.ctx.IsSet(name)
}

// GlobalIsSet determines if the global flag was actually set.
func (v *value) GlobalIsSet(name string) bool {
	return v.ctx.GlobalIsSet(name)
}

// FlagNames returns a slice of flag names used in this context.
func (v *value) FlagNames() (names []string) {
	return v.ctx.FlagNames()
}

// GlobalFlagNames returns a slice of global flag names used by the app.
func (v *value) GlobalFlagNames() (names []string) {
	return v.ctx.GlobalFlagNames()
}
// Args is an alias for cli.Args and contains the app's console arguments.
type Args = cli.Args

// NArg returns the number of the command line arguments.
func (v *value) NArg() int {
	return v.ctx.NArg()
}

// Args returns the command line arguments associated with the context.
func (v *value) Args() Args {
	return v.ctx.Args()
}
|
#include <cstdlib>
#include <iostream>
using namespace std;
int main() {
    // Look up an environment variable, falling back to a default when it
    // is unset. std::getenv returns nullptr for missing variables.
    const auto getenv_or = [](const char* env_var, const char* default_value) -> const char* {
        if (const char* env = getenv(env_var)) {
            return env;
        }
        return default_value;
    };
    // HOME is normally set; HOMEE demonstrates the fallback path.
    cout << getenv_or("HOME", "Not found") << endl;
    cout << getenv_or("HOMEE", "Not found") << endl;
}
|
Australia has rejected a claim by rights group Amnesty International that conditions on a tiny South Pacific island where about 400 Australian-bound asylum seekers are held "amount to torture".
Under Australia's tough immigration policy, asylum seekers intercepted trying to reach the country by boat are sent for processing at a camp in Nauru or to Manus Island in Papua New Guinea and are not eligible for resettlement in Australia.
Many asylum seekers on Nauru are being driven to attempt suicide to escape the prison-like conditions they face in indefinite detention, Amnesty alleged in a detailed report.
Australian PM denies Nauru prison camp claims
"I reject that claim totally. It is absolutely false," Malcolm Turnbull, the prime minister, told Australian Broadcasting Corp radio on Tuesday of the torture claim.
"The Australian government's commitment is compassionate and strong."
The detention was a "systematic regime of neglect and cruelty," said Amnesty, adding that its findings were based on both desk research and field work in Nauru between July and October.
"The conditions on Nauru - refugees' severe mental anguish, the intentional nature of the system, and the fact that the goal of offshore processing is to intimidate or coerce people to achieve a specific outcome - amounts to torture," it added.
WATCH: Nauru: Australia's Guantanamo Bay?
The Nauru government did not respond directly to the Amnesty report but criticised an ABC TV story that made similar allegations and quoted children on Nauru, where refugees are mainly from Iran and Afghanistan.
"It was clear these children were coached," the government said in a statement, calling the ABC report "biased political propaganda and lies", and "an insult to the people of Nauru".
A damning report
Almost 60 people, or about 15 percent of the 410 men, women and children on the island, reported they had either attempted suicide or had thoughts about harming themselves, Amnesty said.
Despite receiving refugee status, they continue to be confined to poor accommodation with little access to medical care, it said, adding that children, who number just over a tenth of the detainees, suffered disproportionately.
Australia accused of flouting laws
"I met children as young as nine who had already tried to kill themselves and were talking openly about ending their lives," said Anna Neistat, an Amnesty International official.
Amnesty's report, titled Island of Despair, joins a chorus of criticism by human rights groups of Australia's immigration policy, and comes just weeks after the UN said Nauru was failing to protect children.
On Tuesday, the UN issued fresh criticism of Australia's human rights record.
Australia's policy of jail terms of up to two years for detention centre workers who reveal details of the operation curtails free speech, said Michel Forst, the UN independent observer for human rights defenders.
Turnbull's government has been trying to organise resettlement of the asylum seekers in other countries, but has so far struck a deal only with Cambodia which has proved unpopular with refugees. |
from pathlib import Path
from tensorforce.agents import Agent
from tensorforce.environments import Environment
from tensorforce.execution import Runner
from bad_seeds.simple.bad_seeds_04_bollux import Bollux
def main():
    """Train an A2C agent on the Bollux "bad seeds" environment for one
    million episodes, writing TensorBoard summaries and periodic
    checkpoints under the agent_04_bollux_1000000 directories.
    """
    bad_seeds_environment = Environment.create(
        environment=Bollux, seed_count=10, bad_seed_count=3, max_episode_length=100
    )
    # Notes from prior runs:
    #   20200820-223031
    #   20200820-233243
    #   batch_size 1000 does not get smarter or dumber
    #   batch_size 100 20200821-095410 gets dumber
    #   try batch size 10000 !
    agent = Agent.create(
        agent="a2c",
        batch_size=10000,  # changed for 04 but was this a mistake? no
        horizon=50,  # changed from 100 to 50 for agent_04
        discount=0.97,  # new for agent_04
        # exploration=0.05,  # turned off for agent_04 - turn on for 05?
        l2_regularization=0.1,
        # entropy_regularization=0.2,  # turned off for agent_03
        variable_noise=0.5,  # changed from 0.1 to 0.5 for agent_04
        environment=bad_seeds_environment,
        summarizer=dict(
            directory="training_data/agent_04_bollux_1000000/summaries",
            # list of labels, or 'all'
            labels=["graph", "entropy", "kl-divergence", "losses", "rewards"],
            frequency=100,  # store values every 100 timesteps
        ),
        saver=dict(
            directory='saved_models/agent_04_bollux_1000000/checkpoints',
            frequency=6000  # save checkpoint every 6000 seconds (100 minutes)
        ),
    )
    # this is the batch_size = 10000 version
    # I hope it is the last env 04
    runner = Runner(agent=agent, environment=bad_seeds_environment)
    runner.run(num_episodes=1000000)
    # Earlier manual-checkpointing loop, kept for reference:
    # for i in range(100):
    #     print("running 10000 episodes")
    #     runner.run(num_episodes=10000)
    #     print("saving the agent")
    #     directory = Path(f"saved_models/agent_04_env_04_1000000/10000_{i}/checkpoints")
    #     if directory.exists():
    #         directory.rmdir()
    #     directory.mkdir(parents=True, exist_ok=True)
    #     agent.save(directory=str(directory), format="numpy")
    bad_seeds_environment.close()
    agent.close()


if __name__ == "__main__":
    main()
|
/**
 * Tests description of BatchPhaseSpec.
 */
public class BatchPhaseSpecTest {

    @Test
    public void testDescription() throws Exception {
        /*
         * Pipeline under test:
         *   source1 --|
         *             |--> sink.connector
         *   source2 --|
         */
        Map<String, String> props = new HashMap<>();
        // Connector sink plugin with no properties and no artifact.
        PluginSpec connectorSpec = new PluginSpec(Constants.Connector.PLUGIN_TYPE, "connector",
            ImmutableMap.<String, String>of(), null);
        ArtifactId artifactId = new ArtifactId("art", new ArtifactVersion("1.0.0"), ArtifactScope.USER);
        // Two batch sources feeding the single connector sink; source2 also
        // declares an input schema.
        PipelinePhase.Builder builder =
            PipelinePhase.builder(ImmutableSet.of(BatchSource.PLUGIN_TYPE, Constants.Connector.PLUGIN_TYPE))
                .addStage(StageSpec.builder("source1", new PluginSpec(BatchSource.PLUGIN_TYPE, "src", props, artifactId))
                    .build())
                .addStage(StageSpec.builder("source2", new PluginSpec(BatchSource.PLUGIN_TYPE, "src", props, artifactId))
                    .addInputSchema("a", Schema.recordOf("stuff", Schema.Field.of("x", Schema.of(Schema.Type.INT))))
                    .build())
                .addStage(StageSpec.builder("sink.connector", connectorSpec).build())
                .addConnection("source1", "sink.connector")
                .addConnection("source2", "sink.connector");
        BatchPhaseSpec phaseSpec =
            new BatchPhaseSpec("phase-1", builder.build(), new Resources(), new Resources(), new Resources(),
                false, false, Collections.<String, String>emptyMap(),
                0, Collections.<String, String>emptyMap(), false);
        // The description enumerates all sources and sinks of the phase.
        Assert.assertEquals("Sources 'source1', 'source2' to sinks 'sink.connector'.", phaseSpec.getDescription());
    }
}
/// Returns the default value of `T`; shorthand for `T::default()`.
pub fn default<T: Default>() -> T {
    T::default()
}
|
Merril Collection of Science Fiction, Speculation & Fantasy
Search Merril Collection
The Merril Collection of Science Fiction, Speculation and Fantasy is a non-circulating research collection of over 80,000 items of science fiction, fantasy and speculative fiction, as well as magic realism, experimental writing and some materials in 'fringe' areas such as parapsychology, UFOs, Atlantean legends etc.
We collect:
Non-fiction critical works relating to science fiction and fantasy.
Science fiction and fantasy art books
Subject reference tools
Limited editions
Out of print materials
Original manuscripts
Correspondence
Critical, biographical and bibliographical materials
Audiovisual and multimedia materials
All items in the Merril Collection are fully catalogued. We also maintain an extensive index to our non-fiction periodicals, and a title/author index to every short story published in every anthology or short story collection. |
package client
// Field name constants for the RKETaint type, used when referring to
// its fields by name in the generated client.
const (
	RKETaintType          = "rkeTaint"
	RKETaintFieldEffect   = "effect"
	RKETaintFieldKey      = "key"
	RKETaintFieldTimeAdded = "timeAdded"
	RKETaintFieldValue    = "value"
)

// RKETaint is the client representation of an RKE node taint
// (key/value pair with an effect and the time it was added).
type RKETaint struct {
	Effect    string `json:"effect,omitempty" yaml:"effect,omitempty"`
	Key       string `json:"key,omitempty" yaml:"key,omitempty"`
	TimeAdded string `json:"timeAdded,omitempty" yaml:"timeAdded,omitempty"`
	Value     string `json:"value,omitempty" yaml:"value,omitempty"`
}
|
// making custom builtin module
// Custom builtin daScript module exposing the ColorAnnotation handle
// type and the makeGray function to scripts as "make_gray".
class Module_Tutorial03 : public Module {
public:
    Module_Tutorial03() : Module("tutorial_03") {
        ModuleLibrary lib;
        lib.addModule(this);
        lib.addBuiltInModule();
        // Register the ColorAnnotation handle type with this module.
        addAnnotation(make_smart<ColorAnnotation>(lib));
        // Bind makeGray as script-callable "make_gray"; copy-or-move call
        // node because it returns a value type. Declared side-effect free.
        addExtern<DAS_BIND_FUN(makeGray),SimNode_ExtFuncCallAndCopyOrMove>(*this, lib, "make_gray",
            SideEffects::none, "makeGray");
    }
}; // fixed: class definition must be terminated with a semicolon
<filename>src/xrEngine/Environment_render.cpp
#include "stdafx.h"
#pragma hdrstop
#include "Environment.h"
#ifndef _EDITOR
#include "Render.h"
#endif
#include "xr_efflensflare.h"
#include "Rain.h"
#include "thunderbolt.h"
#ifndef _EDITOR
#include "IGame_Level.h"
#endif
//-----------------------------------------------------------------------------
// Environment render
//-----------------------------------------------------------------------------
// RenderSky delegates sky rendering to the render backend. Outside the
// editor, nothing is drawn until a game level exists.
void CEnvironment::RenderSky()
{
#ifndef _EDITOR
    if (0 == g_pGameLevel)
        return;
#endif
    m_pRender->RenderSky(*this);
}

// RenderClouds draws the cloud layer, skipping the draw entirely when the
// current environment's cloud color alpha is (near) zero.
void CEnvironment::RenderClouds()
{
#ifndef _EDITOR
    if (0 == g_pGameLevel)
        return;
#endif
    // draw clouds
    if (fis_zero(CurrentEnv->clouds_color.w, EPS_L))
        return;
    m_pRender->RenderClouds(*this);
}

// RenderFlares renders the lens-flare effect.
void CEnvironment::RenderFlares()
{
#ifndef _EDITOR
    if (0 == g_pGameLevel)
        return;
#endif
    // 1
    eff_LensFlare->Render(false, true, true);
}

// RenderLast renders the late weather effects (rain, then thunderbolts),
// after the rest of the environment has been drawn.
void CEnvironment::RenderLast()
{
#ifndef _EDITOR
    if (0 == g_pGameLevel)
        return;
#endif
    // 2
    eff_Rain->Render();
    eff_Thunderbolt->Render();
}
// OnDeviceCreate (re)creates render-device resources: the backend's own
// resources, then per-descriptor resources for every weather cycle and
// weather FX, and finally refreshes the current environment state.
void CEnvironment::OnDeviceCreate()
{
    m_pRender->OnDeviceCreate();
    // weathers
    for (auto& cycle : WeatherCycles)
    {
        for (auto& envDescriptor : cycle.second)
            envDescriptor->on_device_create();
    }
    // effects
    for (auto& cycle : WeatherFXs)
    {
        for (auto& envDescriptor : cycle.second)
            envDescriptor->on_device_create();
    }
    Invalidate();
    OnFrame();
}

// OnDeviceDestroy releases everything OnDeviceCreate allocated, in the
// same order, and tears down the current environment.
void CEnvironment::OnDeviceDestroy()
{
    m_pRender->OnDeviceDestroy();
    // weathers
    for (auto& cycle : WeatherCycles)
    {
        for (auto& envDescriptor : cycle.second)
            envDescriptor->on_device_destroy();
    }
    // effects
    for (auto& cycle : WeatherFXs)
    {
        for (auto& envDescriptor : cycle.second)
            envDescriptor->on_device_destroy();
    }
    CurrentEnv->destroy();
}

#ifdef _EDITOR
// ED_Reload is an editor-only helper that recreates all device resources
// by cycling destroy/create.
void CEnvironment::ED_Reload()
{
    OnDeviceDestroy();
    OnDeviceCreate();
}
#endif
|
/**
* Check whether the transaction response from server is valid or not
* Valid if both deeplink URL and qr code URL aren't empty, or at least one of them is not,
* depending on which one that will be used
*
* @param response transaction response
* @return validity of response
*/
private boolean isResponseValid(TransactionResponse response) {
if (response == null) {
return false;
} else {
if (TextUtils.isEmpty(response.getDeeplinkUrl()) && !isTablet) {
return false;
}
if (TextUtils.isEmpty(response.getQrCodeUrl()) && isTablet) {
return false;
}
}
return true;
} |
<reponame>cerinuts/RegX2Link
import { ISettingRead } from '@rocket.chat/apps-engine/definition/accessors/ISettingRead';
import { ISetting, SettingType } from '@rocket.chat/apps-engine/definition/settings';
import { IConfigurationExtend } from '@rocket.chat/apps-engine/definition/accessors';
import yaml = require('js-yaml');
export interface IPattern {
name: string;
searchPattern: RegExp;
issuePattern: RegExp;
link: string;
}
export class Settings {
    // Spans that must never be linkified: fenced code, ~~~ blocks, inline
    // code, bare URLs and pre-existing markdown links.
    public static readonly EXCLUDE_PATTERNS: string = '\\`\\`\\`[^\\`]+\\`\\`\\`' +
        '|\\~\\~\\~[^\\~]+\\~\\~\\~' +
        '|\\`[^\\`]+\\`' +
        '|[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b[-a-zA-Z0-9()@:%_\\+.~#?&//=]*' +
        '|\\[[^\\[\\]]+\\]\\([^\\(\\)]+\\)';
    // Require the match to be delimited by non-alphanumerics (or line edges).
    public static readonly POSITIVE_LOOKBEHIND: string = '(?<=^|[^a-zA-Z0-9])';
    public static readonly POSITIVE_LOOKAHEAD: string = '(?=[^a-zA-Z0-9]|$)';
    public static readonly DEFAULT_PATTERN_YAML: string = `# turns "I:issue-1234" to a link to https://example.com/issues/issue-1234
example:
  link: "https://example.com/issues/%ISSUE%"
  searchPattern: "I:[a-zA-Z]+-[0-9]+"
  issuePattern: "[a-zA-Z]+-[0-9]+"
# turns "GH:20" to a link to https://github.com/ceriath/regx2link/issues/20
github:
  link: "https://github.com/ceriath/regx2link/issues/%ISSUE%"
  searchPattern: "GH:[0-9]+"
  issuePattern: "[0-9]+"
`;
    public static readonly DEFAULT_MAX_SEARCH_ATTEMPTS: number = 25;
    public static readonly MAX_SEARCH_ATTEMPTS_ID: string = 'max-search-attempts';
    public static readonly PATTERN_YAML_ID: string = 'pattern-yaml';

    private _patterns: IPattern[];
    // Patterns parsed from the YAML setting.
    get patterns(): IPattern[] {
        return this._patterns;
    }

    private _maxSearchAttempts: number;
    // Upper bound on regex search attempts per message.
    get maxSearchAttempts(): number {
        return this._maxSearchAttempts;
    }

    /**
     * Registers the two app settings (pattern YAML and max search attempts)
     * with Rocket.Chat.
     */
    public async init(configuration: IConfigurationExtend) {
        await configuration.settings.provideSetting({
            id: Settings.PATTERN_YAML_ID,
            type: SettingType.CODE,
            packageValue: Settings.DEFAULT_PATTERN_YAML,
            required: true,
            public: true,
            i18nLabel: 'Config_YAML',
            i18nDescription: 'Config_YAML_Descriptor',
        });
        await configuration.settings.provideSetting({
            id: Settings.MAX_SEARCH_ATTEMPTS_ID,
            type: SettingType.NUMBER,
            packageValue: Settings.DEFAULT_MAX_SEARCH_ATTEMPTS,
            required: true,
            public: true,
            i18nLabel: 'Max_Search_Attempts',
            i18nDescription: 'Max_Search_Attempts_Description',
        });
    }

    /**
     * Reacts to a settings change in the admin UI.
     */
    public onUpdate(setting: ISetting) {
        switch (setting.id) {
            case Settings.PATTERN_YAML_ID:
                this.updatePatterns(setting.value);
                break;
            case Settings.MAX_SEARCH_ATTEMPTS_ID:
                this._maxSearchAttempts = setting.value;
                break;
        }
    }

    /**
     * Loads both settings from storage (used at app startup).
     */
    public async setFrom(settings: ISettingRead) {
        // BUG FIX: the max-search-attempts value was previously passed to
        // updatePatterns(), which both failed to set _maxSearchAttempts and
        // tried to parse a number as pattern YAML.
        this._maxSearchAttempts = await settings.getValueById(Settings.MAX_SEARCH_ATTEMPTS_ID);
        this.updatePatterns(await settings.getValueById(Settings.PATTERN_YAML_ID));
    }

    /**
     * Parses the pattern YAML and rebuilds the compiled RegExp list.
     * Each pattern is wrapped with the exclusion alternation and the
     * lookaround delimiters so matches inside code/links are skipped.
     */
    private updatePatterns(value: string) {
        const loadedPatterns = yaml.load(value);
        const tmpPatterns: IPattern[] = [];
        for (const patternName in loadedPatterns) {
            tmpPatterns.push({
                name: patternName,
                link: loadedPatterns[patternName]['link'],
                searchPattern: new RegExp(
                    Settings.EXCLUDE_PATTERNS +
                    "|(" +
                    Settings.POSITIVE_LOOKBEHIND +
                    loadedPatterns[patternName]['searchPattern'] +
                    Settings.POSITIVE_LOOKAHEAD +
                    ")"
                    , 'g'),
                issuePattern: new RegExp(
                    Settings.EXCLUDE_PATTERNS +
                    "|(" +
                    Settings.POSITIVE_LOOKBEHIND +
                    loadedPatterns[patternName]['issuePattern'] +
                    Settings.POSITIVE_LOOKAHEAD +
                    ")"
                    , 'g')
            });
        }
        this._patterns = tmpPatterns;
    }
}
|
<filename>tsne/tsne.py
import numpy as np
import os
import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector
from tensorflow.examples.tutorials.mnist import input_data
import sys
sys.path.append("../")
from libs.configs import cfgs
LOG_DIR = './dcl_log/{}'.format(cfgs.VERSION)
SPRITE_FILE = 'dota_sprite.jpg'
META_FIEL = "dcl_meta.tsv"
TENSOR_NAME = "FINAL_LOGITS"
# Generate the log files needed to visualise the final output-layer vectors.
def visualisation(final_result):
    """Write a TensorBoard PROJECTOR config plus a checkpoint for
    final_result so the embedding can be explored interactively.

    final_result: tensor of final-layer logits, one row per sample.
    """
    # PROJECTOR can only visualise TensorFlow variables, so copy the final
    # output-layer vectors into a new variable.
    y = tf.Variable(final_result, name=TENSOR_NAME)
    summary_writer = tf.summary.FileWriter(LOG_DIR)
    # ProjectorConfig drives generation of the projector log files.
    config = projector.ProjectorConfig()
    # Register one embedding to be visualised.
    embedding = config.embeddings.add()
    # Name of the TensorFlow variable this embedding corresponds to.
    embedding.tensor_name = y.name
    # Specify where you find the metadata.
    # Per-row labels for the embedding (e.g. the true class of each test
    # image, or the word for a word-vector ID). Optional: without it the
    # points are unlabelled. (META_FIEL is a misspelling of META_FILE kept
    # to match the existing module-level constant.)
    embedding.metadata_path = META_FIEL
    # Specify where you find the sprite image (optional). Without a sprite,
    # each point is drawn as a dot instead of a thumbnail.
    # embedding.sprite.image_path = SPRITE_FILE
    # single_image_dim gives the size of one thumbnail within the sprite so
    # the correct tile can be cropped out.
    # embedding.sprite.single_image_dim.extend([28, 28])
    # Write the PROJECTOR configuration into the log directory.
    projector.visualize_embeddings(summary_writer, config)
    # Create a session, initialise the new variable, and save a checkpoint
    # so the projector can read the variable's value.
    sess = tf.InteractiveSession()
    sess.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    saver.save(sess, os.path.join(LOG_DIR, "model"))
    summary_writer.close()
# Load the saved output-layer matrix and write the PROJECTOR log files
# so the embedding can be inspected in TensorBoard.
def main(argv=None):
    final_result = np.load(os.path.join(LOG_DIR, "final_logits.npy"))  # [:25000, :]
    print(final_result.shape)
    final_result = tf.constant(final_result, tf.float32)
    visualisation(final_result)


if __name__ == '__main__':
    # Pin to a single GPU; presumably device 3 is the free one — confirm
    # against the machine this runs on.
    os.environ["CUDA_VISIBLE_DEVICES"] = '3'
    main()
/**
* remove a value record from the records array.
*
* This method is not loc sensitive, it resets loc to = dimsloc so no worries.
*
* @param row - the row of the value record you wish to remove
* @param col - a record supporting the CellValueRecordInterface.
* @see CellValueRecordInterface
*/
public void removeValueRecord(int row, CellValueRecordInterface col) {
LOGGER.atDebug().log("remove value record row {}", box(row));
_rowsAggregate.removeCell(col);
} |
import { defaultOrderByFn } from 'react-table';
/**
 * Wraps react-table's default orderBy so that rows flagged with
 * `original.emptyRow` are kept together regardless of the configured
 * sort functions, by short-circuiting the comparison before the real
 * sort function runs.
 *
 * @param rows       react-table row objects
 * @param functions  per-column sort comparators
 * @param directions per-column directions (false/'desc' means descending)
 */
export const orderByFn = (rows, functions, directions) => {
  const wrapSortFn = (sortFn, index) => {
    const desc = directions[index] === false || directions[index] === 'desc';
    return (rowA, rowB) => {
      // Empty rows compare as extreme values (sign flipped for desc so the
      // final ordering puts them on the same end either way).
      if (rowA.original?.emptyRow && !rowB.original?.emptyRow) {
        return desc ? -1 : 1;
      }
      if (!rowA.original?.emptyRow && rowB.original?.emptyRow) {
        return desc ? 1 : -1;
      }
      if (rowA.original?.emptyRow && rowB.original?.emptyRow) {
        return 0;
      }
      // Both rows are real data: defer to the column's comparator.
      return sortFn(rowA, rowB);
    };
  };
  const wrappedSortfunctions = functions.map(wrapSortFn);
  return defaultOrderByFn(rows, wrappedSortfunctions, directions);
};
// copied from https://github.com/tannerlinsley/react-table/blob/f97fb98509d0b27cc0bebcf3137872afe4f2809e/src/utils.js#L320-L347 (13. Jan 2021)
const reOpenBracket = /\[/g;
const reCloseBracket = /\]/g;

/**
 * Normalizes a column accessor (string, array, or nested mix) into a flat
 * array of path segments, e.g. `'a.b[0]'` -> `['a', 'b', '0']`.
 * Periods inside individual parts are replaced with underscores so they
 * can't be confused with path separators.
 */
function makePathArray(obj) {
  return (
    flattenDeep(obj)
      // remove all periods in parts
      .map((d) => String(d).replace('.', '_'))
      // join parts using period
      .join('.')
      // replace brackets with periods
      .replace(reOpenBracket, '.')
      .replace(reCloseBracket, '')
      // split it back out on periods
      .split('.')
  );
}
function flattenDeep(arr, newArr = []) {
if (!Array.isArray(arr)) {
newArr.push(arr);
} else {
for (let i = 0; i < arr.length; i += 1) {
flattenDeep(arr[i], newArr);
}
}
return newArr;
}
// copied from https://github.com/tannerlinsley/react-table/blob/master/src/utils.js#L169-L191 (13.Jan 2021)
// Cache of parsed paths so repeated lookups don't re-tokenize the path.
const pathObjCache = new Map();

/**
 * Resolves `path` (string, array, or function key) against `obj`,
 * returning `def` when the path cannot be fully traversed.
 * Copied from react-table's internal `getBy`.
 */
export function getBy(obj, path, def) {
  if (!path) {
    return obj;
  }
  // Functions can't be stringified meaningfully, so they key the cache
  // directly; all other path shapes are keyed by their JSON form.
  const cacheKey = typeof path === 'function' ? path : JSON.stringify(path);
  const pathObj =
    pathObjCache.get(cacheKey) ||
    (() => {
      const pathObj = makePathArray(path);
      pathObjCache.set(cacheKey, pathObj);
      return pathObj;
    })();
  let val;
  try {
    // Walk the object one segment at a time; a missing intermediate throws
    // and leaves `val` undefined, triggering the default below.
    val = pathObj.reduce((cursor, pathPart) => {
      return cursor[pathPart];
    }, obj);
  } catch (e) {
    // continue regardless of error
  }
  return typeof val !== 'undefined' ? val : def;
}
export const tagNamesWhichShouldNotSelectARow = new Set([
'UI5-AVATAR',
'UI5-BUTTON',
'UI5-CALENDAR',
'UI5-CHECKBOX',
'UI5-COLOR-PICKER',
'UI5-COMBOBOX',
'UI5-DATE-PICKER',
'UI5-DATERANGE-PICKER',
'UI5-DATETIME-PICKER',
'UI5-DURATION-PICKER',
'UI5-FILE-UPLOADER',
'UI5-ICON',
'UI5-INPUT',
'UI5-LINK',
'UI5-MULTI-COMBOBOX',
'UI5-MULTI-INPUT',
'UI5-RADIO-BUTTON',
'UI5-RANGE-SLIDER',
'UI5-RATING-INDICATOR',
'UI5-SEGMENTED-BUTTON',
'UI5-SELECT',
'UI5-SLIDER',
'UI5-STEP-INPUT',
'UI5-SWITCH',
'UI5-TEXT-AREA',
'UI5-TIME-PICKER',
'UI5-TOGGLE-BUTTON',
'UI5-UPLOAD-COLLECTION'
]);
|
#[cfg(windows_by_handle)]
use super::get_path::concatenate_or_return_absolute;
use crate::fs::{FollowSymlinks, Metadata};
use std::{fs, io, path::Path};
#[cfg(not(windows_by_handle))]
use winapi::um::winbase::{FILE_FLAG_BACKUP_SEMANTICS, FILE_FLAG_OPEN_REPARSE_POINT};
#[cfg(not(windows_by_handle))]
use {
crate::fs::{open_unchecked, OpenOptions},
std::os::windows::fs::OpenOptionsExt,
};
/// *Unsandboxed* function similar to `stat`: resolves `path` relative to
/// `start` and returns its metadata without performing any sandbox
/// escape checks. Callers must ensure `path` is trusted.
pub(crate) fn stat_unchecked(
    start: &fs::File,
    path: &Path,
    follow: FollowSymlinks,
) -> io::Result<Metadata> {
    // When we have `windows_by_handle`, we just call `fs::metadata` etc. and it
    // has everything.
    #[cfg(windows_by_handle)]
    {
        let full_path = concatenate_or_return_absolute(start, path)?;
        match follow {
            FollowSymlinks::Yes => fs::metadata(full_path),
            FollowSymlinks::No => fs::symlink_metadata(full_path),
        }
        .map(Metadata::from_just_metadata)
    }

    // Otherwise, attempt to open the file to get the metadata that way, as
    // that gives us all the info.
    #[cfg(not(windows_by_handle))]
    {
        let mut opts = OpenOptions::new();
        // Access mode 0: open for metadata queries only, without requesting
        // read or write access.
        opts.access_mode(0);
        match follow {
            FollowSymlinks::Yes => {
                // FILE_FLAG_BACKUP_SEMANTICS is required to open directories.
                opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS);
                opts.follow(FollowSymlinks::Yes);
            }
            FollowSymlinks::No => {
                // FILE_FLAG_OPEN_REPARSE_POINT opens the link itself rather
                // than the target it points to.
                opts.custom_flags(FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS);
                opts.follow(FollowSymlinks::No);
            }
        }
        let file = open_unchecked(start, path, &opts)?;
        Metadata::from_file(&file)
    }
}
|
<filename>mmtbx/monomer_library/tst_geo_reduce_for_tardy.py
from __future__ import division
from mmtbx import monomer_library
import mmtbx.monomer_library.server
import mmtbx.monomer_library.pdb_interpretation
from libtbx.test_utils import approx_equal
import libtbx.load_env
from cStringIO import StringIO
import sys, os
def exercise_geo_reduce_for_tardy(
    mon_lib_srv,
    ener_lib,
    file_name,
    expected_bond_counts,
    expected_dihedral_counts):
  """Build geometry restraints for file_name, reduce them for tardy
  dynamics, and assert the expected (full, reduced) bond and dihedral
  proxy counts. Skips quietly when the regression input is unavailable.

  NOTE(review): this is Python 2 source (print statement, has_key,
  cStringIO) — keep it that way unless the whole package is ported.
  """
  file_path = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/tardy_action/"+file_name,
    test=os.path.isfile)
  if (file_path is None):
    print 'Skipping exercise_geo_reduce_for_tardy("%s"):' \
      ' input file not available' % file_name
    return
  log = StringIO()
  processed_pdb_file = monomer_library.pdb_interpretation.process(
    mon_lib_srv=mon_lib_srv,
    ener_lib=ener_lib,
    file_name=file_path,
    log=log)
  geo = processed_pdb_file.geometry_restraints_manager()
  sites_cart = processed_pdb_file.all_chain_proxies.sites_cart_exact()
  tardy_tree = geo.construct_tardy_tree(sites_cart=sites_cart)
  reduced_geo = geo.reduce_for_tardy(tardy_tree=tardy_tree)
  # Compare proxy counts before and after the tardy reduction.
  bond_counts = (
    geo.pair_proxies(sites_cart=sites_cart).bond_proxies.n_total(),
    reduced_geo.pair_proxies(sites_cart=sites_cart).bond_proxies.n_total())
  dihedral_counts = (
    geo.get_dihedral_proxies().size(),
    reduced_geo.get_dihedral_proxies().size())
  assert approx_equal(bond_counts, expected_bond_counts)
  assert approx_equal(dihedral_counts, expected_dihedral_counts)
  proxy_i_seqs_red = {}
  for proxy in reduced_geo.dihedral_proxies:
    proxy_i_seqs_red[proxy.i_seqs] = proxy
  assert len(proxy_i_seqs_red) == dihedral_counts[1]
  awl = list(processed_pdb_file.all_chain_proxies.pdb_hierarchy
    .atoms_with_labels())
  # Every dihedral dropped by the reduction with a weak weight (sigma > 10)
  # should belong to a PRO or CYS residue.
  for proxy in geo.get_dihedral_proxies():
    if (not proxy_i_seqs_red.has_key(proxy.i_seqs)):
      sigma = 1/proxy.weight**0.5
      if (sigma > 10):
        assert awl[proxy.i_seqs[0]].resname in [ "PRO", "CYS" ]

def run(args):
  """Run the tardy-reduction exercise over the fixed regression inputs."""
  assert len(args) == 0
  mon_lib_srv = monomer_library.server.server()
  ener_lib = monomer_library.server.ener_lib()
  for file_name,expected_bond_counts,expected_dihedral_counts in [
      ("gly_gly_box.pdb", (8,0), (2, 1)),
      ("lys_pro_trp_box.pdb", (33,0), (12, 7)),
      ("pro_lys_trp_box.pdb", (33,0), (12, 7)),
      ("1yjp_box.pdb", (59,0), (22, 15)),
      ("disulfides_box.pdb", (198,3), (65, 28))]:
    exercise_geo_reduce_for_tardy(
      mon_lib_srv=mon_lib_srv,
      ener_lib=ener_lib,
      file_name=file_name,
      expected_bond_counts=expected_bond_counts,
      expected_dihedral_counts=expected_dihedral_counts)
  print "OK"

if (__name__ == "__main__"):
  run(args=sys.argv[1:])
|
<filename>config.example.ts<gh_stars>0
/*
 * Setup (translated from Portuguese):
 * - Add a "config.ts" file to the project root.
 * - Fill it with the same entries as this example file.
 */
export const TOKEN = "";
import numpy as np
import sys
def main():
    """Count integers X with 0 < X <= M that are "semi-common multiples"
    of the (even) numbers in A, i.e. X = (a/2) * odd for every a in A.
    Reads N, M and then A from stdin; prints a single count.
    """
    N,M=map(int,input().split())
    A=list(map(int,input().split()))
    # Halve every element: any valid X must be an odd multiple of each a/2.
    A_double=list(map(lambda x:x//2,A))
    # Smallest candidate is lcm of the halved values; valid X are its odd
    # multiples.
    mini_multi=int(np.lcm.reduce(A_double))
    res=M//mini_multi
    for i in A_double:
        # If lcm/(a/2) is even for some a, the lcm is an even multiple of
        # that a/2, so no odd-multiple solution exists — answer 0.
        if (mini_multi//i)%2==0:
            print(0)
            sys.exit()
    # Count odd multiples of mini_multi up to M: ceil(res/2).
    if res%2!=0:
        res+=1
    print(res//2)


if __name__=="__main__":
    main()
|
<reponame>tauli/LambdaLudo<filename>examples/sokokban/Main.hs
module Main where
import LambdaLudo
conf = Config
{ stepper = anim
, handler = handle
, initializer = initBoard
, memory = ()
, assets =
[ "Character1.png"
, "Character2.png"
, "Character4.png"
, "Character7.png"
, "Crate_Blue.png"
, "CrateDark_Blue.png"
, "EndPoint_Blue.png"
, "Wall_Brown.png"
]
, columns = 19
, rows = 11
, size = 64
}
main :: IO ()
main = runGame conf
initBoard :: Step () ()
initBoard = do
setBackgroundColor (Color 128 128 128)
mapM_ placeSprite $ addCoordinates puzzle
anim :: Step () ()
anim = nop
handle :: Handle () ()
handle (KeyPress KeycodeUp) = do
((x,y),name) <- findChar
deleteSprite (x,y) name
createSprite (x,y) 2 "Character7"
moveChar (x,y) (x,y-1) (x,y-2) "Character7"
handle (KeyPress KeycodeDown) = do
((x,y),name) <- findChar
deleteSprite (x,y) name
createSprite (x,y) 2 "Character4"
moveChar (x,y) (x,y+1) (x,y+2) "Character4"
handle (KeyPress KeycodeLeft) = do
((x,y),name) <- findChar
deleteSprite (x,y) name
createSprite (x,y) 2 "Character1"
moveChar (x,y) (x-1,y) (x-2,y) "Character1"
handle (KeyPress KeycodeRight) = do
((x,y),name) <- findChar
deleteSprite (x,y) name
createSprite (x,y) 2 "Character2"
moveChar (x,y) (x+1,y) (x+2,y) "Character2"
handle _ = nop
-- | Attempt to move the player from xy1 to xy2, where xy3 is the cell
-- behind xy2 as seen from xy1.  Rules:
--   * moving into a wall does nothing;
--   * moving into a free cell just moves the player;
--   * moving into a crate pushes it to xy3, unless xy3 holds a wall or
--     another crate; a crate pushed onto an endpoint is re-created with
--     the "dark" sprite, otherwise with the normal one.
moveChar :: (Int,Int) -> (Int,Int) -> (Int,Int) -> String -> Step () ()
moveChar xy1 xy2 xy3 name = do
  n <- findSpriteByXY xy2
  if elem "Wall_Brown" n
    then nop
    else case filter isBox n of
      []        -> moveSprite xy1 xy2 name
      (bName:_) -> do
        n' <- findSpriteByXY xy3
        let b = filter isBox n'
        if elem "Wall_Brown" n' || b /= []
          then nop
          else do
            moveSprite xy1 xy2 name
            -- The pushed crate is deleted and re-created so its sprite
            -- can change depending on the destination cell.
            deleteSprite xy2 bName
            let newCrate = if elem "EndPoint_Blue" n'
                  then "CrateDark_Blue"
                  else "Crate_Blue"
            createSprite xy3 1 newCrate
-- | Locate the player sprite: query all four directional names and
-- return the first (position, name) hit.  Uses 'head', so the board
-- must contain at least one player sprite.
findChar :: Step () ((Int,Int),String)
findChar = do
  result <- mapM findSpriteWithName charNames
  return $ head $ concat result
-- | All positions of sprites with the given name, each paired with that
-- name for the caller's convenience.
findSpriteWithName :: String -> Step () [((Int,Int),String)]
findSpriteWithName n = do
  result <- findSpriteByName n
  return $ zip result $ repeat n
-- | Sprite names the player may currently be using (one per direction).
charNames :: [String]
charNames = [ "Character1" , "Character2"
            , "Character4" , "Character7"]
-- | True for either crate sprite (normal, or the dark variant shown
-- when a crate sits on an endpoint).
isBox :: String -> Bool
isBox = (`elem` ["Crate_Blue", "CrateDark_Blue"])
-- | Pair every character of the level description with its (x,y) cell,
-- row-major: y is the row index, x the column index within the row.
addCoordinates :: [String] -> [((Int,Int),Char)]
addCoordinates p = concatMap addMore $ zip [0..] p where
  addMore (y,s) = map (\(x,c) -> ((x,y),c) ) $ zip [0..] s
-- | Map one puzzle character to its sprite at the given cell:
-- '#' wall, '$' crate, '@' player (layer 2), '.' endpoint (layer 0,
-- beneath crates).  Blanks and any unrecognised characters are ignored
-- via the catch-all clause, instead of crashing on a non-exhaustive
-- pattern match.
placeSprite :: ((Int,Int),Char) -> Step () ()
placeSprite (xy,'#') = createSprite xy 1 "Wall_Brown"
placeSprite (xy,'$') = createSprite xy 1 "Crate_Blue"
placeSprite (xy,'@') = createSprite xy 2 "Character4"
placeSprite (xy,'.') = createSprite xy 0 "EndPoint_Blue"
placeSprite _        = nop
-- | ASCII level layout, one string per board row (matching the 19x11
-- grid in 'conf'): '#' wall, '$' crate, '@' player start, '.' endpoint,
-- ' ' floor.
puzzle :: [String]
puzzle =
  [ " ##### "
  , " # # "
  , " #$ # "
  , " ### $## "
  , " # $ $ # "
  , "### # ## # ######"
  , "# # ## ##### ..#"
  , "# $ $ ..#"
  , "##### ### #@## ..#"
  , " # #########"
  , " ####### "
  ]
|
Effects on the radiation characteristics of using a corrugated reflector with a helical antenna and an electromagnetic band-gap reflector with a spiral antenna
An axial-mode helical antenna backed by a perfect electric conductor (PEC reflector) is optimized to radiate a circularly polarized (CP) wave, using the finite-difference time-domain method (FDTDM). After the optimization, the PEC reflector is replaced with a corrugated reflector. The effects of the corrugated reflector on the current distribution along the helical arm and the radiation pattern are investigated. A reduction in the backward radiation is attributed to the reduction in the current flowing over the rear surface of the corrugated reflector. A spiral antenna backed by a PEC reflector of finite extent is also analyzed using the FDTDM. As the antenna height decreases, the reverse current toward the feed point increases, resulting in deterioration of the axial ratio. To overcome this deterioration, the PEC reflector is replaced with an electromagnetic band-gap (EBG) reflector composed of mushroom-like elements. Analysis reveals that the spiral radiates a CP wave even when the spiral is located close to the reflector (0.06 wavelength above the EBG surface). The input impedance for the EBG reflector is more stable over a wide frequency band than that for the PEC reflector. |
The (Fairly Straightforward) Business Case for Health
COMPARED TO RESIDENTS of other high-income nations, Americans die younger and are less healthy at every age but incur significantly higher costs per person on healthcare (NRC 2013). Poor health has an impact well beyond expenditures on actual medical services, including costs of $576 billion per year in the United States for absence from work and lost productivity (Japsen 2012). In fact, this pattern of higher cost and lower value has many implications for US health and the healthcare delivery system, including jeopardizing our future workforce. The canary in the coal mine is in trouble when three out of four military recruits between the ages of 17 and 24 are ineligible for military service, often due to obesity (21 percent of rejected recruits), and when children and adolescents are, on average, less healthy than their peers in other countries (Secretary's Advisory Committee 2010). Improving the overall health status of the population is a complex, daunting task, but one that is essential to ensure that the United States remains a leading force in the global economy. The business case for better health is very strong.

Broad Versus Narrow Healthcare Delivery Focus

The feature articles in this issue of Frontiers provide an interesting juxtaposition regarding how to define population health and methods to improve outcomes. Kindig and Isham offer a broad focus and posit that medical care has an important but limited impact on health status. I agree. They argue that unless the country addresses the broad range of health determinants — including not just medical care (which, they note, accounts for only 20 percent of health status) but also health behaviors and the social and physical environments — US health indices will not improve enough to stave off a crisis. The United States is an outlier among Organisation for Economic Co-operation and Development (OECD) countries in how it balances medical and social expenditures.
Most OECD nations, including the United States, spend roughly a third of their gross domestic product on a combination of social and health services. However, whereas for most nations the ratio is typically 2:1 in favor of social expenditure, the US social-to-health services ratio is less than 1:1 (Bradley et al. 2011). This ratio is problematic because "social" expenditures are powerful influences that appear to positively affect national health outcomes. If the United States were to reduce wasteful, inefficient, and ineffective treatments — an estimated 30 percent of current spending (NRC 2010) — some of those dollars could be redirected to support other types of programs and reduce the illness burden in the nation. Kindig and Isham suggest that clinical delivery systems will need to step outside of their traditional business model and view the health status impacts through a broader lens. This is an important concept, whether we talk about broad-based community health business plans or "population health" interventions created by delivery systems to manage subpopulations. Historically, healthcare organizations have had little incentive to focus beyond individual medical services on the broader health status of the population. As the United States continues to move away from a fee-for-service (FFS) payment method to models such as capitation, clinical delivery systems will redesign services to include a greater emphasis on those factors that affect health status other than traditional medical care. This responsibility for clinical and cost outcomes will align healthcare delivery system incentives more closely with community incentives for a healthy workforce and a healthy community.
This realignment in turn may help us avoid the healthcare version of what economists refer to as the "tragedy of the commons," whereby a shared resource is sacrificed for the individual interests of a broad range of stakeholders, leading to everyone's long-term disadvantage (Fadul 2009).ACOs as a Vehicle for ChangeThe Affordable Care Act (ACA) provides new momentum for change. … |
/**
 * Asynchronously get a bunch of objects from the cache and decode them with
 * this client's default transcoder (this overload simply delegates to the
 * two-argument form).
 *
 * @param keyIter Iterator that produces the keys to request
 * @return a Future result of that fetch
 * @throws IllegalStateException in the rare circumstance where queue is too
 *         full to accept any more requests
 */
@Override
public BulkFuture<Map<String, Object>> asyncGetBulk(
    Iterator<String> keyIter) {
  return asyncGetBulk(keyIter, transcoder);
}
/**
    Reads object data from the given input and restores the contents of this
    object. Implementation of the Externalizable interface.
    The stream is expected to hold an element count followed by that many
    serialized elements (presumably the format written by writeExternal —
    confirm against that method).
    @param input the stream to read data from in order to restore the object
    @exception IOException if I/O errors occur
    @exception ClassNotFoundException If the class for an object being
    restored cannot be found.
    @aribaapi private
*/
public void readExternal (ObjectInput input)
  throws IOException, ClassNotFoundException
{
    // Count-prefixed format: read the element count, then each element.
    int numItems = input.readInt();
    for (int i = 0; i < numItems; i++) {
        addElement(input.readObject());
    }
}
/**
* Created by kevingaffney on 6/28/17.
*/
@Plugin(type = Op.class, name = "constructTIRFGeometry")
public class ConstructTIRFGeometry<T extends RealType<T>> extends AbstractOp {
@Parameter
private RandomAccessibleInterval<T> data;
@Parameter
private int sliceIndex;
@Parameter(label = "Wavelength (nm):")
private double lambda;
@Parameter(label = "Angle of incidence (˚):")
private double theta;
@Parameter(label = "Z-resolution (nm/slice):")
private double zRes;
@Parameter
private OpService ops;
@Parameter
private DatasetService datasetService;
@Parameter
private DisplayService displayService;
@Parameter(type = ItemIO.OUTPUT)
private Dataset output;
@Override
public void run() {
// Calculate constant d in TIRF exponential decay function
theta = theta * 2 * Math.PI / 360; // Angle of incidence in radians
final double n1 = 1.52; // Refractive index of glass
final double n2 = 1.38; // Refractive index of cytosol
final double d = lambda * Math.pow((Math.pow(n1, 2) * Math.pow(Math.sin(theta), 2) - Math.pow(n2, 2)), -0.5) / (4 * Math.PI);
System.out.println("d: " + d);
final double fluorPerMolecule = 250;
// Get frame of interest to define geometry
long maxX = data.dimension(0) - 1;
long maxY = data.dimension(1) - 1;
Interval interval = Intervals.createMinMax(0, 0, sliceIndex, maxX, maxY, sliceIndex);
RandomAccessibleInterval<T> croppedRAI = ops.transform().crop(data, interval, true);
// Subtract lowest pixel value
IterableInterval<T> dataII = Views.iterable(croppedRAI);
double min = ops.stats().min(dataII).getRealDouble();
Cursor<T> dataCursor = dataII.cursor();
while (dataCursor.hasNext()) {
double val = dataCursor.next().getRealDouble();
dataCursor.get().setReal(val - min);
}
// Perform Gaussian blur
RandomAccessibleInterval<T> blurredRAI = ops.filter().gauss(croppedRAI, 2);
IterableInterval<T> blurredII = Views.iterable(blurredRAI);
// Segment slice by threshold and fill holes
IterableInterval<BitType> thresholded = ops.threshold().huang(blurredII);
Img<BitType> thresholdedImg = ops.convert().bit(thresholded);
RandomAccessibleInterval<BitType> thresholdedRAI = ops.morphology().fillHoles(thresholdedImg);
// Get the largest region
RandomAccessibleInterval<LabelingType<ByteType>> labeling = ops.labeling().cca(thresholdedRAI,
ConnectedComponents.StructuringElement.EIGHT_CONNECTED);
LabelRegions<ByteType> labelRegions = new LabelRegions<>(labeling);
Iterator<LabelRegion<ByteType>> iterator = labelRegions.iterator();
LabelRegion<ByteType> maxRegion = iterator.next();
while (iterator.hasNext()) {
LabelRegion<ByteType> currRegion = iterator.next();
if (currRegion.size() > maxRegion.size()) {
maxRegion = currRegion;
}
}
// Generate z index map
double iMax = ops.stats().max(dataII).getRealDouble();
Img<UnsignedShortType> dataImg = ops.convert().uint16(dataII);
Img<UnsignedShortType> zMap = ops.convert().uint16(ops.create().img(dataII));
LabelRegionCursor cursor = maxRegion.localizingCursor();
RandomAccess<UnsignedShortType> zMapRA = zMap.randomAccess();
RandomAccess<UnsignedShortType> dataRA = dataImg.randomAccess();
while (cursor.hasNext()) {
cursor.fwd();
zMapRA.setPosition(cursor);
dataRA.setPosition(cursor);
double val = dataRA.get().getRealDouble();
// Log of 0 is undefined
if (val < 1) {
val = 1;
}
int z = (int) Math.round(-d * Math.log(val / iMax) / zRes);
zMapRA.get().set(z);
}
System.out.println("6");
// Use map to construct 3D geometry
int maxZ = (int) ops.stats().max(zMap).getRealDouble() + 5; // Add 5 slices of padding on top
long[] resultDimensions = {maxX + 1, maxY + 1, maxZ};
Img<BitType> result = new ArrayImgFactory<BitType>()
.create(resultDimensions, new BitType());
RandomAccess<BitType> resultRA = result.randomAccess();
System.out.println(maxZ);
cursor.reset();
while (cursor.hasNext()) {
cursor.fwd();
zMapRA.setPosition(cursor);
int zIndex = zMapRA.get().get();
int[] position = {cursor.getIntPosition(0), cursor.getIntPosition(1), zIndex};
while (position[2] < maxZ) {
resultRA.setPosition(position);
resultRA.get().set(true);
position[2]++;
}
}
output = datasetService.create(result);
CalibratedAxis[] axes = new DefaultLinearAxis[] {
new DefaultLinearAxis(Axes.X),
new DefaultLinearAxis(Axes.Y),
new DefaultLinearAxis(Axes.Z)
};
output.setAxes(axes);
System.out.println("Done constructing geometry");
}
} |
def TerrainPhysicsCollider_ChangesSizeWithAxisAlignedBoxShapeChanges():
    """Editor test: changing the Axis Aligned Box Shape dimensions must
    resize the Terrain Physics Heightfield Collider's grid to match.

    NOTE(review): result labels come from a module-level ``Tests``
    namespace defined outside this snippet.
    """
    import editor_python_test_tools.hydra_editor_utils as hydra
    from editor_python_test_tools.editor_entity_utils import EditorEntity
    from editor_python_test_tools.utils import TestHelper as helper
    from editor_python_test_tools.utils import Report, Tracer
    import azlmbr.physics as physics
    import azlmbr.math as azmath
    import azlmbr.bus as bus
    import math

    # Box dimensions to apply, and the heightfield grid size expected to
    # result.  NOTE(review): the 5x6 box -> 5x7 grid mapping depends on
    # the heightfield sample spacing — confirm against component docs.
    SET_BOX_X_SIZE = 5.0
    SET_BOX_Y_SIZE = 6.0
    EXPECTED_COLUMN_SIZE = 5
    EXPECTED_ROW_SIZE = 7

    # Level setup: terrain world + renderer, plus a fresh test entity.
    hydra.open_base_level()
    hydra.add_level_component("Terrain World")
    hydra.add_level_component("Terrain World Renderer")
    test_entity = EditorEntity.create_editor_entity_at(azmath.Vector3(0.0, 0.0, 0.0), "TestEntity")
    Report.result(Tests.create_test_entity, test_entity.id.IsValid())

    # Capture any errors/asserts raised while exercising the components.
    with Tracer() as section_tracer:
        aaBoxShape_component = test_entity.add_component("Axis Aligned Box Shape")
        Report.result(Tests.add_axis_aligned_box_shape, test_entity.has_component("Axis Aligned Box Shape"))
        terrainPhysics_component = test_entity.add_component("Terrain Physics Heightfield Collider")
        Report.result(Tests.add_terrain_collider, test_entity.has_component("Terrain Physics Heightfield Collider"))

        # Resize the box and verify the property accepted the new value.
        aaBoxShape_component.set_component_property_value("Axis Aligned Box Shape|Box Configuration|Dimensions", azmath.Vector3(SET_BOX_X_SIZE, SET_BOX_Y_SIZE, 1.0))
        add_check = aaBoxShape_component.get_component_property_value("Axis Aligned Box Shape|Box Configuration|Dimensions") == azmath.Vector3(SET_BOX_X_SIZE, SET_BOX_Y_SIZE, 1.0)
        Report.result(Tests.box_dimensions_changed, add_check)

        # The heightfield grid should now reflect the new box footprint.
        columns = physics.HeightfieldProviderRequestsBus(bus.Broadcast, "GetHeightfieldGridColumns")
        rows = physics.HeightfieldProviderRequestsBus(bus.Broadcast, "GetHeightfieldGridRows")
        Report.result(Tests.configuration_changed, math.isclose(columns, EXPECTED_COLUMN_SIZE) and math.isclose(rows, EXPECTED_ROW_SIZE))

    # Surface anything the tracer caught so failures are diagnosable.
    helper.wait_for_condition(lambda: section_tracer.has_errors or section_tracer.has_asserts, 1.0)
    for error_info in section_tracer.errors:
        Report.info(f"Error: {error_info.filename} {error_info.function} | {error_info.message}")
    for assert_info in section_tracer.asserts:
        Report.info(f"Assert: {assert_info.filename} {assert_info.function} | {assert_info.message}")
Melissa Badeker’s 2,000-square-foot warehouse is organized in sections: a pile of 3,800 binders here, a library of teacher’s guides and resource books there, and a giant storage bin stuffed with pencils, 10 to a bundle.
All of the donated items are free for the taking, a stockpile to help spare teachers some of the hundreds of dollars they spend out of pocket each year to buy supplies for their students and classrooms.
Badeker, a former Baltimore teacher who has a master’s degree in education administration, said the idea behind the Teacher Supply Swap started with the trash can into which she threw away boxes of supplies she saved from her time in the classroom, crying because she knew how much the items would mean if she could get them in the hands of a teacher who needed them.
The supply swap makes that possible.
“We’re getting supplies that are excess, and teachers are swamping this place to get those supplies,” she said. “No matter who you are, if you think you need free school supplies, you should be able to have access to them, particularly when there is so much available.”
Ericka Caruso, a first grade teacher in Prince George’s County, discovered the supply swap shortly after it opened three years ago, and stops in regularly. On a recent day, Caruso browsed the shelves along with teachers, parents and volunteers for nonprofits.
“Who knows what we’ll find?” she asked. “It’s heaven-sent for teachers in the schools that are underfunded. Otherwise, we would have to use our money, and we still do.”
Different districts manage the challenges associated with purchasing materials differently. But teachers across the country consistently report spending their own money on supplies for their students and decorations for their classrooms.
In one recent survey, the National School Supply and Equipment Association found that 99.5 percent of all public school teachers buy supplies, and spend an average of $485. Ten percent reported spending $1,000 or more.
One third grade teacher in Oklahoma drew national attention this summer when she begged at a busy intersection with the sign: “Teacher Needs School Supplies! Anything Helps. Thank you.”
The Howard County school system created a swap three years ago. Officials estimate it has saved the district $260,000. Schools can post items they no longer need and search for what is available within the system for free.
Previously, district officials say, unwanted supplies, furniture and equipment were either thrown away or sold to the public while other district schools were going out and purchasing the very same items new.
In Baltimore, teachers and administrators say Badeker is helping solve a big problem. She collects donations — leftover pencils from bowling alleys, motivational posters from teachers changing grade levels, and leftover swag bags from conferences at the Baltimore Convention Center — lines the shelves of the rented warehouse, and invites teachers, home school parents, nonprofit volunteers and daycare providers to shop.
She asks visitors to join the swap by making voluntary donations in the amount they think is fair. The suggested contribution is $25 a year.
Badeker estimates the swap has given out $100,000 in supplies in three years. The warehouse at 1224 Wicomico Street in South Baltimore, open Thursdays and Saturdays, received about 1,000 visits last year. It has received about 600 so far this year.
Badeker plans soon to offer a supply delivery truck, and is launching a pilot program in two Baltimore schools to improve inventory control and ordering.
She says her goal is to put herself out of business. She wants to create a system that allows schools to better track inventory, exchange surplus items and do a better job of ordering the supplies that teachers need most in their classrooms to minimize what they’re spending out of pocket.
She won an 18-month, $60,000 fellowship from the Open Society Institute’s Baltimore office in November to expand the swap. She also received a grant worth about $3,000 from the Abell Foundation for the pilot program at an elementary school in East Baltimore and an elementary and middle school in Northwest Baltimore.
Shandra Worthy-Owens, the principal at Dr. Bernard Harris Sr. Elementary School in the Oliver neighborhood, said she is eager for any solutions Badeker can produce. Worthy-Owens, who was new to the school last year, said on her first visit to its five supply closets she found items that had been there for decades: Math games that were never used, and new pencils intended to be offered as prizes for children.
“A lot of the materials were in stacks on top of stacks, so we didn’t really even know what we have,” she said.
Teams of teachers went through each of the closets this year, Worthy-Owens said, but the piles are unwieldy, and without an organized inventory system, she said, it will be hard to stay organized. She said she finds it difficult to stretch her supply budget of about $8,000 far enough for the school’s 415 students and 25 teachers.
She hopes an intra-district supply exchange could move, say, extra copies of a textbook at one school to another school that needs them.
Badeker said she plans to take inventory in the schools’ supply closets and assign a monetary value to items. She will ask principals what they want to distribute to their teachers, what they consider excess and what they would like to store.
She also wants to understand more about the timeline for ordering and distributing supplies. From there, she said, she will analyze the data and begin to develop a system for schools to share extra stuff.
Badeker said teachers are not always invited by principals to participate in ordering materials or developing the lists of supplies that are sent home to parents at the start of the new school year. During the budget process, she said, principals might avoid spending money on supplies, or spend it rapidly if they are at risk of losing it.
“Everyone in schools knows this happens,” Badeker said. “It’s not nefarious. They are given such limited funds and they try to manage the money properly and hold on to the funds as long as they can.”
In Baltimore, ordering is decentralized. Each school manages its own inventory based on budgets developed by principals.
Andre Cowling, the district’s chief of schools, said Badeker’s pilot program could be a boon to a district that’s watching every penny.
“People order things in schools at the beginning of the year, and principals have shortages and overages,” he said. “Nobody has extra paper, but they may have pens, paper clips and dry erase markers.” |
// Tells the VideoPlayerHelper to update the data from the video feed.
// Returns the GL texture id holding the latest frame, or -1 when the
// video is not playable on a texture or no surface texture exists.
// NOTE(review): the id is narrowed to byte — confirm ids stay small.
@SuppressLint("NewApi")
public byte updateVideoData()
{
    if (!isPlayableOnTexture())
    {
        // Not rendering to a texture; nothing to update.
        return -1;
    }

    byte result = -1;

    // Guard mSurfaceTexture against concurrent release/re-creation.
    mSurfaceTextureLock.lock();
    if (mSurfaceTexture != null)
    {
        // Latch the most recent frame only while actually playing.
        if (mCurrentState == MEDIA_STATE.PLAYING)
            mSurfaceTexture.updateTexImage();
        result = mTextureID;
    }
    mSurfaceTextureLock.unlock();

    return result;
}
/**
* Determines whether or not a particular element symbolizes the need for an
* extension to be added.
* <P>
* For instance, inheriting from SysML-metamodel:Class.Block means that the
* element should extend UML-metamodel:Class, and specialize
* SysML-metamodel:Block.
*
* @param clazz
* @return
*/
private boolean symbolizesRequiredExtension(gov.nasa.jpl.imce.profileGenerator.model.bundle.NamedElement clazz) {
if (clazz.getName().startsWith("SysML-metamodel:")
&& clazz.getName().contains("."))
return true;
return false;
} |
def generate_instances(logger, conf, sa, interfaces, model, instance_maps):
    """Instantiate model/environment processes and serialize the result.

    Yields concrete process instances, gives each environment process a
    unique pretty name, optionally rewrites statics to globals, simplifies
    every process, publishes the final collections on ``model`` and dumps
    the whole model to 'instances.json'.

    Returns a tuple ``(instance_maps, data)`` where ``data`` is the JSON
    string that was written to disk.
    """
    model_processes, callback_processes = _yield_instances(logger, conf, sa, interfaces, model, instance_maps)

    # Assign a unique human-readable name to every environment process.
    names = set()
    for process in callback_processes:
        new_name = __add_pretty_name(logger, process, names)
        # Sanity check: the generated name must be fresh.
        assert new_name not in names
        names.add(new_name)
    del names

    new_collection = ExtendedProcessCollection()
    new_collection.models.update({m.name: m for m in model_processes})
    new_collection.environment.update({str(p): p for p in callback_processes})

    for process in new_collection.processes:
        # Converting statics to globals is on by default; configurable.
        if conf.get("convert statics to globals", True):
            _remove_statics(logger, sa, process)

    # Re-establish peer links before simplification, which uses them.
    new_collection.establish_peers()
    peers_cache = dict()
    for process in new_collection.processes:
        _simplify_process(logger, conf, sa, interfaces, process, peers_cache, new_collection)

    # Publish final collections on the model (sorted for determinism).
    model.environment = sortedcontainers.SortedDict({str(p): p for p in callback_processes})
    model.models = sortedcontainers.SortedDict({str(p): p for p in model_processes})

    # Persist the generated instances for later stages and debugging.
    filename = 'instances.json'
    data = json.dumps(model, cls=CollectionEncoder, sort_keys=True, indent=2)
    with open(filename, mode='w', encoding='utf-8') as fp:
        fp.write(data)

    return instance_maps, data
Identification and Localization of Human Pancreatic Tumor-associated Antigens by Monoclonal Antibodies to RWP-1 and RWP-2 Cells 1
Human pancreatic adenocarcinoma cell lines, RWP-1 and RWP-2 (Dexter, D. L., Matook, G. M., Meitner, P. A., Bogaars, H. A., Jolly, G. A., Turner, M. D., and Calabresi, P. Cancer Res., 42: 2705-2714, 1982), were used as immunogens for the production of monoclonal antibodies to tumor-associated membrane antigens. BALB/c mice were immunized by i.p. injection of viable cells and hybridomas resulting from the fusion of splenocytes to myeloma cell line P3x63/Ag8.653 were screened by enzyme-linked immunosorbent assay for antibodies which reacted with both RWP-1 and RWP-2 cells. Hybridomas AR2-20 and AR1-28, both IgG1 antibody-producing cell lines, demonstrated membrane staining by immunofluorescence cytochemistry on three of seven pancreatic tumor cell lines but not on six human tumor cell lines of nonpancreatic origin, or on normal human fibroblasts. The antibodies stained frozen sections of RWP xenografts, propagated s.c. in nude mice, and tumor cells in paraffin sections of seven of seven cases of pancreatic ductal adenocarcinoma, using indirect immunofluorescence and immunoperoxidase histochemistry, but not normal adult or fetal pancreas, or a number of other normal adult tissues. Immunoprecipitation of 125I-labeled RWP-2 cells resulted in a single band with a molecular weight of 190,000 under reducing conditions. Sequential immunoprecipitation demonstrated that both AR2-20 and AR1-28 bind to the same molecule.
Abstract 19: Progressive Metabolic Derangement During Prolonged Resuscitation for Refractory VT/VF Cardiac Arrest and the Relationship to Neurologically Intact Survival With Extracorporeal Cardiopulmonary Resuscitation
Background:
Multiple studies have shown declining likelihood of neurologically intact survival with prolonged resuscitation with standard CPR. With standard CPR, survival after VT/VF arrest declines from 35-45% at initiation of CPR to 10-20% survival at 30 min.
Objective:
The aim of this study was to examine the effects of resuscitation duration on neurologically intact survival in the Minnesota Resuscitation Consortium ECPR protocol. Further, the progressive metabolic derangement of prolonged resuscitation was observed for the first time in this population.
Methods:
Between December 1, 2015 and May 1, 2018, 115 consecutive adult patients with refractory out-of-hospital VT/VF cardiac arrest requiring ongoing CPR were transported to the cardiac catheterization laboratory where ECLS was initiated and coronary angiography and PCI were performed as needed. Patients achieving an organized cardiac rhythm were admitted for further treatment.
Results:
Overall, 41% of patients receiving full resuscitative efforts were discharged neurologically intact. Neurologically intact survival declined with increasing duration of CPR with 100% survival in patients placed on ECLS within 30 min. Survival declined to 50% within 50 min and 20% within 70 min. Lactic acid and paCO2 increased over time peaking at 15 mmol/L and 72 mmHg, respectively. pH declined accordingly reaching 6.92 at its nadir. paO2 was stable over time but variable between patients.
Conclusions:
Likelihood of neurologically intact survival declined with increasing duration of CPR for patients going on to receive ECLS. The metabolic profile worsened during prolonged CPR with increasing lactic acidemia and hypercapnia but survival could be achieved with the hemodynamic support provided by ECLS. Together these findings support the need for rapid transport of patients to teams prepared to place ECLS. Substantial decline in survival begins at 30 min of CPR.
|
#pragma once

// Public surface of the loader's GUI module.
namespace loader_gui
{
    // Whether the GUI overlay is currently shown; single shared
    // definition via C++17 `inline`.
    inline bool is_gui_open = false;

    // Renders the loader GUI (implemented elsewhere).
    void draw();
}
import { useApolloClient } from "@apollo/client";
import { Feather } from "@expo/vector-icons";
import React, { useState } from "react";
import {
Alert,
Modal,
StyleSheet,
Text,
TouchableHighlight,
View,
} from "react-native";
import { useDeletePostMutation, useMeQuery } from "../generated/graphql";
import { fonts, layout, theme } from "../theme";
interface PostMoreProps {
  // The post this menu operates on.
  // NOTE(review): typed `any`; the component only reads `id` and
  // `creator.id` — consider introducing a proper Post type.
  post: any;
}
/**
 * Kebab ("more") menu for a post: opens a bottom sheet offering Report,
 * Cancel and — for the post's creator only — Delete.
 */
export const PostMore: React.FC<PostMoreProps> = ({ post, ...props }) => {
  // Current user; used to decide whether Delete is offered.
  const { data } = useMeQuery();
  const [deletePost] = useDeletePostMutation();
  const [visible, setVisible] = useState(false);
  const client = useApolloClient();

  return (
    <>
      <Modal animationType="slide" transparent={true} visible={visible}>
        <View style={styles.centeredView}>
          <Text
            style={styles.option}
            onPress={() => {
              Alert.alert(
                "Are you sure you want to report this post ?"
              );
              setVisible(false);
            }}
          >
            Report
          </Text>
          {/* Delete is only offered to the post's creator.
              NOTE(review): loose equality kept on purpose — the two ids
              may differ in type (string vs number); confirm before
              switching to ===. */}
          {data?.me?.id == post?.creator?.id ? (
            <Text
              style={[styles.option, { color: theme.red }]}
              onPress={async () => {
                const res = await deletePost({
                  variables: {
                    postId: post?.id,
                  },
                });
                console.log(res);
                // Reset the cache so the deleted post disappears everywhere.
                await client.resetStore();
                setVisible(false);
              }}
            >
              Delete
            </Text>
          ) : (
            <></>
          )}
          <Text
            style={[styles.option, { color: theme.red }]}
            onPress={() => setVisible(false)}
          >
            Cancel
          </Text>
        </View>
      </Modal>
      {/* Icon that opens the menu. */}
      <Feather
        onPress={() => setVisible(true)}
        style={{ marginLeft: "auto", marginRight: 0 }}
        name="more-vertical"
        size={layout.iconSize}
        color={theme.grayDark}
      />
    </>
  );
};
// Styles for the bottom-sheet menu and its option rows.
const styles = StyleSheet.create({
  // Sheet pinned to the bottom edge, full width, with a top border.
  centeredView: {
    flex: 1,
    justifyContent: "center",
    position: "absolute",
    bottom: 0,
    backgroundColor: theme.backgroundColor,
    width: "100%",
    borderTopColor: theme.borderColor,
    borderTopWidth: 1,
  },
  // A single tappable menu row.
  option: {
    color: theme.grayDark,
    fontSize: 18,
    fontFamily: fonts.inter_500,
    padding: 10,
  },
});
|
/**
 * Bullet graph comparative point.
 *
 * @author Paul van Assen
 */
public class Point {
    // Raw point value; null is rejected by validate(), not the constructor.
    private final String point;

    /**
     * @param point the comparative point value; may be null until validated
     */
    public Point(String point) {
        this.point = point;
    }

    /**
     * Validates this point.
     *
     * @throws ValidationException if the point is null
     */
    public void validate() throws ValidationException {
        if (point == null) {
            throw new ValidationException("point", "Point cannot be null");
        }
    }
}
# Collects people (name, sex, age) interactively into a list of dicts,
# then prints summary statistics: group size, average age, the women in
# the group, and everyone at or above the average age.
colecao = {}   # scratch dict reused for each person (copied into tabela)
tabela = []    # list of person dicts
soma = 0       # running sum of ages
while True:
    colecao['Nome'] = str(input('Digite o nome da pessoa: '))
    print()
    # Keep asking until a valid sex is entered (M or F).
    # NOTE(review): `in 'MF'` also accepts '' and 'MF' — consider
    # checking membership in ('M', 'F') instead.
    while True:
        colecao['Sexo'] = str(input('Digite o sexo [M/F]: ')).upper()
        if colecao['Sexo'] in 'MF':
            break
        print('ERRO!!! Digite apenas M ou F.')
        print()
    colecao['Idade'] = int(input('Digite a idade da pessoa: '))
    soma += colecao['Idade']
    print()
    # copy() so the stored entry is not overwritten on the next pass.
    tabela.append(colecao.copy())
    # Keep asking until a valid answer is entered (S = yes, N = no).
    while True:
        resp = str(input('Quer continuar? [S/N] ')).upper()
        if resp in 'SN':
            break
        print('ERRO!!! Digite apenas S ou N.')
        print()
    if resp == 'N':
        break
print(tabela)
print()
print(f'O grupo tem {len(tabela)} pessoas.')
print()
print(f'A média de idade das pessoas é: {(soma/len(tabela)):.2f} anos.')
print()
print(f'As mulheres do grupo são: ', end=' ')
for p in tabela:
    if p['Sexo'] == 'F':
        print(f'{p["Nome"]}', end=' ')
print()
print()
print(f'Lista das pessoas com idade acima da média: ', end=' ')
for p in tabela:
    # "Above average" here includes ages exactly equal to the average.
    if p['Idade'] >= (soma/len(tabela)):
        for k, v in p.items():
            print(f'{k} : {v}', end=' ', )
print()
print()
|
<gh_stars>0
package com.victor.oprica.quyzygy20.entities;
/**
 * Packet exchanged with the WebSocket server.
 * NOTE(review): public fields use PascalCase, presumably to match the
 * server's serialized field names — confirm before renaming.
 */
public class WebSocketServerPacket {
    // Whether the requested action succeeded.
    public Boolean Success;
    // Name of the action this packet relates to.
    public String Action;
    // Action-specific payload, carried as a string.
    public String Data;
}
|
<reponame>wdas/AL_USDMaya<gh_stars>1-10
//
// Copyright 2017 Animal Logic
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.//
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "AL/maya/tests/mayaplugintest/UnitTestHarness.h"
#include <gtest/gtest.h>
#include "maya/MSyntax.h"
#include "maya/MArgDatabase.h"
#include "maya/MGlobal.h"
// ASCII-art cats printed alongside test results.  The trailing sequence
// resets the terminal foreground colour; "\x1b" replaces the
// non-standard GCC-only "\e" escape for portability.
const char* happy_cat =
    "\n"
    " \\ /\\ \n"
    " ) ( ^)\n"
    " ( / )\n"
    " \\(__)|\n"
    "\x1b[39m";

const char* angry_cat =
    "\n"
    " // \n"
    " ( >)\n"
    " /\\ / )\n"
    " / \\(__)|\n"
    "\x1b[39m";
//----------------------------------------------------------------------------------------------------------------------
// Command name under which this test harness is registered with Maya.
const MString UnitTestHarness::kName = "MayaUtils_UnitTestHarness";
//----------------------------------------------------------------------------------------------------------------------
// Declares the command-line flags accepted by the unit-test command.
// Most map onto Google Test flags (see constructGoogleTestArgs).
MSyntax UnitTestHarness::createSyntax()
{
  MSyntax syn;
  syn.addFlag("-f", "-filter", MSyntax::kString);      // test filter pattern
  syn.addFlag("-o", "-output", MSyntax::kString);      // result output target
  syn.addFlag("-ff", "-flag_file", MSyntax::kString);  // --gtest_flagfile
  syn.addFlag("-l", "-list");                          // list tests only
  syn.addFlag("-bof", "-break_on_failure");
  syn.addFlag("-ne", "-no_catch_exceptions");
  syn.addFlag("-nc", "-no_colour");
  syn.addFlag("-nt", "-no_time");
  syn.addFlag("-rs", "-random_seed", MSyntax::kLong);
  syn.addFlag("-rp", "-repeat", MSyntax::kLong);
  syn.addFlag("-std", "-stack_trace_depth", MSyntax::kLong);
  syn.addFlag("-tof", "-throw_on_failure");
  return syn;
}
//----------------------------------------------------------------------------------------------------------------------
// Converts the parsed Maya command flags into an argv-style argument list for
// googletest and pushes the flag values into the GTEST_FLAG globals.
// args[0] is a fake program name, as InitGoogleTest expects.
// Fix: removed a leftover debug print ("CONSTRUCTING GOOGLE TESTS") that
// polluted the script editor on every run.
std::vector<std::string> constructGoogleTestArgs(MArgDatabase& database)
{
  std::vector<std::string> args;
  args.emplace_back("maya_tests");

  // Defaults below mirror googletest's own defaults.
  MString filter = "*";
  MString output = "";
  MString color = "yes";
  int rs = 0;   // random seed
  int rp = 1;   // repeat count
  int sd = 100; // stack trace depth

  if(database.isFlagSet("-ff"))
  {
    MString flag_file;
    if(database.getFlagArgument("-ff", 0, flag_file))
    {
      std::string str("--gtest_flagfile=");
      str += flag_file.asChar();
      args.emplace_back(std::move(str));
    }
  }
  if(database.isFlagSet("-nc"))
  {
    color = "no";
  }
  // The empty if-bodies deliberately discard the MStatus return value.
  if(database.isFlagSet("-f"))
  {
    if(database.getFlagArgument("-f", 0, filter)) {}
  }
  if(database.isFlagSet("-o"))
  {
    if(database.getFlagArgument("-o", 0, output)) {}
  }
  if(database.isFlagSet("-rs"))
  {
    if(database.getFlagArgument("-rs", 0, rs)) {}
  }
  if(database.isFlagSet("-rp"))
  {
    if(database.getFlagArgument("-rp", 0, rp)) {}
  }
  if(database.isFlagSet("-std"))
  {
    if(database.getFlagArgument("-std", 0, sd)) {}
  }

  // Push everything straight into googletest's globals; boolean flags map
  // from their "negative" Maya spellings (-ne disables catch_exceptions...).
  ::testing::GTEST_FLAG(catch_exceptions) = !database.isFlagSet("-ne");
  ::testing::GTEST_FLAG(print_time) = !database.isFlagSet("-nt");
  ::testing::GTEST_FLAG(list_tests) = database.isFlagSet("-l");
  ::testing::GTEST_FLAG(throw_on_failure) = database.isFlagSet("-tof");
  ::testing::GTEST_FLAG(filter) = filter.asChar();
  ::testing::GTEST_FLAG(output) = output.asChar();
  ::testing::GTEST_FLAG(color) = color.asChar();
  ::testing::GTEST_FLAG(random_seed) = rs;
  ::testing::GTEST_FLAG(repeat) = rp;
  ::testing::GTEST_FLAG(stack_trace_depth) = sd;
  return args;
}
//----------------------------------------------------------------------------------------------------------------------
// Factory hook handed to MFnPlugin::registerCommand; Maya owns the instance.
void* UnitTestHarness::creator()
{
  return new UnitTestHarness;
}
//----------------------------------------------------------------------------------------------------------------------
// Runs the registered googletest suites inside Maya. Parses the command
// flags, forwards them to googletest, runs the tests, sets the command
// result to 0 on success / -1 on failure, and prints a coloured cat.
// Fixes: non-standard "\e" escapes replaced with "\x1b"; the manual
// new[]/delete[] argv buffer replaced with a std::vector (no leak if
// RUN_ALL_TESTS throws); signed/unsigned loop comparison removed.
MStatus UnitTestHarness::doIt(const MArgList& args)
{
  MStatus status;
  MArgDatabase database(syntax(), args, &status);
  if(!status)
    return status;

  // the unit tests cycle manipulate the timeline quite a bit. Disable GL refresh to speed them up a bit.
  if(MGlobal::kInteractive == MGlobal::mayaState())
    MGlobal::executeCommand("refresh -suspend true");

  std::vector<std::string> arguments = constructGoogleTestArgs(database);

  // InitGoogleTest wants a mutable argv; the strings stay owned by 'arguments'.
  std::vector<char*> argv;
  argv.reserve(arguments.size());
  for(std::string& arg : arguments)
  {
    argv.push_back(&arg[0]);
  }
  int argc = int(argv.size());
  ::testing::InitGoogleTest(&argc, argv.data());

  int error_code = -1;
  if(RUN_ALL_TESTS() == 0)
  {
    error_code = 0;
  }
  setResult(error_code);
  cleanTemporaryFiles();

  if(MGlobal::kInteractive == MGlobal::mayaState())
    MGlobal::executeCommand("refresh -suspend false");

  // "\x1b[31m"/"\x1b[32m" select red/green; the cat strings reset the colour.
  if(error_code)
  {
    if(::testing::GTEST_FLAG(color) != "no") std::cout << "\x1b[31m";
    std::cout << angry_cat;
  }
  else
  {
    if(::testing::GTEST_FLAG(color) != "no") std::cout << "\x1b[32m";
    std::cout << happy_cat;
  }
  return MS::kSuccess;
}
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Deletes the scratch .usda/.ma files the tests write into /tmp, via a small
// Python snippet (no MEL equivalent for glob). Failures only warn — leftover
// temp files are harmless.
// NOTE(review): the hard-coded '/tmp' path is POSIX-only; confirm tests are
// never run on Windows.
void UnitTestHarness::cleanTemporaryFiles() const
{
  MString cmd(
      "import glob;"
      "import os;"
      "[os.remove(x) for x in glob.glob('/tmp/AL_USDMayaTests*.usda')];"
      "[os.remove(x) for x in glob.glob('/tmp/AL_USDMayaTests*.ma')]"
      );
  MStatus stat = MGlobal::executePythonCommand(cmd);
  if(stat != MStatus::kSuccess) {
    MGlobal::displayWarning("Unable to remove temporary test files");
  }
}
|
<reponame>antoinegag/Sara
import { Router } from "express";
import LightsRouter from "./lights";

// Root API router: a liveness probe at "/" plus one sub-router per feature.
const apiRouter = Router();

// Health check — lets clients confirm the API is reachable.
apiRouter.get("/", (req, res) => {
  return res.json({ online: true });
});

// All light-control endpoints live under /lights.
apiRouter.use("/lights", LightsRouter);

export default apiRouter;
|
Rich Pedroncelli/Associated Press
California’s top law enforcement official accused JPMorgan Chase on Thursday of flooding the state’s courts with questionable lawsuits to collect overdue credit card debt.
The suit, filed in California Superior Court by the state’s attorney general, Kamala D. Harris, contends that JPMorgan, the nation’s largest bank, “committed debt collection abuses against tens of thousands of California consumers.”
For about three years, between January 2008 and April 2011, JPMorgan filed thousands of lawsuits each month to collect soured credit card debt, Ms. Harris said. On a single day, for example, JPMorgan filed 469 lawsuits, court records show.
As the bank plowed through the lawsuits, Ms. Harris said, JPMorgan took shortcuts like relying on court documents that were not reviewed for accuracy. “To maintain this breakneck pace,” according to the lawsuit, JPMorgan relied on “unlawful practices.”
The accusations outlined in the lawsuit echo problems — from questionable documents used in lawsuits to incomplete records — that plagued the foreclosure process and prompted a multibillion-dollar settlement with big banks. One hallmark of the foreclosure crisis, robosigning, in which banks worked through mountains of legal documents without reviewing them for accuracy, is at the center of Ms. Harris’s lawsuit against JPMorgan.
JPMorgan is already navigating a thicket of regulatory woes. The Office of the Comptroller of the Currency, one of the bank’s chief regulators, is preparing an enforcement action against the bank over the way it collects its credit card debt, according to several people close to the matter who spoke on the condition of anonymity because they were not authorized to discuss the cases publicly.
JPMorgan assembled a “debt collection mill that abuses the California judicial process,” according to the lawsuit. Many of the lawsuits filed rely on questionable or incomplete records, Ms. Harris said. “At nearly every stage of the collection process,” the bank “cut corners in the name of speed, cost savings and their own convenience,” she said.
Ms. Harris said she sought “to hold Chase accountable for systematically using illegal tactics to flood California’s courts with specious lawsuits against consumers.” She said she aimed to get “redress for borrowers who have been harmed,” but did not detail any request for specific damages.
While JPMorgan’s debt collection practices are the ones under scrutiny, flaws are increasingly common in credit card lawsuits filed by rival banks, according to interviews with dozens of state judges, regulators and lawyers who defend consumers.
“A vast number of the lawsuits are flawed and most of them can’t prove the individual actually owes the debt,” said Noach Dear, a civil court judge in Brooklyn who said he had presided over as many as 150 such cases a day.
Ted Mermin, executive director of the Public Good Law Center in Berkeley, Calif., said, “This is in no way just a JPMorgan problem.”
JPMorgan Chase declined to comment. The bank, though, has been cooperating with regulators, including the California attorney general’s office, to root out problems with its debt collection lawsuits, according to people briefed on the situation. Amid concerns that some of the underlying documentation was flawed, JPMorgan stopped filing new credit card lawsuits in 2011, these people said. In courts across the country, according to judges, JPMorgan has also been throwing out some pending lawsuits as well.
Some of the nation’s biggest lenders are turning to the courts to collect money they are owed on a range of debts, from credit card balances to soured auto loans, judges and lawyers for consumers say.
Since the financial crisis, fewer customers are falling behind on their bills and the morass of bad debt is shrinking. Still, lenders are working to clean up their books and whittle down the amount of soured loans, the judges say.
In most instances, the customers admit that they owe the money. The problem, though, judges and law enforcement officials say, is that credit card companies sometimes flout proper legal procedures to recover what they are owed. Many of the cases, according to Mr. Dear, the civil court judge in Brooklyn, hinge on erroneous documents, hastily assembled to make up for the fact that lenders have lost the original paperwork needed, like payment histories or the original contract. Some lawsuits rely on fabricated credit card statements, Mr. Dear said.
Lenders have been buffeted by this kind of criticism before over the way they pursued homeowners who had fallen behind on their mortgage payments. Last year, five of the nation’s largest banks reached a $26 billion pact with 49 state attorneys general over claims the lenders wrongfully seized homes.
Now the regulatory spotlight is swinging from mortgages to credit cards. The problems in credit card lawsuits play out in the shadows, judges say. That is because unlike in foreclosure cases, borrowers sued over credit card debt rarely show up to defend themselves. As a result, more than 95 percent of lawsuits result in a default judgment, an automatic victory for the lender.
Armed with a default judgment, lenders can garnish a consumer’s wages or freeze bank accounts to get their money back.
Sometimes borrowers do not even realize that they have been sued until a lender wins a default judgment, consumer lawyers say. The situation arises, consumer lawyers say, when lenders claim to serve borrowers with notice of a suit, as they are required to do under the law, but do not follow through. The practice, called “sewer service,” is rampant across the country, the consumer lawyers say. Ms. Harris accused JPMorgan of sewer service in her lawsuit.
Sonia Caro, 62, who lives in Brooklyn, said she had no idea that Capital One was suing her over credit card debt until the lender won a $2,039.43 judgment against her in 2010.
Ms. Caro, who fell behind on her credit cards after multiple sclerosis forced her to stop working, said that she was shocked. “I just didn’t know,” she said. Faced with the staggering bill, Ms. Caro said she was devastated. “It felt so bad.”
Capital One did not return calls for comment. |
/**
 * Passes the {@link LValue#get(Continuation)} to the wrapped continuation
 */
private static class GetAdapter implements Continuation {
    // Continuation that ultimately receives the value resolved from the LValue.
    private final Continuation k;

    public GetAdapter(Continuation k) {
        this.k = k;
    }

    // Receives an LValue and forwards its resolved value to k.
    // (Cast is unchecked by design: callers are expected to pass an LValue.)
    public Next receive(Object l) {
        return ((LValue)l).get(k);
    }

    private static final long serialVersionUID = 1L;
}
The title of this post is the title of a new study in PLOS ONE by three researchers whose names Retraction Watch readers may find familiar: Grant Steen, Arturo Casadevall, and Ferric Fang. Together and separately, they’ve examined retraction trends in a number of papers we’ve covered.
Their new paper tries to answer a question we’re almost always asked as a follow-up to data showing the number of retractions grew ten-fold over the first decade in the 21st century. As the authors write:
…it is unclear whether this reflects an increase in publication of flawed articles or an increase in the rate at which flawed articles are withdrawn.
In other words, is there more poor or fraudulent science being published, or are readers and editors just better at finding it — perhaps thanks to better awareness? These explanations aren’t mutually exclusive, of course. Steen et al:
The recent increase in retractions is consistent with two hypotheses: (1) infractions have become more common or (2) infractions are more quickly detected. If infractions are now more common, this would not be expected to affect the time-to-retraction when data are evaluated by year of retraction. If infractions are now detected more quickly, then the time-to-retraction should decrease when evaluated as a function of year of publication.
When the authors looked at 2,047 retracted articles indexed in PubMed, they found:
Time-to-retraction (from publication of article to publication of retraction) averaged 32.91 months. Among 714 retracted articles published in or before 2002, retraction required 49.82 months; among 1,333 retracted articles published after 2002, retraction required 23.82 months (p<0.0001). This suggests that journals are retracting papers more quickly than in the past, although recent articles requiring retraction may not have been recognized yet.
Fang and Casadevall have also showed that high-impact factor (IF) journals are more likely to retract. In the new study, the authors report that
Time-to-retraction was significantly shorter for high-IF journals, but only ~1% of the variance in time-to-retraction was explained by increased scrutiny.
And plagiarism and duplication — the latter reason for retraction having become so frequent that we can’t cover them all — are relatively new on the landscape, meaning a jump in numbers is to be expected:
The first article retracted for plagiarism was published in 1979 and the first for duplicate publication in 1990, showing that articles are now retracted for reasons not cited in the past.
The effect of those who would have shown up frequently on an earlier version of Retraction Watch — think the analogues of modern-day scientists like Joachim Boldt, Yoshitaka Fujii, and Diederik Stapel — was impressive:
The proportional impact of authors with multiple retractions was greater in 1972–1992 than in the current era (p<0.001). From 1972–1992, 46.0% of retracted papers were written by authors with a single retraction; from 1993 to 2012, 63.1% of retracted papers were written by single-retraction authors (p<0.001).
More details on that:
Authors with multiple retractions have had a considerable impact, both on the total number of retractions and on time-to-retraction. In 2011, 374 articles were retracted; of these, 137 articles (36.6%) were written by authors with >5 retractions. Articles retracted after a long interval (≥60 months after publication) make up 17.9% of all retracted articles; approximately two-thirds (65.7%) of such articles were retracted due to fraud or suspected fraud, a rate of fraud higher than in the overall sample [8]. Among fraudulent articles retracted ≥60 months after publication, only 10.4% (25/241) were written by authors with a single retraction.
We asked Daniele Fanelli, who studies misconduct in science, for his reaction to the findings:
The finding that journals are retracting papers more quickly than in the past is very good news, as it shows how the scientific system of self-correction is improving. All the other data presented in the paper can also be interpreted, most simply, as an improvement in the system of detection. Retractions, whether by single or multiple authors, are growing because more journals are retracting. High-impact factor journals retract more and more rapidly because they have more readers and better policies. Studies have shown that impact factor is the best predictor of a journal having clear and active policies for misconduct. So any correlation between retractions and impact factor has a trivial explanation. In sum, there is no need to invoke “Lower barriers to publication of flawed articles”, as the authors do. I am not saying that scientific misconduct is not increasing. Maybe it is, maybe it is not. But the evidence is inconclusive, and statistics on retractions have no bearing on the issue. Whatever the current prevalence of misconduct might be, it is most likely higher than the extremely small proportion of papers that are currently retracted each year. So retractions are a good thing, and we should just hope to see more of them in the future.
We happen to agree that the growing number of retractions is a good thing, as we wrote in Australia’s The Conversation last year, and not just because it means we have more to write about. What we’d really like to see, of course, is more transparency in those notices — which is something the authors of the new study end with:
Better understanding of the underlying causes for retractions can potentially inform efforts to change the culture of science [41] and to stem a loss of trust in science among the lay public [42], [43].
Share this: Email
Facebook
Twitter |
<reponame>DarrMirr/dl4j-facenet-mtcnn
package com.github.darrmirr.featurebank.verifier;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
@Component
@Qualifier(FeatureVerifier.COSINE_DISTANCE)
public class CosineFeatureVerifier implements FeatureVerifier {

    private static final Logger logger = LoggerFactory.getLogger(CosineFeatureVerifier.class);

    /** Cosine distance below this value is considered a feature match. */
    private static final double MATCH_THRESHOLD = 0.4;

    /**
     * Measures the cosine distance between two feature vectors
     * (0 = identical direction, larger = less similar).
     */
    @Override
    public double verify(INDArray featureSource, INDArray featureTest) {
        double cosineDistance = Transforms.cosineDistance(featureSource, featureTest);
        logger.debug("Cosine distance : {}", cosineDistance);
        return cosineDistance;
    }

    /** Maximum distance accepted as a match. */
    @Override
    public double threshold() {
        return MATCH_THRESHOLD;
    }
}
|
def _pfp__restore_snapshot(self, recurse=True):
    """Restore this array's state from the most recently taken snapshot.

    Lets the parent class restore its own state first, then pops the saved
    raw data off the snapshot stack, and finally (when ``recurse`` is True)
    restores every contained item as well.

    :param recurse: also restore each element in ``self.items``
    """
    super(Array, self)._pfp__restore_snapshot(recurse=recurse)
    # Undo any raw-data mutation made since the matching snapshot was taken.
    self.raw_data = self._pfp__snapshot_raw_stack.pop()
    if recurse:
        for item in self.items:
            item._pfp__restore_snapshot(recurse=recurse)
/*
 * Performs a soft reset of a pipe. This resets the pipe, deallocates it and
 * disables it. The pipe will be available for allocation again.
 */
static void usbhc_pipe_soft_reset(struct usb_pipe* pipe)
{
    /* Flip the state under the pipe lock so concurrent readers never see a
     * half-updated pipe. ("aquire" is the project-wide API spelling.) */
    spinlock_aquire(&pipe->lock);
    pipe->state = PIPE_STATE_FREE;
    spinlock_release(&pipe->lock);
}
<gh_stars>10-100
"""
The Purpose of this Reddit Bot is to collect replies to a particular thread, then save to a .txt file
This Python Script has been modified to work in conjunction with 'Markov Chain'-based bot to save a particular users
entire comment history to a text file, then will be added to the list of users callable by Reddit Bot
called by !name such as !jordanp or !marx or !churchill
Read More: https://medium.com/@BlockchainEng/programming-a-markov-chain-based-reddit-bot-21cebb4d1844
Created 5/21/2018 by <NAME>
Copyright (c) 2018 <NAME>
All Rights Reserved - MIT License
THIS ENTIRE COMMENT BLOCK MUST BE MAINTAINED THROUGH ANY EDITS!
"""
import time
import random
import praw
import markovify
from samplekeys import rkey
# Credentials loaded from the (git-ignored) samplekeys module.
c_id = rkey['client_id']
c_secret = rkey['client_secret']
usr_name = rkey['username']
passw = rkey['password']
#Create Reddit Bot with login and private key - username - password
# Fix: the credential-scrubbing placeholder "<PASSWORD>" was left in place of
# the password argument (a syntax error); pass the loaded value instead.
bot = praw.Reddit(user_agent='Collecting Replies into Spreadsheet bot1 v0.1', client_id=c_id, client_secret=c_secret,
                  username=usr_name, password=passw)
def run():
    """Stream comments from r/all and reply whenever a registered trigger
    (``!<person>``) appears, using that person's Markov model.

    Blocks forever on the praw comment stream; each reply appends a footer
    listing the available trigger names.
    """
    #Collect Recent Thread, Collect Comments, Format Comment, Save to .txt file
    print("-----------------\n\nHello and Welcome to A Reddit Bot which collects comment replies into spreadsheet")
    print("This Bot will collect thread replies and save as text file with author and comment")
    print("These Comments will then be used as a markov-based bot for replying to user requests on Reddit")
    print("-----------------\n\n")
    time.sleep(2)
    subreddit1 = 'all' #Subreddit to collect replies
    # NOTE(review): the three names below are never used in this function.
    monitor_subreddit = 'testingground4bots'
    num_threads = 10 #Number of Threads to collect replies (number of files will be made)
    comment_list = [] #New Comment List will be created for each 'thread'/file
    #gather list of people to generate text for - from <NAME>no Script
    people = []
    with open('_people.txt') as f:
        for person in f.readlines():
            #exclude people that are "commented out"
            print(person)
            if person[0:2] != '//':
                people.append(person.strip())
    subreddit = bot.subreddit(subreddit1)
    comments = subreddit.stream.comments()
    print(people)
    #for all comments with trigger word, reply with random text
    for comment in comments:
        text = comment.body
        for person in people:
            if "!" + person in text:
                # Build the reply: bolded name, generated passage, footer with
                # the list of callable trigger names.
                comm_reply = "**" + person.capitalize() + "**\n\n"
                comm_reply += sentences_Markov(create_Markov(person), person)
                comm_reply += "\n \n ------------------------------------------------------- \n \n"
                comm_reply += "The Above Reply was Generated by a Markov Chain-Based Bot!"
                comm_reply += " (type !name to call)"
                comm_reply += "\n \n People Available:"
                for per in people:
                    comm_reply += " - "+ str(per)
                comment.reply(comm_reply)
                print("REPLIED!!!")
                print(comm_reply, "In Reply To: ", text)
    #for person in people:
    #    file_name_pers = person + ".txt"
    #    create_Markov(file_name_pers)
    #save_comments_to_file()
#Load Text
def create_Markov(person):
    """Build a markovify text model from ``<person>.txt``.

    'sprog' writes poems with one verse per line, so that corpus is split on
    newlines; everything else is treated as ordinary prose.
    """
    with open(person + ".txt") as f:
        corpus = f.read()
    model_cls = markovify.NewlineText if person == 'sprog' else markovify.Text
    return model_cls(corpus)
def sentences_Markov(text_model, person):
    """Generate a short passage of Markov-chain sentences for ``person``.

    Poems ('sprog') get 9-16 newline-separated verses; everyone else gets
    5-9 space-joined sentences. Sleeps 10s before returning as crude rate
    limiting for the Reddit reply that follows.

    Fixes: ``sent == None`` -> ``sent is None``; the retry loop is now capped
    so a degenerate model whose make_sentence() always returns None cannot
    spin forever.
    """
    if person == 'sprog':
        j = random.randint(9, 16)
    else:
        j = random.randint(5, 9)
    sentences = []
    attempts = 0
    # make_sentence() legitimately returns None when it fails to build a
    # sentence; retry, but bound the attempts.
    while len(sentences) < j and attempts < j * 100:
        attempts += 1
        sent = text_model.make_sentence()
        print(sent)
        if sent is not None:
            sentences.append(sent)
    print(sentences)
    message = ""
    for sent1 in sentences:
        if person == 'sprog':
            message += sent1 + "\n\n"  # one verse per paragraph
        else:
            message += sent1 + " "
    time.sleep(10)  # rate limiting before posting the reply
    return message
def save_comments_to_file(sub_redditor_comm = 'poem_for_your_sprog'):
    """Append a redditor's full comment history to ``<name>.txt``.

    Iterates every retrievable comment (limit=None), drops blank lines, and
    appends the remaining lines to the corpus file used by create_Markov().
    """
    #Collect & Save Subredditor's comment replies into file
    for comment in bot.redditor(sub_redditor_comm).comments.new(limit=None):
        comment_list=[]
        print("Author: ", comment.author)
        for line1 in comment.body.split('\n'):
            print(line1)
            # Skip empty lines so the corpus contains only real text.
            if line1 == '':
                pass
            else:
                comment_list.append(line1)
        print(comment_list)
        #print("Submission Title: ", submission.title) #Submission Title will become FileName
        file_name = str(sub_redditor_comm)+".txt"
        print(file_name)
        write_file(comment_list, file_name)
        time.sleep(1)  # stay under the Reddit API rate limit
def write_file(list1, filename):
    """Append every entry of ``list1`` to ``filename``, one entry per line."""
    #Submit format in list form: ['Author', 'Comment']
    with open(filename, 'a') as f:
        f.writelines(msg + "\n" for msg in list1)
# Start the bot only when this file is executed directly (not on import).
if __name__=="__main__":
    run()
|
/**
* This is a helper class, needed to update Things in the BlockChain.
*/
public class UpdateThing {
private int schemaIndex;
private JsonObject data;
public UpdateThing(int schemaIndex, JsonObject data) {
this.schemaIndex = schemaIndex;
this.data = data;
}
public int getSchemaIndex() {
return schemaIndex;
}
public void setSshemaIndex(int schemaIndex) {
this.schemaIndex = schemaIndex;
}
public JsonObject getData() {
return data;
}
public void setData(JsonObject data) {
this.data = data;
}
} |
//
// Function: OnFinishPageNext
//
// Purpose: Handle the pressing of the Next button
//
// Parameters: hwndDlg [IN] - Handle to the finish dialog
//
// Returns: BOOL, TRUE
//
BOOL OnFinishPageNext(HWND hwndDlg)
{
    TraceFileFunc(ttidGuiModeSetup);

    HCURSOR hOldCursor = NULL;
    INetConnection * pConn = NULL;

    // The wizard object was stashed in the dialog's user data at creation.
    CWizard * pWizard =
        reinterpret_cast<CWizard *>(::GetWindowLongPtr(hwndDlg, DWLP_USER));
    Assert(NULL != pWizard);

    HWND hwndEdit = GetDlgItem(hwndDlg, EDT_FINISH_NAME);
    HRESULT hr;
    WCHAR szConnName[NETCON_MAX_NAME_LEN + 1];

    // NOTE(review): cchText is never used after this call.
    int cchText = GetWindowText(hwndEdit, reinterpret_cast<PWSTR>(&szConnName),
                                NETCON_MAX_NAME_LEN);

    if (IsPostInstall(pWizard))
    {
        // Invalid name: reselect the text, warn the user, and veto the page
        // change (DWLP_MSGRESULT = -1 keeps the wizard on this page).
        if (!FIsValidConnectionName(szConnName))
        {
            SendMessage(hwndEdit, EM_SETSEL, 0, -1);
            SetFocus(hwndEdit);
            MessageBox(GetParent(hwndDlg), SzLoadIds(IDS_E_INVALID_NAME),
                       SzLoadIds(IDS_SETUP_CAPTION), MB_OK | MB_ICONSTOP);
            ::SetWindowLongPtr(hwndDlg, DWLP_MSGRESULT, -1);
            return TRUE;
        }
    }

    hOldCursor = BeginWaitCursor();

    BOOL fRetry;
    hr = HrFinishPageSaveConnection(hwndDlg, pWizard, &pConn, &fRetry);

    if (IsPostInstall(pWizard) && FAILED(hr))
    {
        EndWaitCursor(hOldCursor);
        if (fRetry)
        {
            // Recoverable: keep the user on this page to try again.
            ::SetWindowLongPtr(hwndDlg, DWLP_MSGRESULT, -1);
        }
        else
        {
            // Unrecoverable: jump straight to the exit page.
            PostMessage(GetParent(hwndDlg), PSM_SETCURSEL, 0,
                        (LPARAM)pWizard->GetPageHandle(IDD_Exit));
        }
        // NOTE(review): EndWaitCursor() is called a second time here; the
        // cursor was already restored at the top of this branch.
        EndWaitCursor(hOldCursor);
        return TRUE;
    }

    if (IsPostInstall(pWizard))
    {
        DWORD dwDisposition;
        HKEY hkey = NULL;

        hr = HrRegCreateKeyEx(HKEY_CURRENT_USER, c_szNetConUserPath,
                              REG_OPTION_NON_VOLATILE, KEY_READ_WRITE, NULL,
                              &hkey, &dwDisposition);
        if (SUCCEEDED(hr))
        {
            DWORD dw;
            // If the "new RAS connection" marker is missing this is the first
            // connection: set the marker and enable cascading of the Network
            // Connections folder on the Start menu.
            hr = HrRegQueryDword (hkey, c_szNewRasConn, &dw);
            if (FAILED(hr))
            {
                HKEY hkeyAdvanced = NULL;
                (VOID)HrRegSetDword (hkey, c_szNewRasConn, 1);
                hr = HrRegOpenKeyEx(HKEY_CURRENT_USER, c_szAdvancedPath,
                                    KEY_WRITE, &hkeyAdvanced);
                if (SUCCEEDED(hr))
                {
                    (VOID)HrRegSetSz(hkeyAdvanced,
                                     c_szCascadeNetworkConnections,
                                     c_szYES);
                    RegCloseKey(hkeyAdvanced);

                    // Notify the shell of the setting change, with a timeout
                    // so one hung top-level window cannot block the wizard.
                    ULONG_PTR lres = 0;
                    LRESULT lr = SendMessageTimeout(HWND_BROADCAST, WM_SETTINGCHANGE, NULL,
                        reinterpret_cast<LPARAM>(c_szShellMenu), SMTO_ABORTIFHUNG | SMTO_NOTIMEOUTIFNOTHUNG,
                        30 * 1000, &lres);
                    if (lr == 0)
                    {
                        // GetLastError()==0 distinguishes a timeout from a
                        // genuine failure.
                        if (GetLastError() == 0)
                        {
                            TraceError("SendMessageTimeout timed out sending WM_SETTINGCHANGE broadcast message", E_FAIL);
                        }
                        else
                        {
                            TraceError("SendMessageTimeout failed", HRESULT_FROM_WIN32(GetLastError()));
                        }
                    }
                }
                hr = S_OK;
            }
        }

        if (IsWindowVisible(GetDlgItem(hwndDlg, CHK_CREATE_SHORTCUT)))
        {
            BOOL fCreateShortcut = (BST_CHECKED ==
                IsDlgButtonChecked(hwndDlg, CHK_CREATE_SHORTCUT));

            // Persist the user's shortcut preference for the next run.
            if (hkey)
            {
                (VOID)HrRegSetDword (hkey, c_szFinishShortCut,
                                     (fCreateShortcut) ? 1 : 0);
            }

            if (fCreateShortcut && (NULL != pConn))
            {
                NETCON_PROPERTIES* pConnProps = NULL;
                hr = pConn->GetProperties(&pConnProps);
                if (SUCCEEDED(hr))
                {
                    BOOL fAllUsers = FCheckAllUsers(pConnProps);
                    (VOID)HrCreateStartMenuShortCut(GetParent(hwndDlg),
                                                    fAllUsers,
                                                    pConnProps->pszwName,
                                                    pConn);
                    FreeNetconProperties(pConnProps);
                }
            }
        }

        // NOTE(review): hkey may still be NULL here if HrRegCreateKeyEx
        // failed, in which case RegCloseKey(NULL) is called (benign, untidy).
        RegCloseKey(hkey);

        // The wizard takes ownership of the connection reference, so the
        // ReleaseObj below must see NULL.
        pWizard->CacheConnection(pConn);
        pConn = NULL;
    }

    ReleaseObj(pConn);

    // Clear the edit box so stale text never leaks into a later visit.
    SetWindowText(hwndEdit, c_szEmpty);

    if (IsPostInstall(pWizard))
    {
        if (pWizard->FProcessLanPages())
        {
            (VOID)HrCommitINetCfgChanges(GetParent(hwndDlg), pWizard);
        }
        PostMessage(GetParent(hwndDlg), PSM_SETCURSEL, 0,
                    (LPARAM)pWizard->GetPageHandle(IDD_Exit));
        EndWaitCursor(hOldCursor);
        return TRUE;
    }
    else
    {
        EndWaitCursor(hOldCursor);
        return OnProcessNextAdapterPageNext(hwndDlg, FALSE);
    }
}
module Compiler.Rum.StackMachine.Translator where
import Control.Monad.Extra (concatMapM)
import qualified Data.HashMap.Strict as HM
import Compiler.Rum.Internal.AST
import Compiler.Rum.Internal.Rumlude
import Compiler.Rum.Internal.Util
import Compiler.Rum.StackMachine.Structure
import Compiler.Rum.StackMachine.Util
-- | Translate a whole program. Function definitions (which 'span' collects
--   from the front of the list) are emitted first, then a "start" label,
--   then the top-level statements — so execution begins after the bodies.
translateP :: Program -> Instructions
translateP pr = let (funs, rest) = span isFun pr in
    translate funs >>= \f -> translate rest >>= \r -> pure $ f ++ [Label "start"] ++ r
  where
    isFun Fun{} = True
    isFun _ = False
-- | Translate a statement list by concatenating each statement's instructions.
translate :: Program -> Instructions
translate = concatMapM translateStmt
-- | Translate one statement to stack-machine instructions. Control-flow
--   statements allocate fresh labels via 'newLabel'.
translateStmt :: Statement -> Instructions
translateStmt Skip = pure [Nop]
-- Evaluate the RHS, then pop it into the variable.
translateStmt AssignmentVar{..} = translateExpr value >>= \x -> pure $ x ++ [Store var]
-- RHS first, then every index expression, then the array store.
translateStmt AssignmentArr{..} = translateExpr value >>= \x ->
    concatMapM translateExpr (index arrC) >>= \inds ->
    pure $ x ++ inds ++ [StoreArr (arr arrC) (length $ index arrC) ]
-- if/else: jump over the false branch when the condition holds.
translateStmt IfElse{..} = do
    lblTrue <- newLabel
    lblFalse <- newLabel
    ifC <- translateExpr ifCond
    fAct <- translate falseAct
    tAct <- translate trueAct
    pure $ ifC ++ [JumpIfTrue lblTrue]
        ++ fAct ++ [Jump lblFalse, Label lblTrue]
        ++ tAct ++ [Label lblFalse]
-- repeat/until: body always runs once, loop back while the condition is false.
translateStmt RepeatUntil{..} = do
    lblRepeat <- newLabel
    action <- translate act
    repC <- translateExpr repCond
    pure $ Label lblRepeat:action ++ repC ++ [JumpIfFalse lblRepeat]
-- while: test at the top, exit when the condition is false.
translateStmt WhileDo{..} = do
    lblWhile <- newLabel
    lblEnd <- newLabel
    whileC <- translateExpr whileCond
    action <- translate act
    pure $ Label lblWhile:whileC ++ [JumpIfFalse lblEnd]
        ++ action ++ [Jump lblWhile, Label lblEnd]
-- for: init; test; body; update; loop.
translateStmt For{..} = do
    lblFor <- newLabel
    lblEnd <- newLabel
    st <- translate start
    forExp <- translateExpr expr
    bodyF <- translate body
    up <- translate update
    pure $ st ++ Label lblFor:forExp ++ [JumpIfFalse lblEnd]
        ++ bodyF ++ up ++ [Jump lblFor, Label lblEnd]
-- Leave the return value on the stack and return to the caller.
translateStmt Return{..} = translateExpr retExp >>= \ret -> pure $ ret ++ [SReturn]
-- Function definition: label, pop the arguments into parameters (reversed,
-- because the caller pushed them left-to-right), then the body.
translateStmt Fun {..} = translate funBody >>= \f ->
    pure $ (Label $ LabelId $ varName funName) : map Store (reverse params) ++ f
-- Special case: strset mutates a variable in place, so the result must be
-- stored back. (The 'f@' binding is unused; kept as-is.)
translateStmt (FunCallStmt f@FunCall{fName = "strset", args = [var@(Var v), i, c]}) = do
    str <- translateExpr var
    ind <- translateExpr i
    ch <- translateExpr c
    pure $ str ++ ind ++ ch ++ [SRumludeCall Strset, Store v]
translateStmt (FunCallStmt f) = translateFunCall f
--translateStmt e = error $ "Not supported operation for stack: " ++ show e
-- | Translate an expression; the result is left on top of the stack.
--   Operands are evaluated left-to-right.
translateExpr :: Expression -> Instructions
translateExpr (Const x) = pure [Push x]
-- Uppercase-initial names are loaded by reference (see 'isUp').
translateExpr (Var v) = if isUp v then pure [LoadRef v] else pure [Load v]
translateExpr (ArrC ArrCell{..}) = concatMapM translateExpr index >>= \indexes ->
    pure $ indexes ++ [LoadArr arr $ length indexes]
translateExpr (ArrLit exps) = concatMapM translateExpr exps >>= \ins -> pure $ ins ++ [PushNArr $ length exps]
translateExpr BinOper{..} = translateExpr l >>= \x -> translateExpr r >>= \y -> pure $ x ++ y ++ [SBinOp bop]
translateExpr CompOper{..} = translateExpr l >>= \x -> translateExpr r >>= \y -> pure $ x ++ y ++ [SCompOp cop]
translateExpr LogicOper{..} = translateExpr l >>= \x -> translateExpr r >>= \y -> pure $ x ++ y ++ [SLogicOp lop]
-- Negation is compiled as 0 - e.
translateExpr (Neg e) = translateExpr e >>= \x -> pure $ Push (Number 0) : x ++ [SBinOp Sub]
translateExpr (FunCallExp f) = translateFunCall f
--translateExpr e = error $ "Not supported operation for stack: " ++ show e
-- f :: a -> m b
-- l :: [a]
-- mapM f l :: m [b]
-- almostResult = mapM translateExpr (args call) :: m [[Instruction]]
-- result = concat <$> almostResult
-- result = concatMapM translateExpr (args call) :: m [Instruction]
-- | Translate a call: push arguments left-to-right, then either call a
--   user-defined function by label or dispatch to a built-in Rumlude routine.
translateFunCall :: FunCall -> Instructions
translateFunCall FunCall{..} = let funName = varName fName in
    concatMapM translateExpr args >>= \res -> pure $ res ++
        case HM.lookup funName rumludeFunNames of
            Nothing -> [SFunCall (LabelId funName) (length args)]
            Just x -> [SRumludeCall x]
|
from django import forms
from modelform_demo.models import BookDemo, UserDemo
class BaseForm(forms.ModelForm):
    """Shared base ModelForm that can flatten its validation errors."""

    def get_errors(self):
        """Return validation errors as ``{field_name: [message, ...]}``.

        Collapses the structure returned by ``errors.get_json_data()`` — a
        list of ``{'message': ..., 'code': ...}`` dicts per field — down to
        plain message strings.

        Fix: removed leftover debug ``print`` calls and replaced the manual
        accumulation loop with a dict comprehension.
        """
        errors = self.errors.get_json_data()
        return {
            field: [entry['message'] for entry in entries]
            for field, entries in errors.items()
        }
# Model forms inherit from ModelForm.
class AddBookForm(BaseForm):
    # Form for creating/editing BookDemo records.
    class Meta:
        model = BookDemo
        fields = "__all__"  # include every model field
        # fields = ['title', 'price']  # alternative: only the listed fields
        # exclude = ['price']  # alternative: every field except 'price'
class AddUserDemo(BaseForm):
    # Form for creating/editing UserDemo records; inherits get_errors().
    class Meta:
        model = UserDemo
        fields = '__all__'  # include every model field
|
/**
* Test class for Conta class
* Jadson Santos - [email protected]
*/
public class ContaTest {
    // A positive balance must report temSaldo() == true.
    void testaContaComSaldo(){
        Conta conta = new Conta(10.0d);
        if(conta.temSaldo())
            System.out.println("PASSED");
        else
            System.out.println("FAIL");
    }
    // A zero balance must report temSaldo() == false.
    void testaContaSemSaldo(){
        Conta conta = new Conta(0.0d);
        if(conta.temSaldo())
            System.out.println("FAIL");
        else
            System.out.println("PASSED");
    }
    // A negative opening balance must be rejected with IllegalArgumentException.
    void testaContaComSaldoNegativo(){
        try {
            Conta conta = new Conta(-100.0d);
            System.out.println("FAIL");
        }catch (IllegalArgumentException iaex){
            System.out.println("PASSED");
        }
    }
    // A null opening balance must be rejected with IllegalArgumentException.
    void testaContaComSaldoNulo(){
        try {
            Conta conta = new Conta(null);
            System.out.println("FAIL");
        }catch (IllegalArgumentException iaex){
            System.out.println("PASSED");
        }
    }
    /**
     * This test makes no sense (trivial setter/getter round-trip).
     * NOTE(review): it is also never invoked from main() below.
     */
    void testaMetodoSetSaldo(){
        Conta conta = new Conta(0.0d);
        conta.setSaldo(10.0d);
        if(conta.getSaldo().equals(10.0d))
            System.out.println("PASSED");
        else
            System.out.println("FAIL");
    }
    // Hand-rolled runner: executes each test case in turn.
    public static void main(String[] args) {
        new ContaTest().testaContaComSaldo();
        new ContaTest().testaContaSemSaldo();
        new ContaTest().testaContaComSaldoNegativo();
        new ContaTest().testaContaComSaldoNulo();
    }
}
<filename>src/main/java/com/cmayen/almacen/core/models/services/IDetalleFacturaService.java
package com.cmayen.almacen.core.models.services;
import com.cmayen.almacen.core.models.entity.DetalleFactura;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.List;
/**
 * Service contract for CRUD operations on DetalleFactura (invoice detail)
 * entities.
 */
public interface IDetalleFacturaService {
    /** Returns every invoice detail record. */
    public List<DetalleFactura> findAll();
    /** Returns one page of invoice detail records. */
    public Page<DetalleFactura> findAll(Pageable pageable);
    /** Persists (creates or updates) the entity and returns the saved instance. */
    public DetalleFactura save(DetalleFactura detalleFactura);
    /** Looks up a record by primary key; behaviour when absent is implementation-defined — confirm. */
    public DetalleFactura findById(Long id);
    /** Deletes the given entity. */
    public void delete (DetalleFactura detalleFactura);
    /** Deletes the record with the given primary key. */
    public void delete (Long id);
}
|
<gh_stars>0
import os
from os import rename, listdir

import numpy as np
from fastai.vision import *
from fastai.metrics import error_rate
from flask import Flask, request, url_for, flash
from flask import redirect
from flask import send_from_directory
from werkzeug import secure_filename
from PIL import Image

import class_def
from class_def import SegLabelListCustom
from class_def import SegItemListCustom
# Working directory for the exported fastai learner (current directory).
path = ''
# Pre-trained segmentation model exported with learner.export().
export_file_url = 'https://www.dropbox.com/s/bjszupvu7a15ccb/cell_export.pkl?dl=1'
export_file_name = 'cell_export.pkl'
def down_load_file(filename, url):
    """
    Download an URL to a file.

    The original body called ``requests.get``, but ``requests`` is never
    imported anywhere in this module, so every call raised NameError.
    The standard-library ``urllib.request`` is used instead; it raises
    ``urllib.error.HTTPError`` on non-2xx responses, matching the old
    ``raise_for_status`` behavior. The body is streamed to disk in 4 KiB
    chunks so large files never need to fit in memory.
    """
    import urllib.request

    with urllib.request.urlopen(url) as response, open(filename, 'wb') as fout:
        while True:
            block = response.read(4096)
            if not block:
                break
            fout.write(block)
def download_if_not_exists(filename, url):
    """
    Fetch *url* into *filename* unless the file is already present.

    Returns
    -------
    True if the file was downloaded,
    False if it already existed
    """
    if os.path.exists(filename):
        return False
    down_load_file(filename, url)
    return True
# Fetch the trained model the first time the server starts.
download_if_not_exists(export_file_name, export_file_url)

# NOTE(review): these two classes shadow the identically named imports from
# class_def above; presumably the pickled learner needs these exact classes
# importable from __main__ at load time -- confirm.
class SegLabelListCustom(SegmentationLabelList):
    # Masks are stored as 0/255 images; div=True rescales them to 0/1 labels.
    def open(self, fn): return open_mask(fn, div=True)

class SegItemListCustom(SegmentationItemList):
    _label_cls = SegLabelListCustom

# Deserialize the exported fastai learner from the downloaded .pkl file.
learn = load_learner(path, export_file_name)

# Uploads are saved into the current working directory.
UPLOAD_FOLDER = ''
ALLOWED_EXTENSIONS = set(['jpg', 'png'])

app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
def allowed_file(filename):
    """Return True when *filename* has an extension in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
@app.route('/', methods=['GET', 'POST'])
def upload_file():
    """Serve the upload form (GET) and run segmentation on a POST upload.

    The uploaded image is saved to the upload folder, fed through the
    fastai learner, and the predicted mask is written next to it; both
    images are then rendered inline in the response.
    """
    if request.method == 'POST':
        # check if the post request has the file part
        # NOTE(review): `redirect` is used below but is not imported from
        # flask at the top of this module -- NameError at runtime; confirm.
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            #filename = file.filename
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            image = open_image(filename)
            image_url = url_for('uploaded_file', filename=filename)
            # Run inference; index 1 of the prediction tuple holds the mask.
            think = learn.predict(image)
            think_np = np.array(think[1])
            # assumes the model always emits a 256x256 mask -- TODO confirm
            think_np.shape = (256,256)
            think_np = think_np.astype(int)
            # Scale foreground labels up to white so the mask is visible.
            think_np[think_np > 0] = 255
            think_im = Image.fromarray((think_np).astype('uint8'), mode='L')
            think_im.save(os.path.join(app.config['UPLOAD_FOLDER'], 'think2_im.png'))
            think_im_url = url_for('uploaded_file', filename='think2_im.png')
            print(think_im_url)
            #image.show(y=learn.predict(image)[0])
            return '''<h1>The cell image is:</h1>
            <img src= "{}" height = "85" width="200"/>
            <h1>The cell segmentation is:</h1>
            <img src= "{}" height = "85" width="200"/>'''.format(image_url, think_im_url)
    return '''
    <!doctype html>
    <title>Upload new File</title>
    <h1>Upload an image of Cells</h1>
    <form method=post enctype=multipart/form-data>
      <input type=file name=file>
      <input type=submit value=Upload>
    </form>
    '''
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    """Serve a previously saved file from the upload folder."""
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
|
// This small example illustrates how to work with the Trim function.
//
// NOTE(review): the builtin println writes to stderr, and this Example has
// no "// Output:" comment, so "go test" never checks its output -- confirm
// whether fmt.Println plus an Output comment was intended.
func ExampleTrim() {
	println(Trim(" \r\r\ntext\r \t\n", "") == "text")
	println(Trim("1234567890987654321", "1-8") == "909")
}
<gh_stars>0
package com.davidmogar.quizzer.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
public class UrlReader {
/**
* Gets the content of the URL as an String.
*
* @param url URL to fetch data from
* @return string with the URL contents
* @throws IOException if URL was invalid or the connection was refused
*/
public static String getStreamAsString(URL url) throws IOException {
URLConnection connection = url.openConnection();
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
StringBuilder response = new StringBuilder();
String inputLine;
while ((inputLine = bufferedReader.readLine()) != null) {
response.append(inputLine);
}
bufferedReader.close();
return response.toString();
}
}
|
PVC-SLP: Perceptual Vibrotactile-Signal Compression Based-on Sparse Linear Prediction
Developing a signal compression technique that is able to achieve a low bit rate while maintaining high perceptual signal quality is a classical signal processing problem vigorously studied for audio, speech, image, and video types of signals. Yet, until recently, there has been limited effort directed toward the compression of vibrotactile signals, which represent a crucial element of rich touch (haptic) information. A vibrotactile signal — produced when stroking a textured surface with a tool-tip or bare finger — like other signals contains a great deal of redundant and imperceptible information that can be exploited for efficient compression. This paper presents PVC-SLP, a vibrotactile perceptual coding approach. PVC-SLP employs a model of tactile sensitivity, called the ASF (Acceleration Sensitivity Function), for perceptual coding. The ASF is inspired by the four-channels model that mediates the perception of vibrotactile stimuli in the glabrous skin. The compression algorithm introduces sparsity constraints in a linear prediction scheme, both on the residual and on the predictor coefficients. The perceptual quantization of the residual is developed through the use of the ASF. The quantization parameters of the residual and the predictor coefficients were jointly optimized, by means of both squared-error and perceptual quality measures, to find the sweet spot of the rate-distortion curve. PVC-SLP coding performance is evaluated using two publicly available databases that collectively comprise 1281 vibrotactile signals covering 193 material classes. Furthermore, we compare PVC-SLP with a recent vibrotactile compression method and show that PVC-SLP perceptually outperforms the existing method by a sizable margin. Most recently, PVC-SLP has been selected to become part of the haptic codec standard currently under preparation by IEEE P1918.1.1, aka Haptic Codecs for the Tactile Internet.
<reponame>LittleNewton/Operations_Research_Report_BACKUP
#pragma once

// any is a generic pointer. This allows you to put arbitrary structures in
// the hashmap; callers are responsible for casting values back to their
// real type on retrieval.
typedef void * any;
|
/**
* invalidates the textures beloging to the given tree.
* These textures will be offered to be deleted at the next call to offerTexturesToDelete
*/
public void invalidateTexturesInTree(int treeID) {
String sTreeID = treeID + "#";
synchronized (dllTextures) {
DoubleLinkedListNodeInt elem = dllTextures.getFirst();
while (elem != null) {
DoubleLinkedListNodeInt next = elem.hasNext() ? elem.getNext() : null;
if (((JoGLCachedTexture) elem).resultID.startsWith(sTreeID)) {
invalidTextures.add(elem);
elem.remove();
hmPathTextures.remove(((JoGLCachedTexture) elem).resultID);
}
elem = next;
}
}
} |
# import the necessary packages
from keras.models import Sequential
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.layers.core import Activation
from keras.layers.core import Flatten
from keras.layers.core import Dropout
from keras.layers.core import Dense
def build_MiniVGGNet(
        data_shape,
        number_of_classes,
        model_loss='categorical_crossentropy',
        model_optimizer='RMSprop'):
    """Build and compile a MiniVGGNet-style CNN.

    Two (CONV => RELU => CONV => RELU => POOL) stages followed by a single
    fully connected layer and a softmax classifier, with batch
    normalization and dropout throughout.

    Args:
        data_shape: input tensor shape, e.g. ``(height, width, channels)``.
        number_of_classes: size of the softmax output layer.
        model_loss: loss function passed to ``model.compile``.
        model_optimizer: optimizer passed to ``model.compile``.

    Returns:
        The compiled Keras ``Sequential`` model (its summary is printed).
    """
    model = Sequential()
    # Channels-last layout is assumed: BatchNormalization acts on axis -1.
    channels_dimension = -1

    # first CONV => RELU => CONV => RELU => POOL layer set
    model.add(Conv2D(32, (3, 3), padding="same",
                     input_shape=data_shape))
    model.add(Activation("relu"))
    model.add(BatchNormalization(axis=channels_dimension))
    model.add(Conv2D(32, (3, 3), padding="same"))
    model.add(Activation("relu"))
    model.add(BatchNormalization(axis=channels_dimension))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    # second CONV => RELU => CONV => RELU => POOL layer set
    model.add(Conv2D(64, (3, 3), padding="same"))
    model.add(Activation("relu"))
    model.add(BatchNormalization(axis=channels_dimension))
    model.add(Conv2D(64, (3, 3), padding="same"))
    model.add(Activation("relu"))
    model.add(BatchNormalization(axis=channels_dimension))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))

    # first (and only) set of FC => RELU layers
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation("relu"))
    model.add(BatchNormalization())
    model.add(Dropout(0.5))

    # softmax classifier
    model.add(Dense(number_of_classes))
    model.add(Activation("softmax"))

    model.compile(loss=model_loss,
                  optimizer=model_optimizer,
                  metrics=['accuracy'])
    model.summary()
    return model
|
A novel technique to detect Aging in analog/mixed-signal circuits
The increasing complexity of current and future ICs leads to more complex and more expensive testing, especially in mixed-signal circuit designs. As Opamps are used in a wide range of analog/mixed-signal applications, this paper presents a BIST technique to detect aging in mixed-signal circuits. This technique uses the internal Opamps as their own aging sensors. The basic idea of our proposed technique relies on detecting changes in the slew rate of Opamps inside mixed-signal circuits. Our Hspice simulations show that our proposed architecture is able to fully detect aging in our test circuit.
<gh_stars>0
// Updater.h
#ifndef _UPDATER_h
#define _UPDATER_h

#if defined(ARDUINO) && ARDUINO >= 100
#include "arduino.h"
#else
#include "WProgram.h"
#endif

#include "IotMessaging.h"

// Handles over-the-air firmware updates: reacts to update messages and,
// when one arrives, fetches new firmware from the configured server.
class Updater
{
 public:
	// serverIp: update server address; httpsFingerprint: TLS certificate
	// fingerprint used to pin the HTTPS connection; fwVersion: version
	// string of the currently running firmware.
	Updater(String serverIp, String httpsFingerprint, const char* fwVersion);
	// Inspects an incoming JSON message; presumably flags an update when
	// the message requests one -- confirm against the implementation.
	boolean processMessage(JsonObject& root, IotMessaging *messaging);
	// Call from the sketch's main loop; performs the pending update if any.
	void loop();

 private:
	String serverIp;
	String httpsFingerprint;
	String fwname;            // name of the firmware image to fetch
	boolean update;           // true when an update has been requested
	const char* fwVersion;    // currently installed firmware version
};

#endif
|
// ByID sorts the passed Scopes in place lexicographically by their IDs.
func ByID(scopes []Scope) {
sort.Slice(scopes, func(i, j int) bool {
return scopes[i].ID < scopes[j].ID
})
} |
<reponame>landon912/Lavio
// **************************************** Lavio Engine ****************************************
// **************************** Copyright (c) 2017 All Rights Reserved **************************
// ***************************** <NAME> (<EMAIL>) **************************
#pragma once

#include "../RenderAPI.h"
#include "../../IO/Input/IInput.h"

namespace Lavio
{
	namespace API
	{
		// Static factory that instantiates the input backend matching the
		// active rendering API. Purely static: construction, copying and
		// destruction are all deleted.
		class InputFactory
		{
		public:
			// Creates the IInput implementation for the given render API.
			// NOTE(review): the caller presumably owns the returned
			// pointer -- confirm against the implementation.
			static IO::IInput* CreateInput(RenderAPI api);

		public:
			InputFactory() = delete;
			InputFactory(const InputFactory& other) = delete;
			InputFactory& operator=(const InputFactory& rhs) = delete;
			~InputFactory() = delete;
		};
	}
}
|
/**
 * It is correlation result, which has colNames and correlation values.
 */
public class CorrelationResult implements Serializable {
    private static final long serialVersionUID = -498543486504426397L;

    /**
     * correlation data.
     */
    DenseMatrix correlation;

    /**
     * If it is vector correlation, colNames is null.
     */
    String[] colNames;

    /** Vector correlation: no column names are attached. */
    public CorrelationResult(DenseMatrix correlation) {
        this.correlation = correlation;
    }

    /** Column-wise correlation: names label the matrix rows/columns. */
    public CorrelationResult(DenseMatrix correlation, String[] colNames) {
        this.correlation = correlation;
        this.colNames = colNames;
    }

    /** Returns a defensive 2D copy of the correlation values. */
    public double[][] getCorrelation() {
        return correlation.getArrayCopy2D();
    }

    /** Returns the underlying matrix (not a copy). */
    public DenseMatrix getCorrelationMatrix() {
        return correlation;
    }

    public String[] getColNames() {
        return colNames;
    }

    @Override
    public String toString() {
        // A correlation matrix is square, so one dimension suffices.
        int n = correlation.numRows();
        Object[][] data = new Object[n][n];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                data[i][j] = correlation.get(i, j);
            }
        }
        // Fall back to numeric labels when no column names were supplied.
        String[] outColNames = new String[n];
        if (colNames != null) {
            System.arraycopy(colNames, 0, outColNames, 0, n);
        } else {
            for (int i = 0; i < n; i++) {
                outColNames[i] = String.valueOf(i);
            }
        }
        return "Correlation:" +
            "\n" +
            PrettyDisplayUtils.displayTable(data, n, n, outColNames, outColNames, "colName", 20, 20);
    }
}
Joint (Arab) List MK Hanin Zoabi on Monday called for the prosecution of the Israeli security forces personnel who killed the terrorist Nashat Milhem last weekend, after he opened fire on troops who came to arrest him.
“The fatal shooting of Nashat Milhem was a liquidation, because the security forces could have arrested him,” she told Palestinian media. “The person who shot him dead must be prosecuted.”
Milhem was killed in a shootout with police and the Shin Bet on Friday, a week after he killed three Israelis — Alon Bakal, Shimon Ruimi and Ayman Shaaban — during a shooting spree in Tel Aviv, and then fled from a massive police manhunt to his hometown, the northern Israeli town of Arara. According to officials, Milhem, who was located after an extensive manhunt, was not affiliated with any organized terror group, but is believed to have been motivated by a jihadist ideology. He opened fire on security forces when they came to arrest him in his home village of Arara, police said; they had been given orders to take him alive if possible, but were unable to do so.
Get The Times of Israel's Daily Edition by email and never miss our top stories Free Sign Up
Responding to Zoabi’s remarks, Yisrael Beytenu party leader Avigdor Liberman urged the state to cancel a plea bargain recently signed with her under which she avoided incitement charges. On Thursday, she was instead indicted for “insulting a public official” during an incident last year in which she branded Israeli Arab policemen as traitors.
“I turned to the attorney general to demand he cancel the plea bargain signed between MK Hanin Zoabi and the state,” Liberman said. “I demand that, in lieu of that, Zoabi be indicted for threats and incitement to racist violence.”
Liberman said he would appeal to the High Court of Justice if the attorney general declined his request.
Zoabi later responded to Liberman, accusing the foreign minister of “inciting” against the Arab community in Israel.
“Mr. Liberman himself incites and threatens serious violence against the Arab population again and again,” she said. “He is in no place to preach and determine the appropriate conduct.”
The controversial Arab lawmaker last week apologized for calling Arab policemen traitors during a demonstration in Nazareth in 2014. The apology is part of the plea bargain, which allowed Zoabi to dodge the more serious incitement charge for the July 2014 incident, during which she accused an Arab police officer of treason against his people, a statement interpreted by officials as a call for violence against Arab Israeli officers.
Zoabi has been the subject of previous investigations for incitement and has seen numerous failed attempts by fellow lawmakers to remove her from parliament.
A vociferous critic of the Israeli government and society, she came under fire for taking part in the May 2010 flotilla to Gaza that ended in a deadly clash between pro-Palestinian activists and IDF troops. In 2014, the MK refused to use the label “terrorists” for those who abducted and killed three Israeli teenagers in the West Bank. |
#ifndef TYPES_H
#define TYPES_H
typedef unsigned char byte;
typedef unsigned long long u64;
#endif |
Looking for news you can trust?
Subscribe to our free newsletters.
In his first public appearance since leaving the White House, former President Barack Obama said that empowering young people to take on leadership roles would be the “single most important” issue in his post-presidency life.
“What I’m convinced of is that although there are all kinds of issues I care about, and all kinds of issues I can work on, the single most important thing I can do is to help prepare the next generation of leadership to take up the baton and to take their own crack at changing the world,” Obama said at a panel discussion on civic engagement that he led at the University of Chicago on Monday.
Obama made no direct mention of President Donald Trump or the 2016 presidential election, but he pointed to the divisive nature of US politics as the most significant barrier to progress on a host of problems, from flaws in the criminal justice system to climate change.
Obama’s return to Chicago marked his reemergence in public life following a three-month vacation. His remarks echoed previous statements in which he’s hinted at focusing on community organizing efforts as a private citizen.
The free-form panel discussion featured several moments of levity from the former president, including an acknowledgement that panel members were given questions ahead of the event—a subtle reference to Trump’s complaints that Hillary Clinton had an unfair advantage during the presidential debates.
Aside from a brief statement in support of protesters against Trump’s proposed Muslim ban, Obama has avoided publicly criticizing his successor. Trump, on the other hand, has frequently lashed out at his predecessor. Most notably, in March, he accused Obama of ordering illegal surveillance of him and his associates. |
/**
 * DTO mapping the CDS "value_set_sub_value_set_rel" table: one row links a
 * parent value set to one of its sub value sets.
 *
 * @author HLN Consulting, LLC
 */
@Entity
@Table(databaseId = "CDS", name = "value_set_sub_value_set_rel")
@JndiReference(root = "mts-ejb-cds")
@Permission(name = "Value Set Subvalue Set Relationship", isListed = false)
public class ValueSetSubValueSetRelDTO extends BaseDTO {

    // Marker interface for "find by value set id" query mapping.
    public interface ByValueSetId {
    }

    private static final long serialVersionUID = -8184473499750197840L;

    // Primary key, generated automatically.
    @GeneratedValue(source = GenerationSource.AUTO)
    @Id
    private String valueSetSubValueSetRelId;

    // Foreign key to the parent value set, populated from ValueSetDTO.
    @GeneratedValue(source = GenerationSource.FOREIGN_CONSTRAINT, sourceClass = ValueSetDTO.class)
    private String valueSetId;

    // The referenced sub value set; must exist (isNotFoundAllowed = false).
    @ReferenceDTO(isNotFoundAllowed = false)
    @Column(name = "sub_value_set_id")
    private ValueSetDTO subValueSetDTO;

    /**
     * Get the value of subValueSetDTO
     *
     * @return the value of subValueSetDTO
     */
    public ValueSetDTO getSubValueSetDTO() {
        return subValueSetDTO;
    }

    /**
     * Set the value of subValueSetDTO
     *
     * @param subValueSetDTO new value of subValueSetDTO
     */
    @PropertyListener
    public void setSubValueSetDTO(ValueSetDTO subValueSetDTO) {
        this.subValueSetDTO = subValueSetDTO;
    }

    /**
     * Get the value of valueSetId
     *
     * @return the value of valueSetId
     */
    public String getValueSetId() {
        return valueSetId;
    }

    /**
     * Set the value of valueSetId
     *
     * @param valueSetId new value of valueSetId
     */
    @PropertyListener
    public void setValueSetId(String valueSetId) {
        this.valueSetId = valueSetId;
    }

    /**
     * Get the value of valueSetSubValueSetRelId
     *
     * @return the value of valueSetSubValueSetRelId
     */
    public String getValueSetSubValueSetRelId() {
        return valueSetSubValueSetRelId;
    }

    /**
     * Set the value of valueSetSubValueSetRelId
     *
     * @param valueSetSubValueSetRelId new value of valueSetSubValueSetRelId
     */
    @PropertyListener
    public void setValueSetSubValueSetRelId(String valueSetSubValueSetRelId) {
        this.valueSetSubValueSetRelId = valueSetSubValueSetRelId;
    }
}
/**
 * Initialize maze such that all cells have not been visited, all walls inside the maze are up,
 * and borders form a rectangle on the outside of the maze.
 */
public void initialize() {
    int x, y;
    // Mark every cell: raise all four walls plus the visited flag.
    // NOTE(review): CW_VISITED is set to one here although the javadoc says
    // cells start "not visited" -- presumably a set bit encodes "not yet
    // carved/visited"; confirm against the Constants definitions.
    for (x = 0; x < width; x++) {
        for (y = 0; y < height; y++) {
            setBitToOne(x, y, (Constants.CW_VISITED | Constants.CW_ALL));
        }
    }
    // Top and bottom border walls.
    for (x = 0; x < width; x++) {
        setBitToOne(x, 0, Constants.CW_TOP_BOUND);
        setBitToOne(x, height-1, Constants.CW_BOT_BOUND);
    }
    // Left and right border walls.
    for (y = 0; y < height; y++) {
        setBitToOne(0, y, Constants.CW_LEFT_BOUND);
        setBitToOne(width-1, y, Constants.CW_RIGHT_BOUND);
    }
}
TV
“When did music become so important?”
— Don Draper
Creating the soundtrack to a period piece about the 1960s, especially one that takes itself incredibly seriously, is a daunting task. Around every corner are songs that tempt you with nostalgia or personal connection, but if those songs are used in even slightly the wrong context it can destroy the dynamic you were hoping for. “American Pie” is one of my favorite songs, and it was released in 1971; a year that could be the last we see at Sterling Cooper & Partners. That said, if the final credits of Mad Men roll by to the sweet melody and soothing timbre of that Don McLean classic I will toss my TV out the window and promptly run Matthew Weiner over with a John Deere riding mower. Then I’ll come home, drink 10 Old Fashioneds and brood in the dark wondering if he had wronged me or I had wronged myself.
David Chase ended the Sopranos in such a fashion musically, using Journey’s “Don’t Stop Believing” as either a fill in for his middle finger or a statement on American culture (or both); apparently we get to decide. Matthew Weiner, a Chase protégé, will hopefully take a different course. The music he has already used suggests he will.
Here are the defining musical moments of the first six seasons of Mad Men.
Season One Finale. Many critics, myself included, believe the high point of Mad Men as a series is the Kodak Carousel pitch. In this presentation, Don uses a slide projector to cast a beautiful and wholly false image of a happy life using photos of his own, very tenuously held together family. The pitch destroys the room, sends Harry Crane out in tears (his last sign of human emotion) and flips a switch in Don himself. He heads home to find his wife and children on their way out the door to the train station, heading to his in-laws for Thanksgiving. He surprises them with the news that he is going with them, and his young children bound into his arms.
But this is Mad Men…so that was a fantasy. He actually comes home to a house full of silence, longing, and rye whiskey. The way the line “I once loved a women, a child I am told, I gave her my heart, but she wanted my soul” can sum up so much of Don and Betty’s relationship to this point makes the song that much more searing in the moment.
Season Two, Episode 12. Here is another example of Don Draper’s unbelievable ability to forgive himself. As he wades into the tumbling surf of the Pacific Ocean, the allusion to baptism is hard to ignore. Especially when the song starts with, “I say Christian pilgrim, my soul redeem from sin, called out of darkness, a new life to begin.”
Upon first viewing, we’re supposed to wonder if he is starting anew in California and leaving everything behind, or washing himself clean and turning back. Turns out it’s the latter. I love the scene and the song. There is a timelessness to plainly sung gospel music that really resonates with Mad Men.
Among the benefits of doing a period piece: you get to put your characters in black face and it demeans them, not the other way around. This episode, set on Derby Day, is one of my personal favorites.
It features not only the infamous “I’m Peggy Olsen and I want to smoke some Marijuana” line, but also Pete Campbell doing the Charleston (and more notably appearing happy), Paul Kinsey being told he is arrogant and can’t sing by a drug dealing Princeton alum, and the introduction of one of the show’s best small characters Conrad Hilton. The song choice and execution of the performance are flawless.
Season Four, Episode 1. This is the final scene of the season four premiere and if you like full-ego, balls out Don Draper as much as I do, it is probably among your favorites. After a disastrous interview in the opening scene, Bert Cooper sets up a second chance for Don with the Wall Street Journal. The reporter’s first question: is Don the man who defines his company? He answers, “Yes” with very little hesitation and then proceeds to tell the reporter how SCDP began.
When he reaches the part of the story in which he tells Lane Price to, “fire us,” the riff begins. This song is Mad Men soundtracking at its best. It’s a little known sixties gem that not only relates directly to Don Draper’s narrative, but fits perfectly in the scene. The raw garage sound of the guitar mirrors Don’s swaggering bravado and the opening line of “I was born, in a trunk, Momma died and my Daddy got drunk” is basically a summary of the root of his problems. It falls into the rare category of songs in TV or film that I had never heard before and now listen to on a regular basis.
Season 5, Episode 8. In what was, in my opinion, far and away the best moment of Mad Men season 5 and maybe the best musical moment of the series, Megan hands Don a copy of Revolver and tells him to start with this track (on a side note, if you’re married to Don Draper, don’t you tell him to start with “Good Day Sunshine”? It’s like she prefers him miserable.) He puts it on, pours a drink and reclines in his perfect mid-century modern leather lounge chair.
Unfortunately, the magic of psychedelia is lost on him. The scene confirms Don's worst fear: maybe he really is out of touch, maybe the world has passed him by. Two people the world has not passed by are the remaining Beatles, who charged Lionsgate (Mad Men's production company) a cool quarter of a million dollars to use 90 seconds of one of their B-sides. So much for love being all they need. Still, finding the perfect Beatles song for a scene is a tougher job than it seems and they nailed it here. The song is just obscure enough. It fits in context without being too on the nose. And most importantly, it is a song that takes the listener a while to decide on. You could see someone from Don Draper's generation giving it a serious shot before becoming either scared or annoyed by it.
Season Six, Episode 8. One of the hardest things for Mad Men fans to watch has been Don’s relationship with Sylvia Rosen. Maybe it’s because Dr. Arnold Rosen is the first friend Don has other than Roger, and he promptly starts banging his wife. More likely, it is because Don becomes fixated and his confidence cracks for her in a previously unseen way. Matthew Weiner has described her as, “The woman who can bring Don Draper to his knees.”
I am a huge fan of TV and film that can take a simple, innocuous song like this and turn it into something about lust and obsession. The song is playing in the whorehouse Dick Whitman grew up in just before he loses his virginity, in what I would call a soft rape scene, to a prostitute. It is meant to inform us why he is the way he is sexually, and also why he is so obsessed with Sylvia. If Mad Men has one failing it is the flashbacks – they’re too specific in predicting future behavior. The song works way better than the scene itself for me. |
<gh_stars>0
"""Tests for '--personal-dict' option of hunspellcheck CLI."""
import argparse
import contextlib
import io
import os
import shutil
import tempfile
import uuid
import pytest
from hunspellcheck.cli import hunspellchecker_argument_parser
@pytest.mark.parametrize("personal_dicts", (True, False))
@pytest.mark.parametrize("option", ("-p", "--personal-dict"))
def test_hunspellchecker_argument_parser__personal_dicts(personal_dicts, option):
    """-p/--personal-dict is parsed only when personal_dicts=True.

    When the feature is disabled, argparse must reject the option with an
    'unrecognized arguments' error and exit.
    """
    parser = argparse.ArgumentParser()
    hunspellchecker_argument_parser(
        parser,
        personal_dicts=personal_dicts,
        languages=False,
        files=False,
        encoding=False,
    )

    personal_dicts_file = tempfile.NamedTemporaryFile()
    if personal_dicts:
        # Enabled: the option is accepted and collected into a list.
        opts = parser.parse_args([option, personal_dicts_file.name])
        assert len(opts.personal_dicts) == 1
        assert opts.personal_dicts[0] == personal_dicts_file.name
    else:
        # Disabled: argparse exits with an error on stderr.
        stderr = io.StringIO()
        with contextlib.redirect_stderr(stderr), pytest.raises(SystemExit):
            parser.parse_args([option, personal_dicts_file.name])
        expected_message = (
            f"error: unrecognized arguments: {option} {personal_dicts_file.name}"
        )
        assert expected_message in stderr.getvalue()
@pytest.mark.parametrize(
    "personal_dicts_name_or_flags",
    (
        ["--pdict"],
        ["-d", "--dictionary"],
    ),
    ids=(
        "--pdict",
        "-d/--dictionary",
    ),
)
def test_hunspellchecker_argument_parser__personal_dicts_name_or_flags(
    personal_dicts_name_or_flags,
):
    """Custom flag spellings replace the default -p/--personal-dict pair."""
    parser = argparse.ArgumentParser()
    hunspellchecker_argument_parser(
        parser,
        personal_dicts_name_or_flags=personal_dicts_name_or_flags,
        languages=False,
        files=False,
        encoding=False,
    )

    personal_dicts_file = tempfile.NamedTemporaryFile()

    # personal_dicts options matching: every configured alias must work.
    for personal_dicts_arg in personal_dicts_name_or_flags:
        opts = parser.parse_args([personal_dicts_arg, personal_dicts_file.name])
        assert len(opts.personal_dicts) == 1
        assert opts.personal_dicts[0] == personal_dicts_file.name

    # personal dict option not matching: a random flag must be rejected.
    option = uuid.uuid4().hex[:8]
    stderr = io.StringIO()
    with contextlib.redirect_stderr(stderr), pytest.raises(SystemExit):
        parser.parse_args([f"--{option}", personal_dicts_file.name])
    expected_message = (
        f"error: unrecognized arguments: --{option} {personal_dicts_file.name}\n"
    )
    assert expected_message in stderr.getvalue()
@pytest.mark.parametrize(
    "personal_dicts_kwargs",
    (
        {
            "help": "Foo bar help",
            "metavar": "PERSONAL DICT",
        },
        {
            "dest": "custom_dictionary",
        },
    ),
    ids=("help,metavar", "dest"),
)
def test_hunspellchecker_argument_parser__personal_dicts_kwargs(personal_dicts_kwargs):
    """Extra add_argument kwargs are forwarded to the personal-dicts action."""
    parser = argparse.ArgumentParser()
    hunspellchecker_argument_parser(
        parser,
        personal_dicts_kwargs=personal_dicts_kwargs,
        languages=False,
        files=False,
        encoding=False,
    )

    # NOTE(review): the -15 index hard-codes the position of the
    # personal-dicts action among the parser's registered actions; it will
    # silently break if options are added or removed upstream -- confirm.
    personal_dicts_action = parser._optionals._actions[-15]
    for kwarg, value in personal_dicts_kwargs.items():
        assert getattr(personal_dicts_action, kwarg) == value
def test_PersonalDictionaryAction():
    """PersonalDictionaryAction expands plain paths and glob patterns,
    silently dropping paths that do not exist on disk."""
    parser = argparse.ArgumentParser()
    hunspellchecker_argument_parser(
        parser,
        languages=False,
        files=False,
        encoding=False,
    )

    # existent file
    foo_dict_filename = tempfile.NamedTemporaryFile().name
    with open(foo_dict_filename, "w") as f:
        f.write("foo")
    opts = parser.parse_args(["-p", foo_dict_filename])
    assert len(opts.personal_dicts) == 1
    assert opts.personal_dicts[0] == foo_dict_filename

    # multiple files by filepath
    bar_dict_filename = tempfile.NamedTemporaryFile().name
    with open(bar_dict_filename, "w") as f:
        f.write("bar")
    opts = parser.parse_args(["-p", foo_dict_filename, "-p", bar_dict_filename])
    assert len(opts.personal_dicts) == 2
    assert opts.personal_dicts[0] == foo_dict_filename
    assert opts.personal_dicts[1] == bar_dict_filename

    # non existent file: the action drops it instead of failing
    os.remove(foo_dict_filename)
    os.remove(bar_dict_filename)
    opts = parser.parse_args(["-p", foo_dict_filename])
    assert len(opts.personal_dicts) == 0

    # multiple files by globs
    tempdir = tempfile.gettempdir()
    dicts_dirs = {"foo": None, "bar": None}
    for dirname in dicts_dirs:
        dicts_dir = os.path.join(tempdir, f"hunspellcheck-{dirname}")
        if os.path.isdir(dicts_dir):
            shutil.rmtree(dicts_dir)
        os.mkdir(dicts_dir)
        dicts_dirs[dirname] = dicts_dir

        for filename in ["foo.txt", "bar.txt"]:
            filepath = os.path.join(dicts_dir, filename)
            if os.path.isfile(filepath):
                os.remove(filepath)
            # os.mknod is unavailable on Windows and needs privileges on
            # macOS; a plain open/close creates an empty file portably.
            open(filepath, "w").close()

    opts = parser.parse_args(
        [
            "-p",
            os.path.join(tempdir, "hunspellcheck-foo", "*.txt"),
            "-p",
            os.path.join(tempdir, "hunspellcheck-bar", "*.txt"),
        ]
    )
    assert len(opts.personal_dicts) == 4

    for dirname, dirpath in dicts_dirs.items():
        shutil.rmtree(dirpath)
|
Performance analysis of planar subharmonically pumped antiparallel-pair Schottky diode mixers for submillimeter-wave applications
A computer simulation based on the techniques developed by Kerr has been developed that takes into account the physical presence of the pad-to-pad capacitance inherent in antiparallel-pair planar diode chips. The computer simulation is used to examine the effect of the pad-to-pad capacitance on the performance of subharmonic mixers at millimeter-wave frequencies. It is shown that for the best mixer performance the optimum pad-to-pad capacitance is dependent on the anode diameter and series inductance of the antiparallel-pair diodes. This stresses the need for an accurate measurement of the loop inductance in an antiparallel-pair planar subharmonic
1.
I, for one, am thankful for Laura Miller’s article in Salon last month about the alleged irrelevance of the National Book Awards. Miller demonstrates passion—which in my book is almost never a bad thing—for good fiction and, in particular, for “ordinary readers.” Miller wants the NBAs to matter, to have impact. She wants the majority of fiction readers to both pay attention and be influenced by them. In 2004, equally (if not more) troubled by the NBA shortlist, Miller wrote in the New York Times that she wanted the awards to play a strong role in directing readers to what they should read:
For people who read, say, four novels a year, prizes help narrow down a bewilderingly vast field of candidates. Awards have become, as the critic James Wood put it, ”the new reviews.”
There is something rather idealistic—in a public service sort of way—about Miller’s position. There are people out there looking for novels they can read and enjoy; let’s give them some. It follows that she is troubled by a generalized cynicism about awards. A.O. Scott wrote about this cynicism back in 2005:
[T]he prizes, transparently trivial, implicitly corrupt and utterly detached from any meaningful notion of literary value, will be greeted with cynicism, derision and, if we’re lucky, a burst of controversy. It will escape no one’s attention – not even the winners’ – that the very idea of handing out medals and cash for aesthetic and intellectual achievement is absurd, if not obscene. Furthermore, the selections will inevitably reflect the rottenness of the literary status quo, which is either hopelessly stodgy and out of touch, or else distracted by modish extraliterary considerations – hobbled, that is, either by conservative complacency or by political correctness.
But the National Book Awards, Miller argues, are not playing the role of trusted arbiter; rather, they have become, as she puts it, the spinach of literary awards: established fiction writers (the five judges, a different group annually) telling the reading (non-writing) public what they “should” be reading, regardless of what they might “like” to read. Her use of spinach as the metaphor implies, it would seem, finger-wagging paternalism. Read this; you will be an improved, healthy, stronger person. Enjoyment? Pleasure? Well, frivolous reader, no pain no gain.
2.
Despite my admiration for Miller’s relentless crusading on behalf of “a lot of people,” i.e., “nonprofessional readers,” my personal response to Miller’s argument is fraught; not because of my “professional” status as a reader, but rather due to my relationship with spinach. You see, I love spinach. It is possibly my very favorite food. If I have spinach as part of every meal—raw or cooked, chopped or whole, plain or smothered in something cheesy or creamy or eggy—I am a happy person, my meals are pleasurable. It is in fact challenging for me to conceive of why anyone would hate spinach or need to be forced to eat it or would associate it with the pain that leads to gain. (For the record, I also like liver and other innards, Brussels sprouts, beets, and anchovies; but (ironically?) no brains, please.) When I mentioned this to a friend, he asked me if it was my favorite food, or my favorite vegetable; and my answer is the latter. I love bacon burgers, and I love spinach, and I love them especially together; in my world, there is no carnivore-herbivore hierarchy.
If I think back to childhood, no one ever had to tell me to eat my spinach. Or, put another way, no one ever told me that spinach was something that people needed to be told to eat. Sometimes for lunch we had peanut butter sandwiches, or tuna fish, and Doritos (or school rectangular pizza with soggy tater tots); sometimes (when there was more money), we had roast beef on rye and a piece of fruit. My sisters and I ate it all, and liked it, and we’re all in pretty good health now. I never remember thinking, Ew, peanut butter, where’s my roast beef?
My partner was raised mostly by his father, who loved food but couldn’t afford luxuries, so he gathered the five children up to hunt for morels and shitakis, pick watercress, dig for razor clams and oysters at the shore (this was in the Pacific Northwest). They kept chickens, which the kids were responsible for feeding and eventually slaughtering, so there were fresh eggs and sometimes chicken meat (including the feet, necks, gizzards) and always broth in the freezer. They ate well but it was also hard work. And so a “gourmet meal” for him now can be anything from grilled American cheese on buttered white bread, to noodles and dumpling soup, to oysters and Sancerre.
What is my point here? What I am saying is that we do a disservice to “ordinary readers” and “professional” ones alike, by calling a book spinach and attaching good-for-you but not good-in-itself to that metaphor. We are telling readers, This is spinach, didn’t you know, and you won’t like it; over here, this is ice cream, you’ll definitely like this. Herein lies a more insidious kind of paternalism. In dividing the world among writer-readers, critic-readers, and reader-readers, we assume that—to completely mix my metaphors here—a reader-reader (Miller’s “people waiting for the bus”) would find the literary equivalent of raw oysters or shitaki mushrooms to be esoteric or elliptical or poetic in a way that puts them off. I have caught myself in this mistake myself: when I lived in the South Bronx, I would often be surprised when I saw someone on the 6-train, north of 96th Street, reading something literary. It was important to ask myself why I was surprised. And it happened often enough (and I’m not talking about Mott Haven artists or hipsters) that I knew something was wrong with a world view I’d absorbed thoughtlessly.
I want to live in a world—and I believe that if we look and listen closely, we’ll recognize that we’re closer than we think—where “the reading public,” regardless of the inside-baseball interferences of literary professionals, consumes, likes, and engages with many different kinds of literary nourishment; and where writers, teachers, and critics trust and even expect readers to do so…
3.
…And in this sort of world, it’s a good thing to have an award like the NBA whose winner is specifically selected by writer-peers; along with an award like, say, the National Book Critics Circle Award, that is selected by some 25 book critics, and by systematic vote as opposed to the NBA’s small-group consensus. Having different awards, with different selection processes and juries, seems to me to keep the process—the parties involved—optimally honest; it allows everyone to be themselves and not have this be a liability. One wonders what Miller would have NBA novelist-judges do, short of intentionally selecting books that don’t genuinely or particularly excite them, simply because those books have gotten a lot of media and Amazon-reader attention?
I don’t and can’t know what really happens in those closed-door discussions, what baggage or agenda each judge might bring, but it would seem much more highly suspect to me—in a cynical, power-brokering, old boys club sort of way—if all the writers and critics in America were indeed selecting the same five favorite books in a given year. On what planet of readers does that happen, honestly? Given how many books each judge must read (315 this year for the NBA), and how quickly, it would seem that the specter of group-think could loom. That such pressures instead seem to push favorites to the fore for each judge in a more idiosyncratic way—you’d be looking for what grabs you—is a tribute to the individualized intelligence and diverse aesthetic interests of these judges and board members. (See Victor LaValle’s riposte to Miller at Publisher’s Weekly for one NBA judge’s confirmation of this.)
A side-by-side comparison of the finalists and winners in fiction for the NBA and NBCC since 2004 reveals both complete divergence (2004, 2006, 2007, 2010) and also significant overlap (2005, 2008, 2009). (Note: one might assume we’d see more overlap if the NBCC was also limited to American writers). I’m not sure why this is a bad thing for anyone, whether you consider yourself a reader-reader, critic-reader, writer-reader, or all of the above; especially since we now all have access to so much literature-specific social media—Amazon and Goodreads, for example—for recommendations from mostly “nonprofessional” readers who share one’s tastes.
It also does not follow, by the way, that the more “professional” you become as a writer, the more “writerly” (by which Miller means a love of “beautiful sentences, formal experiments and infinitely delicate evocations of emotional states”) your reading tastes become. I think about the book that made me want to become a writer, long before I’d actually written anything: Annie Dillard’s A Pilgrim at Tinker Creek. I would guess that Laura Miller hates this book, if she’s read it, or at least hates it for an award (it won the Pulitzer) and would not recommend it to an “ordinary reader.” It is dense, and ponderous, and theological, and there is no “story” apart from the “esoteric” story of humanity and existence, a young woman examining her tiny corner of the natural world with a magnifying glass and meditating on meaning. Then I think of what I’m reading now: everything by William Gay, and Matt Bondurant’s The Wettest County in the World—bootleggers, murders, car chases, thwarted love for the boy from the wrong side of the tracks; storytelling at its best, artful and muscular language. And I can’t get enough.
4.
I was not being facetious or rhetorically sly in praising Miller for continuing to write passionately about the NBAs. I try to be on the side of idealism over cynicism generally speaking, which is more and more challenging as I get older. In this case my position is ultimately more idealistic than Miller’s: I have faith in the pleasures of spinach, in the folks waiting for the bus and riding the 6-train, and even in the possibility of a world where we all read more than four novels a year.
Image credit: anathea/Flickr |
// Equal reports whether domain1 and domain2 are equivalent as defined by
// IDNA2008 (RFC 5890).
//
// TL;DR Use this instead of strings.EqualFold to compare domains.
//
// Equivalence for malformed A-label domains is defined using regular
// byte-string comparison with case-folding applied.
func Equal(domain1, domain2 string) bool {
if domain1 == domain2 {
return true
}
uDomain1, _ := ForLookup(domain1)
uDomain2, _ := ForLookup(domain2)
return uDomain1 == uDomain2
} |
/**
 * Receives the wagenstand (coach-sequence) alarm broadcast, verifies that the
 * alarm is still active, and notifies the user: via a system notification when
 * the app is in the background, or via an in-app alert broadcast when it is in
 * the foreground. As a RestListener it also compares the loaded wagenstand's
 * meta "created" timestamp against the one stored in the alarm and notifies
 * only when the two differ.
 */
public class WagenstandAlarmReceiver extends BroadcastReceiver implements RestListener {
    private static String TAG = WagenstandAlarmReceiver.class.getSimpleName();
    // Context captured in onReceive(); used by the notification/alert helpers.
    private Context mContext;
    // Alarm payload reconstructed from the received intent.
    private WagenstandAlarm wagenstandAlarm;
    /**
     * Handles the incoming alarm broadcast.
     *
     * @param context The Context in which the receiver is running.
     * @param intent The Intent being received.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        mContext = context;
        Bundle bundle = intent.getBundleExtra(DEFAULT_BUNDLE_NAME);
        if (bundle == null) {
            Log.d(TAG, "Bundle is null");
            return;
        }
        wagenstandAlarm = WagenstandAlarm.from(intent);
        final String mTrainNumber = wagenstandAlarm.trainNumber;
        final String mTime = wagenstandAlarm.time;
        // Alarms are tracked in preferences under the key "<trainNumber>_<time>".
        String key = String.format("%s_%s",mTrainNumber,mTime);
        if (!PrefUtil.hasAlarmSet(key, mContext)) {
            // Seems like the Alarm has been cancelled
            // before so prevent the creation of a Notification
            return;
        }
        // One-shot alarm: remove the key so the same alarm cannot fire twice.
        PrefUtil.cleanAlarmKey(key, mContext);
        if(mTrainNumber == null || mTime == null) {
            // invalid Notification
            return;
        }
        // Background -> system notification; foreground -> in-app alert.
        if (!isAppForeground(mContext)) {
            createNewNotification();
        } else {
            displayAlert();
        }
    }
    /**
     * RestListener success callback: parses the alarm's stored update
     * timestamp and the loaded wagenstand meta "created" date and notifies
     * the user only when they differ.
     *
     * @param payload the loaded wagenstand data (WagenstandIstResponseData)
     */
    @Override
    public void onSuccess(Object payload) {
        Log.d(TAG,"Received onSuccess");
        WagenstandIstResponseData wagenstandData = (WagenstandIstResponseData) payload;
        // NOTE(review): the pattern ends in "ssss" (seconds token repeated);
        // "HH:mm:ss" would be the usual form -- confirm against the backend
        // timestamp format before changing.
        SimpleDateFormat createFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssss", Locale.GERMANY);
        try {
            Date dAlarm = createFormat.parse(wagenstandAlarm.updateTimeStamp);
            Date dCreatedMeta = createFormat.parse(wagenstandData.meta.created);
            Log.d(TAG,"Received Alarm date " + createFormat.format(dAlarm) +", "+ createFormat.format(dCreatedMeta));
            if(!dAlarm.equals(dCreatedMeta)) {
                if (!isAppForeground(mContext)) {
                    createNewNotification();
                } else {
                    displayAlert();
                }
            }
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }
    /**
     * Returns true when the application object reports itself as active
     * (foreground); false when inactive or when the application is null.
     */
    private boolean isAppForeground(Context context){
        BaseApplication application = ((BaseApplication) context.getApplicationContext());
        if (application != null) {
            Log.d(getClass().getSimpleName(), "IS APP ACTIVE: " + application.isActive());
            return application.isActive();
        }
        Log.d(getClass().getSimpleName(), "application is null");
        return false;
    }
    /**
     * RestListener failure callback: only logs the reason.
     */
    @Override
    public void onFail(Object reason) {
        Log.d(TAG, reason.toString() );
    }
    /**
     * Presents an in-app alert by sending a local broadcast that carries the
     * user-facing message and the alarm bundle.
     */
    protected void displayAlert() {
        if (mContext != null) {
            Intent resultIntent = new Intent("NOTIFICATION_WAGENSTAND_UPDATE");
            resultIntent.putExtra("message",
                String.format(
                    "Ihr Zug %s fährt in Kürze ein. Jetzt Wagenreihung prüfen.",
                    wagenstandAlarm.trainLabel
                )
            );
            resultIntent.putExtra("type", "NOTIFICATION_WAGENSTAND_UPDATE");
            resultIntent.putExtra(DEFAULT_BUNDLE_NAME, wagenstandAlarm.toBundle());
            Log.d(getClass().getSimpleName(), "Send Broadcast");
            LocalBroadcastManager.getInstance(mContext).sendBroadcast(resultIntent);
        }
    }
    /**
     * Creates and posts a system notification that deep-links into
     * HubActivity with the alarm bundle attached.
     */
    protected void createNewNotification() {
        Log.d(TAG, "Create a new notification");
        final String mTrainLabel = wagenstandAlarm.trainLabel;
        NotificationCompat.Builder builder = new NotificationCompat.Builder(mContext)
            .setSmallIcon(R.drawable.pushicon)
            .setContentTitle(mContext.getResources().getString(R.string.app_name))
            .setContentText("Wagenreihungsplan " + mTrainLabel)
            .setAutoCancel(true)
            .setVisibility(NotificationCompat.VISIBILITY_PUBLIC)
            .setLargeIcon(BitmapFactory.decodeResource(mContext.getResources(), R.drawable.app_icon));
        builder.setStyle(new NotificationCompat.BigTextStyle()
            .bigText(String.format(
                "Ihr Zug %s fährt in Kürze ein. Jetzt Wagenreihung prüfen.", mTrainLabel
                )
            )
        );
        Intent resultIntent = new Intent(mContext, HubActivity.class);
        resultIntent.putExtra("type","NOTIFICATION_WAGENSTAND_UPDATE");
        resultIntent.putExtra(DEFAULT_BUNDLE_NAME, wagenstandAlarm.toBundle());
        resultIntent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
        // MeinBahnhofActivity.getInstance().getWagenstandAlarmManager().cancelWagenstandAlarm(mTrainNumber, mTime);
        TaskStackBuilder stackBuilder = TaskStackBuilder.create(mContext);
        stackBuilder.addNextIntent(resultIntent);
        PendingIntent resultPendingIntent = stackBuilder.getPendingIntent(
            0, PendingIntent.FLAG_UPDATE_CURRENT
        );
        builder.setContentIntent(resultPendingIntent);
        NotificationManager nManager = (NotificationManager) mContext.getSystemService(Context.NOTIFICATION_SERVICE);
        // Keyed by train number hash: one live notification per train.
        nManager.notify(wagenstandAlarm.trainNumber.hashCode(), builder.build());
    }
}
////////////////////////////////////////////////////////////////////////////////
#ifndef __DELAY_H
#define __DELAY_H
////////////////////////////////////////////////////////////////////////////////
#include "sys.h"
////////////////////////////////////////////////////////////////////////////////
/* GLOBAL expands to nothing inside the implementation file (_DELAY_C_
 * defined) and to `extern` everywhere else -- the usual single-definition
 * trick for header-declared variables.
 * NOTE(review): the #undef directly below cancels GLOBAL before anything can
 * use it, so the mechanism is currently inert -- confirm whether intended. */
#ifdef _DELAY_C_
#define GLOBAL
#else
#define GLOBAL extern
#endif
#undef GLOBAL
////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////
// Development-board delay utilities.
// Uses the SysTick timer in plain counting mode to implement busy-wait delays.
// Provides delay_us and delay_ms.
//////////////////////////////////////////////////////////////////////////////////
/* Initialize the SysTick-based delay subsystem; call once before use. */
void delay_init(void);
/* Busy-wait for nms milliseconds. */
void delay_ms(u16 nms);
/* Busy-wait for nus microseconds. */
void delay_us(u32 nus);
#endif
|
#include "states.h"
#include "utils.h"
extern "C" {
#include "md4.h"
};
/*******************************************************************************
*
* atom list
*
******************************************************************************/
/* Copy constructor: deep-copies the atom array of alist.
 * NOTE(review): when alist is empty, size_/data_/data_ptr_ are left
 * untouched here -- presumably the in-class initializers in states.h
 * zero them; confirm. */
atomList_t::atomList_t( const atomList_t &alist )
{
    notify( this, "atomList_t::atomList_t(atomList_t&)" );
    if( alist.size() > 0 )
    {
        size_ = alist.size();
        data_ = (ushort_t*)malloc( size_ * sizeof(ushort_t) );
        for( size_t i = 0; i < size_; ++i )
            data_[i] = alist.atom( i );
        // data_ptr_ marks one-past-the-last used slot (list is full here).
        data_ptr_ = &data_[size_];
    }
}
/* Constructs a list holding a copy of the first sz entries of array. */
atomList_t::atomList_t( const ushort_t *array, size_t sz )
{
    notify( this, "atomList_t::atomList_t(ushort_t&,size_t)" );
    size_ = sz;
    data_ = (ushort_t*)malloc( size_ * sizeof(ushort_t) );
    for( size_t i = 0; i < size_; ++i )
        data_[i] = array[i];
    // data_ptr_ marks one-past-the-last used slot (list is full here).
    data_ptr_ = &data_[size_];
}
unsigned
atomList_t::hash_value( void ) const
{
unsigned *ptr, result;
unsigned char digest[16];
MD4_CTX context;
// compute MD4 digests
MD4Init( &context );
MD4Update( &context, (unsigned char*)data_, size() );
MD4Final( digest, &context );
// compact digest into unsigned (assumes sizeof(unsigned) = 4)
ptr = (unsigned*)digest;
result = (ptr[0] ^ ptr[1] ^ ptr[2] ^ ptr[3]);
return( result );
}
/* Returns true iff this list has exactly the elements of array[0..sz),
 * in the same order. */
bool
atomList_t::equal( const ushort_t *array, size_t sz ) const
{
    // Lists of different lengths can never match.
    if( sz != size() ) return( false );
    size_t i = 0;
    while( i < sz )
    {
        if( atom( i ) != array[i] ) return( false );
        ++i;
    }
    return( true );
}
/* Destructively intersects this list with alist: every atom not present
 * in alist is removed in place. */
void
atomList_t::intersect( const atomList_t &alist )
{
    // The index is signed and decremented after each removal because
    // remove() shifts the remaining elements left, so position i holds
    // a not-yet-examined element afterwards.
    for( int i = 0; i < (int)size(); ++i )
        if( !alist.find( atom( i ) ) )
        {
            remove( atom( i ) );
            --i;
        }
}
/* Returns true iff this list and alist share no atom. */
bool
atomList_t::empty_intersection( const atomList_t &alist ) const
{
    // The intersection is non-empty as soon as one shared atom is found.
    size_t i = 0;
    while( i < alist.size() )
    {
        if( find( alist.atom( i ) ) )
            return( false );
        ++i;
    }
    return( true );
}
/* Returns the number of atoms of alist that also appear in this list. */
size_t
atomList_t::intersection_size( const atomList_t &alist ) const
{
    size_t count = 0;
    size_t i = 0;
    while( i < alist.size() )
    {
        if( find( alist.atom( i ) ) )
            ++count;
        ++i;
    }
    return( count );
}
/* Inserts every atom of alist into this list. */
void
atomList_t::insert( const atomList_t &alist )
{
    // alist.size() is re-read each iteration, matching the original
    // loop's semantics.
    size_t i = 0;
    while( i < alist.size() )
    {
        insert( alist.atom( i ) );
        ++i;
    }
}
/* Removes every atom of alist from this list. */
void
atomList_t::remove( const atomList_t &alist )
{
    size_t i = 0;
    while( i < alist.size() )
    {
        remove( alist.atom( i ) );
        ++i;
    }
}
/* Returns true iff every atom in this list holds in state.
 * With nprec (negative preconditions handled natively) each atom is
 * tested directly. Otherwise atoms appear to use an even/odd literal
 * encoding -- presumably odd value atm denotes the negation of literal
 * atm-1 (TODO confirm against states.h):
 *   odd atm:  holds iff atm-1 is NOT in state
 *   even atm: holds iff atm itself is in state
 */
bool
atomList_t::holds( const state_t &state, bool nprec ) const
{
    for( size_t i = 0; i < size(); ++i )
    {
        if( nprec )
        {
            if( !state.holds( atom( i ) ) )
                return( false );
        }
        else
        {
            ushort_t atm = atom( i );
            if( ((atm%2) && state.holds( atm-1 )) ||
                (!(atm%2) && !state.holds( atm )) )
                return( false );
        }
    }
    return( true );
}
/* Returns true iff every atom in this list "holds" with respect to alist,
 * using the same even/odd literal convention as holds(state_t&,bool):
 * with nprec each atom must simply be present in alist; otherwise an odd
 * atom atm holds iff atm-1 is absent, an even atom iff it is present
 * (presumed encoding -- confirm against states.h). */
bool
atomList_t::holds( const atomList_t &alist, bool nprec ) const
{
    for( size_t i = 0; i < size(); ++i )
    {
        if( nprec )
        {
            if( !alist.find( atom( i ) ) )
                return( false );
        }
        else
        {
            ushort_t atm = atom( i );
            if( ((atm%2) && alist.find( atm-1 )) ||
                (!(atm%2) && !alist.find( atm )) )
                return( false );
        }
    }
    return( true );
}
/* Writes the list to os as "[a0 a1 ... an]", or "[ <empty> ]" when empty. */
void
atomList_t::print( std::ostream &os ) const
{
    if( size() == 0 )
    {
        os << "[ <empty> ]";
        return;
    }
    os << "[";
    for( size_t i = 0; i < size(); ++i )
    {
        // Space-separate consecutive atoms.
        if( i > 0 ) os << " ";
        os << atom( i );
    }
    os << "]";
}
/* Set equality: the lists are equal iff each contains every atom of the
 * other (order and multiplicity are ignored). */
bool
atomList_t::operator==( const atomList_t &alist ) const
{
    for( size_t i = 0; i < size(); ++i )
    {
        if( !alist.find( atom( i ) ) )
            return( false );
    }
    for( size_t j = 0; j < alist.size(); ++j )
    {
        if( !find( alist.atom( j ) ) )
            return( false );
    }
    return( true );
}
/* Assignment: replaces the contents of this list with those of alist. */
atomList_t&
atomList_t::operator=( const atomList_t &alist )
{
    // BUG FIX: guard against self-assignment -- clear() would otherwise
    // destroy the very data we are about to copy.
    if( this != &alist )
    {
        clear();
        for( size_t i = 0; i < alist.size(); ++i )
            insert( alist.atom( i ) );
    }
    return( *this );
}
/*******************************************************************************
*
* atom list list
*
******************************************************************************/
/* Returns true iff some member list compares equal (set equality) to alist. */
bool
atomListList_t::find( const atomList_t &alist ) const
{
    size_t i = 0;
    while( i < size() )
    {
        if( atom_list( i ) == alist )
            return( true );
        ++i;
    }
    return( false );
}
/* Inserts alist (taking ownership of the pointer) unless an equal list is
 * already present.
 * NOTE(review): when a duplicate is found, alist is neither stored nor
 * freed here -- presumably the caller retains ownership in that case;
 * confirm at the call sites. */
void
atomListList_t::insert( atomList_t *alist )
{
    size_t i;
    // Linear scan for an existing equal list.
    for( i = 0; i < size(); ++i )
        if( atom_list( i ) == *alist ) break;
    if( i == size() )
    {
        // Grow the backing array geometrically: capacity starts at 1 and
        // doubles whenever the array is full (data_ptr_ at one-past-end).
        if( !data_ || (data_ptr_ == &data_[size_]) )
        {
            size_ = (!data_ ? 1 : size_ << 1);
            atomList_t **ndata_ =
                (atomList_t**) realloc( data_, size_ * sizeof(atomList_t*) );
            // Re-anchor data_ptr_ into the (possibly moved) new array.
            data_ptr_ = (!data_ ? ndata_ : &ndata_[data_ptr_ - data_]);
            data_ = ndata_;
        }
        *data_ptr_++ = alist;
    }
}
/* Disjunctive test: returns true iff at least one member list holds in
 * state (see atomList_t::holds for the nprec convention). */
bool
atomListList_t::holds( const state_t &state, bool nprec ) const
{
    size_t i = 0;
    while( i < size() )
    {
        if( atom_list( i ).holds( state, nprec ) )
            return( true );
        ++i;
    }
    return( false );
}
/* Writes the collection to os as "[l0,l1,...,ln]", each member printed by
 * atomList_t::print. */
void
atomListList_t::print( std::ostream &os ) const
{
    os << "[";
    for( size_t i = 0; i < size(); ++i )
    {
        // Comma-separate consecutive member lists.
        if( i > 0 ) os << ",";
        atom_list( i ).print( os );
    }
    os << "]";
}
/* Set equality over collections: equal iff each collection contains (an
 * equal of) every member list of the other. */
bool
atomListList_t::operator==( const atomListList_t &alist ) const
{
    for( size_t i = 0; i < size(); ++i )
    {
        if( !alist.find( atom_list( i ) ) )
            return( false );
    }
    for( size_t j = 0; j < alist.size(); ++j )
    {
        if( !find( alist.atom_list( j ) ) )
            return( false );
    }
    return( true );
}
/* Assignment: replaces the contents with deep copies of alist's member
 * lists. */
atomListList_t&
atomListList_t::operator=( const atomListList_t &alist )
{
    // BUG FIX: guard against self-assignment -- clear() would otherwise
    // destroy the lists we are about to copy from.
    if( this != &alist )
    {
        clear();
        for( size_t i = 0; i < alist.size(); ++i )
        {
            // Deep copy: each member list is cloned before insertion.
            atomList_t *al = new atomList_t;
            *al = alist.atom_list( i );
            insert( al );
        }
    }
    return( *this );
}
|
Low Cost and Small Component Count Hybrid Converter with Energy Management Control for Unmanned Aerial Vehicle Applications
This paper proposes a novel non-isolated three-port converter having dual-input and single-output (DISO) for low cost and small component counts. A combination of power sources, including a fuel cell and battery, can be widely used in various applications to achieve high energy density and extend a battery lifetime. However, its power systems can be complex because two independent power converters are required. Therefore, the simple non-isolated DC/DC power converter is proposed in this paper. It can have three essential functions: 1) charging and discharging operations for battery, 2) a regulation of the output voltage, and 3) energy management control according to three operational states. The proposed converter can achieve high power density and low cost from a small number of components. The proposed converter operates under an energy management mechanism proposed to utilize multiple power sources efficiently. The feasibility of the proposed method was verified with a 400 W prototype converter (16.8 V/24 A), and the experimental results validated the theoretical analysis and showed the effectiveness of the proposed converter. |
import logging
from fastapi_events.registry.base import BaseEventPayloadSchemaRegistry
logger = logging.getLogger(__name__)
class EventPayloadSchemaRegistry(BaseEventPayloadSchemaRegistry):
    """Concrete event-payload schema registry.

    Inherits all registration/lookup behavior unchanged from
    ``BaseEventPayloadSchemaRegistry``; exists to provide an importable,
    instantiable registry type.
    """
    pass
# Module-level default registry instance.
registry = EventPayloadSchemaRegistry()
|
<filename>max_slice_sum.py
def solution(A):
    """Return the maximal sum of any non-empty contiguous slice of A.

    Uses Kadane's algorithm: O(len(A)) time, O(1) extra space.

    Args:
        A: list of integers (may be empty).
    Returns:
        The maximum slice sum, or 0 for an empty list (no slice exists).

    Fixes over the original:
    - ``sys.maxint`` does not exist on Python 3;
    - the hand-rolled scan reset the running sum to the current element
      instead of ``max(x, running + x)``, e.g. solution([1, -2, 3, 3])
      returned 4 instead of the correct 6;
    - the redundant special cases for len(A) in (2, 3) are gone.
    """
    if not A:
        return 0
    # best: maximal slice sum seen so far; cur: maximal sum of a slice
    # ending at the current element.
    best = cur = A[0]
    for x in A[1:]:
        cur = max(x, cur + x)
        best = max(best, cur)
    return best
|
package client.startup;
import client.view.NonBlockingInterpreter;
import common.FileCatalogServer;
import java.net.MalformedURLException;
import java.rmi.Naming;
import java.rmi.NotBoundException;
import java.rmi.RemoteException;
/**
 * Starts the Client Interface: resolves the remote file catalog server in
 * the RMI registry and hands it to the command-line interpreter.
 */
public class Main {
    /**
     * Entry point.
     *
     * @param args command-line arguments (currently unused).
     */
    public static void main(String[] args){
        try{
            // Resolve the remote server stub from the RMI registry.
            FileCatalogServer server = (FileCatalogServer) Naming.lookup(FileCatalogServer.SERVER_NAME_IN_REGISTRY);
            // Start the non-blocking command loop against the server.
            new NonBlockingInterpreter().start(server);
            System.out.println("Start Client");
        }
        catch (RemoteException | MalformedURLException | NotBoundException e) {
            e.printStackTrace();
        }
    }
}
|
def sample_stats_table(self):
        """Build a table of per-sample ExomeDepth statistics.

        Collects every metric name seen across ``self.ed_data_samples``
        into a header map (with a prettified column title) and overlays any
        per-metric display configuration from ``self.ed_metric_configs``.
        Returns the rendered table plot object.
        """
        headers = OrderedDict()
        for sample in self.ed_data_samples.keys():
            for metric in self.ed_data_samples[sample]:
                if metric not in headers.keys():
                    # Human-readable column title: drop dots, turn
                    # underscores into spaces, capitalize the first letter.
                    name = metric.replace('.','').replace('_',' ').capitalize()
                    headers[metric] = {
                        'title': name,
                        'description': metric
                    }
                    try:
                        # Merge optional per-metric display configuration;
                        # metrics without configuration are simply skipped.
                        headers[metric].update(self.ed_metric_configs[metric])
                    except KeyError:
                        pass
                else:
                    # Header already collected for this metric.
                    continue
        table_config = {
            "namespace": "exomedepth",
            "id": "exomedepth-sample-stats-table",
            "table_title": "Exomedepth Sample Statistics",
            "no_beeswarm": False,
        }
        return table.plot(self.ed_data_samples, headers, table_config)
def WriteClient(self, client_id, client): |
package stream
import (
"math"
"runtime"
)
/**
* Reference:
* http://www.cnblogs.com/gw811/archive/2012/10/04/2711746.html
*/
// Comparator wraps a three-way comparison function: CompareTo(a, b)
// returns a negative value if a sorts before b, zero if they are
// equivalent, and a positive value if a sorts after b.
type Comparator struct {
	CompareTo func(interface{}, interface{}) int
}
// Sorted sorts the stream's elements in ascending order according to
// comparator; it delegates to the merge-sort implementation (MSorted).
func (s *Stream) Sorted(comparator *Comparator) *Stream {
	return s.MSorted(comparator)
}
// QSorted sorts the stream in place in ascending order using quicksort.
// The resulting stream is always sequential (isParallel is reset).
func (s *Stream) QSorted(comparator *Comparator) *Stream {
	qsort(s.list, 0, len(s.list), comparator.CompareTo)
	s.isParallel = false
	return s
}
// MSorted sorts the stream in place in ascending order using merge sort.
// For a parallel stream both the copy into the auxiliary slice and the
// recursive sort fan out over the available CPU cores; otherwise a plain
// sequential merge sort is used. The resulting stream is always
// sequential (isParallel is reset).
func (s *Stream) MSorted(comparator *Comparator) *Stream {
	aux := make([]interface{}, len(s.list)) // auxiliary slice (merge scratch space)
	var ch chan int
	cores := runtime.NumCPU()
	// Recursion depth at which the parallel fan-out stops; ~log2(cores)
	// levels yields roughly one goroutine per core.
	maxDepth := int(math.Log2(float64(cores)) + 1)
	if s.isParallel {
		ch = make(chan int)
		// Copy s.list into aux with one goroutine per core, each striding
		// through the slice by `cores`.
		for j := 0; j < cores; j++ {
			go func(idx int) {
				for i := idx; i < len(s.list); i += cores {
					aux[i] = s.list[i]
				}
				ch <- 0
			}(j)
		}
		waitCh(ch, cores)
		// Sort in a goroutine; mergeSort signals ch on completion.
		go mergeSort(aux, s.list, 0, len(s.list), 0, comparator.CompareTo, 0, maxDepth, ch)
		waitCh(ch, 1)
	} else {
		for i, v := range s.list {
			aux[i] = v
		}
		mergeSort(aux, s.list, 0, len(s.list), 0, comparator.CompareTo, 0, maxDepth, ch)
	}
	s.isParallel = false
	return s
}
// waitCh blocks until num values have been received from ch; it is a
// no-op when ch is nil (sequential mode).
func waitCh(ch chan int, num int) {
	if ch == nil {
		return
	}
	for received := 0; received < num; received++ {
		<-ch
	}
}
/**
 * Sorts list[fromIndex:toIndex] in ascending order according to comp.
 */
func qsort(list []interface{}, fromIndex int, toIndex int, comp func(interface{}, interface{}) int) {
	qsort1(list, fromIndex, toIndex-fromIndex, comp)
}
// qsort1 sorts list[off:off+len] in ascending order according to comp,
// using the Bentley-McIlroy engineered quicksort: insertion sort for tiny
// ranges, median-of-3 / pseudo-median-of-9 pivot selection, and a
// three-way partition that groups elements equal to the pivot.
func qsort1(list []interface{}, off int, len int, comp func(interface{}, interface{}) int) {
	// Fewer than 7 elements: insertion sort. Despite its O(n^2) worst
	// case it beats quicksort on tiny inputs because it avoids the
	// recursion overhead.
	if len < 7 {
		for i := off; i < len+off; i++ {
			for j := i; j > off && comp(list[j-1], list[j]) > 0; j-- {
				swap(list, j, j-1)
			}
		}
		return
	}
	// Choose a partition element v. A good pivot splits the range into
	// roughly equal halves and avoids the O(n^2) degenerate case (e.g.
	// taking the first element of an already-sorted range).
	m := off + (len >> 1) // small ranges: middle element
	if len > 7 {
		l := off
		n := off + len - 1
		if len > 40 {
			// Large ranges: pseudo-median of 9 evenly spaced elements.
			s := len / 8
			l = med3(list, l, l+s, l+2*s, comp)
			m = med3(list, m-s, m, m+s, comp)
			n = med3(list, n-2*s, n-s, n, comp)
		}
		// Mid-size ranges: median of first, middle and last.
		m = med3(list, l, m, n, comp)
	}
	// The partition element.
	v := list[m]
	// Establish invariant: v* (<v)* (>v)* v*
	a := off
	b := a
	c := off + len - 1
	d := c
	for {
		for b <= c && comp(list[b], v) <= 0 {
			// BUG FIX: equality must be decided by the comparator, not by
			// interface identity (list[b] == v). The identity test
			// disagrees with the comparator for equivalent-but-distinct
			// values and panics on uncomparable dynamic types; the mirror
			// loop below already used comp(...) == 0.
			if comp(list[b], v) == 0 {
				swap(list, a, b)
				a++
			}
			b++
		}
		for c >= b && comp(list[c], v) >= 0 {
			if comp(list[c], v) == 0 {
				swap(list, c, d)
				d--
			}
			c--
		}
		if b > c {
			break
		}
		swap(list, b, c)
		b++
		c--
	}
	// Swap the pivot-equal runs back to the middle.
	s := off + len
	n := s
	s = min(a-off, b-a)
	vecswap(list, off, b-s, s)
	s = min(d-c, n-d-1)
	vecswap(list, b, n-s, s)
	// Recursively sort the non-pivot partitions.
	s = b - a
	if s > 1 {
		qsort1(list, off, s, comp)
	}
	s = d - c
	if s > 1 {
		qsort1(list, n-s, s, comp)
	}
}
// min returns the smaller of a and b.
func min(a int, b int) int {
	if a < b {
		return a
	}
	return b
}
// max returns the larger of a and b.
func max(a int, b int) int {
	if a >= b {
		return a
	}
	return b
}
// swap exchanges the elements of list at positions a and b.
func swap(list []interface{}, a int, b int) {
	tmp := list[a]
	list[a] = list[b]
	list[b] = tmp
}
// vecswap swaps the n-element ranges list[a:a+n] and list[b:b+n],
// pair by pair in increasing index order.
func vecswap(list []interface{}, a int, b int, n int) {
	for i := 0; i < n; i++ {
		list[a+i], list[b+i] = list[b+i], list[a+i]
	}
}
// med3 returns whichever of the indices a, b, c points at the element
// with the median value under comp.
func med3(list []interface{}, a int, b int, c int, comp func(interface{}, interface{}) int) int {
	if comp(list[a], list[b]) < 0 {
		switch {
		case comp(list[b], list[c]) < 0:
			return b
		case comp(list[a], list[c]) < 0:
			return c
		default:
			return a
		}
	}
	switch {
	case comp(list[b], list[c]) > 0:
		return b
	case comp(list[a], list[c]) > 0:
		return c
	default:
		return a
	}
}
// rangeCopy copies length elements from src starting at low into dest
// starting at destLow, in increasing index order.
func rangeCopy(src []interface{}, low int, dest []interface{}, destLow int, length int) {
	d, s := destLow, low
	for n := 0; n < length; n++ {
		dest[d] = src[s]
		d++
		s++
	}
}
/**
 * Src is the source array that starts at index 0
 * Dest is the (possibly larger) array destination with a possible offset
 * low is the index in dest to start sorting
 * high is the end index in dest to end sorting
 * off is the offset to generate corresponding low, high in src
 */
func mergeSort(src []interface{}, dest []interface{}, low int, high int, off int, comp func(interface{}, interface{}) int, depth int, maxDepth int, ch chan int) {
	// When running as a goroutine, signal completion to the parent on exit.
	defer func() {
		if ch != nil {
			ch <- 0
		}
	}()
	length := high - low
	// Insertion sort on smallest arrays
	if length < 7 {
		for i := low; i < high; i++ {
			for j := i; j > low && comp(dest[j-1], dest[j]) > 0; j-- {
				swap(dest, j, j-1)
			}
		}
		return
	}
	// Recursively sort halves of dest into src
	destLow := low
	destHigh := high
	low += off
	high += off
	/*
	 * Midpoint of the offset-adjusted range. (The Java original this was
	 * ported from used the unsigned right shift >>>; with non-negative
	 * indices the plain signed shift is equivalent.)
	 */
	mid := (low + high) >> 1
	if ch != nil {
		// Parallel mode: fan out into two goroutines until maxDepth is
		// reached, then continue sequentially (a nil channel disables
		// further fan-out in the recursive calls).
		if depth < maxDepth {
			ch2 := make(chan int)
			go mergeSort(dest, src, low, mid, -off, comp, depth+1, maxDepth, ch2)
			go mergeSort(dest, src, mid, high, -off, comp, depth+1, maxDepth, ch2)
			waitCh(ch2, 2)
		} else {
			mergeSort(dest, src, low, mid, -off, comp, depth+1, maxDepth, nil)
			mergeSort(dest, src, mid, high, -off, comp, depth+1, maxDepth, nil)
		}
	} else {
		mergeSort(dest, src, low, mid, -off, comp, depth+1, maxDepth, ch)
		mergeSort(dest, src, mid, high, -off, comp, depth+1, maxDepth, ch)
	}
	// If list is already sorted, just copy from src to dest. This is an
	// optimization that results in faster sorts for nearly ordered lists.
	if comp(src[mid-1], src[mid]) <= 0 {
		rangeCopy(src, low, dest, destLow, length)
		return
	}
	// Merge sorted halves (now in src) into dest
	for i, p, q := destLow, low, mid; i < destHigh; i++ {
		if q >= high || p < mid && comp(src[p], src[q]) <= 0 {
			dest[i] = src[p]
			p++
		} else {
			dest[i] = src[q]
			q++
		}
	}
}
|
<filename>Win10RestartBlockerUI/resource.h
//{{NO_DEPENDENCIES}}
// Microsoft Visual C++ generated include file.
// Used by Win10RestartBlockerUI.rc
//
#define IDI_MAIN_ICON 1
#define IDC_MYICON 2
#define IDD_WIN10RESTARTBLOCKERUI_DIALOG 102
#define IDD_ABOUTBOX 103
#define IDM_ABOUT 104
// Resource and control identifiers for the main window.
// NOTE(review): this block follows the layout of a Visual Studio
// resource-editor header (resource.h); if it is still regenerated by the
// IDE, hand-written comments here may be overwritten — confirm.

// Top-level resources: main frame icon, main dialog, menu, accelerators.
#define IDR_MAINFRAME                   128
#define IDD_MAIN_WND                    129
#define IDR_MENU_MAIN                   130
#define IDR_ACCELERATOR_MAIN            132

// Dialog control identifiers (IDC_*) used on the main window.
#define IDC_STATIC_ICN                  1000
#define IDC_CHECK_BLOCK_ENABLED         1001
#define IDC_STATIC_MSG1                 1002
#define IDC_COMBO_POPUP_TIMEOUT         1003
#define IDC_CHECK_PLAY_WARN_SOUND       1004
#define IDC_CHECK_IDLE_SLEEP            1005
#define IDC_COMBO_SHOW_TYPE             1006
#define IDC_COMBO_SHOW_VAL1             1007
#define IDAPPLY                         1008
#define IDC_STATIC_NAME_VAL1            1009
#define IDC_STATIC_MEASURE_VAL1         1010
#define IDC_STATIC_APP_NAME             1011
#define IDC_STATIC_COPYRIGHT            1012
#define IDC_SYSLINK_DB                  1013
#define IDC_STATIC_APP_VER              1014

// Menu / accelerator command identifiers (ID_*).
#define ID_FILE_EXIT                    32771
#define ID_HELP_ABOUT                   32772
#define ID_EDIT_SETDEFAULTS             32773
#define ID_HELP_CHECKFORUPDATES         32774
#define ID_HELP_LEARNITWASMADE          32775
#define ID_HELP_LEARN_HOW_IT_WAS_MADE   32776
#define ID_HELP_CHECK_FOR_UPDATES       32777
#define ID_HELP_LEARN_HOW_IT_WORKS      32778
#define ID_HELP_ONLINEHELP              32779
#define ID_HELP_BUGREPORT               32780
#define ID_BUGREPORT_OPENEVENTLOG       32781
#define ID_BUGREPORT_REPORTBUG          32782
#define ID_BUGREPORT_OPEN_EVENT_LOG     32783
#define ID_BUGREPORT_REPORT_BUG         32784
#define ID_FILE_SAVEINTOCONFIGFILE      32785
#define ID_FILE_SAVE_INTO_CONFIG_FILE   32786
#define ID_FILE_OPENCONFIGFILE          32789
#define ID_FILE_OPEN_CONFIG_FILE        32790
#define ID_OPTIONS_REBOOTWITHO          32793
#define ID_OPTIONS_REBOOT_WITHOUT_UPDATES 32794
#define ID_OPTIONS_SHUTDOWNWITHOUTUPDATES 32795
#define ID_OPTIONS_SHUTDOWN_WITHOUT_UPDATES 32796
#define ID_OPTIONS_FORCE                32797
#define ID_OPTIONS_FORCE_BSOD           32798
#define ID_OPTIONS_REBOOTANDINSTALLUPDATES 32799
#define ID_OPTIONS_SHUTDOWNANDINSTALLUPDATES 32800
#define ID_OPTIONS_REBOOT_AND_INSTALL_UPDATES 32801
#define ID_OPTIONS_SHUTDOWN_AND_INSTALL_UPDATES 32802

// Conventional placeholder ID for static controls not referenced from code.
#define IDC_STATIC -1

// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NO_MFC 1
#define _APS_NEXT_RESOURCE_VALUE 133
#define _APS_NEXT_COMMAND_VALUE 32803
#define _APS_NEXT_CONTROL_VALUE 1015
#define _APS_NEXT_SYMED_VALUE 110
#endif
#endif
|
What we do comes out of who we believe we are.
A good friend of mine, who graduated with honors from Cal Berkeley several years ago, is now the co-founder and CEO of a successful start-up in Silicon Valley. Throughout grade school he struggled with reading and writing disabilities. He spent kindergarten all the way through 12th grade in English ESE classes. During a parent-teacher conference when he was a freshman in high school, two ESE teachers collectively informed his mom that it was highly unlikely he would ever graduate.
So how did he do it? How did he push through and overcome the odds? “Affirmations,” he told me with a serious smile when I interviewed him yesterday for a side-project I’m working on. “I literally told myself that they were wrong about me. I told myself exactly what I needed to hear, every single day, to move my life forward. It may sound like a cliché to some people, but it’s not; it’s powerful stuff!”
I love his sentiment, and I agree with him 110%. In fact, there’s a good reason why training our minds with positive affirmations works wonders like this.
Just like every muscle in the body, the mind needs to be exercised to gain strength. It needs to be worked consistently to grow and develop over time. If you haven’t pushed your mind in thousands of little ways over time, of course it’ll crumble on the one day that things get really challenging.
A mind well trained with positive affirmations has the right thoughts queued up and ready for retrieval at a moment’s notice.
If you’d like to begin (or enhance) this practice in your own life, here’s a selection of affirmations I often suggest to our course students, as starting point:
“I cannot control everything that happens to me; I can only control the way I respond to what happens. In my response is my power.” “I will not get caught up in what could’ve been or should’ve been. I will look instead at the power and possibility of what is, right now.” “I have to accept whatever comes my way, and the only important thing is that I meet it with the best I have to give.” “Making mistakes is always better than faking perfections.” (Read The Gifts of Imperfection.) “I will never be as good as everyone tells me when I win, and I will never be as bad as I think when I lose.” “I will think less about managing my problems and more about managing my mindset. I will keep it positive.” “A challenge only becomes an obstacle if I bow to it.” “I will get back up. Again, and again. The faster I recover from setbacks, the faster I’ll get where I’m going in life.” “I will not try to hide from my fears, because I know they are not there to scare me. They are there to let me know that something is worth it.” “There is a big difference between empty fatigue and gratifying exhaustion. Life is too short. I will invest in the activities that deeply move me.” “If I don’t have time for what matters, I will stop doing things that don’t.” “I cannot build a reputation and legacy for myself based on what I am going to (maybe) do someday.” “The future can be different than the present, and I have the power to make it so, right now.” “Happiness will come to me when it comes from me.” “Getting ahead is essential, and I will never get ahead of anyone (including my past self) as long as I try to get even with them.” “I will focus on making myself better, not on thinking that I am better.” “I will be too busy working on my own grass to notice if yours is greener.” “I will eat like I love myself. Move like I love myself. Speak like I love myself. Live like I love myself. Today. 
(Angel and I discuss this in more detail in the “Self-Love” chapter of 1,000 Little Things Happy, Successful People Do Differently.) “My next step in the right direction doesn’t have to be a big one.” “All the small victories are worth celebrating, every step of the way. It’s the small things done well that make a big, exciting life in the end.”
And remember, one of life’s greatest gifts is the fact that it is often difficult. Because in dealing with life’s difficulties, we build invaluable strength. This strength enables us to successfully fulfill our deepest, most meaningful purposes. It is precisely because life is difficult that we are able to make it great. It is because life is difficult that we are able to rise above the difficulties. We are able to make a difference and we are able to leave our mark on the world.
Your turn…
What’s one of your go-to affirmations? What do you tell yourself when you need to hear it? Please share your thoughts with us by leaving a comment below.
Photo by: Peeratam Tangtua |
<gh_stars>1-10
import autocurry from "Function/autocurry/autocurry"

/**
 * Returns a boolean indicating whether a given value is within a lower and upper bounds.
 * The lower bound is inclusive, the upper bound is exclusive.
 *
 * @param {[number, number]} bounds - tuple of [ lower, upper ] bounds
 * @param {number} value - the value to test
 * @returns {boolean} true when lower <= value < upper
 */
const inRange = ([ lower, upper ], value) =>
  value >= lower && value < upper

// Curried export: may be called with both arguments at once, or with the
// bounds first to obtain a reusable predicate.
export default autocurry(inRange) as {
  ([ lower, upper ]: [ number, number ], value: number): boolean
  ([ lower, upper ]: [ number, number ]): (value: number) => boolean
}
|
/** Drawing tools selectable while remote-drawing over a slide. */
export enum DeckdeckgoDrawAction {
  PENCIL,
  CIRCLE,
  ARROW
}

/** Playback actions that can be requested for a slide's media. */
export enum DeckdeckgoSlideAction {
  PLAY = 'play',
  PAUSE = 'pause'
}

/** A single HTML attribute (name/value pair) attached to a slide or deck. */
export interface DeckdeckgoAttributeDefinition {
  name: string;
  value?: any;
}

/** Serialized description of one slide: its template, content and attributes. */
export interface DeckdeckgoSlideDefinition {
  template: string | undefined;
  content?: string;
  attributes?: DeckdeckgoAttributeDefinition[] | null;
}

/** Serialized description of a whole deck, including its slides. */
export interface DeckdeckgoDeckDefinition {
  slides: DeckdeckgoSlideDefinition[];
  attributes?: DeckdeckgoAttributeDefinition[];
  background?: string;
  reveal: boolean;
  revealOnMobile: boolean;
}

/** Event types exchanged between the deck and the remote-control app. */
export enum DeckdeckgoEventType {
  SLIDES_REQUEST = 'slides_request',
  SLIDES_ANSWER = 'slides_answer',
  DECK_REQUEST = 'deck_request',
  DECK_UPDATE = 'deck_update',
  SLIDE_UPDATE = 'slide_update',
  NEXT_SLIDE = 'next_slide',
  PREV_SLIDE = 'prev_slide',
  SLIDE_TO = 'slide_to',
  CLEAR_SLIDE = 'clear_slide',
  START_DRAWING = 'start_drawing',
  DRAW = 'draw',
  END_DRAWING = 'end_drawing',
  SLIDE_ACTION = 'slide_action',
  DELETE_SLIDE = 'delete_slide',
  DECK_REVEAL_UPDATE = 'deck_reveal_update'
}

/** Which side of the connection emitted an event. */
export enum DeckdeckgoEventEmitter {
  DECK = 'deck',
  APP = 'app'
}

/** Base shape common to every event: its type and its emitter. */
export interface DeckdeckgoEvent {
  type: DeckdeckgoEventType;
  emitter: DeckdeckgoEventEmitter;
}

/** Drawing event; coordinates are relative to the emitter's window size. */
export interface DeckdeckgoEventDraw extends DeckdeckgoEvent {
  action: DeckdeckgoDrawAction;
  clientX: number;
  clientY: number;
  windowWidth: number;
  windowHeight: number;
  color?: string;
}

/** Next/previous navigation; slideAnimation selects animated transitions. */
export interface DeckdeckgoEventNextPrevSlide extends DeckdeckgoEvent {
  slideAnimation: boolean;
}

/** Toggles the deck's "reveal" (progressive display) mode. */
export interface DeckdeckgoEventDeckReveal extends DeckdeckgoEvent {
  reveal: boolean;
}

/** Jump to a specific slide index, optionally with a transition speed. */
export interface DeckdeckgoEventSlideTo extends DeckdeckgoEvent {
  index: number;
  speed?: number;
}

/** Full deck payload: slide count, mobile flag and the deck definition. */
export interface DeckdeckgoEventDeck extends DeckdeckgoEvent {
  length: number;
  mobile: boolean;
  deck: DeckdeckgoDeckDefinition;
}

/** Single-slide payload identified by its index in the deck. */
export interface DeckdeckgoEventSlide extends DeckdeckgoEvent {
  index: number;
  slide: DeckdeckgoSlideDefinition;
}

/** Requests a media action (play/pause) on the current slide. */
export interface DeckdeckgoEventSlideAction extends DeckdeckgoEvent {
  action: DeckdeckgoSlideAction;
}

/** Deck request carrying a message and the requesting socket's id. */
export interface DeckdeckgoEventDeckRequest extends DeckdeckgoEvent {
  message: string;
  fromSocketId: string;
}
|
// Replace the first occurrence of `out` in `str` with `in`.
// Leaves `str` untouched when `out` does not occur.
void replace(std::string& str,
             const std::string& out,
             const std::string& in)
{
    const std::string::size_type pos = str.find(out);
    if (pos == std::string::npos)
        return;
    str.replace(pos, out.size(), in);
}
// implements the functionality for browsing
public class scalaLabPathsListener implements TreeSelectionListener {
JTree pathsTree;
// the selectedValue keeps the full pathname of the selected object for further processing
public static String selectedValue;
public static String selectedPath;
public static DefaultMutableTreeNode parentOfSelectedNode; // the parent node of the selected node of the JTree
public static DefaultMutableTreeNode selectedNode; // the currently selected node of the JTree
// initialize the listener object for the corresponding JTree
public scalaLabPathsListener(JTree classPathsTree) {
pathsTree = classPathsTree;
}
public void explicitUpdate() {
TreePath path = pathsTree.getSelectionPath(); // get the full path to the selected node
if (path==null) return; // not any tree's node selected
selectedNode = (DefaultMutableTreeNode) path.getLastPathComponent();
parentOfSelectedNode = (DefaultMutableTreeNode) selectedNode.getParent();
Object [] objPath = selectedNode.getUserObjectPath();
int len = objPath.length;
// for nested objects, their path is repeated as the parent node, so concatenate the parent and the filename to build the complete path
scalaLabPathsListener.selectedValue = objPath[len-1].toString();
scalaLabPathsListener.selectedPath = selectedValue.substring(0, selectedValue.lastIndexOf(File.separator));
}
@Override
public void valueChanged(TreeSelectionEvent event) {
new Thread(new Runnable() { // Runnable-out
public void run() { // run-out
SwingUtilities.invokeLater(new Runnable() { // Runnable-in
public void run() { // run in */
TreePath path = pathsTree.getSelectionPath();
if (path==null) return; // not any tree's node selected
selectedNode = (DefaultMutableTreeNode) path.getLastPathComponent();
parentOfSelectedNode = (DefaultMutableTreeNode) selectedNode.getParent();
Object [] objPath = selectedNode.getUserObjectPath();
int len = objPath.length;
// for nested objects, their path is repeated as the parent node, so concatenate the parent and the filename to build the complete path
scalaLabPathsListener.selectedValue = objPath[len-1].toString();
scalaLabPathsListener.selectedPath = selectedValue.substring(0, selectedValue.lastIndexOf(File.separator));
}
}); // Runnable-in
} // run-out
}).start(); // Runnable-out
} // valueChanged
} |
Development
More than 2,500 business leaders, economists and politicians from around the world will attend the meeting in Davos.
Telangana's Commerce Minister KT Rama Rao has received an invitation to the World Economic Forum (WEF) annual meeting in Switzerland next month -- an invite generally restricted to Union Ministers or Chief Ministers.
The announcement was made by an official statement that was issued on Wednesday.
The Information Technology, Industries and Commerce Minister has been invited to the prestigious meet in recognition of his active role in making Telangana the number one state in the ease of doing business ranking, said the statement from the Minister's office.
More than 2,500 business leaders, economists and politicians from around the world will attend the meeting scheduled to be held in Davos.
The forum, while extending the special invite, took into consideration the key role played by the minister in organising the Global Entrepreneurship Summit in Hyderabad last month and his efforts over the last three years in attracting investment to the state.
Rama Rao, the son of Chief Minister K. Chandrasekhar Rao, will meet business leaders and CEOs of leading companies during the four-day meet starting from January 23.
KTR, as the minister is popularly known, said he would highlight the policies of the state government, especially the industrial policy and the enormous investment opportunities in the state.
Andhra Pradesh Chief Minister N Chandrababu Naidu is among those who have attended the summit.
Earlier this month, in a move of bipartisanship, KTR praised Naidu and said that the TDP chief had played a key role in attracting investments to Hyderabad.
"I can't take undue credit for Microsoft being here (in Hyderabad). In fact, all credit goes to Chandrababu Naidu garu. He definitely has done his best. In fact, when Hyderabad was not as well-known as it is today in Information Technology space, he went out all the way to Bill Gates, convinced him and eventually the development centre (of Microsoft) happened about 17 years ago. So, all credit to him (Naidu) for that," KTR was quoted as saying.
The Minister was addressing techies while speaking at a Tech Mahindra’s programme at Hitex in Hyderabad's Madhapur area.
IANS inputs |
Poor neighbourhoods are increasing in outer boroughs but getting fewer in the centre, new analysis reveals
Outer London has seen rising levels of poverty while the number of poorer areas in central London is reducing, according to a new analysis of official deprivation data.
Although the poorest places in the capital are still in the eastern centre of the city, there are fears that poverty is being pushed out into the suburbs amid evidence of a significant increase in deprived areas in the outer boroughs between 2004 and 2010.
The findings are also acute in areas that saw rioting last August. In Enfield and Haringey, 22 neighbourhoods have become more deprived; in Ealing and Hillingdon 33 have and in Croydon and Sutton 75 have. Only 20 neighbourhoods in those three London assembly areas have become less deprived.
The figures were on the rise before the 2008 mayoral election. Boris Johnson won after focusing his campaign on the outer London ring in a "doughnut strategy". He beat Ken Livingstone by fewer than 140,000 votes, while the Labour candidate's core vote strategy centred on the inner city. This time around, he may face hostility from voters who have seen living standards decline.
The data shows that 430 neighbourhoods in London have become significantly more deprived than their neighbours since 2004, and 400 of those are in the outer boroughs.
In contrast, only 374 neighbourhoods across London have become significantly less deprived, and they are predominantly in the west and central parts of the city.
The biggest contrast is in the Brent and Harrow London assembly area, where 83 areas have got more deprived and only one has become less so. The data measures poverty in tiny "lower super output areas", each with a population of around 1,500 people. An area that has slipped down the index and become more deprived may still not be poor – it has just become more deprived compared with other neighbourhoods.
Tony Travers, director of the Greater London group at the London School of Economics, said: "It would appear that in 2008, rising levels of deprivation were associated with voting for Boris Johnson. It is paradoxical that as outer London has got poorer, it has appeared to vote for Johnson. Ken Livingstone will no doubt be hoping to reverse this with his policies in 2012."
The poverty data is based on an analysis by Alasdair Rae at the University of Sheffield of the government's Indices of Multiple Deprivation, which measures relative poverty across England, between 2004 and 2010. Rae said: "In London, it would appear that centrifugal forces are currently helping shift poverty from inner to outer London."
A recent report by the London School of Economics found that a majority of people in poverty in London now live in the outer city, whereas a decade earlier it was evenly split.
This reflects what is happening in major US cities, where a Brookings Institution report found the suburbs were home to the largest and fastest-growing poor populations in the country. |
/*
 *----------------------------------------------------------------------
 *
 * TclCompileDict*Cmd --
 *
 *	Functions called to compile "dict" subcommands.
 *
 * Results:
 *	All return TCL_OK for a successful compile, and TCL_ERROR to defer
 *	evaluation to runtime.
 *
 * Side effects:
 *	Instructions are added to envPtr to execute the "dict" subcommand at
 *	runtime.
 *
 *----------------------------------------------------------------------
 */

int
TclCompileDictSetCmd(
    Tcl_Interp *interp,
    Tcl_Parse *parsePtr,
    Command *cmdPtr,
    CompileEnv *envPtr)
{
    Tcl_Token *tokenPtr;
    int numWords, i;
    DefineLineInformation;
    Tcl_Token *varTokenPtr;
    int dictVarIndex, nameChars;
    const char *name;

    /*
     * Need at least [dict set varName key value] (4 words); anything shorter
     * is left for runtime to reject.
     */

    if (parsePtr->numWords < 4) {
	return TCL_ERROR;
    }

    /*
     * The dictionary variable must be a literal word naming a local scalar,
     * otherwise no compiled-local slot can be resolved at compile time.
     */

    varTokenPtr = TokenAfter(parsePtr->tokenPtr);
    if (varTokenPtr->type != TCL_TOKEN_SIMPLE_WORD) {
	return TCL_ERROR;
    }
    name = varTokenPtr[1].start;
    nameChars = varTokenPtr[1].size;
    if (!TclIsLocalScalar(name, nameChars)) {
	return TCL_ERROR;
    }
    dictVarIndex = TclFindCompiledLocal(name, nameChars, 1, envPtr);
    if (dictVarIndex < 0) {
	return TCL_ERROR;
    }

    /*
     * Push all remaining words (the key path and the value), then emit the
     * instruction; numWords-2 is the number of keys pushed.
     */

    tokenPtr = TokenAfter(varTokenPtr);
    numWords = parsePtr->numWords-1;
    for (i=1 ; i<numWords ; i++) {
	CompileWord(envPtr, tokenPtr, interp, i);
	tokenPtr = TokenAfter(tokenPtr);
    }
    TclEmitInstInt4( INST_DICT_SET, numWords-2, envPtr);
    TclEmitInt4( dictVarIndex, envPtr);
    TclAdjustStackDepth(-1, envPtr);
    return TCL_OK;
}
<gh_stars>1-10
import React from "react";
import cn from "classnames";
import { Variant } from "../common";
interface AlertProps {
children: React.ReactNode;
variant: Variant;
className?: string;
dismiss?: boolean;
style?: React.CSSProperties;
visible?: boolean;
}
export const Alert = ({
children,
className,
dismiss = true,
style,
variant,
visible = true,
}: AlertProps) => {
const [isVisible, setVisible] = React.useState(visible);
if (!isVisible) {
return null;
}
const classes = cn(
"alert",
`alert-${variant}`,
{
"alert-dismissible": dismiss,
},
className,
);
const onClick = (e: React.MouseEvent) => {
setVisible(false);
e.preventDefault();
};
return (
<div className={classes} style={style} role="alert">
{dismiss && (
<a
href="#"
className="close"
data-dismiss="alert"
aria-label="close"
onClick={onClick}
>
×
</a>
)}
{children}
</div>
);
};
export default Alert;
|
The Clay Belt is a vast tract of fertile soil stretching between the Cochrane District in Ontario, and Abitibi County in Quebec, covering 180,000 square kilometres (69,000 sq mi) in total[1] with 120,000 square kilometres (46,000 sq mi) of that in Ontario.[2] It is generally subdivided into the Great Clay Belt to the north running eastward from Kapuskasing, past Lake Abitibi and on to Amos, and the V-shaped Lesser Clay Belt to its south, running from Englehart down to the Wabi River to the northern tip of Lake Timiskaming, and along the eastern side of Timiskaming and back up to Rouyn-Noranda. The Clay Belt is the result of the draining of the Glacial Lake Ojibway around 8,200 BP, whose lakebed sediment forms the modern landform. The Clay Belt is surrounded by the Canadian Shield, forming an island of "southern flatlands" in the midst of the hilly and rocky surroundings. Similar "glaciolacustrine deposits" dot the northern areas of Ontario, Quebec and Labrador.
Discovery [ edit ]
This satellite photo of Lake Timiskaming shows a clear difference in landforms, with the muskeg of the Canadian Shield to the southwest and flatter drained and cleared area of the Lesser Clay Belt to the north and east. The white coloring is due to snow lying on the flat land, while it is hidden under the fir-covered Shield to the south.
The area was first mapped by Dr. Robert Bell and his assistant Arthur Barlow in 1887, as part of a wider series of surveys in northern Ontario. In 1899, Barlow wrote a report on the geology and natural resources of the area, which suggested that the rich belt of clay that lay north of Lake Temiskaming was ideal for agricultural settlement. The area has a rich clay soil, in contrast to the low fertility of the muskeg and exposed bedrock shield surrounding it. Moreover, the combination of its general fertility, flat topography, high water table and relative accessibility to an extensive network of roads for logging and mining make it suitable for some types of farming. The following year, the government announced plans to develop the area by tapping its natural resources.
Soon after, Bernhard Eduard Fernow traveled the area at the behest of the federal Commission of Conservation, ostensibly to survey the area of the Canadian National Railway transcontinental main line (formerly the Grand Trunk) and the potential for fire. He makes extensive mention of the condition of the timber, and has a somewhat tempered view of their potential commercial value. He then moves on to a somewhat more positive report on the soil and its suitability for farming, saying its future is "bright". Wishing to avoid the "Trent watershed" problem, an earlier failed settlement attempt, he suggested setting up an experimental farm to test what "treatment is necessary on the various soils".[3] Such a farm was set up in Kapuskasing on the west side of the river to explore and develop crops and systems for farming the area.
Promotion [ edit ]
The Canadian government encouraged immigrants to settle there as farmers during and after World War I. Governments of the day were mistakenly impressed with the agricultural potential of the Great Clay Belt. Under the Soldier Settlement Act, 1917 (shortly replaced by the Soldier Settlement Act, 1919)[4] the Soldier Settlement Board established the Kapuskasing Soldier Colony to settle veterans that had returned from the Great War. Settlers received homesteads, grants and guaranteed loans and were paid for clearing their own land. But by 1920 only nine of more than a hundred original settlers remained. The farming consisted of some grains, mostly oats, and vegetables.
The Forestry Act, 1927 Graphic describing lands affected by
The Ontario government also passed legislation in 1927[5] to enable the migration of farmers from barren lands in older parts of the Province, such as Haliburton County, to areas such as the Lesser Clay Belt through the exchange of land.[6]
In spite of numerous rocky outcrops some farming was successfully established within the Great Clay Belt, however, it proved to be impractical because of the short growing season. The clay soil is tremendously fertile, but long snowy winters coupled with unpredictable rainfall during the short growing season meant most farming yielded little.[7]
Decline [ edit ]
By 1935, immigration to the Great Clay Belt virtually ended. One farmer, describing why he returned to urban life, stated that, in the Great Clay Belt, "there are seven months of snow, two months rain, and all the rest is black flies and mosquitoes." Some of the farmers returned to Toronto and Montreal. Some moved west to the prairie provinces of Manitoba, Saskatchewan and Alberta once the National Transcontinental Railway was completed. Many of the farmers shifted to mining once minerals were found in the area. Others entered the logging industry. Some towns still remaining today include Cochrane, New Liskeard, Timmins, Kapuskasing, and Hearst.
Neither the Great nor the Lesser Clay Belt seems able to attract any sort of sustainable industry to employ people for long periods of time. Both regions go through periodic boom and bust cycles, depending upon the fortunes of the pulp and paper industries, and the mining industries.
Further reading [ edit ] |
/**
* Proper alternative to {@link VirtualFileUtils#getProjectVirtualFile(IFile)}, get back from MPS's {@code IFile} to IDEA's {@code VirtualFile}
* @param file MPS file abstraction
* @return IDEA's VirtualFile, if supplied IFile is tracked under project's file system.
* @since 2021.1
*/
@Override
@Nullable
public VirtualFile asVirtualFile(@NotNull IFile file) {
if (file instanceof IdeaFile) {
return ((IdeaFile) file).getVirtualFile();
} else {
return null;
}
} |
Factor analysis of essential and toxic elements in human placentas from deliveries in arctic and subarctic areas of Russia and Norway.
Concentrations in human placenta of 11 essential elements (P, Ca, Mg, Cu, S, Na, Fe, Zn, K, Se, Mn) and 5 toxic elements (Ba, Sr, Pb, Ni, Cd) are compared for each of two arctic communities in eastern Norway and western Russia, and for another in each country located at more southerly latitudes. All but Mg, Fe, P and K were present in higher concentrations in the Russian study group. The observed inter-element correlations are reflected by the four major factors identified in a principal component analysis. The total variation explained was 67.3%, of which more than half (35.3%) was contributed by Factor 1. P, Ca, Mg, Ba, Sr, Pb, and Ni were major contributors to this factor. The placental concentrations of these elements depended strongly on gestational age, increasing from about week 35 and peaking near weeks 39 and 40, and exhibited skewed frequency distributions and a dependence on maternal smoking. The gestational-dependent mineralization of the placenta is interpreted to reflect the deposition of metal phosphates coinciding with smoking-induced tissue damage. The loadings of the remaining three factors are reviewed in the context of common uptake mechanisms, similar biochemistries and unique transport pathways. The inter-element relationships and grouping of the elements observed should constitute a scientific base for the use of placenta composition in environmental monitoring and epidemiological studies. |
/*
 * pmempool_check_pool_hdr -- check/repair pool header of all files in pool set
 */
static check_result_t
pmempool_check_pool_hdr(struct pmempool_check *pcp)
{
	/* map headers read-only unless repairs were requested AND executed */
	int rdonly = !pcp->repair || !pcp->exec;
	if (pool_set_file_map_headers(pcp->pfile, rdonly,
		sizeof (struct pool_hdr))) {
		outv_err("cannot map pool headers\n");
		return CHECK_RESULT_ERROR;
	}

	/* aggregate flags: worst per-part outcome across all replicas */
	int cannot_repair = 0;
	int repaired = 0;
	int not_consistent = 0;
	check_result_t ret = CHECK_RESULT_CONSISTENT;
	unsigned nreplicas = pcp->pfile->poolset->nreplicas;
	unsigned nfiles = pmempool_check_count_files(pcp);

	/* check every part header of every replica */
	for (unsigned r = 0; r < nreplicas; r++) {
		struct pool_replica *rep = pcp->pfile->poolset->replica[r];
		for (unsigned p = 0; p < rep->nparts; p++) {
			/* prefix messages so output identifies the part */
			if (nfiles > 1) {
				snprintf(prefix_buff, PREFIX_BUFF_SIZE,
						"replica %u part %u",
						r, p);
				out_set_prefix(prefix_buff);
			}
			ret = pmempool_check_pool_hdr_single(pcp, r,
					nreplicas, p, rep->nparts);
			if (ret == CHECK_RESULT_CANNOT_REPAIR)
				cannot_repair = 1;
			else if (ret == CHECK_RESULT_REPAIRED)
				repaired = 1;
			else if (ret == CHECK_RESULT_NOT_CONSISTENT)
				not_consistent = 1;
		}
	}

	/* keep a copy of the first part's (possibly repaired) header */
	memcpy(&pcp->hdr.pool, pcp->pfile->poolset->replica[0]->part[0].hdr,
		sizeof (struct pool_hdr));

	out_set_prefix(NULL);
	pool_set_file_unmap_headers(pcp->pfile);

	/* report the most severe result observed */
	if (cannot_repair)
		return CHECK_RESULT_CANNOT_REPAIR;
	if (repaired)
		return CHECK_RESULT_REPAIRED;
	if (not_consistent)
		return CHECK_RESULT_NOT_CONSISTENT;
	return ret;
}
<commit_msg>docs: Use the current year in the copyright
<commit_before>from __future__ import unicode_literals
import sys
import os
sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
import youtube_dl_server as ydl_server
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinxcontrib.httpdomain',
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'youtube-dl-api-server'
copyright = '2013, Jaime Marquínez Ferrándiz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = ydl_server.__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
<commit_after>from __future__ import unicode_literals
import datetime
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath('.')))
import youtube_dl_server as ydl_server
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinxcontrib.httpdomain',
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'youtube-dl-api-server'
copyright = '2013-{now:%Y}, Jaime Marquínez Ferrándiz'.format(now=datetime.datetime.now())
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = ydl_server.__version__
# The short X.Y version.
version = '.'.join(release.split('.')[:2])
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
|
/**
* This class can be used to add dragging capability to any JComponents.
* It contains the methods and data members needed to support automatic dragging,
* and contains methods to impliment both MouseListener, MouseMotionListener.
* In general, any existing JComponent can be made to be draggable simple by
* creating an instance of JComponentDragHandler (passing a reference to itself)
* and registering the JComponentDragHandler as the listener for all mouse events.
*
* Classes that need similar, but not identical, behavior, or that need to add
* functionality to the mouse methods here can create an inner class that extends
* this class. In this way the inner class can maintain the functionality of
* JComponentDragHandler while also having access to data members and methods
* of its enclosing class for the purposes of extension.
*
* @author Daniel <[email protected]>
*
*/
public class JComponentDragHandler implements MouseListener, MouseMotionListener {
    /**
     * These data members save the point at which the mouse was pressed
     * relative to the (0,0) corner of the JComponent.
     */
    public int mPressedX; //at mouse pressed
    public int mPressedY; //at mouse pressed
    public int mCurrentX; //where the mouse is currently
    public int mCurrentY; //where the mouse is currently
    public int dragDX; // amount of last drag in X direction
    public int dragDY; // amount of last drag in Y direction
    public int oldLocX; //where the component was before dragging
    public int oldLocY; //where the component was before dragging (Y coordinate)
    // Custom hand cursors shared by every handler instance; created lazily
    // by initHandCursors() the first time a handler is constructed.
    private static Cursor openHandCursor = null;
    private static Cursor closedHandCursor = null;
    /**
     * Stores location data (typically of this JComponent)
     * as a Point for easy manipulation and to avoid re-creating a new object every time
     * these manipulations are done.
     */
    public Point myLoc = new Point();
    // The component whose mouse events this handler processes.
    private JComponent myComponent;
    private final Workspace workspace;
    /**
     * Creates a new instance of a JComponentDragHandler with a pointer to the
     * given JComponent. Remember to register this JComponentDragHandler as the
     * listener for mouse events in the JComponent in order for this class to
     * be allowed to handle those events.
     * @param workspace The workspace in use
     * @param jc the JComponent whose mouse events will be handled by this JComponentDragHandler
     */
    public JComponentDragHandler(Workspace workspace, JComponent jc) {
        this.workspace = workspace;
        // this is the JComponent whose mouse events will be handled in this class
        myComponent = jc;
        // lazily create the shared hand cursors on first construction
        if (openHandCursor == null || closedHandCursor == null) {
            initHandCursors();
        }
    }
    // Builds the shared open/closed hand cursors from the bundled images.
    private static void initHandCursors() {
        openHandCursor = createHandCursor("/edu/mit/blocks/codeblocks/open_hand.png", "openHandCursor");
        closedHandCursor = createHandCursor("/edu/mit/blocks/codeblocks/closed_hand.png", "closedHandCursor");
    }
    /**
     * Creates a custom cursor from the image at the given classpath location,
     * centered on a best-fit toolkit cursor canvas.
     * @param location classpath resource path of the cursor image
     * @param cursorName name assigned to the created cursor
     * @return the custom cursor, or the platform HAND_CURSOR when headless
     */
    private static Cursor createHandCursor(String location, String cursorName) {
        if (GraphicsEnvironment.isHeadless()) {
            // return default hand cursor if headless
            return Cursor.getPredefinedCursor(Cursor.HAND_CURSOR);
        }
        java.net.URL handURL = JComponentDragHandler.class.getResource(location);
        assert handURL != null : "Can not find hand cursor image " + cursorName;
        ImageIcon handicon = new ImageIcon(handURL);
        // ask the toolkit for the nearest supported cursor dimensions
        Dimension cursize = Toolkit.getDefaultToolkit().getBestCursorSize(handicon.getIconWidth(), handicon.getIconHeight());
        BufferedImage buffImg = GraphicsManager.gc.createCompatibleImage(
                cursize.width,
                cursize.height,
                Transparency.TRANSLUCENT);
        Graphics2D buffImgG2 = (Graphics2D) buffImg.getGraphics();
        // center the icon on the cursor canvas
        Point cpoint = new Point(cursize.width / 2 - handicon.getIconWidth() / 2, cursize.height / 2 - handicon.getIconHeight() / 2);
        buffImgG2.drawImage(handicon.getImage(), cpoint.x, cpoint.y, null);
        // hotspot is offset 5px right of the icon's top-left corner
        return Toolkit.getDefaultToolkit().createCustomCursor(buffImg, new Point(cpoint.x + 5, cpoint.y), cursorName);
    }
/**
 * @return the cursor shown while hovering over a draggable object
 *         (the open-hand cursor).
 */
public Cursor getDragHintCursor() {
    return openHandCursor;
}
/**
 * @return the cursor shown while a drag is in progress
 *         (the closed-hand cursor).
 */
public Cursor getDraggingCursor() {
    return closedHandCursor;
}
/**
 * Reports the most recently observed mouse position.
 * @return a fresh Point in the JComponent's coordinate frame
 */
public Point getMousePoint() {
    final Point where = new Point(mCurrentX, mCurrentY);
    return where;
}
///////////////////
//MOUSE EVENTS
///////////////////
/**
 * Begins a potential drag: switches to the closed-hand ("grabbing") cursor,
 * records where within the component the press landed, and snapshots the
 * component's current location so the drag delta can be derived later.
 */
public void mousePressed(MouseEvent e) {
    myComponent.setCursor(closedHandCursor);
    // Remember the component's position at press time...
    oldLocX = myComponent.getX();
    oldLocY = myComponent.getY();
    // ...and the press point relative to the component's corner.
    mPressedX = e.getX();
    mPressedY = e.getY();
}
/**
 * This method is called when the mouse is dragged over the JComponent.
 * Moves the JComponent by the amount of the drag such that the point
 * under which the mouse the pressed remains under the mouse cursor. In
 * other words, "drags" the JComponent.
 */
public void mouseDragged(MouseEvent e) {
//System.out.println("mouse dragged: "+this.getLocation());
// Keep the "grabbing" cursor for the duration of the drag.
myComponent.setCursor(closedHandCursor);
// Current mouse position, relative to the component's corner.
mCurrentX = e.getX();
mCurrentY = e.getY();
// Offset of the mouse from the original press point: how far the
// component must move so the press point stays under the cursor.
int dx = mCurrentX - mPressedX;
int dy = mCurrentY - mPressedY;
int curX = myComponent.getX();
int curY = myComponent.getY();
// shift new location by amount of drag
int newX = dx + curX;
int newY = dy + curY;
/*
* Prevent dragging outside of the canvas (keep the mouse-down point inside the canvas)
*/
// NOTE(review): scrollToComponent may scroll the workspace viewport; it is
// deliberately called before dragDX/dragDY are derived from getX()/getY()
// below — presumably so the deltas account for any scroll-induced move.
// Confirm before reordering these statements.
workspace.scrollToComponent(myComponent);
// Where the mouse-down point would land, in workspace coordinates.
Point p = SwingUtilities.convertPoint(myComponent, newX + mPressedX, newY + mPressedY, workspace);
// Intended canvas-bounds clamp; currently a no-op (body commented out, see TODO).
if (workspace.getWidgetAt(p) == null && !workspace.contains(p)) {
// how is this not working? if it's in the window, shouldn't it be dragging?
// I guess the drawer cards aren't widgets, so it's getting confused...
//...should add them as widgets but pass calls to the drawer.
//return; TODO djwendel - is the above way the best to do it? Figure it out then do it.
}
// save how much this drag amount is
dragDX = newX - myComponent.getX();
dragDY = newY - myComponent.getY();
// move to the new location
myComponent.setLocation(newX, newY);
}
/**
 * Tracks the cursor so {@link #getMousePoint()} stays current.
 */
public void mouseMoved(MouseEvent e) {
    final int x = e.getX();
    final int y = e.getY();
    mCurrentX = x;
    mCurrentY = y;
}
/*
* The following methods can be extended by children of this
* class, and are provided here to fill out the implementations
* of MouseListener and MouseMotionListener.
*/
/** Drag finished: revert to the open-hand hover cursor. */
public void mouseReleased(MouseEvent e) {
    myComponent.setCursor(openHandCursor);
}
/** A click (press+release in place) also leaves the open-hand cursor showing. */
public void mouseClicked(MouseEvent arg0) {
    myComponent.setCursor(openHandCursor);
}
/** Entering the component hints draggability via the open-hand cursor. */
public void mouseEntered(MouseEvent arg0) {
    myComponent.setCursor(openHandCursor);
}
/** Leaving the component restores the system default cursor. */
public void mouseExited(MouseEvent arg0) {
    // Cursor.getDefaultCursor() == getPredefinedCursor(DEFAULT_CURSOR)
    myComponent.setCursor(Cursor.getDefaultCursor());
}
}