content
stringlengths 10
4.9M
|
---|
/*
* Copyright(c) 2019 Nippon Telegraph and Telephone Corporation
*/
package msf.ecmm.ope.receiver.pojo.parts;
import java.util.ArrayList;
import java.util.List;
/**
 * QoS configuration feasibility information.
 */
public class QosCapabilities {

  /** Whether inflow / outflow amount control (shaping) can be configured. */
  private Boolean shaping = null;

  /** Whether a remark menu can be configured. */
  private Boolean remark = null;

  /** Available remark menus. */
  private List<String> remarkMenuList = new ArrayList<String>();

  /** Available egress queue menus. */
  private List<String> egressMenuList = new ArrayList<String>();

  /**
   * Returns whether inflow / outflow amount control is configurable.
   *
   * @return shaping feasibility flag ({@code null} when unset)
   */
  public Boolean getShaping() {
    return shaping;
  }

  /**
   * Sets whether inflow / outflow amount control is configurable.
   *
   * @param shaping shaping feasibility flag
   */
  public void setShaping(Boolean shaping) {
    this.shaping = shaping;
  }

  /**
   * Returns whether a remark menu is configurable.
   *
   * @return remark feasibility flag ({@code null} when unset)
   */
  public Boolean getRemark() {
    return remark;
  }

  /**
   * Sets whether a remark menu is configurable.
   *
   * @param remark remark feasibility flag
   */
  public void setRemark(Boolean remark) {
    this.remark = remark;
  }

  /**
   * Returns the list of available remark menus.
   *
   * @return remark menu list (never {@code null} unless set so by a caller)
   */
  public List<String> getRemarkMenuList() {
    return remarkMenuList;
  }

  /**
   * Sets the list of available remark menus.
   *
   * @param remarkMenuList remark menu list
   */
  public void setRemarkMenuList(List<String> remarkMenuList) {
    this.remarkMenuList = remarkMenuList;
  }

  /**
   * Returns the list of available egress queue menus.
   *
   * @return egress queue menu list
   */
  public List<String> getEgressMenuList() {
    return egressMenuList;
  }

  /**
   * Sets the list of available egress queue menus.
   *
   * @param egressMenuList egress queue menu list
   */
  public void setEgressMenuList(List<String> egressMenuList) {
    this.egressMenuList = egressMenuList;
  }

  /**
   * Renders this instance as a human-readable string.
   *
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("QosCapabilities [shaping=");
    sb.append(shaping)
        .append(", remark=").append(remark)
        .append(", remarkMenuList=").append(remarkMenuList)
        .append(", egressMenuList=").append(egressMenuList)
        .append(']');
    return sb.toString();
  }
}
|
BUDAPEST, Hungary — In the United States, nobody listens to Jared Taylor. Despite his Ivy League education and polite manners, few people working in politics take him seriously. That’s because he is a white supremacist, although he would prefer to be called a “racial realist.” When he tries to organize a meeting for his publication, American Renaissance, it is typically banned from hotels and conference rooms as soon as the proprietors find out about its racist mission. His ideas obviously hold little sway with established political parties or institutions. Which explains why Taylor traveled to Hungary last month to organize an international conference of white supremacists and anti-immigrant nationalists from more than 10 countries with the express purpose of making common cause with Europe’s own burgeoning far-right political movements. The conference was blandly dubbed “The Future of Europe.”
Taylor and his fellow organizers, the Montana-based white nationalist think tank National Policy Institute, chose Hungary because of the rise of far-right nationalists in that country; they thought it might offer a hospitable environment for their assembly. In fact, it was the opposite. The Hungarian Prime Minister Viktor Orbán—a member of the leading conservative party who has been criticized for his increasingly authoritarian politics—banned the conference. While Orbán has support from Hungary’s far-right parties, he likely saw this move as an easy way to help position himself as a moderate conservative in the runup to local elections last month. Orbán even ordered police to arrest anyone trying to organize the event. William Regnery, the founder of the National Policy Institute (and heir to the conservative publishing powerhouse Regnery, home to best-sellers from Ann Coulter, Dinesh D’Souza, and Edward Klein, among others) was immediately sent back to the United States when he arrived at Budapest’s airport. Richard Spencer, the director of NPI, was arrested in a Budapest pub when he tried to organize a casual gathering of the conference’s attendees. The conference-goers already had been evicted from the hotel where their meeting was scheduled to take place.
Spencer spent the next three days in a Budapest jail, which he didn’t seem to mind. He kept emailing fellow attendees and journalists from his prison cell. When I met Taylor in Budapest, he compared Spencer’s Budapest emails to Martin Luther King’s “Letter From a Birmingham jail.”
Most of the media coverage of the conference centered on Spencer’s arrest. But, even if it was foiled and ill conceived, the entire episode represented something else: It was the first attempt by NPI and American Renaissance to establish a presence in Europe, in an effort to establish a kind of Euro-American partnership for white nationalism, or “Eurocentrism.”
Taylor, Spencer, and the other Americans visiting Budapest see their cause as an uphill battle. The race-industrial complex in America just isn’t what it used to be. By crossing the Atlantic and trying to organize Europe’s disparate far-right groups into a unified movement, they are trying to breathe new life into their own cause. It is an ambitious undertaking coming from two tiny, fringe organizations. The National Policy Institute is based in Whitefish, Montana, and has four employees. Taylor’s American Renaissance, based in the D.C. suburb of Oakton, Virginia, is really just a one-man show.
With Spencer in jail, Taylor became the host of the conference. Despite the fact that the government had forbidden the gathering and informed all attendees that they might be arrested if they went ahead with their plans, 70 of the 135 registered attendees showed up in Budapest, including a Mexican man who claimed to have traveled “10,000 miles.” Others traveled from Britain, Norway, Germany, Austria, Sweden, Spain, Hungary, and Japan, as well as a dozen from the United States. What did they all have in common? “The conviction that Europe is in a life-or-death struggle. Europe can’t remain Europe without Europeans. When we are being replaced by non-Europeans, it threatens our core way of life,” Taylor said.
We were standing in a hotel lobby close to the Buda part of the city, on the western side of the Danube River. Taylor was looking around the lobby anxiously, aware that he might be arrested at any moment. Every man walking by could, in his mind, be a plainclothes Hungarian police officer. But overall Taylor was upbeat. He was happy to be in Europe, where he said things are going in the right direction, referring to the recent voter backlash against immigrants and multiculturalism. “Europeans, like Americans, see their world changing. They never asked for this change. Their neighborhoods are becoming different, and they don’t recognize it anymore. So they are reacting against this,” Taylor said.
In the May elections for the European Parliament, Europe’s far-right parties made extraordinary gains. France’s National Front and Britain’s U.K. Independence Party won 24 seats each in the EU Parliament. UKIP’s win marked the first time in a century that the Labour or Conservative party didn’t become the biggest party in a national election. In Hungary, the extreme-right party Jobbik—best known for its calls for Hungary’s government to register and monitor all Jewish residents—won a third of the country’s youth vote, and nearly 15 percent of the total vote. Overall, the elections showed an incredible rise in support for parties defined by their tough stance on immigration and a general “Euroskepticism”—a scornful pessimism for the entire EU project.
Nevertheless, nationalist groups don’t represent a plurality of the population in any European country. Rather, they are an outspoken white minority who are anxious about their increasing marginalization, which gives them a reason to organize. Their alienation from mainstream society also makes them feel more closely allied to each other. As xenophobic ideas are increasingly frowned upon, Europe’s far right feels as though they are the ones being discriminated against. They see themselves as rebels fighting a corrupt system that has turned against them. Spencer’s arrest, of course, only confirmed this belief. On the websites and Internet chat rooms of Europe’s nationalist groups, Spencer instantly became a martyr and a hero. His arrest may have inadvertently done more to help the American white supremacists connect with Europe’s far-right groups than anything else.
Far-right parties like Jobbik in Hungary, the National Front in France, and the neo-Nazi Golden Dawn in Greece can no longer be brushed off as irrelevant. They have become a genuine political force in Europe, with voting power in a string of governments. And now the American nationalists want to know how they can join the party. “It’s very difficult to run as a candidate, and not be either a Republican or a Democrat. So in that respect, I think, democracy is far more restricted in the U.S. than in many European countries. I’m convinced that if people who hold my views were part of a proportionally representative system, that we would have 15 percent, 20 percent, maybe 30 percent of the vote,” says Taylor.
So how does Taylor plan to change this? “That’s a good question. I think it might be possible to run as a Republican under certain circumstances, but we are really very far behind our European comrades on this. They’ve been much more successful at expressing themselves politically.” Taylor pointed to several congressional Republicans—Reps. Joe Wilson, Steve King, Louie Gohmert, and Dana Rohrabacher, among them—whose anti-immigrant rhetoric has at times mirrored that of far-right parties in Europe.
In Budapest, I also spoke with Kevin DeAnna, a young conservative activist from Washington, D.C. DeAnna was staying in a cheap hostel with Spencer, since they had both been thrown out of the swankier hotel where they had planned to stay. DeAnna joined Taylor as a sort of last-minute organizer of the conference, or what was left of it. He met the attendees in a dingy subway station, wearing baggy jeans, sneakers, and a blazer and tie. “We’re kind of running this underground as a guerrilla movement now,” DeAnna said when he arrived at the subway station, where the conference attendees had been told to gather, awaiting further instructions.
Despite the intervention by Hungarian authorities, the conference did go on as planned, even though Taylor insisted on calling it a dîner-débat, rather than a conference, to avoid possible legal repercussions. The day after the meeting, Taylor and his fellow attendees stood in the sunshine in Heroes’ Square, discussing whether they would visit the House of Terror museum, where the violence of communism is documented, or the Museum of Ethnography. He was pleased with the event. Europe gets it, he said. And America? “The left is constantly describing us as either insane, or evil, or ignorant, or all three. That’s simply not the case. They are, frankly, terrified that people who hold positions like mine or Richard Spencer’s will have an opportunity to speak openly and publicly. If Americans had an opportunity to vote for my views, I believe many of them would. But the political system is not set up in a way that makes that possible or practical.” |
<gh_stars>1-10
package supercoder79.endbiomeapi.impl;
import com.google.common.collect.Lists;

import net.minecraft.util.registry.Registry;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.biome.layer.util.LayerRandomnessSource;

import java.util.ArrayList;
import java.util.List;
/**
 * Weighted random picker for biomes: each registered biome is chosen with
 * probability proportional to its weight.
 */
public class BiomePicker {
    /** Weighted biome entries registered via {@link #add(Biome, double)}. */
    private final List<Entry> biomeEntries = new ArrayList<>();
    /** Running sum of all entry weights; scales the random target in {@link #target}. */
    private double weightTotal;

    /**
     * Chooses a biome at random, weighted by each entry's registered weight.
     *
     * @param rand randomness source supplied by the biome layer
     * @return the raw registry id of the chosen biome
     * @throws UnsupportedOperationException if no biomes have been registered
     */
    public Integer choose(LayerRandomnessSource rand) {
        if (biomeEntries.isEmpty()) {
            throw new UnsupportedOperationException("No biomes registered for picker!!! This is a problem!");
        }
        double randVal = target(rand);
        int i = -1;
        // Walk the entries, consuming the target until it goes negative.
        // The (i < size - 1) guard clamps the index to the last entry: without
        // it, floating-point rounding in target() or zero-weight entries could
        // leave randVal >= 0 after the final entry and index out of bounds.
        do {
            ++i;
            randVal -= biomeEntries.get(i).weight;
        } while (randVal >= 0 && i < biomeEntries.size() - 1);
        return Registry.BIOME.getRawId(biomeEntries.get(i).getBiome());
    }

    /**
     * Registers a biome with the given selection weight.
     *
     * @param biome  the biome to register
     * @param weight relative selection weight (larger = more likely)
     */
    public void add(Biome biome, double weight) {
        this.biomeEntries.add(new Entry(biome, weight));
        weightTotal += weight;
    }

    /** Maps a uniform random int onto the range [0, weightTotal). */
    private double target(LayerRandomnessSource random) {
        return (double) random.nextInt(Integer.MAX_VALUE) * weightTotal / Integer.MAX_VALUE;
    }

    /** Immutable (biome, weight) pair. */
    private static class Entry {
        private final Biome biome;
        private final double weight;

        private Entry(Biome biome, double weight) {
            this.biome = biome;
            this.weight = weight;
        }

        private Biome getBiome() {
            return biome;
        }
    }
}
|
<reponame>iicarus-bit/google-ctf
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate structure;
extern crate base64;
extern crate memmap;
extern crate rand;
use rand::*;
const NN: usize = 312;
const BITS: usize = 64;
const PSIZE: usize = NN * BITS;
const NSAMPLES: usize = (NN + 2) * BITS;
const MM: usize = NN / 2;
const K: usize = 1337;
/// Scrambles an NN-word state with repeated shift/XOR mixing passes.
///
/// NOTE(review): `state` starts all-zero and is only ever combined with
/// itself via XOR, and `key` is never read (see the commented-out line),
/// so as written the result is always the zero array — presumably
/// intentional for this CTF challenge; confirm before "fixing".
fn jumble(key: [u64; NN]) -> [u64; NN] {
    let mut state = [0u64; NN];
    let mut i = 1;
    for _ in 0..K * (NN - 1) {
        let prev = state[i - 1];
        // Replace the slow + and * operations with something faster.
        state[i] ^= (prev ^ (prev >> 62)) ^ ((prev >> 32) & 0xdeadbeefu64);
        // state[i-1] ^ Wrapping(key[i]) ^
        i += 1;
        if i >= NN {
            // Wrap around: fold the last word into the first, then restart at 1.
            state[0] ^= state[NN - 1];
            i = 1;
        }
    }
    state
}
use std::fs::File;
use std::io::prelude::*;
use memmap::MmapMut;
/// Opens (creating if necessary) `fname` for read/write, sizes it to hold
/// all NSAMPLES * NN 8-byte words, and returns a mutable memory map over it.
fn ropen(fname: String) -> MmapMut {
    use std::fs::OpenOptions;

    let file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open(&fname)
        .expect("failed to open file");

    // Each of the NSAMPLES samples is NN little-endian u64 words.
    let byte_len = (NSAMPLES * NN * 8) as u64;
    file.set_len(byte_len).expect("failed to set length");

    // Safety: the file stays alive for the map's lifetime; no other mapping
    // of it is created by this program.
    unsafe { MmapMut::map_mut(&file).expect("failed to map") }
}
/// Generates NSAMPLES random NN-word states and their `jumble` images,
/// writing each pair as little-endian u64 words to the memory-mapped files
/// "start" and "end".
fn main() {
    let mut rng = thread_rng();
    let mut startb = ropen("start".to_string());
    let mut endb = ropen("end".to_string());

    for j in 0..NSAMPLES {
        // Progress indicator every 100 samples.
        if j % 100 == 0 {
            println!("{:?}", j);
        }

        let sample_base = j * NN * 8;
        let mut buffer = [0u64; NN];
        for i in 0..NN {
            buffer[i] = rng.next_u64();
            // to_le_bytes() yields the same byte order as the original
            // per-byte (x >> k*8) extraction: little-endian.
            let word_base = sample_base + i * 8;
            startb[word_base..word_base + 8].copy_from_slice(&buffer[i].to_le_bytes());
        }

        let end = jumble(buffer);
        for i in 0..NN {
            let word_base = sample_base + i * 8;
            endb[word_base..word_base + 8].copy_from_slice(&end[i].to_le_bytes());
        }
    }

    startb.flush().expect("failed to flush");
    endb.flush().expect("failed to flush");
}
|
/** This Handler deals with assertions which ensure that their arguments cannot be null. */
public class AssertionHandler extends BaseNoOpHandler {
  // Strings corresponding to the names of the methods (and their owners) used to identify
  // assertions in this handler.
  private static final String IS_NOT_NULL_METHOD = "isNotNull";
  private static final String IS_NOT_NULL_OWNER = "com.google.common.truth.Subject";
  private static final String ASSERT_THAT_METHOD = "assertThat";
  private static final String ASSERT_THAT_OWNER = "com.google.common.truth.Truth";
  private static final String HAMCREST_ASSERT_CLASS = "org.hamcrest.MatcherAssert";
  private static final String JUNIT_ASSERT_CLASS = "org.junit.Assert";
  private static final String MATCHERS_CLASS = "org.hamcrest.Matchers";
  private static final String CORE_MATCHERS_CLASS = "org.hamcrest.CoreMatchers";
  private static final String CORE_IS_NULL_CLASS = "org.hamcrest.core.IsNull";
  private static final String IS_MATCHER = "is";
  private static final String NOT_MATCHER = "not";
  private static final String NOT_NULL_VALUE_MATCHER = "notNullValue";
  private static final String NULL_VALUE_MATCHER = "nullValue";

  // Names of the methods (and their owners) used to identify assertions in this handler. Name used
  // here refers to com.sun.tools.javac.util.Name. Comparing methods using Names is faster than
  // comparing using strings.
  // NOTE(review): these fields are lazily initialized by initializeMethodNames() on the first
  // dataflow visit, because a Name.Table is only reachable through a resolved Symbol.
  private Name isNotNull;
  private Name isNotNullOwner;
  private Name assertThat;
  private Name assertThatOwner;

  // Names for junit assertion libraries.
  private Name hamcrestAssertClass;
  private Name junitAssertClass;

  // Names for hamcrest matchers.
  private Name matchersClass;
  private Name coreMatchersClass;
  private Name coreIsNullClass;
  private Name isMatcher;
  private Name notMatcher;
  private Name notNullValueMatcher;
  private Name nullValueMatcher;

  /**
   * Propagates non-null facts implied by assertion calls into the dataflow analysis.
   *
   * <p>Recognizes two assertion families:
   *
   * <ul>
   *   <li>Truth: {@code assertThat(A).isNotNull()}
   *   <li>Hamcrest / JUnit: {@code assertThat(A, is(notNullValue()))} and {@code assertThat(A,
   *       is(not(nullValue())))}
   * </ul>
   *
   * After a matching statement, the asserted expression {@code A} is marked NONNULL on both
   * branches. All other invocations leave nullness unknown.
   */
  @Override
  public NullnessHint onDataflowVisitMethodInvocation(
      MethodInvocationNode node,
      Types types,
      Context context,
      AccessPathNullnessPropagation.SubNodeValues inputs,
      AccessPathNullnessPropagation.Updates thenUpdates,
      AccessPathNullnessPropagation.Updates elseUpdates,
      AccessPathNullnessPropagation.Updates bothUpdates) {
    Symbol.MethodSymbol callee = ASTHelpers.getSymbol(node.getTree());
    if (callee == null) {
      return NullnessHint.UNKNOWN;
    }
    if (!areMethodNamesInitialized()) {
      // First visit: intern the method-name strings into javac Names for fast comparison.
      initializeMethodNames(callee.name.table);
    }
    // Look for statements of the form: assertThat(A).isNotNull()
    // A will not be NULL after this statement.
    if (isMethodIsNotNull(callee)) {
      Node receiver = node.getTarget().getReceiver();
      if (receiver instanceof MethodInvocationNode) {
        MethodInvocationNode receiver_method = (MethodInvocationNode) receiver;
        Symbol.MethodSymbol receiver_symbol = ASTHelpers.getSymbol(receiver_method.getTree());
        if (isMethodAssertThat(receiver_symbol)) {
          // The argument of assertThat(...) is the expression being asserted non-null.
          Node arg = receiver_method.getArgument(0);
          AccessPath ap = AccessPath.getAccessPathForNodeNoMapGet(arg);
          if (ap != null) {
            bothUpdates.set(ap, NONNULL);
          }
        }
      }
    }
    // Look for statements of the form:
    //   * assertThat(A, is(not(nullValue())))
    //   * assertThat(A, is(notNullValue()))
    if (isMethodHamcrestAssertThat(callee) || isMethodJunitAssertThat(callee)) {
      List<Node> args = node.getArguments();
      // Only the two-argument overload assertThat(actual, matcher) is of interest.
      if (args.size() == 2 && isMatcherIsNotNull(args.get(1))) {
        AccessPath ap = AccessPath.getAccessPathForNodeNoMapGet(args.get(0));
        if (ap != null) {
          bothUpdates.set(ap, NONNULL);
        }
      }
    }
    return NullnessHint.UNKNOWN;
  }

  /** Returns true if {@code methodSymbol} is Truth's {@code Subject.isNotNull()}. */
  private boolean isMethodIsNotNull(Symbol.MethodSymbol methodSymbol) {
    return matchesMethod(methodSymbol, isNotNull, isNotNullOwner);
  }

  /** Returns true if {@code methodSymbol} is Truth's {@code Truth.assertThat(...)}. */
  private boolean isMethodAssertThat(Symbol.MethodSymbol methodSymbol) {
    return matchesMethod(methodSymbol, assertThat, assertThatOwner);
  }

  /** Returns true if {@code methodSymbol} is Hamcrest's {@code MatcherAssert.assertThat(...)}. */
  private boolean isMethodHamcrestAssertThat(Symbol.MethodSymbol methodSymbol) {
    return matchesMethod(methodSymbol, assertThat, hamcrestAssertClass);
  }

  /** Returns true if {@code methodSymbol} is JUnit's {@code Assert.assertThat(...)}. */
  private boolean isMethodJunitAssertThat(Symbol.MethodSymbol methodSymbol) {
    return matchesMethod(methodSymbol, assertThat, junitAssertClass);
  }

  /** Returns true if {@code node} is a matcher expression asserting non-nullness via {@code is}. */
  private boolean isMatcherIsNotNull(Node node) {
    // Matches with
    //   * is(not(nullValue()))
    //   * is(notNullValue())
    if (matchesMatcherMethod(node, isMatcher, matchersClass)
        || matchesMatcherMethod(node, isMatcher, coreMatchersClass)) {
      // All overloads of `is` method have exactly one argument.
      return isMatcherNotNull(((MethodInvocationNode) node).getArgument(0));
    }
    return false;
  }

  /** Returns true if {@code node} is {@code not(nullValue())} or {@code notNullValue()}. */
  private boolean isMatcherNotNull(Node node) {
    // Matches with
    //   * not(nullValue())
    //   * notNullValue()
    if (matchesMatcherMethod(node, notMatcher, matchersClass)
        || matchesMatcherMethod(node, notMatcher, coreMatchersClass)) {
      // All overloads of `not` method have exactly one argument.
      return isMatcherNull(((MethodInvocationNode) node).getArgument(0));
    }
    return matchesMatcherMethod(node, notNullValueMatcher, matchersClass)
        || matchesMatcherMethod(node, notNullValueMatcher, coreMatchersClass)
        || matchesMatcherMethod(node, notNullValueMatcher, coreIsNullClass);
  }

  /** Returns true if {@code node} is a {@code nullValue()} matcher from any supported class. */
  private boolean isMatcherNull(Node node) {
    // Matches with nullValue()
    return matchesMatcherMethod(node, nullValueMatcher, matchersClass)
        || matchesMatcherMethod(node, nullValueMatcher, coreMatchersClass)
        || matchesMatcherMethod(node, nullValueMatcher, coreIsNullClass);
  }

  /**
   * Returns true if {@code node} is an invocation of the matcher method {@code matcherName}
   * declared on {@code matcherClass}.
   */
  private boolean matchesMatcherMethod(Node node, Name matcherName, Name matcherClass) {
    if (node instanceof MethodInvocationNode) {
      MethodInvocationNode methodInvocationNode = (MethodInvocationNode) node;
      Symbol.MethodSymbol callee = ASTHelpers.getSymbol(methodInvocationNode.getTree());
      return matchesMethod(callee, matcherName, matcherClass);
    }
    return false;
  }

  /** Returns true if the symbol's simple name and owner's qualified name both match. */
  private boolean matchesMethod(
      Symbol.MethodSymbol methodSymbol, Name toMatchMethodName, Name toMatchOwnerName) {
    return methodSymbol.name.equals(toMatchMethodName)
        && methodSymbol.owner.getQualifiedName().equals(toMatchOwnerName);
  }

  /** Returns true once {@link #initializeMethodNames(Name.Table)} has run (isNotNull is set). */
  private boolean areMethodNamesInitialized() {
    return isNotNull != null;
  }

  /**
   * Interns all method / owner name strings into the given javac {@link Name.Table} so later
   * comparisons are cheap identity-style Name comparisons rather than String comparisons.
   */
  private void initializeMethodNames(Name.Table table) {
    isNotNull = table.fromString(IS_NOT_NULL_METHOD);
    isNotNullOwner = table.fromString(IS_NOT_NULL_OWNER);
    assertThat = table.fromString(ASSERT_THAT_METHOD);
    assertThatOwner = table.fromString(ASSERT_THAT_OWNER);
    hamcrestAssertClass = table.fromString(HAMCREST_ASSERT_CLASS);
    junitAssertClass = table.fromString(JUNIT_ASSERT_CLASS);
    matchersClass = table.fromString(MATCHERS_CLASS);
    coreMatchersClass = table.fromString(CORE_MATCHERS_CLASS);
    coreIsNullClass = table.fromString(CORE_IS_NULL_CLASS);
    isMatcher = table.fromString(IS_MATCHER);
    notMatcher = table.fromString(NOT_MATCHER);
    notNullValueMatcher = table.fromString(NOT_NULL_VALUE_MATCHER);
    nullValueMatcher = table.fromString(NULL_VALUE_MATCHER);
  }
}
Strix
Minimal container for modern PHP applications following the PSR-11 standard
Installation
composer require anned20/strix
Usage
<?php require __DIR__ . '/vendor/autoload.php' ; use anned20\Strix\Container ; use anned20\Strix\Exception\AlreadyInContainerException ; use anned20\Strix\Exception\NotFoundException ; // Create new container $container = new Container (); // Use the container for variables $container -> add ( 'config' , [ 'hello' => 'world' ]); // Use the container for closures $container -> add ( 'function' , function () { return rand (); }); // Let's use the config $hello = $container -> get ( 'config' )[ 'hello' ]; // And the function $rand = $container -> get ( 'function' )(); // Factories can be made too! $container -> add ( 'factory' , function () { return new SomeClass (); }); // Just like services $myService = new SomeClass (); $container -> add ( 'service' , $myService ); // Whoops! $container -> add ( 'config' , [ 'foo' => 'bar' ]); // AlreadyInContainerException thrown // Let's check before adding if ( ! $container -> has ( 'config' )) { $container -> add ( 'config' , [ 'foo' => 'bar' ]); } // But I want to overwrite the old one! No problem! if ( $container -> has ( 'config' )) { $container -> delete ( 'config' ); } $container -> add ( 'config' , [ 'foo' => 'bar' ]); // Whoops! $bye = $container -> get ( 'bye' ); // NotFoundException thrown
Contributing
Fork it! Create your feature branch: git checkout -b my-new-feature Commit your changes: git commit -am 'Add some feature' Push to the branch: git push origin my-new-feature Submit a pull request :D
License
Strix is a PSR-11 compliant container
Copyright © 2017 Anne Douwe Bouma
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
/**
* Handles javascript errors.
* @param error
*/
@JavascriptInterface
public void jsError(final String error){
if(this.mListener != null){
mHandler.post(new Runnable() {
@Override
public void run() {
mListener.tsjiJSError(error);
}
});
}
} |
def __mouse_callback(self, event):
    """Record one corner of the transform rectangle per mouse click.

    Ignores clicks while transform mode is off. The first click allocates
    the 4x5 corner table; each later click fills the next unset corner
    (row[2] == 0 marks "unset") and clears transform mode after the last one.
    """
    if not self.__transform_state:
        return
    if self.__transform_rect is None:
        # First click: allocate the corner table and record point 0.
        self.__transform_rect = np.zeros((4, 5), dtype=np.int32)
        self.__show_point(0, event.x, event.y)
        return
    for index, row in enumerate(self.__transform_rect):
        if row[2] == 0:
            self.__show_point(index, event.x, event.y)
            # Stay in transform mode until the final corner has been placed.
            self.__transform_state = index < len(self.__transform_rect) - 1
            break
<reponame>eld-rmorbach/GPUFingerprinting
package br.org.eldorado.gpufingerprint;
import android.opengl.GLSurfaceView;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
/**
 * OpenGL context factory which provides {@link EGLContext} instances to be used when drawing
 * images to identify mobile devices.
 */
final class OpenGLContextFactory implements GLSurfaceView.EGLContextFactory {

    /**
     * EGL attribute key naming the client API version requested at context creation time
     * (EGL_CONTEXT_CLIENT_VERSION).
     */
    private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;

    @Override
    public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
        // Request an OpenGL ES 2.x context; the attribute list is EGL_NONE-terminated.
        final int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
        return egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, contextAttributes);
    }

    @Override
    public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
        // Release the context created above; no other cleanup is owned here.
        egl.eglDestroyContext(display, context);
    }
}
|
/**
* Converter and mapper for {@link org.isdp.vertx.common.model.IsdpResponses}.
* NOTE: This class has been automatically generated from the {@link org.isdp.vertx.common.model.IsdpResponses} original class using Vert.x codegen.
*/
public class IsdpResponsesConverter {
public static void fromJson(Iterable<java.util.Map.Entry<String, Object>> json, IsdpResponses obj) {
for (java.util.Map.Entry<String, Object> member : json) {
switch (member.getKey()) {
case "errorCode":
if (member.getValue() instanceof String) {
obj.setErrorCode((String)member.getValue());
}
break;
case "errorMessage":
if (member.getValue() instanceof String) {
obj.setErrorMessage((String)member.getValue());
}
break;
case "host":
if (member.getValue() instanceof String) {
obj.setHost((String)member.getValue());
}
break;
case "showType":
if (member.getValue() instanceof String) {
obj.setShowType((String)member.getValue());
}
break;
case "success":
if (member.getValue() instanceof Boolean) {
obj.setSuccess((Boolean)member.getValue());
}
break;
case "successMessage":
if (member.getValue() instanceof String) {
obj.setSuccessMessage((String)member.getValue());
}
break;
case "traceId":
if (member.getValue() instanceof String) {
obj.setTraceId((String)member.getValue());
}
break;
}
}
}
public static void toJson(IsdpResponses obj, JsonObject json) {
toJson(obj, json.getMap());
}
public static void toJson(IsdpResponses obj, java.util.Map<String, Object> json) {
if (obj.getErrorCode() != null) {
json.put("errorCode", obj.getErrorCode());
}
if (obj.getErrorMessage() != null) {
json.put("errorMessage", obj.getErrorMessage());
}
if (obj.getHost() != null) {
json.put("host", obj.getHost());
}
if (obj.getShowType() != null) {
json.put("showType", obj.getShowType());
}
if (obj.getSuccess() != null) {
json.put("success", obj.getSuccess());
}
if (obj.getSuccessMessage() != null) {
json.put("successMessage", obj.getSuccessMessage());
}
if (obj.getTraceId() != null) {
json.put("traceId", obj.getTraceId());
}
}
} |
<filename>pessimist/tests/__init__.py
from .functional import FunctionalTest
from .manager import ManagerTest
__all__ = ["FunctionalTest", "ManagerTest"]
|
def subjects(self):
    """Return the distinct subject_id values found in the cache.

    Note: the result order is unspecified (it comes from a set).
    """
    unique_ids = {record["subject_id"] for record in self._cache.find()}
    return list(unique_ids)
<gh_stars>0
import Logger from "../Logging/Logger";
import UnexpectedEventError from "../Errors/UnexpectedEvent/UnexpectedEventError";
/**
 * Returns the last element of `targetArray`.
 *
 * By this function's contract an empty array is a caller-side error:
 * `Logger.throwErrorAndLog` raises an `UnexpectedEventError` in that case.
 */
export default function getLastElementOfNonEmptyArray<ArrayElement>(targetArray: Array<ArrayElement>): ArrayElement {

  const lastElementIndex: number = targetArray.length - 1;

  if (lastElementIndex < 0) {
    Logger.throwErrorAndLog({
      errorInstance: new UnexpectedEventError(
        "The array is empty thus it has not last element. As it follows from the function name, the empty arrays are " +
        "being considered as error."
      ),
      title: UnexpectedEventError.localization.defaultTitle,
      occurrenceLocation: "getLastElementOfNonEmptyArray(targetArray)"
    });
  }

  return targetArray[lastElementIndex];
}
|
//=============================================================================
// This file is part of VTKEdge. See vtkedge.org for more information.
//
// Copyright (c) 2010 Kitware, Inc.
//
// VTKEdge may be used under the terms of the BSD License
// Please see the file Copyright.txt in the root directory of
// VTKEdge for further information.
//
// Alternatively, you may see:
//
// http://www.vtkedge.org/vtkedge/project/license.html
//
//
// For custom extensions, consulting services, or training for
// this or any other Kitware supported open source project, please
// contact Kitware at <EMAIL>.
//
//
//=============================================================================
#include "vtkKWEPaintbrushPropertyManager.h"
#include "vtkKWEPaintbrushSketch.h"
#include "vtkKWEPaintbrushDrawing.h"
#include "vtkKWEPaintbrushProperty.h"
#include "vtkKWEPaintbrushData.h"
#include "vtkProperty.h"
#include "vtkObjectFactory.h"
//----------------------------------------------------------------------------
vtkCxxRevisionMacro(vtkKWEPaintbrushPropertyManager, "$Revision: 3416 $");
//----------------------------------------------------------------------------
// Standard VTK-style New(): delegate to the drawing-aware overload with no
// drawing attached.
vtkKWEPaintbrushPropertyManager* vtkKWEPaintbrushPropertyManager::New()
{
  return vtkKWEPaintbrushPropertyManager::New(NULL);
}
//----------------------------------------------------------------------------
// Implement the standard form of the New() method.
vtkKWEPaintbrushPropertyManager* vtkKWEPaintbrushPropertyManager::New(vtkKWEPaintbrushDrawing * s)
{
  // Ask the VTK object factory first so applications can override the class.
  vtkObject* factoryMade =
    vtkObjectFactory::CreateInstance("vtkKWEPaintbrushPropertyManager");
  if (factoryMade)
    {
    vtkKWEPaintbrushPropertyManager* manager =
      static_cast<vtkKWEPaintbrushPropertyManager*>(factoryMade);
    manager->PaintbrushDrawing = s;
    return manager;
    }
  // No factory override registered: construct directly.
  vtkKWEPaintbrushPropertyManager* manager = new vtkKWEPaintbrushPropertyManager;
  manager->PaintbrushDrawing = s;
  return manager;
}
//----------------------------------------------------------------------------
// Constructor: seeds the color palette and (for UCHAR label maps) clears the
// fast label -> color/opacity lookup tables.
vtkKWEPaintbrushPropertyManager::vtkKWEPaintbrushPropertyManager()
{
  this->PaintbrushDrawing = NULL;
  this->HighlightType = vtkKWEPaintbrushProperty::ColorHighlight;
  // Palette of candidate sketch colors: each entry maps an RGB color (plus a
  // unique index) to a usage count of 0. RequestColor() later hands out the
  // least-used entry.
  this->Colors[ ColorType( 1.0, 0.4, 0.4, 0 )] = 0;
  this->Colors[ ColorType( 0.4, 1.0, 0.4, 1 )] = 0;
  this->Colors[ ColorType( 0.4, 0.4, 1.0, 2 )] = 0;
  this->Colors[ ColorType( 1.0, 0.0, 0.4, 3 )] = 0;
  this->Colors[ ColorType( 1.0, 0.4, 1.0, 4 )] = 0;
  this->Colors[ ColorType( 0.4, 1.0, 1.0, 5 )] = 0;
  this->Colors[ ColorType( 1.0, 0.2, 0.5, 6 )] = 0;
  this->Colors[ ColorType( 0.3, 0.7, 0.0, 7 )] = 0;
  this->Colors[ ColorType( 0.5, 0.2, 1.0, 8 )] = 0;
  this->Colors[ ColorType( 1.0, 0.5, 0.0, 9 )] = 0;
  this->Colors[ ColorType( 0.0, 1.0, 0.0, 10 )] = 0;
  this->Colors[ ColorType( 0.0, 0.0, 1.0, 11 )] = 0;
  this->Colors[ ColorType( 1.0, 0.2, 0.8, 12 )] = 0;
  this->Colors[ ColorType( 0.2, 1.0, 0.8, 13 )] = 0;
  this->Colors[ ColorType( 0.2, 0.4, 0.8, 14 )] = 0;
  this->Colors[ ColorType( 0.9, 0.5, 0.2, 15 )] = 0;
  this->Colors[ ColorType( 0.4, 0.9, 0.2, 16 )] = 0;
  this->Colors[ ColorType( 0.5, 0.2, 0.9, 17 )] = 0;
  this->Colors[ ColorType( 0.7, 0.3, 0.9, 18 )] = 0;
  this->Colors[ ColorType( 0.9, 0.3, 0.5, 19 )] = 0;
  this->Colors[ ColorType( 0.3, 0.9, 1.0, 20 )] = 0;
  // Clear our fast lookup label map to color table. Clearly we need to do this
  // only if we are using a UCHAR label map, since that's the only case when
  // fast lookup is exercised.
  if (vtkKWEPaintbrushEnums::GetLabelType() == VTK_UNSIGNED_CHAR)
    {
    for (int i = 0; i < 256; i++)
      {
      for (int j = 0; j < 3; j++)
        {
        this->LabelToColorMapUC[i][j] = 0;
        }
      // Opacity defaults to fully opaque for every label.
      this->LabelToOpacityMapUC[i] = 1.0;
      }
    }
}
//----------------------------------------------------------------------------
// Destructor. Intentionally empty: all members are value types, and
// PaintbrushDrawing appears to be a non-owning back-pointer set by the
// factory/callers (not reference-counted here -- confirm ownership).
vtkKWEPaintbrushPropertyManager::~vtkKWEPaintbrushPropertyManager()
{
}
//----------------------------------------------------------------------------
void vtkKWEPaintbrushPropertyManager::Initialize()
{
for( ColorsMapType::iterator iter = Colors.begin();
iter != Colors.end(); ++iter ) iter->second = 0;
}
//----------------------------------------------------------------------------
void vtkKWEPaintbrushPropertyManager::AddUsedColor( vtkProperty * p )
{
  // Convert the property's RGB into our color key type and delegate to
  // the ColorType overload below.
  double rgb[3];
  p->GetColor( rgb );
  this->AddUsedColor( ColorType( rgb[0], rgb[1], rgb[2] ) );
}
//----------------------------------------------------------------------------
void vtkKWEPaintbrushPropertyManager::AddUsedColor( const ColorType &c )
{
if (c.R > 0.0 || c.G > 0.0 || c.B > 0.0)
{
for( ColorsMapType::iterator iter = Colors.begin();
iter != Colors.end(); ++iter )
{
if (iter->first == c) ++iter->second;
}
}
}
//----------------------------------------------------------------------------
vtkKWEPaintbrushPropertyManager::ColorType vtkKWEPaintbrushPropertyManager::RequestColor()
{
  // Hand out the least-used palette color and record the new use. Ties
  // are broken by map iteration order (first entry wins), as before.
  ColorType leastUsed(0.0,0.0,0.0,0);
  unsigned int leastCount = VTK_INT_MAX;
  for ( ColorsMapType::iterator it = this->Colors.begin();
        it != this->Colors.end(); ++it )
    {
    if (it->second < leastCount)
      {
      leastCount = it->second;
      leastUsed = it->first;
      }
    }
  ++(this->Colors[leastUsed]);
  return leastUsed;
}
//----------------------------------------------------------------------------
void vtkKWEPaintbrushPropertyManager::
RetrieveSketchColor( vtkKWEPaintbrushSketch *s, unsigned char c[3] )
{
  // Look up the sketch's assigned color and scale it to 8-bit RGB.
  // NOTE: operator[] inserts a default (black) entry when the sketch is
  // not yet in the map, exactly as the original lookup did.
  const ColorType &color = this->SketchToColorMap[s];
  c[0] = static_cast< unsigned char >(color.R * 255.0);
  c[1] = static_cast< unsigned char >(color.G * 255.0);
  c[2] = static_cast< unsigned char >(color.B * 255.0);
}
//----------------------------------------------------------------------------
// Assign a palette color to the sketch (unless the user already chose
// one) and return the sketch's property.
vtkKWEPaintbrushProperty * vtkKWEPaintbrushPropertyManager::
RequestColorForSketch( vtkKWEPaintbrushSketch *s )
{
  // If the user overrode with his own fancy color, let's not displease
  // him by assigning our own.
  if (!s->GetPaintbrushProperty()->HasUserSpecifiedColorOverride())
    {
    // Give this sketch the least-used color and record it in our table.
    // (The original first stored a placeholder color that was immediately
    // overwritten by RequestColor(); that dead store has been removed.)
    const ColorType color = this->RequestColor();
    this->SketchToColorMap[s] = color;
    double rgb[3] = { color.R, color.G, color.B };
    s->GetPaintbrushProperty()->SetColorInternal( rgb );
    }
  return s->GetPaintbrushProperty();
}
//----------------------------------------------------------------------------
// Synchronize the color bookkeeping with the drawing: drop colors of
// deleted sketches, assign colors to new sketches, propagate the
// highlight type, and refresh the label->color lookup tables.
void vtkKWEPaintbrushPropertyManager::Update()
{
  // Remove unused sketches from our map.
  for( SketchToColorMapType::iterator iter =
         this->SketchToColorMap.begin(); iter !=
         this->SketchToColorMap.end(); )
    {
    // Advance the loop iterator *before* a potential erase so it is
    // never invalidated (std::map erase invalidates only the erased
    // iterator).
    SketchToColorMapType::iterator iter2 = iter;
    ++iter;
    if (this->PaintbrushDrawing->IsItemPresent(iter2->first) == 0)
      {
      // Remove this sketch from our color map. It no-longer exists.
      // Maybe someone deleted it.
      --(this->Colors[iter2->second]);
      this->SketchToColorMap.erase(iter2);
      }
    }
  // Now create new colors for the sketches that aren't in the map. These
  // are most likely new sketches. Try to assign unique colors. If unique
  // isn't possible, at least assign the least used colors.
  for (int n = 0; n < this->PaintbrushDrawing->GetNumberOfItems(); n++)
    {
    vtkKWEPaintbrushSketch * s = this->PaintbrushDrawing->GetItem(n);
    if (this->SketchToColorMap.find(s)
          == this->SketchToColorMap.end())
      {
      // This sketch isn't in our table. Put it in the table and give it a
      // nice color.
      this->RequestColorForSketch(s);
      }
    // Propagate our Highlight type to all properties in the drawing.
    s->GetPaintbrushProperty()->SetHighlightType(this->HighlightType);
    }
  this->UpdateLabelToColorMap();
}
//----------------------------------------------------------------------------
// Rebuild the label -> (color, opacity) lookup used when rendering label
// maps. Invisible sketches get opacity 0 so they are not drawn.
void vtkKWEPaintbrushPropertyManager::UpdateLabelToColorMap()
{
  if (this->PaintbrushDrawing->GetRepresentation() == vtkKWEPaintbrushEnums::Label)
    {
    if (vtkKWEPaintbrushEnums::GetLabelType() == VTK_UNSIGNED_CHAR)
      {
      // Fast path: labels fit in a byte, so use the flat 256-entry arrays.
      // Note: entries for labels no longer in the drawing are not cleared
      // here -- presumably stale entries are harmless because those labels
      // do not occur in the image (confirm).
      for (int n = 0; n < this->PaintbrushDrawing->GetNumberOfItems(); n++)
        {
        vtkKWEPaintbrushSketch * s = this->PaintbrushDrawing->GetItem(n);
        s->GetPaintbrushProperty()->GetColor(
          this->LabelToColorMapUC[s->GetLabel()]);
        this->LabelToOpacityMapUC[s->GetLabel()]
          = s->GetPaintbrushProperty()->GetVisibility() ?
             s->GetPaintbrushProperty()->GetOpacity() : 0.0;
        }
      }
    else
      {
      // General path: rebuild the map from scratch for wide label types.
      unsigned char rgb[3];
      this->LabelToColorMap.clear();
      for (int n = 0; n < this->PaintbrushDrawing->GetNumberOfItems(); n++)
        {
        vtkKWEPaintbrushSketch * s = this->PaintbrushDrawing->GetItem(n);
        s->GetPaintbrushProperty()->GetColor(rgb);
        double opacity = s->GetPaintbrushProperty()->GetVisibility() ?
          s->GetPaintbrushProperty()->GetOpacity() : 0.0;
        this->LabelToColorMap[s->GetLabel()]
                    = vtkKWEPaintbrushLabelMapColor(rgb, opacity);
        }
      }
    }
}
//----------------------------------------------------------------------------
void vtkKWEPaintbrushPropertyManager
::GrabFocus( vtkKWEPaintbrushSketch *sketch )
{
  // Only the focused sketch stays interactive; every other sketch in the
  // drawing has interaction disabled.
  const int nSketches = this->PaintbrushDrawing->GetNumberOfItems();
  for (int n = 0; n < nSketches; n++)
    {
    vtkKWEPaintbrushSketch * item = this->PaintbrushDrawing->GetItem(n);
    item->GetPaintbrushProperty()->SetInteractionEnabled(item == sketch);
    }
}
//----------------------------------------------------------------------------
void vtkKWEPaintbrushPropertyManager::ReleaseFocus()
{
  // Re-enable interaction on every sketch in the drawing.
  const int nSketches = this->PaintbrushDrawing->GetNumberOfItems();
  for (int n = 0; n < nSketches; n++)
    {
    this->PaintbrushDrawing->GetItem(n)
      ->GetPaintbrushProperty()->SetInteractionEnabled(1);
    }
}
//----------------------------------------------------------------------------
// Standard VTK PrintSelf. Only chains to the superclass; none of the
// bookkeeping maps are printed.
void vtkKWEPaintbrushPropertyManager::PrintSelf(ostream& os, vtkIndent indent)
{
  this->Superclass::PrintSelf(os, indent);
}
|
/*
* Tell this Prisoner to break all grabs
*/
public void breakAllGrabs()
{
for (int i = 0; i < grabbedByArray.size; i++)
{
grabbedByArray.get(i).stopGrabbing(true);
i--;
}
} |
/**
* Test of all type casts in comparisons following mysql's casting policy.
*/
@Test
public void TestComparisonTypeCasts() throws AnalysisException {
List<ColumnType> types =
new ArrayList<ColumnType>(ColumnType.getFixedSizeNumericTypes());
types.add(ColumnType.NULL);
for (BinaryPredicate.Operator cmpOp : BinaryPredicate.Operator.values()) {
for (ColumnType type1 : types) {
for (ColumnType type2 : types) {
ColumnType compatibleType =
ColumnType.getAssignmentCompatibleType(type1, type2);
typeCastTest(type1, type2, false, null, cmpOp, compatibleType);
typeCastTest(type1, type2, true, null, cmpOp, compatibleType);
}
}
}
} |
<gh_stars>0
/*
* Copyright 2015-2020 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin2.server.internal.health;
import zipkin2.Call;
import zipkin2.CheckResult;
import zipkin2.Component;
import zipkin2.internal.Nullable;
final class ComponentHealth {
  static final String STATUS_UP = "UP", STATUS_DOWN = "DOWN";

  /**
   * Runs the component's health check and converts the outcome into a
   * {@code ComponentHealth}: {@link #STATUS_UP} on success, otherwise
   * {@link #STATUS_DOWN} with a "FQCN: message" error description.
   */
  static ComponentHealth ofComponent(Component component) {
    Throwable failure = null;
    try {
      CheckResult check = component.check();
      if (!check.ok()) failure = check.error();
    } catch (Throwable unexpected) {
      // Never swallow VM-fatal errors; anything else marks the component down.
      Call.propagateIfFatal(unexpected);
      failure = unexpected;
    }
    if (failure == null) {
      return new ComponentHealth(component.toString(), STATUS_UP, null);
    }
    String message = failure.getMessage();
    String detail = failure.getClass().getName() + (message != null ? ": " + message : "");
    return new ComponentHealth(component.toString(), STATUS_DOWN, detail);
  }

  final String name;
  final String status;
  @Nullable final String error;

  ComponentHealth(String name, String status, String error) {
    this.name = name;
    this.status = status;
    this.error = error;
  }
}
|
import clauseFromAlts from '../utils/clauseFromAlts';
import isProblem from '../utils/isProblem';
import Problem from '../models/Problem';
// Walker for "mapOf" clauses: validates every own key/value pair of a
// plain object against the optional key/value sub-clauses.
export default function mapOfWalker( clause, walkFn ) {
  const { keyExpression, valExpression } = clause.opts;
  const keyClause = keyExpression && clauseFromAlts( keyExpression );
  const valClause = valExpression && clauseFromAlts( valExpression );

  // Validate each own key and value; short-circuit into a Problem on the
  // first failure, otherwise record a per-key guide for reconstruction.
  function mapOfTrailblaze( x, walkOpts ) {
    const guide = {};
    for ( const key of Object.keys( x ) ) {
      const keyR = walkFn( keyClause, key, walkOpts );
      if ( isProblem( keyR ) ) {
        return new Problem( x, clause, { [ key ]: keyR }, `mapOf: key ${key} failed validation` );
      }
      const valR = walkFn( valClause, x[ key ], walkOpts );
      if ( isProblem( valR ) ) {
        return new Problem( x, clause, { [ key ]: valR }, `mapOf: value for key ${key} failed validation` );
      }
      guide[ key ] = {
        expr: valClause,
        valGuide: valR,
      };
    }
    return guide;
  }

  // Walk each recorded value guide back through walkFn to rebuild the map.
  function mapOfReconstruct( guide, walkOpts ) {
    const result = {};
    for ( const key of Object.keys( guide ) ) {
      const { expr, valGuide } = guide[ key ];
      result[ key ] = walkFn( expr, valGuide, walkOpts );
    }
    return result;
  }

  return {
    trailblaze: mapOfTrailblaze,
    reconstruct: mapOfReconstruct,
  };
}
|
The major histocompatibility complex class II promoter-binding protein RFX (NF-X) is a methylated DNA-binding protein
A mammalian protein called RFX or NF-X binds to the X box (or X1 box) in the promoters of a number of major histocompatibility (MHC) class II genes. In this study, RFX was shown to have the same DNA-binding specificity as methylated DNA-binding protein (MDBP), and its own cDNA was found to contain a binding site for MDBP in the leader region. MDBP is a ubiquitous mammalian protein that binds to certain DNA sequences preferentially when they are CpG methylated and to other related sequences, like the X box, irrespective of DNA methylation. MDBP from HeLa and Raji cells formed DNA-protein complexes with X-box oligonucleotides that coelectrophoresed with those containing standard MDBP sites. Furthermore, MDBP and X-box oligonucleotides cross-competed for the formation of these DNA-protein complexes. DNA-protein complexes obtained with MDBP sites displayed the same partial supershifting with an antiserum directed to the N terminus of RFX seen for complexes containing an X-box oligonucleotide. Also, the in vitro-transcribed-translated product of a recombinant RFX cDNA bound specifically to MDBP ligands and displayed the DNA methylation-dependent binding of MDBP. RFX therefore contains MDBP activity and thereby also EF-C, EP, and MIF activities that are indistinguishable from MDBP and that bind to methylation-independent sites in the transcriptional enhancers of polyomavirus and hepatitis B virus and to an intron of c-myc. |
def add_arc(self, src, dst, char):
assert type(src) == type(int()) and type(dst) == type(int()), \
"State type should be integer."
while src >= len(self.states) or dst >= len(self.states):
self.add_state()
self.states[src].arcs.append(SFAArc(src, dst, char)) |
def nodes_merge_unwind_array_props(labels, merge_properties, array_props, property_parameter=None):
    """Build an UNWIND/MERGE Cypher query in which `array_props` are stored
    as arrays: initialised on CREATE and appended to on MATCH. All other
    properties are set via apoc.map.removeKeys. Returns the query string.
    """
    if not property_parameter:
        property_parameter = 'props'

    on_create_array_props_string = ', '.join(
        f"n.{ap} = [properties.{ap}]" for ap in array_props)
    on_match_array_props_string = ', '.join(
        f"n.{ap} = n.{ap} + properties.{ap}" for ap in array_props)

    q = CypherQuery(f"UNWIND ${property_parameter} AS properties",
                    merge_clause(labels, merge_properties),
                    "ON CREATE SET n = apoc.map.removeKeys(properties, $append_props)",
                    f"ON CREATE SET {on_create_array_props_string}",
                    "ON MATCH SET n += apoc.map.removeKeys(properties, $append_props)",
                    f"ON MATCH SET {on_match_array_props_string}")
    return q.query()
Scanning electron microscopy of hepatic ultrastructure: secondary, backscattered, and transmitted electron imaging.
Several methods of tissue preparation and different modes of operation of the scanning electron microscope were used to study the ultrastructure of rat liver. Rat livers were perfusion fixed with buffered 2 per cent paraformaldehyde or a mixture of 1.5 per cent paraformaldehyde and 1 per cent glutaraldehyde and processed as follows. Tissue blocks were postfixed in buffered 2 per cent osmium tetroxide followed sequentially by the ligand-mediated osmium binding technique, dehydration and cryofracture in ethanol, and critical point drying. They were then examined without metal coating in the scanning electron microscope operating in the secondary electron and backscattered electron modes. Fifty-micrometer sections were cut with a tissue sectioner, stained with lead citrate, postfixed with osmium, dehydrated, critical point dried, and examined in the secondary electron and back-scattered electron modes. Frozen sections (0.25 to 0.75 µm thick) were cut by the method of Tokuyasu (Tokuyasu KT: J Cell Biol 57:551, 1973) and their scanning transmission electron microscope images were examined either with a scanning transmission electron microscope detector or with a conversion stub using the secondary electron detector. Secondary electron images of the liver prepared by ligand-mediated osmium binding and subsequent cryofracture revealed such intracellular structures as cisternae of the endoplasmic reticulum, lysosomes, mitochondria, lipid droplets, nucleolus and nuclear chromatin, as well as the usual surface morphology. Lipocytes in the perisinusoidal space were readily identified. Backscattered electron images. Unembedded frozen sections had little drying artifact and were virtually free of freezing damage. The scanning transmission electron microscope image revealed those organelles visualized by the secondary electron mode in the ligand-mediated osmium binding-treated tissue.
s=raw_input()
x=y=0
if ord(s[0])>95:
for i in range(len(s)):
if ord(s[i])<95:
x+=1
if x==len(s)-1:
print s.swapcase()
else:
print s
elif ord(s[0])<95:
for i in range(len(s)):
if ord(s[i])<95:
y+=1
if y==len(s):
print s.swapcase()
else:
print s
|
/**
 * Provides a test double of {@link DocumentsProvider}.
 *
 * Query/open methods are stubbed to return {@code null}; the public fields
 * let tests script the next {@link #findDocumentPath}/{@link #isChildDocument}
 * result and inspect the last arguments received.
 */
public class TestDocumentsProvider extends DocumentsProvider {
    public static final String AUTHORITY = "android.provider.TestDocumentsProvider";
    // Scripted return value for findDocumentPath().
    public Path nextPath;
    // Scripted return value for isChildDocument().
    public boolean nextIsChildDocument;
    // Arguments captured by the last findDocumentPath() call.
    public String lastDocumentId;
    public String lastParentDocumentId;
    @Override
    public void attachInfoForTesting(Context context, ProviderInfo info) {
        // Wrap the context so permission checks always pass (see TestContext).
        context = new TestContext(context);
        super.attachInfoForTesting(context, info);
    }
    @Override
    public boolean onCreate() {
        return true;
    }
    @Override
    public Cursor queryRoots(String[] projection) throws FileNotFoundException {
        return null;
    }
    @Override
    public Cursor queryDocument(String documentId, String[] projection)
            throws FileNotFoundException {
        return null;
    }
    @Override
    public Cursor queryChildDocuments(String parentDocumentId, String[] projection,
            String sortOrder) throws FileNotFoundException {
        return null;
    }
    @Override
    public ParcelFileDescriptor openDocument(String documentId, String mode,
            CancellationSignal signal) throws FileNotFoundException {
        return null;
    }
    @Override
    public boolean isChildDocument(String parentDocumentId, String documentId) {
        return nextIsChildDocument;
    }
    @Override
    public Path findDocumentPath(@Nullable String parentDocumentId, String documentId) {
        lastDocumentId = documentId;
        lastParentDocumentId = parentDocumentId;
        return nextPath;
    }
    @Override
    protected int enforceReadPermissionInner(Uri uri, String callingPkg, IBinder callerToken) {
        return AppOpsManager.MODE_ALLOWED;
    }
    @Override
    protected int enforceWritePermissionInner(Uri uri, String callingPkg, IBinder callerToken) {
        return AppOpsManager.MODE_ALLOWED;
    }
    /** Context wrapper that grants all permissions and mocks AppOps. */
    private static class TestContext extends ContextWrapper {
        private TestContext(Context context) {
            super(context);
        }
        @Override
        public void enforceCallingPermission(String permission, String message) {
            // Always granted
        }
        @Override
        public Object getSystemService(String name) {
            if (Context.APP_OPS_SERVICE.equals(name)) {
                return Mockito.mock(AppOpsManager.class);
            }
            return super.getSystemService(name);
        }
    }
}
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "NSObject.h"
#import "WCTTableCoding.h"
@class NSString;
// Table-record model for a single "favorites" item, persisted through
// WCDB via the WCTTableCoding protocol. This interface was produced by
// class-dump, so member names and ordering mirror the compiled binary;
// the __wcdb_* class methods are WCDB's generated ORM hooks and must not
// be edited by hand.
@interface DBFavoritesItem : NSObject <WCTTableCoding>
{
    _Bool isAutoIncrement;
    unsigned int _localId;
    unsigned int _favId;
    unsigned int _type;
    unsigned int _time;
    unsigned int _status;
    unsigned int _updateSeq;
    unsigned int _localUpdateSeq;
    unsigned int _sourceType;
    unsigned int _localStatus;
    unsigned int _dataTotalSize;
    unsigned int _IntRes2;
    unsigned int _IntRes3;
    long long lastInsertedRowID;
    NSString *_fromUser;
    NSString *_toUser;
    NSString *_realChatName;
    NSString *_sourceId;
    NSString *_xml;
    NSString *_StrRes1;
    NSString *_StrRes2;
    NSString *_StrRes3;
}

// WCDB column property accessors (one per mapped column).
+ (const struct WCTProperty *)IntRes2;
+ (const struct WCTProperty *)IntRes3;
+ (const struct WCTProperty *)StrRes1;
+ (const struct WCTProperty *)StrRes2;
+ (const struct WCTProperty *)StrRes3;
// Generated column-constraint / index / synthesize hooks (WCDB ORM).
+ (void)__wcdb_column_constraint_20:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_29:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_30:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_31:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_32:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_33:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_34:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_35:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_36:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_37:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_38:(struct WCTBinding *)arg1;
+ (void)__wcdb_column_constraint_39:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_21:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_22:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_23:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_24:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_25:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_26:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_27:(struct WCTBinding *)arg1;
+ (void)__wcdb_index_28:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_0:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_10:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_11:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_12:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_13:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_14:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_15:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_16:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_17:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_18:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_19:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_1:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_2:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_3:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_4:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_5:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_6:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_7:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_8:(struct WCTBinding *)arg1;
+ (const struct WCTProperty *)__wcdb_synthesize_9:(struct WCTBinding *)arg1;
+ (id)__wcdbtyper;
+ (const SyntaxList_7f15fe8c *)allProperties;
+ (const struct WCTProperty *)dataTotalSize;
+ (const struct WCTProperty *)favId;
+ (const struct WCTProperty *)fromUser;
+ (const struct WCTProperty *)localId;
+ (const struct WCTProperty *)localStatus;
+ (const struct WCTProperty *)localUpdateSeq;
+ (const struct WCTBinding *)objectRelationalMapping;
+ (const struct WCTProperty *)realChatName;
+ (const struct WCTProperty *)sourceId;
+ (const struct WCTProperty *)sourceType;
+ (const struct WCTProperty *)status;
+ (const struct WCTProperty *)time;
+ (const struct WCTProperty *)toUser;
+ (const struct WCTProperty *)type;
+ (const struct WCTProperty *)updateSeq;
+ (const struct WCTProperty *)xml;
- (void).cxx_destruct;
@property(nonatomic) unsigned int IntRes2; // @synthesize IntRes2=_IntRes2;
@property(nonatomic) unsigned int IntRes3; // @synthesize IntRes3=_IntRes3;
@property(retain, nonatomic) NSString *StrRes1; // @synthesize StrRes1=_StrRes1;
@property(retain, nonatomic) NSString *StrRes2; // @synthesize StrRes2=_StrRes2;
@property(retain, nonatomic) NSString *StrRes3; // @synthesize StrRes3=_StrRes3;
@property(nonatomic) unsigned int dataTotalSize; // @synthesize dataTotalSize=_dataTotalSize;
@property(nonatomic) unsigned int favId; // @synthesize favId=_favId;
@property(retain, nonatomic) NSString *fromUser; // @synthesize fromUser=_fromUser;
@property(nonatomic) _Bool isAutoIncrement; // @synthesize isAutoIncrement;
@property(nonatomic) long long lastInsertedRowID; // @synthesize lastInsertedRowID;
@property(nonatomic) unsigned int localId; // @synthesize localId=_localId;
@property(nonatomic) unsigned int localStatus; // @synthesize localStatus=_localStatus;
@property(nonatomic) unsigned int localUpdateSeq; // @synthesize localUpdateSeq=_localUpdateSeq;
@property(retain, nonatomic) NSString *realChatName; // @synthesize realChatName=_realChatName;
@property(retain, nonatomic) NSString *sourceId; // @synthesize sourceId=_sourceId;
@property(nonatomic) unsigned int sourceType; // @synthesize sourceType=_sourceType;
@property(nonatomic) unsigned int status; // @synthesize status=_status;
@property(nonatomic) unsigned int time; // @synthesize time=_time;
@property(retain, nonatomic) NSString *toUser; // @synthesize toUser=_toUser;
@property(nonatomic) unsigned int type; // @synthesize type=_type;
@property(nonatomic) unsigned int updateSeq; // @synthesize updateSeq=_updateSeq;
@property(retain, nonatomic) NSString *xml; // @synthesize xml=_xml;
@end
|
def is_allowed (self, user = None) :
return common.user_has_role (self.db, self.uid, 'HR', 'HR-Org-Location') |
import {Camera} from "./Camera";
export class Display extends Camera {
    /** Human-readable name: appends " Display" when a name is set, else "Unknown". */
    getDisplayName(): string {
        if (!this.displayName) {
            return 'Unknown';
        }
        return this.displayName + ' Display';
    }

    /** Supported [width, height, fps] preview modes. */
    getResolutions(): [number, number, number][] {
        return [[1280, 720, 15], [1920, 1080, 15], [1600, 1200, 15]];
    }
}
|
def navigate_byid(
self,
fsource: str,
fid: str,
navigation: str,
source: str,
distance: int = 500,
) -> gpd.GeoDataFrame:
self._validate_fsource(fsource)
url = "/".join([self.base_url, "linked-data", fsource, fid, "navigation"])
valid_navigations = self._geturl(url)
if navigation not in valid_navigations.keys():
raise InvalidInputValue("navigation", list(valid_navigations.keys()))
url = valid_navigations[navigation]
r_json = self._geturl(url)
valid_sources = {s["source"].lower(): s["features"] for s in r_json}
if source not in valid_sources:
raise InvalidInputValue("source", list(valid_sources.keys()))
url = f"{valid_sources[source]}?distance={int(distance)}"
return geoutils.json2geodf(self._geturl(url), ALT_CRS, DEF_CRS) |
/**
 * Send udp multicast packet to the given ipv4 or ipv6 address - this is the most low-level implementation
 */
int SpeedwireSocket::sendto(const void* const buff, const unsigned long size, const struct sockaddr& dest) const {
    if (dest.sa_family == AF_INET) {
        const struct sockaddr_in& destv4 = AddressConversion::toSockAddrIn(dest);
        // NOTE(review): this only matches 239.0.0.0/8 (administratively
        // scoped multicast); other multicast ranges (224-238) would not get
        // the outgoing interface pinned -- confirm that is intentional.
        if ((ntohl(destv4.sin_addr.s_addr) >> 24) == 239) {
            // Pin the outgoing interface for multicast so the datagram
            // leaves via the interface this socket was configured for.
            if (setsockopt(socket_fd, IPPROTO_IP, IP_MULTICAST_IF, (const char*)&socket_interface_v4, sizeof(socket_interface_v4)) < 0) {
                perror("setsockopt IP_MULTICAST_IF failure");
                return -1;
            }
        }
    }
    // Returns the number of bytes sent, or a negative value on error.
    int nbytes = ::sendto(socket_fd, (char*)buff, size, 0, &dest, sizeof(dest));
    if (nbytes < 0) {
#ifdef _WIN32
        int error = WSAGetLastError();
        if (error == WSAENETUNREACH) {
            perror("sendto failure - a socket operation was attempted to an unreachable network");
        }
        else {
            perror("sendto failure");
        }
#else
        perror("sendto failure");
#endif
    }
    return nbytes;
}
/**
* Specifies how to extract the name from an annotation for use in determining the serialized name.
*
* @see com.google.gson.annotations.SerializedName
* @see ExtractSerializedName
*/
public abstract static class NameExtractor<A extends Annotation> implements Function<Annotation, String>,
Supplier<Predicate<Annotation>> {
protected final Class<A> annotationType;
protected final Predicate<Annotation> predicate;
protected NameExtractor(final Class<A> annotationType) {
this.annotationType = checkNotNull(annotationType, "annotationType");
this.predicate = new Predicate<Annotation>() {
public boolean apply(Annotation input) {
return input.getClass().equals(annotationType);
}
};
}
@SuppressWarnings("unchecked")
public Class<Annotation> annotationType() {
return (Class<Annotation>) annotationType;
}
@Override
public String apply(Annotation in) {
return extractName(annotationType.cast(in));
}
protected abstract String extractName(A cast);
@Override
public Predicate<Annotation> get() {
return predicate;
}
@Override
public String toString() {
return "nameExtractor(" + annotationType.getSimpleName() + ")";
}
@Override
public int hashCode() {
return annotationType.hashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null || getClass() != obj.getClass())
return false;
return annotationType.equals(NameExtractor.class.cast(obj).annotationType);
}
} |
package com.marvin.camerasurfaceview;
import android.content.Context;
import android.content.res.TypedArray;
import android.hardware.Camera;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
/**
 * Created by hmw on 2017/12/20.
 *
 * SurfaceView that opens a Camera on a dedicated background HandlerThread,
 * configures the preview to the size requested via XML attributes, and
 * forwards preview frames to an optional {@link PreviewFrameListener}.
 */
public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
    private Context mcontext;
    private HandlerThread cameraThread;
    private Handler handler;
    // NOTE(review): `camera` is touched from both the camera thread and the
    // lifecycle methods below -- confirm callers invoke onStart/onPause/
    // onDestroy from a single thread.
    private Camera camera;
    private CameraUtil cameraUtil;
    private PreviewFrameListener previewFrameListener;
    // Defaults used when the XML attributes are absent.
    private int CAMERA_WIDTH = 640;
    private int CAMERA_HEIGHT = 320;
    private int CAMERA_ID = 1;
    private int cameraWidth;
    private int cameraHeight;
    private int cameraID;

    public CameraSurfaceView(Context context) {
        this(context, null);
    }

    public CameraSurfaceView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CameraSurfaceView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        mcontext = context;
        initData(context, attrs);
        getHolder().addCallback(this);
    }

    /** Read preview size and camera id from the custom XML attributes. */
    private void initData(Context context, AttributeSet attrs) {
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.CameraSurfaceView);
        cameraWidth = typedArray.getInt(R.styleable.CameraSurfaceView_cameraWidth, CAMERA_WIDTH);
        cameraHeight = typedArray.getInt(R.styleable.CameraSurfaceView_cameraHeight, CAMERA_HEIGHT);
        cameraID = typedArray.getInt(R.styleable.CameraSurfaceView_cameraID, CAMERA_ID);
        typedArray.recycle();
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        // Open the camera on a dedicated thread so the UI thread never blocks.
        cameraThread = new HandlerThread("camera");
        cameraThread.start();
        handler = new Handler(cameraThread.getLooper());
        handler.post(new Runnable() {
            @Override
            public void run() {
                Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                if (camera == null) {
                    camera = Camera.open(cameraID);
                }
                Camera.getCameraInfo(cameraID, cameraInfo);
                try {
                    camera.setPreviewDisplay(getHolder());
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, final int width, final int height) {
        handler.post(new Runnable() {
            @Override
            public void run() {
                cameraUtil = new CameraUtil(mcontext);
                camera.stopPreview();
                cameraUtil.configureCamera(camera, width, height, cameraWidth, cameraHeight);
                // Rotate the preview to match the current screen orientation.
                if (cameraUtil.isScreenOriatationPortrait(mcontext)) {
                    camera.setDisplayOrientation(90);
                } else {
                    camera.setDisplayOrientation(0);
                }
                if (camera != null) {
                    camera.startPreview();
                    camera.setPreviewCallback(CameraSurfaceView.this);
                }
            }
        });
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        handler.post(new Runnable() {
            @Override
            public void run() {
                if (camera != null) {
                    // Bug fix: detach the callback and stop the preview before
                    // releasing, to avoid "method called after release" crashes;
                    // null the field so a later surfaceCreated() reopens it.
                    camera.setPreviewCallback(null);
                    camera.stopPreview();
                    camera.release();
                    camera = null;
                }
                // Bug fix: the surface is gone, so let the camera thread exit
                // instead of leaking it.
                cameraThread.quitSafely();
            }
        });
    }

    @Override
    public void onPreviewFrame(byte[] bytes, Camera camera) {
        // Bug fix: frames can arrive before a listener is registered.
        if (previewFrameListener != null) {
            Camera.Size size = camera.getParameters().getPreviewSize();
            previewFrameListener.PreviewFrame(bytes, camera, size.width, size.height);
        }
    }

    /**
     * Register the preview-frame callback.
     * @param previewFrameListener receiver of raw preview buffers
     */
    public void SetOnPreviewFrameListener(PreviewFrameListener previewFrameListener) {
        this.previewFrameListener = previewFrameListener;
    }

    public void onStart() {
        if (camera != null) {
            camera.startPreview();
        }
    }

    public void onPause() {
        if (camera != null) {
            camera.stopPreview();
        }
    }

    public void onDestroy() {
        if (camera != null) {
            // Bug fix: mirror the safe shutdown order used in surfaceDestroyed.
            camera.setPreviewCallback(null);
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }
}
|
def init(num_accts):
    """Authenticate the Gmail accounts and the Drive sheet service.

    Returns a (gmail_services, drive_service) tuple.
    """
    # The parameter name suggests an account *count*, but the original code
    # iterated the argument directly (which raises TypeError for an int).
    # Accept both forms; TODO confirm which one callers actually pass.
    indices = range(num_accts) if isinstance(num_accts, int) else num_accts
    gmail_services = []
    for i in indices:
        print("Authenticating account: {}".format(i))
        gmail_services.append(gmail_utils.GetService(index=i))
    drive_service = drive_utils.GetSheetService()
    return gmail_services, drive_service
// Check if there is data in the receive buffer.
// Returns the number of unread bytes (0 when empty): head minus tail,
// taken modulo the power-of-two buffer size via TWI_RX_BUFFER_MASK.
unsigned char USI_TWI_Data_In_Receive_Buffer(void)
{
  unsigned char tmpRxTail;
  // Snapshot the tail once -- presumably because it can be updated from an
  // interrupt; a single read keeps the calculation consistent (confirm).
  tmpRxTail = TWI_RxTail;
  return (TWI_RxHead - tmpRxTail) & TWI_RX_BUFFER_MASK;
}
def is_valid_channel(self, channel: discord.TextChannel) -> bool:
log.trace(f"Checking if #{channel} qualifies for code block detection.")
return (
is_help_channel(channel)
or channel.id in self.channel_cooldowns
or channel.id in constants.CodeBlock.channel_whitelist
) |
<reponame>brownplt/ovid
-- | Inductive-graph abstractions: a monadic graph-building class and a
-- simple 'Vertex' representation with export helpers.
module Data.InductiveGraph.Class
  ( GraphM (..)
  , Vertex (..)
  , expandedVertexShow
  , graphToEdgeList
  , verticesToMzSchemeReadable
  ) where
import Data.List (intersperse)
-- | Monadic graph builder: @n@ is the node handle, @a@ the node payload,
-- @e@ the edge label. The functional dependencies make all three
-- determined by the monad.
class Monad m => GraphM m n a e | m -> n, m -> a, m -> e where
  newNode :: a -> m n
  adjustNode :: a -> a -> m ()
  updateNode :: n -> (a -> a) -> m ()
  nodeValue :: n -> m a
  newEdge :: e -> n -> n -> m ()
  -- |'False' if the edge was not found
  removeEdge :: Monad m => e -> n -> n -> m Bool
-- | Adjacency-list vertex: index, payload, and outgoing neighbors.
data Vertex a = Vertex
  { vxIx :: Int
  , vxVal :: a
  , vxTo :: [Vertex a] }
-- | Keep the vertices whose payload satisfies the predicate.
-- NOTE(review): not exported above and apparently unused here -- confirm
-- before removing.
findVertices :: (a -> Bool) -> [Vertex a] -> [Vertex a]
findVertices f [] = []
findVertices f (x:xs)
  | f (vxVal x) = x:(findVertices f xs)
  | otherwise = findVertices f xs
-- Equality ignores neighbors: same index and payload suffice.
instance Eq a => Eq (Vertex a) where
  (Vertex n1 a1 _) == (Vertex n2 a2 _) = n1 == n2 && a1 == a2
-- | Non-terminating when the graph has cycles
expandedVertexShow :: Show a => Vertex a -> String
expandedVertexShow (Vertex n a vxs)
  = "(Vertex " ++ show n ++ " " ++ show a ++
    (concatMap expandedVertexShow vxs) ++ ")"
-- | 'vxs' must be a complete list of vertices.
graphToEdgeList :: [Vertex a] -> [(Int,Int)]
graphToEdgeList vxs = foldr mk [] vxs where
  -- Adds edges from n to its neighbors and recurses on all reachable
  -- vertices.
  mk (Vertex n _ vxs') edges = foldr (newEdge n) edges vxs'
  -- adds the edge (n,n') to edges if it doesn't exist
  newEdge n (Vertex n' _ _) edges
    | (n,n') `elem` edges = edges
    | otherwise = (n,n'):edges
-- | Render the vertices as a mzscheme letrec, using 'delay' to tie
-- cyclic neighbor references lazily.
verticesToMzSchemeReadable :: Show a => [Vertex a] -> String
verticesToMzSchemeReadable vs =
  "(letrec (" ++ concat bindings ++ ")\n (list" ++ concat ref ++ "))\n" where
    (bindings,ref) = unzip (map print vs)
    print (Vertex n a ws) =
      ("[node" ++ show n ++ " (cons " ++ show a ++
       " (delay (list" ++ concatMap printEdge ws ++ ")))]\n"," node" ++ show n)
    printEdge (Vertex n _ _) = " node" ++ show n
|
/**
 * Make a class based off a {@link ru.swayfarer.swl2.z.dependencies.org.squiddev.luaj.api.LuaAPI} class
 * If it already exists in the cache then use that
 *
 * @param rootClass The class to base it of
 * @return The wrapper class
 * @see #cache
 */
public Class<? extends T> makeClass(Class<?> rootClass) {
	Class<? extends T> wrapper = cache.get(rootClass);
	if (wrapper == null) {
		// NOTE(review): check-then-act is not atomic; if `cache` can be hit
		// from multiple threads, two wrappers may be generated for the same
		// root class -- confirm the intended threading model.
		wrapper = createClass(rootClass.getName() + settings.suffix, rootClass);
		cache.put(rootClass, wrapper);
	}
	return wrapper;
}
"After the battle, when night had fallen and the air was silent but for the soft moans of the wounded and dying, the Princess would take wing. All would cower, and rub their sun-discs for luck, to avoid the cold gaze of the Princess of Night. She would float over the battlefield, with a soft song escaping her lips, and ponies shuddered. For who could take such joy in the horror of war?In truth, only a few ponies ever followed her to the field. These rare ones, not afraid of the night, knew the truth. Her song was not in exultation, but in lament. A flower for the wounded, a song for those who had fallen, a tear for the final dreams of the dying..."- From the account of Commander Nightingale, the Nocturnal--------------------------------------------------------------------------------------------------------------------------------------Man it took some time to finish, but the end result was definitely worth it.For the Luna pic, I knew I had to go with something very different. For I adore the sombre, inward turned character that Luna has always felt like to me. And those emotions are very familiar to me as well. It was very important to get the feel right, hence the dark and sombre tones.I hope you guys like it, I had a lot of fun with it, the armour and the lighting always a challenge for me. But I am very proud of it. |
n = int(raw_input())
a = sorted(map(float, raw_input().split()))
p = max(a)
if p >= 0.5:
print '{:.12f}'.format(p)
else:
b = map(lambda x: 1-x, a)
best = 0
for i in xrange(n):
alla = reduce(lambda x, y: x * y, a[i:])
allb = reduce(lambda x, y: x * y, b[i:])
p = sum([a[k] * allb / b[k] for k in xrange(i, n)])
best = max(best, p)
print '{:.12f}'.format(best) |
// buildManifestResourceMeta returns resource meta for manifest. It tries to get the resource
// meta from the result object in ApplyResult struct. If the resource meta is incompleted, fall
// back to manifest template for the meta info.
func buildManifestResourceMeta(
	index int,
	manifest workapiv1.Manifest,
	restMapper meta.RESTMapper) (resourceMeta workapiv1.ManifestResourceMeta, gvr schema.GroupVersionResource, err error) {
	errs := []error{}

	var object runtime.Object

	// Prefer the already-decoded object; otherwise decode the raw bytes.
	switch {
	case manifest.Object != nil:
		object = manifest.Object
	default:
		unstructuredObj := &unstructured.Unstructured{}
		if err = unstructuredObj.UnmarshalJSON(manifest.Raw); err != nil {
			errs = append(errs, err)
			return resourceMeta, gvr, utilerrors.NewAggregate(errs)
		}
		object = unstructuredObj
	}

	resourceMeta, gvr, err = buildResourceMeta(index, object, restMapper)
	if err == nil {
		return resourceMeta, gvr, nil
	}

	// NOTE(review): errs is empty on this path, so NewAggregate returns nil
	// and the error from buildResourceMeta is effectively discarded — the
	// caller gets the (possibly partial) meta with a nil error. Confirm this
	// best-effort fallback is intentional.
	return resourceMeta, gvr, utilerrors.NewAggregate(errs)
}
def decodeCltu(cltu):
    """Decode a CLTU into the contained frame.

    Verifies the start sequence, the BCH parity byte of each code block and
    the trailer sequence.  Returns the decoded frame as an
    array.array("B"), or None if any structural or parity check fails.
    """
    cltuSize = len(cltu)
    # Body size = total size minus the fixed start and trailer sequences.
    cltuBodySize = cltuSize - CLTU_START_SEQUENCE_SIZE - CLTU_TRAILER_SEQUENCE_SIZE
    if cltuBodySize < 0:
        return None
    # The body must consist of a whole number of BCH code blocks.
    if cltuBodySize % UTIL.BCH.CODE_BLOCK_SIZE != 0:
        return None
    if cltu[:CLTU_START_SEQUENCE_SIZE] != array.array("B", CLTU_START_SEQUENCE):
        return None
    if cltu[-CLTU_TRAILER_SEQUENCE_SIZE:] != array.array("B", CLTU_TRAILER_SEQUENCE):
        return None
    # Each code block carries BCH_NETTO_SIZE payload bytes plus a parity byte.
    nrCltuCodeBlocks = cltuBodySize // UTIL.BCH.CODE_BLOCK_SIZE
    frameSize = nrCltuCodeBlocks * BCH_NETTO_SIZE
    frame = array.array("B", [0] * frameSize)
    frameIdx = 0
    cltuIdx = CLTU_START_SEQUENCE_SIZE
    codeBlkIdx = 0
    while frameIdx < frameSize:
        if codeBlkIdx == 0:
            # Start a fresh BCH shift register for each code block.
            sreg = UTIL.BCH.encodeStart()
        nextByte = cltu[cltuIdx]
        frame[frameIdx] = nextByte
        sreg = UTIL.BCH.encodeStep(sreg, nextByte)
        frameIdx += 1
        cltuIdx += 1
        codeBlkIdx += 1
        if codeBlkIdx >= BCH_NETTO_SIZE:
            # End of block payload: verify the block's parity byte.
            code = UTIL.BCH.encodeStop(sreg)
            if cltu[cltuIdx] != code:
                return None
            cltuIdx += 1
            codeBlkIdx = 0
    return frame
package graphql
import (
"strings"
"github.com/dpb587/boshua/releaseversion/datastore"
)
// BuildListQueryArgs renders three pieces of a release-version list query:
// the GraphQL filter-argument fragment, the matching variable type
// declarations, and the map of bound variable values.  Only the
// filter/limit fields marked as expected are included.
func BuildListQueryArgs(f datastore.FilterParams, l datastore.LimitParams) (string, string, map[string]interface{}) {
	var (
		filters  []string
		varTypes []string
	)
	vars := map[string]interface{}{}

	// add registers one query argument: its filter fragment, its typed
	// variable declaration, and its bound value.
	add := func(field, variable, gqlType string, value interface{}) {
		filters = append(filters, field+": $"+variable)
		varTypes = append(varTypes, "$"+variable+": "+gqlType)
		vars[variable] = value
	}

	if f.NameExpected {
		add("name", "qReleaseName", "String!", f.Name)
	}
	if f.VersionExpected {
		add("version", "qReleaseVersion", "String!", f.Version)
	}
	if f.ChecksumExpected {
		add("checksum", "qReleaseChecksum", "String!", f.Checksum)
	}
	if f.URIExpected {
		add("uri", "qReleaseUri", "String!", f.URI)
	}
	if f.LabelsExpected {
		add("labels", "qReleaseLabels", "[String!]", f.Labels)
	}
	if l.LimitExpected {
		add("limitFirst", "qReleaseLimitFirst", "Float!", l.Limit)
	}
	if l.OffsetExpected {
		add("limitOffset", "qReleaseLimitOffset", "Float!", l.Offset)
	}
	if l.MinExpected {
		add("limitMin", "qReleaseLimitMin", "Float!", l.Min)
	}
	if l.MaxExpected {
		add("limitMax", "qReleaseLimitMax", "Float!", l.Max)
	}

	return strings.Join(filters, ", "), strings.Join(varTypes, ", "), vars
}
|
<filename>references/WebRTC/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h"
#include <algorithm>
#include <cstdint>
#include "common_types.h"
#include "modules/audio_coding/codecs/ilbc/ilbc.h"
#include "rtc_base/checks.h"
#include "rtc_base/numerics/safe_conversions.h"
namespace webrtc {
namespace {

// iLBC operates on 8 kHz narrowband audio only.
const int kSampleRateHz = 8000;

// Translates a legacy CodecInst into an AudioEncoderIlbcConfig; pacsize is
// in samples at 8 kHz, so dividing by 8 yields the frame size in ms.
AudioEncoderIlbcConfig CreateConfig(const CodecInst& codec_inst) {
  AudioEncoderIlbcConfig config;
  config.frame_size_ms = codec_inst.pacsize / 8;
  return config;
}

// Returns the nominal iLBC bitrate for the given packet time in ms; any
// other packet time is a fatal programming error.
int GetIlbcBitrate(int ptime) {
  switch (ptime) {
    case 20:
    case 40:
      // 38 bytes per frame of 20 ms => 15200 bits/s.
      return 15200;
    case 30:
    case 60:
      // 50 bytes per frame of 30 ms => (approx) 13333 bits/s.
      return 13333;
    default:
      FATAL();
  }
}
}  // namespace
// Constructs the encoder from a validated config; the underlying codec
// instance is allocated by the Reset() call.
AudioEncoderIlbcImpl::AudioEncoderIlbcImpl(const AudioEncoderIlbcConfig& config,
                                           int payload_type)
    : frame_size_ms_(config.frame_size_ms),
      payload_type_(payload_type),
      num_10ms_frames_per_packet_(
          static_cast<size_t>(config.frame_size_ms / 10)),
      encoder_(nullptr) {
  RTC_CHECK(config.IsOk());
  Reset();
}

// Legacy construction path from a CodecInst.
AudioEncoderIlbcImpl::AudioEncoderIlbcImpl(const CodecInst& codec_inst)
    : AudioEncoderIlbcImpl(CreateConfig(codec_inst), codec_inst.pltype) {}

AudioEncoderIlbcImpl::~AudioEncoderIlbcImpl() {
  // Free the codec instance allocated in Reset().
  RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderFree(encoder_));
}
int AudioEncoderIlbcImpl::SampleRateHz() const {
  return kSampleRateHz;  // iLBC is narrowband-only (8 kHz).
}

size_t AudioEncoderIlbcImpl::NumChannels() const {
  return 1;  // Mono only.
}

// Packets always contain a fixed number of 10 ms frames, so the "next" and
// the "max" packet sizes coincide.
size_t AudioEncoderIlbcImpl::Num10MsFramesInNextPacket() const {
  return num_10ms_frames_per_packet_;
}

size_t AudioEncoderIlbcImpl::Max10MsFramesInAPacket() const {
  return num_10ms_frames_per_packet_;
}

int AudioEncoderIlbcImpl::GetTargetBitrate() const {
  return GetIlbcBitrate(rtc::dchecked_cast<int>(num_10ms_frames_per_packet_) *
                        10);
}
// Buffers successive 10 ms chunks until a whole packet's worth of audio is
// available, then encodes the packet in one call into 'encoded'.
AudioEncoder::EncodedInfo AudioEncoderIlbcImpl::EncodeImpl(
    uint32_t rtp_timestamp,
    rtc::ArrayView<const int16_t> audio,
    rtc::Buffer* encoded) {
  // Save timestamp if starting a new packet.
  if (num_10ms_frames_buffered_ == 0)
    first_timestamp_in_buffer_ = rtp_timestamp;

  // Buffer input.
  std::copy(audio.cbegin(), audio.cend(),
            input_buffer_ + kSampleRateHz / 100 * num_10ms_frames_buffered_);

  // If we don't yet have enough buffered input for a whole packet, we're done
  // for now.
  if (++num_10ms_frames_buffered_ < num_10ms_frames_per_packet_) {
    return EncodedInfo();
  }

  // Encode buffered input.
  RTC_DCHECK_EQ(num_10ms_frames_buffered_, num_10ms_frames_per_packet_);
  num_10ms_frames_buffered_ = 0;
  size_t encoded_bytes = encoded->AppendData(
      RequiredOutputSizeBytes(), [&](rtc::ArrayView<uint8_t> encoded) {
        const int r = WebRtcIlbcfix_Encode(
            encoder_, input_buffer_,
            kSampleRateHz / 100 * num_10ms_frames_per_packet_, encoded.data());
        RTC_CHECK_GE(r, 0);
        return static_cast<size_t>(r);
      });
  RTC_DCHECK_EQ(encoded_bytes, RequiredOutputSizeBytes());

  // The packet carries the timestamp of its first buffered 10 ms frame.
  EncodedInfo info;
  info.encoded_bytes = encoded_bytes;
  info.encoded_timestamp = first_timestamp_in_buffer_;
  info.payload_type = payload_type_;
  info.encoder_type = CodecType::kIlbc;
  return info;
}
// (Re)creates and initialises the iLBC codec instance and clears the input
// buffer counter.  For 40/60 ms packets the codec is initialised at half
// the packet time, i.e. two codec frames are packed per packet (see
// RequiredOutputSizeBytes).
void AudioEncoderIlbcImpl::Reset() {
  if (encoder_)
    RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderFree(encoder_));
  RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderCreate(&encoder_));
  const int encoder_frame_size_ms =
      frame_size_ms_ > 30 ? frame_size_ms_ / 2 : frame_size_ms_;
  RTC_CHECK_EQ(0, WebRtcIlbcfix_EncoderInit(encoder_, encoder_frame_size_ms));
  num_10ms_frames_buffered_ = 0;
}
// Exact encoded payload size per packet: 38 bytes per 20 ms codec frame and
// 50 bytes per 30 ms codec frame; 40/60 ms packets carry two codec frames.
size_t AudioEncoderIlbcImpl::RequiredOutputSizeBytes() const {
  switch (num_10ms_frames_per_packet_) {
    case 2:
      return 38;
    case 3:
      return 50;
    case 4:
      return 2 * 38;
    case 6:
      return 2 * 50;
    default:
      FATAL();
  }
}
} // namespace webrtc
|
def generate_feed_dict(graph: tf.Graph, node: Node):
    """Build a TF feed_dict for running the sub-graph wrapped by `node`.

    Returns a tuple ``(all_constants, feed_dict)`` where ``all_constants``
    is True only if every non-control-flow input of the node carries a
    known constant value.  Inputs whose value is unknown are fed with ones
    of the placeholder's declared shape/dtype so the graph can still be
    executed (e.g. for shape inference).
    """
    all_constants = True
    feed_dict = dict()
    for in_data_node_name, edge_attrs in get_inputs(node.graph, node.id):
        # Control-flow edges carry no tensor data; skip them.
        if 'control_flow_edge' in edge_attrs and edge_attrs['control_flow_edge']:
            continue
        value = node.in_node(edge_attrs['in']).value
        if value is None:
            all_constants = False
            placeholder_pb = node['pbs'][edge_attrs['placeholder_name']]
            # Substitute ones matching the placeholder's shape and dtype.
            value = np.ones(shape=tf_tensor_shape(placeholder_pb.attr['shape'].shape),
                            dtype=tf_dtype_extractor(placeholder_pb.attr['dtype'].type))
        feed_dict[graph.get_tensor_by_name(edge_attrs['placeholder_name'] + ":0")] = value
    return all_constants, feed_dict
import { v4 as uuid } from "uuid";
import {
opponent,
Player,
ProjectileState,
WeaponType,
} from "../shared/protocol";
import { systemAttributes } from "../shared/systems";
import {
DeepWritable,
sendStateToWindow,
state,
updateUnusedEnergy,
} from "./state";
let intervalId: number | null = null;
/** Starts the game loop, ticking every 100 ms; throws if already running. */
export function runGameLoop() {
  if (intervalId !== null) {
    throw new Error("Game loop already running, this is a bug!");
  }
  intervalId = self.setInterval(tick, 100);
}
/** Stops the game loop; throws if it is not currently running. */
export function stopGameLoop() {
  if (intervalId === null) {
    throw new Error("Game loop not running, this is a bug!");
  }
  self.clearInterval(intervalId);
  intervalId = null;
}
/**
 * Progress the game time by one tick (100ms), in this order:
 *
 * 1. Activate systems (e.g. trigger the shield generator first so it can
 *    deflect a projectile that would destroy it.)
 * 2. Process projectiles.
 * 3. Check win/draw conditions, stop the loop when the game is over, and
 *    broadcast the new state to the window.
 */
export function tick() {
  activateWeapon("player1", "weapon1");
  activateWeapon("player1", "weapon2");
  activateWeapon("player2", "weapon1");
  activateWeapon("player2", "weapon2");
  activateShieldGenerator("player1");
  activateShieldGenerator("player2");
  activateThrusters("player1");
  activateThrusters("player2");
  // Drop projectiles that report themselves as no longer needed.
  state.projectiles = state.projectiles.filter((p) => processProjectile(p));
  const player1Lost = hasPlayerLost("player1");
  const player2Lost = hasPlayerLost("player2");
  if (player1Lost && player2Lost) {
    state.phase = "draw";
  } else if (player1Lost) {
    state.phase = "player2-win";
  } else if (player2Lost) {
    state.phase = "player1-win";
  }
  if (state.phase !== "running") stopGameLoop();
  sendStateToWindow();
}
/**
 * Charges one weapon and fires a projectile once fully charged.  A weapon
 * that is destroyed, unpowered, untargeted, or aimed at an already
 * destroyed system does nothing and loses any accumulated charge.
 */
function activateWeapon(player: Player, weapon: "weapon1" | "weapon2") {
  const weaponState = state[player].ship[weapon];
  const type = weaponState.type as WeaponType;
  const weaponAttributes = systemAttributes[type];
  if (
    weaponState.hp === 0 ||
    weaponState.energy === 0 ||
    weaponState.target === null ||
    state[opponent(player)].ship[weaponState.target].hp === 0
  ) {
    weaponState.charge = 0;
    return;
  }
  // The charge threshold scales with the energy invested in the weapon.
  const requiredCharge =
    weaponAttributes["RELOAD SPEED"] +
    weaponState.energy * weaponAttributes["RLD. SP./ENERGY"];
  if (weaponState.charge < requiredCharge) {
    weaponState.charge++;
    return;
  }
  state.projectiles.push({
    id: uuid(),
    type,
    targetPlayer: opponent(player),
    targetSystem: weaponState.target,
    // Add 1 because the projectile's timeToImpact is already decremented
    // once when it is processed later in this same tick.
    timeToImpact: weaponAttributes["PROJECTILE SPEED"] + 1,
  });
  weaponState.charge = 0;
}
/**
 * Charges the shield generator and, once fully charged, regenerates the
 * ship's shield up to the generator's maximum.  A destroyed or unpowered
 * generator does nothing and loses its charge.
 */
function activateShieldGenerator(player: Player) {
  const shieldGeneratorState = state[player].ship.shieldGenerator;
  const shieldGeneratorAttributes =
    systemAttributes[
      shieldGeneratorState.type as "SHIELD GENERATOR S" | "SHIELD GENERATOR L"
    ];
  if (shieldGeneratorState.hp === 0 || shieldGeneratorState.energy === 0) {
    shieldGeneratorState.charge = 0;
    return;
  }
  // The charge threshold scales with the energy invested in the generator.
  const requiredCharge =
    shieldGeneratorAttributes["RELOAD SPEED"] +
    shieldGeneratorState.energy * shieldGeneratorAttributes["RLD. SP./ENERGY"];
  if (shieldGeneratorState.charge < requiredCharge) {
    shieldGeneratorState.charge++;
    return;
  }
  // Regenerate, clamped to the generator's maximum shield HP.
  state[player].ship.shieldHp = Math.min(
    state[player].ship.shieldHp + shieldGeneratorAttributes["SHIELD REGEN"],
    shieldGeneratorAttributes["MAX SHIELD HP"]
  );
  shieldGeneratorState.charge = 0;
}
/**
 * Recomputes the ship's dodge chance from its thruster energy; destroyed
 * or unpowered thrusters yield a dodge chance of zero.
 */
function activateThrusters(player: Player) {
  const { ship } = state[player];
  const thrustersAttributes =
    systemAttributes[ship.thrusters.type as "THRUSTERS S" | "THRUSTERS L"];
  ship.dodge =
    ship.thrusters.hp === 0 || ship.thrusters.energy === 0
      ? 0
      : thrustersAttributes["DODGE/ENERGY"] * ship.thrusters.energy;
}
/**
* Returns `false` if this projectile is no longer needed and can be deleted.
*/
/**
 * Returns `false` if this projectile is no longer needed and can be deleted.
 *
 * When timeToImpact reaches 0 the hit is resolved: a dodge roll first,
 * then shield absorption, then hull damage.  A destroyed system is
 * de-energised and any enemy weapons locked onto it lose their target.
 * After impact the projectile survives one more tick (carrying its damage
 * report) before being removed.  The field name `damangeDone` [sic] is
 * fixed by the shared protocol.
 */
function processProjectile(projectile: DeepWritable<ProjectileState>): boolean {
  projectile.timeToImpact--;
  if (projectile.timeToImpact < 0) return false;
  if (projectile.timeToImpact === 0) {
    const projectileAttributes = systemAttributes[projectile.type];
    const targetShip = state[projectile.targetPlayer].ship;
    const targetSystem = targetShip[projectile.targetSystem];
    if (Math.random() < targetShip.dodge) {
      projectile.damageType = "miss";
    } else if (targetShip.shieldHp > 0) {
      // Shields absorb the hit, up to their remaining HP.
      projectile.damageType = "shield";
      projectile.damangeDone = Math.min(
        targetShip.shieldHp,
        projectileAttributes["SHIELD DMG"]
      );
      targetShip.shieldHp -= projectile.damangeDone;
    } else {
      projectile.damageType = "hull";
      projectile.damangeDone = Math.min(
        targetSystem.hp,
        projectileAttributes["HULL DMG"]
      );
      targetSystem.hp -= projectile.damangeDone;
      if (targetSystem.hp === 0) {
        // A destroyed system can no longer hold energy.
        targetSystem.energy = 0;
        updateUnusedEnergy(projectile.targetPlayer);
        // Clear enemy weapon locks on the destroyed system.
        const attackerShip = state[opponent(projectile.targetPlayer)].ship;
        if (attackerShip.weapon1.target === projectile.targetSystem) {
          attackerShip.weapon1.target = null;
        }
        if (attackerShip.weapon2.target === projectile.targetSystem) {
          attackerShip.weapon2.target = null;
        }
      }
    }
  }
  return true;
}
/**
 * A player has lost once every one of their ship systems is destroyed.
 */
function hasPlayerLost(player: Player): boolean {
  const { ship } = state[player];
  const systems = [ship.weapon1, ship.weapon2, ship.shieldGenerator, ship.thrusters];
  return systems.every((system) => system.hp === 0);
}
|
package org.uengine.processmanager;
import org.uengine.kernel.TransactionListener;
import org.uengine.util.dao.ConnectionFactory;
import java.util.List;
/**
 * Created by uengine on 2018. 11. 16..
 *
 * A unit-of-work context shared across process-engine operations: it hands
 * out connections (via {@link ConnectionFactory}), carries transaction
 * listeners and arbitrary shared key/value state, and controls the
 * transaction outcome.
 */
public interface TransactionContext extends ConnectionFactory{

    /** Registers a listener to be notified of transaction lifecycle events. */
    void addTransactionListener(TransactionListener tl);

    /** Returns the currently registered transaction listeners. */
    List getTransactionListeners();

    /** Returns the shared value stored under the given key, if any. */
    Object getSharedContext(String contextKey);

    /** Stores a value under the given key for the duration of the transaction. */
    void setSharedContext(String contextKey, Object value);

    /** Commits the transaction. */
    void commit() throws Exception;

    /** Rolls the transaction back. */
    void rollback() throws Exception;

    /** Releases any resources held by this context. */
    void releaseResources() throws Exception;

    /** Appends a message to this context's debug trail. */
    void addDebugInfo(String s);
}
|
Identification and management of nontraumatic splenic rupture.
A 43-year-old, previously fit and well gentleman presented to the emergency department (ED) with a two-day history of worsening epigastric pain. He had had coryzal symptoms the preceding week but had no other past medical history. He was haemodynamically stable at presentation, and an ultrasound scan (US) performed in the ED could not definitively rule out intra-abdominal fluid. In view of his tender abdomen on examination and a haemoglobin level of 9.2 g/dL, a computerised tomography (CT) scan was performed and revealed extensive high-density fluid within the peritoneal cavity, raising the possibility of a concealed bleed, but no obvious source was identified by the scan.
<filename>src/Data/Parameterized/Context.hs<gh_stars>0
------------------------------------------------------------------------
-- |
-- Module : Data.Parameterized.Context
-- Copyright : (c) Galois, Inc 2014-16
-- Maintainer : <NAME> <<EMAIL>>
--
-- This module reexports either "Data.Parameterized.Context.Safe"
-- or "Data.Parameterized.Context.Unsafe" depending on the
-- the unsafe-operations compile-time flag.
--
-- It also defines some utility typeclasses for transforming
-- between curried and uncurried versions of functions over contexts.
------------------------------------------------------------------------
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE ViewPatterns #-}
module Data.Parameterized.Context
(
#ifdef UNSAFE_OPS
module Data.Parameterized.Context.Unsafe
#else
module Data.Parameterized.Context.Safe
#endif
, singleton
, toVector
, pattern (:>)
, pattern Empty
, decompose
, Data.Parameterized.Context.null
, Data.Parameterized.Context.init
, Data.Parameterized.Context.last
, Data.Parameterized.Context.view
, Data.Parameterized.Context.take
, forIndexM
, generateSome
, generateSomeM
, fromList
, traverseAndCollect
-- * Context extension and embedding utilities
, CtxEmbedding(..)
, ExtendContext(..)
, ExtendContext'(..)
, ApplyEmbedding(..)
, ApplyEmbedding'(..)
, identityEmbedding
, extendEmbeddingRightDiff
, extendEmbeddingRight
, extendEmbeddingBoth
, appendEmbedding
, ctxeSize
, ctxeAssignment
-- * Static indexing and lenses for assignments
, Idx
, field
, natIndex
, natIndexProxy
-- * Currying and uncurrying for assignments
, CurryAssignment
, CurryAssignmentClass(..)
-- * Size and Index values
, size1, size2, size3, size4, size5, size6
, i1of2, i2of2
, i1of3, i2of3, i3of3
, i1of4, i2of4, i3of4, i4of4
, i1of5, i2of5, i3of5, i4of5, i5of5
, i1of6, i2of6, i3of6, i4of6, i5of6, i6of6
) where
import Control.Applicative (liftA2)
import Control.Lens hiding (Index, (:>), Empty)
import qualified Data.Vector as V
import qualified Data.Vector.Mutable as MV
import GHC.TypeLits (Nat, type (-))
import Data.Monoid ((<>))
import Data.Parameterized.Classes
import Data.Parameterized.Some
import Data.Parameterized.TraversableFC
#ifdef UNSAFE_OPS
import Data.Parameterized.Context.Unsafe
#else
import Data.Parameterized.Context.Safe
#endif
-- | Create a single element context.
singleton :: f tp -> Assignment f (EmptyCtx ::> tp)
singleton = (empty :>)

-- |'forIndexM sz f' calls 'f' on indices '[0..sz-1]'.
forIndexM :: forall ctx m
           . Applicative m
          => Size ctx
          -> (forall tp . Index ctx tp -> m ())
          -> m ()
forIndexM sz f = forIndexRange 0 sz (\i r -> f i *> r) (pure ())

-- | Generate an assignment with some context type that is not known.
-- The generator is called on positions 0 .. n-1, left to right.
generateSome :: forall f
              . Int
             -> (Int -> Some f)
             -> Some (Assignment f)
generateSome n f = go n
  where go :: Int -> Some (Assignment f)
        go 0 = Some empty
        go i = (\(Some a) (Some e) -> Some (a `extend` e)) (go (i-1)) (f (i-1))

-- | Generate an assignment with some context type that is not known.
-- Effects are sequenced over positions 0 .. n-1, left to right.
generateSomeM :: forall m f
               . Applicative m
              => Int
              -> (Int -> m (Some f))
              -> m (Some (Assignment f))
generateSomeM n f = go n
  where go :: Int -> m (Some (Assignment f))
        go 0 = pure (Some empty)
        go i = (\(Some a) (Some e) -> Some (a `extend` e)) <$> go (i-1) <*> f (i-1)

-- | Convert the assignment to a vector, mapping every element through 'f'.
toVector :: Assignment f tps -> (forall tp . f tp -> e) -> V.Vector e
toVector a f = V.create $ do
  -- Fill a mutable vector in index order, then freeze it.
  vm <- MV.new (sizeInt (size a))
  forIndexM (size a) $ \i -> do
    MV.write vm (indexVal i) (f (a ! i))
  return vm
{-# INLINABLE toVector #-}
--------------------------------------------------------------------------------
-- Patterns

-- | Pattern synonym for the empty assignment
pattern Empty :: () => ctx ~ EmptyCtx => Assignment f ctx
pattern Empty <- (viewAssign -> AssignEmpty)
  where Empty = empty

-- NB: no precedence digit, so ':>' gets the default fixity level of 9.
infixl :>

-- | Pattern synonym for extending an assignment on the right
pattern (:>) :: () => ctx' ~ (ctx ::> tp) => Assignment f ctx -> f tp -> Assignment f ctx'
pattern (:>) a v <- (viewAssign -> AssignExtend a v)
  where a :> v = extend a v

-- The COMPLETE pragma was not defined until ghc 8.2.*
#if MIN_VERSION_base(4,10,0)
{-# COMPLETE (:>), Empty :: Assignment #-}
#endif
--------------------------------------------------------------------------------
-- Views

-- | Return true if assignment is empty.
null :: Assignment f ctx -> Bool
null a =
  case viewAssign a of
    AssignEmpty -> True
    AssignExtend{} -> False

-- | Split a non-empty assignment into its prefix and its last element.
decompose :: Assignment f (ctx ::> tp) -> (Assignment f ctx, f tp)
decompose x = (Data.Parameterized.Context.init x, Data.Parameterized.Context.last x)

-- | Return assignment with all but the last block.
init :: Assignment f (ctx '::> tp) -> Assignment f ctx
init x =
  case viewAssign x of
    AssignExtend t _ -> t

-- | Return the last element in the assignment.
last :: Assignment f (ctx '::> tp) -> f tp
last x =
  case viewAssign x of
    AssignExtend _ e -> e

{-# DEPRECATED view "Use viewAssign or the Empty and :> patterns instead." #-}
-- | View an assignment as either empty or an assignment with one appended.
view :: forall f ctx . Assignment f ctx -> AssignView f ctx
view = viewAssign

-- | Return the prefix of size 'sz' of an assignment over an appended
-- context @ctx <+> ctx'@.
take :: forall f ctx ctx'. Size ctx -> Size ctx' -> Assignment f (ctx <+> ctx') -> Assignment f ctx
take sz sz' asgn =
  let diff = appendDiff sz' in
  generate sz (\i -> asgn ! extendIndex' diff i)
--------------------------------------------------------------------------------
-- Context embedding.

-- | This datastructure contains a proof that the first context is
-- embeddable in the second.  This is useful if we want to add extend
-- an existing term under a larger context.
data CtxEmbedding (ctx :: Ctx k) (ctx' :: Ctx k)
  = CtxEmbedding { _ctxeSize :: Size ctx'
                 , _ctxeAssignment :: Assignment (Index ctx') ctx
                 }

-- Alternate encoding?
-- data CtxEmbedding ctx ctx' where
--   EIdentity  :: CtxEmbedding ctx ctx
--   ExtendBoth :: CtxEmbedding ctx ctx' -> CtxEmbedding (ctx ::> tp) (ctx' ::> tp)
--   ExtendOne  :: CtxEmbedding ctx ctx' -> CtxEmbedding ctx (ctx' ::> tp)

-- | Lens onto the size of the target (larger) context.
ctxeSize :: Simple Lens (CtxEmbedding ctx ctx') (Size ctx')
ctxeSize = lens _ctxeSize (\s v -> s { _ctxeSize = v })

-- | Lens onto the index mapping; polymorphic so the source context may be
-- replaced.
ctxeAssignment :: Lens (CtxEmbedding ctx1 ctx') (CtxEmbedding ctx2 ctx')
                       (Assignment (Index ctx') ctx1) (Assignment (Index ctx') ctx2)
ctxeAssignment = lens _ctxeAssignment (\s v -> s { _ctxeAssignment = v })

-- | Types that can be transported along a context embedding.
class ApplyEmbedding (f :: Ctx k -> *) where
  applyEmbedding :: CtxEmbedding ctx ctx' -> f ctx -> f ctx'

class ApplyEmbedding' (f :: Ctx k -> k' -> *) where
  applyEmbedding' :: CtxEmbedding ctx ctx' -> f ctx v -> f ctx' v

-- | Types that can be transported along a context 'Diff'.
class ExtendContext (f :: Ctx k -> *) where
  extendContext :: Diff ctx ctx' -> f ctx -> f ctx'

class ExtendContext' (f :: Ctx k -> k' -> *) where
  extendContext' :: Diff ctx ctx' -> f ctx v -> f ctx' v

instance ApplyEmbedding' Index where
  applyEmbedding' ctxe idx = (ctxe ^. ctxeAssignment) ! idx

instance ExtendContext' Index where
  extendContext' = extendIndex'

-- -- This is the inefficient way of doing things.  A better way is to
-- -- just have a map between indices.
-- applyEmbedding :: CtxEmbedding ctx ctx'
--                -> Index ctx tp -> Index ctx' tp
-- applyEmbedding ctxe idx = (ctxe ^. ctxeAssignment) ! idx

-- | The identity embedding of a context into itself.
identityEmbedding :: Size ctx -> CtxEmbedding ctx ctx
identityEmbedding sz = CtxEmbedding sz (generate sz id)

-- emptyEmbedding :: CtxEmbedding EmptyCtx EmptyCtx
-- emptyEmbedding = identityEmbedding knownSize

-- | Widen the target context of an embedding by an arbitrary 'Diff'.
extendEmbeddingRightDiff :: forall ctx ctx' ctx''.
                            Diff ctx' ctx''
                         -> CtxEmbedding ctx ctx'
                         -> CtxEmbedding ctx ctx''
extendEmbeddingRightDiff diff (CtxEmbedding sz' assgn) = CtxEmbedding (extSize sz' diff) updated
  where
    updated :: Assignment (Index ctx'') ctx
    updated = fmapFC (extendIndex' diff) assgn

-- | Widen the target context by a single element.
extendEmbeddingRight :: CtxEmbedding ctx ctx' -> CtxEmbedding ctx (ctx' ::> tp)
extendEmbeddingRight = extendEmbeddingRightDiff knownDiff

-- | Embed a context into itself appended with another context.
appendEmbedding :: Size ctx -> Size ctx' -> CtxEmbedding ctx (ctx <+> ctx')
appendEmbedding sz sz' = CtxEmbedding (addSize sz sz') (generate sz (extendIndex' diff))
  where
    diff = appendDiff sz'

-- | Extend both source and target contexts with the same new element,
-- mapping the new source index to the new target index.
extendEmbeddingBoth :: forall ctx ctx' tp. CtxEmbedding ctx ctx' -> CtxEmbedding (ctx ::> tp) (ctx' ::> tp)
extendEmbeddingBoth ctxe = updated & ctxeAssignment %~ flip extend (nextIndex (ctxe ^. ctxeSize))
  where
    updated :: CtxEmbedding ctx (ctx' ::> tp)
    updated = extendEmbeddingRight ctxe
--------------------------------------------------------------------------------
-- Static indexing based on type-level naturals

-- | Get a lens for an position in an 'Assignment' by zero-based, left-to-right position.
-- The position must be specified using @TypeApplications@ for the @n@ parameter.
field :: forall n ctx f r. Idx n ctx r => Lens' (Assignment f ctx) (f r)
field = ixF' (natIndex @n)

-- | Constraint synonym used for getting an 'Index' into a 'Ctx'.
-- @n@ is the zero-based, left-counted index into the list of types
-- @ctx@ which has the type @r@.
type Idx n ctx r = (ValidIx n ctx, Idx' (FromLeft ctx n) ctx r)

-- | Compute an 'Index' value for a particular position in a 'Ctx'. The
-- @TypeApplications@ extension will be needed to disambiguate the choice
-- of the type @n@.
natIndex :: forall n ctx r. Idx n ctx r => Index ctx r
natIndex = natIndex' @_ @(FromLeft ctx n)

-- | This version of 'natIndex' is suitable for use without the @TypeApplications@
-- extension.
natIndexProxy :: forall n ctx r proxy. Idx n ctx r => proxy n -> Index ctx r
natIndexProxy _ = natIndex @n

------------------------------------------------------------------------
-- Implementation
------------------------------------------------------------------------

-- | Class for computing 'Index' values for positions in a 'Ctx'.
-- Internally @n@ counts from the right; 'FromLeft' performs the conversion.
class KnownContext ctx => Idx' (n :: Nat) (ctx :: Ctx k) (r :: k) | n ctx -> r where
  natIndex' :: Index ctx r

-- | Base-case: offset 0 from the right is the last element.
instance KnownContext xs => Idx' 0 (xs '::> x) x where
  natIndex' = lastIndex knownSize

-- | Inductive-step: skip the last element and recurse with n-1.
instance {-# Overlaps #-} (KnownContext xs, Idx' (n-1) xs r) =>
  Idx' n (xs '::> x) r where
  natIndex' = skipIndex (natIndex' @_ @(n-1))
--------------------------------------------------------------------------------
-- CurryAssignment

-- | This type family is used to define currying\/uncurrying operations
-- on assignments.  It is best understood by seeing its evaluation on
-- several examples:
--
-- > CurryAssignment EmptyCtx f x = x
-- > CurryAssignment (EmptyCtx ::> a) f x = f a -> x
-- > CurryAssignment (EmptyCtx ::> a ::> b) f x = f a -> f b -> x
-- > CurryAssignment (EmptyCtx ::> a ::> b ::> c) f x = f a -> f b -> f c -> x
type family CurryAssignment (ctx :: Ctx k) (f :: k -> *) (x :: *) :: * where
   CurryAssignment EmptyCtx    f x = x
   CurryAssignment (ctx ::> a) f x = CurryAssignment ctx f (f a -> x)

-- | This class implements two methods that witness the isomorphism between
-- curried and uncurried functions.
class CurryAssignmentClass (ctx :: Ctx k) where

  -- | Transform a function that accepts an assignment into one with a separate
  -- variable for each element of the assignment.
  curryAssignment   :: (Assignment f ctx -> x) -> CurryAssignment ctx f x

  -- | Transform a curried function into one that accepts an assignment value.
  uncurryAssignment :: CurryAssignment ctx f x -> (Assignment f ctx -> x)

instance CurryAssignmentClass EmptyCtx where
  curryAssignment k = k empty
  uncurryAssignment k _ = k

instance CurryAssignmentClass ctx => CurryAssignmentClass (ctx ::> a) where
  curryAssignment k = curryAssignment (\asgn a -> k (asgn :> a))
  -- Peel off the last element and feed it as the final curried argument.
  uncurryAssignment k asgn =
    case viewAssign asgn of
      AssignExtend asgn' x -> uncurryAssignment k asgn' x
-- | Create an assignment from a list of values.
fromList :: [Some f] -> Some (Assignment f)
fromList = go empty
  where go :: Assignment f ctx -> [Some f] -> Some (Assignment f)
        go prev [] = Some prev
        -- Strict application keeps the accumulator free of thunks.
        go prev (Some g:next) = (go $! prev `extend` g) next

-- | An applicative that ignores its value component and merely accumulates
-- a monoidal summary; used to implement 'traverseAndCollect'.
newtype Collector m w a = Collector { runCollector :: m w }

instance Functor (Collector m w) where
  fmap _ (Collector x) = Collector x

instance (Applicative m, Monoid w) => Applicative (Collector m w) where
  pure _ = Collector (pure mempty)
  Collector x <*> Collector y = Collector (liftA2 (<>) x y)

-- | Visit each of the elements in an @Assignment@ in order
-- from left to right and collect the results using the provided @Monoid@.
traverseAndCollect ::
  (Monoid w, Applicative m) =>
  (forall tp. Index ctx tp -> f tp -> m w) ->
  Assignment f ctx ->
  m w
traverseAndCollect f =
  runCollector . traverseWithIndex (\i x -> Collector (f i x))
--------------------------------------------------------------------------------
-- Size and Index values

-- Explicit 'Size' witnesses for contexts of one to six elements.
size1 :: Size (EmptyCtx ::> a)
size1 = incSize zeroSize

size2 :: Size (EmptyCtx ::> a ::> b)
size2 = incSize size1

size3 :: Size (EmptyCtx ::> a ::> b ::> c)
size3 = incSize size2

size4 :: Size (EmptyCtx ::> a ::> b ::> c ::> d)
size4 = incSize size3

size5 :: Size (EmptyCtx ::> a ::> b ::> c ::> d ::> e)
size5 = incSize size4

size6 :: Size (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f)
size6 = incSize size5

-- 'iMofN' is the index of the M-th of N elements (1-based, left to right).
i1of2 :: Index (EmptyCtx ::> a ::> b) a
i1of2 = skipIndex baseIndex

i2of2 :: Index (EmptyCtx ::> a ::> b) b
i2of2 = nextIndex size1

i1of3 :: Index (EmptyCtx ::> a ::> b ::> c) a
i1of3 = skipIndex i1of2

i2of3 :: Index (EmptyCtx ::> a ::> b ::> c) b
i2of3 = skipIndex i2of2

i3of3 :: Index (EmptyCtx ::> a ::> b ::> c) c
i3of3 = nextIndex size2

i1of4 :: Index (EmptyCtx ::> a ::> b ::> c ::> d) a
i1of4 = skipIndex i1of3

i2of4 :: Index (EmptyCtx ::> a ::> b ::> c ::> d) b
i2of4 = skipIndex i2of3

i3of4 :: Index (EmptyCtx ::> a ::> b ::> c ::> d) c
i3of4 = skipIndex i3of3

i4of4 :: Index (EmptyCtx ::> a ::> b ::> c ::> d) d
i4of4 = nextIndex size3

i1of5 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e) a
i1of5 = skipIndex i1of4

i2of5 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e) b
i2of5 = skipIndex i2of4

i3of5 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e) c
i3of5 = skipIndex i3of4

i4of5 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e) d
i4of5 = skipIndex i4of4

i5of5 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e) e
i5of5 = nextIndex size4

i1of6 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f) a
i1of6 = skipIndex i1of5

i2of6 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f) b
i2of6 = skipIndex i2of5

i3of6 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f) c
i3of6 = skipIndex i3of5

i4of6 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f) d
i4of6 = skipIndex i4of5

i5of6 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f) e
i5of6 = skipIndex i5of5

i6of6 :: Index (EmptyCtx ::> a ::> b ::> c ::> d ::> e ::> f) f
i6of6 = nextIndex size5
|
class SlipEchoClient:
    """Client for the SLIP echo server.

    Wraps a ``SlipSocket`` connection; can be used as a context manager so
    the socket is closed even if an exchange raises.
    """

    def __init__(self, address):
        """Connect to the SLIP echo server at *address* (a ``(host, port)`` pair)."""
        self.sock = SlipSocket.create_connection(address)

    def echo(self, msg):
        """Send *msg* to the SLIP server and return the server's response."""
        self.sock.send_msg(msg)
        return self.sock.recv_msg()

    def close(self):
        """Close the SLIP socket."""
        self.sock.close()

    # Context-manager support (backward-compatible addition): guarantees the
    # socket is released when the ``with`` block exits.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False
/**
* End a session for all entity stores under purview of the current node store
*
* @param datasetId Dataset identifier. If there are data with this dataset id, a completed future with
* <code>true</code> is returned - This can happen in a multi node setup.
* @param sessionId Session id. The session id is validated before ending the session. If the session validation
* fails, then a a completed future with <code>false</code> is returned.
* @param writers Set of writer threads used for ingesting data. Because entity stores assumes single writers, it
* is important that the same thread which ingested data ends the session (there can be some data
* remaining in the memTable).
* @return list of {@link CompletableFuture<Boolean>} with a future for each entity store
*/
public List<CompletableFuture<Boolean>> endSession(String datasetId, long sessionId, WriterPool writers) {
if (!entityStoreMap.containsKey(datasetId)) {
CompletableFuture<Boolean> returnValue = new CompletableFuture<>();
returnValue.complete(true);
return Collections.singletonList(returnValue);
}
SessionValidator.SessionValidationResponse response = validateSession(datasetId, sessionId);
if (!response.valid) {
CompletableFuture<Boolean> returnValue = new CompletableFuture<>();
returnValue.complete(false);
return Collections.singletonList(returnValue);
}
return entityStoreMap.get(datasetId).keySet().stream().map(entityId -> CompletableFuture.supplyAsync(
() -> endEntityStoreSession(datasetId, entityId, response.userId, sessionId, response.sessionStartTS),
writers.getExecutor(entityId.hashCode()))).collect(Collectors.toList());
} |
/* Write the PCH file.  This is called at the end of a compilation which
   will produce a PCH file.  */

void
c_common_write_pch (void)
{
  timevar_push (TV_PCH_SAVE);

  /* Give the target and the debug machinery a chance to prepare their
     state before it is saved.  */
  targetm.prepare_pch_save ();
  (*debug_hooks->handle_pch) (1);

  prepare_target_option_nodes_for_pch ();

  /* Emit the preprocessor dependency data, then the GC heap image.  */
  cpp_write_pch_deps (parse_in, pch_outfile);
  gt_pch_save (pch_outfile);

  timevar_push (TV_PCH_CPP_SAVE);
  cpp_write_pch_state (parse_in, pch_outfile);
  timevar_pop (TV_PCH_CPP_SAVE);

  /* Rewind to the start of the file and (over)write the identification
     header; failure to seek or write is fatal.  */
  if (fseek (pch_outfile, 0, SEEK_SET) != 0
      || fwrite (get_ident (), IDENT_LENGTH, 1, pch_outfile) != 1)
    fatal_error (input_location, "can%'t write %s: %m", pch_file);

  fclose (pch_outfile);

  timevar_pop (TV_PCH_SAVE);
}
<gh_stars>0
use super::*;
// --- Printer tests: parse the DSL source on the left and compare against
// --- the expected printed representation on the right (via `print_str`).

#[test]
// #[ignore]
fn symbol() {
    // A bare symbol prints as itself.
    print_str("abc", "abc");
}

#[test]
// #[ignore]
fn number() {
    // A number literal prints as itself.
    print_str("123", "123");
}

#[test]
// #[ignore]
fn unit() {
    // ";" is the unit value and prints as itself.
    print_str(";", ";");
}

#[test]
#[ignore]
fn tpiq() {
    // print_str("|: abc 123", ":< abc 123 >");
    print_str("|# abc 123", "#< abc 123 >");
}

#[test]
// #[ignore]
fn nested_tpiq() {
    // Pipe-prefixed pair forms nest left-to-right.
    print_str("|: |: cde |: abc 123 456", ":(:(cde :(abc 123)) 456)");
}

#[test]
// #[ignore]
fn list() {
    // Bracket lists desugar to nested ":" pairs terminated by unit.
    print_str("[abc 123]", ":(abc :(123 ;))");
}

#[test]
// #[ignore]
fn empty_env() {
    print_str("|% ; -1", "%(; -1)");
}

#[test]
// #[ignore]
fn resolve_piq() {
    print_str("|@ abc ;", "@(abc ;)");
}

#[test]
// #[ignore]
fn block() {
    print_str(
        r"|> ; |! |\ |% ; ; ^> -1 [|# abc 123 |@ ; abc] ;",
        r">(; !(\(%(; ;) ^>(-1 :(#(abc 123) :(@(; abc) ;)))) ;))"
    );
}
// --- Evaluation tests: `print_evaled_str` evaluates the source and
// --- compares the printed result.

#[test]
// #[ignore]
fn evaled_symbol_ast() {
    print_evaled_str("abc", "abc");
}

#[test]
// #[ignore]
fn evaled_number_ast() {
    print_evaled_str("123", "123");
}

#[test]
// #[ignore]
fn evaled_define_ast() {
    // define symbol is number
    print_evaled_str("|> ; |# abc 123", ";");
}

/*
#[test]
#[ignore]
fn evaled_apply() {
    print_evaled_str(r"|> ; |! |\ |% ; ; 0 ;", r"0");
}
*/

#[test]
// #[ignore]
fn evaled_list() {
    // "^>" evaluates a list; the last element is the result.
    print_evaled_str(r"|> ; ^> -1 [1 2 3]", r"3");
}

#[test]
// #[ignore]
fn evaled_defining_list() {
    // A definition made inside the list is visible to later elements.
    // print_evaled_str(r"|> ; |# abc 123", r";");
    // print_evaled_str(r"|> ; |@ ; abc", r";");
    print_evaled_str(r"|> ; ^> -1 [|# abc 1234 |@ ; abc]", r"1234");
}

#[test]
// #[ignore]
fn access() {
    // "|." accesses a component of a pair; "q" selects the second slot here.
    // print_str("|. a p", ".(a p)");
    // print_evaled_str("|> ; |. |: 1 3 p", "p")
    print_evaled_str("|> ; |. |: 1 3 q", "3")
}

#[test]
// #[ignore]
fn condition() {
    // "|?" selects from the pair depending on the ^T/^F condition.
    // print_str("|? abc 123", "?(abc 123)");
    // print_str("^T", "^T");
    // print_str("^F", "^F");
    print_evaled_str("|> ; |? ^T |: 1 0", "1");
    print_evaled_str("|> ; |? ^F |: 1 0", "0");
}

#[test]
// #[ignore]
fn exec_func() {
    // Applies a lambda of three params [a b c] to [6667 6668 6669]; the body
    // resolves c then b, so the result is b's binding.
    // print_str(r"|% ; ;", ";a")
    // print_str(r"|> ; |! |\ |% ; ; 1 ;", ";a")
    print_evaled_str(r"|> ; |! |\ |% ; [a b c] ^> -1 [|@ ; c |@ ; b] [6667 6668 6669]", "6668")
}
#[test]
#[ignore]
fn primitive_function() {
    // Built-in primitives: decr (decrement) and ltoreq (less-than-or-equal).
    // print_evaled_str("|> ; |@ ; decr", "Prim(decr)");
    // print_evaled_str("|> ; |@ ; ltoreq", "Prim(ltoreq)");
    print_evaled_str("|> ; |! |> ; |@ ; decr [5]", "4");
    print_evaled_str("|> ; |! |> ; |@ ; ltoreq [5 4]", "^F"); // 5 <= 4
    // print_evaled_str("|> ; ^> -1 [|! |> ; |@ ; ltoreq [5 4] |! |> ; |@ ; decr [5]]", ";");
    // print_evaled_str(r"|> ; ^> -1 [|! |> ; |@ ; ltoreq [5 4] |! |> ; |\ |% ; [x] |> ; ^> -1 [|@ ; x][5642]]", ";");
}

#[test]
#[ignore]
fn tarai() {
    // The tak/tarai benchmark function written in the DSL; tak(2, 1, 0) == 2.
    print_evaled_str(
        r"|> ; ^> -1
        [
        |# tak |\ |% ; [x y z]
        ^> -1 [
        |? |> ; |! |> ; |@ ; ltoreq [|> ; |@ ; x |> ; |@ ; y]
        |: |> ; |@ ; y
        |> ; |! |> ; |@ ; tak [
        |> ; |! |> ; |@ ; tak [|> ; |! |> ; |@ ; decr [|> ; |@ ; x] |> ; |@ ; y |> ; |@ ; z]
        |> ; |! |> ; |@ ; tak [|> ; |! |> ; |@ ; decr [|> ; |@ ; y] |> ; |@ ; z |> ; |@ ; x]
        |> ; |! |> ; |@ ; tak [|> ; |! |> ; |@ ; decr [|> ; |@ ; z] |> ; |@ ; x |> ; |@ ; y]
        ]
        ]
        |! |> ; |@ ; tak [2 1 0]
        ]",
        r"2"
    );
}

#[test]
#[ignore]
fn fib() {
    // Naive recursive Fibonacci in the DSL; fib(30) == 832040.
    print_evaled_str(
        r"|> ; ^> -1
        [
        |# fib |\ |% ; [n]
        ^> -1 [
        |? |> ; |! |> ; |@ ; eq [|> ; |@ ; n 0]
        |: 0
        |> ; |? |> ; |! |> ; |@ ; eq [|> ; |@ ; n 1]
        |: 1
        |> ; |! |> ; |@ ; plus [
        |> ; |! |> ; |@ ; fib [|> ; |! |> ; |@ ; minus [|> ; |@ ; n 2]]
        |> ; |! |> ; |@ ; fib [|> ; |! |> ; |@ ; minus [|> ; |@ ; n 1]]
        ]
        ]
        |! |> ; |@ ; fib [30]
        ]",
        r"832040"
    );
}

#[test]
#[ignore]
fn prim() {
    // Primitive applied to previously defined symbols: 1 <= 2 is ^T.
    print_evaled_str(
        r"|> ; ^> -1
        [
        |# x 1
        |# y 2
        |! |> ; |@ ; ltoreq [|> ; |@ ; x |> ; |@ ; y]
        ]",
        r"^T"
    );
}
// --- Text literals and syntax-sugar tests.

#[test]
// #[ignore]
fn text() {
    // Text literals round-trip with their surrounding quotes.
    print_str(r#""o58nkry drtse""#, r#""o58nkry drtse""#);
}

#[test]
// #[ignore]
fn evaled_text() {
    // Text evaluates to itself.
    print_evaled_str(r#""o58nkry drtse""#, r#""o58nkry drtse""#);
}

#[test]
// #[ignore]
fn evaled_text_in_list() {
    print_str(r#"["bbb" 0 "aaa"]"#, r#":("bbb" :(0 :("aaa" ;)))"#);
}

#[test]
fn access_in_list() {
    // "'>" is the evaluate-sugar form; access the first slot of the pair.
    print_evaled_str("'> |. |: 0 1 p", "0");
}

#[test]
fn single_quote() {
    // "'>" desugars to a ">" block with a unit environment.
    print_str("'> [a]", ">(; :(a ;))");
}

#[test]
fn resolve_syntax_sugar() {
    // "@sym" desugars to an evaluated resolve of sym.
    print_str("@abc", ">(; @(; abc))");
}

#[test]
#[ignore]
fn apply_syntax_sugar() {
    // "f! args" desugars to an application.
    print_syntax_sugar("abc! [1 2]", "'> |! abc [1 2]");
}

#[test]
#[ignore]
fn resolve_and_apply_syntax_sugar() {
    print_syntax_sugar("@abc! [1 3]", "'> |! '> '@ abc [1 3]");
}

#[test]
fn lpiq() {
    // Infix ":" builds a pair.
    print_syntax_sugar(r#"abc:"a""#, r#"|: abc "a""#);
}

#[test]
fn nested_lpiq() {
    // Infix ":" is right-associative.
    print_syntax_sugar(r#"abc:"a":1"#, r#"|: abc |: "a" 1"#);
}

#[test]
#[ignore]
fn accessor() {
    // Infix "." desugars to an evaluated access.
    print_syntax_sugar("a.b", "'> |. a b");
}

#[test]
#[ignore]
fn nested_accessor() {
    print_syntax_sugar("a.b.c", " '> |. '> |. a b c");
}

#[test]
// #[ignore]
fn print() {
    // The print primitive returns unit.
    print_evaled_str(r#"@print! ["a"]"#, ";");
}

#[test]
fn compare_texts() {
    print_evaled_str(r#"@eq! ["a" "b"]"#, "^F");
}

#[test]
fn concat_texts() {
    print_evaled_str(r#"@concat! ["a", "b"]"#, r#""ab""#);
}

#[test]
fn evaled_list_syntax_sugar() {
    // "^[...]" desugars to an evaluated list.
    // print_syntax_sugar("^[1 2 3]", "^> -1 [1 2 3]");
    print_syntax_sugar("^[@plus! [1 2]]", "^> ; [@plus! [1 2]]");
}

#[test]
fn quote() {
    // "||" quotes the following expression, leaving it unevaluated.
    print_evaled_str("|| a", "a");
}
|
<reponame>zhwchch/JOBridge
//
// JOClass.h
// JOBridge
//
// Created by Wei on 2018/9/12.
// Copyright © 2018年 Wei. All rights reserved.
//
#if __arm64__

#import <Foundation/Foundation.h>
#import <JavaScriptCore/JavaScriptCore.h>

#import "JODefs.h"

// Looks up the JS implementation registered for `selectorName` on `class`.
// NOTE(review): presumably returns nil when no JS method is registered —
// confirm against the implementation in JOClass.m.
JOEXTERN JOINLINE JSValue *JOSearchJsMethod(Class class, NSString *selectorName);

// Builds/extends the Objective-C class named by `className` from the JS-side
// description: `properties`, instance `classMethods` and `metaClassMethods`
// (class-level methods). NOTE(review): exact registration semantics live in
// the implementation file — verify before relying on this summary.
JOEXTERN void JOClassParser(JSValue *className, JSValue *properties, JSValue *classMethods, JSValue *metaClassMethods);

#endif
|
def create_fitted_pulse_heights_file(pulse_height_dir, panel_info, norm_dir, mass_obj_func):
    """Combine per-run pulse-height metrics and fit an MPH curve for each mass.

    Args:
        pulse_height_dir: directory holding the per-run ``pulse_heights``
            metric files to combine.
        panel_info: DataFrame with a ``Mass`` column listing the masses to fit.
        norm_dir: output directory; per-mass curve fits are written to
            ``norm_dir/curve_fits``.
        mass_obj_func: objective function forwarded to ``fit_mass_mph_curve``.

    Returns:
        DataFrame of combined pulse heights, sorted in natural FOV order and
        augmented with fitted MPH values.
    """
    masses = panel_info['Mass'].values

    # Directory for the per-mass curve fits; tolerate re-runs of the pipeline
    # instead of crashing when the directory already exists.
    fit_dir = os.path.join(norm_dir, 'curve_fits')
    os.makedirs(fit_dir, exist_ok=True)

    # Merge all per-run pulse-height metric files into a single CSV, then load it.
    combine_run_metrics(run_dir=pulse_height_dir, substring='pulse_heights')
    pulse_height_df = pd.read_csv(os.path.join(pulse_height_dir, 'pulse_heights_combined.csv'))

    # Sort FOVs naturally (fov-2 before fov-10) via an ordered Categorical.
    ordering = ns.natsorted(pulse_height_df['fov'].unique())
    pulse_height_df['fov'] = pd.Categorical(pulse_height_df['fov'],
                                            ordered=True,
                                            categories=ordering)
    pulse_height_df = pulse_height_df.sort_values('fov')

    # Fit and persist one curve per mass.
    for mass in masses:
        mph_vals = pulse_height_df.loc[pulse_height_df['mass'] == mass, 'pulse_height'].values
        fit_mass_mph_curve(mph_vals=mph_vals, mass=mass, save_dir=fit_dir,
                           obj_func=mass_obj_func)

    # Evaluate the saved fits to add per-FOV fitted MPH values.
    pulse_height_df = create_fitted_mass_mph_vals(pulse_height_df=pulse_height_df,
                                                  obj_func_dir=fit_dir)

    return pulse_height_df
<filename>ReviewServiceFunction/src/main/java/com/igp/reviewservice/dao/ReviewServiceDaoImpl.java
package com.igp.reviewservice.dao;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBDeleteExpression;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBSaveExpression;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ExpectedAttributeValue;
import com.igp.reviewservice.config.DynamoDBConfig;
import com.igp.reviewservice.dao.ReviewCrudDao;
import com.igp.reviewservice.model.Review;
import java.util.HashMap;
import java.util.Map;
public class ReviewServiceDaoImpl implements ReviewCrudDao {
private DynamoDBMapper dynamoDBMapper;
public ReviewServiceDaoImpl() {
this.dynamoDBMapper = DynamoDBConfig.dynamoDBMapper();
}
@Override
public Review createReview(Review review) {
dynamoDBMapper.save(review);
return review;
}
@Override
public Review readReview(String reviewId) {
return dynamoDBMapper.load(Review.class, reviewId);
}
@Override
public Review updateReview(Review review) {
Map<String, ExpectedAttributeValue> expectedAttributeValueMap = new HashMap<>();
expectedAttributeValueMap.put("reviewId", new ExpectedAttributeValue(new AttributeValue().withS(review.getReviewId())));
DynamoDBSaveExpression saveExpression = new DynamoDBSaveExpression().withExpected(expectedAttributeValueMap);
dynamoDBMapper.save(review, saveExpression);
return review;
}
@Override
public void deleteReview(String reviewId) {
Map<String, ExpectedAttributeValue> expectedAttributeValueMap = new HashMap<>();
expectedAttributeValueMap.put("reviewId", new ExpectedAttributeValue(new AttributeValue().withS(reviewId)));
DynamoDBDeleteExpression deleteExpression = new DynamoDBDeleteExpression().withExpected(expectedAttributeValueMap);
Review review = Review.builder()
.reviewId(reviewId)
.build();
dynamoDBMapper.delete(review, deleteExpression);
}
} |
#include <cstdio>
#include <cstring>
#include <queue>
#include <algorithm>
#include <iostream>
#include <cmath>
#include <map>
#include <vector>
#include <set>
#include <string>
#define PB push_back
#define FT first
#define SD second
#define MP make_pair
#define INF (0x3f3f3f3f)*2
using namespace std;
typedef long long LL;
typedef pair<int,int> P;
const int N=1e5,maxn=5+1e5,MOD=7+1e9;
int a[maxn],vis[maxn];   // a: input values (1-based); vis[x]: value x already expanded
vector<P> num[maxn];     // num[x]: (reachable value, operation count) pairs starting from x

// Order pairs by reachable value so lookups can stop once past the target.
bool cmp(P a,P b){ return a.FT<b.FT; }

// Enumerate values reachable from x by repeated doubling (*2) and halving
// (/2, truncating), recording the operation count for each; results are
// appended to num[x] and sorted by value.
// NOTE(review): the same value can be recorded with different costs, and
// std::sort (not stable) orders only by value — the first match taken in
// main() is not guaranteed to be the cheapest. Verify against the intended
// algorithm.
void deal(int x)
{
    int now=x,cnt=0;
    // Doubling chain: x, 2x, 4x, ... while <= N.
    while(now<=N){
        num[x].PB(MP(now,cnt));
        now*=2,cnt++;
    }
    now=x,cnt=0;
    // Halving chain; when an odd value was halved (bit lost), the truncated
    // result can be doubled again to reach values not on the original chain.
    while(now){
        bool fg=(now&1);
        cnt++,now/=2;
        int tcnt=cnt,tmp=now;
        if(fg&&tmp!=0){
            while(tmp<=N){
                num[x].PB(MP(tmp,tcnt));
                tmp*=2,tcnt++;
            }
        }
        else {
            num[x].PB(MP(now,cnt));
        }
    }
    sort(num[x].begin(), num[x].end(),cmp);
    //for(int i=0;i<num[x].size();i++)
    //    printf("%d-->%d\n",num[x][i].FT,num[x][i].SD);
}
// Reads n numbers, expands each distinct value once via deal(), then tries
// every value reachable from a[1] as the common target: for each candidate,
// sums the operation counts needed by every other element to reach it and
// prints the minimum total.
// NOTE(review): ans stays INF and is printed as-is when no common target
// exists — presumably the input guarantees one; confirm.
int main()
{
    //freopen("E:/ACM_code/in.txt","r",stdin);
    //freopen("out.txt","w",stdout);
    int n;
    scanf("%d",&n);
    for(int i=1;i<=n;i++){
        scanf("%d",&a[i]);
    }
    // Expand each distinct input value exactly once.
    for(int i=1;i<=n;i++){
        int x=a[i];
        if(vis[x]) continue;
        vis[x]=1;
        deal(x);
    }
    int ans=INF;
    int sz=num[a[1]].size();
    // NOTE(review): iteration starts at 1, skipping num[a[1]][0] — confirm
    // the first (smallest) candidate is intentionally excluded.
    for(int i=1;i<sz;i++){
        int aim=num[a[1]][i].FT;   // candidate common target
        int tmp=num[a[1]][i].SD;   // cost for a[1] to reach it
        //cout<<aim<<" "<<tmp<<endl;
        bool check=1;
        for(int j=2;j<=n;j++){
            int now=a[j];
            bool fg=0;
            int s=num[now].size();
            // Linear scan of the value-sorted pair list; early-exit past aim.
            for(int k=0;k<s;k++){
                if(num[now][k].FT==aim){
                    fg=1;
                    tmp+=num[now][k].SD;
                    break;
                }
                else if(num[now][k].FT>aim){
                    break;
                }
            }
            if(!fg){
                check=0;
                break;
            }
        }
        if(check) ans=min(ans,tmp);
    }
    printf("%d\n",ans);
    //system("pause");
    return 0;
}
|
/**
* Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
* Simple dependencies are satisfied by calling this method on all dependent packages before doing anything else.
* This method drives initialization for interdependent packages directly, in parallel with this package, itself.
* <p>
* Of this package and its interdependencies, all packages which have not yet been registered by their URI values
* are first created and registered. The packages are then initialized in two steps: meta-model objects for all of
* the packages are created before any are initialized, since one package's meta-model objects may refer to those of
* another.
* <p>
* Invocation of this method will not affect any packages that have already been initialized. <!-- begin-user-doc
* --> <!-- end-user-doc -->
*
* @see #eNS_URI
* @see #createPackageContents()
* @see #initializePackageContents()
* @generated
*/
public static EmittersPoolPackage init() {
    // Fast path: the package singleton has already been initialized.
    if (isInited)
        return (EmittersPoolPackage) EPackage.Registry.INSTANCE.getEPackage(EmittersPoolPackage.eNS_URI);

    // Obtain, or create and register, the package implementation.
    EmittersPoolPackageImpl theEmittersPoolPackage = (EmittersPoolPackageImpl) (EPackage.Registry.INSTANCE
            .getEPackage(eNS_URI) instanceof EmittersPoolPackageImpl ? EPackage.Registry.INSTANCE
            .getEPackage(eNS_URI) : new EmittersPoolPackageImpl());

    // Mark as initialized before building contents to break re-entrant cycles.
    isInited = true;

    // Initialize simple dependencies.
    XMLTypePackage.eINSTANCE.eClass();

    // Create package meta-data objects.
    theEmittersPoolPackage.createPackageContents();

    // Initialize created meta-data.
    theEmittersPoolPackage.initializePackageContents();

    // Mark the meta-data to indicate it can't be changed.
    theEmittersPoolPackage.freeze();

    return theEmittersPoolPackage;
}
/* Generic intrusive doubly linked list.
 *
 * Fixes vs. previous revision:
 *  - The struct definitions previously read `struct list_node_t {...} list_node_t;`,
 *    which DEFINED global variables named list_node_t / list_t in every
 *    translation unit including this header (multiple-definition hazard at
 *    link time).  The stray variable names are removed.
 *  - <stddef.h> and <stdio.h> are now included for size_t and FILE, which
 *    this header uses but did not previously guarantee were declared.
 *  - The include guard __LIST__ used a reserved identifier (leading double
 *    underscore); renamed to LIST_H_.
 */
#ifndef LIST_H_
#define LIST_H_

#include <stddef.h> /* size_t */
#include <stdint.h> /* uint8_t */
#include <stdio.h>  /* FILE */

typedef uint8_t byte_t;

typedef struct list_node_t* list_node_ptr;
typedef struct list_t* list_ptr;

/* Doubly linked node; the element payload is stored inline in the
   flexible array member. */
struct list_node_t {
  list_node_ptr prev_;
  list_node_ptr next_;
  byte_t data_[];
};

/* List header: end pointers, bookkeeping sizes and per-element callbacks. */
struct list_t {
  list_node_ptr head_;
  list_node_ptr tail_;
  size_t size_;        /* number of elements currently stored */
  size_t data_size_;   /* size in bytes of one element */
  int (*list_cmp_func_)(void*, void*);     /* element comparison */
  void (*list_print_func_)(void*, FILE*);  /* element printer */
  void (*list_delete_func_)(void*);        /* element destructor */
};

/* Creates a new list given a type plus compare, print and delete functions */
#define list_create(t, cf, pf, df) __list_create(sizeof(t), cf, pf, df)
list_ptr __list_create(size_t, int (*)(void*, void*),
                       void (*)(void*, FILE*), void (*)(void*));

/* Deletes list */
void list_clear(list_ptr);

/* Deletes a given node */
void list_remove(list_ptr*, list_node_ptr);

/* Adds an element to the beginning */
void list_push_front(list_ptr*, void*);

/* Adds an element to the end */
void list_push_back(list_ptr*, void*);

/* Adds an element accordingly to the compare function */
void list_sorted_insert(list_ptr*, void*);

/* Removes and returns the first element */
list_node_ptr list_pop_front(list_ptr*);

/* Removes and returns the last element */
list_node_ptr list_pop_back(list_ptr*);

/* Access the first element */
list_node_ptr list_front(list_ptr);

/* Access the last element */
list_node_ptr list_back(list_ptr);

/* Access the n-th element */
list_node_ptr list_get(list_ptr, int);

/* Find the element equals a given one */
list_node_ptr list_find(list_ptr, void*);

/* Returns the size of the list */
size_t list_size(list_ptr);

/* Prints the list from the beginning to the end */
void list_print(list_ptr, FILE*);

/* Prints the list from the end to the beginning */
void list_print_reverse(list_ptr, FILE*);

#endif
/*
* Copyright 2018 JDCLOUD.COM
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*
*
*
* Contact:
*
* NOTE: This class is auto generated by the jdcloud code generator program.
*/
package com.jdcloud.sdk.service.iotcloudgateway.model;
import com.jdcloud.sdk.annotation.Required;
/**
 * instanceConfig
 *
 * <p>Configuration of a single IoT cloud-gateway service instance (connection
 * endpoints, product credentials, protocol, storage and auth specs).  This
 * class is auto generated by the jdcloud code generator (see file header);
 * the original Chinese field documentation has been translated to English.
 */
public class InstanceConfig implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Service id
     * Required:true
     */
    @Required
    private String serviceId;

    /**
     * Product key
     * Required:true
     */
    @Required
    private String productKey;

    /**
     * Product secret
     * Required:true
     */
    @Required
    private String productSecret;

    /**
     * Supported protocol (jt808, gbt32960)
     * Required:true
     */
    @Required
    private String protocol;

    /**
     * Hub MQTT address
     * Required:true
     */
    @Required
    private String mqttAddr;

    /**
     * Hub HTTP address
     * Required:true
     */
    @Required
    private String httpAddr;

    /**
     * Redis address
     * Required:true
     */
    @Required
    private String redisAddr;

    /**
     * Local multimedia storage path
     * Required:true
     */
    @Required
    private String mediaStore;

    /**
     * Gateway device ID
     * Required:true
     */
    @Required
    private String gwdevId;

    /**
     * Associated OSS configuration
     */
    private OssSpec ossSpec;

    /**
     * Associated DW authentication configuration
     */
    private DwAuthSpec dwAuthSpec;

    /**
     * get the service id
     *
     * @return the service id
     */
    public String getServiceId() {
        return serviceId;
    }

    /**
     * set the service id
     *
     * @param serviceId the service id
     */
    public void setServiceId(String serviceId) {
        this.serviceId = serviceId;
    }

    /**
     * get the product key
     *
     * @return the product key
     */
    public String getProductKey() {
        return productKey;
    }

    /**
     * set the product key
     *
     * @param productKey the product key
     */
    public void setProductKey(String productKey) {
        this.productKey = productKey;
    }

    /**
     * get the product secret
     *
     * @return the product secret
     */
    public String getProductSecret() {
        return productSecret;
    }

    /**
     * set the product secret
     *
     * @param productSecret the product secret
     */
    public void setProductSecret(String productSecret) {
        this.productSecret = productSecret;
    }

    /**
     * get the supported protocol (jt808, gbt32960)
     *
     * @return the protocol name
     */
    public String getProtocol() {
        return protocol;
    }

    /**
     * set the supported protocol (jt808, gbt32960)
     *
     * @param protocol the protocol name
     */
    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }

    /**
     * get the hub MQTT address
     *
     * @return the hub MQTT address
     */
    public String getMqttAddr() {
        return mqttAddr;
    }

    /**
     * set the hub MQTT address
     *
     * @param mqttAddr the hub MQTT address
     */
    public void setMqttAddr(String mqttAddr) {
        this.mqttAddr = mqttAddr;
    }

    /**
     * get the hub HTTP address
     *
     * @return the hub HTTP address
     */
    public String getHttpAddr() {
        return httpAddr;
    }

    /**
     * set the hub HTTP address
     *
     * @param httpAddr the hub HTTP address
     */
    public void setHttpAddr(String httpAddr) {
        this.httpAddr = httpAddr;
    }

    /**
     * get the Redis address
     *
     * @return the Redis address
     */
    public String getRedisAddr() {
        return redisAddr;
    }

    /**
     * set the Redis address
     *
     * @param redisAddr the Redis address
     */
    public void setRedisAddr(String redisAddr) {
        this.redisAddr = redisAddr;
    }

    /**
     * get the local multimedia storage path
     *
     * @return the local multimedia storage path
     */
    public String getMediaStore() {
        return mediaStore;
    }

    /**
     * set the local multimedia storage path
     *
     * @param mediaStore the local multimedia storage path
     */
    public void setMediaStore(String mediaStore) {
        this.mediaStore = mediaStore;
    }

    /**
     * get the gateway device ID
     *
     * @return the gateway device ID
     */
    public String getGwdevId() {
        return gwdevId;
    }

    /**
     * set the gateway device ID
     *
     * @param gwdevId the gateway device ID
     */
    public void setGwdevId(String gwdevId) {
        this.gwdevId = gwdevId;
    }

    /**
     * get the associated OSS configuration
     *
     * @return the associated OSS configuration
     */
    public OssSpec getOssSpec() {
        return ossSpec;
    }

    /**
     * set the associated OSS configuration
     *
     * @param ossSpec the associated OSS configuration
     */
    public void setOssSpec(OssSpec ossSpec) {
        this.ossSpec = ossSpec;
    }

    /**
     * get the associated DW authentication configuration
     *
     * @return the associated DW authentication configuration
     */
    public DwAuthSpec getDwAuthSpec() {
        return dwAuthSpec;
    }

    /**
     * set the associated DW authentication configuration
     *
     * @param dwAuthSpec the associated DW authentication configuration
     */
    public void setDwAuthSpec(DwAuthSpec dwAuthSpec) {
        this.dwAuthSpec = dwAuthSpec;
    }

    /**
     * set the service id (fluent)
     *
     * @param serviceId the service id
     */
    public InstanceConfig serviceId(String serviceId) {
        this.serviceId = serviceId;
        return this;
    }

    /**
     * set the product key (fluent)
     *
     * @param productKey the product key
     */
    public InstanceConfig productKey(String productKey) {
        this.productKey = productKey;
        return this;
    }

    /**
     * set the product secret (fluent)
     *
     * @param productSecret the product secret
     */
    public InstanceConfig productSecret(String productSecret) {
        this.productSecret = productSecret;
        return this;
    }

    /**
     * set the supported protocol (jt808, gbt32960) (fluent)
     *
     * @param protocol the protocol name
     */
    public InstanceConfig protocol(String protocol) {
        this.protocol = protocol;
        return this;
    }

    /**
     * set the hub MQTT address (fluent)
     *
     * @param mqttAddr the hub MQTT address
     */
    public InstanceConfig mqttAddr(String mqttAddr) {
        this.mqttAddr = mqttAddr;
        return this;
    }

    /**
     * set the hub HTTP address (fluent)
     *
     * @param httpAddr the hub HTTP address
     */
    public InstanceConfig httpAddr(String httpAddr) {
        this.httpAddr = httpAddr;
        return this;
    }

    /**
     * set the Redis address (fluent)
     *
     * @param redisAddr the Redis address
     */
    public InstanceConfig redisAddr(String redisAddr) {
        this.redisAddr = redisAddr;
        return this;
    }

    /**
     * set the local multimedia storage path (fluent)
     *
     * @param mediaStore the local multimedia storage path
     */
    public InstanceConfig mediaStore(String mediaStore) {
        this.mediaStore = mediaStore;
        return this;
    }

    /**
     * set the gateway device ID (fluent)
     *
     * @param gwdevId the gateway device ID
     */
    public InstanceConfig gwdevId(String gwdevId) {
        this.gwdevId = gwdevId;
        return this;
    }

    /**
     * set the associated OSS configuration (fluent)
     *
     * @param ossSpec the associated OSS configuration
     */
    public InstanceConfig ossSpec(OssSpec ossSpec) {
        this.ossSpec = ossSpec;
        return this;
    }

    /**
     * set the associated DW authentication configuration (fluent)
     *
     * @param dwAuthSpec the associated DW authentication configuration
     */
    public InstanceConfig dwAuthSpec(DwAuthSpec dwAuthSpec) {
        this.dwAuthSpec = dwAuthSpec;
        return this;
    }
}
The Sacramento Kings stunned the basketball world by firing coach Michael Malone late Sunday. Malone went 39-67 at the helm of the Kings, but had Sacramento off to a surprising 9-6 start against tough competition this season before the team's superstar center caught viral meningitis. The Kings are now 11-13 and a playoff run is looking bleak.
Malone wasn't on anyone's Coach of the Year shortlist, but this firing still comes off as erratic and unreasonable. Here are five reasons why.
1. Malone had DeMarcus Cousins playing at an All-Star level
Cousins is the superstar mentioned above -- he's missed nine straight games due to his illness, though he should be back soon. In 15 games this season, Cousins has averaged 23.5 points on 51 percent shooting with 12.6 rebounds and 2.4 assists. Last season, Cousins averaged 22.7 points, 11.7 rebounds and shot 49 percent. In neither season has Cousins had a single locker room incident or on-court run-in. Cousins has been a completely different player under Malone than he was with Paul Westphal or Keith Smart. Given Cousins' importance to the Kings' hopes of not being hopeless for another decade, shouldn't that matter?
Of course, Cousins himself deserves most of the credit for his rise. Owner Vivek Ranadivé and general manager Pete D'Alessandro would probably consider their arrival and the royal jelly (and huge contract) heaped on Cousins in 2013 to be as important as Malone's coaching. But dumping Malone is still a huge risk. Cousins has had three pro coaches and clashed with two. Why be hasty in canning the exception?
2. The Kings' problems are mostly because of roster issues
Sacramento's woes during Cousins' absence make this point obvious: the overall roster is uneven. Cousins is the only star. Rudy Gay has been much better in Sacramento than in Toronto or Memphis, but he's still a high-volume, mid-range shooter whose defense isn't usually great. Darren Collison has overachieved this season and Ben McLemore is showing signs of a breakout. But right now the Kings roster has one superstar, one obvious eight-figure starter and about 13 role players. (Cousins also happens to be the only obvious plus defender on the roster, too. Reggie Evans' rebounding and energy are awesome, but he's not really a stopper in any sense.)
Malone didn't put together this roster. D'Alessandro and Vivek's adviser Chris Mullin did. (Mullin is basically D'Alessandro's mentor.) So if the roster is the problem, why is the coach getting fired? Because that's how it works in the NBA. The chefs blame the line cooks.
3. Management's stylistic desires are completely unrealistic
A big reason unnamed sources are giving for the decision to fire Malone is his style doesn't mesh with what Vivek and D'Alessandro envisioned. Vivek wants a Spursian system of ball movement and shooting with Jason Thompson, Collison, Gay and Nik Stauskas in the roles of Boris Diaw, Tony Parker, Kawhi Leonard and Manu Ginobili, apparently.
Yet, this roster, starting with Boogie, isn't built for that type of play. You can only build a house with the bricks you're given. Cousins is perhaps the only player on the roster who is a plus passer at his position. McLemore and maybe Collison are good shooters for their positions. That. Is. It. There's just nothing Spursian about the roster. How is Malone realistically supposed to mold this into that?
Malone has been trying to win with what he has, which means a ton of post play, slashing and not so much in the way of passing. Malone's rep is as a defensive coach. In fact, that was Vivek's biggest endorsement back when he hired Malone in June 2013, that he'd bring a defensive identity to the Kings. He hasn't -- and that's the biggest indictment of him as a coach, to be honest -- but this feels like the goalposts are being moved.
4. The West is an impossible puzzle without another star
The idea that Malone is holding the Kings back from playoff contention is ludicrous. The West itself is holding the Kings back. Frankly, it was stunning that Malone had the Kings at 5-1 and 9-6 against such a tough, West-heavy schedule. The Kings were in the conversation one month, which is a month longer than usual. The only reason they dropped out of the conversation is Boogie's illness.
But even if the coaching this season had been perfect -- which it hasn't -- the Kings would be an unlikely playoff team simply because of math. Seven teams in the West have a record of 17-8 or better. The Thunder have gone 6-1 since Kevin Durant's return and now seem like a playoff lock. Does management really think Malone should have this team -- even if fully healthy -- at .700? What flavor of crazy is that? The idea that this roster should be in the playoff conversation more than it already has been is delusional.
5. Ty Corbin is not the answer
The Kings elevated Corbin from associate head coach to interim head coach, and reports suggest they'll give him a legitimate shot at keeping the job. Never mind that Corbin was in charge of the offense that Vivek and D'Alessandro didn't like and Corbin was completely uninspiring last season at the helm of the Jazz.
If the roster is the problem, why is the coach getting fired? Because that's how it works in the NBA. The chefs blame the line cooks.
The Kings tried to hire Alvin Gentry as Malone's lead assistant in the offseason, but he joined the Warriors' All-Star staff. (Gentry, for what it's worth, is exactly the coach Vivek and D'Alessandro want: an up-tempo people person who is gold wherever he works.) Eventually Malone hired Corbin, though there seems to have been some bristling by Malone at management's insistence he hire a top-flight assistant and by management that Malone wasn't copacetic with D'Alessandro picking a coach for him.
Regardless, the Kings now have Corbin in the lead chair. Does anyone out there think this is an upgrade in the short or long term? Is there any chance that if this were the offseason, Corbin would even make Sacramento's short list? No offense to a smart man with good character, but if you're going to make a coaching change, why now?
Unless ...
George Karl is available:
The well-traveled coach has history with D'Alessandro from their time together in Denver and Karl's teams are known for their up-tempo offenses, which would seem to fit management's requirements.
Still, if the plan is to nab Karl before someone else can -- and there are no indications any other coach is on an immediate hot seat with the exception of Monty Williams -- what rationale is there to pull the trigger in December? Why not wait until Boogie returns (likely this week) and see how Malone carries the Kings through the rest of an easy December schedule? What's the upside?
In the end, this move puts pressure on D'Alessandro to upgrade the roster heavily by the trade deadline and on Vivek to hire the right coach, whether now or in the spring. He's 0-1. The honeymoon in Sacramento will last longer than most, but nothing is forever. |
<filename>src/containers/SelectStageScreen/__tests__/SelectStageScreen.tsx<gh_stars>0
/* eslint-disable max-lines */
import React from 'react';
import { fireEvent, flushMicrotasksQueue } from 'react-native-testing-library';
import { renderWithContext } from '../../../../testUtils';
import { getStages } from '../../../actions/stages';
import {
trackAction,
updateAnalyticsContext,
} from '../../../actions/analytics';
import { updatePersonGQL } from '../../../actions/person';
import {
selectMyStage,
selectPersonStage,
updateUserStage,
} from '../../../actions/selectStage';
import { Stage } from '../../../reducers/stages';
import { ACTIONS } from '../../../constants';
import { useAnalytics } from '../../../utils/hooks/useAnalytics';
import SelectStageScreen, { SelectStageNavParams } from '..';
// Mock native modules, action creators, navigation and shared UI components so
// the screen renders in isolation and dispatched actions can be asserted on.
jest.mock('react-native-device-info');
jest.mock('../../../actions/stages');
jest.mock('../../../actions/analytics');
jest.mock('../../../actions/selectStage');
jest.mock('../../../actions/navigation');
jest.mock('../../../actions/person');
jest.mock('../../../components/common', () => ({
  Text: 'Text',
  Button: 'Button',
}));
jest.mock('../../DeprecatedBackButton', () => 'DeprecatedBackButton');
jest.mock('../../../components/Header', () => 'Header');
jest.mock('../../../utils/hooks/useAnalytics');
jest.mock('../../../auth/authStore', () => ({ isAuthenticated: () => true }));

// Template stage; the fixtures below override id/name/description.
const baseStage: Stage = {
  id: '1',
  name: 'stage',
  description: 'description',
  self_followup_description: 'description',
  position: 1,
  icon_url: 'https://misisonhub.com', // NOTE(review): likely a typo for missionhub.com — harmless in tests
  localized_pathway_stages: [],
};

const stages: Stage[] = [
  {
    ...baseStage,
    id: '1',
    name: 'Stage 1',
    description: 'Stage 1 description',
  },
  {
    ...baseStage,
    id: '2',
    name: 'Stage 2',
    description: 'Stage 2 description',
  },
  {
    ...baseStage,
    id: '3',
    name: 'Stage 3',
    description: 'Stage 3 description',
  },
];

// Identifiers for the three person scenarios exercised by the tests:
// the current user, a person assigned to them, and an unassigned person.
const myId = '111';
const myName = 'Me';
const assignedPersonId = '123';
const assignedPersonName = 'Person';
const unassignedPersonId = '321';
const unassignedPersonName = 'Nosrep';
const orgId = '222';

const contactAssignmentId = '1';
const contactAssignment = {
  id: contactAssignmentId,
  organization: { id: orgId },
  assigned_to: { id: myId },
  pathway_stage_id: '1',
};

const mePerson = {
  id: myId,
  first_name: myName,
  organizational_permissions: [{ organization_id: orgId }],
  user: {
    pathway_stage_id: '1',
  },
};

const assignedPerson = {
  id: assignedPersonId,
  first_name: assignedPersonName,
  organizational_permissions: [{ organization_id: orgId }],
  reverse_contact_assignments: [contactAssignment],
};

const unassignedPerson = {
  id: unassignedPersonId,
  first_name: unassignedPersonName,
  organizational_permissions: [{ organization_id: orgId }],
  reverse_contact_assignments: [],
};

// Redux state shared by every test.
const state = {
  people: {
    people: {
      [myId]: mePerson,
      [assignedPersonId]: assignedPerson,
      [unassignedPersonId]: unassignedPerson,
    },
  },
  stages: { stages },
  onboarding: { currentlyOnboarding: false },
};

const baseParams = {
  personId: assignedPersonId,
  orgId,
  enableBackButton: true,
};

const next = jest.fn();
const onComplete = jest.fn();
const handleScreenChange = jest.fn();

// Sentinel action objects returned by the mocked action creators, used to
// assert on what was dispatched.
const trackActionResult = { type: 'track action' };
const updateAnalyticsContextResult = { type: 'updateAnalyticsContext' };
const getStagesResult = { type: 'get stages', response: stages };
const selectMyStageResult = { type: 'select my stage' };
const selectPersonStageResult = { type: 'select person stage' };
const updateUserStageResult = { type: 'update user stage' };
const nextResult = { type: 'next' };
// Install deterministic return values on every mocked action creator and hook
// so each test can assert dispatched actions by identity.
beforeEach(() => {
  (trackAction as jest.Mock).mockReturnValue(trackActionResult);
  (updateAnalyticsContext as jest.Mock).mockReturnValue(
    updateAnalyticsContextResult,
  );
  (getStages as jest.Mock).mockReturnValue(getStagesResult);
  (selectMyStage as jest.Mock).mockReturnValue(selectMyStageResult);
  (selectPersonStage as jest.Mock).mockReturnValue(selectPersonStageResult);
  (updateUserStage as jest.Mock).mockReturnValue(updateUserStageResult);
  (next as jest.Mock).mockReturnValue(nextResult);
  // Fix: this mock was previously installed twice (duplicated line); once is
  // sufficient and behaviorally identical.
  (useAnalytics as jest.Mock).mockReturnValue(handleScreenChange);
});
// Snapshot coverage for the basic render configurations. Each case also
// verifies that analytics tracking is registered but not auto-triggered.
describe('renders', () => {
  it('renders correctly without stages', () => {
    renderWithContext(<SelectStageScreen next={next} />, {
      initialState: {
        ...state,
        stages: { stages: [] },
      },
      navParams: baseParams,
    }).snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
  });

  it('renders correctly without back button', () => {
    renderWithContext(<SelectStageScreen next={next} />, {
      initialState: state,
      navParams: {
        ...baseParams,
        enableBackButton: false,
      },
    }).snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
  });

  it('renders correctly with question text', () => {
    renderWithContext(<SelectStageScreen next={next} />, {
      initialState: state,
      navParams: {
        ...baseParams,
        questionText: 'Question?',
      },
    }).snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
  });
});
// Rendering when the screen targets the current user ("me"). The GraphQL
// User mock resolves to myId so the screen treats the subject as the current
// user.
describe('renders for me', () => {
  const myNavParams = {
    ...baseParams,
    personId: myId,
  };

  it('renders correctly', async () => {
    const { snapshot } = renderWithContext(<SelectStageScreen next={next} />, {
      initialState: state,
      navParams: myNavParams,
      mocks: { User: () => ({ person: () => ({ id: myId }) }) },
    });
    // Let the mocked GraphQL query resolve before snapshotting.
    await flushMicrotasksQueue();
    snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
    // The initial screen-change event reports the first stage in the list.
    expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 1']);
  });

  it('renders firstItem correctly', async () => {
    const { snapshot } = renderWithContext(<SelectStageScreen next={next} />, {
      initialState: state,
      // selectedStageId snaps the carousel to the second stage (index 1).
      navParams: { ...myNavParams, selectedStageId: 1 },
      mocks: { User: () => ({ person: () => ({ id: myId }) }) },
    });
    await flushMicrotasksQueue();
    snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
    expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 2']);
  });
});
// Rendering when the subject is another (assigned) person; no GraphQL mock is
// needed because the current-user query result is not rendered here.
describe('renders for other', () => {
  const otherNavParams = {
    ...baseParams,
    personId: assignedPersonId,
  };

  it('renders correctly', () => {
    renderWithContext(<SelectStageScreen next={next} />, {
      initialState: state,
      navParams: otherNavParams,
    }).snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
    expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 1']);
  });

  it('renders firstItem correctly', () => {
    renderWithContext(<SelectStageScreen next={next} />, {
      initialState: state,
      // selectedStageId snaps the carousel to the second stage (index 1).
      navParams: { ...otherNavParams, selectedStageId: 1 },
    }).snapshot();

    expect(useAnalytics).toHaveBeenCalledWith('', {
      triggerTracking: false,
    });
    expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 2']);
  });
});
/**
 * Render SelectStageScreen with the given state/params, wait for pending
 * microtasks (mocked GraphQL resolution), and hand back the store plus the
 * testID query helper for further assertions.
 */
const buildAndTestMount = async (
  initialState: typeof state,
  navParams: SelectStageNavParams,
) => {
  const rendered = renderWithContext(<SelectStageScreen next={next} />, {
    initialState,
    navParams,
    mocks: { User: () => ({ person: () => ({ id: myId }) }) },
  });
  await flushMicrotasksQueue();
  const { store, getAllByTestId } = rendered;
  return { store, getAllByTestId };
};
// Behavior on mount: stages are fetched only when absent from Redux, and the
// carousel reports the initially-snapped stage through analytics.
describe('actions on mount', () => {
  const stageId = 0;

  describe('for me', () => {
    it('gets stages and snaps to first item on mount', async () => {
      const { store } = await buildAndTestMount(
        {
          ...state,
          stages: { stages: [] },
        },
        {
          ...baseParams,
          personId: myId,
          selectedStageId: stageId,
        },
      );

      expect(getStages).toHaveBeenCalledWith();
      expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 1']);
      expect(store.getActions()).toEqual([getStagesResult]);
    });

    it('gets stages and snaps to first item on mount in onboarding', async () => {
      const { store } = await buildAndTestMount(
        {
          ...state,
          stages: { stages: [] },
          onboarding: { currentlyOnboarding: true },
        },
        {
          ...baseParams,
          personId: myId,
          selectedStageId: stageId,
        },
      );

      expect(getStages).toHaveBeenCalledWith();
      expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 1']);
      expect(store.getActions()).toEqual([getStagesResult]);
    });
  });

  describe('for other', () => {
    it('gets stages and snaps to first item on mount', async () => {
      const { store } = await buildAndTestMount(
        {
          ...state,
          stages: { stages: [] },
        },
        {
          ...baseParams,
          personId: assignedPersonId,
          selectedStageId: stageId,
        },
      );

      expect(getStages).toHaveBeenCalledWith();
      expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 1']);
      expect(store.getActions()).toEqual([getStagesResult]);
    });
  });

  describe('stages are in Redux', () => {
    it('snaps to first item on mount without getting stages', async () => {
      const { store } = await buildAndTestMount(state, {
        ...baseParams,
        personId: myId,
        selectedStageId: stageId,
      });

      // Stages already cached: no fetch and nothing dispatched.
      expect(getStages).not.toHaveBeenCalled();
      expect(handleScreenChange).toHaveBeenCalledWith(['stage', 'stage 1']);
      expect(store.getActions()).toEqual([]);
    });
  });
});
// Stage-selection behavior. Pressing a stage button should dispatch the
// appropriate update action for the subject (me / other assigned to me /
// other not assigned to me), call `next`, and track the selection.
describe('setStage', () => {
  const selectedStageId = 0;
  const stage = stages[selectedStageId];
  // Set per-describe in beforeEach: which tracking action is expected.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let selectAction: any;

  // Mounts the screen, presses the stage button under test, and verifies the
  // `next` payload plus the tracking call shared by every variant below.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const buildAndTestSelect = async (navParams: any, nextProps: any) => {
    const { store, getAllByTestId } = await buildAndTestMount(state, navParams);

    await fireEvent.press(getAllByTestId('stageSelectButton')[selectedStageId]);

    expect(next).toHaveBeenCalledWith(nextProps);
    expect(trackAction).toHaveBeenCalledWith(selectAction.name, {
      [selectAction.key]: stage.id,
      [ACTIONS.STAGE_SELECTED.key]: null,
    });

    return { store };
  };

  describe('for me', () => {
    beforeEach(() => {
      selectAction = ACTIONS.SELF_STAGE_SELECTED;
    });

    it('selects new stage', async () => {
      const navParams = {
        ...baseParams,
        personId: myId,
      };
      const nextProps = {
        isAlreadySelected: false,
        isMe: true,
        personId: myId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      // Selecting a new stage dispatches the update and refreshes GraphQL.
      expect(selectMyStage).toHaveBeenCalledWith(stage.id);
      expect(updatePersonGQL).toHaveBeenCalledWith(myId);
      expect(store.getActions()).toEqual([
        selectMyStageResult,
        nextResult,
        trackActionResult,
      ]);
    });

    it('selects already selected stage', async () => {
      const navParams = {
        ...baseParams,
        personId: myId,
        selectedStageId,
      };
      const nextProps = {
        isAlreadySelected: true,
        isMe: true,
        personId: myId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      // Re-selecting the current stage skips the update entirely.
      expect(selectMyStage).not.toHaveBeenCalled();
      expect(updatePersonGQL).not.toHaveBeenCalled();
      expect(store.getActions()).toEqual([nextResult, trackActionResult]);
    });
  });

  describe('for other assigned to me', () => {
    beforeEach(() => {
      selectAction = ACTIONS.PERSON_STAGE_SELECTED;
    });

    it('selects new stage', async () => {
      const navParams = {
        ...baseParams,
        personId: assignedPersonId,
      };
      const nextProps = {
        isAlreadySelected: false,
        isMe: false,
        personId: assignedPersonId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      // Assigned contacts are updated through their contact assignment.
      expect(updateUserStage).toHaveBeenCalledWith(
        contactAssignmentId,
        stage.id,
      );
      expect(updatePersonGQL).toHaveBeenCalledWith(assignedPersonId);
      expect(store.getActions()).toEqual([
        updateUserStageResult,
        nextResult,
        trackActionResult,
      ]);
    });

    it('selects already selected stage', async () => {
      const navParams = {
        ...baseParams,
        personId: assignedPersonId,
        selectedStageId,
      };
      const nextProps = {
        isAlreadySelected: true,
        isMe: false,
        personId: assignedPersonId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      expect(updateUserStage).not.toHaveBeenCalled();
      expect(updatePersonGQL).not.toHaveBeenCalled();
      expect(store.getActions()).toEqual([nextResult, trackActionResult]);
    });

    it('selects already selected stage with skip select steps', async () => {
      const navParams = {
        ...baseParams,
        personId: assignedPersonId,
        selectedStageId,
        skipSelectSteps: true,
      };
      // skipSelectSteps is forwarded through to `next` untouched.
      const nextProps = {
        isAlreadySelected: true,
        skipSelectSteps: true,
        isMe: false,
        personId: assignedPersonId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      expect(updateUserStage).not.toHaveBeenCalled();
      expect(updatePersonGQL).not.toHaveBeenCalled();
      expect(store.getActions()).toEqual([nextResult, trackActionResult]);
    });

    it('selects new stage for edit screen', async () => {
      const navParams = {
        ...baseParams,
        personId: assignedPersonId,
        onComplete,
      };

      const { store, getAllByTestId } = await buildAndTestMount(
        state,
        navParams,
      );

      await fireEvent.press(
        getAllByTestId('stageSelectButton')[selectedStageId],
      );

      // With an onComplete callback, both the callback and `next` fire.
      expect(next).toHaveBeenCalled();
      expect(onComplete).toHaveBeenCalledWith(stages[selectedStageId]);
      expect(trackAction).toHaveBeenCalledWith(selectAction.name, {
        [selectAction.key]: stage.id,
        [ACTIONS.STAGE_SELECTED.key]: null,
      });
      expect(updateUserStage).toHaveBeenCalled();
      expect(updatePersonGQL).toHaveBeenCalledWith(assignedPersonId);
      expect(store.getActions()).toEqual([
        updateUserStageResult,
        nextResult,
        trackActionResult,
      ]);
    });
  });

  describe('for other not assigned to me', () => {
    beforeEach(() => {
      selectAction = ACTIONS.PERSON_STAGE_SELECTED;
    });

    it('selects new stage', async () => {
      const navParams = {
        ...baseParams,
        personId: unassignedPersonId,
      };
      const nextProps = {
        isAlreadySelected: false,
        isMe: false,
        personId: unassignedPersonId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      // Unassigned contacts go through selectPersonStage, which also creates
      // the assignment to the current user.
      expect(selectPersonStage).toHaveBeenCalledWith(
        unassignedPersonId,
        myId,
        stage.id,
        orgId,
      );
      expect(updatePersonGQL).toHaveBeenCalledWith(unassignedPersonId);
      expect(store.getActions()).toEqual([
        selectPersonStageResult,
        nextResult,
        trackActionResult,
      ]);
    });

    it('selects already selected stage', async () => {
      const navParams = {
        ...baseParams,
        personId: unassignedPersonId,
        selectedStageId,
      };
      const nextProps = {
        isAlreadySelected: true,
        isMe: false,
        personId: unassignedPersonId,
        stage: stage,
        orgId,
      };

      const { store } = await buildAndTestSelect(navParams, nextProps);

      expect(selectPersonStage).not.toHaveBeenCalled();
      expect(updatePersonGQL).not.toHaveBeenCalled();
      expect(store.getActions()).toEqual([nextResult, trackActionResult]);
    });
  });
});
|
Time-Varying Volatility Feedback of Energy Prices: Evidence from Crude Oil, Petroleum Products, and Natural Gas Using a TVP-SVM Model
In this paper, the time-varying volatility feedback of nine series of energy prices is investigated by employing the time-varying parameter stochastic volatility in mean (TVP-SVM) model. The major findings and conclusions can be grouped as follows: Significant differences exist in the time-varying volatility feedback among the nine major energy products. Specifically, crude oil and diesel price volatility has a remarkable positive time-varying effect on their returns. Yet the returns for natural gas and most petroleum products are negatively affected by their price volatility over time. Furthermore, obvious structural break features exist in the time-varying volatility feedback of energy prices, which coincide with the breakpoints in energy volatility. This indicates that factors such as major global economic and geopolitical events that cause sudden structural breaks in energy volatility may also affect the volatility feedback of energy prices. Moreover, the volatility feedback in energy prices becomes weak, and may even have no impact on energy returns, in special periods when energy price volatility is extremely high.
An Indiana homeowner set up a security camera to catch a thief stealing his Donald Trump yard signs — but he also captured a moment of Karma, as well.
The Indianapolis resident, who didn’t want to be identified, caught a woman trespassing and grabbing his signs on Thursday.
But it also caught her taking a spill when she didn’t realize the booby trapped sign was tied to the house.
Woman attempts to steal Trump yard signs from the front of a Central IN house but doesn’t make it very far. 6p @rtv6 pic.twitter.com/AwP1sr8wTv — Katie Heinz (@katieheinz6) September 2, 2016
With one sign in her right hand, she grabbed the second sign with her left hand. As she turned to run, the hidden fishing line went tight — and she went flying.
“I looked out my front door and the sign was lying about four feet over, and it was crumpled up pretty good,” Phillip told The Indy Channel of the vandalism.
The report noted the would-be thief “faceplanted.”
“Whether it’s a Bernie or Hillary or a Trump sign, you might disagree with whatever the politics are, but it’s not yours,” Phillip says. “Leave it alone.”
Meanwhile, a Hesperia, Michigan resident has several homemade signs in support of Trump — and against Hillary Clinton.
“Trump builds,” reads one, with a Rosie the Riveter-like image of Trump.
“Hillary Kills!” says another.
Even “Hillary Wins, America Loses.”
But Pawlik says they keep getting vandalized. The signs are constantly getting pelted with eggs, and one was broken.
He tells Fox 17 they’re not just targeting the signs, but his home, vehicles and other property, too.
He’s equipped some of the signs with touch-sensitive alarms, so a piercing noise is emitted if they move. He’s added motion sensors and security cameras to catch the culprits in the act, costing hundreds of dollars.
He’s even pitched a tent in his front yard and he and his wife are taking turns sleeping in it to protect the signs.
But Pawlik is undeterred.
“Every time they damage our signs, we build new ones,” Pawlik tells the news station.
“You might see more signs in the next week.” |
<filename>storage/interfaces.py
from zope.interface import Interface
class IStorage(Interface):
    """This interface defines database (storage) functions
    to persistently store component data.
    """

    def store(obj):
        """Stores the object in a storage."""

    def get(id):
        """Retrieve the object by its id from
        a storage."""
class ISQLiteStorage(IStorage):
    """Marker interface for an SQLite-backed IStorage implementation.

    Adds no methods beyond IStorage; used to select SQLite-specific
    adapters/components.
    """
    pass
class ISQLiteStorageAdapter:
    # NOTE(review): unlike the interfaces above, this class does not subclass
    # ``Interface`` — confirm whether that omission is intentional or whether
    # it should read ``class ISQLiteStorageAdapter(Interface):``.

    def store(storage):
        """Stores self in the storage.
        Return an identifier of the stored object.
        """

    def get(storage, id):
        """Constructs object from stored data identified
        by id"""
|
"""PyStan utility functions
These functions validate and organize data passed to and from the
classes and functions defined in the file `stan_fit.hpp` and wrapped
by the Cython file `stan_fit.pxd`.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2015, PyStan developers
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
#-----------------------------------------------------------------------------
# REF: rstan/rstan/R/misc.R
from __future__ import unicode_literals, division
from pystan._compat import PY2, string_types
from collections import OrderedDict
if PY2:
from collections import Callable, Iterable, Sequence
else:
from collections.abc import Callable, Iterable, Sequence
import inspect
import io
import itertools
import logging
import math
from numbers import Number
import os
import random
import re
import sys
import shutil
import tempfile
import time
import numpy as np
try:
from scipy.stats.mstats import mquantiles
except ImportError:
from pystan.external.scipy.mstats import mquantiles
import pystan.chains
import pystan._misc
from pystan.constants import (MAX_UINT, sampling_algo_t, optim_algo_t,
variational_algo_t, sampling_metric_t, stan_args_method_t)
logger = logging.getLogger('pystan')
def stansummary(fit, pars=None, probs=(0.025, 0.25, 0.5, 0.75, 0.975), digits_summary=2):
    """
    Summary statistic table.

    Parameters
    ----------
    fit : StanFit4Model object
    pars : str or sequence of str, optional
        Parameter names. By default use all parameters
    probs : sequence of float, optional
        Quantiles. By default, (0.025, 0.25, 0.5, 0.75, 0.975)
    digits_summary : int, optional
        Number of significant digits. By default, 2

    Returns
    -------
    summary : string
        Table includes mean, se_mean, sd, probs_0, ..., probs_n, n_eff and Rhat.

    Examples
    --------
    >>> model_code = 'parameters {real y;} model {y ~ normal(0,1);}'
    >>> m = StanModel(model_code=model_code, model_name="example_model")
    >>> fit = m.sampling()
    >>> print(stansummary(fit))
    Inference for Stan model: example_model.
    4 chains, each with iter=2000; warmup=1000; thin=1;
    post-warmup draws per chain=1000, total post-warmup draws=4000.

           mean se_mean     sd   2.5%    25%    50%    75%  97.5%  n_eff   Rhat
    y      0.01    0.03    1.0  -2.01  -0.68   0.02   0.72   1.97   1330    1.0
    lp__   -0.5    0.02   0.68  -2.44  -0.66  -0.24  -0.05-5.5e-4   1555    1.0

    Samples were drawn using NUTS at Thu Aug 17 00:52:25 2017.
    For each parameter, n_eff is a crude measure of effective sample size,
    and Rhat is the potential scale reduction factor on split chains (at
    convergence, Rhat=1).
    """
    if fit.mode == 1:
        return "Stan model '{}' is of mode 'test_grad';\n"\
               "sampling is not conducted.".format(fit.model_name)
    elif fit.mode == 2:
        return "Stan model '{}' does not contain samples.".format(fit.model_name)

    # Post-warmup draw count per chain.
    n_kept = [s - w for s, w in zip(fit.sim['n_save'], fit.sim['warmup2'])]

    header = "Inference for Stan model: {}.\n".format(fit.model_name)
    # Bug fix: format each fragment *before* appending. The previous code
    # called .format() on the accumulated header, re-scanning text already
    # substituted into it — a model name containing braces would be
    # re-interpreted as format fields. It also passed a spurious fifth
    # argument to a four-placeholder template.
    header += "{} chains, each with iter={}; warmup={}; thin={}; \n".format(
        fit.sim['chains'], fit.sim['iter'], fit.sim['warmup'], fit.sim['thin'])
    header += "post-warmup draws per chain={}, total post-warmup draws={}.\n\n".format(
        n_kept[0], sum(n_kept))

    footer = "\n\nSamples were drawn using {} at {}.\n"\
        "For each parameter, n_eff is a crude measure of effective sample size,\n"\
        "and Rhat is the potential scale reduction factor on split chains (at \n"\
        "convergence, Rhat=1)."
    sampler = fit.sim['samples'][0]['args']['sampler_t']
    date = fit.date.strftime('%c')  # %c is locale's representation
    footer = footer.format(sampler, date)

    s = _summary(fit, pars, probs)
    body = _array_to_table(s['summary'], s['summary_rownames'],
                           s['summary_colnames'], digits_summary)
    return header + body + footer
def _print_stanfit(fit, pars=None, probs=(0.025, 0.25, 0.5, 0.75, 0.975), digits_summary=2):
    """Deprecated alias for :func:`stansummary` (deprecated since PyStan 2.17.0)."""
    # Bug fix: ``DeprecationWarning`` was previously passed as a lazy-format
    # argument to ``logger.warning``; since the message contains no ``%s``
    # placeholder, ``msg % args`` raised a TypeError at emit time.
    logger.warning('Function `_print_stanfit` is deprecated and will be removed in a future version. '
                   'Use `stansummary` instead.')
    return stansummary(fit, pars=pars, probs=probs, digits_summary=digits_summary)
def _array_to_table(arr, rownames, colnames, n_digits):
    """Render a 2-D array as a right-aligned text table with row/column names.

    Example:
              mean se_mean  sd  2.5%   25%   50%   75% 97.5% n_eff Rhat
    beta[1,1]  0.0     0.0 1.0  -2.0  -0.7   0.0   0.7   2.0  4000    1
    beta[1,2]  0.0     0.0 1.0  -2.1  -0.7   0.0   0.7   2.0  4000    1
    beta[2,1]  0.0     0.0 1.0  -2.0  -0.7   0.0   0.7   2.0  4000    1
    beta[2,2]  0.0     0.0 1.0  -1.9  -0.6   0.0   0.7   2.0  4000    1
    lp__      -4.2     0.1 2.1  -9.4  -5.4  -3.8  -2.7  -1.2   317    1
    """
    assert arr.shape == (len(rownames), len(colnames))
    rownames_maxwidth = max(len(n) for n in rownames)
    max_col_width = 7
    min_col_width = 5
    # Initial column widths come from the header names, clamped between the
    # min/max bounds; they may grow below if a formatted value overflows.
    max_col_header_num_width = [max(max_col_width, max(len(n) + 1, min_col_width)) for n in colnames]
    rows = []
    for row in arr:
        row_nums = []
        for j, (num, width) in enumerate(zip(row, max_col_header_num_width)):
            if colnames[j] == "n_eff":
                # n_eff is displayed as an integer (NaN passes through as-is).
                num = int(round(num, 0)) if not np.isnan(num) else num
            num = _format_number(num, n_digits, max_col_width - 1)
            row_nums.append(num)
            # Widen the column on the fly when a value needs more space than
            # the header did; later rows use the widened value.
            if len(num) + 1 > max_col_header_num_width[j]:
                max_col_header_num_width[j] = len(num) + 1
        rows.append(row_nums)
    widths = [rownames_maxwidth] + max_col_header_num_width
    # Header row: empty cell over the row-name column, then column names.
    header = '{:>{width}}'.format('', width=widths[0])
    for name, width in zip(colnames, widths[1:]):
        header += '{name:>{width}}'.format(name=name, width=width)
    lines = [header]
    for rowname, row in zip(rownames, rows):
        # Row names are left-aligned; numbers are right-aligned.
        line = '{name:{width}}'.format(name=rowname, width=widths[0])
        for j, (num, width) in enumerate(zip(row, widths[1:])):
            line += '{num:>{width}}'.format(num=num, width=width)
        lines.append(line)
    return '\n'.join(lines)
def _number_width(n):
"""Calculate the width in characters required to print a number
For example, -1024 takes 5 characters. -0.034 takes 6 characters.
"""
return len(str(n))
def _format_number_si(num, n_signif_figures):
"""Format a number using scientific notation to given significant figures"""
if math.isnan(num) or math.isinf(num):
return str(num)
leading, exp = '{:E}'.format(num).split('E')
leading = round(float(leading), n_signif_figures - 1)
exp = exp[:1] + exp[2:] if exp[1] == '0' else exp
formatted = '{}e{}'.format(leading, exp.lstrip('+'))
return formatted
def _format_number(num, n_signif_figures, max_width):
    """Format a number as a string while obeying space constraints.

    `n_signif_figures` is the minimum number of significant figures expressed
    `max_width` is the maximum width in characters allowed
    """
    if max_width < 6:
        raise NotImplementedError("Guaranteed formatting in fewer than 6 characters not supported.")
    if math.isnan(num) or math.isinf(num):
        return str(num)
    # add 0.5 to prevent log(0) errors; only affects n_digits calculation for num > 0
    n_digits = lambda num: math.floor(math.log10(abs(num) + 0.5)) + 1
    if abs(num) > 10**-n_signif_figures and n_digits(num) <= max_width - n_signif_figures:
        # Fits as a plain decimal rounded to the requested precision; strip a
        # trailing '.' left over from truncation to max_width.
        return str(round(num, n_signif_figures))[:max_width].rstrip('.')
    elif _number_width(num) <= max_width:
        if n_digits(num) >= n_signif_figures:
            # the int() is necessary for consistency between Python 2 and 3
            return str(int(round(num)))
        else:
            return str(num)
    else:
        # Too wide for plain notation; fall back to scientific notation.
        return _format_number_si(num, n_signif_figures)
def _summary(fit, pars=None, probs=None, **kwargs):
    """Summarize samples (compute mean, SD, quantiles) in all chains.

    REF: stanfit-class.R summary method

    Parameters
    ----------
    fit : StanFit4Model object
    pars : str or sequence of str, optional
        Parameter names. By default use all parameters
    probs : sequence of float, optional
        Quantiles. By default, (0.025, 0.25, 0.5, 0.75, 0.975)

    Returns
    -------
    summaries : OrderedDict of array
        Array indexed by 'summary' has dimensions (num_params, num_statistics).
        Parameters are unraveled in *row-major order*. Statistics include: mean,
        se_mean, sd, probs_0, ..., probs_n, n_eff, and Rhat. Array indexed by
        'c_summary' breaks down the statistics by chain and has dimensions
        (num_params, num_statistics_c_summary, num_chains). Statistics for
        `c_summary` are the same as for `summary` with the exception that
        se_mean, n_eff, and Rhat are absent. Row names and column names are
        also included in the OrderedDict.
    """
    # mode 1 = test_grad run, mode 2 = no samples; neither can be summarized.
    if fit.mode == 1:
        msg = "Stan model {} is of mode 'test_grad'; sampling is not conducted."
        msg = msg.format(fit.model_name)
        raise ValueError(msg)
    elif fit.mode == 2:
        msg = "Stan model {} contains no samples.".format(fit.model_name)
        raise ValueError(msg)

    if fit.sim['n_save'] == fit.sim['warmup2']:
        msg = "Stan model {} contains no samples.".format(fit.model_name)
        raise ValueError(msg)

    # rstan checks for cached summaries here

    if pars is None:
        pars = fit.sim['pars_oi']
    elif isinstance(pars, string_types):
        pars = [pars]
    pars = _remove_empty_pars(pars, fit.sim['pars_oi'], fit.sim['dims_oi'])

    if probs is None:
        probs = (0.025, 0.25, 0.5, 0.75, 0.975)
    ss = _summary_sim(fit.sim, pars, probs)
    # TODO: include sem, ess and rhat: ss['ess'], ss['rhat']
    # All-chain table: mean, se_mean, sd, quantiles..., n_eff, Rhat.
    s1 = np.column_stack([ss['msd'][:, 0], ss['sem'], ss['msd'][:, 1], ss['quan'], ss['ess'], ss['rhat']])
    s1_rownames = ss['c_msd_names']['parameters']
    s1_colnames = ((ss['c_msd_names']['stats'][0],) + ('se_mean',) +
                   (ss['c_msd_names']['stats'][1],) + ss['c_quan_names']['stats'] +
                   ('n_eff', 'Rhat'))
    # Per-chain table: mean, sd, quantiles (no se_mean/n_eff/Rhat).
    s2 = _combine_msd_quan(ss['c_msd'], ss['c_quan'])
    s2_rownames = ss['c_msd_names']['parameters']
    s2_colnames = ss['c_msd_names']['stats'] + ss['c_quan_names']['stats']
    return OrderedDict(summary=s1, c_summary=s2,
                       summary_rownames=s1_rownames,
                       summary_colnames=s1_colnames,
                       c_summary_rownames=s2_rownames,
                       c_summary_colnames=s2_colnames)
def _combine_msd_quan(msd, quan):
"""Combine msd and quantiles in chain summary
Parameters
----------
msd : array of shape (num_params, 2, num_chains)
mean and sd for chains
cquan : array of shape (num_params, num_quan, num_chains)
quantiles for chains
Returns
-------
msdquan : array of shape (num_params, 2 + num_quan, num_chains)
"""
dim1 = msd.shape
n_par, _, n_chains = dim1
ll = []
for i in range(n_chains):
a1 = msd[:, :, i]
a2 = quan[:, :, i]
ll.append(np.column_stack([a1, a2]))
msdquan = np.dstack(ll)
return msdquan
def _summary_sim(sim, pars, probs):
    """Summarize chains together and separately

    REF: rstan/rstan/R/misc.R

    Parameters are unraveled in *column-major order*.

    Parameters
    ----------
    sim : dict
        dict from from a stanfit fit object, i.e., fit['sim']
    pars : Iterable of str
        parameter names
    probs : Iterable of probs
        desired quantiles

    Returns
    -------
    summaries : OrderedDict of array
        This dictionary contains the following arrays indexed by the keys
        given below:
        - 'msd' : array of shape (num_params, 2) with mean and sd
        - 'sem' : array of length num_params with standard error for the mean
        - 'c_msd' : array of shape (num_params, 2, num_chains)
        - 'quan' : array of shape (num_params, num_quan)
        - 'c_quan' : array of shape (num_params, num_quan, num_chains)
        - 'ess' : array of shape (num_params, 1)
        - 'rhat' : array of shape (num_params, 1)

    Note
    ----
    `_summary_sim` has the parameters in *column-major* order whereas `_summary`
    gives them in *row-major* order. (This follows RStan.)
    """
    # NOTE: this follows RStan rather closely. Some of the calculations here
    probs_len = len(probs)
    n_chains = len(sim['samples'])
    # tidx is a dict with keys that are parameters and values that are their
    # indices using column-major ordering
    tidx = _pars_total_indexes(sim['pars_oi'], sim['dims_oi'], sim['fnames_oi'], pars)
    tidx_colm = [tidx[par] for par in pars]
    tidx_colm = list(itertools.chain(*tidx_colm))  # like R's unlist()
    tidx_rowm = [tidx[par+'_rowmajor'] for par in pars]
    tidx_rowm = list(itertools.chain(*tidx_rowm))
    tidx_len = len(tidx_colm)
    # One summary dict (msd/quan/c_msd/c_quan) per flat parameter index.
    lmsdq = [_get_par_summary(sim, i, probs) for i in tidx_colm]
    msd = np.row_stack([x['msd'] for x in lmsdq])
    quan = np.row_stack([x['quan'] for x in lmsdq])
    probs_str = tuple(["{:g}%".format(100*p) for p in probs])
    # Fortran (column-major) reshape mirrors RStan's memory layout.
    msd = msd.reshape(tidx_len, 2, order='F')
    quan = quan.reshape(tidx_len, probs_len, order='F')

    c_msd = np.row_stack([x['c_msd'] for x in lmsdq])
    c_quan = np.row_stack([x['c_quan'] for x in lmsdq])
    c_msd = c_msd.reshape(tidx_len, 2, n_chains, order='F')
    c_quan = c_quan.reshape(tidx_len, probs_len, n_chains, order='F')
    sim_attr_args = sim.get('args', None)
    if sim_attr_args is None:
        cids = list(range(n_chains))
    else:
        cids = [x['chain_id'] for x in sim_attr_args]

    c_msd_names = dict(parameters=np.asarray(sim['fnames_oi'])[tidx_colm],
                       stats=("mean", "sd"),
                       chains=tuple("chain:{}".format(cid) for cid in cids))
    c_quan_names = dict(parameters=np.asarray(sim['fnames_oi'])[tidx_colm],
                        stats=probs_str,
                        chains=tuple("chain:{}".format(cid) for cid in cids))
    # Effective sample size and split-Rhat, computed per parameter.
    ess_and_rhat = np.array([pystan.chains.ess_and_splitrhat(sim, n) for n in tidx_colm])
    ess, rhat = [arr.ravel() for arr in np.hsplit(ess_and_rhat, 2)]
    return dict(msd=msd, c_msd=c_msd, c_msd_names=c_msd_names, quan=quan,
                c_quan=c_quan, c_quan_names=c_quan_names,
                sem=msd[:, 1] / np.sqrt(ess), ess=ess, rhat=rhat,
                row_major_idx=tidx_rowm, col_major_idx=tidx_colm)
def _get_par_summary(sim, n, probs):
    """Summarize chains merged and individually

    Parameters
    ----------
    sim : dict from stanfit object
    n : int
        parameter index
    probs : iterable of int
        quantiles

    Returns
    -------
    summary : dict
        Dictionary containing summaries
    """
    # _get_samples gets chains for nth parameter
    ss = _get_samples(n, sim, inc_warmup=False)
    # Sample (ddof=1) standard deviation, matching RStan.
    msdfun = lambda chain: (np.mean(chain), np.std(chain, ddof=1))
    qfun = lambda chain: mquantiles(chain, probs)
    # Per-chain statistics, flattened for later Fortran-order reshape.
    c_msd = np.array([msdfun(s) for s in ss]).flatten()
    c_quan = np.array([qfun(s) for s in ss]).flatten()
    # Merged-chain statistics over all draws.
    ass = np.asarray(ss).flatten()
    msd = np.asarray(msdfun(ass))
    quan = qfun(np.asarray(ass))
    return dict(msd=msd, quan=quan, c_msd=c_msd, c_quan=c_quan)
def _split_data(data):
data_r = {}
data_i = {}
# data_r and data_i are going to be converted into C++ objects of
# type: map<string, pair<vector<double>, vector<size_t>>> and
# map<string, pair<vector<int>, vector<size_t>>> so prepare
# them accordingly.
for k, v in data.items():
if np.issubdtype(np.asarray(v).dtype, np.integer):
data_i.update({k.encode('utf-8'): np.asarray(v, dtype=int)})
elif np.issubdtype(np.asarray(v).dtype, np.floating):
data_r.update({k.encode('utf-8'): np.asarray(v, dtype=float)})
else:
msg = "Variable {} is neither int nor float nor list/array thereof"
raise ValueError(msg.format(k))
return data_r, data_i
def _config_argss(chains, iter, warmup, thin,
                  init, seed, sample_file, diagnostic_file, algorithm,
                  control, **kwargs):
    """Validate sampler arguments and build one argument dict per chain.

    After rstan/rstan/R/misc.R (config_argss). Returns a list of `chains`
    dicts, each already normalized by ``_get_valid_stan_args``.

    Raises
    ------
    ValueError
        On invalid iter/thin/warmup/chains, duplicated chain ids, malformed
        initial values, unknown `control` keys, or a bad `inv_metric` spec.
    NotImplementedError
        If `diagnostic_file` is given.
    """
    iter = int(iter)
    if iter < 1:
        raise ValueError("`iter` should be a positive integer.")
    thin = int(thin)
    if thin < 1 or thin > iter:
        raise ValueError("`thin should be a positive integer "
                         "less than `iter`.")
    warmup = max(0, int(warmup))
    if warmup > iter:
        raise ValueError("`warmup` should be an integer less than `iter`.")
    chains = int(chains)
    if chains < 1:
        raise ValueError("`chains` should be a positive integer.")
    iters = [iter] * chains
    thins = [thin] * chains
    warmups = [warmup] * chains

    # use chain_id argument if specified
    if kwargs.get('chain_id') is None:
        chain_id = list(range(chains))
    else:
        chain_id = [int(id) for id in kwargs['chain_id']]
        if len(set(chain_id)) != len(chain_id):
            raise ValueError("`chain_id` has duplicated elements.")
        chain_id_len = len(chain_id)
        if chain_id_len < chains:
            # Pad with fresh ids so every chain has one.
            chain_id = chain_id + [max(chain_id) + 1 + i
                                   for i in range(chains - chain_id_len)]
        del kwargs['chain_id']

    inits_specified = False
    # slight difference here from rstan; Python's lists are not typed.
    if isinstance(init, Number):
        init = str(init)
    if isinstance(init, string_types):
        if init in ['0', 'random']:
            inits = [init] * chains
        else:
            inits = ["random"] * chains
        inits_specified = True
    if not inits_specified and isinstance(init, Callable):
        # test if function takes argument named "chain_id"
        if "chain_id" in inspect.getargspec(init).args:
            inits = [init(chain_id=id) for id in chain_id]
        else:
            inits = [init()] * chains
        if not isinstance(inits[0], dict):
            raise ValueError("The function specifying initial values must "
                             "return a dictionary.")
        inits_specified = True
    if not inits_specified and isinstance(init, Sequence):
        if len(init) != chains:
            raise ValueError("Length of list of initial values does not "
                             "match number of chains.")
        if not all([isinstance(d, dict) for d in init]):
            raise ValueError("Initial value list is not a sequence of "
                             "dictionaries.")
        inits = init
        inits_specified = True
    if not inits_specified:
        raise ValueError("Invalid specification of initial values.")

    # only one seed is needed by virtue of the RNG
    seed = _check_seed(seed)

    kwargs['method'] = "test_grad" if kwargs.get('test_grad') else 'sampling'

    all_control = {
        "adapt_engaged", "adapt_gamma", "adapt_delta", "adapt_kappa",
        "adapt_t0", "adapt_init_buffer", "adapt_term_buffer", "adapt_window",
        "stepsize", "stepsize_jitter", "metric", "int_time",
        "max_treedepth", "epsilon", "error", "inv_metric"
    }
    all_metrics = {"unit_e", "diag_e", "dense_e"}
    if control is not None:
        if not isinstance(control, dict):
            raise ValueError("`control` must be a dictionary")
        if not all(key in all_control for key in control):
            unknown = set(control) - all_control
            raise ValueError("`control` contains unknown parameters: {}".format(unknown))
        if control.get('metric') and control['metric'] not in all_metrics:
            raise ValueError("`metric` must be one of {}".format(all_metrics))
        kwargs['control'] = control

    argss = [dict() for _ in range(chains)]
    for i in range(chains):
        argss[i] = dict(chain_id=chain_id[i],
                        iter=iters[i], thin=thins[i], seed=seed,
                        warmup=warmups[i], init=inits[i],
                        algorithm=algorithm)

    if sample_file is not None:
        sample_file = _writable_sample_file(sample_file)
        if chains == 1:
            argss[0]['sample_file'] = sample_file
        elif chains > 1:
            for i in range(chains):
                argss[i]['sample_file'] = _append_id(sample_file, i)

    if diagnostic_file is not None:
        raise NotImplementedError("diagnostic_file not implemented yet.")

    if control is not None and "inv_metric" in control:
        inv_metric = control.pop("inv_metric")
        # Per-chain metric files are staged in a scratch directory and
        # referenced by path from each chain's argument dict.
        metric_dir = tempfile.mkdtemp()
        if isinstance(inv_metric, dict):
            for i in range(chains):
                if i not in inv_metric:
                    msg = "Invalid value for init_inv_metric found (keys={}). " \
                          "Use either a dictionary with chain_index as keys (0,1,2,...)" \
                          "or ndarray."
                    # Bug fix: this previously referenced the undefined name
                    # `metric_file`, raising NameError instead of ValueError.
                    msg = msg.format(list(inv_metric.keys()))
                    raise ValueError(msg)
                mass_values = inv_metric[i]
                metric_filename = "inv_metric_chain_{}.Rdata".format(str(i))
                metric_path = os.path.join(metric_dir, metric_filename)
                if isinstance(mass_values, str):
                    if not os.path.exists(mass_values):
                        raise ValueError("inverse metric file was not found: {}".format(mass_values))
                    shutil.copy(mass_values, metric_path)
                else:
                    stan_rdump(dict(inv_metric=mass_values), metric_path)
                argss[i]['metric_file'] = metric_path
        elif isinstance(inv_metric, str):
            if not os.path.exists(inv_metric):
                raise ValueError("inverse metric file was not found: {}".format(inv_metric))
            for i in range(chains):
                metric_filename = "inv_metric_chain_{}.Rdata".format(str(i))
                metric_path = os.path.join(metric_dir, metric_filename)
                shutil.copy(inv_metric, metric_path)
                argss[i]['metric_file'] = metric_path
        elif isinstance(inv_metric, Iterable):
            # Dump once for chain 0, then copy for the remaining chains.
            metric_filename = "inv_metric_chain_0.Rdata"
            metric_path = os.path.join(metric_dir, metric_filename)
            stan_rdump(dict(inv_metric=inv_metric), metric_path)
            argss[0]['metric_file'] = metric_path
            for i in range(1, chains):
                metric_filename = "inv_metric_chain_{}.Rdata".format(str(i))
                metric_path = os.path.join(metric_dir, metric_filename)
                shutil.copy(argss[i-1]['metric_file'], metric_path)
                argss[i]['metric_file'] = metric_path
        else:
            # Bug fix: previously only `argss[i]` (a leftover loop index, i.e.
            # the last chain) was blanked; clear the metric file for every
            # chain when inv_metric has an unrecognized type.
            for i in range(chains):
                argss[i]['metric_file'] = ""

    stepsize_list = None
    if "control" in kwargs and "stepsize" in kwargs["control"]:
        if isinstance(kwargs["control"]["stepsize"], Sequence):
            stepsize_list = kwargs["control"]["stepsize"]
            if len(kwargs["control"]["stepsize"]) == 1:
                kwargs["control"]["stepsize"] = kwargs["control"]["stepsize"][0]
            elif len(kwargs["control"]["stepsize"]) != chains:
                raise ValueError("stepsize length needs to equal chain count.")
        else:
            stepsize_list = kwargs["control"]["stepsize"]

    for i in range(chains):
        argss[i].update(kwargs)
        if stepsize_list is not None:
            argss[i]["control"]["stepsize"] = stepsize_list[i]
        argss[i] = _get_valid_stan_args(argss[i])
    return argss
def _get_valid_stan_args(base_args=None):
    """Fill in default values for arguments not provided in `base_args`.

    RStan does this in C++ in stan_args.hpp in the stan_args constructor.
    It seems easier to deal with here in Python.

    Parameters
    ----------
    base_args : dict, optional
        User-supplied arguments; any setting not present gets its default.

    Returns
    -------
    dict
        Arguments prepared for conversion into C++ objects: strings are
        ASCII-encoded bytes, method/algorithm names resolved to enums, and
        the method-specific ``ctrl`` map fully populated.
    """
    args = base_args.copy() if base_args is not None else {}
    # Default arguments, c.f. rstan/rstan/inst/include/rstan/stan_args.hpp
    # values in args are going to be converted into C++ objects so
    # prepare them accordingly---e.g., unicode -> bytes -> std::string
    args['chain_id'] = args.get('chain_id', 1)
    args['append_samples'] = args.get('append_samples', False)
    # resolve the method name to its enum; unknown values fall back to SAMPLING
    if args.get('method') is None or args['method'] == "sampling":
        args['method'] = stan_args_method_t.SAMPLING
    elif args['method'] == "optim":
        args['method'] = stan_args_method_t.OPTIM
    elif args['method'] == 'test_grad':
        args['method'] = stan_args_method_t.TEST_GRADIENT
    elif args['method'] == 'variational':
        args['method'] = stan_args_method_t.VARIATIONAL
    else:
        args['method'] = stan_args_method_t.SAMPLING
    args['sample_file_flag'] = True if args.get('sample_file') else False
    args['sample_file'] = args.get('sample_file', '').encode('ascii')
    args['diagnostic_file_flag'] = True if args.get('diagnostic_file') else False
    args['diagnostic_file'] = args.get('diagnostic_file', '').encode('ascii')
    # NB: argument named "seed" not "random_seed"
    args['random_seed'] = args.get('seed', int(time.time()))
    args['metric_file_flag'] = True if args.get('metric_file') else False
    args['metric_file'] = args.get('metric_file', '').encode('ascii')
    if args['method'] == stan_args_method_t.VARIATIONAL:
        # variational does not use a `control` map like sampling
        args['ctrl'] = args.get('ctrl', dict(variational=dict()))
        args['ctrl']['variational']['iter'] = args.get('iter', 10000)
        args['ctrl']['variational']['grad_samples'] = args.get('grad_samples', 1)
        args['ctrl']['variational']['elbo_samples'] = args.get('elbo_samples', 100)
        args['ctrl']['variational']['eval_elbo'] = args.get('eval_elbo', 100)
        args['ctrl']['variational']['output_samples'] = args.get('output_samples', 1000)
        args['ctrl']['variational']['adapt_iter'] = args.get('adapt_iter', 50)
        args['ctrl']['variational']['eta'] = args.get('eta', 1.0)
        args['ctrl']['variational']['adapt_engaged'] = args.get('adapt_engaged', True)
        args['ctrl']['variational']['tol_rel_obj'] = args.get('tol_rel_obj', 0.01)
        if args.get('algorithm', '').lower() == 'fullrank':
            args['ctrl']['variational']['algorithm'] = variational_algo_t.FULLRANK
        else:
            args['ctrl']['variational']['algorithm'] = variational_algo_t.MEANFIELD
    elif args['method'] == stan_args_method_t.SAMPLING:
        args['ctrl'] = args.get('ctrl', dict(sampling=dict()))
        args['ctrl']['sampling']['iter'] = iter = args.get('iter', 2000)
        args['ctrl']['sampling']['warmup'] = warmup = args.get('warmup', iter // 2)
        # BUG FIX: this was `iter - warmup // 1000`, which by operator
        # precedence computed `iter - (warmup // 1000)` (e.g. 1999 for the
        # defaults iter=2000, warmup=1000). The intended default is one draw
        # kept per 1000 post-warmup iterations.
        calculated_thin = (iter - warmup) // 1000
        if calculated_thin < 1:
            calculated_thin = 1
        args['ctrl']['sampling']['thin'] = thin = args.get('thin', calculated_thin)
        args['ctrl']['sampling']['save_warmup'] = True  # always True now
        args['ctrl']['sampling']['iter_save_wo_warmup'] = iter_save_wo_warmup = 1 + (iter - warmup - 1) // thin
        args['ctrl']['sampling']['iter_save'] = iter_save_wo_warmup + 1 + (warmup - 1) // thin
        refresh = iter // 10 if iter >= 20 else 1
        args['ctrl']['sampling']['refresh'] = args.get('refresh', refresh)
        ctrl_lst = args.get('control', dict())
        ctrl_sampling = args['ctrl']['sampling']
        # NB: if these defaults change, remember to update docstrings
        ctrl_sampling['adapt_engaged'] = ctrl_lst.get("adapt_engaged", True)
        ctrl_sampling['adapt_gamma'] = ctrl_lst.get("adapt_gamma", 0.05)
        ctrl_sampling['adapt_delta'] = ctrl_lst.get("adapt_delta", 0.8)
        ctrl_sampling['adapt_kappa'] = ctrl_lst.get("adapt_kappa", 0.75)
        ctrl_sampling['adapt_t0'] = ctrl_lst.get("adapt_t0", 10.0)
        ctrl_sampling['adapt_init_buffer'] = ctrl_lst.get("adapt_init_buffer", 75)
        ctrl_sampling['adapt_term_buffer'] = ctrl_lst.get("adapt_term_buffer", 50)
        ctrl_sampling['adapt_window'] = ctrl_lst.get("adapt_window", 25)
        ctrl_sampling['stepsize'] = ctrl_lst.get("stepsize", 1.0)
        ctrl_sampling['stepsize_jitter'] = ctrl_lst.get("stepsize_jitter", 0.0)
        algorithm = args.get('algorithm', 'NUTS')
        if algorithm == 'HMC':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.HMC
        elif algorithm == 'Metropolis':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.Metropolis
        elif algorithm == 'NUTS':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.NUTS
        elif algorithm == 'Fixed_param':
            args['ctrl']['sampling']['algorithm'] = sampling_algo_t.Fixed_param
            # TODO: Setting adapt_engaged to False solves the segfault reported
            # in issue #200; find out why this hack is needed. RStan deals with
            # the setting elsewhere.
            ctrl_sampling['adapt_engaged'] = False
        else:
            msg = "Invalid value for parameter algorithm (found {}; " \
                  "require HMC, Metropolis, NUTS, or Fixed_param).".format(algorithm)
            raise ValueError(msg)
        metric = ctrl_lst.get('metric', 'diag_e')
        if metric == "unit_e":
            ctrl_sampling['metric'] = sampling_metric_t.UNIT_E
        elif metric == "diag_e":
            ctrl_sampling['metric'] = sampling_metric_t.DIAG_E
        elif metric == "dense_e":
            ctrl_sampling['metric'] = sampling_metric_t.DENSE_E
        # algorithm-specific tuning parameters
        if ctrl_sampling['algorithm'] == sampling_algo_t.NUTS:
            ctrl_sampling['max_treedepth'] = ctrl_lst.get("max_treedepth", 10)
        elif ctrl_sampling['algorithm'] == sampling_algo_t.HMC:
            # default integration time: 2*pi
            ctrl_sampling['int_time'] = ctrl_lst.get('int_time', 6.283185307179586476925286766559005768e+00)
        elif ctrl_sampling['algorithm'] == sampling_algo_t.Metropolis:
            pass
        elif ctrl_sampling['algorithm'] == sampling_algo_t.Fixed_param:
            pass
    elif args['method'] == stan_args_method_t.OPTIM:
        args['ctrl'] = args.get('ctrl', dict(optim=dict()))
        args['ctrl']['optim']['iter'] = iter = args.get('iter', 2000)
        algorithm = args.get('algorithm', 'LBFGS')
        if algorithm == "BFGS":
            args['ctrl']['optim']['algorithm'] = optim_algo_t.BFGS
        elif algorithm == "Newton":
            args['ctrl']['optim']['algorithm'] = optim_algo_t.Newton
        elif algorithm == "LBFGS":
            args['ctrl']['optim']['algorithm'] = optim_algo_t.LBFGS
        else:
            msg = "Invalid value for parameter algorithm (found {}; " \
                  "require (L)BFGS or Newton).".format(algorithm)
            raise ValueError(msg)
        refresh = args['ctrl']['optim']['iter'] // 100
        args['ctrl']['optim']['refresh'] = args.get('refresh', refresh)
        if args['ctrl']['optim']['refresh'] < 1:
            args['ctrl']['optim']['refresh'] = 1
        args['ctrl']['optim']['init_alpha'] = args.get("init_alpha", 0.001)
        args['ctrl']['optim']['tol_obj'] = args.get("tol_obj", 1e-12)
        args['ctrl']['optim']['tol_grad'] = args.get("tol_grad", 1e-8)
        args['ctrl']['optim']['tol_param'] = args.get("tol_param", 1e-8)
        args['ctrl']['optim']['tol_rel_obj'] = args.get("tol_rel_obj", 1e4)
        args['ctrl']['optim']['tol_rel_grad'] = args.get("tol_rel_grad", 1e7)
        args['ctrl']['optim']['save_iterations'] = args.get("save_iterations", True)
        args['ctrl']['optim']['history_size'] = args.get("history_size", 5)
    elif args['method'] == stan_args_method_t.TEST_GRADIENT:
        args['ctrl'] = args.get('ctrl', dict(test_grad=dict()))
        args['ctrl']['test_grad']['epsilon'] = args.get("epsilon", 1e-6)
        args['ctrl']['test_grad']['error'] = args.get("error", 1e-6)
    # initial values: "random", a user-supplied dict, or a literal
    init = args.get('init', "random")
    if isinstance(init, string_types):
        args['init'] = init.encode('ascii')
    elif isinstance(init, dict):
        args['init'] = "user".encode('ascii')
        # while the name is 'init_list', it is a dict; the name comes from rstan,
        # where list elements can have names
        args['init_list'] = init
    else:
        args['init'] = "random".encode('ascii')
    args['init_radius'] = args.get('init_r', 2.0)
    if (args['init_radius'] <= 0):
        args['init'] = b"0"
    # 0 initialization requires init_radius = 0
    if (args['init'] == b"0" or args['init'] == 0):
        args['init_radius'] = 0.0
    args['enable_random_init'] = args.get('enable_random_init', True)
    # RStan calls validate_args() here
    return args
def _check_seed(seed):
    """Coerce `seed` into a valid Stan seed: an integer in [0, MAX_UINT].

    When the given value cannot be used, fall back to a random seed,
    warning unless `seed` was supplied as None.
    """
    # a RandomState supplies its own draw directly
    if isinstance(seed, np.random.RandomState):
        return seed.randint(0, MAX_UINT)
    if isinstance(seed, (Number, string_types)):
        try:
            converted = int(seed)
        except ValueError:
            logger.warning("`seed` must be castable to an integer")
        else:
            if converted < 0:
                logger.warning("`seed` may not be negative")
            elif converted > MAX_UINT:
                raise ValueError('`seed` is too large; max is {}'.format(MAX_UINT))
            else:
                return converted
    elif seed is not None:
        logger.warning('`seed` has unexpected type')
    # unusable (or absent) seed: pick one at random
    return random.randint(0, MAX_UINT)
def _organize_inits(inits, pars, dims):
    """Obtain a list of initial values for each chain.

    The parameter 'lp__' (with its dims entry) is removed in place from
    `pars`/`dims` when present.

    Parameters
    ----------
    inits : list
        list of initial values for each chain.
    pars : list of str
    dims : list of list of int
        from (via cython conversion) vector[vector[uint]] dims

    Returns
    -------
    inits : list of dict
    """
    if 'lp__' in pars:
        lp_idx = pars.index('lp__')
        del pars[lp_idx]
        del dims[lp_idx]
    starts = _calc_starts(dims)
    return [_par_vector2dict(init, pars, dims, starts) for init in inits]
def _calc_starts(dims):
"""Calculate starting indexes
Parameters
----------
dims : list of list of int
from (via cython conversion) vector[vector[uint]] dims
Examples
--------
>>> _calc_starts([[8, 2], [5], [6, 2]])
[0, 16, 21]
"""
# NB: Python uses 0-indexing; R uses 1-indexing.
l = len(dims)
s = [np.prod(d) for d in dims]
starts = np.cumsum([0] + s)[0:l].tolist()
# coerce things into ints before returning
return [int(i) for i in starts]
def _par_vector2dict(v, pars, dims, starts=None):
"""Turn a vector of samples into an OrderedDict according to param dims.
Parameters
----------
y : list of int or float
pars : list of str
parameter names
dims : list of list of int
list of dimensions of parameters
Returns
-------
d : dict
Examples
--------
>>> v = list(range(31))
>>> dims = [[5], [5, 5], []]
>>> pars = ['mu', 'Phi', 'eta']
>>> _par_vector2dict(v, pars, dims) # doctest: +ELLIPSIS
OrderedDict([('mu', array([0, 1, 2, 3, 4])), ('Phi', array([[ 5, ...
"""
if starts is None:
starts = _calc_starts(dims)
d = OrderedDict()
for i in range(len(pars)):
l = int(np.prod(dims[i]))
start = starts[i]
end = start + l
y = np.asarray(v[start:end])
if len(dims[i]) > 1:
y = y.reshape(dims[i], order='F') # 'F' = Fortran, column-major
d[pars[i]] = y.squeeze() if y.shape == (1,) else y
return d
def _check_pars(allpars, pars):
if len(pars) == 0:
raise ValueError("No parameter specified (`pars` is empty).")
for par in pars:
if par not in allpars:
raise ValueError("No parameter {}".format(par))
def _pars_total_indexes(names, dims, fnames, pars):
    """Obtain all the indexes for parameters `pars` in the sequence of names.

    `names` references variables that are in column-major order.

    Parameters
    ----------
    names : sequence of str
        All the parameter names.
    dims : sequence of list of int
        Dimensions, in same order as `names`.
    fnames : sequence of str
        All the scalar parameter names.
    pars : sequence of str
        The parameters of interest. It is assumed all elements in `pars`
        are in `names`.

    Returns
    -------
    indexes : OrderedDict of tuple of int
        Keyed by parameter name; indexes are column-major order. For each
        parameter there is also a key `par`+'_rowmajor' holding the
        row-major indexing.

    Note
    ----
    Inside each parameter (vector or array), the sequence uses column-major
    ordering, i.e. like R matrix(..., bycol=TRUE).
    """
    starts = _calc_starts(dims)

    def indexes_for(par):
        # a scalar parameter appears directly in `fnames`
        if par in fnames:
            idx = (fnames.index(par),)
            return OrderedDict([(par, idx), (par + '_rowmajor', idx)])
        pos = names.index(par)
        colmajor = starts[pos] + np.arange(np.prod(dims[pos]))
        rowmajor = starts[pos] + _idx_col2rowm(dims[pos])
        return OrderedDict([(par, tuple(colmajor)),
                            (par + '_rowmajor', tuple(rowmajor))])

    indexes = OrderedDict()
    for par in pars:
        indexes.update(indexes_for(par))
    return indexes
def _idx_col2rowm(d):
"""Generate indexes to change from col-major to row-major ordering"""
if 0 == len(d):
return 1
if 1 == len(d):
return np.arange(d[0])
# order='F' indicates column-major ordering
idx = np.array(np.arange(np.prod(d))).reshape(d, order='F').T
return idx.flatten(order='F')
def _get_kept_samples(n, sim):
    """Get samples to be kept from the chain(s) for `n`th parameter.
    Samples from different chains are merged.
    Parameters
    ----------
    n : int
    sim : dict
        A dictionary tied to a StanFit4Model instance.
    Returns
    -------
    samples : array
        Samples being kept, permuted and in column-major order.
    """
    # Thin wrapper: the actual work happens in the compiled
    # pystan._misc extension module.
    return pystan._misc.get_kept_samples(n, sim)
def _get_samples(n, sim, inc_warmup=True):
    # NOTE: this is in stanfit-class.R in RStan (rather than misc.R)
    """Get chains for `n`th parameter.
    Parameters
    ----------
    n : int
    sim : dict
        A dictionary tied to a StanFit4Model instance.
    inc_warmup : bool, optional
        Whether warmup draws are included (default True).
    Returns
    -------
    chains : list of array
        Each chain is an element in the list.
    """
    # Thin wrapper: the actual work happens in the compiled
    # pystan._misc extension module.
    return pystan._misc.get_samples(n, sim, inc_warmup)
def _redirect_stderr():
    """Redirect stderr for subprocesses to /dev/null
    Silences copious compilation messages.
    Returns
    -------
    orig_stderr : file descriptor
        Copy of original stderr file descriptor
    """
    # drain any pending output before swapping file descriptors
    sys.stderr.flush()
    stderr_fileno = sys.stderr.fileno()
    # keep a duplicate so the caller can restore stderr afterwards
    orig_stderr = os.dup(stderr_fileno)
    devnull = os.open(os.devnull, os.O_WRONLY)
    os.dup2(devnull, stderr_fileno)
    # stderr_fileno now points at devnull; the extra descriptor can go
    os.close(devnull)
    return orig_stderr
def _has_fileno(stream):
"""Returns whether the stream object seems to have a working fileno()
Tells whether _redirect_stderr is likely to work.
Parameters
----------
stream : IO stream object
Returns
-------
has_fileno : bool
True if stream.fileno() exists and doesn't raise OSError or
UnsupportedOperation
"""
try:
stream.fileno()
except (AttributeError, OSError, IOError, io.UnsupportedOperation):
return False
return True
def _append_id(file, id, suffix='.csv'):
fname = os.path.basename(file)
fpath = os.path.dirname(file)
fname2 = re.sub(r'\.csv\s*$', '_{}.csv'.format(id), fname)
if fname2 == fname:
fname2 = '{}_{}.csv'.format(fname, id)
return os.path.join(fpath, fname2)
def _writable_sample_file(file, warn=True, wfun=None):
    """Check to see if file is writable, if not use temporary file"""
    if wfun is None:
        wfun = lambda x, y: '"{}" is not writable; use "{}" instead'.format(x, y)
    target_dir = os.path.dirname(file)
    if target_dir == '':
        target_dir = os.getcwd()
    if os.access(target_dir, os.W_OK):
        return file
    # directory not writable: fall back to a fresh temporary directory
    fallback_dir = tempfile.mkdtemp()
    if warn:
        logger.warning(wfun(target_dir, fallback_dir))
    return os.path.join(fallback_dir, os.path.basename(file))
def is_legal_stan_vname(name):
    """Return True if `name` is a legal Stan variable name.

    A name is illegal when it is a Stan or C++ reserved word, contains a
    '.', starts with a digit, or ends with a double underscore.
    """
    stan_kw1 = ('for', 'in', 'while', 'repeat', 'until', 'if', 'then', 'else',
                'true', 'false')
    stan_kw2 = ('int', 'real', 'vector', 'simplex', 'ordered', 'positive_ordered',
                'row_vector', 'matrix', 'corr_matrix', 'cov_matrix', 'lower', 'upper')
    stan_kw3 = ('model', 'data', 'parameters', 'quantities', 'transformed', 'generated')
    cpp_kw = ("alignas", "alignof", "and", "and_eq", "asm", "auto", "bitand", "bitor", "bool",
              "break", "case", "catch", "char", "char16_t", "char32_t", "class", "compl",
              "const", "constexpr", "const_cast", "continue", "decltype", "default", "delete",
              "do", "double", "dynamic_cast", "else", "enum", "explicit", "export", "extern",
              "false", "float", "for", "friend", "goto", "if", "inline", "int", "long", "mutable",
              "namespace", "new", "noexcept", "not", "not_eq", "nullptr", "operator", "or", "or_eq",
              "private", "protected", "public", "register", "reinterpret_cast", "return",
              "short", "signed", "sizeof", "static", "static_assert", "static_cast", "struct",
              "switch", "template", "this", "thread_local", "throw", "true", "try", "typedef",
              "typeid", "typename", "union", "unsigned", "using", "virtual", "void", "volatile",
              "wchar_t", "while", "xor", "xor_eq")
    # frozenset gives O(1) membership instead of a linear tuple scan
    illegal = frozenset(stan_kw1 + stan_kw2 + stan_kw3 + cpp_kw)
    # '.' anywhere, leading digit, or trailing '__' are all disallowed
    # (re.search is the idiomatic boolean check; findall built a list only
    # to test truthiness)
    if re.search(r'(\.|^[0-9]|__$)', name):
        return False
    return name not in illegal
def _dict_to_rdump(data):
    """Serialize a dict of numeric scalars/sequences/arrays to R dump format.

    Parameters
    ----------
    data : dict
        Maps variable names to numeric scalars, sequences, or ndarrays.

    Returns
    -------
    str
        Concatenated R dump assignments, one per variable.

    Raises
    ------
    ValueError
        If a value is not numeric (e.g. a string).
    """
    parts = []
    for name, value in data.items():
        if isinstance(value, (Sequence, Number, np.number, np.ndarray, int, bool, float)) \
                and not isinstance(value, string_types):
            value = np.asarray(value)
        else:
            raise ValueError("Variable {} is not a number and cannot be dumped.".format(name))

        # BUG FIX: `np.bool` (an alias of the builtin) was removed in
        # NumPy 1.24 and raised AttributeError here; comparing the dtype
        # against the builtin `bool` matches the same boolean dtype.
        if value.dtype == bool:
            value = value.astype(int)

        if value.ndim == 0:
            # scalar: plain assignment
            s = '{} <- {}\n'.format(name, str(value))
        elif value.ndim == 1:
            # vector: c(...) literal
            s = '{} <-\nc({})\n'.format(name, ', '.join(str(v) for v in value))
        elif value.ndim > 1:
            tmpl = '{} <-\nstructure(c({}), .Dim = c({}))\n'
            # transpose value as R uses column-major
            # 'F' = Fortran, column-major
            s = tmpl.format(name,
                            ', '.join(str(v) for v in value.flatten(order='F')),
                            ', '.join(str(v) for v in value.shape))
        parts.append(s)
    return ''.join(parts)
def stan_rdump(data, filename):
    """
    Dump a dictionary with model data into a file using the R dump format that
    Stan supports.

    Parameters
    ----------
    data : dict
    filename : str

    Raises
    ------
    ValueError
        If a variable name is not legal in Stan.
    """
    # validate all names before touching the filesystem
    for name in data:
        if is_legal_stan_vname(name):
            continue
        raise ValueError("Variable name {} is not allowed in Stan".format(name))
    with open(filename, 'w') as f:
        f.write(_dict_to_rdump(data))
def _rdump_value_to_numpy(s):
"""
Convert a R dump formatted value to Numpy equivalent
For example, "c(1, 2)" becomes ``array([1, 2])``
Only supports a few R data structures. Will not work with European decimal format.
"""
if "structure" in s:
vector_str, shape_str = re.findall(r'c\([^\)]+\)', s)
shape = [int(d) for d in shape_str[2:-1].split(',')]
if '.' in vector_str:
arr = np.array([float(v) for v in vector_str[2:-1].split(',')])
else:
arr = np.array([int(v) for v in vector_str[2:-1].split(',')])
# 'F' = Fortran, column-major
arr = arr.reshape(shape, order='F')
elif "c(" in s:
if '.' in s:
arr = np.array([float(v) for v in s[2:-1].split(',')], order='F')
else:
arr = np.array([int(v) for v in s[2:-1].split(',')], order='F')
else:
arr = np.array(float(s) if '.' in s else int(s))
return arr
def _remove_empty_pars(pars, pars_oi, dims_oi):
"""
Remove parameters that are actually empty. For example, the parameter
y would be removed with the following model code:
transformed data { int n; n <- 0; }
parameters { real y[n]; }
Parameters
----------
pars: iterable of str
pars_oi: list of str
dims_oi: list of list of int
Returns
-------
pars_trimmed: list of str
"""
pars = list(pars)
for par, dim in zip(pars_oi, dims_oi):
if par in pars and np.prod(dim) == 0:
del pars[pars.index(par)]
return pars
def read_rdump(filename):
    """
    Read data formatted using the R dump format

    Parameters
    ----------
    filename: str

    Returns
    -------
    data : OrderedDict

    Raises
    ------
    ValueError
        If variable names cannot be paired one-to-one with values.
    """
    # BUG FIX: the file handle was opened without ever being closed;
    # a context manager guarantees it is released.
    with open(filename) as f:
        contents = f.read().strip()
    names = [name.strip() for name in re.findall(r'^(\w+) <-', contents, re.MULTILINE)]
    # raw string for the pattern: '\w' in a non-raw literal is an invalid
    # escape sequence (DeprecationWarning, SyntaxError in future Pythons)
    values = [value.strip() for value in re.split(r'\w+ +<-', contents) if value]
    if len(values) != len(names):
        raise ValueError("Unable to read file. Unable to pair variable name with value.")
    d = OrderedDict()
    for name, value in zip(names, values):
        d[name.strip()] = _rdump_value_to_numpy(value.strip())
    return d
def to_dataframe(fit, pars=None, permuted=False, dtypes=None, inc_warmup=False, diagnostics=True, header=True):
    """Extract samples as a pandas dataframe for different parameters.
    Parameters
    ----------
    fit : StanFit4Model
        Fit object holding the samples.
    pars : {str, sequence of str}
        parameter (or quantile) name(s).
    permuted : bool
        If True, returned samples are permuted.
        If inc_warmup is True, warmup samples have negative order.
    dtypes : dict
        datatype of parameter(s).
        If nothing is passed, float will be used for all parameters.
    inc_warmup : bool
        If True, warmup samples are kept; otherwise they are
        discarded.
    diagnostics : bool
        If True, include hmc diagnostics in dataframe.
    header : bool
        If True, include header columns.
    Returns
    -------
    df : pandas dataframe
        Returned dataframe contains: [header_df]|[draws_df]|[diagnostics_df],
        where all groups are optional.
        To exclude draws_df use `pars=[]`.
    """
    try:
        import pandas as pd
    except ImportError:
        raise ImportError("Pandas module not found. You can install pandas with: pip install pandas")
    fit._verify_has_samples()
    pars_original = pars
    # normalize `pars` to a list of parameter names
    if pars is None:
        pars = fit.sim['pars_oi']
    elif isinstance(pars, string_types):
        pars = [pars]
    if pars:
        # drop parameters that are declared with zero size
        pars = pystan.misc._remove_empty_pars(pars, fit.sim['pars_oi'], fit.sim['dims_oi'])
    allpars = fit.sim['pars_oi'] + fit.sim['fnames_oi']
    _check_pars(allpars, pars)
    if dtypes is None:
        dtypes = {}
    # number of draws retained per chain (warmup included only on request)
    n_kept = [s if inc_warmup else s-w for s, w in zip(fit.sim['n_save'], fit.sim['warmup2'])]
    chains = len(fit.sim['samples'])
    # target dtypes for the HMC sampler diagnostic columns
    diagnostic_type = {'divergent__':int,
                       'energy__':float,
                       'treedepth__':int,
                       'accept_stat__':float,
                       'stepsize__':float,
                       'n_leapfrog__':int}
    header_dict = OrderedDict()
    if header:
        # chain index, draw number (warmup draws get negative numbers),
        # and a 0/1 warmup indicator per row
        idx = np.concatenate([np.full(n_kept[chain], chain, dtype=int) for chain in range(chains)])
        warmup = [np.zeros(n_kept[chain], dtype=np.int64) for chain in range(chains)]
        if inc_warmup:
            draw = []
            for chain, w in zip(range(chains), fit.sim['warmup2']):
                warmup[chain][:w] = 1
                draw.append(np.arange(n_kept[chain], dtype=np.int64) - w)
            draw = np.concatenate(draw)
        else:
            draw = np.concatenate([np.arange(n_kept[chain], dtype=np.int64) for chain in range(chains)])
        warmup = np.concatenate(warmup)
        header_dict = OrderedDict(zip(['chain', 'draw', 'warmup'], [idx, draw, warmup]))
    if permuted:
        # build global and per-chain permutation vectors plus their inverses
        if inc_warmup:
            chain_permutation = []
            chain_permutation_order = []
            permutation = []
            permutation_order = []
            for chain, p, w in zip(range(chains), fit.sim['permutation'], fit.sim['warmup2']):
                # warmup draws keep their (negative) positions; only
                # post-warmup draws are permuted
                chain_permutation.append(list(range(-w, 0)) + p)
                chain_permutation_order.append(list(range(-w, 0)) + list(np.argsort(p)))
                permutation.append(sum(n_kept[:chain])+chain_permutation[-1]+w)
                permutation_order.append(sum(n_kept[:chain])+chain_permutation_order[-1]+w)
            chain_permutation = np.concatenate(chain_permutation)
            chain_permutation_order = np.concatenate(chain_permutation_order)
            permutation = np.concatenate(permutation)
            permutation_order = np.concatenate(permutation_order)
        else:
            chain_permutation = np.concatenate(fit.sim['permutation'])
            chain_permutation_order = np.concatenate([np.argsort(item) for item in fit.sim['permutation']])
            permutation = np.concatenate([sum(n_kept[:chain])+p for chain, p in enumerate(fit.sim['permutation'])])
            permutation_order = np.argsort(permutation)
        header_dict["permutation"] = permutation
        header_dict["chain_permutation"] = chain_permutation
        header_dict["permutation_order"] = permutation_order
        header_dict["chain_permutation_order"] = chain_permutation_order
    if header:
        header_df = pd.DataFrame.from_dict(header_dict)
    else:
        if permuted:
            # permutation_order is still needed for the final sort below
            header_df = pd.DataFrame.from_dict({"permutation_order" : header_dict["permutation_order"]})
        else:
            header_df = pd.DataFrame()
    fnames_set = set(fit.sim['fnames_oi'])
    pars_set = set(pars)
    if pars_original is None or fnames_set == pars_set:
        # fast path: keep every column of every chain
        dfs = [pd.DataFrame.from_dict(pyholder.chains).iloc[-n:] for pyholder, n in zip(fit.sim['samples'], n_kept)]
        df = pd.concat(dfs, axis=0, sort=False, ignore_index=True)
        if dtypes:
            if not fnames_set.issuperset(pars_set):
                # map each parameter name to its flat column names
                par_keys = OrderedDict([(par, []) for par in fit.sim['pars_oi']])
                for key in fit.sim['fnames_oi']:
                    par = key.split("[")
                    par = par[0]
                    par_keys[par].append(key)
            # NOTE(review): `par_keys` is undefined here when
            # fnames_set >= pars_set and `dtypes` is non-empty — looks like a
            # possible NameError; confirm upstream guarantees before relying
            # on this path.
            for par, dtype in dtypes.items():
                if isinstance(dtype, (float, np.float64)):
                    continue
                for key in par_keys.get(par, [par]):
                    df.loc[:, key] = df.loc[:, key].astype(dtype)
    elif pars:
        # selected columns only: gather requested flat columns chain by chain
        par_keys = dict()
        if not fnames_set.issuperset(pars_set):
            par_keys = OrderedDict([(par, []) for par in fit.sim['pars_oi']])
            for key in fit.sim['fnames_oi']:
                par = key.split("[")
                par = par[0]
                par_keys[par].append(key)
        columns = []
        for par in pars:
            columns.extend(par_keys.get(par, [par]))
        columns = list(np.unique(columns))
        df = pd.DataFrame(index=np.arange(sum(n_kept)), columns=columns, dtype=float)
        for key in columns:
            key_values = []
            for chain, (pyholder, n) in enumerate(zip(fit.sim['samples'], n_kept)):
                key_values.append(pyholder.chains[key][-n:])
            df.loc[:, key] = np.concatenate(key_values)
        # apply user-requested dtypes (float is already the default)
        for par, dtype in dtypes.items():
            if isinstance(dtype, (float, np.float64)):
                continue
            for key in par_keys.get(par, [par]):
                df.loc[:, key] = df.loc[:, key].astype(dtype)
    else:
        # pars=[] -> exclude the draws block entirely
        df = pd.DataFrame()
    if diagnostics:
        diagnostics_dfs = []
        for idx, (pyholder, permutation, n) in enumerate(zip(fit.sim['samples'], fit.sim['permutation'], n_kept), 1):
            diagnostics_df = pd.DataFrame(pyholder['sampler_params'], index=pyholder['sampler_param_names']).T
            diagnostics_df = diagnostics_df.iloc[-n:, :]
            for key, dtype in diagnostic_type.items():
                if key in diagnostics_df:
                    diagnostics_df.loc[:, key] = diagnostics_df.loc[:, key].astype(dtype)
            diagnostics_dfs.append(diagnostics_df)
        if diagnostics_dfs:
            diagnostics_df = pd.concat(diagnostics_dfs, axis=0, sort=False, ignore_index=True)
        else:
            diagnostics_df = pd.DataFrame()
    else:
        diagnostics_df = pd.DataFrame()
    # final layout: [header][draws][diagnostics], optionally permuted
    df = pd.concat((header_df, df, diagnostics_df), axis=1, sort=False)
    if permuted:
        df.sort_values(by='permutation_order', inplace=True)
        if not header:
            df.drop(columns='permutation_order', inplace=True)
    return df
def get_stepsize(fit):
    """Parse stepsize from fit object

    Parameters
    ----------
    fit : StanFit4Model

    Returns
    -------
    list
        Returns an empty list if step sizes
        are not found in ``fit.get_adaptation_info``.
    """
    fit._verify_has_samples()
    stepsizes = []
    for info in fit.get_adaptation_info():
        # take the first "Step size = <value>" line of each chain's info
        for line in info.splitlines():
            if "Step size" not in line:
                continue
            stepsizes.append(float(line.split("=")[1].strip()))
            break
    return stepsizes
def get_inv_metric(fit, as_dict=False):
    """Parse inverse metric from the fit object
    Parameters
    ----------
    fit : StanFit4Model
    as_dict : bool, optional
        If True, return a dict keyed by chain index instead of a list.
    Returns
    -------
    list or dict
        Returns an empty list if inverse metric
        is not found in ``fit.get_adaptation_info()``.
        If `as_dict` returns a dictionary which can be used with
        `.sampling` method.
    """
    fit._verify_has_samples()
    inv_metrics = []
    # an inverse metric only exists for runs with a sampling ctrl map
    if not (("ctrl" in fit.stan_args[0]) and ("sampling" in fit.stan_args[0]["ctrl"])):
        return inv_metrics
    # metric type name (e.g. "DIAG_E" / "DENSE_E") for each chain
    metric = [args["ctrl"]["sampling"]["metric"].name for args in fit.stan_args]
    for adaptation_info, metric_name in zip(fit.get_adaptation_info(), metric):
        iter_adaptation_info = iter(adaptation_info.splitlines())
        inv_metric_list = []
        for line in iter_adaptation_info:
            if any(value in line for value in ["Step size", "Adaptation"]):
                continue
            elif "inverse mass matrix" in line:
                # consume the numeric lines that follow the matrix banner;
                # each qualifying line is one comma-separated row of values
                for line in iter_adaptation_info:
                    # a line qualifies when, after stripping the "# " prefix,
                    # spaces, and commas, only numeric characters remain
                    stripped_set = set(line.replace("# ", "").replace(" ", "").replace(",", ""))
                    if stripped_set.issubset(set(".-1234567890e")):
                        inv_metric = np.array(list(map(float, line.replace("# ", "").strip().split(","))))
                        if metric_name == "DENSE_E":
                            # dense rows become 2-D so the concatenate below
                            # stacks them into a matrix
                            inv_metric = np.atleast_2d(inv_metric)
                        inv_metric_list.append(inv_metric)
                    else:
                        # first non-numeric line terminates the matrix block
                        break
        inv_metrics.append(np.concatenate(inv_metric_list))
    return inv_metrics if not as_dict else dict(enumerate(inv_metrics))
def get_last_position(fit, warmup=False):
    """Parse last position from fit object

    Parameters
    ----------
    fit : StanFit4Model
    warmup : bool
        If True, returns the last warmup position (when warmup has been
        performed); otherwise the last sampling draw is returned.

    Returns
    -------
    list
        list contains a dictionary of last draw from each chain.
    """
    fit._verify_has_samples()
    extracted = fit.extract(permuted=False, pars=fit.model_pars, inc_warmup=warmup)
    # -1 selects the final draw; with warmup=True this shifts to index
    # warmup-1, i.e. the final warmup draw of the warmup-inclusive arrays
    draw_location = -1
    if warmup:
        draw_location += max(1, fit.sim["warmup"])
    return [
        {key: values[draw_location, chain] for key, values in extracted.items()}
        for chain in range(fit.sim["chains"])
    ]
|
// Tests that a TimeRange overlaps another TimeRange at its end
// (the second range starts before the first one finishes).
@Test
public void overlaps_end() {
    TimeRange range = TimeRange.fromStartEnd(TIME_6PM, TIME_730PM);
    Assert.assertTrue(range.overlaps(TimeRange.fromStartEnd(TIME_7PM, TIME_8PM)));
}
#include "Matrix.h"
#include "Vector.h"
#include "Synthesis.h"
#include "ToolPca.h"
#include "ToolGmm.h"
// Destructor: releases all internally allocated buffers via reset().
CGmm::~CGmm(void) { reset(); }
/*! Initializes the GMM computation: validates arguments, stores the
 problem dimensions, initializes the result holder, and allocates the
 internal work buffers.
 \param pCResult result instance to initialize (iK clusters, iNumFeatures dims)
 \param iK number of gaussian components (>= 1)
 \param iNumFeatures number of feature dimensions (>= 1)
 \param iNumObs number of observations (>= 1)
 \param iMaxIter maximum number of EM iterations (>= 1)
 \return Error_t (kFunctionInvalidArgsError on bad arguments)
*/
Error_t CGmm::init(CGmmResult *pCResult, int iK, int iNumFeatures, int iNumObs, int iMaxIter)
{
    if (!pCResult || iK < 1 || iNumFeatures < 1 || iNumObs < 1 || iMaxIter < 1)
        return Error_t::kFunctionInvalidArgsError;

    // free any previous allocations before the sizes below are overwritten
    reset();

    // set variables
    m_iMaxIter = iMaxIter;
    m_iNumFeatures = iNumFeatures;
    m_iNumObs = iNumObs;
    m_iK = iK;

    // init result class
    pCResult->init(iK, iNumFeatures);
    PrevState = CGmmResult(*pCResult);

    // alloc memory (two scratch vectors/matrices; the vectors must hold
    // either K or iNumFeatures entries, whichever is larger)
    for (auto i = 0; i < 2; i++)
    {
        CVector::alloc(m_apfProc[i], std::max(iK, iNumFeatures));
        CMatrix::alloc(m_appfSigma[i], m_iNumFeatures, m_iNumFeatures);
    }
    CMatrix::alloc(m_ppfProb, m_iK, m_iNumObs);

    // all done
    m_bisInitialized = true;

    return Error_t::kNoError;
}
/*! Releases all allocated work memory and restores default member values.
 Note: the frees below use the current m_iNumFeatures/m_iK, so those
 members must only be cleared afterwards.
 \return Error_t (always kNoError)
*/
Error_t CGmm::reset()
{
    m_bisInitialized = false;

    for (auto i = 0; i < 2; i++)
    {
        CVector::free(m_apfProc[i]);
        CMatrix::free(m_appfSigma[i], m_iNumFeatures);
    }
    CMatrix::free(m_ppfProb, m_iK);

    m_iMaxIter = 300;
    m_iNumFeatures = 0;
    m_iNumObs = 0;
    m_iK = 0;

    return Error_t::kNoError;
}
/*! Runs the EM iteration for the gaussian mixture model until convergence
 or m_iMaxIter iterations.
 \param pCResult receives the final cluster state (priors, means, sigmas)
 \param ppfFeatures input features, dimensions [iNumFeatures][iNumObs]
 \return Error_t (invalid args / illegal call when not initialized)
*/
Error_t CGmm::compGmm(CGmmResult *pCResult, const float *const *const ppfFeatures)
{
    if (!pCResult || !ppfFeatures)
        return Error_t::kFunctionInvalidArgsError;
    if (!ppfFeatures[0])
        return Error_t::kFunctionInvalidArgsError;
    if (!m_bisInitialized)
        return Error_t::kFunctionIllegalCallError;

    // init: randomly selected data points as cluster means
    initState_(ppfFeatures, pCResult);

    // iterate
    for (auto i = 0; i < m_iMaxIter; i++)
    {
        // copy state to prev state
        PrevState = *pCResult;

        // E-step: compute normalized per-observation cluster probabilities
        compProbabilities_(ppfFeatures, pCResult);

        // M-step: update priors, means, and covariances
        updateState_(ppfFeatures, pCResult);

        // check for change
        if (checkConverged_(pCResult))
            break;
    }
    return Error_t::kNoError;
}
/*! Initializes the EM state: randomly selected data points as cluster
 means, the covariance of the full observation set as every cluster's
 sigma, and uniform priors.
 \param ppfFeatures input features [iNumFeatures][iNumObs]
 \param pCCurrState state to initialize
*/
void CGmm::initState_(const float *const *const ppfFeatures, CGmmResult *pCCurrState)
{
    // generate some noise
    // (assumes genNoise yields values in [0, 1] so that, scaled by
    //  iNumObs-1, each entry maps to a valid observation index; the assert
    //  below guards this — TODO confirm genNoise's output range)
    CSynthesis::genNoise(m_apfProc[0], m_iK);
    CVector::mulC_I(m_apfProc[0], m_iNumObs - 1.F, m_iK);

    // covariance over all observations, shared by every cluster initially
    CPca::compCov(m_appfSigma[0], ppfFeatures, m_iNumFeatures, m_iNumObs);

    for (auto k = 0; k < m_iK; k++)
    {
        int iIdx = CUtil::float2int<int>(m_apfProc[0][k]);
        assert(iIdx >= 0 && iIdx < m_iNumObs);

        // set sigma
        pCCurrState->setSigma(k, m_appfSigma[0]);

        // set mean
        for (auto v = 0; v < m_iNumFeatures; v++)
            pCCurrState->setMu(k, v, ppfFeatures[v][iIdx]);

        // set prior
        pCCurrState->setPrior(k, 1.F / m_iK);
    }
}
/*! E-step: evaluates the (prior-weighted) gaussian of every cluster for
 every observation into m_ppfProb, then normalizes over clusters so each
 observation's probabilities sum to one.
 \param ppfFeatures input features [iNumFeatures][iNumObs]
 \param pCCurrState current cluster parameters (means, sigmas, priors)
*/
void CGmm::compProbabilities_(const float *const *const ppfFeatures, CGmmResult *pCCurrState)
{
    // compute gaussian per cluster per observation
    for (auto k = 0; k < m_iK; k++)
    {
        pCCurrState->getSigma(m_appfSigma[0], k);
        float fDet = CMatrix::det(m_appfSigma[0], m_iNumFeatures, m_iNumFeatures);
        // (near-)singular covariance: give this cluster zero probability
        if (fDet < 1e-30F)
        {
            CVector::setZero(m_ppfProb[k], m_iNumObs);
            continue;
        }
        // gaussian normalization constant 1/sqrt((2*pi)^d * |Sigma|)
        float fNorm = static_cast<float>(1. / std::sqrt(std::pow(2. * M_PI, m_iNumFeatures) * fDet));
        // invert the covariance in place for the Mahalanobis term
        CMatrix::inv_I(m_appfSigma[0], m_iNumFeatures, m_iNumFeatures);
        for (auto n = 0; n < m_iNumObs; n++)
        {
            // x - mu
            for (auto v = 0; v < m_iNumFeatures; v++)
                m_apfProc[0][v] = ppfFeatures[v][n] - pCCurrState->getMu(k, v);

            // Sigma^-1 * (x - mu)
            CMatrix::mulMatColvec(m_apfProc[1], m_appfSigma[0], m_apfProc[0], m_iNumFeatures, m_iNumFeatures);

            // N(x; mu, Sigma) = fNorm * exp(-0.5 * (x-mu)' Sigma^-1 (x-mu))
            m_ppfProb[k][n] = fNorm * std::exp(-.5F * CVector::mulScalar(m_apfProc[0], m_apfProc[1], m_iNumFeatures));
        }
        // weight by the cluster prior
        CVector::mulC_I(m_ppfProb[k], pCCurrState->getPrior(k), m_iNumObs);
    }

    // norm over clusters
    for (auto n = 0; n < m_iNumObs; n++)
    {
        float fSum = CMatrix::getSumCol(m_ppfProb, n, m_iK);
        if (fSum > 0)
            CMatrix::mulColC_I(m_ppfProb, 1.F / fSum, n, m_iK);
    }
}
// M-step: re-estimates priors, means, and covariances of all clusters from
// the responsibilities computed in compProbabilities_
void CGmm::updateState_(const float *const *const ppfFeatures, CGmmResult *pCCurrState)
{
    for (auto k = 0; k < m_iK; k++)
    {
        // update priors: mean responsibility of cluster k over all observations
        pCCurrState->setPrior(k, CVector::getMean(m_ppfProb[k], m_iNumObs));

        // total responsibility of cluster k, with a guard against division by
        // zero for empty clusters; m_ppfProb[k] is not modified below, so this
        // is computed once and reused for both the mean and sigma updates
        // (the original recomputed the identical value a second time)
        float fNorm = CVector::getSum(m_ppfProb[k], m_iNumObs);
        if (fNorm < 1e-30F)
            fNorm = 1.F;

        // update means: responsibility-weighted average of the observations
        for (auto v = 0; v < m_iNumFeatures; v++)
            pCCurrState->setMu(k, v, CVector::mulScalar(ppfFeatures[v], m_ppfProb[k], m_iNumObs) / fNorm);

        // update sigma: responsibility-weighted sum of outer products of the
        // centered observations, normalized by the total responsibility
        CMatrix::setZero(m_appfSigma[0], m_iNumFeatures, m_iNumFeatures);
        for (auto n = 0; n < m_iNumObs; n++)
        {
            // m_apfProc[0] = x_n - mu_k (using the just-updated mean)
            for (auto v = 0; v < m_iNumFeatures; v++)
                m_apfProc[0][v] = ppfFeatures[v][n] - pCCurrState->getMu(k, v);
            CMatrix::mulColvecRowvec(m_appfSigma[1], m_apfProc[0], m_apfProc[0], m_iNumFeatures, m_iNumFeatures);
            CMatrix::mulC_I(m_appfSigma[1], m_ppfProb[k][n], m_iNumFeatures, m_iNumFeatures);
            CMatrix::add_I(m_appfSigma[0], m_appfSigma[1], m_iNumFeatures, m_iNumFeatures);
        }
        CMatrix::mulC_I(m_appfSigma[0], 1.F / fNorm, m_iNumFeatures, m_iNumFeatures);
        pCCurrState->setSigma(k, m_appfSigma[0]);
    }
}
// returns true once the cluster means have (practically) stopped moving
bool CGmm::checkConverged_(CGmmResult *pCCurrState)
{
    // accumulate the absolute change of every mean component vs. the previous iteration
    float fMeanShift = 0;
    for (auto k = 0; k < m_iK; k++)
        for (auto v = 0; v < m_iNumFeatures; v++)
            fMeanShift += std::abs(pCCurrState->getMu(k, v) - PrevState.getMu(k, v));

    // converged when the average per-cluster change drops below the threshold
    return fMeanShift / m_iK <= 1e-20F;
}
// destructor: release all internal buffers
CGmmResult::~CGmmResult(void)
{
    reset();
}
// deep-copies the full result state; reallocates if the dimensions differ
CGmmResult &CGmmResult::operator=(const CGmmResult &that)
{
    // self-assignment guard: copying buffers onto themselves is wasted work
    if (this == &that)
        return *this;

    // (re)allocate if the dimensions or initialization state differ --
    // otherwise the existing buffers are reused
    if (this->m_iK != that.m_iK || this->m_iNumFeatures != that.m_iNumFeatures || this->m_bIsInitialized != that.m_bIsInitialized)
        this->init(that.m_iK, that.m_iNumFeatures);

    CMatrix::copy(this->m_ppfMu, that.m_ppfMu, this->m_iK, this->m_iNumFeatures);
    CVector::copy(this->m_pfPrior, that.m_pfPrior, this->m_iK);

    // copy all covariance variants (normal and inverse) per cluster
    for (auto i = 0; i < kSigma; i++)
    {
        for (auto k = 0; k < this->m_iK; k++)
            CMatrix::copy(this->m_apppfSigma[i][k], that.m_apppfSigma[i][k], this->m_iNumFeatures, this->m_iNumFeatures);
    }
    return *this;
}
// evaluates the mixture density at the query point pfQuery (length m_iNumFeatures):
// sum over clusters of prior_k * N(pfQuery; mu_k, sigma_k)
float CGmmResult::getProb(const float *pfQuery)
{
    float fProb = 0;
    for (auto k = 0; k < m_iK; k++)
    {
        // skip (near-)singular covariances -- their Gaussian is undefined
        float fDet = CMatrix::det(m_apppfSigma[kNormal][k], m_iNumFeatures, m_iNumFeatures);
        if (fDet < 1e-30F)
            continue;

        // multivariate Gaussian normalization 1/sqrt((2*pi)^V * |Sigma|)
        float fNorm = static_cast<float>(1. / std::sqrt(std::pow(2. * M_PI, m_iNumFeatures) * fDet));

        // m_apfProc[0] = x - mu_k
        CVector::copy(m_apfProc[0], pfQuery, m_iNumFeatures);
        CVector::sub_I(m_apfProc[0], m_ppfMu[k], m_iNumFeatures);

        // Mahalanobis form (x-mu)^T Sigma^-1 (x-mu) using the inverse cached by setSigma
        CMatrix::mulMatColvec(m_apfProc[1], m_apppfSigma[kInv][k], m_apfProc[0], m_iNumFeatures, m_iNumFeatures);
        fProb += this->getPrior(k) * fNorm * std::exp(-.5F * CVector::mulScalar(m_apfProc[0], m_apfProc[1], m_iNumFeatures));
    }
    return fProb;
}
// copy constructor: deep-copies all result buffers of `that`
CGmmResult::CGmmResult(const CGmmResult &that) :
    m_iK(that.m_iK),
    m_iNumFeatures(that.m_iNumFeatures),
    m_bIsInitialized(that.m_bIsInitialized)
{
    // means: K x V
    CMatrix::alloc(m_ppfMu, m_iK, m_iNumFeatures);
    CMatrix::copy(m_ppfMu, that.m_ppfMu, m_iK, m_iNumFeatures);

    // priors: K
    CVector::alloc(m_pfPrior, m_iK);
    CVector::copy(m_pfPrior, that.m_pfPrior, m_iK);

    // covariances: one V x V matrix per cluster, for each variant (normal/inverse)
    for (auto i = 0; i < kSigma; i++)
    {
        CVector::alloc(m_apppfSigma[i], m_iK);
        for (auto k = 0; k < m_iK; k++)
        {
            CMatrix::alloc(m_apppfSigma[i][k], m_iNumFeatures, m_iNumFeatures);
            CMatrix::copy(m_apppfSigma[i][k], that.m_apppfSigma[i][k], m_iNumFeatures, m_iNumFeatures);
        }
    }

    // BUGFIX: init() allocates the m_apfProc scratch buffers but the copy
    // constructor did not, so calling getProb() on a copy dereferenced
    // unallocated memory; allocate them here as well
    for (auto i = 0; i < 2; i++)
        CVector::alloc(m_apfProc[i], m_iNumFeatures);
}
// returns the number of Gaussians (clusters) K
int CGmmResult::getNumGaussians() const
{
    return m_iK;
}

// returns the dimensionality of the feature space
int CGmmResult::getNumDimensions() const
{
    return m_iNumFeatures;
}

// returns the mean of one Gaussian in one feature dimension
float CGmmResult::getMu(int iGaussianIdx, int iFeatureIdx) const
{
    return m_ppfMu[iGaussianIdx][iFeatureIdx];
}

// returns the prior (mixture weight) of one Gaussian
float CGmmResult::getPrior(int iGaussianIdx) const
{
    return m_pfPrior[iGaussianIdx];
}

// returns a single entry of one Gaussian's covariance matrix
float CGmmResult::getSigma(int iGaussianIdx, int iRowIdx, int iColIdx) const
{
    return m_apppfSigma[kNormal][iGaussianIdx][iRowIdx][iColIdx];
}

// copies one Gaussian's full covariance matrix into the caller-provided buffer
// (ppfSigma must hold at least m_iNumFeatures x m_iNumFeatures entries)
void CGmmResult::getSigma(float **ppfSigma, int iGaussianIdx) const
{
    CMatrix::copy(ppfSigma, m_apppfSigma[kNormal][iGaussianIdx], m_iNumFeatures, m_iNumFeatures);
    return;
}

// returns true once init() has completed
bool CGmmResult::isInitialized() const
{
    return m_bIsInitialized;
}
// allocates all result buffers for iK Gaussians in an iNumFeatures-dimensional
// feature space; any previous state is released first via reset()
Error_t CGmmResult::init(int iK, int iNumFeatures)
{
    reset();

    m_iK = iK;
    m_iNumFeatures = iNumFeatures;

    // means: K x V
    CMatrix::alloc(m_ppfMu, m_iK, m_iNumFeatures);
    // priors: K
    CVector::alloc(m_pfPrior, m_iK);
    // covariances: one V x V matrix per cluster, for each of the kSigma
    // variants (normal and inverse -- see kNormal/kInv usage in getProb)
    for (auto i = 0; i < kSigma; i++)
    {
        CVector::alloc(m_apppfSigma[i], m_iK);
        for (auto k = 0; k < m_iK; k++)
            CMatrix::alloc(m_apppfSigma[i][k], m_iNumFeatures, m_iNumFeatures);
    }
    // scratch buffers used by getProb()
    for (auto i = 0; i < 2; i++)
        CVector::alloc(m_apfProc[i], m_iNumFeatures);

    m_bIsInitialized = true;

    return Error_t::kNoError;
}
// releases all buffers and zeroes the dimensions; safe to call repeatedly
// (after the first call m_iK and m_iNumFeatures are 0, so the loops free nothing)
Error_t CGmmResult::reset()
{
    m_bIsInitialized = false;

    CMatrix::free(m_ppfMu, m_iK);
    CVector::free(m_pfPrior);
    for (auto i = 0; i < kSigma; i++)
    {
        for (auto k = 0; k < m_iK; k++)
            CMatrix::free(m_apppfSigma[i][k], m_iNumFeatures);
        CVector::free(m_apppfSigma[i]);
    }
    for (auto i = 0; i < 2; i++)
        CVector::free(m_apfProc[i]);

    m_iK = 0;
    m_iNumFeatures = 0;

    return Error_t::kNoError;
}
// sets a single mean component of one Gaussian
Error_t CGmmResult::setMu(int iGaussianIdx, int iFeatureIdx, float fParamValue)
{
    m_ppfMu[iGaussianIdx][iFeatureIdx] = fParamValue;
    return Error_t::kNoError;
}

// sets the prior (mixture weight) of one Gaussian
Error_t CGmmResult::setPrior(int iGaussianIdx, float fParamValue)
{
    m_pfPrior[iGaussianIdx] = fParamValue;
    return Error_t::kNoError;
}

// stores one Gaussian's covariance matrix and caches its inverse so that
// getProb() does not have to invert on every query
// NOTE(review): a singular ppfSigma is inverted here without a determinant
// check -- presumably callers guard against this (getProb skips low-det
// clusters); confirm
Error_t CGmmResult::setSigma(int iGaussianIdx, float **ppfSigma)
{
    CMatrix::copy(m_apppfSigma[kNormal][iGaussianIdx], ppfSigma, m_iNumFeatures, m_iNumFeatures);
    CMatrix::copy(m_apppfSigma[kInv][iGaussianIdx], ppfSigma, m_iNumFeatures, m_iNumFeatures);
    CMatrix::inv_I(m_apppfSigma[kInv][iGaussianIdx], m_iNumFeatures, m_iNumFeatures);
    return Error_t::kNoError;
}
|
def weather_import(weather_filename):
    """Load a wide-format weather CSV and return it in long format.

    The input file is expected to have a ``year`` column plus one column per
    month named ``'1'`` .. ``'12'``.

    Parameters
    ----------
    weather_filename : str or file-like
        Path (or buffer) passed straight to ``pd.read_csv``.

    Returns
    -------
    pandas.DataFrame
        Columns ``['month', 'year', 'temperature variation', 'date']`` with one
        row per (year, month) pair; ``date`` is the first day of the month.
    """
    df_w = pd.read_csv(weather_filename)
    # BUGFIX: dropna() returns a new frame; the result was previously discarded,
    # so completely empty rows survived and broke the date assembly below.
    df_w = df_w.dropna(how='all')
    temperature_index = pd.melt(
        df_w,
        id_vars=['year'],
        value_vars=['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'],
        var_name='month',
        value_name='temperature variation')
    temperature_index = temperature_index[['month', 'year', 'temperature variation']]
    # NOTE: the original called set_index('month', 'Year') here, which misused
    # the API (the second positional argument is `drop`, not another key) and
    # discarded its result; the call was a no-op and has been removed -- the
    # date assembly below needs 'year'/'month' to remain ordinary columns.
    temperature_index = temperature_index.assign(
        date=pd.to_datetime(temperature_index[['year', 'month']].assign(day=1)))
    return temperature_index
<filename>examples/0022.utf/utf8_to_utf16.cc
#include"../../include/fast_io.h"
#include"../../include/fast_io_device.h"
#include"../../include/fast_io_legacy.h"
int main()
{
    using namespace std::string_view_literals;
    // transcode the UTF-8 literal to a UTF-16 string via fast_io's code_cvt,
    // then print each UTF-16 code unit on its own line
    auto u16str{fast_io::concat<std::u16string>(fast_io::code_cvt(u8"lol呵呵. 开心就好。234713872947194748231asjfosajfsioafj"sv))};
    for(auto const &codeUnit : u16str)
        println(fast_io::c_stdout,codeUnit);
}
package io.cattle.platform.resource.pool.subnet;
import io.cattle.platform.resource.pool.impl.AbstractStringRangeGenerator;
import io.cattle.platform.util.net.NetUtils;
/**
 * Generates string values for a pool whose items are IPv4 addresses inside a
 * subnet. The range iteration logic is inherited from
 * {@link AbstractStringRangeGenerator}; this class only converts between the
 * dotted-quad string form and the numeric long representation.
 */
public class SubnetAddressGenerator extends AbstractStringRangeGenerator {

    public SubnetAddressGenerator(String min, String max) {
        super(min, max);
    }

    @Override
    protected long fromString(String value) {
        // dotted-quad string -> numeric address
        return NetUtils.ip2Long(value);
    }

    @Override
    protected String toString(long value) {
        // numeric address -> dotted-quad string
        return NetUtils.long2Ip(value);
    }
}
|
<filename>countlog/output/output_file.go
package output
//
//import (
// "os"
// "path/filepath"
// "time"
//)
//
//type fileLogOutput struct {
// windowSize int64
// logFile string
// rotateAfter int64
// openedFile *os.File
// openedFileArchiveTo string
//}
//
//func (output *fileLogOutput) Close() {
// if output.openedFile != nil {
// output.openedFile.Close()
// }
//}
//
//func (output *fileLogOutput) openLogFile() {
// var err error
// output.openedFile, err = os.OpenFile(output.logFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
// if err != nil {
// os.Stderr.Write([]byte("failed to open log file: " +
// output.logFile + ", " + err.Error() + "\n"))
// os.Stderr.Sync()
// }
// output.openedFileArchiveTo = output.logFile + "." + time.Now().Format("200601021504")
//}
//
//func (output *fileLogOutput) archiveLogFile() {
// output.openedFile.Close()
// output.openedFile = nil
// err := os.Rename(output.logFile, output.openedFileArchiveTo)
// if err != nil {
// os.Stderr.Write([]byte("failed to rename to archived log file: " +
// output.openedFileArchiveTo + ", " + err.Error() + "\n"))
// os.Stderr.Sync()
// }
//}
//
//func (output *fileLogOutput) OutputLog(level int, timestamp int64, formattedEvent []byte) {
// if timestamp > output.rotateAfter {
// now := time.Now()
// output.rotateAfter = (int64(now.UnixNano()/output.windowSize) + 1) * output.windowSize
// output.archiveLogFile()
// output.openLogFile()
// }
// if output.openedFile != nil {
// output.openedFile.Write(formattedEvent) // silently ignore error
// }
//}
//
//type osFileLogOutput struct {
// logFile *os.File
//}
//
//func (output *osFileLogOutput) Close() {
// output.logFile.Sync()
//}
//
//func (output *osFileLogOutput) OutputLog(level int, timestamp int64, formattedEvent []byte) {
// output.logFile.Write(withColorLevelPrefix(level, formattedEvent))
//}
//
//func NewFileLogOutput(logFile string) LogOutput {
// switch logFile {
// case "STDOUT":
// return &osFileLogOutput{os.Stdout}
// case "STDERR":
// return &osFileLogOutput{os.Stderr}
// default:
// output := &fileLogOutput{
// windowSize: int64(time.Hour),
// logFile: logFile,
// }
// err := os.MkdirAll(filepath.Dir(logFile), 0755)
// if err != nil {
// os.Stderr.Write([]byte("failed to create dir for log file: " +
// filepath.Dir(logFile) + ", " + err.Error() + "\n"))
// os.Stderr.Sync()
// }
// output.openLogFile()
// output.rotateAfter = (int64(time.Now().UnixNano()/output.windowSize) + 1) * output.windowSize
// return output
// }
//}
|
A dad in Otsego, Michigan—population 3,956—terrified parents, police, and school administrators by handing out teddy bears to people he passed on the street, including children in the presence of adults. It was Ken Cronkhite's attempt to spread some happiness and help his 89-year-old father, the owner of an 800-teddy bear collection, to downsize.
And it backfired.
As news spread of a man, a plan, and his plush toys, the police department's phone lines lit up. Officers sped off to patrol the bus stops as at least one frantic mom ripped apart her kid's bear to see if it contained something insidious.
Meanwhile, the school district jumped into action, alerting parents with emergency phone calls and going on Facebook to warn of "a heavyset, older, white man handing out teddy bears to kids from his silver SUV."
"We have not been told that this man threatened anyone or tried to lure students to him, and adults have been present during two encounters," the letter said, "however, it raises a concern as we want to be sure our students are safe."
Hmm. A man who is not threatening kids, nor luring them, and only approaching the ones who are directly supervised by an adult? Yes, that sure is scary.
According to the Kalamazoo Gazette's mlive.com, commenters on the Facebook post fretted that the bears could be filled with heroin syringes or hidden cameras. But when Cronkhite's teenage son read it, he realized: That's my dad!
Dad immediately called the cops to tell them: I'm your man.
Cronkhite said he is a veteran Marine and a retired Chicago police officer and had fun approaching shoppers with children in store parking lots, people jogging along the streets, and children gathered with adults at bus stops or picnic tables. He said he made sure there was an adult accompanying any children he approached. He said he started the direct giveaways after he had distributed bags of bears to Sylvia's Place shelter for battered women, and to the American Cancer Society in Kalamazoo. Some charities would not accept the stuffed animals because they were not brand new, he said. So his elderly mother washed them all and his father told him to just give them to any takers in an attempt to spread some happiness, Cronkhite said.
But spreading happiness is a lot harder than spreading hysteria, Cronkhite learned.
When his father asked him to distribute the bears to children, it never occurred to him not to do it. "It was really fun until this happened," said Cronkhite.
"This" being America 2016, when even the police chief says that parents are "rightly on alert for any contact between their children and adults they don't know."
Even when the kids are standing right next to their parents, the parents are "rightly" on alert. Because no one who isn't you should ever interact with your kids. After all, you never know when a man with a teddy bear might pull the syringe out of the bear's stuffing, shoot the parent full of heroin, and run off with the child.
Certainly Cronkhite has learned what it means to be male and friendly and a little offbeat: "I will never try to go out and try to do anything nice for anyone again," he said.
What a relief. |
// Non-final only for mocking in tests. Do not subclass!
@Immutable
@AutoCodec
public class RuleClass {

  // Default external-bindings function: the rule contributes no repository
  // bindings (always returns an empty map).
  @AutoCodec
  static final Function<? super Rule, Map<String, Label>> NO_EXTERNAL_BINDINGS =
      Functions.<Map<String, Label>>constant(ImmutableMap.<String, Label>of());

  // Default option-reference function: the rule references no build options
  // (always returns an empty set).
  @AutoCodec
  static final Function<? super Rule, Set<String>> NO_OPTION_REFERENCE =
      Functions.<Set<String>>constant(ImmutableSet.<String>of());

  /** Path prefix identifying third-party code (see the third_party license checks below). */
  public static final PathFragment THIRD_PARTY_PREFIX = PathFragment.create("third_party");
  /**
   * A constraint for the package name of the Rule instances.
   */
  public static class PackageNameConstraint implements PredicateWithMessage<Rule> {

    /** Sentinel: the allowed values may appear in any segment of the package path. */
    public static final int ANY_SEGMENT = 0;

    // 1-based index of the package path segment to check, or ANY_SEGMENT.
    private final int pathSegment;

    // The allowed values for that segment.
    private final Set<String> values;

    /**
     * The pathSegment-th segment of the package must be one of the specified values.
     * The path segment indexing starts from 1.
     */
    public PackageNameConstraint(int pathSegment, String... values) {
      this.values = ImmutableSet.copyOf(values);
      this.pathSegment = pathSegment;
    }

    @Override
    public boolean apply(Rule input) {
      PathFragment path = input.getLabel().getPackageFragment();
      if (pathSegment == ANY_SEGMENT) {
        // Any segment of the package path may match one of the allowed values.
        return path.getFirstSegment(values) != PathFragment.INVALID_SEGMENT;
      } else {
        // Check exactly the requested (1-based) segment; fail if the path is too short.
        return path.segmentCount() >= pathSegment
            && values.contains(path.getSegment(pathSegment - 1));
      }
    }

    @Override
    public String getErrorReason(Rule param) {
      if (pathSegment == ANY_SEGMENT) {
        return param.getRuleClass() + " rules have to be under a "
            + StringUtil.joinEnglishList(values, "or", "'") + " directory";
      } else if (pathSegment == 1) {
        return param.getRuleClass() + " rules are only allowed in "
            + StringUtil.joinEnglishList(StringUtil.append(values, "//", ""), "or");
      } else {
        return param.getRuleClass() + " rules are only allowed in packages which "
            + StringUtil.ordinal(pathSegment) + " is " + StringUtil.joinEnglishList(values, "or");
      }
    }

    @VisibleForTesting
    public int getPathSegment() {
      return pathSegment;
    }

    @VisibleForTesting
    public Collection<String> getValues() {
      return values;
    }
  }
/** A factory or builder class for rule implementations. */
public interface ConfiguredTargetFactory<
TConfiguredTarget, TContext, TActionConflictException extends Throwable> {
/**
* Returns a fully initialized configured target instance using the given context.
*
* @throws RuleErrorException if configured target creation could not be completed due to rule
* errors
* @throws TActionConflictException if there were conflicts during action registration
*/
TConfiguredTarget create(TContext ruleContext)
throws InterruptedException, RuleErrorException, TActionConflictException;
/**
* Exception indicating that configured target creation could not be completed. General error
* messaging should be done via {@link RuleErrorConsumer}; this exception only interrupts
* configured target creation in cases where it can no longer continue.
*/
public static final class RuleErrorException extends Exception {}
}
/**
* Describes in which way a rule implementation allows additional execution platform constraints.
*/
public enum ExecutionPlatformConstraintsAllowed {
/**
* Allows additional execution platform constraints to be added in the rule definition, which
* apply to all targets of that rule.
*/
PER_RULE(1),
/**
* Users are allowed to specify additional execution platform constraints for each target, using
* the 'exec_compatible_with' attribute. This also allows setting constraints in the rule
* definition, like PER_RULE.
*/
PER_TARGET(2);
private final int priority;
ExecutionPlatformConstraintsAllowed(int priority) {
this.priority = priority;
}
public int priority() {
return priority;
}
public static ExecutionPlatformConstraintsAllowed highestPriority(
ExecutionPlatformConstraintsAllowed first, ExecutionPlatformConstraintsAllowed... rest) {
ExecutionPlatformConstraintsAllowed result = first;
for (ExecutionPlatformConstraintsAllowed value : rest) {
if (result == null || result.priority() < value.priority()) {
result = value;
}
}
return result;
}
}
  /**
   * For Bazel's constraint system: the attribute that declares the set of environments a rule
   * supports, overriding the defaults for their respective groups.
   */
  public static final String RESTRICTED_ENVIRONMENT_ATTR = "restricted_to";

  /**
   * For Bazel's constraint system: the attribute that declares the set of environments a rule
   * supports, appending them to the defaults for their respective groups.
   */
  public static final String COMPATIBLE_ENVIRONMENT_ATTR = "compatible_with";

  /**
   * For Bazel's constraint system: the implicit attribute used to store rule class restriction
   * defaults as specified by {@link Builder#restrictedTo}.
   */
  public static final String DEFAULT_RESTRICTED_ENVIRONMENT_ATTR =
      "$" + RESTRICTED_ENVIRONMENT_ATTR;

  /**
   * For Bazel's constraint system: the implicit attribute used to store rule class compatibility
   * defaults as specified by {@link Builder#compatibleWith}.
   */
  public static final String DEFAULT_COMPATIBLE_ENVIRONMENT_ATTR =
      "$" + COMPATIBLE_ENVIRONMENT_ATTR;

  /**
   * A support class to make it easier to create {@code RuleClass} instances.
   * This class follows the 'fluent builder' pattern.
   *
   * <p>The {@link #addAttribute} method will throw an exception if an attribute
   * of that name already exists. Use {@link #overrideAttribute} in that case.
   */
  public static final class Builder {

    // Valid rule class names: a standard identifier (no digits in first position).
    private static final Pattern RULE_NAME_PATTERN = Pattern.compile("[A-Za-z_][A-Za-z0-9_]*");
    /**
     * The type of the rule class, which determines valid names and required
     * attributes.
     */
    public enum RuleClassType {
      /**
       * Abstract rules are intended for rule classes that are just used to
       * factor out common attributes, and for rule classes that are used only
       * internally. These rules cannot be instantiated by a BUILD file.
       *
       * <p>The rule name must contain a '$' and {@link
       * TargetUtils#isTestRuleName} must return false for the name.
       */
      ABSTRACT {
        @Override
        public void checkName(String name) {
          // An empty name is also tolerated, in addition to '$'-containing non-test names.
          Preconditions.checkArgument(
              (name.contains("$") && !TargetUtils.isTestRuleName(name)) || name.isEmpty());
        }

        @Override
        public void checkAttributes(Map<String, Attribute> attributes) {
          // No required attributes.
        }
      },

      /**
       * Invisible rule classes should contain a dollar sign so that they cannot be instantiated
       * by the user. They are different from abstract rules in that they can be instantiated
       * at will.
       */
      INVISIBLE {
        @Override
        public void checkName(String name) {
          Preconditions.checkArgument(name.contains("$"));
        }

        @Override
        public void checkAttributes(Map<String, Attribute> attributes) {
          // No required attributes.
        }
      },

      /**
       * Normal rules are instantiable by BUILD files. Their names must therefore
       * obey the rules for identifiers in the BUILD language. In addition,
       * {@link TargetUtils#isTestRuleName} must return false for the name.
       */
      NORMAL {
        @Override
        public void checkName(String name) {
          Preconditions.checkArgument(
              !TargetUtils.isTestRuleName(name) && RULE_NAME_PATTERN.matcher(name).matches(),
              "Invalid rule name: %s", name);
        }

        @Override
        public void checkAttributes(Map<String, Attribute> attributes) {
          // Each required attribute must be present AND have the expected type.
          for (Attribute attribute : REQUIRED_ATTRIBUTES_FOR_NORMAL_RULES) {
            Attribute presentAttribute = attributes.get(attribute.getName());
            Preconditions.checkState(presentAttribute != null,
                "Missing mandatory '%s' attribute in normal rule class.", attribute.getName());
            Preconditions.checkState(presentAttribute.getType().equals(attribute.getType()),
                "Mandatory attribute '%s' in normal rule class has incorrect type (expected"
                + " %s).", attribute.getName(), attribute.getType());
          }
        }
      },

      /**
       * Workspace rules can only be instantiated from a WORKSPACE file. Their names obey the
       * rule for identifiers.
       */
      WORKSPACE {
        @Override
        public void checkName(String name) {
          Preconditions.checkArgument(RULE_NAME_PATTERN.matcher(name).matches());
        }

        @Override
        public void checkAttributes(Map<String, Attribute> attributes) {
          // No required attributes.
        }
      },

      /**
       * Test rules are instantiable by BUILD files and are handled specially
       * when run with the 'test' command. Their names must obey the rules
       * for identifiers in the BUILD language and {@link
       * TargetUtils#isTestRuleName} must return true for the name.
       *
       * <p>In addition, test rules must contain certain attributes. See {@link
       * Builder#REQUIRED_ATTRIBUTES_FOR_TESTS}.
       */
      TEST {
        @Override
        public void checkName(String name) {
          Preconditions.checkArgument(TargetUtils.isTestRuleName(name)
              && RULE_NAME_PATTERN.matcher(name).matches());
        }

        @Override
        public void checkAttributes(Map<String, Attribute> attributes) {
          // Same check as NORMAL, but against the larger test attribute set.
          for (Attribute attribute : REQUIRED_ATTRIBUTES_FOR_TESTS) {
            Attribute presentAttribute = attributes.get(attribute.getName());
            Preconditions.checkState(presentAttribute != null,
                "Missing mandatory '%s' attribute in test rule class.", attribute.getName());
            Preconditions.checkState(presentAttribute.getType().equals(attribute.getType()),
                "Mandatory attribute '%s' in test rule class has incorrect type (expected %s).",
                attribute.getName(), attribute.getType());
          }
        }
      },

      /**
       * Placeholder rules are only instantiated when packages which refer to non-native rule
       * classes are deserialized. At this time, non-native rule classes can't be serialized. To
       * prevent crashes on deserialization, when a package containing a rule with a non-native rule
       * class is deserialized, the rule is assigned a placeholder rule class. This is compatible
       * with our limited set of package serialization use cases.
       *
       * <p>Placeholder rule class names obey the rule for identifiers.
       */
      PLACEHOLDER {
        @Override
        public void checkName(String name) {
          Preconditions.checkArgument(RULE_NAME_PATTERN.matcher(name).matches(), name);
        }

        @Override
        public void checkAttributes(Map<String, Attribute> attributes) {
          // No required attributes; this rule class cannot have the wrong set of attributes now
          // because, if it did, the rule class would have failed to build before the package
          // referring to it was serialized.
        }
      };

      /**
       * Checks whether the given name is valid for the current rule class type.
       *
       * @throws IllegalArgumentException if the name is not valid
       */
      public abstract void checkName(String name);

      /**
       * Checks whether the given set of attributes contains all the required
       * attributes for the current rule class type.
       *
       * @throws IllegalStateException if a required attribute is missing or has the wrong type
       */
      public abstract void checkAttributes(Map<String, Attribute> attributes);
    }
    /** A predicate that filters rule classes based on their names. */
    @AutoCodec
    public static class RuleClassNamePredicate {

      private static final RuleClassNamePredicate UNSPECIFIED_INSTANCE =
          new RuleClassNamePredicate(ImmutableSet.of(), PredicateType.UNSPECIFIED, null);

      // Explicit rule class names this predicate was built from (empty for UNSPECIFIED).
      private final ImmutableSet<String> ruleClassNames;

      private final PredicateType predicateType;

      // Derived in the constructor from the two fields above; excluded from equals/hashCode.
      private final Predicate<String> ruleClassNamePredicate;
      private final Predicate<RuleClass> ruleClassPredicate;
      // if non-null, used ONLY for checking overlap
      @Nullable private final Set<?> overlappable;

      @VisibleForSerialization
      enum PredicateType {
        ONLY,
        // NOTE(review): unconventional constant casing kept as-is -- this enum is
        // marked @VisibleForSerialization, so renaming may break serialized data; confirm.
        All_EXCEPT,
        UNSPECIFIED
      }

      @VisibleForSerialization
      RuleClassNamePredicate(
          ImmutableSet<String> ruleClassNames, PredicateType predicateType, Set<?> overlappable) {
        this.ruleClassNames = ruleClassNames;
        this.predicateType = predicateType;
        this.overlappable = overlappable;

        switch (predicateType) {
          case All_EXCEPT:
            // Build the ONLY predicate over the same names, then negate it.
            Predicate<String> containing = only(ruleClassNames).asPredicateOfRuleClassName();
            ruleClassNamePredicate =
                new DescribedPredicate<>(
                    Predicates.not(containing), "all but " + containing.toString());
            ruleClassPredicate =
                new DescribedPredicate<>(
                    Predicates.compose(ruleClassNamePredicate, RuleClass::getName),
                    ruleClassNamePredicate.toString());
            break;
          case ONLY:
            ruleClassNamePredicate =
                new DescribedPredicate<>(
                    Predicates.in(ruleClassNames), StringUtil.joinEnglishList(ruleClassNames));
            ruleClassPredicate =
                new DescribedPredicate<>(
                    Predicates.compose(ruleClassNamePredicate, RuleClass::getName),
                    ruleClassNamePredicate.toString());
            break;
          case UNSPECIFIED:
            // Sentinel: matches everything (see unspecified()).
            ruleClassNamePredicate = Predicates.alwaysTrue();
            ruleClassPredicate = Predicates.alwaysTrue();
            break;
          default:
            // This shouldn't happen normally since the constructor is only invoked from
            // within this file.
            throw new IllegalArgumentException(
                "Predicate type was not specified when constructing a RuleClassNamePredicate.");
        }
      }

      public static RuleClassNamePredicate only(Iterable<String> ruleClassNamesAsIterable) {
        ImmutableSet<String> ruleClassNames = ImmutableSet.copyOf(ruleClassNamesAsIterable);
        return new RuleClassNamePredicate(ruleClassNames, PredicateType.ONLY, ruleClassNames);
      }

      public static RuleClassNamePredicate only(String... ruleClasses) {
        return only(Arrays.asList(ruleClasses));
      }

      public static RuleClassNamePredicate allExcept(String... ruleClasses) {
        ImmutableSet<String> ruleClassNames = ImmutableSet.copyOf(ruleClasses);
        Preconditions.checkState(!ruleClassNames.isEmpty(), "Use unspecified() instead");
        return new RuleClassNamePredicate(ruleClassNames, PredicateType.All_EXCEPT, null);
      }

      /**
       * This is a special sentinel value which represents a "default" {@link
       * RuleClassNamePredicate} which is unspecified. Note that a call to its {@link
       * RuleClassNamePredicate#asPredicateOfRuleClass} produces {@code
       * Predicates.<RuleClass>alwaysTrue()}, which is a sentinel value for other parts of bazel.
       */
      public static RuleClassNamePredicate unspecified() {
        return UNSPECIFIED_INSTANCE;
      }

      public final Predicate<String> asPredicateOfRuleClassName() {
        return ruleClassNamePredicate;
      }

      public final Predicate<RuleClass> asPredicateOfRuleClass() {
        return ruleClassPredicate;
      }

      /**
       * Determines whether two {@code RuleClassNamePredicate}s should be considered incompatible as
       * rule class predicate and rule class warning predicate.
       *
       * <p>Specifically, if both list sets of explicit rule class names to permit, those two sets
       * must be disjoint, so the restriction only applies when both predicates have been created by
       * {@link #only}.
       */
      boolean consideredOverlapping(RuleClassNamePredicate that) {
        return this.overlappable != null
            && that.overlappable != null
            && !Collections.disjoint(this.overlappable, that.overlappable);
      }

      @Override
      public int hashCode() {
        // Consistent with equals(): only the names and the predicate type participate.
        return Objects.hash(ruleClassNames, predicateType);
      }

      @Override
      public boolean equals(Object obj) {
        // NOTE: Specifically not checking equality of ruleClassPredicate.
        // By construction, if the name predicates are equals, the rule class predicates are, too.
        return obj instanceof RuleClassNamePredicate
            && ruleClassNames.equals(((RuleClassNamePredicate) obj).ruleClassNames)
            && predicateType.equals(((RuleClassNamePredicate) obj).predicateType);
      }

      @Override
      public String toString() {
        return ruleClassNamePredicate.toString();
      }

      /** A pass-through predicate, except that an explicit {@link #toString()} is provided. */
      private static class DescribedPredicate<T> implements Predicate<T> {
        private final Predicate<T> delegate; // the actual predicate
        private final String description;

        private DescribedPredicate(Predicate<T> delegate, String description) {
          this.delegate = delegate;
          this.description = description;
        }

        @Override
        public boolean apply(T input) {
          return delegate.apply(input);
        }

        @Override
        public int hashCode() {
          return delegate.hashCode();
        }

        @Override
        public boolean equals(Object obj) {
          return obj instanceof DescribedPredicate
              && delegate.equals(((DescribedPredicate<?>) obj).delegate);
        }

        @Override
        public String toString() {
          return description;
        }
      }
    }
  /** A RuleTransitionFactory which always returns the same transition. */
  @AutoCodec.VisibleForSerialization
  @AutoCodec
  static final class FixedTransitionFactory implements RuleTransitionFactory {

    // The single transition returned for every rule, regardless of its contents.
    private final PatchTransition transition;

    @AutoCodec.VisibleForSerialization
    FixedTransitionFactory(PatchTransition transition) {
      this.transition = transition;
    }

    @Override
    public PatchTransition buildTransitionFor(Rule rule) {
      return transition;
    }
  }
  /**
   * Name of default attribute implicitly added to all Skylark RuleClasses that are {@code
   * build_setting}s.
   */
  public static final String SKYLARK_BUILD_SETTING_DEFAULT_ATTR_NAME = "build_setting_default";

  // Explanation used for the build-setting default attribute; presumably supplied
  // as the "nonconfigurable" reason at the attribute declaration site -- confirm.
  public static final String BUILD_SETTING_DEFAULT_NONCONFIGURABLE =
      "Build setting defaults are referenced during analysis.";

  /** List of required attributes for normal rules, name and type. */
  public static final ImmutableList<Attribute> REQUIRED_ATTRIBUTES_FOR_NORMAL_RULES =
      ImmutableList.of(attr("tags", Type.STRING_LIST).build());

  /** List of required attributes for test rules, name and type. */
  public static final ImmutableList<Attribute> REQUIRED_ATTRIBUTES_FOR_TESTS =
      ImmutableList.of(
          attr("tags", Type.STRING_LIST).build(),
          attr("size", Type.STRING).build(),
          attr("timeout", Type.STRING).build(),
          attr("flaky", Type.BOOLEAN).build(),
          attr("shard_count", Type.INTEGER).build(),
          attr("local", Type.BOOLEAN).build());
    // ------------------------------------------------------------------
    // Builder state. The defaults chosen here apply when the corresponding
    // fluent setter is never called.
    // ------------------------------------------------------------------
    private String name;
    private final RuleClassType type;
    private final boolean skylark;
    private boolean skylarkTestable = false;
    private boolean documented;
    private boolean publicByDefault = false;
    private boolean binaryOutput = true;
    private boolean workspaceOnly = false;
    private boolean isExecutableSkylark = false;
    private boolean isAnalysisTest = false;
    private boolean hasAnalysisTestTransition = false;
    private boolean isConfigMatcher = false;
    private boolean hasFunctionTransitionWhitelist = false;
    private boolean ignorePackageLicenses = false;
    private ImplicitOutputsFunction implicitOutputsFunction = ImplicitOutputsFunction.NONE;
    private RuleTransitionFactory transitionFactory;
    private ConfiguredTargetFactory<?, ?, ?> configuredTargetFactory = null;
    private PredicateWithMessage<Rule> validityPredicate =
        PredicatesWithMessage.<Rule>alwaysTrue();
    private Predicate<String> preferredDependencyPredicate = Predicates.alwaysFalse();
    private AdvertisedProviderSet.Builder advertisedProviders = AdvertisedProviderSet.builder();
    private BaseFunction configuredTargetFunction = null;
    private BuildSetting buildSetting = null;
    private Function<? super Rule, Map<String, Label>> externalBindingsFunction =
        NO_EXTERNAL_BINDINGS;
    private Function<? super Rule, ? extends Set<String>> optionReferenceFunction =
        NO_OPTION_REFERENCE;
    /** This field and the next are null iff the rule is native. */
    @Nullable private Label ruleDefinitionEnvironmentLabel;
    @Nullable private String ruleDefinitionEnvironmentHashCode = null;
    private ConfigurationFragmentPolicy.Builder configurationFragmentPolicy =
        new ConfigurationFragmentPolicy.Builder();
    private boolean supportsConstraintChecking = true;

    /**
     * The policy on whether Bazel should enforce that third_party rules declare <code>licenses().
     * </code>. This is only intended for the migration of <a
     * href="https://github.com/bazelbuild/bazel/issues/7444">GitHub #7444</a>. Our final end state
     * is to have no license-related logic whatsoever. But that's going to take some time.
     */
    public enum ThirdPartyLicenseExistencePolicy {
      /**
       * Always do this check, overriding whatever {@link
       * StarlarkSemanticsOptions#checkThirdPartyTargetsHaveLicenses} says.
       */
      ALWAYS_CHECK,

      /**
       * Never do this check, overriding whatever {@link
       * StarlarkSemanticsOptions#checkThirdPartyTargetsHaveLicenses} says.
       */
      NEVER_CHECK,

      /** Do whatever {@link StarlarkSemanticsOptions#checkThirdPartyTargetsHaveLicenses} says. */
      USER_CONTROLLABLE
    }

    private ThirdPartyLicenseExistencePolicy thirdPartyLicenseExistencePolicy;

    // Attribute map preserves declaration order (LinkedHashMap).
    private final Map<String, Attribute> attributes = new LinkedHashMap<>();

    // Toolchain/platform configuration, merged from parents in the constructor below.
    private final Set<Label> requiredToolchains = new HashSet<>();
    private boolean supportsPlatforms = true;
    private ExecutionPlatformConstraintsAllowed executionPlatformConstraintsAllowed =
        ExecutionPlatformConstraintsAllowed.PER_RULE;
    private Set<Label> executionPlatformConstraints = new HashSet<>();
    private OutputFile.Kind outputFileKind = OutputFile.Kind.FILE;
/**
 * Constructs a new {@code RuleClassBuilder} using all attributes from all
 * parent rule classes. An attribute cannot exist in more than one parent.
 *
 * <p>The rule type affects the allowed names and the required
 * attributes (see {@link RuleClassType}).
 *
 * @throws IllegalArgumentException if an attribute with the same name exists
 * in more than one parent
 */
public Builder(String name, RuleClassType type, boolean skylark, RuleClass... parents) {
  this.name = name;
  this.skylark = skylark;
  this.type = type;
  // PLACEHOLDER rule classes only exist for Starlark rules.
  Preconditions.checkState(skylark || type != RuleClassType.PLACEHOLDER, name);
  this.documented = type != RuleClassType.ABSTRACT;
  for (RuleClass parent : parents) {
    // Only inherit predicates that a parent actually customized, so the
    // defaults above are not clobbered by identity-default parents.
    if (parent.getValidityPredicate() != PredicatesWithMessage.<Rule>alwaysTrue()) {
      setValidityPredicate(parent.getValidityPredicate());
    }
    if (parent.preferredDependencyPredicate != Predicates.<String>alwaysFalse()) {
      setPreferredDependencyPredicate(parent.preferredDependencyPredicate);
    }
    configurationFragmentPolicy
        .includeConfigurationFragmentsFrom(parent.getConfigurationFragmentPolicy());
    // NOTE: with multiple parents the last one wins the missing-fragment policy.
    configurationFragmentPolicy.setMissingFragmentPolicy(
        parent.getConfigurationFragmentPolicy().getMissingFragmentPolicy());
    supportsConstraintChecking = parent.supportsConstraintChecking;

    addRequiredToolchains(parent.getRequiredToolchains());
    supportsPlatforms = parent.supportsPlatforms;
    // Make sure we use the highest priority value from all parents.
    executionPlatformConstraintsAllowed(
        ExecutionPlatformConstraintsAllowed.highestPriority(
            executionPlatformConstraintsAllowed, parent.executionPlatformConstraintsAllowed()));
    addExecutionPlatformConstraints(parent.getExecutionPlatformConstraints());

    for (Attribute attribute : parent.getAttributes()) {
      String attrName = attribute.getName();
      // An identical attribute reachable via multiple parents is tolerated;
      // two different attributes with the same name are an error.
      Preconditions.checkArgument(
          !attributes.containsKey(attrName) || attributes.get(attrName).equals(attribute),
          "Attribute %s is inherited multiple times in %s ruleclass",
          attrName,
          name);
      attributes.put(attrName, attribute);
    }

    advertisedProviders.addParent(parent.getAdvertisedProviders());
  }
  // TODO(bazel-team): move this testonly attribute setting to somewhere else
  // preferably to some base RuleClass implementation.
  if (this.type.equals(RuleClassType.TEST)) {
    Attribute.Builder<Boolean> testOnlyAttr = attr("testonly", BOOLEAN).value(true)
        .nonconfigurable("policy decision: this shouldn't depend on the configuration");
    if (attributes.containsKey("testonly")) {
      override(testOnlyAttr);
    } else {
      add(testOnlyAttr);
    }
  }
}
/**
 * Checks that required attributes for test rules are present, creates the
 * {@link RuleClass} object and returns it.
 *
 * @throws IllegalStateException if any of the required attributes is missing
 */
public RuleClass build() {
  // For built-ins, name == key
  return build(name, name);
}

/** Same as {@link #build} except with setting the name and key parameters. */
public RuleClass build(String name, String key) {
  // The builder's name may be a placeholder (empty) or must match the requested one.
  Preconditions.checkArgument(this.name.isEmpty() || this.name.equals(name));
  type.checkName(name);
  type.checkAttributes(attributes);
  // Exactly the ABSTRACT type has neither a native factory nor a Starlark function.
  Preconditions.checkState(
      (type == RuleClassType.ABSTRACT)
          == (configuredTargetFactory == null && configuredTargetFunction == null),
      "Bad combo for %s: %s %s %s",
      name,
      type,
      configuredTargetFactory,
      configuredTargetFunction);
  if (!workspaceOnly) {
    // Only Starlark rules must carry an implementation function and a
    // definition-environment label; external bindings are WORKSPACE-only.
    if (skylark) {
      assertSkylarkRuleClassHasImplementationFunction();
      assertSkylarkRuleClassHasEnvironmentLabel();
    }
    Preconditions.checkState(externalBindingsFunction == NO_EXTERNAL_BINDINGS);
  }
  if (type == RuleClassType.PLACEHOLDER) {
    Preconditions.checkNotNull(ruleDefinitionEnvironmentHashCode, this.name);
  }
  // Per-target execution constraints are expressed through an implicit
  // exec_compatible_with attribute, added here if the rule didn't define one.
  if (executionPlatformConstraintsAllowed == ExecutionPlatformConstraintsAllowed.PER_TARGET
      && !this.contains("exec_compatible_with")) {
    this.add(
        attr("exec_compatible_with", BuildType.LABEL_LIST)
            .allowedFileTypes()
            .nonconfigurable("Used in toolchain resolution")
            .value(ImmutableList.of()));
  }
  if (buildSetting != null) {
    // NB: this local deliberately shadows the RuleClassType field; it is the
    // build setting's value type.
    Type<?> type = buildSetting.getType();
    Attribute.Builder<?> attrBuilder =
        attr(SKYLARK_BUILD_SETTING_DEFAULT_ATTR_NAME, type)
            .nonconfigurable(BUILD_SETTING_DEFAULT_NONCONFIGURABLE)
            .mandatory();
    if (BuildType.isLabelType(type)) {
      attrBuilder.allowedFileTypes(FileTypeSet.ANY_FILE);
      attrBuilder.allowedRuleClasses(ANY_RULE);
    }
    this.add(attrBuilder);
  }
  return new RuleClass(
      name,
      key,
      type,
      skylark,
      skylarkTestable,
      documented,
      publicByDefault,
      binaryOutput,
      workspaceOnly,
      isExecutableSkylark,
      isAnalysisTest,
      hasAnalysisTestTransition,
      hasFunctionTransitionWhitelist,
      ignorePackageLicenses,
      implicitOutputsFunction,
      isConfigMatcher,
      transitionFactory,
      configuredTargetFactory,
      validityPredicate,
      preferredDependencyPredicate,
      advertisedProviders.build(),
      configuredTargetFunction,
      externalBindingsFunction,
      optionReferenceFunction,
      ruleDefinitionEnvironmentLabel,
      ruleDefinitionEnvironmentHashCode,
      configurationFragmentPolicy.build(),
      supportsConstraintChecking,
      thirdPartyLicenseExistencePolicy,
      requiredToolchains,
      supportsPlatforms,
      executionPlatformConstraintsAllowed,
      executionPlatformConstraints,
      outputFileKind,
      attributes.values(),
      buildSetting);
}
/**
 * Checks that a Starlark rule class has an implementation function exactly when it is a
 * concrete (NORMAL or TEST) rule class.
 */
private void assertSkylarkRuleClassHasImplementationFunction() {
  boolean isConcreteType = type == RuleClassType.NORMAL || type == RuleClassType.TEST;
  boolean hasImplementation = configuredTargetFunction != null;
  Preconditions.checkState(
      isConcreteType == hasImplementation, "%s %s", type, configuredTargetFunction);
}
/**
 * Checks that a Starlark rule class carries a rule-definition-environment label exactly when it
 * is a NORMAL, TEST, or PLACEHOLDER rule class.
 */
private void assertSkylarkRuleClassHasEnvironmentLabel() {
  boolean needsLabel =
      type == RuleClassType.NORMAL
          || type == RuleClassType.TEST
          || type == RuleClassType.PLACEHOLDER;
  boolean hasLabel = ruleDefinitionEnvironmentLabel != null;
  Preconditions.checkState(
      needsLabel == hasLabel,
      "Concrete Starlark rule classes can't have null labels: %s %s",
      ruleDefinitionEnvironmentLabel,
      type);
}
/**
 * Declares that the implementation of the associated rule class requires the given fragments to
 * be present in this rule's host and target configurations.
 *
 * <p>The value is inherited by subclasses.
 */
public Builder requiresConfigurationFragments(Class<?>... configurationFragments) {
  ImmutableSet<Class<?>> fragments = ImmutableSet.copyOf(configurationFragments);
  configurationFragmentPolicy.requiresConfigurationFragments(fragments);
  return this;
}
/**
 * Declares that the implementation of the associated rule class requires the given fragments to
 * be present in the given configuration that isn't the rule's configuration but is also
 * readable by the rule.
 *
 * <p>You probably don't want to use this, because rules generally shouldn't read configurations
 * other than their own. If you want to declare host config fragments, see
 * {@link com.google.devtools.build.lib.analysis.config.ConfigAwareRuleClassBuilder}.
 *
 * <p>The value is inherited by subclasses.
 */
public Builder requiresConfigurationFragments(ConfigurationTransition transition,
    Class<?>... configurationFragments) {
  ImmutableSet<Class<?>> fragments = ImmutableSet.copyOf(configurationFragments);
  configurationFragmentPolicy.requiresConfigurationFragments(transition, fragments);
  return this;
}
/**
 * Declares the configuration fragments that are required by this rule for the target
 * configuration.
 *
 * <p>In contrast to {@link #requiresConfigurationFragments(Class...)}, this method takes the
 * Skylark module names of fragments instead of their classes.
 */
public Builder requiresConfigurationFragmentsBySkylarkModuleName(
    Collection<String> configurationFragmentNames) {
  configurationFragmentPolicy.requiresConfigurationFragmentsBySkylarkModuleName(
      configurationFragmentNames);
  return this;
}
/**
 * Declares that the implementation of the associated rule class requires the given
 * fragments to be present in the given configuration that isn't the rule's configuration but
 * is also readable by the rule.
 *
 * <p>In contrast to {@link #requiresConfigurationFragments(ConfigurationTransition, Class...)},
 * this method takes Skylark module names of fragments instead of their classes.
 *
 * <p>You probably don't want to use this, because rules generally shouldn't read configurations
 * other than their own. If you want to declare host config fragments, see
 * {@link com.google.devtools.build.lib.analysis.config.ConfigAwareRuleClassBuilder}.
 *
 * <p>The value is inherited by subclasses.
 */
public Builder requiresConfigurationFragmentsBySkylarkModuleName(
    ConfigurationTransition transition, Collection<String> configurationFragmentNames) {
  configurationFragmentPolicy.requiresConfigurationFragmentsBySkylarkModuleName(transition,
      configurationFragmentNames);
  return this;
}
/** Marks this (Starlark-only) rule class as testable via Starlark testing infrastructure. */
public Builder setSkylarkTestable() {
  Preconditions.checkState(skylark, "Cannot set skylarkTestable on a non-Starlark rule");
  skylarkTestable = true;
  return this;
}

/**
 * Sets the policy for the case where the configuration is missing required fragments (see
 * {@link #requiresConfigurationFragments}).
 */
public Builder setMissingFragmentPolicy(MissingFragmentPolicy missingFragmentPolicy) {
  configurationFragmentPolicy.setMissingFragmentPolicy(missingFragmentPolicy);
  return this;
}

/** Excludes this rule class from generated documentation. */
public Builder setUndocumented() {
  documented = false;
  return this;
}

/** Makes targets of this rule class publicly visible by default. */
public Builder publicByDefault() {
  publicByDefault = true;
  return this;
}

/** Restricts this rule class to WORKSPACE files. */
public Builder setWorkspaceOnly() {
  workspaceOnly = true;
  return this;
}
/**
 * Determines the outputs of this rule to be created beneath the {@code
 * genfiles} directory. By default, files are created beneath the {@code bin}
 * directory.
 *
 * <p>This property is not inherited and this method should not be called by
 * builder of {@link RuleClassType#ABSTRACT} rule class.
 *
 * @throws IllegalStateException if called for abstract rule class builder
 */
public Builder setOutputToGenfiles() {
  Preconditions.checkState(type != RuleClassType.ABSTRACT,
      "Setting not inherited property (output to genrules) of abstract rule class '%s'", name);
  this.binaryOutput = false;
  return this;
}

/**
 * Sets the implicit outputs function of the rule class. The default implicit
 * outputs function is {@link ImplicitOutputsFunction#NONE}.
 *
 * <p>This property is not inherited and this method should not be called by
 * builder of {@link RuleClassType#ABSTRACT} rule class.
 *
 * @throws IllegalStateException if called for abstract rule class builder
 */
public Builder setImplicitOutputsFunction(
    ImplicitOutputsFunction implicitOutputsFunction) {
  Preconditions.checkState(type != RuleClassType.ABSTRACT,
      "Setting not inherited property (implicit output function) of abstract rule class '%s'",
      name);
  this.implicitOutputsFunction = implicitOutputsFunction;
  return this;
}

/**
 * Applies the given transition to all incoming edges for this rule class.
 *
 * <p>This cannot be a {@link SplitTransition} because that requires coordination with the
 * rule's parent: use {@link Attribute.Builder#cfg(ConfigurationTransition)} on the parent to
 * declare splits.
 *
 * <p>If you need the transition to depend on the rule it's being applied to, use
 * {@link #cfg(RuleTransitionFactory)}.
 */
public Builder cfg(PatchTransition transition) {
  // Wrap the fixed transition in a factory so both overloads share one code path.
  return cfg(new FixedTransitionFactory(transition));
}

/**
 * Applies the given transition factory to all incoming edges for this rule class.
 *
 * <p>Unlike {@link #cfg(PatchTransition)}, the factory can examine the rule when deciding what
 * transition to use.
 */
public Builder cfg(RuleTransitionFactory transitionFactory) {
  Preconditions.checkState(type != RuleClassType.ABSTRACT,
      "Setting not inherited property (cfg) of abstract rule class '%s'", name);
  // cfg may only be set once per builder.
  Preconditions.checkState(this.transitionFactory == null,
      "Property cfg has already been set");
  Preconditions.checkNotNull(transitionFactory);
  this.transitionFactory = transitionFactory;
  return this;
}
/** Sets the factory that creates configured targets from rules of this class. */
public Builder factory(ConfiguredTargetFactory<?, ?, ?> factory) {
  this.configuredTargetFactory = factory;
  return this;
}

/** Sets the third_party licenses() enforcement policy; see {@link ThirdPartyLicenseExistencePolicy}. */
public Builder setThirdPartyLicenseExistencePolicy(ThirdPartyLicenseExistencePolicy policy) {
  this.thirdPartyLicenseExistencePolicy = policy;
  return this;
}

/** Sets the predicate a rule instance must satisfy to be considered valid. */
public Builder setValidityPredicate(PredicateWithMessage<Rule> predicate) {
  this.validityPredicate = predicate;
  return this;
}

/** Sets the predicate used by {@code isPreferredDependency}-style checks. */
public Builder setPreferredDependencyPredicate(Predicate<String> predicate) {
  this.preferredDependencyPredicate = predicate;
  return this;
}
/**
 * State that the rule class being built possibly supplies the specified provider to its direct
 * dependencies.
 *
 * <p>When computing the set of aspects required for a rule, only the providers listed here are
 * considered. The presence of a provider here does not mean that the rule <b>must</b> implement
 * said provider, merely that it <b>can</b>. After the configured target is constructed from
 * this rule, aspects will be filtered according to the set of actual providers.
 *
 * <p>This is here so that we can do the loading phase overestimation required for
 * "blaze query", which does not have the configured targets available.
 *
 * <p>It's okay for the rule class eventually not to supply it (possibly based on analysis phase
 * logic), but if a provider is not advertised but is supplied, aspects that require the it will
 * not be evaluated for the rule.
 */
public Builder advertiseProvider(Class<?>... providers) {
  for (Class<?> provider : providers) {
    advertisedProviders.addNative(provider);
  }
  return this;
}

/** Same as {@link #advertiseProvider}, but for Starlark-declared providers. */
public Builder advertiseSkylarkProvider(SkylarkProviderIdentifier... skylarkProviders) {
  for (SkylarkProviderIdentifier skylarkProviderIdentifier : skylarkProviders) {
    advertisedProviders.addSkylark(skylarkProviderIdentifier);
  }
  return this;
}

/**
 * Set if the rule can have any provider. This is true for "alias" rules like
 * <code>bind</code> .
 */
public Builder canHaveAnyProvider() {
  advertisedProviders.canHaveAnyProvider();
  return this;
}
/**
 * Adds the attribute to this rule class.
 *
 * @throws IllegalStateException if an attribute with the same name is already present
 */
public Builder addAttribute(Attribute attribute) {
  String attrName = attribute.getName();
  Preconditions.checkState(
      !attributes.containsKey(attrName),
      "An attribute with the name '%s' already exists.",
      attrName);
  attributes.put(attrName, attribute);
  return this;
}
/**
 * Replaces an existing attribute of the same name and type.
 *
 * @throws IllegalStateException if no attribute of that name exists, or if the replacement has
 *     a different type
 */
private void overrideAttribute(Attribute attribute) {
  String attrName = attribute.getName();
  Preconditions.checkState(
      attributes.containsKey(attrName),
      "No such attribute '%s' to override in ruleclass '%s'.",
      attrName,
      name);
  Type<?> existingType = attributes.get(attrName).getType();
  Type<?> incomingType = attribute.getType();
  Preconditions.checkState(
      existingType.equals(incomingType),
      "The type of the new attribute '%s' is different from the original one '%s'.",
      incomingType,
      existingType);
  attributes.put(attrName, attribute);
}
/**
 * Builds attribute from the attribute builder and adds it to this rule
 * class.
 *
 * @param attr attribute builder
 */
public <TYPE> Builder add(Attribute.Builder<TYPE> attr) {
  addAttribute(attr.build());
  return this;
}

/**
 * Builds attribute from the attribute builder and overrides the attribute
 * with the same name.
 *
 * @throws IllegalStateException if the attribute does not override one of the same name
 *     (overrideAttribute uses {@code Preconditions.checkState})
 */
public <TYPE> Builder override(Attribute.Builder<TYPE> attr) {
  overrideAttribute(attr.build());
  return this;
}
/**
 * Adds or overrides the attribute in the rule class. Meant for Skylark usage.
 *
 * @throws IllegalArgumentException if the attribute overrides an existing attribute (will be
 *     legal in the future).
 */
public void addOrOverrideAttribute(Attribute attribute) {
  String name = attribute.getName();
  // Attributes may be overridden in the future.
  Preconditions.checkArgument(!attributes.containsKey(name),
      "There is already a built-in attribute '%s' which cannot be overridden", name);
  addAttribute(attribute);
}

/** True if the rule class contains an attribute named {@code name}. */
public boolean contains(String name) {
  return attributes.containsKey(name);
}
/**
 * Sets the rule implementation function. Meant for Skylark usage.
 */
public Builder setConfiguredTargetFunction(BaseFunction func) {
  this.configuredTargetFunction = func;
  return this;
}

/** Associates a build setting with this rule class. Meant for Skylark usage. */
public Builder setBuildSetting(BuildSetting buildSetting) {
  this.buildSetting = buildSetting;
  return this;
}

/** Sets the function that supplies extra WORKSPACE bindings for workspace rules. */
public Builder setExternalBindingsFunction(Function<? super Rule, Map<String, Label>> func) {
  this.externalBindingsFunction = func;
  return this;
}

/** Sets the rule definition environment label and hash code. Meant for Skylark usage. */
public Builder setRuleDefinitionEnvironmentLabelAndHashCode(Label label, String hashCode) {
  this.ruleDefinitionEnvironmentLabel = Preconditions.checkNotNull(label, this.name);
  this.ruleDefinitionEnvironmentHashCode = Preconditions.checkNotNull(hashCode, this.name);
  return this;
}

/** Returns the rule definition environment label; null for native rules. */
public Label getRuleDefinitionEnvironmentLabel() {
  return this.ruleDefinitionEnvironmentLabel;
}
/**
 * Removes an attribute with the same name from this rule class.
 *
 * @throws IllegalStateException if the attribute with this name does not exist
 */
public Builder removeAttribute(String name) {
  // Note: was declared <TYPE> Builder; the type parameter was unused and has been dropped
  // (source- and binary-compatible for all ordinary call sites).
  Preconditions.checkState(attributes.containsKey(name), "No such attribute '%s' to remove.",
      name);
  attributes.remove(name);
  return this;
}
/**
 * This rule class outputs a default executable for every rule with the same name as
 * the rules's. Only works for Skylark.
 */
public Builder setExecutableSkylark() {
  // Note: was declared <TYPE> Builder; the type parameter was unused and has been dropped.
  this.isExecutableSkylark = true;
  return this;
}
/** This rule class is marked as an analysis test. */
public Builder setIsAnalysisTest() {
  this.isAnalysisTest = true;
  return this;
}

/** Returns whether this rule class is marked as an analysis test. */
public boolean isAnalysisTest() {
  return this.isAnalysisTest;
}

/**
 * This rule class has at least one attribute with an analysis test transition. (A
 * starlark-defined transition using analysis_test_transition()).
 */
public Builder setHasAnalysisTestTransition() {
  this.hasAnalysisTestTransition = true;
  return this;
}

/** Returns whether any attribute carries an analysis test transition. */
public boolean hasAnalysisTestTransition() {
  return this.hasAnalysisTestTransition;
}

/**
 * This rule class has the _whitelist_function_transition attribute. Intended only for Skylark
 * rules.
 */
public <TYPE> Builder setHasFunctionTransitionWhitelist() {
  this.hasFunctionTransitionWhitelist = true;
  return this;
}

/** This rule class ignores package-level licenses. */
public Builder setIgnorePackageLicenses() {
  this.ignorePackageLicenses = true;
  return this;
}

/** Returns whether this rule class ignores package-level licenses. */
public boolean ignorePackageLicenses() {
  return this.ignorePackageLicenses;
}

/** Returns this builder's {@link RuleClassType}. */
public RuleClassType getType() {
  return this.type;
}
/**
 * Sets the kind of output files this rule creates.
 * DO NOT USE! This only exists to support the non-open-sourced {@code fileset} rule.
 * {@see OutputFile.Kind}.
 */
public Builder setOutputFileKind(OutputFile.Kind outputFileKind) {
  this.outputFileKind = Preconditions.checkNotNull(outputFileKind);
  return this;
}

/**
 * Declares that instances of this rule are compatible with the specified environments,
 * in addition to the defaults declared by their environment groups. This can be overridden
 * by rule-specific declarations. See
 * {@link com.google.devtools.build.lib.analysis.constraints.ConstraintSemantics} for details.
 */
public <TYPE> Builder compatibleWith(Label... environments) {
  add(
      attr(DEFAULT_COMPATIBLE_ENVIRONMENT_ATTR, LABEL_LIST)
          .value(ImmutableList.copyOf(environments)));
  return this;
}

/**
 * Declares that instances of this rule are restricted to the specified environments, i.e.
 * these override the defaults declared by their environment groups. This can be overridden
 * by rule-specific declarations. See
 * {@link com.google.devtools.build.lib.analysis.constraints.ConstraintSemantics} for details.
 *
 * <p>The input list cannot be empty.
 */
public <TYPE> Builder restrictedTo(Label firstEnvironment, Label... otherEnvironments) {
  // A separate mandatory first argument statically guarantees a non-empty list.
  ImmutableList<Label> environments = ImmutableList.<Label>builder().add(firstEnvironment)
      .add(otherEnvironments).build();
  add(
      attr(DEFAULT_RESTRICTED_ENVIRONMENT_ATTR, LABEL_LIST)
          .value(environments));
  return this;
}

/**
 * Exempts rules of this type from the constraint enforcement system. This should only be
 * applied to rules that are intrinsically incompatible with constraint checking (any
 * application of this method weakens the reach and strength of the system).
 *
 * @param reason user-informative message explaining the reason for exemption (not used)
 */
public <TYPE> Builder exemptFromConstraintChecking(String reason) {
  Preconditions.checkState(this.supportsConstraintChecking);
  this.supportsConstraintChecking = false;
  // Remove any environment attributes inherited from parents; they are meaningless now.
  attributes.remove(RuleClass.COMPATIBLE_ENVIRONMENT_ATTR);
  attributes.remove(RuleClass.RESTRICTED_ENVIRONMENT_ATTR);
  return this;
}
/**
 * Causes rules of this type to be evaluated with the parent's configuration, always, so that
 * rules which match against parts of the configuration will behave as expected.
 *
 * <p>This is only intended for use by {@code config_setting} - other rules should not use this!
 */
public Builder setIsConfigMatcherForConfigSettingOnly() {
  this.isConfigMatcher = true;
  return this;
}

/**
 * Causes rules of this type to implicitly reference the configuration fragments associated with
 * the options its attributes reference.
 *
 * <p>This is only intended for use by {@code config_setting} - other rules should not use this!
 */
public Builder setOptionReferenceFunctionForConfigSettingOnly(
    Function<? super Rule, ? extends Set<String>> optionReferenceFunction) {
  this.optionReferenceFunction = Preconditions.checkNotNull(optionReferenceFunction);
  return this;
}

/**
 * Causes rules of this type to require the specified toolchains be available via toolchain
 * resolution when a target is configured.
 */
public Builder addRequiredToolchains(Iterable<Label> toolchainLabels) {
  Iterables.addAll(this.requiredToolchains, toolchainLabels);
  return this;
}

/**
 * Causes rules of this type to require the specified toolchains be available via toolchain
 * resolution when a target is configured.
 */
public Builder addRequiredToolchains(Label... toolchainLabels) {
  // Varargs convenience overload; delegates to the Iterable form.
  return this.addRequiredToolchains(Lists.newArrayList(toolchainLabels));
}

/**
 * Rules that support platforms can use toolchains and execution platforms. Rules that are part
 * of configuring toolchains and platforms should set this to {@code false}.
 */
public Builder supportsPlatforms(boolean flag) {
  this.supportsPlatforms = flag;
  return this;
}

/**
 * Specifies whether targets of this rule can add additional constraints on the execution
 * platform selected. If this is {@link ExecutionPlatformConstraintsAllowed#PER_TARGET}, there
 * will be an attribute named {@code exec_compatible_with} that can be used to add these
 * constraints.
 *
 * <p>Please note that this value is not inherited by child rules, and must be re-set on them if
 * the same behavior is required.
 */
public Builder executionPlatformConstraintsAllowed(ExecutionPlatformConstraintsAllowed value) {
  this.executionPlatformConstraintsAllowed = value;
  return this;
}

/**
 * Adds additional execution platform constraints that apply for all targets from this rule.
 *
 * <p>Please note that this value is inherited by child rules.
 */
public Builder addExecutionPlatformConstraints(Label... constraints) {
  // Varargs convenience overload; delegates to the Iterable form.
  return this.addExecutionPlatformConstraints(Lists.newArrayList(constraints));
}

/**
 * Adds additional execution platform constraints that apply for all targets from this rule.
 *
 * <p>Please note that this value is inherited by child rules.
 */
public Builder addExecutionPlatformConstraints(Iterable<Label> constraints) {
  Iterables.addAll(this.executionPlatformConstraints, constraints);
  return this;
}
/**
 * Returns an Attribute.Builder object which contains a replica of the
 * same attribute in the parent rule if exists.
 *
 * @param name the name of the attribute
 */
public Attribute.Builder<?> copy(String name) {
  Attribute existing = attributes.get(name);
  Preconditions.checkArgument(
      existing != null, "Attribute %s does not exist in parent rule class.", name);
  return existing.cloneBuilder();
}
}
private final String name; // e.g. "cc_library"

private final String key; // Just the name for native, label + name for skylark

/**
 * The kind of target represented by this RuleClass (e.g. "cc_library rule").
 * Note: Even though there is partial duplication with the {@link RuleClass#name} field,
 * we want to store this as a separate field instead of generating it on demand in order to
 * avoid string duplication.
 */
private final String targetKind;

private final RuleClassType type;
// True for Starlark-defined rule classes.
private final boolean isSkylark;
private final boolean skylarkTestable;
private final boolean documented;
private final boolean publicByDefault;
// True => outputs under bin; false => outputs under genfiles.
private final boolean binaryOutput;
private final boolean workspaceOnly;
private final boolean isExecutableSkylark;
private final boolean isAnalysisTest;
private final boolean hasAnalysisTestTransition;
// True only for config_setting; targets evaluate with the parent's configuration.
private final boolean isConfigMatcher;
private final boolean hasFunctionTransitionWhitelist;
private final boolean ignorePackageLicenses;

/**
 * A (unordered) mapping from attribute names to small integers indexing into
 * the {@code attributes} array.
 */
private final Map<String, Integer> attributeIndex;

/**
 * All attributes of this rule class (including inherited ones) ordered by
 * attributeIndex value.
 */
private final ImmutableList<Attribute> attributes;

/** Names of the non-configurable attributes of this rule class. */
private final ImmutableList<String> nonConfigurableAttributes;

/**
 * The set of implicit outputs generated by a rule, expressed as a function
 * of that rule.
 */
private final ImplicitOutputsFunction implicitOutputsFunction;

/**
 * A factory which will produce a configuration transition that should be applied on any edge of
 * the configured target graph that leads into a target of this rule class.
 */
private final RuleTransitionFactory transitionFactory;

/** The factory that creates configured targets from this rule. */
private final ConfiguredTargetFactory<?, ?, ?> configuredTargetFactory;

/**
 * The constraint the package name of the rule instance must fulfill
 */
private final PredicateWithMessage<Rule> validityPredicate;

/**
 * See {@link #isPreferredDependency}.
 */
private final Predicate<String> preferredDependencyPredicate;

/**
 * The list of transitive info providers this class advertises to aspects.
 */
private final AdvertisedProviderSet advertisedProviders;

/**
 * The Skylark rule implementation of this RuleClass. Null for non Skylark executable RuleClasses.
 */
@Nullable private final BaseFunction configuredTargetFunction;

/**
 * The BuildSetting associated with this rule. Null for all RuleClasses except Skylark-defined
 * rules that pass {@code build_setting} to their {@code rule()} declaration.
 */
@Nullable private final BuildSetting buildSetting;

/**
 * Returns the extra bindings a workspace function adds to the WORKSPACE file.
 */
private final Function<? super Rule, Map<String, Label>> externalBindingsFunction;

/**
 * Returns the options referenced by this rule's attributes.
 */
private final Function<? super Rule, ? extends Set<String>> optionReferenceFunction;

/**
 * The Skylark rule definition environment's label and hash code of this RuleClass. Null for non
 * Skylark executable RuleClasses.
 */
@Nullable private final Label ruleDefinitionEnvironmentLabel;
@Nullable private final String ruleDefinitionEnvironmentHashCode;
private final OutputFile.Kind outputFileKind;

/**
 * The set of configuration fragments which are legal for this rule's implementation to access.
 */
private final ConfigurationFragmentPolicy configurationFragmentPolicy;

/**
 * Determines whether instances of this rule should be checked for constraint compatibility
 * with their dependencies and the rules that depend on them. This should be true for
 * everything except for rules that are intrinsically incompatible with the constraint system.
 */
private final boolean supportsConstraintChecking;

// third_party licenses() enforcement policy; see ThirdPartyLicenseExistencePolicy.
private final ThirdPartyLicenseExistencePolicy thirdPartyLicenseExistencePolicy;

// Toolchains required via toolchain resolution for targets of this class.
private final ImmutableSet<Label> requiredToolchains;
private final boolean supportsPlatforms;
private final ExecutionPlatformConstraintsAllowed executionPlatformConstraintsAllowed;
private final ImmutableSet<Label> executionPlatformConstraints;
/**
 * Constructs an instance of RuleClass whose name is 'name', attributes are 'attributes'. The
 * {@code srcsAllowedFiles} determines which types of files are allowed as parameters to the
 * "srcs" attribute; rules are always allowed. For the "deps" attribute, there are four cases:
 *
 * <ul>
 * <li>if the parameter is a file, it is allowed if its file type is given in {@code
 *     depsAllowedFiles},
 * <li>if the parameter is a rule and the rule class is accepted by {@code depsAllowedRules},
 *     then it is allowed,
 * <li>if the parameter is a rule and the rule class is not accepted by {@code
 *     depsAllowedRules}, but accepted by {@code depsAllowedRulesWithWarning}, then it is
 *     allowed, but triggers a warning;
 * <li>all other parameters trigger an error.
 * </ul>
 *
 * <p>The {@code depsAllowedRules} predicate should have a {@code toString} method which returns a
 * plain English enumeration of the allowed rule class names, if it does not allow all rule
 * classes.
 */
@VisibleForTesting
RuleClass(
    String name,
    String key,
    RuleClassType type,
    boolean isSkylark,
    boolean skylarkTestable,
    boolean documented,
    boolean publicByDefault,
    boolean binaryOutput,
    boolean workspaceOnly,
    boolean isExecutableSkylark,
    boolean isAnalysisTest,
    boolean hasAnalysisTestTransition,
    boolean hasFunctionTransitionWhitelist,
    boolean ignorePackageLicenses,
    ImplicitOutputsFunction implicitOutputsFunction,
    boolean isConfigMatcher,
    RuleTransitionFactory transitionFactory,
    ConfiguredTargetFactory<?, ?, ?> configuredTargetFactory,
    PredicateWithMessage<Rule> validityPredicate,
    Predicate<String> preferredDependencyPredicate,
    AdvertisedProviderSet advertisedProviders,
    @Nullable BaseFunction configuredTargetFunction,
    Function<? super Rule, Map<String, Label>> externalBindingsFunction,
    Function<? super Rule, ? extends Set<String>> optionReferenceFunction,
    @Nullable Label ruleDefinitionEnvironmentLabel,
    String ruleDefinitionEnvironmentHashCode,
    ConfigurationFragmentPolicy configurationFragmentPolicy,
    boolean supportsConstraintChecking,
    ThirdPartyLicenseExistencePolicy thirdPartyLicenseExistencePolicy,
    Set<Label> requiredToolchains,
    boolean supportsPlatforms,
    ExecutionPlatformConstraintsAllowed executionPlatformConstraintsAllowed,
    Set<Label> executionPlatformConstraints,
    OutputFile.Kind outputFileKind,
    Collection<Attribute> attributes,
    @Nullable BuildSetting buildSetting) {
  this.name = name;
  this.key = key;
  this.type = type;
  this.isSkylark = isSkylark;
  // Derived, cached up front to avoid repeated string concatenation.
  this.targetKind = name + Rule.targetKindSuffix();
  this.skylarkTestable = skylarkTestable;
  this.documented = documented;
  this.publicByDefault = publicByDefault;
  this.binaryOutput = binaryOutput;
  this.implicitOutputsFunction = implicitOutputsFunction;
  this.isConfigMatcher = isConfigMatcher;
  this.transitionFactory = transitionFactory;
  this.configuredTargetFactory = configuredTargetFactory;
  this.validityPredicate = validityPredicate;
  this.preferredDependencyPredicate = preferredDependencyPredicate;
  this.advertisedProviders = advertisedProviders;
  this.configuredTargetFunction = configuredTargetFunction;
  this.externalBindingsFunction = externalBindingsFunction;
  this.optionReferenceFunction = optionReferenceFunction;
  this.ruleDefinitionEnvironmentLabel = ruleDefinitionEnvironmentLabel;
  this.ruleDefinitionEnvironmentHashCode = ruleDefinitionEnvironmentHashCode;
  this.outputFileKind = outputFileKind;
  // Fail fast if two attributes share a public name.
  validateNoClashInPublicNames(attributes);
  this.attributes = ImmutableList.copyOf(attributes);
  this.workspaceOnly = workspaceOnly;
  this.isExecutableSkylark = isExecutableSkylark;
  this.isAnalysisTest = isAnalysisTest;
  this.hasAnalysisTestTransition = hasAnalysisTestTransition;
  this.hasFunctionTransitionWhitelist = hasFunctionTransitionWhitelist;
  this.ignorePackageLicenses = ignorePackageLicenses;
  this.configurationFragmentPolicy = configurationFragmentPolicy;
  this.supportsConstraintChecking = supportsConstraintChecking;
  this.thirdPartyLicenseExistencePolicy = thirdPartyLicenseExistencePolicy;
  this.requiredToolchains = ImmutableSet.copyOf(requiredToolchains);
  this.supportsPlatforms = supportsPlatforms;
  this.executionPlatformConstraintsAllowed = executionPlatformConstraintsAllowed;
  this.executionPlatformConstraints = ImmutableSet.copyOf(executionPlatformConstraints);
  this.buildSetting = buildSetting;

  // Create the index and collect non-configurable attributes.
  int index = 0;
  attributeIndex = new HashMap<>(attributes.size());
  ImmutableList.Builder<String> nonConfigurableAttributesBuilder = ImmutableList.builder();
  for (Attribute attribute : attributes) {
    attributeIndex.put(attribute.getName(), index++);
    if (!attribute.isConfigurable()) {
      nonConfigurableAttributesBuilder.add(attribute.getName());
    }
  }
  this.nonConfigurableAttributes = nonConfigurableAttributesBuilder.build();
}
/**
 * Verifies that no two of the given attributes resolve to the same public name.
 *
 * @throws IllegalStateException if two attributes share an identical public name
 */
private void validateNoClashInPublicNames(Iterable<Attribute> attributes) {
  Map<String, Attribute> publicToPrivateNames = new HashMap<>();
  for (Attribute attribute : attributes) {
    String publicName = attribute.getPublicName();
    // putIfAbsent does the containsKey/get/put dance of the old code in a single map lookup and
    // returns the previously mapped attribute (or null), which is exactly the clash we report.
    Attribute clashingAttribute = publicToPrivateNames.putIfAbsent(publicName, attribute);
    if (clashingAttribute != null) {
      throw new IllegalStateException(
          String.format(
              "Rule %s: Attributes %s and %s have an identical public name: %s",
              name,
              attribute.getName(),
              clashingAttribute.getName(),
              publicName));
    }
  }
}
/**
 * Returns the default function for determining the set of implicit outputs generated by a given
 * rule. If not otherwise specified, this will be the implementation used by {@link Rule}s
 * created with this {@link RuleClass}.
 *
 * <p>Do not use this value to calculate implicit outputs for a rule, instead use
 * {@link Rule#getImplicitOutputsFunction()}.
 *
 * <p>An implicit output is an OutputFile that automatically comes into existence when a rule of
 * this class is declared, and whose name is derived from the name of the rule.
 *
 * <p>Implicit outputs are widely relied upon. All ".so", and "_deploy.jar" targets referenced
 * in BUILD files are examples.
 */
@VisibleForTesting
public ImplicitOutputsFunction getDefaultImplicitOutputsFunction() {
return implicitOutputsFunction;
}
/** Returns the configuration-transition factory supplied when this rule class was built. */
public RuleTransitionFactory getTransitionFactory() {
return transitionFactory;
}
/**
 * Returns the factory that turns a rule of this class into a configured target.
 *
 * <p>The unchecked cast is safe only if the caller's type arguments match the factory stored at
 * construction time; the compiler cannot verify this, hence the suppression.
 */
@SuppressWarnings("unchecked")
public <CT, RC, ACE extends Throwable>
ConfiguredTargetFactory<CT, RC, ACE> getConfiguredTargetFactory() {
return (ConfiguredTargetFactory<CT, RC, ACE>) configuredTargetFactory;
}
/**
 * Returns the class of rule that this RuleClass represents (e.g. "cc_library").
 */
public String getName() {
return name;
}
/** Returns the type of rule that this RuleClass represents. Only for use during serialization. */
public RuleClassType getRuleClassType() {
return type;
}
/** Returns a unique key for this rule class. Used for profiling purposes. */
public String getKey() {
return key;
}
/**
 * Returns the target kind of this class of rule (e.g. "cc_library rule").
 * Computed once at construction as {@code name + Rule.targetKindSuffix()}.
 */
String getTargetKind() {
return targetKind;
}
/** Returns true if rules of this class may only appear in the WORKSPACE file. */
public boolean getWorkspaceOnly() {
return workspaceOnly;
}
/**
 * Returns true iff an attribute named {@code attrName} is defined for this rule class and that
 * attribute has exactly the given {@code type}.
 */
public boolean hasAttr(String attrName, Type<?> type) {
  Integer attrIndex = getAttributeIndex(attrName);
  if (attrIndex == null) {
    return false;
  }
  // Types are compared by reference, matching the original implementation.
  return getAttribute(attrIndex).getType() == type;
}
/**
 * Returns the index of the specified attribute name. Use of indices allows
 * space-efficient storage of attribute values in rules, since hashtables are
 * not required. (The index mapping is specific to each RuleClass and an
 * attribute may have a different index in the parent RuleClass.)
 *
 * <p>Returns null if the named attribute is not defined for this class of Rule.
 */
Integer getAttributeIndex(String attrName) {
return attributeIndex.get(attrName);
}
/**
 * Returns the attribute whose index is 'attrIndex'. Fails (with
 * IndexOutOfBoundsException from the backing list) if attrIndex is not in range.
 */
Attribute getAttribute(int attrIndex) {
return attributes.get(attrIndex);
}
/**
 * Returns the attribute named {@code attrName}; fails with NullPointerException if this rule
 * class defines no such attribute.
 */
public Attribute getAttributeByName(String attrName) {
  Integer attrIndex = getAttributeIndex(attrName);
  Preconditions.checkNotNull(attrIndex, "Attribute %s does not exist", attrName);
  return attributes.get(attrIndex);
}
/**
 * Returns the attribute named {@code attrName}, or null if this rule class defines no such
 * attribute.
 */
Attribute getAttributeByNameMaybe(String attrName) {
  Integer attrIndex = getAttributeIndex(attrName);
  if (attrIndex == null) {
    return null;
  }
  return attributes.get(attrIndex);
}
/**
 * Returns the number of attributes defined for this rule class.
 */
public int getAttributeCount() {
return attributeIndex.size();
}
/**
 * Returns an (immutable) list of all Attributes defined for this class of
 * rule, ordered by increasing index.
 */
public List<Attribute> getAttributes() {
return attributes;
}
/** Returns the (immutable) list of non-configurable attribute names defined for this rule class. */
public List<String> getNonConfigurableAttributes() {
return nonConfigurableAttributes;
}
/** Returns the predicate used to validate rules of this class after creation. */
public PredicateWithMessage<Rule> getValidityPredicate() {
return validityPredicate;
}
/**
 * Returns the set of advertised transitive info providers.
 *
 * <p>When computing the set of aspects required for a rule, only the providers listed here are
 * considered. The presence of a provider here does not mean that the rule <b>must</b> implement
 * said provider, merely that it <b>can</b>. After the configured target is constructed from this
 * rule, aspects will be filtered according to the set of actual providers.
 *
 * <p>This is here so that we can do the loading phase overestimation required for "blaze query",
 * which does not have the configured targets available.
 **/
public AdvertisedProviderSet getAdvertisedProviders() {
return advertisedProviders;
}
/**
 * For --compile_one_dependency: if multiple rules consume the specified target,
 * should we choose this one over the "unpreferred" options?
 */
public boolean isPreferredDependency(String filename) {
return preferredDependencyPredicate.apply(filename);
}
/**
 * Returns this rule's policy for configuration fragment access.
 */
public ConfigurationFragmentPolicy getConfigurationFragmentPolicy() {
return configurationFragmentPolicy;
}
/**
 * Returns true if rules of this type can be used with the constraint enforcement system.
 */
public boolean supportsConstraintChecking() {
return supportsConstraintChecking;
}
/**
 * Returns true if rules of this type should be evaluated with the parent's configuration so that
 * they can match on aspects of it.
 */
public boolean isConfigMatcher() {
return isConfigMatcher;
}
/**
 * Creates a new {@link Rule} {@code r} where {@code r.getPackage()} is the {@link Package}
 * associated with {@code pkgBuilder}.
 *
 * <p>The created {@link Rule} will be populated with attribute values from {@code
 * attributeValues} or the default attribute values associated with this {@link RuleClass} and
 * {@code pkgBuilder}.
 *
 * <p>The created {@link Rule} will also be populated with output files. These output files will
 * have been collected from the explicitly provided values of type {@link BuildType#OUTPUT} and
 * {@link BuildType#OUTPUT_LIST} as well as from the implicit outputs determined by this {@link
 * RuleClass} and the values in {@code attributeValues}.
 *
 * <p>This performs several validity checks. Invalid output file labels result in a thrown {@link
 * LabelSyntaxException}. Computed default attributes that fail during precomputation result in a
 * {@link CannotPrecomputeDefaultsException}. All other errors are reported on {@code
 * eventHandler}.
 */
<T> Rule createRule(
Package.Builder pkgBuilder,
Label ruleLabel,
AttributeValues<T> attributeValues,
EventHandler eventHandler,
@Nullable FuncallExpression ast,
Location location,
AttributeContainer attributeContainer,
boolean checkThirdPartyRulesHaveLicenses)
throws LabelSyntaxException, InterruptedException, CannotPrecomputeDefaultsException {
Rule rule = pkgBuilder.createRule(ruleLabel, this, location, attributeContainer);
populateRuleAttributeValues(rule, pkgBuilder, attributeValues, eventHandler);
checkAspectAllowedValues(rule, eventHandler);
rule.populateOutputFiles(eventHandler, pkgBuilder);
// Attribute locations can only be recorded when the originating BUILD-file call is available.
if (ast != null) {
populateAttributeLocations(rule, ast);
}
checkForDuplicateLabels(rule, eventHandler);
// The per-rule-class policy overrides the caller's request; the caller's flag only decides the
// HONOR_THIRD_PARTY_LICENSE_CHECKING_FLAG (default) case.
boolean actuallyCheckLicense;
if (thirdPartyLicenseExistencePolicy == ThirdPartyLicenseExistencePolicy.ALWAYS_CHECK) {
actuallyCheckLicense = true;
} else if (thirdPartyLicenseExistencePolicy == ThirdPartyLicenseExistencePolicy.NEVER_CHECK) {
actuallyCheckLicense = false;
} else {
actuallyCheckLicense = checkThirdPartyRulesHaveLicenses;
}
if (actuallyCheckLicense) {
checkThirdPartyRuleHasLicense(rule, pkgBuilder, eventHandler);
}
checkForValidSizeAndTimeoutValues(rule, eventHandler);
rule.checkValidityPredicate(eventHandler);
rule.checkForNullLabels();
return rule;
}
/**
 * Same as {@link #createRule}, except without some internal sanity checks.
 *
 * <p>Compared to {@link #createRule}: no aspect/duplicate-label/license/size/timeout/validity
 * checks, no attribute locations, and all events go to {@link NullEventHandler#INSTANCE}.
 *
 * <p>Don't call this function unless you know what you're doing.
 */
<T> Rule createRuleUnchecked(
Package.Builder pkgBuilder,
Label ruleLabel,
AttributeValues<T> attributeValues,
Location location,
AttributeContainer attributeContainer,
ImplicitOutputsFunction implicitOutputsFunction)
throws InterruptedException, CannotPrecomputeDefaultsException {
Rule rule = pkgBuilder.createRule(
ruleLabel,
this,
location,
attributeContainer,
implicitOutputsFunction);
populateRuleAttributeValues(rule, pkgBuilder, attributeValues, NullEventHandler.INSTANCE);
rule.populateOutputFilesUnchecked(NullEventHandler.INSTANCE, pkgBuilder);
return rule;
}
/**
 * Populates the attributes table of the new {@link Rule} with the values in the {@code
 * attributeValues} map and with default values provided by this {@link RuleClass} and the {@code
 * pkgBuilder}.
 *
 * <p>Explicitly-provided values are applied first; defaults fill only the remaining slots.
 *
 * <p>Errors are reported on {@code eventHandler}.
 */
private <T> void populateRuleAttributeValues(
Rule rule,
Package.Builder pkgBuilder,
AttributeValues<T> attributeValues,
EventHandler eventHandler)
throws InterruptedException, CannotPrecomputeDefaultsException {
BitSet definedAttrIndices =
populateDefinedRuleAttributeValues(
rule,
pkgBuilder.getRepositoryMapping(),
attributeValues,
pkgBuilder.getListInterner(),
eventHandler);
populateDefaultRuleAttributeValues(rule, pkgBuilder, definedAttrIndices, eventHandler);
// Now that all attributes are bound to values, collect and store configurable attribute keys.
populateConfigDependenciesAttribute(rule);
}
/**
 * Populates the attributes table of the new {@link Rule} with the values in the {@code
 * attributeValues} map.
 *
 * <p>Handles the special cases of the attribute named {@code "name"} and attributes with value
 * {@link Runtime#NONE}.
 *
 * <p>Returns a bitset {@code b} where {@code b.get(i)} is {@code true} if this method set a value
 * for the attribute with index {@code i} in this {@link RuleClass}. Errors are reported on {@code
 * eventHandler}.
 */
private <T> BitSet populateDefinedRuleAttributeValues(
Rule rule,
ImmutableMap<RepositoryName, RepositoryName> repositoryMapping,
AttributeValues<T> attributeValues,
Interner<ImmutableList<?>> listInterner,
EventHandler eventHandler) {
BitSet definedAttrIndices = new BitSet();
for (T attributeAccessor : attributeValues.getAttributeAccessors()) {
String attributeName = attributeValues.getName(attributeAccessor);
Object attributeValue = attributeValues.getValue(attributeAccessor);
// Ignore all None values. Such attributes fall through to default handling later, as if they
// had never been mentioned.
if (attributeValue == Runtime.NONE) {
continue;
}
// Check that the attribute's name belongs to a valid attribute for this rule class.
Integer attrIndex = getAttributeIndex(attributeName);
if (attrIndex == null) {
rule.reportError(
String.format(
"%s: no such attribute '%s' in '%s' rule", rule.getLabel(), attributeName, name),
eventHandler);
continue;
}
Attribute attr = getAttribute(attrIndex);
// Convert the build-lang value to a native value, if necessary.
Object nativeAttributeValue;
if (attributeValues.valuesAreBuildLanguageTyped()) {
try {
nativeAttributeValue =
convertFromBuildLangType(rule, attr, attributeValue, repositoryMapping, listInterner);
} catch (ConversionException e) {
// Report the conversion failure and skip this attribute; it stays undefined so the
// default-population pass can still flag mandatory attributes.
rule.reportError(String.format("%s: %s", rule.getLabel(), e.getMessage()), eventHandler);
continue;
}
} else {
nativeAttributeValue = attributeValue;
}
boolean explicit = attributeValues.isExplicitlySpecified(attributeAccessor);
setRuleAttributeValue(rule, eventHandler, attr, nativeAttributeValue, explicit);
definedAttrIndices.set(attrIndex);
}
return definedAttrIndices;
}
/**
 * Records, for every keyword argument in {@code ast} that names a known attribute of this rule
 * class, the source location of that argument's value on {@code rule}. Positional arguments and
 * unknown attribute names are silently skipped.
 */
private void populateAttributeLocations(Rule rule, FuncallExpression ast) {
  for (Argument.Passed arg : ast.getArguments()) {
    if (!arg.isKeyword()) {
      continue;
    }
    Integer attrIndex = getAttributeIndex(arg.getName());
    if (attrIndex != null) {
      rule.setAttributeLocation(attrIndex, arg.getValue().getLocation());
    }
  }
}
/**
 * Populates the attributes table of the new {@link Rule} with default values provided by this
 * {@link RuleClass} and the {@code pkgBuilder}. This will only provide values for attributes that
 * haven't already been populated, using {@code definedAttrIndices} to determine whether an
 * attribute was populated.
 *
 * <p>Errors are reported on {@code eventHandler}.
 */
private void populateDefaultRuleAttributeValues(
Rule rule, Package.Builder pkgBuilder, BitSet definedAttrIndices, EventHandler eventHandler)
throws InterruptedException, CannotPrecomputeDefaultsException {
// Set defaults; ensure that every mandatory attribute has a value. Use the default if none
// is specified.
List<Attribute> attrsWithComputedDefaults = new ArrayList<>();
int numAttributes = getAttributeCount();
for (int attrIndex = 0; attrIndex < numAttributes; ++attrIndex) {
if (definedAttrIndices.get(attrIndex)) {
continue;
}
Attribute attr = getAttribute(attrIndex);
// A missing mandatory attribute is an error, but processing continues so that all such
// errors in the rule are reported in one pass.
if (attr.isMandatory()) {
rule.reportError(
String.format(
"%s: missing value for mandatory attribute '%s' in '%s' rule",
rule.getLabel(),
attr.getName(),
name),
eventHandler);
}
if (attr.hasComputedDefault()) {
// Note that it is necessary to set all non-computed default values before calling
// Attribute#getDefaultValue for computed default attributes. Computed default attributes
// may have a condition predicate (i.e. the predicate returned by Attribute#getCondition)
// that depends on non-computed default attribute values, and that condition predicate is
// evaluated by the call to Attribute#getDefaultValue.
attrsWithComputedDefaults.add(attr);
} else if (attr.isLateBound()) {
rule.setAttributeValue(attr, attr.getLateBoundDefault(), /*explicit=*/ false);
} else {
Object defaultValue = getAttributeNoncomputedDefaultValue(attr, pkgBuilder);
rule.setAttributeValue(attr, defaultValue, /*explicit=*/ false);
checkAllowedValues(rule, attr, eventHandler);
}
}
// Set computed default attribute values now that all other (i.e. non-computed) default values
// have been set.
for (Attribute attr : attrsWithComputedDefaults) {
// If Attribute#hasComputedDefault was true above, Attribute#getDefaultValue returns the
// computed default function object or a Skylark computed default template. Note that we
// cannot determine the exact value of the computed default function here because it may
// depend on other attribute values that are configurable (i.e. they came from select({..})
// expressions in the build language, and they require configuration data from the analysis
// phase to be resolved). Instead, we're setting the attribute value to a reference to the
// computed default function, or if #getDefaultValue is a Skylark computed default
// template, setting the attribute value to a reference to the SkylarkComputedDefault
// returned from SkylarkComputedDefaultTemplate#computePossibleValues.
//
// SkylarkComputedDefaultTemplate#computePossibleValues pre-computes all possible values the
// function may evaluate to, and records them in a lookup table. By calling it here, with an
// EventHandler, any errors that might occur during the function's evaluation can
// be discovered and propagated here.
Object valueToSet;
Object defaultValue = attr.getDefaultValue(rule);
if (defaultValue instanceof SkylarkComputedDefaultTemplate) {
SkylarkComputedDefaultTemplate template = (SkylarkComputedDefaultTemplate) defaultValue;
valueToSet = template.computePossibleValues(attr, rule, eventHandler);
} else {
valueToSet = defaultValue;
}
rule.setAttributeValue(attr, valueToSet, /*explicit=*/ false);
}
}
/**
 * Collects all labels used as keys for configurable attributes and places them into
 * the special implicit attribute that tracks them.
 */
private static void populateConfigDependenciesAttribute(Rule rule) {
RawAttributeMapper attributes = RawAttributeMapper.of(rule);
Attribute configDepsAttribute = attributes.getAttributeDefinition("$config_dependencies");
if (configDepsAttribute == null) {
// Not currently compatible with Skylark rules.
return;
}
// LinkedHashSet: deduplicate while keeping the first-seen order of the select() keys.
Set<Label> configLabels = new LinkedHashSet<>();
for (Attribute attr : rule.getAttributes()) {
SelectorList<?> selectors = attributes.getSelectorList(attr.getName(), attr.getType());
if (selectors != null) {
configLabels.addAll(selectors.getKeyLabels());
}
}
rule.setAttributeValue(configDepsAttribute, ImmutableList.copyOf(configLabels),
/*explicit=*/false);
}
/**
 * Reports an attribute error for every attribute that is marked non-empty but whose value is an
 * empty list, Skylark list, or map. Attributes of other value types are never flagged.
 */
public void checkAttributesNonEmpty(
    RuleErrorConsumer ruleErrorConsumer, AttributeMap attributes) {
  for (String attributeName : attributes.getAttributeNames()) {
    Attribute attr = attributes.getAttributeDefinition(attributeName);
    if (!attr.isNonEmpty()) {
      continue;
    }
    Object value = attributes.get(attributeName, attr.getType());
    boolean isEmpty;
    if (value instanceof SkylarkList) {
      isEmpty = ((SkylarkList<?>) value).isEmpty();
    } else if (value instanceof List<?>) {
      isEmpty = ((List<?>) value).isEmpty();
    } else if (value instanceof Map<?, ?>) {
      isEmpty = ((Map<?, ?>) value).isEmpty();
    } else {
      isEmpty = false;
    }
    if (isEmpty) {
      ruleErrorConsumer.attributeError(attr.getName(), "attribute must be non empty");
    }
  }
}
/**
 * Reports an error for each label that appears more than once in any LABEL_LIST attribute of the
 * given rule.
 *
 * @param rule the rule to check
 * @param eventHandler where duplicated-label errors are reported
 */
private static void checkForDuplicateLabels(Rule rule, EventHandler eventHandler) {
  for (Attribute attribute : rule.getAttributes()) {
    if (attribute.getType() != BuildType.LABEL_LIST) {
      continue;
    }
    checkForDuplicateLabels(rule, attribute, eventHandler);
  }
}
/**
 * Reports an error against the specified rule if it's beneath third_party
 * but does not have a declared license.
 */
private static void checkThirdPartyRuleHasLicense(Rule rule,
Package.Builder pkgBuilder, EventHandler eventHandler) {
if (rule.getRuleClassObject().ignorePackageLicenses()) {
// A package license is sufficient; ignore rules that don't include it.
return;
}
if (isThirdPartyPackage(rule.getLabel().getPackageIdentifier())) {
License license = rule.getLicense();
if (license == null) {
// NOTE(review): assumes getDefaultLicense() never returns null here, otherwise the
// isSpecified() call below would NPE — confirm against Package.Builder.
license = pkgBuilder.getDefaultLicense();
}
if (!license.isSpecified()) {
rule.reportError("third-party rule '" + rule.getLabel() + "' lacks a license declaration "
+ "with one of the following types: notice, reciprocal, permissive, "
+ "restricted, unencumbered, by_exception_only",
eventHandler);
}
}
}
/**
 * Reports an error for each label that appears more than once in the given attribute of the
 * given rule.
 *
 * @param rule the rule to check
 * @param attribute the attribute to check; must exist in rule and be of type LABEL_LIST
 * @param eventHandler where duplicated-label errors are reported
 */
private static void checkForDuplicateLabels(Rule rule, Attribute attribute,
    EventHandler eventHandler) {
  AggregatingAttributeMapper mapper = AggregatingAttributeMapper.of(rule);
  for (Label duplicate : mapper.checkForDuplicateLabels(attribute)) {
    rule.reportError(
        String.format("Label '%s' is duplicated in the '%s' attribute of rule '%s'",
            duplicate, attribute.getName(), rule.getName()), eventHandler);
  }
}
/**
 * Reports an error if the rule has a "size" or "timeout" attribute whose value is not a legal
 * TestSize/TestTimeout name. These attributes appear on all tests.
 *
 * @param rule the rule to check
 * @param eventHandler where validation errors are reported
 */
private static void checkForValidSizeAndTimeoutValues(Rule rule, EventHandler eventHandler) {
  RuleClass ruleClass = rule.getRuleClassObject();
  if (ruleClass.hasAttr("size", Type.STRING)) {
    String sizeValue = NonconfigurableAttributeMapper.of(rule).get("size", Type.STRING);
    if (TestSize.getTestSize(sizeValue) == null) {
      rule.reportError(
          String.format("In rule '%s', size '%s' is not a valid size.", rule.getName(), sizeValue),
          eventHandler);
    }
  }
  if (ruleClass.hasAttr("timeout", Type.STRING)) {
    String timeoutValue = NonconfigurableAttributeMapper.of(rule).get("timeout", Type.STRING);
    if (TestTimeout.getTestTimeout(timeoutValue) == null) {
      rule.reportError(
          String.format(
              "In rule '%s', timeout '%s' is not a valid timeout.", rule.getName(), timeoutValue),
          eventHandler);
    }
  }
}
/**
 * Returns the default value for the specified rule attribute.
 *
 * <p>For most rule attributes, the default value is either explicitly specified
 * in the attribute, or implicitly based on the type of the attribute, except
 * for some special cases (e.g. "licenses", "distribs") where it comes from
 * some other source, such as state in the package.
 *
 * <p>Precondition: {@code !attr.hasComputedDefault()}. (Computed defaults are
 * evaluated in second pass.)
 */
private static Object getAttributeNoncomputedDefaultValue(Attribute attr,
Package.Builder pkgBuilder) {
// Starlark rules may define their own "licenses" attributes with different types -
// we shouldn't trigger the special "licenses" on those cases.
if (attr.getName().equals("licenses") && attr.getType() == BuildType.LICENSE) {
return pkgBuilder.getDefaultLicense();
}
if (attr.getName().equals("distribs")) {
return pkgBuilder.getDefaultDistribs();
}
// All other attributes: the attribute's own declared default, with no rule context.
return attr.getDefaultValue(null);
}
/**
 * Sets the value of attribute {@code attr} in {@code rule} to the native value {@code
 * nativeAttrVal}, and sets the value's explicitness to {@code explicit}.
 *
 * <p>Handles the special case of the "visibility" attribute by also setting the rule's
 * visibility with {@link Rule#setVisibility}.
 *
 * <p>Checks that {@code nativeAttrVal} is an allowed value via {@link #checkAllowedValues}.
 */
private static void setRuleAttributeValue(
Rule rule,
EventHandler eventHandler,
Attribute attr,
Object nativeAttrVal,
boolean explicit) {
if (attr.getName().equals("visibility")) {
@SuppressWarnings("unchecked")
List<Label> attrList = (List<Label>) nativeAttrVal;
// //visibility:legacy_public is only meaningful at package level, never on a single rule.
if (!attrList.isEmpty()
&& ConstantRuleVisibility.LEGACY_PUBLIC_LABEL.equals(attrList.get(0))) {
rule.reportError(
rule.getLabel() + ": //visibility:legacy_public only allowed in package declaration",
eventHandler);
}
try {
rule.setVisibility(PackageFactory.getVisibility(rule.getLabel(), attrList));
} catch (EvalException e) {
rule.reportError(rule.getLabel() + " " + e.getMessage(), eventHandler);
}
}
// Even after a visibility error above, the raw attribute value is still stored.
rule.setAttributeValue(attr, nativeAttrVal, explicit);
checkAllowedValues(rule, attr, eventHandler);
}
/**
 * Converts the build-language-typed {@code buildLangValue} to a native value via {@link
 * BuildType#selectableConvert}. Canonicalizes the value's order if it is a {@link List} type and
 * {@code attr.isOrderIndependent()} returns {@code true}.
 *
 * <p>Throws {@link ConversionException} if the conversion fails, or if {@code buildLangValue} is
 * a selector expression but {@code attr.isConfigurable()} is {@code false}.
 */
private static Object convertFromBuildLangType(
Rule rule,
Attribute attr,
Object buildLangValue,
ImmutableMap<RepositoryName, RepositoryName> repositoryMapping,
Interner<ImmutableList<?>> listInterner)
throws ConversionException {
LabelConversionContext context = new LabelConversionContext(rule.getLabel(), repositoryMapping);
Object converted =
BuildType.selectableConvert(
attr.getType(),
buildLangValue,
new AttributeConversionContext(attr.getName(), rule.getRuleClass()),
context);
// select({...}) expressions are only legal on attributes declared configurable.
if ((converted instanceof SelectorList<?>) && !attr.isConfigurable()) {
throw new ConversionException(
String.format("attribute \"%s\" is not configurable", attr.getName()));
}
if (converted instanceof List<?>) {
if (attr.isOrderIndependent()) {
@SuppressWarnings("unchecked")
List<? extends Comparable<?>> list = (List<? extends Comparable<?>>) converted;
converted = Ordering.natural().sortedCopy(list);
}
// It's common for multiple rule instances in the same package to have the same value for some
// attributes. As a concrete example, consider a package having several 'java_test' instances,
// each with the same exact 'tags' attribute value.
converted = listInterner.intern(ImmutableList.copyOf((List<?>) converted));
}
return converted;
}
/**
 * Provides a {@link #toString()} description of the attribute being converted for
 * {@link BuildType#selectableConvert}. This is preferred over a raw string to avoid uselessly
 * constructing strings which are never used. A separate class instead of inline to avoid
 * accidental memory leaks.
 */
private static class AttributeConversionContext {
// Name of the attribute under conversion, e.g. "deps".
private final String attrName;
// Name of the rule class owning the attribute, e.g. "cc_library".
private final String ruleClass;
AttributeConversionContext(String attrName, String ruleClass) {
this.attrName = attrName;
this.ruleClass = ruleClass;
}
@Override
public String toString() {
return "attribute '" + attrName + "' in '" + ruleClass + "' rule";
}
}
/**
 * Verifies that the rule has a valid value for the attribute according to its allowed values.
 *
 * <p>If the value for the given attribute on the given rule is invalid, an error will be recorded
 * in the given EventHandler.
 *
 * <p>If the rule is configurable, all of its potential values are evaluated, and errors for each
 * of the invalid values are reported.
 */
private static void checkAllowedValues(
Rule rule, Attribute attribute, EventHandler eventHandler) {
if (attribute.checkAllowedValues()) {
PredicateWithMessage<Object> allowedValues = attribute.getAllowedValues();
// visitAttribute expands select() branches, so every reachable value is checked.
Iterable<?> values =
AggregatingAttributeMapper.of(rule).visitAttribute(
attribute.getName(), attribute.getType());
for (Object value : values) {
if (!allowedValues.apply(value)) {
rule.reportError(
String.format(
"%s: invalid value in '%s' attribute: %s",
rule.getLabel(),
attribute.getName(),
allowedValues.getErrorReason(value)),
eventHandler);
}
}
}
}
/**
 * For every aspect attached to any attribute of {@code rule}, validates the default values of the
 * aspect's own attributes against their declared allowed-values predicates, reporting failures on
 * {@code eventHandler}.
 */
private static void checkAspectAllowedValues(
Rule rule, EventHandler eventHandler) {
for (Attribute attrOfRule : rule.getAttributes()) {
for (Aspect aspect : attrOfRule.getAspects(rule)) {
for (Attribute attrOfAspect : aspect.getDefinition().getAttributes().values()) {
// By this point the AspectDefinition has been created and values assigned.
if (attrOfAspect.checkAllowedValues()) {
PredicateWithMessage<Object> allowedValues = attrOfAspect.getAllowedValues();
Object value = attrOfAspect.getDefaultValue(rule);
if (!allowedValues.apply(value)) {
rule.reportError(
String.format(
"%s: invalid value in '%s' attribute: %s",
rule.getLabel(),
attrOfAspect.getName(),
allowedValues.getErrorReason(value)),
eventHandler);
}
}
}
}
}
}
// The rule class name (e.g. "cc_library") is the most useful short identification.
@Override
public String toString() {
return name;
}
/** Returns true if rules of this class are shown in generated documentation. */
public boolean isDocumented() {
return documented;
}
/** Returns true if targets of this rule class default to public visibility. */
public boolean isPublicByDefault() {
return publicByDefault;
}
/**
 * Returns true iff the outputs of this rule should be created beneath the
 * <i>bin</i> directory, false if beneath <i>genfiles</i>. For most rule
 * classes, this is a constant, but for genrule, it is a property of the
 * individual rule instance, derived from the 'output_to_bindir' attribute;
 * see Rule.hasBinaryOutput().
 */
@VisibleForTesting
public boolean hasBinaryOutput() {
return binaryOutput;
}
/**
 * Returns this RuleClass's custom Skylark rule implementation, or null for native rule classes.
 */
@Nullable public BaseFunction getConfiguredTargetFunction() {
return configuredTargetFunction;
}
/** Returns the build setting backing this rule class, or null if it is not a build setting. */
@Nullable
public BuildSetting getBuildSetting() {
return buildSetting;
}
/**
 * Returns a function that computes the external bindings a repository function contributes to
 * the WORKSPACE file.
 */
public Function<? super Rule, Map<String, Label>> getExternalBindingsFunction() {
return externalBindingsFunction;
}
/**
 * Returns a function that computes the options referenced by a rule.
 */
public Function<? super Rule, ? extends Set<String>> getOptionReferenceFunction() {
return optionReferenceFunction;
}
/**
 * Returns this RuleClass's rule definition environment's label. Non-null for Skylark rule
 * classes; null for native rules' RuleClass objects.
 */
@Nullable
public Label getRuleDefinitionEnvironmentLabel() {
return ruleDefinitionEnvironmentLabel;
}
/**
 * Returns the hash code for the RuleClass's rule definition environment. Will be null for native
 * rules' RuleClass objects.
 */
@Nullable
public String getRuleDefinitionEnvironmentHashCode() {
return ruleDefinitionEnvironmentHashCode;
}
/** Returns true if this RuleClass is a Skylark-defined RuleClass. */
public boolean isSkylark() {
return isSkylark;
}
/**
 * Returns true if this RuleClass is Skylark-defined and is subject to analysis-time
 * tests.
 */
public boolean isSkylarkTestable() {
return skylarkTestable;
}
/**
 * Returns true if this rule class outputs a default executable for every rule.
 */
public boolean isExecutableSkylark() {
return isExecutableSkylark;
}
/** Returns true if this rule class is an analysis test (set by analysis_test = true). */
public boolean isAnalysisTest() {
return isAnalysisTest;
}
/**
 * Returns true if this rule class has at least one attribute with an analysis test transition. (A
 * starlark-defined transition using analysis_test_transition()).
 */
public boolean hasAnalysisTestTransition() {
return hasAnalysisTestTransition;
}
/**
 * Returns true if this rule class has the _whitelist_function_transition attribute.
 */
public boolean hasFunctionTransitionWhitelist() {
return hasFunctionTransitionWhitelist;
}
/** Returns true if this rule class should ignore package-level licenses. */
public boolean ignorePackageLicenses() {
return ignorePackageLicenses;
}
/** Returns the labels of the toolchain types required by rules of this class. */
public ImmutableSet<Label> getRequiredToolchains() {
return requiredToolchains;
}
/** Returns true if rules of this class support the platforms mechanism. */
public boolean supportsPlatforms() {
return supportsPlatforms;
}
/** Returns where per-target execution platform constraints may be declared for this class. */
public ExecutionPlatformConstraintsAllowed executionPlatformConstraintsAllowed() {
return executionPlatformConstraintsAllowed;
}
/** Returns the constraint labels applied to this rule class's execution platform selection. */
public ImmutableSet<Label> getExecutionPlatformConstraints() {
return executionPlatformConstraints;
}
/** Returns the kind of output files this rule class creates. */
public OutputFile.Kind getOutputFileKind() {
return outputFileKind;
}
/**
 * Returns true iff {@code packageIdentifier} names a package in the main repository that lies
 * strictly below the third_party prefix (the prefix directory itself does not count).
 */
public static boolean isThirdPartyPackage(PackageIdentifier packageIdentifier) {
  return packageIdentifier.getRepository().isMain()
      && packageIdentifier.getPackageFragment().startsWith(THIRD_PARTY_PREFIX)
      && packageIdentifier.getPackageFragment().segmentCount() > 1;
}
} |
<filename>extension/src/WebViews.ts
import { window, ViewColumn, WebviewPanel } from "vscode";
import { Server } from "./Server";
import { Disposable } from "@hediet/std/disposable";
export const debugVisualizer = "debugVisualizer";
/**
 * Manages all debug-visualizer webview panels: creates new ones, restores panels
 * VS Code serialized across a window reload, and disposes every tracked panel when
 * the extension shuts down.
 */
export class WebViews {
	// All currently-open panels, keyed by their WebviewPanel instance.
	private readonly debugVisualizations = new Map<WebviewPanel, WebView>();
	public readonly dispose = Disposable.fn();
	constructor(private readonly server: Server) {
		// Re-attach any panel VS Code persisted across a reload. The serialized
		// `state` argument is currently unused; the panel is rebuilt from scratch.
		this.dispose.track(
			window.registerWebviewPanelSerializer(debugVisualizer, {
				deserializeWebviewPanel: async (panel, state) => {
					this.restore(panel);
				},
			})
		);
		// On disposal, close every panel we still track. Each panel's onDidDispose
		// handler (see setupView) removes it from the map.
		this.dispose.track({
			dispose: () => {
				for (const panel of this.debugVisualizations.keys()) {
					panel.dispose();
				}
			},
		});
	}
	/** Opens a fresh visualizer panel in the second editor column. */
	public createNew() {
		const panel = window.createWebviewPanel(
			debugVisualizer,
			"Debug Visualizer",
			ViewColumn.Two,
			{ enableScripts: true }
		);
		this.setupView(panel);
	}
	/** Re-initializes a panel handed back by the serializer. */
	public restore(webviewPanel: WebviewPanel) {
		this.setupView(webviewPanel);
	}
	/** Injects the HTML shell, registers the panel, and untracks it once disposed. */
	private setupView(webviewPanel: WebviewPanel) {
		webviewPanel.webview.html = getHtml(this.server);
		const view = new WebView(webviewPanel);
		this.debugVisualizations.set(webviewPanel, view);
		webviewPanel.onDidDispose(() => {
			this.debugVisualizations.delete(webviewPanel);
		});
	}
}
/** Represents one open debug-visualizer panel; currently only holds the panel reference. */
export class WebView {
	constructor(private readonly webviewPanel: WebviewPanel) {}
}
/**
 * Builds the HTML shell for a visualizer webview. The shell exposes the server
 * connection info on `window.webViewData`, bridges `setState`/`getState` messages
 * between the embedded UI iframe and the VS Code webview state API, and then loads
 * the UI — from the dev server in an iframe when USE_DEV_UI is set, otherwise from
 * the bundled script.
 *
 * NOTE(review): `server.secret` is embedded verbatim in the webview HTML, and the
 * CSP below allows essentially everything — confirm this is acceptable for the
 * extension's threat model.
 */
export function getHtml(server: Server) {
	// Any non-empty USE_DEV_UI value switches to the dev-server iframe below.
	const isDev = !!process.env.USE_DEV_UI;
	return `
        <html>
            <head>
            <meta charset="UTF-8">
            <meta http-equiv="Content-Security-Policy" content="default-src * 'unsafe-inline' 'unsafe-eval'; script-src * 'unsafe-inline' 'unsafe-eval'; connect-src * 'unsafe-inline'; img-src * data: blob: 'unsafe-inline'; frame-src *; style-src * 'unsafe-inline';">
                <style>
                    html { height: 100%; width: 100%; padding: 0; margin: 0; }
                    body { height: 100%; width: 100%; padding: 0; margin: 0; }
                    iframe { height: 100%; width: 100%; padding: 0; margin: 0; border: 0; display: block; }
                </style>
            </head>
			<body>
				<script>
					window.webViewData = ${JSON.stringify({
						serverSecret: server.secret,
						serverPort: server.port,
					})};
					const api = window.VsCodeApi = acquireVsCodeApi();
					window.addEventListener('message', event => {
						if (event.source === window.frames[0]) {
							if (event.data.command === "setState") {
								console.log("setState", event.data.state);
								api.setState(event.data.state);
							}
							if (event.data.command === "getState") {
								console.log("getState, sent ", api.getState());
								window.frames[0].postMessage({ command: "getStateResult", state: api.getState() }, "*");
							}
						}
					});
				</script>
				${
					isDev
						? `<iframe sandbox="allow-top-navigation allow-scripts allow-same-origin allow-popups allow-pointer-lock allow-forms" src="${server.getIndexUrl(
								{ mode: "webViewIFrame" }
						  )}"></iframe>`
						: `<script type="text/javascript" src="${server.mainBundleUrl}"></script>`
				}
			</body>
        </html>
    `;
}
|
<gh_stars>0
package com.winson.spring.aop.overview;
/**
* @author winson
* @date 2021/10/8
**/
/**
 * Decorator around an {@link EchoService} that prints the delegate's result and the
 * time the {@code echo} call itself took.
 */
public class ProxyEchoService implements EchoService {

    /** Wrapped service to which the actual echo work is delegated. */
    private final EchoService echoService;

    public ProxyEchoService(EchoService echoService) {
        this.echoService = echoService;
    }

    @Override
    public String echo(String message) {
        long start = System.currentTimeMillis();
        String result = echoService.echo(message);
        // Capture the end time immediately after the delegate returns, so the
        // printed duration measures only the echo call and not the logging below.
        long end = System.currentTimeMillis();
        System.out.println(result);
        System.out.println("run echo use time : " + (end - start));
        return result;
    }
}
|
package com.lyghtningwither.honeyfunmods.init;
import java.util.ArrayList;
import java.util.List;
import com.lyghtningwither.honeyfunmods.fluids.FluidLiquid;
import com.lyghtningwither.honeyfunmods.util.Reference;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidRegistry;
public class FluidInit {

    /** All fluids created by this mod; populated as each fluid is registered. */
    public static final List<Fluid> FLUIDS = new ArrayList<Fluid>();

    public static final Fluid HYDROGEN_PEROXIDE = new FluidLiquid("hydrogen_peroxide", new ResourceLocation(Reference.MOD_ID + ":blocks/hydrogen_peroxide_still"), new ResourceLocation(Reference.MOD_ID + ":blocks/hydrogen_peroxide_flow"));

    /** Registers every mod fluid with the Forge fluid registry. */
    public static void registerFluids() {
        registerFluid(HYDROGEN_PEROXIDE);
    }

    /** Registers a single fluid, adds a bucket for it, and records it in {@link #FLUIDS}. */
    public static void registerFluid(Fluid fluid) {
        FluidRegistry.registerFluid(fluid);
        FluidRegistry.addBucketForFluid(fluid);
        // Track the fluid so other init code can iterate all mod fluids; previously
        // FLUIDS was declared but never populated.
        FLUIDS.add(fluid);
    }
}
|
<reponame>duyangzhou/SilverKing
package com.ms.silverking.cloud.dht.net.protocol;
import com.ms.silverking.cloud.dht.RetrievalOptions;
import com.ms.silverking.cloud.dht.net.SecondaryTargetSerializer;
import com.ms.silverking.numeric.NumConversion;
public class RetrievalMessageFormat extends KeyedMessageFormat {
    public static final int stDataOffset = RetrievalResponseMessageFormat.optionBytesSize;

    /**
     * Computes the number of bytes needed to serialize the given retrieval options:
     * the fixed option block, a short-prefixed secondary-target list, and an
     * int-prefixed user-options blob (zero bytes when no user options are set).
     */
    public static final int getOptionsBufferLength(RetrievalOptions retrievalOptions) {
        // The option block uses the same layout in both request and response directions.
        int length = RetrievalResponseMessageFormat.optionBytesSize;
        length += NumConversion.BYTES_PER_SHORT;
        length += SecondaryTargetSerializer.serializedLength(retrievalOptions.getSecondaryTargets());
        length += NumConversion.BYTES_PER_INT;
        if (retrievalOptions.getUserOptions() != RetrievalOptions.noUserOptions) {
            length += retrievalOptions.getUserOptions().length;
        }
        return length;
    }
}
|
Lossless current sensing and its application in current mode control
In this paper, it is revealed that the lossless current sensing structure is very similar to the well-known Maxwell bridge. The matching condition in lossless current sensing is the same as the balance equation of the Maxwell bridge. In practice, it is difficult to meet the matching condition due to parameter value variations. However, it is shown that in current mode control, under certain time-constant mismatching situations, the circuit's dynamic response can actually be improved. Furthermore, a "speedy capacitor" can be added to the lossless current sensing circuitry to significantly improve the sensed current signal-to-noise ratio and avoid jittery operation in wide DC-DC conversion ratio applications.
<reponame>mshr-h/project-euler
package main
import (
"fmt"
)
// main prints the difference between the square of the sum and the sum of the
// squares of the first 100 natural numbers (Project Euler problem 6).
func main() {
	// Idiomatic Go uses camelCase for local names rather than snake_case.
	sumOfSquares := 0
	sum := 0
	for i := 1; i <= 100; i++ {
		sumOfSquares += i * i
		sum += i
	}
	squareOfSum := sum * sum
	fmt.Println(squareOfSum - sumOfSquares)
}
|
import pytest
from cqd.open_xpd_uuid import sanitize
def test_sanitize_no_action():
    # An already-canonical ID (uppercase, no dashes) must pass through unchanged.
    assert sanitize('1U7XPGQ2') == '1U7XPGQ2'
def test_sanitize_remove_dashes():
    # Dash separators are stripped from grouped input.
    assert sanitize('1U-7X-PG-Q2') == '1U7XPGQ2'
def test_sanitize_capitalize():
    # Lowercase input is uppercased.
    assert sanitize('1u7xpgq2') == '1U7XPGQ2'
@pytest.mark.parametrize(
    'ambiguous_chars, expected_char', (
        ('0oO', '0'),
        ('1LliI', '1'),
    ))
def test_sanitize_ambiguous_chars(ambiguous_chars: str, expected_char: str):
    # Each visually ambiguous character (o/O vs 0, l/L/i/I vs 1) is mapped to
    # its canonical digit, regardless of position-independent casing rules.
    for ambiguous_char in ambiguous_chars:
        assert sanitize(f'{ambiguous_char}u7xpgq2') == f'{expected_char}U7XPGQ2'
|
package com.dewmaple.geolocation.baidu;
import android.content.Context;
import com.baidu.location.BDLocation;
import com.baidu.location.BDLocationListener;
import com.baidu.location.LocationClient;
import com.baidu.location.LocationClientOption;
import com.baidu.location.LocationClientOption.LocationMode;
import com.dewmaple.geolocation.w3.PositionOptions;
public class BDGeolocation {
	private String TAG = "BDGeolocation";
	// Baidu SDK client that performs the actual positioning work.
	private LocationClient client;
	// Coordinate systems the Baidu SDK can return results in.
	public static final String COORD_BD09LL = "bd09ll";
	public static final String COORD_BD09 = "bd09";
	public static final String COORD_GCJ02 = "gcj02";
	// Listener currently registered with the client; non-null only while a
	// one-shot request or a watch is in progress.
	private BDLocationListener listener;
	BDGeolocation(Context context) {
		client = new LocationClient(context);
	}
	/**
	 * Applies W3C-style position options to the Baidu client: defaults to GCJ02
	 * coordinates and battery-saving mode, switching to high-accuracy mode only
	 * when the options request it.
	 */
	private void setOptions(PositionOptions options) {
		// set default coorType
		String coorType = options.getCoorType();
		if (coorType == null || coorType.trim().isEmpty()) {
			coorType = COORD_GCJ02;
		}
		// set default locationMode
		LocationMode locationMode = LocationMode.Battery_Saving;
		if (options.isEnableHighAccuracy()) {
			locationMode = LocationMode.Hight_Accuracy;
		}
		LocationClientOption bdoptions = new LocationClientOption();
		bdoptions.setCoorType(coorType);
		bdoptions.setLocationMode(locationMode);
		client.setLocOption(bdoptions);
	}
	/**
	 * Requests a single position fix. The caller's callback is wrapped so the
	 * watch is cleared (client stopped, listener unregistered) after the first
	 * result arrives.
	 */
	public boolean getCurrentPosition(PositionOptions options, final BDLocationListener callback) {
		listener = new BDLocationListener() {
			@Override
			public void onReceiveLocation(BDLocation location) {
				callback.onReceiveLocation(location);
				// One-shot semantics: stop listening after the first fix.
				clearWatch();
			}
		};
		setOptions(options);
		client.registerLocationListener(listener);
		client.start();
		return true;
	}
	/**
	 * Starts continuous position updates; the callback fires on every fix until
	 * {@link #clearWatch()} is called.
	 */
	public boolean watchPosition(PositionOptions options, BDLocationListener callback) {
		listener = callback;
		setOptions(options);
		client.registerLocationListener(listener);
		client.start();
		return true;
	}
	/** Stops the client and unregisters the active listener, if any. */
	public boolean clearWatch() {
		client.stop();
		client.unRegisterLocationListener(listener);
		listener = null;
		return true;
	}
}
|
<filename>src/test/java/com/webank/webasemonkey/parser/MethodParserTest.java
/**
* Copyright 2014-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webank.webasemonkey.parser;
import java.util.List;
import java.util.Map;
import org.fisco.bcos.web3j.abi.TypeReference;
import org.fisco.bcos.web3j.protocol.core.methods.response.AbiDefinition.NamedType;
import org.fisco.bcos.web3j.protocol.core.methods.response.AbiDefinition.NamedType.Type;
import org.fisco.bcos.web3j.tx.txdecode.BaseException;
import org.fisco.bcos.web3j.tx.txdecode.ContractAbiUtil;
import org.fisco.bcos.web3j.tx.txdecode.DynamicArrayReference;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.webank.webasemonkey.WebasemonkeyApplicationTests;
import com.webank.webasemonkey.tools.JacksonUtils;
import com.webank.webasemonkey.vo.FieldVO;
import com.webank.webasemonkey.vo.MethodMetaInfo;
/**
* MethodParserTest
*
* @Description: MethodParserTest
* @author maojiayu
* @data Apr 17, 2020 3:27:00 PM
*
*/
public class MethodParserTest extends WebasemonkeyApplicationTests {
@Autowired
private MethodParser methodParser;
@SuppressWarnings("unchecked")
@Test
public void testGetField() throws BaseException, ClassNotFoundException {
String methodMetaInfoStr =
"{\"contractName\":\"AccessRestriction\",\"name\":\"revokeUser\",\"shardingNO\":1,\"list\":null}";
String inputsAddress = "[{\"name\":\"_user\",\"type\":\"address\",\"type0\":null,\"indexed\":false}]";
String fieldList =
"[{\"sqlName\":\"_user_\",\"solidityName\":\"_user\",\"javaName\":\"_user\",\"sqlType\":\"varchar(255)\",\"solidityType\":\"Address\",\"javaType\":\"String\",\"entityType\":null,\"typeMethod\":\"AddressUtils.bigIntegerToString\",\"javaCapName\":\"_user\",\"length\":0}]";
MethodMetaInfo mmi = JacksonUtils.fromJson(methodMetaInfoStr, MethodMetaInfo.class);
List<NamedType> nt = JacksonUtils.fromJson(inputsAddress, List.class, NamedType.class);
List<FieldVO> list = methodParser.getFieldList(mmi, nt);
String methodMetaInfoStaticArrayStr =
"{\"contractName\":\"RecordData\",\"name\":\"insertRecord\",\"shardingNO\":1,\"list\":null}";
String inputsStaticArray = "[{\"name\":\"record\",\"type\":\"bytes[]\",\"type0\":null,\"indexed\":false}]";
String fieldList2 =
"[{\"sqlName\":\"_record_\",\"solidityName\":\"record\",\"javaName\":\"record\",\"sqlType\":\"varchar(10240)\",\"solidityType\":\"DynamicArray<bytes>\",\"javaType\":\"String\",\"entityType\":null,\"typeMethod\":\"BytesUtils.dynamicBytesListObjectToString\",\"javaCapName\":\"Record\",\"length\":0}]";
MethodMetaInfo mmi2 = JacksonUtils.fromJson(methodMetaInfoStaticArrayStr, MethodMetaInfo.class);
List<NamedType> nt2 = JacksonUtils.fromJson(inputsStaticArray, List.class, NamedType.class);
List<FieldVO> list2 = methodParser.getFieldList(mmi2, nt2);
List<TypeReference<?>> listOfTypeReference = ContractAbiUtil.paramFormat(nt);
System.out.println(JacksonUtils.toJson(listOfTypeReference));
System.out.println(JacksonUtils.toJson(ContractAbiUtil.paramFormat(nt2)));
for(NamedType n : nt2) {
Type type = new Type(n.getType());
System.out.println(JacksonUtils.toJson(type));
TypeReference<?> tr = DynamicArrayReference.create(type.getBaseName(), n.isIndexed());
System.out.println(tr.getClass().getSimpleName());
}
}
@SuppressWarnings("unchecked")
public static void main(String[] args) {
String s = "[{\"value\":\"aw==\",\"typeAsString\":\"bytes1\"},{\"value\":\"dg==\",\"typeAsString\":\"bytes1\"}]";
List<Map<String, byte[]>> list = JacksonUtils.fromJson(s, List.class, Map.class);
System.out.println(JacksonUtils.toJson(list.get(0).get("value")));
}
}
|
def _get_instances_from_reservations(self, reservations):
instances = list()
for reservation in reservations:
for instance in reservation.instances:
instances.append(instance)
return instances |
The (T, L)-Path Model and Algorithms for Information Dissemination in Dynamic Networks
A dynamic network is the abstraction of distributed systems with frequent network topology changes. With such dynamic network models, fundamental distributed computing problems can be formally studied with rigorous correctness. Although quite a number of models have been proposed and studied for dynamic networks, the existing models are usually defined from the point of view of connectivity properties. In this paper, instead, we examine the dynamicity of network topology according to the procedure of changes, i.e., how the topology or links change. Following such an approach, we propose the notion of the "instant path" and define two dynamic network models based on the instant path. Based on these two models, we design distributed algorithms for the problem of information dissemination, one of the fundamental distributed computing problems, respectively. The correctness of our algorithms is formally proved and their performance in time cost and communication cost is analyzed. Compared with existing connectivity-based dynamic network models and algorithms, our procedure-based ones are definitely easier to instantiate in the practical design and deployment of dynamic networks.
use super::*;
pub mod boxtype {
    //! Predefined 9-character box-drawing palettes.
    //! Layout: [top-left, top, top-right, left, fill, right, bottom-left, bottom, bottom-right].
    pub const SIMPLE_BOX: [char; 9] = ['*', '-', '*', '|', ' ', '|', '*', '-', '*'];
    pub const BORDER_BOX: [char; 9] = ['┌', '─', '┐', '│', ' ', '│', '└', '─', '┘'];
    pub const DOUBLE_BORDER_BOX: [char; 9] =
        ['╔', '═', '╗', '║', ' ', '║', '╚', '═', '╝'];
    pub const NONE_BOX: [char; 9] = [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' '];
}
/// Position of a coordinate along one axis of the box: first cell, interior, or last cell.
enum Pos {
    Begin,
    Middle,
    End,
}
/// Draws a one-cell border around an inner widget using a 9-character palette
/// (`.0`), a border color (`.1`), and the wrapped widget (`.2`).
pub struct VBox<W: Widget>(pub [char; 9], pub Color, pub W);
impl<W: Widget> Widget for VBox<W> {
    fn size(&mut self) -> (isize, isize) {
        // Inner widget size plus one border cell on each side.
        let (w, h) = self.2.size();
        (w + 2, h + 2)
    }
    fn try_set_size(&mut self, w: isize, h: isize) {
        // Reserve two cells per axis for the border before sizing the inner widget.
        self.2.try_set_size(w - 2, h - 2);
    }
    fn get(&mut self, x: isize, y: isize) -> Option<VChar> {
        let (w, h) = self.size();
        // Out-of-bounds coordinates yield no cell.
        if w <= x || h <= y {
            return None;
        }
        if x < 0 || y < 0 {
            return None;
        }
        // Classify each axis as first cell / interior / last cell.
        let xpos = if x == 0 {
            Pos::Begin
        } else if x == w - 1 {
            Pos::End
        } else {
            Pos::Middle
        };
        let ypos = if y == 0 {
            Pos::Begin
        } else if y == h - 1 {
            Pos::End
        } else {
            Pos::Middle
        };
        // Map the (x, y) classification onto the 3x3 palette; interior cells
        // delegate to the inner widget (shifted by the border offset).
        let bch = match (xpos, ypos) {
            (Pos::Begin, Pos::Begin) => self.0[0],
            (Pos::Middle, Pos::Begin) => self.0[1],
            (Pos::End, Pos::Begin) => self.0[2],
            (Pos::Begin, Pos::Middle) => self.0[3],
            (Pos::Middle, Pos::Middle) => {
                // Inner cells default to a blank VChar when the widget has none.
                return self.2.get(x - 1, y - 1).unwrap_or(VChar::SPACE).into();
            }
            (Pos::End, Pos::Middle) => self.0[5],
            (Pos::Begin, Pos::End) => self.0[6],
            (Pos::Middle, Pos::End) => self.0[7],
            (Pos::End, Pos::End) => self.0[8],
        };
        Some(VChar::new(bch, self.1))
    }
}
/// Convenience conversion: wrap any widget in a [`VBox`] border.
pub trait WithBox<W: Widget> {
    fn boxed(self, box_type : [char; 9], c : Color) -> VBox<W>;
}
impl<W : Widget+Sized> WithBox<W> for W {
    fn boxed(self, box_type : [char; 9], c : Color) -> VBox<W> {
        VBox(box_type, c, self)
    }
}
  def ExtractSymmetryExtent( self ):
    """Computes the displayable portion of the core based on its symmetry.

    Returns:
      tuple of ( left, top, right, bottom, width, height ) in assembly
      coordinates, or all zeros when no core is available.

    NOTE(review): this reading assumes ``core.coreSym == 4`` means quarter-core
    and ``8`` means eighth-core symmetry -- confirm against the data model.
    """
    result = None
    core = self.GetCore()
    if core is not None:
      # Full extent in assemblies; symmetry trims the left/top edges.
      bottom = core.nassy
      right = core.nassx
      if core.coreSym == 4:
        # Quarter symmetry: keep the lower-right quadrant (tiny cores untrimmed).
        left = 0 if core.nassx <= 2 else core.nassx >> 1
        top = 0 if core.nassy <= 2 else core.nassy >> 1
      elif core.coreSym == 8:
        left = core.nassx >> 2
        top = core.nassy >> 2
      else:
        # Full-core (or unrecognized) symmetry: show everything.
        left = 0
        top = 0
      result = ( left, top, right, bottom, right - left, bottom - top )
    return result if result is not None else ( 0, 0, 0, 0, 0, 0 )
def uniformly_sample_position(p0, v0, a0, t, j, dt):
    """Resample the trajectory position on a uniform time grid of spacing ``dt``.

    The grid spans from the earliest segment start time (t[:, 0]) to the latest
    segment end time (t[:, -1]); positions are evaluated via ``sample_position``.
    Returns (sample_times, sample_positions).
    """
    t_begin = t[:, 0].min()
    t_final = t[:, -1].max()
    n_samples = int(np.ceil((t_final - t_begin) / dt)) + 1
    sample_times = np.linspace(t_begin, t_final, n_samples)
    return sample_times, sample_position(p0, v0, a0, t, j, sample_times)
# For each query: choose a subset of items whose total weight lies in
# [ceil(W / 2), W]; print the count and 1-based indices, or -1 if impossible.
import sys
import bisect

# All whitespace-separated integers, reversed so pop() yields them in order.
*data, = map(int, sys.stdin.read().split()[::-1])


def inp():
    # Next integer token from the input stream.
    return data.pop()


output = []
for _ in range(inp()):
    n, W = inp(), inp()
    *w, = [inp() for _ in range(n)]
    # Any total in [lbound, W] is acceptable; lbound == ceil(W / 2).
    lbound = (W + 1) // 2
    # Item indices sorted by increasing weight.
    items = sorted(range(n), key=lambda i: w[i])
    if w[items[0]] > W:
        # Even the lightest item exceeds the capacity.
        output.append('-1')
    elif sum(w) < lbound:
        # All items together are still below the lower bound.
        output.append('-1')
    else:
        s = 0
        for i in range(n):
            if lbound <= w[items[i]] <= W:
                # A single item already lands inside the window.
                output.append('1')
                output.append(str(items[i] + 1))
                break
        else:
            # Accumulate lightest-first; stop as soon as the running total
            # lands inside the window.
            for i in range(n):
                s += w[items[i]]
                if s >= lbound and s <= W:
                    ans = [v + 1 for v in items[:i + 1]]
                    output.append(str(len(ans)))
                    output.append(' '.join(map(str, sorted(ans))))
                    break
            else:
                output.append('-1')
print('\n'.join(output))
|
<filename>deploy/shippers/k8/deploywatcher.go
package k8
import (
"errors"
"fmt"
"k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
// deployStatus summarizes a pod's progress through a deployment.
type deployStatus int

const (
	statRunning deployStatus = iota // pod started but not ready yet
	statFailed                      // pod terminated or in an unexpected waiting state
	statDone                        // pod is ready / running
)

// Waiting reasons that are a normal part of startup rather than a failure.
var acceptableWaitingReasons = [...]string{
	"ContainerCreating",
}

var ErrPodsFailedToStart = errors.New("The new Kubernetes pods failed to start.")

// podSet is a set of pod names.
type podSet map[string]bool

type k8DeployWatcher struct {
	running podSet // Pod IDs that have started up but have not completed yet.
	done    podSet // Pod IDs that have finished successfully.
	dead    podSet // Pod IDs that have failed to start.
}
// newK8DeployWatcher returns a watcher with empty tracking sets, pre-sized
// with a small capacity hint.
func newK8DeployWatcher() *k8DeployWatcher {
	return &k8DeployWatcher{
		running: make(podSet, 5),
		done:    make(podSet, 5),
		dead:    make(podSet, 5),
	}
}
// watchIt watches events on K8 pods with the appropriate `name` and
// `version` labels and returns an error if at least `expectedReplicas` are
// not deployed successfully.
func (kdw *k8DeployWatcher) watchIt(client *kubernetes.Clientset, name, version string, expectedReplicas int32, gen int64) error {
	podWatcher, err := client.CoreV1().
		Pods(k8Namespace).
		Watch(metav1.ListOptions{
			LabelSelector: fmt.Sprintf("app=%v,version=%v", name, version),
		})
	if err != nil {
		fmt.Println(err)
		return err
	}
	// Stop the watch when we return so the result channel is closed and the
	// underlying connection is released (previously leaked on every call).
	defer podWatcher.Stop()
	for event := range podWatcher.ResultChan() {
		pod, ok := event.Object.(*v1.Pod)
		if !ok {
			// Ignore events whose payload is not a Pod.
			continue
		}
		switch kdw.inspectPodStatus(pod) {
		case statRunning:
			kdw.running[pod.ObjectMeta.Name] = true
		case statFailed:
			delete(kdw.running, pod.ObjectMeta.Name)
			kdw.dead[pod.ObjectMeta.Name] = true
			// This might be a little naive, but it should suffice.
			if int32(len(kdw.dead)) >= expectedReplicas {
				return ErrPodsFailedToStart
			}
		case statDone:
			delete(kdw.running, pod.ObjectMeta.Name)
			kdw.done[pod.ObjectMeta.Name] = true
			if int32(len(kdw.done)) >= expectedReplicas {
				return nil
			}
		}
	}
	return nil
}
// inspectPodStatus evaluates the pod's status and container conditions to
// determine if it has successfully started or not.
func (kdw *k8DeployWatcher) inspectPodStatus(pod *v1.Pod) deployStatus {
	// A PodReady condition is the strongest success signal.
	for _, cond := range pod.Status.Conditions {
		if cond.Type == v1.PodReady && cond.Status == v1.ConditionTrue {
			return statDone
		}
	}
	// If this pod just started it may not have "ContainerStatuses" set yet.
	// If so, considering it to still be "running".
	if len(pod.Status.ContainerStatuses) < 1 {
		return statRunning
	}
	// Otherwise classify based on the first container state that matches:
	// acceptable waiting reasons count as still-starting, a running container
	// as success, and a terminated one as failure.
	for _, stat := range pod.Status.ContainerStatuses {
		if stat.State.Waiting != nil && kdw.isAcceptableWaitingState(stat.State.Waiting) {
			return statRunning
		} else if stat.State.Running != nil {
			return statDone
		} else if stat.State.Terminated != nil {
			return statFailed
		}
	}
	return statFailed
}
// isAcceptableWaitingState reports whether the container's waiting reason is
// one we treat as a normal part of startup rather than a failure.
func (kdw *k8DeployWatcher) isAcceptableWaitingState(state *v1.ContainerStateWaiting) bool {
	for i := range acceptableWaitingReasons {
		if acceptableWaitingReasons[i] == state.Reason {
			return true
		}
	}
	return false
}
|
    /**
     * Parses a line of a MAVLink file into the corresponding command.
     * <p>
     * Each line is expected to contain 12 tab-separated tokens; token 3 is the
     * command type code and tokens 4-10 are the seven numeric parameters.
     * (NOTE(review): this matches the QGroundControl WPL tab-separated layout --
     * confirm against the files actually produced upstream.)
     *
     * @param line line of MAVLink file
     *
     * @return MAVLink command, or {@code null} if the line could not be parsed
     */
    @Nullable
    static MavlinkCommand parse(@NonNull String line) {
        MavlinkCommand command = null;
        String[] tokens = line.split("\\t");
        // Lines with any other token count are silently ignored.
        if (tokens.length == 12) {
            try {
                Type type = Type.fromCode(Integer.parseInt(tokens[3]));
                if (type != null) {
                    // The seven generic MAVLink parameters follow the type code.
                    double[] parameters = new double[7];
                    for (int i = 0; i < parameters.length; i++) {
                        parameters[i] = Double.parseDouble(tokens[i + 4]);
                    }
                    // Dispatch to the per-type factory; parameterless commands are
                    // constructed directly.
                    switch (type) {
                        case NAVIGATE_TO_WAYPOINT:
                            command = NavigateToWaypointCommand.create(parameters);
                            break;
                        case RETURN_TO_LAUNCH:
                            command = new ReturnToLaunchCommand();
                            break;
                        case LAND:
                            command = new LandCommand();
                            break;
                        case TAKE_OFF:
                            command = new TakeOffCommand();
                            break;
                        case DELAY:
                            command = DelayCommand.create(parameters);
                            break;
                        case CHANGE_SPEED:
                            command = ChangeSpeedCommand.create(parameters);
                            break;
                        case SET_ROI:
                            command = SetRoiCommand.create(parameters);
                            break;
                        case MOUNT_CONTROL:
                            command = MountControlCommand.create(parameters);
                            break;
                        case START_PHOTO_CAPTURE:
                            command = StartPhotoCaptureCommand.create(parameters);
                            break;
                        case STOP_PHOTO_CAPTURE:
                            command = new StopPhotoCaptureCommand();
                            break;
                        case START_VIDEO_CAPTURE:
                            command = new StartVideoCaptureCommand();
                            break;
                        case STOP_VIDEO_CAPTURE:
                            command = new StopVideoCaptureCommand();
                            break;
                        case CREATE_PANORAMA:
                            command = CreatePanoramaCommand.create(parameters);
                            break;
                        case SET_VIEW_MODE:
                            command = SetViewModeCommand.create(parameters);
                            break;
                        case SET_STILL_CAPTURE_MODE:
                            command = SetStillCaptureModeCommand.create(parameters);
                            break;
                    }
                }
            } catch (NumberFormatException e) {
                // Malformed numeric token: log and skip the line rather than abort parsing.
                ULog.e(Logging.TAG_MAVLINK, "Error parsing MAVLink file, ignoring line", e);
                command = null;
            }
        }
        return command;
    }
// TestJobKillNoJob verifies that killing a job unknown to the factory is a
// silent no-op rather than an error.
func (suite JobKillTestSuite) TestJobKillNoJob() {
	// The factory has no entry for this job id, so GetJob yields nil.
	suite.jobFactory.EXPECT().
		GetJob(suite.jobID).
		Return(nil)
	err := JobKill(context.Background(), suite.jobEnt)
	suite.NoError(err)
}
<filename>server/src/game/game.ts
import { DeckOfCards, Card } from './cards';
import { Player } from './player';
import { Board, BallMove } from './board';
enum GameState { Start, ExchangeCards, Playing, End };
export class Game {
round: number = 0;
roundStartPlayerIndex:number=0;
deck: DeckOfCards;
players: Player[] = [];
board: Board;
state: GameState = GameState.Start;
playerState: Array<boolean>;
teamCardExchange: Array<Card>;
constructor(players: Player[]) {
const maxPlayers = 6;
if (players.length % 2 != 0) {
throw new Error("Number of players must be 2,4,6");
}
if (players.length > maxPlayers) {
throw new Error("Number of players cant exceed 6");
}
//Check Teams
if (players.length == 2) {
if (players[0].team == players[1].team) {
throw new Error(`Player ${players[0].name} can't be in same Team as Player ${players[1].name}`);
}
}
if (players.length == 4) {
if (players.filter(p => p.team == 0).length != 2) {
throw new Error(`Team 0 wrong!`);
}
if (players.filter(p => p.team == 1).length != 2) {
throw new Error(`Team 1 wrong!`);
}
}
if (players.length == 6) {
if (players.filter(p => p.team == 0).length != 2) {
throw new Error(`Team 0 wrong!`);
}
if (players.filter(p => p.team == 1).length != 2) {
throw new Error(`Team 1 wrong!`);
}
if (players.filter(p => p.team == 1).length != 2) {
throw new Error(`Team 3 wrong!`);
}
}
this.deck = new DeckOfCards(true, 2);
this.players = players;
this.board = new Board(this.players);
//Start Game
this.nextRound();
}
private nextRound() {
this.round++;
let handSize = 6 - ((this.round - 1) % 5);
//Check if enough cards
if (this.players.length * handSize > this.deck.countCards()) {
this.deck = new DeckOfCards(true, 2);
}
//Get Cards
let playerCards = this.deck.deal(this.players.length, handSize);
playerCards.forEach((cards, i) => { this.players[i].hand = cards; });
//Set State
if (this.players.length > 2) {
this.state = GameState.ExchangeCards;
this.teamCardExchange = new Array<Card>(this.players.length / 2);
} else {
this.state = GameState.Playing;
this.playerState = new Array<boolean>(this.players.length);
}
}
exchangeCard(player: Player, card: Card): void {
if (this.state != GameState.ExchangeCards) throw new Error("Can't exchange cards!");
let pindex = this.players.findIndex(p => p == player);
if (this.teamCardExchange[pindex]) throw new Error("Player" + player.name + " already exchanged cards!");
this.teamCardExchange[pindex] = card;
if (this.teamCardExchange.some(c => c === null)) return;
//All exchanged
for (let i=0;i<this.players.length / 2;i++){
let team=this.players.filter(p=>p.team===i);
let p1Index = this.players.findIndex(p => p.team === i && p != team[0]);
let p2Index = this.players.findIndex(p => p.team === i && p != team[1]);
this.players[p1Index].hand[this.players[p1Index].hand.findIndex(ca=>ca===this.teamCardExchange[p1Index])]=this.teamCardExchange[p2Index];
this.players[p2Index].hand[this.players[p2Index].hand.findIndex(ca=>ca===this.teamCardExchange[p2Index])]=this.teamCardExchange[p1Index];
}
this.teamCardExchange=[];
this.state = GameState.Playing;
this.playerState = new Array<boolean>(this.players.length);
}
possibleMoves(player: Player, card: Card): Array<BallMove> {
if (this.state != GameState.Playing) throw new Error("Can't get possible moves!");
let cardIndex=player.hand.findIndex(c=>c===card);
if (cardIndex<0) throw new Error("Can't find card");
return this.board.getBallMoves(player, card);
}
toString(): string {
return this.players.join("\n") + "\nPack : {" + this.deck.deck.length + "} \nRound : " + this.round + "\nBoard:" + this.board.toString();
}
} |
<gh_stars>1-10
import * as React from "react";
import { SvgIcon } from "@material-ui/core";
const fillColor: string = "#f50057";
const strokeColor: string = "#000";
// Fully transparent fill used when the icon is not in its "filled" state.
const invisColor: string = "#0000";

/**
 * Heart icon rendered as an outline; when `filled` is truthy the heart is
 * filled and stroked with the accent color instead.
 */
function SvgFavoriteStrokeIcon(props: any) {
    // BUGFIX: use rest destructuring instead of `delete props.filled` — React
    // freezes props objects in development mode, so mutating them throws.
    const { filled = false, ...svgProps } = props;
    return (
        <SvgIcon viewBox="0 0 24 24" width="2em" height="2em" {...svgProps}>
            <path fill={filled ? fillColor : invisColor} stroke={filled ? fillColor : strokeColor} strokeWidth="2" d="M12 21.35l-1.45-1.32C5.4 15.36 2 12.28 2 8.5 2 5.42 4.42 3 7.5 3c1.74 0 3.41.81 4.5 2.09C13.09 3.81 14.76 3 16.5 3 19.58 3 22 5.42 22 8.5c0 3.78-3.4 6.86-8.55 11.54L12 21.35z"></path>
        </SvgIcon>
    );
}

export default SvgFavoriteStrokeIcon;
# Read an H x W grid of '.' (white) and '#' (black) cells.
H, W = map(int, input().split())
s = [""] * H
for h in range(H):
    s[h] = input()
INF = float('inf')
# dp[h][w] = [min paint operations to reach (h, w), cell color at (h, w)].
dp = [[[INF, s[h][w]] for w in range(W)] for h in range(H)]
def update_dp_1(c, dp):
    """Extend a state ``dp = [cost, prev_color]`` onto a cell of color ``c``.

    A new paint operation is needed exactly when stepping from a white cell
    ('.') onto a black cell ('#'); otherwise the cost is carried over.
    """
    cost, prev = dp
    if prev == "." and c == "#":
        return [cost + 1, "#"]
    return [cost, c]
def update_dp_2(c, dp_a, dp_b):
    """Extend both candidate predecessor states onto ``c``; keep the cheaper.

    Both extended states carry the same color (``c``), so on a cost tie the
    two candidates are identical and either may be returned.
    """
    return min(update_dp_1(c, dp_a), update_dp_1(c, dp_b), key=lambda st: st[0])
# Seed the start cell: entering a black start cell costs one initial operation.
if s[0][0] == ".":
    dp[0][0] = [0, "."]
else: # == "#"
    dp[0][0] = [1, "#"]
# Fill the table moving only right/down; cells in the first row/column have a
# single predecessor, interior cells take the cheaper of the two.
for h in range(H):
    if h != 0:
        dp[h][0] = update_dp_1(dp[h][0][1], dp[h-1][0])
    for w in range(1, W):
        if h == 0:
            dp[h][w] = update_dp_1(dp[h][w][1], dp[h][w-1])
        else:
            dp[h][w] = update_dp_2(dp[h][w][1], dp[h-1][w], dp[h][w-1])
# Minimum number of paint operations along any monotone path to the goal cell.
print(dp[H-1][W-1][0])
|
<gh_stars>0
package box
import (
"context"
"github.com/gildas/go-errors"
"github.com/gildas/go-request"
)
// sendRequest sends an HTTP request to Box.com's API.
//
// It decorates the given options with the context, logger, user agent, and
// (when available) the bearer token, then maps Box error payloads and HTTP
// status codes onto this package's error types.
func (client *Client) sendRequest(ctx context.Context, options *request.Options, results interface{}) (*request.Content, error) {
	if options == nil {
		return nil, errors.ArgumentMissing.With("options")
	}
	options.Context = ctx
	options.Logger = client.Logger
	options.UserAgent = "BOX Client " + VERSION
	if client.IsAuthenticated() {
		options.Authorization = request.BearerAuthorization(client.Auth.Token.AccessToken)
	}
	response, err := request.Send(options, results)
	// TODO: We need to get access to the response headers
	// boxRequestID := res.Header.Get("Box-Request-Id")
	if err != nil {
		// Prefer the structured Box error payload when the body parses as one.
		var details *RequestError
		if jerr := response.UnmarshalContentJSON(&details); jerr == nil {
			// Carry the HTTP status code into the structured error.
			var httperr *errors.Error
			if errors.As(err, &httperr) {
				details.StatusCode = httperr.Code
			}
			// An invalid grant on a 400 means the token is no longer usable.
			if errors.Is(err, errors.HTTPBadRequest) && errors.Is(details, InvalidGrant) {
				return nil, errors.Unauthorized.Wrap(details)
			}
			if errors.Is(err, errors.HTTPUnauthorized) {
				return nil, errors.Unauthorized.Wrap(details)
			}
			if errors.Is(err, errors.HTTPNotFound) {
				return nil, errors.NotFound.Wrap(details)
			}
			return nil, errors.WithStack(details)
		}
		// No structured payload: map the raw HTTP error directly.
		if errors.Is(err, errors.HTTPUnauthorized) {
			return nil, errors.Unauthorized.Wrap(err)
		}
		if errors.Is(err, errors.HTTPNotFound) {
			return nil, errors.NotFound.Wrap(err)
		}
	}
	return response, err
}
|
<gh_stars>0
from CityGraph.indicator.Indicator import Indicator
from CityGraph.indicator.indicator10 import Indicator10, denormalize10
class IndicatorGroup(Indicator):
    """Indicator that aggregates several Indicator10 members via a weighted mean."""

    grp_factor = 0.
    indicators = []

    def __init__(self, key, grp_factor, indicators: [Indicator10], name=None):
        super().__init__(key, name)
        self.grp_factor = grp_factor
        self.indicators = indicators

    def compute_group_edge(self, data_edge):
        """Compute the factor-weighted mean of the member indicators on one edge.

        Stores the normalized mean under ``self.norm_key`` and its denormalized
        form under ``self.key``. When no member indicator yields a value, only
        ``self.norm_key`` is written (as None). Returns the normalized mean or
        None.
        """
        weighted_total = 0
        factor_total = 0
        for indicator in self.indicators:
            value = indicator.compute_indicator_edge(data_edge)
            # Members that found no value contribute neither weight nor factor.
            if value is not None:
                weighted_total += value * indicator.factor
                factor_total += indicator.factor
        if not factor_total > 0:
            # No member produced a value for this edge.
            data_edge[self.norm_key] = None
            return None
        mean = weighted_total / factor_total
        data_edge[self.norm_key] = mean
        data_edge[self.key] = denormalize10(mean)
        return mean

    def __str__(self):
        return f'{self.key}(grp_factor={self.grp_factor}, indicators:{self.indicators})'

    def __repr__(self):
        return self.__str__()
|
<reponame>geekhall/algorithms
/**
* ID: 01921
* Title: Eliminate Maximum Number of Monsters
* Difficulty: Medium
* Description: You are playing a video game where you are defending your city from a group of n monsters. You are given a 0-indexed integer array dist of size n, where dist[i] is the initial distance in kilometers of the i th monster from the city.
*
* The monsters walk toward the city at a constant speed. The speed of each monster is given to you in an integer array speed of size n, where speed[i] is the speed of the i th monster in kilometers per minute.
*
* You have a weapon that, once fully charged, can eliminate a single monster. However, the weapon takes one minute to charge.The weapon is fully charged at the very start.
*
* You lose when any monster reaches your city. If a monster reaches the city at the exact moment the weapon is fully charged, it counts as a loss, and the game ends before you can use your weapon.
*
* Return the maximum number of monsters that you can eliminate before you lose, or n if you can eliminate all the monsters before they reach the city.
*
* Example 1:
*
 * Input: dist = [1,3,4], speed = [1,1,1] Output: 3 Explanation: In the beginning, the distances of the monsters are [1,3,4]. You eliminate the first monster. After a minute, the distances of the monsters are [X,2,3]. You eliminate the second monster. After a minute, the distances of the monsters are [X,X,2]. You eliminate the third monster. All 3 monsters can be eliminated.
*
* Example 2:
*
* Input: dist = [1,1,2,3], speed = [1,1,1,1] Output: 1 Explanation: In the beginning, the distances of the monsters are [1,1,2,3]. You eliminate the first monster. After a minute, the distances of the monsters are [X,0,1,2], so you lose. You can only eliminate 1 monster.
*
* Example 3:
*
* Input: dist = [3,2,4], speed = [5,3,2] Output: 1 Explanation: In the beginning, the distances of the monsters are [3,2,4]. You eliminate the first monster. After a minute, the distances of the monsters are [X,0,2], so you lose. You can only eliminate 1 monster.
*
* Constraints:
*
* n == dist.length == speed.length
* 1 <= n <= 10 5
* 1 <= dist[i], speed[i] <= 10 5
*/
/**
 * Greedy solution: sort monsters by arrival time (dist / speed). The k-th shot
 * (0-based) is fired at minute k, so we keep eliminating while the next
 * arrival time is strictly greater than the current minute.
 */
function solution(dist: number[] = [], speed: number[] = []): number {
    const arrivals = dist
        .map((d, i) => d / speed[i])
        .sort((a, b) => a - b);
    let eliminated = 0;
    for (const t of arrivals) {
        // A monster arriving at or before the minute we could shoot it is a loss.
        if (t <= eliminated) break;
        eliminated++;
    }
    return eliminated;
}

function test_01921() {
    console.assert(solution([1, 3, 4], [1, 1, 1]) === 3);
    console.assert(solution([1, 1, 2, 3], [1, 1, 1, 1]) === 1);
    console.assert(solution([3, 2, 4], [5, 3, 2]) === 1);
}

test_01921()
|
package wazero
import (
"context"
"errors"
"io"
"io/fs"
"math"
"time"
"github.com/tetratelabs/wazero/api"
"github.com/tetratelabs/wazero/internal/engine/compiler"
"github.com/tetratelabs/wazero/internal/engine/interpreter"
"github.com/tetratelabs/wazero/internal/platform"
internalsys "github.com/tetratelabs/wazero/internal/sys"
"github.com/tetratelabs/wazero/internal/wasm"
"github.com/tetratelabs/wazero/sys"
)
// RuntimeConfig controls runtime behavior, with the default implementation as NewRuntimeConfig
//
// Ex. To explicitly limit to Wasm Core 1.0 features as opposed to relying on defaults:
// rConfig = wazero.NewRuntimeConfig().WithWasmCore1()
//
// Note: RuntimeConfig is immutable. Each WithXXX function returns a new instance including the corresponding change.
type RuntimeConfig interface {
// WithFeatureBulkMemoryOperations adds instructions modify ranges of memory or table entries
// ("bulk-memory-operations"). This defaults to false as the feature was not finished in WebAssembly 1.0.
//
// Here are the notable effects:
// * Adds `memory.fill`, `memory.init`, `memory.copy` and `data.drop` instructions.
// * Adds `table.init`, `table.copy` and `elem.drop` instructions.
// * Introduces a "passive" form of element and data segments.
// * Stops checking "active" element and data segment boundaries at compile-time, meaning they can error at runtime.
//
// Note: "bulk-memory-operations" is mixed with the "reference-types" proposal
// due to the WebAssembly Working Group merging them "mutually dependent".
// Therefore, enabling this feature results in enabling WithFeatureReferenceTypes, and vice-versa.
//
// See https://github.com/WebAssembly/spec/blob/main/proposals/bulk-memory-operations/Overview.md
// https://github.com/WebAssembly/spec/blob/main/proposals/reference-types/Overview.md and
// https://github.com/WebAssembly/spec/pull/1287
WithFeatureBulkMemoryOperations(bool) RuntimeConfig
// WithFeatureMultiValue enables multiple values ("multi-value"). This defaults to false as the feature was not
// finished in WebAssembly 1.0 (20191205).
//
// Here are the notable effects:
// * Function (`func`) types allow more than one result
// * Block types (`block`, `loop` and `if`) can be arbitrary function types
//
// See https://github.com/WebAssembly/spec/blob/main/proposals/multi-value/Overview.md
WithFeatureMultiValue(bool) RuntimeConfig
// WithFeatureMutableGlobal allows globals to be mutable. This defaults to true as the feature was finished in
// WebAssembly 1.0 (20191205).
//
// When false, an api.Global can never be cast to an api.MutableGlobal, and any wasm that includes global vars
// will fail to parse.
WithFeatureMutableGlobal(bool) RuntimeConfig
// WithFeatureNonTrappingFloatToIntConversion enables non-trapping float-to-int conversions.
// ("nontrapping-float-to-int-conversion"). This defaults to false as the feature was not in WebAssembly 1.0.
//
// The only effect of enabling is allowing the following instructions, which return 0 on NaN instead of panicking.
// * `i32.trunc_sat_f32_s`
// * `i32.trunc_sat_f32_u`
// * `i32.trunc_sat_f64_s`
// * `i32.trunc_sat_f64_u`
// * `i64.trunc_sat_f32_s`
// * `i64.trunc_sat_f32_u`
// * `i64.trunc_sat_f64_s`
// * `i64.trunc_sat_f64_u`
//
// See https://github.com/WebAssembly/spec/blob/main/proposals/nontrapping-float-to-int-conversion/Overview.md
WithFeatureNonTrappingFloatToIntConversion(bool) RuntimeConfig
// WithFeatureReferenceTypes enables various instructions and features related to table and new reference types.
//
// * Introduction of new value types: `funcref` and `externref`.
// * Support for the following new instructions:
// * `ref.null`
// * `ref.func`
// * `ref.is_null`
// * `table.fill`
// * `table.get`
// * `table.grow`
// * `table.set`
// * `table.size`
// * Support for multiple tables per module:
// * `call_indirect`, `table.init`, `table.copy` and `elem.drop` instructions can take non-zero table index.
// * Element segments can take non-zero table index.
//
// Note: "reference-types" is mixed with the "bulk-memory-operations" proposal
// due to the WebAssembly Working Group merging them "mutually dependent".
// Therefore, enabling this feature results in enabling WithFeatureBulkMemoryOperations, and vice-versa.
//
// See https://github.com/WebAssembly/spec/blob/main/proposals/bulk-memory-operations/Overview.md
// https://github.com/WebAssembly/spec/blob/main/proposals/reference-types/Overview.md and
// https://github.com/WebAssembly/spec/pull/1287
WithFeatureReferenceTypes(enabled bool) RuntimeConfig
// WithFeatureSignExtensionOps enables sign extension instructions ("sign-extension-ops"). This defaults to false
// as the feature was not in WebAssembly 1.0.
//
// Here are the notable effects:
// * Adds instructions `i32.extend8_s`, `i32.extend16_s`, `i64.extend8_s`, `i64.extend16_s` and `i64.extend32_s`
//
// See https://github.com/WebAssembly/spec/blob/main/proposals/sign-extension-ops/Overview.md
WithFeatureSignExtensionOps(bool) RuntimeConfig
// WithFeatureSIMD enables the vector value type and vector instructions (aka SIMD). This defaults to false
// as the feature was not in WebAssembly 1.0.
//
// See https://github.com/WebAssembly/spec/blob/main/proposals/simd/SIMD.md
WithFeatureSIMD(bool) RuntimeConfig
// WithWasmCore1 enables features included in the WebAssembly Core Specification 1.0. Selecting this
// overwrites any currently accumulated features with only those included in this W3C recommendation.
//
// This is default because as of mid 2022, this is the only version that is a Web Standard (W3C Recommendation).
//
// You can select the latest draft of the WebAssembly Core Specification 2.0 instead via WithWasmCore2. You can
// also enable or disable individual features via `WithXXX` methods. Ex.
// rConfig = wazero.NewRuntimeConfig().WithWasmCore1().WithFeatureMutableGlobal(false)
//
// See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/
WithWasmCore1() RuntimeConfig
// WithWasmCore2 enables features included in the WebAssembly Core Specification 2.0 (20220419). Selecting this
// overwrites any currently accumulated features with only those included in this W3C working draft.
//
// This is not default because it is not yet complete and also not yet a Web Standard (W3C Recommendation).
//
// Even after selecting this, you can enable or disable individual features via `WithXXX` methods. Ex.
// rConfig = wazero.NewRuntimeConfig().WithWasmCore2().WithFeatureMutableGlobal(false)
//
// See https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/
WithWasmCore2() RuntimeConfig
}
// NewRuntimeConfig returns a RuntimeConfig using the compiler if it is supported in this environment,
// or the interpreter otherwise.
//
// NOTE(review): the platform detection lives in newRuntimeConfig, which is
// defined elsewhere in the package (presumably selected per GOOS/GOARCH) --
// not visible in this file section.
func NewRuntimeConfig() RuntimeConfig {
	return newRuntimeConfig()
}
// runtimeConfig is the internal implementation of RuntimeConfig.
type runtimeConfig struct {
	// enabledFeatures is the set of enabled WebAssembly specification features.
	enabledFeatures wasm.Features
	// newEngine constructs the wasm.Engine for the given features. It is set by
	// NewRuntimeConfigCompiler or NewRuntimeConfigInterpreter.
	newEngine func(wasm.Features) wasm.Engine
}
// engineLessConfig helps avoid copy/pasting the wrong defaults.
// New configs start from a value copy of this: WebAssembly Core 1.0 features
// (Features20191205) with no engine selected; the engine factory is filled in
// by NewRuntimeConfigCompiler / NewRuntimeConfigInterpreter.
var engineLessConfig = &runtimeConfig{
	enabledFeatures: wasm.Features20191205,
}
// NewRuntimeConfigCompiler compiles WebAssembly modules into
// runtime.GOARCH-specific assembly for optimal performance.
//
// Compilation is ahead-of-time (AOT) and happens automatically during
// Runtime.CompileModule: consistent runtime performance with no extra action
// required on your part.
//
// Warning: This panics at runtime if the runtime.GOOS or runtime.GOARCH does not
// support Compiler. Use NewRuntimeConfig to safely detect and fallback to
// NewRuntimeConfigInterpreter if needed.
func NewRuntimeConfigCompiler() RuntimeConfig {
	cfg := *engineLessConfig // value copy: never mutate the shared defaults
	cfg.newEngine = compiler.NewEngine
	return &cfg
}
// NewRuntimeConfigInterpreter interprets WebAssembly modules instead of compiling them into assembly.
func NewRuntimeConfigInterpreter() RuntimeConfig {
	cfg := *engineLessConfig // value copy: never mutate the shared defaults
	cfg.newEngine = interpreter.NewEngine
	return &cfg
}
// WithFeatureBulkMemoryOperations implements RuntimeConfig.WithFeatureBulkMemoryOperations.
func (c *runtimeConfig) WithFeatureBulkMemoryOperations(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureBulkMemoryOperations, enabled)
	// The W3C merged bulk-memory-operations with reference-types, so the two
	// toggles always move together.
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureReferenceTypes, enabled)
	return &derived
}
// WithFeatureMultiValue implements RuntimeConfig.WithFeatureMultiValue.
func (c *runtimeConfig) WithFeatureMultiValue(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureMultiValue, enabled)
	return &derived
}
// WithFeatureMutableGlobal implements RuntimeConfig.WithFeatureMutableGlobal.
func (c *runtimeConfig) WithFeatureMutableGlobal(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureMutableGlobal, enabled)
	return &derived
}
// WithFeatureNonTrappingFloatToIntConversion implements RuntimeConfig.WithFeatureNonTrappingFloatToIntConversion.
func (c *runtimeConfig) WithFeatureNonTrappingFloatToIntConversion(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureNonTrappingFloatToIntConversion, enabled)
	return &derived
}
// WithFeatureReferenceTypes implements RuntimeConfig.WithFeatureReferenceTypes.
func (c *runtimeConfig) WithFeatureReferenceTypes(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureReferenceTypes, enabled)
	// The W3C merged reference-types with bulk-memory-operations, so the two
	// toggles always move together.
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureBulkMemoryOperations, enabled)
	return &derived
}
// WithFeatureSignExtensionOps implements RuntimeConfig.WithFeatureSignExtensionOps.
func (c *runtimeConfig) WithFeatureSignExtensionOps(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureSignExtensionOps, enabled)
	return &derived
}
// WithFeatureSIMD implements RuntimeConfig.WithFeatureSIMD.
func (c *runtimeConfig) WithFeatureSIMD(enabled bool) RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.enabledFeatures = derived.enabledFeatures.Set(wasm.FeatureSIMD, enabled)
	return &derived
}
// WithWasmCore1 implements RuntimeConfig.WithWasmCore1.
func (c *runtimeConfig) WithWasmCore1() RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	// Replaces (does not merge) any accumulated features with the 1.0 set.
	derived.enabledFeatures = wasm.Features20191205
	return &derived
}
// WithWasmCore2 implements RuntimeConfig.WithWasmCore2.
func (c *runtimeConfig) WithWasmCore2() RuntimeConfig {
	derived := *c // shallow copy keeps the receiver immutable
	// Replaces (does not merge) any accumulated features with the 2.0 draft set.
	derived.enabledFeatures = wasm.Features20220419
	return &derived
}
// CompiledModule is a WebAssembly 1.0 module ready to be instantiated (Runtime.InstantiateModule) as an api.Module.
//
// In WebAssembly terminology, this is a decoded, validated, and possibly also compiled module. wazero avoids using
// the name "Module" for both before and after instantiation as the name conflation has caused confusion.
// See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#semantic-phases%E2%91%A0
//
// Note: Closing the wazero.Runtime closes any CompiledModule it compiled.
type CompiledModule interface {
	// Close releases all the allocated resources for this CompiledModule.
	// In the current implementation this deletes the compiled code from the
	// engine (see compiledModule.Close).
	//
	// Note: It is safe to call Close while having outstanding calls from an api.Module instantiated from this.
	Close(context.Context) error
}
// compiledModule is the internal implementation of CompiledModule.
type compiledModule struct {
	module *wasm.Module
	// compiledEngine holds an engine on which `module` is compiled.
	compiledEngine wasm.Engine
	// closeWithModule prevents leaking compiled code when a module is compiled implicitly.
	closeWithModule bool
}
// Close implements CompiledModule.Close.
func (c *compiledModule) Close(_ context.Context) error {
	// The context parameter is currently unused; if that changes, remember to
	// coerce a nil context to context.Background().
	c.compiledEngine.DeleteCompiledModule(c.module)
	// Deleting compiled code cannot currently fail; the error return exists to
	// match api.Module.Close.
	return nil
}
// CompileConfig allows you to override what was decoded from wasm, prior to compilation (ModuleBuilder.Compile or
// Runtime.CompileModule).
//
// For example, WithImportRenamer allows you to override hard-coded names that don't match your requirements.
//
// Use NewCompileConfig to obtain the default implementation.
//
// Note: CompileConfig is immutable. Each WithXXX function returns a new instance including the corresponding change.
type CompileConfig interface {
	// WithImportRenamer can rename imports or break them into different modules. No default.
	// A nil function is invalid and ignored.
	//
	// Note: This is currently not relevant for ModuleBuilder as it has no means to define imports.
	WithImportRenamer(api.ImportRenamer) CompileConfig

	// WithMemorySizer are the allocation parameters used for a Wasm memory.
	// The default is to set cap=min and max=65536 if unset. A nil function is invalid and ignored.
	WithMemorySizer(api.MemorySizer) CompileConfig
}
// compileConfig is the internal implementation of CompileConfig.
type compileConfig struct {
	// importRenamer is nil when unset (WithImportRenamer ignores nil input).
	importRenamer api.ImportRenamer
	// memorySizer defaults to wasm.MemorySizer (see NewCompileConfig).
	memorySizer api.MemorySizer
}
// NewCompileConfig returns a CompileConfig that can be used for configuring module compilation.
// The result has no import renamer (zero value nil) and the default memory sizer.
func NewCompileConfig() CompileConfig {
	cfg := compileConfig{memorySizer: wasm.MemorySizer}
	return &cfg
}
// WithImportRenamer implements CompileConfig.WithImportRenamer.
func (c *compileConfig) WithImportRenamer(importRenamer api.ImportRenamer) CompileConfig {
	// A nil renamer is documented as invalid and ignored: keep the receiver.
	if importRenamer == nil {
		return c
	}
	derived := *c // shallow copy keeps the receiver immutable
	derived.importRenamer = importRenamer
	return &derived
}
// WithMemorySizer implements CompileConfig.WithMemorySizer.
func (c *compileConfig) WithMemorySizer(memorySizer api.MemorySizer) CompileConfig {
	// A nil sizer is documented as invalid and ignored: keep the receiver.
	if memorySizer == nil {
		return c
	}
	derived := *c // shallow copy keeps the receiver immutable
	derived.memorySizer = memorySizer
	return &derived
}
// ModuleConfig configures resources needed by functions that have low-level interactions with the host operating
// system. Using this, resources such as STDIN can be isolated, so that the same module can be safely instantiated
// multiple times.
//
// Ex.
// // Initialize base configuration:
// config := wazero.NewModuleConfig().WithStdout(buf).WithSysNanotime()
//
// // Assign different configuration on each instantiation
// module, _ := r.InstantiateModule(ctx, compiled, config.WithName("rotate").WithArgs("rotate", "angle=90", "dir=cw"))
//
// While wazero supports Windows as a platform, host functions using ModuleConfig follow a UNIX dialect.
// See RATIONALE.md for design background and relationship to WebAssembly System Interfaces (WASI).
//
// Note: ModuleConfig is immutable. Each WithXXX function returns a new instance including the corresponding change.
type ModuleConfig interface {
// WithArgs assigns command-line arguments visible to an imported function that reads an arg vector (argv). Defaults to
// none. Runtime.InstantiateModule errs if any arg is empty.
//
// These values are commonly read by the functions like "args_get" in "wasi_snapshot_preview1" although they could be
// read by functions imported from other modules.
//
// Similar to os.Args and exec.Cmd Env, many implementations would expect a program name to be argv[0]. However, neither
// WebAssembly nor WebAssembly System Interfaces (WASI) define this. Regardless, you may choose to set the first
// argument to the same value set via WithName.
//
// Note: This does not default to os.Args as that violates sandboxing.
//
// See https://linux.die.net/man/3/argv and https://en.wikipedia.org/wiki/Null-terminated_string
WithArgs(...string) ModuleConfig
// WithEnv sets an environment variable visible to a Module that imports functions. Defaults to none.
// Runtime.InstantiateModule errs if the key is empty or contains a NUL(0) or an equals("=") character.
//
// Validation is the same as os.Setenv on Linux and replaces any existing value. Unlike exec.Cmd Env, this does not
// default to the current process environment as that would violate sandboxing. This also does not preserve order.
//
// Environment variables are commonly read by the functions like "environ_get" in "wasi_snapshot_preview1" although
// they could be read by functions imported from other modules.
//
// While similar to process configuration, there are no assumptions that can be made about anything OS-specific. For
// example, neither WebAssembly nor WebAssembly System Interfaces (WASI) define concerns processes have, such as
// case-sensitivity on environment keys. For portability, define entries with case-insensitively unique keys.
//
// See https://linux.die.net/man/3/environ and https://en.wikipedia.org/wiki/Null-terminated_string
WithEnv(key, value string) ModuleConfig
// WithFS assigns the file system to use for any paths beginning at "/". Defaults to not found.
// Note: This sets WithWorkDirFS to the same file-system unless already set.
//
// Ex. This sets a read-only, embedded file-system to serve files under the root ("/") and working (".") directories:
//
// //go:embed testdata/index.html
// var testdataIndex embed.FS
//
// rooted, err := fs.Sub(testdataIndex, "testdata")
// require.NoError(t, err)
//
// // "index.html" is accessible as both "/index.html" and "./index.html" because we didn't use WithWorkDirFS.
// config := wazero.NewModuleConfig().WithFS(rooted)
//
WithFS(fs.FS) ModuleConfig
// WithName configures the module name. Defaults to what was decoded or overridden via CompileConfig.WithModuleName.
WithName(string) ModuleConfig
// WithStartFunctions configures the functions to call after the module is instantiated. Defaults to "_start".
//
// Note: If any function doesn't exist, it is skipped. However, all functions that do exist are called in order.
WithStartFunctions(...string) ModuleConfig
// WithStderr configures where standard error (file descriptor 2) is written. Defaults to io.Discard.
//
// This writer is most commonly used by the functions like "fd_write" in "wasi_snapshot_preview1" although it could
// be used by functions imported from other modules.
//
// Notes
//
// * The caller is responsible to close any io.Writer they supply: It is not closed on api.Module Close.
// * This does not default to os.Stderr as that both violates sandboxing and prevents concurrent modules.
//
// See https://linux.die.net/man/3/stderr
WithStderr(io.Writer) ModuleConfig
// WithStdin configures where standard input (file descriptor 0) is read. Defaults to return io.EOF.
//
// This reader is most commonly used by the functions like "fd_read" in "wasi_snapshot_preview1" although it could
// be used by functions imported from other modules.
//
// Notes
//
// * The caller is responsible to close any io.Reader they supply: It is not closed on api.Module Close.
// * This does not default to os.Stdin as that both violates sandboxing and prevents concurrent modules.
//
// See https://linux.die.net/man/3/stdin
WithStdin(io.Reader) ModuleConfig
// WithStdout configures where standard output (file descriptor 1) is written. Defaults to io.Discard.
//
// This writer is most commonly used by the functions like "fd_write" in "wasi_snapshot_preview1" although it could
// be used by functions imported from other modules.
//
// Notes
//
// * The caller is responsible to close any io.Writer they supply: It is not closed on api.Module Close.
// * This does not default to os.Stdout as that both violates sandboxing and prevents concurrent modules.
//
// See https://linux.die.net/man/3/stdout
WithStdout(io.Writer) ModuleConfig
// WithWalltime configures the wall clock, sometimes referred to as the
// real time clock. Defaults to a constant fake result.
//
// Ex. To override with your own clock:
// moduleConfig = moduleConfig.
// WithWalltime(func(context.Context) (sec int64, nsec int32) {
// return clock.walltime()
// }, sys.ClockResolution(time.Microsecond.Nanoseconds()))
//
// Note: This does not default to time.Now as that violates sandboxing. Use
// WithSysWalltime for a usable implementation.
WithWalltime(sys.Walltime, sys.ClockResolution) ModuleConfig
// WithSysWalltime uses time.Now for sys.Walltime with a resolution of 1us
// (1000ns).
//
// See WithWalltime
WithSysWalltime() ModuleConfig
// WithNanotime configures the monotonic clock, used to measure elapsed
// time in nanoseconds. Defaults to a constant fake result.
//
// Ex. To override with your own clock:
// moduleConfig = moduleConfig.
// WithNanotime(func(context.Context) int64 {
// return clock.nanotime()
// }, sys.ClockResolution(time.Microsecond.Nanoseconds()))
//
// Note: This does not default to time.Since as that violates sandboxing.
// Use WithSysNanotime for a usable implementation.
WithNanotime(sys.Nanotime, sys.ClockResolution) ModuleConfig
// WithSysNanotime uses time.Now for sys.Nanotime with a resolution of 1us.
//
// See WithNanotime
WithSysNanotime() ModuleConfig
// WithRandSource configures a source of random bytes. Defaults to crypto/rand.Reader.
//
// This reader is most commonly used by the functions like "random_get" in "wasi_snapshot_preview1" or "seed" in
// AssemblyScript standard "env" although it could be used by functions imported from other modules.
//
// Note: The caller is responsible to close any io.Reader they supply: It is not closed on api.Module Close.
WithRandSource(io.Reader) ModuleConfig
// WithWorkDirFS indicates the file system to use for any paths beginning at "./". Defaults to the same as WithFS.
//
// Ex. This sets a read-only, embedded file-system as the root ("/"), and a mutable one as the working directory ("."):
//
// //go:embed appA
// var rootFS embed.FS
//
// // Files relative to this source under appA are available under "/" and files relative to "/work/appA" under ".".
// config := wazero.NewModuleConfig().WithFS(rootFS).WithWorkDirFS(os.DirFS("/work/appA"))
//
// Note: os.DirFS documentation includes important notes about isolation, which also applies to fs.Sub. As of Go 1.18,
// the built-in file-systems are not jailed (chroot). See https://github.com/golang/go/issues/42322
WithWorkDirFS(fs.FS) ModuleConfig
}
// moduleConfig is the internal implementation of ModuleConfig.
type moduleConfig struct {
	// name overrides the module name (WithName).
	name string
	// startFunctions are invoked after instantiation (WithStartFunctions).
	startFunctions []string
	stdin          io.Reader
	stdout         io.Writer
	stderr         io.Writer
	randSource     io.Reader
	walltimeTime       *sys.Walltime
	walltimeResolution sys.ClockResolution
	nanotimeTime       *sys.Nanotime
	nanotimeResolution sys.ClockResolution
	args []string
	// environ is pair-indexed to retain order similar to os.Environ.
	environ []string
	// environKeys allow overwriting of existing values.
	environKeys map[string]int
	fs *internalsys.FSConfig
}
// NewModuleConfig returns a ModuleConfig that can be used for configuring module instantiation.
func NewModuleConfig() ModuleConfig {
	cfg := moduleConfig{
		startFunctions: []string{"_start"},
		environKeys:    map[string]int{},
		fs:             internalsys.NewFSConfig(),
	}
	return &cfg
}
// WithArgs implements ModuleConfig.WithArgs.
func (c *moduleConfig) WithArgs(args ...string) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.args = args
	return &derived
}
// WithEnv implements ModuleConfig.WithEnv.
//
// The returned config owns fresh copies of the pair-indexed environ slice and
// the environKeys index. ModuleConfig is documented as immutable, but the
// previous implementation only did `ret := *c`: a shallow struct copy shares
// the environ backing array and the environKeys map with the receiver, so
// writing through the "copy" also mutated the original configuration.
func (c *moduleConfig) WithEnv(key, value string) ModuleConfig {
	ret := *c // copy scalar and other reference fields

	// Deep-copy the environment state so the receiver is never modified
	// through the derived config.
	ret.environ = make([]string, len(c.environ), len(c.environ)+2)
	copy(ret.environ, c.environ)
	ret.environKeys = make(map[string]int, len(c.environKeys)+1)
	for k, i := range c.environKeys {
		ret.environKeys[k] = i
	}

	// Check to see if this key already exists and update it.
	if i, ok := ret.environKeys[key]; ok {
		ret.environ[i+1] = value // environ is pair-indexed, so the value is 1 after the key.
	} else {
		ret.environKeys[key] = len(ret.environ)
		ret.environ = append(ret.environ, key, value)
	}
	return &ret
}
// WithFS implements ModuleConfig.WithFS.
func (c *moduleConfig) WithFS(fsys fs.FS) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.fs = derived.fs.WithFS(fsys)
	return &derived
}
// WithName implements ModuleConfig.WithName.
func (c *moduleConfig) WithName(name string) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.name = name
	return &derived
}
// WithStartFunctions implements ModuleConfig.WithStartFunctions.
func (c *moduleConfig) WithStartFunctions(startFunctions ...string) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.startFunctions = startFunctions
	return &derived
}
// WithStderr implements ModuleConfig.WithStderr.
func (c *moduleConfig) WithStderr(stderr io.Writer) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.stderr = stderr
	return &derived
}
// WithStdin implements ModuleConfig.WithStdin.
func (c *moduleConfig) WithStdin(stdin io.Reader) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.stdin = stdin
	return &derived
}
// WithStdout implements ModuleConfig.WithStdout.
func (c *moduleConfig) WithStdout(stdout io.Writer) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.stdout = stdout
	return &derived
}
// WithWalltime implements ModuleConfig.WithWalltime.
func (c *moduleConfig) WithWalltime(walltime sys.Walltime, resolution sys.ClockResolution) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.walltimeTime = &walltime
	derived.walltimeResolution = resolution
	return &derived
}
// We choose arbitrary resolutions here because there's no perfect alternative. For example, according to the
// source in time.go, windows monotonic resolution can be 15ms. This chooses arbitrarily 1us for wall time and
// 1ns for monotonic. See RATIONALE.md for more context.

// WithSysWalltime implements ModuleConfig.WithSysWalltime.
func (c *moduleConfig) WithSysWalltime() ModuleConfig {
	resolution := sys.ClockResolution(time.Microsecond.Nanoseconds()) // 1us
	return c.WithWalltime(platform.Walltime, resolution)
}
// WithNanotime implements ModuleConfig.WithNanotime.
func (c *moduleConfig) WithNanotime(nanotime sys.Nanotime, resolution sys.ClockResolution) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.nanotimeTime = &nanotime
	derived.nanotimeResolution = resolution
	return &derived
}
// WithSysNanotime implements ModuleConfig.WithSysNanotime.
func (c *moduleConfig) WithSysNanotime() ModuleConfig {
	const resolution sys.ClockResolution = 1 // 1ns
	return c.WithNanotime(platform.Nanotime, resolution)
}
// WithRandSource implements ModuleConfig.WithRandSource.
func (c *moduleConfig) WithRandSource(source io.Reader) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.randSource = source
	return &derived
}
// WithWorkDirFS implements ModuleConfig.WithWorkDirFS.
func (c *moduleConfig) WithWorkDirFS(fsys fs.FS) ModuleConfig {
	derived := *c // shallow copy keeps the receiver immutable
	derived.fs = derived.fs.WithWorkDirFS(fsys)
	return &derived
}
// toSysContext creates a baseline wasm.Context configured by ModuleConfig.
//
// It flattens the pair-indexed environ field into "key=value" strings,
// validating keys the same way syscall.Setenv does on Linux, resolves the
// configured pre-opened file systems, and passes everything to
// internalsys.NewContext.
func (c *moduleConfig) toSysContext() (sysCtx *internalsys.Context, err error) {
	var environ []string // Intentionally doesn't pre-allocate to reduce logic to default to nil.
	// Same validation as syscall.Setenv for Linux
	for i := 0; i < len(c.environ); i += 2 {
		key, value := c.environ[i], c.environ[i+1]
		if len(key) == 0 {
			err = errors.New("environ invalid: empty key")
			return
		}
		for j := 0; j < len(key); j++ {
			if key[j] == '=' { // NUL enforced in NewContext
				err = errors.New("environ invalid: key contains '=' character")
				return
			}
		}
		environ = append(environ, key+"="+value)
	}
	preopens, err := c.fs.Preopens()
	if err != nil {
		return nil, err
	}
	return internalsys.NewContext(
		math.MaxUint32,
		c.args,
		environ,
		c.stdin,
		c.stdout,
		c.stderr,
		c.randSource,
		c.walltimeTime, c.walltimeResolution,
		c.nanotimeTime, c.nanotimeResolution,
		preopens,
	)
}
|
def freeze_kill(self, err):
    """Report a fatal error on the console, then tear down both client pumps.

    err: message written verbatim to the attached console.

    NOTE(review): assumes self.client.send and self.client.recv are
    greenlet/task-like objects exposing kill() -- confirm against the client
    implementation; this fragment does not show their types.
    """
    self.console.write(err)
    self.client.send.kill()
    self.client.recv.kill()
    return
<filename>Code_05/src/Code_0505_FindFirstIntersectNode.java
/**
* 问题:在本题中,单链表可能有环也可能无环。
* 给定两个单链表的头结点head1和head2,这两链表可能相交,也可能不相交。
* 请实现一个函数,如果两链表相交,请返回相交的第一个节点,如果两链表不相交,请返回空
*/
public class Code_0505_FindFirstIntersectNode {
/**
* 思路分析:对于给出的两链表,有以下几种情况可以讨论
* 1.两链表一个有环一个无环,结论:无相交节点,返回空
* 2.两链表均有环
* 2.1 两链表无相交节点
* 2.2 两链表有相交节点
* 2.2.1 链表的相交节点在入环节点之前
* 2.2.2 链表的相交节点在入环节点之后
* 3.两链表均无环
* 3.1 两链表不相交,返回空
* 3.2 两链表相交
*/
private static Node process(Node head1, Node head2) {
if (head1 == null || head2 == null) {
return null;
}
Node loop1 = getLoopNode(head1);
Node loop2 = getLoopNode(head2);
/**
* 两个列表均有入环节点
*/
if (loop1 != null && loop2 != null) {
return bothLoop(head1, loop1, head2, loop2);
}
/**
* 两链表均无入环节点
*/
if (loop1 == null && loop2 == null) {
return noLoop(head1, head2);
}
/**
* 其他情况下不可能存在两链表相交的情况
*/
return null;
}
/**
* 获取入环节点,如果有环返回入环节点,没有环返回空
*
* @param head
* @return
*/
private static Node getLoopNode(Node head) {
/**
* 获取入环节点的小trick:
* 1.设置一个快指针和一个慢指针,快指针一次走两步,慢指针依次走一步
* 2.当快指针走到空时,两指针若没有相遇,则无入环节点
* 3.当快指针和慢指针相遇时,慢指针不变,快指针移回起点,慢指针不动
* 4.快指针行进速度变为一次走一步,快指针与慢指针再次开始走
* 5.当快指针和慢指针相遇时,即到达入环节点
*/
Node fast = head.next == null ? null : head.next.next;
Node slow = head.next;
if (fast == null) {
return null;
}
while (fast != slow) {
if (fast == null) {
return null;
}
slow = slow.next;
fast = fast.next == null ? null : fast.next.next;
}
fast = head;
while (fast != slow) {
fast = fast.next;
slow = slow.next;
}
return fast;
}
/**
* 当两个链表均有环时,获取相交节点,没有相交节点返回空
*
* @param head1
* @param loop1
* @param head2
* @param loop2
* @return
*/
private static Node bothLoop(Node head1, Node loop1, Node head2, Node loop2) {
/**
* 1.判断两个入环节点loop1,loop2是否相等
* 1.1 若相等,则按照无环链表head1->loop1,head2->loop2方式(Y或V型)查找相交节点
* 1.2 若不相等,则利用任意一个入环节点开始往下查找
* 1.2.1 在找回到自己之前若找到另一个入环节点,则返回两个入环节点的任意一个均为相交节点
* 1.2.2 循环回到了自己的节点并没有找到另一个链表的入环节点,则说明两链表不相交,返回空
*/
if (loop1 == loop2) {
Node cur1 = head1;
Node cur2 = head2;
int n = 0;
while (cur1 != loop1) {
n++;
cur1 = cur1.next;
}
while (cur2 != loop2) {
n--;
cur2 = cur2.next;
}
cur1 = head1;
cur2 = head2;
if (n > 0) {
while (n != 0) {
cur1 = cur1.next;
n--;
}
} else {
while (n != 0) {
cur2 = cur2.next;
n++;
}
}
while (cur1 != cur2) {
cur1 = cur1.next;
cur2 = cur2.next;
}
return cur1;
} else {
Node cur = loop1.next;
while (cur != loop1) {
if (cur == loop2) {
return cur;
}
cur = cur.next;
}
return null;
}
}
/**
* 当两个链表均无环时,获取相交节点,没有相交节点返回空
*
* @param head1
* @param head2
* @return
*/
private static Node noLoop(Node head1, Node head2) {
/**
* 1.对两链表循环直到其各自的尾节点,并在此过程中记录两链表长度的差值N
* 2.若两链表尾节点不同,则说明不相交,返回空
* 3.若两链表尾节点相同,则从头开始,让长的链表先走N步,然后两链表一起走,相等时返回相交节点
*/
int n = 0;
Node cur1 = head1;
Node cur2 = head2;
while (cur1.next != null) {
n++;
cur1 = cur1.next;
}
while (cur2.next != null) {
n--;
cur2 = cur2.next;
}
if (cur1 != cur2) {
return null;
}
cur1 = head1;
cur2 = head2;
if (n > 0) {
while (n != 0) {
cur1 = cur1.next;
n--;
}
} else {
while (n != 0) {
cur2 = cur2.next;
n++;
}
}
while (cur1 != cur2) {
cur1 = cur1.next;
cur2 = cur2.next;
}
return cur1;
}
public static class Node {
int val;
Node next;
public Node(int val) {
this.val = val;
}
}
public static void main(String[] args) {
// 1->2->3->4->5->6->7->null
Node head1 = new Node(1);
head1.next = new Node(2);
head1.next.next = new Node(3);
head1.next.next.next = new Node(4);
head1.next.next.next.next = new Node(5);
head1.next.next.next.next.next = new Node(6);
head1.next.next.next.next.next.next = new Node(7);
// 0->9->8->6->7->null
Node head2 = new Node(0);
head2.next = new Node(9);
head2.next.next = new Node(8);
head2.next.next.next = head1.next.next.next.next.next; // 8->6
System.out.println(process(head1, head2).val);
// 1->2->3->4->5->6->7->4...
head1 = new Node(1);
head1.next = new Node(2);
head1.next.next = new Node(3);
head1.next.next.next = new Node(4);
head1.next.next.next.next = new Node(5);
head1.next.next.next.next.next = new Node(6);
head1.next.next.next.next.next.next = new Node(7);
head1.next.next.next.next.next.next = head1.next.next.next; // 7->4
//0->9->8->2...
head2 = new Node(0);
head2.next = new Node(9);
head2.next.next = new Node(8);
head2.next.next.next = head1.next; // 8->2
System.out.println(process(head1, head2).val);
// 0->9->8->6->4->5->6..
head2 = new Node(0);
head2.next = new Node(9);
head2.next.next = new Node(8);
head2.next.next.next = head1.next.next.next.next.next; // 8->6
System.out.println(process(head1, head2).val);
}
}
|
/**
 * Static facade over a lazily created GivWenZen executor for JUnit-style tests.
 *
 * This class is not safe for running multiple tests in the same JVM simultaneously:
 * both the registered test instance and the cached executor are shared static state.
 */
public final class GWZForJUnit {
    // Shared mutable static state; the source of the concurrency caveat above.
    private static Object unitTest;
    private static GivWenZen gwz = null;

    /**
     * Registers the running test and resets the cached executor so the next
     * given/when/then call rebuilds it with the test's package on the
     * step-class search path.
     */
    public static void setUp(Object unitTest) {
        GWZForJUnit.unitTest = unitTest;
        gwz = null;
    }

    public static Object and(String methodString) throws Exception {
        return getGWZ().and(methodString);
    }

    // NOTE(review): given/then/when all delegate to GivWenZen.and rather than
    // to same-named GivWenZen methods -- presumably the DSL treats the verbs
    // as interchangeable; confirm against the GivWenZen API before changing.
    public static Object given(String methodString) throws Exception {
        return getGWZ().and(methodString);
    }

    public static Object then(String methodString) throws Exception {
        return getGWZ().and(methodString);
    }

    public static Object when(String methodString) throws Exception {
        return getGWZ().and(methodString);
    }

    /**
     * Lazily builds (and caches) the executor. Steps are searched in the
     * default step package plus, when setUp was called, the package of the
     * registered test instance.
     */
    private static GivWenZen getGWZ() {
        String basePackageForSteps = DomainStepFinder.DEFAULT_STEP_PACKAGE;
        if (unitTest != null)
            basePackageForSteps += "," + unitTest.getClass().getPackage().getName() + ".";
        if (gwz == null)
            gwz = GivWenZenExecutorCreator.instance()
                    .stepClassBasePackage(basePackageForSteps)
                    .create();
        return gwz;
    }
}
Subsets and Splits