package com.bastiaanjansen.otp;
import com.bastiaanjansen.otp.helpers.URIHelper;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import org.apache.commons.codec.binary.Base32;
/**
* Generates one-time passwords
*
* @author <NAME>
*/
class OTP {
private static final String URL_SCHEME = "otpauth";
/**
* Default value for password length
*/
private static final int DEFAULT_PASSWORD_LENGTH = 6;
/**
* Default value for HMAC Algorithm
*/
private static final HMACAlgorithm DEFAULT_HMAC_ALGORITHM = HMACAlgorithm.SHA1;
/**
* Number of digits for generated code in range 6...8, defaults to 6
*/
protected final int passwordLength;
/**
* Hashing algorithm used to generate code, defaults to SHA1
*/
protected final HMACAlgorithm algorithm;
/**
* Secret key used to generate the code; this should be a base32-encoded string
*/
protected final byte[] secret;
protected OTP(final Builder<?, ?> builder) {
if (!validatePasswordLength(builder.passwordLength))
throw new IllegalArgumentException("Password length must be between 6 and 8 digits");
if (builder.secret.length <= 0)
throw new IllegalArgumentException("Secret must not be empty");
this.passwordLength = builder.passwordLength;
this.algorithm = builder.algorithm;
this.secret = builder.secret;
}
public int getPasswordLength() {
return passwordLength;
}
public HMACAlgorithm getAlgorithm() {
return algorithm;
}
public byte[] getSecret() {
return secret;
}
/**
* Checks whether a code is valid for a specific counter with a delay window of 0
*
* @param code an OTP code
* @param counter the number of time intervals that have passed since 1970
* @return a boolean, true if code is valid, otherwise false
*/
public boolean verify(final String code, final long counter) {
return verify(code, counter, 0);
}
/**
* Checks whether a code is valid for a specific counter taking a delay window into account
*
* @param code an OTP code
* @param counter the number of time intervals that have passed since 1970
* @param delayWindow window in which a code can still be deemed valid
* @return a boolean, true if code is valid, otherwise false
*/
public boolean verify(final String code, final long counter, final int delayWindow) {
if (code.length() != passwordLength) return false;
for (int i = -delayWindow; i <= delayWindow; i++) {
String currentCode = generate(counter + i);
if (code.equals(currentCode)) return true;
}
return false;
}
/**
* Generate a code
*
* @param counter the number of time intervals that have passed since 1970
* @return generated OTP code
* @throws IllegalStateException when hashing algorithm throws an error
*/
protected String generate(final long counter) throws IllegalStateException {
if (counter < 0)
throw new IllegalArgumentException("Counter must be greater than or equal to 0");
byte[] secretBytes = decodeBase32(secret);
byte[] counterBytes = longToBytes(counter);
byte[] hash;
try {
hash = generateHash(secretBytes, counterBytes);
} catch (NoSuchAlgorithmException | InvalidKeyException e) {
throw new IllegalStateException(e);
}
return getCodeFromHash(hash);
}
/**
* Generate an OTPAuth URI
*
* @param type of OTPAuth URI: totp or hotp
* @param issuer name for URI
* @param account name for URI
* @param query items of URI
* @return created OTPAuth URI
* @throws URISyntaxException when URI cannot be created
*/
protected URI getURI(final String type, final String issuer, final String account, final Map<String, String> query) throws URISyntaxException {
query.put(URIHelper.DIGITS, String.valueOf(passwordLength));
query.put(URIHelper.ALGORITHM, algorithm.name());
query.put(URIHelper.SECRET, new String(secret, StandardCharsets.UTF_8));
query.put(URIHelper.ISSUER, issuer);
String path = account.isEmpty() ? issuer : String.format("%s:%s", issuer, account);
return URIHelper.createURI(URL_SCHEME, type, path, query);
}
/**
* Decode a base32 value to bytes array
*
* @param value base32 value
* @return bytes array
*/
private byte[] decodeBase32(final byte[] value) {
Base32 codec = new Base32();
return codec.decode(value);
}
/**
* Convert a long value to a byte array
*
* @param value long value
* @return bytes array
*/
private byte[] longToBytes(final long value) {
return ByteBuffer.allocate(Long.BYTES).putLong(value).array();
}
/**
* Generate a hash based on an HMAC algorithm and secret
*
* @param secret decoded secret key bytes used to generate the hash
* @param data to hash
* @return generated hash
* @throws NoSuchAlgorithmException when algorithm does not exist
* @throws InvalidKeyException when secret is invalid
*/
private byte[] generateHash(final byte[] secret, final byte[] data) throws InvalidKeyException, NoSuchAlgorithmException {
// Create a secret key with correct SHA algorithm
SecretKeySpec signKey = new SecretKeySpec(secret, "RAW");
// Mac is 'message authentication code' algorithm (RFC 2104)
Mac mac = Mac.getInstance(algorithm.getHMACName());
mac.init(signKey);
// Hash data with generated sign key
return mac.doFinal(data);
}
/**
* Get code from hash with specified password length
*
* @param hash HMAC hash to derive the code from
* @return OTP code
*/
private String getCodeFromHash(final byte[] hash) {
/* Find mask to get the last 4 bits:
1. Set all bits to 1: ~0 -> 11111111 -> 255 decimal -> 0xFF
2. Shift n bits (in this case 4, because we want the last 4 bits) to the left with <<: 1111 0000
3. Negate the result: 1111 0000 -> 0000 1111
*/
int mask = ~(~0 << 4);
/* Get last 4 bits of hash as offset:
Use the bitwise AND (&) operator to select last 4 bits
Mask should be 00001111 = 15 = 0xF
Last byte of hash & 0xF = last 4 bits:
Example:
Input: decimal 219 as binary: 11011011 &
Mask: decimal 15 as binary: 00001111
-----------------------------------------
Output: decimal 11 as binary: 00001011
*/
byte lastByte = hash[hash.length - 1];
int offset = lastByte & mask;
// Get 4 bytes from hash from offset to offset + 3
byte[] truncatedHashInBytes = { hash[offset], hash[offset + 1], hash[offset + 2], hash[offset + 3] };
// Wrap in ByteBuffer to convert bytes to long
ByteBuffer byteBuffer = ByteBuffer.wrap(truncatedHashInBytes);
long truncatedHash = byteBuffer.getInt();
// Mask most significant bit
truncatedHash &= 0x7FFFFFFF;
// Modulo (%) truncatedHash by 10^passwordLength
truncatedHash %= Math.pow(10, passwordLength);
// Left pad with 0s for a n-digit code
return String.format("%0" + passwordLength + "d", truncatedHash);
}
/**
* Check if password length is in range 6...8
*
* @param passwordLength number of digits for generated code in range 6...8
* @return whether password is valid
*/
private boolean validatePasswordLength(final int passwordLength) {
return passwordLength >= 6 && passwordLength <= 8;
}
/**
* Abstract OTP builder
*
* @author <NAME>
* @param <T> concrete OTP class
* @param <B> concrete builder class
*/
protected abstract static class Builder<T extends OTP, B extends Builder<T, B>> {
/**
* Number of digits for generated code in range 6...8, defaults to 6
*/
private int passwordLength;
/**
* Hashing algorithm used to generate code, defaults to SHA1
*/
private HMACAlgorithm algorithm;
/**
* Secret key used to generate the code; this should be a base32-encoded string
*/
private final byte[] secret;
protected Builder(final byte[] secret) {
this.secret = secret;
this.passwordLength = DEFAULT_PASSWORD_LENGTH;
this.algorithm = DEFAULT_HMAC_ALGORITHM;
}
/**
* Change password length of code
*
* @param passwordLength number of digits for generated code in range 6...8
* @return concrete builder
*/
public B withPasswordLength(final int passwordLength) {
this.passwordLength = passwordLength;
return getBuilder();
}
/**
* Change hashing algorithm
*
* @param algorithm HMAC hashing algorithm
* @return concrete builder
*/
public B withAlgorithm(final HMACAlgorithm algorithm) {
this.algorithm = algorithm;
return getBuilder();
}
protected abstract B getBuilder();
public abstract T build();
}
}
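/*
 * A hedged, standalone sketch (not part of the library above): it walks through the same
 * dynamic-truncation steps documented in getCodeFromHash(), computing the first RFC 4226
 * test value (counter 0, ASCII secret "12345678901234567890"), which is 755224.
 */
class TruncationSketch {
    public static void main(String[] args) throws Exception {
        byte[] key = "12345678901234567890".getBytes(StandardCharsets.US_ASCII);
        byte[] counter = ByteBuffer.allocate(Long.BYTES).putLong(0L).array();
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(new SecretKeySpec(key, "RAW"));
        byte[] hash = mac.doFinal(counter);
        int offset = hash[hash.length - 1] & 0xF;                             // last 4 bits select the offset
        int binary = ByteBuffer.wrap(hash, offset, 4).getInt() & 0x7FFFFFFF;  // 31-bit dynamic truncation
        System.out.printf("%06d%n", binary % 1_000_000);                      // prints 755224
    }
}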
|
package org.geoladris.auth;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Properties;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.geoladris.Geoladris;
import org.geoladris.config.Config;
import org.junit.Before;
import org.junit.Test;
public class LoginServletTest {
private static final String ADMIN_ROLE = "admin";
private LoginServlet servlet;
private HttpServletRequest request;
private HttpServletResponse response;
private HttpSession session;
@Before
public void setup() throws ServletException {
Properties properties = new Properties();
properties.setProperty(LoginServlet.PROP_AUTHORIZED_ROLES, ADMIN_ROLE);
Config config = mock(Config.class);
when(config.getProperties()).thenReturn(properties);
ServletContext context = mock(ServletContext.class);
ServletConfig servletConfig = mock(ServletConfig.class);
when(servletConfig.getServletContext()).thenReturn(context);
servlet = new LoginServlet();
servlet.init(servletConfig);
request = mock(HttpServletRequest.class);
response = mock(HttpServletResponse.class);
session = mock(HttpSession.class);
when(request.getSession()).thenReturn(session);
when(context.getAttribute(Geoladris.ATTR_CONFIG)).thenReturn(config);
}
@Test
public void doesNotAttemptLoginIfCredentialsNotProvided() throws Exception {
servlet.doPost(request, response);
verify(request, never()).login(anyString(), anyString());
}
@Test
public void attemptsLoginIfCredentialsProvided() throws Exception {
String user = "myuser";
String pass = "<PASSWORD>";
when(request.getParameter(LoginServlet.HTTP_PARAM_USER)).thenReturn(user);
when(request.getParameter(LoginServlet.HTTP_PARAM_PASS)).thenReturn(pass);
when(request.isUserInRole(anyString())).thenReturn(true);
servlet.doPost(request, response);
verify(request, atLeastOnce()).getSession();
verify(request).login(user, pass);
}
@Test
public void notAuthorised() throws Exception {
when(request.getParameter(LoginServlet.HTTP_PARAM_USER)).thenReturn("myuser");
when(request.getParameter(LoginServlet.HTTP_PARAM_PASS)).thenReturn("<PASSWORD>");
servlet.doPost(request, response);
verify(response).sendError(HttpServletResponse.SC_BAD_REQUEST);
verify(request).logout();
verify(request.getSession()).invalidate();
}
@Test
public void authorised() throws Exception {
when(request.getParameter(LoginServlet.HTTP_PARAM_USER)).thenReturn("myuser");
when(request.getParameter(LoginServlet.HTTP_PARAM_PASS)).thenReturn("<PASSWORD>");
when(request.isUserInRole(anyString())).thenReturn(true);
servlet.doPost(request, response);
verify(response).sendError(HttpServletResponse.SC_NO_CONTENT);
verify(session).setAttribute(Geoladris.ATTR_ROLE, ADMIN_ROLE);
}
}
|
//
// BBViewController.h
// BeatBuilder
//
// Created by <NAME> on 7/21/12.
// Copyright (c) 2012 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "BBGroover.h"
#import "BBGridView.h"
#import "BBTickView.h"
@interface BBViewController : UIViewController <BBGrooverDelegate, BBGridViewDelegate, BBTickViewDelegate>
@property (nonatomic, strong) BBGroover *groover;
@property (strong, nonatomic) IBOutlet UILabel *tempoLabel;
@property (strong, nonatomic) IBOutlet UISlider *tempoSlider;
@property (strong, nonatomic) IBOutlet BBTickView *tickView;
@end
|
A Texas preschool teacher is out of a job after an anti-Semitism watchdog group uncovered a tweet she wrote encouraging her friend to “Kill some Jews.”
On February 19, Canary Mission posted a number of tweets that Nancy Salem and her friends wrote between 2012 and 2016, when she was part of the University of Texas at Arlington group Students for Justice in Palestine (SJP).
Among the tweets were three from 2013, including one encouraging her friend to “Kill Some Jews.”
Her now-former employer issued a statement making it clear it does not support such talk, even if it was written in the past.
“Our senior management team is working to address this matter,” the school said, according to Lawnewz. “This in no way reflects the views of The Children’s Courtyard. We strive to provide an inclusive environment for all children, families and staff members. We also expect our employees to uphold standards of personal and professional conduct.”
On Wednesday, the school announced they had severed ties with the preschool teacher.
Salem has also deactivated her social media accounts. |
extern crate glium;
#[macro_use]
extern crate imgui;
extern crate imgui_glium_renderer;
extern crate meval;
use imgui::*;
mod support;
const CLEAR_COLOR: [f32; 4] = [114.0 / 255.0, 144.0 / 255.0, 154.0 / 255.0, 1.0];
fn main() {
let mut state = CalculatorState::default();
support::run(
"Imgui Calculator".to_owned(),
CLEAR_COLOR,
|ui| {
show_calculator(ui, &mut state);
true
},
(230, 300),
);
}
//calculator
struct CalculatorState {
expression: String,
}
impl Default for CalculatorState {
fn default() -> Self {
CalculatorState {
expression: "".to_owned(),
}
}
}
fn show_calculator(ui: &Ui, mut state: &mut CalculatorState) {
ui.window(im_str!("Imgui Calculator"))
.position((0.0, 0.0), ImGuiCond::FirstUseEver)
.build(|| {
ui.text(
"This is calculator.
You can make calculations",
);
ui.separator();
ui.text(&state.expression);
ui.separator();
for i in 1..10 {
let i_string = i.to_string();
add_button(&ui, &mut state, &i_string);
if i % 3 != 0 && i != 9 {
ui.same_line(0.0);
}
}
//
add_button(&ui, &mut state, "+");
ui.same_line(0.0);
add_button(&ui, &mut state, "-");
ui.same_line(0.0);
add_button(&ui, &mut state, "*");
//
add_button(&ui, &mut state, "/");
ui.same_line(0.0);
if ui.small_button(im_str!("=")) {
let evaluated = meval::eval_str(&state.expression).unwrap(); //todo error handling
state.expression = format!("{}", evaluated);
}
});
}
fn add_button(ui: &Ui, state: &mut CalculatorState, button_text: &str) {
if ui.small_button(im_str!("{}", button_text)) {
let new_expression = format!("{}{}", &state.expression, button_text);
state.expression = new_expression.to_owned();
}
}
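// A hedged sketch (not in the original file): the `=` branch above calls
// meval::eval_str(...).unwrap(), which panics on an invalid expression; matching
// on the Result instead keeps something displayable in `state.expression`.
fn evaluate(expression: &str) -> String {
    match meval::eval_str(expression) {
        Ok(value) => format!("{}", value),
        Err(_) => "error".to_owned(),
    }
}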
|
package vultr
// The Vultr VMI is deprecated.
// Please use the Cloug provider for Vultr instead.
import "github.com/LunaNode/lobster"
import "github.com/LunaNode/lobster/utils"
import vultr "github.com/LunaNode/vultr/lib"
import "errors"
import "fmt"
import "strconv"
import "strings"
type Vultr struct {
regionId int
client *vultr.Client
vmBandwidth map[string]int64 // for bandwidth accounting
}
func MakeVultr(apiKey string, regionId int) *Vultr {
this := new(Vultr)
this.regionId = regionId
this.client = vultr.NewClient(apiKey, nil)
return this
}
func (this *Vultr) findMatchingPlan(plan lobster.Plan) (int, error) {
apiPlans, err := this.client.GetPlans()
if err != nil {
return 0, err
}
regionPlanIds, err := this.client.GetAvailablePlansForRegion(this.regionId)
if err != nil {
return 0, err
}
regionPlans := make(map[int]bool)
for _, planId := range regionPlanIds {
regionPlans[planId] = true
}
for _, apiPlan := range apiPlans {
if regionPlans[apiPlan.ID] && apiPlan.RAM == plan.Ram && apiPlan.VCpus == plan.Cpu && apiPlan.Disk == plan.Storage {
return apiPlan.ID, nil
}
}
return 0, errors.New("no matching plan found")
}
func (this *Vultr) findOSByName(name string) (int, error) {
osList, err := this.client.GetOS()
if err != nil {
return 0, err
}
for _, os := range osList {
if os.Name == name {
return os.ID, nil
}
}
return 0, fmt.Errorf("no OS found matching %s", name)
}
func (this *Vultr) VmCreate(vm *lobster.VirtualMachine, options *lobster.VMIVmCreateOptions) (string, error) {
var planId int
if vm.Plan.Identification != "" {
planId, _ = strconv.Atoi(vm.Plan.Identification)
} else {
var err error
planId, err = this.findMatchingPlan(vm.Plan)
if err != nil {
return "", err
}
}
serverOptions := &vultr.ServerOptions{
PrivateNetworking: true,
IPV6: true,
}
imageParts := strings.SplitN(options.ImageIdentification, ":", 2)
if len(imageParts) != 2 {
return "", errors.New("malformed image identification: missing colon")
}
if imageParts[0] == "iso" {
customOSID, err := this.findOSByName("Custom")
if err != nil {
return "", fmt.Errorf("failed to get custom OS for creation from ISO: %v", err)
}
serverOptions.OS = customOSID
serverOptions.ISO, _ = strconv.Atoi(imageParts[1])
} else if imageParts[0] == "os" {
serverOptions.OS, _ = strconv.Atoi(imageParts[1])
} else if imageParts[0] == "snapshot" {
snapshotOSID, err := this.findOSByName("Snapshot")
if err != nil {
return "", fmt.Errorf("failed to get snapshot OS for creation from snapshot: %v", err)
}
serverOptions.OS = snapshotOSID
serverOptions.Snapshot = imageParts[1]
} else {
return "", errors.New("invalid image type " + imageParts[0])
}
server, err := this.client.CreateServer(vm.Name, this.regionId, planId, serverOptions)
if err != nil {
return "", err
} else {
return server.ID, nil
}
}
func (this *Vultr) VmDelete(vm *lobster.VirtualMachine) error {
return this.client.DeleteServer(vm.Identification)
}
func (this *Vultr) VmInfo(vm *lobster.VirtualMachine) (*lobster.VmInfo, error) {
server, err := this.client.GetServer(vm.Identification)
if err != nil {
return nil, err
}
info := lobster.VmInfo{
Ip: server.MainIP,
PrivateIp: server.InternalIP,
Hostname: server.Name,
BandwidthUsed: int64(server.CurrentBandwidth * 1024 * 1024 * 1024),
LoginDetails: "password: " + server.DefaultPassword,
}
if server.Status == "pending" {
info.Status = "Installing"
} else if server.Status == "active" {
if server.PowerStatus == "stopped" {
info.Status = "Offline"
} else if server.PowerStatus == "running" {
info.Status = "Online"
} else {
info.Status = server.PowerStatus
}
} else {
info.Status = fmt.Sprintf("%s (%s)", strings.Title(server.Status), strings.Title(server.PowerStatus))
}
return &info, nil
}
func (this *Vultr) VmStart(vm *lobster.VirtualMachine) error {
return this.client.StartServer(vm.Identification)
}
func (this *Vultr) VmStop(vm *lobster.VirtualMachine) error {
return this.client.HaltServer(vm.Identification)
}
func (this *Vultr) VmReboot(vm *lobster.VirtualMachine) error {
return this.client.RebootServer(vm.Identification)
}
func (this *Vultr) VmVnc(vm *lobster.VirtualMachine) (string, error) {
server, err := this.client.GetServer(vm.Identification)
if err != nil {
return "", fmt.Errorf("failed to get server details: %v", err)
} else if server.KVMUrl == "" {
return "", fmt.Errorf("console is not ready yet")
} else {
return server.KVMUrl, nil
}
}
func (this *Vultr) VmAction(vm *lobster.VirtualMachine, action string, value string) error {
return errors.New("operation not supported")
}
func (this *Vultr) VmSnapshot(vm *lobster.VirtualMachine) (string, error) {
snapshot, err := this.client.CreateSnapshot(vm.Identification, utils.Uid(16))
if err != nil {
return "", err
} else {
return "snapshot:" + snapshot.ID, nil
}
}
func (this *Vultr) BandwidthAccounting(vm *lobster.VirtualMachine) int64 {
info, err := this.VmInfo(vm)
if err != nil {
return 0
}
if this.vmBandwidth == nil {
this.vmBandwidth = make(map[string]int64)
}
currentBandwidth, ok := this.vmBandwidth[vm.Identification]
this.vmBandwidth[vm.Identification] = info.BandwidthUsed
if !ok || currentBandwidth > info.BandwidthUsed { // first reading, or the usage counter was reset
return 0
} else {
return info.BandwidthUsed - currentBandwidth
}
}
func (this *Vultr) ImageFetch(url string, format string) (string, error) {
return "", errors.New("operation not supported")
}
func (this *Vultr) ImageInfo(imageIdentification string) (*lobster.ImageInfo, error) {
imageParts := strings.SplitN(imageIdentification, ":", 2)
if len(imageParts) != 2 {
return nil, errors.New("malformed image identification: missing colon")
} else if imageParts[0] != "snapshot" {
return nil, errors.New("can only fetch info for snapshot images")
}
snapshots, err := this.client.GetSnapshots()
if err != nil {
return nil, err
}
for _, snapshot := range snapshots {
if snapshot.ID == imageParts[1] {
if snapshot.Status == "complete" {
return &lobster.ImageInfo{
Status: lobster.ImageActive,
Size: snapshot.Size,
}, nil
} else {
return &lobster.ImageInfo{
Status: lobster.ImagePending,
}, nil
}
}
}
return nil, errors.New("image not found")
}
func (this *Vultr) ImageDelete(imageIdentification string) error {
imageParts := strings.SplitN(imageIdentification, ":", 2)
if len(imageParts) != 2 {
return errors.New("malformed image identification: missing colon")
} else if imageParts[0] != "snapshot" {
return errors.New("can only delete snapshot images")
}
return this.client.DeleteSnapshot(imageParts[1])
}
func (this *Vultr) ImageList() ([]*lobster.Image, error) {
osList, err := this.client.GetOS()
if err != nil {
return nil, err
}
images := make([]*lobster.Image, len(osList))
for i, os := range osList {
images[i] = &lobster.Image{
Name: os.Name,
Identification: fmt.Sprintf("os:%d", os.ID),
}
}
return images, nil
}
func (this *Vultr) PlanList() ([]*lobster.Plan, error) {
apiPlans, err := this.client.GetPlans()
if err != nil {
return nil, err
}
regionPlanIds, err := this.client.GetAvailablePlansForRegion(this.regionId)
if err != nil {
return nil, err
}
regionPlans := make(map[int]bool)
for _, planId := range regionPlanIds {
regionPlans[planId] = true
}
var plans []*lobster.Plan
for _, apiPlan := range apiPlans {
if regionPlans[apiPlan.ID] {
plan := &lobster.Plan{
Name: apiPlan.Name,
Ram: apiPlan.RAM,
Cpu: apiPlan.VCpus,
Storage: apiPlan.Disk,
Identification: fmt.Sprintf("%d", apiPlan.ID),
}
plan.Bandwidth, _ = strconv.Atoi(apiPlan.Bandwidth)
plans = append(plans, plan)
}
}
return plans, nil
}
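// A hedged, standalone sketch (not in the original file) of the "type:identifier"
// image convention that VmCreate, ImageInfo and ImageDelete parse above,
// e.g. "os:160", "iso:12" or "snapshot:abc123".
func splitImageIdentification(id string) (string, string, error) {
	parts := strings.SplitN(id, ":", 2)
	if len(parts) != 2 {
		return "", "", errors.New("malformed image identification: missing colon")
	}
	return parts[0], parts[1], nil
}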
|
“As it is, today’s protesters often act like they are starting from square one. This disconnect cannot be dismissed as the hubris of youth; it is a symptom of our failure to teach this generation about black history and the way our economic and social systems actually function.” (Ilyasah Shabazz, 2015)
Little more than fifty years ago, an entire population of the US was denied the basic rights to go to school, to vote or simply to take public transportation as they pleased. Were these people condemned because they were criminals, deviants or heretics? Shamefully, the reason was much more superficial. Literally speaking, it was “skin deep”. Even after the abolition of slavery, black American citizens were barely treated as “citizens”, let alone viewed as human beings deserving of equal opportunity.
The Civil Rights Movement of the 1950s and 1960s is popularly seen as a successful movement that guaranteed black American citizens all the basic rights they had been deprived of for so long. Recalling this movement, we see the empowering and hopeful images of the ridiculed Little Rock Nine being escorted by federal troops into a previously all-white high school, the buses carrying the Freedom Riders that were set on fire by white supremacists, and the non-violent protesters attacked and hosed down in Birmingham. These moments of both bravery and inhumanity won African Americans the right to education and access to public transportation, and led to segregation being outlawed. History has taught us that it takes masses enduring the lowest of realities to provoke necessary social change.
The Civil Rights Movement is seen as a part of our history, something that happened, a movement that ended at some point in our past. Yet this movement is very much part of America’s current story, a story that has not seen its end, because some of the issues the movement confronted have yet to be resolved.
Issues of The Past Remain Concerns of the Present
Notably, many lynching cases of the past have only recently received appropriate sentencing; lynchings that occurred some 50 years ago were only properly addressed around 40 years later. One notable case was the murders of three civil rights workers associated with the Congress of Racial Equality (CORE) in Mississippi. Although a federal court convicted seven men possibly connected to the murders, it was not until 2005 that one of these men was finally prosecuted for this case. That man was convicted of manslaughter at the age of 80.
Although equality is guaranteed to black Americans, in black and white, in the words of the United States Constitution, inequalities persist in present-day society. Some of these inequalities are not as visible to the everyday American as others, surfacing only in statistics on income, education and the justice system. Others, such as police brutality and hate speech, are seen by all as they spread across newspaper headlines and Twitter updates.
The Lesser Seen Inequalities
Pew Research Center, an American research organization, conducted a poll in 2013 on financial, social and educational disparities between different races within the US. Here are some areas where African Americans are shown to be unequal, according to American opinion and demographics:
Income – Black Americans are close to three times as likely to be living under the federal poverty line as white Americans. The average three-member white household makes 27,000 more dollars per year than the average black household of the same size.
Education – A majority of black Americans state that they are treated less fairly than white students and co-workers in both school and the workplace. Although their college completion rate has increased, it is still only 62 per cent of the white American college completion rate.
Justice System – Over fifty years ago, black men were five times as likely to be in jail as white men. Considering that this was during the Civil Rights Movement, an improvement would be expected since then, right? Today, black men are six times more likely to be in jail. This worsening situation is acknowledged by both white and black Americans: 70 per cent of black Americans and 25 per cent of white Americans agree that black people are treated unequally by the criminal justice system.
Progress – Overall, nearly half of Americans (79% of black Americans, 44% of white Americans) state that much more progress needs to be made in achieving the colour-blind society desired by those of the Civil Rights Movement.
Why do these disparities continue to exist in a society that claims to guarantee equal rights for all? The truth is, in the US, residential segregation has not ceased to exist. Today, black and white Americans inhabit different neighbourhoods of vastly different quality. In “the ghettos” or “the projects” of the US there is more poverty and less opportunity. Social isolation or separation can be just as crippling as blatant racism.
Inequalities Seen By All
“How could the news be blowing up the Trayvon Martin case while hundreds of these Black on White murders got ignored?” (Charleston Church Shooter’s justification behind his mass murder)
Trayvon Martin and his story have become the driving force behind a newly revived civil rights movement in the US. This movement exists under the hashtag and title “BlackLivesMatter”. Trayvon’s murder brought to light the harsh reality of police brutality suffered unfairly by the black population in the US. His story initiated a growing interest in revealing more and more police brutality cases in America. Headlines such as “Beyond the Chokehold: the Path to Eric Garner’s Death” (New York Times, 2015), “The Shooting of Michael Brown” (The Star, 2014) and “Ezell Ford: The mentally ill man killed by the LAPD” (The Washington Post, 2014) were soon spanning well-read newspapers across the world. This soon became an issue that the average American with access to the internet or cable TV couldn’t ignore. Black Americans began protesting, rioting and publicizing their concerns on the issue. Politicians began stating their beliefs about the causes of this dilemma. Divides caused by race became much more obvious as debates over whether police brutality was truly an issue of race took place across America. Looking at the unadjusted stats, 49 percent of those killed by police between May 2013 and April 2015 were white, compared to 30 percent being black. Those arguing that police brutality is not a race issue look at this stat and say, “See? It’s appalling to focus on skin colour as a motive” (cough, cough, Mike Huckabee). However, it is important to adjust the stats to better represent reality. When recognizing the fact that 69% of the American population is white, while only about 13% is black, the stats show that black Americans are in fact 3.5 times more likely to be killed by a cop. The Washington Post also reports that unarmed black men are seven times more likely to be killed by police than unarmed white men.
What Would Malcolm X and MLK Jr. Say?
The likes of Martin Luther King Jr. and Malcolm X would be shaking their heads at the inequalities that have both continued and escalated since their efforts more than fifty years ago. Society hardly resembles the hopeful image of the future that MLK Jr. painted with his words on August 28, 1963:
This speech has proven to be ageless, as MLK Jr.’s desires and pieces of advice can be applied to our present-day issues and the issues we will undoubtedly face in the future if we don’t change.
In regard to the current hostility between the discriminated and their discriminators in America, King would say, “let us not seek to satisfy our thirst for freedom by drinking from the cup of bitterness and hatred”. We cannot continue with the mentality of “us versus them” anymore, because with that mentality there will continue to be a divide in a society that would function better as a whole.
In regard to the self-destructive riots that eventually broke out in cities like Ferguson and Baltimore, King would say that “we must not allow our creative protest to degenerate into physical violence”. We should not need to destroy the places we call home in order to provoke social change. Although violence may seem like the quickest and easiest way to bring an issue to light, it also distracts authorities from the real message, once again creating an “us versus them” mentality.
And in regard to our shortening attention spans, which quickly move on from yesterday’s news and accept these harsh realities until another tragedy comes along to remind us, King would say that we cannot be satisfied “as long as the Negro is the victim of the unspeakable horrors of police brutality”. A single protest can only draw attention for so long, especially in a world where there is an endless amount of news to be consumed. In order to make change, we must constantly remind ourselves of the change we desire and why we desire it. Our efforts must be long term.
In looking at America’s current situation, King would be disappointed that his dream of a nation where people will “not be judged by the colour of their skin but by the content of their character” has yet to become a reality. However, our hopes aren’t lost, as now is the time to continue King’s work and use the growing resources we have to reach out to the people who can implement change. We must take the powerful tools of the internet and artistic expression beyond a simple hashtag and be creative in our ongoing protests against inequality.
Malcolm X was also a prominent human rights activist during the 1960s who would have many words of advice for present-day activists. Although his separationist views may be considered extreme, his message of never taking the path of least resistance moved masses to actually take action. No one could put how Malcolm X would view the current American situation into better words than his own daughter, Ilyasah Shabazz. Ilyasah comments on the sad reality of police brutality and how Malcolm X would advise black Americans to handle this situation in a New York Times article. She highlights what is most important in her father’s message of social advocacy when she writes:
“The key to creating change is a critical mass of ready and angry people whose passion doesn’t ebb and flow with the news cycle.”
Let’s not allow the efforts of Martin Luther King Jr. and Malcolm X to have been in vain. Let’s not forget the efforts of the masses that stood behind these leaders’ messages and the sacrifices they made in order to guarantee the black American population their basic rights. Let’s not waste all the resources we have available to reach out to others in creative and effective ways. It is important to work as a mass willing to make sacrifices, just as those who went down in history did, until we create the “colour blind” society that Martin Luther King Jr. dreamt of.
|
Becoming a Single Teenage Mother: A Vicious Cycle
Abstract
The aim of the study was to investigate the psycho-social impact of single parenthood on teenage mothers. A phenomenological study was conducted. Nine participants were purposefully selected for the study. Semi-structured interviews were conducted to collect data. Thematic content analysis was used to analyze the data. The study found that single teenage motherhood is psychologically distressing, affects relationships with significant others and the mothers' education, and leads to economic dependence on parents and government grants. Single teenage mothers should receive support from their parents, peers, teachers and the community to help them cope with the challenges they encounter. |
from typing import List


def ranges_to_singletons(
    ranges: List[range],
) -> List[range]:
    assert all(
        r.step == 1 and r.start >= 0 and r.stop >= 0 for r in ranges
    ), "Ranges must have step 1 and contain only non-negative indices."
    return [range(i, i + 1) for r in ranges for i in r]
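# A small usage sketch (hypothetical input) for ranges_to_singletons above:
assert ranges_to_singletons([range(0, 3), range(5, 7)]) == [
    range(0, 1), range(1, 2), range(2, 3), range(5, 6), range(6, 7)
]
|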
Today is a good day for T-Mobile customers because they’re getting free T-Mobile Tuesdays gifts. For T-Mobile itself, though, this Tuesday isn’t quite as nice.
T-Mobile has been hit with a complaint from Huawei. Huawei alleges that T-Mobile has been using its patents related to 4G wireless networking while refusing Huawei’s attempts to license those patents to T-Mobile on a fair, reasonable, and non-discriminatory (FRAND) basis.
In its complaint, Huawei claims that it reached out to T-Mobile on June 6, 2014, to talk about licensing its 4G patents and that it wanted to enter into a non-disclosure agreement (NDA) with T-Mo for the discussions. T-Mo refused, Huawei says, and after Huawei again suggested that the two companies enter into an NDA, T-Mo refused once again.
Fast-forward to January 2016: Huawei filed patent infringement actions against T-Mobile and again said that it wanted to enter patent licensing talks. T-Mobile then said that it’d enter into an NDA, and the two companies are said to have then gone back and forth on the details of the “mutually-acceptable NDA” and the licensing offer. Huawei goes on to say that T-Mobile rejected Huawei’s FRAND patent licensing offer and claimed that Huawei violated its commitment to license its patents on FRAND terms.
Finally, in June, Huawei says that T-Mobile agreed to an NDA but then refused to meet face-to-face with Huawei for discussions. Huawei alleges that it sent T-Mobile examples of how it’d infringed on Huawei’s patents and offered to meet face-to-face. T-Mobile has purportedly not responded to Huawei’s offer.
Huawei wants the court to rule that it has met its FRAND commitment in offering licensing terms to T-Mobile and declare T-Mo an unwilling licensee.
T-Mobile hasn’t issued an official statement on the lawsuit. Huawei claims that “T-Mobile cannot operate its core wireless network without the use of Huawei’s 4G Wireless Network Essential Patents,” though, so it’ll be interesting to see how T-Mo responds to Huawei’s complaint. While we wait for T-Mobile’s answer, you can read Huawei’s full complaint below.
Huawei vs. T-Mobile by Todd Bishop on Scribd
Via: GeekWire |
package types
import (
"fmt"
"github.com/stretchr/testify/assert"
"testing"
)
func TestParams_Equal(t *testing.T) {
p1 := DefaultParams()
p2 := DefaultParams()
p3 := DefaultParams()
p3.SessionNodeCount = 1
assert.True(t, p1.Equal(p2))
assert.False(t, p2.Equal(p3))
}
func TestParams_Validate(t *testing.T) {
ethereum, err := NonNativeChain{
Ticker: "eth",
Netid: "4",
Version: "v1.9.9",
Client: "geth",
Inter: "",
}.HashString()
if err != nil {
t.Fatal(err)
}
validParams := DefaultParams()
validParams.SupportedBlockchains = []string{ethereum}
// invalid session node count
invalidParamsSessionNodes := validParams
invalidParamsSessionNodes.SessionNodeCount = -1
// invalid waiting period
invalidParamsWaitingPeriod := validParams
invalidParamsWaitingPeriod.ClaimSubmissionWindow = -1
// invalid supported chains
invalidParamsSupported := validParams
invalidParamsSupported.SupportedBlockchains = []string{"invalid"}
// invalid claim expiration
invalidParamsClaims := validParams
invalidParamsClaims.ClaimExpiration = -1
tests := []struct {
name string
params Params
hasError bool
}{
{
name: "Invalid Params, session nodes",
params: invalidParamsSessionNodes,
hasError: true,
},
{
name: "Invalid Params, session waiting period",
params: invalidParamsWaitingPeriod,
hasError: true,
},
{
name: "Invalid Params, supported chains",
params: invalidParamsSupported,
hasError: true,
},
{
name: "Invalid Params, claims",
params: invalidParamsClaims,
hasError: true,
},
{
name: "Valid Params",
params: validParams,
hasError: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.params.Validate() != nil, tt.hasError)
})
}
}
func TestDefaultParams(t *testing.T) {
assert.True(t, Params{
SessionNodeCount: DefaultSessionNodeCount,
ClaimSubmissionWindow: DefaultClaimSubmissionWindow,
SupportedBlockchains: DefaultSupportedBlockchains,
ClaimExpiration: DefaultClaimExpiration,
}.Equal(DefaultParams()))
}
func TestParams_ParamSetPairs(t *testing.T) {
df := DefaultParams()
assert.NotPanics(t, func() { df.ParamSetPairs() })
}
func TestParams_String(t *testing.T) {
df := DefaultParams()
assert.NotPanics(t, func() { fmt.Printf("%s", df.String()) })
}
|
package com.gakshintala.parkmycar.usecases.lotqueries;
import com.gakshintala.parkmycar.domain.Car;
import com.gakshintala.parkmycar.ports.persistence.QueryLotStatus;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
class QueryLotStatusQueryTest {
private static final Map<Integer, Car> TEST_LOT_STATUS = new ConcurrentHashMap<>();
private static final QueryLotStatus queryLotStatus = () -> TEST_LOT_STATUS;
@Test
void getLotStatus() {
final var lotStatusUseCase = new LotStatusQuery(queryLotStatus);
Assertions.assertEquals(TEST_LOT_STATUS, lotStatusUseCase.execute(null));
}
} |
Radiation-induced chromosomal instability in human fibroblasts: temporal effects and the influence of radiation quality.
PURPOSE
To determine whether chromosomal instability is induced in human diploid fibroblasts by ionizing radiation and to investigate the effects of radiation quality by comparing X-rays, neutrons and alpha-particles.
MATERIALS AND METHODS
Cells from two human diploid fibroblast lines, HF12 and HF19, were irradiated and analysed cytogenetically at 3, 20 and 35 population doublings post-irradiation.
RESULTS
Exposure of HF19 cells to neutrons and alpha-particles resulted in a consistently increased frequency of unstable aberrations, particularly chromatid-type aberrations, compared to control cultures. Aberration frequency after X-irradiation was not significantly greater than controls at 20 population doublings but was significantly increased after 35 population doublings, although not to the same level as that following neutron or alpha-irradiation. No chromosomal instability was demonstrated in the progeny of HF12 cells after X-, neutron or alpha-particle irradiation.
CONCLUSIONS
The data are consistent with the progeny of irradiated HF19 cells expressing chromosomal instability. All three radiations are effective in inducing instability, but the expression of the phenotype is influenced by radiation quality. The absence of radiation-induced chromosomal instability in HF12 cells may reflect the influence of genetic factors. |
// New returns the docker default configuration for libcontainer
func New() *libcontainer.Container {
container := &libcontainer.Container{
CapabilitiesMask: map[string]bool{
"SETPCAP": false,
"SYS_MODULE": false,
"SYS_RAWIO": false,
"SYS_PACCT": false,
"SYS_ADMIN": false,
"SYS_NICE": false,
"SYS_RESOURCE": false,
"SYS_TIME": false,
"SYS_TTY_CONFIG": false,
"AUDIT_WRITE": false,
"AUDIT_CONTROL": false,
"MAC_OVERRIDE": false,
"MAC_ADMIN": false,
"NET_ADMIN": false,
"MKNOD": true,
"SYSLOG": false,
},
Namespaces: map[string]bool{
"NEWNS": true,
"NEWUTS": true,
"NEWIPC": true,
"NEWPID": true,
"NEWNET": true,
},
Cgroups: &cgroups.Cgroup{
Parent: "docker",
DeviceAccess: false,
},
Context: libcontainer.Context{},
}
if apparmor.IsEnabled() {
container.Context["apparmor_profile"] = "docker-default"
}
return container
} |
import { AppStoreService } from './neosound/shared/app.store';
import { BrowserModule } from "@angular/platform-browser";
import { BrowserAnimationsModule } from "@angular/platform-browser/animations";
import { APP_INITIALIZER, NgModule } from "@angular/core";
import { HttpClientModule, HTTP_INTERCEPTORS } from "@angular/common/http";
import { CoreModule } from "./@core/core.module";
import { FormsModule, ReactiveFormsModule } from "@angular/forms";
import { DragulaModule } from "ng2-dragula";
import { BsDropdownModule } from "ngx-bootstrap/dropdown";
import { TooltipModule } from "ngx-bootstrap/tooltip";
import { ModalModule } from "ngx-bootstrap/modal";
import { TypeaheadModule } from "ngx-bootstrap/typeahead";
import { BsDatepickerModule } from "ngx-bootstrap/datepicker";
import { AppComponent } from "./app.component";
import { AppRoutingModule } from "./app-routing.module";
import { ThemeModule } from "./@theme/theme.module";
import { NgbModule } from "@ng-bootstrap/ng-bootstrap";
import { ButtonsModule } from "ngx-bootstrap/buttons";
import { MainComponent } from "./neosound/components/main/main.component";
import { LoginComponent } from "./neosound/pages/auth/login/login.component";
import { DashboardComponent } from "./neosound/pages/dashboard/dashboard.component";
import { StopwordsComponent } from "./neosound/pages/dashboard/details/stopwords/stopwords.component";
import { CardSpinnerComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/card-spinner/card-spinner.component";
import { CallsDashboardComponent } from "./neosound/pages/dashboard/calls-dashboard/calls-dashboard.component";
import { InfoBarComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/info-bar/info-bar.component";
import { NumberOfCallsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/number-of-calls/number-of-calls.component";
import { ProcessedCallsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/processed-calls/processed-calls.component";
import { PerformanceByAgentComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/performance-by-agent/performance-by-agent.component";
import { StopWordsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/stopwords/stopwords.component";
import { HitsStopwordsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/hits-stopwords/hits-stopwords.component";
import { TopicComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/topics/topics.component";
import { HitsTopicsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/hits-topics/hits-topics.component";
import { AssessmentByAgentComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/assessment-by-agent/assessment-by-agent.component";
import { MinutesStatsBatchesComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/minutes-stats-batches/minutes-stats-batchess.component";
import { MinutesStatsMinutesComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/minutes-stats-minutes/minutes-stats-minutes.component";
import { CallsByDayComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/calls-by-day/calls-by-day.component";
import { PopularWordsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/popular-words/popular-words.component";
import { FreqWordsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/freq-words/freq-words.component";
import { AverageSentimentsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/average-sentiments/average-sentiments.component";
import { CountSentimentsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/count-sentiments/count-sentiments.component";
import { SentimentalSankeyComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/sentimental-sankey/sentimental-sankey.component";
import { SentimentalTreeComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/sentiment-tree/sentiment-tree.component";
import { KeywordsPhrasesComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/keywords-phrases/keywords-phrases.component";
import { UploadInstructionsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/upload-instructions/upload-instructions.component";
import { QueriesChartComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/queries-chart/queries-chart.component";
import { SentimentStatsBatchesComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/sentiment-stats-batches/sentiment-stats-batches.component";
import { SentimentalOfCallsComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/sentimental-of-calls/sentimental-of-calls.component";
import { SentimentStatsDayComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/sentiment-stats-day/sentiment-stats-day.component";
import { SentimentalCallsByDayComponent } from "./neosound/pages/dashboard/calls-dashboard/cards/sentimental-calls-by-day/sentimental-calls-by-day.component";
import { TextsDashboardComponent } from "./neosound/pages/dashboard/texts-dashboard/texts-dashboard.component";
import { ApiCallsStatsService } from "./neosound/pages/dashboard/calls-dashboard/services/api-calls-stats.service";
import { ChartDataService } from "./neosound/pages/dashboard/calls-dashboard/services/chart-data.service";
import { DashboardFileStatsService } from "./neosound/pages/dashboard/calls-dashboard/services/file-stats.service";
import { MinutesStatsService } from "./neosound/pages/dashboard/calls-dashboard/services/minutes-stats.service";
import { TagCloudService } from "./neosound/pages/dashboard/calls-dashboard/services/tag-cloud.service";
import { TopicCloudService } from "./neosound/pages/dashboard/calls-dashboard/services/topic-cloud.service";
import { ChecklistStatsService } from "./neosound/pages/dashboard/assessment-dashboard/services/checklist-stats.service";
import { SignupLinkComponent } from "./neosound/pages/auth/signup-link/signup-link.component";
import { SignupDetailedComponent } from "./neosound/pages/auth/signup-detailed/signup-detailed.component";
import { TermsComponent } from "./neosound/pages/auth/terms/terms.component";
import { ForgotPasswordComponent } from "./neosound/pages/auth/forgot-password/forgot-password.component";
import { AccountDetailsComponent } from "./neosound/pages/user/account-details/account-details.component";
import { FilesListComponent } from "./neosound/pages/user/files-list/files-list.component";
import { TextFilesListComponent } from "./neosound/pages/user/text-files-list/text-files-list.component";
import { FileResultsComponent } from "./neosound/pages/user/file-results/file-results.component";
import { LandingComponent } from "./neosound/pages/landing/landing.component";
import { OrganizationSettingsComponent } from "./neosound/pages/admin/organisation-settings/organization-settings.component";
import { KeywordsComponent } from "./neosound/pages/admin/organisation-settings/components/keywords/keywords.component";
import { SensitiveDataComponent } from "./neosound/pages/admin/organisation-settings/components/sensitive-data/sensitive-data.component";
import { CheckListComponent } from "./neosound/pages/admin/organisation-settings/components/check-list/check-list.component";
import { SetupStopwordsComponent } from "./neosound/pages/admin/organisation-settings/components/stopwords/setup-stopwords.component";
import { AuthGuard } from "./neosound/shared/auth-guard";
import { CanDeactivateGuard } from "./neosound/shared/can-deactivate";
import { AboutComponent } from "./neosound/pages/about/about.component";
import { MinutesSecondsPipe } from "./neosound/minutes-seconds.pipe";
import { FilesService } from "./neosound/services/files.service";
import { UploadService } from "./neosound/services/upload.service";
import { FilterService } from "./neosound/services/filter.service";
import { TextFilterService } from "./neosound/services/text-filter.service";
import { OrganizationSettingsService } from "./neosound/services/organization-settings.service";
import { PlayerService } from "./neosound/services/player.service";
import { AnalyticsService } from "./neosound/services/analytics.service";
import { MediaRecorderService } from "./neosound/services/media-recorder.service";
import { UsersService } from "./neosound/services/users.service";
import { RequestsHttpInterceptor } from "./neosound/shared/requests-http.interceptor";
import { TagCloudModule } from "angular-tag-cloud-module";
import { NbAlertModule, NbInputModule } from "@nebular/theme";
import { ToastrModule } from "ngx-toastr";
import { NgDatepickerModule } from "ng2-datepicker";
import { TagInputModule } from "ngx-chips";
import {
FooterComponent,
HeaderComponent,
} from "./@theme/components";
import {
OneColumnLayoutComponent,
} from "./@theme/layouts";
import { UploadDialogComponent } from "./neosound/pages/user/upload-dialog/upload-dialog.component";
import { UploadProgressComponent } from "./neosound/pages/user/upload-progress/upload-progress.component";
import { FileDropModule } from "ngx-file-drop";
import { PlayerComponent } from "./neosound/pages/guest/player-details/player/player.component";
import { PlayerDetailsComponent } from "./neosound/pages/guest/player-details/player-details.component";
import { AnalyticDetailsComponent } from "./neosound/pages/guest/analytic-details/analytic-details.component";
import { VideoDetailsComponent } from "./neosound/pages/guest/video-details/video-details.component";
import { VideoPlayerComponent } from "./neosound/pages/guest/video-details/video-player/video-player.component";
import { VideoFileInfoService } from "./neosound/pages/guest/video-details/services/video-file-info.service";
import { TextLogComponent } from "./neosound/pages/guest/player-details/cards/text-log/text-log.component";
import { CheckListFormComponent } from "./neosound/pages/guest/player-details/cards/check-list/check-list.component";
import { TextComplianceComponent } from "./neosound/pages/guest/player-details/cards/text-compliance/text-compliance.component";
import { FullTextComponent } from "./neosound/pages/guest/player-details/cards/full-text/full-text.component";
import { TextStopwordsComponent } from "./neosound/pages/guest/player-details/cards/text-stopwords/text-stopwords.component";
import { TextSankeyComponent } from "./neosound/pages/guest/player-details/cards/text-sankey/text-sankey.component";
import { TextKeywordsComponent } from "./neosound/pages/guest/player-details/cards/text-keywords/text-keywords.component";
import { TextTagCloudComponent } from "./neosound/pages/guest/player-details/cards/text-tag-cloud/text-tag-cloud.component";
import { TextHitsWordsComponent } from "./neosound/pages/guest/player-details/cards/text-hits-words/text-hits-words.component";
import { FileInfoComponent } from "./neosound/pages/guest/player-details/cards/file-info/file-info.component";
import { FileChartDataService } from "./neosound/pages/guest/player-details/services/file-chart-data.service";
import { FileResultService } from "./neosound/pages/guest/player-details/services/file-result.service";
import { FileInfoService } from "./neosound/pages/guest/player-details/services/file-info.service";
import { WaveSurferService } from "./neosound/pages/guest/player-details/player/wave-surfer.service";
import { FileStatsService } from "./neosound/pages/guest/player-details/services/file-stats.service";
import { FilePeeksService } from "./neosound/pages/guest/player-details/services/file-peeks.service";
import { ApiPageComponent } from "./neosound/pages/user/api-page/api-page.component";
import { NgxEchartsModule } from "ngx-echarts";
import * as echarts from 'echarts';
import { PageNotFoundComponent } from "./neosound/pages/page-not-found/page-not-found.component";
import { LanguageService } from "./neosound/services/language.service";
import { NgxPaginationModule } from "ngx-pagination";
import { IntervalDirective } from "./neosound/directives/interval.detective";
import { ContenteditableDirective } from "./neosound/directives/contenteditable.directive";
import { BatchListComponent } from "./neosound/pages/user/batch-list/batch-list.component";
import { BatchDetailsComponent } from "./neosound/pages/user/batch-details/batch-details.component";
import { DataService } from "./neosound/shared";
import { ChartPageComponent } from "./neosound/pages/charts/chart-page/chart-page.component";
import { CallsByDayChartLineComponent } from "./neosound/pages/charts/calls-by-day-chart-line/calls-by-day-chart-line.component";
import { TotalMinutesByDayChartLineComponent } from "./neosound/pages/charts/total-minutes-by-day-chart-line/total-minutes-by-day-chart-line.component";
import { TotalMinutesChartBarComponent } from "./neosound/pages/charts/total-minutes-chart-bar/total-minutes-chart-bar.component";
import { TotalByQueriesChartPieComponent } from "./neosound/pages/charts/total-by-queries-chart-pie/total-by-queries-chart-pie.component";
import { TotalMinutesPlusBatchesChartBarComponent } from "./neosound/pages/charts/total-minutes-plus-batches-chart-bar/total-minutes-plus-batches-chart-bar.component";
import { KeywordsRadialTreeComponent } from "./neosound/pages/charts/keywords-radial-tree/keywords-radial-tree.component";
import { TagifyComponent } from "./neosound/components/tagify/angular-tagify.component";
import { AssessmentDashboardComponent } from './neosound/pages/dashboard/assessment-dashboard/assessment-dashboard.component';
import { AssessmentNcallsByQuestionComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-ncalls-by-question/assessment-ncalls-by-question.component';
import { AssessmentNcallsAndNpositiveByQuestionComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-ncalls-and-npositive-by-question/assessment-ncalls-and-npositive-by-question.component';
import { AssessmentAvgscoreByAgentComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-avgscore-by-agent/assessment-avgscore-by-agent.component';
import { AssessmentReachedStagesBarComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-reached-stages-bar/assessment-reached-stages-bar.component';
import { AssessmentReachedStagesBarStackedComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-reached-stages-bar-stacked/assessment-reached-stages-bar-stacked.component';
import { AssessmentAvgscoreByAgentQuestionBarComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-avgscore-by-agent-question-bar/assessment-avgscore-by-agent-question-bar.component';
import { AssessmentAvgscoreByAgentQuestionBarStackedComponent } from './neosound/pages/dashboard/assessment-dashboard/cards/assessment-avgscore-by-agent-question-bar-stacked/assessment-avgscore-by-agent-question-bar-stacked.component';
import {AutoTagCloudService} from "./neosound/pages/dashboard/calls-dashboard/services/auto-tag-cloud.service";
import { HitsAutotagsComponent } from './neosound/pages/dashboard/calls-dashboard/cards/hits-autotags/hits-autotags.component';
import { AutotagsByDayComponent } from './neosound/pages/dashboard/calls-dashboard/cards/autotags-by-day/autotags-by-day.component';
import { HitsBatchesComponent } from './neosound/pages/dashboard/calls-dashboard/cards/hits-batches/hits-batches.component';
import { AgentCallsByDayComponent } from './neosound/pages/dashboard/calls-dashboard/cards/agent-calls-by-day/agent-calls-by-day.component';
import { FontAwesomeModule } from '@fortawesome/angular-fontawesome';
export function appConfig(app: AppStoreService) {
return (): Promise<any> => {
return app.fetchConfig();
}
}
@NgModule({
declarations: [
HeaderComponent,
FooterComponent,
OneColumnLayoutComponent,
AppComponent,
MinutesSecondsPipe,
MainComponent,
LoginComponent,
DashboardComponent,
StopwordsComponent,
HitsStopwordsComponent,
PopularWordsComponent,
FreqWordsComponent,
TopicComponent,
HitsTopicsComponent,
AssessmentByAgentComponent,
MinutesStatsBatchesComponent,
MinutesStatsMinutesComponent,
SentimentStatsBatchesComponent,
SentimentalCallsByDayComponent,
SentimentStatsDayComponent,
SentimentalOfCallsComponent,
CallsByDayComponent,
UploadDialogComponent,
SignupLinkComponent,
SignupDetailedComponent,
UploadProgressComponent,
TermsComponent,
ForgotPasswordComponent,
PlayerComponent,
TagifyComponent,
AccountDetailsComponent,
FilesListComponent,
TextFilesListComponent,
FileResultsComponent,
LandingComponent,
OrganizationSettingsComponent,
KeywordsComponent,
SensitiveDataComponent,
CheckListComponent,
SetupStopwordsComponent,
AboutComponent,
PlayerDetailsComponent,
AnalyticDetailsComponent,
TextLogComponent,
CheckListFormComponent,
TextComplianceComponent,
TextStopwordsComponent,
AverageSentimentsComponent,
CountSentimentsComponent,
SentimentalSankeyComponent,
SentimentalTreeComponent,
KeywordsPhrasesComponent,
UploadInstructionsComponent,
QueriesChartComponent,
TextSankeyComponent,
TextKeywordsComponent,
TextTagCloudComponent,
TextHitsWordsComponent,
FullTextComponent,
ApiPageComponent,
PageNotFoundComponent,
IntervalDirective,
ContenteditableDirective,
BatchListComponent,
BatchDetailsComponent,
ChartPageComponent,
CallsByDayChartLineComponent,
TotalMinutesByDayChartLineComponent,
TotalMinutesChartBarComponent,
TotalByQueriesChartPieComponent,
TotalMinutesPlusBatchesChartBarComponent,
KeywordsRadialTreeComponent,
CallsDashboardComponent,
StopWordsComponent,
InfoBarComponent,
CardSpinnerComponent,
NumberOfCallsComponent,
PerformanceByAgentComponent,
ProcessedCallsComponent,
TextsDashboardComponent,
FileInfoComponent,
AssessmentDashboardComponent,
AssessmentNcallsByQuestionComponent,
AssessmentNcallsAndNpositiveByQuestionComponent,
AssessmentAvgscoreByAgentComponent,
AssessmentReachedStagesBarComponent,
AssessmentReachedStagesBarStackedComponent,
AssessmentAvgscoreByAgentQuestionBarComponent,
AssessmentAvgscoreByAgentQuestionBarStackedComponent,
VideoPlayerComponent,
VideoDetailsComponent,
HitsAutotagsComponent,
AutotagsByDayComponent,
HitsBatchesComponent,
AgentCallsByDayComponent,
],
imports: [
BrowserModule,
BrowserAnimationsModule,
HttpClientModule,
AppRoutingModule,
NbAlertModule,
FormsModule,
ReactiveFormsModule,
NbInputModule,
NgxEchartsModule.forRoot({ echarts }),
TagCloudModule,
DragulaModule.forRoot(),
BsDropdownModule.forRoot(),
TooltipModule.forRoot(),
ModalModule.forRoot(),
TypeaheadModule.forRoot(),
ButtonsModule.forRoot(),
NgbModule,
ThemeModule.forRoot(),
CoreModule.forRoot(),
FileDropModule,
ToastrModule.forRoot(),
NgDatepickerModule,
NgxPaginationModule,
TagInputModule,
BsDatepickerModule.forRoot(),
FontAwesomeModule,
],
bootstrap: [AppComponent],
providers: [
AuthGuard,
CanDeactivateGuard,
UsersService,
FilesService,
FilterService,
FileChartDataService,
FileInfoService,
FileStatsService,
ApiCallsStatsService,
ChartDataService,
DashboardFileStatsService,
MinutesStatsService,
TagCloudService,
TopicCloudService,
ChecklistStatsService,
FilePeeksService,
FileResultService,
UploadService,
TextFilterService,
OrganizationSettingsService,
PlayerService,
AnalyticsService,
MediaRecorderService,
WaveSurferService,
VideoFileInfoService,
AutoTagCloudService,
{
provide: HTTP_INTERCEPTORS,
useClass: RequestsHttpInterceptor,
multi: true,
},
AppStoreService,
{
provide: APP_INITIALIZER,
useFactory: appConfig,
deps: [AppStoreService],
multi: true,
},
LanguageService,
DataService,
]
})
export class AppModule {}
|
package me.hsy.mybatis.generator.enhance.task;
import me.hsy.mybatis.generator.enhance.common.Constants;
import me.hsy.mybatis.generator.enhance.config.Configuration;
import me.hsy.mybatis.generator.enhance.framework.AbstractApplicationTask;
import me.hsy.mybatis.generator.enhance.framework.context.ApplicationContext;
import me.hsy.mybatis.generator.enhance.handler.BaseHandler;
import me.hsy.mybatis.generator.enhance.handler.impl.ServiceImplHandle;
import me.hsy.mybatis.generator.enhance.handler.impl.ServiceInfoHandle;
import me.hsy.mybatis.generator.enhance.model.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author heshiyuan
*/
public class ServiceImplTask extends AbstractApplicationTask {
    private static final String SERVICE_IMPL_FTL = "template/ServiceImpl.ftl";
@SuppressWarnings("unchecked")
@Override
protected boolean doInternal(ApplicationContext context){
try{
logger.info("开始生成ServiceImpl");
context.getServiceInfoList().stream().forEach(serviceInfo -> {
BaseHandler<ServiceImplInfo> handler = new ServiceImplHandle(Service_Impl_FTL,
generateServiceImplInfo(serviceInfo));
handler.execute();
});
logger.info("结束生成ServiceImpl");
return false;
}catch (Exception e){
logger.info("异常,至此结束", e);
return true;
}
}
private ServiceImplInfo generateServiceImplInfo(ServiceInfo serviceInfo) {
String packageNameService = Configuration.getString("serviceImpl.package");
String packageNameDao = Configuration.getString("dao.package");
ServiceImplInfo serviceInfoImpl = new ServiceImplInfo();
serviceInfoImpl.setPackageStr(packageNameService);
serviceInfoImpl.setClassName(serviceInfo.getEntityInfo().getClassName() + Constants.SERVICE_IMPL_SUFFIX);
List<String> importStrList = new ArrayList<>();
String convertClassName = serviceInfo.getEntityInfo().getClassName() + Constants.CONVERT_UTILS_SUFFIX;
importStrList.add(Configuration.getString("convert.package") + "." + convertClassName);
importStrList.add(serviceInfo.getPackageStr() + "." + serviceInfo.getClassName());
importStrList.add(packageNameDao + "." + serviceInfo.getEntityInfo().getClassName() + Constants.DAO_SUFFIX);
importStrList.add(serviceInfo.getEntityInfo().getEntityPackage() + "." + serviceInfo.getEntityInfo().getClassName());
importStrList.addAll(serviceInfo.getImportStrList());
serviceInfoImpl.setImportStrList(importStrList);
serviceInfoImpl.setServiceInfo(serviceInfo);
serviceInfoImpl.setConvertClassName(convertClassName);
return serviceInfoImpl;
}
}
|
def eh_tabuleiro(tab):
if not type(tab) == tuple:
return False
if len(tab) != 3:
return False
for linha in tab:
if not isinstance(linha, tuple):
return False
if len(linha) != 3:
return False
for i in (linha):
if not type(i) == int:
return False
if not -1 <= i <= 1:
return False
return True |
// object_example.ts
class Animal {
// Fields
breed: string;
weight: number;
has_fur: boolean;
speed: number;
companion: Animal;
// Special method *NEW*
constructor(breed: string, weight: number, has_fur: boolean, speed: number){
this.breed = breed;
this.weight = weight;
this.has_fur = has_fur;
this.speed = speed;
}
// Rest of functions
run_for_seconds(seconds: number) {
console.log("I have run " + seconds * this.speed + " meters");
}
procreate(partner: Animal = null){
if (partner == null){
partner = this.companion;
}
var new_animal_breed;
var new_animal_weight;
var new_animal_has_fur;
var new_animal_speed;
if(this.breed == partner.breed){
new_animal_breed = this.breed;
} else {
new_animal_breed = this.breed + partner.breed;
}
new_animal_weight = (this.weight + partner.weight) / 2;
new_animal_speed = (this.speed + partner.speed) / 2;
new_animal_has_fur = this.has_fur;
return new Animal(new_animal_breed, new_animal_weight, new_animal_has_fur, new_animal_speed);
}
}
var myanimal = new Animal('cat', 1, true, 1);
var myanimal2 = new Animal('dog', 5, true, 4);
var myanimal3 = new Animal('snake', 0.1, false,0.1);
// scenario 1
myanimal.companion = myanimal2;
var offspring = myanimal.procreate();
console.log(offspring)
// scenario 2
var offspring2 = myanimal3.procreate(myanimal2);
|
import os

import numpy as np
import yaml


def load_model(directory, is_csv=False):
    # NOTE: `keys` and `netbuilder` are project-local modules assumed to be importable here.
    start_dir = os.getcwd()
    os.chdir(directory)
    config_file = keys._config_file
    with open(config_file, 'r') as f:
        parameters = yaml.safe_load(f)
weights_file = parameters[keys._weights_file]
weights_dict = None
if is_csv:
pass
else:
weights_dict = np.load(weights_file)
os.chdir(start_dir)
net = netbuilder.Network()
net._init_from_file(params=parameters,weights_dict=weights_dict)
print("Model {0} loaded correctly".format(net.name))
return net |
from unittest import TestCase
import os,sys,inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import feladatok
class TestLegnagyobbSorszama(TestCase):
def test_feladat_ures(self):
adatok=""
aktualis = feladatok.maxhely(adatok)
elvart = None
print(adatok)
        self.assertEqual(elvart, aktualis, "Incorrectly determined the index of the largest element")
def test_feladat_elso(self):
adatok="7,3; 5,4; 2,0; 1,9; 4,22; 3,7"
aktualis = feladatok.maxhely(adatok)
elvart = 1
print(adatok)
        self.assertEqual(elvart, aktualis, "Incorrectly determined the index of the largest element")
def test_feladat_utolso(self):
adatok="5,3; 5,4; 2,0; 1,9; 4,22; 9,7"
aktualis = feladatok.maxhely(adatok)
elvart = 6
print(adatok)
        self.assertEqual(elvart, aktualis, "Incorrectly determined the index of the largest element")
def test_feladat_kozbe(self):
adatok="5,3; 5,4; 10,0; 1,9; 4,22; 0,7"
aktualis = feladatok.maxhely(adatok)
elvart = 3
print(adatok)
        self.assertEqual(elvart, aktualis, "Incorrectly determined the index of the largest element") |
/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 and
* only version 2 as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#ifndef __MSM_DAI_Q6_PDATA_H__
#define __MSM_DAI_Q6_PDATA_H__
#define MSM_MI2S_SD0 (1 << 0)
#define MSM_MI2S_SD1 (1 << 1)
#define MSM_MI2S_SD2 (1 << 2)
#define MSM_MI2S_SD3 (1 << 3)
#define MSM_MI2S_CAP_RX 0
#define MSM_MI2S_CAP_TX 1
#define MSM_PRIM_MI2S 0
#define MSM_SEC_MI2S 1
#define MSM_TERT_MI2S 2
#define MSM_QUAT_MI2S 3
#define MSM_SEC_MI2S_SD1 4
struct msm_dai_auxpcm_config {
u16 mode;
u16 sync;
u16 frame;
u16 quant;
u16 num_slots;
u16 *slot_mapping;
u16 data;
u32 pcm_clk_rate;
};
struct msm_dai_auxpcm_pdata {
struct msm_dai_auxpcm_config mode_8k;
struct msm_dai_auxpcm_config mode_16k;
};
struct msm_mi2s_pdata {
u16 rx_sd_lines;
u16 tx_sd_lines;
u16 intf_id;
};
struct msm_i2s_data {
u32 capability; /* RX or TX */
u16 sd_lines;
};
#endif
|
Potassium limitation promotes the Sweetgum-Clitopilus symbiosis
Several species of soil free-living saprotrophs can sometimes establish
biotrophic symbiosis with plants, but the basic biology of this
association remains largely unknown. Here, we investigate the symbiotic
interaction between a common soil saprotroph, Clitopilus hobsonii
(Agaricomycetes), and the American sweetgum (Liquidambar styraciflua).
Notably, the colonized root cortical cells contain numerous
microsclerotia-like structures. Fungal colonization led to increased
plant growth and facilitated potassium uptake, particularly under
potassium limitation (0.05 mM K+). The expression of plant genes related
to potassium uptake is not altered during symbiosis, whereas the
transcripts of three fungal genes encoding ACU, HAK, and SKC involved in
K+ nutrition are found in colonized roots. We confirmed the K+ influx
activities by expressing the ChACU and ChSKC genes in a yeast
K+-uptake-defective mutant. Upregulation of ChACU under 0.05 mM K+ and
K+-free conditions was demonstrated in planta and in vitro compared to
the normal condition (5 mM K+). In addition, colonized plants displayed a
larger accumulation of soluble sugars under 0.05 mM K+. The present
study highlights that potassium limitation promotes this novel
tree-fungus symbiosis mainly through a reciprocal transfer of additional
carbon and potassium to both partners, and the role of dual soil
saprotrophs/symbiotrophs in tree nutrition. |
/**
 * Runs a forward pass through the network and returns its prediction ("guess") for the given inputs.
 *
 * @param input_array the input values, one entry per input neuron
 * @return the network's output values
 */
public double[] feedforward(double[] input_array) {
Matrix input = Matrix.ArrayToMatrix(input_array);
Matrix[] hidden_vals = new Matrix[this.hidden_layers.length];
for (int i = 0; i < hidden_vals.length; i++) {
if (i == 0) {
hidden_vals[i] = Matrix.MatrixDotMultiply(this.weights[i], input);
} else {
hidden_vals[i] = Matrix.MatrixDotMultiply(this.weights[i], hidden_vals[i - 1]);
}
hidden_vals[i].add(this.biases[i]);
hidden_vals[i].map(this.activation);
}
Matrix output = Matrix.MatrixDotMultiply(this.weights[this.weights.length - 1], hidden_vals[hidden_vals.length - 1]);
output.add(this.biases[this.biases.length - 1]);
output.map(this.activation);
return output.toArray();
} |
#!/usr/bin/env python
# Copyright Jim Bosch & Ankit Daftery 2010-2012.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import dtype_mod
import unittest
import numpy
class DtypeTestCase(unittest.TestCase):
    def assertEquivalent(self, a, b):
        return self.assertTrue(dtype_mod.equivalent(a, b), "%r is not equivalent to %r" % (a, b))
def testIntegers(self):
for bits in (8, 16, 32, 64):
s = getattr(numpy, "int%d" % bits)
u = getattr(numpy, "uint%d" % bits)
fs = getattr(dtype_mod, "accept_int%d" % bits)
fu = getattr(dtype_mod, "accept_uint%d" % bits)
self.assertEquivalent(fs(s(1)), numpy.dtype(s))
self.assertEquivalent(fu(u(1)), numpy.dtype(u))
# these should just use the regular Boost.Python converters
self.assertEquivalent(fs(True), numpy.dtype(s))
self.assertEquivalent(fu(True), numpy.dtype(u))
self.assertEquivalent(fs(int(1)), numpy.dtype(s))
self.assertEquivalent(fu(int(1)), numpy.dtype(u))
self.assertEquivalent(fs(long(1)), numpy.dtype(s))
self.assertEquivalent(fu(long(1)), numpy.dtype(u))
for name in ("bool_", "byte", "ubyte", "short", "ushort", "intc", "uintc"):
t = getattr(numpy, name)
ft = getattr(dtype_mod, "accept_%s" % name)
self.assertEquivalent(ft(t(1)), numpy.dtype(t))
# these should just use the regular Boost.Python converters
self.assertEquivalent(ft(True), numpy.dtype(t))
if name != "bool_":
self.assertEquivalent(ft(int(1)), numpy.dtype(t))
self.assertEquivalent(ft(long(1)), numpy.dtype(t))
def testFloats(self):
f = numpy.float32
c = numpy.complex64
self.assertEquivalent(dtype_mod.accept_float32(f(numpy.pi)), numpy.dtype(f))
self.assertEquivalent(dtype_mod.accept_complex64(c(1+2j)), numpy.dtype(c))
f = numpy.float64
c = numpy.complex128
self.assertEquivalent(dtype_mod.accept_float64(f(numpy.pi)), numpy.dtype(f))
self.assertEquivalent(dtype_mod.accept_complex128(c(1+2j)), numpy.dtype(c))
if hasattr(numpy, "longdouble"):
f = numpy.longdouble
c = numpy.clongdouble
self.assertEquivalent(dtype_mod.accept_longdouble(f(numpy.pi)), numpy.dtype(f))
self.assertEquivalent(dtype_mod.accept_clongdouble(c(1+2j)), numpy.dtype(c))
if __name__=="__main__":
unittest.main()
|
#include "ws.h"
websocket::connection_sptr ws_conn;
void on_receive(websocket::connection_ptr conn,
const block_t *b,
int32_t size,
bool msg_end) {
std::string data(b, size);
std::string gbk = pump::utf8_to_gbk(data);
printf("received: %s\n", gbk.c_str());
conn->send(b, size);
}
void on_error(websocket::connection_ptr conn, const std::string &msg) {
printf("disconnected\n");
ws_conn.reset();
}
void on_new_connection(const std::string &path, websocket::connection_sptr conn) {
printf("new ws connection\n");
websocket::connection_callbacks cbs;
cbs.frame_cb = pump_bind(&on_receive, conn.get(), _1, _2, _3);
cbs.error_cb = pump_bind(&on_error, conn.get(), _1);
conn->start(cbs);
std::string msg = "hello world";
std::string data = pump::gbk_to_utf8(msg);
conn->send(data.c_str(), (int32_t)data.size());
ws_conn = conn;
}
void start_ws_server(pump::service_ptr sv, const std::string &ip, int port) {
pump::transport::address bind_address(ip, port);
websocket::server_sptr server = websocket::server::create(bind_address);
//websocket::server_sptr server = websocket::server::create(bind_address, "cert.pem", "key.pem");
websocket::server_callbacks cbs;
cbs.upgraded_cb = pump_bind(&on_new_connection, _1, _2);
if (!server->start(sv, cbs))
printf("ws server start error\n");
else
printf("ws server started\n");
sv->wait_stopped();
} |
import java.util.LinkedList;
import java.util.Queue;
import java.util.Scanner;
public class Main {
private static final double FLOATING_POINT_EPSILON = 1E-10;
static class FlowEdge {
final int v;
final int w;
final double capacity;
double flow;
FlowEdge(int v, int w, double capacity) {
this.v = v;
this.w = w;
this.capacity = capacity;
this.flow = 0.;
}
public double residualCapacity(int vertex) {
if (vertex == v) return flow; // backward edge
else if (vertex == w) return capacity - flow; // forward edge
else throw new IllegalArgumentException("invalid endpoint");
}
public void addResidualFlow(int vertex, double delta) {
if (!(delta >= 0.0)) throw new IllegalArgumentException("Delta must be nonnegative");
if (vertex == v) flow -= delta; // backward edge
else if (vertex == w) flow += delta; // forward edge
else throw new IllegalArgumentException("invalid endpoint");
// round flow to 0 or capacity if within floating-point precision
if (Math.abs(flow) <= FLOATING_POINT_EPSILON)
flow = 0;
if (Math.abs(flow - capacity) <= FLOATING_POINT_EPSILON)
flow = capacity;
if (!(flow >= 0.0)) throw new IllegalArgumentException("Flow is negative");
if (!(flow <= capacity)) throw new IllegalArgumentException("Flow exceeds capacity");
}
public String toString() {
return v + "->" + w + " " + flow + "/" + capacity;
}
public int other(int vertex) {
if (vertex == v) return w;
else if (vertex == w) return v;
else throw new IllegalArgumentException("invalid endpoint");
}
}
@SuppressWarnings("unchecked")
static class FlowNetwork {
private static final String NEWLINE = "\n";
private int E = 0;
final int V;
final LinkedList<FlowEdge>[] adj;
FlowNetwork(int V) {
this.V = V;
this.adj = new LinkedList[V];
for (int v = 0; v < V; v++){
adj[v] = new LinkedList<FlowEdge>();
}
}
void addEdge(FlowEdge edge) {
adj[edge.v].add(edge);
adj[edge.w].add(edge);
E++;
}
public Iterable<FlowEdge> adj(int v) {
return adj[v];
}
public String toString() {
StringBuilder s = new StringBuilder();
s.append(V + " " + E + NEWLINE);
for (int v = 0; v < V; v++) {
s.append(v + ": ");
for (FlowEdge e : adj[v]) {
if (e.w != v) s.append(e + " ");
}
s.append(NEWLINE);
}
return s.toString();
}
}
static class FordFulkerson {
private boolean[] marked;
private FlowEdge[] edgeTo;
double value;
FordFulkerson(FlowNetwork G, int s, int t){
value = 0.;
while (hasAugmentingPath(G, s, t)) {
// compute bottleneck capacity
double bottle = Double.POSITIVE_INFINITY;
for (int v = t; v != s; v = edgeTo[v].other(v)) {
bottle = Math.min(bottle, edgeTo[v].residualCapacity(v));
}
// augment flow
for (int v = t; v != s; v = edgeTo[v].other(v)) {
edgeTo[v].addResidualFlow(v, bottle);
}
value += bottle;
}
}
private boolean hasAugmentingPath(FlowNetwork G, int s, int t) {
edgeTo = new FlowEdge[G.V];
marked = new boolean[G.V];
// breadth-first search
Queue<Integer> queue = new LinkedList<>();
queue.add(s);
marked[s] = true;
while (!queue.isEmpty() && !marked[t]) {
int v = queue.poll();
for (FlowEdge e : G.adj(v)) {
int w = e.other(v);
// if residual capacity from v to w
if (e.residualCapacity(w) > 0) {
if (!marked[w]) {
edgeTo[w] = e;
marked[w] = true;
queue.add(w);
}
}
}
}
// is there an augmenting path?
return marked[t];
}
}
public static void main (String[] args){
Scanner scanner = new Scanner(System.in);
int V = scanner.nextInt();
int E = scanner.nextInt();
FlowNetwork network = new FlowNetwork(V);
for (int e = 0; e < E; e++) {
int v = scanner.nextInt();
int w = scanner.nextInt();
int c = scanner.nextInt();
FlowEdge edge = new FlowEdge(v, w, c);
network.addEdge(edge);
}
// System.out.println(network.toString());
FordFulkerson ff = new FordFulkerson(network, 0, V - 1);
System.out.println((int) ff.value);
}
}
|
# adventofcode/2021/21.py
# Python Standard Library Imports
from dataclasses import dataclass
from functools import cache
from utils import (
BaseSolution,
InputConfig,
)
PROBLEM_NUM = '21'
TEST_MODE = False
TEST_MODE = True
EXPECTED_ANSWERS = (1006866, 273042027784929, )
TEST_EXPECTED_ANSWERS = (739785, 444356092776315, )
def main():
input_config = InputConfig(
as_integers=False,
as_comma_separated_integers=False,
as_json=False,
as_groups=False,
as_oneline=False,
as_table=False,
row_func=None,
cell_func=None
)
if TEST_MODE:
input_filename = f'{PROBLEM_NUM}.test.in'
expected_answers = TEST_EXPECTED_ANSWERS
else:
input_filename = f'{PROBLEM_NUM}.in'
expected_answers = EXPECTED_ANSWERS
solution = Solution(input_filename, input_config, expected_answers)
solution.solve()
solution.report()
class Solution(BaseSolution):
def process_data(self):
data = self.data
self.p1_start = int(data[0].split(' ')[-1])
self.p2_start = int(data[1].split(' ')[-1])
def solve1(self):
dice = DeterministicDice(self.p1_start, self.p2_start)
while not dice.has_winner:
dice.play()
answer = dice.losing_score
return answer
def solve2(self):
dice = DiracDice()
wins = dice.play(self.p1_start, self.p2_start)
answer = max(wins)
return answer
@dataclass
class DeterministicDice:
p1_pos: int
p2_pos: int
winning_score: int = 1000
p1_score: int = 0
p2_score: int = 0
turn: int = 1
num_rolls: int = 0
next_throw: int = 1
def roll(self):
rolls = [
n if n <= 100 else n % 100
for n
in range(self.next_throw, self.next_throw + 3)
]
self.next_throw = (self.next_throw + 3 - 1) % 100 + 1
self.num_rolls += 3
return rolls
def move(self, player, spaces):
if player == 1:
self.p1_pos = (self.p1_pos + spaces - 1) % 10 + 1
self.p1_score += self.p1_pos
elif player == 2:
self.p2_pos = (self.p2_pos + spaces - 1) % 10 + 1
self.p2_score += self.p2_pos
else:
raise Exception('Illegal player')
def play(self):
rolls = self.roll()
moves = sum(rolls)
player = 1 if self.turn == 1 else 2
self.move(player, moves)
self.turn = self.turn % 2 + 1
@property
def p1_won(self):
return self.p1_score >= self.winning_score
@property
def p2_won(self):
return self.p2_score >= self.winning_score
@property
def has_winner(self):
return self.p1_won or self.p2_won
@property
def losing_score(self):
loser_score = self.p2_score if self.p1_won else self.p1_score
score = loser_score * self.num_rolls
return score
class DiracDice:
WINNING_SCORE = 21
@cache
def play(self, p1_pos, p2_pos, p1_score=0, p2_score=0):
# Observation: even though the dice rolls are not "deterministic,"
# for a given input (player positions and scores), the outcomes are deterministic
# Each turn is symmetrical, so this cuts down on the computational complexity
wins = [0, 0]
for roll_score in self.roll():
next_p1_pos = (p1_pos + roll_score - 1) % 10 + 1
next_p1_score = p1_score + next_p1_pos
if next_p1_score >= DiracDice.WINNING_SCORE:
wins[0] += 1
else:
# recursively play, swap players and scores
wins2, wins1 = self.play(
p2_pos,
next_p1_pos,
p1_score=p2_score,
p2_score=next_p1_score
)
wins[0] += wins1
wins[1] += wins2
return wins
@cache
def roll(self):
universes = [1, 2, 3]
roll_sums = [x + y + z for x in universes for y in universes for z in universes]
return roll_sums
if __name__ == '__main__':
main()
|
import java.util.List;
import java.util.Arrays;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.util.HashMap;
public final class ProblemA
{
public static void main(String[] args) throws IOException
{
BufferedReader bi = new BufferedReader(new InputStreamReader(System.in));
int n = Integer.parseInt(bi.readLine().trim());
//int[] arr = new int[n];
HashMap<Integer,Integer> h = new HashMap<Integer,Integer>();
int sum = 0;
for(int i=0; i<n; i++)
{
int temp = Integer.parseInt(bi.readLine().trim());
if(h.containsKey(temp)) h.put(temp,h.get(temp)+1);
else h.put(temp,1);
}
bi.close();
if(h.size() != 2)
{
System.out.println("NO");
return;
}
int[] x = new int[2];
int i=0;
for(int y:h.values()) x[i++] = y;
if(x[0] != x[1])
{
System.out.println("NO");
return;
}
i=0;
System.out.println("YES");
for(Integer key: h.keySet()) x[i++] = key;
System.out.print(x[0]+" ");
System.out.println(x[1]);
}
} |
The Redskins during training camp. (Steve Helber/Associated Press)
There has been plenty of talk this month about media blind spots and coastal elitism, about Twitter echo chambers and what happens when America pulls a big ol’ surprise on its pundit class. I know, I know, stick to sports. But after years of immersing myself in the Redskins name debate, this all felt more than a little familiar.
The comparison isn’t perfect, and the stakes of a presidential election are a bit grander than those of an NFL branding debate. But I think there are real similarities, and perhaps a few lessons for the media. How staying tight with your own like-minded set can make it harder to sense what’s happening outside that bubble. How things that seem obvious and indubitable in Northwest D.C. or Brooklyn could play a bit differently elsewhere. How calling someone a racist might feel accurate, but might not be the most persuasive tactic for rallying support. Ditto with insisting that you’re on the right side of history. And how a wave of media and celebrity unanimity in a contentious debate not only doesn’t guarantee success, but in fact might have the opposite effect.
That last thought is particularly resonant this week, with the Redskins starring on “Sunday Night Football.” Because the most polarizing moment of the entire Redskins name debate might have been during a 2013 “Sunday Night Football” broadcast, when Bob Costas joined the then-growing chorus of voices calling for a name change.
“It’s an insult, a slur, no matter how benign the present-day intent,” he said, in halftime remarks that were covered by just about every sports outlet in the country.
Was this Daniel Snyder’s Walter Cronkite moment? It felt that way in real time. In retrospect, it appears close to the opposite. Defenders of the name still cite Costas as a prime example of what they are fighting against: a late-coming outsider without a real stake in the game. An out-of-touch blowhard trying to tell them how they should think and what they should believe. A know-it-all elitist dictating for the simple masses what is right and what is wrong.
Just look at some of the comments on our initial Costas story.
“It’s like an English class competition with these guys, who can find the most soaring rhetoric, the most self-righteous ways to justify their cause of the moment,” one dissenter wrote.
“Costas must think he will be part of the ‘in group’ with a seat at the table of the elites,” another wrote. “Dan Snyder, continue to stand your ground.”
“Costas and his merry band can rant on all they want,” a third wrote. “Danny Snyder deserves credit for ignoring the PC Posse and malcontents and effectively telling them to . . . whatever.”
“Dan Snyder — Be nice but as Churchill said ‘Never give in,’ ” a fourth wrote.
“Now we all know Dan Snyder is far (far, far, far, infinity) from perfection,” a fifth wrote. “However, all Redskin fans can relate to the love of our team, and team name. . . . Give credit to Dan Snyder for standing up for it.”
How many minds did Costas change? And how many others did he convince to do the unthinkable: defend the oft-loathed team owner. Sure, many mocked and detested Snyder’s hard-line stance, but has he ever been as loved by hardcore fans as when he refused to back down from people such as Costas?
Think now of that widely-shared post-election piece by CBS News’s Will Rahn, who reflected on the “unbearable smugness” of the political press, and how “profession-wide smugness and protestations of superiority” have lessened the media’s power. Think of how Trump never backed down from his attackers and instead picked fights with his critics — his version of Snyder’s “NEVER — you can use caps.”
“The more they try to attack him, the more we love him,” one Trump supporter told the BBC, and to a lesser degree that happened with Snyder. Both men remained unpopular, but the notion that they were under attack seemed to generate some level of sympathy.
Because it wasn’t just Costas going after the team name. It was “The View” and “The Daily Show,” “South Park” and the ESPYs, Peter King and James Brown, Larry King and Terry Bradshaw. It was the local newspapers that stopped using the Redskins name, and The Washington Post’s legion of anti-name columns. At least five Post sports columnists either spoke out against the name or stopped using it, as did at least two Metro columnists, a Kids Post columnist, at least four op-ed columnists, several guest contributors and the paper’s editorial board (repeatedly).
Which reminds me again of the election: of Beyoncé and Springsteen, of LeBron and Lena Dunham, of John Oliver destroying Trump and Samantha Bee disemboweling him, of the overwhelming array of newspapers that endorsed Clinton, of the Quinnipiac University poll that found almost 9 of 10 Republicans believed that news organizations were biased against Trump, of the Washingtonian headline about how The Post didn’t employ a single pro-Trump columnist. Even members of the conservative intelligentsia (Charles Krauthammer, Kathleen Parker) crossed over to the progressive path on both issues, leaving blunter voices (Sean Hannity, Sarah Palin) to stand up for the other side.
And instead of settling the matter, the elitist consensus led to at least some hopes of a comeuppance.
“BUT THE INTERNET AND BOB COSTAS TOLD US THAT WE SHOULD ALL BE OUTRAGED BY THIS!!!” one happy Redskins defender wrote after a 2016 Washington Post poll found that most Native Americans were not offended by the team name.
“As Trump cleared each hurdle during the campaign, and I saw how the media, the establishment and celebrities tried to derail him, my hope began to grow that I would be able to witness their collective heads explode when he was successful,” one Trump supporter told The Post. “Tuesday night was beyond satisfying to watch unfold.”
“The liberal media’s made-up controversies divide our country,” Palin said of the Redskins debate.
“After months of going back and forth, I decided to listen to [Trump] directly and not through minced and filtered quotes from the mainstream media,” another Trump supporter told The Post.
“You people in the media, and the little circle of elite know-it-alls from D.C., are breathtakingly stupid,” a Redskins defender wrote on another anti-Redskins staff editorial.
“Surprise! There is a whole other part to this country outside of your newsroom walls that actually thinks differently from the mostly liberal ideas that most news outlets put out there,” a Trump supporter told the Guardian after the election.
The issues actually converged, with Trump speaking out in defense of the team name, Hillary Clinton advocating a change, and a pro-Trump super PAC running local ads about their disagreement.
“I’m no fan of Trump, but if you wanted to understand why he is on the rise, you could do a lot worse than reading this article,” wrote one Post commenter on a Redskins name story, months before the election.
No, the issues aren’t identical, starting with the fact that a large majority of Americans say they support the Redskins name, while a minority of registered voters supported Trump. My liberal friends might argue that Trump winning an election and the Redskins triumphing in the polls doesn’t mean history won’t ultimately be on their side.
I just remember the years of arguments I’ve had with Redskins defenders, people with whom I might not ordinarily debate serious societal issues. They saw a concerted and outrageous media effort to tip the scales. They insisted that my friends — who just about uniformly believe the team’s name should change — are out of touch. (My local precinct went 86 percent for Clinton, and I’d guess a vote on changing the team name in upper Northwest wouldn’t be terribly different.) They told me that media Twitter wasn’t the real world, that it created a phony idea of consensus for a stance that wasn’t actually ascendant. And they argued that a politically correct onslaught from big-city elites would only strengthen their convictions.
On that last score, anyhow, they may have had a point. |
Fantastic Mr. Fox may have been a work of fiction, but as you can see from these pictures, this man from County Kilkenny, Ireland is most definitely the real thing.
The man is Patsy Gibbons and his two adorable sidekicks are Grainne and Minnie. Gibbons nursed the foxes back to health after they were found abandoned as pups, and although the animals made a full recovery and could have returned to the wild, it seems that they decided to adopt their rescuer instead! Somewhat unsurprisingly, the three of them receive a lot of attention from local children, so much so that schools have invited Grainne and Minnie to come along so students can see them up close.
“I now have people from all over the country and indeed the UK asking me for advice on looking after foxes,” Gibbons told The Irish Examiner. “I’m no expert and I’m still learning from them day-by-day (but) I’m happy to advise as a lay person.” (h/t: designyoutrust, irishexaminer)
Image credits: Clodagh Kilcoyne and inesemjphotography |
//<NAME> AM:141291
#include <iostream>
#include <string>
using namespace std;
int calculation (int a, int b);
void change_variables(int &a,int &b);
int main (int argc, char **argv)
{
int a = 5;
int b = 8;
//<NAME>
int *grades = new int[3];
    // Changing the value of a const variable
    // (writing through a pointer that casts away const is undefined behavior; kept here only as an exercise)
    volatile const int size = 3;
    //const int *size1 = &size;
    int *p = (int*) &size;
    *p = 100;
for(int i=0; i<3; i++)
{
cout << "Dwse ton " <<i+1<<"o vathmo: ";
cin >> grades[i];
}
cout << "\n";
for(int i=0; i<3; i++)
{
cout<<i+1<<"os Vathmos: "<<grades[i]<<endl;
}
cout << "\n";
    // Call calculation with arguments 3 and 5
int g = calculation(3,5);
cout << "Calculation:" << g << endl;
cout << "\n";
    // Call change_variables
change_variables(a,b);
cout << "a=" << a << "\n" << "b=" << b << endl;
cout <<"Size:" << size << endl;
cout << "\n";
    // Free the dynamically allocated memory (p points to a stack variable, so it must not be deleted)
    delete[] grades;
return 0;
}
// Compares the two values and assigns the larger one to the smaller variable
void change_variables(int &a,int &b)
{
if(a>b)
{
b=a;
}
else
{
a=b;
}
}
// Finds the larger of the two parameters and returns it
int calculation (int a, int b)
{
if(a>b)
return a;
else
return b;
} |
/**
* Updates the specified metadata for a version. Note that this method will fail with
* `FAILED_PRECONDITION` in the event of an invalid state transition. The only valid transition for
* a version is currently from a `CREATED` status to a `FINALIZED` status. Use
* [`DeleteVersion`](../sites.versions/delete) to set the status of a version to `DELETED`.
*
* Create a request for the method "versions.patch".
*
* This request holds the parameters needed by the firebasehosting server. After setting any
* optional parameters, call the {@link Patch#execute()} method to invoke the remote operation.
*
   * @param name The unique identifier for a version, in the format:
   *        sites/site-name/versions/versionID
   *        This name is provided in the response body when you call the
   *        [`CreateVersion`](../sites.versions/create) endpoint.
* @param content the {@link com.google.api.services.firebasehosting.v1beta1.model.Version}
* @return the request
*/
public Patch patch(java.lang.String name, com.google.api.services.firebasehosting.v1beta1.model.Version content) throws java.io.IOException {
Patch result = new Patch(name, content);
initialize(result);
return result;
} |
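A usage sketch (not part of the generated client above): one plausible way to call this patch method to finalize a version. It assumes the enclosing generated service exposes this Versions collection (e.g. via something like firebaseHosting.sites().versions()) and that the generated Version model offers a setStatus(String) setter; the site name and version ID below are placeholders.

    // Hypothetical caller: "versions" is the generated collection object that owns patch(...).
    Version content = new Version().setStatus("FINALIZED");   // setStatus assumed from the generated Version model
    Patch request = versions.patch("sites/site-name/versions/versionID", content);
    Version finalized = request.execute();                     // performs the PATCH request against the Hosting API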
/**
* @author Frank Shaka
* @deprecated Use {@link IEntryStreamNormalizer}
*/
public class Crypto {
private static ISecurityProvider provider = null;
public static void setProvider(ISecurityProvider provider) {
if (Crypto.provider != null)
return;
Crypto.provider = provider;
}
public static ISecurityProvider getProvider() {
if (provider == null)
provider = NullSecurityProvider.getInstance();
return provider;
}
public static OutputStream creatOutputStream(OutputStream output,
boolean encrypt, IEncryptionData encData, String password)
throws CoreException {
return getProvider().createPasswordProtectedOutputStream(output,
encrypt, encData, password);
}
public static InputStream createInputStream(InputStream input,
boolean encrypt, IEncryptionData encData, String password)
throws CoreException {
return getProvider().createPasswordProtectedInputStream(input, encrypt,
encData, password);
}
public static void initEncryptionData(IEncryptionData encData) {
getProvider().initializeEncryptionData(encData);
}
} |
// Copyright (c) 2002 Utrecht University (The Netherlands).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org).
// You can redistribute it and/or modify it under the terms of the GNU
// General Public License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any later version.
//
// Licensees holding a valid commercial license may use this file in
// accordance with the commercial license agreement provided with the software.
//
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
//
// $URL$
// $Id$
//
//
// Authors : <NAME> (<<EMAIL>>)
#ifndef CGAL_ISO_RECTANGLE_D_H
#define CGAL_ISO_RECTANGLE_D_H
#include <CGAL/license/Spatial_searching.h>
#include <functional>
#include <algorithm>
#include <new>
#include <CGAL/assertions.h>
namespace CGAL {
template <class R> class Iso_rectangle_d {
public:
typedef typename R::FT FT;
typedef typename R::Point_d Point_d;
private:
int dim;
FT *lower;
FT *upper;
public:
Iso_rectangle_d(const Point_d& p, const Point_d& q)
{ CGAL_assertion(p.dimension() == q.dimension());
dim = p.dimension();
lower = new FT[dim];
upper = new FT[dim];
for (int i = 0; i < dim; ++i) {
if (p[i] <= q[i]) {
lower[i]=p[i];
upper[i]=q[i];
}
else {
lower[i]=q[i];
upper[i]=p[i];
}
}
}
// copy constructor
Iso_rectangle_d(const Iso_rectangle_d& b) : dim(b.dim) {
lower = new FT[dim];
upper = new FT[dim];
for (int i = 0; i < dim; ++i) {
lower[i]=b.lower[i];
upper[i]=b.upper[i];
}
}
  bool has_on_bounded_side(const Point_d& p) const
  {
    FT h;
    for (int i = 0; i < dimension(); ++i) {
      h=p[i];
      if ( (h < lower[i]) || (h > upper[i]) ) return false;
    }
    return true;
  }
inline int dimension() const { return dim;}
inline FT min_coord(int i) const {
return lower[i];
}
inline FT max_coord(int i) const {
return upper[i];
}
}; // end of class
} // namespace CGAL
#endif // CGAL_ISO_RECTANGLE_D_H
|
***
Jordan has a reputation for being the Middle East’s island of peace. Geopolitically, it is—amid ISIS’s spread across Iraq, Syria’s devastation, and the escalating Israeli-Palestinian conflict, the Hashemite Kingdom feels quiet. Crises don’t happen in Jordan, the narrative goes—that’s why it’s home to more than a million refugees.
Every few weeks, a fresh bout of violence afflicts the Levant, and media attention flits to Jordan. What if Jordan falls next? Could extremists take control of the country? Then the moment passes and the speculation subsides.
But people like Souad, who fled from destruction just a few hours’ drive in any direction from Amman, are well aware that a crisis doesn’t end when its victims cross a border. It just shifts shape, the loud onslaught of extremists and rockets giving way to a muted, daily fight against despair.
Jordan’s real crisis is not the threat of encroaching extremism, but the grinding weight of hosting victims from the region’s various humanitarian emergencies. The technical name for this is a “protracted refugee crisis”—a burden that Jordan uniquely bears as host to more than a million refugees and asylum seekers from surrounding conflicts, according to government numbers. Since 2011, Jordan’s 6.3 million people have taken on roughly an additional 600,000 Syrians, who join about 29,000 Iraqis and some 4,000 refugees from Sudan, Somalia, and other countries, along with thousands more who remain unregistered with UNHCR, the U.N. refugee agency. If this were the United States, it would be as if Canada’s entire population moved in virtually at once.
“The international humanitarian system is really under unprecedented strain at the moment,” says Jeff Crisp, a former UNHCR official who now works at Refugees International. He lists the past year’s blitz of crises: the Philippines, the Central African Republic, South Sudan, Syria. “Just a few weeks ago we said if we had one new major emergency, the system wouldn’t be able to cope—and now we’ve got Iraq.”
From a humanitarian perspective, the refugee crisis is one of survival. From a human perspective, it’s one of purpose. Refugees in Jordan are protected by UNHCR’s mandate, which means they can exist without fear of non-refoulement, or being sent back to their conflict-torn countries. They also get access to food, water, shelter, and basic medical services, at least in the camps. But what happens when immediate relief morphs into long-term sustenance, spilling out of the camps and into the cities?
“People, even as refugees, still want and need to have some agency over how they cope and manage their lives,” says Dawn Chatty, director of the Refugee Studies Center at Oxford University. That’s why most don’t linger long in refugee camps. Syrians crossing the border are immediately registered and brought to either the Zaatari or Azraq camp. But more than 80 percent of Syrians then leave, moving to urban areas along with non-Syrian refugees. |
def assoc_rgx_with_types(self, index=None):
if index is not None:
index = int(index)
regex = sorted(self.game_state['regexes'], key=hash)[index]
else:
regex = self.game_state['current_rgx']
self.categories()
types = raw_input('Type numbers of categories to apply: ')
types = map(categories.__getitem__, map(int, types.split()))
types = set(map(itemgetter(0), types))
self.game_state['types'][regex] |= types
types = ', '.join(types)
puts(green(regex) + ' --> ' + yellow(types)) |
The restored sign of the Toll House Inn, with a commemorative plaque underneath
The Toll House Inn of Whitman, Massachusetts was established in 1930 by Kenneth and Ruth Graves Wakefield. Toll House chocolate chip cookies are named after the inn.
History
Contrary to its name and the sign, which still stands despite the building's having burned down in 1984, the site was never a toll house, and it was built in 1817, not 1709. The use of "toll house" and "1709" was a marketing strategy.[1]
Ruth Wakefield cooked all the food served and soon gained local fame for her desserts. In 1936, while adapting her butter drop dough cookie recipe, she invented the first chocolate chip cookie using a bar of semi-sweet chocolate made by Nestlé.[2][3][4][5][6]
The new dessert soon became very popular. Wakefield contacted Nestlé and they struck a deal: the company would print her recipe on the cover of all their semi-sweet chocolate bars, and she would get a lifetime supply of chocolate. Nestlé began marketing chocolate chips to be used especially for cookies.[2] Wakefield wrote a cookbook, Toll House Tried and True Recipes, that went through 39 printings starting in 1940.[6]
Wakefield died in 1977, and the Toll House Inn burned down from a fire that started in the kitchen on New Year's Eve 1984.[7] The inn was not rebuilt. The site, at 362 Bedford Street, is marked with an historical marker, and that land is now home to a Wendy's restaurant and Walgreens pharmacy. Although there are many manufacturers of chocolate chips today, Nestlé still publishes Wakefield's recipe on the back of each package of Toll House Morsels.[8]
|
// This API only works for non-Open-Drain pin
void gpio_direct_write(gpio_t *obj, BOOL value)
{
uint8_t port_num;
uint8_t pin_num;
uint32_t reg_value;
port_num = obj->hal_port_num;
pin_num = obj->hal_pin_num;
reg_value = HAL_READ32(GPIO_REG_BASE, GPIO_SWPORT_DR_TBL[port_num]);
reg_value &= ~(1 << pin_num);
reg_value |= (value<< pin_num);
HAL_WRITE32(GPIO_REG_BASE, GPIO_SWPORT_DR_TBL[port_num], reg_value);
} |
// BeforeRefreshDoc is an interceptor that validates incoming GitHub webhook requests.
func (c *DocController) BeforeRefreshDoc() {
if !aah.App().IsEnvProfile("prod") {
return
}
githubEvent := strings.TrimSpace(c.Req.Header.Get("X-Github-Event"))
githubDeliveryID := strings.TrimSpace(c.Req.Header.Get("X-Github-Delivery"))
if githubEvent != "push" || ess.IsStrEmpty(githubDeliveryID) {
c.Log().Warnf("Github event: %s, DeliveryID: %s", githubEvent, githubDeliveryID)
c.Reply().BadRequest().JSON(aah.Data{"message": "bad request"})
c.Abort()
return
}
hubSignature := strings.TrimSpace(c.Req.Header.Get("X-Hub-Signature"))
c.Log().Infof("Github Signature: %s", hubSignature)
body, err := ioutil.ReadAll(c.Req.Unwrap().Body)
if err != nil {
c.Log().Errorf("Body read error: %s", hubSignature)
c.Reply().BadRequest().JSON(aah.Data{"message": "bad request"})
c.Abort()
return
}
if ess.IsStrEmpty(hubSignature) || !util.IsValidHubSignature(hubSignature, body) {
c.Log().Warnf("Github Invalied Signature: %s", hubSignature)
c.Reply().BadRequest().JSON(aah.Data{"message": "bad request"})
c.Abort()
return
}
c.Req.Unwrap().Body = ioutil.NopCloser(bytes.NewReader(body))
c.Log().Infof("Event: %s, DeliveryID: %s", githubEvent, githubDeliveryID)
} |
package com.example.tefservices;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// wip debug
Toast.makeText(getApplicationContext(), " MainActivity [onCreate]", Toast.LENGTH_LONG).show();
setContentView(R.layout.activity_main);
// add the buttons defined in the .xml
Button btnStart = (Button) findViewById(R.id.startBtn);
Button btnStop = (Button) findViewById(R.id.stopBtn);
        // add the click listeners to the buttons declared above
btnStart.setOnClickListener( new View.OnClickListener() {
@Override
public void onClick(View v) {
// actually start the service
startService( new Intent(getBaseContext(), MyServices.class) );
}
});
btnStop.setOnClickListener( new View.OnClickListener() {
@Override
public void onClick(View v) {
                // actually stop the service
stopService( new Intent(getBaseContext(), MyServices.class) );
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
|
#pragma once
#include "../node/GraphicsNode.h"
#include <vector>
#include <glm/glm.hpp>
#include "iostream"
namespace Graphics3D {
class QuadTree {
public:
virtual void drawNodes(glm::vec2 pCloseLeft, glm::vec2 pCloseRight,
glm::vec2 pFarLeft, glm::vec2 pFarRight, uint frame) = 0;
virtual void drawAll(uint frame) = 0;
virtual bool isLeaf() = 0;
    /**
     * Checks whether all of the given points lie inside the quad formed by the four view corner points.
     */
static bool wholeShapeInsideView(std::vector<glm::vec3> points, glm::vec2 pCloseLeft, glm::vec2 pCloseRight,
glm::vec2 pFarLeft, glm::vec2 pFarRight) {
for (int i = 0; i < points.size(); i++) {
if (!insideView(points[i], pCloseLeft, pCloseRight, pFarLeft, pFarRight))
return false;
}
return true;
}
    /**
     * Checks whether at least one of the given points lies inside the quad formed by the four view corner points.
     */
static bool shapePartlyInsideView(std::vector<glm::vec3> points, glm::vec2 pCloseLeft, glm::vec2 pCloseRight,
glm::vec2 pFarLeft, glm::vec2 pFarRight) {
for (int i = 0; i < points.size(); i++) {
if (insideView(points[i], pCloseLeft, pCloseRight, pFarLeft, pFarRight))
return true;
}
return false;
}
    /**
     * Checks whether any edge of the shape intersects any edge of the view polygon.
     */
static bool shapeInstersectsView(std::vector<glm::vec3> points, std::vector<glm::vec2> viewPoints) {
for (int i = 0; i < viewPoints.size(); i++) {
for (int j = 0; j < points.size(); j++) {
glm::vec3 point1 = points[j];
glm::vec3 point2 = points[(j + 1)%points.size()];
if (linesIntersects(viewPoints[i], viewPoints[(i + 1)%viewPoints.size()],
glm::vec2(point1.x, point1.z), glm::vec2(point2.x, point2.z)))
return true;
}
}
return false;
}
/**
* Check if lines intersect.
*/
static bool linesIntersects(glm::vec2 l1p1, glm::vec2 l1p2, glm::vec2 l2p1, glm::vec2 l2p2) {
return (toTheRightOfLine(l1p2 - l1p1, l2p1 - l1p1) != toTheRightOfLine(l1p2 - l1p1, l2p2 - l1p1))
&& (toTheRightOfLine(l2p2 - l2p1, l1p1 - l2p1) != toTheRightOfLine(l2p2 - l2p1, l1p2 - l2p1));
}
/**
* returns true if point is inside the quad formed by the other points.
*/
static bool insideView(glm::vec3 point, glm::vec2 pCloseLeft, glm::vec2 pCloseRight,
glm::vec2 pFarLeft, glm::vec2 pFarRight) {
glm::vec2 pos(point.x, point.z);
return (toTheRightOfLine(pCloseLeft - pFarLeft, pos - pFarLeft)
&& toTheRightOfLine(pFarLeft - pFarRight, pos - pFarRight)
&& toTheRightOfLine(pFarRight - pCloseRight, pos - pCloseRight));
}
/**
* returns true if the point is to the right of the line or on the line.
*/
static bool toTheRightOfLine(glm::vec2 line, glm::vec2 point) {
glm::vec3 v(line.x, 0, line.y);
glm::vec3 u(point.x, 0, point.y);
// std::cout << "\n";
// std::cout << "start \n";
// std::cout << line.x;
// std::cout << " : ";
// std::cout << line.y;
// std::cout << "\n";
// std::cout << point.x;
// std::cout << " : ";
// std::cout << point.y;
// std::cout << "\n";
// std::cout << glm::cross(v, u).y;
// std::cout << "\n";
return glm::cross(v, u).y >= 0;
}
};
}
|
// src/utils/isJSON.ts
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export default function isJSON(json: any) {
try {
const jsonStr = JSON.stringify(json);
JSON.parse(jsonStr);
return true;
} catch (error) {
return false;
}
}
|
Capturing the passenger leukocyte
Transfusion-associated leukocyte microchimerism (TA-MC) refers to the long-term persistence of allogeneic (i.e., donor-derived) leukocytes following transfusion. In this issue of TRANSFUSION, Jackman and colleagues provide compelling evidence suggesting that TA-MC, one of the most fascinating—albeit poorly understood—phenomena in transfusion medicine, could well be coming to an end, at least in the most developed areas of the world where highly efficient leukoreduction is widely deployed.
Blood components have been manufactured and processed in a variety of ways based on available technologies, geography, and local customs. Early leukoreduction filters were able to achieve a significant reduction in white blood cell (WBC) burden, while modern filters are able to remove nearly all leukocytes. Further, apheresis technology, when applied to collection of platelets, plasma, and red cells, is increasingly effective at excluding leukocytes from transfusable blood components. Nonetheless, all blood components, including those that have been leukoreduced, contain some leukocytes. The allowable quantity of leukocytes differs by geography. In the United States, 95% of units contain less than 5 × 10⁶ WBCs per unit, while in much of Europe the allowable limit is 1 × 10⁶ WBCs per unit. These leukocytes, regardless of their number, are unwanted contaminants.
Klein described them eloquently as "passenger leukocytes." It is still uncertain whether these "passenger leukocytes" should rather be considered as "stowaways" or "marauders." Toward the end of the twentieth century, a "bounty" was placed on leukocytes' capture for "crimes" that include febrile nonhemolytic transfusion reactions, transfusion-associated graft versus host disease (TA-GVHD), alloimmunization and transmission of cell-associated viruses (notably cytomegalovirus, human T-cell lymphotropic virus, and human immunodeficiency virus). Particularly feared at the time, and a major factor for placing the "bounty," was a concern that these unintended leukocyte transfusions might accelerate the clinical progression of HIV infection. Fortunately, this proved not to be the case: long-term TA-MC was not observed in highly immunocompromised anemic HIV-positive patients following red blood cell transfusions.
Parenteral exposure to allogeneic leukocytes can occur in pregnancy, twinning, and transplantation. The latter includes blood transfusion. Typically, one would expect the prompt clearance of allogeneic cells following transfusion through standard immunologic mechanisms, and this is usually what happens. In the era before molecular tools were available, Schecter and colleagues measured lymphocyte proliferation in response to ordinary transfusion using incorporation of tritiated thymidine, and determined that both donor and recipient cells were proliferating. This phenomenon was later confirmed by Lee and colleagues in transfused orthopedic patients, using gene amplification methods. This work ultimately led to rigorous documentation of long-term TA-MC in multiply transfused trauma patients, but not in other populations. While the phenomenon of long-term TA-MC is well documented, it has proven exceedingly difficult to understand the underlying immunologic mechanisms. The related phenomenon of TA-GVHD may shed some light.
In a very detailed review of TA-GVHD cases across five decades and 26 countries, Kopolovic challenged the conventional wisdom that TA-GVHD cases mainly occur because the transfusion recipient is immunocompromised and therefore unable to reject viable donor lymphocytes. Approximately half the reviewed cases of TA-GVHD lacked the expected recipient risk factor. TA-GVHD cases in both immunocompetent and immunocompromised recipients required exposure to viable donor lymphocytes that the recipient failed to recognize as foreign and that were therefore able to act against the recipient. Whether the subtleties of directionality and partial homozygosity at HLA loci, as described by Kopolovic in TA-GVHD, will be relevant to understanding long-term TA-MC is not yet known.
The study by Jackman and colleagues included 324 transfused trauma patients and 54 nontransfused controls. The investigators used quantitative allele-specific PCR to detect insertion–deletion polymorphisms expected in the donor cells. These polymorphisms are an excellent molecular target to detect chimerism because they are common in the population, differ from the major population typically by 2-3 base pairs (allowing primer recognition), and they are not HLA-linked. A panel of such assays is informative in nearly all cases and, with adequate sampling, can allow detection of a single chimeric sequence against a highly similar allogeneic background with little to no false positivity. Trauma patients were selected because, to date, this is the only clinical population where long-term TA-MC has been repeatedly documented. Why TA-MC should occur in severely injured trauma patients remains poorly understood, but it may be related to the well-documented and profound immune dysregulation that occurs in these patients as described by Jackman. Only one case of long-term TA-MC was detected. Short-term TA-MC was detected in 6.5% of transfused subjects, but oddly in 5.6% of nontransfused controls, although typically at only a single
The opinions and viewpoints expressed in this editorial reflect Dr. William Reed's opinions and viewpoints and not those of his employer, BeiGene, Ltd. or any of its affiliates. doi:10.1111/trf.15571 © 2019 AABB TRANSFUSION 2019;59;3291–3292 |
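A rough back-of-the-envelope sketch (not from the editorial) of why a small panel of biallelic indel assays is informative for nearly all donor–recipient pairs, and why adequate sampling permits detection of very rare chimeric sequences. The marker count, per-marker informativity, chimerism frequency, and genome-equivalent figures below are illustrative assumptions only.

public class ChimerismPanelSketch {
    public static void main(String[] args) {
        // Illustrative assumptions, not values from the study:
        int markers = 12;                    // biallelic indel assays in the panel
        double perMarkerInformative = 0.30;  // chance one marker distinguishes donor from recipient
        double panelInformative = 1.0 - Math.pow(1.0 - perMarkerInformative, markers);
        System.out.printf("P(panel informative) ~ %.3f%n", panelInformative); // ~0.99

        // Sampling side: chance of capturing at least one chimeric genome equivalent
        double chimerismFrequency = 1e-5;    // one donor genome per 100,000 recipient genomes
        double genomesSampled = 5e5;         // genome equivalents interrogated
        double pDetect = 1.0 - Math.pow(1.0 - chimerismFrequency, genomesSampled);
        System.out.printf("P(at least one chimeric genome sampled) ~ %.3f%n", pDetect); // ~0.99
    }
}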
def filter_outliers(self, n):
    """Filter out invalid or anomalous records for the current analysis run.

    Results are cached per (reanalysis product, loss threshold) pair so that repeated
    runs with the same settings reuse the previously filtered data. (The ``n`` argument
    is unused in this excerpt.)
    """
    reanal = self._run.reanalysis_product
    # Return the cached result if this (product, threshold) combination was already filtered
    if (reanal, self._run.loss_threshold) in self.outlier_filtering:
        valid_data = self.outlier_filtering[(reanal, self._run.loss_threshold)]
        return valid_data
    df = self._aggregate.df
    # Keep only records below the loss threshold and without NaN flags
    df_sub = df.loc[
        ((df['availability_pct'] + df['curtailment_pct']) < self._run.loss_threshold) & (df['nan_flag'] == False), :]
    # Plant capacity scaled to an energy equivalent for one time-resolution period
    plant_capac = self._plant._plant_capacity / 1000 * self._hours_in_res
    # Flag reanalysis values outside the 0-40 range
    df_sub = df_sub.assign(flag_range=filters.range_flag(df_sub[reanal], below=0, above=40))
    # Flag unresponsive ("frozen") readings
    df_sub.loc[:, 'flag_frozen'] = filters.unresponsive_flag(df_sub[reanal], threshold=3)
    # Flag energy values inconsistent with the reanalysis window
    df_sub.loc[:, 'flag_window'] = filters.window_range_flag(window_col=df_sub[reanal],
                                                             window_start=5.,
                                                             window_end=40,
                                                             value_col=df_sub['energy_gwh'],
                                                             value_min=0.02 * plant_capac,
                                                             value_max=1.2 * plant_capac)
    df_sub.loc[:, 'flag_final'] = (df_sub.loc[:, 'flag_range']) | (df_sub.loc[:, 'flag_frozen']) | \
                                  (df_sub.loc[:, 'flag_window'])
    # Keep the unflagged rows and the columns needed downstream
    valid_data = df_sub.loc[df_sub.loc[:, 'flag_final'] == False, [reanal,
                                                                   'energy_gwh', 'availability_gwh',
                                                                   'curtailment_gwh']]
    if self.reg_winddirection:
        valid_data_to_add = df_sub.loc[df_sub.loc[:, 'flag_final'] == False, [reanal + '_wd',
                                                                              reanal + '_u_ms', reanal + '_v_ms']]
        valid_data = pd.concat([valid_data, valid_data_to_add], axis=1)
    if self.reg_temperature:
        valid_data_to_add = df_sub.loc[df_sub.loc[:, 'flag_final'] == False, [reanal + '_temperature_K']]
        valid_data = pd.concat([valid_data, valid_data_to_add], axis=1)
    if self.time_resolution == 'M':
        valid_data_to_add = df_sub.loc[df_sub.loc[:, 'flag_final'] == False, ['num_days_expected']]
        valid_data = pd.concat([valid_data, valid_data_to_add], axis=1)
    # Cache and return the filtered dataset
    self.outlier_filtering[(reanal, self._run.loss_threshold)] = valid_data
    return valid_data
// [comment]
// Implementation of the Whitted-style light transport algorithm (E [S*] (D|G) L)
//
// This function computes the color at the intersection point of a ray defined by a position
// and a direction. Note that this function is recursive (it calls itself).
//
// If the material of the intersected object is either reflective or reflective and refractive,
// then we compute the reflection/refraction direction and cast two new rays into the scene
// by calling the castRay() function recursively. When the surface is transparent, we mix
// the reflection and refraction color using the result of the Fresnel equations (they compute
// the amount of reflection and refraction depending on the surface normal, incident view direction
// and surface refractive index).
//
// If the surface is diffuse/glossy we use the Phong illumination model to compute the color
// at the intersection point.
// [/comment]
Vec3f castRay(
const Vec3f &orig, const Vec3f &dir,
const std::vector<std::unique_ptr<Object>> &objects,
const std::vector<std::unique_ptr<LightLite>> &lights,
const Options &options,
State &state,
uint32_t depth,
bool test = false)
{
if (depth > options.maxDepth) {
return options.backgroundColor;
}
Vec3f hitColor = options.backgroundColor;
float tnear = kInfinity;
Vec2f uv;
uint32_t index = 0;
Object *hitObject = nullptr;
if (trace(orig, dir, objects, tnear, index, uv, &hitObject)) {
Vec3f hitPoint = orig + dir * tnear;
Vec3f N;
Vec2f st;
hitObject->getSurfaceProperties(hitPoint, dir, index, uv, N, st);
Vec3f tmp = hitPoint;
switch (hitObject->materialType) {
case kReflectionAndRefraction:
{
Vec3f reflectionDirection = reflect(dir, N).normalize();
Vec3f refractionDirection = refract(dir, N, hitObject->ior).normalize();
Vec3f reflectionRayOrig = (reflectionDirection.dotProduct(N) < 0) ?
hitPoint - N * options.bias :
hitPoint + N * options.bias;
Vec3f refractionRayOrig = (refractionDirection.dotProduct(N) < 0) ?
hitPoint - N * options.bias :
hitPoint + N * options.bias;
state.numReflectionRays++;
Vec3f reflectionColor = castRay(reflectionRayOrig, reflectionDirection, objects, lights, options, state, depth + 1, 1);
state.numRefractionRays++;
Vec3f refractionColor = castRay(refractionRayOrig, refractionDirection, objects, lights, options, state, depth + 1, 1);
float kr;
fresnel(dir, N, hitObject->ior, kr);
hitColor = reflectionColor * kr + refractionColor * (1 - kr);
break;
}
case kReflection:
{
float kr;
fresnel(dir, N, hitObject->ior, kr);
Vec3f reflectionDirection = reflect(dir, N);
Vec3f reflectionRayOrig = (reflectionDirection.dotProduct(N) < 0) ?
hitPoint + N * options.bias :
hitPoint - N * options.bias;
state.numReflectionRays++;
hitColor = castRay(reflectionRayOrig, reflectionDirection, objects, lights, options, state, depth + 1) * kr;
break;
}
default:
{
Vec3f lightAmt = 0, specularColor = 0;
Vec3f shadowPointOrig = (dir.dotProduct(N) < 0) ?
hitPoint + N * options.bias :
hitPoint - N * options.bias;
for (uint32_t i = 0; i < lights.size(); ++i) {
state.numShadowRays++;
Vec3f lightDir = lights[i]->position - hitPoint;
float lightDistance2 = lightDir.dotProduct(lightDir);
lightDir = lightDir.normalize();
float LdotN = std::max(0.f, lightDir.dotProduct(N));
Object *shadowHitObject = nullptr;
float tNearShadow = kInfinity;
bool inShadow = trace(shadowPointOrig, lightDir, objects, tNearShadow, index, uv, &shadowHitObject) &&
tNearShadow * tNearShadow < lightDistance2;
lightAmt += (1.f - inShadow) * lights[i]->intensity * LdotN;
Vec3f reflectionDirection = reflect(-lightDir, N);
specularColor += powf(std::max(0.f, -reflectionDirection.dotProduct(dir)), hitObject->specularExponent) * lights[i]->intensity;
}
hitColor = lightAmt * hitObject->evalDiffuseColor(st) * hitObject->Kd + specularColor * hitObject->Ks;
break;
}
}
}
return hitColor;
} |
package actors
import (
"context"
"strings"
"sync"
"github.com/ipfs/go-cid"
cbor "github.com/ipfs/go-ipld-cbor"
"golang.org/x/xerrors"
"github.com/filecoin-project/go-state-types/manifest"
"github.com/filecoin-project/lotus/chain/actors/adt"
)
var manifestCids map[Version]cid.Cid = make(map[Version]cid.Cid)
var manifests map[Version]map[string]cid.Cid = make(map[Version]map[string]cid.Cid)
var actorMeta map[cid.Cid]actorEntry = make(map[cid.Cid]actorEntry)
const (
AccountKey = "account"
CronKey = "cron"
InitKey = "init"
MarketKey = "storagemarket"
MinerKey = "storageminer"
MultisigKey = "multisig"
PaychKey = "paymentchannel"
PowerKey = "storagepower"
RewardKey = "reward"
SystemKey = "system"
VerifregKey = "verifiedregistry"
)
func GetBuiltinActorsKeys() []string {
return []string{
AccountKey,
CronKey,
InitKey,
MarketKey,
MinerKey,
MultisigKey,
PaychKey,
PowerKey,
RewardKey,
SystemKey,
VerifregKey,
}
}
var (
manifestMx sync.RWMutex
)
type actorEntry struct {
name string
version Version
}
// ClearManifests clears all known manifests. This is usually used in tests that need to switch networks.
func ClearManifests() {
manifestMx.Lock()
defer manifestMx.Unlock()
manifestCids = make(map[Version]cid.Cid)
manifests = make(map[Version]map[string]cid.Cid)
actorMeta = make(map[cid.Cid]actorEntry)
}
// RegisterManifest registers an actors manifest with lotus.
func RegisterManifest(av Version, manifestCid cid.Cid, entries map[string]cid.Cid) {
manifestMx.Lock()
defer manifestMx.Unlock()
manifestCids[av] = manifestCid
manifests[av] = entries
for name, c := range entries {
actorMeta[c] = actorEntry{name: name, version: av}
}
}
// GetManifest gets a loaded manifest.
func GetManifest(av Version) (cid.Cid, bool) {
manifestMx.RLock()
defer manifestMx.RUnlock()
c, ok := manifestCids[av]
return c, ok
}
// ReadManifest reads a manifest from a blockstore. It does not "add" it.
func ReadManifest(ctx context.Context, store cbor.IpldStore, mfCid cid.Cid) (map[string]cid.Cid, error) {
adtStore := adt.WrapStore(ctx, store)
var mf manifest.Manifest
if err := adtStore.Get(ctx, mfCid, &mf); err != nil {
return nil, xerrors.Errorf("error reading manifest (cid: %s): %w", mfCid, err)
}
if err := mf.Load(ctx, adtStore); err != nil {
return nil, xerrors.Errorf("error loading manifest (cid: %s): %w", mfCid, err)
}
actorKeys := GetBuiltinActorsKeys() // TODO: we should be able to enumerate manifests directly.
metadata := make(map[string]cid.Cid, len(actorKeys))
for _, name := range actorKeys {
if c, ok := mf.Get(name); ok {
metadata[name] = c
}
}
return metadata, nil
}
// GetActorCodeID looks up a builtin actor's code CID by actor version and canonical actor name.
func GetActorCodeID(av Version, name string) (cid.Cid, bool) {
manifestMx.RLock()
defer manifestMx.RUnlock()
c, ok := manifests[av][name]
return c, ok
}
func GetActorMetaByCode(c cid.Cid) (string, Version, bool) {
manifestMx.RLock()
defer manifestMx.RUnlock()
entry, ok := actorMeta[c]
if !ok {
return "", -1, false
}
return entry.name, entry.version, true
}
func CanonicalName(name string) string {
idx := strings.LastIndex(name, "/")
if idx >= 0 {
return name[idx+1:]
}
return name
}
|
import {isProduction} from "../env";
import {$log} from "@tsed/common";
import "@tsed/logger-logentries";
$log.name = process.env.LOG_NAME || "API";
export const loggerConfig = {
level: (process.env.LOG_LEVEL || "info") as any,
disableRoutesSummary: isProduction
};
if (isProduction) {
$log.appenders.set("stdout", {
type: "stdout",
levels: ["info", "debug"],
layout: {
type: "json"
}
});
$log.appenders.set("stderr", {
levels: ["trace", "fatal", "error", "warn"],
type: "stderr",
layout: {
type: "json"
}
});
}
export function configureLogger() {
if (process.env.LOG_ENTRIES_KEY) {
$log.appenders
.set("logentries", {
type: "logentries",
levels: ["info", "debug"],
layout: {
type: "json"
},
options: {
token: process.env.LOG_ENTRIES_KEY
}
})
.set("logentries", {
levels: ["trace", "fatal", "error", "warn"],
type: "logentries",
layout: {
type: "json"
},
options: {
token: process.env.LOG_ENTRIES_KEY
}
});
}
}
|
import createMuiTheme from '@material-ui/core/styles/createMuiTheme';
import MuiThemeProvider from '@material-ui/core/styles/MuiThemeProvider'
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import App from './App';
import './index.css';
import registerServiceWorker from './registerServiceWorker';
import { createStore } from 'redux';
import { Provider } from 'react-redux';
import { rootReducer } from './store';
import createDevTools from './tools/devTools';
import { ChangeFormFactor } from './store/layout/actions';
import { getThemeDefinition } from './tools/themeDefinition';
import { isMobile } from './tools/deviceDetection';
/*
const benzina = {
primary: {
main: '#e82129',
},
secondary: {
main: '#989898',
},
}
*/
const themeDefinition = getThemeDefinition();
const theme = createMuiTheme({
palette: {
primary: themeDefinition.primary,
secondary: themeDefinition.secondary
}
});
const store = createStore(
rootReducer,
createDevTools()
);
window.addEventListener('resize', (ev) => {
const newState = isMobile();
const currentState = store.getState().layout.mobileVersion;
if (currentState !== newState) {
store.dispatch(ChangeFormFactor(newState));
}
});
ReactDOM.render(
<Provider store={store}>
<MuiThemeProvider theme={theme}>
<App />
</MuiThemeProvider>
</Provider>,
document.getElementById('root') as HTMLElement
);
registerServiceWorker();
|
from typing import TYPE_CHECKING, Protocol, Tuple, TypeVar

import numpy as np

from .core import StrDict, T

__all__ = [
    "O",
    "A",
    "R",
    "Space",
    "Box",
    "StepResult",
    "MAReward",
    "Env",
    "MAEnv"
]

# Observation type
O = TypeVar("O")
# Action type
A = TypeVar("A")
# Reward type
R = TypeVar("R")

if TYPE_CHECKING:
    class Space(Protocol[T]):
        @property
        def dtype(self) -> np.dtype: ...

        @property
        def shape(self) -> Tuple[int, ...]: ...

        def contains(self, x: T) -> bool: ...

        def __contains__(self, x: T) -> bool: ...

    class Discrete(Space[int]):
        n: int
else:
    from gym.spaces import Discrete, Space

# Type of result returned by `env.step`
StepResult = Tuple[O, R, bool, StrDict]

class _AbstractEnv(Protocol[O, A, R]):
    @property
    def observation_space(self) -> "Space[O]": ...

    @property
    def action_space(self) -> "Space[A]": ...

    def step(self, action: A) -> StepResult[O, R]: ...

    def reset(self) -> O: ...

    def render(self, mode: str = ...) -> None: ...

    def close(self) -> None: ...

    @property
    def unwrapped(self) -> "_AbstractEnv[O, A, R]": ...

if TYPE_CHECKING:
    class Env(_AbstractEnv[O, A, float]):
        pass
else:
    from gym import Env
|
def delete(self, root_path, username=''):
    """Remove a share: for every user if no username is given, otherwise only for that user."""
    owner = auth.username()
    if not self._is_shared(root_path, owner):
        abort(HTTP_NOT_FOUND)
    if username == '':
        # No username given: remove the share from every user it was shared with
        users = userdata[owner]['shared_with_others'][root_path]
        for user in users:
            self._remove_share_from_user(root_path, user, owner)
        save_userdata()
        return HTTP_DELETED
    if username in userdata[owner]['shared_with_others'][root_path]:
        self._remove_share_from_user(root_path, username, owner)
        save_userdata()
        return HTTP_DELETED
    abort(HTTP_NOT_FOUND)
/*
* Created on 08.05.2008
*
*/
package org.jdesktop.swingx.binding;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.JTable;
import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import javax.swing.table.TableModel;
import org.jdesktop.beans.AbstractBean;
public class TableRowCountAdapter extends AbstractBean {
private JTable table;
private TableModelListener tableModelListener;
private PropertyChangeListener tableListener;
public TableRowCountAdapter(JTable table) {
this.table = table;
table.getModel().addTableModelListener(getTableModelListener());
table.addPropertyChangeListener(getTableListener());
}
private PropertyChangeListener getTableListener() {
if (tableListener == null) {
tableListener = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
if (!"model".equals(evt.getPropertyName())) return;
reinstallTableModelListener((TableModel) evt.getOldValue());
}
};
}
return tableListener;
}
protected void reinstallTableModelListener(TableModel oldValue) {
oldValue.removeTableModelListener(getTableModelListener());
table.getModel().addTableModelListener(getTableModelListener());
}
public int getRowCount() {
return table.getRowCount();
}
private TableModelListener getTableModelListener() {
if (tableModelListener == null) {
tableModelListener = new TableModelListener() {
public void tableChanged(TableModelEvent e) {
if (isModification(e)) {
firePropertyChange("rowCount", -1, table.getRowCount());
}
}
private boolean isModification(TableModelEvent e) {
if (isStructureChanged(e) || (e.getType() == TableModelEvent.INSERT)
|| (e.getType() == TableModelEvent.DELETE)) {
return true;
}
return false;
}
private boolean isStructureChanged(TableModelEvent e) {
return e == null || e.getFirstRow() == TableModelEvent.HEADER_ROW;
}
};
}
return tableModelListener;
}
}
|
World War II is having a moment, at least in the minds of people doing Google searches. Google Trends, a tool that measures the popularity of search terms over time, shows that there have been dramatic spikes in searches for topics related to the war, including: Reichstag fire, Pearl Harbor, fascism, Kristallnacht, and Nazi Germany.
Searches for “Reichstag fire,” the event that precipitated Adolf Hitler’s declaration of martial law in 1933, peaked worldwide the month after the Brexit vote in Britain and again in October 2016 before the US election. Those searches eventually reached an exceptional five-year high in the first week of February 2017. In the United States, searches related to the event when Dutch communist Marinus van der Lubbe lit the German parliament building on fire were most popular in Arizona and New Hampshire.
As Berkeley statistician Hyunyoung Choi and Google researcher Hal Varian explain in a paper about Google Trends, there is not much of a secret sauce in computing what’s trending:
The query index is based on query share: the total query volume for the search term in question within a particular geographic region divided by the total number of queries in that region during the time period being examined. The maximum query share in the time period specified is normalized to be 100 and the query share at the initial date being examined is normalized to be zero. The queries are “broad matched” in the sense that queries such as [used automobiles] are counted in the calculation of the query index for [automobile].
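To make that normalization concrete, here is a minimal Python sketch of one reading of the query-share index described above; the function name and the toy numbers are illustrative and are not taken from Google's implementation.

def query_index(term_counts, total_counts):
    """Compute a Trends-style index from per-period counts of a term and of all queries.

    The query share in period i is term_counts[i] / total_counts[i]; the series is then
    shifted so the initial period reads 0 and scaled so the maximum reads 100.
    """
    shares = [t / total for t, total in zip(term_counts, total_counts)]
    baseline = shares[0]
    shifted = [s - baseline for s in shares]
    peak = max(shifted)
    if peak == 0:
        return [0.0 for _ in shifted]
    return [100.0 * s / peak for s in shifted]

# Toy example: the term's share of all queries triples over three weeks.
print(query_index([10, 20, 30], [1000, 1000, 1000]))  # [0.0, 50.0, 100.0]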
Worldwide searches on Kristallnacht saw a five-year high on November 9 and 10, 2016, the anniversary of the 1938 pogrom in which tens of thousands of Jewish men were arrested and sent to concentration camps. There is always a small upswing of interest in this term every year on the anniversary, but in 2016, US searches for the term jumped from the 25-30 range to 100. The annual spike of interest in Pearl Harbor was also dramatically higher than usual, jumping from a previous high of 70 to 100.
Searches on "Nazi Germany" jumped from typical levels of 25-50 to 100 in the first week of November, right before the US election. Worldwide searches for “fascism” also jumped from a baseline of 10 to 55 the week before the US election, but the authoritarian ideology saw an unprecedented spike in the first week of February, with searches for "fascism" jumping from 15-20 to 100 around the world.
These searches don't mean millions of people are suddenly abandoning their interest in the usual internet fare: post-Super Bowl searches on "Lady Gaga" are far more popular than those on "Reichstag fire." Instead, what Trends reveals are peaks of interest that are far above the typical amount.
It may seem like these correlations between search terms and political events mean something, but it’s not always easy to show a direct connection between Google searches and real-world events. In 2008, researchers at Google published a paper in Science showing that Trends could predict flu season, with people’s searches for terms like “sniffles” and “cold remedy” always coming a few weeks before flu season hit. But then Google Flu Trends failed horribly a few years later, with its predictions off by 140 percent, showing that people don't always search for what ails them.
There are counter-examples, of course. In their article about Google Trends, Choi and Varian remark that it’s an excellent tool for “predicting the present,” noting that upticks on searches for a given country are correlated with jumps in tourism to that place. It can also be a good leading indicator for the financial industry, surfacing consumer trends that affect markets.
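As a rough illustration of what "leading indicator" means here, the following sketch (a hypothetical example, not code from the cited paper) finds the time shift at which a search-index series correlates best with an outcome series such as tourist arrivals; a positive best lag means the searches tend to move first.

import numpy as np

def best_leading_lag(search_index, outcome, max_lag=8):
    """Return the lag (in periods) at which the search index correlates best with the outcome."""
    search_index = np.asarray(search_index, dtype=float)
    outcome = np.asarray(outcome, dtype=float)
    best_lag, best_corr = 0, float("-inf")
    for lag in range(0, max_lag + 1):
        # Compare the index at time t with the outcome at time t + lag
        x = search_index[:len(search_index) - lag] if lag else search_index
        y = outcome[lag:]
        corr = np.corrcoef(x, y)[0, 1]
        if corr > best_corr:
            best_lag, best_corr = lag, corr
    return best_lag, best_corr

With noisy search data this kind of check is only suggestive, which is part of why the flu-prediction example above eventually went wrong.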
But can we track public mood or social changes through Google Trends? Data analyst Seth Stephens-Davidowitz thinks we can. In a now-famous study, he used Trends to predict how different counties would vote in the 2012 US election based on numbers of searches in those areas that contained racist words (the more racist searches, the fewer votes for Obama, even among people who typically voted for Democrats). Stephens-Davidowitz followed up with another study showing that areas with high numbers of racist word searches had higher-than-average rates of mortality among African-Americans. The upshot of this work, he argued, is that areas where people used racist words in searches were also areas where there were measurable signs of racism.
When it comes to the current search spike on World War II subjects, Stephens-Davidowitz admits he's uncertain. "I am not totally sure what to make of it," he told Ars via e-mail. "It may just be people interested in this topic, so they can prevent a future fascist takeover of the United States."
Interestingly, there have been no noticeable spikes in searches for other World War II topics, such as Adolf Hitler, the Holocaust, Weimar Germany, and the US bombing of Hiroshima and Nagasaki. It seems that people are focusing on popular ideologies during the war, as well as pivotal historical events. Perhaps they're turning to history to understand key events that are happening in our own time. Or maybe they're just hearing those terms a lot in the media and are trying to figure out what the heck pundits are talking about. |
#include <stdlib.h>  // for malloc/free; the use-after-free below is intentional

int nondet();
void main() {
int *a = malloc(sizeof(int));
if(nondet())
free(a);
*a = 42; // unsafe - possible use after free
}
|
Los Angeles Police Department SWAT officers walk toward their vehicles after one officer was wounded and a suspect killed in 2014. (Photo by Mark Boster/Los Angeles Times via Getty Images)
"Every second counts, and hesitation will kill you," Jamie McBride told the Los Angeles Police Commission last month.
McBride, a director of the LAPD's rank-and-file union, was testifying at a hearing about a proposal to establish new use-of-force guidelines for local cops. And he didn't mince words.
The proposed rules, McBride promised, "will get officers killed, plain and simple."
He went on to deliver a chilling warning to the five civilians who sit on the commission: "Make no mistake, if an officer is killed as a result .... [his] blood will be on your hands."
McBride's comments weren't entirely unexpected. Cops don't like change. Traditionally, many prefer to operate in shades of gray that give them room to maneuver. Basically, they like to do their thing unfettered.
But the union chief's eerie testimony was overshadowed by a just-released report on officer-involved-shootings in LA over the course of 2015. The commission, which sets Los Angeles Police Department policy, could hardly avoid the glaring numbers spelled out in the report: 48 officer-involved shootings, 38 of which hit suspects, 21 of them fatally. (On Tuesday, the commission agreed with LAPD Chief Charlie Beck that the fatal shooting of an unarmed homeless man last May was unjustified.)
The ten shots that hit no one were not warning shots, either—LAPD cops had simply missed their targets.
Of course, this isn't just an LA problem. Since 2014, police killings caught on camera have fueled a national movement for change that only seems to grow stronger each month. But no one listening to McBride that day could have avoided the stark comparison with other big-city police departments. In Chicago, with a population that's somewhat smaller than that of LA, and where gun violence seems to set new records each year, officers shot 22 people in 2015, killing eight. In New York, with roughly three times as many police officers and a population about twice as large as LA's, officers shot 32 people last year, nine fatally.
So LA posts a test case for reformers nationwide: If they can do it here, they should be able to do it anywhere.
Now the proposal that angered McBride so much was far from radical. It focused on training cops to avoid the kinds of confrontations that lead to officers shooting unarmed civilians—many of whom, as critics point out, are often stopped on the flimsiest of pretexts.
The strategy is called "de-escalation."
The driving force behind LA's new strategy is Commission President Matthew Johnson, who was named to the post just last year. The managing partner of an entertainment law firm of 30 attorneys, Johnson is an African American native of New Jersey and graduate of New York University Law School who moved to LA "literally three weeks" after the 1992 LA riots, he recalls.
In formulating the strategy, Johnson took a careful lawyer's approach. First, he ordered a ten-year review of LAPD shootings. At the same time, he considered a wide range of training policies that guide officer behavior and ultimately influence tactics, including procedures for handling the mentally ill and alternatives to using deadly force weapons.
Based on that review, the commission concluded that the department's previous approach, which called on officers to demonstrate "a reverence for human life," was way too vague. Instead, the commission wants the LAPD to focus not just on minimizing shootings, but also training and rewarding officers who use de-escalation tactics to avoid them. Perhaps most important, the commission wants to hold accountable cops who go rogue.
In its revised policy guidelines, the commission decided that henceforth, shooting a suspect would be considered "in-policy"—that is, legit—only if it came as a last resort.
One thing LA has going for it is that a key component of better police accountability is already in place. Last year, Chief Beck and the commission began instituting a requirement that all patrol officers wear body cameras, and that every patrol car be equipped with a camera too. Of course, outside critics were already unhappy with some elements of the camera policy, namely that cops have the right to review any use-of-force tapes before making a sworn shooting statement, thus allowing the officer to present their account in the best possible light.
Nevertheless, the support of Beck—and the grudging acceptance of the union—gave the commission what it considered a crucial new oversight tool in adjudicating use-of-force incidents.
"The cameras have made a huge difference," Johnson says. "At the end of the day, the video is what the video is. You can only explain so much, but the video is going to stand on its own."
Beck and the commission had already begun reviewing officer-involved shootings to consider not just whether the shooting was in- or out-of-policy, but whether the tactics leading up to the shooting were appropriate. So some elements of the revised guidelines weren't exactly new. What is different, however, is that, as a result of the commission's decision, de-escalation will be written into official policy, mandating that officers be trained in de-escalation techniques, which they must use in their interactions with citizens.
Failure to do so will now be cause to declare a shooting out-of-policy, even if the officer, because of that failure, was in a position where they felt they had to fire to stay alive. An out-of-policy finding has become a serious matter in the LAPD, one that can result in anything from required retraining to a reprimand, loss of promotion, or firing.
Some of the de-escalation training is also already in place. Shooting scenarios are now performed with actors who play suspects. The scenarios graphically demonstrate how to avoid the need to shoot, focusing on when a trainee might have used de-escalation, but didn't.
"They learn how the right way of talking to a suspect, and the right display of empathy and body language [that] can de-escalate a situation," says Chief Beck.
Indeed, despite the union's objections, some experts outside the LAPD believe a well-executed de-escalation training regime can make officers—and the public—safer. According to Michael Gennaco, who oversaw reform efforts for the Los Angeles County Sheriff's Department, officers can slow down an escalating situation by taking cover and calling for back-up or specialized units. They can also try to calm people down and be careful not to get so close to a suspect that a mere gesture might cause tragedy.
Clearly, as Gennaco puts it, "Some shootings are unavoidable; you'll never get to zero."
But, he adds, "You can strive to get the number as low as possible, and avoid the 'lawful but awful' kinds of deadly force incidents that we have seen too many times."
Still, the question remains: Is a de-escalation policy sufficient in itself?
Training in avoiding interactions that can quickly spin out of control is obviously critical—but only if it's built into community policing strategy. Successful police-citizen interaction ultimately has to be based on efforts to gain the acceptance and respect of the public. De-escalation of volatile incidents is just a first step.
Whether they fall "in" or "out" of the new policy guidelines, police shootings will continue to shock the public conscience unless police departments establish a clear goal of earning legitimacy in the communities they serve.
Near the end of my interview with Matt Johnson, I ask him how the LA Police Commission will monitor compliance with the new policy, which is scheduled to be implemented within the next 30 days.
"We have an inspector general with a staff of forty auditors and investigators who will insure the policy is complied with," he says.
"And if this policy doesn't work, we'll try something else."
Joe Domanick is West Coast bureau chief of The Crime Report and Associate Director of the Center for Media, Crime and Justice at John Jay College of Criminal Justice. This column was published in partnership with The Crime Report and Witness LA. |
def backoff_time(self, response: requests.Response) -> Optional[float]:
    if "Retry-After" in response.headers:
        return int(response.headers["Retry-After"])
    else:
        self.logger.info("Retry-after header not found. Using default backoff value")
        return 5
/*
Copyright 2019 Cisco Systems
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: ldp_stats_info.proto
package cisco_ios_xr_mpls_ldp_oper_mpls_ldp_global_active_vrfs_vrf_statistics_statistic
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type LdpStatsInfo_KEYS struct {
VrfName string `protobuf:"bytes,1,opt,name=vrf_name,json=vrfName,proto3" json:"vrf_name,omitempty"`
LsrId string `protobuf:"bytes,2,opt,name=lsr_id,json=lsrId,proto3" json:"lsr_id,omitempty"`
LabelSpaceId uint32 `protobuf:"varint,3,opt,name=label_space_id,json=labelSpaceId,proto3" json:"label_space_id,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *LdpStatsInfo_KEYS) Reset() { *m = LdpStatsInfo_KEYS{} }
func (m *LdpStatsInfo_KEYS) String() string { return proto.CompactTextString(m) }
func (*LdpStatsInfo_KEYS) ProtoMessage() {}
func (*LdpStatsInfo_KEYS) Descriptor() ([]byte, []int) {
return fileDescriptor_8321582c74958171, []int{0}
}
func (m *LdpStatsInfo_KEYS) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_LdpStatsInfo_KEYS.Unmarshal(m, b)
}
func (m *LdpStatsInfo_KEYS) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_LdpStatsInfo_KEYS.Marshal(b, m, deterministic)
}
func (m *LdpStatsInfo_KEYS) XXX_Merge(src proto.Message) {
xxx_messageInfo_LdpStatsInfo_KEYS.Merge(m, src)
}
func (m *LdpStatsInfo_KEYS) XXX_Size() int {
return xxx_messageInfo_LdpStatsInfo_KEYS.Size(m)
}
func (m *LdpStatsInfo_KEYS) XXX_DiscardUnknown() {
xxx_messageInfo_LdpStatsInfo_KEYS.DiscardUnknown(m)
}
var xxx_messageInfo_LdpStatsInfo_KEYS proto.InternalMessageInfo
func (m *LdpStatsInfo_KEYS) GetVrfName() string {
if m != nil {
return m.VrfName
}
return ""
}
func (m *LdpStatsInfo_KEYS) GetLsrId() string {
if m != nil {
return m.LsrId
}
return ""
}
func (m *LdpStatsInfo_KEYS) GetLabelSpaceId() uint32 {
if m != nil {
return m.LabelSpaceId
}
return 0
}
type LdpMsgCounters struct {
TotalCount uint32 `protobuf:"varint,1,opt,name=total_count,json=totalCount,proto3" json:"total_count,omitempty"`
InitCount uint32 `protobuf:"varint,2,opt,name=init_count,json=initCount,proto3" json:"init_count,omitempty"`
AddressCount uint32 `protobuf:"varint,3,opt,name=address_count,json=addressCount,proto3" json:"address_count,omitempty"`
AddressWithdrawCount uint32 `protobuf:"varint,4,opt,name=address_withdraw_count,json=addressWithdrawCount,proto3" json:"address_withdraw_count,omitempty"`
LabelMapCount uint32 `protobuf:"varint,5,opt,name=label_map_count,json=labelMapCount,proto3" json:"label_map_count,omitempty"`
LabelWithdrawCount uint32 `protobuf:"varint,6,opt,name=label_withdraw_count,json=labelWithdrawCount,proto3" json:"label_withdraw_count,omitempty"`
LabelReleaseCount uint32 `protobuf:"varint,7,opt,name=label_release_count,json=labelReleaseCount,proto3" json:"label_release_count,omitempty"`
LabelRequestCount uint32 `protobuf:"varint,8,opt,name=label_request_count,json=labelRequestCount,proto3" json:"label_request_count,omitempty"`
LabelAbortRequestCount uint32 `protobuf:"varint,9,opt,name=label_abort_request_count,json=labelAbortRequestCount,proto3" json:"label_abort_request_count,omitempty"`
NotificationCount uint32 `protobuf:"varint,10,opt,name=notification_count,json=notificationCount,proto3" json:"notification_count,omitempty"`
KeepAliveCount uint32 `protobuf:"varint,11,opt,name=keep_alive_count,json=keepAliveCount,proto3" json:"keep_alive_count,omitempty"`
IccpRgConnCount uint32 `protobuf:"varint,12,opt,name=iccp_rg_conn_count,json=iccpRgConnCount,proto3" json:"iccp_rg_conn_count,omitempty"`
IccpRgDisconnCount uint32 `protobuf:"varint,13,opt,name=iccp_rg_disconn_count,json=iccpRgDisconnCount,proto3" json:"iccp_rg_disconn_count,omitempty"`
IccpRgNotifCount uint32 `protobuf:"varint,14,opt,name=iccp_rg_notif_count,json=iccpRgNotifCount,proto3" json:"iccp_rg_notif_count,omitempty"`
IccpRgAppDataCount uint32 `protobuf:"varint,15,opt,name=iccp_rg_app_data_count,json=iccpRgAppDataCount,proto3" json:"iccp_rg_app_data_count,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *LdpMsgCounters) Reset() { *m = LdpMsgCounters{} }
func (m *LdpMsgCounters) String() string { return proto.CompactTextString(m) }
func (*LdpMsgCounters) ProtoMessage() {}
func (*LdpMsgCounters) Descriptor() ([]byte, []int) {
return fileDescriptor_8321582c74958171, []int{1}
}
func (m *LdpMsgCounters) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_LdpMsgCounters.Unmarshal(m, b)
}
func (m *LdpMsgCounters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_LdpMsgCounters.Marshal(b, m, deterministic)
}
func (m *LdpMsgCounters) XXX_Merge(src proto.Message) {
xxx_messageInfo_LdpMsgCounters.Merge(m, src)
}
func (m *LdpMsgCounters) XXX_Size() int {
return xxx_messageInfo_LdpMsgCounters.Size(m)
}
func (m *LdpMsgCounters) XXX_DiscardUnknown() {
xxx_messageInfo_LdpMsgCounters.DiscardUnknown(m)
}
var xxx_messageInfo_LdpMsgCounters proto.InternalMessageInfo
func (m *LdpMsgCounters) GetTotalCount() uint32 {
if m != nil {
return m.TotalCount
}
return 0
}
func (m *LdpMsgCounters) GetInitCount() uint32 {
if m != nil {
return m.InitCount
}
return 0
}
func (m *LdpMsgCounters) GetAddressCount() uint32 {
if m != nil {
return m.AddressCount
}
return 0
}
func (m *LdpMsgCounters) GetAddressWithdrawCount() uint32 {
if m != nil {
return m.AddressWithdrawCount
}
return 0
}
func (m *LdpMsgCounters) GetLabelMapCount() uint32 {
if m != nil {
return m.LabelMapCount
}
return 0
}
func (m *LdpMsgCounters) GetLabelWithdrawCount() uint32 {
if m != nil {
return m.LabelWithdrawCount
}
return 0
}
func (m *LdpMsgCounters) GetLabelReleaseCount() uint32 {
if m != nil {
return m.LabelReleaseCount
}
return 0
}
func (m *LdpMsgCounters) GetLabelRequestCount() uint32 {
if m != nil {
return m.LabelRequestCount
}
return 0
}
func (m *LdpMsgCounters) GetLabelAbortRequestCount() uint32 {
if m != nil {
return m.LabelAbortRequestCount
}
return 0
}
func (m *LdpMsgCounters) GetNotificationCount() uint32 {
if m != nil {
return m.NotificationCount
}
return 0
}
func (m *LdpMsgCounters) GetKeepAliveCount() uint32 {
if m != nil {
return m.KeepAliveCount
}
return 0
}
func (m *LdpMsgCounters) GetIccpRgConnCount() uint32 {
if m != nil {
return m.IccpRgConnCount
}
return 0
}
func (m *LdpMsgCounters) GetIccpRgDisconnCount() uint32 {
if m != nil {
return m.IccpRgDisconnCount
}
return 0
}
func (m *LdpMsgCounters) GetIccpRgNotifCount() uint32 {
if m != nil {
return m.IccpRgNotifCount
}
return 0
}
func (m *LdpMsgCounters) GetIccpRgAppDataCount() uint32 {
if m != nil {
return m.IccpRgAppDataCount
}
return 0
}
type LdpStatsInfo struct {
IccpEnabled bool `protobuf:"varint,50,opt,name=iccp_enabled,json=iccpEnabled,proto3" json:"iccp_enabled,omitempty"`
MessageOut *LdpMsgCounters `protobuf:"bytes,51,opt,name=message_out,json=messageOut,proto3" json:"message_out,omitempty"`
MessageIn *LdpMsgCounters `protobuf:"bytes,52,opt,name=message_in,json=messageIn,proto3" json:"message_in,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *LdpStatsInfo) Reset() { *m = LdpStatsInfo{} }
func (m *LdpStatsInfo) String() string { return proto.CompactTextString(m) }
func (*LdpStatsInfo) ProtoMessage() {}
func (*LdpStatsInfo) Descriptor() ([]byte, []int) {
return fileDescriptor_8321582c74958171, []int{2}
}
func (m *LdpStatsInfo) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_LdpStatsInfo.Unmarshal(m, b)
}
func (m *LdpStatsInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_LdpStatsInfo.Marshal(b, m, deterministic)
}
func (m *LdpStatsInfo) XXX_Merge(src proto.Message) {
xxx_messageInfo_LdpStatsInfo.Merge(m, src)
}
func (m *LdpStatsInfo) XXX_Size() int {
return xxx_messageInfo_LdpStatsInfo.Size(m)
}
func (m *LdpStatsInfo) XXX_DiscardUnknown() {
xxx_messageInfo_LdpStatsInfo.DiscardUnknown(m)
}
var xxx_messageInfo_LdpStatsInfo proto.InternalMessageInfo
func (m *LdpStatsInfo) GetIccpEnabled() bool {
if m != nil {
return m.IccpEnabled
}
return false
}
func (m *LdpStatsInfo) GetMessageOut() *LdpMsgCounters {
if m != nil {
return m.MessageOut
}
return nil
}
func (m *LdpStatsInfo) GetMessageIn() *LdpMsgCounters {
if m != nil {
return m.MessageIn
}
return nil
}
func init() {
proto.RegisterType((*LdpStatsInfo_KEYS)(nil), "cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.active.vrfs.vrf.statistics.statistic.ldp_stats_info_KEYS")
proto.RegisterType((*LdpMsgCounters)(nil), "cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.active.vrfs.vrf.statistics.statistic.ldp_msg_counters")
proto.RegisterType((*LdpStatsInfo)(nil), "cisco_ios_xr_mpls_ldp_oper.mpls_ldp.global.active.vrfs.vrf.statistics.statistic.ldp_stats_info")
}
func init() { proto.RegisterFile("ldp_stats_info.proto", fileDescriptor_8321582c74958171) }
var fileDescriptor_8321582c74958171 = []byte{
// 559 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x94, 0x4b, 0x6f, 0x13, 0x31,
0x14, 0x85, 0x35, 0x85, 0x3e, 0x72, 0xd3, 0x3c, 0x70, 0xd2, 0x28, 0x5d, 0x20, 0x42, 0x40, 0x28,
0x12, 0x62, 0x04, 0x69, 0x37, 0x2c, 0xa3, 0xb6, 0x8b, 0x08, 0xd1, 0x4a, 0xd3, 0x05, 0x62, 0x65,
0xdd, 0xcc, 0x38, 0xc1, 0xc2, 0x33, 0x36, 0xb6, 0x93, 0xb2, 0x44, 0xfc, 0x2d, 0x24, 0x7e, 0x1b,
0xf2, 0x63, 0xa0, 0x09, 0x5b, 0xc4, 0x26, 0xca, 0x9c, 0xf3, 0x9d, 0x7b, 0x6c, 0x27, 0x1e, 0xe8,
0x8b, 0x42, 0x51, 0x63, 0xd1, 0x1a, 0xca, 0xab, 0xa5, 0x4c, 0x95, 0x96, 0x56, 0x92, 0x9b, 0x9c,
0x9b, 0x5c, 0x52, 0x2e, 0x0d, 0xfd, 0xaa, 0x69, 0xa9, 0x84, 0xa1, 0x8e, 0x93, 0x8a, 0xe9, 0xb4,
0x7e, 0x4a, 0x57, 0x42, 0x2e, 0x50, 0xa4, 0x98, 0x5b, 0xbe, 0x61, 0xe9, 0x46, 0x2f, 0x8d, 0xfb,
0x48, 0xdd, 0x30, 0x6e, 0x2c, 0xcf, 0xcd, 0x9f, 0xaf, 0xe3, 0x12, 0x7a, 0xdb, 0x45, 0xf4, 0xdd,
0xd5, 0xc7, 0x5b, 0x72, 0x0a, 0x47, 0x1b, 0xbd, 0xa4, 0x15, 0x96, 0x6c, 0x98, 0x8c, 0x92, 0x49,
0x23, 0x3b, 0xdc, 0xe8, 0xe5, 0x35, 0x96, 0x8c, 0x9c, 0xc0, 0x81, 0x30, 0x9a, 0xf2, 0x62, 0xb8,
0xe7, 0x8d, 0x7d, 0x61, 0xf4, 0xbc, 0x20, 0xcf, 0xa1, 0x2d, 0x70, 0xc1, 0x04, 0x35, 0x0a, 0x73,
0xe6, 0xec, 0x07, 0xa3, 0x64, 0xd2, 0xca, 0x8e, 0xbd, 0x7a, 0xeb, 0xc4, 0x79, 0x31, 0xfe, 0xb9,
0x0f, 0x5d, 0xd7, 0x57, 0x9a, 0x15, 0xcd, 0xe5, 0xba, 0xb2, 0x4c, 0x1b, 0xf2, 0x04, 0x9a, 0x56,
0x5a, 0x14, 0x41, 0xf1, 0x7d, 0xad, 0x0c, 0xbc, 0x74, 0xe1, 0x14, 0xf2, 0x18, 0x80, 0x57, 0xdc,
0x46, 0x7f, 0xcf, 0xfb, 0x0d, 0xa7, 0x04, 0xfb, 0x19, 0xb4, 0xb0, 0x28, 0x34, 0x33, 0x26, 0x12,
0xb1, 0x39, 0x8a, 0x01, 0x3a, 0x87, 0x41, 0x0d, 0xdd, 0x71, 0xfb, 0xa9, 0xd0, 0x78, 0x17, 0xe9,
0x87, 0x9e, 0xee, 0x47, 0xf7, 0x43, 0x34, 0x43, 0xea, 0x05, 0x74, 0xc2, 0xae, 0x4a, 0x54, 0x11,
0xdf, 0xf7, 0x78, 0xcb, 0xcb, 0xef, 0x51, 0x05, 0xee, 0x35, 0xf4, 0x03, 0xb7, 0x33, 0xfb, 0xc0,
0xc3, 0xc4, 0x7b, 0xdb, 0x93, 0x53, 0xe8, 0x85, 0x84, 0x66, 0x82, 0xa1, 0x61, 0x31, 0x70, 0xe8,
0x03, 0x8f, 0xbc, 0x95, 0x05, 0xe7, 0x2f, 0xfe, 0xcb, 0x9a, 0x99, 0xfa, 0x30, 0x8e, 0xb6, 0x78,
0xef, 0x04, 0xfe, 0x2d, 0x9c, 0x06, 0x1e, 0x17, 0x52, 0xdb, 0x9d, 0x54, 0xc3, 0xa7, 0x06, 0x1e,
0x98, 0x39, 0x7f, 0x2b, 0xfa, 0x0a, 0x48, 0x25, 0x2d, 0x5f, 0xf2, 0x1c, 0x2d, 0x97, 0x55, 0xcc,
0x40, 0x68, 0xba, 0xef, 0x04, 0x7c, 0x02, 0xdd, 0xcf, 0x8c, 0x29, 0x8a, 0x82, 0x6f, 0xea, 0x6d,
0x34, 0x3d, 0xdc, 0x76, 0xfa, 0xcc, 0xc9, 0x81, 0x7c, 0x09, 0x84, 0xe7, 0xb9, 0xa2, 0xda, 0xfd,
0xf8, 0x55, 0x3d, 0xf8, 0xd8, 0xb3, 0x1d, 0xe7, 0x64, 0xab, 0x0b, 0x59, 0xc5, 0xb1, 0x6f, 0xe0,
0xa4, 0x86, 0x0b, 0xf7, 0xa7, 0xff, 0xcd, 0xb7, 0xc2, 0x99, 0x06, 0xfe, 0x32, 0x58, 0xf5, 0xc2,
0x7b, 0x75, 0xc4, 0x2f, 0x33, 0x06, 0xda, 0x3e, 0xd0, 0x0d, 0x81, 0x6b, 0x67, 0x04, 0x7c, 0x0a,
0x83, 0x1a, 0x47, 0xa5, 0x68, 0x81, 0x16, 0x63, 0xa2, 0x73, 0xbf, 0x62, 0xa6, 0xd4, 0x25, 0x5a,
0xf4, 0x99, 0xf1, 0x8f, 0x3d, 0x68, 0x6f, 0x5f, 0x18, 0xf2, 0x14, 0x8e, 0xfd, 0x18, 0x56, 0xe1,
0x42, 0xb0, 0x62, 0x38, 0x1d, 0x25, 0x93, 0xa3, 0xac, 0xe9, 0xb4, 0xab, 0x20, 0x91, 0xef, 0x09,
0x34, 0x4b, 0x66, 0x0c, 0xae, 0x18, 0x95, 0x6b, 0x3b, 0x3c, 0x1b, 0x25, 0x93, 0xe6, 0x14, 0xd3,
0x7f, 0x7c, 0x9b, 0xd3, 0xdd, 0xab, 0x95, 0x41, 0x6c, 0xbd, 0x59, 0x5b, 0xf2, 0x2d, 0x81, 0xfa,
0x91, 0xf2, 0x6a, 0x78, 0xfe, 0xbf, 0xd6, 0xd0, 0x88, 0xa5, 0xf3, 0x6a, 0x71, 0xe0, 0xdf, 0x62,
0x67, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x5d, 0xc2, 0xc6, 0x99, 0xdd, 0x04, 0x00, 0x00,
}
|
import { asText } from '@prismicio/helpers'
import type {
PrismicDocument,
RichTextField,
TitleField,
} from '@prismicio/types'
import type { PromisedType, Return } from 'tsdef'
import { client } from './client'
import { LANG, Lang } from './constants'
type Homepage = PrismicDocument<{
title: TitleField
description: RichTextField
// body: SliceZone<
// Slice<
// 'lead',
// {
// leadimage: ImageField
// leadtext: RichTextField
// }
// >,
// 'filled'
// >
// seotitle: KeyTextField
// seodescription: KeyTextField
// seokeywords: KeyTextField
// seoimage: ImageField
}>
export async function getHomepageProps(lang: Lang = LANG.en) {
const home = await client().getByType<Homepage>('homepage', {
lang,
})
const data = home?.results?.[0]?.data
if (!data) return null
return {
title: asText(data.title)?.trim(),
description: asText(data.description)?.trim(),
}
}
export type HomepageProps = PromisedType<Return<typeof getHomepageProps>>
|
//------------------------------------------------------------------------------
// (C) 2019 Individual contributors, see AUTHORS file
//------------------------------------------------------------------------------
#include "stdneb.h"
#include "scenes.h"
namespace SponzaSceneData
{
// Global variables, within namespace
Graphics::GraphicsEntityId entity;
Graphics::GraphicsEntityId light;
bool moveLight = true;
float x = 0;
float y = 0;
float speed = 0.005;
float scale = 2;
//------------------------------------------------------------------------------
/**
Open scene, load resources
*/
void OpenScene()
{
entity = Graphics::CreateEntity();
Graphics::RegisterEntity<Models::ModelContext, Visibility::ObservableContext>(entity);
Models::ModelContext::Setup(entity, "mdl:sponza/Sponza.n3", "SponzaScene", []()
{
Visibility::ObservableContext::Setup(entity, Visibility::VisibilityEntityType::Model);
});
Math::mat4 t = Math::translation(Math::vec3(0, 0, 0));
Math::mat4 r = Math::rotationx(Math::deg2rad(0.0f));
Math::mat4 s = Math::scaling(Math::vec3(1, 1, 1));
Math::mat4 trs = s * r;
trs = trs * t;
Models::ModelContext::SetTransform(entity, trs);
light = Graphics::CreateEntity();
Graphics::RegisterEntity<Lighting::LightContext>(light);
Lighting::LightContext::SetupPointLight(light, Math::vec3(1, 1, 1), 10.0f, Math::translation(0, 1, 0), 10.0f, false);
}
//------------------------------------------------------------------------------
/**
Close scene, clean up resources
*/
void CloseScene()
{
Graphics::DeregisterEntity<Models::ModelContext, Visibility::ObservableContext>(entity);
Graphics::DestroyEntity(entity);
Graphics::DeregisterEntity<Lighting::LightContext>(light);
Graphics::DestroyEntity(light);
}
//------------------------------------------------------------------------------
/**
Per frame callback
*/
void StepFrame()
{
if (Input::InputServer::Instance()->GetDefaultKeyboard()->KeyPressed(Input::Key::Space))
{
moveLight = !moveLight;
}
if (moveLight)
{
Lighting::LightContext::SetTransform(light, Math::translation(Math::sin(x) * scale, 0.5f, Math::cos(y) * scale));
x += speed;
y += speed;
}
}
//------------------------------------------------------------------------------
/**
ImGui code can be placed here.
*/
void RenderUI()
{
// empty
}
} // namespace SponzaSceneData
// ---------------------------------------------------------
Scene SponzaScene =
{
"SponzaScene",
SponzaSceneData::OpenScene,
SponzaSceneData::CloseScene,
SponzaSceneData::StepFrame,
SponzaSceneData::RenderUI
}; |
package com.illucit.instatrie;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.TreeSet;
import org.junit.Assert;
import org.junit.Test;
import com.illucit.instatrie.trie.Trie;
import com.illucit.instatrie.trie.TrieNode;
/**
* Tests for the iterable utility methods of {@link TrieNode}.
*
* @author <NAME>
*
*/
public class TestIterators {
@Test
public void testEmptyIterables() {
Trie<String> trie = new Trie<>();
TrieNode<String> root = trie.getRoot();
Iterator<TrieNode<String>> childrenIterator = root.children().iterator();
Assert.assertFalse(childrenIterator.hasNext());
Iterator<TrieNode<String>> brothersIterator = root.brothers().iterator();
Assert.assertTrue(brothersIterator.hasNext());
Assert.assertEquals(root, brothersIterator.next());
Assert.assertFalse(brothersIterator.hasNext());
Iterator<TrieNode<String>> descendantsIterator = root.descendants().iterator();
Assert.assertTrue(descendantsIterator.hasNext());
Assert.assertEquals(root, descendantsIterator.next());
Assert.assertFalse(descendantsIterator.hasNext());
}
@Test
public void testIterables() {
Trie<String> trie = new Trie<>();
trie.insert("abc", "abc");
trie.insert("abcde", "abcde");
trie.insert("axy", "axy");
trie.insert("a", "a");
trie.insert("zzz", "zzz");
TrieNode<String> root = trie.getRoot();
String[] childrenExpectedArray = new String[] { "a", "zzz" };
LinkedList<String> childrenExpected = new LinkedList<>(Arrays.asList(childrenExpectedArray));
LinkedList<String> childrenResult = new LinkedList<>();
root.children().stream().filter(TrieNode::hasData).forEach(node -> childrenResult.add(node.getData()));
Assert.assertEquals(childrenExpected, childrenResult);
TrieNode<String> firstChild = root.getFirstSon();
String[] brothersExpectedArray = new String[] { "a", "zzz" };
LinkedList<String> brothersExpected = new LinkedList<>(Arrays.asList(brothersExpectedArray));
LinkedList<String> brothersResult = new LinkedList<>();
firstChild.brothers().stream().filter(TrieNode::hasData)
.forEach(node -> brothersResult.add(node.getData()));
Assert.assertEquals(brothersExpected, brothersResult);
String[] descendentsExpectedArray = new String[] { "abc", "abcde", "axy", "a", "zzz" };
TreeSet<String> descendentsExpected = new TreeSet<>(Arrays.asList(descendentsExpectedArray));
TreeSet<String> descendentsResult = new TreeSet<>();
root.descendants().stream().filter(TrieNode::hasData)
.forEach(node -> descendentsResult.add(node.getData()));
Assert.assertEquals(descendentsExpected, descendentsResult);
}
}
|
/*
* This file is part of the source code of the software program
* Vampire. It is protected by applicable
* copyright laws.
*
* This source code is distributed under the licence found here
* https://vprover.github.io/license.html
* and in the source directory
*/
/**
* @file TPTPPrinter.cpp
* Implements class TPTPPrinter.
*/
#include <sstream>
#include "Lib/DHMap.hpp"
#include "Lib/Environment.hpp"
#include "Lib/SharedSet.hpp"
#include "Kernel/Signature.hpp"
#include "Kernel/Clause.hpp"
#include "Kernel/SortHelper.hpp"
#include "Parse/TPTP.hpp"
#include "Kernel/Term.hpp"
#include "Kernel/Inference.hpp"
#include "Kernel/Unit.hpp"
#include "Kernel/Formula.hpp"
#include "Kernel/FormulaUnit.hpp"
#include "Kernel/Clause.hpp"
#include "Shell/Statistics.hpp"
#include "TPTPPrinter.hpp"
#include "Forwards.hpp"
namespace Shell
{
TPTPPrinter::TPTPPrinter(ostream* tgtStream)
: _tgtStream(tgtStream), _headersPrinted(false)
{
CALL("TPTPPrinter::TPTPPrinter");
}
/**
* Print the Unit @param u to the desired output
*/
void TPTPPrinter::print(Unit* u)
{
CALL("TPTPPrinter::print");
vstring body = getBodyStr(u, true);
beginOutput();
ensureHeadersPrinted(u);
printTffWrapper(u, body);
endOutput();
}
/**
* Print on the desired output the Unit with the specified name
* @param name
* @param u
*/
void TPTPPrinter::printAsClaim(vstring name, Unit* u)
{
CALL("TPTPPrinter::printAsClaim");
printWithRole(name, "claim", u);
}
void TPTPPrinter::printWithRole(vstring name, vstring role, Unit* u, bool includeSplitLevels)
{
CALL("TPTPPrinter::printWithRole");
vstring body = getBodyStr(u, includeSplitLevels);
beginOutput();
ensureHeadersPrinted(u);
tgt() << "tff(" << name << ", " << role << ", " << body << ")." << endl;
endOutput();
}
/**
* Return as a vstring the body of the Unit u
* @param u
* @param includeSplitLevels
* @return the body vstring
*/
vstring TPTPPrinter::getBodyStr(Unit* u, bool includeSplitLevels)
{
CALL("TPTPPrinter::getBodyStr");
vostringstream res;
typedef DHMap<unsigned,TermList> SortMap;
static SortMap varSorts;
varSorts.reset();
SortHelper::collectVariableSorts(u, varSorts);
if(u->isClause()) {
SortMap::Iterator vit(varSorts);
bool quantified = vit.hasNext();
if(quantified) {
res << "![";
while(vit.hasNext()) {
unsigned var;
TermList varSort;
vit.next(var, varSort);
res << 'X' << var;
if(varSort!= AtomicSort::defaultSort()) {
res << " : " << varSort.toString();
}
if(vit.hasNext()) {
res << ',';
}
}
res << "]: (";
}
Clause* cl = static_cast<Clause*>(u);
Clause::Iterator cit(*cl);
if(!cit.hasNext()) {
res << "$false";
}
while(cit.hasNext()) {
Literal* lit = cit.next();
res << lit->toString();
if(cit.hasNext()) {
res << " | ";
}
}
if(quantified) {
res << ')';
}
if(includeSplitLevels && !cl->noSplits()) {
SplitSet::Iterator sit(*cl->splits());
while(sit.hasNext()) {
SplitLevel split = sit.next();
res << " | " << "$splitLevel" << split;
}
}
}
else {
return static_cast<FormulaUnit*>(u)->formula()->toString();
}
return res.str();
}
/**
* Surround by tff() the body of the unit u
* @param u
* @param bodyStr
*/
void TPTPPrinter::printTffWrapper(Unit* u, vstring bodyStr)
{
CALL("TPTPPrinter::printTffWrapper");
tgt() << "tff(";
vstring unitName;
if(Parse::TPTP::findAxiomName(u, unitName)) {
tgt() << unitName;
}
else {
tgt() << "u_" << u->number();
}
tgt() << ", ";
switch(u->inputType()) {
case UnitInputType::AXIOM:
tgt() << "axiom"; break;
case UnitInputType::ASSUMPTION:
tgt() << "hypothesis"; break;
case UnitInputType::CONJECTURE:
tgt() << "conjecture"; break;
case UnitInputType::NEGATED_CONJECTURE:
tgt() << "negated_conjecture"; break;
case UnitInputType::CLAIM:
tgt() << "claim"; break;
case UnitInputType::EXTENSIONALITY_AXIOM:
tgt() << "extensionality"; break;
default:
ASSERTION_VIOLATION;
}
tgt() << ", " << endl << " " << bodyStr << " )." << endl;
}
/**
* Output the symbol definition
* @param symNumber
* @param function - true if the symbol is a function symbol
*/
void TPTPPrinter::outputSymbolTypeDefinitions(unsigned symNumber, SymbolType symType)
{
CALL("TPTPPrinter::outputSymbolTypeDefinitions");
Signature::Symbol* sym;
OperatorType* type;
if(symType == SymbolType::FUNC){
sym = env.signature->getFunction(symNumber);
type = sym->fnType();
} else if(symType == SymbolType::PRED){
sym = env.signature->getPredicate(symNumber);
type = sym->predType();
} else {
sym = env.signature->getTypeCon(symNumber);
type = sym->typeConType();
}
if(type->isAllDefault()) {
return;
}
bool func = symType == SymbolType::FUNC ;
if(func && theory->isInterpretedConstant(symNumber)) { return; }
if (func && sym->overflownConstant()) { return; }
if(sym->interpreted()) {
Interpretation interp = static_cast<Signature::InterpretedSymbol*>(sym)->getInterpretation();
switch(interp) {
case Theory::INT_SUCCESSOR:
case Theory::INT_ABS:
case Theory::INT_DIVIDES:
//for interpreted symbols that do not belong to the TPTP standard we still have to output the sort
break;
default:
return;
}
}
vstring cat = "tff(";
if(env.getMainProblem()->isHigherOrder()){
cat = "thf(";
}
vstring st = "func";
if(symType == SymbolType::PRED){
st = "pred";
} else if(symType == SymbolType::TYPE_CON){
st = "sort";
}
tgt() << cat << st << "_def_" << symNumber << ",type, "
<< sym->name() << ": ";
tgt() << type->toString();
tgt() << " )." << endl;
}
/**
* Print only the necessary sort headers. This is needed in order to avoid
* including sorts in the TPTP problem that are not actually used
* @since 08/10/2012, Vienna
* @author Ioan Dragan
*/
/*void TPTPPrinter::ensureNecesarySorts()
{
CALL("TPTPPrinter::ensureNecesarySorts");
if (_headersPrinted) {
return;
}
unsigned i;
List<TermList> *_usedSorts(0);
OperatorType* type;
Signature::Symbol* sym;
unsigned sorts = env.sorts->count();
//check the sorts of the function symbols and collect information about used sorts
for (i = 0; i < env.signature->functions(); i++) {
if(env.signature->isTypeConOrSup(f)){ continue; }
sym = env.signature->getFunction(i);
type = sym->fnType();
unsigned arity = sym->arity();
// NOTE: for function types, the last entry (i.e., type->arg(arity)) contains the type of the result
for (unsigned i = 0; i <= arity; i++) {
if(! List<unsigned>::member(type->arg(i), _usedSorts))
List<unsigned>::push(type->arg(i), _usedSorts);
}
}
//check the sorts of the predicates and collect information about used sorts
for (i = 0; i < env.signature->predicates(); i++) {
sym = env.signature->getPredicate(i);
type = sym->predType();
unsigned arity = sym->arity();
if (arity > 0) {
for (unsigned i = 0; i < arity; i++) {
if(! List<unsigned>::member(type->arg(i), _usedSorts))
List<unsigned>::push(type->arg(i), _usedSorts);
}
}
}
//output the sort definition for the used sorts, but not for the built-in sorts
for (i = Sorts::FIRST_USER_SORT; i < sorts; i++) {
if (List<unsigned>::member(i, _usedSorts))
tgt() << "tff(sort_def_" << i << ",type, " << env.sorts->sortName(i)
<< ": $tType" << " )." << endl;
}
} */ //TODO fix this function. At the moment, not sure how important it is
/**
* Makes sure that only the needed headers in the @param u are printed out on the output
*/
void TPTPPrinter::ensureHeadersPrinted(Unit* u)
{
CALL("TPTPPrinter::ensureHeadersPrinted");
if(_headersPrinted) {
return;
}
//ensureNecesarySorts();
unsigned typeCons = env.signature->typeCons();
for(unsigned i=Signature::FIRST_USER_CON; i<typeCons; i++) {
outputSymbolTypeDefinitions(i, SymbolType::TYPE_CON);
}
unsigned funs = env.signature->functions();
for(unsigned i=0; i<funs; i++) {
outputSymbolTypeDefinitions(i, SymbolType::FUNC);
}
unsigned preds = env.signature->predicates();
for(unsigned i=1; i<preds; i++) {
outputSymbolTypeDefinitions(i, SymbolType::PRED);
}
_headersPrinted = true;
}
/**
* Retrieve the output stream to which vampire prints out
*/
ostream& TPTPPrinter::tgt()
{
CALL("TPTPPrinter::tgt");
if(_tgtStream) {
return *_tgtStream;
}
else {
return env.out();
}
}
/**
* In case there is no specified output stream, than print to the one
* specified in the env.beginOutput();
*/
void TPTPPrinter::beginOutput()
{
CALL("TPTPPrinter::beginOutput");
if(!_tgtStream) { env.beginOutput(); }
}
void TPTPPrinter::endOutput()
{
CALL("TPTPPrinter::endOutput");
if(!_tgtStream) { env.endOutput(); }
}
/**
* Return the vstring representing the formula f.
*/
vstring TPTPPrinter::toString(const Formula* formula)
{
CALL("TPTPPrinter::toString(const Formula*)");
static vstring names [] =
{ "", " & ", " | ", " => ", " <=> ", " <~> ",
"~", "!", "?", "$term", "$false", "$true", "", ""};
ASS_EQ(sizeof(names)/sizeof(vstring), NOCONN+1);
vstring res;
// render a connective if specified, and then a Formula (or ")" if the formula is nullptr)
typedef pair<Connective,const Formula*> Todo;
Stack<Todo> stack;
stack.push(make_pair(NOCONN,formula));
while (stack.isNonEmpty()) {
Todo todo = stack.pop();
// in any case start by rendering the connective passed from "above"
res += names[todo.first];
const Formula* f = todo.second;
if (!f) {
res += ")";
continue;
}
Connective c = f->connective();
switch (c) {
case LITERAL: {
vstring result = f->literal()->toString();
if (f->literal()->isEquality()) {
res += "(" + result + ")";
} else {
res += result;
}
continue;
}
case AND:
case OR:
{
// we will reverse the order
// but that should not matter
const FormulaList* fs = f->args();
res += "(";
stack.push(make_pair(NOCONN,nullptr)); // render the final closing bracket
while (FormulaList::isNonEmpty(fs)) {
const Formula* arg = fs->head();
fs = fs->tail();
// the last argument, which will be printed first, is the only one not preceded by a rendering of con
stack.push(make_pair(FormulaList::isNonEmpty(fs) ? c : NOCONN,arg));
}
continue;
}
case IMP:
case IFF:
case XOR:
// here we can afford to keep the order right
res += "(";
stack.push(make_pair(NOCONN,nullptr)); // render the final closing bracket
stack.push(make_pair(c,f->right())); // second argument with con
stack.push(make_pair(NOCONN,f->left())); // first argument without con
continue;
case NOT:
res += "(";
stack.push(make_pair(NOCONN,nullptr)); // render the final closing bracket
stack.push(make_pair(c,f->uarg()));
continue;
case FORALL:
case EXISTS:
{
vstring result = vstring("(") + names[c] + "[";
bool needsComma = false;
VList::Iterator vs(f->vars());
SList::Iterator ss(f->sorts());
bool hasSorts = f->sorts();
while (vs.hasNext()) {
unsigned var = vs.next();
if (needsComma) {
result += ", ";
}
result += 'X';
result += Int::toString(var);
TermList t;
if (hasSorts) {
ASS(ss.hasNext());
t = ss.next();
if (t != AtomicSort::defaultSort()) {
result += " : " + t.toString();
}
} else if (SortHelper::tryGetVariableSort(var, const_cast<Formula*>(f),
t) && t != AtomicSort::defaultSort()) {
result += " : " + t.toString();
}
needsComma = true;
}
res += result + "] : (";
stack.push(make_pair(NOCONN,nullptr));
stack.push(make_pair(NOCONN,nullptr)); // here we close two brackets
stack.push(make_pair(NOCONN,f->qarg()));
continue;
}
case BOOL_TERM:
res += f->getBooleanTerm().toString();
continue;
case FALSE:
case TRUE:
res += names[c];
continue;
default:
ASSERTION_VIOLATION;
}
}
return res;
}
/**
* Output unit @param unit in TPTP format as a vstring
*
* If the unit is a formula of type @b CONJECTURE, output the
* negation of Vampire's internal representation with the
* TPTP role conjecture. If it is a clause, just output it as
* is, with the role negated_conjecture.
*/
vstring TPTPPrinter::toString (const Unit* unit)
{
CALL("TPTPPrinter::toString(const Unit*)");
// const Inference* inf = unit->inference();
// Inference::Rule rule = inf->rule();
vstring prefix;
vstring main = "";
bool negate_formula = false;
vstring kind;
switch (unit->inputType()) {
case UnitInputType::ASSUMPTION:
kind = "hypothesis";
break;
case UnitInputType::CONJECTURE:
if(unit->isClause()) {
kind = "negated_conjecture";
}
else {
negate_formula = true;
kind = "conjecture";
}
break;
case UnitInputType::EXTENSIONALITY_AXIOM:
kind = "extensionality";
break;
case UnitInputType::NEGATED_CONJECTURE:
kind = "negated_conjecture";
break;
default:
kind = "axiom";
break;
}
if (unit->isClause()) {
prefix = "cnf";
main = static_cast<const Clause*>(unit)->toTPTPString();
}
else {
prefix = "tff";
const Formula* f = static_cast<const FormulaUnit*>(unit)->formula();
if(negate_formula) {
Formula* quant=Formula::quantify(const_cast<Formula*>(f));
if(quant->connective()==NOT) {
ASS_EQ(quant, f);
main = toString(quant->uarg());
}
else if(quant->connective()==LITERAL && quant->literal()->isNegative()){
ASS_EQ(quant,f);
Literal* comp = Literal::complementaryLiteral(quant->literal());
main = comp->toString();
}
else {
Formula* neg=new NegatedFormula(quant);
main = toString(neg);
neg->destroy();
}
if(quant!=f) {
ASS_EQ(quant->connective(),FORALL);
VList::destroy(static_cast<QuantifiedFormula*>(quant)->vars());
quant->destroy();
}
}
else {
main = toString(f);
}
}
vstring unitName;
if(!Parse::TPTP::findAxiomName(unit, unitName)) {
unitName="u" + Int::toString(unit->number());
}
return prefix + "(" + unitName + "," + kind + ",\n"
+ " " + main + ").\n";
}
vstring TPTPPrinter::toString(const Term* t){
NOT_IMPLEMENTED;
}
vstring TPTPPrinter::toString(const Literal* l){
NOT_IMPLEMENTED;
}
}
|
/** @file compiler.h
*
* @brief Define the interface to the compiler module.
*
* Only a single function here for now. Simply call `compile` to kick
* off the scanning and compiling stage.
*
* @author <NAME> (dlains)
* @bug No Known Bugs
*/
#ifndef COMPILER_H
#define COMPILER_H
#include "object.h"
#include "vm.h"
/** @brief Compile any source code available in the scanner.
*
 * Compiles any source code that is available in the scanner. If no source
 * code has been read, the compile function just returns.
 *
 * @param chunk Store the generated byte code into the chunk array.
 * @return True if compilation succeeded, false if an error occurred.
*/
bool compile(Chunk *chunk);
/** @brief Parse an expression.
*
* Parse a full expression and write the byte code to the Chunk array.
*/
void expression(void);
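/* Usage sketch (hypothetical): the Chunk type comes from another header, and the
 * init/free helper names below are assumptions rather than declarations from this
 * module.
 *
 *   Chunk chunk;
 *   initChunk(&chunk);
 *   if (compile(&chunk)) {
 *     // hand the generated byte code to the VM for execution
 *   }
 *   freeChunk(&chunk);
 */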
#endif // COMPILER_H
|
package util
import (
"errors"
"testing"
"github.com/stretchr/testify/require"
"gopkg.in/yaml.v2"
)
func Test_MultiString_Set_and_Reset(t *testing.T) {
assert := require.New(t)
target := &MultiStringT{}
target.Set("A")
assert.Equal("A", target.Text)
assert.Equal(1, len(target.LowercasedValues))
assert.True(target.LowercasedValues["a"])
assert.Equal(1, len(target.Values))
assert.True(target.Values["A"])
text := " A, \nb, \tC \r"
target.Set(text)
assert.Equal(text, target.Text)
assert.Equal(3, len(target.LowercasedValues))
assert.True(target.LowercasedValues["a"])
assert.True(target.LowercasedValues["b"])
assert.True(target.LowercasedValues["c"])
assert.Equal(3, len(target.Values))
assert.True(target.Values["A"])
assert.True(target.Values["b"])
assert.True(target.Values["C"])
target.Reset()
assert.Equal("", target.Text)
assert.Equal(0, len(target.LowercasedValues))
assert.Equal(0, len(target.Values))
}
func Test_MultiString_Contains(t *testing.T) {
assert := require.New(t)
target := &MultiStringT{}
target.Set("A")
assert.False(target.Contains("a", true))
assert.True(target.Contains("A", true))
assert.True(target.Contains("a", false))
assert.True(target.Contains("A", false))
assert.False(target.Contains("b", false))
assert.False(target.Contains("B", true))
}
func Test_MultiString_MarshalYAML(t *testing.T) {
assert := require.New(t)
target := &MultiStringT{}
target.Set("A")
ymlBytes, err := yaml.Marshal(target)
assert.Equal("A\n", string(ymlBytes))
assert.NoError(err)
target.Set(" A, b, C")
ymlBytes, err = yaml.Marshal(target)
assert.Equal("' A, b, C'\n", string(ymlBytes)) //TODO: is this a bug?
assert.NoError(err)
}
func Test_MultiString_UnmarshalYAML(t *testing.T) {
assert := require.New(t)
target := &MultiStringT{}
yml := "12, 34"
err := yaml.Unmarshal([]byte(yml), &target)
assert.NoError(err)
assert.Equal(2, len(target.Values))
assert.True(target.Contains("12", true))
assert.True(target.Contains("34", true))
err = target.UnmarshalYAML(func(_ interface{}) error {
return errors.New("")
})
assert.Error(err)
// no changes
assert.Equal(2, len(target.Values))
assert.True(target.Contains("12", true))
assert.True(target.Contains("34", true))
}
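// The MultiStringT type itself is not shown in this test file. The block below is a
// hypothetical reconstruction inferred from the assertions above: the field and method
// names come from the tests, while the implementation details are guesses, and the
// YAML marshal/unmarshal methods exercised above are omitted. Set parses a
// comma-separated string, trims whitespace, and indexes both the exact and the
// lowercased values; Contains consults one of the two sets depending on caseSensitive.
//
//	type MultiStringT struct {
//		Text             string
//		Values           map[string]bool
//		LowercasedValues map[string]bool
//	}
//
//	func (m *MultiStringT) Reset() {
//		m.Text = ""
//		m.Values = map[string]bool{}
//		m.LowercasedValues = map[string]bool{}
//	}
//
//	func (m *MultiStringT) Set(text string) {
//		m.Reset()
//		m.Text = text
//		for _, part := range strings.Split(text, ",") {
//			v := strings.TrimSpace(part)
//			if v == "" {
//				continue
//			}
//			m.Values[v] = true
//			m.LowercasedValues[strings.ToLower(v)] = true
//		}
//	}
//
//	func (m *MultiStringT) Contains(v string, caseSensitive bool) bool {
//		if caseSensitive {
//			return m.Values[v]
//		}
//		return m.LowercasedValues[strings.ToLower(v)]
//	}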
|
def mean_true(y_true, y_pred):
with K.name_scope(name='mean_true') as scope:
if len(K.int_shape(y_true)) == 2 and K.int_shape(y_true)[1] == 3:
y_true = K.cast(y_true[:, :1], 'float32')
return K.mean(y_true) |
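# Notes (not part of the original snippet): this metric assumes the Keras backend has
# been imported elsewhere in the file, e.g. `from keras import backend as K` or
# `from tensorflow.keras import backend as K`. It reports the mean of the first target
# column when y_true has shape (batch, 3). A hedged usage sketch:
#
#   model.compile(optimizer="adam", loss="mse", metrics=[mean_true])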
/**
* Setup motion paths for the given data.
 * \note Only used when explicitly calculating paths on bones which may/may not be considered already
 *
 * \param reports Report list for error messages
 * \param scene Current scene (for frame ranges, etc.)
 * \param ob Object to add paths for (must be provided)
 * \param pchan Posechannel to add paths for (optional; if not provided, object-paths are assumed)
*/
bMotionPath *animviz_verify_motionpaths(ReportList *reports, Scene *scene, Object *ob, bPoseChannel *pchan)
{
bAnimVizSettings *avs;
bMotionPath *mpath, **dst;
if (ELEM(NULL, scene, ob))
return NULL;
if (pchan) {
avs = &ob->pose->avs;
dst = &pchan->mpath;
}
else {
avs = &ob->avs;
dst = &ob->mpath;
}
if (avs->path_sf >= avs->path_ef) {
BKE_reportf(reports, RPT_ERROR,
"Motion path frame extents invalid for %s (%d to %d)%s",
(pchan) ? pchan->name : ob->id.name,
avs->path_sf, avs->path_ef,
(avs->path_sf == avs->path_ef) ? TIP_(", cannot have single-frame paths") : "");
return NULL;
}
if (*dst != NULL) {
int expected_length = avs->path_ef - avs->path_sf;
mpath = *dst;
if ((mpath->start_frame != mpath->end_frame) && (mpath->length > 0)) {
if (mpath->length == expected_length) {
return mpath;
}
else {
animviz_free_motionpath_cache(mpath);
}
}
}
else {
mpath = MEM_callocN(sizeof(bMotionPath), "bMotionPath");
*dst = mpath;
}
mpath->start_frame = avs->path_sf;
mpath->end_frame = avs->path_ef;
mpath->length = mpath->end_frame - mpath->start_frame;
if (avs->path_bakeflag & MOTIONPATH_BAKE_HEADS)
mpath->flag |= MOTIONPATH_FLAG_BHEAD;
else
mpath->flag &= ~MOTIONPATH_FLAG_BHEAD;
mpath->color[0] = 1.0;
mpath->color[1] = 0.0;
mpath->color[2] = 0.0;
mpath->line_thickness = 1;
mpath->flag |= MOTIONPATH_FLAG_LINES;
mpath->points = MEM_callocN(sizeof(bMotionPathVert) * mpath->length, "bMotionPathVerts");
avs->path_bakeflag |= MOTIONPATH_BAKE_HAS_PATHS;
return mpath;
} |
/**
* The KeyPass2 class helps you to get to the next level once you collect all 3 objects from the Level2 class.
* @author (Chilka, Madalina, Nicolas, Jose)
* @version Gold Master(December 14, 2020)
*/
public class KeyPass2 extends Actor
{
/**
* Act - do whatever the KeyPass2 wants to do. This method is called whenever
* the 'Act' or 'Run' button gets pressed in the environment.
*/
public void act()
{
if (isTouching(Toby.class)) {
Greenfoot.playSound("Key.wav");
}
}
} |
from django.shortcuts import render
import pandas as pd
import json
# Create your views here.
def Search(request):
d = {}
query = request.GET.get('search')
query1 = request.GET.get('searchbank')
query2 = request.GET.get('searchcity')
df = pd.read_csv('https://raw.githubusercontent.com/snarayanank2/indian_banks/master/bank_branches.csv')
if query:
val = df.loc[df['ifsc'] == query].reset_index()
d = val.to_dict()
d = {'val': d, 'message': '1'}
elif query1 and query2:
array = [query1, query2]
vall = df.loc[(df['bank_name'].isin(array) & df['city'].isin(array))].reset_index()
json_records = vall.reset_index().to_json(orient='records')
data = []
data = json.loads(json_records)
d = {'vall': data, 'message': '2'}
return render(request, 'home.html', d)
|
import React from 'react';
import escapeHtml from 'escape-html';
import { LinkElement, LinkRendererProps } from '@graphcms/rich-text-types';
export function Link({ children, ...rest }: LinkRendererProps) {
const { href, rel, id, title, openInNewTab, className } = rest;
const props: Pick<LinkElement, 'rel' | 'id' | 'title' | 'className'> & {
target?: string;
} = {};
if (rel) props.rel = rel;
if (id) props.id = id;
if (title) props.title = title;
if (className) props.className = className;
if (openInNewTab) props.target = '_blank';
return (
<a href={escapeHtml(href)} {...props}>
{children}
</a>
);
}
|
/// Send data back to subscribers.
/// If a send fails (likely a broken connection) the subscriber is removed from the sink.
/// O(n) in the number of subscribers.
pub fn send<T>(&mut self, result: &T) -> anyhow::Result<()>
where
T: Serialize,
{
let result = to_raw_value(result)?;
let mut errored = Vec::new();
let mut subs = self.subscribers.lock();
for ((conn_id, sub_id), sender) in subs.iter() {
let msg = serde_json::to_string(&JsonRpcNotification {
jsonrpc: TwoPointZero,
method: self.method,
params: JsonRpcNotificationParams { subscription: *sub_id, result: &*result },
})?;
// Track broken connections
if sender.unbounded_send(msg).is_err() {
errored.push((*conn_id, *sub_id));
}
}
// Remove broken connections
for entry in errored {
subs.remove(&entry);
}
Ok(())
} |
/**
* Creates a new extended item.
*
* @param name
* the optional extended item name. Can be <code>null</code>.
* @param extensionName
* the required extension name
* @return a handle to extended item, return <code>null</code> if the
* definition with the given extension name is not found
*/
public ExtendedItemHandle newExtendedItem( String name, String extensionName )
{
try
{
return newExtendedItem( name, extensionName, null );
}
catch ( ExtendsException e )
{
assert false;
return null;
}
} |
import { FC, memo } from 'react';
import { SVG } from './SVG';
import { Box } from './Box';
import { SVGIcon } from './SVG.Icons';
import { Anchor } from './Link';
const _Logo: FC<{
variant?: 'on-white' | 'on-black' | 'on-blue';
className?: string;
preserveText?: boolean;
}> = ({ className, preserveText }): JSX.Element => {
return (
<Box
className={`Logo top px-0 ${className || ''} ${preserveText ? 'preserve-text' : ''}`.trim()}>
<Anchor routeLink href="/">
<SVGIcon name="cribmd-logo--blue" />
<SVGIcon name="cribmd-logo-text" className="m" />
<SVG
xmlns="http://www.w3.org/2000/svg"
id="flag-icon-css-ng"
viewBox="0 0 512 512"
className="flag-icon d-inline-block">
<path fill="#fff" d="M0 0h512v512H0z" />
<path fill="#008753" d="M341.3 0H512v512H341.3zM0 0h170.7v512H0z" />
</SVG>
</Anchor>
</Box>
);
};
export const Logo = memo(_Logo);
|
// ConnectDb connects the env to the sql database with the sqlOpt and the redis
// database with redisOpt
func (env *Env) ConnectDb(sqlOpt SQLOptions, redisOpt RedisOptions) {
loggerDb := log.New(os.Stdout, "db: ", log.Lshortfile)
lk := &sync.Mutex{}
ra := rand.New(rand.NewSource(time.Now().UnixNano()))
d, err := sql.Open("postgres", sqlOpt.String())
if err != nil {
loggerDb.Fatal(err)
}
redisConv := &redis.Options{
Addr: fmt.Sprintf("%v:%v", redisOpt.Host, redisOpt.Port),
Password: "",
DB: 0,
}
r := redis.NewClient(redisConv)
_, err = r.Ping().Result()
if err != nil {
loggerDb.Fatal(err)
}
loggerDb.Printf("Connected to database")
env.Db = &db{d, loggerDb, nil, lk, ra}
} |
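// Usage sketch (hypothetical): the concrete fields of SQLOptions are not visible in
// this snippet (only that it provides a String() connection string), and RedisOptions
// is only known to carry Host and Port, so the literals below are illustrative.
//
//	env := &Env{}
//	env.ConnectDb(
//		SQLOptions{ /* postgres connection settings */ },
//		RedisOptions{Host: "localhost", Port: 6379},
//	)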
import { cli } from "https://deno.land/x/[email protected]/cli.ts";
const html = (options: {
module: string;
title: string;
}) => `
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width,initial-scale=1,user-scalable=no" />
<title>${options.title}</title>
<script defer type="module">
import "${options.module}"
</script>
</head>
<body></body>
</html>
`;
if (import.meta.main) {
const ignore = "--ignore=" + [
"docs",
"deps",
].join(",");
await cli({
server: "https://deno.land/x/[email protected]/serve.ts",
build: [{
cmd: [
"deno",
"run",
"-A",
"https://deno.land/x/[email protected]/cli.ts",
ignore,
],
}],
modules: {
index: "./index.tsx",
},
pages: {
index: html({
title: "Preact with Deno",
module: "./index.js",
}),
},
});
}
|
import { NextSeo } from 'next-seo';
import Page from '@/components/page';
import Header from '@/components/header-empezar';
import Footer from '@/components/footer';
export default function Empezar() {
return (
<Page>
<NextSeo
title="GABO | Educando Offline "
description="Logística para la implementación de sistemas tecnológicos en áreas remotas"
/>
<Header />
<main />
<Footer />
</Page>
);
}
|
#!/usr/bin/env python3.6
import re, argparse, numpy as np, glob
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from extractTargetFilesNonDim import getAllData
from extractTargetFilesNonDim import epsNuFromRe
from computeMeanIntegralQuantitiesNonDim import findAllParams
from computeMeanIntegralQuantitiesNonDim import readAllFiles
from computeSpectraNonDim import readAllSpectra
colors = ['#1f78b4', '#33a02c', '#e31a1c', '#ff7f00', '#6a3d9a', '#b15928', '#a6cee3', '#b2df8a', '#fb9a99', '#fdbf6f', '#cab2d6', '#ffff99']
colors = ['#e41a1c', '#377eb8', '#4daf4a', '#984ea3', '#ff7f00', '#ffff33', '#a65628', '#f781bf', '#999999']
nQoI = 8
h = 2 * np.pi / (16*16)
QoI = [ 'Time Step Size',
'Turbulent Kinetic Energy',
'Velocity Gradient',
'Velocity Gradient Stdev',
'Integral Length Scale',
]
def main_integral(targetpath, simdir, relambda, simnblocks):
nSimBins = simnblocks * 16//2 - 1
eps, nu = epsNuFromRe(relambda)
runData = getAllData(simdir, eps, nu, nSimBins, 1)
vecParams, vecMean, vecStd = readAllFiles(targetpath, [relambda])
vecSpectra, vecEnStdev, fullSpectra, vecCovLogE = readAllSpectra(targetpath, [relambda])
plt.figure()
axes = [plt.subplot(1, 1, 1)]
axes[0].set_xlabel(r'$k \eta$')
axes[0].grid()
axes[0].set_ylabel(r'$\Delta E(k) / E(k)$')
ci = 0
nyquist, nruns = vecSpectra.shape[0], vecSpectra.shape[1]
print(nyquist)
leta = np.power(nu**3 / eps, 0.25)
Ekscal = np.power(nu**5 * eps, 0.25)
nyquist = simnblocks * 16 // 2 - 1
logE = np.log(runData['spectra'])
logE = np.mean(logE, axis=0)
logEtgt = vecSpectra[:nyquist, 0]
print(logE.shape, logEtgt.shape, vecEnStdev.shape)
dLogE = np.zeros(nyquist)
for i in range(nyquist):
dLogE[i] = (logE[i] - logEtgt[i]) / vecEnStdev[i]
K = np.arange(1, nyquist+1, dtype=np.float64) * leta
axes[0].plot(K, dLogE)
plt.show()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description = "Compute a target file for RL agent from DNS data.")
parser.add_argument('--targets', help="Directory containing the target files")
parser.add_argument('--sim', help="Directory containing the dim to evaluate.")
parser.add_argument('--re', type=int, help="Reynolds_lambda number of simulation.")
parser.add_argument('--nblocks', type=int, help="Number of blocks per dim in simulation.")
args = parser.parse_args()
main_integral(args.targets, args.sim, args.re, args.nblocks)
|
def processPhrases(inFile):
global new_trie
global rulesSet
phrasesSet = set([])
print 'Processing phrases from file : %s' % inFile
with codecs.open(inFile, 'r', 'utf-8') as iF:
for line in iF:
line = line.strip()
wLst = line.split()
for i in range( len(wLst) ):
for j in range(i, i + MAX_SPAN_LEN):
if j >= len(wLst): break
src = ' '.join(wLst[i:j+1])
if src in phrasesSet: continue
phrasesSet.add(src)
p_tot = 0
t_beg = time.time()
for src in phrasesSet:
matchLst = []
matchLst = new_trie.matchPattern(src)
p_tot += 1
for match in matchLst:
if match[0] in rulesSet: continue
rulesSet.add(match[0])
t_tot = time.time() - t_beg
print "Unique phrases processed : %4d" % (p_tot)
print "Total time taken : %f" % (t_tot)
print "Average time taken : %f" % (t_tot/ p_tot)
return None |
/**
* A superclass for System requests.
*/
public class SystemRequest extends SpeechletRequest {
protected SystemRequest(SpeechletRequestBuilder builder) {
super(builder);
}
protected SystemRequest(final String requestId, final Date timestamp, final Locale locale) {
super(requestId, timestamp, locale);
}
} |
import { Component, Input } from '@angular/core';
@Component({
selector: 'linear-progress-bar',
template: `<div *ngIf="showLabel" class="d-flex justify-content-between mt-2">
<small>{{label}}</small>
<small>{{percent}}%</small>
</div>
<div class="progress progress-md mt-2">
<div class="progress-bar bg-{{color}}" role="progressbar" style="width: {{percent}}%" aria-valuemin="0" aria-valuemax="100">{{percent}}%</div>
</div>`
})
export class LinearProgressBar {
@Input() color: string;
@Input() percent: number
@Input() label: number
@Input() showLabel: boolean
}
@Component({
selector: 'circular-progress-bar',
template: `<div id="circleProgress1" class="progressbar-js-circle border rounded p-3"></div>`
})
export class CircularProgressBar {
@Input() color: string;
@Input() percent: number
}
@Component({
selector: 'loading1',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader1">Loading...</div>`
})
export class Loading1 { }
@Component({
selector: 'loading2',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader2">Loading...</div>`
})
export class Loading2 { }
@Component({
selector: 'loading3',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader3">Loading...</div>`
})
export class Loading3 { }
@Component({
selector: 'loading4',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader4">Loading...</div>`
})
export class Loading4 {}
@Component({
selector: 'loading5',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader5">Loading...</div>`
})
export class Loading5 {}
@Component({
selector: 'loading6',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader6">Loading...</div>`
})
export class Loading6 {}
@Component({
selector: 'loading7',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader7">Loading...</div>`
})
export class Loading7 {}
@Component({
selector: 'loading8',
styleUrls: ['../../../assets/css/loaders.css'],
template: `<div class="loader8">Loading...</div>`
})
export class Loading8 {}
|
def find_character_with_name(self, name) -> Entity:
for char in self.problem.find_objects_with_type(shared_variables.supported_types['character']):
if char.name == name:
return char
return None |
/**
*
*/
package com.sqli.echallenge.formation.metier;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.sqli.echallenge.formation.dao.HabilitationDao;
import com.sqli.echallenge.formation.model.Habilitation;
import com.sqli.echallenge.formation.model.HabilitationScore;
/**
* @author Mouad
*
*/
@Service
@Transactional
public class HabilitationMetierImpl implements HabilitationMetier {
@Autowired
private HabilitationDao dao;
public Habilitation getHabilitation(Long idHabilitation) throws Exception {
return dao.getHabilitation(idHabilitation);
}
public void addHabilitation(Habilitation habilitation) throws Exception {
dao.addHabilitation(habilitation);
}
public void removeHabilitation(Long idHabilitation) throws Exception {
dao.removeHabilitation(idHabilitation);
}
public void updateHabilitation(Habilitation habilitation) throws Exception {
dao.updateHabilitation(habilitation);
}
public List<Habilitation> getAllHabilitations() throws Exception {
return dao.getAllHabilitations();
}
public HabilitationDao getDao() {
return dao;
}
public void setDao(HabilitationDao dao) {
this.dao = dao;
}
/**
	 * List of the habilitations that do not exist in a collaborator's list of habilitations.
	 * @param collabHabilitationScoreList : the collaborator's list of habilitations
*/
public List<Habilitation> getAllHabilitations_notInCollabList(List<HabilitationScore> collabHabilitationScoreList)
throws Exception {
		//temporary variable to store the new list
List<Habilitation> temp = new ArrayList<Habilitation>();
List<Habilitation> allHabilitations = dao.getAllHabilitations();
List<Habilitation> collabHabilitationList = getListHabilitationFromHabilitationScore(collabHabilitationScoreList);
for(Habilitation h : allHabilitations){
if(!existeHabilitationInList(collabHabilitationList,h)){
temp.add(h);
}
}
return temp;
}
/**
	 * Checks whether the habilitation exists in the list or not.
*/
private boolean existeHabilitationInList(List<Habilitation> collabHabilitationList, Habilitation habi){
for(Habilitation h : collabHabilitationList){
if(h.getIdHabilitation().compareTo(habi.getIdHabilitation()) == 0){
return true;
}
}
return false;
}
/**
	 * Builds a list of habilitations from a list of HabilitationScore entries.
	 * @return the list of habilitations
*/
private List<Habilitation> getListHabilitationFromHabilitationScore(List<HabilitationScore> collabHabilitationScoreList)
{
List<Habilitation> collabHabilitationList = new ArrayList<Habilitation>();
for(HabilitationScore hScore : collabHabilitationScoreList){
collabHabilitationList.add(hScore.getHabilitation());
}
return collabHabilitationList;
}
}
|
/**
* Created by koush on 5/22/13.
*/
public class ContentLoader extends SimpleLoader {
private static final class InputStreamDataEmitterFuture extends SimpleFuture<DataEmitter> {
}
@Override
public Future<DataEmitter> load(final Ion ion, final AsyncHttpRequest request, final FutureCallback<LoaderEmitter> callback) {
if (!request.getUri().getScheme().startsWith("content"))
return null;
final InputStreamDataEmitterFuture ret = new InputStreamDataEmitterFuture();
ion.getHttpClient().getServer().post(new Runnable() {
@Override
public void run() {
try {
InputStream stream = ion.getContext().getContentResolver().openInputStream(Uri.parse(request.getUri().toString()));
if (stream == null)
throw new Exception("Unable to load content stream");
int available = stream.available();
InputStreamDataEmitter emitter = new InputStreamDataEmitter(ion.getHttpClient().getServer(), stream);
ret.setComplete(emitter);
callback.onCompleted(null, new LoaderEmitter(emitter, available, LoaderEmitter.LOADED_FROM_CACHE, null, null));
}
catch (Exception e) {
ret.setComplete(e);
callback.onCompleted(e, null);
}
}
});
return ret;
}
} |
/**
 * Utility class for ODT files.
*
* @author pforhan
*/
final class ODTUtil {
/**
* Nom du fichier XML gérant les contenus.
*/
static final String CONTENT_XML = "content.xml";
/**
* Nom du fichier XML gérant les styles.
*/
static final String STYLES_XML = "styles.xml";
	/** Prefix of the generated temporary files. */
private static final String TEMP_FILE_PREFIX = "quarto";
	/** Suffix of the generated temporary files. */
private static final String TEMP_FILE_SUFFIX = ".odt";
/**
	 * Private constructor for a utility class
*/
private ODTUtil() {
super();
}
/**
	 * Extracts the content.xml file from an ODT.
	 *
	 * @param odtFile ZipFile source file
	 * @return String containing the content.xml file as a string
	 * @throws IOException if an input/output exception occurs
*/
static String extractContent(final ZipFile odtFile) throws IOException {
return ZipUtil.readEntry(odtFile, CONTENT_XML);
}
/**
	 * Extracts the styles.xml file from an ODT.
	 *
	 * @param odtFile ZipFile source file
	 * @return String containing the styles.xml file as a string
	 * @throws IOException if an input/output exception occurs
*/
static String extractStyles(final ZipFile odtFile) throws IOException {
return ZipUtil.readEntry(odtFile, STYLES_XML);
}
/**
	 * Creates the content.xml file of an ODT file from content produced by a merge.
	 * @param odtFile original ZipFile
	 * @param contentXml content.xml content to substitute
	 * @param stylesXml styles.xml content to substitute
	 * @param newImagesMap image files to substitute
	 * @return merged file
	 * @throws IOException if an IOException occurs
*/
static File createODT(final ZipFile odtFile, final String contentXml, final String stylesXml, final Map<String, VFile> newImagesMap) throws IOException {
final File resultFile = new TempFile(TEMP_FILE_PREFIX, TEMP_FILE_SUFFIX);
try (final ZipOutputStream outputFichierOdt = new ZipOutputStream(new BufferedOutputStream(Files.newOutputStream(resultFile.toPath())))) {
for (final ZipEntry zipEntry : Collections.list(odtFile.entries())) {
final String entryName = zipEntry.getName();
if (newImagesMap.containsKey(entryName)) {
try (final InputStream imageIS = newImagesMap.get(entryName).createInputStream()) {
ZipUtil.writeEntry(outputFichierOdt, imageIS, entryName);
}
} else if (CONTENT_XML.equals(entryName)) {
ZipUtil.writeEntry(outputFichierOdt, contentXml, CONTENT_XML);
} else if (STYLES_XML.equals(entryName)) {
ZipUtil.writeEntry(outputFichierOdt, stylesXml, STYLES_XML);
} else {
try (final InputStream zipIS = odtFile.getInputStream(zipEntry)) {
ZipUtil.writeEntry(outputFichierOdt, zipIS, zipEntry);
}
}
outputFichierOdt.closeEntry();
}
}
return resultFile;
}
/**
	 * Tests whether a tag is present in the character array content at position index:
	 * equivalent to: tag.equals(new String(content, index, tag.length())).
	 * @param content character array
	 * @param index index in content at which to start the check
	 * @param tag text to check
	 * @return boolean
*/
public static boolean regionMatches(final char[] content, final int index, final String tag) {
final int length = tag.length();
for (int i = 0; i < length; i++) {
if (content[i + index] != tag.charAt(i)) {
return false;
}
}
return true;
}
} |
/*
***************************************************************************
** Program : Header file: OTGWStuff.h
** Version : v0.7.1
**
** Copyright (c) 2021 <NAME>
** Borrowed from OpenTherm library from:
** https://github.com/jpraus/arduino-opentherm
**
** TERMS OF USE: MIT License. See bottom of file.
***************************************************************************
*/
// OTGW Serial 2 network port
// #include <TelnetStream.h> // https://github.com/jandrassy/TelnetStream/commit/1294a9ee5cc9b1f7e51005091e351d60c8cddecf
#define OTGW_SERIAL_PORT 1023
TelnetStreamClass OTGWstream(OTGW_SERIAL_PORT);
//Depends on the library
#define OTGW_COMMAND_TOPIC "command"
typedef struct {
uint16_t Status = 0; // flag8 / flag8 Master and Slave Status flags.
float Tset = 0.0; // f8.8 Control setpoint ie CH water temperature setpoint (°C)
uint16_t MConfigMMemberIDcode = 0; // flag8 / u8 Master Configuration Flags / Master MemberID Code
uint16_t SConfigSMemberIDcode = 0; // flag8 / u8 Slave Configuration Flags / Slave MemberID Code
uint16_t Command = 0; // u8 / u8 Remote Command
uint16_t ASFflags = 0; // / OEM-fault-code flag8 / u8 Application-specific fault flags and OEM fault code
uint16_t RBPflags = 0; // flag8 / flag8 Remote boiler parameter transfer-enable & read/write flags
float CoolingControl = 0.0 ; // f8.8 Cooling control signal (%)
float TsetCH2 = 0.0 ; // f8.8 Control setpoint for 2e CH circuit (°C)
float TrOverride = 0.0 ; // f8.8 Remote override room setpoint
uint16_t TSP = 0; // u8 / u8 Number of Transparent-Slave-Parameters supported by slave
uint16_t TSPindexTSPvalue = 0; // u8 / u8 Index number / Value of referred-to transparent slave parameter.
uint16_t FHBsize = 0; // u8 / u8 Size of Fault-History-Buffer supported by slave
uint16_t FHBindexFHBvalue = 0; // u8 / u8 Index number / Value of referred-to fault-history buffer entry.
float MaxRelModLevelSetting = 0.0 ; // f8.8 Maximum relative modulation level setting (%)
uint16_t MaxCapacityMinModLevel = 0; // u8 / u8 Maximum boiler capacity (kW) / Minimum boiler modulation level(%)
float TrSet = 0.0 ; // f8.8 Room Setpoint (°C)
float RelModLevel = 0.0 ; // f8.8 Relative Modulation Level (%)
float CHPressure = 0.0 ; // f8.8 Water pressure in CH circuit (bar)
float DHWFlowRate = 0.0 ; // f8.8 Water flow rate in DHW circuit. (litres/minute)
uint16_t DayTime = 0; // special / u8 Day of Week and Time of Day
uint16_t Date = 0; // u8 / u8 Calendar date
uint16_t Year = 0; // u16 Calendar year
float TrSetCH2 = 0.0 ; // f8.8 Room Setpoint for 2nd CH circuit (°C)
float Tr = 0.0 ; // f8.8 Room temperature (°C)
float Tboiler = 0.0 ; // f8.8 Boiler flow water temperature (°C)
float Tdhw = 0.0 ; // f8.8 DHW temperature (°C)
float Toutside = 0.0 ; // f8.8 Outside temperature (°C)
float Tret = 0.0 ; // f8.8 Return water temperature (°C)
float Tstorage = 0.0 ; // f8.8 Solar storage temperature (°C)
float Tcollector = 0.0 ; // f8.8 Solar collector temperature (°C)
float TflowCH2 = 0.0 ; // f8.8 Flow water temperature CH2 circuit (°C)
float Tdhw2 = 0.0 ; // f8.8 Domestic hot water temperature 2 (°C)
int16_t Texhaust = 0; // s16 Boiler exhaust temperature (°C)
uint16_t FanSpeed = 0; // u16 Fan Speed (rpm)
float ElectricalCurrentBurnerFlame =0.0; // f88 Electrical current through burner flame (µA)
float TRoomCH2= 0.0; // f88 Room Temperature for 2nd CH circuit ("°C)
uint16_t RelativeHumidity = 0; // u8 / u8 Relative Humidity (%)
uint16_t TdhwSetUBTdhwSetLB = 0 ; // s8 / s8 DHW setpoint upper & lower bounds for adjustment (°C)
uint16_t MaxTSetUBMaxTSetLB = 0; // s8 / s8 Max CH water setpoint upper & lower bounds for adjustment (°C)
uint16_t HcratioUBHcratioLB = 0; // s8 / s8 OTC heat curve ratio upper & lower bounds for adjustment
float TdhwSet = 0.0 ; // f8.8 DHW setpoint (°C) (Remote parameter 1)
float MaxTSet = 0.0 ; // f8.8 Max CH water setpoint (°C) (Remote parameters 2)
float Hcratio = 0.0 ; // f8.8 OTC heat curve ratio (°C) (Remote parameter 3)
//RF
uint16_t RFstrengthbatterylevel = 0; // u8/ u8 RF strength and battery level
uint16_t OperatingMode_HC1_HC2_DHW = 0; // u8 / u8 Operating Mode HC1, HC2/ DHW
uint16_t RoomRemoteOverrideFunction = 0; // Function of manual and program changes in master and remote room setpoint
//Electric Producer
uint16_t ElectricityProducerStarts = 0; // u16 Electricity producer starts
uint16_t ElectricityProducerHours = 0; // u16 Electricity producer hours
uint16_t ElectricityProduction = 0; // u16 Electricity production
uint16_t CumulativElectricityProduction = 0; // u16 Cumulativ Electricity production
//Ventilation/HeatRecovery Msgids
uint16_t StatusVH = 0;
uint8_t ControlSetpointVH = 0;
uint16_t FaultFlagsCodeVH = 0;
uint16_t DiagnosticCodeVH = 0;
uint16_t ConfigMemberIDVH = 0;
float OpenthermVersionVH = 0.0;
uint16_t VersionTypeVH = 0;
uint16_t RelativeVentilation = 0;
uint16_t RelativeHumidityVH = 0;
uint16_t CO2LevelVH = 0;
float SupplyInletTemperature = 0.0;
float SupplyOutletTemperature = 0.0;
float ExhaustInletTemperature = 0.0;
float ExhaustOutletTemperature = 0.0;
uint16_t ActualExhaustFanSpeed = 0;
uint16_t ActualInletFanSpeed = 0;
uint16_t RemoteParameterSettingVH = 0;
uint16_t NominalVentilationValue = 0;
uint16_t TSPNumberVH = 0;
uint16_t TSPEntryVH = 0;
uint16_t FaultBufferSizeVH = 0;
uint16_t FaultBufferEntryVH = 0;
//Statitics
uint16_t RemoteOverrideFunction = 0; // flag8 / - Function of manual and program changes in master and remote room setpoint.
uint16_t OEMDiagnosticCode = 0; // u16 OEM-specific diagnostic/service code
uint16_t BurnerStarts = 0; // u16 Number of starts burner
uint16_t CHPumpStarts = 0; // u16 Number of starts CH pump
uint16_t DHWPumpValveStarts = 0; // u16 Number of starts DHW pump/valve
uint16_t DHWBurnerStarts = 0; // u16 Number of starts burner during DHW mode
uint16_t BurnerOperationHours = 0; // u16 Number of hours that burner is in operation (i.e. flame on)
uint16_t CHPumpOperationHours = 0; // u16 Number of hours that CH pump has been running
uint16_t DHWPumpValveOperationHours = 0; // u16 Number of hours that DHW pump has been running or DHW valve has been opened
uint16_t DHWBurnerOperationHours = 0; // u16 Number of hours that burner is in operation during DHW mode
float OpenThermVersionMaster = 0.0 ; // f8.8 The implemented version of the OpenTherm Protocol Specification in the master.
float OpenThermVersionSlave = 0.0 ; // f8.8 The implemented version of the OpenTherm Protocol Specification in the slave.
uint16_t MasterVersion = 0; // u8 / u8 Master product version number and type
uint16_t SlaveVersion = 0; // u8 / u8 Slave product version number and type
//Rehmea
uint16_t RemehadFdUcodes = 0; // u16 Remeha dF-/dU-codes
uint16_t RemehaServicemessage = 0; // u16 Remeha Servicemessage
uint16_t RemehaDetectionConnectedSCU =0; // u16 Remeha detection connected SCU’s
} OTdataStruct;
static OTdataStruct OTdataObject;
enum OpenThermResponseStatus {
OT_NONE,
OT_SUCCESS,
OT_INVALID,
OT_TIMEOUT
};
enum OpenThermMessageType {
/* Master to Slave */
OT_READ_DATA = B000,
OT_WRITE_DATA = B001,
OT_INVALID_DATA = B010,
OT_RESERVED = B011,
/* Slave to Master */
OT_READ_ACK = B100,
OT_WRITE_ACK = B101,
OT_DATA_INVALID = B110,
OT_UNKNOWN_DATA_ID = B111
};
enum OpenThermMessageID {
Status, // flag8 / flag8 Master and Slave Status flags.
TSet, // f8.8 Control setpoint ie CH water temperature setpoint (°C)
MConfigMMemberIDcode, // flag8 / u8 Master Configuration Flags / Master MemberID Code
SConfigSMemberIDcode, // flag8 / u8 Slave Configuration Flags / Slave MemberID Code
Command, // u8 / u8 Remote Command
ASFflags, // / OEM-fault-code flag8 / u8 Application-specific fault flags and OEM fault code
RBPflags, // flag8 / flag8 Remote boiler parameter transfer-enable & read/write flags
CoolingControl, // f8.8 Cooling control signal (%)
TsetCH2, // f8.8 Control setpoint for 2e CH circuit (°C)
TrOverride, // f8.8 Remote override room setpoint
TSP, // u8 / u8 Number of Transparent-Slave-Parameters supported by slave
TSPindexTSPvalue, // u8 / u8 Index number / Value of referred-to transparent slave parameter.
FHBsize, // u8 / u8 Size of Fault-History-Buffer supported by slave
FHBindexFHBvalue, // u8 / u8 Index number / Value of referred-to fault-history buffer entry.
MaxRelModLevelSetting, // f8.8 Maximum relative modulation level setting (%)
MaxCapacityMinModLevel, // u8 / u8 Maximum boiler capacity (kW) / Minimum boiler modulation level(%)
TrSet, // f8.8 Room Setpoint (°C)
RelModLevel, // f8.8 Relative Modulation Level (%)
CHPressure, // f8.8 Water pressure in CH circuit (bar)
DHWFlowRate, // f8.8 Water flow rate in DHW circuit. (litres/minute)
DayTime, // special / u8 Day of Week and Time of Day
Date, // u8 / u8 Calendar date
Year, // u16 Calendar year
TrSetCH2, // f8.8 Room Setpoint for 2nd CH circuit (°C)
Tr, // f8.8 Room temperature (°C)
Tboiler, // f8.8 Boiler flow water temperature (°C)
Tdhw, // f8.8 DHW temperature (°C)
Toutside, // f8.8 Outside temperature (°C)
Tret, // f8.8 Return water temperature (°C)
Tstorage, // f8.8 Solar storage temperature (°C)
Tcollector, // f8.8 Solar collector temperature (°C)
TflowCH2, // f8.8 Flow water temperature CH2 circuit (°C)
Tdhw2, // f8.8 Domestic hot water temperature 2 (°C)
Texhaust, // s16 Boiler exhaust temperature (°C)
FanSpeed = 35, // u16 Fan Speed (rpm)
ElectricalCurrentBurnerFlame, // f88 Electrical current through burner flame (µA)
TRoomCH2, // f88 Room Temperature for 2nd CH circuit ("°C)
RelativeHumidity, // u8 / u8 Relative Humidity (%)
TdhwSetUBTdhwSetLB = 48, // s8 / s8 DHW setpoint upper & lower bounds for adjustment (°C)
MaxTSetUBMaxTSetLB, // s8 / s8 Max CH water setpoint upper & lower bounds for adjustment (°C)
HcratioUBHcratioLB, // s8 / s8 OTC heat curve ratio upper & lower bounds for adjustment
TdhwSet = 56, // f8.8 DHW setpoint (°C) (Remote parameter 1)
MaxTSet, // f8.8 Max CH water setpoint (°C) (Remote parameters 2)
Hcratio, // f8.8 OTC heat curve ratio (°C) (Remote parameter 3)
StatusVH = 70, // flag8 / flag8 Status Ventilation/Heat recovery
ControlSetpointVH, // u8 Control setpoint V/H
FaultFlagsCodeVH, // flag8 / u8 Fault Flags/Code V/H
DiagnosticCodeVH, // u16 Diagnostic Code V/H
ConfigMemberIDVH, // flag8 / u8 Config/Member ID V/H
OpenthermVersionVH, // f8.8 OpenTherm Version V/H
VersionTypeVH, // u8 / u8 Version & Type V/H
RelativeVentilation, // u8 Relative Ventilation (%)
RelativeHumidityVH, // u8 / u8 Relative Humidity (%)
CO2LevelVH, // u16 CO2 Level (ppm)
SupplyInletTemperature, // f8.8 Supply Inlet Temperature (°C)
SupplyOutletTemperature, // f8.8 Supply Outlet Temperature(°C)
ExhaustInletTemperature, // f8.8 Exhaust Inlet Temperature (°C)
ExhaustOutletTemperature, // f8.8 Exhaust Outlet Temperature (°C)
ActualExhaustFanSpeed, // u16 Actual Exhaust Fan Speed (rpm)
ActualInletFanSpeed, // u16 Actual Inlet Fan Speed (rpm)
RemoteParameterSettingVH, // flag8 / flag8 Remote Parameter Setting V/H
NominalVentilationValue, // u8 Nominal Ventilation Value
TSPNumberVH, // u8 / u8 TSP Number V/H
TSPEntryVH, // u8 / u8 TSP Entry V/H
FaultBufferSizeVH, // u8 / u8 Fault Buffer Size V/H
FaultBufferEntryVH, // u8 / u8 Fault Buffer Entry V/H
RFstrengthbatterylevel=98, // u8 / u8 RF strength and battery level
OperatingMode_HC1_HC2_DHW, // u8 / u8 Operating Mode HC1, HC2/ DHW
RemoteOverrideFunction = 100, // flag8 / - Function of manual and program changes in master and remote room setpoint.
ElectricityProducerStarts = 109, // u16 Electricity producer starts
ElectricityProducerHours, //u16 Electricity producer hours
ElectricityProduction, //u16 Electricity production
CumulativElectricityProduction, // u16 Cumulativ Electricity production
OEMDiagnosticCode = 115, // u16 OEM-specific diagnostic/service code
BurnerStarts, // u16 Number of starts burner
CHPumpStarts, // u16 Number of starts CH pump
DHWPumpValveStarts, // u16 Number of starts DHW pump/valve
DHWBurnerStarts, // u16 Number of starts burner during DHW mode
BurnerOperationHours, // u16 Number of hours that burner is in operation (i.e. flame on)
CHPumpOperationHours, // u16 Number of hours that CH pump has been running
DHWPumpValveOperationHours, // u16 Number of hours that DHW pump has been running or DHW valve has been opened
DHWBurnerOperationHours, // u16 Number of hours that burner is in operation during DHW mode
OpenThermVersionMaster, // f8.8 The implemented version of the OpenTherm Protocol Specification in the master.
OpenThermVersionSlave, // f8.8 The implemented version of the OpenTherm Protocol Specification in the slave.
MasterVersion, // u8 / u8 Master product version number and type
SlaveVersion, // u8 / u8 Slave product version number and type
RemehadFdUcodes, // u8 / u8 Remeha dF-/dU-codes
RemehaServicemessage, // u8 / u8 Remeha Servicemessage
RemehaDetectionConnectedSCU, // u8 / u8 Remeha detection connected SCU’s
};
enum OTtype_t { ot_f88, ot_s16, ot_s8s8, ot_u16, ot_u8u8, ot_flag8, ot_flag8flag8, ot_special, ot_flag8u8, ot_u8, ot_undef};
enum OTmsgcmd_t { OT_READ, OT_WRITE, OT_RW, OT_UNDEF };
struct OTlookup_t
{
int id;
OTmsgcmd_t msg;
OTtype_t type;
char* label;
char* friendlyname;
char* unit;
};
OTlookup_t OTmap[] = {
{ 0, OT_READ , ot_flag8flag8, "Status", "Master and Slave status", "" },
{ 1, OT_WRITE , ot_f88, "TSet", "Control setpoint", "°C" },
{ 2, OT_WRITE , ot_flag8u8, "MConfigMMemberIDcode", "Master Config / Member ID", "" },
{ 3, OT_READ , ot_flag8u8, "SConfigSMemberIDcode", "Slave Config / Member ID", "" },
{ 4, OT_RW , ot_u8u8, "Command", "Command-Code", "" },
{ 5, OT_READ , ot_flag8u8, "ASFflags", "Application-specific fault", "" },
{ 6, OT_READ , ot_flag8u8, "RBPflags", "Remote-parameter flags ", "" },
{ 7, OT_WRITE , ot_f88, "CoolingControl", "Cooling control signal", "%" },
{ 8, OT_WRITE , ot_f88, "TsetCH2", "Control setpoint for 2e CH circuit", "°C" },
{ 9, OT_READ , ot_f88, "TrOverride", "Remote override room setpoint", "" },
{ 10, OT_READ , ot_u8u8, "TSP", "Number of Transparent-Slave-Parameters supported by slave", "" },
{ 11, OT_RW , ot_u8u8, "TSPindexTSPvalue", "Index number / Value of referred-to transparent slave parameter", "" },
{ 12, OT_READ , ot_u8u8, "FHBsize", "Size of Fault-History-Buffer supported by slave", "" },
{ 13, OT_READ , ot_u8u8, "FHBindexFHBvalue", "Index number / Value of referred-to fault-history buffer entry", "" },
{ 14, OT_WRITE , ot_f88, "MaxRelModLevelSetting", "Maximum relative modulation level setting", "%" },
  { 15, OT_READ , ot_u8u8, "MaxCapacityMinModLevel", "Maximum boiler capacity (kW) / Minimum boiler modulation level(%)", "kW/%" },
{ 16, OT_WRITE , ot_f88, "TrSet", "Room Setpoint", "°C" },
{ 17, OT_READ , ot_f88, "RelModLevel", "Relative Modulation Level", "%" },
{ 18, OT_READ , ot_f88, "CHPressure", "Water pressure in CH circuit", "bar" },
{ 19, OT_READ , ot_f88, "DHWFlowRate", "Water flow rate in DHW circuit", "l/m" },
{ 20, OT_RW , ot_special, "DayTime", "Day of Week and Time of Day", "" },
{ 21, OT_RW , ot_u8u8, "Date", "Calendar date ", "" },
{ 22, OT_RW , ot_u16, "Year", "Calendar year", "" },
{ 23, OT_WRITE , ot_f88, "TrSetCH2", "Room Setpoint for 2nd CH circuit", "°C" },
{ 24, OT_WRITE , ot_f88, "Tr", "Room Temperature", "°C" },
{ 25, OT_READ , ot_f88, "Tboiler", "Boiler flow water temperature", "°C" },
{ 26, OT_READ , ot_f88, "Tdhw", "DHW temperature", "°C" },
{ 27, OT_READ , ot_f88, "Toutside", "Outside temperature", "°C" },
{ 28, OT_READ , ot_f88, "Tret", "Return water temperature", "°C" },
{ 29, OT_READ , ot_f88, "Tstorage", "Solar storage temperature", "°C" },
{ 30, OT_READ , ot_f88, "Tcollector", "Solar collector temperature", "°C" },
{ 31, OT_READ , ot_f88, "TflowCH2", "Flow water temperature CH2 circuit", "°C" },
{ 32, OT_READ , ot_s16, "Tdhw2", "Domestic hot water temperature 2", "°C" },
{ 33, OT_READ , ot_f88, "Texhaust", "Boiler exhaust temperature", "°C" },
{ 34, OT_UNDEF , ot_undef, "", "", "" },
{ 35, OT_READ , ot_u16, "FanSpeed", "fan speed", "rpm" },
{ 36, OT_READ , ot_f88, "ElectricalCurrentBurnerFlame", "Electrical current through burner flame", "µA" },
{ 37, OT_READ , ot_f88, "TRoomCH2", "Room temperature for 2nd CH circuit", "°C" },
{ 38, OT_READ , ot_u8u8, "RelativeHumidity", "Relative Humidity", "%" },
{ 39, OT_UNDEF , ot_undef, "", "", "" },
{ 40, OT_UNDEF , ot_undef, "", "", "" },
{ 41, OT_UNDEF , ot_undef, "", "", "" },
{ 42, OT_UNDEF , ot_undef, "", "", "" },
{ 43, OT_UNDEF , ot_undef, "", "", "" },
{ 44, OT_UNDEF , ot_undef, "", "", "" },
{ 45, OT_UNDEF , ot_undef, "", "", "" },
{ 46, OT_UNDEF , ot_undef, "", "", "" },
{ 47, OT_UNDEF , ot_undef, "", "", "" },
{ 48, OT_READ , ot_s8s8, "TdhwSetUBTdhwSetLB", "DHW setpoint upper & lower bounds for adjustment", "°C" },
{ 49, OT_READ , ot_s8s8, "MaxTSetUBMaxTSetLB", "Max CH water setpoint upper & lower bounds for adjustment", "°C" },
{ 50, OT_READ , ot_s8s8, "HcratioUBHcratioLB", "OTC heat curve ratio upper & lower bounds for adjustment", "" },
{ 51, OT_UNDEF , ot_undef, "", "", "" },
{ 52, OT_UNDEF , ot_undef, "", "", "" },
{ 53, OT_UNDEF , ot_undef, "", "", "" },
{ 54, OT_UNDEF , ot_undef, "", "", "" },
{ 55, OT_UNDEF , ot_undef, "", "", "" },
{ 56, OT_RW , ot_f88, "TdhwSet", "DHW setpoint", "°C" },
{ 57, OT_RW , ot_f88, "MaxTSet", "MaxCH water setpoint", "°C" },
{ 58, OT_RW , ot_f88, "Hcratio", "OTC heat curve ratio", "°C" },
{ 59, OT_UNDEF , ot_undef, "", "", "" },
{ 60, OT_UNDEF , ot_undef, "", "", "" },
{ 61, OT_UNDEF , ot_undef, "", "", "" },
{ 62, OT_UNDEF , ot_undef, "", "", "" },
{ 63, OT_UNDEF , ot_undef, "", "", "" },
{ 64, OT_UNDEF , ot_undef, "", "", "" },
{ 65, OT_UNDEF , ot_undef, "", "", "" },
{ 66, OT_UNDEF , ot_undef, "", "", "" },
{ 67, OT_UNDEF , ot_undef, "", "", "" },
{ 68, OT_UNDEF , ot_undef, "", "", "" },
{ 69, OT_UNDEF , ot_undef, "", "", "" },
{ 70, OT_READ , ot_flag8flag8, "StatusVH", "Status Ventilation/Heat recovery", "" },
{ 71, OT_WRITE , ot_u8, "ControlSetpointVH", "Control setpoint V/H", "" },
{ 72, OT_READ , ot_flag8u8, "FaultFlagsCodeVH", "Fault Flags/Code V/H", "" },
{ 73, OT_READ , ot_u16, "DiagnosticCodeVH", "Diagnostic Code V/H", "" },
{ 74, OT_READ , ot_flag8u8, "ConfigMemberIDVH", "Config/Member ID V/H", "" },
{ 75, OT_READ , ot_f88, "OpenthermVersionVH", "OpenTherm Version V/H", "" },
{ 76, OT_READ , ot_u8u8, "VersionTypeVH", "Version & Type V/H", "" },
{ 77, OT_READ , ot_u8, "RelativeVentilation", "Relative Ventilation", "%" },
{ 78, OT_RW , ot_u8u8, "RelativeHumidityVH", "Relative Humidity V/H", "%" },
{ 79, OT_RW , ot_u16, "CO2LevelVH", "CO2 Level V/H", "ppm" },
{ 80, OT_READ , ot_f88, "SupplyInletTemperature", "Supply Inlet Temperature", "°C" },
{ 81, OT_READ , ot_f88, "SupplyOutletTemperature", "Supply Outlet Temperature", "°C" },
{ 82, OT_READ , ot_f88, "ExhaustInletTemperature", "Exhaust Inlet Temperature", "°C" },
{ 83, OT_READ , ot_f88, "ExhaustOutletTemperature", "Exhaust Outlet Temperature", "°C" },
{ 84, OT_READ , ot_u16, "ActualExhaustFanSpeed", "Actual Exhaust Fan Speed", "rpm" },
{ 85, OT_READ , ot_u16, "ActualInletFanSpeed", "Actual Inlet Fan Speed", "rpm" },
{ 86, OT_READ , ot_flag8flag8, "RemoteParameterSettingVH", "Remote Parameter Setting V/H", "" },
{ 87, OT_RW , ot_u8, "NominalVentilationValue", "Nominal Ventilation Value", "" },
{ 88, OT_READ , ot_u8u8, "TSPNumberVH", "TSP Number V/H", "" },
{ 89, OT_RW , ot_u8u8, "TSPEntryVH", "TSP Entry V/H", "" },
{ 90, OT_READ , ot_u8u8, "FaultBufferSizeVH", "Fault Buffer Size V/H", "" },
{ 91, OT_READ , ot_u8u8, "FaultBufferEntryVH", "Fault Buffer Entry V/H", "" },
{ 92, OT_UNDEF , ot_undef, "", "", "" },
{ 93, OT_UNDEF , ot_undef, "", "", "" },
{ 94, OT_UNDEF , ot_undef, "", "", "" },
{ 95, OT_UNDEF , ot_undef, "", "", "" },
{ 96, OT_UNDEF , ot_undef, "", "", "" },
{ 97, OT_UNDEF , ot_undef, "", "", "" },
{ 98, OT_READ , ot_u8u8, "RFstrengthbatterylevel", "RF strength and battery level", "" },
{ 99, OT_READ , ot_u8u8, "OperatingMode_HC1_HC2_DHW", "Operating Mode HC1, HC2/ DHW", "" },
{ 100, OT_READ , ot_flag8, "RoomRemoteOverrideFunction", "Function of manual and program changes in master and remote room setpoint.", "" },
{ 101, OT_UNDEF , ot_undef, "", "", "" },
{ 102, OT_UNDEF , ot_undef, "", "", "" },
{ 103, OT_UNDEF , ot_undef, "", "", "" },
{ 104, OT_UNDEF , ot_undef, "", "", "" },
{ 105, OT_UNDEF , ot_undef, "", "", "" },
{ 106, OT_UNDEF , ot_undef, "", "", "" },
{ 107, OT_UNDEF , ot_undef, "", "", "" },
{ 108, OT_UNDEF , ot_undef, "", "", "" },
{ 109, OT_READ , ot_u16, "ElectricityProducerStarts", "Electricity producer starts", "" },
{ 110, OT_READ , ot_u16, "ElectricityProducerHours", "Electricity producer hours", "" },
{ 111, OT_READ , ot_u16, "ElectricityProduction", "Electricity production", "" },
{ 112, OT_READ , ot_u16, "CumulativElectricityProduction", "Cumulativ Electricity production", "" },
{ 113, OT_UNDEF , ot_undef, "", "", "" },
{ 114, OT_UNDEF , ot_undef, "", "", "" },
{ 115, OT_READ , ot_u16, "OEMDiagnosticCode", "OEM-specific diagnostic/service code", "" },
{ 116, OT_RW , ot_u16, "BurnerStarts", "Nr of starts burner", "" },
{ 117, OT_RW , ot_u16, "CHPumpStarts", "Nr of starts CH pump", "" },
{ 118, OT_RW , ot_u16, "DHWPumpValveStarts", "Nr of starts DHW pump/valve", "" },
{ 119, OT_RW , ot_u16, "DHWBurnerStarts", "Nr of starts burner during DHW mode", "" },
{ 120, OT_RW , ot_u16, "BurnerOperationHours", "Nr of hours that burner is in operation (i.e. flame on)", "" },
{ 121, OT_RW , ot_u16, "CHPumpOperationHours", "Nr of hours that CH pump has been running", "" },
{ 122, OT_RW , ot_u16, "DHWPumpValveOperationHours", "Nr of hours that DHW pump has been running or DHW valve has been opened ", "" },
{ 123, OT_RW , ot_u16, "DHWBurnerOperationHours", "Nr of hours that burner is in operation during DHW mode", "" },
{ 124, OT_READ , ot_f88, "OpenThermVersionMaster", "Master Version OpenTherm Protocol Specification", "" },
{ 125, OT_READ , ot_f88, "OpenThermVersionSlave", "Slave Version OpenTherm Protocol Specification", "" },
{ 126, OT_READ , ot_u8u8, "MasterVersion", "Master product version number and type", "" },
{ 127, OT_READ , ot_u8u8, "SlaveVersion", "Slave product version number and type", "" },
{ 128, OT_UNDEF , ot_undef, "", "", "" },
{ 129, OT_UNDEF , ot_undef, "", "", "" },
{ 130, OT_UNDEF , ot_undef, "", "", "" },
{ 131, OT_RW , ot_u8u8, "RemehadFdUcodes", "Remeha dF-/dU-codes", "" },
{ 132, OT_READ , ot_u8u8, "RemehaServicemessage", "Remeha Servicemessage", "" },
{ 133, OT_READ , ot_u8u8, "RemehaDetectionConnectedSCU", "Remeha detection connected SCU’s", "" },
  // all data ids not defined above are reserved for future use
};
#define OT_MSGID_MAX 133
enum OpenThermStatus {
OT_NOT_INITIALIZED,
OT_READY,
OT_DELAY,
OT_REQUEST_SENDING,
OT_RESPONSE_WAITING,
OT_RESPONSE_START_BIT,
OT_RESPONSE_RECEIVING,
OT_RESPONSE_READY,
OT_RESPONSE_INVALID
};
/**
* Structure to hold Opentherm data packet content.
* Use f88(), u16() or s16() functions to get appropriate value of data packet according to id of message.
*/
struct OpenthermData {
byte type;
byte id;
byte valueHB;
byte valueLB;
/**
* @return float representation of data packet value
*/
float f88();
/**
* @param float number to set as value of this data packet
*/
void f88(float value);
/**
* @return unsigned 16b integer representation of data packet value
*/
uint16_t u16();
/**
* @param unsigned 16b integer number to set as value of this data packet
*/
void u16(uint16_t value);
/**
* @return signed 16b integer representation of data packet value
*/
int16_t s16();
/**
* @param signed 16b integer number to set as value of this data packet
*/
void s16(int16_t value);
};
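/*
 * Example (a sketch, not part of the original header): the f8.8 fields above are
 * signed fixed-point values with 8 integer bits and 8 fraction bits, so the
 * conversion helpers declared in OpenthermData could plausibly be implemented as:
 *
 *   float OpenthermData::f88() {
 *     return int16_t((valueHB << 8) | valueLB) / 256.0f;
 *   }
 *
 *   void OpenthermData::f88(float value) {
 *     int16_t v = int16_t(value * 256.0f);
 *     valueHB = (v >> 8) & 0xFF;
 *     valueLB = v & 0xFF;
 *   }
 *
 * For example, a Tboiler payload of 0x3A40 decodes to 58.25 °C.
 */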
/***************************************************************************
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to permit
* persons to whom the Software is furnished to do so, subject to the
* following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT
* OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
***************************************************************************/
|
/**
*
* Title 44 :
*
* Given an input string (s) and a pattern (p), implement wildcard pattern matching with support for '?' and '*'.
'?' Matches any single character.
'*' Matches any sequence of characters (including the empty sequence).
The matching should cover the entire input string (not partial).
Note:
s could be empty and contains only lowercase letters a-z.
p could be empty and contains only lowercase letters a-z, and characters like ? or *.
Example 1:
Input:
s = "aa"
p = "a"
Output: false
Explanation: "a" does not match the entire string "aa".
Example 2:
Input:
s = "aa"
p = "*"
Output: true
Explanation: '*' matches any sequence.
Example 3:
Input:
s = "cb"
p = "?a"
Output: false
Explanation: '?' matches 'c', but the second letter is 'a', which does not match 'b'.
*
* @author bramanarayan
* @date Jun 29, 2020
*/
public class WildCardMatching {
public static void main(String[] args) {
WildCardMatching solution = new WildCardMatching();
System.out.println(solution.isMatch("adceb", "*a*b"));
System.out.println(solution.isMatch("acdcb", "a*c?b"));
}
// DYNAMIC PROGRAMMING
public boolean isMatch(String s, String p) {
int n = s.length();
int m = p.length();
boolean[][] dp = new boolean[n + 1][m + 1];
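        // dp[i][j] is true when the first i characters of s match the first j characters of p;
        // the dp[0][j] initialization below handles patterns of leading '*'s that match the empty string.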
// DP - init
dp[0][0] = true;
for (int j = 1; j <= m; j++) {
if(p.charAt(j-1) == '*') {
dp[0][j] = dp[0][j-1];
}
}
// DP - runner
for (int i = 1; i <= n; i++) {
for (int j = 1; j <= m; j++) {
char sChar = s.charAt(i-1);
char pChar = p.charAt(j-1);
if(sChar == pChar || pChar == '?') {
dp[i][j] = dp[i-1][j-1];
} else if( pChar == '*') {
// 2 cases - either you ignore the * or you ignore the character
dp[i][j] = dp[i-1][j] | dp[i][j-1];
} else {
dp[i][j] = false;
}
}
}
return dp[n][m];
}
} |
export default {
primary: "#ff3860",
secondary: "#209cee",
active: "white",
background: "black",
foreground: "white",
};
|
def request(self, request_type, api, params=None, data=None, files=None, is_binary=False):
url = '{}{}'.format(self.base_url, api)
s = requests.Session()
s.mount('https://', SSLAdapter(ssl_version=ssl.PROTOCOL_TLSv1_2))
r = s.request(request_type, url, params=params, data=data, files=files, auth=self.get_credentials())
if is_binary:
return r.content
else:
return r.text |
def voltage_plot(data, sfreq, toffset, log_scale, title):
print("voltage")
t_axis = np.arange(0, len(data)) / sfreq + toffset
fig = plt.figure()
ax0 = fig.add_subplot(2, 1, 1)
ax0.plot(t_axis, data.real)
ax0.grid(True)
maxr = np.nanmax(data.real)
minr = np.nanmin(data.real)
if minr == 0.0 and maxr == 0.0:
minr = -1.0
maxr = 1.0
ax0.axis([t_axis[0], t_axis[len(t_axis) - 1], minr, maxr])
ax0.set_ylabel("I sample value (A/D units)")
ax1 = fig.add_subplot(2, 1, 2)
ax1.plot(t_axis, data.imag)
ax1.grid(True)
maxi = np.nanmax(data.imag)
mini = np.nanmin(data.imag)
if mini == 0.0 and maxi == 0.0:
mini = -1.0
maxi = 1.0
ax1.axis([t_axis[0], t_axis[len(t_axis) - 1], mini, maxi])
ax1.set_xlabel("time (seconds)")
ax1.set_ylabel("Q sample value (A/D units)")
ax1.set_title(title)
return fig |
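# Example usage (a sketch; the tone, sample rate and file name below are made up):
#
#   import numpy as np
#   data = np.exp(2j * np.pi * 1e3 * np.arange(10000) / 1e6).astype(np.complex64)
#   fig = voltage_plot(data, sfreq=1e6, toffset=0.0, log_scale=False, title="channel 0")
#   fig.savefig("voltage.png")
#
# Note that log_scale is accepted but not used by this routine.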
A one-star Marine general might not be the only attorney for Guantanamo Bay detainees taken into custody for defending the principle of a fair military commissions trial.
Two Pentagon civilians who recently quit defending Guantanamo detainee and accused U.S.S. Cole bomber Abd al-Rahim al-Nashiri, have been ordered to appear at a government facility in Alexandria, Virginia at 9 a.m. on Friday.
There, Air Force Colonel Vance Spath intends to compel the two lawyers, Rosa Eliades and Mary Spears, to continue defending Nashiri in the death-penalty case. But the Daily Beast has learned they intend to resist—deepening the latest crisis for the highly controversial post-9/11 military tribunals.
Spath on Wednesday took the unprecedented step of convicting the Chief of Defense Counsel in the Military Commissions, Marine Brig. Gen. John Baker, for refusing to reinstate Eliades, Spears and their colleague, Rick Kammen, as Nashiri’s lawyers. As first reported by the Miami Herald’s Carol Rosenberg, all three attorneys quit the death penalty case earlier this month after suspecting that the government had been spying on their communications, a violation of longstanding principles of fair trial in civilian courts.
Sources tell The Daily Beast that neither Eliades nor Spears intend to show up at the facility, setting up a dramatic clash that could see them similarly taken into custody. Baker has been sentenced to 21 days’ confinement in his Guantanamo Bay quarters.
Spath on Wednesday issued an order—currently under seal—for Kammen, Eliades and Spears to appear at the northern Virginia government facility, which hosts a secure videoconference to the Guantanamo war court. There, Spath desires for them to continue defending Nashiri in the death-penalty case.
At the Wednesday hearing that ended with Baker’s conviction, Spath said that Kammen, Eliades and Spears “remain attorneys of record in the military commission and are ordered to attend all sessions of this commission, unless properly excused by me or an appropriate federal court.”
Kammen is not a government employee, which protects him from arrest in this circumstance. But both Eliades and Spears are Defense Department civilian employees. Spath, some Pentagon colleagues fear, could order federal marshals to Eliades’ and Spears’ homes to bring them to the facility. If so, and if they refuse to continue defending Nashiri, Spath may order them confined, as he did Baker.
Neither Kammen nor Eliades responded to repeated requests for comment. The Daily Beast was unable to reach Spears.
“There is a possibility that they will be thrown in jail for defending their clients’ rights against government intrusion,” said Alka Pradhan, a Defense Department attorney also assigned to represent Guantanamo detainees.
“From the beginning, the military commissions were meant to ensure quick convictions and the defense has held them off for this long to try to ensure some sort of constitutional adherence. But this is where the real clash begins. The U.S. government is actually beginning to jail the people they have tasked with providing zealous representation of these individuals.”
Baker was convicted after arguing that he as Chief of Defense Counsel, and not Spath, the judge in a military commission, has the right to control the resignation of defense attorneys. Spath disagreed, and convicted Baker without permitting the general to speak in his own defense.
But on Thursday, Baker’s attorneys filed a habeas corpus petition in a D.C. federal court to argue that Spath illegally detained Baker, by exceeding his authority under the Military Commissions Act of 2009.
“As General Baker would have demonstrated to Respondent Spath had he been given the opportunity to be heard, the plain meaning of the governing statute precludes Judge Spath from unilaterally ordering anyone—and particularly a United States citizen—into confinement,” argued attorneys Barry Pollack and Addy Schmitt in court papers filed to Judge Royce Lamberth.
Lamberth held a hearing on late Thursday afternoon and is expected to rule on Baker’s confinement at 2 p.m. Friday. By then, however, Baker’s subordinates on the Guantanamo defense bar, Eliades and Spears, may already be in custody. |
<reponame>engimaxp/sketch_coding
import { SET, CLEAR } from './action_type';
import AccountData from '../../types/Account';
export type accountActions = Set | Clear;
interface Set {
type: SET;
account: AccountData;
}
export const set = (account: AccountData): Set => ({
type: SET,
account
});
interface Clear {
type: CLEAR;
}
export function clear(): Clear {
return {
type: CLEAR
};
}
|
<gh_stars>0
""" Calculate LCM """
class Solution_1:
"""Naive solution."""
def getLCM(self, a, b):
largest = max(a, b)
while True:
if largest % a == 0 and largest % b == 0:
break
largest += 1
return largest
class Solution_2:
""" Euclid solution. """
def getLCM(self, a, b):
temp_a = a
temp_b = b
while True:
if temp_b == 0:
break
temp_a, temp_b = temp_b, temp_a % temp_b
return a * b / temp_a
if __name__ == "__main__":
s_1 = Solution_1()
resp = s_1.getLCM(4, 6)
print("LCM of given number is ", resp)
resp = s_1.getLCM(12, 15)
print("LCM of given number is ", resp)
resp = s_1.getLCM(8, 12)
print("LCM of given number is ", resp)
print("Enhanced Euclid Solution")
s_2 = Solution_2()
resp = s_2.getLCM(4, 6)
print("LCM of given number is ", resp)
resp = s_2.getLCM(12, 15)
print("LCM of given number is ", resp)
resp = s_2.getLCM(8, 12)
print("LCM of given number is ", resp)
|
The potential for complete and durable response in nonglial primary brain tumors in children and young adults with enhanced chemotherapy delivery.
PURPOSE
Radiographic tumor response and survival were evaluated in the pediatric and young adult population with germ cell tumor, primary CNS lymphoma, or primitive neuroectodermal tumor receiving intra-arterial carboplatin- or methotrexate-based chemotherapy with osmotic blood-brain barrier disruption (BBBD).
PATIENTS AND METHODS
Thirty-four patients with histologically confirmed germ cell tumor (n = 9), primary CNS lymphoma (n = 9), or primitive neuroectodermal tumor (n = 16) were treated at the Oregon Health Sciences University from August 1981 through April 1995. Ages ranged from 1 to 30 years (mean, 18 years). Prior treatments included cranial radiation (n = 10) and chemotherapy (n = 18). All patients underwent extensive baseline neuropsychological evaluation and follow-up evaluation upon completion of the protocol, except for two patients who declined follow-up assessment.
RESULTS
Six hundred and forty-five BBBD procedures were performed with no mortality. Significant complications included one episode of tonsillar herniation with no neurologic sequelae, 4% incidence of seizures, and 3% incidence of sepsis or granulocytopenic fever. Ototoxicity was seen in 61% of patients who received carboplatin chemotherapy. Eighty-two percent of the patients had an objective response to treatment, including 62% with complete response and 20% with partial response. For most patients, cognitive functioning was maintained or improved at follow-up; this pattern was statistically significant. Three of the test scores for the seven patients who did not receive radiation therapy showed a cognitive decline of at least one standard deviation. Among the nine patients who received radiation therapy before or after BBBD chemotherapy, 11 test scores showed a decline in cognitive function at one standard deviation or more.
DISCUSSION
Durable responses were seen in patients with germ cell tumor and primary CNS lymphoma when treated with BBBD. Primitive neuroectodermal tumor requires post-chemotherapy radiotherapy for a durable response to be attained. Ototoxicity was a major form of toxicity in the patients who received carboplatin, but with the recent introduction of sodium thiosulfate, this problem has been markedly alleviated. Favorable cognitive outcomes appeared more likely for patients treated solely with BBBD chemotherapy and not with radiotherapy. Trends in the results for this sample are similar to those of previous research showing that radiotherapy is associated with cognitive decline. Current alternatives to enhanced drug delivery after BBBD include bone marrow transplantation; however, the increment in drug delivery is less, the number of courses is limited, and the morbidity and mortality are greater for bone marrow transplant than for BBBD. The current results suggest that in future trials, irradiation may not be needed in lymphoma and may not be necessary in some CNS germ cell tumors and that more focal radiotherapy should be further assessed in localized primitive neuroectodermal tumors. |
/**
 * Returns a string listing the elements contained in the given string
 * array.
*
* @param arr
* The array.
* @return A string representing the supplied array contents.
*/
private static String listStrings(String[] arr) {
if (arr == null) {
return "null";
} else {
StringBuffer b = new StringBuffer();
b.append('[');
for (int i = 0; i < arr.length; i++) {
if (i > 0) {
b.append(", ");
}
b.append(arr[i]);
}
b.append(']');
return b.toString();
}
} |
/**
 * Pre-filters a variable description string so that it can be formatted for generics:
 * commas inside the type portion (for example between generic type arguments) are
 * replaced with '*'. The input appears to consist of "name:type:tags" entries, so a
 * string such as "x:Map<K,V>:tag" (assumed format) would become "x:Map<K*V>:tag".
 *
 * @param string the raw variable description string
 * @return the filtered string with commas in type sections replaced by '*'
 */
public static String preFilterVariablesForGenerics(String string) {
if (string != null && !string.isEmpty()) {
int index = 0;
boolean done = false;
do {
int nameIndex = string.indexOf(":", index);
String name = string.substring(index, nameIndex);
int typeIndex = string.indexOf(":", nameIndex + 1);
if (typeIndex == -1) {
typeIndex = string.length();
done = true;
}
String type = string.substring(nameIndex + 1, typeIndex).replace(",", "*");
string = string.substring(0, nameIndex + 1) + type + string.substring(typeIndex);
int tagsIndex = string.indexOf(":", typeIndex + 1);
if (tagsIndex != -1) {
tagsIndex = string.indexOf(",", typeIndex + 1);
String tags = string.substring(typeIndex + 1, tagsIndex);
}
index = tagsIndex + 1;
if (tagsIndex == -1) {
done = true;
}
} while (!done);
}
return string;
} |
<gh_stars>0
/* This is a mst-gql generated file, don't modify it manually */
/* eslint-disable */
/* tslint:disable */
import { types } from "mobx-state-tree"
import { QueryBuilder } from "mst-gql"
import { ModelBase } from "./ModelBase"
import { RootStoreType } from "./index"
/**
* GustoCompanyLocationBase
* auto generated base class for the model GustoCompanyLocationModel.
*/
export const GustoCompanyLocationModelBase = ModelBase.named("GustoCompanyLocation")
.props({
__typename: types.optional(
types.literal("GustoCompanyLocation"),
"GustoCompanyLocation",
),
id: types.union(types.undefined, types.number),
version: types.union(types.undefined, types.string),
company_id: types.union(types.undefined, types.number),
phone_number: types.union(types.undefined, types.number),
street_1: types.union(types.undefined, types.string),
street_2: types.union(types.undefined, types.string),
city: types.union(types.undefined, types.string),
state: types.union(types.undefined, types.string),
zip: types.union(types.undefined, types.number),
country: types.union(types.undefined, types.string),
})
.views((self) => ({
get store() {
return self.__getStore<RootStoreType>()
},
}))
export class GustoCompanyLocationModelSelector extends QueryBuilder {
get id() {
return this.__attr(`id`)
}
get version() {
return this.__attr(`version`)
}
get company_id() {
return this.__attr(`company_id`)
}
get phone_number() {
return this.__attr(`phone_number`)
}
get street_1() {
return this.__attr(`street_1`)
}
get street_2() {
return this.__attr(`street_2`)
}
get city() {
return this.__attr(`city`)
}
get state() {
return this.__attr(`state`)
}
get zip() {
return this.__attr(`zip`)
}
get country() {
return this.__attr(`country`)
}
}
export function selectFromGustoCompanyLocation() {
return new GustoCompanyLocationModelSelector()
}
export const gustoCompanyLocationModelPrimitives = selectFromGustoCompanyLocation().version
.company_id.phone_number.street_1.street_2.city.state.zip.country
|
#![allow(clippy::manual_strip)]
#[allow(unused_imports)]
use std::fs;
use std::{error::Error, fmt, path::Path, str::FromStr};
/// Translate shaders to different formats
#[derive(argh::FromArgs, Debug, Clone)]
struct Args {
/// bitmask of the ValidationFlags to be used, use 0 to disable validation
#[argh(option)]
validate: Option<u8>,
/// what policy to use for index bounds checking for arrays, vectors, and
/// matrices.
///
/// May be `Restrict` (force all indices in-bounds), `ReadZeroSkipWrite`
/// (out-of-bounds indices read zeros, and don't write at all), or
/// `Unchecked` (generate the simplest code, and whatever happens, happens)
///
/// `Unchecked` is the default.
#[argh(option)]
index_bounds_check_policy: Option<BoundsCheckPolicyArg>,
/// what policy to use for index bounds checking for arrays, vectors, and
/// matrices, when they are stored in globals in the `storage` or `uniform`
/// storage classes.
///
/// Possible values are the same as for `index-bounds-check-policy`. If
/// omitted, defaults to the index bounds check policy.
#[argh(option)]
buffer_bounds_check_policy: Option<BoundsCheckPolicyArg>,
/// what policy to use for texture bounds checking.
///
/// Possible values are the same as for `index-bounds-check-policy`. If
/// omitted, defaults to the index bounds check policy.
#[argh(option)]
image_bounds_check_policy: Option<BoundsCheckPolicyArg>,
/// directory to dump the SPIR-V block context dump to
#[argh(option)]
block_ctx_dir: Option<String>,
/// the shader entrypoint to use when compiling to GLSL
#[argh(option)]
entry_point: Option<String>,
/// the shader profile to use, for example `es`, `core`, `es330`, if translating to GLSL
#[argh(option)]
profile: Option<GlslProfileArg>,
/// the shader model to use if targeting HLSL
///
/// May be `50`, `51`, or `60`
#[argh(option)]
shader_model: Option<ShaderModelArg>,
/// the input file
#[argh(positional)]
input: String,
/// the output file. If not specified, only validation will be performed
#[argh(positional)]
output: Vec<String>,
}
/// Newtype so we can implement [`FromStr`] for `BoundsCheckPolicy`.
#[derive(Debug, Clone, Copy)]
struct BoundsCheckPolicyArg(naga::back::BoundsCheckPolicy);
impl FromStr for BoundsCheckPolicyArg {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use naga::back::BoundsCheckPolicy;
Ok(Self(match s.to_lowercase().as_str() {
"restrict" => BoundsCheckPolicy::Restrict,
"readzeroskipwrite" => BoundsCheckPolicy::ReadZeroSkipWrite,
"unchecked" => BoundsCheckPolicy::Unchecked,
_ => {
return Err(format!(
"Invalid value for --index-bounds-check-policy: {}",
s
))
}
}))
}
}
/// Newtype so we can implement [`FromStr`] for `ShaderModel`.
#[derive(Debug, Clone)]
struct ShaderModelArg(naga::back::hlsl::ShaderModel);
impl FromStr for ShaderModelArg {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use naga::back::hlsl::ShaderModel;
Ok(Self(match s.to_lowercase().as_str() {
"50" => ShaderModel::V5_0,
"51" => ShaderModel::V5_1,
"60" => ShaderModel::V6_0,
_ => return Err(format!("Invalid value for --shader-model: {}", s)),
}))
}
}
/// Newtype so we can implement [`FromStr`] for [`naga::back::glsl::Version`].
#[derive(Clone, Debug)]
struct GlslProfileArg(naga::back::glsl::Version);
impl FromStr for GlslProfileArg {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use naga::back::glsl::Version;
Ok(Self(if s.starts_with("core") {
Version::Desktop(s[4..].parse().unwrap_or(330))
} else if s.starts_with("es") {
Version::Embedded(s[2..].parse().unwrap_or(310))
} else {
return Err(format!("Unknown profile: {}", s));
}))
}
}
#[derive(Default)]
struct Parameters {
validation_flags: naga::valid::ValidationFlags,
bounds_check_policies: naga::back::BoundsCheckPolicies,
entry_point: Option<String>,
spv_adjust_coordinate_space: bool,
spv_block_ctx_dump_prefix: Option<String>,
spv: naga::back::spv::Options,
msl: naga::back::msl::Options,
glsl: naga::back::glsl::Options,
hlsl: naga::back::hlsl::Options,
}
trait PrettyResult {
type Target;
fn unwrap_pretty(self) -> Self::Target;
}
fn print_err(error: &dyn Error) {
eprint!("{}", error);
let mut e = error.source();
if e.is_some() {
eprintln!(": ");
} else {
eprintln!();
}
while let Some(source) = e {
eprintln!("\t{}", source);
e = source.source();
}
}
impl<T, E: Error> PrettyResult for Result<T, E> {
type Target = T;
fn unwrap_pretty(self) -> T {
match self {
Result::Ok(value) => value,
Result::Err(error) => {
print_err(&error);
std::process::exit(1);
}
}
}
}
fn main() {
if let Err(e) = run() {
print_err(e.as_ref());
std::process::exit(1);
}
}
/// Error type for the CLI
#[derive(Debug, Clone)]
struct CliError(&'static str);
impl fmt::Display for CliError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl std::error::Error for CliError {}
fn run() -> Result<(), Box<dyn std::error::Error>> {
env_logger::init();
// Initialize default parameters
//TODO: read the parameters from RON?
let mut params = Parameters::default();
// Parse commandline arguments
let args: Args = argh::from_env();
let input_path = Path::new(&args.input);
let output_paths = args.output;
// Update parameters from commandline arguments
if let Some(bits) = args.validate {
params.validation_flags = naga::valid::ValidationFlags::from_bits(bits)
.ok_or(CliError("Invalid validation flags"))?;
}
if let Some(policy) = args.index_bounds_check_policy {
params.bounds_check_policies.index = policy.0;
}
params.bounds_check_policies.buffer = match args.buffer_bounds_check_policy {
Some(arg) => arg.0,
None => params.bounds_check_policies.index,
};
params.bounds_check_policies.image = match args.image_bounds_check_policy {
Some(arg) => arg.0,
None => params.bounds_check_policies.index,
};
params.spv_block_ctx_dump_prefix = args.block_ctx_dir;
params.entry_point = args.entry_point;
if let Some(version) = args.profile {
params.glsl.version = version.0;
}
if let Some(model) = args.shader_model {
params.hlsl.shader_model = model.0;
}
let module = match Path::new(&input_path)
.extension()
.ok_or(CliError("Input filename has no extension"))?
.to_str()
.ok_or(CliError("Input filename not valid unicode"))?
{
"spv" => {
let options = naga::front::spv::Options {
adjust_coordinate_space: params.spv_adjust_coordinate_space,
strict_capabilities: false,
block_ctx_dump_prefix: params
.spv_block_ctx_dump_prefix
.map(std::path::PathBuf::from),
};
let input = fs::read(input_path)?;
naga::front::spv::parse_u8_slice(&input, &options)?
}
"wgsl" => {
let input = fs::read_to_string(input_path)?;
let result = naga::front::wgsl::parse_str(&input);
match result {
Ok(v) => v,
Err(ref e) => {
e.emit_to_stderr(&input);
return Err(CliError("Could not parse WGSL").into());
}
}
}
ext @ "vert" | ext @ "frag" | ext @ "comp" => {
let input = fs::read_to_string(input_path)?;
let mut parser = naga::front::glsl::Parser::default();
parser
.parse(
&naga::front::glsl::Options {
stage: match ext {
"vert" => naga::ShaderStage::Vertex,
"frag" => naga::ShaderStage::Fragment,
"comp" => naga::ShaderStage::Compute,
_ => unreachable!(),
},
defines: Default::default(),
},
&input,
)
.unwrap_or_else(|errors| {
let filename = input_path.file_name().and_then(std::ffi::OsStr::to_str);
emit_glsl_parser_error(errors, filename.unwrap_or("glsl"), &input);
std::process::exit(1);
})
}
_ => return Err(CliError("Unknown input file extension").into()),
};
// validate the IR
let info = match naga::valid::Validator::new(
params.validation_flags,
naga::valid::Capabilities::all(),
)
.validate(&module)
{
Ok(info) => Some(info),
Err(error) => {
print_err(&error);
None
}
};
if output_paths.is_empty() {
if info.is_some() {
println!("Validation successful");
return Ok(());
} else {
std::process::exit(!0);
}
}
for output_path in output_paths {
match Path::new(&output_path)
.extension()
.ok_or(CliError("Output filename has no extension"))?
.to_str()
.ok_or(CliError("Output filename not valid unicode"))?
{
"txt" => {
use std::io::Write;
let mut file = fs::File::create(output_path)?;
writeln!(file, "{:#?}", module)?;
if let Some(ref info) = info {
writeln!(file)?;
writeln!(file, "{:#?}", info)?;
}
}
"metal" => {
use naga::back::msl;
let pipeline_options = msl::PipelineOptions::default();
let (msl, _) = msl::write_string(
&module,
info.as_ref().ok_or(CliError(
"Generating metal output requires validation to \
succeed, and it failed in a previous step",
))?,
¶ms.msl,
&pipeline_options,
)
.unwrap_pretty();
fs::write(output_path, msl)?;
}
"spv" => {
use naga::back::spv;
let pipeline_options_owned;
let pipeline_options = match params.entry_point {
Some(ref name) => {
let ep_index = module
.entry_points
.iter()
.position(|ep| ep.name == *name)
.expect("Unable to find the entry point");
pipeline_options_owned = spv::PipelineOptions {
entry_point: name.clone(),
shader_stage: module.entry_points[ep_index].stage,
};
Some(&pipeline_options_owned)
}
None => None,
};
params.spv.bounds_check_policies = params.bounds_check_policies;
let spv = spv::write_vec(
&module,
info.as_ref().ok_or(CliError(
"Generating SPIR-V output requires validation to \
succeed, and it failed in a previous step",
))?,
¶ms.spv,
pipeline_options,
)
.unwrap_pretty();
let bytes = spv
.iter()
.fold(Vec::with_capacity(spv.len() * 4), |mut v, w| {
v.extend_from_slice(&w.to_le_bytes());
v
});
fs::write(output_path, bytes.as_slice())?;
}
stage @ "vert" | stage @ "frag" | stage @ "comp" => {
use naga::back::glsl;
let pipeline_options = glsl::PipelineOptions {
entry_point: match params.entry_point {
Some(ref name) => name.clone(),
None => "main".to_string(),
},
shader_stage: match stage {
"vert" => naga::ShaderStage::Vertex,
"frag" => naga::ShaderStage::Fragment,
"comp" => naga::ShaderStage::Compute,
_ => unreachable!(),
},
};
let mut buffer = String::new();
let mut writer = glsl::Writer::new(
&mut buffer,
&module,
info.as_ref().ok_or(CliError(
"Generating glsl output requires validation to \
succeed, and it failed in a previous step",
))?,
¶ms.glsl,
&pipeline_options,
)
.unwrap_pretty();
writer.write()?;
fs::write(output_path, buffer)?;
}
"dot" => {
use naga::back::dot;
let output = dot::write(&module, info.as_ref())?;
fs::write(output_path, output)?;
}
"hlsl" => {
use naga::back::hlsl;
let mut buffer = String::new();
let mut writer = hlsl::Writer::new(&mut buffer, ¶ms.hlsl);
writer
.write(
&module,
info.as_ref().ok_or(CliError(
"Generating hsls output requires validation to \
succeed, and it failed in a previous step",
))?,
)
.unwrap_pretty();
fs::write(output_path, buffer)?;
}
"wgsl" => {
use naga::back::wgsl;
let wgsl = wgsl::write_string(
&module,
info.as_ref().ok_or(CliError(
"Generating wgsl output requires validation to \
succeed, and it failed in a previous step",
))?,
)
.unwrap_pretty();
fs::write(output_path, wgsl)?;
}
other => {
println!("Unknown output extension: {}", other);
}
}
}
Ok(())
}
use codespan_reporting::{
diagnostic::{Diagnostic, Label},
files::SimpleFile,
term::{
self,
termcolor::{ColorChoice, StandardStream},
},
};
pub fn emit_glsl_parser_error(errors: Vec<naga::front::glsl::Error>, filename: &str, source: &str) {
let files = SimpleFile::new(filename, source);
let config = codespan_reporting::term::Config::default();
let writer = StandardStream::stderr(ColorChoice::Auto);
for err in errors {
let mut diagnostic = Diagnostic::error().with_message(err.kind.to_string());
if let Some(range) = err.meta.to_range() {
diagnostic = diagnostic.with_labels(vec![Label::primary((), range)]);
}
term::emit(&mut writer.lock(), &config, &files, &diagnostic).expect("cannot write error");
}
}
|
// GenPodNameFromSts returns the name of a specific pod in a statefulset
func GenPodNameFromSts(vdb *vapi.VerticaDB, sts *appsv1.StatefulSet, podIndex int32) types.NamespacedName {
return types.NamespacedName{
Name: fmt.Sprintf("%s-%d", sts.GetObjectMeta().GetName(), podIndex),
Namespace: vdb.Namespace,
}
} |
// Copyright 2017 Xiaomi, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cache
import (
"sync"
"github.com/didi/nightingale/src/common/dataobj"
)
type JudgeItemMap struct {
sync.RWMutex
M map[string]*SafeLinkedList
}
func NewJudgeItemMap() *JudgeItemMap {
return &JudgeItemMap{M: make(map[string]*SafeLinkedList)}
}
func (j *JudgeItemMap) Get(key string) (*SafeLinkedList, bool) {
j.RLock()
defer j.RUnlock()
val, ok := j.M[key]
return val, ok
}
func (j *JudgeItemMap) Set(key string, val *SafeLinkedList) {
j.Lock()
defer j.Unlock()
j.M[key] = val
}
func (j *JudgeItemMap) Len() int {
j.RLock()
defer j.RUnlock()
return len(j.M)
}
func (j *JudgeItemMap) CleanStale(before int64) {
keys := []string{}
j.RLock()
for key, L := range j.M {
front := L.Front()
if front == nil {
continue
}
if front.Value.(*dataobj.JudgeItem).Timestamp < before {
keys = append(keys, key)
}
}
j.RUnlock()
j.BatchDelete(keys)
}
func (j *JudgeItemMap) BatchDelete(keys []string) {
count := len(keys)
if count == 0 {
return
}
j.Lock()
defer j.Unlock()
for i := 0; i < count; i++ {
delete(j.M, keys[i])
}
}
// NOTE: this big map is NOT thread-safe; it must be fully initialized up front (see InitHistoryBigMap)
var HistoryBigMap = make(map[string]*JudgeItemMap)
func InitHistoryBigMap() {
arr := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f"}
for i := 0; i < 16; i++ {
for j := 0; j < 16; j++ {
HistoryBigMap[arr[i]+arr[j]] = NewJudgeItemMap()
}
}
}
|
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <GL/glut.h>
#define SCRSIZE 7 // screen size factor
#define NUMP 20 // number of balls points
#define NUMS (NUMP+1) // number of springs
#define MASS 1.0f // default point mass
#define BALLRADIUS 0.516f // default radius of the ball
#define KS 755.0f // spring constant
#define KD 35.0f // spring damping constant
#define GY -10.0f
#define DT 0.005f
#define FINAL_PRESSURE 45.0f
/* Mouse */
#define windW 380
#define windH 380
int mousedown = 0;
float xMouse, yMouse;
int closest_i=1; // point closest to mouse index
/* Structure of the point */
typedef struct
{
float x,y; // position
float vx,vy; // velocity
float fx,fy; // force accumulator
} CPoint2d;
/* Structure of the springs */
typedef struct
{
int i,j; // points indexes
float length; // rest length
float nx,ny; // normal vector
} CSpring;
/* There we will keep an object */
float Pressure = 0;
CPoint2d myPoints[NUMP+1];
CSpring mySprings[NUMS+1];
/* Add new spring */
void AddSpring(int pi, int i, int j)
{
mySprings[pi].i = i; mySprings[pi].j = j;
mySprings[pi].length = sqrt( (myPoints[ i ].x - myPoints[ j ].x)*(myPoints[ i ].x - myPoints[ j ].x)
+ (myPoints[ i ].y - myPoints[ j ].y)*(myPoints[ i ].y - myPoints[ j ].y) );
}
/* Create Ball Object (points & springs) */
void CreateBall(void)
{
int i;
for(i=1 ; i <= NUMP ; ++i) // create points
{
myPoints[i].x = BALLRADIUS * sin( i * (2.0 * 3.14) / NUMP );
myPoints[i].y = BALLRADIUS * cos(i * (2.0 * 3.14) / NUMP ) + SCRSIZE/2;
}
for(i=1 ; i < NUMP ; ++i) // create springs between consecutive points
AddSpring(i,i,i+1);
AddSpring(NUMP,NUMP,1); // closing spring from the last point back to the first
}
/* Reshape Window */
void Reshape(int width, int height)
{
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluOrtho2D(-SCRSIZE, SCRSIZE, -SCRSIZE, SCRSIZE);
glMatrixMode(GL_MODELVIEW);
}
/* Force Calculation Subroutine */
void AccumulateForces(void)
{
int i;
float x1,x2,y1,y2; // positions of spring points p1, p2
float r12d; // length of p1 - p2 vector
float vx12; // vx1 - vx2
float vy12; // vy1 - vy2
float f; // hooke force value
float Fx,Fy; // force vector
float volume=0; // volume of the body
float pressurev; // pressure force value
/* gravity */
for(i=1 ; i <= NUMP ; ++i)
{
myPoints[i].fx = 0;
/* gravity is applied only once the ball has been pumped up to FINAL_PRESSURE */
myPoints[i].fy = MASS * GY * (Pressure - FINAL_PRESSURE >= 0);
/* user clicked (mouse spring) */
if(i==closest_i) // closest point only
if(mousedown) // if user clicked
{
x1 = myPoints[ i ].x; // get points coords.
y1 = myPoints[ i ].y;
x2 = xMouse;
y2 = yMouse;
r12d = sqrt ( (x1 - x2) *(x1 - x2) + (y1 - y2) * (y1 - y2) ); // distance to the mouse
// mouse spring: Hooke term plus damping along the point's velocity
f = (r12d - 2.2) * 22 + (myPoints[ i ].vx * (x1 - x2) + myPoints[ i ].vy * (y1 - y2)) * 54 / r12d;
// calculate spring force components
Fx = ((x1 - x2) / r12d ) * f;
Fy = ((y1 - y2) / r12d ) * f;
// accumulate force
myPoints[i].fx -= Fx;
myPoints[i].fy -= Fy;
}
}
/* spring force */
for(i=1 ; i <= NUMS-1 ; ++i) // only springs 1..NUMP are created in CreateBall()
{
x1 = myPoints[ mySprings[i].i ].x; y1 = myPoints[ mySprings[i].i ].y;
x2 = myPoints[ mySprings[i].j ].x; y2 = myPoints[ mySprings[i].j ].y;
r12d = sqrt ( (x1 - x2) *(x1 - x2) + (y1 - y2) * (y1 - y2) ); // square
// root of the distance
if(r12d != 0)
{
vx12 = myPoints[ mySprings[i].i ].vx - myPoints[ mySprings[i].j ].vx;
vy12 = myPoints[ mySprings[i].i ].vy - myPoints[ mySprings[i].j ].vy;
f = (r12d - mySprings[i].length) * KS + (vx12 * (x1 - x2) + vy12 * (y1 - y2)) * KD / r12d;
Fx = ((x1 - x2) / r12d ) * f;
Fy = ((y1 - y2) / r12d ) * f;
myPoints[ mySprings[i].i ].fx -= Fx;
myPoints[ mySprings[i].i ].fy -= Fy;
myPoints[ mySprings[i].j ].fx += Fx;
myPoints[ mySprings[i].j ].fy += Fy;
}
/* Calculate normal vectors to springs */
mySprings[i].nx = (y1 - y2) / r12d;
mySprings[i].ny = -(x1 - x2) / r12d;
}
/* pressure force */
/* Calculate Volume of the Ball (Gauss Theorem) */
for(i=1 ; i<=NUMS-1 ; i++)
{
x1 = myPoints[ mySprings[i].i ].x; y1 = myPoints[ mySprings[i].i ].y;
x2 = myPoints[ mySprings[i].j ].x; y2 = myPoints[ mySprings[i].j ].y;
r12d = sqrt ( (x1 - x2) *(x1 - x2) + (y1 - y2) * (y1 - y2) ); // square
// root of the distance
volume += 0.5 * fabs(x1 - x2) * fabs(mySprings[i].nx) * (r12d);
}
for(i=1 ; i<=NUMS-1 ; i++)
{
x1 = myPoints[ mySprings[i].i ].x; y1 = myPoints[ mySprings[i].i ].y;
x2 = myPoints[ mySprings[i].j ].x; y2 = myPoints[ mySprings[i].j ].y;
r12d = sqrt ( (x1 - x2) *(x1 - x2) + (y1 - y2) * (y1 - y2) ); // square
// root of the distance
pressurev = r12d * Pressure * (1.0f/volume);
myPoints[ mySprings[i].i ].fx += mySprings[ i ].nx * pressurev;
myPoints[ mySprings[i].i ].fy += mySprings[ i ].ny * pressurev;
myPoints[ mySprings[i].j ].fx += mySprings[ i ].nx * pressurev;
myPoints[ mySprings[i].j ].fy += mySprings[ i ].ny * pressurev;
}
}
/**
* Euler Integrator
*/
void IntegrateEuler()
{
int i;
float dry,drx; // dr for Euler integration
/* Euler Integrator (second Newton's law) */
for(i=1 ; i <= NUMP ; ++i)
{
/* x */
myPoints[i].vx = myPoints[i].vx + ( myPoints[i].fx / MASS )* DT;
drx = myPoints[i].vx * DT;
/* Boundaries X */
if(myPoints[i].x + drx < -SCRSIZE)
{
drx = -SCRSIZE - myPoints[ i ].x;
myPoints[i].vx = - 0.1 *myPoints[i].vx;
myPoints[i].vy = 0.95 *myPoints[i].vy;
}else
/* Boundaries X */
if(myPoints[i].x + drx > SCRSIZE)
{
drx = SCRSIZE - myPoints[ i ].x;
myPoints[i].vx = - 0.1 *myPoints[i].vx;
myPoints[i].vy = 0.95 *myPoints[i].vy;
}
myPoints[i].x = myPoints[i].x + drx;
/* y */
myPoints[i].vy = myPoints[i].vy + ( myPoints[i].fy / MASS ) * DT;
dry = myPoints[i].vy * DT;
/* Boundaries Y */
if(myPoints[i].y + dry < -SCRSIZE)
{
dry = -SCRSIZE - myPoints[ i ].y;
myPoints[i].vy = - 0.1 *myPoints[i].vy;
myPoints[i].vx = 0.95 *myPoints[i].vx;
}
/* Boundaries Y */
if(myPoints[i].y + dry > SCRSIZE)
{
dry = SCRSIZE - myPoints[ i ].y;
myPoints[i].vy = - 0.1 *myPoints[i].vy;
}
myPoints[i].y = myPoints[i].y + dry;
/* fast chek if outside */
if(myPoints[i].x > SCRSIZE)
myPoints[i].x = SCRSIZE;
if(myPoints[i].y > SCRSIZE)
myPoints[i].y = SCRSIZE;
if(myPoints[i].x < -SCRSIZE)
myPoints[i].x = -SCRSIZE;
if(myPoints[i].y < -SCRSIZE)
myPoints[i].y = -SCRSIZE;
}
}
/* Idle function */
void Idle(void)
{
AccumulateForces();
IntegrateEuler();
/**
* Update Pressure (pump an air into the ball)
*/
if(Pressure < FINAL_PRESSURE)
{
Pressure += FINAL_PRESSURE/300.0f;
printf("Pressure = %4.4f\n",Pressure);
}
glutPostRedisplay();
}
/* Visualization */
void Draw(void)
{
int i;
glClearColor(1,1,1,0);
glClear(GL_COLOR_BUFFER_BIT);
glBegin(GL_QUADS);
for(i = 1 ; i <= NUMS-1 ; i++)
{
glColor3f(0.8,0.4,0.4);
glVertex2f(myPoints[ mySprings[i].i ].x,myPoints[ mySprings[i].i ].y);
glVertex2f(myPoints[ mySprings[i].j ].x,myPoints[ mySprings[i].j ].y);
glVertex2f(myPoints[ NUMP - mySprings[i].i +1].x,myPoints[ NUMP - mySprings[i].i + 1].y);
glVertex2f(myPoints[ NUMP - mySprings[i].j +1].x,myPoints[ NUMP - mySprings[i].j + 1].y);
}
glEnd();
if(mousedown)
{
glColor3f(0,0,0);
glBegin(GL_LINES);
glVertex2f(xMouse,yMouse);
glVertex2f(myPoints[closest_i].x,myPoints[closest_i].y);
glEnd();
}
glutSwapBuffers();
}
/* Find point in the model which is closest to mouse click point */
void FindClosestPoint(void)
{
float dmin;
float mousepointd;
int i;
// find closest point
dmin = sqrt(pow(myPoints[closest_i].x - xMouse,2) + pow(myPoints[closest_i].y - yMouse,2));
for(i=1 ; i <= NUMP ; ++i)
{
mousepointd = sqrt( pow(myPoints[i].x - xMouse,2) +
pow(myPoints[i].y - yMouse,2));
if(mousepointd < dmin)
{
dmin = mousepointd;
closest_i = i;
}
}
}
/* Clicked mouse */
void Mouse (int button, int state, int x, int y)
{
if (button == GLUT_LEFT_BUTTON)
{
if (state == GLUT_DOWN)
{
mousedown = 1;
xMouse = SCRSIZE * 2.0 * ((float)x/(float)windW - 0.5);
yMouse = -SCRSIZE * 2.0 * ((float)y/(float)windH - 0.5);
}
else if (state == GLUT_UP)
{
FindClosestPoint();
mousedown = 0;
}
}
}
/* Mouse Motion */
void Motion (int x, int y)
{
if (mousedown)
{
xMouse = SCRSIZE * 2.0 * ((float)x/(float)windW - 0.5);
yMouse = -SCRSIZE * 2.0 * ((float)y/(float)windH - 0.5);
glutPostRedisplay();
}
}
/* Main Function */
int main(int argc, char **argv)
{
glutInit(&argc,argv);
glutInitWindowPosition( 150, 150 ); glutInitWindowSize( windW, windH );
glutInitDisplayMode( GLUT_RGB | GLUT_DOUBLE );
glutCreateWindow("Soft Body 2D v1.0 by <NAME>");
CreateBall();
glutReshapeFunc(Reshape);
glutDisplayFunc(Draw);
glutIdleFunc(Idle);
glutMouseFunc(Mouse);
glutMotionFunc(Motion);
glutMainLoop();
return 0;
}
|
package control
import (
"github.com/mandelsoft/spiff/dynaml"
"github.com/mandelsoft/spiff/yaml"
)
func init() {
dynaml.RegisterControl("if", flowIf, "then", "else")
}
func flowIf(ctx *dynaml.ControlContext) (yaml.Node, bool) {
if node, ok := dynaml.ControlReady(ctx, false); !ok {
return node, false
}
if ctx.Value.Value() == nil {
if e := ctx.Option("else"); e != nil {
return dynaml.ControlValue(ctx, e)
}
return yaml.UndefinedNode(dynaml.NewNode(nil, ctx)), true
}
switch v := ctx.Value.Value().(type) {
case bool:
if v {
if e := ctx.Option("then"); e != nil {
return dynaml.ControlValue(ctx, e)
}
return yaml.UndefinedNode(dynaml.NewNode(nil, ctx)), true
} else {
if e := ctx.Option("else"); e != nil {
return dynaml.ControlValue(ctx, e)
}
return yaml.UndefinedNode(dynaml.NewNode(nil, ctx)), true
}
default:
return dynaml.ControlIssue(ctx, "invalid condition value type: %s", dynaml.ExpressionType(v))
}
}
|
<reponame>smacke/xterm.js
export enum MessageType {
REQUEST_ANIMATION_FRAME,
ANIMATION_SUCCESSFUL,
POPUP_OPENED
}
export interface ChromeMessage {
type: MessageType,
payload?: any
}
|
def read_single_image(self, path):
with Image.open(path) as i:
i.load()
return i |