content
stringlengths 10
4.9M
|
---|
<gh_stars>0
use reqwest::StatusCode;
use thiserror::Error;
/// Errors produced by the Phishtank API client.
#[derive(Debug, Error)]
pub enum PhishtankError {
    /// A required URL argument was not supplied by the caller.
    #[error("You must supply a URL to use this function.")]
    InvalidUrl,
    /// HTTP 509: the server exceeded its administrator-configured bandwidth
    /// limit (see the `From<StatusCode>` impl below).
    #[error("The server has exceeded the bandwidth specified by the server administrator.")]
    BandwidthExceeded,
    /// Fallback for any failure not covered by the other variants.
    #[error("Unknown error.")]
    Unknown,
    /// Underlying I/O failure; also produced from I/O-category JSON errors
    /// via the manual `From<serde_json::Error>` impl.
    #[error("{0}")]
    Io(#[from] std::io::Error),
    /// JSON (de)serialization failure. No `#[from]` here on purpose: the
    /// manual `From` impl routes I/O-category JSON errors to `Io` instead.
    #[error("{0}")]
    Json(serde_json::Error),
    /// Error bubbled up from the `reqwest` HTTP client.
    #[error("{0}")]
    Reqwest(#[from] reqwest::Error),
}
/// Routes a `serde_json` error to the matching variant: errors classified as
/// I/O become `PhishtankError::Io`, while every parse-level category
/// (syntax, data, EOF) is kept as `PhishtankError::Json`.
impl From<serde_json::Error> for PhishtankError {
    fn from(err: serde_json::Error) -> PhishtankError {
        use serde_json::error::Category;
        if let Category::Io = err.classify() {
            PhishtankError::Io(err.into())
        } else {
            PhishtankError::Json(err)
        }
    }
}
/// Maps an HTTP status code onto the matching `PhishtankError`.
impl From<StatusCode> for PhishtankError {
    fn from(s: StatusCode) -> PhishtankError {
        // Compare the numeric code rather than its string rendering: cheaper
        // and immune to formatting/typo mistakes in the match arm.
        match s.as_u16() {
            // 509 Bandwidth Limit Exceeded (non-standard status).
            509 => PhishtankError::BandwidthExceeded,
            _ => PhishtankError::Unknown,
        }
    }
}
|
# Reads k and then four input lines forming a grid; prints YES when no digit
# occurs more than 2*k times in the grid ('.' cells are ignored), NO otherwise.
k = int(input())
counts = {}
for _ in range(4):
    for ch in input():
        if ch != '.':
            counts[ch] = counts.get(ch, 0) + 1
print('YES' if all(c <= 2 * k for c in counts.values()) else 'NO')
import * as React from "react";
import { getClassName } from "../../helpers/getClassName";
import { classNames } from "../../lib/classNames";
import { usePlatform } from "../../hooks/usePlatform";
import { HasComponent, HasRootRef } from "../../types";
import { hasReactNode } from "../../lib/utils";
import { useAdaptivity } from "../../hooks/useAdaptivity";
import "./FormField.css";
/** Allowed values of the `mode` prop of {@link FormField}. */
export const FormFieldMode = {
  default: "default",
  plain: "plain",
} as const;
/** Public props of {@link FormField}. */
export interface FormFieldProps {
  /**
   * Adds an icon on the left.
   *
   * Recommendations:
   *
   * - Use the following icon sizes: `12` | `16` | `20` | `24` | `28`.
   * - Use [IconButton](#/IconButton) if you need a clickable icon.
   */
  before?: React.ReactNode;
  /**
   * Adds an icon on the right.
   *
   * Recommendations:
   *
   * - Use the following icon sizes: `12` | `16` | `20` | `24` | `28`.
   * - Use [IconButton](#/IconButton) if you need a clickable icon.
   */
  after?: React.ReactNode;
  /** Visual mode; becomes the `FormField--<mode>` CSS modifier. */
  mode?: keyof typeof FormFieldMode;
}
/**
 * Internal props of {@link FormField}: the public `FormFieldProps` plus
 * arbitrary HTML attributes, a root-ref getter and a custom root component.
 */
interface FormFieldOwnProps
  extends React.AllHTMLAttributes<HTMLElement>,
    HasRootRef<HTMLElement>,
    HasComponent,
    FormFieldProps {
  /** Adds the disabled CSS modifier and suppresses the hover modifier. */
  disabled?: boolean;
}
/**
 * Wrapper that renders a form control with optional side icons, a border
 * element and hover/disabled styling.
 *
 * @see https://vkcom.github.io/VKUI/#/FormField
 */
export const FormField: React.FC<FormFieldOwnProps> = ({
  Component = "div",
  children,
  getRootRef,
  before,
  after,
  disabled,
  mode = FormFieldMode.default,
  ...restProps
}: FormFieldOwnProps) => {
  const platform = usePlatform();
  const { sizeY } = useAdaptivity();
  const [hovered, setHovered] = React.useState(false);

  const onMouseEnter = (e: MouseEvent) => {
    e.stopPropagation();
    setHovered(true);
  };

  const onMouseLeave = (e: MouseEvent) => {
    e.stopPropagation();
    setHovered(false);
  };

  // The hover modifier is never shown while the field is disabled.
  const showHover = !disabled && hovered;

  return (
    <Component
      role="presentation"
      {...restProps}
      ref={getRootRef}
      onMouseEnter={onMouseEnter}
      onMouseLeave={onMouseLeave}
      vkuiClass={classNames(
        getClassName("FormField", platform),
        `FormField--${mode}`,
        `FormField--sizeY-${sizeY}`,
        disabled && "FormField--disabled",
        showHover && "FormField--hover"
      )}
    >
      {hasReactNode(before) && (
        <div role="presentation" vkuiClass="FormField__before">
          {before}
        </div>
      )}
      {children}
      {hasReactNode(after) && (
        <div role="presentation" vkuiClass="FormField__after">
          {after}
        </div>
      )}
      <div role="presentation" vkuiClass="FormField__border" />
    </Component>
  );
};
|
def process(self, index: int, count: int, item: T) -> Outcome:
    """Handle ``item`` (the ``index``-th of ``count`` items) and report an outcome.

    Stub: intentionally does nothing and returns ``None``.
    NOTE(review): ``T`` and ``Outcome`` are declared elsewhere in the project;
    the intended contract could not be confirmed from this chunk.
    """
    pass
A new Christian-based consumer organization that ranks companies based on their friendliness toward people of faith released a Christmas “Best in Class” list, hoping to have a major impact on where people spend their money this holiday season.
Topping the list are brands like Chick-fil-A, Dillards, Hobby Lobby, JetBlue, Pepsi, and Wal-Mart.
The organization behind the campaign, Faith Driven Consumer, coined the effort a Christmas “buycott” instead of a “boycott,” hoping to positively reward companies that they believe have friendly policies towards people of faith, instead of punishing those that don’t.
This year’s best in class list was broken down into 26 different categories. The winners include:
Airline: JetBlue
Automotive: Honda
Banking: BB&T
Beauty: Revlon
Beverage: Pepsi
Cable/Satellite: Dish Network
Computer: Samsung
Consumer Goods: Reckitt Benckiser
Craft Store: Hobby Lobby
Credit Card: MasterCard
Department Stores: Dillard’s
Electronics Retail: hhgregg
Fast Food: Chick-fil-A
Food: Tyson Foods
Health Insurance: Humana
Home Improvement: Menard’s
Hotels: Hilton
Insurance & Investments: Geico
Mail Delivery: USPS
Mega Retail: Walmart
Office Supply: Office Depot
Pharmaceutical: Astra Zeneca
Restaurant: Cracker Barrel
Retail Pharmacy: Walgreens
Travel Booking: Priceline
Wireless Carrier: Sprint
The #ChristmasBUYcott campaign was launched to empower Christian consumers to demonstrate their influence in the marketplace and encourage brands to celebrate people of faith “as an essential color in the American rainbow of diversity.”
“Are you frustrated that brands are ignoring your values?” Faith Driven Consumer asked in a press release. “This Christmas, you can finally do something about it!”
In order to do so, Faith Driven Consumer created a Faith Equality Index that rates more than 330 major brands across different product categories based on their “faith-compatibility” according to a biblical worldview.
Scoring on the index is based on a number of issues, such as whether companies use the word “Christmas” in seasonal advertising; whether they support pro-life views on abortion, embryonic stem cell research, and euthanasia; and whether they’ve enacted a workplace non-discrimination policy that includes protections for faith-driven consumers and employees.
“Faith Driven Consumers are one of the largest minority market segments in the U.S. and have $30 billion to spend this Christmas. Empowered with the Faith Equality Index, they represent an economic power bloc significant enough to create big winners in the marketplace,” said Chris Stone, a certified brand strategist and founder of Faith Driven Consumer. “A BUYcott offers our community a positive, proactive way to effect change, and we will absolutely use this opportunity to make progress toward our goal of achieving equality for Faith Driven Consumers.”
As part of the national #ChristmasBUYcott campaign, the organization is asking participants to do four things:
STEP 1: PETITION — Sign the petition asking brands to engage Faith Driven Consumers
STEP 2: ENGAGE — Contact “Best in Class” brands in a show of support
STEP 3: RECRUIT — Recruit family and friends to do the same
STEP 4: LEAD — Gain Faith Driven Consumer Plus Points and compete in leaderboards
To learn more about the campaign, visit ChristmasBuycott.com. |
/**
* Aggregates results of {@link HiveSchemaCollectorProcessor}.
* @since 0.3.1
*/
public class HiveSchemaAggregatorProcessor implements BatchProcessor {
static final Logger LOG = LoggerFactory.getLogger(HiveSchemaAggregatorProcessor.class);
private static final Location PATH_BASE = Location.of("etc/hive-schema"); //$NON-NLS-1$
/**
* The output path of input schema information.
*/
public static final Location PATH_INPUT = PATH_BASE.append("input.json"); //$NON-NLS-1$
/**
* The output path of output schema information.
*/
public static final Location PATH_OUTPUT = PATH_BASE.append("output.json"); //$NON-NLS-1$
@Override
public void process(Context context, BatchReference source) throws IOException {
if (Util.isAvailable(context.getClassLoader()) == false) {
return;
}
LOG.debug("aggregating hive schema information: {}", source.getBatchId());
List<HiveInputInfo> inputs = new ArrayList<>();
List<HiveOutputInfo> outputs = new ArrayList<>();
for (JobflowReference jobflow : source.getJobflows()) {
HiveIoAttribute attr = load(location -> context.findResourceFile(jobflow, location));
inputs.addAll(attr.getInputs());
outputs.addAll(attr.getOutputs());
}
inputs = Util.normalize(inputs);
outputs = Util.normalize(outputs);
LOG.debug("generating Hive input table schema: {} entries", inputs.size());
try (OutputStream stream = context.addResourceFile(PATH_INPUT)) {
Persistent.write(HiveInputInfo.class, inputs, stream);
}
LOG.debug("generating Hive output table schema: {} entries", outputs.size());
try (OutputStream stream = context.addResourceFile(PATH_OUTPUT)) {
Persistent.write(HiveOutputInfo.class, outputs, stream);
}
}
/**
* Collects the saved Hive I/O information.
* @param provider the resource provider
* @return Hive I/O information
* @throws IOException if I/O error was occurred while loading information
*/
public static HiveIoAttribute load(ResourceProvider provider) throws IOException {
List<HiveInputInfo> inputs = collect(provider,
HiveInputInfo.class, HiveSchemaCollectorProcessor.PATH_INPUT);
List<HiveOutputInfo> outputs = collect(provider,
HiveOutputInfo.class, HiveSchemaCollectorProcessor.PATH_OUTPUT);
return new HiveIoAttribute(inputs, outputs);
}
private static <T extends TableInfo.Provider> List<T> collect(
ResourceProvider provider,
Class<T> type, Location location) throws IOException {
try (InputStream input = provider.find(location)) {
if (input == null) {
return null;
}
return Persistent.read(type, input);
}
}
/**
* Provides resources.
* @since 0.5.0
*/
@FunctionalInterface
public interface ResourceProvider {
/**
* Returns a resource on the given location.
* @param location the target location
* @return the resource contents, or {@code null} if it is not found
* @throws IOException if failed to open the resource
*/
InputStream find(Location location) throws IOException;
}
} |
// Package aoc2015 contains all of the logic for the Advent of Code 2015 puzzles. The main packages are autogenerated;
// if a day is added, add the day number to the build tag in `doc.go` and then run `go generate` to regenerate the main
// packages.
package aoc2015
//go:generate go run gen.go 1
|
// NotEmpty asserts that got is not empty.
func NotEmpty(t testingT, got interface{}) bool {
t.Helper()
if isEmpty(got) {
t.Error(formatError(getArg(1)(), "was empty"))
return false
}
return true
} |
18.06.2013 @ 14:50 Posted by Jesper Ralbjerg
The Blanco Pro Cycling Team has completed the selection for the Tour de France. Bauke Mollema is the definitive leader of the team while Robert Gesink will be given a free role even though he is expected to support Mollema in the mountains. Mollema will strive for a top spot in the overall classification. A stage win is another primary goal for the team.
“We have geared our team around Bauke”, stated Sports Director Nico Verhoeven. “We are looking to score a good classification with him. We are aiming for the top ten but in fact we’re simply trying for the maximum achievable. We could say that we’d like Bauke to finish fourth, but if he then finishes fourth when second was within grasp, I’d say we’d be less satisfied. But if it turns out that he is seventh and that is the best result we could have expected, then we’d be happy with that.”
In the run-up to the Tour, Verhoeven saw a promising Mollema. “In the Tour of Switzerland, Bauke showed that he was ready in the way he rode and led the team. On top of a good GC, we would like to take a stage win.”
Mollema will be supported in France by his teammates Lars Boom, Laurens ten Dam, Robert Gesink, Tom Leezer, Lars Petter Nordhaug, Bram Tankink, Sep Vanmarcke and Maarten Wynants.
Verhoeven: “Robert will have a relatively open role, but will be expected to take care of Bauke in the high mountain stages together with Laurens. Nordhaug and Boom are dangerous stage pirates but also understand what it means to be a domestique. Bram is our road captain and Maarten is an all-rounder who has proven his worth in the big tours. Sep and Tom formed a good team with Bauke in the Tour de Suisse and kept him out of trouble in the run-up to the mountains.”
The Tour de France starts Saturday 29 June.
Blanco line-up for the Tour de France:
Lars Boom, Laurens ten Dam, Robert Gesink, Tom Leezer, Bauke Mollema, Lars Petter Nordhaug, Bram Tankink, Sep Vanmarcke and Maarten Wynants |
def exclude_subgraph(graph: Graph, sub_vtx) -> GraphView:
    """Return a filtered view of ``graph`` hiding the subgraph collected from ``sub_vtx``.

    The view keeps every vertex that is *not* in the collected subgraph --
    plus ``sub_vtx`` itself, whose keep-bit is set explicitly below.
    NOTE(review): keeping the subgraph root while dropping the rest of its
    subgraph looks deliberate, but could not be confirmed from this chunk.
    """
    # Vertices of the subgraph rooted at sub_vtx (helper defined elsewhere).
    sub_set = collect_subgraph_vertices(graph, sub_vtx)
    # Per-vertex boolean property used as the keep-filter; entries presumably
    # start out False (graph-tool default) -- TODO confirm.
    filter_prop = graph.new_vertex_property("bool")
    # Keep the subgraph root itself.
    filter_prop.a[int(sub_vtx)] = True
    # Keep everything outside the collected subgraph.
    for vtx in graph.vertices():
        if vtx not in sub_set:
            filter_prop.a[int(vtx)] = True
    # GraphView masks (does not copy) vertices whose filter bit stayed False.
    out_graph = GraphView(graph, vfilt=filter_prop)
    return out_graph
# -*- coding: utf-8 -*-
# Reads a count followed by that many integers on one line and prints the
# difference between the largest and smallest value.
count = int(input())
tokens = input().split()
for idx in range(count):
    tokens[idx] = int(tokens[idx])
print(max(tokens) - min(tokens))
<reponame>potherca-contrib/hyperhyperspace-core
// Re-export the persistent-store API surface from its implementation module.
export { Store, LoadResults, StoredOpHeader } from './store/Store';
Baptismal font in the new Indianapolis Temple of the Church of Jesus Christ of Latter-day Saints (Photo: The Church of Jesus Christ of Latter-day Saints)
CARMEL, Ind. -- With the opening of Indiana's first regional Mormon temple in Carmel, some out-of-state Christian opponents say they will set up a tent across the street to declare what they think is "the truth" about the Church of Jesus Christ of Latter-day Saints.
Utah-based Tri-Grace Ministries stirred controversy last week when it took out a 12-page advertising supplement in community newspapers in Carmel, Westfield and Zionsville that warn about the "deception" of the religion.
Now the group says it plans to hold an event to depict the multiple wives of Mormonism's founder, Joseph Smith, as the Mormon temple holds open house tours for the public. (The Mormon church officially renounced polygamy in 1890.)
"In a sense, we believe they are not telling people the truth," said Tri-Grace Ministries founder Chip Thompson. "They're not following what Jesus taught. It concerns us that Christians are being deceived by their message."
The tension dates back ages between Mormons and some Christians who reject Mormonism as non-Christian. But some say the strife is out of place in Carmel and see Tri-Grace Ministries' mission trip as a hateful attack on a minority religion.
"We don't treat people, who have been seriously picked on, in that way," said Chris Duckworth, pastor of New Joy Lutheran Church in Westfield. "We know better than that. We are better than that."
He was irked by the advertisement in the weekly Current publications in Carmel, Westfield and Zionsville, viewing it as a disparaging diatribe against Mormonism — and not a constructive way to talk about different faiths.
"If my Mormon brother says he's a Christian, I'll accept his word at that," Duckworth said. "We might disagree in theology or have differences in opinion about what core tenets of Christianity are. But I'm not going to deny him the right to call himself what he or she in their faith believe they are."
Mormon church officials declined to comment for this story.
The temple opens for tours Friday through Aug. 8, before its dedication Aug. 23. The temple is expected to serve 30,000 Mormons throughout Indiana and eastern Illinois, according to the LDS Church, many of whom previously had to travel to Louisville for special ceremonies.
Among Mormons, temples are considered to be a place where someone can make a commitment to God, different from local houses of worship where services are held.
Tri-Grace Ministries has targeted what it sees as "malice in Mormonism," calling temple rituals "secretive," criticizing its polygamist past, questioning the truth of the Book of Mormon and connecting Mormon symbols with paganism.
The organization's mission, as stated in federal tax documents, is to "produce believers who are grounded in biblical truth and who will make a lasting contribution to the kingdom of God. We are committed to challenging heretical documents and leading as many unbelievers as possible into a personal saving relationship with Jesus Christ."
"We're not attacking," Thompson said. "We're just informing people."
"We love Mormon people, and we really want the best for them," he added. "We're here to encourage Mormons to stop following what we believe is a false prophet — Joseph Smith — and we want them to follow Jesus Christ."
Hostility toward Mormons goes back "to the very beginning," when the Book of Mormon was published in 1830, said Peter Thuesen, a religious studies professor at Indiana University-Purdue University Indianapolis who teaches a course called Mormonism and American Culture.
It angered some, he said, who felt the Book of Mormon added to Scripture. Opponents of Mormonism often feel distrust toward holy rituals conducted privately in temples, Thuesen said, and Mormons also deal with lingering baggage from the long-discontinued practice of polygamy.
"They're still tarred with this image of doing something that's somehow antithetical to American values," Thuesen said, "when in reality Mormons are probably among the most conservative Americans right now when it comes to the so-called traditional family."
Thuesen said dismissing Mormons as non-Christian is disrespectful.
"Part of the reason I take them at their word that they are Christian is they believe that Jesus is the savior of the world," Thuesen said, "just like other Christians do."
Protestors often crop up at Mormon temple openings, said Sara Patterson, an associate professor of theological studies at Hanover College.
"For these groups, a temple opening is a sign of something being lost," she said. "They often imagine America as this battleground, and they are fighting to save it as a Christian nation."
Read or Share this story: http://usat.ly/1IZ2wyc |
/*
* (c) Copyright <NAME>, Germany. Contact: <EMAIL>.
*
* Created on 20.05.2006
*/
package net.finmath.montecarlo.interestrate.models.covariance;
import java.util.Map;
import net.finmath.functions.LinearAlgebra;
import net.finmath.stochastic.RandomVariable;
import net.finmath.stochastic.Scalar;
import net.finmath.time.TimeDiscretization;
/**
 * Simple correlation model given by R, where R is a factor reduced matrix
 * (see {@link net.finmath.functions.LinearAlgebra#factorReduction(double[][], int)}) created from the
 * \( n \) Eigenvectors of \( \tilde{R} \) belonging to the \( n \) largest non-negative Eigenvalues,
 * where \( \tilde{R} = \tilde{\rho}_{i,j} \) and \[ \tilde{\rho}_{i,j} = \exp( -\max(a,0) | T_{i}-T_{j} | ) \]
 *
 * For a more general model featuring three parameters see {@link LIBORCorrelationModelThreeParameterExponentialDecay}.
 *
 * @see net.finmath.functions.LinearAlgebra#factorReduction(double[][], int)
 * @see LIBORCorrelationModelThreeParameterExponentialDecay
 *
 * @author <NAME>
 * @version 1.0
 */
public class LIBORCorrelationModelExponentialDecay extends LIBORCorrelationModel {

    private static final long serialVersionUID = -8218022418731667531L;

    // Model parameters.
    private final int numberOfFactors;      // requested number of factors n
    private final double a;                 // decay rate (floored to 0 in initialize)
    private final boolean isCalibrateable;  // whether "a" is a free calibration parameter

    // Derived state, populated once by initialize(): the factor-reduced
    // correlation matrix R and its factor loadings F with R = F F^T.
    private double[][] correlationMatrix;
    private double[][] factorMatrix;

    /**
     * Create a correlation model with an exponentially decaying correlation structure and the given number of factors.
     *
     * @param timeDiscretization Simulation time dicretization. Not used.
     * @param liborPeriodDiscretization TenorFromArray time discretization, i.e., the \( T_{i} \)'s.
     * @param numberOfFactors Number \( n \) of factors to be used.
     * @param a Decay parameter. Should be positive. Negative values will be floored to 0.
     * @param isCalibrateable If true, the parameter will become a free parameter in a calibration.
     */
    public LIBORCorrelationModelExponentialDecay(final TimeDiscretization timeDiscretization, final TimeDiscretization liborPeriodDiscretization, final int numberOfFactors, final double a, final boolean isCalibrateable) {
        super(timeDiscretization, liborPeriodDiscretization);

        this.numberOfFactors = numberOfFactors;
        this.a = a;
        this.isCalibrateable = isCalibrateable;

        initialize(numberOfFactors, a);
    }

    /**
     * Convenience constructor creating a non-calibrateable model
     * (equivalent to the five-argument constructor with {@code isCalibrateable = false}).
     *
     * @param timeDiscretization Simulation time dicretization. Not used.
     * @param liborPeriodDiscretization TenorFromArray time discretization, i.e., the \( T_{i} \)'s.
     * @param numberOfFactors Number \( n \) of factors to be used.
     * @param a Decay parameter. Should be positive. Negative values will be floored to 0.
     */
    public LIBORCorrelationModelExponentialDecay(final TimeDiscretization timeDiscretization, final TimeDiscretization liborPeriodDiscretization, final int numberOfFactors, final double a) {
        super(timeDiscretization, liborPeriodDiscretization);

        this.numberOfFactors = numberOfFactors;
        this.a = a;
        isCalibrateable = false;

        initialize(numberOfFactors, a);
    }

    @Override
    public LIBORCorrelationModelExponentialDecay getCloneWithModifiedParameter(final RandomVariable[] parameter) {
        // Non-calibrateable models expose no free parameter to modify.
        if(!isCalibrateable) {
            return this;
        }

        // NOTE(review): the four-argument constructor sets
        // isCalibrateable = false, so the clone produced here is no longer
        // calibrateable -- confirm this is the intended behavior.
        return new LIBORCorrelationModelExponentialDecay(getTimeDiscretization(), getLiborPeriodDiscretization(), numberOfFactors, parameter[0].doubleValue());
    }

    @Override
    public Object clone() {
        // Full clone preserving all parameters, including calibrateability.
        return new LIBORCorrelationModelExponentialDecay(getTimeDiscretization(), getLiborPeriodDiscretization(), numberOfFactors, a, isCalibrateable);
    }

    @Override
    public double getFactorLoading(final int timeIndex, final int factor, final int component) {
        // The correlation structure is time-homogeneous: timeIndex is ignored.
        return factorMatrix[component][factor];
    }

    @Override
    public double getCorrelation(final int timeIndex, final int component1, final int component2) {
        // The correlation structure is time-homogeneous: timeIndex is ignored.
        return correlationMatrix[component1][component2];
    }

    @Override
    public int getNumberOfFactors() {
        // Effective number of factors after factor reduction (may differ from
        // the requested numberOfFactors field).
        return factorMatrix[0].length;
    }

    private void initialize(final int numberOfFactors, double a) {
        /*
         * Create instantaneous correlation matrix
         */

        // Negative values of a do not make sense.
        a = Math.max(a, 0);

        correlationMatrix = new double[getLiborPeriodDiscretization().getNumberOfTimeSteps()][getLiborPeriodDiscretization().getNumberOfTimeSteps()];
        for(int row=0; row<correlationMatrix.length; row++) {
            for(int col=0; col<correlationMatrix[row].length; col++) {
                // Exponentially decreasing instantaneous correlation
                correlationMatrix[row][col] = Math.exp(-a * Math.abs(getLiborPeriodDiscretization().getTime(row)-getLiborPeriodDiscretization().getTime(col)));
            }
        }

        /*
         * Perform a factor decomposition (and reduction if numberOfFactors < correlationMatrix.columns())
         */
        factorMatrix = LinearAlgebra.factorReduction(correlationMatrix, numberOfFactors);

        // Rebuild the correlation matrix implied by the (possibly reduced)
        // factor loadings so getCorrelation() is consistent with
        // getFactorLoading(): R = F F^T, with the diagonal forced to 1.
        for(int component1=0; component1<factorMatrix.length; component1++) {
            for(int component2=0; component2<component1; component2++) {
                double correlation = 0.0;
                for(int factor=0; factor<factorMatrix[component1].length; factor++) {
                    correlation += factorMatrix[component1][factor] * factorMatrix[component2][factor];
                }
                correlationMatrix[component1][component2] = correlation;
                correlationMatrix[component2][component1] = correlation;
            }
            correlationMatrix[component1][component1] = 1.0;
        }
    }

    @Override
    public RandomVariable[] getParameter() {
        if(!isCalibrateable) {
            return null;
        }

        // The only free parameter of this model is the decay rate a.
        final RandomVariable[] parameter = new RandomVariable[1];

        parameter[0] = new Scalar(a);

        return parameter;
    }

    @Override
    public LIBORCorrelationModel getCloneWithModifiedData(final Map<String, Object> dataModified) {
        // Start from the current state and selectively override with any
        // entries present in dataModified.
        TimeDiscretization timeDiscretization = this.getTimeDiscretization();
        TimeDiscretization liborPeriodDiscretization = this.getLiborPeriodDiscretization();
        int numberOfFactors = this.getNumberOfFactors();
        double a = this.a;
        boolean isCalibrateable = this.isCalibrateable;

        if(dataModified != null) {
            timeDiscretization = (TimeDiscretization)dataModified.getOrDefault("timeDiscretization", timeDiscretization);
            liborPeriodDiscretization = (TimeDiscretization)dataModified.getOrDefault("liborPeriodDiscretization", liborPeriodDiscretization);
            numberOfFactors = (int)dataModified.getOrDefault("numberOfFactors", numberOfFactors);
            a = (double)dataModified.getOrDefault("a", a);
            isCalibrateable = (boolean)dataModified.getOrDefault("isCalibrateable", isCalibrateable);
        }

        final LIBORCorrelationModel newModel = new LIBORCorrelationModelExponentialDecay(timeDiscretization, liborPeriodDiscretization, numberOfFactors, a, isCalibrateable);
        return newModel;
    }
}
|
/**
* Perform the read on the given region,
* a local read is preferred over a network read
*
* @param region
* @throws StorageManagerException
* @throws InterruptedException
* @throws BBoxDBException
*/
private void perfomReadOnRegion(final DistributionRegion region)
throws InterruptedException, BBoxDBException {
final BBoxDBInstance localInstance = ZookeeperClientFactory.getLocalInstanceName();
try {
if(region.getSystems().contains(localInstance)) {
if(performLocalRead()) {
readDataLocal(region);
localReads++;
}
} else {
if(performNetworkRead()) {
readDataNetwork(region);
networkReads++;
}
}
} catch (StorageManagerException | ZookeeperException e) {
throw new BBoxDBException(e);
}
} |
/**
* Mueve archivo
*
* @param sourceFile
* @param destinationFile
*/
public static void fileMove(String sourceFile, String destinationFile) {
try {
File inFile = new File(sourceFile);
File outFile = new File(destinationFile);
FileInputStream in = new FileInputStream(inFile);
FileOutputStream out = new FileOutputStream(outFile);
int c;
while ((c = in.read()) != -1)
out.write(c);
in.close();
out.close();
}
catch (Exception e) {
System.err.println("Hubo un error de entrada/salida!!!");
e.printStackTrace();
}
} |
Varieties of Capitalism, Growth Regimes, and Structural Change in Eurozone Core and Peripheral Countries: Germany as a Role Model for Portugal?
Abstract This article aims at analyzing the structural changes that occurred in the Portuguese economy after the 2010/2013 sovereign debt crisis, compared with what occurred in Germany and using the current debate surrounding the new reform of the Eurozone as a backdrop. We thus intend to find out whether a peripheral southern economy like Portugal and the Eurozone’s core economy (Germany) have become closer and, if so, what that means in terms of the sustainability of the Eurozone as a set of different economies sharing the same currency. The study is framed in an institutional political economy approach (Varieties of Capitalism) and a macroeconomic post-Keynesian perspective (demand-led growth regimes), as well as a structural analysis of economic complexity and product specialization of these contrasting economies. |
<reponame>pncalbl/coin-exchange<gh_stars>0
package com.pncalbl.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.conditions.query.LambdaQueryChainWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.pncalbl.domain.Notice;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.pncalbl.domain.WorkIssue;
import com.pncalbl.mapper.WorkIssueMapper;
import com.pncalbl.service.WorkIssueService;
/**
 * Customer work-issue (ticket) service implementation.
 *
 * @author pncalbl
 * @date 2021/10/15 20:37
 * @e-mail <EMAIL>
 * @description customer work-issue service implementation
 **/
@Service
public class WorkIssueServiceImpl extends ServiceImpl<WorkIssueMapper, WorkIssue> implements WorkIssueService {

    /**
     * Conditional paged query of work issues.
     *
     * @param page      paging parameters
     * @param status    issue status filter (ignored when {@code null})
     * @param startTime start of the creation-time range (inclusive)
     * @param endTime   end date of the creation-time range (inclusive)
     * @return the requested page of data
     */
    @Override
    public Page<WorkIssue> findByPage(Page<WorkIssue> page, Integer status, String startTime, String endTime) {
        return page(page, new LambdaQueryWrapper<WorkIssue>()
                .eq(status != null, WorkIssue::getStatus, status)
                // Extend the end date to the last second of that day. The space
                // before the time component is required ("yyyy-MM-dd HH:mm:ss");
                // the original concatenation produced e.g. "2021-10-1523:59:59",
                // which is not a parseable timestamp. Assumes endTime is a
                // date-only string -- TODO confirm the caller's format.
                .between(!StringUtils.isEmpty(startTime) && !StringUtils.isEmpty(endTime),
                        WorkIssue::getCreated, startTime, endTime + " 23:59:59"));
    }
}
|
// Copyright (c) 2019 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use serde_json::{self,
json,
Value as Json};
use std::{fs::{create_dir_all,
read_to_string,
File},
io::Write,
path::Path};
use toml::Value;
use crate::{common::{templating::TemplateRenderer,
ui::{Status,
UIWriter,
UI}},
error::Result};
/// Renders the template at `template_path` against configuration data and,
/// depending on the flags, prints the result and/or writes it into
/// `render_dir` under the template's file name.
///
/// Data sources are merged in increasing precedence:
/// `default.toml` < `user.toml` < mock-data overrides.
///
/// # Errors
/// Returns an error if any file cannot be read, any TOML/JSON fails to
/// parse, rendering fails, or the output file cannot be written.
#[allow(clippy::too_many_arguments)]
pub fn start(ui: &mut UI,
             template_path: &Path,
             default_toml_path: &Path,
             user_toml_path: Option<&Path>,
             mock_data_path: Option<&Path>,
             print: bool,
             render: bool,
             render_dir: &Path,
             quiet: bool)
             -> Result<()> {
    // Strip the file name out of our passed template
    let file_name = Path::new(template_path.file_name().expect("valid template file"));
    if !quiet {
        ui.begin(format!("Rendering: {} into: {} as: {}",
                         template_path.display(),
                         render_dir.display(),
                         file_name.display()))?;
        ui.br()?;
    }
    // read our template from file (`template_path` is already a `&Path`,
    // no extra borrow needed)
    let template = read_to_string(template_path)?;
    // start from an empty JSON object and merge each data source into it
    let mut data = json!({});
    if !quiet {
        // import default.toml values, convert to JSON
        ui.begin(format!("Importing default.toml: {}", &default_toml_path.display()))?;
    }
    // we should always have a default.toml, would be nice to "autodiscover" based on package name,
    // for now assume we're working in the plan dir if --default-toml not passed
    let default_toml = read_to_string(default_toml_path)?;
    // merge default into data struct
    merge(&mut data, toml_to_json(&default_toml)?);
    // import user.toml values (if provided), convert to JSON
    let user_toml = match user_toml_path {
        Some(path) => {
            if !quiet {
                // print helper message, maybe only print if '--verbose'? how?
                ui.begin(format!("Importing user.toml: {}", path.display()))?;
            }
            read_to_string(path)?
        }
        None => String::new(),
    };
    // merge user values into data struct (overriding defaults)
    merge(&mut data, toml_to_json(&user_toml)?);
    // read mock data if provided
    let mock_data = match mock_data_path {
        Some(path) => {
            if !quiet {
                // print helper message, maybe only print if '--verbose'? how?
                ui.begin(format!("Importing override file: {}", path.display()))?;
            }
            read_to_string(path)?
        }
        // return an empty json block if '--mock-data' isn't defined.
        // this allows us to merge an empty JSON block
        None => "{}".to_string(),
    };
    // merge mock data into data (highest precedence)
    merge(&mut data, serde_json::from_str(&mock_data)?);
    // create a template renderer and register our template under its own
    // content as the name
    let mut renderer = TemplateRenderer::new();
    renderer.register_template_string(&template, &template)
            .expect("Could not register template content");
    // render the merged data in our template
    let rendered_template = renderer.render(&template, &data)?;
    if print {
        if !quiet {
            ui.br()?;
            ui.warn(format!("###======== Rendered template: {}",
                            &template_path.display()))?;
        }
        println!("{}", rendered_template);
        if !quiet {
            ui.warn(format!("========### End rendered template: {}",
                            &template_path.display()))?;
        }
    }
    if render {
        // Render our template file (`render_dir`/`file_name` are `&Path`
        // already; the previous `&` borrows were redundant)
        create_with_template(ui, render_dir, file_name, &rendered_template, quiet)?;
    }
    if !quiet {
        ui.br()?;
    }
    Ok(())
}
fn toml_to_json(cfg: &str) -> Result<Json> {
let toml_value = cfg.parse::<Value>()?;
let toml_string = serde_json::to_string(&toml_value)?;
let json = serde_json::from_str(&format!(r#"{{ "cfg": {} }}"#, &toml_string))?;
Ok(json)
}
/// Deep-merges `b` into `a`.
///
/// When both values are JSON objects, the entries of `b` are merged key by
/// key (recursing into nested objects); in every other case `b` simply
/// replaces `a`.
fn merge(a: &mut Json, b: Json) {
    match b {
        Json::Object(src) if a.is_object() => {
            let dst = a.as_object_mut().expect("is_object checked above");
            for (key, value) in src {
                merge(dst.entry(key).or_insert(Json::Null), value);
            }
        }
        other => *a = other,
    }
}
fn create_with_template(ui: &mut UI,
render_dir: &std::path::Path,
file_name: &std::path::Path,
template: &str,
quiet: bool)
-> Result<()> {
let path = Path::new(&render_dir).join(&file_name);
if !quiet {
ui.status(Status::Creating, format!("file: {}", path.display()))?;
}
create_dir_all(render_dir)?;
// Write file to disk
File::create(path).and_then(|mut file| file.write(template.as_bytes()))?;
Ok(())
}
|
import os

# Expose the current working directory via PYTHONPATH for any child
# processes (assumes the script is launched from the repository root --
# TODO confirm).
os.environ['PYTHONPATH'] = os.getcwd()
# Cap OpenMP threads at 8 unless the caller already configured a value.
# Set before importing OpenMP-backed libraries below so it takes effect.
if 'OMP_NUM_THREADS' not in os.environ:
    os.environ['OMP_NUM_THREADS'] = '8'

import argparse
import cv2
import numpy as np
def parse_args():
    """Build and evaluate the command-line interface for this script."""
    ap = argparse.ArgumentParser(description='Infer from images in a directory')
    # Required positional arguments.
    ap.add_argument('image_dir', help='directory of input images')
    ap.add_argument('config', help='test config file path')
    ap.add_argument('checkpoint', help='checkpoint file')
    # Optional arguments.
    ap.add_argument('--calib', help='calibration matrix in .csv format',
                    default='demo/calib.csv')
    ap.add_argument('--show-dir',
                    help='directory where painted images will be saved (default: $IMAGE_DIR/show)')
    ap.add_argument('--gpu-ids', type=int, nargs='+', help='ids of gpus to use')
    ap.add_argument('--score-thr', type=float, default=0.3,
                    help='bbox score threshold')
    ap.add_argument('--extra', action='store_true',
                    help='whether to draw extra results (covariance and reconstruction)')
    ap.add_argument('--cov-scale', type=float, default=5.0,
                    help='covariance scaling factor')
    return ap.parse_args()
def main():
    """Run inference on every image in a directory and save visualizations.

    Resolves the GPU to use, builds the detector from the config and
    checkpoint given on the command line, then runs inference on each
    image and writes the painted result into the show directory.
    """
    args = parse_args()
    # GPU selection precedence: explicit flag > CUDA_VISIBLE_DEVICES > GPU 0.
    if args.gpu_ids is not None:
        gpu_ids = args.gpu_ids
    elif 'CUDA_VISIBLE_DEVICES' in os.environ:
        gpu_ids = [int(i) for i in os.environ['CUDA_VISIBLE_DEVICES'].split(',')]
    else:
        gpu_ids = [0]
    os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(str(i) for i in gpu_ids)
    if len(gpu_ids) != 1:
        raise NotImplementedError('multi-gpu testing is not yet supported')
    # Imported here, after CUDA_VISIBLE_DEVICES is set, so the CUDA context
    # these modules create sees the restricted device list.
    from mmcv.utils import track_iter_progress
    from monorun.apis import init_detector, inference_detector

    image_dir = args.image_dir
    assert os.path.isdir(image_dir)
    show_dir = args.show_dir
    if show_dir is None:
        show_dir = os.path.join(image_dir, 'show')
    os.makedirs(show_dir, exist_ok=True)
    calib = np.loadtxt(args.calib, delimiter=',').astype(np.float32)
    # build the model from a config file and a checkpoint file
    model = init_detector(args.config, args.checkpoint, device='cuda:0')
    if args.extra:
        model.test_cfg['rcnn']['debug'] = True
    img_list = sorted(os.listdir(image_dir))
    for img_filename in track_iter_progress(img_list):
        img = cv2.imread(os.path.join(image_dir, img_filename))
        result = inference_detector(model, img, calib)
        model.show_result(
            img,
            calib,
            result,
            score_thr=args.score_thr,
            cov_scale=args.cov_scale,
            out_file=os.path.join(show_dir, img_filename))
# Entry point when executed as a standalone script.
if __name__ == '__main__':
    main()
|
// GetStoreAppInstanceKey builds the store key for app instances under a
// namespace. With an empty appInstanceName it returns the collection key
// "/ns/<namespace>/appinstances"; otherwise it returns the key of the
// single instance "/ns/<namespace>/appinstances/<appInstanceName>".
func GetStoreAppInstanceKey(namespace string, appInstanceName string) string {
    if appInstanceName != "" {
        return fmt.Sprintf("/ns/%s/appinstances/%s", namespace, appInstanceName)
    }
    return fmt.Sprintf("/ns/%s/appinstances", namespace)
}
RICHMOND, Va. (AP) - Virginia Gov. Terry McAuliffe's budget proposal includes a $6 million pilot program that would provide women with long-term contraception such as intrauterine devices at no cost.
The Daily Press reports McAuliffe also pushed unsuccessfully last year for the program, which is modeled after a project in Colorado tied to decreases in the teen pregnancy rate and number of abortions.
The funding would come from a federal Temporary Assistance for Needy Families block grant. Republicans in the General Assembly cut the proposal from the budget last year, and its chances this year are uncertain.
A statement Thursday from Lt. Gov. Ralph Northam's office said the program would provide contraception at no cost and conduct outreach to increase awareness regarding the different contraceptive options that are available. |
# Find the smallest integer >= N none of whose decimal digits is disliked.
# Input line 1: "N K"; line 2: K space-separated disliked digits.
from itertools import count

input_1st = input()
input_2nd = input()
N, K = input_1st.split(" ")
# A set makes the per-digit membership test O(1).
disliked = set(input_2nd.split(" "))
# itertools.count removes the original hard-coded 100000 search cap, which
# silently printed nothing whenever the answer was at or above the cap.
for ans in count(int(N)):
    if all(digit not in disliked for digit in str(ans)):
        print(ans)
        break
|
// UnPack used to unpack the greeting packet.
func (g *Greeting) UnPack(payload []byte) error {
var err error
buf := common.ReadBuffer(payload)
if g.protocolVersion, err = buf.ReadU8(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting protocol-version failed")
}
if g.serverVersion, err = buf.ReadStringNUL(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting server-version failed")
}
if g.ConnectionID, err = buf.ReadU32(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting onnection-id failed")
}
var salt8 []byte
if salt8, err = buf.ReadBytes(8); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting auth-plugin-data-part-1 failed")
}
copy(g.Salt, salt8)
if err = buf.ReadZero(1); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting filler failed")
}
var capLower uint16
if capLower, err = buf.ReadU16(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting capability-flags failed")
}
if g.Charset, err = buf.ReadU8(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting charset failed")
}
if g.status, err = buf.ReadU16(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting status-flags failed")
}
var capUpper uint16
if capUpper, err = buf.ReadU16(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting capability-flags-upper failed")
}
g.Capability = (uint32(capUpper) << 16) | (uint32(capLower))
var SLEN byte
if (g.Capability & sqldb.CLIENT_PLUGIN_AUTH) > 0 {
if SLEN, err = buf.ReadU8(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting auth-plugin-data length failed")
}
} else {
if err = buf.ReadZero(1); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting zero failed")
}
}
if err = buf.ReadZero(10); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting reserved failed")
}
if (g.Capability & sqldb.CLIENT_SECURE_CONNECTION) > 0 {
read := int(SLEN) - 8
if read < 0 || read > 13 {
read = 13
}
var salt2 []byte
if salt2, err = buf.ReadBytes(read); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting salt2 failed")
}
if salt2[read-1] != 0 {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting auth-plugin-data-part-2 is not 0 terminated")
}
copy(g.Salt[8:], salt2[:read-1])
}
if (g.Capability & sqldb.CLIENT_PLUGIN_AUTH) > 0 {
if g.authPluginName, err = buf.ReadStringNUL(); err != nil {
return sqldb.NewSQLError(sqldb.ER_MALFORMED_PACKET, "extracting greeting auth-plugin-name failed")
}
}
return nil
} |
// This file was generated automatically

/// A physical keyboard key.
///
/// `#[repr(u8)]` fixes each variant's discriminant to its 0-based position
/// in declaration order, so new variants should only be appended.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Key {
    // Modifier keys (left-hand side unless prefixed with `Right`).
    CapsLock,
    Shift,
    Control,
    Alt,
    Meta,
    ControlOrMeta,
    RightShift,
    RightControl,
    RightAlt,
    RightMeta,
    RightControlOrMeta,
    Fn,
    // Editing and whitespace keys.
    ReturnOrEnter,
    Escape,
    DeleteOrBackspace,
    ForwardDelete,
    Tab,
    Space,
    // Punctuation keys.
    Minus,
    Equal,
    LeftBracket,
    RightBracket,
    Backslash,
    Semicolon,
    Quote,
    Grave,
    Comma,
    Period,
    Slash,
    // Navigation keys.
    UpArrow,
    RightArrow,
    DownArrow,
    LeftArrow,
    PageUp,
    PageDown,
    Home,
    End,
    // Letter keys.
    A,
    B,
    C,
    D,
    E,
    F,
    G,
    H,
    I,
    J,
    K,
    L,
    M,
    N,
    O,
    P,
    Q,
    R,
    S,
    T,
    U,
    V,
    W,
    X,
    Y,
    Z,
    // Number-row digits (prefixed `N` to form valid identifiers).
    N0,
    N1,
    N2,
    N3,
    N4,
    N5,
    N6,
    N7,
    N8,
    N9,
    // Numeric keypad.
    Numpad0,
    Numpad1,
    Numpad2,
    Numpad3,
    Numpad4,
    Numpad5,
    Numpad6,
    Numpad7,
    Numpad8,
    Numpad9,
    NumpadClear,
    NumpadEquals,
    NumpadDivide,
    NumpadMultiply,
    NumpadMinus,
    NumpadPlus,
    NumpadEnter,
    NumpadDecimal,
    // Function keys.
    F1,
    F2,
    F3,
    F4,
    F5,
    F6,
    F7,
    F8,
    F9,
    F10,
    F11,
    F12,
    // Media keys.
    FastForward,
    Rewind,
    PlayPause,
    VolumeUp,
    VolumeDown,
    Mute,
}
impl Key {
    /// Number of variants in [`Key`]; valid `u8` discriminants are `0..COUNT`.
    pub const COUNT: u8 = 109;
}
impl std::str::FromStr for Key {
    type Err = ();
    /// Parses the all-lowercase, unpunctuated spelling of a key name
    /// (e.g. `"capslock"`, `"f1"`, `"numpadplus"`, `"0"`); any other
    /// string yields `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        use Key::*;
        match s {
            "capslock" => Ok(CapsLock),
            "shift" => Ok(Shift),
            "control" => Ok(Control),
            "alt" => Ok(Alt),
            "meta" => Ok(Meta),
            "controlormeta" => Ok(ControlOrMeta),
            "rightshift" => Ok(RightShift),
            "rightcontrol" => Ok(RightControl),
            "rightalt" => Ok(RightAlt),
            "rightmeta" => Ok(RightMeta),
            "rightcontrolormeta" => Ok(RightControlOrMeta),
            "fn" => Ok(Fn),
            "returnorenter" => Ok(ReturnOrEnter),
            "escape" => Ok(Escape),
            "deleteorbackspace" => Ok(DeleteOrBackspace),
            "forwarddelete" => Ok(ForwardDelete),
            "tab" => Ok(Tab),
            "space" => Ok(Space),
            "minus" => Ok(Minus),
            "equal" => Ok(Equal),
            "leftbracket" => Ok(LeftBracket),
            "rightbracket" => Ok(RightBracket),
            "backslash" => Ok(Backslash),
            "semicolon" => Ok(Semicolon),
            "quote" => Ok(Quote),
            "grave" => Ok(Grave),
            "comma" => Ok(Comma),
            "period" => Ok(Period),
            "slash" => Ok(Slash),
            "uparrow" => Ok(UpArrow),
            "rightarrow" => Ok(RightArrow),
            "downarrow" => Ok(DownArrow),
            "leftarrow" => Ok(LeftArrow),
            "pageup" => Ok(PageUp),
            "pagedown" => Ok(PageDown),
            "home" => Ok(Home),
            "end" => Ok(End),
            "a" => Ok(A),
            "b" => Ok(B),
            "c" => Ok(C),
            "d" => Ok(D),
            "e" => Ok(E),
            "f" => Ok(F),
            "g" => Ok(G),
            "h" => Ok(H),
            "i" => Ok(I),
            "j" => Ok(J),
            "k" => Ok(K),
            "l" => Ok(L),
            "m" => Ok(M),
            "n" => Ok(N),
            "o" => Ok(O),
            "p" => Ok(P),
            "q" => Ok(Q),
            "r" => Ok(R),
            "s" => Ok(S),
            "t" => Ok(T),
            "u" => Ok(U),
            "v" => Ok(V),
            "w" => Ok(W),
            "x" => Ok(X),
            "y" => Ok(Y),
            "z" => Ok(Z),
            "0" => Ok(N0),
            "1" => Ok(N1),
            "2" => Ok(N2),
            "3" => Ok(N3),
            "4" => Ok(N4),
            "5" => Ok(N5),
            "6" => Ok(N6),
            "7" => Ok(N7),
            "8" => Ok(N8),
            "9" => Ok(N9),
            "numpad0" => Ok(Numpad0),
            "numpad1" => Ok(Numpad1),
            "numpad2" => Ok(Numpad2),
            "numpad3" => Ok(Numpad3),
            "numpad4" => Ok(Numpad4),
            "numpad5" => Ok(Numpad5),
            "numpad6" => Ok(Numpad6),
            "numpad7" => Ok(Numpad7),
            "numpad8" => Ok(Numpad8),
            "numpad9" => Ok(Numpad9),
            "numpadclear" => Ok(NumpadClear),
            "numpadequals" => Ok(NumpadEquals),
            "numpaddivide" => Ok(NumpadDivide),
            "numpadmultiply" => Ok(NumpadMultiply),
            "numpadminus" => Ok(NumpadMinus),
            "numpadplus" => Ok(NumpadPlus),
            "numpadenter" => Ok(NumpadEnter),
            "numpaddecimal" => Ok(NumpadDecimal),
            "f1" => Ok(F1),
            "f2" => Ok(F2),
            "f3" => Ok(F3),
            "f4" => Ok(F4),
            "f5" => Ok(F5),
            "f6" => Ok(F6),
            "f7" => Ok(F7),
            "f8" => Ok(F8),
            "f9" => Ok(F9),
            "f10" => Ok(F10),
            "f11" => Ok(F11),
            "f12" => Ok(F12),
            "fastforward" => Ok(FastForward),
            "rewind" => Ok(Rewind),
            "playpause" => Ok(PlayPause),
            "volumeup" => Ok(VolumeUp),
            "volumedown" => Ok(VolumeDown),
            "mute" => Ok(Mute),
            _ => Err(()),
        }
    }
}
|
def intersect_sites_method(form):
    """Build a predicate telling whether a sibling page shares a site.

    Returns a function ``intersects_sites(sibling) -> bool``. When site
    support is disabled (``PAGE_USE_SITE_ID`` is false) every sibling
    matches. Otherwise the relevant site ids come either from the global
    ``SITE_ID`` (when ``PAGE_HIDE_SITES`` is set) or from the submitted
    form data.
    """
    if settings.PAGE_USE_SITE_ID:
        if settings.PAGE_HIDE_SITES:
            site_ids = [global_settings.SITE_ID]
        else:
            site_ids = [int(x) for x in form.data.getlist('sites')]

        def intersects_sites(sibling):
            # exists() lets the database stop at the first matching row
            # instead of counting all of them.
            return sibling.sites.filter(id__in=site_ids).exists()
    else:
        def intersects_sites(sibling):
            return True
    return intersects_sites
Americans United for Separation of Church and State recently posted an article discussing Christian Reconstructionism that mentions Vision Forum. It turns out that Vision Forum openly associates with and supports those who advocate stoning as the appropriate punishment for disobedient teens. Read for yourselves:
Vision Forum is run by Douglas W. Phillips, son of Howard Phillips, a longtime Republican operative and former Nixon administration official who drifted into Reconstructionism during the 1990s. The organization doesn’t flaunt its alignment with Reconstructionism – probably aware of its controversial nature – but Reconstructionist writers are a staple on its Web site.
Tellingly, the site is littered with columns by William Einwechter, a Pennsylvania pastor who in 1999 authored a controversial essay arguing that the Bible mandates the stoning of “disobedient” teenagers. (Einwechter spoke at the Vision Forum’s 2008 Witherspoon seminar in Fredericksburg, Va., offering lectures on “Christian Jurisprudence” and “The Bible and Female Magistrates.”)
DeWeese has a featured article on the Vision Forum Web site as well. In it, he argues that America was founded on “the fixed law of God” and criticizes court decisions upholding church-state separation, school integration, abortion rights and gay rights as examples of judges changing the law by judicial edict. |
<gh_stars>10-100
/* $NetBSD: db_disasm.c,v 1.16 2003/12/08 08:08:40 jdolecek Exp $ */
/*
* Copyright 2002 Wasabi Systems, Inc.
* All rights reserved.
*
* Written by <NAME> for Wasabi Systems, Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed for the NetBSD Project by
* Wasabi Systems, Inc.
* 4. The name of Wasabi Systems, Inc. may not be used to endorse
* or promote products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL WASABI SYSTEMS, INC
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: db_disasm.c,v 1.16 2003/12/08 08:08:40 jdolecek Exp $");
#include "opt_ddb.h"
#include <sys/param.h>
#include <sys/proc.h>
#include <sys/systm.h>
#include <machine/db_machdep.h>
#include <machine/frame.h>
#include <ddb/db_sym.h>
#include <ddb/db_command.h>
#include <ddb/db_extern.h>
#include <ddb/db_access.h>
#include <ddb/db_interface.h>
#include <ddb/db_output.h>
#include <ddb/ddbvar.h>
typedef const char *(*format_func_t)(opcode_t, db_addr_t,
char *, char *, char *);
#define SH5_OPCODE_FORMAT(op) (((op) >> 26) & 0x3f)
/*
* Opcode Major Formats
*/
static const char *sh5_fmt_mnd0(opcode_t, db_addr_t, char *, char *, char *);
static const char *sh5_fmt_msd6(opcode_t, db_addr_t, char *, char *, char *);
static const char *sh5_fmt_msd10(opcode_t, db_addr_t, char *, char *, char *);
static const char *sh5_fmt_xsd16(opcode_t, db_addr_t, char *, char *, char *);
/*
 * Indexed by SH5_OPCODE_FORMAT(op) (opcode bits 31..26).
 * A NULL slot marks a reserved/unallocated major opcode.
 */
static const format_func_t major_format_funcs[] = {
	/* Opcode bits 5, 4 and 3 == 000 */
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	NULL,		/* FPU-reserved */
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,

	/* Opcode bits 5, 4 and 3 == 001 */
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,

	/* Opcode bits 5, 4 and 3 == 010 */
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	NULL,		/* reserved */
	NULL,		/* reserved */
	NULL,		/* reserved */
	NULL,		/* reserved */

	/* Opcode bits 5, 4 and 3 == 011 */
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	sh5_fmt_mnd0,
	NULL,		/* reserved */
	NULL,		/* reserved */
	NULL,		/* reserved */
	NULL,		/* reserved */

	/* Opcode bits 5, 4 and 3 == 100 */
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,

	/* Opcode bits 5, 4 and 3 == 101 */
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,

	/* Opcode bits 5, 4 and 3 == 110 */
	sh5_fmt_msd6,
	sh5_fmt_msd6,
	sh5_fmt_xsd16,
	sh5_fmt_xsd16,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,
	sh5_fmt_msd10,

	/* Opcode bits 5, 4 and 3 == 111 */
	sh5_fmt_msd6,
	sh5_fmt_msd6,
	sh5_fmt_xsd16,
	sh5_fmt_xsd16,
	NULL,		/* reserved */
	NULL,		/* reserved */
	NULL,		/* reserved */
	NULL		/* reserved */
};
/*
* Major Format MND0 is decoded using the following table
*/
/*
 * One decode-table entry for a Major Format MND0 instruction.
 * A NULL mnemonic marks a reserved/unallocated minor opcode.
 */
struct format_mnd0 {
	const char *mnemonic;
	char op_s1;	/* Source operand 1 */
	char op_s2;	/* Source operand 2 */
	char op_d;	/* Destination operand */
};
static int sh5_fmt_mnd0_decode_op(int, int, char *);
#define FMT_MND0_MAJ_INDEX(op) (SH5_OPCODE_FORMAT(op))
#define FMT_MND0_MIN_INDEX(op) (((op) >> 16) & 0x0f)
#define FMT_MND0_S1(op) (((op) >> 20) & 0x3f)
#define FMT_MND0_S2(op) (((op) >> 10) & 0x3f)
#define FMT_MND0_D(op) (((op) >> 4) & 0x3f)
/* Possible values for the operands */
#define FMT_MND0_OP_NONE 0 /* Unused, but must be encoded = 0x3f */
#define FMT_MND0_OP_AS1 1 /* Unused, but must be encoded as s1 */
#define FMT_MND0_OP_R 2 /* General purpose register */
#define FMT_MND0_OP_F 3 /* Single-precision FP register */
#define FMT_MND0_OP_D 4 /* Double-precision FP register */
#define FMT_MND0_OP_V 5 /* Vector specification */
#define FMT_MND0_OP_MTRX 6 /* Matrix specification */
#define FMT_MND0_OP_CR 7 /* Control Register */
#define FMT_MND0_OP_TR 8 /* Branch Target Register */
#define FMT_MND0_OP_TRL 9 /* Branch Target Reg, w/ "likely" bit */
/*
 * Decode table for Major Format MND0, indexed by [major opcode][minor
 * opcode]. A NULL mnemonic marks a reserved/unallocated encoding.
 */
static const struct format_mnd0 format_mnd0[][16] = {
	/* Opcode 000000, ALU */
	{
	{NULL, 0},
	{"cmpeq", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"cmpgt", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"cmpgu", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"add.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"add", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"sub.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"sub", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"addz.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"nsb", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_R},
	{"mulu.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"byterev", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_R}
	},

	/* Opcode 000001, ALU */
	{
	{"shlld.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"shlld", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"shlrd.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"shlrd", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{"shard.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"shard", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"or", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"and", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"xor", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"muls.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"andc", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R}
	},

	/* Opcode 000010, MM */
	{
	{NULL, 0},
	{"madd.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"madd.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"madds.ub", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"madds.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"madds.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{"msub.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"msub.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"msubs.ub", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"msubs.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"msubs.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0}
	},

	/* Opcode 000011, MM */
	{
	{NULL, 0},
	{"mshlld.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshlld.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{"mshalds.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshalds.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{"mshard.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshard.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshards.q", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mshlrd.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshlrd.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0}
	},

	/* Opcode 000100, <unused> */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 000101, FPU */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fipr.s", FMT_MND0_OP_V, FMT_MND0_OP_V, FMT_MND0_OP_F},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"ftrv.s", FMT_MND0_OP_MTRX, FMT_MND0_OP_V, FMT_MND0_OP_V},
	{NULL, 0}
	},

	/* Opcode 000110, FPU */
	{
	{"fabs.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"fabs.d", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"fneg.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"fneg.d", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fsina.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{NULL, 0},
	{"fsrra.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{NULL, 0},
	{"fcosa.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 000111, FLOAD */
	{
	{"fmov.ls", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_F},
	{"fmov.qd", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_D},
	{"fgetscr", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_F},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fldx.s", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_F},
	{"fldx.d", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_D},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fldx.p", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_F},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 001000, RMW */
	{
	{NULL, 0},
	{"cmveq", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"swap.q", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"cmvne", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 001001, MISC */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"getcon", FMT_MND0_OP_CR, FMT_MND0_OP_NONE, FMT_MND0_OP_R}
	},

	/* Opcode 001010, MM */
	{
	{"mcmpeq.b", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mcmpeq.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mcmpeq.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mcmpgt.b", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mcmpgt.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mcmpgt.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mextr1", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mabs.w", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_R},
	{"mabs.l", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_R},
	{"mextr2", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mperm.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mextr3", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R}
	},

	/* Opcode 001011, MM */
	{
	{"mshflo.b", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshflo.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshflo.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mextr4", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshfhi.b", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshfhi.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mshfhi.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mextr5", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"mextr6", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"mextr7", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R}
	},

	/* Opcode 001100, FPU */
	{
	{"fmov.sl", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_R},
	{"fmov.dq", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_R},
	{"fputscr", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_NONE},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fcmpeq.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_R},
	{"fcmpeq.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_R},
	{"fcmpun.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_R},
	{"fcmpun.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_R},
	{"fcmpgt.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_R},
	{"fcmpgt.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_R},
	{"fcmpge.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_R},
	{"fcmpge.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_R}
	},

	/* Opcode 001101, FPU */
	{
	{"fadd.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_F},
	{"fadd.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_D},
	{"fsub.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_F},
	{"fsub.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_D},
	{"fdiv.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_F},
	{"fdiv.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_D},
	/*
	 * Minor opcodes 6/7 previously duplicated fdiv.s/fdiv.d, leaving
	 * FMUL absent from the table; per the SH-5 ISA these encodings
	 * are fmul.s/fmul.d.
	 */
	{"fmul.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_F},
	{"fmul.d", FMT_MND0_OP_D, FMT_MND0_OP_D, FMT_MND0_OP_D},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fmac.s", FMT_MND0_OP_F, FMT_MND0_OP_F, FMT_MND0_OP_F},
	{NULL, 0}
	},

	/* Opcode 001110, FPU */
	{
	{"fmov.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"fmov.d", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{NULL, 0},
	{NULL, 0},
	{"fsqrt.s", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"fsqrt.d", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"fcnv.sd", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"fcnv.ds", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"ftrc.sl", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"ftrc.dq", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"ftrc.sq", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"ftrc.dl", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"float.ls", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_F},
	{"float.qd", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"float.ld", FMT_MND0_OP_F, FMT_MND0_OP_AS1, FMT_MND0_OP_D},
	{"float.qs", FMT_MND0_OP_D, FMT_MND0_OP_AS1, FMT_MND0_OP_F}
	},

	/* Opcode 001111, FSTORE */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fstx.s", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_F},
	{"fstx.d", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_D},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"fstx.p", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_F},
	{NULL, 0},
	{NULL, 0},
	},

	/* Opcode 010000, LOAD */
	{
	{"ldx.b", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"ldx.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"ldx.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"ldx.q", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"ldx.ub", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"ldx.uw", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 010001, BRANCH */
	{
	{NULL, 0},
	{"blink", FMT_MND0_OP_TR, FMT_MND0_OP_NONE, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"gettr", FMT_MND0_OP_TR, FMT_MND0_OP_NONE, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 010010, MM */
	{
	{"msad.ubq", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mmacfx.wl", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mcmv", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mmacnfx.wl", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"mmulsum.wq", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 010011, MM */
	{
	{NULL, 0},
	{"mmul.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mmul.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{"mmulfx.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mmulfx.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mcnvs.wb", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mmulfxrp.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mmullo.wl", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{"mcnvs.wub", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mcnvs.lw", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"mmulhi.wl", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0}
	},

	/* Opcode 010100, <unused> */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 010101, <unused> */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 010110, <unused> */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 010111, <unused> */
	{
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 011000, STORE */
	{
	{"stx.b", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"stx.w", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"stx.l", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{"stx.q", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 011001, BRANCH */
	{
	{NULL, 0},
	{"beq", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{"bge", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{"bne", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{"bgt", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"bgeu", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"bgtu", FMT_MND0_OP_R, FMT_MND0_OP_R, FMT_MND0_OP_TRL}
	},

	/* Opcode 011010, PT */
	{
	{NULL, 0},
	{"ptabs", FMT_MND0_OP_NONE, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"ptrel", FMT_MND0_OP_NONE, FMT_MND0_OP_R, FMT_MND0_OP_TRL},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0}
	},

	/* Opcode 011011, MISC */
	{
	{"nop", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"trapa", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"synci", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"rte", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"illegal", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"brk", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"synco", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{"sleep", FMT_MND0_OP_NONE, FMT_MND0_OP_NONE, FMT_MND0_OP_NONE},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"putcon", FMT_MND0_OP_R, FMT_MND0_OP_NONE, FMT_MND0_OP_CR},
	}
};
/*
* Major Format MSD6 is decoded using the following table
*/
/*
 * One decode-table entry for a Major Format MSD6 instruction.
 * A NULL mnemonic marks a reserved/unallocated minor opcode.
 */
struct format_msd6 {
	const char *mnemonic;
	char op_r;	/* Register operand */
	char op_imm;	/* Immediate operand */
	char op_sd;	/* Source/Destination operand */
};
static int sh5_fmt_msd6_decode_op(int, int, char *);
#define FMT_MSD6_MAJ_INDEX(op) (SH5_OPCODE_FORMAT(op) - 0x30)
#define FMT_MSD6_MIN_INDEX(op) (((op) >> 16) & 0x0f)
#define FMT_MSD6_R(op) (((op) >> 20) & 0x3f)
#define FMT_MSD6_IMM(op) (((op) >> 10) & 0x3f)
#define FMT_MSD6_SD(op) (((op) >> 4) & 0x3f)
/* Possible values for the operands */
#define FMT_MSD6_OP_NONE 0 /* Unused, but must be encoded = 0x3f */
#define FMT_MSD6_OP_R 1 /* General purpose register */
#define FMT_MSD6_OP_SIMM 2 /* Signed Immediate */
#define FMT_MSD6_OP_IMM 3 /* Unsigned Immediate */
#define FMT_MSD6_OP_SIMM32 4 /* Signed Immediate, scaled by 32 */
#define FMT_MSD6_OP_TRL 5 /* Branch Target Register, "likely" */
/*
 * Indexed [major opcode - 0x30][minor opcode]. A NULL mnemonic marks a
 * reserved/unallocated encoding, which the decoder rejects.
 */
static const struct format_msd6 format_msd6[][16] = {
	/* Opcode 110000, LOAD */
	{
		{NULL, 0},
		{NULL, 0},
		{"ldlo.l", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{"ldlo.q", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{NULL, 0},
		{NULL, 0},
		{"ldhi.l", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{"ldhi.q", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{"getcfg", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R}
	},

	/* Opcode 110001, ALU */
	{
		{"shlli.l", FMT_MSD6_OP_R, FMT_MSD6_OP_IMM, FMT_MSD6_OP_R},
		{"shlli", FMT_MSD6_OP_R, FMT_MSD6_OP_IMM, FMT_MSD6_OP_R},
		{"shlri.l", FMT_MSD6_OP_R, FMT_MSD6_OP_IMM, FMT_MSD6_OP_R},
		{"shlri", FMT_MSD6_OP_R, FMT_MSD6_OP_IMM, FMT_MSD6_OP_R},
		{NULL, 0},
		{NULL, 0},
		{"shari.l", FMT_MSD6_OP_R, FMT_MSD6_OP_IMM, FMT_MSD6_OP_R},
		{"shari", FMT_MSD6_OP_R, FMT_MSD6_OP_IMM, FMT_MSD6_OP_R},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{"xori", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 110010, <unused> */
	{
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 110011, <unused> */
	{
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 110100, <unused> */
	{
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 110101, <unused> */
	{
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 110110, <unused> */
	{
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 110111, <unused> */
	{
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	},

	/* Opcode 111000, STORE */
	{
		{NULL, 0},
		{"prefi", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM32, FMT_MSD6_OP_NONE},
		{"stlo.l", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{"stlo.q", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{"alloco", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM32, FMT_MSD6_OP_NONE},
		{"icbi", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM32, FMT_MSD6_OP_NONE},
		{"sthi.l", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{"sthi.q", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R},
		{"ocbp", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM32, FMT_MSD6_OP_NONE},
		{"ocbi", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM32, FMT_MSD6_OP_NONE},
		{NULL, 0},
		{NULL, 0},
		{"ocbwb", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM32, FMT_MSD6_OP_NONE},
		{NULL, 0},
		{NULL, 0},
		{"putcfg", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_R}
	},

	/* Opcode 111001, BRANCH */
	{
		{NULL, 0},
		{"beqi", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_TRL},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{"bnei", FMT_MSD6_OP_R, FMT_MSD6_OP_SIMM, FMT_MSD6_OP_TRL},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0},
		{NULL, 0}
	}
};
/*
 * Major Format MSD10 is decoded using the following table
 */

/*
 * MSD10 instructions carry a register field, a 10-bit immediate and a
 * source/destination field; the register operand kind is always a GPR,
 * so only the other two are tabulated.
 */
struct format_msd10 {
	const char *mnemonic;
	char op_imm;	/* Immediate operand */
	char op_sd;	/* Source/Destination operand */
};

static int sh5_fmt_msd10_decode_op(int, int, char *);

/* Field extractors for an MSD10-format opcode word */
#define FMT_MSD10_MAJ_INDEX(op)	(SH5_OPCODE_FORMAT(op) - 0x20)
#define FMT_MSD10_R(op)		(((op) >> 20) & 0x3f)
#define FMT_MSD10_IMM(op)	(((op) >> 10) & 0x3ff)
#define FMT_MSD10_SD(op)	(((op) >> 4) & 0x3f)

/* Possible values for the operands */
#define FMT_MSD10_OP_R		0	/* General purpose register */
#define FMT_MSD10_OP_F		1	/* Single-precision FP register */
#define FMT_MSD10_OP_D		2	/* Double-precision FP register */
#define FMT_MSD10_OP_SIMM	3	/* Signed Immediate */
#define FMT_MSD10_OP_SIMM2	4	/* Signed Immediate, scaled by 2 */
#define FMT_MSD10_OP_SIMM4	5	/* Signed Immediate, scaled by 4 */
#define FMT_MSD10_OP_SIMM8	6	/* Signed Immediate, scaled by 8 */
/*
 * Indexed by (major opcode - 0x20); immediates are scaled by the access
 * size of the load/store. NULL rows are reserved encodings.
 */
static const struct format_msd10 format_msd10[] = {
	{"ld.b", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R},
	{"ld.w", FMT_MSD10_OP_SIMM2, FMT_MSD10_OP_R},
	{"ld.l", FMT_MSD10_OP_SIMM4, FMT_MSD10_OP_R},
	{"ld.q", FMT_MSD10_OP_SIMM8, FMT_MSD10_OP_R},
	{"ld.ub", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R},
	{"fld.s", FMT_MSD10_OP_SIMM4, FMT_MSD10_OP_F},
	{"fld.p", FMT_MSD10_OP_SIMM8, FMT_MSD10_OP_F},
	{"fld.d", FMT_MSD10_OP_SIMM8, FMT_MSD10_OP_D},
	{"st.b", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R},
	{"st.w", FMT_MSD10_OP_SIMM2, FMT_MSD10_OP_R},
	{"st.l", FMT_MSD10_OP_SIMM4, FMT_MSD10_OP_R},
	{"st.q", FMT_MSD10_OP_SIMM8, FMT_MSD10_OP_R},
	{"ld.uw", FMT_MSD10_OP_SIMM2, FMT_MSD10_OP_R},
	{"fst.s", FMT_MSD10_OP_SIMM4, FMT_MSD10_OP_F},
	{"fst.p", FMT_MSD10_OP_SIMM8, FMT_MSD10_OP_F},
	{"fst.d", FMT_MSD10_OP_SIMM8, FMT_MSD10_OP_D},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"addi", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R},
	{"addi.l", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R},
	{"andi", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R},
	{"ori", FMT_MSD10_OP_SIMM, FMT_MSD10_OP_R}
};
/*
 * Major Format XSD16 is decoded using the following table
 */

/*
 * XSD16 instructions have a 16-bit immediate and a destination field
 * (movi/shori/pta/ptb).
 */
struct format_xsd16 {
	const char *mnemonic;
	char op_imm;	/* Immediate operand */
	char op_d;	/* Destination operand */
};

static int sh5_fmt_xsd16_decode_op(int, int, int, db_addr_t, char *);

/* Field extractors for an XSD16-format opcode word */
#define FMT_XSD16_MAJ_INDEX(op)	(SH5_OPCODE_FORMAT(op) - 0x32)
#define FMT_XSD16_IMM(op)	(((op) >> 10) & 0xffff)
#define FMT_XSD16_D(op)		(((op) >> 4) & 0x3f)

/* Possible values for the operands (note: value 4 is unassigned) */
#define FMT_XSD16_OP_R		0	/* General purpose register */
#define FMT_XSD16_OP_TRL	1	/* Branch Target Reg, w/ "likely" bit */
#define FMT_XSD16_OP_SHORI	2	/* Unsigned Immediate */
#define FMT_XSD16_OP_MOVI	3	/* Signed Immediate */
#define FMT_XSD16_OP_LABEL	5	/* Branch label/offset */
/* Indexed by (major opcode - 0x32); NULL rows are reserved encodings. */
static const struct format_xsd16 format_xsd16[] = {
	{"shori", FMT_XSD16_OP_SHORI, FMT_XSD16_OP_R},
	{"movi", FMT_XSD16_OP_MOVI, FMT_XSD16_OP_R},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{NULL, 0},
	{"pta", FMT_XSD16_OP_LABEL, FMT_XSD16_OP_TRL},
	{"ptb", FMT_XSD16_OP_LABEL, FMT_XSD16_OP_TRL}
};
/*
 * Control register names, indexed by the 6-bit control-register number.
 * "undef_*" and "resvd_*" are placeholders for undefined/reserved slots.
 */
static const char *sh5_conreg_names[64] = {
	"sr",		"ssr",		"pssr",		"undef_03",
	"intevt",	"expevt",	"pexpevt",	"tra",
	"spc",		"pspc",		"resvec",	"vbr",
	"undef_12",	"tea",		"undef_14",	"undef_15",
	"dcr",		"kcr0",		"kcr1",		"undef_19",
	"undef_20",	"undef_21",	"undef_22",	"undef_23",
	"undef_24",	"undef_25",	"undef_26",	"undef_27",
	"undef_28",	"undef_29",	"undef_30",	"undef_31",
	"resvd_32",	"resvd_33",	"resvd_34",	"resvd_35",
	"resvd_36",	"resvd_37",	"resvd_38",	"resvd_39",
	"resvd_40",	"resvd_41",	"resvd_42",	"resvd_43",
	"resvd_44",	"resvd_45",	"resvd_46",	"resvd_47",
	"resvd_48",	"resvd_49",	"resvd_50",	"resvd_51",
	"resvd_52",	"resvd_53",	"resvd_54",	"resvd_55",
	"resvd_56",	"resvd_57",	"resvd_58",	"resvd_59",
	"resvd_60",	"resvd_61",	"ctc",		"usr"
};
static int sh5_sign_extend(int, int);

/*
 * Scratch buffers shared by all format decoders. Static scratch is
 * assumed safe here because this looks like ddb (kernel debugger) code,
 * which runs single-threaded -- NOTE(review): confirm.
 */
static char oper1[128], oper2[128], oper3[128];
static char extra_info[256];	/* optional annotation, e.g. resolved movi/shori target */
/*
 * Disassemble and print the SHmedia instruction at 'loc'.
 * Returns the address of the following instruction. 'dummy' is unused
 * (kept for the ddb db_disasm() interface).
 */
db_addr_t
db_disasm(db_addr_t loc, boolean_t dummy)
{
	format_func_t fp;
	opcode_t op;
	const char *mnemonic, *comma = "";

	/*
	 * Ditch the SHmedia bit
	 */
	loc &= ~3;

	/* Fetch the opcode; use fuword() for non-KSEG0 (user) addresses. */
	if (loc < SH5_KSEG0_BASE) {
		op = fuword((void *)(intptr_t)loc);
		/* fuword() signals a fault with an all-ones value */
		if (op == 0xffffffff) {
			db_printf("invalid address.\n");
			return (loc);
		}
	} else
		op = *((opcode_t *)(intptr_t)loc);

	extra_info[0] = '\0';

	/*
	 * The lowest 4 bits must be zero
	 */
	if ((op & 0xf) == 0 &&
	    (fp = major_format_funcs[SH5_OPCODE_FORMAT(op)]) != NULL)
		mnemonic = (fp)(op, loc, oper1, oper2, oper3);
	else
		mnemonic = NULL;

	/* Undecodable opcode: print the raw word as ".word 0x...". */
	if (mnemonic == NULL) {
		mnemonic = ".word";
		sprintf(oper1, "0x%08x", op);
		oper2[0] = oper3[0] = '\0';
	}

	/* Print "mnemonic op1, op2, op3", skipping empty operands. */
	db_printf("%-9s", mnemonic);
	if (oper1[0]) {
		db_printf("%s", oper1);
		comma = ", ";
	}
	if (oper2[0]) {
		db_printf("%s%s", comma, oper2);
		comma = ", ";
	}
	if (oper3[0])
		db_printf("%s%s", comma, oper3);

	/* extra_info carries e.g. the symbolized movi/shori target. */
	if (extra_info[0] != '\0')
		db_printf("\t%s\n", extra_info);
	else
		db_printf("\n");

	return (loc + sizeof(opcode_t));
}
/*
 * Decode a major-format MND0 instruction (two source fields, one
 * destination field). Writes the operand strings to op1..op3 and
 * returns the mnemonic, or NULL for an invalid encoding.
 */
/*ARGSUSED*/
static const char *
sh5_fmt_mnd0(opcode_t op, db_addr_t loc, char *op1, char *op2, char *op3)
{
	const struct format_mnd0 *fp;
	static char trl[16];	/* scratch for "<mnemonic>/l" or "/u" */
	int s1, s2, d;

	fp = &format_mnd0[FMT_MND0_MAJ_INDEX(op)][FMT_MND0_MIN_INDEX(op)];
	s1 = FMT_MND0_S1(op);
	s2 = FMT_MND0_S2(op);
	d = FMT_MND0_D(op);

	if (fp->mnemonic == NULL)
		return (NULL);

	if (sh5_fmt_mnd0_decode_op(fp->op_s1, s1, op1) < 0)
		return (NULL);

	/* OP_AS1: the s2 field must duplicate s1 and is not printed. */
	if (fp->op_s2 == FMT_MND0_OP_AS1) {
		if (s2 != s1)
			return (NULL);
		op2[0] = '\0';
	} else
	if (sh5_fmt_mnd0_decode_op(fp->op_s2, s2, op2) < 0)
		return (NULL);

	if (sh5_fmt_mnd0_decode_op(fp->op_d, d, op3) < 0)
		return (NULL);

	/* Branch-target destinations carry a "likely" hint in bit 5. */
	if (fp->op_d == FMT_MND0_OP_TRL) {
		sprintf(trl, "%s/%c", fp->mnemonic, (d & 0x20) ? 'l' : 'u');
		return (trl);
	}

	return (fp->mnemonic);
}
/*
 * Render a single MND0 operand of kind 'fmt' with raw field value 'op'
 * into 'ops'. Returns 0 on success, -1 if the field value is invalid
 * for that operand kind.
 */
static int
sh5_fmt_mnd0_decode_op(int fmt, int op, char *ops)
{
	char opstr[16];

	switch (fmt) {
	case FMT_MND0_OP_NONE:
		/* An unused field must be encoded as all-ones (0x3f). */
		if (op == 0x3f) {
			ops[0] = '\0';
			return (0);
		}
		/* FALLTHROUGH */
	case FMT_MND0_OP_AS1:
		/* OP_AS1 is handled by the caller; seeing it here is a bug
		 * in the tables, so reject. */
		return (-1);
	case FMT_MND0_OP_R:
		sprintf(opstr, "r%d", op);
		break;
	case FMT_MND0_OP_F:
		sprintf(opstr, "fr%d", op);
		break;
	case FMT_MND0_OP_D:
		sprintf(opstr, "dr%d", op);
		break;
	case FMT_MND0_OP_V:
		sprintf(opstr, "fv%d", op);
		break;
	case FMT_MND0_OP_MTRX:
		sprintf(opstr, "mtrx%d", op);
		break;
	case FMT_MND0_OP_CR:
		strcpy(opstr, sh5_conreg_names[op]);
		break;
	case FMT_MND0_OP_TR:
	case FMT_MND0_OP_TRL:
		/* Only tr0-tr7 exist; bits 3-4 must be clear (bit 5 is the
		 * "likely" hint, consumed by the caller). */
		if ((op & 0x18) != 0)
			return (-1);
		sprintf(opstr, "tr%d", op & 0x7);
		break;
	default:
		return (-1);
	}

	strcpy(ops, opstr);
	return (0);
}
/*
 * Decode a major-format MSD6 instruction (register, 6-bit immediate,
 * source/destination). Returns the mnemonic or NULL if invalid.
 */
/*ARGSUSED*/
static const char *
sh5_fmt_msd6(opcode_t op, db_addr_t loc, char *op1, char *op2, char *op3)
{
	const struct format_msd6 *fp;
	static char trl[16];	/* scratch for "<mnemonic>/l" or "/u" */
	int r, imm, sd;

	fp = &format_msd6[FMT_MSD6_MAJ_INDEX(op)][FMT_MSD6_MIN_INDEX(op)];
	r = FMT_MSD6_R(op);
	imm = FMT_MSD6_IMM(op);
	sd = FMT_MSD6_SD(op);

	if (fp->mnemonic == NULL)
		return (NULL);

	if (sh5_fmt_msd6_decode_op(fp->op_r, r, op1) < 0)
		return (NULL);

	if (sh5_fmt_msd6_decode_op(fp->op_imm, imm, op2) < 0)
		return (NULL);

	if (sh5_fmt_msd6_decode_op(fp->op_sd, sd, op3) < 0)
		return (NULL);

	/* Branch-target destinations carry a "likely" hint in bit 5. */
	if (fp->op_sd == FMT_MSD6_OP_TRL) {
		sprintf(trl, "%s/%c", fp->mnemonic, (sd & 0x20) ? 'l' : 'u');
		return (trl);
	}

	return (fp->mnemonic);
}
/*
 * Render a single MSD6 operand of kind 'fmt' with raw field value 'op'
 * into 'ops'. Returns 0 on success, -1 if invalid.
 */
static int
sh5_fmt_msd6_decode_op(int fmt, int op, char *ops)
{
	char opstr[16];

	switch (fmt) {
	case FMT_MSD6_OP_NONE:
		/* An unused field must be encoded as all-ones (0x3f). */
		if (op == 0x3f) {
			ops[0] = '\0';
			return (0);
		}
		return (-1);
	case FMT_MSD6_OP_R:
		sprintf(opstr, "r%d", op);
		break;
	case FMT_MSD6_OP_SIMM:
		op = sh5_sign_extend(op, 6);
		/* FALLTHROUGH */
	case FMT_MSD6_OP_IMM:
		sprintf(opstr, "%d", op);
		break;
	case FMT_MSD6_OP_SIMM32:
		/* Immediate is in units of 32 bytes (cache-op offsets). */
		op = sh5_sign_extend(op, 6);
		sprintf(opstr, "%d", op * 32);
		break;
	case FMT_MSD6_OP_TRL:
		/* Only tr0-tr7 exist; bit 5 is the "likely" hint. */
		if ((op & 0x18) != 0)
			return (-1);
		sprintf(opstr, "tr%d", op & 0x7);
		break;
	default:
		return (-1);
	}

	strcpy(ops, opstr);
	return (0);
}
/*
 * Decode a major-format MSD10 instruction (register, 10-bit immediate,
 * source/destination). Returns the mnemonic or NULL if invalid.
 */
/*ARGSUSED*/
static const char *
sh5_fmt_msd10(opcode_t op, db_addr_t loc, char *op1, char *op2, char *op3)
{
	const struct format_msd10 *fp;
	int r, imm, sd;

	fp = &format_msd10[FMT_MSD10_MAJ_INDEX(op)];
	r = FMT_MSD10_R(op);
	imm = FMT_MSD10_IMM(op);
	sd = FMT_MSD10_SD(op);

	if (fp->mnemonic == NULL)
		return (NULL);

	/* The register operand is always a GPR; decoding cannot fail. */
	(void) sh5_fmt_msd10_decode_op(FMT_MSD10_OP_R, r, op1);

	if (sh5_fmt_msd10_decode_op(fp->op_imm, imm, op2) < 0)
		return (NULL);

	if (sh5_fmt_msd10_decode_op(fp->op_sd, sd, op3) < 0)
		return (NULL);

	return (fp->mnemonic);
}
/*
 * Render a single MSD10 operand of kind 'fmt' with raw field value 'op'
 * into 'ops'. Returns 0 on success, -1 if invalid.
 */
static int
sh5_fmt_msd10_decode_op(int fmt, int op, char *ops)
{
	char opstr[16];

	switch (fmt) {
	case FMT_MSD10_OP_R:
		sprintf(opstr, "r%d", op);
		break;
	case FMT_MSD10_OP_F:
		sprintf(opstr, "fr%d", op);
		break;
	case FMT_MSD10_OP_D:
		sprintf(opstr, "dr%d", op);
		break;
	case FMT_MSD10_OP_SIMM:
	case FMT_MSD10_OP_SIMM2:
	case FMT_MSD10_OP_SIMM4:
	case FMT_MSD10_OP_SIMM8:
		op = sh5_sign_extend(op, 10);
		/* The SIMM* constants are consecutive, so the scale factor
		 * (1, 2, 4 or 8) is 2^(fmt - FMT_MSD10_OP_SIMM). */
		op *= 1 << (fmt - FMT_MSD10_OP_SIMM);
		sprintf(opstr, "%d", op);
		break;
	default:
		return (-1);
	}

	strcpy(ops, opstr);
	return (0);
}
/*
 * Decode a major-format XSD16 instruction (16-bit immediate plus
 * destination: shori/movi/pta/ptb). Returns the mnemonic or NULL.
 */
static const char *
sh5_fmt_xsd16(opcode_t op, db_addr_t loc, char *op1, char *op2, char *op3)
{
	const struct format_xsd16 *fp;
	static char trl[16];	/* scratch for "<mnemonic>/l" or "/u" */
	int imm, d;

	fp = &format_xsd16[FMT_XSD16_MAJ_INDEX(op)];
	imm = FMT_XSD16_IMM(op);
	d = FMT_XSD16_D(op);

	if (fp->mnemonic == NULL)
		return (NULL);

	/* The immediate decoder also needs 'd' and 'loc' to track
	 * movi/shori constant-building sequences. */
	if (sh5_fmt_xsd16_decode_op(fp->op_imm, imm, d, loc, op1) < 0)
		return (NULL);

	if (sh5_fmt_xsd16_decode_op(fp->op_d, d, 0, 0, op2) < 0)
		return (NULL);

	op3[0] = '\0';	/* XSD16 never has a third operand */

	/* Branch-target destinations carry a "likely" hint in bit 5. */
	if (fp->op_d == FMT_XSD16_OP_TRL) {
		sprintf(trl, "%s/%c", fp->mnemonic, (d & 0x20) ? 'l' : 'u');
		return (trl);
	}

	return (fp->mnemonic);
}
/*
 * Render an XSD16 operand of kind 'fmt' into 'ops'.
 *
 * This decoder also recognizes "movi; shori; shori..." sequences that
 * build a 64-bit constant into register 'd': a movi records the seed in
 * the static state below, and each consecutive shori into the same
 * register at the next address folds 16 more bits in. When the sequence
 * ends, the accumulated value (symbolized if possible) is placed in the
 * file-scope extra_info buffer for db_disasm() to print.
 *
 * Returns 0 on success, -1 for an invalid encoding.
 */
static int
sh5_fmt_xsd16_decode_op(int fmt, int op, int d, db_addr_t loc, char *ops)
{
	char *symname;
	db_sym_t sym;
	db_expr_t diff;
	opcode_t nextop;
	char accmovi_str[32];
	/* State for tracking a movi/shori constant-building sequence.
	 * Static is assumed safe: single-threaded ddb -- NOTE(review). */
	static db_addr_t last_movi;	/* address of last movi/shori in sequence */
	static int last_d = -1;		/* destination register of that sequence */
	static int64_t accmovi;		/* constant accumulated so far */

	switch (fmt) {
	case FMT_XSD16_OP_R:
		sprintf(ops, "r%d", op);
		break;
	case FMT_XSD16_OP_TRL:
		/* Only tr0-tr7 exist; bit 5 is the "likely" hint. */
		if ((op & 0x18) != 0)
			return (-1);
		sprintf(ops, "tr%d", op & 0x7);
		break;
	case FMT_XSD16_OP_SHORI:
		sprintf(ops, "%d", op);
		/* Continue the sequence only if this shori immediately
		 * follows the previous instruction and targets the same
		 * register. */
		if ((last_movi + 4) == loc && last_d == d) {
			accmovi <<= 16;
			accmovi |= op;

			/* Peek at the next opcode to see whether the
			 * sequence continues with another shori into 'd'. */
			if ((loc + 4) < SH5_KSEG0_BASE)
				nextop = fuword((void *)(intptr_t)(loc + 4));
			else
				nextop = *((opcode_t *)(intptr_t)(loc + 4));

			if ((nextop & 0xfc00000f) == 0xc8000000 &&
			    ((nextop >> 4) & 0x3f) == d) {
				/* Sequence continues; remember our address. */
				last_movi = loc;
			} else {
				/* Sequence complete: symbolize the value. */
				symname = NULL;
				sym = db_search_symbol((db_addr_t)accmovi,
				    DB_STGY_PROC, &diff);
				db_symbol_values(sym, &symname, NULL);

				if (symname == NULL || symname[0] == '/') {
					/* Not a procedure; try data/extern. */
					sym = db_search_symbol(
					    (db_addr_t)accmovi,
					    DB_STGY_XTRN, &diff);
					db_symbol_values(sym, &symname, NULL);
					if (symname && symname[0] == '/')
						symname = NULL;
				} else
					diff &= ~1;	/* drop SHmedia bit */

				if ((u_int64_t)accmovi >= 0x100000000ULL) {
					sprintf(accmovi_str, "0x%08x%08x",
					    (u_int)(accmovi >> 32),
					    (u_int)accmovi);
				} else
					sprintf(accmovi_str, "0x%08x",
					    (u_int)accmovi);

				/* Reject implausibly distant symbols. */
				if (symname == NULL || diff >= 0x400000)
					strcpy(extra_info, accmovi_str);
				else {
					if (diff)
						sprintf(extra_info,
						    "%s <%s+0x%x>",
						    accmovi_str, symname,
						    (int)diff);
					else
						sprintf(extra_info, "%s <%s>",
						    accmovi_str, symname);
				}
			}
		}
		break;
	case FMT_XSD16_OP_MOVI:
		/* Start a new potential movi/shori sequence. */
		op = sh5_sign_extend(op, 16);
		sprintf(ops, "%d", op);
		last_movi = loc;
		last_d = d;
		accmovi = (int64_t)op;
		break;
	case FMT_XSD16_OP_LABEL:
		/* PC-relative branch target: offset scaled by 4. */
		op = sh5_sign_extend(op, 16) * 4;
		loc = loc + (db_addr_t)op;
		symname = NULL;
		sym = db_search_symbol(loc, DB_STGY_PROC, &diff);
		db_symbol_values(sym, &symname, NULL);
		if (symname == NULL)
			sprintf(ops, "0x%llx", (long long) loc);
		else {
			if (diff)
				sprintf(ops, "%s+0x%x", symname, (int) diff);
			else
				strcpy(ops, symname);
		}
		break;
	default:
		return (-1);
	}

	return (0);
}
/*
 * Sign-extend the low 'bits' bits of 'imm' to a full int.
 * Callers pass field values already masked to 'bits' wide (1 <= bits < 32).
 *
 * The previous implementation used "imm |= (-1 << bits)"; left-shifting
 * a negative value is undefined behavior in C. Subtracting 2^bits when
 * the sign bit is set is the well-defined equivalent for masked inputs.
 */
static int
sh5_sign_extend(int imm, int bits)
{

	if (imm & (1 << (bits - 1)))
		imm -= 1 << bits;
	return (imm);
}
|
Journalism in the Digital Age: The Nigerian Press Framing of the Niger Delta Conflict
This paper analyzes how four Nigerian newspapers framed the local Niger Delta conflict between January and May 2008. It also analyzes the main sources of news reports on the conflict, including the extent to which journalists relied on new technologies, as well as the ethical implications of such practice. Drawing on the theoretical framework of peace and conflict reporting, the methodological context of framing analysis, as well as content analysis techniques, the author demonstrates how the Nigerian press constructed the conflict in a law and order frame, suggesting that the ongoing crisis posed a serious security threat not only to the Niger Delta region but also to the entire Nigerian nation.
/**
* dijkstra - apply Dijkstra's algorithm starting from the
* specified origin vertex to find the shortest path from the
* origin to all other vertices that can be reached from the
* origin.
*
* The method prints the vertices in the order in which they are
* finalized. For each vertex v, it lists the total cost of the
* shortest path from the origin to v, as well as v's parent
* vertex. Tracing back along the parents gives the shortest
* path.
*/
public void dijkstra(String originID) {
/* This will give all vertices an infinite cost. */
reinitVertices();
/* Get the origin and set its cost to 0. */
Vertex origin = getVertex(originID);
if (origin == null) {
throw new IllegalArgumentException("no such vertex: " + originID);
}
origin.cost = 0;
while (true) {
/* Find the unfinalized vertex with the minimal cost. */
Vertex w = null;
Vertex v = vertices;
while (v != null) {
if (!v.done && (w == null || v.cost < w.cost)) {
w = v;
}
v = v.next;
}
/*
* If there are no unfinalized vertices, or if all of the
* unfinalized vertices are unreachable from the origin
* (which is the case if the w.cost is infinite), then
* we're done.
*/
if (w == null || w.cost == Double.POSITIVE_INFINITY) {
return;
}
/* Finalize w. */
System.out.println("\tfinalizing " + w.id + " (cost = " + w.cost +
(w.parent == null ? ")" : ", parent = " + w.parent.id + ")"));
System.out.println("\t\tpath = " + w.pathString());
w.done = true;
/* Try to improve the estimates of w's unfinalized neighbors. */
Edge e = w.edges;
while (e != null) {
Vertex x = e.end;
if (!x.done) {
double cost_via_w = w.cost + e.cost;
if (cost_via_w < x.cost) {
x.cost = cost_via_w;
x.parent = w;
}
}
e = e.next;
}
}
} |
/**
 * Same as normal release but with latitude and longitude:
 * records one completed rental, accumulates the rented time and
 * stores the position where the scooter was left.
 *
 * @param minutes Amount of minutes the scooter was rented for.
 * @param latitude Latitude where the scooter was released.
 * @param longitude Longitude where the scooter was released.
 * PRE: minutes > 0
 * (latitude and longitude are primitive doubles, so the original
 *  "!= null" precondition was vacuous and has been dropped.)
 */
public void release(int minutes, double latitude, double longitude) {
    incrementUsageAmount();      // one more completed rental
    addTotalMinutes(minutes);    // accumulate total rented time
    setState(STOPPED);           // NOTE(review): assumed "available again" state -- confirm
    setClientInUse(null);        // detach the renting client
    this.latitude = latitude;
    this.longitude = longitude;
}
# -*- coding: utf-8 -*-
from datetime import datetime
from unittest.mock import call
from unittest.mock import patch
import pytest
from pyloniex.constants import OrderType
from moneybot.errors import InsufficientBalanceError
from moneybot.errors import NoMarketAvailableError
from moneybot.errors import OrderTooSmallError
from moneybot.market import Order
from moneybot.market.adapters.poloniex import PoloniexMarketAdapter
from moneybot.market.state import MarketState
from moneybot.testing import MarketHistoryMock
from moneybot.trade import AbstractTrade
@pytest.fixture
def market_adapter():
    """A PoloniexMarketAdapter with BTC fiat, mocked history and empty config."""
    return PoloniexMarketAdapter('BTC', MarketHistoryMock(), {})


@pytest.fixture
def market_state():
    """A MarketState with fixed weighted-average prices for three markets."""
    chart_data = {
        'BTC_ETH': {'weighted_average': 0.07420755},  # BTC/ETH
        'BTC_BCH': {'weighted_average': 0.12016601},  # BTC/BCH
        'ETH_BCH': {'weighted_average': 1.63185726},  # ETH/BCH
    }
    return MarketState(chart_data, {}, datetime.now(), 'BTC')
# Cases: (1) amount stated in the buy currency, (2) amount stated in the
# sell currency (converted via the market price), (3) amount stated in a
# third currency (BCH), converted through its price relative to the pair.
@pytest.mark.parametrize('trade,expected', [
    (
        AbstractTrade('BTC', 'ETH', 'ETH', 4),
        [
            Order(
                'BTC_ETH',
                0.07420755,
                4,
                Order.Direction.BUY,
                OrderType.fill_or_kill,
            ),
        ],
    ),
    (
        AbstractTrade('BTC', 'ETH', 'BTC', 0.5),
        [
            Order(
                'BTC_ETH',
                0.07420755,
                6.737858883631113,
                Order.Direction.BUY,
                OrderType.fill_or_kill,
            ),
        ],
    ),
    (
        AbstractTrade('ETH', 'BTC', 'BCH', 3.14),
        [
            Order(
                'BTC_ETH',
                0.07420755,
                5.124031796400001,
                Order.Direction.SELL,
                OrderType.fill_or_kill,
            ),
        ],
    ),
])
def test_reify_trade(market_state, trade, expected):
    """An abstract trade maps to a concrete fill-or-kill order list."""
    orders = PoloniexMarketAdapter.reify_trade(trade, market_state)
    assert orders == expected
def test_reify_trade_no_market(market_state):
    """Reifying a trade between currencies with no market raises."""
    # There is no BTC_WAT (or WAT_BTC) market in the fixture data.
    unreachable = AbstractTrade('BTC', 'WAT', 'BTC', 1.4)
    with pytest.raises(NoMarketAvailableError):
        PoloniexMarketAdapter.reify_trade(unreachable, market_state)
def test_reify_trades(market_state):
    """Batch reification keeps order and silently drops marketless trades."""
    trades = [
        AbstractTrade('BTC', 'ETH', 'ETH', 4),
        AbstractTrade('BTC', 'ETH', 'BTC', 0.5),
        AbstractTrade('ETH', 'BTC', 'BCH', 3.14),
        AbstractTrade('BTC', 'WAT', 'BTC', 1.4),  # no market; expected to be skipped
    ]
    orders = PoloniexMarketAdapter.reify_trades(trades, market_state)
    assert orders == [
        Order(
            'BTC_ETH',
            0.07420755,
            4,
            Order.Direction.BUY,
            OrderType.fill_or_kill,
        ),
        Order(
            'BTC_ETH',
            0.07420755,
            6.737858883631113,
            Order.Direction.BUY,
            OrderType.fill_or_kill,
        ),
        Order(
            'BTC_ETH',
            0.07420755,
            5.124031796400001,
            Order.Direction.SELL,
            OrderType.fill_or_kill,
        ),
    ]
# Valid cases: buying requires enough base currency (BTC), selling
# requires enough of the traded currency (ETH).
@pytest.mark.parametrize('order,balances', [
    (
        Order(
            'BTC_ETH',
            0.07420755,
            2,
            Order.Direction.BUY,
            OrderType.fill_or_kill,
        ),
        {'BTC': 1},
    ),
    (
        Order(
            'BTC_ETH',
            0.07420755,
            2,
            Order.Direction.SELL,
            OrderType.fill_or_kill,
        ),
        {'ETH': 3},
    ),
])
def test_validate_order(order, balances):
    """Sufficiently funded orders pass validation without raising."""
    PoloniexMarketAdapter.validate_order(order, balances)
# Invalid cases: zero-amount orders (too small) and orders for which the
# relevant balance is missing or insufficient, in both directions.
@pytest.mark.parametrize('order,balances,error', [
    (
        Order(
            'BTC_ETH',
            0.07420755,
            0,
            Order.Direction.BUY,
            OrderType.fill_or_kill,
        ),
        {},
        OrderTooSmallError,
    ),
    (
        Order(
            'BTC_ETH',
            0.07420755,
            0,
            Order.Direction.SELL,
            OrderType.fill_or_kill,
        ),
        {},
        OrderTooSmallError,
    ),
    (
        Order(
            'BTC_ETH',
            0.07420755,
            1,
            Order.Direction.BUY,
            OrderType.fill_or_kill,
        ),
        {},
        InsufficientBalanceError,
    ),
    (
        Order(
            'BTC_ETH',
            0.07420755,
            1,
            Order.Direction.SELL,
            OrderType.fill_or_kill,
        ),
        {},
        InsufficientBalanceError,
    ),
    (
        Order(
            'BTC_ETH',
            0.07420755,
            1,
            Order.Direction.BUY,
            OrderType.fill_or_kill,
        ),
        {'BTC': 0.001},
        InsufficientBalanceError,
    ),
    (
        Order(
            'BTC_ETH',
            0.07420755,
            1,
            Order.Direction.SELL,
            OrderType.fill_or_kill,
        ),
        {'ETH': 0.001},
        InsufficientBalanceError,
    ),
])
def test_validate_order_error(order, balances, error):
    """Underfunded or too-small orders raise the expected error type."""
    with pytest.raises(error):
        PoloniexMarketAdapter.validate_order(order, balances)
def test_execute_order_buy(market_adapter):
    """A funded buy order is placed once and returns the exchange order id."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.BUY,
        OrderType.fill_or_kill,
    )
    balances = {'BTC': {'available': '1'}}
    response = {'orderNumber': 12345, 'resultingTrades': []}
    # Mock out both the balance check and the buy endpoint.
    with patch.object(market_adapter.private_api, 'return_complete_balances', return_value=balances):
        with patch.object(market_adapter.private_api, 'buy', return_value=response) as mock_buy:
            order_id = market_adapter.execute_order(order)
            assert order_id == 12345
    mock_buy.assert_called_once_with(
        currency_pair=order.market,
        rate=order.price,
        amount=order.amount,
        order_type=order.type,
    )
def test_execute_order_buy_retry(market_adapter):
    """An unfillable buy is retried at a price bumped by ORDER_ADJUSTMENT."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.BUY,
        OrderType.fill_or_kill,
    )
    balances = {'BTC': {'available': '1'}}
    # First attempt fails to fill; second succeeds.
    responses = [
        {'error': 'Unable to fill order completely.'},
        {'orderNumber': 12345, 'resultingTrades': []},
    ]
    with patch.object(market_adapter.private_api, 'return_complete_balances', return_value=balances):
        with patch.object(market_adapter.private_api, 'buy', side_effect=responses) as mock_buy:
            order_id = market_adapter.execute_order(order)
            assert order_id == 12345
    mock_buy.assert_has_calls([
        call(
            currency_pair=order.market,
            rate=order.price,
            amount=order.amount,
            order_type=order.type,
        ),
        call(
            # Buy retries bid slightly higher.
            currency_pair=order.market,
            rate=order.price + PoloniexMarketAdapter.ORDER_ADJUSTMENT,
            amount=order.amount,
            order_type=order.type,
        ),
    ])
def test_execute_order_sell(market_adapter):
    """A funded sell order is placed once and returns the exchange order id."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.SELL,
        OrderType.fill_or_kill,
    )
    balances = {'ETH': {'available': '4'}}
    response = {'orderNumber': 67890, 'resultingTrades': []}
    with patch.object(market_adapter.private_api, 'return_complete_balances', return_value=balances):
        with patch.object(market_adapter.private_api, 'sell', return_value=response) as mock_sell:
            order_id = market_adapter.execute_order(order)
            assert order_id == 67890
    mock_sell.assert_called_once_with(
        currency_pair=order.market,
        rate=order.price,
        amount=order.amount,
        order_type=order.type,
    )
def test_execute_order_sell_retry(market_adapter):
    """An unfillable sell is retried at a price lowered by ORDER_ADJUSTMENT."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.SELL,
        OrderType.fill_or_kill,
    )
    balances = {'ETH': {'available': '4'}}
    # First attempt fails to fill; second succeeds.
    responses = [
        {'error': 'Unable to fill order completely.'},
        {'orderNumber': 67890, 'resultingTrades': []},
    ]
    with patch.object(market_adapter.private_api, 'return_complete_balances', return_value=balances):
        with patch.object(market_adapter.private_api, 'sell', side_effect=responses) as mock_sell:
            order_id = market_adapter.execute_order(order)
            assert order_id == 67890
    mock_sell.assert_has_calls([
        call(
            currency_pair=order.market,
            rate=order.price,
            amount=order.amount,
            order_type=order.type,
        ),
        call(
            # Sell retries ask slightly lower.
            currency_pair=order.market,
            rate=order.price - PoloniexMarketAdapter.ORDER_ADJUSTMENT,
            amount=order.amount,
            order_type=order.type,
        ),
    ])
def test_execute_order_retries_exhausted(market_adapter):
    """With attempts=1, a single fill failure yields None (no retry)."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.BUY,
        OrderType.fill_or_kill,
    )
    balances = {'BTC': {'available': '1'}}
    # The second response would succeed, but attempts=1 means it is
    # never consumed -- the single failure exhausts the budget.
    responses = [
        {'error': 'Unable to fill order completely.'},
        {'orderNumber': 67890, 'resultingTrades': []},
    ]
    with patch.object(market_adapter.private_api, 'return_complete_balances', return_value=balances):
        with patch.object(market_adapter.private_api, 'buy', side_effect=responses) as mock_buy:
            order_id = market_adapter.execute_order(order, attempts=1)
            assert order_id is None
    mock_buy.assert_called_once_with(
        currency_pair=order.market,
        rate=order.price,
        amount=order.amount,
        order_type=order.type,
    )
def test_execute_order_invalid(market_adapter):
    """An order that fails balance validation produces no order id."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.BUY,
        OrderType.fill_or_kill,
    )
    # No balances at all, so validation cannot pass.
    empty_balances = {}
    with patch.object(
        market_adapter.private_api,
        'return_complete_balances',
        return_value=empty_balances,
    ):
        assert market_adapter.execute_order(order) is None
def test_execute_order_unknown_error(market_adapter):
    """An unrecognized API error aborts the order and returns None."""
    order = Order(
        'BTC_ETH',
        0.07420755,
        2,
        Order.Direction.BUY,
        OrderType.fill_or_kill,
    )
    balances = {'BTC': {'available': '1'}}
    # An error that is not the "unable to fill" retry case.
    response = {'error': 'You are a bad person and you should feel bad.'}
    with patch.object(market_adapter.private_api, 'return_complete_balances', return_value=balances):
        with patch.object(market_adapter.private_api, 'buy', return_value=response):
            order_id = market_adapter.execute_order(order)
            assert order_id is None
|
import React from "react";
import { Card } from "../";
import { render } from "enzyme";
import toJSON from "enzyme-to-json";
describe("Card", () => {
it("default", () => {
expect(toJSON(render(<Card>Example Content</Card>))).toMatchSnapshot();
});
it("with paddingSize set", () => {
expect(
toJSON(render(<Card paddingSize="l">Example Content</Card>))
).toMatchSnapshot();
});
it("with aspectRatio set", () => {
expect(
toJSON(render(<Card aspectRatio={[2, 1]}>Example Content</Card>))
).toMatchSnapshot();
});
});
|
#include "../include/bound_sketch.h"
#include "../include/util.h"
#include <math.h>
#include <omp.h>
#include <experimental/filesystem>
#include <chrono>
#include <mutex>
// Construct an empty estimator: no cached sketches and no offline sketches.
// (The member name "offline_skethces_" is misspelled in the header; kept as-is.)
BoundSketch::BoundSketch() {
    sketch_map_.clear();
    offline_skethces_.clear();
}
/*
 * Build one OfflineSketch per table of the data graph (offline builds only;
 * the whole body compiles away when ONLINE is defined). 'ratio' is
 * reinterpreted as the per-attribute bucket count (truncated to int --
 * NOTE(review): assumed intentional).
 */
void BoundSketch::PrepareSummaryStructure(DataGraph& g, double ratio) {
#ifndef ONLINE
    buckets_ = ratio;
    assert(buckets_ >= 1);
    offline_skethces_.clear();
#ifdef PARALLEL_BUILD
    int num = g.table_.size();
    // The lock serializes only the push_back; sketch construction runs
    // in parallel across tables.
    omp_lock_t lock;
    omp_init_lock(&lock);
    omp_set_num_threads(16);
#pragma omp parallel for num_threads(16)
    for (int t = 0; t < num; t++)
    {
        OfflineSketch *s = new OfflineSketch(t, buckets_, &g);
        omp_set_lock(&lock);
        offline_skethces_.push_back(s);
        omp_unset_lock(&lock);
    }
#else
    for (int t = 0; t < g.table_.size(); t++) {
        OfflineSketch *s = new OfflineSketch(t, buckets_, &g);
        offline_skethces_.push_back(s);
    }
#endif
#endif
}
/*
 * Serialize every offline sketch into directory 'fn' (created if absent).
 * No-op in ONLINE builds.
 */
void BoundSketch::WriteSummary(const char* fn) {
#ifndef ONLINE
    namespace fs = std::experimental::filesystem;
    fs::create_directory(fn);
    int num = offline_skethces_.size();
#ifdef PARALLEL_BUILD
#pragma omp parallel for num_threads(16)
#endif
    for (int i = 0; i < num; i++)
    {
        // Each sketch writes its own file, so parallel serialization is safe.
        offline_skethces_[i]->serialize(fn);
    }
#endif
}
/*
 * Load sketches previously written by WriteSummary() into sketch_map_,
 * then free the in-memory offline sketches. In ONLINE builds this only
 * clears the map.
 */
void BoundSketch::ReadSummary(const char* fn) {
    sketch_map_.clear();
#ifndef ONLINE
    // 'g' is presumably the member/global data graph -- NOTE(review): confirm.
    OfflineSketch::deserialize(fn, sketch_map_, g);
    for (OfflineSketch* s : offline_skethces_)
        delete s;
    offline_skethces_.clear();
#endif
}
// Per-query initialization: reset the build timer and the bounding-formula
// cursor. 'sample_ratio' is reused here as the bucket count --
// NOTE(review): assumed to be a framework-level global; confirm.
void BoundSketch::Init() {
    sketch_build_time_ = 0;
    buckets_ = sample_ratio;
    assert(buckets_ >= 1);
    bf_index_ = -1;  // -1 means the formulae have not been generated yet
}
// Bound sketches never split the query: there is always exactly one subquery.
int BoundSketch::DecomposeQuery() {
    return 1;
}
//generates all bounding formulae as an intialization
//and returns a bounding formula (with index bf_index_) for each call
//
// Returns true while there is a formula at bf_index_ to process, false
// once all formulae have been consumed. 'subquery_index' is unused
// (there is only ever one subquery; see DecomposeQuery()).
bool BoundSketch::GetSubstructure(int subquery_index) {
    // Lazily generate the covers and formulae on the first call.
    if (bf_index_ == -1) {
        getJoinAttributeCovers();
        getBoundFormulae();
    }
    bf_index_++;
    // NOTE(review): int vs size_t comparison; fine while the formula
    // count fits in an int.
    if (bf_index_ == bound_formulae_.size())
        return false;
    return true;
}
/*
 * Enumerate all ways of assigning each join attribute to one relation
 * that contains it ("covers"), then, for every cover, build a map from
 * relation index to the attributes it is responsible for.
 */
void BoundSketch::getJoinAttributeCovers() {
    covers_.clear();
    covers_.resize(q->num_attrs());
    has_join_attribute_ = false;
    join_attribute_cnt_.clear();
    int mx = -1;  // NOTE(review): never used afterwards; appears to be dead
    for (int i = 0; i < q->num_relations(); i++) {
        auto& r = q->relations_[i];
        int cnt = 0;
        // An attribute referenced by more than one relation is a join
        // attribute; record which relations can cover it.
        for (auto& a : r.attrs) {
            if (a.ref_cnt > 1) {
                covers_[a.id].push_back(i);
                cnt++;
            }
        }
        join_attribute_cnt_.push_back(cnt);
        // NOTE(review): set unconditionally, even when cnt == 0 --
        // possibly intended only when this relation has join attributes.
        has_join_attribute_ = true;
    }
    assignments_.clear();
    assignments_.resize(covers_.size());
    join_attribute_covers_.clear();
    getJoinAttributeCovers(0);  // recursive enumeration into join_attribute_covers_
    rel_to_covered_attributes_.clear();
    for (vector<int>& join_cover : join_attribute_covers_) {
        // NOTE(review): 'safe' is never set to false here, so every cover
        // passes; looks like a leftover pruning hook.
        bool safe = true;
        unordered_map<int, vector<int>> join_var_cover_map;
        for (int r = 0; r < q->num_relations(); r++) {
            // Collect the attributes this cover assigns to relation r.
            vector<int> responsibilities;
            for (int a = 0; a < covers_.size(); a++) {
                if (join_cover[a] == r)
                    responsibilities.push_back(a);
            }
            join_var_cover_map[r] = responsibilities;
        }
        if (safe)
            rel_to_covered_attributes_.push_back(join_var_cover_map);
    }
}
/*
 * Recursive helper: extend the partial assignment in assignments_ at
 * attribute 'pos'. A complete assignment is kept only if, for every
 * relation, the number of attributes assigned to it is either zero, all
 * of its join attributes, or all but one (the "conditional" case).
 */
void BoundSketch::getJoinAttributeCovers(int pos) {
    if (pos == covers_.size()) {
        // Complete assignment: validate the per-relation counts.
        bool safe = true;
        for (int i = 0; i < q->num_relations(); i++) {
            int cnt = 0;
            for (int j : assignments_) {
                if (i == j)
                    cnt++;
            }
            if ((cnt > 0) && (cnt != join_attribute_cnt_[i]) && (cnt != join_attribute_cnt_[i] - 1)) {
                safe = false;
                break;
            }
        }
        if (safe)
            join_attribute_covers_.push_back(assignments_);
        return;
    }
    //pos is not a join attribute
    if (covers_[pos].size() == 0) {
        assignments_[pos] = -1;  // -1 marks "not covered by any relation"
        getJoinAttributeCovers(pos + 1);
    } else {
        // Try every relation that could cover this join attribute.
        for (int r : covers_[pos]) {
            assignments_[pos] = r;
            getJoinAttributeCovers(pos + 1);
        }
    }
}
//generate all bounding formulae
void BoundSketch::getBoundFormulae() {
bound_formulae_.clear();
int curr = 0;
for (auto& map : rel_to_covered_attributes_) {
vector<Sketch*> uncL;
vector<Sketch*> conL;
vector<int> activeL;
/* generate hash sizes for each attribute */
unordered_map<int, bool> partitioned;
vector<int> unconditionals;
for (int r = 0; r < q->num_relations(); r++) {
if (map[r].size() == join_attribute_cnt_[r])
unconditionals.push_back(r);
}
bool covered_by_unc, covered_by_con;
int num_partitioned = 0;
for (int i = 0; i < covers_.size(); i++) {
//i is not a join attribute
if (covers_[i].size() == 0) {
continue;
}
covered_by_unc = covered_by_con = false;
for (int r : covers_[i]) {
if (find(unconditionals.begin(), unconditionals.end(), r) != unconditionals.end()) {
covered_by_unc = true;
} else {
covered_by_con = true;
}
}
if (covered_by_unc && covered_by_con) {
//must be a join attribute
partitioned[i] = true;
num_partitioned++;
} else {
partitioned[i] = false;
}
}
unordered_map<int, int> hash_sizes_map;
for (int i = 0; i < covers_.size(); i++) {
hash_sizes_map[i] = 1;
//i is not a join attribute
if (covers_[i].size() == 0) {
continue;
}
if (partitioned[i]) {
hash_sizes_map[i] = round(pow(buckets_, 1.0 / num_partitioned));
hash_sizes_map[i] = std::max(hash_sizes_map[i], 1);
}
}
/* generate sketches for each relation in the join */
for (int r = 0; r < q->num_relations(); r++) {
auto& rel = q->relations_[r];
Sketch* s = NULL;
num_partitioned = 0;
/* generate array of columns and array of actual attributes */
vector<int> join_attrs_specific;
vector<int> join_cols;
vector<int> hash_sizes;
for (auto& a : rel.attrs) {
if (a.ref_cnt > 1) {
if (hash_sizes_map[a.id] > 1) {
join_attrs_specific.push_back(a.id);
join_cols.push_back(a.pos);
hash_sizes.push_back(hash_sizes_map[a.id]);
num_partitioned++;
}
}
}
/* get the active column (if none (unconditional) this is null) */
int active_attribute = -1;
int active_col = -1;
if (join_attribute_cnt_[r] != map[r].size()) {
for (auto& a : rel.attrs) {
if (a.ref_cnt > 1) {
if (find(map[r].begin(), map[r].end(), a.id) == map[r].end()) {
active_attribute = a.id;
active_col = a.pos; //0 for src, 1 for dst
break;
}
}
}
}
//for example, a sketch with hash sizes 1024,4 with join column src,dst
//is equivalent to a sketch with hash sizes 4,1024 with join columns dst,src
if (join_attrs_specific.size() == 2) {
if (join_cols[0] != 0) {
join_cols[0] = 0;
join_cols[1] = 1;
int temp = join_attrs_specific[0];
join_attrs_specific[0] = join_attrs_specific[1];
join_attrs_specific[1] = temp;
temp = hash_sizes[0];
hash_sizes[0] = hash_sizes[1];
hash_sizes[1] = temp;
}
}
if (hash_sizes.size() == 0)
hash_sizes.push_back(1);
vector<int> bounds;
vector<int> bound_cols;
/* build a probe to see if we already saw this particular sketch from another query */
string probe;
int alias = g->get_table_id(rel.id);
probe.append(to_string(alias));
probe.append("[");
//this wastes computation for TwoDimensionalSketchCon!
probe.append(to_string(active_col));
probe.append("][");
for (int h : hash_sizes) {
probe.append(to_string(h));
probe.append(", ");
}
probe.append("][");
for (int c : join_cols) {
probe.append(to_string(c));
probe.append(", ");
}
probe.append("][");
for (auto& a : rel.attrs) {
#ifdef ONLINE
if (a.is_bound) {
probe.append(to_string(a.pos));
probe.append(":");
probe.append(to_string(a.bound));
probe.append(", ");
bounds.push_back(a.bound);
bound_cols.push_back(a.pos);
}
#endif
}
probe.append("]");
if (sketch_map_.find(probe) != sketch_map_.end()) {
s = sketch_map_[probe];
} else {
#ifndef ONLINE
assert(false); //should have been preprocessed offline!
#endif
auto ckpt = chrono::high_resolution_clock::now();
//# of partitioned attributes
if (join_attrs_specific.size() == 0) {
if (active_col == -1)
s = new ZeroDimensionalSketchUnc(alias, active_col, join_cols, hash_sizes, bounds, bound_cols, g, "");
else
s = new ZeroDimensionalSketchCon(alias, active_col, join_cols, hash_sizes, bounds, bound_cols, g, "");
} else if (join_attrs_specific.size() == 1) {
if (active_col == -1)
s = new OneDimensionalSketchUnc(alias, active_col, join_cols, hash_sizes, bounds, bound_cols, g, "");
else
s = new OneDimensionalSketchCon(alias, active_col, join_cols, hash_sizes, bounds, bound_cols, g, "");
} else if (join_attrs_specific.size() == 2) {
if (active_attribute == -1)
s = new TwoDimensionalSketchUnc(alias, active_col, join_cols, hash_sizes, bounds, bound_cols, g, "");
else
s = new TwoDimensionalSketchCon(alias, active_col, join_cols, hash_sizes, bounds, bound_cols, g, "");
}
else {
cerr << "you're asking for too many attributes..." << endl;
exit(-1);
}
auto elapsed_nano = chrono::high_resolution_clock::now() - ckpt;
sketch_build_time_ += (double) elapsed_nano.count() / 1000000; //in milliseconds
sketch_map_[probe] = s;
}
for (int i = 0; i < join_attrs_specific.size(); i++) {
s->l2gIndex[curr][i] = join_attrs_specific[i];
s->g2lIndex[curr][join_attrs_specific[i]] = i;
}
/* make sure the active attribute appears in the g2l and l2g maps */
if (join_attrs_specific.size() == 0 && active_attribute != -1) {
s->l2gIndex[curr][0] = active_attribute;
s->g2lIndex[curr][active_attribute] = 0;
}
if (map[r].size() == join_attribute_cnt_[r]) {
uncL.push_back(s);
} else if (map[r].size() == join_attribute_cnt_[r] - 1) {
for (auto& a : rel.attrs){
if (a.ref_cnt > 1) {
if (find(map[r].begin(), map[r].end(), a.id) == map[r].end()) {
activeL.push_back(a.id);
break;
}
}
}
conL.push_back(s);
} else {
assert(map[r].size() == 0);
}
}
vector<int> hash_sizes_global(covers_.size(), 1);
for (size_t i = 0; i < covers_.size(); i++) {
hash_sizes_global[i] = hash_sizes_map[i];
assert(hash_sizes_global[i] > 0);
}
assert(conL.size() == activeL.size());
if (uncL.size() > 0) {
BoundFormula bf(curr, uncL, conL, activeL, hash_sizes_global);
curr++;
bound_formulae_.push_back(bf);
}
}
}
//returns the summation of the selected bounding formula (with index bf_index_)
//instantiated with counts and maximum degrees of partitions
double BoundSketch::EstCard(int subquery_index) {
    //(subquery_index is unused here; the formula was already selected via bf_index_)
    BoundFormula& bf = bound_formulae_[bf_index_];
    if (!has_join_attribute_) {
        //cross product: the single unconditional sketch's count is the answer
        ZeroDimensionalSketchUnc* u = (ZeroDimensionalSketchUnc*) bf.uncList[0];
        bf_index_ = bound_formulae_.size(); //no next GetSubstructure
        return u->unc[0];
    }
    else {
        long res = 0;
        //sum the bound over every combination of partition buckets
        CrossProductIterator cp(bf.hash_sizes);
        if (cp.totalBuckets > 1) {
            while (cp.hasNext()) {
                res += bf.execute(cp.next());
            }
        } else {
            //single bucket: evaluate once with an all-zero bucket index
            vector<int> index(bf.hash_sizes.size(), 0);
            res = bf.execute(index);
        }
        //signed overflow of the accumulator shows up as a negative value;
        //report "effectively unbounded" instead of a bogus negative estimate
        if (res < 0)
            res = numeric_limits<long>::max();
        return (double) res;
    }
}
//Aggregates per-candidate cardinality bounds by taking their minimum
//(every entry is a valid upper bound, so the tightest one wins).
double BoundSketch::AggCard() {
    if (card_vec_.empty())
        return 0.0;
    return *std::min_element(card_vec_.begin(), card_vec_.end());
}
//Selectivity is not modeled by this estimator; always reports 1 (no reduction).
double BoundSketch::GetSelectivity() {
#ifdef ONLINE
    //when sketches are built online, report the accumulated build time (ms)
    cout << "online sketch build time: " << sketch_build_time_ << endl;
#endif
    return 1;
}
//Releases every cached sketch owned by this estimator.
BoundSketch::~BoundSketch() {
    for (auto it = sketch_map_.begin(); it != sketch_map_.end(); ++it) {
        delete it->second;
    }
}
|
// Set up the OpenGL camera for rendering: viewport, projection frustum,
// two opposed directional lights, and the model-view transform.
void gl_camera::setup(const math_vector_3f &scene_center, float scene_size) const
{
    glViewport(viewx, viewy, vieww, viewh);
    // Scene center expressed in camera (eye) coordinates.
    math_vector_3f center = m_camera_rm * scene_center;
    // Far plane well behind the scene (8x its size); near plane just in front
    // of it, clamped by MAXDOF so the near/far ratio (depth precision) stays bounded.
    float fardist = -(center[2] - 8*scene_size);
    float neardist = max( -(center[2] + scene_size), scene_size / MAXDOF);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(field_of_view, (float)vieww/(float)viewh, neardist, fardist );
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    // Directional lights (w == 0), pointing along +/- lightdir. They are set
    // while the model-view matrix is identity, so the directions are fixed in
    // eye space rather than rotating with the scene.
    GLfloat light0_position[] = { lightdir[0], lightdir[1], lightdir[2], 0 };
    GLfloat light1_position[] = { -lightdir[0], -lightdir[1], -lightdir[2], 0 };
    glLightfv(GL_LIGHT0, GL_POSITION, light0_position);
    glLightfv(GL_LIGHT1, GL_POSITION, light1_position);
    // Finally apply the camera transform to the model-view stack.
    float gl_rm[16];
    to_opengl( m_camera_rm, gl_rm );
    glMultMatrixf( gl_rm );
}
/**
* Creates a new decorator with the specified {@link CircuitBreakerMapping}.
*/
public static <I extends Request, O extends Response>
Function<Client<I, O>, CircuitBreakerClient<I, O>>
newDecorator(CircuitBreakerMapping mapping) {
return delegate -> new CircuitBreakerClient<>(delegate, mapping);
} |
<reponame>nikita-volkov/rerebase
-- | Re-exports 'Rebase.System.Posix.Internals' under the upstream module name.
module System.Posix.Internals (module Rebase.System.Posix.Internals) where

import Rebase.System.Posix.Internals
|
use super::Service;
use crate::{ApiError, Error, ServiceClient};
use maybe_async::maybe_async;
mod request;
mod response;
/// Holidays API request.
pub use request::HolidaysRequest;
/// Holidays API response.
pub use response::HolidaysResponse;
/// Marker type that wires the *Holidays* endpoint into the generic
/// [`Service`] plumbing (endpoint path plus request/response types).
struct HolidaysService;
impl Service for HolidaysService {
    // URL path segment of the Holidays endpoint.
    const PATH: &'static str = "holidays";
    type Request = HolidaysRequest;
    type Response = HolidaysResponse;
}
impl ServiceClient {
    /// The *Holidays* service can be used to retrieve a list of holidays for a country.
    ///
    /// The outer `Result` carries client-side failures (`Error`); the inner
    /// one carries errors reported by the API itself (`ApiError`).
    #[maybe_async]
    pub async fn get_holidays(
        &self,
        request: &HolidaysRequest,
    ) -> Result<Result<HolidaysResponse, ApiError>, Error> {
        self.call::<HolidaysService>(request).await
    }
}
|
module Snap.Snaplet.Test.Common.Handlers where
------------------------------------------------------------------------------
import Control.Monad.IO.Class (liftIO)
import Data.Configurator (lookup)
import Data.Maybe (fromJust, fromMaybe)
import Data.Text (append, pack)
import Data.Text.Encoding (decodeUtf8)
------------------------------------------------------------------------------
import Data.Map.Syntax ((##))
import Heist.Interpreted (textSplice)
import Snap.Core (writeText, getParam)
import Snap.Snaplet (Handler, getSnapletUserConfig, with)
import Snap.Snaplet.Test.Common.FooSnaplet
import Snap.Snaplet.Test.Common.Types
import Snap.Snaplet.HeistNoClass (renderWithSplices)
import Snap.Snaplet.Session (csrfToken, getFromSession, sessionToList, setInSession, withSession)
-------------------------------------------------------------------------------
-- | Fetches the foo snaplet's field and echoes it with a prefix.
routeWithSplice :: Handler App App ()
routeWithSplice = do
    fooField <- with foo getFooField
    writeText (pack ("routeWithSplice: " ++ fooField))
------------------------------------------------------------------------------
-- | Reads @topConfigField@ from the snaplet's user config and echoes it.
routeWithConfig :: Handler App App ()
routeWithConfig = do
  cfg <- getSnapletUserConfig
  val <- liftIO $ Data.Configurator.lookup cfg "topConfigField"
  -- 'fromJust' would die with an uninformative "Maybe.fromJust: Nothing";
  -- keep the crash-on-missing behaviour but name the missing key.
  writeText $ "routeWithConfig: " `append`
    fromMaybe (error "routeWithConfig: missing config key \"topConfigField\"") val
------------------------------------------------------------------------------
-- | Seeds the session with @foo = bar@ on first visit, then renders the
-- session contents and CSRF token through the "session" template.
sessionDemo :: Handler App App ()
sessionDemo = withSession session $ do
    with session $ do
        existing <- getFromSession "foo"
        maybe (setInSession "foo" "bar") (const (return ())) existing
    sessionText <- with session (fmap (pack . show) sessionToList)
    csrfText <- with session (fmap (pack . show) csrfToken)
    renderWithSplices heist "session" $ do
        "session" ## textSplice sessionText
        "csrf" ## textSplice csrfText
------------------------------------------------------------------------------
-- | Stores the @q@ parameter in the session when present; otherwise echoes
-- whatever was stored previously (empty text if nothing was).
sessionTest :: Handler App App ()
sessionTest = withSession session $ do
    mq <- getParam "q"
    txt <- case mq of
        Nothing -> fmap (fromMaybe "") (with session (getFromSession "test"))
        Just raw -> do
            let decoded = decodeUtf8 raw
            with session (setInSession "test" decoded)
            return decoded
    writeText txt
|
Norwegian Christmas card
A 19th-century American Christmas card
A Christmas card is a greeting card sent as part of the traditional celebration of Christmas in order to convey between people a range of sentiments related to Christmastide and the holiday season. Christmas cards are usually exchanged during the weeks preceding Christmas Day by many people (including some non-Christians) in Western society and in Asia. The traditional greeting reads "wishing you a Merry Christmas and a Happy New Year". There are innumerable variations on this greeting, many cards expressing more religious sentiment, or containing a poem, prayer, Christmas song lyrics or Biblical verse; others focus on the general holiday season with an all-inclusive "Season's greetings".
A Christmas card is generally commercially designed and purchased for the occasion. The content of the design might relate directly to the Christmas narrative with depictions of the Nativity of Jesus, or have Christian symbols such as the Star of Bethlehem or a white dove representing both the Holy Spirit and Peace. Many Christmas cards show Christmas traditions, such as seasonal figures (e.g., Santa Claus, snowmen, and reindeer), objects associated with Christmas such as candles, holly, baubles, and Christmas trees, and Christmastime activities such as shopping, caroling, and partying, or other aspects of the season such as the snow and wildlife of the northern winter. Some secular cards depict nostalgic scenes of the past such as crinolined shoppers in 19th century streetscapes; others are humorous, particularly in depicting the antics of Santa and his elves.
History [ edit ]
Children looking at Christmas cards in New York 1910
Christmas card by Louis Prang, showing a group of anthropomorphized frogs parading with banner and band.
The first recorded Christmas cards were sent by Michael Maier to James I of England and his son Henry Frederick, Prince of Wales in 1611.[1] It was discovered in 1979 by Adam McLean in the Scottish Record Office.[2] They incorporated Rosicrucian imagery, with the words of the greeting – "A greeting on the birthday of the Sacred King, to the most worshipful and energetic lord and most eminent James, King of Great Britain and Ireland, and Defender of the true faith, with a gesture of joyful celebration of the Birthday of the Lord, in most joy and fortune, we enter into the new auspicious year 1612" – being laid out to form a rose.
The next cards were commissioned by Sir Henry Cole and illustrated by John Callcott Horsley in London on 1 May 1843.[3][4][5] The central picture showed three generations of a family raising a toast to the card's recipient: on either side were scenes of charity, with food and clothing being given to the poor.[6] Allegedly the image of the family drinking wine together proved controversial, but the idea was shrewd: Cole had helped introduce the Penny Post three years earlier. Two batches totaling 2,050 cards were printed and sold that year for a shilling each.[7]
Early British cards rarely showed winter or religious themes, instead favoring flowers, fairies and other fanciful designs that reminded the recipient of the approach of spring. Humorous and sentimental images of children and animals were popular, as were increasingly elaborate shapes, decorations and materials. At Christmas 1873, the lithograph firm Prang and Mayer began creating greeting cards for the popular market in Britain. The firm began selling the Christmas card in America in 1874, thus becoming the first printer to offer cards in America. Its owner, Louis Prang, is sometimes called the "father of the American Christmas card."[8] By the 1880s, Prang was producing over five million cards a year by using the chromolithography process of printmaking.[3] However, the popularity of his cards led to cheap imitations that eventually drove him from the market. The advent of the postcard spelled the end for elaborate Victorian-style cards, but by the 1920s, cards with envelopes had returned. The extensive Laura Seddon Greeting Card Collection from the Manchester Metropolitan University gathers 32,000 Victorian and Edwardian greeting cards, printed by the major publishers of the day,[9] including Britain's first commercially produced Christmas card.[10]
The production of Christmas cards was, throughout the 20th century, a profitable business for many stationery manufacturers, with the design of cards continually evolving with changing tastes and printing techniques. The now widely recognized brand Hallmark Cards was established in 1913 by Joyce Hall with the help of brother Rollie Hall to market their self-produced Christmas cards.[11] The Hall brothers capitalized on a growing desire for more personalized greeting cards, and reached critical success when the outbreak of World War I increased demand for cards to send to soldiers.[11] The World Wars brought cards with patriotic themes. Idiosyncratic "studio cards" with cartoon illustrations and sometimes risque humor caught on in the 1950s. Nostalgic, sentimental, and religious images have continued in popularity, and, in the 21st century, reproductions of Victorian and Edwardian cards are easy to obtain. Modern Christmas cards can be bought individually but are also sold in packs of the same or varied designs. In recent decades changes in technology may be responsible for the decline of the Christmas card. The estimated number of cards received by American households dropped from 29 in 1987 to 20 in 2004.[12] Email and telephones allow for more frequent contact and are easier for generations raised without handwritten letters - especially given the availability of websites offering free email Christmas cards. Despite the decline, 1.9 billion cards were sent in the U.S. in 2005 alone.[13] Some card manufacturers now provide E-cards. In the UK, Christmas cards account for almost half of the volume of greeting card sales, with over 668.9 million Christmas cards sold in the 2008 festive period.[14] In mostly non-religious countries (e.g. Czech Republic), the cards are rather called New Year Cards, however they are sent before Christmas and the emphasis (design, texts) is mostly given to the New Year, omitting religious symbols.
Official Christmas cards [ edit ]
"Official" Christmas cards began with Queen Victoria in the 1840s. The British royal family's cards are generally portraits reflecting significant personal events of the year.
Despite the governing practice of the separation of church and state within American politics, there is a long-standing custom for the President and First Lady to send White House Christmas Cards each holiday season.[15] The practice originated with President Calvin Coolidge, who was the first president to issue a written statement of peaceful tidings during the holidays in 1927.[15][16] President Herbert Hoover was the first to give Christmas notes to the White House staff, and President Franklin Delano Roosevelt was the first president to utilize the card format (rather than the previously used notes or a written statement) that most closely resembles the Christmas cards of today.[15]
In 1953, U.S. President Dwight D. Eisenhower issued the first official White House card. The cards usually depict White House scenes as rendered by prominent American artists. The number of recipients has snowballed over the decades, from just 2,000 in 1961 to 1.4 million in 2005.[17]
Commercial Christmas cards [ edit ]
Tucker Corporation Christmas Card, 1947
Many businesses, from small local businesses to multi-national enterprises, send Christmas cards to the people on their customer lists, as a way to develop general goodwill, retain brand awareness and reinforce social networks. These cards are almost always discrete and secular in design, and do not attempt to sell a product, limiting themselves to mentioning the name of the business. The practice harkens back to trade cards of the 18th century, an ancestor of the modern Christmas card.
Charity Christmas cards [ edit ]
Many organizations produce special Christmas cards as a fundraising tool. The most famous of these enterprises is probably the UNICEF Christmas card program,[18] launched in 1949, which selects artwork from internationally known artists for card reproduction. The UK-based Charities Advisory Trust gives out an annual "Scrooge Award" to the cards that return the smallest percentage to the charities they claim to support[19] although it is not universally well received by the Christmas card producers.[20]
Christmas stamps and stickers [ edit ]
Santa Coming Down the Chimney
Many countries produce official Christmas stamps, which may be brightly colored and depict some aspect of Christmas tradition or a Nativity scene. Small decorative stickers are also made to seal the back of envelopes, typically showing a trinket or some symbol of Christmas.
In 2004, the German post office gave away 20 million free scented stickers, to make Christmas cards smell of a fir Christmas tree, cinnamon, gingerbread, a honey-wax candle, a baked apple and an orange.
Collectors items [ edit ]
From the beginning, Christmas cards have been avidly collected. Queen Mary amassed a large collection that is now housed in the British Museum.[21] The University College London's Slade School of Fine Art houses a collection of handmade Christmas Cards from alumni such as Paula Rego and Richard Hamilton and are displayed at events over the Christmas season, when members of the public can make their own Christmas cards in the Strang Print Room.[22] Specimens from the "golden age" of printing (1840s–1890s) are especially prized and bring in large sums at auctions. In December 2005, one of Horsley's original cards sold for nearly £9,000. Collectors may focus on particular images like Santa Claus, poets, or printing techniques. The Christmas card that holds the world record as the most expensive ever sold was a card produced in 1843 by J. C. Horsley and commissioned by civil servant Sir Henry Cole. The card, one of the world's first, was sold in 2001 by UK auctioneers Henry Aldridge to an anonymous bidder for a record breaking £22,250.[23]
Santa Claus and his reindeer
Silk cord and tassels, c. 1860
Victorian, c. 1870
Christmas Card, 1880
Victorian, 1885
Postcard, c. 1901
Christmas card, 1904
Christmas postcard 1907
Christmas card, 1912
American card, c. 1920
Frances Brundage Christmas card, 1910
Home-made cards [ edit ]
Since the 19th century, many families and individuals have chosen to make their own Christmas cards, either in response to monetary necessity, as an artistic endeavour, or in order to avoid the commercialism associated with Christmas cards. With a higher preference of handmade gifts during the 19th century over purchased or commercial items, homemade cards carried high sentimental value as gifts alone. Many families make the creation of Christmas cards a family endeavour and part of the seasonal festivity, along with stirring the Christmas cake and decorating the tree. Over the years such cards have been produced in every type of paint and crayon, in collage and in simple printing techniques such as potato-cuts. A revival of interest in paper crafts, particularly scrapbooking, has raised the status of the homemade card and made available an array of tools for stamping, punching and cutting.
Advances in digital photography and printing have provided the technology for many people to design and print their own cards, using their original graphic designs or photos, or those available with many computer programs or online as clip art, as well as a great range of typefaces. Such homemade cards include personal touches such as family photos and holidays snapshots. Crowdsourcing, another trend enabled by the Internet, has allowed thousands of independent and hobbyist graphic designers to produce and distribute holiday cards around the world.
The Christmas card list [ edit ]
Christmas Market in Nürnberg, lithography from the 19th century.
Danish Christmas card, 1919
Many people send cards to both close friends and distant acquaintances, potentially making the sending of cards a multi-hour chore in addressing dozens or even hundreds of envelopes. The greeting in the card can be personalized but brief, or may include a summary of the year's news. The extreme of this is the Christmas letter (below). Because cards are usually exchanged year after year, the phrase "to be off someone's Christmas card list" is used to indicate a falling out between friends or public figures.
Christmas letters [ edit ]
Some people take the annual mass-mailing of cards as an opportunity to update those they know with the year's events, and include the so-called "Christmas letter" reporting on the family's doings, sometimes running to multiple printed pages. In the UK these are known as round-robin letters.[24] While a practical notion, Christmas letters meet with a mixed reception; recipients may take it as boring minutiae, bragging, or a combination of the two, whereas other people appreciate Christmas letters as more personal than mass-produced cards with a generic missive and an opportunity to "catch up" with the lives of family and friends who are rarely seen or communicated with. Since the letter will be received by both close and distant relatives, there is also the potential for the family members to object to how they are presented to others; an entire episode of Everybody Loves Raymond was built around conflict over the content of just such a letter.
Environmental impact and recycling [ edit ]
Christmas card with holly
During the first 70 years of the 19th century it was common for Christmas and other greeting cards to be recycled by women's service organizations who collected them and removed the pictures, to be pasted into scrap books for the entertainment of children in hospitals, orphanages, kindergartens and missions. With children's picture books becoming cheaper and more readily available, this form of scrap-booking has almost disappeared.
Recent concern over the environmental impact of printing, mailing and delivering cards has fueled an increase in e-cards.[25][26]
The U.K. conservation charity Woodland Trust runs an annual campaign to collect and recycle Christmas cards to raise awareness of recycling and collect donations from corporate sponsors and supporters. All recycled cards help raise money to plant more trees. In the 12 years that the Woodland Trust Christmas Card Recycling Scheme has been running, more than 600 million cards have been recycled. This has enabled the Woodland Trust to plant more than 141,000 trees, save over 12,000 tonnes of paper from landfill and stop over 16,000 tonnes of CO2 from going into the atmosphere – the equivalent to taking more than 5,000 cars off the road for a year.[27] The scheme has had celebrity supporters including Jo Brand, Dermot O' Leary and Sean Bean and is the longest running scheme of its type in the country.[28]
International Christmas greetings [ edit ]
Christmas card made on a PC
The traditional English greeting of "Merry Christmas and a Happy New Year" as it appears in other languages:[29]
Christmas postcards
American card, c. 1940
War-related, c. 1943
Rust Craft, c. 1950
Snow in the Netherlands
Christmas card
Christmas card Frances Brundage
Merry Christmas card
Christmas tree market
Christmas card with embroidery
Christmas Card
Santa Claus clothes
References [ edit ] |
Granzyme B Is a Biomarker For Suspicion of Malignant Seromas Around Breast Implants.
BACKGROUND
Granzyme B (GrB) is a serine protease secreted by cytotoxic lymphocytes along with pore forming perforin to mediate apoptosis in target cells. GrB has been detected in tumor cells associated with systemic and breast implant associated anaplastic large cell lymphoma (BIA-ALCL) but its potential use for detection of early BIA-ALCL has not been fully investigated.
OBJECTIVES
With increasing numbers of patients affected by BIA-ALCL, it becomes important to identify new biomarkers to detect early disease in malignant seromas and to better understand the nature of the neoplastic cell.
METHODS
A Human XL Cytokine Discovery Magnetic Luminex 45-plex Fixed Panel Performance Assay was used to compare cytokine levels in cell culture supernates of BIA-ALCL and other T cell lymphomas, as well as malignant and benign seromas surrounding breast implants. Immunohistochemistry was employed to localize GrB to cells in seromas and capsular infiltrates.
RESULTS
Differences in concentration of GrB between malignant and benign seromas were significant (p<0.001). GrB was found in and around apoptotic tumor cells raising the hypothesis the protease may be involved in tumor cell death.
CONCLUSIONS
GrB is a useful marker for early detection of malignant seromas and to identify tumor cells in seromas and capsular infiltrates. Because of overlap between the lowest concentrations of soluble GrB in malignant seromas with highest concentrations of GrB in benign seromas, it is recommended that GrB be used only as part of a panel of biomarkers for the screening and early detection of BIA-ALCL. |
## Here W -- column , n -- row...
def knapsack(wt, val, W, n):
    """Return the maximum total value of items that fit in capacity ``W``.

    Classic 0/1 knapsack, bottom-up dynamic programming.

    wt  -- list of item weights
    val -- list of item values (parallel to ``wt``)
    W   -- knapsack capacity (non-negative int)
    n   -- number of items to consider (normally ``len(val)``)
    """
    # t[i][j] = best value using the first i items with capacity j.
    # Row 0 (no items) and column 0 (no capacity) are the base case and
    # stay 0, so the table can simply start zero-filled -- no separate
    # -1 fill followed by a zeroing pass.
    t = [[0] * (W + 1) for _ in range(n + 1)]
    for i in range(1, n + 1):
        for j in range(1, W + 1):
            if wt[i - 1] <= j:
                # Either take item i-1 (its value plus the best result with
                # the remaining capacity) or skip it; keep the better option.
                t[i][j] = max(val[i - 1] + t[i - 1][j - wt[i - 1]],
                              t[i - 1][j])
            else:
                # Item i-1 is too heavy for capacity j.
                t[i][j] = t[i - 1][j]
    return t[n][W]
# Driver: read weights, values and capacity from stdin, print best value.
if __name__ == '__main__':
    weights = [int(tok) for tok in input().split()]
    values = [int(tok) for tok in input().split()]
    capacity = int(input())
    print(knapsack(weights, values, capacity, len(values)))
<gh_stars>0
/**
* Basic Generics usage
*
* Basic example os using generics in TypeString - Thanks about that, Microsoft <3
*/
import ArrayList from "./util/ArrayList";
// Exercise ArrayList with two different element types; the bindings are
// never reassigned, so `const` (block-scoped) replaces the legacy `var`.
const numberList = new ArrayList<number>();
numberList.add(10);
numberList.add(20);
numberList.add(30);

const stringList = new ArrayList<string>();
stringList.add("<NAME>");
stringList.add("Somethind else");

console.log("It's working... Look the code, you will see!");
|
def shallow_copy(self):
    """Return a new GraphBuilder with an independent ``processes`` mapping.

    NOTE(review): despite the name, ``processes`` is *deep*-copied
    (``copy.deepcopy``), so nested process dicts are duplicated as well --
    confirm whether a true shallow copy was intended before renaming or
    changing this.
    """
    the_copy = GraphBuilder()
    the_copy.processes = copy.deepcopy(self.processes)
    return the_copy
/**
* Make sure exceptions and bad params are handled appropriately
*/
public void testBadParams() {
try {
TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
assertTrue(reader != null);
reader.get(50, testFields[0]);
fail();
} catch (IOException e) {
}
try {
TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
assertTrue(reader != null);
reader.get(50);
fail();
} catch (IOException e) {
}
try {
TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
assertTrue(reader != null);
TermFreqVector vector = reader.get(0, "f50");
assertTrue(vector == null);
} catch (IOException e) {
fail();
}
} |
import { Injectable } from '@angular/core';
import { Constants } from '../helpers/constants';
import { HttpClient } from '@angular/common/http';
import { catchError } from 'rxjs/operators';
import { environment } from '../../environments/environment';
import { UtilitiesService } from './utilities.service';
import { Observable } from 'rxjs';
@Injectable({
  providedIn: 'root'
})
export class ForceUpdateService {
  // Progress of the two forced onboarding steps.
  insertData = {
    isChangePassword: false,
    isChangeProfile: false
  };

  constructor(
    private http: HttpClient,
    private constants: Constants,
    private util: UtilitiesService
  ) {}

  // Read the force-change state from localStorage ('' when nothing stored).
  // Reads the key once instead of twice (the original called getItem twice).
  getDataForceChange() {
    const raw = localStorage.getItem(this.constants.localStorage.forceChange);
    return raw ? JSON.parse(raw) : '';
  }

  // Persist progress: 1 = password changed, 2 = profile changed.
  setDataForceChange(data: number) {
    if (data === 1) {
      // if data is 1, password change is done
      this.insertData.isChangePassword = true;
    } else if (data === 2) {
      // if data is 2, profile edit is done
      this.insertData.isChangeProfile = true;
    }
    localStorage.setItem(
      this.constants.localStorage.forceChange,
      JSON.stringify(this.insertData)
    );
  }

  // 0 = nothing stored yet, 1 = password change pending, 2 = profile change pending.
  // NOTE(review): when both steps are already done this returns undefined —
  // confirm callers treat that as "no update required".
  checkForceUpdate() {
    let stateUpdate: number;
    const dataForceChange = this.getDataForceChange();
    if (!dataForceChange) {
      stateUpdate = 0;
    } else if (dataForceChange.isChangePassword === false) {
      stateUpdate = 1;
    } else if (
      dataForceChange.isChangePassword === true &&
      dataForceChange.isChangeProfile === false
    ) {
      stateUpdate = 2;
    }
    return stateUpdate;
  }

  // POST the forced profile change to the backend.
  PostForceChangeProfile(data: any): Observable<any> {
    return this.http
      .post<any>(`${environment.API_URL}/user/me/change-profile`, data)
      .pipe(catchError(this.util.handleError));
  }
}
|
/// Creates a new rigid body that can move.
pub fn new_dynamic<G>(shape: G, density: Scalar, restitution: Scalar, friction: Scalar) -> RigidBody
where G: Send + Sync + Repr<Point, Matrix> + Volumetric<Scalar, Point, AngularInertia> {
let props = shape.mass_properties(density);
RigidBody::new(
Arc::new(Box::new(shape) as Box<Repr<Point, Matrix>>),
Some(props),
restitution,
friction)
} |
<reponame>Zubieta/CPP
# https://app.codesignal.com/arcade/intro/level-7/PTWhv2oWqd6p4AHB9
def stringsRearrangement(input_array):
    """Return True if the strings can be ordered so that every pair of
    consecutive strings differs in exactly one character position.

    Treats each string as a graph node with an edge wherever two strings
    differ in exactly one position, then searches for a Hamiltonian path
    (a DFS that visits every node via 1-difference jumps).
    """
    words = list(input_array)
    n, m = len(words), len(words[0])

    def _distance(a, b):
        # Number of positions at which a and b differ.
        return sum(a[k] != b[k] for k in range(m))

    # cost[i][j]: differing-character count between words i and j;
    # the diagonal is kept "unreachable" so a word never jumps to itself.
    cost = [[_distance(words[i], words[j]) if i != j else float('inf')
             for j in range(n)] for i in range(n)]

    def _dfs(visited, pos):
        # visited: frozenset of indices already placed; the path ends at pos.
        visited = visited | {pos}
        if len(visited) == n:
            return True  # every word placed -> valid arrangement
        # Extend the path with any unvisited word exactly one character away.
        # any() short-circuits on the first success (the original OR-ed all
        # branches, exploring the full tree even after finding an answer).
        return any(
            cost[pos][nxt] == 1 and _dfs(visited, nxt)
            for nxt in range(n)
            if nxt not in visited
        )

    # A valid arrangement may start at any word.
    return any(_dfs(frozenset(), start) for start in range(n))
|
#include <algorithm>
#include <iostream>
using namespace std;
void display();
const int MAX_CITY = 10;
const int INF = 1e9;
// Reads datasets of weighted, undirected edges until a dataset count of 0.
// For each dataset it runs Floyd–Warshall to compute all-pairs shortest
// paths, then prints the vertex with the smallest total travel time to all
// other vertices, followed by that total.
//
// Input per dataset: n (number of edges), then n lines of "from to cost".
// Vertex numbers start at 0 and are assumed to be < MAX_CITY.
int main() {
    int n;
    int cost[MAX_CITY][MAX_CITY];  // cost[i][j]: shortest known distance i -> j
    int from, to, c;
    int sum[MAX_CITY];             // sum[i]: total distance from i to all others
    int num_min, cost_min;         // best vertex so far and its total
    int num_max;                   // largest vertex number seen in this dataset
    // `cin >> n && n != 0` (instead of the original comma operator, which
    // discarded the stream state) also terminates cleanly if the input ends
    // without the trailing 0 sentinel.
    while (cin >> n && n != 0) {
        // Initialization: no edges known yet, all distances "infinite".
        for (int i = 0; i < MAX_CITY; i++) {
            for (int j = 0; j < MAX_CITY; j++) {
                cost[i][j] = INF;
            }
        }
        for (int i = 0; i < MAX_CITY; i++) {
            sum[i] = 0;
        }
        num_max = 0;
        num_min = -1;
        cost_min = INF;
        // Read the undirected edge list.
        for (int i = 0; i < n; i++) {
            cin >> from >> to >> c;
            num_max = max(num_max, max(from, to));
            cost[from][to] = cost[to][from] = c;
        }
        // Floyd–Warshall all-pairs shortest paths.
        // NOTE: the original comments mislabeled the loop roles; in the
        // relaxation cost[i][j] = min(cost[i][j], cost[i][k] + cost[k][j])
        // k is the intermediate vertex, i the start and j the end.
        // (INF + INF == 2e9 still fits in a 32-bit int, so no overflow.)
        for (int k = 0; k <= num_max; k++) {          // intermediate vertex
            for (int i = 0; i <= num_max; i++) {      // start vertex
                for (int j = 0; j <= num_max; j++) {  // end vertex
                    cost[i][j] = min(cost[i][j], cost[i][k]+cost[k][j]);
                }
            }
        }
        for (int i = 0; i <= num_max; i++) {
            // Total commute time from vertex i to every other vertex.
            for (int j = 0; j <= num_max; j++) {
                if (i != j) {
                    sum[i] += cost[i][j];
                }
            }
            // Track the vertex with the smallest total (ties keep the first).
            if (sum[i] < cost_min) {
                num_min = i;
                cost_min = sum[i];
            }
        }
        // Output: best vertex number and its total commute time.
        cout << num_min << ' ' << cost_min << endl;
    }
    return 0;
}
|
// Source file: src/main/java/be/aga/dominionSimulator/cards/Dame_AnnaCard.java
package be.aga.dominionSimulator.cards;
import be.aga.dominionSimulator.DomCard;
import be.aga.dominionSimulator.enums.DomCardName;
import java.util.ArrayList;
import java.util.Collections;
public class Dame_AnnaCard extends KnightCard {
public Dame_AnnaCard() {
super(DomCardName.Dame_Anna);
}
public void play() {
if (!owner.getCardsInHand().isEmpty()) {
if (owner.isHumanOrPossessedByHuman()) {
handleHuman();
} else {
Collections.sort(owner.getCardsInHand(), SORT_FOR_TRASHING);
int i = countCrapCards();
int count = 0;
while (i > 0 && count < 2) {
owner.trash(owner.removeCardFromHand(owner.getCardsInHand().get(0)));
count++;
i--;
}
}
}
super.play();
}
private void handleHuman() {
ArrayList<DomCard> theChosenCards = new ArrayList<DomCard>();
do {
theChosenCards = new ArrayList<DomCard>();
owner.getEngine().getGameFrame().askToSelectCards("Trash (max 2)", owner.getCardsInHand(), theChosenCards, 0);
} while (theChosenCards.size()>2);
while (!theChosenCards.isEmpty()) {
DomCard theCardToTrash = null;
for (DomCard theCard : owner.getCardsInHand()) {
if (theCard.getName() == theChosenCards.get(0).getName())
theCardToTrash = theCard;
}
theChosenCards.remove(0);
owner.trash(owner.removeCardFromHand(theCardToTrash));
}
}
private int countCrapCards() {
int counter = 0;
for (DomCard theCard : owner.getCardsInHand()) {
if (theCard.getTrashPriority() <= DomCardName.Copper.getTrashPriority())
counter++;
}
return counter;
}
} |
package server
import (
"testing"
"github.com/rs/xid"
"golang.zx2c4.com/wireguard/wgctrl/wgtypes"
)
// TestAccountManager_GetNetworkMap checks that a peer's network map lists
// exactly the *other* peers of the same account: after registering two peers
// with a reusable setup key, the map computed for peer 1 must contain only
// peer 2.
func TestAccountManager_GetNetworkMap(t *testing.T) {
	manager, err := createManager(t)
	if err != nil {
		t.Fatal(err)
		return
	}
	expectedId := "test_account"
	userId := "account_creator"
	account, err := manager.AddAccount(expectedId, userId, "")
	if err != nil {
		t.Fatal(err)
	}
	// Pick the reusable setup key so both peers can register with it.
	var setupKey *SetupKey
	for _, key := range account.SetupKeys {
		if key.Type == SetupKeyReusable {
			setupKey = key
		}
	}
	peerKey1, err := wgtypes.GeneratePrivateKey()
	if err != nil {
		t.Fatal(err)
		return
	}
	// NOTE(review): both peers are registered under the name "test-peer-2";
	// this looks like a copy-paste slip, but names are never asserted, so
	// the test outcome is unaffected.
	_, err = manager.AddPeer(setupKey.Key, "", &Peer{
		Key:  peerKey1.PublicKey().String(),
		Meta: PeerSystemMeta{},
		Name: "test-peer-2",
	})
	if err != nil {
		t.Errorf("expecting peer to be added, got failure %v", err)
		return
	}
	peerKey2, err := wgtypes.GeneratePrivateKey()
	if err != nil {
		t.Fatal(err)
		return
	}
	_, err = manager.AddPeer(setupKey.Key, "", &Peer{
		Key:  peerKey2.PublicKey().String(),
		Meta: PeerSystemMeta{},
		Name: "test-peer-2",
	})
	if err != nil {
		t.Errorf("expecting peer to be added, got failure %v", err)
		return
	}
	networkMap, err := manager.GetNetworkMap(peerKey1.PublicKey().String())
	if err != nil {
		t.Fatal(err)
		return
	}
	if len(networkMap.Peers) != 1 {
		t.Errorf("expecting Account NetworkMap to have 1 peers, got %v", len(networkMap.Peers))
	}
	// Guard the index access: Errorf above does not abort the test, so an
	// empty peer list would otherwise panic here (the sibling test already
	// guards the equivalent access).
	if len(networkMap.Peers) > 0 && networkMap.Peers[0].Key != peerKey2.PublicKey().String() {
		t.Errorf(
			"expecting Account NetworkMap to have peer with a key %s, got %s",
			peerKey2.PublicKey().String(),
			networkMap.Peers[0].Key,
		)
	}
}
// TestAccountManager_GetNetworkMapWithRule checks that access rules shape the
// network map: the default rule is removed, the two peers are placed in a
// "src" and a "dst" group, and a bidirectional rule between the groups is
// created — after which each peer's map must contain exactly the other peer.
func TestAccountManager_GetNetworkMapWithRule(t *testing.T) {
	manager, err := createManager(t)
	if err != nil {
		t.Fatal(err)
		return
	}
	expectedId := "test_account"
	userId := "account_creator"
	account, err := manager.AddAccount(expectedId, userId, "")
	if err != nil {
		t.Fatal(err)
	}
	// Pick the reusable setup key so both peers can register with it.
	var setupKey *SetupKey
	for _, key := range account.SetupKeys {
		if key.Type == SetupKeyReusable {
			setupKey = key
		}
	}
	peerKey1, err := wgtypes.GeneratePrivateKey()
	if err != nil {
		t.Fatal(err)
		return
	}
	_, err = manager.AddPeer(setupKey.Key, "", &Peer{
		Key:  peerKey1.PublicKey().String(),
		Meta: PeerSystemMeta{},
		Name: "test-peer-2",
	})
	if err != nil {
		t.Errorf("expecting peer to be added, got failure %v", err)
		return
	}
	peerKey2, err := wgtypes.GeneratePrivateKey()
	if err != nil {
		t.Fatal(err)
		return
	}
	_, err = manager.AddPeer(setupKey.Key, "", &Peer{
		Key:  peerKey2.PublicKey().String(),
		Meta: PeerSystemMeta{},
		Name: "test-peer-2",
	})
	if err != nil {
		t.Errorf("expecting peer to be added, got failure %v", err)
		return
	}
	// Drop the account's default rule so connectivity below is granted only
	// by the rule this test creates.
	rules, err := manager.ListRules(account.Id)
	if err != nil {
		t.Errorf("expecting to get a list of rules, got failure %v", err)
		return
	}
	err = manager.DeleteRule(account.Id, rules[0].ID)
	if err != nil {
		t.Errorf("expecting to delete 1 group, got failure %v", err)
		return
	}
	var (
		group1 Group
		group2 Group
		rule   Rule
	)
	group1.ID = xid.New().String()
	group2.ID = xid.New().String()
	group1.Name = "src"
	group2.Name = "dst"
	rule.ID = xid.New().String()
	group1.Peers = append(group1.Peers, peerKey1.PublicKey().String())
	group2.Peers = append(group2.Peers, peerKey2.PublicKey().String())
	err = manager.SaveGroup(account.Id, &group1)
	if err != nil {
		t.Errorf("expecting group1 to be added, got failure %v", err)
		return
	}
	err = manager.SaveGroup(account.Id, &group2)
	if err != nil {
		t.Errorf("expecting group2 to be added, got failure %v", err)
		return
	}
	// Bidirectional rule: src group <-> dst group.
	rule.Name = "test"
	rule.Source = append(rule.Source, group1.ID)
	rule.Destination = append(rule.Destination, group2.ID)
	rule.Flow = TrafficFlowBidirect
	err = manager.SaveRule(account.Id, &rule)
	if err != nil {
		t.Errorf("expecting rule to be added, got failure %v", err)
		return
	}
	networkMap1, err := manager.GetNetworkMap(peerKey1.PublicKey().String())
	if err != nil {
		t.Fatal(err)
		return
	}
	if len(networkMap1.Peers) != 1 {
		t.Errorf(
			"expecting Account NetworkMap to have 1 peers, got %v: %v",
			len(networkMap1.Peers),
			networkMap1.Peers,
		)
	}
	// Guard the index access: Errorf above does not abort the test, so an
	// empty peer list would otherwise panic here — the networkMap2 check
	// below already guards the same way; make both consistent.
	if len(networkMap1.Peers) > 0 && networkMap1.Peers[0].Key != peerKey2.PublicKey().String() {
		t.Errorf(
			"expecting Account NetworkMap to have peer with a key %s, got %s",
			peerKey2.PublicKey().String(),
			networkMap1.Peers[0].Key,
		)
	}
	networkMap2, err := manager.GetNetworkMap(peerKey2.PublicKey().String())
	if err != nil {
		t.Fatal(err)
		return
	}
	if len(networkMap2.Peers) != 1 {
		t.Errorf("expecting Account NetworkMap to have 1 peers, got %v", len(networkMap2.Peers))
	}
	if len(networkMap2.Peers) > 0 && networkMap2.Peers[0].Key != peerKey1.PublicKey().String() {
		t.Errorf(
			"expecting Account NetworkMap to have peer with a key %s, got %s",
			peerKey1.PublicKey().String(),
			networkMap2.Peers[0].Key,
		)
	}
}
|
Activity screening of environmental metagenomic libraries reveals novel carboxylesterase families
Metagenomics has made accessible an enormous reserve of global biochemical diversity. To tap into this vast resource of novel enzymes, we have screened over one million clones from metagenome DNA libraries derived from sixteen different environments for carboxylesterase activity and identified 714 positive hits. We have validated the esterase activity of 80 selected genes, which belong to 17 different protein families including unknown and cyclase-like proteins. Three metagenomic enzymes exhibited lipase activity, and seven proteins showed polyester depolymerization activity against polylactic acid and polycaprolactone. Detailed biochemical characterization of four new enzymes revealed their substrate preference, whereas their catalytic residues were identified using site-directed mutagenesis. The crystal structure of the metal-ion dependent esterase MGS0169 from the amidohydrolase superfamily revealed a novel active site with a bound unknown ligand. Thus, activity-centered metagenomics has revealed diverse enzymes and novel families of microbial carboxylesterases, whose activity could not have been predicted using bioinformatics tools.
Scientific RepoRts | 7:44103 | DOI: 10.1038/srep44103 is estimated that the global protein universe of microorganisms exceeds 10 12 proteins indicating that we know astonishingly little about microbial proteins and enzymes 12,13 . Even more, based on conservative estimates, over 50% of the sequences available in the databases have uncertain (general), unknown, or incorrectly annotated functions 14 . Therefore, the direct experimental determination of protein function or enzyme activity for millions of biochemically uncharacterized proteins or genes of unknown function represents one of the major challenges in postgenomic biology. In addition to sequence similarity-based and comparative genomics methods of gene function prediction, there are several experimental approaches to annotation including analysis of gene or protein interactions, gene expression, gene knockouts, protein localization, and protein structures . However, in most cases, these approaches produce predictions or general annotations of biochemical or cellular function requiring subsequent experimental verification. In contrast, screening of purified proteins or metagenome gene libraries for enzymatic activity represents a direct experimental approach to identify the biochemical function of unknown proteins 5,7, . The feasibility and merits of general and specific enzymatic assays for screening of purified proteins and metagenome libraries has already been demonstrated for many hydrolases and oxidoreductases, two very broad classes of enzymes 5,7,20,21,23 .
The metagenomic enzyme screening approach involves directly assaying proteins expressed from environmental DNA in a surrogate host (most often E. coli) for enzymatic activity against a specific chemical substrate 24 .
An alternate approach is to clone environmental DNA fragments into a lambda phage-based system and to screen for enzymatic activities directly on phage plaques 25 . Enzymatic screening of metagenome libraries provides the possibility to mine for new enzyme activities and discover novel families of enzymes with no sequence similarity to previously characterized proteins. This method has greatly expanded the number of novel enzymes, including over 130 new nitrilases and many cellulases, carboxylesterases, and laccases . A recent high-throughput metagenomics project has identified over 27,000 putative carbohydrate-active genes in the cow rumen metagenome and demonstrated the presence of glycosyl hydrolase activity in 51 out of 90 tested proteins 29 . In addition, metagenomes from several extreme environments have revealed a rich biochemical diversity of enzymes adapted to function under extreme conditions, such as low/high temperatures, low/high pH, and high salt concentrations or high pressure 5,30,31 . Biochemical and structural characterization of these enzymes has revealed different molecular mechanisms of adaptation to extreme environmental conditions . A recent analysis of metagenome screening works published in the last two decades revealed that these studies identified almost 6,000 genes with 70% of them representing carboxylesterases and lipases 35 . Based on sequence, most known carboxylesterases and lipases belong to the large protein superfamilies of α/β hydrolases and β-lactamases and have been classified into 16 families . Since these enzymes are of high interest for applications in biotechnology, a significant number of these proteins have been characterized both structurally and biochemically, mostly esterases from the α/β hydrolase superfamily 36, .
Here we present the results of enzymatic screening of 16 metagenomic gene libraries from different environments for novel carboxylesterases. We have identified over 700 positive clones, from which 80 selected genes were expressed in E. coli, and their esterase activities were confirmed using additional assays. Four enzymes representing unknown (DUF3089) and hypothetical (MGS0084) proteins, cyclase-like enzymes (PF04199), as well as polyester hydrolyzing and lipolytic enzymes were characterized biochemically including substrate and temperature profiles. The active site residues of new enzymes were identified using site-directed mutagenesis, and the crystal structure of a metal-dependent cyclase-like esterase provided insight into the molecular mechanisms of its activity.
Results and Discussion
Enzymatic screening of metagenome libraries for carboxylesterase activity. To probe the biochemical diversity of carboxylesterases from uncultured microbes of environmental metagenomes, we screened 16 metagenome DNA libraries prepared from different geographic sites including various marine environments, soils, and waste treatment facilities (Table 1, Supplementary Table S1). The environments include moderate to hypersaline (3.8% to 10% NaCl, w/vol) conditions, low to elevated temperatures (3 °C to 50 °C), as well as sites contaminated with petroleum or heavy metals, from public or industrial wastewater sludge digesters (Supplementary Table S1). Overall, we screened over 1 million fosmid and Lambda-ZAP clones (approximately 7,000 Mbp DNA) for the ability to degrade tributyrin, generating a total 714 positive fosmid and Lambda-ZAP clones (Fig. 1, Table 1). Lambda-ZAP clones (208) and 178 fosmids (from the total 506) were sequenced by primer walking or using Illumina HiSeq, respectively. All genes predicted to have hydrolytic enzyme activity were cloned for protein purification. Where no sequence similarity to known esterases was found (two Haven library clones), we subcloned all predicted open reading frames and identified the presence of esterase activity in one hypothetical protein (MGS0084) and one predicted cyclase (MGS0169).
We confirmed the presence of esterase activity in 80 selected genes using agar plates with 1% tributyrin (Fig. 1, Supplementary Table S2). These enzymes were also tested for the presence of lipase activity, based on the ability to hydrolyze long chain-length lipids (C16, C18), using an olive oil agar plate assay (Fig. 1d). Three enzymes (MGS0084, MGS0156 and GEN0160) out of 80 tested clones were found to have lipase activity (Fig. 1), consistent with previous metagenome screens where low frequency of lipase activity was reported 7 .
Sequence analysis and enzyme families of identified metagenomic esterases. BLASTp searches of the NCBI database using 80 validated metagenomic esterases as queries indicated that most of these proteins represent genuine metagenomic enzymes with just 11 sequences from known genomes including Alcanivorax borkumensis, Cycloclasticus sp. 78-ME, Marinobacter hydrocarbonoclasticus, Parvibaculum lavamentivorans, and Serratia fonticola (99-100% identity) (Fig. 2, Supplementary Table S2). Sixty-nine remaining esterases showed 28-98% sequence identity to sequences from the NCBI database with most sequences within the range of 50-80% identity. Analysis of phylogenetic distribution of the 80 validated metagenomic esterases revealed that these proteins and their top sequence homologues are present in a broad range of Gram-positive and Gram-negative microorganisms with most proteins found in Proteobacteria (52 proteins), Terrabacteria (11 proteins), and the Fibrobacteres, Chlorobi and Bacteroidetes (FCB) group (10 proteins) (Fig. 2).
Based on sequence analysis, the 80 validated esterases belong to 17 protein families (Fig. 3). A majority of these enzymes are predicted to belong to the α/β hydrolase superfamily (59 proteins), which represents one of the largest groups of structurally related proteins (148 families in the ESTHER database) with diverse catalytic and non-catalytic functions including hydrolases, dehalogenases, haloperoxidases, and hydroxynitrile lyases 36,38,42,43 . Their catalytic activity depends on the conserved catalytic triad, which consists of a nucleophile (serine, aspartate or cysteine), a histidine and a catalytic acid (aspartate or glutamate). Most α/β hydrolases that we have identified have a conserved Gly-x-Ser-x-Gly catalytic motif and are distributed among eleven different families, with a majority belonging to α/β hydrolase-3, α/β hydrolase-1, and α/β hydrolase-6. The remaining enzymes are distributed among Hydrolase_4, Esterase, Peptidase_S9, COesterase, Chlorophyllase_2, Esterase_phd and DUF676 families, with the exception of two (Fig. 3, Supplementary Fig. S1). Although enzymes MGS0032 and MGS0156 are predicted to belong to the α/β superfamily, they are not associated with known hydrolase families, suggesting that these proteins may belong to new branches.
Two metagenomic esterases, MGS0012 and GEN0034, belong to the DUF3089 family, which appears to be related to α/β hydrolases (Fig. 3, Supplementary Fig. 1). Recently, several members of this family were also isolated from metagenomic libraries and have been shown to exhibit esterase activity . Interestingly, MGS0012 shares 99% protein sequence identity with the hypothetical protein WP_026168275 from Kordiimonas gwangyangensis (97.6% at the nucleotide level), which has been shown to have the ability to degrade high-molecular weight polycyclic aromatic hydrocarbons 50 .
Ten isolated enzymes belong to the esterase family VIII, which includes β-lactamase-like enzymes with promiscuous β-lactam hydrolytic activity responsible for resistance to β-lactam antibiotics. Previously, several metagenomic β-lactamase-like esterases have been characterized revealing high esterase activity against shorter chain p-nitrophenyl esters (C2-C5) and detectable hydrolytic activity against the β-lactamase substrates nitrocefin and cephalosporin 31,51 . All ten identified β-lactamase-like esterases are serine hydrolases with a conserved Ser-x-x-Lys catalytic motif typical for the class C β-lactamases. Interestingly, the α/β-hydrolase-like esterase GEN0169 has an additional Metallo-β-lactamase domain (PF00753). This is a domain commonly found in class B β-lactamases, a structurally unrelated enzyme family also capable of hydrolyzing β-lactam antibiotics.
The remaining hydrolase-like proteins share sequence similarity with Patatin-like phospholipases (5 proteins), SGNH-hydrolases (3 proteins), and 3-hydroxybutyrate oligomer hydrolase (one protein, PF10605). The unknown protein MGS0084 has only eight homologous sequences in the Uniprot and non-redundant GenBank databases. A protein sequence alignment of MGS0084 with its homologues shows a conserved Gly-His-Ser-His-(Ala/Gly)-Gly motif, which resembles Gly-x-Ser-x-Gly commonly found in α/β hydrolases suggesting that these proteins may represent a new branch of this superfamily ( Supplementary Fig. S1).
[Table 1 column headings, displaced by text extraction: "Metagenome library", "Number of clones in library", "Clones screened".]
MGS0169 belongs to the PF04199 family of putative cyclase-like enzymes, which contain a conserved His-x-Gly-Thr-His-x-Asp-x-Pro-x-His motif predicted to form part of the active site. This motif is only partially conserved in MGS0169 and its closest homologues, with the first two His residues replaced by Gln (Supplementary Fig. S1). Several cyclase-like proteins from different bacteria have been shown to exhibit metal-dependent amidohydrolase activity against formylkynurenine and isatin 52-54, but carboxylesterase activity of PF04199 proteins has not been reported before. Thus, enzymatic screens of metagenomic libraries have revealed carboxylesterases from diverse protein families, including several candidates, which could not have been annotated based on sequence analysis.
Biochemical characterization of selected metagenomic esterases.
For biochemical and structural characterization of metagenomic esterases, we selected the lipolytic enzymes MGS0084 and GEN0160, as well as the novel esterases MGS0012 (DUF3089) and MGS0169 (a cyclase-like protein). The selected proteins were over-expressed in E. coli and affinity-purified to over 95% homogeneity. The acyl chain length preference of metagenomic esterases was analyzed using 11 model esterase substrates including three α-naphthyl and eight p-nitrophenyl (pNP) esters with different chain lengths (Fig. 4). MGS0012 showed the highest activity with α-naphthyl acetate, MGS0169 against pNP-acetate, whereas MGS0084 exhibited comparable activity against α-naphthyland pNP-acetate and propionate (Fig. 4). In contrast, GEN0160 showed a preference to substrates with longer acyl chains with the highest activity against pNP-octanoate α-naphthylor pNP-butyrate (C4, GEN0160). This protein showed detectable esterase activity against pNP-palmitate (C16), which is a representative substrate for lipases (Fig. 4). This is in line with the presence of hydrolytic activity of this enzyme toward olive oil (Fig. 1).
In contrast to the other three proteins, esterase activity of MGS0169 was greatly stimulated by the addition of divalent metal cations (Mn 2+ > Mg 2+ > Co 2+ ≫ Ni 2+ ) ( Supplementary Fig. S2). Several biochemically characterized members of the cyclase-like protein family (PF04199) exhibited metal ion dependent amidohydrolase activity against formylkynurenine and isatin 52-54 . MGS0169 also showed detectable metal dependent amidohydrolase activity against isatin (k cat /K M 0.1 × 10 3 M −1 s −1 ), but its esterase activity against pNP-acetate was at least three orders of magnitude higher (k cat /K M ~ 0.2 × 10 6 M −1 s −1 ) (Fig. 4, Table 2). Previously, the presence of metal ion-stimulated esterase activity was demonstrated in the amidohydrolase proteins from the phosphotriesterase family (PF02126) including Rsp3690 from Rhodobacter sphaeroides and Pmi1525 from Proteus mirabilis 55,56 . However, these enzymes have different structural folds and active sites. Thus, MGS0169 and homologous proteins from the PF04199 family do indeed represent a novel group of metal-dependent esterases from the amidohydrolase superfamily.
The purified metagenomic esterases showed saturation kinetics and high catalytic efficiencies with low K M values toward the tested model esterase substrates ( Table 2). The ester substrate profiles of four metagenomic esterases were determined using a library of 89 various monoesters including alkyl and aryl esters ( Supplementary Fig. S3, Supplementary Table S3). These proteins showed hydrolytic activity against a broad range of substrates with different substrate preferences. MGS0012, MGS0169 and GEN0160 were most active against phenyl acetate, whereas MGS0084 against vinyl laurate ( Supplementary Fig. S3). From these proteins, MGS0012 was found to be the most efficient esterase showing high k cat /K M values toward a broad range of substrates including the medium acyl chain esters (C4-C10) ( Table 2).
The esterase activities of metagenomic esterases showed different temperature profiles determined in the range from 5 °C to 70 °C (Supplementary Fig. S4). MGS0084 was most active at 25 °C but retained almost 50% of maximal activity at 5 °C suggesting that it is a cold-adapted enzyme. In contrast, the other metagenomic esterases showed maximal activity at 40 °C and retained less than 15% of maximal activity at 5 °C, which is typical of mesophilic enzymes (Supplementary Fig. S4). The esterases also showed different sensitivities to high salt concentrations with MGS0084 exhibiting strong inhibition by 0.25 M NaCl or KCl (Supplementary Fig. S5). In contrast, the esterase activity of MGS0012 and GEN0160 was slightly stimulated by the addition of salt, and they showed no inhibition even at 2 M or 3 M salt concentration (Supplementary Fig. S5). The metagenomic esterases also showed different sensitivities to solvents (acetonitrile and DMSO) with MGS0012 being the most sensitive enzyme and GEN0160 being the most resistant enzyme (Supplementary Fig. S5). Thus, the characterized metagenomic esterases exhibit different temperature profiles and sensitivities to inhibition by salt and solvents perhaps reflecting differences in native environmental conditions and hosts.
Polyester depolymerization activity of purified metagenomic esterases. Recent studies including
our work have demonstrated the presence of hydrolytic activity against polylactic acid (PLA), a biodegradable polyester, in several lipolytic enzymes and carboxylesterases 31,57,58 . In this work, 26 purified metagenomic esterases were screened for PLA-degrading activity using an agarose plate assay with the emulsified PLA2 (M w 2 K). These screens revealed the presence of PLA hydrolytic activity in seven enzymes including the lipolytic esterases MGS0084 and MGS0156 (Fig. 5). An agarose-based screening of purified esterases using the emulsified polycaprolactone PCL10 (M w 10 K), another biodegradable polyester, demonstrated the presence of high PCL10 depolymerization activity in MGS0084, GEN0105, and GEN0160, as well as in MGS0009 and MGS0156 (Fig. 5). The hydrolytic activity of the identified metagenomic esterases against different polyester substrates makes these enzymes attractive candidates for studies toward enzyme-based depolymerization of polyester plastics.
Crystal structure of the metal ion dependent esterase MGS0169. The purified seleno-methioninesubstituted metagenomic esterases were also submitted to crystallization trials. MGS0169 (21-341 aa) produced diffracting crystals, and its crystal structure was determined at 1.61 Å resolution (Supplementary Table 4). The MGS0169 protomer core has a slightly distorted central β-barrel containing both parallel and anti-parallel β-strands surrounded by ten α-helices, whose fold resembles the swivelling β/α/β domain of metal-dependent α/β hydrolases 53,59 . The small sub-domain of MGS0169 is comprised two β-strands (β2 and β3) connected by a flexible loop containing a short α-helix with the strands of one protomer forming a four-stranded anti-parallel β-sheet with the two related β-strands of another protomer (Fig. 6). This results in the formation of a tightly packed twisted (~90°) tetramer through the interaction between the β-sheets stabilized by interactions between the surrounding α-helices (Fig. 6). Analysis of the crystal contacts using the quaternary structure prediction server PISA suggests that MGS0159 is likely to form tetramers through multiple interactions between tightly packed monomers burying ~7,000 Å 2 of the solvent accessible surface per monomer (~30% of the total solvent accessible surface). The tetrameric organization of MGS0169 is supported by the results of size-exclusion chromatography suggesting a trimeric or tightly packed tetrameric organization (112 kDa, predicted Mw 37 kDa). A Dali search for MGS0169 structural homologues identified several protein structures as the best matches including the isatin hydrolase IH-b from Labrenzia aggregata (PDB codes 4J0N and 4M8D; Z-score 21.3, rmsd 2.5 Å), the three microbial kynurenine formamidases KynB (PDB codes 4COB, 4COG, and 4CO9; Z-score 19.4-19.9, rmsd 2.1-2.7 Å), and the uncharacterized predicted hydrolase P84132_GEOSE from Geobacillus stearothermophilus (PDB code 3KRV; Z-score 18.6, rmsd 2.9 Å). 
These proteins share low sequence similarity to MGS0169 (17-22% sequence identity), and the two biochemically characterized enzymes (IH-b and KynB) have metal-dependent amidohydrolase activity against isatin and N-formylkynurenine 53,54 . The active site of MGS0169. The location of the MGS0169 active site is indicated by the unknown electron density with a tetrahedral-like geometry located in the narrow cavity formed mainly by α-helices near the end of the central β-barrel (Fig. 6, Supplementary Fig. S2). Based on its shape, this density might represent a molecule of acetyl-phosphate, probably captured by the enzyme from E. coli cells. The bottom part of the ligand is positioned close to the side chains of the conserved Gln127 (5.3 Å), Gln131 (4.3 Å), Asp133 (3.8 Å), and His137 (2.6 Å) (Fig. 6). These residues represent a modified cyclase-like motif Gln-x-x-x-Gln-x-Asp-x-x-x-His found in several cyclase-like proteins (PF04199) including the trans-dienelactone hydrolase from Pseudomonas reinekei MT1 60 and are likely to be involved in metal ion binding. The bound ligand also interacts with the side chains of the conserved Arg87 (2.9 Å), Glu299 (2.6 Å), and His286 (2.6 Å). The MGS0169 substrate binding site is less conserved with only two residues (Phe84 and His286) identical to the isatin hydrolase substrate binding site (Phe41 and His212, PDB code 4J0N) (Fig. 6).
In the crystal structures of isatin hydrolase IH-b (PDB code 4M8D) and kynurenine formamidase KynB (PDB code 4COB), which contain the classical cyclase-like motif His-x-x-x-His-x-Asp-x-x-x-His, the side chains of the motif residues coordinate one Mn2+ ion (IH-b) or two Zn2+ ions (KynB), which are required for the enzymatic activity of these enzymes 53,54. However, no metal ion was found in the corresponding site of the uncharacterized cyclase-like protein MJ0783 from Methanocaldococcus jannaschii (PDB code 2B0A), or in MGS0169. We propose that metal ion binding to MGS0169 was prevented by the presence of the bound acetylphosphate-like ligand, and the active site of the catalytically active MGS0169 contains one or two metal ions (probably Mn2+, based on the MGS0169 metal ion profile), as was suggested for the trans-dienelactone hydrolase from Pseudomonas reinekei MT1 61.
The structure of the MGS0169 dimer also revealed that the side chain of Phe117 (and possibly Phe110 and Lys112) from one protomer contributes to the substrate binding site of another protomer and is positioned near the bound ligand (3.7 Å) and side chains of His137 (3.6 Å) and His286 (4.0 Å) (Fig. 6). This suggests that the two active sites of the MGS0169 dimer can allosterically interact through the residues of the composite binding sites, which is in line with the observed range of Hill coefficients of 1.2-1.5. The other highly conserved residues of the MGS0169 active site, which can potentially contribute to substrate binding include Phe84 (4.6 Å to the acetyl-like ligand), Arg87 (2.9 Å to the ligand), His286 (2.5 Å), and Glu299 (2.6 Å). Thus, the active site residues of MGS0169 and other cyclase-like proteins are different from those of non-specific carboxylesterases from the amidohydrolase superfamiliy 55,56 .
Validation of the catalytic residues of metagenomic esterases using site-directed mutagenesis.
The potential active site residues of metagenomic esterases, selected based on their sequence alignments ( Supplementary Fig. S1) and the MGS0169 crystal structure (Fig. 6), were verified using site-directed mutagenesis (alanine replacement). The mutant proteins were purified using affinity chromatography, and their enzymatic activity was compared to that of wild type proteins. As shown in Fig. 7, the catalytic triad of the DUF3089 hydrolase MGS0012 includes Ser193, Asp360, and His375, because the corresponding mutant proteins showed a greatly reduced catalytic activity. Low activity was also found in the MGS0012 D232A mutant protein, whereas S131A, S244A, and D247A retained high enzymatic activity (Fig. 7). Similarly, alanine replacement mutagenesis of the unknown protein MGS0084 suggested that it is a novel Ser-dependent hydrolase with the catalytic triad comprising Ser172, Asp300, and His386 (Fig. 7).
Alanine replacement mutagenesis of the metal-dependent esterase MGS0169 (from the cyclase-like family) revealed that this enzyme is sensitive to mutations in the active site including the residues of the modified cyclase-like motif Gln127, Gln131, and Asp133 (H137A was insoluble), as well as those involved in substrate binding (Arg87, H286, and Glu299) (Fig. 7). Thus, site-directed mutagenesis of metagenomic esterases revealed that MGS0012 and MGS0084 represent novel Ser-dependent hydrolases, whereas MGS0169 is a novel metal-dependent esterase with the modified cyclase-like motif Gln127-Gln131-Asp133-His137 potentially involved in metal ion binding.
Conclusions
The discovery of new enzymes in environmental bacteria contributes greatly to our fundamental knowledge of protein structure-function relationships, expands the biocatalytic toolbox of enzymes for metabolic engineering and synthetic biology, and improves the quality of gene annotation in public databases, on which bioinformatics tools rely. Enzymatic screening of environmental gene libraries presented in this work revealed a huge sequence and biochemical diversity with identification of 80 esterases from 17 different enzyme families (Fig. 3). These enzymes exhibit diverse substrate preferences, from short to long acyl chain esters with a significant number of enzymes possessing polyesterase activity against polylactic acid and polycaprolactone. The differences in their sensitivities to temperature and salt conditions likely reflect environmental adaptations. Through activity-based screening, we have been able to identify three novel Ser-dependent esterases and present the crystal structure of a new carboxylesterase subfamily within the cyclase-like family of metal ion dependent amidohydrolases. This work contributes a 30% increase in experimentally validated metagenomic esterases (288 enzymes according to a recent analysis 35 ). By combining metagenomic enzyme discovery with protein and metabolic engineering, we may gain access to virtually unlimited diversity of enzyme sequences, with the potential to discover tailor-made enzymes for any biotransformation reaction.
Methods
Metagenome library preparation, gene cloning, and protein purification. Extraction of metagenomic DNA from environmental samples and preparation of fosmid and lambda-ZAP DNA libraries (Supplementary Table S1) were performed as described previously 25, . Genes were amplified by PCR from purified fosmids or excised lambda-ZAP plasmids and cloned into a modified pET15b vector, containing an Scientific RepoRts | 7:44103 | DOI: 10.1038/srep44103 N-terminal 6His tag, as described previously 65 . Tagged genes were overexpressed in E. coli BL21(DE3) and affinity purified using metal-chelate chromatography on Ni-NTA (Qiagen). Site-directed mutagenesis of selected enzymes was performed based on the QuikChange ® site-directed mutagenesis kit (Stratagene). All mutations were verified by DNA sequencing, and the mutant proteins were overexpressed and purified like the wild-type.
Enzymatic screening of metagenomic libraries. E. coli fosmid clones were cultured at 37 °C in 384-well microtiter plates, and spotted onto Luria Broth (LB) agar plates containing chloramphenicol (12.5 μg/mL), arabinose (0.001-0.01%), gum Arabic (0.5%), and emulsified tributyrin (1%). Clones were grown overnight at 37 °C, then at 30 °C for 3-4 days. Colonies with clear halos were considered positive for esterase activity, and selected for plasmid extraction. Lambda-ZAP clones were screened as follows. 300 μL of mid-log phase E. coli XL1-Blue MRF' cells were infected with the Lambda-ZAP library and added to 4 mL of 0.7% LB agar containing 10 mM MgSO 4 , 1 mM IPTG, 0.5% gum arabic and 1% of emulsified tributyrin, at 48 °C. The mixture was immediately layered onto LB agar plates containing 1 mM IPTG, at approximately 1,000 plaque forming units per plate, and the plates were incubated at 37 °C. Phage plaques exhibiting a clear halo over 3-4 days were isolated, and plasmids containing the metagenomic segments were extracted from phage DNA according to the manufacturer's protocol.
To confirm esterase activity in cloned proteins, E. coli expressing the cloned genes were streaked onto LB-agar plates containing 1% tributyrin (as above) or purified enzymes were spotted directly and the plates were incubated at 30 °C or 37 °C. Clones were also checked for lipase activity either by streaking E. coli colonies or spotting 5-10 μg of the purified enzymes onto LB agar plates containing 3% emulsified olive oil and 0.001% Rhodamine B indicator dye, and incubating at 30 °C or 37 °C. Lipase activity was identified under UV light as orange fluorescence 66 .
Sequencing of metagenomic fragments and bioinformatics analysis.
Lambda-ZAP clones were sequenced by primer walking, while fosmids were sequenced as mixed pools using Illumina or Roche 454 platforms (at TCAG, Genome Quebec and Genoscope). Reads were dereplicated and assembled into contigs using the Velvet algorithm 67 in Geneious 68 version 6.0.6, and contigs were mapped to specific fosmids using Sanger sequenced fosmid end sequences. Contigs were submitted to the MG-RAST 69 pipeline for gene annotation. In parallel, open reading frames were predicted using the Glimmer algorithm 70 , and translated protein sequences were annotated through BLAST searches of UniProt and the non-redundant GenBank protein database 71 . Genes predicted as esterases, lipases, or hydrolases were selected for recombinant expression in E. coli. Where no such gene was found, smaller esterase positive genetic fragments were identified by subcloning, and all predicted genes were cloned and rescreened.
Proteins with confirmed esterase activity (Supplementary Table S2) were classified into families through sequence analysis using HMMER 72 searches against the Pfam database and BLAST searches of the COG database 73 , with an E-value cut-off of 1E-5 unless otherwise indicated. Where an enzyme had a significant score to more than one protein family, the family with the smaller E-value and/or larger sequence coverage was assigned. Multiple sequence alignments were generated using MUSCLE 74 . The phylogenetic tree was produced using the NCBI taxonomy of the closest sequence homologues and the PhyloT tree generator (http://phylot.biobyte.de/) and visualized using the iTOL v3 online tool 75 . Enzymatic assays with purified proteins. Carboxylesterase activity of purified proteins against p-nitrophenyl (pNP) or α-naphthyl esters of various fatty acids was measured spectrophotometrically as described previously 31 . The effect of temperature, salts, and solvents on esterase activity of purified proteins against the indicated α-naphthyl substrate was measured using the same protocol. Hydrolytic activity of purified enzymes against a library of 89 ester substrates was determined spectrophotometrically using p-nitrophenol as described previously 31 . Depolymerization activity of purified enzymes against polylactic acid (PLA) or polycaprolactone (PCL) was determined essentially as described previously 31 . These assays were performed in agarose plates (1.5%) containing emulsified substrates (poly (DL-lactide), average M.W. 2,000, or PCL10), or in solution (20 mg of PLA10 in 1 ml of 0.4 M Tris-HCl buffer, pH 8.0, 0.01% Plysurf A210G) at 32 °C. For determination of kinetic parameters (K M and k cat ), esterase activity was determined over a range of substrate concentrations (0.01-5.0 mM). Kinetic parameters were calculated by non-linear regression analysis of raw data fit (to the Michaelis-Menten or Hill functions) using GraphPad Prism software (version 4.00 for Windows).
Crystallization and structure determination of MGS0169. The selenomethionine substituted MGS0169 (21-341 aa) was crystallized at 22 °C using the sitting-drop vapor diffusion method by mixing 0.5 μl of the purified protein (20 mg/ml) with 0.5 μl of the crystallization solution containing 0.2 M ammonium acetate, 0.1 M Tris-HCl (pH 8.0), and 30% (w/v) PEG 2KMME. The crystals were stabilized by cryoprotection in Paratone-N prior to flash-freezing in liquid nitrogen. Diffraction data were collected at the beamline 19-ID with an ADSC Quantum 315 R detector of the Structural Biology Center, Advanced Photon Source, Argonne National Laboratory 76,77 . Diffraction data were processed using the HKL3000 suit of programs 78 , and structural statistics is summarized in Supplementary Table S4. The MGS0169 structure was determined by the single-wavelength anomalous diffraction (SAD) method using phasing, density modification, and initial protein model building as implemented in the HKL3000 software package . Several cycles of manual corrections of the model were carried out using the programs COOT 85 and REFMAC of the CCP4 86 and finalized using Phenix 87 . The final model was refined against all reflections except for 5% randomly selected reflections, which were used for monitoring R free . The final refinement statistics are presented in Supplementary Table S4. |
In support of Danny Brown's upcoming album and Baauer's unlikely #1 hit, the pair have announced a joint US tour, cheekily titled "Worst of Both Worlds." They'll hit the South and the West Coast in April, stopping at both weekends of Coachella along the way. See a full list of scheduled dates below.
Revisit our Danny Brown cover story from last year and our recent explainer of the Baauer-driven Harlem Shake meme, then watch Danny Brown perform "Radio Song" at last year's FADER FORT.
Danny Brown and Baauer Tour:
Apr 9 Houston, TX, Fitzgeralds
Apr 10 Dallas, TX, Tree's
Apr 11 Austin, TX, Emo's East
Apr 13 Indio, CA, Coachella
Apr 14 Tucson, AZ, Congress
Apr 15 Tempe, AZ, Club Red
Apr 17 San Francisco, CA, Independent
Apr 18 Santa Barbara, CA, Casa De La Raza
Apr 20 Indio, CA, Coachella |
def cell_file(self, filename='cell.inp', triangles=False,
maxcols=125):
cells = make_gefdc_cells(
~numpy.isnan(self.mg.xn),
self.mg.cell_mask,
triangles=triangles
)
outfile = self.directory / filename
write_cellinp(cells, outputfile=outfile, flip=True, maxcols=maxcols)
return cells |
A Response to Euclid at the Core: Recentering Literary Education Close Reading: Classroom Notes on Teaching the Odyssey
I remember the topic my eleventh grade teacher assigned for The Odyssey: "What makes The Odyssey a classic?" I didn't know what made anything a classic, let alone The Odyssey. Doc Campbell set demanding questions that required close study of Macbeth and Huckleberry Finn but his broad question about The Odyssey stumped me. So, as a teacher I break things down for my students, which is basically what close reading does. The three passages from The Odyssey that follow show how one might lead students through a series of questions to an understanding of the text and the artistry behind it. For teachers, the key to close reading is simply to assume deliberate authorial intent, to ask oneself and one's students, again and again, "Why is this or that detail in the passage?" Close Reading: Book 11: Anticleia and Odysseus in the Underworld. In Book 11 Odysseus descends to the Underworld where he encounters his mother, Anticleia. He has been absent from Ithaca for 13 years and so he asks her for news of home. His questions fall into five general categories, which we try to identify. Grouping things into categories is hard for many students. It requires both insight and confidence. The more students worry about omitting something, the more categories they are likely to come up with. My question is purposeful in this regard because the ability to subsume ideas under broad headings is a skill necessary for writing well-organized essays. When we finally settle on (1) how did you die? (2) how is my father? (3) how is my son? (4) how is my kingdom? and (5) how is my wife? we number those items lightly in our texts and turn to Anticleia's reply and discover that Odysseus' questions are being answered in reverse order. I write hysteron proteron on the board, explain it means "last, first" in Greek, and we practice using hysteron proteron for a few minutes in everyday conversation. Someone makes up three or four questions: "What time did you get up this morning? 
Were you late getting to school? Did you get your math homework done?" Someone else answers in reverse order. "Yes, I got my math homework done. I arrived on time. I woke up at 6:30." Students like mastering so fancy a literary term, but this is just the beginning: we are laying the groundwork for the close reading to come.Upon which of the five topics does Anticleia linger? Easy-"on Laertes, her husband, Odysseus' father." And what sorts of things preoccupy her?-"domestic things, household things," "that he's not sleeping on clean sheets," "that his clothes aren't mended," "that sometimes he sleeps in the dirt." So, what kind of wife does she seem to have been? It's a large question, but easy now. They answer and we drop down a few lines to find corroborating evidence in Anticleia's farewell to her son: " But now quickly make for the light! And bear in mind all you have learnt here, so that one day you can tell your wife." How does this last exhortation, to report to Penelope, reinforce what we've just seen? What does Anticleia assume about husbands and wives? What do her words tell us about her own marriage?-"that she and Laertes probably talked about lots of things together," "that he probably confided everything in her," "that she thinks of marriage as a relationship of great closeness between husband and wife."Reminding ourselves of the hysteron proteron pattern, we complicate Homer's design. Why, beyond the formula of hysteron proteron, might Anticleia speak of her own death last? Students propose:-"perhaps because she hesitates to make her son feel guilty and she thinks hearing that she died of a broken heart will do so," "perhaps she's modest and considers other news more important than news about herself," "perhaps what she most wants to stay in his mind is how urgent his homecoming is," and so on. Without my talking formally about the difference between ascertainable fact and inference, they've made the distinction with "perhaps." 
I don't know whether Homer, in his genius, used such rhetorical patterns for the beauty and orderliness he found in them, or for characterization as well. … |
/**
* A producer/disposer/observer non-static method of a session bean class
* should be a business method of the session bean.
*/
private void checkBusinessMethod( ExecutableElement element,
CdiAnalysisResult result ,boolean isProducer, boolean isDisposer, boolean isObserver )
{
CompilationInfo compInfo = result.getInfo();
if ( !isProducer && !isDisposer && !isObserver ){
return;
}
Set<Modifier> modifiers = element.getModifiers();
if ( modifiers.contains(Modifier.STATIC) ){
return;
}
TypeElement containingClass = compInfo.getElementUtilities().
enclosingTypeElement(element);
if ( !AnnotationUtil.isSessionBean( containingClass, compInfo) ){
return;
}
String methodName = element.getSimpleName().toString();
boolean isBusinessMethod = true;
if ( methodName.startsWith(EJB)){
isBusinessMethod = false;
}
if (AnnotationUtil.isLifecycleCallback(element, compInfo)){
isBusinessMethod = false;
}
if ( modifiers.contains(Modifier.FINAL) ||
!modifiers.contains( Modifier.PUBLIC) )
{
isBusinessMethod = false;
}
if ( !isBusinessMethod ){
String key = null;
if ( isProducer ){
key = "ERR_ProducerNotBusiness";
}
else if ( isDisposer ){
key = "ERR_DisposerNotBusiness";
}
else if ( isObserver ){
key = "ERR_ObserverNotBusiness";
}
result.addError( element, NbBundle.getMessage(
AnnotationsAnalyzer.class, key));
}
} |
import sys


def count_segments(cells):
    """Return the contribution of one row or column of 0/1 cells.

    For every empty cell (0): add 1 if filled cells (1) exist on exactly
    one side of it, and 2 if filled cells exist on both sides.
    """
    remaining = cells.count(1)  # 1s at or after the current position
    seen = 0                    # 1s strictly before the current position
    total = 0
    for value in cells:
        if value == 1:
            remaining -= 1
            seen += 1
        elif seen > 0 and remaining > 0:
            total += 2
        elif seen > 0 or remaining > 0:
            total += 1
    return total


def main():
    """Read an n x m 0/1 grid from stdin and print the summed row and
    column contributions (originally two duplicated inline loops)."""
    n, m = map(int, sys.stdin.readline().split())
    grid = [list(map(int, sys.stdin.readline().split())) for _ in range(n)]
    answer = sum(count_segments(row) for row in grid)
    answer += sum(count_segments([grid[r][c] for r in range(n)])
                  for c in range(m))
    print(answer)


if __name__ == '__main__':
    main()
#include<stdio.h>
#include<stdlib.h>
extern void adainit();
extern void adafinal();
extern void _ada_cosupsys ( int n, int m, int *mc,
int ns, int *s, int nc, double *c );
void test_cosup_poly_system ( void );
/* interactive test on coefficient-support representation of poly systems */
void read_data_system ( int n, int m, int monsum, int moncnt[m] );
/* reads coefficients and supports for a system in n variables and m
equations, where the i-th polynomial has as many monomials as moncnt[i],
and the total number of monomials equals monsum */
int main(void)
{
   /* Entry point: run the interactive coefficient/support demo.
      puts() appends the trailing newline the original printf carried. */
   puts("\nTesting Coefficient Support Polynomial Representations");
   test_cosup_poly_system();
   return 0;
}
/* Interactive driver: prompts for the dimensions of a polynomial system
   (#variables n, #equations m) and for the number of monomials in each
   equation, echoes the counts and their sum, then delegates to
   read_data_system to read the actual data and call into the Ada code.
   NOTE(review): scanf return values are unchecked; malformed input leaves
   n/m indeterminate. */
void test_cosup_poly_system ( void )
{
   int n,m;
   printf("\nCreating a system from coefficients and supports.\n\n");
   printf("Give the number of variables : "); scanf("%d", &n);
   printf("Give the number of equations : "); scanf("%d", &m);
   {
      /* moncnt is a C99 variable-length array with one entry per equation;
         monsum accumulates the total number of monomials in the system. */
      int i,moncnt[m],monsum = 0;
      printf("Reading the number of monomials in every polynomial...\n");
      for(i=0; i<m; i++)
      {
         printf("  Give #monomials in polynomial %d : ", i+1);
         scanf("%d",&moncnt[i]);
      }
      printf("The monomial count : ");
      for(i=0; i<m; i++)
      {
         printf(" %d", moncnt[i]);
         monsum += moncnt[i];
      }
      printf(" and the sum is %d.\n", monsum);
      read_data_system(n,m,monsum,moncnt);
   }
}
/* Reads, for each of the m polynomials, the exponent vectors (n integers
   per monomial) and complex coefficients (two doubles per monomial:
   real and imaginary part -- presumably, judging from the paired reads;
   confirm against the Ada side), echoes everything back, and passes the
   flattened support and coefficient arrays to the Ada routine
   _ada_cosupsys.  moncnt[i] is the number of monomials of polynomial i
   and monsum their total, so the support array holds n*monsum entries
   and the coefficient array 2*monsum entries. */
void read_data_system ( int n, int m, int monsum, int moncnt[m] )
{
   int dimsup = n*monsum;  /* total #exponent entries: n per monomial */
   int dimcff = 2*monsum;  /* total #doubles: two per monomial */
   int sup[dimsup];
   double cff[dimcff];
   int i,j,k,indsup,indcff;
   printf("Number of entries in support : %d\n", dimsup);
   printf("Number of coefficients : %d\n", dimcff);
   /* Running write indices into the flattened sup/cff arrays. */
   indsup = 0;
   indcff = 0;
   for(i=0; i<m; i++)
   {
      printf("Reading the support and coefficients");
      printf(" of polynomial %d ...\n", i+1);
      for(j=0; j<moncnt[i]; j++)
      {
         printf("  give exponents of monomial %d : ", j+1);
         for(k=0; k<n; k++) scanf("%d", &sup[indsup++]);
         printf("  give two doubles : ");
         scanf("%lf", &cff[indcff++]);
         scanf("%lf", &cff[indcff++]);
      }
   }
   printf("\nThe support : ");
   for(i=0; i<dimsup; i++)
      printf(" %d", sup[i]);
   printf("\nThe coefficients :\n");
   for(i=0; i<dimcff; i++)
      printf("  %.15lf\n", cff[i]);
   printf("Calling Ada ...\n");
   /* adainit/adafinal bracket the single call into the Ada runtime. */
   adainit();
   _ada_cosupsys(n,m,moncnt,dimsup,sup,dimcff,cff);
   adafinal();
   printf("... done with the call.\n");
}
|
<gh_stars>100-1000
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2012 <NAME>
Copyright (c) 2013 <NAME>
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "CCEditBoxImplTizen.h"
#if (CC_TARGET_PLATFORM == CC_PLATFORM_TIZEN)
#include "CCEditBox.h"
#include "platform/tizen/CCOspForm.h"
using namespace Tizen::Ui::Controls;
NS_CC_EXT_BEGIN
// Factory hook used by the cross-platform EditBox: returns the
// Tizen-specific implementation for the given edit box. The caller owns
// the returned object -- presumably EditBox deletes it; confirm there.
EditBoxImpl* __createSystemEditBox(EditBox* pEditBox)
{
    return new EditBoxImplTizen(pEditBox);
}
// Initializes the implementation with no labels yet (they are created in
// initWithSize) and defaults: single-line input mode, initial-caps input
// flag, default return key, white text, gray placeholder, and a max
// length of -1 (passed unchanged to the keypad; presumably "no limit" --
// confirm against OspForm::ShowKeypad).
EditBoxImplTizen::EditBoxImplTizen(EditBox* pEditText)
: EditBoxImpl(pEditText)
, _label(NULL)
, _labelPlaceHolder(NULL)
, _editBoxInputMode(EditBox::InputMode::SINGLE_LINE)
, _editBoxInputFlag(EditBox::InputFlag::INTIAL_CAPS_ALL_CHARACTERS)
, _keyboardReturnType(EditBox::KeyboardReturnType::DEFAULT)
, _colText(Color3B::WHITE)
, _colPlaceHolder(Color3B::GRAY)
, _maxLength(-1)
{
}
// Empty destructor: the labels were handed to _editBox via addChild, so
// presumably the node hierarchy releases them -- confirm against EditBox.
EditBoxImplTizen::~EditBoxImplTizen()
{
}
// No-op on Tizen: there is no native widget to slide when the keyboard
// appears; the modal keypad form handles its own layout.
void EditBoxImplTizen::doAnimationWhenKeyboardMove(float duration, float distance)
{
}
static const int CC_EDIT_BOX_PADDING = 5;
bool EditBoxImplTizen::initWithSize(const Size& size)
{
// int fontSize = (int)size.height-12;
_label = LabelTTF::create("", "", size.height-12);
// align the text vertically center
_label->setAnchorPoint(Point(0, 0.5f));
_label->setPosition(Point(CC_EDIT_BOX_PADDING, size.height / 2.0f));
_label->setColor(_colText);
_editBox->addChild(_label);
_labelPlaceHolder = LabelTTF::create("", "", size.height-12);
// align the text vertically center
_labelPlaceHolder->setAnchorPoint(Point(0, 0.5f));
_labelPlaceHolder->setPosition(Point(CC_EDIT_BOX_PADDING, size.height / 2.0f));
_labelPlaceHolder->setVisible(false);
_labelPlaceHolder->setColor(_colPlaceHolder);
_editBox->addChild(_labelPlaceHolder);
_editSize = size;
return true;
}
void EditBoxImplTizen::setFont(const char* pFontName, int fontSize)
{
if(_label != NULL) {
_label->setFontName(pFontName);
_label->setFontSize(fontSize);
}
if(_labelPlaceHolder != NULL) {
_labelPlaceHolder->setFontName(pFontName);
_labelPlaceHolder->setFontSize(fontSize);
}
}
// Stores the text color and applies it to the main label immediately.
// NOTE(review): unlike setFont, _label is not NULL-checked here.
void EditBoxImplTizen::setFontColor(const Color3B& color)
{
    _colText = color;
    _label->setColor(color);
}
// Applies the face name and point size to the placeholder label only.
void EditBoxImplTizen::setPlaceholderFont(const char* pFontName, int fontSize)
{
    if (_labelPlaceHolder == NULL)
        return;
    _labelPlaceHolder->setFontName(pFontName);
    _labelPlaceHolder->setFontSize(fontSize);
}
// Stores the placeholder color and applies it to the placeholder label.
// NOTE(review): no NULL check on _labelPlaceHolder, mirroring setFontColor.
void EditBoxImplTizen::setPlaceholderFontColor(const Color3B& color)
{
    _colPlaceHolder = color;
    _labelPlaceHolder->setColor(color);
}
// --- Simple accessors. The stored mode/flag/length only take effect when
// openKeyboard() configures the native keypad.
void EditBoxImplTizen::setInputMode(EditBox::InputMode inputMode)
{
    _editBoxInputMode = inputMode;
}
// The value is forwarded unchanged to ShowKeypad; -1 is the default set
// by the constructor (presumably "unlimited" -- confirm on the Osp side).
void EditBoxImplTizen::setMaxLength(int maxLength)
{
    _maxLength = maxLength;
}
int EditBoxImplTizen::getMaxLength()
{
    return _maxLength;
}
void EditBoxImplTizen::setInputFlag(EditBox::InputFlag inputFlag)
{
    _editBoxInputFlag = inputFlag;
}
// NOTE(review): _keyboardReturnType is stored but never consulted by
// openKeyboard() in this file -- the return key type is effectively ignored.
void EditBoxImplTizen::setReturnType(EditBox::KeyboardReturnType returnType)
{
    _keyboardReturnType = returnType;
}
// Always reports "not editing"; the keypad is shown as a separate modal
// form, so presumably no in-place editing state exists -- TODO confirm.
bool EditBoxImplTizen::isEditing()
{
    return false;
}
void EditBoxImplTizen::setText(const char* pText)
{
if (pText != NULL)
{
_text = pText;
if (_text.length() > 0)
{
_labelPlaceHolder->setVisible(false);
std::string strToShow;
if (EditBox::InputFlag::PASSWORD == _editBoxInputFlag)
{
long length = cc_utf8_strlen(_text.c_str(), -1);
for (long i = 0; i < length; i++)
{
strToShow.append("*");
}
}
else
{
strToShow = _text;
}
_label->setString(strToShow.c_str());
// Clip the text width to fit to the text box
float fMaxWidth = _editSize.width - CC_EDIT_BOX_PADDING * 2;
Rect clippingRect = _label->getTextureRect();
if(clippingRect.size.width > fMaxWidth) {
clippingRect.size.width = fMaxWidth;
_label->setTextureRect(clippingRect);
}
}
else
{
_labelPlaceHolder->setVisible(true);
_label->setString("");
}
}
}
// Returns a pointer into the internally stored string; it is only valid
// until the next setText() mutates _text.
const char* EditBoxImplTizen::getText(void)
{
    return _text.c_str();
}
// Stores the placeholder string and pushes it into the placeholder label,
// making the label visible when there is placeholder text but no content.
void EditBoxImplTizen::setPlaceHolder(const char* pText)
{
    if (pText == NULL)
        return;
    _placeHolder = pText;
    const bool shouldShow = !_placeHolder.empty() && _text.empty();
    if (shouldShow)
    {
        _labelPlaceHolder->setVisible(true);
    }
    _labelPlaceHolder->setString(_placeHolder.c_str());
}
// The overrides below are intentionally empty on Tizen -- presumably the
// labels (children of _editBox) inherit position/visibility/size from the
// node hierarchy, so nothing platform-specific is needed; confirm against
// the base EditBoxImpl contract.
void EditBoxImplTizen::setPosition(const Point& pos)
{
}
void EditBoxImplTizen::setVisible(bool visible)
{
}
void EditBoxImplTizen::setContentSize(const Size& size)
{
}
void EditBoxImplTizen::setAnchorPoint(const Point& anchorPoint)
{
}
void EditBoxImplTizen::visit(void)
{
}
void EditBoxImplTizen::onEnter(void)
{
}
// Called by the native keypad form when editing finishes; ctx is the
// EditBoxImplTizen instance that opened the keypad (see openKeyboard).
static void editBoxCallbackFunc(const char* pText, void* ctx)
{
    EditBoxImplTizen* thiz = (EditBoxImplTizen*)ctx;
    // Commit the keypad result into the edit box before notifying anyone.
    thiz->setText(pText);
    // C++ delegate notifications, in order: changed, ended, return.
    if (thiz->getDelegate() != NULL)
    {
        thiz->getDelegate()->editBoxTextChanged(thiz->getEditBox(), thiz->getText());
        thiz->getDelegate()->editBoxEditingDidEnd(thiz->getEditBox());
        thiz->getDelegate()->editBoxReturn(thiz->getEditBox());
    }
    EditBox* pEditBox = thiz->getEditBox();
    if (NULL != pEditBox && 0 != pEditBox->getScriptEditBoxHandler())
    {
        // Script-binding events mirror the delegate calls above. The same
        // CommonScriptData instance is reused, with eventName rewritten in
        // place ("changed" -> "ended" -> "return") between sends.
        CommonScriptData data(pEditBox->getScriptEditBoxHandler(), "changed",pEditBox);
        ScriptEvent event(kCommonEvent,(void*)&data);
        ScriptEngineManager::getInstance()->getScriptEngine()->sendEvent(&event);
        memset(data.eventName,0,64*sizeof(char));
        strncpy(data.eventName,"ended",64);
        event.data = (void*)&data;
        ScriptEngineManager::getInstance()->getScriptEngine()->sendEvent(&event);
        memset(data.eventName,0,64*sizeof(char));
        strncpy(data.eventName,"return",64);
        event.data = (void*)&data;
        ScriptEngineManager::getInstance()->getScriptEngine()->sendEvent(&event);
    }
}
// Opens the native Tizen keypad for this edit box: notifies the C++
// delegate and the script binding that editing began, translates the
// cross-platform input mode and flag into keypad settings, then shows the
// shared OspForm keypad, which invokes editBoxCallbackFunc when it closes.
void EditBoxImplTizen::openKeyboard()
{
    if (_delegate != NULL)
    {
        _delegate->editBoxEditingDidBegin(_editBox);
    }
    EditBox* pEditBox = this->getEditBox();
    if (NULL != pEditBox && 0 != pEditBox->getScriptEditBoxHandler())
    {
        CommonScriptData data(pEditBox->getScriptEditBoxHandler(), "began",pEditBox);
        ScriptEvent event(kCommonEvent,(void*)&data);
        ScriptEngineManager::getInstance()->getScriptEngine()->sendEvent(&event);
    }
    // Map the cross-platform input mode onto a keypad style/category.
    KeypadStyle keypadStyle = KEYPAD_STYLE_NORMAL;
    KeypadInputModeCategory keypadCategory = KEYPAD_MODE_ALPHA;
    bool bSingleLineEnabled = false;
    switch (_editBoxInputMode)
    {
        case EditBox::InputMode::ANY:
            keypadStyle = KEYPAD_STYLE_NORMAL;
            break;
        case EditBox::InputMode::EMAIL_ADDRESS:
            keypadStyle = KEYPAD_STYLE_EMAIL;
            break;
        // NOTE(review): DECIMAL is mapped to the same plain number keypad
        // as NUMERIC -- verify that a decimal separator is still reachable.
        case EditBox::InputMode::NUMERIC:
        case EditBox::InputMode::DECIMAL:
            keypadStyle = KEYPAD_STYLE_NUMBER;
            keypadCategory = KEYPAD_MODE_NUMERIC;
            break;
        case EditBox::InputMode::PHONE_NUMBER:
            keypadStyle = KEYPAD_STYLE_PHONE_NUMBER;
            break;
        case EditBox::InputMode::URL:
            keypadStyle = KEYPAD_STYLE_URL;
            break;
        // SINGLE_LINE only toggles the flag and keeps the normal style.
        case EditBox::InputMode::SINGLE_LINE:
            bSingleLineEnabled = true;
            break;
        default:
            keypadStyle = KEYPAD_STYLE_NORMAL;
            break;
    }
    // The input flag can override the style (password) or turn off
    // predictive text (sensitive).
    bool bTextPrediction = true;
    switch (_editBoxInputFlag)
    {
        case EditBox::InputFlag::PASSWORD:
            keypadStyle = KEYPAD_STYLE_PASSWORD;
            break;
        case EditBox::InputFlag::SENSITIVE:
            bTextPrediction = false;
            break;
        default:
            break;
    }
    // Hand off to the shared form; 'this' comes back as the callback ctx.
    ((OspForm *)OspApplication::GetInstance()->getOspForm())->ShowKeypad(
        _text.c_str(),
        keypadStyle,
        keypadCategory,
        bSingleLineEnabled,
        bTextPrediction,
        _maxLength,
        editBoxCallbackFunc,
        (void*)this);
}
// Dismisses the native keypad form, if it is currently showing.
void EditBoxImplTizen::closeKeyboard()
{
    ((OspForm *)OspApplication::GetInstance()->getOspForm())->CloseKeypad();
}
NS_CC_EXT_END
#endif /* #if (CC_TARGET_PLATFORM == CC_PLATFORM_TIZEN) */
|
def solve(a, b, c, x, y):
    """Minimum cost to obtain at least x A-pizzas and y B-pizzas.

    An A-pizza costs ``a``, a B-pizza ``b``, and an AB-pizza ``c``;
    two AB-pizzas can be exchanged for one A-pizza plus one B-pizza.
    The optimum is always one of three strategies (replacing the original
    four-way branch chain, which duplicated this arithmetic):
      1. buy only A- and B-pizzas;
      2. buy AB-pizzas in pairs to cover both demands entirely;
      3. buy AB-pizza pairs for the smaller demand, then singles of the
         pricier-demand kind for the remainder.
    """
    only_singles = a * x + b * y
    only_pairs = 2 * c * max(x, y)
    mixed = 2 * c * min(x, y) + a * max(x - y, 0) + b * max(y - x, 0)
    return min(only_singles, only_pairs, mixed)


if __name__ == '__main__':
    A, B, C, X, Y = map(int, input().split())
    print(solve(A, B, C, X, Y))
#include <iostream>
using namespace std;

// Reads n pairs (a, b) and prints how many of them satisfy a + 2 <= b.
int main() {
    long long n;
    cin >> n;

    long long matches = 0;
    for (long long i = 0; i < n; ++i) {
        long long a, b;
        cin >> a >> b;
        if (a + 2 <= b) {
            ++matches;
        }
    }
    cout << matches << endl;
    return 0;
}
/**
* Instances of this class describe operating system fonts.
* <p>
* For platform-independent behaviour, use the get and set methods
* corresponding to the following properties:
* <dl>
* <dt>height</dt><dd>the height of the font in points</dd>
* <dt>name</dt><dd>the face name of the font, which may include the foundry</dd>
* <dt>style</dt><dd>A bitwise combination of NORMAL, ITALIC and BOLD</dd>
* </dl>
* If extra, platform-dependent functionality is required:
* <ul>
* <li>On <em>Windows</em>, the data member of the <code>FontData</code>
* corresponds to a Windows <code>LOGFONT</code> structure whose fields
* may be retrieved and modified.</li>
* <li>On <em>X</em>, the fields of the <code>FontData</code> correspond
* to the entries in the font's XLFD name and may be retrieved and modified.
* </ul>
* Application code does <em>not</em> need to explicitly release the
* resources managed by each instance when those instances are no longer
* required, and thus no <code>dispose()</code> method is provided.
*
* @see Font
* @see <a href="http://www.eclipse.org/swt/">Sample code and further information</a>
*/
public final class FontData {
/**
* A Win32 LOGFONT struct
* (Warning: This field is platform dependent)
* <p>
* <b>IMPORTANT:</b> This field is <em>not</em> part of the SWT
* public API. It is marked public only so that it can be shared
* within the packages provided by SWT. It is not available on all
* platforms and should never be accessed from application code.
* </p>
*
* @noreference This field is not intended to be referenced by clients.
*/
public LOGFONT data;
/**
* The height of the font data in points
* (Warning: This field is platform dependent)
* <p>
* <b>IMPORTANT:</b> This field is <em>not</em> part of the SWT
* public API. It is marked public only so that it can be shared
* within the packages provided by SWT. It is not available on all
* platforms and should never be accessed from application code.
* </p>
*
* @noreference This field is not intended to be referenced by clients.
*/
public float height;
/**
* The locales of the font
*/
String lang, country, variant;
/**
 * Constructs a new font data with platform defaults: a LOGFONT of the
 * platform's character width (W or A), the default charset, and a
 * height of 12 points.
 */
public FontData() {
	if (OS.IsUnicode) {
		data = new LOGFONTW();
	} else {
		data = new LOGFONTA();
	}
	// The default charset keeps wildcard font searching working
	// out of the box.
	data.lfCharSet = (byte)OS.DEFAULT_CHARSET;
	height = 12;
}
/**
 * Constructs a new font data given the Windows <code>LOGFONT</code>
 * that it should represent.
 *
 * @param data the <code>LOGFONT</code> for the result
 * @param height the height of the result, in points
 */
FontData(LOGFONT data, float height) {
	this.data = data;
	this.height = height;
}
/**
* Constructs a new FontData given a string representation
* in the form generated by the <code>FontData.toString</code>
* method.
* <p>
* Note that the representation varies between platforms,
* and a FontData can only be created from a string that was
* generated on the same platform.
* </p>
*
* @param string the string representation of a <code>FontData</code> (must not be null)
*
* @exception IllegalArgumentException <ul>
* <li>ERROR_NULL_ARGUMENT - if the argument is null</li>
* <li>ERROR_INVALID_ARGUMENT - if the argument does not represent a valid description</li>
* </ul>
*
* @see #toString
*/
public FontData(String string) {
	if (string == null) SWT.error(SWT.ERROR_NULL_ARGUMENT);
	/* Field 1: serialization format version, must be "1" (see toString()). */
	int start = 0;
	int end = string.indexOf('|');
	if (end == -1) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	String version1 = string.substring(start, end);
	try {
		if (Integer.parseInt(version1) != 1) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	} catch (NumberFormatException e) {
		SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	}
	/* Field 2: face name. */
	start = end + 1;
	end = string.indexOf('|', start);
	if (end == -1) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	String name = string.substring(start, end);
	/* Field 3: height in points (fractional values allowed). */
	start = end + 1;
	end = string.indexOf('|', start);
	if (end == -1) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	float height = 0;
	try {
		height = Float.parseFloat(string.substring(start, end));
	} catch (NumberFormatException e) {
		SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	}
	/* Field 4: SWT style bits (NORMAL/BOLD/ITALIC). */
	start = end + 1;
	end = string.indexOf('|', start);
	if (end == -1) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	int style = 0;
	try {
		style = Integer.parseInt(string.substring(start, end));
	} catch (NumberFormatException e) {
		SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	}
	start = end + 1;
	end = string.indexOf('|', start);
	/*
	 * The portable portion of the string has been consumed; build a
	 * default LOGFONT from it. The remaining fields are the optional
	 * platform-specific section and are only honored when the string
	 * was generated on Windows ("WINDOWS|1|...").
	 */
	data = OS.IsUnicode ? (LOGFONT)new LOGFONTW() : new LOGFONTA();
	data.lfCharSet = (byte)OS.DEFAULT_CHARSET;
	setName(name);
	setHeight(height);
	setStyle(style);
	if (end == -1) return;
	String platform = string.substring(start, end);
	start = end + 1;
	end = string.indexOf('|', start);
	if (end == -1) return;
	String version2 = string.substring(start, end);
	if (platform.equals("WINDOWS") && version2.equals("1")) { //$NON-NLS-1$//$NON-NLS-2$
		/*
		 * Parse the raw LOGFONT fields in the exact order toString()
		 * emits them. Any truncated or malformed field abandons the
		 * platform section and keeps the portable defaults built above.
		 */
		LOGFONT newData = OS.IsUnicode ? (LOGFONT)new LOGFONTW() : new LOGFONTA();
		try {
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfHeight = Integer.parseInt(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfWidth = Integer.parseInt(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfEscapement = Integer.parseInt(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfOrientation = Integer.parseInt(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfWeight = Integer.parseInt(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfItalic = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfUnderline = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfStrikeOut = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfCharSet = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfOutPrecision = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfClipPrecision = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfQuality = Byte.parseByte(string.substring(start, end));
			start = end + 1;
			end = string.indexOf('|', start);
			if (end == -1) return;
			newData.lfPitchAndFamily = Byte.parseByte(string.substring(start, end));
			start = end + 1;
		} catch (NumberFormatException e) {
			/* Restore the portable defaults; newData is discarded. */
			setName(name);
			setHeight(height);
			setStyle(style);
			return;
		}
		/* The final field is the face name, NUL-padded to LF_FACESIZE. */
		TCHAR buffer = new TCHAR(0, string.substring(start), false);
		int length = Math.min(OS.LF_FACESIZE - 1, buffer.length());
		if (OS.IsUnicode) {
			char[] lfFaceName = ((LOGFONTW)newData).lfFaceName;
			System.arraycopy(buffer.chars, 0, lfFaceName, 0, length);
		} else {
			byte[] lfFaceName = ((LOGFONTA)newData).lfFaceName;
			System.arraycopy(buffer.bytes, 0, lfFaceName, 0, length);
		}
		data = newData;
	}
}
/**
* Constructs a new font data given a font name,
* the height of the desired font in points,
* and a font style.
*
* @param name the name of the font (must not be null)
* @param height the font height in points
* @param style a bit or combination of NORMAL, BOLD, ITALIC
*
* @exception IllegalArgumentException <ul>
* <li>ERROR_NULL_ARGUMENT - when the font name is null</li>
* <li>ERROR_INVALID_ARGUMENT - if the height is negative</li>
* </ul>
*/
public FontData(String name, int height, int style) {
	if (name == null) SWT.error(SWT.ERROR_NULL_ARGUMENT);
	if (OS.IsUnicode) {
		data = new LOGFONTW();
	} else {
		data = new LOGFONTA();
	}
	/*
	* Seed the charset with DEFAULT_CHARSET so that wildcard
	* font matching works properly without further configuration.
	*/
	data.lfCharSet = (byte)OS.DEFAULT_CHARSET;
	setName(name);
	setHeight(height);
	setStyle(style);
}
/*public*/ FontData(String name, float height, int style) {
	if (name == null) SWT.error(SWT.ERROR_NULL_ARGUMENT);
	if (OS.IsUnicode) {
		data = new LOGFONTW();
	} else {
		data = new LOGFONTA();
	}
	/*
	* Seed the charset with DEFAULT_CHARSET so that wildcard
	* font matching works properly without further configuration.
	*/
	data.lfCharSet = (byte)OS.DEFAULT_CHARSET;
	setName(name);
	setHeight(height);
	setStyle(style);
}
/**
* Compares the argument to the receiver, and returns true
* if they represent the <em>same</em> object using a class
* specific comparison.
*
* @param object the object to compare with this object
* @return <code>true</code> if the object is the same as this object and <code>false</code> otherwise
*
* @see #hashCode
*/
/*
 * Compares every LOGFONT field plus the separately-stored height.
 * The lfCharSet comparison previously appeared twice in this chain;
 * the redundant second occurrence has been removed (no behavior change).
 */
public boolean equals (Object object) {
	if (object == this) return true;
	if (!(object instanceof FontData)) return false;
	FontData fd = (FontData)object;
	LOGFONT lf = fd.data;
	return data.lfCharSet == lf.lfCharSet &&
		/*
		* This code is intentionally commented. When creating
		* a FontData, lfHeight is not necessarily set. Instead
		* we check the height field which is always set.
		*/
	//	data.lfHeight == lf.lfHeight &&
		height == fd.height &&
		data.lfWidth == lf.lfWidth &&
		data.lfEscapement == lf.lfEscapement &&
		data.lfOrientation == lf.lfOrientation &&
		data.lfWeight == lf.lfWeight &&
		data.lfItalic == lf.lfItalic &&
		data.lfUnderline == lf.lfUnderline &&
		data.lfStrikeOut == lf.lfStrikeOut &&
		data.lfOutPrecision == lf.lfOutPrecision &&
		data.lfClipPrecision == lf.lfClipPrecision &&
		data.lfQuality == lf.lfQuality &&
		data.lfPitchAndFamily == lf.lfPitchAndFamily &&
		getName().equals(fd.getName());
}
/*
 * Native callback invoked by OS.EnumSystemLocales (see setLocale).
 * For each system locale it compares the ISO language (and, when set,
 * country) against the fields parsed from the locale string; on a match
 * it stores the locale's default ANSI charset into data.lfCharSet.
 * Returns 1 to continue enumeration, 0 to stop after a match.
 */
int /*long*/ EnumLocalesProc(int /*long*/ lpLocaleString) {
	/* Get the locale ID */
	int length = 8;
	TCHAR buffer = new TCHAR(0, length);
	int byteCount = length * TCHAR.sizeof;
	OS.MoveMemory(buffer, lpLocaleString, byteCount);
	/* The locale string is a hexadecimal LCID. */
	int lcid = Integer.parseInt(buffer.toString(0, buffer.strlen ()), 16);
	/* Check the language */
	int size = OS.GetLocaleInfo(lcid, OS.LOCALE_SISO639LANGNAME, buffer, length);
	if (size <= 0 || !lang.equals(buffer.toString(0, size - 1))) return 1;
	/* Check the country */
	if (country != null) {
		size = OS.GetLocaleInfo(lcid, OS.LOCALE_SISO3166CTRYNAME, buffer, length);
		if (size <= 0 || !country.equals(buffer.toString(0, size - 1))) return 1;
	}
	/* Get the charset */
	size = OS.GetLocaleInfo(lcid, OS.LOCALE_IDEFAULTANSICODEPAGE, buffer, length);
	if (size <= 0) return 1;
	int cp = Integer.parseInt(buffer.toString(0, size - 1));
	int [] lpCs = new int[8];
	/* Translate the ANSI code page into a GDI charset value. */
	OS.TranslateCharsetInfo(cp, lpCs, OS.TCI_SRCCODEPAGE);
	data.lfCharSet = (byte)lpCs[0];
	return 0;
}
/**
* Returns the height of the receiver in points.
*
* @return the height of this FontData
*
* @see #setHeight(int)
*/
public int getHeight() {
	// Add 0.5 and truncate: rounds the stored fractional height
	// to the nearest whole point.
	float rounded = 0.5f + height;
	return (int)rounded;
}
/*public*/ float getHeightF() {
	// Non-API accessor: returns the exact (possibly fractional) height
	// in points, without the rounding applied by getHeight().
	return height;
}
/**
* Returns the locale of the receiver.
* <p>
* The locale determines which platform character set this
* font is going to use. Widgets and graphics operations that
* use this font will convert UNICODE strings to the platform
* character set of the specified locale.
* </p>
* <p>
* On platforms where there are multiple character sets for a
* given language/country locale, the variant portion of the
* locale will determine the character set.
* </p>
*
* @return the <code>String</code> representing a Locale object
* @since 3.0
*/
public String getLocale () {
	char sep = '_';
	/* Assemble lang_country_variant, appending a separator after each
	 * present segment, then trim any trailing separator. */
	String result = ""; //$NON-NLS-1$
	if (lang != null) result += lang + sep;
	if (country != null) result += country + sep;
	if (variant != null) result += variant;
	int length = result.length ();
	if (length > 0 && result.charAt (length - 1) == sep) {
		result = result.substring (0, length - 1);
	}
	return result;
}
/**
* Returns the name of the receiver.
* On platforms that support font foundries, the return value will
* be the foundry followed by a dash ("-") followed by the face name.
*
* @return the name of this <code>FontData</code>
*
* @see #setName
*/
public String getName() {
	char[] chars;
	if (OS.IsUnicode) {
		/* The wide LOGFONT already stores the face name as UTF-16. */
		chars = ((LOGFONTW)data).lfFaceName;
	} else {
		/* Convert the ANSI face name bytes to Unicode characters. */
		chars = new char[OS.LF_FACESIZE];
		byte[] bytes = ((LOGFONTA)data).lfFaceName;
		OS.MultiByteToWideChar (OS.CP_ACP, OS.MB_PRECOMPOSED, bytes, bytes.length, chars, chars.length);
	}
	/* lfFaceName is NUL-padded; take characters up to the first NUL. */
	int index = 0;
	while (index < chars.length) {
		if (chars [index] == 0) break;
		index++;
	}
	return new String (chars, 0, index);
}
/**
* Returns the style of the receiver which is a bitwise OR of
* one or more of the <code>SWT</code> constants NORMAL, BOLD
* and ITALIC.
*
* @return the style of this <code>FontData</code>
*
* @see #setStyle
*/
public int getStyle() {
	// 700 is the GDI bold weight (FW_BOLD).
	boolean bold = data.lfWeight == 700;
	boolean italic = data.lfItalic != 0;
	int style = SWT.NORMAL;
	if (bold) style |= SWT.BOLD;
	if (italic) style |= SWT.ITALIC;
	return style;
}
/**
* Returns an integer hash code for the receiver. Any two
* objects that return <code>true</code> when passed to
* <code>equals</code> must return the same value for this
* method.
*
* @return the receiver's hash
*
* @see #equals
*/
public int hashCode () {
	/*
	 * NOTE: data.lfCharSet previously appeared twice in this XOR chain;
	 * since x ^ x == 0 the two occurrences cancelled each other out and
	 * the charset never influenced the hash. It is now included exactly
	 * once so it contributes, matching the fields compared by equals().
	 */
	return data.lfCharSet ^ getHeight() << 8 ^ data.lfWidth ^ data.lfEscapement ^
		data.lfOrientation ^ data.lfWeight ^ data.lfItalic ^ data.lfUnderline ^
		data.lfStrikeOut ^ data.lfOutPrecision ^
		data.lfClipPrecision ^ data.lfQuality ^ data.lfPitchAndFamily ^
		getName().hashCode();
}
/**
* Sets the height of the receiver. The parameter is
* specified in terms of points, where a point is one
* seventy-second of an inch.
*
* @param height the height of the <code>FontData</code>
*
* @exception IllegalArgumentException <ul>
* <li>ERROR_INVALID_ARGUMENT - if the height is negative</li>
* </ul>
*
* @see #getHeight
*/
public void setHeight(int height) {
	if (height < 0) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	this.height = height;
	// A zero lfWidth leaves width selection to the system.
	// NOTE(review): the float overload below does not reset lfWidth —
	// confirm whether that asymmetry is intentional.
	data.lfWidth = 0;
}
/*public*/ void setHeight(float height) {
	// Non-API variant accepting fractional point sizes.
	// NOTE(review): unlike setHeight(int) this does not reset
	// data.lfWidth — confirm whether that is intentional.
	if (height < 0) SWT.error(SWT.ERROR_INVALID_ARGUMENT);
	this.height = height;
}
/**
* Sets the locale of the receiver.
* <p>
* The locale determines which platform character set this
* font is going to use. Widgets and graphics operations that
* use this font will convert UNICODE strings to the platform
* character set of the specified locale.
* </p>
* <p>
* On platforms where there are multiple character sets for a
* given language/country locale, the variant portion of the
* locale will determine the character set.
* </p>
*
* @param locale the <code>String</code> representing a Locale object
* @see java.util.Locale#toString
*/
public void setLocale(String locale) {
	/* Split "lang_country_variant" on '_'; each segment is optional. */
	lang = country = variant = null;
	if (locale != null) {
		char sep = '_';
		int length = locale.length();
		int firstSep, secondSep;
		firstSep = locale.indexOf(sep);
		if (firstSep == -1) {
			firstSep = secondSep = length;
		} else {
			secondSep = locale.indexOf(sep, firstSep + 1);
			if (secondSep == -1) secondSep = length;
		}
		if (firstSep > 0) lang = locale.substring(0, firstSep);
		if (secondSep > firstSep + 1) country = locale.substring(firstSep + 1, secondSep);
		if (length > secondSep + 1) variant = locale.substring(secondSep + 1);
	}
	if (lang == null) {
		/* No language: fall back to the default charset. */
		data.lfCharSet = (byte)OS.DEFAULT_CHARSET;
	} else {
		/*
		 * Enumerate system locales; EnumLocalesProc updates
		 * data.lfCharSet when it finds a matching locale.
		 */
		Callback callback = new Callback (this, "EnumLocalesProc", 1); //$NON-NLS-1$
		int /*long*/ lpEnumLocalesProc = callback.getAddress ();
		if (lpEnumLocalesProc == 0) SWT.error(SWT.ERROR_NO_MORE_CALLBACKS);
		OS.EnumSystemLocales(lpEnumLocalesProc, OS.LCID_SUPPORTED);
		callback.dispose ();
	}
}
/**
* Sets the name of the receiver.
* <p>
* Some platforms support font foundries. On these platforms, the name
* of the font specified in setName() may have one of the following forms:
* <ol>
* <li>a face name (for example, "courier")</li>
* <li>a foundry followed by a dash ("-") followed by a face name (for example, "adobe-courier")</li>
* </ol>
* In either case, the name returned from getName() will include the
* foundry.
* </p>
* <p>
* On platforms that do not support font foundries, only the face name
* (for example, "courier") is used in <code>setName()</code> and
* <code>getName()</code>.
* </p>
*
* @param name the name of the font data (must not be null)
* @exception IllegalArgumentException <ul>
* <li>ERROR_NULL_ARGUMENT - when the font name is null</li>
* </ul>
*
* @see #getName
*/
public void setName(String name) {
	if (name == null) SWT.error(SWT.ERROR_NULL_ARGUMENT);
	/* The field lfFaceName must be NULL terminated */
	TCHAR buffer = new TCHAR(0, name, true);
	/* Truncate to the fixed LOGFONT face-name capacity, reserving
	 * one slot for the terminating NUL. */
	int length = Math.min(OS.LF_FACESIZE - 1, buffer.length());
	if (OS.IsUnicode) {
		char[] lfFaceName = ((LOGFONTW)data).lfFaceName;
		/* Zero the whole field first so any previous, longer name
		 * leaves no trailing characters behind. */
		for (int i = 0; i < lfFaceName.length; i++) lfFaceName[i] = 0;
		System.arraycopy(buffer.chars, 0, lfFaceName, 0, length);
	} else {
		byte[] lfFaceName = ((LOGFONTA)data).lfFaceName;
		for (int i = 0; i < lfFaceName.length; i++) lfFaceName[i] = 0;
		System.arraycopy(buffer.bytes, 0, lfFaceName, 0, length);
	}
}
/**
* Sets the style of the receiver to the argument which must
* be a bitwise OR of one or more of the <code>SWT</code>
* constants NORMAL, BOLD and ITALIC. All other style bits are
* ignored.
*
* @param style the new style for this <code>FontData</code>
*
* @see #getStyle
*/
public void setStyle(int style) {
	// 700 is the GDI bold weight (FW_BOLD); 0 leaves the weight unspecified.
	boolean wantBold = (style & SWT.BOLD) == SWT.BOLD;
	boolean wantItalic = (style & SWT.ITALIC) == SWT.ITALIC;
	if (wantBold) {
		data.lfWeight = 700;
	} else {
		data.lfWeight = 0;
	}
	if (wantItalic) {
		data.lfItalic = 1;
	} else {
		data.lfItalic = 0;
	}
}
/**
* Returns a string representation of the receiver which is suitable
* for constructing an equivalent instance using the
* <code>FontData(String)</code> constructor.
*
* @return a string representation of the FontData
*
* @see FontData
*/
public String toString() {
	/*
	 * Format: 1|name|height|style|WINDOWS|1|<14 LOGFONT fields>|name
	 * The leading "1" and the "1" after "WINDOWS" are format versions
	 * checked by FontData(String).
	 */
	String sep = "|"; //$NON-NLS-1$
	StringBuffer buffer = new StringBuffer(128);
	String name = getName();
	buffer.append("1"); //$NON-NLS-1$
	buffer.append(sep).append(name);
	buffer.append(sep).append(getHeightF());
	buffer.append(sep).append(getStyle());
	buffer.append(sep).append("WINDOWS"); //$NON-NLS-1$
	buffer.append(sep).append(1);
	buffer.append(sep).append(data.lfHeight);
	buffer.append(sep).append(data.lfWidth);
	buffer.append(sep).append(data.lfEscapement);
	buffer.append(sep).append(data.lfOrientation);
	buffer.append(sep).append(data.lfWeight);
	buffer.append(sep).append(data.lfItalic);
	buffer.append(sep).append(data.lfUnderline);
	buffer.append(sep).append(data.lfStrikeOut);
	buffer.append(sep).append(data.lfCharSet);
	buffer.append(sep).append(data.lfOutPrecision);
	buffer.append(sep).append(data.lfClipPrecision);
	buffer.append(sep).append(data.lfQuality);
	buffer.append(sep).append(data.lfPitchAndFamily);
	buffer.append(sep).append(name);
	return buffer.toString();
}
/**
* Invokes platform specific functionality to allocate a new font data.
* <p>
* <b>IMPORTANT:</b> This method is <em>not</em> part of the public
* API for <code>FontData</code>. It is marked public only so that
* it can be shared within the packages provided by SWT. It is not
* available on all platforms, and should never be called from
* application code.
* </p>
*
* @param data the <code>LOGFONT</code> for the font data
* @param height the height of the font data
* @return a new font data object containing the specified <code>LOGFONT</code> and height
*
* @noreference This method is not intended to be referenced by clients.
*/
public static FontData win32_new(LOGFONT data, float height) {
	// Delegates to the package-private constructor; the LOGFONT is
	// stored by reference, not copied.
	return new FontData(data, height);
}
} |
<reponame>manips2002/pdq<gh_stars>1-10
// This file is part of PDQ (https://github.com/ProofDrivenQuerying/pdq) which is released under the MIT license.
// See accompanying LICENSE for copyright notice and full details.
package uk.ac.ox.cs.pdq.test;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.junit.Test;
import uk.ac.ox.cs.pdq.ui.io.sql.SQLLikeQueryParser;
import uk.ac.ox.cs.pdq.ui.io.sql.antlr.SQLiteLexer;
public class SQLLikeParserTest {
// testParse method tests various SQL strings
@Test
public void testParse() {
int size = 28;
boolean[] succ = new boolean[size];
String[] sql = new String[size];
succ[0] = false; sql[0] = ""; // Line:1 Char:0 Missing SELECT Got: <EOF>
succ[1] = false; sql[1] = "SELECT"; // Line:1 Char:6 Missing: IDENTIFIER Got: <EOF>
succ[2] = false; sql[2] = "SELECT *"; // Line:1 Char:8 Missing TABLE_NAME Got: <EOF>
succ[3] = false; sql[3] = "SELECT * FROM"; // Line:1 Char:13 Missing TABLE_NAME Got: <EOF>
succ[4] = true; sql[4] = "SELECT * FROM R"; // Success
succ[5] = false; sql[5] = "SELECT * FROM R AS"; // Line:1 Char:18 Missing: IDENTIFIER Got: <EOF>
succ[6] = true; sql[6] = "SELECT * FROM R AS a0"; // Success
succ[7] = false; sql[7] = "SELECT * FROM R AS a0 JOIN"; // Line:1 Char:26 Missing TABLE_NAME Got: <EOF>
succ[8] = true; sql[8] = "SELECT * FROM R AS a0 JOIN S"; // Success
succ[9] = false; sql[9] = "SELECT * FROM R AS a0 JOIN S AS"; // Line:1 Char:31 Missing: IDENTIFIER Got: <EOF>
succ[10] = true; sql[10] = "SELECT * FROM R AS a0 JOIN S AS a1"; // Success
succ[11] = false; sql[11] = "SELECT * FROM R AS a0 JOIN S AS a1 ON"; // Line:1 Char:37 Missing: ROW_VALUE_CONSTRUCTOR_ELEMENT Got: <EOF>
succ[12] = false; sql[12] = "SELECT * FROM R AS a0 JOIN S AS a1 ON a0"; // Line:1 Char:40 Missing COMPARISON_OPERATOR Got: <EOF>
succ[13] = false; sql[13] = "SELECT * FROM R AS a0 JOIN S AS a1 ON a0.x"; // Line:1 Char:42 Missing COMPARISON_OPERATOR Got: <EOF>
succ[14] = false; sql[14] = "SELECT * FROM R AS a0 JOIN S AS a1 ON a0.x ="; // Line:1 Char:44 Missing: ROW_VALUE_CONSTRUCTOR_ELEMENT Got: <EOF>
succ[15] = true; sql[15] = "SELECT * FROM R AS a0 JOIN S AS a1 ON a0.x = a1"; // Success
succ[16] = true; sql[16] = "SELECT * FROM R AS a0 JOIN S AS a1 ON a0.x = a1.y"; // Success
succ[17] = false; sql[17] = "SELECT * FROM R AS a0 WHERE"; // Line:1 Char:27 Missing: ROW_VALUE_CONSTRUCTOR_ELEMENT Got: <EOF>
succ[18] = false; sql[18] = "SELECT * FROM R AS a0 WHERE a0"; // Line:1 Char:30 Missing COMPARISON_OPERATOR Got: <EOF>
succ[19] = false; sql[19] = "SELECT * FROM R AS a0 WHERE a0.x"; // Line:1 Char:32 Missing COMPARISON_OPERATOR Got: <EOF>
succ[20] = true; sql[20] = "SELECT * FROM R AS a0 WHERE a0.x = 0"; // Success
succ[21] = false; sql[21] = "SELECT x"; // Line:1 Char:6 Missing: IDENTIFIER Got: <EOF>
succ[22] = true; sql[22] = "SELECT x FROM R"; // Success
succ[23] = true; sql[23] = "SELECT x, y FROM R"; // Success
succ[24] = true; sql[24] = "SELECT x, y, z FROM R"; // Success
succ[25] = true; sql[25] = "SELECT R.x FROM R"; // Success
succ[26] = true; sql[26] = "SELECT R.x, R.y FROM R"; // Success
succ[27] = true; sql[27] = "SELECT R.x, R.y, R.z FROM R"; // Success
for(int i = 0; i < size; i++)
{
try
{
CharStream stream = new ANTLRInputStream(sql[i]);
SQLiteLexer lexer = new SQLiteLexer(stream);
CommonTokenStream tokens = new CommonTokenStream(lexer);
SQLLikeQueryParser parser = new SQLLikeQueryParser(tokens);
parser.parse();
System.out.println("---Success---");
}
catch(Exception e)
{
if(succ[i] == false)
{
System.out.println("---Caught Exception---");
}
else
{
System.out.println("---Uncaught Exception---");
}
System.out.println(e);
}
}
}
}
|
/**
 * Build the payload for the account-switch options view.
 *
 * <p>(The previous comment said "follower list", but the code returns the
 * users the given user may switch to.)
 *
 * @param user current user
 * @return json object with a "switchableUsers" entry listing the users the
 *         given user may switch to (empty filter string = no filtering)
 */
@RequestMapping("/switch_options")
public JSONObject switchOptions(User user) {
    JSONObject modelInfos = new JSONObject();
    modelInfos.put("switchableUsers", getSwitchableUsers(user, ""));
    return modelInfos;
}
Acts 2:17-18
‘And it shall come to pass in the last days, says God, That I will pour out of My Spirit on all flesh; Your sons and your daughters shall prophesy, Your young men shall see visions, Your old men shall dream dreams. And on My menservants and on My maidservants I will pour out My Spirit in those days; And they shall prophesy'
In this timely CD you will listen to a first-hand conversation Evangelist Anita engages in with three of her young children: Ignacio Jr. (11 yrs), Immanuel (5 yrs) and Israel (4 yrs old). This conversation includes:
The Nations Founding and Downfall
New World Order
The Anti-Christ
Persecution and the Great Tribulation - not loving your life to the death
Mark of the Beast
and more...
Let this CD be a blessing to you and your children concerning the crucial conversation needing
to take place in many households across the U.S. and the Globe in this Final Hour. Get your copy Today!
package edu.ucdavis.watershed;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import org.apache.commons.csv.CSVRecord;
public class CsvData {

	/** Parsed rows, in input order. */
	public LinkedList<Record> data = new LinkedList<Record>();
	/** Column-major view of the numeric cells, built by complete(). */
	public double[][] columns;
	/** First (label) column parsed as doubles; only set for "paired" data. */
	public double[] firstColumn;

	/**
	 * Appends one parsed CSV record. The first cell is the record name
	 * (parsed as a double when type is "paired"); the remaining cells are
	 * parsed as numeric data.
	 *
	 * @param record the CSV record to consume
	 * @param type "paired" to treat the first cell as a numeric label
	 */
	public void add(CSVRecord record, String type) {
		Iterator<String> itr = record.iterator();
		int i = 0;
		Record recordData = new Record();

		while( itr.hasNext() ) {
			if( i == 0 ) {
				if( type.equals("paired") ) {
					recordData.nameDouble = Double.parseDouble(itr.next());
				} else {
					recordData.name = itr.next();
				}
			} else {
				recordData.data.add(Double.parseDouble(itr.next()));
			}
			i++;
		}

		data.add(recordData);
	}

	/**
	 * Transposes the collected rows into the column-major {@code columns}
	 * array and, for "paired" data, fills {@code firstColumn} from the
	 * numeric record names.
	 *
	 * @param type "paired" to also populate firstColumn
	 */
	public void complete(String type) {
		// Column count is taken from the first row (0 when there are no rows).
		int columnCount = data.isEmpty() ? 0 : data.get(0).data.size();
		// BUG FIX: columns was previously allocated with data.size() (the
		// row count) but indexed by column position, which threw
		// ArrayIndexOutOfBoundsException whenever there were more columns
		// than rows.
		columns = new double[columnCount][];

		for( int col = 0; col < columnCount; col++ ) {
			double[] t = new double[data.size()];
			for( int row = 0; row < data.size(); row++ ) {
				LinkedList<Double> rowData = data.get(row).data;
				// Ragged rows: missing cells default to 0.0 (the previous
				// version silently swallowed the exception, which also left
				// 0.0 in place).
				t[row] = col < rowData.size() ? rowData.get(col) : 0.0;
			}
			columns[col] = t;
		}

		if( type.equals("paired") ) {
			firstColumn = new double[data.size()];
			for( int i = 0; i < data.size(); i++ ) {
				firstColumn[i] = data.get(i).nameDouble;
			}
		}
	}

	/** One CSV row: a name (string or numeric) plus its numeric cells. */
	public class Record {
		public String name = "";
		public double nameDouble = 0.0;
		public LinkedList<Double> data = new LinkedList<Double>();
	}
}
|
Ultra-widefield Fundus Image in Oculocutaneous Albinism.
A man in his early 20’s with cutaneous albinism was referred to an ophthalmologist for evaluation of nystagmus and photophobia. He denied oscillopsia. His best-corrected visual acuity was 20/80 with each eye. Anterior segment examination revealed iris transillumination defects in both eyes. Both of his fundi showed pronounced hypopigmentation of the retinal pigmented epithelium with clearly visualized choroidal vessels. Bilateral foveal light reflexes were absent, corresponding with foveal hypoplasia, lack of a foveal umbo, and increased foveal thickness visible on optical coherence tomography.(1) Conjugate bilateral pendular horizontal nystagmus was present in all gazes, with decreased amplitude and frequency on convergence. Hair and skin were hypopigmented. His provisional diagnosis was oculocutaneous albinism. He declined genetic testing.
def sendMyAPDU(self, cla, ins, p1, p2, tx_data=None, recv=0, send_le=0):
    """Build an APDU, transmit it, and assert a success status word.

    Args:
        cla, ins, p1, p2: APDU header bytes.
        tx_data: optional command payload; when present, lc is its length.
        recv: expected response length (becomes the APDU's le field).
        send_le: when nonzero, request that an le field be encoded; it is
            widened to 2 bytes when recv >= 256 (extended length).

    Returns:
        The response object from send_APDU; sw1/sw2 are asserted to be
        0x90/0x00 before returning.
    """
    # Removed dead locals from the previous version: `resp = None`,
    # and `ab`/`cd` (recv split into high/low bytes but never used).
    if send_le != 0:
        # A 1-byte le suffices below 256; otherwise use a 2-byte le field.
        send_le = 1 if recv < 256 else 2
    if tx_data:
        apdu = SL.APDU(cla=cla, ins=ins, p1=p1, p2=p2, lc=len(tx_data),
                       data=tx_data, le=recv, send_le=send_le)
    else:
        apdu = SL.APDU(cla=cla, ins=ins, p1=p1, p2=p2, lc=0x00,
                       le=recv, send_le=send_le)
    if DEBUG:
        print(apdu)
    resp = self.send_APDU(apdu)
    if DEBUG:
        print(resp)
    # Compare the status-word integers directly instead of comparing
    # their hex-string renderings.
    assert resp.sw1 == 0x90
    assert resp.sw2 == 0x00
    return resp
def action(self, observation, exploration_period=0, enable_random=True,
           sample_next_obs=False):
  """Choose the next action (note) for a single observation.

  Runs the Q-network and reward RNN for one step, updating their recurrent
  state fields as a side effect, then either explores (random note) or
  exploits (network output).

  Args:
    observation: 1-D observation vector (a single step, not a batch).
    exploration_period: steps over which epsilon is annealed (egreedy mode).
    enable_random: allow epsilon-greedy random actions; forced off in
      boltzmann mode.
    sample_next_obs: if True, sample the next observation from the action
      softmax instead of reusing the greedy action.

  Returns:
    A (action, next_observation, reward_scores) tuple.
  """
  assert len(observation.shape) == 1, 'Single observation only'

  self.actions_executed_so_far += 1

  # Anneal epsilon linearly in egreedy mode; boltzmann mode always samples
  # from the softmax instead of taking random actions.
  if self.exploration_mode == 'egreedy':
    exploration_p = rl_tuner_ops.linear_annealing(
        self.actions_executed_so_far, exploration_period, 1.0,
        self.dqn_hparams.random_action_probability)
  elif self.exploration_mode == 'boltzmann':
    enable_random = False
    sample_next_obs = True

  # Shape the single observation into a (batch, time, input) tensor.
  input_batch = np.reshape(observation,
                           (self.q_network.batch_size, 1, self.input_size))
  lengths = np.full(self.q_network.batch_size, 1, dtype=int)

  # One session call evaluates both networks and captures their updated
  # recurrent states (stored back onto the network objects).
  (action, action_softmax, self.q_network.state_value,
   reward_scores, self.reward_rnn.state_value) = self.session.run(
       [self.predicted_actions, self.action_softmax,
        self.q_network.state_tensor, self.reward_scores,
        self.reward_rnn.state_tensor],
       {self.q_network.melody_sequence: input_batch,
        self.q_network.initial_state: self.q_network.state_value,
        self.q_network.lengths: lengths,
        self.reward_rnn.melody_sequence: input_batch,
        self.reward_rnn.initial_state: self.reward_rnn.state_value,
        self.reward_rnn.lengths: lengths})

  reward_scores = np.reshape(reward_scores, (self.num_actions))
  action_softmax = np.reshape(action_softmax, (self.num_actions))
  action = np.reshape(action, (self.num_actions))

  if enable_random and random.random() < exploration_p:
    # Explore: pick a random note and use it as both action and next obs.
    note = self.get_random_note()
    return note, note, reward_scores
  else:
    if not sample_next_obs:
      return action, action, reward_scores
    else:
      # Sample the next observation from the softmax (one-hot encoded).
      obs_note = rl_tuner_ops.sample_softmax(action_softmax)
      next_obs = np.array(
          rl_tuner_ops.make_onehot([obs_note], self.num_actions)).flatten()
      return action, next_obs, reward_scores
<gh_stars>0
import { Component, AfterViewInit, AfterViewChecked } from '@angular/core';
import { Router } from '@angular/router';
import * as PNotify from 'pnotify';
declare var jQuery: any;
@Component({
selector: 'app-layout',
templateUrl: './layout.component.html',
})
export class LayoutComponent implements AfterViewInit, AfterViewChecked {

  public constructor() {}

  /**
   * Wires up jQuery-based sidebar behavior once the view exists:
   * menu expand/collapse, the small/large menu toggle, highlighting of the
   * menu entry matching the current URL, and (when available) a custom
   * scrollbar on the fixed sidebar.
   */
  public ngAfterViewInit() {
    jQuery(document).ready(function() {
      console.log('jQuery ready');

      // Current URL without query string/hash, used to mark the active menu entry.
      var CURRENT_PATH = window.location.protocol + "//" + window.location.host + window.location.pathname;

      // Kept as a no-op hook: the original height-syncing logic is retained
      // below (commented out) in case it needs to be restored.
      var setContentHeight = function() {
        // jQuery('#app_content').css('height', jQuery('#app_content_inner').outerHeight());
        // jQuery('.left_col').css('height', jQuery('.left_col_content').outerHeight());
        // var leftColHeight = jQuery('.left_col').outerHeight();
        // var rightColHeight = jQuery('#app_content').outerHeight() + jQuery('.top_nav').outerHeight() + jQuery('#footer').outerHeight() + 10;
        // if (leftColHeight < rightColHeight) {
        //  jQuery('.left_col').css('height', rightColHeight);
        // } else {
        //  jQuery('#app_content').css('height', leftColHeight - jQuery('.top_nav').outerHeight() - jQuery('#footer').outerHeight() - 10);
        // }
      };

      // Expand/collapse sidebar submenus, keeping only one branch open.
      jQuery('#sidebar-menu').find('a').on('click', function(ev) {
        console.log('clicked - sidebar_menu');
        var jQueryli = jQuery(this).parent();

        jQuery('#sidebar-menu').find('li').removeClass('current-page');
        jQuery('#sidebar-menu').find('.child_menu').find('li').removeClass('active');

        if (jQueryli.is('.active')) {
          jQueryli.removeClass('active active-sm');
          jQuery('ul:first', jQueryli).slideUp(function() {
            setContentHeight();
          });
        } else {
          // prevent closing menu if we are on child menu
          if (!jQueryli.parent().is('.child_menu')) {
            jQuery('#sidebar-menu').find('li').removeClass('active active-sm');
            jQuery('#sidebar-menu').find('li ul').slideUp();
          } else {
            if (jQuery('body').is(".nav-sm")) {
              jQuery('#sidebar-menu').find("li").removeClass("active active-sm");
              jQuery('#sidebar-menu').find("li ul").slideUp();
            }
          }
          jQueryli.addClass('active').addClass('current-page');

          jQuery('ul:first', jQueryli).slideDown(function() {
            setContentHeight();
          });
        }
      });

      // toggle small or large menu
      jQuery('#menu_toggle').on('click', function() {
        console.log('clicked - menu toggle');

        if (jQuery('body').hasClass('nav-md')) {
          jQuery('#sidebar-menu').find('li.active ul').hide();
          jQuery('#sidebar-menu').find('li.active').addClass('active-sm').removeClass('active');
        } else {
          jQuery('#sidebar-menu').find('li.active-sm ul').show();
          jQuery('#sidebar-menu').find('li.active-sm').addClass('active').removeClass('active-sm');
        }

        jQuery('body').toggleClass('nav-md nav-sm');

        setContentHeight();
      });

      // check active menu: mark the entry whose href matches the current URL
      // and open its parent submenu.
      jQuery('#sidebar-menu').find('a[href="' + CURRENT_PATH + '"]').parent('li').addClass('current-page');

      jQuery('#sidebar-menu').find('a').filter(function() {
        return this.href == CURRENT_PATH;
      }).parent('li').addClass('current-page').parents('ul').slideDown(function() {
        setContentHeight();
      }).parent().addClass('active');

      setContentHeight();

      // fixed sidebar: only when the mCustomScrollbar plugin is loaded.
      if (jQuery.fn.mCustomScrollbar) {
        jQuery('.menu_fixed').mCustomScrollbar({
          autoHideScrollbar: true,
          theme: 'minimal',
          mouseWheel: { preventDefault: true, scrollAmount: 150 }
        });
      }
    });
  }

  /**
   * Recomputes the main content column's min-height after each change
   * detection pass so it spans at least the viewport/sidebar height,
   * accounting for the nav bar and (possibly fixed) footer.
   */
  public ngAfterViewChecked() {
    jQuery('.right_col').css('min-height', jQuery(window).height());

    var bodyHeight = jQuery('body').outerHeight(),
      footerHeight = jQuery('body').hasClass('footer_fixed') ? -10 : jQuery('footer').height(),
      leftColHeight = jQuery('.left_col').eq(1).height() + jQuery('.sidebar-footer').height(),
      contentHeight = bodyHeight < leftColHeight ? leftColHeight : bodyHeight;

    // normalize content
    contentHeight -= jQuery('.nav_menu').height() + footerHeight;

    jQuery('.right_col').css('min-height', contentHeight);
  }
}
|
/**
* Sort the given files
*
* @param files The files
* @param youngestFirst Ascending or descending
* @return Just return the given array
*/
public static File[] sortFilesOnAge(File[] files,
final boolean youngestFirst) {
FileWrapper[] fw = new FileWrapper[files.length];
for (int i = 0; i < fw.length; i++) {
fw[i] = new FileWrapper(files[i], youngestFirst);
}
Arrays.sort(fw);
for (int i = 0; i < fw.length; i++) {
files[i] = fw[i].file;
}
return files;
} |
/// Writes the given bytes onto the heap starting from the given chunk address. The bytes can
/// span more than one chunk length.
pub fn write_bytes(&mut self, address: ChunkAddress, data: Vec<u8>) -> Option<()> {
for (i, chunk) in data.chunks(16).enumerate() {
let mut buffer = [0_u8; Self::CHUNK_SIZE as usize];
for (i, b) in chunk.iter().enumerate() {
buffer[i] = *b;
}
self.write_chunk(ChunkAddress(address.0 + i as u16), &buffer)?;
}
Some(())
} |
def auto_delete_files_on_instance_change(
        instance: Any,
        fieldnames: Iterable[str],
        model_class) -> None:
    """Delete files from disk that an instance update is about to orphan.

    For each named file field, compares the stored (old) value against the
    new instance's value and removes the old file when it is being replaced.
    No-op for unsaved instances or when the old record no longer exists.

    Args:
        instance: the (about to be saved) model instance.
        fieldnames: names of file fields to inspect.
        model_class: the model class used to load the previously saved row.
    """
    if not instance.pk:
        # Unsaved instance: nothing on disk can be orphaned yet.
        return

    try:
        old_instance = model_class.objects.get(pk=instance.pk)
    except model_class.DoesNotExist:
        return

    for fieldname in fieldnames:
        old_filefield = getattr(old_instance, fieldname, None)
        if not old_filefield:
            continue
        new_filefield = getattr(instance, fieldname, None)
        # Only delete when the field is actually changing.
        if old_filefield == new_filefield:
            continue
        if os.path.isfile(old_filefield.path):
            os.remove(old_filefield.path)
/***********************************************************************************************************************
*
* @author Fabrizio Giudici
* @version $Id$
*
**********************************************************************************************************************/
public class CRWColorConversionOperation extends ColorConversionOperation
  {
    private final static Logger logger = getLogger(CRWColorConversionOperation.class);

    /** Per-camera-model color matrices, keyed by upper-cased model name. */
    private final Map<String, ColorMatrix> matrixMapByModel = new HashMap<String, ColorMatrix>();

    public CRWColorConversionOperation()
      {
        // Hardwired coefficients for the supported Canon CRW bodies,
        // scaled by 1/10000 in get() below.
        matrixMapByModel.put("CANON EOS D30 DIGITAL", get(new int[] { 9805,-2689,-1312,-5803,13064,3068,-2438,3075,8775 }));
        matrixMapByModel.put("CANON EOS D60 DIGITAL", get(new int[] { 6188,-1341,-890,-7168,14489,2937,-2640,3228,8483 }));
        matrixMapByModel.put("CANON EOS 300D DIGITAL", get(new int[] { 8197,-2000,-1118,-6714,14335,2592,-2536,3178,8266 }));
      }

    // Converts fixed-point (x10000) coefficients into a ColorMatrix.
    private static ColorMatrix get (final @Nonnull int[] values)
      {
        return getMatrix(values, 1.0/10000.0);
      }

    /*******************************************************************************************************************
     *
     * {@inheritDoc}
     *
     * Returns the matrix registered for the artifact's camera model, or
     * null when the model is not in the table.
     *
     ******************************************************************************************************************/
    @CheckForNull
    @Override
    protected ColorMatrix getColorMatrixXYZ (final @Nonnull PipelineArtifact artifact)
      {
        final CRWMetadata metadata = (CRWMetadata)artifact.getRAWMetadata();
        final String model = metadata.getModel().toUpperCase().trim();
        // FIXME: try to use the embedded matrix instead of hardwired coefficients.
        return matrixMapByModel.get(model);
      }
  }
/**
 * Starts a server, runs a test that takes no parameters, and then shuts down the server once
 * the test has finished running. InProcess servers have less overhead than Local servers but
 * still exercise most of the GRPC plumbing. This assumes the test will know how to connect to
 * the server on its own without any parameters.
 *
 * <p>(The previous javadoc documented nonexistent {@code fissSourceFactory} and
 * {@code mcsSourceFactory} parameters.)
 *
 * @param config configuration used to start the in-process server
 * @param test the test to execute
 * @throws Exception any exception is passed through to the caller
 */
public static void runWithInProcessServerNoParam(InProcessConfig config, ThrowableAction test)
    throws Exception {
  final Server server = startInProcess(config);
  try {
    test.act();
  } finally {
    // Always stop the server, even when the test throws.
    server.shutdown();
    server.awaitTermination(3, TimeUnit.MINUTES);
  }
}
import Axios, { AxiosError, AxiosInstance, AxiosResponse } from 'axios'
import https from 'https'
import { XMLWrapper } from './XMLWrapper'
import { xmlParser } from './lib/xml'
import { createSubjects, Subject } from './Subject'
import { tradeLinesHandler, TradeLinesHandlerResponse } from './TradeLinesHandler'
import { AddonHandlerResponse, addonProductHandler } from './AddonProductHandler'
import { indicativeHandler, IndicativeResponse } from './IndicativeHandler'
/** Net Access system-level authentication credentials. */
export interface SystemCredentials {
  id: string
  password: string
}
/** Per-product subscriber account used to authorize a single request. */
export interface Subscriber {
  industryCode: string
  memberCode: string
  prefix: string | number
  password: string
}
/** Construction options for TransunionClient. */
export interface TransunionClientOptions {
  system: SystemCredentials
  modelReportSubscriber: Subscriber
  creditReportSubscriber: Subscriber
  // PFX/PKCS12 client certificate used for mutual TLS.
  certificate: Buffer
  // Selects the production endpoint when true (unless apiUrl overrides it).
  production: boolean
  // Axios request timeout in milliseconds (optional).
  timeout?: number
  // Explicit endpoint override; takes precedence over `production`.
  apiUrl?: string
}
/** Options common to all request methods; extra fields may be mixed in via T. */
export type RequestOptions<T extends Record<string, unknown> = {}> = T & {
  subjects: Subject[]
}
/** Parsed response plus the raw XML exchanged with the bureau. */
export type RequestResponse = AddonHandlerResponse &
  IndicativeResponse &
  TradeLinesHandlerResponse & {
    rawRequest: string
    rawResponse: string
  }
/** Error payload shape exposed to callers that inspect failures. */
export interface RequestErrorResponse {
  error: Error
  rawRequest: RequestResponse['rawRequest']
}
/** Product-level error element returned inside an otherwise valid response. */
export interface ProductError {
  code: number
  description: string
}
// NOTE(review): empty placeholder — fields presumably to be defined later; confirm before use.
export interface CreditLine {}
/**
 * Error raised for failed Transunion requests. Carries the raw request XML,
 * the raw response XML (when one was received) and the underlying HTTP error.
 */
export class RequestError extends Error {
  public rawRequest: string
  public rawResponse?: string
  public httpError?: AxiosError

  constructor(details: {
    rawRequest: string
    rawResponse?: string
    httpError?: AxiosError
    message: string
  }) {
    super(details.message)
    this.rawRequest = details.rawRequest
    this.rawResponse = details.rawResponse
    this.httpError = details.httpError
  }
}
/**
 * Client for the Transunion Net Access XML API.
 *
 * Builds the signed XML envelope (XMLWrapper), posts it over mutual TLS using
 * the supplied PFX certificate, and parses the response through the addon /
 * indicative / trade-line handlers.
 */
export class TransunionClient {
  private readonly axios: AxiosInstance
  public readonly apiUrl: string

  constructor(private readonly options: TransunionClientOptions) {
    // An explicit apiUrl wins; otherwise pick production vs. test endpoint.
    if (options.apiUrl) {
      this.apiUrl = options.apiUrl
    } else {
      this.apiUrl =
        options.production === true
          ? 'https://netaccess.transunion.com'
          : 'https://netaccess-test.transunion.com'
    }
    const httpsAgent = new https.Agent({
      pfx: this.options.certificate,
      passphrase: this.options.system.password,
      keepAlive: false,
      // Socket-level idle timeout; independent of the axios request timeout below.
      timeout: 10000
    })
    this.axios = Axios.create({
      baseURL: this.apiUrl,
      headers: { 'Content-Type': 'text/xml' },
      timeout: options.timeout,
      httpsAgent
    })
  }

  /**
   * Sends a single product request and parses the response.
   *
   * Throws RequestError on transport failure, when the response carries a
   * product-level <error> element, or when no product element is present.
   */
  private async request({
    productCode,
    subjects,
    subscriber
  }: {
    productCode: string
    subjects: Subject[]
    subscriber: Subscriber
  }) {
    const xmlRequest = XMLWrapper({
      system: this.options.system,
      subscriber,
      product: {
        code: productCode,
        body: createSubjects(subjects)
      },
      production: this.options.production
    })
    let xmlResponse: string
    try {
      const { data }: AxiosResponse<string> = await this.axios({
        method: 'POST',
        data: xmlRequest
      })
      xmlResponse = data
    } catch (err) {
      if (Axios.isAxiosError(err)) {
        throw new RequestError({
          message: err.message,
          rawRequest: xmlRequest,
          rawResponse: err.response?.data,
          httpError: err
        })
      } else {
        throw err
      }
    }
    const parsed: Record<string, any> = xmlParser.parse(xmlResponse)
    const product = parsed?.creditBureau?.product
    if (product) {
      const error: ProductError = product.error
      if (error) {
        throw new RequestError({
          rawRequest: xmlRequest,
          rawResponse: xmlResponse,
          message: `${error.description} (CODE: ${error.code})`
        })
      }
      const returnResponse: RequestResponse = {
        // Bug fix: this previously echoed the response XML into rawRequest;
        // it must carry the request XML that was actually sent.
        rawRequest: xmlRequest,
        rawResponse: xmlResponse
      }
      const record = parsed?.creditBureau?.product?.subject?.subjectRecord
      if (record) {
        const addons = addonProductHandler(record.addOnProduct)
        const indicative = indicativeHandler(record.indicative)
        const tradeLines = tradeLinesHandler(record.custom?.credit?.trade)
        Object.assign(returnResponse, addons, indicative, tradeLines)
      }
      return returnResponse
    } else {
      throw new RequestError({
        rawRequest: xmlRequest,
        rawResponse: xmlResponse,
        message: `Missing Product`
      })
    }
  }

  /** Requests the model report product (code 08000). */
  public async modelReport({ subjects }: RequestOptions) {
    return this.request({
      productCode: '08000',
      subscriber: this.options.modelReportSubscriber,
      subjects
    })
  }

  /** Requests the credit report product (code 07000). */
  public async creditReport({ subjects }: RequestOptions) {
    return this.request({
      productCode: '07000',
      subscriber: this.options.creditReportSubscriber,
      subjects
    })
  }
}
|
#include "KFRand.hpp"
namespace KFrame
{
    // Seeds the generator from the wall clock; callers needing a reproducible
    // sequence can call Init() again with a fixed seed.
    KFRand::KFRand()
    {
        _result_count = 0;
        Init( static_cast< uint32 >( time( nullptr ) ) );
    }
    // No owned resources; all state lives in fixed-size member arrays.
    KFRand::~KFRand()
    {
    }
    // Returns the next 32-bit random value. The 256-entry result buffer is
    // refilled from the generator state (SetRand) whenever it is exhausted,
    // and values are consumed from the back of the buffer.
    uint32 KFRand::Rand32()
    {
        if ( _result_count == 0 )
        {
            SetRand();
            _result_count = 256;
        }
        return _result[ --_result_count ];
    }
    // Returns a double in [0.0, 1.0) by scaling a 32-bit draw by 2^32.
    double KFRand::RandFloat()
    {
        return ( ( double )Rand32() / 4294967296.0 );
    }
    // Re-seeds the generator and produces the first batch of results.
    // The low bit of the seed is forced on so the multiplicative scramble
    // below never starts from zero. Each of the 256 state words is derived
    // from the previous one via a Lehmer-style step (the 16807 / 127773 /
    // 2836 constants match the Schrage decomposition of 16807 * x mod 2^31-1,
    // though here the arithmetic wraps in unsigned 32-bit).
    void KFRand::Init( uint32 seed )
    {
        seed |= 1;
        for ( int32 i = 0; i < 256; i++ ) // initialize memory
        {
            seed = 16807 * ( seed % 127773 ) - 2836 * ( seed / 127773 );
            _seed[ i ] = seed;
        }
        SetRand(); // get first set of results
        _result_count = 256;
    }
void KFRand::SetRand()
{
register uint32 a = _aa, b = _bb, x, y, i;
for ( i = 0; i < 256; ++i )
{
x = _seed[ i ];
a = ( ( a << 19 ) ^ ( a >> 13 ) ) + _seed[ ( i + 128 ) & 255 ];
_seed[ i ] = y = _seed[ x & 255 ] + a + b;
_result[ i ] = b = _seed[ ( y >> 8 ) & 255 ] + x;
}
_bb = b;
_aa = a;
}
} |
<filename>data/propagate_aortic_annotation.py
# Copyright 2018, <NAME>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Propagate manual annotations at ED and ES frames to other time frames using image registration.
"""
import os
import sys
import nibabel as nib
import numpy as np
from ukbb_cardiac.common.image_utils import *
def infer_time_frame(image_name, image_fr_name):
    """ Infer which time frame the annotation is at.

    Compares every frame of the 4D image against the single-frame image and
    returns the index of the frame with the smallest voxel-wise absolute
    difference.
    """
    nim = nib.load(image_name)
    T = nim.header['dim'][4]
    image = nim.get_data()
    image_fr = nib.load(image_fr_name).get_data()
    diff = np.array([np.sum(np.abs(image[:, :, :, t] - image_fr)) for t in range(T)])
    return np.argmin(diff)
def wrap_frame_index(t_index, T):
    """ Wrap each frame index into the valid range [0, T).

    Generalized from the original single-step wrap (t + T for t < 0, t - T for
    t >= T) to a true modulo, so indices out of range by more than one period
    are also handled. Behavior is unchanged for the original -T <= t < 2*T range.

    Args:
        t_index: iterable of (possibly out-of-range) integer frame indices.
        T: number of time frames; must be positive.

    Returns:
        List of indices wrapped into [0, T).
    """
    if T <= 0:
        raise ValueError('T must be positive')
    return [t % T for t in t_index]
if __name__ == '__main__':
    # Site-specific input paths: per-subject aortic cine images and the MIRTK
    # registration parameter files.
    data_path = '/vol/medic02/users/wbai/data/cardiac_atlas/Biobank_ao/data'
    data_list = sorted(os.listdir(data_path))
    par_path = '/vol/biomedic2/wbai/git/ukbb_cardiac/par'
    for data in data_list:
        print(data)
        data_dir = os.path.join(data_path, data)
        # Directory for motion tracking results
        motion_dir = os.path.join(data_dir, 'motion')
        if not os.path.exists(motion_dir):
            os.mkdir(motion_dir)
        # Split the image sequence
        image_name = '{0}/ao.nii.gz'.format(data_dir)
        output_name = '{0}/ao_fr'.format(motion_dir)
        split_sequence(image_name, output_name)
        # The number of time frames (T) and the temporal resolution (dt)
        nim = nib.load(image_name)
        T = nim.header['dim'][4]
        dt = nim.header['pixdim'][4]
        # Get the index of ED and ES time frames
        t_anno = []
        for fr in ['ED', 'ES']:
            image_fr_name = '{0}/ao_{1}.nii.gz'.format(data_dir, fr)
            k = infer_time_frame(image_name, image_fr_name)
            t_anno += [k]
            # Copy the annotation if it has been annotated
            os.system('cp {0}/label_ao_{1}.nii.gz {2}/label_ao_prop{3:02d}.nii.gz'.format(data_dir, fr, motion_dir, k))
        # Get the ROI for image registration by cropping the annotation
        auto_crop_image('{0}/label_ao.nii.gz'.format(data_dir), '{0}/label_ao_crop.nii.gz'.format(motion_dir), 10)
        os.system('mirtk transform-image {0}/ao.nii.gz {1}/ao_crop.nii.gz '
                  '-target {1}/label_ao_crop.nii.gz'.format(data_dir, motion_dir))
        # Split the cropped image sequence
        split_sequence('{0}/ao_crop.nii.gz'.format(motion_dir), '{0}/ao_crop_fr'.format(motion_dir))
        # Prepare for ED and ES annotation propagation
        prop_idx = {}
        for t in t_anno:
            prop_idx[t] = {}
            prop_idx[t]['forward'] = []
            prop_idx[t]['backward'] = []
        # For un-annotated frames, find its closest time frame.
        # Distances are circular because the cardiac cine sequence wraps around.
        for t in range(T):
            if t in t_anno:
                continue
            dist = np.abs(t - np.array(t_anno))
            dist = [x if (x <= T / 2) else (T - x) for x in dist]
            source_t = t_anno[np.argmin(dist)]
            # Determine whether it is forward or backward propagation
            d = t - source_t
            if d > T / 2:
                prop_idx[source_t]['backward'] += [t]
            elif d > 0:
                prop_idx[source_t]['forward'] += [t]
            elif d > - T / 2:
                prop_idx[source_t]['backward'] += [t]
            else:
                prop_idx[source_t]['forward'] += [t]
        # Sort the propagation order and propagate closer frames first
        for t in t_anno:
            for dir in ['forward', 'backward']:
                prop_idx[t][dir] = np.array(prop_idx[t][dir])
                dist = np.abs(prop_idx[t][dir] - t)
                dist = [x if (x <= T / 2) else (T - x) for x in dist]
                sort_idx = np.argsort(dist)
                prop_idx[t][dir] = prop_idx[t][dir][sort_idx]
        # For each time frame, infer the segmentation from its closest annotated time frame
        for t in t_anno:
            for dir in ['forward', 'backward']:
                for target_t in prop_idx[t][dir]:
                    # Propagate from source_t to target_t
                    # To avoid accummulation of sub-pixel errors, use long-range propagation after every 5 frames
                    if np.abs(target_t - t) % 5 == 0:
                        source_t = target_t - 5 if dir == 'forward' else target_t + 5
                    else:
                        source_t = target_t - 1 if dir == 'forward' else target_t + 1
                    source_t = wrap_frame_index([source_t], T)[0]
                    # Perform label propagation
                    print('{0} -> {1}'.format(source_t, target_t))
                    target_image = '{0}/ao_crop_fr{1:02d}.nii.gz'.format(motion_dir, target_t)
                    source_image = '{0}/ao_crop_fr{1:02d}.nii.gz'.format(motion_dir, source_t)
                    par = '{0}/ffd_aortic_motion.cfg'.format(par_path)
                    dof = '{0}/ffd_{1:02d}_to_{2:02d}.dof.gz'.format(motion_dir, target_t, source_t)
                    os.system('mirtk register {0} {1} -parin {2} -dofout {3}'.format(
                        target_image, source_image, par, dof))
                    # NOTE(review): placeholder {1} (fr, a stale variable from the
                    # earlier ED/ES loop) is unused in these two templates — only
                    # {0} and {2} are substituted. Harmless at runtime, but confirm
                    # fr was not meant to appear in the file name.
                    source_label = '{0}/label_ao_prop{2:02d}.nii.gz'.format(motion_dir, fr, source_t)
                    target_label = '{0}/label_ao_prop{2:02d}.nii.gz'.format(motion_dir, fr, target_t)
                    orig_source_image = '{0}/ao_fr{1:02d}.nii.gz'.format(motion_dir, source_t)
                    os.system('mirtk transform-image {0} {1} -dofin {2} -target {3} -interp NN'.format(
                        source_label, target_label, dof, orig_source_image))
        # Combine into a sequence
        image_names = []
        for t in range(T):
            image_name = '{0}/label_ao_prop{1:02d}.nii.gz'.format(motion_dir, t)
            image_names += [image_name]
        output_name = '{0}/label_ao_prop.nii.gz'.format(data_dir)
        make_sequence(image_names, dt, output_name)
        # Remove intermediate files
        os.system('rm -rf {0}'.format(motion_dir))
|
package com.yugabyte.yw.commissioner.tasks.subtasks;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.collect.ImmutableMap;
import com.google.common.net.HostAndPort;
import com.yugabyte.yw.commissioner.AbstractTaskBase;
import com.yugabyte.yw.commissioner.BaseTaskDependencies;
import com.yugabyte.yw.common.kms.EncryptionAtRestManager;
import com.yugabyte.yw.common.kms.EncryptionAtRestManager.RestoreKeyResult;
import com.yugabyte.yw.common.kms.util.EncryptionAtRestUtil;
import com.yugabyte.yw.forms.RestoreBackupParams;
import com.yugabyte.yw.models.KmsHistory;
import com.yugabyte.yw.models.Universe;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import javax.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import org.yb.client.YBClient;
import org.yb.util.Pair;
/**
 * Subtask that restores universe encryption-at-rest keys from a backup's key history into the
 * target universe's masters, and then puts the universe's encryption state back to what it was
 * before the restore started.
 */
@Slf4j
public class RestoreUniverseKeysYb extends AbstractTaskBase {
  // How long to wait for universe key to be set in memory
  private static final int KEY_IN_MEMORY_TIMEOUT = 500;
  // The Encryption At Rest manager
  private final EncryptionAtRestManager keyManager;

  @Inject
  protected RestoreUniverseKeysYb(
      BaseTaskDependencies baseTaskDependencies, EncryptionAtRestManager keyManager) {
    super(baseTaskDependencies);
    this.keyManager = keyManager;
  }

  @Override
  protected RestoreBackupParams taskParams() {
    return (RestoreBackupParams) taskParams;
  }

  // Should we use RPC to get the activeKeyId and then try and see if it matches this key?
  /**
   * Returns the decoded keyRef of the universe's currently active KMS key, or null when no
   * active keyRef is recorded.
   */
  private byte[] getActiveUniverseKey() {
    KmsHistory activeKey = EncryptionAtRestUtil.getActiveKey(taskParams().universeUUID);
    if (activeKey == null || activeKey.uuid.keyRef == null || activeKey.uuid.keyRef.length() == 0) {
      final String errMsg =
          String.format(
              "Skipping universe %s, No active keyRef found.",
              taskParams().universeUUID.toString());
      log.trace(errMsg);
      return null;
    }
    return Base64.getDecoder().decode(activeKey.uuid.keyRef);
  }

  /**
   * Pushes the given keyRef to every master of the universe, waits for each master to hold it
   * in memory, temporarily enables encryption with it so it is written to the universe key
   * registry, and finally marks the keyRef active.
   *
   * NOTE(review): any failure here is logged and swallowed — callers cannot observe it.
   * Confirm this best-effort behavior is intended before relying on it.
   */
  private void sendKeyToMasters(byte[] keyRef, UUID kmsConfigUUID) {
    Universe universe = Universe.getOrBadRequest(taskParams().universeUUID);
    String hostPorts = universe.getMasterAddresses();
    String certificate = universe.getCertificateNodetoNode();
    YBClient client = null;
    try {
      byte[] keyVal = keyManager.getUniverseKey(taskParams().universeUUID, kmsConfigUUID, keyRef);
      String encodedKeyRef = Base64.getEncoder().encodeToString(keyRef);
      client = ybService.getClient(hostPorts, certificate);
      List<HostAndPort> masterAddrs =
          Arrays.stream(hostPorts.split(","))
              .map(addr -> HostAndPort.fromString(addr))
              .collect(Collectors.toList());
      for (HostAndPort hp : masterAddrs) {
        client.addUniverseKeys(ImmutableMap.of(encodedKeyRef, keyVal), hp);
      }
      for (HostAndPort hp : masterAddrs) {
        if (!client.waitForMasterHasUniverseKeyInMemory(KEY_IN_MEMORY_TIMEOUT, encodedKeyRef, hp)) {
          throw new RuntimeException(
              "Timeout occurred waiting for universe encryption key to be " + "set in memory");
        }
      }

      // Since a universe key only gets written to the universe key registry during a
      // change encryption info request, we need to temporarily enable encryption with each
      // key to ensure it is written to the registry to be used to decrypt restored files
      client.enableEncryptionAtRestInMemory(encodedKeyRef);
      Pair<Boolean, String> isEncryptionEnabled = client.isEncryptionEnabled();
      if (!isEncryptionEnabled.getFirst()
          || !isEncryptionEnabled.getSecond().equals(encodedKeyRef)) {
        throw new RuntimeException("Master did not respond that key was enabled");
      }

      universe.incrementVersion();
      // Activate keyRef so that if the universe is not enabled,
      // the last keyRef will always be in-memory due to the setkey task
      // which will mean the cluster will always be able to decrypt the
      // universe key registry which we need to be the case.
      EncryptionAtRestUtil.activateKeyRef(taskParams().universeUUID, kmsConfigUUID, keyRef);
    } catch (Exception e) {
      log.error("Error sending universe key to master: ", e);
    } finally {
      ybService.closeClient(client, hostPorts);
    }
  }

  /**
   * Restores every key found in the backup's key history into the universe, then restores the
   * universe's original encryption state (previously active key, or disabled).
   */
  @Override
  public void run() {
    Universe universe = Universe.getOrBadRequest(taskParams().universeUUID);
    String hostPorts = universe.getMasterAddresses();
    String certificate = universe.getCertificateNodetoNode();
    YBClient client = null;
    byte[] activeKeyRef = null;
    try {
      log.info("Running {}: hostPorts={}.", getName(), hostPorts);
      client = ybService.getClient(hostPorts, certificate);

      // For each key history entry in the backup, restore it into the KMS backend
      // and push it to the masters.
      Consumer<JsonNode> restoreToUniverse =
          (JsonNode backupEntry) -> {
            final byte[] universeKeyRef =
                Base64.getDecoder().decode(backupEntry.get("key_ref").asText());
            if (universeKeyRef != null) {
              // Restore keys to database
              keyManager
                  .getServiceInstance(backupEntry.get("key_provider").asText())
                  .restoreBackupEntry(
                      taskParams().universeUUID, taskParams().kmsConfigUUID, universeKeyRef);
              sendKeyToMasters(universeKeyRef, taskParams().kmsConfigUUID);
            }
          };

      // Retrieve the universe key set (if one is set) to restore universe to original state
      // after restoration of backup completes
      if (client.isEncryptionEnabled().getFirst()) activeKeyRef = getActiveUniverseKey();

      RestoreKeyResult restoreResult =
          keyManager.restoreUniverseKeyHistory(
              taskParams().backupStorageInfoList.get(0).storageLocation, restoreToUniverse);

      switch (restoreResult) {
        case RESTORE_SKIPPED:
          log.info("Skipping encryption key restore...");
          break;
        case RESTORE_FAILED:
          log.info(
              String.format(
                  "Error occurred restoring encryption keys to universe %s",
                  taskParams().universeUUID));
          // NOTE(review): no break — control falls through to RESTORE_SUCCEEDED, so the
          // pre-restore encryption state is re-applied even when key restoration failed.
          // TODO confirm this fall-through is intentional.
        case RESTORE_SUCCEEDED:
          ///////////////
          // Restore state of encryption in universe having backup restored into
          ///////////////
          if (activeKeyRef != null) {
            // Ensure the active universe key in YB is set back to what it was
            // before restore flow
            sendKeyToMasters(
                activeKeyRef, universe.getUniverseDetails().encryptionAtRestConfig.kmsConfigUUID);
          } else if (client.isEncryptionEnabled().getFirst()) {
            // If there is no active keyRef but encryption is enabled,
            // it means that the universe being restored into was not
            // encrypted to begin with, and thus we should restore it back
            // to that state
            client.disableEncryptionAtRestInMemory();
            universe.incrementVersion();
          }
      }
    } catch (Exception e) {
      log.error("{} hit error : {}", getName(), e.getMessage(), e);
      throw new RuntimeException(e);
    } finally {
      // Close client
      if (client != null) ybService.closeClient(client, hostPorts);
    }
  }
}
|
#include<bits/stdc++.h>
using namespace std;
// Buffer size for the hand-rolled stdin/stdout buffering below (2 MiB).
const int LEN=1<<21;
// BUF/Pin/Pin_last: input buffer and cursor; PUF/Pout/Pout_last: output buffer and cursor.
char BUF[LEN],*Pin,*Pin_last,PUF[LEN],*Pout=PUF,*Pout_last=PUF+LEN-1;
// G(): next input character, refilling BUF from stdin when exhausted; returns EOF at end.
inline char G(){return Pin==Pin_last&&(Pin_last=(Pin=BUF)+fread(BUF,1,LEN,stdin),Pin==Pin_last)?EOF:*Pin++;}
// P(): buffered output of one character, flushing PUF to stdout when full.
inline void P(char x){if(Pout==Pout_last)fwrite(PUF,1,Pout-PUF,stdout),Pout=PUF;*Pout++=x;}
// read()/readL()/readB(): parse a signed int / long long / double from the buffered input.
// NOTE(review): the 'register' hints below are deprecated since C++11 and ill-formed in C++17.
inline int read(){register int res=0,f=1,ch=' ';while((ch<'0'||ch>'9')&&ch!=EOF){if(ch=='-')f=-1;ch=G();}while(ch>='0'&&ch<='9')res=(res<<3)+(res<<1)+ch-48,ch=G();return res*f;}
inline long long readL(){register long long res=0,f=1,ch=' ';while((ch<'0'||ch>'9')&&ch!=EOF){if(ch=='-')f=-1;ch=G();}while(ch>='0'&&ch<='9')res=(res<<3)+(res<<1)+ch-48,ch=G();return res*f;}
inline double readB(){register double res=0,f=1,ff=1;char ch=' ';while((ch<'0'||ch>'9')&&ch!=EOF){if(ch=='-')f=-1;ch=G();}while(ch>='0'&&ch<='9')res=res*10+(ch^48),ch=G();if(ch=='.'){ch=G();while(ch>='0'&&ch<='9')ff/=10,res=res+(ch^48)*ff,ch=G();}return res*f;}
// wt/wtL + write/writeL: buffered decimal output of int / long long, followed by separator b.
inline void wtL(long long a){if(a>9)wtL(a/10);P(a%10+'0');return;}
inline void writeL(long long a,char b){if(a<0)P('-'),a=-a;wtL(a),P(b);}
inline void wt(int a){if(a>9)wt(a/10);P(a%10+'0');return;}
inline void write(int a,char b){if(a<0)P('-'),a=-a;wt(a),P(b);}
// random(a, b): ad-hoc pseudo-random value in [a, b] built from two rand() calls.
inline int random(int a,int b){return (rand()+19)*(rand()+97)%(b-a+1)+a;}
// Rolling-hash parameters (base 19260817 mod 1e9+9) and problem state:
// s = pattern, p[] = claimed occurrence positions, has[] = prefix hashes,
// pw[] = base powers, has_val = hash of s, cnt = count of unconstrained positions.
const int N=1e6+10,base=19260817,mod=1e9+9;
int n,m,len,p[N],has[N],pw[N],has_val,cnt;
char s[N];
// Hash of the substring of length `len` starting at position i (1-indexed).
inline int get_has(register int i,register int len){return (has[i+len-1]-1ll*has[i-1]*pw[len]%mod+mod)%mod;}
// Fast modular exponentiation: x^p mod 1e9+7 via iterative square-and-multiply.
inline int ksm(int x,int p)
{
	long long result = 1;
	long long cur = x;
	while (p)
	{
		if (p & 1)
			result = result * cur % 1000000007;
		cur = cur * cur % 1000000007;
		p >>= 1;
	}
	return (int)result;
}
// Appears to solve: given a pattern s and m positions p[1..m] where s is claimed
// to occur in an unknown lowercase string of length n, count the strings
// consistent with all claims: 26^(unconstrained positions) mod 1e9+7, or 0 if
// overlapping claims conflict (detected by comparing rolling hashes).
int main()
{
	n=read(),m=read();
	// Precompute powers of the hash base.
	pw[0]=1;for(register int i=1;i<=n;i++)pw[i]=1ll*pw[i-1]*base%mod;
	// Read the pattern: skip non-lowercase characters, then collect the run of lowercase ones.
	for(s[len=1]=G();s[len]<'a'||s[len]>'z';s[len]=G());
	for(;s[len]>='a'&&s[len]<='z';s[++len]=G());len--;
	for(register int i=1;i<=m;i++)p[i]=read();
	p[m+1]=n+1;
	// Lay the pattern down at each claimed position (stopping at the next claim)
	// and build prefix hashes of the resulting partially-filled string.
	for(register int i=1;i<=m;i++)
	for(register int j=p[i],mx=min(p[i]+len-1,p[i+1]-1);j<=mx;j++)
	has[j]=(1ll*has[j-1]*base+s[j-p[i]+1])%mod;
	for(register int i=1;i<=len;i++)has_val=(1ll*has_val*base+s[i])%mod;
	// Every claimed occurrence must hash to the pattern's hash, else the claims conflict.
	for(register int i=1;i<=m;i++)
	if(get_has(p[i],len)!=has_val){puts("0");return 0;}
	// Positions with zero hash were never written by any claim and are free.
	// NOTE(review): a genuine prefix hash of 0 would be miscounted as free; presumably
	// accepted as negligible for this hash choice — confirm.
	for(register int i=1;i<=n;i++)if(has[i]==0)cnt++;
	printf("%d\n",ksm(26,cnt));
	fwrite(PUF,1,Pout-PUF,stdout);
	return 0;
}
// We are lucky that we need only 4 registers and we can use r8 to r11. These registers
// are encoded consecutively using 3 bits.
fn reg_to_bits(r: String) -> u8 {
    // Only the four register names "a".."d" are valid here.
    assert!(r == "a" || r == "b" || r == "c" || r == "d");
    let first = r.as_bytes()[0];
    first - b'a' + 0xc0
}
package e2e
import (
"context"
"errors"
"fmt"
"time"
"github.com/onsi/ginkgo"
"github.com/onsi/gomega"
"github.com/open-cluster-management/multicloud-operators-foundation/pkg/helpers"
"github.com/open-cluster-management/multicloud-operators-foundation/test/e2e/util"
rbacv1 "k8s.io/api/rbac/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/util/rand"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/dynamic"
)
var (
	// managedClusterViewGVR is the clusterview resource serving the subset of
	// managedclusters the requesting user is authorized to see.
	managedClusterViewGVR = schema.GroupVersionResource{
		Group:    "clusterview.open-cluster-management.io",
		Version:  "v1",
		Resource: "managedclusters",
	}
	// managedClusterSetViewGVR is the equivalent user-filtered view of managedclustersets.
	managedClusterSetViewGVR = schema.GroupVersionResource{
		Group:    "clusterview.open-cluster-management.io",
		Version:  "v1alpha1",
		Resource: "managedclustersets",
	}
	// Postfixes appended to the random per-test user name to build RBAC object names.
	clusterRoleNamePostfix        = "-ViewClusterRole"
	clusterRoleBindingNamePostfix = "-ViewClusterRoleBinding"
)
// validateClusterView lists the given view resource with the user-scoped client and
// checks that exactly the expected names are visible. Each visible resource must
// also exist when fetched through the package-level (admin) dynamicClient.
// Parameter names follow Go convention (lowerCamelCase); the error-message typo
// "reources" is also fixed.
func validateClusterView(userDynamicClient dynamic.Interface, viewGVR, resourceGVR schema.GroupVersionResource, expectedNames []string) error {
	resourceList, err := util.ListResource(userDynamicClient, viewGVR, "", "")
	if err != nil {
		return fmt.Errorf("validateClusterView: failed to List Resource %v", err)
	}
	if len(resourceList) != len(expectedNames) {
		return fmt.Errorf("validateClusterView: resources count %v != expected count %v", len(resourceList), len(expectedNames))
	}
	for _, item := range resourceList {
		name, _, err := unstructured.NestedString(item.Object, "metadata", "name")
		if err != nil {
			return fmt.Errorf("validateClusterView: failed to get resource name %v", err)
		}
		exist := false
		for _, expectedName := range expectedNames {
			if name == expectedName {
				exist = true
				break
			}
		}
		if !exist {
			return fmt.Errorf("validateClusterView: resource %v is not in expected resource list %v", name, expectedNames)
		}
		// Cross-check with the admin client that the resource really exists.
		rsExisted, err := util.HasResource(dynamicClient, resourceGVR, "", name)
		if err != nil {
			return fmt.Errorf("validateClusterView: failed to get resource %v. err:%v", name, err)
		}
		if !rsExisted {
			return fmt.Errorf("validateClusterView: no resource %v", name)
		}
	}
	return nil
}
// Verifies that the clusterview managedclusters list tracks RBAC changes: as the
// per-test user's ClusterRole is updated to add/remove cluster names, the set of
// clusters visible through clusterview.open-cluster-management.io must follow.
var _ = ginkgo.Describe("Testing ClusterView to get managedClusters", func() {
	var userName = rand.String(6)
	var clusterRoleName = userName + clusterRoleNamePostfix
	var clusterRoleBindingName = userName + clusterRoleBindingNamePostfix
	var cluster1 = util.RandomName()
	var cluster2 = util.RandomName()
	var cluster3 = util.RandomName()
	var userDynamicClient dynamic.Interface
	var err error
	ginkgo.BeforeEach(func() {
		// create clusterRole and clusterRoleBinding for user
		rules := []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
		}
		err = util.CreateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CreateClusterRoleBindingForUser(kubeClient, clusterRoleBindingName, clusterRoleName, userName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// impersonate user to the default kubeConfig
		userDynamicClient, err = util.NewDynamicClientWithImpersonate(userName, nil)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// prepare 3 clusters
		err = util.ImportManagedCluster(clusterClient, cluster1)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.ImportManagedCluster(clusterClient, cluster2)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.ImportManagedCluster(clusterClient, cluster3)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	ginkgo.AfterEach(func() {
		// cleanup clusterRole and clusterRoleBinding
		err = util.DeleteClusterRole(kubeClient, clusterRoleName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.DeleteClusterRoleBinding(kubeClient, clusterRoleBindingName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// cleanup clusters
		err = util.CleanManagedCluster(clusterClient, cluster1)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CleanManagedCluster(clusterClient, cluster2)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CleanManagedCluster(clusterClient, cluster3)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	// all cases are running in order with the same clusterRole and clusterRoleBinding of the user
	ginkgo.It("should list the managedClusters.clusterview successfully", func() {
		ginkgo.By("authorize cluster1, cluster2 to user")
		expectedClusters := []string{cluster1, cluster2}
		rules := []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").Names(expectedClusters...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterViewGVR,
				util.ManagedClusterGVR, expectedClusters)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		ginkgo.By("append cluster3 to user role")
		expectedClusters = []string{cluster1, cluster2, cluster3}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").Names(expectedClusters...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterViewGVR,
				util.ManagedClusterGVR, expectedClusters)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		ginkgo.By("delete cluster2 to user")
		expectedClusters = []string{cluster1, cluster3}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").Names(expectedClusters...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterViewGVR,
				util.ManagedClusterGVR, expectedClusters)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		ginkgo.By("delete cluster3")
		err = util.CleanManagedCluster(clusterClient, cluster3)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// cluster3 stays in the role but no longer exists, so only cluster1 is expected.
		clusterInRole := []string{cluster1, cluster3}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").Names(clusterInRole...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		expectedClusters = []string{cluster1}
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterViewGVR,
				util.ManagedClusterGVR, expectedClusters)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		ginkgo.By("delete clusterRole")
		err = util.DeleteClusterRole(kubeClient, clusterRoleName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		expectedClusters = []string{}
		// Without the role, listing is forbidden; the exact RBAC error text is asserted.
		expectedError := "validateClusterView: failed to List Resource managedclusters.clusterview.open-cluster-management.io is forbidden: User \"" + userName + "\" cannot list resource \"managedclusters\" in API group \"clusterview.open-cluster-management.io\" at the cluster scope: RBAC: clusterrole.rbac.authorization.k8s.io \"" + userName + clusterRoleNamePostfix + "\" not found"
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterViewGVR,
				util.ManagedClusterGVR, expectedClusters)
		}, eventuallyTimeout, eventuallyInterval).Should(gomega.Equal(errors.New(expectedError)))
		ginkgo.By("add clusterRole")
		expectedClusters = []string{cluster1, cluster2}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").Names(expectedClusters...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterViewGVR,
				util.ManagedClusterGVR, expectedClusters)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
	})
})
// Verifies that a watch opened through the clusterview managedclusters API
// delivers an Added event when a cluster the user is authorized for is imported.
var _ = ginkgo.Describe("Testing ClusterView to watch managedClusters", func() {
	var userName = rand.String(6)
	var clusterRoleName = userName + clusterRoleNamePostfix
	var clusterRoleBindingName = userName + clusterRoleBindingNamePostfix
	var clusterName = util.RandomName()
	var userDynamicClient dynamic.Interface
	var err error
	ginkgo.BeforeEach(func() {
		// create clusterRole and clusterRoleBinding for user
		rules := []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclusters").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclusters").Names(clusterName).RuleOrDie(),
		}
		err = util.CreateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CreateClusterRoleBindingForUser(kubeClient, clusterRoleBindingName, clusterRoleName, userName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// impersonate user to the default kubeConfig
		userDynamicClient, err = util.NewDynamicClientWithImpersonate(userName, nil)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	ginkgo.AfterEach(func() {
		// cleanup clusterRole and clusterRoleBinding
		err = util.DeleteClusterRole(kubeClient, clusterRoleName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.DeleteClusterRoleBinding(kubeClient, clusterRoleBindingName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// cleanup cluster
		err := util.CleanManagedCluster(clusterClient, clusterName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	ginkgo.It("should watch the managedClusters.clusterview successfully", func() {
		var watchedClient watch.Interface
		gomega.Eventually(func() error {
			watchedClient, err = userDynamicClient.Resource(managedClusterViewGVR).Watch(context.Background(), metav1.ListOptions{})
			return err
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		defer watchedClient.Stop()
		// Import the cluster slightly after the watch is established so the
		// Added event is observed on the stream.
		go func() {
			time.Sleep(time.Second * 1)
			// prepare 1 cluster
			err = util.ImportManagedCluster(clusterClient, clusterName)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		}()
		timeCount := 0
		clusterCount := 0
		expectedClusterCount := 1
		// Poll the watch channel; give up (via the assertion below) after ~10
		// seconds without receiving the expected event.
		for {
			select {
			case event, ok := <-watchedClient.ResultChan():
				gomega.Expect(ok).Should(gomega.BeTrue())
				if event.Type == watch.Added {
					obj := event.Object.DeepCopyObject()
					cluster := obj.(*unstructured.Unstructured)
					name, _, _ := unstructured.NestedString(cluster.Object, "metadata", "name")
					gomega.Expect(name).Should(gomega.Equal(clusterName))
					clusterCount++
					// NOTE(review): per the Go spec, this break exits the enclosing
					// select (which would end here anyway), not the for loop; the
					// loop exits via the clusterCount check below.
					break
				}
			case <-time.After(1 * time.Second):
				timeCount++
			}
			if expectedClusterCount == clusterCount {
				break
			}
			gomega.Expect(timeCount).ShouldNot(gomega.BeNumerically(">=", 10))
		}
	})
})
// Verifies that the clusterview API projects exactly the managedClusterSets
// the user's RBAC role names, and that the projection tracks role and
// resource changes dynamically.
var _ = ginkgo.Describe("Testing ClusterView to get managedClusterSets", func() {
	var userName = rand.String(6)
	var clusterRoleName = userName + clusterRoleNamePostfix
	var clusterRoleBindingName = userName + clusterRoleBindingNamePostfix
	var clusterSet1 = util.RandomName()
	var clusterSet2 = util.RandomName()
	var clusterSet3 = util.RandomName()
	var userDynamicClient dynamic.Interface
	var err error
	ginkgo.BeforeEach(func() {
		// create clusterRole and clusterRoleBinding for user
		rules := []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
		}
		err = util.CreateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CreateClusterRoleBindingForUser(kubeClient, clusterRoleBindingName, clusterRoleName, userName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// impersonate user to the default kubeConfig
		userDynamicClient, err = util.NewDynamicClientWithImpersonate(userName, nil)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// prepare 3 clusterSets
		err = util.CreateManagedClusterSet(clusterClient, clusterSet1)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CreateManagedClusterSet(clusterClient, clusterSet2)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CreateManagedClusterSet(clusterClient, clusterSet3)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	ginkgo.AfterEach(func() {
		// cleanup clusterRole and clusterRoleBinding
		err = util.DeleteClusterRole(kubeClient, clusterRoleName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.DeleteClusterRoleBinding(kubeClient, clusterRoleBindingName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// cleanup clusterSets
		// NOTE(review): the spec body already deletes clusterSet3; this
		// assumes DeleteManagedClusterSet tolerates NotFound — confirm.
		err = util.DeleteManagedClusterSet(clusterClient, clusterSet1)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.DeleteManagedClusterSet(clusterClient, clusterSet2)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.DeleteManagedClusterSet(clusterClient, clusterSet3)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	// all cases are running in order with the same clusterRole and clusterRoleBinding of the user
	ginkgo.It("should list the managedClusterSets.clusterView successfully", func() {
		// Step 1: grant set1+set2 — the view must list exactly those.
		ginkgo.By("authorize clusterSet1, clusterSet2 to user")
		expectedClusterSets := []string{clusterSet1, clusterSet2}
		rules := []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").Names(expectedClusterSets...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterSetViewGVR,
				util.ManagedClusterSetGVR, expectedClusterSets)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		// Step 2: widening the role must surface clusterSet3 as well.
		ginkgo.By("append clusterSet3 to user role")
		expectedClusterSets = []string{clusterSet1, clusterSet2, clusterSet3}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").Names(expectedClusterSets...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterSetViewGVR,
				util.ManagedClusterSetGVR, expectedClusterSets)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		// Step 3: narrowing the role must hide clusterSet2 again.
		ginkgo.By("delete clusterSet2 in user role")
		expectedClusterSets = []string{clusterSet1, clusterSet3}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").Names(expectedClusterSets...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterSetViewGVR,
				util.ManagedClusterSetGVR, expectedClusterSets)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		// Step 4: deleting the clusterSet resource removes it from the view
		// even though the role still names it.
		ginkgo.By("delete clusterSet3")
		err = util.DeleteManagedClusterSet(clusterClient, clusterSet3)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		clusterSetInRole := []string{clusterSet1, clusterSet3}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").Names(clusterSetInRole...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		expectedClusterSets = []string{clusterSet1}
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterSetViewGVR,
				util.ManagedClusterSetGVR, expectedClusterSets)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		// Step 5: without the clusterRole the list call is forbidden with a
		// specific RBAC error.
		ginkgo.By("delete clusterRole")
		err = util.DeleteClusterRole(kubeClient, clusterRoleName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		expectedClusterSets = []string{}
		expectedError := "validateClusterView: failed to List Resource managedclustersets.clusterview.open-cluster-management.io is forbidden: User \"" + userName + "\" cannot list resource \"managedclustersets\" in API group \"clusterview.open-cluster-management.io\" at the cluster scope: RBAC: clusterrole.rbac.authorization.k8s.io \"" + userName + clusterRoleNamePostfix + "\" not found"
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterSetViewGVR,
				util.ManagedClusterSetGVR, expectedClusterSets)
		}, eventuallyTimeout, eventuallyInterval).Should(gomega.Equal(errors.New(expectedError)))
		// Step 6: re-creating the role restores access to the named sets.
		ginkgo.By("add clusterRole")
		expectedClusterSets = []string{clusterSet1, clusterSet2}
		rules = []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").Names(expectedClusterSets...).RuleOrDie(),
		}
		err = util.UpdateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		gomega.Eventually(func() error {
			return validateClusterView(userDynamicClient, managedClusterSetViewGVR,
				util.ManagedClusterSetGVR, expectedClusterSets)
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
	})
})
// Verifies that a user watching managedClusterSets through the clusterview
// API receives Added events for clusterSets their role grants access to.
var _ = ginkgo.Describe("Testing ClusterView to watch managedClusterSets", func() {
	var userName = rand.String(6)
	var clusterRoleName = userName + clusterRoleNamePostfix
	var clusterRoleBindingName = userName + clusterRoleBindingNamePostfix
	var clusterSetName = util.RandomName()
	var userDynamicClient dynamic.Interface
	var err error
	ginkgo.BeforeEach(func() {
		// create clusterRole and clusterRoleBinding for user
		rules := []rbacv1.PolicyRule{
			helpers.NewRule("list", "watch").Groups("clusterview.open-cluster-management.io").Resources("managedclustersets").RuleOrDie(),
			helpers.NewRule("list", "get").Groups("cluster.open-cluster-management.io").Resources("managedclustersets").Names(clusterSetName).RuleOrDie(),
		}
		err = util.CreateClusterRole(kubeClient, clusterRoleName, rules)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.CreateClusterRoleBindingForUser(kubeClient, clusterRoleBindingName, clusterRoleName, userName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// impersonate user to the default kubeConfig
		userDynamicClient, err = util.NewDynamicClientWithImpersonate(userName, nil)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	ginkgo.AfterEach(func() {
		// cleanup clusterRole and clusterRoleBinding
		err = util.DeleteClusterRole(kubeClient, clusterRoleName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		err = util.DeleteClusterRoleBinding(kubeClient, clusterRoleBindingName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		// cleanup clusterSet
		err = util.DeleteManagedClusterSet(clusterClient, clusterSetName)
		gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
	})
	ginkgo.It("should watch the managedClusterSets.clusterView successfully", func() {
		var watchedClient watch.Interface
		var err error
		// Retry until RBAC propagation allows the user to open the watch.
		gomega.Eventually(func() error {
			watchedClient, err = userDynamicClient.Resource(managedClusterSetViewGVR).Watch(context.Background(), metav1.ListOptions{})
			return err
		}, eventuallyTimeout, eventuallyInterval).ShouldNot(gomega.HaveOccurred())
		defer watchedClient.Stop()
		go func() {
			// NOTE(review): gomega assertions inside a goroutine should be
			// guarded with `defer ginkgo.GinkgoRecover()` — confirm.
			time.Sleep(time.Second * 1)
			// prepare 1 clusterSet
			err = util.CreateManagedClusterSet(clusterClient, clusterSetName)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		}()
		// Consume events until the Added event for the clusterSet shows up,
		// failing after ~10 seconds without progress.
		timeCount := 0
		clusterCount := 0
		expectedClusterSetCount := 1
		for {
			select {
			case event, ok := <-watchedClient.ResultChan():
				gomega.Expect(ok).Should(gomega.BeTrue())
				if event.Type == watch.Added {
					obj := event.Object.DeepCopyObject()
					clusterSet := obj.(*unstructured.Unstructured)
					name, _, _ := unstructured.NestedString(clusterSet.Object, "metadata", "name")
					gomega.Expect(name).Should(gomega.Equal(clusterSetName))
					clusterCount++
					break // no-op: exits only the select case, not the for loop
				}
			case <-time.After(1 * time.Second):
				timeCount++
			}
			if expectedClusterSetCount == clusterCount {
				break
			}
			gomega.Expect(timeCount).ShouldNot(gomega.BeNumerically(">=", 10))
		}
	})
})
|
/// Delete a named property from the object.
///
/// Returns false if the property cannot be deleted.
fn delete_property(
&self,
activation: &mut Activation<'_, 'gc, '_>,
multiname: &Multiname<'gc>,
) -> Result<bool, Error> {
let name = self.resolve_multiname(multiname)?;
if name.is_none() {
return self.delete_property_undef(activation, multiname);
}
//At this point, the name should be known.
let name = name.unwrap();
// Reject attempts to delete lazy-bound methods before they have
// been bound.
if !self.base().has_own_instantiated_property(name) {
if let Some(class) = self.instance_of() {
if class
.instance_method(name)
.map(|t| t.is_some())
.unwrap_or(false)
{
return Ok(false);
}
}
if let Some(class) = self.as_class_object() {
if class
.class_method(name)
.map(|t| t.is_some())
.unwrap_or(false)
{
return Ok(false);
}
}
}
self.delete_property_local(activation.context.gc_context, name)
} |
def main():
    """Entry point for the report generator.

    Validates that the assessment's data files exist, then runs the
    processing mode selected on the command line (--manual, --graph,
    --Metrics, or the default full PDF report build).

    Returns:
        int: 0 on success, 1 when a required file is missing or a build
        step reports failure.
    """
    args: Dict[str, str] = docopt(__doc__, version=__version__)
    success = True
    data_file = "reportData_" + args["ASSESSMENT_ID"] + ".json"
    manual_data_file = "manualData_" + args["ASSESSMENT_ID"] + ".json"
    try:
        # Probe both required input files up-front; `with` guarantees the
        # probe handles are closed even on failure.
        for required_file in (data_file, manual_data_file):
            with open(required_file):
                pass
    except IOError as e:
        print("ERROR- File not found: " + e.filename)
        success = False
    else:
        if args["--manual"]:
            manualData_processor(data_file, manual_data_file)
        elif args["--graph"]:
            manualData_processor(data_file, manual_data_file)
            success = graph_builder(args["ASSESSMENT_ID"], args["--labels"])
        elif args["--Metrics"]:
            manualData_processor(data_file, manual_data_file)
            success = assessment_metrics(data_file)
        else:
            # Default mode: build the complete PDF report.
            manualData_processor(data_file, manual_data_file)
            # Capture the graph result so a graphing failure is reflected
            # in the exit code (mirrors the --graph branch; previously the
            # return value was silently discarded here).
            success = graph_builder(args["ASSESSMENT_ID"], args["--labels"])
            closing_builder(args["ASSESSMENT_ID"])
            # Build the LaTeX report inside an isolated temporary directory
            # so intermediate artifacts never pollute the working directory.
            original_working_dir = os.getcwd()
            temp_working_dir = tempfile.mkdtemp()
            os.chdir(temp_working_dir)
            setup_work_directory(temp_working_dir)
            org_manual_data_file = os.path.join(original_working_dir, manual_data_file)
            shutil.copy(org_manual_data_file, manual_data_file)
            org_data_filename = os.path.join(original_working_dir, data_file)
            shutil.copy(org_data_filename, data_file)
            # Static assets required by the LaTeX build.
            for folder in TO_COPY:
                dir_src = os.path.join(original_working_dir, folder)
                dir_dst = os.path.join(temp_working_dir, "{}".format(folder))
                shutil.copytree(dir_src, dir_dst)
            latex_builder(
                args["ASSESSMENT_ID"], data_file, args["--labels"], MUSTACHE_FILE
            )
            os.chdir(original_working_dir)
            # Move the build products back next to the input files.
            src_report_filename = os.path.join(
                temp_working_dir, args["ASSESSMENT_ID"] + "_report.pdf"
            )
            src_data_filename = os.path.join(temp_working_dir, data_file)
            src_tex_filename = os.path.join(
                temp_working_dir, args["ASSESSMENT_ID"] + "_report.tex"
            )
            dest_report_filename = args["ASSESSMENT_ID"] + "_report.pdf"
            dest_tex_filename = args["ASSESSMENT_ID"] + "_report.tex"
            shutil.move(src_report_filename, dest_report_filename)
            shutil.move(src_data_filename, data_file)
            if args["--editable"]:
                # Keep the .tex source only when explicitly requested.
                shutil.move(src_tex_filename, dest_tex_filename)
            asset_src = os.path.join(temp_working_dir, "assets")
            if os.path.exists("assets") and os.path.isdir("assets"):
                shutil.rmtree("assets")
            shutil.copytree(asset_src, "assets")
            shutil.rmtree(temp_working_dir)
            print(
                "Completed, Please see report: {}_report.pdf".format(
                    args["ASSESSMENT_ID"]
                )
            )
    return 0 if success else 1
import { DefaultSeoProps } from "next-seo";
const defaultSEOConfig: DefaultSeoProps = {
  // Default document titles; `titleTemplate` wraps each page's own title.
  title: "<NAME>",
  titleTemplate: "%s | <NAME>",
  defaultTitle: "<NAME>",
  description: "Full-stack engineer and digital nomad",
  canonical: "https://farazpatankar.com",
  openGraph: {
    url: "https://farazpatankar.com",
    title: "<NAME>",
    description: "Full-stack engineer and digital nomad",
    images: [
      {
        // NOTE(review): this image URL and alt text still reference the
        // "nextarter-chakra" starter template — presumably leftovers;
        // replace with a site-specific OG image.
        url: "https://og-image.sznm.dev/**nextarter-chakra**.sznm.dev.png?theme=dark&md=1&fontSize=125px&images=https%3A%2F%2Fsznm.dev%2Favataaars.svg&widths=250",
        alt: "nextarter-chakra.sznm.dev og-image",
      },
    ],
    site_name: "farazpatankar",
  },
  twitter: {
    handle: "@farazpatankar13",
    cardType: "summary_large_image",
  },
  // NOTE(review): these flags tell search engines not to index or follow
  // ANY page, hiding the whole site from search results — confirm this is
  // intentional for a public personal website.
  dangerouslySetAllPagesToNoIndex: true,
  dangerouslySetAllPagesToNoFollow: true,
};
export default defaultSEOConfig;
|
<filename>app/common/views_object.py
from django.shortcuts import render, redirect
from django import template
from django.http import HttpResponseRedirect
from django.views.generic import TemplateView
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from common.forms import UserConnectionForm, UserCreationForm, ChangePasswordForm, ChangeEmailForm
from django.contrib import auth
from django.contrib.auth import REDIRECT_FIELD_NAME, login
from django.contrib.auth.forms import AuthenticationForm
from django.shortcuts import redirect, resolve_url
from django.utils.decorators import method_decorator
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.generic import View
from django.views.generic.base import TemplateResponseMixin
from django.views.generic.edit import FormView
from django.conf import settings
class HomeView(TemplateView):
    """Landing page; exposes the full user list to the template."""

    template_name = 'common/home.html'

    def get_context_data(self, **kwargs):
        """Extend the base context with every registered user under
        the ``user_query`` key."""
        context = super().get_context_data(**kwargs)
        context['user_query'] = User.objects.all()
        return context
class SignUpView(FormView):
    """Account registration; already-authenticated visitors are sent
    straight to their profile."""

    form_class = UserCreationForm
    template_name = 'common/inscription.html'

    def get(self, request, *args, **kwargs):
        # Nothing to register for a signed-in user.
        if request.user.is_authenticated:
            return redirect('profile')
        return render(request, self.template_name, {'form': self.form_class()})

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if not form.is_valid():
            # Re-display the form with validation errors.
            return render(request, self.template_name, {'form': form})
        data = form.cleaned_data
        new_user = User.objects.create_user(
            data.get('pseudo'),
            data.get('email'),
            data.get('password'),
            is_active=True,
        )
        new_user.save()
        return redirect('profile')
class LoginView(FormView):
    """Username/password login.

    On valid credentials the user is logged in and redirected to their
    profile. On failure the login form is re-displayed with an error.

    Security fix: the previous implementation logged the visitor in as
    ``User.objects.first()`` whenever authentication failed, effectively
    granting anyone access to the first account in the database.
    """

    template_name = 'common/login.html'
    form_class = UserConnectionForm

    def form_valid(self, form):
        user = authenticate(
            username=form.cleaned_data['pseudo'],
            password=form.cleaned_data['password'],
        )
        if user and user.is_active:
            login(self.request, user)
            return redirect('profile')
        # Authentication failed: show the form again instead of logging
        # the visitor into an arbitrary account.
        form.add_error(None, 'Invalid username or password.')
        return self.form_invalid(form)
class LogoutView(FormView):
    """Logs the current user out and returns to the home page."""

    def get(self, request, **kwargs):
        logout(request)
        return redirect('home')
class ProfileView(TemplateView):
    """Profile page; anonymous visitors are redirected to the home page."""

    template_name = 'common/profile.html'

    def get(self, request, *args, **kwargs):
        if not request.user.is_authenticated:
            return redirect('home')
        return render(request, self.template_name, {})
class ChangePasswordView(FormView):
    """Lets an authenticated user set a new password."""

    form_class = ChangePasswordForm
    template_name = 'common/change_password.html'

    def get(self, request, *args, **kwargs):
        # Password changes require an authenticated session.
        if not request.user.is_authenticated:
            return redirect('home')
        return render(request, self.template_name, {'form': self.form_class()})

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if not form.is_valid():
            return render(request, self.template_name, {'form': form})
        user = request.user
        user.set_password(form.cleaned_data.get('password'))
        user.save()
        return redirect('profile')
class ChangeEmailView(FormView):
    """Lets an authenticated user update their e-mail address."""

    form_class = ChangeEmailForm
    template_name = 'common/change_email.html'

    def get(self, request, *args, **kwargs):
        # E-mail changes require an authenticated session.
        if not request.user.is_authenticated:
            return redirect('home')
        return render(request, self.template_name, {'form': self.form_class()})

    def post(self, request, *args, **kwargs):
        form = self.form_class(request.POST)
        if not form.is_valid():
            return render(request, self.template_name, {'form': form})
        user = request.user
        user.email = form.cleaned_data.get('email')
        user.save()
        return redirect('profile')
from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
import json
import cgi
from PySide2.QtCore import QObject, Signal, Slot
from urllib.parse import urlparse, parse_qs, unquote
class Server(ThreadingHTTPServer, QObject):
    """HTTP control server bridging REST requests to the Qt `api` object.

    Requests are handled on worker threads; the `action` signal hands the
    parsed action over to the Qt side where `api.action` runs.
    """

    # Emitted with (action, filename, payload) for every dispatched request.
    action = Signal(str, str, dict)

    def __init__(self, port, api):
        QObject.__init__(self)
        self.api = api
        # Build a handler class with the state/action callbacks baked in.
        HandlerClass = self.requestHandlerFactory(self.api.getState, self.actionCallback)
        ThreadingHTTPServer.__init__(self, ('localhost', port), HandlerClass)
        self.action.connect(self.api.action)

    def actionCallback(self, action=None, filename=None, payload=None):
        # Called from request-handler threads; emitting the signal defers
        # the actual work to the connected Qt slot.
        self.action.emit(action, filename, payload)

    def requestHandlerFactory(self, stateCallback, actionCallback):
        """Factory method to pass parameters to request handler"""
        class CustomHandler(RequestHandler):
            def __init__(self, *args, **kwargs):
                self.stateCallback = stateCallback
                self.actionCallback = actionCallback
                super(RequestHandler, self).__init__(*args, **kwargs)
        return CustomHandler
class RequestHandler(BaseHTTPRequestHandler):
    """Translates HTTP requests into application actions.

    GET returns the application state; POST dispatches actions such as
    opening a database, adding nodes, applying settings, or fetching data.
    The `stateCallback` / `actionCallback` attributes are injected by
    `Server.requestHandlerFactory`.
    """

    def __init__(self, *args, **kwargs):
        super(RequestHandler, self).__init__(*args, **kwargs)

    def send_answer(self, content, status=200, message=None):
        """Send `content` JSON-encoded with the given HTTP status."""
        self.send_response(status, message)
        self.send_header('Content-type', 'application/json')
        self.end_headers()
        if content is not None:
            response = json.dumps(content)
            self.wfile.write(response.encode('utf-8'))

    def parseAction(self):
        """Decompose the request into a dict with keys such as
        'contenttype', 'path', 'action', 'query', 'filename' and 'body'.

        Parsing is best-effort: any failure (e.g. a GET without a
        content-length header) stores the message under 'error' instead of
        raising, so keys populated before the failure remain usable.
        """
        action = {}
        try:
            # Parse headers
            contenttype_value, contenttype_params = cgi.parse_header(self.headers.get('content-type', ''))
            action['contenttype'] = contenttype_value
            # refuse to receive non-json content
            # if contenttype_value != 'application/json':
            #     self.send_response(400)
            #     self.end_headers()
            #     return
            # Parse path: the first segment selects the action, the rest
            # are positional parameters.
            url = urlparse(self.path)
            action['path'] = url.path.strip("/").split("/")
            action['path'] = [unquote(x) for x in action['path']]
            action['action'] = action['path'].pop(0)
            # Parse query
            action['query'] = parse_qs(url.query)
            filenames = action['query'].get('filename')
            if filenames is not None:
                action['filename'] = filenames.pop()
            # Get post data
            length = int(self.headers.get('content-length'))
            action['body'] = json.loads(self.rfile.read(length))
        except Exception as e:
            action['error'] = str(e)
        return action

    # Sends back the state and database name
    def do_GET(self):
        """
        Get state
        The first component of the URL path is the snippet name.
        Supported snippets are : settings, log
        An empty snippet just returns the database name and the state
        """
        try:
            action = self.parseAction()
            response = self.stateCallback(action['action'])
        except:
            self.send_answer(None, 500, "Could not process request.")
        else:
            self.send_answer(response)

    def do_POST(self):
        """
        Execute actions
        The first component of the URL path is the action name.
        The following components are parameters.
        Additional data is provided in the payload.
        """
        # Handle actions
        try:
            action = self.parseAction()
            # Open database
            if action['action'] == "database":
                if action.get('filename') is not None:
                    if action['query'].get('create', False):
                        self.actionCallback('createdatabase', filename=action.get('filename'))
                    else:
                        self.actionCallback('opendatabase', filename=action.get('filename'))
                    result = "ok"
                else:
                    result = "Missing filename."
            # Post nodes: csv file or nodes in the payload
            # NOTE(review): if neither filename nor body is present,
            # `result` stays unset and the UnboundLocalError below is
            # reported as a generic 500 — confirm whether a
            # "Missing filename or data." result was intended here.
            elif action['action'] == "nodes":
                if action.get('filename') is not None:
                    self.actionCallback('addcsv', filename=action.get('filename'))
                    result = "ok"
                elif action.get('body') is not None:
                    nodes = action['body'].get('nodes', [])
                    if not (type(nodes) is list):
                        nodes = [nodes]
                    self.actionCallback('addnodes', payload=nodes)
                    result = "ok"
            # Post settings: preset file or settings in the payload
            elif action['action'] == "settings":
                if action.get('filename') is not None:
                    self.actionCallback('loadpreset', filename=action.get('filename'))
                    result = "ok"
                elif action.get('body') is not None:
                    self.actionCallback('applysettings', payload=action.get('body'))
                    result = "ok"
                else:
                    result = "Missing filename or data."
            # Fetch data
            elif action['action'] == "fetchdata":
                self.actionCallback('fetchdata')
                result = "ok"
            else:
                self.send_answer(None, 404, "No valid action!")
                return False
            # Response
            response = self.stateCallback()
            response['result'] = result
        except Exception as e:
            self.send_answer(None, 500, "Server error!")
            return False
        else:
            self.send_answer(response)
        return True
#
# Server for listening at local ports for OAuth Login redirects
#
class LoginServer(ThreadingHTTPServer, QObject):
    """Temporary local HTTP server that captures OAuth login redirects.

    getCallback: function with the redirect URL as return value. Careful:
    it is called from a request-handler thread — do not change the user
    interface inside the callback.
    responseCallback: called after sending the response, carrying the
    status code. Since a signal is emitted, the callback is synchronized
    with the user interface.
    """

    action = Signal(int)
    logmessage = Signal(str)

    def __init__(self, port, getCallback, responseCallback=None):
        QObject.__init__(self)
        HandlerClass = self.requestHandlerFactory(getCallback, self.responseCallback)
        ThreadingHTTPServer.__init__(self, ('localhost', port), HandlerClass)
        if responseCallback is not None:
            self.action.connect(responseCallback)

    def responseCallback(self, status=None):
        # Bridge from the handler thread to the Qt side via the signal.
        self.action.emit(status)

    def requestHandlerFactory(self, getCallback, responseCallback=None):
        """Factory method to pass parameters to request handler"""
        class CustomHandler(LoginRequestHandler):
            def __init__(self, *args, **kwargs):
                self.getCallback = getCallback
                self.responseCallback = responseCallback
                super(LoginRequestHandler, self).__init__(*args, **kwargs)
        return CustomHandler
class LoginRequestHandler(BaseHTTPRequestHandler):
    """Handles a single OAuth redirect request for LoginServer."""

    def __init__(self, *args, **kwargs):
        super(LoginRequestHandler, self).__init__(*args, **kwargs)

    def send_answer(self, status=200, message=None):
        """Send an empty HTML response and notify the response callback."""
        self.send_response(status, message)
        self.send_header('Content-type', 'text/html; charset=utf-8')
        self.end_headers()
        self.responseCallback(status)

    def send_redirect(self, status, url):
        """Send a redirect to `url` and notify the response callback."""
        self.send_response(status, None)
        self.send_header('Content-type', 'text/html; charset=utf-8')
        self.send_header('Location', url)
        self.end_headers()
        self.responseCallback(status)

    # Receives redirect URL, calls callback, and sends back a redirect
    def do_GET(self):
        try:
            redirecturl = self.getCallback(self.path)
        except Exception as e:
            self.send_answer(500, "Could not process request.")
        else:
            if redirecturl is None:
                self.send_answer(404, "Not found.")
            else:
                self.send_redirect(303, redirecturl)
|
package kafkaadmin
import (
"context"
"math/rand"
"strconv"
"testing"
"time"
"github.com/confluentinc/confluent-kafka-go/kafka"
"github.com/stretchr/testify/assert"
kafkatesting "github.com/tmstff/kafka-testing-go"
)
// TestTopicCreation spins up a throwaway Kafka broker (via kafka-testing-go)
// and verifies that EnsureTopicExistsWithConfig creates a compacted topic and
// is idempotent when the topic already exists.
func TestTopicCreation(t *testing.T) {
	ctx := context.Background()
	kafkaUrl, terminateKafka, err := kafkatesting.StartKafkaWithEnv(ctx, map[string]string{"KAFKA_AUTO_CREATE_TOPICS_ENABLE": "true"})
	if err != nil {
		// NOTE(review): assert.Fail does not stop the test; execution falls
		// through with an unusable kafkaUrl. require.Fail (or t.Fatal) would
		// abort here — confirm intent.
		assert.Fail(t, err.Error())
	} else {
		defer terminateKafka(ctx)
	}
	// Random topic name so repeated runs against a shared broker don't collide.
	rand.Seed(time.Now().UnixNano())
	topic := strconv.FormatUint(rand.Uint64(), 10)
	config := CompactedTopicConfig(topic)
	config.ReplicationFactor = 1 // single-broker test cluster
	err = EnsureTopicExistsWithConfig(ctx, kafkaUrl, nil, config)
	assert.Nil(t, err)
	adminClient, err := kafka.NewAdminClient(&kafka.ConfigMap{
		"bootstrap.servers": kafkaUrl,
	})
	assert.Nil(t, err, "could create adminClient")
	// Read back the topic configuration to confirm the compaction policy.
	configResources, err := adminClient.DescribeConfigs(ctx, []kafka.ConfigResource{{
		Type: kafka.ResourceTopic,
		Name: topic,
	}})
	assert.Nil(t, err, "DescribeConfigs failed")
	assert.NotNil(t, configResources)
	for _, cr := range configResources {
		t.Logf("config resource %s", cr)
		for _, c := range cr.Config {
			t.Logf("config %s", c)
		}
	}
	assert.Equal(t, 1, len(configResources))
	assert.Equal(t, "compact", configResources[0].Config["cleanup.policy"].Value)
	// ensure it still works if topic is present
	err = EnsureTopicExistsWithConfig(ctx, kafkaUrl, nil, config)
	assert.Nil(t, err)
}
|
/* SPDX-License-Identifier: MIT
*
* Copyright (C) 2019 WireGuard LLC. All Rights Reserved.
*/
package winipcfg
// Byte sizes and field offsets of the Windows iphlpapi adapter structures,
// used for manual (unsafe) decoding of GetAdaptersAddresses output.
// NOTE(review): values appear to be for the 32-bit structure layout —
// confirm against the corresponding wt* type definitions.
const (
	// IP_ADAPTER_ADDRESSES_LH
	wtIpAdapterAddressesLh_Size = 376

	wtIpAdapterAddressesLh_IfIndex_Offset = 4
	wtIpAdapterAddressesLh_Next_Offset = 8
	wtIpAdapterAddressesLh_AdapterName_Offset = 12
	wtIpAdapterAddressesLh_FirstUnicastAddress_Offset = 16
	wtIpAdapterAddressesLh_FirstAnycastAddress_Offset = 20
	wtIpAdapterAddressesLh_FirstMulticastAddress_Offset = 24
	wtIpAdapterAddressesLh_FirstDnsServerAddress_Offset = 28
	wtIpAdapterAddressesLh_DnsSuffix_Offset = 32
	wtIpAdapterAddressesLh_Description_Offset = 36
	wtIpAdapterAddressesLh_FriendlyName_Offset = 40
	wtIpAdapterAddressesLh_PhysicalAddress_Offset = 44
	wtIpAdapterAddressesLh_PhysicalAddressLength_Offset = 52
	wtIpAdapterAddressesLh_Flags_Offset = 56
	wtIpAdapterAddressesLh_Mtu_Offset = 60
	wtIpAdapterAddressesLh_IfType_Offset = 64
	wtIpAdapterAddressesLh_OperStatus_Offset = 68
	wtIpAdapterAddressesLh_Ipv6IfIndex_Offset = 72
	wtIpAdapterAddressesLh_ZoneIndices_Offset = 76
	wtIpAdapterAddressesLh_FirstPrefix_Offset = 140
	wtIpAdapterAddressesLh_TransmitLinkSpeed_Offset = 144
	wtIpAdapterAddressesLh_ReceiveLinkSpeed_Offset = 152
	wtIpAdapterAddressesLh_FirstWinsServerAddress_Offset = 160
	wtIpAdapterAddressesLh_FirstGatewayAddress_Offset = 164
	wtIpAdapterAddressesLh_Ipv4Metric_Offset = 168
	wtIpAdapterAddressesLh_Ipv6Metric_Offset = 172
	wtIpAdapterAddressesLh_Luid_Offset = 176
	wtIpAdapterAddressesLh_Dhcpv4Server_Offset = 184
	wtIpAdapterAddressesLh_CompartmentId_Offset = 192
	wtIpAdapterAddressesLh_NetworkGuid_Offset = 196
	wtIpAdapterAddressesLh_ConnectionType_Offset = 212
	wtIpAdapterAddressesLh_TunnelType_Offset = 216
	wtIpAdapterAddressesLh_Dhcpv6Server_Offset = 220
	wtIpAdapterAddressesLh_Dhcpv6ClientDuid_Offset = 228
	wtIpAdapterAddressesLh_Dhcpv6ClientDuidLength_Offset = 360
	wtIpAdapterAddressesLh_Dhcpv6Iaid_Offset = 364
	wtIpAdapterAddressesLh_FirstDnsSuffix_Offset = 368

	// IP_ADAPTER_ANYCAST_ADDRESS_XP
	wtIpAdapterAnycastAddressXp_Size = 24

	wtIpAdapterAnycastAddressXp_Flags_Offset = 4
	wtIpAdapterAnycastAddressXp_Next_Offset = 8
	wtIpAdapterAnycastAddressXp_Address_Offset = 12

	// SOCKET_ADDRESS
	wtSocketAddress_Size = 8

	wtSocketAddress_iSockaddrLength_Offset = 4

	// IP_ADAPTER_DNS_SERVER_ADDRESS_XP
	wtIpAdapterDnsServerAddressXp_Size = 24

	wtIpAdapterDnsServerAddressXp_Reserved_Offset = 4
	wtIpAdapterDnsServerAddressXp_Next_Offset = 8
	wtIpAdapterDnsServerAddressXp_Address_Offset = 12

	// IP_ADAPTER_DNS_SUFFIX
	wtIpAdapterDnsSuffix_Size = 516

	wtIpAdapterDnsSuffix_String_Offset = 4

	// IP_ADAPTER_GATEWAY_ADDRESS_LH
	wtIpAdapterGatewayAddressLh_Size = 24

	wtIpAdapterGatewayAddressLh_Reserved_Offset = 4
	wtIpAdapterGatewayAddressLh_Next_Offset = 8
	wtIpAdapterGatewayAddressLh_Address_Offset = 12

	// IP_ADAPTER_MULTICAST_ADDRESS_XP
	wtIpAdapterMulticastAddressXp_Size = 24

	wtIpAdapterMulticastAddressXp_Flags_Offset = 4
	wtIpAdapterMulticastAddressXp_Next_Offset = 8
	wtIpAdapterMulticastAddressXp_Address_Offset = 12

	// IP_ADAPTER_PREFIX_XP
	wtIpAdapterPrefixXp_Size = 24

	wtIpAdapterPrefixXp_Flags_Offset = 4
	wtIpAdapterPrefixXp_Next_Offset = 8
	wtIpAdapterPrefixXp_Address_Offset = 12
	wtIpAdapterPrefixXp_PrefixLength_Offset = 20

	// IP_ADAPTER_UNICAST_ADDRESS_LH
	wtIpAdapterUnicastAddressLh_Size = 48

	wtIpAdapterUnicastAddressLh_Flags_Offset = 4
	wtIpAdapterUnicastAddressLh_Next_Offset = 8
	wtIpAdapterUnicastAddressLh_Address_Offset = 12
	wtIpAdapterUnicastAddressLh_PrefixOrigin_Offset = 20
	wtIpAdapterUnicastAddressLh_SuffixOrigin_Offset = 24
	wtIpAdapterUnicastAddressLh_DadState_Offset = 28
	wtIpAdapterUnicastAddressLh_ValidLifetime_Offset = 32
	wtIpAdapterUnicastAddressLh_PreferredLifetime_Offset = 36
	wtIpAdapterUnicastAddressLh_LeaseLifetime_Offset = 40
	wtIpAdapterUnicastAddressLh_OnLinkPrefixLength_Offset = 44

	// IP_ADAPTER_WINS_SERVER_ADDRESS_LH
	wtIpAdapterWinsServerAddressLh_Size = 24

	wtIpAdapterWinsServerAddressLh_Reserved_Offset = 4
	wtIpAdapterWinsServerAddressLh_Next_Offset = 8
	wtIpAdapterWinsServerAddressLh_Address_Offset = 12
)
|
/**
 * This is a simple and stupid resolver, that does not check what is
 * being resolved; and thus it should only be used if only one thing
 * (a single external entity; a single external subset) is to
 * be expanded (although that single entity can be repeated multiple
 * times).
 */
public class SimpleResolver
    implements XMLResolver
{
    /** Charset name retained for backwards compatibility with earlier code. */
    final String ENC = "UTF-8";

    /** Replacement content, pre-encoded as UTF-8 bytes. */
    final byte[] mData;

    public SimpleResolver(String content)
    {
        // Using the Charset overload: UTF-8 is guaranteed to be available,
        // so there is no checked UnsupportedEncodingException to wrap.
        mData = content.getBytes(java.nio.charset.StandardCharsets.UTF_8);
    }

    /**
     * Always resolves to a fresh stream over the configured content,
     * ignoring all identifier arguments.
     */
    @Override
    public Object resolveEntity(String publicID, String systemID, String baseURI, String namespace)
    {
        return new java.io.ByteArrayInputStream(mData);
    }
}
/**
 * Base class for tests in the security package.
 *
 * @author jgustie
 */
public abstract class AbstractSecurityTest {

    /**
     * Resolves a classpath resource (looked up relative to the concrete
     * test class) to a filesystem path.
     *
     * @param name resource name relative to this test class
     * @return path to the resource on disk
     * @throws IllegalArgumentException if the resource URI is malformed
     *         (Guava's {@code Resources.getResource} also throws
     *         {@code IllegalArgumentException} when the resource is missing)
     */
    public final Path file(String name) {
        try {
            return Paths.get(Resources.getResource(getClass(), name).toURI());
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException("invalid uri: " + name, e);
        }
    }
}
// PanicMessage goes to a standard and unavoidable log, then emits a signal.
//
// The channel send blocks until a receiver drains e.Signals.
// NOTE(review): fmt.Errorf is used only to build a log line; log.Printf
// with the same format would avoid constructing a throwaway error value.
func (e *Engine) PanicMessage(message string) {
	log.Println(fmt.Errorf("[ENGINE] %s", message))
	e.Signals <- EnginePanic
}
// Municipalities returns a list of municipalities that Eskom supplies to.
//
// The result is scoped to the given province, which is encoded as its
// numeric identifier in the request's query string. The JSON response is
// decoded into a Municipalities value.
func (c *Client) Municipalities(ctx context.Context, province Province) (Municipalities, error) {
	h := getClient(c)
	requestURL := fmt.Sprintf("/GetMunicipalities/?Id=%d", province)
	var municipalities Municipalities
	err := doRequestJSON(ctx, h, requestURL, nil, &municipalities)
	return municipalities, err
}
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import main
from imgrename import core as ir
from imgrename import EXIF, exifutil, fileutil, slogging
|
<gh_stars>0
package io.izzel.taboolib.module.effect.pobject;
import org.bukkit.Location;
/**
 * Represents a circle particle effect.
 *
 * @author Zoyn
 */
public class Circle extends ParticleObject {

    // A circle is modelled as a single 360-degree arc; every operation
    // delegates to it.
    private final Arc fullArc;

    public Circle(Location origin) {
        this(origin, 1);
    }

    public Circle(Location origin, double radius) {
        this(origin, radius, 1);
    }

    /**
     * Constructs a circle.
     *
     * @param origin center of the circle
     * @param radius radius of the circle
     * @param step   spacing between particles (step length)
     */
    public Circle(Location origin, double radius, double step) {
        this(origin, radius, step, 20L);
    }

    /**
     * Constructs a circle.
     *
     * @param origin center of the circle
     * @param radius radius of the circle
     * @param step   spacing between particles (step length)
     * @param period effect period (used when the effect repeats)
     */
    public Circle(Location origin, double radius, double step, long period) {
        // Circle only needs to drive this fullArc to satisfy every requirement.
        this.fullArc = new Arc(origin)
                .setAngle(360D)
                .setRadius(radius)
                .setStep(step);
        fullArc.setPeriod(period);
    }

    @Override
    public void show() {
        fullArc.show();
    }

    @Override
    public void alwaysShow() {
        fullArc.alwaysShow();
        // Also update Circle's own ShowType to stay in sync with the arc.
        setShowType(ShowType.ALWAYS_SHOW);
    }

    @Override
    public void alwaysShowAsync() {
        fullArc.alwaysShowAsync();
        // Also update Circle's own ShowType to stay in sync with the arc.
        setShowType(ShowType.ALWAYS_SHOW_ASYNC);
    }

    @Override
    public void turnOffTask() {
        fullArc.turnOffTask();
        // Also update Circle's own ShowType to stay in sync with the arc.
        setShowType(ShowType.NONE);
    }

    public Location getOrigin() {
        return fullArc.getOrigin();
    }

    public void setOrigin(Location origin) {
        this.fullArc.setOrigin(origin);
        // (Translated historical note): the origin used to be forgotten
        // here; no refresh call is needed anymore.
        // checkArcThenRefreshShow();
    }

    public double getRadius() {
        return this.fullArc.getRadius();
    }

    public Circle setRadius(double radius) {
        this.fullArc.setRadius(radius);
        return this;
    }

    public double getStep() {
        return this.fullArc.getStep();
    }

    public Circle setStep(double step) {
        this.fullArc.setStep(step);
        return this;
    }

    public long getPeriod() {
        return this.fullArc.getPeriod();
    }

    public void setPeriod(long period) {
        this.fullArc.setPeriod(period);
    }
}
|
Bacteria-Activated Theranostic Nanoprobes against Methicillin-Resistant Staphylococcus aureus Infection.
Despite numerous advanced imaging and sterilization techniques available nowadays, the sensitive in vivo diagnosis and complete elimination of drug-resistant bacterial infections remain big challenges. Here we report a strategy to design activatable theranostic nanoprobes against methicillin-resistant Staphylococcus aureus (MRSA) infections. This probe is based on silica nanoparticles coated with vancomycin-modified polyelectrolyte-cypate complexes (SiO2-Cy-Van), which is activated by an interesting phenomenon of bacteria-responsive dissociation of the polyelectrolyte from silica nanoparticles. Due to the aggregation of hydrophobic cypate fluorophores on silica nanoparticles to induce ground-state quenching, the SiO2-Cy-Van nanoprobes are nonfluorescent in aqueous environments. We demonstrate that MRSA can effectively pull out the vancomycin-modified polyelectrolyte-cypate complexes from silica nanoparticles and draw them onto their own surface, changing the state of cypate from off (aggregation) to on (disaggregation) and leading to in vitro MRSA-activated near-infrared fluorescence (NIRF) and photothermal elimination involving bacterial cell wall and membrane disruption. In vivo experiments show that this de novo-designed nanoprobe can selectively enable rapid (4 h postinjection) NIRF imaging with high sensitivity (105 colony-forming units) and efficient photothermal therapy (PTT) of MRSA infections in mice. Remarkably, the SiO2-Cy-Van nanoprobes can also afford a long-term tracking (16 days) of the development of MRSA infections, allowing real-time estimation of bacterial load in infected tissues and further providing a possible way to monitor the efficacy of antimicrobial treatment. 
The strategy of bacteria-activated polyelectrolyte dissociation from nanoparticles proposed in this work could also be used as a general method for the design and fabrication of bacteria-responsive functional nanomaterials that offer possibilities to combat drug-resistant bacterial infections. |
package object
import (
"io"
"time"
"github.com/gembaadvantage/go-git/v5/plumbing/storer"
)
// commitLimitIter wraps another CommitIter and filters out commits whose
// committer timestamp falls outside the configured Since/Until window.
type commitLimitIter struct {
	sourceIter   CommitIter
	limitOptions LogLimitOptions
}

// LogLimitOptions restricts a commit walk to a time window. Either bound may
// be nil, in which case that side is unbounded.
type LogLimitOptions struct {
	Since *time.Time
	Until *time.Time
}
// NewCommitLimitIterFromIter wraps commitIter so that only commits inside
// the time window described by limitOptions are yielded.
func NewCommitLimitIterFromIter(commitIter CommitIter, limitOptions LogLimitOptions) CommitIter {
	return &commitLimitIter{
		sourceIter:   commitIter,
		limitOptions: limitOptions,
	}
}
// Next returns the next commit whose committer time lies within the window,
// propagating any error (including io.EOF) from the underlying iterator.
func (c *commitLimitIter) Next() (*Commit, error) {
	for {
		commit, err := c.sourceIter.Next()
		if err != nil {
			return nil, err
		}

		when := commit.Committer.When
		since, until := c.limitOptions.Since, c.limitOptions.Until
		// Skip (rather than stop at) out-of-window commits: history is not
		// guaranteed to be time-ordered, so keep scanning.
		if since != nil && when.Before(*since) {
			continue
		}
		if until != nil && when.After(*until) {
			continue
		}
		return commit, nil
	}
}
// ForEach invokes cb for every in-window commit. Returning storer.ErrStop
// from cb halts iteration without error; any other error aborts the walk and
// is returned to the caller.
func (c *commitLimitIter) ForEach(cb func(*Commit) error) error {
	for {
		commit, err := c.Next()
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}

		if cbErr := cb(commit); cbErr == storer.ErrStop {
			return nil
		} else if cbErr != nil {
			return cbErr
		}
	}
}
// Close releases the underlying source iterator.
func (c *commitLimitIter) Close() {
	c.sourceIter.Close()
}
|
<gh_stars>0
import { GasPayload } from '@atlaskit/analytics-gas-types';
import {
packageAttributes,
fileStateToFileGasPayload,
FileGasPayload,
PackageAttributes,
} from './index';
import { FileState, FileStatus } from '@atlaskit/media-core';
import { MediaViewerError } from '../error';
// Attributes attached to download analytics events: the file-derived GAS
// fields plus the file's processing status and, when the media type is known,
// whether the viewer supports previewing it.
interface DownloadAttributes extends FileGasPayload {
  fileSupported?: boolean;
  fileProcessingStatus: FileStatus;
}
// Builds the common GAS payload shared by all download button click events.
const getBasePayload = (actionSubjectId: string): GasPayload => {
  return {
    eventType: 'ui',
    action: 'clicked',
    actionSubject: 'button',
    actionSubjectId,
  };
};
// Combines the file-derived GAS fields, the file's processing status, and the
// static package attributes into a single attribute bag.
const getBaseAttributes = (fileState: FileState) => {
  const fileAttributes = fileStateToFileGasPayload(fileState);
  return {
    ...fileAttributes,
    fileProcessingStatus: fileState.status,
    ...packageAttributes,
  };
};
// Assembles the full analytics event for a download button click. When
// failReason is provided it is merged into the attributes (used by the
// failed-preview download button).
const downloadEvent = (
  fileState: FileState,
  actionSubjectId: string,
  failReason?: string,
) => {
  const basePayload = getBasePayload(actionSubjectId);
  const baseAttributes = failReason
    ? {
        ...getBaseAttributes(fileState),
        failReason,
      }
    : getBaseAttributes(fileState);
  // NOTE(review): no default case — statuses outside those listed yield
  // undefined; presumably the FileStatus union is exhaustive here. Confirm.
  switch (fileState.status) {
    case 'processed':
    case 'uploading':
    case 'processing':
    case 'failed-processing':
      // For these states the media type is known, so we can report whether
      // the viewer actually supports previewing this file.
      return {
        ...basePayload,
        attributes: {
          ...baseAttributes,
          fileSupported: fileState.mediaType !== 'unknown',
        },
      };
    case 'error':
      // Errored file states carry no media type, so fileSupported is omitted.
      return {
        ...basePayload,
        attributes: {
          ...baseAttributes,
        },
      };
  }
};
// GAS payload specialised for download events: guarantees the download
// attribute shape plus the standard package attributes.
export interface DownloadGasPayload extends GasPayload {
  attributes: DownloadAttributes & PackageAttributes;
}
// Event fired when the user clicks the download button shown on a failed
// preview; the error's name is recorded as the failure reason.
export function downloadErrorButtonEvent(
  state: FileState,
  err: MediaViewerError,
): DownloadGasPayload {
  return downloadEvent(state, 'failedPreviewDownloadButton', err.errorName);
}
// Event fired when the user clicks the regular download button.
export function downloadButtonEvent(state: FileState): DownloadGasPayload {
  return downloadEvent(state, 'downloadButton');
}
|
/**
* Check whether the required user permissions are granted
* @return boolean
*/
public boolean isPermissionGranted() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (checkSelfPermission(android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return false;
} else {
return true;
}
} else {
return true;
}
} |
<reponame>ahmadnassri/httpsnippet
/**
* @description
* HTTP code snippet generator for Kotlin using OkHttp.
*
* @author
* @seanghay
*
* for any questions or issues regarding the generated code snippet, please open an issue mentioning the author.
*/
import { CodeBuilder } from '../../../helpers/code-builder';
import { Client } from '../../targets';
/**
 * Kotlin + OkHttp code snippet generator.
 *
 * Produces an OkHttpClient setup, an optional request body, a Request.Builder
 * chain (URL, verb, headers) and the synchronous execute() call.
 */
export const okhttp: Client = {
  info: {
    key: 'okhttp',
    title: 'OkHttp',
    link: 'http://square.github.io/okhttp/',
    description: 'An HTTP Request Client Library',
  },

  convert: ({ postData, fullUrl, method, allHeaders }, options) => {
    const opts = {
      indent: ' ',
      ...options,
    };
    const { blank, join, push } = new CodeBuilder({ indent: opts.indent });

    // Verbs OkHttp exposes as dedicated builder methods, and the subset of
    // those that accepts a request body.
    const methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD'];
    const methodsWithBody = ['POST', 'PUT', 'DELETE', 'PATCH'];

    // Normalise the verb once instead of re-casing it at every use.
    const upperMethod = method.toUpperCase();
    const lowerMethod = method.toLowerCase();

    push('val client = OkHttpClient()');
    blank();

    // Emit the media type and request body only when a body is present.
    if (postData.text) {
      const mediaTypeLine = postData.boundary
        ? `val mediaType = MediaType.parse("${postData.mimeType}; boundary=${postData.boundary}")`
        : `val mediaType = MediaType.parse("${postData.mimeType}")`;
      push(mediaTypeLine);
      push(`val body = RequestBody.create(mediaType, ${JSON.stringify(postData.text)})`);
    }

    push('val request = Request.Builder()');
    push(`.url("${fullUrl}")`, 1);

    const bodyArg = postData.text ? 'body' : 'null';
    if (!methods.includes(upperMethod)) {
      // Unknown verb: fall back to the generic .method(...) builder call.
      push(`.method("${upperMethod}", ${bodyArg})`, 1);
    } else if (methodsWithBody.includes(upperMethod)) {
      push(`.${lowerMethod}(${bodyArg})`, 1);
    } else {
      push(`.${lowerMethod}()`, 1);
    }

    // Add headers, including the cookies
    Object.keys(allHeaders).forEach(headerName => {
      push(`.addHeader("${headerName}", "${allHeaders[headerName]}")`, 1);
    });

    push('.build()', 1);
    blank();
    push('val response = client.newCall(request).execute()');

    return join();
  },
};
|
Subsets and Splits