content
stringlengths 10
4.9M
|
---|
<reponame>SangameswaranRS/knowledge-extraction<filename>src/main/java/nlp/ParserExtractor.java<gh_stars>10-100
package nlp;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import model.TripletRelation;
import opennlp.tools.cmdline.parser.ParserTool;
import opennlp.tools.parser.Parse;
import opennlp.tools.parser.Parser;
import opennlp.tools.parser.ParserFactory;
import opennlp.tools.parser.ParserModel;
import opennlp.tools.util.Span;
import util.Consts;
public class ParserExtractor {

    // Penn-Treebank-style node labels used in the OpenNLP parse tree.
    private static final String LABEL_TOP = "TOP";
    private static final String LABEL_SENTENCE = "S";
    private static final String LABEL_NOUN_PHRASE = "NP";
    private static final String LABEL_VERBAL_PHRASE = "VP";
    private static final String LABEL_NAME_PREFIX = "NN";
    private static final String LABEL_VERB_PREFIX = "VB";

    private SentenceDetector sentenceDetector;
    private Parser parser;

    /**
     * Loads the sentence-detector and parser models from the classpath.
     *
     * @throws IllegalStateException if the parser model cannot be read.
     *         Failing fast here avoids the later NullPointerException the
     *         original code allowed by only printing the stack trace and
     *         leaving {@code parser} null.
     */
    public ParserExtractor() {
        sentenceDetector = new SentenceDetector(Consts.EN_SENT_MODEL);
        try (InputStream modelIn = ParserExtractor.class.getClassLoader()
                .getResourceAsStream(Consts.EN_PARSER_MODEL)) {
            ParserModel model = new ParserModel(modelIn);
            parser = ParserFactory.create(model);
        } catch (IOException e) {
            throw new IllegalStateException(
                    "Could not load parser model: " + Consts.EN_PARSER_MODEL, e);
        }
    }

    /**
     * Splits {@code text} into sentences and extracts one
     * (subject, predicate, object) triplet per sentence.
     *
     * @param text free-form input text
     * @return one {@link TripletRelation} per detected sentence (possibly
     *         empty relations for sentences that could not be parsed)
     */
    public List<TripletRelation> extractRelationsFromText(String text) {
        List<TripletRelation> relations = new ArrayList<>();
        for (String sentence : sentenceDetector.detectSentencesIn(text)) {
            relations.add(extractRelationFromSentence(sentence));
        }
        return relations;
    }

    /**
     * Extracts a (subject, predicate, object) triplet from one sentence.
     * Returns an empty {@link TripletRelation} when no parse is available.
     */
    public TripletRelation extractRelationFromSentence(String sentence) {
        Parse p = parseSentence(sentence);
        if (p == null) {
            System.out.println("no valid parse from parseSentence");
            return new TripletRelation();
        }
        return new TripletRelation(getSubject(p), getPredicate(p), getObject(p));
    }

    /** Returns the single best parse of {@code sentence}, or {@code null} if none. */
    public Parse parseSentence(String sentence) {
        Parse[] topParses = ParserTool.parseLine(sentence, parser, 1);
        return topParses.length == 0 ? null : topParses[0];
    }

    // TODO add possibility of multiple Ss and PP

    /** First NN* token under the sentence's first NP child, or "" if absent. */
    public static String getSubject(final Parse parse) {
        if (parse.getType().equals(LABEL_TOP)) {
            return getSubject(parse.getChildren()[0]);
        }
        if (parse.getType().equals(LABEL_SENTENCE)) {
            for (Parse child : parse.getChildren()) {
                if (child.getType().equals(LABEL_NOUN_PHRASE)) {
                    return getSubject(child);
                }
            }
        }
        if (parse.getType().equals(LABEL_NOUN_PHRASE)) {
            return getFirstOccurenceForType(parse, LABEL_NAME_PREFIX);
        }
        return "";
    }

    /** First VB* token under the sentence's first VP child, or "" if absent. */
    public static String getPredicate(final Parse parse) {
        if (parse.getType().equals(LABEL_TOP)) {
            return getPredicate(parse.getChildren()[0]);
        }
        if (parse.getType().equals(LABEL_SENTENCE)) {
            for (Parse child : parse.getChildren()) {
                if (child.getType().equals(LABEL_VERBAL_PHRASE)) {
                    return getPredicate(child);
                }
            }
            return "";
        }
        if (parse.getType().equals(LABEL_VERBAL_PHRASE)) {
            return getFirstOccurenceForType(parse, LABEL_VERB_PREFIX);
        }
        return "";
    }

    /**
     * First NN* token inside a VP child of the sentence, or "". Unlike
     * {@link #getSubject}, keeps scanning VP children until a non-empty
     * match is found.
     */
    public static String getObject(final Parse parse) {
        String object = "";
        if (parse.getType().equals(LABEL_TOP)) {
            return getObject(parse.getChildren()[0]);
        }
        if (parse.getType().equals(LABEL_SENTENCE)) {
            for (Parse child : parse.getChildren()) {
                if (child.getType().equals(LABEL_VERBAL_PHRASE)) {
                    object = getObject(child);
                    if (!object.isEmpty()) {
                        return object;
                    }
                }
            }
            return object;
        }
        if (parse.getType().equals(LABEL_VERBAL_PHRASE)) {
            return getFirstOccurenceForType(parse, LABEL_NAME_PREFIX);
        }
        return object;
    }

    /**
     * Generalization of the getters above: first token matching
     * {@code lexical_cat} inside a {@code syntactic_cat} child of the sentence.
     */
    public static String getConstituent(final Parse parse, final String syntactic_cat,
            String lexical_cat) {
        String object = "";
        if (parse.getType().equals(LABEL_TOP)) {
            return getConstituent(parse.getChildren()[0], syntactic_cat, lexical_cat);
        }
        if (parse.getType().equals(LABEL_SENTENCE)) {
            for (Parse child : parse.getChildren()) {
                if (child.getType().equals(syntactic_cat)) {
                    object = getConstituent(child, syntactic_cat, lexical_cat);
                    if (!object.isEmpty()) {
                        return object;
                    }
                }
            }
            return object;
        }
        if (parse.getType().equals(syntactic_cat)) {
            return getFirstOccurenceForType(parse, lexical_cat);
        }
        return object;
    }

    /**
     * Finds the first token whose node type starts with {@code typePrefix}
     * (e.g. "NN" matches NN, NNS, NNP): checks the current node, then the
     * direct children, then recurses depth-first.
     * Uses {@link String#startsWith(String)} instead of the original
     * {@code substring(0, 2).equals(...)}, which silently never matched
     * prefixes longer than two characters passed via getConstituent.
     */
    private static String getFirstOccurenceForType(final Parse parse,
            final String typePrefix) {
        // TODO ADD PRP (pronouns are currently never matched)
        if (parse.getType().startsWith(typePrefix)) {
            Span span = parse.getSpan();
            return parse.getText().substring(span.getStart(), span.getEnd());
        }
        // check children (breadth)
        for (Parse child : parse.getChildren()) {
            if (child.getType().startsWith(typePrefix)) {
                Span span = child.getSpan();
                String text = child.getText().substring(span.getStart(), span.getEnd());
                if (!text.isEmpty()) {
                    return text;
                }
            }
        }
        // recursively check children (depth)
        for (Parse child : parse.getChildren()) {
            String text = getFirstOccurenceForType(child, typePrefix);
            if (!text.isEmpty()) {
                return text;
            }
        }
        return "";
    }
}
|
def min_product(a, b, x, y, n):
    """Minimum of a*b after decreasing a and b by at most n in total,
    subject to a >= x and b >= y (Codeforces 1409B "Minimum Product").

    Values already at or below their bound are left unchanged, which also
    generalizes the original's separate a<x / b<y branches.
    """
    def shrink(first, second, lo_first, lo_second, budget):
        # Greedily spend the budget on `first`, then the remainder on `second`.
        cut = max(0, min(budget, first - lo_first))
        first -= cut
        budget -= cut
        second -= max(0, min(budget, second - lo_second))
        return first * second

    # One of the two greedy orders (shrink a first, or b first) is optimal.
    return min(shrink(a, b, x, y, n), shrink(b, a, y, x, n))


if __name__ == "__main__":
    for _ in range(int(input())):
        a, b, x, y, n = map(int, input().split())
        print(min_product(a, b, x, y, n))

# Expected outputs for the reference sample:
# 70
# 77
# 177177
# 999999999000000000
# 999999999
# 55
# 10
|
<filename>pkg/profile/profile.go
package profile
// Profile represents a person's profile.
type Profile struct {
	// The ID of the profile.
	// NOTE: struct tags must be space-separated key:"value" pairs; the
	// original `json:"id", datastore:"-"` comma made the datastore tag
	// unparseable by reflect.StructTag (and is flagged by `go vet`).
	ID string `json:"id" datastore:"-"`
	// The name of the person, which is suitable for display.
	DisplayName string `json:"displayName"`
	// A representation of the individual components of a person's name.
	Name Name `json:"name"`
	// The email address.
	Email string `json:"email"`
	// The URL of the person's profile photo.
	ImageURL string `json:"imageUrl"`
	// A short biography for this person.
	AboutMe string `json:"aboutMe"`
}
// Name represents the individual components of a person's name.
// Fields serialize to JSON using the lowerCamelCase keys in the tags below.
type Name struct {
// The full name of this person.
Formatted string `json:"formatted"`
// The family name (last name) of this person.
FamilyName string `json:"familyName"`
// The given name (first name) of this person.
GivenName string `json:"givenName"`
}
|
// Interactive solution: reconstruct a hidden permutation a[1..n] by querying
// the judge (protocol handled in main()).
#include<bits/stdc++.h>
#define ll long long
using namespace std;
const ll MAXN = 1e5+5;
// a[pos]: recovered permutation value at position pos.
int a[MAXN];
// Send one query "? x y" and return the judge's integer reply.
// NOTE(review): the reply appears to be (value at x) mod (value at y) -- the
// logic in main() relies on u mod v == u when u < v; confirm against the
// problem statement.
int ask(int x,int y){
cout<<"? "<<x<<" "<<y<<endl;
int ret = 0;
cin>>ret;
return ret;
}
// vis[v] == 1 once value v has been assigned to some position.
int vis[MAXN];
// Repeatedly compare the two first remaining positions: the larger of the two
// cross replies equals the smaller of the two hidden values (since
// u mod v == u when u < v), which pins down one position per round.
int main()
{
int n;
cin>>n;
// A permutation of size 1 must be [1]; answer without any queries.
if(n==1){
cout<<"! 1"<<endl;
return 0;
}
// s holds the positions whose value is still unknown.
set<int>s;
s.clear();
for(int i=1;i<=n;i++){
s.insert(i);
}
while(s.size()!=1){
int k = *(s.begin());
int k2 = *(++s.begin());
int ret = ask(k,k2);
int ret2 = ask(k2,k);
// The larger reply identifies the position holding the smaller value;
// record that value and drop the position from the unknown set.
if(ret>ret2){
a[k] = ret;
vis[ret] = 1;
s.erase(s.begin());
}
else{
a[k2] = ret2;
vis[ret2] =1;
s.erase(++s.begin());
}
}
// The last remaining position holds the one value never produced as a
// reply: the single value not marked in vis[].
int ans;
for(int i=1;i<=n;i++){
if(!vis[i]){
ans = i;break;
}
}
a[*(s.begin())] = ans;
cout<<"!";
for(int i=1;i<=n;i++){
cout<<" "<<a[i];
}
cout<<endl;
}
|
#include<iostream>
#include<cstdio>
#include<cstring>
using namespace std;
typedef long long ll;
#define N 105
#define M 40020
// Fast getchar-based integer reader; skips non-digits, handles a leading '-'.
inline int read(){
int x=0,f=1;
char c=getchar();
while(c<'0'||c>'9'){
if(c=='-')f=-1;
c=getchar();
}
while(c>='0'&&c<='9'){
// x = x*10 + digit, written with shifts.
x=(x<<3)+(x<<1)+c-'0';
c=getchar();
}
return x*f;
}
int n,m,d;
// dp[i][j]: best total when row i's d-wide window starts at column j.
// s[i][j]: prefix sums of row i; pre/suf: running maxima of dp[i-1][*].
ll dp[N][M],pre[M],suf[M],ans,a[N][M],s[N][M],p[M];
// Segment tree over window start positions: range-add, range-max, lazy tags.
// NOTE(review): mx/tag are int while p[] holds long long -- confirm the DP
// values fit in int, otherwise build() truncates.
struct node{
int l,r,mx,tag;
// Lazily add d to this node's max and pending tag.
inline void Add(int d){
tag+=d;
mx+=d;
}
}tree[M<<2];
#define ls i<<1
#define rs i<<1|1
// Recompute a node's max from its two children.
inline void update(int i){
tree[i].mx=max(tree[ls].mx,tree[rs].mx);
}
// Push the pending lazy tag down to both children, then clear it.
inline void pushdown(int i){
if(tree[i].tag){
tree[ls].Add(tree[i].tag);
tree[rs].Add(tree[i].tag);
tree[i].tag=0;
}
}
// Build the tree over [l, r]; each leaf takes its initial value from p[].
void build(int i,int l,int r){
tree[i].l=l;
tree[i].r=r;
if(l==r){
// NOTE(review): narrowing ll -> int here; see the struct comment.
tree[i].mx=p[l];
return;
}
int mid=(l+r)>>1;
build(ls,l,mid);
build(rs,mid+1,r);
update(i);
}
// Range update: add d to every position in [l, r].
void Change(int i,int l,int r,int d){
if(tree[i].l>=l&&tree[i].r<=r){
tree[i].Add(d);
return;
}
pushdown(i);
int mid=(tree[i].l+tree[i].r)>>1;
if(l<=mid){
Change(ls,l,r,d);
}
if(r>mid){
Change(rs,l,r,d);
}
update(i);
}
// Range query: maximum over [l, r].
// NOTE(review): ans starts at 0, so if every value in the range is negative
// the result is clamped to 0 -- confirm queried maxima are non-negative here.
int Ask(int i,int l,int r){
if(tree[i].l>=l&&tree[i].r<=r){
return tree[i].mx;
}
pushdown(i);
int mid=(tree[i].l+tree[i].r)>>1;
int ans=0;
if(l<=mid){
ans=max(ans,Ask(ls,l,r));
}
if(r>mid){
ans=max(ans,Ask(rs,l,r));
}
return ans;
}
// DP over rows: pick one d-wide window per row pair, maximizing the covered
// sum; transitions distinguish overlapping windows (segment tree with
// incremental corrections) from distant ones (prefix/suffix maxima).
int main(){
n=read(),m=read(),d=read();
// s[i][j]: prefix sums of row i, for O(1) window sums.
for(int i=1;i<=n;i++){
for(int j=1;j<=m;j++){
a[i][j]=read();
s[i][j]=s[i][j-1]+a[i][j];
}
}
// Base case: the first window covers rows 1 and 2 starting at column i.
for(int i=1;i+d-1<=m;i++){
dp[1][i]=s[1][i+d-1]-s[1][i-1]+s[2][i+d-1]-s[2][i-1];
}
for(int i=2;i<=n;++i){
int up=m-d+1;
// pre/suf: best dp[i-1][*] to the left/right, for non-overlapping moves.
for(int j=1;j<=up;j++)pre[j]=max(pre[j-1],dp[i-1][j]);
for(int j=up;j>=1;j--)suf[j]=max(suf[j+1],dp[i-1][j]);
// Rebuild the tree; p[j] pre-subtracts the double-counted overlap for
// previous-row windows that overlap a window starting at column 1.
memset(tree,0,sizeof(tree));
memset(p,0,sizeof(p));
for(int j=1;j<=d;j++){
p[j]=dp[i-1][j]-(s[i][d]-s[i][j-1]);
}
for(int j=d+1;j<=m-d+1;++j){
p[j]=dp[i-1][j];
}
build(1,1,m-d+1);
for(int j=1;j<=m-d+1;++j){
// tmp: sum contributed by the window at (rows i, i+1), columns [j, j+d-1].
int tmp=s[i+1][j+d-1]-s[i+1][j-1]+s[i][j+d-1]-s[i][j-1];
// Slide the window right by one: adjust the overlap corrections.
if(j>1){
Change(1,max(1,j-d),j-1,a[i][j-1]);
Change(1,j,min(m-d+1,j+d-1),-a[i][j+d-1]);
}
// Overlapping transition (|j - j'| < d) via the segment tree...
dp[i][j]=Ask(1,max(1,j-d+1),min(j+d-1,m-d+1))+tmp;
// ...and non-overlapping transitions via prefix/suffix maxima.
if(j-d>=1)dp[i][j]=max(dp[i][j],pre[j-d]+tmp);
if(j+d<=m)dp[i][j]=max(dp[i][j],suf[j+d]+tmp);
}
}
for(int i=1;i<=m-d+1;i++){
ans=max(ans,dp[n][i]);
}
cout<<ans<<endl;
return 0;
}
|
Male Gray Short-Tailed Opossums (Monodelphis Domestica) Receive Penile Intromissions When Treated with Estrogen and Progesterone in Adulthood
Following treatment with estradiol and progesterone, gonadectomized male as well as female gray opossums received penile intromissions from intact stimulus males. Intromission was possible in male gray opossums because, like marsupials of both sexes, they possess a single cloaca-like anogenital opening. All subjects that allowed intromission showed anogenital dragging just prior to intromission. While intromission latency was similar in tests involving male and female subjects, total intromission duration was longer in tests involving male subjects than in tests involving female subjects, and sex locks were seen only in tests involving female subjects. These findings are discussed with respect to the potential usefulness of gray opossums for studying the effects of peripheral vs central factors on the display of sex differences in behavior. |
import React, { useState } from "react";
import { Link, useHistory } from "react-router-dom";
import { useTranslation } from "react-i18next";
import {
Alert,
AlertVariant,
Button,
PageSection,
ToolbarItem,
} from "@patternfly/react-core";
import {
ExpandableRowContent,
TableComposable,
Tbody,
Td,
Th,
Thead,
Tr,
} from "@patternfly/react-table";
import type ResourceServerRepresentation from "@keycloak/keycloak-admin-client/lib/defs/resourceServerRepresentation";
import type ResourceRepresentation from "@keycloak/keycloak-admin-client/lib/defs/resourceRepresentation";
import { KeycloakSpinner } from "../../components/keycloak-spinner/KeycloakSpinner";
import { useConfirmDialog } from "../../components/confirm-dialog/ConfirmDialog";
import { PaginatingTableToolbar } from "../../components/table-toolbar/PaginatingTableToolbar";
import { useAdminClient, useFetch } from "../../context/auth/AdminClient";
import { useAlerts } from "../../components/alert/Alerts";
import { DetailCell } from "./DetailCell";
import { toCreateResource } from "../routes/NewResource";
import { useRealm } from "../../context/realm-context/RealmContext";
import { toResourceDetails } from "../routes/Resource";
import { MoreLabel } from "./MoreLabel";
import { toNewPermission } from "../routes/NewPermission";
// Props for AuthorizationResources: the id of the client whose authorization
// resources are listed.
type ResourcesProps = {
clientId: string;
};
// ResourceRepresentation extended with local UI state for the expandable
// table rows (not part of the server payload).
type ExpandableResourceRepresentation = ResourceRepresentation & {
isExpanded: boolean;
};
/**
 * Lists a client's authorization resources in a paginated, expandable table,
 * with per-row delete (guarded by a confirmation dialog that warns about
 * attached permissions) and a shortcut to create a resource-type permission.
 */
export const AuthorizationResources = ({ clientId }: ResourcesProps) => {
  const { t } = useTranslation("clients");
  const history = useHistory();
  const adminClient = useAdminClient();
  const { addAlert, addError } = useAlerts();
  const { realm } = useRealm();

  // Current page of resources, each carrying its own expansion state.
  const [resources, setResources] =
    useState<ExpandableResourceRepresentation[]>();
  // Resource pending deletion plus its attached permissions (displayed as a
  // warning inside the delete confirmation dialog).
  const [selectedResource, setSelectedResource] =
    useState<ResourceRepresentation>();
  const [permissions, setPermission] =
    useState<ResourceServerRepresentation[]>();

  // Bumping `key` re-triggers the useFetch below.
  const [key, setKey] = useState(0);
  const refresh = () => setKey(key + 1);

  const [max, setMax] = useState(10);
  const [first, setFirst] = useState(0);

  // NOTE(review): first/max are not in the dependency list, so page changes
  // may not refetch unless useFetch tracks them internally — confirm.
  useFetch(
    () => {
      const params = {
        first,
        max,
        deep: false,
      };
      return adminClient.clients.listResources({
        ...params,
        id: clientId,
      });
    },
    (resources) =>
      setResources(
        resources.map((resource) => ({ ...resource, isExpanded: false }))
      ),
    [key]
  );

  // Shows the first URI plus a "+n" label for the rest.
  const UriRenderer = ({ row }: { row: ResourceRepresentation }) => (
    <>
      {row.uris?.[0]} <MoreLabel array={row.uris} />
    </>
  );

  const fetchPermissions = async (id: string) => {
    return adminClient.clients.listPermissionsByResource({
      id: clientId,
      resourceId: id,
    });
  };

  const [toggleDeleteDialog, DeleteConfirm] = useConfirmDialog({
    titleKey: "clients:deleteResource",
    children: (
      <>
        {t("deleteResourceConfirm")}
        {/* Coerced to boolean: `permissions?.length && ...` would render a
            literal "0" when the permissions array is empty. */}
        {!!permissions?.length && (
          <Alert
            variant="warning"
            isInline
            isPlain
            title={t("deleteResourceWarning")}
            className="pf-u-pt-lg"
          >
            <p className="pf-u-pt-xs">
              {permissions.map((permission) => (
                <strong key={permission.id} className="pf-u-pr-md">
                  {permission.name}
                </strong>
              ))}
            </p>
          </Alert>
        )}
      </>
    ),
    continueButtonLabel: "clients:confirm",
    onConfirm: async () => {
      try {
        await adminClient.clients.delResource({
          id: clientId,
          resourceId: selectedResource?._id!,
        });
        addAlert(t("resourceDeletedSuccess"), AlertVariant.success);
        refresh();
      } catch (error) {
        addError("clients:resourceDeletedError", error);
      }
    },
  });

  if (!resources) {
    return <KeycloakSpinner />;
  }

  return (
    <PageSection variant="light" className="pf-u-p-0">
      <DeleteConfirm />
      <PaginatingTableToolbar
        count={resources.length}
        first={first}
        max={max}
        onNextClick={setFirst}
        onPreviousClick={setFirst}
        onPerPageSelect={(first, max) => {
          setFirst(first);
          setMax(max);
        }}
        toolbarItem={
          <ToolbarItem>
            <Button
              data-testid="createResource"
              component={(props) => (
                <Link
                  {...props}
                  to={toCreateResource({ realm, id: clientId })}
                />
              )}
            >
              {t("createResource")}
            </Button>
          </ToolbarItem>
        }
      >
        <TableComposable aria-label={t("resources")} variant="compact">
          <Thead>
            <Tr>
              <Th />
              <Th>{t("common:name")}</Th>
              <Th>{t("common:type")}</Th>
              <Th>{t("owner")}</Th>
              <Th>{t("uris")}</Th>
              <Th />
            </Tr>
          </Thead>
          {resources.map((resource, rowIndex) => (
            <Tbody key={resource._id} isExpanded={resource.isExpanded}>
              <Tr>
                <Td
                  expand={{
                    rowIndex,
                    isExpanded: resource.isExpanded,
                    onToggle: (_, rowIndex) => {
                      const rows = resources.map((resource, index) =>
                        index === rowIndex
                          ? { ...resource, isExpanded: !resource.isExpanded }
                          : resource
                      );
                      setResources(rows);
                    },
                  }}
                />
                <Td data-testid={`name-column-${resource.name}`}>
                  <Link
                    to={toResourceDetails({
                      realm,
                      id: clientId,
                      resourceId: resource._id!,
                    })}
                  >
                    {resource.name}
                  </Link>
                </Td>
                <Td>{resource.type}</Td>
                <Td>{resource.owner?.name}</Td>
                <Td>
                  <UriRenderer row={resource} />
                </Td>
                <Td
                  actions={{
                    items: [
                      {
                        title: t("common:delete"),
                        onClick: async () => {
                          setSelectedResource(resource);
                          setPermission(await fetchPermissions(resource._id!));
                          toggleDeleteDialog();
                        },
                      },
                      {
                        title: t("createPermission"),
                        className: "pf-m-link",
                        isOutsideDropdown: true,
                        onClick: () =>
                          history.push(
                            toNewPermission({
                              realm,
                              id: clientId,
                              permissionType: "resource",
                            })
                          ),
                      },
                    ],
                  }}
                />
              </Tr>
              {/* NOTE(review): the header declares 6 columns but this detail
                  cell spans 5 — confirm whether colSpan should be 6. */}
              <Tr
                key={`child-${resource._id}`}
                isExpanded={resource.isExpanded}
              >
                <Td colSpan={5}>
                  <ExpandableRowContent>
                    {resource.isExpanded && (
                      <DetailCell
                        clientId={clientId}
                        id={resource._id!}
                        uris={resource.uris}
                      />
                    )}
                  </ExpandableRowContent>
                </Td>
              </Tr>
            </Tbody>
          ))}
        </TableComposable>
      </PaginatingTableToolbar>
    </PageSection>
  );
};
|
<gh_stars>0
from keras.models import Sequential, Model
from keras.layers import Flatten, Dense, Dropout
from keras.layers import GRU, LSTM, SimpleRNN, Input
from keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD
import cv2, numpy as np
def get_symbol(input_shape, weights_path=None):
    """Build the SimpleRNN -> Dense softmax classifier.

    Bug fixed: the original referenced an undefined name ``model`` when
    ``weights_path`` was given (NameError); the Model is now constructed
    first and the weights loaded onto it before returning.

    Args:
        input_shape: shape of one input sample (excluding batch dimension).
        weights_path: optional path to a saved weights file to load.

    Returns:
        A compiled-ready keras ``Model`` mapping the input to 2-way softmax.
    """
    input2 = Input(shape=input_shape)
    rnn = SimpleRNN(units=64, dropout=0.2, return_sequences=True)(input2)
    flatr = Flatten()(rnn)
    densr = Dense(1024, activation='relu')(flatr)
    dropr = Dropout(0.5)(densr)
    out = Dense(2, activation='softmax')(dropr)
    model = Model(input2, out)
    if weights_path:
        model.load_weights(weights_path)
    return model
def rc():
    """Return the constant tag string "r"."""
    tag = "r"
    return tag
|
def format_hyperparameters(dict_of_hyperparameter_lists,
                           add_save_paths=True,
                           main_save_path='',
                           append_keys=None):
    """Expand a dict of hyperparameter lists into one JSON string per
    hyperparameter combination, optionally adding per-run folder names.

    Bug fixed: ``append_keys`` used a mutable default argument (``[]``), so
    keys appended during one call leaked into every later call. A ``None``
    sentinel is used instead; callers passing a list are unaffected.

    NOTE(review): indentation was mangled in the source; the save-path steps
    below are assumed to apply only when ``add_save_paths`` is True — confirm.
    """
    if append_keys is None:
        append_keys = []
    hyperparameters_as_list = explode_to_list(dict_of_hyperparameter_lists)
    hyperparameters_df = pd.DataFrame(hyperparameters_as_list)
    multi_value_keys = []
    selected_keys = multi_value_keys
    if add_save_paths:
        # Keys taking more than one value distinguish runs, so they become
        # part of the per-run folder name.
        for key, value in sorted(dict_of_hyperparameter_lists.items()):
            if len(value) > 1:
                multi_value_keys.append(key)
        selected_keys = multi_value_keys
        print('multi value keys: ', selected_keys)
        print('specified keys to append: ', append_keys)
        for key in append_keys:
            if key not in selected_keys:
                selected_keys.append(key)
        hyperparameters_df = add_folder_names(hyperparameters_df, selected_keys,
                                              main_save_path=main_save_path)
    hyperparameters_as_json_list = []
    for row_ix in range(len(hyperparameters_df)):
        hyperparameters_as_json_list.append(hyperparameters_df.iloc[row_ix].to_json())
    return hyperparameters_as_json_list
package ta
import (
"errors"
"reflect"
"testing"
)
// TestMean runs table-driven cases over Mean, including the empty slice
// (expected to yield 0) and a mean with a repeating decimal.
func TestMean(t *testing.T) {
var testCases = []struct {
msg string
values []float64
expMean float64
}{
{"test zero data point",
[]float64{},
0,
},
{"test simple values",
[]float64{2, 3},
2.5,
},
{"test 10 values",
[]float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
4.5,
},
{"test float values",
[]float64{2, 3, 5},
3.3333333333333335,
},
}
for _, tc := range testCases {
mean := Mean(tc.values)
// Exact float comparison is intentional: expectations are the exact
// IEEE-754 results of the computation.
if mean != tc.expMean {
t.Errorf("%v Mean(%v): \nexpected %v, \nactual %v",
tc.msg, tc.values, tc.expMean, mean)
}
}
}
// TestSMA runs table-driven cases over SMA, covering the error paths (no
// values; fewer values than the period) and two happy paths.
// Error equality relies on reflect.DeepEqual of errors.New values, which
// compares the message strings.
func TestSMA(t *testing.T) {
var testCases = []struct {
msg string
values []float64
period int
expSMA []float64
expErr error
}{
{"test zero values",
[]float64{},
0,
nil,
errors.New("no values given"),
},
{"test length of values less than period ",
[]float64{0, 1, 2},
5,
nil,
errors.New("invalid length of values, given 3, needs 5"),
},
{"test simple values",
[]float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
5,
[]float64{2, 3, 4, 5, 6, 7},
nil,
},
{"test complex values",
[]float64{0, 0.5, 1, 1.5, 2},
3,
[]float64{0.5, 1, 1.5},
nil,
},
}
for _, tc := range testCases {
sma, err := SMA(tc.values, tc.period)
if (!reflect.DeepEqual(sma, tc.expSMA)) || (!reflect.DeepEqual(err, tc.expErr)) {
t.Errorf("%v SMA(%v, %v): \nexpected %#v %v, \nactual %#v %v",
tc.msg, tc.values, tc.period, tc.expSMA, tc.expErr, sma, err)
}
}
}
// TestEMA runs table-driven cases over EMA, mirroring TestSMA's error paths.
// Expected values are exact IEEE-754 results of the implementation's
// recurrence; error equality relies on reflect.DeepEqual of errors.New.
func TestEMA(t *testing.T) {
var testCases = []struct {
msg string
values []float64
period int
expEMA []float64
expErr error
}{
{"test zero values",
[]float64{},
0,
nil,
errors.New("no values given"),
},
{"test length of values less than period ",
[]float64{0, 1, 2},
5,
nil,
errors.New("invalid length of values, given 3, needs 5"),
},
{"test simple values",
[]float64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
5,
[]float64{2, 3, 4, 5, 6, 7},
nil,
},
{"test complex values",
[]float64{26.0, 54.0, 8.0, 77.0, 61.0, 39.0, 44.0, 91.0, 98.0, 17.0},
5,
[]float64{45.2, 43.13333333333333, 43.422222222222224, 59.28148148148148, 72.18765432098765, 53.7917695473251},
nil,
},
}
for _, tc := range testCases {
ema, err := EMA(tc.values, tc.period)
if (!reflect.DeepEqual(ema, tc.expEMA)) || (!reflect.DeepEqual(err, tc.expErr)) {
t.Errorf("%v EMA(%v, %v): \nexpected %#v %v, \nactual %#v %v",
tc.msg, tc.values, tc.period, tc.expEMA, tc.expErr, ema, err)
}
}
}
|
<reponame>IusztinPaul/portfolio-management<gh_stars>1-10
from .misc import *
from .time import *
from .logger import *
|
<filename>src/interfaces/index.ts
export { IController } from './base-controller'
export { Class } from './class'
export { Controller as ControllerInterface } from './controller'
export { ExceptionFilter, ExceptionFilterMetadata } from './exception-filter'
export { Middleware as MiddlewareInterface } from './middleware'
export { Redirect as RedirectInterface } from './redirect'
export { RequestMiddleware } from './request-middleware'
export { Route } from './route'
export { Type } from './type'
export { RequestMetadata } from './request-metadata'
export { RequestHandler } from './http-server'
|
Women in gambling studies: a poststructural analysis
Abstract Background Gambling studies literature is a space where discourses call objects such as ‘gambling harm reduction’ and ‘women harmed by gambling’ into being and give them status as namable and describable. Methodology and methods A poststructural feminist analysis of the positioning of women who gamble in gambling studies literature was carried out to explore possibilities and constraints for gambling harm reduction practices. Gambling studies literature was accessed to enable a range of historically emerging framings of women’s gambling practices and harm to be brought to light. Discourse analysis drew on key concepts (discourse, subjectivity, power/knowledge) and principles (reversal, discontinuity, specificity and exteriority) developed by Michel Foucault. Findings Gambling studies have constructed women as: ‘risky gamblers’, ‘vulnerable women’, and as subject to gendered socio-cultural determinants of gambling and harm. Dominant conceptualisations of women in gambling studies tend to bring individual women who gamble into focus, obscuring the social, governmental and commercial determinants of gambling and harm, and often reproducing some unhelpful gender stereotypes in the process. Conclusions Holistic, environmental and Indigenous women’s health discourses have the potential to shift gambling harm reduction from a health services approach to one that is focused on supporting community wellbeing. This includes women’s co-production and ownership of harm reduction strategies. It is concluded that opportunities for women’s gambling harm reduction may be found in critical psychology, and/or approaches which emphasize coherent and critical gender analysis, collaborative action, community development and client-led practice. |
<gh_stars>0
#[diplomat::bridge]
mod ffi {
/// Singly linked list of borrowed `i32` references, all tied to one
/// lifetime; each node owns its successor via `Box`.
struct RefList<'a> {
/// Borrowed payload; the list never owns its data.
data: &'a i32,
/// Next node, or `None` at the end of the list.
next: Option<Box<Self>>,
}
impl<'b> RefList<'b> {
/// Creates a single, unlinked node wrapping `data`.
pub fn node(data: &'b i32) -> Self {
RefList { data, next: None }
}
/// Appends `other` at the end of the list by walking `next` recursively.
pub fn extend(&mut self, other: Self) {
match self.next.as_mut() {
Some(tail) => tail.extend(other),
None => self.next = Some(Box::new(other)),
}
}
}
}
|
def select(self):
    """Select the number of HMM states minimizing the Bayesian Information
    Criterion, BIC = -2 * logL + p * logN, and return a model built with it.

    Bug fixed: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``. Falls back to
    ``self.n_constant`` states when every candidate fails.
    """
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    best_score = float('inf')
    best_num_components = self.n_constant
    for n in range(self.min_n_components, self.max_n_components + 1):
        try:
            hmm_model = self.base_model(n)
            logL = hmm_model.score(self.X, self.lengths)
            # Free-parameter count.
            # NOTE(review): `1 * n` looks like a typo for `2 * n` (means and
            # variances per state per feature) -- confirm before changing.
            p = n ** 2 + 1 * n * len(self.X[0]) - 1
            # NOTE(review): N is taken as len(self.lengths) (number of
            # sequences), not the number of frames -- confirm intent.
            logN = math.log(len(self.lengths))
            bic_score = - 2 * logL + p * logN
            # Lower BIC is better.
            if bic_score < best_score:
                best_score = bic_score
                best_num_components = n
        except Exception:
            # Model fitting/scoring can fail for some state counts; skip them.
            if self.verbose:
                print("failure on {} with {} states".format(self.this_word, n))
    if self.verbose:
        print("Final model created for {} with {} states".format(self.this_word, best_num_components))
    return self.base_model(best_num_components)
A layered lithium nickel manganese oxide as environmentally friendly cathode material for secondary batteries
Cobalt-free cathode material development is considered a necessity to assure the sustainability of Li-ion batteries. Cobalt is always considered expensive and unsafe for both humans and the environment. LiNi0.5Mn0.5O2 (LNMO) is a layer-structured cathode material that has similar features to LiCoO2 (LCO). A simple and fast processing of LNMO is proposed. A precipitate of nickel manganese oxalate was obtained in a batch reactor under atmospheric conditions. The as-obtained homogeneous oxalate precursor was converted to LNMO via high-temperature lithiation. Based on the XRD result, a crystalline product with a layered structure is successfully obtained. The presence of impurities such as residual Li can be detected from the FTIR spectra. SEM images confirmed quasi-spherical particles with a grain size of less than 10 micrometers. The charge-discharge analysis of the LNMO-containing cell delivered a capacity of 42 mAh/g. In spite of these promising results, continuous improvement is necessary to obtain a cell with better electrochemical performance. |
<gh_stars>1-10
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2015 Oracle and/or its affiliates. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can
* obtain a copy of the License at
* http://glassfish.java.net/public/CDDL+GPL_1_1.html
* or packager/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at packager/legal/LICENSE.txt.
*
* GPL Classpath Exception:
* Oracle designates this particular file as subject to the "Classpath"
* exception as provided by Oracle in the GPL Version 2 section of the License
* file that accompanied this code.
*
* Modifications:
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
* "Portions Copyright [year] [name of copyright owner]"
*
* Contributor(s):
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package javax.security.identitystore;
import java.util.ArrayList;
import java.util.List;
import static java.util.Collections.unmodifiableList;
import static javax.security.identitystore.CredentialValidationResult.Status.*;
/**
* <code>CredentialValidationResult</code> is the result from an attempt to
* validate an instance of
* {@link javax.security.identitystore.credential.Credential}.
*
* @see javax.security.identitystore.IdentityStore#validate
*/
public class CredentialValidationResult {

    /** Shared immutable result for a credential that failed validation. */
    public static final CredentialValidationResult INVALID_RESULT =
            new CredentialValidationResult(Status.INVALID, null, null, null);

    /** Shared immutable result for a credential that was never validated. */
    public static final CredentialValidationResult NOT_VALIDATED_RESULT =
            new CredentialValidationResult(Status.NOT_VALIDATED, null, null, null);

    private final String callerName;
    private final Status status;
    private final List<String> roles;
    private final List<String> groups;

    /** Outcome of a credential validation attempt. */
    public enum Status {
        /**
         * Indicates that the credential could not be validated, for example, if
         * no suitable
         * {@link javax.security.identitystore.credential.CredentialValidator}
         * could be found.
         */
        NOT_VALIDATED,
        /**
         * Indicates that the credential is not valid after a validation
         * attempt.
         */
        INVALID,
        /**
         * Indicates that the credential is valid after a validation attempt.
         */
        VALID
    }

    /**
     * Convenience constructor without roles.
     *
     * @param status Validation status
     * @param callerName Validated caller
     * @param groups Groups associated with the caller from the identity store
     */
    public CredentialValidationResult(Status status, String callerName, List<String> groups) {
        this(status, callerName, groups, null);
    }

    /**
     * Constructor.
     *
     * @param status Validation status
     * @param callerName Validated caller
     * @param groups Groups associated with the caller from the identity store
     * @param roles Roles associated with the caller from the identity store
     * @throws NullPointerException if {@code status} is null
     */
    public CredentialValidationResult(Status status, String callerName, List<String> groups, List<String> roles) {
        this.status = Objects.requireNonNull(status, "status");
        this.callerName = callerName;
        if (status == Status.VALID) {
            // Defensive unmodifiable copies: callers cannot mutate this
            // result after construction.
            this.groups = groups == null ? null : Collections.unmodifiableList(new ArrayList<>(groups));
            this.roles = roles == null ? null : Collections.unmodifiableList(new ArrayList<>(roles));
        } else {
            // Group/role information is only meaningful for a VALID result.
            this.groups = null;
            this.roles = null;
        }
    }

    /**
     * Determines the validation status.
     *
     * @return The validation status
     */
    public Status getStatus() {
        return status;
    }

    /**
     * Determines the caller used to validate the credential.
     *
     * @return The caller name as supplied at construction; typically
     *         {@code null} unless {@link #getStatus} returns
     *         {@link Status#VALID VALID}.
     */
    public String getCallerName() {
        return callerName;
    }

    /**
     * Determines the list of groups that the specified caller is in, based on
     * the associated persistence store.
     *
     * @return The unmodifiable list of groups that the specified caller is in,
     *         empty if none. {@code null} if {@link #getStatus} does not
     *         return {@link Status#VALID VALID} or if the identity store does
     *         not support groups.
     */
    public List<String> getCallerGroups() {
        return groups;
    }

    /**
     * Determines the list of roles that the specified caller is in, based on
     * the associated persistence store. The returned role list would include
     * roles directly assigned to the caller, and roles assigned to groups
     * which contain the caller.
     *
     * @return The unmodifiable list of roles that the specified caller is in,
     *         empty if none. {@code null} if {@link #getStatus} does not
     *         return {@link Status#VALID VALID} or if the identity store does
     *         not support roles.
     */
    public List<String> getCallerRoles() {
        return roles;
    }
}
|
/**
* Util methods for conversion to\from {@link IDataSet}.
*/
final class DataSetConverters {
private DataSetConverters() {}
/** Invokes {@link #toDataSet(Workbook)} with new {@link Workbook}. */
static IDataSet toDataSet(final InputStream workbook) {
return toDataSet(ConverterUtils.newWorkbook(workbook));
}
/**
* Converts a {@link Workbook} to new {@link IDataSet}.
* Ignores empty rows.
*
* @throws {@link CalculationEngineException} if {@link Workbook} contains formulas or Cell references.
*/
static IDataSet toDataSet(final Workbook workbook) {
Sheet sheet = workbook.getSheetAt(0); //TODO: this works only for single sheet documents
DataSet dataSet = new DataSet(sheet.getSheetName());
for (int i = sheet.getFirstRowNum(); i <= sheet.getLastRowNum(); i++) {
IDsRow dsRow = dataSet.addRow();
Row row = sheet.getRow(i);
for (int j = row.getFirstCellNum(); j < row.getLastCellNum(); j++) {
Cell wbCell = row.getCell(j);
if (wbCell != null && wbCell.getCellType() == Cell.CELL_TYPE_FORMULA) {
throw new CalculationEngineException("DataSet should not contain formulas");
}
IDsCell cell = dsRow.addCell();
cell.setValue(ConverterUtils.resolveCellValue(wbCell));
}
}
return dataSet;
}
/**
* Converts an {@link IDataModel} to a new {@link IDataSet}.
* Ignores empty rows.
* Uses {@link IDmCell#getValue()} for {@link IDsCell} values.
*/
static IDataSet toDataSet(final IDataModel dataModel) {
if (dataModel == null) { return null; }
IDataSet dataSet = new DataSet(dataModel.getName());
if (dataModel.rowCount() == 0) { return dataSet; }
for (IDmRow dmRow : dataModel) {
IDsRow dsRow = dataSet.addRow();
for (IDmCell dmCell : dmRow) {
IDsCell dsCell = dsRow.addCell(dmCell.getAddress().a1Address().column());
if (dmCell.getValue().isPresent()) {
dsCell.setValue(dmCell.getValue().get());
}
}
}
return dataSet;
}
/**
* Converts {@link IDataSet} to new {@link Workbook},
* then writes this Workbook to new {@link ByteArrayOutputStream}.
*/
static OutputStream toXlsxFile(final IDataSet dataSet) {
ByteArrayOutputStream xlsx = new ByteArrayOutputStream();
try { toWorkbook(dataSet, (Workbook) null).write(xlsx); }
catch (IOException e) { throw new CalculationEngineException(e); }
return xlsx;
}
/**
* Converts {@link IDataSet} to {@link Workbook},
* then writes this Workbook to new {@link ByteArrayOutputStream}.
* Middle-state {@link Workbook} is created from @param formatting.
*/
static OutputStream toXlsxFile(final IDataSet dataSet, final InputStream formatting) {
ByteArrayOutputStream xlsx = new ByteArrayOutputStream();
try { toWorkbook(dataSet, ConverterUtils.newWorkbook(formatting)).write(xlsx); }
catch (IOException e) { throw new CalculationEngineException(e); }
return xlsx;
}
/** Converts {@link IDataSet} to new {@link Workbook}. */
static Workbook toWorkbook(final IDataSet dataSet) {
return toWorkbook(dataSet, (Workbook) null);
}
/**
* Converts {@link IDataSet} to {@link Workbook}.
* The result {@link Workbook} is created from @param formatting.
*/
static Workbook toWorkbook(final IDataSet dataSet, final Workbook formatting) {
Workbook result = formatting == null ? ConverterUtils.newWorkbook() : ConverterUtils.clearContent(formatting);
Sheet sheet = result.createSheet(dataSet.getName());
for (IDsRow row : dataSet) {
Row wbRow = sheet.createRow(row.index() - 1);
for (IDsCell cell : row) {
Cell wbCell = wbRow.createCell(cell.index() - 1);
ConverterUtils.populateCellValue(wbCell, cell.getValue());
}
}
return result;
}
} |
Composite long rod — the solution for future line insulation in China
A composite long rod insulator produced in the authors' laboratory is discussed. The insulator is made of porcelain and silicone rubber. Test results on the electrical and mechanical properties are reported. The insulator has many advantages, such as light weight, ease of manufacture, high mechanical strength, very good antipollution performance, no risk of internal puncture, and reduced need for service safeguarding. These advantages become more obvious in EHV and UHV lines.
def map_fold_change_from_exp(self, exp_obj):
    """Collect fold-change results for one experiment.

    Returns a dict-of-dicts keyed by experiment id, then by rat entrez gene id,
    where each leaf holds the log2 fold change and the measurement identifier.
    Returns None (and logs an error) when the experiment has no loaded results.
    """
    assert isinstance(exp_obj, Experiment)
    results = FoldChangeResult.objects.filter(experiment=exp_obj)
    if not results:
        logger.error('No results loaded for experiment %s, aborting', exp_obj.id)
        return None
    fc_data = collections.defaultdict(dict)
    for result in results:
        entrez_id = result.gene_identifier.gene.rat_entrez_gene
        fc_data[exp_obj.id][entrez_id] = {
            'log2_fc': float(result.log2_fc),
            'identifier': result.gene_identifier.gene_identifier,
        }
    return fc_data
/** UI-Router state name for the configuration groups page. */
export const ConfigGroupsStateName: string = 'app.groups';
/** Controller backing the configuration groups page. */
class ConfigGroupsController implements ng.IController {
    public $onInit() { }

    // Whether the details panel is currently shown.
    public details: boolean;

    constructor(
        private $window: ng.IWindowService,
    ) {
        "ngInject";
    }

    /** Retry handler: navigate back in browser history. */
    public onRetry() {
        this.$window.history.back();
    }
}
/**
 * Registers the groups state with the router.
 * Query parameters carry selection/edit state; reloadOnSearch is disabled so
 * changing them does not re-instantiate the controller.
 */
function configureConfigGroupsRoute(
    $injector: angular.auto.IInjectorService,
    $stateProvider: pip.rest.IAuthStateService
) {
    "ngInject";

    $stateProvider
        .state(ConfigGroupsStateName, {
            url: '/groups?group_id&edit&status&details',
            reloadOnSearch: false,
            controller: ConfigGroupsController,
            auth: true,
            controllerAs: '$ctrl',
            templateUrl: 'config/groups/Groups.html'
        });
}
/**
 * Registers access control for the groups state: the state itself and the
 * add/edit/delete group actions all require the manager role.
 */
function configureConfigGroupsAccess(
    iqsAccessConfigProvider: iqs.shell.IAccessConfigProvider
) {
    "ngInject";

    let accessLevel: number = iqs.shell.AccessRole.manager;
    let accessConfig: any = {
        addGroup: iqs.shell.AccessRole.manager,
        editGroup: iqs.shell.AccessRole.manager,
        deleteGroup: iqs.shell.AccessRole.manager
    };  // was missing its terminating semicolon

    iqsAccessConfigProvider.registerStateAccess(ConfigGroupsStateName, accessLevel);
    iqsAccessConfigProvider.registerStateConfigure(ConfigGroupsStateName, accessConfig);
}
// Module registration: wires the route and access configuration into Angular.
(() => {
    angular
        .module('iqsConfigGroups', [
            'pipNav', 'iqsObjectGroups.ViewModel',
            'iqsAccessConfig',
            'iqsObjectGroups.Data',
            'iqsConfigGroupsPanel'
        ])
        .config(configureConfigGroupsRoute)
        .config(configureConfigGroupsAccess);
})();
|
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.onvoid.webrtc;
/**
* The bundle policy affects which media tracks are negotiated if the remote
* endpoint is not bundle-aware, and what ICE candidates are gathered. If the
* remote endpoint is bundle-aware, all media tracks and data channels are
* bundled onto the same transport.
*
* @author <NAME>
*/
public enum RTCBundlePolicy {

    /**
     * Gather ICE candidates for each media type in use (audio, video, and
     * data). If the remote endpoint is not bundle-aware, negotiate only one
     * audio and video track on separate transports.
     */
    BALANCED,

    /**
     * Gather ICE candidates for only one track. If the remote endpoint is not
     * bundle-aware, negotiate only one media track.
     */
    MAX_BUNDLE,

    /**
     * Gather ICE candidates for each track. If the remote endpoint is not
     * bundle-aware, negotiate all media tracks on separate transports.
     */
    MAX_COMPAT
}
|
import java.io.*;
import java.util.*;
public class Solution {
    static Scanner sc=new Scanner(System.in);
    static PrintWriter out=new PrintWriter(System.out);

    // Trial-division primality test, O(sqrt(x)). Assumes x >= 2.
    static boolean isPrime(long x) {
        for(long i=2;i*i<=x;i++) if(x%i==0) return false;
        return true;
    }

    // sieve[i] == 0 means i is prime; 0 and 1 are marked composite in main().
    static int N=1000000,sieve[]=new int[N+1];

    // Expresses n as a sum of at most three primes:
    //   n <= 5        -> printed as-is (assumes the input guarantees this case is valid — TODO confirm)
    //   otherwise     -> pick a prime val <= n-4, then split the remainder n-val
    //                    into two primes via the sieve (Goldbach-style pair search).
    public static void main(String args[]) {
        // Sieve of Eratosthenes over [0, N].
        sieve[0]=1;
        sieve[1]=1;
        for(int i=2;i*i<=N;i++) {
            if(sieve[i]==0) {
                for(int j=i*i;j<=N;j+=i) sieve[j]=1;
            }
        }
        int test=1;
        while(test-->0) {
            long n=sc.nextLong();
            if(n<=5) {
                out.println(1);
                out.println(n);
                continue;
            }
            // Search downward from n-4 in steps of 2 for a prime val.
            // NOTE(review): when n is even, every candidate is even, so the loop
            // walks all the way down to val == 2 — confirm this is intended.
            long val=3;
            for(long i=n-4;;i-=2) {
                if(isPrime(i)) {
                    val=i;
                    break;
                }
            }
            n-=val;
            // Find a prime pair (y, z) with y + z == remaining n using the sieve.
            long x=val,y=-1,z=-1;
            for(int i=2;i<=n;i++) {
                if(sieve[i]==0 && sieve[(int)(n-i)]==0) {
                    y=i;
                    z=n-i;
                }
            }
            out.println(3);
            out.println(x+" "+y+" "+z);
        }
        out.flush();
        out.close();
    }
}
|
/*
author : <NAME>
Link : https://www.hackerrank.com/challenges/java-bigdecimal/problem
*/
import java.math.BigDecimal;
import java.util.*;
class Solution{
    public static void main(String []args){
        // Read n decimal strings (the array has two spare slots, kept from the
        // original problem statement).
        Scanner scanner = new Scanner(System.in);
        int count = scanner.nextInt();
        String[] values = new String[count + 2];
        for (int i = 0; i < count; i++) {
            values[i] = scanner.next();
        }
        scanner.close();

        // Descending numeric order. Arrays.sort on objects is stable, so strings
        // representing the same value keep their original relative order.
        Comparator<String> descending = new Comparator<String>() {
            @Override
            public int compare(String left, String right) {
                return new BigDecimal(right).compareTo(new BigDecimal(left));
            }
        };
        Arrays.sort(values, 0, count, descending);

        for (int i = 0; i < count; i++) {
            System.out.println(values[i]);
        }
    }
}
/**
 * This class handles the turns and the related timers.
 *
 * @author Fabio Codiglioni
 */
public class TurnManager extends Observable {

    // Players taking part in the game; rotated by one at the end of each round.
    private List<Player> players;
    // Snake turn order: indices 0..n-1 followed by n-1..0.
    private List<Integer> playersOrder;
    // Current position within playersOrder.
    private int index;
    // Countdown used to suspend a player who does not act within the timeout.
    private CountdownTimer timer;
    // Turn timeout in seconds; the default is overwritten from the server config.
    private int timeout = 30;

    /**
     * Builds the snake turn order (forward then backward) and activates the
     * first player.
     *
     * @param players the list of players taking part in the Game.
     */
    public TurnManager(List<Player> players) {
        this.players = players;
        this.timeout = SagradaServer.getInstance().getGameTimeout();
        Stream<Integer> forwardRange = IntStream.range(0, this.getNumberOfPlayers()).boxed();
        Stream<Integer> backRange = IntStream.range(0, this.getNumberOfPlayers()).boxed().sorted(Collections.reverseOrder());
        this.playersOrder = Stream.concat(forwardRange, backRange).collect(Collectors.toList());
        this.index = 0;
        this.timer = new CountdownTimer(NotificationMessages.TURN_MANAGER);
        this.setActivePlayer(this.getCurrentPlayer());
    }

    /**
     * @author Fabio Codiglioni
     * @return the number of players taking part in the Game.
     */
    private int getNumberOfPlayers() {
        return this.players.size();
    }

    /**
     * @author Fabio Codiglioni
     * @return the index (into players) of the player whose turn it is.
     */
    private int getCurrentPlayerIndex() {
        return this.playersOrder.get(index);
    }

    /**
     * @author Fabio Codiglioni
     * @return the current Player.
     */
    Player getCurrentPlayer() {
        return this.players.get(this.getCurrentPlayerIndex());
    }

    /**
     * @return the nickname of the active player
     */
    public String getActivePlayer() {
        return getCurrentPlayer().getNickname();
    }

    /**
     * Registers an observer on the turn timer and (re)schedules the timeout
     * task, which suspends the current player and advances the turn on expiry.
     *
     * @param observer to be added to the list of observers on the timer
     */
    public void subscribeToTimer(Observer observer) {
        this.timer.addObserver(observer);
        this.timer.schedule(() -> {
            this.getCurrentPlayer().setSuspended(true);
            this.nextTurn();
        }, this.timeout);
    }

    /**
     * This method set the specified Player as active, and all the other players as inactive.
     * Per-turn flags (die placed, tool card used) are reset for every player.
     *
     * @author Fabio Codiglioni
     * @param player the player to be set active.
     */
    private void setActivePlayer(Player player) {
        for (Player p : this.players) {
            p.setDiePlacedInThisTurn(false);
            p.setToolCardUsedThisTurn(false);
            p.setActive(p.equals(player));
            // If the newly active player must skip this turn, advance immediately.
            // NOTE(review): nextTurn() recurses back into setActivePlayer while this
            // loop is still iterating over players — confirm this re-entrancy is intended.
            if (p.isActive() && p.isSecondTurnToBeSkipped()) {
                p.setSecondTurnToBeSkipped(false);
                this.nextTurn();
            }
        }
    }

    /**
     * This method cancels the timer
     */
    void cancelTimer() {
        if (timer != null) timer.cancel();
    }

    /**
     * @return true if the round is in its second half (backward part of the snake order).
     */
    public boolean isSecondHalfOfRound() {
        return this.index >= this.playersOrder.size() / 2;
    }

    /**
     * @return the number of not suspended players
     */
    public int countNotSuspendedPlayers() {
        return (int) players.stream()
                .filter(player -> !player.isSuspended())
                .count();
    }

    /**
     * This method has to be called by each Player when the end their turn.
     * Cancels the running timer, then either interrupts the game (at most one
     * active player left), starts a new round (order exhausted), or activates
     * the next non-suspended player.
     *
     * @author Fabio Codiglioni
     */
    public void nextTurn() {
        this.timer.cancel();
        if (countNotSuspendedPlayers() <= 1) {
            this.setChanged();
            this.notifyObservers(NotificationMessages.GAME_INTERRUPTED);
        } else {
            index++;
            if (this.index == this.playersOrder.size()) {
                // Round over: restart the snake order and rotate the starting player.
                this.index = 0;
                Collections.rotate(this.players, -1); // shift starting player
                this.setChanged();
                this.notifyObservers(NotificationMessages.ROUND_INCREMENTED);
            }
            // Recursively skip suspended players.
            if (getCurrentPlayer().isSuspended())
                nextTurn();
            this.setActivePlayer(this.getCurrentPlayer());
        }
    }
}
/**
* @file an_static_planner.h
* @brief Defines the Planner_c class.
* @authur: <NAME>
*/
#include <vector>
#include <ctime>
#include <chrono>
#include "ros/ros.h"
#include "an_messages/lanes.h"
#include "an_messages/obstacles.h"
#include "geometry_msgs/PoseStamped.h"
#include "lib/OpenList.h"
#include "lib/Node.h"
/**
 * @brief Planning class, subscribes map, pose, goal, obstacle, path plan,
 * and publishes planner trajectory.
 *
 * NOTE(review): this header appears to lack an include guard / #pragma once —
 * confirm against the full file.
 */
class Planner_c {
public:
    bool Init_(void);
    void Loop_(void);

    // Defaults used when no parameters are supplied.
    static constexpr double DEFAULT_LOOP_RATE = 150;
    static constexpr double DEFAULT_GRID_LENGTH = 20.0;
    static constexpr double DEFAULT_GRID_WIDTH = 3.7;
    static constexpr double DEFAULT_EPSILON = 1.1;

    double loop_rate_;
    double grid_length_;    // grid cell length along x (m)
    double grid_width_;     // grid cell width along y (m)
    double epsilon_;        // heuristic inflation factor (weighted A*-style search)

private:
    //callbacks
    void lanes_callback_(const an_messages::lanes::ConstPtr& msg);
    void pose_callback_(const geometry_msgs::PoseStamped::ConstPtr& msg);
    void goal_callback_(const geometry_msgs::PoseStamped::ConstPtr& msg);
    void obstacles_callback_(const an_messages::obstacles::ConstPtr& msg);

    //helper functions
    std::vector<an_messages::trajectory> ReadMotionPrimitvesFromFile();
    void process_data_();
    std::vector<int> compute_path_();
    an_messages::trajectory motion_primitives_to_trajectory_(const std::vector<int>& motion_primitives);
    int x_y_to_grid_id_(double x, double y);
    void convert_obstacle_to_grid_(double x, double y, double width, double length);
    double get_heuristic_(int i);
    std::vector<std::pair<int/*node id*/, double/*cost*/> > get_successors_(int id);
    int get_motion_primitive_id_(int start, int end);

    //helper struct: axis-aligned obstacle footprint plus heading
    struct Obstacle_s {
        double x;
        double y;
        double width;
        double length;
        double theta;
    };

    //flags tracking which inputs have arrived
    bool new_plan_;
    bool lanes_received_;
    bool pose_received_;
    bool goal_received_;
    bool obstacles_received_;

    //raw data received from msg
    int num_of_lanes_;
    double lane_width_;
    double x_begin_;   //x of map begin
    double x_end_;     //x of map end
    double x_start_;   //start location
    double y_start_;
    double x_goal_;    //goal location
    double y_goal_;
    std::vector<Obstacle_s> obs_;

    //processed data (discretized search grid)
    int m_max_;        //grid x dimension
    int n_max_;        //grid y dimension
    int num_of_grid_;
    int start_id_;
    int goal_id_;
    std::vector<Node> nodes_;
    std::vector<bool> obstacles_;  // true if the grid cell is blocked

    // motion primitives loaded from file
    std::vector<an_messages::trajectory> mps_;

    //ros
    ros::NodeHandle nh_;
    ros::Subscriber lanes_sub_, pose_sub_, goal_sub_, obstacles_sub_;
    ros::Publisher planner_trajectory_pub_;
};
|
def look_up_lib(keys):
    """Walk command_lib.command_lib along the given key path and return the nested entry.

    Unlike the previous implementation, the caller's list is not consumed:
    the old version emptied `keys` via pop(0) as a side effect.
    (Edge case: with an empty `keys` the old code raised IndexError; this
    version returns the root dictionary instead.)
    """
    subd = command_lib.command_lib
    for key in keys:
        subd = subd[key]
    return subd
// sort the saved files and return them
fn files_sorted(&mut self) -> &[PathBuf] {
	// Populate the cached file list first so the in-place sort below
	// operates on the full set.
	let _ = self.files(); // prime cache
	self.files.sort();
	self.files()
}
/// Use this struct to load a model, make predictions, and log events to the app.
impl<Input, Output> Model<Input, Output>
where
Input: Into<PredictInput>,
Output: From<PredictOutput> + Into<PredictOutput>,
{
/// Load a model from the `.tangram` file at `path`.
pub fn from_path(
path: impl AsRef<Path>,
options: Option<LoadModelOptions>,
) -> Result<Model<Input, Output>> {
let file = std::fs::File::open(path)?;
let bytes = unsafe { Mmap::map(&file)? };
Model::from_bytes(&bytes, options)
}
/// Load a model from a byte slice. You should use this only if you already have a `.tangram` loaded into memory. Otherwise, use [`Model::from_path`], which is faster because it memory maps the file.
pub fn from_bytes(
bytes: &[u8],
options: Option<LoadModelOptions>,
) -> Result<Model<Input, Output>> {
let model = tangram_model::from_bytes(&bytes)?;
let model = tangram_core::predict::Model::from(model);
let tangram_url = options
.and_then(|options| options.tangram_url)
.unwrap_or_else(|| "https://app.tangram.xyz".parse().unwrap());
Ok(Model {
model,
log_queue: Vec::new(),
tangram_url,
input_marker: PhantomData,
output_marker: PhantomData,
})
}
/// Retrieve the model's id.
pub fn id(&self) -> &str {
self.model.id.as_str()
}
/// Make a prediction with a single input.
pub fn predict_one(&self, input: Input, options: Option<PredictOptions>) -> Output {
let model = &self.model;
let options = options.map(Into::into).unwrap_or_default();
let output = tangram_core::predict::predict(model, &[input.into().into()], &options);
let output: PredictOutput = output.into_iter().next().unwrap().into();
output.into()
}
/// Make a prediction with multiple inputs.
pub fn predict(&self, input: Vec<Input>, options: Option<PredictOptions>) -> Vec<Output> {
let model = &self.model;
let options = options.map(Into::into).unwrap_or_default();
let input = input
.into_iter()
.map(Into::into)
.map(Into::into)
.collect::<Vec<_>>();
let output = tangram_core::predict::predict(model, &input, &options);
output
.into_iter()
.map(|output| -> PredictOutput { output.into() })
.map(Into::into)
.collect()
}
/// Send a prediction event to the app. If you want to batch events, you can use [`Model::enqueue_log_true_value`] instead.
#[cfg(not(feature = "tokio"))]
pub fn log_prediction(&mut self, args: LogPredictionArgs<Input, Output>) -> Result<()> {
let event = Event::Prediction(self.prediction_event(args));
self.log_event(event)?;
Ok(())
}
/// Send a prediction event to the app. If you want to batch events, you can use [`Model::enqueue_log_true_value`] instead.
#[cfg(feature = "tokio")]
pub async fn log_prediction(&mut self, args: LogPredictionArgs<Input, Output>) -> Result<()> {
let event = Event::Prediction(self.prediction_event(args));
self.log_event(event).await?;
Ok(())
}
/// Send a true value event to the app. If you want to batch events, you can use [`Model::enqueue_log_true_value`] instead.
#[cfg(not(feature = "tokio"))]
pub fn log_true_value(&mut self, args: LogTrueValueArgs) -> Result<()> {
let event = Event::TrueValue(self.true_value_event(args));
self.log_event(event)?;
Ok(())
}
/// Send a true value event to the app. If you want to batch events, you can use [`Model::enqueue_log_true_value`] instead.
#[cfg(feature = "tokio")]
pub async fn log_true_value(&mut self, args: LogTrueValueArgs) -> Result<()> {
let event = Event::TrueValue(self.true_value_event(args));
self.log_event(event).await?;
Ok(())
}
/// Add a prediction event to the queue. Remember to call [`Model::flush_log_queue`] at a later point to send the event to the app.
pub fn enqueue_log_prediction(&mut self, args: LogPredictionArgs<Input, Output>) {
let event = Event::Prediction(self.prediction_event(args));
self.log_queue.push(event);
}
/// Add a true value event to the queue. Remember to call [`Model::flush_log_queue`] at a later point to send the event to the app.
pub fn enqueue_log_true_value(&mut self, args: LogTrueValueArgs) {
let event = Event::TrueValue(self.true_value_event(args));
self.log_queue.push(event);
}
/// Send all events in the queue to the app.
#[cfg(not(feature = "tokio"))]
pub fn flush_log_queue(&mut self) -> Result<()> {
let events = self.log_queue.drain(0..self.log_queue.len()).collect();
self.log_events(events)
}
/// Send all events in the queue to the app.
#[cfg(feature = "tokio")]
pub async fn flush_log_queue(&mut self) -> Result<()> {
let events = self.log_queue.drain(0..self.log_queue.len()).collect();
self.log_events(events)
}
#[cfg(not(feature = "tokio"))]
fn log_event(&mut self, event: Event) -> Result<()> {
self.log_events(vec![event])
}
#[cfg(feature = "tokio")]
fn log_event(&mut self, event: Event) -> Result<()> {
self.log_events(vec![event])
}
#[cfg(not(feature = "tokio"))]
fn log_events(&mut self, events: Vec<Event>) -> Result<()> {
let mut url = self.tangram_url.clone();
url.set_path("/track");
let body = serde_json::to_vec(&events)?;
reqwest::blocking::Client::new()
.post(url)
.body(body)
.send()?;
Ok(())
}
#[cfg(feature = "tokio")]
async fn log_events(&mut self, events: Vec<Event>) -> Result<()> {
let mut url = self.tangram_url.clone();
url.set_path("/track");
let body = serde_json::to_vec(&events)?;
reqwest::Client::new().post(url).body(body).send().await?;
Ok(())
}
fn prediction_event(&self, args: LogPredictionArgs<Input, Output>) -> PredictionEvent {
PredictionEvent {
date: chrono::Utc::now(),
identifier: args.identifier,
input: args.input.into(),
options: args.options,
output: args.output.into(),
model_id: self.id().to_owned(),
}
}
fn true_value_event(&self, args: LogTrueValueArgs) -> TrueValueEvent {
TrueValueEvent {
date: chrono::Utc::now(),
identifier: args.identifier,
model_id: self.id().to_owned(),
true_value: args.true_value,
}
}
} |
# Repository: BrendanJones44/gpa_calc
"""
Module for errors within modules for this application.
Author: <NAME>, GitHub: BrendanJones44
"""
from enum import Enum
class ErrorTypes(Enum):
    """
    ErrorTypes is an Enum of various classes of errors the model can have.
    Each member's value is the user-facing message fragment for that class.
    """
    # A required parameter was not supplied.
    MISSING_PARAM = "missing parameter"
    # A supplied value has the wrong type (expected an integer).
    BAD_DATA = "must be integer"
class Error(object):
    """A single validation error attached to a model.

    Attributes:
        target: the field/target that caused the error.
        target_message: human-readable description of the problem with the target.
    """

    def __init__(self, target, target_message):
        self.target, self.target_message = target, target_message
|
Okay, so the title is a poor play on words (see below). Moving on.
Daniel Kuehn comments on Greg Mankiw’s recent blog post, “The Liquidity Trap May Soon Be Over.” I do not have much to say on Mankiw or liquidity traps, but Daniel brings up two interesting — tangential — points that I would like to comment on.
Interest rates and monetary policy: Daniel asks us who in their right mind could accuse monetary policy of being “too loose” between 2000–07 and at the same time call monetary policy between 2007–present “too tight.” He, of course, takes as his starting point that in both eras we see “low interest rates.” I think he is right, but I do not think he would agree with what I would conclude: monetary policy during this recession has been too loose. I wonder, though, who he has in mind when he poses his inquiry, because I think that the ideas of “loose money” and “tight money” differ from theorist to theorist.
To Austrians, loose monetary policy leads to malinvestment. Austrians, however, are not the only economists to consider monetary policy between 2003–07 "loose." Indeed, while I cannot cite the exact page from Stiglitz' The Stiglitz Report (I am in Los Angeles for the week), I know that Stiglitz also refers to "easy credit" policies as causes of the recession. However, rather than a theory of discoordination, I think Stiglitz sees only the fact that credit was distributed too easily to people who probably should have never received it. Anyway, I am not privy to any sophisticated theories — other than the Austrian one — that link interest rates, monetary policy, and economic discoordination.
The only economists I can think of who fit Daniel’s description are Austrians who also agree with monetary disequilibrium theory. I think, though, that even Austrians of this type have become disillusioned with the notion of insufficient money — it has been over three years, and prices have still not adjusted. I think more and more of these economists are looking elsewhere for explanations (regime uncertainty, mostly). Also, I firmly believe that if conditions were different, we would see sufficient evidence to prove that monetary policy has been “too loose” and that the only reason we have not seen the more adverse effects of such a policy (widespread malinvestment) is because of the continued depression in investment. Again, I am not sure Daniel would agree with me on this, but this is my take on this matter.
Milton Friedman said that looking at interest rates was a poor method of judging monetary policy. While I cannot be sure, I think that the implications of this escaped even Friedman. There are factors beyond monetary policy at play, and these factors have handicapped the ability to stimulate production (whether you believe this stimulation to be positive production or malinvestment) through monetary injections.
Austrians and NGDP Targeting: Daniel writes that he is surprised to see that many Austrians, since late 2011, have become more open to NGDP targeting. I think that if you look at the free banking literature, this is what these Austrians have been arguing in favor of for a very long time — NGDP targeting is a “second best” alternative to what would happen in a free banking industry. I think they are mistaken, but this is a belief they have had for a very long time. The crisis, though, may have turned attention to the Austrian theory of free banking, converting fellow Austrians who had formerly belonged to the “full reserve” camp and, in turn, pushing them to support these types of “second best” measures.
I am a little offended though. Daniel writes “every Austrian in the blogosphere” when he makes his point; I think I have been clear in my criticism of any fiduciary expansion (despite being a free banker) and I am an Austrian on the blogosphere. I guess that is what I get for going offline for almost a year. |
// GetDarkPubLog returns the dark-word publish log entries for the given date.
// The bool result reports whether any entries were published on that date.
func (s *Service) GetDarkPubLog(date string) (darkout []searchModel.Dark, pub bool, err error) {
	var (
		logs []searchModel.DarkPubLog
	)
	l := searchModel.DarkPubLog{}
	// Locate the most recent publish group for the date (highest groupid).
	if err = s.dao.DB.Model(&searchModel.DarkPubLog{}).Where("atime = ?", date).Order("groupid desc").
		First(&l).Error; err != nil && err != gorm.ErrRecordNotFound {
		log.Error("searchSrv.GetDarkPubLog First error(%v)", err)
		return
	}
	// No record at all means nothing was published that day.
	if err == gorm.ErrRecordNotFound {
		return nil, false, nil
	}
	// Load every entry belonging to that group.
	if err = s.dao.DB.Model(&searchModel.DarkPubLog{}).Where("groupid = ?", l.Groupid).
		Find(&logs).Error; err != nil && err != gorm.ErrRecordNotFound {
		log.Error("searchSrv.GetDarkPubLog Find error(%v)", err)
		return
	}
	// Map log rows to the output model.
	for _, v := range logs {
		a := searchModel.Dark{
			Searchword: v.Searchword,
			PV:         v.Pv,
		}
		darkout = append(darkout, a)
	}
	return darkout, true, nil
}
import { Component } from '@angular/core';
import { PageTwo } from './page-two';
import { Nav } from '@ionic/angular';
@Component({
  selector: 'page-one',
  template: `
  <ion-header>
    <ion-toolbar>
      <ion-title>Page One</ion-title>
    </ion-toolbar>
  </ion-header>
  <ion-content>
    Page One
    <div>
      <ion-button (click)="goToPageTwo()">Go to Page Two</ion-button>
    </div>
    <ul>
      <li>ngOnInit - {{ngOnInitDetection}}</li>
      <li>ionViewWillEnter - {{ionViewWillEnterDetection}}</li>
      <li>ionViewDidEnter - {{ionViewDidEnterDetection}}</li>
    </ul>
  </ion-content>
`
})
// Lifecycle test page: each hook stamps a timestamp every 500 ms so the
// template shows whether change detection runs after each lifecycle event.
// NOTE(review): the intervals are never cleared — presumably acceptable for a
// test fixture, but confirm this page is not used in production.
export class PageOne {

  ngOnInitDetection = 'initial';
  ionViewWillEnterDetection = 'initial';
  ionViewDidEnterDetection = 'initial';

  constructor(private nav: Nav) {}

  ngOnInit() {
    console.log('page one ngOnInit');
    setInterval(() => {
      this.ngOnInitDetection = '' + Date.now();
    }, 500);
  }

  ionViewWillEnter() {
    console.log('page one ionViewWillEnter');
    setInterval(() => {
      this.ionViewWillEnterDetection = '' + Date.now();
    }, 500);
  }

  ionViewDidEnter() {
    console.log('page one ionViewDidEnter');
    setInterval(() => {
      this.ionViewDidEnterDetection = '' + Date.now();
    }, 500);
  }

  // Navigate forward to PageTwo.
  async goToPageTwo() {
    await this.nav.push(PageTwo);
  }
}
|
// Game/Test_Game/Obstacles.h
#pragma once
#include <vector>
#include <string>
#include "GameObject.h"
#include "Utils.h"
using namespace std;
// A moving on-screen obstacle rendered from a named graphics buffer.
class Obstacles : public GameObject {
private:
    // speed: ticks between moves (<= 0 means the obstacle never moves);
    // tick: tick accumulator since the last move.
    // NOTE(review): bgColor/fgColor are left uninitialized by the default
    // constructor — confirm the default-constructed object is never rendered.
    int speed = 0, tick = 0, bgColor, fgColor;
protected:
    int autoX = 1, autoY = 0;
public:
    Obstacles() : GameObject(0, 0, 0, 0, "unknownObstacles") {}
    Obstacles(int x, int y, int _speed, int _bgColor, int _fgColor, string bufferKey, GraphicsController*& graphic) :
        GameObject(x, y, graphic->getBuffer(bufferKey)[0].size(), graphic->getBuffer(bufferKey).size(), bufferKey),
        speed(_speed),
        bgColor(_bgColor),
        fgColor(_fgColor)
    {}

    // Move by (x, y) once every `speed` ticks; wraps to the opposite side via
    // resetPos when the obstacle leaves the screen.
    void move(int x, int y)
    {
        // Fix: speed == 0 previously reached `tick %= speed`, which is a
        // division by zero (undefined behavior). Non-positive speed now means
        // "never moves".
        if (speed <= 0)
            return;
        tick++;
        // Check if enough ticks yet
        if (tick < speed)
            return; //Nah, not enough
        tick %= speed;
        this->oldX = this->x;
        this->oldY = this->y;
        this->x += x;
        this->y += y;
        if (isOutOfBound())
            resetPos(x > 0);
    }

    // Re-enter from the left or right edge depending on travel direction.
    virtual void resetPos(bool left = true) {
        this->x = left? -MAX_ENEMY_WIDTH + 1 : screenWidth;
    }

    void render(GraphicsController*& graphic, int offset) {
        //clearOldPos(graphic);
        graphic->setBufferObject(graphic->getBuffer(bufferKey), this->x, this->y + offset, bgColor, fgColor);
    }
    void render(GraphicsController*& graphic, int offset, int bg) {
        //clearOldPos(graphic);
        graphic->setBufferObject(graphic->getBuffer(bufferKey), this->x, this->y + offset, bg, fgColor);
    }
    void render(GraphicsController*& graphic, int offset, int bg, int ch) {
        //clearOldPos(graphic);
        graphic->setBufferObject(graphic->getBuffer(bufferKey), this->x, this->y + offset, bg, ch);
    }

    void clearOldPos(GraphicsController*& graphic) {
        GameObject::clearOldPos(graphic, bgColor, fgColor);
    }

    // Teleport without triggering a redraw of the old position.
    void setPos(int x, int y) {
        this->x = x;
        this->oldX = x;
        this->y = y;
        this->oldY = y;
    }

    bool isOutOfBound() {
        return x + MAX_ENEMY_WIDTH < 1 || x > screenWidth;
    }

    int getTick() {
        return tick;
    }
};
// Feel free to declare more classes using the above template.
# Read n (number of problems) and k (minutes already spent); 240 minutes total.
# Problem i takes 5*i minutes; count how many can be solved in order.
n, k = map(int, input().split())
remaining = 240 - k
solved = 0
for i in range(1, n + 1):
    cost = 5 * i
    if remaining < cost:
        break
    remaining -= cost
    solved += 1
print(solved)
// UpdateRecentTracks updates the recent tracks.
// It is a no-op when external calls are mocked, when the user has disabled
// recent-track collection, or when debug mode is enabled.
func (u *User) UpdateRecentTracks() {
	if *config.MockExternalCalls {
		return
	}
	if !u.Settings.RecentTracks {
		return
	}
	if *config.Debug {
		log.WithFields(log.Fields{
			"type": "recenttracks",
			"user": u,
		}).Debug("Started updating recent tracks, but debug mode is enabled!")
		return
	}
	// Make sure the OAuth token is valid before hitting the Spotify API.
	err := u.RefreshToken()
	if err != nil {
		return
	}
	log.WithFields(log.Fields{
		"type":        "recenttracks",
		"user":        u,
		"tokenExpiry": u.Token.Expiry.Unix(),
	}).Debugf("Updating recent tracks")
	// 50 is the maximum page size for the recently-played endpoint.
	options := &spotify.RecentlyPlayedOptions{Limit: 50}
	metrics.SpotifyRequests.Add(1)
	items, err := u.Client().PlayerRecentlyPlayedOpt(options)
	if err != nil {
		log.WithFields(log.Fields{
			"type":        "recenttracks",
			"user":        u,
			"tokenExpiry": u.Token.Expiry.Unix(),
		}).Error(err)
		return
	}
	// Resolve each played item to a local track record, refresh them in bulk,
	// then persist the listen history.
	list := []*tracks.Track{}
	for _, s := range items {
		t := tracks.GetTrackFromID(string(s.Track.ID))
		list = append(list, t)
	}
	tracks.BatchUpdate(list, *u.Client())
	u.insertRecentTracks(items)
}
By Nicholas West
The Chicago police department continues to march toward what it calls “policing in the 21st century.” If their conduct is any indication, that police work would include systemic corruption, unlawful detention, torture, racial profiling and mass surveillance.
However, activists and journalists continue to work hard to expose even more abuses that might still lurk in the shadows. Some progress has been made. Derrick Broze reported in January of last year about Chicago activist, Freddy Martinez, who filed a Freedom of Information Act (FOIA) request with the CPD in 2014. Martinez was seeking details regarding the much-maligned use of Stingray cell phone surveillance. Despite the Chicago PD refusing to answer the request, a Cook County, Illinois judge ordered the Chicago Police Department to allow her to review documents related to cell phone surveillance tools. This case is illustrative of the pressure that needs to be exerted if we are to get answers about what our public servants are truly doing on our behalf.
But Stingray surveillance is merely one component of a much larger surveillance network that Chicago has set up and continues to expand, which even includes an explicit mission to embrace “predictive policing” — essentially, the concept of pre-crime that most people hoped was relegated purely to science fiction.
In late 2013 the public was alerted to an index of approximately 400 people who had been identified by a computer algorithm as being future threats to commit violent crime. Without having actually committed a crime, some of those on the list were being visited by Chicago police warning them that they were already being watched. This index came to be known as the “Heat List.”
When the Chicago Police Department sent one of its commanders to Robert McDaniel’s home last summer, the 22-year-old high school dropout was surprised. Though he lived in a neighborhood well-known for bloodshed on its streets, he hadn’t committed a crime or interacted with a police officer recently. And he didn’t have a violent criminal record, nor any gun violations. In August, he incredulously told the Chicago Tribune, “I haven’t done nothing that the next kid growing up hadn’t done.” Yet, there stood the female police commander at his front door with a stern message: if you commit any crimes, there will be major consequences. We’re watching you. (Source)
At issue is the fact that no one knows how the algorithm works, nor has there been sufficient oversight to judge its effectiveness or lawfulness. Moreover, that initial list of 400 has apparently grown to 5,000. Three journalists are now suing the Chicago police for information that can explain exactly how people are winding up on this secretive list now called the “Strategic Subject List.” As reported by Shadow Proof:
Journalists George Joseph, Jamie Kalven, and Brandon Smith submitted freedom of information requests under the Illinois law in 2016. The Chicago Sun-Times submitted their request for records in May, according to the filed complaint [PDF]. Joseph sought records on the risk factors that are used when placing individuals on the list and communications about increasing the number of people on the list. Records on "the algorithm that determines who makes it on the Strategic Subject List" were sought by Kalven. He also requested records that included manuals or guides on the use of the interface employed to list people. Smith also requested records that would reveal the secret algorithm underpinning a key component of predictive policing used by Chicago police. He wanted data showing "risk scores of all people added to the list in its first two years of regular or non-test use." He also wanted to know if the first 5,000 individuals on the lists were charged with new crimes in the two years following their inclusion.
The CPD denied Smith's request for information on 5,000 individuals and called it "unduly burdensome." It indicated it would issue an additional response rejecting other parts of the request but Smith never received a further response.
As we can see, it is clear that not only is the culture of secrecy being embraced by the CPD, but there is also an arrogant dismissal of such investigation as “unduly burdensome.” However, these particular journalists have an excellent track record of applying the proper pressure needed to defend the rights of Chicago’s citizens, explaining why full disclosure is essential. |
mod final_consonant;
mod initial_consonant;
mod medial_vowel;
mod syllable;
use super::byte::*;
use final_consonant::*;
use initial_consonant::*;
use medial_vowel::*;
pub(crate) use syllable::Syllable;
|
def split_x(self, x, unit_vals=False, src_vals=False):
    """Split this ROI into vertical slices at the given x coordinate(s).

    Parameters:
      x         - a single x value or a list/tuple of x values at which
                  to cut the ROI.
      unit_vals - if True, x values are in ROI-unit coordinates [0, 1].
      src_vals  - if True, x values are already in source-image
                  coordinates and no transform is applied.

    Returns a FeatureSet of ROI slices covering the full width of this
    ROI, or None if the request is ambiguous (both coordinate flags set)
    or any split point falls outside the ROI.
    """
    ret_val = FeatureSet()
    if unit_vals and src_vals:
        logger.warning("Not sure how you would like to split the features")
        return None
    # Always work on a fresh list: the original code mutated the caller's
    # list in the src_vals case (insert/append below), and crashed outright
    # on tuple input since tuples have no insert().
    if isinstance(x, (list, tuple)):
        x = list(x)
    else:
        x = [x]
    if unit_vals:
        x = self.coord_transform_x(x, intype="ROI_UNIT", output="SRC")
    elif not src_vals:
        x = self.coord_transform_x(x, intype="ROI", output="SRC")
    # Reject any cut that lies outside the ROI's horizontal extent.
    for xt in x:
        if xt < self.xtl or xt > self.xtl + self.w:
            logger.warning("Invalid split point.")
            return None
    # Bracket the user-supplied cuts with the ROI's own edges so the
    # resulting slices tile the entire width.
    x.insert(0, self.xtl)
    x.append(self.xtl + self.w)
    for i in range(0, len(x) - 1):
        xstart = x[i]
        xstop = x[i + 1]
        w = xstop - xstart
        ret_val.append(ROI(xstart, self.ytl, w, self.h, self._image))
    return ret_val
/**
* Common method to prepare the request params for CDC query operation for both sync and async calls
*
* @param entities
* the list of entities
* @param changedSince
* the date where the entities should be listed from the last changed date
* @return IntuitMessage the intuit message
* @throws FMSException
*/
private <T extends IEntity> IntuitMessage prepareCDCQuery(List<? extends IEntity> entities, String changedSince) throws FMSException {
IntuitMessage intuitMessage = new IntuitMessage();
RequestElements requestElements = intuitMessage.getRequestElements();
Map<String, String> requestParameters = requestElements.getRequestParameters();
requestParameters.put(RequestElements.REQ_PARAM_METHOD_TYPE, MethodType.GET.toString());
if (entities != null) {
StringBuffer entityParam = new StringBuffer();
for (IEntity entity : entities) {
entityParam.append(entity.getClass().getSimpleName()).append(",");
}
entityParam.delete(entityParam.length() - 1, entityParam.length());
requestParameters.put(RequestElements.REQ_PARAM_ENTITIES, entityParam.toString());
}
String cdcChangedSinceParam = null;
String cdcAction = null;
cdcChangedSinceParam = RequestElements.REQ_PARAM_CHANGED_SINCE;
cdcAction = OperationType.CDCQUERY.toString();
if (StringUtils.hasText(changedSince)) {
requestParameters.put(cdcChangedSinceParam, changedSince);
}
requestElements.setAction(cdcAction);
requestElements.setContext(context);
return intuitMessage;
} |
Risk Management, Capital Budgeting, and Capital Structure Policy for Insurers and Reinsurers
This article builds on Froot and Stein in developing a framework for analyzing the risk allocation, capital budgeting, and capital structure decisions facing insurers and reinsurers. The model incorporates three key features: (i) value-maximizing insurers and reinsurers face product-market as well as capital-market imperfections that give rise to well-founded concerns with risk management and capital allocation; (ii) some, but not all, of the risks they face can be frictionlessly hedged in the capital market; and (iii) the distribution of their cash flows may be asymmetric, which alters the demand for underwriting and hedging. We show these features result in a three-factor model that determines the optimal pricing and allocation of risk and capital structure of the firm. This approach allows us to integrate these features into: (i) the pricing of risky investment, underwriting, reinsurance, and hedging; and (ii) the allocation of risk across all of these opportunities, and the optimal amount of surplus capital held by the firm. |
A mean lord exiles fairytale creatures to the swamp of a grumpy ogre, who must go on a quest and rescue a princess for the lord in order to get his land back.
Princess Fiona's parents invite her and Shrek to dinner to celebrate her marriage. If only they knew the newlyweds were both ogres.
Woody is stolen by Al who is a toy collector of Al's Toy Barn. Buzz and his friends are on a rescue mission to save Woody before he becomes a museum toy property with his roundup gang Jessie, Prospector, and Bullseye.
Determined to make her own path in life, Princess Merida defies a custom that brings chaos to her kingdom. Granted one wish, Merida must rely on her bravery and her archery skills to undo a beastly curse.
Set during the Ice Age, a sabertooth tiger, a sloth, and a wooly mammoth find a lost human infant, and they try to return him to his tribe.
A group of animals who have spent all their life in a New York zoo end up in the jungles of Madagascar, and must adjust to living in the wild.
When Gru, the world's most super-bad turned super-dad has been recruited by a team of officials to stop lethal muscle and a host of Gru's own, He has to fight back with new gadgetry, cars, and more minion madness.
The Dragon Warrior has to clash against the savage Tai Lung as China's fate hangs in the balance. However, the Dragon Warrior mantle is supposedly mistaken to be bestowed upon an obese panda who is a novice in martial arts.
In order to power the city, monsters have to scare children so that they scream. However, the children are toxic to the monsters, and after a child gets through, 2 monsters realize things may not be what they think.
When a criminal mastermind uses a trio of orphan girls as pawns for a grand scheme, he finds their love is profoundly changing him for the better.
A hot-shot race-car named Lightning McQueen gets waylaid in Radiator Springs, where he finds the true meaning of friendship and family.
Mike Wazowski and James P. Sullivan are an inseparable pair, but that wasn't always the case. From the moment these two mismatched monsters met they couldn't stand each other. "Monsters University" unlocks the door to how Mike and Sulley overcame their differences and became the best of friends. Written by Disney/Pixar
Did You Know?
Trivia Sulley's pink spots appear blue when he is out in sunlight and return to pink when he is indoors or when it's night time. Sulley's pink spots appear blue when he is out in sunlight and return to pink when he is indoors or when it's night time. See more
Goofs The textbook chapter on the Cobra Hiss explains the eyes need to be opened to 110% capacity, but the diagram wrongly depicts this as an angle with the eyes open at 110 degrees. The textbook chapter on the Cobra Hiss explains the eyes need to be opened to 110% capacity, but the diagram wrongly depicts this as an angle with the eyes open at 110 degrees. See more
Crazy Credits Following the final credits, the Slug from early in the movie finally makes it to his first college class, but it's the last day of school. Following the final credits, the Slug from early in the movie finally makes it to his first college class, but it's the last day of school. See more |
/**
* @author Shamsul Bahrin Abd Mutalib
* @version 1.01
*/
public class SQLRendererAdapter {
SQLRenderer r;
String sql = "";
public SQLRendererAdapter(SQLRenderer r) {
this.r = r;
}
public ResultSet doSelect(Db db, String table) throws Exception {
return doSelect(db, table, null, null);
}
public ResultSet doSelect(Db db, String table, String orderby) throws Exception {
return doSelect(db, table, orderby, null);
}
public ResultSet doSelect(Db db, String table, String orderby, String option) throws Exception {
if ( r instanceof SQLRenderer ) {
if ( orderby == null && option == null ) sql = r.getSQLSelect(table);
else if ( orderby != null && option == null ) sql = r.getSQLSelect(table, orderby);
else if ( orderby != null && option != null ) sql = r.getSQLSelect(table, orderby, option);
return db.getStatement().executeQuery(sql);
}
else if ( r instanceof SQLPStmtRenderer ){
PreparedStatement pstmt = null;
if ( orderby == null && option == null )
pstmt = ((SQLPStmtRenderer) r).getPStmtSelect(db.getConnection(), table);
else if ( orderby != null && option == null )
pstmt = ((SQLPStmtRenderer) r).getPStmtSelect(db.getConnection(), table, orderby);
else if ( orderby != null && option != null )
pstmt = ((SQLPStmtRenderer) r).getPStmtSelect(db.getConnection(), table, orderby, option);
return pstmt.executeQuery();
}
else {
return null;
}
}
public void doInsert(Db db, String table) throws Exception {
if ( r instanceof SQLRenderer ) {
sql = r.getSQLInsert(table);
db.getStatement().executeUpdate(sql);
}
else if ( r instanceof SQLPStmtRenderer) {
PreparedStatement pstmt = ((SQLPStmtRenderer) r).getPStmtInsert(db.getConnection(), table);
pstmt.executeUpdate();
}
}
public void doUpdate(Db db, String table) throws Exception {
if ( r instanceof SQLRenderer ) {
sql = r.getSQLUpdate(table);
db.getStatement().executeUpdate(sql);
}
else if ( r instanceof SQLPStmtRenderer) {
PreparedStatement pstmt = ((SQLPStmtRenderer) r).getPStmtUpdate(db.getConnection(), table);
pstmt.executeUpdate();
}
}
} |
// Fits boundingBox within the confines of the map.
public static void boxResize(Rectangle boundingBox) {
boundingBox.setSize(Math.min(boundingBox.width, res[0]), Math.min(res[1], boundingBox.height));
boundingBox.setLocation(Math.max(Math.min(boundingBox.x, res[0] - 1 - boundingBox.width), 0),
Math.max(Math.min(boundingBox.y, res[1] - 1 - boundingBox.height), 0));
} |
#include <stdint.h>
/*
 * Update a reflected CRC-16 (polynomial 0xA001, i.e. 0x8005 bit-reversed)
 * with one input byte, LSB-first. This is the update step used by
 * CRC-16/MODBUS (init 0xFFFF) and CRC-16/ARC (init 0x0000).
 *
 * crc: current CRC accumulator
 * a:   next message byte
 * returns the updated CRC
 */
uint16_t crc16_update(uint16_t crc, uint8_t a){
    int i;
    /* The original guarded the loop with if (crc || a). That guard was
     * redundant: when both are zero, crc ^ a is zero and eight shifts of a
     * zero CRC yield zero, so the result is identical without it. */
    crc ^= a;
    for (i = 0; i < 8; ++i)
    {
        if (crc & 1)
            crc = (crc >> 1) ^ 0xA001;
        else
            crc = (crc >> 1);
    }
    return crc;
}
|
// Returns an iterator within frame_entries pointing to the FrameEntry
// matching the specified sequence number.
// If the sequence number was not found, will return end(frame_entries)
//
// m_frameEntries behaves as a circular buffer: m_headEntry marks the
// logical head, and logically-later frames may wrap around to the
// physical front of the vector.
//
// _Requires_lock_held_(m_lock)
vector<ctsConfig::JitterFrameEntry>::iterator ctsIoPatternMediaStreamClient::FindSequenceNumber(long long sequenceNumber) noexcept
{
    // Sequence number of the logical head (the entry m_headEntry points at).
    const ctsSignedLongLong headSequenceNumber = m_headEntry->m_sequenceNumber;
    // Highest sequence number the buffer can currently track.
    const ctsSignedLongLong tailSequenceNumber = headSequenceNumber + m_frameEntries.size() - 1;
    // Sequence number stored in the last *physical* slot of the vector.
    const ctsSignedLongLong vectorEndSequenceNumber = m_frameEntries.rbegin()->m_sequenceNumber;
    // Outside the tracked window entirely: not found.
    if (sequenceNumber > tailSequenceNumber || sequenceNumber < headSequenceNumber)
    {
        return end(m_frameEntries);
    }
    if (sequenceNumber <= vectorEndSequenceNumber)
    {
        // No wrap: the frame lies between m_headEntry and the physical end.
        const auto offset = static_cast<size_t>(sequenceNumber - headSequenceNumber);
        return m_headEntry + offset;
    }
    // Wrapped: the frame lives at the physical front of the vector, just
    // past the last physical entry's sequence number.
    const auto offset = static_cast<size_t>(sequenceNumber - vectorEndSequenceNumber - 1LL);
    return m_frameEntries.begin() + offset;
}
/**
 * Respond to an authentication request from the back-end for SSPI authentication (AUTH_REQ_SSPI).
 *
 * <p>Acquires a handle to the current user's Windows login credentials, builds an SSPI
 * security context against the server's SPN, and sends the resulting initial token to
 * the back-end as the first message of the negotiation.
 *
 * @throws SQLException on SSPI authentication handshake failure
 * @throws IOException on network I/O issues
 */
@Override
public void startSSPI() throws SQLException, IOException {
  /*
   * We usually use SSPI negotiation (spnego); the raw "kerberos" package is selected
   * instead when enableNegotiate is false — presumably when the client asked for
   * GSSAPI without explicitly enabling spnego (TODO confirm against the caller).
   */
  final String securityPackage = enableNegotiate ? "negotiate" : "kerberos";
  LOGGER.log(Level.FINEST, "Beginning SSPI/Kerberos negotiation with SSPI package: {0}", securityPackage);
  try {
    /*
     * Acquire a handle for the local Windows login credentials for the current user
     *
     * See AcquireCredentialsHandle
     * (http://msdn.microsoft.com/en-us/library/windows/desktop/aa374712%28v=vs.85%29.aspx)
     *
     * This corresponds to pg_SSPI_startup in libpq/fe-auth.c .
     */
    IWindowsCredentialsHandle clientCredentials;
    try {
      clientCredentials = WindowsCredentialsHandleImpl.getCurrent(securityPackage);
      // Stash on the instance (used by later handshake steps) before initializing.
      this.clientCredentials = clientCredentials;
      clientCredentials.initialize();
    } catch (Win32Exception ex) {
      throw new PSQLException("Could not obtain local Windows credentials for SSPI",
          PSQLState.CONNECTION_UNABLE_TO_CONNECT /* TODO: Should be authentication error */, ex);
    }
    try {
      // Service Principal Name the server's credentials must match.
      String targetName = makeSPN();
      this.targetName = targetName;
      LOGGER.log(Level.FINEST, "SSPI target name: {0}", targetName);
      // Create and initialize the client-side security context for that SPN.
      sspiContext = new WindowsSecurityContextImpl();
      sspiContext.setPrincipalName(targetName);
      sspiContext.setCredentialsHandle(clientCredentials);
      sspiContext.setSecurityPackage(securityPackage);
      sspiContext.initialize(null, null, targetName);
    } catch (Win32Exception ex) {
      throw new PSQLException("Could not initialize SSPI security context",
          PSQLState.CONNECTION_UNABLE_TO_CONNECT /* TODO: Should be auth error */, ex);
    }
    // Send the context's first token to the back-end to open the handshake.
    sendSSPIResponse(sspiContext.getToken());
    LOGGER.log(Level.FINEST, "Sent first SSPI negotiation message");
  } catch (NoClassDefFoundError ex) {
    // Waffle is an optional dependency; fail with a clear message when absent.
    throw new PSQLException(
        "SSPI cannot be used, Waffle or its dependencies are missing from the classpath",
        PSQLState.NOT_IMPLEMENTED, ex);
  }
}
// Scans given directory first for the AGS game config. If such config exists
// and it contains directions to the game data, then use these settings to find it.
// Otherwise, scan original directory for the game data.
// Returns found path to game data, or empty string if failed.
String find_game_data_in_config_and_dir(const String &path)
{
    String data_dir, data_file;
    read_config_with_game_location(path, data_dir, data_file);
    // An explicit data file named in the config wins outright.
    if (!data_file.IsEmpty())
        return data_file;
    // Otherwise fall back to scanning the directory the config points at.
    if (!data_dir.IsEmpty())
        return FindGameData(data_dir);
    // No usable hints in the config: report failure.
    return "";
}
<gh_stars>0
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.cm.client.command;
import java.util.List;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.canvas.command.AbstractCanvasCommand;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.client.shape.MutationContext;
import org.kie.workbench.common.stunner.core.command.CommandResult;
import org.kie.workbench.common.stunner.core.diagram.Diagram;
import org.kie.workbench.common.stunner.core.graph.Edge;
import org.kie.workbench.common.stunner.core.graph.Node;
import org.kie.workbench.common.stunner.core.graph.content.relationship.Child;
import org.kie.workbench.common.stunner.core.graph.content.view.View;
import org.kie.workbench.common.stunner.core.graph.processing.traverse.content.AbstractChildrenTraverseCallback;
import org.kie.workbench.common.stunner.core.graph.processing.traverse.content.ChildrenTraverseProcessor;
/**
* Draws the whole Case Management diagram. This implementation does not use Commands since loading cannot be "undone".
*/
/**
 * Draws the whole Case Management diagram. This implementation does not use Commands since loading cannot be "undone".
 */
public class CaseManagementDrawCommand extends AbstractCanvasCommand {

    private final ChildrenTraverseProcessor childrenTraverseProcessor;

    public CaseManagementDrawCommand(final ChildrenTraverseProcessor childrenTraverseProcessor) {
        this.childrenTraverseProcessor = childrenTraverseProcessor;
    }

    @Override
    @SuppressWarnings("unchecked")
    public CommandResult<CanvasViolation> execute(final AbstractCanvasHandler context) {
        final Diagram diagram = context.getDiagram();
        final String shapeSetId = diagram.getMetadata().getShapeSetId();

        // Walks the graph parent-first: every visited node is registered on
        // the canvas, and child nodes are attached beneath their parent.
        final AbstractChildrenTraverseCallback<Node<View, Edge>, Edge<Child, Node>> callback =
                new AbstractChildrenTraverseCallback<Node<View, Edge>, Edge<Child, Node>>() {

                    @Override
                    public void startNodeTraversal(final Node<View, Edge> node) {
                        super.startNodeTraversal(node);
                        drawNode(node);
                    }

                    @Override
                    public boolean startNodeTraversal(final List<Node<View, Edge>> parents,
                                                      final Node<View, Edge> node) {
                        super.startNodeTraversal(parents, node);
                        drawNode(node);
                        // The last element of parents is the immediate parent.
                        context.addChild(parents.get(parents.size() - 1), node);
                        return true;
                    }

                    private void drawNode(final Node<View, Edge> node) {
                        context.register(shapeSetId, node);
                        context.applyElementMutation(node, MutationContext.STATIC);
                    }

                    @Override
                    public void endGraphTraversal() {
                        super.endGraphTraversal();
                        // All shapes are registered; flush them to the canvas.
                        context.getCanvas().draw();
                    }
                };

        childrenTraverseProcessor.traverse(diagram.getGraph(), callback);
        return buildResult();
    }

    @Override
    public CommandResult<CanvasViolation> undo(final AbstractCanvasHandler context) {
        throw new UnsupportedOperationException("Draw cannot be undone, yet.");
    }
}
|
NATURAL‐KILLER CELL ACTIVITY AND CYTOGENETIC RESPONSE IN CHRONIC MYELOGENOUS LEUKAEMIA TREATED WITH α‐INTERFERON
The capacity of alpha interferon (aIFN) to directly induce lymphokine-activated killer (LAK) cytotoxicity without requiring the participation of interleukin 2 (IL2) (Ellis et al, 1989) has prompted us to test whether its ability to dramatically reduce the number of Philadelphia chromosome-containing (Ph+) clones in chronic myelogenous leukaemia (CML) patients is not in part mediated through the generation of aIFN-activated killer cells. Fourteen CML patients, Ph+ at diagnosis (12 under aIFN therapy and two without), were included in this study. Their clinical status and haematological and cytogenetic data at the time selected for the natural killer cell (NK) activity studies are shown in Table I. IFNα-2a therapy was carried out together with hydroxyurea and with or without low-dose cytosine arabinoside, as described in detail elsewhere (Guilhot et al, 1991). Cytogenetic studies on short-term (24 h)
#include <inttypes.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>

#include "../../lib/complex.h"

/* One test case: base^exponent is expected to equal `expected`. */
typedef struct {
    const Complex base;
    const uint64_t exponent;
    const Complex expected;
} Test_Values;

/* Prints "power(<base>, <exponent>) = <result>" followed by a newline.
 * PRIu64 is the portable conversion for uint64_t; the original "%ld" had
 * the wrong signedness and the wrong width on LLP64/32-bit platforms. */
static void print_case(const Test_Values *test, Complex result) {
    printf("power(");
    complex_print(test->base);
    printf(", %" PRIu64 ") = ", test->exponent);
    complex_print(result);
    printf("\n");
}

int main(void) {
    const Test_Values values[] = {
        {complex_init(0.0, 0.0), 0, complex_init(1.0, 0.0)},
        {complex_init(10.0, 10.0), 0, complex_init(1.0, 0.0)},
        {complex_init(0.0, 0.0), 10, complex_init(0.0, 0.0)},
        {complex_init(-1.0, 1.0), 1, complex_init(-1.0, 1.0)},
        {complex_init(-1.0, 1.0), 2, complex_init(0.0, -2.0)},
        {complex_init(0.0, 1.0), 2, complex_init(-1.0, 0.0)},
        {complex_init(0.5, 0.5), 2, complex_init(0.0, 0.5)},
        {complex_init(0.0, 2.0), 2, complex_init(-4.0, 0.0)},
        {complex_init(1.0, 1.0), 2, complex_init(0.0, 2.0)},
        {complex_init(1.0, 1.0), 3, complex_init(-2.0, 2.0)},
        {complex_init(1.0, 1.0), 4, complex_init(-4.0, 0.0)},
        {complex_init(1.0, 1.0), 5, complex_init(-4.0, -4.0)},
        {complex_init(1.0, 1.0), 10, complex_init(0.0, 32.0)},
    };

    printf("Power:\n");
    for (size_t i = 0; i < sizeof(values) / sizeof(values[0]); i++) {
        const Complex result = complex_power(values[i].base, values[i].exponent);
        if (complex_are_equal(result, values[i].expected)) {
            print_case(&values[i], result);
        } else {
            /* Diagnostic to stderr, the failing case to stdout (as before). */
            fprintf(stderr, "Error: imprecise result.\n");
            print_case(&values[i], result);
            return EXIT_FAILURE;
        }
    }
    return EXIT_SUCCESS;
}
def hybrid_forward(self, F, x, anchors):
    """Crop the cached anchor grid to the feature map's spatial size.

    :param F: mxnet symbol/ndarray module supplied by the hybrid framework
    :param x: feature map whose trailing axes define the crop size
    :param anchors: precomputed anchor tensor to be sliced
    :return: anchors reshaped to (1, -1, 4)
    """
    # Slice anchors along axes (2, 3) to match x's extent there -- assumes
    # both are NCHW-style tensors with spatial dims last (TODO confirm).
    a = F.slice_like(anchors, x, axes=(2, 3))
    # Flatten to (1, N, 4): one 4-component box per anchor position.
    return a.reshape((1, -1, 4))
<filename>src/mock/response.go
package mock
import (
"encoding/json"
"net/http/httptest"
)
// GetResponse returns a fresh httptest.ResponseRecorder for capturing an
// HTTP handler's response in tests.
func GetResponse() *httptest.ResponseRecorder {
	return httptest.NewRecorder()
}
// GetResponseBody decodes the JSON in body into data, panicking on any
// decode error. data should be a pointer to the destination value.
func GetResponseBody(body []byte, data interface{}) {
	err := json.Unmarshal(body, &data)
	if err != nil {
		panic(err)
	}
}
|
// Code generated by hack/codegen-events.py. DO NOT EDIT.
package events
import "github.com/golang/protobuf/proto"
type EventType = string
type Event interface {
proto.Message
Type() EventType
}
const (
EventWorkflowCreated EventType = "WorkflowCreated"
EventWorkflowDeleted EventType = "WorkflowDeleted"
EventWorkflowParsed EventType = "WorkflowParsed"
EventWorkflowParsingFailed EventType = "WorkflowParsingFailed"
EventInvocationCreated EventType = "InvocationCreated"
EventInvocationCompleted EventType = "InvocationCompleted"
EventInvocationCanceled EventType = "InvocationCanceled"
EventInvocationTaskAdded EventType = "InvocationTaskAdded"
EventInvocationFailed EventType = "InvocationFailed"
EventTaskStarted EventType = "TaskStarted"
EventTaskSucceeded EventType = "TaskSucceeded"
EventTaskSkipped EventType = "TaskSkipped"
EventTaskFailed EventType = "TaskFailed"
)
func (m *WorkflowCreated) Type() EventType {
return EventWorkflowCreated
}
func (m *WorkflowDeleted) Type() EventType {
return EventWorkflowDeleted
}
func (m *WorkflowParsed) Type() EventType {
return EventWorkflowParsed
}
func (m *WorkflowParsingFailed) Type() EventType {
return EventWorkflowParsingFailed
}
func (m *InvocationCreated) Type() EventType {
return EventInvocationCreated
}
func (m *InvocationCompleted) Type() EventType {
return EventInvocationCompleted
}
func (m *InvocationCanceled) Type() EventType {
return EventInvocationCanceled
}
func (m *InvocationTaskAdded) Type() EventType {
return EventInvocationTaskAdded
}
func (m *InvocationFailed) Type() EventType {
return EventInvocationFailed
}
func (m *TaskStarted) Type() EventType {
return EventTaskStarted
}
func (m *TaskSucceeded) Type() EventType {
return EventTaskSucceeded
}
func (m *TaskSkipped) Type() EventType {
return EventTaskSkipped
}
func (m *TaskFailed) Type() EventType {
return EventTaskFailed
}
|
// repackIfNeeded uses a set of heuristics to determine whether the repository needs a
// full repack and, if so, repacks it.
func (s *server) repackIfNeeded(ctx context.Context, repository *gitalypb.Repository) error {
repoPath, err := s.locator.GetRepoPath(repository)
if err != nil {
return err
}
hasBitmap, err := stats.HasBitmap(repoPath)
if err != nil {
return helper.ErrInternal(err)
}
missingBloomFilters, err := stats.IsMissingBloomFilters(repoPath)
if err != nil {
return helper.ErrInternal(err)
}
if hasBitmap && !missingBloomFilters {
return nil
}
altFile, err := s.locator.InfoAlternatesPath(repository)
if err != nil {
return helper.ErrInternal(err)
}
createBitMap := false
if _, err := os.Stat(altFile); os.IsNotExist(err) {
createBitMap = true
}
if _, err = s.RepackFull(ctx, &gitalypb.RepackFullRequest{
Repository: repository,
CreateBitmap: createBitMap,
}); err != nil {
return err
}
return nil
} |
/**
* A Custom view pager class for suppressing gesture exceptions.
*/
public class CustomViewPager extends ViewPager {
/**
* Instantiates a new Custom view pager.
*
* @param context the context
*/
public CustomViewPager(Context context) {
super(context);
}
/**
* Instantiates a new Custom view pager.
*
* @param context the context
* @param attrs the attrs
*/
public CustomViewPager(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
try {
return super.onTouchEvent(event);
} catch (IllegalArgumentException ignored) {
}
return false;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent event) {
try {
return super.onInterceptTouchEvent(event);
} catch (IllegalArgumentException ignored) {
}
return false;
}
} |
DRAIN: Deadlock Removal for Arbitrary Irregular Networks
Correctness is a first-order concern in the design of computer systems. For multiprocessors, a primary correctness concern is the deadlock-free operation of the network and its coherence protocol; furthermore, we must guarantee the continued correctness of the network in the face of increasing faults. Designing for deadlock freedom is expensive. Prior solutions either sacrifice performance or power efficiency to proactively avoid deadlocks or impose high hardware complexity to reactively resolve deadlocks as they occur. However, the precise confluence of events that lead to deadlocks is so rare that minimal resources and time should be spent to ensure deadlock freedom. To that end, we propose DRAIN, a subactive approach to remove potential deadlocks without needing to explicitly detect or avoid them. We simply let deadlocks happen and periodically drain (i.e., force the movement of) packets in the network that may be involved in a cyclic dependency. As deadlocks are a rare occurrence, draining can be performed infrequently and at low cost. Unlike prior solutions, DRAIN eliminates not only routing-level but also protocol-level deadlocks without the need for expensive virtual networks. DRAIN dramatically simplifies deadlock freedom for irregular topologies and networks that are prone to wear-related faults. Our evaluations show that on an average, DRAIN can save 26.73% packet latency compared to proactive deadlock-freedom schemes in the presence of faults while saving 77.6% power compared to reactive schemes. |
This video is brought to you by CoinIdol.com in partnership with Koles Coin News Channel
Crypto voting system to be patented by Fidelity Investments
New voting platform to be introduced by Fidelity. The world's fourth largest mutual fund Fidelity Investments, is looking to patent a method that enables blockchain to be used for authenticating voters and processing fair elections. On the 16th February, the US Patent and Trademark Office released an application for "Crypto Voting and Social Aggregating, Fractionally Efficient Transfer Guidance, Conditional Triggered Transaction, Data structures, Methods and Systems". The application outlines the structure of Fidelity’s crypto-voting system, the components of which include voter authentication, vote processing, a crypto user interface, a blockchain oracle and a smart contract to direct all computational actions.
FinTech association in New Zealand
The development of FinTech in New Zealand. NZTech, a technology development company, and the New Zealand government teamed up to launch the new technology association last week. Mitchell Pham, tech leader and director of NZTech, stated that the setup of a national financial technology association in New Zealand will grow the local economy. Mitchell Pham also noted: «This will help to expand connections for our fintech community to grow, which in turn will contribute to New Zealand's economic growth.»
New blockchain trial in Austria
Blockchain energy trading in Austria. Energy provider Wien Energie, in collaboration with blockchain developer BTL Group focuses on energy trading with a new blockchain trial. The representatives of companies noted that trial would find the solutions that are commercially viable and able to reduce the cost of energy. CEO of Wien Energie Michael Strebl stated: «Blockchain technology is a further digitization trend in the energy industry. We want to make use of the technology and its chances for our customers and us». The trial is expected to last from March to May of this year. |
/********************************************************************
* FUNCTION mgr_xml_skip_subtree
*
* Already encountered an error, so advance nodes until the
* matching start-node is reached or a terminating error occurs
*   - end of input
*   - start depth level reached
*
* INPUTS:
*   reader == XmlReader already initialized from File, Memory,
*             or whatever
*   startnode == xml_node_t of the start node of the sub-tree to skip
* RETURNS:
*   status of the operation
* SIDE EFFECTS:
*   the xmlreader state is advanced until the current node is the
*   end node of the specified start node or a fatal error occurs
*********************************************************************/
status_t
    mgr_xml_skip_subtree (xmlTextReaderPtr reader,
                          const xml_node_t *startnode)
{
    xml_node_t node;
    const xmlChar *qname, *badns;
    uint32 len;
    int ret, depth, nodetyp;
    xmlns_id_t nsid;
    boolean done, justone;
    status_t res;
#ifdef DEBUG
    if (!reader || !startnode) {
        return SET_ERROR(ERR_INTERNAL_PTR);
    }
#endif
    justone = FALSE;
    /* how much to skip depends on the kind of node we started from */
    switch (startnode->nodetyp) {
    case XML_NT_START:
        /* a real subtree follows; skip until its matching end node */
        break;
    case XML_NT_EMPTY:
        /* empty element: nothing to skip */
        return NO_ERR;
    case XML_NT_STRING:
        /* text node: consume at most one more node */
        justone = TRUE;
        break;
    case XML_NT_END:
        /* already at an end node: nothing to skip */
        return NO_ERR;
    default:
        return SET_ERROR(ERR_INTERNAL_VAL);
    }
    xml_init_node(&node);
    /* fast path: the very next node may already be the matching end node */
    res = mgr_xml_consume_node_noadv(reader, &node);
    if (res == NO_ERR) {
        res = xml_endnode_match(startnode, &node);
        if (res == NO_ERR) {
            xml_clean_node(&node);
            return NO_ERR;
        }
    }
    xml_clean_node(&node);
    if (justone) {
        return NO_ERR;
    }
    /* advance raw reader nodes until the start node's level is closed */
    done = FALSE;
    while (!done) {
        /* xmlTextReaderRead returns 1 on success; anything else is EOF/error */
        ret = xmlTextReaderRead(reader);
        if (ret != 1) {
            return ERR_XML_READER_EOF;
        }
        depth = xmlTextReaderDepth(reader);
        if (depth == -1) {
            return ERR_XML_READER_INTERNAL;
        } else if (depth <= startnode->depth) {
            /* popped back out to (or above) the start node's depth */
            done = TRUE;
        }
        nodetyp = xmlTextReaderNodeType(reader);
        qname = xmlTextReaderConstName(reader);
        if (qname) {
            /* namespace check result is deliberately ignored here */
            nsid = 0;
            (void)xml_check_ns(reader, qname, &nsid, &len, &badns);
        } else {
            qname = (const xmlChar *)"";
        }
        /* NOTE(review): nodetyp (an xmlReaderTypes value) is compared against
         * XML_ELEMENT_DECL (an xmlElementType value); the two enums appear to
         * share the relevant numeric value in libxml2 -- confirm before
         * changing this comparison. */
        if (depth == startnode->depth &&
            !xml_strcmp(qname, startnode->qname) &&
            nodetyp == XML_ELEMENT_DECL) {
            done = TRUE;
        }
#ifdef XML_UTIL_DEBUG
        log_debug3("\nxml_skip: %s L:%d T:%s",
                   qname, depth, xml_get_node_name(nodetyp));
#endif
    }
    return NO_ERR;
}
<reponame>paser4se/marble<filename>docker/update_docker_hub.py
"""Utility script to update docker hub descriptions using the Rest API.
Usage:
update_docker_hub.py update --username=<username> --password=<password> [--image=<image>]
update_docker_hub.py (-h | --help)
update_docker_hub.py --version
Options:
-h --help Show this screen.
--version Show version.
--username=<username> Docker Hub username
--password=<password> Docker Hub password
--image=<image> Image name to update
"""
from docopt import docopt
import pycurl
import sys
import os
import glob
import certifi
import json
import io
from io import BytesIO
from urllib.parse import urlencode
short_desc_name = "README-short.txt"
full_desc_name = "README.md"
docker_subpath = "src/main/docker"
if __name__ == '__main__':
    # Parse CLI arguments against the usage text in the module docstring
    # (docopt reads __doc__ directly).
    arguments = docopt(__doc__, version='1.0')
    # print(arguments)
    update = arguments['update']
    if (update):
        username = arguments['--username']
        password = arguments['--password']
        image = arguments['--image']
        images_to_update = []
        if (image):
            # A single image was named on the command line.
            images_to_update.append(image)
        else:
            # No image given: discover every sibling marble-* module that
            # contains a src/main/docker directory.
            for full_dir_path in glob.glob("../marble-*"):
                image_name = os.path.basename(full_dir_path)
                if (os.path.isdir(os.path.join(full_dir_path, docker_subpath))):
                    images_to_update.append(image_name)
        # Log in to the Docker Hub API once to obtain a JWT for all updates.
        buffer = BytesIO()
        c = pycurl.Curl()
        post_data = {'username': username, 'password': password}
        postfields = urlencode(post_data)
        c.setopt(c.POSTFIELDS, postfields)
        c.setopt(pycurl.CAINFO, certifi.where())
        c.setopt(c.URL, 'https://hub.docker.com/v2/users/login/')
        c.setopt(c.WRITEDATA, buffer)
        print("Getting Token...")
        c.perform()
        status = c.getinfo(c.RESPONSE_CODE)
        c.close()
        if (status != 200):
            # Abort the whole run if authentication fails.
            sys.exit("Wrong response code received: <" + str(status) + ">")
        token_dict = json.loads(buffer.getvalue().decode('utf-8'))
        token = token_dict['token']
        for image in images_to_update:
            # Each image's descriptions live in its src/main/docker directory.
            docker_path = os.path.join("../", image, docker_subpath)
            short_desc = os.path.join(docker_path, short_desc_name)
            full_desc = os.path.join(docker_path, full_desc_name)
            if (os.path.isfile(short_desc) and os.path.isfile(full_desc)):
                # Read both description files as UTF-8.
                with io.open(short_desc,'r',encoding='utf8') as f:
                    short_desc_content = f.read()
                with io.open(full_desc,'r',encoding='utf8') as f:
                    full_desc_content = f.read()
                # PATCH the repository metadata with the new descriptions,
                # authenticating via the JWT obtained above.
                buffer = BytesIO()
                c = pycurl.Curl()
                post_data = {}
                post_data['description'] = short_desc_content
                post_data['full_description'] = full_desc_content
                postfields = urlencode(post_data)
                c.setopt(c.POSTFIELDS, postfields)
                c.setopt(pycurl.CAINFO, certifi.where())
                c.setopt(pycurl.CUSTOMREQUEST, "PATCH")
                c.setopt(pycurl.HTTPHEADER, ['Authorization: JWT ' + token])
                c.setopt(
                    c.URL, 'https://hub.docker.com/v2/repositories/miguelfc/' + image + "/")
                c.setopt(c.WRITEDATA, buffer)
                print("Updating description for image <" + image + ">...")
                c.perform()
                status = c.getinfo(c.RESPONSE_CODE)
                c.close()
                if (status == 404):
                    # A missing repository is reported but does not stop the run.
                    #print (buffer.getvalue())
                    print("Image not found on Docker Hub: <" + str(status) + ">")
                elif (status != 200):
                    # Any other failure aborts the remaining updates.
                    #print (buffer.getvalue())
                    sys.exit("Wrong response code received: <" + str(status) + ">")
            else:
                print ("WARNING: Image " + image +
                       " doesn't have the required description files. Skipping.")
        print ("Done.")
|
<reponame>matthew9811/rs<filename>src/main/java/com/shengxi/system/entity/test/TestStu.java
package com.shengxi.system.entity.test;
import com.shengxi.system.entity.config.BaseEntity;
/**
 * Entity describing a subject record for a student: subject number and
 * name, the term it was taken in, the achieved grade, and a choice status.
 *
 * @author: Matthew
 * @Date: 2019/5/9 16:09
 * @Description:
 */
public class TestStu extends BaseEntity {

    /** Subject number (code). */
    private String subNo;
    /** Subject display name. */
    private String subName;
    /** Academic term the subject belongs to. */
    private String term;
    /** Achieved grade. */
    private Double grade;
    /** Choice/enrolment status flag. */
    private String choiceStatus;

    public String getSubNo() {
        return subNo;
    }

    public void setSubNo(String subNo) {
        this.subNo = subNo;
    }

    public String getSubName() {
        return subName;
    }

    public void setSubName(String subName) {
        this.subName = subName;
    }

    public String getTerm() {
        return term;
    }

    public void setTerm(String term) {
        this.term = term;
    }

    public Double getGrade() {
        return grade;
    }

    public void setGrade(Double grade) {
        this.grade = grade;
    }

    public String getChoiceStatus() {
        return choiceStatus;
    }

    public void setChoiceStatus(String choiceStatus) {
        this.choiceStatus = choiceStatus;
    }
}
|
<gh_stars>10-100
#pragma once

#include <vector>

namespace SOUI
{
//----------------------------------------------------------------------------------
//
// Generic extension notifications, starting offset 300
//
//----------------------------------------------------------------------------------
#define EVT_STDEXT_BEGIN (EVT_EXTERNAL_BEGIN + 300)

#define EVT_STD_WINDOWS (EVT_STDEXT_BEGIN+0)

//
// Event used by a control to report the raw WINDOWS message it received.
// NOTE(review): the registered event name is L"on_mouse_event" although the
// class carries arbitrary window messages — confirm this string is intended.
//
class EventStdWindows : public TplEventArgs < EventStdWindows >
{
    SOUI_CLASS_NAME(EventStdWindows, L"on_mouse_event")
public:
    EventStdWindows(SWindow *pSender) :TplEventArgs<EventStdWindows>(pSender)
    {
    }
    enum { EventID = EVT_STD_WINDOWS };

    SWNDMSG msg;    // the forwarded window message
};

//----------------------------------------------------------------------------------
//
// TabView related notifications, starting offset 400
//
//----------------------------------------------------------------------------------
#define EVT_TABVIEW_BEGIN (EVT_EXTERNAL_BEGIN + 400)

#define EVT_TABVIEW_NEW (EVT_TABVIEW_BEGIN + 0)
#define EVT_TABVIEW_CLOSE (EVT_TABVIEW_BEGIN + 1)
#define EVT_TABVIEW_SELCHANGED (EVT_TABVIEW_BEGIN + 2)

// Fired when a new tab page is created.
class EventTabViewNew : public TplEventArgs < EventTabViewNew >
{
    SOUI_CLASS_NAME(EventTabViewNew, L"on_tabview_new")
public:
    EventTabViewNew(SWindow *pSender) :TplEventArgs<EventTabViewNew>(pSender)
    {
    }
    enum { EventID = EVT_TABVIEW_NEW };

    SWindow * pNewTab;  // the newly created tab window
    int iNewTab;        // index of the new tab
};

// Fired when a tab page is closed.
class EventTabViewClose : public TplEventArgs < EventTabViewClose >
{
    SOUI_CLASS_NAME(EventTabViewClose, L"on_tabview_close")
public:
    EventTabViewClose(SWindow *pSender) :TplEventArgs<EventTabViewClose>(pSender)
    {
    }
    enum { EventID = EVT_TABVIEW_CLOSE };

    SWindow * pCloseTab;  // the tab window being closed
    int iCloseTab;        // index of the closed tab
};

// Fired when the selected tab changes.
class EventTabviewSelChanged : public TplEventArgs < EventTabviewSelChanged >
{
    SOUI_CLASS_NAME(EventTabviewSelChanged, L"on_tabview_sel_changed")
public:
    EventTabviewSelChanged(SWindow *pSender) :TplEventArgs<EventTabviewSelChanged>(pSender)
    {
    }
    enum { EventID = EVT_TABVIEW_SELCHANGED };

    int iOldSel;  // previously selected tab index
    int iNewSel;  // newly selected tab index
};

//----------------------------------------------------------------------------------
//
// RichEdit related notifications, starting offset 600
//
//----------------------------------------------------------------------------------
#define EVT_RICHEDIT_BEGIN (EVT_EXTERNAL_BEGIN + 600)

#define EVT_RE_QUERY_ACCEPT (EVT_RICHEDIT_BEGIN+0)
#define EVT_RE_OBJ (EVT_RICHEDIT_BEGIN+1)
#define EVT_RE_SCROLLBAR (EVT_RICHEDIT_BEGIN+2)

class RichFormatConv;

// Asks handlers whether the rich-edit should accept the offered data.
class EventQueryAccept : public TplEventArgs < EventQueryAccept >
{
    SOUI_CLASS_NAME(EventQueryAccept, L"on_re_query_accept_data")
public:
    EventQueryAccept(SWindow *pSender) :TplEventArgs<EventQueryAccept>(pSender)
    {
    }
    enum { EventID = EVT_RE_QUERY_ACCEPT };

    RichFormatConv * Conv;  // converter holding the offered content
};

class RichEditObj;

// Notification raised by an embedded rich-edit (OLE) object.
class EventRichEditObj : public TplEventArgs < EventRichEditObj >
{
    SOUI_CLASS_NAME(EventRichEditObj, L"on_re_ole")
public:
    EventRichEditObj(SWindow *pSender) :TplEventArgs<EventRichEditObj>(pSender)
    {
    }
    enum { EventID = EVT_RE_OBJ };

    RichEditObj * RichObj;  // the embedded object raising the event
    int SubEventId;         // object-specific sub-event id
    WPARAM wParam;
    LPARAM lParam;
};

// Scroll/wheel notification from a rich-edit control.
class EventRichEditScroll : public TplEventArgs < EventRichEditScroll >
{
    SOUI_CLASS_NAME(EventRichEditScroll, L"on_re_scroll")
public:
    EventRichEditScroll(SWindow *pSender) :TplEventArgs<EventRichEditScroll>(pSender)
    {
        WheelDelta = 0;
        ScrollAtTop = FALSE;
        ScrollAtBottom = FALSE;
    }
    enum { EventID = EVT_RE_SCROLLBAR };

    int WheelDelta;      // mouse-wheel delta, 0 when not wheel-driven
    BOOL ScrollAtTop;    // TRUE when the view is scrolled to the top
    BOOL ScrollAtBottom; // TRUE when the view is scrolled to the bottom
    //SCROLLBARINFO ScrollInfo;
};

//----------------------------------------------------------------------------------
//
// List related notifications, starting offset 700
//
//----------------------------------------------------------------------------------
#define EVT_LIST_BEGIN (EVT_EXTERNAL_BEGIN + 700)

#define EVT_LIST_HOVER_CHANGED (EVT_LIST_BEGIN+0)

// Fired when the hovered list item changes.
class EventListHoverChanged : public TplEventArgs < EventListHoverChanged >
{
    SOUI_CLASS_NAME(EventListHoverChanged, L"on_list_hover_changed")
public:
    EventListHoverChanged(SWindow *pSender) :TplEventArgs<EventListHoverChanged>(pSender)
    {
    }
    enum { EventID = EVT_LIST_HOVER_CHANGED };

    int nHoverNew;  // index of the newly hovered item
    int nHoverOld;  // index of the previously hovered item
};

//----------------------------------------------------------------------------------
//
// DropDown window related notifications, starting offset 800
//
//----------------------------------------------------------------------------------
#define EVT_DROPDOWN_BEGIN (EVT_EXTERNAL_BEGIN + 800)

#define EVT_DD_ITEM_SELECTED (EVT_DROPDOWN_BEGIN+0)

// Fired when an item is chosen from a drop-down window.
class EventDropDownItemSelected : public TplEventArgs < EventDropDownItemSelected >
{
    SOUI_CLASS_NAME(EventDropDownItemSelected, L"on_dropdown_item_selected")
public:
    EventDropDownItemSelected(SObject *pSender) :TplEventArgs<EventDropDownItemSelected>(pSender) {}
    enum { EventID = EVT_DD_ITEM_SELECTED };

    int CurrentSelected;  // index of the selected item
};

//----------------------------------------------------------------------------------
//
// CefWebView window related notifications, starting offset 900
//
//----------------------------------------------------------------------------------
#define EVT_CEFWEBVIEW_BEGIN (EVT_EXTERNAL_BEGIN + 900)

#define EVT_WEBVIEW_NOTIFY (EVT_CEFWEBVIEW_BEGIN+0)

// Message posted from an embedded CEF web view back to the host.
class EventWebViewNotify : public TplEventArgs < EventWebViewNotify >
{
    SOUI_CLASS_NAME(EventWebViewNotify, L"on_webview_notify")
public:
    EventWebViewNotify(SObject *pSender) :TplEventArgs<EventWebViewNotify>(pSender) {}
    enum { EventID = EVT_WEBVIEW_NOTIFY };

    SStringW MessageName;        // name of the message sent by the page
    SArray<SStringW> Arguments;  // string arguments of the message
};

//----------------------------------------------------------------------------------
//
// ImgCache related notifications, starting offset 1000
//
//----------------------------------------------------------------------------------
#define EVT_IMGCACHE_BEGIN (EVT_EXTERNAL_BEGIN + 1000)

#define EVT_DONE_UPDATE (EVT_IMGCACHE_BEGIN+0)

class ImageAttr;

// Fired when the image cache finished updating a batch of images.
class EventImgCacheNotify : public TplEventArgs < EventImgCacheNotify >
{
    SOUI_CLASS_NAME(EventImgCacheNotify, L"on_imgcache_notify")
public:
    EventImgCacheNotify(SObject *pSender) : TplEventArgs<EventImgCacheNotify>(pSender)
        , Attrs(NULL)
        , Context(0)
    {
    }
    enum { EventID = EVT_DONE_UPDATE };

    std::vector<ImageAttr*>* Attrs;  // updated image attributes (owned by the cache)
    int Context;                     // caller-supplied context value
};

}// namespace SOUI
|
/**
 * Reads map entries from a socket, this could be a client or server socket.
 *
 * <p>Incoming bytes accumulate in the direct buffer {@code in}; the
 * {@code ByteBufferBytes} view {@code out} reads entries back out of the
 * same underlying memory, so {@code out.limit} is repeatedly synced to
 * {@code in.position}.</p>
 *
 * <p>NOTE(review): several names used here ({@code NOT_SET},
 * {@code SIZE_OF_SIZE}, {@code replicationConfig}, {@code maxEntrySizeBytes},
 * {@code statelessServerConnector}, {@code externalizable}, {@code LOG},
 * {@code Attached}, {@code Work}) are declared in the enclosing class,
 * outside this excerpt.</p>
 */
class TcpSocketChannelEntryReader {
    /** Extra bytes added on top of a required size when growing the buffer. */
    public static final int HEADROOM = 1024;

    ByteBuffer in;        // raw bytes read from the socket
    ByteBufferBytes out;  // read-view over the same memory as `in`

    public long lastHeartBeatReceived = System.currentTimeMillis();

    private long sizeInBytes;  // payload size of the entry currently being parsed
    private byte state;        // header byte of the current entry; NOT_SET between entries

    private TcpSocketChannelEntryReader() {
        // sized for one network packet plus one worst-case entry
        in = ByteBuffer.allocateDirect(replicationConfig.packetSize() + maxEntrySizeBytes);
        out = new ByteBufferBytes(in.slice());
        out.limit(0);
        in.clear();
    }

    /**
     * Grows the backing buffer to {@code size} bytes, preserving the unread
     * bytes and the current read ({@code out}) / write ({@code in}) positions.
     * Shrinking is not supported.
     */
    void resizeBuffer(long size) {
        assert size < Integer.MAX_VALUE;

        if (size < in.capacity())
            throw new IllegalStateException("it not possible to resize the buffer smaller");

        final ByteBuffer buffer = ByteBuffer.allocateDirect((int) size).order(ByteOrder.nativeOrder());

        // remember cursors so they can be restored on the new buffer
        final int inPosition = in.position();

        long outPosition = out.position();
        long outLimit = out.limit();

        out = new ByteBufferBytes(buffer.slice());

        // copy all bytes written so far into the new buffer
        in.position(0);
        for (int i = 0; i < inPosition; i++) {
            buffer.put(in.get());
        }

        in = buffer;
        in.limit(in.capacity());
        in.position(inPosition);

        out.limit(outLimit);
        out.position(outPosition);
    }

    /**
     * reads from the socket and writes them to the buffer
     *
     * @param socketChannel the socketChannel to read from
     * @return the number of bytes read
     * @throws IOException
     */
    private int readSocketToBuffer(@NotNull final SocketChannel socketChannel)
            throws IOException {

        compactBuffer();
        final int len = socketChannel.read(in);
        // expose the newly written bytes to the reader view
        out.limit(in.position());
        return len;
    }

    /**
     * reads entries from the buffer till empty
     *
     * @param attached per-channel attachment carrying reader/writer state
     * @throws InterruptedException
     */
    private void entriesFromBuffer(Attached attached) throws InterruptedException, IOException {
        for (; ; ) {
            out.limit(in.position());

            // its set to MIN_VALUE when it should be read again
            if (state == NOT_SET) {
                // need header byte + 4-byte size before parsing can start
                if (out.remaining() < SIZE_OF_SIZE + 1) {
                    return;
                }

                // state is used for both heartbeat and stateless
                state = out.readByte();
                sizeInBytes = out.readInt();

                // if the buffer is too small to read this payload we will have to grow the
                // size of the buffer
                long requiredSize = sizeInBytes + SIZE_OF_SIZE + 1;
                if (out.capacity() < requiredSize) {
                    attached.entryReader.resizeBuffer(requiredSize + HEADROOM);
                }

                // this is the :
                //  -- heartbeat if its 0
                //  -- stateful update if its 1
                //  -- the id of the stateful event
                if (state == NOT_SET)
                    continue;
            }

            // wait for the complete payload to arrive
            if (out.remaining() < sizeInBytes) {
                return;
            }

            final long nextEntryPos = out.position() + sizeInBytes;
            final long limit = out.limit();
            out.limit(nextEntryPos);

            boolean isStateless = (state != 1);

            if (isStateless) {
                if (statelessServerConnector == null) {

                    LOG.error("", new IllegalArgumentException("received an event " +
                            "from a stateless map, stateless maps are not " +
                            "currently supported when using Chronicle Channels"));
                } else {

                    final Work futureWork = statelessServerConnector.processStatelessEvent(state,
                            attached.entryWriter.in, attached.entryReader.out);

                    // in some cases it may not be possible to send out all the data before we
                    // fill out the write buffer, so this data will be send when the buffer
                    // is no longer full, and as such is treated as future work
                    if (futureWork != null) {

                        try {  // we will complete what we can for now
                            boolean isComplete = futureWork.doWork(attached.entryWriter.in);
                            if (!isComplete)
                                attached.entryWriter.uncompletedWork = futureWork;
                        } catch (Exception e) {
                            LOG.error("", e);
                        }

                    }
                }

            } else
                externalizable.readExternalEntry(out);

            out.limit(limit);

            // skip onto the next entry
            out.position(nextEntryPos);

            // reset parser state for the next entry header
            state = NOT_SET;
            sizeInBytes = 0;
        }

    }

    /**
     * compacts the buffer and updates the {@code in} and {@code out} accordingly
     */
    private void compactBuffer() {

        // the maxEntrySizeBytes used here may not be the maximum size of the entry in its serialized form
        // however, its only use as an indication that the buffer is becoming full and should be compacted
        // the buffer can be compacted at any time
        if (in.position() == 0 || in.remaining() > maxEntrySizeBytes)
            return;

        // discard the bytes already consumed by `out`
        in.limit(in.position());
        in.position((int) out.position());

        in.compact();
        out.position(0);
    }

    /**
     * @return the identifier or -1 if unsuccessful
     */
    byte identifierFromBuffer() {
        return (out.remaining() >= 1) ? out.readByte() : Byte.MIN_VALUE;
    }

    /**
     * @return the timestamp or -1 if unsuccessful
     */
    long remoteBootstrapTimestamp() {
        if (out.remaining() >= 8)
            return out.readLong();
        else
            return Long.MIN_VALUE;

        //  return (out.remaining() >= 8) ? out.readLong() : Long.MIN_VALUE;
    }

    /** Reads the remote heartbeat interval, or Long.MIN_VALUE when not yet available. */
    public long remoteHeartbeatIntervalFromBuffer() {
        return (out.remaining() >= 8) ? out.readLong() : Long.MIN_VALUE;
    }
}
class CimDumpDataLoader:
    """Utility to load data from dump folder"""

    def load(self, dumps_folder: str) -> CimDataCollection:
        """Load data from dump folder

        NOTE:
            dumps folder should contains following files.
            ports.csv, vessels.csv, routes.csv, order_proportion.csv,
            global_order_proportion.txt, misc.yml, stops.bin

        Args:
            dumps_folder(str): folder that contains dumped files

        Returns:
            CimDataCollection: data collection for data container
        """
        # load from files
        misc_items = self._load_misc(dumps_folder)
        order_target_proportion = self._load_order_proportions(dumps_folder)
        port_mapping, ports = self._load_ports(dumps_folder, order_target_proportion)
        route_mapping, routes = self._load_routes(dumps_folder)
        vessel_mapping, vessels, periods_without_noise = self._load_vessels(dumps_folder)
        stops = self._load_stops(dumps_folder, len(vessels))
        global_order_proportions = self._load_global_order_proportions(dumps_folder)

        # construct data collection
        # NOTE: this is a namedtuple, so out-side cannot change it
        data_collection = CimDataCollection(
            misc_items["total_container"],
            misc_items["past_stop_number"],
            misc_items["future_stop_number"],
            misc_items["container_volume"],
            OrderGenerateMode(misc_items["order_mode"]),
            ports,
            port_mapping,
            vessels,
            vessel_mapping,
            stops,
            global_order_proportions,
            routes,
            route_mapping,
            periods_without_noise,
            misc_items["max_tick"],
            misc_items["seed"],
            misc_items["version"]
        )

        return data_collection

    def _wait_for_dump_file(self, file_path: str):
        """Poll up to 3 times (10 seconds apart) for a dumped file to appear.

        Dump files may be produced by another process, so give them time to
        show up, but stop polling as soon as the file exists. The caller's
        subsequent open() raises if the file never appears.
        """
        for _ in range(3):
            if os.path.exists(file_path):
                break
            time.sleep(10)

    def _load_misc(self, dumps_folder: str) -> dict:
        """Load misc items from yaml"""
        misc_file_path = os.path.join(dumps_folder, "misc.yml")

        self._wait_for_dump_file(misc_file_path)

        with open(misc_file_path, "rt") as fp:
            return safe_load(fp)

    def _load_global_order_proportions(self, dumps_folder: str) -> np.ndarray:
        """load global order proportions from txt file"""
        global_order_prop_file = os.path.join(
            dumps_folder, "global_order_proportion.txt")

        return np.loadtxt(global_order_prop_file)

    def _read_csv_lines(self, file_path: str):
        """Helper to read and yield line (as dict) from csv file"""
        self._wait_for_dump_file(file_path)

        with open(file_path, "rt") as fp:
            reader = csv.DictReader(fp)

            for line in reader:
                yield line

    def _load_order_proportions(self, dumps_folder: str) -> Dict[int, List[NoisedItem]]:
        """Load target order proportions from file"""
        target_proportions: Dict[int, List[NoisedItem]] = defaultdict(list)
        proportion_file_path = os.path.join(dumps_folder, "order_proportion.csv")

        for line in self._read_csv_lines(proportion_file_path):
            source_port_index = int(line["source_port_index"])

            target_prop = NoisedItem(
                int(line["dest_port_index"]),
                float(line["proportion"]),
                float(line["proportion_noise"])
            )

            target_proportions[source_port_index].append(target_prop)

        return target_proportions

    def _load_ports(self, dumps_folder: str, order_target_proportion: dict) -> (Dict[str, int], List[PortSetting]):
        """Load port settings and the name->index mapping from ports.csv"""
        ports_file_path = os.path.join(dumps_folder, "ports.csv")

        port_mapping: Dict[str, int] = {}
        ports: List[PortSetting] = []

        for line in self._read_csv_lines(ports_file_path):
            port_name = line["name"]
            port_index = int(line["index"])

            port_mapping[port_name] = port_index

            full_rtn_buffer = NoisedItem(
                port_index,
                int(line["full_return_buffer"]),
                int(line["full_return_buffer_noise"]))

            empty_rtn_buffer = NoisedItem(
                port_index,
                int(line["empty_return_buffer"]),
                int(line["empty_return_buffer_noise"]))

            source_order_proportion = NoisedItem(
                port_index,
                float(line["order_proportion"]),
                float(line["order_proportion_noise"])
            )

            port = PortSetting(port_index,
                               port_name,
                               int(line["capacity"]),
                               int(line["empty"]),
                               source_order_proportion,
                               order_target_proportion[port_index],
                               empty_rtn_buffer,
                               full_rtn_buffer)

            ports.append(port)

        return port_mapping, ports

    def _load_vessels(self, dumps_folder: str) -> (Dict[str, int], List[VesselSetting]):
        """Load vessel settings, the name->index mapping and noise-free periods"""
        vessel_mapping: Dict[str, int] = {}
        vessels: List[VesselSetting] = []
        periods_without_noise: List[int] = []

        vessels_file_path = os.path.join(dumps_folder, "vessels.csv")

        for line in self._read_csv_lines(vessels_file_path):
            vessel_name = line["name"]
            vessel_index = int(line["index"])

            vessel_mapping[vessel_name] = vessel_index
            periods_without_noise.append(int(line["period"]))

            vessel = VesselSetting(vessel_index,
                                   vessel_name,
                                   int(line["capacity"]),
                                   line["route_name"],
                                   line["start_port_name"],
                                   float(line["sailing_speed"]),
                                   float(line["sailing_speed_noise"]),
                                   int(line["parking_duration"]),
                                   float(line["parking_noise"]),
                                   int(line["empty"]))

            vessels.append(vessel)

        return vessel_mapping, vessels, periods_without_noise

    def _load_routes(self, dumps_folder: str) -> (Dict[str, int], List[List[RoutePoint]]):
        """Load routes (lists of route points) and the name->index mapping"""
        route_mapping: Dict[str, int] = {}
        routes: List[List[RoutePoint]] = []

        route_file_path = os.path.join(dumps_folder, "routes.csv")

        for line in self._read_csv_lines(route_file_path):
            route_index = int(line["index"])
            route_name = line["name"]

            route_mapping[route_name] = route_index

            # routes.csv rows arrive grouped by ascending route index
            if route_index >= len(routes):
                routes.append([])

            route_point = RoutePoint(
                route_index, line["port_name"], float(line["distance"]))

            routes[route_index].append(route_point)

        return route_mapping, routes

    def _load_stops(self, dumps_folder: str, vessel_number: int) -> List[List[Stop]]:
        """Load per-vessel stop lists from the binary stops dump"""
        stops: List[List[Stop]] = []

        for _ in range(vessel_number):
            stops.append([])

        stops_file_path = os.path.join(dumps_folder, "stops.bin")

        reader = BinaryReader(stops_file_path)

        for stop_item in reader.items():
            vessel_stops: List[Stop] = stops[stop_item.vessel_index]

            stop = Stop(len(vessel_stops),
                        stop_item.timestamp,
                        stop_item.leave_tick,
                        stop_item.port_index,
                        stop_item.vessel_index)

            vessel_stops.append(stop)

        return stops
/* Walk up the parent chain of `dirp` and return the first enclosing
 * directive whose name equals `what` (case-insensitively); NULL when
 * no ancestor matches. The starting node itself is not considered. */
static const ap_directive_t * find_parent(const ap_directive_t *dirp,
                                          const char *what)
{
    for (dirp = dirp->parent; dirp != NULL; dirp = dirp->parent) {
        if (strcasecmp(dirp->directive, what) == 0) {
            return dirp;
        }
    }

    return NULL;
}
/**
 * Learn an Annotator from AnnotationExample's.
 *
 * <p>Implementations follow a query/answer protocol: the learner is given a
 * pool of documents, repeatedly proposes spans to be labeled
 * ({@link #nextQuery}), receives labeled answers ({@link #setAnswer}), and
 * finally produces a trained {@link #getAnnotator}.</p>
 *
 * @author William Cohen
 */
public abstract class AnnotatorLearner
{
    /** Discard all learned state and pending queries. */
    abstract public void reset();

    /** Accept a pool of documents. */
    abstract public void setDocumentPool(Iterator<Span> documents);

    /** Returns true if the learner has more queries to answer. */
    abstract public boolean hasNextQuery();

    /** Returns an Span which the learner would like labeled. */
    abstract public Span nextQuery();

    /** Accept the answer to the last query. */
    abstract public void setAnswer(AnnotationExample answeredQuery);

    /** Set the label used for annotations produced by the learner. */
    abstract public void setAnnotationType(String s);

    /** Get the label used for annotations produced by the learner. */
    abstract public String getAnnotationType();

    /** Return the learned annotator */
    abstract public Annotator getAnnotator();

    /** Return the span feature extractor used by this annotator. This could be null
     * if no such feature extractor exists.
     */
    abstract public SpanFeatureExtractor getSpanFeatureExtractor();

    /** Set the feature extractor used by this annotator. This may
     * have no action if no such feature extractor exists.
     */
    abstract public void setSpanFeatureExtractor(SpanFeatureExtractor fe);

    /** Help text shown by UIs for the annotation-type property. */
    public String getAnnotationTypeHelp() { return "Get the label used for annotations produced by the learner"; }

    /** Help text shown by UIs for the feature-extractor property. */
    public String getSpanFeatureExtractorHelp() { return "<html> Set the feature extractor used by this learner <br> "; }
}
def create_group(self, properties: Dict[str, Optional[Any]]) -> Dict:
    """Create a group by POSTing the given properties to the 'groups' endpoint.

    Args:
        properties: group attributes to send as the JSON request body.

    Returns:
        The created group as returned by the API client.
    """
    return self.ms_client.http_request(method='POST', url_suffix='groups', json_data=properties)
/**
 * Predicate comparing a template temporal expression (left operand) against a
 * temporal element, or a history (array) of temporal elements (right operand),
 * using Allen-style interval relations plus recurrence-aware variants.
 *
 * <p>
 * Added: / TL<br>
 * Modifications:
 * </p>
 *
 * @author Tim Lammarsch
 *
 */
public class TemporalComparisonPredicate extends BinaryExpression implements Predicate {
    // Operation codes; passed as the BinaryExpression operator.
    public static final int BEFORE = 0x0101;    // history ends strictly before template starts
    public static final int AFTER = 0x0102;     // history starts strictly after template ends
    public static final int STARTS = 0x0103;    // history starts with template and does not outlast it
    public static final int FINISHES = 0x0104;  // history ends with template and does not precede it
    public static final int MEETS = 0x0105;     // history ends immediately before template starts
    public static final int DURING = 0x0106;    // history lies completely inside template
    public static final int OUTSIDE = 0x0107;   // history lies completely outside template
    public static final int OVERLAPS = 0x0108;  // history shares at least one granule/instant with template
    public static final int ASLONGAS = 0x0109;  // total history length equals template span length

    /** Compare a template against a single temporal element expression. */
    public TemporalComparisonPredicate(int operation, TemporalExpression left, TemporalExpression right) {
        super(operation,Integer.MIN_VALUE,Integer.MAX_VALUE,left,right);
    }

    /** Compare a template against an array (history) of temporal elements. */
    public TemporalComparisonPredicate(int operation, TemporalExpression left, TemporalElementArrayExpression right) {
        super(operation,Integer.MIN_VALUE,Integer.MAX_VALUE,left,right);
    }

    /* (non-Javadoc)
     * @see prefuse.data.expression.Expression#getType(prefuse.data.Schema)
     */
    @Override
    public Class getType(Schema s) {
        return boolean.class;
    }

    /**
     * Evaluates the configured relation for the given tuple.
     * The left operand yields the template; the right operand yields either a
     * single TemporalElement or a TemporalElement[] history. Returns true when
     * the relation holds; all cases fall through to the final {@code return true}
     * unless a disqualifying condition returns false first.
     */
    @SuppressWarnings("unchecked")
    public boolean getBoolean(Tuple t) {
        try {
            // NOTE(review): this guard tests the class of the *expression object*
            // itself (m_left), not of its evaluated result — it looks inverted
            // relative to the message; confirm the intended check.
            if (TemporalElement.class.isAssignableFrom(m_left.getClass()))
                throw new IllegalArgumentException("Operation only permitted on TemporalElement,TemporalElement or TemporalElement,ArrayList<TemporalElement>");

            TemporalElement teTemplate = (TemporalElement)m_left.get(t);

            // Normalize the right operand into a history array of length >= 1.
            TemporalElement[] history = null;
            if (TemporalElement[].class.isAssignableFrom(m_right.getType(t.getSchema()))) {
                history = (TemporalElement[])m_right.get(t);
            } else if(TemporalElement.class.isAssignableFrom(m_right.getType(t.getSchema()))) {
                history = new TemporalElement[1];
                history[0] = (TemporalElement)m_right.get(t);
            } else
                throw new IllegalArgumentException("Operation only permitted on TemporalElement,TemporalElement or TemporalElement,ArrayList<TemporalElement>");

            // First and last element of the history bracket its overall extent.
            TemporalElement teStart = history[0];
            TemporalElement teEnd = history[history.length-1];

            switch(m_op) {
                case BEFORE:
                    // history must end strictly before the template begins
                    if(teEnd.getLastInstant().getSup() >= teTemplate.getFirstInstant().getInf())
                        return false;
                    break;
                case AFTER:
                    // history must start strictly after the template ends
                    if (teStart.getFirstInstant().getInf() <= teTemplate.getLastInstant().getSup())
                        return false;
                    break;
                case STARTS:
                    // Recurring templates compare granule identifiers; otherwise instants.
                    if (teTemplate.getKind() == TemporalElement.RECURRING_INSTANT || teTemplate.getKind() == TemporalElement.RECURRING_INTERVAL) {
                        if(teTemplate.getGranules()[0].getIdentifier() != teStart.getGranules()[0].getIdentifier())
                            return false;
                        if (teEnd.getGranules()[teEnd.getGranules().length-1].getIdentifier()
                                > teTemplate.getGranules()[teTemplate.getGranules().length-1].getIdentifier())
                            return false;
                    } else {
                        if (teTemplate.getFirstInstant().getInf() != teStart.getFirstInstant().getInf() ||
                                teEnd.getLastInstant().getSup() > teTemplate.getLastInstant().getSup())
                            return false;
                    }
                    break;
                case FINISHES:
                    // Mirror image of STARTS: anchored at the template's end.
                    if (teTemplate.getKind() == TemporalElement.RECURRING_INSTANT || teTemplate.getKind() == TemporalElement.RECURRING_INTERVAL) {
                        if(teTemplate.getGranules()[teTemplate.getGranules().length-1].getIdentifier() !=
                                teEnd.getGranules()[teEnd.getGranules().length-1].getIdentifier())
                            return false;
                        if (teStart.getGranules()[0].getIdentifier() < teTemplate.getGranules()[0].getIdentifier())
                            return false;
                    } else {
                        if (teTemplate.getLastInstant().getSup() != teEnd.getLastInstant().getSup() ||
                                teStart.getFirstInstant().getInf() < teTemplate.getFirstInstant().getInf())
                            return false;
                    }
                    break;
                case MEETS:
                    // History must end in the granule/instant immediately preceding
                    // the template; recurring granules wrap around at the maximum id.
                    if (teTemplate.getKind() == TemporalElement.RECURRING_INSTANT || teTemplate.getKind() == TemporalElement.RECURRING_INTERVAL) {
                        long needed = teEnd.getGranules()[teEnd.getGranules().length-1].getIdentifier() + 1;
                        if(needed > teEnd.getGranule().getGranularity().getMaxGranuleIdentifier())
                            needed = teEnd.getGranule().getGranularity().getMinGranuleIdentifier();
                        if( needed != teTemplate.getGranule().getIdentifier() )
                            return false;
                    } else {
                        if (teEnd.getLastInstant().getSup() + 1 != teTemplate.getFirstInstant().getInf())
                            return false;
                    }
                    break;
                case DURING:
                    if (teTemplate.getKind() == TemporalElement.RECURRING_INSTANT || teTemplate.getKind() == TemporalElement.RECURRING_INTERVAL) {
                        // All history elements must share the template's granularity...
                        Granularity g = teTemplate.getGranule().getGranularity();
                        for(TemporalElement iTe : history) {
                            if (g.getIdentifier() != iTe.getGranule().getGranularity().getIdentifier() ||
                                    g.getGranularityContextIdentifier() != iTe.getGranule().getGranularity().getGranularityContextIdentifier())
                                return false;
                        }
                        // ...and every granule covered by the history must be among
                        // the granules covered by the template.
                        long inf = history[0].getFirstInstant().getInf();
                        long sup = history[history.length-1].getLastInstant().getSup();
                        Granule[] possible = g.createGranules(teTemplate.getFirstInstant().getInf(), teTemplate.getLastInstant().getSup());
                        for(Granule iG : g.createGranules(inf, sup)) {
                            boolean found = false;
                            for(Granule iG2 : possible) {
                                if ( iG.getIdentifier() == iG2.getIdentifier()) {
                                    found = true;
                                    break;
                                }
                            }
                            if (!found)
                                return false;
                        }
                    } else {
                        if(teStart.getFirstInstant().getInf() < teTemplate.getFirstInstant().getInf())
                            return false;
                        if(teEnd.getLastInstant().getSup() > teTemplate.getLastInstant().getSup())
                            return false;
                    }
                    break;
                case OUTSIDE:
                    if (teTemplate.getKind() == TemporalElement.RECURRING_INSTANT || teTemplate.getKind() == TemporalElement.RECURRING_INTERVAL) {
                        // Same granularity required; no history granule may equal the template's.
                        Granule g = teTemplate.getGranule();
                        Granularity gy = g.getGranularity();
                        for(TemporalElement iTe : history) {
                            if (gy.getIdentifier() != iTe.getGranule().getGranularity().getIdentifier() ||
                                    gy.getGranularityContextIdentifier() != iTe.getGranule().getGranularity().getGranularityContextIdentifier() ||
                                    g.getIdentifier() == iTe.getGranule().getIdentifier())
                                return false;
                        }
                    } else {
                        // History must lie entirely on one side of the template.
                        if (!(history[0].getFirstInstant().getInf() >= teTemplate.getLastInstant().getSup() ||
                                history[history.length-1].getLastInstant().getSup() <= teTemplate.getFirstInstant().getInf()))
                            return false;
                    }
                    break;
                case OVERLAPS:
                    if (teTemplate.getKind() == TemporalElement.RECURRING_INSTANT || teTemplate.getKind() == TemporalElement.RECURRING_INTERVAL) {
                        // Any shared granule identifier counts as an overlap.
                        for(Granule i1 : teTemplate.getGranules()) {
                            for(TemporalElement i2 : history) {
                                for(Granule i3 : i2.getGranules()) {
                                    if (i1.getIdentifier() == i3.getIdentifier()) {
                                        return true;
                                    }
                                }
                            }
                        }
                    } else {
                        // Non-recurring: template must fit inside some history granule.
                        for(TemporalElement i2 : history) {
                            for(Granule i3 : i2.getGranules()) {
                                if (teTemplate.getFirstInstant().getInf() >= i3.getInf() &&
                                        teTemplate.getLastInstant().getSup() <= i3.getSup()) {
                                    return true;
                                }
                            }
                        }
                        return false;
                    }
                    break;
                case ASLONGAS:
                    // Template must be a Span whose length equals the summed
                    // lengths of all history elements.
                    if(!(teTemplate instanceof Span))
                        return false;
                    long total = 0;
                    for(TemporalElement iTe : history) {
                        total += (iTe instanceof Span) ? ((Span)iTe).getLength() : iTe.getLastInstant().getSup()-iTe.getFirstInstant().getInf() + 1;
                    }
                    if (((Span)teTemplate).getLength() != total)
                        return false;
                    break;
            }
        } catch (TemporalDataException e) {
            throw new UnsupportedOperationException("Illegal handling of time-oriented data: "+e.getMessage());
        } finally {
            // Release any temporary elements materialized while evaluating.
            if (TemporalExpression.class.isAssignableFrom(m_left.getClass()) ) {
                ((TemporalExpression) m_left).destroyTemporaryTemporalElements();
            }
            if (TemporalExpression.class.isAssignableFrom(m_right.getClass()) ) {
                ((TemporalExpression) m_right).destroyTemporaryTemporalElements();
            }
        }

        return true;
    }
}
Agile C2 organizational decision allocation and pattern evolution methods
This paper studies evolutionary approaches to decision allocation in C2 organizations and analyzes their decision structure and the division of decision authority. It constructs three decision models and identifies attribute factors that influence decision allocation. It then proposes a decision-allocation method based on multi-attribute group decision theory and improves the expert-weighting method in group decision making, increasing the objective rationality of attribute weighting. Building on attribute changes, a dynamic expert-weighting method and a decision-mode leap mechanism are used to achieve the evolution of multi-stage decision allocation. Finally, simulation experiments demonstrate the reasonableness of the proposed decision-authority allocation and evolution mechanism.
def add_question(question, answers, paragraph_dict, article_title):
    """Append a question with its answers to a SQuAD-style paragraph dict.

    Validates that the question is unique within the paragraph and that each
    answer's text actually appears at its claimed ``answer_start`` offset in
    the paragraph context before mutating ``paragraph_dict['qas']``.

    Args:
        question: question text; must be unique within this paragraph.
        answers: list of dicts with keys ``answer_start`` (int offset into the
            paragraph context) and ``answer_text`` (the expected substring).
        paragraph_dict: dict with keys ``context`` (str) and ``qas`` (list);
            mutated in place.
        article_title: title used (with the question) to derive the entry id.

    Raises:
        ValueError: if the question already exists in the paragraph, or if any
            answer does not fit in / match the paragraph context.
    """
    existing_questions = [q['question'] for q in paragraph_dict['qas']]
    if question in existing_questions:
        msg = "Question '{}' not unique in paragraph."
        raise ValueError(msg.format(question))
    # Hoisted out of the loop: the context is the same for every answer.
    para_text = paragraph_dict['context']
    for answer_dict in answers:
        answer_start = answer_dict['answer_start']
        answer_text = answer_dict['answer_text']
        if len(para_text) < answer_start + len(answer_text):
            msg = "Answer '{}' starting at {} can't fit in paragraph."
            raise ValueError(msg.format(answer_text, answer_start))
        answer_in_para = para_text[
            answer_start: answer_start + len(answer_text)]
        if answer_text != answer_in_para:
            # Fixed: the two fragments previously concatenated without a
            # separating space ("...paragraphusing answer_start...").
            msg = ("Answer '{}' doesn't match '{}' in paragraph "
                   "using answer_start '{}'")
            raise ValueError(msg.format(answer_text,
                                        answer_in_para,
                                        answer_start))
    # NOTE(review): builtin hash() is randomized per process (PYTHONHASHSEED),
    # so ids are not stable across runs — confirm whether stable ids matter;
    # if so, switch to hashlib (behavior change, so not done here).
    question_dict = {'answers': answers,
                     'id': str(hash(question + article_title)),
                     'question': question,
                     'is_impossible': False}
    paragraph_dict['qas'].append(question_dict)
/**
 * This node represents those strange places where we have what is a valid semantic element
 * but syntactically it is not there: [1, (), 3]. The parens here are syntax and evaluating
 * it will return nil but a nil is not actually there.
 */
public class ImplicitNilNode extends Node {
    /** @param position source location the implicit nil is attributed to */
    public ImplicitNilNode(SourcePosition position) {
        super(position);
    }

    /** Dispatches to {@code visitImplicitNilNode} on the visitor. */
    @Override
    public <T> T accept(NodeVisitor<T> visitor) {
        return visitor.visitImplicitNilNode(this);
    }

    @Override
    public NodeType getNodeType() {
        return NodeType.IMPLICITNILNODE;
    }
}
/**
 * @testcase          tc_libc_stdio_remove_p
 * @brief             Deletes the file whose name is specified in filename.
 * @scenario          Open file and remove
 * @apicovered        remove
 * @precondition      NA
 * @postcondition     NA
 */
static void tc_libc_stdio_remove_p(void)
{
    char *filename = VFS_FILE_PATH;
    int ret;
    int fd;

    /* Mount the test file system and create the file to be removed. */
    vfs_mount();
    fd = open(VFS_FILE_PATH, O_WROK | O_CREAT);
    TC_ASSERT_GEQ_CLEANUP("open", fd, 0, vfs_unmount());
    close(fd);

    /* remove() must succeed on the freshly created file. */
    ret = remove(filename);
    TC_ASSERT_EQ("remove", ret, OK);

    vfs_unmount();
    TC_SUCCESS_RESULT();
}
A Republican-aligned super PAC is trying to knock down Democrats’ official effort Monday to rebrand themselves as “a better deal” -- launching an ad campaign that targets House Minority Leader Nancy Pelosi and argues her party remains mired in “the same, old liberal ideas.”
The Congressional Leadership Fund is behind the digital ad campaign, which is titled “Resistance” and targets Pelosi’s San Francisco congressional district and 12 other Democrat-leaning districts that President Trump won last fall.
All 435 House seats are up for reelection in 2018.
“The Democrats are the party of the resistance,” the narrator says in the 33-second ad that includes images of window-smashing and other protester-driven violence surrounding the inauguration.
“Radical extremists who destroy buildings, burn cars and divide America. Hollywood celebrities who are blinded by their hatred of the president. Nancy Pelosi and the Washington Democrats answer to them.”
SCHUMER TELLS CLINTON, 'BLAME YOURSELF'
On Monday, Pelosi and Senate Minority Leader Chuck Schumer, D-N.Y., will lead an event in Virginia to announce the “better deal” agenda, following party leaders acknowledging they lost to Trump in large part because voters didn't know what the party stood for.
They intentionally are heading outside Washington to host the event in the district of GOP Rep. Barbara Comstock, whom they hope to defeat next year.
The new message -- formally titled “A Better Deal: Better Jobs, Better Wages, Better Future” -- follows months of internal debate and analysis of polling and focus groups. (After an earlier and abbreviated version leaked on Thursday, Twitter users mocked the similarity to the slogan for Papa John's pizza, "Better Ingredients, Better Pizza.")
Schumer acknowledged on Sunday that Democrats were partially to blame for Americans not knowing what the party stands for.
"When you lose an election with someone who has, say, 40 percent popularity, you look in the mirror and say what did we do wrong?” he said on ABC’s “This Week.” “And the number one thing that we did wrong is we didn't have -- we didn't tell people what we stood for."
However, Congressional Leadership Fund leaders say the message “continues to advance the same, old liberal ideas including single-payer health care, tax increases and military cuts,” despite all of the poll testing.
“The simple truth is that a Democrat is someone who is beholden to Nancy Pelosi, wants to raise your taxes, is blinded by their hatred of the president, and regularly loses elections,” said Cory Bliss, the political action committee’s executive director.
The Associated Press contributed to this report. |
/**
 * Immutable value object describing a service-level error: a numeric code
 * plus a human-readable message. Instances are created only through the
 * static factory methods below.
 *
 * Created by Noor on 1/9/17.
 */
public class ServiceError {

    /** Numeric error code (1000 = unknown, 1001 = validation, ...). */
    private final int code;

    /** Human-readable description of the error. */
    private final String message;

    private ServiceError(int code, String message) {
        this.code = code;
        this.message = message;
    }

    /** @return a generic "unknown error" with code 1000. */
    public static ServiceError unknownError() {
        return new ServiceError(1000, "Unknown error occurred");
    }

    /**
     * BUGFIX: the {@code ex} parameter was previously ignored, making this
     * overload indistinguishable from {@link #unknownError()}. The cause's
     * message is now appended so callers get diagnostic context.
     *
     * @param ex the underlying cause; may be null or carry a null message
     * @return an "unknown error" (code 1000) enriched with the cause's message
     */
    public static ServiceError unknownError(Throwable ex) {
        if (ex == null || ex.getMessage() == null) {
            return new ServiceError(1000, "Unknown error occurred");
        }
        return new ServiceError(1000, "Unknown error occurred: " + ex.getMessage());
    }

    /** @return validation error (code 1001) for a missing loanAmount. */
    public static ServiceError loanAmountRequired() {
        return new ServiceError(1001, "loanAmount is required");
    }

    // NOTE(review): code 1003 is shared with serviceTookMoreTime(); the codes
    // are kept as-is for backward compatibility, but callers cannot tell the
    // two conditions apart by code alone -- consider assigning a unique code.
    public static ServiceError noBankServiceAvailable() {
        return new ServiceError(1003, "No bank service is available at the moment");
    }

    /** Wraps an arbitrary exception's message under error code 1002. */
    public static ServiceError mapException(Exception ex) {
        return new ServiceError(1002, ex.getMessage());
    }

    /** @return timeout error (code 1003, duplicated -- see note above). */
    public static ServiceError serviceTookMoreTime() {
        return new ServiceError(1003, "Service request took more time. Please try again.");
    }

    public int getCode() {
        return code;
    }

    public String getMessage() {
        return message;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ServiceError that = (ServiceError) o;
        if (getCode() != that.getCode()) return false;
        return getMessage() != null ? getMessage().equals(that.getMessage()) : that.getMessage() == null;
    }

    @Override
    public int hashCode() {
        int result = getCode();
        result = 31 * result + (getMessage() != null ? getMessage().hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "ServiceError{" +
                "code=" + code +
                ", message='" + message + '\'' +
                '}';
    }
}
/**
 * Function: load_SED_from_fitsext
 * The function creates an energy distribution from the data stored
 * in a fits file extension. The data must be stored in the columns
 * "WAV_NM" (wavelength) and "FLUX".
 *
 * Parameters:
 * @param spectral_models_file - pathname to the spectral models file
 *                               (used only in diagnostic messages)
 * @param s_models - pointer to the fits file extension
 *
 * Returns:
 * @return sed - the energy distribution created
 */
energy_distrib *
load_SED_from_fitsext(const char spectral_models_file[], fitsfile *s_models)
{
  int f_status=0;
  int anynul;
  long nrows=0;
  int colnum1;
  int colnum2;

  energy_distrib *sed;
  double *sed_wavs;
  double *sed_flux;

  /* BUGFIX: the result of this malloc() was previously used unchecked. */
  sed = (energy_distrib *) malloc(sizeof(energy_distrib));
  if (!sed) {
      aXe_message (aXe_M_ERROR, __FILE__, __LINE__,
                   "Memory allocation failed");
  }

  /* determine the number of table rows (= number of SED sample points) */
  fits_get_num_rows (s_models, &nrows, &f_status);
  if (f_status) {
      ffrprt (stderr, f_status);
      aXe_message (aXe_M_FATAL, __FILE__, __LINE__,
                   "load_SED_from_fitsext: "
                   "Could not determine the number of rows in"
                   " table %s",spectral_models_file);
  }

  /* NOTE(review): aXe_M_ERROR is assumed to abort or otherwise prevent the
   * null pointers below from being dereferenced -- confirm against the
   * aXe_message implementation. */
  sed_wavs = (double *) malloc(nrows*sizeof(double));
  if (!sed_wavs) {
      aXe_message (aXe_M_ERROR, __FILE__, __LINE__,
                   "Memory allocation failed");
  }
  sed_flux = (double *) malloc(nrows*sizeof(double));
  if (!sed_flux) {
      aXe_message (aXe_M_ERROR, __FILE__, __LINE__,
                   "Memory allocation failed");
  }

  /* locate and read the wavelength column.
   * BUGFIX: error messages previously named the wrong function
   * ("create_interp_ftable") and the wrong column ("WAVELENGTH"). */
  fits_get_colnum (s_models, CASEINSEN, "WAV_NM", &colnum1, &f_status);
  if (f_status)
    {
      ffrprt (stderr, f_status);
      aXe_message (aXe_M_FATAL, __FILE__, __LINE__,
                   "load_SED_from_fitsext: "
                   "Could not determine column %s in "
                   " table %s", "WAV_NM", spectral_models_file);
    }
  fits_read_col (s_models, TDOUBLE, colnum1, 1, 1, nrows, NULL, sed_wavs,
                 &anynul, &f_status);
  if (f_status)
    {
      ffrprt (stderr, f_status);
      aXe_message (aXe_M_FATAL, __FILE__, __LINE__,
                   "load_SED_from_fitsext: "
                   "Could not read content of WAV_NM column "
                   " from BINARY table %s", spectral_models_file);
    }

  /* locate and read the flux column */
  fits_get_colnum (s_models, CASEINSEN, "FLUX", &colnum2, &f_status);
  if (f_status)
    {
      ffrprt (stderr, f_status);
      aXe_message (aXe_M_FATAL, __FILE__, __LINE__,
                   "load_SED_from_fitsext: "
                   "Could not determine column %s in "
                   " table %s", "FLUX", spectral_models_file);
    }
  fits_read_col (s_models, TDOUBLE, colnum2, 1, 1, nrows, NULL, sed_flux,
                 &anynul, &f_status);
  if (f_status)
    {
      ffrprt (stderr, f_status);
      aXe_message (aXe_M_FATAL, __FILE__, __LINE__,
                   "load_SED_from_fitsext: "
                   "Could not read content of FLUX column "
                   " from BINARY table %s", spectral_models_file);
    }

  /* transfer ownership of the arrays to the SED and set up the
   * GSL interpolator over (wavelength, flux). */
  sed->npoints    = nrows;
  sed->wavelength = sed_wavs ;
  sed->flux       = sed_flux;

  sed->interp     = gsl_interp_alloc (SMODEL_INTERP_TYPE, (size_t)sed->npoints );
  sed->accel      = gsl_interp_accel_alloc ();
  gsl_interp_init (sed->interp, sed->wavelength, sed->flux, (size_t)sed->npoints);

  return sed;
}
The program that Ottawa is looking to reboot allowed police, spy agencies, and possibly others to obtain Canadians’ data and personal information without a warrant from telecommunications companies and others. The government argued in court that this practice, which often came with no paper trail at all, was simply a way through which police could obtain “basic subscriber information” which linked a phone number or IP address to a name and address.
The documents add to a wealth of information obtained by VICE News that show the Trudeau government has been working to build support for broad new investigative powers, all while keeping information about RCMP and CSIS surveillance tactics out of the public domain.
New documents, obtained by VICE News under access to information laws, relate to meetings of a federal-provincial working group on cybercrime that had recommended proceeding with new legislation to restart that program. The details of what, exactly, that legislation would look like are contained in a report that Ottawa has refused to make public, against the advice of its own civil service.
The federal government began moving forward on new legislation that would allow police to obtain Canadians’ data without a warrant — even as they ran a national consultation that feigned indecision on the issue.
Read more
The federal government began moving forward on new legislation that would allow police to obtain Canadians’ data without a warrant — even as they ran a national consultation that feigned indecision on the issue.
The warrantless access program had been previously declared unconstitutional by the Supreme Court of Canada.
New documents, obtained by VICE News under access to information laws, relate to meetings of a federal-provincial working group on cybercrime that had recommended proceeding with new legislation to restart that program. The details of what, exactly, that legislation would look like are contained in a report that Ottawa has refused to make public, against the advice of its own civil service.
The documents add to a wealth of information obtained by VICE News that show the Trudeau government has been working to build support for broad new investigative powers, all while keeping information about RCMP and CSIS surveillance tactics out of the public domain.
The program that Ottawa is looking to reboot allowed police, spy agencies, and possibly others to obtain Canadians’ data and personal information without a warrant from telecommunications companies and others. The government argued in court that this practice, which often came with no paper trail at all, was simply a way through which police could obtain “basic subscriber information” which linked a phone number or IP address to a name and address.
But media reports, including from VICE, and evidence entered into the Supreme Court case showed that the program was consistently used to obtain personal information that should require a warrant. The court found that the program, by design, was an attempt to “link a specific person … to specific online activities.” It concluded it was an infringement on Canadians’ privacy and ordered it to end, except in emergency situations.
According to memos and media lines obtained by VICE News, Public Safety Minister Ralph Goodale pushed to proceed with “legislative reforms” recommended by the cybercrime working group at a meeting of the federal and provincial justice and public safety ministers last September.
Update: After this story was published, Minister Goodale’s office reached out to VICE News to state that “no new legislation regarding basic subscriber information has been drafted and no authority to draft such legislation has been granted,” instead insisting that they have been “developing proposals for what legislation could look like.” Goodale’s office is still continuing to decline to release the working group report. VICE News has updated the story to reflect the statement.
The documents make no mention of oversight or scrutiny, but do contend that the government will balance privacy rights with the need to obtain this data.
Canadian police chiefs and the RCMP have fought for this power to be reinstated since 2014. A resolution passed by the Association of Canadian Police Chiefs in 2015 reads that they want a law to allow “law enforcement the ability to obtain, in real-time or near real-time, basic subscriber information (BSI) from telecommunications providers.”
Lawyers and academics have pushed back, saying that while it might be reasonable to allow police to connect a phone number or IP address to a person or address, there still needs to be some judicial oversight to prevent abuse.
As part of a public consultation run by the Trudeau government in 2016, Canadians were asked for their input on a wide variety of issues, including on the possibility of restarting some version of the warrantless access program. Goodale’s media lines promised the government wanted to “hear from Canadians on access to basic subscriber information.”
But the decision to go forward with legislative reforms on basic subscriber information was made fully three months before the consultations were scheduled to end.
The basis for the decision, a report from a special cybercrime federal-provincial working group, remains a secret. The memos obtained this week read that the public safety ministry supports the release of a summary of the report. The ministry wrote to Goodale that it “supports the public release of the summary of the [cybercrime working group] paper.”
VICE News has repeatedly attempted to get the working group report, through the federal and provincial governments and through the access to information act, but has been consistently denied.
A spokesperson for Minister Goodale refused to release the report in March, writing via email that “it would be released at the discretion of Federal-Provincial-Territorial Ministers Responsible for Justice and Public Safety.” |
import React from 'react';
import { ArrowFunctionExpression } from '~/components';
import { render } from '~/index';
import JSXExpressionContainer from './index';
describe('<JSXExpressionContainer />', () => {
  // Build a fresh render-options object per call so no test can observe
  // mutations made by another.
  const makeOptions = () => ({
    prettier: false,
    parserOptions: {
      plugins: ['jsx', 'classProperties', 'typescript']
    }
  });

  it('renders empty', () => {
    const code = render(<JSXExpressionContainer debug />, makeOptions());
    expect(code).toBe('{}');
  });

  it('renders with children example', () => {
    const code = render(
      <JSXExpressionContainer debug>
        <ArrowFunctionExpression />
      </JSXExpressionContainer>,
      makeOptions()
    );
    expect(code).toBe('{() => {}}');
  });

  it('renders with children as string', () => {
    const code = render(
      <JSXExpressionContainer debug>hello</JSXExpressionContainer>,
      makeOptions()
    );
    expect(code).toBe('{"hello"}');
  });

  it('renders with children as number', () => {
    const code = render(
      <JSXExpressionContainer debug>{0}</JSXExpressionContainer>,
      makeOptions()
    );
    expect(code).toBe('{0}');
  });

  it('renders with children as object', () => {
    const code = render(
      <JSXExpressionContainer debug>
        {{ hello: 'world' }}
      </JSXExpressionContainer>,
      makeOptions()
    );
    expect(code).toBe(`{{
"hello": "world"
}}`);
  });

  it('renders with children as array', () => {
    const code = render(
      <JSXExpressionContainer debug>{[1, 2, 3]}</JSXExpressionContainer>,
      makeOptions()
    );
    expect(code).toBe('{[1, 2, 3]}');
  });
});
|
package org.nutz.json.generic;
/**
 * Minimal bean used as a generics test fixture for Nutz JSON
 * (de)serialization; it exposes a single field and no accessors.
 */
public class Employee2{
    // NOTE(review): protected field with no getter/setter -- presumably read
    // directly by the JSON mapper or by subclasses; confirm before adding
    // accessors.
    protected String mobile;
}
Epidemiology of Spondyloarthritis in Colombia
There are no formal statistics about the incidence, prevalence or demographics of patients with spondyloarthropathies (SpAs) in Colombia. However, information from a few studies provides a preliminary snapshot of SpAs in the country. In this article, the authors review what has been published, document what their group is doing, and outline what they still need to do in the future. The analysis suggests that although information on SpA in Colombia is limited, it is known that the diagnostic entities of SpA are different than those reported at other latitudes. Thus, it is important to improve and expand the current database of SpA, particularly undifferentiated SpA, not only in Colombia but in all of Latin America.
// Sample to get an alert policy
public class GetAlertPolicy {
public static void main(String[] args) throws ApiException, IOException {
String alertPolicyName = "alert-policy-id";
// i.e projects/[PROJECT_ID_OR_NUMBER]/alertPolicies/[ALERT_POLICY_ID]
getAlertPolicy(alertPolicyName);
}
public static void getAlertPolicy(String alertPolicyName) throws ApiException, IOException {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
try (AlertPolicyServiceClient alertPolicyServiceClient = AlertPolicyServiceClient.create()) {
// Gets a single alerting policy
AlertPolicy alertPolicy = alertPolicyServiceClient.getAlertPolicy(alertPolicyName);
System.out.format("alert policy retrieved successfully:%s", alertPolicy.getName());
}
}
} |
/// This method balances the targets distribution of a data set with only one
/// target variable by unusing instances whose target variable belongs to the
/// most populated target class. The number of instances unused is
/// percentage/100 of the difference between the two class counts, removed in
/// batches (roughly a tenth of the total at a time).
/// It returns a vector with the indices of the instances set unused.
/// @param percentage Percentage of the class-count difference to be unused (0-100).
Vector<size_t> DataSet::balance_binary_targets_distribution(const double& percentage)
{
    Vector<size_t> unused_instances;

    const size_t instances_number = instances.count_used_instances_number();

    // Counts per target class; calculate_maximal_indices(2) yields the index
    // of the larger class first, then the smaller one.
    const Vector<size_t> target_class_distribution = calculate_target_distribution();
    const Vector<size_t> maximal_indices = target_class_distribution.calculate_maximal_indices(2);

    const size_t maximal_target_class_index = maximal_indices[0];
    const size_t minimal_target_class_index = maximal_indices[1];

    // Total number of instances to unuse: the requested fraction of the imbalance.
    size_t total_unbalanced_instances_number = (size_t)((percentage/100.0)*(target_class_distribution[maximal_target_class_index] - target_class_distribution[minimal_target_class_index]));

    size_t actual_unused_instances_number;

    // Batch size for each pass (a tenth of the total to remove).
    size_t unbalanced_instances_number = total_unbalanced_instances_number/10;

    Vector<size_t> actual_unused_instances;

    while(total_unbalanced_instances_number != 0)
    {
        // When little remains (or the batch rounded down to zero), take all
        // remaining instances in a single final pass.
        if(total_unbalanced_instances_number < instances_number/10)
        {
            unbalanced_instances_number = total_unbalanced_instances_number;
        }
        else if(total_unbalanced_instances_number > 0 && unbalanced_instances_number < 1)
        {
            unbalanced_instances_number = total_unbalanced_instances_number;
        }

        // Unuse a batch from the most populated class and record the indices.
        actual_unused_instances = unuse_most_populated_target(unbalanced_instances_number);
        actual_unused_instances_number = actual_unused_instances.size();

        unused_instances = unused_instances.assemble(actual_unused_instances);

        // NOTE(review): termination relies on unuse_most_populated_target()
        // always making progress; if it ever returned zero instances while
        // work remains, this loop would not terminate -- TODO confirm. Also
        // note total_unbalanced_instances_number is unsigned, so it must
        // never be decremented below zero.
        total_unbalanced_instances_number = total_unbalanced_instances_number - actual_unused_instances_number;

        actual_unused_instances.clear();
    }

    return (unused_instances);
}
<reponame>nmoehrle/scanalyze
#ifdef __cplusplus
extern "C" {
#endif
/* module: cyfile.h echo image header file */
/* @(#)cyfile.h 1.30 */
/* globals */
/* Internal types, These modules all assume the following types:
*
* char 1 byte signed integer, -128...127
* unsigned char 1 byte unsigned integer, 0...255
* short 2 byte signed integer, -32,768...32,767
* unsigned short 2 byte unsigned integer, 0...65,535
* long 4 byte signed integer,
*-2,147,483,648...2,147,483,647
* unsigned long 4 byte unsigned integer, 0...4,294,967,295
* real a real variable natural to the machine
* int at least as long as short
* unsigned int at least as long as unsigned short
*
* All other types are to be enclosed in #ifdefs.
*/
/* file constants, unpacked */
/* NOTE(review): MAXR (and the commented-out VOID) expand a free variable
 * named `gs`, so they only compile where a GSPEC *gs is in scope; prefer the
 * explicit MAXRGS(gs)/VOIDGS(gs) forms. */
#define MAXR (0x00007fff << gs->rshift)
#define MAXRGS(gs) (0x00007fff << (gs)->rshift)
#define MINR 0
/*#define VOID (0xffff8000<<gs->rshift)*/
#define VOIDGS(gs) (0xffff8000 << (gs)->rshift)
#ifndef NULL
#define NULL 0 /* null address */
#endif
/* math tools
 * NOTE: these are classic unsafe macros -- arguments are expanded more than
 * once, so they must be side-effect free (no i++ etc.). */
#ifndef MAX
#define MAX(a, b) ((a) > (b) ? (a) : (b)) /* return greater of a and b */
#endif
#ifndef MIN
#define MIN(a, b) ((a) < (b) ? (a) : (b)) /* return lesser of a and b */
#endif
#ifndef ABS
#define ABS(i) ((i) < 0 ? -(i) : (i)) /* integer absolute value */
#endif
#define DELTA(a, b) (ABS((a) - (b))) /* int absolute difference */
#define SCALE(n, s) ((((n) * (s)) + 50) / 100) /* int scale n by s percent */
/* BUGFIX: WRAPP/WRAPN were written as `(if ...)`, which is not a valid C
 * expression and failed to compile wherever the macros were expanded. They
 * are now statement macros using the do { } while (0) idiom and must be
 * used as statements, e.g. `WRAPP(n, m);`. */
#define WRAPP(n, m) do { if ((n) >= (m)) (n) -= (m); } while (0) /* modulo positive wrap */
#define WRAPN(n, m) do { if ((n) < 0) (n) += (m); } while (0)    /* modulo negative wrap */
/* unit conversions */
#define UMTOI(um) ((real)(um) * 3.937e-5) /* microns (um) to float inch */
#define ITOUM(um) ((int)((um) * 2.54e4)) /* inches to int microns */
#define URTOD(ur) ((real)(ur) * 5.7296e-5) /* urads to float degrees */
/* BUGFIX: DTOUR was missing its closing parenthesis, which broke any
 * expression it was expanded into. */
#define DTOUR(deg) ((int)((deg) * 1.74533e4)) /* degrees to int urads */
#define DTOR(deg) ((deg) * 1.7453292e-2) /* degrees to float radians */
#define RTOD(rad) ((rad) * 57.295779) /* radians to float degrees */
#define URTOR(ur) ((real)(ur) * 1.e-6) /* urads to float radians */
#define RTOUR(ur) (int)((ur) * 1.e6) /* float radians to int urads */
/* this structure defines 'grid file format'. the file consists of
* a parameter table followed immediatly by the data table. the offset
* to the start of the data table is the second parameter and is therefore
* fifth thru eighth bytes of the file (msb first).
*
* the parameters nlg and nlt are important for accessing the data. nlg
* is the number of longitude entries in the table. nlt is the number of
* latitudes in the table. nlt * nlg * 2 gives the number of bytes in the
* table.
*
* the table is a set of radius values in a cylindrical coordinate space.
* each radius value is stored in a 2 byte integer which when shifted
* left by RSHIFT bits yields a radius in microns (4 byte long integer).
* the radius values are stored in longitudnal groups of nlt values. there
* are nlg of these groups, one for each longitude of the cylinder.
*
* the functions GETR() and PUTR() defined below are usually all that is
* required to fetch and store values in the table when it is in memory.
* the parameters ltincr and lgincr define the distance between adjacent
* latitudes (microns) and adjacent longitudes (microradians) respectively.
*
* There are two formats for this header, one portable, one not so
* portable. The older non-portable type is binary and has the value
* 122 decimal ('z') in the fifth byte. The portable header has a 'r'
* in the fifth byte. The portable header is in ascii and has the form
* [name=value],... where name is a defined ascii symbol and value is a
* string value for the symbol. Format is variable and assignments are
* separated by white space or commas.
*
* See header.c for details.
*/
#define NAMELEN 40 /* length of the subject-name field, bytes */
#define CREATE_MODE 0644 /* create image files with this mode */

/* In-memory descriptor for a cylindrical range image ("grid file"); see the
 * format comment above for how these fields map onto the on-disk header. */
typedef struct {
    /* internal private variables */
    short *base;       /* base of data buffer */
    long offset;       /* file offset to start of data, bytes */
    /* file parameters */
    char name[NAMELEN]; /* subject name */
    time_t time;       /* original creation time */
    short camera;      /* camera id number */
    short setup;       /* camera setup code */
    char saved;        /* file has been saved since modified */
    char valid;        /* file buffer is valid */
    /* data parameters */
    short nlt;         /* number of latitude intervals */
    short nlg;         /* number of longitude intervals */
    short rshift;      /* shift to compress/expand radius data */
    short lgshift;     /* shift to extract longitude from addr */
    long flags;        /* misc file state flags, see FLAG_* below */
    long ltincr;       /* distance between latitudes, um */
    long lgincr;       /* distance between longitudes, urad */
    long ltsize;       /* nlat * ltincr, um */
    long lgsize;       /* nlg * lgincr, urad (always 2pi in urads) */
    /* user parameters */
    char filled;       /* fill flag, useless */
    short smoothed;    /* smooth pass counter */
    short ltmin, ltmax; /* latitude window limits, inclusive */
    short lgmin, lgmax; /* longitude window limits, inclusive */
    long rmin, rmax;   /* radius range, from last run of rminmax */
#ifdef IRIS
    /* NOTE(review): `long float` is not standard C; presumably the old IRIS
     * compiler treated it as double -- confirm before building there. */
    long float scale;  /* current scale */
    long float rprop;  /* current radius proportion */
#else
    double scale;      /* current scale */
    double rprop;      /* current radius proportion */
#endif
} GSPEC;
/* macros for standardizing the use of the grid data. gs is a pointer to the
 * applicable GSSPEC table. index is the offset of a data item in the
 * data. lt and lg are latitude and longitude indicies. r is the radius
 * in microns (um) of a data point. z is a position along the cylindrical
 * axis in microns. a is an angular coordinate around the cylinder in
 * microradians (urad).
 *
 * INDEX generates an index value from latitude and logitude indicies.
 * ADDR returns the absolute address of a data item.
 * PUTR and GETR are used to store and retrieve data from the image.
 *
 * NOTE(review): these are unsafe macros -- gs, lt and lg are expanded more
 * than once, so arguments must be side-effect free (no lt++ etc.).
 */
#define INDEX(gs, lt, lg) ((lg) * (gs)->nlt + (lt))
#define ADDR(gs, lt, lg) ((gs)->base + INDEX(gs, lt, lg))
#ifdef HIGHC
/* High C: use the function forms declared below instead of the macros. */
#define GETR(gs, lt, lg) getr(gs, lt, lg)
#define PUTR(gs, lt, lg, r) putr(gs, lt, lg, r)
#else
/* radii are stored compressed: shifted right by gs->rshift into a short */
#define PUTR(gs, lt, lg, r) (*ADDR(gs, lt, lg) = (r) >> (gs)->rshift)
#define GETR(gs, lt, lg) ((int)*ADDR(gs, lt, lg) << (gs)->rshift)
#endif
/* flag bits for gs->flags */
#define FLAG_RESERVED 0x000000ff /* older files have ones here, ignore */
#define FLAG_CARTESIAN 0x00000100 /* data is cartesian (vs. cyl) */
#define FLAG_OLDHEADER 0x00000200 /* please write file with old header */
#define FLAG_BILATERAL 0x00000400 /* bilateral image, ie: nus hands */
#define FLAG_COLOR 0x00000800 /* image has associated color file */
#define FLAG_THETARIGHT 0x00001000 /* theta is right hand rule */
#define FLAG_INSIDE_OUT 0x00002000 /* inside surface is outside */
/* non-int public functions (implemented in the accompanying .c modules) */
extern GSPEC *cyread(GSPEC *gs, int fd);
extern int cywrite(GSPEC *gs, int fd);
extern void cyfree(GSPEC *gs);
extern long getr(register GSPEC *gs, register int lt, register int lg);
extern void putr(register GSPEC *gs, register int lt, register int lg,
                 register int r);
extern int gsget(GSPEC *gs, int fd);
extern int gsput(GSPEC *gs, int fd);
extern int gdget(GSPEC *gs, int fd);
extern int gdput(GSPEC *gs, int fd);
extern int gdallo(GSPEC *gs);
extern long getheader(int fd);
extern int getvalue(char *name, char *dest, int length);
extern int makegsheader(GSPEC *gs);
extern int writegsheader(GSPEC *gs, int fd);
#ifdef __cplusplus
}
#endif
|
def remove_ind(self, ind, inplace=False):
    """Mark index ``ind`` as sliced: remove it from every node of the
    contraction tree and incrementally update the cached flop, write and
    size statistics, then record it in ``sliced_inds``/``sliced_sizes``.

    Parameters
    ----------
    ind :
        The index (edge label) to slice; must be a key of ``size_dict``.
    inplace : bool, optional
        If True modify this tree, otherwise work on a copy.

    Returns
    -------
    The modified tree (``self`` when ``inplace`` is True).
    """
    tree = self if inplace else self.copy()
    # Ensure the cached totals exist so they can be adjusted in place below.
    tree.total_flops()
    tree.total_write()
    tree.max_size()
    # d: dimension of the removed index; s_ind: its single-edge bitset form.
    d = tree.size_dict[ind]
    s_ind = self.bitset_edges.frommembers((ind,))
    for node, node_info in tree.info.items():
        involved = tree.get_involved(node)
        legs = tree.get_legs(node)
        # Skip nodes that neither contract over nor carry this index.
        if not ((s_ind & involved) or (s_ind & legs)):
            continue
        node_info['involved'] = involved.difference(s_ind)
        removed = tree.get_removed(node)
        if s_ind & legs:
            # The index appears in this node's output legs: the intermediate
            # tensor shrinks by a factor of d (size and total write volume).
            node_info['legs'] = legs.difference(s_ind)
            old_size = tree.get_size(node)
            tree._sizes.discard(old_size)
            new_size = old_size // d
            tree._sizes.add(new_size)
            node_info['size'] = new_size
            tree._write += (-old_size + new_size)
            keep = tree.get_keep(node)
            node_info['keep'] = keep.difference(s_ind)
        else:
            # The index was summed over at this node: only the flop count
            # shrinks by a factor of d.
            node_info['removed'] = removed.difference(s_ind)
            old_flops = tree.get_flops(node)
            new_flops = old_flops // d
            # When this was the node's only contracted index the operation
            # becomes a pure elementwise multiply and the count is halved --
            # presumably because flops are tallied as multiply+add per inner
            # sum; TODO confirm against the flop-counting convention.
            if len(removed) == 1:
                new_flops //= 2
            node_info['flops'] = new_flops
            tree._flops += (-old_flops + new_flops)
        if len(node) == 1:
            # Leaf node -> the corresponding input tensor is now sliced.
            i = node_get_single_el(node)
            tree.sliced_inputs = tree.sliced_inputs | frozenset([i])
            tree.inputs_legs[i] = tree.inputs_legs[i] - s_ind
        elif len(node) == tree.N:
            # Root node -> the index disappears from the output as well.
            tree.output_legs = tree.output_legs - s_ind
        # Drop cached contraction details that are now stale for this node.
        for k in ('inds', 'einsum_eq', 'can_dot',
                  'tensordot_axes', 'tensordot_perm'):
            tree.info[node].pop(k, None)
    # One full contraction is now needed per value of the sliced index.
    tree.multiplicity = tree.multiplicity * d
    # Append the new index, keeping output indices first, then sorted.
    tree.sliced_inds, tree.sliced_sizes = zip(*sorted(
        zip(
            itertools.chain(tree.sliced_inds, (ind,)),
            itertools.chain(tree.sliced_sizes, (tree.size_dict[ind],)),
        ),
        key=lambda x: (x[0] not in tree.output, x)
    ))
    # Invalidate optimization / execution caches that depended on the old tree.
    tree.already_optimized.clear()
    tree.contraction_cores.clear()
    return tree
import { createSlice, PayloadAction } from "@reduxjs/toolkit";
import { RootState } from "../../state/store";
// Status of a user's attempt at a single challenge.
export enum ChallengeStatus {
  // No result recorded yet for the current attempt.
  Pending,
  // The last evaluation of the challenge failed.
  Failure,
  // The challenge was solved; the reducer treats this as terminal.
  Success,
  // A fresh attempt is in progress after an earlier recorded failure.
  Pending_LastRunFailure,
}
// Status record for a single challenge, identified by its unique id.
export type ChallengeInfo = {
  status: ChallengeStatus;
  id: string;
};
export const challengeSlice = createSlice({
  initialState: {
    challengeInfos: [] as ChallengeInfo[],
  },
  name: "challenge",
  reducers: {
    /**
     * Records a status for a challenge, enforcing the transition rules:
     *
     *   - An unknown challenge accepts any initial status.
     *   - Success is terminal: once reached it never changes.
     *   - Failure only transitions on a Pending update, which becomes
     *     Pending_LastRunFailure (remembering the prior failure while a
     *     fresh attempt runs).
     *   - Pending_LastRunFailure accepts only Success or Failure.
     *   - Pending accepts whatever status is reported next.
     */
    setChallengeStatus(
      state,
      action: PayloadAction<{ status: ChallengeStatus; id: string }>
    ) {
      const { status, id } = action.payload;
      const challengeIndex = state.challengeInfos.findIndex(
        (info) => info.id === id
      );

      // First time we hear about this challenge: store the status as-is.
      if (challengeIndex === -1) {
        state.challengeInfos.push({ status, id });
        return state;
      }

      const entry = state.challengeInfos[challengeIndex];
      switch (entry.status) {
        case ChallengeStatus.Success:
          // Terminal state -- ignore all further updates.
          break;
        case ChallengeStatus.Failure:
          if (status === ChallengeStatus.Pending) {
            entry.status = ChallengeStatus.Pending_LastRunFailure;
          }
          break;
        case ChallengeStatus.Pending_LastRunFailure:
          if (
            status === ChallengeStatus.Success ||
            status === ChallengeStatus.Failure
          ) {
            entry.status = status;
          }
          break;
        default:
          // Pending: take the reported status verbatim.
          entry.status = status;
      }
      return state;
    },
  },
});
// Selector: all per-challenge status records from the store.
export const getChallengeInfo = (state: RootState) =>
  state.challenge.challengeInfos;
|
/**
* Class to use to make instrumented / unit tests with on the PixelByPixelPicturesComparator class
*
* @author Pierre-Yves Lapersonne
* @version 1.1.0
* @since 13/062016
* @see AbstractTest
*/
public class ItPixelByPixelPicturesComparator extends AbstractTest {
/**
 * Tests the constructor
 */
@Test
public void constructor(){
    l(this, "@Test constructor");
    // Smoke test: only verifies that instantiation does not throw.
    new PixelByPixelPicturesComparator();
}
/**
 * Tests the comparePictures() with identical pictures
 *
 * <i>Two identical pictures must be considered as equal, so return true</i>
 */
@Test
public void comparePictures(){
    l(this, "@Test comparePictures");
    l(this, "NOT IMPLEMENTED");
    // TODO: needs a bitmap fixture with known pixel content to assert equality
}
/**
 * Checks that two empty pictures are reported as identical.
 *
 * <i>Two empty pictures must be considered as equal</i>
 */
@Test
public void comparePicturesWithEmptyPictures() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithEmptyPictures");
    final PicturesComparator comparator = new PixelByPixelPicturesComparator();
    assertTrue(comparator.comparePictures(createEmptyBitmap(), createEmptyBitmap()));
}
/**
 * Checks that a null base picture is rejected with an exception.
 *
 * <i>Comparisons with null base picture must thrown an exception</i>
 */
@Test ( expected = PicturesComparator.PicturesComparatorException.class )
public void comparePicturesWithNullBasePicture() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNullBasePicture");
    new PixelByPixelPicturesComparator().comparePictures(null, createEmptyBitmap());
}
/**
 * Checks that a null picked picture is rejected with an exception.
 *
 * <i>Comparisons with null picked picture must thrown an exception</i>
 */
@Test ( expected = PicturesComparator.PicturesComparatorException.class )
public void comparePicturesWithNullPickedPicture() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNullPickedPicture");
    new PixelByPixelPicturesComparator().comparePictures(createEmptyBitmap(), null);
}
/**
 * Checks that pictures with equal heights but different widths are not equal.
 *
 * <i>Comparison with pictures having same heights but different widths must return false</i>
 */
@Test
public void comparePicturesWithNotEqualWidthsPictures() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNotEqualWidthsPictures");
    final PicturesComparator comparator = new PixelByPixelPicturesComparator();
    final boolean identical = comparator.comparePictures(createEmptyBitmap(1000, 2000), createEmptyBitmap(1000, 1337));
    assertFalse(identical);
}
/**
 * Checks that pictures with equal widths but different heights are not equal.
 *
 * <i>Comparison with pictures having same widths but different heights must return false</i>
 */
@Test
public void comparePicturesWithNotEqualHeightsPictures() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNotEqualHeightsPictures");
    final PicturesComparator comparator = new PixelByPixelPicturesComparator();
    final boolean identical = comparator.comparePictures(createEmptyBitmap(666, 42), createEmptyBitmap(310315, 42));
    assertFalse(identical);
}
/**
 * Tests the comparePictures() with pictures with same widths, same heights but different contents
 *
 * <i>Comparison with pictures having same widths and heights but different contents must return false</i>
 */
@Test
public void comparePicturesWithSameDimensionsButDifferentContents(){
    l(this, "@Test comparePicturesWithSameDimensionsButDifferentContents");
    l(this, "NOT IMPLEMENTED");
    // TODO: needs two same-sized bitmap fixtures with differing pixel data
}
/**
 * Tests the comparePictures() with pictures with different widths and heights and contents
 *
 * <i>Comparison with pictures having different widths, heights and contents must return false</i>
 */
@Test
public void comparePicturesWithDifferentPictures(){
    l(this, "@Test comparePicturesWithDifferentPictures");
    l(this, "NOT IMPLEMENTED");
    // TODO: needs two bitmap fixtures differing in size and content
}
// TODO Same kind of tests with the method with threshold param
/**
 * Tests the comparePictures() with identical pictures and a threshold
 *
 * <i>Two identical pictures must be considered as equal, so return true</i>
 */
@Test
public void comparePicturesWithThreshold(){
    l(this, "@Test comparePicturesWithThreshold");
    l(this, "NOT IMPLEMENTED");
    // TODO: needs a bitmap fixture with known pixel content to assert equality
}
/**
 * Checks that two empty pictures are reported as identical when a threshold is supplied.
 *
 * <i>Two empty pictures must be considered as equal</i>
 */
@Test
public void comparePicturesWithEmptyPicturesWithThreshold() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithEmptyPicturesWithThreshold");
    final PicturesComparator comparator = new PixelByPixelPicturesComparator();
    assertTrue(comparator.comparePictures(createEmptyBitmap(), createEmptyBitmap(), 0));
}
/**
 * Checks that a null base picture is rejected with an exception in the threshold variant.
 *
 * <i>Comparisons with null base picture must thrown an exception</i>
 */
@Test ( expected = PicturesComparator.PicturesComparatorException.class )
public void comparePicturesWithNullBasePictureWithThreshold() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNullBasePictureWithThreshold");
    new PixelByPixelPicturesComparator().comparePictures(null, createEmptyBitmap(), 0);
}
/**
 * Checks that a null picked picture is rejected with an exception in the threshold variant.
 *
 * <i>Comparisons with null picked picture must thrown an exception</i>
 */
@Test ( expected = PicturesComparator.PicturesComparatorException.class )
public void comparePicturesWithNullPickedPictureWithThreshold() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNullPickedPictureWithThreshold");
    new PixelByPixelPicturesComparator().comparePictures(createEmptyBitmap(), null, 0);
}
/**
 * Checks that pictures with equal heights but different widths are not equal, threshold variant.
 *
 * <i>Comparison with pictures having same heights but different widths must return false</i>
 */
@Test
public void comparePicturesWithNotEqualWidthsPicturesWithThreshold() throws PicturesComparator.PicturesComparatorException {
    l(this, "@Test comparePicturesWithNotEqualWidthsPicturesWithThreshold");
    final PicturesComparator comparator = new PixelByPixelPicturesComparator();
    final boolean identical = comparator.comparePictures(createEmptyBitmap(1000, 2000), createEmptyBitmap(1000, 1337), 0);
    assertFalse(identical);
}
/**
* Tests the comparePictures() with pictures with same widths but different heights, and a threshold
*
* <i>Comparison with pictures having same widths but different heights must return false</i>
*/
@Test
public void comparePicturesWithNotEqualHeightsPicturesWithThreshold() throws PicturesComparator.PicturesComparatorException {
l(this, "@Test comparePicturesWithNotEqualHeightsPicturesWithThreshold");
PicturesComparator pc = new PixelByPixelPicturesComparator();
assertFalse(
pc.comparePictures(createEmptyBitmap(666, 42), createEmptyBitmap(310315, 42), 0)
);
}
/**
* Tests the comparePictures() with pictures with same widths, same heights but different contents, and a threshold
*
* <i>Comparison with pictures having same widths and heights but different contents must return false</i>
*/
    @Test
    public void comparePicturesWithSameDimensionsButDifferentContentsWithThreshold(){
        l(this, "@Test comparePicturesWithSameDimensionsButDifferentContentsWithThreshold");
        l(this, "NOT IMPLEMENTED");
        // TODO: create two same-sized bitmaps with different pixel contents
        // and assert comparePictures(a, b, threshold) returns false
    }
/**
* Tests the comparePictures() with pictures with different widths and heights and contents, and a threshold
*
* <i>Comparison with pictures having different widths, heights and contents must return false</i>
*/
    @Test
    public void comparePicturesWithDifferentPicturesWithThreshold(){
        l(this, "@Test comparePicturesWithDifferentPicturesWithThreshold");
        l(this, "NOT IMPLEMENTED");
        // TODO: create two bitmaps differing in size and contents and assert
        // comparePictures(a, b, threshold) returns false
    }
/**
* Tests the comparePictures() method with a negative threshold
*
* <i>If a negative threshold is used, an exception is thrown</i>
*/
@Test (expected = PicturesComparator.PicturesComparatorException.class)
public void comparePicturesWithNegativeThreshold() throws PicturesComparator.PicturesComparatorException {
l(this, "@Test comparePicturesWithNegativeThreshold");
PicturesComparator pc = new PixelByPixelPicturesComparator();
assertFalse(
pc.comparePictures(createEmptyBitmap(5, 5), createEmptyBitmap(5, 5), -1)
);
}
/**
* Tests the comparePictures() method with a too big threshold
*
* <i>If a to big threshold is used, an exception is thrown</i>
*/
@Test (expected = PicturesComparator.PicturesComparatorException.class)
public void comparePicturesWithTooBigThreshold() throws PicturesComparator.PicturesComparatorException {
l(this, "@Test comparePicturesWithTooBigThreshold");
PicturesComparator pc = new PixelByPixelPicturesComparator();
assertFalse(
pc.comparePictures(createEmptyBitmap(5, 5), createEmptyBitmap(5, 5), 101)
);
}
/**
* Tests the comparePictures() method with border values for threshold
*
* <i>Border values of threshold (0 and 100) must be handled without problem</i>
*/
@Test
public void comparePicturesWithBorderValues() throws PicturesComparator.PicturesComparatorException {
l(this, "@Test comparePicturesWithBorderValues");
PicturesComparator pc = new PixelByPixelPicturesComparator();
pc.comparePictures(createEmptyBitmap(5, 5), createEmptyBitmap(5, 5), 0);
pc.comparePictures(createEmptyBitmap(5, 5), createEmptyBitmap(5, 5), 100);
}
/**
*
* @return Bitmap - A bitmap object
*/
private Bitmap createEmptyBitmap(){
final int W = 1000;
final int H = 1000;
Bitmap.Config config = Bitmap.Config.ARGB_8888;
return Bitmap.createBitmap(W, H, config); // Mutable bitmap ;)
}
/**
* @param h - The height
* @param w - The width
* @return Bitmap - A bitmap object
*/
private Bitmap createEmptyBitmap( int h, int w ){
if ( h < 0 ) h = 1000;
if ( w < 0 ) w = 1000;
Bitmap.Config config = Bitmap.Config.ARGB_8888;
return Bitmap.createBitmap(w, h, config); // Mutable bitmap ;)
}
} |
<reponame>ypar/django-cognoma<filename>api/management/commands/generateinternaltoken.py
import os
from datetime import datetime
from django.core.management.base import BaseCommand
import jwt
class Command(BaseCommand):
    """Management command that mints an RS256-signed JWT for a Cognoma
    internal service, using a locally supplied RSA private key."""

    help = 'Generates a JWT for use internally inside Cognoma.'

    def add_arguments(self, parser):
        # nargs='+' makes each argument arrive as a list; handle() uses [0].
        parser.add_argument('service', nargs='+', type=str, help='Internal service name, ex "task"')
        parser.add_argument('private_key_file', nargs='+', type=str, help='Path to RSA private key')
        parser.add_argument('issuer', nargs='+', type=str, help='Issuer (you) Github handle')

    def handle(self, *args, **options):
        service = options['service'][0]
        issuer = options['issuer'][0]
        print('Creating token for service "' +
              service +
              '", issued by "' +
              issuer +
              '"')
        # Close the key file deterministically instead of relying on GC
        # (the previous open(...).read() leaked the file handle).
        with open(options['private_key_file'][0]) as key_file:
            private_key = key_file.read()
        token = jwt.encode({
            'service': service,  # audience service name
            'iat': datetime.utcnow(),  # issued-at
            'iss': issuer  # who minted the token
        },
            private_key,
            algorithm="RS256")
        print(token.decode())
|
def combineBenignIRPLogs(path):
    """Concatenate every ``*_processed.*`` CSV found directly under ``path``.

    Files are read in sorted (deterministic) order. On any failure — e.g. no
    matching files, or an unreadable CSV — a message is printed and ``None``
    is returned, preserving the original best-effort contract.

    :param path: directory containing the processed benign IRP logs
    :return: one concatenated ``pandas.DataFrame``, or ``None`` on error
    """
    all_file_names = sorted(glob.glob(str(path) + '/' + '*_processed.*'))
    try:
        return pd.concat([pd.read_csv(f) for f in all_file_names])
    except Exception as err:
        # Narrowed from a bare "except:" (which also swallowed SystemExit and
        # KeyboardInterrupt) and now surfaces the actual cause.
        print("Something went wrong in combining benign logs: {}".format(err))
# Copyright (c) 2019 Tradeshift
# Copyright (c) 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import tensorflow as tf
from tensorflow.python.util import deprecation
from tensorflow.contrib import layers
from tensorflow.python.ops.losses.losses_impl import Reduction
from .. import FIELD_TYPES, FIELDS
from ..common import util
from ..common.model import Model
from .data import RealData
from ..parsing.parsers import DateParser, AmountParser, NoOpParser, OptionalParser
# Silence TF1 deprecation chatter and lower log verbosity so CLI output
# stays readable during graph construction and training.
deprecation._PRINT_DEPRECATION_WARNINGS = False
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
class AttendCopyParse(Model):
    """Attend-Copy-Parse model (TF1 graph mode) for extracting one invoice
    field: a dilated-conv attention module scores document positions, the
    attended n-gram "memories" are copied out, and a field-specific parser
    decodes them into the output string.
    """

    devices = util.get_devices()
    n_hid = 32               # base channel count for the dilated conv blocks
    frac_ce_loss = 0.0001    # weight of the uniform-attention regularizer
    lr = 3e-4
    keep_prob = 0.5

    def __init__(self, field, train_data=None, val_data=None, batch_size=8, restore=False):
        """Builds the whole graph and session for a single `field`.

        :param field: key into FIELDS selecting which invoice field to learn
        :param train_data: optional training dataset (enables train_batch)
        :param val_data: optional validation dataset (enables val_batch)
        :param batch_size: per-device batch size; total is batch_size * n_devices
        :param restore: if True, restore the best checkpoint for this field
        """
        tf.reset_default_graph()
        self.field = field
        # Effective batch size is scaled by the number of available devices.
        self.batch_size = batch_size * len(self.devices)
        # Pick the decoder matching the field's declared type.
        self.parser = None
        if FIELDS[self.field] == FIELD_TYPES["optional"]:
            noop_parser = NoOpParser()
            self.parser = OptionalParser(noop_parser, self.batch_size, 128, 103, 1)
        elif FIELDS[self.field] == FIELD_TYPES["amount"]:
            self.parser = AmountParser(self.batch_size)
        elif FIELDS[self.field] == FIELD_TYPES["date"]:
            self.parser = DateParser(self.batch_size)
        else:
            self.parser = NoOpParser()
        self.restore_all_path = './models/invoicenet/{}/best'.format(self.field) if restore else None
        os.makedirs("./models/invoicenet", exist_ok=True)
        if train_data:
            self.train = train_data
            self.train_iterator = self.iterator(self.train)
            self.next_train_batch = self.train_iterator.get_next()
        if val_data:
            valid = val_data
            self.valid_iterator = self.iterator(valid)
            self.next_valid_batch = self.valid_iterator.get_next()
        self.regularizer = layers.l2_regularizer(1e-4)
        print("Building graph...")
        config = tf.ConfigProto(allow_soft_placement=False)
        config.gpu_options.allow_growth = True
        self.session = tf.Session(config=config)
        # Placeholders
        self.is_training_ph = tf.placeholder(tf.bool)
        self.memories_ph = tf.sparse_placeholder(tf.float32, name="memories")
        self.pixels_ph = tf.placeholder(tf.float32, name='pixels')
        self.word_indices_ph = tf.placeholder(tf.int32, name="word_indices")
        self.pattern_indices_ph = tf.placeholder(tf.int32, name="pattern_indices")
        self.char_indices_ph = tf.placeholder(tf.int32, name="char_indices")
        self.memory_mask_ph = tf.placeholder(tf.float32, name="memory_mask")
        self.parses_ph = tf.placeholder(tf.float32, name="parses")
        self.target_ph = tf.placeholder(tf.int32, name="target")
        h, w = RealData.im_size
        bs = self.batch_size
        seq_in = RealData.seq_in
        n_out = RealData.n_output

        def dilated_block(x):
            # Four parallel 3x3 convs with dilation rates 1/2/4/8, concatenated
            # along channels: a cheap way to grow the receptive field.
            return tf.concat(
                [layers.conv2d(x, self.n_hid, 3, rate=rate, activation_fn=None, weights_regularizer=self.regularizer)
                 for rate in [1, 2, 4, 8]], axis=3)

        def attend(pixels, word_indices, pattern_indices, char_indices, memory_mask, parses):
            """
            :param pixels: (bs, h, w)
            :param word_indices: (bs, h, w)
            :param pattern_indices: (bs, h, w)
            :param char_indices: (bs, h, w)
            :param memory_mask: (bs, h, w, m, l, d)
            :param parses: (bs, h, w, 4, 2)
            """
            bs = tf.shape(pixels)[0]
            # Normalized coordinate channels so the net can reason about
            # absolute position on the page.
            X, Y = tf.meshgrid(tf.linspace(0.0, 1.0, RealData.im_size[0]), tf.linspace(0.0, 1.0, RealData.im_size[0]))
            X = tf.tile(X[None, ..., None], (bs, 1, 1, 1))
            Y = tf.tile(Y[None, ..., None], (bs, 1, 1, 1))
            word_embeddings = tf.reshape(
                layers.embed_sequence(tf.reshape(word_indices, (bs, -1)), vocab_size=RealData.word_hash_size,
                                      embed_dim=self.n_hid, unique=False, scope="word-embeddings"),
                (bs, h, w, self.n_hid))
            pattern_embeddings = tf.reshape(
                layers.embed_sequence(tf.reshape(pattern_indices, (bs, -1)), vocab_size=RealData.pattern_hash_size,
                                      embed_dim=self.n_hid, unique=False, scope="pattern-embeddings"),
                (bs, h, w, self.n_hid))
            char_embeddings = tf.reshape(
                layers.embed_sequence(tf.reshape(char_indices, (bs, -1)), vocab_size=RealData.n_output,
                                      embed_dim=self.n_hid, unique=False, scope="char-embeddings"),
                (bs, h, w, self.n_hid))
            pixels = tf.reshape(pixels, (bs, h, w, 3))
            parses = tf.reshape(parses, (bs, h, w, 8))
            memory_mask = tf.reshape(memory_mask, (bs, h, w, 1))
            # Stack all per-position features into one (bs, h, w, C) tensor.
            x = tf.concat([pixels, word_embeddings, pattern_embeddings, char_embeddings, parses, X, Y, memory_mask],
                          axis=3)
            with tf.variable_scope('attend'):
                for i in range(4):
                    x = tf.nn.relu(dilated_block(x))
                x = layers.dropout(x, self.keep_prob, is_training=self.is_training_ph)
                pre_att_logits = x
                att_logits = layers.conv2d(x, RealData.n_memories, 3, activation_fn=None,
                                           weights_regularizer=self.regularizer)  # (bs, h, w, n_memories)
                # Mask out invalid positions by pushing their logits to -1000.
                att_logits = memory_mask * att_logits - (
                    1.0 - memory_mask) * 1000  # TODO only sum the memory_mask idx, in the softmax
                logits = tf.reshape(att_logits, (bs, -1))  # (bs, h * w * n_memories)
                logits -= tf.reduce_max(logits, axis=1, keepdims=True)  # for numerical stability
                lp = tf.nn.log_softmax(logits, axis=1)  # (bs, h * w * n_memories)
                p = tf.nn.softmax(logits, axis=1)  # (bs, h * w * n_memories)
                spatial_attention = tf.reshape(p,
                                               (bs, h * w * RealData.n_memories, 1, 1))  # (bs, h * w * n_memories, 1, 1)
                # Regularizer: cross-entropy between attention and the uniform
                # distribution over valid positions (weighted by frac_ce_loss).
                p_uniform = memory_mask / tf.reduce_sum(memory_mask, axis=(1, 2, 3), keepdims=True)
                cross_entropy_uniform = -tf.reduce_sum(p_uniform * tf.reshape(lp, (bs, h, w, RealData.n_memories)),
                                                       axis=(1, 2, 3))  # (bs, 1)
                cp = tf.reduce_sum(tf.reshape(p, (bs, h, w, RealData.n_memories)), axis=3, keepdims=True)
                context = tf.reduce_sum(cp * pre_att_logits, axis=(1, 2))  # (bs, 4*n_hidden)
            return spatial_attention, cross_entropy_uniform, context

        # Split the batch across devices and run `attend` on each shard.
        spatial_attention, cross_entropy_uniform, context = util.batch_parallel(
            attend,
            self.devices,
            pixels=self.pixels_ph,
            word_indices=self.word_indices_ph,
            pattern_indices=self.pattern_indices_ph,
            char_indices=self.char_indices_ph,
            memory_mask=self.memory_mask_ph,
            parses=self.parses_ph
        )
        context = tf.concat(context, axis=0)  # (bs, 128)
        spatial_attention = tf.concat(spatial_attention, axis=0)  # (bs, h * w * n_mem, 1, 1)
        cross_entropy_uniform = tf.concat(cross_entropy_uniform, axis=0)  # (bs, 1)
        with tf.variable_scope('copy'):
            # Weighted sum of the sparse character memories by the attention.
            memories = tf.sparse_reshape(self.memories_ph,
                                         (self.batch_size, h * w * RealData.n_memories, RealData.seq_in, n_out))
            x = tf.reshape(tf.sparse_reduce_sum(spatial_attention * memories, axis=1),
                           (bs, seq_in, n_out))  # (bs, seq_in, n_out)
        with tf.name_scope('parse'):
            parsed = self.parser.parse(x, context, self.is_training_ph)
        output = self.output(parsed, targets=self.target_ph, scope=self.field)
        self.outputs = {self.field: output}
        # Total loss = field cross-entropy + L2 + uniform-attention penalty.
        reg_loss = tf.losses.get_regularization_loss()
        cross_entropy_uniform_loss = self.frac_ce_loss * tf.reduce_mean(cross_entropy_uniform)
        field_loss = tf.reduce_mean(self.outputs[self.field]['cross_entropy'])  # (bs, )
        self.loss = field_loss + reg_loss + cross_entropy_uniform_loss
        self.global_step = tf.Variable(initial_value=0, trainable=False)
        self.optimizer = tf.train.AdamOptimizer(learning_rate=self.lr)
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            grads, vars = zip(*self.optimizer.compute_gradients(self.loss, colocate_gradients_with_ops=True))
            self.train_step = self.optimizer.apply_gradients(zip(grads, vars), global_step=self.global_step)
        # Savers
        self.saver = tf.train.Saver()
        self.session.run(tf.global_variables_initializer())
        if self.restore_all_path:
            # Restore the full model checkpoint for this field.
            if not os.path.exists('./models/invoicenet/{}'.format(self.field)):
                raise Exception("No trained model available for the field '{}'".format(self.field))
            print("Restoring all " + self.restore_all_path + "...")
            self.saver.restore(self.session, self.restore_all_path)
        else:
            # Otherwise optionally restore only the parser's pretrained weights.
            restore = self.parser.restore()
            if restore is not None:
                scope, fname = restore
                vars = tf.trainable_variables(scope=scope)
                saver = tf.train.Saver(var_list=vars)
                print("Restoring %s parser %s..." % (self.field, fname))
                for var in vars:
                    print("-- restoring %s" % var)
                saver.restore(self.session, fname)

    def output(self, logits, targets, scope, optional=None):
        """Builds per-field outputs: masked cross-entropy over non-pad target
        positions, argmax predictions, and an exact-match indicator."""
        with tf.variable_scope(scope):
            if optional:
                # Mix the parsed logits with a learned "empty answer" using the
                # predicted probability that the field is present.
                logoutput_p, empty_answer = optional
                output_p = tf.exp(logoutput_p)
                output_p = tf.reshape(output_p, (self.batch_size, 1, 1))
                empty_logits = tf.exp(tf.get_variable('empty-multiplier', shape=(), dtype=tf.float32,
                                                      initializer=tf.initializers.constant(0.0))) * empty_answer
                logits = output_p * logits + (1 - output_p) * empty_logits
            mask = tf.logical_not(tf.equal(targets, RealData.pad_idx))  # (bs, seq)
            label_cross_entropy = tf.reduce_sum(
                tf.losses.sparse_softmax_cross_entropy(targets, logits, reduction=Reduction.NONE) * tf.to_float(mask),
                axis=1) / tf.reduce_sum(tf.to_float(mask), axis=1)
            chars = tf.argmax(logits, axis=2, output_type=tf.int32)
            equal = tf.equal(targets, chars)
            # correct == 1.0 iff every non-pad position matches the target.
            correct = tf.to_float(tf.reduce_all(tf.logical_or(equal, tf.logical_not(mask)), axis=1))
        return {'cross_entropy': label_cross_entropy, 'actual': chars, 'targets': targets, 'correct': correct}

    def iterator(self, data):
        """Endlessly-repeating, batched iterator over `data`; the first three
        generator outputs are reassembled into a SparseTensor (the memories)."""
        shapes, types = data.shapes_types()
        ds = tf.data.Dataset.from_generator(
            data.sample_generator,
            types,
            shapes
        ).map(lambda i, v, s, *args: (tf.SparseTensor(i, v, s),) + args) \
            .repeat(-1) \
            .apply(tf.contrib.data.batch_and_drop_remainder(self.batch_size)) \
            .prefetch(2)
        return ds.make_one_shot_iterator()

    def train_batch(self):
        """Runs one optimizer step on the next training batch; returns the loss."""
        batch = self.session.run(self.next_train_batch)
        placeholders = self.get_placeholders(batch, True)
        _, loss, outputs, step = self.session.run([self.train_step, self.loss, self.outputs, self.global_step],
                                                  placeholders)
        return loss

    def val_batch(self):
        """Evaluates the next validation batch (no weight update); returns the loss."""
        batch = self.session.run(self.next_valid_batch)
        placeholders = self.get_placeholders(batch, False)
        loss, outputs, step = self.session.run([self.loss, self.outputs, self.global_step], placeholders)
        return loss

    def predict(self, paths):
        """Decodes the trained field for each document in `paths`.

        :param paths: document paths fed to RealData.generate_test_data
        :return: list of predicted field strings, one per input
        """
        data = RealData(field=self.field)
        shapes, types = data.shapes_types()
        ds = tf.data.Dataset.from_generator(
            data.generate_test_data(paths),
            types,
            shapes
        ).map(lambda i, v, s, *args: (tf.SparseTensor(i, v, s),) + args) \
            .repeat(1) \
            .apply(tf.contrib.data.batch_and_drop_remainder(1))
        iterator = ds.make_one_shot_iterator()
        next_test_batch = iterator.get_next()
        predictions = []
        while True:
            try:
                batch = self.session.run(next_test_batch)
                placeholders = self.get_placeholders(batch, False)
                output = self.session.run(self.outputs, placeholders)
                predictions.extend(data.array_to_str(output[self.field]['actual']))
            except tf.errors.OutOfRangeError:
                # Dataset exhausted: all documents processed.
                break
        return predictions

    def save(self, name):
        # Checkpoints under ./models/invoicenet/<field>/<name>.
        self.saver.save(self.session, "./models/invoicenet/%s/%s" % (self.field, name))

    def load(self, name):
        # `name` is a full checkpoint path (cf. restore_all_path in __init__).
        self.saver.restore(self.session, name)

    def get_placeholders(self, batch, is_training):
        """Maps one materialized batch tuple onto the graph's feed_dict."""
        memories, pixels, word_indices, pattern_indices, char_indices, memory_mask, parses, target = batch
        return {
            self.is_training_ph: is_training,
            self.memories_ph: memories,
            self.pixels_ph: pixels,
            self.word_indices_ph: word_indices,
            self.pattern_indices_ph: pattern_indices,
            self.char_indices_ph: char_indices,
            self.memory_mask_ph: memory_mask,
            self.parses_ph: parses,
            self.target_ph: target
        }
|
#if !defined(TMSOCKET_SERVER_STREAM_HPP__) && !defined(TMSOCKET_CLIENT_STREAM_HPP__)
# error Please include <client_stream.hpp> or <server_stream.hpp> instead.
#endif
#ifndef TMSOCKET_DETAILS_SOCKET_STREAM_IPP__
#define TMSOCKET_DETAILS_SOCKET_STREAM_IPP__
// Common state and queries shared by the client- and server-side socket
// streams (see client_stream.hpp / server_stream.hpp). Derived classes own
// the actual connect/accept/read/write logic.
class socket_stream
{
public:
    // buf_size: size in bytes of each receive chunk (default 128).
    socket_stream(int buf_size = 128)
        : m_fd(-1),
          m_is_finished(false),
          m_is_connected(false),
          m_buf_size(buf_size) {}
    // Receive-chunk size configured at construction.
    PREP_NODISCARD int
    buf_size() const noexcept
    {
        return this->m_buf_size;
    }
    virtual
    ~socket_stream() noexcept {}
    // True once a connection has been established (set by derived classes).
    PREP_NODISCARD bool
    is_connected() const
    {
        return this->m_is_connected;
    }
    // True once the stream has been shut down / finished.
    PREP_NODISCARD bool
    is_finished() const
    {
        return this->m_is_finished;
    }
    // Subscribes a callback that receives every log message this stream emits.
    void
    add_log(::std::function<void(const ::std::string&)> log_func)
    {
        m_logger.subscript(::std::move(log_func));
    }
protected:
    tmsocket_t m_fd;                   // native socket handle; -1 while unconnected
    ::std::atomic_bool m_is_finished;
    ::std::atomic_bool m_is_connected;
    const int m_buf_size;
    ::std::string m_buf;               // receive scratch buffer
    ::prep::concurrent::event<const ::std::string&> m_logger;  // log fan-out
};
#endif // #ifndef TMSOCKET_DETAILS_SOCKET_STREAM_IPP__
|
def _generate_tarfile() -> io.BytesIO:
    """Build an in-memory tar stream fixture.

    The archive contains ten files of increasing size built from BIN_DATA60,
    followed by a 'virus.exe' member holding VIRUS. The backing buffer is
    pre-allocated at 128 KiB and rewound to the start before being returned.
    """
    fh = io.BytesIO(bytearray(128 * 1024))
    # The context manager guarantees the tar stream is finalized (closed)
    # even if adding a member raises.
    with tarfile.open(fileobj=fh, mode='w|') as archive:
        for i in range(10):
            artificial_file = io.BytesIO(BIN_DATA60 + BIN_DATA60 * i)
            tar_info = tarfile.TarInfo(name=f'file_{i}')
            tar_info.size = len(artificial_file.getbuffer())
            archive.addfile(
                tar_info,
                fileobj=artificial_file
            )
        virus_info = tarfile.TarInfo(name='virus.exe')
        virus_info.size = len(VIRUS)
        archive.addfile(virus_info, fileobj=io.BytesIO(VIRUS))
    fh.seek(0)
    return fh
namespace Envoy {
// Pulls GoogleTest's Test fixture base into the Envoy namespace so test
// files can write `class Foo : public Test` without qualification.
using testing::Test;
} // namespace Envoy
|
def count_trees(data, x_inc, y_inc):
    """Count '#' cells hit while descending a grid with slope (x_inc, y_inc).

    The grid wraps horizontally; the starting cell (0, 0) is never counted.
    Precondition (unchanged from the original): y_inc < len(data), otherwise
    the first step indexes past the last row.

    :param data: list of equal-length strings ('#' marks a tree)
    :param x_inc: columns moved right per step
    :param y_inc: rows moved down per step
    :return: number of trees encountered
    """
    max_length, max_height, x, y, trees = len(data[0]), len(data), 0, 0, 0
    limit_reached = False
    while not limit_reached:
        x += x_inc
        y += y_inc
        # Wrap with modulo: the old single subtraction (x -= max_length)
        # under-corrected whenever x_inc exceeded twice the row width,
        # producing an out-of-range index.
        x %= max_length
        if data[y][x] == "#":
            trees += 1
        if y + y_inc >= max_height:
            limit_reached = True
    return trees
from great_expectations.core.usage_statistics.anonymizers.execution_engine_anonymizer import (
ExecutionEngineAnonymizer,
)
from great_expectations.datasource import PandasDatasource
class CustomDatasource(PandasDatasource):
    # Minimal subclass used to exercise anonymization of user-defined
    # (non-built-in) datasource classes.
    pass
def test_datasource_anonymizer():
    # Currently only verifies that an ExecutionEngineAnonymizer can be
    # constructed. The DatasourceAnonymizer-based assertions below were
    # disabled — presumably during the Datasource -> ExecutionEngine
    # refactor (TODO confirm) — and are kept for reference.
    datasource_anonymizer = ExecutionEngineAnonymizer()
    # n1 = datasource_anonymizer.anonymize_datasource_info("PandasDatasource")
    # assert n1 == {"parent_class": "PandasDatasource"}
    #
    # n2 = datasource_anonymizer.anonymize_datasource_info("CustomDatasource")
    # datasource_anonymizer_2 = DatasourceAnonymizer()
    # n3 = datasource_anonymizer_2.anonymize_datasource_info("CustomDatasource")
    # assert n2["parent_class"] == "PandasDatasource"
    # assert n3["parent_class"] == "PandasDatasource"
    # assert len(n3["custom_class"]) == 32
    # assert n2["custom_class"] != n3["custom_class"]
    #
    # # Same anonymizer *does* produce the same result
    # n4 = datasource_anonymizer.anonymize_datasource_info("CustomDatasource")
    # assert n4["custom_class"] == n2["custom_class"]
|
<gh_stars>0
use std::collections::VecDeque;
use sdl2;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::video::FullscreenType;
use preferences::Preferences;
use game::{Direction, GameState, Tile};
/// Owns everything the game loop needs: the current game state, the SDL2
/// event pump / renderer / mouse handles, and the pixel size of one tile.
pub struct Engine {
    game_state: GameState,
    tile_size: u32,
    event_pump: sdl2::EventPump,
    renderer: sdl2::render::Renderer<'static>,
    mouse: sdl2::mouse::MouseUtil,
}
impl Engine {
    /// Main loop: polls input, advances the game state every 10th frame, and
    /// renders every frame. Returns when the window is closed or Escape is
    /// pressed; the (reset-if-dead) game state is saved on exit.
    /// NOTE(review): with vsync enabled this ties game speed to the display
    /// refresh rate (every 10th frame) — confirm that is intended.
    pub fn run(&mut self) -> Result<(), String> {
        let mut framecounter = 0;
        // Queued direction inputs, consumed one per game-logic update.
        let mut inputs = VecDeque::new();
        'mainloop: loop {
            for event in self.event_pump.poll_iter() {
                match event {
                    Event::Quit { .. } => break 'mainloop,
                    Event::KeyDown {
                        keycode: Some(keycode),
                        repeat: false,
                        ..
                    } => {
                        match keycode {
                            Keycode::Escape => break 'mainloop,
                            Keycode::F => {
                                // Inner scope: window_mut() borrow must end
                                // before we touch self.renderer again below.
                                {
                                    let mut window = self.renderer.window_mut().unwrap();
                                    if window.fullscreen_state() == FullscreenType::Off {
                                        // Enter fullscreen; hide mouse
                                        window.set_fullscreen(FullscreenType::Desktop)?;
                                        self.mouse.show_cursor(false);
                                    } else {
                                        // Leave fullscreen; show mouse
                                        window.set_fullscreen(FullscreenType::Off)?;
                                        self.mouse.show_cursor(true);
                                    }
                                }
                                // Keep the logical size in tiles regardless of
                                // the physical window size.
                                let (level_width, level_height) = self.game_state.level_size();
                                self.renderer
                                    .set_logical_size(level_width as u32 * self.tile_size,
                                                      level_height as u32 * self.tile_size)
                                    .or_else(|e| Err(format!("{}", e)))?;
                            }
                            Keycode::W => {
                                self.game_state.toggle_walls();
                            }
                            Keycode::Up => inputs.push_back(Direction::Up),
                            Keycode::Down => inputs.push_back(Direction::Down),
                            Keycode::Left => inputs.push_back(Direction::Left),
                            Keycode::Right => inputs.push_back(Direction::Right),
                            Keycode::Return => {
                                // Return restarts the game after a death.
                                if !self.game_state.snake_alive() {
                                    self.game_state.reset();
                                }
                            }
                            _ => {}
                        }
                    }
                    _ => {}
                }
            }
            // Game logic runs at 1/10th of the render rate.
            if framecounter % 10 == 0 {
                self.game_state.update(inputs.pop_front())?;
            }
            self.render()?;
            framecounter += 1;
        }
        // Save game state on exit (never persist a dead snake).
        if !self.game_state.snake_alive() {
            self.game_state.reset();
        }
        self.game_state
            .save(&::APP_INFO, "game_state")
            .or_else(|e| Err(format!("{}", e)))?;
        Ok(())
    }
    /// Draws one frame: floor (blue while alive, dark red after death),
    /// then walls, food, and snake segments shaped by their neighbor links.
    fn render(&mut self) -> Result<(), String> {
        // Clear surface to black
        self.renderer.set_draw_color(Color::RGB(0, 0, 0));
        self.renderer.clear();
        // Draw floor
        let floor_color = if self.game_state.snake_alive() {
            Color::RGB(0, 0, 255)
        } else {
            Color::RGB(128, 0, 0)
        };
        self.renderer.set_draw_color(floor_color);
        let (level_width, level_height) = self.game_state.level_size();
        self.renderer
            .fill_rect(Rect::new(0,
                                 0,
                                 level_width as u32 * self.tile_size,
                                 level_height as u32 * self.tile_size))?;
        // Draw tiles other than floor
        for ((y, x), &tile) in self.game_state.tiles().indexed_iter() {
            match tile {
                Tile::Floor => {}
                Tile::Wall => {
                    self.renderer.set_draw_color(Color::RGB(255, 0, 0));
                    self.renderer
                        .fill_rect(Rect::new(x as i32 * self.tile_size as i32,
                                             y as i32 * self.tile_size as i32,
                                             self.tile_size,
                                             self.tile_size))?;
                }
                Tile::Food => {
                    // Inset by 1px so food reads as smaller than a full tile.
                    self.renderer.set_draw_color(Color::RGB(255, 255, 0));
                    self.renderer
                        .fill_rect(Rect::new(x as i32 * self.tile_size as i32 + 1,
                                             y as i32 * self.tile_size as i32 + 1,
                                             self.tile_size - 2,
                                             self.tile_size - 2))?;
                }
                Tile::Snake(prev, next) => {
                    // A segment is drawn as up to four overlapping rectangles,
                    // one per direction it connects to, so adjacent segments
                    // join seamlessly.
                    self.renderer.set_draw_color(Color::RGB(0, 255, 0));
                    if prev == Some(Direction::Up) || next == Some(Direction::Up) {
                        self.renderer
                            .fill_rect(Rect::new(x as i32 * self.tile_size as i32 + 1,
                                                 y as i32 * self.tile_size as i32,
                                                 self.tile_size - 2,
                                                 self.tile_size - 1))?;
                    }
                    if prev == Some(Direction::Down) || next == Some(Direction::Down) {
                        self.renderer
                            .fill_rect(Rect::new(x as i32 * self.tile_size as i32 + 1,
                                                 y as i32 * self.tile_size as i32 + 1,
                                                 self.tile_size - 2,
                                                 self.tile_size - 1))?;
                    }
                    if prev == Some(Direction::Left) || next == Some(Direction::Left) {
                        self.renderer
                            .fill_rect(Rect::new(x as i32 * self.tile_size as i32,
                                                 y as i32 * self.tile_size as i32 + 1,
                                                 self.tile_size - 1,
                                                 self.tile_size - 2))?;
                    }
                    if prev == Some(Direction::Right) || next == Some(Direction::Right) {
                        self.renderer
                            .fill_rect(Rect::new(x as i32 * self.tile_size as i32 + 1,
                                                 y as i32 * self.tile_size as i32 + 1,
                                                 self.tile_size - 1,
                                                 self.tile_size - 2))?;
                    }
                }
            }
        }
        // Present surface to screen
        self.renderer.present();
        Ok(())
    }
}
/// Loads the saved game state (or a default), initializes SDL2 video and
/// input, creates a window sized to the level, and assembles the `Engine`.
pub fn init() -> Result<Engine, String> {
    let game_state = GameState::load(&::APP_INFO, "game_state").unwrap_or_default();
    let tile_size = 8;
    let sdl = sdl2::init()?;
    let video = sdl.video()?;
    let event_pump = sdl.event_pump()?;
    let (level_width, level_height) = game_state.level_size();
    let window = video.window("Snake",
                              level_width as u32 * tile_size,
                              level_height as u32 * tile_size)
        .build()
        // map_err is the idiomatic form of or_else(|e| Err(format!("{}", e))).
        .map_err(|e| e.to_string())?;
    let renderer = window.renderer()
        .present_vsync()
        .build()
        .map_err(|e| e.to_string())?;
    let mouse = sdl.mouse();
    Ok(Engine {
        game_state: game_state,
        tile_size: tile_size,
        event_pump: event_pump,
        renderer: renderer,
        mouse: mouse,
    })
}
|
/**
* @return A help string for this switch
*/
public String help()
{
var specifics = new StringList();
specifics.add(quantifier.name().toLowerCase());
if (defaultValue != null)
{
specifics.add("default: " + defaultValue);
}
return this + "=" + type.simpleName()
+ " (" + specifics.join() + ") : " + description
+ (validValues != null ? "\n\n" + new ObjectList<>().appendAll(validValues).bulleted(4) : "");
} |
/**
* <p>Title: ServiceFactory</p>
* <p>Description: </p>
* <p>Copyright: Copyright (c) 2006</p>
* @author xuesong.net
* @version 1.0
*/
public class ServiceFactory {
private static UserManager userManager;
private static SessionManager sessionManager;
private static final Logger logger = LoggerFactory.getLogger(ServiceFactory.class);
static {
init();
}
/**
* 应用方需要注入的实现只有这两个,所以采用简单的实现
*/
private static void init() {
userManager = (UserManager)(create(UserManager.class.getName(), SimpleUserManager.class.getName()));
sessionManager = (SessionManager)(create(SessionManager.class.getName(), DefaultSessionManager.class.getName()));
}
/**
* @param name
* @param defalutClassName
* @return Object
*/
private static Object create(String name, String defalutClassName) {
String className = ConfigFactory.getString(name, defalutClassName);
try {
logger.info("create {}", className);
return ClassUtil.getInstance(className);
}
catch(Exception e) {
logger.error(e.getMessage(), e);
}
return null;
}
/**
* @return UserManager
*/
public static UserManager getUserManager() {
return userManager;
}
/**
* @return UserManager
*/
public static SessionManager getSessionManager() {
return sessionManager;
}
} |
    def play(self):
        """Start playback of this song on its room's device.

        Issues an HTTP GET to the device API (URL built from ``Song.PLAY``
        with the room's IP address and this song's id) and returns the
        decoded JSON response.
        """
        self.is_set()  # presumably validates required attributes are set — TODO confirm
        res = requests.get(Song.PLAY.format(self.room.ip_address, self.id))
        return res.json()
module Day09 (run09) where
import Data.Maybe
import Control.Applicative
import Control.Monad
import Helper
-- Good old brute force solutions... yikes bikes
-- Part 1: find the first number after the 25-number preamble that is NOT the
-- sum of two *different* entries among the 25 numbers before it.
solve1 :: [Integer] -> Maybe Integer
solve1 [] = Nothing
solve1 allX@(_:xs) = filterMaybe (not . flip elem combos) target <|> solve1 xs
  where
    preamble = take 25 allX
    -- Pair each value with its position so an entry is never summed with
    -- itself: the previous `liftM2 (+) preamble preamble` included x+x
    -- pairs, which could wrongly accept a target equal to twice one entry.
    indexed = zip [(0 :: Int) ..] preamble
    combos = [x + y | (i, x) <- indexed, (j, y) <- indexed, i /= j]
    target :: Maybe Integer
    target = listToMaybe $ drop 25 allX
-- Part 2: find a contiguous run (length >= 2) summing to `target` and return
-- the sum of the run's smallest and largest elements.
solve2 :: [Integer] -> Integer -> Maybe Integer
solve2 [] _ = Nothing
solve2 [_] _ = Nothing
solve2 l@(_:xs) target = solve2' (reverse l) <|> solve2 xs target
  where
    -- Reversed list as the arg...
    solve2' :: [Integer] -> Maybe Integer
    solve2' l' | length l' < 2 = Nothing
    solve2' l' =
      if sum l' == target then
        -- The puzzle asks for (minimum + maximum) of the run, not its first
        -- and last elements; the old `head l' + last l'` is why the site
        -- rejected the answer (see the comment in run09).
        Just $ minimum l' + maximum l'
      else
        solve2' $ tail l'
run09 :: IO ()
run09 = do
  input <- readInputIntegers "data/day09"
  putStrLn "Part 9.1:"
  let solution1 = solve1 input
  print solution1
  -- Part 2 feeds part 1's answer back in as the target sum.
  -- For some reason this solution doesn't work on the site...
  -- even though the range sums to the answer to part 1
  print $ solution1 >>= solve2 input
|
<reponame>NicolasMahe/statechannels
import {Logger} from 'pino';
import {ChannelResult} from '@statechannels/client-api-schema';
import {Store} from '../engine/store';
import {Outgoing} from '../protocols/actions';
/** Dependencies an ObjectiveManager is constructed with. */
export interface ObjectiveManagerParams {
  store: Store;
  logger: Logger;
  /** When true, collect timing metrics while executing objectives. */
  timingMetrics: boolean;
}
// TODO: currently duplicated in wallet/index.ts
/**
 * Result of executing an objective: messages to send out, the resulting
 * channel states, and an optional error raised during execution.
 */
export type ExecutionResult = {
  outbox: Outgoing[];
  channelResults: ChannelResult[];
  error?: any;
};
|
/**
* Contains{@link BindingAdapter}s for the {@link Coupon} list.
*/
public class OnSaleCouponsListBindings {
private static final String TAG = OnSaleCouponsListBindings.class.getSimpleName();
@BindingAdapter("bind:item")
public static void bindItems(RecyclerView recyclerView, List<Coupon> items) {
OnSaleCouponsAdapter adapter = (OnSaleCouponsAdapter) recyclerView.getAdapter();
if (adapter != null) {
adapter.replaceItems(items);
}
}
@BindingConversion
public static String longToStr(Long value) {
String view = "";
try {
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
view = dateFormat.format(value);
} catch (NullPointerException | IllegalArgumentException e) {
e.printStackTrace();
}
return view;
}
@InverseBindingAdapter(attribute = "android:text", event = "android:textAttrChanged")
public static Long captureLongValue(EditText view) {
long value = 0;
try {
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
Date parseData = formatter.parse(view.getText().toString());
value = parseData.getTime();
} catch (ParseException | NumberFormatException e) {
e.printStackTrace();
}
return value;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.