content
stringlengths 10
4.9M
|
---|
/**
 * Add a custom {@link io.burt.jmespath.function.Function} to JMESPath.
 * {@link Base64Function} and {@link Base64GZipFunction} are already built-in.
 *
 * @param function the function to add
 * @param <T> must extend {@link BaseFunction}
 */
public <T extends BaseFunction> void addFunction(T function) {
    // Extend the current function registry with the new function.
    FunctionRegistry functionRegistryWithExtendedFunctions = configuration.functionRegistry().extend(function);
    // NOTE(review): a brand-new builder is used here, so any settings on the
    // existing configuration other than the function registry are not carried
    // over — confirm that is intended.
    RuntimeConfiguration updatedConfig = new RuntimeConfiguration.Builder()
            .withFunctionRegistry(functionRegistryWithExtendedFunctions)
            .build();
    // Replace the runtime so subsequent queries see the added function.
    jmesPath = new JacksonRuntime(updatedConfig, getObjectMapper());
} |
<reponame>jodhi/mantine<filename>docs/src/components/MdxPage/MdxPageTabs/StylesApi/StylesApiHeader/StylesApiHeader.tsx
import React from 'react';
import { Title, Text } from '@mantine/core';
import { Prism } from '@mantine/prism';
import { generateRootCode } from '../generate-styles-code';
import GatsbyLink from '../../../MdxProvider/GatsbyLink/GatsbyLink';
// Props for the StylesApiHeader documentation component.
interface StylesApiHeaderProps {
  /** Name of the documented Mantine component */
  component: string;
  /** True when the component renders a single DOM node (no inner elements) */
  singleNode: boolean;
}
export function StylesApiHeader({ component, singleNode }: StylesApiHeaderProps) {
return (
<>
<Title order={2} style={{ fontWeight: 600, marginBottom: 15 }}>
{singleNode ? `${component} styles API` : 'Mantine styles API'}
</Title>
{singleNode ? (
<Text style={{ marginBottom: 15 }}>
{component} renders single node, use className and style props to override styles:
</Text>
) : (
<Text style={{ maxWidth: 600, marginBottom: 30 }}>
Mantine components support styling of individual elements by adding your class or inline
styles to any element inside component. For more information see{' '}
<GatsbyLink to="/theming/styles-api/">styles API guide</GatsbyLink>.
</Text>
)}
{!singleNode && (
<Text weight={500} style={{ marginBottom: 15 }}>
Add styles to root element with className, style or sx:
</Text>
)}
<Prism language="tsx">{generateRootCode(component)}</Prism>
</>
);
}
|
// New creates a new API server: it wires up the router, the logger and a
// cookie-backed session store configured from o, generating a random session
// secret when none was supplied.
func New(ctx interface{}, o *options.Base) (*API, error) {
	var err error
	a := API{
		options: o,
		logger:  log.New().WithField("module", "core"),
	}
	// Build the root web handler and mount the router at the root path.
	base := web.New(ctx)
	a.Router = router.New(base, ctx, "")
	// Generate a 256-byte secret when the caller did not configure one.
	// NOTE(review): a.options and o point at the same struct, so the
	// generated secret is visible through o below.
	if o.Session.Secret == "" {
		a.options.Session.Secret, err = options.GenerateSecret(256)
		if err != nil {
			return nil, err
		}
	}
	sessionStore := sessions.NewCookieStore([]byte(o.Session.Secret))
	// Secure cookies are the default; disabling them is for development only.
	if a.options.Session.DisableSecure {
		log.Warn("SECURE COOKIE FLAG IS DISABLED. DEVELOPMENT USE ONLY.")
		sessionStore.Options.Secure = false
	} else {
		sessionStore.Options.Secure = true
	}
	// Keep the session cookie inaccessible to client-side scripts.
	sessionStore.Options.HttpOnly = true
	// Scope the cookie to the external address when one is configured.
	if a.options.ExternalAddress != "" {
		sessionStore.Options.Domain = o.ExternalAddress
	}
	a.sessionStore = sessionStore
	return &a, nil
} |
Urgent Change Needed to Radiation Protection Policy
Abstract: Although almost 120 y of medical experience and data exist on human exposure to ionizing radiation, advisory bodies and regulators claim there are still significant uncertainties about radiation health risks that require extreme precautions be taken. Decades of evidence led to recommendations in the 1920s for protecting radiologists by limiting their daily exposure. These were shown in later studies to decrease both their overall mortality and cancer mortality below those of unexposed groups. In the 1950s, without scientific evidence, the National Academy of Sciences Biological Effects of Atomic Radiation (BEAR) Committee and the NCRP recommended that the linear no-threshold (LNT) model be used to assess the risk of radiation-induced mutations in germ cells and the risk of cancer in somatic cells. This policy change was accepted by the regulators of every country without a thorough review of its basis. Because use of the LNT model has created extreme public fear of radiation, which impairs vital medical applications of low-dose radiation in diagnostics and therapy and blocks nuclear energy projects, it is time to change radiation protection policy back into line with the data. |
// SendTransaction is a mock for SendTransaction function.
//
// It builds a plain value transfer from `from` to `to`, signs it with the
// account's signer, submits it to the simulated backend, commits a block,
// and returns the lower-cased transaction hash.
func (c *MockClient) SendTransaction(ctx context.Context,
	from, to common.Address, amount *big.Int) (result *string, err error) {
	// Fixed gas limit — sufficient for a simple transfer in the mock backend.
	gasLimit := uint64(4700000)
	acc := c.Acc[strings.ToLower(from.String())]
	nonce, err := c.Backend.NonceAt(context.Background(), acc.From, nil)
	if err != nil {
		return nil, err
	}
	gasPrice, err := c.Backend.SuggestGasPrice(context.Background())
	if err != nil {
		return nil, err
	}
	rawTx := types.NewTransaction(nonce, to,
		amount, gasLimit, gasPrice, nil)
	signTx, err := acc.Signer(types.HomesteadSigner{}, acc.From, rawTx)
	// Fix: the signing error was previously ignored (shadowed by the next
	// if-statement); a failed signature must not be submitted.
	if err != nil {
		return nil, err
	}
	if err := c.Backend.SendTransaction(ctx, signTx); err != nil {
		return nil, err
	}
	// Mine the transaction into a block on the simulated backend.
	c.Backend.Commit()
	hash := strings.ToLower(signTx.Hash().String())
	return &hash, nil
} |
// +build linux
package main
import (
"encoding/json"
"fmt"
"os"
"os/exec"
"strings"
"github.com/urfave/cli"
)
// psCommand implements "ps": it lists the processes running inside a
// container, either as JSON or by delegating to the host `ps` command
// restricted to the container's PIDs.
var psCommand = cli.Command{
	Name:      "ps",
	Usage:     "ps displays the processes running inside a container",
	ArgsUsage: `<container-id> [ps options]`,
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:  "format, f",
			Value: "",
			Usage: `select one of: ` + formatOptions,
		},
	},
	Action: func(context *cli.Context) error {
		container, err := getContainer(context)
		if err != nil {
			return err
		}
		// PIDs of the container's processes as seen from the host.
		pids, err := container.Processes()
		if err != nil {
			return err
		}
		if context.String("format") == "json" {
			if err := json.NewEncoder(os.Stdout).Encode(pids); err != nil {
				return err
			}
			return nil
		}
		pidlist := []string{}
		for _, pid := range pids {
			pidlist = append(pidlist, fmt.Sprintf("%d", pid))
		}
		// [1:] is to remove command name, ex:
		// context.Args(): [containet_id ps_arg1 ps_arg2 ...]
		// psArgs: [ps_arg1 ps_arg2 ...]
		//
		psArgs := context.Args()[1:]
		if len(psArgs) == 0 {
			psArgs = []string{"-f"}
		}
		psArgs = append(psArgs, "-p", strings.Join(pidlist, ","))
		output, err := exec.Command("ps", psArgs...).Output()
		if err != nil {
			return err
		}
		// Fix: use Print, not Printf — ps output is data, not a format
		// string; any '%' in it would be misinterpreted by Printf.
		fmt.Print(string(output))
		return nil
	},
	SkipArgReorder: true,
}
|
#include <iostream>
#include <string>
#include <vector>
using namespace std;
vector<int> h, m;
int main() {
string s, sub;
int i, j, n, H, M;
long long ans=0;
cin>>s;
n = s.size();
for(i=0;i<=n-5;i++) {
sub = s.substr(i, 5);
if(sub == "heavy") h.push_back(i);
else if(sub == "metal") m.push_back(i);
}
M = m.size();
H = h.size();
j=0;
for(i=0;i<H;i++) {
while(j<M && m[j]<h[i]+5) j++;
ans += (M-j);
}
cout<<ans<<endl;
return 0;
}
|
Behind the hot shooting of Jordan Hamilton and Quincy Miller, the Denver Nuggets sure didn’t look like the No. 22 seed as tournament play began at the NBA Summer League in Las Vegas.
Hamilton scored 18 points in the first quarter, Miller added 12 in the third quarter and the Nuggets advanced to the Round of 16 with an 87-82 win over the 11th-seeded New Orleans Pelicans on Wednesday.
The Nuggets, who lost all three games in preliminary play, will face the No. 6 seed Toronto Raptors on Thursday at 4 p.m. MT.
Behind Hamilton's quick start, Denver led by as many as 16 points in the first quarter and had a 15-point advantage in the third before the Pelicans made a charge.
New Orleans went on a 17-3 run to close within a point early in the fourth quarter, but the Nuggets responded with a 12-2 surge to push the lead back to double figures.
Hamilton, who missed Tuesday’s game to attend a cousin’s funeral in Los Angeles, finished with 23 points, four rebounds and two steals. He scored 15 total points in his first two summer-league games.
“It’s been tough,” Hamilton told NBA TV after the game. “The few games before, it (his cousin’s death) was on my mind. I just had to play through it. Once I got a chance to spend time with my family and talk to them, I felt a lot better coming back to summer league. That's why I had a pretty good performance.”
Miller, who has been working on his outside shooting all summer, hit all five of his 3-point attempts and finished with 17 points. Erick Green, a second-round pick in last month’s draft, added 14 for Denver.
The Nuggets finished 12-for-22 from 3-point range and 19-of-21 from the free-throw line. |
#!/usr/bin/env python3
"""
Created on Tue Apr 24 15:48:52 2020
@author: <NAME>
"""
import sys
from os.path import splitext
import numpy as np
# import spatialmath as sp
from spatialmath import SE3
from spatialmath.base.argcheck import getvector, verifymatrix
from roboticstoolbox.robot.ELink import ELink, ETS
# from roboticstoolbox.backends.PyPlot.functions import \
# _plot, _teach, _fellipse, _vellipse, _plot_ellipse, \
# _plot2, _teach2
from roboticstoolbox.tools import xacro
from roboticstoolbox.tools import URDF
from roboticstoolbox.robot.Robot import Robot
from roboticstoolbox.robot.Gripper import Gripper
from pathlib import PurePath, PurePosixPath
from ansitable import ANSITable, Column
from spatialmath import SpatialAcceleration, SpatialVelocity, \
SpatialInertia, SpatialForce
class ERobot(Robot):
"""
The ERobot. A superclass which represents the
kinematics of a serial-link manipulator
:param et_list: List of elementary transforms which represent the robot
kinematics
:type et_list: ET list
:param name: Name of the robot
:type name: str, optional
:param manufacturer: Manufacturer of the robot
:type manufacturer: str, optional
:param base: Location of the base is the world frame
:type base: SE3, optional
:param tool: Offset of the flange of the robot to the end-effector
:type tool: SE3, optional
:param gravity: The gravity vector
:type n: ndarray(3)
:references:
- Kinematic Derivatives using the Elementary Transform Sequence,
<NAME> and <NAME>
"""
# TODO do we need tool and base as well?
def __init__(
        self,
        elinks,
        base_link=None,
        gripper_links=None,
        **kwargs
):
    """
    Construct an ERobot from either an ETS or a list of ELinks.

    :param elinks: either an ETS (chopped into one link per joint) or a
        list of ELink objects whose parent references define the link tree
    :param base_link: unused here; the base is detected as the link with
        no parent
    :param gripper_links: root link(s) of gripper subtrees; these links are
        removed from the main link list and wrapped in Gripper objects
    :raises TypeError: if elinks is neither an ETS nor a list of ELinks
    :raises ValueError: on multiple base links or inconsistent jindex
        assignments
    """
    self._ets = []
    self._linkdict = {}
    self._n = 0
    self._ee_links = []
    self._base_link = None

    if isinstance(elinks, ETS):
        # were passed an ETS string
        ets = elinks
        elinks = []
        # chop it up into segments, a link frame after every joint
        start = 0
        for j, k in enumerate(ets.joints()):
            ets_j = ets[start:k+1]
            start = k + 1
            if j == 0:
                parent = None
            else:
                parent = elinks[-1]
            elink = ELink(ets_j, parent=parent, name=f"link{j:d}")
            elinks.append(elink)
        n = len(ets.joints())
        # any trailing constant transform becomes a dedicated "ee" link
        tool = ets[start:]
        if len(tool) > 0:
            elinks.append(ELink(tool, parent=elinks[-1], name="ee"))
    elif isinstance(elinks, list):
        # were passed a list of ELinks
        # check all the incoming ELink objects
        n = 0
        for link in elinks:
            if isinstance(link, ELink):
                self._linkdict[link.name] = link
            else:
                raise TypeError("Input can be only ELink")
            if link.isjoint:
                n += 1
    else:
        raise TypeError('elinks must be a list of ELinks or an ETS')

    self._n = n

    # scan for base
    for link in elinks:
        # is this a base link?
        if link._parent is None:
            if self._base_link is not None:
                raise ValueError('Multiple base links')
            self._base_link = link
        else:
            # no, update children of this link's parent
            link._parent._child.append(link)

    # Set up the gripper, make a list containing the root of all
    # grippers
    if gripper_links is not None:
        if isinstance(gripper_links, ELink):
            gripper_links = [gripper_links]
    else:
        gripper_links = []

    # An empty list to hold all grippers
    self.grippers = []

    # Make a gripper object for each gripper
    for link in gripper_links:
        g_links = self.dfs_links(link)

        # Remove gripper links from the robot
        for g_link in g_links:
            elinks.remove(g_link)

        # Save the gripper object
        self.grippers.append(Gripper(g_links))

    # Subtract the n of the grippers from the n of the robot
    for gripper in self.grippers:
        self._n -= gripper.n

    # Set the ee links
    self.ee_links = []
    if len(gripper_links) == 0:
        for link in elinks:
            # is this a leaf node? and do we not have any grippers
            if len(link.child) == 0:
                # no children, must be an end-effector
                self.ee_links.append(link)
    else:
        for link in gripper_links:
            # use the passed in value
            self.ee_links.append(link.parent)

    # assign the joint indices
    if all([link.jindex is None for link in elinks]):
        jindex = [0]  # "mutable integer" hack

        def visit_link(link, jindex):
            # if it's a joint, assign it a jindex and increment it
            if link.isjoint and link in elinks:
                link.jindex = jindex[0]
                jindex[0] += 1

        # visit all links in DFS order
        self.dfs_links(
            self.base_link, lambda link: visit_link(link, jindex))
    elif all([link.jindex is not None for link in elinks]):
        # jindex set on all, check they are unique and sequential
        jset = set(range(self._n))
        for link in elinks:
            if link.jindex not in jset:
                # Fix: these messages were missing the f-prefix, so the
                # {…} placeholders were printed literally.
                raise ValueError(
                    f'joint index {link.jindex} was '
                    'repeated or out of range')
            jset -= set([link.jindex])
        if len(jset) > 0:  # pragma nocover  # is impossible
            raise ValueError(f'joints {jset} were not assigned')
    else:
        # must be a mixture of ELinks with/without jindex
        raise ValueError(
            'all links must have a jindex, or none have a jindex')

    # Current joint angles of the robot
    # TODO should go to Robot class?
    self.q = np.zeros(self.n)
    self.qd = np.zeros(self.n)
    self.qdd = np.zeros(self.n)
    self.control_type = 'v'

    super().__init__(elinks, **kwargs)
def dfs_links(self, start, func=None):
    """
    Visit all links reachable from ``start`` in depth-first pre-order,
    optionally applying ``func`` to each link as it is first seen.

    :param start: the link to start at
    :type start: ELink
    :param func: An optional function to apply to each link as it is found
    :type func: function
    :returns: A list of links
    :rtype: list of ELink
    """
    visited = []
    # explicit stack instead of recursion; children are pushed in reverse
    # so the first child is popped (and therefore visited) next,
    # preserving pre-order
    stack = [start]
    while stack:
        link = stack.pop()
        if link in visited:
            continue
        visited.append(link)
        if func is not None:
            func(link)
        stack.extend(reversed(link.child))
    return visited
# def dfs_path(self, l1, l2):
# path = []
# visited = [l1]
# def vis_children(link):
# visited.append(link)
# for li in link.child:
# if li not in visited:
# if li == l2 or vis_children(li):
# path.append(li)
# return True
# vis_children(l1)
# path.append(l1)
# path.reverse()
# return path
def to_dict(self):
    """
    Serialise the robot to a plain dictionary: one entry per link (robot
    links first, then gripper links), each listing its ETS axes/values and
    serialised geometry/collision objects.

    :return: dictionary with keys 'links', 'name' and 'n'
    :rtype: dict
    """

    def link_to_dict(link):
        # Serialise one link: ETS axes and values, then geometry/collision.
        li = {
            'axis': [],
            'eta': [],
            'geometry': [],
            'collision': []
        }
        for et in link.ets():
            li['axis'].append(et.axis)
            li['eta'].append(et.eta)
        # the joint variable (if any) contributes a final axis with no eta
        # value beyond what link.v carries
        if link.v is not None:
            li['axis'].append(link.v.axis)
            li['eta'].append(link.v.eta)
        for gi in link.geometry:
            li['geometry'].append(gi.to_dict())
        for gi in link.collision:
            li['collision'].append(gi.to_dict())
        return li

    ob = {
        'links': [],
        'name': self.name,
        'n': self.n
    }

    # refresh the forward kinematics so geometry poses are current
    self.fkine_all()

    for link in self.links:
        ob['links'].append(link_to_dict(link))

    # Do the grippers now
    for gripper in self.grippers:
        for link in gripper.links:
            ob['links'].append(link_to_dict(link))

    return ob
def fk_dict(self):
    """
    Serialise the current forward-kinematic pose of every link's geometry
    and collision objects (robot links first, then gripper links).

    :return: dictionary with a 'links' key
    :rtype: dict
    """

    def link_fk_dict(link):
        # Serialise one link's geometry/collision poses.
        li = {
            'geometry': [],
            'collision': []
        }
        for gi in link.geometry:
            li['geometry'].append(gi.fk_dict())
        for gi in link.collision:
            li['collision'].append(gi.fk_dict())
        return li

    ob = {
        'links': []
    }

    # refresh the forward kinematics so poses are current
    self.fkine_all()

    # Do the robot
    for link in self.links:
        ob['links'].append(link_fk_dict(link))

    # Do the grippers now
    for gripper in self.grippers:
        for link in gripper.links:
            ob['links'].append(link_fk_dict(link))

    return ob
# @classmethod
# def urdf_to_ets(cls, file_path):
# name, ext = splitext(file_path)
# if ext == '.xacro':
# urdf_string = xacro.main(file_path)
# urdf = URDF.loadstr(urdf_string, file_path)
# return ERobot(
# urdf.elinks,
# name=urdf.name
# )
def urdf_to_ets_args(self, file_path, tld=None):
    """
    Load a URDF (or xacro) model and return its links and robot name.

    :param file_path: File path relative to the xacro folder
    :type file_path: str, in POSIX file path format
    :param tld: top-level directory, defaults to None
    :type tld: str, optional
    :return: Links and robot name
    :rtype: tuple(ELink list, str)
    """
    # get the path to the class that defines the robot
    classpath = sys.modules[self.__module__].__file__
    # add on relative path to get to the URDF or xacro file
    base_path = PurePath(classpath).parent.parent / 'URDF' / 'xacro'
    file_path = base_path / PurePosixPath(file_path)
    name, ext = splitext(file_path)

    if ext == '.xacro':
        # it's a xacro file, preprocess it
        if tld is not None:
            tld = base_path / PurePosixPath(tld)
        urdf_string = xacro.main(file_path, tld)
        urdf = URDF.loadstr(urdf_string, file_path)
    else:  # pragma nocover
        # plain URDF: read the file directly; `with` ensures the handle is
        # closed (previously the file object was leaked)
        with open(file_path) as f:
            urdf = URDF.loadstr(f.read(), file_path)

    return urdf.elinks, urdf.name
# @classmethod
# def dh_to_ets(cls, robot):
# """
# Converts a robot modelled with standard or modified DH parameters to
# an ERobot representation
# :param robot: The robot model to be converted
# :type robot: SerialLink
# :return: List of returned :class:`bluepy.btle.Characteristic` objects
# :rtype: ets class
# """
# ets = []
# q_idx = []
# M = 0
# for j in range(robot.n):
# L = robot.links[j]
# # Method for modified DH parameters
# if robot.mdh:
# # Append Tx(a)
# if L.a != 0:
# ets.append(ET.Ttx(L.a))
# M += 1
# # Append Rx(alpha)
# if L.alpha != 0:
# ets.append(ET.TRx(L.alpha))
# M += 1
# if L.is_revolute:
# # Append Tz(d)
# if L.d != 0:
# ets.append(ET.Ttz(L.d))
# M += 1
# # Append Rz(q)
# ets.append(ET.TRz(joint=j+1))
# q_idx.append(M)
# M += 1
# else:
# # Append Tz(q)
# ets.append(ET.Ttz(joint=j+1))
# q_idx.append(M)
# M += 1
# # Append Rz(theta)
# if L.theta != 0:
# ets.append(ET.TRz(L.alpha))
# M += 1
# return cls(
# ets,
# q_idx,
# robot.name,
# robot.manuf,
# robot.base,
# robot.tool)
@property
def qlim(self):
    """Joint limits of every joint, stacked into a (2, n) array."""
    limits = np.zeros((2, self.n))
    col = 0
    # iterate links directly; only joint links contribute a column
    for link in self.links:
        if link.isjoint:
            limits[:, col] = link.qlim
            col += 1
    return limits
# @property
# def qdlim(self):
# return self.qdlim
# --------------------------------------------------------------------- #
@property
def n(self):
    """Number of joints in the robot (int), excluding gripper joints."""
    return self._n
# --------------------------------------------------------------------- #
@property
def elinks(self):
    """List of all ELink objects belonging to the robot."""
    # return self._linkdict
    return self._links
# --------------------------------------------------------------------- #
@property
def link_dict(self):
    """Mapping from link name to ELink object."""
    return self._linkdict
# --------------------------------------------------------------------- #
@property
def base_link(self):
    """The robot's base link (the unique link with no parent)."""
    return self._base_link

@base_link.setter
def base_link(self, link):
    # only an ELink instance may be assigned as the base
    if isinstance(link, ELink):
        self._base_link = link
    else:
        # self._base_link = self.links[link]
        raise TypeError('Must be an ELink')
    # self._reset_fk_path()
# --------------------------------------------------------------------- #
# TODO get configuration string
@property
def ee_links(self):
    """List of end-effector ELink objects (leaf links, or gripper parents)."""
    return self._ee_links
# def add_ee(self, link):
# if isinstance(link, ELink):
# self._ee_link.append(link)
# else:
# raise ValueError('must be an ELink')
# self._reset_fk_path()
@ee_links.setter
def ee_links(self, link):
    """Set the end-effector link(s): one ELink, or a list of ELinks."""
    # guard-clause form: accept a single link, then a homogeneous list
    if isinstance(link, ELink):
        self._ee_links = [link]
        return
    if isinstance(link, list) and all(isinstance(x, ELink) for x in link):
        self._ee_links = link
        return
    raise TypeError('expecting an ELink or list of ELinks')
# --------------------------------------------------------------------- #
# @property
# def ets(self):
# return self._ets
# --------------------------------------------------------------------- #
# @property
# def M(self):
# return self._M
# --------------------------------------------------------------------- #
# @property
# def q_idx(self):
# return self._q_idx
# --------------------------------------------------------------------- #
def ets(self, ee=None):
    """
    Compose the full ETS from the base link to an end-effector.

    :param ee: the end-effector link; may be omitted only when the robot
        has exactly one end-effector
    :type ee: ELink, optional
    :return: the composed elementary transform sequence
    :rtype: ETS
    :raises ValueError: if ee is omitted with multiple end-effectors, or is
        not a link of this robot
    """
    if ee is None:
        if len(self.ee_links) == 1:
            link = self.ee_links[0]
        else:
            raise ValueError(
                'robot has multiple end-effectors, specify one')
    # elif isinstance(ee, str) and ee in self._linkdict:
    #     ee = self._linkdict[ee]
    elif isinstance(ee, ELink) and ee in self._links:
        link = ee
    else:
        raise ValueError('end-effector is not valid')

    ets = ETS()

    # build the ETS string from ee back to root
    while link is not None:
        ets = link.ets() * ets
        link = link.parent

    return ets
def config(self):
    """Return the joint configuration string, one letter per joint:
    'P' for prismatic, 'R' for revolute, in link order."""
    letters = []
    for link in self.links:
        if link.v is not None:
            if link.v.isprismatic:
                letters.append('P')
            elif link.v.isrevolute:
                letters.append('R')
    return ''.join(letters)
# --------------------------------------------------------------------- #
def fkine(self, q=None, from_link=None, to_link=None):
    '''
    Evaluates the forward kinematics of a robot based on its ETS and
    joint angles q.

    T = fkine(q) evaluates forward kinematics for the robot at joint
    configuration q.

    T = fkine() as above except uses the stored q value of the
    robot object.

    Trajectory operation:
    Calculates fkine for each point on a trajectory of joints q where
    q is (nxm) and the returning SE3 in (m)

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :param from_link: start of the kinematic chain; defaults to base_link
    :type from_link: ELink, optional
    :param to_link: end of the kinematic chain; defaults to the first
        end-effector link
    :type to_link: ELink, optional
    :return: The transformation matrix representing the pose of the
        end-effector
    :rtype: SE3

    :notes:
        - The robot's base or tool transform, if present, are incorporated
          into the result.

    :references:
        - Kinematic Derivatives using the Elementary Transform
          Sequence, <NAME> and <NAME>
    '''
    if from_link is None:
        from_link = self.base_link

    if to_link is None:
        to_link = self.ee_links[0]

    trajn = 1

    if q is None:
        q = self.q

    path, n = self.get_path(from_link, to_link)

    use_jindex = True

    # Work out how q indexes the joints:
    #   1. q covers all robot joints -> index by link.jindex
    #   2. q covers only the joints on this path -> index sequentially (j)
    #   3. otherwise q must be a (n, trajn) trajectory matrix
    try:
        q = getvector(q, self.n, 'col')
    except ValueError:
        try:
            q = getvector(q, n, 'col')
            use_jindex = False
            j = 0
        except ValueError:
            trajn = q.shape[1]
            verifymatrix(q, (self.n, trajn))

    for i in range(trajn):
        # start from the base transform and compose along the path
        tr = self.base.A
        for link in path:
            if link.isjoint:
                if use_jindex:
                    T = link.A(q[link.jindex, i], fast=True)
                else:
                    T = link.A(q[j, i], fast=True)
                    j += 1
            else:
                T = link.A(fast=True)
            tr = tr @ T
        # first pose creates the SE3; later poses are appended (trajectory)
        if i == 0:
            t = SE3(tr)
        else:
            t.append(SE3(tr))

    return t
def fkine_all(self, q=None):
    '''
    Tall = fkine_all(q) evaluates fkine for each joint within a robot and
    returns a trajectory of poses.

    Tall = fkine_all() as above except uses the stored q value of the
    robot object.

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :return T: Homogeneous transformation trajectory
    :rtype T: SE3 list

    :notes:
        - The robot's base transform, if present, are incorporated
          into the result.
        - Side effect: stores the world pose of every link in link._fk and
          updates the wT of all geometry/collision objects.

    :references:
        - Kinematic Derivatives using the Elementary Transform
          Sequence, <NAME> and <NAME>
    '''
    if q is None:
        q = np.copy(self.q)
    else:
        q = getvector(q, self.n)

    for link in self.links:
        # per-link transform: joint links use their q value
        if link.isjoint:
            t = link.A(q[link.jindex])
        else:
            t = link.A()
        # chain off the parent's cached pose (base link uses self.base)
        if link.parent is None:
            link._fk = self.base * t
        else:
            link._fk = link.parent._fk * t

        # Update the collision objects transform as well
        for col in link.collision:
            col.wT = link._fk
        for gi in link.geometry:
            gi.wT = link._fk

    # Do the grippers now (gripper joints use the gripper's own q)
    for gripper in self.grippers:
        for link in gripper.links:
            # print(link.jindex)
            if link.isjoint:
                t = link.A(gripper.q[link.jindex])
            else:
                t = link.A()

            link._fk = link.parent._fk * t

            # Update the collision objects transform as well
            for col in link.collision:
                col.wT = link._fk
            for gi in link.geometry:
                gi.wT = link._fk
# def jacob0(self, q=None):
# """
# J0 = jacob0(q) is the manipulator Jacobian matrix which maps joint
# velocity to end-effector spatial velocity. v = J0*qd in the
# base frame.
# J0 = jacob0() as above except uses the stored q value of the
# robot object.
# :param q: The joint angles/configuration of the robot (Optional,
# if not supplied will use the stored q values).
# :type q: float ndarray(n)
# :return J: The manipulator Jacobian in ee frame
# :rtype: float ndarray(6,n)
# :references:
# - Kinematic Derivatives using the Elementary Transform
# Sequence, <NAME> and <NAME>
# """
# if q is None:
# q = np.copy(self.q)
# else:
# q = getvector(q, self.n)
# T = (self.base.inv() * self.fkine(q)).A
# U = np.eye(4)
# j = 0
# J = np.zeros((6, self.n))
# for link in self._fkpath:
# for k in range(link.M):
# if k != link.q_idx:
# U = U @ link.ets[k].T().A
# else:
# # self._jacoblink(link, k, T)
# U = U @ link.ets[k].T(q[j]).A
# Tu = np.linalg.inv(U) @ T
# n = U[:3, 0]
# o = U[:3, 1]
# a = U[:3, 2]
# x = Tu[0, 3]
# y = Tu[1, 3]
# z = Tu[2, 3]
# if link.ets[k].axis == 'Rz':
# J[:3, j] = (o * x) - (n * y)
# J[3:, j] = a
# elif link.ets[k].axis == 'Ry':
# J[:3, j] = (n * z) - (a * x)
# J[3:, j] = o
# elif link.ets[k].axis == 'Rx':
# J[:3, j] = (a * y) - (o * z)
# J[3:, j] = n
# elif link.ets[k].axis == 'tx':
# J[:3, j] = n
# J[3:, j] = np.array([0, 0, 0])
# elif link.ets[k].axis == 'ty':
# J[:3, j] = o
# J[3:, j] = np.array([0, 0, 0])
# elif link.ets[k].axis == 'tz':
# J[:3, j] = a
# J[3:, j] = np.array([0, 0, 0])
# j += 1
# return J
def get_path(self, from_link, to_link):
    """
    Walk parent pointers from ``to_link`` back to ``from_link`` and return
    the chain in base-to-tip order.

    :param from_link: first link of the chain (must be an ancestor of
        to_link)
    :type from_link: ELink
    :param to_link: last link of the chain
    :type to_link: ELink
    :return: (path, n) — the list of links from from_link to to_link, and
        the number of joints on that path
    :rtype: tuple(list of ELink, int)
    :raises ValueError: if from_link is not an ancestor of to_link
    """
    path = []
    n = 0

    link = to_link
    path.append(link)
    if link.isjoint:
        n += 1

    while link != from_link:
        link = link.parent
        # Fix: previously a broken chain fell off the root and raised an
        # opaque AttributeError on None.parent.
        if link is None:
            raise ValueError(
                'from_link is not an ancestor of to_link')
        path.append(link)
        if link.isjoint:
            n += 1

    path.reverse()
    return path, n
def jacob0(
        self, q=None, from_link=None, to_link=None,
        offset=None, T=None):
    """
    J0 = jacob0(q) is the manipulator Jacobian which maps joint velocity
    to end-effector spatial velocity expressed in the base frame, for the
    kinematic chain from_link -> to_link.

    :param q: joint configuration; defaults to the stored q. May cover all
        robot joints or only the joints on the chain.
    :type q: float ndarray
    :param from_link: start of the chain; defaults to base_link
    :param to_link: end of the chain; defaults to the first end-effector
    :param offset: extra tool transform applied after to_link
    :type offset: SE3, optional
    :param T: precomputed end-effector pose (base.inv() * fkine * offset);
        pass it to avoid recomputing the forward kinematics
    :type T: SE3, optional
    :return: The manipulator Jacobian in the 0 (base) frame
    :rtype: float ndarray(6,n)
    """
    if from_link is None:
        from_link = self.base_link

    if to_link is None:
        to_link = self.ee_links[0]

    if offset is None:
        offset = SE3()

    path, n = self.get_path(from_link, to_link)

    if q is None:
        q = np.copy(self.q)
    else:
        # accept either a chain-length or a full-robot-length vector
        try:
            q = getvector(q, n)
        except ValueError:
            q = getvector(q, self.n)

    if T is None:
        T = (self.base.inv()
             * self.fkine(q, from_link=from_link, to_link=to_link)
             * offset)
    T = T.A
    U = np.eye(4)
    j = 0
    J = np.zeros((6, n))

    for link in path:
        if link.isjoint:
            U = U @ link.A(q[j], fast=True)

            if link == to_link:
                U = U @ offset.A

            Tu = np.linalg.inv(U) @ T

            # NOTE(review): `n` is rebound here from the joint count to a
            # column of U; safe only because J was already sized above.
            n = U[:3, 0]
            o = U[:3, 1]
            a = U[:3, 2]

            x = Tu[0, 3]
            y = Tu[1, 3]
            z = Tu[2, 3]

            # fill column j according to the joint's axis type
            if link.v.axis == 'Rz':
                J[:3, j] = (o * x) - (n * y)
                J[3:, j] = a

            elif link.v.axis == 'Ry':
                J[:3, j] = (n * z) - (a * x)
                J[3:, j] = o

            elif link.v.axis == 'Rx':
                J[:3, j] = (a * y) - (o * z)
                J[3:, j] = n

            elif link.v.axis == 'tx':
                J[:3, j] = n
                J[3:, j] = np.array([0, 0, 0])

            elif link.v.axis == 'ty':
                J[:3, j] = o
                J[3:, j] = np.array([0, 0, 0])

            elif link.v.axis == 'tz':
                J[:3, j] = a
                J[3:, j] = np.array([0, 0, 0])

            j += 1
        else:
            # constant link transform, no Jacobian column
            U = U @ link.A(fast=True)

    return J
def jacobe(self, q=None, from_link=None, to_link=None, offset=None):
    """
    Je = jacobe(q) is the manipulator Jacobian matrix which maps joint
    velocity to end-effector spatial velocity. v = Je*qd in the
    end-effector frame.

    Je = jacobe() as above except uses the stored q value of the
    robot object.

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :param from_link: start of the chain; defaults to base_link
    :param to_link: end of the chain; defaults to the first end-effector
    :param offset: extra tool transform applied after to_link
    :type offset: SE3, optional
    :return J: The manipulator Jacobian in ee frame
    :rtype: float ndarray(6,n)
    """
    if from_link is None:
        from_link = self.base_link

    if to_link is None:
        to_link = self.ee_links[0]

    if offset is None:
        offset = SE3()

    if q is None:
        q = np.copy(self.q)
    # else:
    #     q = getvector(q, n)

    # end-effector pose relative to the base, reused by both Jacobians
    T = (self.base.inv()
         * self.fkine(q, from_link=from_link, to_link=to_link)
         * offset)

    # rotate the base-frame Jacobian into the end-effector frame
    J0 = self.jacob0(q, from_link, to_link, offset, T)
    Je = self.jacobev(q, from_link, to_link, offset, T) @ J0
    return Je
def hessian0(self, q=None, J0=None, from_link=None, to_link=None):
    """
    The manipulator Hessian tensor maps joint acceleration to end-effector
    spatial acceleration, expressed in the world-coordinate frame. This
    function calulcates this based on the ETS of the robot. One of J0 or q
    is required. Supply J0 if already calculated to save computation time

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :param J0: The manipulator Jacobian in the 0 frame
    :type J0: float ndarray(6,n)
    :return: The manipulator Hessian in 0 frame
    :rtype: float ndarray(6,n,n)

    :references:
        - Kinematic Derivatives using the Elementary Transform
          Sequence, <NAME> and <NAME>
    """
    if from_link is None:
        from_link = self.base_link

    if to_link is None:
        to_link = self.ee_links[0]

    path, n = self.get_path(from_link, to_link)

    # J0 takes priority; only compute it from q when not supplied
    if J0 is None:
        if q is None:
            q = np.copy(self.q)
        else:
            q = getvector(q, n)

        J0 = self.jacob0(q, from_link, to_link)
    else:
        verifymatrix(J0, (6, n))

    H = np.zeros((6, n, n))

    # H[:,i,j] built from cross products of Jacobian columns; the tensor is
    # symmetric in its translational part, so mirror i<->j
    for j in range(n):
        for i in range(j, n):

            H[:3, i, j] = np.cross(J0[3:, j], J0[:3, i])
            H[3:, i, j] = np.cross(J0[3:, j], J0[3:, i])

            if i != j:
                H[:3, j, i] = H[:3, i, j]

    return H
def manipulability(self, q=None, J=None, from_link=None, to_link=None):
    """
    Yoshikawa's manipulability index (scalar) at configuration q: high when
    the robot can move equally in all Cartesian directions, low near a
    singularity. One of J or q is required; supply J if already calculated
    to save computation time.

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :param J: The manipulator Jacobian in any frame
    :type J: float ndarray(6,n)
    :return: The manipulability index
    :rtype: float

    :references:
        - Analysis and control of robot manipulators with redundancy,
          <NAME>,
        - Robotics Research: The First International Symposium (<NAME>
          and <NAME>, eds.), pp. 735-747, The MIT press, 1984.
    """
    from_link = self.base_link if from_link is None else from_link
    to_link = self.ee_links[0] if to_link is None else to_link

    path, n = self.get_path(from_link, to_link)

    # a supplied Jacobian wins; otherwise compute one from q
    if J is None:
        q = np.copy(self.q) if q is None else getvector(q, n)
        J = self.jacob0(q, from_link, to_link)
    else:
        verifymatrix(J, (6, n))

    return np.sqrt(np.linalg.det(J @ np.transpose(J)))
def jacobm(self, q=None, J=None, H=None, from_link=None, to_link=None):
    """
    Calculates the manipulability Jacobian. This measure relates the rate
    of change of the manipulability to the joint velocities of the robot.
    One of J or q is required. Supply J and H if already calculated to
    save computation time

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :param J: The manipulator Jacobian in any frame
    :type J: float ndarray(6,n)
    :param H: The manipulator Hessian in any frame
    :type H: float ndarray(6,n,n)
    :return: The manipulability Jacobian
    :rtype: float ndarray(n)

    :references:
        - Kinematic Derivatives using the Elementary Transform
          Sequence, <NAME> and <NAME>
    """
    if from_link is None:
        from_link = self.base_link

    if to_link is None:
        to_link = self.ee_links[0]

    path, n = self.get_path(from_link, to_link)

    # supplied J wins; otherwise compute it from q
    if J is None:
        if q is None:
            q = np.copy(self.q)
        else:
            q = getvector(q, n)

        J = self.jacob0(q, from_link, to_link)
    else:
        verifymatrix(J, (6, n))

    if H is None:
        H = self.hessian0(J0=J, from_link=from_link, to_link=to_link)
    else:
        verifymatrix(H, (6, n, n))

    manipulability = self.manipulability(
        J=J, from_link=from_link, to_link=to_link)

    # b = (J J^T)^-1; each Jm entry contracts a Hessian slice against b
    b = np.linalg.inv(J @ np.transpose(J))
    Jm = np.zeros((n, 1))

    for i in range(n):
        c = J @ np.transpose(H[:, :, i])
        Jm[i, 0] = manipulability * \
            np.transpose(c.flatten('F')) @ b.flatten('F')

    return Jm
def __str__(self):
    """
    Pretty prints the ETS Model of the robot. Will output angles in
    degrees

    :return: Pretty print of the robot model
    :rtype: str

    Constant links are shown in blue.
    End-effector links are prefixed with an @
    """
    table = ANSITable(
        Column("id", headalign="^"),
        Column("link", headalign="^"),
        Column("parent", headalign="^"),
        Column("joint", headalign="^"),
        Column("ETS", headalign="^", colalign=">"),
        border="thin")
    for k, link in enumerate(self):
        # constant (non-joint) links are rendered in blue
        color = "" if link.isjoint else "<<blue>>"
        # end-effector links get an @ prefix
        ee = "@" if link in self.ee_links else ""
        ets = link.ets()
        table.row(
            k,
            color + ee + link.name,
            link.parent.name if link.parent is not None else "-",
            link._joint_name if link.parent is not None else "",
            ets.__str__(f"q{link._jindex}"))

    s = str(table)
    # append named configurations (e.g. qz, qr) below the table
    s += self.configurations_str()

    return s
def hierarchy(self):
    """
    Pretty print the robot's link tree, indented two spaces per level.

    Example:

    .. runblock:: pycon

        import roboticstoolbox as rtb
        robot = rtb.models.URDF.Panda()
        robot.hierarchy()
    """
    # iterative pre-order traversal; children pushed in reverse so the
    # first child prints next, matching a recursive traversal
    stack = [(self.base_link, 0)]
    while stack:
        link, depth = stack.pop()
        print(' ' * depth * 2, link.name)
        for child in reversed(link.child):
            stack.append((child, depth + 1))
def jacobev(
        self, q=None, from_link=None, to_link=None,
        offset=None, T=None):
    """
    Jv = jacobev(q) is the spatial velocity transform, at joint
    configuration q, which relates the velocity in the base frame to the
    velocity in the end-effector frame.

    Jv = jacobev() as above except uses the stored q value of the
    robot object.

    The result is block diagonal: the inverse of the end-effector
    rotation matrix appears in both the translational (top-left) and
    rotational (bottom-right) 3x3 blocks.

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)
    :param from_link: first link of the kinematic chain
        (defaults to the base link)
    :param to_link: last link of the kinematic chain
        (defaults to the first end-effector link)
    :param offset: extra transform applied after to_link
        (defaults to identity)
    :type offset: SE3
    :param T: if supplied, its rotation is used directly instead of
        computing forward kinematics from q
    :type T: SE3

    :returns J: The velocity Jacobian in ee frame
    :rtype J: float ndarray(6,6)
    """
    if from_link is None:
        from_link = self.base_link
    if to_link is None:
        to_link = self.ee_links[0]

    if offset is None:
        offset = SE3()

    if T is None:
        # Rotation of the end-effector (plus offset) relative to the base
        r = (self.base.inv() * self.fkine(
            q, from_link, to_link) * offset).R
        r = np.linalg.inv(r)
    else:
        # Caller supplied the pose directly; only its rotation is needed
        r = np.linalg.inv(T.R)

    Jv = np.zeros((6, 6))
    Jv[:3, :3] = r
    Jv[3:, 3:] = r

    return Jv
def jacob0v(self, q=None):
    """
    Jv = jacob0v(q) is the spatial velocity transform, at joint
    configuration q, which relates the velocity in the end-effector frame
    to velocity in the base frame.

    Jv = jacob0v() as above except uses the stored q value of the
    robot object.

    The result is block diagonal, with the end-effector rotation matrix
    in both the translational and rotational 3x3 blocks.

    :param q: The joint angles/configuration of the robot (Optional,
        if not supplied will use the stored q values).
    :type q: float ndarray(n)

    :returns J: The velocity Jacobian in 0 frame
    :rtype J: float ndarray(6,6)
    """
    # End-effector rotation expressed relative to the robot's base frame.
    rotation = (self.base.inv() * self.fkine(q)).R

    velocity_jacobian = np.zeros((6, 6))
    velocity_jacobian[0:3, 0:3] = rotation
    velocity_jacobian[3:6, 3:6] = rotation

    return velocity_jacobian
def joint_velocity_damper(self, ps=0.05, pi=0.1, n=None, gain=1.0):
    '''
    Formulates an inequality constraint which, when optimised for, will
    make it impossible for the robot to run into joint limits. Requires
    the joint limits of the robot to be specified. See examples/mmc.py
    for use case.

    The constraint has the form Ain @ qd <= Bin, where the damper for a
    joint only becomes active once the joint is within the influence
    angle pi of either of its limits.

    :param ps: The minimum angle (in radians) in which the joint is
        allowed to approach to its limit
    :type ps: float
    :param pi: The influence angle (in radians) in which the velocity
        damper becomes active
    :type pi: float
    :param n: The number of joints to consider. Defaults to all joints
    :type n: int
    :param gain: The gain for the velocity damper
    :type gain: float

    :returns: Ain, Bin as the inequality constraints for an optimiser
    :rtype: ndarray(n, n), ndarray(n)
    '''
    if n is None:
        n = self.n

    Ain = np.zeros((n, n))
    Bin = np.zeros(n)

    for i in range(n):
        # Lower limit: active when q[i] is within pi of qlim[0, i].
        if self.q[i] - self.qlim[0, i] <= pi:
            Bin[i] = -gain * (
                ((self.qlim[0, i] - self.q[i]) + ps) / (pi - ps))
            Ain[i, i] = -1
        # Upper limit: active when q[i] is within pi of qlim[1, i].
        # NOTE(review): if a joint is within pi of BOTH limits (pi larger
        # than half the joint range) the upper-limit damper overwrites
        # the lower-limit one — confirm this is intended.
        if self.qlim[1, i] - self.q[i] <= pi:
            Bin[i] = gain * (
                (self.qlim[1, i] - self.q[i]) - ps) / (pi - ps)
            Ain[i, i] = 1

    return Ain, Bin
def link_collision_damper(
        self, shape, q=None, di=0.3, ds=0.05, xi=1.0,
        from_link=None, to_link=None):
    '''
    Formulates an inequality constraint which, when optimised for, will
    make it impossible for the robot to run into a collision. See
    examples/neo.py for use case.

    For every collision geometry attached to every link on the path
    from_link -> to_link, a velocity-damper row is produced whenever the
    geometry is within the influence distance di of shape. The rows are
    stacked into Ain @ qd <= bin form.

    :param shape: the collision object to stay clear of
    :type shape: Shape
    :param q: joint configuration at which to evaluate (defaults to the
        stored q values)
    :type q: float ndarray(n)
    :param ds: The minimum distance in which a joint is allowed to
        approach the collision object shape
    :type ds: float
    :param di: The influence distance in which the velocity
        damper becomes active
    :type di: float
    :param xi: The gain for the velocity damper
    :type xi: float
    :param from_link: The first link to consider, defaults to the base
        link
    :type from_link: ELink
    :param to_link: The last link to consider, will consider all links
        between from_link and to_link in the robot, defaults to the
        end-effector link
    :type to_link: ELink

    :returns: Ain, bin as the inequality constraints for an optimiser
        (both None if no geometry is within the influence distance)
    :rtype: ndarray(m, n), ndarray(m)
    '''
    if from_link is None:
        from_link = self.base_link

    if to_link is None:
        # NOTE(review): other methods in this class default to
        # self.ee_links[0]; confirm self.ee_link is an equivalent alias.
        to_link = self.ee_link

    links, n = self.get_path(from_link, to_link)

    if q is None:
        q = np.copy(self.q)
    else:
        q = getvector(q, n)

    # j counts joints encountered so far along the path, so q[:j] is the
    # configuration of the sub-chain ending at the current link.
    j = 0
    Ain = None
    bin = None

    def indiv_calculation(link, link_col, q):
        # One damper row for a single collision geometry on one link.
        d, wTlp, wTcp = link_col.closest_point(shape, di)

        if d is not None:
            # Unit vector from the link point towards the shape point,
            # expressed in the link-point frame, lifted to a 1x6 row
            # (zero rotational part).
            lpTcp = wTlp.inv() * wTcp
            norm = lpTcp.t / d
            norm_h = np.expand_dims(np.r_[norm, 0, 0, 0], axis=0)

            Je = self.jacobe(
                q, from_link=self.base_link, to_link=link,
                offset=link_col.base)
            n_dim = Je.shape[1]
            # Contribution of the shape's own velocity, if it is moving.
            dp = norm_h @ shape.v
            l_Ain = np.zeros((1, n))
            l_Ain[0, :n_dim] = norm_h @ Je
            # Damper: permitted approach speed scales linearly from 0 at
            # distance ds up to xi at distance di.
            l_bin = (xi * (d - ds) / (di - ds)) + dp
        else:
            l_Ain = None
            l_bin = None

        return l_Ain, l_bin, d, wTcp

    for link in links:
        if link.isjoint:
            j += 1

        for link_col in link.collision:
            l_Ain, l_bin, d, wTcp = indiv_calculation(
                link, link_col, q[:j])

            # Stack this geometry's row onto the accumulated constraints.
            if l_Ain is not None and l_bin is not None:
                if Ain is None:
                    Ain = l_Ain
                else:
                    Ain = np.r_[Ain, l_Ain]

                if bin is None:
                    bin = np.array(l_bin)
                else:
                    bin = np.r_[bin, l_bin]

    return Ain, bin
def closest_point(self, shape, inf_dist=1.0):
    '''
    closest_point(shape, inf_dist) returns the minimum euclidean
    distance between this robot and shape, provided it is less than
    inf_dist. It will also return the points on self and shape in the
    world frame which connect the line of length distance between the
    shapes. If the distance is negative then the shapes are collided.

    :param shape: The shape to compare distance to
    :type shape: Shape
    :param inf_dist: The minimum distance within which to consider
        the shape
    :type inf_dist: float

    :returns: d, p1, p2 where d is the distance between the shapes,
        p1 and p2 are the points in the world frame on the respective
        shapes. All three are None when no link is within inf_dist.
    :rtype: float, SE3, SE3
    '''
    # Track the closest link found so far. Using None instead of a large
    # numeric sentinel avoids returning a bogus distance, and fixes the
    # previous `p1 = None,` bug which made p1 the 1-tuple (None,) when
    # nothing was within range.
    d = None
    p1 = None
    p2 = None

    for link in self.links:
        td, tp1, tp2 = link.closest_point(shape, inf_dist)

        # link.closest_point returns None distance when out of range.
        if td is not None and (d is None or td < d):
            d = td
            p1 = tp1
            p2 = tp2

    return d, p1, p2
def collided(self, shape):
    '''
    collided(shape) checks if this robot and shape have collided.

    Delegates to each link's own collision test and reports whether
    any of them intersects the given shape.

    :param shape: The shape to compare distance to
    :type shape: Shape

    :returns: True if shapes have collided
    :rtype: bool
    '''
    return any(link.collided(shape) for link in self.links)
# inverse dynamics (recursive Newton-Euler) using spatial vector notation
def rne(robot, q, qd, qdd, gravity=None):
    """
    Inverse dynamics via the recursive Newton-Euler algorithm in
    spatial-vector (Featherstone) notation.

    :param robot: the robot model (links iterated in index order)
    :param q: joint positions, ndarray(n)
    :param qd: joint velocities, ndarray(n)
    :param qdd: joint accelerations, ndarray(n)
    :param gravity: gravitational acceleration; defaults to
        robot.gravity when None
    :return: joint torques/forces Q, ndarray(n)
    """

    n = robot.n

    # allocate intermediate variables
    Xup = SE3.Alloc(n)              # child-to-parent transforms
    Xtree = SE3.Alloc(n)            # link transforms at zero joint value
    v = SpatialVelocity.Alloc(n)    # link spatial velocities
    a = SpatialAcceleration.Alloc(n)
    f = SpatialForce.Alloc(n)
    I = SpatialInertia.Alloc(n)  # noqa
    s = [None for i in range(n)]   # joint motion subspace
    Q = np.zeros((n,))   # joint torque/force

    # initialize intermediate variables
    for j, link in enumerate(robot):
        # NOTE(review): only mass and CoM are used here; the link's
        # rotational inertia tensor is not passed — confirm intended.
        I[j] = SpatialInertia(m=link.m, r=link.r)
        Xtree[j] = link.Ts
        s[j] = link.v.s

    # Gravity enters the recursion as a fictitious base acceleration.
    if gravity is None:
        a_grav = SpatialAcceleration(robot.gravity)
    else:
        a_grav = SpatialAcceleration(gravity)

    # forward recursion: propagate velocities and accelerations from the
    # base towards the tips, accumulating the required spatial forces.
    for j in range(0, n):
        vJ = SpatialVelocity(s[j] * qd[j])

        # transform from parent(j) to j
        Xup[j] = robot[j].A(q[j]).inv()

        if robot[j].parent is None:
            # Base-connected link: only gravity and its own joint motion.
            v[j] = vJ
            a[j] = Xup[j] * a_grav + SpatialAcceleration(s[j] * qdd[j])
        else:
            jp = robot[j].parent.jindex
            v[j] = Xup[j] * v[jp] + vJ
            a[j] = Xup[j] * a[jp] \
                + SpatialAcceleration(s[j] * qdd[j]) \
                + v[j] @ vJ

        # Newton-Euler equation: f = I*a + v x* (I*v)
        f[j] = I[j] * a[j] + v[j] @ (I[j] * v[j])

    # backward recursion: project forces onto joint axes and pass the
    # remainder up to each parent link.
    for j in reversed(range(0, n)):
        Q[j] = f[j].dot(s[j])

        if robot[j].parent is not None:
            jp = robot[j].parent.jindex
            f[jp] = f[jp] + Xup[j] * f[j]

    return Q
# Ad-hoc smoke test / scratchpad; excluded from coverage.
if __name__ == "__main__":   # pragma nocover

    import roboticstoolbox as rtb
    np.set_printoptions(precision=4, suppress=True)

    # Load the Panda model from URDF and print the mass of link 1.
    p = rtb.models.URDF.Panda()

    print(p[1].m)

    # robot = rtb.models.ETS.Panda()
    # print(robot)
    # print(robot.base, robot.tool)
    # print(robot.ee_links)
    # ets = robot.ets()
    # print(ets)
    # print('n', ets.n)
    # ets2 = ets.compile()
    # print(ets2)

    # q = np.random.rand(7)
    # # print(ets.eval(q))
    # # print(ets2.eval(q))
    # J1 = robot.jacob0(q)
    # J2 = ets2.jacob0(q)
    # print(J1-J2)

    # print(robot[2].v, robot[2].v.jindex)
    # print(robot[2].Ts)
|
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace Data::LoggingReplicator;
using namespace Data::LogRecordLib;
using namespace ktl;
// Factory method: allocates an enumerator over the log records that must be
// included in an incremental backup. Throws (rather than returning null) on
// allocation failure or if construction left the object in a failed state.
IncrementalBackupLogRecordsAsyncEnumerator::SPtr IncrementalBackupLogRecordsAsyncEnumerator::Create(
    __in Data::Utilities::IAsyncEnumerator<LogRecord::SPtr>& source,
    __in BackupLogRecord const & backupLogRecord,
    __in IReplicatedLogManager const & loggingReplicator,
    __in KAllocator & allocator)
{
    IncrementalBackupLogRecordsAsyncEnumerator * pointer = _new(INCREMENTAL_BACKUP_LOG_RECORDS_ASYNC_ENUMERATOR, allocator) IncrementalBackupLogRecordsAsyncEnumerator(
        source,
        backupLogRecord,
        loggingReplicator);

    THROW_ON_ALLOCATION_FAILURE(pointer);
    THROW_ON_FAILURE(pointer->Status());

    // Transfer ownership to the smart pointer.
    return SPtr(pointer);
}
// Epoch of the first record included in this incremental backup.
// InvalidEpoch until MoveNextAsync has yielded at least one record.
TxnReplicator::Epoch IncrementalBackupLogRecordsAsyncEnumerator::get_StartingEpoch() const
{
    return startingEpoch_;
}

// LSN of the first record included in this incremental backup.
// FABRIC_INVALID_SEQUENCE_NUMBER until at least one record was yielded.
FABRIC_SEQUENCE_NUMBER IncrementalBackupLogRecordsAsyncEnumerator::get_StartingLSN() const
{
    return startingLSN_;
}

// Highest epoch seen among the records enumerated so far.
TxnReplicator::Epoch IncrementalBackupLogRecordsAsyncEnumerator::get_HighestBackedUpEpoch() const
{
    return lastEpoch_;
}

// Highest LSN seen among the records enumerated so far.
FABRIC_SEQUENCE_NUMBER IncrementalBackupLogRecordsAsyncEnumerator::get_HighestBackedUpLSN() const
{
    return lastLSN_;
}

// Returns the record the enumerator is currently positioned on.
// Must not be called after Dispose().
LogRecord::SPtr IncrementalBackupLogRecordsAsyncEnumerator::GetCurrent()
{
    ASSERT_IFNOT(isDisposed_ == false, "GetCurrent called after object was disposed.");
    ASSERT_IFNOT(sourceSPtr_ != nullptr, "Source is nullptr.");

    return sourceSPtr_->GetCurrent();
}
// Advances to the next log record that belongs in the incremental backup,
// filtering the underlying enumerator: only logical records with an LSN at or
// above the previously backed-up watermark are yielded, and UpdateEpoch
// records already covered by the last backup are skipped. Also maintains the
// starting/last LSN+epoch bookkeeping and the yielded-record count.
// Returns false once the source is exhausted.
ktl::Awaitable<bool> IncrementalBackupLogRecordsAsyncEnumerator::MoveNextAsync(
    __in CancellationToken const& cancellationToken)
{
    KShared$ApiEntry();

    while(true)
    {
        bool isNotEmpty = co_await sourceSPtr_->MoveNextAsync(cancellationToken);
        if (isNotEmpty == false)
        {
            co_return false;
        }

        LogRecord::SPtr logRecord = sourceSPtr_->GetCurrent();
        ASSERT_IFNOT(logRecord != nullptr, "Log record cannot be nullptr");

        // If not a logical log record, skip.
        if (logRecord->AsLogicalLogRecord() == nullptr)
        {
            continue;
        }

        // If lower than what we backed up on the last backup, skip.
        if (logRecord->Lsn < backupLogRecordCSPtr_->HighestBackedupLsn)
        {
            continue;
        }

        if (logRecord->RecordType == LogRecordType::UpdateEpoch)
        {
            UpdateEpochLogRecord& updateEpochLogRecord = dynamic_cast<UpdateEpochLogRecord &>(*logRecord);
            if (updateEpochLogRecord.EpochValue == backupLogRecordCSPtr_->get_HighestBackedupEpoch())
            {
                // Skip over the Update Epoch if it was previously backed up.
                // Note that Update Epoch does not have a unique LSN.
                continue;
            }

            // Track the highest epoch included in this backup.
            lastEpoch_ = updateEpochLogRecord.EpochValue;
        }

        count_++;
        lastLSN_ = logRecord->Lsn;

        // First yielded record establishes the backup's starting point.
        if (startingLSN_ == FABRIC_INVALID_SEQUENCE_NUMBER)
        {
            startingLSN_ = logRecord->Lsn;
            startingEpoch_ = loggingReplicatorCSPtr_->GetEpoch(startingLSN_);
        }

        co_return true;
    }
}
// Resets the underlying source enumerator to its initial position.
// Note: the filtering bookkeeping (count, starting/last LSN) is NOT reset.
void IncrementalBackupLogRecordsAsyncEnumerator::Reset()
{
    ASSERT_IFNOT(isDisposed_ == false, "Reset called after object was disposed.");
    ASSERT_IFNOT(sourceSPtr_ != nullptr, "Source is nullptr.");

    sourceSPtr_->Reset();
}

// Disposes the source enumerator and releases the reference to it.
// Idempotent: subsequent calls are no-ops (verified via assertion).
void IncrementalBackupLogRecordsAsyncEnumerator::Dispose()
{
    if (isDisposed_)
    {
        ASSERT_IFNOT(sourceSPtr_ == nullptr, "IncrementalBackupLogRecordsAsyncEnumerator disposed but not the source.");
        return;
    }

    sourceSPtr_->Dispose();
    sourceSPtr_.Reset();
    isDisposed_ = true;
}

// Number of records yielded so far by MoveNextAsync.
ULONG32 IncrementalBackupLogRecordsAsyncEnumerator::Count()
{
    return count_;
}
// Sanity check invoked after the backup has consumed the enumerator:
// verifies the source is fully drained, that the starting LSN did not fall
// below the previous backup's watermark, and that at least one logical
// record was included.
ktl::Awaitable<void> IncrementalBackupLogRecordsAsyncEnumerator::VerifyDrainedAsync()
{
    KShared$ApiEntry();

    ASSERT_IFNOT(sourceSPtr_ != nullptr, "Source is nullptr.");

    bool isNotEmpty = co_await sourceSPtr_->MoveNextAsync(CancellationToken::None);
    ASSERT_IFNOT(isNotEmpty == false, "enumerator must be drained");

    // Difference with managed.
    // To support incremental backup from difference sources, we must take into account that UpdateEpoch may or may not been backed up last time.
    // If not, we have to put it which has same LSN as HighestBackedupLsn.
    ASSERT_IFNOT(
        startingLSN_ >= backupLogRecordCSPtr_->HighestBackedupLsn,
        "Must have at least backedup one logical record. StartingLSN: {0} HighestBackedUpLSN: {1}",
        startingLSN_,
        backupLogRecordCSPtr_->HighestBackedupLsn);
    ASSERT_IFNOT(count_ > 0, "Must have at least backedup one logical record.");
}
// Constructor: takes references on the source enumerator, the previous
// backup's log record (the incremental baseline) and the replicated log
// manager. All watermarks start in their "invalid/empty" state.
IncrementalBackupLogRecordsAsyncEnumerator::IncrementalBackupLogRecordsAsyncEnumerator(
    __in Data::Utilities::IAsyncEnumerator<LogRecord::SPtr> & source,
    __in BackupLogRecord const & backupLogRecord,
    __in IReplicatedLogManager const & loggingReplicator) noexcept
    : sourceSPtr_(&source)
    , backupLogRecordCSPtr_(&backupLogRecord)
    , loggingReplicatorCSPtr_(&loggingReplicator)
    , isDisposed_(false)
    , startingEpoch_(TxnReplicator::Epoch::InvalidEpoch())
    , startingLSN_(FABRIC_INVALID_SEQUENCE_NUMBER)
    , lastEpoch_(TxnReplicator::Epoch::InvalidEpoch())
    , lastLSN_(FABRIC_INVALID_SEQUENCE_NUMBER)
    , count_(0)
{
}

// Destructor: the owner must have called Dispose() before release.
IncrementalBackupLogRecordsAsyncEnumerator::~IncrementalBackupLogRecordsAsyncEnumerator()
{
    ASSERT_IFNOT(isDisposed_ == true, "Log records object is not disposed on destructor");
}
|
Executive Summary Document History Summary of Changes Version Section(s) Synopsis of Change 0.1 Outline 0.2 First Contributions 0.3 Integration of Contributed Sections 0.4 Corrections According to Internal Review 1.0 Final Assembled Version
In this deliverable the new, SerWorks-aligned architecture of the BIONETS Simulation Platform is introduced, together with the simulator integration activities spanning WPs and SPs. The reshaped BIONETS Simulation Platform gives us the opportunity to verify and test the properties of the BIONETS architecture on a wide scale, by simulating a huge number of T-nodes and U-nodes, as is expected in real-life scenarios. The Simulation Platform was redesigned in tight cooperation with the SerWorks and the WP5 prototyping task force, sharing the expertise between the different implementation activities as joint work between WP1.1 (Requirements and Architectural Principles), WP1.3 (Simulation and Performance Analysis) and WP5 (Prototyping and Validation). This cooperation activity is in consonance with the recommendations of the reviewers to create a Simulation Platform capable of testing the whole BIONETS architecture. This new structure is completely aligned with these principles, providing the possibility to simulate the overall BIONETS architecture. To achieve the integration of the different level components, the Simulation Platform was designed in a way to accommodate SerWorks, the unified service-oriented architecture, where the adaptation and the evolution of the network protocols is driven by the running services. In the presented version we provide an implementation of the Networking Framework functions of SerWorks, integrating the results in this area from previous deliverables, and the architecture for higher-layer services. The Networking Framework API was specified in detail, completely defining the lowest-level framework of SerWorks. This new API allows dynamic composition of new networking services using already existing components. 
The primitives of these basic components were not defined in the first version of the SerWorks specification, so in order to build simulations and prototypes the first set of APIs for these components was specified. A scalable and high-performance simulation of the T-Nodes was developed which allows a flexible trade-off between performance and accuracy using two different T-Node models (Explicit and Virtual T-Nodes). We also provided a standardized simulation scenario by identifying the features of BIONETS that can and should be simulated in an integrated way. In the appendices, concrete examples of the integration of the various networking solutions (introduced in earlier deliverables) are presented, such as the Situated Adaptive Forwarding (SAF) and the Cooperative Content Retrieval (CCR). The unique features of the SAF dissemination protocol required specific modifications of the Simulation Platform. Also, for the integration of the CCR scheme the Simulation Platform had to be extended …
// NewSearchUsersNamesParamsWithTimeout creates a new SearchUsersNamesParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewSearchUsersNamesParamsWithTimeout(timeout time.Duration) *SearchUsersNamesParams {
var ()
return &SearchUsersNamesParams{
timeout: timeout,
}
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test;
import org.apache.calcite.plan.Contexts;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.RelTraitDef;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.parser.SqlParser;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.PigRelBuilder;
import org.apache.calcite.tools.Programs;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.Util;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.function.Function;
import java.util.function.UnaryOperator;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Unit test for {@link PigRelBuilder}.
 *
 * <p>Each test constructs a relational expression using the Pig-flavoured
 * builder and asserts on its string form (the logical plan).
 */
class PigRelBuilderTest {
  /** Creates a config based on the "scott" schema. */
  public static Frameworks.ConfigBuilder config() {
    final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
    return Frameworks.newConfigBuilder()
        .parserConfig(SqlParser.Config.DEFAULT)
        .defaultSchema(
            CalciteAssert.addSchema(rootSchema, CalciteAssert.SchemaSpec.SCOTT_WITH_TEMPORAL))
        .traitDefs((List<RelTraitDef>) null)
        .programs(Programs.heuristicJoinOrder(Programs.RULE_SET, true, 2));
  }

  /** Creates a builder whose {@link RelBuilder.Config} has been transformed
   * by the given operator. */
  static PigRelBuilder createBuilder(
      UnaryOperator<RelBuilder.Config> transform) {
    final Frameworks.ConfigBuilder configBuilder = config();
    configBuilder.context(
        Contexts.of(transform.apply(RelBuilder.Config.DEFAULT)));
    return PigRelBuilder.create(configBuilder.build());
  }

  /** Converts a relational expression to a string with linux line-endings. */
  private String str(RelNode r) {
    return Util.toLinux(RelOptUtil.toString(r));
  }

  @Test void testScan() {
    // Equivalent SQL:
    //   SELECT *
    //   FROM emp
    final PigRelBuilder builder = PigRelBuilder.create(config().build());
    final RelNode root = builder
        .scan("EMP")
        .build();
    assertThat(str(root),
        is("LogicalTableScan(table=[[scott, EMP]])\n"));
  }

  // Placeholder tests for Pig Latin operators not yet covered.
  @Test void testCogroup() {}
  @Test void testCross() {}
  @Test void testCube() {}
  @Test void testDefine() {}

  @Test void testDistinct() {
    // Syntax:
    //   alias = DISTINCT alias [PARTITION BY partitioner] [PARALLEL n];
    final PigRelBuilder builder = PigRelBuilder.create(config().build());
    final RelNode root = builder
        .scan("EMP")
        .project(builder.field("DEPTNO"))
        .distinct()
        .build();
    // DISTINCT maps to an Aggregate grouping on all projected columns.
    final String plan = "LogicalAggregate(group=[{0}])\n"
        + "  LogicalProject(DEPTNO=[$7])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    assertThat(str(root), is(plan));
  }

  @Test void testFilter() {
    // Syntax:
    //  FILTER name BY expr
    // Example:
    //  output_var = FILTER input_var BY (field1 is not null);
    final PigRelBuilder builder = PigRelBuilder.create(config().build());
    final RelNode root = builder
        .load("EMP.csv", null, null)
        .filter(builder.isNotNull(builder.field("MGR")))
        .build();
    final String plan = "LogicalFilter(condition=[IS NOT NULL($3)])\n"
        + "  LogicalTableScan(table=[[scott, EMP]])\n";
    assertThat(str(root), is(plan));
  }

  @Test void testForeach() {}

  @Test void testGroup() {
    // Syntax:
    //   alias = GROUP alias { ALL | BY expression}
    //     [, alias ALL | BY expression ...] [USING 'collected' | 'merge']
    //     [PARTITION BY partitioner] [PARALLEL n];
    // Equivalent to Pig Latin:
    //   r = GROUP e BY (deptno, job);
    final Function<PigRelBuilder, RelNode> f = builder ->
        builder.scan("EMP")
            .group(null, null, -1, builder.groupKey("DEPTNO", "JOB").alias("e"))
            .build();
    // With input pruning (the default) only the referenced columns survive
    // under the Aggregate.
    final String plan = ""
        + "LogicalAggregate(group=[{0, 1}], EMP=[COLLECT($2)])\n"
        + "  LogicalProject(JOB=[$2], DEPTNO=[$7], "
        + "$f8=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    assertThat(str(f.apply(createBuilder(b -> b))), is(plan));

    // now without pruning
    final String plan2 = ""
        + "LogicalAggregate(group=[{2, 7}], EMP=[COLLECT($8)])\n"
        + "  LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], "
        + "HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], $f8=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    assertThat(
        str(f.apply(createBuilder(b -> b.withPruneInputOfAggregate(false)))),
        is(plan2));
  }

  @Test void testGroup2() {
    // Equivalent to Pig Latin:
    //   r = GROUP e BY deptno, d BY deptno;
    // A two-relation GROUP becomes a join of two COLLECT aggregates.
    final PigRelBuilder builder = PigRelBuilder.create(config().build());
    final RelNode root = builder
        .scan("EMP")
        .scan("DEPT")
        .group(null, null, -1,
            builder.groupKey("DEPTNO").alias("e"),
            builder.groupKey("DEPTNO").alias("d"))
        .build();
    final String plan = "LogicalJoin(condition=[=($0, $2)], joinType=[inner])\n"
        + "  LogicalAggregate(group=[{0}], EMP=[COLLECT($1)])\n"
        + "    LogicalProject(EMPNO=[$0], $f8=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n"
        + "  LogicalAggregate(group=[{0}], DEPT=[COLLECT($1)])\n"
        + "    LogicalProject(DEPTNO=[$0], $f3=[ROW($0, $1, $2)])\n"
        + "      LogicalTableScan(table=[[scott, DEPT]])\n";
    assertThat(str(root), is(plan));
  }

  @Test void testImport() {}
  @Test void testJoinInner() {}
  @Test void testJoinOuter() {}
  @Test void testLimit() {}

  @Test void testLoad() {
    // Syntax:
    //   LOAD 'data' [USING function] [AS schema];
    // Equivalent to Pig Latin:
    //   LOAD 'EMPS.csv'
    final PigRelBuilder builder = PigRelBuilder.create(config().build());
    final RelNode root = builder
        .load("EMP.csv", null, null)
        .build();
    assertThat(str(root),
        is("LogicalTableScan(table=[[scott, EMP]])\n"));
  }

  // Placeholder tests for remaining Pig Latin operators.
  @Test void testMapReduce() {}
  @Test void testOrderBy() {}
  @Test void testRank() {}
  @Test void testSample() {}
  @Test void testSplit() {}
  @Test void testStore() {}
  @Test void testUnion() {}
}
|
class LeafCollection:
    """
    A collection of leaf nodes in a Huffman tree.

    Leaves are stored in insertion order and may be looked up either by
    their byte value or by their prefix code; both lookups raise
    ``LeafNotFoundError`` when nothing matches.
    """

    def __init__(self):
        # Leaves in insertion order.
        self.leaves = []

    def add_leaf(self, leaf: Node):
        """
        Adds a leaf to the collection
        """
        self.leaves.append(leaf)

    def get_leaf_by_value(self, value: bytes) -> Node:
        """
        Returns a leaf given a value to search for

        value - the value to search for
        """
        found = next((lf for lf in self.leaves if lf.value == value), None)
        if found is not None:
            return found
        raise LeafNotFoundError

    def get_leaf_by_code(self, code: Bits) -> Node:
        """
        Returns a leaf given a prefix code to search for

        code - the prefix code to search for
        """
        found = next((lf for lf in self.leaves if lf.code == code), None)
        if found is not None:
            return found
        raise LeafNotFoundError

    def __iter__(self) -> Iterator[Node]:
        return iter(self.leaves)

    def __len__(self) -> int:
        return len(self.leaves)

    def __repr__(self) -> str:
        return f"<LeafCollection leaves={len(self)}>"
// KEEP PERSISTING
// KEEP PERSISTING
// WORK HARD FOR YOUR DREAMS
// YOU HAVE TO BE THE BEST
// JUST 400 QUESTIONS ON SPOJ
// AND YOU WILL THE BEST
// DON'T QUIT
#include<bits/stdc++.h>
using namespace std;
#define MAXL 0xfffff
vector<int >ab;
int hash[65],prime[65],fact[65],a[102];
int dp[102][1<<17],sol[102][1<<17],ans[102][1<<17];
// Precomputes:
//  - prime[]: 1 for odd composites up to 60 (simple sieve),
//  - hash[p]: index of prime p in the list ab,
//  - fact[v]: bitmask of primes dividing v (for v <= 60),
//  - dp/sol/ans tables initialised to "unreached".
void init(){
    // Sieve odd composites; i up to 7 suffices since 7*7 <= 60.
    for(int i=3;i<=8;i+=2)if(prime[i]==0)
        for(int j=i*i;j<=60;j+=i)
            prime[j]=1;
    hash[2]=ab.size();
    ab.push_back(2);
    for(int i=3;i<=60;i+=2)if(prime[i]==0)
        hash[i]=ab.size(),ab.push_back(i);
    // fact[v] collects one bit per distinct prime factor of v.
    for(int i=0;i<ab.size();i++)
        for(int j=1;j*ab[i]<=60;j++)
            fact[j*ab[i]]|=1<<hash[ab[i]];
    // dp[i][mask] = min cost using first i elements with prime-mask `mask`.
    for(int i=1;i<=100;i++)
        for(int x=0;x< 1<<17;x++)
            dp[i][x]=MAXL,sol[i][x]=-1,ans[i][x]=-1;
}
// Reads n values a[1..n] and replaces each with a value in [1, 59] so that
// all chosen values are pairwise coprime, minimising the total |b_i - a_i|.
// Bitmask DP over the 17 primes <= 59; the choice per position is recovered
// via sol/ans backpointers and printed in order.
int main(){
    int n;
    cin>>n;
    init();
    for(int i=1;i<=n;i++)
        cin>>a[i];
    // DP: for position i try every candidate value k; k may only be placed
    // on masks s that share no prime with k (s subset of ~fact[k]).
    for(int i=1;i<=n;i++)
        for(int k=1;k<60;k++)
        {// enumerate all subsets of the complement of k's prime mask
            int x=(~fact[k])&((1<<17)-1);
            for(int s=x;;s=(s-1)&x)
            {
                if(dp[i][s|fact[k]] > dp[i-1][s]+abs(k-a[i]) )
                {
                    dp[i][s|fact[k]]=dp[i-1][s]+abs(k-a[i]);
                    // sol = value chosen at position i; ans = predecessor mask
                    sol[i][s|fact[k]]=k;
                    ans[i][s|fact[k]]=s;
                    //  cout <<dp[i][s|fact[k]]<<" values " <<endl;
                }
                if(s==0)
                    break;
            }
        }
    // Find the cheapest final mask. NOTE(review): s stays uninitialized if no
    // dp[n][*] entry was reached; in practice k=1 always yields one.
    int s,x=n;
    //cout <<" This is "<<n<<endl;
    //cin >>s;
    int maxl=MAXL;
    for(int i=0;i< 1<<17;i++)if(dp[n][i]!=MAXL)
    {
        if(maxl> dp[n][i])
        {
            maxl=dp[n][i];
            s=i;
        }
    }
    //cout << "Working fine upto here "<<maxl<<endl;
    //  cout << s<<" This is s "<<endl;
    // Walk the backpointers from position n down to 1, stacking the chosen
    // values so they can be printed in forward order.
    stack<int > st;
    while(x)
    {
        st.push(sol[x][s]);// value chosen at position x for mask s
        //  cout << sol[x][s]<<endl;
        x=x-1;
        s=ans[x+1][s];// predecessor mask before position x+1 made its choice
        //  p--;
    }
    while(!st.empty())
    {
        printf("%d ",st.top());
        st.pop();
    }
    return 0;
}
<gh_stars>1-10
/* --------------------------------------------------------------------------
*
* File T03_CustomSceneNode.cpp
* Author <NAME>
*
* --------------------------------------------------------------------------
*
* Copyright (C) 2010-2012 XMSoft. All rights reserved.
*
* Contact Email: <EMAIL>
*
* --------------------------------------------------------------------------
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library in the file COPYING.LIB;
* if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA
*
* -------------------------------------------------------------------------- */
#include "Precompiled.h"
#include "T03_CustomSceneNode.h"
// A minimal custom Irrlicht scene node that renders a single coloured
// tetrahedron. Demonstrates the three things every scene node must provide:
// registration for rendering, a render() implementation, and a bounding box.
class CSampleSceneNode : public scene::ISceneNode
{
	/*
		The constructor receives the parent scene node, the scene manager and
		a node id. It forwards them to the base class, configures the material
		(unlit, solid) and fills in the 4 vertices of the tetrahedron drawn in
		render().
	*/
	public :

		CSampleSceneNode ( scene::ISceneNode* parent, scene::ISceneManager* mgr, s32 id )
		: scene::ISceneNode ( parent, mgr, id )
		{
			Material.Wireframe = false;
			Material.Lighting  = false;
			Material.Thickness = 0.f;

			// Position, normal, colour and texture coordinates per vertex.
			Vertices[0] = video::S3DVertex(0,0,10, 5,1,0,
					video::SColor(255,0,255,255), 0, 1);
			Vertices[1] = video::S3DVertex(10,0,-10, 10,0,0,
					video::SColor(255,255,0,255), 1, 1);
			Vertices[2] = video::S3DVertex(0,20,0, 20,1,1,
					video::SColor(255,255,255,0), 1, 0);
			Vertices[3] = video::S3DVertex(-10,0,-10, 40,0,1,
					video::SColor(255,0,255,0), 0, 0);

			/*
				The engine needs the node's bounding box for automatic culling
				and other purposes, so build it from the 4 vertices. To opt out
				of box-based culling instead, call
				irr::scene::ISceneNode::setAutomaticCulling() with
				irr::scene::EAC_OFF.
			*/
			Box.reset ( Vertices[0].Pos );
			for ( s32 i = 1; i < 4; ++i )
			{
				Box.addInternalPoint ( Vertices[i].Pos );
			}
		}

		/*
			Before rendering, the scene manager calls OnRegisterSceneNode() on
			every node. A node that wants to be drawn must register itself here
			— this determines *when* render() is called relative to other nodes
			(e.g. cameras/lights render first, stencil shadows last; use
			SceneManager->registerNodeForRendering(this, SNRT_LIGHT_AND_CAMERA)
			for camera/light-like ordering). Finally the base implementation is
			invoked so child nodes get the chance to register themselves too.
		*/
		virtual void OnRegisterSceneNode ( )
		{
			if ( IsVisible )
			{
				SceneManager->registerNodeForRendering ( this );
			}

			ISceneNode::OnRegisterSceneNode();
		}

		/*
			render() does the actual drawing: set the material and the node's
			absolute transform, then submit the 4 triangles of the tetrahedron
			as an indexed triangle list.
		*/
		virtual void render ( )
		{
			u16 indices[] = { 0,2,3, 2,1,3, 1,0,3, 2,0,1 };
			video::IVideoDriver* driver = SceneManager->getVideoDriver();

			driver->setMaterial(Material);
			driver->setTransform(video::ETS_WORLD, AbsoluteTransformation);
			driver->drawVertexPrimitiveList(&Vertices[0], 4, &indices[0], 4, video::EVT_STANDARD, scene::EPT_TRIANGLES, video::EIT_16BIT);
		}

		/*
			Three small accessors: the bounding box, the number of materials
			(this node has exactly one), and the material at a given index.
			Since there is only one material, getMaterial() ignores the index
			and always returns it — callers are assumed never to pass i > 0.
		*/
		virtual const core::aabbox3d<f32>& getBoundingBox ( ) const
		{
			return Box;
		}

		virtual u32 getMaterialCount ( ) const
		{
			return 1;
		}

		virtual video::SMaterial& getMaterial ( u32 i )
		{
			return Material;
		}

	private :

		// Bounding box, the 4 tetrahedron vertices, and the single material.
		core::aabbox3d<f32> Box;
		video::S3DVertex Vertices[4];
		video::SMaterial Material;
};
// Tutorial entry point: sets up a camera, creates one CSampleSceneNode and
// attaches a rotation animator to it.
CT03_CustomSceneNode::CT03_CustomSceneNode ( KDvoid )
{
	m_pScrMgr->addCameraSceneNode ( 0, core::vector3df ( 0, -40, 0 ), core::vector3df ( 0, 0, 0) );

	/*
		Create our scene node. The result of `new` is not checked because it
		throws rather than returning 0 on failure. The node is created with a
		reference count of 1 and gains a second reference when its parent
		adopts it, so our own reference must be dropped — but only *after* we
		are done using the node.
	*/
	CSampleSceneNode* pNode = new CSampleSceneNode ( m_pScrMgr->getRootSceneNode ( ), m_pScrMgr, 666 );

	/*
		Attach a rotation animator so the node visibly spins, demonstrating
		that a custom node behaves like any built-in one.
		createRotationAnimator() may return 0, so the result is checked.
	*/
	scene::ISceneNodeAnimator* pAnim = m_pScrMgr->createRotationAnimator ( core::vector3df ( 0.8f, 0, 0.8f ) );
	if ( pAnim )
	{
		pNode->addAnimator ( pAnim );

		/*
			The animator came from a createFoo() function, so drop() the
			reference now that it has been handed to the node; zeroing the
			pointer afterwards would prevent accidental reuse.
		*/
		pAnim->drop ( );
	}

	/*
		Drop our reference to the node. This does not delete it yet: the
		scene graph still holds a reference until the graph is destroyed or
		the node is removed from it.
	*/
	pNode->drop ( );
}
CT03_CustomSceneNode::~CT03_CustomSceneNode ( KDvoid )
{
}
video::SColor CT03_CustomSceneNode::getClear ( KDvoid )
{
return video::SColor ( 0, 100, 100, 100 );
}
const wchar_t* CT03_CustomSceneNode::getTitle ( KDvoid )
{
	// Caption shown for this sample in the demo list.
	static const wchar_t*  l_pTitle = L"03. CustomSceneNode";
	return l_pTitle;
}
<filename>sort.go
package sort
import (
"sort"
)
// UintSlice attaches the methods of Interface to []uint, sorting in increasing order.
type UintSlice []uint

// Len reports the number of elements, as required by sort.Interface.
func (s UintSlice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s UintSlice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s UintSlice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s UintSlice) Sort() {
	sort.Sort(s)
}

// Uints sorts a slice of uints in increasing order.
func Uints(v []uint) {
	UintSlice(v).Sort()
}
// Uint64Slice attaches the methods of Interface to []uint64, sorting in increasing order.
type Uint64Slice []uint64

// Len reports the number of elements, as required by sort.Interface.
func (s Uint64Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Uint64Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Uint64Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Uint64Slice) Sort() {
	sort.Sort(s)
}

// Uint64s sorts a slice of uint64s in increasing order.
func Uint64s(v []uint64) {
	Uint64Slice(v).Sort()
}
// Uint32Slice attaches the methods of Interface to []uint32, sorting in increasing order.
type Uint32Slice []uint32

// Len reports the number of elements, as required by sort.Interface.
func (s Uint32Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Uint32Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Uint32Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Uint32Slice) Sort() {
	sort.Sort(s)
}

// Uint32s sorts a slice of uint32s in increasing order.
func Uint32s(u []uint32) {
	Uint32Slice(u).Sort()
}
// Uint16Slice attaches the methods of Interface to []uint16, sorting in increasing order.
type Uint16Slice []uint16

// Len reports the number of elements, as required by sort.Interface.
func (s Uint16Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Uint16Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Uint16Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Uint16Slice) Sort() {
	sort.Sort(s)
}

// Uint16s sorts a slice of uint16s in increasing order.
func Uint16s(u []uint16) {
	Uint16Slice(u).Sort()
}
// Uint8Slice attaches the methods of Interface to []uint8, sorting in increasing order.
type Uint8Slice []uint8

// Len reports the number of elements, as required by sort.Interface.
func (s Uint8Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Uint8Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Uint8Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Uint8Slice) Sort() {
	sort.Sort(s)
}

// Uint8s sorts a slice of uint8s in increasing order.
func Uint8s(v []uint8) {
	Uint8Slice(v).Sort()
}
// Int64Slice attaches the methods of Interface to []int64, sorting in increasing order.
type Int64Slice []int64

// Len reports the number of elements, as required by sort.Interface.
func (s Int64Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Int64Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Int64Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Int64Slice) Sort() {
	sort.Sort(s)
}

// Int64s sorts a slice of int64s in increasing order.
func Int64s(i []int64) {
	Int64Slice(i).Sort()
}
// Int32Slice attaches the methods of Interface to []int32, sorting in increasing order.
type Int32Slice []int32

// Len reports the number of elements, as required by sort.Interface.
func (s Int32Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Int32Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Int32Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Int32Slice) Sort() {
	sort.Sort(s)
}

// Int32s sorts a slice of int32s in increasing order.
func Int32s(i []int32) {
	Int32Slice(i).Sort()
}
// Int16Slice attaches the methods of Interface to []int16, sorting in increasing order.
type Int16Slice []int16

// Len reports the number of elements, as required by sort.Interface.
func (s Int16Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Int16Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Int16Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Int16Slice) Sort() {
	sort.Sort(s)
}

// Int16s sorts a slice of int16s in increasing order.
func Int16s(i []int16) {
	Int16Slice(i).Sort()
}
// Int8Slice attaches the methods of Interface to []int8, sorting in increasing order.
type Int8Slice []int8

// Len reports the number of elements, as required by sort.Interface.
func (s Int8Slice) Len() int {
	return len(s)
}

// Less orders elements ascending by value, as required by sort.Interface.
func (s Int8Slice) Less(i, j int) bool {
	return s[i] < s[j]
}

// Swap exchanges the elements at i and j, as required by sort.Interface.
func (s Int8Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Sort is a convenience method.
// Same as sort.Sort(s).
func (s Int8Slice) Sort() {
	sort.Sort(s)
}

// Int8s sorts a slice of int8s in increasing order.
func Int8s(v []int8) {
	Int8Slice(v).Sort()
}
// Float32Slice attaches the methods of Interface to []float32, sorting in increasing order.
type Float32Slice []float32
// Len implements sort.Interface.
func (f Float32Slice) Len() int {
return len(f)
}
// Less implements sort.Interface.
func (f Float32Slice) Less(i, j int) bool {
return f[i] < f[j]
}
// Swap implements sort.Interface.
func (f Float32Slice) Swap(i, j int) {
f[i], f[j] = f[j], f[i]
}
// Sort is a convenience method.
// Same as sort.Sort(f).
func (f Float32Slice) Sort() {
sort.Sort(f)
}
// Float32s sorts a slice of float32s in increasing order.
func Float32s(v []float32) {
sort.Sort(Float32Slice(v))
}
|
<reponame>treeform/pystorm
import submodules.diamondbase
def run():
    """Entry point for this diamond submodule: logs and delegates to the base.

    Uses the function-call form of ``print`` (a no-op parenthesization under
    Python 2) so the module also runs unchanged under Python 3; the original
    ``print`` statement was Python-2-only syntax.
    """
    print("calling diamond1.run()")
    # Delegate the real work to the shared diamond base module.
    submodules.diamondbase.run()
|
<filename>misc.h
/**
* @file misc.h
* @brief
*
* Copyright (c) 2021 Bouffalolab team
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#ifndef _MISC_H
#define _MISC_H
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <stdarg.h>
#include <stdbool.h>
#include <stdlib.h>
#include "gcc.h"
#include "common.h"
/* BIT(n): mask with only bit n set, computed in unsigned long width.
   Both branches expand identically; the #ifdef only replaces any earlier
   definition of BIT without triggering a redefinition warning. */
#ifdef BIT
#undef BIT
#define BIT(n) (1UL << (n))
#else
#define BIT(n) (1UL << (n))
#endif
/**
 * @brief Null Type definition
 */
#ifndef NULL
#define NULL 0
#endif
/**
 * @brief Error type definition
 *
 * Common status code returned by the driver-level APIs in this SDK.
 */
typedef enum {
    SUCCESS = 0, /* operation completed successfully */
    BL_ERROR = 1, /* generic failure */
    TIMEOUT = 2, /* operation did not complete in time */
    INVALID = 3, /* invalid arguments */
    NORESC = 4 /* no resource or resource temporarily unavailable */
} BL_Err_Type;
/**
 * @brief Functional type definition
 *
 * Enable/disable switch used when configuring peripheral features.
 */
typedef enum {
    DISABLE = 0,
    ENABLE = 1,
} BL_Fun_Type;
/**
 * @brief Status type definition
 *
 * Generic two-state flag value (e.g. a register bit being set or reset).
 */
typedef enum {
    RESET = 0,
    SET = 1,
} BL_Sts_Type;
/**
 * @brief Mask type definition
 *
 * Controls whether an event source (such as an interrupt) is masked.
 */
typedef enum {
    UNMASK = 0, /* event is delivered */
    MASK = 1 /* event is suppressed */
} BL_Mask_Type;
/**
 * @brief Logical status Type definition
 *
 * Logic level of a signal line.
 */
typedef enum {
    LOGIC_LO = 0,
    LOGIC_HI = !LOGIC_LO
} LogicalStatus;
/**
 * @brief Active status Type definition
 *
 * Whether a functional unit is currently active.
 */
typedef enum {
    DEACTIVE = 0,
    ACTIVE = !DEACTIVE
} ActiveStatus;
/**
* @brief Interrupt callback function type
*/
typedef void(intCallback_Type)(void);
typedef void (*pFunc)(void);
#define ARCH_MemCpy arch_memcpy
#define ARCH_MemSet arch_memset
#define ARCH_MemCmp arch_memcmp
#define ARCH_MemCpy4 arch_memcpy4
#define ARCH_MemCpy_Fast arch_memcpy_fast
#define ARCH_MemSet4 arch_memset4
#ifdef DEBUG
void check_failed(uint8_t *file, uint32_t line);
#define CHECK_PARAM(expr) ((expr) ? (void)0 : check_failed((uint8_t *)__FILE__, __LINE__))
#else
#define CHECK_PARAM(expr) ((void)0)
#endif /* DEBUG */
void *arch_memcpy(void *dst, const void *src, uint32_t n);
void *arch_memset(void *s, uint8_t c, uint32_t n);
int arch_memcmp(const void *s1, const void *s2, uint32_t n);
uint32_t *arch_memcpy4(uint32_t *dst, const uint32_t *src, uint32_t n);
void *arch_memcpy_fast(void *pdst, const void *psrc, uint32_t n);
uint32_t *arch_memset4(uint32_t *dst, const uint32_t val, uint32_t n);
void memcopy_to_fifo(void *fifo_addr, uint8_t *data, uint32_t length);
void fifocopy_to_mem(void *fifo_addr, uint8_t *data, uint32_t length);
int arch_ctzll(uint64_t *val, uint32_t *bit);
int arch_clzll(uint64_t *val, uint32_t *bit);
int arch_ffsll(uint64_t *val, uint32_t *bit);
#endif
|
<reponame>Keneral/ae1
/* ila.h - ILA Interface */
#ifndef _LINUX_ILA_H
#define _LINUX_ILA_H
/* NETLINK_GENERIC related info */
#define ILA_GENL_NAME "ila"
#define ILA_GENL_VERSION 0x1
/* Netlink attributes carried by the ILA_CMD_* messages below. */
enum {
	ILA_ATTR_UNSPEC,
	ILA_ATTR_LOCATOR, /* u64 */
	ILA_ATTR_IDENTIFIER, /* u64 */
	ILA_ATTR_LOCATOR_MATCH, /* u64 */
	ILA_ATTR_IFINDEX, /* s32 */
	ILA_ATTR_DIR, /* u32 */
	__ILA_ATTR_MAX,
};
/* Highest valid attribute number (kernel netlink convention). */
#define ILA_ATTR_MAX (__ILA_ATTR_MAX - 1)
/* Generic-netlink commands operating on the ILA mapping table. */
enum {
	ILA_CMD_UNSPEC,
	ILA_CMD_ADD,
	ILA_CMD_DEL,
	ILA_CMD_GET,
	__ILA_CMD_MAX,
};
/* Highest valid command number (kernel netlink convention). */
#define ILA_CMD_MAX (__ILA_CMD_MAX - 1)
/* Bit flags for ILA_ATTR_DIR: apply translation on input and/or output. */
#define ILA_DIR_IN (1 << 0)
#define ILA_DIR_OUT (1 << 1)
#endif /* _LINUX_ILA_H */
|
/**
 * Check if there is one more tuple.
 *
 * Returns non-zero when advancing the tuple iterator of @r would still be
 * within bounds, i.e. the next index is below elem_count.
 * NOTE(review): the unsigned cast suggests cur_index is signed and starts
 * at -1 before the first tuple (so -1 + 1 wraps to 0 correctly) — confirm
 * against the tuple_itr definition.
 */
static inline int
tp_hasnext(struct tpresponse *r)
{
	return (uint32_t)(r->tuple_itr.cur_index + 1) < r->tuple_itr.elem_count;
}
/**
* Created by liumapp on 1/5/18.
* E-mail:[email protected]
* home-page:http://www.liumapp.com
*/
public class SchedulePattern extends Pattern {
public static enum IntervalUnit {
MILLISECOND,
SECOND,
MINUTE,
HOUR,
DAY,
WEEK,
MONTH,
YEAR;
private IntervalUnit() {
}
}
} |
import FWCore.ParameterSet.Config as cms
# Auto-generated cfi-style configuration for the ProducerWithPSetDesc test
# module. It exercises every parameter type the ParameterSet description
# machinery supports (signed/unsigned 32- and 64-bit ints, doubles, bools,
# strings, event IDs, luminosity blocks, event ranges, input tags,
# file-in-path, nested and vector PSets, plus switch/xor/or/and/ifExists/
# allowedLabels description nodes).
# NOTE(review): testingAutoGeneratedCfi below marks this file as generated —
# prefer editing the producer's fillDescriptions() C++ over these values.
testProducerWithPsetDesc = cms.EDProducer('ProducerWithPSetDesc',
    testingAutoGeneratedCfi = cms.untracked.bool(True),
    p_int = cms.int32(2147483647),
    p_int_untracked = cms.untracked.int32(-2147483647),
    p_int_opt = cms.int32(0),
    p_int_optuntracked = cms.untracked.int32(7),
    vint1 = cms.vint32(),
    vint2 = cms.vint32(2147483647),
    vint3 = cms.vint32(
        2147483647,
        -2147483647
    ),
    vint4 = cms.vint32(
        2147483647,
        -2147483647,
        0
    ),
    uint1 = cms.uint32(4294967295),
    uint2 = cms.untracked.uint32(0),
    vuint1 = cms.vuint32(),
    vuint2 = cms.vuint32(4294967295),
    vuint3 = cms.vuint32(
        4294967295,
        0
    ),
    vuint4 = cms.vuint32(
        4294967295,
        0,
        11
    ),
    vuint5 = cms.vuint32(
        4294967295,
        0,
        11,
        21,
        31,
        41
    ),
    int64v1 = cms.int64(9000000000000000000),
    int64v2 = cms.int64(-9000000000000000000),
    int64v3 = cms.int64(0),
    vint64v1 = cms.vint64(),
    vint64v2 = cms.vint64(9000000000000000000),
    vint64v3 = cms.vint64(
        9000000000000000000,
        -9000000000000000000
    ),
    vint64v4 = cms.vint64(
        9000000000000000000,
        -9000000000000000000,
        0
    ),
    uint64v1 = cms.uint64(18000000000000000000),
    uint64v2 = cms.untracked.uint64(0),
    vuint64v1 = cms.vuint64(),
    vuint64v2 = cms.vuint64(18000000000000000000),
    vuint64v3 = cms.vuint64(
        18000000000000000000,
        0
    ),
    vuint64v4 = cms.vuint64(
        18000000000000000000,
        0,
        11
    ),
    doublev1 = cms.double(2.2250738585072014e-308),
    doublev2 = cms.untracked.double(0),
    doublev3 = cms.untracked.double(0.3),
    vdoublev1 = cms.vdouble(),
    vdoublev2 = cms.vdouble(1e+300),
    vdoublev3 = cms.vdouble(
        1e+300,
        0
    ),
    vdoublev4 = cms.vdouble(
        1e+300,
        0,
        11
    ),
    vdoublev5 = cms.vdouble(
        1e+300,
        0,
        11,
        0.3
    ),
    boolv1 = cms.bool(True),
    boolv2 = cms.bool(False),
    stringv1 = cms.string('Hello'),
    stringv2 = cms.string(''),
    vstringv1 = cms.vstring(),
    vstringv2 = cms.vstring('Hello'),
    vstringv3 = cms.vstring(
        'Hello',
        'World'
    ),
    vstringv4 = cms.vstring(
        'Hello',
        'World',
        ''
    ),
    eventIDv1 = cms.EventID(11, 0, 12),
    eventIDv2 = cms.EventID(101, 0, 102),
    vEventIDv1 = cms.VEventID(),
    vEventIDv2 = cms.VEventID('1000:1100'),
    vEventIDv3 = cms.VEventID(
        '1000:1100',
        '10000:11000'
    ),
    vEventIDv4 = cms.VEventID(
        '1000:1100',
        '10000:11000',
        '100000:110000'
    ),
    luminosityIDv1 = cms.LuminosityBlockID(11, 12),
    luminosityIDv2 = cms.LuminosityBlockID(101, 102),
    vLuminosityBlockIDv1 = cms.VLuminosityBlockID(),
    vLuminosityBlockIDv2 = cms.VLuminosityBlockID('1000:1100'),
    vLuminosityBlockIDv3 = cms.VLuminosityBlockID(
        '1000:1100',
        '10000:11000'
    ),
    vLuminosityBlockIDv4 = cms.VLuminosityBlockID(
        '1000:1100',
        '10000:11000',
        '100000:110000'
    ),
    lumiRangev1 = cms.LuminosityBlockRange('1:1-9:9'),
    lumiRangev2 = cms.LuminosityBlockRange('3:4-1000:1000'),
    vLumiRangev1 = cms.VLuminosityBlockRange(),
    vLumiRangev2 = cms.VLuminosityBlockRange('1:1-9:9'),
    vLumiRangev3 = cms.VLuminosityBlockRange(
        '1:1-9:9',
        '3:4-1000:1000'
    ),
    eventRangev1 = cms.EventRange('1:1-8:8'),
    eventRangev2 = cms.EventRange('3:4-1001:1002'),
    vEventRangev1 = cms.VEventRange(),
    vEventRangev2 = cms.VEventRange('1:1-8:8'),
    vEventRangev3 = cms.VEventRange(
        '1:1-8:8',
        '3:4-1001:1002'
    ),
    inputTagv1 = cms.InputTag('One', 'Two', 'Three'),
    inputTagv2 = cms.InputTag('One', 'Two'),
    inputTagv3 = cms.InputTag('One'),
    inputTagv4 = cms.InputTag('One', '', 'Three'),
    vInputTagv1 = cms.VInputTag(),
    vInputTagv2 = cms.VInputTag('One:Two:Three'),
    vInputTagv3 = cms.VInputTag(
        'One:Two:Three',
        'One:Two'
    ),
    vInputTagv4 = cms.VInputTag(
        'One:Two:Three',
        'One:Two',
        'One'
    ),
    vInputTagv5 = cms.VInputTag(
        'One:Two:Three',
        'One:Two',
        'One',
        'One::Three'
    ),
    fileInPath = cms.FileInPath('FWCore/Integration/test/ProducerWithPSetDesc.cc'),
    bar = cms.PSet(
        Drinks = cms.uint32(5),
        uDrinks = cms.untracked.uint32(5),
        oDrinks = cms.uint32(5),
        ouDrinks = cms.untracked.uint32(5)
    ),
    test104 = cms.untracked.VPSet(
        cms.PSet()
    ),
    test105 = cms.untracked.VPSet(
    ),
    test1 = cms.double(0.1),
    test2 = cms.double(0.2),
    testA = cms.string('fooA'),
    testB = cms.int32(100),
    testC = cms.int32(101),
    oiswitch = cms.int32(1),
    oivalue1 = cms.double(101),
    oivalue2 = cms.double(101),
    testDeeplyNested2 = cms.PSet(
        bswitch = cms.bool(False),
        bvalue1 = cms.double(101),
        bvalue2 = cms.double(101),
        iswitch = cms.int32(1),
        ivalue1 = cms.double(101),
        ivalue2 = cms.untracked.double(101),
        sswitch = cms.string('1'),
        svalue1 = cms.double(101),
        svalue2 = cms.double(101),
        testint = cms.int32(1000)
    ),
    bars = cms.VPSet(
        cms.PSet(
            oDrinks = cms.uint32(11)
        ),
        cms.PSet(
            ndouDrinks = cms.untracked.uint32(11),
            oDrinks = cms.uint32(11),
            ouDrinks = cms.untracked.uint32(11),
            testDeeplyNested = cms.PSet(
                testint = cms.int32(2)
            ),
            anotherVPSet = cms.VPSet(
                cms.PSet(),
                cms.PSet(
                    xvalue = cms.int32(17)
                )
            )
        )
    ),
    subpset = cms.PSet(
        xvalue = cms.int32(11),
        bar = cms.untracked.PSet(
            Drinks = cms.uint32(5),
            uDrinks = cms.untracked.uint32(5),
            oDrinks = cms.uint32(5),
            ouDrinks = cms.untracked.uint32(5)
        )
    ),
    wildcardPset = cms.PSet(
        p_uint_opt = cms.uint32(0)
    ),
    switchPset = cms.PSet(
        iswitch = cms.int32(1),
        ivalue1 = cms.double(101),
        ivalue2 = cms.double(101),
        addTeVRefits = cms.bool(True),
        pickySrc = cms.InputTag(''),
        tpfmsSrc = cms.InputTag('')
    ),
    xorPset = cms.PSet(
        name = cms.string('11'),
        name1 = cms.string('11'),
        name3 = cms.string('11')
    ),
    orPset = cms.PSet(
        x1 = cms.string('11'),
        y1 = cms.string('11')
    ),
    andPset = cms.PSet(
        x1 = cms.string('11'),
        x2 = cms.uint32(11),
        y1 = cms.string('11'),
        y2 = cms.uint32(11),
        z1 = cms.string('11'),
        z2 = cms.uint32(11),
        b1 = cms.string('11'),
        b2 = cms.uint32(11),
        b3 = cms.uint32(11),
        b4 = cms.uint32(11),
        b5 = cms.uint32(11),
        b6 = cms.uint32(11)
    ),
    ifExistsPset = cms.PSet(
        x1 = cms.uint32(11),
        x2 = cms.string('11'),
        z1 = cms.uint32(11),
        z2 = cms.string('11')
    ),
    allowedLabelsPset = cms.PSet(
        p_int_opt = cms.int32(0),
        testAllowedLabels = cms.vstring(),
        testAllowedLabelsUntracked = cms.untracked.vstring(),
        testWithSet = cms.untracked.vstring(),
        testWithVectorOfSets = cms.untracked.vstring()
    ),
    noDefaultPset3 = cms.PSet(),
    noDefaultPset4 = cms.PSet()
)
|
<filename>platform/android/Rhodes/src/com/rhomobile/rhodes/Push.java
package com.rhomobile.rhodes;
/**
 * Push-notification constants for the Rhodes Android runtime.
 */
public class Push {
	// Sender account used when registering this application for push messages.
	// NOTE(review): "<EMAIL>" looks like a redacted placeholder rather than a
	// real sender id — confirm the actual value is substituted at build time.
	public static final String SENDER = "<EMAIL>";
}
|
<reponame>kneed/iot-device-simulator
package migrate
import (
"github.com/golang-migrate/migrate/v4"
_ "github.com/golang-migrate/migrate/v4/database/postgres"
_ "github.com/golang-migrate/migrate/v4/source/file"
"github.com/kneed/iot-device-simulator/settings"
log "github.com/sirupsen/logrus"
)
// DbMigrate applies all pending "up" migrations from the ./migrations
// directory against the database configured in settings.DatabaseSetting.Url.
//
// The process is aborted via log.Fatalf when the migrator cannot be created
// or when a migration fails. migrate.ErrNoChange (schema already current) is
// treated as a non-error and is only logged at debug level.
func DbMigrate() {
	dbUrl := settings.DatabaseSetting.Url
	log.Debugf("---------database migrate start---------")
	m, err := migrate.New(
		"file:./migrations",
		dbUrl)
	if err != nil {
		log.Fatalf("new migrate error: %v", err.Error())
	}
	// Release the source and database connections held by the migrator once
	// the run is finished; the original code leaked both handles.
	defer m.Close()
	if err = m.Up(); err != nil {
		// Fatalf exits, so the Debug below only runs for ErrNoChange.
		if err != migrate.ErrNoChange {
			log.Fatalf("%v", err.Error())
		}
		log.Debug(err.Error())
	}
	log.Debug("---------database migrate finished---------")
}
|
Comparison of HoNOS and HoNOS-Secure in a forensic mental health hospital
Abstract The Health of the Nation Outcome Scale (HoNOS) is a widely used tool for monitoring consumer outcomes within mental health services. However, concern about its suitability in forensic mental health settings led to the development of a forensic version of this tool (HoNOS-Secure). To date, no direct comparison of these versions has appeared in the empirical literature. In the present study, a cohort of forensic mental health consumers was rated using the HoNOS and HoNOS-Secure. Pearson correlations were generated to compare the tools at a total score and item level. Logistic regression was employed to evaluate how well these tools categorise patients on a range of measurable outcomes. HoNOS scores were also compared against civil mental health consumers to evaluate differences between these populations. The HoNOS/HoNOS-Secure correlated strongly at the total score level, but demonstrated variable correlations at the item level. Logistic regression suggested that the HoNOS-Secure ‘clinical and social functioning scale’ adds little to the HoNOS in a forensic setting; however, the HoNOS-Secure ‘security scale’ added significant benefit to both versions. Results remained stable when re-evaluated over time. Forensic and civil mental health patients were found to demonstrate the same degree of psychopathology at the point of admission; however, they differed at review and discharge collection occasions. Implications for clinical practice and policy are explored. |
Alkaline phosphatase activities following repeated suramin administration in some rat tissues in relation to their functions.
The effect of repeated administration of suramin, a trypanocide and filariacide, on the level of alkaline phosphatase activity in some rat tissues and organs was investigated. Daily administration of this drug to rats resulted in a very significant increase in enzyme activity in the kidney and small intestine immediately after the first dose. Activity observed in the liver was not affected until after the third dose when the level was increased. In the heart and the large intestine, administration of the drug did not affect the enzyme activity levels throughout the duration of drug administration. These results suggest that repeated suramin administration may result in very large increases in alkaline phosphatase activity in the tissues and organs which are involved in active transport mechanisms. |
package smart_farm_api.sensor.service;
import smart_farm_api.common.ResultDto;
import smart_farm_api.sensor.domain.SensorDataDto;
/**
 * Strategy interface for handling sensor readings produced by the simulator.
 */
public interface ISensorService {
	/**
	 * Processes a single structured sensor reading.
	 *
	 * @param dataSet the sensor measurement to handle
	 */
	public void execute(SensorDataDto dataSet);
	/**
	 * Processes an arbitrary payload and reports the outcome.
	 *
	 * @param obj the raw payload; accepted concrete types are defined by each implementation
	 * @return a {@link ResultDto} describing the result of the operation
	 */
	public ResultDto execute(Object obj);
}
|
<filename>types/amap-js-api-transfer/amap-js-api-transfer-tests.ts
// Type-level tests (dtslint style) for the AMap.Transfer plugin typings.
// Each `$ExpectType` / `$ExpectError` comment is an assertion checked by the
// test runner against the expression it annotates — do not reflow these pairs.
declare const map: AMap.Map;
declare const lnglat: AMap.LngLat;
declare const lnglatTuple: [number, number];
// Constructing a Transfer without options must be rejected.
// $ExpectError
new AMap.Transfer();
// $ExpectType Transfer
new AMap.Transfer({
    city: 'city'
});
// Full option set accepted by the constructor.
// $ExpectType Transfer
const transfer = new AMap.Transfer({
    city: 'city1',
    policy: AMap.TransferPolicy.LEAST_TIME,
    nightflag: true,
    cityd: 'city2',
    extensions: 'base',
    map,
    panel: 'panel',
    hideMarkers: false,
    isOutline: true,
    outlineColor: 'green',
    autoFitView: true
});
// search() overloads: LngLat pair, tuple pair, and pair + callback.
// $ExpectType void
transfer.search(lnglat, lnglat);
// $ExpectType void
transfer.search(lnglatTuple, lnglatTuple);
// $ExpectType void
transfer.search(lnglat, lnglat, (status, result) => {
    const temp: 'complete' | 'no_data' | 'error' = status;
    if (typeof result !== 'string') {
        // $ExpectType SearchResultBase
        result;
        // $ExpectType LngLat
        result.destination;
        // $ExpectType Poi | undefined
        result.end;
        if (result.end) {
            const end = result.end;
            // $ExpectType LngLat
            end.location;
            // $ExpectType string
            end.name;
            // $ExpectType "end" | "start" || "start" | "end"
            end.type;
        }
        // $ExpectType string
        result.info;
        // $ExpectType LngLat
        result.origin;
        // $ExpectType TransferPlan[]
        result.plans;
        {
            const plan = result.plans[0];
            // $ExpectType number
            plan.cost;
            // $ExpectType number
            plan.distance;
            // $ExpectType boolean
            plan.nightLine;
            // $ExpectType LngLat[]
            plan.path;
            // $ExpectType number
            plan.railway_distance;
            // $ExpectType Segment[]
            plan.segments;
            const segments = plan.segments[0];
            switch (segments.transit_mode) {
                case 'WALK':
                    // $ExpectType number
                    segments.distance;
                    // $ExpectType string
                    segments.instruction;
                    // $ExpectType number
                    segments.time;
                    // $ExpectType WalkDetails
                    const walkDetails = segments.transit;
                    {
                        // $ExpectType LngLat
                        walkDetails.destination;
                        // $ExpectType LngLat
                        walkDetails.origin;
                        // $ExpectType LngLat[]
                        walkDetails.path;
                        // $ExpectType WalkStep[]
                        walkDetails.steps;
                        const walkStep = walkDetails.steps[0];
                        if (walkStep) {
                            // $ExpectType string
                            walkStep.action;
                            // $ExpectType string
                            walkStep.assist_action;
                            // $ExpectType number
                            walkStep.distance;
                            // $ExpectType string
                            walkStep.instruction;
                            // $ExpectType LngLat[]
                            walkStep.path;
                            // $ExpectType string
                            walkStep.road;
                            // $ExpectType number
                            walkStep.time;
                        }
                    }
                    // $ExpectType "WALK"
                    segments.transit_mode;
                    break;
                case 'TAXI':
                    // $ExpectType number
                    segments.distance;
                    // $ExpectType string
                    segments.instruction;
                    // $ExpectType number
                    segments.time;
                    // $ExpectType string
                    segments.instruction;
                    // $ExpectType TaxiDetails
                    const taxiDetails = segments.transit;
                    {
                        // $ExpectType LngLat
                        taxiDetails.destination;
                        // $ExpectType number
                        taxiDetails.distance;
                        // $ExpectType LngLat
                        taxiDetails.origin;
                        // $ExpectType string
                        taxiDetails.sname;
                        // $ExpectType number
                        taxiDetails.time;
                        // $ExpectType string
                        taxiDetails.tname;
                    }
                    // $ExpectType "TAXI"
                    segments.transit_mode;
                    break;
                case 'RAILWAY':
                    // $ExpectType number
                    segments.distance;
                    // $ExpectType string
                    segments.instruction;
                    // $ExpectType number
                    segments.time;
                    // $ExpectType RailwayDetails
                    const railwayDetails = segments.transit;
                    {
                        // $ExpectType RailStop
                        const arrivalStop = railwayDetails.arrival_stop;
                        {
                            // $ExpectType string
                            arrivalStop.adcode;
                            // $ExpectType string
                            arrivalStop.id;
                            // $ExpectType LngLat
                            arrivalStop.location;
                            // $ExpectType string
                            arrivalStop.name;
                            // $ExpectType RailwaySegment | undefined
                            arrivalStop.segment;
                            // $ExpectType number
                            arrivalStop.time;
                        }
                        // $ExpectType RailStop
                        railwayDetails.departure_stop;
                        // $ExpectType number
                        railwayDetails.distance;
                        // $ExpectType string
                        railwayDetails.id;
                        // $ExpectType string
                        railwayDetails.name;
                        // $ExpectType Space[]
                        railwayDetails.spaces;
                        {
                            const space = railwayDetails.spaces[0];
                            // $ExpectType number
                            space.cost;
                            // $ExpectType string | never[]
                            space.type;
                        }
                        // $ExpectType number
                        railwayDetails.time;
                        // $ExpectType string
                        railwayDetails.trip;
                        // $ExpectType string
                        railwayDetails.type;
                        if ('alters' in railwayDetails) {
                            // $ExpectType Alter[]
                            railwayDetails.alters;
                            {
                                const alter = railwayDetails.alters[0];
                                // $ExpectType string
                                alter.id;
                                // $ExpectType string
                                alter.name;
                            }
                            railwayDetails.alters;
                            // $ExpectType number
                            railwayDetails.via_num;
                            // $ExpectType ViaStop[]
                            railwayDetails.via_stops;
                            {
                                const viaStop = railwayDetails.via_stops[0];
                                // $ExpectType string
                                viaStop.id;
                                // $ExpectType LngLat
                                viaStop.location;
                                // $ExpectType string
                                viaStop.name;
                                // $ExpectType number
                                viaStop.time;
                                // $ExpectType number
                                viaStop.wait;
                            }
                        }
                    }
                    // $ExpectType "RAILWAY"
                    segments.transit_mode;
                    break;
                case 'SUBWAY':
                case 'METRO_RAIL':
                case 'BUS':
                    // $ExpectType number
                    segments.distance;
                    // $ExpectType string
                    segments.instruction;
                    // $ExpectType number
                    segments.time;
                    // $ExpectType TransitDetails
                    const transitDetail = segments.transit;
                    {
                        // $ExpectType SubwayEntrance | undefined
                        const exit = transitDetail.exit;
                        if (exit) {
                            // $ExpectType LngLat
                            exit.location;
                            // $ExpectType string
                            exit.name;
                        }
                        // $ExpectType SubwayEntrance | undefined
                        transitDetail.entrance;
                        // $ExpectType TransitLine[]
                        transitDetail.lines;
                        {
                            const line = transitDetail.lines[0];
                            // $ExpectType string | never[]
                            line.etime;
                            // $ExpectType string
                            line.id;
                            // $ExpectType string
                            line.name;
                            // $ExpectType string | never[]
                            line.stime;
                            // $ExpectType string
                            line.type;
                        }
                        // $ExpectType Stop
                        const offStation = transitDetail.off_station;
                        {
                            // $ExpectType string
                            offStation.id;
                            // $ExpectType LngLat
                            offStation.location;
                            // $ExpectType string
                            offStation.name;
                            // $ExpectType TransitSegment | undefined
                            offStation.segment;
                        }
                        // $ExpectType Stop
                        transitDetail.on_station;
                        // $ExpectType LngLat[]
                        transitDetail.path;
                        // $ExpectType number
                        transitDetail.via_num;
                        // $ExpectType Stop[]
                        transitDetail.via_stops;
                        {
                            const viaStop = transitDetail.via_stops[0];
                            // $ExpectType string
                            viaStop.id;
                            // $ExpectType LngLat
                            viaStop.location;
                            // $ExpectType string
                            viaStop.name;
                        }
                    }
                    // $ExpectType "SUBWAY" | "METRO_RAIL" | "BUS"
                    segments.transit_mode;
                    break;
                default:
                    // $ExpectType never
                    segments;
            }
            // $ExpectType number
            plan.taxi_distance;
            // $ExpectType number
            plan.time;
            // $ExpectType number
            plan.transit_distance;
            // $ExpectType number
            plan.walking_distance;
        }
        // $ExpectType Poi | undefined
        result.start;
        // $ExpectType number
        result.taxi_cost;
    } else {
        // $ExpectType string
        result;
    }
});
// Keyword-based search overload returns the extended result shape.
// $ExpectType void
transfer.search([{ keyword: 'origin' }, { keyword: 'destination' }], (status, result) => {
    const temp: 'complete' | 'no_data' | 'error' = status;
    if (typeof result !== 'string') {
        // $ExpectType SearchResultExt
        result;
        // $ExpectType PoiExt
        result.start;
        // $ExpectType string
        result.originName;
        // $ExpectType PoiExt
        result.end;
        // $ExpectType string
        result.destinationName;
    } else {
        // $ExpectType string
        result;
    }
});
// Event map typings: 'complete' payload narrows by available properties.
transfer.on('complete', (event: AMap.Transfer.EventMap['complete']) => {
    // $ExpectType "complete"
    event.type;
    if ('info' in event) {
        // $ExpectType string
        event.info;
        // $ExpectType LngLat
        event.origin;
        // $ExpectType LngLat
        event.destination;
        // $ExpectType number
        event.taxi_cost;
        // $ExpectType TransferPlan[]
        event.plans;
    }
    if ('originName' in event) {
        // $ExpectType PoiExt
        event.start;
        // $ExpectType PoiExt
        event.end;
        // $ExpectType string
        event.originName;
        // $ExpectType string
        event.destinationName;
    } else {
        // $ExpectType Poi | undefined
        event.start;
        // $ExpectType Poi | undefined
        event.end;
    }
});
transfer.on('error', (event: AMap.Transfer.EventMap['error']) => {
    // $ExpectType "error"
    event.type;
    // $ExpectType string
    event.info;
});
|
import { majorScale, Pane } from 'evergreen-ui'
import React from 'react'
interface Props {}
const RuleLayout: React.FC<Props> = ({ children }) => {
return (
<Pane display="flex" marginBottom={majorScale(4)}>
{children}
</Pane>
)
}
export default RuleLayout
|
import { Scene_data } from "../tl3d/engine/context/Scene_data";
/**
 * Registry of vertex attribute element formats and their WebGL upload
 * parameters (component count, GL data type, normalization flag).
 * @author ...
 */
export class VertexElementFormat {
    static Single: string = "single";
    static Vector2: string = "vector2";
    static Vector3: string = "vector3";
    static Vector4: string = "vector4";
    static Color: string = "color";
    static Byte4: string = "byte4";
    static Short2: string = "short2";
    static Short4: string = "short4";
    static NormalizedShort2: string = "normalizedshort2";
    static NormalizedShort4: string = "normalizedshort4";
    static HalfVector2: string = "halfvector2";
    static HalfVector4: string = "halfvector4";
    /** @internal [component count, GL data type, normalized flag: 0 = false]. */
    private static _elementInfos: any;

    /**
     * Builds the name -> [count, type, normalized] lookup table.
     * Must be called once after the Laya GL context exists.
     * NOTE(review): every entry except "byte4" is registered as gl.FLOAT,
     * including the short/half variants — presumably a simplification; confirm
     * before relying on packed vertex formats.
     */
    static __init__(): void {
        var gl: WebGLRenderingContext = Laya.LayaGL.instance;
        VertexElementFormat._elementInfos = {
            "single": [1, gl.FLOAT, 0],
            "vector2": [2, gl.FLOAT, 0],
            "vector3": [3, gl.FLOAT, 0],
            "vector4": [4, gl.FLOAT, 0],
            "color": [4, gl.FLOAT, 0],
            "byte4": [4, gl.UNSIGNED_BYTE, 0],
            "short2": [2, gl.FLOAT, 0],
            "short4": [4, gl.FLOAT, 0],
            "normalizedshort2": [2, gl.FLOAT, 0],
            "normalizedshort4": [4, gl.FLOAT, 0],
            "halfvector2": [2, gl.FLOAT, 0],
            "halfvector4": [4, gl.FLOAT, 0]
        };
    }

    /**
     * Looks up the upload parameters for a vertex element format name.
     * Throws (a string) when the format has not been registered;
     * requires __init__ to have run first.
     */
    static getElementInfos(element: string): any[] {
        var info: any[] = VertexElementFormat._elementInfos[element];
        if (info)
            return info;
        else
            throw "VertexElementFormat: this vertexElementFormat is not implement.";
    }
}
|
/**
* From an existing object, copy an individual part that will comprise a piece of a
* multipart upload object.
*
* @param upload
* the multipart upload to which this part will be added.
* @param partNumber
* the part's number; must be between 1 and 10,000 and must uniquely identify a given
* part and represent its order compared to all other parts. Part numbers need not
* be sequential.
* @param sourceBucketName
* the name of the bucket that contains the original object.
* @param sourceObjectKey
* the key name of the original object.
*
* @return
* information about the uploaded copy part, retain this information to eventually complete
* the object with {@link #multipartCompleteUpload(MultipartUpload, List)}.
* @throws S3ServiceException
*/
public MultipartPart multipartUploadPartCopy(MultipartUpload upload, Integer partNumber,
String sourceBucketName, String sourceObjectKey) throws S3ServiceException
{
MultipartPart part = multipartUploadPartCopyImpl(upload.getUploadId(),
upload.getBucketName(), upload.getObjectKey(), partNumber,
sourceBucketName, sourceObjectKey,
null, null, null, null, null, null, null);
upload.addMultipartPartToUploadedList(part);
return part;
} |
/**
* Perform the operation across all versions of a specific resource (by ID and type) on the server. Note that
* <code>theId</code> must be populated with both a resource type and a resource ID at a minimum.
*
* @param id the {@link IIdType} which must be populated with both a resource type and a
* resource ID at
* @param returnType Request that the method return a Bundle resource (such as
* <code>ca.uhn.fhir.model.dstu2.resource.Bundle</code>). Use this method if you
* are accessing a DSTU2+ server.
* @param count Request that the server return only up to <code>theCount</code> number of
* resources, may be NULL
* @param cutoff Request that the server return only resource versions that were created at or
* after the given time (inclusive), may be NULL
* @param iCutoff Request that the server return only resource versions that were created at or
* after the given time (inclusive), may be NULL
* @param <T> extends {@link IBaseBundle}
* @param extraParameters see {@link ExtraParameters} for a full list of parameters that can be passed,
* may be NULL
* @throws IllegalArgumentException If <code>id</code> does not contain at least a resource type and ID
* @return the {@link IBaseBundle}
*/
public <T extends IBaseBundle> T onInstance(
IIdType id, Class<T> returnType, Integer count, Date cutoff, IPrimitiveType<Date> iCutoff,
Map<ExtraParameters, Object> extraParameters) {
IHistoryTyped<T> tiHistoryTyped = client.history().onInstance(id).andReturnBundle(returnType);
processOptionalParams(count, cutoff, iCutoff, tiHistoryTyped);
ExtraParameters.process(extraParameters, tiHistoryTyped);
return tiHistoryTyped.execute();
} |
<gh_stars>10-100
use failure::{Context, Fail};
use std::result;
pub use failure::ResultExt;
/// The error categories produced by this crate.
///
/// Each variant renders a human-readable message through the `failure`
/// crate's `#[fail(display = ...)]` attribute.
#[derive(Debug, Fail)]
pub enum ErrorKind {
    /// Wraps a `std::io::Error`.
    #[fail(display = "IO error: {}", _0)]
    Io(#[fail(cause)] std::io::Error),
    /// Wraps an internal message.
    ///
    /// `file` and `line` identify the source location that raised the
    /// error (see `ErrorKind::internal_error`), and are included in the
    /// displayed message as `[file:line]: message`.
    #[fail(display = "[{}:{}]: {}", file, line, message)]
    InternalError {
        file: String,
        line: u32,
        message: String,
    },
}
impl ErrorKind {
    /// Convenience constructor for [`ErrorKind::InternalError`].
    ///
    /// `file` and `line` should identify the call site (typically
    /// supplied by a macro via `file!()` / `line!()`).
    pub(crate) fn internal_error(
        file: impl Into<String>,
        line: u32,
        message: impl Into<String>,
    ) -> Self {
        // Convert the borrowed/owned inputs up front, then build the
        // variant with field-shorthand syntax.
        let file = file.into();
        let message = message.into();
        ErrorKind::InternalError { file, line, message }
    }
}
/// Crate-wide error type: an `ErrorKind` wrapped in a `failure::Context`,
/// which carries the cause chain (and backtrace, when enabled).
pub type Error = Context<ErrorKind>;
/// Result with an error
pub type Result<T> = result::Result<T, Error>;
#[cfg(test)]
mod tests {
    #[test]
    fn test_internal_error_format() {
        // `internal_err!` is defined elsewhere in this crate; presumably it
        // captures file!()/line!() of the call site -- TODO confirm.
        let err = internal_err!("testing");
        let err_str = err.to_string();
        // yes, this is fragile: the expected string hard-codes the file path
        // and the exact line number of the `internal_err!` call above, so
        // any edit that shifts lines in this file will break the assertion.
        assert_eq!(err_str, "[src/errors.rs:43]: testing");
    }
}
|
import { Stream } from 'most';

/**
 * Mirrors the source stream's error and completion signals while
 * discarding every value it emits (the predicate rejects all items).
 */
function ignoreElements<T>(stream$: Stream<T>): Stream<T> {
    return stream$.filter(() => false);
}

export default ignoreElements;
|
The best part is when the lights in the tents go on, one by one, says Elad Orian. Electricity here, in the hills south of Hebron, was long unreliable. Either it was not available or it was too expensive, produced for just a few hours each day by a noisy, diesel-guzzling generator. That changed when Elad Orian and Noam Dotan, two Israeli physicians who had tired of conflict, came along three years ago and installed solar panels and erected wind turbines. Since then, such facilities have been installed in 16 communities, providing 1,500 Palestinians with electricity.
The women here no longer have to make their butter by hand; they can refrigerate the sheep's cheese, which is their livelihood; and their children can do their homework at night. Now they can sit together and watch TV -- and connect to a world that seems far removed from their lives on the edge of the Judaean Desert. It is but a small revolution, achieved at little cost. But it is a good example of successful development aid.
The success, though, could soon be a thing of the past. Israel has threatened to tear the installations down: in recent weeks, five municipalities have received "stop work" orders -- the first step on the road to demolition. The problem is that the facilities are in the so-called Area C, which covers 60 percent of the West Bank and is administered by Israel. Permission from the Israelis is a requirement before construction projects can move ahead -- and permits are almost never given to Palestinians.
'A Clear Signal'
The result is that Area C residents face poor roads and a lack of electricity and water. Farming is impossible, and the construction of factories forbidden. As a result, only 150,000 Palestinians live in Area C -- and 310,000 well-supplied Israeli settlers. The solar project helps make life a bit more bearable for Palestinians in Area C. That, though, would appear to be something that Israel does not want.
"The demolition orders are meant to send a clear signal to all European Union countries: Do not interfere, do not invest in Area C," says project founder Noam Dotan.
Some of the facilities have already been there for two years, which makes it hard to believe that they have only just been noticed now. Above all, the decision sends a signal to Germany, which has provided most of the funds for the project -- a total of roughly €600,000 ($791,300). The project was carried out by the aid organization Medico International in cooperation with Comet-ME, the organization founded by the two Israeli physicians.
European diplomats in Ramallah and Tel Aviv suspect that the demolition orders are a reaction to a recently drafted, unusually critical EU report on the situation in Area C. It states: "The window for a two-state solution is closing rapidly with the continued expansion of Israeli settlements." The conclusion: The EU needs to target investment in economic development and improved living conditions of Palestinians in Area C.
Political Talking Point
A few months ago, a similar project co-financed by the Spanish government was scheduled for demolition, something which has been prevented thus far through massive diplomatic pressure.
Projects funded by foreign aid organizations or the EU have often been destroyed in the past, the best known example being the Gaza airport, financed with $38 million from the EU only to be destroyed by Israeli bombs a short time after its construction. Generally, though, the demolitions have been the result of security concerns. The fact that harmless solar cells -- installations which are funded by allied countries to provide basic humanitarian needs -- are at risk of demolition is a new development.
As such, when German Foreign Minister Guido Westerwelle traveled to Israel two weeks ago, he not only spoke to Prime Minister Benjamin Netanyahu and Defense Minister Ehud Barak about the peace process and Iran's nuclear program, but also about wind turbines and solar panels in places like Shaab al-Buttum.
Hundreds of people live in the village, and they are the poorest of the poor. A community of shepherds, they moved freely through the area until Israel occupied the West Bank in 1967. Since then, they have settled, collecting rain water during the winter and buying expensive drinking water brought in by a truck along a gravel track in the summer. A well-maintained road to the settlement doesn't exist, despite the fact that Shaab al-Buttum lies between two Israeli outposts. The settlements are illegal, but miraculously they have all the basics their Palestinian neighbors are missing: electricity, water and roads.
Social Changes
However, over the past four months, two wind turbines and 40 solar panels have supplied the villagers with energy: 40 to 60 kilowatt-hours each day. It is just enough to heat one square meter of a well-insulated house for a year -- or enough to supply a whole village.
Since the arrival of electricity, Israeli anthropologist Shuli Hartman, 60, has been living in the village. She wanted to find out what electricity does to people. She observed that women have more time because their workload is reduced and they can earn more. She saw they became more independent, using mobile phones, which they couldn't even charge until recently. And she saw how a village in which every family used to struggle to survive is now learning to become a community. An elderly villager told her: "Electricity for us is like water to a person walking through the desert." Her life has become a bit easier as a result of the mini-power station.
Last but not least, the project has brought together Israelis and Palestinians. "The Palestinians here had previously only known Israelis as settlers and soldiers," says Hartman.
"We did not want to just demonstrate and remain part of the conflict; we wanted to be part of the solution," explains Noam Dotan. But a solution is apparently not wanted. In the absence of a small miracle, the tents in Shaab al-Buttum will soon be dark once again. |
///
/// @file BitSieve240.cpp
/// @brief The BitSieve240 base class contains lookup tables that are
/// needed to implement a prime sieving algorithm where each
/// bit corresponds to an integer that is not divisible by 2,
/// 3 and 5. The 8 bits of each byte correspond to the offsets
/// { 1, 7, 11, 13, 17, 19, 23, 29 }. Since the sieve array
/// uses the uint64_t data type, one sieve array element
/// (8 bytes) corresponds to an interval of size 30 * 8 = 240.
///
/// Copyright (C) 2022 Kim Walisch, <[email protected]>
///
/// This file is distributed under the BSD License. See the COPYING
/// file in the top level directory.
///
#include <BitSieve240.hpp>
#include <pod_vector.hpp>
#include <stdint.h>
namespace {

/// The 8 bits in each byte of the sieve array correspond
/// to the offsets { 1, 7, 11, 13, 17, 19, 23, 29 }.
/// right_shift(n) computes how far an all-ones 64-bit word must be
/// shifted right so that exactly the bits corresponding to the numbers
/// <= n remain set (see unset_l() below). It is written as a single
/// conditional expression so that it remains a valid constexpr function
/// even under the C++11 rules.
constexpr int right_shift(int n)
{
  return (n % 30 >= 29) ? 56 - (n / 30 * 8)
       : (n % 30 >= 23) ? 57 - (n / 30 * 8)
       : (n % 30 >= 19) ? 58 - (n / 30 * 8)
       : (n % 30 >= 17) ? 59 - (n / 30 * 8)
       : (n % 30 >= 13) ? 60 - (n / 30 * 8)
       : (n % 30 >= 11) ? 61 - (n / 30 * 8)
       : (n % 30 >= 7)  ? 62 - (n / 30 * 8)
       : (n % 30 >= 1)  ? 63 - (n / 30 * 8)
       : 64 - (n / 30 * 8);
}

/// Bitmask that keeps the bits corresponding to the numbers <= n and
/// clears (unsets) all bits corresponding to numbers > n.
/// n == 0 is special cased because right_shift(0) == 64 and shifting a
/// 64-bit integer by 64 bits is undefined behavior.
constexpr uint64_t unset_l(int n)
{
  return (n == 0) ? 0 : ~0ull >> right_shift(n);
}

} // namespace
namespace primecount {

/// pi(x) for x < 6: the number of primes <= x.
/// (The primes 2, 3 and 5 have no bit of their own in the sieve, since
/// every bit corresponds to a number coprime to 30.)
const pod_array<uint64_t, 6> BitSieve240::pi_tiny_ = { 0, 0, 1, 2, 2, 3 };

/// Bitmasks needed to set a specific bit in the sieve array.
/// set_bit_[n % 240] is non-zero only when n % 30 is one of the offsets
/// { 1, 7, 11, 13, 17, 19, 23, 29 }; all other entries are 0 because
/// numbers divisible by 2, 3 or 5 have no bit in the sieve.
const pod_array<uint64_t, 240> BitSieve240::set_bit_ =
{
  0ull, 1ull << 0, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 1, 0ull, 0ull,
  0ull, 1ull << 2, 0ull, 1ull << 3, 0ull,
  0ull, 0ull, 1ull << 4, 0ull, 1ull << 5,
  0ull, 0ull, 0ull, 1ull << 6, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 7,
  0ull, 1ull << 8, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 9, 0ull, 0ull,
  0ull, 1ull << 10, 0ull, 1ull << 11, 0ull,
  0ull, 0ull, 1ull << 12, 0ull, 1ull << 13,
  0ull, 0ull, 0ull, 1ull << 14, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 15,
  0ull, 1ull << 16, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 17, 0ull, 0ull,
  0ull, 1ull << 18, 0ull, 1ull << 19, 0ull,
  0ull, 0ull, 1ull << 20, 0ull, 1ull << 21,
  0ull, 0ull, 0ull, 1ull << 22, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 23,
  0ull, 1ull << 24, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 25, 0ull, 0ull,
  0ull, 1ull << 26, 0ull, 1ull << 27, 0ull,
  0ull, 0ull, 1ull << 28, 0ull, 1ull << 29,
  0ull, 0ull, 0ull, 1ull << 30, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 31,
  0ull, 1ull << 32, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 33, 0ull, 0ull,
  0ull, 1ull << 34, 0ull, 1ull << 35, 0ull,
  0ull, 0ull, 1ull << 36, 0ull, 1ull << 37,
  0ull, 0ull, 0ull, 1ull << 38, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 39,
  0ull, 1ull << 40, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 41, 0ull, 0ull,
  0ull, 1ull << 42, 0ull, 1ull << 43, 0ull,
  0ull, 0ull, 1ull << 44, 0ull, 1ull << 45,
  0ull, 0ull, 0ull, 1ull << 46, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 47,
  0ull, 1ull << 48, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 49, 0ull, 0ull,
  0ull, 1ull << 50, 0ull, 1ull << 51, 0ull,
  0ull, 0ull, 1ull << 52, 0ull, 1ull << 53,
  0ull, 0ull, 0ull, 1ull << 54, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 55,
  0ull, 1ull << 56, 0ull, 0ull, 0ull,
  0ull, 0ull, 1ull << 57, 0ull, 0ull,
  0ull, 1ull << 58, 0ull, 1ull << 59, 0ull,
  0ull, 0ull, 1ull << 60, 0ull, 1ull << 61,
  0ull, 0ull, 0ull, 1ull << 62, 0ull,
  0ull, 0ull, 0ull, 0ull, 1ull << 63
};

/// Bitmasks needed to unset a specific bit in the sieve array.
/// unset_bit_[n % 240] is the complement of set_bit_[n % 240] for the
/// offsets coprime to 30, and ~0 (a no-op mask) everywhere else.
const pod_array<uint64_t, 240> BitSieve240::unset_bit_ =
{
  ~0ull, ~(1ull << 0), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 1), ~0ull, ~0ull,
  ~0ull, ~(1ull << 2), ~0ull, ~(1ull << 3), ~0ull,
  ~0ull, ~0ull, ~(1ull << 4), ~0ull, ~(1ull << 5),
  ~0ull, ~0ull, ~0ull, ~(1ull << 6), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 7),
  ~0ull, ~(1ull << 8), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 9), ~0ull, ~0ull,
  ~0ull, ~(1ull << 10), ~0ull, ~(1ull << 11), ~0ull,
  ~0ull, ~0ull, ~(1ull << 12), ~0ull, ~(1ull << 13),
  ~0ull, ~0ull, ~0ull, ~(1ull << 14), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 15),
  ~0ull, ~(1ull << 16), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 17), ~0ull, ~0ull,
  ~0ull, ~(1ull << 18), ~0ull, ~(1ull << 19), ~0ull,
  ~0ull, ~0ull, ~(1ull << 20), ~0ull, ~(1ull << 21),
  ~0ull, ~0ull, ~0ull, ~(1ull << 22), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 23),
  ~0ull, ~(1ull << 24), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 25), ~0ull, ~0ull,
  ~0ull, ~(1ull << 26), ~0ull, ~(1ull << 27), ~0ull,
  ~0ull, ~0ull, ~(1ull << 28), ~0ull, ~(1ull << 29),
  ~0ull, ~0ull, ~0ull, ~(1ull << 30), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 31),
  ~0ull, ~(1ull << 32), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 33), ~0ull, ~0ull,
  ~0ull, ~(1ull << 34), ~0ull, ~(1ull << 35), ~0ull,
  ~0ull, ~0ull, ~(1ull << 36), ~0ull, ~(1ull << 37),
  ~0ull, ~0ull, ~0ull, ~(1ull << 38), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 39),
  ~0ull, ~(1ull << 40), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 41), ~0ull, ~0ull,
  ~0ull, ~(1ull << 42), ~0ull, ~(1ull << 43), ~0ull,
  ~0ull, ~0ull, ~(1ull << 44), ~0ull, ~(1ull << 45),
  ~0ull, ~0ull, ~0ull, ~(1ull << 46), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 47),
  ~0ull, ~(1ull << 48), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 49), ~0ull, ~0ull,
  ~0ull, ~(1ull << 50), ~0ull, ~(1ull << 51), ~0ull,
  ~0ull, ~0ull, ~(1ull << 52), ~0ull, ~(1ull << 53),
  ~0ull, ~0ull, ~0ull, ~(1ull << 54), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 55),
  ~0ull, ~(1ull << 56), ~0ull, ~0ull, ~0ull,
  ~0ull, ~0ull, ~(1ull << 57), ~0ull, ~0ull,
  ~0ull, ~(1ull << 58), ~0ull, ~(1ull << 59), ~0ull,
  ~0ull, ~0ull, ~(1ull << 60), ~0ull, ~(1ull << 61),
  ~0ull, ~0ull, ~0ull, ~(1ull << 62), ~0ull,
  ~0ull, ~0ull, ~0ull, ~0ull, ~(1ull << 63)
};

/// Unset bits > stop: unset_larger_[stop % 240] keeps the bits
/// corresponding to the numbers <= stop and clears all larger bits
/// within the 240-wide interval.
const pod_array<uint64_t, 240> BitSieve240::unset_larger_ =
{
  unset_l(0), unset_l(1), unset_l(2), unset_l(3), unset_l(4),
  unset_l(5), unset_l(6), unset_l(7), unset_l(8), unset_l(9),
  unset_l(10), unset_l(11), unset_l(12), unset_l(13), unset_l(14),
  unset_l(15), unset_l(16), unset_l(17), unset_l(18), unset_l(19),
  unset_l(20), unset_l(21), unset_l(22), unset_l(23), unset_l(24),
  unset_l(25), unset_l(26), unset_l(27), unset_l(28), unset_l(29),
  unset_l(30), unset_l(31), unset_l(32), unset_l(33), unset_l(34),
  unset_l(35), unset_l(36), unset_l(37), unset_l(38), unset_l(39),
  unset_l(40), unset_l(41), unset_l(42), unset_l(43), unset_l(44),
  unset_l(45), unset_l(46), unset_l(47), unset_l(48), unset_l(49),
  unset_l(50), unset_l(51), unset_l(52), unset_l(53), unset_l(54),
  unset_l(55), unset_l(56), unset_l(57), unset_l(58), unset_l(59),
  unset_l(60), unset_l(61), unset_l(62), unset_l(63), unset_l(64),
  unset_l(65), unset_l(66), unset_l(67), unset_l(68), unset_l(69),
  unset_l(70), unset_l(71), unset_l(72), unset_l(73), unset_l(74),
  unset_l(75), unset_l(76), unset_l(77), unset_l(78), unset_l(79),
  unset_l(80), unset_l(81), unset_l(82), unset_l(83), unset_l(84),
  unset_l(85), unset_l(86), unset_l(87), unset_l(88), unset_l(89),
  unset_l(90), unset_l(91), unset_l(92), unset_l(93), unset_l(94),
  unset_l(95), unset_l(96), unset_l(97), unset_l(98), unset_l(99),
  unset_l(100), unset_l(101), unset_l(102), unset_l(103), unset_l(104),
  unset_l(105), unset_l(106), unset_l(107), unset_l(108), unset_l(109),
  unset_l(110), unset_l(111), unset_l(112), unset_l(113), unset_l(114),
  unset_l(115), unset_l(116), unset_l(117), unset_l(118), unset_l(119),
  unset_l(120), unset_l(121), unset_l(122), unset_l(123), unset_l(124),
  unset_l(125), unset_l(126), unset_l(127), unset_l(128), unset_l(129),
  unset_l(130), unset_l(131), unset_l(132), unset_l(133), unset_l(134),
  unset_l(135), unset_l(136), unset_l(137), unset_l(138), unset_l(139),
  unset_l(140), unset_l(141), unset_l(142), unset_l(143), unset_l(144),
  unset_l(145), unset_l(146), unset_l(147), unset_l(148), unset_l(149),
  unset_l(150), unset_l(151), unset_l(152), unset_l(153), unset_l(154),
  unset_l(155), unset_l(156), unset_l(157), unset_l(158), unset_l(159),
  unset_l(160), unset_l(161), unset_l(162), unset_l(163), unset_l(164),
  unset_l(165), unset_l(166), unset_l(167), unset_l(168), unset_l(169),
  unset_l(170), unset_l(171), unset_l(172), unset_l(173), unset_l(174),
  unset_l(175), unset_l(176), unset_l(177), unset_l(178), unset_l(179),
  unset_l(180), unset_l(181), unset_l(182), unset_l(183), unset_l(184),
  unset_l(185), unset_l(186), unset_l(187), unset_l(188), unset_l(189),
  unset_l(190), unset_l(191), unset_l(192), unset_l(193), unset_l(194),
  unset_l(195), unset_l(196), unset_l(197), unset_l(198), unset_l(199),
  unset_l(200), unset_l(201), unset_l(202), unset_l(203), unset_l(204),
  unset_l(205), unset_l(206), unset_l(207), unset_l(208), unset_l(209),
  unset_l(210), unset_l(211), unset_l(212), unset_l(213), unset_l(214),
  unset_l(215), unset_l(216), unset_l(217), unset_l(218), unset_l(219),
  unset_l(220), unset_l(221), unset_l(222), unset_l(223), unset_l(224),
  unset_l(225), unset_l(226), unset_l(227), unset_l(228), unset_l(229),
  unset_l(230), unset_l(231), unset_l(232), unset_l(233), unset_l(234),
  unset_l(235), unset_l(236), unset_l(237), unset_l(238), unset_l(239)
};

} // namespace
|
/****************************************************************************
 * Flat Field correction
 * Bool parameter used to know whether flat field correction
 * is to be made
 *
 ****************************************************************************/
void JLP_SpeckProcessPanel::OnSelectFFieldCorr(wxCommandEvent& event)
{
// Ignore events until the panel is fully initialized (sentinel value).
if(initialized != 1234) return;

int selected_id = event.GetId();
if(selected_id == ID_PROC_NO_FFCORR) {
  Pset1.FlatFieldCorrection = false;
} else if(selected_id == ID_PROC_FFCORR) {
  Pset1.FlatFieldCorrection = true;
  // No flat-field file chosen yet: prompt the user to load one.
  if(Pset1.FlatFieldFileName[0] == '\0') OnLoadFFieldFrame(event);
}

ChangesDone1 = true;
return;
}
def _parse_channels(self, root, group=None):
    """Parse all <channel> elements (optionally nested inside <group>
    elements) under root's <channels> node and register each one on this
    object via ``self.add_channel()``.

    Group descriptions, when present, are recorded in
    ``self._group_descriptions`` keyed by group name.
    """
    def parse_channel(node, group=None):
        # Integer channel id used to construct the lazy time series.
        id = int(node.get('id'))
        interval = node.get('interval')
        if interval is None:
            # No fixed sampling interval -> variable-rate series.
            timeseries = LazyVariableTimeSeries(parent=self, channel_id=id)
        else:
            # Fixed interval -> uniform series at the derived frequency.
            timeseries = LazyUniformTimeSeries(
                parent=self, channel_id=id,
                frequency=Frequency.from_interval(interval)
            )
        # NOTE(review): Channel.id is given the raw *string* attribute while
        # the time series above received the parsed *int* -- confirm this
        # inconsistency is intentional.
        channel = Channel(
            id = node.get('id'),
            name = node.findtext(ns('name')),
            timeseries = timeseries,
            units = node.get('units'),
            description = node.findtext(ns('description')),
            group = group
        )
        channel.__parent__ = self
        return channel

    def parse_channels(root, group=None):
        # NOTE(review): reassigning `group` below means channel nodes that
        # appear *after* a sibling <group> node would inherit that group's
        # name -- confirm the document order makes this impossible.
        for node in root.getchildren():
            if node.tag == ns('channel'):
                self.add_channel(parse_channel(node, group))
            elif node.tag == ns('group'):
                group = node.findtext(ns('name'))
                description = node.findtext(ns('description'))
                # Recurse so channels inside the group are tagged with it.
                parse_channels(node, group)
                if description:
                    self._group_descriptions[group] = description

    parse_channels(root.find(ns('channels')))
/// Writes the inputs for the fuzzer
use std::io::Write;
use tokio_util::codec::Encoder;
fn main() -> Result<(), Box<dyn std::error::Error>> {
let in_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("in");
if in_dir.exists() {
std::fs::remove_dir_all(&in_dir)?;
}
std::fs::create_dir(&in_dir)?;
let packets = vec![
("connack", mqtt3::proto::Packet::ConnAck(mqtt3::proto::ConnAck {
session_present: true,
return_code: mqtt3::proto::ConnectReturnCode::Accepted,
})),
("connect", mqtt3::proto::Packet::Connect(mqtt3::proto::Connect {
username: Some("username".to_string()),
password: Some("password".to_string()),
will: Some(mqtt3::proto::Publication {
topic_name: "will-topic".to_string(),
qos: mqtt3::proto::QoS::ExactlyOnce,
retain: true,
payload: b"\x00\x01\x02\xFF\xFE\xFD"[..].into(),
}),
client_id: mqtt3::proto::ClientId::IdWithExistingSession("id".to_string()),
keep_alive: std::time::Duration::from_secs(5),
})),
("disconnect", mqtt3::proto::Packet::Disconnect(mqtt3::proto::Disconnect)),
("pingreq", mqtt3::proto::Packet::PingReq(mqtt3::proto::PingReq)),
("pingresp", mqtt3::proto::Packet::PingResp(mqtt3::proto::PingResp)),
("puback", mqtt3::proto::Packet::PubAck(mqtt3::proto::PubAck {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
})),
("pubcomp", mqtt3::proto::Packet::PubComp(mqtt3::proto::PubComp {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
})),
("publish", mqtt3::proto::Packet::Publish(mqtt3::proto::Publish {
packet_identifier_dup_qos: mqtt3::proto::PacketIdentifierDupQoS::ExactlyOnce(mqtt3::proto::PacketIdentifier::new(5).unwrap(), true),
retain: true,
topic_name: "publish-topic".to_string(),
payload: b"\x00\x01\x02\xFF\xFE\xFD"[..].into(),
})),
("pubrec", mqtt3::proto::Packet::PubRec(mqtt3::proto::PubRec {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
})),
("pubrel", mqtt3::proto::Packet::PubRel(mqtt3::proto::PubRel {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
})),
("suback", mqtt3::proto::Packet::SubAck(mqtt3::proto::SubAck {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
qos: vec![
mqtt3::proto::SubAckQos::Success(mqtt3::proto::QoS::ExactlyOnce),
mqtt3::proto::SubAckQos::Failure,
],
})),
("subscribe", mqtt3::proto::Packet::Subscribe(mqtt3::proto::Subscribe {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
subscribe_to: vec![
mqtt3::proto::SubscribeTo {
topic_filter: "subscribe-topic".to_string(),
qos: mqtt3::proto::QoS::ExactlyOnce,
},
],
})),
("unsuback", mqtt3::proto::Packet::UnsubAck(mqtt3::proto::UnsubAck {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
})),
("unsubscribe", mqtt3::proto::Packet::Unsubscribe(mqtt3::proto::Unsubscribe {
packet_identifier: mqtt3::proto::PacketIdentifier::new(5).unwrap(),
unsubscribe_from: vec![
"unsubscribe-topic".to_string(),
],
})),
];
for (filename, packet) in packets {
let file = std::fs::OpenOptions::new().create(true).write(true).open(in_dir.join(filename))?;
let mut file = std::io::BufWriter::new(file);
let mut codec: mqtt3::proto::PacketCodec = Default::default();
let mut bytes = bytes::BytesMut::new();
codec.encode(packet, &mut bytes)?;
file.write_all(&bytes)?;
file.flush()?;
}
Ok(())
}
|
/**
 * Instance conceptually encapsulates all Remote Build Execution resources for remote builds. An
 * instance consists of storage and compute resources (for example, `ContentAddressableStorage`,
 * `ActionCache`, `WorkerPools`) used for running remote builds. All Remote Build Execution API
 * calls are scoped to an instance.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Remote Build Execution API. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance extends com.google.api.client.json.GenericJson {

  // NOTE(review): this class appears to be generated from the API discovery
  // document (standard google-api-client data model boilerplate); prefer
  // regenerating over hand-editing.

  /**
   * The policy to define whether or not RBE features can be used or how they can be used.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private GoogleDevtoolsRemotebuildexecutionAdminV1alphaFeaturePolicy featurePolicy;

  /**
   * The location is a GCP region. Currently only `us-central1` is supported.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String location;

  /**
   * Output only. Whether stack driver logging is enabled for the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean loggingEnabled;

  /**
   * Output only. Instance resource name formatted as:
   * `projects/[PROJECT_ID]/instances/[INSTANCE_ID]`. Name should not be populated when creating an
   * instance since it is provided in the `instance_id` field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Output only. State of the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;

  /**
   * The policy to define whether or not RBE features can be used or how they can be used.
   * @return value or {@code null} for none
   */
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaFeaturePolicy getFeaturePolicy() {
    return featurePolicy;
  }

  /**
   * The policy to define whether or not RBE features can be used or how they can be used.
   * @param featurePolicy featurePolicy or {@code null} for none
   */
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance setFeaturePolicy(GoogleDevtoolsRemotebuildexecutionAdminV1alphaFeaturePolicy featurePolicy) {
    this.featurePolicy = featurePolicy;
    return this;
  }

  /**
   * The location is a GCP region. Currently only `us-central1` is supported.
   * @return value or {@code null} for none
   */
  public java.lang.String getLocation() {
    return location;
  }

  /**
   * The location is a GCP region. Currently only `us-central1` is supported.
   * @param location location or {@code null} for none
   */
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance setLocation(java.lang.String location) {
    this.location = location;
    return this;
  }

  /**
   * Output only. Whether stack driver logging is enabled for the instance.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getLoggingEnabled() {
    return loggingEnabled;
  }

  /**
   * Output only. Whether stack driver logging is enabled for the instance.
   * @param loggingEnabled loggingEnabled or {@code null} for none
   */
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance setLoggingEnabled(java.lang.Boolean loggingEnabled) {
    this.loggingEnabled = loggingEnabled;
    return this;
  }

  /**
   * Output only. Instance resource name formatted as:
   * `projects/[PROJECT_ID]/instances/[INSTANCE_ID]`. Name should not be populated when creating an
   * instance since it is provided in the `instance_id` field.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Output only. Instance resource name formatted as:
   * `projects/[PROJECT_ID]/instances/[INSTANCE_ID]`. Name should not be populated when creating an
   * instance since it is provided in the `instance_id` field.
   * @param name name or {@code null} for none
   */
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Output only. State of the instance.
   * @return value or {@code null} for none
   */
  public java.lang.String getState() {
    return state;
  }

  /**
   * Output only. State of the instance.
   * @param state state or {@code null} for none
   */
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance setState(java.lang.String state) {
    this.state = state;
    return this;
  }

  /**
   * Covariant override so generic field assignment keeps the concrete type
   * for call chaining.
   */
  @Override
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance set(String fieldName, Object value) {
    return (GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance) super.set(fieldName, value);
  }

  /**
   * Covariant override returning the concrete type of the cloned instance.
   */
  @Override
  public GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance clone() {
    return (GoogleDevtoolsRemotebuildexecutionAdminV1alphaInstance) super.clone();
  }

}
<reponame>BlueCodeSystems/opensrp-client-chw
package org.smartregister.chw.model;

/**
 * Flavor-specific navigation model.
 *
 * Intentionally empty: this build flavor inherits all navigation behavior
 * unchanged from {@link DefaultNavigationModelFlv}.
 */
public class NavigationModelFlv extends DefaultNavigationModelFlv {
}
|
/*
 * Return ascii name of an item type.
 */
std::string Object::item_type_name ()
{
    // Table-driven lookup: each entry maps an ITEM_* constant to its
    // human-readable name. Behaviorally equivalent to the original
    // switch statement, including the unknown-type fallback below.
    static const struct {
        int type;
        const char *name;
    } item_names[] = {
        { ITEM_LIGHT,      "light"            },
        { ITEM_SCROLL,     "scroll"           },
        { ITEM_WAND,       "wand"             },
        { ITEM_STAFF,      "staff"            },
        { ITEM_WEAPON,     "weapon"           },
        { ITEM_TREASURE,   "treasure"         },
        { ITEM_ARMOR,      "armor"            },
        { ITEM_POTION,     "potion"           },
        { ITEM_FURNITURE,  "furniture"        },
        { ITEM_TRASH,      "trash"            },
        { ITEM_CONTAINER,  "container"        },
        { ITEM_DRINK_CON,  "drink container"  },
        { ITEM_KEY,        "key"              },
        { ITEM_FOOD,       "food"             },
        { ITEM_MONEY,      "money"            },
        { ITEM_BOAT,       "boat"             },
        { ITEM_CORPSE_NPC, "npc corpse"       },
        { ITEM_CORPSE_PC,  "pc corpse"        },
        { ITEM_FOUNTAIN,   "fountain"         },
        { ITEM_PILL,       "pill"             },
        { ITEM_DARKNESS,   "darkness exuding" },
    };

    for (const auto &entry : item_names) {
        if (entry.type == item_type)
            return entry.name;
    }

    // Unknown type: log it, then return a placeholder string.
    bug_printf ("Item_type_name: unknown type %d.", item_type);
    return "(unknown)";
}
<gh_stars>0
/**
 * Markov-chain based generator.
 *
 * NOTE(review): currently an empty stub -- no state or behavior has been
 * implemented yet.
 */
public class MarkovGenerator {
}
|
The mouse androgen receptor is suppressed by the 5'-untranslated region of the gene.
The androgen receptor (AR) mediates the biological functions of androgens and is essential for normal growth and differentiation of urogenital organs as well as initiation and maintenance of spermatogenesis. Withdrawal of androgens by castration or other methods has been shown to cause a marked, although often temporary, regression of many prostate cancers. In order to gain a better understanding of the transcriptional regulation of the AR, a series of truncation mutants derived from the 5'-region of the mouse AR (mAR) were inserted into the promoter-less plasmid pBLCAT3 and transiently expressed in the mouse alpha T3-1 and GT1-7 cell lines. The results of these experiments indicate the presence of a negative regulatory element in the 5'-untranslated region of the gene, which is able to reduce chloramphenicol acetyltransferase (CAT) activity by 77-89%. We have named this element the mAR suppressor (mARS). DNase-I protection assays of the 5'-untranslated region disclosed a protected domain. Gel mobility assays using the mARS revealed the presence of three protein-DNA complexes that could specifically bind to this protected domain. Insertion of the mARS into the thymidine kinase promoter containing pBLCAT2 vector resulted in a 2- to 10-fold decrease in CAT activity, but only if the insert was 3' to the start of transcription initiation. Finally, point mutations within the mARS were able to increase transcription of the AR promoter by 2.3-fold. The results of these experiments indicate that the mAR 5'-untranslated region contains a suppressor element.(ABSTRACT TRUNCATED AT 250 WORDS) |
/******************************************************************************
* Copyright (c) Intel Corporation - All rights reserved. *
* This file is part of the LIBXSMM library. *
* *
* For information on the license, see the LICENSE file. *
* Further information: https://github.com/libxsmm/libxsmm/ *
* SPDX-License-Identifier: BSD-3-Clause *
******************************************************************************/
/* Hans Pabst (Intel Corp.)
******************************************************************************/
#ifndef LIBXSMM_UTILS_H
#define LIBXSMM_UTILS_H
/**
* Any intrinsics interface (libxsmm_intrinsics_x86.h) shall be explicitly
* included, i.e., separate from libxsmm_utils.h.
*/
#include "libxsmm_lpflt_quant.h"
#include "libxsmm_barrier.h"
#include "libxsmm_timer.h"
#include "libxsmm_math.h"
#include "libxsmm_mhd.h"
/* When a threaded BLAS is linked in, pin it to a single thread during
 * initialization; presumably LIBXSMM manages parallelism itself -- TODO
 * confirm. Only OpenBLAS exposes a runtime switch that is handled here. */
#if defined(__BLAS) && (1 == __BLAS)
# if defined(__OPENBLAS)
LIBXSMM_EXTERN void openblas_set_num_threads(int num_threads);
# define LIBXSMM_BLAS_INIT openblas_set_num_threads(1);
# endif
#endif
/* Fallback: expand to nothing when no BLAS-specific initialization applies. */
#if !defined(LIBXSMM_BLAS_INIT)
# define LIBXSMM_BLAS_INIT
#endif

/** Call libxsmm_gemm_print using LIBXSMM's GEMM-flags. */
#define LIBXSMM_GEMM_PRINT(OSTREAM, PRECISION, FLAGS, M, N, K, DALPHA, A, LDA, B, LDB, DBETA, C, LDC) \
  LIBXSMM_GEMM_PRINT2(OSTREAM, PRECISION, PRECISION, FLAGS, M, N, K, DALPHA, A, LDA, B, LDB, DBETA, C, LDC)

/** Mixed-precision variant: IPREC/OPREC are the input/output datatypes;
 *  the transpose characters are derived from the LIBXSMM GEMM flags. */
#define LIBXSMM_GEMM_PRINT2(OSTREAM, IPREC, OPREC, FLAGS, M, N, K, DALPHA, A, LDA, B, LDB, DBETA, C, LDC) \
  libxsmm_gemm_dprint2(OSTREAM, (libxsmm_datatype)(IPREC), (libxsmm_datatype)(OPREC), \
    /* Use 'n' (instead of 'N') avoids warning about "no macro replacement within a character constant". */ \
    (char)(0 == (LIBXSMM_GEMM_FLAG_TRANS_A & (FLAGS)) ? 'n' : 't'), \
    (char)(0 == (LIBXSMM_GEMM_FLAG_TRANS_B & (FLAGS)) ? 'n' : 't'), \
    M, N, K, DALPHA, A, LDA, B, LDB, DBETA, C, LDC)

/**
 * Utility function, which either prints information about the GEMM call
 * or dumps (FILE/ostream=0) all input and output data into MHD files.
 * The Meta Image Format (MHD) is suitable for visual inspection using,
 * e.g., ITK-SNAP or ParaView.
 */
LIBXSMM_API void libxsmm_gemm_print(void* ostream,
  libxsmm_datatype precision, const char* transa, const char* transb,
  const libxsmm_blasint* m, const libxsmm_blasint* n, const libxsmm_blasint* k,
  const void* alpha, const void* a, const libxsmm_blasint* lda,
  const void* b, const libxsmm_blasint* ldb,
  const void* beta, void* c, const libxsmm_blasint* ldc);
/* Mixed-precision variant of libxsmm_gemm_print (separate input/output types). */
LIBXSMM_API void libxsmm_gemm_print2(void* ostream,
  libxsmm_datatype iprec, libxsmm_datatype oprec, const char* transa, const char* transb,
  const libxsmm_blasint* m, const libxsmm_blasint* n, const libxsmm_blasint* k,
  const void* alpha, const void* a, const libxsmm_blasint* lda,
  const void* b, const libxsmm_blasint* ldb,
  const void* beta, void* c, const libxsmm_blasint* ldc);

#endif /*LIBXSMM_UTILS_H*/
|
#include <cstdlib>
#include <new>
#include <utility>
/// Allocation policy pairing std::malloc/std::free with explicit
/// in-place construction and destruction.
struct MallocFreePolicy
{
    /// Allocates raw storage for a T and constructs it in place,
    /// forwarding the given constructor arguments.
    ///
    /// Throws std::bad_alloc when malloc fails: placement-new on a null
    /// pointer would be undefined behavior (the original code did not
    /// check the malloc result).
    template <typename T, typename... Args>
    static auto alloc(Args&&... args)
    {
        auto ptr = (T*)std::malloc(sizeof(T));
        if(ptr == nullptr)
        {
            throw std::bad_alloc{};
        }
        new(ptr) T(std::forward<Args>(args)...);
        return ptr;
    }

    /// Destroys the object and releases its storage.
    /// Only valid for pointers obtained from alloc().
    template <typename T>
    static void dealloc(T* ptr)
    {
        ptr->~T();
        std::free(ptr);
    }
};
/// Minimal single-owner smart pointer: releases the held object through
/// AllocPolicy::dealloc() when destroyed.
///
/// Fix over the original: the implicitly generated copy operations would
/// have duplicated `_ptr` and caused a double deallocation, so copying is
/// now deleted and ownership-transferring move operations are provided.
template <typename T, typename AllocPolicy = MallocFreePolicy>
struct UniquePtr
{
    T* _ptr;

    /// Takes ownership of `ptr` (which must have come from AllocPolicy).
    template <typename U>
    UniquePtr(U* ptr) : _ptr{ptr}
    {
    }

    // Copying would double-deallocate; ownership is unique.
    UniquePtr(const UniquePtr&) = delete;
    UniquePtr& operator=(const UniquePtr&) = delete;

    /// Move construction transfers ownership, leaving the source empty.
    UniquePtr(UniquePtr&& other) noexcept : _ptr{other._ptr}
    {
        other._ptr = nullptr;
    }

    /// Move assignment releases the current object (if any) before
    /// taking ownership from `other`.
    UniquePtr& operator=(UniquePtr&& other) noexcept
    {
        if(this != &other)
        {
            if(_ptr != nullptr)
            {
                AllocPolicy::dealloc(_ptr);
            }
            _ptr = other._ptr;
            other._ptr = nullptr;
        }
        return *this;
    }

    ~UniquePtr()
    {
        if(_ptr != nullptr)
        {
            AllocPolicy::dealloc(_ptr);
            _ptr = nullptr;
        }
    }
};
// Minimal payload type for exercising the allocator/pointer machinery:
// stores the single int passed at construction.
struct Foo
{
    int _data;
    Foo(int data) : _data{data}
    {
    }
};
int main()
{
    // The raw Foo* returned by alloc is adopted by the UniquePtr, whose
    // destructor destroys the Foo and frees its storage at end of main.
    UniquePtr<Foo> i = MallocFreePolicy::alloc<Foo>(100);
}
|
// ByteSliceDataSource creates a DataSource from a byte slice.
func ByteSliceDataSource(b []byte) FuncDataSource {
return FuncDataSource(func(w io.Writer) error {
_, err := io.Copy(w, bytes.NewReader(b))
return err
})
} |
<gh_stars>100-1000
package org.iotp.server.actors.rule;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import org.iotp.analytics.ruleengine.api.device.DeviceMetaData;
import org.iotp.analytics.ruleengine.api.rules.RuleContext;
import org.iotp.analytics.ruleengine.common.msg.device.ToDeviceActorMsg;
import org.iotp.infomgt.dao.alarm.AlarmService;
import org.iotp.infomgt.dao.event.EventService;
import org.iotp.infomgt.dao.timeseries.TimeseriesService;
import org.iotp.infomgt.data.Event;
import org.iotp.infomgt.data.alarm.Alarm;
import org.iotp.infomgt.data.id.AlarmId;
import org.iotp.infomgt.data.id.CustomerId;
import org.iotp.infomgt.data.id.DeviceId;
import org.iotp.infomgt.data.id.EntityId;
import org.iotp.infomgt.data.id.RuleId;
import org.iotp.infomgt.data.id.TenantId;
import org.iotp.server.actors.ActorSystemContext;
import com.google.common.util.concurrent.ListenableFuture;
/**
 * {@link RuleContext} implementation backed by the actor system's
 * persistence services (events, alarms, time-series), scoped to the
 * tenant/customer/device of the message currently being processed.
 */
public class RuleProcessingContext implements RuleContext {
  private final TimeseriesService tsService;
  private final EventService eventService;
  private final AlarmService alarmService;
  private final RuleId ruleId;
  // Per-message scope; refreshed by update() before each rule invocation.
  private TenantId tenantId;
  private CustomerId customerId;
  private DeviceId deviceId;
  private DeviceMetaData deviceMetaData;
  RuleProcessingContext(ActorSystemContext systemContext, RuleId ruleId) {
    this.tsService = systemContext.getTsService();
    this.eventService = systemContext.getEventService();
    this.alarmService = systemContext.getAlarmService();
    this.ruleId = ruleId;
  }
  /** Re-scopes this context to the device/tenant of the incoming message. */
  void update(ToDeviceActorMsg toDeviceActorMsg, DeviceMetaData deviceMetaData) {
    this.tenantId = toDeviceActorMsg.getTenantId();
    this.customerId = toDeviceActorMsg.getCustomerId();
    this.deviceId = toDeviceActorMsg.getDeviceId();
    this.deviceMetaData = deviceMetaData;
  }
  @Override
  public RuleId getRuleId() {
    return ruleId;
  }
  @Override
  public DeviceMetaData getDeviceMetaData() {
    return deviceMetaData;
  }
  /** Persists the event after stamping it with the current scope. */
  @Override
  public Event save(Event event) {
    checkEvent(event);
    return eventService.save(event);
  }
  @Override
  public Optional<Event> saveIfNotExists(Event event) {
    checkEvent(event);
    return eventService.saveIfNotExists(event);
  }
  @Override
  public Optional<Event> findEvent(String eventType, String eventUid) {
    return eventService.findEvent(tenantId, deviceId, eventType, eventUid);
  }
  @Override
  public Alarm createOrUpdateAlarm(Alarm alarm) {
    alarm.setTenantId(tenantId);
    return alarmService.createOrUpdateAlarm(alarm);
  }
  /**
   * Blocks on the asynchronous alarm lookup.
   *
   * @throws RuntimeException if the lookup fails or the thread is interrupted
   */
  public Optional<Alarm> findLatestAlarm(EntityId originator, String alarmType) {
    try {
      return Optional.ofNullable(alarmService.findLatestByOriginatorAndType(tenantId, originator, alarmType).get());
    } catch (InterruptedException e) {
      // Restore the interrupt flag so callers up the stack can still
      // observe the interruption (the original code swallowed it).
      Thread.currentThread().interrupt();
      throw new RuntimeException("Failed to lookup alarm!", e);
    } catch (ExecutionException e) {
      throw new RuntimeException("Failed to lookup alarm!", e);
    }
  }
  @Override
  public ListenableFuture<Boolean> clearAlarm(AlarmId alarmId, long clearTs) {
    return alarmService.clearAlarm(alarmId, clearTs);
  }
  // Fills in missing tenant/device identifiers on the event and rejects
  // events that belong to a different tenant.
  private void checkEvent(Event event) {
    if (event.getTenantId() == null) {
      event.setTenantId(tenantId);
    } else if (!tenantId.equals(event.getTenantId())) {
      throw new IllegalArgumentException("Invalid Tenant id!");
    }
    if (event.getEntityId() == null) {
      event.setEntityId(deviceId);
    }
  }
}
|
def brand_monitor(self, query, exclude=None, domain_status=None,
days_back=None, out_format="dict"):
req_dict = {}
self._add_query_param(req_dict, query, "|")
self._add_exclude_param(req_dict, exclude)
self._add_domain_status_param(req_dict, domain_status)
self._add_days_back_param(req_dict, days_back)
return self._invoke_service(req_dict, self._REQ_TOPIC_BRAND_MONITOR,
out_format) |
<filename>src/app/params.ts<gh_stars>1-10
/** Physical and numerical parameters for the heat/flow simulation. */
export class Params {
    lambda: number = 0.026; // J/(m*s*K) heat conductivity
    rho: number = 1.225; // kg/m^3 density
    c_p: number = 1004; // J/(kg*K) heat-capacity
    T_c: number = 273; // K cold-temp
    T_h: number = 273 + 5; // K hot-temp
    v_max: number = 2; // m/s max. abs. velocity
    duration: number = 60; // s duration
    includeBuoyancy: boolean = true; // whether buoyancy forcing is applied
    n_grid: number = 50; // number of grid cells per dimension
}
|
# "Dragons" puzzle: a hero with strength s fights n dragons; beating a
# dragon (requires strictly greater strength) adds its bonus to s.
# Print YES if all dragons can be defeated, NO otherwise.
s, n = (int(token) for token in input().split())
dragons = []
for _ in range(n):
    strength, bonus = (int(token) for token in input().split())
    dragons.append((strength, bonus))

# Greedy: with non-negative bonuses (as in the original problem's input
# constraints -- TODO confirm for other inputs) it is always optimal to
# fight the weakest dragon first, so a single pass over the dragons
# sorted by strength replaces the original O(n^2) repeated scan.
dragons.sort()
won = True
for strength, bonus in dragons:
    if s > strength:
        s += bonus
    else:
        won = False
        break

print("YES" if won else "NO")
/// Creates a new log, using a log::Store for storage.
pub fn new(store: Box<dyn log::Store>) -> Result<Self> {
let (commit_index, commit_term) = match store.committed() {
0 => (0, 0),
index => store
.get(index)?
.map(|v| Self::deserialize::<Entry>(&v))
.transpose()?
.map(|e| (e.index, e.term))
.ok_or_else(|| Error::Internal("Committed entry not found".into()))?,
};
let (last_index, last_term) = match store.len() {
0 => (0, 0),
index => store
.get(index)?
.map(|v| Self::deserialize::<Entry>(&v))
.transpose()?
.map(|e| (e.index, e.term))
.ok_or_else(|| Error::Internal("Last entry not found".into()))?,
};
Ok(Self { store, last_index, last_term, commit_index, commit_term })
} |
NEW DELHI: The Supreme Court has mocked CBI for proposing a three-year fixed tenure for its director as a means of securing its autonomy, saying having an institutional framework is what is important.
The Supreme Court’s comment on Wednesday came even as the Attorney General expressed the government’s reluctance to let go of its control over the agency by pointing to Section 6A of the Delhi Special Police Establishment Act, which makes it mandatory for the CBI to seek government approval before it can even investigate any official above the rank of joint secretary.
“Everybody wants a longer tenure,” Justice RM Lodha said. “The length is immaterial. What is important is that there should be an institutional framework. No institution should be individual-centric.”
The top court also observed that CBI directors should not take up any job after retirement. “No post-retirement office should be given to the director. We are weakening the courts by creating more and more tribunals,” Justice Lodha said.
“We talk performance, but don’t assess functioning.” Justice Lodha is part of a three judge bench that is dealing with the issue of CBI’s autonomy in the coal blocks allocation case. These have been challenged through two petitions — one filed by lawyer Manohar Lal Sharma and another by a nonprofit organisation whose cause is being espoused by lawyer Prashant Bhushan.
The court took up the issue of CBI autonomy amid a controversy over a former law minister and coal and PMO officials vetting the CBI’s status report into the probe.
Attorney General GE Vahanvati said Section 6A must be adhered to as long as it exists on the statute book. The top court, which had earlier sought the government’s suggestions on ensuring CBI’s autonomy, on Wednesday sought its response by August 6 on the agency’s counter-views.
The CBI’s suggestions are a mere variation of the government’s proposals to grant autonomy to the CBI. The agency’s counsel, Amarendra Sharan, said the government could easily concede those steps, which do not require any constitutional amendments. |
White Plains, N.Y. — News of Tom Brady and the New England Patriots' Deflategate suspensions traveled fast.
Giants quarterback Eli Manning was delivering remarks at a Guiding Eyes for the Blind sponsor recognition party in White Plains, New York, when the NFL released the news that Brady would be suspended for four games without pay for his involvement in under-inflating footballs used in the AFC Championship game. The Patriots will also be fined $1 million and will have to forfeit a 2016 first round pick and a 2017 fourth round pick. Manning's initial reaction to Ted Wells' Deflategate investigation was that the findings "didn't look too good," but he said he felt bad for his buddy Brady. "I figured something like this may happen, but Tom's been a friend of mine, I don't like to see anybody getting suspended, I don't like to see anybody get in trouble, I don't like to see anything happen to the NFL, or another player or quarterback," Manning said. "In no way am I glad to see this happen." Still, Manning acknowledged the seriousness of the issue, as evidenced by the harsh punishment, and noted that there is an integrity issue at play. "Any time you lose a starting quarterback for four games and draft picks, it's a pretty big statement," Manning said. "The NFL is serious about not messing with the integrity of the game, no matter how big or little the issue is." Manning did not think that the Deflategate punishment would tarnish Brady's legacy, calling him "one of the best in the business." But he also made it clear where he stands on whether throwing an under-inflated ball makes a difference.
"I have studied it a little bit over the last few months and felt a few air pressures to see what it feels like and it is a way of getting an advantage and breaking the rules. I guess it's the short story to it all.
[Embedded video: "Eli Manning on Tom Brady's suspension #DeflateGate" (New England Patriots, New York Giants) — posted by SNY on Tuesday, May 12, 2015. Full clip: http://on.sny.tv/1QFeXkn]
"There is a difference, there is a noticeable difference. Whether it's an advantage or not, I guess that's all dependent on what a QB likes or what it's like in cold weather or if it's wet, there might be other factors to it."
Nick Powell may be reached at [email protected]. Follow him on Twitter @nickpowellbkny. Find NJ.com Giants on Facebook. |
<gh_stars>0
"""Story graph for the interactive storytelling system DoppioGioco."""
from collections import defaultdict
import csv
import os
from interactive_story.tension_evaluation import get_tension_value
from interactive_story.story_graph import StoryGraph
STORY_GRAPH_CSV = os.path.join(os.path.dirname(__file__),
'data/story_graph.csv')
UNIT_EMOTIONS_CSV = os.path.join(os.path.dirname(__file__),
'data/unit_emotions.csv')
UNIT_DETAILS_CSV = os.path.join(os.path.dirname(__file__),
'data/unit_details.csv')
UNIT_TEXTS_CSV = os.path.join(os.path.dirname(__file__),
'data/unit_texts.csv')
class DoppioGiocoStoryGraph(StoryGraph):
    """Extends the StoryGraph class to fit DoppioGioco.

    Construction eagerly loads, from the CSV files bundled with the
    package: the graph topology, the emotion annotated on each unit,
    the clip URI / initial / final flags, and the text of each unit.
    """
    def __init__(self):
        """Initialize the story graph for DoppioGioco."""
        super().__init__()
        self.load_from_csv(STORY_GRAPH_CSV)
        # Plain dicts: no default factory was ever supplied, so the
        # previous bare defaultdict() instances behaved exactly like dict
        # (missing keys raise KeyError); the plain dict states that intent.
        self._emotions = {}
        self.load_emotions_from_csv(UNIT_EMOTIONS_CSV)
        self.tension_function = get_tension_value
        self._clip_uris = {}
        self._initials = set()
        self._finals = set()
        self.load_units_details_from_csv(UNIT_DETAILS_CSV)
        self._texts = {}
        self.load_unit_texts_from_csv(UNIT_TEXTS_CSV)
    def load_emotions_from_csv(self, emotions_csv):
        """Extract the emotions associated with units from a CSV file."""
        # NOTE(review): opened with the platform default encoding, unlike
        # the texts CSV -- confirm this file is ASCII-only.
        with open(emotions_csv, 'r') as csv_file:
            emotions_csv_reader = csv.reader(csv_file, delimiter=',')
            csv_it = iter(emotions_csv_reader)
            next(csv_it)  # skip the header row
            for pair in csv_it:
                title, emotion = pair[0], pair[1]
                if self.belongs_to_graph(title):
                    # annotate the emotion only if the unit actually belongs
                    # to the story graph, otherwise it is useless
                    self.annotate_emotion(title, emotion)
    def load_units_details_from_csv(self, details_csv):
        """Load all unit details (clip URI, initial/final flags) from a CSV
        file; "NULL" marks a missing clip URI."""
        with open(details_csv, 'r') as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            csv_it = iter(csv_reader)
            next(csv_it)  # skip the header row
            for detail in csv_it:
                title, clip_uri, initial, final = detail
                if self.belongs_to_graph(title):
                    # add unit details only if the unit actually belongs to the
                    # story graph
                    if clip_uri != "NULL":
                        self._clip_uris[title] = clip_uri
                    if int(initial) == 1:
                        self._initials.add(title)
                    if int(final) == 1:
                        self._finals.add(title)
    def load_unit_texts_from_csv(self, texts_csv):
        """Load all unit texts from a CSV file.
        There is a separate CSV for texts for two reasons:
        * texts are very long, hence the details CSV is much smaller without
        them;
        * texts may be problematic for encoding, so it is better to handle them
        separately.
        """
        with open(texts_csv, 'r', encoding='utf8') as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            csv_it = iter(csv_reader)
            next(csv_it)  # skip the header row
            for row in csv_it:
                # skip empty lines
                if not row:  # empty sequences are false
                    continue
                title, text = row
                if self.belongs_to_graph(title):
                    # add unit details only if the unit actually belongs to the
                    # story graph
                    if text != "NULL":
                        self._texts[title] = text
    def has_emotion(self, unit):
        """Whether or not the unit has an annotated emotion."""
        return unit in self._emotions.keys()
    def annotate_emotion(self, unit, emotion):
        """Annotate a unit with an emotion."""
        self._emotions[unit] = emotion
    def get_unit_emotion(self, unit):
        """Get the emotion associated with a unit (KeyError if absent)."""
        return self._emotions[unit]
    def get_unit_tension(self, unit):
        """Get the tension value for a single unit (0 if unannotated)."""
        if self.has_emotion(unit):
            tension_value = self.tension_function(self.get_unit_emotion(unit))
        else:
            tension_value = 0
        return tension_value
    def has_clip(self, unit):
        """Whether or not the unit has an associated clip."""
        return unit in self._clip_uris.keys()
    def get_unit_clip(self, unit):
        """Get the clip URI associated to the given unit."""
        return self._clip_uris[unit]
    def has_text(self, unit):
        """Whether or not the unit has an associated text."""
        return unit in self._texts.keys()
    def get_unit_text(self, unit):
        """Get the text associated to the given unit."""
        return self._texts[unit]
    def get_html_linear_story(self, story):
        """Create an HTML page to display a linear story.

        Each unit becomes a table row with its (escaped) title and text;
        missing texts are rendered as "missing".
        """
        import html
        html_story = """<html>
        <head>
        <meta charset="UTF-8">
        </head>
        <body>
        <table>"""
        for unit in story:
            html_story += '<tr>'
            # add unit title
            html_story += '<td>{}</td>\n'.format(html.escape(unit))
            if self.has_text(unit):
                text = self.get_unit_text(unit)
                text = html.escape(text).replace('\n', '<br />')
            else:
                text = "missing"
            # add unit text
            html_story += '<td>{}</td>\n'.format(text)
            html_story += '</tr>\n'
        html_story += """</body>
        </html>"""
        return html_story
    def get_graphviz_graph(self):
        """Display the graph in a graphical way, using graphviz."""
        from graphviz import Digraph
        graph = Digraph(name=self.__class__.__name__, format='pdf')
        for unit in sorted(self.get_nodes()):
            if unit == '000':
                # '000' is the start unit, drawn black with white text
                color, fontcolor = '#000000', '#ffffff'
            else:
                color, fontcolor = self._get_emotion_color(
                    self.get_unit_emotion(unit))
            graph.node(unit, style='filled', color='black',
                       fillcolor=color, fontcolor=fontcolor)
        graph.edges(self._get_ordered_edge_list())
        # set orientation to be left to right (LR)
        graph.graph_attr.update(rankdir='LR')
        # node displayed as boxes and not as ellipses
        graph.node_attr.update(shape='circle')
        # group together similar units
        graph.body.append(self._get_unit_ranks())
        return graph
    @staticmethod
    def _get_emotion_color(emotion):
        # helper for drawing the graph, associate to each emotion a background
        # color and a text color
        positive_high = ('#0000ff', '#ffffff')
        positive_low = ('#ffc0bf', '#000000')
        negative_low = ('#c0c0ff', '#000000')
        negative_high = ('#ff0000', '#ffffff')
        emotions_to_color = {
            "joy": positive_high,
            "amusement": positive_high,
            "pride": positive_high,
            "pleasure": positive_low,
            "relief": positive_low,
            "interest": positive_low,
            "hot anger": negative_high,
            "panic fear": negative_high,
            "despair": negative_high,
            "irritation": negative_low,
            "anxiety": negative_low,
            "sadness": negative_low
        }
        return emotions_to_color[emotion]
    @staticmethod
    def _get_unit_ranks():
        # helper for drawing the graph, group together similar units
        return """{rank = same; 001 002 003 004}
        {rank = same; 005 006 007 010}
        {rank = same; 009 011 012 013}
        {rank = same; 014 015 016}
        {rank = same; 017 018 019 020 021}
        {rank = same; 022 023 024 025}
        {rank = same; 061 026 027 028}
        {rank = same; 029 030 031 032}
        {rank = same; 033 034 035 036}
        {rank = same; 037 038 039 040}
        {rank = same; 041 042 047 048}
        {rank = same; 049 050 051 052}
        {rank = same; 053 054 055 056}
        {rank = same; 057 058 059 060 062}
        {rank = same; 063 064 065 066}
        {rank = same; 067 068 069 070}
        {rank = same; 071 072 073 074}
        {rank = same; 075 076 077 078}
        {rank = same; 079 080 081 082}
        {rank = same; 083 084 085 086}
        {rank = same; 087 088 089 090}
        {rank = same; 107 108 109 110}"""
|
#coding:utf-8
# Bootstrap a Celery client application, then fire one test task:
# routes 'access.celery.hello.hello' through the 'test_server' service
# with a single positional argument.
import sys
import getopt
from camel.biz.application.celerysrv import CeleryApplicationClient,AsClient,setup,instance,celery
setup(CeleryApplicationClient)
instance.celeryManager.getService('test_server').send_task('access.celery.hello.hello',args=['sss',])
|
<filename>src/spacecharge/wrap_forcesolverfft2d.cc
#include "orbit_mpi.hh"
#include "pyORBIT_Object.hh"
#include "ForceSolverFFT2D.hh"
#include "Grid2D.hh"
#include "wrap_forcesolverfft2d.hh"
#include "wrap_spacecharge.hh"
#include <iostream>
using namespace OrbitUtils;
namespace wrap_spacecharge{
#ifdef __cplusplus
extern "C" {
#endif
  //---------------------------------------------------------
  //Python forceSolverFFT2D class definition
  //---------------------------------------------------------

	//constructor for python class wrapping forceSolverFFT2D instance
	//It never will be called directly
	//Allocates the Python-level object with a NULL C++ pointer; the C++
	//instance is created later in __init__ (ForceSolverFFT2D_init).
	static PyObject* ForceSolverFFT2D_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
	{
		pyORBIT_Object* self;
		self = (pyORBIT_Object *) type->tp_alloc(type, 0);
		self->cpp_obj = NULL;
		//std::cerr<<"The ForceSolverFFT2D new has been called!"<<std::endl;
		return (PyObject *) self;
	}
  //initializator for python ForceSolverFFT2D class
  //this is implementation of the __init__ method
  //Parses the (nX, nY) grid sizes and constructs the underlying C++
  //ForceSolverFFT2D; aborts the MPI run on bad arguments.
  static int ForceSolverFFT2D_init(pyORBIT_Object *self, PyObject *args, PyObject *kwds){
	  int xSize, ySize;
	  if(!PyArg_ParseTuple(args,"ii:__init__",&xSize,&ySize)){
		  ORBIT_MPI_Finalize("PyForceSolverFFT2D - ForceSolverFFT2D(nX,nY) - constructor needs parameters.");
	  }
	  self->cpp_obj = new ForceSolverFFT2D(xSize,ySize);
	  ((ForceSolverFFT2D*) self->cpp_obj)->setPyWrapper((PyObject*) self);
	  //std::cerr<<"The ForceSolverFFT2D __init__ has been called!"<<std::endl;
	  return 0;
  }
  //-----------------------------------------------------------------
  //Read-only accessors: each unwraps the C++ ForceSolverFFT2D from the
  //Python object and returns one scalar property of its grid.
  //-----------------------------------------------------------------

  //get grid size in X
  static PyObject* ForceSolverFFT2D_getSizeX(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("i",cpp_ForceSolverFFT2D->getSizeX());
  }

  //get grid size in Y
  static PyObject* ForceSolverFFT2D_getSizeY(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("i",cpp_ForceSolverFFT2D->getSizeY());
  }

  // getMaxX()
  static PyObject* ForceSolverFFT2D_getMaxX(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("d",cpp_ForceSolverFFT2D->getMaxX());
  }

  // getMaxY()
  static PyObject* ForceSolverFFT2D_getMaxY(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("d",cpp_ForceSolverFFT2D->getMaxY());
  }

  // getMinX()
  static PyObject* ForceSolverFFT2D_getMinX(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("d",cpp_ForceSolverFFT2D->getMinX());
  }

  // getMinY()
  static PyObject* ForceSolverFFT2D_getMinY(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("d",cpp_ForceSolverFFT2D->getMinY());
  }

  // getStepX()
  static PyObject* ForceSolverFFT2D_getStepX(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("d",cpp_ForceSolverFFT2D->getStepX());
  }

  // getStepY()
  static PyObject* ForceSolverFFT2D_getStepY(PyObject *self, PyObject *args){
	  pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
	  return Py_BuildValue("d",cpp_ForceSolverFFT2D->getStepY());
  }
//findForce(Grid2D* rhoGrid2D,Grid2D* forceGridX, Grid2D* forceGridY)
static PyObject* ForceSolverFFT2D_findForce(PyObject *self, PyObject *args){
pyORBIT_Object* pyForceSolverFFT2D = (pyORBIT_Object*) self;
ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) pyForceSolverFFT2D->cpp_obj;
PyObject* pyRhoG;
PyObject* pyForceGX;
PyObject* pyForceGY;
if(!PyArg_ParseTuple(args,"OOO:__init__",&pyRhoG,&pyForceGX,&pyForceGY)){
ORBIT_MPI_Finalize("PyForceSolverFFT2D.findForce(Grid2D rhoGrid2D,Grid2D forceXGrid2D, forceYGrid2D) - method needs parameters.");
}
PyObject* pyORBIT_Grid2D_Type = getSpaceChargeType("Grid2D");
if(!PyObject_IsInstance(pyRhoG,pyORBIT_Grid2D_Type) || !PyObject_IsInstance(pyForceGX,pyORBIT_Grid2D_Type) || !PyObject_IsInstance(pyForceGY,pyORBIT_Grid2D_Type)){
ORBIT_MPI_Finalize("PyForceSolverFFT2D.findForce(Grid2D rhoGrid2D,Grid2D forceXGrid2D, forceYGrid2D) - method needs parameters.");
}
Grid2D* grid2D_rho = (Grid2D*)(((pyORBIT_Object*) pyRhoG)->cpp_obj);
Grid2D* grid2D_xforce = (Grid2D*)(((pyORBIT_Object*) pyForceGX)->cpp_obj);
Grid2D* grid2D_yforce = (Grid2D*)(((pyORBIT_Object*) pyForceGY)->cpp_obj);
cpp_ForceSolverFFT2D->findForce(grid2D_rho,grid2D_xforce,grid2D_yforce);
Py_INCREF(Py_None);
return Py_None;
}
  //-----------------------------------------------------
  //destructor for python ForceSolverFFT2D class (__del__ method).
  //Deletes the wrapped C++ instance (if any), then releases the
  //Python object's memory through its type's tp_free slot.
  //-----------------------------------------------------
  static void ForceSolverFFT2D_del(pyORBIT_Object* self){
	  //std::cerr<<"The ForceSolverFFT2D __del__ has been called! 0"<<std::endl;
	  ForceSolverFFT2D* cpp_ForceSolverFFT2D = (ForceSolverFFT2D*) self->cpp_obj;
	  if(cpp_ForceSolverFFT2D != NULL){
		  delete cpp_ForceSolverFFT2D;
	  }
	  self->ob_type->tp_free((PyObject*)self);
	  //std::cerr<<"The ForceSolverFFT2D __del__ has been called! 1"<<std::endl;
  }
  // definition of the methods of the python ForceSolverFFT2D wrapper class
  // they will be available from python level
  static PyMethodDef ForceSolverFFT2DClassMethods[] = {
		{ "getSizeX", ForceSolverFFT2D_getSizeX, METH_VARARGS,"returns the number of grid points in x-direction"},
		{ "getSizeY", ForceSolverFFT2D_getSizeY, METH_VARARGS,"returns the number of grid points in y-direction"},
		{ "getMaxX", ForceSolverFFT2D_getMaxX, METH_VARARGS,"returns max grid value in x-direction"},
		{ "getMaxY", ForceSolverFFT2D_getMaxY, METH_VARARGS,"returns max grid value in y-direction"},
		{ "getMinX", ForceSolverFFT2D_getMinX, METH_VARARGS,"returns min grid value in x-direction"},
		{ "getMinY", ForceSolverFFT2D_getMinY, METH_VARARGS,"returns min grid value in y-direction"},
		{ "getStepX", ForceSolverFFT2D_getStepX, METH_VARARGS,"returns grid step in x-direction"},
		{ "getStepY", ForceSolverFFT2D_getStepY, METH_VARARGS,"returns grid step in y-direction"},
		{ "findForce", ForceSolverFFT2D_findForce, METH_VARARGS,"findForce(Grid2D rhoGrid2D,Grid2D forceXGrid2D, Grid2D forceYGrid2D)"},
		{NULL}
  };

  // definition of the members of the python ForceSolverFFT2D wrapper class
  // they will be available from python level
  // (no data members are exposed; the sentinel terminates the table)
  static PyMemberDef ForceSolverFFT2DClassMembers [] = {
		{NULL}
  };
	//new python ForceSolverFFT2D wrapper type definition
	//(Python 2 PyTypeObject layout: unused slots are zeroed; only
	//dealloc, flags, doc, methods, members, init and new are filled in)
	static PyTypeObject pyORBIT_ForceSolverFFT2D_Type = {
		PyObject_HEAD_INIT(NULL)
		0, /*ob_size*/
		"ForceSolverFFT2D", /*tp_name*/
		sizeof(pyORBIT_Object), /*tp_basicsize*/
		0, /*tp_itemsize*/
		(destructor) ForceSolverFFT2D_del , /*tp_dealloc*/
		0, /*tp_print*/
		0, /*tp_getattr*/
		0, /*tp_setattr*/
		0, /*tp_compare*/
		0, /*tp_repr*/
		0, /*tp_as_number*/
		0, /*tp_as_sequence*/
		0, /*tp_as_mapping*/
		0, /*tp_hash */
		0, /*tp_call*/
		0, /*tp_str*/
		0, /*tp_getattro*/
		0, /*tp_setattro*/
		0, /*tp_as_buffer*/
		Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/
		"The ForceSolverFFT2D python wrapper", /* tp_doc */
		0, /* tp_traverse */
		0, /* tp_clear */
		0, /* tp_richcompare */
		0, /* tp_weaklistoffset */
		0, /* tp_iter */
		0, /* tp_iternext */
		ForceSolverFFT2DClassMethods, /* tp_methods */
		ForceSolverFFT2DClassMembers, /* tp_members */
		0, /* tp_getset */
		0, /* tp_base */
		0, /* tp_dict */
		0, /* tp_descr_get */
		0, /* tp_descr_set */
		0, /* tp_dictoffset */
		(initproc) ForceSolverFFT2D_init, /* tp_init */
		0, /* tp_alloc */
		ForceSolverFFT2D_new, /* tp_new */
	};
	//--------------------------------------------------
	//Initialization function of the pyForceSolverFFT2D class
	//It will be called from SpaceCharge wrapper initialization
	//Finalizes the type object and registers it on the given module;
	//silently returns if PyType_Ready fails.
	//--------------------------------------------------
  void initForceSolverFFT2D(PyObject* module){
		if (PyType_Ready(&pyORBIT_ForceSolverFFT2D_Type) < 0) return;
		Py_INCREF(&pyORBIT_ForceSolverFFT2D_Type);
		PyModule_AddObject(module, "ForceSolverFFT2D", (PyObject *)&pyORBIT_ForceSolverFFT2D_Type);
		//std::cout<<"debug ForceSolverFFT2D added! "<<std::endl;
	}
#ifdef __cplusplus
}
#endif
//end of namespace wrap_spacecharge
}
|
/*
* Copyright The Cryostat Authors
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or data
* (collectively the "Software"), free of charge and under any and all copyright
* rights in the Software, and any and all patent rights owned or freely
* licensable by each licensor hereunder covering either (i) the unmodified
* Software as contributed to or provided by such licensor, or (ii) the Larger
* Works (as defined below), to deal in both
*
* (a) the Software, and
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software (each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
* The above copyright notice and either this complete permission notice or at
* a minimum a reference to the UPL must be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.cryostat.messaging;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import io.cryostat.core.log.Logger;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import io.vertx.core.Handler;
import io.vertx.core.http.ServerWebSocket;
import jdk.jfr.Category;
import jdk.jfr.Event;
import jdk.jfr.Label;
import jdk.jfr.Name;
/**
 * Per-connection WebSocket client handle: buffers inbound text frames in
 * a queue for a (possibly blocking) reader, and emits outbound messages
 * while recording a JFR event per write.
 */
class WsClient implements AutoCloseable, Handler<String> {

    private final Logger logger;
    // Inbound frames queued until a reader thread consumes them.
    private final BlockingQueue<String> inQ = new LinkedBlockingQueue<>();
    private final ServerWebSocket sws;

    // Guards readingThread so close() can interrupt a blocked reader.
    private final Object threadLock = new Object();
    private Thread readingThread;

    WsClient(Logger logger, ServerWebSocket sws) {
        this.logger = logger;
        this.sws = sws;
    }

    /** Vert.x callback: log and enqueue an incoming text frame. */
    @Override
    public void handle(String msg) {
        logger.info("({}): CMD {}", this.sws.remoteAddress().toString(), msg);
        inQ.add(msg);
    }

    /**
     * Blocks until a message arrives; returns {@code null} if the waiting
     * thread is interrupted (e.g. by {@link #close()}).
     */
    String readMessage() {
        try {
            synchronized (threadLock) {
                readingThread = Thread.currentThread();
            }
            return inQ.take();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the
            // interruption (it was previously swallowed).
            Thread.currentThread().interrupt();
            return null;
        } finally {
            synchronized (threadLock) {
                readingThread = null;
            }
        }
    }

    /**
     * Writes a text frame to the socket (no-op if already closed),
     * wrapping the write in a WsMessageEmitted JFR event. Write failures
     * are logged and recorded on the event rather than rethrown.
     */
    void writeMessage(String message) {
        if (!this.sws.isClosed()) {
            WsMessageEmitted evt =
                    new WsMessageEmitted(
                            sws.remoteAddress().host(),
                            sws.remoteAddress().port(),
                            sws.uri(),
                            message.length());
            evt.begin();

            try {
                this.sws.writeTextMessage(message);
            } catch (Exception e) {
                logger.warn(e);
                evt.setExceptionThrown(true);
            } finally {
                evt.end();
                if (evt.shouldCommit()) {
                    evt.commit();
                }
            }
        }
    }

    /** JFR event describing one outbound WebSocket message. */
    @Name("io.cryostat.messaging.WsClient.WsMessageEmitted")
    @Label("WebSocket Message Emitted")
    @Category("Cryostat")
    @SuppressFBWarnings(
            value = "URF_UNREAD_FIELD",
            justification = "Event fields are recorded with JFR instead of accessed directly")
    public static class WsMessageEmitted extends Event {
        String host;
        int port;
        String path;
        int msgLen;
        boolean exceptionThrown;

        public WsMessageEmitted(String host, int port, String path, int msgLen) {
            this.host = host;
            this.port = port;
            this.path = path;
            this.msgLen = msgLen;
            this.exceptionThrown = false;
        }

        public void setExceptionThrown(boolean exceptionThrown) {
            this.exceptionThrown = exceptionThrown;
        }
    }

    /** Drops any buffered messages and interrupts a blocked reader. */
    @Override
    public void close() {
        inQ.clear();
        synchronized (threadLock) {
            if (readingThread != null) {
                readingThread.interrupt();
            }
        }
    }
}
|
// PushPredicate implements the Operator interface
func (d *Derived) PushPredicate(expr sqlparser.Expr, semTable *semantics.SemTable) error {
tableInfo, err := semTable.TableInfoForExpr(expr)
if err != nil {
if err == semantics.ErrMultipleTables {
return semantics.ProjError{Inner: vterrors.Errorf(vtrpcpb.Code_UNIMPLEMENTED, "unsupported: unable to split predicates to derived table: %s", sqlparser.String(expr))}
}
return err
}
newExpr, err := semantics.RewriteDerivedExpression(expr, tableInfo)
if err != nil {
return err
}
return d.Inner.PushPredicate(newExpr, semTable)
} |
/**
 * Jackson-friendly version of {@link Categories}.
 * Each field is nullable: {@code null} represents an absent category and
 * round-trips to/from {@code Optional.empty()} in the model type.
 */
public class JsonAdaptedCategories {
    private final String cuisine;
    private final String occasion;
    private final String priceRange;

    /**
     * Constructs a {@code JsonAdaptedCategories} with the given categories details.
     */
    @JsonCreator
    public JsonAdaptedCategories(@JsonProperty("cuisine") String cuisine, @JsonProperty("occasion") String occasion,
                                 @JsonProperty("priceRange") String priceRange) {
        this.cuisine = cuisine;
        this.occasion = occasion;
        this.priceRange = priceRange;
    }

    /**
     * Converts a given {@code Categories} into this class for Jackson use.
     * Empty Optionals are stored as {@code null}.
     */
    public JsonAdaptedCategories(Categories categories) {
        this.cuisine = categories.getCuisine().map(cuisine -> cuisine.value).orElse(null);
        this.occasion = categories.getOccasion().map(occasion -> occasion.value).orElse(null);
        this.priceRange = categories.getPriceRange().map(priceRange -> priceRange.value).orElse(null);
    }

    /**
     * Converts this Jackson-friendly adapted categories object into the model's {@code Categories} object.
     * Validation is skipped for {@code null} (absent) fields.
     *
     * @throws IllegalValueException if there were any data constraints violated in the adapted categories.
     */
    public Categories toModelType() throws IllegalValueException {
        if (cuisine != null && !Cuisine.isValidCuisine(cuisine)) {
            throw new IllegalValueException(Cuisine.MESSAGE_CONSTRAINTS);
        }
        if (occasion != null && !Occasion.isValidOccasion(occasion)) {
            throw new IllegalValueException(Occasion.MESSAGE_CONSTRAINTS);
        }
        if (priceRange != null && !PriceRange.isValidPriceRange(priceRange)) {
            throw new IllegalValueException(PriceRange.MESSAGE_CONSTRAINTS);
        }

        final Optional<Cuisine> modelCuisine = Optional.ofNullable(cuisine).map(Cuisine::new);
        final Optional<Occasion> modelOccasion = Optional.ofNullable(occasion).map(Occasion::new);
        final Optional<PriceRange> modelPriceRange = Optional.ofNullable(priceRange).map(PriceRange::new);
        return new Categories(modelCuisine, modelOccasion, modelPriceRange);
    }
}
// RUN: %clang_cc1 %s -fno-rtti -triple=i386-pc-win32 -emit-llvm -o - | FileCheck %s
// See microsoft-abi-structors.cpp for constructor codegen tests.
// NOTE(review): every [N x i32] constant below is an expected vbtable: a list
// of byte offsets from the vbptr to each virtual base, derived from the i386
// record layouts. Struct members and expected offsets must be edited together.
namespace Test1 {
// Classic diamond, fully virtual.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual A { int c; };
struct D : virtual B, virtual C { int d; };
D d; // Force vbtable emission.
// Layout should be:
// D: vbptr D
// int d
// A: int a
// B: vbptr B
// int b
// C: vbptr C
// int c
// CHECK-DAG: @"??_8D@Test1@@7B01@@" = linkonce_odr unnamed_addr constant [4 x i32] [i32 0, i32 8, i32 12, i32 20], comdat, align 4
// CHECK-DAG: @"??_8D@Test1@@7BB@1@@" = {{.*}} [2 x i32] [i32 0, i32 -4]
// CHECK-DAG: @"??_8D@Test1@@7BC@1@@" = {{.*}} [2 x i32] [i32 0, i32 -12]
// CHECK-DAG: @"??_8C@Test1@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
// CHECK-DAG: @"??_8B@Test1@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
}
namespace Test2 {
// Classic diamond, only A is virtual.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual A { int c; };
struct D : B, C { int d; };
D d; // Force vbtable emission.
// Layout should be:
// B: vbptr B
// int b
// C: vbptr C
// int c
// D: int d
// A: int a
// CHECK-DAG: @"??_8D@Test2@@7BB@1@@" = {{.*}} [2 x i32] [i32 0, i32 20]
// CHECK-DAG: @"??_8D@Test2@@7BC@1@@" = {{.*}} [2 x i32] [i32 0, i32 12]
// CHECK-DAG: @"??_8C@Test2@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
// CHECK-DAG: @"??_8B@Test2@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
}
namespace Test3 {
struct A { int a; };
struct B { int b; };
struct C : virtual A, virtual B { int c; };
C c;
// CHECK-DAG: @"??_8C@Test3@@7B@" = {{.*}} [3 x i32] [i32 0, i32 8, i32 12]
}
namespace Test4 {
// Test reusing a vbptr from a non-virtual base.
struct A { int a; };
struct B : virtual A { int b; };
struct C : B, virtual A { int c; };
C c; // Force vbtable emission.
// CHECK-DAG: @"??_8C@Test4@@7B@" = {{.*}} [2 x i32] [i32 0, i32 12]
// CHECK-DAG: @"??_8B@Test4@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
}
// NOTE(review): Tests 5-9 exercise repeated base subobjects of one type,
// vbptr reuse, and base-path name compression in the vbtable mangled names.
namespace Test5 {
// Test multiple base subobjects of the same type when that type has a virtual
// base.
struct A { int a; };
struct B : virtual A { int b; };
struct C : B { int c; };
struct D : B, C { int d; };
D d; // Force vbtable emission.
// CHECK-DAG: @"??_8D@Test5@@7BB@1@@"
// CHECK-DAG: @"??_8D@Test5@@7BC@1@@"
// CHECK-DAG: @"??_8C@Test5@@7B@"
// CHECK-DAG: @"??_8B@Test5@@7B@"
}
namespace Test6 {
// Test that we skip unneeded base path component names.
struct A { int a; };
struct B : virtual A { int b; };
struct C : B { int c; };
struct D : B, C { int d; };
struct E : D { int e; };
struct F : E, B, C { int f; };
struct G : F, virtual E { int g; };
G g;
// CHECK-DAG: @"??_8G@Test6@@7BB@1@E@1@F@1@@" =
// CHECK-DAG: @"??_8G@Test6@@7BC@1@E@1@F@1@@" =
// CHECK-DAG: @"??_8G@Test6@@7BB@1@F@1@@" =
// CHECK-DAG: @"??_8G@Test6@@7BC@1@F@1@@" =
// CHECK-DAG: @"??_8G@Test6@@7BB@1@E@1@@" =
// CHECK-DAG: @"??_8G@Test6@@7BC@1@E@1@@" =
// CHECK-DAG: @"??_8F@Test6@@7BB@1@E@1@@" = {{.*}} [2 x i32] [i32 0, i32 52]
// CHECK-DAG: @"??_8F@Test6@@7BC@1@E@1@@" = {{.*}} [2 x i32] [i32 0, i32 44]
// CHECK-DAG: @"??_8F@Test6@@7BB@1@@" = {{.*}} [2 x i32] [i32 0, i32 24]
// CHECK-DAG: @"??_8F@Test6@@7BC@1@@" = {{.*}} [2 x i32] [i32 0, i32 16]
// CHECK-DAG: @"??_8C@Test6@@7B@" = {{.*}} [2 x i32] [i32 0, i32 12]
// CHECK-DAG: @"??_8B@Test6@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
// CHECK-DAG: @"??_8E@Test6@@7BB@1@@" = {{.*}} [2 x i32] [i32 0, i32 28]
// CHECK-DAG: @"??_8E@Test6@@7BC@1@@" = {{.*}} [2 x i32] [i32 0, i32 20]
// CHECK-DAG: @"??_8D@Test6@@7BB@1@@" = {{.*}} [2 x i32] [i32 0, i32 24]
// CHECK-DAG: @"??_8D@Test6@@7BC@1@@" = {{.*}} [2 x i32] [i32 0, i32 16]
}
namespace Test7 {
// Test a non-virtual base which reuses the vbptr of another base.
struct A { int a; };
struct B { int b; };
struct C { int c; };
struct D : virtual A { int d; };
struct E : B, D, virtual A, virtual C { int e; };
E o;
// CHECK-DAG: @"??_8E@Test7@@7B@" = {{.*}} [3 x i32] [i32 0, i32 12, i32 16]
// CHECK-DAG: @"??_8D@Test7@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
}
namespace Test8 {
// Test a virtual base which reuses the vbptr of another base.
struct A { int a; };
struct B : virtual A { int b; };
struct C : B { int c; };
struct D : virtual C { int d; };
D o;
// CHECK-DAG: @"??_8D@Test8@@7B01@@" = {{.*}} [3 x i32] [i32 0, i32 8, i32 12]
// CHECK-DAG: @"??_8D@Test8@@7BC@1@@" = {{.*}} [2 x i32] [i32 0, i32 -4]
// CHECK-DAG: @"??_8C@Test8@@7B@" = {{.*}} [2 x i32] [i32 0, i32 12]
// CHECK-DAG: @"??_8B@Test8@@7B@" = {{.*}} [2 x i32] [i32 0, i32 8]
}
namespace Test9 {
// D has to add to B's vbtable because D has more morally virtual bases than B.
// D then takes B's vbptr and the vbtable is named for D, not B.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; };
struct BB : B { int bb; }; // Indirection =/
struct D : BB, C { int d; };
struct E : virtual D { };
E e;
// CHECK-DAG: @"??_8E@Test9@@7B01@@" =
// CHECK-DAG: @"??_8E@Test9@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test9@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test9@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test9@@7B@" =
// CHECK-DAG: @"??_8D@Test9@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test9@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test9@@7B01@@" =
// CHECK-DAG: @"??_8C@Test9@@7BB@1@@" =
// CHECK-DAG: @"??_8BB@Test9@@7B@" =
// CHECK-DAG: @"??_8B@Test9@@7B@" =
}
// NOTE(review): Tests 10-14 cover diamonds with extra single-inheritance
// indirection and vbptrs that live inside virtual bases.
namespace Test10 {
struct A { int a; };
struct B { int b; };
struct C : virtual A { int c; };
struct D : B, C { int d; };
D d;
// CHECK-DAG: @"??_8D@Test10@@7B@" =
// CHECK-DAG: @"??_8C@Test10@@7B@" =
}
namespace Test11 {
// Typical diamond with an extra single inheritance indirection for B and C.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual A { int c; };
struct D : B { int d; };
struct E : C { int e; };
struct F : D, E { int f; };
F f;
// CHECK-DAG: @"??_8F@Test11@@7BD@1@@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 28]
// CHECK-DAG: @"??_8F@Test11@@7BE@1@@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 16]
// CHECK-DAG: @"??_8E@Test11@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 12]
// CHECK-DAG: @"??_8C@Test11@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
// CHECK-DAG: @"??_8D@Test11@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 12]
// CHECK-DAG: @"??_8B@Test11@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
}
namespace Test12 {
// Another vbptr inside a virtual base.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; };
struct D : C, B { int d; };
struct E : D, C, B { int e; };
E e;
// CHECK-DAG: @"??_8E@Test12@@7BC@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test12@@7BB@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test12@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test12@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test12@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test12@@7B01@@" =
// CHECK-DAG: @"??_8C@Test12@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test12@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test12@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test12@@7B@" =
// CHECK-DAG: @"??_8B@Test12@@7B@" =
}
namespace Test13 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; };
struct D : virtual C { int d; };
struct E : D, C, B { int e; };
E e;
// CHECK-DAG: @"??_8E@Test13@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test13@@7BC@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test13@@7BB@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test13@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test13@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test13@@7B@" =
// CHECK-DAG: @"??_8D@Test13@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test13@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test13@@7B01@@" =
// CHECK-DAG: @"??_8C@Test13@@7BB@1@@" =
// CHECK-DAG: @"??_8B@Test13@@7B@" =
}
namespace Test14 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; };
struct D : virtual C { int d; };
struct E : D, virtual C, virtual B { int e; };
E e;
// CHECK-DAG: @"??_8E@Test14@@7B@" =
// CHECK-DAG: @"??_8E@Test14@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test14@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test14@@7B@" =
// CHECK-DAG: @"??_8D@Test14@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test14@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test14@@7B01@@" =
// CHECK-DAG: @"??_8C@Test14@@7BB@1@@" =
// CHECK-DAG: @"??_8B@Test14@@7B@" =
}
// NOTE(review): Tests 15-17 build long chains of (partly ambiguous) virtual
// bases; Test17's own comment flags the alternating naming pattern as the key
// observation.
namespace Test15 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual A { int c; };
struct D : virtual B { int d; };
struct E : D, C, B { int e; };
E e;
// CHECK-DAG: @"??_8E@Test15@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test15@@7BB@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test15@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test15@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test15@@7B@" =
// CHECK-DAG: @"??_8D@Test15@@7B01@@" =
// CHECK-DAG: @"??_8D@Test15@@7BB@1@@" =
// CHECK-DAG: @"??_8B@Test15@@7B@" =
}
namespace Test16 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; }; // ambig
struct D : virtual C { int d; };
struct E : virtual D { int e; }; // ambig
struct F : E, D, C, B { int f; }; // ambig
F f;
// CHECK-DAG: @"??_8F@Test16@@7BE@1@@" =
// CHECK-DAG: @"??_8F@Test16@@7BD@1@E@1@@" =
// CHECK-DAG: @"??_8F@Test16@@7BC@1@E@1@@" =
// CHECK-DAG: @"??_8F@Test16@@7BB@1@E@1@@" =
// CHECK-DAG: @"??_8F@Test16@@7BD@1@@" =
// CHECK-DAG: @"??_8F@Test16@@7BC@1@@" =
// CHECK-DAG: @"??_8F@Test16@@7BB@1@@" =
// CHECK-DAG: @"??_8E@Test16@@7B01@@" =
// CHECK-DAG: @"??_8E@Test16@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test16@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test16@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test16@@7B@" =
// CHECK-DAG: @"??_8D@Test16@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test16@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test16@@7B01@@" =
// CHECK-DAG: @"??_8C@Test16@@7BB@1@@" =
// CHECK-DAG: @"??_8B@Test16@@7B@" =
}
namespace Test17 {
// This test case has an interesting alternating pattern of using "vbtable of B"
// and "vbtable of C for C". This may be the key to the underlying algorithm.
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; }; // ambig
struct D : virtual C { int d; };
struct E : virtual D { int e; }; // ambig
struct F : virtual E { int f; };
struct G : virtual F { int g; }; // ambig
struct H : virtual G { int h; };
struct I : virtual H { int i; }; // ambig
struct J : virtual I { int j; };
struct K : virtual J { int k; }; // ambig
K k;
// CHECK-DAG: @"??_8K@Test17@@7B01@@" =
// CHECK-DAG: @"??_8J@Test17@@7B@" =
// CHECK-DAG: @"??_8I@Test17@@7B01@@" =
// CHECK-DAG: @"??_8H@Test17@@7B@" =
// CHECK-DAG: @"??_8G@Test17@@7B01@@" =
// CHECK-DAG: @"??_8F@Test17@@7B@" =
// CHECK-DAG: @"??_8E@Test17@@7B01@@" =
// CHECK-DAG: @"??_8D@Test17@@7B@" =
// CHECK-DAG: @"??_8C@Test17@@7B01@@" =
// CHECK-DAG: @"??_8B@Test17@@7B@" =
}
// NOTE(review): Tests 18-22 mix repeated direct bases with virtual bases and
// check which base's vbptr each derived class ends up sharing.
namespace Test18 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : B { int c; };
struct D : C, B { int d; };
struct E : D, C, B { int e; };
E e;
// CHECK-DAG: @"??_8E@Test18@@7BC@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test18@@7BB@1@D@1@@" =
// CHECK-DAG: @"??_8E@Test18@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test18@@7BB@1@@" =
// CHECK-DAG: @"??_8B@Test18@@7B@" =
// CHECK-DAG: @"??_8C@Test18@@7B@" =
// CHECK-DAG: @"??_8D@Test18@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test18@@7BB@1@@" =
}
namespace Test19 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : virtual B { int c; };
struct D : virtual C, virtual B { int d; };
struct E : virtual D, virtual C, virtual B { int e; };
E e;
// CHECK-DAG: @"??_8E@Test19@@7B01@@" =
// CHECK-DAG: @"??_8E@Test19@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test19@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test19@@7BB@1@@" =
// CHECK-DAG: @"??_8D@Test19@@7B@" =
// CHECK-DAG: @"??_8D@Test19@@7BC@1@@" =
// CHECK-DAG: @"??_8D@Test19@@7BB@1@@" =
// CHECK-DAG: @"??_8C@Test19@@7B01@@" =
// CHECK-DAG: @"??_8C@Test19@@7BB@1@@" =
// CHECK-DAG: @"??_8B@Test19@@7B@" =
}
namespace Test20 {
// E has no direct vbases, but it adds to C's vbtable anyway.
struct A { int a; };
struct B { int b; };
struct C : virtual A { int c; };
struct D : virtual B { int d; };
struct E : C, D { int e; };
E f;
// CHECK-DAG: @"??_8E@Test20@@7BC@1@@" = linkonce_odr unnamed_addr constant [3 x i32] [i32 0, i32 20, i32 24]
// CHECK-DAG: @"??_8E@Test20@@7BD@1@@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 16]
// CHECK-DAG: @"??_8D@Test20@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
// CHECK-DAG: @"??_8C@Test20@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
}
namespace Test21 {
struct A { int a; };
struct B : virtual A { int b; };
struct C : B { int c; };
struct D : B { int d; };
struct E : C, D { int e; };
struct F : virtual E { int f; };
struct G : E { int g; };
struct H : F, G { int h; };
H h;
// CHECK-DAG: @"??_8H@Test21@@7B@" =
// CHECK-DAG: @"??_8H@Test21@@7BC@1@F@1@@" =
// CHECK-DAG: @"??_8H@Test21@@7BD@1@F@1@@" =
// CHECK-DAG: @"??_8H@Test21@@7BC@1@G@1@@" =
// CHECK-DAG: @"??_8H@Test21@@7BD@1@G@1@@" =
// CHECK-DAG: @"??_8G@Test21@@7BC@1@@" =
// CHECK-DAG: @"??_8G@Test21@@7BD@1@@" =
// CHECK-DAG: @"??_8F@Test21@@7B@" =
// CHECK-DAG: @"??_8F@Test21@@7BC@1@@" =
// CHECK-DAG: @"??_8F@Test21@@7BD@1@@" =
// CHECK-DAG: @"??_8E@Test21@@7BC@1@@" =
// CHECK-DAG: @"??_8E@Test21@@7BD@1@@" =
// CHECK-DAG: @"??_8D@Test21@@7B@" =
// CHECK-DAG: @"??_8B@Test21@@7B@" =
// CHECK-DAG: @"??_8C@Test21@@7B@" =
}
namespace Test22 {
struct A { int a; };
struct B : virtual A { int b; };
struct C { int c; };
struct D : B, virtual C { int d; };
D d;
// CHECK-DAG: @"??_8D@Test22@@7B@" = linkonce_odr unnamed_addr constant [3 x i32] [i32 0, i32 12, i32 16]
// CHECK-DAG: @"??_8B@Test22@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
}
// NOTE(review): Tests 23-26 vary base order and vfptr presence. A leading -4
// entry shows up when the declaring base itself issues a vfptr (Test25/26) but
// not when the vbptr is shared with a vfptr-free base (Test24) — presumably
// the delta from the vbptr back to the object origin; confirm against the
// MS C++ ABI layout rules before editing.
namespace Test23 {
struct A { int a; };
struct B : virtual A { int b; };
struct C { int c; };
// Note the unusual order of bases. It forces C to be laid out before A.
struct D : virtual C, B { int d; };
D d;
// CHECK-DAG: @"??_8D@Test23@@7B@" = linkonce_odr unnamed_addr constant [3 x i32] [i32 0, i32 16, i32 12]
// CHECK-DAG: @"??_8B@Test23@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
}
namespace Test24 {
struct A { int a; };
struct B : virtual A { int b; };
struct C { int c; };
struct D : virtual C, B {
virtual void f(); // Issues a vfptr, but the vbptr is still shared with B.
int d;
};
D d;
// CHECK-DAG: @"??_8D@Test24@@7B@" = linkonce_odr unnamed_addr constant [3 x i32] [i32 0, i32 16, i32 12]
// CHECK-DAG: @"??_8B@Test24@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 8]
}
namespace Test25 {
struct A { int a; };
struct B : virtual A {
virtual void f(); // Issues a vfptr.
int b;
};
struct C { int c; };
struct D : virtual C, B { int d; };
D d;
// CHECK-DAG: @"??_8D@Test25@@7B@" = linkonce_odr unnamed_addr constant [3 x i32] [i32 -4, i32 16, i32 12]
// CHECK-DAG: @"??_8B@Test25@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 -4, i32 8]
}
namespace Test26 {
struct A { int a; };
struct B { int b; };
struct C { int c; };
struct D : virtual A { int d; };
struct E : virtual B {
virtual void foo(); // Issues a vfptr.
int e;
};
struct F: virtual C, D, E { int f; };
F f;
// F reuses the D's vbptr, even though D is laid out after E.
// CHECK-DAG: @"??_8F@Test26@@7BD@1@@" = linkonce_odr unnamed_addr constant [4 x i32] [i32 0, i32 16, i32 12, i32 20]
// CHECK-DAG: @"??_8F@Test26@@7BE@1@@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 -4, i32 28]
}
// NOTE(review): Tests 27-28 are regressions for PR17748; Test29 checks a
// zero-filled vbtable; Test30 covers an explicitly instantiated template.
namespace Test27 {
// PR17748
struct A {};
struct B : virtual A {};
struct C : virtual B {};
struct D : C, B {};
struct E : D {};
struct F : C, E {};
struct G : F, D, C, B {};
G x;
// CHECK-DAG: @"??_8G@Test27@@7BB@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BB@1@F@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BC@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BC@1@D@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BC@1@E@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BC@1@F@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BD@1@@" =
// CHECK-DAG: @"??_8G@Test27@@7BF@1@@" =
}
namespace Test28 {
// PR17748
struct A {};
struct B : virtual A {};
struct C : virtual B {};
struct D : C, B {};
struct E : C, D {};
struct F : virtual E, virtual D, virtual C {};
F x;
// CHECK-DAG: @"??_8F@Test28@@7B01@@" =
// CHECK-DAG: @"??_8F@Test28@@7BB@1@@" =
// CHECK-DAG: @"??_8F@Test28@@7BC@1@@" =
// CHECK-DAG: @"??_8F@Test28@@7BC@1@D@1@@" =
// CHECK-DAG: @"??_8F@Test28@@7BC@1@D@1@E@1@@" =
// CHECK-DAG: @"??_8F@Test28@@7BC@1@E@1@@" =
// CHECK-DAG: @"??_8F@Test28@@7BD@1@@" =
// CHECK-DAG: @"??_8F@Test28@@7BE@1@@" =
}
namespace Test29 {
struct A {};
struct B : virtual A {};
struct C : virtual B {};
struct D : C {};
D d;
// CHECK-DAG: @"??_8D@Test29@@7BB@1@@" = linkonce_odr unnamed_addr constant [2 x i32] zeroinitializer
}
namespace Test30 {
struct A {};
template <class> struct B : virtual A {
B() {}
};
extern template class B<int>;
template B<int>::B();
// CHECK-DAG: @"??_8?$B@H@Test30@@7B@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 0, i32 4], comdat
}
|
Prevalence and Associated Factors of Physical Activity among Medical Students from the Western Balkans
The student population includes young adults who need nutrition and regular physical activity (PA) for mental, cognitive, and physical development. It is estimated that, globally, only 25–40% of the university student population is involved in regular PA. To date, no research has been conducted in the Western Balkans to address the PA of medical students. The aim of this study was to investigate the prevalence and factors influencing PA among medical students from the Western Balkans. A cross-sectional study included 2452 students from 14 medical faculties in five countries (Slovenia, Croatia, Bosnia and Herzegovina, North Macedonia and Serbia). There were significantly more students who engaged than those who did not engage in some type of regular (daily) PA. Gender, overweight or obesity, and household income are significantly associated with students’ PA. Students who are more often involved in regular daily PA and have higher daily PA levels are more likely to be males whose household income is above average. In order to improve the health of the student population, the public health authorities need to continuously investigate the PA of students and introduce appropriate activities to increase their level of PA.
Introduction
The World Health Organization (WHO) defines physical activity (PA) as any bodily movement caused by skeletal muscles that requires energy consumption . PA includes all movements even during leisure time, for transport to go to and from places, or as a part of a person's work . Both moderate-and vigorous-intensity PA improve health .
WHO and Centers for Disease Control and Prevention (CDC) global recommendations for PA for the population aged 18 to 64 years emphasize that consistency in the frequency, duration, intensity, type and total amount of daily (regular) PA are necessary to reduce the risk for mass non-communicable diseases (NCD) . That means: at least 150 to 300 min of moderate-intensity aerobic PA, or at least 75 to 150 min of vigorous-intensity aerobic PA, or an equivalent combination of moderate-intensity PA and vigorous-intensity PA throughout the week .
The student population includes young adults whose health habits, i.e., proper nutrition and regular PA, are necessary for mental, cognitive, and physical development. When students start attending universities, they usually change their life habits. A high-energy diet that is high in salt, irregular meals, and a lack of PA can negatively affect student health and lead to overweight and obesity. It is estimated that, globally, 20 to 40% of the university student population is overweight. Overweight and obesity […] (Table notes: * EU member; ** EU candidate for membership; *** expressed as the total mortality rate for the 5 leading diseases; **** expressed as total physical activity; ***** expressed as the total mortality rate for the 10 leading diseases.)
Physicians and medical students who have healthy life habits are more prone to feel confident in counseling their patients and patients are more likely to trust advice on health behaviors (e.g., diet, PA) given by physicians who lead a heathy lifestyle . Therefore, it is of great significance that medical schools increase the proportion of students maintaining and adopting regular PA habits in order to increase the quality and rates of future physical counseling delivered by physicians .
Graduate medical school programs should focus on health promotion in students, because this will probably lead to improved health behavior in students' patient populations .
Up to now, no research has examined PA among the medical students in five countries of the Western Balkans, namely the Republic of Croatia, Republic of Serbia, Republic of Slovenia, Republic of North Macedonia, and Bosnia and Herzegovina.
The objective of this study was to examine the prevalence of, and the factors influencing, PA among medical students from the Western Balkans. The study was performed by convenience sampling of medical faculties.
Study Questionnaire
The data were collected through an online survey generated on an online platform (Google Form) accessible from any device. The survey was uploaded, and the link was forwarded to the students through student representatives from the home faculty via e-mail and social networks (Facebook), or by posting on faculty websites. The survey participation was voluntary and anonymous. The participants were able to withdraw their participation in the survey at any stage before the submission. When the participants completed the survey linked to the Google Form, each survey was sent to a database from where it could be downloaded as a Microsoft Excel sheet. The online survey was generated so that only the answers from the fully completed survey were registered in the database and were included in further analysis. The privacy of respondents was guaranteed by research method.
Variables
The study was carried out in two parts. The first part included baseline demographic characteristics: gender, faculty the students attend, year of study, body height, body weight, average household income, and type of settlement students lived in before enrolling the university. The second part examined the mean daily PA, alcohol consumption and the smoking status of the respondents.
When asked about the mean daily PA, the respondents checked one of five offered answers: 1. I do not engage in regular PA; 2. Up to 30 min a day; 3. Up to 1 h a day; 4. 1-2 h a day; 5. 3 or more hours a day. The responses: 1-2 h of PA a day and 3 or more hours a day, were categorized as "I engage in PA more than 1 h a day".
Based on the year of study, medical students were grouped into two categories: 1-3-year students and 4-6-year students.
The body mass index (BMI) was calculated from self-reported data on body height and body weight. The classification was done according to WHO recommendations; underweight: BMI < 18.50 kg/m 2 ; normal weight: 18.50-24.99 kg/m 2 ; overweight: 25.0-29.99 kg/m 2 ; and obesity: ≥30.0 kg/m 2 .
Household income data were gathered by checking one of the five offered answers: 1. Far below average; 2. Below average; 3. Average; 4. Above average; 5. Far above average. Far below average and below average were categorized as "below average household income", while above average and far above average were categorized as "above average household income".
In regard to the type of settlement students lived in before enrolling the university, the respondents chose one of the seven offered answers: 1. A village with up to 500 inhabitants; 2. A village from 500 to 3000 inhabitants; 3. A village with over 3000 inhabitants; 4. A town with up to 20,000 inhabitants; 5. A town from 20,000 to 100,000 inhabitants; 6. A town from 100,000 to 1 million inhabitants; 7. A town with over 1 million inhabitants. The responses: a village with up to 500 inhabitants, a village from 500 to 3000 inhabitants and a village with over 3000 inhabitants, were categorized as "rural settlements", while the responses: a town with up to 20,000 inhabitants, a town from 20,000 to 100,000 inhabitants, a town from 100,000 to 1 million inhabitants and a town with over 1 million inhabitants, were categorized as "urban settlements".
In order to examine alcohol consumption among the respondents, they were asked to check one of the five offered answers: 1. I do not drink alcohol; 2. Occasionally; 3. On weekends; 4. Several times per week; 5. Daily.
The students were also asked about their smoking habits and the number of cigarettes smoked per day, by choosing one of the six offered answers: 1. I am not a smoker; 2. I smoke occasionally; 3. Up to 5 cigarettes per day; 4. 5-10 cigarettes per day; 5. 11-20 cigarettes per day; 6. More than 20 cigarettes per day. Based on their responses, the students were grouped into "smokers": I smoke occasionally; up to 5 cigarettes per day; 5-10 cigarettes per day; 11-20 cigarettes per day; and more than 20 cigarettes per day, and "non-smokers": I am not a smoker.
Statistical Analysis
In statistical analysis, categorical variables were described by frequency distribution and percentages.
A chi-square test was applied to analyze the association between categorical variables, and Cramer's V was used as association measure.
In order to establish the impact of respondents' gender, year of study, BMI, alcohol consumption, household income, type of settlement, and smoking status on PA, a binary logistic regression analysis was used with the independent variables coded as follows: gender (0-female, 1-male), year of study (0-1-3, 1-4-6), overweight and obese (0-No, 1-Yes), alcohol consumption (0-I do not drink alcohol, 1-occasionally, on weekends, several times per week and daily), household income (0-below average and average, 1-above average), type of settlement (0-rural, 1-urban), and student smoking status (0-No, 1-Yes). The odds ratio (OR) values were adjusted.
In the binary logistic regression analysis, the dependent variable (student PA) was "Has regular daily PA" (0 = No, 1 = Yes).
Statistical analysis was done using SPSS Statistics for Windows version 24 (IBM Corporation, Armonk, NY, USA). p value of < 0.05 was considered statistically significant.
Ethical Aspects of the Research
The study was conducted in accordance with the guidelines of the Declaration of Helsinki, and the Ethics Commissions/Committees of the faculties that took part in the research gave the opinion that the approval of the commissions/committees was not required, since the research did not incorporate invasive methods and violate the privacy of respondents.
Results
The highest percentage of students was in Bosnia and Herzegovina (35.8%) ( Table 2). Among the students, there were more female respondents (82.2%), most presented among students from the Republic of Croatia (88.5%) and least among students from the Republic of North Macedonia (76.1%). The highest percentage of students were involved in regular PA (62.3%) ( Table 3). The daily level of PA differed significantly between different faculties (χ 2 = 131.882, p < 0.001, fi = 0.232). The students from the Faculty of Pharmacy of the University of Mostar, Bosnia and Herzegovina (77.3%), in relation to the students of other faculties, most often engaged in some kind of regular PA, while the highest percentage of students of the Faculty of Pharmacy and Biochemistry of the University of Zagreb, Republic of Croatia (50.9%) did not engage in regular PA. There was a significant difference in daily level of PA between male and female students only at the Faculty of Medicine Novi Sad of the University of Novi Sad, Republic of Serbia (χ 2 = 7.957, p = 0.047, fi = 0.147) ( Table 4). Their female students more often did not engage in regular PA compared to male students (40.2% vs. 28.4%), and male students more often engaged in PA more than 1 h per day in comparison to female students (35.1% vs. 20.3%). Significant difference in daily level of PA of students in the terms of year of study was determined at the Faculty of Medicine of the University of Ljubljana, Republic of Slovenia (χ 2 = 9.757, p = 0.021, fi = 0.212), where 4-6-year students in higher percentage did not engage in regular PA (52.9%) and had PA up to 30 min a day (21.2%) compared to 1-3-year students (43.0% and 13.2%) and the Faculty of Medicine of the University of Zenica, Bosnia and Herzegovina (χ 2 = 10.349, p = 0.016, fi = 0.225), where 1-3-year students more often did not engage in regular PA (31.5%) compared to 4-6-year students (16.5%) ( Table 5).
In comparison with male students, female students more often did not engage in regular PA (39.1% vs. 31.4%) ( Table 6). Male students more frequently did have PA up to 30 min (21.1% vs. 19.5%) and more than 1 h a day (27.7% vs. 20.6%) compared to female students, while female students more often had PA up to 1 h per day (20.8%) than male students (19.9%). The difference was significant (χ 2 = 14.464, p = 0.002, fi = 0.077). Students whose household income was above average significantlly more often engaged in regular PA and had higher level of PA when compared to students whose household income was average and below average (χ 2 = 19.686, p = 0.003, fi = 0.063) ( Table 6).
The model of binary logistic regression analysis showed that gender, presence of overweight or obesity, and household income were significantly associated with students' PA (Table 7). The odds of having regular daily PA for male students were 1.482 times higher than for female students (95% CI: 1.178-1.865; p = 0.001). The odds of having regular daily PA for overweight or obese students were 0.732 times those of underweight and normal-weight students (95% CI: 0.578-0.928; p = 0.010). The odds of having regular daily PA for students whose household income was above average were 1.505 times higher than for students whose household income was below average or average (95% CI: 1.247-1.816; p < 0.001). Year of study, alcohol consumption, type of settlement and student smoking status were not significant predictors of students' PA status.
Discussion
Our study shows that the students of the Faculty of Pharmacy of the University of Mostar, Bosnia and Herzegovina, had the highest percentage of engagement in some of regular PA, while the students of the Faculty of Pharmacy and Biochemistry of the University of Zagreb, Republic of Croatia, most often did not engage in regular PA, comparing to the students of other faculties. Likus et al. in a study conducted in Poland indicate that most medical students do not engage in any form of PA, stating the lack of time due to faculty obligations to be the main excuse to engage in PA. According to results of the study by Dąbrowska-Galas et al. , which was also conducted in Poland, more than 80% of medical students engage in regular PA. The same study shows that university schedule, availability of sports centers and increased knowledge of health benefits have been associated with regular PA . Another study conducted in Poland which was done by Ilow et al. indicates low level of PA of pharmacy students. The authors point out that the most frequent barriers to PA are time limitation due to a busy study schedule . Also, Ilow et al. emphasize that PA should be promoted among students because of its positive influence on body weight and blood pressure. Martinović et al. in a study conducted among biomedical students from Split (Republic of Croatia) show that more than a half of biomedical students has some kind of regular PA. The same study indicates that higher level of PA of biomedical students is associated with higher knowledge and positive opinions towards healthy lifestyle . The authors point out that one of the major reasons for PA in a past few decades is predominantly the advancement in aesthetics and physical appearance as a way to achieve image of the perfect male and female bodies . Martinović et al. 
state that this could potentially be perceived as a risk, because the main reason for exercises should be maintaining good health, and that the influence of media and social networks could contribute to unreal body image aspirations.
The results of our research show that, only at the Faculty of Medicine Novi Sad of the University of Novi Sad, Republic of Serbia, there existed significant difference in daily level of PA between male and female medical students, where female medical students more frequently did not engage in regular PA in comparison to male medical students, while male medical students more frequently engaged in PA more than 1 h a day compared to female medical students. Having taken into consideration all medical students, regardless of the attended faculty, the obtained results of our research also indicate that female students compared to male students more frequently did not engage in regular PA. Male medical students more often did perform PA up to 30 min and more then 1 h per day in comparison with female medical students, while the female medical students more frequently had PA up to 1 h a day compared to male students. A study by Bin Abdulrahman et al. conducted at medical colleges in Saudi Arabia to investigate medical students' lifestyle habits, including PA shows that the greatest number of medical students exhibited healthy lifestyles to some extent. The same study indicates that these health-promoting behaviors differed by medical students' gender, especially when speaking of PA and eating styles . According to the results of the study conducted in Saudi Arabia, female medical students more often do not have any kind of PA compared to male medical students . The study by Bin Abdulrahman et al. also shows that male medical students engage in exercise more often than female medical students. The authors state that diminished levels of PA are in connection with an increase in the prevalence of diet-related non-communicable diseases, an overweight condition, and obesity among the young people . The two main barriers for PA identified by medical students are lack of time and stress . Jaremkówet al. 
concucted a study among medical and dentistry students in Poland and indicate that, similar to our results, male students spent more time on both PA/exercise significantly more often than female students. In a study conducted among health science students in Spain, Romero-Blanco et al. evaluated the level of PA of male and female students before and during the coronavirus lockdown, and showed that female students have a significant increase levels of PA in relation to male students. Romero-Blanco et al. hold the opinion that the major reason for this phenomenon is the female students' stronger motivation to reduce body weight gained during the coronavirus lockdown. The authors point out that perhaps male and female students may have different motivations and that the environmental factors influence one gender more strongly . The results of the studies on motives for PA by gender indicate that some variables that motivated male, but not female students, are factors related to the environment (e.g., competition or social recognition), while the main motivation for female students is weight control . Similar to the results of our research, the research by Stanford et al. performed in the USA shows that male attending physicians, resident and fellow physicians and medical students are more likely to have regular PA than female students. However, a study by Blake et al. , conducted in United Kingdom (UK) on medical and nursing students in Canada, investigates predictors of PA level and provides evidence that gender was not a positive predictor of regular PA.
The obtained results of our research show significant difference in medical students' daily level of PA regarding the year of study at the Faculty of Medicine of the University of Ljubljana, Republic of Slovenia, and the Faculty of Medicine of the University of Zenica, Bosnia and Herzegovina. At the Faculty of Medicine of the University of Ljubljana, Republic of Slovenia, there 4-6-year students in much higher percentage did not engage in regular PA and had PA up to 30 min a day in comparison to 1-3-year students. As for the Faculty of Medicine of the University of Zenica, Bosnia and Herzegovina, 1-3-year students more frequently did not engage in regular PA than 4-6-year students. Considering whole sample, regardless of attended faculty, the result of our research indicates that no significant difference was found in daily level of PA of medical students of different years of study. The results of a study by Romero-Blanco et al. conducted in Spain among students of health sciences show that students in higher years of study have a lower level of PA than students in younger years of study. The authors emphasize that the main reason is the increasing level of obligations during the years of study . A study done by Luciano et al. among medical students in Italy indicates that there is no significant difference in the level of PA between students of younger and higher years of study. Luciano et al. point out that improving PA and sleep as well as reducing sedentary lifestyle, would add benefit for the health of many students of medicine. The authors emphasize that medical school programs provide limited education on sleep and PA at the moment . This is mainly because of the lack of qualified staff and dedicated time, and regarding such education as low priority .
Our results show that medical students whose household income was above average were the ones who significantly more often engaged in regular PA. They also had higher level of PA in relation to medical students whose household income was average and below average. The reason for this can potentially be the fact that the universities of the Western Balkans do not have programmes that would enable free sports centers for all students. Medical students are obliged to finance sports activities on their own, so those with higher household incomes have more opportunities to exercise in places such as gym, pool etc. Contrary to our results, the results of a study by Rejali et al. conducted among medical and public health students in Iran indicate that students of lower socioeconomic status have a higher level of PA than students of higher socioeconomic status. The authors point out that organizing PA education programs, trainings and providing proving educational content in university health courses in order to encourage students for PA . A study by the Awadalla et al. conducted in Saudi Arabia shows that being a student in the college of medicine is associated with a high risk of physical inactivity, but, contrary to the results of our study, family income is not a significant predictor of physical inactivity. The obtained result of our research shows the odds of having regular daily PA were 1.505 times higher for medical students whose household income was above average in comparison to medical students whose household income was below average and average. As for socioeconomic standard and family income, there is a controversy about their influence on PA level . While some studies reported that sedentary behavior and low levels of PA are associated with low socioeconomic status , other studies found out that high physical inactivity is associated with high socioeconomic status .
Similar to our research, the research by Zeńczak-Praga et al. , conducted on medical and physiotherapy students from Germany and Spain, shows that there is no significant association between the level of PA and BMI among students. Bergier et al. in a study conducted among students including medical students from Ukraine and from the Visegrad countries (Hungary, Slovakia, Czech Republic and Poland), indicate that underweight and normal weight students from Visegrad countries have significantly higher amount of PA compared to Ukrainian students. Overweight and obese students have lower levels of PA than underweight and normal weight ones, but no significant difference is found between Visegrad and Ukrainian students . The authors describe the results with advantageous economic situation of the students in the Visegrad countries that provides more free time that can be spent in increased PA . Also, possible higher awareness of the PA role in a healthy lifestyle is proposed . Pavičić-Žeželj et al. in a study conducted among students of the Faculty of Medicine of the University of Rijeka (Republic of Croatia) point out that students with normal BMI are usually more physically active and that the desire for maintaining good health could contribute to a higher motivation to maintain a healthy lifestyle. A study by Catovic and Halilovic conducted among medical students from the Faculty of Medicine of University of Sarajevo (Bosnia and Herzegovina) shows that there is a positive association between obesity and the average time spent in vigorous PA. Catovic and Halilovic highlight that it is important to raise awareness about higher energy intake compared to energy consumption that may lead to the increase in body weight, and that PA is necessary to maintain a normal body mass and sustain good health.
Contrary to our results, in a study conducted among health science students in Spain, Romero-Blanco et al. indicate a positive association between alcohol consumption and the level of PA. Students who consume alcohol to a greater extent have a higher level of PA. In a study conducted on medical students in Romania, Nasui et al. show the differences between drinkers regarding the level of PA. The result of the study conducted in Romania indicates that medical students who have higher levels of PA are more prone to drinking alcohol in higher amounts. The authors point out that regular PA is one of those lifestyle factors that may help individuals deal with stress. Regular PA may function as an alternative to drinking.
According to the results obtained by Trivedi et al. , examined differences in obesityrelated behaviors across rural-urban adult populations (aged 20 years or more) in the USA, in comparison to their urban counterparts, rural residents are more likely to be physically inactive, which is not in agreement with the results of our study, stating that there was no significant difference in daily level of PA of medical students of different type of dwelling settlement. The same study also shows that rural residents are more likely to report no PA at all . The authors emphasize that when compared to their urban counterparts, rural residents have a higher representation of individuals less-educated about the significance of regular PA as modifiable risk factor for obesity among high-risk populations such as adult population in rural America . It is highlight that techniques that entitle rural settlements to increase the availability of recreational resources and quality food markets need to be identified and disseminated more .
A study by Mansouri et al. conducted among Iranian university students indicates a significant inverse association between PA and smoking, which is inconsistent with the results of our study that show no significant difference between the level of PA and the medical students' smoking status. The authors state that it appears that PA alleviates the psychological distress associated with cigarettes smoking . Furthermore, PA fills the students' leisure time that may otherwise be spent on smoking . The results obtained in a study by Tien Nam et al. conducted in Vietnam on health science students indicate that a vigorous level of PA is associated with smoking. There may exist an explanation that in study which is conducted in Vietnam, health science students who are physically active are more prone to interact socially (students communicate with close friends who are smokers) and, as a result of that, smoke more .
The result of our research shows that the odds of having regular daily PA were 0.732 times lower for overweight or obese medical students compared to underweight and normal weight medical students. Contrary to our result, a study conducted by Gallo et al. in Australia indicates that biomedical students with higher BMI are more likely to have increasing vigorous PA. Gallo et al. state that, as expected, in a study conducted in Australia higher levels of vigorous PA of biomedical students are associated with modest reductions in percentage of body fat and blood glucose levels, confirming the significance of high-intensity exercise in the maintenance of metabolic health. The research by Medagama et al. conducted among medical students in Sri Lanka shows that nutritional status do not represent a significant predictor of the student'level of PA. However, a study by Medagama et al. indicates that overweight students are engaged in PA in a higher percentage than normal weight students. Similar to our research, Blake et al. in their study conducted among medical and nursing students in Canada indicate that the year of study does not represent a significant predictor of the their PA status. However, contrary to our results, the study by Medagama et al. conducted on medical students in Sri Lanka, shows physical inactivity to be significantly associated with the year of study. The same study indicates that medical students in higher years of study are more likely to have PA compared to medical students at lower years of study . The authors point out that greater academic pressure during the lower (pre-clinical) years of study than higher (clinical) may represent possible reason for these results .
Nasui et al. in their study conducted on Romanian university medical students point out that there is a positive association of consuming alcohol in larger quantities and higher levels of PA, which is not in accordance with the result of our study that shows that alchohol consumption was not significant predictor of the PA status of medical students. The results of a study conducted by Nasui et al. indicate that it seems to be probably that alcohol consumption plays a remarkable part in the social life of Romanian university medical students. The same study also shows that there is an increase in alcohol consumption during the academic years of Romanian medical students . The authors state that regular PA may help individuals cope with stress during studies, but also other unhealthy habits, such as drinking or smoking can do the same .
The results of research by Trivedi et al. conducted in the USA indicate that it is more likely for adults living in urban areas to meet sufficient level of PA compared to the ones coming from rural areas, which is not consistent with our results that did not show that dwelling settlement was a significant predictor of the level of PA of medical students. The reason for that is explained by higher percentage of individuals less-educated about the importance of PA among rural residents in comparison to urban residents .
According to Mansouri et al. study conducted in Iran, PA is significantly associated with smoking. Iranian university students who are smokers are more likely to be physically inactive compared to the non-smoking students . However, in a study conducted among medical students in Saudi Arabia, Torchyan et al. indicate that highly physically active students are more likely to become smokers. The authors point out that students, owing to sports, during sports activities have more social interaction with their peers, which can be in connection with peer presure to start smoking . The results of the study by Mansouri et al. and study by Torchyan et al. do not correspond with the results of our research that show that there was not any significant medical students' smoking status influence on their level of PA.
A study by Kim et al. conducted among health college students in South Korea, and the study by Kosendiak et al. conducted among medical students in Poland indicate that there is a significant decrease in PA during COVID-19 pandemic and after pandemic measures have been reduced in relation to time before the beginning of the pandemic. Our study was conducted before the pandemic and future repeated research on the Western Balkan medical student population could have possible benefit because we would be able to establish if changes in PA levels were made due to changes in lifestyle of student population in past two years. This study also gives a basis for future research that can be conducted in order to determine the impact of PA promotion programs or university education between different Western Balkan countries on the prevalence of PA in the student population.
The importance of our research is that a broad study was conducted, for the first time in the Western Balkans, determining factors that affect the level of PA in the medical student population in this culturally specific region. Medical students, as future health workers, are key subjects in health promotion; they can take essential, necessary and continuous public health actions with the aim to improve life habits in the health studies student population. The contribution of this study implies application of its results to promote (national or international) public health activities specifically for future health workers, as well as for the overall student population.
Our study has several limitations. The research was conducted in the form of a cross-sectional study, which is a snapshot of the current situation. We were unable to observe changes over time and, consequently, conclusions about the cause-effect relationship could not be made. The online, anonymously filled-in self-administered questionnaire was used as an instrument for data collection, which is another limitation of our research. Despite emphasizing the anonymity of the survey and the confidentiality of the research results, respondents are frequently dishonest in giving answers, therefore the reliability of the answers cannot be established. Finally, a limitation of this study is the fact that it was conducted among students of medical faculties selected using a convenience sampling method. Since the faculties were not randomly selected, the obtained results cannot be generalized to all students of medical faculties in the Western Balkans. For us, it was not possible to give precise numbers (response rate) of students from each faculty at the time the study was performed, but the percentage of included students at all individual faculties definitely surpasses a representative 10%.
Conclusions
There are significantly more medical students who engaged than those who do not engage in some type of regular (daily) PA. Gender, overweight or obesity, and household income are significantly associated with medical students' PA. Medical students who are more often involved in regular daily PA and have higher daily PA levels are more likely to be males whose household income is above average.
In order to improve the health of the student population, the educational institutions such as faculties, and public health authorities need to continuously investigate the PA of students, and introduce appropriate activities to increase their level of PA.
Institutional Review Board Statement:
The study was conducted in accordance with the guidelines of the Declaration of Helsinki, and the Ethics Commissions/Committees of the faculties that took part in the research provided an oral standpoint that there was no approval of the committees/commissions required, since the research did not incorporate invasive methods and violate the privacy of respondents.
Informed Consent Statement: Data were collected by applying an online survey, and all participants involved in survey gave their consent by submitting their answers. In the Google forms database only fully completed surveys were taken into account, i.e., registered. The privacy of respondents was guaranteed by research method.
Data Availability Statement:
The data presented in this study are available on reasonable request from the corresponding author.
Conflicts of Interest:
The author M.I. is a MDPI employee, however he does not work for the International Journal of Environmental Research and Public Health at the time of submission and publication. |
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Scanner;
public class Main {
    public static void main(String[] Args) {
        new Main().solve();
    }

    // Adjacency list: vertex id (1-based) -> list of neighbor ids.
    Map<Integer, ArrayList<Integer>> map = new HashMap<Integer, ArrayList<Integer>>();

    // Reads an undirected graph with n vertices and n edges (so it contains
    // exactly one cycle), collects the vertices lying on that cycle, then
    // prints, for every vertex, its distance to the nearest cycle vertex.
    void solve() {
        int n = si();
        for (int i = 1; i <= n; i++) {
            map.put(i, new ArrayList<Integer>());
        }
        // Read the n edges; both directions are stored (undirected graph).
        for (int i = 1; i <= n; i++) {
            int a = si(), b = si();
            map.get(a).add(b);
            map.get(b).add(a);
        }
        // find cycle
        // A vertex i is on the cycle iff a DFS started at i can reach i again
        // without immediately backtracking over the edge it arrived through.
        for (int i = 1; i <= n; i++) {
            int[] v = new int[n + 1];
            is = false; // reset the per-start "cycle found" flag
            dfsCycle(i, i, i, v);
        }
        // find distance
        // BFS from one cycle vertex; every cycle vertex is forced to distance 0,
        // so tree vertices end up with their hop count away from the cycle.
        int[] visited = new int[n + 1], distance = new int[n + 1];
        bfsDistance(cycle.get(0), visited, distance);
        for (int i = 1; i <= n; i++)
            System.out.print(distance[i] + " ");
    }

    // NOTE(review): this flag is never read or written anywhere -- presumably a leftover.
    boolean found = false;

    // Breadth-first search filling d[] with distances from the cycle.
    // v[] marks visited vertices; any vertex found in `cycle` is reset to
    // d = 0 when reached, so distances measure hops away from the cycle.
    // NOTE(review): vertices are marked visited on dequeue rather than on
    // enqueue, so a vertex can be enqueued more than once; in this unicyclic
    // graph the final distances still come out correct -- confirm before
    // reusing this routine on general graphs.
    void bfsDistance(int start, int[] v, int[] d) {
        Queue<Integer> q = new LinkedList<Integer>();
        q.add(start);
        while (!q.isEmpty()) {
            int now = q.remove();
            v[now] = 1;
            for (int to : map.get(now)) {
                if (v[to] == 0) {
                    d[to] = d[now] + 1;
                    if (cycle.contains(to)) {
                        d[to] = 0;
                    }
                    q.add(to);
                }
            }
        }
    }

    // Vertices detected to lie on the single cycle (filled by dfsCycle).
    ArrayList<Integer> cycle = new ArrayList<Integer>();

    // True once the cycle has been found for the current start vertex;
    // used to cut the DFS short.
    boolean is = false;

    // Depth-first search used for cycle-membership detection.
    //   start - the vertex whose cycle membership is being tested
    //   from  - the vertex we arrived from (guards against the trivial
    //           two-step walk start -> x -> start over the same edge)
    //   now   - the current vertex
    //   v     - visited markers for this start vertex
    // If the walk reaches `start` again after at least two steps, `start`
    // lies on the cycle and is appended to `cycle` exactly once per solve().
    void dfsCycle(int start, int from, int now, int[] v) {
        v[now] = 1;
        for (int to : map.get(now)) {
            if (to == start && start != from) {
                cycle.add(start);
                is = true;
                return;
            } else if (v[to] == 0) {
                if (is)
                    return;
                else
                    dfsCycle(start, now, to, v);
            }
        }
    }

    // Parses any object's string form as an int.
    static int toi(Object s) {
        return Integer.parseInt(s.toString());
    }

    // ----------------------- Library ------------------------
    // 4-directional and 8-directional grid offsets (unused in this solution).
    static int[] dx_ = { 0, 0, 1, -1 };
    static int[] dy_ = { 1, -1, 0, 0 };
    static int[] dx = { 1, 0, -1, 1, -1, 1, 0, -1 }, dy = { 1, 1, 1, 0, 0, -1,
            -1, -1 };
    static Scanner scan = new Scanner(System.in);
    static int INF = 2147483647;

    // finds GCD of a and b using Euclidian algorithm
    public int GCD(int a, int b) {
        if (b == 0)
            return a;
        return GCD(b, a % b);
    }

    static List<String> toList(String[] a) {
        return Arrays.asList(a);
    }

    static String[] toArray(List<String> a) {
        String[] o = new String[a.size()];
        a.toArray(o);
        return o;
    }

    // Packs varargs ints into an array (a lightweight "pair"/tuple helper).
    static int[] pair(int... a) {
        return a;
    }

    // Reads one int token from stdin.
    static int si() {
        return scan.nextInt();
    }

    // Reads one full line from stdin.
    static String ss() {
        return scan.nextLine();
    }

    // Reads n ints into a 0-based array.
    static int[] sai(int n) {
        int[] a = new int[n];
        for (int i = 0; i < a.length; i++)
            a[i] = si();
        return a;
    }

    // Reads n ints into a 1-based array (index 0 unused).
    static int[] sai_(int n) {
        int[] a = new int[n + 1];
        for (int i = 1; i <= n; i++)
            a[i] = si();
        return a;
    }

    // Reads n lines into a String array.
    static String[] sas(int n) {
        String[] a = new String[n];
        for (int i = 0; i < a.length; i++)
            a[i] = ss();
        return a;
    }

    // Reads an r x c matrix of whitespace-separated tokens.
    static Object[][] _sm1(int r, int c) {
        Object[][] a = new Object[r][c];
        for (int i = 0; i < r; i++)
            for (int j = 0; j < c; j++)
                a[i][j] = scan.next();
        return a;
    }

    // Reads r rows, each consisting of three whole lines.
    static Object[][] _sm2(int r) {
        Object[][] a = new Object[r][3];
        for (int i = 0; i < r; i++)
            a[i] = new Object[] { ss(), ss(), ss() };
        return a;
    }
}
|
/**
* FSCK tool to recover failed directory mutations guarded by GCS Connector Cooperative Locking
* feature.
*
* <p>Usage:
*
* <pre>{@code
* hadoop jar /usr/lib/hadoop/lib/gcs-connector.jar com.google.cloud.hadoop.fs.gcs.CoopLockFsck \
* --{check,rollBack,rollForward} gs://<bucket_name> [all|<operation-id>]
* }</pre>
*/
public class CoopLockFsck extends Configured implements Tool {
static final String COMMAND_CHECK = "--check";
static final String COMMAND_ROLL_BACK = "--rollBack";
static final String COMMAND_ROLL_FORWARD = "--rollForward";
static final String ARGUMENT_ALL_OPERATIONS = "all";
private static final ImmutableSet<String> FSCK_COMMANDS =
ImmutableSet.of(COMMAND_CHECK, COMMAND_ROLL_FORWARD, COMMAND_ROLL_BACK);
public static void main(String[] args) throws Exception {
checkArgument(args.length > 0, "No arguments are specified");
if (args.length == 1 && "--help".equals(args[0])) {
System.out.println(
"FSCK tool to recover failed directory mutations guarded by"
+ " GCS Connector Cooperative Locking feature."
+ "\n\nUsage:"
+ String.format(
"\n\thadoop jar /usr/lib/hadoop/lib/gcs-connector.jar %s"
+ " --{check,rollBack,rollForward} gs://<bucket_name> [all|<operation_id>]",
CoopLockFsck.class.getCanonicalName())
+ "\n\nSupported commands:"
+ String.format("\n\t%s - print out operations status in the bucket", COMMAND_CHECK)
+ String.format(
"\n\t%s - recover directory operations in the bucket by rolling them forward",
COMMAND_ROLL_FORWARD)
+ String.format(
"\n\t%s - recover directory operations in the bucket by rolling them back",
COMMAND_ROLL_BACK));
return;
}
// Let ToolRunner handle generic command-line options
int result = ToolRunner.run(new Configuration(), new CoopLockFsck(), args);
System.exit(result);
}
@Override
public int run(String[] args) throws Exception {
String command = args[0];
checkArgument(FSCK_COMMANDS.contains(command), "Unknown %s command, should be %s", command);
int expectedArgsNumber = COMMAND_CHECK.equals(command) ? 2 : 3;
checkArgument(
args.length == expectedArgsNumber,
"%s arguments should be specified for %s command, but were: %s",
expectedArgsNumber,
command,
Arrays.asList(args));
String bucket = args[1];
checkArgument(
bucket.startsWith(GoogleHadoopFileSystem.SCHEME + "://"),
"bucket parameter should have 'gs://' scheme");
String operationId = COMMAND_CHECK.equals(command) ? null : args[2];
return new CoopLockFsckRunner(getConf(), URI.create(bucket), command, operationId).run();
}
} |
"""
Copyright (c) 2020, The Decred developers
See LICENSE for details.
Based on dcrd MsgPing.
"""
from decred.util.encode import ByteArray
CmdPong = "pong"
NonceLength = 8


class MsgPong:
    """
    MsgPong implements the Message API and represents a Decred pong message.

    A pong is sent in reply to a Decred ping message (MsgPing) and serves
    primarily to confirm that a connection is still valid.

    This message was not added until protocol versions AFTER BIP0031Version.
    """

    def __init__(self, nonce):
        """
        Args:
            nonce (int): Unique value tied to the specific ping message this
                pong answers.
        """
        self.nonce = nonce

    @staticmethod
    def btcDecode(b, pver):
        """
        btcDecode decodes b using the Decred protocol encoding into the
        receiver. This is part of the Message API.

        Args:
            b (ByteArray): The encoded MsgPong.
            pver (int): The protocol version. Unused.

        Returns:
            MsgPong: The decoded message.
        """
        nonce = b.unLittle().int()
        return MsgPong(nonce)

    def btcEncode(self, pver):
        """
        btcEncode encodes the receiver using the Decred protocol encoding.
        This is part of the Message API.

        Args:
            pver (int): The protocol version. Unused.

        Returns:
            ByteArray: The encoded MsgPong.
        """
        encoded = ByteArray(self.nonce, length=NonceLength)
        return encoded.littleEndian()

    @staticmethod
    def command():
        """
        command returns the protocol command string for the message. This is
        part of the Message API.

        Returns:
            str: The command string.
        """
        return CmdPong

    @staticmethod
    def maxPayloadLength(pver):
        """
        maxPayloadLength returns the maximum length the payload can be for
        the receiver. This is part of the Message API.

        Args:
            pver (int): The protocol version. Unused.

        Returns:
            int: The maximum payload length.
        """
        return NonceLength
|
def prefixed(self, prefix):
if not prefix:
return self.clone()
else:
return self.using(join(prefix, self)) |
    def update_power_status(self, host, status, flag_on):
        """Confirm a pending power transition for a deployed host.

        Args:
            host: cm_id of the host whose transient power state is checked.
            status: Power state reported by the hardware; expected to be
                "ON" or "OFF".
            flag_on: True when the pending action is a power-on ("on..."),
                False when it is a power-off ("off...").

        Returns:
            True when the host was mid-transition in the matching direction;
            if the observed status confirms the transition, the transient
            record is finalized in the database and the DB update's "result"
            field is returned instead. False otherwise.
        """
        query_elem = self.get_full_query({"cm_id": host})
        hosts_status = self.get_status(host)
        result = False
        # Only hosts already deployed carry a meaningful transient power state.
        if hosts_status and hosts_status[0]["status"] == "deployed":
            host_status = hosts_status[0]
            trans_status = {}
            flag_update = False
            # "on..."/"off..." mark an in-flight transition; the trailing dots
            # are dropped once the hardware confirms the requested final state.
            if flag_on and host_status["transient"]["action"] == "on...":
                result = True
                if status == "ON":
                    trans_status["transient.action"] = "on"
                    trans_status["transient.status_1"] = "ON"
                    flag_update = True
            elif not flag_on and host_status["transient"]["action"] == "off...":
                result = True
                if status == "OFF":
                    trans_status["transient.action"] = "off"
                    trans_status["transient.status_1"] = "OFF"
                    flag_update = True
            if flag_update:
                # Atomically persist the confirmed state. NOTE(review): the
                # return value switches from bool to the DB driver's "result"
                # field here -- confirm callers accept both shapes.
                update_result = self.db_client.atom_update(
                    query_elem, {"$set": trans_status})
                result = update_result["result"]
        return result
def ocr_core(self):
text = pytesseract.image_to_string(PIL.Image.open(fileName),lang=self.lang)
self.imagetext.setText(text) |
// Returns a description of specified virtual tapes in the virtual tape shelf
// (VTS). This operation is only supported in the tape gateway type. If a specific
// TapeARN is not specified, Storage Gateway returns a description of all virtual
// tapes found in the VTS associated with your account.
func (c *Client) DescribeTapeArchives(ctx context.Context, params *DescribeTapeArchivesInput, optFns ...func(*Options)) (*DescribeTapeArchivesOutput, error) {
if params == nil {
params = &DescribeTapeArchivesInput{}
}
result, metadata, err := c.invokeOperation(ctx, "DescribeTapeArchives", params, optFns, c.addOperationDescribeTapeArchivesMiddlewares)
if err != nil {
return nil, err
}
out := result.(*DescribeTapeArchivesOutput)
out.ResultMetadata = metadata
return out, nil
} |
<gh_stars>0
import * as ldcp from 'jsonld-context-parser'
import * as fs from 'fs'
import axios from 'axios'
import * as md5 from 'md5'
export const contextParser = new ldcp.ContextParser()
export const NGSI_LD_CORE_CONTEXT_URL = "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld"

// Returns the passed context normalized to array form, with the NGSI-LD core
// context URL appended if it is not already present.
//
// Accepts null/undefined (treated as an empty context), a single context
// entry, or an array of entries. BUGFIX: the input array is copied before
// appending, so the caller's array is no longer mutated as a side effect.
export function appendCoreContext(nonNormalizedContext: any): Array<any> {

    let result: Array<any>

    // '== null' matches both null and undefined.
    if (nonNormalizedContext == null) {
        result = []
    }
    else if (nonNormalizedContext instanceof Array) {
        // Shallow copy so the push below cannot modify the caller's array.
        result = nonNormalizedContext.slice()
    }
    else {
        result = [nonNormalizedContext]
    }

    if (!result.includes(NGSI_LD_CORE_CONTEXT_URL)) {
        result.push(NGSI_LD_CORE_CONTEXT_URL)
    }

    return result
}
// Recursively compacts all IRIs in `obj` (both string values and object keys)
// against the supplied normalized JSON-LD context. Primitives pass through
// unchanged; arrays are compacted element-wise. A fresh structure is returned.
export function compactObject(obj: any, normalizedContext: ldcp.JsonLdContextNormalized): any {

    if (obj === null) {
        return null
    }

    // Strings are treated as IRIs/terms and compacted against the context.
    if (typeof obj === 'string') {
        return normalizedContext.compactIri(obj, true)
    }

    // Numbers and booleans are returned untouched.
    if (typeof obj === 'number' || typeof obj === 'boolean') {
        return obj
    }

    // Arrays compact element-wise.
    if (obj instanceof Array) {
        return obj.map((item: any) => compactObject(item, normalizedContext))
    }

    // Plain objects: compact each value recursively, then store it under the
    // compacted form of its key. Building a new object means the input is
    // never modified.
    if (typeof obj === 'object') {
        const result: any = {}

        for (const key of Object.keys(obj)) {
            result[normalizedContext.compactIri(key, true)] = compactObject(obj[key], normalizedContext)
        }

        return result
    }
}
// Recursively expands terms in `obj` (both string values and object keys)
// against the supplied normalized JSON-LD context. Primitives pass through
// unchanged; arrays expand element-wise. A fresh structure is returned.
export function expandObject(obj: any, normalizedContext: ldcp.JsonLdContextNormalized): any {

    if (obj === null) {
        return null
    }

    // Strings are treated as terms and expanded to full IRIs.
    if (typeof obj === 'string') {
        return normalizedContext.expandTerm(obj, true)
    }

    // Numbers and booleans are returned untouched.
    if (typeof obj === 'number' || typeof obj === 'boolean') {
        return obj
    }

    // Arrays expand element-wise.
    if (obj instanceof Array) {
        const expanded = []
        for (const item of obj) {
            expanded.push(expandObject(item, normalizedContext))
        }
        return expanded
    }

    if (typeof obj === 'object') {
        const result: any = {}

        for (const key in obj) {
            // TODO: 1 What to do when a key cannot be expanded? For now the
            // original key is kept as-is.
            let expandedKey = normalizedContext.expandTerm(key, true)
            if (expandedKey == null) {
                expandedKey = key
            }

            // ATTENTION: Excluding Property values from expansion IS correct, but hard-coding it this way probably
            // isn't the best solution. We should try to implement this according to the JSON-LD + NGSI-LD specifications
            // and take into account rules defined in context definitions.
            // TODO: 2 Properly decide what is expanded and what not.
            if (expandedKey != "value" && expandedKey != "https://uri.etsi.org/ngsi-ld/hasValue") {
                result[expandedKey] = expandObject(obj[key], normalizedContext)
            }
            else {
                result[expandedKey] = obj[key]
            }
        }

        return result
    }
}
/**
 * Resolves remote context references to concrete documents and parses the
 * result into a normalized JSON-LD context.
 */
export async function getNormalizedContext(nonNormalizedContext: any): Promise<ldcp.JsonLdContextNormalized> {
    const resolvedContexts = await httpFetchContexts(nonNormalizedContext)
    return contextParser.parse(resolvedContexts)
}
/**
 * Resolves a JSON-LD @context value (single entry or array) into an array of
 * concrete context documents. String entries that are http(s) URLs are
 * downloaded and cached on disk (keyed by the URL's MD5 hash); object entries
 * are passed through unchanged; anything else is logged and skipped.
 */
export async function httpFetchContexts(context: any): Promise<Array<any>> {

    const contextCacheDir = "contextCache/"

    const result: Array<any> = []

    // Normalize so single-context and multi-context inputs are handled uniformly.
    if (!(context instanceof Array)) {
        context = [context]
    }

    if (!fs.existsSync(contextCacheDir)) {
        fs.mkdirSync(contextCacheDir);
    }

    for (const entry of context) {

        if (typeof (entry) == "string" && ((entry.startsWith("https://") || entry.startsWith("http://")))) {

            const url = entry
            const fileName = contextCacheDir + md5(url) + ".jsonld"

            // Download and cache the context document if it is not cached yet.
            if (!fs.existsSync(fileName)) {

                const response = await axios.get(url).catch((e) => {
                    console.log("ERROR when trying to fetch context document")
                    console.log(e)
                })

                if (response != undefined) {
                    fs.writeFileSync(fileName, JSON.stringify(response.data))
                }
            }

            // BUG FIX: previously the cache file was read unconditionally, which
            // threw an unhandled ENOENT error when the download above had failed.
            // A failed, uncached context is now skipped with a log message instead.
            if (fs.existsSync(fileName)) {
                const contextItem = JSON.parse(fs.readFileSync(fileName).toString())
                result.push(contextItem)
            }
            else {
                console.log("Failed to load context document: " + url)
            }
        }
        else if (typeof (entry) == "object") {
            result.push(entry)
        }
        else {
            // TODO: consider throwing here instead of silently skipping invalid entries.
            console.log("Invalid context entry: " + entry)
        }
    }

    return result
}
|
def fit_cluster_23():
    """Fit a two-component Gaussian mixture to cluster 23 in (z0, z3) space.

    Returns:
        tuple: (fitted GaussianMixture, list of class labels ordered to match
        the mixture components, where the component with the smaller z0 mean
        is labelled "L").
    """
    data = classy.data.load()
    features = data.loc[data.cluster == 23, ["z0", "z3"]]

    model = GaussianMixture(
        n_components=2,
        random_state=0,
        means_init=[[-0.6, -0.4], [-0.3, -0.3]],
        max_iter=1000,
    ).fit(features)

    # Assign "L" to whichever component has the smaller z0 mean.
    if model.means_[0][0] < model.means_[1][0]:
        class_labels = ["L", "M"]
    else:
        class_labels = ["M", "L"]

    return model, class_labels
import { BigNumber } from 'bignumber.js'
import { maxUint256 } from 'blockchain/calls/erc20'
import { DEFAULT_PROXY_ADDRESS } from 'helpers/mocks/vaults.mock'
import { openVaultStory } from 'helpers/stories/OpenVaultStory'
import { one } from 'helpers/zero'
import { OpenVaultView } from '../OpenVaultView'
// Storybook fixtures for the open-vault flow's blocking/validation states.
// Each story configures the mocked environment (first call) and the user's
// form input (second call) that triggers a specific validation error.
const proxyAddress = DEFAULT_PROXY_ADDRESS

// Requested debt is too high for the deposited collateral at the current price.
export const VaultWillBeUnderCollateralized = openVaultStory({
  title: 'User is generating too much debt for the amount of collateral to be deposited',
  proxyAddress,
})({
  depositAmount: new BigNumber('10'),
  generateAmount: new BigNumber('4000'),
})

// Same as above, but only after the pending price update takes effect.
export const VaultWillBeUnderCollateralizedNextPrice = openVaultStory({
  title:
    'User is generating too much debt for the amount of collateral to be deposited at next price. User can do this action but will be subject to liquidation when price updates',
  proxyAddress,
  priceInfo: {
    collateralChangePercentage: new BigNumber('-0.7'),
  },
})({
  depositAmount: new BigNumber('30'),
  generateAmount: new BigNumber('4000'),
})

// Deposit exceeds the user's wallet balance.
export const DepositAmountExceedsCollateralBalance = openVaultStory({
  title:
    'Amount user is depositing exceeds the balance of collateral they have outstanding in their wallet',
  proxyAddress,
  balanceInfo: { collateralBalance: new BigNumber('999') },
})({
  depositAmount: new BigNumber('1000'),
})

// Depositing the entire native-token balance would leave nothing for gas.
export const DepositingAllEthBalance = openVaultStory({
  title:
    'Error occurs when a user opening an VLX vault tries to deposit all their VLX into the vault',
  balanceInfo: {
    collateralBalance: new BigNumber('100'),
  },
  ilk: 'VLX-A',
  proxyAddress,
})({
  depositAmount: new BigNumber('100'),
})

// Debt request exceeds what the collateral can yield at the liquidation ratio.
export const GenerateAmountExceedsUsdvYieldFromDepositingCollateral = openVaultStory({
  title:
    'Amount of usdv user is attempting to generate exceeds the maximum amount of USDV that can be generated given the liquidation ratio of 150% in this case',
  proxyAddress,
  priceInfo: { collateralPrice: new BigNumber('2000') },
})({
  depositAmount: new BigNumber('150'),
  generateAmount: new BigNumber('200000.01'),
})

// Same as above, but the ceiling is only breached at the next price update.
export const GenerateAmountExceedsUsdvYieldFromDepositingCollateralAtNextPrice = openVaultStory({
  title:
    'Amount of usdv user is attempting to generate exceeds the maximum amount of USDV that can be generated at next price update, the user could proceed with this transaction but is inadvised as they would be subject to liquidations on next price update',
  proxyAddress,
  priceInfo: {
    collateralPrice: new BigNumber('2000'),
    collateralChangePercentage: new BigNumber('-0.2'),
  },
})({
  depositAmount: new BigNumber('150'),
  generateAmount: new BigNumber('180000'),
})

// The ilk's global debt ceiling would be exceeded.
export const GenerateAmountExceedsDebtCeiling = openVaultStory({
  title:
    'Amount of usdv user is trying to generate exceeds the amount of usdv available for that ilk',
  proxyAddress,
  ilkData: {
    ilkDebt: new BigNumber('10000'),
    debtCeiling: new BigNumber('13000'),
  },
})({
  depositAmount: new BigNumber('20'),
  generateAmount: new BigNumber('4000'),
})

// Requested debt is below the ilk's dust limit (debt floor).
export const GenerateAmountLessThanDebtFloor = openVaultStory({
  title:
    'Error is shown when a user is generating an amount of USDV that would cause the debt outstanding in the vault to be less than the dust limit/debt floor.',
  ilkData: { debtFloor: new BigNumber('2000') },
  proxyAddress,
})({
  depositAmount: new BigNumber('10'),
  generateAmount: new BigNumber('1999'),
})

// Allowance stage: a custom allowance of zero/undefined must be rejected.
export const CustomAllowanceEmpty = openVaultStory({
  title: 'Error should block user if the allowance they wish to set is zero',
  balanceInfo: { usdvBalance: new BigNumber('10000') },
  proxyAddress,
})({
  stage: 'allowanceWaitingForConfirmation',
  depositAmount: new BigNumber('10'),
  selectedAllowanceRadio: 'custom',
  allowanceAmount: undefined,
})

// Allowance stage: a custom allowance above maxUint256 must be rejected.
export const CustomAllowanceAmountGreaterThanMaxUint256 = openVaultStory({
  title: 'Error should block user if the allowance they wish to set a value above maxUint256',
  balanceInfo: { usdvBalance: new BigNumber('10000') },
  proxyAddress,
})({
  stage: 'allowanceWaitingForConfirmation',
  depositAmount: new BigNumber('10'),
  selectedAllowanceRadio: 'custom',
  allowanceAmount: maxUint256.plus(one),
})
// Allowance stage: a custom allowance below the deposit amount must be rejected.
// BUG FIX: the title was copy-pasted from the maxUint256 story and described
// the wrong condition; it now matches what this fixture actually exercises.
export const CustomAllowanceAmountLessThanDepositAmount = openVaultStory({
  title:
    'Error should block user if the allowance they wish to set is less than the deposit amount',
  balanceInfo: { usdvBalance: new BigNumber('10000') },
  proxyAddress,
})({
  stage: 'allowanceWaitingForConfirmation',
  depositAmount: new BigNumber('10'),
  allowanceAmount: new BigNumber('9'),
  selectedAllowanceRadio: 'custom',
})
// Storybook metadata: groups these stories under 'OpenVault/Blocking'.
// eslint-disable-next-line import/no-default-export
export default {
  title: 'OpenVault/Blocking',
  component: OpenVaultView,
}
|
def exportDataUsingSQL(self, tableOrReportURI, format, exportToFileObj, sql, config=None):
    """Export the result of the given SQL query into the provided file object.

    Args:
        tableOrReportURI: URI of the table or report to export from.
        format: output format of the export.
        exportToFileObj: file-like object the exported data is written to.
        sql: SQL query whose result set is exported.
        config: optional additional configuration parameters.

    Returns:
        The result of the EXPORT request.
    """
    payload = ReportClientHelper.getAsPayLoad([config], None, sql)
    export_url = ReportClientHelper.addQueryParams(tableOrReportURI, self.authtoken, "EXPORT", format)
    return self.__sendRequest(export_url, "POST", payload, "EXPORT", exportToFileObj)
#include <bits/stdc++.h>
using namespace std;
// Squares a value.
// BUG FIX: the previous signature was int p2(int), but main() calls this with
// double coordinates read from the input; the implicit double->int conversion
// truncated the fractional part before squaring (e.g. p2(2.5) computed 2*2),
// corrupting the distance calculation. Using double keeps full precision and
// remains compatible with integer arguments.
double p2(double x)
{
    return x * x;
}
int main()
{
int n,s;
cin>>n>>s;
if(s==1000000)
{
cout<<0;
return 0;
}
int sum=s;
vector<pair<double,int> >distance;
for(int i=0;i<n;++i)
{
int k;
double x,y;
cin>>x>>y>>k;
distance.push_back( make_pair( sqrt( p2(x)*1.0 + p2(y)*1.0 ),k ) );
}
sort(distance.begin(),distance.end());
for(int i=0;i<n;++i)
{
sum+=distance[i].second;
if(sum>=1000000)
{
cout<<setprecision(8)<<distance[i].first;
return 0;
}
}
cout<<-1;
return 0;
}
|
async def list(
    self, prefix: Optional[str] = None, *, continuation_token: Optional[str] = None
) -> AsyncIterable[File]:
    """Yield every object in the bucket, optionally filtered by key prefix.

    Issues ListObjectsV2-style requests ('list-type': 2) and follows
    pagination via the continuation token until the response reports it is
    no longer truncated.

    Args:
        prefix: key prefix to filter by; must not start with '/'.
        continuation_token: token to resume a previous listing from.

    Yields:
        File: one parsed entry per <Contents> element in each response page.
    """
    assert prefix is None or not prefix.startswith('/'), 'the prefix to filter by should not start with a "/"'
    while True:
        # Drop None-valued params so they are not sent as literal query arguments.
        params = {'list-type': 2, 'prefix': prefix, 'continuation-token': continuation_token}
        r = await self._client.get(params={k: v for k, v in params.items() if v is not None})
        # Strip the XML namespace so elements can be addressed by local name.
        xml_root = ElementTree.fromstring(xmlns_re.sub(b'', r.content))
        for c in xml_root.findall('Contents'):
            yield File.parse_obj({v.tag: v.text for v in c})
        # NOTE(review): assumes IsTruncated (and, when truncated,
        # NextContinuationToken) is always present in the response —
        # TODO confirm against the service's API contract.
        if xml_root.find('IsTruncated').text == 'false':
            break
        continuation_token = xml_root.find('NextContinuationToken').text
/**
* Perform the dot product of the two matrices or vectors. Each row,column item is multiplied into the row,column item of the
* other matrix or vector and added to the sum. This operation is usually employed with vectors.
*
* @param matrix2
* @return the sum of the products
*/
public double dot( Matrix matrix2 ) {
double response = 0.0;
int dotRows = Math.min( getRows(), matrix2.getRows() );
int dotCols = Math.min( getCols(), matrix2.getCols() );
for( int row = 0; row < dotRows; ++row ) {
for( int col = 0; col < dotCols; ++col ) {
response += this.getValue( row, col ) * matrix2.getValue( row, col );
}
}
return response;
} |
/**
 * @brief Saves the options and scenario config to the disk as JSON.
 * @details Writes `options_output` and `scenario_output` into the configured
 * output directory. This is needed for the analysis and to reproduce a
 * specific run.
 * @note Assumes the output directory already exists — TODO confirm it is
 * created elsewhere before planning starts.
 */
void ProSeCoPlanner::save_config() const {
  ROS_INFO_STREAM("Writing output to: " + oOpt().output_path);
  // Persist the planner options and the scenario definition as separate files.
  util::saveJSON(oOpt().output_path + "/options_output", m_cfg->options.toJSON());
  util::saveJSON(oOpt().output_path + "/scenario_output", sOpt().toJSON());
}
Research Report : Cacti : A Front End for Program Visualization
In this paper we describe a system that allows the user to rapidly construct program visualizations over a variety of data sources. Such a system is a necessary foundation for using visualization as an aid to software understanding. The system supports an arbitrary set of data sources so that information from both static and dynamic analysis can be combined to offer meaningful software visualizations. It provides the user with a visual universal-relation front end that supports the definition of queries over multiple data sources without knowledge of the structure or contents of the sources. It uses a flexible back end with a range of different visualizations, most geared to the efficient display of large amounts of data. The result is a high-quality, easy-to-define program visualization that can address specific problems and hence is useful for software understanding. The overall system is flexible and extensible in that both the underlying data model and the set of visualizations are defined in resource files. 1.0 Background and Motivation Program visualization is the process of providing visual representations of a program and its execution to the programmer. Because software developers typically draw diagrams to describe and help others understand how their software works, the classical motivation for program visualization has been that it is an aid to software understanding. Software understanding is the task of helping a programmer to answer questions about the software during maintenance or development. It is a key to software development since it involves the ability to answer the specific questions that tend to arise in these phases. For example, a developer might want to know why a particular function is called so often or how a particular situation involving timing constraints could arise or what needs to be modified to add a parameter to a given function. 
Program visualization efforts have a long history, dating back to a variety of programs that would automatically produce flowcharts from a deck of Fortran cards, ranging to standard diagrams such as call graphs and dependency diagrams.
import sys
import math

# "Game on Leaves": for each test case read a tree with n nodes and a special
# node x; players alternately remove leaves and whoever removes x wins.
# Ayush moves first. He wins immediately if x is already a leaf (or isolated);
# otherwise the winner is decided by the parity of n.
t = int(sys.stdin.readline())
for _ in range(t):
    n, x = map(int, sys.stdin.readline().split())

    # Build an undirected adjacency list.
    adjacency = {}
    for _ in range(n - 1):
        u, v = map(int, sys.stdin.readline().split())
        adjacency.setdefault(u, []).append(v)
        adjacency.setdefault(v, []).append(u)

    degree_of_x = len(adjacency.get(x, []))
    if degree_of_x <= 1:
        # x is isolated or already a leaf: Ayush removes it on his first move.
        print("Ayush")
    elif n % 2 == 0:
        print("Ayush")
    else:
        print("Ashish")
|
/**
* Tests for UnitsHelper.
*/
import * as chai from "chai";
import { UnitsHelper } from "../../src/helpers/unitsHelper";
// tslint:disable-next-line:no-default-import
import convertUnitsJson from "./convert-units.json";
// Unit tests for UnitsHelper: unit conversion and human-readable formatting.
describe("UnitsHelper", () => {
    it("can be created", () => {
        const obj = new UnitsHelper();
        chai.should().exist(obj);
    });

    // Conversion between unit magnitudes (i, Ki, Mi, Gi, Ti, Pi).
    describe("convertUnits", () => {
        // Parameter validation: the value must be a numeric string and the
        // units must come from the known unit set.
        it("can fail if not a number", () => {
            chai.expect(() => UnitsHelper.convertUnits(undefined, undefined, undefined)).to.throw("number formatted as a string");
        });
        it("can fail if not a valid number", () => {
            chai.expect(() => UnitsHelper.convertUnits("", undefined, undefined)).to.throw("number formatted as a string");
        });
        it("can fail if no unit from", () => {
            chai.expect(() => UnitsHelper.convertUnits("1", undefined, undefined)).to.throw("unitFrom");
        });
        it("can fail if not a valid unit from", () => {
            chai.expect(() => UnitsHelper.convertUnits("1", "x", undefined)).to.throw("unitFrom must be");
        });
        it("can fail if no unit to", () => {
            chai.expect(() => UnitsHelper.convertUnits("1", "i", undefined)).to.throw("unitTo");
        });
        it("can fail if not a valid unit to", () => {
            chai.expect(() => UnitsHelper.convertUnits("1", "i", "x")).to.throw("unitTo must be");
        });
        it("can convert from and to", () => {
            // Hand-picked cases covering up- and down-conversion, fractional
            // inputs, identity conversion and sub-unit results.
            const tests: { value: string; from: string; to: string; expected: string }[] = [
                { value: "1234.5678", from: "i", to: "i", expected: "1234"},
                { value: "100", from: "Gi", to: "i", expected: "100000000000"},
                { value: "10.1", from: "Gi", to: "i", expected: "10100000000"},
                { value: "1", from: "i", to: "Ti", expected: "0.000000000001"},
                { value: "1", from: "Ti", to: "i", expected: "1000000000000"},
                { value: "1000", from: "Gi", to: "Ti", expected: "1"},
                { value: ".1", from: "Gi", to: "Ti", expected: "0.0001"}
            ];
            tests.forEach(test => {
                chai.expect(UnitsHelper.convertUnits(test.value, test.from, test.to)).to.be.equal(test.expected);
            });
        });
        it("can convert from and to json", () => {
            // Bulk regression data from convert-units.json; progress is logged
            // every 10% so slow runs remain visible.
            const numTestRounds = convertUnitsJson.length;
            for (let i = 0; i < numTestRounds; i++) {
                if (i % (numTestRounds / 10) === 0) {
                    // tslint:disable-next-line:no-console
                    console.log(`\t\t\t${i} of ${numTestRounds}`);
                }
                chai.expect(UnitsHelper.convertUnits(convertUnitsJson[i].value, convertUnitsJson[i].from, convertUnitsJson[i].to)).to.be.equal(convertUnitsJson[i].expected);
            }
        });
    });

    // Formatting raw values into "<number> <unit>" strings with a bounded
    // number of decimal places.
    describe("format", () => {
        it("can fail if not a number string", () => {
            chai.expect(() => UnitsHelper.format(undefined)).to.throw("number formatted as a string");
        });
        it("can fail if max decimal places < 0", () => {
            chai.expect(() => UnitsHelper.format("100", -1)).to.throw(">= 0");
        });
        it("can format", () => {
            // Expected output for every magnitude boundary at 0, 1 and 2
            // decimal places.
            const tests: { value: string; dp: number; expected: string }[] = [
                { value: "1", dp: 0, expected: "1 i"},
                { value: "1", dp: 1, expected: "1 i"},
                { value: "1", dp: 2, expected: "1 i"},
                { value: "12", dp: 0, expected: "12 i"},
                { value: "12", dp: 1, expected: "12 i"},
                { value: "12", dp: 2, expected: "12 i"},
                { value: "123", dp: 0, expected: "123 i"},
                { value: "123", dp: 1, expected: "123 i"},
                { value: "123", dp: 2, expected: "123 i"},
                { value: "1234", dp: 0, expected: "1 Ki"},
                { value: "1234", dp: 1, expected: "1.2 Ki"},
                { value: "1234", dp: 2, expected: "1.23 Ki"},
                { value: "12345", dp: 0, expected: "12 Ki"},
                { value: "12345", dp: 1, expected: "12.3 Ki"},
                { value: "12345", dp: 2, expected: "12.34 Ki"},
                { value: "123456", dp: 0, expected: "123 Ki"},
                { value: "123456", dp: 1, expected: "123.4 Ki"},
                { value: "123456", dp: 2, expected: "123.45 Ki"},
                { value: "1234567", dp: 0, expected: "1 Mi"},
                { value: "1234567", dp: 1, expected: "1.2 Mi"},
                { value: "1234567", dp: 2, expected: "1.23 Mi"},
                { value: "12345678", dp: 0, expected: "12 Mi"},
                { value: "12345678", dp: 1, expected: "12.3 Mi"},
                { value: "12345678", dp: 2, expected: "12.34 Mi"},
                { value: "123456789", dp: 0, expected: "123 Mi"},
                { value: "123456789", dp: 1, expected: "123.4 Mi"},
                { value: "123456789", dp: 2, expected: "123.45 Mi"},
                { value: "1234567891", dp: 0, expected: "1 Gi"},
                { value: "1234567891", dp: 1, expected: "1.2 Gi"},
                { value: "1234567891", dp: 2, expected: "1.23 Gi"},
                { value: "12345678912", dp: 0, expected: "12 Gi"},
                { value: "12345678912", dp: 1, expected: "12.3 Gi"},
                { value: "12345678912", dp: 2, expected: "12.34 Gi"},
                { value: "123456789123", dp: 0, expected: "123 Gi"},
                { value: "123456789123", dp: 1, expected: "123.4 Gi"},
                { value: "123456789123", dp: 2, expected: "123.45 Gi"},
                { value: "1234567891234", dp: 0, expected: "1 Ti"},
                { value: "1234567891234", dp: 1, expected: "1.2 Ti"},
                { value: "1234567891234", dp: 2, expected: "1.23 Ti"},
                { value: "12345678912345", dp: 0, expected: "12 Ti"},
                { value: "12345678912345", dp: 1, expected: "12.3 Ti"},
                { value: "12345678912345", dp: 2, expected: "12.34 Ti"},
                { value: "123456789123456", dp: 0, expected: "123 Ti"},
                { value: "123456789123456", dp: 1, expected: "123.4 Ti"},
                { value: "123456789123456", dp: 2, expected: "123.45 Ti"},
                { value: "1234567891234567", dp: 0, expected: "1 Pi"},
                { value: "1234567891234567", dp: 1, expected: "1.2 Pi"},
                { value: "1234567891234567", dp: 2, expected: "1.23 Pi"},
                { value: "12345678912345678", dp: 0, expected: "12 Pi"},
                { value: "12345678912345678", dp: 1, expected: "12.3 Pi"},
                { value: "12345678912345678", dp: 2, expected: "12.34 Pi"},
                { value: "123456789123456789", dp: 0, expected: "123 Pi"},
                { value: "123456789123456789", dp: 1, expected: "123.4 Pi"},
                { value: "123456789123456789", dp: 2, expected: "123.45 Pi"}
            ];
            tests.forEach(test => {
                // tslint:disable-next-line:no-console
                console.log(`\t\t\t${test.value} ${test.dp} = ${test.expected}`);
                chai.expect(UnitsHelper.format(test.value, test.dp)).to.be.equal(test.expected);
            });
        });
    });
});
|
Arsenal’s pre-season preparations, which will commence when the club’s professional players return to London Colney for training a week today, will take a somewhat different course than usual this year, with the traditional curtain raiser against Barnet at Underhill and subsequent tour of Austria, which had been a staple of Arsene Wenger’s build-up in previous years, scrapped in favour of a Far East tour which will incorporate matches against a Malaysian XI and Hangzhou Greentown, before a return to Europe for friendlies against Cologne and Benfica, sandwiched in between which will be the Emirates Cup and the return of the club’s all time leading goalscorer, Thierry Henry, to Emirates Stadium.
Arsenal’s large batch of young professionals all possess different aims going into their preparations for the new campaign, with some hoping to make the plane for that tour to Asia, whilst others will simply be seeking to re-build their fitness with the Reserves and under-18s.
Here is a player-by-player breakdown of what each individual will be seeking to gain from this pre-season.
Benik Afobe
Off the back of an impressive loan spell with Huddersfield, Benik has stated that the decision now rests with Arsene Wenger as to whether he is provided with an opportunity to fight for a place in the first-team squad or go out on loan. Should he impress sufficiently in training then, depending on the situation with the club’s other strikers, Benik could see himself make the plane to Asia, although the more likely option is that he will feature for the Reserves before embarking on another loan spell, this time in the Championship.
Chuks Aneke
Having earned a new contract following a series of impressive performances in the Reserves, Chuks is in a similar position to Benik in that he will be hoping to impose himself on the first-team this pre-season but, if not, will likely feature for the Reserves and the diligent midfielder is one of several youngsters likely to go on loan next season.
Emmanuel Frimpong
Emmanuel looked to have made his mark last pre-season before injury struck and he spent almost the entire 2010/11 campaign on the sidelines. With Francis Coquelin away with France U20s and Craig Eastmond recovering from injury, Frimpong should get his chance to shine again this time around and is a prime candidate for the Asia Tour. A loan spell, in some capacity, is a likely option at some point this coming season.
Daniel Boateng
Despite having established himself as a Reserves regular, first-team opportunities still appear some way off for Daniel and he is likely to spend pre-season with the Reserves, possibly captaining them on occasion. Another who could go out on loan later in the season.
Craig Eastmond
Still recovering from the injury that prematurely curtailed his loan spell with Millwall, much will depend on his fitness but, if all goes well, he could travel to Asia, where he will look to compete with Frimpong for a place in the squad as cover.
Jay Emmanuel-Thomas
His spell at Cardiff wasn’t the most fruitful and he will be looking to use the Asia Tour as an opportunity to prove to Wenger that he can be part of his plans for the forthcoming campaign. Another loan spell in the Championship, however, is likely.
Sead Hajrovic
Another who is far away from the first-team at present, Sead’s form was rather inconsistent last campaign and he is likely to spend another season in the Reserves, possibly operating more regularly in his favoured centre-back position.
Gavin Hoyte
Also recovering from a long-term injury, it is likely that this coming campaign will be his last in an Arsenal shirt, with another loan spell, once he has regained full fitness, likely to precede a permanent transfer to a lower-league club.
Henri Lansbury
Perhaps the most capable of achieving a first-team breakthrough, much will depend on the futures of Cesc Fabregas and Samir Nasri but, should he impress sufficiently in Asia, Henri could find himself promoted to the squad as cover.
Sean McDermott
Showed some signs of improvement last season, and made his debut for the Reserves, but still a long way off achieving his dream of becoming the club’s number one goalkeeper. Could compete with Martinez for Reserve-team duties this season if Shea goes on loan.
Ignasi Miquel
Made his breakthrough last season with two appearances against Leyton Orient in the FA Cup but could find his pre-season plans somewhat disrupted by participation in the FIFA U20 World Cup.
Conor Henderson
Also possessing a sprinkling of first-team experience, Conor is also likely to travel to Asia before embarking on a loan spell.
Rhys Murphy
Appears in desperate need of a successful loan spell if he is ever going to come close to reaching the immense potential he displayed as a goalscoring schoolboy.
Oguzhan Ozyakup
Having extended his contract at the club, Oguzhan should be introduced to the professional game this coming season via a loan spell. He should train with the first-team, but competition for places means a place on the Asia Tour is seemingly unlikely.
James Shea
Provided much-needed first-team cover last season and, this time around, after spending pre-season in the Reserves, is likely to go out on loan.
Luke Freeman
Ended the season well with the Reserves and is likely to remain in the second-string set up during pre-season before embarking on what will hopefully be a more successful loan spell than his time at Yeovil.
Gilles Sunu
Unlikely to be seen at London Colney this summer due to his participation in the FIFA U20 World Cup with France, he should embark on what will be his third loan spell thereafter.
Nico Yennaris
Gradually easing his way back to fitness after injury, he is likely to play a key role for the Reserves this pre-season.
Damian Martinez
Poised to represent Argentina at the aforementioned U20 World Cup, he could return as the Reserves’ number one goalkeeper.
George Brislen-Hall
Having worked hard to earn a professional deal, George will be looking to use pre-season to establish himself as a key member of the Reserves set-up.
Jernade Meade
In a similar situation to Brislen-Hall, Meade will be hoping for more opportunities as a left-winger.
Martin Angha
This should be the campaign in which Angha asserts himself as a Reserves regular.
Elton Monteiro
Will also be hoping to break into the Reserves, but will spend most of his time with the under-18s.
Jamie Edge
Similarly to Monteiro, Edge is likely to remain with the under-18s in pre-season after failing to enjoy a consistent run of performances last time around.
Zak Ansah
Now fully over his injury troubles and having signed a pro deal, Zak will be seeking to spend as much time as possible with the Reserves this pre-season and has been touted as a potential loan target for Charlton next season.
Kyle Ebecilio
Will be granted an extended holiday following his participation at the U17 World Cup and is likely to return as part of the Reserves set-up.
Carl Jenkinson
The new signing has been promised first-team game time this pre-season and, should he impress sufficiently, will be promoted to the first-team squad as cover for Bacary Sagna.
Francis Coquelin
Another participant at the U20 World Cup and, as revealed yesterday, his immediate future will not be decided until the culmination of that competition, with Lorient hopeful of re-signing him on loan.
Kyle Bartley
Possesses first-team ambitions and Wenger will assess him this pre-season before considering offers of an extended loan spell at Rangers.
Pedro Botelho
Reportedly set for another season in Spain, this time in the top flight.
Ryo Miyaichi
Is likely to revel in returning to his home continent with Arsenal and, should he shine, the club may make another attempt to apply for a work permit.
Samuel Galindo
Unlikely to return to London Colney, with another loan spell in Spain the most likely option.
Sanchez Watt
Has become somewhat isolated from the first-team picture over the past 18 months and is unlikely to make the cut for Asia, so will be seeking to impress in the Reserves.
Wellington
Reported loan target for Levante, he is likely to be spending a few days training with Arsenal before a decision is made.
Advertisements |
class PrintoutIdentifier:
    """ Represents an identifier that was parsed from a soar print command via parse_wm_printout
    and implements the IdentifierExtensions interface for it.

    wmes is a dict mapping identifier strings to lists of (id, attr, value)
    triples, as produced by parse_wm_printout.
    """

    @staticmethod
    def create(client, id, depth):
        """ Prints the given identifier to the given depth and wraps the result
        in a PrintoutIdentifier; returns None if the identifier does not exist. """
        printout = client.execute_command("p " + id + " -d " + str(depth))
        if printout.strip().startswith("There is no identifier"):
            return None
        wmes = parse_wm_printout(printout)
        return PrintoutIdentifier(wmes, id)

    def __init__(self, wmes, root_id):
        """ wmes is the result of a parse_wm_printout command,
        root_id is the str id for this identifier """
        self.wmes = wmes
        self.root_id = root_id

    def __lt__(self, other):
        # Order by identifier symbol so collections of identifiers sort stably.
        return self.root_id < other.root_id

    def GetIdentifierSymbol(self):
        """ Returns the identifier string of this node (e.g. 'S1'). """
        return self.root_id

    def GetChildString(self, attr):
        """ Returns the raw string value of the given attribute, or None if absent. """
        return self._get_value(attr)

    def GetChildInt(self, attr):
        """ Returns the value of the given attribute as an int, or None if the
        attribute is missing or not a valid integer. """
        val = self._get_value(attr)
        try:
            # BUG FIX: also catch TypeError — int(None) raises TypeError (not
            # ValueError) when the attribute is missing, which previously
            # escaped as an uncaught exception.
            return int(val)
        except (TypeError, ValueError):
            return None

    def GetChildFloat(self, attr):
        """ Returns the value of the given attribute as a float, or None if the
        attribute is missing or not a valid float. """
        val = self._get_value(attr)
        try:
            # BUG FIX: also catch TypeError for a missing attribute (see GetChildInt).
            return float(val)
        except (TypeError, ValueError):
            return None

    def GetChildId(self, attr):
        """ Returns the value of the given attribute wrapped as a
        PrintoutIdentifier, or None if the attribute is absent. """
        child_id = self._get_value(attr)
        if child_id is not None:
            return PrintoutIdentifier(self.wmes, child_id)
        return None

    def GetAllChildIds(self, attr=None):
        """ Returns all child identifiers, optionally restricted to the given
        attribute. A child counts as an identifier if its value is itself a key
        in the wmes dict. """
        child_wmes = [ wme for wme in self.wmes.get(self.root_id, []) if wme[2] in self.wmes ]
        if attr is not None:
            child_wmes = [ wme for wme in child_wmes if wme[1] == attr ]
        return [ PrintoutIdentifier(self.wmes, wme[2]) for wme in child_wmes ]

    def GetAllChildValues(self, attr=None):
        """ Returns all terminal (non-identifier) child values, optionally
        restricted to the given attribute. """
        child_wmes = [ wme for wme in self.wmes.get(self.root_id, []) if wme[2] not in self.wmes ]
        if attr is not None:
            child_wmes = [ wme for wme in child_wmes if wme[1] == attr ]
        return [ wme[2] for wme in child_wmes ]

    def GetAllChildWmes(self):
        """ Returns a list of (attr, value) pairs for all children; identifier
        values are wrapped as PrintoutIdentifier, terminals stay raw strings. """
        child_wmes = []
        for wme in self.wmes.get(self.root_id, []):
            if wme[2] in self.wmes:
                # Identifier
                child_wmes.append( (wme[1], PrintoutIdentifier(self.wmes, wme[2])) )
            else:
                # Value
                child_wmes.append( (wme[1], wme[2]) )
        return child_wmes

    def _get_value(self, attr):
        # First matching wme wins; returns None when the attribute is absent.
        return next((wme[2] for wme in self.wmes.get(self.root_id, []) if wme[1] == attr), None)
def tick(self):
    """Pulse the light for half a second to signal one heartbeat."""
    self.logger.debug("heartbeat felt")
    self.light_on()
    # Keep the light on long enough to be visible before switching it off.
    time.sleep(0.5)
    self.light_off()
import cv2
import time
import pandas as pd
import numpy as np
import plotly.graph_objects as go
from typing import Dict
from configs.colored import bcolors as bc
def approve(
        tresholds: Dict[str, float],
        face_confidence: float,
        face_class: str):
    """
    Check whether a detection's confidence passes the configured treshold.

    Args:
        tresholds: minimum required confidence per class
        face_confidence: confidence of the current detection
        face_class: 'with_mask' or 'no_mask' class

    Returns:
        bool: True when the confidence is strictly above the class treshold
    """
    required_confidence = tresholds[face_class]
    return face_confidence > required_confidence
def save_screenshot(**kwargs):
    """
    Save a screenshot when a disturbance has been detected.

    Kwargs:
        frame: the image to save (as accepted by cv2.imwrite); nothing is
            written when None
        mode (str): 'd' for distance contraventions, 'm' for mask contraventions
        path_to_contraventions_screenshots (str): target dir for mode 'd'
        path_to_masks_screenshots (str): target dir for mode 'm'

    Raises:
        ValueError: if mode is neither 'd' nor 'm'
    """
    frame = kwargs.get('frame', None)
    mode = kwargs.get('mode')

    if mode == 'd':
        path_to_save = kwargs.get(
            'path_to_contraventions_screenshots',
            'statistics/screenshots/distance/')
    elif mode == 'm':
        path_to_save = kwargs.get(
            'path_to_masks_screenshots',
            'statistics/screenshots/masks/')
    else:
        # BUG FIX: an unknown mode previously left path_to_save unbound and
        # crashed later with a NameError; fail fast with a clear message.
        raise ValueError("save_screenshot: unknown mode {!r}, expected 'd' or 'm'".format(mode))

    if frame is None:
        # Nothing to write; cv2.imwrite cannot handle a None image.
        return

    # NOTE(review): the fixed file name means every new screenshot overwrites
    # the previous one — confirm whether a timestamped name is intended.
    cv2.imwrite(path_to_save + 'frame.jpg', frame)
def write_data_to_file(**kwargs):
    """
    Append one statistics row to the masks CSV and one to the contraventions CSV.

    Kwargs:
        path_to_masks_statistics (str): CSV file for mask statistics
        path_to_contraventions_statistics (str): CSV file for distance statistics
        amount_people (int): all detected people
        amount_classes (dict): counts per category ('with_mask', 'no_mask')
        contraventions: number of distance incidents
        absolute_time (float): time between start of the analysis and now
    """
    amount_people = kwargs.get('amount_people')
    amount_classes = kwargs.get('amount_classes')
    contraventions = kwargs.get('contraventions')
    absolute_time = kwargs.get('absolute_time')
    path_to_masks_statistics = kwargs.get('path_to_masks_statistics')
    path_to_contraventions_statistics = kwargs.get('path_to_contraventions_statistics')

    # BUG FIX: log message previously read 'SAVING MAKSKS STATS'.
    print('SAVING MASKS STATS: ' + path_to_masks_statistics)
    print(amount_classes)
    mask_row = [absolute_time, amount_people,
                amount_classes['with_mask'], amount_classes['no_mask']]
    with open(path_to_masks_statistics, 'a') as f:
        f.write(','.join(map(str, mask_row)) + '\n')

    print('SAVING DIST STATS: ' + path_to_contraventions_statistics)
    dist_row = [absolute_time, amount_people, contraventions]
    with open(path_to_contraventions_statistics, 'a') as f:
        f.write(','.join(map(str, dist_row)) + '\n')
def null_data(**kwargs):
    """
    Reset all counters after statistics have been saved.

    Note: the previously documented `init` kwarg is not used by the
    implementation — any kwargs passed are accepted but ignored.

    Returns:
        tuple:
            amount_people (int): reset to 0
            amount_classes (dict): {'with_mask': 0, 'no_mask': 0}
            violation_count (int): reset to 0
            iteration_start_time (float): current time.time() timestamp,
                marking the start of the next measurement interval
    """
    amount_people = 0
    amount_classes = {
        'with_mask': 0,
        'no_mask': 0
    }
    violation_count = 0
    iteration_start_time = time.time()
    # NOTE(review): "wass" typo in this runtime log message is left unchanged here.
    print(bc.OKBLUE + 'Counters wass nulled' + bc.ENDC)
    return amount_people, amount_classes, violation_count, iteration_start_time
def update_line_graphics(**kwargs):
    """
    Update the linear plots in the graphics directory.

    Kwargs:
        data (pandas.DataFrame, optional): table with 'time', 'with_mask' and
            'no_mask' columns; loaded from path_to_masks_statistics when absent
        path_to_masks_statistics (str): path to the statistics CSV
        path_to_graphics_dir (str): directory the HTML graphic is written to
    """
    data = kwargs.get('data')
    # BUG FIX: `if not data:` raises ValueError when a DataFrame is passed
    # ("truth value of a DataFrame is ambiguous"); compare against None instead.
    if data is None:
        path_to_statistic_file = kwargs.get('path_to_masks_statistics')
        data = pd.read_csv(path_to_statistic_file, engine='python')
    path_line_graphics = '{}/graph-1-1.html'.format(kwargs['path_to_graphics_dir'])

    fig = go.Figure()
    # One trace per class: green for masked, red for unmasked detections.
    fig.add_trace(go.Scatter(
        x=data.time, y=data.with_mask,
        text=data.with_mask, line_color='lightgreen',
        mode='lines+markers', name='with mask'
    ))
    fig.add_trace(go.Scatter(
        x=data.time, y=data.no_mask,
        text=data.no_mask, line_color='crimson',
        mode='lines+markers', name='no mask'
    ))
    # Compact layout so the plot fits into the dashboard panel.
    fig.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=False, width=700, height=150,
        legend=dict(x=-.1, y=1))
    fig.write_html(path_line_graphics)
    print(bc.OKBLUE + 'Graphics was saved to : {}'.format(path_line_graphics) + bc.ENDC)
def update_piechart(**kwargs):
    """
    Rebuild the mask/no-mask pie chart and write it as HTML.

    Kwargs:
        labels (List[str]): slice labels; defaults are kept in Russian to
            match the rest of the UI
        data (pandas.DataFrame, optional): table with 'with_mask' / 'no_mask'
            columns; loaded from path_to_masks_statistics when omitted
        path_to_masks_statistics (str): CSV statistics file (used only when
            data is not supplied)
        path_to_graphics_dir (str): directory where the HTML chart is written
    """
    labels = kwargs.get('labels', ['С масками', 'Без масок'])
    data = kwargs.get('data')
    # `not data` on a DataFrame raises ValueError (ambiguous truth value),
    # so compare explicitly. `False` is still accepted for callers that
    # passed data=False to force a reload from disk.
    if data is None or data is False:
        path_to_statistic_file = kwargs.get('path_to_masks_statistics')
        data = pd.read_csv(path_to_statistic_file, engine='python')
        print(bc.OKBLUE + 'UPDATE PIECHART: Data loading from {}'.format(path_to_statistic_file) + bc.ENDC)
    path_piechart = '{}/graph-1-2.html'.format(kwargs['path_to_graphics_dir'])
    values = [data['with_mask'].sum(), data['no_mask'].sum()]
    fig = go.Figure(
        data=[go.Pie(labels=labels, values=values, hole=.3)])
    fig.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=False, width=328, height=150,
        legend=dict(x=-.1, y=1.2)
    )
    # Slice colors match the line plot: green = with mask, red = no mask.
    fig.update_traces(
        marker=dict(colors=['lightgreen', 'crimson']))
    fig.write_html(path_piechart)
    print(bc.OKBLUE + 'Graphics was saved to : {}'.format(path_piechart) + bc.ENDC)
def update_barchart(**kwargs):
    """
    Rebuild the hourly people/violations bar chart and write it as HTML.

    Kwargs:
        data (pandas.DataFrame, optional): table with 'hours', 'all_people'
            and 'disturbance' columns; loaded from
            path_to_contraventions_statistics when omitted
        path_to_contraventions_statistics (str): CSV statistics file (used
            only when data is not supplied)
        path_to_graphics_dir (str): directory where the HTML chart is written
    """
    data = kwargs.get('data')
    # `not data` on a DataFrame raises ValueError (ambiguous truth value),
    # so compare explicitly. `False` is still accepted for callers that
    # passed data=False to force a reload from disk.
    if data is None or data is False:
        path_to_contraventions_statistics = kwargs.get('path_to_contraventions_statistics')
        data = pd.read_csv(path_to_contraventions_statistics, engine='python')
        print(bc.OKBLUE + 'UPDATE BARCHART: Data loading from {}'.format(path_to_contraventions_statistics) + bc.ENDC)
    path_barchart = '{}/graph-1-3.html'.format(kwargs['path_to_graphics_dir'])
    # Truncate timestamps to hour precision ('YYYY-MM-DD HH') and aggregate
    # all counters per hour.
    data['hours'] = data['hours'].str[0:13]
    data = data.groupby('hours').sum().reset_index()
    fig = go.Figure()
    fig.add_trace(go.Bar(
        x=data.hours,
        y=data.all_people,
        name='Amount people',
        marker_color='rgb(26, 118, 255)'
    ))
    # NOTE(review): the column is read as 'disturbance' here while the writer
    # and docstring speak of 'contraventions' — confirm the CSV header
    # actually says 'disturbance'.
    fig.add_trace(go.Bar(
        x=data.hours,
        y=data.disturbance,
        name='Disturbances',
        marker_color='crimson'
    ))
    fig.update_layout(
        uniformtext_minsize=8,
        uniformtext_mode='hide',
        margin=dict(l=0, r=0, t=0, b=0),
        width=1100, height=300,
        xaxis_tickfont_size=14,
        legend=dict(
            x=0,
            y=1.0,
            bgcolor='rgba(255, 255, 255, 0)',
            bordercolor='rgba(255, 255, 255, 0)'
        ),
        barmode='group',
        bargap=0.40,
        bargroupgap=0.1
    )
    fig.write_html(path_barchart)
    print(bc.OKBLUE + 'Graphics was saved to : {}'.format(path_barchart) + bc.ENDC)
def interval_timer(iteration_start_time, interval):
    """
    Check whether the current statistics interval has elapsed.

    Args:
        iteration_start_time (float): time.time() value taken when the
            interval began
        interval (float): interval duration in seconds

    Returns:
        bool: True when more than `interval` seconds have passed
    """
    # Compute the comparison once instead of duplicating it in the `if`
    # and the `return` (the original evaluated `time_diff > interval` twice).
    expired = (time.time() - iteration_start_time) > interval
    if expired:
        print(bc.OKBLUE + 'New interval was detected' + bc.ENDC)
    return expired
def concretize_statistics_files(**kwargs):
    """
    Populate every statistics/screenshot/graphics path that was not
    explicitly supplied, deriving file locations from the base directories.

    Returns:
        Dict[str, Any]: kwargs with all path keys filled in
    """
    stats_dir = kwargs.get('path_to_statistics_dir', 'statistics/data')
    shots_dir = kwargs.get('path_to_screenshots_dir', 'statistics/screenshots')
    # Write the (possibly defaulted) base directories back, then derive
    # every concrete file/subdirectory path from them.
    kwargs['path_to_statistics_dir'] = stats_dir
    kwargs['path_to_screenshots_dir'] = shots_dir
    kwargs['path_to_masks_statistics'] = '{}/masks_statistics_00.csv'.format(stats_dir)
    kwargs['path_to_contraventions_statistics'] = '{}/contraventions_statistics_00.csv'.format(stats_dir)
    kwargs['path_to_masks_screenshots'] = '{}/masks/'.format(shots_dir)
    kwargs['path_to_contraventions_screenshots'] = '{}/distance/'.format(shots_dir)
    kwargs['path_to_graphics_dir'] = kwargs.get('path_to_graphics_dir', 'graphics')
    return kwargs
def evaluate_statistics(**kwargs):
    """
    Main entry point for statistics evaluation at the end of an interval:
    writes the collected counters to CSV, refreshes every plot, saves
    screenshots of detected violations and resets the counters.

    Kwargs:
        amount_people (int): people recognized during the whole iteration
        amount_classes (Dict[str, int]): counts for 'with_mask' / 'no_mask'
        contraventions (int): violations recognized during the iteration
        absolute_time (str): formatted wall-clock time for logging rows;
            generated from the current time when omitted
        iteration_start_time (float): system time at iteration start
        interval (int): iteration duration in seconds
        frame (np.ndarray | None): frame with a distance violation, if any
        mask_frame (np.ndarray | None): frame with a mask violation, if any
        path_to_statistics_dir (str): where CSV statistics are saved
            (default 'statistics/data')
        path_to_graphics_dir (str): where HTML graphics are saved
            (default 'graphics')
        path_to_screenshots_dir (str): where violation screenshots are saved
            (default 'statistics/screenshots')

    Returns:
        tuple: (amount_people, amount_classes, violation_count,
        iteration_start_time) — the reset counters produced by null_data().
    """
    print(bc.OKBLUE + 'STATISTICS EVALUATION ENABLED' + bc.ENDC)
    transform_data = lambda t: time.strftime('%Y-%m-%d %H:%M %Z', time.localtime(t))
    kwargs['absolute_time'] = kwargs.get('absolute_time', transform_data(time.time()))
    kwargs['amount_classes'] = kwargs.get('amount_classes', {'with_mask': 0, 'no_mask': 0})
    # Fill in any statistics/graphics/screenshot paths the caller omitted.
    kwargs = concretize_statistics_files(**kwargs)
    print(bc.OKBLUE + 'Called statistics writer to file and graphics updating' + bc.ENDC)
    write_data_to_file(**kwargs)
    update_line_graphics(**kwargs)
    update_piechart(**kwargs)
    update_barchart(**kwargs)
    # Screenshots are saved only when an actual frame (np.ndarray) was handed
    # in; .get() avoids a KeyError when the caller omitted the key entirely.
    frame = kwargs.get('frame')
    if isinstance(frame, np.ndarray):
        print(bc.WARNING + 'Called SCREENSAVER FOR DISTANCE DIST' + bc.ENDC)
        save_screenshot(frame=frame, mode='d')
    mask_frame = kwargs.get('mask_frame')
    if isinstance(mask_frame, np.ndarray):
        print(bc.WARNING + 'Called SCREENSAVER FOR MASK DIST' + bc.ENDC)
        save_screenshot(frame=mask_frame, mode='m')
    # null_data() returns the reset 4-tuple of counters for the next interval
    # (the original also built an unused 3-tuple here, which was dead code).
    counters = null_data()
    print(bc.OKBLUE + 'Statistics successfully written to file: {} and graphics was updated'.format(kwargs['path_to_statistics_dir']) + bc.ENDC)
    return counters
|
// This file is generated. Do not edit
// @generated
// https://github.com/Manishearth/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy)]
#![cfg_attr(rustfmt, rustfmt_skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unsafe_code)]
#![allow(unused_imports)]
#![allow(unused_results)]
// TODO: Hand edited! Figure out a better solution for objecthash support
use alg;
use objecthash::{self, ObjectHash, ObjectHasher};
use protobuf::Message as Message_imported_for_functions;
use protobuf::ProtobufEnum as ProtobufEnum_imported_for_functions;
/// Encrypted access credential, generated from the `ithos.object.Credential`
/// message in `object/credential.proto` (see the embedded file descriptor
/// at the bottom of this file).
#[derive(PartialEq,Clone,Default)]
pub struct Credential {
    // message fields
    // Proto3 field numbers are noted on the generated accessors below.
    pub keyid: ::std::vec::Vec<u8>,
    pub credential_type: Type,
    pub credential_alg: ::std::string::String,
    pub sealing_alg: alg::EncryptionAlg,
    pub encrypted_value: ::std::vec::Vec<u8>,
    pub salt: ::std::vec::Vec<u8>,
    pub public_key: ::std::vec::Vec<u8>,
    // Validity window; u64 — presumably UNIX timestamps, confirm with callers.
    pub not_before: u64,
    pub not_after: u64,
    pub description: ::std::string::String,
    // special fields
    unknown_fields: ::protobuf::UnknownFields,
    cached_size: ::protobuf::CachedSize,
}
// see codegen.rs for the explanation why impl Sync explicitly
// NOTE(review): Sync is asserted manually by the code generator; consult
// codegen.rs for the justification before relying on cross-thread sharing.
unsafe impl ::std::marker::Sync for Credential {}
impl Credential {
pub fn new() -> Credential {
::std::default::Default::default()
}
pub fn default_instance() -> &'static Credential {
static mut instance: ::protobuf::lazy::Lazy<Credential> = ::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const Credential,
};
unsafe { instance.get(Credential::new) }
}
// bytes keyid = 1;
pub fn clear_keyid(&mut self) {
self.keyid.clear();
}
// Param is passed by value, moved
pub fn set_keyid(&mut self, v: ::std::vec::Vec<u8>) {
self.keyid = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_keyid(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.keyid
}
// Take field
pub fn take_keyid(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.keyid, ::std::vec::Vec::new())
}
pub fn get_keyid(&self) -> &[u8] {
&self.keyid
}
fn get_keyid_for_reflect(&self) -> &::std::vec::Vec<u8> {
&self.keyid
}
fn mut_keyid_for_reflect(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.keyid
}
// .ithos.object.Type credential_type = 2;
pub fn clear_credential_type(&mut self) {
self.credential_type = Type::SIGNATURE_KEY_PAIR;
}
// Param is passed by value, moved
pub fn set_credential_type(&mut self, v: Type) {
self.credential_type = v;
}
pub fn get_credential_type(&self) -> Type {
self.credential_type
}
fn get_credential_type_for_reflect(&self) -> &Type {
&self.credential_type
}
fn mut_credential_type_for_reflect(&mut self) -> &mut Type {
&mut self.credential_type
}
// string credential_alg = 3;
pub fn clear_credential_alg(&mut self) {
self.credential_alg.clear();
}
// Param is passed by value, moved
pub fn set_credential_alg(&mut self, v: ::std::string::String) {
self.credential_alg = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_credential_alg(&mut self) -> &mut ::std::string::String {
&mut self.credential_alg
}
// Take field
pub fn take_credential_alg(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.credential_alg, ::std::string::String::new())
}
pub fn get_credential_alg(&self) -> &str {
&self.credential_alg
}
fn get_credential_alg_for_reflect(&self) -> &::std::string::String {
&self.credential_alg
}
fn mut_credential_alg_for_reflect(&mut self) -> &mut ::std::string::String {
&mut self.credential_alg
}
// .ithos.EncryptionAlgorithm sealing_alg = 4;
pub fn clear_sealing_alg(&mut self) {
self.sealing_alg = alg::EncryptionAlg::AES256GCM;
}
// Param is passed by value, moved
pub fn set_sealing_alg(&mut self, v: alg::EncryptionAlg) {
self.sealing_alg = v;
}
pub fn get_sealing_alg(&self) -> alg::EncryptionAlg {
self.sealing_alg
}
fn get_sealing_alg_for_reflect(&self) -> &alg::EncryptionAlg {
&self.sealing_alg
}
fn mut_sealing_alg_for_reflect(&mut self) -> &mut alg::EncryptionAlg {
&mut self.sealing_alg
}
// bytes encrypted_value = 5;
pub fn clear_encrypted_value(&mut self) {
self.encrypted_value.clear();
}
// Param is passed by value, moved
pub fn set_encrypted_value(&mut self, v: ::std::vec::Vec<u8>) {
self.encrypted_value = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_encrypted_value(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.encrypted_value
}
// Take field
pub fn take_encrypted_value(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.encrypted_value, ::std::vec::Vec::new())
}
pub fn get_encrypted_value(&self) -> &[u8] {
&self.encrypted_value
}
fn get_encrypted_value_for_reflect(&self) -> &::std::vec::Vec<u8> {
&self.encrypted_value
}
fn mut_encrypted_value_for_reflect(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.encrypted_value
}
// bytes salt = 6;
pub fn clear_salt(&mut self) {
self.salt.clear();
}
// Param is passed by value, moved
pub fn set_salt(&mut self, v: ::std::vec::Vec<u8>) {
self.salt = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_salt(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.salt
}
// Take field
pub fn take_salt(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.salt, ::std::vec::Vec::new())
}
pub fn get_salt(&self) -> &[u8] {
&self.salt
}
fn get_salt_for_reflect(&self) -> &::std::vec::Vec<u8> {
&self.salt
}
fn mut_salt_for_reflect(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.salt
}
// bytes public_key = 7;
pub fn clear_public_key(&mut self) {
self.public_key.clear();
}
// Param is passed by value, moved
pub fn set_public_key(&mut self, v: ::std::vec::Vec<u8>) {
self.public_key = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_public_key(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.public_key
}
// Take field
pub fn take_public_key(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.public_key, ::std::vec::Vec::new())
}
pub fn get_public_key(&self) -> &[u8] {
&self.public_key
}
fn get_public_key_for_reflect(&self) -> &::std::vec::Vec<u8> {
&self.public_key
}
fn mut_public_key_for_reflect(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.public_key
}
// uint64 not_before = 8;
pub fn clear_not_before(&mut self) {
self.not_before = 0;
}
// Param is passed by value, moved
pub fn set_not_before(&mut self, v: u64) {
self.not_before = v;
}
pub fn get_not_before(&self) -> u64 {
self.not_before
}
fn get_not_before_for_reflect(&self) -> &u64 {
&self.not_before
}
fn mut_not_before_for_reflect(&mut self) -> &mut u64 {
&mut self.not_before
}
// uint64 not_after = 9;
pub fn clear_not_after(&mut self) {
self.not_after = 0;
}
// Param is passed by value, moved
pub fn set_not_after(&mut self, v: u64) {
self.not_after = v;
}
pub fn get_not_after(&self) -> u64 {
self.not_after
}
fn get_not_after_for_reflect(&self) -> &u64 {
&self.not_after
}
fn mut_not_after_for_reflect(&mut self) -> &mut u64 {
&mut self.not_after
}
// string description = 10;
pub fn clear_description(&mut self) {
self.description.clear();
}
// Param is passed by value, moved
pub fn set_description(&mut self, v: ::std::string::String) {
self.description = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_description(&mut self) -> &mut ::std::string::String {
&mut self.description
}
// Take field
pub fn take_description(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.description, ::std::string::String::new())
}
pub fn get_description(&self) -> &str {
&self.description
}
fn get_description_for_reflect(&self) -> &::std::string::String {
&self.description
}
fn mut_description_for_reflect(&mut self) -> &mut ::std::string::String {
&mut self.description
}
}
impl ::protobuf::Message for Credential {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self,
is: &mut ::protobuf::CodedInputStream)
-> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type,
is,
&mut self.keyid)?;
}
2 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
};
let tmp = is.read_enum()?;
self.credential_type = tmp;
}
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type,
is,
&mut self.credential_alg)?;
}
4 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
};
let tmp = is.read_enum()?;
self.sealing_alg = tmp;
}
5 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type,
is,
&mut self.encrypted_value)?;
}
6 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.salt)?;
}
7 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type,
is,
&mut self.public_key)?;
}
8 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
};
let tmp = is.read_uint64()?;
self.not_before = tmp;
}
9 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
};
let tmp = is.read_uint64()?;
self.not_after = tmp;
}
10 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type,
is,
&mut self.description)?;
}
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number,
wire_type,
is,
self.mut_unknown_fields())?;
}
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if self.keyid != ::std::vec::Vec::new() {
my_size += ::protobuf::rt::bytes_size(1, &self.keyid);
};
if self.credential_type != Type::SIGNATURE_KEY_PAIR {
my_size += ::protobuf::rt::enum_size(2, self.credential_type);
};
if self.credential_alg != ::std::string::String::new() {
my_size += ::protobuf::rt::string_size(3, &self.credential_alg);
};
if self.sealing_alg != alg::EncryptionAlg::AES256GCM {
my_size += ::protobuf::rt::enum_size(4, self.sealing_alg);
};
if self.encrypted_value != ::std::vec::Vec::new() {
my_size += ::protobuf::rt::bytes_size(5, &self.encrypted_value);
};
if self.salt != ::std::vec::Vec::new() {
my_size += ::protobuf::rt::bytes_size(6, &self.salt);
};
if self.public_key != ::std::vec::Vec::new() {
my_size += ::protobuf::rt::bytes_size(7, &self.public_key);
};
if self.not_before != 0 {
my_size += ::protobuf::rt::value_size(8,
self.not_before,
::protobuf::wire_format::WireTypeVarint);
};
if self.not_after != 0 {
my_size += ::protobuf::rt::value_size(9,
self.not_after,
::protobuf::wire_format::WireTypeVarint);
};
if self.description != ::std::string::String::new() {
my_size += ::protobuf::rt::string_size(10, &self.description);
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self,
os: &mut ::protobuf::CodedOutputStream)
-> ::protobuf::ProtobufResult<()> {
if self.keyid != ::std::vec::Vec::new() {
os.write_bytes(1, &self.keyid)?;
};
if self.credential_type != Type::SIGNATURE_KEY_PAIR {
os.write_enum(2, self.credential_type.value())?;
};
if self.credential_alg != ::std::string::String::new() {
os.write_string(3, &self.credential_alg)?;
};
if self.sealing_alg != alg::EncryptionAlg::AES256GCM {
os.write_enum(4, self.sealing_alg.value())?;
};
if self.encrypted_value != ::std::vec::Vec::new() {
os.write_bytes(5, &self.encrypted_value)?;
};
if self.salt != ::std::vec::Vec::new() {
os.write_bytes(6, &self.salt)?;
};
if self.public_key != ::std::vec::Vec::new() {
os.write_bytes(7, &self.public_key)?;
};
if self.not_before != 0 {
os.write_uint64(8, self.not_before)?;
};
if self.not_after != 0 {
os.write_uint64(9, self.not_after)?;
};
if self.description != ::std::string::String::new() {
os.write_string(10, &self.description)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &::std::any::Any {
self as &::std::any::Any
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
::protobuf::MessageStatic::descriptor_static(None::<Self>)
}
}
impl ::protobuf::MessageStatic for Credential {
fn new() -> Credential {
Credential::new()
}
fn descriptor_static(_: ::std::option::Option<Credential>)
-> &'static ::protobuf::reflect::MessageDescriptor {
static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::MessageDescriptor> =
::protobuf::lazy::Lazy {
lock: ::protobuf::lazy::ONCE_INIT,
ptr: 0 as *const ::protobuf::reflect::MessageDescriptor,
};
unsafe {
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"keyid",
Credential::get_keyid_for_reflect,
Credential::mut_keyid_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<Type>>(
"credential_type",
Credential::get_credential_type_for_reflect,
Credential::mut_credential_type_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"credential_alg",
Credential::get_credential_alg_for_reflect,
Credential::mut_credential_alg_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<alg::EncryptionAlg>>(
"sealing_alg",
Credential::get_sealing_alg_for_reflect,
Credential::mut_sealing_alg_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"encrypted_value",
Credential::get_encrypted_value_for_reflect,
Credential::mut_encrypted_value_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"salt",
Credential::get_salt_for_reflect,
Credential::mut_salt_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"public_key",
Credential::get_public_key_for_reflect,
Credential::mut_public_key_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint64>(
"not_before",
Credential::get_not_before_for_reflect,
Credential::mut_not_before_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint64>(
"not_after",
Credential::get_not_after_for_reflect,
Credential::mut_not_after_for_reflect,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"description",
Credential::get_description_for_reflect,
Credential::mut_description_for_reflect,
));
::protobuf::reflect::MessageDescriptor::new::<Credential>(
"Credential",
fields,
file_descriptor_proto()
)
})
}
}
}
impl ::protobuf::Clear for Credential {
    // Resets every field to its proto3 default value and drops any unknown
    // fields captured during parsing.
    fn clear(&mut self) {
        self.clear_keyid();
        self.clear_credential_type();
        self.clear_credential_alg();
        self.clear_sealing_alg();
        self.clear_encrypted_value();
        self.clear_salt();
        self.clear_public_key();
        self.clear_not_before();
        self.clear_not_after();
        self.clear_description();
        self.unknown_fields.clear();
    }
}
impl ::std::fmt::Debug for Credential {
    // Debug output delegates to protobuf's text format rather than a
    // derived field-by-field formatter.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        ::protobuf::text_format::fmt(self, f)
    }
}
// Reflection support: exposes a Credential as a generic message value.
impl ::protobuf::reflect::ProtobufValue for Credential {
    fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
        ::protobuf::reflect::ProtobufValueRef::Message(self)
    }
}
/// Credential type discriminator from `credential.proto`. The proto comment
/// embedded in the descriptor labels `SIGNATURE_KEY_PAIR` as a
/// "Public/private keypair".
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum Type {
    SIGNATURE_KEY_PAIR = 0,
}
impl ::protobuf::ProtobufEnum for Type {
    // Numeric value used on the wire.
    fn value(&self) -> i32 {
        *self as i32
    }
    // Maps a wire value back to a variant; unknown values yield None.
    fn from_i32(value: i32) -> ::std::option::Option<Type> {
        match value {
            0 => ::std::option::Option::Some(Type::SIGNATURE_KEY_PAIR),
            _ => ::std::option::Option::None,
        }
    }
    // All known variants, in declaration order.
    fn values() -> &'static [Self] {
        static values: &'static [Type] = &[Type::SIGNATURE_KEY_PAIR];
        values
    }
    // Lazily-initialized reflection descriptor for this enum, parsed from
    // the embedded file descriptor bytes.
    fn enum_descriptor_static(_: Option<Type>) -> &'static ::protobuf::reflect::EnumDescriptor {
        static mut descriptor: ::protobuf::lazy::Lazy<::protobuf::reflect::EnumDescriptor> =
            ::protobuf::lazy::Lazy {
                lock: ::protobuf::lazy::ONCE_INIT,
                ptr: 0 as *const ::protobuf::reflect::EnumDescriptor,
            };
        unsafe {
            descriptor.get(|| {
                ::protobuf::reflect::EnumDescriptor::new("Type", file_descriptor_proto())
            })
        }
    }
}
impl ::std::marker::Copy for Type {}
// proto3 enums default to their 0-valued variant.
impl ::std::default::Default for Type {
    fn default() -> Self {
        Type::SIGNATURE_KEY_PAIR
    }
}
// Reflection support: exposes the enum through its descriptor.
impl ::protobuf::reflect::ProtobufValue for Type {
    fn as_ref(&self) -> ::protobuf::reflect::ProtobufValueRef {
        ::protobuf::reflect::ProtobufValueRef::Enum(self.descriptor())
    }
}
static file_descriptor_proto_data: &'static [u8] =
&[0x0a, 0x17, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e,
0x74, 0x69, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x69, 0x74, 0x68,
0x6f, 0x73, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x1a, 0x0f, 0x61, 0x6c, 0x67, 0x6f,
0x72, 0x69, 0x74, 0x68, 0x6d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xfd, 0x02, 0x0a,
0x0a, 0x43, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05,
0x6b, 0x65, 0x79, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x6b, 0x65,
0x79, 0x69, 0x64, 0x12, 0x3b, 0x0a, 0x0f, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69,
0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x12,
0x2e, 0x69, 0x74, 0x68, 0x6f, 0x73, 0x2e, 0x6f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x2e, 0x54,
0x79, 0x70, 0x65, 0x52, 0x0e, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c,
0x54, 0x79, 0x70, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74,
0x69, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d,
0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x41, 0x6c, 0x67, 0x12, 0x3b,
0x0a, 0x0b, 0x73, 0x65, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x5f, 0x61, 0x6c, 0x67, 0x18, 0x04,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x69, 0x74, 0x68, 0x6f, 0x73, 0x2e, 0x45, 0x6e,
0x63, 0x72, 0x79, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74,
0x68, 0x6d, 0x52, 0x0a, 0x73, 0x65, 0x61, 0x6c, 0x69, 0x6e, 0x67, 0x41, 0x6c, 0x67, 0x12,
0x27, 0x0a, 0x0f, 0x65, 0x6e, 0x63, 0x72, 0x79, 0x70, 0x74, 0x65, 0x64, 0x5f, 0x76, 0x61,
0x6c, 0x75, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x65, 0x6e, 0x63, 0x72,
0x79, 0x70, 0x74, 0x65, 0x64, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x73,
0x61, 0x6c, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x73, 0x61, 0x6c, 0x74,
0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18,
0x07, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65,
0x79, 0x12, 0x1d, 0x0a, 0x0a, 0x6e, 0x6f, 0x74, 0x5f, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65,
0x18, 0x08, 0x20, 0x01, 0x28, 0x04, 0x52, 0x09, 0x6e, 0x6f, 0x74, 0x42, 0x65, 0x66, 0x6f,
0x72, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x6e, 0x6f, 0x74, 0x5f, 0x61, 0x66, 0x74, 0x65, 0x72,
0x18, 0x09, 0x20, 0x01, 0x28, 0x04, 0x52, 0x08, 0x6e, 0x6f, 0x74, 0x41, 0x66, 0x74, 0x65,
0x72, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f,
0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69,
0x70, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x1e, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x16,
0x0a, 0x12, 0x53, 0x49, 0x47, 0x4e, 0x41, 0x54, 0x55, 0x52, 0x45, 0x5f, 0x4b, 0x45, 0x59,
0x5f, 0x50, 0x41, 0x49, 0x52, 0x10, 0x00, 0x4a, 0xf6, 0x06, 0x0a, 0x06, 0x12, 0x04, 0x00,
0x00, 0x16, 0x01, 0x0a, 0x08, 0x0a, 0x01, 0x0c, 0x12, 0x03, 0x00, 0x00, 0x12, 0x0a, 0x08,
0x0a, 0x01, 0x02, 0x12, 0x03, 0x02, 0x08, 0x14, 0x0a, 0x09, 0x0a, 0x02, 0x03, 0x00, 0x12,
0x03, 0x04, 0x07, 0x18, 0x0a, 0x0a, 0x0a, 0x02, 0x05, 0x00, 0x12, 0x04, 0x06, 0x00, 0x08,
0x01, 0x0a, 0x0a, 0x0a, 0x03, 0x05, 0x00, 0x01, 0x12, 0x03, 0x06, 0x05, 0x09, 0x0a, 0x25,
0x0a, 0x04, 0x05, 0x00, 0x02, 0x00, 0x12, 0x03, 0x07, 0x04, 0x1b, 0x22, 0x18, 0x20, 0x50,
0x75, 0x62, 0x6c, 0x69, 0x63, 0x2f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x20, 0x6b,
0x65, 0x79, 0x70, 0x61, 0x69, 0x72, 0x0a, 0x0a, 0x0c, 0x0a, 0x05, 0x05, 0x00, 0x02, 0x00,
0x01, 0x12, 0x03, 0x07, 0x04, 0x16, 0x0a, 0x0c, 0x0a, 0x05, 0x05, 0x00, 0x02, 0x00, 0x02,
0x12, 0x03, 0x07, 0x19, 0x1a, 0x0a, 0x2a, 0x0a, 0x02, 0x04, 0x00, 0x12, 0x04, 0x0b, 0x00,
0x16, 0x01, 0x1a, 0x1e, 0x20, 0x45, 0x6e, 0x63, 0x72, 0x79, 0x70, 0x74, 0x65, 0x64, 0x20,
0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x20, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69,
0x61, 0x6c, 0x73, 0x0a, 0x0a, 0x0a, 0x0a, 0x03, 0x04, 0x00, 0x01, 0x12, 0x03, 0x0b, 0x08,
0x12, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x00, 0x12, 0x03, 0x0c, 0x04, 0x14, 0x0a,
0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x04, 0x12, 0x04, 0x0c, 0x04, 0x0b, 0x14, 0x0a,
0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x05, 0x12, 0x03, 0x0c, 0x04, 0x09, 0x0a, 0x0c,
0x0a, 0x05, 0x04, 0x00, 0x02, 0x00, 0x01, 0x12, 0x03, 0x0c, 0x0a, 0x0f, 0x0a, 0x0c, 0x0a,
0x05, 0x04, 0x00, 0x02, 0x00, 0x03, 0x12, 0x03, 0x0c, 0x12, 0x13, 0x0a, 0x0b, 0x0a, 0x04,
0x04, 0x00, 0x02, 0x01, 0x12, 0x03, 0x0d, 0x04, 0x1d, 0x0a, 0x0d, 0x0a, 0x05, 0x04, 0x00,
0x02, 0x01, 0x04, 0x12, 0x04, 0x0d, 0x04, 0x0c, 0x14, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00,
0x02, 0x01, 0x06, 0x12, 0x03, 0x0d, 0x04, 0x08, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02,
0x01, 0x01, 0x12, 0x03, 0x0d, 0x09, 0x18, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x01,
0x03, 0x12, 0x03, 0x0d, 0x1b, 0x1c, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x02, 0x12,
0x03, 0x0e, 0x04, 0x1e, 0x0a, 0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x04, 0x12, 0x04,
0x0e, 0x04, 0x0d, 0x1d, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x05, 0x12, 0x03,
0x0e, 0x04, 0x0a, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x01, 0x12, 0x03, 0x0e,
0x0b, 0x19, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x02, 0x03, 0x12, 0x03, 0x0e, 0x1c,
0x1d, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x03, 0x12, 0x03, 0x0f, 0x04, 0x28, 0x0a,
0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x03, 0x04, 0x12, 0x04, 0x0f, 0x04, 0x0e, 0x1e, 0x0a,
0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x03, 0x06, 0x12, 0x03, 0x0f, 0x04, 0x17, 0x0a, 0x0c,
0x0a, 0x05, 0x04, 0x00, 0x02, 0x03, 0x01, 0x12, 0x03, 0x0f, 0x18, 0x23, 0x0a, 0x0c, 0x0a,
0x05, 0x04, 0x00, 0x02, 0x03, 0x03, 0x12, 0x03, 0x0f, 0x26, 0x27, 0x0a, 0x0b, 0x0a, 0x04,
0x04, 0x00, 0x02, 0x04, 0x12, 0x03, 0x10, 0x04, 0x1e, 0x0a, 0x0d, 0x0a, 0x05, 0x04, 0x00,
0x02, 0x04, 0x04, 0x12, 0x04, 0x10, 0x04, 0x0f, 0x28, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00,
0x02, 0x04, 0x05, 0x12, 0x03, 0x10, 0x04, 0x09, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02,
0x04, 0x01, 0x12, 0x03, 0x10, 0x0a, 0x19, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x04,
0x03, 0x12, 0x03, 0x10, 0x1c, 0x1d, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x05, 0x12,
0x03, 0x11, 0x04, 0x13, 0x0a, 0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x05, 0x04, 0x12, 0x04,
0x11, 0x04, 0x10, 0x1e, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x05, 0x05, 0x12, 0x03,
0x11, 0x04, 0x09, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x05, 0x01, 0x12, 0x03, 0x11,
0x0a, 0x0e, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x05, 0x03, 0x12, 0x03, 0x11, 0x11,
0x12, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x06, 0x12, 0x03, 0x12, 0x04, 0x19, 0x0a,
0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x06, 0x04, 0x12, 0x04, 0x12, 0x04, 0x11, 0x13, 0x0a,
0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x06, 0x05, 0x12, 0x03, 0x12, 0x04, 0x09, 0x0a, 0x0c,
0x0a, 0x05, 0x04, 0x00, 0x02, 0x06, 0x01, 0x12, 0x03, 0x12, 0x0a, 0x14, 0x0a, 0x0c, 0x0a,
0x05, 0x04, 0x00, 0x02, 0x06, 0x03, 0x12, 0x03, 0x12, 0x17, 0x18, 0x0a, 0x0b, 0x0a, 0x04,
0x04, 0x00, 0x02, 0x07, 0x12, 0x03, 0x13, 0x04, 0x1a, 0x0a, 0x0d, 0x0a, 0x05, 0x04, 0x00,
0x02, 0x07, 0x04, 0x12, 0x04, 0x13, 0x04, 0x12, 0x19, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00,
0x02, 0x07, 0x05, 0x12, 0x03, 0x13, 0x04, 0x0a, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02,
0x07, 0x01, 0x12, 0x03, 0x13, 0x0b, 0x15, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x07,
0x03, 0x12, 0x03, 0x13, 0x18, 0x19, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x08, 0x12,
0x03, 0x14, 0x04, 0x19, 0x0a, 0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x08, 0x04, 0x12, 0x04,
0x14, 0x04, 0x13, 0x1a, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x08, 0x05, 0x12, 0x03,
0x14, 0x04, 0x0a, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x08, 0x01, 0x12, 0x03, 0x14,
0x0b, 0x14, 0x0a, 0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x08, 0x03, 0x12, 0x03, 0x14, 0x17,
0x18, 0x0a, 0x0b, 0x0a, 0x04, 0x04, 0x00, 0x02, 0x09, 0x12, 0x03, 0x15, 0x04, 0x1c, 0x0a,
0x0d, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x09, 0x04, 0x12, 0x04, 0x15, 0x04, 0x14, 0x19, 0x0a,
0x0c, 0x0a, 0x05, 0x04, 0x00, 0x02, 0x09, 0x05, 0x12, 0x03, 0x15, 0x04, 0x0a, 0x0a, 0x0c,
0x0a, 0x05, 0x04, 0x00, 0x02, 0x09, 0x01, 0x12, 0x03, 0x15, 0x0b, 0x16, 0x0a, 0x0c, 0x0a,
0x05, 0x04, 0x00, 0x02, 0x09, 0x03, 0x12, 0x03, 0x15, 0x19, 0x1b, 0x62, 0x06, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x33];
// Lazily-initialized cache for the parsed file descriptor; populated on the
// first call to `file_descriptor_proto()` below.
static mut file_descriptor_proto_lazy: ::protobuf::lazy::Lazy<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::lazy::Lazy {
    lock: ::protobuf::lazy::ONCE_INIT,
    ptr: 0 as *const ::protobuf::descriptor::FileDescriptorProto,
};
// Decodes the embedded `file_descriptor_proto_data` byte blob into a
// `FileDescriptorProto`. The `unwrap` is acceptable here because the blob is
// emitted by the protobuf code generator and cannot be malformed at runtime.
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
    ::protobuf::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
/// Returns this module's `FileDescriptorProto`, parsing the embedded
/// descriptor bytes on first use and caching the result for later calls.
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
    unsafe { file_descriptor_proto_lazy.get(|| parse_descriptor_proto()) }
}
// TODO: Hand edited! Figure out a better solution for objecthash support
// Hand-written ObjectHash implementation for the generated `Credential` type:
// hashes the message as a struct of key/value pairs. Enum fields are hashed
// via their numeric value as u32, and the timestamp fields as i64.
// NOTE(review): the field list must be kept in sync with the .proto
// definition by hand — confirm when regenerating this file.
impl ObjectHash for Credential {
    #[inline]
    fn objecthash<H: ObjectHasher>(&self, hasher: &mut H) {
        objecthash_struct!(
            hasher,
            "keyid" => &self.keyid,
            "credential_type" => &(self.credential_type as u32),
            "credential_alg" => &self.credential_alg,
            "sealing_alg" => &(self.sealing_alg as u32),
            "encrypted_value" => &self.encrypted_value,
            "salt" => &self.salt,
            "public_key" => &self.public_key,
            "not_before" => &(self.not_before as i64),
            "not_after" => &(self.not_after as i64),
            "description" => &self.description
        );
    }
}
|
<reponame>yasuc/vim-vixen<filename>src/background/operators/impls/ReopenTabOperator.ts
import Operator from "../Operator";
import TabPresenter from "../../presenters/TabPresenter";
/**
 * Operator that restores the most recently closed tab.
 */
export default class ReopenTabOperator implements Operator {
  constructor(private readonly tabPresenter: TabPresenter) {}
  // Delegates to the tab presenter, which performs the actual reopen.
  run(): Promise<void> {
    return this.tabPresenter.reopen();
  }
}
|
<reponame>yotann/alive2
// Copyright (c) 2021-present The Alive2 Authors.
// Distributed under the MIT license that can be found in the LICENSE file.
// This program connects to a memodb-server server, gets alive.tv and
// alive.interpret jobs, evaluates them, and sends the results back to the
// server. You need a separate program to submit jobs to the server, or else
// this program will have nothing to do.
//
// When a crash or timeout happens, it's important to send an error message
// back to the server, so it knows not to retry the job. Alive2 doesn't have a
// good way to detect or recover from crashes, so we set up some extra threads
// and signal handlers to send an error result to the server when a crash or
// timeout happens. We still can't recover from the crash, and we still can't
// interrupt the main Alive2 thread if there's a timeout, so this program will
// exit after sending the error result.
//
// Here's a suggested way to run this program:
//
// yes | parallel -n0 alive-worker http://127.0.0.1:1234
//
// This command will run one alive-worker process per core. Whenever a process
// exits, it will start a new one to replace it.
#include "util/version.h"
#include "util/worker.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/PrettyStackTrace.h"
#include "llvm/Support/Signals.h"
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <optional>
#include <string>
#include <thread>
#include <utility>
#include <vector>
#include <semaphore.h>
#include <signal.h>
#include <unistd.h>
// The server may wait 60 seconds to respond if there are no jobs available yet.
#define CPPHTTPLIB_READ_TIMEOUT_SECOND 120
#include <httplib.h>
#include <jsoncons/byte_string.hpp>
#include <jsoncons/json.hpp>
#include <jsoncons_ext/cbor/decode_cbor.hpp>
#include <jsoncons_ext/cbor/encode_cbor.hpp>
using namespace util;
using namespace std;
using jsoncons::byte_string_arg;
using jsoncons::decode_base64url;
using jsoncons::encode_base64url;
using jsoncons::json_array_arg;
using jsoncons::json_object_arg;
using jsoncons::ojson;
using jsoncons::cbor::decode_cbor;
using jsoncons::cbor::encode_cbor;
// Command-line options for this worker.
static llvm::cl::OptionCategory alive_cmdargs("Alive2 worker options");
static llvm::cl::opt<string> opt_url(llvm::cl::Positional, llvm::cl::Required,
                                     llvm::cl::desc("<memodb server URL>"),
                                     llvm::cl::value_desc("url"),
                                     llvm::cl::cat(alive_cmdargs));
// Used by signal handler to communicate with crash handler thread.
static sem_t crash_sem;
static volatile sig_atomic_t crash_handler_done = 0;
// State shared with the timeout watchdog thread; guarded by timeout_mutex.
static std::mutex timeout_mutex;
static std::condition_variable timeout_cv;
static size_t timeout_millis = 0; // 0 means "no job currently being timed"
static size_t timeout_index = 0;  // bumped per job so stale waits detect change
static bool timeout_cancelled = false;
// Guards result_uri (written by the main loop, read by crash/timeout paths).
static std::mutex result_mutex;
// May be empty if no result is in progress.
static std::string result_uri;
// HTTP connection to the memodb server (constructed in main()).
static std::optional<httplib::Client> session;
static const httplib::Headers cbor_headers{{"Accept", "application/cbor"}};
// Converts a textual CID (multibase prefix "u" = base64url) into the binary
// form used in CBOR: a byte string with a leading 0x00 prefix byte, wrapped
// in CBOR tag 42. Exits the process on any malformed input (consistent with
// the exit-on-error style of the other helpers here).
static ojson textCIDToBinary(const llvm::StringRef &text) {
  if (!text.startswith("u")) {
    llvm::errs() << "unsupported CID: " << text << "\n";
    std::exit(1);
  }
  // Leading 0x00 is the binary-form prefix expected by binaryCIDToText below.
  std::vector<uint8_t> bytes{0x00};
  auto result = decode_base64url(text.begin() + 1, text.end(), bytes);
  if (result.ec != jsoncons::conv_errc::success) {
    llvm::errs() << "invalid CID: " << text << "\n";
    std::exit(1);
  }
  return ojson(byte_string_arg, bytes, 42);
}
// Converts a binary CID back to its textual "u<base64url>" form, skipping the
// leading prefix byte added by textCIDToBinary. Exits on an empty byte string.
static std::string binaryCIDToText(const ojson &cid) {
  std::string text = "u";
  auto bytes = cid.as_byte_string_view();
  if (bytes.size() == 0) {
    llvm::errs() << "invalid CID\n";
    std::exit(1);
  }
  encode_base64url(bytes.begin() + 1, bytes.end(), text);
  return text;
}
// Aborts the process on a transport failure or a non-2xx HTTP status, so
// callers may assume a successful response after this returns.
static void checkResult(const httplib::Result &result) {
  if (!result) {
    std::cerr << "HTTP connection error: " << result.error() << '\n';
    std::exit(1);
  }
  if (result->status < 200 || result->status > 299) {
    std::cerr << "HTTP response error: " << result->body << "\n";
    std::exit(1);
  }
};
// Fetches the node stored under the given CID from the server (GET /cid/...)
// and decodes its CBOR body. Exits on error or unexpected content type.
static ojson getNodeFromCID(const ojson &cid) {
  std::string uri = "/cid/" + binaryCIDToText(cid);
  auto result = session->Get(uri.c_str(), cbor_headers);
  checkResult(result);
  if (result->get_header_value("Content-Type") != "application/cbor") {
    std::cerr << "unexpected Content-Type!\n";
    std::exit(1);
  }
  return decode_cbor<ojson>(result->body);
};
// Uploads a CBOR-encoded node via POST /cid and returns its binary CID, which
// the server reports in the Location header as "/cid/<text-cid>".
static ojson putNode(const ojson &node) {
  std::string buffer;
  encode_cbor(node, buffer);
  auto result = session->Post("/cid", cbor_headers, buffer, "application/cbor");
  checkResult(result);
  string cid_str = result->get_header_value("Location");
  if (!llvm::StringRef(cid_str).startswith("/cid/")) {
    llvm::errs() << "invalid CID URI!\n";
    std::exit(1);
  }
  // Drop the "/cid/" prefix to get the textual CID.
  return textCIDToBinary(llvm::StringRef(cid_str).drop_front(5));
}
// Stores `node` on the server and PUTs its CID as the result of the current
// job. A no-op when no job is in progress (result_uri empty). Clearing
// result_uri under result_mutex guarantees at most one result is sent per
// job, even when the normal path races the crash/timeout threads.
static void sendResult(const ojson &node) {
  std::lock_guard lock(result_mutex);
  if (result_uri.empty())
    return;
  auto cid = putNode(node);
  std::string buffer;
  encode_cbor(cid, buffer);
  auto result = session->Put(result_uri.c_str(), cbor_headers, buffer,
                             "application/cbor");
  checkResult(result);
  result_uri.clear();
}
// Must be signal-safe.
// Wakes crashHandlerThread() via sem_post (async-signal-safe) and gives it up
// to 10 seconds to report a "crashed" result to the server; the HTTP work
// itself cannot be done from a signal handler.
static void signalHandler(void *) {
  // Resume the signal handler thread.
  sem_post(&crash_sem);
  // Wait up to 10 seconds for the thread to send a result.
  for (int i = 0; i < 10; ++i) {
    if (crash_handler_done)
      break;
    sleep(1);
  }
  // Return and let LLVM's normal signal handlers run.
}
// Background thread that reports a "crashed" result. It blocks on crash_sem
// until signalHandler() posts it, then performs the HTTP work that the signal
// handler itself cannot safely do.
static void crashHandlerThread() {
  // Don't handle any signals in this thread.
  sigset_t sig_set;
  sigfillset(&sig_set);
  pthread_sigmask(SIG_BLOCK, &sig_set, nullptr);
  // Wait for signalHandler() to tell us a signal has been raised.
  // (sem_wait retried on spurious/EINTR wakeups.)
  while (sem_wait(&crash_sem))
    ;
  std::cerr << "crashed\n";
  sendResult(ojson(json_object_arg, {{"status", "crashed"}}));
  // Tell signalHandler() we're done.
  crash_handler_done = 1;
}
// Watchdog thread: when a job is armed (timeout_millis != 0), waits up to
// that many milliseconds; if the job neither completes nor is replaced in
// that window, reports a "timeout" result and terminates the process.
static void timeoutThread() {
  std::unique_lock<std::mutex> lock(timeout_mutex);
  while (true) {
    auto expected_millis = timeout_millis;
    auto expected_index = timeout_index;
    if (!expected_millis) {
      // No job being timed; sleep until main() arms the next one.
      timeout_cv.wait(lock);
    } else {
      // If timeout_millis or timeout_index changes while we're waiting, the
      // job completed before the timeout. But if they don't change for
      // duration, the job hasn't completed yet.
      auto duration = std::chrono::milliseconds(expected_millis);
      auto predicate = [&] {
        return timeout_millis != expected_millis ||
               timeout_index != expected_index || timeout_cancelled;
      };
      if (!timeout_cv.wait_for(lock, duration, predicate))
        break; // Timeout occurred! (or cancelled)
    }
    if (timeout_cancelled)
      return;
  }
  sendResult(ojson(json_object_arg, {{"status", "timeout"}}));
  // We can't call exit() because that would destroy the global SMT context
  // while another thread might be using it.
  _exit(2);
}
// atexit handler: wakes timeoutThread() so it returns before static
// destruction runs.
static void exitHandler() {
  // We need to cancel the timeout mutex so it stops waiting on timeout_cv. The
  // timeout_cv variable will be destroyed when the program exits, and it's
  // illegal to destroy a condition variable that has threads waiting on it.
  std::lock_guard lock(timeout_mutex);
  timeout_cancelled = true;
  timeout_cv.notify_all();
}
// Entry point: sets up crash/timeout infrastructure, connects to the memodb
// server, then loops forever requesting jobs and evaluating them.
int main(int argc, char **argv) {
  // Register our signal handler before LLVM's stack trace printer, so our
  // handler will run first.
  sem_init(&crash_sem, 0, 0);
  llvm::sys::AddSignalHandler(signalHandler, nullptr);
  // Start our threads before calling anything else from LLVM, in case LLVM's
  // functions crash.
  atexit(exitHandler);
  std::thread(crashHandlerThread).detach();
  std::thread(timeoutThread).detach();
  llvm::sys::PrintStackTraceOnErrorSignal(argv[0]);
  llvm::PrettyStackTraceProgram X(argc, argv);
  llvm::EnableDebugBuffering = true;
  llvm::llvm_shutdown_obj llvm_shutdown; // Call llvm_shutdown() on exit.
  std::string Usage =
      R"EOF(Alive2 stand-alone distributed worker:
version )EOF";
  Usage += alive_version;
  Usage += R"EOF(
see alive-worker --version for LLVM version info,
This program connects to a memodb-server server and evaluates Alive2-related
calls that are submitted to the server by other programs.
)EOF";
  llvm::cl::HideUnrelatedOptions(alive_cmdargs);
  llvm::cl::ParseCommandLineOptions(argc, argv, Usage);
  std::string server_url = opt_url;
  // httplib doesn't like the ending slash in http://127.0.0.1:1234/
  if (!server_url.empty() && server_url.back() == '/')
    server_url.pop_back();
  session.emplace(server_url);
  // Upload the list of funcs we can evaluate using POST /cid.
  ojson worker_info(
      json_object_arg,
      {
          {"funcs", ojson(json_array_arg, {"alive.tv_v2", "alive.interpret"})},
      });
  auto worker_info_cid = putNode(worker_info);
  while (true) {
    std::string buffer;
    encode_cbor(worker_info_cid, buffer);
    // POST /worker blocks until the server has a job for us (or times out
    // server-side; see CPPHTTPLIB_READ_TIMEOUT_SECOND above).
    auto response =
        session->Post("/worker", cbor_headers, buffer, "application/cbor");
    checkResult(response);
    if (response->get_header_value("Content-Type") != "application/cbor") {
      std::cerr << "unexpected Content-Type!\n";
      return 1;
    }
    ojson job = decode_cbor<ojson>(response->body);
    if (job.is_null()) {
      // No jobs available, try again.
      sleep(1); // TODO: exponential backoff
      continue;
    }
    string func = job["func"].as<string>();
    // Build "/call/<func>/<cid>,<cid>,..." while resolving each argument CID
    // to its actual node in place.
    std::string call_uri = "/call/" + func + "/";
    for (ojson &arg : job["args"].array_range()) {
      call_uri += binaryCIDToText(arg) + ",";
      arg = getNodeFromCID(arg);
    }
    call_uri.pop_back(); // remove last comma
    llvm::errs() << "evaluating " << call_uri << "\n";
    // Record where the result must be PUT; the crash/timeout threads also use
    // this if we never reach sendResult() below ourselves.
    std::unique_lock lock(result_mutex);
    result_uri = call_uri;
    lock.unlock();
    // Arm the timeout watchdog for this job.
    uint64_t timeout =
        job["args"][0].get_with_default<uint64_t>("timeout", 60000);
    lock = unique_lock(timeout_mutex);
    timeout_millis = timeout;
    timeout_index++;
    lock.unlock();
    timeout_cv.notify_one();
    ojson result = evaluateAliveFunc(job);
    if (result.contains("test_input"))
      result["test_input"] = putNode(result["test_input"]);
    // Disarm the watchdog before sending the result.
    lock.lock();
    timeout_millis = 0;
    lock.unlock();
    timeout_cv.notify_one();
    sendResult(result);
  }
  return 0;
}
|
In case you haven’t used App Shortcuts in Android yet, it’s an awesome feature that allows us to provide our users with a way to quickly access parts of our apps from the home screen of their device. As developers, we can make these shortcuts either static (meaning they’re statically defined in an XML file) or dynamic (meaning they’re dynamically added and removed at runtime). Currently in the Buffer app for Android we have 2 types of dynamic shortcuts:
A shortcut that allows you to access the composer
Three shortcuts for the 3 previously selected profiles (this one is in currently only available in beta!)
But why do we need to write tests for this?
All of these shortcuts are dynamic, meaning that they are only accessible when you are signed in to an account. This means that we have to add them when a user signs in and remove them when the user signs out – thankfully for us, the shortcut API makes it super easy to do this. However, just like any feature of an application, this could easily get broken in the future – this means that if these do somehow break:
Users may not be able to see the shortcuts when they are signed in
Users may still be able to see the shortcuts when they have signed out. Without being signed in, it could be quite confusing for these shortcuts to show – so we want to be sure they’re never shown in this situation
Users may be shown the wrong recently selected profiles – this could be quite annoying and defeats the point of the shortcuts in the first place!
This sounds like a perfect situation to write tests to ensure that these situations never occur. However, because these shortcuts are outside of our application it means we can’t use Espresso to test them.
But! Whilst we can’t use Espresso, luckily we have what is known as UI Automator available to us, it’s all good!
If you haven’t used UI Automator before, it’s essentially a testing tool that allows us to interact with system-wide components – it’s useful for testing things outside of your app (such as notifications) or components invoked by the system (such as permission dialogs). Because of this, it means we can use it to test our App Shortcuts 🙌🏻
Nice! But how do I do this?
If you don’t want to learn about how to do this for yourself, we’ve bundled some handy functions into our testing utility library Biscotti that will allow you to test for app shortcuts. In this library you’ll find two handy methods:
assertAppShortcutsExist()
This method allows you to check that an app shortcut exists for your application. For example:
BiscottiShortcuts.assertAppShortcutsExists("Buffer", InstrumentationRegistry .getTargetContext().getString(R.string.shortcut_compose_update)) 1 2 BiscottiShortcuts . assertAppShortcutsExists ( "Buffer" , InstrumentationRegistry . getTargetContext ( ) . getString ( R . string . shortcut_compose_update ) )
assertAppShortcutsDoNotExist()
This method allows you to check that an app shortcut does not exist for your application. For example:
BiscottiShortcuts.assertAppShortcutsDoNotExist("Buffer", InstrumentationRegistry .getTargetContext().getString(R.string.shortcut_compose_update)) 1 2 BiscottiShortcuts . assertAppShortcutsDoNotExist ( "Buffer" , InstrumentationRegistry . getTargetContext ( ) . getString ( R . string . shortcut_compose_update ) )
Both of these methods also allow you to pass in multiple strings for checking multiple labels, just in case you need a test to check multiple related app shortcuts.
Cool! I want to know how this works!
So let’s take a dive into how we’re doing this with UI Automator, here is the complete code for checking that an app shortcut exists:
fun assertAppShortcutsExists(appName: String, vararg shortcutLabels: String) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N_MR1) { val device = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation()) findAppIcon(device, appName).longClick() shortcutLabels.forEach { if (!device.hasObject(By.text(it))) { Assert.fail("The specified shortcut was not found") } } } } private fun findAppIcon(device: UiDevice, appName: String): UiObject { device.pressHome() if (device.hasObject(By.desc("Apps"))) { device.findObject(By.desc("Apps")).click() } val appDrawer = UiScrollable(UiSelector().scrollable(true)) appDrawer.scrollForward() appDrawer.scrollTextIntoView(appName) return device.findObject(UiSelector().text(appName)) } 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 fun assertAppShortcutsExists ( appName : String , vararg shortcutLabels : String ) { if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . N_MR1 ) { val device = UiDevice . getInstance ( InstrumentationRegistry . getInstrumentation ( ) ) findAppIcon ( device , appName ) . longClick ( ) shortcutLabels . forEach { if ( ! device . hasObject ( By . text ( it ) ) ) { Assert . fail ( "The specified shortcut was not found" ) } } } } private fun findAppIcon ( device : UiDevice , appName : String ) : UiObject { device . pressHome ( ) if ( device . hasObject ( By . desc ( "Apps" ) ) ) { device . findObject ( By . desc ( "Apps" ) ) . click ( ) } val appDrawer = UiScrollable ( UiSelector ( ) . scrollable ( true ) ) appDrawer . scrollForward ( ) appDrawer . scrollTextIntoView ( appName ) return device . findObject ( UiSelector ( ) . text ( appName ) ) }
We have two methods here; assertAppShortcutsExists() which is used by our application to test the app shortcut and findAppIcon() which is used for finding the app icon on the device. Let’s begin by taking a look at the assertAppShortcutsExists() method.
Wait, won’t this fail on devices running < 7.1?
Good question! You’ll notice here that the logic within this method is only used if the build version of the device is Android 7.1 or newer – this is because app shortcuts are only available there, so we don’t want these tests running on older versions of Android as they will fail. It would be nicer to have the test not run at all on devices running versions older than 7.1, but I’m not sure of a way to do this yet 🙂
Navigating to the application icon
The first step we need to carry out is navigating to the icon for our application, this is so that we can invoke the action to show the app shortcuts. We begin by retrieving a reference to the current device as a UiDevice instance:
val device = UiDevice.getInstance(InstrumentationRegistry.getInstrumentation()) 1 val device = UiDevice . getInstance ( InstrumentationRegistry . getInstrumentation ( ) )
We then use this value when calling the findAppIcon() method. This is used, along with the given app name, to find the application icon that we want to check for app shortcuts. Within this method we begin by using the pressHome() method to press the home button on the device, this will cause our app to be exited and the home screen of the device to be shown.
device.pressHome() 1 device . pressHome ( )
Once we’re on the home screen of our device, we need to find the icon of our app. Now, the way we do this can differ between devices – some devices will have an app launcher icon, yet some will have the pixel style launcher that swipes up from the bottom of the screen. To account for this we’re going to first check if the device has the App Launcher button like so:
device.hasObject(By.desc("Apps")) 1 device . hasObject ( By . desc ( "Apps" ) )
If the device has this button, you’ll notice that we go ahead and click it:
device.findObject(By.desc("Apps")).click() 1 device . findObject ( By . desc ( "Apps" ) ) . click ( )
If the device doesn’t have this button then it’s ok because we make use of the UiScrollable class to scroll up on the screen, this will bring up the pixel style launcher, revealing the apps on the devices. We still need to use this UiScrollable class here even if the device does have the app launcher button as we will need to scroll to the app icon anyway – this just means we are now accounted for both situations 🙂
Note: I haven’t yet tested this on devices that swipe horizontally. These tests will only be run on Android 7.1 above which I think makes this behaviour less common, but you may need to tweak this code if your tests are running on devices that use this approach.
val appDrawer = UiScrollable(UiSelector().scrollable(true)) appDrawer.scrollForward() 1 2 val appDrawer = UiScrollable ( UiSelector ( ) . scrollable ( true ) ) appDrawer . scrollForward ( )
Next, we need to actually scroll to our app icon, this is done by the appDrawer.scrollTextIntoView(appName) call. Once our app icon is found it is returned by again using the findObject() method.
appDrawer.scrollTextIntoView(appName) return device.findObject(UiSelector().text(appName)) 1 2 appDrawer . scrollTextIntoView ( appName ) return device . findObject ( UiSelector ( ) . text ( appName ) )
Asserting the object state
If we head back on over to our assertAppShortcutsExists() method, we now have the reference to our app icon so we immediately perform a longClick() action on it. A long click is what causes our app shortcuts to be shown. Now that these are showing, we can go ahead and check that our desired shortcuts are shown. We again use the hasObject() method to check whether or not the given shortcut label is shown and if not, we cause the test to fail.
if (!device.hasObject(By.text(it))) { Assert.fail("The specified shortcut was not found") } 1 2 3 if ( ! device . hasObject ( By . text ( it ) ) ) { Assert . fail ( "The specified shortcut was not found" ) }
The assertAppShortcutsDoNotExist() method works in the same way except we just assert that the given shortcut label is not shown on the screen.
Conclusion
It was exciting for me to discover that it was possible to write automated tests for app shortcuts and it feels great knowing that these are now more protected against future regressions. I haven’t been able to try this yet on a wide range of devices (it passes in our test suit on the devices we are using) so I’d be interested to know if there are any cases for you where this may not work out as intended!
Originally written Dec 5, 2017. Last updated Dec 5, 2017 |
def apply_bounds(self, column_name, lower_bound=-np.inf,
                 upper_bound=np.inf):
    """Clamp the values of a column in place to [lower_bound, upper_bound].

    A bound of ``None`` is treated as unbounded on that side.
    Raises whatever ``check_for_column`` raises for an unknown column.
    """
    self.check_for_column(column_name)
    # Normalize None to +/-inf so clip always receives numeric bounds.
    lower = -np.inf if lower_bound is None else lower_bound
    upper = np.inf if upper_bound is None else upper_bound
    self.data[column_name] = self.data[column_name].clip(lower, upper)
<filename>goodsKill-spring-boot-provider/goodsKill-service/src/main/java/org/seckill/service/mp/SuccessKilledService.java
package org.seckill.service.mp;
import com.baomidou.mybatisplus.extension.service.IService;
import org.seckill.entity.SuccessKilled;
/**
 * MyBatis-Plus service facade for {@link SuccessKilled} records.
 * Generic CRUD operations are inherited from {@link IService};
 * no custom methods are declared yet.
 *
 * @author heng
 */
public interface SuccessKilledService extends IService<SuccessKilled> {
}
|
<reponame>WangZixuan/Leetcode
/*
Find Duplicate Subtrees
Given a binary tree, return all duplicate subtrees. For each kind of duplicate subtrees, you only need to return the root node of any one of them.
Two trees are duplicate if they have the same structure with same node values.
Example 1:
1
/ \
2 3
/ / \
4 2 4
/
4
The following are two duplicate subtrees:
2
/
4
and
4
@author Zixuan
@date 2017/12/20
*/
/**
* Definition for a binary tree node.
* struct TreeNode {
* int val;
* TreeNode *left;
* TreeNode *right;
* TreeNode(int x) : val(x), left(NULL), right(NULL) {}
* };
*/
#include <map>
#include <string>
#include <vector>
using namespace std;
class Solution
{
public:
    // Returns one representative root node for every subtree structure that
    // appears at least twice in the tree rooted at `root`.
    vector<TreeNode*> findDuplicateSubtrees(TreeNode* root)
    {
        collect(root);
        return duplicatesNodes;
    }

private:
    // Serialized subtree -> number of times that serialization has been seen.
    map<string, int> duplicatesStrings;
    // One node per duplicated serialization, recorded on its second sighting.
    vector<TreeNode*> duplicatesNodes;

    // Pre-order serialization "val,left,right" with "#" for a null child;
    // identical strings imply identical subtree structure and values.
    string collect(TreeNode* node)
    {
        if (node == nullptr)
            return "#";

        string encoded = to_string(node->val);
        encoded += ',';
        encoded += collect(node->left);
        encoded += ',';
        encoded += collect(node->right);

        // Record the node exactly once, when the count first reaches two.
        if (++duplicatesStrings[encoded] == 2)
            duplicatesNodes.push_back(node);
        return encoded;
    }
};
<gh_stars>10-100
package iso20022
// Provide information on the status reason of the record.
// Generated ISO 20022 message component; the xml struct tags map each field
// to its element name in the message schema.
type ValidationStatusReason1 struct {
	// Party that issues the status.
	Originator *PartyIdentification77 `xml:"Orgtr,omitempty"`
	// Specifies the reason for the status.
	Reason *StatusReason6Choice `xml:"Rsn,omitempty"`
	// Provides details about the rule which could not be validated.
	ValidationRule []*GenericValidationRuleIdentification1 `xml:"VldtnRule,omitempty"`
	// Further details on the status reason.
	//
	// Usage: Additional information can be used for several purposes such as the reporting of repaired information.
	AdditionalInformation []*Max105Text `xml:"AddtlInf,omitempty"`
}
// AddOriginator allocates a fresh Originator and returns it so the caller can
// populate its fields.
func (v *ValidationStatusReason1) AddOriginator() *PartyIdentification77 {
	v.Originator = new(PartyIdentification77)
	return v.Originator
}
// AddReason allocates a fresh Reason and returns it so the caller can
// populate its fields.
func (v *ValidationStatusReason1) AddReason() *StatusReason6Choice {
	v.Reason = new(StatusReason6Choice)
	return v.Reason
}
// AddValidationRule appends a new, empty validation-rule entry to the slice
// and returns it so the caller can fill in its fields.
func (v *ValidationStatusReason1) AddValidationRule() *GenericValidationRuleIdentification1 {
	rule := new(GenericValidationRuleIdentification1)
	v.ValidationRule = append(v.ValidationRule, rule)
	return rule
}
// AddAdditionalInformation appends a free-text note, converting the plain
// string into the constrained Max105Text type.
func (v *ValidationStatusReason1) AddAdditionalInformation(value string) {
	v.AdditionalInformation = append(v.AdditionalInformation, (*Max105Text)(&value))
}
|
// NewMIDIOutDefault opens a default MIDIOut port.
// It wraps C.rtmidi_out_create_default; when the C layer reports failure
// (out.ok is false), the handle is freed and the RtMidi error message is
// surfaced as a Go error.
func NewMIDIOutDefault() (MIDIOut, error) {
	out := C.rtmidi_out_create_default()
	if !out.ok {
		defer C.rtmidi_out_free(out)
		return nil, errors.New(C.GoString(out.msg))
	}
	return &midiOut{out: out, midi: midi{midi: C.RtMidiPtr(out)}}, nil
}
package br.com.jcpvix.githubapi.controller.v1;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import br.com.jcpvix.githubapi.dto.response.Response;
import br.com.jcpvix.githubapi.exception.ApiException;
import br.com.jcpvix.githubapi.model.File;
import br.com.jcpvix.githubapi.model.Page;
import br.com.jcpvix.githubapi.service.GithubService;
@RestController
@RequestMapping("/api/v1")
public class GithubapiController {
private final GithubService githubScraperService;
@Autowired
public GithubapiController(GithubService githubScraperService) {
this.githubScraperService = githubScraperService;
}
@GetMapping(path="/files", produces = "application/json")
public Response<Map<String, List<File>>> getAllFiles(@RequestParam String url) throws ApiException {
try {
Page page = githubScraperService.execute(url);
Map<String, List<File>> groupByExtension = page.getFiles().stream().collect(Collectors.groupingBy(File::getExtension));
return Response.<Map<String, List<File>>>ok(groupByExtension);
} catch (Exception e) {
throw new ApiException(e.getMessage());
}
}
} |
<gh_stars>1-10
import axios from 'axios';
import { Message } from 'discord.js';
import { queueAdd } from '../controller';
import { getSpotifyToken as getSpotifyToken } from '../loaders/nodecache';
import { CONSTANT_URL, REGEX } from '../shared/constants';
import { validateRegex } from '../shared/validation';
import { searchSong } from '../shared/yt-search';
/**
 * Validates the spotify URL and categorizes type of the URL(album, playlist or a single song) and then adds it to the queue after fetching the data using the spotify APIs.
 * @param message The incoming message
 * @param url The URL of the spotify song, playlist or album
 * @returns {Promise<Message>} Returns the message sent to user
 */
export const spotifyLinkHandler = async (message:Message, url:string):Promise<Message> => {
  let typeOfRequest: string;
  let id: string;
  if (validateRegex(url, REGEX.SPOTIFY_WEB_URL_REGEX)) {
    // Web URL form: https://open.spotify.com/<type>/<id>?...
    typeOfRequest = url.split('/')[3];
    id = url.split('/')[4].split('?')[0];
  } else if (validateRegex(url, REGEX.SPOTIFY_URL_REGEX)) {
    // URI form: spotify:<type>:<id>
    typeOfRequest = url.split(':')[1];
    id = url.split(':')[2];
  } else {
    return message.channel.send('Not a valid playlist url');
  }
  switch (typeOfRequest) {
    case 'track':
      await spotifyTrackHandler(message, id);
      break;
    case 'album':
      await spotifyAlbumHandler(message, id);
      break;
    case 'playlist':
      // Fix: previously this call was not awaited, so rejections were
      // unhandled and callers could observe completion before queuing began.
      await spotifyPlaylistHandler(message, id);
      break;
    default:
      // Fix: return the sent message to honour the declared Promise<Message>.
      return message.channel.send(' try sending a track, playlist or an album');
  }
};
/**
 * Fetches a single track's metadata from the Spotify API, resolves it to a
 * YouTube search result and queues it.
 * NOTE(review): the failure path sends "unable to add playlist" even though
 * this handles a track — confirm whether that wording is intentional.
 */
const spotifyTrackHandler = async (message, id) => {
  let trackData = await axios({
    method: 'GET',
    url: CONSTANT_URL.SPOTIFY_TRACK_API(id),
    headers: { Authorization: `Bearer ${await getSpotifyToken()}` },
  });
  if (!trackData.data.name) return message.channel.send('unable to add playlist');
  // Search by "<track name> <primary artist>" to improve match quality.
  let songData = await searchSong(message, `${trackData.data.name} ${trackData.data.artists[0].name}`);
  let finalSongDetails = { ...songData, originalTitle: trackData.data.name };
  await queueAdd(message, finalSongDetails);
};
/**
 * Fetches an album's tracks from the Spotify API and queues them, following
 * pagination recursively.
 * @param next Pagination URL from the previous response; omitted for page one.
 */
const spotifyAlbumHandler = async (message, id, next?: string) => {
  let albumData = await axios({
    method: 'GET',
    url: CONSTANT_URL.SPOTIFY_ALBUM_API(id, next),
    headers: {
      Authorization: `Bearer ${await getSpotifyToken()}`,
    },
  });
  if (!albumData.data) return message.channel.send('unable to add playlist');
  await addSpotifyALbumSongsToQueue(message, albumData.data.items);
  // Recurse while the API reports a further page.
  if (albumData.data.next) {
    await spotifyAlbumHandler(message, id, albumData.data.next);
  }
};
/**
 * Fetches a playlist's tracks from the Spotify API and queues them, following
 * pagination recursively.
 * @param next Pagination URL from the previous response; omitted for page one.
 */
const spotifyPlaylistHandler = async (message, id, next?: string) => {
  let playlistData = await axios({
    method: 'GET',
    url: CONSTANT_URL.SPOTIFY_PLAYLIST_API(id, next),
    headers: {
      Authorization: `Bearer ${await getSpotifyToken()}`,
    },
  });
  if (!playlistData.data) return message.channel.send('unable to add playlist');
  await addSpotifyPlaylistSongsToQueue(message, playlistData.data.items);
  // Recurse while the API reports a further page.
  if (playlistData.data.next) {
    await spotifyPlaylistHandler(message, id, playlistData.data.next);
  }
};
// Resolves each album track to a YouTube result and queues it. Tracks are
// awaited sequentially to preserve queue order. (The name keeps the
// historical "ALbum" capitalization; renaming would touch its caller.)
const addSpotifyALbumSongsToQueue = async (message, songs) => {
  for (let i = 0; i < songs.length; i++) {
    let songData = await searchSong(message, `${songs[i].name} ${songs[i].artists[0].name}`);
    // Skip tracks with no usable search result.
    if (songData) {
      queueAdd(message, {
        title: `${songs[i].name}`,
        url: songData.url,
        originalTitle: songs[i].name,
        timestamp: songData.timestamp,
        artistName: songs[i].artists[0].name,
      });
    }
  }
};
// Resolves each playlist entry's track to a YouTube result and queues it.
// Entries are awaited one at a time so the queue preserves playlist order;
// entries with no usable search result are skipped.
const addSpotifyPlaylistSongsToQueue = async (message, songs) => {
  for (const entry of songs) {
    const track = entry.track;
    const songData = await searchSong(message, `${track.name} ${track.album.artists[0].name}`);
    if (songData) {
      queueAdd(message, {
        title: track.name,
        url: songData.url,
        originalTitle: track.name,
        timestamp: songData.timestamp,
        artistName: track.album.artists[0].name,
      });
    }
  }
};
|
// BlueprintGeneratedClass BP_ProjectileTrajectory_HeldObjectSocket.BP_ProjectileTrajectory_HeldObjectSocket_C
// Size: 0x278 (Inherited: 0x269)
// Auto-generated SDK mirror of an Unreal Engine blueprint class: the padding
// and field offsets must match the in-game memory layout exactly, so do not
// reorder or resize members.
struct ABP_ProjectileTrajectory_HeldObjectSocket_C : ABP_ProjectileTrajectory_C {
	char pad_269[0x7]; // 0x269(0x07)
	struct FPointerToUberGraphFrame UberGraphFrame; // 0x270(0x08)

	void ReceiveBeginPlay(); // Function BP_ProjectileTrajectory_HeldObjectSocket.BP_ProjectileTrajectory_HeldObjectSocket_C.ReceiveBeginPlay // (Event|Protected|BlueprintEvent) // @ game+0xda7c34
	void ExecuteUbergraph_BP_ProjectileTrajectory_HeldObjectSocket(int32_t EntryPoint); // Function BP_ProjectileTrajectory_HeldObjectSocket.BP_ProjectileTrajectory_HeldObjectSocket_C.ExecuteUbergraph_BP_ProjectileTrajectory_HeldObjectSocket // (Final|UbergraphFunction|HasDefaults) // @ game+0xda7c34
};
|
def print_error(e):
    """Print an exception's arguments to stdout, space-separated."""
    parts = (str(arg) for arg in e.args)
    print(" ".join(parts))
def main(engine: Any, schema: Optional[str],
         warnings: bool, list_tables: bool, table_details: bool, partition: bool,
         cycles: bool, insert_order: bool, export_graph: bool, transferable: bool) -> None:
    """Inspect a database and print the reports selected by the boolean flags.

    Args:
        engine: SQLAlchemy engine connected to the target database.
        schema: Schema to inspect; defaults to the inspector's default schema.
        warnings: Also report tables missing primary keys.
        list_tables: Print only the table names.
        table_details: Print columns and foreign keys per table.
        partition: Print FK-dependency partition info.
        cycles: Print FK cycles (and break them).
        insert_order: Print a safe insertion order for the tables.
        export_graph: Export the FK-dependency graph to a dot file.
        transferable: Run only the transferability report, then return.
    """
    inspector = inspect(engine)
    if schema is None:
        schema = inspector.default_schema_name

    if transferable:
        transferability(inspector, schema)
        return

    tables = sorted(inspector.get_table_names(schema))
    if list_tables:
        for table in tables:
            print(table)
    elif table_details:
        for table in tables:
            columns = inspector.get_columns(table, schema)
            fks = inspector.get_foreign_keys(table, schema)
            print("\ntable:", table)
            if len(columns) > 0:
                print("\tcolumns:", ", ".join(col['name'] for col in columns))
            if len(fks) > 0:
                print("\tfks:", fks)
    elif not export_graph:
        print("Found %s tables in schema '%s'" % (len(tables), schema))

    if warnings:
        print_missing_primary_keys(inspector, schema)

    # Build the FK dependency graph only when a graph-based report was requested.
    table_graph = nx.DiGraph()
    if any([partition, cycles, insert_order, export_graph]):
        table_graph = db_graph.build_fk_dependency_graph(inspector, schema)
    if partition:
        print_partition_info(table_graph)
    if cycles:
        print_cycle_info_and_break_cycles(table_graph)
    if insert_order:
        print_insertion_order(table_graph)
    if export_graph:
        graph_export_to_dot_file(table_graph)
/// Build a single schema file and write it to the destination file.
///
/// **WARNING: THIS OVERWRITES THE DESTINATION FILE.**
pub fn build_schema(schema: impl AsRef<Path>, destination: impl AsRef<Path>) {
let (schema, destination) = (schema.as_ref(), destination.as_ref());
let compiler_path = compiler_path();
println!("cargo:rerun-if-changed={}", compiler_path.to_str().unwrap());
println!("cargo:rerun-if-changed={}", schema.to_str().unwrap());
let output = Command::new(compiler_path)
.arg("--files")
.arg(schema)
.arg("--rust")
.arg(destination.to_str().unwrap())
.output()
.expect("Could not run bebopc");
if !(output.status.success()) {
println!(
"cargo:warning=Failed to build schema {}",
schema.to_str().unwrap()
);
for line in String::from_utf8(output.stdout).unwrap().lines() {
println!("cargo:warning=STDOUT: {}", line);
}
for line in String::from_utf8(output.stderr).unwrap().lines() {
println!("cargo:warning=STDERR: {}", line);
}
panic!("Failed to build schema!");
}
fmt_file(destination);
} |
Subsets and Splits