content
stringlengths 10
4.9M
|
---|
{-# LANGUAGE RebindableSyntax #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fplugin Control.Supermonad.Plugin #-}
{-
******************************************************************************
* H M T C *
* *
* Module: ParseMonad *
* Purpose: Monad for scanning and parsing *
* Authors: <NAME> *
* *
* Copyright (c) <NAME>, 2006 - 2015 *
* *
******************************************************************************
-}
-- | Monad for scanning and parsing.
-- The scanner and parser are both monadic, following the design outlined
-- in the Happy documentation on monadic parsers. The parse monad P
-- is built on top of the diagnostics monad D, additionally keeping track
-- of the input and current source code position, and exploiting that
-- the source code position is readily available to avoid having to pass
-- the position as an explicit argument.
-- Updated 2015 in view of revised monad class hierarchy.
module ParseMonad (
-- The parse monad
P (..), -- Not abstract. Instances: Monad.
unP, -- :: P a -> (Int -> Int -> String -> DF a)
emitInfoP, -- :: String -> P ()
emitWngP, -- :: String -> P ()
emitErrP, -- :: String -> P ()
failP, -- :: String -> P a
getSrcPosP, -- :: P SrcPos
runP -- :: String -> P a -> DF a
) where
import Control.Supermonad.Prelude
import qualified Prelude as P
-- Standard library imports
--import Control.Applicative -- Backwards compatibibility
-- HMTC module imports
import SrcPos
import Diagnostics
-- | The parse monad: a computation that reads the current line, column,
-- and remaining input, producing a result in the diagnostics monad 'DF'.
newtype P a = P (Int -> Int -> String -> DF a)

-- | Unwraps a parse-monad computation to its underlying function.
unP :: P a -> (Int -> Int -> String -> DF a)
unP (P x) = x
-- | Map over the result inside the underlying 'DF' computation,
-- threading line, column, and input through unchanged.
instance Functor P where
    fmap f p = P (\l c s -> fmap f (unP p l c s))
    a <$ p = P (\l c s -> a <$ (unP p l c s))
-- | Supermonad applicative instance: both computations receive the same
-- position and input; their effects are combined in 'DF'.
instance Applicative P P P where
    pf <*> pa = P (\l c s -> unP pf l c s <*> unP pa l c s)
{-
instance Monad P where
return = pure -- Backwards compatibility
p >>= f = P (\l c s -> unP p l c s >>= \a -> unP (f a) l c s)
-}
-- | Supermonad bind: run the first computation, then feed its result to the
-- continuation at the same line/column/input (the environment is read-only).
instance Bind P P P where
    p >>= f = P (\l c s -> unP p l c s >>= \a -> unP (f a) l c s)
-- | Supermonad return: inject a pure value, ignoring position and input.
instance Return P where
    return a = P (\_ _ _ -> pure a)
-- | NOTE(review): 'fail' aborts via the partial 'error'; callers should
-- prefer 'failP', which reports through the diagnostics monad with a
-- source position instead of crashing.
instance Fail P where
    fail = error
-- NOTE: This instance is not required by any module except the one generated by happy.
-- | Standard (base 'P.Applicative') instance delegating to the supermonad
-- operations: the unqualified 'pure' and '(<*>)' on the right-hand sides
-- resolve to Control.Supermonad.Prelude (Prelude is imported qualified as
-- P), so these definitions are not self-referential.
instance P.Applicative P where
  pure = pure
  (<*>) = (<*>)
-- | Standard (base 'P.Monad') instance delegating to the supermonad
-- 'return' and '(>>=)' from Control.Supermonad.Prelude; required only by
-- the happy-generated parser module.
instance P.Monad P where
  return = return
  (>>=) = (>>=)
-- Liftings of useful computations from the underlying DF monad, taking
-- advantage of the fact that source code positions are available.
-- | Emits an information message at the current source position.
emitInfoP :: String -> P ()
emitInfoP msg = P (\l c _ -> emitInfoD (SrcPos l c) msg)
-- | Emits a warning message at the current source position.
emitWngP :: String -> P ()
emitWngP msg = P (\l c _ -> emitWngD (SrcPos l c) msg)
-- | Emits an error message at the current source position.
emitErrP :: String -> P ()
emitErrP msg = P (\l c _ -> emitErrD (SrcPos l c) msg)
-- | Emits an error message at the current source position and fails
-- (via 'failD' in the diagnostics monad).
failP :: String -> P a
failP msg = P (\l c _ -> failD (SrcPos l c) msg)
-- | Gets the current source code position (line and column) without
-- consuming any input.
getSrcPosP :: P SrcPos
getSrcPosP = P (\l c _ -> return (SrcPos l c))
-- | Runs a parser (and scanner) on the given input, starting at line 1,
-- column 1, yielding its result in the diagnostics monad 'DF'.
runP :: P a -> String -> DF a
runP p = unP p 1 1
|
def has_valid_operator(nodal_set, pool_set, activation_set):
    """Validate nodal, pooling, and activation operator choices.

    Each entry of the three iterables must be either the name of a
    supported built-in operator (a ``str`` found in the corresponding
    default set) or a callable implementing the operator.

    :param nodal_set: iterable of nodal operators (names or callables)
    :param pool_set: iterable of pooling operators (names or callables)
    :param activation_set: iterable of activation operators (names or callables)
    :raises ValueError: if a string operator name is not supported
    :raises AssertionError: if a non-string entry is not callable
    :return: None (validation raises on failure)
    """
    supported_nodal = get_default_nodal_set()
    supported_pool = get_default_pool_set()
    supported_activation = get_default_activation_set()
    for nodal in nodal_set:
        if isinstance(nodal, str):
            if nodal not in supported_nodal:
                # bug fix: `raise '<str>'` raises a TypeError in Python 3;
                # raise a real exception carrying the message instead.
                raise ValueError('Nodal operator ´´%s´´ not supported' % nodal)
        else:
            assert callable(
                nodal), "The given nodal operator is neither in default list nor callable"
    for pool in pool_set:
        if isinstance(pool, str):
            if pool not in supported_pool:
                raise ValueError("Pooling operator ´´%s´´ not supported" % pool)
        else:
            assert callable(
                pool), "The given pooling operator is neither in default list nor callable"
    for activation in activation_set:
        if isinstance(activation, str):
            if activation not in supported_activation:
                # bug fix: the message previously interpolated the stale
                # loop variable `pool` instead of `activation`.
                raise ValueError("Activation operator ´´%s´´ not supported" % activation)
        else:
            # bug fix: previously checked callable(pool) instead of the
            # activation entry being validated.
            assert callable(
                activation), "The given activation operator is neither in default list nor callable"
    return
// ValidateBasic runs stateless checks on the message
func (msg MsgFund) ValidateBasic() error {
if !msg.Amount.IsValid() {
return sdkerrors.Wrap(sdkerrors.ErrInvalidCoins, msg.Amount.String())
}
if msg.Sender.Empty() {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "sender %s", msg.Sender.String())
}
if msg.Recipient.Empty() {
return sdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, "recipient %s", msg.Recipient.String())
}
return nil
} |
<reponame>odys-z/jclient<gh_stars>0
package io.oz.album.client;
import static io.oz.album.client.PrefsContentActivity.singleton;
import android.app.Activity;
import android.os.Bundle;
import android.text.InputType;
import android.util.Log;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.preference.EditTextPreference;
import androidx.preference.Preference;
import androidx.preference.PreferenceCategory;
import androidx.preference.PreferenceFragmentCompat;
import androidx.preference.PreferenceManager;
import io.odysz.common.LangExt;
import io.odysz.semantic.jsession.SessionInf;
import io.oz.AlbumApp;
import io.oz.R;
import io.oz.albumtier.AlbumContext;
/**
 * Preference screen of the album client.  Binds persisted preference values
 * to their on-screen summaries, masks the password editor, and disables the
 * device-name / registration widgets once a device id has been established.
 */
public class AlbumPreferenceFragment extends PreferenceFragmentCompat {
    Preference summery;          // login summary line (key: login_summery)
    Preference homepref;         // home preference entry
    EditTextPreference device;   // device-name editor
    Preference btnRegist;        // "register" button preference
    PreferenceCategory cateHome; // category holding home/registration entries
    Activity ctx;                // hosting activity

    public AlbumPreferenceFragment(PrefsContentActivity ctx) {
        this.ctx = ctx;
    }

    @Override
    public void onCreatePreferences(@Nullable Bundle savedInstanceState, @Nullable String rootKey) {
        addPreferencesFromResource(R.xml.pref);
        // Attach the summary-updating listener to every editable preference.
        bindPref2Val(findPreference(AlbumApp.keys.home));
        bindPref2Val(findPreference(AlbumApp.keys.device));
        bindPref2Val(findPreference(AlbumApp.keys.jserv));
        bindPref2Val(findPreference(AlbumApp.keys.usrid));
        bindPref2Val(findPreference(AlbumApp.keys.pswd));
        cateHome = findPreference(AlbumApp.keys.homeCate);
        btnRegist = findPreference(AlbumApp.keys.bt_regist);
        device = findPreference(AlbumApp.keys.device);
        // Password: never echo the value in the summary; mask the editor.
        EditTextPreference pswd = findPreference(AlbumApp.keys.pswd);
        pswd.setSummary("");
        pswd.setOnBindEditTextListener(editText ->
            editText.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_VARIATION_PASSWORD));
        homepref = findPreference(AlbumApp.keys.home);
        String devid = singleton.photoUser.device;
        EditTextPreference prefDev = findPreference(AlbumApp.keys.device);
        if (!LangExt.isblank(devid)) {
            // Device already registered: freeze the device name and remove
            // the registration button from its category.
            // homepref.setSummary(getString(R.string.devide_name, devid));
            homepref.setSummary(AlbumContext.getInstance().homeName);
            prefDev.setEnabled(false);
            cateHome.removePreference(btnRegist);
            device.setSummary(getString(R.string.devide_name, devid));
        }
        else {
            // No device id yet: allow editing (the name can be set only once).
            prefDev.setEnabled(true);
            device.setSummary(R.string.txt_only_once);
        }
        summery = findPreference(AlbumApp.keys.login_summery);
    }

    /**
     * Binds a preference to the shared change listener and seeds its summary
     * with the currently persisted value.
     *
     * @param preference the editable preference to bind (must be non-null)
     */
    static void bindPref2Val(@NonNull EditTextPreference preference) {
        preference.setOnPreferenceChangeListener(prefsListener);
        preference.setOnBindEditTextListener(TextView::setSingleLine);
        // Fire the listener once so the summary reflects the stored value.
        prefsListener.onPreferenceChange(preference,
            PreferenceManager
                .getDefaultSharedPreferences(preference.getContext())
                .getString(preference.getKey(), ""));
    }

    /**
     * A preference value change listener that updates the preference's summary
     * to reflect its new value, and pushes the value into the app singleton.
     */
    private static final Preference.OnPreferenceChangeListener prefsListener =
        (preference, newValue) -> {
            String stringValue = newValue.toString().trim();
            String k = preference.getKey();
            if (k.equals(AlbumApp.keys.jserv)) {
                singleton.jserv(stringValue);
                preference.setSummary(stringValue);
            }
            else if (AlbumApp.keys.pswd.equals(k)) {
                // Password is stored in the singleton but never shown.
                singleton.pswd(stringValue);
                preference.setSummary("");
            }
            else if (AlbumApp.keys.usrid.equals(k)) {
                // Recreate the session for the new user id, keeping the
                // current device name.
                String device = singleton.photoUser.device;
                singleton.photoUser = new SessionInf(singleton.photoUser.ssid(), stringValue);
                singleton.photoUser.device = device;
                preference.setSummary(stringValue);
            }
            else if (AlbumApp.keys.device.equals(k)) {
                singleton.photoUser.device = stringValue;
                preference.setSummary(stringValue);
            }
            else if (AlbumApp.keys.home.equals(k)) {
                singleton.homeName = stringValue;
                preference.setSummary(stringValue);
            }
            return true;
        };
}
|
We have now reviewed the video of the Panel on the Future of Classics, which will be disseminated online today, February 14, 2019.
The video makes it clear that what was said to Prof. Padilla Peralta was: “You may have got your job because you’re black, but I would prefer to think you got your job because of merit.”
Despite this factual correction to the Presidential letter of 1/10/19, the SCS leadership stands by the substance of the Presidential letter and the actions taken onsite in San Diego, which have been reviewed by the Professional Ethics Committee. We repeat here that the future of classical studies depends on expansion, inclusion, and focused attention on and action to remedy the under-representation of people of color in Classics.
Mary T. Boatwright
SCS President |
<reponame>code-epic/gdoc
/**
 * Production environment configuration for the Angular build.
 * NOTE(review): the Url is a hard-coded private IP and Hash looks like an
 * embedded credential/checksum — confirm these are safe to commit and
 * consider moving them to deployment-time configuration.
 */
export const environment = {
  production: true,
  ID : 'ID-001',                                // deployment identifier
  Url: 'https://10.120.0.58',                   // backend host
  API: '/v1/api/',                              // API base path
  Hash: ':c521f27fb1b3311d686d511b668e5bd4'     // opaque token appended to requests — TODO confirm purpose
};
|
<reponame>sunshine98/repair
package org.tysf.gt.pojo;
/**
 * Plain data holder for the five content slots of a notification template:
 * an opening line, three keyword slots, and a closing remark.
 */
public class TemplateData {

    private TemplateContent first;    // opening line
    private TemplateContent keyword1; // first keyword slot
    private TemplateContent keyword2; // second keyword slot
    private TemplateContent keyword3; // third keyword slot
    private TemplateContent remark;   // closing remark

    /**
     * Creates a fully populated template.
     *
     * @param first    opening line
     * @param keyword1 first keyword slot
     * @param keyword2 second keyword slot
     * @param keyword3 third keyword slot
     * @param remark   closing remark
     */
    public TemplateData(TemplateContent first, TemplateContent keyword1, TemplateContent keyword2,
            TemplateContent keyword3, TemplateContent remark) {
        super();
        this.first = first;
        this.keyword1 = keyword1;
        this.keyword2 = keyword2;
        this.keyword3 = keyword3;
        this.remark = remark;
    }

    public TemplateContent getFirst() {
        return first;
    }

    public void setFirst(TemplateContent first) {
        this.first = first;
    }

    public TemplateContent getKeyword1() {
        return keyword1;
    }

    public void setKeyword1(TemplateContent keyword1) {
        this.keyword1 = keyword1;
    }

    public TemplateContent getKeyword2() {
        return keyword2;
    }

    public void setKeyword2(TemplateContent keyword2) {
        this.keyword2 = keyword2;
    }

    public TemplateContent getKeyword3() {
        return keyword3;
    }

    public void setKeyword3(TemplateContent keyword3) {
        this.keyword3 = keyword3;
    }

    public TemplateContent getRemark() {
        return remark;
    }

    public void setRemark(TemplateContent remark) {
        this.remark = remark;
    }

    /** Debug representation listing every slot in declaration order. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("TemplateData [first=");
        sb.append(first)
          .append(", keyword1=").append(keyword1)
          .append(", keyword2=").append(keyword2)
          .append(", keyword3=").append(keyword3)
          .append(", remark=").append(remark)
          .append("]");
        return sb.toString();
    }
}
|
/* ----------------------------------------------------------------------------
* GTSAM Copyright 2010-2014, Georgia Tech Research Corporation,
* Atlanta, Georgia 30332-0415
* All Rights Reserved
* Authors: Frank Dellaert, et al. (see THANKS for the full author list)
* See LICENSE for the license information
* -------------------------------------------------------------------------- */
/**
* @file EssentialMatrixConstraint.h
* @author Frank Dellaert
* @author Pablo Alcantarilla
* @date Jan 5, 2014
**/
#pragma once
#include <gtsam/nonlinear/NonlinearFactor.h>
#include <gtsam/geometry/EssentialMatrix.h>
namespace gtsam {
/**
 * Binary factor between two Pose3 variables induced by an EssentialMatrix
 * measurement: the error is the discrepancy between the measured essential
 * matrix and the one implied by the relative pose of the two variables.
 * @ingroup slam
 */
class GTSAM_EXPORT EssentialMatrixConstraint: public NoiseModelFactorN<Pose3, Pose3> {

private:

  typedef EssentialMatrixConstraint This;
  typedef NoiseModelFactorN<Pose3, Pose3> Base;

  EssentialMatrix measuredE_; ///< the measured essential matrix

public:

  /// shorthand for a smart pointer to a factor
  typedef boost::shared_ptr<EssentialMatrixConstraint> shared_ptr;

  /** default constructor - only use for serialization */
  EssentialMatrixConstraint() {
  }

  /**
   * Constructor
   * @param key1 key for first pose
   * @param key2 key for second pose
   * @param measuredE measured EssentialMatrix
   * @param model noise model, 5D !
   */
  EssentialMatrixConstraint(Key key1, Key key2,
      const EssentialMatrix& measuredE, const SharedNoiseModel& model) :
      Base(model, key1, key2), measuredE_(measuredE) {
  }

  ~EssentialMatrixConstraint() override {
  }

  /// @return a deep copy of this factor
  gtsam::NonlinearFactor::shared_ptr clone() const override {
    return boost::static_pointer_cast<gtsam::NonlinearFactor>(
        gtsam::NonlinearFactor::shared_ptr(new This(*this)));
  }

  /** implement functions needed for Testable */

  /** print */
  void print(const std::string& s = "",
      const KeyFormatter& keyFormatter = DefaultKeyFormatter) const override;

  /** equals */
  bool equals(const NonlinearFactor& expected, double tol = 1e-9) const override;

  /** implement functions needed to derive from Factor */

  /** vector of errors, with optional Jacobians w.r.t. each pose */
  Vector evaluateError(const Pose3& p1, const Pose3& p2,
      boost::optional<Matrix&> Hp1 = boost::none, //
      boost::optional<Matrix&> Hp2 = boost::none) const override;

  /** return the measured essential matrix */
  const EssentialMatrix& measured() const {
    return measuredE_;
  }

private:

  /** Serialization function */
  friend class boost::serialization::access;
  template<class ARCHIVE>
  void serialize(ARCHIVE & ar, const unsigned int /*version*/) {
    // NoiseModelFactor2 instead of NoiseModelFactorN for backward compatibility
    ar
        & boost::serialization::make_nvp("NoiseModelFactor2",
            boost::serialization::base_object<Base>(*this));
    ar & BOOST_SERIALIZATION_NVP(measuredE_);
  }

public:
  GTSAM_MAKE_ALIGNED_OPERATOR_NEW
};
// \class EssentialMatrixConstraint
}/// namespace gtsam
|
#!/usr/bin/env python
"""
author: <NAME>
date: Jul.26,2016, 15:04
function: a GUI for Lung Single cell tools
"""
#-----------------------------------------------------------------------
import pdb,sys,os
if sys.version_info[0]<3:
import Tkinter as tk
import tkFileDialog
from Tkinter import *
import ttk
from ScrolledText import *
else:
import tkinter as tk
from tkinter import *
import tkinter.filedialog as tkFileDialog
import tkinter.ttk as ttk
import tkinter.scrolledtext as ScrolledText
from tkinter.scrolledtext import *
import subprocess
import multiprocessing
import threading
import pkg_resources
#----------------------------------------------------------------------
class App:
    """Tkinter front-end for the scdiff single-cell analysis pipeline.

    Builds an input form (expression file, TF-DNA interaction file, cluster
    count K, output folder), a Run button, a progress bar, and a scrolled
    log area, then shells out to ``scdiff.py`` on a background thread.
    """

    def __init__(self, parent):
        # parent: the Tk root window this GUI is packed into.
        self.myParent = parent
        #---------------------------------------------------------------
        # frame 0: Software Logo
        self.frameLogo = Frame(parent, background='white')
        imgpath = pkg_resources.resource_filename(__name__, "img/logo.gif")
        #pdb.set_trace()
        img = PhotoImage(file=imgpath)
        imgLabel = Label(self.frameLogo, image=img)
        # Keep a reference on the label so the image is not garbage-collected.
        imgLabel.image = img
        imgLabel.pack()
        self.frameLogo.grid(row=0, column=1, sticky='ew')
        #---------------------------------------------------------------
        # frame 1: expression frame
        self.frameEx = Frame(parent)
        self.lex1 = ttk.Label(self.frameEx, text="Please Read in the Single Cell Expression data: ")
        self.fileName = ""
        self.bex1 = ttk.Button(self.frameEx, text='Read in Expression', command=self.readEx)
        self.vex1 = StringVar()
        self.lex2 = ttk.Label(self.frameEx, textvariable=self.vex1)
        self.lex1.pack(side='left', fill=None, expand=False, padx=6, pady=20)
        self.bex1.pack(side='left', fill=None, expand=False, padx=20, pady=20)
        self.lex2.pack(side='left', fill=None, expand=False, padx=40, pady=20)
        # frame grid: row_ct tracks the next free grid row.
        row_ct = 1
        self.frameEx.grid(row=row_ct, column=1, sticky='ew')
        row_ct += 1
        #---------------------------------------------------------------
        # frame 2: TF-DNA interaction file
        self.frameTFDNA = Frame(parent)
        self.ltf1 = ttk.Label(self.frameTFDNA, text="Please read in the TF-DNA interaction file: ")
        self.btf1 = ttk.Button(self.frameTFDNA, text='Read in TF-DNA', command=self.readTF)
        self.vtf1 = StringVar()
        self.ltf2 = ttk.Label(self.frameTFDNA, textvariable=self.vtf1)
        self.ltf1.pack(side='left', fill=None, expand=False, padx=6)
        self.btf1.pack(side='left', fill=None, expand=False, padx=40)
        self.ltf2.pack(side='left', fill=None, expand=False, padx=20)
        # frame grid
        self.frameTFDNA.grid(row=row_ct, column=1, sticky='ew')
        row_ct += 1
        #---------------------------------------------------------------
        # frame 3: K frame (optimal number of clusters for each time point)
        self.frameK = Frame(parent)
        self.KName = "auto"  # default: let scdiff pick K automatically
        self.lk1 = ttk.Label(self.frameK, text="Please Specificy the Optimal Number of Cluster K for Each Time Point: ")
        self.vk1 = IntVar()  # 1 = user-defined K file, 0 = auto
        self.rbk1 = ttk.Radiobutton(self.frameK, text='user-defined', variable=self.vk1, value=1, command=self.readK)
        self.rbk2 = ttk.Radiobutton(self.frameK, text='auto', variable=self.vk1, value=0)
        self.lk1.pack(side='left', padx=6, pady=20)
        self.rbk1.pack(side='left', padx=29)
        self.rbk2.pack(side='left', padx=20)
        self.frameK.grid(row=row_ct, column=1, sticky='ew')
        row_ct += 1
        #---------------------------------------------------------------
        # frame 4: Output
        self.frameoutfolder = Frame(parent)
        self.lo1 = ttk.Label(self.frameoutfolder, text="Please Specify the Output Folder Name: ")
        self.ev1 = StringVar()
        self.eo1 = ttk.Entry(self.frameoutfolder, textvariable=self.ev1)
        self.lo1.pack(side='left', padx=6)
        self.eo1.pack(side='left', padx=20)
        self.frameoutfolder.grid(row=row_ct, column=1, sticky='ew')
        row_ct += 1
        #--------------------------------------------------------------
        # frame 5: run
        self.framerun = Frame(parent)
        self.br1 = ttk.Button(self.framerun, text='Click to Run!', command=self.run)
        self.br1.pack(side='left', padx=6, pady=20)
        self.framerun.grid(row=row_ct, column=1, sticky='ew')
        row_ct += 1
        #---------------------------------------------------------------
        # seperator of input and output
        s1 = ttk.Separator(parent, orient=HORIZONTAL)
        s1.grid(row=row_ct, column=1, sticky='ew', pady=10)
        row_ct += 1
        #---------------------------------------------------------------
        # frame 6: progress bar
        self.framePb = Frame(parent)
        self.lpb1 = ttk.Label(self.framePb, text='Running Progress: ')
        self.pb = ttk.Progressbar(self.framePb, orient='horizontal', mode="determinate", length=580)
        self.lpb1.pack(side='left', padx=6, pady=10)
        self.pb.pack(side='left', padx=20)
        self.framePb.grid(row=row_ct, column=1, sticky='ew')
        row_ct += 1
        #---------------------------------------------------------------
        #---------------------------------------------------------------
        # frame 7: output display area
        self.framedisplay = Frame(parent)
        self.ld1 = ttk.Label(self.framedisplay, text='Running log: ')
        self.textarea1 = ScrolledText(self.framedisplay)
        self.ld1.pack(side='left', padx=6, pady=20)
        self.textarea1.pack(side='left', padx=50)
        self.framedisplay.grid(row=row_ct, column=1, sticky='ew', pady=20)
        row_ct += 1

    def readEx(self):
        # Pick the expression file and echo its basename in the label.
        self.fileName = ''
        self.fileName = tkFileDialog.askopenfilename()
        self.vex1.set(self.fileName.split('/')[-1])

    def readK(self):
        # Pick a user-defined K file (replaces the "auto" default).
        self.KName = ''
        self.KName = tkFileDialog.askopenfilename()

    def readTF(self):
        # Pick the TF-DNA interaction file and echo its basename.
        self.TFName = ''
        self.TFName = tkFileDialog.askopenfilename()
        self.vtf1.set(self.TFName.split('/')[-1])

    def run(self):
        #check whether input valid
        if self.isInputValid():
            # run the pipeline on a worker thread so the GUI stays responsive
            self.p = threading.Thread(target=self.trun)
            self.p.start()

    def trun(self):
        """Worker thread: launch scdiff.py and stream its stdout to the log."""
        #pdb.set_trace()
        self.o = self.ev1.get()
        #pdb.set_trace()
        # Use the same interpreter that is running this GUI.
        python = sys.executable
        proc = subprocess.Popen([python, 'scdiff.py', '-i', self.fileName, '-t', self.TFName, '-k', self.KName, '-o', self.o], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        ct = 0
        maxCT = 10000  # arbitrary cap for the progress bar scale
        self.textarea1.insert(tk.INSERT, "starting...")
        self.textarea1.see(tk.END)
        # NOTE(review): on Python 3 proc.stdout yields bytes, so the ''
        # sentinel never matches and insert() receives bytes — verify this
        # loop terminates correctly under Python 3.
        for line in iter(proc.stdout.readline, ''):
            self.textarea1.insert(tk.INSERT, line)
            self.textarea1.see(tk.END)
            self.updateProgress(maxCT, ct)
            ct += 1
            self.textarea1.update_idletasks()
        self.updateProgress(maxCT, maxCT)
        proc.stdout.close()
        proc.wait()
        self.textarea1.insert(tk.INSERT, "end!")
        self.textarea1.see(tk.END)

    def updateProgress(self, MAX, VAL):
        # Set the determinate progress bar to VAL out of MAX.
        self.pb['maximum'] = int(MAX)
        self.pb['value'] = int(VAL)

    def isInputValid(self):
        """Validate the three inputs; returns True when all checks pass.

        flag counts passed checks: expression file exists, K is auto or a
        readable file, and the output folder exists or can be created.
        """
        flag = 0
        # check if expression file exists
        if os.path.isfile(self.fileName):
            flag += 1
        else:
            self.textarea1.insert(tk.INSERT, '\nError: Input single Cell Expression data not found!\n')
            self.textarea1.see(tk.END)
            self.textarea1.update_idletasks()
        # check if K valid (0 = auto, always fine; 1 = user file must exist)
        if self.vk1.get() == 0:
            flag += 1
        elif self.vk1.get() == 1:
            if os.path.isfile(self.KName):
                flag += 1
            else:
                self.textarea1.insert(tk.INSERT, '\nError: User-defined Optimal Number of Clusters K file not found!\n')
                self.textarea1.see(tk.END)
                self.textarea1.update_idletasks()
        # check if output name valid (create the folder if it does not exist)
        try:
            if os.path.exists(self.ev1.get()) == False:
                os.mkdir(self.ev1.get())
            flag += 1
        except:
            self.textarea1.insert(tk.INSERT, '\nError: Output folder name invalid!\n')
            self.textarea1.see(tk.END)
            self.textarea1.update_idletasks()
        #pdb.set_trace()
        if flag >= 3:
            return True
        return False
def main():
    """Create the Tk root window, attach the App GUI, and run the event loop."""
    root_window = tk.Tk()
    #root_window.columnconfigure(0, weight=1)
    _app = App(root_window)
    root_window.geometry("800x900")
    root_window.title("SCDIFF")
    root_window.mainloop()


if __name__ == '__main__':
    main()
|
<gh_stars>0
#include "CmdFindNext.h"
/**
 * Command that advances to the next search result.
 *
 * @param pAllWindowsVector all application windows (forwarded to CommandBase)
 * @param textFinder the finder whose current search will be advanced
 */
CmdFindNext::CmdFindNext(std::vector<WindowBase*>* pAllWindowsVector,
                         TextFinder& textFinder)
  : CommandBase(pAllWindowsVector),
    m_TextFinder(textFinder)
{
}
// Nothing to release: m_TextFinder is a non-owning reference.
CmdFindNext::~CmdFindNext()
{
}
// Advances the text finder to its next result; the active window argument
// is unused because the finder tracks its own target window/state.
void CmdFindNext::Execute(struct Window* pActiveWindow)
{
  m_TextFinder.displayNextResult();
}
|
/**
 * Represents a type-cast expression.
 * <pre>{@code CAST(expr AS type-name)}</pre>
 */
public class CastExpr implements Expr {
    /** The expression being cast (never null). */
    public final Expr expr;
    /** The target type (never null). */
    public final Type typeName;

    /**
     * @param expr     expression to cast; must be non-null
     * @param typeName target type; must be non-null
     * @throws NullPointerException if either argument is null
     */
    public CastExpr(Expr expr, Type typeName) {
        this.expr = requireNonNull(expr);
        this.typeName = requireNonNull(typeName);
    }

    /** Renders {@code CAST(expr AS type)} onto the given output. */
    @Override
    public void toSql(Appendable a) throws IOException {
        a.append("CAST(");
        expr.toSql(a);
        a.append(" AS ");
        typeName.toSql(a);
        a.append(')');
    }
}
I am an assistant professor at Northeastern University focusing on data visualization. Please see my homepage or Visualization @ CCIS for more information.
My research focuses on information visualization, visual analytics, and cognitive computing. In the past I've worked on improving the readability of network visualizations and the application of network analysis techniques to real-world problems. Some examples include visualizing citations in academic literature, interactions of people and organizations, relationships in archaeological dig sites, term co-occurrence, thesaurus category relationships, and computer network traffic flow. I contribute to the NodeXL project, an open source network visualization template for Microsoft Excel.
In 2013 I received my PhD in Computer Science under Ben Shneiderman at the University of Maryland Human Computer Interaction Lab. I earned an M.S. in Computer Science from the University of Maryland in 2009 and a B.A. in Computer Science and Mathematics from Cornell College in 2007. |
/**
* check used columns.
*
* @param builder
* the builder
* @param method
* the method
* @param columnNames
* the column names
* @return name of column name set
*/
public static String generateColumnCheckSet(TypeSpec.Builder builder, SQLiteModelMethod method, Set<String> columnNames) {
String columnNameSet = method.contentProviderMethodName + "ColumnSet";
StringBuilder initBuilder = new StringBuilder();
String temp = "";
for (String item : columnNames) {
initBuilder.append(temp + "\"" + item + "\"");
temp = ", ";
}
FieldSpec.Builder fieldBuilder = FieldSpec.builder(ParameterizedTypeName.get(Set.class, String.class), columnNameSet, Modifier.STATIC, Modifier.PRIVATE, Modifier.FINAL);
fieldBuilder.initializer("$T.asSet($T.class, $L)", CollectionUtils.class, String.class, initBuilder.toString());
builder.addField(fieldBuilder.build());
return columnNameSet;
} |
// Check validates the request.
func (r *GetWebSessionRequest) Check() error {
if r.User == "" {
return trace.BadParameter("user name missing")
}
if r.SessionID == "" {
return trace.BadParameter("session ID missing")
}
return nil
} |
/**
* Sorts the population based on the chromosome's comparator.
*
*/
public void sort() {
if (sorted)
return;
for (int n = 1; n < N; n *= 2) {
for (int i = 0; i < N - n; i += n + n) {
int hi = Math.min(i + n + n - 1, N - 1);
for (int k = i; k <= hi; k++) {
aux[k] = chromosomes.get(k);
}
merge
int x = i, y = i + n;
for (int k = i; k <= hi; k++) {
if (!(y > hi) && aux[y].fitness < aux[x].fitness) {
chromosomes.set(k, aux[y++]);
} else {
chromosomes.set(k, aux[x++]);
}
}
}
}
Collections.sort(chromosomes);
sorted = true;
} |
// Babylonian algorithm for computing the square root (fixed-iteration
// Newton's method); comment marker fixed from `\\` to `//`.
racional raiz_quadrada (racional n)
{
racional suposição, resultante;
suposição = n / 2;
para (inteiro i = 0; i < 10; i++)
{
resultante = n / suposição;
suposição = (suposição + resultante) / 2;
}
retornar suposição;
}
def first_index_lt(data_list, value):
    """Return the index of the first element strictly less than ``value``.

    Returns -1 when no such element exists (including for empty input).
    """
    for index, item in enumerate(data_list):
        if item < value:
            return index
    return -1
<reponame>chen0040/cpp-steering-behaviors<gh_stars>1-10
#ifndef _H_GL_STATE_WANDER_H
#define _H_GL_STATE_WANDER_H
#include "GLState.h"
// Singleton-style GL state demonstrating the "wander" steering behavior:
// private constructor plus a static Instance() accessor; copying and
// assignment are disabled.
class GLState_Wander : public GLState
{
public:
  virtual ~GLState_Wander();
  // Accessor for the single shared instance.
  static GLState_Wander* Instance();
private:
  GLState_Wander();
  GLState_Wander(const GLState_Wander& rhs) { }            // copying disabled
  GLState_Wander& operator=(const GLState_Wander& rhs) { return *this; }  // assignment disabled
public:
  // GLState lifecycle hooks.
  virtual void Entered(GameWorld* pWorld);
  virtual void Exited(GameWorld* pWorld);
  virtual void Update(GameWorld* pWorld, const long& lElapsedTicks);
public:
  virtual void MouseButtonDown(GameWorld* pWorld);
protected:
  int m_iMaxFPS;       // NOTE(review): set/used outside this header — confirm semantics
  int m_dPlaneLength;  // NOTE(review): `d` prefix but declared int — confirm intended type
};
#define glState_Wander (*(GLState_Wander::Instance()))
#endif |
<reponame>embeddery/stackrox<gh_stars>10-100
package blevesearch
import "fmt"
// ToMapKeyPath takes a path and generates the corresponding map key path
// by appending the ".keypair.key" suffix.
func ToMapKeyPath(path string) string {
	const keySuffix = "keypair.key"
	return fmt.Sprintf("%s.%s", path, keySuffix)
}
// ToMapValuePath takes a path and generates the corresponding map value
// path by appending the ".keypair.value" suffix.
func ToMapValuePath(path string) string {
	const valueSuffix = "keypair.value"
	return fmt.Sprintf("%s.%s", path, valueSuffix)
}
|
def y_on_ledger(self, pos_y):
    # True when pos_y is outside the staff AND falls on a whole-number
    # staff unit, i.e. on a ledger line rather than a ledger space.
    # NOTE(review): relies on self.y_outside_staff and self.unit defined
    # elsewhere; assumes unit(pos_y).value is numeric — confirm. The `and`
    # short-circuit order is deliberate: unit() is only consulted for
    # positions outside the staff.
    return (self.y_outside_staff(pos_y) and
            self.unit(pos_y).value % 1 == 0)
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;
/**
 * Solves the "Colorful Field" problem: an n x m field planted row-major with
 * a repeating crop cycle (Carrots, Kiwis, Grapes), skipping w waste cells.
 * For each query cell, prints "Waste" or the crop at that cell, computed as
 * (linear index - number of waste cells at or before it) mod 3, using a
 * TreeMap-based prefix sum over the sparse set of marked cells.
 */
public class ColorfulField {
    public static void main(String[] args) throws IOException {
        BufferedReader scan = new BufferedReader(new InputStreamReader(System.in));
        String [] line= scan.readLine().split(" ");
        int n= Integer.parseInt(line[0]);
        int m= Integer.parseInt(line[1]);
        int w= Integer.parseInt(line[2]);
        int q= Integer.parseInt(line[3]);
        // arr maps each marked cell to a running count (1 for waste, 0 for
        // query-only cells); TreeMap iterates in row-major Index order.
        TreeMap<Index, Long> arr= new TreeMap<>();
        // indices: row -> (col -> Index), for O(1) lookup of marked cells.
        Hashtable<Integer, Hashtable<Integer,Index>> indices= new Hashtable<>();
        for(int i= 0; i< w; i++){
            String [] wasteLine= scan.readLine().split(" ");
            int row= Integer.parseInt(wasteLine[0]);
            int col= Integer.parseInt(wasteLine[1]);
            Index index = new Index(row, col, 1, true);
            indices.putIfAbsent(row, new Hashtable<>());
            indices.get(row).put(col, index);
            arr.put(index, (long) 1);
        }
        //for make output sorted :))
        int [][] quries= new int[q][2];
        for(int i= 0; i< q; i++){
            String [] queryLine= scan.readLine().split(" ");
            int row= Integer.parseInt(queryLine[0]);
            int col= Integer.parseInt(queryLine[1]);
            quries[i][0]= row; quries[i][1]= col;
            // Insert query cells with value 0 so the prefix sum below gives
            // the number of waste cells at or before each queried cell.
            if(indices.get(row) == null || indices.get(row).get(col) == null) {
                Index index = new Index(row, col, 0, false);
                indices.putIfAbsent(row, new Hashtable<>());
                indices.get(row).put(col, index);
                arr.put(index, (long) 0);
            }
        }
        //prefix sum over marked cells in row-major order
        long sum= 0;
        for(Index index: arr.keySet()){
            sum += arr.get(index);
            arr.put(index, sum);
        }
        for(int i= 0; i< q; i++){
            int row= quries[i][0];
            int col= quries[i][1];
            if(indices.get(row) != null && indices.get(row).get(col).isWaste){
                System.out.println("Waste");
            }
            else{
                // f = 1-based planting index of this cell after skipping waste.
                long f= (row - 1) * m + col - arr.get(indices.get(row).get(col));
                if(f % 3 == 1){
                    System.out.println("Carrots");
                }
                else if(f % 3 == 2){
                    System.out.println("Kiwis");
                }
                else{
                    System.out.println("Grapes");
                }
            }
        }
    }

    /** A field cell; ordered row-major (row, then column). */
    static class Index implements Comparable<Index> {
        int row;
        int col;
        int val;          // 1 for waste cells, 0 for query-only cells
        boolean isWaste;  // true when the cell was listed as waste

        Index(int row, int col, int val, boolean isWaste){
            this.row= row;
            this.col= col;
            this.val= val;
            this.isWaste= isWaste;
        }

        @Override
        public int compareTo(Index index) {
            if(this.row - index.row != 0)
                return this.row - index.row;
            return this.col - index.col;
        }
    }
}
/**
* Test the valifity if a property value
*
* @param test posdsibly null value to test
* @return
*/
public static boolean validProperty(Object test)
{
if (test == null)
return (false);
if (test instanceof String) {
return (!Util.isEmptyString((String) test));
}
if (test instanceof Integer) {
return (((Integer) test).intValue() != Integer.MIN_VALUE);
}
if (test instanceof Date) {
return (((Date) test).getTime() != 0);
}
return (true);
} |
<gh_stars>0
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Licensed under the Zeebe Community License 1.1. You may not use this file
* except in compliance with the Zeebe Community License 1.1.
*/
package io.zeebe.broker.exporter.jar;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assume.assumeTrue;
import io.zeebe.broker.exporter.util.JarCreatorRule;
import io.zeebe.broker.exporter.util.TestJarExporter;
import java.io.File;
import java.io.IOException;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
import org.junit.rules.TemporaryFolder;
/**
 * Tests for {@code ExporterJarRepository}: loading must fail for non-jar,
 * unreadable, or missing files, succeed for readable jars, and cache the
 * class loader per jar path.
 */
public final class ExporterJarRepositoryTest {
  private final ExporterJarRepository jarRepository = new ExporterJarRepository();
  private final TemporaryFolder temporaryFolder = new TemporaryFolder();
  private final JarCreatorRule jarCreator = new JarCreatorRule(temporaryFolder);

  // The folder rule must wrap the jar creator so files outlive jar creation.
  @Rule public RuleChain chain = RuleChain.outerRule(temporaryFolder).around(jarCreator);

  @Test
  public void shouldThrowExceptionOnLoadIfNotAJar() throws IOException {
    // given: an existing file that is not a jar archive
    final File fake = temporaryFolder.newFile("fake-file");

    // then
    assertThatThrownBy(() -> jarRepository.load(fake.getAbsolutePath()))
        .isInstanceOf(ExporterJarLoadException.class);
  }

  @Test
  @Ignore // Temporary disable.. doesn't work on gcloud
  public void shouldThrowExceptionOnLoadIfNotReadable() throws Exception {
    // given
    final File dummy = temporaryFolder.newFile("unreadable.jar");

    // when (ignoring test if file cannot be set to not be readable)
    assumeTrue(dummy.setReadable(false));

    // then
    // System.out.println("was set = " + isSet);
    assertThatThrownBy(() -> jarRepository.load(dummy.getAbsolutePath()))
        .isInstanceOf(ExporterJarLoadException.class);
  }

  @Test
  public void shouldThrowExceptionIfJarMissing() throws IOException {
    // given: a path whose file has been deleted
    final File dummy = temporaryFolder.newFile("missing.jar");

    // when
    assertThat(dummy.delete()).isTrue();

    // then
    assertThatThrownBy(() -> jarRepository.load(dummy.getAbsolutePath()))
        .isInstanceOf(ExporterJarLoadException.class);
  }

  @Test
  public void shouldLoadClassLoaderForJar() throws IOException {
    // given
    final File dummy = temporaryFolder.newFile("readable.jar");

    // when (ignoring test if file cannot be set to be readable)
    assumeTrue(dummy.setReadable(true));

    // then
    assertThat(jarRepository.load(dummy.getAbsolutePath()))
        .isInstanceOf(ExporterJarClassLoader.class);
  }

  @Test
  public void shouldLoadClassLoaderCorrectlyOnlyOnce() throws Exception {
    // given: a jar containing a known exporter class
    final Class exportedClass = TestJarExporter.class;
    final File jarFile = jarCreator.create(exportedClass);

    // when
    final ExporterJarClassLoader classLoader = jarRepository.load(jarFile.toPath());

    // then: the jar's class is isolated (distinct from the host's class),
    // and a second load returns the cached loader.
    assertThat(classLoader.loadClass(exportedClass.getCanonicalName())).isNotEqualTo(exportedClass);
    assertThat(jarRepository.load(jarFile.toPath())).isEqualTo(classLoader);
  }
}
|
/**
* Adds a group to the {@link ASIOController}, for the level of the group to be
* calculated by the controller
*
* @param group The group to add to the list of groups
*/
public void addGroup(final Group group) {
if (!groupList.contains(group)) {
LOG.info("Group " + group.getName() + " added");
groupList.add(group);
}
} |
<reponame>TanXN/2019SE_work<gh_stars>0
package com.example.springdemo2;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
@SpringBootApplication
public class Springdemo2Application {

    /** Application entry point: boots the Spring application context. */
    public static void main(String[] args) {
        SpringApplication.run(Springdemo2Application.class, args);
    }

    /** Returns the fully-qualified greeting string used by this demo. */
    public static String hello() {
        return "com.example.springdemo2.hello";
    }
}
|
(UPDATED) A grandson of the author of the 'Conjugal Dictatorship' calls on the youth to fight historical revisionism
Published 9:00 AM, February 14, 2017
MANILA, Philippines (UPDATED) – Months have passed since former president Ferdinand Marcos was buried at the Heroes' Cemetery, but the opposition against it remains – especially among the younger generation.
At age 19, JC Mijares Gurango is taking the challenge of preserving the hard truths of the Philippines' dark chapter that have left Filipinos stuck in polarizing debates.
Gurango is the grandson of Primitivo Mijares, media czar of the late strongman. As president of the National Press Club and chairman of the Media Advisory Council during the Marcos regime, Mijares had served as the dictator's top propagandist while the government controlled the press.
He, however, turned his back against Marcos in 1975 and exposed the scandals of the regime before a congressional committee in the United States. He wrote the book, Conjugal Dictatorship, which details his insider account of the Marcoses.
A year after the book was published in 1976, Mijares tried to return to the Philippines but he went missing. He was never found.
Gurango never met his grandfather to personally hear the stories of Martial Law. But he knows he has to do something to not only preserve his sacrifice and legacy but also history. (READ: 2016: The year the Marcoses had it so good)
He worked to republish his grandfather's book and will relaunch it on February 21 – in time for the 31st anniversary of the 1986 EDSA People Power Revolution that ousted Marcos. (READ: 'Conjugal Dictatorship' to be relaunched in time for EDSA anniversary)
"Most of the people who are alive at the time are starting to die. Most of the people who can attest to the story, to what happened are slowly dying and I feel if we don't have something like this we're just gonna succumb to the whitewashing of the Marcoses of our history," he said in a Rappler Talk interview on Monday, February 13.
Connecting with the youth
He believes it is now the task of his generation to fight the rewriting of history, an issue that surfaced when Marcos was granted a state burial in November last year. (READ: Diokno: Next anti-Marcos campaign is to educate the youth)
"As future influencers, future voters, future teachers... [we need] to make sure that the story is told in a truthful way, accurate way," he said, still hopeful that there are many young people interested to read about Martial Law.
The new edition of the book contains 200 pages of annotation to aid young readers with the context of the events that his grandfather mentioned in his accounts.
What he wants other young people to appreciate about the book are the anecdotes about the Marcoses that show their character, which can only be told by a trusted man of the former president.
"This really tells the story [not only] from a perspective of the people [who are alive at the time] but from the perspective of [an] insider who was able to understand Ferdinand's mind," he shared. "One thing about my grandfather is Ferdinand trusted him enough to allow him inside his personal chambers."
Gurango hopes other books such as his grandfather's may be reprinted to be resource materials in schools.
Meanwhile, history professor Jo Ed Tirol of the Ateneo de Manila University has crafted a Martial Law curriculum that will be recommended to the Department of Education.
Education Secretary Leonor Briones earlier committed to deepen the discussion of Martial Law in schools and to review the textbooks.
"It is important that this (historical revisionism) becomes not a war of resources (money and influence) but a war of knowledge," said Gurango. – Rappler.com |
import io
import os
from collections import Counter, defaultdict, deque
def makeRepeat(arr):
    """Run-length encode ``arr``.

    Returns a list of ``[value, count]`` pairs, one per maximal run of
    equal consecutive elements, in order of appearance.

    :param arr: any non-empty or empty sequence (string, list, ...)
    :return: list of ``[value, count]`` pairs; ``[]`` for empty input
    """
    if not arr:
        # Guard: the original indexed arr[0] unconditionally and raised
        # IndexError on an empty sequence.
        return []
    # Seed with count 0; the first loop iteration bumps it to 1.
    repeat = [[arr[0], 0]]
    for x in arr:
        if x == repeat[-1][0]:
            repeat[-1][1] += 1
        else:
            repeat.append([x, 1])
    return repeat
def solve(N, K, S):
    """Greedy scoring over a W/L string with up to K flips.

    Appears to compute the maximum achievable score for a string ``S`` of
    'W'/'L' characters of length ``N`` where up to ``K`` 'L's may be turned
    into 'W's; each 'W' scores 1 plus 1 more if preceded by a 'W'
    (assumption inferred from the arithmetic below -- TODO confirm against
    the original problem statement).
    """
    # Degenerate case: all losses. Flipping k characters yields one run of
    # k wins: first win scores 1, the rest score 2 each.
    if all(x == "L" for x in S):
        if K:
            return (min(N, K) - 1) * 2 + 1
        else:
            return 0
    # Run-length encode, then strip leading/trailing 'L' runs: those gaps
    # border only one 'W' run, so filling them is worth 2 per flip (no +1
    # bonus for merging two runs), and they are handled last.
    repeats = makeRepeat(S)
    firstStreak = 0
    lastStreak = 0
    if repeats[-1][0] == "L":
        lastStreak = repeats.pop()[1]
    if repeats and repeats[0][0] == "L":
        firstStreak = repeats[0][1]
        repeats = repeats[1:]
    # base: score of the existing 'W' runs. streaks: interior 'L' gaps.
    base = 0
    streaks = []
    for ch, repeat in repeats:
        if ch == "L":
            streaks.append(repeat)
        else:
            base += (repeat - 1) * 2 + 1
    # Fill the cheapest interior gaps first: fully closing a gap merges two
    # 'W' runs, earning 2 per flip plus a +1 merge bonus.
    streaks.sort()
    # print(S, K, base)
    # print(streaks, lastStreak)
    total = 0
    for x in streaks:
        if x <= K:
            total += x * 2 + 1
            K -= x
        else:
            x = min(K, x)
            total += x * 2
            K -= x
    # Any remaining flips extend into the trailing, then leading, 'L' runs
    # at 2 points per flip (no merge bonus at the string edges).
    # print("after", total, K, firstStreak, lastStreak)
    if K and lastStreak:
        x = min(K, lastStreak)
        total += x * 2
        K -= x
    if K and firstStreak:
        x = min(K, firstStreak)
        total += x * 2
        K -= x
    return base + total
if __name__ == "__main__":
    # Fast stdin: slurp the whole input once (os.read of the full file size)
    # and serve lines from an in-memory buffer; returns bytes per line.
    input = io.BytesIO(os.read(0, os.fstat(0).st_size)).readline
    # First line: number of test cases.
    TC = int(input())
    for tc in range(1, TC + 1):
        # Per case: "N K" on one line, then the W/L string S.
        N, K = [int(x) for x in input().split()]
        S = input().decode().rstrip()
        ans = solve(N, K, S)
        print(ans)
|
<filename>backend/config/config.go
package config
// Config defines the configuration structure.
//
// Field tags give the configuration-file keys used by mapstructure-based
// decoding.
type Config struct {
	// General holds process-wide settings: logging, pool sizing and the
	// network ports the services listen on.
	General struct {
		LogLevel            int    `mapstructure:"log_level"`
		LogToSyslog         bool   `mapstructure:"log_to_syslog"`
		PoolSize            int    `mapstructure:"sensor_data_pool_size"`
		GrpcHttpGatewayPort uint16 `mapstructure:"grpc_http_gateway_port"`
		GrpcServicePort     uint16 `mapstructure:"grpc_service_port"`
		HttpPort            uint16 `mapstructure:"http_port"`
		WebStaticDir        string `mapstructure:"web_static_dir"`
	} `mapstructure:"general"`
	// PostgreSQL holds the database connection settings.
	PostgreSQL struct {
		DSN string `mapstructure:"dsn"`
		// NOTE(review): no mapstructure tag -- decoded under the decoder's
		// default key for "Automigrate"; confirm the config-file key matches.
		Automigrate        bool
		MaxOpenConnections int `mapstructure:"max_open_connections"`
		MaxIdleConnections int `mapstructure:"max_idle_connections"`
	} `mapstructure:"postgresql"`
	// #define REDIS_QUEUE "meteostation:bmp280"
	Redis struct {
		URL      string `mapstructure:"url"` // deprecated
		Password string `mapstructure:"password"`
		Database int    `mapstructure:"database"`
		Queue    string `mapstructure:"queue"`
	} `mapstructure:"redis"`
}
// C holds the global configuration.
var C Config

// Get returns a pointer to the global configuration, so callers observe
// later mutations of C.
func Get() *Config {
	cfg := &C
	return cfg
}
|
A survey of physician practices on the inpatient medical stabilization of patients with avoidant/restrictive food intake disorder
Background Avoidant/restrictive food intake disorder (ARFID) was added to the Diagnostic and Statistical Manual of Mental Disorders Fifth Edition in 2013. ARFID can result in impaired growth and significant nutritional deficiency; individuals with ARFID may be so nutritionally compromised that they require medical stabilization in a hospital. Prior to the new diagnostic criteria, it is unclear how patients now diagnosed with ARFID may have been medically stabilized when hospitalized. Our study aim was to assess the inpatient medical management of adolescents with ARFID. Methods United States-based physician members of the Society for Adolescent Health and Medicine’s Eating Disorder Special Interest Group’s listserv or the National Eating Disorders Quality Improvement Collaborative were invited to participate in an anonymous survey regarding their practices of care for hospitalized patients with ARFID. Results Thirty-seven (44.6%) of 83 physicians completed the survey; 73.0% (n = 27) of respondents medically admitted patients with ARFID. Half of respondents who admitted did not use any protocol for refeeding; 55% of those with a protocol used an anorexia nervosa treatment protocol. Solid food and nasogastric feeds were most commonly used for nutritional rehabilitation. Few typically prescribed medications in the hospital during medical stabilization. Conclusions There is considerable variability of practice in the treatment of hospitalized patients with ARFID. An important next step is to test the efficacy of protocols for anorexia nervosa in treating ARFID patients.
Plain ENGLISH summary
The diagnosis of Avoidant/Restrictive food intake disorder (ARFID) was added to the Diagnostic and Statistical Manual of Mental Disorders Fifth Edition in 2013. It has been used in order to describe patients with poor nutritional intake leading to weight loss and/or growth failure but without poor body image. These patients may need to be hospitalized, and there is currently very little research on how treatment of ARFID in the hospital may be different than patients with anorexia. This study surveyed physicians in the United States who care for patients with ARFID who require medical hospitalization. Respondents to our survey often used a multidisciplinary team to care for patients with ARFID and that nasogastric feeding tubes are often used. Moreover, the findings demonstrated that there is no standardized way to care for patients with ARFID, and many providers are relying on anorexia nervosa protocols. This variability in treatment demonstrates that further studies are needed to determine what protocols may be best used in this patient population.
Background
Diagnosis and treatment of patients with eating disorders is evolving as evidenced by recent changes in eating disorder classifications in the Diagnostic and Statistical Manual of Mental Disorders Fifth Edition (DSM-5). Avoidant/restrictive food intake disorder (ARFID) is one of several new diagnoses added to the DSM-5 in 2013 . ARFID emerged due to lack of an encompassing diagnosis to describe patients with inadequate nutritional intake but absent body dysmorphia . ARFID is not a diagnosis limited to youth; ARFID may also be diagnosed in the adult population . The publication of the DSM-5 and with it the definition of ARFID as a diagnosis has made the study of patients with ARFID easier as prior to the DSM-5 they may have been given several different diagnoses. Regardless of the etiology of ARFID, the diagnosis states that there are medical or psychosocial complications that require intervention . While estimates of ARFID's prevalence vary, one study of patients with eating disorders across several institutions found that 14% met the DSM-5 criteria for ARFID . As ARFID can result in impaired growth and significant nutritional deficiency, these patients may require medical stabilization. Hospitalized patients with ARFID tend to be younger and require longer hospital stays than patients with other eating disorders leading to hospitalization . Prior to the new diagnostic criteria, it is unclear how patients now diagnosed with ARFID may have been medically stabilized when hospitalized. A recent retrospective chart review of patients assessed for an eating disorder found that more than half (57%) of patients diagnosed with ARFID had an inpatient hospitalization . There are not published criteria for the admission of ARFID, which means that providers may rely on established AN criteria such as bradycardia, orthostatic hypotension, electrolyte abnormalities, and low weight . 
Descriptive studies cite reasons for hospitalization with a diagnosis of ARFID, although these are not necessarily criteria. For example, weight below 80% of goal, loss of over 20% of weight, failure of outpatient treatment, and bradycardia .
While methods for refeeding medically unstable patients with AN are described in clinical practice guidelines , there is a paucity of research published on inpatient medical treatment of patients with ARFID. The aim of this preliminary study was to determine the current protocols and practices used for inpatient medical stabilization of patients with ARFID in the United States.
Methods
United States-based physician members of the Society for Adolescent Health and Medicine's Eating Disorder Special Interest Group's listserv or the National Eating Disorders Quality Improvement Collaborative were invited to participate on three separate occasions via email with a link to the survey. If members were a part of both listservs, they received a single invitation. Respondents who did not admit patients with ARFID for nutritional rehabilitation in a hospital setting completed only the demographics portion of the survey. Participants were able to skip questions, so results have varying response rates. The anonymous online survey was based on our prior work assessing variability in inpatient management of patients with AN and was exempt by the Boston Children's Hospital Institutional Review Board due to the fact that it was an anonymous survey of clinicians. Study data were collected and managed by REDCap (Nashville, TN).
Results
Thirty-seven of 83 eligible physicians completed the survey for a response rate of 44.6%. Based on response to a single survey question, all respondents reported that they were familiar with the diagnosis of ARFID. Of those who responded, 27 (73.0%) admitted patients with ARFID for nutritional rehabilitation. Providers who admitted patients with ARFID tended to be based in an academic medicine site and have specialization in Adolescent Medicine ( Table 1).
The majority of providers admitted patients with ARFID to a mixed pediatric and adolescent medical unit ( Table 2). Most respondents who cared for patients with ARFID in an inpatient setting reported that their teams included mental health providers and a medical provider. The most common adjunctive therapies available during a medical admission were group therapy and nutritional education.
With regards to providing nutritional rehabilitation to patients, only half of respondents (n = 11/22) reported having a standardized protocol ( Table 2). Of those who did have a standard protocol, more than half of them (n = 6/11) used the same protocol for patients with ARFID and AN. Therefore, only 22.7% of respondents (n = 5/22) report having a non-AN refeeding protocol used for their patients with ARFID. "Regular food" was the most common form of nutrition used for nutritional rehabilitation among all providers. Feeding via nasogastric tubes was also commonly reported (50%) and was provided as the initial feeding regimen, a nocturnal supplement, or in some other manner. Only 25.9% (n = 7/27) providers indicated that medications were typically prescribed during admission. Of those that did prescribe, all respondents used selective serotonin reuptake inhibitors and two-thirds additionally prescribed atypical anti-psychotics.
Seven respondents replied to the question, "Is there anything else you want to share about your management of ARFID patients or your refeeding protocol (if you have one)?" The main themes from free text responses were that ARFID needs to be treated differently than AN and that use of a multidisciplinary approach with additional services such as behavior modification/exposure therapy is important. For example, on respondent said: "Patients with ARFID generally only hospitalized if <75% MBW or medically unstable. Treatment is focused on exposure, more involved than with other ED patients, often will have a behavior modification program put in place by OT and psych." Another respondent said, "patients with ARFID are treated on our specialized medical ED unit with strong multidisciplinary presence. The framework is our structured -based ED program but the treatment is very much individualized for these patients depending on their symptoms and presentation." Another respondent noted that they "are struggling with trying to figure out how to provide services for in our partial hospitalization program… we treat like other AN patients who are admitted but will use the foods they are comfortable with rather than feeding them everything like the AN patients."
Discussion
This preliminary study is the first of its kind to examine inpatient medical provider management practices for medical stabilization of patients with ARFID. We found that providers were rarely using standardized refeeding protocols aimed at ARFID. Although patients with ARFID differ significantly from patients with AN , half of medical providers using protocols relied on AN protocols. Providers' concerns in open text responses reflect the need for research to clarify the efficacy of various treatments. This reflects how new ARFID is as diagnosis and subsequent lack of evidence for effective treatment. Moreover, few providers were prescribing medications to treat patients. There currently are not evidence-based pharmacologic treatments for ARFID . In this survey providers who prescribed medications more commonly used SSRIs, although a recent case report notes that an adolescent patient was successfully treated for ARFID with buspirone , demonstrating that more research to optimize pharmacological interventions is needed. A commonality among providers was the incorporation of a multidisciplinary team, which was previously found to be of benefit . Additionally, nutritional rehabilitation was frequently achieved with food and nasogastric tubes, although the efficacy of nasogastric tubes in this population is unknown. One retrospective review of outcomes of an inpatient protocol for medical stabilization of eating disorders found that patients with ARFID were significantly more likely to require nasogastric tube feeding compared to other eating disorder diagnoses . These results stress the need for assessment and evaluation of the efficacy of currently used treatment modalities for patients with ARFID.
The strengths of this preliminary study include the variability in practice experience and geographic location of the respondents and the wide range of years in practice of the physicians who participated in the survey. Additionally, this was an investigation of current practices from providers who actively treat ARFID patients. There are limitations to our study. We had an overall response rate of 44.6%, and of the respondents not all admitted patients with ARFID for inpatient nutritional rehabilitation. Thus, the resulting sample was small and limits further analysis of results. The sample of providers is adolescent medicine-focused so results may not reflect the management of younger children with ARFID. We believe if there is bias in our results, they would overstate the use of protocols and common practices given the source for our sample. Respondents were not asked if patients were admitted to other services, such as gastroenterology, where other specialties may treat these patients differently. However, one retrospective study of gastroenterology clinics found only 1.5% of referral patients had ARFID, so it is perhaps less likely they would be admitted to gastroenterology . Additionally, more specific measures of medical treatment may not be captured by this survey and there is no further information regarding the intricacies of the protocols developed by providers. Further research should seek to describe specific information for discharge criteria for ARFID patients as well as recommended post-discharge treatment after an inpatient stay for development of future protocols. A comparison of low-weight patients with ARFID to patients with AN demonstrated similar improvements in a partial hospitalization program, suggesting that there are areas of treatment that may overlap between the two diagnoses . However, it is important to have a baseline for how ARFID patients are treated in inpatient settings before developing new protocols.
Conclusion
In conclusion, the variability in the treatment approaches to the inpatient medical stabilization of patients with ARFID highlights the evidence gap regarding optimal treatment of this disorder. This may help explain why ARFID may result in longer hospital stay . Additionally, as of now, there is a lack of randomized control trials to establish evidence-based treatment of ARFID, including studies to support the use of medication in either an inpatient or outpatient setting . It may be that no single treatment works best for all ARFID patients, although as evidenced by this survey a multidisciplinary team is likely important to the effectiveness of treatment . Development of future protocols should consider the socio-culture background and values of patients as well as the differences between typical patients with ARFID and AN (such as younger age, prevalence of history of choking/gagging episode, and lack of body dysmorphia in those with ARFID in contrast to those with AN). An important next step is to assess the effectiveness of current protocols used in the treatment of patients with ARFID across programs.
Abbreviations AN: Anorexia nervosa; ARFID: Avoidant/restrictive food intake disorder; DSM-5: Diagnostic and Statistical Manual of Mental Disorders Fifth Edition |
<reponame>zyndor/dali-toolkit<gh_stars>0
/*
* Copyright (c) 2020 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <dali/dali.h>
#include <dali-toolkit/dali-toolkit.h>
#include <sstream>
using namespace Dali;
using namespace Dali::Toolkit;
namespace
{
// Key under which the custom "tap counter" property is registered on the image view.
const char* const TAG_PROPERTY_NAME = "tagIdentifier";
// Resource path of the image displayed by the image view.
const char* const IMAGE_CARDS = "images/cards.jpg";
} // namespace
/**
 * This example shows how to set properties in C++ and how to register and look-up custom properties.
 * An image is added to the screen which changes and a custom property is added to the image-view.
 * This value is incremented every time the image is touched and the text-label is updated.
 */
class PropertyController: public ConnectionTracker
{
public:
  /**
   * Constructor: defers all scene setup to Create(), which runs once the
   * application has finished initialising.
   */
  PropertyController( Application& application )
  : mTagText(),
    mTagPropertyIndex( Property::INVALID_INDEX )
  {
    // Connect to the Application's Init signal
    application.InitSignal().Connect( this, &PropertyController::Create );
  }

  ~PropertyController()
  {
  }

  // C++ EXAMPLE

  /**
   * Init-signal handler: builds the scene (image view, custom property,
   * touch handler and counter label).
   */
  void Create( Application& application )
  {
    // Get the window handle
    Window window = application.GetWindow();

    mImageView = ImageView::New();

    // Set the property to move to the center
    mImageView.SetProperty( Actor::Property::PARENT_ORIGIN, ParentOrigin::CENTER );

    // Set another property to set the image-map (type, URL and desired size)
    Property::Map imageMap;
    imageMap[ Visual::Property::TYPE ] = Visual::IMAGE;
    imageMap[ ImageVisual::Property::URL ] = IMAGE_CARDS;
    imageMap[ ImageVisual::Property::DESIRED_WIDTH ] = 100;
    imageMap[ ImageVisual::Property::DESIRED_HEIGHT ] = 100;
    mImageView.SetProperty( ImageView::Property::IMAGE, imageMap );

    // Add the image view to the window
    window.Add( mImageView );

    // Register a custom float property on mImageView and use it to store the number of times we are tapped
    mTagPropertyIndex = mImageView.RegisterProperty( TAG_PROPERTY_NAME, 0, Property::READ_WRITE /* Event-side only, i.e. not animatable */ );

    // Connect to the touch-event
    mImageView.TouchedSignal().Connect( this, &PropertyController::OnTouched );

    // Create text label showing the tap count, anchored to the bottom centre
    mTagText = Toolkit::TextLabel::New( "0" );
    mTagText.SetProperty( Actor::Property::PARENT_ORIGIN, ParentOrigin::BOTTOM_CENTER );
    mTagText.SetProperty( Actor::Property::ANCHOR_POINT, AnchorPoint::BOTTOM_CENTER );
    mTagText.SetProperty( TextLabel::Property::TEXT_COLOR, Color::WHITE );
    mTagText.SetProperty( TextLabel::Property::HORIZONTAL_ALIGNMENT, "CENTER" );
    window.Add( mTagText );
  }

  /**
   * Called when the image view is touched: increments the custom tap-count
   * property and mirrors it into the text label.
   * param[in] touch The touch-event
   * return Set to true if the signal was consumed correctly
   */
  bool OnTouched( Actor actor, const TouchEvent& touch )
  {
    int touchedCount = 0;

    // Look up the tag property by the cached property index.
    // Note: If the property belongs to a control in another library, or we do not know the index, we can look the index up first with:
    // Property::Index index = actor.GetPropertyIndex( TAG_PROPERTY_NAME );
    actor.GetProperty( mTagPropertyIndex ).Get( touchedCount );

    // Increment and set back again
    ++touchedCount;
    actor.SetProperty( mTagPropertyIndex, touchedCount );

    // Set the text in the text-label
    std::stringstream valueText;
    valueText << touchedCount;
    mTagText.SetProperty( TextLabel::Property::TEXT, valueText.str() );

    return true; // Consumed meaning any gestures will be cancelled
  }
  // C++ EXAMPLE END

private:
  ImageView mImageView;            ///< An image view to show an image
  TextLabel mTagText;              ///< A text label used to show the last button pressed.
  Property::Index mTagPropertyIndex; ///< A cached property index of our custom tag property.
};
// Entry point for applications.
int main( int argc, char **argv )
{
Application application = Application::New( &argc, &argv );
PropertyController test( application );
application.MainLoop();
return 0;
}
|
<reponame>chantelle-lingerie/sales
import { documents as __, minusPrice, addPrices, Total, Shipping, ItemTotal, ItemQty, Items, Price } from '../index'
import { deepFreeze } from './deepFreeze'
describe('Documents', () => {
    // Wrappers that deep-freeze every argument before delegating to the
    // library, so each test also proves the functions do not mutate input.
    const _ = {
        total: <T extends Total>(documents: T[]) => __.total(deepFreeze(documents)),
        shipping: <T extends Shipping>(documents: T[]) => __.shipping(deepFreeze(documents)),
        items: {
            total: <U extends ItemTotal, T extends Items<U>>(documents: T[]) => __.items.total(deepFreeze(documents)),
            qty: <U extends ItemQty, T extends Items<U>>(documents: T[]) => __.items.qty(deepFreeze(documents)),
            minus: <
                I extends ItemQty & Total, S extends Items<I>,
                P extends ItemQty & Total & Price, T extends Items<P>
            >(from: T[], subtrahend: S[]) => __.items.minus(deepFreeze(from), deepFreeze(subtrahend)) } }
    it('Reduce totals', () => {
        expect(_.total([{ total: .1 }, { total: .2 }]))
            .toBe(.3)
    })
    it('Reduce shipping', () => {
        expect(_.shipping([{ shipping: .1 }, { shipping: .2 }, { shipping: .4 }]))
            .toBe(.7)
    })
    describe('Items', () => {
        // Item lists are unordered: compare as Sets, merging by item id.
        it('Reduce quantity', () => {
            expect(new Set(_.items.qty([
                { items: [{ id: 'a', qty: 1 }, { id: 'b', qty: 2 }] },
                { items: [{ id: 'c', qty: 3 }, { id: 'b', qty: 1 }] },
            ]))).toEqual(new Set([
                { id: 'a', qty: 1 }, { id: 'b', qty: 3 }, { id: 'c', qty: 3 }]))
        })
        it('Reduce totals', () => {
            expect(new Set(_.items.total([
                { items: [{ id: 'a', total: .1 }, { id: 'b', total: .2 }] },
                { items: [{ id: 'c', total: .3 }, { id: 'b', total: .1 }] },
            ]))).toEqual(new Set([
                { id: 'a', total: .1 }, { id: 'b', total: .3 }, { id: 'c', total: .3 }]))
        })
        describe('Minus', () => {
            // Splits xs into consecutive chunks of the given size.
            const arrayChunks = (size: number) =>
                <T>(xs: T[]) => {
                    const keys = Array.from(Array(Math.ceil(xs.length / size)).keys())
                    const result: T[][] = []
                    for (const k of keys) {
                        result.push(xs.slice(k * size, (k + 1) * size))
                    }
                    return result
                }
            // Fixture: four line items, repeated once per id and re-chunked
            // into documents of three items each by from().
            const items = [
                { qty: 3, total: 7, price: 3 },
                { qty: 1, total: 5, price: 5 },
                { qty: 1, total: .6, price: 2 },
                { qty: 2, total: 8.07, price: 4.33 }]
            const from = (ids: string[] = ['a', 'b', 'c', 'd', 'e']) => {
                const result: { id: string, qty: number, total: number, price: number }[] = []
                for (const item of items) {
                    result.push(...ids.map(id => ({ ...item, id })))
                }
                return arrayChunks(3)(result).map(chunk => ({ items: chunk }))
            }
            it('Exact match', () => {
                expect(new Set(_.items.minus(from(), [
                    { items: [{ id: 'a', qty: 3, total: 7 },
                        { id: 'b', qty: 1, total: .6 }] },
                    { items: [{ id: 'c', qty: 1, total: 4.04 }]},
                    { items: [{ id: 'd', qty: 2, total: 4.67 },
                        { id: 'e', qty: 2, total: 6.36 }] }
                ]))).toEqual(new Set([
                    { id: 'a', qty: 1, total: 5, price: 5 },
                    { id: 'a', qty: 1, total: .6, price: 2 },
                    { id: 'a', qty: 2, total: 8.07, price: 4.33 },
                    { id: 'b', qty: 3, total: 7, price: 3 },
                    { id: 'b', qty: 1, total: 5, price: 5 },
                    { id: 'b', qty: 2, total: 8.07, price: 4.33 },
                    { id: 'c', qty: 3, total: 7, price: 3 },
                    { id: 'c', qty: 1, total: 5, price: 5 },
                    { id: 'c', qty: 1, total: .6, price: 2 },
                    { id: 'c', qty: 1, total: 4.03, price: 4.33 },
                    { id: 'd', qty: 1, total: 2.33, price: 3 },
                    { id: 'd', qty: 1, total: 5, price: 5 },
                    { id: 'd', qty: 1, total: .6, price: 2 },
                    { id: 'd', qty: 2, total: 8.07, price: 4.33 },
                    { id: 'e', qty: 2, total: 4.67, price: 3 },
                    { id: 'e', qty: 1, total: 5, price: 5 },
                    { id: 'e', qty: 1, total: .6, price: 2 },
                    { id: 'e', qty: 1, total: 4.04, price: 4.33 }]))
            })
            it('Less expensive than exact match', () => {
                expect(new Set(_.items.minus(from(), [
                    { items: [{ id: 'a', qty: 3, total: 13.05 },
                        { id: 'b', qty: 1, total: .49 }] },
                    { items: [{ id: 'c', qty: 1, total: 3.99 }]},
                    { items: [{ id: 'd', qty: 3, total: 5.23 },
                        { id: 'e', qty: 2, total: 6.11 }] }
                ]))).toEqual(new Set([
                    { id: 'a', qty: 3, total: addPrices(7, .01), price: 3 },
                    { id: 'a', qty: 1, total: addPrices(.6, .01), price: 2 },
                    { id: 'b', qty: 3, total: addPrices(7, .08), price: 3 },
                    { id: 'b', qty: 1, total: 5, price: 5 },
                    { id: 'b', qty: 2, total: addPrices(8.07, .03), price: 4.33 },
                    { id: 'c', qty: 3, total: addPrices(7, .02), price: 3 },
                    { id: 'c', qty: 1, total: 5, price: 5 },
                    { id: 'c', qty: 1, total: addPrices(.6, .02), price: 2 },
                    { id: 'c', qty: 1, total: 4.04, price: 4.33 },
                    { id: 'd', qty: 1, total: addPrices(2.34, .02), price: 3 },
                    { id: 'd', qty: 1, total: 5, price: 5 },
                    { id: 'd', qty: 2, total: addPrices(8.07, .01), price: 4.33 },
                    { id: 'e', qty: 2, total: addPrices(4.67, .11), price: 3 },
                    { id: 'e', qty: 1, total: 5, price: 5 },
                    { id: 'e', qty: 1, total: addPrices(.6, .12), price: 2 },
                    { id: 'e', qty: 1, total: addPrices(4.04, .02), price: 4.33 }
                ]))
            })
            it('More expensive than exact match', () => {
                expect(new Set(_.items.minus(from(), [
                    { items: [{ id: 'a', qty: 3, total: 7.02 },
                        { id: 'b', qty: 1, total: .9 }] },
                    { items: [{ id: 'c', qty: 1, total: 4.14 }]},
                    { items: [{ id: 'd', qty: 2, total: 4.88 },
                        { id: 'e', qty: 2, total: 6.5 }] }
                ]))).toEqual(new Set([
                    { id: 'a', qty: 1, total: minusPrice(5, .01), price: 5 },
                    { id: 'a', qty: 1, total: .6, price: 2 },
                    { id: 'a', qty: 2, total: minusPrice(8.07, .01), price: 4.33 },
                    { id: 'b', qty: 3, total: minusPrice(7, .11), price: 3 },
                    { id: 'b', qty: 1, total: minusPrice(5, .07), price: 5 },
                    { id: 'b', qty: 2, total: minusPrice(8.07, .12), price: 4.33 },
                    { id: 'c', qty: 3, total: minusPrice(7, .04), price: 3 },
                    { id: 'c', qty: 1, total: minusPrice(5, .03), price: 5 },
                    { id: 'c', qty: 1, total: .6, price: 2 },
                    { id: 'c', qty: 1, total: minusPrice(4.03, .03), price: 4.33 },
                    { id: 'd', qty: 1, total: minusPrice(2.33, .03), price: 3 },
                    { id: 'd', qty: 1, total: minusPrice(5, .06), price: 5 },
                    { id: 'd', qty: 1, total: minusPrice(.6, .01), price: 2 },
                    { id: 'd', qty: 2, total: minusPrice(8.07, .11), price: 4.33 },
                    { id: 'e', qty: 2, total: minusPrice(4.66, .04), price: 3 },
                    { id: 'e', qty: 1, total: minusPrice(5, .04), price: 5 },
                    { id: 'e', qty: 1, total: minusPrice(.6, .01), price: 2 },
                    { id: 'e', qty: 1, total: minusPrice(4.03, .03), price: 4.33 }]))
            })
            it('Handles extreme cases', () => {
                // zero totals, zero quantities, over-subtraction and duplicate ids
                expect(new Set(_.items.minus([
                    ...from(['a', 'b', 'c', 'd']), { items: [
                        { id: 'e', price: 10, qty: 2, total: 18 },
                        { id: 'e', price: 10, qty: 1, total: 10 }] }
                ], [
                    { items: [{ id: 'a', qty: 2, total: 0 },
                        { id: 'b', qty: 2, total: 11 }] },
                    { items: [{ id: 'c', qty: 0, total: 7 }]},
                    { items: [{ id: 'd', qty: 8, total: .01 },
                        { id: 'e', qty: 2, total: 14.5 }] }
                ]))).toEqual(new Set([
                    { id: 'a', qty: 2, total: addPrices(6, .35), price: 3 },
                    { id: 'a', qty: 1, total: addPrices(5, .42), price: 5 },
                    { id: 'a', qty: 2, total: addPrices(8.66, .24), price: 4.33 },
                    { id: 'b', qty: 3, total: minusPrice(7, 1.18), price: 3 },
                    { id: 'b', qty: 1, total: minusPrice(.6, .1), price: 2 },
                    { id: 'b', qty: 1, total: minusPrice(4.03, .68), price: 4.33 },
                    { id: 'c', qty: 3, total: minusPrice(7, 2.37), price: 3 },
                    { id: 'c', qty: 1, total: minusPrice(5, 1.7), price: 5 },
                    { id: 'c', qty: 1, total: minusPrice(.6, .2), price: 2 },
                    { id: 'c', qty: 2, total: minusPrice(8.07, 2.73), price: 4.33 },
                    { id: 'e', qty: 1, price: 10, total: 13.5 }]))
            })
        })
    })
})
|
/**
 * @brief Returns the next data available from an iterator
 *
 * Returns the next label from the iterator, dispatching on the iterator
 * mode (plain zone records vs. NSEC3 chain).
 *
 * @param[in] iter a pointer to the iterator
 *
 * @return a pointer to the next label, or NULL for an unhandled mode
 *
 */
zdb_rr_label*
zdb_zone_label_iterator_ex_next(zdb_zone_label_iterator_ex* iter)
{
    zdb_rr_label *next_label = NULL;

    if(iter->mode == ZDB_ZONE_LABEL_ITERATOR_ZONE_RECORDS)
    {
        /* delegate to the plain zone-record label iterator */
        next_label = zdb_zone_label_iterator_next(&iter->iter.label_iter);
    }
    else if(iter->mode == ZDB_ZONE_LABEL_ITERATOR_NSEC3_CHAIN)
    {
        /* the NSEC3 chain exposes the label embedded in the iterator */
        next_label = &iter->nsec3_label;
    }

    return next_label;
}
import * as pagerduty from '@pulumi/pagerduty'
import * as pulumi from '@pulumi/pulumi'
const config = new pulumi.Config('wanews:pagerduty')
const scheduleName = config.get('schedule') ?? 'Weekly DevOps'
const teamName = config.get('team') ?? 'Production Engineering'
const escalationPolicyName = config.get('escalation-policy') ?? 'Default'
/**
 * Resolves the default PagerDuty schedule, team and escalation policy as
 * pulumi outputs.
 *
 * Lookup failures are not fatal: they are reported via `pulumi.log.error`
 * and the corresponding output resolves from the `.catch` branch (i.e. to
 * the `void` result of the log call), matching the previous behaviour.
 */
export function getPagerDutyConfig() {
    // Shared wrapper for the three identical lookup/catch stanzas: wraps a
    // PagerDuty data-source promise into a pulumi output, logging (rather
    // than throwing) when the resource cannot be found.
    const lookup = <T>(promise: Promise<T>, what: string) =>
        pulumi.output(
            promise.catch((err) =>
                pulumi.log.error(
                    `Unable to find default pagerduty ${what}: ${err}`,
                ),
            ),
        )

    return {
        defaultSchedule: lookup(
            pagerduty.getSchedule({ name: scheduleName }, { async: true }),
            'schedule',
        ),
        defaultTeam: lookup(
            pagerduty.getTeam({ name: teamName }, { async: true }),
            'team',
        ),
        defaultEscalationPolicy: lookup(
            pagerduty.getEscalationPolicy(
                { name: escalationPolicyName },
                { async: true },
            ),
            'escalation policy',
        ),
    }
}
|
President Obama is a big fan of the way the Miami Heat run a break, but when the postseason dictates more half-court sets, Obama believes the Chicago Bulls will have a chance.
"(Luol) Deng seems more confident," Obama said in an exclusive interview with Grantland.com's Bill Simmons. "(Carlos) Boozer is in better shape. Derrick Rose has matured. I'm a little worried about making sure that they give him enough rest with the back spasms that he had. But the Bulls, I think, are right in there. I think they've got a great chance.
"The Heat are playing better than anybody right now. And when those folks get going on a fast break, it's over. But during the playoffs things slow down a little bit, and you got to run a half-court offense, and in that situation, I think the Bulls got a shot."
Obama, who is a former Illinois state senator from Chicago, has never been shy about his love for his hometown team. When hosting the NBA champion Dallas Mavericks at the White House in January, Obama said: "It's too bad that next year, it'll be the Chicago Bulls here." Rose said he hopes to hold Obama to his word.
Simmons asked Obama how often he's envisioned welcoming the Bulls to the White House as the NBA champions.
"Every year," he said. "And it hasn't happened yet, but it will happen."
"Somewhere along the line my Bulls are going to come through here," Obama added. "Absolutely."
Obama made sure to point out to Simmons, who is from Boston, how impressed he is with Bulls coach Tom Thibodeau, who is a former Celtics assistant.
"Now, Doc Rivers is a great coach as well, but for us to have been able to get that guy, he has just done a great job with what we've got," Obama said. "He's an outstanding coach."
Obama has hosted many championship teams at the White House, but he said the best one was the 1985 Chicago Bears, who couldn't make the traditional visit after they won Super Bowl XX because it coincided with the space shuttle Challenger tragedy.
"And to see these guys -- (Mike) Ditka and all -- the whole team come back," Obama said. "Buddy Ryan came back, and he's really ailing. But to see how much they appreciated it, how much they had wanted that acknowledgment -- it was a lot of fun. And Ditka couldn't have been more gracious. And everybody just had a great time, and we had a whole bunch of Bears fans here. That was probably as good as it gets." |
// GetLoggingResources returns the logging config and log-based metric configurations
func (c *Client) GetLoggingResources() error {
defer utils.Elapsed("GetLoggingResources")()
worker := func(projectIDs <-chan string, results chan<- loggingClientResult) {
id := <-projectIDs
parent := fmt.Sprintf("projects/%s", id)
ctx := context.Background()
res := loggingClientResult{ProjectID: id, LogSinks: []*gcp.LoggingSinkResource{}}
req1 := loggingpb.ListSinksRequest{
Parent: parent,
}
it1 := c.logConfigClient.ListSinks(ctx, &req1)
for {
s, done := it1.Next()
if done == iterator.Done {
break
}
res.LogSinks = append(res.LogSinks, gcp.NewLoggingSinkResource(s))
}
req2 := loggingpb.ListLogMetricsRequest{
Parent: parent,
}
it2 := c.logMetricClient.ListLogMetrics(ctx, &req2)
for {
m, done := it2.Next()
if done == iterator.Done {
break
}
res.LogMetrics = append(res.LogMetrics, gcp.NewLoggingMetricResource(m))
}
results <- res
}
projectIDs := make(chan string, len(c.resourceprojects))
results := make(chan loggingClientResult, len(c.resourceprojects))
numWorkers := len(c.resourceprojects)
for w := 0; w < numWorkers; w++ {
go worker(projectIDs, results)
}
for _, p := range c.resourceprojects {
projectIDs <- p.ProjectId
}
for i := 0; i < numWorkers; i++ {
res := <-results
c.logSinks[res.ProjectID] = res.LogSinks
c.logMetrics[res.ProjectID] = res.LogMetrics
}
return nil
} |
package com.proxy.server.handler;
import com.proxy.common.entity.server.ClientNode;
import com.proxy.common.entity.server.ProxyChannel;
import com.proxy.common.entity.server.ProxyRealServer;
import com.proxy.common.protocol.CommonConstant;
import com.proxy.common.protocol.Message;
import com.proxy.server.service.ServerBeanManager;
import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.ReferenceCountUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetSocketAddress;
import java.util.Objects;
/**
 * Handler servicing end-user connections that arrive on proxied server ports.
 *
 * On connect it allocates a session, asks the proxy client to open a link to
 * the real server, and pauses reads until that link is ready; afterwards it
 * forwards user bytes to the client and tears the session down when the user
 * connection dies.
 *
 * @author ztgreat
 */
public class TCPChannelHandler extends ChannelInboundHandlerAdapter {
    private static Logger logger = LoggerFactory.getLogger(TCPChannelHandler.class);
    public TCPChannelHandler() {
        super();
    }
    /**
     * Generates a session ID for a new user connection.
     */
    private static Long getSessionID() {
        return ServerBeanManager.getSessionIDGenerate().generateId();
    }
    @Override
    public void channelActive(ChannelHandlerContext ctx) {
        Channel userChannel = ctx.channel();
        InetSocketAddress sa = (InetSocketAddress) userChannel.localAddress();
        ProxyChannel proxyChannel = ServerBeanManager.getProxyChannelService().getServerProxy(sa.getPort());
        // If no entry exists for this port there is no proxy client; later code
        // should not need to re-check, but we check repeatedly for now to stay
        // on the safe side.
        if (proxyChannel == null) {
            logger.error("端口{} 没有代理客户端", sa.getPort());
            ctx.channel().close();
            return;
        }
        ClientNode node = ServerBeanManager.getClientService().get(proxyChannel.getClientKey());
        if (node == null || node.getChannel() == null || node.getStatus() != CommonConstant.ClientStatus.ONLINE) {
            logger.error("端口{} 没有代理客户端", sa.getPort());
            ctx.channel().close();
            return;
        }
        long sessionID = getSessionID();
        int sPort = sa.getPort();
        sa = (InetSocketAddress) userChannel.remoteAddress();
        String ip = sa.getAddress().getHostAddress();
        int uPort = sa.getPort();
        // Pack the session ID, user ip/port and server port into the message.
        Message message = new Message();
        message.setIp(ip);
        message.setClientChannel(node.getChannel());
        message.setuPort(uPort);
        message.setsPort(sPort);
        message.setSessionID(sessionID);
        message.setType(CommonConstant.MessageType.TYPE_CONNECT_REALSERVER);
        ProxyRealServer realServer = node.getServerPort2RealServer().get(sPort);
        if (Objects.isNull(realServer)) {
            logger.error("端口{} 没有开启映射", sPort);
            ctx.channel().close();
            return;
        }
        String address = realServer.getAddress();
        message.setRemoteAddress(address);
        message.setData(address.getBytes());
        message.setProxyType(realServer.getProxyType().byteValue());
        message.setCommand((ServerBeanManager.getConfigService().getConfigure("server") + ":" + realServer.getServerPort()).getBytes());
        // Store the user channel via the session service (this also records
        // the proxy type used later by channelRead).
        ServerBeanManager.getUserSessionService().add(sessionID, ctx.channel(), realServer);
        // Hand the message to the transfer service, which queues it for the client.
        ServerBeanManager.getTransferService().toClient(message);
        logger.debug("通知客户端({})与真实服务器{}建立连接 ", node.getClientKey(), address);
        // Pause reads until the client reports the real-server link is ready.
        ctx.channel().config().setAutoRead(false);
        logger.debug("用户请求访问通道设置为不可读");
    }
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        // Proxy type recorded for this user channel during channelActive.
        Integer type = ServerBeanManager.getUserSessionService().getType(ctx.channel());
        if (type != null) {
            if (type == CommonConstant.ProxyType.TCP) {
                // TCP proxying: forward the bytes to the proxy client.
                logger.debug("tcp代理");
                tcpHandler(ctx, (ByteBuf) msg, CommonConstant.ProxyType.TCP);
            } else {
                // Not a TCP proxy: drop the message (and release the buffer).
                ReferenceCountUtil.release(msg);
                logger.debug("非tcp代理:丢弃消息");
            }
            return;
        }
        // No session/type for this channel: drop the message.
        ReferenceCountUtil.release(msg);
        logger.debug("消息格式错误:丢弃消息");
    }
    @Override
    public void channelInactive(ChannelHandlerContext ctx) {
        logger.debug("用户连接失效");
        Long sessionID = ServerBeanManager.getUserSessionService().getSessionID(ctx.channel());
        if (sessionID == null) {
            return;
        }
        // Notify the proxy client so it can disconnect from the real server.
        Channel userChannel = ctx.channel();
        InetSocketAddress sa = (InetSocketAddress) userChannel.localAddress();
        ProxyChannel proxyChannel = ServerBeanManager.getProxyChannelService().getServerProxy(sa.getPort());
        // If no entry exists for this port there is no proxy client; see the
        // equivalent check in channelActive.
        if (Objects.isNull(proxyChannel)) {
            return;
        }
        ClientNode node = ServerBeanManager.getClientService().get(proxyChannel.getClientKey());
        if (node == null || node.getChannel() == null || node.getStatus() != CommonConstant.ClientStatus.ONLINE) {
            return;
        }
        int sPort = sa.getPort();
        sa = (InetSocketAddress) userChannel.remoteAddress();
        String ip = sa.getAddress().getHostAddress();
        int uPort = sa.getPort();
        // Pack the session ID, user ip/port and server port into the message.
        Message message = new Message();
        message.setIp(ip);
        message.setClientChannel(node.getChannel());
        message.setuPort(uPort);
        message.setsPort(sPort);
        message.setSessionID(sessionID);
        message.setType(CommonConstant.MessageType.TYPE_DISCONNECT);
        ProxyRealServer realServer = node.getServerPort2RealServer().get(sPort);
        if (Objects.isNull(realServer)) {
            return;
        }
        String address = realServer.getAddress();
        message.setRemoteAddress(address);
        message.setData(address.getBytes());
        ServerBeanManager.getUserSessionService().remove(sessionID);
        // Hand the message to the transfer service, which queues it for the client.
        ServerBeanManager.getTransferService().toClient(message);
        logger.debug("通知客户端({})与真实服务器{}断开连接 ", node.getClientKey(), address);
        closeChannle(ctx);
    }
    @Override
    public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
        super.channelWritabilityChanged(ctx);
    }
    /**
     * Forwards one chunk of user TCP data to the proxy client.
     */
    private void tcpHandler(ChannelHandlerContext ctx, ByteBuf buf, Integer proxyType) throws Exception {
        Channel userChannel = ctx.channel();
        InetSocketAddress sa = (InetSocketAddress) userChannel.localAddress();
        ProxyChannel proxyChannel = ServerBeanManager.getProxyChannelService().getServerProxy(sa.getPort());
        if (proxyChannel == null) {
            // No proxy client serves this port yet.
            logger.error("端口{} 没有代理客户端", sa.getPort());
            userChannel.close();
            ReferenceCountUtil.release(buf);
            return;
        }
        ClientNode node = ServerBeanManager.getClientService().get(proxyChannel.getClientKey());
        if (node == null || node.getChannel() == null || node.getStatus() != CommonConstant.ClientStatus.ONLINE) {
            logger.error("端口{} 没有代理客户端", sa.getPort());
            userChannel.close();
            ReferenceCountUtil.release(buf);
            return;
        }
        // Build the transfer message from the buffered user bytes.
        Long sessionID = ServerBeanManager.getUserSessionService().getSessionID(userChannel);
        //ProxyRealServer realServer = node.getServerPort2RealServer().get(sa.getPort());
        byte[] data = new byte[buf.readableBytes()];
        buf.readBytes(data);
        buf.release();
        Message message = new Message();
        message.setClientChannel(node.getChannel());
        message.setData(data);
        message.setsPort(sa.getPort());
        message.setSessionID(sessionID);
        message.setType(CommonConstant.MessageType.TYPE_TRANSFER);
        message.setProxyType(proxyType.byteValue());
        logger.debug("来自{}端口的请求转发至客户端({})", sa.getPort(), node.getClientKey());
        ServerBeanManager.getTransferService().toClient(message);
    }
    /**
     * Closes the user connection if it is still active.
     */
    private void closeChannle(ChannelHandlerContext ctx) {
        Channel channel = ctx.channel();
        if (channel != null && channel.isActive()) {
            channel.close();
        }
    }
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        logger.error("发生异常({})", cause.getMessage());
        cause.printStackTrace();
        closeChannle(ctx);
    }
}
|
<filename>src/extension.ts
import * as vscode from 'vscode';
import { readdirSync } from 'fs';
import { join } from 'path';
// Extension entry point: reset stored state, then register every command
// found in the compiled ./commands directory (source-map files are skipped).
export const activate = (context: vscode.ExtensionContext) => {
    deactivate(context);
    const commandsDir = join(__dirname, './commands');
    for (const file of readdirSync(commandsDir)) {
        if (file.includes('.map')) {
            continue;
        }
        const commandModule = require(join(commandsDir, file));
        commandModule.runCommand(context);
    }
};
// Clears the persisted master key from workspace state; invoked on extension
// shutdown and also at the start of activate() so stale state never survives
// a reload.
export const deactivate = (context: vscode.ExtensionContext) => {
    context.workspaceState.update('masterkey', undefined);
};
/*
* Copyright (C) 2017-2019 HERE Europe B.V.
* Licensed under Apache 2.0, see full license in LICENSE
* SPDX-License-Identifier: Apache-2.0
*/
import { Feature, FeatureCollection, FeatureGeometry } from "@here/harp-datasource-protocol";
import { GeoJsonDataProvider } from "@here/harp-geojson-datasource";
import { OmvDataSource } from "@here/harp-omv-datasource";
import { MapViewFeature } from "./Features";
const NAME = "user-features-datasource";
const DEFAULT_GEOJSON: FeatureCollection = {
type: "FeatureCollection",
features: []
};
/**
 * [[DataSource]] implementation to use for the addition of custom features.
 */
export class FeaturesDataSource extends OmvDataSource {
    private m_featureCollection: FeatureCollection = {
        type: "FeatureCollection",
        features: []
    };

    /**
     * Builds a `FeaturesDataSource`.
     *
     * @param workerTilerUrl Worker tiler URL. Defaults to `./decoder.bundle.ts` in the
     * [[ConcurrentTilerFacade]].
     */
    constructor(workerTilerUrl?: string) {
        super({
            dataProvider: new GeoJsonDataProvider(NAME, DEFAULT_GEOJSON, workerTilerUrl)
        });
    }

    /**
     * Adds custom features to the datasource and refreshes the tiles once.
     *
     * @param features The features to add in the datasource.
     */
    add(...features: MapViewFeature[]): this {
        features.forEach(feature => this.addFeature(feature));
        this.update();
        return this;
    }

    /**
     * Removes custom features from the datasource and refreshes the tiles once.
     *
     * @param features The features to remove from the datasource.
     */
    remove(...features: MapViewFeature[]): this {
        features.forEach(feature => this.removeFeature(feature));
        this.update();
        return this;
    }

    /**
     * Removes all the custom features in this `FeaturesDataSource`.
     */
    clear() {
        this.m_featureCollection = {
            type: "FeatureCollection",
            features: []
        };
        this.update();
    }

    private addFeature(feature: MapViewFeature) {
        // Deduplicate: every stored GeoJSON feature carries the source
        // feature's uuid under the private `__mapViewUuid` property.
        const alreadyStored =
            this.m_featureCollection.features.findIndex(
                stored => stored.properties.__mapViewUuid === feature.uuid
            ) !== -1;
        if (alreadyStored) {
            return;
        }
        // Build a GeoJSON feature from the feature's coordinates and store it.
        const geometry: FeatureGeometry = {
            type: feature.type,
            coordinates: feature.coordinates
        } as any;
        const geojsonFeature: Feature = {
            type: "Feature",
            geometry,
            properties: {
                ...feature.properties,
                __mapViewUuid: feature.uuid
            }
        };
        this.m_featureCollection.features.push(geojsonFeature);
    }

    private removeFeature(feature: MapViewFeature) {
        // Locate the stored GeoJSON feature by uuid and drop it in place.
        const index = this.m_featureCollection.features.findIndex(
            stored => stored.properties.__mapViewUuid === feature.uuid
        );
        if (index === -1) {
            return;
        }
        this.m_featureCollection.features.splice(index, 1);
    }

    private update() {
        // Push the new collection into the GeoJSON provider, then force the
        // map view to re-generate and re-fetch this source's tiles.
        (this.dataProvider() as GeoJsonDataProvider).updateInput(this.m_featureCollection);
        this.mapView.markTilesDirty(this);
        this.mapView.clearTileCache(this.name);
    }
}
|
//Import the sounds for in game events such as collisions, flapping the bird's wings, and also dying
public void importsounds() throws LineUnavailableException, IOException, UnsupportedAudioFileException{
audioInputStream =
AudioSystem.getAudioInputStream(new File("Resources/Audio/Wav-format-high-quality/hit.wav").getAbsoluteFile());
hit_sound = AudioSystem.getClip();
hit_sound.open(audioInputStream);
audioInputStream =
AudioSystem.getAudioInputStream(new File("Resources/Audio/Wav-format-high-quality/die.wav").getAbsoluteFile());
die_sound = AudioSystem.getClip();
die_sound.open(audioInputStream);
audioInputStream =
AudioSystem.getAudioInputStream(new File("Resources/Audio/Wav-format-high-quality/point.wav").getAbsoluteFile());
point_sound = AudioSystem.getClip();
point_sound.open(audioInputStream);
audioInputStream =
AudioSystem.getAudioInputStream(new File("Resources/Audio/Wav-format-high-quality/wing.wav").getAbsoluteFile());
wing_sound = AudioSystem.getClip();
wing_sound.open(audioInputStream);
} |
<filename>src/options/NodeListOptions.d.ts
import { ListOptions } from './ListOptions';
import { NodeEmbedField } from './NodeEmbedField';
/** Fields a node listing may be sorted by; a `-` prefix means descending order. */
export type NodeSortField = 'created' | 'modified' | 'names' | '-created' | '-modified' | '-names';

/** Options accepted when listing nodes. */
export interface NodeListOptions extends ListOptions {
    /** Related resources to embed in each returned node. */
    readonly embed?: ReadonlySet<NodeEmbedField>;
    /** Sort order applied to the listing, highest priority first. */
    readonly sort?: ReadonlyArray<NodeSortField>;
}
|
<reponame>nattimmis/Archideus
package com.jpaulmorrison.graphics;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.geom.GeneralPath;
// A diagram block drawn as a cylinder-like "file" shape: two quadratic
// curves bow the top and bottom edges, and an extra arc suggests the rim.
public class FileBlock extends Block {

	FileBlock(Diagram ctlr) {
		super(ctlr);
		type = Block.Types.FILE_BLOCK;
		// Fixed default size for a file block.
		width = 64;
		height = 72;
		//calcEdges();
	}

	@Override
	void draw(Graphics2D g) {
		// Invisible, unselected blocks only show their connection zones.
		if (!visible && this != driver.selBlock) {
			showZones(g);
			return;
		}
		GeneralPath gp = new GeneralPath();
		g.setColor(Color.BLACK);
		// Trace the outline clockwise starting at the top-left corner
		// (cx, cy is the block centre; the +4/-8 offsets leave room for the rim arc).
		int x = cx - width / 2;
		int y = cy - height/2 + 4;
		gp.moveTo(x, y);
		x += width;
		gp.quadTo(x - width/2, y - 10, x, y);   // top edge, bowed upwards
		y += height - 8;
		gp.lineTo(x, y);                        // right side
		x -= width;
		gp.quadTo(x + width/2, y + 10, x, y);   // bottom edge, bowed downwards
		gp.closePath();                         // left side
		//g.draw(gp);
		// Fill: light yellow when selected, light turquoise otherwise.
		if (this == driver.selBlock)
			g.setColor(ly); // light yellow
		else
			g.setColor(lb); // light turquoise
		g.fill(gp);
		g.setColor(Color.BLACK);
		g.draw(gp);
		// Rim arc near the top of the shape.
		y = cy - height + height / 2 - 8;
		x = cx - width / 2;
		g.drawArc(x, y, width, 20, 190, 160);
		if (description != null) {
			centreDesc(g);
		}
		//showZones(g);
		// Grow the diagram's bounding box to include this block.
		calcDiagMaxAndMin(cx - width / 2, cx + width / 2,
				cy - height / 2, cy + height / 2);
	}
}
} |
<filename>src/infrastructure/persistence/mod.rs
use std::env;
use diesel::mysql::MysqlConnection;
use diesel::{Connection};
pub mod task;
/// Establishes a MySQL connection using the `DATABASE_URL` environment
/// variable.
///
/// Panics (as before) when the variable is unset or the connection fails.
pub fn connect() -> MysqlConnection {
    let database_url = env::var("DATABASE_URL").
        expect("DATABASE_URL must be set");
    // `unwrap_or_else` builds the panic message lazily; the previous
    // `expect(&format!(...))` allocated the message on every call even on
    // success (clippy: expect_fun_call).
    MysqlConnection::establish(&database_url)
        .unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
}
|
#ifndef SRC_PIPELINE_STAGE_H_
#define SRC_PIPELINE_STAGE_H_
// NOTE(review): <queue> is not referenced by this header (the backing store
// is a std::vector) — confirm before removing, other includers may rely on it.
#include <queue>
#include "utils.h"
#include "instruction.h"
// A single stage of the processor pipeline: a named, width-bounded FIFO of
// in-flight Instruction pointers.
class PipelineStage {
	std::string name;                 // stage name, used by toString()
	std::vector<Instruction*> queue;  // FIFO of instructions in this stage
	uint32_t width;                   // maximum number of instructions held
public:
	PipelineStage(std::string name, uint32_t width);
	virtual ~PipelineStage();
	// Appends an instruction; presumably returns false when the stage is
	// full (width reached) — implementation not visible here, confirm.
	bool push(Instruction* inst);
	bool isEmpty();
	Instruction* front();
	void pop();
	// Direct mutable access to the underlying FIFO (no copy).
	std::vector<Instruction*>& getAllInstructions() {
		return queue;
	}
	std::string toString();
};

#endif /* SRC_PIPELINE_STAGE_H_ */
|
/**
* Created by Willa aka Baba Imu on 2/1/18.
*/
@Handler(supports = { AdministeredVaccine.class }, order = 10)
public class AdministeredVaccineValidator extends BaseCustomizableValidator implements Validator {
@Override
public boolean supports(Class<?> aClass) {
return AdministeredVaccine.class.isAssignableFrom(aClass);
}
@Override
public void validate(Object target, Errors errors) {
AdministeredVaccine administeredVaccine = (AdministeredVaccine) target;
ValidationUtils.rejectIfEmpty(errors, "vaccineConfiguration",
"AdministeredVaccine.error.vaccineConfiguration.required");
ValidationUtils.rejectIfEmpty(errors, "obs", "AdministeredVaccine.error.obs.required");
}
} |
<filename>util/unmarshal.go<gh_stars>0
package util
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"path/filepath"
"gopkg.in/yaml.v2"
)
// MarshalIndentToFile serializes v into the file at p, choosing the encoder
// from the file extension: .json uses json.MarshalIndent with the given
// prefix/indent, .yaml and .yml use YAML (prefix/indent are ignored). The
// target directory is ensured and the file is written with mode 0755
// (kept as-is for backward compatibility).
func MarshalIndentToFile(p string, v interface{}, prefix, indent string) error {
	wrapErr := func(err error) error {
		return fmt.Errorf("marshal error: %w", err)
	}
	if p == "" {
		return wrapErr(errors.New("path empty"))
	}
	if v == nil {
		return wrapErr(errors.New("no type"))
	}
	ext := filepath.Ext(p)
	var bs []byte
	switch ext {
	case ".json":
		x, err := json.MarshalIndent(v, prefix, indent)
		if err != nil {
			return wrapErr(err)
		}
		bs = x
	case ".yaml", ".yml":
		// The two YAML extensions were previously duplicated case bodies.
		x, err := yaml.Marshal(v)
		if err != nil {
			return wrapErr(err)
		}
		bs = x
	default:
		// Fixed: this path used to report "unmarshal error: unsupported
		// extention" from inside the marshal function.
		return wrapErr(fmt.Errorf("unsupported extension %v", ext))
	}
	if err := DirEnsure(p); err != nil {
		return wrapErr(err)
	}
	if err := ioutil.WriteFile(p, bs, 0755); err != nil {
		return wrapErr(err)
	}
	return nil
}
// UnmarshalFromFile reads the file at p and decodes it into v, choosing the
// decoder from the file extension (.json, .yaml or .yml).
func UnmarshalFromFile(p string, v interface{}) error {
	wrapErr := func(err error) error {
		return fmt.Errorf("unmarshal error: %w", err)
	}
	if p == "" {
		return wrapErr(errors.New("path empty"))
	}
	if v == nil {
		return wrapErr(errors.New("no type"))
	}
	bs, err := ioutil.ReadFile(p)
	if err != nil {
		return wrapErr(err)
	}
	ext := filepath.Ext(p)
	switch ext {
	case ".json":
		if err := json.Unmarshal(bs, v); err != nil {
			return wrapErr(err)
		}
	case ".yaml", ".yml":
		// The two YAML extensions were previously duplicated case bodies.
		if err := yaml.Unmarshal(bs, v); err != nil {
			return wrapErr(err)
		}
	default:
		// Fixed: typo "extention" and the redundant inner "unmarshal error:"
		// prefix (wrapErr already adds it, yielding a doubled message).
		return wrapErr(fmt.Errorf("unsupported extension %v", ext))
	}
	return nil
}
|
import { Meta, Story } from '@storybook/react'
import { format } from 'date-fns'
import React, { ComponentProps } from 'react'
import LineChart from '..'
import {
lineChartData,
lineChartHoursData,
lineChartMultipleData,
} from './mockData'
// Storybook metadata for the LineChart component.
export default {
  component: LineChart,
  title: 'Components/Data Display/Chart/LineChart',
} as Meta
// Base template: renders a LineChart with the default mock data and a linear
// x scale; individual stories override via args or decorators.
const Template: Story<ComponentProps<typeof LineChart>> = args => (
  <LineChart data={lineChartData} xScale={{ type: 'linear' }} {...args} />
)
export const Default = Template.bind({})
// Time-based x axis: bottom labels formatted as dd-MM-y dates.
export const Time = Template.bind({})
Time.decorators = [
  () => (
    <LineChart
      data={lineChartHoursData}
      axisFormatters={{
        bottom: value => format(new Date(value), 'dd-MM-y'),
      }}
    />
  ),
]
// Custom formatters for both the axes and the hover/point tooltips.
export const FormattedAxisAndPoints = Template.bind({})
FormattedAxisAndPoints.decorators = [
  () => (
    <LineChart
      data={lineChartHoursData}
      axisFormatters={{
        bottom: value => format(new Date(value), 'dd-MM'),
        left: value => `${value.toString()} liters`,
      }}
      pointFormatters={{
        x: value => format(new Date(value), 'dd-MM-y hh:mm'),
        y: value => `${value.toString()} liters`,
      }}
    />
  ),
]
// Several series rendered together with the built-in legend enabled.
export const MultipleSeriesWithCustomLegend = Template.bind({})
MultipleSeriesWithCustomLegend.decorators = [
  () => (
    <LineChart
      data={lineChartMultipleData}
      axisFormatters={{
        bottom: value => format(new Date(value), 'dd-MM-y'),
      }}
      withLegend
    />
  ),
]
|
/**
* Class for holding a space indentation as used at the beginning
* of the line when writing in pretty print mode to disk file.
*
* @author <a href="mailto:[email protected]">Petroware AS</a>
*/
private final static class Indentation
{
/** Number of characters for the indentation. [0,>. */
private final int unit_;
/** The actual indentation string. */
private final String indent_;
/**
* Create an indentation instance of the specified unit,
* and an initial indentation.
*
* @param unit Number of characters per indentation. [0,>.
* @param indentation Current indentation. Non-null.
*/
private Indentation(int unit, String indentation)
{
assert unit >= 0 : "Invalid unit: " + unit;
assert indentation != null : "indentation cannot be null";
unit_ = unit;
indent_ = indentation;
}
/**
* Create a new indentation instance indented one level to the right.
*
* @return The requested indentation instance. Never null.
*/
private Indentation push()
{
int indentation = indent_.length() + unit_;
return new Indentation(unit_, Util.getSpaces(indentation));
}
/**
* Create a new indentation instance indented one level to the left.
*
* @return The requested indentation instance. Never null.
*/
private Indentation pop()
{
int indentation = Math.max(0, indent_.length() - unit_);
return new Indentation(unit_, Util.getSpaces(indentation));
}
/** {@inheritDoc} */
@Override
public String toString()
{
return indent_;
}
} |
class Settings:
    """Settings class that can be accessed using either dict notation (settings.get('abc')) or
    dot notation (settings.snowflake.password). Reads from toml file and requires a table/section
    called 'default', along with a table/section for each environment, e.g. [local], [dev], [prod].
    Supports nested tables with dot notation, e.g. [local.snowflake].
    Inspired by ConfigParser and DynaConf.
    """

    # Maps tomlkit item types to the builtin Python type used when storing
    # (and when casting env-var overrides back to the original value's type).
    TOML_TO_BUILTIN_MAP = {
        items.String: str,
        items.Bool: bool,
        items.Integer: int,
        items.Float: float,
    }

    # Attribute names handled by the real object (not routed to _store).
    INTERNAL_ATTRS = ["_store", "_ssm", "env"]

    def __init__(self, config_filepath: str = None, env: str = os.getenv("PROJECT_ENV", "local")):
        """Optionally load a toml settings file; `env` selects which
        environment tables (besides 'default') are applied."""
        self._store = Box()
        self.env = env
        # SSM client is only available when boto3 could be imported.
        self._ssm = boto3.client("ssm") if _boto_available else None
        # Load from .env file if exists. Will set env variables for use in .ini files.
        load_dotenv(find_dotenv(usecwd=True), verbose=True)
        if config_filepath:
            self.load(config_filepath)

    def load(self, config_filepath: str):
        """Parse the toml file and populate the store from the 'default'
        table plus any table whose name starts with the current env."""
        if not os.path.exists(config_filepath):
            raise OSError("Could not load the default or provided settings file.")
        self.clear()
        with open(config_filepath, "r") as f:
            settings_data = parse(f.read())
        if "default" not in settings_data:
            raise Exception("Settings file missing required section 'default'")
        # NOTE(review): the loop variable `items` shadows the imported tomlkit
        # `items` module inside this method — harmless today, rename if the
        # module is ever needed here.
        for table, items in settings_data.items():
            if table.startswith(self.env) or table == "default":
                for k, v in items.items():
                    self._set_value_from_config(k, v)

    def clear(self):
        """Drop all loaded settings."""
        self._store = Box()

    def _set_value_from_config(self, name: str, value: Any, parent: str = None):
        """Resolve a single config entry, applying env-var / SSM overrides."""
        # If an env var exists, it takes precedence over everything else
        if name.upper() in os.environ and not parent:
            self.set_attr(name, os.getenv(name.upper()), parent)
        # Or if there's an env var for nested child attr, such as PARENT_CHILD, it takes precedence over orig value
        elif parent and f"{parent.upper()}_{name.upper()}" in os.environ:
            self._set_from_parent_env_var(name, value, parent)
        # If the value is a dict, recursively call this fcn to un-nest it's fields and set them.
        elif isinstance(value, dict):
            for k, v in value.items():
                self._set_value_from_config(k, v, name)
        # If the value defined in toml is "${MY_VAR}", it must be set from env var. Check it exists and set it.
        elif isinstance(value, str) and value.startswith("${") and value.endswith("}"):
            self._set_from_env_var_interpolation(name, value, parent)
        # If value defined with "ssm:" prefix, try fetching it from SSM parameter store.
        elif isinstance(value, str) and value.startswith("ssm:"):
            self._set_from_ssm(name, value, parent)
        # Otherwise, it should be standard type that can be set directly.
        else:
            self.set_attr(name, value, parent)

    def _set_from_env_var_interpolation(self, name: str, value: Any, parent: str = None):
        """Expecting environment variable with the value between ${}. If not found, variable is set to None."""
        var_name = re.findall(r'\${(.*?)}', value)[0]
        if var_name in os.environ:
            self.set_attr(name, os.getenv(var_name), parent)
        else:
            self.set_attr(name, None, parent)

    def _set_from_parent_env_var(self, name: str, value: Any, parent: str = None):
        """Allows overriding nested attributes by setting env var as PARENT_CHILD."""
        attr = os.getenv(f"{parent.upper()}_{name.upper()}")
        # Best-effort cast of the env-var string to the original value's type;
        # on failure the raw string is kept.
        try:
            if type(value) in self.TOML_TO_BUILTIN_MAP:
                attr = self.TOML_TO_BUILTIN_MAP[type(value)](attr)
            elif type(value) == bool:
                attr = attr.lower() == "true"
            else:
                attr = type(value)(attr)
        except ValueError:
            logger.info(f"Could not cast setting {parent}.{name} with value {attr} to type {type(value)}")
            pass
        self.set_attr(name, attr, parent)

    def _set_from_ssm(self, name: str, value: Any, parent: str = None):
        """Gets parameter from ssm, where value starts with `ssm:` """
        if not _boto_available:
            raise ImportError("boto3 not available and is required to read from SSM.")
        try:
            param = self._ssm.get_parameter(
                Name=value.replace("ssm:", ""),
                WithDecryption=True
            )
            if param.get("Parameter"):
                self.set_attr(name, param["Parameter"]["Value"], parent)
        except ClientError as e:
            # Best effort to load parameter
            logger.error(e)

    def set_attr(self, name: str, value: Any, parent: str = None):
        """Store a value (normalized to builtin / Box types), optionally
        nested one level under `parent`."""
        if type(value) in self.TOML_TO_BUILTIN_MAP:
            value = self.TOML_TO_BUILTIN_MAP[type(value)](value)
        if isinstance(value, dict):
            value = Box(value)
        if parent:
            if parent not in self._store:
                self._store[parent] = Box()
            self._store[parent][name] = value
        else:
            self._store[name] = value

    def __dir__(self):
        """Enable auto-complete for code editors"""
        return (
            self.INTERNAL_ATTRS
            + [k.lower() for k in self._store.keys()]
        )

    def __getattr__(self, name):
        """Allow getting keys from self._store using dot notation"""
        if name in self.INTERNAL_ATTRS:
            return super(Settings, self).__getattribute__(name)
        value = getattr(self._store, name)
        return value

    def __setattr__(self, name, value):
        """Allow `settings.FOO = 'value'` while keeping internal attrs."""
        if name in self.INTERNAL_ATTRS:
            super(Settings, self).__setattr__(name, value)
        else:
            self.set_attr(name, value)

    def __contains__(self, item):
        """Respond to `item in settings`"""
        return item.upper() in self._store or item.lower() in self._store

    def __getitem__(self, item):
        """Allow getting variables as dict keys `settings['KEY']`"""
        value = self._store.get(item)
        if value is None:
            raise KeyError(f"{item} does not exist")
        return value

    def __setitem__(self, key, value):
        """Allow `settings['KEY'] = 'value'`"""
        self.set_attr(key, value)

    def __iter__(self):
        """Redirects to store object"""
        yield from self._store

    def items(self):
        """Redirects to store object"""
        return self._store.items()

    def keys(self):
        """Redirects to store object"""
        return self._store.keys()

    def values(self):
        """Redirects to store object"""
        return self._store.values()
<reponame>raydan4/grr
# Lint as: python3
# -*- encoding: utf-8 -*-
"""Tests for JSON instant output plugin."""
import os
import zipfile
from absl import app
import json
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_server.output_plugins import test_plugins
from grr_response_server.output_plugins import json_plugin
from grr.test_lib import test_lib
class JsonInstantOutputPluginTest(test_plugins.InstantOutputPluginTestBase):
"""Tests the JSON instant output plugin."""
plugin_cls = json_plugin.JsonInstantOutputPluginWithExportConversion
  def ProcessValuesToZip(self, values_by_cls):
    """Runs the plugin on the values and returns (ZipFile, archive prefix)."""
    fd_path = self.ProcessValues(values_by_cls)
    file_basename, _ = os.path.splitext(os.path.basename(fd_path))
    return zipfile.ZipFile(fd_path), file_basename
  def testJsonPluginWithValuesOfSametypes(self):
    """Values of a single type produce one MANIFEST and one JSON file."""
    responses = []
    for i in range(10):
      responses.append(
          rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(
                  path="/foo/bar/%d" % i, pathtype="OS"),
              st_mode=33184,
              st_ino=1063090,
              st_dev=64512,
              st_nlink=1 + i,
              st_uid=139592,
              st_gid=5000,
              st_size=0,
              st_atime=1336469177,
              st_mtime=1336129892,
              st_ctime=1336129892))
    zip_fd, prefix = self.ProcessValuesToZip(
        {rdf_client_fs.StatEntry: responses})
    self.assertEqual(
        set(zip_fd.namelist()), {
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile/from_StatEntry.json" % prefix
        })
    # The manifest records how many values of each type were exported.
    parsed_manifest = json.loads(zip_fd.read("%s/MANIFEST" % prefix))
    self.assertEqual(parsed_manifest,
                     {"export_stats": {
                         "StatEntry": {
                             "ExportedFile": 10
                         }}})
    parsed_output = json.loads(
        zip_fd.read("%s/ExportedFile/from_StatEntry.json" % prefix))
    self.assertLen(parsed_output, 10)
    # Every exported row carries the metadata plus stringified stat fields.
    for i in range(10):
      self.assertEqual(parsed_output[i]["metadata"]["client_urn"],
                       "aff4:/%s" % self.client_id)
      self.assertEqual(parsed_output[i]["metadata"]["source_urn"],
                       str(self.results_urn))
      self.assertEqual(parsed_output[i]["urn"],
                       "aff4:/%s/fs/os/foo/bar/%d" % (self.client_id, i))
      self.assertEqual(parsed_output[i]["st_mode"], "-rw-r-----")
      self.assertEqual(parsed_output[i]["st_ino"], "1063090")
      self.assertEqual(parsed_output[i]["st_dev"], "64512")
      self.assertEqual(parsed_output[i]["st_nlink"], str(1 + i))
      self.assertEqual(parsed_output[i]["st_uid"], "139592")
      self.assertEqual(parsed_output[i]["st_gid"], "5000")
      self.assertEqual(parsed_output[i]["st_size"], "0")
      self.assertEqual(parsed_output[i]["st_atime"], "2012-05-08 09:26:17")
      self.assertEqual(parsed_output[i]["st_mtime"], "2012-05-04 11:11:32")
      self.assertEqual(parsed_output[i]["st_ctime"], "2012-05-04 11:11:32")
      self.assertEqual(parsed_output[i]["st_blksize"], "0")
      self.assertEqual(parsed_output[i]["st_rdev"], "0")
      self.assertEqual(parsed_output[i]["symlink"], "")
def testJsonPluginWithValuesOfMultipleTypes(self):
  """Exports values of two different RDF types in one run.

  A StatEntry and a Process are exported together; each type must end up in
  its own JSON file, and the MANIFEST must count both conversions.
  """
  zip_fd, prefix = self.ProcessValuesToZip({
      rdf_client_fs.StatEntry: [
          rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(path="/foo/bar", pathtype="OS"))
      ],
      rdf_client.Process: [rdf_client.Process(pid=42)]
  })
  # One output file per source type, plus the manifest.
  self.assertEqual(
      set(zip_fd.namelist()), {
          "%s/MANIFEST" % prefix,
          "%s/ExportedFile/from_StatEntry.json" % prefix,
          "%s/ExportedProcess/from_Process.json" % prefix
      })

  # The manifest records one exported value for each conversion.
  parsed_manifest = json.loads(zip_fd.read("%s/MANIFEST" % prefix))
  self.assertEqual(
      parsed_manifest, {
          "export_stats": {
              "StatEntry": {
                  "ExportedFile": 1
              },
              "Process": {
                  "ExportedProcess": 1
              }
          }
      })

  # Check the StatEntry -> ExportedFile output.
  parsed_output = json.loads(
      zip_fd.read("%s/ExportedFile/from_StatEntry.json" % prefix))
  self.assertLen(parsed_output, 1)
  self.assertEqual(parsed_output[0]["metadata"]["client_urn"],
                   "aff4:/%s" % self.client_id)
  self.assertEqual(parsed_output[0]["metadata"]["source_urn"],
                   str(self.results_urn))
  self.assertEqual(parsed_output[0]["urn"],
                   "aff4:/%s/fs/os/foo/bar" % self.client_id)

  # Check the Process -> ExportedProcess output. Note that integer fields
  # are serialized as strings.
  parsed_output = json.loads(
      zip_fd.read("%s/ExportedProcess/from_Process.json" % prefix))
  self.assertLen(parsed_output, 1)
  self.assertEqual(parsed_output[0]["pid"], "42")
def testJsonPluginWritesUnicodeValuesCorrectly(self):
  """Checks that non-ASCII paths survive the JSON export round trip."""
  stat_entry = rdf_client_fs.StatEntry(
      pathspec=rdf_paths.PathSpec(path="/中国新闻网新闻中", pathtype="OS"))
  zip_fd, prefix = self.ProcessValuesToZip(
      {rdf_client_fs.StatEntry: [stat_entry]})

  manifest_name = "%s/MANIFEST" % prefix
  export_name = "%s/ExportedFile/from_StatEntry.json" % prefix
  self.assertEqual(set(zip_fd.namelist()), {manifest_name, export_name})

  # The unicode path must appear unmangled in the exported URN.
  parsed_output = json.loads(zip_fd.read(export_name))
  self.assertLen(parsed_output, 1)
  self.assertEqual(parsed_output[0]["urn"],
                   "aff4:/%s/fs/os/中国新闻网新闻中" % self.client_id)
def testJsonPluginWritesMoreThanOnePatchOfRowsCorrectly(self):
  """Exports more rows than fit in two row batches.

  Using ``ROW_BATCH * 2 + 1`` rows forces the plugin to flush at least
  twice plus a partial batch, so batching boundaries are exercised.
  """
  num_rows = self.__class__.plugin_cls.ROW_BATCH * 2 + 1
  responses = [
      rdf_client_fs.StatEntry(
          pathspec=rdf_paths.PathSpec(path="/foo/bar/%d" % i, pathtype="OS"))
      for i in range(num_rows)
  ]

  zip_fd, prefix = self.ProcessValuesToZip(
      {rdf_client_fs.StatEntry: responses})
  parsed_output = json.loads(
      zip_fd.read("%s/ExportedFile/from_StatEntry.json" % prefix))

  # Every row must be present, in order, despite spanning several batches.
  self.assertLen(parsed_output, num_rows)
  for i, row in enumerate(parsed_output):
    self.assertEqual(row["urn"],
                     "aff4:/%s/fs/os/foo/bar/%d" % (self.client_id, i))
def main(argv):
  # Delegate to the GRR test runner; argv comes from absl's app.run.
  test_lib.main(argv)


if __name__ == "__main__":
  app.run(main)
|
<reponame>huangxinping/AccessoriesComponents
//
// AppKeFuLib.h
// AppKeFuLib
//
// Created by jack on 14-5-18.
// Copyright (c) 2014年 <EMAIL>. All rights reserved.
//
#import <Foundation/Foundation.h>

// Posted when login succeeds.
#define APPKEFU_LOGIN_SUCCEED_NOTIFICATION @"appkefu_login_succeed_notification"
// Posted with the online status of a customer-support workgroup.
#define APPKEFU_WORKGROUP_ONLINESTATUS @"appkefu_workgroup_online_status"

// Public entry point of the AppKeFu customer-support SDK.
@interface AppKeFuLib : NSObject

// 1. Returns the shared AppKeFuLib singleton.
+(AppKeFuLib *)sharedInstance;

// 2. Log in. The appkey must be obtained by the developer from http://appkefu.com
-(void)loginWithAppkey:(NSString *)appkey;

// 3. Log out and end the session.
-(void)logout;

// 4. Push the chat view controller onto the given navigation controller.
- (void)pushChatViewController:(UINavigationController *)navController
             withWorkgroupName:(NSString *)workgroupName //1. Workgroup name, requested from the admin console.
                                                         //   Note: this is the workgroup name, NOT an individual agent
                                                         //   user name; multiple agents per workgroup are supported.
    rightBarButtonItemCallback:(void (^)())rightBarButtonTouchUpInsideCallback //2. Callback for the top-right button of the chat page.
                                                                              //   Note: VIP feature, must be enabled separately.
        showInputBarSwitchMenu:(BOOL)shouldShowInputBarSwitchMenu //3. Show a custom menu (similar to WeChat's) in the chat window.
                                                                 //   Menus must first be configured in the admin console; exactly
                                                                 //   three menus are supported, extra ones are not displayed.
                                                                 //   Show: YES, hide: NO
                     withTitle:(NSString *)title //4. Custom title for the chat window.
               withProductInfo:(NSString *)productInfo //5. Sent to the agent automatically once connected.
                                                       //   Pass nil or "" to send nothing.
    withLeftBarButtonItemColor:(UIColor *)color //6. Color of the top-left "end conversation" button.
      hidesBottomBarWhenPushed:(BOOL)shouldHide; //7. Hide the tab bar when pushed from a tab-bar view controller.

// 4.1 Same as above, but presented modally from an arbitrary view controller.
-(void)presentChatViewController:(UIViewController *)navController
               withWorkgroupName:(NSString *)workgroupName
      rightBarButtonItemCallback:(void (^)())rightBarButtonTouchUpInsideCallback
          showInputBarSwitchMenu:(BOOL)shouldShowInputBarSwitchMenu
                       withTitle:(NSString *)title
                 withProductInfo:(NSString *)productInfo
      withLeftBarButtonItemColor:(UIColor *)color
        hidesBottomBarWhenPushed:(BOOL)shouldHide;

// 5. Query the workgroup's online status: online if at least one agent
//    account in the workgroup is online, offline otherwise.
-(void) queryWorkgroupOnlineStatus:(NSString *)workgroupname;

// 5.1 Delete all chat history with the given workgroup.
- (void) deleteMessagesWith:(NSString*)workgroupName;

#pragma mark User tags

// NOTE(review): in the original header the "set"/"get" labels on the comments
// were swapped relative to the selectors; they are corrected below.

// 6. Get the nickname tag.
- (NSString *)getTagNickname;
// 7. Set the nickname tag.
- (void) setTagNickname:(NSString *)nickname;

// 8. Get the sex tag.
- (NSString *)getTagSex;
// 9. Set the sex tag.
- (void) setTagSex:(NSString *)sex;

// 10. Get the language tag.
- (NSString *)getTagLanguage;
// 11. Set the language tag.
- (void) setTagLanguage:(NSString *)language;

// 12. Get the city tag.
- (NSString *)getTagCity;
// 13. Set the city tag.
- (void) setTagCity:(NSString *)city;

// 14. Get the province tag.
- (NSString *)getTagProvince;
// 15. Set the province tag.
- (void) setTagProvince:(NSString *)province;

// 16. Get the country tag.
- (NSString *)getTagCountry;
// 17. Set the country tag.
- (void) setTagCountry:(NSString *)country;

// 18. Get the "other" tag.
- (NSString *)getTagOther;
// 19. Set the "other" tag.
- (void) setTagOther:(NSString *)other;

// 20. Upload the APNs device token, used for offline push notifications.
- (void) uploadDeviceToken:(NSData *)deviceToken;

@end
|
ORLANDO, Fla. - A man accused of stabbing a passenger as he got off a Lynx bus in south Orlando on Wednesday made a memorable first court appearance at the Orange County Jail on Thursday.
[RELATED: Suspect held at gunpoint by good Samaritans]
Thomas Thorpe, 51, appeared before a judge and refused to have a lawyer represent him.
"Do you understand what an attorney is and what they do?" asked the judge.
"Yes, they screwed us," Thorpe responded.
Thorpe seemed in a hurry as the short hearing progressed, telling the judge he wanted to enter a plea without the assistance of the African-American attorney standing next to him.
"I said not guilty -- I pleaded not guilty and I don't want this negro standing next to me," Thorpe told the judge. "I don't want a negro standing next to me."
The judge expressed some concerns about Thorpe's mental health and he is now being watched as he sits in jail on attempted first-degree murder charges with no bond.
His next court appearance date has not been set and it's not known if he will have an attorney representing him when he makes that appearance at the Orange County Courthouse.
Copyright 2014 by ClickOrlando.com. All rights reserved. This material may not be published, broadcast, rewritten or redistributed. |
Sandwiches de miga are popular food items in Argentina, Chile and Uruguay, where they are consumed mainly at parties.[1] The sandwiches de miga are similar to the English cucumber sandwich, which is a typical tea-time food, and resembles the Italian tramezzino.
The Academia Argentina de Gastronomia suggests that the sandwiches may have been introduced into Argentina by immigrants from Northern Italy.[2] In contrast to that story, the Buenos Aires newspaper Clarín suggests that the sandwich was actually invented by local bakers at the Confitería Ideal who had made a sandwich with a recreated English-style bread to satisfy a group of home-sick British engineers who used to frequent their establishment during the early part of the twentieth century.[3][4]
The sandwiches are single-, double- or multiple-layered and are made from a thin white bread without crust, i.e. the part of the bread called "miga" (i.e., crumb). They are filled with thinly sliced meat, especially ham, eggs, cheese, tomatoes, green peppers, tuna, lettuce, and sometimes other vegetables, such as asparagus.[5][6][7] Butter is another important ingredient. They can be toasted or untoasted.
Instead of making them from scratch, Argentines usually buy them at a local bakery.
Sandwiches de miga
See also [ edit ] |
<gh_stars>1-10
package Done;
public class Container {

    // Inclusive bounding-box corners: (x1, y1) is the top-left corner and
    // (x2, y2) the bottom-right corner.
    private int x1;
    private int y1;
    private int x2;
    private int y2;

    /**
     * Creates a container of the given size whose top-left corner is at
     * {@code (x, y)}. Both corners are stored inclusively, hence the
     * {@code - 1} adjustments.
     */
    public Container(int x, int y, int width, int height) {
        this.x1 = x;
        this.y1 = y;
        x2 = x1 + width - 1;
        y2 = y1 + height - 1;
    }

    /** Returns the x coordinate of the top-left corner. */
    public int getX() {
        return x1;
    }

    /** Returns the y coordinate of the top-left corner. */
    public int getY() {
        return y1;
    }

    /** Returns the container's width in cells. */
    public int getWidth() {
        return x2 - x1 + 1;
    }

    /** Returns the container's height in cells. */
    public int getHeight() {
        // Bug fix: this previously computed y1 - y1 + 1, which always
        // returned 1 regardless of the actual height.
        return y2 - y1 + 1;
    }

    /**
     * Reflects the ball if it touches the container's left or right edge and
     * returns whether a collision occurred.
     *
     * NOTE(review): the right-edge test uses {@code getX() - getRadius()};
     * for a symmetric bound one would expect {@code getX() + getRadius()}.
     * The vertical edges (y1/y2) are never checked, yet the ball is
     * reflected both horizontally and vertically. Confirm this is the
     * intended behavior before changing it.
     */
    public boolean collides(Ball ball) {
        if (ball.getX() - ball.getRadius() <= this.x1 ||
                ball.getX() - ball.getRadius() >= this.x2) {
            ball.reflectHorizontal();
            ball.reflectVertical();
            return true;
        }
        return false;
    }

    /** Debug representation listing both inclusive corners. */
    @Override
    public String toString() {
        return "Done.Container[" +
                "(" + x1 +
                "," + y1 +
                "),(" + x2 +
                "," + y2 +
                ")]";
    }
}
|
<reponame>RadicalZephyr/relm<gh_stars>1000+
use crate::gui::person_list_box::{PersonListBox, PersonListBoxMsg};
use crate::model::Person;
use gtk::prelude::*;
use gtk::Orientation;
use relm::{Relm, StreamHandle, Widget};
use relm_derive::{widget, Msg};
/// Messages handled by the main window widget.
#[derive(Msg)]
pub enum WinMsg {
    /// Create a new person. This message is sent by the `Create` button.
    CreatePerson,
    /// Update the selected person. This message is sent by the `Update` button.
    UpdatePerson,
    /// Delete the selected person. This message is sent by the `Delete` button.
    DeletePerson,
    /// The selection in the `PersonListBox` has changed. This message is sent by the `person_list_box` component.
    /// `None` means the selection was cleared.
    UpdateSelected(Option<Person>),
    /// The filter has changed. This message is sent by the filter entry.
    FilterChanged,
    /// The window was closed.
    Quit,
}
/// State owned by the main window.
pub struct WinModel {
    // Handle to this widget's own message stream; cloned into child
    // components so they can send `WinMsg` back to the window.
    msg_stream: StreamHandle<WinMsg>,
    // Currently selected person, if any; drives button sensitivity and the
    // entry fields.
    selected_person: Option<Person>,
}
#[widget]
impl Widget for Win {
    /// Builds the initial model; no selection yet, and the widget's own
    /// message stream is stored so child components can talk back to it.
    fn model(relm: &Relm<Self>, _: ()) -> WinModel {
        WinModel {
            msg_stream: relm.stream().clone(),
            selected_person: None,
        }
    }

    /// Central message handler: routes button clicks and child-component
    /// notifications to the `PersonListBox` component or the local model.
    fn update(&mut self, event: WinMsg) {
        match event {
            WinMsg::CreatePerson => {
                let person = self.get_person();
                // `self.components` has all components of the widget referenced by name given in the `view!` macro.
                // You can send messages to the component using the `emit` function.
                // `self.compnents` and `self.widgets` are not the same. `self.widgets``refers to the `gtk::Widget` (or subclass).
                self.components
                    .person_list_box
                    .emit(PersonListBoxMsg::AddPerson(person))
            }
            WinMsg::UpdatePerson => {
                // Overwrite the selected entry with the current field values.
                let person = self.get_person();
                self.components
                    .person_list_box
                    .emit(PersonListBoxMsg::UpdateSelected(person))
            }
            WinMsg::DeletePerson => self
                .components
                .person_list_box
                .emit(PersonListBoxMsg::DeleteSelected),
            WinMsg::UpdateSelected(person_opt) => {
                // Set the entry fields.
                if let Some(person) = &person_opt {
                    self.widgets.entry_name.set_text(&person.get_name());
                    self.widgets.entry_surname.set_text(&person.get_surname());
                } else {
                    // Selection cleared: blank both fields.
                    self.widgets.entry_name.set_text("");
                    self.widgets.entry_surname.set_text("");
                }
                // Set the person in the model.
                self.model.selected_person = person_opt;
            }
            WinMsg::FilterChanged => {
                // Forward the filter text to the list box, which does the
                // actual filtering.
                let filter = self.widgets.entry_filter.text();
                self.components
                    .person_list_box
                    .emit(PersonListBoxMsg::Filter(filter.to_string()));
            }
            // Quit the application
            WinMsg::Quit => gtk::main_quit(),
        }
    }

    // Declarative UI description: a vertical box with a filter entry on top,
    // the person list beside the name/surname entries in the middle, and the
    // Create/Update/Delete button row at the bottom.
    view! {
        gtk::Window {
            gtk::Box {
                #[name="entry_filter"]
                gtk::Entry {
                    placeholder_text: Some("Filter"),
                    changed => WinMsg::FilterChanged,
                },
                orientation: Orientation::Vertical,
                spacing: 16,
                gtk::Box {
                    spacing: 16,
                    #[name="person_list_box"]
                    // Create a new `PersonListBox` with the stream of this widget as the argument.
                    PersonListBox(self.model.msg_stream.clone()) {
                        hexpand: true,
                    },
                    gtk::Box {
                        spacing: 16,
                        orientation: Orientation::Vertical,
                        #[name="entry_name"]
                        gtk::Entry {
                            placeholder_text: Some("Name"),
                        },
                        #[name="entry_surname"]
                        gtk::Entry {
                            placeholder_text: Some("Surname"),
                        }
                    }
                },
                gtk::Box {
                    spacing: 16,
                    gtk::Button {
                        label: "Create",
                        clicked => WinMsg::CreatePerson
                    },
                    gtk::Button {
                        label: "Update",
                        // This button will only be sensitive if a person is selected.
                        sensitive: self.model.selected_person.is_some(),
                        clicked => WinMsg::UpdatePerson
                    },
                    gtk::Button {
                        label: "Delete",
                        // This button will only be sensitive if a person is selected.
                        sensitive: self.model.selected_person.is_some(),
                        clicked => WinMsg::DeletePerson
                    },
                }
            },
            delete_event(_, _) => (WinMsg::Quit, Inhibit(false)),
        }
    }
}
impl Win {
    /// Builds a `Person` from the current contents of the name and surname
    /// entry widgets.
    fn get_person(&self) -> Person {
        Person::new(
            &self.widgets.entry_name.text(),
            &self.widgets.entry_surname.text(),
        )
    }
}
|
The applicability of furfuryl-gelatin as a novel bioink for tissue engineering applications.
Three-dimensional bioprinting is an innovative technique in tissue engineering, to create layer-by-layer structures, required for mimicking body tissues. However, synthetic bioinks do not generally possess high printability and biocompatibility at the same time. So, there is an urgent need for naturally derived bioinks that can exhibit such optimized properties. We used furfuryl-gelatin as a novel, visible-light crosslinkable bioink for fabricating cell-laden structures with high viability. Hyaluronic acid was added as a viscosity enhancer and either Rose Bengal or Riboflavin was used as a visible-light crosslinker. Crosslinking was done by exposing the printed structure for 2.5 min to visible light and confirmed using Fourier transform infrared spectroscopy and rheometry. Scanning electron microscopy revealed a highly porous networked structure. Three different cell types were successfully bioprinted within these constructs. Mouse mesenchymal stem cells printed within monolayer and bilayer sheets showed viability, network formation and proliferation (∼5.33 times) within 72 h of culture. C2C12 and STO cells were used to print a double layered structure, which showed evidence of the viability of both cells and heterocellular clusters within the construct. This furfuryl-gelatin based bioink can be used for tissue engineering of complex tissues and help in understanding how cellular crosstalk happens in vivo during normal or diseased pathology. © 2018 Wiley Periodicals, Inc. J Biomed Mater Res Part B: Appl Biomater, 107B: 314-323, 2019. |
<filename>src/matrix/m4.rs<gh_stars>0
use crate::float::Float;
use crate::matrix::{FloatMatrix, FromVectors, IntoVectors, Matrix, M4};
use crate::numeric::Numeric;
use crate::vector::{Vector, V4};
use std::ops::{Add, Deref, DerefMut, Div, Mul, Sub};
/// Lets an `M4` be read like its underlying `[[T; 4]; 4]` row-major array,
/// so `m[row][col]` indexing works throughout this module.
impl<T> Deref for M4<T>
where
    T: Numeric,
{
    type Target = [[T; 4]; 4];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Mutable counterpart of the `Deref` impl: allows `m[row][col] = v`.
impl<T> DerefMut for M4<T>
where
    T: Numeric,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Decomposes the matrix into its four column or row vectors.
impl<T> IntoVectors<(V4<T>, V4<T>, V4<T>, V4<T>)> for M4<T>
where
    T: Numeric,
{
    // Columns are gathered by fixing the second index, i.e. this reads the
    // matrix "transposed" relative to its row-major storage.
    fn into_cols(&self) -> (V4<T>, V4<T>, V4<T>, V4<T>) {
        (
            V4([self[0][0], self[1][0], self[2][0], self[3][0]]),
            V4([self[0][1], self[1][1], self[2][1], self[3][1]]),
            V4([self[0][2], self[1][2], self[2][2], self[3][2]]),
            V4([self[0][3], self[1][3], self[2][3], self[3][3]]),
        )
    }

    // Rows are the storage arrays themselves (row-major layout).
    fn into_rows(&self) -> (V4<T>, V4<T>, V4<T>, V4<T>) {
        (V4(self[0]), V4(self[1]), V4(self[2]), V4(self[3]))
    }
}
/// Builds a matrix from four column or row vectors.
impl<T> FromVectors<(V4<T>, V4<T>, V4<T>, V4<T>)> for M4<T>
where
    T: Numeric,
{
    // NOTE(review): the local names are misleading — `r1..r4` here are the
    // supplied *columns* and `c1..c4` below are the supplied *rows*. The
    // element placement itself is correct.
    fn from_cols(v: (V4<T>, V4<T>, V4<T>, V4<T>)) -> Self {
        let (r1, r2, r3, r4) = v;
        M4([
            [r1[0], r2[0], r3[0], r4[0]],
            [r1[1], r2[1], r3[1], r4[1]],
            [r1[2], r2[2], r3[2], r4[2]],
            [r1[3], r2[3], r3[3], r4[3]],
        ])
    }

    fn from_rows(v: (V4<T>, V4<T>, V4<T>, V4<T>)) -> Self {
        let (V4(c1), V4(c2), V4(c3), V4(c4)) = v;
        M4([c1, c2, c3, c4])
    }
}
impl<T> Matrix for M4<T>
where
    T: Numeric,
{
    /// Transposes the matrix in place by swapping each element above the
    /// main diagonal with its mirror below it.
    fn transpose(&mut self) {
        // Safe replacement for the previous raw-pointer `swap` calls. A
        // plain temporary-based swap suffices because elements of `Numeric`
        // matrices are read out of `self` by value elsewhere in this module
        // (e.g. `self[0][0] + rhs[0][0]` in the `Add` impl), so the element
        // type is `Copy` and no `unsafe` is needed.
        for r in 0..4 {
            for c in (r + 1)..4 {
                let tmp = self[r][c];
                self[r][c] = self[c][r];
                self[c][r] = tmp;
            }
        }
    }
}
impl<F> FloatMatrix<F> for M4<F>
where
    F: Float,
{
    /// Determinant via the fully expanded Leibniz formula (24 signed
    /// products of 4 factors each). The expansion is left as an explicit
    /// sum so the compiler can constant-fold and vectorize freely.
    fn determinant(&self) -> F {
        self[0][3] * self[1][2] * self[2][1] * self[3][0]
            - self[0][2] * self[1][3] * self[2][1] * self[3][0]
            - self[0][3] * self[1][1] * self[2][2] * self[3][0]
            + self[0][1] * self[1][3] * self[2][2] * self[3][0]
            + self[0][2] * self[1][1] * self[2][3] * self[3][0]
            - self[0][1] * self[1][2] * self[2][3] * self[3][0]
            - self[0][3] * self[1][2] * self[2][0] * self[3][1]
            + self[0][2] * self[1][3] * self[2][0] * self[3][1]
            + self[0][3] * self[1][0] * self[2][2] * self[3][1]
            - self[0][0] * self[1][3] * self[2][2] * self[3][1]
            - self[0][2] * self[1][0] * self[2][3] * self[3][1]
            + self[0][0] * self[1][2] * self[2][3] * self[3][1]
            + self[0][3] * self[1][1] * self[2][0] * self[3][2]
            - self[0][1] * self[1][3] * self[2][0] * self[3][2]
            - self[0][3] * self[1][0] * self[2][1] * self[3][2]
            + self[0][0] * self[1][3] * self[2][1] * self[3][2]
            + self[0][1] * self[1][0] * self[2][3] * self[3][2]
            - self[0][0] * self[1][1] * self[2][3] * self[3][2]
            - self[0][2] * self[1][1] * self[2][0] * self[3][3]
            + self[0][1] * self[1][2] * self[2][0] * self[3][3]
            + self[0][2] * self[1][0] * self[2][1] * self[3][3]
            - self[0][0] * self[1][2] * self[2][1] * self[3][3]
            - self[0][1] * self[1][0] * self[2][2] * self[3][3]
            + self[0][0] * self[1][1] * self[2][2] * self[3][3]
    }

    /// Matrix of cofactors, fully expanded: each entry is the signed 3x3
    /// minor obtained by deleting that entry's row and column. Entry (i, j)
    /// of the result only references rows != i and columns != j of `self`.
    fn cofactor(&self) -> Self {
        M4([
            [
                self[1][2] * self[2][3] * self[3][1] - self[1][3] * self[2][2] * self[3][1]
                    + self[1][3] * self[2][1] * self[3][2]
                    - self[1][1] * self[2][3] * self[3][2]
                    - self[1][2] * self[2][1] * self[3][3]
                    + self[1][1] * self[2][2] * self[3][3],
                self[0][3] * self[2][2] * self[3][1]
                    - self[0][2] * self[2][3] * self[3][1]
                    - self[0][3] * self[2][1] * self[3][2]
                    + self[0][1] * self[2][3] * self[3][2]
                    + self[0][2] * self[2][1] * self[3][3]
                    - self[0][1] * self[2][2] * self[3][3],
                self[0][2] * self[1][3] * self[3][1] - self[0][3] * self[1][2] * self[3][1]
                    + self[0][3] * self[1][1] * self[3][2]
                    - self[0][1] * self[1][3] * self[3][2]
                    - self[0][2] * self[1][1] * self[3][3]
                    + self[0][1] * self[1][2] * self[3][3],
                self[0][3] * self[1][2] * self[2][1]
                    - self[0][2] * self[1][3] * self[2][1]
                    - self[0][3] * self[1][1] * self[2][2]
                    + self[0][1] * self[1][3] * self[2][2]
                    + self[0][2] * self[1][1] * self[2][3]
                    - self[0][1] * self[1][2] * self[2][3],
            ],
            [
                self[1][3] * self[2][2] * self[3][0]
                    - self[1][2] * self[2][3] * self[3][0]
                    - self[1][3] * self[2][0] * self[3][2]
                    + self[1][0] * self[2][3] * self[3][2]
                    + self[1][2] * self[2][0] * self[3][3]
                    - self[1][0] * self[2][2] * self[3][3],
                self[0][2] * self[2][3] * self[3][0] - self[0][3] * self[2][2] * self[3][0]
                    + self[0][3] * self[2][0] * self[3][2]
                    - self[0][0] * self[2][3] * self[3][2]
                    - self[0][2] * self[2][0] * self[3][3]
                    + self[0][0] * self[2][2] * self[3][3],
                self[0][3] * self[1][2] * self[3][0]
                    - self[0][2] * self[1][3] * self[3][0]
                    - self[0][3] * self[1][0] * self[3][2]
                    + self[0][0] * self[1][3] * self[3][2]
                    + self[0][2] * self[1][0] * self[3][3]
                    - self[0][0] * self[1][2] * self[3][3],
                self[0][2] * self[1][3] * self[2][0] - self[0][3] * self[1][2] * self[2][0]
                    + self[0][3] * self[1][0] * self[2][2]
                    - self[0][0] * self[1][3] * self[2][2]
                    - self[0][2] * self[1][0] * self[2][3]
                    + self[0][0] * self[1][2] * self[2][3],
            ],
            [
                self[1][1] * self[2][3] * self[3][0] - self[1][3] * self[2][1] * self[3][0]
                    + self[1][3] * self[2][0] * self[3][1]
                    - self[1][0] * self[2][3] * self[3][1]
                    - self[1][1] * self[2][0] * self[3][3]
                    + self[1][0] * self[2][1] * self[3][3],
                self[0][3] * self[2][1] * self[3][0]
                    - self[0][1] * self[2][3] * self[3][0]
                    - self[0][3] * self[2][0] * self[3][1]
                    + self[0][0] * self[2][3] * self[3][1]
                    + self[0][1] * self[2][0] * self[3][3]
                    - self[0][0] * self[2][1] * self[3][3],
                self[0][1] * self[1][3] * self[3][0] - self[0][3] * self[1][1] * self[3][0]
                    + self[0][3] * self[1][0] * self[3][1]
                    - self[0][0] * self[1][3] * self[3][1]
                    - self[0][1] * self[1][0] * self[3][3]
                    + self[0][0] * self[1][1] * self[3][3],
                self[0][3] * self[1][1] * self[2][0]
                    - self[0][1] * self[1][3] * self[2][0]
                    - self[0][3] * self[1][0] * self[2][1]
                    + self[0][0] * self[1][3] * self[2][1]
                    + self[0][1] * self[1][0] * self[2][3]
                    - self[0][0] * self[1][1] * self[2][3],
            ],
            [
                self[1][2] * self[2][1] * self[3][0]
                    - self[1][1] * self[2][2] * self[3][0]
                    - self[1][2] * self[2][0] * self[3][1]
                    + self[1][0] * self[2][2] * self[3][1]
                    + self[1][1] * self[2][0] * self[3][2]
                    - self[1][0] * self[2][1] * self[3][2],
                self[0][1] * self[2][2] * self[3][0] - self[0][2] * self[2][1] * self[3][0]
                    + self[0][2] * self[2][0] * self[3][1]
                    - self[0][0] * self[2][2] * self[3][1]
                    - self[0][1] * self[2][0] * self[3][2]
                    + self[0][0] * self[2][1] * self[3][2],
                self[0][2] * self[1][1] * self[3][0]
                    - self[0][1] * self[1][2] * self[3][0]
                    - self[0][2] * self[1][0] * self[3][1]
                    + self[0][0] * self[1][2] * self[3][1]
                    + self[0][1] * self[1][0] * self[3][2]
                    - self[0][0] * self[1][1] * self[3][2],
                self[0][1] * self[1][2] * self[2][0] - self[0][2] * self[1][1] * self[2][0]
                    + self[0][2] * self[1][0] * self[2][1]
                    - self[0][0] * self[1][2] * self[2][1]
                    - self[0][1] * self[1][0] * self[2][2]
                    + self[0][0] * self[1][1] * self[2][2],
            ],
        ])
    }
}
impl<T> Add for M4<T>
where
    T: Numeric,
{
    type Output = M4<T>;

    /// Element-wise matrix addition.
    fn add(self, rhs: Self) -> Self::Output {
        // Build each row/column entry positionally instead of spelling out
        // all sixteen sums by hand.
        M4(std::array::from_fn(|r| {
            std::array::from_fn(|c| self[r][c] + rhs[r][c])
        }))
    }
}
impl<T> Sub for M4<T>
where
    T: Numeric,
{
    type Output = M4<T>;

    /// Element-wise matrix subtraction.
    fn sub(self, rhs: Self) -> Self::Output {
        // Positional construction replaces the hand-expanded sixteen
        // differences of the original.
        M4(std::array::from_fn(|r| {
            std::array::from_fn(|c| self[r][c] - rhs[r][c])
        }))
    }
}
impl<T> Mul for M4<T>
where
    T: Numeric,
{
    type Output = M4<T>;

    /// Standard matrix product: entry (i, j) is the dot product of row i of
    /// `self` with column j of `rhs`. (Locals renamed so that `r*` really
    /// are rows and `c*` really are columns.)
    fn mul(self, rhs: Self) -> Self::Output {
        let (r1, r2, r3, r4) = self.into_rows();
        let (c1, c2, c3, c4) = rhs.into_cols();
        M4([
            [r1.dot(c1), r1.dot(c2), r1.dot(c3), r1.dot(c4)],
            [r2.dot(c1), r2.dot(c2), r2.dot(c3), r2.dot(c4)],
            [r3.dot(c1), r3.dot(c2), r3.dot(c3), r3.dot(c4)],
            [r4.dot(c1), r4.dot(c2), r4.dot(c3), r4.dot(c4)],
        ])
    }
}
impl<T> Mul<V4<T>> for M4<T>
where
    T: Numeric,
{
    type Output = V4<T>;

    /// Matrix-vector product: each output component is the dot product of
    /// the corresponding matrix row with `rhs`.
    fn mul(self, rhs: V4<T>) -> Self::Output {
        let (r1, r2, r3, r4) = self.into_rows();
        V4([r1.dot(rhs), r2.dot(rhs), r3.dot(rhs), r4.dot(rhs)])
    }
}
impl<T> Div<T> for M4<T>
where
    T: Numeric,
{
    type Output = M4<T>;

    /// Divides every element by the scalar `rhs`.
    fn div(self, rhs: T) -> Self::Output {
        // Positional construction replaces the hand-expanded sixteen
        // divisions of the original.
        M4(std::array::from_fn(|r| {
            std::array::from_fn(|c| self[r][c] / rhs)
        }))
    }
}
|
def _ConfigureLogging():
logging_format = '%(message)s'
logging.basicConfig(
stream=logging.sys.stdout, level=logging.INFO, format=logging_format) |
from NIM.algorithms.algorithm import Algorithm, Ackley
from numpy import asarray, zeros, full, inf, apply_along_axis, where, round, concatenate, fabs, exp, cos, pi, argsort, append, argmin
import logging
# Module-level logger for MFO. basicConfig() is called at import time so log
# records are emitted even if the host application never configures logging.
logging.basicConfig()
logger = logging.getLogger('MFO')
logger.setLevel('INFO')
class MothFlameOptimization(Algorithm):
    """Moth-Flame Optimization (MFO) metaheuristic.

    Moths (candidate solutions) spiral around flames (the best solutions
    found so far); the number of flames shrinks linearly over the run so the
    search converges toward the best flame.
    """

    def __init__(self, **kwargs):
        # All configuration (population, iterations, cost function, bounds,
        # RNG, ...) is handled by the Algorithm base class.
        super().__init__(**kwargs)

    def update_flame_no(self, iter):
        r"""Update flames number.

        Linearly decreases from `population` at iteration 0 down to 1 at the
        final iteration.

        :param iter: Current iteration times
        :return: Number of flames
        """
        return int(round(self.population - iter * ((self.population - 1) / self.iterations)))

    def generate_a(self, iter):
        r"""Generate a to calculate t

        :param iter: Current iteration times
        :return: a linearly decreases from -1 to -2
        """
        return -1 + iter * ((-1) / self.iterations)

    def update_flame(self, pre_moth_pos, pre_moth_fit, flame_pos, flame_fit):
        # Merge the previous moths with the current flames and keep the
        # `population` best individuals as the new flames.
        # Sort the previous moths and flames
        pos = concatenate((pre_moth_pos, flame_pos), axis=0)
        fit = concatenate((pre_moth_fit, flame_fit), axis=0)
        indexes = argsort(fit)[:self.population]
        return pos[indexes], fit[indexes]

    def update_moth(self, i, flame_no, a, moth_pos, flame_pos):
        # Logarithmic spiral flight of moth i around a flame.
        # NOTE(review): D is always measured against flame_pos[i], even in
        # the else-branch where the moth then spirals around
        # flame_pos[flame_no-1] -- confirm against the reference MFO
        # implementation, which measures D against the same flame it orbits.
        D = fabs(flame_pos[i] - moth_pos[i])  # distance to flame
        b = 1  # a constant for defining the shape of the logarithmic spiral
        t = (a - 1) * self.Rand.uniform(0, 1, self.dim) + 1  # a random number in [a, 1] (a in [-2, -1])
        # NOTE(review): with 0-based indexing one would expect `i < flame_no`
        # here; `i <= flame_no` lets the moth at index flame_no orbit its own
        # (out-of-quota) flame -- possible off-by-one, confirm intent.
        if i <= flame_no:
            return D * exp(b * t) * cos(2 * pi * t) + flame_pos[i]
        else:
            return D * exp(b * t) * cos(2 * pi * t) + flame_pos[flame_no-1]

    def run(self):
        # Main optimization loop; returns (best position, best fitness).
        moth_pos = self.initial_position()
        moth_fit = apply_along_axis(self.cost_function, 1, moth_pos)
        # Sort the first population of moths
        idx = argsort(moth_fit)
        flame_pos, flame_fit = moth_pos[idx], moth_fit[idx]
        best_flame_pos, best_flame_fit = flame_pos[0], flame_fit[0]
        self.iter = 0
        while not self.stopping_criteria(self.iter):
            self.iter += 1
            # Record the swarm state and the incumbent best for this iteration.
            self.iter_swarm_pos.loc[self.iter] = moth_pos
            self.iter_solution.loc[self.iter] = append(best_flame_pos, best_flame_fit)
            if self.debug:
                logger.info("Iteration:{i}/{iterations} - {iter_sol}".format(i=self.iter, iterations=self.iterations,
                                                                             iter_sol=self.iter_solution.loc[
                                                                                 self.iter].to_dict()))
            flame_no = self.update_flame_no(self.iter)
            previous_moth_pos, previous_moth_fit = moth_pos.copy(), moth_fit.copy()
            a = self.generate_a(self.iter)
            # Move every moth, clip to the search bounds, and re-evaluate.
            moth_pos = asarray([self.update_moth(i, flame_no, a, moth_pos, flame_pos) for i in range(self.population)])
            moth_pos = apply_along_axis(self.boundary_handle, 1, moth_pos)
            moth_fit = apply_along_axis(self.cost_function, 1, moth_pos)
            flame_pos, flame_fit = self.update_flame(previous_moth_pos, previous_moth_fit, flame_pos, flame_fit)
            # Flames are sorted by fitness, so index 0 is the global best.
            best_flame_pos, best_flame_fit = flame_pos[0], flame_fit[0]
        self.best_solution.iloc[:] = append(best_flame_pos, best_flame_fit)
        return best_flame_pos, best_flame_fit
if __name__ == '__main__':
    # Smoke run: minimize the 50-dimensional Ackley function.
    mfo = MothFlameOptimization(func=Ackley(dimension=50), iterations=5000, debug=False)
    best_sol, best_val = mfo.run()
    logger.info("best sol:{sol}, best val:{val}".format(sol=best_sol, val=best_val))
    # print(mfo.eval_count)
|
// Begin begins a transaction, and returns the associated transaction id and
// the statements (if any) executed to initiate the transaction. In autocommit
// mode the statement will be "".
//
// Subsequent statements can access the connection through the transaction id.
func (axp *TxPool) Begin(ctx context.Context, options *querypb.ExecuteOptions) (int64, string, error) {
	span, ctx := trace.NewSpan(ctx, "TxPool.Begin")
	defer span.Finish()
	var conn *connpool.DBConn
	var err error
	// Enforce the per-user transaction limit before taking any resources.
	immediateCaller := callerid.ImmediateCallerIDFromContext(ctx)
	effectiveCaller := callerid.EffectiveCallerIDFromContext(ctx)
	if !axp.limiter.Get(immediateCaller, effectiveCaller) {
		return 0, "", vterrors.Errorf(vtrpcpb.Code_RESOURCE_EXHAUSTED, "per-user transaction pool connection limit exceeded")
	}
	// Bound the number of callers queued for a pool connection.
	waiterCount := axp.waiters.Add(1)
	defer axp.waiters.Add(-1)

	if waiterCount > axp.waiterCap.Get() {
		return 0, "", vterrors.New(vtrpcpb.Code_RESOURCE_EXHAUSTED, "transaction pool waiter count exceeded")
	}
	// Until beginSucceeded is set, every early return must give back the
	// connection (if acquired) and release the limiter slot.
	var beginSucceeded bool
	defer func() {
		if beginSucceeded {
			return
		}
		if conn != nil {
			conn.Recycle()
		}
		axp.limiter.Release(immediateCaller, effectiveCaller)
	}()

	// CLIENT_FOUND_ROWS requires a connection from a pool whose connections
	// were established with that capability flag.
	if options.GetClientFoundRows() {
		conn, err = axp.foundRowsPool.Get(ctx)
	} else {
		conn, err = axp.conns.Get(ctx)
	}
	if err != nil {
		switch err {
		case connpool.ErrConnPoolClosed:
			return 0, "", err
		case pools.ErrTimeout:
			// Log the currently active transactions to help diagnose
			// pool exhaustion, then report it as a resource error.
			axp.LogActive()
			return 0, "", vterrors.Errorf(vtrpcpb.Code_RESOURCE_EXHAUSTED, "transaction pool connection limit exceeded")
		}
		return 0, "", err
	}
	// Issue the isolation-level and BEGIN statements dictated by the
	// requested isolation, collecting them into beginQueries for the caller.
	autocommitTransaction := false
	beginQueries := ""
	if queries, ok := txIsolations[options.GetTransactionIsolation()]; ok {
		if queries.setIsolationLevel != "" {
			if _, err := conn.Exec(ctx, "set transaction isolation level "+queries.setIsolationLevel, 1, false); err != nil {
				return 0, "", err
			}

			beginQueries = queries.setIsolationLevel + "; "
		}
		if _, err := conn.Exec(ctx, queries.openTransaction, 1, false); err != nil {
			return 0, "", err
		}
		beginQueries = beginQueries + queries.openTransaction
	} else if options.GetTransactionIsolation() == querypb.ExecuteOptions_AUTOCOMMIT {
		// No BEGIN is sent; each statement commits on its own.
		autocommitTransaction = true
	} else {
		return 0, "", fmt.Errorf("don't know how to open a transaction of this type: %v", options.GetTransactionIsolation())
	}

	// Success: the deferred cleanup becomes a no-op and ownership of the
	// connection passes to the active-transaction registry.
	beginSucceeded = true
	transactionID := axp.lastID.Add(1)
	axp.activePool.Register(
		transactionID,
		newTxConnection(
			conn,
			transactionID,
			axp,
			immediateCaller,
			effectiveCaller,
			autocommitTransaction,
		),
		options.GetWorkload() != querypb.ExecuteOptions_DBA,
	)
	return transactionID, beginQueries, nil
}
LONDON (Reuters) - The Baghdad bureau chief for Reuters has left Iraq after he was threatened on Facebook and denounced by a Shi’ite paramilitary group’s satellite news channel in reaction to a Reuters report last week that detailed lynching and looting in the city of Tikrit.
The threats against journalist Ned Parker began on an Iraqi Facebook page run by a group that calls itself “the Hammer” and is believed by an Iraqi security source to be linked to armed Shi’ite groups. The April 5 post and subsequent comments demanded he be expelled from Iraq. One commenter said that killing Parker was “the best way to silence him, not kick him out.”
Three days later, a news show on Al-Ahd, a television station owned by Iranian-backed armed group Asaib Ahl al-Haq, broadcast a segment on Parker that included a photo of him. The segment accused the reporter and Reuters of denigrating Iraq and its government-backed forces, and called on viewers to demand Parker be expelled.
The pressure followed an April 3 report by Parker and two colleagues detailing human rights abuses in Tikrit after government forces and Iranian-backed militias liberated the city from the Islamic State extremist group. Two Reuters journalists in the city witnessed the lynching of an Islamic State fighter by Iraqi federal police. The report also described widespread incidents of looting and arson in the city, which local politicians blamed on Iranian-backed militias.
A Reuters spokeswoman said the agency stood by the accuracy and fairness of its report. Facebook, acting on a request from Reuters, removed a series of threatening posts this week.
The threats appear to be part of a broader power struggle in Iraq. The country is divided between its Shi’ite Muslim majority, which now dominates the government, and its Sunni Muslim minority, which held sway under the late dictator Saddam Hussein. Prime Minister Haidar al-Abadi, a moderate Shi’ite, is attempting to defeat Islamic State – a radical Sunni offshoot of Al Qaeda that has seized huge portions of Iraqi territory – while at the same time trying to mend fences with the broader Sunni community.
The Iraqi military is rebuilding following its collapse last June. That has forced Abadi’s government to rely on a constellation of Shi’ite paramilitary forces backed by Iran. The paramilitary forces, which include Asaib Ahl al-Haq, routinely denounce Western media coverage of Iraq’s internal conflict.
Abadi is scheduled to meet U.S. President Barack Obama in Washington on April 14 to discuss the campaign against Islamic State.
Rafid Jaboori, a spokesman for Abadi, said the government was “definitely against any message that encourages hatred or intimidation, whether it comes from a local or international network.” At the same time, the al-Ahd segment “was primarily a criticism of the government, something that we have to live with,” he said.
Jaboori said the environment for media “has improved significantly since this prime minister took over.” He advised any foreign journalists who feel threatened to call the Iraqi police for help. Many Iraqis – both Sunni and Shi’ites – do not trust the police, some of whom are believed to have links with the Shi’ite paramilitaries.
Michael Lavallee, a U.S. State Department spokesperson, said: “We condemn all forms of intimidation and violence toward the media as the protection of journalistic freedoms is an essential aspect of all democratic societies.”
He said the State Department had spoken with Abadi’s office “to raise our concerns about the potentially dangerous atmosphere created by an editorial broadcast on a private Iraqi television network about the Reuters bureau chief and the Reuters staff in Iraq.” The State Department “will continue to closely monitor the treatment of international media in Iraq and raise objection to any form of intimidation that may inhibit the ability of the media to perform their work.”
The Committee to Protect Journalists, a media advocacy group, says that at least 15 journalists have been killed in Iraq since the beginning of 2013. |
async def cmd_remove(self, ctx: commands.Context, *, cmd_or_cog: CommandOrCogConverter):
cmd = cmd_or_cog.qualified_name
is_cog = isinstance(cmd_or_cog, commands.Cog)
key = "cog" if is_cog else "command"
async with getattr(self.config, f"{key}s")() as cmds:
if cmd not in cmds:
return await ctx.send(
f"I am already not tracking the {key} `{cmd}`.\n"
"If this {key} is being tracked, please make an issue on my github\n"
"<https://github.com/Just-Jojo/JojoCogs>"
)
cmds.remove(cmd)
await ctx.tick() |
// Barrel module: re-exports the public surface of each submodule so
// consumers can import everything from the package root.
export * from './caching'
export * from './interfaces'
export * from './testing'
export * from './themes'
export * from './util'
export * from './uuid'
//===- DisassemblerEmitter.cpp - Generate a disassembler ------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "CodeGenTarget.h"
#include "WebAssemblyDisassemblerEmitter.h"
#include "X86DisassemblerTables.h"
#include "X86RecognizableInstr.h"
#include "llvm/TableGen/Error.h"
#include "llvm/TableGen/Record.h"
#include "llvm/TableGen/TableGenBackend.h"
using namespace llvm;
using namespace llvm::X86Disassembler;
/// DisassemblerEmitter - Contains disassembler table emitters for various
/// architectures.
/// X86 Disassembler Emitter
///
/// *** IF YOU'RE HERE TO RESOLVE A "Primary decode conflict", LOOK DOWN NEAR
/// THE END OF THIS COMMENT!
///
/// The X86 disassembler emitter is part of the X86 Disassembler, which is
/// documented in lib/Target/X86/X86Disassembler.h.
///
/// The emitter produces the tables that the disassembler uses to translate
/// instructions. The emitter generates the following tables:
///
/// - One table (CONTEXTS_SYM) that contains a mapping of attribute masks to
/// instruction contexts. Although for each attribute there are cases where
/// that attribute determines decoding, in the majority of cases decoding is
/// the same whether or not an attribute is present. For example, a 64-bit
/// instruction with an OPSIZE prefix and an XS prefix decodes the same way in
/// all cases as a 64-bit instruction with only OPSIZE set. (The XS prefix
/// may have effects on its execution, but does not change the instruction
/// returned.) This allows considerable space savings in other tables.
/// - Six tables (ONEBYTE_SYM, TWOBYTE_SYM, THREEBYTE38_SYM, THREEBYTE3A_SYM,
/// THREEBYTEA6_SYM, and THREEBYTEA7_SYM contain the hierarchy that the
/// decoder traverses while decoding an instruction. At the lowest level of
/// this hierarchy are instruction UIDs, 16-bit integers that can be used to
/// uniquely identify the instruction and correspond exactly to its position
/// in the list of CodeGenInstructions for the target.
/// - One table (INSTRUCTIONS_SYM) contains information about the operands of
/// each instruction and how to decode them.
///
/// During table generation, there may be conflicts between instructions that
/// occupy the same space in the decode tables. These conflicts are resolved as
/// follows in setTableFields() (X86DisassemblerTables.cpp)
///
/// - If the current context is the native context for one of the instructions
/// (that is, the attributes specified for it in the LLVM tables specify
/// precisely the current context), then it has priority.
/// - If the current context isn't native for either of the instructions, then
/// the higher-priority context wins (that is, the one that is more specific).
/// That hierarchy is determined by outranks() (X86DisassemblerTables.cpp)
/// - If the current context is native for both instructions, then the table
/// emitter reports a conflict and dies.
///
/// *** RESOLUTION FOR "Primary decode conflict"S
///
/// If two instructions collide, typically the solution is (in order of
/// likelihood):
///
/// (1) to filter out one of the instructions by editing filter()
/// (X86RecognizableInstr.cpp). This is the most common resolution, but
/// check the Intel manuals first to make sure that (2) and (3) are not the
/// problem.
/// (2) to fix the tables (X86.td and its subsidiaries) so the opcodes are
/// accurate. Sometimes they are not.
/// (3) to fix the tables to reflect the actual context (for example, required
/// prefixes), and possibly to add a new context by editing
/// include/llvm/Support/X86DisassemblerDecoderCommon.h. This is unlikely
/// to be the cause.
///
/// DisassemblerEmitter.cpp contains the implementation for the emitter,
/// which simply pulls out instructions from the CodeGenTarget and pushes them
/// into X86DisassemblerTables.
/// X86DisassemblerTables.h contains the interface for the instruction tables,
/// which manage and emit the structures discussed above.
/// X86DisassemblerTables.cpp contains the implementation for the instruction
/// tables.
/// X86ModRMFilters.h contains filters that can be used to determine which
/// ModR/M values are valid for a particular instruction. These are used to
/// populate ModRMDecisions.
/// X86RecognizableInstr.h contains the interface for a single instruction,
/// which knows how to translate itself from a CodeGenInstruction and provide
/// the information necessary for integration into the tables.
/// X86RecognizableInstr.cpp contains the implementation for a single
/// instruction.
namespace llvm {
extern void EmitFixedLenDecoder(RecordKeeper &RK, raw_ostream &OS,
const std::string &PredicateNamespace,
const std::string &GPrefix,
const std::string &GPostfix,
const std::string &ROK,
const std::string &RFail, const std::string &L);
/// Emit the disassembler tables/decoder appropriate for the target:
/// custom table emitters for X86 and WebAssembly, the fixed-length
/// decoder (with CHECK() plumbing for the ARM family) for everything else.
void EmitDisassembler(RecordKeeper &Records, raw_ostream &OS) {
  CodeGenTarget Target(Records);

  emitSourceFileHeader(" * " + Target.getName().str() + " Disassembler", OS);

  // X86 ships a hand-written disassembler driven by generated tables.
  if (Target.getName() == "X86") {
    DisassemblerTables Tables;

    ArrayRef<const CodeGenInstruction *> Insts =
        Target.getInstructionsByEnumValue();
    for (unsigned Idx = 0, NumInsts = Insts.size(); Idx != NumInsts; ++Idx)
      RecognizableInstr::processInstr(Tables, *Insts[Idx], Idx);

    if (Tables.hasConflicts()) {
      PrintError(Target.getTargetRecord()->getLoc(), "Primary decode conflict");
      return;
    }

    Tables.emit(OS);
    return;
  }

  // WebAssembly has variable length opcodes, so can't use EmitFixedLenDecoder
  // (which depends on a Size table-gen Record); it also uses a custom
  // disassembler.
  if (Target.getName() == "WebAssembly") {
    emitWebAssemblyDisassemblerTables(OS, Target.getInstructionsByEnumValue());
    return;
  }

  // ARM and Thumb have a CHECK() macro to deal with DecodeStatuses; Thumb
  // shares ARM's predicate namespace.
  if (Target.getName() == "ARM" || Target.getName() == "Thumb" ||
      Target.getName() == "AArch64" || Target.getName() == "ARM64") {
    std::string PredicateNamespace = std::string(Target.getName());
    if (PredicateNamespace == "Thumb")
      PredicateNamespace = "ARM";

    EmitFixedLenDecoder(Records, OS, PredicateNamespace,
                        "if (!Check(S, ", "))",
                        "S", "MCDisassembler::Fail",
                        " MCDisassembler::DecodeStatus S = "
                        "MCDisassembler::Success;\n(void)S;");
    return;
  }

  // Generic fixed-length decoder for all remaining targets.
  EmitFixedLenDecoder(Records, OS, std::string(Target.getName()), "if (",
                      " == MCDisassembler::Fail)", "MCDisassembler::Success",
                      "MCDisassembler::Fail", "");
}
} // end namespace llvm
|
// WithReasonHandler configure a reason handler
func WithReasonHandler(reasonHandler ReasonHandler) DispatcherOption {
return func(dispatcher *Dispatcher) {
dispatcher.reasonHandler = reasonHandler
}
} |
<reponame>wylwq/store-server
package com.ly.storeserver.admin.models.request;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
/**
* @Description:
* @Author ly
* @Date 2020/4/23 23:29
* @Version V1.0.0
**/
@Data
public class UserRequest {

    // Unique user id.
    @ApiModelProperty(value = "用户id")
    private Long id;

    // Display name of the user.
    @ApiModelProperty(value = "用户名称")
    private String userName;

    // User's phone number (stored as a string).
    @ApiModelProperty(value = "用户手机号")
    private String userPhone;

    // User's age in years.
    @ApiModelProperty(value = "用户年龄")
    private Integer userAge;

    // User's gender, free-form string.
    @ApiModelProperty(value = "用户性别")
    private String userSex;

    // User's home address.
    @ApiModelProperty(value = "用户家庭住址")
    private String userAddress;

    // Administrator flag; NOTE(review): stored as a String -- the valid
    // values are not visible here, confirm against the persistence layer.
    @ApiModelProperty(value = "管理员标志")
    private String adminFlag;
}
|
def sleep_and_restart_diamond(ctx):
    # NOTE(review): despite the name, this only emits a log line; the
    # placeholder message 'Foo' suggests the sleep/restart logic was never
    # implemented here -- confirm intent before relying on this operation.
    ctx.logger.info('Foo')
module RandomSupply where
import Supply
import System.Random (split, randoms, getStdRandom, Random)
-- | Produce an infinite lazy list of random values in 'IO' by splitting
-- the global generator: one half seeds the list, the other half is
-- written back as the new global generator.
randomsIO :: Random a => IO [a]
randomsIO = getStdRandom mk
  where
    mk g = (randoms g1, g2)
      where
        (g1, g2) = split g
-- (fst . runSupply Supply.next) `fmap` randomsIO
|
// MustValidate validates the environment and panics on any validation error
func (e *AssertedEnvironment) MustValidate() {
if err := validate(e.config, e.opts.Getenv); err != nil {
panic(err)
}
} |
// WithMinimumLevel set minimum level to be logged. Logger will always log level >= minimum level.
//
// Options: LevelTrace < LevelDebug < LevelInfo < LevelError < LevelImportantInfo
//
// Default: LevelDebug
func WithMinimumLevel(lvl Level) Option {
return func(opt *options) {
opt.minimumLogLevel = lvl
}
} |
def main():
    """Read a count pair and a value list; print whether at least ``m``
    values reach 1/(4m) of the total sum."""
    _, m = (int(x) for x in input().split())
    values = [int(x) for x in input().split()]
    # A value qualifies when it is at least 1/(4m) of the overall total.
    threshold = sum(values) / (4 * m)
    qualifying = sum(1 for v in values if v >= threshold)
    print("Yes" if qualifying >= m else "No")


if __name__ == "__main__":
    main()
|
    def exec_stmt(self, db, sql, args):
        """Execute ``sql`` with ``args`` on connection ``db`` and commit.

        The fully quoted statement is logged at debug level before
        execution.  The commit is unconditional on success; any exception
        raised by ``execute()`` propagates before the commit happens.
        """
        self.log.debug("exec_stmt: %s" % skytools.quote_statement(sql, args))
        curs = db.cursor()
        curs.execute(sql, args)
        db.commit()
// MustNewTipSet makes a new tipset or panics trying.
func MustNewTipSet(blks ...*types.Block) types.TipSet {
ts, err := types.NewTipSet(blks...)
if err != nil {
panic(err)
}
return ts
} |
<filename>Excercises/tesla_input_excercise.py
#!/usr/local/bin/python3
def checkDriverAge(age=0):
    """Print a driving-eligibility message for ``age`` and return it.

    Under 18 is refused, over 18 is welcomed, and exactly 18 gets a
    first-year congratulation.  The value is returned unchanged (it may
    be anything ``int()`` accepts, e.g. a numeric string).
    """
    # if not age:
    #     age = int(input('What is your age?: '))
    if int(age) < 18:
        # Bug fix: adjacent literals previously concatenated into
        # "...this car Powering off!" with no separating punctuation.
        print('Sorry, you are too young to drive this car. '
              'Powering off!')
    elif int(age) > 18:
        print('Powering On. Enjoy the ride!')
    else:  # int(age) == 18 -- the only remaining case
        # Bug fix: the literals joined as "...first year ofdriving."
        print('Congratulations on your first year of '
              'driving. Enjoy the ride')
    return age


if __name__ == "__main__":
    age = checkDriverAge(19)
    # Bug fix: message previously read "You're age is ...".
    print(f'Your age is {age}')
|
The casual racism of our most popular dating apps: Sites like Tinder and Grindr are littered with racial preferences and worse. Why are we so ready to let them slide?
If you don’t have enough jerks in your life, sign up for an online dating app. It will only be a matter of time before you encounter some spectacularly offensive and unsolicited people and materials.
Shallowness in online dating manifests in different ways, but is mostly about appearance. Fat people are ridiculed all the time. The plight of bald men has been well articulated by the likes of Larry David and Louis CK. And of course, anytime we talk about appearance, race will eventually come into play. Online dating apps provide fertile ground for these kinds of appearance-based biases to take root. And that’s starting to spark some very important discussions around dating and identity.
Advertisement:
OK Cupid co-founder Christian Rudder once told NPR, “Black users, especially, there's a bias against them. Every kind of way you can measure their success on a site — how people rate them, how often they reply to their messages, how many messages they get — that's all reduced.”
More recently, talk of sexual racism has exploded within the gay community, and a number of men using apps like Grindr and Scruff have come forward to discuss the race-based profiles they encounter.
The page Douchebags of Grindr features 57 pages of prize gems; screen shots of some of the most direct and exclusionary profiles around. One reads, “Not looking for Fat. Old. Or anything but White.” Another states, “I love men from different cultures. Just no Asians. I’m not racist.”
Everyone has particular preferences when it comes to sexual partners. “You’re dealing with people, who are naturally imperfect, you’re going to find those who can prefer a particular race or religion or cup size,” says relationship coach April Masini. Having a certain preference for a certain style isn’t inherently wrong. But the approach some employ when advertising them should be examined.
LGBT lifestyle expert Mikey Rox told AlterNet, “You don’t have to engage with anybody on these apps. You can choose to not respond to them. Why do you have to go out of your way to potentially hurt someone’s feelings?” In that sense, Rox says, stating a specific racial preference in one’s profile just isn’t necessary.
It’s hard to say why such overt prejudices seem so prevalent on gay dating apps in particular. Maybe it’s easier to be more direct in places where gender divisions don’t exist. Maybe others feel that maintaining certain formalities simply isn’t necessary.
Advertisement:
Rox says, “I think there is a distinguishing factor with particular gay sites. You know, Tinder is called a dating app. But Grindr and Scruff are very much hookup apps."
“On dating apps there’s more of a courtship element, where people have to mind their Ps and Qs, you know, you can’t be immediately racist on your profile. But with hookup apps, if it’s strictly about sex, people just get to the point; they don’t beat around the bush.”
He added, “We’re also talking about men, who tend to be a little bit more forward and to-the-point than women are on dating sites.”
So yes, if you don’t want to date a black person, you don’t have to. If you don’t want to date a white person, you don’t have to. But it is worth asking why those so committed to racialized dating feel the way they do. Kristen Martinez, a Seattle-based psychotherapist specializing in LGBT issues, says, “If you dig a little deeper into these motivations, you may start to notice some racist undertones to why you prefer certain ethnic groups over others.”
Advertisement:
An Australian study cited in a recent article by the Daily Beast, suggests, “Sexual racism… is closely associated with generic racist attitudes, which challenges the idea of racial attraction as solely a matter of personal preference.”
There aren’t many places left in society where you can get away with saying something like “No blacks.” Not in Brooklyn, at least. So why do such a significant portion of gay men feel comfortable writing it on their profiles? The answer most likely relates back to what we said earlier: the anonymity of the Internet provides a certain leeway to express oneself in a way that might otherwise be avoided.
And who easier to target than members of a community already hit by cemented racial stereotypes? When it comes to sex in particular, certain stigmas tend to fall on both black and Asian individuals regarding penis size. Rox says, “I talk with plenty of gay people who say that’s the reason they don’t want to hook up with these racial groups.”
Advertisement:
It’s also true that certain areas tend to be populated by certain demographics. And though most online dating apps operate in accordance to location, exclusionary politics knows no bounds.
LeNair Xavier, 44, tells AlterNet, “It’s offensive in general, but it’s even more offensive when I see a person who comes to my neighborhood — which when I was growing up was primarily black, and is just now getting gentrified — and writes a profile that says something like ‘no blacks.’”
“That comes from the whole attitude of white entitlement or white privilege. It’s like, you’re going to bring that to Bedstuy, Brooklyn? Of all places. Are you serious?”
Advertisement:
We’ve reached a point in time where diversity has become something to celebrate. If there’s one thing our techno-based society offers, it’s access to different values, different identities and different cultures. So why do some seem so resistant to embrace them?
Evolutionary psychologist Ethan Gregory suggests some current behaviors can be attributed to what helped us survive in the past. He says, “Safety for us meant sticking within the group where we had resources and mates. Strangers were potentially dangerous to interact with.”
“Fast-forward to today, where we live in a multicultural world, American culture claims itself as a melting pot, but in our homes we develop a preference for those that we are most comfortable with, and that typically means same ethnicity/race as ourselves,” he continued. “It takes open-mindedness and bravery to buck tradition and date outside of one's own ethnicity. Props to those brave souls that are willing to not only step out of the closet, but to step out of their ethnic comfort zones as well.”
Differences can be scary, especially when applied to sexual interactions. Mikey Rox explains, “I think most people are just afraid. It’s different. It’s different skin, different colors; you just sort of don’t know what to make of it. Different nationalities circumcise, some don’t. Things look different down there. And that can be frightening to someone who hasn’t seen something like that before.”
Advertisement:
There are those who will advise against placing a racial preference on one’s profile. But maybe it’s not all bad that some do. As Rox says, “There’s a silver lining, I suppose. It can give you a pretty good view into that person’s personality and how they treat other people.” |
<reponame>anuraganand789/ce<gh_stars>0
/*
* Copyright (c) 2020 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include <sys/types.h>
#include <sys/ioctl.h>
#include <stdio.h>
#include <stdlib.h>
#include <termios.h>
#include <unistd.h>
#include "ce.h"
#define TERM_MIN_ROWS 24
#define TERM_MIN_COLS 50
static struct termios cur;
static struct termios old;
static struct winsize winsz;
static int can_restore = 0;
static struct cebuf *termbuf = NULL;
/*
 * Switch the terminal into raw-ish mode for the editor: query the window
 * size, disable canonical input/echo/signals, and flip to the alternate
 * screen.  The prior termios state is saved in `old` so that
 * ce_term_restore() can undo everything.
 */
void
ce_term_setup(void)
{
	memset(&old, 0, sizeof(old));
	memset(&cur, 0, sizeof(cur));
	/* Refuse to run on terminals smaller than the UI can handle. */
	if (ioctl(STDOUT_FILENO, TIOCGWINSZ, &winsz) == -1)
		fatal("%s: ioctl(): %s", __func__, errno_s);
	if (winsz.ws_row < TERM_MIN_ROWS)
		fatal("terminal too small (minimum %d rows)", TERM_MIN_ROWS);
	if (winsz.ws_col < TERM_MIN_COLS)
		fatal("terminal too small (minimum %d columns)", TERM_MIN_COLS);
	if (tcgetattr(STDIN_FILENO, &old) == -1)
		fatal("%s: tcgetattr: %s", __func__, errno_s);
	cur = old;
	/* Deliver input byte-by-byte, without read timeout. */
	cur.c_cc[VMIN] = 1;
	cur.c_cc[VTIME] = 0;
	cur.c_oflag &= ~ONLCR;
	/*
	 * NOTE(review): ONLCR is an *output* flag; masking it out of
	 * c_iflag clears an unrelated input bit on most systems.  This
	 * line probably intends ~ICRNL -- confirm before changing.
	 */
	cur.c_iflag &= ~ONLCR;
	cur.c_lflag &= ~(ICANON | ECHO | ISIG | ECHOE);
	if (tcsetattr(STDIN_FILENO, TCSANOW, &cur) == -1)
		fatal("%s: tcsetattr: %s", __func__, errno_s);
	/* Lazily allocate the output buffer used by ce_term_write(). */
	if (termbuf == NULL) {
		if ((termbuf = calloc(1, sizeof(*termbuf))) == NULL) {
			fatal("%s: calloc(%zu): %s", __func__,
			    sizeof(*termbuf), errno_s);
		}
	}
	can_restore = 1;
	ce_term_writestr(TERM_SEQUENCE_ALTERNATE_ON);
	ce_term_writestr(TERM_SEQUENCE_RESET);
	ce_term_writestr(TERM_SEQUENCE_CLEAR);
}
/*
 * Restore the terminal to its pre-setup state: drop any pending buffered
 * output, leave the alternate screen, and put back the saved termios
 * settings.  Safe to call more than once; subsequent calls are no-ops.
 */
void
ce_term_restore(void)
{
	if (can_restore == 0)
		return;
	ce_term_discard();
	ce_term_writestr(TERM_SEQUENCE_ALTERNATE_OFF);
	ce_term_flush();
	/* Best effort; nothing useful can be done if this fails. */
	(void)tcsetattr(STDIN_FILENO, TCSANOW, &old);
	can_restore = 0;
	free(termbuf->data);
	free(termbuf);
	termbuf = NULL;
}
/* Return the terminal height (rows) captured during ce_term_setup(). */
size_t
ce_term_height(void)
{
	return (winsz.ws_row);
}
/* Return the terminal width (columns) captured during ce_term_setup(). */
size_t
ce_term_width(void)
{
	return (winsz.ws_col);
}
/*
 * Move the cursor to the given 1-based line and column.  Columns wider
 * than the terminal are folded back into [1, width], with `adj` extra
 * lines added to account for the overflow.
 */
void
ce_term_setpos(size_t line, size_t col)
{
	u_int16_t adj;
	if (col < TERM_CURSOR_MIN) {
		fatal("%s: invalid column %zu (%zu)",
		    __func__, col, ce_term_width());
	}
	if (line < TERM_CURSOR_MIN || line > ce_term_height()) {
		fatal("%s: invalid line %zu (%zu)",
		    __func__, line, ce_term_height());
	}
	/* Number of whole terminal widths the column overflows. */
	adj = col / (ce_term_width() + 1);
	/* Fold the column back into range; an exact multiple maps to width. */
	if ((col = col % ce_term_width()) == 0)
		col = ce_term_width();
	ce_term_writef(TERM_SEQUENCE_FMT_SET_CURSOR, line + adj, col);
}
/* Buffer the escape sequence selecting the given color attribute. */
void
ce_term_color(int color)
{
	ce_term_writef(TERM_SEQUENCE_FMT_SET_COLOR, color);
}
/* Buffer the attribute-reset escape sequence. */
void
ce_term_reset(void)
{
	ce_term_writestr(TERM_SEQUENCE_RESET);
}
/* Buffer a NUL-terminated string for terminal output. */
void
ce_term_writestr(const char *data)
{
	ce_term_write(data, strlen(data));
}
/*
 * printf-style formatted write into the terminal buffer.  The formatted
 * result must fit in a 2KB scratch buffer; a larger result is treated as
 * a fatal programming error rather than being truncated.
 */
void
ce_term_writef(const char *fmt, ...)
{
	int len;
	va_list args;
	char buf[2048];
	va_start(args, fmt);
	len = vsnprintf(buf, sizeof(buf), fmt, args);
	if (len == -1)
		fatal("%s: failed to format buffer", __func__);
	if ((size_t)len >= sizeof(buf))
		fatal("%s: format too large (%d) bytes", __func__, len);
	va_end(args);
	ce_term_write(buf, len);
}
/*
 * Append raw bytes to the terminal output buffer; nothing reaches the
 * screen until ce_term_flush() runs.
 */
void
ce_term_write(const void *data, size_t len)
{
	ce_buffer_append(termbuf, data, len);
}
/* Throw away any buffered-but-unflushed terminal output. */
void
ce_term_discard(void)
{
	ce_buffer_reset(termbuf);
}
/*
 * Write the buffered terminal output to stdout and reset the buffer.
 * Handles both EINTR and short writes: write(2) may transfer fewer
 * bytes than requested, so we loop until the whole buffer is out (the
 * original code silently dropped the tail of a partial write).
 */
void
ce_term_flush(void)
{
	ssize_t sz;
	size_t off;

	if (termbuf->data == NULL || termbuf->length == 0)
		return;

	off = 0;
	while (off < termbuf->length) {
		sz = write(STDOUT_FILENO,
		    (const char *)termbuf->data + off, termbuf->length - off);
		if (sz == -1) {
			if (errno == EINTR)
				continue;
			fatal("%s: write: %s", __func__, errno_s);
		}
		off += (size_t)sz;
	}

	ce_debug("wrote %zu bytes", termbuf->length);
	ce_buffer_reset(termbuf);
}
|
<filename>src/main/java/io/github/aquerr/pandobot/commands/HelpCommand.java
package io.github.aquerr.pandobot.commands;
import io.github.aquerr.pandobot.PandoBot;
import io.github.aquerr.pandobot.annotations.BotCommand;
import io.github.aquerr.pandobot.entities.VTEAMRoles;
import net.dv8tion.jda.core.EmbedBuilder;
import net.dv8tion.jda.core.entities.MessageChannel;
import net.dv8tion.jda.core.entities.User;
import java.awt.*;
import java.util.List;
import java.util.Map;
@BotCommand(minRole = VTEAMRoles.EVERYONE, argsCount = 0)
public class HelpCommand implements ICommand
{
    /**
     * Sends an embed to the invoking channel listing the bot's commands,
     * grouped by the role required to use them.  The listing text is
     * hard-coded; the registered command map is looked up but currently
     * unused (see the commented-out dynamic rendering kept below).
     *
     * @param user    the invoking user (unused)
     * @param channel the channel the help embed is sent to
     * @param args    command arguments (none expected)
     * @return always {@code true}; this command does not fail
     */
    @Override
    public boolean execute(User user, MessageChannel channel, List<String> args)
    {
        // NOTE(review): fetched but unused by the hard-coded embed below.
        Map<List<String>, ICommand> commands = PandoBot.getInstance().getCommandManager().getCommands();
        EmbedBuilder embedBuilder = new EmbedBuilder();
        embedBuilder.setColor(new Color(68, 158, 226));
        embedBuilder.setTitle("Dostępne komendy u Pandy: ");
        embedBuilder.addField("Komendy", ":small_blue_diamond: Użytkownik:\n" +
                ":small_orange_diamond: !pomoc · wyświetla spis dostępnych rang\n" +
                ":small_orange_diamond: !gif (txt) · wysyła gifa\n" +
                ":small_orange_diamond: !ankieta (nazwa ankiety) (reakcja A) (reakcja B) (reakcja C) · tworzy ankietę\n" +
                "\n" +
                ":small_blue_diamond: Moderator:\n" +
                ":small_orange_diamond: !usun (ilość) · usuwa wiadomości\n" +
                "\n" +
                ":small_blue_diamond: Właściciel:\n" +
                ":small_orange_diamond: !opis (txt) · ustawia opis bota", false);
        // for (Map.Entry<List<String>, ICommand> commandEntry : commands.entrySet())
        // {
        //     String commandAliases = String.join(", ", commandEntry.getKey());
        //
        //     embedBuilder.addField(commandAliases, "", false);
        // }
        // embedBuilder.setDescription("- - - - - - - - - - - - - - - -" + "\n" + ":heart: - " + args.get(1) + "\n\n" +
        //         ":thumbsup: - " + args.get(2) + "\n\n" +
        //         ":thumbsdown: - " + args.get(3));
        channel.sendMessage(embedBuilder.build()).queue();
        return true;
    }

    /** @return the usage string shown to users */
    @Override
    public String getUsage()
    {
        return "!help";
    }
}
|
/**
 * A Tag Action allows a user to tag a build. Repo doesn't support a solid
 * tag method, so right now we just display the static manifest information
 * needed to recreate the exact state of the repository when the build was
 * run.
 */
@ExportedBean(defaultVisibility = 999)
public class SCMTagAction extends AbstractScmTagAction {
    /**
     * Constructs the tag action object. Just call the superclass.
     *
     * @param build Build which we are interested in tagging
     */
    SCMTagAction(final Run<?, ?> build) {
        super(build);
    }

    /**
     * Returns the filename to use as the badge. Called by the default
     * badge jelly file.
     */
    public String getIconFileName() {
        return "star.gif";
    }

    /**
     * Returns the display name to use for the tag action. Called by the
     * default badge jelly file.
     */
    public String getDisplayName() {
        // NOTE(review): "Dynam Soft SCM any Where" looks like it was meant
        // to read "DynamSoft SCM Anywhere" -- confirm before changing, as
        // this string is user-visible in the Jenkins UI.
        return "Dynam Soft SCM any Where";
    }

    /** Tagging is unsupported; builds always report as untagged. */
    @Override
    public boolean isTagged() {
        return false;
    }
}
    /**
     * Recovers all previously delivered but not acknowledged messages.
     *
     * Walks the endpoint's delivery chain, and for every envelope that was
     * handed to the application but not yet settled: increments its
     * redelivery count, marks it for first-position enqueue, clears its
     * delivered flag, and collects it.  The collected envelopes are then
     * re-dispatched in reverse order so that, combined with enqueue-first,
     * the original delivery order is preserved.
     *
     * @throws Exception if an error occurs while performing the recover.
     */
    public void recover() throws Exception {
        LOG.debug("Session Recover for consumer: {}", getResourceInfo().getId());
        ArrayList<JmsInboundMessageDispatch> redispatchList = new ArrayList<JmsInboundMessageDispatch>();
        Delivery delivery = getEndpoint().head();
        while (delivery != null) {
            // Capture the current node before advancing; the chain may be
            // mutated by redelivery later.
            Delivery current = delivery;
            delivery = delivery.next();
            // Deliveries without an envelope context are still incomplete.
            if (!(current.getContext() instanceof JmsInboundMessageDispatch)) {
                LOG.debug("{} Found incomplete delivery with no context during recover processing", AmqpConsumer.this);
                continue;
            }
            JmsInboundMessageDispatch envelope = (JmsInboundMessageDispatch) current.getContext();
            if (envelope.isDelivered()) {
                envelope.getMessage().getFacade().setRedeliveryCount(
                    envelope.getMessage().getFacade().getRedeliveryCount() + 1);
                envelope.setEnqueueFirst(true);
                envelope.setDelivered(false);
                redispatchList.add(envelope);
            }
        }
        // Iterate newest-first; each enqueue-first push restores FIFO order.
        ListIterator<JmsInboundMessageDispatch> reverseIterator = redispatchList.listIterator(redispatchList.size());
        while (reverseIterator.hasPrevious()) {
            deliver(reverseIterator.previous());
        }
    }
<filename>ron/test/meta.cc
#include <iostream>
#include <cassert>
#include "../ron.hpp"
#define DEBUG 1
using namespace ron;
using namespace std;
typedef TextFrame Frame;
typedef Frame::Cursor Cursor;
typedef Frame::Builder Builder;
// Placeholder test: builds a sample op string but performs no checks yet.
// NOTE(review): `chain` is unused; the actual meta-frame assertions appear
// to be pending (see the commented-out tests further down this file).
void test_simple_meta () {
    string chain = "@1gN97b+gritzko :lww! 'key' 'value'";
}
/*void test_ct_basic () {
Uuid head{"1i08e4+gYpLcnUnF6"};
string frame{"@1i08e4+gYpLcnUnF6 :rga! 'a', 'b', 'c',"};
CT ct;
TextFrame::Cursor cur{frame};
ct.AppendAll(cur);
auto i = ct.begin();
assert(*i==head);
++i;
assert(*i==head.inc());
++i;
assert(*i==head.inc(2));
++i;
assert(*i==head.inc(3));
--i;
assert(*i==head.inc(2));
--i;
assert(*i==head.inc());
i.CausallyPrev();
assert(*i==head);
}*/
/*void test_ct_path () {
string frame{"@1i08e4+path :rga! 'a', @1i08z+path 'b', @1i08k+path :1i08e4+path 'c', 'd',"};
fsize_t depths[] = {0, 1, 2, 1, 2};
Cursor c{frame};
CTPath path{c.id()};
int p=0;
assert(path.depth() == depths[p]);
assert(path.position() == p);
while (c.Next()) {
++p;
path.AddNext(c);
assert(path.depth() == depths[p]);
assert(path.position() == p);
}
assert(p== sizeof(depths)/sizeof(fsize_t) - 1);
}
void test_ct_path_fail () {
string frame{"@1i08e4+path :rga! 'a', @1i08z+path 'b', @1i08k+path :1i0FAIL+path 'c',"};
Cursor c{frame};
CTPath path{c.id()};
c.Next();
assert(path.AddNext(c)); //a
c.Next();
assert(path.AddNext(c)); //b
c.Next();
assert(path.AddNext(c)==Status::CAUSEBREAK); //c
}*/
// Exercises inc_stack<fsize_t>: a consecutive 0..999 ramp keeps
// span_size() at 1, a non-consecutive push forces a second span, forward
// iteration yields the pushed sequence, and pop_back shrinks both counts.
void test_inc_stack () {
    inc_stack<fsize_t> is;
    for(fsize_t i=0; i<1000; i++) {
        is.push_back(i);
    }
    assert(is.size()==1000);
    assert(is.span_size()==1);
    // A value that does not continue the ramp starts a new span.
    is.push_back(0);
    assert(is.size()==1001);
    assert(is.span_size()==2);
    // Iteration reproduces the pushed values in order.
    int l=0;
    for(auto i=is.begin(); i!=is.end(); ++i, ++l) {
        assert(*i==l%1000);
    }
    assert(l==1001);
    is.pop_back();
    is.pop_back();
    assert(is.size()==999);
    assert(is.span_size()==1);
}
// Test driver; most cases are currently disabled (kept above for reference).
int main (int argn, char** args) {
    test_simple_meta();
    test_inc_stack();
    //test_ct_basic();
    //test_ct_path();
    //test_ct_path_fail();
    //test_ct_scan_all0();
    //test_ct_scan_rm();
    //test_ct_scan_rmun();
}
|
/**
 * To be used when dealing with a 'JOIN' secondary index type.
 *
 * Wraps an Accumulo {@code Scanner}'s iterator and decodes each row entry
 * into a pair of (primary index id, primary row id) extracted from the
 * entry's column qualifier.
 */
public class AccumuloSecondaryIndexJoinEntryIteratorWrapper<T> extends
		SecondaryIndexEntryIteratorWrapper<T, Pair<ByteArrayId, ByteArrayId>>
{
	private final static Logger LOGGER = LoggerFactory.getLogger(AccumuloSecondaryIndexJoinEntryIteratorWrapper.class);
	private final Scanner scanner;

	/**
	 * @param scanner the Accumulo scanner whose entries will be decoded;
	 *            retained so {@link #close()} can release it
	 * @param adapter data adapter associated with the decoded entries
	 */
	public AccumuloSecondaryIndexJoinEntryIteratorWrapper(
			final Scanner scanner,
			final DataAdapter<T> adapter ) {
		super(
				scanner.iterator(),
				adapter);
		this.scanner = scanner;
	}

	/**
	 * Decodes a raw scanner row into the (primary index id, primary row id)
	 * pair stored in its column qualifier; logs and returns null when the
	 * row is not an Accumulo key/value entry.
	 */
	@SuppressWarnings("unchecked")
	@Override
	protected Pair<ByteArrayId, ByteArrayId> decodeRow(
			final Object row ) {
		Entry<Key, Value> entry = null;
		try {
			entry = (Entry<Key, Value>) row;
		}
		catch (final ClassCastException e) {
			LOGGER.error("Row is not an accumulo row entry.");
			return null;
		}
		final byte[] cqBytes = entry.getKey().getColumnQualifierData().getBackingArray();
		return Pair.of(
				SecondaryIndexUtils.getPrimaryIndexId(cqBytes),
				SecondaryIndexUtils.getPrimaryRowId(cqBytes));
	}

	/** Closes the underlying scanner. */
	@Override
	public void close()
			throws IOException {
		scanner.close();
	}
}
package generate
const appTemplate = `package {{packageName}}
import (
"github.com/gin-gonic/gin"
"net/http"
"time"
)
func DemoAppCreate(c *gin.Context) {
data := gin.H{
"title": "create",
"nowDate": time.Now().Format("2006-03-01 00:00:00"),
"content": "POST请求演示",
}
c.HTML(http.StatusOK, "default/demo/create.tmpl", data)
}
func DemoAppDelete(c *gin.Context) {
data := gin.H{
"title": "delete",
"nowDate": time.Now().Format("2006-03-01 00:00:00"),
"content": "DELETE 请求演示",
}
c.HTML(http.StatusOK, "default/demo/delete.tmpl", data)
}
func DemoAppUpdate(c *gin.Context) {
data := gin.H{
"title": "put",
"nowDate": time.Now().Format("2006-03-01 00:00:00"),
"content": "PUT 请求演示",
}
c.HTML(http.StatusOK, "default/demo/update.tmpl", data)}
func DemoAppGet(c *gin.Context) {
data := gin.H{
"title": "get",
"nowDate": time.Now().Format("2006-03-01 00:00:00"),
"content": "GET 请求演示",
}
c.HTML(http.StatusOK, "default/demo/list.tmpl", data)
}
func DemoAppGetDetail(c *gin.Context) {
// db, _ := common.GetDefaultDbConn()
data := gin.H{
"title": "get",
"nowDate": time.Now().Format("2006-03-01 00:00:00"),
"content": "GET 请求演示",
}
c.HTML(http.StatusOK, "default/demo/detail.tmpl", data)
}
` |
/**
* Class ItemTypePredicate identifies and returns the class that should be used
* to decrypt the json rule as this has been given from the json file
*
* @author nikolaos.papageorgiou
*
*/
public class ItemTypePredicate extends RuntimeTypeAdapterPredicate {
public ItemTypePredicate() {
}
@Override
public String process(JsonElement element) {
JsonObject obj = element.getAsJsonObject();
if (obj.toString().contains("\"monitor\":\"java\"")) {
return "JavaRule";
} else if (obj.toString().contains("\"monitor\":\"machine\"")) {
return "MachineRule";
} else if (obj.toString().contains("\"monitor\":\"analytics\"")) {
return "AnalyticsRule";
} else if (obj.toString().contains("\"monitor\":\"universal\"")) {
return "UniversalRule";
} else if (obj.toString().contains("\"monitor\":\"dotnet\"")) {
return "DotNetRule";
} else if (obj.toString().contains("\"monitor\":\"network\"")) {
return "NetworkRule";
}
return "JavaRule";
}
} |
Thoraco-abdominal bypass as a method of evaluating vascular grafts in the dog.
Seven Vasculour D grafts, five Gore-Tex grafts and seven Solco-grafts, 8 mm by 30 cm, were implanted as thoraco-abdominal bypasses in dogs. Sixteen were retrieved at two months. Graft size was assessed angiographically in representative dogs of each group prior to sacrifice. Initial examination of the retrieved specimens revealed pannus extensions at both anastomoses on all graft types, and a variable midgraft appearance from smooth and glistening to partially thrombus coated. Histological assessment of graft segments revealed widely differing healing responses on the three graft types. Tissue attachment to both graft surfaces and varying degrees of tissue incorporation were observed in the Vasculour D and Gore-Tex prostheses. The Solco-graft specimens showed no evidence of tissue incorporation of the wall at two months. Tissue on the graft surfaces remains loosely attached and unincorporated. Pannus extensions were thickest on the Solco-graft. Variably thin fibrin is scattered over the luminal surface of the three graft types. Vascularization of the wall, particularly the internal capsulae, was noted in the Vasculour D grafts. Although biological responses in the dog are difficult to extrapolate to man, the types of comparison testing allowed by the use of the thoraco-abdominal by-pass procedure can provide valuable information on degree of dilation, relative tissue attachment, tissue incorporation into various types of prostheses and kidney infarcts study. The greatest value of the thoraco-abdominal by-pass lies in graft sizes and length without special regard for patency: assessment of graft size post-implantation and structural changes is then made possible.
#pragma once
#include <cstdlib>
#include <iostream>
#include <boost/asio.hpp>
#include "Logger.hpp"
// Thin subclass of boost's UDP endpoint type with no added members.
// NOTE(review): presumably an extension point for project-specific endpoint
// behavior — confirm before removing.
class UdpEndpoint : public boost::asio::ip::udp::endpoint {
};
// Asynchronous UDP echo server: every datagram received is sent back to its
// sender. Receive and send handlers re-arm each other so exactly one
// operation is in flight at a time.
class UdpServer
{
public:
    // Binds a UDP/IPv4 socket on `port` and starts the receive loop.
    UdpServer(boost::asio::io_service& io_context, short port)
        : socket_(io_context, boost::asio::ip::udp::endpoint(boost::asio::ip::udp::v4(), port)) {
        do_receive();
    }

    // Arm one asynchronous receive. On success the payload is echoed via
    // do_send() (whose completion re-arms the receive); on error or an empty
    // datagram the receive is re-armed directly.
    void do_receive() {
        socket_.async_receive_from(
            boost::asio::buffer(data_, max_length),
            sender_endpoint_,
            [this](boost::system::error_code ec, std::size_t bytes_recvd)
            {
                if (!ec && bytes_recvd > 0){
                    LOGV << sender_endpoint_;
                    // Track the previously seen peer so a change of sender is
                    // logged once. BUG FIX: the original never refreshed
                    // `lastEndpoint`, so after the first sender change the
                    // mismatch was logged on every subsequent packet.
                    auto static lastEndpoint = sender_endpoint_;
                    if (lastEndpoint != sender_endpoint_) {
                        LOGV << "lastEndpoint!=sender_endpoint_";
                        lastEndpoint = sender_endpoint_;
                    }
                    do_send(bytes_recvd);
                }
                else{
                    do_receive();
                }
            });
    }

    // Echo `length` bytes of data_ back to the peer recorded in
    // sender_endpoint_, then resume receiving (errors are ignored — best
    // effort echo).
    void do_send(std::size_t length){
        socket_.async_send_to(
            boost::asio::buffer(data_, length),
            sender_endpoint_,
            [this](boost::system::error_code /*ec*/, std::size_t /*bytes_sent*/){
                do_receive();
            });
    }

private:
    boost::asio::ip::udp::socket socket_;
    UdpEndpoint sender_endpoint_;       // peer of the most recent datagram
    enum { max_length = 1<<10 };        // 1 KiB receive buffer
    char data_[max_length];
};
Schumer: Senate would not override a Keystone veto
A top Senate Democrat predicted on Sunday that Republicans will not attract enough Democratic votes to override any veto by President Barack Obama of legislation to approve the Keystone XL pipeline.
The pipeline is expected to be one of the early tests for the new Republican-controlled Congress. Democrats plan to introduce several amendments to the bill, including a proposal that would require that the oil sent through the $8 billion project be used in America and not exported to other countries, Sen. Chuck Schumer (D-N.Y.) said on CBS’ “Face the Nation.”
Story Continued Below
“We’re going to say that the oil should stay here,” he said.
Other proposals Democrats plan to offer would require steel used for the pipeline to be manufactured in the U.S. and provide investments in clean energy as well.
But even if those provisions are adopted, Schumer was skeptical the bill would garner the two-thirds Senate majority needed to override a veto.
“These amendments will make it better, but certainly not good enough at this time,” Schumer said. “I think there will be enough Democratic votes to sustain the president’s veto.”
Republicans are pitching the bill as a plan to create more jobs. And the president has not said whether he would veto such a measure but has raised concerns the project would be more beneficial to Canadian oil companies than American workers. Experts picked from both sides are expected to weigh in when the Senate Energy and Natural Resources Committee holds a hearing on the issue this week. |
def forward_pass_on_convolutions(self, x):
    """Run x through the configured feature extractor(s) and hook gradients.

    Selects the backbone by ``self.type`` (0: DataParallel module + ReLU,
    1: plain model, 3: DataParallel module, otherwise: concatenation of the
    abn/tb extractors along the channel dim). Registers ``self.save_gradient``
    as a backward hook on the resulting feature map.

    Returns the feature tensor twice (conv activations and forward output are
    the same object at this point in the pipeline).
    """
    if self.type == 0:
        features = self.model.module.main.features(x)
        features = F.relu(features)
    elif self.type == 1:
        features = self.model.main.features(x)
    elif self.type == 3:
        features = self.model.module.main.features(x)
    else:
        # Dual-branch: fuse both extractors channel-wise.
        abn_features = self.model.abn_fe.main.features(x)
        tb_features = self.model.tb_fe.main.features(x)
        features = torch.cat((abn_features, tb_features), dim=1)
    features.register_hook(self.save_gradient)
    return features, features
/*
Copyright 2016 kanreisa
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { Writable } from "stream";
import EventEmitter = require("eventemitter3");
import { TsStreamLite, TsCrc32, TsChar, TsLogo, tsDataModule } from "@chinachu/aribts";
import { StreamInfo, getTimeFromMJD } from "./common";
import * as log from "./log";
import EPG from "./EPG";
import status from "./status";
import _ from "./_";
import { getProgramItemId } from "./Program";
import Service from "./Service";
import ServiceItem from "./ServiceItem";
/** Construction options for TSFilter. */
interface TSFilterOptions {
    /** Destination stream for the filtered TS. Omitting it disables output entirely. */
    readonly output?: Writable;
    /** Target original network id; also selects CDT (GR) vs DSM-CC (BS/CS) logo parsing. */
    readonly networkId?: number;
    /** When set, only this service's PIDs are provided downstream. */
    readonly serviceId?: number;
    /** When set, output is gated on this event being current (EIT p/f). */
    readonly eventId?: number;
    readonly parseNIT?: boolean;
    readonly parseSDT?: boolean;
    readonly parseEIT?: boolean;
    /** Non-zero enables TSMF multiplex splitting; value is the relative TS number. */
    readonly tsmfRelTs?: number;
}
/** Fixed MPEG-TS packet length in bytes. */
const PACKET_SIZE = 188;

/** PSI/SI PIDs always passed through when a specific service is selected. */
const PROVIDE_PIDS = [
    0x0000, // PAT
    0x0001, // CAT
    0x0010, // NIT
    0x0011, // SDT
    0x0012, // EIT
    0x0013, // RST
    0x0014, // TDT
    0x0023, // SDTT
    0x0024, // BIT
    0x0028, // SDTT
    0x0029 // CDT
];

const DSMCC_BLOCK_SIZE = 4066; // ARIB TR-B15

// DSM-CC module names that carry station logo data.
const LOGO_DATA_NAME_BS = Buffer.from("LOGO-05"); // ARIB STD-B21, ARIB TR-B15
const LOGO_DATA_NAME_CS = Buffer.from("CS_LOGO-05"); // ARIB STD-B21, ARIB TR-B15
/** Per-service EIT completeness tracking, split into basic vs extended tables. */
interface BasicExtState {
    basic: {
        flags: FlagState[];
        lastFlagsId: number;
    };
    extended: {
        flags: FlagState[];
        lastFlagsId: number;
    };
}

/** Section-arrival bitmaps for one EIT sub-table (see _updateEpgState). */
interface FlagState {
    flag: Buffer;           // bit set = section received
    ignore: Buffer;         // bit set = section not expected / skippable
    version_number: number; // -1 until the first section of this sub-table arrives
}

/** Progress record for one DSM-CC logo-module download (keyed by downloadId). */
interface DownloadData {
    downloadId: number;
    // blockSize: number; // 4066
    moduleId: number;
    moduleVersion: number;
    moduleSize: number;
    loadedBytes: number;
    data?: Buffer; // deleted once the module is fully assembled
}
/**
 * Demultiplexes and filters an MPEG-TS stream: passes through the PIDs of a
 * selected service/event, rewrites the PAT accordingly, and optionally parses
 * NIT/SDT/EIT (EPG) and logo data (CDT / DSM-CC). Emits "network",
 * "services", "epgReady", "close" and "end" events.
 */
export default class TSFilter extends EventEmitter {
    // per-PID packet/drop counters, exposed to callers
    streamInfo: StreamInfo = {};

    // output
    private _output: Writable;

    // options
    private _provideServiceId: number;
    private _provideEventId: number;
    private _parseNIT = false;
    private _parseSDT = false;
    private _parseEIT = false;
    private _targetNetworkId: number;
    private _enableParseCDT = false;
    private _enableParseDSMCC = false;

    // tsmf
    private _tsmfEnableTsmfSplit = false;
    private _tsmfSlotCounter = -1;
    private _tsmfRelativeStreamNumber: number[] = [];
    private _tsmfTsNumber = 0;

    // aribts
    private _parser = new TsStreamLite();

    // epg
    private _epg: EPG;
    private _epgReady = false;
    private _epgState: { [networkId: number]: { [serviceId: number]: BasicExtState } } = {};

    // buffer
    private _packet = Buffer.allocUnsafeSlow(PACKET_SIZE).fill(0); // partial-packet carry between write() calls
    private _offset = -1; // bytes currently held in _packet
    private _buffer: Buffer[] = []; // packets awaiting output
    private _patsec = Buffer.allocUnsafeSlow(PACKET_SIZE - 4 - 1).fill(0); // TS header, pointer_field
    private _patCRC = Buffer.allocUnsafeSlow(4).fill(0); // last seen PAT CRC, for dedup

    // state
    private _closed = false;
    private _ready = true; // false while waiting for the target service/event to appear
    private _providePids: Set<number> = null; // `null` to provides all
    private _parsePids = new Set<number>();
    private _tsid = -1;
    private _serviceIds = new Set<number>();
    private _parseServiceIds = new Set<number>();
    private _pmtPid = -1;
    private _pmtTimer: NodeJS.Timer;
    private _streamTime: number = null; // from TOT; used to ignore past EIT segments
    private _essMap = new Map<number, number>(); // <serviceId, pid>
    private _essEsPids = new Set<number>();
    private _dlDataMap = new Map<number, DownloadData>();
    private _logoDataTimer: NodeJS.Timer;
    private _provideEventLastDetectedAt = -1;
    private _provideEventTimeout: NodeJS.Timer = null;

    /** Number divisible by a multiple of 188 */
    private _maxBufferBytesBeforeReady: number = (() => {
        let bytes = _.config.server.maxBufferBytesBeforeReady || 1024 * 1024 * 8;
        bytes = bytes - bytes % PACKET_SIZE;
        return Math.max(bytes, PACKET_SIZE);
    })();
    private _eventEndTimeout = _.config.server.eventEndTimeout || 1000;
    constructor(options: TSFilterOptions) {
        super();

        // tsmf splitting is enabled by any non-zero relative TS number
        const enabletsmf = options.tsmfRelTs || 0;
        if (enabletsmf !== 0) {
            this._tsmfEnableTsmfSplit = true;
            this._tsmfTsNumber = options.tsmfRelTs;
        }

        this._targetNetworkId = options.networkId || null;
        this._provideServiceId = options.serviceId || null;
        this._provideEventId = options.eventId || null;

        if (this._provideServiceId !== null) {
            // restrict passthrough to PSI/SI until the service's PMT is found
            this._providePids = new Set(PROVIDE_PIDS);
            this._ready = false;
        }
        if (this._provideEventId !== null) {
            this._ready = false;

            // schedule a watchdog around the program's expected end time
            const program = _.program.get(
                getProgramItemId(
                    this._targetNetworkId,
                    this._provideServiceId,
                    this._provideEventId
                )
            );
            if (program) {
                let timeout = program.startAt + program.duration - Date.now();
                // NOTE(review): duration === 1 presumably marks "unknown
                // duration" — confirm; such programs get a 3-minute grace.
                if (program.duration === 1) {
                    timeout += 1000 * 60 * 3;
                }
                if (timeout < 0) {
                    timeout = 1000 * 60 * 3;
                }
                this._provideEventTimeout = setTimeout(() => this._observeProvideEvent(), timeout);
            }
        }
        if (options.output) {
            this._output = options.output;
            this._output.once("finish", this._close.bind(this));
            this._output.once("close", this._close.bind(this));
        } else {
            // no output stream: parse-only mode, never becomes "ready"
            this._provideServiceId = null;
            this._provideEventId = null;
            this._providePids = new Set();
            this._ready = false;
        }
        if (options.parseNIT === true) {
            this._parseNIT = true;
        }
        if (options.parseSDT === true) {
            this._parseSDT = true;
        }
        if (options.parseEIT === true) {
            this._parseEIT = true;
        }
        if (this._targetNetworkId) {
            // logo acquisition method depends on the network type
            if (this._targetNetworkId === 4) { // ARIB TR-B15 (BS/CS)
                this._enableParseDSMCC = true;
            } else {
                this._enableParseCDT = true;
            }
        }

        this._parser.on("pat", this._onPAT.bind(this));
        this._parser.on("pmt", this._onPMT.bind(this));
        this._parser.on("nit", this._onNIT.bind(this));
        this._parser.on("sdt", this._onSDT.bind(this));
        this._parser.on("eit", this._onEIT.bind(this));
        this._parser.on("tot", this._onTOT.bind(this));

        this.once("end", this._close.bind(this));
        this.once("close", this._close.bind(this));

        log.info("TSFilter: created (serviceId=%d, eventId=%d)", this._provideServiceId, this._provideEventId);

        if (this._ready === false) {
            log.info("TSFilter: waiting for serviceId=%d, eventId=%d", this._provideServiceId, this._provideEventId);
        }

        ++status.streamCount.tsFilter;
    }
    /** `true` once _close() has run; write() throws after that. */
    get closed(): boolean {
        return this._closed;
    }
    /**
     * Feed raw TS bytes. Reassembles 188-byte packets across chunk
     * boundaries, re-synchronizes on the 0x47 sync byte, then filters and
     * forwards them. Throws if the filter is already closed.
     */
    write(chunk: Buffer): void {
        if (this._closed) {
            throw new Error("TSFilter has closed already");
        }

        let offset = 0;
        const length = chunk.length;
        const packets: Buffer[] = [];

        // complete a packet left over from the previous write()
        if (this._offset > 0) {
            if (length >= PACKET_SIZE - this._offset) {
                offset = PACKET_SIZE - this._offset;
                packets.push(Buffer.concat([
                    this._packet.slice(0, this._offset),
                    chunk.slice(0, offset)
                ]));
                this._offset = 0;
            } else {
                chunk.copy(this._packet, this._offset);
                this._offset += length;

                // chunk drained
                return;
            }
        }

        for (; offset < length; offset += PACKET_SIZE) {
            // sync byte (0x47) verifying
            if (chunk[offset] !== 71) {
                // slide forward one byte (net effect with the loop increment)
                offset -= PACKET_SIZE - 1;
                continue;
            }

            if (length - offset >= PACKET_SIZE) {
                packets.push(chunk.slice(offset, offset + PACKET_SIZE));
            } else {
                // tail fragment: stash it for the next write()
                chunk.copy(this._packet, 0, offset);
                this._offset = length - offset;
            }
        }

        this._processPackets(packets);

        if (this._buffer.length !== 0) {
            // flush only when ready and the sink is not back-pressured;
            // otherwise trim the holding buffer to its byte cap
            if (this._ready && this._output.writableLength < this._output.writableHighWaterMark) {
                this._output.write(Buffer.concat(this._buffer));
                this._buffer.length = 0;
            } else {
                const head = this._buffer.length - (this._maxBufferBytesBeforeReady / PACKET_SIZE);
                if (head > 0) {
                    this._buffer.splice(0, head);
                }
            }
        }
    }
    /** Stream-like terminator; delegates to the idempotent _close(). */
    end(): void {
        this._close();
    }

    /** Alias of end(); both converge on _close(). */
    close(): void {
        this._close();
    }
    /**
     * Per-packet pipeline: tsmf slot selection, drop of NULL/errored packets,
     * routing of PSI/SI packets to the section parser, PID filtering, PAT
     * rewriting, and packet accounting. Parsing is deferred to the next tick.
     */
    private _processPackets(packets: Buffer[]): void {
        const parsingBuffers: Buffer[] = [];

        for (let packet of packets) {
            const pid = packet.readUInt16BE(1) & 0x1FFF;

            // tsmf
            if (this._tsmfEnableTsmfSplit) {
                if (pid === 0x002F) {
                    // frame header: validate sync word and reload the slot map
                    const tsmfFlameSync = packet.readUInt16BE(4) & 0x1FFF;
                    if (tsmfFlameSync !== 0x1A86 && tsmfFlameSync !== 0x0579) {
                        continue;
                    }

                    this._tsmfRelativeStreamNumber = [];
                    for (let i = 0; i < 26; i++) {
                        // two 4-bit stream numbers per byte -> 52 slots
                        this._tsmfRelativeStreamNumber.push((packet[73 + i] & 0xf0) >> 4);
                        this._tsmfRelativeStreamNumber.push(packet[73 + i] & 0x0f);
                    }

                    this._tsmfSlotCounter = 0;
                    continue;
                }

                // drop everything until a valid frame header has been seen
                if (this._tsmfSlotCounter < 0 || this._tsmfSlotCounter > 51) {
                    continue;
                }

                this._tsmfSlotCounter++;

                // keep only slots belonging to our relative TS number
                if (this._tsmfRelativeStreamNumber[this._tsmfSlotCounter - 1] !== this._tsmfTsNumber) {
                    continue;
                }
            }

            // NULL
            if (pid === 0x1FFF) {
                continue;
            }

            // transport_error_indicator
            if ((packet[1] & 0x80) >> 7 === 1) {
                if (this.streamInfo[pid]) {
                    ++this.streamInfo[pid].drop;
                }
                continue;
            }

            // parse
            if (pid === 0) {
                // PAT: parse only when its CRC changed (section update)
                const targetStart = packet[7] + 4;
                if (targetStart + 4 > 188) {
                    // out of range. this packet is broken.
                    if (this.streamInfo[pid]) {
                        ++this.streamInfo[pid].drop;
                    }
                    continue; // drop
                }

                if (this._patCRC.compare(packet, targetStart, targetStart + 4) !== 0) {
                    packet.copy(this._patCRC, 0, targetStart, targetStart + 4);
                    parsingBuffers.push(packet);
                }
            } else if (
                (pid === 0x12 && (this._parseEIT || this._provideEventId !== null)) ||
                pid === 0x14 ||
                this._parsePids.has(pid)
            ) {
                parsingBuffers.push(packet);
            }

            // while not ready, forward only EIT (event gating) packets
            if (this._ready === false && (pid === 0x12 || this._provideEventId === null)) {
                continue;
            }
            if (this._providePids !== null && this._providePids.has(pid) === false) {
                continue;
            }

            // PAT (0) rewriting
            if (pid === 0 && this._pmtPid !== -1) {
                packet = Buffer.from(packet); // copy-on-write before patching
                this._patsec.copy(packet, 5, 0);
            }

            // packet counter
            if (this.streamInfo[pid] === undefined) {
                this.streamInfo[pid] = {
                    packet: 0,
                    drop: 0
                };
            }
            ++this.streamInfo[pid].packet;

            this._buffer.push(packet);
        }

        if (parsingBuffers.length !== 0) {
            // defer section parsing off the hot write path
            setImmediate(() => {
                if (this._closed) { return; }
                this._parser.write(parsingBuffers);
                parsingBuffers.length = 0;
            });
        }
    }
    /**
     * PAT handler: records the TS id and service list, detects NIT / ESS /
     * target-service PMT PIDs, and builds the single-program replacement PAT
     * section used by _processPackets().
     */
    private _onPAT(pid: number, data: any): void {
        this._tsid = data.transport_stream_id;
        this._serviceIds = new Set();
        this._parseServiceIds = new Set();

        for (const program of data.programs) {
            const serviceId = program.program_number as number;

            if (serviceId === 0) {
                // program_number 0 carries the NIT PID, not a service
                const NIT_PID = program.network_PID;
                log.debug("TSFilter#_onPAT: detected NIT PID=%d", NIT_PID);

                if (this._parseNIT) {
                    this._parsePids.add(NIT_PID);
                }
                continue;
            }

            // detect ESS PMT PID
            if (
                // for future use
                // (this._targetNetworkId !== 4 && serviceId >= 0xFFF0 && serviceId <= 0xFFF5) || // ARIB TR-B14 (GR)
                (this._targetNetworkId === 4 && serviceId === 929) // ARIB TR-B15 (BS/CS)
            ) {
                const essPmtPid = program.program_map_PID;
                this._essMap.set(serviceId, essPmtPid);
                log.debug("TSFilter#_onPAT: detected ESS PMT PID=%d as serviceId=%d", essPmtPid, serviceId);
                continue;
            }

            this._serviceIds.add(serviceId);
            const item = this._targetNetworkId === null ? null : _.service.get(this._targetNetworkId, serviceId);

            log.debug(
                "TSFilter#_onPAT: detected PMT PID=%d as serviceId=%d (%s)",
                program.program_map_PID, serviceId, item ? item.name : "unregistered"
            );

            // detect PMT PID by specific service id
            if (serviceId === this._provideServiceId) {
                if (this._pmtPid !== program.program_map_PID) {
                    this._pmtPid = program.program_map_PID;

                    if (this._providePids.has(this._pmtPid) === false) {
                        this._providePids.add(this._pmtPid);
                    }
                    if (this._parsePids.has(this._pmtPid) === false) {
                        this._parsePids.add(this._pmtPid);
                    }

                    // edit PAT section
                    // (hand-built 1-program PAT pointing at our PMT only)
                    data._raw.copy(this._patsec, 0, 0, 8);

                    // section_length
                    this._patsec[2] = 17; // 0x11

                    // network_number = 0
                    this._patsec[8] = 0;
                    this._patsec[9] = 0;
                    // network_PID
                    this._patsec[10] = 224;
                    this._patsec[11] = 16;

                    // program_number
                    this._patsec[12] = serviceId >> 8;
                    this._patsec[13] = serviceId & 255;
                    // program_map_PID
                    this._patsec[14] = (this._pmtPid >> 8) + 224;
                    this._patsec[15] = this._pmtPid & 255;

                    // calculate CRC32
                    this._patsec.writeInt32BE(TsCrc32.calc(this._patsec.slice(0, 16)), 16);

                    // padding
                    this._patsec.fill(0xff, 20);
                }
            }

            if (this._parseEIT && item) {
                // track every known service of this network for EPG completeness
                for (const service of _.service.findByNetworkId(this._targetNetworkId)) {
                    if (this._parseServiceIds.has(service.serviceId) === false) {
                        this._parseServiceIds.add(service.serviceId);
                        log.debug("TSFilter#_onPAT: parsing serviceId=%d (%s)", service.serviceId, service.name);
                    }
                }
            }
        }

        if (this._parseSDT) {
            if (this._parsePids.has(0x11) === false) {
                this._parsePids.add(0x11);
            }
        }
    }
    /**
     * PMT handler. For ESS (logo carousel) PMTs it only registers the ES
     * PIDs; for the target service it marks the filter ready and adds the
     * CA/PCR/ES PIDs to the passthrough set. Re-parses each PMT at most once
     * per second.
     */
    private _onPMT(pid: number, data: any): void {
        if (this._essMap.has(data.program_number)) {
            for (const stream of data.streams) {
                for (const descriptor of stream.ES_info) {
                    if (descriptor.descriptor_tag === 0x52) { // stream identifier descriptor
                        if (
                            descriptor.component_tag === 0x79 || // ARIB TR-B15 (BS)
                            descriptor.component_tag === 0x7A // ...? (CS)
                        ) {
                            this._parsePids.add(stream.elementary_PID);
                            this._essEsPids.add(stream.elementary_PID);

                            log.debug("TSFilter#_onPMT: detected ESS ES PID=%d", stream.elementary_PID);
                            break;
                        }
                    }
                }
            }

            this._parsePids.delete(pid);
            return;
        }

        // service-only selection becomes ready on the first PMT
        if (this._ready === false && this._provideServiceId !== null && this._provideEventId === null) {
            this._ready = true;

            log.info("TSFilter#_onPMT: now ready for serviceId=%d", this._provideServiceId);
        }

        if (data.program_info[0]) {
            this._providePids.add(data.program_info[0].CA_PID);
        }

        this._providePids.add(data.PCR_PID);

        for (const stream of data.streams) {
            this._providePids.add(stream.elementary_PID);
        }

        // sleep
        // (stop parsing this PMT for 1s to avoid redundant re-parsing)
        if (this._parsePids.has(pid)) {
            this._parsePids.delete(pid);

            this._pmtTimer = setTimeout(() => {
                this._parsePids.add(pid);
            }, 1000);
        }
    }
    /**
     * NIT handler: extracts network id, area code (0xFA) and remote control
     * key id (0xCD) from the first transport stream, emits "network", then
     * stops parsing the NIT PID (one-shot).
     */
    private _onNIT(pid: number, data: any): void {
        const _network = {
            networkId: data.network_id,
            areaCode: -1,
            remoteControlKeyId: -1
        };

        if (data.transport_streams[0]) {
            for (const desc of data.transport_streams[0].transport_descriptors) {
                switch (desc.descriptor_tag) {
                    case 0xFA:
                        _network.areaCode = desc.area_code;
                        break;
                    case 0xCD:
                        _network.remoteControlKeyId = desc.remote_control_key_id;
                        break;
                }
            }
        }

        this.emit("network", _network);

        if (this._parsePids.has(pid)) {
            this._parsePids.delete(pid);
        }
    }
    /**
     * SDT handler: for the current TS, collects name/type (descriptor 0x48)
     * and logo id (0xCF) of each service known from the PAT, emits "services",
     * then stops parsing the SDT PID (one-shot).
     */
    private _onSDT(pid: number, data: any): void {
        if (this._tsid !== data.transport_stream_id) {
            return;
        }

        const _services = [];

        for (const service of data.services) {
            if (this._serviceIds.has(service.service_id) === false) {
                continue;
            }

            let name = "";
            let type = -1;
            let logoId = -1;

            const m = service.descriptors.length;
            for (let j = 0; j < m; j++) {
                if (service.descriptors[j].descriptor_tag === 0x48) {
                    name = new TsChar(service.descriptors[j].service_name_char).decode();
                    type = service.descriptors[j].service_type;
                }

                if (service.descriptors[j].descriptor_tag === 0xCF) {
                    logoId = service.descriptors[j].logo_id;
                }

                // stop early once both pieces of information are in hand
                if (name !== "" && logoId !== -1) {
                    break;
                }
            }

            if (_services.some(_service => _service.id === service.service_id) === false) {
                _services.push({
                    networkId: data.original_network_id,
                    serviceId: service.service_id,
                    name: name,
                    type: type,
                    logoId: logoId
                });
            }
        }

        this.emit("services", _services);

        if (this._parsePids.has(pid)) {
            this._parsePids.delete(pid);
        }
    }
    /**
     * EIT handler. Two roles: (1) event gating — on the present/following
     * table (0x4E, section 0) of the target service, mark ready while the
     * target event is current and schedule close once it ends; (2) EPG —
     * feed schedule tables into the EPG writer and completeness tracker.
     */
    private _onEIT(pid: number, data: any): void {
        // detect current event
        if (
            this._pmtPid !== -1 &&
            data.events.length !== 0 &&
            this._provideEventId !== null && data.table_id === 0x4E && data.section_number === 0 &&
            this._provideServiceId === data.service_id
        ) {
            if (data.events[0].event_id === this._provideEventId) {
                this._provideEventLastDetectedAt = Date.now();

                if (this._ready === false) {
                    this._ready = true;

                    log.info("TSFilter#_onEIT: now ready for eventId=%d", this._provideEventId);
                }
            } else {
                if (this._ready) {
                    log.info("TSFilter#_onEIT: closing because eventId=%d has ended...", this._provideEventId);

                    // keep output flowing for the grace period, then close;
                    // eventId is restored for the final close log message
                    const eventId = this._provideEventId;
                    this._provideEventId = null;
                    setTimeout(() => {
                        this._ready = false;
                        this._provideEventId = eventId;
                        this._close();
                    }, this._eventEndTimeout);
                }
            }
        }

        // write EPG stream and store result
        if (
            this._parseEIT &&
            this._parseServiceIds.has(data.service_id)
        ) {
            if (!this._epg && status.epg[this._targetNetworkId] !== true) {
                // only one TSFilter per network collects EPG at a time
                status.epg[this._targetNetworkId] = true;
                this._epg = new EPG();

                // Logo
                this._standbyLogoData();
            }
            if (this._epg) {
                this._epg.write(data);

                // completeness tracking only for schedule tables (not p/f)
                if (!this._epgReady && data.table_id !== 0x4E && data.table_id !== 0x4F) {
                    this._updateEpgState(data);
                }
            }
        }
    }
    /** TOT handler: remembers current stream time (used to skip past EIT segments). */
    private _onTOT(pid: number, data: any): void {
        this._streamTime = getTimeFromMJD(data.JST_time);
    }
    /** CDT handler (GR networks): decodes and persists type-0x05 logo data modules. */
    private _onCDT(pid: number, data: any): void {
        if (data.data_type === 0x01) {
            // Logo
            const dataModule = new tsDataModule.TsDataModuleCdtLogo(data.data_module_byte).decode();
            if (dataModule.logo_type !== 0x05) {
                return;
            }

            log.debug("TSFilter#_onCDT: received logo data (networkId=%d, logoId=%d)", data.original_network_id, dataModule.logo_id);

            const logoData = TsLogo.decode(dataModule.data_byte);
            Service.saveLogoData(data.original_network_id, dataModule.logo_id, logoData);
        }
    }
    /**
     * DSM-CC handler (BS/CS logo carousel). DII (0x3B) announces logo
     * modules and allocates a download buffer; DDB (0x3C) fills that buffer
     * block by block; once complete, the logo module is decoded and each
     * referenced service's logo is persisted.
     */
    private _onDSMCC(pid: number, data: any): void {
        if (data.table_id === 0x3C) {
            // DDB - Download Data Block (frequently than DII)
            const ddb = data.message;

            const downloadId: number = ddb.downloadId;
            const moduleId: number = ddb.moduleId;

            // ignore blocks for downloads we are not tracking / already done
            const dl = this._dlDataMap.get(downloadId);
            if (!dl || dl.moduleId !== moduleId || !dl.data) {
                return;
            }

            // module was re-versioned mid-download: abandon it
            const moduleVersion: number = ddb.moduleVersion;
            if (dl.moduleVersion !== moduleVersion) {
                this._dlDataMap.delete(downloadId);
                return;
            }

            const blockNumber: number = ddb.blockNumber;
            const blockDataByte: Buffer = ddb.blockDataByte;

            blockDataByte.copy(dl.data, DSMCC_BLOCK_SIZE * blockNumber);
            dl.loadedBytes += blockDataByte.length;

            log.debug("TSFilter#_onDSMCC: detected DDB and logo data downloading... (downloadId=%d, %d/%d bytes)", downloadId, dl.loadedBytes, dl.moduleSize);

            if (dl.loadedBytes !== dl.moduleSize) {
                return;
            }

            // module complete: release the buffer reference and decode
            const dlData = dl.data;
            delete dl.data;

            const dataModule = new tsDataModule.TsDataModuleLogo(dlData).decode();

            for (const logo of dataModule.logos) {
                for (const logoService of logo.services) {
                    const service = _.service.get(logoService.original_network_id, logoService.service_id);
                    if (!service) {
                        continue;
                    }

                    service.logoId = logo.logo_id;

                    log.debug("TSFilter#_onDSMCC: received logo data (networkId=%d, logoId=%d)", service.networkId, service.logoId);

                    const logoData = new TsLogo(logo.data_byte).decode(); // png
                    Service.saveLogoData(service.networkId, service.logoId, logoData);

                    // one service per logo entry is enough to persist it
                    break;
                }
            }
        } else if (data.table_id === 0x3B) {
            // DII - Download Info Indication
            const dii = data.message;

            if (this._dlDataMap.has(dii.downloadId)) {
                return;
            }

            for (const module of dii.modules) {
                for (const descriptor of module.moduleInfo) {
                    // name
                    if (descriptor.descriptor_tag !== 0x02) {
                        continue;
                    }
                    // find LOGO-05 or CS_LOGO-05
                    if (
                        !LOGO_DATA_NAME_BS.equals(descriptor.text_char) &&
                        !LOGO_DATA_NAME_CS.equals(descriptor.text_char)
                    ) {
                        continue;
                    }

                    this._dlDataMap.set(dii.downloadId, {
                        downloadId: dii.downloadId,
                        // blockSize: dii.blockSize, // 4066
                        moduleId: module.moduleId,
                        moduleVersion: module.moduleVersion,
                        moduleSize: module.moduleSize,
                        loadedBytes: 0,
                        data: Buffer.allocUnsafeSlow(module.moduleSize).fill(0)
                    });

                    log.debug("TSFilter#_onDSMCC: detected DII and buffer allocated for logo data (downloadId=%d, %d bytes)", dii.downloadId, module.moduleSize);
                    break;
                }
            }
        }
    }
    /**
     * Watchdog for event-gated output: as long as the target event was seen
     * in EIT p/f within the last 10s, keep re-checking every 3s; otherwise
     * close the filter.
     */
    private _observeProvideEvent(): void {
        // note: EIT p/f interval is max 3s. (ARIB TR-B15)
        if (Date.now() - this._provideEventLastDetectedAt < 10000) {
            this._provideEventTimeout = setTimeout(
                () => this._observeProvideEvent(),
                3000
            );
            return;
        }

        log.warn("TSFilter#_observeProvideEvent: closing because EIT p/f timed out for eventId=%d...", this._provideEventId);
        this._close();
    }
    /**
     * Begin waiting for logo data if any target service's logo is stale
     * (older than logoDataInterval). GR networks listen on the CDT PID;
     * BS/CS networks listen on the ESS DSM-CC carousel. Listening stops
     * after 30 minutes. No-op when closed, already waiting, or (BS/CS)
     * before any ESS PMT has been seen.
     */
    private async _standbyLogoData(): Promise<void> {
        if (this._closed) {
            return;
        }
        if (this._logoDataTimer) {
            return;
        }
        if (this._enableParseDSMCC && this._essMap.size === 0) {
            return;
        }

        // target service(s)
        const targetServices: ServiceItem[] = [];
        if (this._provideServiceId === null) {
            targetServices.push(..._.service.findByNetworkId(this._targetNetworkId));
        } else if (this._enableParseCDT) {
            targetServices.push(_.service.get(this._targetNetworkId, this._provideServiceId));
        } else if (this._enableParseDSMCC && this._targetNetworkId === 4) {
            // BS carousel also carries CS logos (networks 6/7)
            targetServices.push(
                ..._.service.findByNetworkId(4),
                ..._.service.findByNetworkId(6),
                ..._.service.findByNetworkId(7)
            );
        }

        const logoIdNetworkMap: { [networkId: number]: Set<number> } = {};
        for (const service of targetServices) {
            if (typeof service.logoId === "number") {
                if (!logoIdNetworkMap[service.networkId]) {
                    logoIdNetworkMap[service.networkId] = new Set();
                }
                logoIdNetworkMap[service.networkId].add(service.logoId);
            }
        }

        const now = Date.now();
        const logoDataInterval = _.config.server.logoDataInterval || 1000 * 60 * 60 * 24 * 7; // 7 days

        for (const networkId in logoIdNetworkMap) {
            for (const logoId of logoIdNetworkMap[networkId]) {
                // -1 means "logo id unknown"; skip it when real ids exist
                if (logoId === -1 && logoIdNetworkMap[networkId].size > 1) {
                    continue;
                }

                // check logoDataInterval
                if (now - await Service.getLogoDataMTime(this._targetNetworkId, logoId) > logoDataInterval) {
                    if (this._closed) {
                        return; // break all loops
                    }

                    if (this._enableParseCDT) {
                        // for GR
                        if (logoId >= 0) {
                            this._parsePids.add(0x29); // CDT PID
                        }

                        // add listener
                        this._parser.on("cdt", this._onCDT.bind(this));

                        // add timer
                        this._logoDataTimer = setTimeout(() => {
                            this._parsePids.delete(0x29); // CDT
                            this._parser.removeAllListeners("cdt");

                            log.info("TSFilter#_standbyLogoData: stopped waiting for logo data (networkId=%d, logoId=%d)", this._targetNetworkId, logoId);
                        }, 1000 * 60 * 30); // 30 mins

                        log.info("TSFilter#_standbyLogoData: waiting for logo data for 30 minutes... (networkId=%d, logoId=%d)", this._targetNetworkId, logoId);
                    } else if (this._enableParseDSMCC) {
                        // for BS/CS
                        for (const essPmtPid of this._essMap.values()) {
                            this._parsePids.add(essPmtPid); // ESS PMT PID
                        }

                        // add listener
                        this._parser.on("dsmcc", this._onDSMCC.bind(this));

                        // add timer
                        this._logoDataTimer = setTimeout(() => {
                            delete this._logoDataTimer;

                            for (const essEsPid of this._essEsPids.values()) {
                                this._parsePids.delete(essEsPid);
                            }
                            this._parser.removeAllListeners("dsmcc");

                            log.info("TSFilter#_standbyLogoData: stopped waiting for logo data (networkId=[4,6,7])");
                        }, 1000 * 60 * 30); // 30 mins

                        log.info("TSFilter#_standbyLogoData: waiting for logo data for 30 minutes... (networkId=[4,6,7])");
                    }

                    return; // break all loops
                }
            }
        }
    }
    /**
     * Track EIT schedule-table completeness per (network, service). Each
     * sub-table keeps a 32-byte `flag` bitmap (sections received) and an
     * `ignore` bitmap (sections that will never arrive: past segments,
     * beyond last segment/section). When every non-ignored section of every
     * tracked table has arrived, fire "epgReady" once.
     */
    private _updateEpgState(data: any): void {
        const networkId = data.original_network_id;
        const serviceId = data.service_id;
        const versionNumber = data.version_number;

        const stateByNet = this._epgState[networkId] || (this._epgState[networkId] = {});
        let stateBySrv = stateByNet[serviceId];

        if (!stateByNet[serviceId]) {
            // first sighting of this service: allocate 8 basic + 8 extended
            // sub-table trackers
            stateBySrv = stateByNet[serviceId] = {
                basic: {
                    flags: [],
                    lastFlagsId: -1
                },
                extended: {
                    flags: [],
                    lastFlagsId: -1
                }
            };

            for (let i = 0; i < 0x08; i++) {
                for (const target of [stateBySrv.basic, stateBySrv.extended]) {
                    target.flags.push({
                        flag: Buffer.allocUnsafeSlow(32).fill(0x00),
                        ignore: Buffer.allocUnsafeSlow(32).fill(0xFF),
                        version_number: -1
                    });
                }
            }
        }

        const flagsId = data.table_id & 0x07;
        const lastFlagsId = data.last_table_id & 0x07;
        const segmentNumber = data.section_number >> 3;
        const lastSegmentNumber = data.last_section_number >> 3;
        const sectionNumber = data.section_number & 0x07;
        const segmentLastSectionNumber = data.segment_last_section_number & 0x07;
        const targetFlags = (data.table_id & 0x0F) < 0x08 ? stateBySrv.basic : stateBySrv.extended;
        const targetFlag = targetFlags.flags[flagsId];

        if ((targetFlags.lastFlagsId !== lastFlagsId) ||
            (targetFlag.version_number !== -1 && targetFlag.version_number !== versionNumber)) {
            // version check
            if (targetFlag.version_number !== -1) {
                // NOTE(review): accepts only a +1 version step (or the 31->0
                // wrap, verDiff === -31 falls through) — confirm intent.
                const verDiff = versionNumber - targetFlag.version_number;
                if (verDiff === -1 || verDiff > 1) {
                    return;
                }
            }
            // reset fields
            for (let i = 0; i < 0x08; i++) {
                targetFlags.flags[i].flag.fill(0x00);
                targetFlags.flags[i].ignore.fill(i <= lastFlagsId ? 0x00 : 0xFF);
            }
        }

        // update ignore field (past segment)
        if (flagsId === 0 && this._streamTime !== null) {
            // current 3-hour segment index derived from JST stream time
            const segment = (this._streamTime + 9 * 60 * 60 * 1000) / (3 * 60 * 60 * 1000) & 0x07;

            for (let i = 0; i < segment; i++) {
                targetFlag.ignore[i] = 0xFF;
            }
        }

        // update ignore field (segment)
        for (let i = lastSegmentNumber + 1; i < 0x20 ; i++) {
            targetFlag.ignore[i] = 0xFF;
        }

        // update ignore field (section)
        for (let i = segmentLastSectionNumber + 1; i < 8; i++) {
            targetFlag.ignore[segmentNumber] |= 1 << i;
        }

        // update flag field
        targetFlag.flag[segmentNumber] |= 1 << sectionNumber;

        // update last_table_id & version_number
        targetFlags.lastFlagsId = lastFlagsId;
        targetFlag.version_number = versionNumber;

        let ready = true;

        isReady: for (const nid in this._epgState) {
            for (const sid in this._epgState[nid]) {
                for (const table of this._epgState[nid][sid].basic.flags.concat(this._epgState[nid][sid].extended.flags)) {
                    for (let i = 0; i < table.flag.length; i++) {
                        if ((table.flag[i] | table.ignore[i]) !== 0xFF) {
                            ready = false;
                            break isReady;
                        }
                    }
                }
            }
        }

        if (ready === true) {
            this._epgReady = true;
            this._clearEpgState();

            for (const service of _.service.findByNetworkId(this._targetNetworkId)) {
                service.epgReady = true;
            }

            process.nextTick(() => this.emit("epgReady"));
        }
    }
private _clearEpgState() {
if (!this._epgState) {
return;
}
for (const nid in this._epgState) {
delete this._epgState[nid];
}
}
    /**
     * Idempotent teardown: cancels timers, frees buffers, shuts down the
     * section parser, EPG collector and output stream, updates counters, and
     * emits "close" then "end".
     */
    private _close(): void {
        if (this._closed) {
            return;
        }
        this._closed = true;

        // clear timer
        clearTimeout(this._pmtTimer);
        clearTimeout(this._provideEventTimeout);
        clearTimeout(this._logoDataTimer);

        // clear buffer
        // (deferred so any in-flight setImmediate parse sees _closed first)
        setImmediate(() => {
            delete this._packet;
            delete this._buffer;
            delete this._patsec;
            delete this._patCRC;
        });

        // clear parser instance
        this._parser.removeAllListeners();
        this._parser.end();
        delete this._parser;

        // clear EPG instance & state
        if (this._epg) {
            this._epg.end();
            delete this._epg;

            status.epg[this._targetNetworkId] = false; // update status

            if (this._epgReady === true) {
                const now = Date.now();
                for (const service of _.service.findByNetworkId(this._targetNetworkId)) {
                    service.epgUpdatedAt = now;
                }
            }

            this._clearEpgState();
            delete this._epgState;
        }

        // clear output stream
        if (this._output) {
            if (this._output.writableEnded === false) {
                this._output.end();
            }
            this._output.removeAllListeners();
            delete this._output;
        }

        // clear streamInfo
        delete this.streamInfo;

        --status.streamCount.tsFilter;

        log.info("TSFilter#_close: closed (serviceId=%s, eventId=%s)", this._provideServiceId, this._provideEventId);

        // close
        this.emit("close");
        this.emit("end");
    }
}
|
<gh_stars>0
import { LoginComponent } from './components/user-login/login.component';
import { PasswordResetComponent } from './components/password-reset/password-reset.component';
import { ForgetPasswordComponent } from './components/forget-password/forget-password.component';
import { InputTextModule, TooltipModule, ButtonModule, ProgressBarModule, DialogModule } from 'primeng/primeng';
import { LoginService } from './services/login.service';
import { ResetPasswordLinkResolver } from './resolvers/reset-password-link.resolver';
import {MessageModule} from 'primeng/message';
// Component declarations
// (re-exported for the owning NgModule's `declarations` array)
export const COMPONENT_DECLARATIONS: any = [
    LoginComponent,
    PasswordResetComponent,
    ForgetPasswordComponent
];

// Injectable services / route resolvers for the NgModule's `providers` array
export const SERVICE_DECLARATIONS: any = [
    LoginService,
    ResetPasswordLinkResolver
];

// PrimeNg imports
export const PRIMENG_IMPORTS: any = [
    InputTextModule,
    TooltipModule,
    ButtonModule, ProgressBarModule, MessageModule,
    DialogModule
];
|
def client(api_client):
    """Build an API client fixture that disables retries on 503 and 404 responses."""
    no_retry_statuses = {503, 404}
    return api_client(disable_retry_status_list=no_retry_statuses)
/// Converters: build symbols and expressions from Builtins
impl BuiltinFn {
    /// Build the builtin-function `Symbol` for this builtin, named after its
    /// `to_string()` form with its declared parameter and return types.
    pub fn as_symbol(&self) -> Symbol {
        Symbol::builtin_function(&self.to_string(), self.param_types(), self.return_type())
    }

    /// The symbol from `as_symbol`, converted to an `Expr`.
    pub fn as_expr(&self) -> Expr {
        self.as_symbol().to_expr()
    }

    /// Build a call expression applying this builtin to `arguments`;
    /// `loc` is attached to both the callee expression and the call itself.
    pub fn call(&self, arguments: Vec<Expr>, loc: Location) -> Expr {
        self.as_expr().with_location(loc.clone()).call(arguments).with_location(loc)
    }
}
package com.xiaoxin.notes.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.xiaoxin.notes.entity.AdpicEntity;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis-Plus data-access interface for {@link AdpicEntity} records.
 * All basic CRUD operations are inherited from {@link BaseMapper};
 * no custom queries are defined here.
 *
 * @date 2021-01-13 14:01:44
 */
@Mapper
public interface AdpicDao extends BaseMapper<AdpicEntity> {
}
|
class SMPPParameters:
    """PDU generating class.

    Collects the mandatory and optional (TLV) parameters of a single SMPP
    operation.  A received binary PDU body can be decoded with
    ``parse_pdu`` (also reachable via the ``pdu=...`` constructor keyword),
    and an outgoing PDU body can be rendered as a hex string with
    ``prepare_pdu()`` / ``repr()``.
    """
    # A list of mandatory parameters and their initial values.
    # NOTE(review): the SMPPAddress defaults below are class-level and are
    # shared between instances until reassigned -- never mutate them in place.
    system_id = ""
    password = ""
    system_type = "ESME"
    service_type = ""
    interface_version = 0x34
    address = SMPPAddress("")
    source = SMPPAddress("")
    destination = SMPPAddress("")
    esm_class = 0
    protocol_id = 0
    priority_flag = 0
    schedule_delivery_time = 0
    validity_period = 0
    registered_delivery = 0
    replace_if_present_flag = 0
    data_coding = 0
    sm_default_msg_id = 1
    sm_length = 0
    short_message = ""
    message_id = ""
    number_of_dests = 0
    dest_flag = 0
    no_unsuccess = 0
    dl_name = ""
    message_state = 0x07
    # A list of tag codes for the optional parameters:
    OPT_DEST_ADDR_SUBUNIT = 0x0005
    OPT_DEST_NETWORK_TYPE = 0x0006
    OPT_DEST_BEARER_TYPE = 0x0007
    OPT_DEST_TELEMATICS_ID = 0x0008
    OPT_SOURCE_ADDR_SUBUNIT = 0x000D
    OPT_SOURCE_NETWORK_TYPE = 0x000E
    OPT_SOURCE_BEARER_TYPE = 0x000F
    OPT_SOURCE_TELEMATICS_ID = 0x0010
    OPT_QOS_TIME_TO_LIVE = 0x0017
    OPT_PAYLOAD_TYPE = 0x0019
    OPT_ADDITIONAL_STATUS_INFO_TEXT = 0x001D
    OPT_RECEIPTED_MESSAGE_ID = 0x001E
    OPT_MS_MSG_WAIT_FACILITIES = 0x0030
    OPT_PRIVACY_INDICATOR = 0x0201
    OPT_SOURCE_SUBADDRESS = 0x0202
    OPT_DEST_SUBADDRESS = 0x0203
    OPT_USER_MESSAGE_REFERENCE = 0x0204
    OPT_USER_RESPONSE_CODE = 0x0205
    OPT_SOURCE_PORT = 0x020A
    OPT_DESTINATION_PORT = 0x020B
    OPT_SAR_MSG_REF_NUM = 0x020C
    OPT_LANGUAGE_INDICATOR = 0x020D
    OPT_SAR_TOTAL_SEGMENTS = 0x020E
    OPT_SAR_SEGMENT_SEQNUM = 0x020F
    OPT_SC_INTERFACE_VERSION = 0x0210
    OPT_CALLBACK_NUM_PRES_IND = 0x0302
    OPT_CALLBACK_NUM_ATAG = 0x0303
    OPT_NUMBER_OF_MESSAGES = 0x0304
    OPT_CALLBACK_NUM = 0x0381
    OPT_DPF_RESULT = 0x0420
    OPT_SET_DPF = 0x0421
    OPT_MS_AVAILABILITY_STATUS = 0x0422
    OPT_NETWORK_ERROR_CODE = 0x0423
    OPT_MESSAGE_PAYLOAD = 0x0424
    OPT_DELIVERY_FAILURE_REASON = 0x0425
    OPT_MORE_MESSAGES_TO_SEND = 0x0426
    OPT_MESSAGE_STATE = 0x0427
    OPT_USSD_SERVICE_OP = 0x0501
    OPT_DISPLAY_TIME = 0x1201
    OPT_SMS_SIGNAL = 0x1203
    OPT_MS_VALIDITY = 0x1204
    OPT_ALERT_ON_MESSAGE_DELIVERY = 0x130C
    OPT_ITS_REPLY_TYPE = 0x1380
    OPT_ITS_SESSION_INFO = 0x1383
    # This dict is here to turn object variable names in **kwargs into codes
    _optional_tag = {
        'dest_addr_subunit': OPT_DEST_ADDR_SUBUNIT,
        'dest_network_type': OPT_DEST_NETWORK_TYPE,
        'dest_bearer_type': OPT_DEST_BEARER_TYPE,
        'dest_telematics_id': OPT_DEST_TELEMATICS_ID,
        'source_addr_subunit': OPT_SOURCE_ADDR_SUBUNIT,
        'source_network_type': OPT_SOURCE_NETWORK_TYPE,
        'source_bearer_type': OPT_SOURCE_BEARER_TYPE,
        'source_telematics_id': OPT_SOURCE_TELEMATICS_ID,
        'qos_time_to_live': OPT_QOS_TIME_TO_LIVE,
        'payload_type': OPT_PAYLOAD_TYPE,
        'additional_status_info_text': OPT_ADDITIONAL_STATUS_INFO_TEXT,
        'receipted_message_id': OPT_RECEIPTED_MESSAGE_ID,
        'ms_msg_wait_facilities': OPT_MS_MSG_WAIT_FACILITIES,
        'privacy_indicator': OPT_PRIVACY_INDICATOR,
        'source_subaddress': OPT_SOURCE_SUBADDRESS,
        'dest_subaddress': OPT_DEST_SUBADDRESS,
        'user_message_reference': OPT_USER_MESSAGE_REFERENCE,
        'user_response_code': OPT_USER_RESPONSE_CODE,
        'source_port': OPT_SOURCE_PORT,
        'destination_port': OPT_DESTINATION_PORT,
        'sar_msg_ref_num': OPT_SAR_MSG_REF_NUM,
        'language_indicator': OPT_LANGUAGE_INDICATOR,
        'sar_total_segments': OPT_SAR_TOTAL_SEGMENTS,
        'sar_segment_seqnum': OPT_SAR_SEGMENT_SEQNUM,
        'SC_interface_version': OPT_SC_INTERFACE_VERSION,
        'callback_num_pres_ind': OPT_CALLBACK_NUM_PRES_IND,
        'callback_num_atag': OPT_CALLBACK_NUM_ATAG,
        'number_of_messages': OPT_NUMBER_OF_MESSAGES,
        'callback_num': OPT_CALLBACK_NUM,
        'dpf_result': OPT_DPF_RESULT,
        'set_dpf': OPT_SET_DPF,
        'ms_availability_status': OPT_MS_AVAILABILITY_STATUS,
        'network_error_code': OPT_NETWORK_ERROR_CODE,
        'message_payload': OPT_MESSAGE_PAYLOAD,
        'delivery_failure_reason': OPT_DELIVERY_FAILURE_REASON,
        'more_messages_to_send': OPT_MORE_MESSAGES_TO_SEND,
        'message_state': OPT_MESSAGE_STATE,
        'ussd_service_op': OPT_USSD_SERVICE_OP,
        'display_time': OPT_DISPLAY_TIME,
        'sms_signal': OPT_SMS_SIGNAL,
        'ms_validity': OPT_MS_VALIDITY,
        'alert_on_message_delivery': OPT_ALERT_ON_MESSAGE_DELIVERY,
        'its_reply_type': OPT_ITS_REPLY_TYPE,
        'its_session_info': OPT_ITS_SESSION_INFO
    }
    # _opt_parm_sz is the size of the optional parameters value in octets, 'S' for CString
    _opt_parm_sz = {
        OPT_DEST_ADDR_SUBUNIT: 1,
        OPT_DEST_NETWORK_TYPE: 1,
        OPT_DEST_BEARER_TYPE: 1,
        OPT_DEST_TELEMATICS_ID: 2,
        OPT_SOURCE_ADDR_SUBUNIT: 1,
        OPT_SOURCE_NETWORK_TYPE: 1,
        OPT_SOURCE_BEARER_TYPE: 1,
        OPT_SOURCE_TELEMATICS_ID: 1,
        OPT_QOS_TIME_TO_LIVE: 4,
        OPT_PAYLOAD_TYPE: 1,
        OPT_ADDITIONAL_STATUS_INFO_TEXT: 'S',
        OPT_RECEIPTED_MESSAGE_ID: 'S',
        OPT_MS_MSG_WAIT_FACILITIES: 1,
        OPT_PRIVACY_INDICATOR: 1,
        OPT_SOURCE_SUBADDRESS: 'S',
        OPT_DEST_SUBADDRESS: 'S',
        OPT_USER_MESSAGE_REFERENCE: 2,
        OPT_USER_RESPONSE_CODE: 1,
        OPT_SOURCE_PORT: 2,
        OPT_DESTINATION_PORT: 2,
        OPT_SAR_MSG_REF_NUM: 2,
        OPT_LANGUAGE_INDICATOR: 1,
        OPT_SAR_TOTAL_SEGMENTS: 1,
        OPT_SAR_SEGMENT_SEQNUM: 1,
        OPT_SC_INTERFACE_VERSION: 1,
        OPT_CALLBACK_NUM_PRES_IND: 1,
        OPT_CALLBACK_NUM_ATAG: 'S',
        OPT_NUMBER_OF_MESSAGES: 1,
        OPT_CALLBACK_NUM: 'S',
        OPT_DPF_RESULT: 1,
        OPT_SET_DPF: 1,
        OPT_MS_AVAILABILITY_STATUS: 1,
        OPT_NETWORK_ERROR_CODE: 3,
        OPT_MESSAGE_PAYLOAD: 'S',
        OPT_DELIVERY_FAILURE_REASON: 1,
        OPT_MORE_MESSAGES_TO_SEND: 1,
        OPT_MESSAGE_STATE: 1,
        OPT_USSD_SERVICE_OP: 1,
        OPT_DISPLAY_TIME: 1,
        OPT_SMS_SIGNAL: 2,
        OPT_MS_VALIDITY: 1,
        OPT_ALERT_ON_MESSAGE_DELIVERY: 0,
        OPT_ITS_REPLY_TYPE: 1,
        OPT_ITS_SESSION_INFO: 2,
    }

    def __init__(self, command, **kwargs):
        """Create a parameter set for ``command`` (name or numeric code).

        Any keyword argument sets the attribute of the same name; the
        special keyword ``pdu`` feeds its value to ``parse_pdu`` instead.
        """
        if isinstance(command, str):
            command = command_a2b[command]
        self.command = command
        # Per-instance TLV store.  (Previously a class-level dict, which was
        # silently shared -- and mutated -- across all instances.)
        self._optionals = {}
        self._determine_optionals()
        for variable, value in kwargs.items():
            if variable == "pdu":
                self.parse_pdu(value)
                continue
            elif variable == "source" or variable == "destination" or variable == "address":
                if not isinstance(value, SMPPAddress):
                    raise SMPPException("SMPPParameters expects addresses to be of type SMPPAddress")
            vars(self)[variable] = value

    def _determine_optionals(self):
        # Lists which optional parameters are allowed for the given command/response. Default is none.
        if self.command == SUBMIT_SM:
            self._optionals_allowed = [self.OPT_USER_MESSAGE_REFERENCE, self.OPT_SOURCE_PORT, self.OPT_SOURCE_ADDR_SUBUNIT, self.OPT_DESTINATION_PORT, self.OPT_DEST_ADDR_SUBUNIT, self.OPT_SAR_MSG_REF_NUM, self.OPT_SAR_TOTAL_SEGMENTS, self.OPT_SAR_SEGMENT_SEQNUM, self.OPT_MORE_MESSAGES_TO_SEND, self.OPT_PAYLOAD_TYPE, self.OPT_MESSAGE_PAYLOAD, self.OPT_PRIVACY_INDICATOR, self.OPT_CALLBACK_NUM, self.OPT_CALLBACK_NUM_PRES_IND, self.OPT_CALLBACK_NUM_ATAG, self.OPT_SOURCE_SUBADDRESS, self.OPT_DEST_SUBADDRESS, self.OPT_USER_RESPONSE_CODE, self.OPT_DISPLAY_TIME, self.OPT_SMS_SIGNAL, self.OPT_MS_VALIDITY, self.OPT_MS_MSG_WAIT_FACILITIES, self.OPT_NUMBER_OF_MESSAGES, self.OPT_ALERT_ON_MESSAGE_DELIVERY, self.OPT_LANGUAGE_INDICATOR, self.OPT_ITS_REPLY_TYPE, self.OPT_ITS_SESSION_INFO, self.OPT_USSD_SERVICE_OP]
        elif self.command == DATA_SM:
            self._optionals_allowed = [self.OPT_SOURCE_PORT, self.OPT_SOURCE_ADDR_SUBUNIT, self.OPT_SOURCE_NETWORK_TYPE, self.OPT_SOURCE_BEARER_TYPE, self.OPT_SOURCE_TELEMATICS_ID, self.OPT_DESTINATION_PORT, self.OPT_DEST_ADDR_SUBUNIT, self.OPT_DEST_NETWORK_TYPE, self.OPT_DEST_BEARER_TYPE, self.OPT_DEST_TELEMATICS_ID, self.OPT_SAR_MSG_REF_NUM, self.OPT_SAR_TOTAL_SEGMENTS, self.OPT_SAR_SEGMENT_SEQNUM, self.OPT_MORE_MESSAGES_TO_SEND, self.OPT_QOS_TIME_TO_LIVE, self.OPT_PAYLOAD_TYPE, self.OPT_MESSAGE_PAYLOAD, self.OPT_SET_DPF, self.OPT_RECEIPTED_MESSAGE_ID, self.OPT_MESSAGE_STATE, self.OPT_NETWORK_ERROR_CODE, self.OPT_USER_MESSAGE_REFERENCE, self.OPT_PRIVACY_INDICATOR, self.OPT_CALLBACK_NUM, self.OPT_CALLBACK_NUM_PRES_IND, self.OPT_CALLBACK_NUM_ATAG, self.OPT_SOURCE_SUBADDRESS, self.OPT_DEST_SUBADDRESS, self.OPT_USER_RESPONSE_CODE, self.OPT_DISPLAY_TIME, self.OPT_SMS_SIGNAL, self.OPT_MS_VALIDITY, self.OPT_MS_MSG_WAIT_FACILITIES, self.OPT_NUMBER_OF_MESSAGES, self.OPT_ALERT_ON_MESSAGE_DELIVERY, self.OPT_LANGUAGE_INDICATOR, self.OPT_ITS_REPLY_TYPE, self.OPT_ITS_SESSION_INFO]
        elif self.command == SUBMIT_MULTI:
            self._optionals_allowed = [self.OPT_USER_MESSAGE_REFERENCE, self.OPT_SOURCE_PORT, self.OPT_SOURCE_ADDR_SUBUNIT, self.OPT_DESTINATION_PORT, self.OPT_DEST_ADDR_SUBUNIT, self.OPT_SAR_MSG_REF_NUM, self.OPT_SAR_TOTAL_SEGMENTS, self.OPT_SAR_SEGMENT_SEQNUM, self.OPT_PAYLOAD_TYPE, self.OPT_MESSAGE_PAYLOAD, self.OPT_PRIVACY_INDICATOR, self.OPT_CALLBACK_NUM, self.OPT_CALLBACK_NUM_PRES_IND, self.OPT_CALLBACK_NUM_ATAG, self.OPT_SOURCE_SUBADDRESS, self.OPT_DEST_SUBADDRESS, self.OPT_DISPLAY_TIME, self.OPT_SMS_SIGNAL, self.OPT_MS_VALIDITY, self.OPT_MS_MSG_WAIT_FACILITIES, self.OPT_ALERT_ON_MESSAGE_DELIVERY, self.OPT_LANGUAGE_INDICATOR]
        elif self.command == DATA_SM_RESP:
            self._optionals_allowed = [self.OPT_DELIVERY_FAILURE_REASON, self.OPT_NETWORK_ERROR_CODE, self.OPT_ADDITIONAL_STATUS_INFO_TEXT, self.OPT_DPF_RESULT]
        elif self.command == BIND_TRANSMITTER_RESP or self.command == BIND_RECEIVER_RESP or self.command == BIND_TRANSCEIVER_RESP:
            self._optionals_allowed = [self.OPT_SC_INTERFACE_VERSION]
        elif self.command == DELIVER_SM:
            self._optionals_allowed = [self.OPT_USER_MESSAGE_REFERENCE, self.OPT_SOURCE_PORT, self.OPT_DESTINATION_PORT, self.OPT_SAR_MSG_REF_NUM, self.OPT_SAR_TOTAL_SEGMENTS, self.OPT_SAR_SEGMENT_SEQNUM, self.OPT_USER_RESPONSE_CODE, self.OPT_PRIVACY_INDICATOR, self.OPT_PAYLOAD_TYPE, self.OPT_MESSAGE_PAYLOAD, self.OPT_CALLBACK_NUM, self.OPT_SOURCE_SUBADDRESS, self.OPT_DEST_SUBADDRESS, self.OPT_LANGUAGE_INDICATOR, self.OPT_ITS_SESSION_INFO, self.OPT_NETWORK_ERROR_CODE, self.OPT_MESSAGE_STATE, self.OPT_RECEIPTED_MESSAGE_ID]
        else:
            self._optionals_allowed = []

    def add_optional(self, **kwargs):
        """Add an optional parameter, in the form of SMPPParameters.add_optional(param1 = value1, param2 = value2, ...)
        supported optional parameters can be seen via SMPPParameters._optionals_allowed on an object initialized with the
        correct command code."""
        for tag, value in kwargs.items():
            if tag not in self._optional_tag:
                raise SMPPException("SMPPParameters unknown optional parameter: %s" % tag)
            tag_code = self._optional_tag[tag]
            if tag_code not in self._optionals_allowed:
                raise SMPPException("SMPPParameters, optional parameter %s not allowed in %s" % (tag, command_b2a[self.command]))
            sz = self._opt_parm_sz[tag_code]  # Get the size of the argument, 'S' is for null-terminated string
            # Cross-check the Python type against the declared TLV size.
            # (Fixed: these raises previously referenced a misspelled
            # "SMPPPException" name and would have crashed with NameError.)
            if isinstance(value, int):
                if sz == 'S':
                    raise SMPPException("SMPPParameters expected string value for %s" % tag)
            elif isinstance(value, str):
                if not sz == 'S':
                    raise SMPPException("SMPPParameters expected integer value for %s" % tag)
            self._optionals[tag_code] = value

    def get_optionals(self):
        """Get a dictionary representing the optional parameters initialized either by parse_pdu() or add_optional()"""
        return self._optionals

    def has_optionals(self):
        """Boolean check for whether we have optional parameters from either parse_pdu() or add_optional()"""
        if len(self._optionals) > 0:
            return True
        return False

    def length(self):
        """Return the length of the PDU given by prepare_pdu() / __repr__()"""
        # prepare_pdu() returns hex text: two characters per octet.
        return (len(self.prepare_pdu()) // 2)

    def _readstr(self, d):
        # Read a null-terminated C string from the file-like object ``d``
        # (the terminator is consumed but not returned).
        ret = ""
        while 1:
            c = d.read(1)
            if c == '\0':
                break
            ret += c
        return ret

    def parse_pdu(self, pdu):
        """Read a binary PDU and parse it into object variables."""
        length = len(pdu)
        d = StringIO(pdu)  # So we can use read()
        left = lambda: length - d.tell()
        readx = lambda x: d.read(x) if left() >= x else None
        readone = lambda: ord(d.read(1)) if left() >= 1 else None
        src_addr_ton = None
        src_addr_npi = None
        src_addr = None
        dest_addr_ton = None
        dest_addr_npi = None
        dest_addr = None
        if self.command == BIND_TRANSMITTER_RESP or self.command == BIND_RECEIVER_RESP or self.command == BIND_TRANSCEIVER_RESP:
            self.system_id = self._readstr(d)
        elif self.command == DATA_SM_RESP:
            self.message_id = self._readstr(d)
        elif self.command == DELIVER_SM:
            self.system_type = self._readstr(d)
            src_addr_ton = readone()
            src_addr_npi = readone()
            src_addr = self._readstr(d)
            dest_addr_ton = readone()
            dest_addr_npi = readone()
            dest_addr = self._readstr(d)
            self.esm_class = readone()
            self.protocol_id = readone()
            self.priority_flag = readone()
            self.schedule_delivery_time = readone()
            self.validity_period = readone()
            self.registered_delivery = readone()
            self.replace_if_present_flag = readone()
            self.data_coding = readone()
            self.sm_default_msg_id = readone()
            self.sm_length = readone()
            self.short_message = readx(self.sm_length)
        elif self.command == DATA_SM:
            self.system_type = self._readstr(d)
            src_addr_ton = readone()
            src_addr_npi = readone()
            src_addr = self._readstr(d)
            dest_addr_ton = readone()
            dest_addr_npi = readone()
            dest_addr = self._readstr(d)
            self.esm_class = readone()
            self.registered_delivery = readone()
            self.data_coding = readone()
        # TON/NPI values of zero are legal, so compare against None instead
        # of relying on truthiness (which silently dropped ton/npi == 0).
        if src_addr and src_addr_ton is not None and src_addr_npi is not None:
            self.source = SMPPAddress(src_addr)
            self.source.set_ton(src_addr_ton)
            self.source.set_npi(src_addr_npi)
        if dest_addr and dest_addr_ton is not None and dest_addr_npi is not None:
            self.destination = SMPPAddress(dest_addr)
            self.destination.set_ton(dest_addr_ton)
            # Fixed: NPI must come from dest_addr_npi (previously this
            # reused dest_addr_ton).
            self.destination.set_npi(dest_addr_npi)
        while left() > 0:
            # It seems we have optional parameters!
            opt_code = readx(2)
            opt_code = unpack("!H", opt_code)[0]
            opt_len = readx(2)
            opt_len = unpack("!H", opt_len)[0]
            opt_val = readx(opt_len)
            print("Got optional parameter: %.4X, length %.4X, value: %s" % (opt_code, opt_len, opt_val))
            sz = self._opt_parm_sz[opt_code]
            if not sz == 'S':
                if sz == 4:
                    opt_val = unpack("!L", opt_val)[0]
                elif sz == 2:
                    # Fixed: two-octet values are unsigned shorts ("!H");
                    # "!L" demanded four bytes and raised struct.error.
                    opt_val = unpack("!H", opt_val)[0]
            if not opt_code in self._optionals_allowed:
                raise SMPPException("SMPPParameters, pdu contains illegal optional parameter code %.4X" % opt_code)
            self._optionals[opt_code] = opt_val

    def __repr__(self):
        """__repr__() -> prepare_pdu()"""
        return self.prepare_pdu()

    def prepare_pdu(self):
        """Generate a hexlified version of a PDU for use in the SMPP module"""
        pdub = ""
        # All C-string fields must carry their null terminator before being
        # hexlified below.
        if len(self.system_type) == 0 or not self.system_type[-1] == '\0':
            self.system_type += '\0'
        if self.command == BIND_TRANSMITTER \
           or self.command == BIND_RECEIVER \
           or self.command == BIND_TRANSCEIVER:
            if len(self.system_id) == 0 or not self.system_id[-1] == '\0':
                self.system_id += '\0'
            if len(self.password) == 0 or not self.password[-1] == '\0':
                self.password += '\0'
            # Maximum field lengths per the SMPP 3.4 bind operation.
            if len(self.system_id) > 16:
                raise SMPPException("Systemid too long!")
            if len(self.password) > 9:
                raise SMPPException("Password too long!")
            if len(self.system_type) > 13:
                raise SMPPException("Systype too long!")
            pdub += b2a_hex(self.system_id)
            pdub += b2a_hex(self.password)
            pdub += b2a_hex(self.system_type)
            pdub += "%.2X" % self.interface_version
            pdub += "%s" % self.address
        elif self.command == DATA_SM:
            pdub += b2a_hex(self.system_type)
            pdub += "%s" % self.source
            pdub += "%s" % self.destination
            pdub += "%.2X" % self.esm_class
            pdub += "%.2X" % self.registered_delivery
            pdub += "%.2X" % self.data_coding
        elif self.command == SUBMIT_SM:
            pdub += b2a_hex(self.system_type)
            pdub += "%s" % self.source
            pdub += "%s" % self.destination
            pdub += "%.2X" % self.esm_class
            pdub += "%.2X" % self.protocol_id
            pdub += "%.2X" % self.priority_flag
            pdub += "%.2X" % self.schedule_delivery_time
            pdub += "%.2X" % self.validity_period
            pdub += "%.2X" % self.registered_delivery
            pdub += "%.2X" % self.replace_if_present_flag
            pdub += "%.2X" % self.data_coding
            pdub += "%.2X" % self.sm_default_msg_id
            pdub += "%.2X" % self.sm_length
            # Fixed: short_message is a string and must be hexlified like
            # the other string fields ("%.2X" on a str raised TypeError).
            pdub += b2a_hex(self.short_message)
        elif self.command == DELIVER_SM_RESP:
            pdub += "00"  # message_id hardcoded to null, as per spec
        # Now add optional parameters
        for key in self._optionals.keys():
            if key in self._optionals_allowed:
                value = ""
                sz = self._opt_parm_sz[key]
                if sz == 'S':
                    value = b2a_hex(self._optionals[key])
                elif sz > 0:
                    fmt = "%%.%dX" % (self._opt_parm_sz[key] * 2)
                    value = fmt % self._optionals[key]
                pdub += "%.4X" % key
                # TLV length is in octets; the hex text has 2 chars/octet.
                pdub += "%.4X" % (len(value) // 2)
                pdub += value
        return pdub
<gh_stars>0
from .getmyip import getMyIP, find_ip_url
|
#![deny(missing_docs)]
//! # Per-core variable support
//!
//! This module defines macros for declaring per-core variables. A new variable
//! can be declared with `declare_per_core!` and can be accessed with `get_per_core!`
//! and `get_per_core_mut!`. These access methods must not be used prior to the
//! invocation of `init_sections` by the BSP.
use crate::error::Result;
use alloc::vec::Vec;
// Heap-allocated backing store holding one copy of the .per_core section
// per AP (cores 1..n); the BSP (core 0) keeps using the linked section
// itself. None until `init_sections` runs.
static mut AP_PER_CORE_SECTIONS: Option<Vec<u8>> = None;

extern "C" {
    // The _value_ of the first/last byte of the .per_core section. The
    // address of this symbol is the start of .per_core
    // (both symbols are provided by the linker script).
    static PER_CORE_START: u8;
    static PER_CORE_END: u8;
}
/// Size in bytes of the linker-provided `.per_core` section, computed as
/// the distance between the `PER_CORE_START` and `PER_CORE_END` symbols.
unsafe fn per_core_section_len() -> usize {
    let begin = &PER_CORE_START as *const u8 as usize;
    let end = &PER_CORE_END as *const u8 as usize;
    end - begin
}
/// Translate the linked (BSP) address of a per-core symbol into the
/// address of `core`'s private copy of that variable.
///
/// Core 0 uses the original `.per_core` section in place; cores >= 1 use
/// the back-to-back heap copies created by `init_sections`.
unsafe fn per_core_address(symbol_addr: *const u8, core: usize) -> *const u8 {
    // The BSP's copy is the linked section itself.
    if core == 0 {
        return symbol_addr;
    }
    let section_len = per_core_section_len();
    // Byte offset of this symbol within the .per_core section.
    let offset = symbol_addr as u64 - (&PER_CORE_START as *const _ as u64);
    let ap_sections = AP_PER_CORE_SECTIONS
        .as_ref()
        .expect("Per-core sections not initialized");
    // Core i (i >= 1) owns the (i - 1)-th section-sized slice of the copy.
    &ap_sections[(section_len * (core - 1)) + offset as usize] as *const u8
}
/// Initialize the per-core sections
///
/// This must be called after the global allocator has been
/// initialized.
///
/// Allocates `ncores - 1` copies of the `.per_core` section, each seeded
/// with the section's current (BSP) contents; the BSP keeps the original.
/// Must run before any AP uses `get_per_core!`/`get_per_core_mut!`.
pub unsafe fn init_sections(ncores: usize) -> Result<()> {
    let section_start = &PER_CORE_START as *const u8;
    let section_len = per_core_section_len();
    // View the linked section as a byte slice to use as the template.
    let per_core_section =
        core::slice::from_raw_parts(section_start, section_len);
    let mut ap_sections = Vec::with_capacity(section_len * (ncores - 1));
    for _ in 0..ncores - 1 {
        ap_sections.extend_from_slice(per_core_section);
    }
    AP_PER_CORE_SECTIONS = Some(ap_sections);
    Ok(())
}
/// Get this current core's sequential index
///
/// NOTE(review): assumes each core's FS segment selector was loaded with a
/// GDT index that encodes the core id -- confirm against the per-core
/// setup code.
pub fn read_core_idx() -> u64 {
    unsafe {
        let value: u64;
        // Read the FS segment selector register into rax.
        llvm_asm!("mov [%fs], %rax"
             : "={rax}"(value)
             ::: "volatile");
        value >> 3 // Shift away the RPL and TI bits (they will always be 0)
    }
}
/// Implementation behind `get_per_core!`: remap a reference to the BSP's
/// copy of a per-core variable into a reference to the calling core's copy.
#[doc(hidden)]
pub unsafe fn get_pre_core_impl<T>(t: &T) -> &T {
    // Safe only if `t` really lives in .per_core and init_sections has run.
    core::mem::transmute(per_core_address(
        t as *const T as *const u8,
        read_core_idx() as usize,
    ))
}
/// Mutable counterpart of `get_pre_core_impl`, used by `get_per_core_mut!`.
#[doc(hidden)]
pub unsafe fn get_pre_core_mut_impl<T>(t: &mut T) -> &mut T {
    // Safe only if `t` really lives in .per_core and init_sections has run.
    core::mem::transmute(per_core_address(
        t as *const T as *const u8,
        read_core_idx() as usize,
    ))
}
/// Obtain a shared reference to the calling core's copy of a variable
/// declared with `declare_per_core!`. Must not run before `init_sections`.
#[macro_export]
macro_rules! get_per_core {
    ($name:ident) => {
        #[allow(unused_unsafe)]
        unsafe {
            $crate::percore::get_pre_core_impl(&mut $name)
        }
    };
}
/// Obtain a mutable reference to the calling core's copy of a variable
/// declared with `declare_per_core!`. Must not run before `init_sections`.
#[macro_export]
macro_rules! get_per_core_mut {
    ($name:ident) => {
        #[allow(unused_unsafe)]
        unsafe {
            $crate::percore::get_pre_core_mut_impl(&mut $name)
        }
    };
}
// The following macros are derived from lazy-static
#[macro_export(local_inner_macros)]
#[doc(hidden)]
macro_rules! __declare_per_core_internal {
    // Place each declared static into the .per_core linker section (so
    // per-core copies can be made), then recurse on the remaining tokens.
    ($(#[$attr:meta])* ($($vis:tt)*) static mut $N:ident : $T:ty = $e:expr; $($t:tt)*) => {
        #[link_section = ".per_core"]
        $($vis)* static mut $N: $T = $e;
        declare_per_core!($($t)*);
    };
    () => ()
}
/// Declare one or more per-core `static mut` variables. Access them with
/// `get_per_core!` / `get_per_core_mut!` after `init_sections` has run.
/// The three arms only differ in the visibility of the declared items.
#[macro_export(local_inner_macros)]
macro_rules! declare_per_core {
    ($(#[$attr:meta])* static mut $N:ident : $T:ty = $e:expr; $($t:tt)*) => {
        // use `()` to explicitly forward the information about private items
        __declare_per_core_internal!($(#[$attr])* () static mut $N : $T = $e; $($t)*);
    };
    ($(#[$attr:meta])* pub static mut $N:ident : $T:ty = $e:expr; $($t:tt)*) => {
        __declare_per_core_internal!($(#[$attr])* (pub) static mut $N : $T = $e; $($t)*);
    };
    ($(#[$attr:meta])* pub ($($vis:tt)+) static mut $N:ident : $T:ty = $e:expr; $($t:tt)*) => {
        __declare_per_core_internal!($(#[$attr])* (pub ($($vis)+)) static mut $N : $T = $e; $($t)*);
    };
    () => ()
}
|
<filename>Applications/AppStore/_TtC8AppStoreP33_2321883E200C83810FD0FF7714F2A68F12FilterButton.h
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <UIKit/UIButton.h>
// Private App Store button subclass (the name is a mangled private Swift
// class recovered by class-dump). Overrides UIButton's layout hooks to
// customize where the title and image are drawn within the content rect.
// The IMP addresses below come from the class dump and are informational.
@interface _TtC8AppStoreP33_2321883E200C83810FD0FF7714F2A68F12FilterButton : UIButton
{
}

- (id)initWithCoder:(id)arg1;	// IMP=0x00000001002d4384
- (id)initWithFrame:(struct CGRect)arg1;	// IMP=0x00000001002d4314
- (struct CGRect)titleRectForContentRect:(struct CGRect)arg1;	// IMP=0x00000001002d4198
- (struct CGRect)imageRectForContentRect:(struct CGRect)arg1;	// IMP=0x00000001002d4118
// Custom alpha property exposed by the Swift subclass.
@property(nonatomic) double alpha;
@end
|
// NewCmdSubmitUpgradeProposal implements a command handler for submitting a software upgrade proposal transaction.
func NewCmdSubmitUpgradeProposal() *gcli.Command {
cmd := &gcli.Command{
Name: "software-upgrade",
Desc: "Submit a software upgrade proposal",
Help: "Submit a software upgrade along with an initial deposit.\n" +
"Please specify a unique name and height for the upgrade to take effect.\n" +
"You may include info to reference a binary download link, in a format compatible with: https://github.com/cosmos/cosmos-sdk/tree/master/cosmovisor",
Config: func(cmd *gcli.Command) {
cmd.StrOpt(&softwareUpgradeOpts.Title, cli.FlagTitle, "", "", "title of proposal")
cmd.StrOpt(&softwareUpgradeOpts.Description, cli.FlagDescription, "", "",
"description of proposal")
cmd.StrOpt(&softwareUpgradeOpts.Deposit, cli.FlagDeposit, "", "", "deposit of proposal")
cmd.Int64Opt(&softwareUpgradeOpts.UpgradeHeight, FlagUpgradeHeight, "", 0,
"The height at which the upgrade must happen")
cmd.StrOpt(&softwareUpgradeOpts.UpgradeInfo, FlagUpgradeInfo, "", "",
"Optional info for the planned upgrade such as commit hash, etc.")
cmd.AddArg("name", "Unique name for the upgrade plan", true)
},
Func: func(cmd *gcli.Command, args []string) error {
clientCtx, err := client.GetClientTxContext()
if err != nil {
return err
}
name := args[0]
content, err := parseArgsToContent(name)
if err != nil {
return err
}
from := clientCtx.GetFromAddress()
depositStr := softwareUpgradeOpts.Deposit
deposit, err := sdk.ParseCoinsNormalized(depositStr)
if err != nil {
return err
}
msg, err := gov.NewMsgSubmitProposal(content, deposit, from)
if err != nil {
return err
}
return client.BroadcastTX(clientCtx, msg)
},
}
return cmd
} |
Ethics of sham surgery: Perspective of patients
Sham surgery is used as a control condition in neurosurgical clinical trials in Parkinson's disease (PD) but remains controversial. This study aimed to assess the perspective of patients with PD and the general public on the use of sham surgery controls. We surveyed consecutive patients from a university‐based neurology outpatient clinic and a community‐based general internal medicine practice. Background information was provided regarding PD and two possible methods of testing the efficacy of a novel gene transfer procedure, followed by questions that addressed participants' opinions related to the willingness to participate and permissibility of blinded and unblinded trial designs. Two hundred eighty‐eight (57.6%) patients returned surveys. Patients with PD expressed less willingness to participate in the proposed gene transfer surgery trials. Unblinded studies received greater support, but a majority would still allow the use of sham surgery. Those in favor of sham surgery were more educated and more likely to use societal perspective rationales. Patients with PD are more cautious about surgical research participation than patients without PD. Their policy views were similar to others', with a majority supporting the use of sham controls. Future research needs to determine whether eliciting more considered judgments of laypersons would reveal different levels of support for sham surgery. © 2007 Movement Disorder Society
def response(self):
    """Fetch ``self.url`` once and memoize the result on the instance.

    The first call performs the HTTP GET; every later call returns the
    cached ``_response`` attribute without touching the network.
    """
    if not hasattr(self, "_response"):
        self._response = requests.get(self.url)
    return self._response
Last week, the Red Sox announced that they would be retiring Wade Boggs’ number 26. This will be the 9th retired number in the Red Sox organization (10 if you include the league-wide retirement of Jackie Robinson’s #42), and the 5th retirement since 2000. With that, two questions are begged: 1) Are MLB teams retiring numbers more than they had historically? and 2) When will every team run out of numbers to use?
We’ll take a look at each question separately. First, are MLB teams retiring numbers more than they have historically? The answer: it depends. Certainly the rate of retirements is increased from 2010 compared to 1950, but it’s hard to say that the numbers in the 2000s are dramatically higher than those in the 1980s. Don’t believe me? Here’s a chart with the quantity of retired MLB uniform numbers over time. I have excluded the one-time league-wide retirement of Jackie Robinson’s #42 in 1997 because it is a one-time event that would otherwise skew the numbers.
As you can see, there’s a sharp increase that begins in around the 1970s and continues upward. “That corresponds with MLB expansion quite nicely,” you may note, but when we remove teams that did not exist in 1940, we get the following chart:
Overall the numbers are pretty similar. So what it seems is that the answer to our first question is “kinda sorta yes.” MLB teams are retiring numbers at accelerating rates. The chart resembles the old distance-velocity-acceleration chart anyone who took high school physics has probably already forgotten. As time increases, the curve increases in an exponential fashion. We’ve established that we are in real danger of running out of uniform numbers.
That, however, is on the aggregate level. As we look at individual teams, we see an even more dramatic story. Take the New York Yankees. In 1965, the team had merely 3 retired numbers. In 1980 they had 7. In 1990? 13. Today, they have 20 numbers retired, eliminating the possibility of almost one out of every five uniform numbers from being used by active players. Here’s how the Yankees look over time.
Using these graphs and running a regression, we can start to forecast when every MLB team will run out of individual uniform numbers, answering the second question posed above. Before I show you the dates, I’ll include some notes about the data:
1) I’ve assumed that uniforms can have 2 numbers maximum, and that 0 and 00 are considered “different numbers” (given that 16 players have worn 0 and 20 have worn 00), however single digit numbers preceded by a 0 are not included (e.g. 03 is not permitted). This gives 101 possible uniform number combinations (0-99 + 00).
2) I have included Jackie Robinson’s league-wide retirement of 42, but removed double counts of other teams who have retired the number 42. The Cardinals, Dodgers, and Yankees have also retired 42, so that was not counted twice.
3) Similarly, if a team retired one number for two players, it counted as only one retirement. This one should be obvious, but the Yankees number 8 was retired for Yogi Berra and Bill Dickey, it counts as only one number retirement.
4) The line of best fit used a quadratic (ax^2+bx+c) equation where the data could support it, otherwise a linear line of best fit was used. The longer a team has retired numbers, the more data that exists, and the more reasonable it is to forecast an exponential type growth of number retirements. For teams that have not retired many numbers, a linear function was used as it will be a more reasonable estimate.
5) I have obviously ignored any unretirements or “releases” of uniform numbers. I am, for the purposes of this, assuming that the teams will just be surprised one day when they don’t have enough numbers, and that no “counter measures” will be used. We all know this is unlikely, unless we add a third digit to uniforms or start using letters.
6) I have included the number when the teams cannot fill a 40 man roster, a 25 man roster, and all numbers will be gone. I also included the line of best fit, plus the r^2 value so you can see how well the line fits with the current data.
Without further ado, the numbers:
Team Equation R^2 Not enough for 40 man roster (year) Not enough for 25 man roster (year) Year team will run out of numbers completely (year) Reds 0.0051x2 - 0.0915x + 1.2344 0.9489 2082 2095 2113 Astros 0.0038x2 - 0.0192x + 1.0459 0.9619 2093 2108 2129 Yankees 0.0013x2 + 0.1424x - 0.1031 0.9637 2106 2130 2168 Braves 0.0022x2 + 0.0279x + 1.6032 0.8779 2123 2142 2170 Red Sox 0.0023x2 + 0.0939x + 2.3636 0.9099 2124 2144 2170 Giants 0.0011x2 + 0.0384x + 1.1033 0.9561 2160 2188 2227 Cardinals 0.2324x - 0.2984 0.9698 2222 2287 2395 White Sox 0.2015x + 2.0244 0.8652 2268 2342 2461 Athletics 0.1809x + 0.7508 0.8852 2324 2407 2539 Padres 0.1808x + 0.1847 0.9386 2324 2407 2540 Dodgers 0.1657x + 4.099 0.8286 2309 2400 2556 Diamondbacks 0.1786x + 0.5714 0.625 2348 2432 2566 Twins 0.1635x - 0.0631 0.9288 2313 2439 2586 Pirates 0.1453x + 0.7736 0.9187 2367 2469 2635 Angels 0.1504x + 0.921 0.7906 2381 2481 2638 Orioles 0.129x + 1.1667 0.9072 2436 2552 2738 Cubs 0.1311x + 0.4689 0.8645 2443 2558 2741 Tigers 0.1268x + 1.5901 0.8929 2449 2566 2756 Brewers 0.1138x + 0.2695 0.8717 2510 2641 2852 Indians 0.107x + 0.2738 0.9379 2524 2664 2888 Phillies 0.1004x + 0.1717 0.9125 2568 2717 2956 Royals 0.075x + 1.3379 0.5876 2783 2982 3302 Rangers 0.0701x + 0.8 0.7364 2855 3068 3411 Mets 0.0435x + 1.2511 0.7771 3339 3683 4235 Rays 0.0368x + 0.7868 0.3125 3636 4044 4696 Blue Jays Never Never Never Mariners Never Never Never Marlins Never Never Never Nationals Never Never Never Rockies Never Never Never |
def is_valid_id(self, gene_id: str) -> bool:
    """Return True when ``gene_id`` is a known key of this gene table."""
    known_genes = self.gene_dict
    return gene_id in known_genes
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.