def is_suppressed_event_type(cls, config, event):
    # Events whose type appears in the configured suppress list are dropped.
    sup_events = Utility.string_to_list(config.suppress_events)
    return event["type"] in sup_events |
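A usage sketch of the helper above; Config and the Utility stub below are stand-ins for the snippet's real classes:

class Config:
    suppress_events = "heartbeat,debug"

class Utility:  # stand-in for the snippet's Utility
    @staticmethod
    def string_to_list(value):
        return [v.strip() for v in value.split(",") if v.strip()]

print(is_suppressed_event_type(None, Config(), {"type": "heartbeat"}))  # True
print(is_suppressed_event_type(None, Config(), {"type": "error"}))      # False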
/**
* <p>
* Describes the details of a LoadBalancer.
* </p>
*/
public class LoadBalancerDescription {
/**
* The name of the LoadBalancer.
*/
private String loadBalancerName;
/**
* The domain name of the LoadBalancer.
*/
private String domain;
/**
* A list of Listeners used by the LoadBalancer.
*/
private java.util.List<Listener> listeners;
/**
* The name of the LoadBalancer.
*
* @return The name of the LoadBalancer.
*/
public String getLoadBalancerName() {
return loadBalancerName;
}
/**
* The name of the LoadBalancer.
*
* @param loadBalancerName The name of the LoadBalancer.
*/
public void setLoadBalancerName(String loadBalancerName) {
this.loadBalancerName = loadBalancerName;
}
/**
* The name of the LoadBalancer.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param loadBalancerName The name of the LoadBalancer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public LoadBalancerDescription withLoadBalancerName(String loadBalancerName) {
this.loadBalancerName = loadBalancerName;
return this;
}
/**
* The domain name of the LoadBalancer.
*
* @return The domain name of the LoadBalancer.
*/
public String getDomain() {
return domain;
}
/**
* The domain name of the LoadBalancer.
*
* @param domain The domain name of the LoadBalancer.
*/
public void setDomain(String domain) {
this.domain = domain;
}
/**
* The domain name of the LoadBalancer.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param domain The domain name of the LoadBalancer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public LoadBalancerDescription withDomain(String domain) {
this.domain = domain;
return this;
}
/**
* A list of Listeners used by the LoadBalancer.
*
* @return A list of Listeners used by the LoadBalancer.
*/
public java.util.List<Listener> getListeners() {
if (listeners == null) {
listeners = new java.util.ArrayList<Listener>();
}
return listeners;
}
/**
* A list of Listeners used by the LoadBalancer.
*
* @param listeners A list of Listeners used by the LoadBalancer.
*/
public void setListeners(java.util.Collection<Listener> listeners) {
java.util.List<Listener> listenersCopy = new java.util.ArrayList<Listener>();
if (listeners != null) {
listenersCopy.addAll(listeners);
}
this.listeners = listenersCopy;
}
/**
* A list of Listeners used by the LoadBalancer.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param listeners A list of Listeners used by the LoadBalancer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public LoadBalancerDescription withListeners(Listener... listeners) {
for (Listener value : listeners) {
getListeners().add(value);
}
return this;
}
/**
* A list of Listeners used by the LoadBalancer.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param listeners A list of Listeners used by the LoadBalancer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public LoadBalancerDescription withListeners(java.util.Collection<Listener> listeners) {
java.util.List<Listener> listenersCopy = new java.util.ArrayList<Listener>();
if (listeners != null) {
listenersCopy.addAll(listeners);
}
this.listeners = listenersCopy;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
sb.append("LoadBalancerName: " + loadBalancerName + ", ");
sb.append("Domain: " + domain + ", ");
sb.append("Listeners: " + listeners + ", ");
sb.append("}");
return sb.toString();
}
} |
/**
* Need to draw a visible danger zone around the map
* to avoid player frustration over unexplained explosions
*/
public class DangerZone {
private int w = 10;
private double angle = 0.0;
private double delta = Math.PI / 10.0;
private Rect r;
private int screenW2 = Constants.SCREEN_WIDTH/2;
private int screenH2 = Constants.SCREEN_HEIGHT/2;
private Paint paint;
public DangerZone() {
paint = new Paint();
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeWidth((float) w);
r = new Rect(0, 0, (int) Constants.MAX_MAP, (int) Constants.MAX_MAP);
}
public void draw(Canvas canvas, float cx, float cy, int color) {
angle = (angle + delta) % (Math.PI * 2.0);
int val = 20 + (int) Math.round(20.0 * Math.sin(angle));
boolean isRed = color == Color.RED;
paint.setColor(Color.rgb(val, (isRed) ? 0 : val, (isRed) ? 0 : val));
canvas.save();
canvas.translate(-cx + screenW2, -cy + screenH2);
canvas.drawRect(r, paint);
canvas.restore();
}
} |
// ParseArtifact parses the name of an artifact.
func ParseArtifact(name string) (Artifact, error) {
if n, err := parseSpecArtifact(name); err == nil {
return Artifact{name: n}, nil
} else if n, err := parseVersionArtifact(name); err == nil {
return Artifact{name: n}, nil
} else if n, err := parseApiArtifact(name); err == nil {
return Artifact{name: n}, nil
} else if n, err := parseProjectArtifact(name); err == nil {
return Artifact{name: n}, nil
}
return Artifact{}, fmt.Errorf("invalid artifact name %q, must match one of: %v", name, []string{
projectArtifactRegexp.String(),
apiArtifactRegexp.String(),
versionArtifactRegexp.String(),
specArtifactRegexp.String(),
})
} |
Catalyst-Directed Chemoselective Double Amination of Bromo-chloro(hetero)arenes: A Synthetic Route toward Advanced Amino-aniline Intermediates.
A chemoselective sequential one-pot coupling protocol was developed for preparing several amino-anilines in high yield as building blocks for active pharmaceutical ingredients (APIs). Site (Cl vs Br on electrophile) and nucleophile (amine vs imine) selectivity is dictated by the catalyst employed. A Pd-crotyl (t-BuXPhos) precatalyst selectively coupled the Ar-Br of the polyhaloarene with benzophenone imine, even in the presence of a secondary amine, while Pd-based RuPhos or (BINAP)Pd(allyl)Cl coupled the Ar-Cl site with secondary amines. |
{-|
Module : Numeric.ER.Real.Approx
Description : classes abstracting exact reals
Copyright : (c) <NAME>
License : BSD3
Maintainer : <EMAIL>
Stability : experimental
Portability : portable
Definitions of classes that describe what is
required from arbitrary precision approximations
of exact real numbers.
We introduce two levels of abstraction for these
approximations:
* 'ERApprox' =
Approximating a real number by a *set* of real numbers
that includes the approximated number.
Precision is measured using some fixed measure on the sets.
Operations are "safe" wrt inclusion.
The sets can sometimes be "anti-consistent" - being smaller than
the empty set in the inclusion order.
* 'ERInnerOuterApprox' =
Like 'ERApprox' with the addition of operations that are "inner rounded"
in the sense that each element of the rounded result set can
be obtained by the same operation performed on some elements of the argument set(s).
* 'ERIntApprox' =
Like ERApprox but assuming that the sets are
*intervals* of real numbers with finitely
representable endpoints.
To be imported qualified, usually with the synonym RA.
-}
module Numeric.ER.Real.Approx
(
ERApprox(..),
eqSingletons,
leqSingletons,
ltSingletons,
effIx2ra,
ERInnerOuterApprox(..),
ERIntApprox(..),
splitIRA,
equalIntervals,
exactMiddle,
maxExtensionR2R,
maxExtensionInnerR2R,
ERApproxApprox(..),
ERIntApproxApprox(..)
)
where
import Numeric.ER.BasicTypes
import qualified Numeric.ER.BasicTypes.ExtendedInteger as EI
import Data.Typeable
{-|
A type whose elements represent sets that can be used
to approximate a single extended real number with arbitrary precision.
Operations are "safe" with respect to inclusion, which means that
for any numbers admitted by the operand approximations the result
of the operation is admitted by the result approximation.
The sets can sometimes be "anti-consistent" - being smaller than
the empty set in the inclusion order.
This can be understood as indicating that not only is there no real number correctly
approximated here, but some numbers (ie those in the interior of the set)
are excluded more strongly than others.
Prime examples of such sets are directed "inverted" intervals such as [2,1].
Such sets arise naturally from "inner rounded" operations - see 'ERInnerOuterApprox'.
-}
class (Fractional ra, Show ra, Eq ra) => ERApprox ra
where
initialiseBaseArithmetic :: ra -> IO ()
getPrecision :: ra -> Precision
{-^
Precision is a measure of the set size. It can be infinite.
The default interpretation:
* If the diameter of the set is d, then the precision
should be near floor(- log_2 d).
-}
getGranularity :: ra -> Granularity
-- ^ the lower the granularity the bigger the rounding errors
setGranularityOuter :: Granularity -> ra -> ra
-- ^ increase or safely decrease granularity
setMinGranularityOuter :: Granularity -> ra -> ra
-- ^ ensure granularity is not below the first arg
isBottom :: ra -> Bool
-- ^ true if this approximation holds no information, ie it admits any real number
bottomApprox :: ra
-- ^ the bottom approximation - it admits any real number
isExact :: ra -> Bool
-- ^ true if this approximation admits only one real number
isConsistent :: ra -> Bool
{- ^ true iff this approximation admits at least one real number -}
isAnticonsistent :: ra -> Bool
{- ^ true if this approximation is anti-consistent, which is a computational error
unless we used inner rounded operations -}
toggleConsistency :: ra -> ra
{- ^
Toggle consistency - anti-consistency of the approximation.
Top is toggled with bottom.
Exact approximations are the only fixed points for this operation.
-}
isTop :: ra -> Bool
-- ^ true if this approximation is the most anti-consistent one
topApprox :: ra
-- ^ the top approximation - strongly rejects all real numbers
isDisjoint :: ra -> ra -> Bool
isDisjoint a b = not $ isConsistent $ a /\ b
isInteriorDisjoint :: ra -> ra -> Bool
isInteriorDisjoint a b = isAnticonsistent $ a /\ b
isBounded :: ra -> Bool
{- ^
True iff the approximation excludes infinity
and, if anti-consistent, does not strongly exclude infinity.
-}
plusInfinity :: ra
-- ^ an exact approximation admitting only the positive infinity
refines :: ra -> ra -> Bool
-- ^ first arg is a subset of the second arg
maybeRefines :: ra -> ra -> Maybe Bool
-- ^ like 'refines' but usable for types where 'refines' is only partially decidable
(/\) :: ra -> ra -> ra
-- ^ join; combining the information in two approximations of the same number
intersectMeasureImprovement ::
EffortIndex -> ra -> ra -> (ra, ra)
{-^
First component of result is the intersection and the second component:
* measures precision improvement of the intersection relative to the first argument
* is a positive number: 1 means no improvement, 2 means doubled precision, etc.
-}
equalReals :: ra -> ra -> Maybe Bool
-- ^ semantic semi-decidable equality test
compareReals :: ra -> ra -> Maybe Ordering
-- ^ semantic semi-decidable comparison
leqReals :: ra -> ra -> Maybe Bool
-- ^ semantic semi-decidable less-than-or-equal comparison
equalApprox :: ra -> ra -> Bool
-- ^ syntactic equality test
compareApprox :: ra -> ra -> Ordering
-- ^ syntactic linear ordering
double2ra :: Double -> ra
-- ^ safe approximate conversion
showApprox ::
Int {-^ number of relevant decimals to show -} ->
Bool {-^ should show granularity -} ->
Bool {-^ should show internal representation details -} ->
ra {-^ the approximation to show -} ->
String
{-|
Assuming the arguments are singletons, equality is decidable.
-}
eqSingletons :: (ERApprox ra) => ra -> ra -> Bool
eqSingletons s1 s2 =
case equalReals s1 s2 of
Just b -> b
_ -> False
{-|
Assuming the arguments are singletons, @<=@ is decidable.
-}
leqSingletons :: (ERApprox ra) => ra -> ra -> Bool
leqSingletons s1 s2 =
case compareReals s1 s2 of
Just EQ -> True
Just LT -> True
_ -> False
{-|
Assuming the arguments are singletons, @<@ is decidable.
-}
ltSingletons :: (ERApprox ra) => ra -> ra -> Bool
ltSingletons s1 s2 =
case compareReals s1 s2 of
Just LT -> True
_ -> False
{-|
This function converts
an effort index to a real number approximation.
Useful when an effort index is used in a formula
mixed with real approximations.
-}
effIx2ra ::
(ERApprox ra) =>
EffortIndex -> ra
effIx2ra = fromInteger . toInteger
{-|
A type whose elements represent some kind of nominal sets of real numbers
over which one can perform two kinds of arithmetic:
* "outer rounded": arithmetic that approximates maximal extensions from outside (ie the 'ERApprox' arithmetic)
* "inner rounded": arithmetic that approximates maximal extensions from inside, potentially leading to
anti-consistent set specifications (eg intervals whose endpoints are not in the usual order)
Another explanation of the difference:
* `outer': the approximation contains all the number(s) of interest
* `inner': all numbers eligible for the approximation are numbers of interest
Ie inner rounded operations have the property that each real number admitted by the result can
be obtained as the exact result of the same operation performed on some real numbers admitted
by the operand approximations.
While in "outer rounded" operations it is desirable to make the result set as small as
possible in order to reduce the amount of bogus result numbers,
in "inner rounded" operations it is desirable to make the result set as large as possible
to lose less of the genuinely feasible result numbers.
Inner rounded arithmetic is useful eg for proving/disproving inclusions "f(x) subset g(x)"
where f and g are expressions using arithmetic extended to sets.
For proving the inclusion, we need an inner rounded approximation of g(x)
and for disproving the inclusion we need an inner rounded approximation of f(x).
This is an abstraction of Kaucher's extended interval arithmetic
[Kaucher, E.: Interval Analysis in the Extended Interval Space IR,
Computing, Suppl. 2, 1980, pp. 33-49].
-}
class (ERApprox xra) => ERInnerOuterApprox xra
where
(/\:) :: xra -> xra -> xra
-- ^ inner rounded intersection
(+:) :: xra -> xra -> xra
-- ^ inner rounded addition
(-:) :: xra -> xra -> xra
-- ^ inner rounded subtraction
a -: b = a +: (negate b)
(*:) :: xra -> xra -> xra
-- ^ inner rounded multiplication
(/:) :: xra -> xra -> xra
-- ^ inner rounded division
setGranularityInner :: Granularity -> xra -> xra
-- ^ increase or safely decrease granularity
setMinGranularityInner :: Granularity -> xra -> xra
-- ^ ensure granularity is not below the first arg
{-|
A type whose elements represent sets that can be used
to approximate a recursive set of closed extended real number intervals
with arbitrary precision.
-}
--class (ERApprox sra) => ERSetApprox sra where
-- (\/) :: sra -> sra -> sra -- ^ union; either approximation could be correct
{-|
A type whose elements represent real *intervals* that can be used
to approximate a single extended real number with arbitrary precision.
Sometimes, these types can be used to approximate
a closed extended real number interval with arbitrary precision.
Nevertheless, this is not guaranteed.
-}
class (ERApprox ira) => ERIntApprox ira
where
doubleBounds :: ira -> (Double, Double)
floatBounds :: ira -> (Float, Float)
integerBounds :: ira -> (EI.ExtendedInteger, EI.ExtendedInteger)
bisectDomain ::
Maybe ira {-^ point to split at -} ->
ira {-^ interval to split -} ->
(ira, ira) -- ^ left and right, overlapping on a singleton
defaultBisectPt :: ira -> ira
-- | returns thin approximations of endpoints, in natural order
bounds :: ira -> (ira, ira)
-- | make an interval from thin approximations of endpoints
fromBounds :: (ira, ira) -> ira
{-|
meet, usually constructing interval from approximations of its endpoints
This does not need to be the meet of the real intervals
but it has to be a maximal element in the set of all
ira elements that are below the two parameters.
-}
(\/) :: ira -> ira -> ira
{-|
Return true if and only if the two intervals have equal endpoints.
-}
equalIntervals ::
(ERIntApprox ira) => ira -> ira -> Bool
equalIntervals d1 d2 =
d1L == d2L && d1U == d2U
where
(==) = eqSingletons
(d1L, d1U) = bounds d1
(d2L, d2U) = bounds d2
{-|
Split an interval to a sequence of intervals whose union is the
original interval using a given sequence of cut points.
The cut points are expected to be in increasing order and contained
in the given interval. Violations of this rule are tolerated.
-}
splitIRA ::
(ERIntApprox ira) =>
ira {-^ an interval to be split -} ->
[ira] {-^ approximations of the cut points in increasing order -} ->
[ira]
splitIRA interval splitPoints =
doSplit [] end pointsRev
where
(start, end) = bounds interval
pointsRev = reverse $ start : splitPoints
doSplit previousSegments nextRight [] = previousSegments
doSplit previousSegments nextRight (nextLeft : otherPoints) =
doSplit (nextLeft \/ nextRight : previousSegments) nextLeft otherPoints
{-|
* Return the endpoints of the interval as well as the exact midpoint.
* To be able to do this, there may be a need to increase granularity.
* All three singleton intervals are set to the same new granularity.
-}
exactMiddle ::
(ERIntApprox ira) =>
ira ->
(ira,ira,ira,Granularity)
exactMiddle dom =
case isExact domM of
True ->
(domL, domM, domR, gran)
False ->
(domLhg, domMhg, domRhg, higherGran)
where
(domL, domR) = bounds dom
gran = max (getGranularity domL) (getGranularity domR)
domM = (domL + domR) / 2
higherGran = gran + 1
domLhg = setMinGranularityOuter higherGran domL
domRhg = setMinGranularityOuter higherGran domR
domMhg = (domLhg + domRhg) / 2
{-|
This produces a function that computes the maximal extension of the
given function. A maximal extension function has the property:
f(I) = { f(x) | x in I }. Here we get this property only for the
limit function for its 'EffortIndex' tending to infinity.
For finite effort indices the function may add *outer* rounding
but it should be reasonably small.
-}
maxExtensionR2R ::
(ERIntApprox ira) =>
(EffortIndex -> ira -> [ira])
{-^ returns an *outer* approximation of all extrema within the interval -} ->
(EffortIndex -> ira -> ira)
{-^ an *outer* rounding function behaving well on sequences that intersect to a point -} ->
(EffortIndex -> ira -> ira)
{- ^ an outer rounding function behaving well on sequences that intersect to a non-empty interval -}
maxExtensionR2R getExtremes f ix x
| not $ isConsistent x =
toggleConsistency $
maxExtensionInnerR2R getExtremes f ix $ toggleConsistency x
| getPrecision x < effIx2prec ix =
foldl1 (\/) $ [f ix xL, f ix xR] ++ (getExtremes ix x)
-- x is thin enough (?), don't bother evaluating by endpoints and extrema:
| otherwise =
f ix x
where
(xL, xR) = bounds x
{-|
This produces a function that computes the maximal extension of the
given function. A maximal extension function has the property:
f(I) = { f(x) | x in I }. Here we get this property only for the
limit function for its 'EffortIndex' tending to infinity.
For finite effort indices the function may include *inner* rounding
but it should be reasonably small.
-}
maxExtensionInnerR2R ::
(ERIntApprox ira) =>
(EffortIndex -> ira -> [ira])
{-^ returns an *outer* approximation of all extrema within the interval -} ->
(EffortIndex -> ira -> ira)
{-^ an *outer* rounding function behaving well on sequences that intersect to a point -} ->
(EffortIndex -> ira -> ira)
{- ^ an inner rounding function behaving well on sequences that intersect to a non-empty interval -}
maxExtensionInnerR2R getExtremes f ix x
| not $ isConsistent x =
toggleConsistency $
maxExtensionR2R getExtremes f ix $ toggleConsistency x
| otherwise =
foldl1 (\/) $ map toggleConsistency $ [f ix xL, f ix xR] ++ (getExtremes ix x)
where
(xL, xR) = bounds x
{-|
A type whose elements are thought of as sets of approximations of real numbers.
Eg intervals of intervals, eg [[0,3],[1,2]] containing all intervals
whose left endpoint is between 0 and 1 and the right endpoint is between 2 and 3.
The upper bound interval can sometimes be anti-consistent,
eg [[0,3],[2,1]] containing all intervals (consistent as well as anti-consistent)
with a left endpoint between [0,2] and the right endpoint between [1,3].
-}
class ERApproxApprox xra
where
safeIncludes :: xra -> xra -> Bool
-- ^ safe inclusion of approximations
safeNotIncludes :: xra -> xra -> Bool
-- ^ safe negation of inclusion of approximations
includes :: xra -> xra -> Maybe Bool
-- ^ like 'safeIncludes' but usable for types where 'safeIncludes' is only partially decidable
includes aa1 aa2
| safeIncludes aa1 aa2 = Just True
| safeNotIncludes aa1 aa2 = Just False
| otherwise = Nothing
class ERIntApproxApprox xira
where
oiBounds :: xira -> ((xira, xira), (xira, xira))
fromOIBounds :: ((xira, xira), (xira, xira)) -> xira
-- safeIncludesMeasure :: xra -> xra -> Double
-- {- ^ positive result means inclusion satisfied,
-- the more negative the result is, the more likely it is
-- the inclusion is not satisfied -}
-- safeNotIncludesMeasure :: xra -> xra -> Double
-- includesMeasure :: xra -> xra -> (Maybe Bool, Double)
-- -- ^ when not decided, the higher the number, the larger the overlap that makes it impossible to decide
-- includesMeasure aa1 aa2
-- | includesMeasure >= 0 = Just True
-- | safeNotIncludes aa1 aa2 = Just False
-- | otherwise = Nothing
-- where
-- includeMeasure = safeIncludesMeasure aa1 aa2
-- excludeMeasure = safeNotIncludesMeasure aa1 aa2
|
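The outer/inner rounding distinction documented in the Haskell module above can be made concrete with directed rounding; a minimal Python sketch (an illustration only, independent of the module):

import math

def outer_add(a, b):
    # Outer rounding: lower bound down, upper bound up, so the result
    # contains every exact sum x + y with x in a and y in b.
    return (math.nextafter(a[0] + b[0], -math.inf),
            math.nextafter(a[1] + b[1], math.inf))

def inner_add(a, b):
    # Inner rounding: the bounds move towards each other, so every number in
    # the result is an exact sum of some x in a and y in b; the result may
    # become anti-consistent (lo > hi), like Kaucher's inverted intervals.
    return (math.nextafter(a[0] + b[0], math.inf),
            math.nextafter(a[1] + b[1], -math.inf))

x, y = (1.0, 2.0), (0.1, 0.1)      # 0.1 is not exactly representable
print(outer_add(x, y))              # slightly wider than [1.1, 2.1]
print(inner_add(x, y))              # slightly narrower than [1.1, 2.1]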
def serialize_dependency_parse_tree(sentence_id, parse_trees, state="raw", pretty=False, dump=True):
options = {"ensure_ascii": False}
    if not isinstance(parse_trees, dict):
if len(parse_trees) == 0:
empty_tree = {
sentence_id: {
"meta": {
"id": sentence_id,
"state": state,
"type": "sentence"
},
"data": {
"root": None,
"nodes": {}
}
}
}
            if pretty:
                options["indent"] = 4
            if dump:
                return json.dumps(empty_tree, **options)
            return empty_tree
        parse_tree = vars(next(iter(parse_trees)))
else:
parse_tree = parse_trees
simplified_tree = {
"root": parse_tree["root"]["address"],
"nodes": {
int(number): filter_dict(node, DEPENDENCY_TREE_KEEP_FIELDS)
for number, node in parse_tree["nodes"].items()
}
}
if pretty:
options["indent"] = 4
serialized_dependency_parse_tree = {
sentence_id: {
"meta": {
"id": sentence_id,
"state": state,
"type": "sentence"
},
"data": simplified_tree
}
}
if dump:
return json.dumps(serialized_dependency_parse_tree, **options)
return serialized_dependency_parse_tree |
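A sketch of how the serializer above might be called with an already-simplified dict; filter_dict and DEPENDENCY_TREE_KEEP_FIELDS come from the surrounding module, and the node fields here are illustrative:

parse_tree = {
    "root": {"address": 0},
    "nodes": {
        0: {"word": None, "deps": {"nsubj": [1]}},
        1: {"word": "Ada", "deps": {}},
    },
}
print(serialize_dependency_parse_tree("s1", parse_tree, pretty=True))
# -> JSON of the form {"s1": {"meta": {...}, "data": {"root": 0, "nodes": {...}}}}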
<reponame>thedigitaldesign/react-redux-experiments
import React from 'react'
// Packages
import { shallow } from 'enzyme'
// Utils
import { findByTestAttribute, storeFactory } from '../../test/testUtils'
// Component
import { User } from './User'
const setup = (initialState: any = {}) => {
const store = storeFactory(initialState)
const wrapper = shallow(<User store={store} />)
console.log('testing-----')
console.log(wrapper)
return wrapper
}
describe('Components > User', () => {
const foo = setup()
it('should ', () => {
});
// const user = shallow(<User />)
// it('should render correctly', () => {
// expect(user).toMatchSnapshot()
// })
// describe('when typing into the user input', () => {
// const username = '<NAME>'
// beforeEach(() => {
// user.find('.user-input').simulate('change', { target: { value: username } })
// })
// })
})
|
Do the chain rules for matrix functions hold without commutativity?
This article shows that the commutativity condition $A(t)A'(t) - A'(t)A(t) = 0$ is often not necessary to guarantee the chain rules for matrix functions $\frac{d}{dt}f(A(t)) = f'(A(t))\,A'(t)$ and $\frac{d}{dt}f(A(t)) = A'(t)\,f'(A(t))$, where A(t) is a square matrix of differentiable functions and f is an analytic function. A further question on the chain rules is presented and discussed. |
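As a quick sanity check of the commuting case (an illustration, not taken from the paper): for $A(t) = tB$ with $B$ a constant square matrix, $A(t)$ commutes with $A'(t) = B$, and expanding the analytic $f$ as a power series $f(X) = \sum_{k \ge 0} a_k X^k$ yields both forms of the chain rule at once:

$$\frac{d}{dt} f(tB) = \sum_{k \ge 1} a_k k\, t^{k-1} B^k = B\, f'(tB) = f'(tB)\, B.$$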
import os
import json
import datetime
from .log import PypeLogger
log = PypeLogger().get_logger(__name__)
# Prepare json decode error for `json` module (Maya, Nuke)
if hasattr(json.decoder, "JSONDecodeError"):
# JSONDecodeError is defined since Python 3.5
JsonError = json.decoder.JSONDecodeError
else:
JsonError = ValueError
# Keep track of project from 'get_presets' last call
global_last_project = None
def get_datetime_data(datetime_obj=None):
"""Returns current datetime data as dictionary.
:param datetime_obj: may return data for specific datetime object
:type datetime_obj: datetime, optional
:return: prepared date & time data
:rtype: dict
Available keys:
"d" - <Day of month number> in shortest possible way.
"dd" - <Day of month number> with 2 digits.
"ddd" - <Week day name> shortened week day. e.g.: `Mon`, ...
"dddd" - <Week day name> full name of week day. e.g.: `Monday`, ...
"m" - <Month number> in shortest possible way. e.g.: `1` if January
"mm" - <Month number> with 2 digits.
"mmm" - <Month name> shortened month name. e.g.: `Jan`, ...
"mmmm" - <Month name> full month name. e.g.: `January`, ...
"yy" - <Year number> shortened year. e.g.: `19`, `20`, ...
"yyyy" - <Year number> full year. e.g.: `2019`, `2020`, ...
"H" - <Hours number 24-hour> shortened hours.
"HH" - <Hours number 24-hour> with 2 digits.
"h" - <Hours number 12-hour> shortened hours.
"hh" - <Hours number 12-hour> with 2 digits.
"ht" - <Midday type> AM or PM.
"M" - <Minutes number> shortened minutes.
"MM" - <Minutes number> with 2 digits.
"S" - <Seconds number> shortened seconds.
"SS" - <Seconds number> with 2 digits.
"""
if not datetime_obj:
datetime_obj = datetime.datetime.now()
year = datetime_obj.strftime("%Y")
month = datetime_obj.strftime("%m")
month_name_full = datetime_obj.strftime("%B")
month_name_short = datetime_obj.strftime("%b")
day = datetime_obj.strftime("%d")
weekday_full = datetime_obj.strftime("%A")
weekday_short = datetime_obj.strftime("%a")
hours = datetime_obj.strftime("%H")
hours_midday = datetime_obj.strftime("%I")
hour_midday_type = datetime_obj.strftime("%p")
minutes = datetime_obj.strftime("%M")
seconds = datetime_obj.strftime("%S")
return {
"d": str(int(day)),
"dd": str(day),
"ddd": weekday_short,
"dddd": weekday_full,
"m": str(int(month)),
"mm": str(month),
"mmm": month_name_short,
"mmmm": month_name_full,
"yy": str(year[2:]),
"yyyy": str(year),
"H": str(int(hours)),
"HH": str(hours),
"h": str(int(hours_midday)),
"hh": str(hours_midday),
"ht": hour_midday_type,
"M": str(int(minutes)),
"MM": str(minutes),
"S": str(int(seconds)),
"SS": str(seconds),
}
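A usage sketch of get_datetime_data; the datetime value is illustrative:

example = get_datetime_data(datetime.datetime(2020, 1, 5, 9, 7, 3))
print(example["yyyy"], example["mm"], example["dd"])        # 2020 01 05
print("{yyyy}-{mm}-{dd}T{HH}:{MM}:{SS}".format(**example))  # 2020-01-05T09:07:03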
def load_json(fpath, report_errors=True):
# Load json data
with open(fpath, "r") as opened_file:
lines = opened_file.read().splitlines()
# prepare json string
standard_json = ""
for line in lines:
# Remove all whitespace on both sides
line = line.strip()
# Skip blank lines
if len(line) == 0:
continue
standard_json += line
# Check if has extra commas
extra_comma = False
if ",]" in standard_json or ",}" in standard_json:
extra_comma = True
standard_json = standard_json.replace(",]", "]")
standard_json = standard_json.replace(",}", "}")
if extra_comma and report_errors:
log.error("Extra comma in json file: \"{}\"".format(fpath))
# return empty dict if file is empty
if standard_json == "":
if report_errors:
log.error("Empty json file: \"{}\"".format(fpath))
return {}
# Try to parse string
try:
return json.loads(standard_json)
except JsonError:
# Return empty dict if it is first time that decode error happened
if not report_errors:
return {}
        # Reproduce the exact same exception, but the traceback then contains
        # better information about the position of the error in the loaded json
try:
with open(fpath, "r") as opened_file:
json.load(opened_file)
except JsonError:
log.warning(
"File has invalid json format \"{}\"".format(fpath),
exc_info=True
)
return {}
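A sketch of the extra-comma tolerance in load_json, using a throwaway temp file:

import tempfile
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
    tmp.write('{\n  "a": 1,\n}\n')   # trailing comma before the closing brace
print(load_json(tmp.name))           # logs "Extra comma ..." and returns {'a': 1}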
def collect_json_from_path(input_path, report_errors=True):
r""" Json collector
iterate through all subfolders and json files in *input_path*
Example:
``{input_path}/path/to/file.json`` will return dictionary
.. code-block:: none
{'path':
{'to':
{'file': {file.json data}
}
}
"""
output = None
if os.path.isdir(input_path):
output = {}
for file in os.listdir(input_path):
full_path = os.path.sep.join([input_path, file])
if os.path.isdir(full_path):
loaded = collect_json_from_path(full_path, report_errors)
if loaded:
output[file] = loaded
else:
basename, ext = os.path.splitext(os.path.basename(file))
if ext == '.json':
output[basename] = load_json(full_path, report_errors)
else:
basename, ext = os.path.splitext(os.path.basename(input_path))
if ext == '.json':
output = load_json(input_path, report_errors)
return output
def get_presets(project=None, first_run=False):
""" Loads preset files with usage of 'collect_json_from_path'
Default preset path is set to: ``{PYPE_CONFIG}/presets``
Project preset path is set to: ``{PYPE_PROJECT_CONFIGS}/*project_name*``
- environment variable **PYPE_CONFIG** is required
- **PYPE_PROJECT_CONFIGS** is needed only for per-project overrides
Returns:
- None
- if default path does not exist
- default presets (dict)
- if project_name is not set
- if project's presets folder does not exist
- project presets (dict)
- if project_name is set and include override data
"""
# config_path should be set from environments?
config_path = os.path.normpath(os.environ['PYPE_CONFIG'])
preset_items = [config_path, 'presets']
config_path = os.path.sep.join(preset_items)
if not os.path.isdir(config_path):
log.error('Preset path was not found: "{}"'.format(config_path))
return None
default_data = collect_json_from_path(config_path, first_run)
if not project:
project = os.environ.get('AVALON_PROJECT', None)
global global_last_project
if first_run:
global_last_project = None
if not project:
return default_data
project_configs_path = os.environ.get('PYPE_PROJECT_CONFIGS')
if not project_configs_path:
return default_data
project_configs_path = os.path.normpath(project_configs_path)
project_config_items = [project_configs_path, project, 'presets']
project_config_path = os.path.sep.join(project_config_items)
if not os.path.isdir(project_config_path):
log.warning('Preset path for project {} not found: "{}"'.format(
project, project_config_path
))
return default_data
if project != global_last_project:
first_run = True
global_last_project = project
project_data = collect_json_from_path(project_config_path, first_run)
return update_dict(default_data, project_data)
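A minimal sketch of the environment get_presets expects; the paths are illustrative and the directories must exist:

os.environ["PYPE_CONFIG"] = "/studio/pype-config"               # -> /studio/pype-config/presets
os.environ["PYPE_PROJECT_CONFIGS"] = "/studio/project-configs"  # -> .../<project>/presets
presets = get_presets(project="alpha")  # defaults merged with "alpha" overrides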
def get_init_presets(project=None):
""" Loads content of presets like get_presets() but also evaluate init.json ponter to default presets
Returns:
- None
- if default path does not exist
- default presets (dict)
- if project_name is not set
- if project's presets folder does not exist
- project presets (dict)
- if project_name is set and include override data
"""
presets = get_presets(project)
try:
        # check whether the project's custom directory provides an init file:
        # `{PYPE_PROJECT_CONFIGS}/[PROJECT_NAME]/init.json`
        # init.json defines the preset names to be used
p_init = presets["init"]
presets["colorspace"] = presets["colorspace"][p_init["colorspace"]]
presets["dataflow"] = presets["dataflow"][p_init["dataflow"]]
except KeyError:
log.warning("No projects custom preset available...")
presets["colorspace"] = presets["colorspace"]["default"]
presets["dataflow"] = presets["dataflow"]["default"]
log.info("Presets `colorspace` and `dataflow` loaded from `default`...")
return presets
def update_dict(main_dict, enhance_dict):
""" Merges dictionaries by keys.
    The function calls itself recursively when the value under a key is itself a dictionary.
    .. note:: does not override the whole value at the first matching key,
        but only the values that differ, taken from enhance_dict
"""
for key, value in enhance_dict.items():
if key not in main_dict:
main_dict[key] = value
elif isinstance(value, dict) and isinstance(main_dict[key], dict):
main_dict[key] = update_dict(main_dict[key], value)
else:
main_dict[key] = value
return main_dict
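A small sketch of the merge semantics of update_dict:

main = {"a": {"x": 1, "y": 2}, "b": 1}
enhance = {"a": {"y": 20, "z": 30}, "c": 3}
print(update_dict(main, enhance))
# {'a': {'x': 1, 'y': 20, 'z': 30}, 'b': 1, 'c': 3}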
|
/*********************************************************************
* FUNCTION: reportAnIEvent(DWORD dwIdEvent, WORD cStrings, *
* LPTSTR *pszStrings); *
* *
* PURPOSE: add the event to the event log *
* *
* INPUT: the event ID to report in the log, the number of insert *
* strings, and an array of null-terminated insert strings *
* *
* RETURNS: none *
*********************************************************************/
void reportAnIEvent(DWORD dwIdEvent, WORD cStrings, LPTSTR *pszStrings)
{
HANDLE hAppLog;
BOOL bSuccess;
hAppLog = RegisterEventSource(NULL,
"NTP");
PERR(hAppLog, "RegisterEventSource");
bSuccess = ReportEvent(hAppLog,
EVENTLOG_INFORMATION_TYPE,
0,
dwIdEvent,
NULL,
cStrings,
0,
pszStrings,
NULL);
PERR(bSuccess, "ReportEvent");
DeregisterEventSource(hAppLog);
return;
} |
def initializeHeartValveLib(self):
moduleDir = os.path.dirname(self.parent.path)
usPresetsScenePath = os.path.join(moduleDir, 'Resources/VrPresets', 'US-VrPresets.mrml')
HeartValveLib.setup(usPresetsScenePath) |
"""
https://codeforces.com/problemset/problem/996/A
"""
m = int(input())
bills = 0
while m > 0:
if m >= 1000:
m -= 1000
bills += 9
elif m >= 100:
m -= 100
elif m >= 20:
m -= 20
elif m >= 10:
m -= 10
elif m >= 5:
m -= 5
else:
m-= 1
bills += 1
print(bills) |
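The same answer can be computed directly with integer division over the denominations; a quick cross-check of the greedy loop above:

def min_bills(m: int) -> int:
    bills = 0
    for denom in (100, 20, 10, 5, 1):
        bills += m // denom
        m %= denom
    return bills

assert min_bills(1125) == 13   # 11*100 + 1*20 + 1*5, matching the loop above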
package com.cjburkey.freeboi.block;
import com.cjburkey.freeboi.value.Pos;
import com.cjburkey.freeboi.world.Chunk;
public final class BlockState {
public final BlockType blockType;
public final Chunk chunk;
public final boolean air;
public final int posInChunkX;
public final int posInChunkY;
public final int posInChunkZ;
private Pos blockPosInChunk;
public BlockState(BlockType blockType, Chunk chunk, int posInChunkX, int posInChunkY, int posInChunkZ) {
this.blockType = blockType;
air = blockType == null;
this.chunk = chunk;
this.posInChunkX = posInChunkX;
this.posInChunkY = posInChunkY;
this.posInChunkZ = posInChunkZ;
}
public boolean getIsTransparent() {
return air || blockType.getIsTransparent();
}
public Pos getPosInChunk() {
if (blockPosInChunk == null) {
blockPosInChunk = new Pos(posInChunkX, posInChunkY, posInChunkZ);
}
return blockPosInChunk;
}
public Pos getWorldPos() {
return chunk.getChunkWorldPos().add(posInChunkX, posInChunkY, posInChunkZ);
}
}
|
<filename>Lab2/Lab2.c<gh_stars>0
#include <stdio.h>
#include <stdlib.h>
struct node{
int data;
struct node *next;
};
void printLinkedList(struct node *head){
struct node *cur = head;
while(cur->next != NULL){
printf("%d ", cur->data);
cur = cur->next;
}
printf("%d\n", cur->data);
}
int printListRec(struct node *head){
struct node *cur = head;
printf("%d ", cur->data);
if(cur->next == NULL){
printf("\n");
return 0;
}
cur = cur->next;
    return printListRec(cur);
}
struct node2{
void *p_data; //a pointer to data (allocated with malloc)
int type; // 0 if int, 1 if float, 2 if double
    struct node2 *next; // must point to node2, not node, for the list to link correctly
};
int print2(struct node2 *head){
struct node2 *cur = head;
if(cur->type == 0){
int *value = cur->p_data;
printf("%d ", *value);}
else if(cur->type == 1){
float *value2 = cur->p_data;
printf("%f ", *value2);}
else if(cur->type == 2){
double *value3 = cur->p_data;
printf("%f ", *value3);
}
if(cur->next == NULL){
printf("\n");
return 0;
}
cur = cur->next;
    return print2(cur);
}
void appendInt(struct node2 *head, int *p_value){
//int *p_value = (int *)malloc(sizeof(int));
struct node2 *cur = head;
while(cur->next != NULL){
cur = cur->next;
}
//cur->next is now equal to null
struct node2 *node = (struct node2 *)malloc(sizeof(struct node2));
cur->next = node;
node->p_data = p_value;
node->type = 0;
node->next = NULL;
}
void appendFloat(struct node2 *head, float *p_value){
struct node2 *cur = head;
while(cur->next != NULL){
cur = cur->next;
}
//cur->next is now equal to null
struct node2 *node = (struct node2 *)malloc(sizeof(struct node2));
cur->next = node;
node->p_data = p_value;
node->type = 1;
node->next = NULL;
}
void appendDouble(struct node2 *head, double *p_value){
struct node2 *cur = head;
while(cur->next != NULL){
cur = cur->next;
}
//cur->next is now equal to null
struct node2 *node = (struct node2 *)malloc(sizeof(struct node2));
cur->next = node;
node->p_data = p_value;
node->type = 2;
node->next = NULL;
}
int main(){
struct node *node0 = (struct node *)malloc(sizeof(struct node));
node0->data = 3;
struct node *node1 = (struct node *)malloc(sizeof(struct node));
node0->next = node1;
node1->data = 5;
struct node *node2 = (struct node *)malloc(sizeof(struct node));
node1->next = node2;
node2->data = 43;
node2->next = NULL;
printLinkedList(node0);
printListRec(node0);
struct node2 *node00 = (struct node2 *)malloc(sizeof(struct node2));
node00->type = 0;
int a = 3;
node00->p_data = (int *)&a;
struct node2 *node01 = (struct node2 *)malloc(sizeof(struct node2));
node00->next = node01;
float b = 5;
node01->type = 1;
node01->p_data = (int *)&b;
struct node2 *node02 = (struct node2 *)malloc(sizeof(struct node2));
node01->next = node02;
double c = 2.7;
node02->type = 2;
node02->p_data = (double *)&c;
node02->next = NULL;
print2(node00);
int value = 6;
appendInt(node00, &value);
float value2 = 7;
appendFloat(node00, &value2);
double value3 = 4.343;
appendDouble(node00, &value3);
print2(node00);
//print2(node0);
return 0;
}
|
<reponame>republicprotocol/store<gh_stars>10-100
// Package kv defines a standard interface for key-value storage and iteration.
// It supports persistent storage using LevelDB and BadgerDB. It also supports
// non-persistent storage using concurrent-safe in-memory maps.
package kv
import (
"github.com/renproject/kv/badgerdb"
"github.com/renproject/kv/cache/lru"
"github.com/renproject/kv/cache/ttl"
"github.com/renproject/kv/codec"
"github.com/renproject/kv/db"
"github.com/renproject/kv/leveldb"
"github.com/renproject/kv/memdb"
)
var (
// ErrKeyNotFound is returned when there is no value associated with a key.
ErrKeyNotFound = db.ErrKeyNotFound
// ErrEmptyKey is returned when a key is the empty string.
ErrEmptyKey = db.ErrEmptyKey
// ErrIndexOutOfRange is returned when the iterator index is less than zero,
// or, greater than or equal to the size of the iterator.
ErrIndexOutOfRange = db.ErrIndexOutOfRange
)
type (
// A Table is an abstraction over the DB that partitions key/value pairs. The
// Table name must be unique compared to other Table names.
Table = db.Table
// A DB is a low-level interface for storing and iterating over key/value
// pairs.
DB = db.DB
// A Codec defines an encoding/decoding between values and bytes.
Codec = db.Codec
// An Iterator is used to lazily iterate over key/value pairs.
Iterator = db.Iterator
)
// Codecs
var (
// BinaryCodec is a binary codec that marshals and unmarshals values using
// the standard Golang Binary marshalers. For more information, see
// https://golang.org/pkg/encoding.
BinaryCodec = codec.BinaryCodec
// JSONCodec is a json codec that marshals and unmarshals values using the
// standard Golang JSON marshalers. For more information, see
// https://golang.org/pkg/encoding/json.
JSONCodec = codec.JSONCodec
// GobCodec is a gob codec that encodes and decodes values using gob. For
// more information, see https://golang.org/pkg/encoding/gob.
GobCodec = codec.GobCodec
)
var (
// NewMemDB returns a key-value database that is implemented in-memory. This
// implementation is fast, but does not store data on-disk. It is safe for
// concurrent use.
NewMemDB = memdb.New
// NewBadgerDB returns a key-value database that is implemented using
// BadgerDB. For more information, see https://github.com/dgraph-io/badger.
NewBadgerDB = badgerdb.New
// NewLevelDB returns a key-value database that is implemented using
// levelDB. For more information, see https://github.com/syndtr/goleveldb.
NewLevelDB = leveldb.New
// NewTable returns a new table based on the given DB and codec.
NewTable = db.NewTable
)
var (
// NewLRUTable wraps a given Table and creates a Table which has lru cache.
NewLRUTable = lru.NewLruTable
// NewTTLCache wraps a given DB and creates a time-to-live DB. It will
// automatically prune the data in the db until the context expires.
NewTTLCache = ttl.New
)
|
/*
* The locked version of hpcrun_addr_to_interval(). Lookup the PC
* address in the interval tree and return a pointer to the interval
* containing that address.
*
* Returns: pointer to unwind_interval struct if found, else NULL.
* The caller must hold the ui-tree lock.
*/
splay_interval_t *
hpcrun_addr_to_interval_locked(void *addr)
{
void *fcn_start, *fcn_end;
load_module_t *lm;
interval_status istat;
interval_tree_node *p, *q;
splay_interval_t *ans;
p = interval_tree_lookup(&ui_tree_root, addr);
if (p != NULL) {
TMSG(UITREE_LOOKUP, "found in unwind tree: addr %p", addr);
return (splay_interval_t *)p;
}
UI_TREE_UNLOCK;
bool ret = fnbounds_enclosing_addr(addr, &fcn_start, &fcn_end, &lm);
UI_TREE_LOCK;
if (! ret) {
TMSG(UITREE, "BAD fnbounds_enclosing_addr failed: addr %p", addr);
return (NULL);
}
if (addr < fcn_start || fcn_end <= addr) {
TMSG(UITREE, "BAD fnbounds_enclosing_addr failed: addr %p "
"not within fcn range %p to %p", addr, fcn_start, fcn_end);
return (NULL);
}
istat = build_intervals(fcn_start, fcn_end - fcn_start);
if (istat.first == NULL) {
TMSG(UITREE, "BAD build_intervals failed: fcn range %p to %p",
fcn_start, fcn_end);
return (NULL);
}
TMSG(UITREE, "begin unwind insert addr %p, fcn range %p to %p",
addr, fcn_start, fcn_end);
if (ENABLED(UITREE_VERIFY)) {
interval_tree_verify(ui_tree_root, "UITREE");
}
ans = NULL;
for (p = istat.first; p != NULL; p = q)
{
q = RIGHT(p);
if (START(p) >= END(p)) {
TMSG(UITREE, "BAD unwind interval [%p, %p) reverse order",
START(p), END(p));
}
else if (START(p) < fcn_start || fcn_end < END(p)) {
TMSG(UITREE, "BAD unwind interval [%p, %p) not within fcn range",
START(p), END(p));
free_ui_node_locked(p);
}
else if (interval_tree_insert(&ui_tree_root, p) != 0) {
TMSG(UITREE, "BAD unwind interval [%p, %p) insert failed",
START(p), END(p));
free_ui_node_locked(p);
}
else {
TMSG(UITREE, "unwind insert [%p, %p)", START(p), END(p));
}
if (START(p) <= addr && addr < END(p)) {
ans = p;
}
}
if (ans) {
ans->lm = lm;
}
if (ENABLED(UITREE_VERIFY)) {
interval_tree_verify(ui_tree_root, "UITREE");
}
TMSG(UITREE_LOOKUP, "unwind lookup, addr = %p, ans = %p", addr, ans);
return (ans);
} |
<gh_stars>0
#include <fileaccess/file_blockdev.h>
#include <blockdevs/parttable_readers.h>
#include <blockdevs/offset_blockdev.h>
#include <filesystems/filesystem.h>
#include <iostream>
#include <vector>
#include <string>
#include "ls.h"
#include "cat.h"
int blockdevmain(std::shared_ptr<blockdev> bdev, std::string fsname, std::vector<std::string>::iterator &args, const std::vector<std::string>::iterator &args_end) {
std::shared_ptr<filesystem> fs = open_filesystem(fsname, bdev);
if (!fs) {
std::cerr << "Failed to open filesystem " << fsname << "\n";
return 1;
}
std::shared_ptr<directory> rootdir = fs->GetRootDirectory(fs);
if (!rootdir) {
std::cerr << "Failed to open filesystem root directory " << fsname << "\n";
return 1;
}
if (args != args_end) {
auto cmd = *args;
++args;
if (cmd == "ls") {
return ls(rootdir, args, args_end);
} else if (cmd == "cat") {
return cat(rootdir, args, args_end);
} else {
std::cerr << "Invalid command: " << cmd << "\n";
return 1;
}
}
for (auto entry : rootdir->Entries()) {
auto item = entry->Item();
std::cout << std::oct << item->Mode() << std::dec << " " << item->Size() << " " << entry->Name() << "\n";
}
return 0;
}
int cppmain(std::vector<std::string> args) {
auto iterator = args.begin();
if (iterator == args.end()) {
return 0;
}
std::string cmd = *iterator;
if (++iterator == args.end()) {
return 0;
}
std::string blockdev_name = *iterator;
if (++iterator == args.end()) {
return 0;
}
std::size_t blocksize{0};
{
std::string blksz = *iterator;
char *endp;
blocksize = strtol(blksz.c_str(), &endp, 10);
if (*endp != '\0') {
std::cerr << "Invalud blocksize: " << blksz << "\n";
return 1;
}
}
std::shared_ptr<file_blockdev> fblockdev{new file_blockdev(blockdev_name, blocksize)};
std::cout << blockdev_name << " blocksize " << blocksize << " - partitions:\n";
parttable_readers parttableReaders{};
auto parttable = parttableReaders.ReadParttable(fblockdev);
if (parttable) {
std::cout << "Partition table of type " << parttable->GetTableType() << ":\n";
for (auto entry: parttable->GetEntries()) {
std::cout << " " << entry->GetOffset() << ", " << entry->GetSize() << ", " << entry->GetType() << "\n";
}
}
if (++iterator == args.end()) {
return 0;
}
std::shared_ptr<blockdev> fs_blockdev = fblockdev;
if (parttable) {
std::string i_part = *iterator;
char *endp;
int part_idx = strtol(i_part.c_str(), &endp, 10);
if (*endp != '\0' || part_idx < 0 || part_idx >= parttable->GetEntries().size()) {
std::cerr << "Invalud partition index: " << i_part << "\n";
return 1;
}
if (++iterator == args.end()) {
return 0;
}
auto part_entry = parttable->GetEntries()[part_idx];
std::shared_ptr<blockdev> partdev{new offset_blockdev(fblockdev, part_entry->GetOffset(), part_entry->GetSize())};
fs_blockdev = partdev;
}
std::string fs_name = *iterator;
{
bool found = false;
auto filesystems = get_filesystem_providers();
for (auto provider: filesystems) {
if (fs_name == provider) {
found = true;
break;
}
}
if (!found) {
std::cerr << fs_name << " is not a valid filesystem name. Available filesystems:\n";
for (auto provider: filesystems) {
std::cerr << " - " << provider << "\n";
}
return 1;
}
}
++iterator;
return blockdevmain(fs_blockdev, fs_name, iterator, args.end());
}
int main(int argc, char **argv) {
std::vector<std::string> args{};
for (int i = 0; i < argc; i++) {
args.push_back(std::string(argv[i]));
}
init_filesystem_providers();
register_filesystem_providers();
return cppmain(args);
}
|
#ifndef RingBufferHPP
#define RingBufferHPP
#include <Tools/RingBuffer.h>
template <typename DataType, uint16_t bufferSize>
RingBuffer<DataType, bufferSize>::RingBuffer()
: values()
, currentIndex(0)
, filled(false)
{
static_assert(std::is_arithmetic<DataType>::value, "type must be a numeric type");
}
template <typename DataType, uint16_t bufferSize>
RingBuffer<DataType, bufferSize>& RingBuffer<DataType, bufferSize>::operator+=(DataType value)
{
add(value);
return *this;
}
template <typename DataType, uint16_t bufferSize>
RingBuffer<DataType, bufferSize>::operator DataType() const
{
return average();
}
template <typename DataType, uint16_t bufferSize>
DataType RingBuffer<DataType, bufferSize>::add(DataType value)
{
const DataType oldValue = values[currentIndex];
values[currentIndex] = value;
currentIndex++;
if (bufferSize <= currentIndex)
{
currentIndex = 0;
if (!filled)
filled = true;
}
return oldValue;
}
template <typename DataType, uint16_t bufferSize>
void RingBuffer<DataType, bufferSize>::clear()
{
currentIndex = 0;
filled = false;
}
template <typename DataType, uint16_t bufferSize>
DataType RingBuffer<DataType, bufferSize>::average() const
{
if (!filled && 0 == currentIndex)
return 0;
float sum = 0;
uint8_t maxIndex = filled ? bufferSize : currentIndex;
for (uint8_t index = 0; index < bufferSize; index++)
sum += values[index];
const float fAverage = sum / static_cast<float>(maxIndex);
return static_cast<DataType>(fAverage);
}
#endif // RingBufferHPP
|
// NewChainFaucet creates a new faucet command to send coins to accounts.
func NewChainFaucet() *cobra.Command {
c := &cobra.Command{
Use: "faucet [address] [coin<,...>]",
Short: "Send coins to an account",
Args: cobra.ExactArgs(2),
RunE: chainFaucetHandler,
}
flagSetPath(c)
c.Flags().AddFlagSet(flagSetHome())
c.Flags().BoolP("verbose", "v", false, "Verbose output")
return c
} |
def derive_meme_path(meme_image, top, bottom, ext):
token = "%s|%s|%s" % (meme_image, top, bottom)
meme_id = md5(token.encode('utf-8')).hexdigest()
file_path = '%s.%s' % (meme_id, ext)
return MEME_PATH + file_path |
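A usage sketch of derive_meme_path; MEME_PATH and the arguments are illustrative:

from hashlib import md5          # the snippet assumes md5 is already imported
MEME_PATH = "/tmp/memes/"        # illustrative prefix
print(derive_meme_path("grumpy-cat.jpg", "I HAD FUN ONCE", "IT WAS AWFUL", "jpg"))
# -> /tmp/memes/<32-hex md5 of 'grumpy-cat.jpg|I HAD FUN ONCE|IT WAS AWFUL'>.jpg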
A Conservative cloak-and-dagger caper aimed at capturing a Liberal candidate in a verbal gaffe has taken another twist, with the governing party producing expert audio analysis to back up its version of who said what.
The Conservative party asked Edward J. Primeau, an audio forensic expert with 30 years experience, to analyze audio of a secretly recorded conversation involving Marlo Raynolds, the Liberal candidate in Banff-Airdrie.
In a report to the party, given to The Canadian Press, Primeau concludes that it was indeed Raynolds who said the Tory income-splitting plan would give couples with children money that they'd waste on television sets and cars, rather than caring for their kids.
Raynolds was flabbergasted Wednesday to learn the Conservatives are refusing to back down, even though another individual involved in the conversation, Tam McTavish, has said he's the one who uttered the offending remarks.
Sun Media has retracted the story on which the Tories based their attacks.
"It's quite astounding that the Conservatives continue to pursue this," Raynolds said in an interview.
He said he'll continue demanding an apology "until I clear my name because I know what I said and Tam knows what he said."
'This is getting silly'
For his part, McTavish said he'd be happy to provide the expert with another audio sample of his voice, repeating word for word what is said in the disputed audio.
"But this is getting silly. It really is as simple as everyone who knows me recognizes it (as me)," he said.
The audio — surreptitiously recorded by a woman identified as a Conservative operative who has conducted previous sting operations against Liberals — was given to Sun Media personality Brian Lilley.
He attributed the remarks to Raynolds in a column and on his television show last week. Sun Media issued a retraction Tuesday.
Four Conservative MPs, including Employment Minister Jason Kenney, have used the Sun story to attack Raynolds — and, by extension, all Liberals — for thinking they know better than parents how they should spend their money and care for their kids.
Since the retraction, they've simply doubled down on the matter.
"I think it's actually quite a remarkable situation when Sun News is demonstrating a higher level of ethical standards than the Conservative cabinet," Liberal Leader Justin Trudeau said Wednesday.
In the House of Commons, Liberal MP Kevin Lamoureux asked Kenney to show the same "standards of decency" as Sun Media and apologize.
Tory MPs refuse to apologize
Kenney was having none of it.
"That Liberal candidate (Raynolds), that Liberal member (Lamoureux) and every Liberal member opposes tax fairness for families," Kenney retorted, asserting that the Liberals are opposed to all the family-focused tax cuts the Conservative government has implemented.
"They want to take these benefits away from families, and they do so because they believe that Liberal politicians known better how to spend money than do moms and dads. We profoundly disagree on this side."
The Conservative MP for Banff-Airdrie, Blake Richards, also refused to apologize.
"Well, certainly the people who were at the meeting maintain that the statements were made by the Liberal candidate. So, you know, I would encourage people to just check the record for themselves," he said outside a Conservative caucus meeting.
The audio, recorded during a Nov. 13 public meeting in Canmore, Alta., is a little over two minutes long and of varying quality.
It is clear that at least two men and one woman are involved in the conversation about income splitting.
Expert '100 per cent certain'
The relevant remarks are toward the end, by which time the audio is muffled and the voices partially obscured by background noise.
Lilley has told Raynolds the poor quality of the audio was due to the female Conservative supporter "zipping up her coat when she thought you were getting nervous that she might be recording you."
In his report to the Conservative party, Primeau says he's "100 per cent sure that the male at the beginning of the digital audio recording and the male speaking at the one minute and fifty-eight second mark, is the same person."
Comparing that male's voice to a YouTube video of Raynolds, Primeau says he's "75-80 per cent sure" that speaker is Raynolds.
Raynolds confirmed he's the one speaking at the start of the audio recording, expressing his opposition to income splitting.
But he and McTavish both say it's McTavish towards the end.
The speaker starts talking about "Planet Money," a National Public Radio podcast, just before the recording ends.
McTavish said that's one of his favourite shows. Raynolds said he'd never heard of the program until that moment. |
/**
* A new job scheduler is initialized for each test method. This method performs one-time set up so that it does not
* have to be repeated for each test. The TempTableCreator job needs to run in order to create the raw, temp tables. It is run in this
* method, and the compression job is not run. This is done to avoid unnecessary work and to better facilitate
* testing things in isolation.
*/
@BeforeClass
public void initClass() throws Exception {
dataAccess = TestDataAccessFactory.newInstance(session, DateTimeService.now.get(), false);
configurationService = new ConfigurationService();
configurationService.init(rxSession);
metricsService = new MetricsServiceImpl();
metricsService.setDataAccess(dataAccess);
metricsService.setConfigurationService(configurationService);
metricsService.startUp(session, getKeyspace(), true, metricRegistry);
removeJobConfig(TempTableCreator.CONFIG_ID);
removeJobConfig(TempDataCompressor.CONFIG_ID);
saveJobConfigJobId(TempDataCompressor.CONFIG_ID, UUID.randomUUID().toString());
jobScheduler = new TestScheduler(rxSession);
jobScheduler.truncateTables(getKeyspace());
List<JobDetails> jobDetails = jobsManager.installJobs();
jobsService = new JobsServiceImpl();
jobsService.setSession(rxSession);
jobsService.setScheduler(jobScheduler);
jobsService.setMetricsService(metricsService);
jobsService.setConfigurationService(configurationService);
jobsService.start();
JobDetails tableCreator =
jobDetails.stream().filter(d -> d.getJobName().equalsIgnoreCase(TempTableCreator.JOB_NAME))
.findFirst().orElse(null);
assertNotNull(tableCreator);
CountDownLatch latch = new CountDownLatch(1);
jobScheduler.onJobFinished(details -> {
if (details.getJobName().equals(TempTableCreator.JOB_NAME)) {
latch.countDown();
}
});
jobScheduler.advanceTimeTo(tableCreator.getTrigger().getTriggerTime());
jobScheduler.advanceTimeBy(1);
assertTrue(latch.await(TIMEOUT, TimeUnit.SECONDS));
removeJobConfig(TempDataCompressor.CONFIG_ID);
jobScheduler.shutdown();
} |
<filename>src/subscription-token.test.ts
import assert = require('assert')
import { ISubscriptionToken, subscriptionToken } from './subscription-token'
import { IObservers } from './observers'
import { TNotifyCallback } from './notify-callback'
function getObservers(): IObservers {
return {
0: () => null,
1: () => null,
2: () => null
}
}
describe('subscriptionToken', function () {
let observers: IObservers
beforeEach(function () {
observers = getObservers()
})
it('returns an ISubscriptionToken', function () {
const token: ISubscriptionToken = subscriptionToken(observers, 0)
assert.equal(Object.keys(token).length, 2, 'token has two keys')
assert.equal(token.id, 0, 'token has id property')
assert.equal(typeof token.unsubscribe, 'function', 'token has unsubscribe function')
})
it('removes observers', function () {
Object.keys(observers).map((id) => {
return subscriptionToken(observers, +id)
}).forEach(function (token, index) {
assert.equal(typeof observers[token.id], 'function', 'observer is defined')
assert.equal(observers[token.id](null, index), null, 'function returns null')
token.unsubscribe()
assert.equal(typeof observers[token.id], 'undefined', 'observer is undefined')
})
assert.equal(Object.keys(observers).length, 0, 'all observers have been removed')
})
})
|
package gameserver
import (
"fmt"
"net"
)
// ServerCore : the core server object.
type ServerCore struct {
	ListenerSocket net.Listener   // The socket being listened on
	shutdownSignal chan bool      // Shutdown signal
	runing         bool           // Whether the server has started
	settings       ServerSettings // Server settings
}
// NewInstance : creates a server instance and returns it.
func NewInstance(address string) *ServerCore {
server := &ServerCore{
shutdownSignal: make(chan bool, 1),
}
server.settings.HostAddr = address
server.settings.Load() // Load the server settings
return server
}
// IsRun : reports whether the server is running.
func (server *ServerCore) IsRun() bool {
return server.runing
}
// Start : starts the server.
func (server *ServerCore) Start() {
fmt.Println("Server Starting...")
server.listen() // Run the server main loop
server.runing = false
}
// Shutdown : shuts down the server.
func (server *ServerCore) Shutdown() {
server.ListenerSocket.Close()
server.shutdownSignal <- true
fmt.Println("Server Shutdown!")
}
func (server *ServerCore) listen() {
l, err := net.Listen("tcp", server.settings.HostAddr)
if err != nil {
fmt.Println("Error: ")
fmt.Println(err)
return
}
defer l.Close()
server.ListenerSocket = l
fmt.Printf("Socket is listening: %s \r\n", server.ListenerSocket.Addr())
server.runing = true
for {
select {
case <-server.shutdownSignal:
return
default:
conn, err := server.ListenerSocket.Accept()
if err != nil {
fmt.Println(err)
return
}
fmt.Println("Incoming connection request <--")
// Baglantı handle' a gönderiliyor.
go server.settings.handsaheker.HandShake(&conn)
}
}
}
// voduchau/kintone-ui-component
import { expect, fixture } from "@open-wc/testing";
import { MobileText } from "../index";
describe("MobileText", () => {
describe("textAlign", () => {
it("should be left when not set textAlign in constructor", async () => {
const container = new MobileText();
const el = await fixture(container);
const inputEl = el.querySelector(
".kuc-mobile-text__input-form__input"
) as HTMLInputElement;
expect(inputEl.getAttribute("textalign")).to.equal("left");
expect(window.getComputedStyle(inputEl).textAlign).to.equal("left");
});
it("should be right when set right in constructor", async () => {
const container = new MobileText({ textAlign: "right" });
const el = await fixture(container);
const inputEl = el.querySelector(
".kuc-mobile-text__input-form__input"
) as HTMLInputElement;
expect(inputEl.getAttribute("textalign")).to.equal("right");
expect(window.getComputedStyle(inputEl).textAlign).to.equal("right");
});
it("should be right when updating to right ", async () => {
const container = new MobileText();
container.textAlign = "right";
const el = await fixture(container);
const inputEl = el.querySelector(
".kuc-mobile-text__input-form__input"
) as HTMLInputElement;
expect(inputEl.getAttribute("textalign")).to.equal("right");
expect(window.getComputedStyle(inputEl).textAlign).to.equal("right");
});
it("should be right when replacing textAlign from left to right", async () => {
const container = new MobileText({ textAlign: "left" });
container.textAlign = "right";
const el = await fixture(container);
const inputEl = el.querySelector(
".kuc-mobile-text__input-form__input"
) as HTMLInputElement;
expect(inputEl.getAttribute("textalign")).to.equal("right");
expect(window.getComputedStyle(inputEl).textAlign).to.equal("right");
});
});
});
def readme(filename: str = 'README.md', encoding: str = DEFAULT_ENCODING) -> str:
    readme: str = os.path.join(here, filename)
    # The open itself must be inside the try block; a missing README raises
    # FileNotFoundError at open time, not when the file object is read.
    try:
        with io.open(readme, mode='r', encoding=encoding) as f:
            return '\n' + f.read()
    except FileNotFoundError:
        return __description__
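# A minimal usage sketch (hypothetical names): in a setup.py that defines
# `here`, `DEFAULT_ENCODING`, and `__description__`, the helper above would
# typically supply the package's long description, e.g.:
#
#     from setuptools import setup
#     setup(
#         name='example-package',  # hypothetical package name
#         long_description=readme(),
#         long_description_content_type='text/markdown',
#     )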
{-# LANGUAGE NoStrict #-}
{-# LANGUAGE NoStrictData #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.Construction where
import Prelude
import qualified Data.Generics.Traversable as GTraversable
-- import qualified Foreign.Marshal.Alloc as Mem
import Data.Generics.Traversable (GTraversable)
import Data.Foldable (traverse_)
--------------------------
-- === Construction === --
--------------------------
-- === Definition === --
class Monad m => Constructor m args a where
construct :: args -> m a
class Monad m => Constructor1 m args a where
construct1 :: ∀ t1. args -> m (a t1)
class Monad m => Constructor2 m args a where
construct2 :: ∀ t1 t2. args -> m (a t1 t2)
type Constructor' m = Constructor m ()
type Constructor1' m = Constructor1 m ()
type Constructor2' m = Constructor2 m ()
-- === API === --
construct' :: ∀ a m. Constructor' m a => m a
construct1' :: ∀ a m t1. Constructor1' m a => m (a t1)
construct2' :: ∀ a m t1 t2. Constructor2' m a => m (a t1 t2)
construct' = construct () ; {-# INLINE construct' #-}
construct1' = construct1 () ; {-# INLINE construct1' #-}
construct2' = construct2 () ; {-# INLINE construct2' #-}
-- === Defaulting === --
instance {-# OVERLAPPABLE #-} (Monad m, Constructor1 m args a)
=> Constructor m args (a t) where
construct = construct1 ; {-# INLINE construct #-}
instance {-# OVERLAPPABLE #-} (Monad m, Constructor2 m args a)
=> Constructor1 m args (a t) where
construct1 = construct2 ; {-# INLINE construct1 #-}
-------------------------
-- === Destruction === --
-------------------------
-- === Definition === --
class Monad m => Destructor m a where destruct :: a -> m ()
class Monad m => Destructor1 m a where destruct1 :: ∀ t1. (a t1) -> m ()
class Monad m => Destructor2 m a where destruct2 :: ∀ t1 t2. (a t1 t2) -> m ()
-- === Defaulting === --
instance {-# OVERLAPPABLE #-} (Monad m, Destructor1 m a)
=> Destructor m (a t) where
destruct = destruct1 ; {-# INLINE destruct #-}
instance {-# OVERLAPPABLE #-} (Monad m, Destructor2 m a)
=> Destructor1 m (a t) where
destruct1 = destruct2 ; {-# INLINE destruct1 #-}
instance {-# OVERLAPPABLE #-} (Monad m, GTraversable (Destructor m) a)
=> Destructor m a where
destruct = GTraversable.gmapM_ @(Destructor m) destruct ; {-# INLINE destruct #-}
---------------------------------
-- === Shallow Destruction === --
---------------------------------
-- === Definition === --
class Monad m => ShallowDestructor m a where
destructShallow :: a -> m ()
class Monad m => ShallowDestructor1 m a where
destructShallow1 :: ∀ t1. (a t1) -> m ()
class Monad m => ShallowDestructor2 m a where
destructShallow2 :: ∀ t1 t2. (a t1 t2) -> m ()
-- === Defaulting === --
instance {-# OVERLAPPABLE #-} (Monad m, ShallowDestructor1 m a)
=> ShallowDestructor m (a t) where
destructShallow = destructShallow1 ; {-# INLINE destructShallow #-}
instance {-# OVERLAPPABLE #-} (Monad m, ShallowDestructor2 m a)
=> ShallowDestructor1 m (a t) where
destructShallow1 = destructShallow2 ; {-# INLINE destructShallow1 #-}
instance {-# OVERLAPPABLE #-} (Monad m, GTraversable (ShallowDestructor m) a)
=> ShallowDestructor m a where
destructShallow = GTraversable.gmapM_ @(ShallowDestructor m) destructShallow ; {-# INLINE destructShallow #-}
-- === Instances === --
-- FIXME[MK->WD]: This instance is equivalent to the GTraversable default above,
-- but can't be resolved due to instance resolution rules ((Maybe a) looks more like
-- (a t) in the ShallowDestructor1 default than a in the GTraversable one).
instance {-# OVERLAPPABLE #-} (Applicative m, ShallowDestructor m a)
=> ShallowDestructor m (Maybe a) where
destructShallow = traverse_ destructShallow ; {-# INLINE destructShallow #-}
instance {-# OVERLAPPABLE #-} (Applicative m, ShallowDestructor m a, ShallowDestructor m b)
=> ShallowDestructor m (a, b) where
destructShallow = \(a, b) -> destructShallow a >> destructShallow b ; {-# INLINE destructShallow #-}
-- -----------------------
-- -- === Allocator === --
-- -----------------------
-- -- === Definition === --
-- class Allocator t m a where
-- alloc :: Int -> m (Ptr a)
-- -- === Malloc === --
-- data Malloc
-- instance Allocator Malloc m a where
-- alloc = liftIO . Mem.malloc
-- {-# INLINE alloc #-}
-- -------------------------------
-- -- === Copy Construction === --
-- -------------------------------
-- -- === Definition === --
-- class CopyWith t m a where
-- copyWith :: a -> m a
-- -- === Malloc === --
-- type Copy = CopyWith Malloc
-- copy :: Copy m a => a -> m a
-- copy = copyWith @Malloc
-- {-# INLINE copy #-}
// -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
// vim: ts=8 sw=2 smarttab ft=cpp
/*
* Ceph - scalable distributed file system
*
* Copyright (C) 2019 SUSE LLC
*
* This is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License version 2.1, as published by the Free Software
* Foundation. See file COPYING.
*
*/
#pragma once
#include <include/types.h>
class XMLObj;
class PublicAccessBlockConfiguration {
bool BlockPublicAcls;
bool IgnorePublicAcls;
bool BlockPublicPolicy;
bool RestrictPublicBuckets;
public:
PublicAccessBlockConfiguration():
BlockPublicAcls(false), IgnorePublicAcls(false),
BlockPublicPolicy(false), RestrictPublicBuckets(false)
{}
auto block_public_acls() const {
return BlockPublicAcls;
}
auto ignore_public_acls() const {
return IgnorePublicAcls;
}
auto block_public_policy() const {
return BlockPublicPolicy;
}
auto restrict_public_buckets() const {
return RestrictPublicBuckets;
}
void encode(ceph::bufferlist& bl) const {
    ENCODE_START(1, 1, bl);
encode(BlockPublicAcls, bl);
encode(IgnorePublicAcls, bl);
encode(BlockPublicPolicy, bl);
encode(RestrictPublicBuckets, bl);
ENCODE_FINISH(bl);
}
void decode(ceph::bufferlist::const_iterator& bl) {
    DECODE_START(1, bl);
decode(BlockPublicAcls, bl);
decode(IgnorePublicAcls, bl);
decode(BlockPublicPolicy, bl);
decode(RestrictPublicBuckets, bl);
DECODE_FINISH(bl);
}
void decode_xml(XMLObj *obj);
void dump_xml(Formatter *f) const;
};
WRITE_CLASS_ENCODER(PublicAccessBlockConfiguration)
std::ostream& operator<< (std::ostream& os, const PublicAccessBlockConfiguration& access_conf);
/**
 * Create a playspace or retrieve an existing one, and add the client as a member.
 * @param playspaceStaticId Playspace static id
 * @param svrConn Playspace server connection
 * @return The client playspace runtime, or null if the playspace could not be joined
 */
public ClientPlayspaceRuntime joinPlayspace(String playspaceStaticId, ServerConnection svrConn)
{
ClientPlayspaceRuntime playspace = null;
CompoundId playspaceId = new CompoundId();
playspaceId.setPlayspaceStaticId(playspaceStaticId);
    playspace = getPlayspace(playspaceId);
if (playspace != null) {
playspaceId.setPlayspaceRuntimeId(playspace.getRuntimeId());
}
else
{
Vector v = RuntimeFunctionsClient.joinPlayspace(svrConn, playspaceStaticId);
if (v.size() == 2) {
playspaceId = new CompoundId((String) v.get(0));
String xmlContent = (String) v.get(1);
Element playspaceElement = XMLUtils.stringToXmlElement(xmlContent);
playspace = new ClientPlayspaceRuntime(playspaceId, svrConn, playspaceElement);
playspace.addPropertyChangeListener(new PlayspaceListener());
playspaceMap.put(playspaceId.getPlayspaceRuntimeId(), playspace);
playspaceMap.put(playspaceId.getPlayspaceStaticId(), playspace);
System.out.println("Joined playspace " + playspaceStaticId
+ " (runid=\"" + playspaceId.getPlayspaceRuntimeId() + "\")\n");
}
}
return playspace;
}
# This script adds glow versions of any item images that don't currently have glow versions.
# This script uses the "convert" command, which is part of ImageMagick:
# https://www.imagemagick.org/script/download.php
# You must use an old version of ImageMagick for the convert command to work properly.
# Version 6.9.3-7 Q16 x64 is confirmed to work.
import os, shutil
def add_glow_to_dir(dirname):
glows = {}
for file in os.listdir(os.path.join(dirname, 'glow')):
if file.endswith('.png'):
glows[file] = True
for file in os.listdir(dirname):
if file not in glows:
if file.endswith('.png'):
file_path = os.path.join(dirname, file)
file_glow_path = os.path.join(dirname, 'glow', file)
cmd = 'convert "' + file_path + '" ' +\
'( +clone -channel A -blur 0x2.5 -level 0,80% +channel +level-colors white ) ' +\
'-compose DstOver ' +\
'-composite "' + file_glow_path + '"'
print(cmd)
os.system(cmd)
shutil.copy(file_glow_path, 'copy_of_new_glow_images/')
paths_to_add = [
os.path.join('..', 'collectibles'),
os.path.join('..', 'collectibles', 'antibirth'),
os.path.join('..', 'collectibles', 'custom'),
]
for path in paths_to_add:
print('Scanning directory: ' + path)
add_glow_to_dir(path)
-- Reads a clock time in "HH:MM" format and prints the angles, in degrees
-- measured clockwise from 12 o'clock, of the hour and minute hands.
-- For example, the input "03:30" prints "105.0 180.0".
main :: IO ()
main = do
  (h, m) <- getLine >>= return . (\(x, y) -> (read x, read $ tail y)) . break (== ':') :: IO (Int, Int)
  (putStrLn . unwords . map show) [fromIntegral (h `mod` 12) * 360 / 12 + fromIntegral m / 2, fromIntegral m * 6]
/** returns a single result entity or null if not found */
public static <T> T queryZeroOne(Class<T> resultType, String queryStr, Object... params) {
EntityManager em = null;
try {
em = createEntityManager();
final TypedQuery<T> query = em.createQuery(queryStr, resultType);
if (params != null) {
for (int i = 0; i < params.length; i++) {
Object param = params[i];
query.setParameter(i + 1, param);
}
}
try {
return query.getSingleResult();
} catch (NoResultException x) {
return null;
}
} finally {
if (em != null) PersistUtilHelper.closeEntityManager();
}
}
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

/**
 * Created by sherxon on 1/8/17.
 */
public class ZigZagOrderLevelTraversalBST {
    // BFS level-order traversal that alternates direction each level: children
    // are queued right-to-left, and every other level is copied out in reverse.
    public List<List<Integer>> zigzagLevelOrder(TreeNode root) {
List<List<Integer>> list = new ArrayList<>();
if (root == null) return list;
List<Integer> l = new LinkedList<>();
l.add(root.val);
list.add(l);
LinkedList<TreeNode> q = new LinkedList<>();
int level = 1;
q.add(root);
boolean flag = false;
while (!q.isEmpty()) {
TreeNode x = q.removeFirst();
if (x.right != null) q.addLast(x.right);
if (x.left != null) q.addLast(x.left);
level--;
if (level == 1 && !q.isEmpty()) {
LinkedList<Integer> ll = new LinkedList<>();
level += q.size();
for (TreeNode xx : q) {
if (flag)
ll.addFirst(xx.val);
else
ll.addLast(xx.val);
}
flag = !flag;
list.add(ll);
}
}
return list;
}
}
# heng2j/delamain
"""
Shortest Path Networkx Directed Graph and DataFrame
This script creates a networkx directed graph using the topology data previously created.
Find the closest node of the starting and ending locations.
Then, calculates the shortest path between the two nodes.
Finally show the shortest path on the graph as well as return it as dataframe.
Beginning part of the code is based on: https://www.datacamp.com/community/tutorials/networkx-python-graph-tutorial
Modified, adapted, and developed by DevGlitch
"""
import os
import numpy as np
import networkx as nx
import pandas as pd
import matplotlib.pyplot as plt
from scipy import spatial
# Grab edge list data
edgelist = pd.read_parquet("Town02_data/Town02_topology_edge_list.parquet")
# Preview edgelist
# print(edgelist.head(10), "\n")
# Grab node list data hosted on Gist
nodelist = pd.read_parquet("Town02_data/Town02_topology_node_list.parquet")
# Preview nodelist
# print(nodelist.head(5), "\n")
# Create empty directed graph
g = nx.DiGraph()
# Add edges and edge attributes
for i, elrow in edgelist.iterrows():
g.add_edge(elrow[0], elrow[1], attr_dict=elrow[2:].to_dict())
# Edge list example
# print(elrow[0], "\n") # node1
# print(elrow[1], "\n") # node2
# print(elrow[2:].to_dict(), "\n") # edge attribute dict
# Add node attributes
for i, nlrow in nodelist.iterrows():
g.nodes[nlrow["id"]].update(nlrow[1:].to_dict())
# Node list example
# print(nlrow, "\n")
# Preview first 5 edges
# print(list(g.edges(data=True))[0:5], "\n")
# Preview first 10 nodes
# print(list(g.nodes(data=True))[0:10], "\n")
# Preview total number of edges and nodes
# print('# of edges: {}'.format(g.number_of_edges()), "\n")
# print('# of nodes: {}'.format(g.number_of_nodes()), "\n")
# Define node positions data structure (dict) for plotting
node_positions = {
node[0]: (node[1]["lat"], -node[1]["lon"]) for node in g.nodes(data=True)
}
# Preview of node_positions with a bit of hack (there is no head/slice method for dictionaries).
# print(dict(list(node_positions.items())[0:5]), "\n")
# Making a nice plot that lines up nicely and should look like the carla map
plt.figure(3, figsize=(8, 6))
nx.draw_networkx_nodes(g, pos=node_positions, node_size=20, node_color="red")
nx.draw_networkx_edges(g, pos=node_positions, edge_color="blue", arrows=False)
# Display plot
# plt.show()
# Filename using the name of the current carla map running
directory = "Town04_data"
if not os.path.exists(directory):
os.makedirs(directory)
filename = "Town02_networkx_digraph.png"
filepath = os.path.join(directory, filename)
# Save Networkx Graph
if os.path.isfile(filepath):
print("File already exist. No additional networkx directed graph was saved.")
else:
print("Saving networkx directed graph (png file).")
plt.savefig(filepath)
# Subset dataframe of node list incl. only lat and lon coordinates
node_geo = nodelist[["lat", "lon"]]
# print(node_geo.head(10), "\n")
# Create array for use with spicy
node_geo_array = np.array(node_geo)
# print(node_geo_array)
# Starting and destination locations
start_location = (
-0.055,
-0.006,
) # This is a test value will need to be the GNSS sensor data here
end_location = (
10,
20,
) # This is a test value will need to be the selected destination here
def find_closest_node(node_list, node_array, location):
"""
Find the closest node to a geolocation
Returns the Node ID, Latitude, Longitude, and Altitude.
"""
distance, index = spatial.KDTree(node_array).query(location)
return node_list.iloc[index]
# Get closest node to start and end locations
start_location_closest_node = find_closest_node(
node_list=nodelist, node_array=node_geo_array, location=start_location
)
# print(start_location_closest_node, "\n")
end_location_closest_node = find_closest_node(
node_list=nodelist, node_array=node_geo_array, location=end_location
)
# print(end_location_closest_node, "\n")
# Compute shortest path between the two nodes closest to start and end locations
# Return a list with node IDs with the first value being the starting node and the last value the ending node.
shortest_path = nx.shortest_path(
g,
source=start_location_closest_node[0],
target=end_location_closest_node[0],
weight="distance",
)
# See list of nodes of the shortest path
# print("Shortest Path:", shortest_path, "\n")
# print(nodelist.head(5), "\n")
# Get lat, lon, and alt attributes of each nodes
shortest_path_geo = pd.DataFrame(columns=["id", "lat", "lon", "alt"])
rows_list = []
for i in shortest_path:
node_attributes = nodelist.loc[nodelist["id"] == i]
# print(node_attributes)
shortest_path_geo = shortest_path_geo.append(node_attributes)
# Append destination to dataframe
destination_attributes = {"id": 999, "lat": 10, "lon": 20, "alt": 0}
shortest_path_geo = shortest_path_geo.append(destination_attributes, ignore_index=True)
# Show shortest path dataframe
# print(shortest_path_geo)
shortest_path_geo.to_csv("test_path_Town02.csv")
# Create a new graph to overlay on the shortest path on it
plt.figure(3, figsize=(8, 6))
nx.draw_networkx_nodes(g, pos=node_positions, node_size=20, node_color="red")
nx.draw_networkx_edges(g, pos=node_positions, edge_color="blue", arrows=False)
path = shortest_path
path_edges = list(zip(path, path[1:]))
nx.draw_networkx_nodes(
g, pos=node_positions, nodelist=path, node_color="r", node_size=50
)
nx.draw_networkx_edges(
g, pos=node_positions, edgelist=path_edges, edge_color="g", width=4
)
plt.show()
/**
* Implementation of {@link Flow} based on an internal array of objects.
*
* @since 5.2.0
*/
class ArrayFlow<T> extends AbstractFlow<T>
{
private final T[] values;
private final int start, count;
// Guarded by this
private Flow<T> rest;
/** Creates an ArrayFlow from the values in the other flow. */
ArrayFlow(Flow<T> flow)
{
this(toMutableList(flow));
}
@SuppressWarnings("unchecked")
ArrayFlow(Collection<T> values)
{
this((T[]) values.toArray());
}
ArrayFlow(T[] values)
{
this(values, 0, values.length);
}
ArrayFlow(T[] values, int start, int count)
{
this.values = values;
this.start = start;
this.count = count;
}
@Override
public Flow<T> each(Worker<? super T> worker)
{
for (int i = 0; i < count; i++)
worker.work(values[start + i]);
return this;
}
@Override
public <A> A reduce(Reducer<A, T> reducer, A initial)
{
assert reducer != null;
A accumulator = initial;
for (int i = 0; i < count; i++)
{
T value = values[start + i];
accumulator = reducer.reduce(accumulator, value);
}
return accumulator;
}
@Override
public List<T> toList()
{
return Arrays.asList(values).subList(start, start + count);
}
@Override
public Flow<T> reverse()
{
        if (count < 2)
            return this;
        // Use toMutableList() so the start/count window of this flow is respected.
        List<T> newValues = toMutableList();
Collections.reverse(newValues);
return new ArrayFlow<T>(newValues);
}
@Override
public boolean isEmpty()
{
return false;
}
@Override
protected List<T> toMutableList()
{
List<T> result = new ArrayList<T>(count);
for (int i = 0; i < count; i++)
{
result.add(values[start + i]);
}
return result;
}
@Override
@SuppressWarnings("unchecked")
public Flow<T> sort()
{
        if (count < 2)
return this;
List<Comparable> newValues = (List<Comparable>) toMutableList();
Collections.sort(newValues);
return new ArrayFlow<T>((List<T>) newValues);
}
@Override
public Flow<T> sort(Comparator<T> comparator)
{
assert comparator != null;
        if (count < 2)
return this;
List<T> newValues = toMutableList();
Collections.sort(newValues, comparator);
return new ArrayFlow<T>(newValues);
}
@Override
public Iterator<T> iterator()
{
return toList().iterator();
}
@Override
public T first()
{
return values[start];
}
@Override
public synchronized Flow<T> rest()
{
if (rest == null)
rest = buildRest();
return rest;
}
private Flow<T> buildRest()
{
if (count < 2)
return F.emptyFlow();
return new ArrayFlow<T>(values, start + 1, count - 1);
}
@Override
public int count()
{
return count;
}
@Override
public Flow<T> take(int length)
{
if (length < 1)
return F.emptyFlow();
return new ArrayFlow<T>(values, start, Math.min(count, length));
}
@Override
public Flow<T> drop(int length)
{
assert length >= 0;
if (length == 0)
return this;
if (length >= count)
return F.emptyFlow();
return new ArrayFlow<T>(values, start + length, count - length);
}
}
// GetSSHKeyPath returns the ssh key path
func (d *Driver) GetSSHKeyPath() string {
if d.SSHKeyPath == "" {
d.SSHKeyPath = d.ResolveStorePath("id_rsa")
}
return d.SSHKeyPath
}
package importdb
import (
"time"
"github.com/jinzhu/gorm"
)
// Appeal is the main table that links to most of the others
type Appeal struct {
gorm.Model
AppealID string
Appellant Person
IsOnline bool
UNDPRU bool
NoticeDate time.Time
EnrollmentYear string
DateEntered time.Time
DateReceived time.Time
Comments string `gorm:"size:MAX"`
Reason Reason
IDR IDR
Outreach []Outreach
Action Action
NoticeComments string `gorm:"size:MAX"`
}
// Reason struct is the initial reason an appellant may apply for an appeal
type Reason struct {
ID uint
AppealID uint
Income bool
FamilySize bool
PremiumWavier bool
Residency bool
LawfulPresence bool
OtherInsurance bool
Incarceration bool
Other bool
OtherReason string
}
// IDR is the internal dispute resolution the Appeals team takes when dealing with a case.
type IDR struct {
ID uint
AppealID uint
AidPendingAppplied bool
AidPendingRemoved bool
InternalReview bool
CaseHolder string
Expedite bool
BusinessEvent string
Issue Issue
}
// TableName makes sure IDR table is called 'idr' and not 'id_rs'
func (IDR) TableName() string {
return "idr"
}
// Issue table is similar to the reason table because this is the official issue reason determined by the appeals team
type Issue struct {
ID uint
IDRID uint `gorm:"column:idr_id"`
Income bool
FamilySize bool
PWDenial bool
PublicMEC bool
PublicMECText string
Residency bool
LawfulPresence bool
Other bool
OtherReason string
SEP bool
ESI bool
IncarcerationStatus bool
TaxFilling bool
}
// Action table records the resolution steps taken on an appeal, including dismissals and hearings
type Action struct {
ID uint
AppealID uint
DismissedInvalid string
Dismissed string
WDDate time.Time
FinalLeterSent time.Time
LetterSentBy string
Hearing []Hearing
Accessibility Accessibility
}
// Hearing table records the details of each hearing held for an appeal
type Hearing struct {
ID uint
ActionID uint
Action string
HearingDate time.Time
HearingOfficer string
CCARep string
}
// Accessibility table records accessibility accommodations requested for an appeal
type Accessibility struct {
ID uint
ActionID uint
Interpreter string
Device string
Accommodation string
}
// Outreach table records contact attempts made regarding an appeal
type Outreach struct {
gorm.Model
AppealID uint
Notes string `gorm:"size:MAX"`
ContactMethod string
ContactMade bool
Outcome string
TimeSpent uint
}
// Rep table stores a person's authorized representative
type Rep struct {
ID uint
PersonID uint
FirstName string
LastName string
Address Address `gorm:"polymorphic:resident"`
}
// Person table stores the appellant's personal details
type Person struct {
ID uint
AppealID uint
FirstName string
LastName string
DOB string
MemberID string
Email string
Phone string
Rep Rep
Address Address `gorm:"polymorphic:resident"`
}
// Address table stores a polymorphic address shared by Person and Rep
type Address struct {
ID uint
Street string
City string
State string
Zip string
ResidentID uint
ResidentType string
}
// ----------------------------------------------------------------------------
// Constants.cpp
//
//
// Authors:
// <NAME> <EMAIL>
// ----------------------------------------------------------------------------
#include "Constants.h"
namespace lickport_array_controller
{
namespace constants
{
CONSTANT_STRING(device_name,"lickport_array_controller");
CONSTANT_STRING(firmware_name,"LickportArrayController");
// Use semantic versioning http://semver.org/
const modular_server::FirmwareInfo firmware_info =
{
.name_ptr=&firmware_name,
.version_major=0,
.version_minor=1,
.version_patch=1,
};
CONSTANT_STRING(hardware_name,"lickport_array_controller");
const double dispense_power = digital_controller::constants::power_max;
const size_t sync_delay = 10;
const size_t sync_count = 1;
CONSTANT_STRING(lick_datum_time,"time");
CONSTANT_STRING(lick_datum_millis,"millis");
CONSTANT_STRING(lick_datum_lickports_licked,"lickports_licked");
CONSTANT_STRING(lick_datum_lickports_activated,"lickports_activated");
const long duration_min = 10;
const long duration_max = 2000000000;
// Pins
CONSTANT_STRING(change_pin_name,"change");
CONSTANT_STRING(lick_detected_pin_name,"lick_detected");
const size_t lick_detected_pulse_duration = 200;
// Units
// Properties
CONSTANT_STRING(sync_period_min_property_name,"syncPeriodMin");
const long sync_period_min_default = 9000;
CONSTANT_STRING(sync_period_max_property_name,"syncPeriodMax");
const long sync_period_max_default = 11000;
CONSTANT_STRING(sync_on_duration_property_name,"syncOnDuration");
const long sync_on_duration_default = 50;
CONSTANT_STRING(dispense_delays_property_name,"dispenseDelays");
const long dispense_delay_min = 0;
const long dispense_delay_max = 1000;
const long dispense_delay_default = 10;
CONSTANT_STRING(dispense_periods_property_name,"dispensePeriods");
const long dispense_period_min = 1;
const long dispense_period_max = 10000;
const long dispense_period_default = 100;
CONSTANT_STRING(dispense_counts_property_name,"dispenseCounts");
const long dispense_count_min = 1;
const long dispense_count_max = 10;
const long dispense_count_default = 1;
CONSTANT_STRING(activated_dispense_durations_property_name,"activatedDispenseDurations");
const long activated_dispense_duration_default = 500;
// Parameters
CONSTANT_STRING(lickport_parameter_name,"lickport");
const long lickport_min = 0;
CONSTANT_STRING(lickports_parameter_name,"lickports");
const long lickports_array_length_min = 0;
CONSTANT_STRING(dispense_duration_parameter_name,"dispense_duration");
CONSTANT_STRING(dispense_durations_parameter_name,"dispense_durations");
const long dispense_durations_array_length_min = 0;
// Functions
CONSTANT_STRING(dispense_lickport_for_duration_function_name,"dispenseLickportForDuration");
CONSTANT_STRING(dispense_lickports_for_duration_function_name,"dispenseLickportsForDuration");
CONSTANT_STRING(dispense_lickports_for_durations_function_name,"dispenseLickportsForDurations");
CONSTANT_STRING(dispense_all_lickports_for_duration_function_name,"dispenseAllLickportsForDuration");
CONSTANT_STRING(get_activated_lickports_function_name,"getActivatedLickports");
CONSTANT_STRING(activate_only_lickport_function_name,"activateOnlyLickport");
CONSTANT_STRING(activate_only_lickports_function_name,"activateOnlyLickports");
CONSTANT_STRING(activate_lickport_function_name,"activateLickport");
CONSTANT_STRING(activate_lickports_function_name,"activateLickports");
CONSTANT_STRING(deactivate_lickport_function_name,"deactivateLickport");
CONSTANT_STRING(deactivate_lickports_function_name,"deactivateLickports");
CONSTANT_STRING(get_and_clear_lick_data_function_name,"getAndClearLickData");
// Callbacks
CONSTANT_STRING(calibrate_lick_sensor_callback_name,"calibrateLickSensor");
CONSTANT_STRING(manage_lick_status_change_callback_name,"manageLickStatusChange");
CONSTANT_STRING(activate_all_lickports_callback_name,"activateAllLickports");
CONSTANT_STRING(deactivate_all_lickports_callback_name,"deactivateAllLickports");
// Errors
}
}
/*Hey Girl, Fork my heart because I am ready to commit*/
#include <bits/stdc++.h>
using namespace std;
#define endl "\n"
#define fastio ios_base::sync_with_stdio(false);cin.tie(NULL);
#define PI 3.14159265
#define ll long long
#define vii vector<int, int>
#define vll vector<long long, long long>
#define vi vector<int>
#define vl vector<long long>
int main()
{
fastio;
int t;
cin >> t;
while(t-->0)
{
int n;
cin >> n;
int arr[n];
for(int i = 0;i<n;i++)
{
cin >> arr[i];
}
map<int, int> last[202];
for(int i = 1;i<=200;i++)
{
for(int j = n-1;j>=0;j--)
{
if(arr[j] == i)
{
if(last[i].size() == 0)
{
last[i][1] = j;
}
else
{
last[i][(*(last[i].rbegin())).first+1] = j;
}
}
}
}
// for(int i = 1;i<=3;i++)
// {
// for(auto it = last[i].begin();it!= last[i].end();it++)
// {
// cout << it->first << " " << it->second << " ";
// }
// cout << endl;
// }
int count[n+1][201];
for(int i = 0;i<=200;i++)
{
count[0][i] = 0;
}
for(int i = 1;i<=n;i++)
{
for(int j = 1;j<=200;j++)
{
if(arr[i-1] == j)
count[i][j] = count[i-1][j] + 1;
else
{
count[i][j] = count[i-1][j];
}
}
}
// for(int i = 1;i<=3;i++)
// {
// for(int j = 1;j<=n;j++)
// cout << count[j][i] << " " ;
// cout << endl;
// }
int ans = 1;
for(int i = 1;i<=200;i++)
{
for(auto it = last[i].begin();it!=last[i].end();it++)
{
int j = it->second;
int k = last[i][last[i].size() - it->first + 1];
if(k < j)
{
int temp_ans = 0;
for(int l = 1;l<=200;l++)
{
temp_ans = max(count[j][l] - count[k+1][l], temp_ans);
}
ans = max(ans, temp_ans + 2 * (it->first));
}
}
}
// cout << ans << endl;
cout << ans << endl;
}
}
/**
* This class loads and stores routes for Walker NPCs, under a List of {@link WalkerLocation} ; the key being the npcId.
*/
public class WalkerRouteData implements IXmlReader
{
private final Map<Integer, List<WalkerLocation>> _routes = new HashMap<>();
protected WalkerRouteData()
{
load();
}
@Override
public void load()
{
parseFile("./data/xml/walkerRoutes.xml");
LOGGER.info("Loaded {} Walker routes.", _routes.size());
}
@Override
public void parseDocument(Document doc, Path path)
{
forEach(doc, "list", listNode -> forEach(listNode, "route", routeNode ->
{
final NamedNodeMap attrs = routeNode.getAttributes();
final List<WalkerLocation> list = new ArrayList<>();
final int npcId = parseInteger(attrs, "npcId");
final boolean run = parseBoolean(attrs, "run");
forEach(routeNode, "node", nodeNode -> list.add(new WalkerLocation(parseAttributes(nodeNode), run)));
_routes.put(npcId, list);
}));
}
public void reload()
{
_routes.clear();
load();
}
public List<WalkerLocation> getWalkerRoute(int npcId)
{
return _routes.get(npcId);
}
public static WalkerRouteData getInstance()
{
return SingletonHolder.INSTANCE;
}
private static class SingletonHolder
{
protected static final WalkerRouteData INSTANCE = new WalkerRouteData();
}
}
/**
* This software is released as part of the Pumpernickel project.
*
* All com.pump resources in the Pumpernickel project are distributed under the
* MIT License:
* https://raw.githubusercontent.com/mickleness/pumpernickel/master/License.txt
*
* More information about the Pumpernickel project is available here:
* https://mickleness.github.io/pumpernickel/
*/
package com.pump.showcase;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Rectangle;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.UIManager;
import com.pump.graphics.GraphicInstruction;
import com.pump.graphics.GraphicsWriter;
import com.pump.graphics.ImageInstruction;
import com.pump.plaf.QPanelUI;
/**
* This is a ShowcaseDemo that includes a "Configuration" and "Example" panel.
*/
public abstract class ShowcaseExampleDemo extends ShowcaseDemo {
private static final long serialVersionUID = 1L;
static class ShowcaseSlider extends JSlider {
private static final long serialVersionUID = 1L;
public ShowcaseSlider(int min, int max, int value) {
super(min, max, value);
}
public ShowcaseSlider() {
}
public ShowcaseSlider(int min, int max) {
super(min, max);
}
@Override
protected void paintComponent(Graphics g) {
if (getUI().getClass().getName()
.endsWith("plaf.windows.WindowsSliderUI")) {
paintDarkTrack(g);
} else {
super.paintComponent(g);
}
}
/**
* Paint an extra shadow on top of the track. I wish there were an
* easier way to do this, but I looked through the WindowsSliderUI and
* didn't see a way to customize the track color.
*/
protected void paintDarkTrack(Graphics g0) {
Graphics2D g = (Graphics2D) g0;
GraphicsWriter w = new GraphicsWriter(false, Integer.MAX_VALUE);
w.setRenderingHints(g.getRenderingHints());
w.clipRect(0, 0, getWidth(), getHeight());
super.paintComponent(w);
GraphicInstruction[] instructions = w.getInstructions(true);
for (int a = 0; a < instructions.length; a++) {
GraphicInstruction i = instructions[a];
i.paint((Graphics2D) g);
if (i instanceof ImageInstruction) {
Rectangle r = i.getBounds().getBounds();
if (r.width > getWidth() * .8) {
g.setColor(new Color(0, 0, 0, 40));
((Graphics2D) g).fill(i.getBounds());
}
}
}
}
}
protected JLabel configurationLabel = new JLabel("Configuration:");
protected JLabel exampleLabel = new JLabel("Example:");
protected JPanel configurationPanel = new JPanel();
protected JPanel examplePanel = new JPanel();
private boolean stretchExampleToFillHoriz = false;
private boolean stretchExampleToFillVert = false;
private boolean stretchConfigurationToFillHoriz = false;
public ShowcaseExampleDemo() {
this(false, false, true);
}
public ShowcaseExampleDemo(boolean stretchExampleToFillHoriz,
boolean stretchExampleToFillVert, boolean useRoundedCorners) {
super();
this.stretchExampleToFillHoriz = stretchExampleToFillHoriz;
this.stretchExampleToFillVert = stretchExampleToFillVert;
layoutComponents();
Font font = getHeaderLabelFont();
exampleLabel.setFont(font);
configurationLabel.setFont(font);
QPanelUI panelUI = QPanelUI.createBoxUI();
configurationPanel.setUI(panelUI);
if (useRoundedCorners) {
examplePanel.setUI(panelUI);
} else {
panelUI = QPanelUI.createBoxUI();
panelUI.setCornerSize(0);
			// The square-cornered UI belongs to the example panel; the
			// configuration panel keeps the rounded UI assigned above.
			examplePanel.setUI(panelUI);
}
exampleLabel.setLabelFor(examplePanel);
configurationLabel.setLabelFor(configurationPanel);
}
static Font getHeaderLabelFont() {
Font font = UIManager.getFont("Label.font");
font = font.deriveFont(font.getSize2D() * 6 / 5);
return font;
}
private void layoutComponents() {
removeAll();
setLayout(new GridBagLayout());
GridBagConstraints c = new GridBagConstraints();
c.gridx = 0;
c.gridy = 0;
c.weightx = 0;
c.weighty = 0;
c.fill = GridBagConstraints.NONE;
c.insets = new Insets(10, 3, 3, 3);
c.anchor = GridBagConstraints.NORTHWEST;
add(configurationLabel, c);
c.gridy++;
c.insets = new Insets(3, 3, 3, 3);
if (stretchConfigurationToFillHoriz) {
c.fill = GridBagConstraints.HORIZONTAL;
}
add(configurationPanel, c);
c.fill = GridBagConstraints.NONE;
c.gridy++;
c.insets = new Insets(10, 3, 3, 3);
add(exampleLabel, c);
c.gridy++;
c.weighty = 1;
c.weightx = 1;
c.insets = new Insets(3, 3, 3, 3);
if (stretchExampleToFillHoriz && stretchExampleToFillVert) {
c.fill = GridBagConstraints.BOTH;
} else if (stretchExampleToFillVert) {
c.fill = GridBagConstraints.VERTICAL;
} else if (stretchExampleToFillHoriz) {
c.fill = GridBagConstraints.HORIZONTAL;
}
add(examplePanel, c);
}
}
Vitiligo Iridis: A case report
Aim: To report an unusual case of vitiligo iridis.
Case Report: We examined a 55-year-old woman attending our OPD with diminution of vision. On
examination, we found multiple greyish-white circular spots with punched-out margins on the anterior
surface of the iris of the left eye. She also had a history of smallpox.
Vitiligo iridis is a rare sequela of Variola (smallpox) infection. A few conditions, such as surgical trauma,
chronic iridocyclitis, and chronic glaucoma, can also produce atrophic iris patches, but Variola infection is
the most common cause of vitiligo iridis presenting with focal areas of iris atrophy.
Conclusion: Even though smallpox has been eradicated, elderly patients might still
present with the incidental finding of vitiligo iridis, which may or may not be associated with elevated
intraocular pressure. Vitiligo iridis can present as a complication of a number of conditions. Hence, identifying
the probable cause is of great importance to prevent misdiagnosis as an unrelated cause of
focal or multifocal iris atrophy. This can in turn guide further management of the underlying cause.
Keywords: Iris, Smallpox, Vitiligo iridis, Variola.
/**
 * If a username that is not in the table is given, this method should
 * return null.
 */
@Test
public void dajPouzivatelaTest3() {
Pouzivatel pouzivatel = pouzivateliaDao.dajUzivatela("gadzo", "hahaha");
assertNull(pouzivatel);
}
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "core/html/AutoplayUmaHelper.h"
#include "core/dom/Document.h"
#include "core/html/HTMLMediaElement.h"
#include "core/html/HTMLVideoElement.h"
#include "core/testing/DummyPageHolder.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace blink {
using ::testing::Invoke;
class MockAutoplayUmaHelper : public AutoplayUmaHelper {
public:
MockAutoplayUmaHelper(HTMLMediaElement* element)
: AutoplayUmaHelper(element) {
ON_CALL(*this, handleContextDestroyed())
.WillByDefault(
Invoke(this, &MockAutoplayUmaHelper::reallyHandleContextDestroyed));
}
void handlePlayingEvent() { AutoplayUmaHelper::handlePlayingEvent(); }
MOCK_METHOD0(handleContextDestroyed, void());
// Making this a wrapper function to avoid calling the mocked version.
void reallyHandleContextDestroyed() {
AutoplayUmaHelper::handleContextDestroyed();
}
};
class AutoplayUmaHelperTest : public testing::Test {
protected:
Document& document() { return m_pageHolder->document(); }
HTMLMediaElement& mediaElement() {
Element* element = document().getElementById("video");
DCHECK(element);
return toHTMLVideoElement(*element);
}
MockAutoplayUmaHelper& umaHelper() { return *m_umaHelper; }
std::unique_ptr<DummyPageHolder>& pageHolder() { return m_pageHolder; }
private:
void SetUp() override {
m_pageHolder = DummyPageHolder::create(IntSize(800, 600));
document().documentElement()->setInnerHTML("<video id=video></video>",
ASSERT_NO_EXCEPTION);
HTMLMediaElement& element = mediaElement();
m_umaHelper = new MockAutoplayUmaHelper(&element);
element.m_autoplayUmaHelper = m_umaHelper;
::testing::Mock::AllowLeak(&umaHelper());
}
void TearDown() override { m_umaHelper.clear(); }
std::unique_ptr<DummyPageHolder> m_pageHolder;
Persistent<MockAutoplayUmaHelper> m_umaHelper;
};
TEST_F(AutoplayUmaHelperTest, VisibilityChangeWhenUnload) {
EXPECT_CALL(umaHelper(), handleContextDestroyed());
mediaElement().setMuted(true);
umaHelper().onAutoplayInitiated(AutoplaySource::Attribute);
umaHelper().handlePlayingEvent();
pageHolder().reset();
::testing::Mock::VerifyAndClear(&umaHelper());
}
} // namespace blink
#pragma once
#include "Rendering/Renderer.h"
#include "State.h"
namespace StructureSynth {
namespace Model {
class RuleRef; // forward decl.
class Builder; // forward decl.
/// (Abstract) Base class for rules.
class Rule {
public:
/// Every rule must have a name.
Rule(QString name) : name(name) { maxDepth = -1; };
Rule() { maxDepth = -1; };
virtual ~Rule() {};
QString getName() const { return name; }
/// When applied the rule will add new pending rules to the ExecutionStack for execution.
/// Only PrimitiveRules will make use of the renderer.
virtual void apply(Builder* builder) const = 0;
/// Returns a list over rules that this rule references.
virtual QList<RuleRef*> getRuleRefs() const { return QList<RuleRef*>(); }
virtual void setMaxDepth(int maxDepth) { this->maxDepth = maxDepth; }
virtual int getMaxDepth() const { return maxDepth; }
protected:
QString name;
int maxDepth;
};
}
}
// Initialize the database with some values.
func init() {
rand.Seed(time.Now().UnixNano())
Save(Customer{Name: "Mary Jane"})
Save(Customer{Name: "Bob Smith"})
}
/*
* Calculate the LBP histogram for an integer-valued image. This is an
* optimized version of the basic 8-bit LBP operator. Note that this
* assumes 4-byte integers. In some architectures, one must modify the
* code to reflect a different integer size.
*
* img: the image data, an array of rows*columns integers arranged in
* a horizontal raster-scan order
* rows: the number of rows in the image
* columns: the number of columns in the image
* result: an array of 256 integers. Will hold the 256-bin LBP histogram.
* interpolated: if != 0, a circular sampling of the neighborhood is
* performed. Each pixel value not matching the discrete image grid
* exactly is obtained using a bilinear interpolation. You must call
* calculate_points (only once) prior to using the interpolated version.
* return value: result
*/
void lbp_histogram(int* img, int rows, int columns, int* result, int interpolated)
{
int leap = columns*predicate;
int
*p0 = img,
*p1 = p0 + predicate,
*p2 = p1 + predicate,
*p3 = p2 + leap,
*p4 = p3 + leap,
*p5 = p4 - predicate,
*p6 = p5 - predicate,
*p7 = p6 - leap,
*center = p7 + predicate;
int value;
int pred2 = predicate << 1;
int r,c;
memset(result,0,sizeof(int)*256);
if (!interpolated)
{
for (r=0;r<rows-pred2;r++)
{
for (c=0;c<columns-pred2;c++)
{
value = 0;
compab_mask_inc(p0,0);
compab_mask_inc(p1,1);
compab_mask_inc(p2,2);
compab_mask_inc(p3,3);
compab_mask_inc(p4,4);
compab_mask_inc(p5,5);
compab_mask_inc(p6,6);
compab_mask_inc(p7,7);
center++;
result[value]++;
}
p0 += pred2;
p1 += pred2;
p2 += pred2;
p3 += pred2;
p4 += pred2;
p5 += pred2;
p6 += pred2;
p7 += pred2;
center += pred2;
}
}
else
{
p0 = center + points[5].x + points[5].y * columns;
p2 = center + points[7].x + points[7].y * columns;
p4 = center + points[1].x + points[1].y * columns;
p6 = center + points[3].x + points[3].y * columns;
for (r=0;r<rows-pred2;r++)
{
for (c=0;c<columns-pred2;c++)
{
value = 0;
compab_mask_inc(p1,1);
compab_mask_inc(p3,3);
compab_mask_inc(p5,5);
compab_mask_inc(p7,7);
compab_mask((int)(interpolate_at_ptr(p0,5,columns)+0.5),0);
compab_mask((int)(interpolate_at_ptr(p2,7,columns)+0.5),2);
compab_mask((int)(interpolate_at_ptr(p4,1,columns)+0.5),4);
compab_mask((int)(interpolate_at_ptr(p6,3,columns)+0.5),6);
p0++;
p2++;
p4++;
p6++;
center++;
result[value]++;
}
p0 += pred2;
p1 += pred2;
p2 += pred2;
p3 += pred2;
p4 += pred2;
p5 += pred2;
p6 += pred2;
p7 += pred2;
center += pred2;
}
}
}
/**
* Name of the tracking database connection injection.
*/
export const TRACKING_CONNECTION = 'trackingConnection';
/**
* Created by dwiddows on 7/4/14.
*/
public class VectorStoreOrthographicalTest {
private static double TOL = 0.01;
@Test
public void initAndRetrieveTest() {
FlagConfig flagConfig = FlagConfig.getFlagConfig(null);
VectorStoreOrthographical store = new VectorStoreOrthographical(flagConfig);
Vector fooVector = store.getVector("foo");
Vector fooVector2 = store.getVector("foo");
Assert.assertEquals(1, fooVector.measureOverlap(fooVector2), TOL);
Vector footVector = store.getVector("foot");
Assert.assertTrue(1 > fooVector.measureOverlap(footVector));
Vector barVector = store.getVector("bar");
Assert.assertTrue(fooVector.measureOverlap(barVector) < fooVector.measureOverlap(footVector));
}
}
// _Post is in charge of building the http Request and passing it on to the designated poster
func (n *Events) _Post(data string) error {
data = fmt.Sprintf("[%s]", data)
	r, w := io.Pipe()
	defer r.Close()
	go func() {
		// Close the gzip writer before the pipe writer: closing the pipe
		// first would drop the gzip footer and truncate the stream.
		zipper := gzip.NewWriter(w)
		zipper.Write([]byte(data))
		zipper.Close()
		w.Close()
	}()
req, err := http.NewRequest("POST", n.URL, r)
if err != nil {
return err
}
req.Header.Add("Content-Type", "application/json")
req.Header.Add("X-Insert-Key", n.key)
req.Header.Add("Content-Encoding", "gzip")
return n.Poster(req)
}
// This class exists so that a CruiseAgents object can be injected into it as scenario state
public class SetupAgents {
public static final String AGENT_2_4 = "agent_2_4";
private final CruiseAgents cruiseAgents;
public SetupAgents(CruiseAgents cruiseAgents) {
this.cruiseAgents = cruiseAgents;
}
@com.thoughtworks.gauge.Step("Start <numberOfAgents> agents with uUID <uuid>")
public void startAgentsWithUUID(int numberOfAgents, String uuid) throws Exception {
cruiseAgents.startAgentsWithUUID(numberOfAgents, uuid);
}
@com.thoughtworks.gauge.Step("Restart agent <index> using a new uUID")
public void restartAgentUsingANewUUID(int index) throws Exception {
cruiseAgents.restartAgentUsingANewUUID(index);
}
public void setUpAndStartAnAgentOlderThanGoVersion12_3WithUUID(String uuid) throws Exception {
cruiseAgents.copyAndStartOldAgentWithUUID(AGENT_2_4, uuid);
}
public void stopWithUUID(String agentUuid) throws Exception {
cruiseAgents.stopAgentWithUUID(agentUuid);
}
public void upgrade2_4AgentToLatestAndStartWithUUID(String uuid) throws Exception {
cruiseAgents.startAgentWithUUID(AGENT_2_4, uuid, null);
}
}
// Copyright eeGeo Ltd (2012-2015), All Rights Reserved
#pragma once
#include "Types.h"
#include "CatalogBinding.h"
#include "SearchResultAddedMessage.h"
#include "SearchResultRemovedMessage.h"
#include "SearchQueryPerformedMessage.h"
#include "SearchQueryRefreshedMessage.h"
#include "SearchQueryRequestMessage.h"
#include "SearchQueryRemovedMessage.h"
#include "SearchQueryResultsRemovedMessage.h"
#include "AutocompleteSuggestionsReceivedMessage.h"
#include "SearchQueryResponseReceivedMessage.h"
#include "SearchQueryClearRequestMessage.h"
#include "CompassModeChangedMessage.h"
#include "CompassHeadingUpdateMessage.h"
#include "CompassModeUnauthorizedMessage.h"
#include "WorldPinGainedFocusMessage.h"
#include "WorldPinLostFocusMessage.h"
#include "WorldPinInFocusChangedLocationMessage.h"
#include "MyPinCreationStateChangedMessage.h"
#include "SearchResultOnMapItemModelSelectedMessage.h"
#include "UserCreatedMyPinDetailsModelSelectedMessage.h"
#include "SearchResultMyPinDetailsModelSelectedMessage.h"
#include "MyPinAddedToMenuMessage.h"
#include "MyPinRemovedFromMenuMessage.h"
#include "ShowInitialExperienceIntroMessage.h"
#include "CompletedCacheClearMessage.h"
#include "SearchResultPoiViewImageDownloadCompletedMessage.h"
#include "InteriorsExplorerFloorSelectedMessage.h"
#include "InteriorsExplorerInteriorStreamingMessage.h"
#include "InteriorsExplorerStateChangedMessage.h"
#include "WatermarkModelChangedMessage.h"
#include "WatermarkAlignmentStateChangedMessage.h"
#include "WatermarkInteriorStylingStateChangedMessage.h"
#include "SetWatermarkVisibilityMessage.h"
#include "AppModeChangedMessage.h"
#include "UserInteractionEnabledChangedMessage.h"
#include "URLRequestedMessage.h"
#include "DeeplinkURLRequestedMessage.h"
#include "ConnectivityChangedViewMessage.h"
#include "StartUxSurveyMessage.h"
#include "InteriorsExplorerUINotifyMessage.h"
#include "TagSearchAddedMessage.h"
#include "TagSearchRemovedMessage.h"
#include "VirtualKeyboardStateChangedMessage.h"
#include "AboutPageIndoorPositionTypeMessage.h"
#include "AboutPageIndoorAtlasDataMessage.h"
#include "AttractModeStateChangedMessage.h"
#include "AboutPageSenionDataMessage.h"
#include "AboutPageIndoorPositionSettingsMessage.h"
#include "OpenSearchMenuMessage.h"
#include "CompassIsRotatingStateChangedMessage.h"
#include "SelectMenuItemMessage.h"
#include "ClosePoiMessage.h"
#include "OpenSearchMenuSectionMessage.h"
#include "NavRoutingViewOpenMessage.h"
#include "NavRoutingStartLocationSetMessage.h"
#include "NavRoutingStartLocationClearedMessage.h"
#include "NavRoutingEndLocationSetMessage.h"
#include "NavRoutingEndLocationClearedMessage.h"
#include "NavRoutingRouteChangedMessage.h"
#include "NavRoutingRouteClearedMessage.h"
#include "NavRoutingCurrentDirectionSetMessage.h"
#include "NavRoutingCurrentDirectionUpdatedMessage.h"
#include "NavRoutingSelectedDirectionSetMessage.h"
#include "NavRoutingRemainingRouteDurationSetMessage.h"
#include "NavRoutingModeSetMessage.h"
#include "NavRoutingShowRerouteDialogMessage.h"
#include "NavRoutingSetCalculatingRouteMessage.h"
namespace ExampleApp
{
namespace ExampleAppMessaging
{
class NativeToUiMessageCatalog
: public Eegeo::Messaging::CatalogBinding<TagSearch::TagSearchAddedMessage>
, public Eegeo::Messaging::CatalogBinding<TagSearch::TagSearchRemovedMessage>
, public Eegeo::Messaging::CatalogBinding<SearchResultSection::SearchResultAddedMessage>
, public Eegeo::Messaging::CatalogBinding<SearchResultSection::SearchResultRemovedMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryPerformedMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryRequestMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryRefreshedMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryRemovedMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryResultsRemovedMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryResponseReceivedMessage>
, public Eegeo::Messaging::CatalogBinding<Search::SearchQueryClearRequestMessage>
, public Eegeo::Messaging::CatalogBinding<Search::AutocompleteSuggestionsReceivedMessage>
, public Eegeo::Messaging::CatalogBinding<Compass::CompassModeChangedMessage>
, public Eegeo::Messaging::CatalogBinding<Compass::CompassHeadingUpdateMessage>
, public Eegeo::Messaging::CatalogBinding<Compass::CompassModeUnauthorizedMessage>
, public Eegeo::Messaging::CatalogBinding<WorldPins::WorldPinGainedFocusMessage>
, public Eegeo::Messaging::CatalogBinding<WorldPins::WorldPinLostFocusMessage>
, public Eegeo::Messaging::CatalogBinding<WorldPins::WorldPinInFocusChangedLocationMessage>
, public Eegeo::Messaging::CatalogBinding<MyPinCreation::MyPinCreationStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<SearchResultOnMap::SearchResultOnMapItemModelSelectedMessage>
, public Eegeo::Messaging::CatalogBinding<MyPinDetails::UserCreatedMyPinDetailsModelSelectedMessage>
, public Eegeo::Messaging::CatalogBinding<MyPinDetails::SearchResultMyPinDetailsModelSelectedMessage>
, public Eegeo::Messaging::CatalogBinding<MyPins::MyPinAddedToMenuMessage>
, public Eegeo::Messaging::CatalogBinding<MyPins::MyPinRemovedFromMenuMessage>
, public Eegeo::Messaging::CatalogBinding<InitialExperience::ShowInitialExperienceIntroMessage>
, public Eegeo::Messaging::CatalogBinding<Options::CompletedCacheClearMessage>
, public Eegeo::Messaging::CatalogBinding<SearchResultPoi::SearchResultPoiViewImageDownloadCompletedMessage>
, public Eegeo::Messaging::CatalogBinding<Watermark::SetWatermarkVisibilityMessage>
, public Eegeo::Messaging::CatalogBinding<Watermark::WatermarkModelChangedMessage>
, public Eegeo::Messaging::CatalogBinding<Watermark::WatermarkAlignmentStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<Watermark::WatermarkInteriorStylingStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<InteriorsExplorer::InteriorsExplorerFloorSelectedMessage>
, public Eegeo::Messaging::CatalogBinding<InteriorsExplorer::InteriorsExplorerInteriorStreamingMessage>
, public Eegeo::Messaging::CatalogBinding<InteriorsExplorer::InteriorsExplorerStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<AppModes::AppModeChangedMessage>
, public Eegeo::Messaging::CatalogBinding<UserInteraction::UserInteractionEnabledChangedMessage>
, public Eegeo::Messaging::CatalogBinding<URLRequest::URLRequestedMessage>
, public Eegeo::Messaging::CatalogBinding<URLRequest::DeeplinkURLRequestedMessage>
, public Eegeo::Messaging::CatalogBinding<Net::ConnectivityChangedViewMessage>
, public Eegeo::Messaging::CatalogBinding<Surveys::StartUxSurveyMessage>
, public Eegeo::Messaging::CatalogBinding<InteriorsExplorer::InteriorsExplorerUINotifyMessage>
, public Eegeo::Messaging::CatalogBinding<VirtualKeyboard::VirtualKeyboardStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<AboutPage::AboutPageIndoorPositionTypeMessage>
, public Eegeo::Messaging::CatalogBinding<AboutPage::AboutPageSenionDataMessage>
, public Eegeo::Messaging::CatalogBinding<AboutPage::AboutPageIndoorAtlasDataMessage>
, public Eegeo::Messaging::CatalogBinding<AboutPage::AboutPageIndoorPositionSettingsMessage>
, public Eegeo::Messaging::CatalogBinding<AttractMode::AttractModeStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<SearchMenu::OpenSearchMenuMessage>
, public Eegeo::Messaging::CatalogBinding<Compass::CompassIsRotatingStateChangedMessage>
, public Eegeo::Messaging::CatalogBinding<Automation::SelectMenuItemMessage>
, public Eegeo::Messaging::CatalogBinding<Automation::ClosePoiMessage>
, public Eegeo::Messaging::CatalogBinding<Automation::OpenSearchMenuSectionMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingViewOpenMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingStartLocationSetMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingStartLocationClearedMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingEndLocationSetMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingEndLocationClearedMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingRouteChangedMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingRouteClearedMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingCurrentDirectionSetMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingCurrentDirectionUpdatedMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingSelectedDirectionSetMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingRemainingRouteDurationSetMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingModeSetMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingShowRerouteDialogMessage>
, public Eegeo::Messaging::CatalogBinding<NavRouting::NavRoutingSetCalculatingRouteMessage>
, private Eegeo::NonCopyable
{
};
}
}
/**
* Registers the methods of all Java classes in {@link WrapperOpenDomino#WRAPPED_CLASSES}, prefixing them with "Open"
*
* @param context
* JSContext in which methods will be registered
* @since org.openntf.domino.xsp 2.5.0
*/
public static void register(final JSContext context) {
if (registered) {
return;
}
try {
Registry registry = context.getRegistry();
registry.registerPackage("OpenNTFDomino", 1337);
FBSDefaultObject defaultObject = registry.getRegistryObject();
List<Object> wregs = ExtensionManager.findServices(null, WrapperOpenDomino.class, WrapperRegistry.class.getName());
for (Object wreg : wregs) {
for (Class<?> clazz : ((WrapperRegistry) wreg).getWrapperClasses()) {
registry.registerWrapper(clazz, new OpenWrapperFactory(clazz));
defaultObject.createProperty("Open" + clazz.getSimpleName(), 1338, new OpenConstructor(context, clazz));
}
}
} catch (Exception e) {
e.printStackTrace();
}
registered = true;
}
// bradh/skylight-uas: src/skylight-cucs/src/main/java/br/skylight/cucs/plugins/vehiclecontrol/VehicleMapElementPainter.java
package br.skylight.cucs.plugins.vehiclecontrol;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Polygon;
import java.awt.RenderingHints;
import java.awt.font.GlyphVector;
import br.skylight.commons.MeasureType;
import br.skylight.commons.dli.services.MessageType;
import br.skylight.commons.dli.vehicle.InertialStates;
import br.skylight.commons.infra.MathHelper;
import br.skylight.cucs.mapkit.painters.MapElementPainter;
public class VehicleMapElementPainter extends MapElementPainter<VehicleMapElement> {
private Font labelFont = new Font("Arial", Font.PLAIN, 11);
@Override
protected Polygon paintElement(Graphics2D go, VehicleMapElement elem) {
if(elem.getPosition().getLatitude()==0 && elem.getPosition().getLongitude()==0) return null;
InertialStates inertialStates = elem.getVehicle().getLastReceivedMessage(MessageType.M101);
if(inertialStates==null) {
return null;
}
Graphics2D g = (Graphics2D) go.create();
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
//draw plane name/altitude
if(elem.getVehicle().getVehicleID()!=null) {
g.setColor(Color.YELLOW);
GlyphVector gv = labelFont.createGlyphVector(g.getFontRenderContext(), elem.getVehicle().getLabel());
g.drawGlyphVector(gv, 9, 0);
}
drawText(g, elem, "Alt: " + MeasureType.ALTITUDE.convertToTargetUnitStr(inertialStates.getAltitude(), true) + " ("+ inertialStates.getAltitudeType() +")", 0, false);
drawText(g, elem, "G.Speed: " + MeasureType.GROUND_SPEED.convertToTargetUnitStr(inertialStates.getGroundSpeed(), true), 15, false);
//draw plane model
//rotate graphics reference
g.rotate(-MathHelper.HALF_PI+inertialStates.getPsi());
// System.out.println("roll: "+inertialStates.getPhi() + "; pitch: " + inertialStates.getTheta() + "; yaw: "+inertialStates.getPsi());
//draw model
g.setPaint(Color.YELLOW);
BasicStroke bs = new BasicStroke(5, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND);
g.setStroke(bs);
int length = (int)(9F*Math.cos(inertialStates.getTheta()*2F));//pitch
int wing = (int)(9F*Math.cos(inertialStates.getPhi()*2F));//roll
int back = (int)(3F*Math.cos(inertialStates.getPhi()*2F));//roll
g.drawLine(-length, 0, length, 0);
g.drawLine(3, -wing, 3, wing);
g.drawLine(-8, -back, -8, back);
if(elem.isSelected()) {
g.setColor(Color.YELLOW.darker());
g.setStroke(new BasicStroke(1, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND));
g.drawArc(-20, -20, 40, 40, 0, 360);
}
//selection mask
Polygon p = new Polygon();
p.addPoint(-8, -8);
p.addPoint(-8, 8);
p.addPoint(8, 8);
p.addPoint(8, -8);
return p;
}
}
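// Worked example of the model scaling above (hedged: pixel sizes follow the
// constants in paintElement): with pitch theta = 0 the fuselage line spans the
// full 2*9 px; at theta = pi/4, length = (int)(9*cos(pi/2)) = 0, so the symbol
// foreshortens as the nose pitches. The cos(2*phi) terms shrink the wing and
// tail lines with roll in the same way.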
|
def _flatten_railtext(self):
    # Concatenate each rail's characters, then join the rails in order.
    return "".join("".join(rail) for rail in self.rails) |
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""The standard gates moved to qiskit/circuit/library."""
from qiskit.circuit.library.standard_gates.x import (
XGate, CXGate, CCXGate, C3XGate, C4XGate, RCCXGate, RC3XGate, CnotGate, ToffoliGate,
MCXGate, MCXGrayCode, MCXVChain, MCXRecursive
)
__all__ = ['XGate', 'CXGate', 'CCXGate', 'C3XGate', 'C4XGate', 'RCCXGate', 'RC3XGate',
'CnotGate', 'ToffoliGate', 'MCXGate', 'MCXGrayCode', 'MCXVChain', 'MCXRecursive']
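# Hedged usage sketch: after this move, import the gates from the library
# location directly, e.g.
#   from qiskit.circuit.library.standard_gates.x import CXGate
#   cx = CXGate()  # two-qubit controlled-X (CNOT)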
|
There are many interesting nuggets of information in Charles Robinson's excellent piece about the downfall of Chip Kelly over at Yahoo! Sports, but one really stands out: Kelly could have taken some time to regroup from his Philadelphia ouster by hanging around Bill Belichick and the Patriots, and he declined.
Robinson wrote on Tuesday about how Kelly ended up being run out of town with the 49ers, and the whole story is worth a read. But the part about Kelly turning down opportunities to "spend some time" with other franchises really stood out -- as Robinson notes, Kelly was bitter and felt burned by his experience with the Eagles.
He wanted another shot as a head coach "quickly" to shrug off the negative attention from his departure in Philadelphia.
Kelly wanted to get back on his feet quickly and right his career. Second, the 49ers materialized as the most concrete (and only) NFL head coaching opportunity available. While friends reached out to Kelly and invited him to spend some time with their organizations as he regrouped (including the New England Patriots' Bill Belichick), York was the only one who immediately expressed interest in snapping up Kelly.
There are a few layers of interest here with the Belichick thing. While Kelly might not have had an official spot in the organization starting out, he could have become an offensive assistant on the Patriots and spent the season working with them, staying under the radar and reflecting on his schematic approach to the game. That would've served him better than getting neck deep in the mess that is the 49ers for a single season.
Kelly would be a much more attractive head coaching candidate right now if he'd done that. He also might have been a candidate for the Patriots offensive coordinator job if Josh McDaniels -- one of this year's hottest coaching candidates -- leaves Belichick's side.
Kelly could end up being the Pats offensive coordinator anyway for all we know, but there's certainly a much better chance of him becoming a head coach again if he puts his head down and grinds in New England for a year.
It sounds as if Kelly stayed quiet about personnel business in San Francisco anyway, almost to a fault, letting now-fired GM Trent Baalke run the show during the 49ers draft. Kelly's failures in Philly were blamed on him being a bad personnel guy, and maybe he wasn't interested in getting off on the wrong foot.
From Robinson:
According to one 49ers source, Kelly seemed very careful to avoid problems with Baalke from the moment he stepped into the building. So much so that the 49ers source said Kelly basically took a backseat to Baalke on draft weekend, operating with a low-key demeanor that left some in the war room thinking he was making a point of not inserting himself into some of the decisions.
The relationship continued to get awkward throughout the season, with Baalke, according to Robinson, calling a staff meeting to rally the troops while Kelly was out of town after his father passed away unexpectedly.
According to one source, Baalke called a staff meeting at some point during Kelly's two days away from the franchise, apparently hoping to send a message about rallying down the stretch. Some in the building felt calling such a meeting was inappropriate with Kelly out of town.
It's not difficult to see that as inappropriate in terms of workplace behavior.
Kelly, who has said he won't rule out going back to college for the right opportunity, is too smart a football mind not to find a landing spot. But you can bet that in the future he's going to be a lot more careful about the place he chooses, given the nightmarish way that things unfolded in San Francisco.
This time around he'd be wise to take advantage of close friends willing to offer an opportunity to regroup. At least the good news is he's making plenty of money not to coach football. |
package event.ice_detector_probe;
public class IceDetectorProbeBodyActivate {
public String toString() {
return "Event: IceDetectorProbeBodyActivate";
}
} |
// grow the rectangle: delta_x on left and right, delta_y on top and bottom
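// worked example: starting from x=10, y=10, w=20, h=20, expand(2, 3) yields
// x=8, y=7, w=24, h=26 -- the rectangle grows by delta_x on each side
// (w += 2*delta_x) and by delta_y on top and bottom (h += 2*delta_y)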
void expand( scr_coord_val delta_x_par, scr_coord_val delta_y_par ) {
x -= delta_x_par;
y -= delta_y_par;
w += (delta_x_par<<1);
h += (delta_y_par<<1);
} |
<filename>src/login/internal/ver.go
package internal
var (
gAllowCltVer CltVer
gLastUpVerTime int64
gUpVerInterval int64 = 10
)
func AllowCltVer() *CltVer {
return &gAllowCltVer
}
type CltVer struct {
// major version
BigVer int32
// minor version
SmallVer int32
// patch (fix) version
FixVer int32
}
//
//func GetAllowCltVer(dt int32, ver *CltVer) error {
// if time.Now().Unix()-gLastUpVerTime < gUpVerInterval {
// return nil
// }
// uri := conf.Server.WebUrl + "/manage/game/client/version/" + strconv.Itoa(int(dt))
// resp, err := resty.R().Get(uri)
// if err != nil {
// return err
// }
//
// err = json.Unmarshal(resp.Body(), &ver)
// if err != nil {
// return err
// }
//
// gLastUpVerTime = time.Now().Unix()
// return nil
//}
//
//func CheckCltVer(loginVer *CltVer, dt int32) bool {
// if err := GetAllowCltVer(dt, &gAllowCltVer); err != nil {
// log.Error("GetAllowCltVer failed: %v", err)
// return false
// }
// if loginVer.BigVer == gAllowCltVer.BigVer &&
// loginVer.SmallVer == gAllowCltVer.SmallVer &&
// loginVer.FixVer == gAllowCltVer.FixVer {
// return true
// }
// return false
//}
|
<reponame>dukerspace/dezenter
import { Controller, Get, Post, Put, Delete, Param } from '@nestjs/common'
import { CountryService } from './country.service'
const site = `${process.env.SERVICE_HOSTNAME}:${process.env.SERVICE_PORT}`
@Controller('/v1/countries')
export class CountryController {
constructor(private readonly countryService: CountryService) {}
@Get('/')
async index() {
return {
success: true,
data: 'ok'
}
}
@Post()
async create() {}
@Get(':id')
async show(@Param('id') id) {
const result = await this.countryService.find(id)
return {
success: true,
data: this.transformer(result)
}
}
@Get(':id/provinces')
async showWithProvince(@Param('id') id) {
const result = await this.countryService.findWithProvince(id)
return {
success: true,
data: this.transformerWithProvince(result)
}
}
@Put(':id')
async update(@Param('id') id) {}
@Delete()
async delete() {}
transformer(data) {
return {
id: data.id,
country_name_th: data.country_name_th,
country_name_en: data.country_name_en,
url: `${site}/air/v1/countries/${data.id}`
}
}
transformerWithProvince(data) {
return {
id: data.id,
country_name_th: data.country_name_th,
country_name_en: data.country_name_en,
url: `${site}/air/v1/countries/${data.id}`,
provinces: data.provinces
}
}
}
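// Route summary (hedged: a global prefix such as "/air", implied by the
// transformer URLs above, may be applied elsewhere in the app setup):
//   GET    /v1/countries               -> index()
//   GET    /v1/countries/:id           -> show()
//   GET    /v1/countries/:id/provinces -> showWithProvince()
//   PUT    /v1/countries/:id           -> update() (stub)
//   POST   /v1/countries               -> create() (stub)
//   DELETE /v1/countries               -> delete() (stub)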
|
<filename>cron/helpers/file.py<gh_stars>1-10
import requests
import os
from user_agent import generate_user_agent
class File():
def set_file(self, url, type = None):
try:
session = requests.Session()
session.headers.update({
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Language': 'ru-RU,ru;q=0.8,en-US;q=0.5,en;q=0.3',
'Connection': 'keep-alive',
'Cache-Control': 'max-age=0',
'Keep-Alive': '300',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Pragma': '',
'User-Agent': generate_user_agent(device_type="desktop", os=('mac', 'linux'))
})
response = session.get(url, timeout=30)
#print(url + ' - ' + str(response.status_code))
with self.write('page', 'html') as output_file:
output_file.write(response.text) #response.text.encode('utf-8')
except requests.exceptions.ReadTimeout:
self.logging(type, 'Error: Read timeout occured - ' + url)
except requests.exceptions.ConnectTimeout:
self.logging(type, 'Error: Connection timeout occured! - ' + url)
except requests.exceptions.ConnectionError:
self.logging(type, 'Seems like dns lookup failed.. - ' + url)
except requests.exceptions.HTTPError as err:
self.logging(type, 'Error: HTTP Error occured - ' + url)
self.logging(type, 'Response is: {content}'.format(content=err.response.content))
except requests.exceptions.MissingSchema as err:
self.logging(type, 'Error: ' + str(err))
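# Hedged usage sketch (the URL and the "crawler" log name are illustrative
# assumptions; set_file saves the fetched page to files/page.html):
#   f = File()
#   f.set_file("https://example.com", type="crawler")
#   # network errors are appended to files/crawler.log via logging()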
def logging(self, type, msg):
if type is not None:
f = self.write_a(type, 'log')
f.write(msg + '\n')
f.close()
print(msg)
def read(self, name, type):
return self.open_file(name, type, 'r')
def write(self, name, type):
return self.open_file(name, type, 'w')
def write_a(self, name, type):
return self.open_file(name, type, 'a')
def open_file(self, name, type, param):
fileDir = os.path.dirname(os.path.realpath(__file__))  # module path, not the literal string '__file__'
filename = os.path.join(fileDir, 'files/' + name + '.' + type)
return open(filename, param) |
def __populateCourseDependencies(self, material, df):
dependsOn = str(df._2).split(',')
for dep in dependsOn:
if dep.find('-') != -1:
base, ranges = self.__spreadRanges(dep)
for r in ranges:
self.graph.add(
(material, OER.coursePrerequisites, BNode(f'{base.strip()}.{r}')))
else:
self.graph.add(
(material, OER.coursePrerequisites, BNode(dep.strip()))) |
Ultrasound-guided erector spinae plane block for acute pain management in patients undergoing posterior lumbar interbody fusion under general anaesthesia
Introduction
Patients complaining of spondylolisthesis often experience severe pain which reduces their quality of life. Failure to achieve adequate pain relief with conservative management therapy makes surgical intervention necessary and posterior lumbar interbody fusion (PLIF) is the management of choice. 1 Control of postoperative spine surgery pain remains a challenge for the anaesthesiologist. In addition to incisional pain, these patients experience pain arising from deeper tissues, such as bones, ligaments, muscles, intervertebral disks, facet joints and damaged nerve roots. The pain from these structures may be severe and can lead to neural sensitisation and release of mediators both peripherally and centrally. The problem is that many of these patients are either opioid dependent or opioid tolerant, making them less responsive to the most commonly used therapy for postoperative pain (opioid-based intermittent or patient-controlled analgesia). 2 There are many studies describing different modalities of acute postoperative pain relief which range from acute pain relief procedures, such as neuraxial analgesia, paravertebral block (PVB) and local infiltration for different nerves as supplementary analgesia with general anaesthesia, to systemically administered analgesia. 3,4 In the current era of fast-tracking in spine surgery, optimal perioperative pain management plays a vital role. 5 One of the first examples of erector spinae plane block (ESPB) was presented for thoracic analgesia in both chronic neuropathic pain as well as acute postsurgical or posttraumatic pain. Recently, considerable attention has been paid to ESPB, as it is a simple and safe technique thanks to easily identifiable ultrasonographic landmarks as well as low risk for serious complications, because the injection is into a tissue plane that is distant from pleura, major blood vessels and discrete nerves. 6 Recent reports stated that ESPB is effective during surgical interventions and strengthens perioperative analgesia when done at lower thoracic vertebrae. 1 To our knowledge, few publications are available which address the use of ESPB in spine surgeries. The aim of this study is to evaluate the efficacy of bilateral single shot ultrasound-guided ESPB for acute postoperative pain management in patients with double level lumbar spondylolisthesis (L3-L5) and candidates for posterior lumbar interbody fusion (PLIF) surgery under general anaesthesia.
Patients were American Society of Anesthesiologists (ASA) physical status I or II, complaining of double level lumbar spondylolisthesis (L3-L5) and scheduled for elective PLIF under general anaesthesia. The trial followed the CONSORT 2010 statement guidelines for conducting a randomised controlled trial (Figure 1). Exclusion criteria included patient refusal, BMI > 35 kg/m2, infection at the site of the needle puncture, allergy to any of the study drugs, coagulopathy, and patients who were uncooperative or could not express pain via the visual analogue scale (VAS). Patients were randomised into two equal groups (30 patients in each) using computer-generated random numbers concealed in sealed envelopes indicating group assignment. Group I (control/no block) received only GA; group II (ESPB) received a bilateral single shot US-guided ESP block with 20 ml 0.25% bupivacaine.
The drug was prepared by an anaesthesiologist who was not involved in data collection, while another anaesthesiologist who conducted general anaesthesia and collected the study data was blind to the type of the solution injected in regional block until the end of the study. The patients were also blind to the type of injected solution.
Before enrolment in the study, a pre-anaesthetic visit was done for history taking, physical examination and routine laboratory investigations. The use of the 100 mm visual analogue scale (VAS) for pain assessment and the bilateral single shot ESPB technique were fully explained.
After recording baseline heart rate (HR), mean blood pressure (MBP) and oxygen saturation (SpO2), patients were placed in the left lateral position and the block applied to the upper side. A 2-5 MHz ultrasound (US) probe (Phillips cx50 extreme edition; USA) was used for all blocks, which were performed by the same anaesthesiologist, an expert in US-guided nerve blocks. The probe was covered with a sterile cover. The transverse process of the vertebrae and the erector spinae muscle were identified with the US probe placed in the paramedian sagittal plane, 3 cm lateral to the spinous process of the 3rd lumbar vertebra (L3).
ESPB technique
Under aseptic technique and after infiltration with 5 ml of 1% lidocaine, a 22G, 50 mm insulated facet-type needle (visioplex®, vygon, France) was introduced in-plane in a cephalad-to-caudad direction until the L3 transverse process was contacted, then the needle was slightly withdrawn. The correct position of the needle tip was confirmed by injecting 0.5-1 ml of local anaesthetic (LA) and visualising the LA spread lifting the erector spinae muscle off the bony shadow of the transverse process. Once confirmed, 20 ml of 0.25% bupivacaine was administered in group II under vision after confirming negative aspiration of blood, and 20 ml normal saline was administered in group I. LA distribution was observed in both cranial and caudal directions. Twenty minutes later, loss of cold sensation was evident between the (T10-T12) and (L5-S1) vertebral levels of the posterior dermatomes and the dermatomes of the anterior roots of the spinal nerves (lumbar plexus, upper leg) on both sides (without haemodynamic changes). The block was considered failed if loss of sensation was not attained within 30 minutes.
Anaesthetic management
Anaesthesia was induced 30 minutes after finishing the US-guided ESPB and a crystalloid intravenous infusion of 6 to 8 ml/kg/h was started. The previous monitors were applied plus a temperature probe, capnography, and electrodes for monitoring the bispectral index (BIS, model A-2000s; Aspect Medical Systems, Norwood, MA, USA). Induction was performed using fentanyl 1 µg/kg, atracurium 0.5 mg/kg and propofol 2 mg/kg. Intubation was done with an armoured endotracheal tube of appropriate size. Anaesthesia was maintained with isoflurane 1.5%, atracurium 0.1 mg/kg as required, and ventilator settings were adjusted to keep EtCO2 between 35 and 40 mmHg. The patient was turned to the prone position carefully, with careful padding of pressure points and eyes. Intraoperative analgesia was provided by supplementary doses of intravenous fentanyl (1 µg/kg) when heart rate or mean blood pressure increased more than 20% above baseline. At the end of the surgery, isoflurane was discontinued and the muscle relaxant was reversed using slow intravenous injection of 0.05 mg/kg neostigmine and 0.01 mg/kg atropine sulphate. Careful tracheal extubation was performed in the OR after fulfilling the criteria for extubation (full consciousness, haemodynamic stability with compensated lost blood volume and adequate reversal of neuromuscular blockade).
After extubation, all patients were transferred immediately to the PACU and were discharged from it according to the modified Aldrete score (a score ≥ 9 allows discharge).7 Routine postoperative analgesia consisted of intravenous paracetamol 1 g/6 hours and a ketorolac 30 mg loading dose followed by 15 mg/8 hours.
Morphine 0.1 mg/kg IV was given as postoperative rescue analgesia if VAS was > 30.
Our primary outcome was total morphine consumption during the first 24 postoperative hours. Secondary measurements were the time to the first request for rescue analgesia, which indicated the duration of analgesia, and total intraoperative fentanyl consumption. Baseline HR and MBP were recorded before performing ESPB (T1), at 5, 10 and 15 min after the block (T2-T4), immediately after intubation, and then every 15 min until the end of surgery (T5-T17).
Static VAS (at rest) was assessed immediately after PACU arrival and then at 2, 4, 6, 8, 12, 18 and 24 hours postoperatively, while dynamic VAS (during ambulation) was first evaluated 6 hours postoperatively, when the patient began to ambulate. Length of PACU stay, complications (nausea, vomiting, hypotension, bradycardia and somnolence) and LA toxicity (e.g., central nervous toxicity in the form of dizziness, tinnitus, numbness of the tongue, metallic taste, visual disturbance and dysarthria) were recorded. The observers who collected data and were responsible for postoperative follow-up were blinded to the study groups.
Statistical analysis
Based on the results of a previous study which measured total postoperative opioid consumption, 8 sample size calculation suggested a minimum of 21 patients in each group to detect a 40% difference in total morphine consumption at 24 hours postoperative (our primary outcome) at α error of 0.05, standard deviation of 6.92 and power of the study of 85%. Thus, in our study, 30 cases were enrolled in each group to compensate for possible dropouts.
The collected data were analysed using SPSS (SPSS Inc., Chicago, IL, USA) version 25. Normality of data was checked by the Shapiro-Wilk test and all our data were normally distributed. Quantitative data were presented as mean ± SD and compared by unpaired t-test. Qualitative data were presented as number and percentage (%) and compared by the chi-square test. The level of significance was set at p-value < 0.05.
Results
In our study, 94 patients were assessed for eligibility; 15 patients did not meet the inclusion criteria and 11 patients refused to participate in the study. Sixty-eight patients were randomised into 2 equal groups (34 patients in each one). Two patients in group II did not receive the intervention because they were uncooperative. Four patients in group I and two patients in group II were not followed up because they refused to continue the intervention. Thirty patients in each group were analysed as shown in Figure 1.
Patients enrolled in both groups were comparable for demographic data (age, weight, and gender), ASA classification and duration of surgery (Table I).
Time until the first request for rescue analgesia was significantly prolonged in group II compared with group I. Intraoperative fentanyl and postoperative 24 hour morphine consumption were significantly increased in group I compared with group II. Length of PACU stay was significantly shorter in group II than in group I. The results are summarised in Table II. Pain scores began to increase in group II at 12 and 8 hours postoperative (static and dynamic VAS respectively), but remained lower than in group I. With regard to static VAS, there was a significant difference between the groups on arrival at the PACU and at 2, 4, 6 and 8 hours postoperatively (p < 0.001), while dynamic VAS was significantly lower in group II than in group I at 6, 8 and 12 hours postoperative (p < 0.001), as shown in Figures 4 and 5.
The obtained results showed a significant increase in HR and MBP in group I compared with group II at T6, T8, T9, T10, T11, T12, T13, T15 and T16, as shown in Figures 6 and 7.
Incidence of complications (nausea, vomiting, and somnolence) was lower in group II than in group I. Somnolence was significantly lower in group II. No other complications were observed as summarised in Table II.
Discussion
The purpose of the study was to use ESPB to provide effective analgesia after PLIF surgeries. Intraoperative and postoperative opioid consumption was reduced in patients who received ESPB, VAS scores were better, and the time to first request for rescue analgesia was prolonged. A shorter PACU stay and fewer complications were also observed in patients who received ESPB.
In our study, ESPB was performed by injecting LA in the fascial plane deep to the erector spinae muscle, from where it diffuses to the dorsal and ventral rami of the spinal nerves, achieving an extensive multi-dermatomal sensory block over the posterior, lateral and anterior aspects.9 ESPB can therefore be used in pain management of lumbar spondylolisthesis.10,11 These results agree with other studies showing that ESPB can be performed safely and effectively for perioperative analgesia in lumbosacral spine surgery. One group chose the T10 or T12 transverse process level for bilateral ESP blocks in six cases of lumbosacral spine surgery, targeting a sensory block between L2 and S1.1 Takahashi et al.10 likewise achieved adequate and effective ESP blocks at the level of the L2 transverse process; their target was to block an area extending from T12 to L5, but they used 20 ml of 0.1875% ropivacaine.
Chaudhary NK et al.11 reported that bilateral ESP blocks were a safe and effective technique for postoperative pain management after spine surgery. In their case series, prolonged analgesia was maintained by inserting a catheter in the interfascial plane deep to the erector spinae muscle. They chose the level of the T10 transverse process for the blocks and administered 20 ml 0.25% bupivacaine through both catheters, targeting a sensory blockade between T7-T8 and L2-L3 vertebral levels in the anterior, lateral and posterior parts on both sides. In addition, Chin KJ et al.12 suggested the use of adjuncts and catheter insertion for intermittent boluses or continuous infusions of local anaesthetic to prolong the analgesia of ESPB.
The findings of our study are consistent with studies in other types of surgery demonstrating that the duration of analgesia after single shot ESPB is around 10 hours or more.
Based on previous trials,10 ESPB is a safe and simple technique compared with other regional techniques. Epidural analgesia involves the midline plane, which is at the surgical site and is not preferred by many surgeons.2 Lumbar paravertebral block has the potential for devastating complications, such as intrathecal or intramedullary injection, as the nerve roots are surrounded by dura and root-level blocks are performed just outside the dura.18
Study limitations
The main limitation of this study is its relatively short duration; postoperative follow-up should extend over a longer period, and continuous US-guided ESPB via a catheter inserted into the plane may increase the duration of sensory block. Further studies are recommended to compare other drugs (e.g., ropivacaine, levobupivacaine), other additives (e.g., dexamethasone, opioids) and other techniques (e.g., paravertebral block).
Conclusion
In conclusion, effective acute postoperative pain control in patients with L3-L5 spondylolisthesis undergoing PLIF can be achieved with bilateral single shot US-guided ESPB at the level of L3. ESPB also shortened PACU stay without adverse effects.
Acknowledgments
This research did not receive any financial support. |
package com.dottydingo.hyperion.client.builder;
import com.dottydingo.hyperion.api.ApiObject;
import com.dottydingo.hyperion.client.*;
import java.io.Serializable;
/**
 * Request builder for create (POST) operations on an entity.
 */
public class CreateRequestBuilder<T extends ApiObject<ID>,ID extends Serializable> extends RequestBuilder<T,ID>
{
private T body;
public CreateRequestBuilder(int version, Class<T> objectType,String entityName, T body)
{
super(version, objectType, entityName);
this.body = body;
}
public CreateRequestBuilder<T, ID> returnFields(String... fields)
{
setParameter("fields",join(fields));
return this;
}
@Override
public CreateRequestBuilder<T, ID> addParameter(String name, String value)
{
super.addParameter(name, value);
return this;
}
@Override
public CreateRequestBuilder<T, ID> setParameter(String name, String value)
{
super.setParameter(name, value);
return this;
}
@Override
public CreateRequestBuilder<T, ID> addHeader(String name, String value)
{
super.addHeader(name, value);
return this;
}
@Override
public CreateRequestBuilder<T, ID> setHeader(String name, String value)
{
super.setHeader(name, value);
return this;
}
@Override
public CreateRequestBuilder<T, ID> withHeaderFactory(HeaderFactory headerFactory)
{
super.withHeaderFactory(headerFactory);
return this;
}
@Override
public CreateRequestBuilder<T, ID> withParameterFactory(ParameterFactory parameterFactory)
{
super.withParameterFactory(parameterFactory);
return this;
}
@Override
public Request<T> build()
{
Request<T> request = super.build();
request.setRequestBody(body);
request.setRequestMethod(RequestMethod.POST);
return request;
}
public T execute(HyperionClient client)
{
return client.create(build());
}
}
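// Hedged usage sketch (assumes a concrete ApiObject implementation "Widget"
// keyed by Long ids and a configured HyperionClient; the names are illustrative):
//
// Widget created = new CreateRequestBuilder<Widget, Long>(1, Widget.class, "Widget", toCreate)
//         .returnFields("id", "name")
//         .execute(client);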
|
def _get_normalized_count(self, header_number, num_alts, num_samples):
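    # VCF "Number" semantics (hedged summary of the branches below): 'A' = one
    # value per ALT allele, 'R' = one per allele including REF (num_alts + 1),
    # 'G' = one per genotype (approximated by the sample count), an integer =
    # fixed count, '.' = unbounded (capped by self._unbounded_val_max_cols).
    # e.g. header_number='R' with num_alts=2 normalizes to 3 columns.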
if header_number == "A":
return num_alts
elif header_number == "R":
return (num_alts + 1)
elif header_number == "G":
return num_samples
elif isinstance(header_number, int):
return max(1, int(header_number))
elif header_number == ".":
return self._unbounded_val_max_cols |
<filename>src/dx/core/XInfoQueue.h
#include <DefHeader.h>
#include <dx/core/XDebug.h>
namespace DX {
/// <summary>
/// Info Queue
/// </summary>
class XInfoQueue : public ScopedComPointer<ID3D12InfoQueue> {
public:
/// <summary>
/// Default empty info queue
/// </summary>
XInfoQueue() = default;
/// <summary>
/// Create an info queue from a device
/// </summary>
/// <param name="ptrDevice">Pointer to device</param>
XInfoQueue(ID3D12Device* ptrDevice);
/// <summary>
/// Retrieve the number of messages in the queue
/// </summary>
/// <returns>Number of messages</returns>
uint64_t getMessageCount() noexcept;
/// <summary>
/// Retrieve a message
/// </summary>
/// <param name="index">Index of the message to retrieve</param>
/// <param name="ptrMessage">Pointer to message</param>
/// <returns>Size of the message</returns>
size_t getMessage(uint64_t index, D3D12_MESSAGE* ptrMessage) noexcept;
/// <summary>
/// Registers a message callback
/// </summary>
/// <param name="fptrMessageFunction">Callback function</param>
/// <param name="functionData">Pointer input for callback function</param>
/// <param name="flags">Input flags</param>
/// <returns>Message associated cookie value or NULL</returns>
DWORD registerCallback(D3D12MessageFunc fptrMessageFunction, void* functionData = nullptr, D3D12_MESSAGE_CALLBACK_FLAGS flags = D3D12_MESSAGE_CALLBACK_FLAG_NONE) noexcept;
/// <summary>
/// Unregisters a message callback
/// </summary>
/// <param name="cookie">Associated callback cookie</param>
/// <returns>true if unregister succeeds</returns>
bool unregisterCallback(DWORD cookie) noexcept;
};
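// Hedged usage sketch (assumes a valid ID3D12Device*; passing nullptr to
// getMessage to query the required size mirrors ID3D12InfoQueue::GetMessage):
//
// DX::XInfoQueue queue(ptrDevice);
// for (uint64_t i = 0; i < queue.getMessageCount(); ++i) {
//     size_t size = queue.getMessage(i, nullptr);
//     std::vector<char> buf(size);
//     queue.getMessage(i, reinterpret_cast<D3D12_MESSAGE*>(buf.data()));
// }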
} |
<gh_stars>1-10
package org.jetlinks.community.elastic.search.index.strategies;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.indices.GetIndexTemplatesRequest;
import org.elasticsearch.client.indices.GetIndexTemplatesResponse;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.jetlinks.community.elastic.search.ElasticRestClient;
import org.jetlinks.community.elastic.search.index.ElasticSearchIndexMetadata;
import org.jetlinks.community.elastic.search.index.ElasticSearchIndexProperties;
import org.jetlinks.community.elastic.search.utils.ReactorActionListener;
import reactor.core.publisher.Mono;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public abstract class TemplateElasticSearchIndexStrategy extends AbstractElasticSearchIndexStrategy {
public TemplateElasticSearchIndexStrategy(String id, ElasticRestClient client, ElasticSearchIndexProperties properties) {
super(id, client,properties);
}
protected String getTemplate(String index) {
return wrapIndex(index).concat("_template");
}
protected String getAlias(String index) {
return wrapIndex(index).concat("_alias");
}
protected List<String> getIndexPatterns(String index) {
return Collections.singletonList(wrapIndex(index).concat("*"));
}
@Override
public abstract String getIndexForSave(String index);
@Override
public String getIndexForSearch(String index) {
return getAlias(index);
}
@Override
public Mono<Void> putIndex(ElasticSearchIndexMetadata metadata) {
return ReactorActionListener
.<AcknowledgedResponse>mono(listener -> client.getWriteClient()
.indices() // update the index template
.putTemplateAsync(createIndexTemplateRequest(metadata), RequestOptions.DEFAULT, listener))
// update the current index
.then(doPutIndex(metadata.newIndexName(getIndexForSave(metadata.getIndex())), true));
}
protected PutIndexTemplateRequest createIndexTemplateRequest(ElasticSearchIndexMetadata metadata) {
String index = wrapIndex(metadata.getIndex());
PutIndexTemplateRequest request = new PutIndexTemplateRequest(getTemplate(index));
request.alias(new Alias(getAlias(index)));
request.settings(properties.toSettings());
Map<String, Object> mappingConfig = new HashMap<>();
mappingConfig.put("properties", createElasticProperties(metadata.getProperties()));
mappingConfig.put("dynamic_templates", createDynamicTemplates());
request.mapping(mappingConfig);
request.patterns(getIndexPatterns(index));
return request;
}
@Override
public Mono<ElasticSearchIndexMetadata> loadIndexMetadata(String index) {
return ReactorActionListener
.<GetIndexTemplatesResponse>mono(listener -> client.getQueryClient()
.indices()
.getIndexTemplateAsync(new GetIndexTemplatesRequest(getTemplate(index)), RequestOptions.DEFAULT, listener))
.filter(resp -> resp.getIndexTemplates().size() > 0)
.flatMap(resp -> Mono.justOrEmpty(convertMetadata(index, resp.getIndexTemplates().get(0).mappings())));
}
}
|
/** Visitor used for walking the BKD tree. */
protected abstract static class SpatialVisitor {
/** relates a range of points (internal node) to the query */
protected abstract Relation relate(byte[] minPackedValue, byte[] maxPackedValue);
/** Gets an intersects predicate. Called when constructing a {@link Scorer} */
protected abstract Predicate<byte[]> intersects();
/** Gets a within predicate. Called when constructing a {@link Scorer} */
protected abstract Predicate<byte[]> within();
/** Gets a contains function. Called when constructing a {@link Scorer} */
protected abstract Function<byte[], Component2D.WithinRelation> contains();
private Predicate<byte[]> containsPredicate() {
final Function<byte[], Component2D.WithinRelation> contains = contains();
return bytes -> contains.apply(bytes) == Component2D.WithinRelation.CANDIDATE;
}
private BiFunction<byte[], byte[], Relation> getInnerFunction(
ShapeField.QueryRelation queryRelation) {
if (queryRelation == QueryRelation.DISJOINT) {
return (minPackedValue, maxPackedValue) ->
transposeRelation(relate(minPackedValue, maxPackedValue));
}
return (minPackedValue, maxPackedValue) -> relate(minPackedValue, maxPackedValue);
}
private Predicate<byte[]> getLeafPredicate(ShapeField.QueryRelation queryRelation) {
switch (queryRelation) {
case INTERSECTS:
return intersects();
case WITHIN:
return within();
case DISJOINT:
return intersects().negate();
case CONTAINS:
return containsPredicate();
default:
throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]");
}
}
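  // Worked example: for QueryRelation.DISJOINT the inner function transposes
  // the relation (a cell entirely inside the query is treated as entirely
  // outside it) and the leaf predicate is simply intersects().negate().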
} |
<reponame>kamilk08/BookLoversClient
import { Identification } from 'src/app/modules/shared';
export interface TimeLine {
indentification: Identification
readerId: number
activitiesCount: number
}
|
/*
,--. ,--. ,--. ,--.
,-' '-.,--.--.,--,--.,---.| |,-.,-' '-.`--' ,---. ,--,--, Copyright 2018
'-. .-'| .--' ,-. | .--'| /'-. .-',--.| .-. || \ Tracktion Software
| | | | \ '-' \ `--.| \ \ | | | |' '-' '| || | Corporation
`---' `--' `--`--'`---'`--'`--' `---' `--' `---' `--''--' www.tracktion.com
Tracktion Engine uses a GPL/commercial licence - see LICENCE.md for details.
*/
namespace tracktion_engine
{
class HostedAudioDeviceType;
/**
The HostedAudioDeviceInterface allows an application or plugin
to pass audio and midi buffers to the engine, rather than the engine
directly opening the audio devices. This may be required for plugins
or applications that run multiple copies of the engine.
Don't create this class directly; it can be obtained from the DeviceManager
via getHostedAudioDeviceInterface()
*/
class HostedAudioDeviceInterface
{
public:
HostedAudioDeviceInterface (Engine&);
//==============================================================================
/** Holds the parameters being used by an HostedAudioDeviceInterface. */
struct Parameters
{
/** Expected sample rate. This can be changed later with prepareToPlay */
double sampleRate = 44100.0;
/** Expected block size. This can be changed later with prepareToPlay */
int blockSize = 512;
/** If true, the system midi devices will be available to the engine;
if false, just a single midi input and output will be available,
fed from the midi buffer provided to processBlock */
bool useMidiDevices = false;
/** Number of audio input channels */
int inputChannels = 2;
/** Number of audio output channels */
int outputChannels = 2;
/** Whether the size of the audio buffer passed to processBlock is fixed.
If you are creating a plugin, this should be false, and your plugin will have
one block of latency. If you are handling the audio device callback yourself,
this can be true. */
bool fixedBlockSize = false;
/** Names of your audio channels. If left empty, names will automatically be generated */
juce::StringArray inputNames, outputNames;
};
void initialise (const Parameters&);
// Call each time the sample rate or block size changes
void prepareToPlay (double sampleRate, int blockSize);
// Pass audio and midi buffers to the engine. If fixedBlockSize == true
// then the buffer must have the same number of samples as specified in
// the last call to prepareToPlay.
void processBlock (juce::AudioBuffer<float>& buffer, juce::MidiBuffer&);
/** Returns true if the MidiInput device is a HostedMidiInputDevice. */
static bool isHostedMidiInputDevice (const MidiInputDevice&);
private:
friend DeviceManager;
friend class HostedAudioDevice;
friend class HostedAudioDeviceType;
friend class HostedMidiInputDevice;
friend class HostedMidiOutputDevice;
juce::StringArray getInputChannelNames();
juce::StringArray getOutputChannelNames();
MidiOutputDevice* createMidiOutput();
MidiInputDevice* createMidiInput();
Engine& engine;
Parameters parameters;
HostedAudioDeviceType* deviceType = nullptr;
juce::Array<MidiOutputDevice*> midiOutputs;
juce::Array<MidiInputDevice*> midiInputs;
int maxChannels = 0;
AudioMidiFifo inputFifo, outputFifo;
JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (HostedAudioDeviceInterface)
};
} // namespace tracktion_engine
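// Hedged usage sketch (assumes an Engine instance and a host-provided audio
// callback; the sample rate and block size are illustrative):
//
// auto& iface = engine.getDeviceManager().getHostedAudioDeviceInterface();
// tracktion_engine::HostedAudioDeviceInterface::Parameters params;
// params.sampleRate = 48000.0;
// params.blockSize = 256;
// iface.initialise (params);
// iface.prepareToPlay (48000.0, 256);
// // then, from the host's audio callback:
// iface.processBlock (audioBuffer, midiBuffer);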
|
/// Stupid and cursed Rust procedural macro that runs a C preprocessor on the input
///
/// # Example
///
/// ```no_run
/// cpreprocess::cpreprocess!(r#"
/// #define MACRO(NAME) fn print_ ## NAME () { println!("hello world"); }
///
/// MACRO(hello_world)
///
/// print_hello_world()
/// "#);
/// ```
pub fn cpreprocess(tokens: TokenStream) -> TokenStream {
#[cfg(not(feature = "nightly"))]
let tokens = syn::parse_macro_input!(tokens as syn::LitStr).value();
#[cfg(feature = "nightly")]
let tokens = match syn::parse::<syn::LitStr>(tokens.clone()) {
Ok(tokens) => tokens.value(),
Err(_) => proc_macro_faithful_display::faithful_display(&tokens).to_string()
};
match cpp::preprocess(tokens.as_bytes())
.map(|result| {
result.and_then(|code| {
String::from_utf8_lossy(&code).parse().map_err(Into::into)
})
})
{
Some(Ok(code)) => code,
Some(Err(err)) => format!("compile_error!(\"{}\")", err.to_string().replace('\\', "\\\\").replace('"', "\\\"")).parse().unwrap(),
None => "compile_error!(\"Couldn't find a compatible C compiler on this system\")".parse().unwrap()
}
} |
"""Test utilities."""
import numpy as np
import pytest
from napari_plot.layers.multiline._multiline_utils import (
check_keys,
check_length,
get_data_limits,
make_multiline_color,
make_multiline_connect,
make_multiline_line,
)
def make_data():
"""Make data for tests below."""
xs = [np.array([0, 0.5, 1]), np.array([0, 1, 2, 50])]
ys = [np.array([-100, 1, 2]), np.array([0, 1, 2, 3])]
color = np.full((2, 4), [1.0, 0.0, 0.0, 1.0])
return xs, ys, color
def test_get_data_limits():
xs, ys, _ = make_data()
limits = get_data_limits(xs, ys)
ylimits = limits[:, 0]
assert min(ylimits) == -100
assert max(ylimits) == 3
xlimits = limits[:, 1]
assert min(xlimits) == 0
assert max(xlimits) == 50
def test_check_keys():
assert check_keys({"A": None, "B": None}, ("A", "B"))
assert not check_keys({"A": None, "B": None}, ("A", "B", "C"))
def test_check_length():
check_length(np.arange(10), np.arange(10))
with pytest.raises(ValueError):
check_length(np.arange(10), np.arange(12))
with pytest.raises(ValueError):
check_length(np.arange(10), np.random.random((10, 2)))
def test_make_multiline_color():
_, ys, color = make_data()
colors = make_multiline_color(ys, color)
assert isinstance(colors, np.ndarray)
assert colors.shape == (7, 4) # color for each element
assert colors.max() <= 1.0
def test_make_multiline_connect():
_, ys, _ = make_data()
connect = make_multiline_connect(ys)
assert isinstance(connect, np.ndarray)
assert connect.shape == (5, 2) # excludes edges
def test_make_multiline_line():
xs, ys, color = make_data()
pos, connect, colors = make_multiline_line(xs, ys, color)
# make sure pos is correct
assert isinstance(pos, np.ndarray)
assert pos.shape == (7, 2) # each element
# make sure colors are correct
assert isinstance(colors, np.ndarray)
assert colors.shape == (7, 4) # color for each element
assert colors.max() <= 1.0
# make sure connect is correct
assert isinstance(connect, np.ndarray)
assert connect.shape == (5, 2) # excludes edges
|
<gh_stars>0
import tkinter as tk
from tkinter.scrolledtext import ScrolledText
root = tk.Tk(className=" Just another Text Editor")
textPad = ScrolledText(root, width=100, height=80, wrap=tk.WORD)
textPad.insert('1.0', 'fdsfdsfds')
textPad.pack()
root.mainloop()
|
/** Test method.
* @param args command line arguments
*/
public static void main(String[] args) {
int iarg = 0;
int width = 3;
VariableMap vmap = new VariableMap();
if (iarg == args.length) {
vmap.put("a" , "3");
vmap.put("b" , "4");
vmap.put("c" , "5");
System.out.println("vmap = " + vmap.toString());
vmap.put("b" , "5");
System.out.println("vmap = " + vmap.toString());
vmap.multiplyBy(new Matrix("[[1, -2, 2], [2, -1, 2], [2, -2, 3]]"));
System.out.println("multiplied: " + vmap.toString());
vmap.put("d2", "0");
System.out.println("vmap = " + vmap.toString());
} else if (iarg + 1 == args.length){
vmap = VariableMap.parse(args[iarg ++]);
System.out.println(vmap.toString());
}
} |
def from_string(self, s=""):
    # Accept either colon-separated ("chrom:coord:seq") or tab-separated fields.
    for sep in (':', '\t'):
        a = s.split(sep)
        if len(a) >= 3:
            self.__set_chromosome(a[0])
            self.coordinate = int(a[1])
            self.sequence = a[2]
            return
    raise ValueError("Cannot obtain Position object from string '%s'" % s) |
#include <wheel.h>
// text user interface, the iodev that programs read from and write to.
// this module receives events using `event.c` from kbd and mouse
// then translate raw keycode into ascii (optional).
// this module also receives output from user programs, with support
// for escape sequences like setting caret position and text color.
//------------------------------------------------------------------------------
// keycode to ascii conversion
// keyboard lock content, scroll lock is omitted
static int kbd_capslock = 0;
static int kbd_numlock = 1;
// we have to track the state of modifier keys
static int kbd_l_shift = 0;
static int kbd_r_shift = 0;
static int kbd_l_control = 0;
static int kbd_r_control = 0;
static int kbd_l_alt = 0;
static int kbd_r_alt = 0;
// notice that 0 appears first, different from keyboard layout
static const char syms[] = ")!@#$%^&*(";
// convert keycode to ascii, return -1 if no ascii
static char keycode_to_ascii(keycode_t code, int release) {
if (release) {
switch (code) {
case KEY_LEFTSHIFT:
kbd_l_shift = 0;
return -1;
case KEY_RIGHTSHIFT:
kbd_r_shift = 0;
return -1;
case KEY_LEFTCTRL:
kbd_l_control = 0;
return -1;
case KEY_RIGHTCTRL:
kbd_r_control = 0;
return -1;
case KEY_LEFTALT:
kbd_l_alt = 0;
return -1;
case KEY_RIGHTALT:
kbd_r_alt = 0;
return -1;
default:
return -1;
}
}
switch (code) {
// modifiers
case KEY_LEFTSHIFT:
kbd_l_shift = 1;
return -1;
case KEY_RIGHTSHIFT:
kbd_r_shift = 1;
return -1;
case KEY_LEFTCTRL:
kbd_l_control = 1;
return -1;
case KEY_RIGHTCTRL:
kbd_r_control = 1;
return -1;
case KEY_LEFTALT:
kbd_l_alt = 1;
return -1;
case KEY_RIGHTALT:
kbd_r_alt = 1;
return -1;
// locks
case KEY_CAPSLOCK:
kbd_capslock ^= 1;
return -1;
case KEY_NUMLOCK:
kbd_numlock ^= 1;
return -1;
// letters
case KEY_A: case KEY_B: case KEY_C: case KEY_D: case KEY_E:
case KEY_F: case KEY_G: case KEY_H: case KEY_I: case KEY_J:
case KEY_K: case KEY_L: case KEY_M: case KEY_N: case KEY_O:
case KEY_P: case KEY_Q: case KEY_R: case KEY_S: case KEY_T:
case KEY_U: case KEY_V: case KEY_W: case KEY_X: case KEY_Y:
case KEY_Z:
if (kbd_l_control | kbd_r_control) {
// TODO: control characters
return -1;
}
if (kbd_l_alt | kbd_r_alt) {
// TODO: option characters
return -1;
}
if (kbd_capslock ^ (kbd_l_shift | kbd_r_shift)) {
return 'A' + (code - KEY_A);
} else {
return 'a' + (code - KEY_A);
}
// numbers
case KEY_0: case KEY_1: case KEY_2: case KEY_3: case KEY_4:
case KEY_5: case KEY_6: case KEY_7: case KEY_8: case KEY_9:
if (kbd_l_control | kbd_r_control) {
// TODO: control characters
return -1;
}
if (kbd_l_alt | kbd_r_alt) {
// TODO: option characters
return -1;
}
if (kbd_l_shift | kbd_r_shift) {
return syms[code - KEY_0];
} else {
return '0' + (code - KEY_0);
}
case KEY_BACKTICK:
if (kbd_l_shift | kbd_r_shift) {
return '~';
} else {
return '`';
}
case KEY_MINUS:
if (kbd_l_shift | kbd_r_shift) {
return '_';
} else {
return '-';
}
case KEY_EQUAL:
if (kbd_l_shift | kbd_r_shift) {
return '+';
} else {
return '=';
}
case KEY_LEFTBRACE:
if (kbd_l_shift | kbd_r_shift) {
return '{';
} else {
return '[';
}
case KEY_RIGHTBRACE:
if (kbd_l_shift | kbd_r_shift) {
return '}';
} else {
return ']';
}
case KEY_SEMICOLON:
if (kbd_l_shift | kbd_r_shift) {
return ':';
} else {
return ';';
}
case KEY_QUOTE:
if (kbd_l_shift | kbd_r_shift) {
return '\"';
} else {
return '\'';
}
case KEY_COMMA:
if (kbd_l_shift | kbd_r_shift) {
return '<';
} else {
return ',';
}
case KEY_DOT:
if (kbd_l_shift | kbd_r_shift) {
return '>';
} else {
return '.';
}
case KEY_SLASH:
if (kbd_l_shift | kbd_r_shift) {
return '?';
} else {
return '/';
}
case KEY_BACKSLASH:
if (kbd_l_shift | kbd_r_shift) {
return '|';
} else {
return '\\';
}
// whitespace
case KEY_TAB:
return '\t';
case KEY_SPACE:
return ' ';
case KEY_ENTER:
return '\n';
// unsupported keys
default:
return -1;
}
}
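// worked example: with caps lock off and left shift held (kbd_l_shift == 1),
// a KEY_1 press takes the number branch and returns syms[KEY_1 - KEY_0] == '!';
// without shift it returns '1'. a KEY_LEFTSHIFT release event only clears the
// modifier state and yields -1 (no ascii).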
//------------------------------------------------------------------------------
// read write function
#if (CFG_TTY_BUFF_SIZE & (CFG_TTY_BUFF_SIZE - 1)) != 0
#error "CFG_TTY_BUFF_SIZE must be power of 2"
#endif
typedef struct pender {
dlnode_t dl;
task_t * tid;
} pender_t;
static int tty_xlate = YES; // converts keycode to ascii
static int tty_echo = YES; // show input on screen
// tty input buffer, fixed size
static char tty_buff[CFG_TTY_BUFF_SIZE];
static spin_t tty_spin = SPIN_INIT;
static dllist_t tty_penders = DLLIST_INIT;
static fifo_t tty_fifo = FIFO_INIT(tty_buff, CFG_TTY_BUFF_SIZE * sizeof(char));
// wakeup pended readers
static void ready_read() {
while (1) {
dlnode_t * head = dl_pop_head(&tty_penders);
if (NULL == head) {
return;
}
pender_t * pender = PARENT(head, pender_t, dl);
task_t * tid = pender->tid;
raw_spin_take(&tid->spin);
sched_cont(tid, TS_PEND);
int cpu = tid->last_cpu;
raw_spin_give(&tid->spin);
if (cpu_index() != cpu) {
smp_resched(cpu);
}
}
}
// blocking read
static usize tty_read(file_t * file __UNUSED, u8 * buf, usize len) {
// return ios_read(stdin_r, buf, len);
while (1) {
u32 key = irq_spin_take(&tty_spin);
usize got = fifo_read(&tty_fifo, buf, len);
if (0 != got) {
irq_spin_give(&tty_spin, key);
return got;
}
// pend current task
task_t * tid = thiscpu_var(tid_prev);
pender_t pender = {
.dl = DLNODE_INIT,
.tid = tid,
};
dl_push_tail(&tty_penders, &pender.dl);
// pend here and try again
raw_spin_take(&tid->spin);
sched_stop(tid, TS_PEND);
raw_spin_give(&tid->spin);
irq_spin_give(&tty_spin, key);
task_switch();
}
}
// non blocking write
static usize tty_write(file_t * file __UNUSED, const u8 * buf, usize len) {
// TODO: parse buf, detect and handle escape sequences
// TODO: don't use debug function, call console driver directly
dbg_print("%*s", len, buf);
return len;
}
// listen from event and pipe data to stdin
// bottom half of keyboard ISR
// if tty input buffer is full, then new data is discarded
static void listener_proc() {
while (1) {
keycode_t code = kbd_recv();
if (YES == tty_xlate) {
char ch = keycode_to_ascii(code & 0x7fffffff, code & 0x80000000);
if ((char) -1 == ch) {
continue;
}
if (YES == tty_echo) {
dbg_print("%c", ch);
}
u32 key = irq_spin_take(&tty_spin);
fifo_write(&tty_fifo, (u8 *) &ch, sizeof(char), NO);
// notify readers if tty buffer full or hit enter
if (fifo_is_full(&tty_fifo) || ('\n' == ch)) {
ready_read();
irq_spin_give(&tty_spin, key);
task_switch();
} else {
irq_spin_give(&tty_spin, key);
}
} else {
u32 key = irq_spin_take(&tty_spin);
usize len = fifo_write(&tty_fifo, (u8 *) &code, sizeof(keycode_t), NO);
if (0 != len) {
ready_read();
irq_spin_give(&tty_spin, key);
task_switch();
} else {
irq_spin_give(&tty_spin, key);
}
}
}
}
//------------------------------------------------------------------------------
// tty device driver
// only one tty object (singleton)
// so `private` in file_t is not used at all
static const file_ops_t tty_ops = {
.read = (file_read_t) tty_read,
.write = (file_write_t) tty_write,
.lseek = (file_lseek_t) NULL,
};
static void tty_file_delete(file_t * file) {
kmem_free(sizeof(file_t), file);
}
file_t * tty_file_open(int mode) {
file_t * file = kmem_alloc(sizeof(file_t));
file->ref = KREF_INIT(tty_file_delete);
file->ops_mode = ((usize) &tty_ops & ~3UL) | (mode & 3);
file->private = NULL;
return file;
}
__INIT void tty_dev_init() {
// TODO: register tty_dev into vfs as `/dev/tty`, so that any
// user program could access it.
task_resume(task_create(0, listener_proc, 0,0,0,0));
}
|
/** This skill does nothing but emit fire particles around the entity until it dies. */
public class FireAura extends AbstractVisualSkill
{
@Override
protected void displayVisualEffects(LivingEntity caster)
{
// Randomly display fire particles around the entity
if (this.random.nextBoolean())
{
ParticleUtil.spawnColoredParticlesOnEntity(caster, 20, 0, 0, 0, Particle.FLAME);
}
}
} |
/**
* A simple {@link Fragment} subclass.
*/
public class EnableFragment extends Fragment {
SharedPreferences sp;
private Context context;
private Toolbar tb;
private String[] addrArray={};
private String[] addrState={};
private String[] addrNick={};
private static String[] dataToken;
private ListView lvDevice;
boolean edit = false;
boolean nick = false;
public EnableFragment() {
// Required empty public constructor
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
context = getActivity();
addrArray = new String[64];
addrState = new String[64];
addrNick = new String[64];
sp = context.getSharedPreferences("address", Context.MODE_PRIVATE);
for(int i=0 ;i<64;i++){
String str="A"+i;
String savedString = sp.getString(str,null);
dataToken = savedString.split(",");
addrArray[i]= dataToken[0];
addrState[i]= dataToken[1];
addrNick[i] = dataToken[2];
}
// for(int i=0;i<64;i++) addrArray[i]= "A"+i;
Log.v("Zach","(1)----onCreate----Enable");
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
getActivity().setTitle("Enable Mode");
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState) {
// Inflate the layout for this fragment
View v= inflater.inflate(R.layout.fragment_enable, container, false);
Log.v("Zach","(1.1)----onCreateView----Enable");
/*
//(1)set toolBar
tb = (Toolbar) v.findViewById(R.id.toolbarS);
// tb.setTitle("Enable Device");
// tb.setSubtitle("x Not Use");
// tb.setLogo(R.mipmap.dali);
((AppCompatActivity)getActivity()).setSupportActionBar(tb);
tb.setNavigationIcon(R.drawable.ic_menu_manage);
tb.setOnMenuItemClickListener(onMenuItemClick);
setHasOptionsMenu(true);
*/
//(2)set List View
lvDevice = (ListView) v.findViewById(R.id.lvDevice);
CustomAdapter customAdapter = new CustomAdapter();
lvDevice.setAdapter(customAdapter);
return v;
}
/*
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.enable, menu);
}
private Toolbar.OnMenuItemClickListener onMenuItemClick = new Toolbar.OnMenuItemClickListener() {
@Override
public boolean onMenuItemClick(MenuItem menuItem) {
String msg = "MenuItem ";
switch (menuItem.getItemId()) {
case R.id.action_edit:
nick = false;
edit =!edit;
tb.setNavigationIcon((edit)?R.drawable.ic_menu_send:R.drawable.ic_menu_gallery );
tb.getMenu().getItem(1).setIcon((edit)?R.drawable.ic_menu_send:R.drawable.ic_menu_gallery );
msg += "Click edit";
break;
case R.id.action_nick:
msg += "Click nick";
edit = false;
nick = !nick;
tb.getMenu().getItem(0).setIcon((nick)?R.drawable.ic_menu_send:R.drawable.ic_menu_gallery );
break;
case R.id.action_settings:
msg += "Click setting";
break;
}
if(!msg.equals("")) {
Toast.makeText(context, msg, Toast.LENGTH_SHORT).show();
}
return true;
}
};
*/
class CustomAdapter extends BaseAdapter{
@Override
public int getCount() {
return addrArray.length;
}
@Override
public Object getItem(int position) {
return null;
}
@Override
public long getItemId(int position) { return 0; }
@Override
public View getView(final int position, View cv, ViewGroup parent) {
cv = getActivity().getLayoutInflater().inflate(R.layout.list_fragment_enable,null);
final EditText txtUrl = new EditText(getContext());
final ImageView images = (ImageView) cv.findViewById(R.id.ive0);
final TextView tv0 = (TextView) cv.findViewById(R.id.tv0);
// final TextView tv1 = (TextView) cv.findViewById(R.id.tv1);
images.setImageResource(addrState[position].equals("1")?R.drawable.check:R.drawable.error);
tv0.setText(addrArray[position]+":"+addrNick[position]);
// tv1.setText(addrNick[position]);
cv.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View arg0) {
edit = ((MainActivity)getActivity()).beflag;
if(edit) { //change address status
String dataString = sp.getString("A" + position, null);
dataToken = dataString.split(",");
dataToken[1] = (dataToken[1].equals("1")) ? "0" : "1";
addrArray[position] = dataToken[0];
addrState[position] = dataToken[1];
addrNick[position] = dataToken[2];
sp.edit().putString(dataToken[0], dataToken[0] + "," + dataToken[1] + "," + dataToken[2]).commit();
Log.v("Zach", "Edit Saved:" + sp.getString(dataToken[0], null));
images.setImageResource(addrState[position].equals("1") ? R.drawable.check : R.drawable.error);
}
nick = ((MainActivity)getActivity()).bnflag;
if(nick){ //change nick name
AlertDialog.Builder bd= new AlertDialog.Builder(context);
bd.setTitle("Nick Name Editor");
bd.setMessage("Change Nick Name");
final EditText input = new EditText(context);
input.setRawInputType(InputType.TYPE_CLASS_TEXT);
bd.setView(input);
bd.setPositiveButton(R.string.string_ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
String str = input.getEditableText().toString();
String dataString = sp.getString("A" + position, null);
dataToken = dataString.split(",");
addrArray[position] = dataToken[0];
addrState[position] = dataToken[1];
addrNick[position] = dataToken[2] =(str.equals("")?" ":str);
sp.edit().putString(dataToken[0], dataToken[0] + "," + dataToken[1] + "," + dataToken[2]).commit();
// tv1.setText(str);
tv0.setText(addrArray[position]+":"+addrNick[position]);
}
});
bd.setNegativeButton(R.string.string_cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Context context=getActivity();
Toast.makeText(context,R.string.string_cancel, Toast.LENGTH_LONG).show();
}
});
bd.show();
}
}
});
return cv;
}
}
} |
// MQConnection opens a messaging queue (RabbitMQ) connection using the configured URL.
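// Hedged usage sketch (assumes helper.RabbitMQConfig exposes a URL field and
// that helper.CheckError logs fatally on failure, as the call below implies):
//
//	conf := &helper.RabbitMQConfig{URL: "amqp://guest:guest@localhost:5672/"}
//	conn, _ := MQConnection(conf)
//	defer conn.Close()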
func MQConnection(mqConf *helper.RabbitMQConfig) (*amqp.Connection, error) {
mq, err := amqp.Dial(mqConf.URL)
helper.CheckError(err, "Failed to connect to MQ")
return mq, nil
} |
/**
* Delete event form data to back end service.
*/
public void submitDeletetoService() {
try {
eventClient.deleteEvent(this.selectedId);
} catch (UnknownUrlException e) {
System.err.println("The given URL is unreachable");
}
pageDispatcher.showMainPage();
} |
def predict_video(path):
cascadePath = "haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascadePath)
# use the OpenCV 3.3+ contrib factory API consistently: the older
# create*Recognizer factories paired with load(), not read()
recognizer1 = cv2.face.LBPHFaceRecognizer_create()
recognizer2 = cv2.face.FisherFaceRecognizer_create()
recognizer1.read("LBPFPatternRecogniser")
recognizer2.read("FisherfacesRecogniser")
result = set()
subjects = np.load('subjectlabels.npy').item()
vidcap = cv2.VideoCapture(path)
time = 0
success = True
while success:
vidcap.set(cv2.CAP_PROP_POS_MSEC, time)
time += 100
success, image = vidcap.read()
if success:
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray)
for (x, y, w, h) in faces:
if w < 100 or h < 100:
continue
img = gray[y: y + h, x: x + w]
img_resize = cv2.resize(img,(150,150))
nbr_predicted1, conf1 = recognizer1.predict(img_resize)
nbr_predicted2, conf2 = recognizer2.predict(img_resize)
final_conf = (.8*conf1 + .2*((conf2)/30))
final_predict = -1
if nbr_predicted1 == nbr_predicted2:
if final_conf < 70:
final_predict = nbr_predicted1
else:
if conf1 < 45:
final_predict = nbr_predicted1
elif conf2 < 500:
final_predict = nbr_predicted2
elif conf1 < conf2/11:
final_predict = nbr_predicted1
else:
final_predict = nbr_predicted2
if final_predict != -1:
result.add(subjects[final_predict])
cv2.imshow("Recognizing Face",img_resize)
cv2.waitKey(10)
cv2.destroyAllWindows()
with open('attendance.csv') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
rolls = []
atts = []
for row in readCSV:
roll = row[0]
att = row[1]
rolls.append(roll)
atts.append(att)
for i in range(len(result)):
arr1 = list(result)
name = rolls.index(arr1[i])
atts[name] = int(atts[name]) + 1
arr = zip(rolls,atts)
myfile = open('attendance.csv', 'w')
with myfile:
writer = csv.writer(myfile)
writer.writerows(arr)
return result |
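# A minimal way to exercise predict_video, assuming the trained model files and
# attendance.csv are present in the working directory. The video filename below
# is a placeholder, not taken from this codebase.
if __name__ == "__main__":
    recognised = predict_video("class_recording.mp4")
    print("Recognised subjects:", recognised)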
/**
* Service encapsulating schema management of the DSE platform. This service deals
* with both the Hive meta store schema as well as maintaining the mappings of
* column family and keyspace objects to meta store tables and databases respectively.
*/
public class SchemaManagerService
{
private static final Logger log = LoggerFactory.getLogger(SchemaManagerService.class);
private CassandraClientConfiguration configuration;
private String wareHouseRoot;
private CassandraConnector connector;
private Metadata metadata;
    private Set<String> systemKeyspaces = Sets.newHashSet(); // TODO fix this: DriverUtil.getSystemKeyspaces(null)
    /*
     * There are three states for the dse_graph DB:
     * DSE Graph is disabled => no dse_graph db => graphNames == Optional.empty()
     * DSE Graph is enabled but no graphs exist => dse_graph exists but is empty => graphNames == Optional.of(empty set)
     * graphs exist => graphNames == Optional.of(set of names)
     */
private Optional<Set<String>> graphNames = Optional.empty();
public static final String DSE_GRAPH_DATABASE_NAME = "dse_graph";
// For testing
public void setActiveSession(SparkSession spark) {
assert(spark != null);
SparkSession.setActiveSession(spark);
}
    /**
     * Cache of meta store persisters. I expect only one in 99% of cases,
     * but JobServer and the Hive Thrift server can create connections to different
     * clusters with different configurations in one JVM.
     */
private static WeakHashMap<CassandraClientConfiguration, SchemaManagerService> SchemaManagerServiceCache = new WeakHashMap<>();
public static synchronized SchemaManagerService getInstance(
CassandraClientConfiguration clientConfiguration,
CassandraConnector connector)
{
SchemaManagerService schemaManagerService = SchemaManagerServiceCache.get(clientConfiguration);
if (schemaManagerService == null)
{
schemaManagerService = new SchemaManagerService(clientConfiguration, connector);
SchemaManagerServiceCache.put(clientConfiguration, schemaManagerService);
} else {
// update config with latest hadoop related changes
schemaManagerService.configuration = clientConfiguration;
}
return schemaManagerService;
}
private SchemaManagerService(CassandraClientConfiguration clientConfiguration, CassandraConnector connector)
{
this.configuration = clientConfiguration;
this.wareHouseRoot = HiveConf.getVar(configuration.getHadoopConfiguration(), HiveConf.ConfVars.METASTOREWAREHOUSE);
this.connector = connector;
}
public Set<String> getSystemKeyspaces()
{
return systemKeyspaces;
}
/**
* Creates a CassandraConnector given the inherited SparkConf
*
* The Client configuration should only really be important for testing. In real world cases
* all of the Hadoop configuration should be automatically set inside the SparkConf.
*/
public static CassandraConnector getCassandraConnector(CassandraClientConfiguration configuration) {
SparkConf base = new SparkConf(true);
configuration.getHadoopConfiguration()
.iterator()
.forEachRemaining(entry -> {
if (entry.getKey().startsWith("spark."))
base.set(entry.getKey(), entry.getValue());
else
base.set("spark.hadoop." + entry.getKey(), entry.getValue());
});
return new CassandraConnector(CassandraConnectorConf.fromSparkConf(base));
}
public void refreshMetadata()
{
log.info("Refresh cluster meta data");
connector.jWithSessionDo(session -> {
try
{
metadata = session.refreshSchema();
systemKeyspaces = Sets.newHashSet(); // TODO Fix this DriverUtil.getSystemKeyspaces(session);
// list all available graphs and check that graph is enabled
GraphResultSet rs = ((GraphSession)session).execute(ScriptGraphStatement.builder("system.graphs()").build());
graphNames = Optional.of(StreamSupport.stream(rs.spliterator(), false)
.map(r -> r.asString())
.collect(Collectors.toSet()));
} catch (InvalidQueryException | SyntaxError | ClassCastException | IllegalArgumentException | AllNodesFailedException e) {
                // Graph is not enabled, or we are connected to OSS Cassandra, so leave graphNames empty. // TODO find a cleaner way of testing whether graph is on
graphNames = Optional.empty();
}
return graphNames;
});
}
public boolean isGraphEnabled() {
return graphNames.isPresent();
}
private Metadata getClusterMetadata()
{
return metadata;
}
/**
* Returns a List of Keyspace that are not yet created as 'databases' in the Hive meta store.
*/
public List<String> findUnmappedKeyspaces(CassandraHiveMetaStore cassandraHiveMetaStore) {
refreshMetadata();
// make sure no duplicate keyspaces
Set<String> kss = new HashSet<>();
for (KeyspaceMetadata ksMetadata : getClusterMetadata().getKeyspaces().values())
{
String ksName = ksMetadata.getName().asInternal();
log.debug("Found ksDef name: {}", ksName);
if (isInternalKeyspace(ksName) || isLegacyGraphKs(ksName) || isKeyspaceMapped(ksName, cassandraHiveMetaStore))
continue;
log.debug("Adding ks name from unmapped List: {}", ksName);
kss.add(ksName);
}
List<String> ksList = new ArrayList<>();
ksList.addAll(kss);
return ksList;
}
private boolean isLegacyGraphKs(String ksName)
{
return ksName.endsWith("_pvt") || ksName.endsWith("_system") || metadata.getKeyspace(ksName + "_system").isPresent();
}
    /**
     * Returns the keyspace matching the given database name, or null if none exists.
     */
public String getKeyspaceForDatabaseName(String databaseName)
{
for (KeyspaceMetadata ksMetadata : getClusterMetadata().getKeyspaces().values())
{
String ksName = ksMetadata.getName().asInternal();
if (StringUtils.equalsIgnoreCase(ksName, databaseName))
return ksName;
}
return null;
}
/**
* Returns true if this keyspaceName returns a Database via
* {@link com.datastax.bdp.hadoop.hive.metastore.CassandraHiveMetaStore#getDatabase(String)}
*/
public boolean isKeyspaceMapped(String keyspaceName, CassandraHiveMetaStore cassandraHiveMetaStore)
{
return cassandraHiveMetaStore.hasDatabase(keyspaceName);
}
/**
* Creates the database based on the Keyspace's name. The tables are
* created similarly based off the names of the column families. Column
* family meta data will be used to define the table's fields.
*/
public void createKeyspaceSchema(String ks, CassandraHiveMetaStore cassandraHiveMetaStore)
{
if (isInternalKeyspace(ks))
return;
cassandraHiveMetaStore.createDatabase(buildDatabase(ks));
}
public boolean createKeyspaceSchemaIfNeeded(String databaseName, CassandraHiveMetaStore cassandraHiveMetaStore)
{
refreshMetadata();
log.info("adding dse_graph keyspace if needed");
if (isGraphEnabled() && databaseName.equals(DSE_GRAPH_DATABASE_NAME))
{
if(!isKeyspaceMapped(databaseName, cassandraHiveMetaStore))
{
createKeyspaceSchema(DSE_GRAPH_DATABASE_NAME, cassandraHiveMetaStore);
return true;
}
else return false;
}
String ks = getKeyspaceForDatabaseName(databaseName);
if (ks != null)
{
log.debug("Cassandra keyspace {} exists, but is not present in the metastore. Automatically creating metastore schema now.", databaseName);
createKeyspaceSchema(ks, cassandraHiveMetaStore);
return true;
}
log.debug("No Cassandra Keyspace found with the name {}. Unable to build metastore schema for non-existent keyspace", databaseName);
return false;
}
    /**
     * Creates keyspace schemas for any unmapped keyspaces.
     */
public void createKeyspaceSchemasIfNeeded(CassandraHiveMetaStore cassandraHiveMetaStore)
{
if (configuration.isAutoCreateSchema())
{
try
{
log.info("Updating Cassandra Keyspace to Metastore Database Mapping");
List<String> keyspaces = findUnmappedKeyspaces(cassandraHiveMetaStore);
for (String ks : keyspaces)
createKeyspaceSchema(ks, cassandraHiveMetaStore);
log.info("adding dse_graph keyspace if needed");
if (isGraphEnabled() && !isKeyspaceMapped(DSE_GRAPH_DATABASE_NAME, cassandraHiveMetaStore))
createKeyspaceSchema(DSE_GRAPH_DATABASE_NAME, cassandraHiveMetaStore);
}
catch (Exception e)
{
throw new CassandraHiveMetaStoreException("Problem finding unmapped keyspaces", e);
}
}
}
/**
* Compares the column families in the keyspace with what we have
* in hive so far, creating tables for any that do not exist as such already.
*/
public void createUnmappedTables(String dbName, CassandraHiveMetaStore cassandraHiveMetaStore)
{
// handle dse graph database separately
if(dbName.equals(DSE_GRAPH_DATABASE_NAME)) {
createUnmappedGraphTables(cassandraHiveMetaStore);
return;
}
String ks = getKeyspaceForDatabaseName(dbName);
log.info("Create mapping in hive db: {}, for unmapped tables from keyspace: {}", dbName, ks);
if (ks == null || isInternalKeyspace(ks))
return;
try
{
for (CatalogTableMetadata catalogTableMetadata : getTableOrViewMetadatas(ks))
{
try
{
if (!cassandraHiveMetaStore.hasMapping(ks, catalogTableMetadata.getTableName()))
{
createTableMapping(catalogTableMetadata, cassandraHiveMetaStore);
}
}
catch (InvalidObjectException ioe)
{
throw new CassandraHiveMetaStoreException(
"Could not create table for CF: " + catalogTableMetadata.getTableName(), ioe);
}
catch (MetaException me)
{
throw new CassandraHiveMetaStoreException(
"Problem persisting table for CF: " + catalogTableMetadata.getTableName(), me);
}
}
}
catch (Exception ex)
{
throw new CassandraHiveMetaStoreException(
"There was a problem retrieving column families for keyspace " + ks, ex);
}
}
private void createUnmappedGraphTables(CassandraHiveMetaStore cassandraHiveMetaStore)
{
for (String graphName: graphNames.get())
{
try
{
if (!cassandraHiveMetaStore.hasMapping(DSE_GRAPH_DATABASE_NAME, graphName + GraphVertexTableMetadata.POSTFIX))
{
createTableMapping(new GraphVertexTableMetadata(graphName), cassandraHiveMetaStore);
}
if (!cassandraHiveMetaStore.hasMapping(DSE_GRAPH_DATABASE_NAME, graphName + GraphEdgeTableMetadata.POSTFIX))
{
createTableMapping(new GraphEdgeTableMetadata(graphName), cassandraHiveMetaStore);
}
}
catch (InvalidObjectException ioe)
{
throw new CassandraHiveMetaStoreException(
"Could not create table for Graph: " + graphNames, ioe);
}
catch (MetaException me)
{
throw new CassandraHiveMetaStoreException(
"Problem persisting metadata for Graph: " + graphNames, me);
}
}
}
/**
* If this table is external, we need to check that it still exists in
* Cassandra. Returns true for non-external tables. Returns false if the
* table no longer exists in Cassandra.
*/
public boolean verifyExternalTable(Table table) throws CassandraHiveMetaStoreException
{
if (table.getTableType() == null ||
!table.getTableType().equals(TableType.EXTERNAL_TABLE.toString()))
return true;
boolean foundName = false;
Map<String, String> params = table.getParameters();
// Only verify C* ks.cf if the external table was auto created
if (! params.containsKey("auto_created"))
return true;
        Map<String, String> serdeParameters = table.getSd().getSerdeInfo().getParameters();
String ksName = serdeParameters.get("keyspace");
String cfName = serdeParameters.get("table");
try
{
for (CatalogTableMetadata catalogTableMetadata : getTableOrViewMetadatas(ksName))
{
if (StringUtils.equalsIgnoreCase(catalogTableMetadata.getTableName(), cfName))
{
foundName = true;
break;
}
}
}
catch (Exception ex)
{
throw new CassandraHiveMetaStoreException(
"There was a problem verifying an externally mapped table", ex);
}
return foundName || verifyGraphExternalTable(table);
}
private Pattern graphTablePattern = Pattern.compile("(.*)("
+ GraphEdgeTableMetadata.POSTFIX
+ "|" + GraphVertexTableMetadata.POSTFIX + ")");
private boolean verifyGraphExternalTable(Table table)
{
if(!isGraphEnabled() || !table.getDbName().equals(DSE_GRAPH_DATABASE_NAME))
return false;
Matcher m = graphTablePattern.matcher(table.getTableName());
return m.matches() && graphNames.get().contains(m.group(1));
}
    /**
     * Builds the metastore database object for the keyspace.
     */
private Database buildDatabase(String ks)
{
Database database = new Database();
database.setLocationUri(
new Path(wareHouseRoot,
ks.toLowerCase() + ".db").toString());
database.setName(ks);
return database;
}
    /**
     * Creates the appropriate Hive or Spark table mapping from C*.
     */
private Table createTableMapping(CatalogTableMetadata catalogTableMetadata,
CassandraHiveMetaStore cassandraHiveMetaStore)
throws InvalidObjectException, MetaException
{
Table table = buildSparkSourceTable(catalogTableMetadata);
if (table != null)
cassandraHiveMetaStore.createTable(table);
return table;
}
    /**
     * Builds a Hive table that stores the Spark source table for the keyspace and column family.
     */
public Table buildSparkSourceTable(CatalogTableMetadata catalogTableMetadata)
{
Table table = new Table();
String ksName = catalogTableMetadata.getDbName();
String tableName = catalogTableMetadata.getTableName();
        /* We need to compute the schema this table would have and place it in the
           metastore. Otherwise we end up with intermediary nodes with incorrect schema
           definitions (see CatalogRelation). Spark expects this data, so we must put it
           in its proper place.
        */
SparkSession session = SparkSession.getActiveSession().get();
StructType schema = session
.read()
.format(catalogTableMetadata.getSourceProvider())
.options(catalogTableMetadata.getSerDeInfo().getParameters())
.load()
.schema();
// Translated from Scala to Java from HiveExternalCatalog in Spark 2.2
// Serialized JSON schema string may be too long to be stored into a single metastore table
// property. In this case, we split the JSON string and store each part as a separate table
// property.
        int threshold = 4000; // Magic number copied from Spark's internal config param
String schemaJsonString = schema.json();
// Split the JSON string.
Iterable<String> parts = Splitter.fixedLength(threshold).split(schemaJsonString);
table.putToParameters(HiveExternalCatalog.DATASOURCE_SCHEMA_NUMPARTS(), Integer.toString(Iterables.size(parts)));
int index = 0;
for (String part : parts)
{
table.putToParameters(HiveExternalCatalog.DATASOURCE_SCHEMA_PART_PREFIX() + index, part);
index++;
}
log.info("Creating external Spark table mapping for {}.{} C* table",ksName, tableName);
table.setDbName(ksName);
table.setTableName(tableName);
table.setTableType(TableType.EXTERNAL_TABLE.toString());
table.putToParameters("EXTERNAL", "TRUE");
table.putToParameters("auto_created", "true");
table.putToParameters("spark.sql.sources.provider", catalogTableMetadata.getSourceProvider());
table.setPartitionKeys(Collections.<FieldSchema>emptyList());
table.setCreateTime((int)(System.currentTimeMillis()/1000));
try
{
table.setOwner(UserGroupInformation.getCurrentUser().getShortUserName());
}
catch (IOException e)
{
throw new RuntimeException(e);
}
table.setPrivileges(new PrincipalPrivilegeSet());
StorageDescriptor sd = new StorageDescriptor();
sd.setParameters(new HashMap<>());
//Add fake columns to pass Hive table validation
sd.addToCols(new FieldSchema("Fake", "string","Fake column for source table"));
log.debug("create source table options");
SerDeInfo serde = catalogTableMetadata.getSerDeInfo();
sd.setSerdeInfo(serde);
sd.setBucketCols(Collections.<String>emptyList());
sd.setSortCols(Collections.<Order>emptyList());
table.setSd(sd);
if (log.isDebugEnabled())
log.debug("constructed table for CF:{} {}", tableName, table.toString());
return table;
}
    /**
     * Gets the list of column families for the specified keyspace
     * from the Java driver's cluster metadata.
     */
    public Collection<CatalogTableMetadata> getTableOrViewMetadatas(String ksName) {
        if (!isInternalKeyspace(ksName))
            return getTableOrViewMetadatas(getClusterMetadata().getKeyspace(CqlIdentifier.fromInternal(ksName)).orElse(null)); //TODO Handle this better
        else
            return Collections.emptyList();
    }
public Collection<CatalogTableMetadata> getAllTableOrViewMetadatas() {
Collection<CatalogTableMetadata> catalogTableMetadata = new LinkedList<CatalogTableMetadata>();
for (KeyspaceMetadata ksMetadata : getClusterMetadata().getKeyspaces().values())
{
if (!isInternalKeyspace(ksMetadata.getName().asInternal()))
catalogTableMetadata.addAll(getTableOrViewMetadatas(ksMetadata));
}
return catalogTableMetadata;
}
private Collection<CatalogTableMetadata> getTableOrViewMetadatas(KeyspaceMetadata ksMetadata) {
Collection<CatalogTableMetadata> metadatas = new LinkedList<>();
Collection<TableMetadata> tableMetadatas = (ksMetadata == null) ? Collections.<TableMetadata>emptyList() : ksMetadata.getTables().values();
for (TableMetadata tableMetadata : tableMetadatas) {
metadatas.add(new TableOrViewMetadata(tableMetadata));
}
Collection<ViewMetadata> viewMetadatas = (ksMetadata == null) ? Collections.<ViewMetadata>emptyList() : ksMetadata.getViews().values();
for (ViewMetadata tableMetadata : viewMetadatas) {
metadatas.add(new TableOrViewMetadata(tableMetadata));
}
return metadatas;
}
public boolean isInternalKeyspace(String ksName)
{
return getSystemKeyspaces().contains(ksName);
}
public static SimpleStatement getMetaStoreTableSchema(String keyspaceName, String tableName)
{
return SchemaBuilder.createTable(keyspaceName, tableName)
.withPartitionKey("key", DataTypes.TEXT)
.withClusteringColumn("entity", DataTypes.TEXT)
.withColumn("value", DataTypes.BLOB)
.build();
}
}
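/*
 * Usage sketch (hypothetical wiring -- the configuration and metastore instances
 * are assumed to be supplied by the host application, e.g. the Hive metastore
 * bootstrap; only methods defined above are used):
 *
 *   CassandraConnector connector = SchemaManagerService.getCassandraConnector(conf);
 *   SchemaManagerService schemaManager = SchemaManagerService.getInstance(conf, connector);
 *   schemaManager.refreshMetadata();
 *   schemaManager.createKeyspaceSchemasIfNeeded(metaStore);       // map new keyspaces
 *   schemaManager.createUnmappedTables("my_keyspace", metaStore); // map new tables
 */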
<filename>src/utils/mod.rs
use jwalk::DirEntry;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use jwalk::WalkDir;
mod platform;
use self::platform::*;
#[derive(Debug, Default, Eq)]
pub struct Node {
pub name: PathBuf,
pub size: u64,
pub children: Vec<Node>,
}
impl Ord for Node {
fn cmp(&self, other: &Self) -> Ordering {
if self.size == other.size {
self.name.cmp(&other.name)
} else {
self.size.cmp(&other.size)
}
}
}
impl PartialOrd for Node {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialEq for Node {
fn eq(&self, other: &Self) -> bool {
self.name == other.name && self.size == other.size && self.children == other.children
}
}
pub fn is_a_parent_of<P: AsRef<Path>>(parent: P, child: P) -> bool {
    let parent = parent.as_ref();
    let child = child.as_ref();
    child.starts_with(parent) && !parent.starts_with(child)
}
pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len());
let mut to_remove: Vec<PathBuf> = Vec::with_capacity(filenames.len());
for t in filenames {
let top_level_name = normalize_path(t);
let mut can_add = true;
for tt in top_level_names.iter() {
if is_a_parent_of(&top_level_name, tt) {
to_remove.push(tt.to_path_buf());
} else if is_a_parent_of(tt, &top_level_name) {
can_add = false;
}
}
to_remove.sort_unstable();
top_level_names.retain(|tr| to_remove.binary_search(tr).is_err());
to_remove.clear();
if can_add {
top_level_names.insert(top_level_name);
}
}
top_level_names
}
pub fn get_dir_tree<P: AsRef<Path>>(
top_level_names: &HashSet<P>,
ignore_directories: &Option<Vec<PathBuf>>,
apparent_size: bool,
limit_filesystem: bool,
threads: Option<usize>,
) -> (bool, HashMap<PathBuf, u64>) {
let mut permissions = 0;
let mut data: HashMap<PathBuf, u64> = HashMap::new();
let restricted_filesystems = if limit_filesystem {
get_allowed_filesystems(top_level_names)
} else {
None
};
for b in top_level_names.iter() {
examine_dir(
b,
apparent_size,
&restricted_filesystems,
ignore_directories,
&mut data,
&mut permissions,
threads,
);
}
(permissions == 0, data)
}
fn get_allowed_filesystems<P: AsRef<Path>>(top_level_names: &HashSet<P>) -> Option<HashSet<u64>> {
let mut limit_filesystems: HashSet<u64> = HashSet::new();
for file_name in top_level_names.iter() {
if let Ok(a) = get_filesystem(file_name) {
limit_filesystems.insert(a);
}
}
Some(limit_filesystems)
}
pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
// normalize path ...
// 1. removing repeated separators
// 2. removing interior '.' ("current directory") path segments
// 3. removing trailing extra separators and '.' ("current directory") path segments
// * `Path.components()` does all the above work; ref: <https://doc.rust-lang.org/std/path/struct.Path.html#method.components>
// 4. changing to os preferred separator (automatically done by recollecting components back into a PathBuf)
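    // For example (matching the test cases below):
    //   normalize_path("a/././b///") -> "a/b"
    //   normalize_path("c/././.")    -> "c"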
path.as_ref().components().collect::<PathBuf>()
}
fn examine_dir<P: AsRef<Path>>(
top_dir: P,
apparent_size: bool,
filesystems: &Option<HashSet<u64>>,
ignore_directories: &Option<Vec<PathBuf>>,
data: &mut HashMap<PathBuf, u64>,
file_count_no_permission: &mut u64,
threads: Option<usize>,
) {
let top_dir = top_dir.as_ref();
let mut inodes: HashSet<(u64, u64)> = HashSet::new();
let mut iter = WalkDir::new(top_dir)
.preload_metadata(true)
.skip_hidden(false);
if let Some(threads_to_start) = threads {
iter = iter.num_threads(threads_to_start);
}
'entry: for entry in iter {
if let Ok(e) = entry {
let maybe_size_and_inode = get_metadata(&e, apparent_size);
if let Some(dirs) = ignore_directories {
let path = e.path();
let parts = path.components().collect::<Vec<std::path::Component>>();
for d in dirs {
let seq = d.components().collect::<Vec<std::path::Component>>();
if parts
.windows(seq.len())
.any(|window| window.iter().collect::<PathBuf>() == *d)
{
continue 'entry;
}
}
}
match maybe_size_and_inode {
Some((size, maybe_inode)) => {
if !should_ignore_file(apparent_size, filesystems, &mut inodes, maybe_inode) {
process_file_with_size_and_inode(top_dir, data, e, size)
}
}
None => *file_count_no_permission += 1,
}
} else {
*file_count_no_permission += 1
}
}
}
fn should_ignore_file(
apparent_size: bool,
restricted_filesystems: &Option<HashSet<u64>>,
inodes: &mut HashSet<(u64, u64)>,
maybe_inode: Option<(u64, u64)>,
) -> bool {
if let Some(inode_dev_pair) = maybe_inode {
// Ignore files on different devices (if flag applied)
if let Some(rs) = restricted_filesystems {
if !rs.contains(&inode_dev_pair.1) {
return true;
}
}
if !apparent_size {
// Ignore files already visited or symlinked
if inodes.contains(&inode_dev_pair) {
return true;
}
inodes.insert(inode_dev_pair);
}
}
false
}
fn process_file_with_size_and_inode<P: AsRef<Path>>(
top_dir: P,
data: &mut HashMap<PathBuf, u64>,
e: DirEntry,
size: u64,
) {
let top_dir = top_dir.as_ref();
// This path and all its parent paths have their counter incremented
for path in e.path().ancestors() {
        // This is required due to a bug in jwalk that adds '/' to all sub dir lists;
        // see: https://github.com/jessegrosjean/jwalk/issues/13
if path.to_string_lossy() == "/" && top_dir.to_string_lossy() != "/" {
continue;
}
let s = data.entry(normalize_path(path)).or_insert(0);
*s += size;
if path.starts_with(top_dir) && top_dir.starts_with(path) {
break;
}
}
}
pub fn sort_by_size_first_name_second(a: &(PathBuf, u64), b: &(PathBuf, u64)) -> Ordering {
let result = b.1.cmp(&a.1);
if result == Ordering::Equal {
a.0.cmp(&b.0)
} else {
result
}
}
pub fn sort(data: HashMap<PathBuf, u64>) -> Vec<(PathBuf, u64)> {
let mut new_l: Vec<(PathBuf, u64)> = data.iter().map(|(a, b)| (a.clone(), *b)).collect();
new_l.sort_unstable_by(sort_by_size_first_name_second);
new_l
}
pub fn find_big_ones(new_l: Vec<(PathBuf, u64)>, max_to_show: usize) -> Vec<(PathBuf, u64)> {
if max_to_show > 0 && new_l.len() > max_to_show {
new_l[0..max_to_show].to_vec()
} else {
new_l
}
}
fn depth_of_path(name: &PathBuf) -> usize {
// Filter required as paths can have some odd preliminary
// ("Prefix") bits (for example, from windows, "\\?\" or "\\UNC\")
name.components()
.filter(|&c| match c {
std::path::Component::Prefix(_) => false,
_ => true,
})
.count()
}
pub fn trim_deep_ones(
input: Vec<(PathBuf, u64)>,
max_depth: u64,
top_level_names: &HashSet<PathBuf>,
) -> Vec<(PathBuf, u64)> {
let mut result: Vec<(PathBuf, u64)> = Vec::with_capacity(input.len() * top_level_names.len());
for name in top_level_names {
let my_max_depth = depth_of_path(name) + max_depth as usize;
for &(ref k, ref v) in input.iter() {
if k.starts_with(name) && depth_of_path(k) <= my_max_depth {
result.push((k.clone(), *v));
}
}
}
result
}
#[cfg(test)]
mod tests {
#[allow(unused_imports)]
use super::*;
#[test]
fn test_simplify_dir() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("a"));
assert_eq!(simplify_dir_names(vec!["a"]), correct);
}
#[test]
fn test_simplify_dir_rm_subdir() {
let mut correct = HashSet::new();
correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
}
#[test]
fn test_simplify_dir_duplicates() {
let mut correct = HashSet::new();
correct.insert(["a", "b"].iter().collect::<PathBuf>());
correct.insert(PathBuf::from("c"));
assert_eq!(
simplify_dir_names(vec![
"a/b",
"a/b//",
"a/././b///",
"c",
"c/",
"c/.",
"c/././",
"c/././."
]),
correct
);
}
#[test]
fn test_simplify_dir_rm_subdir_and_not_substrings() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("b"));
correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
}
#[test]
fn test_simplify_dir_dots() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("src"));
assert_eq!(simplify_dir_names(vec!["src/."]), correct);
}
#[test]
fn test_simplify_dir_substring_names() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("src"));
correct.insert(PathBuf::from("src_v2"));
assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct);
}
#[test]
fn test_is_a_parent_of() {
assert!(is_a_parent_of("/usr", "/usr/andy"));
assert!(is_a_parent_of("/usr", "/usr/andy/i/am/descendant"));
assert!(!is_a_parent_of("/usr", "/usr/."));
assert!(!is_a_parent_of("/usr", "/usr/"));
assert!(!is_a_parent_of("/usr", "/usr"));
assert!(!is_a_parent_of("/usr/", "/usr"));
assert!(!is_a_parent_of("/usr/andy", "/usr"));
assert!(!is_a_parent_of("/usr/andy", "/usr/sibling"));
assert!(!is_a_parent_of("/usr/folder", "/usr/folder_not_a_child"));
}
#[test]
fn test_is_a_parent_of_root() {
assert!(is_a_parent_of("/", "/usr/andy"));
assert!(is_a_parent_of("/", "/usr"));
assert!(!is_a_parent_of("/", "/"));
}
#[test]
fn test_should_ignore_file() {
let mut files = HashSet::new();
files.insert((10, 20));
assert!(!should_ignore_file(true, &None, &mut files, None));
        // A new file is not yet known; it will be inserted into the hashset and should not be ignored
let new_fd = (11, 12);
assert!(!should_ignore_file(false, &None, &mut files, Some(new_fd)));
assert!(files.contains(&new_fd));
// The same file will be ignored the second time
assert!(should_ignore_file(false, &None, &mut files, Some(new_fd)));
}
#[test]
fn test_should_ignore_file_on_different_device() {
let mut files = HashSet::new();
files.insert((10, 20));
let mut devices = HashSet::new();
devices.insert(99);
let od = Some(devices);
// If we are looking at a different device (disk) and the device flag is set
// then apparent_size is irrelevant - we ignore files on other devices
let new_file = (11, 12);
assert!(should_ignore_file(false, &od, &mut files, Some(new_file)));
assert!(should_ignore_file(true, &od, &mut files, Some(new_file)));
// We do not ignore files on the same device
assert!(!should_ignore_file(false, &od, &mut files, Some((2, 99))));
assert!(!should_ignore_file(true, &od, &mut files, Some((2, 99))));
}
}
Hello, my dearest dears, and welcome back to another festive installment of Will It Sous Vide?, the weekly column where I make whatever you want me to with my immersion circulator.
During our last topic picking session, we settled on Thanksgiving sides because, let’s face it, most people care way more about the sides than they do the turkey. No specific side won out, so I decided to try three of my favorites: cornbread dressing, mashed potatoes, and cranberry sauce.
The Smash Hit: Cornbread Dressing
My (opinionated, southern) family has two very serious rules about the bread-based side that is served alongside turkey:
1. The bread must be of the corn variety.
2. You better not call it “stuffing.”
Good dressing starts with good cornbread, so I made a batch of my grandmother’s very simple, very easy, very tasty cornbread. You can use any cornbread you damn well please (including store-bought), but I’ll go ahead and include the recipe, just in case you want to try it. (I know it’s not technically a “sous vide” recipe, but there was no way I was going to make dressing with any other cornbread.)
Claire’s Grandmother’s Not-Sous-Vide Cornbread
Ingredients:
About four tablespoons of bacon grease
1 1/3 cup self-rising yellow cornmeal
1 egg
1 3/4 cup buttermilk
Preheat your oven to 450℉. Add bacon grease to a cake pan or skillet (my grandmother always uses a cake pan, and she does not give a single damn) and pop it in the oven. Combine remaining ingredients in a bowl. Once you hear the bacon grease start to sizzle (about 5-10 minutes), remove your pan and pour the batter in. Return to the oven and bake until golden brown on top (about 35-45 minutes). Let cool and turn out. Crumble if you are going to be using it in dressing.
Traditional dressing calls for cornbread, eggs, stock, butter, and herbs. I first tried a version with stock—I’m actually not sure why; just to see what would happen, I guess?—and it was an unsurprisingly soupy mess. I then decided to eliminate the broth altogether, and used my BFF Better Than Bouillon to give it that stock-y quality. This is the recipe I settled on, and it is very freaking good.
Claire’s Fantastic Sous-Vide Cornbread Dressing
Ingredients:
4 cups of crumbled cornbread (which, incidentally, is exactly how much the above recipe makes)
1 onion, chopped (You may notice a lack of celery; this is because celery is extremely bad tasting.)
5-7 fresh sage leaves, depending on the size
1-2 fresh sprigs of marjoram
1-2 fresh sprigs of thyme
2 eggs
2 tablespoons of bacon grease or lard (or butter if you must)
2 teaspoons of Roasted Chicken Better Than Bouillon
Remove herbs from stems and chop finely. Add herbs, chopped onion, and cornbread to a 1-gallon freezer bag and shake to distribute flavorful goodness.
Beat eggs in a bowl and add your fat of choice and Better Than Bouillon. Mix to create a visually unappealing slurry.
Pour the questionable-looking liquid over your sexy crumb mixture, push the air out using the water submersion technique, and place in a water bath set to 165℉ for two and a half hours.
Dump it out in a cast iron pan and pop it under the broiler to brown on top. I don’t recommend torching it, as that results in a burned, rather than browned, stuffing, and burnt stuffing isn’t a whole lot of fun.
So, does cornbread dressing sous vide?
The Answer: Fuck yeah, it does. I’ve eaten a lot of dressing in my life—all of it pretty good—but this was the best I’ve ever had. The fat and herbs infuse right into the cornbread without making it soggy, so every bite is packed with flavor, and the eggs give everything a nice, moist, almost cake-like texture. You may notice that the onions aren’t extremely cooked, but that’s how I like them in dressing. The just-softened, barely-transparent alliums provide a bit of bite to cut through the richness of the dressing, and I appreciate it. If you need your onions browned, just brown them before adding to the dressing mixture. If I wasn’t already sous vide-ing pumpkin pie for my family this Thanksgiving, I would insist on making this. I give it 5 out of 5 turkey legs.
Next side.
The One With a Caveat: Mashed Potatoes
As a few of you have pointed out, my kitchen appliance game is strange. I own an immersion circulator, but not a microwave; an ice cream attachment for my KitchenAid, but no coffee pot. I am also lacking in the way of a potato ricer, because I’ve always mashed my potatoes with a wooden spoon—maybe my stand mixer, equipped with paddle attachment—and never had a problem with lumps. This was not the case with sous-vide mashed potatoes. (More thoughts on why in a moment.) As far as recipes go, I used this one from Anova’s site as a template, making a few changes.
Very Garlicky, Slightly Lumpy, Sous-Vide Mashed Potatoes:
Ingredients:
2 pounds of Russet potatoes, sliced into 1/8-inch slices
5 cloves of garlic, peeled and smashed
8 ounces (2 sticks) of butter
1 cup buttermilk
1 big sprig of rosemary (This is optional. I made two batches and preferred the batch without it.)
Add everything to a 1-gallon freezer bag and submerge in a water bath set to 194℉ for two hours. (The Anova recipe calls for an hour and a half, but my potatoes were not done in that amount of time.)
Once they’re done cooking, drain off the melted butter and buttermilk and reserve. If you have a potato ricer, pass the potatoes through it now. I do not have a potato ricer, so I tried to beat them into submission using my spoon and then my stand mixer. This did not work, and I was never able to get all the lumps out.
Because these babies had been cooked in fat, they were a little slippery, and portions of them deftly avoided both my spoon and paddle attachment, slipping away unmashed. They were, however, some of the tastiest mashed potatoes I’ve ever had. Like the stuffing before it, cooking them in fat infused them with a whole lot of flavor, the flavor in this case being garlic, and that garlic was present in every single bite.
Returning to the question we always return to: Will mashed potatoes sous vide?
The Answer: Yeah, sure, and they taste really freaking good, but you need a food mill or potato ricer to get the texture right, as there is no way to eliminate the lumps without them. If you have access to one of those devices, I urge you to make these. I give ‘em 3.75 out of 5 bottles of Beaujolais nouveau.
Moving on.
The Pointless, Though Tasty, Option: Cranberry Sauce
I had seen a few recipes for sous-vide cranberry sauce, and could not for the life of me figure out what the advantage would be. As we all know, liquid can’t evaporate when sealed in a bag, so there’s no way for the sauce to reduce. There’s also nothing easier than boiling cranberries with water and sugar so, again, not totally sure why anyone would do this. (But that’s half the point of this column.)
I didn’t really use a recipe for this; I just threw twelve ounces of fresh cranberries and a cup of white sugar in a 1-gallon freezer bag and submerged it in a 185℉ water bath until the berries began to break down (about two hours). (I left the water out, for obvious reasons.) I took the bag out, smushed it around with my hands to make sure everyone was well acquainted, transferred the sauce to some vintage Pyrex, and let it chill overnight in the fridge.
Once more, with feeling: Does cranberry sauce sous vide?
The answer: Technically, yes. You do get a sweet and sour sauce that is made of cranberries, but it takes a real long time, and you don’t extract enough pectin to get any gelling, and I am very much about a gelled cranberry sauce. The sauce was also a bit tarter than one prepared the traditional way, which could be seen as a positive or a negative, depending on your palate. So, it’s not bad, but it’s not as good as stove-top cranberry sauce, and it takes longer. I give it 1 out of 5 slices of pie. (Brutal.)
/***********************************************************************/
/* CAT Access Method opening routine. */
/***********************************************************************/
bool TDBCAT::OpenDB(PGLOBAL g)
{
if (Use == USE_OPEN) {
N = -1;
return false;
}
if (Mode != MODE_READ) {
strcpy(g->Message, "CAT tables are read only");
return true;
}
if (Initialize(g))
return true;
Use = USE_OPEN;
return InitCol(g);
}
Author PG Wodehouse was born in England and died in the US, but in between he lived for several years in France, a country that looms large in some of his most colourful creations.
It is true what they say in the blurb - there honestly is no better antidote to anguish, ennui or general world-weariness than to flick through a few pages of the Master.
I speak from experience. PG Wodehouse has coaxed me out from many a dark place - and now I have become intrigued by a hitherto unexplored sidelight on the life and oeuvre. Really, someone should take it up as an academic thesis - Wodehouse and the French.
Plum - as he was known - began living in France in 1934. He chose Le Touquet, on the north coast, because it was a fashionable resort with golf, casino and a beach for the dogs.
He had visited the country many times before of course, and French places and characters were already featuring regularly in the books.
Cannes, the playground of toffs par excellence, was a favourite. In the story Noblesse Oblige for example, Drones Club member Freddie Widgeon is invited to be the judge in a sea-front "pretty mother-and-baby" competition.
He accedes, understanding that his reward will be a much-needed 500 franc note. Too late does he realise that the 500 francs is what he - an evidently copiously-endowed English milord - is supposed to supply to the winning mother-and-baby. And of course noblesse oblige - so he does.
Cannes also provides the setting for what buffs regard as one of the most perfect opening Wodehouse paragraphs, this from The Luck of the Bodkins:
"Into the face of the young man who sat on the terrace of the Hotel Magnifique... there had crept a look of furtive shame, the shifty hangdog look which announces that an Englishman is about to talk French."
PG Wodehouse always said his own French was rudimentary. But his unfailingly brilliant ear - combined, one supposes, with a solid grounding from school - meant that his renderings on the page were always perfectly hilarious.
Sir Pelham Grenville Wodehouse
Born in 1881, Guildford
Educated at Dulwich College
Spends two years with the Hong Kong and Shanghai Bank, before becoming a full-time writer
Writes 93 books in a career lasting 73 years - novels, short stories and musical comedies
In France from 1934-40
Spends later life in the US
Knighted in 1975, aged 93
Nowhere is this truer than in depictions of his most famous French character, that gift of God to the gastric juices, the temperamental chef Anatole.
In Right Ho, Jeeves, Anatole is described as a tubby little man with a moustache of the outsize or soup-strainer type, which turns up or droops down depending on mood.
With his "Sylphides a la creme d'ecrevisses" or his "Nonnettes de poulet Agnes Sorel", Anatole alone is capable of soothing the chronic indigestion of Tom Travers - businessman husband of Bertie Wooster's favourite aunt Dahlia - and there is much intriguing by various malefactors who try to steal Anatole for their own kitchens.
Anatole's English is fluent but mixed. To add to his inevitable Frenchisms, he also once worked for an American family in Nice, where he studied under their chauffeur, one of the Maloneys of Brooklyn, we are told, and this gives a certain rough colour.
But when fully aroused he reverts to French:
"Words like "marmiton de Domange", "pignouf", "hurluberlu" and "roustisseur" were fluttering from him like bats out of a barn. Lost on me of course [this is Bertie Wooster writing in the first person] because I am still more or less in the "esker-vous-avez" stage. I regretted this, for they sounded good."
Those words do exist, by the way.
[Image caption: Bertie Wooster and Jeeves were brought to life by Hugh Laurie and Stephen Fry]
Then there is Nicolas Jules St Xavier Auguste, Marquis de Maufringneuse et Valerie-Moberanne, lead rogue in the book French Leave who has a job in a ministry as "employe attache a l'expedition du troisieme bureau" (in other words - clerk).
Or the picturesque Brittany town of St Rocque, site of Chateau Blissac, where in one delicious scene in Hot Water two characters, neither of whom can speak French, are both pretending to be French, and so have to improvise a French-sounding conversation in front of other guests in order to preserve their false credentials.
[Image caption: Wodehouse wrote up until his death aged 93]
Poor old Plum's time in France did not end happily. In 1940 he was detained at his home Low Water in Le Touquet by the advancing German army.
Sent to an internment camp, he was released the next year when he turned 60, but he then made his famously ill-judged broadcasts, intended as light-hearted diversions for American fans but of great propaganda value to the Nazis.
Wodehouse came to live in Paris and it was at the Hotel Bristol - down the road from where I write this in the BBC office - that at the Liberation in 1944 he was found and questioned by the journalist Malcolm Muggeridge, then working for MI6.
Wodehouse was detained by the French for a while but he was never charged and in 1947 he left for the United States.
I know he must have loved it in France. They are a minor theme of course, but in his hands, France and the French radiate optimism, gaiety, sheer fun.
Now as ever, now more than ever, balm for the soul.
Rep. Maxine Waters (D-Calif.), who has called for President Trump's impeachment, flatly stated that she doesn't "respect this president" during an interview on Friday.
“I don’t honor him,” Waters said on MSNBC’s “Morning Joe.” “I don’t respect this president. I don’t think that he deserves to be president of this great country. I am very concerned about him and I really think he’s dangerous. This is abnormal what we’re experiencing with this president.”
Waters has emerged as a fierce critic of Trump, challenging the president on issues ranging from his treatment of women to taxes.
The California lawmaker renewed her call for Trump’s impeachment last weekend during Saturday’s Tax March in Washington.
“Show the people your taxes,” she said, calling Trump a “liar” for claiming he cannot release his tax returns because of an ongoing IRS audit. “Stop stonewalling, stop hiding.”
“I will fight every day until he is impeached,” Waters added, leading audience members in chants of “impeach 45.”
Trump has repeatedly refused to release his tax returns, breaking with decades of precedent. The IRS has said nothing prevents an individual from making their tax returns public, even if they are under audit.
Waters said Friday she would keep sounding alarm over alleged ties between members of Trump's presidential campaign and Russia and the need for probing them.
“I believe that if we have the credible investigations that it will lead to his impeachment,” she told MSNBC hosts Joe Scarborough and Mika Brzezinski. “I believe that.”
use super::TaskContext;
use super::{pid_alloc, KernelStack, PidHandle};
use crate::fs::{File, MailBox, Serial, Socket, Stdin, Stdout};
use crate::mm::{translate_writable_va, MemorySet, PhysAddr, PhysPageNum, VirtAddr, KERNEL_SPACE};
use crate::task::pid::add_task_2_map;
use crate::trap::{trap_handler, TrapContext, UserTrapInfo};
use crate::{
config::{PAGE_SIZE, TRAP_CONTEXT, USER_TRAP_BUFFER},
loader::get_app_data_by_name,
mm::translated_str,
};
use alloc::sync::{Arc, Weak};
use alloc::vec;
use alloc::vec::Vec;
use core::fmt::{self, Debug, Formatter};
use spin::{Mutex, MutexGuard};
#[derive(Debug)]
pub struct TaskControlBlock {
// immutable
pub pid: PidHandle,
pub kernel_stack: KernelStack,
// mutable
inner: Mutex<TaskControlBlockInner>,
}
pub struct TaskControlBlockInner {
pub trap_cx_ppn: PhysPageNum,
pub base_size: usize,
pub task_cx_ptr: usize,
pub user_trap_info: Option<UserTrapInfo>,
pub task_status: TaskStatus,
pub priority: isize,
pub memory_set: MemorySet,
pub parent: Option<Weak<TaskControlBlock>>,
pub children: Vec<Arc<TaskControlBlock>>,
pub exit_code: i32,
pub fd_table: Vec<Option<Arc<dyn File + Send + Sync>>>,
pub mail_box: Arc<MailBox>,
}
impl Debug for TaskControlBlockInner {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!(
"TCBInner: {{\r\n trap cx addr: {:?} , base_size: {:#x} \r\n task_cx_ptr: {:#x} , token: {:#x} \r\n}}",
PhysAddr::from(self.trap_cx_ppn), self.base_size, self.task_cx_ptr, self.memory_set.token()
))
}
}
impl TaskControlBlockInner {
pub fn get_task_cx_ptr2(&self) -> *const usize {
&self.task_cx_ptr as *const usize
}
pub fn get_trap_cx(&self) -> &'static mut TrapContext {
self.trap_cx_ppn.get_mut()
}
pub fn get_user_token(&self) -> usize {
self.memory_set.token()
}
fn get_status(&self) -> TaskStatus {
self.task_status
}
pub fn is_zombie(&self) -> bool {
self.get_status() == TaskStatus::Zombie
}
pub fn set_priority(&mut self, priority: isize) -> Result<isize, isize> {
if priority < 2 {
return Err(-1);
}
self.priority = priority;
Ok(priority)
}
pub fn mmap(&mut self, start: usize, len: usize, port: usize) -> Result<isize, isize> {
self.memory_set.mmap(start, len, port)
}
pub fn munmap(&mut self, start: usize, len: usize) -> Result<isize, isize> {
self.memory_set.munmap(start, len)
}
pub fn alloc_fd(&mut self) -> usize {
if let Some(fd) = (0..self.fd_table.len()).find(|fd| self.fd_table[*fd].is_none()) {
fd
} else {
self.fd_table.push(None);
self.fd_table.len() - 1
}
}
pub fn is_mailbox_full(&self) -> bool {
self.mail_box.is_full()
}
pub fn is_mailbox_empty(&self) -> bool {
self.mail_box.is_empty()
}
pub fn is_user_trap_enabled(&self) -> bool {
self.get_trap_cx().sstatus.uie()
}
pub fn init_user_trap(&mut self) -> Result<isize, isize> {
use riscv::register::sstatus;
if self.user_trap_info.is_none() {
// R | W
if self.mmap(USER_TRAP_BUFFER, PAGE_SIZE, 0b11).is_ok() {
let phys_addr =
translate_writable_va(self.get_user_token(), USER_TRAP_BUFFER).unwrap();
self.user_trap_info = Some(UserTrapInfo {
user_trap_buffer_ppn: PhysPageNum::from(PhysAddr::from(phys_addr)),
user_trap_record_num: 0,
devices: Vec::new(),
});
unsafe {
sstatus::set_uie();
}
return Ok(USER_TRAP_BUFFER as isize);
} else {
warn!("[init user trap] mmap failed!");
}
} else {
warn!("[init user trap] self user trap info is not None!");
}
Err(-1)
}
pub fn restore_user_trap_info(&mut self) {
use riscv::register::{uip, uscratch};
if self.is_user_trap_enabled() {
if let Some(trap_info) = &mut self.user_trap_info {
if trap_info.user_trap_record_num > 0 {
trace!("restore {} user trap", trap_info.user_trap_record_num);
uscratch::write(trap_info.user_trap_record_num as usize);
trap_info.user_trap_record_num = 0;
unsafe {
uip::set_usoft();
}
}
}
}
}
}
impl TaskControlBlock {
pub fn acquire_inner_lock(&self) -> MutexGuard<TaskControlBlockInner> {
self.inner.lock()
}
pub fn new(elf_data: &[u8]) -> Arc<TaskControlBlock> {
// memory_set with elf program headers/trampoline/trap context/user stack
let (memory_set, user_sp, entry_point) = MemorySet::from_elf(elf_data);
let trap_cx_ppn = memory_set
.translate(VirtAddr::from(TRAP_CONTEXT).into())
.unwrap()
.ppn();
// alloc a pid and a kernel stack in kernel space
let pid_handle = pid_alloc();
let kernel_stack = KernelStack::new(&pid_handle);
let kernel_stack_top = kernel_stack.get_top();
// push a task context which goes to trap_return to the top of kernel stack
let task_cx_ptr = kernel_stack.push_on_top(TaskContext::goto_trap_return());
trace!("new task cx ptr: {:#x?}", task_cx_ptr as usize);
let task_control_block = Arc::new(TaskControlBlock {
pid: pid_handle,
kernel_stack,
inner: Mutex::new(TaskControlBlockInner {
trap_cx_ppn,
base_size: user_sp,
task_cx_ptr: task_cx_ptr as usize,
user_trap_info: None,
task_status: TaskStatus::Ready,
memory_set,
parent: None,
children: Vec::new(),
exit_code: 0,
priority: 16,
fd_table: vec![
// 0 -> stdin
Some(Arc::new(Stdin)),
// 1 -> stdout
Some(Arc::new(Stdout)),
// 2 -> stderr
Some(Arc::new(Stdout)),
                    // 3 -> serial 2
                    Some(Arc::new(Serial::<2>)),
                    // 4 -> serial 3
                    Some(Arc::new(Serial::<3>)),
],
mail_box: Arc::new(MailBox::new()),
}),
});
add_task_2_map(task_control_block.getpid(), task_control_block.clone());
// prepare TrapContext in user space
let trap_cx = task_control_block.acquire_inner_lock().get_trap_cx();
*trap_cx = TrapContext::app_init_context(
entry_point,
user_sp,
KERNEL_SPACE.lock().token(),
kernel_stack_top,
trap_handler as usize,
);
task_control_block
}
pub fn exec(&self, elf_data: &[u8]) {
// memory_set with elf program headers/trampoline/trap context/user stack
let (memory_set, user_sp, entry_point) = MemorySet::from_elf(elf_data);
let trap_cx_ppn = memory_set
.translate(VirtAddr::from(TRAP_CONTEXT).into())
.unwrap()
.ppn();
// **** hold current PCB lock
let mut inner = self.acquire_inner_lock();
inner.user_trap_info = None;
// substitute memory_set
inner.memory_set = memory_set;
// update trap_cx ppn
inner.trap_cx_ppn = trap_cx_ppn;
// initialize trap_cx
let trap_cx = inner.get_trap_cx();
*trap_cx = TrapContext::app_init_context(
entry_point,
user_sp,
KERNEL_SPACE.lock().token(),
self.kernel_stack.get_top(),
trap_handler as usize,
);
// **** release current PCB lock
}
pub fn fork(self: &Arc<TaskControlBlock>) -> Arc<TaskControlBlock> {
// ---- hold parent PCB lock
let mut parent_inner = self.acquire_inner_lock();
// copy user space(include trap context)
let memory_set = MemorySet::from_existed_user(&parent_inner.memory_set);
let trap_cx_ppn = memory_set
.translate(VirtAddr::from(TRAP_CONTEXT).into())
.unwrap()
.ppn();
// alloc a pid and a kernel stack in kernel space
let pid_handle = pid_alloc();
let kernel_stack = KernelStack::new(&pid_handle);
let kernel_stack_top = kernel_stack.get_top();
// push a goto_trap_return task_cx on the top of kernel stack
let task_cx_ptr = kernel_stack.push_on_top(TaskContext::goto_trap_return());
debug!("forked task cx ptr: {:#x?}", task_cx_ptr as usize);
// copy fd table
let mut new_fd_table: Vec<Option<Arc<dyn File + Send + Sync>>> = Vec::new();
for fd in parent_inner.fd_table.iter() {
if let Some(file) = fd {
new_fd_table.push(Some(file.clone()));
} else {
new_fd_table.push(None);
}
}
let mut user_trap_info: Option<UserTrapInfo> = None;
if let Some(mut trap_info) = parent_inner.user_trap_info.clone() {
debug!("[fork] copy parent trap info");
trap_info.user_trap_buffer_ppn = memory_set
.translate(VirtAddr::from(USER_TRAP_BUFFER).into())
.unwrap()
.ppn();
user_trap_info = Some(trap_info);
}
let task_control_block = Arc::new(TaskControlBlock {
pid: pid_handle,
kernel_stack,
inner: Mutex::new(TaskControlBlockInner {
trap_cx_ppn,
base_size: parent_inner.base_size,
task_cx_ptr: task_cx_ptr as usize,
user_trap_info,
task_status: TaskStatus::Ready,
memory_set,
parent: Some(Arc::downgrade(self)),
children: Vec::new(),
exit_code: 0,
priority: 16,
fd_table: new_fd_table,
mail_box: Arc::new(MailBox::new()),
}),
});
add_task_2_map(task_control_block.getpid(), task_control_block.clone());
// add child
parent_inner.children.push(task_control_block.clone());
// modify kernel_sp in trap_cx
// **** acquire child PCB lock
let trap_cx = task_control_block.acquire_inner_lock().get_trap_cx();
// **** release child PCB lock
trap_cx.kernel_sp = kernel_stack_top;
// return
task_control_block
// ---- release parent PCB lock
}
pub fn getpid(&self) -> usize {
self.pid.0
}
pub fn spawn(
self: &Arc<TaskControlBlock>,
file: *const u8,
) -> Result<Arc<TaskControlBlock>, isize> {
let mut parent_inner = self.acquire_inner_lock();
let parent_token = parent_inner.get_user_token();
let f = translated_str(parent_token, file);
debug!("SPAWN exec {:?}", &f);
if let Some(elf_data) = get_app_data_by_name(f.as_str()) {
let (memory_set, user_sp, entry_point) = MemorySet::from_elf(elf_data);
let trap_cx_ppn = memory_set
.translate(VirtAddr::from(TRAP_CONTEXT).into())
.unwrap()
.ppn();
let pid_handle = pid_alloc();
let kernel_stack = KernelStack::new(&pid_handle);
let kernel_stack_top = kernel_stack.get_top();
let task_cx_ptr = kernel_stack.push_on_top(TaskContext::goto_trap_return());
trace!("spawned task cx ptr: {:#x?}", task_cx_ptr as usize);
let task_control_block = Arc::new(TaskControlBlock {
pid: pid_handle,
kernel_stack,
inner: Mutex::new(TaskControlBlockInner {
trap_cx_ppn,
base_size: user_sp,
task_cx_ptr: task_cx_ptr as usize,
user_trap_info: None,
task_status: TaskStatus::Ready,
memory_set,
parent: Some(Arc::downgrade(self)),
children: Vec::new(),
exit_code: 0,
priority: 16,
fd_table: vec![
// 0 -> stdin
Some(Arc::new(Stdin)),
// 1 -> stdout
Some(Arc::new(Stdout)),
// 2 -> stderr
Some(Arc::new(Stdout)),
// 3 -> serial 2
Some(Arc::new(Serial::<2>)),
// 4 -> serial 3
Some(Arc::new(Serial::<3>)),
],
mail_box: Arc::new(MailBox::new()),
}),
});
add_task_2_map(task_control_block.getpid(), task_control_block.clone());
parent_inner.children.push(task_control_block.clone());
let trap_cx = task_control_block.acquire_inner_lock().get_trap_cx();
*trap_cx = TrapContext::app_init_context(
entry_point,
user_sp,
KERNEL_SPACE.lock().token(),
kernel_stack_top,
trap_handler as usize,
);
return Ok(task_control_block);
}
Err(-1)
}
pub fn create_socket(&self) -> Arc<Socket> {
self.inner.lock().mail_box.create_socket()
}
}
impl PartialEq for TaskControlBlock {
fn eq(&self, other: &Self) -> bool {
self.pid == other.pid
}
}
impl Eq for TaskControlBlock {}
impl PartialOrd for TaskControlBlock {
fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for TaskControlBlock {
fn cmp(&self, other: &Self) -> core::cmp::Ordering {
self.pid.cmp(&other.pid)
}
}
#[derive(Copy, Clone, PartialEq)]
pub enum TaskStatus {
Ready,
Running,
Zombie,
}
<filename>tests/pos/Constraints.hs
module Constraints where
{-@
cmp :: forall < pref :: b -> Bool, postf :: b -> c -> Bool
, pre :: a -> Bool, postg :: a -> b -> Bool
, post :: a -> c -> Bool
>.
{xx::a<pre>, w::b<postg xx> |- c<postf w> <: c<post xx>}
{ww::a<pre> |- b<postg ww> <: b<pref>}
f:(y:b<pref> -> c<postf y>)
-> g:(z:a<pre > -> b<postg z>)
-> x: a<pre> -> c<post x>
@-}
cmp :: (b -> c)
-> (a -> b)
-> a -> c
cmp f g x = f (g x)
{-@ incr :: x:Nat -> {v:Nat | v == x + 1} @-}
incr :: Int -> Int
incr x = x + 1
{-@ incr2 :: x:Nat -> {v:Nat | v == x + 2} @-}
incr2 :: Int -> Int
incr2 = cmp incr incr
{-@ plus1 :: x:Nat -> {v:Nat | v == x + 20} @-}
plus1 :: Int -> Int
plus1 x = x + 20
{-@ plus2 :: x:{v:Nat | v > 10} -> {v:Nat | v == x + 22} @-}
plus2 :: Int -> Int
plus2 x = x + 22
{-@ plus42 :: x:Nat -> {v:Nat | v == x + 42} @-}
plus42 :: Int -> Int
plus42 = cmp plus2 plus1
/**
* <p>
* Inject a snippet of JavaScript into the page for execution in the context
* of the currently selected frame.<br>
* Support: Web(WebView)
*
* @param code The script to execute
* @return The results of execution
* @throws Exception
*/
public JSONObject execute(String code) throws Exception {
JSONObject jsonObject = new JSONObject();
jsonObject.put("script", code);
jsonObject.put("args", new JSONArray());
return execute.execute(jsonObject);
}
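/*
 * Usage sketch (hypothetical -- the object exposing execute() and the script
 * text are illustrative, not taken from this codebase):
 *
 *   JSONObject title = driver.execute("return document.title;");
 */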
<filename>FangameReader/UserAliasFile.cpp
#include <common.h>
#pragma hdrstop
#include <UserAliasFile.h>
#include <BossInfo.h>
namespace Fangame {
//////////////////////////////////////////////////////////////////////////
const CUnicodeView aliasRootName = L"Aliases";
CUserAliasFile::CUserAliasFile( CUnicodeView fileName )
{
try {
aliasDoc.LoadFromFile( fileName );
} catch( CException& ) {
		// File doesn't exist; that's OK.
aliasDoc.SetRoot( aliasRootName );
return;
}
for( auto& child : aliasDoc.GetRoot().Children() ) {
parseAliasElement( child );
}
}
void CUserAliasFile::SaveChanges()
{
aliasDoc.SaveToFile( aliasDoc.GetName() );
}
const CUnicodeView keyNameAttrib = L"name";
const CUnicodeView nameAliasAttrib = L"nameAlias";
const CUnicodeView statusAttrib = L"statusAlias";
void CUserAliasFile::parseAliasElement( CXmlElement& elem )
{
const auto name = elem.GetAttributeValue( keyNameAttrib, CUnicodePart() );
auto& bossData = bossAliases.GetOrCreate( name, elem ).Value();
bossData.Name = elem.GetAttributeValue( nameAliasAttrib, CUnicodePart() );
const auto statusStr = elem.GetAttributeValue( statusAttrib, CUnicodePart() );
bossData.Status = AttackStatusToNameDict.FindEnum( statusStr, ACS_EnumCount );
for( auto& child : elem.Children() ) {
parseAttackAlias( child, bossData );
}
}
const CUnicodeView iconAttrib = L"iconAlias";
void CUserAliasFile::parseAttackAlias( CXmlElement& elem, CBossAliasData& parent )
{
const auto name = elem.GetAttributeValue( keyNameAttrib, CUnicodePart() );
auto& attackData = parent.Attacks.GetOrCreate( name, elem ).Value();
attackData.Name = elem.GetAttributeValue( nameAliasAttrib, CUnicodePart() );
attackData.IconPath = elem.GetAttributeValue( iconAttrib, CUnicodePart() );
const auto statusStr = elem.GetAttributeValue( statusAttrib, CUnicodePart() );
attackData.Status = AttackStatusToNameDict.FindEnum( statusStr, ACS_EnumCount );
}
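// Hypothetical example of the alias document parsed above (element and
// attribute names taken from the constants in this file; values illustrative):
// <Aliases>
//     <Boss name="BossKey" nameAlias="Readable Boss Name" statusAlias="...">
//         <Attack name="AttackKey" nameAlias="Readable Attack Name" iconAlias="Attack.png" statusAlias="..." />
//     </Boss>
// </Aliases>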
CUnicodePart CUserAliasFile::GetUserBossName( CUnicodePart baseName ) const
{
const auto bossData = bossAliases.Get( baseName );
if( bossData == nullptr ) {
return baseName;
}
return bossData->Name.IsEmpty() ? baseName : bossData->Name;
}
TAttackCurrentStatus CUserAliasFile::GetUserBossStatus( CUnicodePart bossName, TAttackCurrentStatus baseStatus ) const
{
const auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
return baseStatus;
}
return bossData->Status == ACS_EnumCount ? baseStatus : bossData->Status;
}
CUnicodePart CUserAliasFile::GetUserAttackName( CUnicodePart bossName, CUnicodePart baseAttackName, CUnicodePart attackDefaultName ) const
{
const auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
return attackDefaultName;
}
const auto attackData = bossData->Attacks.Get( baseAttackName );
if( attackData == nullptr ) {
return attackDefaultName;
}
return attackData->Name.IsEmpty() ? attackDefaultName : attackData->Name;
}
CUnicodePart CUserAliasFile::GetUserIconPath( CUnicodePart bossName, CUnicodePart attackName, CUnicodePart defaultPath ) const
{
const auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
return defaultPath;
}
const auto attackData = bossData->Attacks.Get( attackName );
if( attackData == nullptr ) {
return defaultPath;
}
return attackData->IconPath.IsEmpty() ? defaultPath : attackData->IconPath;
}
TAttackCurrentStatus CUserAliasFile::GetUserAttackStatus( CUnicodePart bossName, CUnicodePart attackName, TAttackCurrentStatus baseStatus ) const
{
const auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
return baseStatus;
}
const auto attackData = bossData->Attacks.Get( attackName );
if( attackData == nullptr ) {
return baseStatus;
}
return attackData->Status == ACS_EnumCount ? baseStatus : attackData->Status;
}
void CUserAliasFile::SetUserBossName( CUnicodePart baseName, CUnicodePart newName )
{
auto bossData = bossAliases.Get( baseName );
if( bossData == nullptr ) {
bossData = &createBossData( baseName );
}
assert( bossData != nullptr );
bossData->Name = newName;
bossData->SrcElem.SetAttributeValue( nameAliasAttrib, newName );
}
void CUserAliasFile::SetUserBossStatus( CUnicodePart baseName, TAttackCurrentStatus newStatus )
{
auto bossData = bossAliases.Get( baseName );
if( bossData == nullptr ) {
bossData = &createBossData( baseName );
}
assert( bossData != nullptr );
bossData->Status = newStatus;
bossData->SrcElem.SetAttributeValue( statusAttrib, AttackStatusToNameDict[newStatus] );
}
void CUserAliasFile::SetUserAttackName( CUnicodePart bossName, CUnicodePart baseAttackName, CUnicodePart newAttackName )
{
auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
bossData = &createBossData( bossName );
}
assert( bossData != nullptr );
auto attackData = bossData->Attacks.Get( baseAttackName );
if( attackData == nullptr ) {
attackData = &createAttackData( *bossData, baseAttackName );
}
assert( attackData != nullptr );
attackData->Name = newAttackName;
attackData->SrcElem.SetAttributeValue( nameAliasAttrib, newAttackName );
}
void CUserAliasFile::SetUserIconPath( CUnicodePart bossName, CUnicodePart baseAttackName, CUnicodePart newPath )
{
auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
bossData = &createBossData( bossName );
}
assert( bossData != nullptr );
auto attackData = bossData->Attacks.Get( baseAttackName );
if( attackData == nullptr ) {
attackData = &createAttackData( *bossData, baseAttackName );
}
assert( attackData != nullptr );
attackData->IconPath = newPath;
attackData->SrcElem.SetAttributeValue( iconAttrib, newPath );
}
void CUserAliasFile::SetUserAttackStatus( CUnicodePart bossName, CUnicodePart baseAttackName, TAttackCurrentStatus newStatus )
{
auto bossData = bossAliases.Get( bossName );
if( bossData == nullptr ) {
bossData = &createBossData( bossName );
}
assert( bossData != nullptr );
auto attackData = bossData->Attacks.Get( baseAttackName );
if( attackData == nullptr ) {
attackData = &createAttackData( *bossData, baseAttackName );
}
assert( attackData != nullptr );
attackData->Status = newStatus;
attackData->SrcElem.SetAttributeValue( statusAttrib, AttackStatusToNameDict[newStatus] );
}
const CUnicodeView bossChildName = L"Boss";
CUserAliasFile::CBossAliasData& CUserAliasFile::createBossData( CUnicodePart name )
{
auto& root = aliasDoc.GetRoot();
auto& newElem = root.CreateChild( bossChildName );
newElem.AddAttribute( keyNameAttrib, name );
return bossAliases.Add( name, newElem ).Value();
}
const CUnicodeView attackChildName = L"Attack";
CUserAliasFile::CAttackAliasData& CUserAliasFile::createAttackData( CBossAliasData& parent, CUnicodePart name )
{
auto& parentElem = parent.SrcElem;
auto& newElem = parentElem.CreateChild( attackChildName );
newElem.AddAttribute( keyNameAttrib, name );
return parent.Attacks.Add( name, newElem ).Value();
}
//////////////////////////////////////////////////////////////////////////
} // namespace Fangame.
|
Oh, another NP-completeness game. Point of fact: this is NOT 3D logic. It is 100% 2D and PLANAR, because it's just 3 adjacent faces of a cube that share a corner. It might as well be drawn on 3 fixed, non-moving concentric equilateral triangles (basically what you'd get if you took those 3 faces and flattened them with a hammer by hitting the shared corner really hard). It's decidedly 2D; it's just shown on a cube that rotates as you move the cursor around, to be annoying and inconvenient to the player. It's not so much a test of my ability to think in 3D as a test of my ability to tolerate an arbitrary annoyance, like a mosquito, while I do something unrelated, and that something is not as hard as you make it out to be. The actual problem is this: do I draw the edge connecting two vertices UNDER or OVER other vertices, under the constraint that only so many lines (a variable number) can go under, and so many over, each vertex.
How many frigging levels are there, anyway? More than 15; I've been at this for 15 minutes, they're getting annoying, and there's no level select to show me whether there's an end in sight. For all I know they're generated by the output of a linear feedback shift register and go on forever; it's only some of the early ones that have goofy text pop up when you win them. |
/**
* Implementation of the belt connection interface.
*/
class BeltConnectionController extends BeltConnectionInterface implements
GattController.GattEventListener, BluetoothScanner.BluetoothScannerDelegate,
BeltCommunicationController.HandshakeCallback {
// Debug
@SuppressWarnings("unused")
private static final String DEBUG_TAG = "FeelSpace-Debug";
@SuppressWarnings("unused")
private static final boolean DEBUG = true;
/**
* The application context to establish connection.
*/
private @NonNull Context applicationContext;
/**
* The GATT controller.
*/
private @NonNull GattController gattController;
/**
* The Bluetooth scanner.
*/
private @NonNull BluetoothScanner scanner;
/**
* The communication controller.
*/
private @NonNull BeltCommunicationController communicationController;
/**
* Executor for timeout task and other delayed tasks.
*/
private @NonNull ScheduledThreadPoolExecutor executor;
/**
* Flag for pending connect.
*/
private boolean connectOnFirstBeltFound = false;
/**
* Constructor.
*
* @param applicationContext The application context.
* @throws IllegalArgumentException If the application context is <code>null</code>.
*/
public BeltConnectionController(Context applicationContext) throws IllegalArgumentException {
if (applicationContext == null) {
throw new IllegalArgumentException("Null context.");
}
this.applicationContext = applicationContext;
executor = new ScheduledThreadPoolExecutor(1);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
executor.setRemoveOnCancelPolicy(true);
}
gattController = new GattController(executor);
gattController.addGattEventListener(this);
communicationController = new BeltCommunicationController(gattController);
scanner = new BluetoothScanner(executor, this);
}
@Override
public void scan() {
synchronized (this) {
connectOnFirstBeltFound = false;
state = BeltConnectionState.STATE_SCANNING;
}
gattController.disconnect();
scanner.startScan();
notifyState();
}
@Override
public void connect(BluetoothDevice device) throws IllegalArgumentException {
synchronized (this) {
if (device == null) {
throw new IllegalArgumentException("Cannot connect with a null device.");
}
connectOnFirstBeltFound = false;
state = BeltConnectionState.STATE_CONNECTING;
}
// Stop scan before any connection attempt, even when not scanning
scanner.stopScan();
gattController.connect(applicationContext, device);
notifyState();
}
@Override
public void scanAndConnect() throws IllegalStateException {
synchronized (this) {
connectOnFirstBeltFound = true;
state = BeltConnectionState.STATE_SCANNING;
}
gattController.disconnect();
scanner.startScan();
notifyState();
}
@Override
public void stopScan() {
synchronized (this) {
if (state != BeltConnectionState.STATE_SCANNING) {
return;
}
connectOnFirstBeltFound = false;
state = BeltConnectionState.STATE_DISCONNECTED;
}
scanner.stopScan();
notifyState();
}
@Override
public void disconnect() {
synchronized (this) {
if (state == BeltConnectionState.STATE_SCANNING ||
state == BeltConnectionState.STATE_DISCONNECTED) {
return;
}
state = BeltConnectionState.STATE_DISCONNECTED;
connectOnFirstBeltFound = false;
}
gattController.disconnect();
notifyState();
}
@Override
protected @NonNull ScheduledThreadPoolExecutor getExecutor() {
return executor;
}
@Override
public BeltCommandInterface getCommandInterface() {
return communicationController;
}
@Override
public BeltCommunicationInterface getCommunicationInterface() {
return communicationController;
}
@Override
public void onGattConnectionStateChange(GattConnectionState gattState) {
boolean handshake = false;
synchronized (this) {
switch (gattState) {
case GATT_DISCONNECTED:
switch (this.state) {
case STATE_DISCONNECTED:
// Ignore event
return;
case STATE_SCANNING:
// Ignore event
return;
case STATE_CONNECTING:
state = BeltConnectionState.STATE_DISCONNECTED;
break;
case STATE_RECONNECTING:
case STATE_HANDSHAKE:
case STATE_CONNECTED:
state = BeltConnectionState.STATE_DISCONNECTED;
break;
}
break;
case GATT_CONNECTING:
state = BeltConnectionState.STATE_CONNECTING;
break;
case GATT_DISCOVERING_SERVICES:
state = BeltConnectionState.STATE_DISCOVERING_SERVICES;
break;
case GATT_CONNECTED:
// Continue with handshake
state = BeltConnectionState.STATE_HANDSHAKE;
handshake = true;
break;
case GATT_RECONNECTING:
state = BeltConnectionState.STATE_RECONNECTING;
break;
}
}
if (handshake) {
communicationController.startHandshake(this);
}
notifyState();
}
@Override
public void onGattConnectionFailed() {
synchronized (this) {
if (state == BeltConnectionState.STATE_SCANNING ||
state == BeltConnectionState.STATE_DISCONNECTED) {
// Ignore GATT connection event
return;
}
state = BeltConnectionState.STATE_DISCONNECTED;
}
notifyConnectionFailed();
notifyState();
}
@Override
public void onGattConnectionLost() {
synchronized (this) {
if (state == BeltConnectionState.STATE_SCANNING ||
state == BeltConnectionState.STATE_DISCONNECTED) {
// Ignore GATT connection event
return;
}
state = BeltConnectionState.STATE_DISCONNECTED;
}
notifyConnectionLost();
notifyState();
}
@Override
public void onCharacteristicNotificationSet(@NonNull BluetoothGattCharacteristic characteristic,
boolean enable, boolean success) {
// Nothing to do
}
@Override
public void onCharacteristicRead(@NonNull BluetoothGattCharacteristic characteristic,
@Nullable byte[] value, boolean success) {
// Nothing to do
}
@Override
public void onCharacteristicWrite(@NonNull BluetoothGattCharacteristic characteristic,
@Nullable byte[] value, boolean success) {
// Nothing to do
}
@Override
public void onCharacteristicChanged(@Nullable BluetoothGattCharacteristic characteristic,
@Nullable byte[] value) {
// Nothing to do
}
@Override
public void onRequestCompleted(@Nullable BluetoothGattCharacteristic notifiedCharacteristic,
@Nullable byte[] notifiedValue) {
// Nothing to do
}
@Override
public void onScanStarted() {
if (DEBUG) Log.i(DEBUG_TAG, "BeltConnectionController: Scan started.");
synchronized (this) {
if (state == BeltConnectionState.STATE_SCANNING) {
return;
}
state = BeltConnectionState.STATE_SCANNING;
}
notifyState();
}
@Override
public void onBeltFound(@NonNull BluetoothDevice device) {
boolean connect = false;
synchronized (this) {
if (state != BeltConnectionState.STATE_SCANNING) {
// Ignore if not scanning
return;
}
if (connectOnFirstBeltFound) {
connectOnFirstBeltFound = false;
connect = true;
}
}
if (connect) {
try {
connect(device);
} catch (Exception e) {
// Should not happen
}
} else {
notifyBeltFound(device);
}
}
@Override
public void onScanFinished() {
if (DEBUG) Log.i(DEBUG_TAG, "BeltConnectionController: Scan finished.");
boolean noBeltFound = false;
synchronized (this) {
if (state != BeltConnectionState.STATE_SCANNING) {
// Ignore if not scanning
return;
}
if (connectOnFirstBeltFound) {
noBeltFound = true;
connectOnFirstBeltFound = false;
}
state = BeltConnectionState.STATE_DISCONNECTED;
}
if (noBeltFound) {
notifyNoBeltFound();
}
notifyState();
}
@Override
public void onScanFailed() {
if (DEBUG) Log.e(DEBUG_TAG, "BeltConnectionController: Scan failed.");
synchronized (this) {
if (state != BeltConnectionState.STATE_SCANNING) {
// Ignore if not scanning
return;
}
connectOnFirstBeltFound = false;
state = BeltConnectionState.STATE_DISCONNECTED;
}
notifyScanFailed();
}
@Override
public void onHandshakeCompleted(boolean success) {
BluetoothDevice device;
synchronized (this) {
if (state != BeltConnectionState.STATE_HANDSHAKE) {
// Ignore if not handshake ongoing
return;
}
device = gattController.getDevice();
if (device == null) {
// Should not happen
state = BeltConnectionState.STATE_DISCONNECTED;
} else if (success) {
saveDeviceAddress(applicationContext, device);
state = BeltConnectionState.STATE_CONNECTED;
}
}
if (device == null) {
// Should not happen
notifyConnectionFailed();
notifyState();
} else if (success) {
notifyState();
} else {
gattController.reconnect();
}
}
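// Hypothetical usage sketch (names taken from this class's public API):
//   BeltConnectionController controller = new BeltConnectionController(context);
//   controller.scanAndConnect();  // scan, then connect to the first belt found
//   BeltCommandInterface commands = controller.getCommandInterface();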
} |
<reponame>xbsoftware/node-wfs-s3
import * as policies from "./policy";
import {Operation} from "./types";
import S3 from "./S3";
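// Hypothetical consumer usage (package name assumed from the repo name above):
//   import {S3, policies, Operation} from "node-wfs-s3";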
export {S3, policies, Operation}; |
/**
* <p>Adds 1 or more dependencies to the given dependent node.</p>
*
* <p>If the dependent nodeContent or any of the given dependencies don't exist
* as Nodes in the graph, new Nodes will automatically be created and added
* first.</p>
*
* <p>Because this is a strongly connected digraph,
* each new dependency will represent 2 new snapshotEdges (or connections)
 * between 2 Nodes (dependent and dependency). For example, if A depends on B,
 * then B will have an outgoing connection to A, and A will have an
 * incoming connection from B.</p>
*
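 * <p>Hypothetical usage, assuming {@code T} is {@code String}:
 * <pre>{@code
 * graph.addDependencies("A", Arrays.asList("B", "C")); // A depends on B and C
 * }</pre>
 *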
* @param dependent The node that depends on each node provided in the
* dependencies collection
* @param dependencies All Nodes that dependent depends on
* @throws NullPointerException if any of the arguments or any of the elements in "dependencies" is null
*/
public void addDependencies(final T dependent, final Collection<T> dependencies)
{
ensureContentNotNull(dependent, dependencies);
mutateSafely(() -> {
Node<T> to = nodes.get(dependent);
boolean modified = false;
boolean hasIncoming = false;
if (to == null) {
to = addNode(dependent);
modified = true;
} else {
hasIncoming = to.hasIncoming();
if (to.hasOutgoing() && !hasIncoming && !dependencies.isEmpty()) {
final boolean added = bidirectionalNodes.add(to);
assert added : dependent + " has outgoing but not incoming connections, hence it should NOT have been found in internal snapshotBidiNodes set.";
}
}
for (final T dep : dependencies) {
Node<T> from = nodes.get(dep);
if (from == null) {
from = addNode(dep);
modified = true;
} else if (!from.hasOutgoing()) {
final boolean removed = dependentFree.remove(from);
assert removed : dep + " has no outgoing connections, hence it should have been found in internal snapshotDependentFree set.";
}
if (!hasIncoming) {
final boolean removed = vertices.remove(to);
assert removed : dependent + " has no incoming connections, hence it should have been found in internal sortedVertices set.";
hasIncoming = true;
}
final boolean connectedOut = from.addOutgoing(to);
final boolean connectedIn = to.addIncoming(from);
if (connectedOut || connectedIn) {
modified = true;
}
if (connectedOut && connectedIn) {
edges.incrementAndGet();
} else {
assert connectedOut == connectedIn : String.format(
"Found a previously existing illegal weak connection (one-way only) between %s and %s: %s -%s %s",
dependent, dep, dependent, (connectedOut ? "<" : ">"), dep);
}
if (from.hasIncoming()) {
bidirectionalNodes.add(from);
}
}
if (modified) {
markDirty();
}
});
} |
/**
* Helper AsyncTask to access the call logs database asynchronously since database operations
* can take a long time depending on the system's load. Since it extends AsyncTask, it uses
* its own thread pool.
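 *
 * <p>Hypothetical invocation (construction of {@code AddCallArgs} elided):
 * <pre>{@code
 * new LogCallAsyncTask().execute(callArgs); // runs addCall() off the main thread
 * }</pre>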
*/
private class LogCallAsyncTask extends AsyncTask<AddCallArgs, Void, Uri[]> {
private LogCallCompletedListener[] mListeners;
@Override
protected Uri[] doInBackground(AddCallArgs... callList) {
int count = callList.length;
Uri[] result = new Uri[count];
mListeners = new LogCallCompletedListener[count];
for (int i = 0; i < count; i++) {
AddCallArgs c = callList[i];
mListeners[i] = c.logCallCompletedListener;
try {
// May block.
result[i] = addCall(c);
} catch (Exception e) {
// This is very rare but may happen in legitimate cases.
// E.g. If the phone is encrypted and thus write request fails, it may cause
// some kind of Exception (right now it is IllegalArgumentException, but this
// might change).
//
// We don't want to crash the whole process just because of that, so just log
// it instead.
Log.e(TAG, e, "Exception raised during adding CallLog entry.");
result[i] = null;
}
}
return result;
}
private Uri addCall(AddCallArgs c) {
PhoneAccount phoneAccount = mPhoneAccountRegistrar
.getPhoneAccountUnchecked(c.accountHandle);
if (phoneAccount != null &&
phoneAccount.hasCapabilities(PhoneAccount.CAPABILITY_MULTI_USER)) {
if (c.initiatingUser != null &&
UserUtil.isManagedProfile(mContext, c.initiatingUser)) {
return addCall(c, c.initiatingUser);
} else {
return addCall(c, null);
}
} else {
return addCall(c, c.accountHandle == null ? null : c.accountHandle.getUserHandle());
}
}
/**
 * Inserts the call for a specific user, or for all users except the managed profile.
 * @param c the call arguments describing the call to insert
 * @param userToBeInserted user handle of the user the call should be inserted for;
 * null to insert for all users except the managed profile.
*/
private Uri addCall(AddCallArgs c, UserHandle userToBeInserted) {
return Calls.addCall(c.callerInfo, c.context, c.number, c.postDialDigits, c.viaNumber,
c.presentation, c.callType, c.features, c.accountHandle, c.timestamp,
c.durationInSec, c.dataUsage, userToBeInserted == null,
userToBeInserted);
}
@Override
protected void onPostExecute(Uri[] result) {
for (int i = 0; i < result.length; i++) {
Uri uri = result[i];
/*
Performs a simple sanity check to make sure the call was written in the database.
Typically there is only one result per call so it is easy to identify which one
failed.
*/
if (uri == null) {
Log.w(TAG, "Failed to write call to the log.");
}
if (mListeners[i] != null) {
mListeners[i].onLogCompleted(uri);
}
}
}
} |
WASHINGTON – The Justice Department’s Civil Rights Division today filed an amicus brief in support of a mosque in Murfreesboro, Tenn., that has met with community opposition and a lawsuit.
The brief was filed in a state court action in which a group of Murfreesboro landowners are attempting to stop construction of the mosque. Rutherford County, Tenn., is the defendant in the civil case, and had granted permission for the construction of the mosque. The county is opposing the landowners’ attempt to stop construction.
The department’s brief argues that Islam is a religion entitled to protection under the First Amendment to the U.S. Constitution, and points out that, “consistent among all three branches of government, the United States has recognized Islam as a major world religion.” It also argues that mosques are places of religious worship, and that Rutherford County properly determined that it must treat the mosque project as it would other proposals for construction of places of worship.
“A mosque is quite plainly a place of worship, and the county rightly recognized that it had an obligation to treat mosques the same as churches, synagogues, or any other religious assemblies. This is not only common sense; it is required by federal law. The Justice Department is committed to protecting the rights of Americans of all faiths to build places of worship and to worship in peace,” said Thomas E. Perez, Assistant Attorney General for Civil Rights.
“Although this is presently a local matter, the U.S. Department of Justice and the U.S. Attorney’s Office for the Middle District of Tennessee vigorously support the decision of the Rutherford County Regional Planning Commission and the Board of Commissioners in approving the site plans and authorizing construction of a mosque and Islamic center,” said U.S. Attorney Jerry E. Martin. “To suggest that Islam is not a religion is quite simply ridiculous. Each branch of the federal government has independently recognized Islam as one of the major religions of the world. As pointed out in our brief filed with the court, had the Rutherford County Government adopted the position the plaintiffs set forth, it would likely be in violation of the Religious Land Use and Institutionalized Persons Act, enacted by the U.S. Congress in 2000.”
The department’s brief comes shortly after the celebration of the 10th anniversary of the Religious Land Use and Institutionalized Persons Act (RLUIPA) on Sept. 22, 2010. RLUIPA protects the rights of religious assemblies and institutions to be free from discrimination in the application of zoning and land-use laws. In the 10 years since its passage, RLUIPA has helped secure the ability of thousands of individuals and institutions to practice their faiths freely and without discrimination.
In the past 10 years, the department has opened 51 RLUIPA investigations, filed seven lawsuits under RLUIPA’s land-use provisions and participated in 40 privately filed lawsuits. More information about the Civil Rights Division and the laws it enforces is available at www.usdoj.gov/crt. Individuals who believe that they have been victims of discrimination can call the Discrimination Tip Line at 1-800-896-7743, or e-mail the Justice Department at [email protected]. |
def cleaner(s):
    """Reduce a binary string: any block containing both '0' and '1'
    collapses to a single '0', while leading zeros and trailing ones
    are kept. E.g. cleaner('0011100111') == '000111'."""
    initial_add = ''
    last_add = ''
    if s[0] == '0':
        if '1' not in s:
            # All zeros: nothing to reduce.
            return s
        # Keep the leading zeros and work on the rest.
        initial_add = s[: s.index('1')]
        s = s[s.index('1') :]
    if s[-1] == '1':
        if '0' not in s:
            # The remainder is all ones: nothing to reduce.
            return initial_add + s
        # Keep the trailing ones.
        reverse = s[::-1]
        last_add = reverse[: reverse.index('0')]
    # What remains starts with '1', ends with '0' and contains both
    # characters, so it collapses to a single '0'.
    return initial_add + '0' + last_add


if __name__ == "__main__":
    test_cases = int(input())
    while test_cases:
        n = int(input())  # string length; not needed by cleaner()
        print(cleaner(input()))
        test_cases -= 1 |