def _on_register(self): pass
import time

import numpy as np
import matplotlib.pyplot as plt  # fixed: "from matplotlib.pylab import plt" is not importable
from sklearn.utils.extmath import randomized_svd

from tensorcomlib import base
from tensorcomlib.MatrixSVD import SVD


# hosvd: higher-order SVD via a full SVD of each mode unfolding
def hosvd(X):
    dims = X.ndims()
    U = [None] * dims
    S = X
    for d in range(dims):
        C = base.unfold(X, d)
        U1, S1, V1 = np.linalg.svd(C, full_matrices=False)
        S = base.tensor_times_mat(S, U1.T, d)
        U[d] = U1
    core = S
    return U, core


# randomized_hosvd: HOSVD using randomized SVD (rank fixed at 3 per mode)
def randomized_hosvd(X):
    dims = X.ndims()
    U = [None] * dims
    S = X
    for d in range(dims):
        C = base.unfold(X, d)
        U1, S1, V1 = randomized_svd(C, n_components=3, n_oversamples=10,
                                    n_iter='auto',
                                    power_iteration_normalizer='auto',
                                    transpose='auto', flip_sign=True,
                                    random_state=42)
        S = base.tensor_times_mat(S, U1.T, d)
        U[d] = U1
    core = S
    return U, core


# TruncatedHosvd: HOSVD with per-mode ranks chosen by an SVD error threshold
def TruncatedHosvd(X, eps):
    dims = X.ndims()
    U = [None] * dims
    R = [None] * dims
    S = X
    for d in range(dims):
        C = base.unfold(X, d)
        U1, S1, r = SVD.TruncatedSvd(C, eps_svd=eps)
        R[d] = r
        U[d] = U1
        S = base.tensor_times_mat(S, U[d].T, d)
    return U, S, R, eps


# PartialHosvd: HOSVD truncated to prescribed per-mode ranks
def PartialHosvd(X, ranks):
    dims = X.ndims()
    U = [None] * dims
    S = X
    for d in range(dims):
        C = base.unfold(X, d)
        U1, _, _ = SVD.PartialSvd(C, ranks[d])
        U[d] = U1
        S = base.tensor_times_mat(S, U[d].T, d)
    return U, S


# hooi: higher-order orthogonal iteration, refining the HOSVD factor matrices
def hooi(X, maxiter=1000, init='svd', eps=1e-11, tol=1e-10, plot=True):
    time0 = time.time()
    dims = X.ndims()
    modelist = list(range(dims))

    if init == 'svd':
        U, core, ranks, _ = TruncatedHosvd(X, eps=eps)
        print('TruncatedHosvd Ranks:\t' + str(ranks))
        data = base.tensor_multi_times_mat(core, U, modelist=modelist, transpose=False)
        errorsvd = base.tennorm(base.tensor_sub(data, X))
        print('---------------------------->>>>>>')
        print('TruncatedHosvd Init:')
        # fixed: reshape(1024) assumed one specific tensor size; ravel() is general
        print("Original tensor:", X.data.ravel()[1:10])
        print("TruncatedHosvd tensor:", data.data.ravel()[1:10])
        print("Truncated error:", errorsvd)
    else:
        U, core = randomized_hosvd(X)
        # fixed: ranks was undefined on this branch but is used below
        ranks = [u.shape[1] for u in U]

    error_X = []
    error_iter = []
    S1 = X

    for iteration in range(maxiter):
        # Update each factor matrix with all the others held fixed
        Uk = [None] * dims
        for i in range(dims):
            U1 = U.copy()
            U1.pop(i)
            L = list(range(dims))
            L.pop(i)
            Y = base.tensor_multi_times_mat(X, U1, modelist=L, transpose=True)
            C = base.unfold(Y, i)
            Uk[i], _, _ = SVD.PartialSvd(C, ranks[i])
        core = base.tensor_multi_times_mat(X, Uk, list(range(dims)), transpose=True)
        U = Uk

        # Track the change between consecutive reconstructions and the
        # reconstruction error against the original tensor
        S2 = base.tensor_multi_times_mat(core, Uk, list(range(dims)), transpose=False)
        error0 = base.tennorm(base.tensor_sub(S2, S1))
        S1 = S2
        error_iter.append(error0)
        error1 = base.tennorm(base.tensor_sub(X, S2))
        error_X.append(error1)

        if error0 < tol:
            print('---------------------------->>>>>>')
            print('HOOI:')
            print('Iteration:' + str(iteration) + '\t\t' + 'Error_iter:' + str(error0)
                  + '\t\t' + 'Error_X:' + str(error1))
            print("Cost time:", time.time() - time0)
            break

    if plot:
        plt.plot(error_X)
        plt.title('The norm difference between the reduction tensor and the original tensor')
        plt.xlabel('Iteration')
        plt.ylabel('Norm difference')
        plt.show()

        plt.plot(error_iter)
        plt.title('The difference between the norm of restoring tensors in two consecutive iterations')
        plt.xlabel('Iteration')
        plt.ylabel('Norm difference')
        plt.show()

    return U, core
def _hierarchize(fdict):
    hdict = {}
    # dict.iteritems() is Python 2 only; items() works on Python 3
    for key, val in fdict.items():
        _insert_to_hdict(hdict, key, val)
    return hdict
// Copyright (c) 2008-2016 <NAME> and <NAME>, Inc.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

#ifndef UUID_185557CE385511E780ACD7B781262D2E
#define UUID_185557CE385511E780ACD7B781262D2E

#include <boost/qvm/gen/vec_assign2.hpp>
#include <boost/qvm/gen/vec_assign3.hpp>
#include <boost/qvm/gen/vec_assign4.hpp>

namespace boost {
namespace qvm {

namespace qvm_detail {

template <int D>
struct assign_vv_defined {
    static bool const value = false;
};

// Recursively copies elements I..N-1 from vector b into vector a.
template <int I, int N>
struct copy_vector_elements {
    template <class A, class B>
    static void f(A &a, B const &b) {
        vec_traits<A>::template write_element<I>(a) =
            vec_traits<B>::template read_element<I>(b);
        copy_vector_elements<I + 1, N>::f(a, b);
    }
};

// Base case: all N elements copied.
template <int N>
struct copy_vector_elements<N, N> {
    template <class A, class B>
    static void f(A &, B const &) {}
};

} // namespace qvm_detail

// Element-wise assignment between two vector types of equal dimension.
template <class A, class B>
inline typename boost::enable_if_c<
    is_vec<A>::value && is_vec<B>::value &&
        vec_traits<A>::dim == vec_traits<B>::dim &&
        !qvm_detail::assign_vv_defined<vec_traits<A>::dim>::value,
    A &>::type
assign(A &a, B const &b) {
    qvm_detail::copy_vector_elements<0, vec_traits<A>::dim>::f(a, b);
    return a;
}

} // namespace qvm
} // namespace boost

#endif
Spatial Neglect: Hypothetical Mechanisms of Disturbed Interhemispheric Crosstalk for Orientation Schematic drawings are presented of the major anatomical structures involved, along with their functional excitatory and inhibitory connections as the basis of a hypothetical model of visuospatial neglect. It is assumed that multisensory integration centers for attention and orientation (MSO) are represented in the temporo‐parietal cortex of both hemispheres, with the dominant MSO being in the right hemisphere (nondominant hemisphere in right‐handedness). A lesion of the dominant MSO will cause a left‐sided neglect due to a lack of ipsilateral activation of the visual cortex, which is further enhanced by increased inhibition from the contralateral visual cortex. The nondominant MSO in the left hemisphere might be involved in the manifestation of the less frequent and more transient right‐sided neglect and in the plastic mechanisms of gradual recovery from a left‐sided neglect or extinction.
Electro-Actuation System Strategy for a Morphing Flap Within the framework of the Clean Sky-JTI (Joint Technology Initiative) project, the design and technological demonstration of a novel wing flap architecture were addressed. Research activities were carried out to substantiate the feasibility of morphing concepts enabling flap camber variation in compliance with the demanding safety requirements applicable to the next generation green regional aircraft. The driving motivation for the investigation of such a technology was found in the opportunity to replace a conventional double slotted flap with a single slotted camber-morphing flap assuring similar high lift performance (in terms of maximum attainable lift coefficient and stall angle) while lowering emitted noise and system complexity. The actuation and control logic aimed at preserving prescribed geometries of the device under variable load conditions is investigated numerically and experimentally with reference to an 'iron-bird' demonstrator. The actuation concept is based on load-bearing actuators acting on morphing ribs, directly and individually. The adopted un-shafted distributed electromechanical system arrangement uses brushless actuators, each rated for the torque of a single adaptive rib of the morphing structure. An encoder-based distributed sensor system generates the information for the control loop and, at the same time, monitors for possible failures in the actuation mechanism. Further activities are then discussed in order to increase the TRL (Technology Readiness Level) of the validated architecture.
// Get the text state of the text objects inside a paragraph by iterating its
// content kid objects.
bool GetParagraphTextState(PdsStructElement* struct_elem, PdfTextState* ts) {
  for (int i = 0; i < struct_elem->GetNumChildren(); i++) {
    if (struct_elem->GetChildType(i) == kPdsStructChildPageContent) {
      PdfDoc* doc = struct_elem->GetStructTree()->GetDoc();
      auto page_deleter = [](PdfPage* page) { page->Release(); };
      std::unique_ptr<PdfPage, decltype(page_deleter)> page(
          doc->AcquirePage(struct_elem->GetChildPageNumber(i)), page_deleter);
      int mcid = struct_elem->GetChildMcid(i);
      auto content = page->GetContent();
      // Scan the page content for an object carrying this element's MCID.
      for (int j = 0; j < content->GetNumObjects(); j++) {
        if (GetPageObjectTextState(content->GetObject(j), mcid, ts))
          return true;
      }
    }
  }
  return false;
}
/**
 * Does the given media list contain any media present in this list?
 * <p>
 * If query list A matches B, then if a medium matches B it will also match A.
 * The opposite may not be true.
 *
 * @param otherMedia the other media list to test.
 * @return <code>true</code> if the other media contains any media which applies
 *         to this list, <code>false</code> otherwise.
 */
@Override
public boolean matches(MediaQueryList otherMedia) {
    if (otherMedia == null) {
        return !isNotAllMedia();
    }
    if (otherMedia.isNotAllMedia()) {
        return false;
    }
    if (isAllMedia()) {
        return true;
    }
    if (otherMedia.isAllMedia()) {
        return false;
    }
    NSACMediaQueryList otherqlist;
    if (otherMedia.getClass() == NSACMediaQueryList.class) {
        otherqlist = (NSACMediaQueryList) otherMedia;
    } else {
        return oldMatch(otherMedia);
    }
    // Track the other list's queries that have not been matched yet.
    HashSet<NSACMediaQuery> otherList = new HashSet<NSACMediaQuery>(otherqlist.queryList.size());
    otherList.addAll(otherqlist.queryList);
    Iterator<NSACMediaQuery> it = queryList.iterator();
    while (it.hasNext()) {
        NSACMediaQuery query = it.next();
        Iterator<NSACMediaQuery> otherIt = otherList.iterator();
        while (otherIt.hasNext()) {
            NSACMediaQuery othermq = otherIt.next();
            if (query.matches(othermq)) {
                otherIt.remove();
            }
        }
        if (otherList.isEmpty()) {
            return true;
        }
    }
    return false;
}
// Process creates and deletes DNS records.
func (a Route53) Process(ctx context.Context, actions []dnser.Action) error {
	inputs := a.changeSetInputs(actions)
	g, ctx := errgroup.WithContext(ctx)
	for _, input := range inputs {
		input := input // capture the loop variable for the goroutine
		g.Go(func() error {
			_, err := a.client.ChangeResourceRecordSetsWithContext(ctx, input)
			return err
		})
	}
	return g.Wait()
}
AUTOZINE TECHNICAL SCHOOL

Compression

Variable Compression - Saab SVC

Variable is good. From valve timing, valve lift, intake manifold, exhaust, ignition, fuel injection, turbocharging and cooling to lubrication, many things on today's engines can be variable. However, one thing is still fixed: compression ratio. The idea of variable compression has always been fascinating. When I was still a teenager I dreamed of a variable compression engine utilizing variable-length connecting rods. That is infeasible, of course.

Why do we want variable compression? Because when you turbocharge an engine you need to lower the compression ratio to avoid overheating and overstressing the cylinder head, otherwise it may cause knocking or even damage. When the engine runs off-boost, you get a weak output due to that lowered compression. If we could vary the compression, using a higher ratio before the turbo gets into operation and a lower ratio under boost, we would get a perfect turbocharged engine.

In 2000, Saab announced a variable compression concept dubbed SVC (Saab Variable Compression). It implemented variable compression with an innovative and interesting approach: a slidable cylinder head and cylinder unit.

[Figures: left, high compression ratio; right, low compression ratio]

As the figures show, the SVC engine has a cylinder head with integrated cylinders, known as a monohead. The monohead is pivoted at the crankcase and its slope can be adjusted slightly (by up to 4 degrees) in relation to the engine block, pistons, crankcase etc. by means of a hydraulic actuator, so the volume of the combustion chambers can be varied slightly. When the piston is at top dead center, a small change of volume leads to a big change of compression ratio, ranging from 8:1 to 14:1.

SVC was cleverer than any previous attempt at variable compression, as it involved no additional moving parts at the critical combustion chambers nor any extra reciprocating components, so it was relatively simple, durable and free of leakage. The monohead was self-contained, meaning it had its own cooling system, with coolant passages across the head and the cylinder wall. A rubber seal sat between the monohead and the engine block.

The variable compression allowed the Saab engine to run at an unusually high boost pressure, i.e. 1.8 bar (above atmospheric pressure), or about twice the boost pressure of the 9-3 Viggen. That was so high that the turbochargers of its day could not provide it, so the engine employed a supercharger instead. The compression ratio was adjusted continuously according to need - depending on rev, load, temperature, fuel used etc. - all determined by the engine management system. Therefore power and fuel consumption (hence emissions) could be optimized under any conditions.

The SVC demonstrated in 2000 was a 1.6-liter 5-cylinder with a 4-valve head. Maximum output was claimed to be 225 hp and 224 lbft, while fuel consumption was 30% lower than comparable conventional engines. Moreover, the variable compression allowed the engine to run easily on fuels of different octane ratings, so it could be sold worldwide without specific tuning. Unfortunately, the SVC never saw the light of production, probably due to its complexity and reliability concerns.
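To see why a small chamber-volume change produces such a large compression-ratio swing, here is a minimal sketch in Python. The 320 cc figure simply divides the 1.6-liter capacity by five cylinders; the rest is the standard geometric relation CR = (Vd + Vc) / Vc, so treat this as an illustration rather than Saab's actual geometry.

# Minimal sketch: how a small clearance-volume change swings compression ratio.
# Assumed: 1.6 L five-cylinder => ~320 cc swept volume (Vd) per cylinder.
V_DISPLACED = 320.0  # cc per cylinder (assumption)

def compression_ratio(v_clearance_cc: float) -> float:
    """Geometric compression ratio CR = (Vd + Vc) / Vc."""
    return (V_DISPLACED + v_clearance_cc) / v_clearance_cc

def clearance_for_ratio(cr: float) -> float:
    """Invert CR = 1 + Vd/Vc to get the clearance volume for a target ratio."""
    return V_DISPLACED / (cr - 1.0)

vc_low = clearance_for_ratio(8.0)    # ~45.7 cc
vc_high = clearance_for_ratio(14.0)  # ~24.6 cc
print(f"Vc at 8:1  = {vc_low:.1f} cc")
print(f"Vc at 14:1 = {vc_high:.1f} cc")
print(f"Tilting the monohead only needs to change Vc by ~{vc_low - vc_high:.0f} cc")

A change of roughly 21 cc per cylinder, against a 320 cc swept volume, is enough to cover the whole 8:1 to 14:1 range, which is exactly the leverage the tilting monohead exploits.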
Variable Compression - Nissan VC-T

Unexpectedly, Nissan has become the first manufacturer to put a variable compression ratio engine into production. The so-called VC-T (Variable Compression - Turbo) technology is applied to a 2.0-liter four-cylinder turbo and is slated to reach the market in early 2018. The first application will be the Infiniti QX50 SUV.

The mechanism Nissan uses is very different from Saab's. It varies the length of the stroke by using a complicated "multi-link" system in place of conventional connecting rods. The multi-link system consists of an upper link, a diamond-shaped center link and a lower link. The diamond-shaped center link is mounted on the crankshaft journal, but it is free to swivel about that journal. When it swivels clockwise by a few degrees, it pushes up the piston and thus increases the compression ratio. Conversely, it decreases the compression ratio when it swivels anti-clockwise by a few degrees.

The angle of the diamond-shaped center link is controlled by the lower link. An electric motor (called a "harmonic drive") rotates the control shaft through an actuator arm. The control shaft connects to the lower link of each cylinder through an eccentric cam. Thanks to the latter, when the control shaft rotates, the lower link can move up or down, adjusting the angle of the diamond-shaped center link and hence the compression ratio.

Another benefit of the VC-T technology is that its multi-link geometry keeps the con-rods (upper links) more upright throughout the combustion cycle compared with conventional con-rods. This means less side force is generated, resulting in less vibration and less friction between the pistons and cylinder walls. The former saves the need for balancer shafts, while the latter enhances efficiency further.

On the downside, the multi-link system is quite cumbersome, adding considerable weight, inertia and friction. Whether these drawbacks can be offset by the aforementioned benefits remains to be seen. However, it is certain to be more costly to build, owing to the additional parts, especially bearings. Moreover, the mechanism is not compatible with V-engines, so its applications will be limited to high-end 4-cylinder engines that can swallow the additional costs. No wonder Nissan wants the 2.0-liter VC-T engine to replace its long-serving 3.5-liter naturally aspirated V6. Rated at 268 hp and 288 lbft of torque, its output is sufficient to do so, while fuel consumption is estimated to be 27% lower than the V6's. Nevertheless, compared with some of the best 2-liter turbo engines currently on the market, it has yet to show any advantage.

The VC-T can vary its compression ratio between 8.0:1 and 14.0:1. The lower compression ratio is used in high-power mode because it can work with higher turbo boost pressure without causing knock. Conversely, 14.0:1 compression is used when the highest fuel efficiency is demanded. Note that this is only the theoretical compression ratio. In fact, for the highest efficiency the engine uses VVT to delay the intake valve closure and implement Miller-cycle combustion.
In other words, the variable compression feature enables a higher, 14:1 expansion ratio to maximize the benefits of Miller-cycle combustion.

Atkinson Cycle Engine

Conventional Otto cycle engines have 4 stages in each combustion cycle - intake, compression, expansion (explosion) and exhaust - each taking equal time and piston displacement. Atkinson cycle engines are different. They employ a slightly shorter intake stroke than expansion stroke. In other words, the compression ratio is actually smaller than the expansion ratio. What is the benefit of this arrangement? The answer is higher fuel efficiency. If we analyse the pressure-volume curve of the combustion cycle, you will see:

For an Otto cycle engine (yellow loop), the piston starts compressing air from bottom dead center (point 1); chamber volume reduces and pressure increases until the piston reaches top dead center (point 2). Then ignition takes place and the fuel-air mixture explodes; pressure surges and peaks immediately (point 3). This pushes the piston downwards, expanding the volume and decreasing the pressure until the piston reaches bottom dead center (point 4). At this moment, the exhaust valves open. As the pressure of the hot exhaust gas is higher than that of the outside world, it rushes out to the exhaust manifold quickly and the pressure drops suddenly to atmospheric (point 1). The exhaust and intake strokes are not shown here as they do not contribute to power generation. The work done (energy) produced by the combustion is the yellow area.

Now for an Atkinson cycle engine, the expansion phase is allowed to run further (the orange part), preferably until the gas pressure drops to atmospheric pressure (point 4A). This means the thermal and kinetic energy normally lost in the Otto cycle through the exhaust can be utilized by the Atkinson cycle to produce power. This additional energy is the orange area.
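The same comparison can be put into numbers with the textbook ideal-gas cycle relations. All figures below (gamma = 1.4, 300 K at intake, 10:1 compression, 2200 K peak temperature) are illustrative assumptions of mine, not data from the article:

# Ideal-gas sketch of the Otto vs. Atkinson comparison described above.
GAMMA = 1.4

def otto_efficiency(rc: float) -> float:
    # Classic closed-form result for the ideal Otto cycle
    return 1.0 - rc ** (1.0 - GAMMA)

def atkinson_efficiency(rc: float, t1: float, t3: float) -> float:
    # 1-2 isentropic compression, 2-3 constant-volume heat addition,
    # 3-4 isentropic expansion all the way down to ambient pressure
    # (point 4A in the text), 4-1 constant-pressure heat rejection.
    t2 = t1 * rc ** (GAMMA - 1.0)
    p3_over_p1 = rc ** GAMMA * (t3 / t2)
    t4 = t3 * p3_over_p1 ** ((1.0 - GAMMA) / GAMMA)
    q_in = t3 - t2             # proportional to cv
    q_out = GAMMA * (t4 - t1)  # cp/cv = gamma
    return 1.0 - q_out / q_in

print(f"Otto     (rc=10): {otto_efficiency(10):.1%}")                 # ~60%
print(f"Atkinson (rc=10): {atkinson_efficiency(10, 300, 2200):.1%}")  # ~67%

With identical compression ratio and heat input, letting the gas expand down to atmospheric pressure is worth several points of ideal-cycle efficiency; that difference is the orange area in the text's pressure-volume picture.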
The Atkinson cycle engine is not a new idea. In fact, it was invented by British engineer James Atkinson in 1882. His original design was very complicated, using not only a crankshaft but also an auxiliary shaft and additional linkages to let the pistons travel a shorter distance in the compression stroke than in the expansion stroke. It was quite brilliant, but owing to this complexity and the resultant extra size and weight, it never made it into commercial use on cars.

Application on hybrid cars

More recently, car makers resurrected the Atkinson concept in a bid to achieve superior fuel economy. In 1997, Toyota introduced a 1.5-liter Atkinson engine on its first Prius. Since then, all of the company's hybrid cars have adopted this kind of engine. They implement the Atkinson cycle by delaying the closure of the intake valves such that some fresh air is pumped back into the intake manifold in the early phase of the compression stroke. This reduces the effective displacement and compression ratio. While the quoted compression ratio (i.e. the geometric compression ratio) remains at slightly more than 10:1, the effective compression ratio is closer to 8:1. Apart from valve timing, modern Atkinson engines are exactly the same as Otto engines, thus adding no extra cost or weight.

On the downside, Atkinson-cycle engines are less powerful than their Otto-cycle counterparts of the same size and weight. This is due to several reasons: 1) smaller effective capacity means less air and fuel take part in the combustion, thus less power is generated; 2) the lower compression ratio leads to less power; 3) the lower exhaust gas pressure means the exhaust gas escapes more slowly, which hurts the scavenging effect and revvability; 4) the longer expansion stroke works against high revs. However, the lack of power matters less on hybrid cars, as fuel efficiency is given first priority. Also, hybrid cars can compensate for the loss of power with their electric motors.
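A minimal sketch of the arithmetic behind "quoted ratio slightly above 10:1, effective ratio closer to 8:1". The exact lost-stroke fraction is not given in the article, so the 25% below is an assumed figure for illustration:

# Late intake-valve closing discards part of the stroke: only (1 - f) of the
# displacement Vd is actually compressed, while the clearance volume Vc and
# the expansion ratio stay unchanged.
def effective_cr(geometric_cr: float, lost_stroke_fraction: float) -> float:
    # Vd / Vc = geometric_cr - 1
    return 1.0 + (1.0 - lost_stroke_fraction) * (geometric_cr - 1.0)

print(f"{effective_cr(10.5, 0.25):.1f}:1")  # ~8.1:1, near the 8:1 quoted above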
Otto-Atkinson cycle engines

Thanks to stringent fuel economy requirements, in recent years car makers have also started using Atkinson principles on conventional cars. They use variable valve timing to adjust the closure timing of the intake valves, so that the engine can run the Atkinson cycle at idle and light load, or switch to the Otto cycle when more power is demanded, getting the best of both worlds. In 2012, Mazda was the first to put such an Otto-Atkinson cycle engine into production. Its innovative 2.0-liter Skyactiv-G employed an unusually high compression ratio of 14.0:1 (or 13:1 for the US market due to 87-octane fuel). As a result, the effective compression ratio in Atkinson mode remained relatively high, minimizing the loss of power. In 2014, Toyota followed the same Otto-Atkinson concept with its 2.0-liter VVT-iW (on the Camry), 2.0 Turbo (on the Lexus NX200t) and 5.0 V8 (Lexus RC F), although their compression ratios were not as high as Mazda's. The long-abandoned invention of James Atkinson has finally taken off, albeit in a much different form.

Miller Cycle Engine

Mazda's 2.3-liter Miller cycle engine was the only one of its kind. Although it achieved a 10-15% fuel consumption reduction over comparable conventional engines, high production cost prevented it from becoming popular.

Invented by American Ralph Miller in the 1940s, the Miller cycle is a variant of the Atkinson cycle. As explained above, Atkinson cycle engines use a longer expansion stroke than compression stroke to capture the residual energy that would otherwise be lost in the exhaust, so they return higher fuel efficiency than conventional Otto cycle engines. However, a big drawback is the lack of power compared with Otto cycle engines of the same size and construction. To address this problem, the Miller cycle adds a supercharger to boost the air pressure and restore 100% effective capacity. Of course, to actually return higher fuel efficiency, the supercharger needs to be efficient, wasting less energy than the saving gained by the Miller cycle.

In Mazda's Miller-cycle V6, the inlet valves close at 47 degrees after BDC (bottom dead center, i.e. the lowest position of the piston during a cycle). This equals 20% of the height of the stroke. In other words, during the first 20% of the compression stroke the intake valves remain open, so air flows out without being compressed. The actual compression takes place over the remaining 80% of the stroke, so the effective engine capacity is only 80% of the geometric capacity. As a result, the compression ratio is decreased from 10:1 to slightly under 8:1.

Mazda introduced the 2.3-liter Miller cycle V6 in its Millenia / Eunos 800 in 1994. It was claimed to consume 13% less fuel than Mazda's 3-liter conventional V6, while generating more power and a better torque curve. Nevertheless, neither Mazda nor other car makers have since followed in its footsteps. Why? Think about it: although it was billed as a 2.3-liter engine, it was actually constructed like a 3-liter engine in size, construction and materials. Then the supercharger and intercoolers added extra cost and weight. Considering the slim advantage in fuel economy, a smaller supercharged or turbocharged engine could easily better it.
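As a rough check on "the supercharger restores 100% effective capacity", a minimal ideal-gas estimate, assuming perfect intercooling (my simplification; real charge heating pushes the required pressure higher):

# If the intake valves stay open for the first 20% of the compression stroke,
# only 80% of the charge is trapped. To restore the full charge mass, the
# manifold density (hence pressure, at equal temperature) must rise by 1/0.8.
effective_fraction = 0.80
p_ambient = 1.013  # bar
p_manifold = p_ambient / effective_fraction
print(f"Manifold pressure: {p_manifold:.2f} bar "
      f"(about {p_manifold - p_ambient:.2f} bar of boost)")  # ~0.25 bar

So even in the ideal case the supercharger must deliver a quarter of a bar continuously, which is why its own efficiency decides whether the Miller cycle pays off at all.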
High Compression Engine - Mazda Skyactiv-G

A higher compression ratio brings higher combustion efficiency and hence power. That's why automotive engineers want to raise compression as high as possible. However, too high a compression will lead to early explosion of the fuel-air mixture, or what we call "knocking". Knocking is bad for engines, not only because it causes NVH but also because it reduces output. When I started reading about cars, most engines in the world ran at lower than 10:1 compression. As engine management and valve-timing technology has improved, nowadays the figure can be higher than 11:1. Direct injection engines may even lift that figure to 12:1 or so thanks to the cooling effect of in-cylinder injection, but anything higher than that remained a dream. However, Mazda made a breakthrough with its Skyactiv-G engine in 2010. It works at an incredible 14:1 compression!

How can Mazda avoid knocking? A crucial factor causing knocking is the high temperature of the combustion chambers. Temperature in the chamber rises during the compression stroke and peaks when the piston reaches top dead center (TDC, i.e. the highest position). At this point, knocking is most likely to occur. Obviously, if we want to reduce the risk of knocking, we had better lower the combustion chamber temperature.

Then why is the combustion chamber so hot? One of the reasons is the existence of residual exhaust gas, i.e. the exhaust gas that flows back into the combustion chamber during the intake stroke just before the exhaust valves close. No one can completely get rid of residual exhaust gas, because for high breathing efficiency engines always need to run with a certain level of valve overlap (overlap between the opening periods of the intake and exhaust valves). Suppose the exhaust gas is 750°C, the fresh intake air is 25°C, and their mixing ratio is 1 to 10; you can see that the residual exhaust gas can raise the combustion chamber temperature a lot. The more residual exhaust gas there is, the higher the combustion chamber temperature. In other words, if we want to reduce temperature, we can reduce the amount of residual exhaust gas in the combustion chamber.

The accompanying graph shows that a 14:1 compression engine always has a higher combustion chamber temperature than a 10:1 engine at a given residual exhaust gas level. However, if the amount of residual exhaust gas is reduced to 4 percent, the combustion chamber temperature will be about the same as in a 10:1 engine running with 8 percent residual exhaust gas.

Now the question is: how to lower the percentage of residual exhaust gas? Surprisingly, Mazda uses a very conventional approach: a long, 4-to-2-to-1 exhaust manifold. On a typical inline-4 engine with short, 4-to-1 exhaust manifolds (the first picture), once the exhaust valve of Cylinder 3 opens, its exhaust pressure waves (grey area) flow through the short manifolds to the exhaust valve of Cylinder 1, which is at the end of its exhaust phase. This pumps some exhaust gas back into Cylinder 1, where it becomes residual exhaust gas. When the engine is running at low speed (2000 rpm in the first picture), the exhaust pressure wave arrives at Cylinder 1 early enough to cause a high percentage of residual exhaust gas. As engine revs rise, the opening and closing of the valves speed up as well, so the exhaust pressure waves of Cylinder 3 reach Cylinder 1 at a later stage, causing a lower percentage of residual exhaust gas. In short, from low to mid-range engine speed the level of residual exhaust gas is pretty high with this configuration.

In the case of the Skyactiv-G's 4-2-1 exhaust manifolds (the second picture), the exhaust pressure waves from Cylinder 3 have to travel a long way to reach Cylinder 1; by the time they arrive, Cylinder 1 has already completed, or nearly completed, its exhaust phase. Therefore the level of residual exhaust gas is much lower than in the previous case, especially at low to mid-range rpm.
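To put a number on the mixing example above (750°C exhaust, 25°C intake air, 1-to-10 ratio), a minimal energy balance, assuming equal specific heats for both streams (a simplification):

t_exhaust = 750.0 + 273.15  # K
t_air = 25.0 + 273.15       # K
# 1 part residual exhaust gas mixed with 10 parts fresh air
t_mix = (1.0 * t_exhaust + 10.0 * t_air) / 11.0
print(f"Charge temperature: {t_mix - 273.15:.0f} C")  # ~91 C instead of 25 C

Even a 9% residual fraction lifts the fresh charge from 25°C to roughly 90°C before compression even begins, which is why the residual-gas percentage has such leverage on knock.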
As a result, the Skyactiv-G engine attains a lower temperature in its combustion chambers, which allows a higher compression ratio to be used.

Well, if the principle is so simple, why hadn't others discovered it already? It's not that simple, of course. One critical drawback of the long 4-2-1 exhaust manifold is that it takes a relatively long time to heat up the NOx catalyst during a cold start. In fact, this is exactly why most modern production engines have abandoned this exhaust configuration, with the exception of high-performance engines, which may use thin-wall fabricated stainless steel exhaust manifolds to compensate for the extra length. On cost-conscious mass-production engines, cheap cast-iron exhaust manifolds are still the norm. Their extra mass and surface area absorb a great deal of heat and delay the proper functioning of the catalyst, making it difficult to comply with emission regulations.

Mazda overcomes the cold-start problem by retarding ignition. This leads to a higher exhaust gas temperature, compensating for the long manifolds. The late ignition may result in unstable combustion, which is dealt with by a specially shaped piston that concentrates the stratified air-fuel mixture around the spark plug. Other supporting features such as high-pressure direct injection and six-hole injectors also contribute to the optimized combustion.

Low Compression Diesel Engine - Mazda Skyactiv-D

Unlike petrol engines, diesel engines have no ignition system, as the combustion of the diesel-air mixture happens spontaneously under the high pressure and temperature attained during the compression stroke. However, this also means diesel engines need to run higher compression ratios. It goes without saying that a higher compression ratio necessitates a stronger cylinder block and head, pistons, con-rods and bearings to withstand the high pressure. This explains why diesel engines are much heavier and lower-revving than petrol ones. As a result, reducing the compression ratio has become the trend in diesel engine development. A decade or so ago, a typical turbo diesel engine ran about 18:1 compression. Now most employ between 16:1 and 16.5:1, while some more advanced engines are even down to 15.5:1. However, none of them are comparable to Mazda's Skyactiv-D engines, which achieve an incredibly low 14.0:1. Yes, the same as the Skyactiv-G petrol engines.

One thing preventing the use of such a low compression ratio is cold starting. When the engine is cold, especially at freezing temperatures, diesel engines are difficult to start. Traditionally this is addressed by glow plugs, which heat up the part of the combustion chamber where fuel is injected. The lower compression ratio of the Skyactiv-D just makes matters worse, as it produces lower temperatures in the combustion chambers. It needs quicker-acting ceramic glow plugs to deal with cold starts. The use of multi-hole piezo fuel injectors, which enable more precise control of fuel spray timing and patterns, is also said to help cold starting.
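Why a 14:1 diesel struggles on a cold morning can be estimated with the adiabatic compression relation T_tdc = T_intake * CR^(gamma - 1). The numbers below are my own illustration (gamma = 1.4, heat loss to the cold metal ignored, so real cranking temperatures are lower still):

GAMMA = 1.4

def tdc_temperature(t_intake_k: float, compression_ratio: float) -> float:
    # Ideal adiabatic compression of the intake charge
    return t_intake_k * compression_ratio ** (GAMMA - 1.0)

t_cold = 253.15  # -20 C winter intake air
for cr in (18.0, 16.0, 14.0):
    t = tdc_temperature(t_cold, cr)
    print(f"CR {cr:>4.1f}:1 -> ~{t - 273.15:.0f} C at TDC")
# 18:1 ends up roughly 75-80 C hotter at TDC than 14:1 -- the margin that the
# quicker-acting glow plugs have to make up.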
Once the engine has started, there is still a possibility of misfire. Mazda avoids this by introducing variable valve lift on the exhaust side. During the intake stroke, the VVL opens the exhaust valves slightly, drawing hot exhaust gas back into the combustion chamber to raise its temperature. Once the desired operating temperature is reached, the VVL reverts to its normal state. The exhaust VVL is implemented as a switchable roller rocker. (Note: now we understand why the high-power versions of the Volkswagen EA288 2.0 TDI engine employ variable exhaust cam phasing.)

Low compression produces fewer pollutants

Apart from weight saving, another benefit is emissions. In a high-compression diesel engine, fuel is ignited as soon as it is injected into the combustion chamber due to the high pressure and temperature. This gives little time for the fuel to spread throughout the chamber. As the fuel is not sufficiently mixed with air, some fuel has no oxygen to react with, and vice versa. Consequently, the incomplete combustion generates a lot of soot (carbon particles) and NOx. With its lower compression, the Skyactiv-D allows sufficient time for the fuel to mix with air before combustion happens. It is therefore inherently cleaner, saving the need for an expensive particulate filter and NOx aftertreatment.

Moreover, under the current stringent emission standards, high-compression diesel engines may need to retard the injection timing to reduce pollutants (the injection then takes place after the piston has reached top dead center and started descending, so both pressure and temperature are reduced). This effectively shortens the expansion stroke and thus wastes energy (you can see it as the opposite of an Atkinson-cycle engine).
With its lower compression, the Skyactiv-D allows the fuel to be injected before the piston reaches TDC, maintaining the full expansion stroke and thus capturing more energy. Overall, Mazda says its 2.2-liter twin-turbo Skyactiv-D engine saves 20 percent fuel compared with its same-capacity predecessor, which ran a 16.3:1 compression ratio. The lower stress means it can switch from a cast-iron block to an aluminum one, saving 25 kg. A thinner cylinder head saves another 3 kg. The pistons and crankshaft are 25 percent lighter. Moreover, smaller-diameter main journals can be used to reduce friction. The lower friction and lighter reciprocating mass, together with 2-stage turbocharging, lift its maximum rev from 4500 rpm to 5200 rpm. As a result, the low-compression diesel engine behaves more like a petrol engine than ever.
/*
 * Copyright 2012-2023 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.web.embedded;

import java.util.Locale;
import java.util.function.Consumer;

import org.apache.catalina.Context;
import org.apache.catalina.Valve;
import org.apache.catalina.startup.Tomcat;
import org.apache.catalina.valves.AccessLogValve;
import org.apache.catalina.valves.ErrorReportValve;
import org.apache.catalina.valves.RemoteIpValve;
import org.apache.coyote.AbstractProtocol;
import org.apache.coyote.ajp.AbstractAjpProtocol;
import org.apache.coyote.http11.AbstractHttp11Protocol;
import org.apache.coyote.http2.Http2Protocol;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.autoconfigure.web.ServerProperties.ForwardHeadersStrategy;
import org.springframework.boot.context.properties.bind.Bindable;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.ConfigurationPropertySources;
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
import org.springframework.boot.web.embedded.tomcat.TomcatWebServer;
import org.springframework.boot.web.server.WebServer;
import org.springframework.mock.env.MockEnvironment;
import org.springframework.test.context.support.TestPropertySourceUtils;
import org.springframework.util.unit.DataSize;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link TomcatWebServerFactoryCustomizer}
 *
 * @author Brian Clozel
 * @author Phillip Webb
 * @author Rob Tompkins
 * @author Artsiom Yudovin
 * @author Stephane Nicoll
 * @author Andrew McGhie
 * @author Rafiullah Hamedy
 * @author Victor Mandujano
 * @author Parviz Rozikov
 */
class TomcatWebServerFactoryCustomizerTests {

    private MockEnvironment environment;

    private ServerProperties serverProperties;

    private TomcatWebServerFactoryCustomizer customizer;

    @BeforeEach
    void setup() {
        this.environment = new MockEnvironment();
        this.serverProperties = new ServerProperties();
        ConfigurationPropertySources.attach(this.environment);
        this.customizer = new TomcatWebServerFactoryCustomizer(this.environment, this.serverProperties);
    }

    @Test
    void defaultsAreConsistent() {
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxSwallowSize())
            .isEqualTo(this.serverProperties.getTomcat().getMaxSwallowSize().toBytes()));
    }

    @Test
    void customAcceptCount() {
        bind("server.tomcat.accept-count=10");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractProtocol<?>) server.getTomcat().getConnector().getProtocolHandler()).getAcceptCount())
            .isEqualTo(10));
    }

    @Test
    void customProcessorCache() {
        bind("server.tomcat.processor-cache=100");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractProtocol<?>) server.getTomcat().getConnector().getProtocolHandler()).getProcessorCache())
            .isEqualTo(100));
    }

    @Test
    void customKeepAliveTimeout() {
        bind("server.tomcat.keep-alive-timeout=30ms");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractProtocol<?>) server.getTomcat().getConnector().getProtocolHandler()).getKeepAliveTimeout())
            .isEqualTo(30));
    }

    @Test
    void defaultKeepAliveTimeoutWithHttp2() {
        bind("server.http2.enabled=true");
        customizeAndRunServer((server) -> assertThat(
                ((Http2Protocol) server.getTomcat().getConnector().findUpgradeProtocols()[0]).getKeepAliveTimeout())
            .isEqualTo(20000L));
    }

    @Test
    void customKeepAliveTimeoutWithHttp2() {
        bind("server.tomcat.keep-alive-timeout=30s", "server.http2.enabled=true");
        customizeAndRunServer((server) -> assertThat(
                ((Http2Protocol) server.getTomcat().getConnector().findUpgradeProtocols()[0]).getKeepAliveTimeout())
            .isEqualTo(30000L));
    }

    @Test
    void customMaxKeepAliveRequests() {
        bind("server.tomcat.max-keep-alive-requests=-1");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxKeepAliveRequests())
            .isEqualTo(-1));
    }

    @Test
    void defaultMaxKeepAliveRequests() {
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxKeepAliveRequests())
            .isEqualTo(100));
    }

    @Test
    void unlimitedProcessorCache() {
        bind("server.tomcat.processor-cache=-1");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractProtocol<?>) server.getTomcat().getConnector().getProtocolHandler()).getProcessorCache())
            .isEqualTo(-1));
    }

    @Test
    void customBackgroundProcessorDelay() {
        bind("server.tomcat.background-processor-delay=5");
        TomcatWebServer server = customizeAndGetServer();
        assertThat(server.getTomcat().getEngine().getBackgroundProcessorDelay()).isEqualTo(5);
    }

    @Test
    void customDisableMaxHttpFormPostSize() {
        bind("server.tomcat.max-http-form-post-size=-1");
        customizeAndRunServer((server) -> assertThat(server.getTomcat().getConnector().getMaxPostSize()).isEqualTo(-1));
    }

    @Test
    void customMaxConnections() {
        bind("server.tomcat.max-connections=5");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractProtocol<?>) server.getTomcat().getConnector().getProtocolHandler()).getMaxConnections())
            .isEqualTo(5));
    }

    @Test
    void customMaxHttpFormPostSize() {
        bind("server.tomcat.max-http-form-post-size=10000");
        customizeAndRunServer(
                (server) -> assertThat(server.getTomcat().getConnector().getMaxPostSize()).isEqualTo(10000));
    }

    @Test
    void defaultMaxHttpRequestHeaderSize() {
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpRequestHeaderSize())
            .isEqualTo(DataSize.ofKilobytes(8).toBytes()));
    }

    @Test
    void customMaxHttpRequestHeaderSize() {
        bind("server.max-http-request-header-size=10MB");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpRequestHeaderSize())
            .isEqualTo(DataSize.ofMegabytes(10).toBytes()));
    }

    @Test
    void customMaxRequestHttpHeaderSizeIgnoredIfNegative() {
        bind("server.max-http-request-header-size=-1");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpRequestHeaderSize())
            .isEqualTo(DataSize.ofKilobytes(8).toBytes()));
    }

    @Test
    void customMaxRequestHttpHeaderSizeIgnoredIfZero() {
        bind("server.max-http-request-header-size=0");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpRequestHeaderSize())
            .isEqualTo(DataSize.ofKilobytes(8).toBytes()));
    }

    @Test
    void defaultMaxHttpResponseHeaderSize() {
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpResponseHeaderSize())
            .isEqualTo(DataSize.ofKilobytes(8).toBytes()));
    }

    @Test
    void customMaxHttpResponseHeaderSize() {
        bind("server.tomcat.max-http-response-header-size=10MB");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpResponseHeaderSize())
            .isEqualTo(DataSize.ofMegabytes(10).toBytes()));
    }

    @Test
    void customMaxResponseHttpHeaderSizeIgnoredIfNegative() {
        bind("server.tomcat.max-http-response-header-size=-1");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpResponseHeaderSize())
            .isEqualTo(DataSize.ofKilobytes(8).toBytes()));
    }

    @Test
    void customMaxResponseHttpHeaderSizeIgnoredIfZero() {
        bind("server.tomcat.max-http-response-header-size=0");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxHttpResponseHeaderSize())
            .isEqualTo(DataSize.ofKilobytes(8).toBytes()));
    }

    @Test
    void customMaxSwallowSize() {
        bind("server.tomcat.max-swallow-size=10MB");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getMaxSwallowSize())
            .isEqualTo(DataSize.ofMegabytes(10).toBytes()));
    }

    @Test
    void customRemoteIpValve() {
        bind("server.tomcat.remoteip.remote-ip-header=x-my-remote-ip-header",
                "server.tomcat.remoteip.protocol-header=x-my-protocol-header",
                "server.tomcat.remoteip.internal-proxies=192.168.0.1",
                "server.tomcat.remoteip.host-header=x-my-forward-host",
                "server.tomcat.remoteip.port-header=x-my-forward-port",
                "server.tomcat.remoteip.protocol-header-https-value=On",
                "server.tomcat.remoteip.trusted-proxies=proxy1|proxy2");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).hasSize(1);
        Valve valve = factory.getEngineValves().iterator().next();
        assertThat(valve).isInstanceOf(RemoteIpValve.class);
        RemoteIpValve remoteIpValve = (RemoteIpValve) valve;
        assertThat(remoteIpValve.getProtocolHeader()).isEqualTo("x-my-protocol-header");
        assertThat(remoteIpValve.getProtocolHeaderHttpsValue()).isEqualTo("On");
        assertThat(remoteIpValve.getRemoteIpHeader()).isEqualTo("x-my-remote-ip-header");
        assertThat(remoteIpValve.getHostHeader()).isEqualTo("x-my-forward-host");
        assertThat(remoteIpValve.getPortHeader()).isEqualTo("x-my-forward-port");
        assertThat(remoteIpValve.getInternalProxies()).isEqualTo("192.168.0.1");
        assertThat(remoteIpValve.getTrustedProxies()).isEqualTo("proxy1|proxy2");
    }

    @Test
    void customStaticResourceAllowCaching() {
        bind("server.tomcat.resource.allow-caching=false");
        customizeAndRunServer((server) -> {
            Tomcat tomcat = server.getTomcat();
            Context context = (Context) tomcat.getHost().findChildren()[0];
            assertThat(context.getResources().isCachingAllowed()).isFalse();
        });
    }

    @Test
    void customStaticResourceCacheTtl() {
        bind("server.tomcat.resource.cache-ttl=10000");
        customizeAndRunServer((server) -> {
            Tomcat tomcat = server.getTomcat();
            Context context = (Context) tomcat.getHost().findChildren()[0];
            assertThat(context.getResources().getCacheTtl()).isEqualTo(10000L);
        });
    }

    @Test
    void customRelaxedPathChars() {
        bind("server.tomcat.relaxed-path-chars=|,^");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getRelaxedPathChars())
            .isEqualTo("|^"));
    }

    @Test
    void customRelaxedQueryChars() {
        bind("server.tomcat.relaxed-query-chars=^ , | ");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractHttp11Protocol<?>) server.getTomcat().getConnector().getProtocolHandler())
                    .getRelaxedQueryChars())
            .isEqualTo("^|"));
    }

    @Test
    void deduceUseForwardHeaders() {
        this.environment.setProperty("DYNO", "-");
        testRemoteIpValveConfigured();
    }

    @Test
    void defaultUseForwardHeaders() {
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).isEmpty();
    }

    @Test
    void forwardHeadersWhenStrategyIsNativeShouldConfigureValve() {
        this.serverProperties.setForwardHeadersStrategy(ServerProperties.ForwardHeadersStrategy.NATIVE);
        testRemoteIpValveConfigured();
    }

    @Test
    void forwardHeadersWhenStrategyIsNoneShouldNotConfigureValve() {
        this.environment.setProperty("DYNO", "-");
        this.serverProperties.setForwardHeadersStrategy(ServerProperties.ForwardHeadersStrategy.NONE);
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).isEmpty();
    }

    @Test
    void defaultRemoteIpValve() {
        // Since 1.1.7 you need to specify at least the protocol
        bind("server.tomcat.remoteip.protocol-header=X-Forwarded-Proto",
                "server.tomcat.remoteip.remote-ip-header=X-Forwarded-For");
        testRemoteIpValveConfigured();
    }

    @Test
    void setUseNativeForwardHeadersStrategy() {
        this.serverProperties.setForwardHeadersStrategy(ForwardHeadersStrategy.NATIVE);
        testRemoteIpValveConfigured();
    }

    private void testRemoteIpValveConfigured() {
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).hasSize(1);
        Valve valve = factory.getEngineValves().iterator().next();
        assertThat(valve).isInstanceOf(RemoteIpValve.class);
        RemoteIpValve remoteIpValve = (RemoteIpValve) valve;
        assertThat(remoteIpValve.getProtocolHeader()).isEqualTo("X-Forwarded-Proto");
        assertThat(remoteIpValve.getProtocolHeaderHttpsValue()).isEqualTo("https");
        assertThat(remoteIpValve.getRemoteIpHeader()).isEqualTo("X-Forwarded-For");
        assertThat(remoteIpValve.getHostHeader()).isEqualTo("X-Forwarded-Host");
        assertThat(remoteIpValve.getPortHeader()).isEqualTo("X-Forwarded-Port");
        String expectedInternalProxies = "10\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|" // 10/8
                + "192\\.168\\.\\d{1,3}\\.\\d{1,3}|" // 192.168/16
                + "169\\.254\\.\\d{1,3}\\.\\d{1,3}|" // 169.254/16
                + "127\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}|" // 127/8
                + "100\\.6[4-9]{1}\\.\\d{1,3}\\.\\d{1,3}|" // 100.64.0.0/10
                + "100\\.[7-9]{1}\\d{1}\\.\\d{1,3}\\.\\d{1,3}|" // 100.64.0.0/10
                + "100\\.1[0-1]{1}\\d{1}\\.\\d{1,3}\\.\\d{1,3}|" // 100.64.0.0/10
                + "100\\.12[0-7]{1}\\.\\d{1,3}\\.\\d{1,3}|" // 100.64.0.0/10
                + "172\\.1[6-9]{1}\\.\\d{1,3}\\.\\d{1,3}|" // 172.16/12
                + "172\\.2[0-9]{1}\\.\\d{1,3}\\.\\d{1,3}|" // 172.16/12
                + "172\\.3[0-1]{1}\\.\\d{1,3}\\.\\d{1,3}|" // 172.16/12
                + "0:0:0:0:0:0:0:1|::1";
        assertThat(remoteIpValve.getInternalProxies()).isEqualTo(expectedInternalProxies);
    }

    @Test
    void defaultBackgroundProcessorDelay() {
        TomcatWebServer server = customizeAndGetServer();
        assertThat(server.getTomcat().getEngine().getBackgroundProcessorDelay()).isEqualTo(10);
    }

    @Test
    void disableRemoteIpValve() {
        bind("server.tomcat.remoteip.remote-ip-header=", "server.tomcat.remoteip.protocol-header=");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).isEmpty();
    }

    @Test
    void errorReportValveIsConfiguredToNotReportStackTraces() {
        TomcatWebServer server = customizeAndGetServer();
        Valve[] valves = server.getTomcat().getHost().getPipeline().getValves();
        assertThat(valves).hasAtLeastOneElementOfType(ErrorReportValve.class);
        for (Valve valve : valves) {
            if (valve instanceof ErrorReportValve errorReportValve) {
                assertThat(errorReportValve.isShowReport()).isFalse();
                assertThat(errorReportValve.isShowServerInfo()).isFalse();
            }
        }
    }

    @Test
    void testCustomizeMinSpareThreads() {
        bind("server.tomcat.threads.min-spare=10");
        assertThat(this.serverProperties.getTomcat().getThreads().getMinSpare()).isEqualTo(10);
    }

    @Test
    void customConnectionTimeout() {
        bind("server.tomcat.connection-timeout=30s");
        customizeAndRunServer((server) -> assertThat(
                ((AbstractProtocol<?>) server.getTomcat().getConnector().getProtocolHandler()).getConnectionTimeout())
            .isEqualTo(30000));
    }

    @Test
    void accessLogBufferingCanBeDisabled() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.buffered=false");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).isBuffered()).isFalse();
    }

    @Test
    void accessLogCanBeEnabled() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).hasSize(1);
        assertThat(factory.getEngineValves()).first().isInstanceOf(AccessLogValve.class);
    }

    @Test
    void accessLogFileDateFormatByDefault() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getFileDateFormat())
            .isEqualTo(".yyyy-MM-dd");
    }

    @Test
    void accessLogFileDateFormatCanBeRedefined() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.file-date-format=yyyy-MM-dd.HH");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getFileDateFormat())
            .isEqualTo("yyyy-MM-dd.HH");
    }

    @Test
    void accessLogIsBufferedByDefault() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).isBuffered()).isTrue();
    }

    @Test
    void accessLogIsDisabledByDefault() {
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(factory.getEngineValves()).isEmpty();
    }

    @Test
    void accessLogMaxDaysDefault() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getMaxDays())
            .isEqualTo(this.serverProperties.getTomcat().getAccesslog().getMaxDays());
    }

    @Test
    void accessLogConditionCanBeSpecified() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.conditionIf=foo",
                "server.tomcat.accesslog.conditionUnless=bar");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getConditionIf()).isEqualTo("foo");
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getConditionUnless())
            .isEqualTo("bar");
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getCondition())
            .describedAs("value of condition should equal conditionUnless - provided for backwards compatibility")
            .isEqualTo("bar");
    }

    @Test
    void accessLogEncodingIsNullWhenNotSpecified() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getEncoding()).isNull();
    }

    @Test
    void accessLogEncodingCanBeSpecified() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.encoding=UTF-8");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getEncoding()).isEqualTo("UTF-8");
    }

    @Test
    void accessLogWithDefaultLocale() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getLocale())
            .isEqualTo(Locale.getDefault().toString());
    }

    @Test
    void accessLogLocaleCanBeSpecified() {
        String locale = "en_AU".equals(Locale.getDefault().toString()) ? "en_US" : "en_AU";
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.locale=" + locale);
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getLocale()).isEqualTo(locale);
    }

    @Test
    void accessLogCheckExistsDefault() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).isCheckExists()).isFalse();
    }

    @Test
    void accessLogCheckExistsSpecified() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.check-exists=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).isCheckExists()).isTrue();
    }

    @Test
    void accessLogMaxDaysCanBeRedefined() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.max-days=20");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getMaxDays()).isEqualTo(20);
    }

    @Test
    void accessLogDoesNotUseIpv6CanonicalFormatByDefault() {
        bind("server.tomcat.accesslog.enabled=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getIpv6Canonical()).isFalse();
    }

    @Test
    void accessLogWithIpv6CanonicalSet() {
        bind("server.tomcat.accesslog.enabled=true", "server.tomcat.accesslog.ipv6-canonical=true");
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        assertThat(((AccessLogValve) factory.getEngineValves().iterator().next()).getIpv6Canonical()).isTrue();
    }

    @Test
    void ajpConnectorCanBeCustomized() {
        TomcatServletWebServerFactory factory = new TomcatServletWebServerFactory(0);
        factory.setProtocol("AJP/1.3");
        factory.addConnectorCustomizers(
                (connector) -> ((AbstractAjpProtocol<?>) connector.getProtocolHandler()).setSecretRequired(false));
        this.customizer.customize(factory);
        WebServer server = factory.getWebServer();
        server.start();
        server.stop();
    }

    private void bind(String... inlinedProperties) {
        TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.environment, inlinedProperties);
        new Binder(ConfigurationPropertySources.get(this.environment)).bind("server",
                Bindable.ofInstance(this.serverProperties));
    }

    private void customizeAndRunServer(Consumer<TomcatWebServer> consumer) {
        TomcatWebServer server = customizeAndGetServer();
        server.start();
        try {
            consumer.accept(server);
        }
        finally {
            server.stop();
        }
    }

    private TomcatWebServer customizeAndGetServer() {
        TomcatServletWebServerFactory factory = customizeAndGetFactory();
        return (TomcatWebServer) factory.getWebServer();
    }

    private TomcatServletWebServerFactory customizeAndGetFactory() {
        TomcatServletWebServerFactory factory = new TomcatServletWebServerFactory(0);
        factory.setHttp2(this.serverProperties.getHttp2());
        this.customizer.customize(factory);
        return factory;
    }

}
<reponame>phodge/predator from predator.grammar.common import Item class Choice(Item): """ A grammar object that attempts to match text using one of several other grammar Items. """ def __init__(self, name=None): super().__init__(name) self._choices = [] def addchoice(self, item): assert isinstance(item, Item) self._choices.append(item)
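A minimal, self-contained Python sketch of the first-match-wins behaviour the Choice docstring describes; MatcherSketch and the callable alternatives are illustrative stand-ins, not part of the predator package.

# Illustrative sketch only (not from the predator repo): try each alternative
# in order and return the first successful match, as Choice does above.
class MatcherSketch:
    def __init__(self, *alternatives):
        self._choices = list(alternatives)

    def match(self, text):
        for choice in self._choices:
            result = choice(text)  # each alternative is a plain callable here
            if result is not None:
                return result
        return None

digits = lambda s: s if s.isdigit() else None
letters = lambda s: s if s.isalpha() else None

number_or_word = MatcherSketch(digits, letters)
print(number_or_word.match("123"))  # -> "123"
print(number_or_word.match("abc"))  # -> "abc"
print(number_or_word.match("1a2"))  # -> None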
<reponame>mooyoul/poo.email import * as React from 'react'; import { Link } from 'react-router-dom'; export function Navbar() { return ( <nav className="inbox-navbar navbar" role="navigation" aria-label="main navigation"> <div className="navbar-brand"> <Link to="/" className="navbar-item"> {/* eslint-disable-next-line */} <span className="icon">💩 + ✉️</span> poo.email </Link> </div> </nav> ); }
<filename>docs/extensions/list_filter.py import re from docutils import nodes from docutils.parsers.rst import Directive def setup(app): app.add_directive('list', ListFilter) return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'} class ListFilter(Directive): """ Provides a list implementation directive that support clauses of the kind .. list:: - Content :filter: - Content - Content Where the :filter: part becomes selective to only include the content if one of the provided tags is set, same as the logic used by the "only" directive. The directive also works for numbered list. """ RE_PATTERN = re.compile(r'^\s*:(.+?):\s*(.+)$') has_content = True required_arguments = 0 optional_arguments = 0 final_argument_whitespace = True option_spec = {} def run(self): # Raise an error if the directive does not have contents. self.assert_has_content() # Remove all list entries that should not be on display env = self.state.document.settings.env filt_data = [self.filter_entry(env, e) for e in self.content.data if e is not None] # Clean up deleted values from content self.content.data = [data for data in filt_data if data is not None] self.content.items = [items for data, items in zip(filt_data, self.content.items) if data is not None] # Parse the filtered content and return the new node node = nodes.paragraph() self.state.nested_parse(self.content, self.content_offset, node) return [node] def filter_entry(self, env, entry): m = self.RE_PATTERN.match(entry) if m is not None: tag_filter, entry = m.groups() if not env.app.builder.tags.eval_condition(tag_filter): return None return entry
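To make the :filter: syntax concrete, here is a standalone Python sketch of the entry matching above; the tag check is simplified to set membership, since the real tags.eval_condition evaluator only exists inside a Sphinx build.

# Standalone sketch of the :filter: matching used by ListFilter above.
import re

RE_PATTERN = re.compile(r'^\s*:(.+?):\s*(.+)$')
active_tags = {"html"}  # assumed active build tags for the example

def filter_entry(entry):
    m = RE_PATTERN.match(entry)
    if m is None:
        return entry  # unconditional entry: always kept
    tag_filter, content = m.groups()
    # Real Sphinx evaluates boolean expressions; this sketch checks membership only.
    return content if tag_filter in active_tags else None

print(filter_entry("- always shown"))
print(filter_entry(":html: - only kept for HTML builds"))
print(filter_entry(":latex: - dropped in this build"))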
import ScrollBar from "./ScrollBar"; import {ScrollProperty} from "./define"; export default class VerScrollBar extends ScrollBar { public static BAR_CLASS: string = ".gm-scrollbar.-vertical"; public scrollHandle(scrollPos: number): void { const pos = (scrollPos * this.trackMax / this.scrollMax) || 0; this.thumbElement.style.transform = "translate3d(0, " + pos + "px, 0)"; } public setScrollOption(current: ScrollToOptions, target: number) { current.top = target; } protected barClasses(): string[] { return ["-vertical"]; } protected getScrollProperty(): ScrollProperty { return { client: "clientHeight", area: "height", clientPos: "clientY", pos: "top", offsetPos: "offsetY", }; } }
/**
 * Converts a color string of the form #AARRGGBB into its signed 32-bit ARGB value.
 *
 * @param argbString color in #AARRGGBB notation
 * @return the ARGB value, or {@code null} if the string cannot be parsed
 */
protected Integer parseArgb(String argbString) {
    try {
        int argb = (int) Long.parseLong(argbString.replaceFirst("#", ""), 16);
        return argb;
    } catch (Exception ignore) {
        return null;
    }
}
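A rough Python analogue of parseArgb above, folding the unsigned hex parse into Java's signed 32-bit int range; the function name and sample inputs are ours.

# Illustrative sketch: parse "#AARRGGBB" into a signed 32-bit integer,
# returning None on malformed input, mirroring the Java method above.
def parse_argb(argb_string):
    try:
        value = int(argb_string.replace("#", "", 1), 16)
        # Java ints are signed 32-bit; fold the unsigned parse into that range.
        if value >= 0x80000000:
            value -= 0x100000000
        return value
    except (ValueError, AttributeError):
        return None

print(parse_argb("#FF000000"))    # -> -16777216 (opaque black)
print(parse_argb("not-a-color"))  # -> None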
<reponame>LopezMDidac/python-for-brodas
# Tweak the while loop so that odd laps print "corriendo" (running)
# and even laps print "saltando" (jumping).
# Expected output:
# 1 --> corriendo
# 2 --> saltando
# 3 --> corriendo
# 4 --> saltando
# 5 --> corriendo

vueltas = 1
final = 5
is_running = True

# <> is the old not-equal operator; Python 3 only accepts !=
while vueltas <= final:
    if is_running:
        print(str(vueltas) + " --> corriendo")
    else:
        print(str(vueltas) + " --> saltando")
    is_running = not is_running
    vueltas = vueltas + 1

# \n - Newline.
# \t - Horizontal tab.
# \r - Carriage return.
# \b - Backspace.
# \f - Form feed.
# \' - Single quote.
# \" - Double quote.
# \\ - Backslash.


def dummy(name: str, num: int) -> int:
    print("holaaa")
    print(name)
    print(num)
    print(num * 3)
    print("--------------------------------------------------------------------------")
    return num * 3


dummy("tomaa", 6)
resultado = dummy("yaaa", 5)
print(resultado)


compra = []  # shopping list shared by the menu helpers below


def elegir_opcion():
    # Show the current list and ask the user what to do next.
    print(compra)
    print("What would you like to do?")


def agregar_producto():
    # Append a product typed by the user.
    producto = input()
    compra.append(producto)


def cambiar_producto():
    # Swap one product in the list for another.
    print(compra)
    producto_viejo = input()
    print("Which product do you want to replace it with?")
    producto_nuevo = input()
    for producto in compra:
        if producto_viejo == producto:
            compra.remove(producto_viejo)
            compra.append(producto_nuevo)
            print("You replaced " + producto_viejo + " with " + producto_nuevo)
            print(compra)
            break


def borrar_producto():
    # Remove a product, reporting when it is not in the list.
    print(compra)
    print("Which item do you want to remove from the list?")
    borrar = input()
    try:
        compra.remove(borrar)
        print("The product '" + borrar + "' was removed from the list; " + str(len(compra)) + " remain.")
    except ValueError:
        print("The product '" + borrar + "' is not in the list")
package plugin import ( "encoding/base64" "encoding/json" "net/http" "strconv" "strings" distreference "github.com/docker/distribution/reference" "github.com/docker/docker/api/server/httputils" "github.com/docker/docker/api/types" "github.com/docker/docker/pkg/ioutils" "github.com/docker/docker/pkg/streamformatter" "github.com/docker/docker/reference" "github.com/pkg/errors" "golang.org/x/net/context" ) func parseHeaders(headers http.Header) (map[string][]string, *types.AuthConfig) { metaHeaders := map[string][]string{} for k, v := range headers { if strings.HasPrefix(k, "X-Meta-") { metaHeaders[k] = v } } // Get X-Registry-Auth authEncoded := headers.Get("X-Registry-Auth") authConfig := &types.AuthConfig{} if authEncoded != "" { authJSON := base64.NewDecoder(base64.URLEncoding, strings.NewReader(authEncoded)) if err := json.NewDecoder(authJSON).Decode(authConfig); err != nil { authConfig = &types.AuthConfig{} } } return metaHeaders, authConfig } // parseRemoteRef parses the remote reference into a reference.Named // returning the tag associated with the reference. In the case the // given reference string includes both digest and tag, the returned // reference will have the digest without the tag, but the tag will // be returned. func parseRemoteRef(remote string) (reference.Named, string, error) { // Parse remote reference, supporting remotes with name and tag // NOTE: Using distribution reference to handle references // containing both a name and digest remoteRef, err := distreference.ParseNamed(remote) if err != nil { return nil, "", err } var tag string if t, ok := remoteRef.(distreference.Tagged); ok { tag = t.Tag() } // Convert distribution reference to docker reference // TODO: remove when docker reference changes reconciled upstream ref, err := reference.WithName(remoteRef.Name()) if err != nil { return nil, "", err } if d, ok := remoteRef.(distreference.Digested); ok { ref, err = reference.WithDigest(ref, d.Digest()) if err != nil { return nil, "", err } } else if tag != "" { ref, err = reference.WithTag(ref, tag) if err != nil { return nil, "", err } } else { ref = reference.WithDefaultTag(ref) } return ref, tag, nil } func (pr *pluginRouter) getPrivileges(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return err } metaHeaders, authConfig := parseHeaders(r.Header) ref, _, err := parseRemoteRef(r.FormValue("remote")) if err != nil { return err } privileges, err := pr.backend.Privileges(ctx, ref, metaHeaders, authConfig) if err != nil { return err } return httputils.WriteJSON(w, http.StatusOK, privileges) } func (pr *pluginRouter) upgradePlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return errors.Wrap(err, "failed to parse form") } var privileges types.PluginPrivileges dec := json.NewDecoder(r.Body) if err := dec.Decode(&privileges); err != nil { return errors.Wrap(err, "failed to parse privileges") } if dec.More() { return errors.New("invalid privileges") } metaHeaders, authConfig := parseHeaders(r.Header) ref, tag, err := parseRemoteRef(r.FormValue("remote")) if err != nil { return err } name, err := getName(ref, tag, vars["name"]) if err != nil { return err } w.Header().Set("Docker-Plugin-Name", name) w.Header().Set("Content-Type", "application/json") output := ioutils.NewWriteFlusher(w) if err := pr.backend.Upgrade(ctx, ref, name, metaHeaders, authConfig, privileges, output); err != nil 
{ if !output.Flushed() { return err } output.Write(streamformatter.NewJSONStreamFormatter().FormatError(err)) } return nil } func (pr *pluginRouter) pullPlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return errors.Wrap(err, "failed to parse form") } var privileges types.PluginPrivileges dec := json.NewDecoder(r.Body) if err := dec.Decode(&privileges); err != nil { return errors.Wrap(err, "failed to parse privileges") } if dec.More() { return errors.New("invalid privileges") } metaHeaders, authConfig := parseHeaders(r.Header) ref, tag, err := parseRemoteRef(r.FormValue("remote")) if err != nil { return err } name, err := getName(ref, tag, r.FormValue("name")) if err != nil { return err } w.Header().Set("Docker-Plugin-Name", name) w.Header().Set("Content-Type", "application/json") output := ioutils.NewWriteFlusher(w) if err := pr.backend.Pull(ctx, ref, name, metaHeaders, authConfig, privileges, output); err != nil { if !output.Flushed() { return err } output.Write(streamformatter.NewJSONStreamFormatter().FormatError(err)) } return nil } func getName(ref reference.Named, tag, name string) (string, error) { if name == "" { if _, ok := ref.(reference.Canonical); ok { trimmed := reference.TrimNamed(ref) if tag != "" { nt, err := reference.WithTag(trimmed, tag) if err != nil { return "", err } name = nt.String() } else { name = reference.WithDefaultTag(trimmed).String() } } else { name = ref.String() } } else { localRef, err := reference.ParseNamed(name) if err != nil { return "", err } if _, ok := localRef.(reference.Canonical); ok { return "", errors.New("cannot use digest in plugin tag") } if distreference.IsNameOnly(localRef) { // TODO: log change in name to out stream name = reference.WithDefaultTag(localRef).String() } } return name, nil } func (pr *pluginRouter) createPlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return err } options := &types.PluginCreateOptions{ RepoName: r.FormValue("name")} if err := pr.backend.CreateFromContext(ctx, r.Body, options); err != nil { return err } //TODO: send progress bar w.WriteHeader(http.StatusNoContent) return nil } func (pr *pluginRouter) enablePlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return err } name := vars["name"] timeout, err := strconv.Atoi(r.Form.Get("timeout")) if err != nil { return err } config := &types.PluginEnableConfig{Timeout: timeout} return pr.backend.Enable(name, config) } func (pr *pluginRouter) disablePlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return err } name := vars["name"] config := &types.PluginDisableConfig{ ForceDisable: httputils.BoolValue(r, "force"), } return pr.backend.Disable(name, config) } func (pr *pluginRouter) removePlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return err } name := vars["name"] config := &types.PluginRmConfig{ ForceRemove: httputils.BoolValue(r, "force"), } return pr.backend.Remove(name, config) } func (pr *pluginRouter) pushPlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { if err := httputils.ParseForm(r); err != nil { return errors.Wrap(err, 
"failed to parse form") } metaHeaders, authConfig := parseHeaders(r.Header) w.Header().Set("Content-Type", "application/json") output := ioutils.NewWriteFlusher(w) if err := pr.backend.Push(ctx, vars["name"], metaHeaders, authConfig, output); err != nil { if !output.Flushed() { return err } output.Write(streamformatter.NewJSONStreamFormatter().FormatError(err)) } return nil } func (pr *pluginRouter) setPlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { var args []string if err := json.NewDecoder(r.Body).Decode(&args); err != nil { return err } if err := pr.backend.Set(vars["name"], args); err != nil { return err } w.WriteHeader(http.StatusNoContent) return nil } func (pr *pluginRouter) listPlugins(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { l, err := pr.backend.List() if err != nil { return err } return httputils.WriteJSON(w, http.StatusOK, l) } func (pr *pluginRouter) inspectPlugin(ctx context.Context, w http.ResponseWriter, r *http.Request, vars map[string]string) error { result, err := pr.backend.Inspect(vars["name"]) if err != nil { return err } return httputils.WriteJSON(w, http.StatusOK, result) }
/**
 * Shape type constants used to tag drawable elements.
 * @author Nemanja
 */
public class TypeShape {

    public static int type = 0;

    public static final int RECTANGLE = 1;
    public static final int ELLIPSE = 2;
    public static final int TEXT = 3;
}
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the LICENSE * file in the root directory of this source tree. */ package com.facebook.yoga; import org.junit.Test; import static org.junit.Assert.assertEquals; public class YogaValueTest { @Test public void testEquals() { assertEquals(new YogaValue(0, YogaUnit.UNDEFINED), new YogaValue(0, YogaUnit.UNDEFINED)); assertEquals(new YogaValue(0, YogaUnit.POINT), new YogaValue(0, YogaUnit.POINT)); assertEquals(new YogaValue(0, YogaUnit.PERCENT), new YogaValue(0, YogaUnit.PERCENT)); assertEquals(new YogaValue(0, YogaUnit.UNDEFINED), new YogaValue(1, YogaUnit.UNDEFINED)); assertEquals(new YogaValue(Float.NaN, YogaUnit.POINT), new YogaValue(Float.NaN, YogaUnit.POINT)); } }
package cn.org.rookie.jeesdp.core.utils; import cn.org.rookie.jeesdp.core.entity.Tree; import org.springframework.util.ReflectionUtils; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; import java.util.Objects; public class CommonUtils { public static List<Tree> toTree(List<Tree> data, String rootId) { List<Tree> treeList = new ArrayList<>(); for (int size = data.size() - 1; size > -1; size--) { Tree item = data.get(size); if (rootId.equals(item.getParent())) { data.remove(size); treeList.add(item); } } for (Tree tree : treeList) { tree.setChildren(toTree(data, tree.getId())); } return treeList.size() > 0 ? treeList : null; } public static <T> List<T> toTree(List<T> data, String rootId, String idName, String parentName, String childName, Class<T> type) { List<T> result = new ArrayList<>(); Field idField = ReflectionUtils.findField(type, idName); Field parentField = ReflectionUtils.findField(type, parentName); Field childField = ReflectionUtils.findField(type, childName); Objects.requireNonNull(idField).setAccessible(true); Objects.requireNonNull(parentField).setAccessible(true); Objects.requireNonNull(childField).setAccessible(true); for (int size = data.size() - 1; size > -1; size--) { T item = data.get(size); if (rootId.equals(ReflectionUtils.getField(Objects.requireNonNull(parentField), item))) { data.remove(size); result.add(item); } } for (T item : result) { ReflectionUtils.setField(Objects.requireNonNull(childField), item, toTree(data, Objects.requireNonNull(ReflectionUtils.getField(Objects.requireNonNull(idField), item)).toString(), idName, parentName, childName, type)); } return result.size() > 0 ? result : null; } public static void main(String[] args) { Tree tree = new Tree(); tree.setId("0"); tree.setLabel("0"); tree.setParent("99"); tree.setValue("0"); Tree tree2 = new Tree(); tree2.setId("1"); tree2.setLabel("1"); tree2.setParent("0"); tree2.setValue("1"); Tree tree3 = new Tree(); tree3.setId("2"); tree3.setLabel("2"); tree3.setParent("0"); tree3.setValue("2"); Tree tree4 = new Tree(); tree4.setId("3"); tree4.setLabel("3"); tree4.setParent("1"); tree4.setValue("3"); List<Tree> treeList; treeList = new ArrayList<>(); treeList.add(tree); treeList.add(tree2); treeList.add(tree3); treeList.add(tree4); System.out.println(toTree(treeList, "99", "id", "parent", "children", Tree.class)); } }
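The recursive grouping in CommonUtils.toTree above can be restated compactly; this Python sketch uses plain dicts instead of reflection and is illustrative only.

# Sketch of the same parent-id grouping: pull out the nodes whose parent
# matches root_id, attach the rest recursively as their children.
def to_tree(data, root_id):
    children = [n for n in data if n["parent"] == root_id]
    rest = [n for n in data if n["parent"] != root_id]
    for node in children:
        node["children"] = to_tree(rest, node["id"])
    return children or None  # mirrors the Java "size() > 0 ? result : null"

flat = [
    {"id": "0", "parent": "99"},
    {"id": "1", "parent": "0"},
    {"id": "2", "parent": "0"},
    {"id": "3", "parent": "1"},
]
print(to_tree(flat, "99"))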
def _process_ups(ups):
    """Merge a list of update dicts: None for an empty list, the single dict
    for one entry, the two dicts merged in place (second wins on key clashes)
    for two entries; longer lists are not supported."""
    if len(ups) == 0:
        return
    elif len(ups) == 1:
        return ups[0]
    elif len(ups) == 2:
        up = ups[0]
        up.update(ups[1])
        return up
    else:
        raise NotImplementedError()
/** * Executes this command * @param model {@code Model} which the command should operate on. * @return the command result of executing this command * @throws CommandException if the index given is invalid */ public CommandResult execute(ItemModel model) throws CommandException { requireNonNull(model); beforeOpen = model.getVisualList().deepCopy(); VisualizeList lastShownList = model.getVisualList(); if (index.getZeroBased() >= lastShownList.size()) { throw new CommandException(Messages.MESSAGE_INVALID_ITEM_DISPLAYED_INDEX); } Item toOpen = lastShownList.get(index.getZeroBased()); return new OpenCommandResult(String.format(MESSAGE_SUCCESS, index.getOneBased()), toOpen); }
{-# LANGUAGE OverloadedStrings #-} module LogStuff where import Control.Monad.Logger (LogLevel (..), LogStr, MonadLogger, ToLogStr (..), logWithoutLoc) import Network.MQTT.Topic (Topic (..)) instance ToLogStr Topic where toLogStr = toLogStr . unTopic logAt :: (MonadLogger m, ToLogStr msg) => LogLevel -> msg -> m () logAt = logWithoutLoc "" logErr :: (MonadLogger m, ToLogStr msg) => msg -> m () logErr = logAt LevelError logInfo :: (MonadLogger m, ToLogStr msg) => msg -> m () logInfo = logAt LevelInfo logDbg :: (MonadLogger m, ToLogStr msg) => msg -> m () logDbg = logAt LevelDebug lstr :: Show a => a -> LogStr lstr = toLogStr . show deLine :: String -> String deLine = unwords . words
/// Creates an anchor node with defines/binding edge to the target and emits /// it. /// /// # Errors /// If an error occurs while writing the entry, an error is returned. pub fn emit_anchor( &mut self, anchor_vname: &VName, target_vname: &VName, byte_start: u32, byte_end: u32, ) -> Result<(), KytheError> { self.emit_node(anchor_vname, "/kythe/node/kind", b"anchor".to_vec())?; self.emit_node( anchor_vname, "/kythe/loc/start", byte_start.to_string().into_bytes().to_vec(), )?; self.emit_node(anchor_vname, "/kythe/loc/end", byte_end.to_string().into_bytes().to_vec())?; self.emit_edge(anchor_vname, target_vname, "/kythe/edge/defines/binding") }
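For readers unfamiliar with Kythe output, this small Python sketch lists the entries a single emit_anchor call corresponds to: three node facts plus one defines/binding edge, as in the Rust above. The dict layout and the sample VNames are assumptions for illustration, not the writer's actual wire format.

# Illustrative sketch of the facts and edge emitted for one anchor.
def anchor_entries(anchor_vname, target_vname, byte_start, byte_end):
    return [
        {"source": anchor_vname, "fact_name": "/kythe/node/kind", "fact_value": b"anchor"},
        {"source": anchor_vname, "fact_name": "/kythe/loc/start", "fact_value": str(byte_start).encode()},
        {"source": anchor_vname, "fact_name": "/kythe/loc/end", "fact_value": str(byte_end).encode()},
        {"source": anchor_vname, "edge_kind": "/kythe/edge/defines/binding", "target": target_vname},
    ]

for entry in anchor_entries({"signature": "anchor42"}, {"signature": "fn_main"}, 10, 14):
    print(entry)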
Softwire Mesh Multicast

The Internet will need to support IPv4 and IPv6 packets. Both address families and their attendant protocol suites support multicast of the single-source and any-source varieties. As part of the transition to IPv6, there will be scenarios where a backbone network running one IP address family internally (referred to as internal IP or I-IP) will provide transit services to attached client networks running another IP address family (referred to as external IP or E-IP).

It is expected that the I-IP backbone will offer unicast and multicast transit services to the client E-IP networks. Softwires Mesh is a solution for supporting E-IP unicast and multicast across an I-IP backbone. This document describes the mechanisms for supporting Internet-style multicast across a set of E-IP and I-IP networks supporting softwires mesh.
import dns from 'dns'; import _ from 'lodash'; /** * Resolve srv record and return first host+port (sorted by priority and weight) * Default protocol: http * You can pass srv record with protocol: https://service-name.local * E.g. ECS service with srv record (service discovery endpoint). */ const ResolveSrv = (srvRecord: string): Promise<string> => new Promise((resolve, reject) => { const [protocol, host] = srvRecord.split('://'); dns.resolveSrv(host || protocol, (err, addresses) => { if (err) { return reject(err); } const sortedAddresses = _.orderBy(addresses, ['priority', 'weight'], ['asc', 'desc']); const resolvedHost = sortedAddresses[0]?.name ?? ''; const resolvedPort = sortedAddresses[0]?.port ?? ''; if (!resolvedHost) { return reject('Unable to resolve srv record: empty list.'); } return resolve(`${host ? `${protocol}://` : ''}${resolvedHost}:${resolvedPort}`); }); }); export default ResolveSrv;
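The ordering rule is the heart of ResolveSrv above; here it is restated as a few lines of Python, sorting by ascending priority and then descending weight, with hypothetical records.

# Sketch of the SRV selection rule: lowest priority first, then highest weight.
def pick_srv(records):
    if not records:
        raise ValueError("Unable to resolve srv record: empty list.")
    best = sorted(records, key=lambda r: (r["priority"], -r["weight"]))[0]
    return f'{best["name"]}:{best["port"]}'

records = [
    {"name": "b.local", "port": 8080, "priority": 10, "weight": 5},
    {"name": "a.local", "port": 9000, "priority": 10, "weight": 50},
    {"name": "c.local", "port": 7000, "priority": 20, "weight": 100},
]
print(pick_srv(records))  # -> a.local:9000 (same priority, higher weight wins)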
// Copyright 2010 Google Inc. All Rights Reserved // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #include "supersonic/base/memory/arena.h" #include <algorithm> using std::copy; using std::max; using std::min; using std::reverse; using std::sort; using std::swap; namespace supersonic { Arena::Arena(BufferAllocator* const buffer_allocator, size_t initial_buffer_size, size_t max_buffer_size) : buffer_allocator_(buffer_allocator), max_buffer_size_(max_buffer_size), arena_footprint_(0) { CHECK_NOTNULL(AddComponent(initial_buffer_size, 0)); } Arena::Arena(size_t initial_buffer_size, size_t max_buffer_size) : buffer_allocator_(HeapBufferAllocator::Get()), max_buffer_size_(max_buffer_size), arena_footprint_(0) { CHECK_NOTNULL(AddComponent(initial_buffer_size, 0)); } void* Arena::AllocateBytes(const size_t size) { void* result = current_->AllocateBytes(size); if (result != NULL) return result; // Need to allocate more space. size_t next_component_size = min(2 * current_->size(), max_buffer_size_); // But, allocate enough, even if the request is large. In this case, // might violate the max_element_size bound. if (next_component_size < size) { next_component_size = size; } // If soft quota is exhausted we will only get the "minimal" amount of memory // we ask for. In this case if we always use "size" as minimal, we may degrade // to allocating a lot of tiny components, one for each string added to the // arena. This would be very inefficient, so let's first try something between // "size" and "next_component_size". If it fails due to hard quota being // exhausted, we'll fall back to using "size" as minimal. size_t minimal = (size + next_component_size) / 2; CHECK_LE(size, minimal); CHECK_LE(minimal, next_component_size); // Now, just make sure we can actually get the memory. Component* component = AddComponent(next_component_size, minimal); if (component == NULL) { component = AddComponent(next_component_size, size); } if (!component) return NULL; // Now, must succeed. The component has at least 'size' bytes. result = component->AllocateBytes(size); CHECK(result != NULL); return result; } Arena::Component* Arena::AddComponent(size_t requested_size, size_t minimum_size) { Buffer* buffer = buffer_allocator_->BestEffortAllocate(requested_size, minimum_size); if (buffer == NULL) return NULL; current_ = new Component(buffer); arena_.push_back(linked_ptr<Component>(current_)); arena_footprint_ += current_->size(); return current_; } void Arena::Reset() { linked_ptr<Component> last = arena_.back(); if (arena_.size() > 1) { arena_.clear(); arena_.push_back(last); current_ = last.get(); } last->Reset(); #ifndef NDEBUG // In debug mode release the last component too for (hopefully) better // detection of memory-related bugs (invalid shallow copies, etc.). arena_.clear(); CHECK_NOTNULL(AddComponent(last->size(), 0)); #endif } } // namespace supersonic
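The sizing policy in Arena::AllocateBytes is easy to check numerically; this Python sketch mirrors the doubling, the cap, the large-request override, and the midpoint "minimal" fallback described in the comments above.

# Back-of-the-envelope model of the arena growth policy.
def next_component_sizes(current_size, max_buffer_size, request):
    next_size = min(2 * current_size, max_buffer_size)  # double, but cap
    if next_size < request:
        next_size = request  # large requests always fit in one component
    # Midpoint "minimal" keeps soft-quota pressure from degrading the arena
    # into one tiny component per request.
    minimal = (request + next_size) // 2
    assert request <= minimal <= next_size
    return next_size, minimal

print(next_component_sizes(current_size=4096, max_buffer_size=1 << 20, request=100))
print(next_component_sizes(current_size=4096, max_buffer_size=1 << 20, request=64 * 1024))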
<reponame>trespasserw/MPS /* * Copyright 2003-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.mps.generator.impl.cache; import jetbrains.mps.util.io.ModelInputStream; import jetbrains.mps.util.io.ModelOutputStream; import org.jetbrains.mps.openapi.model.SModelReference; import org.jetbrains.mps.openapi.model.SNode; import org.jetbrains.mps.openapi.model.SNodeId; import org.jetbrains.mps.openapi.persistence.PersistenceFacade; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * <NAME>, Sep 23, 2010 */ public class TransientModelWithMetainfo { private static final int END_MARKER = '$' + ('M' << 24) + ('P' << 16) + ('S' << 8); public static final String CONDITIONALS_ID = ""; private SModelReference myReference; private List<SNode> myRoots; private Map<SNodeId, SNodeId> myRootToOriginal; private Map<SNodeId, MappingsMemento> myMappingsMemento; public TransientModelWithMetainfo(SModelReference reference, List<SNode> roots) { myReference = reference; myRoots = roots; myRootToOriginal = new HashMap<>(); myMappingsMemento = new HashMap<>(); } public List<SNode> getRoots() { return myRoots; } public MappingsMemento getMappingsMemento(String originalId) { return myMappingsMemento.get(PersistenceFacade.getInstance().createNodeId(originalId)); } public MappingsMemento getMappingsMemento(SNode originalRoot, boolean create) { SNodeId key = originalRoot == null ? null : originalRoot.getNodeId(); MappingsMemento mappingsMemento = myMappingsMemento.get(key); if(mappingsMemento == null && create) { mappingsMemento = new MappingsMemento(); myMappingsMemento.put(key, mappingsMemento); } return mappingsMemento; } public void updateMappings(String originalId, MappingsMemento mappingsMemento) { myMappingsMemento.put(PersistenceFacade.getInstance().createNodeId(originalId), mappingsMemento); } public String getOriginal(SNode root) { SNodeId id = myRootToOriginal.get(root.getNodeId()); if(id == null) { return CONDITIONALS_ID; } return id.toString(); } public void setOriginal(SNodeId sNodeId, String originalId) { myRootToOriginal.put(sNodeId, originalId.equals(CONDITIONALS_ID) ? 
null : PersistenceFacade.getInstance().createNodeId(originalId)); } public void save(ModelOutputStream os) throws IOException { new TransientModelPersistence(myReference).saveModel(myRoots, os); saveMetainfo(os); } private void saveMetainfo(ModelOutputStream os) throws IOException { os.writeInt(myRootToOriginal.size()); for (Entry<SNodeId, SNodeId> e : myRootToOriginal.entrySet()) { os.writeNodeId(e.getKey()); os.writeNodeId(e.getValue()); } os.writeInt(myMappingsMemento.size()); for(Entry<SNodeId, MappingsMemento> e : myMappingsMemento.entrySet()) { os.writeNodeId(e.getKey()); e.getValue().save(os); } os.writeInt(END_MARKER); } private void loadMetainfo(ModelInputStream is) throws IOException { int size = is.readInt(); for (; size > 0; size--) { SNodeId key = is.readNodeId(); SNodeId value = is.readNodeId(); myRootToOriginal.put(key, value); } size = is.readInt(); for(; size > 0; size--) { SNodeId key = is.readNodeId(); MappingsMemento mappingsMemento = MappingsMemento.load(is); myMappingsMemento.put(key, mappingsMemento); } if(is.readInt() != END_MARKER) { throw new IOException("corrupted file"); } } public static TransientModelWithMetainfo load(ModelInputStream is, SModelReference modelReference) throws IOException { List<SNode> roots = new TransientModelPersistence(modelReference).loadModel(is); TransientModelWithMetainfo result = new TransientModelWithMetainfo(modelReference, roots); result.loadMetainfo(is); return result; } }
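As a quick illustration, the END_MARKER sentinel used by saveMetainfo/loadMetainfo above can be reproduced in Python: it packs the characters 'M', 'P', 'S', '$' into one 32-bit integer.

# Recomputing the Java constant END_MARKER = '$' + ('M' << 24) + ('P' << 16) + ('S' << 8).
marker = ord('$') + (ord('M') << 24) + (ord('P') << 16) + (ord('S') << 8)
print(marker, hex(marker))  # -> 1297109796 0x4d505324, i.e. the bytes 'M' 'P' 'S' '$'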
# --------------------------------------------------------------- # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # This work is licensed under the NVIDIA Source Code License # for LACE. To view a copy of this license, see the LICENSE file. # --------------------------------------------------------------- import sys from typing import Any import numpy as np from scipy import linalg import torch import torch.nn as nn from prefgen.external_modules.LACE.FFHQ.metrics.img_classifier import MobileNet def compute_n_params(model, return_str=True): tot = 0 for p in model.parameters(): w = 1 for x in p.shape: w *= x tot += w if return_str: if tot >= 1e6: return '{:.1f}M'.format(tot / 1e6) else: return '{:.1f}K'.format(tot / 1e3) else: return tot class Logger(object): """ Redirect stderr to stdout, optionally print stdout to a file, and optionally force flushing on both stdout and the file. """ def __init__(self, file_name: str = None, file_mode: str = "w", should_flush: bool = True): self.file = None if file_name is not None: self.file = open(file_name, file_mode) self.should_flush = should_flush self.stdout = sys.stdout self.stderr = sys.stderr sys.stdout = self sys.stderr = self def __enter__(self) -> "Logger": return self def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: self.close() def write(self, text: str) -> None: """Write text to stdout (and a file) and optionally flush.""" if len(text) == 0: # workaround for a bug in VSCode debugger: sys.stdout.write(''); sys.stdout.flush() => crash return if self.file is not None: self.file.write(text) self.stdout.write(text) if self.should_flush: self.flush() def flush(self) -> None: """Flush written text to both stdout and a file, if open.""" if self.file is not None: self.file.flush() self.stdout.flush() def close(self) -> None: """Flush, close possible files, and remove stdout/stderr mirroring.""" self.flush() # if using multiple loggers, prevent closing in wrong order if sys.stdout is self: sys.stdout = self.stdout if sys.stderr is self: sys.stderr = self.stderr if self.file is not None: self.file.close() def get_loss(logits, y, device='cuda', reduce=False): assert y.ndim == 1 and logits.ndim == 2, (y.ndim, logits.ndim) n_classes = logits.size(1) eff_idxes = (y != -1) logits, y = logits[eff_idxes], y[eff_idxes] if n_classes > 1: # discrete attribute y = y.long() weight = torch.tensor([1 for _ in range(n_classes)]).float().to(device) reduction = 'mean' if reduce else 'none' loss = nn.CrossEntropyLoss(reduction=reduction, weight=weight)(logits, y) else: # continuous attribute assert n_classes == 1, n_classes y = y.float() weight = torch.tensor([1 for _ in range(y.size(0))]).float().to(device) loss = torch.linalg.norm(logits - y[:, None], dim=1) ** 2 * 0.5 * weight if reduce: loss = loss.mean() assert loss.ndim == 1 if not reduce else loss.ndim == 0 return loss def get_acc(logits, y, c=None): assert y.ndim == 1 and logits.ndim == 2, (y.ndim, logits.ndim) n_classes = logits.size(1) eff_idxes = (y != -1) logits, y = logits[eff_idxes], y[eff_idxes] if n_classes > 1: # discrete attribute y = y.long() if c is None: correct = (logits.max(1)[1] == y).float() else: correct = ((logits.max(1)[1] == y) * (y == c)).float() num_c = (y == c).sum() else: # continuous attribute assert n_classes == 1, n_classes y = y.float() correct = 1 - torch.abs(logits.squeeze() - y) assert correct.ndim == 1 if n_classes > 1 and c is not None: return correct.detach(), num_c else: return correct.detach() class FFHQ_Classifier(nn.Module): def 
__init__(self, n_classes_list=[2]): super(FFHQ_Classifier, self).__init__() self.backbone = MobileNet() self.n_classes_list = n_classes_list self.last_dim = 1024 self.heads = nn.Linear(self.last_dim, sum(n_classes_list)) print('Initialized the ffhq image classifier!') def classify(self, x): x = self.backbone.features(x) x = x.view(x.size(0), -1) logits = self.heads(x) logits_list = list(torch.split(logits, self.n_classes_list, dim=1)) return logits_list def calc_fid(sample_mean, sample_cov, real_mean, real_cov, eps=1e-6): cov_sqrt, _ = linalg.sqrtm(sample_cov @ real_cov, disp=False) if not np.isfinite(cov_sqrt).all(): print("product of cov matrices is singular") offset = np.eye(sample_cov.shape[0]) * eps cov_sqrt = linalg.sqrtm((sample_cov + offset) @ (real_cov + offset)) if np.iscomplexobj(cov_sqrt): if not np.allclose(np.diagonal(cov_sqrt).imag, 0, atol=1e-3): m = np.max(np.abs(cov_sqrt.imag)) raise ValueError(f"Imaginary component {m}") cov_sqrt = cov_sqrt.real mean_diff = sample_mean - real_mean mean_norm = mean_diff @ mean_diff trace = np.trace(sample_cov) + np.trace(real_cov) - 2 * np.trace(cov_sqrt) fid = mean_norm + trace return fid def logical_comb(s: str, es: list): stack = [0, '+'] val = 0. s += '$' for c in s: if c.isnumeric(): val = es[int(c)] elif c in ['+', '-', '*', ')', '$']: symbol = stack.pop() if symbol == '+': stack[-1] += val elif symbol == '-': alpha = torch.clamp(0.1 / torch.abs(val.detach().clone()), min=0, max=1.) stack[-1] -= val * alpha else: tl, tr, cof = 1.0, 1.0, 20.0 # these values work well stack[-1] = torch.log(torch.exp(stack[-1] * tl) * cof + torch.exp(val * tr)) if c == ')': val = stack.pop() stack.pop() elif c in ['+', '-', '*']: stack.append(c) val = 0. elif c == '(': stack.append(c) stack.extend([0, '+']) return stack[-1] def get_z_inits(num, batch_size, latent_dim, device): z_0 = torch.FloatTensor(batch_size, latent_dim).normal_(0, 1).to(device) z_1 = torch.FloatTensor(batch_size, latent_dim).normal_(0, 1).to(device) def rescale_z(z): z_mean_norm = (torch.linalg.norm(z_0, dim=1, keepdim=True) + torch.linalg.norm(z_1, dim=1, keepdim=True)) / 2 assert z_mean_norm.shape[0] == batch_size return z / torch.linalg.norm(z, dim=1, keepdim=True) * z_mean_norm z_inits = [z_0] for i in range(1, num): z_k = z_0 + (i / num) * (z_1 - z_0) z_inits.append(rescale_z(z_k)) z_inits.append(z_1) return z_inits # heuristics from the styleflow paper: https://github.com/RameenAbdal/StyleFlow subset_dict = { 'light0': list(range(7, 12)), 'light3': list(range(7, 12)), 'smile': list(range(4, 6)), 'yaw': list(range(0, 4)), 'pitch': list(range(0, 4)), 'age': list(range(4, 8)), 'gender': list(range(0, 8)), 'glasses': list(range(0, 6)), 'bald': list(range(0, 6)), 'beard': list(range(5, 10)), } def subset_from_att_names(att_names_list): set_all = list(range(18)) if len(att_names_list) == 0: # no subset selection return [] subset_sel = [] for att_name in att_names_list: subset_sel += subset_dict.get(att_name, set_all) subset_nonsel = [i for i in set_all if i not in set(subset_sel)] return subset_nonsel
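A hypothetical end-to-end use of calc_fid defined above (assumed to be in scope), fed with statistics of two synthetic Gaussian feature sets; real callers would pass means and covariances of deep feature activations.

# Toy usage of calc_fid from the module above.
import numpy as np

rng = np.random.default_rng(0)
feats_a = rng.normal(size=(500, 16))
feats_b = rng.normal(loc=0.5, size=(500, 16))

fid = calc_fid(feats_a.mean(axis=0), np.cov(feats_a, rowvar=False),
               feats_b.mean(axis=0), np.cov(feats_b, rowvar=False))
print("toy FID:", round(float(fid), 3))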
// createUploadRequest services requests to create a new upload id. It validates
// the given request, and ensures that the returned upload id is unique. Upload
// requests are persisted until deleted or a successful upload occurs.
func (r *uploadResource) createUploadRequest(request *restful.Request, response *restful.Response, user schema.User) (interface{}, error) {
	if cr, err := request2IDRequest(request, user.ID); err != nil {
		app.Log.Debugf("request2IDRequest failed", err)
		return nil, err
	} else {
		session := request.Attribute("session").(*rethinkdb.Session)
		project := request.Attribute("project").(schema.Project)
		directory := request.Attribute("directory").(schema.Directory)
		idService := uploads.NewIDService(session)
		if upload, err := idService.ID(cr, &project, &directory); err != nil {
			app.Log.Debugf("idService.ID failed", err)
			return nil, err
		} else {
			startingBlock := findStartingBlock(upload.File.Blocks)
			resp := mcstoreapi.CreateUploadResponse{
				RequestID:     upload.ID,
				StartingBlock: startingBlock,
			}
			return &resp, nil
		}
	}
}
<reponame>RichardoMrMu/gsoap-onvif
#ifndef ONVIFCLIENTDEVICE_HPP
#define ONVIFCLIENTDEVICE_HPP

#include <iostream>
#include <cstdio>
#include <cstring>
#include <ctime>

#include "gsoap/wsseapi.h"
#include <openssl/rsa.h>
#include "onvif/soapDeviceBindingProxy.h"

#define MAX_MSG_LEN 1024

typedef unsigned int uint;

class OnvifClientDevice {
public:
    OnvifClientDevice(std::string url, std::string user, std::string password, bool showCapabilities);
    ~OnvifClientDevice();

    virtual std::string ErrorString();

protected:
    bool has_media_;
    bool has_ptz_;
    std::string device_url_;
    std::string media_url_;
    std::string ptz_url_;
    std::string user_;
    std::string passwd_;
    struct soap *soap_;

private:
    DeviceBindingProxy proxy_device_;
};

#endif
/// Returns the children of a node pub fn children(&self, hash: Hash) -> Content<T> { let nodes = self.dag.get(&hash).map(|node| { node.children .iter() .copied() .filter_map(|child| self.dag.get(&child).map(|node| (child, node))) .collect() }); Content { nodes: nodes.unwrap_or_default(), } }
/** * Form bean for the default context page. * * @author Amy Roh * @version $Revision: 466595 $ $Date: 2006-10-21 23:24:41 +0100 (Sat, 21 Oct 2006) $ */ public final class DefaultContextForm extends ActionForm { // ----------------------------------------------------- Instance Variables /** * The administrative action represented by this form. */ private String adminAction = "Edit"; /** * The object name of the DefaultContext this bean refers to. */ private String objectName = null; /** * The object name of the parent of this DefaultContext. */ private String parentObjectName = null; /** * The object name of the loader of this DefaultContext. */ private String loaderObjectName = null; /** * The object name of the manager of this DefaultContext. */ private String managerObjectName = null; /** * The text for the node label. */ private String nodeLabel = null; /** * The value of cookies. */ private String cookies = "true"; /** * The value of cross context. */ private String crossContext = "true"; /** * The text for reloadable boolean. */ private String reloadable = "false"; /** * The text for swallowOutput boolean. */ private String swallowOutput = "false"; /** * The text for use naming boolean. */ private String useNaming = "true"; /** * The text for the loader check interval. */ private String ldrCheckInterval = "15"; /** * The text for the loader Debug level. */ private String ldrDebugLvl = "0"; /** * The text for the boolean value of loader reloadable. */ private String ldrReloadable = "false"; /** * The text for the session manager check interval. */ private String mgrCheckInterval = "60"; /** * The text for the session manager Debug level. */ private String mgrDebugLvl = "0"; /** * The text for the session mgr session ID initializer. */ private String mgrSessionIDInit = ""; /** * The text for the session mgr max active sessions. */ private String mgrMaxSessions = "0"; /** * Set of valid values for debug level. */ private List debugLvlVals = null; /* * Represent boolean (true, false) values for cookies etc. */ private List booleanVals = null; // ------------------------------------------------------------- Properties /** * Return the administrative action represented by this form. */ public String getAdminAction() { return this.adminAction; } /** * Set the administrative action represented by this form. */ public void setAdminAction(String adminAction) { this.adminAction = adminAction; } /** * Return the object name of the DefaultContext this bean refers to. */ public String getObjectName() { return this.objectName; } /** * Set the object name of the DefaultContext this bean refers to. */ public void setObjectName(String objectName) { this.objectName = objectName; } /** * Return the parent object name of the DefaultContext this bean refers to. */ public String getParentObjectName() { return this.parentObjectName; } /** * Set the parent object name of the DefaultContext this bean refers to. */ public void setParentObjectName(String parentObjectName) { this.parentObjectName = parentObjectName; } /** * Return the loader object name of the DefaultContext this bean refers to. */ public String getLoaderObjectName() { return this.loaderObjectName; } /** * Set the loader object name of the DefaultContext this bean refers to. */ public void setLoaderObjectName(String loaderObjectName) { this.loaderObjectName = loaderObjectName; } /** * Return the manager object name of the DefaultContext this bean refers to. 
*/ public String getManagerObjectName() { return this.managerObjectName; } /** * Set the manager object name of the DefaultContext this bean refers to. */ public void setManagerObjectName(String managerObjectName) { this.managerObjectName = managerObjectName; } /** * Return the label of the node that was clicked. */ public String getNodeLabel() { return this.nodeLabel; } /** * Set the node label. */ public void setNodeLabel(String nodeLabel) { this.nodeLabel = nodeLabel; } /** * Return the debugVals. */ public List getDebugLvlVals() { return this.debugLvlVals; } /** * Set the debugVals. */ public void setDebugLvlVals(List debugLvlVals) { this.debugLvlVals = debugLvlVals; } /** * Return the booleanVals. */ public List getBooleanVals() { return this.booleanVals; } /** * Set the debugVals. */ public void setBooleanVals(List booleanVals) { this.booleanVals = booleanVals; } /** * Return the Cookies. */ public String getCookies() { return this.cookies; } /** * Set the Cookies. */ public void setCookies(String cookies) { this.cookies = cookies; } /** * Return the Cross Context. */ public String getCrossContext() { return this.crossContext; } /** * Set the Cross Context. */ public void setCrossContext(String crossContext) { this.crossContext = crossContext; } /** * Return the reloadable boolean value. */ public String getReloadable() { return this.reloadable; } /** * Set the reloadable value. */ public void setReloadable(String reloadable) { this.reloadable = reloadable; } /** * Return the swallowOutput boolean value. */ public String getSwallowOutput() { return this.swallowOutput; } /** * Set the swallowOutput value. */ public void setSwallowOutput(String swallowOutput) { this.swallowOutput = swallowOutput; } /** * Return the use naming boolean value. */ public String getUseNaming() { return this.useNaming; } /** * Set the useNaming value. */ public void setUseNaming(String useNaming) { this.useNaming = useNaming; } /** * Return the loader check interval. */ public String getLdrCheckInterval() { return this.ldrCheckInterval; } /** * Set the loader Check Interval. */ public void setLdrCheckInterval(String ldrCheckInterval) { this.ldrCheckInterval = ldrCheckInterval; } /** * Return the Loader Debug Level Text. */ public String getLdrDebugLvl() { return this.ldrDebugLvl; } /** * Set the Loader Debug Level Text. */ public void setLdrDebugLvl(String ldrDebugLvl) { this.ldrDebugLvl = ldrDebugLvl; } /** * Return the loader reloadable boolean value. */ public String getLdrReloadable() { return this.ldrReloadable; } /** * Set the loader reloadable value. */ public void setLdrReloadable(String ldrReloadable) { this.ldrReloadable = ldrReloadable; } /** * Return the session manager check interval. */ public String getMgrCheckInterval() { return this.mgrCheckInterval; } /** * Set the session manager Check Interval. */ public void setMgrCheckInterval(String mgrCheckInterval) { this.mgrCheckInterval = mgrCheckInterval; } /** * Return the session mgr Debug Level Text. */ public String getMgrDebugLvl() { return this.mgrDebugLvl; } /** * Set the session mgr Debug Level Text. */ public void setMgrDebugLvl(String mgrDebugLvl) { this.mgrDebugLvl = mgrDebugLvl; } /** * Return the session ID initializer. */ public String getMgrSessionIDInit() { return this.mgrSessionIDInit; } /** * Set the mgr Session ID Initizializer. */ public void setMgrSessionIDInit(String mgrSessionIDInit) { this.mgrSessionIDInit = mgrSessionIDInit; } /** * Return the Session mgr maximum active sessions. 
*/ public String getMgrMaxSessions() { return this.mgrMaxSessions; } /** * Set the Session mgr maximum active sessions. */ public void setMgrMaxSessions(String mgrMaxSessions) { this.mgrMaxSessions = mgrMaxSessions; } // --------------------------------------------------------- Public Methods /** * Reset all properties to their default values. * * @param mapping The mapping used to select this instance * @param request The servlet request we are processing */ public void reset(ActionMapping mapping, HttpServletRequest request) { this.objectName = null; this.parentObjectName = null; this.loaderObjectName = null; this.managerObjectName = null; // default context properties this.cookies = "true"; this.crossContext = "true"; this.reloadable = "false"; this.swallowOutput = "false"; this.useNaming = "true"; // loader properties this.ldrCheckInterval = "15"; this.ldrDebugLvl = "0"; this.ldrReloadable = "true"; // session manager properties this.mgrCheckInterval = "60"; this.mgrDebugLvl = "0"; this.mgrSessionIDInit = "0"; this.mgrMaxSessions = "-1"; } /** * Render this object as a String. */ public String toString() { StringBuffer sb = new StringBuffer("DefaultContextForm[adminAction="); sb.append(adminAction); sb.append(",cookies="); sb.append(cookies); sb.append(",crossContext="); sb.append(crossContext); sb.append(",reloadable="); sb.append(reloadable); sb.append(",swallowOutput="); sb.append(swallowOutput); sb.append(",useNaming="); sb.append(useNaming); // loader properties sb.append(",ldrCheckInterval="); sb.append(ldrCheckInterval); sb.append(",ldrDebugLvl="); sb.append(ldrDebugLvl); sb.append(",ldrReloadable="); sb.append(ldrReloadable); // manager properties sb.append(",mgrDebugLvl="); sb.append(mgrDebugLvl); sb.append(",mgrCheckInterval="); sb.append(mgrCheckInterval); sb.append(",mgrSessionIDInit="); sb.append(mgrSessionIDInit); sb.append(",mgrMaxSessions="); sb.append(mgrMaxSessions); // object names sb.append("',objectName='"); sb.append(objectName); sb.append("',parentObjectName="); sb.append(parentObjectName); sb.append("',loaderObjectName="); sb.append(loaderObjectName); sb.append("',managerObjectName="); sb.append(managerObjectName); sb.append("]"); return (sb.toString()); } /** * Validate the properties that have been set from this HTTP request, * and return an <code>ActionErrors</code> object that encapsulates any * validation errors that have been found. If no errors are found, return * <code>null</code> or an <code>ActionErrors</code> object with no * recorded error messages. * * @param mapping The mapping used to select this instance * @param request The servlet request we are processing */ private ActionErrors errors; public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { errors = new ActionErrors(); String submit = request.getParameter("submit"); // front end validation when save is clicked. if (submit != null) { // loader properties // FIXME-- verify if these ranges are ok. numberCheck("ldrCheckInterval", ldrCheckInterval , true, 0, 10000); // session manager properties numberCheck("mgrCheckInterval", mgrCheckInterval, true, 0, 10000); numberCheck("mgrMaxSessions", mgrMaxSessions, false, -1, 100); //if ((mgrSessionIDInit == null) || (mgrSessionIDInit.length() < 1)) { // errors.add("mgrSessionIDInit", new ActionMessage("error.mgrSessionIDInit.required")); //} } return errors; } /* * Helper method to check that it is a required number and * is a valid integer within the given range. (min, max). 
*
 * @param field The field name in the form for which this error occurred.
 * @param numText The string representation of the number.
 * @param rangeCheck Boolean value set to true if a range check should be performed.
 *
 * @param min The lower limit of the range
 * @param max The upper limit of the range
 *
 */
private void numberCheck(String field, String numText, boolean rangeCheck, int min, int max) {

    // Check for 'is required'
    if ((numText == null) || (numText.length() < 1)) {
        errors.add(field, new ActionMessage("error."+field+".required"));
    } else {

        // check for 'must be a number' in the 'valid range'
        try {
            int num = Integer.parseInt(numText);
            // perform range check only if required
            if (rangeCheck) {
                if ((num < min) || (num > max ))
                    errors.add( field, new ActionMessage("error."+ field +".range"));
            }
        } catch (NumberFormatException e) {
            errors.add(field, new ActionMessage("error."+ field + ".format"));
        }
    }
}

}
import {ExceptionStackTraceElementDetail} from "./exception-stack-trace-element-detail.model"; import {Serializable} from "../../../infrastructure/serializable.model"; export class ExceptionDetail implements Serializable<ExceptionDetail> { exceptionClassName: string; message: string; stackTrace: Array<ExceptionStackTraceElementDetail> = []; cause: ExceptionDetail; suppressed: Array<ExceptionDetail> = []; asString: string; asDetailedString: string; deserialize(input: Object): ExceptionDetail { this.exceptionClassName = input["exceptionClassName"]; this.message = input["message"]; for (let stackTraceJson of input["stackTrace"]) { this.stackTrace.push(new ExceptionStackTraceElementDetail().deserialize(stackTraceJson)) } if (input["cause"]) { this.cause = new ExceptionDetail().deserialize(input["cause"]); } for (let suppressedJson of input["suppressed"]) { this.suppressed.push(new ExceptionDetail().deserialize(suppressedJson)) } this.asString = input["asString"]; this.asDetailedString = input["asDetailedString"]; return this; } serialize(): string { return ""; } }
MILL CREEK, Wash. - A bicyclist was struck and critically injured early Wednesday by a hit-and-run driver in an SUV who fled the scene without stopping, Mill Creek police said.

A witness said the silver SUV sustained obvious damage to its side in the collision, and officers are looking for it.

Police and medics responded to the scene, near 35th Avenue SE at Silver Crest Drive, at about 1:30 a.m. after receiving a report of a hit-and-run crash, said Ian Durkee of the Mill Creek police.

The bicyclist, identified as a 53-year-old man, was found there with severe injuries. He was taken to Providence Medical Center in Everett, where he is listed in critical condition.

One witness heard the crash then ran outside to see a silver SUV headed west on Silver Crest Drive.

"It had some heavy side damage - we're not sure which side of the vehicle - but the witness described it looking like it had been T-boned. It would have been in a T-bone collision (with) heavy damage," Durkee said.

The crash left a lot of debris on the roadway - snacks and drinks that the cyclist was likely carrying plus other items. Crime scene investigators are there gathering every single piece of evidence possible.

Durkee said 35th Avenue SE was closed during the initial investigation - which threw a lot of drivers off, including one that Durkee says almost hit him. He said the car came so close to him that his flashlight hit the window and broke it.

A state trooper who is at the scene helping out arrested that driver on suspicion of DUI.

All streets in the area have now reopened.
# coding=utf-8


class Tree():
    def __init__(self, init_field):
        self.root = init_field

    def search(self, field_id, field):
        global final_queen_pos
        # print("field_id:", field_id)
        # field.print_field()
        q_r = rest_row[field_id - n]
        if final_queen_pos:
            return None
        else:
            for q_c, cell in enumerate(field.state[q_r]):
                if cell == 0:
                    if final_queen_pos:
                        break
                    if field_id == 7:
                        field.queen_pos.append([q_r, q_c])
                        final_queen_pos = field.queen_pos
                        break
                    else:
                        next_field = Field(field_id + 1, field)
                        field.children.append(next_field)
                        next_field.update(q_r, q_c)
                        self.search(field_id + 1, next_field)
                else:
                    continue


class Field():
    def __init__(self, field_id, parent=None):
        self.id = field_id
        self.parent = parent
        self.children = []
        if field_id == 0:
            self.state = [[0 for _ in range(W)] for _ in range(H)]
            self.queen_pos = []
        else:
            self.state = []
            self.queen_pos = []
            for row in self.parent.state:
                x = row.copy()
                self.state.append(x)
            for parent_queen_pos in self.parent.queen_pos:
                self.queen_pos.append(parent_queen_pos)

    def update(self, q_r, q_c):
        self.queen_pos.append([q_r, q_c])
        for i in range(8):
            self.state[q_r][i] = 1
            self.state[i][q_c] = 1
        summ = q_c + q_r
        diff = q_c - q_r
        for i in range(8):
            if 0 <= i + diff < 8:
                self.state[i][i + diff] = 1
            if 0 <= summ - i < 8:
                self.state[i][summ - i] = 1

    def print_field(self):  # for debug
        print("queen_pos:", self.queen_pos)
        for row in self.state:
            print(*row)


H, W = 8, 8
n = int(input())
init_field = Field(0)
tree = Tree(init_field)
field_list = [init_field]
rest_row = [i for i in range(8)]
queen_pos = []
final_queen_pos = None
final_result = [["." for _ in range(8)] for _ in range(8)]

for _ in range(n):
    r, c = map(int, input().split())
    queen_pos.append([r, c])
    rest_row.remove(r)

if n < 8:
    i = 0
    prev_field = init_field
    for q_r, q_c in queen_pos:
        i += 1
        field = Field(i, prev_field)
        field.update(q_r, q_c)
        field_list.append(field)
        prev_field.children = [field]
        prev_field = field
    # search once, after all given queens are placed
    tree.search(i, field)
    for q_r, q_c in final_queen_pos:
        final_result[q_r][q_c] = "Q"
    for row in final_result:
        print("".join(row), end="\n")
else:
    for q_r, q_c in queen_pos:
        final_result[q_r][q_c] = "Q"
    for row in final_result:
        print("".join(row), end="\n")
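The diagonal bookkeeping in Field.update is the least obvious part of the solver above; this isolated sketch shows the two invariants it relies on: r + c stays constant along one diagonal and c - r along the other.

# Cells attacked diagonally from (q_r, q_c), derived exactly as in Field.update.
def attacked_diagonals(q_r, q_c, size=8):
    summ, diff = q_r + q_c, q_c - q_r
    cells = set()
    for i in range(size):
        if 0 <= i + diff < size:
            cells.add((i, i + diff))   # cells with c - r constant
        if 0 <= summ - i < size:
            cells.add((i, summ - i))   # cells with r + c constant
    return cells

print(sorted(attacked_diagonals(3, 4)))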
Update: Check out our completed iPhone 5 mock ups and renders!

Photos of the alleged backside of Apple’s iPhone 5 leaked earlier this week on May 29th and, then, a day later, the alleged schematics for the same device also spilled out to the Web. The iPhone 5 back panels looked convincing: We saw the rumored new charging port on the bottom of the phone, as well as the relocated 3.5mm headphone jack.

The schematics were even more compelling. We saw, for the first time, the alleged plans that were used to create the bodies published a day earlier. Better yet, the image was stamped with Apple’s name on the bottom. That is, by no means, a reason to believe the sketches are legitimate, but they are highly detailed and, as we discovered, match the leaked backsides perfectly.

We decided to take all of the leaked information we’ve seen so far and investigate each part, piece by piece. Surprisingly, we were able to build and render an image of, what we believe to be, the iPhone 5 from all of this data.

There is a caveat, of course. Someone, somewhere, could have drafted up the schematics and a manufacturer could have used those plans to build the leaked back parts we saw this week. That seems like an expensive and intricate endeavor with no true purpose, however. Worse, it would mean that every seemingly legitimate leak we’ve seen is fake.

Without further ado, let’s dive in and, for the first time, take a look at what the iPhone 5 will look like, based on the leaked materials.

iPhone 5 Leaked Schematic

I flattened, scaled, and printed the above leaked iPhone 5 schematic and compared it with an iPhone 4S as well as an iPhone 4 schematic. I then printed out similar schematics for the iPhone 4S, which you’ll see comparisons with below.

iPhone 5 Schematic Comparison

If this schematic is correct, then it does show that the iPhone 5 will have a height increase of around 0.3 inches, bumping it up to around 4.8 inches from the iPhone 4S height of 4.5 inches. The screen also appears to have a diagonal increase of around 0.5 inches, bumping it up to around 4 inches from the current diagonal measurement of 3.5 inches.

iPhone 5 Parts Leak

This is one of the photos of some iPhone 5 parts that have been leaked. If the above leaked schematic is overlaid and compared with the leak, it appears to line up correctly:

In the instance above, you can see that I’ve overlaid the schematic on top of both the screen and the body. Both match up perfectly. Taking that into consideration, I decided to build a render of what the iPhone 5 might look like in comparison to the current generation iPhone 4S.

iPhone 5 Rough Mock Up

Based on the schematic and measurements above, here is a scaled render of what the iPhone 5 might potentially look like:

The new dimensions are clearly visible in our render above, so we decided to bring it to life. The above mockup was then printed to scale and compared with a real iPhone 4S:

Contrary to arguments that the iPhone 5 will be too big if Apple decides to increase the screen size, we found that it should be rather palm friendly. Below is a photo that roughly illustrates how the iPhone 5 will fit into your hands:

iPhone 5 Full Front Mock Up

The larger rumored 4.0 inch screen coupled with the rumored 1136 x 640 resolution provided an increased pixel height of 176 pixels. As rumors have suggested, this height increase appears to allow for an additional row of icons at the bottom. Or, if iOS 6 permits, perhaps it will allow room for widgets.
We’ll know more about that after WWDC later this month. I also decided to take this a step further and render a picture of what the new homescreen will look like. You can see that below.

Here’s the mock-up of the iPhone 5’s homescreen. If we take that and compare it to the iPhone 4’s homescreen, you’ll see the large amount of added pixel space. In the image below, the iPhone 5 mock-up has a 1136 x 640-pixel screen resolution, while the iPhone 4 has its standard 960 x 640-pixel resolution homescreen.

How about a better render comparison? Here is the iPhone 5 mock-up in comparison to the iPhone 4. Again, you’ll note the clearly visible height increase; the renders were made with the exact specifications taken from the leaked schematics and body parts above. The above mock-ups were once again printed to scale and compared with a real iPhone 4S. The above image shows the iPhone 4 in my hand. Below are photos of a cut-out version of the scaled iPhone 5 mock-up.

As you can see from what we’ve discovered, an iPhone 5 with a larger home screen isn’t only plausible, it’s completely possible with the dimensions that have already been spilled on the Web. Those dimensions have allowed us to create a replica of the new iPhone. As a forum poster on The Verge explained recently, it’s entirely possible for Apple to launch this phone without upsetting developers. Much like when it launched the new iPad, there will be a bit of space around some applications that are not optimized for the new real estate at first. The pixel density and the aspect ratio of the graphics are the same, however; there’s just an additional 176px worth of height.

There you have it, folks: that’s a look at the iPhone 5. There are two sides to every story. This render either means that we’ve discovered and built an accurate render of the new iPhone 5, or that every leaked part and schematic we’ve seen so far is fake. We’re leaning towards the former.

Print Your Own Life-Size iPhone 5

Curious to see what our iPhone 5 render would look like in real life? Simply download our printable portrait or landscape versions of the mock-up and see for yourself! The dimensions of the printables are a standard 8.5″ x 11″.
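The resolution arithmetic in that paragraph is easy to check. Here is a small, purely illustrative Python snippet; the numbers are the rumored 1136 x 640 panel versus the shipping 960 x 640 one quoted above:

# Sanity check of the screen math quoted above (illustrative only).
old_w, old_h = 640, 960     # iPhone 4/4S Retina panel
new_w, new_h = 640, 1136    # rumored iPhone 5 panel

print(new_h - old_h)        # 176 extra pixels of height
print(old_h / old_w)        # 1.5   -> 3:2 aspect ratio
print(new_h / new_w)        # 1.775 -> close to 16:9

Because the width and the pixel density stay fixed, existing apps could run unmodified with the extra 176 rows left as padding, which is exactly the developer-compatibility argument made above.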
/// This is the main SNR calculator. @Override protected double calculateBottomStage() { double snr = 0.0; long videoCnt = 0; for (ImgQualityDataCache.videoDataContainer data : cache.cachedVideoData) { final Vector<HashMap<Integer,Double>> avgFG = data.avgFG; final Vector<Double> avgBG = data.avgBG; final Vector<Double> stdBG = data.stdBG; long noFGs = 0; double l_snr = 0.; for (int time=0; time < avgFG.size(); ++time) { if (stdBG.get(time) == 0.0) continue; for (Double fg : avgFG.get(time).values()) { l_snr += Math.abs(fg - avgBG.get(time)) / stdBG.get(time); ++noFGs; } } if (noFGs > 0) { l_snr /= (double)noFGs; log.info("SNR for video "+data.video+": "+l_snr); snr += l_snr; ++videoCnt; } else log.info("SNR for video "+data.video+": Couldn't calculate average SNR because there are missing labels."); } if (videoCnt > 0) { snr /= (double)videoCnt; log.info("SNR for dataset: "+snr); } else log.info("SNR for dataset: Couldn't calculate average SNR because there are missing labels."); return (snr); }
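Restated compactly, the quantity this bottom stage computes is the following (the symbols are ad-hoc names for the code's variables, not notation from the source):

$$ \mathrm{SNR} \;=\; \frac{1}{V}\sum_{v=1}^{V}\;\frac{1}{N_v}\sum_{t:\,\sigma^{BG}_{v,t}>0}\;\sum_{i \in FG(v,t)} \frac{\bigl|\mu^{FG}_{v,t,i}-\mu^{BG}_{v,t}\bigr|}{\sigma^{BG}_{v,t}} $$

where \(\mu^{FG}_{v,t,i}\) is the average foreground intensity of label \(i\) in frame \(t\) of video \(v\), \(\mu^{BG}_{v,t}\) and \(\sigma^{BG}_{v,t}\) are that frame's background mean and standard deviation, \(N_v\) is the number of foreground terms counted for video \(v\), and \(V\) counts only the videos that contributed at least one term (the others are logged and skipped, as in the code).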
// consoleIndexHandler displays the console's index page. func consoleIndexHandler(w http.ResponseWriter, r *http.Request) { type install struct { AccountID int InstallationID int Type string Name string CanDisable bool State string } page := struct { Title string Email string Installs []install HasSubscription bool NewCustomer bool }{Title: "Console"} session := session.FromContext(r.Context()) if !session.LoggedIn() { http.Redirect(w, r, "/gh/login", http.StatusFound) return } user := r.Context().Value(userCtxKey{}).(*users.User) page.Email = user.Email if r.FormValue("success") != "" { page.NewCustomer = true } ghUser, _, err := user.GHClient.Users.Get(r.Context(), "") if err != nil { logger.WithError(err).Info("could not get github user, reattempting oauth flow") http.Redirect(w, r, "/gh/login", http.StatusFound) return } ghMemberships, _, err := user.GHClient.Organizations.ListOrgMemberships(r.Context(), &github.ListOrgMembershipsOptions{State: "active"}) if err != nil { logger.WithError(err).Info("could not get github list org memberships, reattempting oauth flow") http.Redirect(w, r, "/gh/login", http.StatusFound) return } ei, err := user.EnabledInstallations() if err != nil { logger.WithError(err).Error("could not get enabled installations") errorHandler(w, r, http.StatusInternalServerError, "") return } enabledInstallations := make(map[int]bool) for _, installationID := range ei { enabledInstallations[installationID] = true } accountIDs := []int{*ghUser.ID} page.Installs = append(page.Installs, install{ AccountID: *ghUser.ID, Type: "Personal", Name: *ghUser.Login, CanDisable: true, }) for _, m := range ghMemberships { accountIDs = append(accountIDs, *m.Organization.ID) install := install{ AccountID: *m.Organization.ID, Type: "Organisation", Name: *m.Organization.Login, } page.Installs = append(page.Installs, install) } gciInstalls, err := gciClient.ListInstallations(accountIDs...) if err != nil { logger.WithError(err).Error("could not list installations") errorHandler(w, r, http.StatusInternalServerError, "") return } for i := range page.Installs { page.Installs[i].State = "New" for _, gciInstall := range gciInstalls { if gciInstall.AccountID != page.Installs[i].AccountID { continue } page.Installs[i].InstallationID = gciInstall.InstallationID page.Installs[i].State = "Disabled" if _, ok := enabledInstallations[gciInstall.InstallationID]; ok { page.Installs[i].State = "Enabled" delete(enabledInstallations, gciInstall.InstallationID) } } } for installationID := range enabledInstallations { page.Installs = append(page.Installs, install{ InstallationID: installationID, Type: "Orphaned", Name: fmt.Sprintf("Unknown, Installation ID %v", installationID), State: "Enabled", }) } customer, err := user.StripeCustomer() switch { case err != nil: user.Logger.WithError(err).Error("could not get stripe customer") errorHandler(w, r, http.StatusInternalServerError, "") return case customer != nil: subs := user.StripeSubscriptions(customer) for _, sub := range subs { if sub.CancelledAt.IsZero() { page.HasSubscription = true } } } if err := templates.ExecuteTemplate(w, "console-index.tmpl", page); err != nil { logger.WithError(err).Error("error parsing console-index template") } }
def run(matrix_size=13, show_all_clusters=False, no_overlap=True, cluster_min_size=3, x_frequency=5): print("") matrix = execute_timed(make_matrix, matrix_size, x_frequency, message="Generating random " + cross(matrix_size) + " matrix containing 'X' and 'O'") clusters = execute_timed(get_clusters, matrix, cluster_min_size, message="Searching for square shaped clusters that are at least " + cross(cluster_min_size)) if no_overlap: clusters = execute_timed(filter_overlap, clusters, message="Filtering overlapping clusters") print_result(matrix, clusters, show_all_clusters)
You gotta check this video out! It’s an ode to female programmers (they exist, really!) and it’s unbelievably hilarious if you’ve ever typed a line of code before in your life. Also, the guy rapping has a smooth delivery and helps make it a good song in its own right. And lastly, the ‘coder girl’ likes to get her “code on” with a MacBook Pro, giving us a great excuse to post it for your enjoyment.

Some of the lines we liked included:

It feels like my first ‘hello world’, cuz, she’s my coder girl
When we compile she’s easy to interpret, a cross-platform version I can work with
she’s not wrapped in flash, all she wants is a java and a shell to bash
and it always leads to an overflow... when it’s run time, and we take it slow
wanna get it in where I fit in like a plug-in
[Sidenote: love the shoutout to Too $hort]
and it ain’t hard to like how she writes with a pretty interface
plus her source is tight
she got a data stack that’s straight stuntin’, no mismatch exceptions or debuggin’
invariably, how would I pass this? well my coder girl grants me root access
def minion_mods( opts, context=None, utils=None, whitelist=None, initial_load=False, loaded_base_name=None, notify=False, static_modules=None, proxy=None, ): if not whitelist: whitelist = opts.get("whitelist_modules", None) ret = LazyLoader( _module_dirs(opts, "modules", "module"), opts, tag="module", pack={"__context__": context, "__utils__": utils, "__proxy__": proxy}, whitelist=whitelist, loaded_base_name=loaded_base_name, static_modules=static_modules, extra_module_dirs=utils.module_dirs if utils else None, ) ret.pack["__salt__"] = ret providers = opts.get("providers", False) if providers and isinstance(providers, dict): for mod in providers: try: funcs = raw_mod(opts, providers[mod], ret) except TypeError: break else: if funcs: for func in funcs: f_key = "{}{}".format(mod, func[func.rindex(".") :]) ret[f_key] = funcs[func] if notify: with salt.utils.event.get_event("minion", opts=opts, listen=False) as evt: evt.fire_event( {"complete": True}, tag=salt.defaults.events.MINION_MOD_REFRESH_COMPLETE ) return ret
package ptydp

import (
	"context"
	"os"
	"os/signal"
	"time"

	"golang.org/x/sys/unix"
	"google.golang.org/grpc"
	k8sDP "k8s.io/kubernetes/pkg/kubelet/apis/deviceplugin/v1beta1"

	"arhat.dev/kube-host-pty/pkg/server"
	"arhat.dev/kube-host-pty/pkg/util"
	"arhat.dev/kube-host-pty/pkg/util/log"
)

const (
	Name = "pty-device-plugin"
)

func NewCmd() (*util.Command, error) {
	var configFile string
	opt := &Options{}
	optFromConfigFile := &Options{}

	cmd := util.DefaultCmd(
		Name, optFromConfigFile, nil,
		func(ctx context.Context, exit context.CancelFunc) error {
			opt.merge(optFromConfigFile)
			return run(ctx, exit, opt)
		},
	)

	cmd.Flags().StringVarP(&opt.KubeletSocket, "kubelet-unix-sock", "k", k8sDP.KubeletSocket, "kubelet service unix sock listening address")
	cmd.Flags().StringVarP(&opt.ListenSocket, "plugin-listen-unix-sock", "l", k8sDP.DevicePluginPath+"arhat.sock", "unix sock address to listen")
	cmd.Flags().StringVarP(&opt.PTSSocketDir, "pts-unix-sock-dir", "d", "/var/run/arhat/pts", "dir to host pts unix sockets")
	cmd.Flags().Uint8VarP(&opt.MaxPtyCount, "max-pty", "m", 10, "maximum pty count allowed on this host")
	cmd.Flags().StringVarP(&opt.Shell, "shell", "s", "sh", "default shell for pty session")
	cmd.Flags().StringVarP(&configFile, "config", "c", "", "set config file")

	return cmd, nil
}

func run(ctx context.Context, exit context.CancelFunc, opt *Options) error {
	addressField := log.String("addr", opt.ListenSocket)
	log.D("creating device-plugin service", addressField, log.String("api", k8sDP.Version))

	srv := grpc.NewServer([]grpc.ServerOption{}...)

	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, os.Interrupt, unix.SIGQUIT)
	util.Workers.Add(func(func()) (_ interface{}, _ error) {
		for range sigCh {
			srv.GracefulStop()
			exit()
			return
		}
		return
	})

	k8sDP.RegisterDevicePluginServer(srv, server.NewPtyDevicePluginServer(opt.Shell, opt.PTSSocketDir, opt.MaxPtyCount))

	errCh := util.Workers.Add(func(sigContinue func()) (_ interface{}, err error) {
		log.I("ListenAndServe device-plugin", addressField)
		defer log.I("ListenAndServe device-plugin exited", addressField)

		if err = util.GRPCListenAndServe(srv, "unix", opt.ListenSocket); err != nil {
			log.E("ListenAndServe device-plugin failed", addressField)
		}
		return
	})[0].Error

	select {
	case err := <-errCh:
		return err
	default:
		// continue
	}

	conn, err := util.DialGRPC(ctx, "unix", opt.ListenSocket, 5*time.Second, nil)
	if err != nil {
		log.E("dial device-plugin service failed", log.Err(err))
		return err
	} else {
		_ = conn.Close()
	}

	util.InitGraceUpgrade(exit, 30*time.Second, unix.SIGHUP)

	if err := opt.registerResource(ctx); err != nil {
		return err
	}

	return nil
}
/* initialize a memory block allocated for the user. the start and the end * of the block is initialized with the canary characters. if 'zero' is * true, the user memory area is zero-initialized, otherwise it is also * filled with the canary character to simulate garbage in memory. */ static void debugmalloc_memory_init(DebugmallocEntry *elem, bool zero) { unsigned char *real_mem = (unsigned char *) elem->real_mem; unsigned char *user_mem = (unsigned char *) elem->user_mem; unsigned char *canary1 = real_mem; unsigned char *canary2 = real_mem + debugmalloc_canary_size + elem->size; memset(canary1, debugmalloc_canary_char, debugmalloc_canary_size); memset(canary2, debugmalloc_canary_char, debugmalloc_canary_size); memset(user_mem, zero ? 0 : debugmalloc_canary_char, elem->size); }
# -*- coding: utf-8 -*-
from collections import OrderedDict
import inspect

from symbols import *
from debug_cp import *
from gb_utils.greenberry_search import GreenBerrySearch

L_USER = "<NAME>"

# another lex would be to identify blocks first; this is a side effect
MATH_OPS = ["+", "-", "*", "/"]
BOOLS = [S.TRUE, S.FALSE]
BOOL_OPS = [S.GREATER, S.LESS]
EOS = [S.NL, S.EOF]


class GreenBerryPlot(object):

    def __init__(self):
        print(self, "does not have an initialiser")

    def linear_plot(self, dataX, labelX, dataY, labelY):
        """Simple line plot."""
        try:
            import matplotlib.pyplot as plt

            plt.plot(dataX, dataY)
            plt.xlabel(labelX)
            plt.ylabel(labelY)
            plt.show()
        except ImportError:
            print("matplotlib is not installed")

    def plot_handling(self, i, words, line):
        """Handles plotting of points."""
        try:
            comma_i = GreenBerrySearch.search_symbol(i, 1, words, S.COMMA)[1]
            # Data may be given inline ("1-2-3") or as a quoted file name.
            if words[i + 1][0] != "'" and words[i + 1][0] != '"':
                dataX = list(map(float, words[i + 1].split("-")))
            else:
                file_name = words[i + 1][1:-1]
                with open(file_name, "r") as file:
                    dataX = list(map(float, file.read().split("-")))
            if words[comma_i + 1][0] != "'" and words[comma_i + 1][0] != '"':
                dataY = list(map(float, words[comma_i + 1].split("-")))
            else:
                file_name = words[comma_i + 1][1:-1]
                with open(file_name, "r") as file:
                    dataY = list(map(float, file.read().split("-")))
            labelX = GreenBerrySearch.search(i, 1, words, S.COMMA)
            labelY = GreenBerrySearch.search(comma_i, 1, words, [S.NL, S.EOF])
            self.linear_plot(dataX, labelX, dataY, labelY)
        except Exception:
            print(E.PLOT, line)
For some reason, photoshopping people’s faces onto other people’s heads is funny. I don’t know why, it just is. I was thinking about this, and it occurred to me that it should be possible to do it completely automatically, with face detection. So, instead of searching the literature to find the inevitable excellent team of researchers who had done it years ago far better than I ever could, I did what I usually do: I jumped in and started writing code. A basic solution turns out to be pretty easy; I had a working prototype within an hour.

How it Works

The program is only 90 lines long, and its operation is very simple. It identifies the faces in the image, picks one at random, and pastes that face onto all the other face locations, adjusting the colour and blending a little at the edges.

Finding Faces

To automatically detect faces in the image, I used OpenCV, which has handy Python bindings and includes a nice face detection system using a Haar feature-based cascade classifier. I don’t fully understand how a Haar feature-based cascade classifier works, but for the purposes of this application, that’s not really required. The important thing is that it scans over the image in increasingly large windows, looking for things that look like faces. When it’s done, it returns a list of boxes which it thinks have faces in them.

Blending

Once you know where the faces are, swapping them is just a matter of copying the contents of one face box into the location of another. But of course that looks terrible, because you can easily see the boundary. I did two things to deal with this.

First, before pasting, the colour of the image is adjusted so there’s no sharp colour change at the boundary. This is done very simply, by taking the average colour of both faces and adjusting the brightness levels of the channels in the new face such that it has the same average colour as the place it’s about to be pasted. (Later I wrote a more complex version that used k-means clustering to find the skin tone and adjust based on that, but it didn’t work noticeably better than the mean-colour method, so I reverted to the simpler system.)

The other thing I did to hide the boundary is use an alpha mask when pasting, so the face isn’t just pasted in a square. The mask I used is very simple, just a circle with smoothly fading edges.

Considering the program’s simplicity, I was surprised at the quality of the results. Simon and River don’t have their heads pointed straight at the camera, so the system pretty much fails completely for them. I think Mal is my favourite here, though; to me he kind of looks like Teal'c from SG-1. Here’s the result of attempting a more challenging image, a group photo of my research group, the Intelligent Modelling and Analysis Group at UoN CS. (You can click on the picture for the full-size version.) So, some are better than others here. One face isn’t recognised, and there are a few false positives on people’s clothing. But all in all, for a quick hack and zero human input, I think it’s not bad.

Code

Here’s the full code listing. It’s written for Python 2.6. To get the dependencies, run something like:

sudo apt-get install python-opencv libcv-dev opencv-doc

You’ll also need to download the alpha mask image and put it in the same directory, renamed as circlemask.png. I want to give credit and thanks to the various StackOverflow users and blogs whose code snippets and inspiration I used, but I sadly didn’t keep track of them, because I never thought this program would work well enough to share.
So, here’s to you, unnamed internet heroes.
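The post's actual 90-line listing did not survive in this copy, so here is a minimal re-sketch of the pipeline it describes: Haar-cascade detection, a randomly chosen source face, mean-colour matching, and a soft circular alpha mask. It uses the modern cv2 bindings rather than the old python-opencv API; the swap_faces name, the bundled cascade file and the circlemask.png asset are assumptions, not the author's code.

# Minimal re-sketch of the face swap described above -- NOT the original
# listing. Assumes OpenCV's bundled frontal-face Haar cascade and a soft
# circular alpha mask image ("circlemask.png"), as in the article.
import random
import cv2
import numpy as np

def swap_faces(image_path, mask_path="circlemask.png"):
    img = cv2.imread(image_path)
    cascade = cv2.CascadeClassifier(
        cv2.data.haarcascades + "haarcascade_frontalface_default.xml")
    faces = cascade.detectMultiScale(
        cv2.cvtColor(img, cv2.COLOR_BGR2GRAY),
        scaleFactor=1.1, minNeighbors=5)
    if len(faces) < 2:
        return img

    # Pick one detected face at random; it gets pasted over all the others.
    sx, sy, sw, sh = faces[random.randrange(len(faces))]
    src = img[sy:sy + sh, sx:sx + sw].astype(np.float32)

    mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)  # assumed asset

    for (x, y, w, h) in faces:
        face = cv2.resize(src, (w, h))
        target = img[y:y + h, x:x + w].astype(np.float32)
        # Mean-colour matching: scale each channel so the pasted face has
        # the same average colour as the region it replaces.
        face *= target.mean(axis=(0, 1)) / (face.mean(axis=(0, 1)) + 1e-6)
        # Soft circular alpha mask hides the square paste boundary.
        alpha = cv2.resize(mask, (w, h)).astype(np.float32)[..., None] / 255.0
        img[y:y + h, x:x + w] = np.clip(
            alpha * face + (1 - alpha) * target, 0, 255).astype(np.uint8)
    return img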
/** * When resuming, restart the progress/error UI if necessary by re-reporting previous values */ @Override public void onResume() { super.onResume(); mPaused = false; if (mState != STATE_START) { reportProgress(mState, mProgressException); } }
// PrivateKeyToPEM converts a private key to PEM encoded PKCS #8 data. func PrivateKeyToPEM(privateKey crypto.PrivateKey) ([]byte, error) { privateKeyBytes, err := x509.MarshalPKCS8PrivateKey(privateKey) if err != nil { return nil, err } block := &pem.Block{ Type: "PRIVATE KEY", Bytes: privateKeyBytes, } return pemEncode(block) }
Testing three proposed DNA barcodes for the wood identification of Dalbergia odorifera T. Chen and Dalbergia tonkinensis Prain

Abstract

Dalbergia odorifera T. Chen is a first-grade state-protected plant in China. However, it is difficult to distinguish it by wood anatomical features from the closely related species Dalbergia tonkinensis Prain, which is of lower economic value. In this study, three potential DNA barcode sequences, namely rpoC1, trnH-psbA and the internal transcribed spacer (ITS), were used to differentiate wood of D. odorifera from D. tonkinensis. The average quantities of DNA extracted from twigs, sapwood and heartwood were 16.3, 11.5 and 6.0 ng mg⁻¹, respectively. The success rates of polymerase chain reaction (PCR) amplification for the three loci, namely ITS, trnH-psbA and rpoC1, were 62.5, 100 and 81.25%, respectively. The success rate of bidirectional sequencing of the amplified products was 100% for all three loci. The identification power of the three proposed DNA barcodes was assessed with BLAST, a tree-based method and the TAXONDNA method. The interspecific differences of the trnH-psbA region were greater than the intraspecific variations. Moreover, the identification power of trnH-psbA was higher than that of the ITS and rpoC1 regions at the species level. Finally, the trnH-psbA region is proposed as a DNA barcode for wood identification between D. odorifera and D. tonkinensis.
// - Snapshot size must be finite, so we can't keep track of infinite child streams. Consider
//   this a debugging tool. If you really need guaranteed *all*, you need to build a service for it.
// - The second return value is a truncated flag.
func (s *Store) ChildStreams() ([]eh.StreamName, bool) {
	s.mu.Lock()
	defer s.mu.Unlock()

	ourName := s.version.Stream()

	children := []eh.StreamName{}
	for _, childName := range s.state.ChildStreams {
		children = append(children, ourName.Child(childName))
	}

	return children, len(children) == maxKeepTrackOfChildren
}
Washington, D.C.-based rapper Wale told Revolt TV that he believes the Black Lives Matter movement is misguided, emphasizing that black people should “love ourselves more” before we can expect anyone else to.

“It starts with us. It’s been easy to blame other people for stuff like that and point the finger. But Black lives should matter to black people,” the Grammy Award-winning rapper said.

Wale’s comments came on the same day that three Black Lives Matter leaders visited the White House and met with President Obama’s senior adviser Valerie Jarrett.

“I don’t really care about trying to rally up people for the wrong reasons,” the D.C. native said. “I want us to realize the power that we got within ourselves and to love ourselves more. This should really be the first objective as far as understanding the whole ‘black lives’ thing. I don’t think we love ourselves enough as much as we used to,” Wale concluded.

Other black rappers like Kendrick Lamar have similarly dismissed Black Lives Matter. “When we don’t have respect for ourselves, how do we expect them to respect us?” Lamar said in an interview with Billboard. “It starts from within. Don’t start with just a rally, it don’t start from looting—it starts from within.”

A few months ago, rap mogul Sean “Diddy” Combs said, “We as a people don’t have any respect for our own Black lives. If #BLACKLIVESMATTER SO AS A PEOPLE LETS PRACTICE WHAT WE PREACH!!!”

In July, Wale was invited to the White House to perform at the Reach Higher initiative event for First Lady Michelle Obama’s 2015 Beating the Odds Summit, aimed at celebrating college-bound students who overcame poverty and homelessness.

An August Rasmussen poll showed that just 31 percent of black respondents said the term “black lives matter” aligns with their own beliefs.
/** * Performs a search with the parameters currently set * * @return The Solr QueryResponse * @throws IOException if there is a communication error with the server * @throws SolrServerException if there is an error on the server */ public QueryResponse search() throws IOException, SolrServerException { if (StringUtils.isEmpty(solrQuery.get("q"))) { withParam("hl.q", "*:*"); } return client.query(solrQuery); }
/** * List templates * * @see <a href="https://developers.messagebird.com/api/integrations/#list-templates">List templates</a> * @author ssk910 */ public class ExampleListTemplates { public static void main(String[] args) { if (args.length == 0) { System.out.println("Please specify your access key example : java -jar <this jar file> test_accesskey"); return; } // First create your service object final MessageBirdService wsr = new MessageBirdServiceImpl(args[0]); // Add the service to the client final MessageBirdClient messageBirdClient = new MessageBirdClient(wsr); try { System.out.println("Retrieving WhatsApp Template list"); final TemplateList templateList = messageBirdClient.listWhatsAppTemplates(); System.out.println(templateList.toString()); } catch (GeneralException | UnauthorizedException exception) { exception.printStackTrace(); } } }
/** * Tests for mbean proxy * * @author <a href="mailto:[email protected]">Juha Lindfors</a>. * @version $Revision: 57200 $ */ public class MBeanProxyTEST extends TestCase { public MBeanProxyTEST(String s) { super(s); } public void testGetWithServer() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:name=test"); server.registerMBean(new Trivial(), oname); TrivialMBean mbean = (TrivialMBean)MBeanProxy.get( TrivialMBean.class, oname, server); mbean.doOperation(); } public void testGetWithAgentID() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); String agentID = AgentID.get(server); ObjectName oname = new ObjectName("test:name=test"); server.registerMBean(new Trivial(), oname); TrivialMBean mbean = (TrivialMBean)MBeanProxy.get( TrivialMBean.class, oname, agentID); mbean.doOperation(); } public void testCreateWithServer() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); TrivialMBean mbean = (TrivialMBean)MBeanProxy.create( Trivial.class, TrivialMBean.class, oname, server); mbean.doOperation(); } public void testCreateWithAgentID() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); String agentID = AgentID.get(server); TrivialMBean mbean = (TrivialMBean)MBeanProxy.create( Trivial.class, TrivialMBean.class, oname, agentID); mbean.doOperation(); } public void testProxyInvocations() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:name=test"); server.registerMBean(new Trivial(), oname); TrivialMBean mbean = (TrivialMBean)MBeanProxy.get( TrivialMBean.class, oname, AgentID.get(server)); mbean.doOperation(); mbean.setSomething("JBossMX"); assertEquals("JBossMX", mbean.getSomething()); } public void testProxyInvocationWithConflictingMBeanAndContextMethods() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); server.registerMBean(new Trivial(), oname); TrivialMBean mbean = (TrivialMBean)MBeanProxy.get( TrivialMBean.class, oname, AgentID.get(server)); mbean.getMBeanServer(); assertTrue(mbean.isGMSInvoked()); } public void testContextAccess() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); Trivial2MBean mbean = (Trivial2MBean)MBeanProxy.create( Trivial2.class, Trivial2MBean.class, oname, server ); ProxyContext ctx = (ProxyContext)mbean; ctx.getMBeanServer(); } public void testProxyInvocationBetweenServers() throws Exception { MBeanServer server1 = MBeanServerFactory.createMBeanServer(); MBeanServer server2 = MBeanServerFactory.createMBeanServer(); ObjectName oname1 = new ObjectName("test:name=target"); ObjectName oname2 = new ObjectName("test:name=proxy"); // createMBean on server1 and retrieve a proxy to it Trivial2MBean mbean = (Trivial2MBean)MBeanProxy.create( Trivial2.class, Trivial2MBean.class, oname1, server1 ); //bind the proxy to server2 server2.registerMBean(mbean, oname2); // invoke on server2 server2.invoke(oname2, "doOperation", null, null); // check that server1 received the invocation assertTrue(((Boolean)server1.getAttribute(oname1, "OperationInvoked")).booleanValue()); } public void testSimultaneousTypedAndDetypedInvocations() throws Exception { MBeanServer server = 
MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); Trivial2MBean mbean = (Trivial2MBean)MBeanProxy.create( Trivial2.class, Trivial2MBean.class, oname,server ); // typed proxy interface mbean.setSomething("Kissa"); assertTrue(mbean.getSomething().equals("Kissa")); // detyped proxy interface DynamicMBean mbean2 = (DynamicMBean)mbean; mbean2.setAttribute(new Attribute("Something", "Koira")); assertTrue(mbean2.getAttribute("Something").equals("Koira")); // direct local server invocation server.setAttribute(oname, new Attribute("Something", "Kissa")); assertTrue(server.getAttribute(oname, "Something").equals("Kissa")); // typed proxy interface invocation mbean.doOperation(); assertTrue(mbean.isOperationInvoked()); mbean.reset(); // detyped proxy invocation mbean2.invoke("doOperation", null, null); assertTrue(((Boolean)mbean2.getAttribute("OperationInvoked")).booleanValue()); mbean2.invoke("reset", null, null); // direct local server invocation server.invoke(oname, "doOperation", null, null); assertTrue(((Boolean)server.getAttribute(oname, "OperationInvoked")).booleanValue()); } public void testContextAccessToMBeanServer() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); Trivial2MBean mbean = (Trivial2MBean)MBeanProxy.create( Trivial2.class, Trivial2MBean.class, oname, server ); // query the server this mbean is registered to ProxyContext ctx = (ProxyContext)mbean; MBeanServer srvr = ctx.getMBeanServer(); Set mbeans = srvr.queryMBeans(new ObjectName("test:*"), null); ObjectInstance oi = (ObjectInstance)mbeans.iterator().next(); assertTrue(oi.getObjectName().equals(oname)); assertTrue(srvr.getAttribute( new ObjectName("JMImplementation:type=MBeanServerDelegate"), "ImplementationName" ).equals("JBossMX")); } public void testArbitraryInterfaceWithProxy() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); RequiredModelMBean rmm = new RequiredModelMBean(); Resource resource = new Resource(); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(rmm, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); mbean.setAttributeName("foo"); mbean.setAttributeName2("bar"); assertTrue(mbean.getAttributeName2().equals("bar")); assertTrue(mbean.doOperation().equals("tamppi")); } /** * This test shows how to override the default exception handling for * proxy invocations. The default handling for exceptions that are not * declared as part of the proxy typed interface (such as InstanceNotFound * or AttributeNotFound exceptions) are rethrown as unchecked * RuntimeProxyExceptions. See org.jboss.mx.proxy.DefaultExceptionHandler * for details. This behavior can be changed at runtime through the proxy * context interface setExceptionHandler() method. 
*/ public void testCustomExceptionHandler() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); ObjectName oname2 = new ObjectName("test:test=test2"); RequiredModelMBean rmm = new RequiredModelMBean(); Resource resource = new Resource(); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); // create two identical mbeans server.registerMBean(rmm, oname); server.registerMBean(rmm, oname2); ProxyContext ctx = (ProxyContext)MBeanProxy.get( MyInterface.class, oname, server ); // override InstanceNotFound exception to redirect from test=test instance // to test=test2 instance ctx.setExceptionHandler(new DefaultExceptionHandler() { public Object handleInstanceNotFound(ProxyContext proxyCtx, InstanceNotFoundException e, Method m, Object[] args) throws Exception { return proxyCtx.getMBeanServer().invoke(new ObjectName("test:test=test2"), m.getName(), args, null); } }); // unregister mbean 1 server.unregisterMBean(oname); // invocation attempt to mbean1 should now redirect to mbean2 MyInterface mbean = (MyInterface)ctx; assertTrue(mbean.doOperation().equals("tamppi")); } public void testObjectToStringOnProxy() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); RequiredModelMBean rmm = new RequiredModelMBean(); Resource resource = new Resource(); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(rmm, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); mbean.toString(); Object o = (Object)mbean; o.toString(); } public void testObjectToStringOverride() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); RequiredModelMBean rmm = new RequiredModelMBean(); ResourceOverride resource = new ResourceOverride(); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(rmm, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); assertTrue(mbean.toString().equals("Resource")); Object o = (Object)mbean; assertTrue(o.toString().equals("Resource")); } public void testObjectHashCodeOnProxy() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); RequiredModelMBean rmm = new RequiredModelMBean(); Resource resource = new Resource(); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(rmm, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); mbean.hashCode(); Object o = (Object)mbean; o.toString(); } public void testObjectHashCodeOverride() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); RequiredModelMBean rmm = new RequiredModelMBean(); ResourceOverride resource = new ResourceOverride(); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(rmm, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); assertTrue(mbean.hashCode() == 10); Object o = (Object)mbean; assertTrue(o.hashCode() == 10); } public void testObjectEqualsOnProxy() throws Exception 
{ MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); ModelMBean mmbean = new XMBean(); Resource resource = new Resource(); mmbean.setManagedResource(resource, "ObjectReference"); mmbean.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(mmbean, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); MyInterface mbean2 = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); assertTrue(mbean.equals(mbean)); assertTrue(!mbean.equals(mbean2)); assertTrue(!mbean2.equals(mbean)); } public void testObjectEqualsOverride() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); ModelMBean rmm = new RequiredModelMBean(); ResourceOverride resource = new ResourceOverride("state"); rmm.setManagedResource(resource, "ObjectReference"); rmm.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(rmm, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); assertTrue(mbean.equals(mbean)); } public void testAttributeNotFoundOnTypedProxy() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); ModelMBean mmbean = new XMBean(); ResourceIncorrectInfo resource = new ResourceIncorrectInfo(); mmbean.setManagedResource(resource, "ObjectReference"); mmbean.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(mmbean, oname); MyInterface mbean = (MyInterface)MBeanProxy.get( MyInterface.class, oname, server ); ProxyContext ctx = (ProxyContext)mbean; ctx.setExceptionHandler(new DefaultExceptionHandler()); try { mbean.setAttributeName2("some name"); } catch (IllegalArgumentException e) { // expected // by default, if no such attribute 'AttributeName2' exists in the // MBean metadata (as is the case with ResourceIncorrectInfo), the // MBeanProxy invocation handler falls back attempting to execute // MBeanServer.invoke() which fails with IAE. 
} } public void testAttributeNotFoundOnDeTypedProxy() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); ModelMBean mmbean = new XMBean(); ResourceIncorrectInfo resource = new ResourceIncorrectInfo(); mmbean.setManagedResource(resource, "ObjectReference"); mmbean.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(mmbean, oname); DynamicMBean mbean = (DynamicMBean)MBeanProxy.get(oname, server); ProxyContext ctx = (ProxyContext)mbean; ctx.setExceptionHandler(new DefaultExceptionHandler()); try { mbean.setAttribute(new Attribute("AttributeName2", "some name")); } catch (AttributeNotFoundException e) { // expected } } public void testInheritanceInTypedProxyArgs() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); XMBean mmbean = new XMBean(); ExtendedResource resource = new ExtendedResource(); mmbean.setManagedResource(resource, "ObjectReference"); mmbean.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(mmbean, oname); MyInterface2 mbean = (MyInterface2)MBeanProxy.get( MyInterface2.class, oname, server); assertTrue(mbean.doOperation().equals("doOperation")); try { assertTrue(mbean.executeThis("executeThis").equals("executeThis")); } catch (ClassCastException e) { fail("KNOWN ISSUE: proxy generates incorrect JMX invocation " + "signature in case argument subclasses are used."); } } public void testInheritanceInProxyAttribute() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); XMBean mmbean = new XMBean(); ExtendedResource resource = new ExtendedResource(); mmbean.setManagedResource(resource, "ObjectReference"); mmbean.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(mmbean, oname); MyInterface2 mbean = (MyInterface2)MBeanProxy.get( MyInterface2.class, oname, server); mbean.setAttribute3("Attribute3"); assertTrue(mbean.getAttribute3().equals("Attribute3")); } public void testInheritanceInProxyReturnType() throws Exception { MBeanServer server = MBeanServerFactory.createMBeanServer(); ObjectName oname = new ObjectName("test:test=test"); XMBean mmbean = new XMBean(); ExtendedResource resource = new ExtendedResource(); mmbean.setManagedResource(resource, "ObjectReference"); mmbean.setModelMBeanInfo(resource.getMBeanInfo()); server.registerMBean(mmbean, oname); MyInterface2 mbean = (MyInterface2)MBeanProxy.get( MyInterface2.class, oname, server); assertTrue(mbean.runMe("runMe").equals("runMe")); } }
from collections import defaultdict
from math import factorial


def com(n, r):
    """Number of combinations C(n, r)."""
    return factorial(n) // (factorial(n - r) * factorial(r))


N = int(input())
ans = 0
counts = defaultdict(int)
# Two strings are anagrams iff their sorted characters match,
# so bucket every word by its sorted form.
for i in range(N):
    t = input()
    ts = ''.join(sorted(t))
    counts[ts] += 1
# Each bucket of k mutual anagrams contributes C(k, 2) pairs.
for k in counts.values():
    if k > 1:
        ans += com(k, 2)
print(ans)
package jp.co.trattoria.chapter1_4;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * Receives an array of File objects and sorts them by name, directories first.
 */
public class FileSort {

    /**
     * Sorts a mixed array of directories and files, directories first,
     * each group in ascending order by name.
     * @param fileList unsorted array of File objects
     * @return array of File objects sorted by name
     */
    File[] getFileListSort(final File[] fileList) {
        List<File> directorys = new ArrayList<File>();
        List<File> files = new ArrayList<File>();

        for (File f : fileList) {
            if (f.isDirectory()) {
                directorys.add(f);
            } else {
                files.add(f);
            }
        }

        directorys.sort((File a, File b) -> {
            return a.compareTo(b);
        });
        files.sort((File a, File b) -> {
            return a.compareTo(b);
        });

        List<File> result = new ArrayList<File>();
        result.addAll(directorys);
        result.addAll(files);

        return result.toArray(new File[0]);
    }
}
"""Functions to plot motion fields.""" import matplotlib.pylab as plt import matplotlib.colors as colors import numpy as np def quiver(UV, ax=None, geodata=None, **kwargs): """Function to plot a motion field as arrows. Parameters ---------- UV : array-like Array of shape (2,m,n) containing the input motion field. ax : axis object Optional axis object to use for plotting. geodata : dictionary Optional dictionary containing geographical information about the field. If geodata is not None, it must contain the following key-value pairs: .. tabularcolumns:: |p{1.5cm}|L| +-----------------+----------------------------------------------------+ | Key | Value | +=================+====================================================+ | projection | PROJ.4-compatible projection definition | +-----------------+----------------------------------------------------+ | x1 | x-coordinate of the lower-left corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | y1 | y-coordinate of the lower-left corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | x2 | x-coordinate of the upper-right corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | y2 | y-coordinate of the upper-right corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | yorigin | a string specifying the location of the first | | | element in the data raster w.r.t. y-axis: | | | 'upper' = upper border, 'lower' = lower border | +-----------------+----------------------------------------------------+ Other Parameters ---------------- step : int Optional resample step to control the density of the arrows. Default : 20 color : string Optional color of the arrows. This is a synonym for the PolyCollection facecolor kwarg in matplotlib.collections. Default : black Returns ------- out : axis object Figure axes. Needed if one wants to add e.g. text inside the plot. """ if ax is None: ax = plt # defaults step = kwargs.get("step", 20) quiver_keys = ["scale", "scale_units", "width", "headwidth", "headlength", "headaxislength", "minshaft", "minlength", "pivot", "color"] kwargs_quiver = {k: kwargs[k] for k in set(quiver_keys).intersection(kwargs)} kwargs_quiver["color"] = kwargs.get("color", "black") # prepare x y coordinates if geodata is not None: x = np.linspace(geodata['x1'], geodata['x2'], UV.shape[2]) y = np.linspace(geodata['y1'], geodata['y2'], UV.shape[1]) else: x = np.arange(UV.shape[2]) y = np.arange(UV.shape[1]) # reduce number of vectors to plot UV_ = UV[:, 0:UV.shape[1]:step, 0:UV.shape[2]:step] y_ = y[0:UV.shape[1]:step] x_ = x[0:UV.shape[2]:step] ax.quiver(x_, np.flipud(y_), UV_[0,:,:], -UV_[1,:,:], angles='xy', zorder=1e6, **kwargs_quiver) axes = plt.gca() if ax == plt else ax if geodata is None: axes.xaxis.set_ticklabels([]) axes.yaxis.set_ticklabels([]) return axes def streamplot(UV, ax=None, geodata=None, **kwargs): """Function to plot a motion field as streamlines. Parameters ---------- UV : array-like Array of shape (2, m,n) containing the input motion field. ax : axis object Optional axis object to use for plotting. geodata : dictionary Optional dictionary containing geographical information about the field. If geodata is not None, it must contain the following key-value pairs: .. 
tabularcolumns:: |p{1.5cm}|L| +-----------------+----------------------------------------------------+ | Key | Value | +=================+====================================================+ | projection | PROJ.4-compatible projection definition | +-----------------+----------------------------------------------------+ | x1 | x-coordinate of the lower-left corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | y1 | y-coordinate of the lower-left corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | x2 | x-coordinate of the upper-right corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | y2 | y-coordinate of the upper-right corner of the data | | | raster (meters) | +-----------------+----------------------------------------------------+ | yorigin | a string specifying the location of the first | | | element in the data raster w.r.t. y-axis: | | | 'upper' = upper border, 'lower' = lower border | +-----------------+----------------------------------------------------+ Other Parameters ---------------- density : float Controls the closeness of streamlines. Default : 1.5 color : string Optional streamline color. This is a synonym for the PolyCollection facecolor kwarg in matplotlib.collections. Default : black Returns ------- out : axis object Figure axes. Needed if one wants to add e.g. text inside the plot. """ if ax is None: ax = plt # defaults density = kwargs.get("density", 1.5) color = kwargs.get("color", "black") # prepare x y coordinates if geodata is not None: x = np.linspace(geodata['x1'], geodata['x2'], UV.shape[2]) y = np.linspace(geodata['y1'], geodata['y2'], UV.shape[1]) else: x = np.arange(UV.shape[2]) y = np.arange(UV.shape[1],0,-1) ax.streamplot(x, np.flipud(y), UV[0,:,:], -UV[1,:,:], density=density, color=color, zorder=1e6) axes = plt.gca() if ax == plt else ax if geodata is None: axes.xaxis.set_ticklabels([]) axes.yaxis.set_ticklabels([]) return axes
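A minimal usage sketch for the two helpers above, driven by a synthetic rotational motion field; the field itself and the parameter values are illustrative, and only the (2, m, n) shape convention and the step/density kwargs come from the docstrings:

# Synthetic (2, m, n) motion field: rigid rotation about the grid centre.
import numpy as np
import matplotlib.pylab as plt

m, n = 100, 120
yy, xx = np.mgrid[0:m, 0:n]
UV = np.stack([-(yy - m / 2), xx - n / 2]) / 20.0

ax = quiver(UV, step=10, color="black")   # arrows every 10th grid point
plt.show()

ax = streamplot(UV, density=1.5)          # streamlines of the same field
plt.show()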
from sys import stdin

N = int(stdin.readline())
for case in range(N):
    num, maxsum = map(int, stdin.readline().split())

    ds = sum(int(c) for c in str(num))  # current digit sum

    power = 0  # decimal position being examined
    sol = 0    # total amount added so far (the answer)
    for count in range(18):  # at most one step per decimal digit
        if ds <= maxsum:
            break
        digit = int(str(num)[-1 - power])
        if digit == 0:
            power += 1
            continue
        # Round the lowest nonzero digit up to the next multiple of
        # 10**(power + 1): it becomes 0 and a carry of 1 propagates up.
        ds -= digit
        add = 10 ** power * ((10 - digit) % 10)
        sol += add
        num += add
        power += 1
        # Every 9 the carry turned into a 0 drops the digit sum by 9 ...
        while int(str(num)[-1 - power]) == 0:
            power += 1
            ds -= 9
        # ... and the digit that finally absorbed the carry adds 1.
        ds += 1
    print(sol)
// ValidateLocale checks if the provided locale is one of Yelp's supported locales. func ValidateLocale(locale string) error { if _, ok := validLocales[locale]; !ok { return fmt.Errorf("Invalid locale provided: %s", locale) } return nil }
import { RouterModule, Routes } from '@angular/router';
import { NgModule } from '@angular/core';
import { BarrageComponent } from './barrage.component';
import { InfosDashboardComponent } from '../infos-dashboard/infos-dashboard.component';
import { MarcheComponent } from '../marche/marche.component';
import { PhytoComponent } from '../phyto/phyto.component';

const routes: Routes = [{
  path: '',
  children: [
    {
      path: 'Dashboard',
      component: InfosDashboardComponent,
    },
    {
      path: 'Barrage',
      component: BarrageComponent,
    },
    {
      path: 'Marche',
      component: MarcheComponent,
    },
    {
      path: 'Phyto',
      component: PhytoComponent,
    },
  ],
}];

@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class BarrageRoutingModule { }
/** * Test parsing output for a test run that produces INSTRUMENTATION_RESULT output. * <p/> * This mimics launch performance test output. */ public void testParse_instrumentationResults() { StringBuilder output = new StringBuilder(); addResultKey(output, "other_pss", "2390"); addResultKey(output, "java_allocated", "2539"); addResultKey(output, "foo", "bar"); addResultKey(output, "stream", "should not be captured"); addLine(output, "INSTRUMENTATION_CODE: -1"); Capture<Map<String, String>> captureMetrics = new Capture<Map<String, String>>(); mMockListener.testRunStarted(RUN_NAME, 0); mMockListener.testRunEnded(EasyMock.anyLong(), EasyMock.capture(captureMetrics)); injectAndVerifyTestString(output.toString()); assertEquals("2390", captureMetrics.getValue().get("other_pss")); assertEquals("2539", captureMetrics.getValue().get("java_allocated")); assertEquals("bar", captureMetrics.getValue().get("foo")); assertEquals(3, captureMetrics.getValue().size()); }
import { Action, BatchCall } from "../types";
import { all, call, put, select } from "redux-saga/effects";
import { startLoading } from "../actions/loading";
import { batchFinished } from "../actions/batchFinished";
import { handleCall } from "./handleCall";

export function* handleBatch({
  payload: { requests, onFinished }
}: Action<BatchCall>) {
  yield all(requests.map(action => call(handleCall, action)));
  if (onFinished) {
    yield all(
      typeof onFinished === "function"
        ? onFinished(yield select())
        : onFinished
    );
  }
  yield put(batchFinished());
}
/* Calculate X * Y exactly and store the result in *HI + *LO. It is given that the values are small enough that no overflow occurs and large enough (or zero) that no underflow occurs. */ static void mul_split (double *hi, double *lo, double x, double y) { #ifdef __FP_FAST_FMA *hi = x * y; *lo = __builtin_fma (x, y, -*hi); #elif defined FP_FAST_FMA *hi = x * y; *lo = __fma (x, y, -*hi); #else *hi = x * y; # define C ((1 << (DBL_MANT_DIG + 1) / 2) + 1) double x1 = x * C; double y1 = y * C; # undef C x1 = (x - x1) + x1; y1 = (y - y1) + y1; double x2 = x - x1; double y2 = y - y1; *lo = (((x1 * y1 - *hi) + x1 * y2) + x2 * y1) + x2 * y2; #endif }
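The fallback branch is Dekker's classic two-product algorithm: split each factor at half the mantissa width with the constant C, then reassemble the exact product as hi + lo. A small Python illustration of the same idea (illustrative translation only, not the library's code; Python floats are IEEE binary64, so DBL_MANT_DIG is 53):

# Dekker-style exact multiplication in Python (IEEE binary64, 53-bit
# mantissa) -- an illustrative translation of the C fallback branch above.
from fractions import Fraction

C = (1 << (53 + 1) // 2) + 1      # splitting constant, 2**27 + 1

def mul_split(x, y):
    hi = x * y
    x1 = x * C; x1 = (x - x1) + x1  # high half of x (Veltkamp split)
    y1 = y * C; y1 = (y - y1) + y1  # high half of y
    x2, y2 = x - x1, y - y1         # low halves
    lo = (((x1 * y1 - hi) + x1 * y2) + x2 * y1) + x2 * y2
    return hi, lo

hi, lo = mul_split(1.1, 2.3)
# hi + lo equals the exact real product of the two doubles:
assert Fraction(hi) + Fraction(lo) == Fraction(1.1) * Fraction(2.3)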
Assembly of Binary, Ternary and Quaternary Nanorods: From Local to Device-Scale Ordering Influenced by Surface Charge

In this article we outline the assembly of binary, ternary and quaternary nanorods using three separate protocols. The rods are the important photoabsorbers CdS, CdSe_xS_{1-x}, CuIn_xGa_{1-x}S and Cu_2ZnSnS_4. In the droplet-based assembly, we form either 1D rail-tracks or 2D discs of aligned rods, depending on the net charge. In the vial-based approach, we demonstrate crystallisation of the rods into perfectly faceted hexagons. Finally, we use electrophoretic deposition to assemble the charged rods directly at the substrate interface, obtaining thin films consisting of 50 or more nanorod layers with complete orientational order.
export const dummy = 0;
Factors Contributing to the Reduction of Pain during Electromyography and Nerve Conduction Studies.

BACKGROUND: Electromyography (EMG) and nerve conduction studies (NCS) are unpleasant and sometimes painful examinations. Pain can reduce patients' compliance and have a negative effect on the examination results. Various studies report that music affects pain perception by acting as a distractor, by inducing positive emotional valence, or through the convergence of different sensory modalities. The aim of this study was to explore the effect of music and of different environmental and sociodemographic factors on pain perception during EMG and NCS.

SUBJECTS AND METHODS: Sixty patients with suspected neuromuscular disease were randomized into a music group and a control group. A specific questionnaire assessed sociodemographic characteristics, medical history, examination waiting time, examination extent and the biometeorological forecast. The numerical rating scale was used for the evaluation of pain. The examiner evaluated each patient's compliance after the examination.

RESULTS: NCS was less painful for patients in the music group (p=0.03), as well as for more cooperative patients (p=0.011). For patients who had previously undergone EMG/NCS, the present NCS was more painful (p=0.001), regardless of the music intervention (p=0.019). EMG was more painful for older patients (p=0.041). Patients with a lower level of education reported less pain during NCS (p=0.026). Gender, financial satisfaction, biometeorological forecast, diabetes, depression or malignant disease, use and dosing of analgesics or antidepressants, symptoms, examination waiting time and examination extent had no effect on pain perception.

CONCLUSIONS: Music significantly decreased the perception of pain associated with NCS, but not with the EMG portion of the examination. During EMG the pain level was not significantly reduced, but the median pain score was still lower. In general, the pain level during NCS, unlike that during EMG, was affected by patients' compliance, level of education and painful predetermination. We propose using music during EMG/NCS because it can make the examination more comfortable for the patient and thus contribute to the better quality of the examination.
from django.contrib import admin
from .models import File, Category

admin.site.register(File)
admin.site.register(Category)
/**
 * Created by mike on 2/7/15.
 */
public class Utils {

    public static void displayErrorDialog(String message, String title, Context context) {
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        builder.setMessage(message)
                .setTitle(title)
                .setPositiveButton(android.R.string.ok, null);
        AlertDialog dialog = builder.create();
        dialog.show();
    }

    public static void displayChoiceDialog(Context context, int items, DialogInterface.OnClickListener listener) {
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        builder.setItems(items, listener);
        AlertDialog dialog = builder.create();
        dialog.show();
    }

    public static void switchActivity(Context fromContext, Class toClass, Boolean clearHistory, Uri extraData) {
        Intent intent = new Intent(fromContext, toClass);
        if (clearHistory) {
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK);
        }
        intent.setData(extraData);
        fromContext.startActivity(intent);
    }

    // NOTE: the 'extra' argument is currently unused; this overload only
    // forwards the Uri, without clearing the activity history.
    public static void switchActivity(Context fromContext, Class toClass, Uri extraData, Object extra) {
        switchActivity(fromContext, toClass, false, extraData);
    }

    public static void switchActivity(Context fromContext, Class toClass, Boolean clearHistory) {
        Intent intent = new Intent(fromContext, toClass);
        if (clearHistory) {
            intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK);
        }
        fromContext.startActivity(intent);
    }

    public static void switchActivity(Context fromContext, Class toClass) {
        // Clear the history by default
        switchActivity(fromContext, toClass, true);
    }

    // General error toast
    public static void showErrorToast(Context context) {
        showToast(context, R.string.general_error);
    }

    public static void showToast(Context context, int strResourceId) {
        Toast.makeText(context, context.getString(strResourceId), Toast.LENGTH_LONG).show();
    }

    public static void registerUserInstall(ParseUser user) {
        ParseInstallation installation = ParseInstallation.getCurrentInstallation();
        installation.put(ParseConstants.KEY_USER_ID, user.getObjectId());
        installation.saveInBackground();
    }
}
/// \brief Create output filename based on ArgValue, which could either be a /// full filename, filename without extension, or a directory. If ArgValue /// does not provide a filename, then use BaseName, and use the extension /// suitable for FileType. static const char *MakeCLOutputFilename(const ArgList &Args, StringRef ArgValue, StringRef BaseName, types::ID FileType) { SmallString<128> Filename = ArgValue; if (ArgValue.empty()) { Filename = BaseName; } else if (llvm::sys::path::is_separator(Filename.back())) { llvm::sys::path::append(Filename, BaseName); } if (!llvm::sys::path::has_extension(ArgValue)) { const char *Extension = types::getTypeTempSuffix(FileType, true); if (FileType == types::TY_Image && Args.hasArg(options::OPT__SLASH_LD, options::OPT__SLASH_LDd)) { Extension = "dll"; } llvm::sys::path::replace_extension(Filename, Extension); } return Args.MakeArgString(Filename.c_str()); }
def create_index_document(self, data, version, next_version): return { u'data': self.create_data(data), u'meta': self.create_metadata(version, next_version), }
import sys
input = sys.stdin.readline


class Graph():  # non-directed
    def __init__(self, n, edge):
        self.n = n
        self.graph = [[] for _ in range(n)]
        self.deg = [0 for _ in range(n)]
        for e in edge:
            self.graph[e[0] - 1].append(e[1] - 1)
            self.graph[e[1] - 1].append(e[0] - 1)
            self.deg[e[0] - 1] += 1
            self.deg[e[1] - 1] += 1


N, M = map(int, input().split())
H = list(map(int, input().split()))
E = [tuple(map(int, input().split())) for _ in range(M)]

G = Graph(N, E)

# Count "good" observatories: strictly taller than every neighbour.
res = 0
for node in range(N):
    flag = True
    for adj in G.graph[node]:
        if H[adj] >= H[node]:
            flag = False
            break
    res += flag
print(res)
<filename>backend/registration_adapter_api/swagger_gen/restapi/operations/get_beneficiaries_responses.go<gh_stars>100-1000 // Code generated by go-swagger; DO NOT EDIT. package operations // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "net/http" "github.com/go-openapi/runtime" ) // GetBeneficiariesOKCode is the HTTP code returned for type GetBeneficiariesOK const GetBeneficiariesOKCode int = 200 /*GetBeneficiariesOK OK swagger:response getBeneficiariesOK */ type GetBeneficiariesOK struct { /* In: Body */ Payload []*GetBeneficiariesOKBodyItems0 `json:"body,omitempty"` } // NewGetBeneficiariesOK creates GetBeneficiariesOK with default headers values func NewGetBeneficiariesOK() *GetBeneficiariesOK { return &GetBeneficiariesOK{} } // WithPayload adds the payload to the get beneficiaries o k response func (o *GetBeneficiariesOK) WithPayload(payload []*GetBeneficiariesOKBodyItems0) *GetBeneficiariesOK { o.Payload = payload return o } // SetPayload sets the payload to the get beneficiaries o k response func (o *GetBeneficiariesOK) SetPayload(payload []*GetBeneficiariesOKBodyItems0) { o.Payload = payload } // WriteResponse to the client func (o *GetBeneficiariesOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { rw.WriteHeader(200) payload := o.Payload if payload == nil { // return empty array payload = make([]*GetBeneficiariesOKBodyItems0, 0, 50) } if err := producer.Produce(rw, payload); err != nil { panic(err) // let the recovery middleware deal with this } } // GetBeneficiariesBadRequestCode is the HTTP code returned for type GetBeneficiariesBadRequest const GetBeneficiariesBadRequestCode int = 400 /*GetBeneficiariesBadRequest Bad Request swagger:response getBeneficiariesBadRequest */ type GetBeneficiariesBadRequest struct { /* In: Body */ Payload interface{} `json:"body,omitempty"` } // NewGetBeneficiariesBadRequest creates GetBeneficiariesBadRequest with default headers values func NewGetBeneficiariesBadRequest() *GetBeneficiariesBadRequest { return &GetBeneficiariesBadRequest{} } // WithPayload adds the payload to the get beneficiaries bad request response func (o *GetBeneficiariesBadRequest) WithPayload(payload interface{}) *GetBeneficiariesBadRequest { o.Payload = payload return o } // SetPayload sets the payload to the get beneficiaries bad request response func (o *GetBeneficiariesBadRequest) SetPayload(payload interface{}) { o.Payload = payload } // WriteResponse to the client func (o *GetBeneficiariesBadRequest) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { rw.WriteHeader(400) payload := o.Payload if err := producer.Produce(rw, payload); err != nil { panic(err) // let the recovery middleware deal with this } } // GetBeneficiariesUnauthorizedCode is the HTTP code returned for type GetBeneficiariesUnauthorized const GetBeneficiariesUnauthorizedCode int = 401 /*GetBeneficiariesUnauthorized Unauthorized swagger:response getBeneficiariesUnauthorized */ type GetBeneficiariesUnauthorized struct { /* In: Body */ Payload interface{} `json:"body,omitempty"` } // NewGetBeneficiariesUnauthorized creates GetBeneficiariesUnauthorized with default headers values func NewGetBeneficiariesUnauthorized() *GetBeneficiariesUnauthorized { return &GetBeneficiariesUnauthorized{} } // WithPayload adds the payload to the get beneficiaries unauthorized response func (o *GetBeneficiariesUnauthorized) WithPayload(payload interface{}) *GetBeneficiariesUnauthorized { o.Payload = 
payload return o } // SetPayload sets the payload to the get beneficiaries unauthorized response func (o *GetBeneficiariesUnauthorized) SetPayload(payload interface{}) { o.Payload = payload } // WriteResponse to the client func (o *GetBeneficiariesUnauthorized) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { rw.WriteHeader(401) payload := o.Payload if err := producer.Produce(rw, payload); err != nil { panic(err) // let the recovery middleware deal with this } }
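A side note on the "// return empty array" branch in WriteResponse above: with Go's encoding/json, a nil slice marshals to JSON null, so the generated writer substitutes an empty slice to guarantee the body is always a well-formed array. A quick illustration of the distinction, sketched here in Python for brevity:

import json

# A missing list serializes as null, which many API clients reject...
print(json.dumps(None))  # -> null
# ...whereas an empty list serializes as a valid empty JSON array.
print(json.dumps([]))    # -> []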
/**
 * Translates a CMIS permission to a set of JCR privileges.
 *
 * @param cmisPermission the CMIS permission name ("cmis:read", "cmis:write" or "cmis:all")
 * @return the corresponding JCR privileges, or null if the permission is unknown
 */
public static String[] jcrPermissions( String cmisPermission ) {
    switch (cmisPermission) {
        case "cmis:read" :
            return new String[] { Privilege.JCR_READ, Privilege.JCR_READ_ACCESS_CONTROL };
        case "cmis:write" :
            return new String[] { Privilege.JCR_WRITE, Privilege.JCR_READ_ACCESS_CONTROL,
                    Privilege.JCR_MODIFY_ACCESS_CONTROL, Privilege.JCR_NODE_TYPE_MANAGEMENT,
                    Privilege.JCR_RETENTION_MANAGEMENT, Privilege.JCR_VERSION_MANAGEMENT };
        case "cmis:all" :
            return new String[] { Privilege.JCR_ALL };
        default :
            return null;
    }
}
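The switch above is effectively a fixed lookup table from CMIS permission names to JCR privilege sets. A sketch of the same shape in Python, with the Privilege constants reduced to plain stand-in strings (the real values are JCR privilege identifiers):

# Stand-in strings for the JCR Privilege constants above.
JCR_PERMISSIONS = {
    "cmis:read": ["jcr:read", "jcr:readAccessControl"],
    "cmis:write": ["jcr:write", "jcr:readAccessControl", "jcr:modifyAccessControl",
                   "jcr:nodeTypeManagement", "jcr:retentionManagement",
                   "jcr:versionManagement"],
    "cmis:all": ["jcr:all"],
}

def jcr_permissions(cmis_permission):
    # dict.get mirrors the switch's default branch returning null.
    return JCR_PERMISSIONS.get(cmis_permission)

assert jcr_permissions("cmis:all") == ["jcr:all"]
assert jcr_permissions("cmis:unknown") is None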
package parser import ( "fmt" "regexp" "sort" "strconv" "strings" "github.com/smartystreets/goconvey/web/server/contract" ) var ( testNamePattern = regexp.MustCompile("^=== RUN:? +(.+)$") ) func ParsePackageResults(result *contract.PackageResult, rawOutput string) { newOutputParser(result, rawOutput).parse() } type outputParser struct { raw string lines []string result *contract.PackageResult tests []*contract.TestResult // place holders for loops line string test *contract.TestResult testMap map[string]*contract.TestResult } func newOutputParser(result *contract.PackageResult, rawOutput string) *outputParser { self := new(outputParser) self.raw = strings.TrimSpace(rawOutput) self.lines = strings.Split(self.raw, "\n") self.result = result self.tests = []*contract.TestResult{} self.testMap = make(map[string]*contract.TestResult) return self } func (self *outputParser) parse() { self.separateTestFunctionsAndMetadata() self.parseEachTestFunction() } func (self *outputParser) separateTestFunctionsAndMetadata() { for _, self.line = range self.lines { if self.processNonTestOutput() { break } // Hack for results from ginkgo tests lines := strings.Split(self.line, " --- ") if len(lines) == 2 && len(strings.TrimSpace(lines[0])) > 0 && strings.HasPrefix(lines[1], "PASS") { self.line = lines[0] self.processTestOutput() self.line = "--- " + lines[1] self.processTestOutput() continue } self.processTestOutput() } } func (self *outputParser) processNonTestOutput() bool { if noGoFiles(self.line) { self.recordFinalOutcome(contract.NoGoFiles) } else if buildFailed(self.line) { self.recordFinalOutcome(contract.BuildFailure) } else if noTestFiles(self.line) { self.recordFinalOutcome(contract.NoTestFiles) } else if noTestFunctions(self.line) { self.recordFinalOutcome(contract.NoTestFunctions) } else { return false } return true } func (self *outputParser) recordFinalOutcome(outcome string) { self.result.Outcome = outcome self.result.BuildOutput = strings.Join(self.lines, "\n") } func (self *outputParser) processTestOutput() { self.line = strings.TrimSpace(self.line) if isNewTest(self.line) { self.registerTestFunction() } else if isTestResult(self.line) { self.recordTestMetadata() } else if isPackageReport(self.line) { self.recordPackageMetadata() } else { self.saveLineForParsingLater() } } func (self *outputParser) registerTestFunction() { testNameReg := testNamePattern.FindStringSubmatch(self.line) if len(testNameReg) < 2 { // Test-related lines that aren't about a new test return } self.test = contract.NewTestResult(testNameReg[1]) self.tests = append(self.tests, self.test) self.testMap[self.test.TestName] = self.test } func (self *outputParser) recordTestMetadata() { testName := strings.Split(self.line, " ")[2] if test, ok := self.testMap[testName]; ok { self.test = test self.test.Passed = !strings.HasPrefix(self.line, "--- FAIL: ") self.test.Skipped = strings.HasPrefix(self.line, "--- SKIP: ") self.test.Elapsed = parseTestFunctionDuration(self.line) } } func (self *outputParser) recordPackageMetadata() { if packageFailed(self.line) { self.recordTestingOutcome(contract.Failed) } else if packagePassed(self.line) { self.recordTestingOutcome(contract.Passed) } else if isCoverageSummary(self.line) { self.recordCoverageSummary(self.line) } } func (self *outputParser) recordTestingOutcome(outcome string) { self.result.Outcome = outcome fields := strings.Split(self.line, "\t") self.result.PackageName = strings.TrimSpace(fields[1]) self.result.Elapsed = parseDurationInSeconds(fields[2], 3) } func (self 
*outputParser) recordCoverageSummary(summary string) { start := len("coverage: ") end := strings.Index(summary, "%") value := summary[start:end] parsed, err := strconv.ParseFloat(value, 64) if err != nil { self.result.Coverage = -1 } else { self.result.Coverage = parsed } } func (self *outputParser) saveLineForParsingLater() { self.line = strings.TrimLeft(self.line, "\t") if self.test == nil { fmt.Println("Potential error parsing output of", self.result.PackageName, "; couldn't handle this stray line:", self.line) return } self.test.RawLines = append(self.test.RawLines, self.line) } // TestResults is a collection of TestResults that implements sort.Interface. type TestResults []contract.TestResult func (r TestResults) Len() int { return len(r) } // Less compares TestResults on TestName func (r TestResults) Less(i, j int) bool { return r[i].TestName < r[j].TestName } func (r TestResults) Swap(i, j int) { r[i], r[j] = r[j], r[i] } func (self *outputParser) parseEachTestFunction() { for _, self.test = range self.tests { self.test = parseTestOutput(self.test) if self.test.Error != "" { self.result.Outcome = contract.Panicked } self.test.RawLines = []string{} self.result.TestResults = append(self.result.TestResults, *self.test) } sort.Sort(TestResults(self.result.TestResults)) }
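To make the line classification above concrete, here is a small self-contained sketch (in Python, with invented sample output) of how testNamePattern and the "--- PASS/FAIL/SKIP" prefixes partition typical `go test -v` output into new tests, result lines, and raw lines saved for later parsing:

import re

# Same pattern as testNamePattern above, ported to Python syntax.
test_name_pattern = re.compile(r"^=== RUN:? +(.+)$")

sample = [
    "=== RUN   TestParse",           # registers a new test function
    "--- PASS: TestParse (0.02s)",   # result line: passed, with duration
    "--- SKIP: TestLegacy (0.00s)",  # result line: skipped
    "some stray diagnostic output",  # saved as a raw line for later parsing
]
for line in sample:
    m = test_name_pattern.match(line)
    if m:
        print("new test:", m.group(1))
    elif line.startswith(("--- PASS:", "--- FAIL:", "--- SKIP:")):
        # Index 2 is the test name, mirroring recordTestMetadata above.
        print("result  :", line.split(" ")[2])
    else:
        print("raw line:", line)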
import { createSlice, PayloadAction } from "@reduxjs/toolkit"; const LEFT_BOX_WIDTH = 240; const RIGHT_BOX_WIDTH = 400; export interface ExplorerState { leftBoxWidth: number; rightBoxWidth: number; } const initialState: ExplorerState = { leftBoxWidth: LEFT_BOX_WIDTH, rightBoxWidth: RIGHT_BOX_WIDTH, }; const explorer = createSlice({ name: "explorer", initialState: initialState, reducers: { setLeftBoxWidth: (state: ExplorerState, action: PayloadAction<number>) => { state.leftBoxWidth = action.payload; }, setRightBoxWidth: (state: ExplorerState, action: PayloadAction<number>) => { state.rightBoxWidth = action.payload; }, }, }); export const { setLeftBoxWidth, setRightBoxWidth } = explorer.actions; export default explorer.reducer;
def send_message(self, message, channel: str):
    """Serialize a message as JSON and queue it for sending on the given channel."""
    if not self.connected:
        raise IOError("Client has disconnected")
    message_uuid = uuid.uuid4()
    serialized_message = {
        "header": {
            "channel": channel,
            "pubtime": str(message.pubtime),
            "acqtime": str(message.acqtime),
            "proto": str(_unsigned_to_signed(message.proto.schema.node.id)),
            # The 128-bit UUID is carried as two unsigned 64-bit words.
            "uuid": {
                "lower": (message_uuid.int >> 0) & ((1 << 64) - 1),
                "upper": (message_uuid.int >> 64) & ((1 << 64) - 1),
            }
        },
        "payload": _proto_to_dict(message.proto)
    }
    # Messages are framed as newline-delimited JSON on the outgoing text stream.
    self._outgoing_text += json.dumps(serialized_message) + "\n"
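A minimal round-trip check of the 64-bit split used in the header above, standard library only:

import uuid

u = uuid.uuid4()
lower = (u.int >> 0) & ((1 << 64) - 1)   # least-significant 64 bits
upper = (u.int >> 64) & ((1 << 64) - 1)  # most-significant 64 bits

# Reassembling the two words recovers the original 128-bit value.
assert (upper << 64) | lower == u.int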
Hypermetabolism in motor neurone disease is associated with a greater functional decline but not weight loss

Objectives Motor Neurone Disease (MND) is a fatal neurological disease. Hypermetabolism (increased resting energy expenditure (REE)) and loss of body weight occur in MND. We aimed to determine whether hypermetabolism correlates with disease severity and weight loss.

Methods Patients (n=44) and control participants (n=45) presented at the MND clinic at the CCR following an overnight fast. Body composition was assessed using air displacement plethysmography (BodPod; COSMED). REE was assessed by indirect calorimetry (Quark RMR, COSMED) and a metabolic index (MI; % change in measured REE versus predicted REE) was determined. Hypermetabolism was defined as MI > 120%. Disease severity was assessed using the Amyotrophic Lateral Sclerosis Functional Rating Scale (ALSFRS-R) and respiratory function tests (RFTs). A subset of MND patients (n=30) returned for a repeat assessment ~4 months later.

Results Hypermetabolism was more prevalent in MND (41%) than in control (18%) participants. In longitudinal analyses, hypermetabolism was not associated with a change in RFTs (including forced vital capacity, p=0.91, and sniff nasal pressure, p=0.40), loss of body weight (p=0.79) or a reduction in % fat mass (p=0.78). Strikingly, hypermetabolic individuals experienced a greater decline in ALSFRS-R over the sampling interval (p<0.01).

Conclusions We confirm a greater prevalence of hypermetabolism in MND when compared with matched controls. Hypermetabolism in MND patients is associated with a greater functional decline, but not with a change in RFTs or weight loss. While challenging the notions that hypermetabolism is a consequence of respiratory dysfunction and that it contributes to weight loss, our emerging insights suggest that hypermetabolism contributes to disease progression. Thus, hypermetabolism could have major implications for quality of life and possibly disease survival. Ongoing studies aim to clarify the cause for, and consequence of, hypermetabolism in MND.
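For concreteness: reading the metabolic index as measured REE expressed as a percentage of predicted REE (our interpretation of the MI definition above), a patient with a measured REE of 1800 kcal/day against a predicted 1500 kcal/day has MI = 100 × 1800 / 1500 = 120%, i.e. sits exactly at the hypermetabolism threshold of MI > 120%. The kcal figures are invented for illustration only.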
/**
 * Registers the given HistoryRecords into a global registry under the given instance name,
 * so that they can be accessed by instances of {@link FlinkDatabaseHistory}.
 */
public static void registerHistoryRecords(String instanceName, ConcurrentLinkedQueue<HistoryRecord> historyRecords) {
    synchronized (FlinkDatabaseHistory.ALL_RECORDS) {
        FlinkDatabaseHistory.ALL_RECORDS.put(instanceName, historyRecords);
    }
}
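The registration above is the classic pattern of a process-wide registry guarded by a lock, so that a producer and a later-created consumer can rendezvous by name. A minimal Python sketch of the same idea; the names and record values are invented for illustration:

import threading
from collections import deque

_ALL_RECORDS = {}          # instance name -> queue of history records
_LOCK = threading.Lock()   # guards registration, like the synchronized block above

def register_history_records(instance_name, records):
    with _LOCK:
        _ALL_RECORDS[instance_name] = records

# A consumer created later looks the queue up by the agreed-upon name.
register_history_records("source-1", deque(["ddl-1", "ddl-2"]))
assert _ALL_RECORDS["source-1"].popleft() == "ddl-1"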
import * as React from 'react'
import {List} from 'antd'
import PostItem from './PostItem'
import {Post} from '../../@types/data'
import Button from 'antd/lib/button/button'
import {lang} from '../../constants/lang'

interface Props {
    lists: Post[]
}

class PostsList extends React.Component<Props> {
    render() {
        const {lists} = this.props
        return (
            <List
                itemLayout="vertical"
                size="large"
                locale={{emptyText: lang.no_content}}
                split={false}
                dataSource={lists}
                footer={<div style={{textAlign: 'right'}}><Button type="primary" disabled>Post</Button></div>}
                renderItem={(post: Post) => <PostItem post={post}/>}
            />
        )
    }
}

export default PostsList
/**
 * Test TIMER_FIRED event for timer start bpmn event.
 */
@Deployment
public void testTimerFiredForTimerStart() throws Exception {
    Calendar tomorrow = Calendar.getInstance();
    tomorrow.add(Calendar.DAY_OF_YEAR, 1);
    processEngineConfiguration.getClock().setCurrentTime(tomorrow.getTime());
    waitForJobExecutorToProcessAllJobs(2000, 100);
    assertEquals(6, listener.getEventsReceived().size());
    assertEquals(ActivitiEventType.ENTITY_CREATED, listener.getEventsReceived().get(0).getType());
    assertEquals(ActivitiEventType.ENTITY_INITIALIZED, listener.getEventsReceived().get(1).getType());
    assertEquals(ActivitiEventType.ENTITY_DELETED, listener.getEventsReceived().get(2).getType());
    assertEquals(ActivitiEventType.TIMER_FIRED, listener.getEventsReceived().get(3).getType());
    assertEquals(ActivitiEventType.JOB_EXECUTION_SUCCESS, listener.getEventsReceived().get(5).getType());
    checkEventCount(0, ActivitiEventType.JOB_CANCELED);
}
/** * This class represents a storage facility for DID objects and private keys. * * The DIDStore manages different types of entries: * - RootIdentity * - DIDDocument * - VerifiableCredential * - PrivateKey */ public final class DIDStore { /** * The type string for DIDStore. */ protected static final String DID_STORE_TYPE = "did:elastos:store"; /** * Current DIDStore version. */ protected static final int DID_STORE_VERSION = 3; private static final int CACHE_INITIAL_CAPACITY = 16; private static final int CACHE_MAX_CAPACITY = 128; private static final Object NULL = new Object(); private static final String DID_EXPORT = "did.elastos.export/2.0"; private static final String DID_LAZY_PRIVATEKEY = "lazy-private-key"; private Cache<Key, Object> cache; private DIDStorage storage; private Metadata metadata; /** * the default conflict handle implementation. */ protected static final ConflictHandle defaultConflictHandle = (c, l) -> { return l; }; private static final Logger log = LoggerFactory.getLogger(DIDStore.class); static class Key { private static final int TYPE_ROOT_IDENTITY = 0x00; private static final int TYPE_ROOT_IDENTITY_PRIVATEKEY = 0x01; private static final int TYPE_DID_DOCUMENT = 0x10; private static final int TYPE_DID_METADATA = 0x11; private static final int TYPE_DID_PRIVATEKEY = 0x12; private static final int TYPE_CREDENTIAL = 0x20; private static final int TYPE_CREDENTIAL_METADATA = 0x21; private int type; private Object id; private Key(int type, Object id) { this.type = type; this.id = id; } @Override public int hashCode() { return type + id.hashCode(); } @Override public boolean equals(Object obj) { if (obj == this) return true; if (obj instanceof Key) { Key key = (Key)obj; return type == key.type ? id.equals(key.id) : false; } return false; } public static Key forRootIdentity(String id) { return new Key(TYPE_ROOT_IDENTITY, id); } public static Key forRootIdentityPrivateKey(String id) { return new Key(TYPE_ROOT_IDENTITY_PRIVATEKEY, id); } public static Key forDidDocument(DID did) { return new Key(TYPE_DID_DOCUMENT, did); } public static Key forDidMetadata(DID did) { return new Key(TYPE_DID_METADATA, did); } private static Key forDidPrivateKey(DIDURL id) { return new Key(TYPE_DID_PRIVATEKEY, id); } private static Key forCredential(DIDURL id) { return new Key(TYPE_CREDENTIAL, id); } private static Key forCredentialMetadata(DIDURL id) { return new Key(TYPE_CREDENTIAL_METADATA, id); } } static class Metadata extends AbstractMetadata { private static final String TYPE = "type"; private static final String VERSION = "version"; private static final String FINGERPRINT = "fingerprint"; private static final String DEFAULT_ROOT_IDENTITY = "defaultRootIdentity"; protected Metadata(DIDStore store) { super(store); put(TYPE, DID_STORE_TYPE); put(VERSION, DID_STORE_VERSION); } /** * The default constructor for JSON deserialize creator. 
*/ protected Metadata() { this(null); } protected String getType() { return get(TYPE); } public int getVersion() { return getInteger(VERSION, -1); } private void setFingerprint(String fingerprint) { checkArgument(fingerprint != null && !fingerprint.isEmpty(), "Invalid fingerprint"); put(FINGERPRINT, fingerprint); } public String getFingerprint() { return get(FINGERPRINT); } protected void setDefaultRootIdentity(String id) { put(DEFAULT_ROOT_IDENTITY, id); } public String getDefaultRootIdentity() { return get(DEFAULT_ROOT_IDENTITY); } @Override protected void save() { if (attachedStore()) { try { getStore().storage.storeMetadata(this); } catch (DIDStoreException ignore) { log.error("INTERNAL - error store metadata for DIDStore"); } } } } /** * ConflictHandle is a interface for solving the conflict, * if the local document is different with the one resolved from chain. */ @FunctionalInterface public interface ConflictHandle { /** * The method to merge two did document. * * @param chainCopy the document from chain * @param localCopy the document from local device * @return the merged DIDDocument object */ DIDDocument merge(DIDDocument chainCopy, DIDDocument localCopy); } /** * A filter for DIDs. * * <p> * Instances of this interface may be passed to the listDids(DIDFilter) * method of the DIDStore class. * </p> */ @FunctionalInterface public interface DIDFilter { /** * Tests whether or not the specified DID should be included in a * DIDs list. * * @param did the DID to be tested * @return true if and only if DID should be included */ public boolean accept(DID did); } /** * A filter for DIDURLs. * * <p> * Instances of this interface may be passed to the * listCredentials(CredentialFilter) method of the DIDStore class. * </p> */ @FunctionalInterface public interface CredentialFilter { /** * Tests whether or not the specified id should be included in a * id list. * * @param id the DIDURL to be tested * @return true if and only if DIDURL should be included */ public boolean accept(DIDURL id); } private DIDStore(int initialCacheCapacity, int maxCacheCapacity, DIDStorage storage) throws DIDStoreException { if (initialCacheCapacity < 0) initialCacheCapacity = 0; if (maxCacheCapacity < 0) maxCacheCapacity = 0; // The RemovalListener used for debug purpose. // TODO: comment the RemovalListener /* RemovalListener<Object, Object> listener; listener = new RemovalListener<Object, Object>() { @Override public void onRemoval(RemovalNotification<Object, Object> n) { if (n.wasEvicted()) { String cause = n.getCause().name(); log.trace("Cache removed {} cause {}", n.getKey(), cause); } } }; */ cache = CacheBuilder.newBuilder() .initialCapacity(initialCacheCapacity) .maximumSize(maxCacheCapacity) .softValues() // .removalListener(listener) // .recordStats() .build(); this.storage = storage; this.metadata = storage.loadMetadata(); this.metadata.attachStore(this); log.info("DID store opened: {}, cache(init:{}, max:{})", storage.getLocation(), initialCacheCapacity, maxCacheCapacity); } /** * Open a DIDStore instance with given storage location. 
* * @param location the storage location for the DIDStore * @param initialCacheCapacity the initial cache capacity * @param maxCacheCapacity the maximum cache capacity * @return the DIDStore object * @throws DIDStoreException if an error occurred when opening the store */ public static DIDStore open(File location, int initialCacheCapacity, int maxCacheCapacity) throws DIDStoreException { checkArgument(location != null, "Invalid store location"); checkArgument(maxCacheCapacity >= initialCacheCapacity, "Invalid cache capacity spec"); try { location = location.getCanonicalFile(); } catch (IOException e) { throw new IllegalArgumentException("Invalid store location", e); } DIDStorage storage = new FileSystemStorage(location); return new DIDStore(initialCacheCapacity, maxCacheCapacity, storage); } /** * Open a DIDStore instance with given storage location. * * @param location the storage location for the DIDStore * @param initialCacheCapacity the initial cache capacity * @param maxCacheCapacity the maximum cache capacity * @return the DIDStore object * @throws DIDStoreException if an error occurred when opening the store */ public static DIDStore open(String location, int initialCacheCapacity, int maxCacheCapacity) throws DIDStoreException { checkArgument(location != null && !location.isEmpty(), "Invalid store location"); return open(new File(location), initialCacheCapacity, maxCacheCapacity); } /** * Open a DIDStore instance with given storage location. * * @param location the storage location for the DIDStore * @return the DIDStore object * @throws DIDStoreException if an error occurred when opening the store */ public static DIDStore open(File location) throws DIDStoreException { return open(location, CACHE_INITIAL_CAPACITY, CACHE_MAX_CAPACITY); } /** * Open a DIDStore instance with given storage location. * * @param location the storage location for the DIDStore * @return the DIDStore object * @throws DIDStoreException if an error occurred when opening the store */ public static DIDStore open(String location) throws DIDStoreException { return open(location, CACHE_INITIAL_CAPACITY, CACHE_MAX_CAPACITY); } /** * Close this DIDStore object. */ public void close() { // log.verbose("Cache statistics: {}", cache.stats().toString()); cache.invalidateAll(); cache = null; metadata = null; storage = null; } private static String calcFingerprint(String password) throws DIDStoreException { // Here should use Argon2, better to avoid the password attack. // But spongycastle library not include the Argon2 implementation, // so here we use one-time AES encryption to secure the password hash. 
MD5Digest md5 = new MD5Digest(); byte[] digest = new byte[md5.getDigestSize()]; byte[] passwd = password.getBytes(); md5.update(passwd, 0, passwd.length); md5.doFinal(digest, 0); md5.reset(); try { byte[] cipher = Aes256cbc.encrypt(digest, password); md5.update(cipher, 0, cipher.length); md5.doFinal(digest, 0); return Hex.toHexString(digest); } catch (CryptoException e) { throw new DIDStoreCryptoException("Calculate fingerprint error.", e); } } private static String encryptToBase64(byte[] input, String passwd) throws DIDStoreException { try { byte[] cipher = Aes256cbc.encrypt(input, passwd); return Base64.encodeToString(cipher, Base64.URL_SAFE | Base64.NO_PADDING | Base64.NO_WRAP); } catch (CryptoException e) { throw new DIDStoreCryptoException("Encrypt data error.", e); } } private static byte[] decryptFromBase64(String input, String passwd) throws DIDStoreException { try { byte[] cipher = Base64.decode(input, Base64.URL_SAFE | Base64.NO_PADDING | Base64.NO_WRAP); return Aes256cbc.decrypt(cipher, passwd); } catch (CryptoException e) { throw new WrongPasswordException("Decrypt private key error.", e); } } private static String reEncrypt(String secret, String oldpass, String newpass) throws DIDStoreException { byte[] plain = decryptFromBase64(secret, oldpass); String newSecret = encryptToBase64(plain, newpass); Arrays.fill(plain, (byte)0); return newSecret; } private String encrypt(byte[] input, String passwd) throws DIDStoreException { String fingerprint = metadata.getFingerprint(); String currentFingerprint = calcFingerprint(passwd); if (fingerprint != null && !currentFingerprint.equals(fingerprint)) throw new WrongPasswordException("Password mismatched with previous password."); String result = encryptToBase64(input, passwd); if (fingerprint == null || fingerprint.isEmpty()) metadata.setFingerprint(currentFingerprint); return result; } private byte[] decrypt(String input, String passwd) throws DIDStoreException { String fingerprint = metadata.getFingerprint(); String currentFingerprint = calcFingerprint(passwd); byte[] result = decryptFromBase64(input, passwd); if (fingerprint == null || fingerprint.isEmpty()) metadata.setFingerprint(currentFingerprint); return result; } /** * Save the RootIdentity object with private keys to this DID store. * * @param identity an RootIdentity object * @param storepass the password for this DID store * @throws DIDStoreException if an error occurred when accessing the store */ protected void storeRootIdentity(RootIdentity identity, String storepass) throws DIDStoreException { checkArgument(identity != null, "Invalid identity"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); String encryptedMnemonic = null; if (identity.getMnemonic() != null) encryptedMnemonic = encrypt(identity.getMnemonic().getBytes(), storepass); String encryptedPrivateKey = encrypt(identity.getRootPrivateKey().serialize(), storepass); String publicKey = identity.getPreDerivedPublicKey().serializePublicKeyBase58(); storage.storeRootIdentity(identity.getId(), encryptedMnemonic, encryptedPrivateKey, publicKey, identity.getIndex()); if (metadata.getDefaultRootIdentity() == null) metadata.setDefaultRootIdentity(identity.getId()); cache.invalidate(Key.forRootIdentity(identity.getId())); cache.invalidate(Key.forRootIdentityPrivateKey(identity.getId())); } /** * Save the RootIdentity object to this DID store(Update the derive index * only). 
* * @param identity an RootIdentity object * @throws DIDStoreException if an error occurred when accessing the store */ protected void storeRootIdentity(RootIdentity identity) throws DIDStoreException { checkArgument(identity != null, "Invalid identity"); storage.updateRootIdentityIndex(identity.getId(), identity.getIndex()); } /** * Set the identity as the default RootIdentity of the DIDStore. * * @param identity a RootIdentity object * @throws DIDStoreException if an error occurred when accessing the store */ protected void setDefaultRootIdentity(RootIdentity identity) throws DIDStoreException { checkArgument(identity != null, "Invalid identity"); if (!containsRootIdentity(identity.getId())) throw new IllegalArgumentException("Invalid identity, not exists in the store"); metadata.setDefaultRootIdentity(identity.getId()); } /** * Load a RootIdentity object from this DIDStore. * * @param id the id of the RootIdentity * @return the RootIdentity object, null if the identity not exists * @throws DIDStoreException if an error occurred when accessing the store */ public RootIdentity loadRootIdentity(String id) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid id"); try { Object value = cache.get(Key.forRootIdentity(id), new Callable<Object>() { @Override public Object call() throws DIDStoreException { RootIdentity identity = storage.loadRootIdentity(id); if (identity != null) { identity.setMetadata(loadRootIdentityMetadata(id)); return identity; } else { return NULL; } } }); return value == NULL ? null : (RootIdentity)value; } catch (ExecutionException e) { throw new DIDStoreException("Load root identity failed: " + id, e); } } /** * Load the default RootIdentity object from this DIDStore. * * @return the default RootIdentity object, null if the identity exists * @throws DIDStoreException if an error occurred when accessing the store */ public RootIdentity loadRootIdentity() throws DIDStoreException { String id = metadata.getDefaultRootIdentity(); if (id == null || id.isEmpty()) { List<RootIdentity> ids = storage.listRootIdentities(); if (ids.size() != 1) { return null; } else { RootIdentity identity = ids.get(0); identity.setMetadata(loadRootIdentityMetadata(identity.getId())); metadata.setDefaultRootIdentity(identity.getId()); return identity; } } return loadRootIdentity(id); } /** * Check whether the RootIdentity exists in this DIDStore. * * @param id the id of the RootIdentity to be check * @return true if exists else false * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsRootIdentity(String id) throws DIDStoreException { return storage.loadRootIdentity(id) != null; } /** * Export the mnemonic of the specific RootIdentity from this DIDStore. * * @param id the id of the RootIdentity * @param storepass the password for DIDStore * @return the mnemonic string, null if the identity not exists or does * not have mnemonic * @throws DIDStoreException if an error occurred when accessing the store */ protected String exportRootIdentityMnemonic(String id, String storepass) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid id"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); String encryptedMnemonic = storage.loadRootIdentityMnemonic(id); if (encryptedMnemonic != null) return new String(decrypt(encryptedMnemonic, storepass)); else return null; } /** * Check whether the RootIdentity has mnemonic. 
* * @param id the id of the RootIdentity * @return true if exists else false * @throws DIDStoreException if an error occurred when accessing the store */ protected boolean containsRootIdentityMnemonic(String id) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid id"); String encryptedMnemonic = storage.loadRootIdentityMnemonic(id); return encryptedMnemonic != null; } private HDKey loadRootIdentityPrivateKey(String id, String storepass) throws DIDStoreException { try { Object value = cache.get(Key.forRootIdentityPrivateKey(id), new Callable<Object>() { @Override public Object call() throws DIDStorageException { String encryptedKey = storage.loadRootIdentityPrivateKey(id); return encryptedKey != null ? encryptedKey : NULL; } }); if (value != NULL) { byte[] keyData = decrypt((String)value, storepass); return HDKey.deserialize(keyData); } else { return null; } } catch (ExecutionException e) { throw new DIDStoreException("Load root identity private key failed: " + id, e); } } HDKey derive(String id, String path, String storepass) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid identity"); checkArgument(path != null && !path.isEmpty(), "Invalid path"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); HDKey rootPrivateKey = loadRootIdentityPrivateKey(id, storepass); HDKey key = rootPrivateKey.derive(path); rootPrivateKey.wipe(); return key; } /** * Delete the specific RootIdentity object from this store. * * @param id the id of RootIdentity object * @return true if the identity exists and delete successful; false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deleteRootIdentity(String id) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid id"); boolean success = storage.deleteRootIdentity(id); if (success) { if (metadata.getDefaultRootIdentity() != null && metadata.getDefaultRootIdentity().equals(id)) metadata.setDefaultRootIdentity(null); cache.invalidate(Key.forRootIdentity(id)); cache.invalidate(Key.forRootIdentityPrivateKey(id)); } return success; } /** * List all RootIdentity object from this store. * * @return an array of RootIdentity objects * @throws DIDStoreException if an error occurred when accessing the store */ public List<RootIdentity> listRootIdentities() throws DIDStoreException { List<RootIdentity> ids = storage.listRootIdentities(); for (RootIdentity id : ids) { RootIdentity.Metadata metadata = storage.loadRootIdentityMetadata(id.getId()); if (metadata == null) metadata = new RootIdentity.Metadata(); metadata.setId(id.getId()); metadata.attachStore(this); id.setMetadata(metadata); } return Collections.unmodifiableList(ids); } /** * Check whether the this store has RootIdentity objects. * * @return true if the store has RootIdentity objects else false * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsRootIdentities() throws DIDStoreException { return storage.containsRootIdenities(); } /** * Save the RootIdentity metadata to this store. 
* * @param id the id of the RootIdentity object * @param metadata a RootIdentity.Metadata object * @throws DIDStoreException if an error occurred when accessing the store */ protected void storeRootIdentityMetadata(String id, RootIdentity.Metadata metadata) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid id"); checkArgument(metadata != null, "Invalid metadata"); storage.storeRootIdentityMetadata(id, metadata); } /** * Read the RootIdentity metadata from this store. * * @param id the id of the RootIdentity object * @return the RootIdentity.Metadata object * @throws DIDStoreException if an error occurred when accessing the store */ protected RootIdentity.Metadata loadRootIdentityMetadata(String id) throws DIDStoreException { checkArgument(id != null && !id.isEmpty(), "Invalid id"); RootIdentity.Metadata metadata = storage.loadRootIdentityMetadata(id); if (metadata != null) { metadata.setId(id); metadata.attachStore(this); } else { metadata = new RootIdentity.Metadata(id, this); } return metadata; } /** * Save the DID document to this store. * * @param doc the DIDDocument object * @throws DIDStoreException if an error occurred when accessing the store */ public void storeDid(DIDDocument doc) throws DIDStoreException { checkArgument(doc != null, "Invalid doc"); storage.storeDid(doc); if (doc.getStore() != this) { DIDMetadata metadata = loadDidMetadata(doc.getSubject()); doc.getMetadata().merge(metadata); doc.getMetadata().attachStore(this); } storeDidMetadata(doc.getSubject(), doc.getMetadata()); for (VerifiableCredential vc : doc.getCredentials()) storeCredential(vc); cache.put(Key.forDidDocument(doc.getSubject()), doc); } /** * Read the specific DID document from this store. * * @param did the DID to be load * @return the DIDDocument object * @throws DIDStoreException if an error occurred when accessing the store */ public DIDDocument loadDid(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); try { Object value = cache.get(Key.forDidDocument(did), new Callable<Object>() { @Override public Object call() throws DIDStoreException { DIDDocument doc = storage.loadDid(did); if (doc != null) { doc.setMetadata(loadDidMetadata(did)); return doc; } else { return NULL; } } }); return value == NULL ? null : (DIDDocument)value; } catch (ExecutionException e) { throw new DIDStoreException("Load did document failed: " + did, e); } } /** * Read the specific DID document from this store. * * @param did the DID to be load * @return the DIDDocument object * @throws DIDStoreException if an error occurred when accessing the store */ public DIDDocument loadDid(String did) throws DIDStoreException { return loadDid(DID.valueOf(did)); } /** * Check if this store contains the specific DID. * * @param did the specified DID * @return true if the store contains this DID, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsDid(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); return loadDid(did) != null; } /** * Check if this store contains the specific DID. * * @param did the specified DID * @return true if the store contains this DID, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsDid(String did) throws DIDStoreException { return containsDid(DID.valueOf(did)); } /** * Save the DID Metadata to this store. 
* * @param did the owner of the metadata object * @param metadata the DID metadata object * @throws DIDStoreException if an error occurred when accessing the store */ protected void storeDidMetadata(DID did, DIDMetadata metadata) throws DIDStoreException { checkArgument(did != null, "Invalid did"); checkArgument(metadata != null, "Invalid metadata"); storage.storeDidMetadata(did, metadata); metadata.attachStore(this); cache.put(Key.forDidMetadata(did), metadata); } /** * Read the specific DID metadata object for this store. * * @param did a DID to be load * @return the DID metadata object * @throws DIDStoreException if an error occurred when accessing the store */ protected DIDMetadata loadDidMetadata(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); try { Object value = cache.get(Key.forDidMetadata(did) , new Callable<Object>() { @Override public Object call() throws DIDStorageException { DIDMetadata metadata = storage.loadDidMetadata(did); if (metadata != null) { metadata.setDid(did); metadata.attachStore(DIDStore.this); } else { metadata = new DIDMetadata(did, DIDStore.this); } return metadata; } }); return value == NULL ? null : (DIDMetadata)value; } catch (ExecutionException e) { throw new DIDStoreException("Load did metadata failed: " + did, e); } } /** * Delete the specific DID from this store. * * <p> * When delete the DID, all private keys, credentials that owned by this * DID will also be deleted. * </p> * * @param did the DID to be delete * @return true if the DID exist and deleted successful, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deleteDid(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); boolean success = storage.deleteDid(did); if (success) { cache.invalidate(Key.forDidDocument(did)); cache.invalidate(Key.forDidMetadata(did)); // invalidate every thing belongs to this did for (Key key : cache.asMap().keySet()) { if (key.id instanceof DIDURL) { DIDURL id = (DIDURL)key.id; if (id.getDid().equals(did)) cache.invalidate(key); } } } return success; } /** * Delete the specific DID from this store. * * <p> * When delete the DID, all private keys, credentials that owned by this * DID will also be deleted. * </p> * * @param did the DID to be delete * @return true if the DID exist and deleted successful, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deleteDid(String did) throws DIDStoreException { return deleteDid(DID.valueOf(did)); } /** * List all DIDs from this store. * * @return an array of DIDs * @throws DIDStoreException if an error occurred when accessing the store */ public List<DID> listDids() throws DIDStoreException { List<DID> dids = storage.listDids(); for (DID did : dids) { DIDMetadata metadata = storage.loadDidMetadata(did); if (metadata == null) metadata = new DIDMetadata(); metadata.setDid(did); metadata.attachStore(this); did.setMetadata(metadata); } return Collections.unmodifiableList(dids); } /** * List all DIDs that satisfy the specified filter from this store. 
* * @param filter a DID filter * @return an array of DIDs * @throws DIDStoreException if an error occurred when accessing the store */ public List<DID> listDids(DIDFilter filter) throws DIDStoreException { List<DID> dids = listDids(); if (filter != null) { List<DID> dest = new ArrayList<DID>(); for (DID did : dids) { if (filter.accept(did)) dest.add(did); } dids = dest; } return Collections.unmodifiableList(dids); } /** * Save the credential object to this store. * * @param credential a VerifiableCredential object * @throws DIDStoreException if an error occurred when accessing the store */ public void storeCredential(VerifiableCredential credential) throws DIDStoreException { checkArgument(credential != null, "Invalid credential"); storage.storeCredential(credential); if (credential.getMetadata().getStore() != this) { CredentialMetadata metadata = loadCredentialMetadata(credential.getId()); credential.getMetadata().merge(metadata); credential.getMetadata().attachStore(this); } storeCredentialMetadata(credential.getId(), credential.getMetadata()); cache.put(Key.forCredential(credential.getId()), credential); } /** * Read the specific credential object from this store. * * @param id the credential id * @return the VerifiableCredential object * @throws DIDStoreException if an error occurred when accessing the store */ public VerifiableCredential loadCredential(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid credential id"); try { Object value = cache.get(Key.forCredential(id), new Callable<Object>() { @Override public Object call() throws DIDStoreException { VerifiableCredential vc = storage.loadCredential(id); if (vc != null) { vc.setMetadata(loadCredentialMetadata(id)); return vc; } else { return NULL; } } }); return value == NULL ? null : (VerifiableCredential)value; } catch (ExecutionException e) { throw new DIDStoreException("Load credential failed: " + id, e); } } /** * Read the specific credential object from this store. * * @param id the credential id * @return the VerifiableCredential object * @throws DIDStoreException if an error occurred when accessing the store */ public VerifiableCredential loadCredential(String id) throws DIDStoreException { return loadCredential(DIDURL.valueOf(id)); } /** * Check whether this store contains the specific credential. * * @param id the credential id * @return true if the store contains this credential, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsCredential(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid credential id"); return loadCredential(id) != null; } /** * Check whether this store contains the specific credential. * * @param id the credential id * @return true if the store contains this credential, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsCredential(String id) throws DIDStoreException { return containsCredential(DIDURL.valueOf(id)); } /** * Check whether this store contains the credentials that owned by the * specific DID. 
* * @param did the credential owner's DID * @return true if the store contains this credential, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsCredentials(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); return storage.containsCredentials(did); } /** * Check whether this store contains the credentials that owned by the * specific DID. * * @param did the credential owner's DID * @return true if the store contains this credential, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsCredentials(String did) throws DIDStoreException { return containsCredentials(DID.valueOf(did)); } /** * Save the credential's metadata to this store. * * @param id the credential id * @param metadata the credential metadata object * @throws DIDStoreException if an error occurred when accessing the store */ protected void storeCredentialMetadata(DIDURL id, CredentialMetadata metadata) throws DIDStoreException { checkArgument(id != null, "Invalid credential id"); checkArgument(metadata != null, "Invalid credential metadata"); storage.storeCredentialMetadata(id, metadata); metadata.attachStore(this); cache.put(Key.forCredentialMetadata(id), metadata); } /** * Read the credential's metadata from this store. * * @param id the credential id * @return the credential metadata object * @throws DIDStoreException if an error occurred when accessing the store */ protected CredentialMetadata loadCredentialMetadata(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid credential id"); try { Object value = cache.get(Key.forCredentialMetadata(id), new Callable<Object>() { @Override public Object call() throws DIDStorageException { CredentialMetadata metadata = storage.loadCredentialMetadata(id); if (metadata != null) { metadata.setId(id); metadata.attachStore(DIDStore.this); } else { metadata = new CredentialMetadata(id, DIDStore.this); } return metadata; } }); return value == NULL ? null : (CredentialMetadata)value; } catch (ExecutionException e) { throw new DIDStoreException("Load Credential metadata failed: " + id, e); } } /** * Delete the specific credential from this store. * * @param id the credential id to be delete * @return true if the credential exist and deleted successful, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deleteCredential(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid credential id"); boolean success = storage.deleteCredential(id); if (success) { cache.invalidate(Key.forCredential(id)); cache.invalidate(Key.forCredentialMetadata(id)); } return success; } /** * Delete the specific credential from this store. * * @param id the credential id to be delete * @return true if the credential exist and deleted successful, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deleteCredential(String id) throws DIDStoreException { return deleteCredential(DIDURL.valueOf(id)); } /** * List all credentials that owned the specific DID. 
* * @param did the credential owner's DID * @return an array of DIDURL denoting the credentials * @throws DIDStoreException if an error occurred when accessing the store */ public List<DIDURL> listCredentials(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); List<DIDURL> ids = storage.listCredentials(did); for (DIDURL id : ids) { CredentialMetadata metadata = storage.loadCredentialMetadata(id); if (metadata == null) metadata = new CredentialMetadata(); metadata.setId(id); metadata.attachStore(this); id.setMetadata(metadata); } return Collections.unmodifiableList(ids); } /** * List all credentials that owned the specific DID. * * @param did the credential owner's DID * @return an array of DIDURL denoting the credentials * @throws DIDStoreException if an error occurred when accessing the store */ public List<DIDURL> listCredentials(String did) throws DIDStoreException { return listCredentials(DID.valueOf(did)); } /** * List all credentials that owned the specific DID and satisfy the * specified filter from this store. * * @param did the credential owner's DID * @param filter a credential filter * @return an array of DIDURL denoting the credentials * @throws DIDStoreException if an error occurred when accessing the store */ public List<DIDURL> listCredentials(DID did, CredentialFilter filter) throws DIDStoreException { checkArgument(did != null, "Invalid did"); List<DIDURL> vcs = listCredentials(did); if (filter != null) { List<DIDURL> dest = new ArrayList<DIDURL>(); for (DIDURL id : vcs) { if (filter.accept(id)) dest.add(id); } vcs = dest; } return Collections.unmodifiableList(vcs); } /** * List all credentials that owned the specific DID and satisfy the * specified filter from this store. * * @param did the credential owner's DID * @param filter a credential filter * @return an array of DIDURL denoting the credentials * @throws DIDStoreException if an error occurred when accessing the store */ public List<DIDURL> listCredentials(String did, CredentialFilter filter) throws DIDStoreException { return listCredentials(DID.valueOf(did), filter); } /** * Save the DID's lazy private key string to the store. * * @param id the private key id * @throws DIDStoreException if an error occurred when accessing the store */ private void storeLazyPrivateKey(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid private key id"); storage.storePrivateKey(id, DID_LAZY_PRIVATEKEY); cache.invalidate(Key.forDidPrivateKey(id)); } /** * Save the DID's private key to the store, the private key will be encrypt * using the store password. * * @param id the private key id * @param privateKey the binary extended private key * @param storepass the password for this store * @throws DIDStoreException if an error occurred when accessing the store */ public void storePrivateKey(DIDURL id, byte[] privateKey, String storepass) throws DIDStoreException { checkArgument(id != null, "Invalid private key id"); checkArgument(privateKey != null && privateKey.length != 0, "Invalid private key"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); String encryptedKey = encrypt(privateKey, storepass); storage.storePrivateKey(id, encryptedKey); cache.put(Key.forDidPrivateKey(id), encryptedKey); } /** * Save the DID's private key to the store, the private key will be encrypt * using the store password. 
* * @param id the private key id * @param privateKey the binary extended private key * @param storepass the password for this store * @throws DIDStoreException if an error occurred when accessing the store */ public void storePrivateKey(String id, byte[] privateKey, String storepass) throws DIDStoreException { storePrivateKey(DIDURL.valueOf(id), privateKey, storepass); } private String loadPrivateKey(DIDURL id) throws DIDStoreException { try { Object value = cache.get(Key.forDidPrivateKey(id), new Callable<Object>() { @Override public Object call() throws DIDStoreException { String encryptedKey = storage.loadPrivateKey(id); return encryptedKey != null ? encryptedKey : NULL; } }); return value == NULL ? null : (String)value; } catch (ExecutionException e) { throw new DIDStoreException("Load did private key failed: " + id, e); } } byte[] loadPrivateKey(DIDURL id, String storepass) throws DIDStoreException { checkArgument(id != null, "Invalid private key id"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); String encryptedKey = loadPrivateKey(id); if (encryptedKey == null || encryptedKey.isEmpty()) { return null; } else if (encryptedKey.equals(DID_LAZY_PRIVATEKEY)) { // fail-back to lazy private key generation return RootIdentity.lazyCreateDidPrivateKey(id, this, storepass); } else { return decrypt(encryptedKey, storepass); } } /** * Check if this store contains the specific private key. * * @param id the key id * @return true if this store contains the specific key, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsPrivateKey(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid private key id"); String privatekey = loadPrivateKey(id); return privatekey != null && !privatekey.isEmpty(); } /** * Check if this store contains the specific private key. * * @param id the key id * @return true if this store contains the specific key, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsPrivateKey(String id) throws DIDStoreException { return containsPrivateKey(DIDURL.valueOf(id)); } /** * Check if this store contains the private keys that owned by the * specific DID. * * @param did the owner's DID * @return true if this store contains the private keys owned by the the * DID, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsPrivateKeys(DID did) throws DIDStoreException { checkArgument(did != null, "Invalid did"); return storage.containsPrivateKeys(did); } /** * Check if this store contains the private keys that owned by the * specific DID. * * @param did the owner's DID * @return true if this store contains the private keys owned by the the * DID, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean containsPrivateKeys(String did) throws DIDStoreException { return containsPrivateKeys(DID.valueOf(did)); } /** * Delete the specific private key from this store. 
* * @param id the key id * @return true if the private key exist and deleted successful, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deletePrivateKey(DIDURL id) throws DIDStoreException { checkArgument(id != null, "Invalid private key id"); boolean success = storage.deletePrivateKey(id); if (success) cache.invalidate(Key.forDidPrivateKey(id)); return success; } /** * Delete the specific private key from this store. * * @param id the key id * @return true if the private key exist and deleted successful, false otherwise * @throws DIDStoreException if an error occurred when accessing the store */ public boolean deletePrivateKey(String id) throws DIDStoreException { return deletePrivateKey(DIDURL.valueOf(id)); } /** * Sign the digest using the specified key. * * @param id the key id * @param storepass the password for this store * @param digest the binary digest in bytes array * @return the base64(URL safe) encoded signature string * @throws DIDStoreException if an error occurred when accessing the store */ protected String sign(DIDURL id, String storepass, byte[] digest) throws DIDStoreException { checkArgument(id != null, "Invalid private key id"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); checkArgument(digest != null && digest.length > 0, "Invalid digest"); HDKey key = HDKey.deserialize(loadPrivateKey(id, storepass)); byte[] sig = EcdsaSigner.sign(key.getPrivateKeyBytes(), digest); key.wipe(); return Base64.encodeToString(sig, Base64.URL_SAFE | Base64.NO_PADDING | Base64.NO_WRAP); } /** * Change the password for this store. * * @param oldPassword the old password * @param newPassword the new password * @throws DIDStoreException if an error occurred when accessing the store */ public void changePassword(String oldPassword, String newPassword) throws DIDStoreException { checkArgument(oldPassword != null && !oldPassword.isEmpty(), "Invalid old password"); checkArgument(newPassword != null && !newPassword.isEmpty(), "Invalid new password"); storage.changePassword((data) -> { return DIDStore.reEncrypt(data, oldPassword, newPassword); }); metadata.setFingerprint(calcFingerprint(newPassword)); cache.invalidateAll(); } /** * Internal DID synchronize implementation. * * @param did the DID to be synchronize * @param handle an application defined handle to process the conflict * between the chain copy and the local copy * @param rootIdentity the related root identity or null if customized DID * @param index the derive index * @return true if synchronized success, false if not synchronized * @throws DIDResolveException if an error occurred when resolving DIDs * @throws DIDStoreException if an error occurred when accessing the store */ protected synchronized boolean synchronize(DID did, ConflictHandle handle, String rootIdentity, int index) throws DIDResolveException, DIDStoreException { log.info("Synchronize {}/{}...", did.toString(), (index >= 0 ? Integer.toString(index) : "n/a")); DIDDocument resolvedDoc = did.resolve(); if (resolvedDoc == null) { log.info("Synchronize {}/{}...not exists", did.toString(), (index >= 0 ? Integer.toString(index) : "n/a")); return false; } boolean isCustomizedDid = resolvedDoc.isCustomizedDid(); log.debug("Synchronize {}/{}..exists, got the on-chain copy.", did.toString(), (index >= 0 ? 
Integer.toString(index) : "n/a")); DIDDocument finalDoc = resolvedDoc; DIDDocument localDoc = storage.loadDid(did); if (localDoc != null) { // Update metadata off-store, then store back localDoc.setMetadata(storage.loadDidMetadata(did)); localDoc.getMetadata().detachStore(); // localdoc == resolveddoc || localdoc not modified since last publish if (localDoc.getSignature().equals(resolvedDoc.getSignature()) || (localDoc.getMetadata().getSignature() != null && localDoc.getProof().getSignature().equals( localDoc.getMetadata().getSignature()))) { finalDoc.getMetadata().merge(localDoc.getMetadata()); } else { log.debug("{} on-chain copy conflict with local copy.", did.toString()); if (handle == null) handle = defaultConflictHandle; // Local copy was modified finalDoc = handle.merge(resolvedDoc, localDoc); if (finalDoc == null || !finalDoc.getSubject().equals(did)) { localDoc.getMetadata().attachStore(this); log.error("Conflict handle merge the DIDDocument error."); throw new DIDStoreException("deal with local modification error."); } else { log.debug("Conflict handle return the final copy."); } } } DIDMetadata metadata = finalDoc.getMetadata(); metadata.setPublishTime(resolvedDoc.getMetadata().getPublishTime()); metadata.setSignature(resolvedDoc.getProof().getSignature()); if (resolvedDoc.getMetadata().isDeactivated()) metadata.setDeactivated(true); if (!isCustomizedDid && rootIdentity != null) { metadata.setRootIdentityId(rootIdentity); metadata.setIndex(index); } metadata.attachStore(this); finalDoc.setMetadata(metadata); if (localDoc != null) localDoc.getMetadata().attachStore(this); // Invalidate current cached items cache.invalidate(Key.forDidDocument(did)); cache.invalidate(Key.forDidMetadata(did)); storage.storeDid(finalDoc); storage.storeDidMetadata(did, metadata); for (VerifiableCredential vc : finalDoc.getCredentials()) storage.storeCredential(vc); if (!isCustomizedDid && rootIdentity != null) storeLazyPrivateKey(finalDoc.getDefaultPublicKeyId()); List<DIDURL> vcIds = storage.listCredentials(did); for (DIDURL vcId : vcIds) { VerifiableCredential localVc = storage.loadCredential(vcId); VerifiableCredential resolvedVc = VerifiableCredential.resolve(vcId, localVc.getIssuer()); if (resolvedVc == null) continue; resolvedVc.getMetadata().merge(localVc.getMetadata()); cache.invalidate(Key.forCredential(vcId)); cache.invalidate(Key.forCredentialMetadata(vcId)); storage.storeCredential(resolvedVc); storage.storeCredentialMetadata(vcId, resolvedVc.getMetadata()); } return true; } /** * Synchronize all RootIdentities, DIDs and credentials in this store. * * <p> * If the ConflictHandle is not set by the developers, this method will * use the default ConflictHandle implementation: if conflict between * the chain copy and the local copy, it will keep the local copy, but * update the local metadata with the chain copy. 
* </p> * * @param handle an application defined handle to process the conflict * between the chain copy and the local copy * @throws DIDResolveException if an error occurred when resolving DIDs * @throws DIDStoreException if an error occurred when accessing the store */ public void synchronize(ConflictHandle handle) throws DIDResolveException, DIDStoreException { if (handle == null) handle = defaultConflictHandle; List<RootIdentity> identities = listRootIdentities(); for (RootIdentity identity : identities) { identity.synchronize(handle); } List<DID> dids = storage.listDids(); for (DID did : dids) { DIDDocument doc = storage.loadDid(did); if (doc.isCustomizedDid()) synchronize(did, handle, null, -1); } } /** * Synchronize all RootIdentities, DIDs and credentials in this store. * * @throws DIDResolveException if an error occurred when resolving DIDs * @throws DIDStoreException if an error occurred when accessing the store */ public void synchronize() throws DIDResolveException, DIDStoreException { synchronize((ConflictHandle)null); } /** * Synchronize all RootIdentities, DIDs and credentials in * asynchronous mode. * * <p> * If the ConflictHandle is not set by the developers, this method will * use the default ConflictHandle implementation: if conflict between * the chain copy and the local copy, it will keep the local copy, but * update the local metadata with the chain copy. * </p> * * @param handle an application defined handle to process the conflict * between the chain copy and the local copy * @return a new CompletableStage */ public CompletableFuture<Void> synchronizeAsync(ConflictHandle handle) { CompletableFuture<Void> future = CompletableFuture.runAsync(() -> { try { synchronize(handle); } catch (DIDResolveException | DIDStoreException e) { throw new CompletionException(e); } }); return future; } /** * Synchronize all RootIdentities, DIDs and credentials in * asynchronous mode. * * @return a new CompletableStage */ public CompletableFuture<Void> synchronizeAsync() { return synchronizeAsync((ConflictHandle)null); } /** * Synchronize specific DID in this store. * * <p> * If the ConflictHandle is not set by the developers, this method will * use the default ConflictHandle implementation: if conflict between * the chain copy and the local copy, it will keep the local copy, but * update the local metadata with the chain copy. * </p> * * @param did the DID to be synchronize * @param handle an application defined handle to process the conflict * between the chain copy and the local copy * @return true if synchronized success, false if not synchronized * * @throws DIDResolveException if an error occurred when resolving DIDs * @throws DIDStoreException if an error occurred when accessing the store */ public boolean synchronize(DID did, ConflictHandle handle) throws DIDResolveException, DIDStoreException { checkArgument(did != null, "Invalid DID"); DIDDocument doc = loadDid(did); if (doc == null) return false; String rootIdentity = null; int index = -1; if (!doc.isCustomizedDid()) { rootIdentity = doc.getMetadata().getRootIdentityId(); index = doc.getMetadata().getIndex(); } return synchronize(did, handle, rootIdentity, index); } /** * Synchronize specific DID in this store. 
	 *
	 * @param did the DID to be synchronized
	 * @return true if the synchronization succeeded, false otherwise
	 *
	 * @throws DIDResolveException if an error occurred when resolving DIDs
	 * @throws DIDStoreException if an error occurred when accessing the store
	 */
	public boolean synchronize(DID did)
			throws DIDResolveException, DIDStoreException {
		return synchronize(did, null);
	}

	/**
	 * Synchronize a specific DID in asynchronous mode.
	 *
	 * <p>
	 * If the ConflictHandle is not set by the developer, this method will
	 * use the default ConflictHandle implementation: if there is a conflict
	 * between the chain copy and the local copy, it keeps the local copy but
	 * updates the local metadata with the chain copy.
	 * </p>
	 *
	 * @param did the DID to be synchronized
	 * @param handle an application-defined handle to process conflicts
	 *        between the chain copy and the local copy
	 * @return a new CompletableFuture
	 */
	public CompletableFuture<Void> synchronizeAsync(DID did, ConflictHandle handle) {
		CompletableFuture<Void> future = CompletableFuture.runAsync(() -> {
			try {
				synchronize(did, handle);
			} catch (DIDResolveException | DIDStoreException e) {
				throw new CompletionException(e);
			}
		});

		return future;
	}

	/**
	 * Synchronize a specific DID in asynchronous mode.
	 *
	 * @param did the DID to be synchronized
	 * @return a new CompletableFuture
	 */
	public CompletableFuture<Void> synchronizeAsync(DID did) {
		return synchronizeAsync(did, null);
	}

	@JsonPropertyOrder({ "type", "id", "document", "credential", "privateKey",
			"created", "fingerprint" })
	@JsonInclude(Include.NON_NULL)
	static class DIDExport extends DIDEntity<DIDExport> {
		@JsonProperty("type")
		private String type;
		@JsonProperty("id")
		private DID id;
		@JsonProperty("document")
		private Document document;
		@JsonProperty("credential")
		private List<Credential> credentials;
		@JsonProperty("privateKey")
		private List<PrivateKey> privatekeys;
		@JsonProperty("created")
		private Date created;
		@JsonProperty("fingerprint")
		private String fingerprint;

		@JsonPropertyOrder({ "content", "metadata" })
		@JsonInclude(Include.NON_NULL)
		static class Document {
			@JsonProperty("content")
			private DIDDocument content;
			@JsonProperty("metadata")
			private DIDMetadata metadata;

			@JsonCreator
			protected Document(@JsonProperty(value = "content", required = true) DIDDocument content,
					@JsonProperty(value = "metadata") DIDMetadata metadata) {
				this.content = content;
				this.metadata = metadata;
			}
		}

		@JsonPropertyOrder({ "content", "metadata" })
		@JsonInclude(Include.NON_NULL)
		static class Credential {
			@JsonProperty("content")
			private VerifiableCredential content;
			@JsonProperty("metadata")
			private CredentialMetadata metadata;

			@JsonCreator
			protected Credential(@JsonProperty(value = "content", required = true) VerifiableCredential content,
					@JsonProperty(value = "metadata") CredentialMetadata metadata) {
				this.content = content;
				this.metadata = metadata;
			}
		}

		@JsonPropertyOrder({ "id", "key" })
		static class PrivateKey {
			@JsonProperty("id")
			private DIDURL id;
			@JsonProperty("key")
			private String key;

			@JsonCreator
			protected PrivateKey(@JsonProperty(value = "id", required = true) DIDURL id) {
				this.id = id;
			}

			public DIDURL getId() {
				return id;
			}

			public void setId(DIDURL id) {
				this.id = id;
			}

			public String getKey(String exportpass, String storepass)
					throws DIDStoreException {
				return reEncrypt(key, exportpass, storepass);
			}

			public void setKey(String key, String storepass, String exportpass)
					throws DIDStoreException {
				this.key = reEncrypt(key, storepass, exportpass);
			}
		}

		@JsonCreator
		protected DIDExport(@JsonProperty(value = "type", required = true)
String type, @JsonProperty(value = "id", required = true) DID id) { if (type == null) throw new IllegalArgumentException("Invalid export type"); this.type = type; this.id = id; } public DID getId() { return id; } public DIDDocument getDocument() { return document.content; } public void setDocument(DIDDocument doc) { this.document = new Document(doc, doc.getMetadata().isEmpty() ? null : doc.getMetadata()); } public List<VerifiableCredential> getCredentials() { if (credentials == null) return Collections.emptyList(); List<VerifiableCredential> vcs = new ArrayList<VerifiableCredential>(); for (Credential cred : credentials) vcs.add(cred.content); return vcs; } public void addCredential(VerifiableCredential credential) { if (this.credentials == null) this.credentials = new ArrayList<Credential>(); this.credentials.add(new Credential(credential, credential.getMetadata().isEmpty() ? null : credential.getMetadata())); } public List<PrivateKey> getPrivateKeys() { return privatekeys != null ? privatekeys : Collections.emptyList(); } public void addPrivatekey(DIDURL id, String privatekey, String storepass, String exportpass) throws DIDStoreException { if (this.privatekeys == null) this.privatekeys = new ArrayList<PrivateKey>(); PrivateKey sk = new PrivateKey(id); sk.setKey(privatekey, storepass, exportpass); this.privatekeys.add(sk); } private String calculateFingerprint(String exportpass) { SHA256Digest sha256 = new SHA256Digest(); byte[] bytes = exportpass.getBytes(); sha256.update(bytes, 0, bytes.length); bytes = type.getBytes(); sha256.update(bytes, 0, bytes.length); bytes = id.toString().getBytes(); sha256.update(bytes, 0, bytes.length); bytes = document.content.toString(true).getBytes(); sha256.update(bytes, 0, bytes.length); if (document.metadata != null) { bytes = document.metadata.toString(true).getBytes(); sha256.update(bytes, 0, bytes.length); } if (credentials != null && credentials.size() > 0) { for (Credential cred : credentials) { bytes = cred.content.toString(true).getBytes(); sha256.update(bytes, 0, bytes.length); if (cred.metadata != null) { bytes = cred.metadata.toString(true).getBytes(); sha256.update(bytes, 0, bytes.length); } } } if (privatekeys != null && privatekeys.size() > 0) { for (PrivateKey sk : privatekeys) { bytes = sk.id.toString().getBytes(); sha256.update(bytes, 0, bytes.length); bytes = sk.key.getBytes(); sha256.update(bytes, 0, bytes.length); } } bytes = dateFormat.format(created).getBytes(); sha256.update(bytes, 0, bytes.length); byte digest[] = new byte[32]; sha256.doFinal(digest, 0); return Base64.encodeToString(digest, Base64.URL_SAFE | Base64.NO_PADDING | Base64.NO_WRAP); } public DIDExport seal(String exportpass) { Calendar now = Calendar.getInstance(); now.set(Calendar.MILLISECOND, 0); this.created = now.getTime(); fingerprint = calculateFingerprint(exportpass); return this; } public void verify(String exportpass) throws MalformedExportDataException { if (!fingerprint.equals(calculateFingerprint(exportpass))) throw new MalformedExportDataException( "Invalid export data, fingerprint mismatch."); } @Override protected void sanitize() throws MalformedExportDataException { if (type == null || !type.equals(DID_EXPORT)) throw new MalformedExportDataException( "Invalid export data, unknown type."); if (created == null) throw new MalformedExportDataException( "Invalid export data, missing created time."); if (id == null) throw new MalformedExportDataException( "Invalid export data, missing id."); if (document == null || document.content == null) throw new 
					MalformedExportDataException(
						"Invalid export data, missing document.");

			document.content.setMetadata(document.metadata);

			if (credentials != null) {
				for (Credential cred : credentials) {
					if (cred == null || cred.content == null)
						throw new MalformedExportDataException(
								"Invalid export data, invalid credential.");

					cred.content.setMetadata(cred.metadata);
				}
			}

			if (privatekeys != null) {
				for (PrivateKey sk : privatekeys) {
					if (sk == null || sk.id == null || sk.key == null || sk.key.isEmpty())
						throw new MalformedExportDataException(
								"Invalid export data, invalid privatekey.");
				}
			}

			if (fingerprint == null || fingerprint.isEmpty())
				throw new MalformedExportDataException(
						"Invalid export data, missing fingerprint.");
		}
	}

	private DIDExport exportDid(DID did, String password, String storepass)
			throws DIDStoreException, IOException {
		// All objects should be loaded directly from storage,
		// to avoid affecting the cached objects.
		DIDDocument doc = storage.loadDid(did);
		if (doc == null)
			throw new DIDStoreException("Export DID " + did + " failed, the DID does not exist.");

		doc.setMetadata(storage.loadDidMetadata(did));

		log.debug("Exporting {}...", did.toString());

		DIDExport de = new DIDExport(DID_EXPORT, did);
		de.setDocument(doc);

		if (storage.containsCredentials(did)) {
			List<DIDURL> ids = new ArrayList<DIDURL>(listCredentials(did));
			Collections.sort(ids);
			for (DIDURL id : ids) {
				log.debug("Exporting credential {}...", id.toString());

				VerifiableCredential vc = storage.loadCredential(id);
				vc.setMetadata(storage.loadCredentialMetadata(id));
				de.addCredential(vc);
			}
		}

		if (storage.containsPrivateKeys(did)) {
			List<PublicKey> pks = doc.getPublicKeys();
			for (PublicKey pk : pks) {
				if (!pk.getController().equals(did))
					continue;

				DIDURL id = pk.getId();
				String key = storage.loadPrivateKey(id);
				if (key != null) {
					log.debug("Exporting private key {}...", id.toString());
					de.addPrivatekey(id, key, storepass, password);
				}
			}
		}

		return de.seal(password);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param out the output stream that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(DID did, OutputStream out, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(did != null, "Invalid did");
		checkArgument(out != null, "Invalid output stream");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		exportDid(did, password, storepass).serialize(out, true);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param out the output stream that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(String did, OutputStream out, String password,
			String storepass) throws DIDStoreException, IOException {
		exportDid(DID.valueOf(did), out, password, storepass);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param out the writer object that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(DID did, Writer out, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(did != null, "Invalid did");
		checkArgument(out != null, "Invalid output writer");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		exportDid(did, password, storepass).serialize(out, true);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param out the writer object that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(String did, Writer out, String password,
			String storepass) throws DIDStoreException, IOException {
		exportDid(DID.valueOf(did), out, password, storepass);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param file the File object that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(DID did, File file, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(did != null, "Invalid did");
		checkArgument(file != null, "Invalid output file");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		exportDid(did, password, storepass).serialize(file, true);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param file the File object that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(String did, File file, String password,
			String storepass) throws DIDStoreException, IOException {
		exportDid(DID.valueOf(did), file, password, storepass);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param file the file name that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(DID did, String file, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(did != null, "Invalid did");
		checkArgument(file != null && !file.isEmpty(), "Invalid output file name");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		exportDid(did, new File(file), password, storepass);
	}

	/**
	 * Export the specific DID with all DID objects related to this DID,
	 * including: document, credentials, private keys and their metadata.
	 *
	 * @param did the DID to be exported
	 * @param file the file name that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportDid(String did, String file, String password,
			String storepass) throws DIDStoreException, IOException {
		exportDid(DID.valueOf(did), file, password, storepass);
	}

	private void importDid(DIDExport de, String password, String storepass)
			throws MalformedExportDataException, DIDStoreException, IOException {
		de.verify(password);

		// Save
		log.debug("Importing document...");
		DIDDocument doc = de.document.content;
		storage.storeDid(doc);
		storage.storeDidMetadata(doc.getSubject(), doc.getMetadata());

		List<VerifiableCredential> vcs = de.getCredentials();
		for (VerifiableCredential vc : vcs) {
			log.debug("Importing credential {}...", vc.getId().toString());
			storage.storeCredential(vc);
			storage.storeCredentialMetadata(vc.getId(), vc.getMetadata());
		}

		List<DIDExport.PrivateKey> sks = de.getPrivateKeys();
		for (DIDExport.PrivateKey sk : sks) {
			log.debug("Importing private key {}...", sk.getId().toString());
			storage.storePrivateKey(sk.getId(), sk.getKey(password, storepass));
		}
	}

	/**
	 * Import a DID and all related DID objects from the exported data to
	 * this store.
	 *
	 * @param in the input stream for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importDid(InputStream in, String password, String storepass)
			throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(in != null, "Invalid input stream");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		DIDExport de;
		try {
			de = DIDExport.parse(in, DIDExport.class);
		} catch (DIDSyntaxException e) {
			throw (MalformedExportDataException)e;
		}
		importDid(de, password, storepass);
	}

	/**
	 * Import a DID and all related DID objects from the exported data to
	 * this store.
	 *
	 * @param in the reader object for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importDid(Reader in, String password, String storepass)
			throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(in != null, "Invalid input reader");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		DIDExport de;
		try {
			de = DIDExport.parse(in, DIDExport.class);
		} catch (DIDSyntaxException e) {
			throw (MalformedExportDataException)e;
		}
		importDid(de, password, storepass);
	}

	/**
	 * Import a DID and all related DID objects from the exported data to
	 * this store.
	 *
	 * @param file the file object for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importDid(File file, String password, String storepass)
			throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(file != null, "Invalid input file");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		DIDExport de;
		try {
			de = DIDExport.parse(file, DIDExport.class);
		} catch (DIDSyntaxException e) {
			throw (MalformedExportDataException)e;
		}
		importDid(de, password, storepass);
	}

	/**
	 * Import a DID and all related DID objects from the exported data to
	 * this store.
	 *
	 * @param file the file name for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importDid(String file, String password, String storepass)
			throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(file != null, "Invalid input file name");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid store password");

		importDid(new File(file), password, storepass);
	}

	@JsonPropertyOrder({ "type", "mnemonic", "privateKey", "publicKey",
			"index", "default", "created", "fingerprint" })
	@JsonInclude(Include.NON_NULL)
	static class RootIdentityExport extends DIDEntity<RootIdentityExport> {
		@JsonProperty("type")
		private String type;
		@JsonProperty("mnemonic")
		private String mnemonic;
		@JsonProperty("privateKey")
		private String privateKey;
		@JsonProperty("publicKey")
		private String publicKey;
		@JsonProperty("index")
		@JsonSerialize(using = ToStringSerializer.class)
		private int index;
		@JsonProperty("default")
		@JsonSerialize(using = ToStringSerializer.class)
		@JsonInclude(Include.NON_NULL)
		private Boolean isDefault;
		@JsonProperty("created")
		private Date created;
		@JsonProperty("fingerprint")
		private String fingerprint;

		@JsonCreator
		protected RootIdentityExport(@JsonProperty(value = "type", required = true) String type) {
			if (type == null)
				throw new IllegalArgumentException("Invalid export type");

			this.type = type;
		}

		public String getMnemonic(String exportpass, String storepass)
				throws DIDStoreException {
			return mnemonic == null ? null : reEncrypt(mnemonic, exportpass, storepass);
		}

		public void setMnemonic(String mnemonic, String storepass, String exportpass)
				throws DIDStoreException {
			this.mnemonic = reEncrypt(mnemonic, storepass, exportpass);
		}

		public String getPrivateKey(String exportpass, String storepass)
				throws DIDStoreException {
			return reEncrypt(privateKey, exportpass, storepass);
		}

		public void setPrivateKey(String privateKey, String storepass, String exportpass)
				throws DIDStoreException {
			this.privateKey = reEncrypt(privateKey, storepass, exportpass);
		}

		public String getPublicKey() {
			return publicKey;
		}

		public void setPubkey(String publicKey) {
			this.publicKey = publicKey;
		}

		public int getIndex() {
			return index;
		}

		public void setIndex(int index) {
			this.index = index;
		}

		public boolean isDefault() {
			return isDefault == null ?
false : isDefault;
		}

		public void setDefault() {
			isDefault = Boolean.valueOf(true);
		}

		private String calculateFingerprint(String exportpass) {
			SHA256Digest sha256 = new SHA256Digest();
			byte[] bytes = exportpass.getBytes();
			sha256.update(bytes, 0, bytes.length);

			bytes = type.getBytes();
			sha256.update(bytes, 0, bytes.length);

			if (mnemonic != null) {
				bytes = mnemonic.getBytes();
				sha256.update(bytes, 0, bytes.length);
			}

			bytes = privateKey.getBytes();
			sha256.update(bytes, 0, bytes.length);

			bytes = publicKey.getBytes();
			sha256.update(bytes, 0, bytes.length);

			bytes = Integer.toString(index).getBytes();
			sha256.update(bytes, 0, bytes.length);

			bytes = Boolean.toString(isDefault()).getBytes();
			sha256.update(bytes, 0, bytes.length);

			bytes = dateFormat.format(created).getBytes();
			sha256.update(bytes, 0, bytes.length);

			byte[] digest = new byte[32];
			sha256.doFinal(digest, 0);
			return Base64.encodeToString(digest,
					Base64.URL_SAFE | Base64.NO_PADDING | Base64.NO_WRAP);
		}

		public RootIdentityExport seal(String exportpass) {
			Calendar now = Calendar.getInstance();
			now.set(Calendar.MILLISECOND, 0);
			this.created = now.getTime();
			this.fingerprint = calculateFingerprint(exportpass);
			return this;
		}

		public void verify(String exportpass) throws MalformedExportDataException {
			if (!fingerprint.equals(calculateFingerprint(exportpass)))
				throw new MalformedExportDataException(
						"Invalid export data, fingerprint mismatch.");
		}

		@Override
		protected void sanitize() throws MalformedExportDataException {
			if (type == null || !type.equals(DID_EXPORT))
				throw new MalformedExportDataException(
						"Invalid export data, unknown type.");

			if (created == null)
				throw new MalformedExportDataException(
						"Invalid export data, missing created time.");

			if (privateKey == null || privateKey.isEmpty())
				throw new MalformedExportDataException(
						"Invalid export data, missing private key.");

			if (fingerprint == null || fingerprint.isEmpty())
				throw new MalformedExportDataException(
						"Invalid export data, missing fingerprint.");
		}
	}

	private RootIdentityExport exportRootIdentity(String id,
			String password, String storepass) throws DIDStoreException {
		RootIdentityExport rie = new RootIdentityExport(DID_EXPORT);

		// TODO: support multiple named root identities
		String mnemonic = storage.loadRootIdentityMnemonic(id);
		if (mnemonic != null)
			rie.setMnemonic(mnemonic, storepass, password);

		rie.setPrivateKey(storage.loadRootIdentityPrivateKey(id), storepass, password);

		RootIdentity identity = storage.loadRootIdentity(id);
		rie.setPubkey(identity.getPreDerivedPublicKey().serializePublicKeyBase58());
		rie.setIndex(identity.getIndex());

		if (identity.getId().equals(metadata.getDefaultRootIdentity()))
			rie.setDefault();

		return rie.seal(password);
	}

	/**
	 * Export the specific RootIdentity, including: mnemonic, private key,
	 * pre-derived public key, derivation index, metadata...
	 *
	 * @param id the id of the RootIdentity to be exported
	 * @param out the output stream that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportRootIdentity(String id, OutputStream out,
			String password, String storepass) throws DIDStoreException, IOException {
		checkArgument(id != null && !id.isEmpty(), "Invalid identity id");
		checkArgument(out != null, "Invalid output stream");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		exportRootIdentity(id, password, storepass).serialize(out);
	}

	/**
	 * Export the specific RootIdentity, including: mnemonic, private key,
	 * pre-derived public key, derivation index, metadata...
	 *
	 * @param id the id of the RootIdentity to be exported
	 * @param out the writer object that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportRootIdentity(String id, Writer out,
			String password, String storepass) throws DIDStoreException, IOException {
		checkArgument(id != null && !id.isEmpty(), "Invalid identity id");
		checkArgument(out != null, "Invalid output writer");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		exportRootIdentity(id, password, storepass).serialize(out);
	}

	/**
	 * Export the specific RootIdentity, including: mnemonic, private key,
	 * pre-derived public key, derivation index, metadata...
	 *
	 * @param id the id of the RootIdentity to be exported
	 * @param file the file object that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportRootIdentity(String id, File file,
			String password, String storepass) throws DIDStoreException, IOException {
		checkArgument(id != null && !id.isEmpty(), "Invalid identity id");
		checkArgument(file != null, "Invalid output file");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		exportRootIdentity(id, password, storepass).serialize(file);
	}

	/**
	 * Export the specific RootIdentity, including: mnemonic, private key,
	 * pre-derived public key, derivation index, metadata...
	 *
	 * @param id the id of the RootIdentity to be exported
	 * @param file the file name that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportRootIdentity(String id, String file,
			String password, String storepass) throws DIDStoreException, IOException {
		checkArgument(id != null && !id.isEmpty(), "Invalid identity id");
		checkArgument(file != null && !file.isEmpty(), "Invalid output file name");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		exportRootIdentity(id, new File(file), password, storepass);
	}

	private void importRootIdentity(RootIdentityExport rie, String password,
			String storepass) throws MalformedExportDataException, DIDStoreException, IOException {
		rie.verify(password);

		// Save
		String encryptedMnemonic = rie.getMnemonic(password, storepass);
		String encryptedPrivateKey = rie.getPrivateKey(password, storepass);
		String publicKey = rie.getPublicKey();
		HDKey pk = HDKey.deserializeBase58(publicKey);
		String id = RootIdentity.getId(pk.serializePublicKey());

		storage.storeRootIdentity(id, encryptedMnemonic, encryptedPrivateKey,
				publicKey, rie.getIndex());

		if (rie.isDefault() && metadata.getDefaultRootIdentity() == null)
			metadata.setDefaultRootIdentity(id);
	}

	/**
	 * Import a RootIdentity object from the exported data to this store.
	 *
	 * @param in the input stream for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importRootIdentity(InputStream in, String password, String storepass)
			throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(in != null, "Invalid input stream");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		try {
			RootIdentityExport rie = RootIdentityExport.parse(in, RootIdentityExport.class);
			importRootIdentity(rie, password, storepass);
		} catch (DIDSyntaxException e) {
			throw (MalformedExportDataException)e;
		}
	}

	/**
	 * Import a RootIdentity object from the exported data to this store.
* * @param in the reader object for the exported data * @param password the password for the exported data * @param storepass the password for this store * @throws MalformedExportDataException if the exported data is invalid * @throws DIDStoreException if an error occurred when accessing the store * @throws IOException if an IO error occurred when reading the exported data */ public void importRootIdentity(Reader in, String password, String storepass) throws MalformedExportDataException, DIDStoreException, IOException { checkArgument(in != null, "Invalid input reader"); checkArgument(password != null && !password.isEmpty(), "Invalid password"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); try { RootIdentityExport rie = RootIdentityExport.parse(in, RootIdentityExport.class); importRootIdentity(rie, password, storepass); } catch (DIDSyntaxException e) { throw (MalformedExportDataException)e; } } /** * Import a RootIdentity object from the exported data to this store. * * @param file the file object for the exported data * @param password the password for the exported data * @param storepass the password for this store * @throws MalformedExportDataException if the exported data is invalid * @throws DIDStoreException if an error occurred when accessing the store * @throws IOException if an IO error occurred when reading the exported data */ public void importRootIdentity(File file, String password, String storepass) throws MalformedExportDataException, DIDStoreException, IOException { checkArgument(file != null, "Invalid input file"); checkArgument(password != null && !password.isEmpty(), "Invalid password"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); try { RootIdentityExport rie = RootIdentityExport.parse(file, RootIdentityExport.class); importRootIdentity(rie, password, storepass); } catch (DIDSyntaxException e) { throw (MalformedExportDataException)e; } } /** * Import a RootIdentity object from the exported data to this store. * * @param file the file name for the exported data * @param password the password for the exported data * @param storepass the password for this store * @throws MalformedExportDataException if the exported data is invalid * @throws DIDStoreException if an error occurred when accessing the store * @throws IOException if an IO error occurred when reading the exported data */ public void importRootIdentity(String file, String password, String storepass) throws MalformedExportDataException, DIDStoreException, IOException { checkArgument(file != null && !file.isEmpty(), "Invalid input file name"); checkArgument(password != null && !password.isEmpty(), "Invalid password"); checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass"); importRootIdentity(new File(file), password, storepass); } /** * Export all DID objects from this store. 
	 *
	 * @param out the zip output stream that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportStore(ZipOutputStream out, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(out != null, "Invalid zip output stream");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		ZipEntry ze;

		List<RootIdentity> ris = listRootIdentities();
		for (RootIdentity ri : ris) {
			ze = new ZipEntry("rootIdentity-" + ri.getId());
			out.putNextEntry(ze);
			exportRootIdentity(ri.getId(), out, password, storepass);
			out.closeEntry();
		}

		List<DID> dids = listDids();
		for (DID did : dids) {
			ze = new ZipEntry("did-" + did.getMethodSpecificId());
			out.putNextEntry(ze);
			exportDid(did, out, password, storepass);
			out.closeEntry();
		}
	}

	/**
	 * Export all DID objects from this store.
	 *
	 * @param zipFile the zip file that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportStore(File zipFile, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(zipFile != null, "Invalid zip output file");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipFile))) {
			exportStore(out, password, storepass);
		}
	}

	/**
	 * Export all DID objects from this store.
	 *
	 * @param zipFile the zip file name that the data is exported to
	 * @param password the password to encrypt the private keys in the exported data
	 * @param storepass the password for this store
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when writing the exported data
	 */
	public void exportStore(String zipFile, String password,
			String storepass) throws DIDStoreException, IOException {
		checkArgument(zipFile != null && !zipFile.isEmpty(), "Invalid zip output file name");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		exportStore(new File(zipFile), password, storepass);
	}

	/**
	 * Import an exported DIDStore from the exported data to this store.
	 *
	 * @param in the zip input stream for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importStore(ZipInputStream in, String password,
			String storepass) throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(in != null, "Invalid zip input stream");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		String fingerprint = metadata.getFingerprint();
		String currentFingerprint = calcFingerprint(storepass);

		if (fingerprint != null && !currentFingerprint.equals(fingerprint))
			throw new WrongPasswordException("Password mismatched with the previous password.");

		ZipEntry ze;
		while ((ze = in.getNextEntry()) != null) {
			if (ze.getName().startsWith("rootIdentity"))
				importRootIdentity(in, password, storepass);
			else if (ze.getName().startsWith("did"))
				importDid(in, password, storepass);
			else
				log.warn("Skip unknown export entry: " + ze.getName());

			in.closeEntry();
		}

		if (fingerprint == null || fingerprint.isEmpty())
			metadata.setFingerprint(currentFingerprint);
	}

	/**
	 * Import an exported DIDStore from the exported data to this store.
	 *
	 * @param zipFile the ZipFile object for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importStore(File zipFile, String password,
			String storepass) throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(zipFile != null, "Invalid zip input file");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		try (ZipInputStream in = new ZipInputStream(new FileInputStream(zipFile))) {
			importStore(in, password, storepass);
		}
	}

	/**
	 * Import an exported DIDStore from the exported data to this store.
	 *
	 * @param zipFile the zip file name for the exported data
	 * @param password the password for the exported data
	 * @param storepass the password for this store
	 * @throws MalformedExportDataException if the exported data is invalid
	 * @throws DIDStoreException if an error occurred when accessing the store
	 * @throws IOException if an IO error occurred when reading the exported data
	 */
	public void importStore(String zipFile, String password,
			String storepass) throws MalformedExportDataException, DIDStoreException, IOException {
		checkArgument(zipFile != null && !zipFile.isEmpty(), "Invalid zip input file name");
		checkArgument(password != null && !password.isEmpty(), "Invalid password");
		checkArgument(storepass != null && !storepass.isEmpty(), "Invalid storepass");

		importStore(new File(zipFile), password, storepass);
	}
}
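// Editor's note: a minimal usage sketch for the synchronize/export/import APIs
// defined above. The store location and passwords are hypothetical, and
// DIDStore.open(...) is assumed from elsewhere in this SDK (it is not shown in
// this excerpt); this is an illustration, not part of the original source.
class DIDStoreBackupExample {
	public static void main(String[] args) throws Exception {
		DIDStore store = DIDStore.open("/tmp/did-store");	// hypothetical location
		store.synchronize();	// pull the chain copies before taking a backup
		store.exportStore("/tmp/did-backup.zip", "exportpass", "storepass");
		store.importStore("/tmp/did-backup.zip", "exportpass", "storepass");
	}
}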
// GoBuf creates an unsafe reference to a Go byte array for passing it down to C++.
// Note: the C++ side must not retain the pointer past the call, since the Go
// garbage collector may move or free the backing array once b goes out of scope.
func GoBuf(b []byte) C.GoBuf {
	h := (*reflect.SliceHeader)(unsafe.Pointer(&b))
	return C.GoBuf{
		len:  C.ulong(h.Len),
		cap:  C.ulong(h.Cap),
		data: unsafe.Pointer(h.Data),
	}
}
<filename>pattern/src/main/java/net/zhaoxuyang/pattern/template_method/BClass.java
package net.zhaoxuyang.pattern.template_method;

public class BClass extends AbstractClass {

    @Override
    public void start() {
        System.out.println("B.start()");
    }

    @Override
    public void stop() {
        System.out.println("B.stop()");
    }

}
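// Editor's sketch: the AbstractClass that BClass extends is not shown in this
// excerpt; a minimal template-method skeleton consistent with BClass could look
// like the class below (an assumption for illustration, not the original source).
abstract class AbstractClassSketch {

    // The template method fixes the algorithm's order; subclasses supply the steps.
    public final void run() {
        start();
        stop();
    }

    public abstract void start();

    public abstract void stop();
}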
def check_position(self, position):
    """Round position in place and report whether it lies within the grid bounds."""
    position[:] = np.round(position)
    if position[0] < 0 or position[1] < 0:
        return False
    if position[0] > self.shape[0] - 1 or position[1] > self.shape[1] - 1:
        return False
    return True
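# Editor's sketch (hypothetical 5x5 grid; not part of the original source):
# check_position rounds in place and reports whether the point stays on the
# grid, e.g. with self.shape == (5, 5):
#   self.check_position(np.array([2.4, 3.2]))  # rounds to (2., 3.) -> True
#   self.check_position(np.array([2.4, 4.6]))  # rounds to (2., 5.) -> False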
import * as React from 'react'; import { Redirect, RouteComponentProps } from 'react-router'; import { ValueType } from 'react-select/lib/types'; import { AuditParametersType } from 'redux/entities/auditParameters/types'; import { PageType } from 'redux/entities/pages/types'; import { ProjectType } from 'redux/entities/projects/types'; import { ScriptType } from 'redux/entities/scripts/types'; import Badge from 'components/Badge'; import Loader from 'components/Loader'; import MessagePill from 'components/MessagePill'; import Select from 'components/Select'; import dayjs from 'dayjs'; import { FormattedMessage, InjectedIntlProps } from 'react-intl'; import { auditStatus, AuditStatusHistoryType } from 'redux/entities/auditStatusHistories/types'; import { useFetchProjectIfUndefined } from 'redux/entities/projects/useFetchProjectIfUndefined'; import { routeDefinitions } from 'routes'; import { colorUsage, getSpacing } from 'stylesheet'; import AnalyticsBlock from './AnalyticsBlock'; import { Container, PageTitle, PageTitleBlock, ScriptStepBlock, ScriptStepBlockTitle, Title, } from './Audits.style'; import GraphsBlock from './GraphsBlock'; interface ScriptStepOption { value: string; label: string; } export type OwnProps = {} & RouteComponentProps<{ projectId: string; pageOrScriptId: string; auditParametersId: string; scriptStepId?: string; }>; type Props = { project?: ProjectType | null; page?: PageType | null; script?: ScriptType | null; currentAuditParameters?: AuditParametersType | null; scriptSteps: Record<string, string>; sortedPageAuditResultsIds: string[] | null; sortedScriptAuditResultsIds: Record<string, string[]> | null; fetchProjectsRequest: (projectId: string) => void; pageAuditStatusHistory?: AuditStatusHistoryType | null; scriptAuditStatusHistory?: AuditStatusHistoryType | null; fetchAuditResultsRequest: ( auditParametersId: string, pageOrScriptId: string, type: 'page' | 'script', fromDate?: dayjs.Dayjs, toDate?: dayjs.Dayjs ) => void; setCurrentAuditParametersId: (auditParametersId: string | null | undefined) => void; setCurrentPageId: (pageId: string | null | undefined) => void; setCurrentScriptId: (scriptId: string | null | undefined) => void; setCurrentScriptStepId: (scriptStepId: string | null | undefined) => void; } & OwnProps & InjectedIntlProps; export const Audits: React.FunctionComponent<Props> = ({ currentAuditParameters, fetchProjectsRequest, history, intl, match, project, page, script, scriptSteps, sortedPageAuditResultsIds, sortedScriptAuditResultsIds, pageAuditStatusHistory, scriptAuditStatusHistory, fetchAuditResultsRequest, setCurrentAuditParametersId, setCurrentPageId, setCurrentScriptId, setCurrentScriptStepId, }) => { const { projectId, pageOrScriptId, auditParametersId, scriptStepId } = match.params; useFetchProjectIfUndefined(fetchProjectsRequest, projectId, project); React.useEffect( () => { const fromDate = dayjs().subtract(7, 'day'); if (page) { setCurrentPageId(pageOrScriptId ? pageOrScriptId : undefined); setCurrentScriptId(undefined); if (!sortedPageAuditResultsIds) { fetchAuditResultsRequest(auditParametersId, pageOrScriptId, 'page', fromDate); }; } else if (script) { setCurrentPageId(undefined); setCurrentScriptId(pageOrScriptId ? 
pageOrScriptId : undefined); if (!sortedScriptAuditResultsIds) { fetchAuditResultsRequest(auditParametersId, pageOrScriptId, 'script', fromDate); }; } }, // eslint is disabled because the hook exhaustive-deps wants to add page and script as dependencies, but they rerender too much // eslint-disable-next-line [ auditParametersId, pageOrScriptId, fetchAuditResultsRequest, // eslint-disable-next-line page && page.uuid, // eslint-disable-next-line script && script.uuid, setCurrentPageId, setCurrentScriptId, sortedPageAuditResultsIds, sortedScriptAuditResultsIds, ], ); React.useEffect( () => { setCurrentAuditParametersId(auditParametersId); }, [auditParametersId, setCurrentAuditParametersId], ); React.useEffect( () => { setCurrentScriptStepId(script && scriptStepId ? scriptStepId : undefined); }, // eslint-disable-next-line [script && script.uuid, scriptStepId, setCurrentScriptStepId], ); // we set a loader if the project hasn't been loaded from the server or if the page or the script haven't been // loaded (one of them must be defined when the page is active) if (project === undefined || (page === undefined && script === undefined)) { return ( <Container> <Loader /> </Container> ); } if (project === null) { return ( <Container> <MessagePill messageType="error"> <FormattedMessage id="Project.project_error" /> </MessagePill> </Container> ); } if ( (!project.pagesIds || 0 === project.pagesIds.length) && (!project.scriptsIds || 0 === project.scriptsIds.length) ) { return ( <Container> <MessagePill messageType="error"> <FormattedMessage id="Project.no_page_or_script_error" /> </MessagePill> </Container> ); } if (page === null && script === null) { return ( <Container> <MessagePill messageType="error"> <FormattedMessage id="Audits.page_or_script_unavailable" /> </MessagePill> </Container> ); } if (0 === project.auditParametersIds.length) { return ( <Container> <MessagePill messageType="error"> <FormattedMessage id="Project.no_audit_parameters_error" /> </MessagePill> </Container> ); } if (currentAuditParameters === null) { return ( <Container> <MessagePill messageType="error"> <FormattedMessage id="Audits.audit_parameters_unavailable" /> </MessagePill> </Container> ); } if ( script && sortedScriptAuditResultsIds && 0 !== Object.keys(sortedScriptAuditResultsIds).length && !scriptStepId ) { return ( <Redirect to={routeDefinitions.auditsScriptDetails.path .replace(':projectId', projectId) .replace(':pageOrScriptId', pageOrScriptId) .replace(':auditParametersId', auditParametersId) .replace(':scriptStepId', Object.keys(sortedScriptAuditResultsIds)[0])} /> ); } const getBadgeParams = () => { if (page) { return { backgroundColor: colorUsage.pageBadgeBackground, color: colorUsage.pageBadgeText, text: intl.formatMessage({ id: `Menu.page_badge` }), }; } else if (script) { return { backgroundColor: colorUsage.scriptBadgeBackground, color: colorUsage.scriptBadgeText, text: intl.formatMessage({ id: `Menu.script_badge` }), }; } return { backgroundColor: '', color: '', text: '', }; }; const getLastAuditMessage = (auditStatusHistory: AuditStatusHistoryType) => { switch(auditStatusHistory.status) { case auditStatus.requested: return <FormattedMessage id="Audits.AuditStatusHistory.audit_requested" />; case auditStatus.queuing: return auditStatusHistory.info && auditStatusHistory.info.positionInQueue ? 
<FormattedMessage id="Audits.AuditStatusHistory.audit_in_queue_behind" values={{ positionInQueue: auditStatusHistory.info.positionInQueue }}/> : <FormattedMessage id="Audits.AuditStatusHistory.audit_in_queue" /> case auditStatus.running: if(auditStatusHistory.info && auditStatusHistory.info.runningTime) { return <FormattedMessage id="Audits.AuditStatusHistory.audit_started" values={{ runningTime: auditStatusHistory.info.runningTime }}/> } else if(auditStatusHistory.info && auditStatusHistory.info.totalTests && auditStatusHistory.info.completedTests) { return ( <FormattedMessage id="Audits.AuditStatusHistory.audit_tests_running" values={{ completedTests: auditStatusHistory.info.completedTests, totalTests: auditStatusHistory.info.totalTests, }} /> ) } } return <FormattedMessage id="Audits.AuditStatusHistory.audit_in_queue" /> } const pageOrScriptName = page ? page.name : script ? script.name : ''; const latestAuditStatusHistory = page ? pageAuditStatusHistory : script ? scriptAuditStatusHistory : null; const badgeParams = getBadgeParams(); const sortedAuditResultsIds = page ? sortedPageAuditResultsIds : script && sortedScriptAuditResultsIds ? scriptStepId && sortedScriptAuditResultsIds[scriptStepId] ? sortedScriptAuditResultsIds[scriptStepId] : [] : null; const scriptStepSelectOptions = Object.keys(scriptSteps).map(scriptStepKey => ({ value: scriptStepKey, label: (scriptStepKey !== 'null' ? scriptStepKey : 0) + ' : ' + (scriptSteps[scriptStepKey] || 'Unknown step'), })); const handleScriptStepSelection = (selectedOption: ValueType<ScriptStepOption | {}>) => { // Check needed to avoid TS2339 error if (selectedOption && 'value' in selectedOption) { history.push( routeDefinitions.auditsScriptDetails.path .replace(':projectId', projectId) .replace(':pageOrScriptId', pageOrScriptId) .replace(':auditParametersId', auditParametersId) .replace(':scriptStepId', selectedOption.value), ); } }; return ( <Container> <PageTitleBlock> <PageTitle>{project.name + ' / ' + pageOrScriptName}</PageTitle> {(page || script) && ( <Badge backgroundColor={badgeParams.backgroundColor} color={badgeParams.color} margin={`0 0 0 ${getSpacing(4)}`} text={badgeParams.text} /> )} </PageTitleBlock> { latestAuditStatusHistory && auditStatus.success !== latestAuditStatusHistory.status && (auditStatus.error === latestAuditStatusHistory.status ? <MessagePill messageType="error"> <FormattedMessage id="Audits.AuditStatusHistory.audit_failure" /> </MessagePill> : <MessagePill messageType="info">{getLastAuditMessage(latestAuditStatusHistory)}</MessagePill>) } <Title> <FormattedMessage id="Audits.title" /> </Title> {script && 0 !== scriptStepSelectOptions.length && ( <ScriptStepBlock> <ScriptStepBlockTitle> <FormattedMessage id="Audits.script_step_selection" /> </ScriptStepBlockTitle> <Select defaultValue={scriptStepSelectOptions.find(scriptStepOption => { return scriptStepOption.value === scriptStepId; })} onChange={handleScriptStepSelection} options={scriptStepSelectOptions} margin={`0 0 ${getSpacing(4)} 0`} /> </ScriptStepBlock> )} <GraphsBlock blockMargin={`0 0 ${getSpacing(8)} 0`} auditResultIds={sortedAuditResultsIds} /> <Title> <FormattedMessage id="Audits.webpagetest_analysis" /> </Title> <AnalyticsBlock blockMargin={`0 0 ${getSpacing(8)} 0`} auditResultIds={sortedAuditResultsIds} /> </Container> ); };
/* Copyright 2019 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package errors import ( "sync" ) // UntilErrorConcurrent runs all funcs in separate goroutines, returning the // first non-nil error returned from funcs, or nil if all funcs return nil func UntilErrorConcurrent(funcs []func() error) error { errCh := make(chan error, len(funcs)) for _, f := range funcs { f := f // capture f go func() { errCh <- f() }() } for i := 0; i < len(funcs); i++ { if err := <-errCh; err != nil { return err } } return nil } // AggregateConcurrent runs fns concurrently, returning a NewAggregate if there are > 1 errors func AggregateConcurrent(funcs []func() error) error { // run all fns concurrently ch := make(chan error, len(funcs)) var wg sync.WaitGroup for _, f := range funcs { f := f // capture f wg.Add(1) go func() { defer wg.Done() ch <- f() }() } wg.Wait() close(ch) // collect up and return errors errs := []error{} for err := range ch { if err != nil { errs = append(errs, err) } } if len(errs) > 1 { return NewAggregate(errs) } else if len(errs) == 1 { return errs[0] } return nil }
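// exampleAggregate is an editor's sketch (not part of the original source)
// showing AggregateConcurrent running two trivial tasks concurrently; with
// more than one failure the errors come back wrapped in a single aggregate.
func exampleAggregate() error {
	return AggregateConcurrent([]func() error{
		func() error { return nil }, // placeholder task A
		func() error { return nil }, // placeholder task B
	})
}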
def __request_with_bytes_response(self, method: str, endpoint: str, **kwargs) -> bytes: raw_response = self._run_request(method, endpoint, **kwargs) if not raw_response.ok: self.__handle_error(raw_response) content_type_header = raw_response.headers.get(HEADER_CONTENT_TYPE, "") is_actually_json_response = MEDIA_TYPE_APPLICATION_JSON in content_type_header if is_actually_json_response: raise TypeError( f"Expected the return content to be bytes, but got [{content_type_header}]: {raw_response}" ) return raw_response.content
	/**
	 * Return the Method object representing the string methodName in Class c.
	 * Return null if no such method is found.
	 *
	 * @param c
	 *            the class to find a method in
	 * @param methodName
	 *            the name of the method to find
	 * @return the Method object if a method is found, null otherwise
	 */
	private Method findMethod(Class<?> c, String methodName) {
		for (Method m : c.getMethods()) {
			if (methodName.equalsIgnoreCase(m.getName())) {
				return m;
			}
		}
		return null;
	}
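	// Editor's sketch (hypothetical call site; not part of the original source):
	// the lookup is case-insensitive, so for example
	//   Method m = findMethod(String.class, "tostring");  // matches toString()
	// returns the Method object, while an unknown name yields null.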
<filename>src/main/python/hutts_verification/verification/text_verify.py """ Contains the logic used to verify the extracted text from a form of ID. """ import Levenshtein from hutts_verification.utils.hutts_logger import logger, prettify_json_message __author__ = "<NAME>" __copyright__ = "Copyright 2017, Java the Hutts" __license__ = "BSD" __maintainer__ = "<NAME>" __email__ = "<EMAIL>" __status__ = "Development" class TextVerify: """ This class is responsible for the verification of text that is extracted from an ID. """ def __init__(self): """ Initialises the TextVerify object. """ # Logging for debugging purposes. logger.debug('Initialising %s...' % type(self).__name__) def verify(self, extracted, verifier, threshold=75.00, min_matches=4, verbose=False): """ This function is responsible for the verification of text that is extracted from an ID and is passed in, along with information that is to be used to verify the extracted text. :param extracted (dict): The information that was extracted from an ID. :param verifier (dict): The information against which the extracted data is to be verified. :param threshold (float): A percentage used to determine if the match percentage is accepted as verified. :param min_matches (int): The minimum number of matches needed to allow a positive match. :param verbose (boolean): Indicates whether or not to return all of the calculated match percentages. Returns: - ((bool, float) | dict): The first value returned is a bool that indicates whether or not the total percentage match is above the specified threshold value, while the second return value is the total percentage match value if verbose is False, or returns a dict of all the determined percentage match values if verbose is True. Raises: TypeError: If any parameter is not of the correct type. """ if not isinstance(extracted, dict): raise TypeError( 'Bad type for arg extracted - expected dict. Received type "%s"' % type(extracted).__name__ ) if not isinstance(verifier, dict): raise TypeError( 'Bad type for arg verifier - expected dict. Received type "%s"' % type(verifier).__name__ ) if not isinstance(threshold, float): raise TypeError( 'Bad type for arg threshold - expected float. Received type "%s"' % type(threshold).__name__ ) if not isinstance(min_matches, int): raise TypeError( 'Bad type for arg min_matches - expected int. Received type "%s"' % type(min_matches).__name__ ) if not isinstance(verbose, bool): raise TypeError( 'Bad type for arg verbose - expected bool. Received type "%s"' % type(verbose).__name__ ) # Set minimum number of matches, if zero or less set to one. min_matches = min_matches if min_matches > 0 else 1 # Logging for debugging and verbose purposes. logger.debug('Threshold for verification set as: %.2f' % threshold) logger.debug('Minimum number of matches for verification set as: %d' % min_matches) logger.debug('Simplified percentages to be returned' if not verbose else 'Verbose percentages to be returned') logger.debug('-' * 50) logger.debug('Verifying:') logger.debug('-' * 50) # Prettify and log the extracted information. [logger.debug(log_line) for log_line in prettify_json_message(extracted).split('\n')] logger.debug('-' * 50) logger.debug('Against:') logger.debug('-' * 50) # Prettify and log the verifier information. [logger.debug(log_line) for log_line in prettify_json_message(verifier).split('\n')] logger.debug('-' * 50) # Initialise a dictionary to house the final matching percentages. 
        match_percentages = {}
        # Iterate over the verifier and calculate a percentage match for the values,
        # if the keys match and the corresponding values exist.
        for key, value in verifier.items():
            if key in extracted and extracted[key] is not None:
                # Compute the match percentage.
                logger.debug('Computing match between "%s" and "%s"...' % (value, extracted[key]))
                match_percentages[key] = {
                    'match_percentage': self._match_percentage(value, extracted[key]),
                    'verifier_field_value': value,
                    'extracted_field_value': extracted[key]
                }
                logger.debug(
                    '"%s" and "%s" match percentage: %.2f' %
                    (value, extracted[key], match_percentages[key]['match_percentage'])
                )
            else:
                logger.warning('Could not find corresponding field "%s" in extracted information to verify' % key)
        # Determine the number of percentages calculated and initialise a default value for the total match score.
        num_scores = len(match_percentages)
        total_match_percentage = 0.0
        # Check if enough matches were found.
        if num_scores >= min_matches:
            # Calculate the total match score.
            total_match_percentage = self._total_percentage_match(match_percentages)
        # Otherwise, the minimum number of matches criterion was not met.
        else:
            logger.warning('A total of %d matches were found, which is less than the minimum' % num_scores)
        # Determine whether or not the text is verified.
        is_verified = total_match_percentage >= threshold
        # Logging for debugging purposes.
        logger.debug('-' * 50)
        logger.debug('Intermediate match percentages:')
        logger.debug('-' * 50)
        [logger.debug(log_line) for log_line in prettify_json_message(match_percentages).split('\n')]
        logger.debug('-' * 50)
        logger.debug('Final match percentage: %.2f' % total_match_percentage)
        logger.debug('Threshold to pass: %.2f' % threshold)
        # Parenthesise the conditional expression so 'Result: ' is logged in both cases.
        logger.debug('Result: %s' % ('Passed' if is_verified else 'Failed'))
        # Return the final result.
        if not verbose:
            return is_verified, total_match_percentage
        # Append the total and non-matches to the existing percentages for verbose purposes,
        # and return all percentage values.
        match_percentages.update(self._get_non_matches(extracted, verifier))
        match_percentages['total'] = total_match_percentage
        return is_verified, match_percentages

    @staticmethod
    def _match_percentage(str_x, str_y):
        """
        This function is responsible for determining the percentage match for two strings and returning
        said percentage.

        :param str_x (str): The first string that is used to perform matching.
        :param str_y (str): The second string that is used to perform matching.

        Returns:
            - (float): Match percentage of the two given strings.

        Raises:
            TypeError: If either parameter is not a string.
        """
        if not isinstance(str_x, str):
            raise TypeError(
                'Bad type for arg str_x - expected string. Received type "%s"' % type(str_x).__name__
            )
        if not isinstance(str_y, str):
            raise TypeError(
                'Bad type for arg str_y - expected string. Received type "%s"' % type(str_y).__name__
            )
        return round(Levenshtein.ratio(str_x, str_y) * 100, 2)

    @staticmethod
    def _total_percentage_match(matches):
        """
        This function is responsible for calculating a single, total percentage match value for a dict of
        match values that have been calculated.

        :param matches (dict): A dictionary of pre-calculated, match percentages.

        Returns:
            - (float): A total match percentage for a given set of match percentages.
""" return round(sum(value['match_percentage'] for value in matches.values()) / len(matches), 2) @staticmethod def _get_non_matches(extracted, verifier): """ Creates a dictionary containing fields for which matches could not be computed, due to non-existence of fields or field values. :param extracted (dict): A dictionary containing the information that was extracted from an ID. :param verifier (dict): A dictionary containing the information against which the extracted data is to be verified. Returns: - (dict): A dictionary containing fields for which no matches can be found. """ non_matches = {} # Iterate over the extracted and verifier dictionaries to determine the field values for which match # percentages cannot be computed due to non-existence of values. for (verify_key, verify_value), (extract_key, extract_value) in zip(verifier.items(), extracted.items()): # There exists no corresponding field or field value for the verifier in the extracted ID info. if verify_key not in extracted or extracted[verify_key] is None: non_matches[verify_key] = { 'match_percentage': None, 'verifier_field_value': verify_value, 'extracted_field_value': None } # There exists no corresponding field or field value for the extracted ID info in the verifier. if extract_key not in verifier or verifier[extract_key] is None: non_matches[extract_key] = { 'match_percentage': None, 'verifier_field_value': None, 'extracted_field_value': extract_value } return non_matches def validate_id_number(self, id_number, valid_length=13): """ Determines whether a given id number is valid or not. :param id_number (str): The ID number that has to be validated. :param valid_length (int): Specifies the length of a given id number to be considered as valid. Returns: - (boolean): True if the id number is valid, False otherwise. Raises: TypeError: If id_number is not a string containing only numeric characters. TypeError: If valid_length is not an integer. """ if (not isinstance(id_number, str)) or (isinstance(id_number, str) and not id_number.isnumeric()): raise TypeError( 'Bad type for arg id_number - expected string of ONLY numeric characters. Received type "%s"' % type(id_number).__name__ ) if not isinstance(valid_length, int): raise TypeError( 'Bad type for arg valid_length - expected integer. Received type "%s"' % type(valid_length).__name__ ) # Logging for debugging purposes. logger.debug('Checking if extracted id number is valid...') # Determine if the id number is of a valid length. is_valid_length = len(id_number) == valid_length logger.debug('Extracted id number length appears %s' % ('valid' if is_valid_length else 'invalid')) # Return early since the result will be false anyways. # Do not calculate the checksum if it is not required. if not is_valid_length: logger.debug('Extracted id number appears invalid') return False # Determine if the id number checksum is valid. is_valid_id_checksum = self._compute_checksum(id_number) == 0 # Both the length and the checksum must be valid for the entire id number to be valid. is_valid_id_number = is_valid_length and is_valid_id_checksum # Logging for debugging purposes. logger.debug('Extracted id number checksum appears %s' % ('valid' if is_valid_id_checksum else 'invalid')) logger.debug('Extracted id number appears %s' % ('valid' if is_valid_id_number else 'invalid')) # Return final result of validation. return is_valid_id_number @staticmethod def _compute_checksum(id_number): """ Compute the Luhn checksum for the given id number string for validation. 
        :param id_number (str): A string containing an id number for which the Luhn checksum is to be calculated.

        Returns:
            - (int): Luhn checksum value for validation; 0 indicates that the number passes the check.
        """
        # Map the digits of the given id number to integers and create a list from said mapping.
        digits = list(map(int, id_number))
        # Double every second digit, counting from the right, and sum the digits of each product;
        # divmod(x, 10) splits a product greater than 9 into its tens and units digits.
        even_partial_sum = [sum(divmod(2 * digit, 10)) for digit in digits[-2::-2]]
        even_sum = sum(even_partial_sum)
        # Sum all the odd-positioned digits, counting from the right.
        odd_sum = sum(digits[-1::-2])
        # Return the Luhn checksum value for validation.
        return (even_sum + odd_sum) % 10
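The sketch below shows how the class above might be exercised; it is illustrative only, and the field names and sample values are invented rather than taken from the module (the all-zero string trivially passes the Luhn check, while the ascending string does not):

# Hypothetical usage sketch for TextVerify (not part of the module itself).
from hutts_verification.verification.text_verify import TextVerify

tv = TextVerify()
extracted = {'surname': 'Doe', 'names': 'Jane'}
verifier = {'surname': 'Doe', 'names': 'Jane'}

# Identical fields each score 100.00, so this verifies at the given threshold.
is_verified, total = tv.verify(extracted, verifier, threshold=75.0, min_matches=2)
print(is_verified, total)                      # (True, 100.0)

print(tv.validate_id_number('0000000000000'))  # True: 13 digits, Luhn checksum 0
print(tv.validate_id_number('1234567890123'))  # False: 13 digits, but non-zero checksum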
import math

# Each query a_i (1-based) reverses a centred substring of s. Rather than
# performing every reversal, observe that position i (0-based, in the left
# half) ends up swapped with its mirror n - i - 1 exactly when the total
# number of query starts at indices <= i is odd.
s = input()
m = int(input())
l = list(map(int, input().split()))
# Convert the 1-based query start positions to 0-based indices.
for i in range(len(l)):
    l[i] -= 1

n = len(s)
ans = list(s)

# p[j] toggles each time a query starts at j, so it differs from "#" exactly
# when an odd number of queries start at index j.
p = ["#"] * n
for i in range(len(l)):
    if p[l[i]] == "#":
        p[l[i]] = l[i]
    else:
        p[l[i]] = "#"

# q[i] holds the parity of the total number of query starts at indices <= i,
# i.e. whether position i lies inside an odd number of reversed segments.
q = [0] * n
c = 0
for i in range(n):
    if p[i] != "#":
        c += 1
    q[i] = c % 2

# Swap every mirrored pair that is covered an odd number of times; for odd n
# the middle element swaps with itself, which is a harmless no-op.
for i in range(math.ceil(n / 2)):
    if q[i] == 1:
        ans[i], ans[n - i - 1] = ans[n - i - 1], ans[i]

print(''.join(ans))
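A quick cross-check of the parity trick above, assuming the usual reading of the problem (query a reverses s[a-1 .. n-a] in 0-based terms); the helper names here are my own, not part of the original snippet:

import random

def brute(s, queries):
    """Apply every reversal literally: query a (1-based) reverses s[a-1 .. n-a]."""
    s = list(s)
    n = len(s)
    for a in queries:
        s[a - 1:n - a + 1] = s[a - 1:n - a + 1][::-1]
    return ''.join(s)

def parity(s, queries):
    """Same result via the prefix-parity method used in the solution."""
    n = len(s)
    ans = list(s)
    count = [0] * n
    for a in queries:
        count[a - 1] += 1
    c = 0
    for i in range(n // 2):
        c += count[i]
        if c % 2 == 1:
            ans[i], ans[n - i - 1] = ans[n - i - 1], ans[i]
    return ''.join(ans)

# Random small cases: both methods must agree.
for _ in range(100):
    n = random.randint(1, 12)
    s = ''.join(random.choice('ab') for _ in range(n))
    queries = [random.randint(1, max(1, n // 2)) for _ in range(random.randint(0, 5))]
    assert brute(s, queries) == parity(s, queries)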
/*
 * This verifies the existence of a VESA-capable display adapter on the
 * machine.  Video function call 0x4F00 returns 0x004F in AX if successful,
 * and returns a VbeInfoBlock describing the features of the VESA BIOS.
 */
int
vesa_detect(void)
{
    int vbe_info_sel = -1;
    int vbe_info_seg;
    struct VbeInfoBlock vbe_info;
    __dpmi_regs regs;
    unsigned long mode_addr;
    struct ModeInfoBlock mode_info;
    const char *mode_str;

    vbe_info_seg = __dpmi_allocate_dos_memory(
            (sizeof(vbe_info) + 15) / 16, &vbe_info_sel);
    if (vbe_info_seg < 0)
        goto error;

    memset(&vbe_info, 0, sizeof(vbe_info));
    memcpy(vbe_info.VbeSignature, "VBE2", 4);
    dosmemput(&vbe_info, sizeof(vbe_info), vbe_info_seg * 16L);

    memset(&regs, 0, sizeof(regs));
    regs.x.ax = 0x4F00;
    regs.x.di = 0;
    regs.x.es = vbe_info_seg;
    (void) __dpmi_int(VIDEO_BIOS, &regs);
    if (regs.x.ax != 0x004F)
        goto error;

    dosmemget(vbe_info_seg * 16L, sizeof(vbe_info), &vbe_info);
    if (memcmp(vbe_info.VbeSignature, "VESA", 4) != 0)
        goto error;

    mode_addr = (vbe_info.VideoModePtr >> 16) * 16L
                + (vbe_info.VideoModePtr & 0xFFFF);

    /* Honor a user-specified display mode, if one is set and usable. */
    mode_str = nh_getenv("NH_DISPLAY_MODE");
    if (mode_str != NULL) {
        char *end;
        unsigned long num = strtoul(mode_str, &end, 16);

        if (*end == '\0') {
            if (vesa_GetModeInfo(num, &mode_info)
                    && mode_info.XResolution >= 640
                    && mode_info.YResolution >= 480
                    && mode_info.BitsPerPixel >= 8) {
                vesa_mode = num & 0x47FF;
            }
        }
        if (vesa_mode == 0xFFFF)
            mode_str = NULL;
    }
    /* Prefer 8 bits per pixel if a palette is available; otherwise fall back
       through progressively less capable pixel formats. */
    if (get_palette() != NULL && vesa_mode == 0xFFFF)
        vesa_mode = vesa_FindMode(mode_addr, 8);
    if (vesa_mode == 0xFFFF)
        vesa_mode = vesa_FindMode(mode_addr, 32);
    if (vesa_mode == 0xFFFF)
        vesa_mode = vesa_FindMode(mode_addr, 24);
    if (vesa_mode == 0xFFFF)
        vesa_mode = vesa_FindMode(mode_addr, 16);
    if (vesa_mode == 0xFFFF)
        vesa_mode = vesa_FindMode(mode_addr, 15);
    if (vesa_mode == 0xFFFF)
        vesa_mode = vesa_FindMode(mode_addr, 8);
    if (vesa_mode == 0xFFFF)
        goto error;

    vesa_GetModeInfo(vesa_mode, &mode_info);
    vesa_x_res = mode_info.XResolution;
    vesa_y_res = mode_info.YResolution;
    vesa_x_center = 0;
    vesa_y_center = 0;
    vesa_scan_line = mode_info.BytesPerScanLine;
    vesa_win_size = mode_info.WinSize * 1024L;
    vesa_win_gran = mode_info.WinGranularity * 1024L;
    vesa_pixel_size = mode_info.BitsPerPixel;
    vesa_pixel_bytes = (vesa_pixel_size + 7) / 8;

    /* VBE 3.0 reports the color field layout directly; for older BIOSes,
       assume the standard layouts for each pixel size. */
    if (vbe_info.VbeVersion >= 0x0300) {
        if (mode_info.ModeAttributes & 0x80) {
            vesa_red_pos = mode_info.LinRedFieldPosition;
            vesa_red_shift = 8 - mode_info.LinRedMaskSize;
            vesa_green_pos = mode_info.LinGreenFieldPosition;
            vesa_green_shift = 8 - mode_info.LinGreenMaskSize;
            vesa_blue_pos = mode_info.LinBlueFieldPosition;
            vesa_blue_shift = 8 - mode_info.LinBlueMaskSize;
        } else {
            vesa_red_pos = mode_info.RedFieldPosition;
            vesa_red_shift = 8 - mode_info.RedMaskSize;
            vesa_green_pos = mode_info.GreenFieldPosition;
            vesa_green_shift = 8 - mode_info.GreenMaskSize;
            vesa_blue_pos = mode_info.BlueFieldPosition;
            vesa_blue_shift = 8 - mode_info.BlueMaskSize;
        }
    } else {
        switch (vesa_pixel_size) {
        case 15:
            vesa_blue_pos = 0;
            vesa_blue_shift = 3;
            vesa_green_pos = 5;
            vesa_green_shift = 3;
            vesa_red_pos = 10;
            vesa_red_shift = 3;
            break;
        case 16:
            vesa_blue_pos = 0;
            vesa_blue_shift = 3;
            vesa_green_pos = 5;
            vesa_green_shift = 2;
            vesa_red_pos = 11;
            vesa_red_shift = 3;
            break;
        case 24:
        case 32:
            vesa_blue_pos = 0;
            vesa_blue_shift = 0;
            vesa_green_pos = 8;
            vesa_green_shift = 0;
            vesa_red_pos = 16;
            vesa_red_shift = 0;
            break;
        }
    }

    vesa_win_func = mode_info.WinFuncPtr;
    vesa_win_addr[0] = mode_info.WinASegment * 16L;
    vesa_win_addr[1] = mode_info.WinBSegment * 16L;
    vesa_win_pos[0] = 0xFFFFFFFF;
    vesa_win_pos[1] = 0xFFFFFFFF;
    /* Window attribute bit 0x2 marks a readable window, bit 0x4 a writable
       one; the mode must provide at least one of each. */
    if (mode_info.WinAAttributes & 0x2) {
        vesa_read_win = 0;
    } else if (mode_info.WinBAttributes & 0x2) {
        vesa_read_win = 1;
    } else {
        goto error;
    }
    if (mode_info.WinAAttributes & 0x4) {
        vesa_write_win = 0;
    } else if (mode_info.WinBAttributes & 0x4) {
        vesa_write_win = 1;
    } else {
        goto error;
    }

    /* Mode attribute bit 0x80 marks linear frame buffer support; map it and
       point a descriptor at it if possible. */
    if ((mode_info.ModeAttributes & 0x80) != 0
            && (mode_str == NULL || (vesa_mode & 0x4000) != 0)) {
        unsigned sel = vesa_segment;
        unsigned win_size = mode_info.BytesPerScanLine * mode_info.YResolution;
        unsigned addr = vesa_map_frame_buffer(mode_info.PhysBasePtr, win_size);

        if (sel == 0) {
            sel = __dpmi_allocate_ldt_descriptors(1);
        }
        if (addr != 0) {
            vesa_mode |= 0x4000;
            vesa_segment = sel;
            __dpmi_set_segment_base_address(sel, addr);
            __dpmi_set_segment_limit(sel, (win_size - 1) | 0xFFF);
        } else {
            __dpmi_free_ldt_descriptor(sel);
            vesa_segment = 0;
        }
    }

    __dpmi_free_dos_memory(vbe_info_sel);
    return TRUE;

error:
    if (vbe_info_sel != -1)
        __dpmi_free_dos_memory(vbe_info_sel);
    return FALSE;
}
use num_dual::*;

#[test]
fn test_dual_recip() {
    let res = Dual64::from(1.2).derive().recip();
    assert!((res.re - 0.833333333333333).abs() < 1e-12);
    assert!((res.eps[0] - -0.694444444444445).abs() < 1e-12);
}

#[test]
fn test_dual_exp() {
    let res = Dual64::from(1.2).derive().exp();
    assert!((res.re - 3.32011692273655).abs() < 1e-12);
    assert!((res.eps[0] - 3.32011692273655).abs() < 1e-12);
}

#[test]
fn test_dual_exp_m1() {
    let res = Dual64::from(1.2).derive().exp_m1();
    assert!((res.re - 2.32011692273655).abs() < 1e-12);
    assert!((res.eps[0] - 3.32011692273655).abs() < 1e-12);
}

#[test]
fn test_dual_exp2() {
    let res = Dual64::from(1.2).derive().exp2();
    assert!((res.re - 2.29739670999407).abs() < 1e-12);
    assert!((res.eps[0] - 1.59243405216008).abs() < 1e-12);
}

#[test]
fn test_dual_ln() {
    let res = Dual64::from(1.2).derive().ln();
    assert!((res.re - 0.182321556793955).abs() < 1e-12);
    assert!((res.eps[0] - 0.833333333333333).abs() < 1e-12);
}

#[test]
fn test_dual_log() {
    let res = Dual64::from(1.2).derive().log(4.2);
    assert!((res.re - 0.127045866345188).abs() < 1e-12);
    assert!((res.eps[0] - 0.580685888982970).abs() < 1e-12);
}

#[test]
fn test_dual_ln_1p() {
    let res = Dual64::from(1.2).derive().ln_1p();
    assert!((res.re - 0.788457360364270).abs() < 1e-12);
    assert!((res.eps[0] - 0.454545454545455).abs() < 1e-12);
}

#[test]
fn test_dual_log2() {
    let res = Dual64::from(1.2).derive().log2();
    assert!((res.re - 0.263034405833794).abs() < 1e-12);
    assert!((res.eps[0] - 1.20224586740747).abs() < 1e-12);
}

#[test]
fn test_dual_log10() {
    let res = Dual64::from(1.2).derive().log10();
    assert!((res.re - 0.0791812460476248).abs() < 1e-12);
    assert!((res.eps[0] - 0.361912068252710).abs() < 1e-12);
}

#[test]
fn test_dual_sqrt() {
    let res = Dual64::from(1.2).derive().sqrt();
    assert!((res.re - 1.09544511501033).abs() < 1e-12);
    assert!((res.eps[0] - 0.456435464587638).abs() < 1e-12);
}

#[test]
fn test_dual_cbrt() {
    let res = Dual64::from(1.2).derive().cbrt();
    assert!((res.re - 1.06265856918261).abs() < 1e-12);
    assert!((res.eps[0] - 0.295182935884059).abs() < 1e-12);
}

#[test]
fn test_dual_powf() {
    let res = Dual64::from(1.2).derive().powf(4.2);
    assert!((res.re - 2.15060788316847).abs() < 1e-12);
    assert!((res.eps[0] - 7.52712759108966).abs() < 1e-12);
}

#[test]
fn test_dual_powf_0() {
    let res = Dual64::from(0.0).derive().powf(0.0);
    assert!((res.re - 1.00000000000000).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powf_1() {
    let res = Dual64::from(0.0).derive().powf(1.0);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0] - 1.00000000000000).abs() < 1e-12);
}

#[test]
fn test_dual_powf_2() {
    let res = Dual64::from(0.0).derive().powf(2.0);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powf_3() {
    let res = Dual64::from(0.0).derive().powf(3.0);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powf_4() {
    let res = Dual64::from(0.0).derive().powf(4.0);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powi() {
    let res = Dual64::from(1.2).derive().powi(6);
    assert!((res.re - 2.98598400000000).abs() < 1e-12);
    assert!((res.eps[0] - 14.9299200000000).abs() < 1e-12);
}

#[test]
fn test_dual_powi_0() {
    let res = Dual64::from(0.0).derive().powi(0);
    assert!((res.re - 1.00000000000000).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powi_1() {
    let res = Dual64::from(0.0).derive().powi(1);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0] - 1.00000000000000).abs() < 1e-12);
}

#[test]
fn test_dual_powi_2() {
    let res = Dual64::from(0.0).derive().powi(2);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powi_3() {
    let res = Dual64::from(0.0).derive().powi(3);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_powi_4() {
    let res = Dual64::from(0.0).derive().powi(4);
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_sin() {
    let res = Dual64::from(1.2).derive().sin();
    assert!((res.re - 0.932039085967226).abs() < 1e-12);
    assert!((res.eps[0] - 0.362357754476674).abs() < 1e-12);
}

#[test]
fn test_dual_cos() {
    let res = Dual64::from(1.2).derive().cos();
    assert!((res.re - 0.362357754476674).abs() < 1e-12);
    assert!((res.eps[0] - -0.932039085967226).abs() < 1e-12);
}

#[test]
fn test_dual_tan() {
    let res = Dual64::from(1.2).derive().tan();
    assert!((res.re - 2.57215162212632).abs() < 1e-12);
    assert!((res.eps[0] - 7.61596396720705).abs() < 1e-12);
}

#[test]
fn test_dual_asin() {
    let res = Dual64::from(0.2).derive().asin();
    assert!((res.re - 0.201357920790331).abs() < 1e-12);
    assert!((res.eps[0] - 1.02062072615966).abs() < 1e-12);
}

#[test]
fn test_dual_acos() {
    let res = Dual64::from(0.2).derive().acos();
    assert!((res.re - 1.36943840600457).abs() < 1e-12);
    assert!((res.eps[0] - -1.02062072615966).abs() < 1e-12);
}

#[test]
fn test_dual_atan() {
    let res = Dual64::from(0.2).derive().atan();
    assert!((res.re - 0.197395559849881).abs() < 1e-12);
    assert!((res.eps[0] - 0.961538461538462).abs() < 1e-12);
}

#[test]
fn test_dual_sinh() {
    let res = Dual64::from(1.2).derive().sinh();
    assert!((res.re - 1.50946135541217).abs() < 1e-12);
    assert!((res.eps[0] - 1.81065556732437).abs() < 1e-12);
}

#[test]
fn test_dual_cosh() {
    let res = Dual64::from(1.2).derive().cosh();
    assert!((res.re - 1.81065556732437).abs() < 1e-12);
    assert!((res.eps[0] - 1.50946135541217).abs() < 1e-12);
}

#[test]
fn test_dual_tanh() {
    let res = Dual64::from(1.2).derive().tanh();
    assert!((res.re - 0.833654607012155).abs() < 1e-12);
    assert!((res.eps[0] - 0.305019996207409).abs() < 1e-12);
}

#[test]
fn test_dual_asinh() {
    let res = Dual64::from(1.2).derive().asinh();
    assert!((res.re - 1.01597313417969).abs() < 1e-12);
    assert!((res.eps[0] - 0.640184399664480).abs() < 1e-12);
}

#[test]
fn test_dual_acosh() {
    let res = Dual64::from(1.2).derive().acosh();
    assert!((res.re - 0.622362503714779).abs() < 1e-12);
    assert!((res.eps[0] - 1.50755672288882).abs() < 1e-12);
}

#[test]
fn test_dual_atanh() {
    let res = Dual64::from(0.2).derive().atanh();
    assert!((res.re - 0.202732554054082).abs() < 1e-12);
    assert!((res.eps[0] - 1.04166666666667).abs() < 1e-12);
}

#[test]
fn test_dual_sph_j0() {
    let res = Dual64::from(1.2).derive().sph_j0();
    assert!((res.re - 0.776699238306022).abs() < 1e-12);
    assert!((res.eps[0] - -0.345284569857790).abs() < 1e-12);
}

#[test]
fn test_dual_sph_j1() {
    let res = Dual64::from(1.2).derive().sph_j1();
    assert!((res.re - 0.345284569857790).abs() < 1e-12);
    assert!((res.eps[0] - 0.201224955209705).abs() < 1e-12);
}

#[test]
fn test_dual_sph_j2() {
    let res = Dual64::from(1.2).derive().sph_j2();
    assert!((res.re - 0.0865121863384538).abs() < 1e-12);
    assert!((res.eps[0] - 0.129004104011656).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j0_0() {
    let res = Dual64::from(0.0).derive().bessel_j0();
    assert!((res.re - 1.00000000000000).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j1_0() {
    let res = Dual64::from(0.0).derive().bessel_j1();
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0] - 0.500000000000000).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j2_0() {
    let res = Dual64::from(0.0).derive().bessel_j2();
    assert!((res.re).abs() < 1e-12);
    assert!((res.eps[0]).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j0_1() {
    let res = Dual64::from(1.2).derive().bessel_j0();
    assert!((res.re - 0.671132744264363).abs() < 1e-12);
    assert!((res.eps[0] - -0.498289057567215).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j1_1() {
    let res = Dual64::from(1.2).derive().bessel_j1();
    assert!((res.re - 0.498289057567215).abs() < 1e-12);
    assert!((res.eps[0] - 0.255891862958350).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j2_1() {
    let res = Dual64::from(1.2).derive().bessel_j2();
    assert!((res.re - 0.159349018347663).abs() < 1e-12);
    assert!((res.eps[0] - 0.232707360321110).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j0_2() {
    let res = Dual64::from(7.2).derive().bessel_j0();
    assert!((res.re - 0.295070691400958).abs() < 1e-12);
    assert!((res.eps[0] - -0.0543274202223671).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j1_2() {
    let res = Dual64::from(7.2).derive().bessel_j1();
    assert!((res.re - 0.0543274202223671).abs() < 1e-12);
    assert!((res.eps[0] - 0.287525216370074).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j2_2() {
    let res = Dual64::from(7.2).derive().bessel_j2();
    assert!((res.re - -0.279979741339189).abs() < 1e-12);
    assert!((res.eps[0] - 0.132099570594364).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j0_3() {
    let res = Dual64::from(-1.2).derive().bessel_j0();
    assert!((res.re - 0.671132744264363).abs() < 1e-12);
    assert!((res.eps[0] - 0.498289057567215).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j1_3() {
    let res = Dual64::from(-1.2).derive().bessel_j1();
    assert!((res.re - -0.498289057567215).abs() < 1e-12);
    assert!((res.eps[0] - 0.255891862958350).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j2_3() {
    let res = Dual64::from(-1.2).derive().bessel_j2();
    assert!((res.re - 0.159349018347663).abs() < 1e-12);
    assert!((res.eps[0] - -0.232707360321110).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j0_4() {
    let res = Dual64::from(-7.2).derive().bessel_j0();
    assert!((res.re - 0.295070691400958).abs() < 1e-12);
    assert!((res.eps[0] - 0.0543274202223671).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j1_4() {
    let res = Dual64::from(-7.2).derive().bessel_j1();
    assert!((res.re - -0.0543274202223671).abs() < 1e-12);
    assert!((res.eps[0] - 0.287525216370074).abs() < 1e-12);
}

#[test]
fn test_dual_bessel_j2_4() {
    let res = Dual64::from(-7.2).derive().bessel_j2();
    assert!((res.re - -0.279979741339189).abs() < 1e-12);
    assert!((res.eps[0] - -0.132099570594364).abs() < 1e-12);
}
I once asked my dad who the parents of Lord Shiva were. The answer he gave was not convincing to me when I was a kid. But now that I have begun to interpret Lord Shiva in a very subtle way, which is what he is by himself, it has started to make sense, or at least it has become clear why there is such an explanation for his origin.

First, let us understand what Hindu mythology says about his birth. There are actually many versions of it, but I would like to quote the popular one. It is believed that Lord Vishnu, who is responsible for all the maintenance (jeeva lahari) activities of creation, existed first, and that from his navel Lord Brahma, the creator of the world, was born on a lotus. They existed in their own infinite time spaces, in infinite peace, without knowing of each other. But one fine day they saw each other, and it was shocking for each of them to find that someone else had existed in this universe before him. As time passed, an element of ego got into both of their heads, and each started trying to prove himself greater than the other. This led to a cold war between them, each insisting that "I" am greater than the other. This was the origin of the "I", the self, in this universe.

Later the quarrel led to a fight, and that called for the existence of someone greater than both of them to decide who was greater. Then came a voice from behind, out of an infinite oval-shaped object called a lingam. Both agreed to listen to it, since "he" was a third party and stood on neither side. This infinite lingam had no end and no beginning. It said that whoever found the starting point of the lingam would be the greatest. So Brahma and Vishnu each chose one side of the lingam and set off in search of its edge. Both went on for years searching for the end, but neither of them could find it.

But here there is a twist in the story: Brahma, who had gone along one side of the lingam, found his favourite flower, kedake, floating in the infinite time space. Since the "I" (aham) in Brahma had grown a little stronger than in Vishnu at this point, he came up with the cunning idea of lying to the lingam, and he asked kedake to help him in this lie. He declared that he had found the end and that kedake was his witness. Then Lord Shiva came out of the lingam to punish Brahma for lying. The mythology says this is the reason why nobody in the universe worships Brahma: he was the origin of greed in this universe, which is the source of many of its problems. And now it must be believed that Shiva, who came out of the infinite lingam, existed even before both of them. So, technically speaking, there is no proper origin for Shiva. He simply existed from time infinity and came out of his form, the lingam.

Though what Hindu mythology says about the origin of Shiva seems unscientific, if we radical humans look at this story from a philosophical point of view, there is a much bigger message to observe. Rather than just seeing Shiva as a mythological character, we can start looking at him as a philosophy that is trying to convey some message through his appearance, character and stories. I will try to analyse this small story of his origin from another point of view to start with. At the beginning of the story, it is said that both Brahma and Vishnu existed in peace in their own time spaces, and that when they later saw each other they started to quarrel over their greatness.
For example, this can be read as follows: anyone who is at peace within himself, without comparing himself to anyone, can remain at peace for time infinity, until he sees someone and starts to compare himself to that person. And with comparison came competition. Even Brahma, the creator of everything, was not spared this pride, ahamkara. Ironically, it can be said that after the origin of "I", everything else originated. And since this "I" later turned into greed in Brahma, Shiva punished him by making him ineligible for prayers.

The very description of the lingam says that it is infinitely oval. How can something exist infinitely oval, without ending? It would have been stated this way to describe something through the lens of time, which ultimately has existed from infinity and extends to infinity without ending. In most descriptions of Shiva, it is said that he knows what happened in the past, what is happening now, and what is going to happen in the future. In my view, this likening of Shiva to something like Time fits perfectly. So, does time have any parents? No, because it has existed, or is believed to flow, in one direction towards the future from the very moment everything started.

Why is Brahma, the creator of everything, portrayed as the character filled with ego, and not Vishnu? It should be because every creator takes this pride in his creation: that this is "his creation". Maybe that is the reason he is portrayed as the carrier of pride in all creators. The very creator, Brahma, is believed to have come from the navel of Vishnu. Probably this is meant to convey the message that even the creator is himself not the greatest, because he too existed from something; he too was created from something. This conveys the idea that nothing can be created out of nothing.

And what is the significance of the flower kedake? A lie cannot exist on its own. A lie always needs some proof to say that it is true; because it is not true, someone has to justify it. Had it been the truth, it could have existed on its own. Or probably I would never have come to the end of this analysis, because the two of them would still be searching for the end even now.

- Sid
// Source/FSD/Private/WoodLouse.cpp
#include "WoodLouse.h"
#include "Net/UnrealNetwork.h"
#include "Perception/PawnSensingComponent.h"
#include "Components/SceneComponent.h"

class APawn;

void AWoodLouse::StopSpecial() {
}

void AWoodLouse::StartSpecial() {
}

void AWoodLouse::SetWantsToStandUp(bool aWantsToStandUp) {
}

void AWoodLouse::SetState(EWoodLouseState aState) {
}

void AWoodLouse::SetRotateToTarget(bool aRotateToTarget) {
}

void AWoodLouse::SeePawn(APawn* aSenPawn) {
}

void AWoodLouse::OnRep_State() {
}

void AWoodLouse::OnRep_LastHit() {
}

EWoodLouseState AWoodLouse::GetRollerState() const {
    return EWoodLouseState::Unfolded;
}

void AWoodLouse::GetLifetimeReplicatedProps(TArray<FLifetimeProperty>& OutLifetimeProps) const {
    Super::GetLifetimeReplicatedProps(OutLifetimeProps);

    DOREPLIFETIME(AWoodLouse, LastHit);
    DOREPLIFETIME(AWoodLouse, CurrentState);
    DOREPLIFETIME(AWoodLouse, CurrentTarget);
    DOREPLIFETIME(AWoodLouse, RotateTowardsTarget);
    DOREPLIFETIME(AWoodLouse, WantsToStandUp);
    DOREPLIFETIME(AWoodLouse, IsShooting);
}

AWoodLouse::AWoodLouse() {
    this->RollingCenter = CreateDefaultSubobject<USceneComponent>(TEXT("RollingCenter"));
    this->PawnSensing = CreateDefaultSubobject<UPawnSensingComponent>(TEXT("Sensing"));
    this->forceState = EWoodLouseState::Size;
    this->RollingCenterOffsetOnStanding = 60.00f;
    this->RollingCenterOffsetOnFold = 80.00f;
    this->ChanceToWalk = 0.60f;
    this->DebugAngle = false;
    this->FakeMoverSettings = NULL;
    this->BurstProjectileClass = NULL;
    this->BurstTime = 0.25f;
    this->TiltModifier = 1.00f;
    this->BurstXOffset = 0.00f;
    this->BurstYOffset = 0.00f;
    this->BurstZOffset = 0.00f;
    this->HighDifficultyCustomProjectileGravity = 1.00f;
    this->BurstCount = 3;
    this->InvounerableOnRoll = true;
    this->LockToRollMode = false;
    this->LockToWalkMode = false;
    this->ForgetRange = 300.00f;
    this->ForgetTime = 0.70f;
    this->RefreshTimeMax = 0.00f;
    this->RefreshTimeMin = 0.00f;
    this->BumpPower = 0.00f;
    this->DirectionalBumpPower = 0.00f;
    this->CurrentState = EWoodLouseState::Folded;
    this->CurrentTarget = NULL;
    this->FoldedStateMaxTime = 0.00f;
    this->FoldedStateMinTime = 0.00f;
    this->UnfoldedStateMaxTime = 0.00f;
    this->UnflodedStateMinTime = 0.00f;
    this->BumpSound = NULL;
    this->TimeBetweenBumpingSamePlayer = 0.00f;
    this->MinBumpDamage = 10.00f;
    this->MaxBumpDamage = 25.00f;
    this->BumpDamage = 0.00f;
    this->BumpDamageType = NULL;
    this->MaxBumpPower = 0.00f;
    this->BumpRange = 0.00f;
    this->AcceptableAngles = 30.00f;
    this->RollSpeedModifier = 1.15f;
    this->RollAlpha = 1.00f;
    this->TiltAlpha = 1.00f;
    this->RollingSound = NULL;
    this->RollMoveSettings = NULL;
    this->StopRollMoveSettings = NULL;
    this->RotateTowardsTarget = false;
    this->SeeTargetSafetyTime = 20.00f;
    this->WantsToStandUp = false;
    this->IsShooting = false;
    this->CanStandOnAnySurface = false;
}
/**
 * Settings for the user information contained in a tweet.
 */

/**
 * The thumbnail-filter strings appended to the end of profile image URLs.
 * Removing them makes the original image accessible.
 */
const profileImageUrlSuffix = {
  jpg: {
    suffix: '_normal.jpg',
    replacement: '.jpg',
  },
  gif: {
    suffix: '_normal.gif',
    replacement: '.gif',
  },
} as const

/**
 * Performs a replacement on a string, targeting only characters that **exactly match at the end** of the string.
 * @param string the string to process
 * @param pattern the string to be replaced
 * @param replacement the string to substitute
 * @returns the string after replacement
 */
const replaceSuffix = (
  string: string,
  pattern: string,
  replacement: string,
) => {
  // Escape regex metacharacters (such as ".") so the pattern is matched literally.
  const escaped = pattern.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
  return string.replace(new RegExp(`${escaped}$`), replacement)
}

/**
 * Strips part of the string from a profile icon URL to obtain the URL of the original image.
 * @param url the URL of the icon image
 * @returns the URL of the original image
 */
const getOriginalProfileImageUrl = (url: string) => {
  // JPEG image
  if (url.endsWith(profileImageUrlSuffix.jpg.suffix)) {
    return replaceSuffix(
      url,
      profileImageUrlSuffix.jpg.suffix,
      profileImageUrlSuffix.jpg.replacement,
    )
  }
  // GIF image (the fallback for any non-JPEG URL)
  return replaceSuffix(
    url,
    profileImageUrlSuffix.gif.suffix,
    profileImageUrlSuffix.gif.replacement,
  )
}

export { profileImageUrlSuffix, replaceSuffix, getOriginalProfileImageUrl }
Monetary Policy Trade-Offs and Monetary Policy Credibility

This chapter focuses on two key aspects of the monetary policy process: the trade-offs between the three goals that fall within the purview of central banks (price stability, output stabilization, and financial stability), and the role of central bank independence and transparency in enhancing the credibility and the efficacy of monetary policy. The chapter presents the theoretical background for each issue and discusses related empirical studies. The emphasis is on how the specific features of LFDCs shape the nature of the policy trade-offs: for example, labour-market informality affects the Phillips curve and hence the inflation–output trade-off, while unsophisticated financial markets heighten financial stability concerns. In the discussion of central bank independence and transparency, the situation in LFDCs is compared with that in advanced and emerging market countries.
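For reference, the inflation–output trade-off discussed here is conventionally summarized by the New Keynesian Phillips curve; the equation below is the standard textbook formulation, not one reproduced from the chapter itself:

\[
\pi_t = \beta \, \mathbb{E}_t[\pi_{t+1}] + \kappa \, x_t + u_t,
\]

where \(\pi_t\) is inflation, \(x_t\) the output gap, \(\beta\) the discount factor, \(\kappa\) the slope linking real activity to inflation, and \(u_t\) a cost-push shock. The chapter's point about labour-market informality in LFDCs can be read through \(\kappa\): a large informal sector weakens the measured link between the formal-sector output gap and inflation, flattening or destabilizing the estimated slope.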
/*
 * Copyright 2019. techflowing
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.neuqer.android.splash.model;

/**
 * Data model for the ad (splash) page.
 *
 * @author techflowing
 * @since 2019/1/1 6:13 PM
 */
public class Ad implements Splash {

    private long mStartTime;
    private long mEndTime;
    private String mImgPath;

    public Ad(long startTime, long endTime, String imgPath) {
        mStartTime = startTime;
        mEndTime = endTime;
        mImgPath = imgPath;
    }

    public long getStartTime() {
        return mStartTime;
    }

    public long getEndTime() {
        return mEndTime;
    }

    public String getImgPath() {
        return mImgPath;
    }
}
import os

import click
import cv2
import numpy as np

# `cfg` (a config object holding the directory paths used below) and
# `safe_mkdir` (a create-directory-if-missing helper) are assumed to be
# project-local utilities; their import lines are omitted in the original
# snippet, so they are not reproduced here.


@click.command()  # assumed: the click.echo usage suggests this is a click command
def combine():
    """Place each resized original side by side with its style-transfer sibling."""
    click.echo(f'Combining images in {cfg.resize_original} with images in '
               f'{cfg.transfer_output}...')
    safe_mkdir(cfg.combine_output)
    for filename in os.listdir(cfg.resize_original):
        img = cv2.imread(os.path.join(cfg.resize_original, filename))
        sibling_path = os.path.join(cfg.transfer_output, filename)
        if os.path.exists(sibling_path):
            sibling = cv2.imread(sibling_path)
            # Stack the two images horizontally (along the width axis).
            combined = np.concatenate([img, sibling], axis=1)
            combined_path = os.path.join(cfg.combine_output, filename)
            cv2.imwrite(combined_path, combined)
    return None
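As a self-contained illustration of the core operation, independent of the project's cfg paths: horizontal concatenation only requires that the two arrays share the same height and channel count.

import numpy as np

# Two dummy "images" of the same height (4 rows) but different widths.
left = np.zeros((4, 3, 3), dtype=np.uint8)       # 4x3 black image, 3 channels
right = np.full((4, 2, 3), 255, dtype=np.uint8)  # 4x2 white image, 3 channels

combined = np.concatenate([left, right], axis=1)  # axis=1 joins along the width
print(combined.shape)  # (4, 5, 3)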
// src/app.controller.ts
import {
  BadRequestException,
  Body,
  Controller,
  Post,
  Res,
} from '@nestjs/common';
import { AppService } from './app.service';
import * as bcrypt from 'bcrypt';
import { JwtService } from '@nestjs/jwt';
import { Response } from 'express';
import { userDto } from './dto/user.dto';
import { loginDto } from './dto/login.dto';
import { ApiCreatedResponse, ApiOkResponse, ApiUnauthorizedResponse } from '@nestjs/swagger';

@Controller()
export class AppController {
  constructor(
    private readonly appService: AppService,
    private jwtService: JwtService,
  ) { }

  /**
   * @description Create a new user; the password is stored as a hash.
   * @param name
   * @param email
   * @param password
   * @returns the account info [user]
   */
  @Post('register')
  @ApiCreatedResponse({ description: 'User Registration' })
  async register(@Body() userDto: userDto) {
    const hashPassword = await bcrypt.hash(userDto.password, 12);
    userDto.password = hashPassword;
    const user = await this.appService.create(userDto);
    // const { password, ...result } = user; // filter the password out so it is not sent onwards
    delete user.password;
    return user;
  }

  /**
   * @description On successful login authentication, return a token.
   * @param email
   * @param password
   * @returns token
   */
  @Post('login')
  @ApiOkResponse({ description: 'User login' })
  @ApiUnauthorizedResponse({ description: 'invalid' })
  async login(
    @Body() loginUser: loginDto,
    @Res({ passthrough: true }) response: Response,
  ) {
    const email = loginUser.email;
    const pwd = loginUser.password;
    const user = await this.appService.findOne(email);
    if (!user) {
      throw new BadRequestException('invalid user');
    }
    if (!await bcrypt.compare(pwd, user.password)) {
      throw new BadRequestException('invalid pwd');
    }
    const jwt = await this.jwtService.signAsync({ id: user.id });
    return jwt;
    // response.cookie('jwt', jwt, { httpOnly: true }); // store the token in the 'jwt' field of the response cookie
    // return {
    //   message: 'success'
    // };
  }

  @Post('logout')
  async logout(@Res({ passthrough: true }) response: Response) {
    response.clearCookie('jwt');
    return {
      message: 'success',
    };
  }
}