def notify_user_interaction_required(self, user_name, rp_id): if user_name: request_string = "\nAuthentication request for user: {0}".format( user_name) else: request_string = "\nAuthentication request" print(request_string) print("From service: {0}".format(rp_id)) print("Insert and touch the authenticator device you wish to use to " "consent to the authentication...\n")
def put_pixel(self, x: int, y: int, color): qcolor = _to_qcolor(color) self._image.setPixel(x, y, qcolor.rgba()) self._mask.setPixel(x, y, MASK_BLACK.rgba()) self._updated()
/** * Validate and parse a hostname or hostname/port * * If port_out is not null, accepts a port as well. This is * stored in *port_out. If no port is given, a 0 is stored. * If an invalid port is given, -1 is stored. * * If the hostname is invalid, null is returned and no value * is written to *port_out. * * If port_out is null, null will be returned if the string * contains a port. * * The return value must be freed with talloc_free unless * it is null. * * @param mem_ctx talloc context for hostname result * @param s string to parse * @param port_out pointer to an allocated integer, or NULL * @return pointer to the hostname or null on error */ char *tr_parse_host(TALLOC_CTX *mem_ctx, const char *s, int *port_out) { const char *colon; char *hostname; int port; if (s == NULL) return NULL; if (port_out == NULL) colon = NULL; else colon = strrchr(s, ':'); /* a colon that is not the only colon and is not preceded by ']' is part of a bare IPv6 address, not a port separator */ if ((colon != NULL) && (strchr(s, ':') != colon) && (*(colon - 1) != ']')) colon = NULL; if (colon == NULL) hostname = talloc_strdup(mem_ctx, s); else hostname = talloc_strndup(mem_ctx, s, colon-s); if (hostname == NULL) return NULL; if (! tr_valid_host(hostname)) { talloc_free(hostname); return NULL; } if (port_out != NULL) { if (colon == NULL) { *port_out = 0; } else { port = tr_parse_port(colon+1); if ((port > 0) && tr_str_all_digits(colon+1)) *port_out = port; else *port_out = -1; } } return hostname; }
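A minimal Python sketch of the same host[:port] splitting rule, for illustration only: the talloc handling and the tr_valid_host / tr_parse_port checks from the C version are deliberately omitted, the helper name is hypothetical, and the 1-65535 port range is an assumption about what tr_parse_port enforces.

def parse_host(s):
    # Mirrors tr_parse_host's string handling: returns (hostname, port),
    # with port 0 when absent and -1 when invalid.
    if s is None:
        return None
    colon = s.rfind(':')
    # A colon that is not the only colon and is not preceded by ']'
    # belongs to a bare IPv6 address, not a port separator.
    if colon != -1 and s.find(':') != colon and s[colon - 1] != ']':
        colon = -1
    if colon == -1:
        return s, 0
    host, port_str = s[:colon], s[colon + 1:]
    port = int(port_str) if port_str.isdigit() else -1
    return host, (port if 0 < port < 65536 else -1)  # range check assumed

# parse_host("[2001:db8::1]:80") -> ("[2001:db8::1]", 80)
# parse_host("2001:db8::1")      -> ("2001:db8::1", 0)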
package abstractdt; /** * Class Edge represents a state connection as it is * retrieved by Crawljax * @author tsigalko18 * */ public class Edge { private String from, to, text, via, element, event; /** * default constructor for the Edge class * @param from * @param to */ public Edge(String from, String to) { super(); this.from = from; this.to = to; } /** * @return the from */ public String getFrom() { return from; } /** * @param from the from to set */ public void setFrom(String from) { this.from = from; } /** * @return the to */ public String getTo() { return to; } /** * @param to the to to set */ public void setTo(String to) { this.to = to; } /** * @return the text */ public String getText() { return text; } /** * @param text the text to set */ public void setText(String text) { this.text = text; } /** * @return the via */ public String getVia() { return via; } /** * @param via the via to set */ public void setVia(String via) { this.via = via; } /** * @return the element */ public String getElement() { return element; } /** * @param element the element to set */ public void setElement(String element) { this.element = element; } /** * @return the event */ public String getEvent() { return event; } /** * @param event the event to set */ public void setEvent(String event) { this.event = event; } /* (non-Javadoc) * @see java.lang.Object#toString() */ @Override public String toString() { return "Edge [" + (from != null ? "from=" + from + ", " : "") + (to != null ? "to=" + to + ", " : "") + (via != null ? "via=" + via : "") + "]"; } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((from == null) ? 0 : from.hashCode()); result = prime * result + ((to == null) ? 0 : to.hashCode()); result = prime * result + ((via == null) ? 0 : via.hashCode()); return result; } /* (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Edge other = (Edge) obj; if (from == null) { if (other.from != null) return false; } else if (!from.equals(other.from)) return false; if (to == null) { if (other.to != null) return false; } else if (!to.equals(other.to)) return false; if (via == null) { if (other.via != null) return false; } else if (!via.equals(other.via)) return false; return true; } }
def reissue(old_certificate_name, notify, commit): if commit: print("[!] Running in COMMIT mode.") print("[+] Starting certificate re-issuance.") status = FAILURE_METRIC_STATUS try: old_cert = validate_certificate(old_certificate_name) if not old_cert: for certificate in get_all_pending_reissue(): request_reissue(certificate, notify, commit) else: request_reissue(old_cert, notify, commit) status = SUCCESS_METRIC_STATUS print("[+] Done!") except Exception as e: capture_exception() current_app.logger.exception("Error reissuing certificate.", exc_info=True) print("[!] Failed to reissue certificates. Reason: {}".format(e)) metrics.send( "certificate_reissue_job", "counter", 1, metric_tags={"status": status} )
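A hedged invocation sketch, assuming this function is wired up as a CLI command and that "my-old-cert" names an existing certificate (both are placeholders not given by the source):

# Dry run first: with commit=False nothing is persisted.
reissue("my-old-cert", notify=True, commit=False)
# Then for real; commit=True prints the "[!] Running in COMMIT mode." banner.
reissue("my-old-cert", notify=True, commit=True)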
def zonalStats(rast, zones, **runivarargs): if 'flags' in runivarargs: runivarargs['flags'] += 't' else: runivarargs['flags'] = 't' lines = grass.read_command( 'r.univar', map=rast, zones=zones, **runivarargs).split('\n') stats = np.array([tuple(s.split('|')) for s in lines[1:-1]], dtype=str) colnames = lines[0].split('|') cols = [] for c in stats.T: try: cols += [list(map(int, c))] except ValueError: try: cols += [list(map(float, c))] except ValueError: cols += [c] df = pd.DataFrame(zip(*cols), columns=colnames) df.set_index('zone', inplace=True) reg = grass.region() df['non_null_area'] = df['non_null_cells'] * reg['ewres'] * reg['nsres'] df['null_area'] = df['null_cells'] * reg['ewres'] * reg['nsres'] return df
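A usage sketch, assuming an active GRASS session where 'elevation' is a raster map and 'basins' is an integer zone map (both names are placeholders):

# Per-zone univariate statistics as a DataFrame indexed by zone id.
df = zonalStats('elevation', 'basins')
print(df[['mean', 'stddev', 'non_null_area']].head())
mean_of_zone_1 = df.loc[1, 'mean']  # zones form the index, so lookup is direct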
def edi_resample(src, w, h, edi=None, kernel='spline16', a1=None, a2=None, sx=None, sy=None, invks=False, taps=4, invkstaps=4, **kwargs): name = 'edi_resample' valid_edis = { 'eedi2': ['mthresh', 'lthresh', 'vthresh', 'estr', 'dstr', 'maxd', 'map', 'nt', 'pp'], 'eedi3': ['alpha', 'beta', 'gamma', 'nrad', 'mdis', 'hp', 'ucubic', 'cost3', 'vcheck', 'vthresh0', 'vthresh1', 'vthresh2', 'sclip', 'opt'], 'eedi3cl': ['alpha', 'beta', 'gamma', 'nrad', 'mdis', 'hp', 'ucubic', 'cost3', 'vcheck', 'vthresh0', 'vthresh1', 'vthresh2', 'sclip', 'opt', 'device'], 'nnedi3': ['nsize', 'nns', 'qual', 'etype', 'pscrn', 'opt', 'int16_prescreener', 'int16_predictor', 'exp'], 'nnedi3cl': ['nsize', 'nns', 'qual', 'etype', 'pscrn', 'device'], } if not isinstance(src, vs.VideoNode): raise TypeError(name + ": 'src' must be a clip") if not isinstance(edi, str): raise TypeError(name + ": Must use a supported edge-directed interpolation filter string") edi = edi.lower() if edi not in valid_edis: raise TypeError(name + ": '" + edi + "' is not a supported edge-directed interpolation filter") for arg in kwargs: if arg not in valid_edis[edi]: raise TypeError(name + ": '" + arg + "' is not a valid argument for " + edi) edifuncs = { 'eedi2': (lambda src: core.eedi2.EEDI2(src, field=1, **kwargs).std.Transpose()), 'eedi3': (lambda src: core.eedi3m.EEDI3(src, field=1, dh=True, **kwargs).std.Transpose()), 'eedi3cl': (lambda src: core.eedi3m.EEDI3CL(src, field=1, dh=True, **kwargs).std.Transpose()), 'nnedi3': (lambda src: core.znedi3.nnedi3(src, field=1, dh=True, **kwargs).std.Transpose()), 'nnedi3cl': (lambda src: core.nnedi3cl.NNEDI3CL(src, field=1, dh=True, dw=True, **kwargs)), } scale = h / src.height if scale == 1: return src double_count = ceil(log(scale, 2)) double_count = double_count * 2 if edi != 'nnedi3cl' else double_count doubled = src for _ in range(double_count): doubled = edifuncs[edi](doubled) if sx is None: sx = [-0.5, -0.5 * src.format.subsampling_w] if double_count >= 1 else 0 if sy is None: sy = [-0.5, -0.5 * src.format.subsampling_h] if double_count >= 1 else 0 down = core.fmtc.resample(doubled, w=w, h=h, sx=sx, sy=sy, kernel=kernel, a1=a1, a2=a2, taps=taps, invks=invks, invkstaps=invkstaps) return fvf.Depth(down, src.format.bits_per_sample)
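A minimal usage sketch, assuming a VapourSynth script where this module's core and fvf imports are in scope and the znedi3 plugin is installed (the source filter and filename are placeholders):

import vapoursynth as vs
core = vs.core
src = core.lsmas.LWLibavSource('input.mkv')  # placeholder source filter
# Upscale with nnedi3 doubling, then downsample to the target size.
up = edi_resample(src, 1920, 1080, edi='nnedi3', nsize=4, nns=3)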
#include "516_input.h" #define DELAY(X) {unsigned int _i = X; while(_i--);} /* description: get the pressed key and stores the result into the given variable and return the result of the read reaction. parameters: record <unsigned char *>: the address of the variable to store the key code; return: 0 for no key pressed, 1 for key pressed. */ int get_key( unsigned char * record ) { unsigned char counter; unsigned char characters[6] = {'.', 'C', '+', '-', '*', '/'}; // matrix key detection MATRIX_PIN = 0x0f; if ( MATRIX_PIN != 0x0f ) { DELAY( 1000 ); // check again to avoid the shake if ( MATRIX_PIN != 0x0f ) { // inits the counter variable with 0 for later use counter = 0; MATRIX_PIN = 0x0f; switch( MATRIX_PIN ) { case( 0x07 ): *record = 0; break; case( 0x0b ): *record = 1; break; case( 0x0d ): *record = 2; break; case( 0x0e ): *record = 3; break; default: // ignore the others return 0; } MATRIX_PIN = 0xf0; switch( MATRIX_PIN ) { case( 0x70 ): // *record = *record; break; case( 0xb0 ): *record += 4; break; case( 0xd0 ): *record += 8; break; case( 0xe0 ): *record += 12; break; default: // ignore the others return 0; } // ignore the release action ( while ( MATRIX_PIN != 0xf0); if ( *record < 10 ) *record += '0'; else *record = characters[*record % 10]; // notify success return 1; } } // detect the separate keys if ( K1 == 0) { DELAY( 1000 ); if ( K1 == 0 ) { *record = '('; while ( !K1 ); return 1; } } if ( K2 == 0) { DELAY( 1000 ); if ( K2 == 0 ) { *record = ')'; while ( !K2 ); return 1; } } if ( K3 == 0) { DELAY( 1000 ); if ( K3 == 0 ) { *record = '$'; while ( !K3 ); return 1; } } if ( K4 == 0) { DELAY( 1000 ); if ( K4 == 0 ) { *record = '='; while ( !K4 ); return 1; } } if ( K5 == 0) { DELAY( 1000 ); if ( K5 == 0 ) { *record = 'u'; while ( !K5 ); return 1; } } /* for (counter = 0; counter < 8; counter++) { SEPARATE_PIN = 0xff; if ( SEPARATE_PIN^counter == 0 ) { DELAY( 1000 ); if ( SEPARATE_PIN^counter == 0 ) { *record = 16 + counter; while ( !SEPARATE_PIN^counter ); // notify success return 1; } } } */ // notify nothing found return 0; }
/* * Copyright 2020 Google LLC * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef GrD3DGpuDescriptorTableManager_DEFINED #define GrD3DGpuDescriptorTableManager_DEFINED #include "src/gpu/d3d/GrD3DDescriptorHeap.h" class GrD3DCommandList; class GrD3DDirectCommandList; class GrD3DGpu; class GrD3DDescriptorTable : public SkRefCnt { public: GrD3DDescriptorTable(D3D12_CPU_DESCRIPTOR_HANDLE baseCPU, D3D12_GPU_DESCRIPTOR_HANDLE baseGPU, ID3D12DescriptorHeap* heap, D3D12_DESCRIPTOR_HEAP_TYPE type) : fDescriptorTableCpuStart(baseCPU) , fDescriptorTableGpuStart(baseGPU) , fHeap(heap) , fType(type) {} const D3D12_CPU_DESCRIPTOR_HANDLE* baseCpuDescriptorPtr() { return &fDescriptorTableCpuStart; } const D3D12_GPU_DESCRIPTOR_HANDLE baseGpuDescriptor() { return fDescriptorTableGpuStart; } ID3D12DescriptorHeap* heap() const { return fHeap; } D3D12_DESCRIPTOR_HEAP_TYPE type() const { return fType; } private: D3D12_CPU_DESCRIPTOR_HANDLE fDescriptorTableCpuStart; D3D12_GPU_DESCRIPTOR_HANDLE fDescriptorTableGpuStart; ID3D12DescriptorHeap* fHeap; D3D12_DESCRIPTOR_HEAP_TYPE fType; }; class GrD3DDescriptorTableManager { public: GrD3DDescriptorTableManager(GrD3DGpu*); sk_sp<GrD3DDescriptorTable> createShaderViewTable(GrD3DGpu*, unsigned int count); sk_sp<GrD3DDescriptorTable> createSamplerTable(GrD3DGpu*, unsigned int count); void prepForSubmit(GrD3DGpu* gpu); private: class Heap : public GrRecycledResource { public: static sk_sp<Heap> Make(GrD3DGpu* gpu, D3D12_DESCRIPTOR_HEAP_TYPE type, unsigned int numDescriptors); sk_sp<GrD3DDescriptorTable> allocateTable(unsigned int count); bool canAllocate(unsigned int count) const { return (fDescriptorCount - fNextAvailable) >= count; } ID3D12DescriptorHeap* d3dDescriptorHeap() const { return fHeap->descriptorHeap(); } D3D12_DESCRIPTOR_HEAP_TYPE type() const { return fType; } unsigned int descriptorCount() { return fDescriptorCount; } bool used() { return fNextAvailable > 0; } void reset() { fNextAvailable = 0; } private: Heap(GrD3DGpu* gpu, std::unique_ptr<GrD3DDescriptorHeap>& heap, D3D12_DESCRIPTOR_HEAP_TYPE type, unsigned int descriptorCount) : INHERITED() , fGpu(gpu) , fHeap(std::move(heap)) , fType(type) , fDescriptorCount(descriptorCount) , fNextAvailable(0) { } void freeGPUData() const override {} void onRecycle() const override; #ifdef SK_TRACE_MANAGED_RESOURCES void dumpInfo() const override { SkDebugf("GrD3DDescriptorTable::Heap: %p (%d refs)\n", fHeap.get(), this->getRefCnt()); } #endif GrD3DGpu* fGpu; std::unique_ptr<GrD3DDescriptorHeap> fHeap; D3D12_DESCRIPTOR_HEAP_TYPE fType; unsigned int fDescriptorCount; unsigned int fNextAvailable; using INHERITED = GrRecycledResource; }; class HeapPool { public: HeapPool(GrD3DGpu*, D3D12_DESCRIPTOR_HEAP_TYPE); sk_sp<GrD3DDescriptorTable> allocateTable(GrD3DGpu*, unsigned int count); void recycle(sk_sp<Heap>); sk_sp<Heap>& currentDescriptorHeap(); void prepForSubmit(GrD3DGpu* gpu); private: inline static constexpr int kInitialHeapDescriptorCount = 256; std::vector<sk_sp<Heap>> fDescriptorHeaps; D3D12_DESCRIPTOR_HEAP_TYPE fHeapType; unsigned int fCurrentHeapDescriptorCount; }; void recycle(Heap*); HeapPool fShaderViewDescriptorPool; HeapPool fSamplerDescriptorPool; }; #endif
{-| Module : Calculi Description : Definitions of sequent calculi for use in logix. Copyright : (c) <NAME>, 2017 License : BSD3 Maintainer : <EMAIL> Stability : experimental This module is where we define the actual Calculi for logix. It can be edited to customize the software. -} module Calculi ( calculi , andForm , orForm , impliesForm , forallForm , existsForm ) where import Calculus import Data.Char import Prelude hiding (even, odd) -- | All the calculi for logix. To change the default calculus upon startup, simply -- switch it to the front of the list. calculi :: [Calculus] calculi = [g3c, g3cp, g0c, g3ip_em, g3i, g3ip, g0i, g3ipm, g4ip, wll, pfll] -- Random notes -- 1. Do not use the SubstPat construct in the conclusion of any derivation rule; it -- will probably not do what you intended. Support for this is pretty tricky, but -- it's on my todo list. -------------------------------------------------------------------------------- -- Calculi definitions -- formula connectives botForm = ZeroaryOp (UniName ("_|_", "βŠ₯")) andForm = BinaryOp (UniName ("&","&")) orForm = BinaryOp (UniName ("|","∨")) impliesForm = BinaryOp (UniName ("->","βŠƒ")) forallForm = Quant (UniName ("forall ","βˆ€")) existsForm = Quant (UniName ("exists ","βˆƒ")) -- connective patterns botPat = ZeroaryOpPat (UniName ("_|_", "βŠ₯")) andPat = BinaryOpPat (UniName ("&","&")) orPat = BinaryOpPat (UniName ("|","∨")) impliesPat = BinaryOpPat (UniName ("->","βŠƒ")) forallPat = QuantPat (UniName ("forall ","βˆ€")) existsPat = QuantPat (UniName ("exists ","βˆƒ")) -- base patterns p = PredPat "P" a = FormPat "A" b = FormPat "B" c = FormPat "C" d = FormPat "D" e = FormPat "E" gamma = SetPat "Ξ“" gamma' = SetPat "Ξ“'" delta = SetPat "Ξ”" delta' = SetPat "Ξ”'" -- abbreviations neg = UAbbrev (UniName ("~", "Β¬")) "A" (impliesPat a botPat) negPat x = impliesPat x botPat iff = BAbbrev (UniName ("<->", "↔")) "A" "B" (andPat (impliesPat a b) (impliesPat b a)) -- quantifier and subst patterns a_x_y = SubstPat "x" (VarPat "y") "A" a_x_t = SubstPat "x" (TermPat "t") "A" forall_x_a = forallPat "x" a exists_x_a = existsPat "x" a nofree_y = NoFreePat "y" -- | Infix andPat. (>&<) = andPat -- | Infix orPat. (>|<) = orPat -- | Infix impliesPat. 
(>>>) = impliesPat g3c :: Calculus g3c = Calculus { calcName = "g3c", axioms = [("Axiom", [p, gamma] ::=> [delta, p])], rules = [ ("R&", ([ [gamma] ::=> [delta, a], [gamma] ::=> [delta, b] ], [gamma] ::=> [delta, a >&< b])) , ("R|", ([ [gamma] ::=> [delta, a, b] ], [gamma] ::=> [delta, a >|< b])) , ("R->", ([ [a, gamma] ::=> [delta, b] ], [gamma] ::=> [delta, a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [delta] ], [a >&< b, gamma] ::=> [delta])) , ("L|", ([ [a, gamma] ::=> [delta], [b, gamma] ::=> [delta] ], [a >|< b, gamma] ::=> [delta])) , ("L->", ([ [gamma] ::=> [delta, a], [b, gamma] ::=> [delta] ], [a >>> b, gamma] ::=> [delta])) , ("L_|_", ([ ], [botPat, gamma] ::=> [delta])) , ("Lforall", ([ [a_x_t, forall_x_a, gamma] ::=> [delta] ], [forall_x_a, gamma] ::=> [delta])) , ("Rforall", ([ [gamma] ::=> [delta, a_x_y] ], [nofree_y gamma] ::=> [nofree_y delta, nofree_y forall_x_a])) , ("Lexists", ([ [a_x_y, gamma] ::=> [delta] ], [nofree_y exists_x_a, nofree_y gamma] ::=> [nofree_y delta])) , ("Rexists", ([ [gamma] ::=> [delta, exists_x_a, a_x_t] ], [gamma] ::=> [delta, exists_x_a])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g3cp :: Calculus g3cp = Calculus { calcName = "g3cp", axioms = [("Axiom", [p, gamma] ::=> [delta, p])], rules = [ ("R&", ([ [gamma] ::=> [delta, a], [gamma] ::=> [delta, b] ], [gamma] ::=> [delta, a >&< b])) , ("R|", ([ [gamma] ::=> [delta, a, b] ], [gamma] ::=> [delta, a >|< b])) , ("R->", ([ [a, gamma] ::=> [delta, b] ], [gamma] ::=> [delta, a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [delta] ], [a >&< b, gamma] ::=> [delta])) , ("L|", ([ [a, gamma] ::=> [delta], [b, gamma] ::=> [delta] ], [a >|< b, gamma] ::=> [delta])) , ("L->", ([ [gamma] ::=> [delta, a], [b, gamma] ::=> [delta] ], [a >>> b, gamma] ::=> [delta])) , ("L_|_", ([ ], [botPat, gamma] ::=> [delta])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g3i :: Calculus g3i = Calculus { calcName = "g3i", axioms = [("Axiom", [p, gamma] ::=> [p])], rules = [ ("R&", ([ [gamma] ::=> [a], [gamma] ::=> [b] ], [gamma] ::=> [a >&< b])) , ("R|1", ([ [gamma] ::=> [a] ], [gamma] ::=> [a >|< b])) , ("R|2", ([ [gamma] ::=> [b] ], [gamma] ::=> [a >|< b])) , ("R->", ([ [a, gamma] ::=> [b] ], [gamma] ::=> [a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [c] ], [a >&< b, gamma] ::=> [c])) , ("L|", ([ [a, gamma] ::=> [c], [b, gamma] ::=> [c] ], [a >|< b, gamma] ::=> [c])) , ("L->", ([ [a >>> b, gamma] ::=> [a], [b, gamma] ::=> [c] ], [a >>> b, gamma] ::=> [c])) , ("L_|_", ([], [botPat, gamma] ::=> [c])) , ("Lforall", ([ [a_x_t, forall_x_a, gamma] ::=> [c] ], [ forall_x_a, gamma] ::=> [c])) , ("Rforall", ([ [gamma] ::=> [a_x_y] ], [nofree_y gamma] ::=> [nofree_y forall_x_a])) , ("Lexists", ([ [a_x_y, gamma] ::=> [c] ], [nofree_y exists_x_a, nofree_y gamma] ::=> [nofree_y c])) , ("Rexists", ([ [gamma] ::=> [a_x_t] ], [gamma] ::=> [exists_x_a])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g3ip :: Calculus g3ip = Calculus { calcName = "g3ip", axioms = [("Axiom", [p, gamma] ::=> [p])], rules = [ ("R&", ([ [gamma] ::=> [a], [gamma] ::=> [b] ], [gamma] ::=> [a >&< b])) , ("R|1", ([ [gamma] ::=> [a] ], [gamma] ::=> [a >|< b])) , ("R|2", ([ [gamma] ::=> [b] ], [gamma] ::=> [a >|< b])) , ("R->", ([ [a, gamma] ::=> [b] ], [gamma] ::=> [a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [c] ], [a >&< b, gamma] ::=> [c])) , ("L|", ([ [a, gamma] ::=> [c], [b, gamma] ::=> [c] ], [a >|< b, gamma] ::=> [c])) , ("L->", ([ [a >>> b, gamma] ::=> [a], [b, gamma] ::=> [c] ], [a >>> b, gamma] ::=> [c])) , ("L_|_", ([], [botPat, gamma] ::=> [c])) , ("Cut", ([[gamma] ::=> [d], [d, 
gamma'] ::=> [c]] , [gamma, gamma'] ::=> [c])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g3ip_em :: Calculus g3ip_em = Calculus { calcName = "g3ip_em", axioms = [("Axiom", [p, gamma] ::=> [p])], rules = [ ("R&", ([ [gamma] ::=> [a], [gamma] ::=> [b] ], [gamma] ::=> [a >&< b])) , ("R|1", ([ [gamma] ::=> [a] ], [gamma] ::=> [a >|< b])) , ("R|2", ([ [gamma] ::=> [b] ], [gamma] ::=> [a >|< b])) , ("R->", ([ [a, gamma] ::=> [b] ], [gamma] ::=> [a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [c] ], [a >&< b, gamma] ::=> [c])) , ("L|", ([ [a, gamma] ::=> [c], [b, gamma] ::=> [c] ], [a >|< b, gamma] ::=> [c])) , ("L->", ([ [a >>> b, gamma] ::=> [a], [b, gamma] ::=> [c] ], [a >>> b, gamma] ::=> [c])) , ("L_|_", ([], [botPat, gamma] ::=> [c])) , ("EM", ([ [p, gamma] ::=> [c], [negPat p, gamma] ::=> [c] ], [gamma] ::=> [c])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g0c :: Calculus g0c = Calculus { calcName = "g0c", axioms = [("Axiom", [a] ::=> [a])], rules = [ ("R&", ([ [gamma] ::=> [delta, a], [gamma'] ::=> [delta', b] ], [gamma, gamma'] ::=> [delta, delta', a >&< b])) , ("R|", ([ [gamma] ::=> [delta, a, b] ], [gamma] ::=> [delta, a >|< b])) , ("R->", ([ [a, gamma] ::=> [delta, b] ], [gamma] ::=> [delta, a >>> b])) , ("L_|_", ([ ], [botPat] ::=> [c])) , ("L&", ([ [a, b, gamma] ::=> [delta] ], [a >&< b, gamma] ::=> [delta])) , ("L|", ([ [a, gamma] ::=> [delta], [b, gamma'] ::=> [delta'] ], [a >|< b, gamma, gamma'] ::=> [delta, delta'])) , ("L->", ([ [gamma] ::=> [delta, a], [b, gamma'] ::=> [delta'] ], [a >>> b, gamma, gamma'] ::=> [delta, delta'])) , ("Lforall", ([ [a_x_t, forall_x_a, gamma] ::=> [delta] ], [forall_x_a, gamma] ::=> [delta])) , ("Rforall", ([ [gamma] ::=> [delta, a_x_y] ], [nofree_y gamma] ::=> [nofree_y delta, nofree_y forall_x_a])) , ("Lexists", ([ [a_x_y, gamma] ::=> [delta] ], [nofree_y exists_x_a, nofree_y gamma] ::=> [nofree_y delta])) , ("Rexists", ([ [gamma] ::=> [delta, exists_x_a, a_x_t] ], [gamma] ::=> [delta, exists_x_a])) , ("LW", ([ [gamma] ::=> [delta] ], [a, gamma] ::=> [delta])) , ("RW", ([ [gamma] ::=> [delta] ], [gamma] ::=> [delta, a])) , ("LC", ([ [a, a, gamma] ::=> [delta] ], [a, gamma] ::=> [delta])) , ("RC", ([ [gamma] ::=> [delta, a, a] ], [gamma] ::=> [delta, a])) ] , uAbbrevs = [neg], bAbbrevs = [iff] } g0i :: Calculus g0i = Calculus { calcName = "g0i", axioms = [("Axiom", [a] ::=> [a])], rules = [ ("R&", ([ [gamma] ::=> [a], [delta] ::=> [b] ], [gamma, delta] ::=> [a >&< b])) , ("R|1", ([ [gamma] ::=> [a] ], [gamma] ::=> [a >|< b])) , ("R|2", ([ [gamma] ::=> [b] ], [gamma] ::=> [a >|< b])) , ("R->", ([ [a, gamma] ::=> [b] ], [gamma] ::=> [a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [c] ], [a >&< b, gamma] ::=> [c])) , ("L|", ([ [a, gamma] ::=> [c], [b, delta] ::=> [c] ], [a >|< b, gamma, delta] ::=> [c])) , ("L->", ([ [gamma] ::=> [a], [b, delta] ::=> [c] ], [a >>> b, gamma, delta] ::=> [c])) , ("L_|_", ([], [botPat] ::=> [c])) , ("Lforall", ([ [a_x_t, forall_x_a, gamma] ::=> [c] ], [ forall_x_a, gamma] ::=> [c])) , ("Rforall", ([ [gamma] ::=> [a_x_y] ], [nofree_y gamma] ::=> [nofree_y forall_x_a])) , ("Lexists", ([ [a_x_y, gamma] ::=> [c] ], [nofree_y exists_x_a, nofree_y gamma] ::=> [nofree_y c])) , ("Rexists", ([ [gamma] ::=> [a_x_t] ], [gamma] ::=> [exists_x_a])) , ("Wk", ([ [gamma] ::=> [c] ], [a, gamma] ::=> [c])) , ("Ctr", ([ [a, a, gamma] ::=> [c] ], [a, gamma] ::=> [c])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g3ipm :: Calculus g3ipm = Calculus { calcName = "g3ipm", axioms = [("Axiom", [p, gamma] ::=> [delta, p])], rules = [ ("R&", ([ [gamma] ::=> [delta, 
a], [gamma] ::=> [delta, b] ], [gamma] ::=> [delta, a >&< b])) , ("R|", ([ [gamma] ::=> [delta, a, b] ], [gamma] ::=> [delta, a >|< b])) , ("R->", ([ [a, gamma] ::=> [b] ], [gamma] ::=> [delta, a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [delta] ], [a >&< b, gamma] ::=> [delta])) , ("L|", ([ [a, gamma] ::=> [delta], [b, gamma] ::=> [delta] ], [a >|< b, gamma] ::=> [delta])) , ("L->", ([ [a >>> b, gamma] ::=> [a], [b, gamma] ::=> [delta] ], [a >>> b, gamma] ::=> [delta])) , ("L_|_", ([], [botPat, gamma] ::=> [delta])) ], uAbbrevs = [neg], bAbbrevs = [iff] } g4ip :: Calculus g4ip = Calculus { calcName = "g4ip", axioms = [("Axiom", [p, gamma] ::=> [p])], rules = [ ("R&", ([ [gamma] ::=> [a], [gamma] ::=> [b] ], [gamma] ::=> [a >&< b])) , ("R|1", ([ [gamma] ::=> [a] ], [gamma] ::=> [a >|< b])) , ("R|2", ([ [gamma] ::=> [b] ], [gamma] ::=> [a >|< b])) , ("R->", ([ [a, gamma] ::=> [b] ], [gamma] ::=> [a >>> b])) , ("L&", ([ [a, b, gamma] ::=> [c] ], [a >&< b, gamma] ::=> [c])) , ("L|", ([ [a, gamma] ::=> [c], [b, gamma] ::=> [c] ], [a >|< b, gamma] ::=> [c])) , ("L0->", ([ [p, b, gamma] ::=> [e] ], [p, p >>> b, gamma] ::=> [e])) , ("L&->", ([ [c >>> (d >>> b), gamma] ::=> [e] ], [(c >&< d) >>> b, gamma] ::=> [e])) , ("L|->", ([ [c >>> b, d >>> b, gamma] ::=> [e] ], [(c >|< d) >>> b, gamma] ::=> [e])) , ("L->>>", ([ [c, d >>> b, gamma] ::=> [d], [b, gamma] ::=> [e] ], [(c >>> d) >>> b, gamma] ::=> [e])) , ("L_|_", ([], [botPat, gamma] ::=> [c])) ], uAbbrevs = [neg], bAbbrevs = [iff] } -------------------------------------------------------------------------------- -- Linear logic -- From "A taste of linear logic", Wadler. -- connectives -- Intuitionistic and linear assumptions. Linear assumptions are the default, so we -- just provide a unary op indicating that an assumption is intuitionistic. This is -- like Wadler's [] notation, but we'll use * instead of brackets. intPat = UnaryOpPat (UniName ("*","*")) -- unary operators ofCoursePat = UnaryOpPat (UniName ("!","!")) -- binary operators lolPat = BinaryOpPat (UniName ("-o", "-o")) timesPat = BinaryOpPat (UniName ("x", "βŠ—")) plusPat = BinaryOpPat (UniName ("+", "βŠ•")) -- we also use andPat, above. -- wll, or Wadler's linear logic. wll :: Calculus wll = Calculus { calcName = "wll" , axioms = [ ("Id", [a] ::=> [a]) , ("*Id", [intPat a] ::=> [a])] , rules = [ ("!I", ([[intPat gamma] ::=> [a]] , [intPat gamma] ::=> [ofCoursePat a])) , ("!E", ([[gamma] ::=> [ofCoursePat a], [delta, intPat a] ::=> [b]] , [gamma, delta] ::=> [b])) , ("-oI", ([[gamma, a] ::=> [b]] , [gamma] ::=> [a `lolPat` b])) , ("-oE", ([[gamma] ::=> [a `lolPat` b], [delta] ::=> [a]] , [gamma, delta] ::=> [b])) , ("xI", ([[gamma] ::=> [a], [delta] ::=> [b]] , [gamma, delta] ::=> [a `timesPat` b])) , ("xE", ([[gamma] ::=> [a `timesPat` b], [delta, a, b] ::=> [c]] , [gamma, delta] ::=> [c])) , ("&I", ([[gamma] ::=> [a], [gamma] ::=> [b]] , [gamma] ::=> [a `andPat` b])) , ("&E1", ([[gamma] ::=> [a `andPat` b]] , [gamma] ::=> [a])) , ("&E2", ([[gamma] ::=> [a `andPat` b]] , [gamma] ::=> [b])) , ("+I1", ([[gamma] ::=> [a]] , [gamma] ::=> [a `plusPat` b])) , ("+I2", ([[gamma] ::=> [b]] , [gamma] ::=> [a `plusPat` b])) , ("+E", ([[gamma] ::=> [a `plusPat` b], [delta, a] ::=> [c], [delta, b] ::=> [c]] , [gamma, delta] ::=> [c])) , ("Ctr", ([[gamma, intPat a, intPat a] ::=> [b]] , [gamma, intPat a] ::=> [b])) , ("Wk", ([[gamma] ::=> [b]] , [gamma, intPat a] ::=> [b])) ] , uAbbrevs = [] , bAbbrevs = [] } -- From Pfenning. 
one = ZeroaryOpPat (UniName ("1", "1")) zero = ZeroaryOpPat (UniName ("0", "0")) top = ZeroaryOpPat (UniName ("T", "⊀")) withPat = BinaryOpPat (UniName ("&", "&")) pfll :: Calculus pfll = Calculus { calcName = "pfll" , axioms = [] , rules = [ ("Id", ([], [a] ::=> [a])) , ("xL", ([[delta, a, b] ::=> [c]] , [delta, a `timesPat` b] ::=> [c])) , ("xR", ([[delta] ::=> [a], [delta'] ::=> [b]] , [delta, delta'] ::=> [a `timesPat` b])) , ("1L", ([] , [] ::=> [one])) , ("1R", ([[delta] ::=> [c]] , [delta, one] ::=> [c])) , ("&L1", ([[delta, a] ::=> [c]] , [delta, a `withPat` b] ::=> [c])) , ("&L2", ([[delta, b] ::=> [c]] , [delta, a `withPat` b] ::=> [c])) , ("&R", ([[delta] ::=> [a], [delta] ::=> [b]] , [delta] ::=> [a `withPat` b])) , ("TR", ([] , [delta] ::=> [top])) , ("+L", ([[delta, a] ::=> [c], [delta, b] ::=> [c]] , [delta, a `plusPat` b] ::=> [c])) , ("+R1", ([[delta] ::=> [a]] , [delta] ::=> [a `plusPat` b])) , ("+R2", ([[delta] ::=> [b]] , [delta] ::=> [a `plusPat` b])) , ("0L", ([] , [delta, zero] ::=> [c])) , ("-oL", ([[delta] ::=> [a], [delta', b] ::=> [c]] , [delta, delta', a `lolPat` b] ::=> [c])) , ("-oR", ([[delta, a] ::=> [b]] , [delta] ::=> [a `lolPat` b])) , ("copy", ([[delta, intPat a, a] ::=> [c]] , [delta, intPat a] ::=> [c])) , ("!L", ([[gamma, intPat a] ::=> [c]] , [gamma, ofCoursePat a] ::=> [c])) , ("!R", ([[intPat gamma] ::=> [a]] , [intPat gamma] ::=> [ofCoursePat a])) , ("Cut", ([[delta] ::=> [a], [delta', a] ::=> [c]] , [delta, delta'] ::=> [c])) ] , uAbbrevs = [] , bAbbrevs = [] }
// Set the current frame of animation // @param newFrame The frame to jump to in the animation // todo: check bounds on newFrame bool ar3DS::setFrame(int newFrame) { _currentFrame = newFrame; return true; }
/*- * Copyright (c) 2001-2006, Cisco Systems, Inc. All rights reserved. * Copyright (c) 2008-2012, by <NAME>. All rights reserved. * Copyright (c) 2008-2012, by <NAME>. All rights reserved. * Copyright (c) 2008-2012, by <NAME>. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * a) Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * b) Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the distribution. * * c) Neither the name of Cisco Systems, Inc. nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef _NETINET_SCTP_LOCK_WINDOWS_H_ #define _NETINET_SCTP_LOCK_WINDOWS_H_ /* * General locking concepts: The goal of our locking is to of course provide * consistency and yet minimize overhead. We will attempt to use * non-recursive locks which are supposed to be quite inexpensive. Now in * order to do this the goal is that most functions are not aware of locking. * Once we have a TCB we lock it and unlock when we are through. This means * that the TCB lock is kind-of a "global" lock when working on an * association. Caution must be used when asserting a TCB_LOCK since if we * recurse we deadlock. * * Most other locks (INP and INFO) attempt to localize the locking i.e. we try * to contain the lock and unlock within the function that needs to lock it. * This sometimes means we do extra locks and unlocks and lose a bit of * efficiency, but if the performance statements about non-recursive locks are * true this should not be a problem. One issue that arises with this only * lock when needed is that if an implicit association setup is done we have * a problem. If at the time I lookup an association I have NULL in the tcb * return, by the time I call to create the association some other processor * could have created it. This is what the CREATE lock on the endpoint is for. * Places where we will be implicitly creating the association OR just * creating an association (the connect call) will assert the CREATE_INP * lock. This will assure us that during all the lookup of INP and INFO if * another creator is also locking/looking up we can gate the two to * synchronize. So the CREATE_INP lock is also another one we must use * extreme caution in locking to make sure we don't hit a re-entrancy issue. 
* * For non FreeBSD 5.x we provide a bunch of EMPTY lock macros so we can * blatantly put locks everywhere and they reduce to nothing on * NetBSD/OpenBSD and FreeBSD 4.x * */ /* * When working with the global SCTP lists we lock and unlock the INP_INFO * lock. So when we go to lookup an association we will want to do a * SCTP_INP_INFO_RLOCK() and then when we want to add a new association to * the SCTP_BASE_INFO() list's we will do a SCTP_INP_INFO_WLOCK(). */ #ifdef SCTP_LOCK_LOGGING #include <netinet/sctputil.h> #endif extern struct sctp_foo_stuff sctp_logoff[]; extern int sctp_logoff_stuff; extern LARGE_INTEGER zero_timeout; #define SCTP_IPI_COUNT_INIT() #define SCTP_IPI_COUNT_DESTROY() #define SCTP_STATLOG_INIT_LOCK() #define SCTP_STATLOG_DESTROY() #define SCTP_STATLOG_LOCK() #define SCTP_STATLOG_UNLOCK() #define SCTP_STATLOG_GETREF(x) do { \ (x) = atomic_fetchadd_int(&global_sctp_cwnd_log_at, 1); \ if ((x) == SCTP_STAT_LOG_SIZE) { \ global_sctp_cwnd_log_at = 1; \ (x) = 0; \ global_sctp_cwnd_log_rolled = 1; \ } \ } while (0) #define SCTP_INP_INFO_LOCK_INIT() do { \ rwlock_init(&SCTP_BASE_INFO(ipi_ep_lock), "sctp-info", "inp_info", 0); \ } while (0) #define SCTP_INP_INFO_LOCK_DESTROY() do { \ rwlock_destroy(&SCTP_BASE_INFO(ipi_ep_lock)); \ } while (0) #define SCTP_INP_INFO_RLOCK() do { \ rwlock_acquire(&SCTP_BASE_INFO(ipi_ep_lock), 0); \ } while (0) #define SCTP_INP_INFO_WLOCK() do { \ rwlock_acquire(&SCTP_BASE_INFO(ipi_ep_lock), 1); \ } while (0) #define SCTP_INP_INFO_RUNLOCK() do { \ rwlock_release(&SCTP_BASE_INFO(ipi_ep_lock)); \ } while (0) #define SCTP_INP_INFO_WUNLOCK() do { \ rwlock_release(&SCTP_BASE_INFO(ipi_ep_lock)); \ } while (0) #define SCTP_WQ_ADDR_INIT() do { \ rwlock_init(&SCTP_BASE_INFO(wq_addr_mtx), "sctp-addr-wq","sctp_addr_wq", 0); \ } while (0) #define SCTP_WQ_ADDR_DESTROY() do { \ rwlock_destroy(&SCTP_BASE_INFO(wq_addr_mtx)); \ } while (0) #define SCTP_WQ_ADDR_LOCK() do { \ rwlock_acquire(&SCTP_BASE_INFO(wq_addr_mtx), 1); \ } while (0) #define SCTP_WQ_ADDR_UNLOCK() do { \ rwlock_release(&SCTP_BASE_INFO(wq_addr_mtx)); \ } while (0) #define SCTP_IPI_ADDR_INIT() do { \ rwlock_init(&SCTP_BASE_INFO(ipi_addr_lock), "sctp-addr", "sctp_addr", 0); \ } while (0) #define SCTP_IPI_ADDR_DESTROY() do { \ rwlock_destroy(&SCTP_BASE_INFO(ipi_addr_lock)); \ } while (0) #define SCTP_IPI_ADDR_RLOCK() do { \ rwlock_acquire(&SCTP_BASE_INFO(ipi_addr_lock), 0); \ } while (0) #define SCTP_IPI_ADDR_WLOCK() do { \ rwlock_acquire(&SCTP_BASE_INFO(ipi_addr_lock), 1); \ } while (0) #define SCTP_IPI_ADDR_RUNLOCK() do { \ rwlock_release(&SCTP_BASE_INFO(ipi_addr_lock)); \ } while (0) #define SCTP_IPI_ADDR_WUNLOCK() do { \ rwlock_release(&SCTP_BASE_INFO(ipi_addr_lock)); \ } while (0) #define SCTP_IPI_ITERATOR_WQ_INIT() do { \ spinlock_init(&sctp_it_ctl.ipi_iterator_wq_lock, "sctp-it-wq", "sctp_it_wq", 0); \ } while (0) #define SCTP_IPI_ITERATOR_WQ_DESTROY() do { \ spinlock_destroy(&sctp_it_ctl.ipi_iterator_wq_lock); \ } while (0) #define SCTP_IPI_ITERATOR_WQ_LOCK() do { \ spinlock_acquire(&sctp_it_ctl.ipi_iterator_wq_lock); \ } while (0) #define SCTP_IPI_ITERATOR_WQ_UNLOCK() do { \ spinlock_release(&sctp_it_ctl.ipi_iterator_wq_lock); \ } while (0) /* * The INP locks we will use for locking an SCTP endpoint, so for example if * we want to change something at the endpoint level for example random_store * or cookie secrets we lock the INP level. 
*/ #define SCTP_INP_READ_INIT(_inp) do { \ spinlock_init(&(_inp)->inp_rdata_lock, "sctp-read", "inpr", 0); \ } while (0) #define SCTP_INP_READ_DESTROY(_inp) do { \ spinlock_destroy(&(_inp)->inp_rdata_lock); \ } while (0) #define SCTP_INP_READ_LOCK(_inp) do { \ spinlock_acquire(&(_inp)->inp_rdata_lock); \ } while (0) #define SCTP_INP_READ_UNLOCK(_inp) do { \ spinlock_release(&(_inp)->inp_rdata_lock); \ } while (0) #define SCTP_INP_LOCK_INIT(_inp) do { \ rwlock_init(&(_inp)->inp_lock, "sctp-inp", "inp", 0); \ } while (0) #define SCTP_INP_LOCK_DESTROY(_inp) do { \ rwlock_destroy(&(_inp)->inp_lock); \ } while (0) #ifdef SCTP_LOCK_LOGGING #define SCTP_INP_RLOCK(_inp) do { \ sctp_log_lock(_inp, (struct sctp_tcb *)NULL, SCTP_LOG_LOCK_INP); \ rwlock_acquire(&(_inp)->inp_lock, 0); \ } while (0) #else #define SCTP_INP_RLOCK(_inp) do { \ rwlock_acquire(&(_inp)->inp_lock, 0); \ } while (0) #endif #ifdef SCTP_LOCK_LOGGING #define SCTP_INP_WLOCK(_inp) do { \ sctp_log_lock(_inp, (struct sctp_tcb *)NULL, SCTP_LOG_LOCK_INP); \ rwlock_acquire(&(_inp)->inp_lock, 1); \ } while (0) #else #define SCTP_INP_WLOCK(_inp) do { \ rwlock_acquire(&(_inp)->inp_lock, 1); \ } while (0) #endif #define SCTP_INP_RUNLOCK(_inp) do { \ rwlock_release(&(_inp)->inp_lock); \ } while (0) #define SCTP_INP_WUNLOCK(_inp) do { \ rwlock_release(&(_inp)->inp_lock); \ } while (0) #define SCTP_INP_INCR_REF(_inp) do { \ atomic_add_int(&(_inp)->refcount, 1); \ } while (0) #define SCTP_INP_DECR_REF(_inp) do { \ atomic_subtract_int(&(_inp)->refcount, 1); \ } while (0) #define SCTP_INP_LOCK_CONTENDED(_inp) (0) /* Don't know if this is possible */ #define SCTP_INP_READ_CONTENDED(_inp) (0) /* Don't know if this is possible */ #define SCTP_ASOC_CREATE_LOCK_CONTENDED(_inp) (0) /* Don't know if this is possible */ #define SCTP_ASOC_CREATE_LOCK_INIT(_inp) do { \ spinlock_init(&(_inp)->inp_create_lock, "sctp-create", "inp_create", 0); \ } while (0) #define SCTP_ASOC_CREATE_LOCK_DESTROY(_inp) do { \ spinlock_destroy(&(_inp)->inp_create_lock); \ } while (0) #ifdef SCTP_LOCK_LOGGING #define SCTP_ASOC_CREATE_LOCK(_inp) do { \ sctp_log_lock(_inp, (struct sctp_tcb *)NULL, SCTP_LOG_LOCK_CREATE); \ spinlock_acquire(&(_inp)->inp_create_lock); \ } while (0) #else #define SCTP_ASOC_CREATE_LOCK(_inp) do { \ spinlock_acquire(&(_inp)->inp_create_lock); \ } while (0) #endif #define SCTP_ASOC_CREATE_UNLOCK(_inp) do { \ spinlock_release(&(_inp)->inp_create_lock); \ } while (0) /* * For the majority of things (once we have found the association) we will * lock the actual association mutex. This will protect all the assoiciation * level queues and streams and such. We will need to lock the socket layer * when we stuff data up into the receiving sb_mb. I.e. we will need to do an * extra SOCKBUF_LOCK(&so->so_rcv) even though the association is locked. 
*/ #define SCTP_TCB_LOCK_INIT(_tcb) do { \ spinlock_init(&(_tcb)->tcb_lock, "sctp-tcb", "tcb", 0); \ } while (0) #define SCTP_TCB_LOCK_DESTROY(_tcb) do { \ spinlock_destroy(&(_tcb)->tcb_lock); \ } while (0) #ifdef SCTP_LOCK_LOGGING #define SCTP_TCB_LOCK(_tcb) do { \ sctp_log_lock((_tcb)->sctp_ep, _tcb, SCTP_LOG_LOCK_TCB); \ spinlock_acquire(&(_tcb)->tcb_lock); \ } while (0) #else #define SCTP_TCB_LOCK(_tcb) do { \ spinlock_acquire(&(_tcb)->tcb_lock); \ } while (0) #endif __inline int _SCTP_TCB_TRYLOCK(struct sctp_tcb *tcb, char *filename, int lineno) { _spinlock_acquire(&tcb->tcb_lock, filename, lineno); return 1; } #define SCTP_TCB_TRYLOCK(_tcb) _SCTP_TCB_TRYLOCK((_tcb), __FILE__, __LINE__) #define SCTP_TCB_UNLOCK(_tcb) do { \ spinlock_release(&(_tcb)->tcb_lock); \ } while (0) #define SCTP_TCB_UNLOCK_IFOWNED(_tcb) do { \ spinlock_release(&(_tcb)->tcb_lock); \ } while (0) #define SCTP_TCB_INCR_REF(_tcb) do { \ atomic_add_int(&(_tcb)->asoc.refcnt, 1); \ } while (0) #define SCTP_TCB_DECR_REF(_tcb) do { \ atomic_subtract_int(&(_tcb)->asoc.refcnt, 1); \ } while (0) #define SCTP_TCB_SEND_LOCK_INIT(_tcb) do { \ spinlock_init(&(_tcb)->tcb_send_lock, "sctp-send-tcb", "tcbs", 0); \ } while (0) #define SCTP_TCB_SEND_LOCK_DESTROY(_tcb) do { \ spinlock_destroy(&(_tcb)->tcb_send_lock); \ } while (0) #define SCTP_TCB_SEND_LOCK(_tcb) do { \ spinlock_acquire(&(_tcb)->tcb_send_lock); \ } while (0) #define SCTP_TCB_SEND_UNLOCK(_tcb) do { \ spinlock_release(&(_tcb)->tcb_send_lock); \ } while (0) #define SCTP_RADDR_INCR_REF(_net) do { \ atomic_add_int(&(_net)->ref_count, 1); \ } while (0) #define SCTP_RADDR_DECR_REF(_net) do { \ atomic_subtract_int(&(_net)->ref_count, 1); \ } while (0) #define SCTP_LADDR_INCR_REF(_ifa) do { \ atomic_add_int(&(_ifa)->refcount, 1); \ } while (0) #define SCTP_LADDR_DECR_REF(_ifa) do { \ atomic_subtract_int(&(_ifa)->refcount, 1); \ } while (0) #define SCTP_INCR_TCB_FREE_STRMOQ_COUNT(_tcb) do { \ atomic_add_int(&(_tcb)->asoc.free_strmoq_cnt, 1); \ } while (0) #define SCTP_DECR_TCB_FREE_STRMOQ_COUNT(_tcb) do { \ atomic_subtract_int(&(_tcb)->asoc.free_strmoq_cnt, 1); \ } while (0) #define SCTP_INCR_TCB_FREE_CHK_COUNT(_tcb) do { \ atomic_add_int(&(_tcb)->asoc.free_chunk_cnt, 1); \ } while (0) #define SCTP_DECR_TCB_FREE_CHK_COUNT(_tcb) do { \ atomic_subtract_int(&(_tcb)->asoc.free_chunk_cnt, 1); \ } while (0) #ifdef INVARIANTS #define SCTP_TCB_LOCK_ASSERT(_tcb) \ _ASSERT(KeReadStateMutex(&(_tcb)->tcb_mtx) == 0) #else #define SCTP_TCB_LOCK_ASSERT(_tcb) #endif #define SCTP_ITERATOR_LOCK_INIT() do { \ spinlock_init(&sctp_it_ctl.it_lock, "sctp-it", "iterator", 0); \ } while (0) #define SCTP_ITERATOR_LOCK_DESTROY() do { \ spinlock_destroy(&sctp_it_ctl.it_lock); \ } while (0) #ifdef INVARIANTS #define SCTP_ITERATOR_LOCK() do { \ spinlock_acquire(&sctp_it_ctl.it_lock); \ } while (0) #else #define SCTP_ITERATOR_LOCK() do { \ spinlock_acquire(&sctp_it_ctl.it_lock); \ } while (0) #endif #define SCTP_ITERATOR_UNLOCK() do { \ spinlock_release(&sctp_it_ctl.it_lock); \ } while (0) #define SCTP_INCR_EP_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_ep), 1); \ } while (0) #define SCTP_DECR_EP_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_ep), 1); \ } while (0) #define SCTP_INCR_ASOC_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_asoc), 1); \ } while (0) #define SCTP_DECR_ASOC_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_asoc), 1); \ } while (0) #define SCTP_INCR_LADDR_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_laddr), 1); \ } while (0) 
#define SCTP_DECR_LADDR_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_laddr), 1); \ } while (0) #define SCTP_INCR_RADDR_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_raddr), 1); \ } while (0) #define SCTP_DECR_RADDR_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_raddr), 1); \ } while (0) #define SCTP_INCR_CHK_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_chunk), 1); \ } while (0) #define SCTP_DECR_CHK_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_chunk), 1); \ } while (0) #define SCTP_INCR_READQ_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_readq), 1); \ } while (0) #define SCTP_DECR_READQ_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_readq), 1); \ } while (0) #define SCTP_INCR_STRMOQ_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_count_strmoq), 1); \ } while (0) #define SCTP_DECR_STRMOQ_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_count_strmoq), 1); \ } while (0) #define SCTP_INCR_FREE_STRMOQ_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_free_strmoq), 1); \ } while (0) #define SCTP_DECR_FREE_STRMOQ_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_free_strmoq), 1); \ } while (0) #define SCTP_INCR_FREE_CHK_COUNT() do { \ atomic_add_int(&SCTP_BASE_INFO(ipi_free_chunks), 1); \ } while (0) #define SCTP_DECR_FREE_CHK_COUNT() do { \ atomic_subtract_int(&SCTP_BASE_INFO(ipi_free_chunks), 1); \ } while (0) #endif
import tensorflow as tf """tf.conj(x, name=None) Purpose: returns the complex conjugate of x, element-wise.""" a = tf.constant([1 + 2j, 2 - 3j]) z = tf.conj(a) sess = tf.Session() print(sess.run(z)) sess.close() # z==>[1.-2.j 2.+3.j]
def remove_context_menu_items(self): context_menu = component.get('MenuBar').torrentmenu context_menu.remove(self.cmenu_item_toggle) context_menu.remove(self.cmenu_item_run) context_menu.remove(self.cmenu_item_run_all) context_menu.remove(self.sep_1) context_menu.remove(self.sep_2)
// RenderPresetters render preset config with current context // and save into context func RenderPresetters(ctx *Context, pcs []types.PresetConfig) error { ps := make([]Presetter, 0, len(pcs)) for _, pc := range pcs { p := Presetter{ Name: pc.Name, } if len(pc.Args) != 0 { p.Args = map[string]string{} } for k, v := range pc.Args { rendered, err := v.Render(ctx.Variables) if err != nil { return err } p.Args[k] = rendered } ps = append(ps, p) } ctx.Presetters = ps return nil }
/// Notifies all watchers of a change fn notify_all<T>(shared: &Shared<T>) { let watchers = shared.watchers.lock().unwrap(); for watcher in watchers.iter() { // Notify the task watcher.waker.wake(); } }
// FileContractHistory returns the set of revisions made to the specified // contract. func (w *SeedWallet) FileContractHistory(id types.FileContractID) []FileContract { w.mu.Lock() defer w.mu.Unlock() return w.store.FileContractHistory(id) }
def files_to_clean(limit=7): for dirf in ('store','stats'): cache_f = os.path.join(CACHE_ROOT, dirf) now = datetime.datetime.now() for root, dirs, files in os.walk(cache_f): for fpath in files: fullfpath = os.path.join(root, fpath) time_m = datetime.datetime.fromtimestamp(os.stat(fullfpath).st_mtime) delta = now - time_m if delta.days > limit: yield fullfpath
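A caller sketch; files_to_clean only yields paths, so deletion stays the caller's decision:

import os

# Remove cache files whose mtime is more than two weeks old.
for path in files_to_clean(limit=14):
    os.remove(path)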
/** * Calculates the enharmonic equivalent and returns it as a note object. * Returns null if there is no enharmonic for the given note. * * @param direction The higher or lower enharmonic. * @param note The note used for calculating. * @return The enharmonic note object or null. */ public static XinityNote findEnharmonicEquivalent(String direction, XinityNote note) { Integer noteIndex = getLetterIndex(note.getLetter()); Integer accidentalIndex = getAccidentalIndex(note); Integer octaveNumber = note.getOctave(); String calculatedNote; XinityNote returnNote; if (direction.equals("+")) { if ((note.getLetter().equals("B") && !note.getAccidental().equals(NoteAccidental.FLAT))) { octaveNumber++; } calculatedNote = handleHigherEnharmonic(noteIndex, accidentalIndex); } else { if (note.getLetter().equals("C") && !note.getAccidental().equals(NoteAccidental.SHARP)) { octaveNumber--; } calculatedNote = handleLowerEnharmonic(noteIndex, accidentalIndex); } if (calculatedNote == null) { return null; } else { try { returnNote = new XinityNote(calculatedNote + octaveNumber); } catch (Exception ex) { return null; } return returnNote; } }
/** * Non-persistent enum-like composite entity that is used in CentreUpdaterTest for testing centre diff serialisation; used as property value in {@link TgCentreDiffSerialisation}. * * @author TG Team * */ @KeyType(DynamicEntityKey.class) @KeyTitle("Key") @CompanionObject(ITgCentreDiffSerialisationNonPersistentCompositeChild.class) @EntityTitle("Grouping Property") public class TgCentreDiffSerialisationNonPersistentCompositeChild extends AbstractEntity<DynamicEntityKey> { @IsProperty @Title("Key 1") @CompositeKeyMember(1) private String key1; @IsProperty @Title("Key 2") @CompositeKeyMember(2) private String key2; @Observable public TgCentreDiffSerialisationNonPersistentCompositeChild setKey2(final String key2) { this.key2 = key2; return this; } public String getKey2() { return key2; } @Observable public TgCentreDiffSerialisationNonPersistentCompositeChild setKey1(final String key1) { this.key1 = key1; return this; } public String getKey1() { return key1; } public enum GroupingProperty { TEAM_WEST("Team", "West"), TEAM_EAST("Team", "East"), PERSON_GOOD("Person", "Good"), PERSON_BAD("Person", "Bad"); public final String key1; public final String key2; public final TgCentreDiffSerialisationNonPersistentCompositeChild value; GroupingProperty(final String key1, final String key2) { this.key1 = key1; this.key2 = key2; this.value = new TgCentreDiffSerialisationNonPersistentCompositeChild(); this.value.setKey1(key1); this.value.setKey2(key2); } public static GroupingProperty fromValue(final TgCentreDiffSerialisationNonPersistentCompositeChild value) { final String key1 = value.getKey1(); final String key2 = value.getKey2(); return of(values()) .filter(gp -> gp.key1.equals(key1) && gp.key2.equals(key2)) .findFirst() .orElseThrow(() -> new IllegalArgumentException(format("Value [%s %s] is not supported.", key1, key2))); } /** * Finds grouping property by its unique key. * * @param key * @return */ public static Optional<GroupingProperty> findByKey(final String key) { final GroupingProperty[] allValues = values(); for (final GroupingProperty value : allValues) { if ((value.key1 + " " + value.key2).equals(key)) { return Optional.of(value); } } return empty(); } } }
package jp.sinya.swipeback.demo.library3.core.debug; /** * For viewing the fragment stack during debugging. * Created by YoKeyword on 16/2/21. */ @Deprecated public class DebugFragmentRecord { // public CharSequence fragmentName; // public List<DebugFragmentRecord> childFragmentRecord; // // public DebugFragmentRecord(CharSequence fragmentName, List<DebugFragmentRecord> childFragmentRecord) { // this.fragmentName = fragmentName; // this.childFragmentRecord = childFragmentRecord; // } }
import { composeModules } from "microinject"; import ConfigModule from "./config/module"; import WutWotModule from "./services/WutWot/module"; import ReplModule from "./services/Repl/module"; export default composeModules(ConfigModule, WutWotModule, ReplModule);
#!/usr/bin/python # FSEvents Parser Python Script # ------------------------------------------------------ # Parse FSEvent records from allocated fsevent files and carved gzip files. # Outputs parsed information to a tab delimited txt file and SQLite database. # Errors and exceptions are recorded in the exceptions logfile. # Copyright 2019 G-C Partners, LLC # <NAME> # # G-C Partners licenses this file to you under the Apache License, Version # 2.0 (the "License"); you may not use this file except in compliance with the # License. You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. See the License for the specific language governing # permissions and limitations under the License. import sys import os import struct import binascii import gzip import re import datetime import sqlite3 import json import StringIO from time import (gmtime, strftime) from optparse import OptionParser import contextlib try: from dfvfs.analyzer import analyzer from dfvfs.lib import definitions from dfvfs.path import factory as path_spec_factory from dfvfs.volume import tsk_volume_system from dfvfs.resolver import resolver from dfvfs.lib import raw from dfvfs.helpers import source_scanner DFVFS_IMPORT = True IMPORT_ERROR = None except ImportError as exp: DFVFS_IMPORT = False IMPORT_ERROR =("\n%s\n\ You have specified the source type as image but DFVFS \n\ is not installed and is required for image support. \n\ To install DFVFS please refer to \n\ http://www.hecfblog.com/2015/12/how-to-install-dfvfs-on-windows-without.html" % (exp)) VERSION = '4.0' EVENTMASK = { 0x00000000: 'None;', 0x00000001: 'FolderEvent;', 0x00000002: 'Mount;', 0x00000004: 'Unmount;', 0x00000020: 'EndOfTransaction;', 0x00000800: 'LastHardLinkRemoved;', 0x00001000: 'HardLink;', 0x00004000: 'SymbolicLink;', 0x00008000: 'FileEvent;', 0x00010000: 'PermissionChange;', 0x00020000: 'ExtendedAttrModified;', 0x00040000: 'ExtendedAttrRemoved;', 0x00100000: 'DocumentRevisioning;', 0x00400000: 'ItemCloned;', # macOS HighSierra 0x01000000: 'Created;', 0x02000000: 'Removed;', 0x04000000: 'InodeMetaMod;', 0x08000000: 'Renamed;', 0x10000000: 'Modified;', 0x20000000: 'Exchange;', 0x40000000: 'FinderInfoMod;', 0x80000000: 'FolderCreated;', 0x00000008: 'NOT_USED-0x00000008;', 0x00000010: 'NOT_USED-0x00000010;', 0x00000040: 'NOT_USED-0x00000040;', 0x00000080: 'NOT_USED-0x00000080;', 0x00000100: 'NOT_USED-0x00000100;', 0x00000200: 'NOT_USED-0x00000200;', 0x00000400: 'NOT_USED-0x00000400;', 0x00002000: 'NOT_USED-0x00002000;', 0x00080000: 'NOT_USED-0x00080000;', 0x00200000: 'NOT_USED-0x00200000;', 0x00800000: 'NOT_USED-0x00800000;' } print('\n==========================================================================') print('FSEParser v {} -- provided by G-C Partners, LLC'.format(VERSION)) print('==========================================================================') def get_options(): """ Get needed options for processing """ usage = "usage: %prog -s SOURCE -o OUTDIR -t SOURCETYPE [folder|image] [-c CASENAME -q REPORT_QUERIES]" options = OptionParser(usage=usage) options.add_option("-s", action="store", type="string", dest="source", default=False, help="REQUIRED. 
The source directory or image containing fsevent files to be parsed") options.add_option("-o", action="store", type="string", dest="outdir", default=False, help="REQUIRED. The destination directory used to store parsed reports") options.add_option("-t", action="store", type="string", dest="sourcetype", default=False, help="REQUIRED. The source type to be parsed. Available options are 'folder' or 'image'") options.add_option("-c", action="store", type="string", dest="casename", default=False, help="OPTIONAL. The name of the current session, \ used for naming standards. Defaults to 'FSE_Reports'") options.add_option("-q", action="store", type="string", dest="report_queries", default=False, help="OPTIONAL. The location of the report_queries.json file \ containing custom report queries to generate targeted reports." ) # Return options to caller # return options def parse_options(): """ Capture and return command line arguments. """ # Get options options = get_options() (opts, args) = options.parse_args() # The meta will store all information about the arguments passed # meta = { 'casename': opts.casename, 'reportqueries': opts.report_queries, 'sourcetype': opts.sourcetype, 'source': opts.source, 'outdir': opts.outdir } # Print help if no options are provided if len(sys.argv[1:]) == 0: options.print_help() sys.exit(1) # Test required arguments if meta['source'] is False or meta['outdir'] is False or meta['sourcetype'] is False: options.error('Unable to proceed. The following parameters ' 'are required:\n-s SOURCE\n-o OUTDIR\n-t SOURCETYPE') if not os.path.exists(meta['source']): options.error("Unable to proceed. \n\n%s does not exist.\n" % meta['source']) if not os.path.exists(meta['outdir']): options.error("Unable to proceed. \n\n%s does not exist.\n" % meta['outdir']) if meta['reportqueries'] and not os.path.exists(meta['reportqueries']): options.error("Unable to proceed. \n\n%s does not exist.\n" % meta['reportqueries']) if meta['sourcetype'].lower() != 'folder' and meta['sourcetype'].lower() != 'image': options.error( 'Unable to proceed. \n\nIncorrect source type provided: "%s". The following are valid options:\ \n -t folder\n -t image\n' % (meta['sourcetype'])) if meta['sourcetype'] == 'image' and DFVFS_IMPORT is False: options.error(IMPORT_ERROR) if meta['reportqueries'] ==False: print '[Info]: Report queries file not specified using the -q option. Custom reports will not be generated.' if meta['casename'] is False: print('[Info]: No casename specified using -c. Defaulting to "FSE_Reports".') meta['casename'] = 'FSE_Reports' # Return meta to caller # return meta def main(): """ Call the main processes. """ # Process fsevents FSEventHandler() # Commit transaction SQL_CON.commit() # Close database connection SQL_CON.close() def enumerate_flags(flag, f_map): """ Iterate through record flag mappings and enumerate. """ # Reset string based flags to null f_type = '' f_flag = '' # Iterate through flags for i in f_map: if i & flag: if f_map[i] == 'FolderEvent;' or \ f_map[i] == 'FileEvent;' or \ f_map[i] == 'SymbolicLink;' or \ f_map[i] == 'HardLink;': f_type = ''.join([f_type, f_map[i]]) else: f_flag = ''.join([f_flag, f_map[i]]) return f_type, f_flag def progress(count, total): """ Handles the progress bar in the console. """ bar_len = 45 filled_len = int(round(bar_len * count / float(total))) percents = round(100 * count / float(total), 1) p_bar = '=' * filled_len + '.' 
* (bar_len - filled_len) try: sys.stdout.write(' File {} of {} [{}] {}{}\r'.format(count, total, p_bar, percents, '%')) except: pass sys.stdout.flush() class FSEventHandler(): """ FSEventHandler iterates through and parses fsevents. """ def __init__(self): """ """ self.meta = parse_options() if self.meta['reportqueries']: # Check json file try: # Basic json syntax self.r_queries = json.load(open(self.meta['reportqueries'])) # Check to see if required keys are present for i in self.r_queries['process_list']: i['report_name'] i['query'] except Exception as exp: print('An error occurred while reading the json file. \n{}'.format(str(exp))) sys.exit(0) else: # if report queries option was not specified self.r_queries = False self.path = self.meta['source'] create_sqlite_db(self) self.files = [] self.pages = [] self.src_fullpath = '' self.dls_version = 0 # Initialize statistic counters self.all_records_count = 0 self.all_files_count = 0 self.parsed_file_count = 0 self.error_file_count = 0 # Try to open the output files try: # Try to open ouput files self.l_all_fsevents = open( os.path.join(self.meta['outdir'], self.meta['casename'], 'All_FSEVENTS.tsv'), 'wb' ) # Process report queries output files # if option was specified. if self.r_queries: # Try to open custom report query output files for i in self.r_queries['process_list']: r_file = os.path.join(self.meta['outdir'], self.meta['casename'], i['report_name'] + '.tsv') if os.path.exists(r_file): os.remove(r_file) setattr(self, 'l_' + i['report_name'], open(r_file, 'wb')) # Output log file for exceptions l_file = os.path.join(self.meta['outdir'], self.meta['casename'], 'EXCEPTIONS_LOG.txt') self.logfile = open(l_file, 'w') except Exception as exp: # Print error to command prompt if unable to open files if 'Permission denied' in str(exp): print('{}\nEnsure that you have permissions to write to file ' '\nand output file is not in use by another application.\n'.format(str(exp))) else: print(exp) sys.exit(0) # Begin FSEvent processing print('\n[STARTED] {} UTC Parsing files.'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) if self.meta['sourcetype'] == 'image': self._get_fsevent_image_files() elif self.meta['sourcetype'] == 'folder': self._get_fsevent_files() print('\n All Files Attempted: {}\n All Parsed Files: {}\n Files ' 'with Errors: {}\n All Records Parsed: {}'.format( self.all_files_count, self.parsed_file_count, self.error_file_count, self.all_records_count)) print('[FINISHED] {} UTC Parsing files.\n'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) print('[STARTED] {} UTC Sorting fsevents table in Database.'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) row_count = reorder_sqlite_db(self) if row_count != 0: print('[FINISHED] {} UTC Sorting fsevents table in Database.\n'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) print('[STARTED] {} UTC Exporting fsevents table from Database.'.format( strftime("%m/%d/%Y %H:%M:%S", gmtime()))) self.export_fsevent_report(self.l_all_fsevents, row_count) print('[FINISHED] {} UTC Exporting fsevents table from Database.\n'.format( strftime("%m/%d/%Y %H:%M:%S", gmtime()))) if self.r_queries: print('[STARTED] {} UTC Exporting views from database ' 'to TSV files.'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) for i in self.r_queries['process_list']: Output.print_columns(getattr(self, 'l_' + i['report_name'])) # Export report views to output files self.export_sqlite_views() print('[FINISHED] {} UTC Exporting views from database ' 'to TSV files.\n'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) print(" 
Exception log and Reports exported to:\n '{}'\n".format(os.path.join(self.meta['outdir'], self.meta['casename']))) # Close output files self.l_all_fsevents.close() self.logfile.close() else: print('[FINISHED] {} UTC No records were parsed.\n'.format(strftime("%m/%d/%Y %H:%M:%S", gmtime()))) print('Nothing to export.\n') @contextlib.contextmanager def skip_gzip_check(self): """ Context manager that replaces gzip.GzipFile._read_eof with a no-op. This is useful when decompressing partial files, something that won't work if GzipFile does it's checksum comparison. stackoverflow.com/questions/1732709/unzipping-part-of-a-gz-file-using-python/18602286 """ _read_eof = gzip.GzipFile._read_eof gzip.GzipFile._read_eof = lambda *args, **kwargs: None yield gzip.GzipFile._read_eof = _read_eof def _get_fsevent_files(self): """ get_fsevent_files will iterate through each file in the fsevents dir provided, and attempt to decompress the gzip. If it is unable to decompress, it will write an entry in the logfile. If successful, the script will check for a DLS header signature in the decompress gzip. If found, the contents of the gzip will be placed into a buffer and passed to the next phase of processing. """ # Print the header columns to the output files Output.print_columns(self.l_all_fsevents) # Total number of files in events dir # t_files = len(os.listdir(self.path)) for filename in os.listdir(self.path): if filename == 'fseventsd-uuid': t_files -= 1 self.time_range_src_mod = [] prev_mod_date = "Unknown" prev_last_wd = 0 c_last_wd = 0 # Uses file mod dates to generate time ranges by default unless # files are carved or mod dates lost due to exporting self.use_file_mod_dates = True # Run simple test to see if file mod dates # should be used to generate time ranges # In some instances fsevent files may not have # their original mod times preserved on export # This code will flag true when the same date and hour # exists for the first file and the last file # in the provided source fsevents folder first = os.path.join(self.path, os.listdir(self.path)[0]) last = os.path.join(self.path, os.listdir(self.path)[len(os.listdir(self.path)) - 1]) first = os.path.getmtime(first) last = os.path.getmtime(last) first = str(datetime.datetime.utcfromtimestamp(first))[:14] last = str(datetime.datetime.utcfromtimestamp(last))[:14] if first == last: self.use_file_mod_dates = False # Iterate through each file in supplied fsevents dir for filename in os.listdir(self.path): if filename == 'fseventsd-uuid': continue # Variables self.all_files_count += 1 # Call the progress bar which shows parsing stats progress(self.all_files_count, t_files) buf = "" # Full path to source fsevent file self.src_fullpath = os.path.join(self.path, filename) # Name of source fsevent file self.src_filename = filename # UTC mod date of source fsevent file self.m_time = os.path.getmtime(self.src_fullpath) self.m_time = str(datetime.datetime.utcfromtimestamp((self.m_time))) + " [UTC]" # Regex to match against source fsevent log filename regexp = re.compile(r'^.*[\][0-9a-fA-F]{16}$') # Test to see if fsevent file name matches naming standard # if not, assume this is a carved gzip if len(self.src_filename) == 16 and regexp.search(filename) is not None: c_last_wd = int(self.src_filename, 16) self.time_range_src_mod = prev_last_wd, c_last_wd, prev_mod_date, self.m_time self.is_carved_gzip = False else: self.is_carved_gzip = True # Attempt to decompress the fsevent archive try: with self.skip_gzip_check(): self.files = gzip.GzipFile(self.src_fullpath, "rb") 
buf = self.files.read() except Exception as exp: # When permission denied is encountered if "Permission denied" in str(exp) and not os.path.isdir(self.src_fullpath): print('\nEnsure that you have permissions to read ' 'from {}\n{}\n'.format(self.path, str(exp))) sys.exit(0) # Otherwise write error to log file else: self.logfile.write( "%s\tError: Error while decompressing FSEvents file.%s\n" % ( self.src_filename, str(exp) ) ) self.error_file_count += 1 continue # If decompress is success, check for DLS headers in the current file dls_chk = FSEventHandler.dls_header_search(self, buf, self.src_fullpath) # If check for DLS returns false, write information to logfile if dls_chk is False: self.logfile.write('%s\tInfo: DLS Header Check Failed. Unable to find a ' 'DLS header. Unable to parse File.\n' % (self.src_filename)) # Continue to the next file in the fsevents directory self.error_file_count += 1 continue self.parsed_file_count += 1 # Accounts for fsevent files that get flushed to disk # at the same time. Usually the result of a shutdown # or unmount if not self.is_carved_gzip and self.use_file_mod_dates: prev_mod_date = self.m_time prev_last_wd = int(self.src_filename, 16) # If DLSs were found, pass the decompressed file to be parsed FSEventHandler.parse(self, buf) def _get_fsevent_image_files(self): """ get_fsevent_files will iterate through each file in the fsevents dir and attempt to decompress the gzip. If it is unable to decompress, it will write an entry in the logfile. If successful, the script will check for a DLS header signature in the decompress gzip. If found, the contents of the gzip will be placed into a buffer and passed to the next phase of processing. """ # Print the header columns to the output file Output.print_columns(self.l_all_fsevents) scan_path_spec = None scanner = source_scanner.SourceScanner() scan_context = source_scanner.SourceScannerContext() scan_context.OpenSourcePath(self.meta['source']) scanner.Scan( scan_context, scan_path_spec=scan_path_spec ) for file_system_path_spec, file_system_scan_node in scan_context._file_system_scan_nodes.items(): t_files = 0 self.all_files_count = 0 self.error_file_count = 0 self.all_records_count = 0 self.parsed_file_count = 0 try: location = file_system_path_spec.parent.location except: location = file_system_path_spec.location print " Processing Volume {}.\n".format(location) fs_event_path_spec = path_spec_factory.Factory.NewPathSpec( file_system_path_spec.type_indicator, parent=file_system_path_spec.parent, location="/.fseventsd" ) file_entry = resolver.Resolver.OpenFileEntry( fs_event_path_spec ) if file_entry != None: t_files = file_entry.number_of_sub_file_entries for sub_file_entry in file_entry.sub_file_entries: if sub_file_entry.name == 'fseventsd-uuid': t_files -= 1 self.time_range_src_mod = [] prev_mod_date = "Unknown" prev_last_wd = 0 c_last_wd = 0 counter = 0 # Uses file mod dates to generate time ranges by default unless # files are carved or mod dates lost due to exporting self.use_file_mod_dates = True # Iterate through each file in supplied fsevents dir for sub_file_entry in file_entry.sub_file_entries: if sub_file_entry.name == 'fseventsd-uuid': continue # Variables counter += 1 self.all_files_count += 1 # Call the progress bar which shows parsing stats progress(counter, t_files) buf = "" # Name of source fsevent file self.src_filename = sub_file_entry.name self.src_fullpath = self.meta['source'] + ": " + location + sub_file_entry.path_spec.location stat_object = sub_file_entry.GetStat() # UTC mod date of 
source fsevent file self.m_time = datetime.datetime.fromtimestamp( stat_object.mtime).strftime( '%Y-%m-%d %H:%M:%S') + " [UTC]" # Regex to match against source fsevent log filename regexp = re.compile(r'^.*[\][0-9a-fA-F]{16}$') # Test to see if fsevent file name matches naming standard # if not, assume this is a carved gzip if len(self.src_filename) == 16 and regexp.search(self.src_filename) is not None: c_last_wd = int(self.src_filename, 16) self.time_range_src_mod = prev_last_wd, c_last_wd, prev_mod_date, self.m_time self.is_carved_gzip = False else: self.is_carved_gzip = True file_object = sub_file_entry.GetFileObject() compressedFile = StringIO.StringIO() compressedFile.write(file_object.read()) compressedFile.seek(0) # Attempt to decompress the fsevent archive try: with self.skip_gzip_check(): self.files = gzip.GzipFile(fileobj=compressedFile, mode='rb') buf = self.files.read() except Exception as exp: self.logfile.write( "%s\tError: Error while decompressing FSEvents file.%s\n" % ( self.src_filename, str(exp) ) ) self.error_file_count += 1 continue # If decompress is success, check for DLS headers in the current file dls_chk = FSEventHandler.dls_header_search(self, buf, self.src_filename) # If check for DLS returns false, write information to logfile if dls_chk is False: self.logfile.write('%s\tInfo: DLS Header Check Failed. Unable to find a ' 'DLS header. Unable to parse File.\n' % (self.src_filename)) # Continue to the next file in the fsevents directory self.error_file_count += 1 continue self.parsed_file_count += 1 # Accounts for fsevent files that get flushed to disk # at the same time. Usually the result of a shutdown # or unmount if not self.is_carved_gzip and self.use_file_mod_dates: prev_mod_date = self.m_time prev_last_wd = int(self.src_filename, 16) # If DLSs were found, pass the decompressed file to be parsed FSEventHandler.parse(self, buf) else: print('Unable to process volume or no fsevent files found') continue print('\n\n All Files Attempted: {}\n All Parsed Files: {}\n Files ' 'with Errors: {}\n All Records Parsed: {}'.format( self.all_files_count, self.parsed_file_count, self.error_file_count, self.all_records_count)) def dls_header_search(self, buf, f_name): """ Search within the unzipped file for all occurrences of the DLS magic header. There can be more than one DLS header in an fsevents file. The start and end offsets are stored and used for parsing the records contained within each DLS page. """ self.file_size = len(buf) self.my_dls = [] raw_file = buf dls_count = 0 start_offset = 0 end_offset = 0 while end_offset != self.file_size: try: start_offset = end_offset page_len = struct.unpack("<I", raw_file[start_offset + 8:start_offset + 12])[0] end_offset = start_offset + page_len if raw_file[start_offset:start_offset + 4] == '1SLD' or raw_file[start_offset:start_offset + 4] == '2SLD': self.my_dls.append({'Start Offset': start_offset, 'End Offset': end_offset}) dls_count += 1 else: self.logfile.write("%s: Error in length of page when finding page headers." % (f_name)) break except: self.logfile.write("%s: Error in length of page when finding page headers." % (f_name)) break if dls_count == 0: # Return false to caller so that the next file will be searched return False else: # Return true so that the DLSs found can be parsed return True def parse(self, buf): """ Parse the decompressed fsevent log. First finding other dates, then iterating through eash DLS page found. Then parse records within each page. 
""" # Initialize variables pg_count = 0 # Call the date finder for current fsevent file FSEventHandler.find_date(self, buf) self.valid_record_check = True # Iterate through DLS pages found in current fsevent file for i in self.my_dls: # Assign current DLS offsets start_offset = self.my_dls[pg_count]['Start Offset'] end_offset = self.my_dls[pg_count]['End Offset'] # Extract the raw DLS page from the fsevents file raw_page = buf[start_offset:end_offset] self.page_offset = start_offset # Reverse byte stream to match byte order little-endian m_dls_chk = raw_page[3] + raw_page[2] + raw_page[1] + raw_page[0] # Assign DLS version based off magic header in page if m_dls_chk == "DLS1": self.dls_version = 1 elif m_dls_chk == "DLS2": self.dls_version = 2 else: self.logfile.write("%s: Unknown DLS Version." % (self.src_filename)) break # Pass the raw page + a start offset to find records within page FSEventHandler.find_page_records( self, raw_page, start_offset ) # Increment the DLS page count by 1 pg_count += 1 def find_date(self, raw_file): """ Search within current file for names of log files that are created that store the date as a part of its naming standard. """ # Reset variables self.time_range = [] # Add previous file's mod timestamp, wd and current file's timestamp, wd # to time range if not self.is_carved_gzip and self.use_file_mod_dates: c_time_1 = str(self.time_range_src_mod[2])[:10].replace("-", ".") c_time_2 = str(self.time_range_src_mod[3])[:10].replace("-", ".") self.time_range.append([self.time_range_src_mod[0], c_time_1]) self.time_range.append([self.time_range_src_mod[1], c_time_2]) # Regex's for logs with dates in name regex_1 = ("private/var/log/asl/[\x30-\x39]{4}[.][\x30-\x39]{2}" + "[.][\x30-\x39]{2}[.][\x30-\x7a]{2,8}[.]asl") regex_2 = ("mobile/Library/Logs/CrashReporter/DiagnosticLogs/security[.]log" + "[.][\x30-\x39]{8}T[\x30-\x39]{6}Z") regex_3 = ("private/var/log/asl/Logs/aslmanager[.][\x30-\x39]{8}T[\x30-\x39]" + "{6}[-][\x30-\x39]{2}") regex_4 = ("private/var/log/DiagnosticMessages/[\x30-\x39]{4}[.][\x30-\x39]{2}" + "[.][\x30-\x39]{2}[.]asl") regex_5 = ("private/var/log/com[.]apple[.]clouddocs[.]asl/[\x30-\x39]{4}[.]" + "[\x30-\x39]{2}[.][\x30-\x39]{2}[.]asl") regex_6 = ("private/var/log/powermanagement/[\x30-\x39]{4}[.][\x30-\x39]{2}[.]" + "[\x30-\x39]{2}[.]asl") regex_7 = ("private/var/log/asl/AUX[.][\x30-\x39]{4}[.][\x30-\x39]{2}[.]" + "[\x30-\x39]{2}/[0-9]{9}") regex_8 = "private/var/audit/[\x30-\x39]{14}[.]not_terminated" # Regex that matches only events with created flag flag_regex = ("[\x00-\xFF]{9}[\x01|\x11|\x21|\x31|\x41|\x51|\x61|\x05|\x15|" + "\x25|\x35|\x45|\x55|\x65]") # Concatenating date, flag matching regexes # Also grabs working descriptor for record m_regex = "(" + regex_1 + "|" + regex_2 + "|" + regex_3 + "|" + regex_4 + "|" + regex_5 m_regex = m_regex + "|" + regex_6 + "|" + regex_7 + "|" + regex_8 + ")" + flag_regex # Start searching within fsevent file for events that match dates regex # As the length of each log location is different, create if statements for each # so that the date can be pulled from the correct location within the fullpath for match in re.finditer(m_regex, raw_file): if raw_file[match.regs[0][0]:match.regs[0][0] + 35] == "private/var/log/asl/Logs/aslmanager": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 36 # The date is 8 chars long in the format of yyyymmdd t_end = t_start + 8 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] # Format 
the date t_temp = t_temp[:4] + "." + t_temp[4:6] + "." + t_temp[6:8] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 23] == "private/var/log/asl/AUX": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 24 # The date is 10 chars long in the format of yyyy.mm.dd t_end = t_start + 10 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 19] == "private/var/log/asl": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 20 # The date is 10 chars long in the format of yyyy.mm.dd t_end = t_start + 10 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 4] == "mobi": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 62 # The date is 8 chars long in the format of yyyymmdd t_end = t_start + 8 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] # Format the date t_temp = t_temp[:4] + "." + t_temp[4:6] + "." + t_temp[6:8] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 34] == "private/var/log/DiagnosticMessages": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 35 # The date is 10 chars long in the format of yyyy.mm.dd t_end = t_start + 10 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 39] == "private/var/log/com.apple.clouddocs.asl": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 40 # The date is 10 chars long in the format of yyyy.mm.dd t_end = t_start + 10 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 31] == "private/var/log/powermanagement": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 32 # The date is 10 chars long in the format of yyyy.mm.dd t_end = t_start + 10 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] elif raw_file[match.regs[0][0]:match.regs[0][0] + 17] == "private/var/audit": # Clear timestamp temp variable t_temp = '' # t_start uses the start offset of the match t_start = match.regs[0][0] + 18 # The date is 8 chars long in the format of yyyymmdd t_end = t_start + 8 # Strip the date from the fsevent file t_temp = raw_file[t_start:t_end] # Format the date t_temp = t_temp[:4] + "." + t_temp[4:6] + "." 
+ t_temp[6:8] wd_temp = struct.unpack("<Q", raw_file[match.regs[0][1] - 9:match.regs[0][1] - 1])[0] else: t_temp = '' wd_temp = '' # Append date, wd to time range list self.time_range.append([wd_temp, t_temp]) # Sort the time range list by wd self.time_range = sorted(self.time_range, key=self.get_key) # Call the time range builder to rebuild time range self.build_time_range() def get_key(self, item): """ Return the key in the time range item provided. """ return item[0] def build_time_range(self): """ Rebuilds the time range list to include the previous and current working descriptor as well as the previous and current date found """ prev_date = '0' prev_wd = 0 temp = [] # Iterate through each in time range list for i in self.time_range: # Len is 7 when prev_date is 'Unknown' if len(prev_date) == 7: p_date = 0 c_date = i[1][:10].replace(".", "") # When current date is 'Unknown' if len(i[1]) == 7: p_date = prev_date[:10].replace(".", "") c_date = 0 # When both dates are known if len(prev_date) != 7 and len(i[1]) != 7: p_date = prev_date[:10].replace(".", "") c_date = i[1][:10].replace(".", "") # Bypass a date when current date is less than prev date if int(c_date) < int(p_date): prev_wd = prev_wd prev_date = prev_date else: # Reassign prev_date to 'Unknown' if prev_date == '0': prev_date = 'Unknown' # Add previous, current wd and previous, current date to temp temp.append([prev_wd, i[0], prev_date, i[1]]) prev_wd = i[0] prev_date = i[1] # Assign temp list to time range list self.time_range = temp def find_page_records(self, page_buf, page_start_off): """ Input values are starting offset of current page and end offset of current page within the current fsevent file find_page_records will identify all records within a given page. """ # Initialize variables fullpath = '' char = '' # Start, end offset of first record to be parsed within current DLS page start_offset = 12 end_offset = 13 len_buf = len(page_buf) # Call the file header parser for current DLS page try: FsEventFileHeader( page_buf[:13], self.src_fullpath ) except: self.logfile.write( "%s\tError: Unable to parse file header at offset %d\n" % ( self.src_filename, page_start_off ) ) # Account for length of record for different DLS versions # Prior to HighSierra if self.dls_version == 1: bin_len = 13 rbin_len = 12 # HighSierra elif self.dls_version == 2: bin_len = 21 rbin_len = 20 else: pass # Iterate through the page. # Valid record check should be true while parsing. # If an invalid record is encounted (occurs in carved gzips) # parsing stops for the current file while len_buf > start_offset and self.valid_record_check: # Grab the first char char = page_buf[start_offset:end_offset].encode('hex') if char != '00': # Replace non-printable char with nothing if str(char).lower() == '0d' or str(char).lower() == '0a': self.logfile.write('%s\tInfo: Non-printable char %s in record fullpath at ' 'page offset %d. 
Parser removed char for reporting ' 'purposes.\n' % \ (self.src_filename, char, page_start_off + start_offset)) char = '' # Append the current char to the full path for current record fullpath = fullpath + char # Increment the offsets by one start_offset += 1 end_offset += 1 # Continue the while loop continue elif char == '00': # When 00 is found, then it is the end of fullpath # Increment the offsets by bin_len, this will be the start of next full path start_offset += bin_len end_offset += bin_len # Decode fullpath that was stored as hex fullpath = fullpath.decode('hex').replace('\t', '') # Store the record length record_len = len(fullpath) + bin_len # Account for records that do not have a fullpath if record_len == bin_len: # Assign NULL as the path fullpath = "NULL" # Assign raw record offsets # r_start = start_offset - rbin_len r_end = start_offset # Strip raw record from page buffer # raw_record = page_buf[r_start:r_end] # Strip mask from buffer and encode as hex # mask_hex = "0x" + raw_record[8:12].encode('hex') # Account for carved files when record end offset # occurs after the length of the buffer if r_end > len_buf: continue # Set fs_node_id to empty for DLS version 1 # Prior to HighSierra if self.dls_version == 1: fs_node_id = "" # Assign file system node id if DLS version is 2 # Introduced with HighSierra if self.dls_version == 2: fs_node_id = struct.unpack("<q", raw_record[12:])[0] record_off = start_offset + page_start_off record = FSEventRecord(raw_record, record_off, mask_hex) # Check record to see if is valid. Identifies invalid/corrupted # that sometimes occur in carved gzip files self.valid_record_check = self.check_record(record.mask, fullpath) # If record is not valid, stop parsing records in page if self.valid_record_check is False or record.wd == 0: self.logfile.write('%s\tInfo: First invalid record found in carved ' 'gzip at offset %d. The remainder of this buffer ' 'will not be parsed.\n' % \ (self.src_filename, page_start_off + start_offset)) fullpath = '' break # Otherwise assign attributes and add to outpur reports else: f_path, f_name = os.path.split(fullpath) dates = self.apply_date(record.wd) # Assign our current records attributes attributes = { 'id': record.wd, 'id_hex': record.wd_hex + " (" + str(record.wd) + ")", 'fullpath': fullpath, 'filename': f_name, 'type': record.mask[0], 'flags': record.mask[1], 'approx_dates_plus_minus_one_day': dates, 'mask': mask_hex, 'node_id': fs_node_id, 'record_end_offset': record_off, 'source': self.src_fullpath, 'source_modified_time': self.m_time } output = Output(attributes) # Print the parsed record to output file output.append_row() fullpath = '' # Increment the current record count by 1 self.all_records_count += 1 def check_record(self, mask, fullpath): """ Checks for conflicts in the record's flags to determine if the record is valid to limit the number of invalid records in parsed output. 
Applies only to carved gzip """ if self.is_carved_gzip: decode_error = False # Flag conflicts # These flag combinations can not exist together type_err = "FolderEvent" in mask[0] and "FileEvent" in mask[0] fol_cr_err = "FolderEvent" in mask[0] and "Created" in mask[1] and \ "FolderCreated" not in mask[1] fil_cr_err = "FileEvent" in mask[0] and "FolderCreated" in mask[1] lnk_err = "SymbolicLink" in mask[0] and "HardLink" in mask[0] h_lnk_err = "HardLink" not in mask[0] and "LastHardLink" in mask[1] h_lnk_err_2 = "LastHardLink" in mask[1] and ";Removed" not in mask[1] n_used_err = "NOT_USED-0x0" in mask[1] ver_error = "ItemCloned" in mask[1] and self.dls_version == 1 # Check for decode errors try: fullpath.decode('utf-8') except: decode_error = True # If any error exists return false to caller if type_err or \ fol_cr_err or \ fil_cr_err or \ lnk_err or \ h_lnk_err or \ h_lnk_err_2 or \ n_used_err or \ decode_error or \ ver_error: return False else: # Record passed tests and may be valid # return true so that record is included in output reports return True else: # Return true. fsevent file was not identified as being carved return True def apply_date(self, wd): """ Applies the approximate date to the current record by comparing thewd to what is stored in the time range list. """ t_range_count = len(self.time_range) count = 1 c_mod_date = str(self.m_time)[:10].replace("-", ".") # No dates were found. Return source mod date if len(self.time_range) == 0 and not self.is_carved_gzip and self.use_file_mod_dates: return c_mod_date # If dates were found elif len(self.time_range) != 0 and not self.is_carved_gzip: # Iterate through the time range list # and assign the time range based off the # wd/record event id. for i in self.time_range: # When record id falls between the previous # id and the current id within the time range list if wd > i[0] and wd < i[1]: # When the previous date is the same as current if i[2] == i[3]: return i[2] # Otherwise return the date range else: return i[2] + " - " + i[3] # When event id matches previous wd in list # assign previous date elif wd == i[0]: return str(i[2]) # When event id matches current wd in list # assign current date elif wd == i[1]: return str(i[3]) # When the event id is greater than the last in list # assign return source mod date elif count == t_range_count and wd >= i[1] and self.use_file_mod_dates: return c_mod_date else: count = count + 1 continue else: return "Unknown" def export_fsevent_report(self, outfile, row_count): """ Export rows from fsevents table in DB to tab delimited report. """ counter = 0 query = 'SELECT \ id_hex, \ node_id, \ fullpath, \ type, \ flags, \ approx_dates_plus_minus_one_day, \ source, \ source_modified_time \ FROM fsevents_sorted_by_event_id' SQL_TRAN.execute(query) while row_count > counter: row = SQL_TRAN.fetchone() values = [] for cell in row: if type(cell) is str or type(cell) is unicode: try: values.append(cell) except: print row_count print type(cell) print cell print row values.append("ERROR_IN_VALUE") else: try: values.append(unicode(cell)) except: print row_count print type(cell) print cell print row values.append("ERROR_IN_VALUE") m_row = u'\t'.join(values) m_row = m_row + u'\n' outfile.write(m_row.encode("utf-8")) counter = counter + 1 def export_sqlite_views(self): """ Exports sqlite views from database if -q is set. 
""" # Gather the names of report views in the db SQL_TRAN.execute("SELECT name FROM sqlite_master WHERE type='view'") view_names = SQL_TRAN.fetchall() # Export report views to tsv files for i in view_names: query = "SELECT * FROM %s" % (i[0]) SQL_TRAN.execute(query) row = ' ' # Get outfile to write to outfile = getattr(self, "l_" + i[0]) row = SQL_TRAN.fetchone() if row is None: print(" No records found in view {}. Nothing to export".format(i[0])) outfile.close() os.remove(outfile.name) else: print(" Exporting view {} from database".format(i[0])) # For each row join using tab and output to file while row is not None: values = [] try: for cell in row: if type(cell) is str or type(cell) is unicode: values.append(cell) else: values.append(unicode(cell)) except: values.append("ERROR_IN_VALUE") print "ERROR: ", row m_row = u'\t'.join(values) m_row = m_row + u'\n' outfile.write(m_row.encode("utf-8")) row = SQL_TRAN.fetchone() class FsEventFileHeader(): """ FSEvent file header structure. Each page within the decompressed begins with DLS1 or DLS2 It is stored using a byte order of little-endian. """ def __init__(self, buf, filename): """ """ # Name and path of current source fsevent file self.src_fullpath = filename # Page header 'DLS1' or 'DLS2' # Was written to disk using little-endian # Byte stream contains either "1SLD" or "2SLD", reversing order self.signature = buf[4] + buf[3] + buf[2] + buf[1] # Unknown raw values in DLS header # self.unknown_raw = buf[4:8] # Unknown hex version # self.unknown_hex = buf[4:8].encode("hex") # Unknown integer version # self.unknown_int = struct.unpack("<I", self.unknown_raw)[0] # Size of current DLS page self.filesize = struct.unpack("<I", buf[8:12])[0] class FSEventRecord(dict): """ FSEvent record structure. """ def __init__(self, buf, offset, mask_hex): """ """ # Offset of the record within the fsevent file self.file_offset = offset # Raw record hex version self.header_hex = binascii.b2a_hex(buf) # Record wd or event id self.wd = struct.unpack("<Q", buf[0:8])[0] # Record wd_hex wd_buf = buf[7] + buf[6] + buf[5] + buf[4] + buf[3] + buf[2] + buf[1] + buf[0] self.wd_hex = binascii.b2a_hex(wd_buf) # Enumerate mask flags, string version self.mask = enumerate_flags( struct.unpack(">I", buf[8:12])[0], EVENTMASK ) class Output(dict): """ Output class handles outputting parsed fsevent records to report files. """ COLUMNS = [ u'id', u'id_hex', u'fullpath', u'filename', u'type', u'flags', u'approx_dates_plus_minus_one_day', u'mask', u'node_id', u'record_end_offset', u'source', u'source_modified_time' ] R_COLUMNS = [ u'event_id', u'node_id', u'fullpath', u'type', u'flags', u'approx_dates_plus_minus_one_day', u'source', u'source_modified_time' ] def __init__(self, attribs): """ Update column values. """ self.update(attribs) @staticmethod def print_columns(outfile): """ Output column header to report files. """ values = [] for key in Output.R_COLUMNS: values.append(str(key)) row = '\t'.join(values) row = row + '\n' outfile.write(row) def append_row(self): """ Output parsed fsevents row to database. """ values = [] vals_to_insert = '' for key in Output.COLUMNS: values.append(str(self[key])) # Replace any Quotes in parsed record with double quotes for i in values: vals_to_insert += i.replace('"', '""') + '","' vals_to_insert = '"' + vals_to_insert[:-3] + '"' insert_sqlite_db(vals_to_insert) def create_sqlite_db(self): """ Creates our output database for parsed records and connects to it. 
""" db_filename = os.path.join(self.meta['outdir'], self.meta['casename'], 'FSEvents.sqlite') table_schema = "CREATE TABLE [fsevents](\ [id] [BLOB] NULL, \ [id_hex] [TEXT] NULL, \ [fullpath] [TEXT] NULL, \ [filename] [TEXT] NULL, \ [type] [TEXT] NULL, \ [flags] [TEXT] NULL, \ [approx_dates_plus_minus_one_day] [TEXT] NULL, \ [mask] [TEXT] NULL, \ [node_id] [TEXT] NULL, \ [record_end_offset] [TEXT] NULL, \ [source] [TEXT] NULL, \ [source_modified_time] [TEXT] NULL)" if not os.path.isdir(os.path.join(self.meta['outdir'], self.meta['casename'])): os.makedirs(os.path.join(self.meta['outdir'], self.meta['casename'])) # If database already exists delete it try: if os.path.isfile(db_filename): os.remove(db_filename) # Create database file if it doesn't exist db_is_new = not os.path.exists(db_filename) except: print("\nThe following output file is currently in use by " "another program.\n -{}\nPlease ensure that the file is closed." " Then rerun the parser.".format(db_filename)) sys.exit(0) # Setup global global SQL_CON SQL_CON = sqlite3.connect(os.path.join("", db_filename)) if db_is_new: # Create table if it's a new database SQL_CON.execute(table_schema) if self.r_queries: # Run queries in report queries list # to add report database views for i in self.r_queries['process_list']: # Try to execute the query cols = 'id_hex, \ node_id, \ fullpath, \ type, \ flags, \ approx_dates_plus_minus_one_day, \ source, \ source_modified_time' query = i['query'].split("*") query = query[0] + cols + query[1] try: SQL_CON.execute(query) except Exception as exp: print("SQLite error when executing query in json file. {}".format(str(exp))) sys.exit(0) # Setup global global SQL_TRAN # Setup transaction cursor and return it SQL_TRAN = SQL_CON.cursor() def insert_sqlite_db(vals_to_insert): """ Insert parsed fsevent record values into database. """ insert_statement = "\ insert into fsevents (\ [id], \ [id_hex], \ [fullpath], \ [filename], \ [type], \ [flags], \ [approx_dates_plus_minus_one_day], \ [mask], \ [node_id], \ [record_end_offset], \ [source], \ [source_modified_time]\ ) values (" + vals_to_insert + ")" try: SQL_TRAN.execute(insert_statement) except Exception as exp: print("insert failed!: {}".format(exp)) def reorder_sqlite_db(self): """ Order database table rows by id. Returns count: The number of rows in the table """ query = "CREATE TABLE [fsevents_sorted_by_event_id](\ [id] [BLOB] NULL, \ [id_hex] [TEXT] NULL, \ [fullpath] [TEXT] NULL, \ [filename] [TEXT] NULL, \ [type] [TEXT] NULL, \ [flags] [TEXT] NULL, \ [approx_dates_plus_minus_one_day] [TEXT] NULL, \ [mask] [TEXT] NULL, \ [node_id] [TEXT] NULL, \ [record_end_offset] [TEXT] NULL, \ [source] [TEXT] NULL, \ [source_modified_time] [TEXT] NULL)" SQL_TRAN.execute(query) query = "INSERT INTO fsevents_sorted_by_event_id ( \ id, \ id_hex, \ fullpath, \ filename, \ type, \ flags, \ approx_dates_plus_minus_one_day, \ mask, \ node_id, \ record_end_offset, \ source, \ source_modified_time) \ SELECT id,\ id_hex,\ fullpath,\ filename,\ type,flags,\ approx_dates_plus_minus_one_day,\ mask,\ node_id,\ record_end_offset,\ source,\ source_modified_time \ FROM fsevents ORDER BY id_hex;" SQL_TRAN.execute(query) count = SQL_TRAN.lastrowid return count if __name__ == '__main__': """ Init checks to see if running appropriate python version. If it is, start the parser. """ if sys.version_info > (3, 0): print('\nError: FSEventsParser does not currently support running under Python 3.x' '. Python 2.7 recommended.\n') else: main()
Biography The English rock trio Muse consists of guitarist/vocalist Matthew Bellamy, bassist Chris Wolstenholme, and drummer Dominic Howard. Bored by the sleepy life provided by their hometown of Teignmouth, Devon, the three friends began playing music together. They started the first incarnation of their band when they were all 13, changing the name of the group from Gothic Plague to Fixed Penalty to Rocket Baby Dolls as time passed. By 1997, the band settled on the name Muse and released their self-titled debut EP on Dangerous Records, followed by the Muscle Museum EP in 1998. The group's emotive, passionate sound and live dates drew critical acclaim and industry buzz, and after a trip to New York's CMJ festival, Muse signed a deal with Maverick Records. The singles "Cave" and "Uno" preceded their debut full-length album, Showbiz, which was released toward the end of 1999. Two years later, Muse issued The Origin of Symmetry and had a major hit with "Hyper Music." In 2002, fans were treated to a combination rarities/live set, the double-disc Hullabaloo Soundtrack. Muse's fourth album, Absolution, got the usual royal treatment upon its late 2003 release, and stateside fans finally got a taste when it was released on Warner Bros. the following March. A short North American tour in the spring of 2004 coincided with Muse's spot on the fifth annual Coachella Music and Arts Festival. Their fifth effort, 2006's Black Holes and Revelations, marked the band's brightest, most dynamic set of material to date. Within a week of its release, the band topped the U.K. album charts, earning their second consecutive number one album. In America, Black Holes and Revelations broke into the Top Ten. Muse have also won multiple awards (and multiple times) for Best Live Act, including the NME Awards, the Q Awards, and the Vodafone Live Music Awards.
/** * Serializer/Deserializer for BigInteger, the Java counterpart of C#'s ulong. * Note that only values between [0, 18446744073709551615] (the max ulong value in C#) are supported. * * Created by marsqing on 21/03/2017. */ @SuppressWarnings("serial") public class BigIntegerCustomizationFactory implements TypeCustomizationFactory<BigInteger> { @Override public Class<?>[] getTargetClasses() { return new Class<?>[]{ BigInteger.class }; } @Override public DataType getTargetProtobufDataType() { return ScalarType.UINT64; } @Override public Builder getMessageElementBuilder() { return null; } @Override public JsonSerializer<BigInteger> createSerializer() { return new StdScalarSerializer<BigInteger>(BigInteger.class) { @Override public void serialize(BigInteger value, JsonGenerator gen, SerializerProvider provider) throws IOException { ProtobufGenerator pgen = (ProtobufGenerator) gen; // Just write the low 64 bits. If the value is bigger than Long.MAX_VALUE, the sign bit will still match the ulong bit pattern. pgen.writeNumber(value.longValue()); } }; } @Override public JsonDeserializer<BigInteger> createDeserializer() { return new StdDeserializer<BigInteger>(BigInteger.class) { @Override public BigInteger deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { // longValue's bits are the same as the ulong's bits long longValue = p.getLongValue(); // dump the bits as hex and reparse, effectively interpreting the value as an unsigned 64-bit integer return new BigInteger(Long.toHexString(longValue), 16); } }; } }
Animals don't handle stress well. I'm not talking about acute stressors: the predator charging at you through the brush, you run away, and it's over. We handle that kind of stress very well indeed. But severe stress, losing a job, a divorce, a death in the family, these can really wear us down. Severe life stressors not only impact your physical health; they also often occur before the onset of mental illness, particularly major depressive disorder. Depression takes many forms (lack of interest in activities, sleep changes, eating changes, severely depressed mood), but one of the most debilitating is the way it impacts motivation. While some stressors (like, say, a deadline) might once have been motivators, pushing you to get the work done, during depression these same stressors become insurmountable obstacles. Things you could do before now seem impossible. You'll never make the deadline. You can't run the race. Stress can't motivate you any more. What has changed?

To look at this, Lemos et al at the University of Washington, Seattle, looked at one of the brain's signals in response to stress: corticotropin releasing factor (CRF). CRF is the first step in the process that eventually allows cortisol, the molecule we usually associate with stress, to be released into the bloodstream. At the top of that chain, CRF is released from the hypothalamus. The next step in the chain is the anterior pituitary, from which adrenocorticotropic hormone (ACTH) is released, which in turn stimulates the adrenal glands (sitting in little pads of fat above your kidneys) to release cortisol. But in the brain, it's more complicated than that. CRF isn't just released from the hypothalamus to the pituitary; it's released to other regions, too. Lemos et al wanted to look in particular at the nucleus accumbens (NAc). This is an area of the brain that we usually associate with things like drug addiction, but the nucleus accumbens is associated with the motivational properties of many things, from cocaine to sex to...stress? That's right, stress can be very motivating in the accumbens. Lemos et al showed that this is due to CRF. CRF projections do reach the accumbens, and there are receptors for it there (the CRF1 and CRF2 receptors; this may sound uncreative, but believe me, you'd never remember them if they were all named funny things like "The motivator"). And they showed that when you add CRF to the nucleus accumbens, you get increases in dopamine, a chemical messenger associated with reward and motivation.

The dopamine signals were recorded with a technique called voltammetry. In voltammetry, you use a very tiny carbon fiber electrode, encased in glass, and inserted into a brain (or a brain slice). When you apply an electrical potential elsewhere in the brain or on the slice, dopamine is released, and the carbon fiber allows it to, very briefly, oxidize. Scientists can detect that signal and produce heatmaps in which the dopamine signal stands out, and from those, quantify just how much dopamine is present. Those recordings showed that increasing CRF in the brain slice increases dopamine as well. Dopamine is usually thought to be a good clue as to whether something is motivating: the more dopamine, the more motivating (if you think those signals are big, you should see cocaine!). But to really find out if CRF is motivating, you need to perform a behavioral task.

That task is called conditioned place preference. You give an animal (in this case, a mouse) a choice between two compartments. At first, they are pretty much the same. But then you give the mouse saline and put him in one side. Then, you give him CRF and put him in the other side. Repeat this for several days, so that the mouse learns to associate one side with the "feeling" of saline and the other side with the "feeling" of CRF. Then you put him in between the two compartments, with nothing at all, and see which side he prefers. If he "prefers" the feeling of CRF over the feeling of saline, he will spend more time in the CRF-paired compartment. And that is exactly what happened: given a single dose of CRF, the mouse prefers the CRF-paired compartment. CRF, like acute stress, is a motivator under these conditions.

But what about after chronic stress? Lemos et al exposed the mice to a severe stressor, a two-day swim task. On the first day, they swam 15 minutes, and on the second day, they swam four bouts of 6 minutes, with 6 minutes in between each bout. Mice are very good swimmers and it's not dangerous, but it is still very stressful. The authors then looked again to see how dopamine in the accumbens, and conditioned place preference, responded. What they found was the exact OPPOSITE of the acute stress response. When animals got acute CRF, there was an increase in the dopamine signal, but after chronic stress, there was no change at all. And the behavioral response flipped as well: while animals showed a place preference for a single injection of CRF, after chronic stress, CRF was very aversive. The authors were able to show that severe stress can SWITCH how mice (and possibly humans) respond to stress, making the formerly motivating signal highly aversive.

How is this happening? Well, that's not certain, though it looks like feedback through glucocorticoid receptors (the receptors that respond to cortisol, the stress chemical at the end of the line) might play a role. But it's striking to see how severe stress can change the way we respond to stress in the future, and how it might make future stress a much harder problem to deal with.

Lemos JC, Wanat MJ, Smith JS, Reyes BA, Hollon NG, Van Bockstaele EJ, Chavkin C, & Phillips PE (2012). Severe stress switches CRF action in the nucleus accumbens from appetitive to aversive. Nature, 490 (7420), 402-6 PMID: 22992525
""" Source: https://stackoverflow.com/questions/2536307/decorators-in-the-python-standard-lib-deprecated-specifically """ import functools import inspect import warnings def deprecated(reason): """ This is a decorator which can be used to mark functions as deprecated. It will result in a warning being emitted when the function is used. """ if isinstance(reason, str): # The @deprecated is used with a 'reason'. # # .. code-block:: python # # @deprecated("please, use another function") # def old_function(x, y): # pass def decorator(func): if inspect.isclass(func): msg = "Call to deprecated class {name} ({reason})." else: msg = "Call to deprecated function {name} ({reason})." @functools.wraps(func) def wrapper(*args, **kwargs): warn_deprecated(msg.format(name=func.__name__, reason=reason)) warnings.simplefilter('default', DeprecationWarning) return func(*args, **kwargs) return wrapper return decorator elif inspect.isclass(reason) or inspect.isfunction(reason): # The @deprecated is used without any 'reason'. # # .. code-block:: python # # @deprecated # def old_function(x, y): # pass func1 = reason if inspect.isclass(func1): msg1 = "Call to deprecated class {name}." else: msg1 = "Call to deprecated function {name}." @functools.wraps(func1) def wrapper1(*args, **kwargs): warn_deprecated(msg1.format(name=func1.__name__)) return func1(*args, **kwargs) return wrapper1 else: raise TypeError(repr(type(reason))) def warn_deprecated(message, warning=DeprecationWarning, stacklevel=2): warnings.simplefilter('always', warning) warnings.warn(message, category=warning, stacklevel=stacklevel) warnings.simplefilter('default', warning)
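A short usage sketch for the decorator above; the function names here are hypothetical and chosen only for illustration.

# Usage of @deprecated, with and without a reason.

@deprecated("use new_sum() instead")
def old_sum(a, b):
    return a + b

@deprecated
def old_max(a, b):
    return a if a > b else b

old_sum(1, 2)  # DeprecationWarning: Call to deprecated function old_sum (use new_sum() instead).
old_max(1, 2)  # DeprecationWarning: Call to deprecated function old_max.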
/** * Read different types of input parameters and set them in PreparedStatement * @param i index of the parameter * @param ps associated ps * @param drdaType drda type of the parameter * * @exception DRDAProtocolException, SQLException */ private void readAndSetExtParam( int i, DRDAStatement stmt, int drdaType, int extLen) throws DRDAProtocolException, SQLException { PreparedStatement ps = stmt.getPreparedStatement(); drdaType = (drdaType & 0x000000ff); boolean checkNullability = false; if (sqlamLevel >= MGRLVL_7 && FdocaConstants.isNullable(drdaType)) checkNullability = true; try { byte[] paramBytes = reader.getExtData(checkNullability); String paramString = null; switch (drdaType) { case FdocaConstants.DRDA_TYPE_LOBBYTES: case FdocaConstants.DRDA_TYPE_NLOBBYTES: if (SanityManager.DEBUG) trace("parameter value is: "+paramBytes); ps.setBytes(i+1, paramBytes); break; case FdocaConstants.DRDA_TYPE_LOBCSBCS: case FdocaConstants.DRDA_TYPE_NLOBCSBCS: paramString = new String(paramBytes, stmt.ccsidSBCEncoding); if (SanityManager.DEBUG) trace("parameter value is: "+ paramString); ps.setString(i+1,paramString); break; case FdocaConstants.DRDA_TYPE_LOBCDBCS: case FdocaConstants.DRDA_TYPE_NLOBCDBCS: paramString = new String(paramBytes, stmt.ccsidDBCEncoding ); if (SanityManager.DEBUG) trace("parameter value is: "+ paramString); ps.setString(i+1,paramString); break; case FdocaConstants.DRDA_TYPE_LOBCMIXED: case FdocaConstants.DRDA_TYPE_NLOBCMIXED: paramString = new String(paramBytes, stmt.ccsidMBCEncoding); if (SanityManager.DEBUG) trace("parameter value is: "+ paramString); ps.setString(i+1,paramString); break; default: invalidValue(drdaType); } } catch (java.io.UnsupportedEncodingException e) { throw new SQLException (e.getMessage()); } }
import React, { useState, useEffect, useRef } from "react"; import { useParams } from "react-router-dom"; import SubmissionListItem from "../components/SubmissionListItem"; import Submission from "../models/Submission"; import request from "../helpers/request"; import CodeMirror, { EditorFromTextArea } from "codemirror"; import ResultList from "../components/ResultList"; import "codemirror/mode/clike/clike"; interface SubmissionPageRouterProps { submissionId: string; } function SubmissionPage() { let submissionId = useParams<SubmissionPageRouterProps>().submissionId; let [submission, setSubmission] = useState<Submission | null>(null); let [code, setCode] = useState(""); let [compileMessage, setCompileMessage] = useState(""); let codeRef = useRef<HTMLTextAreaElement | null>(null); let editor = useRef<EditorFromTextArea | null>(null); useEffect( function() { request("/api/submission/" + submissionId).then(setSubmission); request("/api/submission/" + submissionId + "/code") .then(({ code }) => code) .then(setCode); request("/api/submission/" + submissionId + "/compile") .then(({ compileMessage }) => compileMessage) .then(setCompileMessage); }, [submissionId] ); useEffect(function() { if (codeRef.current) { editor.current = CodeMirror.fromTextArea(codeRef.current, { lineNumbers: true, readOnly: "nocursor", mode: "text/x-c++src" }); } }, []); useEffect( function() { if (editor.current) { editor.current.setValue(code); } }, [code] ); return ( <> <header className="page-name align-left-right"> <h1>Submission #{submissionId}</h1> </header> <section className="submission-page align-left-right"> <table className="submission-list my-table"> <tr className="my-table__header"> <th className="id">#</th> <th className="date">Submission time</th> <th>Handle</th> <th>Problem</th> <th>Language</th> <th className="status-cell">Result</th> <th>Execution time</th> <th>Memory</th> </tr> {submission && <SubmissionListItem submission={submission} />} </table> <textarea ref={codeRef}></textarea> {compileMessage.length > 0 && ( <div className="submission-page__compile-msg"> <h2>Compilation message</h2> <pre>{compileMessage}</pre> </div> )} <ResultList submissionId={submissionId} /> </section> </> ); } export default SubmissionPage;
/** * An analog to * {@link #reduceSuccessions(Function, BiFunction, Duration)} * to use outside of JavaFX application thread. * * @param initialTransformation function to transform a single event * from this stream to an event that can be emitted from the returned * stream. * @param reduction function to accumulate an event to the stored value * @param timeout the maximum time difference between two subsequent * events that can still be accumulated. * @param scheduler used to schedule timeout expiration * @param eventThreadExecutor executor that executes actions on the * thread on which this stream's events are emitted. The returned stream * will use this executor to emit events. */ default <U> AwaitingEventStream<U> reduceSuccessions( Function<? super T, ? extends U> initialTransformation, BiFunction<? super U, ? super T, ? extends U> reduction, Duration timeout, ScheduledExecutorService scheduler, Executor eventThreadExecutor) { Function<Runnable, Timer> timerFactory = action -> ScheduledExecutorServiceTimer.create( timeout, action, scheduler, eventThreadExecutor); return new SuccessionReducingStream<T, U>( this, initialTransformation, reduction, timerFactory); }
/*! * Copyright 2016 The ANTLR Project. All rights reserved. * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information. */ import { Equatable } from './Stubs'; /** An immutable inclusive interval a..b */ export declare class Interval implements Equatable { a: number; b: number; private static _INVALID; static readonly INVALID: Interval; private static readonly cache; /** * @param a The start of the interval * @param b The end of the interval (inclusive) */ constructor(a: number, b: number); /** Interval objects are used readonly so share all with the * same single value a==b up to some max size. Use an array as a perfect hash. * Return shared object for 0..INTERVAL_POOL_MAX_VALUE or a new * Interval object with a..a in it. On Java.g4, 218623 IntervalSets * have a..a (set with 1 element). */ static of(a: number, b: number): Interval; /** return number of elements between a and b inclusively. x..x is length 1. * if b &lt; a, then length is 0. 9..10 has length 2. */ readonly length: number; equals(o: any): boolean; hashCode(): number; /** Does this start completely before other? Disjoint */ startsBeforeDisjoint(other: Interval): boolean; /** Does this start at or before other? Nondisjoint */ startsBeforeNonDisjoint(other: Interval): boolean; /** Does this.a start after other.b? May or may not be disjoint */ startsAfter(other: Interval): boolean; /** Does this start completely after other? Disjoint */ startsAfterDisjoint(other: Interval): boolean; /** Does this start after other? NonDisjoint */ startsAfterNonDisjoint(other: Interval): boolean; /** Are both ranges disjoint? I.e., no overlap? */ disjoint(other: Interval): boolean; /** Are two intervals adjacent such as 0..41 and 42..42? */ adjacent(other: Interval): boolean; properlyContains(other: Interval): boolean; /** Return the interval computed from combining this and other */ union(other: Interval): Interval; /** Return the interval in common between this and o */ intersection(other: Interval): Interval; /** Return the interval with elements from {@code this} not in {@code other}; * {@code other} must not be totally enclosed (properly contained) * within {@code this}, which would result in two disjoint intervals * instead of the single one returned by this method. */ differenceNotProperlyContained(other: Interval): Interval | undefined; toString(): string; }
/// if numeric - returns numeric value, else return: InvalidNumericKeyCode ( -1 ) [[nodiscard]] inline auto toNumeric(KeyCode key) -> int { switch (key) { case KeyCode::KEY_0: return 0; case KeyCode::KEY_1: return 1; case KeyCode::KEY_2: return 2; case KeyCode::KEY_3: return 3; case KeyCode::KEY_4: return 4; case KeyCode::KEY_5: return 5; case KeyCode::KEY_6: return 6; case KeyCode::KEY_7: return 7; case KeyCode::KEY_8: return 8; case KeyCode::KEY_9: return 9; default: return InvalidNumericKeyCode; } }
// Find position of next character which is not whitespace func nextToken(data []byte) int { for i, c := range data { switch c { case ' ', '\n', '\r', '\t': continue default: return i } } return -1 }
// Added this code from Image, as it has fewer dependencies here. // Warning: duplicate code. inline const char* NameForTextureFormat(ETEX_Format ETF) { switch (ETF) { case eTF_Unknown: return "Unknown"; case eTF_R8G8B8A8S: return "R8G8B8A8S"; case eTF_R8G8B8A8: return "R8G8B8A8"; case eTF_A8: return "A8"; case eTF_R8: return "R8"; case eTF_R8S: return "R8S"; case eTF_R16: return "R16"; case eTF_R16F: return "R16F"; case eTF_R32F: return "R32F"; case eTF_R8G8: return "R8G8"; case eTF_R8G8S: return "R8G8S"; case eTF_R16G16: return "R16G16"; case eTF_R16G16S: return "R16G16S"; case eTF_R16G16F: return "R16G16F"; case eTF_R11G11B10F: return "R11G11B10F"; case eTF_R10G10B10A2: return "R10G10B10A2"; case eTF_R16G16B16A16: return "R16G16B16A16"; case eTF_R16G16B16A16S: return "R16G16B16A16S"; case eTF_R16G16B16A16F: return "R16G16B16A16F"; case eTF_R32G32B32A32F: return "R32G32B32A32F"; case eTF_CTX1: return "CTX1"; case eTF_BC1: return "BC1"; case eTF_BC2: return "BC2"; case eTF_BC3: return "BC3"; case eTF_BC4U: return "BC4"; case eTF_BC4S: return "BC4S"; case eTF_BC5U: return "BC5"; case eTF_BC5S: return "BC5S"; case eTF_BC6UH: return "BC6UH"; case eTF_BC6SH: return "BC6SH"; case eTF_BC7: return "BC7"; case eTF_R9G9B9E5: return "R9G9B9E5"; case eTF_D16: return "D16"; case eTF_D24S8: return "D24S8"; case eTF_D32F: return "D32F"; case eTF_D32FS8: return "D32FS8"; case eTF_B5G6R5: return "R5G6B5"; case eTF_B5G5R5: return "R5G5B5"; case eTF_B4G4R4A4: return "B4G4R4A4"; case eTF_EAC_R11: return "EAC_R11"; case eTF_EAC_RG11: return "EAC_RG11"; case eTF_ETC2: return "ETC2"; case eTF_ETC2A: return "ETC2A"; case eTF_PVRTC2: return "PVRTC2"; case eTF_PVRTC4: return "PVRTC4"; case eTF_ASTC_4x4: return "ASTC_4x4"; case eTF_ASTC_5x4: return "ASTC_5x4"; case eTF_ASTC_5x5: return "ASTC_5x5"; case eTF_ASTC_6x5: return "ASTC_6x5"; case eTF_ASTC_6x6: return "ASTC_6x6"; case eTF_ASTC_8x5: return "ASTC_8x5"; case eTF_ASTC_8x6: return "ASTC_8x6"; case eTF_ASTC_8x8: return "ASTC_8x8"; case eTF_ASTC_10x5: return "ASTC_10x5"; case eTF_ASTC_10x6: return "ASTC_10x6"; case eTF_ASTC_10x8: return "ASTC_10x8"; case eTF_ASTC_10x10: return "ASTC_10x10"; case eTF_ASTC_12x10: return "ASTC_12x10"; case eTF_ASTC_12x12: return "ASTC_12x12"; case eTF_A8L8: return "A8L8"; case eTF_L8: return "L8"; case eTF_L8V8U8: return "L8V8U8"; case eTF_B8G8R8: return "B8G8R8"; case eTF_L8V8U8X8: return "L8V8U8X8"; case eTF_B8G8R8X8: return "B8G8R8X8"; case eTF_B8G8R8A8: return "B8G8R8A8"; default: assert(0); } return "Unknown"; }
import java.io.*; import java.util.*; public class Main { static StreamTokenizer in; static PrintWriter out; static void solve(StreamTokenizer in, PrintWriter out) throws Exception{ int a = nextInt(); int b = nextInt(); int n = nextInt(); int i = 1; while(true){ int x; if(i % 2 == 1) x = GCD(a, n); else x = GCD(b, n); if(x > n) break; else n-=x; i++; } if(i % 2 == 0) out.print(0); else out.print(1); } static int GCD(int a, int b){ if(a == 0 || b == 0) return a+b; return a > b ? GCD(a % b, b) : GCD(a, b % a); } public static void main(String[] args) throws Exception { in = new StreamTokenizer(new BufferedReader(new InputStreamReader(System.in))); out = new PrintWriter(System.out); solve(in, out); out.flush(); } static int min(int a,int b){ if(a>b) return b; else return a; } static int max(int a,int b){ if(a<b) return b; else return a; } static int nextInt() throws Exception{ in.nextToken(); return (int)in.nval; } static byte nextByte() throws Exception{ in.nextToken(); return (byte)in.nval; } static long nextLong() throws Exception{ in.nextToken(); return (long)in.nval; } }
def make_position_dialog(self, type_='object'): min_x = 0.35 min_y = 0.18 w, h = (0.09, 0.18) if type_ == 'object': a = 0.045 else: a = 0.06 rax = plt.axes([min_x, min_y + a - h, w, h], axisbg=self.radio_boxcolor) rax.patch.set_visible(False) rax.axis('off') self.radio['relation'] = RadioButtons(rax, self.relations[type_]) self.radio['relation'].on_clicked(self.relation_func) min_x += w + 0.04 w, h = (0.09, 0.18) rax = plt.axes([min_x, min_y + 0.045 - h, w, h], axisbg=self.radio_boxcolor) rax.patch.set_visible(False) rax.axis('off') self.radio['grounding'] = RadioButtons(rax, self.groundings[type_]) self.radio['grounding'].on_clicked(self.grounding_func)
#include "types.h"
#include "param.h"
#include "memlayout.h"
#include "arch/riscv.h"
#include "spinlock.h"
#include "proc.h"
#include "defs.h"
#include "pci.h"

#define MAX_PCI_DEVICE_NUM 32

struct pci_dev_raw {
  uint32_t id;
  volatile uint32_t* base;
};

static int dev_num = 0;
static struct pci_dev_raw pci_dev_raws[MAX_PCI_DEVICE_NUM];
static struct pci_dev* pci_devs[MAX_PCI_DEVICE_NUM];

static void pci_scan_bus();

void
pci_init()
{
  memset(pci_dev_raws, 0, sizeof(pci_dev_raws));
  memset(pci_devs, 0, sizeof(pci_devs));
  pci_scan_bus();
}

static void
pci_scan_bus()
{
  int raw_dev_num = 0;
  for(int dev = 0; dev < MAX_PCI_DEVICE_NUM; dev++) {
    int bus = 0;
    int func = 0;
    int offset = 0;
    uint32_t off = (bus << 16) | (dev << 11) | (func << 8) | (offset);
    volatile uint32_t *base = (uint32_t *)ECAM + off;
    uint32_t id = base[0];
    if (id == -1)
      continue;
    pci_dev_raws[raw_dev_num].base = base;
    printf("id: %x, base: %p\n", id, base);
    pci_dev_raws[raw_dev_num].id = id;
    raw_dev_num += 1;
  }
}

int
pci_register_device(struct pci_dev *dev)
{
  for (int i = 0; i < MAX_PCI_DEVICE_NUM; i++) {
    if (pci_dev_raws[i].id == dev->id) {
      pci_devs[dev_num] = dev;
      dev->base = pci_dev_raws[i].base;
      if (dev->driver->init(dev) < 0) {
        return -1;
      }
      dev_num += 1;
      return 0;
    }
  }
  return -1;
}
/*
** resolver.h
**
** name resolver library header
**
*/
#ifndef __RESOLVER_H
#define __RESOLVER_H

/*
** resolver_lookup
**
** resolves a host's name from its ip address
** or
** resolves an ip address from its host name
**
** returns a pointer to buff, or NULL if an error occurred
**
*/

#ifdef _mangle
# define resolver_initialize _mangle(resolver_initialize)
# define resolver_shutdown _mangle(resolver_shutdown)
# define resolver_getname _mangle(resolver_getname)
# define resolver_getip _mangle(resolver_getip)
#endif

void resolver_initialize(void);
void resolver_shutdown(void);

char *resolver_getname(const char *ip, char *buff, int len);
char *resolver_getip(const char *name, char *buff, int len);

#endif
import pytest
from unittest.mock import patch


@pytest.fixture  # assumed: this generator is meant to be used as a pytest fixture
def mock_device_tracker_conf():
    devices = []

    async def mock_update_config(path, id, entity):
        devices.append(entity)

    with patch(
        "openpeerpower.components.device_tracker.legacy"
        ".DeviceTracker.async_update_config",
        side_effect=mock_update_config,
    ), patch(
        "openpeerpower.components.device_tracker.legacy.async_load_config",
        side_effect=lambda *args: devices,
    ):
        yield devices
Visualization and Detection of Changes in Brain States Using t-SNE

Dimensionality reduction techniques are used primarily for visualization purposes. With sophisticated visualization techniques like t-distributed Stochastic Neighbor Embedding (t-SNE), we can preserve the original neighborhood information even in lower dimensions. Taking advantage of this property, we present a post-processing technique for fMRI data which can identify changes in brain state in the t-SNE space. The brain state changes detected by this method show high temporal correlation with the actual experimental paradigm. Such a technique can be used to extract additional information and to better understand the temporal characteristics of task and resting-state fMRI experiments.
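A minimal sketch of the idea in Python, under loudly stated assumptions: the data below is synthetic noise standing in for real fMRI volumes, scikit-learn's t-SNE replaces whatever implementation the authors used, and the jump threshold is purely illustrative, not the paper's detection rule.

import numpy as np
from sklearn.manifold import TSNE

# Synthetic stand-in for fMRI data: 200 time points x 500 voxels,
# with a simulated state change halfway through (assumption, not real data).
rng = np.random.default_rng(0)
X = np.vstack([rng.normal(0.0, 1.0, (100, 500)),
               rng.normal(0.5, 1.0, (100, 500))])

# Embed each time point into 2-D t-SNE space.
emb = TSNE(n_components=2, perplexity=30, random_state=0).fit_transform(X)

# Flag candidate state changes where consecutive embedded points jump apart.
jumps = np.linalg.norm(np.diff(emb, axis=0), axis=1)
threshold = jumps.mean() + 2 * jumps.std()  # illustrative threshold
change_points = np.where(jumps > threshold)[0] + 1
print(change_points)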
// dockHeight returns the height of the entire dock area
func (o *output) dockHeight(area dockArea) uint16 {
	var height uint16
	for _, f := range o.dockAreas[area] {
		height += f.height
	}
	return height
}
#include <stdio.h>

/* Returns 1 if the time HH:MM reads the same mirrored (HH reversed equals MM). */
int pk(int x, int y)
{
    int s1, s2, d1, d2;
    s1 = x / 10;
    s2 = x % 10;
    d1 = y / 10;
    d2 = y % 10;
    if (s1 == d2 && s2 == d1)
        return 1;
    return 0;
}

/* Prints the time as HH:MM. */
void bilmemne(int x, int y)
{
    int s1, s2, d1, d2;
    s1 = x / 10;
    s2 = x % 10;
    d1 = y / 10;
    d2 = y % 10;
    printf("%d%d:%d%d\n", s1, s2, d1, d2);
}

int main()
{
    char s1, s2, c, d1, d2;
    int s, d;
    scanf("%c%c%c%c%c", &s1, &s2, &c, &d1, &d2);
    s = (s1 - '0') * 10 + s2 - '0';
    d = (d1 - '0') * 10 + d2 - '0';
    while (1) {
        d++;
        if (d >= 60) {
            s++;
            d -= 60;
        }
        if (s >= 24)
            s -= 24;
        if (pk(s, d)) {
            bilmemne(s, d);
            return 0;
        }
    }
    return 0;
}
package ch.uzh.ifi.seal.soprafs20.rest.dto.move;

import ch.uzh.ifi.seal.soprafs20.entity.game.cards.DevelopmentCard;

public class CardMoveDTO extends MoveDTO {

    private DevelopmentCard developmentCard;

    public DevelopmentCard getDevelopmentCard() {
        return developmentCard;
    }

    public void setDevelopmentCard(DevelopmentCard developmentCard) {
        this.developmentCard = developmentCard;
    }
}
#include <iostream>
#include <vector>
#include <string>
#include <cctype>
using namespace std;

#define MAX 72
#define INV (-1)

struct Dig {
    vector<char> s;

    void push(char c) { s.push_back(c); }
    void pop() { s.pop_back(); }

    void simplify() {
        while (true) {
            if (s.size() == 1) break;
            if (s[0] == '0') {
                s.erase(s.begin());
            } else break;
        }
    }

    bool operator<(const Dig &t) {
        if (s.size() < t.s.size()) return true;
        else if (s.size() == t.s.size()) {
            for (unsigned int i = 0; i < s.size(); ++i) {
                if (s[i] == t.s[i]) continue;
                if (s[i] < t.s[i]) return true;
                else return false;
            }
        }
        return false;
    }
};

vector<Dig> vdigs;

void ResetMap(char map[MAX][MAX]) {
    for (int i = 0; i < MAX; ++i) {
        for (int j = 0; j < MAX; ++j) {
            map[i][j] = INV;
        }
    }
}

template<typename _T>
ostream &operator<<(ostream &os, const vector<_T> &v) {
    for (unsigned int i = 0; i < v.size(); ++i) {
        os << v[i];
    }
    return os;
}

int main() {
    while (true) {
        int H, W;
        Dig ans;
        char map[MAX][MAX];
        Dig dmap[MAX][MAX];
        cin >> W >> H;
        if (W == 0 && H == 0) break;
        vdigs.clear();
        ResetMap(map);
        for (int i = 1; i <= H; ++i) {
            string s;
            cin >> s;
            for (int j = 1; j <= W; ++j) {
                map[i][j] = s[j - 1];
            }
        }
        for (int i = 1; i <= H; ++i) {
            for (int j = 1; j <= W; ++j) {
                if (!isdigit(map[i][j])) continue;
                else {
                    Dig d, d1, d2;
                    Dig dmax;
                    d.push(map[i][j]);
                    if (isdigit(map[i - 1][j])) {
                        d1 = dmap[i - 1][j];
                        d1.push(map[i][j]);
                    }
                    if (isdigit(map[i][j - 1])) {
                        d2 = dmap[i][j - 1];
                        d2.push(map[i][j]);
                    }
                    dmax = d;
                    if (dmax < d1) dmax = d1;
                    if (dmax < d2) dmax = d2;
                    dmax.simplify();
                    dmap[i][j] = dmax;
                }
            }
        }
        for (int i = 1; i <= H; ++i) {
            for (int j = 1; j <= W; ++j) {
                if (ans < dmap[i][j]) {
                    ans = dmap[i][j];
                }
            }
        }
        cout << ans.s << endl;
    }
    return 0;
}
package earth.eu.jtzipi.jbat.ui;

import earth.eu.jtzipi.jbat.ui.node.PathNodeFX;
import earth.eu.jtzipi.jbat.ui.task.SearchTask;
import earth.eu.jtzipi.modules.node.path.IPathNode;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.scene.control.TreeItem;
import javafx.scene.control.TreeTableCell;
import javafx.scene.control.TreeTableColumn;
import javafx.scene.control.TreeTableView;
import javafx.scene.layout.Pane;

import java.nio.file.Path;

public class SearchPane extends Pane {

    TreeTableView<PathNodeFX> ttv;
    private SearchTask searchTask;

    private BooleanProperty fxTaskRunningProp = new SimpleBooleanProperty( this, "", false );
    private ObjectProperty<Path> fxSearchPathProp = new SimpleObjectProperty<>( this, "", null );

    SearchPane( final Path dir ) {
    }

    private void createSearchPane() {
    }

    private TreeTableView<PathNodeFX> createSearchTableView( final IPathNode path ) {
        TreeItem<PathNodeFX> treeView = new TreeItem<>( PathNodeFX.of( path ) );
        TreeTableView<PathNodeFX> ttv = new TreeTableView<>( treeView );

        // Path name
        TreeTableColumn<PathNodeFX, IPathNode> nameTC = new TreeTableColumn<>( "File" );
        nameTC.setCellValueFactory( cb -> cb.getValue().getValue().getPathNodeProp() );
        nameTC.setCellFactory( cb -> new TreeTableCell<>() );
        nameTC.setPrefWidth( 500D );

        ttv.getColumns().add( nameTC );
        return ttv;
    }
}
/**
 * Encodes this {@code DnsMessage} in binary format.
 *
 * @return bytes
 */
final byte[] encode() {
    try (final MessageOutputStream output = new MessageOutputStream()) {
        output.writeShort((short) 0);
        output.writeShort(flags);
        output.writeShort((short) questions.size());
        output.writeShort((short) nbAnswers);
        output.writeShort((short) nbAuthorities);
        output.writeShort((short) nbAdditional);
        questions.forEach(q -> write(q, output));
        answers.forEach(a -> write(a.record(), a.stamp(), output));
        return output.toByteArray();
    }
}
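For readers unfamiliar with the wire layout this method writes, here is a hedged Python sketch of the same 12-byte DNS header (ID, flags, and the four section counts, each a big-endian 16-bit integer). The function name and field values are illustrative assumptions; it does not reproduce the surrounding class or its record serialization.

import struct

def encode_dns_header(flags, n_questions, n_answers, n_authorities, n_additional):
    # DNS header: ID, FLAGS, QDCOUNT, ANCOUNT, NSCOUNT, ARCOUNT,
    # each an unsigned 16-bit integer in network (big-endian) byte order.
    # The method above writes the ID as 0, so we do the same here.
    return struct.pack("!6H", 0, flags, n_questions, n_answers,
                       n_authorities, n_additional)

# Example: a query with one question and no other records.
header = encode_dns_header(flags=0x0100, n_questions=1, n_answers=0,
                           n_authorities=0, n_additional=0)
assert len(header) == 12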
def save_feed(user, name, new):
    feed = DmsFeed()
    feed.name = name
    feed.title = new['title']
    feed.description = new['text']
    feed.link = new['url_more']
    feed.general_mode = new['section']
    feed.owner = User.objects.get(username=user)
    feed.is_deleted = False
    feed.last_modified = get_last_modified()
    feed.save()
Ryan Kerrigan celebrates. (Patrick Smith/Getty Images)

Ryan Kerrigan's postgame persona is usually "relaxed manila folder." He's among the Redskins' most popular players but also among their least expressive, at least in front of cameras. Late Sunday night, though, here came Kerrigan striding through the locker room, grinning like a school kid at recess.

"It was one of, if not the, funnest game I've ever played as a Redskin," Kerrigan said of Washington's 27-10 demolition of Oakland.

This is a guy who's been to the playoffs twice, who's been in divisional games that mattered in December. This was a nonconference game in September. And this one stood out as the most fun?

"Probably," Kerrigan said, still grinning. "I mean, start to finish, I don't know if I've been part of a game with the Redskins where we were that dominant on defense. I mean, it was a blast. And especially knowing you're doing that against one of the better teams in the league, one of the top teams in the AFC last year. And so it was really fun, yeah."

Haven't heard a "defense!" chant here on a FG in so long. pic.twitter.com/HQwhzlPrT9 — Barno (@DCBarno) September 25, 2017

That wasn't a minority opinion Sunday night — Bashaud Breeland was virtually giggling at his locker — and it isn't crazy when you skim through past schedules. So many of this team's biggest wins in recent years have come with a "but." The Redskins routed Jacksonville early in 2014, but their starting quarterback suffered a serious injury. They notched huge wins as they marched to the playoffs in 2015, but those were either unconvincing or against poor teams. (Still fun! Just not quite the same.) They've had glorious prime time victories against divisional rivals (the Eagles in 2016, the Cowboys in 2014), but those were on the road, which isn't really as joyous. Their last appearance on Sunday Night Football looks like a lark in retrospect — an 18-point win over the Packers last year — but that was still a five-point game late in the fourth quarter.

This? This was a complete demolition. It was a kid stomping on ants. It was a car running over a Wiffle ball. It was a matchup of unequals, conducted in front of a national audience, in a game that has the potential to upend everything we thought about this season.

Kirk Cousins will struggle without his veteran receivers, or without his offensive coordinator, or when Jordan Reed misses games with injury, right? He just threw for 365 yards, three touchdowns and no turnovers while completing more than 80 percent of passes. (Only the Chiefs' Alex Smith in the season opener against the Patriots reached all those marks.)

The defense will again wrestle with its long-standing third-down demons? How's 0-for-11 sound, the first time the Redskins held an opponent without a third-down conversion in a decade and tied for the best third-down performance in team history?

The Redskins will toggle between games when either the offense or the defense lags behind? Not this time. This time, they took turns sending team officials scurrying to the record books. The offense gained 472 yards, with Chris Thompson notching the most receiving yards for a Washington running back in at least 57 years. The defense allowed 128, the fewest for the Redskins since 1992. The differential was the biggest for Washington since 1974, which means none of the Super Bowl teams ever put together a show quite like that one.

I've been watching the Redskins for a very long time and can't remember their defense ever dominating like this. Unbelievable. — Tom Jackman (@TomJackmanWP) September 25, 2017

Can't remember the Redskins playing defense like this against an elite team in a long time. — Shaun H. Ahmad (@ShaunAhmad) September 25, 2017

Most dominating #Redskins win I've seen in a long time. — Lou Pettey (@golferlouie) September 25, 2017

And if you want to know how fun it was for Washington, just consider how miserable it was for the Raiders. Ask Coach Jack Del Rio about the third-down misery, for example.

"It sucked," he said. "You know, getting off on third down, for a defense it gives them life and, for the other offense, it's joyful. It really is."

Or ask the Raiders players what happened.

"We just got our butt kicked," quarterback Derek Carr said.

"We got our butts handed to us today, man," lineman Donald Penn said.

And yet the final score still flattered them. Oakland only scored when Washington turned the ball over on the wrong side of the 50-yard line, and those two turnovers were probably the only plays you could quibble with over the entire contest. That's what made this game different from so many Redskins wins of the past five or six years. There was nothing to nitpick, nothing to regret, no injuries to mourn, no key performers who failed, no coaching failures, no uncertainty, no "yeah, but." There was no need to debate Cousins's salary or the way Washington had promoted a new defensive coordinator with a mixed track record, to lament wasted draft picks (Josh Doctson!) or uncertain depth (Vernon Davis!). It was all just joy. They played one of the five or six teams that oddsmakers gave the best chances to win the Super Bowl. And they dominated.

Still buzzin' from last night. What a win! Best feeling ever #HTTR pic.twitter.com/P5TYr17FtR — Spencer Long (@slong_61) September 25, 2017

Cousins has lately become an evangelist for a popular media talking point: The NFL is such a week-to-week league that it's almost impossible to create big-picture conclusions from single games, and he said it again Sunday night. ("It's hard to say what we are and what we aren't," he said, which is true.) The Redskins have bounced in and out of uncertainty for months, from the front-office changes to the Cousins negotiations to the poor preseason showing to the odd Su'a Cravens situation to the Week 1 disappointment. And then came Sunday night. For the first time in a long time, it was pure good feelings without any sort of remorse. Maybe it will carry forward to Kansas City and beyond, and maybe it won't. But for one night in Landover, it would be hard for anyone associated with the Redskins to disagree with Kerrigan: This was a blast.

Read more on the Redskins:
Redskins put it all together in dominant prime-time victory over Raiders
Redskins' defensive performance in win over the Raiders was their best in 25 years
Best and worst moments from the Redskins' 27-10 win over the Raiders
Redskins players, owner link arms during anthem following Trump's criticism
Analysis of a Dielectric Rod Antenna with an Infinite Ground Plane

The analysis of a dielectric rod antenna integrated with a dual-mode reciprocal phase shifter for phased-array applications is presented. The structure is analyzed as a dielectric rod antenna for use in phased arrays; many researchers have studied and analyzed such antennas for their particular designs. Dielectric cylindrical antennas belong to the class of surface-wave antennas. The feed excites the HE11 surface wave in and on the dielectric rod. The dominant HE11 mode is the most widely used mode in dielectric rod antennas, although the TM0m modes have sometimes been used. It is assumed that radiation takes place only from the two apertures, at the feed and at the free end. These cylindrical antennas behave like end-fire antennas, with a major lobe in the forward direction. The design equations and calculations are carried out for the HE11 mode. To feed energy to the antenna over the infinite ground plane, an aperture-coupled feed is used. The simulated and theoretical results are compared. The radiation pattern, insertion loss and bandwidth are obtained for the structure simulated in HFSS. The radiation observed is end-fire, with the major lobe in the forward direction.
Effects of lossy, layered filler particles on the bulk permittivity of a composite material

The ability to control the frequency at which a dielectric material exhibits maximum loss (the relaxation frequency) is of interest in telecommunications and radar absorption. A theoretical investigation of the behaviour of the complex bulk permittivity of a composite material with coated, spheroidal filler particles is presented. In the model, the filler particles are replaced mathematically by electric multipole sources located at their centres (Harfield N 2000 J. Mater. Sci. 35 5809-16). It is shown how factors such as particle shape, orientation with respect to the applied electric field, thickness of coating and permittivity value of the individual phases influence the bulk permittivity of the composite material. For a composite with coated filler particles, one or two relaxation frequencies may be observed. Employing the theory of Pauly and Schwan (Hanai T 1968 Electrical properties of emulsions Emulsion Science ed P Sherman (London: Academic)), particular attention is paid to the way in which the relaxation frequencies are affected by the material parameters.
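As a much simpler point of comparison than the multipole model above, the classical Maxwell Garnett mixing rule for uncoated spherical inclusions can be sketched in a few lines of Python. This is explicitly not the paper's coated-spheroid theory, just a textbook baseline; the permittivity values and volume fraction in the example are illustrative assumptions.

def maxwell_garnett(eps_matrix, eps_inclusion, volume_fraction):
    """Effective permittivity of spherical inclusions in a host matrix
    (classical Maxwell Garnett rule; complex permittivities allowed)."""
    em, ei, f = eps_matrix, eps_inclusion, volume_fraction
    return em * (ei + 2 * em + 2 * f * (ei - em)) / (ei + 2 * em - f * (ei - em))

# Example: lossy inclusions (complex permittivity) at 20% volume fraction.
print(maxwell_garnett(2.2, 12 - 3j, 0.2))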
#include <stdio.h>

/* Greatest common divisor by Euclid's algorithm. */
int gcd(int a, int b)
{
    if (b == 0)
        return a;
    return gcd(b, a % b);
}

int main()
{
    int maxm = -1, gcdval, i, t;
    scanf("%d", &t);
    int d[t];
    for (i = 0; i < t; i++) {
        scanf("%d", &d[i]);
        if (d[i] > maxm)
            maxm = d[i];
    }
    gcdval = d[0];
    for (i = 1; i < t; i++)
        gcdval = gcd(gcdval, d[i]);
    /* The game reduces to (max/gcd - t) available moves; parity decides the winner. */
    if ((maxm / gcdval - t) % 2 == 0)
        printf("Bob");
    else
        printf("Alice");
    return 0;
}
Adaptive Pricing in Insurance: Generalized Linear Models and Gaussian Process Regression Approaches

We study the application of dynamic pricing to insurance. We view this as an online revenue management problem where the insurance company looks to set prices to optimize the long-run revenue from selling a new insurance product. We develop two pricing models: an adaptive Generalized Linear Model (GLM) and an adaptive Gaussian Process (GP) regression model. Both balance between exploration, where we choose prices in order to learn the distribution of demands and claims for the insurance product, and exploitation, where we myopically choose the best price from the information gathered so far. The performance of the pricing policies is measured in terms of regret: the expected revenue loss caused by not using the optimal price. As is commonplace in insurance, we model demand and claims by GLMs. In our adaptive GLM design, we use the maximum quasi-likelihood estimation (MQLE) to estimate the unknown parameters. We show that, if prices are chosen with suitably decreasing variability, the MQLE parameters eventually exist and converge to the correct values, which in turn implies that the sequence of chosen prices will also converge to the optimal price. In the adaptive GP regression model, we sample demand and claims from Gaussian Processes and then choose selling prices by the upper confidence bound rule. We also analyze these GLM and GP pricing algorithms with delayed claims. Although similar results exist in other domains, this is among the first works to consider dynamic pricing problems in the field of insurance. We also believe this is the first work to consider Gaussian Process regression in the context of insurance pricing. These initial findings suggest that online machine learning algorithms could be a fruitful area of future investigation and application in insurance.
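A hedged sketch of the GP half of the idea, using scikit-learn rather than the authors' own implementation: revenue observations at tried prices are fit with GP regression, and the next price is chosen by the upper-confidence-bound rule the abstract describes. The toy demand model, candidate price grid, and exploration weight are all illustrative assumptions, not the paper's setup.

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

rng = np.random.default_rng(1)
candidate_prices = np.linspace(1.0, 10.0, 50).reshape(-1, 1)

def observed_revenue(p):
    # Toy model (assumption): expected demand decays with price, plus noise.
    return p * np.exp(-p / 4.0) + rng.normal(0, 0.05)

prices, revenues = [5.0], [observed_revenue(5.0)]
for _ in range(25):
    # Fit a GP to (price, revenue) pairs seen so far; alpha models observation noise.
    gp = GaussianProcessRegressor(kernel=RBF(length_scale=1.0), alpha=0.05 ** 2)
    gp.fit(np.array(prices).reshape(-1, 1), revenues)
    mean, std = gp.predict(candidate_prices, return_std=True)
    ucb = mean + 2.0 * std            # illustrative exploration weight
    p_next = float(candidate_prices[np.argmax(ucb)])
    prices.append(p_next)
    revenues.append(observed_revenue(p_next))

print("price chosen after 25 rounds:", prices[-1])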
/**
 * Populates subcomponent solution nodes for the highest level security GSN and also creates the
 * lower threat-level fragments
 *
 * @param subCompId
 * @param cutsets
 * @param acceptableProb
 * @param cyberReqId
 * @return
 * @throws IOException
 */
public GsnNode populateSubComponentSolutionNode(
        String subCompId, List<Cutset> cutsets, String acceptableProb, String cyberReqId)
        throws IOException {

    GsnNode subCompSolNode = new GsnNode();
    subCompSolNode.setNodeType("solution");

    String solutionId = "SOLUTION_" + Integer.toString(solutionCounter);
    solutionCounter++;
    subCompSolNode.setNodeId(solutionId);

    Solution sol = new Solution();
    sol.setDisplayText("Evidence that&#10;" + subCompId + "&#10;is secure");

    /** Create a GSN fragment that starts at this subcomponent and create artifacts for it */
    GsnNode subCompFragment =
            populateSubComponentFragment(subCompId, cutsets, acceptableProb, cyberReqId);

    SecurityGSNInterface interfaceObj = new SecurityGSNInterface();
    String svgFileName =
            interfaceObj.createArtifactFiles(subCompFragment, cyberReqId + "_" + subCompId);
    String svgDestination = hostGSNDir + "/" + svgFileName;

    sol.setUrl(svgDestination);
    sol.setStatus(subCompFragment.getGoal().getStatus());

    subCompSolNode.setSolution(sol);
    return subCompSolNode;
}
/*
 * Copyright (C) 2018 ETH Zurich and University of Bologna
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __ARCHI_SOC_EU_SOC_EU_V2_H__
#define __ARCHI_SOC_EU_SOC_EU_V2_H__

#define SOC_EU_EVENT       0x00
#define SOC_FC_FIRST_MASK  0x04
#define SOC_CL_FIRST_MASK  0x24
#define SOC_PR_FIRST_MASK  0x44
#define SOC_ERR_FIRST_MASK 0x64
#define SOC_TIMER_SEL_HI   0x84
#define SOC_TIMER_SEL_LO   0x88

#define SOC_EU_EVENT_0 0x1
#define SOC_EU_EVENT_1 0x2
#define SOC_EU_EVENT_2 0x4
#define SOC_EU_EVENT_3 0x8
#define SOC_EU_EVENT_4 0x10
#define SOC_EU_EVENT_5 0x20
#define SOC_EU_EVENT_6 0x40
#define SOC_EU_EVENT_7 0x80

#define SOC_TIMER_SEL_ENABLE_SHIFT 31
#define SOC_TIMER_SEL_EVT_SHIFT    0
#define SOC_TIMER_SEL_EVT_WIDTH    8
#define SOC_TIMER_SEL_EVT_MASK     ((~0U) >> (32 - SOC_TIMER_SEL_EVT_WIDTH))
// #define SOC_TIMER_SEL_EVT_MASK 0xff

#define SOC_TIMER_SEL_ENABLE_DISABLED 0
#define SOC_TIMER_SEL_ENABLE_ENABLED  1

#define SOC_TIMER_SEL_ENABLE_DIS (0 << SOC_TIMER_SEL_ENABLE_SHIFT)
#define SOC_TIMER_SEL_ENABLE_ENA (1 << SOC_TIMER_SEL_ENABLE_SHIFT)

#define SOC_TIMER_SEL_EVT_VAL(val) ((val) << SOC_TIMER_SEL_EVT_SHIFT)

// related to XX_FIRST_MASK registers
#define SOC_NB_EVENT_REGS    8
#define SOC_NB_EVENT_TARGETS 3

#define SOC_FC_MASK(x) (SOC_FC_FIRST_MASK + (x)*4)
#define SOC_CL_MASK(x) (SOC_CL_FIRST_MASK + (x)*4)
#define SOC_PR_MASK(x) (SOC_PR_FIRST_MASK + (x)*4)

#define ARCHI_SOC_EVENT_PERIPH_EVT_BASE(periph) ((periph)*ARCHI_SOC_EVENT_UDMA_NB_CHANNEL_EVT)

#endif
//-----------------------------------------------------------------------------
//
// AIFFBehavior::getRealSize(...)
//
// Purpose: Validate the passed in size value, identify the valid size if the
//          passed in isn't valid and return the valid size.
//          Throw an exception if the passed in size isn't valid and there's
//          no way to identify a valid size.
//
//-----------------------------------------------------------------------------

XMP_Uns64 AIFFBehavior::getRealSize( const XMP_Uns64 size, const ChunkIdentifier& id, IChunkContainer& tree, XMP_IO* stream )
{
    if( (size & 0x80000000) > 0 )
    {
        XMP_Throw( "Unknown size value", kXMPErr_BadFileFormat );
    }

    return size;
}
/**
 * Contains integration tests (interaction with the Model) for {@code SearchCommand}.
 */
public class SearchCommandTest {
    private Model model = new ModelManager(getTypicalKanBugTracker(), new UserPrefs());
    private Model expectedModel = new ModelManager(getTypicalKanBugTracker(), new UserPrefs());

    @Test
    public void equals() {
        BugContainsQueryStringPredicate firstPredicate = new BugContainsQueryStringPredicate("first");
        BugContainsQueryStringPredicate secondPredicate = new BugContainsQueryStringPredicate("second");

        SearchCommand searchFirstCommand = new SearchCommand(firstPredicate);
        SearchCommand searchSecondCommand = new SearchCommand(secondPredicate);

        // same object -> returns true
        assertTrue(searchFirstCommand.equals(searchFirstCommand));

        // same values -> returns true
        SearchCommand searchFirstCommandCopy = new SearchCommand(firstPredicate);
        assertTrue(searchFirstCommand.equals(searchFirstCommandCopy));

        // different types -> returns false
        assertFalse(searchFirstCommand.equals(1));

        // null -> returns false
        assertFalse(searchFirstCommand.equals(null));

        // different bug -> returns false
        assertFalse(searchFirstCommand.equals(searchSecondCommand));
    }

    @Test
    public void execute_emptyData_noBugFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 0);
        BugContainsQueryStringPredicate predicate = preparePredicate("No data");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Collections.emptyList(), model.getFilteredBugList());
    }

    @Test
    public void execute_oneKeywordQueryString_oneBugFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 1);
        BugContainsQueryStringPredicate predicate = preparePredicate("jar");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Arrays.asList(BUGSEVEN), model.getFilteredBugList());
    }

    @Test
    public void execute_oneMixedCaseKeywordQueryString_oneBugFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 1);
        BugContainsQueryStringPredicate predicate = preparePredicate("eXiT");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Arrays.asList(BUGFIVE), model.getFilteredBugList());
    }

    @Test
    public void execute_multipleKeywordsQueryString_oneBugFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 1);
        BugContainsQueryStringPredicate predicate = preparePredicate("Note rendering");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Arrays.asList(BUGSIX), model.getFilteredBugList());
    }

    @Test
    public void execute_oneKeywordQueryString_multipleBugsFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 4);
        BugContainsQueryStringPredicate predicate = preparePredicate("command");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Arrays.asList(BUGONE, BUGTWO, BUGFOUR, BUGFIVE), model.getFilteredBugList());
    }

    @Test
    public void execute_oneMixedCaseKeywordQueryString_multipleBugsFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 4);
        BugContainsQueryStringPredicate predicate = preparePredicate("cOmMaND");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Arrays.asList(BUGONE, BUGTWO, BUGFOUR, BUGFIVE), model.getFilteredBugList());
    }

    @Test
    public void execute_multipleMixedCaseKeywordsQueryString_multipleBugsFound() {
        String expectedMessage = String.format(MESSAGE_BUGS_LISTED_OVERVIEW, 2);
        BugContainsQueryStringPredicate predicate = preparePredicate("mAiN wINDow");
        SearchCommand command = new SearchCommand(predicate);
        expectedModel.updateFilteredBugList(predicate);
        assertCommandSuccess(command, model, expectedMessage, expectedModel);
        assertEquals(Arrays.asList(BUGTHREE, BUGSIX), model.getFilteredBugList());
    }

    /**
     * Parses {@code userInput} into a {@code BugContainsQueryStringPredicate}.
     */
    private BugContainsQueryStringPredicate preparePredicate(String userInput) {
        return new BugContainsQueryStringPredicate(userInput);
    }
}
/**
 * The parse() method is the parser of the formatter input file. It is
 * called at the beginning of the IO format definition file, and it consumes
 * the entire input file while building an internal representation of the
 * I/O format.
 *
 * @exception FormatterError
 */
public void parse() throws FormatterError {
    try {
        while (parseAliceBob())
            parseLineName();
        st = null;
    } catch (Exception e) {
        System.err.println("parse: exception " + e.getMessage());
        return;
    }
}
def inverse_transform_lemmas(self, predictions):
    pred_lemmas = []
    if self.include_lemma == 'generate':
        for pred in predictions:
            pred_lem = ''
            for positions in pred:
                top_idx = np.argmax(positions)
                c = self.lemma_char_idx[top_idx]
                if c in ('$', '%'):
                    continue
                if c == '|':
                    break
                else:
                    pred_lem += c
            pred_lemmas.append(pred_lem)
    elif self.include_lemma == 'label':
        predictions = np.argmax(predictions, axis=1)
        pred_lemmas = self.lemma_encoder.inverse_transform(predictions)
    return pred_lemmas
package resourcequota

import (
	"errors"

	"github.com/kubernetes/dashboard/src/app/backend/api"
	"github.com/kubernetes/dashboard/src/app/backend/resource/dataselect"
	v1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
	metaV1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	k8sClient "k8s.io/client-go/kubernetes"
)

type ResourceQuotaSpec struct {
	// Name of the resource quota.
	Name                     string `json:"name"`
	Tenant                   string `json:"tenant"`
	NameSpace                string `json:"name_space"`
	ResourceCPU              string `json:"cpu"`
	ResourceMemory           string `json:"memory"`
	ResourcePods             string `json:"pods"`
	ResourceConfigMaps       string `json:"config_maps"`
	ResourcePVC              string `json:"pvc"`
	ResourceSecrets          string `json:"secrets"`
	ResourceServices         string `json:"services"`
	ResourceStorage          string `json:"storage"`
	ResourceEphemeralStorage string `json:"ephemeral_storage"`
}

// ResourceStatus provides the status of the resource defined by a resource quota.
type ResourceStatus struct {
	Used string `json:"used,omitempty"`
	Hard string `json:"hard,omitempty"`
}

func toResourceQuota(resourcequota v1.ResourceQuota) ResourceQuota {
	return ResourceQuota{
		ObjectMeta: api.NewObjectMeta(resourcequota.ObjectMeta),
		TypeMeta:   api.NewTypeMeta(api.ResourceKindResourceQuota),
	}
}

type ResourceQuota struct {
	ObjectMeta api.ObjectMeta `json:"objectMeta"`
	TypeMeta   api.TypeMeta   `json:"typeMeta"`
}

// ResourceQuotaDetail provides the presentation layer view of Kubernetes Resource Quotas resource.
type ResourceQuotaDetail struct {
	ResourceQuota `json:",inline"`
	ObjectMeta    api.ObjectMeta `json:"objectMeta"`
	TypeMeta      api.TypeMeta   `json:"typeMeta"`

	// Scopes defines quota scopes
	Scopes []v1.ResourceQuotaScope `json:"scopes,omitempty"`

	// StatusList is a set of (resource name, Used, Hard) tuple.
	StatusList map[v1.ResourceName]ResourceStatus `json:"statusList,omitempty"`

	// List of non-critical errors, that occurred during resource retrieval.
	Errors []error `json:"errors"`
}

// ResourceQuotaDetailList
type ResourceQuotaDetailList struct {
	ListMeta api.ListMeta          `json:"listMeta"`
	Items    []ResourceQuotaDetail `json:"items"`
}

func AddResourceQuotas(client k8sClient.Interface, namespace string, tenant string, spec *ResourceQuotaSpec) (*v1.ResourceQuota, error) {
	if tenant == "" {
		tenant = "default"
	}
	ns, err := client.CoreV1().NamespacesWithMultiTenancy(tenant).Get(namespace, metaV1.GetOptions{})
	if err != nil {
		return nil, err
	}
	var resList = make(v1.ResourceList)
	if spec.ResourceCPU != "" {
		resList[v1.ResourceCPU] = resource.MustParse(spec.ResourceCPU)
	}
	if spec.ResourceMemory != "" {
		resList[v1.ResourceMemory] = resource.MustParse(spec.ResourceMemory)
	}
	if spec.ResourceConfigMaps != "" {
		resList[v1.ResourceConfigMaps] = resource.MustParse(spec.ResourceConfigMaps)
	}
	if spec.ResourcePVC != "" {
		resList[v1.ResourcePersistentVolumeClaims] = resource.MustParse(spec.ResourcePVC)
	}
	if spec.ResourcePods != "" {
		resList[v1.ResourcePods] = resource.MustParse(spec.ResourcePods)
	}
	if spec.ResourceServices != "" {
		resList[v1.ResourceServices] = resource.MustParse(spec.ResourceServices)
	}
	if spec.ResourceSecrets != "" {
		resList[v1.ResourceSecrets] = resource.MustParse(spec.ResourceSecrets)
	}
	if spec.ResourceStorage != "" {
		resList[v1.ResourceStorage] = resource.MustParse(spec.ResourceStorage)
	}
	if spec.ResourceEphemeralStorage != "" {
		resList[v1.ResourceEphemeralStorage] = resource.MustParse(spec.ResourceEphemeralStorage)
	}
	if spec.Tenant == "" {
		spec.Tenant = tenant
	}
	if spec.NameSpace == "" {
		spec.NameSpace = namespace
	}
	if spec.Name == "" {
		err := errors.New("empty resource-quota name error")
		return nil, err
	}
	resQuota, err := client.CoreV1().ResourceQuotasWithMultiTenancy(namespace, ns.Tenant).Create(&v1.ResourceQuota{
		TypeMeta: metaV1.TypeMeta{},
		ObjectMeta: metaV1.ObjectMeta{
			Name:      spec.Name,
			Tenant:    spec.Tenant,
			Namespace: spec.NameSpace,
		},
		Spec: v1.ResourceQuotaSpec{
			Hard:          resList,
			Scopes:        nil,
			ScopeSelector: nil,
		},
		Status: v1.ResourceQuotaStatus{},
	})
	if err != nil {
		return nil, err
	}
	return resQuota, nil
}

// DeleteResourceQuota deletes the named resource quota, propagating any error.
func DeleteResourceQuota(client k8sClient.Interface, namespace string, tenant string, name string) error {
	if tenant == "" {
		tenant = "default"
	}
	ns, err := client.CoreV1().NamespacesWithMultiTenancy(tenant).Get(namespace, metaV1.GetOptions{})
	if err != nil {
		return err
	}
	err = client.CoreV1().ResourceQuotasWithMultiTenancy(namespace, ns.Tenant).Delete(name, &metaV1.DeleteOptions{})
	if err != nil {
		return err
	}
	return nil
}

func GetResourceQuotaLists(client k8sClient.Interface, namespace string, tenant string) (*ResourceQuotaDetailList, error) {
	if tenant == "" {
		tenant = "default"
	}
	ns, err := client.CoreV1().NamespacesWithMultiTenancy(tenant).Get(namespace, metaV1.GetOptions{})
	if err != nil {
		return nil, err
	}
	list, err := client.CoreV1().ResourceQuotasWithMultiTenancy(namespace, ns.Tenant).List(metaV1.ListOptions{})
	if err != nil {
		return nil, err
	}
	result := &ResourceQuotaDetailList{
		Items:    make([]ResourceQuotaDetail, 0),
		ListMeta: api.ListMeta{TotalItems: len(list.Items)},
	}
	for _, item := range list.Items {
		detail := ToResourceQuotaDetail(&item)
		result.Items = append(result.Items, *detail)
	}
	return result, nil
}

func GetResourceQuotaDetails(client k8sClient.Interface, namespace string, tenant string, name string) (*ResourceQuotaDetail, error) {
	if tenant == "" {
		tenant = "default"
	}
	ns, err := client.CoreV1().NamespacesWithMultiTenancy(tenant).Get(namespace, metaV1.GetOptions{})
	if err != nil {
		return nil, err
	}
	list, err := client.CoreV1().ResourceQuotasWithMultiTenancy(namespace, ns.Tenant).List(metaV1.ListOptions{})
	if err != nil {
		return nil, err
	}
	itemNew := new(ResourceQuotaDetail)
	for _, item := range list.Items {
		if name == item.GetName() {
			detail := ToResourceQuotaDetail(&item)
			itemNew = detail
		}
	}
	return itemNew, nil
}

type ResourceQuotaCell ResourceQuota

func (self ResourceQuotaCell) GetProperty(name dataselect.PropertyName) dataselect.ComparableValue {
	switch name {
	case dataselect.NameProperty:
		return dataselect.StdComparableString(self.ObjectMeta.Name)
	case dataselect.CreationTimestampProperty:
		return dataselect.StdComparableTime(self.ObjectMeta.CreationTimestamp.Time)
	case dataselect.NamespaceProperty:
		return dataselect.StdComparableString(self.ObjectMeta.Namespace)
	default:
		// if name is not supported then just return a constant dummy value, sort will have no effect.
		return nil
	}
}

func toCells(std []ResourceQuota) []dataselect.DataCell {
	cells := make([]dataselect.DataCell, len(std))
	for i := range std {
		cells[i] = ResourceQuotaCell(std[i])
	}
	return cells
}

func fromCells(cells []dataselect.DataCell) []ResourceQuota {
	std := make([]ResourceQuota, len(cells))
	for i := range std {
		std[i] = ResourceQuota(cells[i].(ResourceQuotaCell))
	}
	return std
}

func GetResourceQuotaDetail(client k8sClient.Interface, namespace string, name string) (*ResourceQuotaDetail, error) {
	rawObject, err := client.CoreV1().ResourceQuotas(namespace).Get(name, metaV1.GetOptions{})
	if err != nil {
		return nil, err
	}
	cr := toResourceQuotaDetail(*rawObject)
	return &cr, nil
}

func ToResourceQuotaDetail(rawResourceQuota *v1.ResourceQuota) *ResourceQuotaDetail {
	statusList := make(map[v1.ResourceName]ResourceStatus)
	for key, value := range rawResourceQuota.Status.Hard {
		used := rawResourceQuota.Status.Used[key]
		statusList[key] = ResourceStatus{
			Used: used.String(),
			Hard: value.String(),
		}
	}
	return &ResourceQuotaDetail{
		ObjectMeta: api.NewObjectMeta(rawResourceQuota.ObjectMeta),
		TypeMeta:   api.NewTypeMeta(api.ResourceKindResourceQuota),
		Scopes:     rawResourceQuota.Spec.Scopes,
		StatusList: statusList,
	}
}

func toResourceQuotaDetail(cr v1.ResourceQuota) ResourceQuotaDetail {
	return ResourceQuotaDetail{
		ResourceQuota: toResourceQuota(cr),
		Errors:        []error{},
	}
}
package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"

	"gopkg.in/alecthomas/kingpin.v2"
)

const version = "0.1.0"

func main() {
	app := kingpin.New(
		"remotePlugin",
		"remote part of the wattsPluginMyproxySSH",
	)
	wattsUID := app.Arg("WaTTS User ID", "").Required().String()
	host := app.Arg("Host", "").Required().String()
	password := app.Arg("Password", "").Required().String()

	app.Author("<NAME>")
	app.Version(version)
	kingpin.MustParse(app.Parse(os.Args[1:]))

	cmd := exec.Command("myproxy-logon", "-l", *wattsUID, "-s", *host, "-S", "-o", "-")
	cmd.Stdin = strings.NewReader(*password)
	output, err := cmd.CombinedOutput()
	kingpin.FatalIfError(err, string(output))
	fmt.Println(string(output))
}
//! Implements simple serialization and deserialization of structures, as close
//! as possible to their in-memory representation.
//!
//! # Example
//! Reading the header of a GFF file (a Bioware format used to store data in
//! games such as Neverwinter Nights, Neverwinter Nights 2, and The Witcher):
//! ```rust
//! # extern crate byteorder;
//! # #[macro_use]
//! # extern crate serde_derive;
//! # extern crate serde_pod;
//! # use serde_pod::{from_bytes, Result};
//! #[derive(Debug, Deserialize, PartialEq)]
//! struct Signature([u8; 4]);
//!
//! #[derive(Debug, Deserialize, PartialEq)]
//! struct Version([u8; 4]);
//!
//! #[derive(Debug, Deserialize, PartialEq)]
//! struct Section {
//!   offset: u32,
//!   count: u32,
//! }
//! #[derive(Debug, Deserialize, PartialEq)]
//! struct GffHeader {
//!   signature: Signature,
//!   version: Version,
//!   structs: Section,
//!   fields: Section,
//!   labels: Section,
//!   field_data: Section,
//!   field_indices: Section,
//!   list_indices: Section,
//! }
//!
//! # fn main() -> Result<()> {
//! let header: GffHeader = from_bytes::<byteorder::LE, _>(&[
//!   // Signature
//!   0x47, 0x55, 0x49, 0x20,
//!   // Version
//!   0x56, 0x33, 0x2E, 0x32,
//!   // structs
//!   0x38, 0x00, 0x00, 0x00, 0x0F, 0x00, 0x00, 0x00,
//!   // fields
//!   0xEC, 0x00, 0x00, 0x00, 0x93, 0x00, 0x00, 0x00,
//!   // labels
//!   0xD0, 0x07, 0x00, 0x00, 0x1A, 0x00, 0x00, 0x00,
//!   // field_data
//!   0x70, 0x09, 0x00, 0x00, 0x1D, 0x02, 0x00, 0x00,
//!   // field_indices
//!   0x8D, 0x0B, 0x00, 0x00, 0x4C, 0x02, 0x00, 0x00,
//!   // list_indices
//!   0xD9, 0x0D, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00,
//! ])?;
//!
//! assert_eq!(header, GffHeader {
//!   signature: Signature(*b"GUI "),
//!   version: Version(*b"V3.2"),
//!   structs: Section { offset: 0x38, count: 15 },
//!   fields: Section { offset: 0xEC, count: 147 },
//!   labels: Section { offset: 0x07D0, count: 26 },
//!   field_data: Section { offset: 0x0970, count: 541 },
//!   field_indices: Section { offset: 0x0B8D, count: 588 },
//!   list_indices: Section { offset: 0x0DD9, count: 36 },
//! });
//! # Ok(())
//! # }
//! ```
#![deny(missing_docs)]

extern crate serde;
extern crate byteorder;
#[cfg(test)]
#[macro_use]
extern crate serde_derive;
#[cfg(test)]
#[macro_use]
extern crate quickcheck;

use byteorder::{BE, LE};

pub mod error;
pub mod ser;
pub mod de;

/// Serializer that writes numbers to the stream in `Big-Endian` order
pub type BESerializer<W> = ser::Serializer<BE, W>;
/// Serializer that writes numbers to the stream in `Little-Endian` order
pub type LESerializer<W> = ser::Serializer<LE, W>;

/// Deserializer that reads numbers from the stream in `Big-Endian` order
pub type BEDeserializer<R> = de::Deserializer<BE, R>;
/// Deserializer that reads numbers from the stream in `Little-Endian` order
pub type LEDeserializer<R> = de::Deserializer<LE, R>;

pub use error::{Error, Result};
pub use ser::{to_vec, to_writer};
pub use de::from_bytes;
import React, { forwardRef, useImperativeHandle, useState } from 'react';
import { InteractionManager, StyleSheet, Text, View } from 'react-native';
import Modal from 'react-native-modal';
import { connect } from 'react-redux';

import I18n from '../../i18n';
import Button from '../../containers/Button';
import TextInput from '../../containers/TextInput';
import RocketChat from '../../lib/rocketchat';
import sharedStyles from '../Styles';
import { themes } from '../../constants/colors';
import { IApplicationState } from '../../definitions';

const styles = StyleSheet.create({
	container: {
		flex: 1,
		justifyContent: 'center',
		alignItems: 'center'
	},
	content: {
		padding: 16,
		width: '100%',
		borderRadius: 4
	},
	title: {
		fontSize: 16,
		paddingBottom: 8,
		...sharedStyles.textBold,
		...sharedStyles.textAlignCenter
	},
	button: {
		minWidth: 96,
		marginBottom: 0
	},
	buttonContainer: {
		flexDirection: 'row',
		justifyContent: 'space-between'
	},
	tablet: {
		height: undefined
	}
});

export interface IJoinCodeProps {
	rid: string;
	t: string;
	onJoin: Function;
	isMasterDetail: boolean;
	theme: string;
}

const JoinCode = React.memo(
	forwardRef(({ rid, t, onJoin, isMasterDetail, theme }: IJoinCodeProps, ref) => {
		const [visible, setVisible] = useState(false);
		const [error, setError] = useState(false);
		const [code, setCode] = useState('');

		const show = () => setVisible(true);
		const hide = () => setVisible(false);

		const joinRoom = async () => {
			try {
				await RocketChat.joinRoom(rid, code, t as any);
				onJoin();
				hide();
			} catch (e) {
				setError(true);
			}
		};

		useImperativeHandle(ref, () => ({ show }));

		return (
			<Modal avoidKeyboard useNativeDriver isVisible={visible} hideModalContentWhileAnimating>
				<View style={styles.container} testID='join-code'>
					<View
						style={[
							styles.content,
							isMasterDetail && [sharedStyles.modalFormSheet, styles.tablet],
							{ backgroundColor: themes[theme].backgroundColor }
						]}>
						<Text style={[styles.title, { color: themes[theme].titleText }]}>{I18n.t('Insert_Join_Code')}</Text>
						<TextInput
							value={code}
							theme={theme}
							// TODO: find a way to type this ref
							inputRef={(e: any) => InteractionManager.runAfterInteractions(() => e?.getNativeRef()?.focus())}
							returnKeyType='send'
							autoCapitalize='none'
							onChangeText={setCode}
							onSubmitEditing={joinRoom}
							placeholder={I18n.t('Join_Code')}
							secureTextEntry
							error={error ? { error: 'error-code-invalid', reason: I18n.t('Code_or_password_invalid') } : undefined}
							testID='join-code-input'
						/>
						<View style={styles.buttonContainer}>
							<Button
								title={I18n.t('Cancel')}
								type='secondary'
								style={styles.button}
								backgroundColor={themes[theme].chatComponentBackground}
								theme={theme}
								testID='join-code-cancel'
								onPress={hide}
							/>
							<Button
								title={I18n.t('Join')}
								type='primary'
								style={styles.button}
								theme={theme}
								testID='join-code-submit'
								onPress={joinRoom}
							/>
						</View>
					</View>
				</View>
			</Modal>
		);
	})
);

const mapStateToProps = (state: IApplicationState) => ({
	isMasterDetail: state.app.isMasterDetail
});

export default connect(mapStateToProps, null, null, { forwardRef: true })(JoinCode);
{-# LANGUAGE OverloadedStrings #-}

module Main where

import Prelude()
import Relude

import qualified Data.Text as T
import qualified Chronos as Ch
import qualified Data.CaseInsensitive as CI
import qualified Network.HTTP.Types as H
import qualified Network.Wai as W
import qualified Options.Applicative as O
import qualified Network.Wai.Handler.Warp as Warp

main :: IO ()
main = do
  port <- O.execParser opts
  putTextLn $ "Listen on port: " <> show port
  Warp.run port . reqLogMiddleware $ \_req respond ->
    respond $ W.responseLBS H.status200 [] ""

-- TODO: pretty printer / color
-- query
-- with co-log
reqLogMiddleware :: W.Middleware
reqLogMiddleware app req respond = do
  putTextLn $ T.replicate 60 "-"
  putTextLn =<< encodeTime <$> Ch.now
  putBSLn $ W.requestMethod req <> " " <> W.rawPathInfo req <> " " <> show (W.httpVersion req)
  for_ (W.requestHeaders req) $ \(name, value) ->
    putBSLn $ CI.original name <> ": " <> value
  app req respond

opt :: O.Parser Int
opt = O.option O.auto
    $ O.short 'p'
   <> O.long "port"
   <> O.value 8080
   <> O.showDefault
   <> O.help "Port number"
   <> O.metavar "PORT"

opts :: O.ParserInfo Int
opts = O.info (opt <**> O.helper)
     $ O.fullDesc
    <> O.header "rin - HTTP server for dev/debug"

-- e.g. 2019-08-17T03:33:51.99+0900
encodeTime :: Ch.Time -> Text
encodeTime time = Ch.encode_YmdHMSz offsetFormat subsecondPrecision Ch.w3c offsetDatetime
  where
    jstOffset = Ch.Offset (9 * 60)
    offsetDatetime = Ch.timeToOffsetDatetime jstOffset time
    offsetFormat = Ch.OffsetFormatColonOff
    subsecondPrecision = Ch.SubsecondPrecisionFixed 2
With taxpayers continuing to throng the GST Network (GSTN) portal over the last few days, finance minister Arun Jaitley on Wednesday urged the assessees to avoid a last-minute rush for tax filing. Nearly 75% of the eligible taxpayers were still to file the summarised return (GSTR 3B) for August on the last day.

Jaitley said GSTN, which operates the IT backbone for the new indirect tax regime, had no difficulty in uploading returns till Tuesday night, but if 75% of businesses rush to the portal on a single day, there would be trouble. "Today is the last day to pay taxes for August. Till last night, about 25% people had filed the return and paid their taxes. So, 75% waited for the last day," he said.

Jaitley added that the GST Network (GSTN) has the capacity to handle 1 lakh returns per hour, which translates to 24 lakh returns in a day. As FE had reported earlier, only about 36 lakh invoices had been uploaded a week before the deadline for filing full-fledged returns for July, the deadline for which was September 4. This was only about 12% of the expected invoice uploads, and meant that taxpayers would have to upload over 2.50 crore invoices in the last week leading up to the deadline. The heavy rush in subsequent days prevented assessees from filing returns, forcing the GST Council to extend the dates of filing invoice-wise detailed returns for July by a month.

Since the businesses have a broad idea about the taxes to be paid, they should start filing returns by the 14th or 15th of the next month to have an easy entry into GSTN systems, Jaitley said. "Therefore, I would appeal to everybody, it is in their interest (to file returns early)," he added.

Although GSTN suffers from several technical glitches causing inconvenience to taxpayers, their tendency to wait for the last few days to file returns has further aggravated the situation. In its 21st meeting earlier this month, the GST Council constituted a group of ministers to monitor and resolve the glitches in the network. The GoM, headed by Bihar deputy chief minister Sushil Modi, met earlier this week, and said most problems afflicting GSTN would be resolved by the end of October.

To ease the compliance burden, the GST Council has allowed businesses to file their initial tax returns in form GSTR-3B in the first six months of the GST rollout till December. Accordingly, the GSTR-3B returns have to be filed by the 20th of the next month, which means August returns need to be uploaded by September 20. Over 47 lakh GSTR-3B returns were filed for July and the tally is expected to be similar for August. Taxes to the tune of Rs 95,000 crore were collected in the first month of the rollout. Till noon on Wednesday, only 18.41 lakh businesses had filed returns for August.
package com.hpedrorodrigues.imagesearch.data.event_tracker;

import com.hpedrorodrigues.imagesearch.data.remote.api.Api;

import javax.inject.Inject;

public class EventTracker {

    @Inject
    public FabricAnswer answer;

    @Inject
    public AnalyticsTracker tracker;

    @Inject
    public EventTracker() {
    }

    public void logActivityScreen(final String screenName) {
        answer.logActivityScreen(screenName);
        tracker.trackScreen(screenName);
    }

    public void logFragmentScreen(final String screenName) {
        answer.logFragmentScreen(screenName);
        tracker.trackScreen(screenName);
    }

    public void track(final String message) {
        answer.log(message);
        tracker.track(message);
    }

    public void track(final String message, final Object value) {
        answer.log(message, value);
        tracker.track(message, value);
    }

    public void trackShare(final String message) {
        answer.logShare(message);
        tracker.trackShare(message);
    }

    public void trackSearch(final Api api, final String query, final int page) {
        answer.logSearch(api, query, page);
        tracker.trackSearch(api, query, page);
    }
}
import LayerStyle from '../../layers/layer-style';
import { Viewport2d } from '../../viewport';

export class Shape {
  public visible: boolean;
  public style: LayerStyle;
  public role: string;

  private _className: string;

  constructor(className: string) {
    this.visible = true;
    this.style = null;
    this.role = null;
    this.setClassName(className);
  }

  public accept(visitor) {
    return visitor(this);
  }

  public draw(viewer: Viewport2d) {
    return undefined;
  }

  protected drawSelf(viewer: Viewport2d) {
    return undefined;
  }

  public translate(dx, dy) {
    return undefined;
  }

  protected translateSelf(dx, dy) {
    return undefined;
  }

  public get className(): string {
    return this._className;
  }

  protected setClassName(className: string) {
    this._className = className;
  }
}
import cors from "cors";
import express from "express";
import morgan from "morgan";
import { json, urlencoded } from "body-parser";

import { logInUser, signUpUser } from "./controllers/auth.controllers";
import { protectRoutes } from "./middleware/auth.middleware";
import { connect } from "./database/database";
import userRouter from "./routers/user.router";
import config from "./config";

export const app = express();

app.disable("x-powered-by");

app.use(cors());
app.use(json());
app.use(urlencoded({ extended: true }));
app.use(morgan("dev"));

app.post("/signup", signUpUser);
app.post("/login", logInUser);

app.use("/api", protectRoutes);
app.use("/api/user", userRouter);

export const start = async () => {
  try {
    await connect();
    app.listen(config.app.port, () => {
      console.log(
        `Server listens on http://${config.app.host}:${config.app.port}/`
      );
    });
  } catch (error) {
    console.error(error);
  }
};
x1, x2, x3, x4 = input().split()
x1 = int(x1)
x2 = int(x2)
x3 = int(x3)
x4 = int(x4)
s = input()
n1 = s.count('1')
n2 = s.count('2')
n3 = s.count('3')
n4 = s.count('4')
print(n1 * x1 + n2 * x2 + n3 * x3 + n4 * x4)
/**
 * GlobalPlan for a query applied to all the pruned segments.
 */
public class GlobalPlanImplV0 extends Plan {
    private static final Logger LOGGER = LoggerFactory.getLogger(UResultOperator.class);

    private InstanceResponsePlanNode _rootNode;
    private DataTable _instanceResponseDataTable;

    public GlobalPlanImplV0(InstanceResponsePlanNode rootNode) {
        _rootNode = rootNode;
    }

    @Override
    public void print() {
        _rootNode.showTree("");
    }

    @Override
    public PlanNode getRoot() {
        return _rootNode;
    }

    @Override
    public void execute() {
        long startTime = System.currentTimeMillis();
        PlanNode root = getRoot();
        UResultOperator operator = (UResultOperator) root.run();
        long endTime1 = System.currentTimeMillis();
        LOGGER.info("InstanceResponsePlanNode.run took:" + (endTime1 - startTime));
        InstanceResponseBlock instanceResponseBlock = (InstanceResponseBlock) operator.nextBlock();
        long endTime2 = System.currentTimeMillis();
        LOGGER.info("UResultOperator took :" + (endTime2 - endTime1));
        _instanceResponseDataTable = instanceResponseBlock.getInstanceResponseDataTable();
        long endTime3 = System.currentTimeMillis();
        LOGGER.info("Converting InstanceResponseBlock to DataTable took :" + (endTime3 - endTime2));
        long endTime = System.currentTimeMillis();
        _instanceResponseDataTable.getMetadata().put("timeUsedMs", "" + (endTime - startTime));
    }

    @Override
    public DataTable getInstanceResponse() {
        return _instanceResponseDataTable;
    }
}
// Copyright 2015 <NAME>. All rights reserved.
// Use of this source code is governed by The MIT License
// found in the accompanying LICENSE file.

package lg15

import (
	"fmt"
	"strings"

	"gopkg.in/inconshreveable/log15.v2"
)

var Log = lgr{log15.New("lib", "ora")}

type lgr struct {
	log15.Logger
}

func (s lgr) Infof(format string, args ...interface{}) {
	s.Debug(fmt.Sprintf(format, args...))
}

func (s lgr) Infoln(args ...interface{}) {
	s.Debug(strings.Join(asStrings(args...), " "))
}

func (s lgr) Errorf(format string, args ...interface{}) {
	s.Error(fmt.Sprintf(format, args...))
}

func (s lgr) Errorln(args ...interface{}) {
	s.Error(strings.Join(asStrings(args...), " "))
}

func asStrings(args ...interface{}) []string {
	arr := make([]string, len(args))
	for i, a := range args {
		if s, ok := a.(string); ok {
			arr[i] = s
			continue
		}
		if s, ok := a.(fmt.Stringer); ok {
			arr[i] = s.String()
			continue
		}
		arr[i] = fmt.Sprintf("%v", a)
	}
	return arr
}
"""This module keeps a class to create plots with IMU data and annotations. The annotations are kept in a separate class."""
import numpy as np
import pandas as pd

from mad_gui.models.local import PlotData
from mad_gui.models.ui_state import MODES
from mad_gui.plot_tools.labels.base_label import BaseRegionLabel
from mad_gui.plot_tools.plots.base_plot import BasePlot
from mad_gui.plot_tools.plots.sensor_plot import SensorPlotState
from mad_gui.plot_tools.plots.sensor_plot_mode_handler import BaseModeHandler, InvestigateModeHandler
from mad_gui.plot_tools.plots.video_plot_mode_handler import SyncModeHandler
from mad_gui.state_keeper import StateKeeper

from typing import Dict, List, Optional, Type


class VideoPlot(BasePlot):
    """A graph of this class will be shown in the main window in order to synchronize video data with sensor data."""

    MODE_HANDLER: Dict[MODES, Type[BaseModeHandler]] = {
        "investigate": InvestigateModeHandler,
        "edit": InvestigateModeHandler,
        "remove": InvestigateModeHandler,
        "add": InvestigateModeHandler,
        "sync": SyncModeHandler,
    }

    def __init__(self, parent=None, video_window=None):
        plot_data = PlotData()
        # following two parts are necessary when initializing a plot
        plot_data.data = pd.DataFrame(data=[], columns=["time"])
        plot_data.annotations = {}
        super().__init__(plot_data=plot_data, label_classes=[BaseRegionLabel], parent=parent)
        StateKeeper.video_duration_available.connect(self.update_video_duration)
        self.state = SensorPlotState()
        self.mode_handler = InvestigateModeHandler(self)
        self.state.bind(self._change_mode, "mode", initial_set=False)
        self.video_window = video_window

    def move_video_cursor_line(self, percent_since_start: float):
        self.video_cursor_line.setValue(self.video_window.slider.value() / 1000)

    def _change_mode(self, new_mode: MODES):
        """Adapt tool tip text depending on mode and remove potentially plotted green line indicating a new event.

        Parameters
        ----------
        new_mode
            One of `add`, `edit`, `remove`, or `investigate`
        """
        # Deactivate old mode_handler:
        self.mode_handler.deactivate()
        self.mode_handler = self.MODE_HANDLER[new_mode](self)
        self._set_tooltip(new_mode)

    def distribute_video_sync(self):
        # Better: we should bind the sync items region to the sync of the VideoWindow
        self.video_window.start = self.sync_item.getRegion()[0] * 1000  # needs it in milli-seconds
        self.video_window.end = self.sync_item.getRegion()[1] * 1000

    def _set_tooltip(self, mode: MODES):
        tips = {
            "sync": "Move the lines such that\n - the green lines indicate moments at which all data streams are at "
        }
        tooltip = tips.get(mode, None)
        self.setToolTip(tooltip)

    def update_video_duration(self, length_seconds: float, fps: float):
        percentage = np.asarray([float(n) for n in range(0, 101)])
        x_values = percentage / 100 * length_seconds
        self.set_data(x=x_values, y=np.zeros(len(x_values)), fps=fps)

    def set_data(self, x: List, y: List, fps: Optional[float] = 1):
        self.plot(x=x / 1000, y=y)
        self.plot_data = PlotData()
        self.plot_data.sampling_rate_hz = fps
        self.plot_data.data = y
        ax_bottom = self.getAxis("bottom")
        if fps != 1:
            ax_bottom.setLabel(text="time [seconds]")
        else:
            ax_bottom.setLabel(text="time [samples]")

    def add_sync_item(self):
        # just make sure we have the correct sampling frequency and video duration
        self.update_video_duration(self.video_window.player.duration(), self.video_window.fps)
        super().add_sync_item()
// Findset: return the representative element of ele's set.
// O(1) complexity, since each node keeps a direct pointer to its representative.
int findset(int ele, node **adr)
{
    node *temp = adr[ele];
    return (temp->rep)->data;
}
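/*
 * A minimal sketch of the node type findset() appears to assume; only the
 * `data` and `rep` fields are evidenced above, so the exact layout is
 * hypothetical. In a real translation unit this definition would precede
 * findset():
 *
 *   typedef struct node {
 *       int data;          // element value
 *       struct node *rep;  // direct pointer to the set's representative
 *   } node;
 *
 * Keeping every node's `rep` aimed straight at the representative (i.e.
 * full path compression maintained on union) is what makes findset O(1):
 *
 *   int r = findset(3, adr);  // representative of the set containing 3
 */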
def parseSite(self): print('Beginning MangaFox check: %s' % self.manga) url = 'http://mangafox.me/manga/%s/' % self.fixFormatting( self.manga ) if self.verbose_FLAG: print(url) source, redirectURL = getSourceCode(url, self.proxy, True) if (redirectURL != url or source is None or 'the page you have requested cannot be found' in source): url = 'http://mangafox.me/search.php?name_method=bw&name=%s&is_completed=&advopts=1' % '+'.join(self.manga.split()) if self.verbose_FLAG: print(url) try: source = getSourceCode(url, self.proxy) seriesResults = [] if source is not None: seriesResults = MangaFox.re_getSeries.findall(source) if ( 0 == len(seriesResults) ): url = 'http://mangafox.me/search.php?name_method=cw&name=%s&is_completed=&advopts=1' % '+'.join(self.manga.split()) if self.verbose_FLAG: print(url) source = getSourceCode(url, self.proxy) if source is not None: seriesResults = MangaFox.re_getSeries.findall(source) except AttributeError: raise self.MangaNotFound('It doesn\'t exist, or cannot be resolved by autocorrect.') else: keyword = self.selectFromResults(seriesResults) if self.verbose_FLAG: print ("Keyword: %s" % keyword) url = 'http://mangafox.me/manga/%s/' % keyword if self.verbose_FLAG: print ("URL: %s" % url) source = getSourceCode(url, self.proxy) if (source is None): raise self.MangaNotFound('Search Failed to find Manga.') else: keyword = self.fixFormatting( self.manga ) if self.verbose_FLAG: print ("Keyword: %s" % keyword) if('it is not available in Manga Fox.' in source): raise self.MangaNotFound('It has been removed.') isChapterOnly = False re_getChapters = re.compile('a href="http://.*?mangafox.*?/manga/%s/(v[\d|(TBD)]+)/(c[\d\.]+)/[^"]*?" title' % keyword) self.chapters = re_getChapters.findall(source) if not self.chapters: if self.verbose_FLAG: print ("Trying chapter only regex") isChapterOnly = True re_getChapters = re.compile('a href="http://.*?mangafox.*?/manga/%s/(c[\d\.]+)/[^"]*?" title' % keyword) self.chapters = re_getChapters.findall(source) self.chapters.reverse() lowerRange = 0 if isChapterOnly: for i in range(0, len(self.chapters)): if self.verbose_FLAG: print("%s" % self.chapters[i]) if (not self.auto): print('(%i) %s' % (i + 1, self.chapters[i])) else: if (self.lastDownloaded == self.chapters[i]): lowerRange = i + 1 self.chapters[i] = ('http://mangafox.me/manga/%s/%s' % (keyword, self.chapters[i]), self.chapters[i], self.chapters[i]) else: for i in range(0, len(self.chapters)): if self.verbose_FLAG: print("%s %s" % (self.chapters[i][0], self.chapters[i][1])) self.chapters[i] = ('http://mangafox.me/manga/%s/%s/%s' % (keyword, self.chapters[i][0], self.chapters[i][1]), self.chapters[i][0] + "." + self.chapters[i][1], self.chapters[i][1]) if (not self.auto): print('(%i) %s' % (i + 1, self.chapters[i][1])) else: if (self.lastDownloaded == self.chapters[i][1]): lowerRange = i + 1 upperRange = len(self.chapters) if (not self.auto): self.chapters_to_download = self.selectChapters(self.chapters) else: if ( lowerRange == upperRange): raise self.NoUpdates for i in range (lowerRange, upperRange): self.chapters_to_download.append(i) return
Mayor Gregor Robertson has announced Vancouver will move forward on taxing empty homes as the affordability crisis grows larger and rental vacancy rates shrink to unprecedented lows.

"Vancouver housing is first and foremost for homes, not a commodity to make money with," he said. "We need a tax on empty homes to encourage the best use of all our housing, and help boost our rental supply at a time when there's almost no vacancy and a real crunch on affordability."

Unclear how much the tax will be, but Robertson says it must be high enough to deter people sitting on empty homes. —@FarrahMerali

A preliminary report recommends Vancouver work in concert with the B.C. government to come up with a scheme to tax empty homes, but it says "the City is prepared to take action on its own in absence of provincial response."

Robertson said he wrote Premier Christy Clark last year asking for the province to support a speculation tax but hasn't heard back.

The city's report states the preferred option is for the provincial government to create and administer a new class of "residential vacant" property through BC Assessment. The designation would trigger the city to charge extra taxes on empty or under-occupied investment properties.

Thank you @CityofVancouver for your thoughtful work on housing vacancy. We are reviewing your report and will respond quickly. —@christyclarkbc

The second option is for the city to establish a new business tax for empty and under-occupied homes held as investment properties.

Premier Clark responded on Twitter, saying, "We are reviewing your report and will respond quickly."

The current rental vacancy rate in Vancouver is 0.6 per cent. A city-commissioned report in March estimated there are 10,800 homes and condos sitting empty in Vancouver, though the rate of empty homes has remained flat over the past 14 years and is in line with other Canadian cities.

It's thought a high percentage of the homes that do remain empty are owned by foreign investors who do not live in or rent the units.

"We'll continue to pursue all possible options at city hall to create opportunities for people struggling to find home in Vancouver," said Robertson.

With files from Farrah Merali
/** * Encodes the given bytes based on this tree's structure, writing the * resulting bits to the given output stream. * * @throws IOException If there is a problem writing to stream. */ public void encode(byte[] bytes, BitWriter out) throws IOException { if (this.root == null) { return; } Map<Byte, List<Boolean>> dict = new HashMap<Byte, List<Boolean>>(); loadPaths(dict, this.root, new ArrayDeque<Boolean>()); for (byte b : bytes) { List<Boolean> path = dict.get(b); for (boolean bit : path) { out.write(bit); } } }
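/**
 * A hedged sketch of the loadPaths helper that encode() above relies on.
 * The node type and its field names (HuffmanNode, left, right, value) are
 * assumptions, not taken from the source; java.util.ArrayList is also
 * assumed imported. It walks the tree, recording the bit path
 * (false = left, true = right) down to each leaf's byte.
 */
private void loadPaths(Map<Byte, List<Boolean>> dict, HuffmanNode node, Deque<Boolean> path) {
	if (node.left == null && node.right == null) { // leaf: snapshot the current path
		dict.put(node.value, new ArrayList<>(path));
		return;
	}
	path.addLast(false);
	loadPaths(dict, node.left, path);
	path.removeLast();
	path.addLast(true);
	loadPaths(dict, node.right, path);
	path.removeLast();
}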
Structure-Based Mutagenesis of the Human Immunodeficiency Virus Type 1 DNA Attachment Site: Effects on Integration and cDNA Synthesis ABSTRACT Sequences at the ends of linear retroviral cDNA important for integration define the viral DNA attachment (att) site. Whereas determinants of human immunodeficiency virus type 1 (HIV-1) integrase important for replication in T lymphocytes have been extensively characterized, regions of the att site important for viral spread have not been thoroughly examined. Previous transposon-mediated footprinting of preintegration complexes isolated from infected cells revealed enhanced regions of bacteriophage Mu insertion near the ends of HIV-1 cDNA, in the regions of the att sites. Here, we identified the subterminal cDNA sequences cleaved during in vitro footprinting and used this structure-based information together with results of previous work to construct and characterize 24 att site mutant viruses. We found that although subterminal cDNA sequences contributed to HIV-1 replication, the identities of these bases were not critical for integration. In contrast, the phylogenetically conserved CA dinucleotides located at the ends of HIV-1 contributed significantly to virus replication and integration. Mutants containing one intact CA end displayed delays in peak virus growth compared to the wild type. In contrast, double mutant viruses lacking both CAs were replication defective. The A of the CA appeared to be the most critical determinant of integration, because two different U5 mutant viruses containing the substitution of TG for CA partially reverted by changing the G back to A. We also identified a U5 deletion mutant in which the CA played a crucial role in reverse transcription.
<gh_stars>10-100
# Copyright (c) 2019 Ezybaas by <NAME>.
# CTO @ Susthitsoft Technologies Private Limited.
# All rights reserved.
# Please see the LICENSE.txt included as part of this package.

from . import config
from . import constants
from . import utils
from . import baas
from .models import MetaModel, App, DbSettings, Config, Table, Version
from .serializers import MetaModelSerializer, AppSerializer

from django.http import HttpResponse
from django.shortcuts import render
from django.http import Http404
from django.contrib import messages  # Messages
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import PasswordChangeForm
from django.core import management

# Auth
from rest_framework import permissions
from rest_framework.permissions import IsAuthenticated  # For checking user permission
from django.contrib.auth.mixins import UserPassesTestMixin
from django.views.generic import (ListView, TemplateView, View)
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import (redirect, render)
from django.utils.translation import gettext as _
from django.urls import reverse

# ASPER: https://stackoverflow.com/questions/12003736/django-login-required-decorator-for-a-superuser
# ASPER: https://docs.djangoproject.com/en/dev/topics/auth/default/#django.contrib.auth.decorators.user_passes_test
from django.contrib.auth.decorators import user_passes_test

import requests

from .api import *


class IsSuperUserMixin(UserPassesTestMixin):
    def test_func(self):
        return self.request.user.is_superuser


class AppsPageView(IsSuperUserMixin, TemplateView):
    template_name = 'ezybaas/apps.html'

    def get_context_data(self, **kwargs):
        context = get_dashboard_context(self.request)
        return context

    '''
    def get(self, request, **kwargs):
        try:
            context = get_dashboard_context()
            return render(request, 'ezybaas/apps.html', context={'login_error': error})
        except Exception as e:
            error = _("Error occurred! Cause: ") + str(e.__cause__)
            messages.add_message(request, messages.ERROR, error)
            return render(request, 'ezybaas/login.html', context={'login_error': error})
    '''


# Create your views here.
class DashboardPageView(IsSuperUserMixin, TemplateView):
    template_name = 'ezybaas/index.html'

    def get(self, request, **kwargs):
        try:
            if request.user and request.user.is_superuser:
                return index(request)
            else:
                return render(request, 'ezybaas/login.html', context={'login_error': None})
        except Exception as e:
            error = ("Error occurred! Cause: ") + str(e.__cause__)
            messages.add_message(request, messages.ERROR, error)
            return render(request, 'ezybaas/login.html')


class LoginPageView(TemplateView):
    template_name = 'ezybaas/login.html'

    def get(self, request, **kwargs):
        return render(request, 'ezybaas/login.html', context={'login_error': None})

    def post(self, request, **kwargs):
        error = None
        username = request.POST['username']  # request.POST.get("title", "")
        password = request.POST['password']
        next = "index"
        user = authenticate(request, username=username, password=password)
        try:
            if user is not None:
                # the password verified for the user
                if user.is_active:
                    if user.is_superuser:
                        login(request, user)
                        if next:
                            return redirect(next)
                        return index(request)
                        # return render(request, 'home.html', context={'app': AppConfig, 'error': False })
                    else:
                        logout(request)
                        error = _(constants.MSG_USER_NOT_SUPERUSER)
                else:
                    logout(request)
                    error = _(constants.MSG_USER_INACTIVE)
            else:
                error = _(constants.MSG_USER_PWD_INVALID)
        except Exception as e:
            error = _("Error occurred! Cause: ") + str(e.__cause__)
        messages.add_message(request, messages.ERROR, error)
        return render(request, 'ezybaas/login.html', context={'login_error': error})


class LogoutPageView(TemplateView):
    template_name = 'ezybaas/logout.html'

    def get(self, request, **kwargs):
        logout(request)
        warning = _(constants.MSG_USER_LOGOUT)
        messages.add_message(request, messages.INFO, warning)
        return render(request, 'ezybaas/login.html', context={'login_error': warning})


# ASPER - https://stackoverflow.com/questions/1873806/how-to-allow-users-to-change-their-own-passwords-in-django
class ProfilePageView(IsSuperUserMixin, TemplateView):
    template_name = 'ezybaas/profile.html'

    def get(self, request, **kwargs):
        context = get_database_context(request)
        return render(request, 'ezybaas/profile.html', context)

    def post(self, request, **kwargs):
        old_pass = request.POST.get('input-old-password', '')
        new_pass = request.POST.get('input-new-password', '')
        new_pass_again = request.POST.get('input-new-password-again', '')
        try:
            if not request.user.check_password(old_pass):
                messages.add_message(request, messages.ERROR, constants.MSG_PWD_OLD_INVALID)
            elif new_pass != new_pass_again:
                messages.add_message(request, messages.ERROR, constants.MSG_PWD_NOT_MATCHING)
            else:
                request.user.set_password(<PASSWORD>)
                request.user.save()
                update_session_auth_hash(request, request.user)
                messages.add_message(request, messages.INFO, constants.MSG_PWD_CHANGED)
        except Exception as e:
            print('Exception ' + str(e))
        return render(request, 'ezybaas/profile.html')


class DbSettingsPageView(IsSuperUserMixin, TemplateView):
    template_name = 'ezybaas/dbsettings.html'

    def get(self, request, **kwargs):
        context = get_dashboard_context(request)
        return render(request, 'ezybaas/dbsettings.html', context)

    def post(self, request, **kwargs):
        old_pass = request.POST.get('input-old-password', '')
        new_pass = request.POST.get('input-new-password', '')
        new_pass_again = request.POST.get('input-new-password-again', '')
        try:
            if not request.user.check_password(old_pass):
                messages.add_message(request, messages.ERROR, constants.MSG_PWD_OLD_INVALID)
            elif new_pass != new_pass_again:
                messages.add_message(request, messages.ERROR, constants.MSG_PWD_NOT_MATCHING)
            else:
                request.user.set_password(<PASSWORD>)
                request.user.save()
                update_session_auth_hash(request, request.user)
                messages.add_message(request, messages.INFO, constants.MSG_PWD_CHANGED)
        except Exception as e:
            print('Exception ' + str(e))
        return render(request, 'ezybaas/profile.html')
@user_passes_test(lambda u: u.is_superuser) def erd(request): context = get_dashboard_context(request) return render(request, 'ezybaas/erd.html', context) @user_passes_test(lambda u: u.is_superuser) def index(request): context = get_dashboard_context(request) return render(request, 'ezybaas/index.html', context) def wizard(request): context = get_dashboard_context(request) return render(request, 'ezybaas/wizard.html', context) def edit_wizard(request): app = request.GET['app'] context = get_dashboard_context(request) return render(request, 'ezybaas/editWizard.html', context, app) def get_dashboard_context(request): apps = utils.get_apps() models = [] if request.method == 'GET' and 'app' in request.GET: models = utils.get_models(request.GET['app']) # else: # models = utils.get_models(apps[0]) tables = utils.get_tables() fields = MetaModel.objects.filter().values() context = { 'apps' : apps, 'models' : models, 'app_count' : len(apps), 'table_count' : len(tables), 'fields_count' : len(fields), 'app_data' : utils.populate_index_table(), 'mode': config.EZYBAAS_RELEASE_STANDALONE, 'version': Version(), 'ezybaas_version':config.EZYBAAS_RELEASE_VERSION, } return context def authentication(request): context = get_dashboard_context(request) return render(request, 'ezybaas/authentication.html', context) def database(request): fields = [] tables = [] if request.method == 'GET' and 'app' in request.GET: fields = utils.get_fields(request.GET['app']) tables = utils.get_tables(request.GET['app']) else: fields =[] tables = [] context = {'apps':utils.get_apps(), 'tables':tables, 'fields' : fields, 'data' : get_dashboard_context(request)} return render(request, 'ezybaas/database.html',context) def get_database_context(request): fields = [] tables = [] if request.method == 'GET' and 'app' in request.GET: fields = utils.get_fields(request.GET['app']) tables = utils.get_tables(request.GET['app']) else: fields =[] tables = [] context = {'apps':utils.get_apps(), 'tables':tables, 'fields' : fields, 'context' : get_dashboard_context(request)} return context class DatabaseView(TemplateView): template_name = 'ezybaas/database.html' def get_context_data(self, *args, **kwargs): context = get_database_context(self.request) return context def database_append(request): if request.method == 'POST': print('\n\n') print(request.POST['app']) print(request.POST['table']) print(request.POST['tblAppendGrid_rowOrder']) print('printed') my_list = request.POST['tblAppendGrid_rowOrder'].split(",") n = len(my_list) baas.append_function(request.POST['app'],request.POST['table'],request,len(my_list)) baas.create_models(request.POST['app']) return render(request, 'ezybaas/index.html',) def database_new_table(request): context = {} if request.method == 'GET' and 'app' in request.GET: app = request.GET["app"] context = { 'app' : app} print("APPPPP\n\n" + str(context)) return render(request, 'ezybaas/database_new_table.html',context) def save_to_file(request): if request.method == 'GET' and 'app' in request.GET: app = request.GET['app'] fields = utils.get_fields(app) tables = utils.get_tables(app) baas.go_live(app) else: fields =[] tables = [] context = {'apps':utils.get_apps(), 'tables':tables, 'fields' : fields} return index(context) class ImportData(TemplateView): template_name = 'ezybaas/import.html' def get_context_data(self, *args, **kwargs): context = get_dashboard_context(self.request) return context class Sysinfo(TemplateView): template_name = 'ezybaas/sysinfo.html' def get_context_data(self, *args, **kwargs): context = 
get_dashboard_context(self.request) return context class AboutPageView(TemplateView): template_name = 'ezybaas/about.html' def get_context_data(self, *args, **kwargs): context = get_dashboard_context(self.request) return context class LayoutPageView(TemplateView): template_name = 'ezybaas/layout.html' def get_context_data(self, *args, **kwargs): context = get_dashboard_context(self.request) return context
/* OSPPTNProbeConnect - connect socket to specific address for probes */ int OSPPTNProbeConnect( OSPTIPADDR ipAddr) { int fdSocket; int uErr = OSPC_ERR_NO_ERROR; struct timeval timeout; OSPTSSLSESSION *sslsess = OSPC_OSNULL; OSPM_UDP_SOCKET(fdSocket, uErr); timeout.tv_sec = OSPC_DEFAULT_PROBE_CONNECT / 1000; timeout.tv_usec = OSPC_DEFAULT_PROBE_CONNECT % 1000 * 1000; if ((fdSocket >= 0) && uErr == 0) { uErr = OSPPSockConnect(&fdSocket, OSPC_DEFAULT_BLOCKING_FLAG, ipAddr, htons(OSPC_TN_UDP_ECHOPORT), &timeout, &sslsess); if (uErr) { fdSocket = -1; } } return fdSocket; }
/** * Called to add all entries that use the ore_name * * @param ore_name - ore dictionary group of the item * @param weight - weight of entries * @param function - logic to use for adding the entry */ protected void addEntries(final String ore_name, final int weight, final Function<ItemStack, Boolean> function) { if (OreDictionary.doesOreNameExist(ore_name)) { for (ItemStack stack : OreDictionary.getOres(ore_name)) { if (stack != null && stack.getItem() != null && function.apply(stack)) { addEntry(ore_name, stack, weight); } } } }
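/**
 * Hypothetical call illustrating the predicate parameter; the ore name and
 * weight are invented example values, and the lambda simply skips
 * wildcard-metadata stacks before they become entries:
 *
 *   addEntries("ingotCopper", 5, stack -> stack.getMetadata() != OreDictionary.WILDCARD_VALUE);
 */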
/* Copyright (C) CFEngine AS This file is part of CFEngine 3 - written and maintained by CFEngine AS. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 3. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA To the extent this program is licensed as part of the Enterprise versions of CFEngine, the applicable Commercial Open Source License (COSL) may apply to this file if you as a licensee so wish it. See included file COSL.txt. */ #define true 1 #define false 0 #define bool int typedef struct Item_ Item; struct Item_ { char *name; char *classes; int counter; time_t time; Item *next; }; typedef enum { ITEM_MATCH_TYPE_LITERAL_START, ITEM_MATCH_TYPE_LITERAL_COMPLETE, ITEM_MATCH_TYPE_LITERAL_SOMEWHERE, ITEM_MATCH_TYPE_REGEX_COMPLETE, ITEM_MATCH_TYPE_LITERAL_START_NOT, ITEM_MATCH_TYPE_LITERAL_COMPLETE_NOT, ITEM_MATCH_TYPE_LITERAL_SOMEWHERE_NOT, ITEM_MATCH_TYPE_REGEX_COMPLETE_NOT } ItemMatchType; void PrependFullItem(Item **liststart, const char *itemstring, const char *classes, int counter, time_t t); Item *ReturnItemIn(Item *list, const char *item); Item *ReturnItemInClass(Item *list, const char *item, const char *classes); Item *ReturnItemAtIndex(Item *list, int index); Item *EndOfList(Item *start); void PrependItemList(Item **liststart, const char *itemstring); void InsertAfter(Item **filestart, Item *ptr, const char *string); Item *SplitStringAsItemList(char *string, char sep); Item *SplitString(const char *string, char sep); int ListLen(const Item *list); bool IsItemIn(const Item *list, const char *item); bool ListsCompare(const Item *list1, const Item *list2); bool ListSubsetOfList(const Item *list1, const Item *list2); Item *ConcatLists(Item *list1, Item *list2); void CopyList(Item **dest, const Item *source); void IdempItemCount(Item **liststart, const char *itemstring, const char *classes); Item *IdempPrependItem(Item **liststart, const char *itemstring, const char *classes); Item *IdempPrependItemClass(Item **liststart, const char *itemstring, const char *classes); Item *ReverseItemList(Item *list); /* Eats list, spits it out reversed. */ Item *PrependItem(Item **liststart, const char *itemstring, const char *classes); /* Warning: AppendItem()'s cost is proportional to list length; it is * usually cheaper to build a list using PrependItem, then reverse it; * building it with AppendItem() is quadratic in length. */ void AppendItem(Item **liststart, const char *itemstring, const char *classes); void DeleteItemList(Item *item); void DeleteItem(Item **liststart, Item *item); void IncrementItemListCounter(Item *ptr, const char *string); void SetItemListCounter(Item *ptr, const char *string, int value); char *ItemList2CSV(const Item *list); size_t ItemList2CSV_bound(const Item *list, char *buf, size_t buf_size, char separator); int ItemListSize(const Item *list); void DeleteItemLiteral(Item **liststart, char *name);
def attribute_dependencies(self): attributes = self.all_attributes + self.all_aggregate_attributes return {attr.ref:attr.dependencies for attr in attributes}
/** * * @author Administrator * @date 2018/11/9 */ @Service public class OrderServiceImpl implements OrderService{ @Reference(version = "1.0") UserService userService; @Override public void insertUser(UserModel model) { userService.insertUser(model); } }
// init registers constructor functions for dynamically creating elements based off the XML namespace and name func init() { gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "CT_Border", NewCT_Border) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "CT_Wrap", NewCT_Wrap) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "CT_AnchorLock", NewCT_AnchorLock) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "bordertop", NewBordertop) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "borderleft", NewBorderleft) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "borderright", NewBorderright) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "borderbottom", NewBorderbottom) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "wrap", NewWrap) gooxml.RegisterConstructor("urn:schemas-microsoft-com:office:word", "anchorlock", NewAnchorlock) }
# Read n rotation amounts, then try every way of applying each rotation
# clockwise (+) or counter-clockwise (-); the pointer returns to the start
# iff some choice of signs sums to 0 modulo 360 degrees.
moves = []
n = int(input())
for _ in range(0, n):
    d = int(input())
    moves.append(d)

g = 999999999  # smallest angle (mod 360) reachable over all sign choices
for i in range(0, 2**n):
    res = 0
    # Interpret the n bits of i as the sign choice for each rotation.
    t = '0' * (n - len(bin(i)[2:])) + bin(i)[2:]
    # print(t)
    for j in range(0, n):
        if t[j] == '1':
            res += moves[j]
            res %= 360
        else:
            res -= moves[j]
            res %= 360
    g = min(g, res)

if g == 0:
    print("YES")
else:
    print("NO")
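# An equivalent formulation of the search above, sketched with
# itertools.product instead of hand-built binary strings; each rotation is
# applied either clockwise (+d) or counter-clockwise (-d), and we ask
# whether any sign assignment returns the pointer to 0 degrees (mod 360):
#
#   from itertools import product
#   possible = any(sum(s * d for s, d in zip(signs, moves)) % 360 == 0
#                  for signs in product((1, -1), repeat=n))
#   print("YES" if possible else "NO")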
/** * Creates an interval with the total extents of the two given intervals. */ INTERVAL *mergeIntervals(INTERVAL *inter1, INTERVAL *inter2) { INTERVAL *interval; POSTGIS_DEBUGF(2, "mergeIntervals called with %p, %p", inter1, inter2); interval = lwalloc(sizeof(INTERVAL)); interval->max = FP_MAX(inter1->max, inter2->max); interval->min = FP_MIN(inter1->min, inter2->min); POSTGIS_DEBUGF(3, "interval min = %8.3f, max = %8.3f", interval->min, interval->max); return interval; }
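/*
 * Hypothetical usage sketch (the interval endpoints are invented example
 * values); lwfree() is assumed as the deallocator pairing lwalloc():
 *
 *   INTERVAL a = { .min = 0.0, .max = 5.0 };
 *   INTERVAL b = { .min = 3.0, .max = 9.0 };
 *   INTERVAL *m = mergeIntervals(&a, &b);  // m->min == 0.0, m->max == 9.0
 *   lwfree(m);
 */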
/** * Equip location determines whether the item can be equipped * and, if so, in which location it may be equipped. An * equip location of 07 indicates the item cannot be equipped. * Equip location values of 01 to 06 indicate the item * may be equipped in the relevant slot. Items * given an equip location value of 02 occupy both hand slots. */ public class EquipLocations { public static final String[] EQUIP_LOCATIONS = { "Invalid", //00 "One handed", //01 "Two handed", //02 "Body", //03 "Head", //04 "Ring", //05 "Amulet", //06 "Cannot be equipped" //07 }; }
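/**
 * A hedged convenience sketch (hypothetical, not part of the original class):
 * mapping a raw equip location code to its description, treating
 * out-of-range codes as "Invalid" (index 0).
 */
class EquipLocationsExample {
    static String describe(int code) {
        String[] names = EquipLocations.EQUIP_LOCATIONS;
        return (code >= 0 && code < names.length) ? names[code] : names[0];
    }
}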
#!/usr/bin/env python3
"""
Generates test points for FFT

pip dependencies:
numpy==1.13.3
numfi==0.2.4
"""
import cmath
from cmath import e, pi
from numpy import fft

SHOW_DEBUG_INFO = False  # prints out step-by-step generation of test points

IOWidth = 16  # IOWidth from Tail.scala
bp = 8  # BP from Tail.scala

freq = 16  # 16 mhz
freq_samp = 128  # 128 mhz

points_n = range(1, 9)

# e^(-2pi * f/fs * nj)
points = []
for n in points_n:
    exponent = (-2 * pi * (freq / freq_samp) * complex(0, n))  # complex(0, n) = nj
    points.append(e ** exponent)

# This prints approximately what you should expect to see from the hardware FFT output
new_points = [complex(int(point.real * 2**8) / 2**8, int(point.imag * 2**8) / 2**8) for point in points]
print(f"FFT Expected Output:\n{fft.fft(new_points)}\nActual output may differ slightly, since the lowest bit position can vary between the hardware and this reference\n")

from numfi import *

points_in_bin = ""
for n in points_n:
    point = points[n - 1]
    numfi_real = numfi(point.real, 1, IOWidth, bp)  # arg0: input float, arg1: signed(1)/unsigned(0), arg2: total width, arg3: fracwidth
    numfi_imag = numfi(point.imag, 1, IOWidth, bp)
    real_bits = f"{numfi_real.bin}"[2:-2]
    imag_bits = f"{numfi_imag.bin}"[2:-2]
    points_in_bin += f"0b{(real_bits + imag_bits)},\n"
    if SHOW_DEBUG_INFO:
        print(f"=== POINT {n} === ")
        print(f"Real:\n\tFloat:{point.real}\n\tFP:{numfi_real}\n\tBIN:{numfi_real.bin}")
        print(f"Imag:\n\tFloat:{point.imag}\n\tFP:{numfi_imag}\n\tBIN:{numfi_imag.bin}")
        print(f"fft-test: \n\tbinary input: {real_bits + imag_bits}")
        print()

print("Test points:")
print(points_in_bin.strip()[:-1])
<reponame>reespozzi/fact-api package uk.gov.hmcts.dts.fact.controllers.admin; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.http.MediaType; import org.springframework.test.web.servlet.MockMvc; import uk.gov.hmcts.dts.fact.exception.NotFoundException; import uk.gov.hmcts.dts.fact.model.admin.SpoeAreaOfLaw; import uk.gov.hmcts.dts.fact.services.admin.AdminCourtSpoeAreasOfLawService; import java.util.List; import static java.util.Arrays.asList; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static uk.gov.hmcts.dts.fact.util.TestHelper.getResourceAsJson; @WebMvcTest(AdminCourtSpoeAreasOfLawController.class) @AutoConfigureMockMvc(addFilters = false) public class AdminCourtSpoeAreasOfLawControllerTest { private static final String BASE_PATH = "/admin/courts/"; private static final String CHILD_PATH = "/SpoeAreasOfLaw"; private static final String TEST_SLUG = "unknownSlug"; private static final String TEST_COURT_AREAS_OF_LAW_PATH = "court-spoe-areas-of-law.json"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); @Autowired private transient MockMvc mockMvc; @MockBean private AdminCourtSpoeAreasOfLawService adminService; @Test void shouldReturnAllSpoeAreasOfLaw() throws Exception { final String expectedJson = getResourceAsJson(TEST_COURT_AREAS_OF_LAW_PATH); final List<SpoeAreaOfLaw> spoeAreasOfLaw = asList(OBJECT_MAPPER.readValue(expectedJson, SpoeAreaOfLaw[].class)); when(adminService.getAllSpoeAreasOfLaw()).thenReturn(spoeAreasOfLaw); mockMvc.perform(get(BASE_PATH + CHILD_PATH)) .andExpect(status().isOk()) .andExpect(content().json(expectedJson)); } @Test void shouldReturnCourtSpoeAreasOfLaw() throws Exception { final String expectedJson = getResourceAsJson(TEST_COURT_AREAS_OF_LAW_PATH); final List<SpoeAreaOfLaw> spoeAreasOfLaw = asList(OBJECT_MAPPER.readValue(expectedJson, SpoeAreaOfLaw[].class)); when(adminService.getCourtSpoeAreasOfLawBySlug(TEST_SLUG)).thenReturn(spoeAreasOfLaw); mockMvc.perform(get(BASE_PATH + TEST_SLUG + CHILD_PATH)) .andExpect(status().isOk()) .andExpect(content().json(expectedJson)); } @Test void retrieveCourtSpoeAreasOfLawAndShouldReturnNotFoundForUnknownCourtSlug() throws Exception { when(adminService.getCourtSpoeAreasOfLawBySlug(TEST_SLUG)).thenThrow(new NotFoundException(TEST_SLUG)); mockMvc.perform(get(BASE_PATH + TEST_SLUG + CHILD_PATH)) .andExpect(status().isNotFound()) .andExpect(content().string("Not found: " + TEST_SLUG)); } @Test void updateCourtSpoeAreasOfLawShouldReturnUpdatedCourtSpoeAreasOfLaw() throws Exception { final String expectedJson = getResourceAsJson(TEST_COURT_AREAS_OF_LAW_PATH); final List<SpoeAreaOfLaw> spoeAreasOfLaw = asList(OBJECT_MAPPER.readValue(expectedJson, SpoeAreaOfLaw[].class)); when(adminService.updateSpoeAreasOfLawForCourt(TEST_SLUG, spoeAreasOfLaw)).thenReturn(spoeAreasOfLaw); mockMvc.perform(put(BASE_PATH + TEST_SLUG + 
CHILD_PATH) .content(expectedJson) .contentType(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()) .andExpect(content().json(expectedJson)); } @Test void updateCourtSpoeAreasOfLawShouldReturnNotFoundForUnknownCourtSlug() throws Exception { final String jsonBody = getResourceAsJson(TEST_COURT_AREAS_OF_LAW_PATH); final List<SpoeAreaOfLaw> spoeAreasOfLaw = asList(OBJECT_MAPPER.readValue(jsonBody, SpoeAreaOfLaw[].class)); when(adminService.updateSpoeAreasOfLawForCourt(TEST_SLUG, spoeAreasOfLaw)) .thenThrow(new NotFoundException(TEST_SLUG)); mockMvc.perform(put(BASE_PATH + TEST_SLUG + CHILD_PATH) .content(jsonBody) .contentType(MediaType.APPLICATION_JSON) .accept(MediaType.APPLICATION_JSON)) .andExpect(status().isNotFound()) .andExpect(content().string("Not found: " + TEST_SLUG)); } }
/** * A base class for mod registries */ public abstract class CommonRegistry { /** * Registration event */ public abstract void register(); protected static ConfiguredFeature<?, ?> createConfiguredFeature(Block oreBlock, int veinSize) { return Feature.ORE.configure(new OreFeatureConfig( OreConfiguredFeatures.STONE_ORE_REPLACEABLES, oreBlock.getDefaultState(), veinSize)); } protected static PlacedFeature createPlacedFeature(ConfiguredFeature<?, ?> configuredFeature, int maxY, int repeats) { return configuredFeature.withPlacement( CountPlacementModifier.of(repeats), SquarePlacementModifier.of(), HeightRangePlacementModifier.uniform(YOffset.getBottom(), YOffset.fixed(maxY)) ); } /** * Registers an item to the game * @param name Item name (ID) * @param item Item instance */ protected void registerItem(String name, Item item) { Registry.register(Registry.ITEM, new Identifier(Info.MOD_ID, name), item); } /** * Registers a block and a block item to the game * @param name Block name (ID) * @param block Block instance * @param group Item group with the block item */ protected void registerBlock(String name, Block block, ItemGroup group) { // Registering the block Registry.register(Registry.BLOCK, new Identifier(Info.MOD_ID, name), block); // Registering the block item Registry.register(Registry.ITEM, new Identifier(Info.MOD_ID, name), new BlockItem(block, new Item.Settings().group(group))); } protected void registerOreFeature(String name, ConfiguredFeature<?, ?> configuredFeature, PlacedFeature placedFeature) { Identifier id = new Identifier(Info.MOD_ID, name); Registry.register(BuiltinRegistries.CONFIGURED_FEATURE, id, configuredFeature); Registry.register(BuiltinRegistries.PLACED_FEATURE, id, placedFeature); BiomeModifications.addFeature(BiomeSelectors.foundInOverworld(), GenerationStep.Feature.UNDERGROUND_ORES, RegistryKey.of(Registry.PLACED_FEATURE_KEY, id)); } }
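/**
 * A hypothetical subclass sketch showing how the helpers above compose; the
 * block name, settings, and vein/placement parameters are invented for
 * illustration, and the Block/FabricBlockSettings/Material/ItemGroup imports
 * are assumed from the same Fabric toolchain the base class already uses.
 */
class ExampleRegistry extends CommonRegistry {
    @Override
    public void register() {
        Block exampleOre = new Block(FabricBlockSettings.of(Material.STONE).strength(3.0f));
        registerBlock("example_ore", exampleOre, ItemGroup.BUILDING_BLOCKS);

        ConfiguredFeature<?, ?> oreFeature = createConfiguredFeature(exampleOre, 8);
        registerOreFeature("example_ore", oreFeature, createPlacedFeature(oreFeature, 64, 12));
    }
}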
/*******************************************************************************
 * Copyright 2016, 2017 ARM Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/

#include "pal.h"
#include "pal_plat_update.h"
#include "pal_update.h"
#include "pal_macros.h"

#include <stdlib.h>
#include <stdio.h>
#include <inttypes.h>

PAL_PRIVATE uint8_t palUpdateInitFlag = 0;

#define PAL_KILOBYTE 1024

#define TRACE_GROUP "PAL"

#ifndef PAL_UPDATE_IMAGE_LOCATION
#error "Please define PAL_UPDATE_IMAGE_LOCATION to PAL_UPDATE_USE_FLASH (value 1) or PAL_UPDATE_USE_FS (value 2)"
#endif

#if (PAL_UPDATE_IMAGE_LOCATION == PAL_UPDATE_USE_FS)

#define SEEK_POS_INVALID 0xFFFFFFFF

PAL_PRIVATE FirmwareHeader_t pal_pi_mbed_firmware_header;
PAL_PRIVATE palImageSignalEvent_t g_palUpdateServiceCBfunc;
PAL_PRIVATE palFileDescriptor_t image_file[IMAGE_COUNT_MAX];
PAL_PRIVATE bool last_read_nwrite[IMAGE_COUNT_MAX];
PAL_PRIVATE uint32_t last_seek_pos[IMAGE_COUNT_MAX];

PAL_PRIVATE bool valid_index(uint32_t index);
PAL_PRIVATE size_t safe_read(uint32_t index, size_t offset, uint8_t *buffer, uint32_t size);
PAL_PRIVATE size_t safe_write(uint32_t index, size_t offset, const uint8_t *buffer, uint32_t size);
PAL_PRIVATE bool open_if_necessary(uint32_t index, bool read_nwrite);
PAL_PRIVATE bool seek_if_necessary(uint32_t index, size_t offset, bool read_nwrite);
PAL_PRIVATE bool close_if_necessary(uint32_t index);
PAL_PRIVATE const char *image_path_alloc_from_index(uint32_t index);
PAL_PRIVATE const char *header_path_alloc_from_index(uint32_t index);
PAL_PRIVATE const char *path_join_and_alloc(const char * const * path_list);
PAL_PRIVATE palStatus_t pal_set_fw_header(palImageId_t index, FirmwareHeader_t *headerP);
PAL_PRIVATE uint32_t internal_crc32(const uint8_t* buffer, uint32_t length);

char* pal_imageGetFolder(void)
{
    return PAL_UPDATE_FIRMWARE_DIR;
}

palStatus_t pal_imageInitAPI(palImageSignalEvent_t CBfunction)
{
    palStatus_t status = PAL_SUCCESS;
    //printf("pal_imageInitAPI\r\n");
    PAL_MODULE_INIT(palUpdateInitFlag);

    // create absolute path.
pal_fsMkDir(PAL_UPDATE_FIRMWARE_DIR); g_palUpdateServiceCBfunc = CBfunction; g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_INIT); return status; } palStatus_t pal_imageDeInit(void) { //printf("pal_plat_imageDeInit\r\n"); PAL_MODULE_DEINIT(palUpdateInitFlag); for (int i = 0; i < IMAGE_COUNT_MAX; i++) { close_if_necessary(i); } return PAL_SUCCESS; } palStatus_t pal_imagePrepare(palImageId_t imageId, palImageHeaderDeails_t *headerDetails) { //printf("pal_imagePrepare(imageId=%lu, size=%lu)\r\n", imageId, headerDetails->imageSize); PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t ret = PAL_ERR_INVALID_ARGUMENT; uint8_t *buffer; // write the image header to file system memset(&pal_pi_mbed_firmware_header,0,sizeof(pal_pi_mbed_firmware_header)); pal_pi_mbed_firmware_header.totalSize = headerDetails->imageSize; pal_pi_mbed_firmware_header.magic = FIRMWARE_HEADER_MAGIC; pal_pi_mbed_firmware_header.version = FIRMWARE_HEADER_VERSION; pal_pi_mbed_firmware_header.firmwareVersion = headerDetails->version; // XXX: as the code expects buffer to have a SHA256, we better at least check for it and // fail operation early if it is not. if (headerDetails->hash.bufferLength == SIZEOF_SHA256) { memcpy(pal_pi_mbed_firmware_header.firmwareSHA256,headerDetails->hash.buffer,SIZEOF_SHA256); pal_pi_mbed_firmware_header.checksum = internal_crc32((uint8_t *) &pal_pi_mbed_firmware_header, sizeof(pal_pi_mbed_firmware_header)); ret = pal_set_fw_header(imageId, &pal_pi_mbed_firmware_header); } /*Check that the size of the image is valid and reserve space for it*/ if (ret == PAL_SUCCESS) { buffer = malloc(PAL_KILOBYTE); if (NULL != buffer) { uint32_t writeCounter = 0; memset(buffer,0,PAL_KILOBYTE); while(writeCounter <= headerDetails->imageSize) { size_t written = safe_write(imageId,0,buffer,PAL_KILOBYTE); writeCounter+=PAL_KILOBYTE; if (PAL_KILOBYTE != written) { ret = PAL_ERR_UPDATE_ERROR; } } if ((PAL_SUCCESS == ret) && (writeCounter < headerDetails->imageSize)) { //writing the last bytes size_t written = safe_write(imageId,0,buffer,(headerDetails->imageSize - writeCounter)); if ((headerDetails->imageSize - writeCounter) != written) { ret = PAL_ERR_UPDATE_ERROR; } } free(buffer); if (PAL_SUCCESS == ret) { ret = pal_fsFseek(&(image_file[imageId]),0,PAL_FS_OFFSET_SEEKSET); } else { char *image_path=(char *)image_path_alloc_from_index(imageId); pal_fsUnlink(image_path); free(image_path); } } else { ret = PAL_ERR_NO_MEMORY; } } if (PAL_SUCCESS == ret) { g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_PREPARE); } else { g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_ERROR); } return ret; } palStatus_t pal_imageWrite(palImageId_t imageId, size_t offset, palConstBuffer_t *chunk) { //printf("pal_imageWrite(imageId=%lu, offset=%lu)\r\n", imageId, offset); PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t ret = PAL_ERR_UPDATE_ERROR; int xfer_size_or_error = safe_write(imageId, offset, chunk->buffer, chunk->bufferLength); if ((xfer_size_or_error < 0) || ((uint32_t)xfer_size_or_error != chunk->bufferLength)) { //printf("Error writing to file\r\n"); } else { ret = PAL_SUCCESS; g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_WRITE); } return ret; } palStatus_t pal_imageFinalize(palImageId_t imageId) { //printf("pal_imageFinalize(id=%i)\r\n", imageId); PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t ret = PAL_ERR_UPDATE_ERROR; if (close_if_necessary(imageId)) { ret = PAL_SUCCESS; g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_FINALIZE); } return ret; } palStatus_t pal_imageGetDirectMemoryAccess(palImageId_t imageId, void** imagePtr, size_t* imageSizeInBytes) { 
PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageGetDirectMemAccess(imageId, imagePtr, imageSizeInBytes); return status; } palStatus_t pal_imageReadToBuffer(palImageId_t imageId, size_t offset, palBuffer_t *chunk) { //printf("pal_imageReadToBuffer(imageId=%lu, offset=%lu)\r\n", imageId, offset); PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t ret = PAL_ERR_UPDATE_ERROR; int xfer_size_or_error = safe_read(imageId, offset, chunk->buffer, chunk->maxBufferLength); if (xfer_size_or_error < 0) { //printf("Error reading from file\r\n"); } else { chunk->bufferLength = xfer_size_or_error; g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_READTOBUFFER); ret = PAL_SUCCESS; } return ret; } palStatus_t pal_imageActivate(palImageId_t imageId) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageActivate(imageId); return status; } palStatus_t pal_imageGetFirmwareHeaderData(palImageId_t imageId, palBuffer_t *headerData) { palStatus_t ret = PAL_SUCCESS; palFileDescriptor_t file = 0; size_t xfer_size; if (NULL == headerData) { return PAL_ERR_NULL_POINTER; } if (headerData->maxBufferLength < sizeof(palFirmwareHeader_t)) { PAL_LOG_ERR("Firmware header buffer size is too small(is %" PRIu32 " needs to be at least %zu)\r\n" ,headerData->maxBufferLength, sizeof(palFirmwareHeader_t)); return PAL_ERR_INVALID_ARGUMENT; } const char *file_path = header_path_alloc_from_index(imageId); if (file_path) { ret = pal_fsFopen(file_path, PAL_FS_FLAG_READONLY, &file); if (ret == PAL_SUCCESS) { ret = pal_fsFread(&file, headerData->buffer, sizeof(palFirmwareHeader_t), &xfer_size); if (PAL_SUCCESS == ret) { headerData->bufferLength = xfer_size; } pal_fsFclose(&file); } free((void*)file_path); } else { ret = PAL_ERR_NO_MEMORY; } return ret; } palStatus_t pal_imageGetActiveHash(palBuffer_t *hash) { //printf("pal_imageGetActiveHash\r\n"); PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t ret; if (hash->maxBufferLength < SIZEOF_SHA256) { ret = PAL_ERR_BUFFER_TOO_SMALL; goto exit; } hash->bufferLength = 0; memset(hash->buffer, 0, hash->maxBufferLength); ret = pal_plat_imageGetActiveHash(hash); if (ret == PAL_SUCCESS) { g_palUpdateServiceCBfunc(PAL_IMAGE_EVENT_GETACTIVEHASH); } exit: return ret; } palStatus_t pal_imageGetActiveVersion(palBuffer_t *version) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageGetActiveVersion(version); return status; } palStatus_t pal_imageWriteDataToMemory(palImagePlatformData_t dataId, const palConstBuffer_t * const dataBuffer) { palStatus_t status = PAL_SUCCESS; PAL_MODULE_IS_INIT(palUpdateInitFlag); // this switch is for further use when there will be more options switch(dataId) { case PAL_IMAGE_DATA_HASH: status = pal_plat_imageWriteHashToMemory(dataBuffer); break; default: { PAL_LOG_ERR("Update image write to memory error"); status = PAL_ERR_GENERIC_FAILURE; } } return status; } PAL_PRIVATE palStatus_t pal_set_fw_header(palImageId_t index, FirmwareHeader_t *headerP) { palStatus_t ret; palFileDescriptor_t file = 0; size_t xfer_size; const char *file_path = header_path_alloc_from_index(index); ret = pal_fsFopen(file_path, PAL_FS_FLAG_READWRITETRUNC, &file); if (ret != PAL_SUCCESS) { //printf("pal_fsFopen returned 0x%x\r\n", ret); goto exit; } ret = pal_fsFwrite(&file, headerP, sizeof(FirmwareHeader_t), &xfer_size); if (ret != PAL_SUCCESS) { //printf("pal_fsFread returned 0x%x\r\n", ret); goto exit; } else if (xfer_size != sizeof(FirmwareHeader_t)) { //printf("Size 
written %lu expected %lu\r\n", xfer_size, sizeof(FirmwareHeader_t)); goto exit; } ret = PAL_SUCCESS; exit: if (file != 0) { ret = pal_fsFclose(&file); if (ret != PAL_SUCCESS) { //printf("Error closing file %s\r\n", file_path); ret = PAL_ERR_UPDATE_ERROR; } } free((void*)file_path); return ret; } /** * @brief Bitwise CRC32 calculation * @details Modified from ARM Keil code: * http://www.keil.com/appnotes/docs/apnt_277.asp * * @param buffer Input byte array. * @param length Number of bytes in array. * * @return CRC32 */ PAL_PRIVATE uint32_t internal_crc32(const uint8_t* buffer, uint32_t length) { const uint8_t* current = buffer; uint32_t crc = 0xFFFFFFFF; while (length--) { crc ^= *current++; for (uint32_t counter = 0; counter < 8; counter++) { if (crc & 1) { crc = (crc >> 1) ^ 0xEDB88320; } else { crc = crc >> 1; } } } return (crc ^ 0xFFFFFFFF); } PAL_PRIVATE bool valid_index(uint32_t index) { return (index < IMAGE_COUNT_MAX); } PAL_PRIVATE size_t safe_read(uint32_t index, size_t offset, uint8_t *buffer, uint32_t size) { const bool read_nwrite = true; size_t xfer_size = 0; palStatus_t status; if ((!valid_index(index)) || (!open_if_necessary(index, read_nwrite)) || (!seek_if_necessary(index, offset, read_nwrite))) { return 0; } status = pal_fsFread(&(image_file[index]), buffer, size, &xfer_size); if (status == PAL_SUCCESS) { last_read_nwrite[index] = read_nwrite; last_seek_pos[index] += xfer_size; } return xfer_size; } PAL_PRIVATE size_t safe_write(uint32_t index, size_t offset, const uint8_t *buffer, uint32_t size) { const bool read_nwrite = false; size_t xfer_size = 0; palStatus_t status; if ((!valid_index(index)) || (!open_if_necessary(index, read_nwrite)) || (!seek_if_necessary(index, offset, read_nwrite))) { return 0; } status = pal_fsFseek(&(image_file[index]), offset, PAL_FS_OFFSET_SEEKSET); if (status == PAL_SUCCESS) { status = pal_fsFwrite(&(image_file[index]), buffer, size, &xfer_size); if (status == PAL_SUCCESS) { last_read_nwrite[index] = read_nwrite; last_seek_pos[index] += xfer_size; if (size != xfer_size) { //printf("WRONG SIZE expected %u got %lu\r\n", size, xfer_size); return 0; } } } return xfer_size; } PAL_PRIVATE bool open_if_necessary(uint32_t index, bool read_nwrite) { if (!valid_index(index)) { return false; } if ( (unsigned int*)image_file[index] == NULL ) { const char *file_path = image_path_alloc_from_index(index); pal_fsFileMode_t mode = read_nwrite ? 
PAL_FS_FLAG_READWRITE : PAL_FS_FLAG_READWRITETRUNC; palStatus_t ret = pal_fsFopen(file_path, mode, &(image_file[index])); free((void*)file_path); last_seek_pos[index] = 0; if (ret != PAL_SUCCESS) { return false; } } return true; } PAL_PRIVATE bool seek_if_necessary(uint32_t index, size_t offset, bool read_nwrite) { if (!valid_index(index)) { return false; } if ((read_nwrite != last_read_nwrite[index]) || (offset != last_seek_pos[index])) { palStatus_t ret = pal_fsFseek(&(image_file[index]), offset, PAL_FS_OFFSET_SEEKSET); if (ret != PAL_SUCCESS) { last_seek_pos[index] = SEEK_POS_INVALID; return false; } } last_read_nwrite[index] = read_nwrite; last_seek_pos[index] = offset; return true; } PAL_PRIVATE bool close_if_necessary(uint32_t index) { if (!valid_index(index)) { return false; } palFileDescriptor_t file = image_file[index]; image_file[index] = 0; last_seek_pos[index] = SEEK_POS_INVALID; if (file != 0) { palStatus_t ret = pal_fsFclose(&file); if (ret != 0) { return false; } } return true; } PAL_PRIVATE const char *image_path_alloc_from_index(uint32_t index) { char file_name[32] = {0}; snprintf(file_name, sizeof(file_name)-1, "image_%" PRIu32 ".bin", index); file_name[sizeof(file_name) - 1] = 0; const char * const path_list[] = { (char*)PAL_UPDATE_FIRMWARE_DIR, file_name, NULL }; return path_join_and_alloc(path_list); } PAL_PRIVATE const char *header_path_alloc_from_index(uint32_t index) { char file_name[32] = {0}; if (ACTIVE_IMAGE_INDEX == index) { snprintf(file_name, sizeof(file_name)-1, "header_active.bin"); } else { snprintf(file_name, sizeof(file_name)-1, "header_%" PRIu32 ".bin", index); } const char * const path_list[] = { (char*)PAL_UPDATE_FIRMWARE_DIR, file_name, NULL }; return path_join_and_alloc(path_list); } PAL_PRIVATE const char *path_join_and_alloc(const char * const * path_list) { uint32_t string_size = 1; uint32_t pos = 0; // Determine size of string to return while (path_list[pos] != NULL) { // Size of string and space for separator string_size += strlen(path_list[pos]) + 1; pos++; } // Allocate and initialize memory char *path = (char*)malloc(string_size); if (NULL != path) { memset(path, 0, string_size); // Write joined path pos = 0; while (path_list[pos] != NULL) { bool has_slash = '/' == path_list[pos][strlen(path_list[pos]) - 1]; bool is_last = NULL == path_list[pos + 1]; strncat(path, path_list[pos], string_size - strlen(path) - 1); if (!has_slash && !is_last) { strncat(path, "/", string_size - strlen(path) - 1); } pos++; } } return path; } #elif (PAL_UPDATE_IMAGE_LOCATION == PAL_UPDATE_USE_FLASH) palStatus_t pal_imageInitAPI(palImageSignalEvent_t CBfunction) { PAL_MODULE_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageInitAPI(CBfunction); return status; } palStatus_t pal_imageDeInit(void) { PAL_MODULE_DEINIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageDeInit(); return status; } palStatus_t pal_imagePrepare(palImageId_t imageId, palImageHeaderDeails_t *headerDetails) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; pal_plat_imageSetHeader(imageId,headerDetails); status = pal_plat_imageReserveSpace(imageId, headerDetails->imageSize); return status; } palStatus_t pal_imageWrite (palImageId_t imageId, size_t offset, palConstBuffer_t *chunk) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageWrite(imageId, offset, chunk); return status; } palStatus_t pal_imageFinalize(palImageId_t imageId) { PAL_MODULE_IS_INIT(palUpdateInitFlag); 
palStatus_t status = PAL_SUCCESS; status = pal_plat_imageFlush(imageId); return status; } palStatus_t pal_imageGetDirectMemoryAccess(palImageId_t imageId, void** imagePtr, size_t* imageSizeInBytes) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageGetDirectMemAccess(imageId, imagePtr, imageSizeInBytes); return status; } palStatus_t pal_imageReadToBuffer(palImageId_t imageId, size_t offset, palBuffer_t *chunk) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageReadToBuffer(imageId,offset,chunk); return status; } palStatus_t pal_imageActivate(palImageId_t imageId) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageActivate(imageId); return status; } palStatus_t pal_imageGetActiveHash(palBuffer_t *hash) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageGetActiveHash(hash); return status; } palStatus_t pal_imageGetActiveVersion(palBuffer_t *version) { PAL_MODULE_IS_INIT(palUpdateInitFlag); palStatus_t status = PAL_SUCCESS; status = pal_plat_imageGetActiveVersion(version); return status; } palStatus_t pal_imageWriteDataToMemory(palImagePlatformData_t dataId, const palConstBuffer_t * const dataBuffer) { palStatus_t status = PAL_SUCCESS; PAL_MODULE_IS_INIT(palUpdateInitFlag); // this switch is for further use when there will be more options switch(dataId) { case PAL_IMAGE_DATA_HASH: status = pal_plat_imageWriteHashToMemory(dataBuffer); break; default: { PAL_LOG_ERR("Update write data to mem status %d", (int)dataId); status = PAL_ERR_GENERIC_FAILURE; } } return status; } #endif
/** * @file client.cpp * @author <NAME> (<EMAIL>) * @brief * @version 1.0.2 * @date 2022-03-24 * * @copyright Copyright (c) 2022 * */ // Adding the libraries #include "client.h" #include "server.h" Client::Client(std::string _id, const Server& _server) : id { _id } , server { &_server } { /** * @brief Construct a new Client:: Client object * * @param _id The id of the client * @param _server The server object */ // std::cout << "An object of client class is created." << std::endl; crypto::generate_key(public_key, private_key); } // end of Client::Client std::string Client::get_id() const { /** * @brief Get the id string * * @return std::string */ return id; } // end of Client::get_id std::string Client::get_publickey() const { /** * @brief Get the publickey string * * @return std::string */ return public_key; } // end of Client::get_publickey double Client::get_wallet() const { /** * @brief Get the wallet double * * @return double */ return server->get_wallet(id); } // end of Client::get_wallet std::string Client::sign(std::string txt) const { /** * @brief Sign the string * * @param txt The string to sign * @return std::string */ return crypto::signMessage(private_key, txt); } // end of Client::sign bool Client::transfer_money(std::string receiver, double value) const { /** * @brief Transfer money to the receiver * * @param receiver The receiver id * @param value The value to transfer * @return bool */ std::string trxString { id + '-' + receiver + '-' + std::to_string(value) }; std::string signature { sign(trxString) }; return server->add_pending_trx(trxString, signature); } // end of Client::transfer_money size_t Client::generate_nonce() const { /** * @brief Generate a nonce * * @return size_t */ // True random number generator to obtain a seed (slow) std::random_device seeder; // Efficient pseudo-random generator std::default_random_engine generator { seeder() }; // std::mt19937 generator { seeder() }; // Creating a uniform distribution std::uniform_real_distribution<double> dist(0, 999999999); return static_cast<size_t>(dist(generator)); } // end of Client::generate_nonce
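// A hedged alternative sketch for generate_nonce() above: drawing integers
// directly avoids the double-to-size_t cast and its truncation.
//
//   std::uniform_int_distribution<size_t> dist(0, 999999999);
//   return dist(generator);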
///
/// Coroutine awaiter which waits for a handle to be signaled.
///
class handle_awaiter : public signal_observer {
public:
    handle_awaiter(signal_context &context, HANDLE h) : handle(h), context(&context) {}

    bool await_suspend(std::coroutine_handle<> coro)
    {
        coroutine = std::move(coro);
        context->install(handle, *this);
        return true;
    }

    void await_resume() {}

    bool await_ready() const { return false; }

    // Called by the signal_context when the handle becomes signaled.
    void on_signaled() override { coroutine.resume(); }

protected:
    HANDLE handle;
    signal_context *context;
    std::coroutine_handle<> coroutine;
};
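// A hypothetical usage sketch: inside a coroutine, co_await-ing the awaiter
// suspends until the Win32 handle is signaled, at which point the
// signal_context calls on_signaled() and resumes the coroutine. The `task`
// return type and the ctx/event arguments are assumptions, not from the source.
//
//   task wait_for_event(signal_context &ctx, HANDLE event)
//   {
//       co_await handle_awaiter{ctx, event};
//       // the handle is now signaled
//   }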
""" A command line interface to create reporec data. """ import argparse import json import os import pandas as pd import requests import yaml import reporec # Options parser = argparse.ArgumentParser(description='A CLI for the Repository Record.') parser.add_argument("config_file", help="A configuration file to use.") parser.add_argument("--dir", default="rrdata", type=str, help="Folder to add the data to.") def read_config_file(fname): """Reads a JSON or YAML file. """ if fname.endswith(".yaml") or fname.endswith(".yml"): rfunc = yaml.load elif fname.endswith(".json"): rfunc = json.load else: raise TypeError("Did not understand file type {}.".format(fname)) with open(fname, "r") as handle: ret = rfunc(handle) return ret def main(): args = vars(parser.parse_args()) # Handle paths config_path = os.path.join(os.getcwd(), args["config_file"]) config = read_config_file(config_path) directory = os.path.join(os.getcwd(), args["dir"]) if not os.path.exists(directory): os.makedirs(directory) evaluator_dict = { "conda": { "path_affix": "-conda.csv", "function": reporec.conda.build_table, "required": ["type", "username", "repository"], }, "github": { "path_affix": "-github.csv", "function": reporec.github.build_table, "required": ["type", "username", "repository"], }, "pypi": { "path_affix": "-pypi.csv", "function": reporec.pypi.build_table, "required": ["type", "repository"], }, } for proj, records in config.items(): print(f"\nStarting project {proj}") print("-"*40) write_path = os.path.join(directory, proj) # Build path and initial data blob for each entry data = {} for name, blob in evaluator_dict.items(): t = {} t["path"] = write_path + blob["path_affix"] t["data"] = None if os.path.exists(t["path"]): t["data"] = pd.read_csv(t["path"]) data[name] = t # Loop over records for num, r in enumerate(records): print(f"Recording type {r['type']}") ftype = r["type"].lower() if ftype not in evaluator_dict: raise KeyError("Did not understand type key '{}'.".format(r["type"])) fdata = evaluator_dict[ftype] r["repository"] = r.get("repository", proj) missing = set(fdata["required"]) - r.keys() if len(missing): raise KeyError("Did not find keys '{}' for record {}:{}".format(missing, proj, num)) repository = r.get("repository", proj) args = [r[key] for key in fdata["required"] if key != "type"] try: data[ftype]["data"] = fdata["function"](*args, old_data=data[ftype]["data"]) except requests.exceptions.HTTPError as exc: print(f"Could not obtain results for {proj}-{ftype}.") print(str(exc)) for k, v in data.items(): if v["data"] is None: continue v["data"].sort_values(by=["timestamp"], inplace=True) v["data"].to_csv(v["path"], index=False) print("Finished project '{}'".format(proj)) if __name__ == '__main__': main()
/**
 * Adds a block of perl code which scans a log file for supplied strings.
 * The test will pass if at least one match is found for any of the supplied
 * strings. It will fail if no occurrence of the given strings is found in the log.
 *
 * It does this by creating a while loop which opens a log and does a regex match on each line.
 * If a match is found the count variable is incremented; this method also supports multiple expected messages and
 * builds the if statement arguments accordingly.
 *
 * @param comment is a short explanation of what the test is doing.
 * @param file is the file whose output is scanned.
 * @param patterns contains the strings that the data file is expected to contain.
 * @throws StfException if anything goes wrong.
 */
public void doFindFileMatches(String comment, FileRef file, String... patterns) throws StfException {
	generator.startNewCommand(comment, "count", "Count file matches",
			"TargetFile:", file.getSpec(),
			"SearchStrings:", formatStringArray(patterns).toString());

	extensionBase.outputCountFileMatches("$file_match_count", file, patterns);
	extensionBase.outputFailIfTrue("java", comment, "$file_match_count", "==", 0);

	generator.outputLine("info('Found instances of " + formatStringArray(patterns) + "');");
}
import * as React from 'react';

const TextboxAlignTop24RegularIcon = () => {
    return (
        <svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
            <path d="M21 5.75C21 4.23122 19.7688 3 18.25 3H5.75C4.23122 3 3 4.23122 3 5.75V18.25C3 19.7688 4.23122 21 5.75 21H18.25C19.7688 21 21 19.7688 21 18.25V5.75ZM5.75 4.5H18.25C18.9404 4.5 19.5 5.05964 19.5 5.75V18.25C19.5 18.9404 18.9404 19.5 18.25 19.5H5.75C5.05964 19.5 4.5 18.9404 4.5 18.25V5.75C4.5 5.05964 5.05964 4.5 5.75 4.5ZM6.75 7.5H17.25C17.6642 7.5 18 7.83579 18 8.25C18 8.6297 17.7178 8.94349 17.3518 8.99315L17.25 9H6.75C6.33579 9 6 8.66421 6 8.25C6 7.8703 6.28215 7.55651 6.64823 7.50685L6.75 7.5ZM17.25 10.5H6.75L6.64823 10.5068C6.28215 10.5565 6 10.8703 6 11.25C6 11.6642 6.33579 12 6.75 12H17.25L17.3518 11.9932C17.7178 11.9435 18 11.6297 18 11.25C18 10.8358 17.6642 10.5 17.25 10.5Z" fill="#212121"/>
        </svg>
    );
};

export default TextboxAlignTop24RegularIcon;
<filename>examples/convolutional.py
from krikos.data.loader import *
from krikos.nn.layer import Convolutional, Flatten, ReLU, BatchNorm2d, MaxPooling
from krikos.nn.network import Sequential
from krikos.nn.regularization import L2
from krikos.data.utils import *
from krikos.nn.loss import SoftmaxCrossEntropy

loader = CIFAR10Loader(batch_size=16)

# A small conv net for CIFAR-10: three conv blocks, then flatten into the
# 10-way softmax cross-entropy loss.
layers = [Convolutional(3, 5, 4, stride=2),
          ReLU(),
          BatchNorm2d(5),
          MaxPooling(2, stride=1),
          Convolutional(5, 7, 4, stride=2),
          ReLU(),
          BatchNorm2d(7),
          MaxPooling(2, stride=1),
          Convolutional(7, 10, 5, stride=1),
          Flatten()]
loss = SoftmaxCrossEntropy()

conv_network = Sequential(layers, loss, 1e-3, regularization=L2(0.01))

for i in range(10000):
    batch, labels = loader.get_batch()
    # Named loss_value so it does not shadow the SoftmaxCrossEntropy instance above.
    pred, loss_value = conv_network.train(batch, labels)

    if (i + 1) % 100 == 0:
        accuracy = eval_accuracy(pred, labels)
        print("Training Accuracy: %f" % accuracy)

    if (i + 1) % 500 == 0:
        accuracy = eval_accuracy(conv_network.eval(loader.validation_set), loader.validation_labels)
        print("Validation Accuracy: %f \n" % accuracy)

accuracy = eval_accuracy(conv_network.eval(loader.test_set), loader.test_labels)
print("Test Accuracy: %f \n" % accuracy)
Cloning of 87 kDa outer membrane protein gene of Pasteurella multocida P52. Pasteurella multocida serotype B:2 is the causative agent of haemorrhagic septicaemia, a fatal disease of cattle and buffaloes. A formalin-inactivated whole-cell bacterin is used to prepare vaccines in India. However, outer membrane proteins (OMPs) of P. multocida have been reported to be potential immunogens. The 87 kDa OMP of P. multocida P52, serotype B:2, has been identified as one of the major antigens because this protein reacted with serum from vaccinated animals. The gene omp87, encoding the 87 kDa OMP, was amplified and cloned into the pBluescript SK(-) vector. This gene was found to localise to a 9.0 kb HindIII fragment of the P. multocida genome.
// Displays all existing databases
public void listaDB(){
    Optional<List<Document>> databases = controller.getDataBases();
    System.out.println("All existing databases:");
    databases.ifPresent(documents -> documents.forEach(db -> System.out.println(db.toJson())));
}
<reponame>bsairline/hamonize /* * AccessControlRule.cpp - implementation of the AccessControlRule class * * Copyright (c) 2016-2021 <NAME> <<EMAIL>> * * This file is part of Veyon - https://veyon.io * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program (see COPYING); if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. * */ #include <QJsonArray> #include "AccessControlRule.h" AccessControlRule::AccessControlRule() : m_name(), m_description(), m_action( Action::None ), m_parameters(), m_invertConditions( false ), m_ignoreConditions( false ) { } AccessControlRule::AccessControlRule(const AccessControlRule &other) : m_name( other.name() ), m_description( other.description() ), m_action( other.action() ), m_parameters( other.parameters() ), m_invertConditions( other.areConditionsInverted() ), m_ignoreConditions( other.areConditionsIgnored() ) { } AccessControlRule::AccessControlRule(const QJsonValue &jsonValue) : m_name(), m_description(), m_action( Action::None ), m_parameters(), m_invertConditions( false ), m_ignoreConditions( false ) { if( jsonValue.isObject() ) { QJsonObject json = jsonValue.toObject(); m_name = json[QStringLiteral("Name")].toString(); m_description = json[QStringLiteral("Description")].toString(); m_action = static_cast<Action>( json[QStringLiteral("Action")].toInt() ); m_invertConditions = json[QStringLiteral("InvertConditions")].toBool(); m_ignoreConditions = json[QStringLiteral("IgnoreConditions")].toBool(); auto parameters = json[QStringLiteral("Parameters")].toArray(); for( auto parametersValue : parameters ) { QJsonObject parametersObj = parametersValue.toObject(); auto condition = static_cast<Condition>( parametersObj[QStringLiteral("Condition")].toInt() ); m_parameters[condition].enabled = parametersObj[QStringLiteral("Enabled")].toBool(); m_parameters[condition].subject = static_cast<Subject>( parametersObj[QStringLiteral("Subject")].toInt() ); m_parameters[condition].argument = parametersObj[QStringLiteral("Argument")].toString(); } } } AccessControlRule& AccessControlRule::operator=( const AccessControlRule& other ) { m_name = other.name(); m_description = other.description(); m_action = other.action(); m_parameters = other.parameters(); m_invertConditions = other.areConditionsInverted(); m_ignoreConditions = other.areConditionsIgnored(); return *this; } QJsonObject AccessControlRule::toJson() const { QJsonObject json; json[QStringLiteral("Name")] = m_name; json[QStringLiteral("Description")] = m_description; json[QStringLiteral("Action")] = static_cast<int>( m_action ); json[QStringLiteral("InvertConditions")] = m_invertConditions; json[QStringLiteral("IgnoreConditions")] = m_ignoreConditions; QJsonArray parameters; for( auto it = m_parameters.constBegin(), end = m_parameters.constEnd(); it != end; ++it ) { if( isConditionEnabled( it.key() ) ) { QJsonObject parametersObject; parametersObject[QStringLiteral("Condition")] = static_cast<int>( it.key() ); 
parametersObject[QStringLiteral("Enabled")] = true; parametersObject[QStringLiteral("Subject")] = static_cast<int>( subject( it.key() ) ); parametersObject[QStringLiteral("Argument")] = argument( it.key() ); parameters.append( parametersObject ); } } json[QStringLiteral("Parameters")] = parameters; return json; }
<filename>pkg/reader/frm/internal/types.go
// Package internal holds the frm file struct definitions.
// NOTE: all numbers are stored as little-endian.
package internal

// FrmFileHeader defines the frm file header struct.
// NOTE: the header is always 64 bytes and is padded to 4096 bytes (4KB).
type FrmFileHeader struct {
	// MagicNumber is the fixed number defined as .frm format file.
	// Value: 0x01fe
	MagicNumber uint16 // 0x00
	// FrmVer is the frm version.
	// FRM_VER (which is in include/mysql_version.h) +3 +test(create_info->varchar)
	FrmVer uint8 // 0x02
	// LegacyDBType is the database type (engine).
	LegacyDBType uint8 // 0x03
	// Unknown0 defines the unknown field.
	// UNUSED.
	_ uint8 // 0x04
	// Unknown1 defines the unknown field.
	_ uint8 // 0x05
	// IOSize is the IO size, i.e. the size of the file header and other sections.
	// Value: always 4096
	IOSize uint16 // 0x06
	// Unknown2 defines the unknown field.
	_ uint16 // 0x08
	// Unknown3 defines the unknown field.
	_ uint32 // 0x0a
	// TmpKeyLength is the key length; if the value is 0xFFFF then the real value is stored at 0x2f.
	TmpKeyLength uint16 // 0x0e
	// RecLength is the default value length.
	RecLength uint16 // 0x10
	// MaxRows is the table MAX_ROWS option.
	MaxRows uint32 // 0x12
	// MinRows is the table MIN_ROWS option.
	MinRows uint32 // 0x16
	// Unknown4 defines the unknown field.
	// Always 0x0200, meaning "use long pack-fields".
	_ uint16 // 0x1a
	// KeyInfoLength is the keyinfo section length.
	KeyInfoLength uint16 // 0x1c
	// CreateOptions is the db_create_options.
	// EX: HA_LONG_BLOB_PTR
	CreateOptions uint16 // 0x1e
	// Unknown5 defines the unknown field.
	// UNUSED.
	_ uint8 // 0x20
	// Version5FrmFile is the marker for a 5.0 frm file.
	// Value: always 1 after mysql 5
	Version5FrmFile uint8 // 0x21
	// AvgRowLength is the table AVG_ROW_LENGTH option.
	AvgRowLength uint32 // 0x22
	// DefaultTableCharset is the table DEFAULT CHARACTER SET option.
	DefaultTableCharset uint8 // 0x26
	// Unknown7 defines the unknown field.
	_ uint8 // 0x27
	// RowType is the row type.
	RowType uint8 // 0x28
	// TableCharsetHighByte is the high byte of the charset.
	TableCharsetHighByte uint8 // 0x29
	// StatsSamplePages is the table STATS_SAMPLE_PAGES option.
	StatsSamplePages uint16 // 0x2a
	// StatsAutoRecalc is the table STATS_AUTO_RECALC option.
	StatsAutoRecalc uint8 // 0x2c
	// Unknown8 defines the unknown field.
	_ uint16 // 0x2d
	// KeyLength is the key length.
	KeyLength uint32 // 0x2f
	// MysqlVersionID is the mysql version.
	MysqlVersionID uint32 // 0x33
	// ExtraSize is the size of the extra info.
	// EX: CONNECTION=<>
	//     ENGINE=<>
	//     PARTITION BY clause + partitioning flags
	//     WITH PARSER names (MySQL 5.1+)
	//     Table COMMENT
	ExtraSize uint32 // 0x37
	// ExtraRecBufLength is the extra rec buf length.
	ExtraRecBufLength uint16 // 0x3b
	// DefaultPartDBType is the enum legacy_db_type, if the table is partitioned.
	DefaultPartDBType uint8 // 0x3d
	// KeyBlockSize is the table KEY_BLOCK_SIZE option.
	KeyBlockSize uint16 // 0x3e
	// Unknown9 defines the unknown field.
	// NOTE: padding
	// _ [4096 - 64]byte // 0x40
}

// FrmKeyInfoSectionHeader defines the frm file key information section header.
// NOTE: it always starts at 0x1000 and is padded to 6 bytes.
type FrmKeyInfoSectionHeader struct {
	// Data is the header.
	// If Data[0] has the high bit (0x80) set:
	// Then:
	//    Key Count is Data[1] << 7 | Data[0] & 0x7F
	//    Key Parts Count is Data[3] << 8 | Data[2] (a little-endian uint16)
	// Else:
	//    Key Count is Data[0]
	//    Key Parts Count is Data[1]
	// NOTE:
	//    Key Count defines the index count in this table (including the PRIMARY KEY)
	//    Key Parts Count defines the index column count across all indexes
	Data [4]byte
	// NOTE: padding
	_ [2]byte
}

// Count returns the keyCount and keyPartsCount.
func (k *FrmKeyInfoSectionHeader) Count() (keyCount int, keyPartsCount int) {
	// The high bit of Data[0] marks the extended (escaped) encoding; the key
	// parts count is then a plain little-endian uint16 in Data[2..3].
	if k.Data[0]&0x80 != 0 {
		return int(k.Data[1])<<7 | int(k.Data[0])&0x7F, int(k.Data[3])<<8 | int(k.Data[2])
	}
	return int(k.Data[0]), int(k.Data[1])
}

// KeyMetadata defines the metadata of an index key.
type KeyMetadata struct {
	// Flags is the key flags, such as HA_USES_COMMENT.
	Flags uint16
	// KeyLength is the length of the index.
	KeyLength uint16
	// UserDefinedKeyParts is the column count covered by the index.
	UserDefinedKeyParts uint8
	// Algorithm is the index algorithm, such as HA_KEY_ALG_BTREE.
	Algorithm uint8
	// BlockSize is the block size of the index, the table KEY_BLOCK_SIZE option.
	BlockSize uint16
}

// KeyParts defines the struct of the user defined key parts of an index key.
type KeyParts struct {
	// FieldNumber is the field index of the current part.
	// NOTE: this field should be masked with 0x3FFF.
	FieldNumber uint16
	// Offset in the mysql internal data struct.
	// NOTE: internal-usage
	Offset uint16
	// KeyPartFlag is the flag of the key part.
	KeyPartFlag int8
	// KeyType is the key_type, see ha_base_keytype.
	KeyType uint16
	// Length is the column index length.
	Length uint16
}

// ColumnMetadata defines the metadata of columns.
type ColumnMetadata struct {
	// MagicNumber is the fixed number defined as the .frm column metadata section.
	// Value: 0x03
	MagicNumber uint16 // 0x00
	// NumberOfColumn is the count of columns.
	NumberOfColumn uint16 // 0x02
	// Pos is the length of all screens.
	Pos uint16 // 0x04
	// BytesInColumn is the bytes in all columns.
	BytesInColumn uint16 // 0x06
	_ [4]byte // 0x08
	// Length is the column length.
	Length uint16 // 0x0c
	// IntervalCount is the number of distinct SET/ENUM columns.
	IntervalCount uint16 // 0x0e
	// IntervalParts is the number of distinct strings in SET/ENUM columns.
	IntervalParts uint16 // 0x10
	// IntLength is the interval data length.
	IntLength uint16 // 0x12
	_ [6]byte // 0x14
	// NumberOfNullColumn is the number of nullable columns.
	NumberOfNullColumn uint16 // 0x1a
	// CommentLength is the comment length of all columns.
	CommentLength uint16 // 0x1c
}

// ColumnField defines the column information of a field.
type ColumnField struct {
	_ [2]byte // 0x00
	// Length is the column length.
	Length uint8 // 0x02
	// BytesInColumn is the bytes length.
	BytesInColumn uint16 // 0x03
	_ [2]byte // 0x05
	// Unireg is unknown.
	Unireg uint8 // 0x07
	// Flags is the column flag, such as FIELDFLAG_MAYBE_NULL.
	Flags uint16 // 0x08
	// UniregType is the type for the field, such as NEXT_NUMBER.
	UniregType uint8 // 0x0a
	// CharsetLow is the charset number (<<8).
	CharsetLow uint8 // 0x0b
	// IntervalNumber is the SET/ENUM interval number of the field.
	IntervalNumber uint8 // 0x0c
	// DataType is the field type, see enum_field_types.
	DataType uint8 // 0x0d
	// Charset is the charset number.
	Charset uint8 // 0x0e
	// CommentLength is the comment string length of the field.
	CommentLength uint8 // 0x0f
	_ [1]byte
}
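As a quick cross-check of the bit-packing that Count implements, here is a standalone sketch of the same decoding (in Python for brevity; the sample bytes are hypothetical):

# Sketch of the frm keyinfo header decoding described above; sample bytes
# are made up. The key count uses a 7-bit escape when it exceeds 127, and
# the key parts count is then a plain little-endian uint16.
def key_counts(data):
    if data[0] & 0x80:
        # Extended form: low 7 bits of the count in data[0], the rest in data[1].
        key_count = (data[1] << 7) | (data[0] & 0x7F)
        key_parts = data[2] | (data[3] << 8)
    else:
        key_count = data[0]
        key_parts = data[1]
    return key_count, key_parts

assert key_counts(bytes([0x02, 0x05, 0x00, 0x00])) == (2, 5)
assert key_counts(bytes([0x81, 0x01, 0x10, 0x01])) == (129, 272)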