text
stringlengths
2
100k
meta
dict
# DO NOT EDIT. This file is generated by tools/proto_format/proto_sync.py. load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") licenses(["notice"]) # Apache 2 api_proto_package( deps = [ "//envoy/annotations:pkg", "//envoy/api/v2/core:pkg", "//envoy/config/bootstrap/v2:pkg", "//envoy/service/tap/v2alpha:pkg", "//envoy/type:pkg", "@com_github_cncf_udpa//udpa/annotations:pkg", ], )
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;

namespace TodoList
{
    [Command(
        Name: "reset",
        ShortDescription: "Marks a todo task as incomplete.",
        ErrorText: "",
        // Fixed grammar: "an task" -> "a task".
        LongHelpText: "Change the status of a todo task to incomplete. This restores a task that has been marked as abandoned or complete to its default status."
    )]
    internal class Reset : ICommand
    {
        [SwitchDocumentation("The ID of the task to unmark.")]
        [DefaultSwitch(0)]
        public UInt32 id = 0; // 0 means "not specified"; treated as an error in Invoke.

        [SwitchDocumentation("Path to task file.")]
        public string file = "todo.txt";

        /// <summary>
        /// Loads the task file, finds the entry with the requested ID, resets its
        /// status to the default marker ("-"), saves the file, and prints the entry.
        /// Prints an error and returns early if no file or no ID was given, or the
        /// ID does not exist in the file.
        /// </summary>
        /// <param name="PipedArguments">Arguments piped from a preceding command; not used by this command.</param>
        public void Invoke(Dictionary<String, Object> PipedArguments)
        {
            if (String.IsNullOrEmpty(file))
            {
                Console.WriteLine("No file specified. How did you manage that? It defaults to todo.txt");
                return;
            }

            if (id == 0)
            {
                Console.WriteLine("You need to specify the entry you're editing.");
                return;
            }

            var list = EntryList.LoadFile(file, true);
            var entry = list.Root.FindChildWithID(id);
            if (entry == null)
            {
                Console.WriteLine("Could not find entry with ID {0}.", id);
                return;
            }

            // "-" is the default/incomplete status marker used by the task file format.
            entry.Status = "-";
            EntryList.SaveFile(file, list);
            Presentation.OutputEntry(entry, null, 0);
        }
    }
}
{ "pile_set_name": "Github" }
/*
 * Marvell Orion SoC clocks
 *
 * Copyright (C) 2014 Thomas Petazzoni
 *
 * Thomas Petazzoni <[email protected]>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */

#include <linux/kernel.h>
#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/of.h>
#include "common.h"

/*
 * Single fixed ratio clock exposed by every Orion variant: the DDR
 * clock, derived from the CPU clock via the per-SoC get_clk_ratio hook.
 */
static const struct coreclk_ratio orion_coreclk_ratios[] __initconst = {
	{ .id = 0, .name = "ddrclk", }
};

/*
 * Orion 5182
 */

/* Bit position and mask of the TCLK option in the Sample-At-Reset register. */
#define SAR_MV88F5182_TCLK_FREQ		8
#define SAR_MV88F5182_TCLK_FREQ_MASK	0x3

/* Decode the TCLK frequency from SAR; returns 0 for unknown option values. */
static u32 __init mv88f5182_get_tclk_freq(void __iomem *sar)
{
	u32 opt = (readl(sar) >> SAR_MV88F5182_TCLK_FREQ) &
		SAR_MV88F5182_TCLK_FREQ_MASK;
	if (opt == 1)
		return 150000000;
	else if (opt == 2)
		return 166666667;
	else
		return 0;
}

/* Bit position and mask of the CPU frequency option in SAR. */
#define SAR_MV88F5182_CPU_FREQ		4
#define SAR_MV88F5182_CPU_FREQ_MASK	0xf

/* Decode the CPU frequency from SAR; returns 0 for unknown option values. */
static u32 __init mv88f5182_get_cpu_freq(void __iomem *sar)
{
	u32 opt = (readl(sar) >> SAR_MV88F5182_CPU_FREQ) &
		SAR_MV88F5182_CPU_FREQ_MASK;
	if (opt == 0)
		return 333333333;
	else if (opt == 1 || opt == 2)
		return 400000000;
	else if (opt == 3)
		return 500000000;
	else
		return 0;
}

/*
 * DDR-to-CPU clock ratio, keyed off the same CPU frequency option bits.
 * 'id' is ignored: orion_coreclk_ratios has a single entry (ddrclk).
 * Unknown options yield mult=0 (a zero-rate clock).
 */
static void __init mv88f5182_get_clk_ratio(void __iomem *sar, int id,
					   int *mult, int *div)
{
	u32 opt = (readl(sar) >> SAR_MV88F5182_CPU_FREQ) &
		SAR_MV88F5182_CPU_FREQ_MASK;
	if (opt == 0 || opt == 1) {
		*mult = 1;
		*div = 2;
	} else if (opt == 2 || opt == 3) {
		*mult = 1;
		*div = 3;
	} else {
		*mult = 0;
		*div = 1;
	}
}

static const struct coreclk_soc_desc mv88f5182_coreclks = {
	.get_tclk_freq = mv88f5182_get_tclk_freq,
	.get_cpu_freq = mv88f5182_get_cpu_freq,
	.get_clk_ratio = mv88f5182_get_clk_ratio,
	.ratios = orion_coreclk_ratios,
	.num_ratios = ARRAY_SIZE(orion_coreclk_ratios),
};

static void __init mv88f5182_clk_init(struct device_node *np)
{
	return mvebu_coreclk_setup(np, &mv88f5182_coreclks);
}

CLK_OF_DECLARE(mv88f5182_clk, "marvell,mv88f5182-core-clock", mv88f5182_clk_init);

/*
 * Orion 5281
 */

static u32 __init mv88f5281_get_tclk_freq(void __iomem *sar)
{
	/* On 5281, tclk is always 166 Mhz */
	return 166666667;
}

#define SAR_MV88F5281_CPU_FREQ		4
#define SAR_MV88F5281_CPU_FREQ_MASK	0xf

/* Decode the CPU frequency from SAR; returns 0 for unknown option values. */
static u32 __init mv88f5281_get_cpu_freq(void __iomem *sar)
{
	u32 opt = (readl(sar) >> SAR_MV88F5281_CPU_FREQ) &
		SAR_MV88F5281_CPU_FREQ_MASK;
	if (opt == 1 || opt == 2)
		return 400000000;
	else if (opt == 3)
		return 500000000;
	else
		return 0;
}

/* DDR-to-CPU ratio for 5281; 'id' ignored (single ddrclk ratio). */
static void __init mv88f5281_get_clk_ratio(void __iomem *sar, int id,
					   int *mult, int *div)
{
	u32 opt = (readl(sar) >> SAR_MV88F5281_CPU_FREQ) &
		SAR_MV88F5281_CPU_FREQ_MASK;
	if (opt == 1) {
		*mult = 1;
		*div = 2;
	} else if (opt == 2 || opt == 3) {
		*mult = 1;
		*div = 3;
	} else {
		*mult = 0;
		*div = 1;
	}
}

static const struct coreclk_soc_desc mv88f5281_coreclks = {
	.get_tclk_freq = mv88f5281_get_tclk_freq,
	.get_cpu_freq = mv88f5281_get_cpu_freq,
	.get_clk_ratio = mv88f5281_get_clk_ratio,
	.ratios = orion_coreclk_ratios,
	.num_ratios = ARRAY_SIZE(orion_coreclk_ratios),
};

static void __init mv88f5281_clk_init(struct device_node *np)
{
	return mvebu_coreclk_setup(np, &mv88f5281_coreclks);
}

CLK_OF_DECLARE(mv88f5281_clk, "marvell,mv88f5281-core-clock", mv88f5281_clk_init);

/*
 * Orion 6183
 */

#define SAR_MV88F6183_TCLK_FREQ		9
#define SAR_MV88F6183_TCLK_FREQ_MASK	0x1

/* Decode the TCLK frequency from SAR; returns 0 for unknown option values. */
static u32 __init mv88f6183_get_tclk_freq(void __iomem *sar)
{
	u32 opt = (readl(sar) >> SAR_MV88F6183_TCLK_FREQ) &
		SAR_MV88F6183_TCLK_FREQ_MASK;
	if (opt == 0)
		return 133333333;
	else if (opt == 1)
		return 166666667;
	else
		return 0;
}

#define SAR_MV88F6183_CPU_FREQ		1
#define SAR_MV88F6183_CPU_FREQ_MASK	0x3f

/* Decode the CPU frequency from SAR; returns 0 for unknown option values. */
static u32 __init mv88f6183_get_cpu_freq(void __iomem *sar)
{
	u32 opt = (readl(sar) >> SAR_MV88F6183_CPU_FREQ) &
		SAR_MV88F6183_CPU_FREQ_MASK;
	if (opt == 9)
		return 333333333;
	else if (opt == 17)
		return 400000000;
	else
		return 0;
}

/* DDR-to-CPU ratio for 6183; 'id' ignored (single ddrclk ratio). */
static void __init mv88f6183_get_clk_ratio(void __iomem *sar, int id,
					   int *mult, int *div)
{
	u32 opt = (readl(sar) >> SAR_MV88F6183_CPU_FREQ) &
		SAR_MV88F6183_CPU_FREQ_MASK;
	if (opt == 9 || opt == 17) {
		*mult = 1;
		*div = 2;
	} else {
		*mult = 0;
		*div = 1;
	}
}

static const struct coreclk_soc_desc mv88f6183_coreclks = {
	.get_tclk_freq = mv88f6183_get_tclk_freq,
	.get_cpu_freq = mv88f6183_get_cpu_freq,
	.get_clk_ratio = mv88f6183_get_clk_ratio,
	.ratios = orion_coreclk_ratios,
	.num_ratios = ARRAY_SIZE(orion_coreclk_ratios),
};

static void __init mv88f6183_clk_init(struct device_node *np)
{
	return mvebu_coreclk_setup(np, &mv88f6183_coreclks);
}

CLK_OF_DECLARE(mv88f6183_clk, "marvell,mv88f6183-core-clock", mv88f6183_clk_init);
{ "pile_set_name": "Github" }
/*
 *    This file is part of ACADO Toolkit.
 *
 *    ACADO Toolkit -- A Toolkit for Automatic Control and Dynamic Optimization.
 *    Copyright (C) 2008-2014 by Boris Houska, Hans Joachim Ferreau,
 *    Milan Vukov, Rien Quirynen, KU Leuven.
 *    Developed within the Optimization in Engineering Center (OPTEC)
 *    under supervision of Moritz Diehl. All rights reserved.
 *
 *    ACADO Toolkit is free software; you can redistribute it and/or
 *    modify it under the terms of the GNU Lesser General Public
 *    License as published by the Free Software Foundation; either
 *    version 3 of the License, or (at your option) any later version.
 *
 *    ACADO Toolkit is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *    Lesser General Public License for more details.
 *
 *    You should have received a copy of the GNU Lesser General Public
 *    License along with ACADO Toolkit; if not, write to the Free Software
 *    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

/**
 *    \file include/acado/integrator/integrator_runge_kutta78.hpp
 *    \author Boris Houska, Hans Joachim Ferreau
 */

#ifndef ACADO_TOOLKIT_INTEGRATOR_RUNGE_KUTTA78_HPP
#define ACADO_TOOLKIT_INTEGRATOR_RUNGE_KUTTA78_HPP

#include <acado/integrator/integrator_fwd.hpp>

BEGIN_NAMESPACE_ACADO

/**
 *  \brief Implements the Runge-Kutta-78 scheme for integrating ODEs.
 *
 *  \ingroup NumericalAlgorithms
 *
 *  The class IntegratorRK78 implements the Runge-Kutta-78 scheme
 *  for integrating ordinary differential equations (ODEs).
 *  It only declares the Butcher-tableau initialization; everything
 *  else is inherited from the IntegratorRK base class.
 *
 *  \author Boris Houska, Hans Joachim Ferreau
 */
class IntegratorRK78 : public IntegratorRK{

    //
    // PUBLIC MEMBER FUNCTIONS:
    //

    public:

        /** Default constructor. */
        IntegratorRK78( );

        /** Constructor that takes the differential equation (right-hand side) to integrate. */
        IntegratorRK78( const DifferentialEquation &rhs_ );

        /** Copy constructor (deep copy). */
        IntegratorRK78( const IntegratorRK78& arg );

        /** Destructor. */
        virtual ~IntegratorRK78( );

        /** Assignment operator (deep copy). */
        virtual IntegratorRK78& operator=( const IntegratorRK78& arg );

        /** The (virtual) copy constructor */
        virtual Integrator* clone() const;

    protected:

        /** This routine initializes the coefficients of the Butcher Tableau. */
        virtual void initializeButcherTableau();
};

CLOSE_NAMESPACE_ACADO

#include <acado/integrator/integrator_runge_kutta78.ipp>

#endif  // ACADO_TOOLKIT_INTEGRATOR_RUNGE_KUTTA78_HPP

// end of file.
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: d460b468d82a4bf4693f7e3f5d22f09e timeCreated: 1505825458 licenseType: Free NativeFormatImporter: mainObjectFileID: 2100000 userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
<!-- Any copyright is dedicated to the Public Domain. http://creativecommons.org/publicdomain/zero/1.0/ --> <!DOCTYPE html> <title>CSS Test: Test a passing non-custom property declaration in an @supports rule whose value contains a variable reference and a CDO token.</title> <link rel="author" title="Cameron McCormack" href="mailto:[email protected]"> <link rel="help" href="http://www.w3.org/TR/css-variables-1/#using-variables"> <link rel="match" href="support/color-green-ref.html"> <style> body { color: red; } @supports (color: var(--a) <!--) { p { color: green; } } </style> <p>This text must be green.</p>
{ "pile_set_name": "Github" }
// +build !go1.8

package request

import "io"

// noBody is an io.ReadCloser with no bytes. Read always returns EOF
// and Close always returns nil. It can be used in an outgoing client
// request to explicitly signal that a request has zero bytes.
// An alternative, however, is to simply set Request.Body to nil.
//
// Copy of Go 1.8 NoBody type from net/http/http.go
type noBody struct{}

func (noBody) Read([]byte) (int, error)         { return 0, io.EOF }
func (noBody) Close() error                     { return nil }
func (noBody) WriteTo(io.Writer) (int64, error) { return 0, nil }

// NoBody is an empty reader that will trigger the Go HTTP client to not include
// any body in the HTTP request.
var NoBody = noBody{}

// ResetBody rewinds the request body back to its starting position, and
// sets the HTTP Request body reference. When the body is read prior
// to being sent in the HTTP request it will need to be rewound.
//
// ResetBody will automatically be called by the SDK's build handler, but if
// the request is being used directly ResetBody must be called before the request
// is Sent. SetStringBody, SetBufferBody, and SetReaderBody will automatically
// call ResetBody.
func (r *Request) ResetBody() {
	body, err := r.getNextRequestBody()
	if err != nil {
		r.Error = err
		return
	}

	r.HTTPRequest.Body = body
}
{ "pile_set_name": "Github" }
{ "name": "phpunit/php-file-iterator", "description": "FilterIterator implementation that filters files based on a list of suffixes.", "type": "library", "keywords": [ "iterator", "filesystem" ], "homepage": "https://github.com/sebastianbergmann/php-file-iterator/", "license": "BSD-3-Clause", "authors": [ { "name": "Sebastian Bergmann", "email": "[email protected]", "role": "lead" } ], "support": { "issues": "https://github.com/sebastianbergmann/php-file-iterator/issues" }, "require": { "php": "^7.1" }, "require-dev": { "phpunit/phpunit": "^7.1" }, "autoload": { "classmap": [ "src/" ] }, "extra": { "branch-alias": { "dev-master": "2.0.x-dev" } } }
{ "pile_set_name": "Github" }
/**
 * Copyright 2015-2017 Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wildfly.swarm.container.runtime.usage;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.stream.Collectors;

import javax.enterprise.context.ApplicationScoped;

import org.jboss.modules.Module;

/**
 * {@link UsageProvider} that looks up a {@code usage.txt} resource on the
 * class loader of the {@code thorntail.application} module and returns its
 * text content.
 *
 * Created by bob on 8/30/17.
 */
@ApplicationScoped
public class ModuleUsageProvider implements UsageProvider {

    // Candidate resource locations, searched in this order:
    // META-INF/usage.txt, then WEB-INF/usage.txt, then usage.txt at the root.
    String USAGE_TXT = "usage.txt";

    String META_INF_USAGE_TXT = "META-INF/" + USAGE_TXT;

    String WEB_INF_USAGE_TXT = "WEB-INF/" + USAGE_TXT;

    /**
     * Returns the raw text of the first usage.txt resource found on the
     * application module's class loader, with lines re-joined by '\n',
     * or {@code null} if no such resource exists.
     */
    @Override
    public String getRawUsageText() throws Exception {
        Module module = Module.getBootModuleLoader().loadModule("thorntail.application");

        ClassLoader cl = module.getClassLoader();

        InputStream in = cl.getResourceAsStream(META_INF_USAGE_TXT);

        if (in == null) {
            in = cl.getResourceAsStream(WEB_INF_USAGE_TXT);
        }

        if (in == null) {
            in = cl.getResourceAsStream(USAGE_TXT);
        }

        if (in != null) {
            // try-with-resources closes the reader, which closes the stream.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
                return reader
                        .lines()
                        .collect(Collectors.joining("\n"));
            }
        }

        return null;
    }
}
{ "pile_set_name": "Github" }
// Minimal class holding two private boolean flags.
// NOTE(review): x and y are deliberately left uninitialized, matching the
// original; the default constructor performs no initialization.
class nyan
{
    bool x;
    bool y;

public:
    nyan() {}
    ~nyan() {}
};
{ "pile_set_name": "Github" }
import React, { Component } from 'react'; import { DataTable } from '../../components/datatable/DataTable'; import { Column } from '../../components/column/Column'; import { CustomerService } from '../service/CustomerService'; import { TabView, TabPanel } from '../../components/tabview/TabView'; import { Button } from '../../components/button/Button'; import { LiveEditor } from '../liveeditor/LiveEditor'; import { AppInlineHeader } from '../../AppInlineHeader'; export class DataTablePaginatorDemo extends Component { constructor(props) { super(props); this.state = { customers: [] }; this.customerService = new CustomerService(); } componentDidMount() { this.customerService.getCustomersLarge().then(data => this.setState({ customers: data })); } render() { const paginatorLeft = <Button type="button" icon="pi pi-refresh" className="p-button-text" />; const paginatorRight = <Button type="button" icon="pi pi-cloud" className="p-button-text" />; return ( <div> <div className="content-section introduction"> <AppInlineHeader changelogText="dataTable"> <h1>DataTable <span>Paginator</span></h1> <p>Pagination is enabled by setting paginator property to true, rows attribute defines the number of rows per page and pageLinks specify the the number of page links to display.</p> </AppInlineHeader> </div> <div className="content-section implementation"> <div className="card"> <DataTable value={this.state.customers} paginator paginatorTemplate="CurrentPageReport FirstPageLink PrevPageLink PageLinks NextPageLink LastPageLink RowsPerPageDropdown" currentPageReportTemplate="Showing {first} to {last} of {totalRecords}" rows={10} rowsPerPageOptions={[10,20,50]} paginatorLeft={paginatorLeft} paginatorRight={paginatorRight}> <Column field="name" header="Name"></Column> <Column field="country.name" header="Country"></Column> <Column field="company" header="Company"></Column> <Column field="representative.name" header="Representative"></Column> </DataTable> </div> </div> 
<DataTablePaginatorDemoDoc></DataTablePaginatorDemoDoc> </div> ); } } export class DataTablePaginatorDemoDoc extends Component { constructor(props) { super(props); this.sources = { 'class': { tabName: 'Class Source', content: ` import React, { Component } from 'react'; import { DataTable } from 'primereact/datatable'; import { Column } from 'primereact/column'; import { CustomerService } from '../service/CustomerService'; import { Button } from 'primereact/button'; export class DataTablePaginatorDemo extends Component { constructor(props) { super(props); this.state = { customers: [] }; this.customerService = new CustomerService(); } componentDidMount() { this.customerService.getCustomersLarge().then(data => this.setState({ customers: data })); } render() { const paginatorLeft = <Button type="button" icon="pi pi-refresh" className="p-button-text" />; const paginatorRight = <Button type="button" icon="pi pi-cloud" className="p-button-text" />; return ( <div> <div className="card"> <DataTable value={this.state.customers} paginator paginatorTemplate="CurrentPageReport FirstPageLink PrevPageLink PageLinks NextPageLink LastPageLink RowsPerPageDropdown" currentPageReportTemplate="Showing {first} to {last} of {totalRecords}" rows={10} rowsPerPageOptions={[10,20,50]} paginatorLeft={paginatorLeft} paginatorRight={paginatorRight}> <Column field="name" header="Name"></Column> <Column field="country.name" header="Country"></Column> <Column field="company" header="Company"></Column> <Column field="representative.name" header="Representative"></Column> </DataTable> </div> </div> ); } } ` }, 'hooks': { tabName: 'Hooks Source', content: ` import React, { useState, useEffect } from 'react'; import {DataTable} from 'primereact/datatable'; import {Column} from 'primereact/column'; import {CarService} from '../service/CarService'; import {Button} from 'primereact/button'; const DataTablePaginatorDemo = () => { const [cars, setCars] = useState([]); const carservice = new CarService(); 
useEffect(() => { carservice.getCarsLarge().then(data => setCars(data)); }, []); // eslint-disable-line react-hooks/exhaustive-deps const paginatorLeft = <Button icon="pi pi-refresh"/>; return ( <div className="datatable-paginator-demo"> <DataTable value={cars} paginator paginatorLeft={paginatorLeft} paginatorTemplate="FirstPageLink PrevPageLink PageLinks NextPageLink LastPageLink CurrentPageReport RowsPerPageDropdown" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries" rows={10} rowsPerPageOptions={[5,10,20]} > <Column field="vin" header="Vin" /> <Column field="year" header="Year" /> <Column field="brand" header="Brand" /> <Column field="color" header="Color" /> </DataTable> </div> ); } ` }, 'ts': { tabName: 'TS Source', content: ` import React, { useState, useEffect } from 'react'; import {DataTable} from 'primereact/datatable'; import {Column} from 'primereact/column'; import {CarService} from '../service/CarService'; import {Button} from 'primereact/button'; const DataTablePaginatorDemo = () => { const [cars, setCars] = useState([]); const carservice = new CarService(); useEffect(() => { carservice.getCarsLarge().then(data => setCars(data)); }, []); // eslint-disable-line react-hooks/exhaustive-deps const paginatorLeft = <Button icon="pi pi-refresh"/>; return ( <div className="datatable-paginator-demo"> <DataTable value={cars} paginator paginatorLeft={paginatorLeft} paginatorTemplate="FirstPageLink PrevPageLink PageLinks NextPageLink LastPageLink CurrentPageReport RowsPerPageDropdown" currentPageReportTemplate="Showing {first} to {last} of {totalRecords} entries" rows={10} rowsPerPageOptions={[5,10,20]} > <Column field="vin" header="Vin" /> <Column field="year" header="Year" /> <Column field="brand" header="Brand" /> <Column field="color" header="Color" /> </DataTable> </div> ); } ` } } this.extFiles = { 'index.css': ` .datatable-paginator-demo .p-paginator-current { float: right; } ` } } shouldComponentUpdate() { return false; } 
render() { return ( <div className="content-section documentation"> <TabView> <TabPanel header="Source"> <LiveEditor name="DataTablePaginatorDemo" sources={this.sources} service="CarService" data="cars-large" extFiles={this.extFiles} /> </TabPanel> </TabView> </div> ) } }
{ "pile_set_name": "Github" }
concrete SentenceGrc of Sentence = CatGrc ** open Prelude, ResGrc, (T=TenseGrc) in { flags optimize=all_subs ; lin PredVP np vp = let agr = np.a in mkClause (np.s ! Nom) agr vp ; PredSCVP sc vp = mkClause sc.s (agrP3 Sg) vp ; ImpVP vp = { -- Sketch only TODO s = \\pol,f => let imp = vp.s ! (VPImp f) ; dont = case pol of { Pos => [] ; Neg => "mh'" } ; pn = case f of {ImpF IPres SgP2 => <Sg, P2> ; ImpF IPres SgP3 => <Sg, P3> ; ImpF IPres PlP2 => <Pl, P2> ; ImpF IPres PlP3 => <Pl, P3> ; ImpF IAor SgP2 => <Sg, P2> ; ImpF IAor SgP3 => <Sg, P3> ; ImpF IAor PlP2 => <Pl, P2> ; ImpF IAor PlP3 => <Pl, P3> ; ImpF IPerf SgP2 => <Sg, P2> ; ImpF IPerf SgP3 => <Sg, P3> ; ImpF IPerf PlP2 => <Pl, P2> ; ImpF IPerf PlP3 => <Pl, P3> } ; n = pn.p1 ; p = pn.p2 in dont ++ imp ++ vp.obj ! (Ag Masc n p) ++ vp.adj ! Masc ! n; } ; SlashVP np vp = -- : NP -> VPSlash -> ClSlash ; -- (whom) he sees mkClause (np.s ! Nom) np.a vp ** {c2 = vp.c2} ; -- AdvSlash slash adv = { -- s = \\t,a,b,o => slash.s ! t ! a ! b ! o ++ adv.s ; -- c2 = slash.c2 -- } ; SlashPrep cl prep = cl ** {c2 = prep} ; SlashVS np vs slash = -- TODO: Check with Greek grammar mkClause (np.s ! Nom) np.a (insertObj (\\_ => conjThat ++ slash.s) (predV vs)) ** {c2 = slash.c2} ; EmbedS s = {s = conjThat ++ s.s} ; -- EmbedQS qs = {s = qs.s ! QIndir} ; -- EmbedVP vp = {s = infVP False vp (agrP3 Sg)} ; --- agr UseCl t p cl = let ta = antTense t.t t.a in lin S { s = t.s ++ p.s ++ cl.s ! ta ! p.p ! SVO } ; -- TODO: Order UseQCl t p cl = { s = \\q => t.s ++ p.s ++ cl.s ! (antTense t.t t.a) ! p.p ! q } ; UseRCl temp p cl = let ta = antTense temp.t temp.a in { s = \\agr => temp.s ++ p.s ++ cl.s ! ta ! p.p ! agr ; c = cl.c } ; UseSlash t p cl = let ta = antTense t.t t.a in { s = t.s ++ p.s ++ cl.s ! ta ! p.p ! OSV ; -- TODO: Order c2 = cl.c2 } ; AdvS a s = {s = a.s ++ s.s} ; -- TODO: check with Greek grammar RelS s r = {s = s.s ++ "," ++ r.s ! agrP3 Sg} ; -- TODO: check with Greek grammar }
{ "pile_set_name": "Github" }
"""Iterator helpers: a peekable iterator wrapper and a one-step-back generator
wrapper, plus small lexer/line utilities.

Modernized to plain Python 3 iteration (builtin ``next``); the previous
``six.advance_iterator`` indirection is no longer needed.
"""
import sys


# Unique sentinel marking "nothing cached". The original implementation used
# ``if self._cache:`` which mistakes falsy items (0, '', None, ...) for an
# empty cache, causing peek()/next() to drop or re-fetch such items.
_NOTHING = object()


class Peeker(object):
    """Generator to enable "peeking" the next item

    Parameters
    ----------
    it : iterator
        iterator which we want to peek in
    dummy :
        if not None, will be returned by peek if the iterator is empty

    Example
    -------
    >>> a = [1, 2, 3, 4]
    >>> peeker = Peeker(a)
    >>> for i in peeker:
    ...     try:
    ...         nxt = peeker.peek()
    ...         print("Next to %d is %d" % (i, nxt))
    ...     except StopIteration:
    ...         print("End of stream", i)
    """
    def __init__(self, it, dummy=None):
        self._it = iter(it)
        self._cache = _NOTHING
        # Bind the peek strategy once, at construction time.
        if dummy is None:
            self.peek = self._peek_no_dummy
        else:
            self.peek = self._peek_dummy
        self._dummy = dummy

    def __next__(self):
        return self.next()

    def next(self):
        """Return the next item, consuming a previously peeked item first."""
        if self._cache is not _NOTHING:
            item = self._cache
            self._cache = _NOTHING
            return item
        return next(self._it)

    def _peek_dummy(self):
        """Peek the next item; return the dummy if the iterator is exhausted."""
        if self._cache is not _NOTHING:
            return self._cache
        try:
            item = next(self._it)
        except StopIteration:
            return self._dummy
        self._cache = item
        return item

    def _peek_no_dummy(self):
        """Peek the next item; raise StopIteration if the iterator is exhausted."""
        if self._cache is not _NOTHING:
            return self._cache
        item = next(self._it)
        self._cache = item
        return item

    def __iter__(self):
        return self


class BackwardGenerator(object):
    """Iterator wrapper remembering the previously yielded item.

    ``previous()`` returns the item yielded just before the current one and
    raises ValueError until at least two items have been consumed.
    """
    def __init__(self, gen):
        self._gen = gen
        self._cache = []  # holds at most [previous, current]

    def next(self):
        c = next(self._gen)
        if len(self._cache) == 2:
            # Slide the window: current becomes previous.
            self._cache = [self._cache[1]]
        self._cache.append(c)
        return c

    def __next__(self):
        return self.next()

    def previous(self):
        """Return the item yielded immediately before the current one."""
        if len(self._cache) < 2:
            raise ValueError("no previous item: fewer than two items consumed")
        return self._cache[0]

    def __iter__(self):
        return self


def print_tokens_simple(lexer):
    """Print every token produced by ``lexer.token()`` until it is exhausted."""
    while True:
        tok = lexer.token()
        if not tok:
            break
        print(tok)


def count_lines(s):
    """Return the number of lines in string ``s``."""
    return len(s.splitlines())
{ "pile_set_name": "Github" }
import React, { Component } from 'react'; import ReactDOM from 'react-dom'; import { Editor } from 'react-draft-wysiwyg'; import { convertToRaw, ContentState, EditorState } from 'draft-js'; import draftToHtml from 'draftjs-to-html'; import htmlToDraft from './library'; import '../node_modules/react-draft-wysiwyg/dist/react-draft-wysiwyg.css'; import './styles.css'; // in constructor, I use your code above, but I change outputEditorState to inputEditorState // in the first Editor, I use this.state.inputEditorState as editorState class Playground extends Component { // state = { // outputEditorState: undefined, // } constructor(props) { super(props) const html = ''; const contentBlock = htmlToDraft(html); if (contentBlock) { const contentState = ContentState.createFromBlockArray(contentBlock.contentBlocks, contentBlock.entityMap); const inputEditorState = EditorState.createWithContent(contentState); this.state = { inputEditorState, }; } } onInputEditorChange = (inputEditorState) => { console.log('into onInputEditorChange') // console.log('*****', inputEditorState.getCurrentContent()) const rawContent = convertToRaw(inputEditorState.getCurrentContent()); const html = draftToHtml(rawContent); console.log('html', html) const contentBlock = htmlToDraft(html); // console.log('1', contentBlock) // console.log('2', convertFromHTML(html) && convertFromHTML(html)) if (contentBlock) { const contentState = ContentState.createFromBlockArray(contentBlock.contentBlocks); const outputEditorState = EditorState.createWithContent(contentState); this.setState({ inputEditorState, outputEditorState, }); // console.log('1', inputEditorState.getCurrentContent().getBlocksAsArray()) // console.log('2', contentBlock.contentBlocks) } } render() { // console.log('*****', this.state.inputEditorState.getCurrentContent()) // value={this.state.inputEditorState && draftToHtml(convertToRaw(this.state.inputEditorState.getCurrentContent()))} return ( <div> <div style={{ height: 200 }}> <Editor 
editorState={this.state.inputEditorState} onEditorStateChange={this.onInputEditorChange} mention={{ separator: ' ', trigger: '@', suggestions: [ { text: 'A', value: 'a', url: 'href-a' }, { text: 'AB', value: 'ab', url: 'href-ab' }, { text: 'ABC', value: 'abc', url: 'href-abc' }, { text: 'ABCD', value: 'abcd', url: 'href-abcd' }, { text: 'ABCDE', value: 'abcde', url: 'href-abcde' }, { text: 'ABCDEF', value: 'abcdef', url: 'href-abcdef' }, { text: 'ABCDEFG', value: 'abcdefg', url: 'href-abcdefg' }, ], }} /> </div> <div style={{ height: 200 }}> <textarea disabled className="demo-content" value={this.state.inputEditorState && draftToHtml(convertToRaw(this.state.inputEditorState.getCurrentContent()))} /> </div> <div style={{ height: 200 }}> <Editor editorState={this.state.outputEditorState} /> </div> </div> ); } } ReactDOM.render( <Playground />, document.getElementById('root') );
{ "pile_set_name": "Github" }
// // MASUtilities.h // Masonry // // Created by Jonas Budelmann on 19/08/13. // Copyright (c) 2013 Jonas Budelmann. All rights reserved. // #import <Foundation/Foundation.h> #if TARGET_OS_IPHONE || TARGET_OS_TV #import <UIKit/UIKit.h> #define MAS_VIEW UIView #define MAS_VIEW_CONTROLLER UIViewController #define MASEdgeInsets UIEdgeInsets typedef UILayoutPriority MASLayoutPriority; static const MASLayoutPriority MASLayoutPriorityRequired = UILayoutPriorityRequired; static const MASLayoutPriority MASLayoutPriorityDefaultHigh = UILayoutPriorityDefaultHigh; static const MASLayoutPriority MASLayoutPriorityDefaultMedium = 500; static const MASLayoutPriority MASLayoutPriorityDefaultLow = UILayoutPriorityDefaultLow; static const MASLayoutPriority MASLayoutPriorityFittingSizeLevel = UILayoutPriorityFittingSizeLevel; #elif TARGET_OS_MAC #import <AppKit/AppKit.h> #define MAS_VIEW NSView #define MASEdgeInsets NSEdgeInsets typedef NSLayoutPriority MASLayoutPriority; static const MASLayoutPriority MASLayoutPriorityRequired = NSLayoutPriorityRequired; static const MASLayoutPriority MASLayoutPriorityDefaultHigh = NSLayoutPriorityDefaultHigh; static const MASLayoutPriority MASLayoutPriorityDragThatCanResizeWindow = NSLayoutPriorityDragThatCanResizeWindow; static const MASLayoutPriority MASLayoutPriorityDefaultMedium = 501; static const MASLayoutPriority MASLayoutPriorityWindowSizeStayPut = NSLayoutPriorityWindowSizeStayPut; static const MASLayoutPriority MASLayoutPriorityDragThatCannotResizeWindow = NSLayoutPriorityDragThatCannotResizeWindow; static const MASLayoutPriority MASLayoutPriorityDefaultLow = NSLayoutPriorityDefaultLow; static const MASLayoutPriority MASLayoutPriorityFittingSizeCompression = NSLayoutPriorityFittingSizeCompression; #endif /** * Allows you to attach keys to objects matching the variable names passed. * * view1.mas_key = @"view1", view2.mas_key = @"view2"; * * is equivalent to: * * MASAttachKeys(view1, view2); */ #define MASAttachKeys(...) 
\ { \ NSDictionary *keyPairs = NSDictionaryOfVariableBindings(__VA_ARGS__); \ for (id key in keyPairs.allKeys) { \ id obj = keyPairs[key]; \ NSAssert([obj respondsToSelector:@selector(setMas_key:)], \ @"Cannot attach mas_key to %@", obj); \ [obj setMas_key:key]; \ } \ } /** * Used to create object hashes * Based on http://www.mikeash.com/pyblog/friday-qa-2010-06-18-implementing-equality-and-hashing.html */ #define MAS_NSUINT_BIT (CHAR_BIT * sizeof(NSUInteger)) #define MAS_NSUINTROTATE(val, howmuch) ((((NSUInteger)val) << howmuch) | (((NSUInteger)val) >> (MAS_NSUINT_BIT - howmuch))) /** * Given a scalar or struct value, wraps it in NSValue * Based on EXPObjectify: https://github.com/specta/expecta */ static inline id _MASBoxValue(const char *type, ...) { va_list v; va_start(v, type); id obj = nil; if (strcmp(type, @encode(id)) == 0) { id actual = va_arg(v, id); obj = actual; } else if (strcmp(type, @encode(CGPoint)) == 0) { CGPoint actual = (CGPoint)va_arg(v, CGPoint); obj = [NSValue value:&actual withObjCType:type]; } else if (strcmp(type, @encode(CGSize)) == 0) { CGSize actual = (CGSize)va_arg(v, CGSize); obj = [NSValue value:&actual withObjCType:type]; } else if (strcmp(type, @encode(MASEdgeInsets)) == 0) { MASEdgeInsets actual = (MASEdgeInsets)va_arg(v, MASEdgeInsets); obj = [NSValue value:&actual withObjCType:type]; } else if (strcmp(type, @encode(double)) == 0) { double actual = (double)va_arg(v, double); obj = [NSNumber numberWithDouble:actual]; } else if (strcmp(type, @encode(float)) == 0) { float actual = (float)va_arg(v, double); obj = [NSNumber numberWithFloat:actual]; } else if (strcmp(type, @encode(int)) == 0) { int actual = (int)va_arg(v, int); obj = [NSNumber numberWithInt:actual]; } else if (strcmp(type, @encode(long)) == 0) { long actual = (long)va_arg(v, long); obj = [NSNumber numberWithLong:actual]; } else if (strcmp(type, @encode(long long)) == 0) { long long actual = (long long)va_arg(v, long long); obj = [NSNumber numberWithLongLong:actual]; } 
else if (strcmp(type, @encode(short)) == 0) { short actual = (short)va_arg(v, int); obj = [NSNumber numberWithShort:actual]; } else if (strcmp(type, @encode(char)) == 0) { char actual = (char)va_arg(v, int); obj = [NSNumber numberWithChar:actual]; } else if (strcmp(type, @encode(bool)) == 0) { bool actual = (bool)va_arg(v, int); obj = [NSNumber numberWithBool:actual]; } else if (strcmp(type, @encode(unsigned char)) == 0) { unsigned char actual = (unsigned char)va_arg(v, unsigned int); obj = [NSNumber numberWithUnsignedChar:actual]; } else if (strcmp(type, @encode(unsigned int)) == 0) { unsigned int actual = (unsigned int)va_arg(v, unsigned int); obj = [NSNumber numberWithUnsignedInt:actual]; } else if (strcmp(type, @encode(unsigned long)) == 0) { unsigned long actual = (unsigned long)va_arg(v, unsigned long); obj = [NSNumber numberWithUnsignedLong:actual]; } else if (strcmp(type, @encode(unsigned long long)) == 0) { unsigned long long actual = (unsigned long long)va_arg(v, unsigned long long); obj = [NSNumber numberWithUnsignedLongLong:actual]; } else if (strcmp(type, @encode(unsigned short)) == 0) { unsigned short actual = (unsigned short)va_arg(v, unsigned int); obj = [NSNumber numberWithUnsignedShort:actual]; } va_end(v); return obj; } #define MASBoxValue(value) _MASBoxValue(@encode(__typeof__((value))), (value))
{ "pile_set_name": "Github" }
// Copyright (c) 2001-2011 Joel de Guzman // Copyright (c) 2001-2011 Hartmut Kaiser // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #if !defined(BOOST_SPIRIT_KARMA_GRAMMAR_MAR_05_2007_0542PM) #define BOOST_SPIRIT_KARMA_GRAMMAR_MAR_05_2007_0542PM #if defined(_MSC_VER) #pragma once #endif #include <boost/spirit/home/support/unused.hpp> #include <boost/spirit/home/support/info.hpp> #include <boost/spirit/home/support/assert_msg.hpp> #include <boost/spirit/home/karma/domain.hpp> #include <boost/spirit/home/karma/nonterminal/rule.hpp> #include <boost/spirit/home/karma/nonterminal/nonterminal_fwd.hpp> #include <boost/spirit/home/karma/reference.hpp> #include <boost/noncopyable.hpp> #include <boost/type_traits/is_same.hpp> namespace boost { namespace spirit { namespace karma { template < typename OutputIterator, typename T1, typename T2, typename T3 , typename T4> struct grammar : proto::extends< typename proto::terminal< reference<rule<OutputIterator, T1, T2, T3, T4> const> >::type , grammar<OutputIterator, T1, T2, T3, T4> > , generator<grammar<OutputIterator, T1, T2, T3, T4> > , noncopyable { typedef OutputIterator iterator_type; typedef rule<OutputIterator, T1, T2, T3, T4> start_type; typedef typename start_type::properties properties; typedef typename start_type::sig_type sig_type; typedef typename start_type::locals_type locals_type; typedef typename start_type::delimiter_type delimiter_type; typedef typename start_type::encoding_type encoding_type; typedef grammar<OutputIterator, T1, T2, T3, T4> base_type; typedef reference<start_type const> reference_; typedef typename proto::terminal<reference_>::type terminal; static size_t const params_size = start_type::params_size; template <typename Context, typename Unused> struct attribute { typedef typename start_type::attr_type type; }; // the output iterator is always wrapped by karma typedef 
detail::output_iterator<OutputIterator, properties> output_iterator; grammar(start_type const& start , std::string const& name_ = "unnamed-grammar") : proto::extends<terminal, base_type>(terminal::make(reference_(start))) , name_(name_) {} // This constructor is used to catch if the start rule is not // compatible with the grammar. template <typename Iterator_, typename T1_, typename T2_, typename T3_, typename T4_> grammar(rule<Iterator_, T1_, T2_, T3_, T4_> const& , std::string const& = "unnamed-grammar") { // If you see the assertion below failing then the start rule // passed to the constructor of the grammar is not compatible with // the grammar (i.e. it uses different template parameters). BOOST_SPIRIT_ASSERT_MSG( (is_same<start_type, rule<Iterator_, T1_, T2_, T3_, T4_> >::value) , incompatible_start_rule, (rule<Iterator_, T1_, T2_, T3_, T4_>)); } std::string name() const { return name_; } void name(std::string const& str) { name_ = str; } template <typename Context, typename Delimiter, typename Attribute> bool generate(output_iterator& sink, Context& context , Delimiter const& delim, Attribute const& attr) const { return this->proto_base().child0.generate( sink, context, delim, attr); } template <typename Context> info what(Context&) const { return info(name_); } // bring in the operator() overloads start_type const& get_parameterized_subject() const { return this->proto_base().child0.ref.get(); } typedef start_type parameterized_subject_type; #include <boost/spirit/home/karma/nonterminal/detail/fcall.hpp> std::string name_; }; }}} namespace boost { namespace spirit { namespace traits { /////////////////////////////////////////////////////////////////////////// template < typename IteratorA, typename IteratorB, typename Attribute , typename Context, typename T1, typename T2, typename T3, typename T4> struct handles_container< karma::grammar<IteratorA, T1, T2, T3, T4>, Attribute, Context , IteratorB> : detail::nonterminal_handles_container< typename 
attribute_of< karma::grammar<IteratorA, T1, T2, T3, T4> , Context, IteratorB >::type, Attribute> {}; }}} #endif
{ "pile_set_name": "Github" }
//
// TestTarget.xcconfig
//
// These are Test target settings for the gtest framework and examples. It
// is set in the "Based On:" dropdown in the "Target" info dialog.

// Name the built product after whichever target builds it, so every test
// target gets a distinct product without per-target overrides.
PRODUCT_NAME = $(TARGET_NAME)

// Resolve framework headers relative to the test sources
// (assumes targets live one level below the include/ directory -- TODO confirm).
HEADER_SEARCH_PATHS = ../include
{ "pile_set_name": "Github" }
#ifndef IV_UTILS_H_ #define IV_UTILS_H_ #include <cstddef> #include <cstdio> #include <vector> #include <string> #include <algorithm> #include <iv/debug.h> #include <iv/arith.h> #include <iv/platform.h> #include <iv/detail/cstdint.h> #if defined(IV_ENABLE_JIT) #include <iv/third_party/mie/string.hpp> #endif namespace iv { namespace core { // A macro to disallow the copy constructor and operator= functions // This should be used in the private: declarations for a class #define IV_DISALLOW_COPY_AND_ASSIGN(TypeName) \ TypeName(const TypeName&); \ void operator=(const TypeName&) // alignment hack // make struct using char (alignment 1 byte) + T (unknown alignment) // shift 1 byte using char and get struct offset (offsetof returns T alignment) template <typename T> class AlignOfImpl { private: struct Helper { char a_; T b_; }; public: static const std::size_t value = offsetof(Helper, b_); }; #define IV_ALIGN_OF(type) ::iv::core::AlignOfImpl<type>::value #define IV_ALIGN_OFFSET(offset, alignment) \ ((size_t)((offset) + ((alignment) - 1)) & ~(size_t)((alignment) - 1)) #define IV_ALIGN_TYPE(offset, type) IV_ALIGNED_OFFSET(offset, IV_ALIGN_OF(type)) // see http://www5d.biglobe.ne.jp/~noocyte/Programming/BigAlignmentBlock.html #define IV_ALIGNED_SIZE(size, alignment) ((size) + (alignment) - 1) #define IV_ALIGNED_ADDRESS(address, alignment)\ ((address + (alignment - 1)) & ~(alignment - 1)) // only 2^n and unsigned #define IV_ROUNDUP(x, y) (((x) + (y - 1)) & ~(y - 1)) // only 2^n and unsinged #define IV_ROUNDDOWN(x, y) ((x) & (-(y))) // OFFSETOF for C++ classes (non-POD) // http://www.kijineko.co.jp/tech/cpptempls/struct/offsetof.html // This macro purges '&' operator override, // and return offset value of non-POD class. // IV_DUMMY provides dummy address, that is not used in other places. 
// Byte offset of |member| within |type| for non-POD classes, where plain
// offsetof() is not usable. Uses a fake object at address 0x2000 (not 0,
// so compilers do not flag a null dereference) and subtracts it back out.
#define IV_OFFSETOF(type, member) (reinterpret_cast<ptrdiff_t>(&(reinterpret_cast<type*>(0x2000)->member)) - 0x2000)  /* NOLINT */

// Pointer adjustment (in bytes) applied when static_cast-ing a |from|
// pointer to a |to| pointer, e.g. across multiple-inheritance bases.
// Same dummy-address trick as IV_OFFSETOF above.
#define IV_CAST_OFFSET(from, to)\
    /* NOLINT */ (reinterpret_cast<uintptr_t>(static_cast<to>((reinterpret_cast<from>(0x2000)))) - 0x2000)

// Two-level stringize so that macro arguments are expanded before '#'.
#define IV_TO_STRING_IMPL(s) #s
#define IV_TO_STRING(s) IV_TO_STRING_IMPL(s)

// Returns the lowest set bit of |value| (two's-complement isolate trick).
template<class T>
T LowestOneBit(T value) {
  return value & (~value + 1u);
}

// Largest power-of-two alignment that |value|'s address satisfies
// (the lowest set bit of the address).
template <typename T>
std::size_t PtrAlignOf(T* value) {
  return static_cast<std::size_t>(
      LowestOneBit(reinterpret_cast<uintptr_t>(value)));
}

// Size and alignment constants. A pointer returned by malloc is suitably
// aligned for any type, so void* is treated as the strictest alignment here.
class Size {
 public:
  static const std::size_t KB = 1 << 10;
  static const std::size_t MB = KB << 10;
  static const std::size_t GB = MB << 10;

  static const int kCharSize = sizeof(char);      // NOLINT
  static const int kShortSize = sizeof(short);    // NOLINT
  static const int kIntSize = sizeof(int);        // NOLINT
  static const int kDoubleSize = sizeof(double);  // NOLINT
  static const int kPointerSize = sizeof(void*);  // NOLINT
  static const int kIntptrSize = sizeof(intptr_t);  // NOLINT

  static const int kCharAlign = IV_ALIGN_OF(char);      // NOLINT
  static const int kShortAlign = IV_ALIGN_OF(short);    // NOLINT
  static const int kIntAlign = IV_ALIGN_OF(int);        // NOLINT
  static const int kDoubleAlign = IV_ALIGN_OF(double);  // NOLINT
  static const int kPointerAlign = IV_ALIGN_OF(void*);  // NOLINT
  static const int kIntptrAlign = IV_ALIGN_OF(intptr_t);  // NOLINT
};

// Asserts (in debug builds) that a code path is never reached.
#define UNREACHABLE() assert(!"UNREACHABLE")

// Lexicographic three-way comparison of two ranges:
// -1 if [lit,llast) < [rit,rlast), 0 if equal, 1 if greater.
// A strict prefix compares less than the longer range.
template<typename LIter, typename RIter>
inline int CompareIterators(LIter lit, LIter llast, RIter rit, RIter rlast) {
  while (lit != llast && rit != rlast) {
    if (*lit != *rit) {
      return (*lit < *rit) ? -1 : 1;
    }
    ++lit;
    ++rit;
  }
  // At least one range is exhausted; the shorter one orders first.
  if (lit == llast) {
    if (rit == rlast) {
      return 0;
    } else {
      return -1;
    }
  } else {
    return 1;
  }
}

// Growth policy for dynamic buffers: 0 stays 0, small capacities jump to 8,
// otherwise round up to the next power of two (math::CLP2).
inline std::size_t NextCapacity(std::size_t capacity) {
  if (capacity == 0) {
    return 0;
  }
  if (capacity < 8) {
    return 8;
  }
  return math::CLP2(capacity);
}

// Releases a vector's excess capacity: native shrink_to_fit under C++11,
// otherwise the classic copy-and-swap idiom.
template<class Vector>
inline void ShrinkToFit(Vector& vec) {  // NOLINT
#if defined(IV_CXX11)
  vec.shrink_to_fit();
#else
  Vector(vec).swap(vec);
#endif
}

} }  // namespace iv::core
#endif  // IV_UTILS_H_
{ "pile_set_name": "Github" }
from .gen.custom_fields import _CustomFields


class CustomFields(_CustomFields):
    """Custom Fields resource"""

    def add_enum_option(self, custom_field, params={}, **options):
        """Alias of `create_enum_option`.

        Fix: previously the API response was silently dropped; now the
        created enum option record is returned, consistent with the
        other methods on this resource.
        """
        return self.create_enum_option(custom_field, params, **options)

    def reorder_enum_option(self, custom_field, params={}, **options):
        """Alias of `insert_enum_option`.

        Fix: previously the API response was silently dropped; now it is
        returned, consistent with the other methods on this resource.
        """
        return self.insert_enum_option(custom_field, params, **options)

    def create(self, params={}, **options):
        """Creates a new custom field in a workspace. Every custom field is required to be created in a specific workspace, and this workspace cannot be changed once set.

        A custom field's `name` must be unique within a workspace and not conflict with names of existing task properties such as 'Due Date' or 'Assignee'. A custom field's `type` must be one of 'text', 'enum', or 'number'.

        Returns the full record of the newly created custom field.

        Parameters
        ----------
        [data] : {Object} Data for the request
          - workspace : {Gid} The workspace to create a custom field in.
          - resource_subtype : {String} The type of the custom field. Must be one of the given values.
          - [type] : {String} **Deprecated: New integrations should prefer the `resource_subtype` parameter.**
          - name : {String} The name of the custom field.
          - [description] : {String} The description of the custom field.
          - [precision] : {Integer} The number of decimal places for the numerical values. Required if the custom field is of type 'number'.
          - [enum_options] : {String} The discrete values the custom field can assume. Required if the custom field is of type 'enum'.
        """
        return self.client.post("/custom_fields", params, **options)

    def find_by_id(self, custom_field, params={}, **options):
        """Returns the complete definition of a custom field's metadata.

        Parameters
        ----------
        custom_field : {Gid} Globally unique identifier for the custom field.
        [params] : {Object} Parameters for the request
        """
        path = "/custom_fields/%s" % (custom_field)
        return self.client.get(path, params, **options)

    def find_by_workspace(self, workspace, params={}, **options):
        """Returns a list of the compact representation of all of the custom fields in a workspace.

        Parameters
        ----------
        workspace : {Gid} The workspace or organization to find custom field definitions in.
        [params] : {Object} Parameters for the request
        """
        path = "/workspaces/%s/custom_fields" % (workspace)
        return self.client.get_collection(path, params, **options)

    def update(self, custom_field, params={}, **options):
        """A specific, existing custom field can be updated by making a PUT request on the URL for that custom field. Only the fields provided in the `data` block will be updated; any unspecified fields will remain unchanged

        When using this method, it is best to specify only those fields you wish to change, or else you may overwrite changes made by another user since you last retrieved the custom field.

        An enum custom field's `enum_options` cannot be updated with this endpoint. Instead see "Work With Enum Options" for information on how to update `enum_options`.

        Locked custom fields can only be updated by the user who locked the field.

        Returns the complete updated custom field record.

        Parameters
        ----------
        custom_field : {Gid} Globally unique identifier for the custom field.
        [data] : {Object} Data for the request
        """
        path = "/custom_fields/%s" % (custom_field)
        return self.client.put(path, params, **options)

    def delete(self, custom_field, params={}, **options):
        """A specific, existing custom field can be deleted by making a DELETE request on the URL for that custom field.

        Locked custom fields can only be deleted by the user who locked the field.

        Returns an empty data record.

        Parameters
        ----------
        custom_field : {Gid} Globally unique identifier for the custom field.
        """
        path = "/custom_fields/%s" % (custom_field)
        return self.client.delete(path, params, **options)

    def create_enum_option(self, custom_field, params={}, **options):
        """Creates an enum option and adds it to this custom field's list of enum options. A custom field can have at most 50 enum options (including disabled options). By default new enum options are inserted at the end of a custom field's list. Locked custom fields can only have enum options added by the user who locked the field.

        Returns the full record of the newly created enum option.

        Parameters
        ----------
        custom_field : {Gid} Globally unique identifier for the custom field.
        [data] : {Object} Data for the request
          - name : {String} The name of the enum option.
          - [color] : {String} The color of the enum option. Defaults to 'none'.
          - [insert_before] : {Gid} An existing enum option within this custom field before which the new enum option should be inserted. Cannot be provided together with after_enum_option.
          - [insert_after] : {Gid} An existing enum option within this custom field after which the new enum option should be inserted. Cannot be provided together with before_enum_option.
        """
        path = "/custom_fields/%s/enum_options" % (custom_field)
        return self.client.post(path, params, **options)

    def update_enum_option(self, enum_option, params={}, **options):
        """Updates an existing enum option. Enum custom fields require at least one enabled enum option.

        Locked custom fields can only be updated by the user who locked the field.

        Returns the full record of the updated enum option.

        Parameters
        ----------
        enum_option : {Gid} Globally unique identifier for the enum option.
        [data] : {Object} Data for the request
          - name : {String} The name of the enum option.
          - [color] : {String} The color of the enum option. Defaults to 'none'.
          - [enabled] : {Boolean} Whether or not the enum option is a selectable value for the custom field.
        """
        path = "/enum_options/%s" % (enum_option)
        return self.client.put(path, params, **options)

    def insert_enum_option(self, custom_field, params={}, **options):
        """Moves a particular enum option to be either before or after another specified enum option in the custom field.

        Locked custom fields can only be reordered by the user who locked the field.

        Parameters
        ----------
        custom_field : {Gid} Globally unique identifier for the custom field.
        [data] : {Object} Data for the request
          - enum_option : {Gid} The ID of the enum option to relocate.
          - name : {String} The name of the enum option.
          - [color] : {String} The color of the enum option. Defaults to 'none'.
          - [before_enum_option] : {Gid} An existing enum option within this custom field before which the new enum option should be inserted. Cannot be provided together with after_enum_option.
          - [after_enum_option] : {Gid} An existing enum option within this custom field after which the new enum option should be inserted. Cannot be provided together with before_enum_option.
        """
        path = "/custom_fields/%s/enum_options/insert" % (custom_field)
        return self.client.post(path, params, **options)
{ "pile_set_name": "Github" }
<?php

// Filipino (Tagalog) translation strings for the Departments module.
// Keys are looked up by the application's translation layer -- do not
// rename them; only the values are locale-specific.
return array(
    'id'       => 'Ang ID',                       // record identifier label
    'name'     => 'Ang Pangalan ng Departamento', // department name field
    'manager'  => 'Ang Namamahala',               // department manager field
    'location' => 'Ang Lokasyon',                 // department location field
    'create'   => 'Magsagawa ng Departamento',    // "create department" action
    'update'   => 'I-update ang Departamento',    // "update department" action
);
{ "pile_set_name": "Github" }
// Copyright Vladimir Prus 2002-2004. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt // or copy at http://www.boost.org/LICENSE_1_0.txt) #define BOOST_PROGRAM_OPTIONS_SOURCE #include <boost/program_options/config.hpp> #include <boost/program_options/detail/config_file.hpp> #include <boost/program_options/errors.hpp> #include <boost/program_options/detail/convert.hpp> #include <boost/throw_exception.hpp> #include <iostream> #include <fstream> #include <cassert> namespace boost { namespace program_options { namespace detail { using namespace std; common_config_file_iterator::common_config_file_iterator( const std::set<std::string>& allowed_options, bool allow_unregistered) : allowed_options(allowed_options), m_allow_unregistered(allow_unregistered) { for(std::set<std::string>::const_iterator i = allowed_options.begin(); i != allowed_options.end(); ++i) { add_option(i->c_str()); } } void common_config_file_iterator::add_option(const char* name) { string s(name); assert(!s.empty()); if (*s.rbegin() == '*') { s.resize(s.size()-1); bool bad_prefixes(false); // If 's' is a prefix of one of allowed suffix, then // lower_bound will return that element. // If some element is prefix of 's', then lower_bound will // return the next element. 
set<string>::iterator i = allowed_prefixes.lower_bound(s); if (i != allowed_prefixes.end()) { if (i->find(s) == 0) bad_prefixes = true; } if (i != allowed_prefixes.begin()) { --i; if (s.find(*i) == 0) bad_prefixes = true; } if (bad_prefixes) boost::throw_exception(error("options '" + string(name) + "' and '" + *i + "*' will both match the same " "arguments from the configuration file")); allowed_prefixes.insert(s); } } namespace { string trim_ws(const string& s) { string::size_type n, n2; n = s.find_first_not_of(" \t\r\n"); if (n == string::npos) return string(); else { n2 = s.find_last_not_of(" \t\r\n"); return s.substr(n, n2-n+1); } } } void common_config_file_iterator::get() { string s; string::size_type n; bool found = false; while(this->getline(s)) { // strip '#' comments and whitespace if ((n = s.find('#')) != string::npos) s = s.substr(0, n); s = trim_ws(s); if (!s.empty()) { // Handle section name if (*s.begin() == '[' && *s.rbegin() == ']') { m_prefix = s.substr(1, s.size()-2); if (*m_prefix.rbegin() != '.') m_prefix += '.'; } else if ((n = s.find('=')) != string::npos) { string name = m_prefix + trim_ws(s.substr(0, n)); string value = trim_ws(s.substr(n+1)); bool registered = allowed_option(name); if (!registered && !m_allow_unregistered) boost::throw_exception(unknown_option(name)); found = true; this->value().string_key = name; this->value().value.clear(); this->value().value.push_back(value); this->value().unregistered = !registered; this->value().original_tokens.clear(); this->value().original_tokens.push_back(name); this->value().original_tokens.push_back(value); break; } else { boost::throw_exception(invalid_config_file_syntax(s, invalid_syntax::unrecognized_line)); } } } if (!found) found_eof(); } bool common_config_file_iterator::allowed_option(const std::string& s) const { set<string>::const_iterator i = allowed_options.find(s); if (i != allowed_options.end()) return true; // If s is "pa" where "p" is allowed prefix then // lower_bound should 
find the element after "p". // This depends on 'allowed_prefixes' invariant. i = allowed_prefixes.lower_bound(s); if (i != allowed_prefixes.begin() && s.find(*--i) == 0) return true; return false; } #if BOOST_WORKAROUND(__COMO_VERSION__, BOOST_TESTED_AT(4303)) || \ (defined(__sgi) && BOOST_WORKAROUND(_COMPILER_VERSION, BOOST_TESTED_AT(741))) template<> bool basic_config_file_iterator<wchar_t>::getline(std::string& s) { std::wstring ws; // On Comeau, using two-argument version causes // call to some internal function with std::wstring, and '\n' // (not L'\n') and compile can't resolve that call. if (std::getline(*is, ws, L'\n')) { s = to_utf8(ws); return true; } else { return false; } } #endif }}} #if 0 using boost::program_options::config_file; #include <sstream> #include <cassert> int main() { try { stringstream s( "a = 1\n" "b = 2\n"); config_file cf(s); cf.add_option("a"); cf.add_option("b"); assert(++cf); assert(cf.name() == "a"); assert(cf.value() == "1"); assert(++cf); assert(cf.name() == "b"); assert(cf.value() == "2"); assert(!++cf); } catch(exception& e) { cout << e.what() << "\n"; } } #endif
{ "pile_set_name": "Github" }
foo.bar baz { a: b; }
{ "pile_set_name": "Github" }
-- Database schema for an authentication / subscription-provisioning server.
-- NOTE(review): table and column names (osu_*, hs20_*, devinfo/devdetail,
-- nas_ip, imsi) suggest a Hotspot 2.0 OSU/AAA server -- confirm against the
-- consuming application before relying on that interpretation.

-- Free-form event log: one row per logged server event, keyed informally
-- by user/realm/session.
CREATE TABLE eventlog(
	user TEXT,
	realm TEXT,
	sessionid TEXT COLLATE NOCASE,
	timestamp TEXT,
	notes TEXT,
	dump TEXT,
	addr TEXT
);

-- Per-session provisioning state, including credentials issued and the
-- device/operation metadata captured during the exchange.
CREATE TABLE sessions(
	timestamp TEXT,
	id TEXT COLLATE NOCASE,
	user TEXT,
	realm TEXT,
	password TEXT,
	machine_managed BOOLEAN,
	operation INTEGER,
	type TEXT,
	pps TEXT,
	redirect_uri TEXT,
	devinfo TEXT,
	devdetail TEXT,
	cert TEXT,
	cert_pem TEXT,
	mac_addr TEXT,
	osu_user TEXT,
	osu_password TEXT,
	eap_method TEXT,
	mobile_identifier_hash TEXT,
	test TEXT
);

-- Sessions are looked up by their (case-insensitive) id.
CREATE index sessions_id_index ON sessions(id);

-- Per-realm key/value configuration for the server.
CREATE TABLE osu_config(
	realm TEXT,
	field TEXT,
	value TEXT
);

-- Provisioned subscriber accounts; `identity` is the primary key.
CREATE TABLE users(
	identity TEXT PRIMARY KEY,
	methods TEXT,
	password TEXT,
	machine_managed BOOLEAN,
	remediation TEXT,
	phase2 INTEGER,
	realm TEXT,
	policy TEXT,
	devinfo TEXT,
	devdetail TEXT,
	pps TEXT,
	fetch_pps INTEGER,
	osu_user TEXT,
	osu_password TEXT,
	shared INTEGER,
	cert TEXT,
	cert_pem TEXT,
	t_c_timestamp INTEGER,
	mac_addr TEXT,
	last_msk TEXT,
	polupd_done TEXT,
	subrem TEXT
);

-- Wildcard identity -> allowed methods mapping.
CREATE TABLE wildcards(
	identity TEXT PRIMARY KEY,
	methods TEXT
);

-- Authentication attempt log (one row per auth event).
CREATE TABLE authlog(
	timestamp TEXT,
	session TEXT,
	nas_ip TEXT,
	username TEXT,
	note TEXT
);

-- Devices (by MAC address) with a pending terms-and-conditions acceptance.
CREATE TABLE pending_tc(
	mac_addr TEXT PRIMARY KEY,
	identity TEXT
);

-- Currently active sessions keyed by client MAC address, with
-- change-of-authorization (CoA) handshake state flags.
CREATE TABLE current_sessions(
	mac_addr TEXT PRIMARY KEY,
	identity TEXT,
	start_time TEXT,
	nas TEXT,
	hs20_t_c_filtering BOOLEAN,
	waiting_coa_ack BOOLEAN,
	coa_ack_received BOOLEAN
);

-- Certificate enrollment records keyed by client MAC address.
CREATE TABLE cert_enroll(
	mac_addr TEXT PRIMARY KEY,
	user TEXT,
	realm TEXT,
	serialnum TEXT
);

-- SIM-based provisioning records keyed by a hashed mobile identifier.
CREATE TABLE sim_provisioning(
	mobile_identifier_hash TEXT PRIMARY KEY,
	imsi TEXT,
	mac_addr TEXT,
	eap_method TEXT,
	timestamp TEXT
);
{ "pile_set_name": "Github" }
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4

# MacPorts portfile for the Haskell package
# data-default-instances-old-locale (version 0.0.1).
# Naming, build, and install logic come from the haskell portgroup;
# this file only supplies package metadata.

PortSystem          1.0
PortGroup           haskell 1.0

# Registers the port as hs-data-default-instances-old-locale and wires up
# the standard Haskell build from the upstream tarball.
haskell.setup       data-default-instances-old-locale 0.0.1
# Port-level revision bump (no upstream version change).
revision            3
checksums           rmd160 38030222adb35f1d1e47ed8d4e2a907a2a878933 sha256 60d3b02922958c4908d7bf2b24ddf61511665745f784227d206745784b0c0802

maintainers         nomaintainer
platforms           darwin
license             BSD

description         Default instances for types in old-locale
long_description    {*}${description}

depends_lib-append  port:hs-data-default-class
{ "pile_set_name": "Github" }
// // TwitterLoadListsAction.h // HelTweetica // // Created by Lucius Kwok on 5/1/10. /* Copyright (c) 2010, Felt Tip Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder(s) nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #import "TwitterAction.h" #import "TwitterList.h" #import "LKJSONParser.h" @interface TwitterLoadListsAction : TwitterAction <LKJSONParserDelegate> { NSMutableArray *lists; TwitterList *currentList; } @property (nonatomic, retain) NSMutableArray *lists; @property (nonatomic, retain) TwitterList *currentList; - (id)initWithUser:(NSString*)userOrNil subscriptions:(BOOL)subscriptions; @end
{ "pile_set_name": "Github" }
/* packet-fc-sp.c * Routines for Fibre Channel Security Protocol (FC-SP) * This decoder is for FC-SP version 1.1 * Copyright 2003, Dinesh G Dutt <[email protected]> * * Wireshark - Network traffic analyzer * By Gerald Combs <[email protected]> * Copyright 1998 Gerald Combs * * SPDX-License-Identifier: GPL-2.0-or-later */ #include "config.h" #include <epan/packet.h> #include <epan/to_str.h> #include <epan/expert.h> void proto_register_fcsp(void); /* Message Codes */ #define FC_AUTH_MSG_AUTH_REJECT 0x0A #define FC_AUTH_MSG_AUTH_NEGOTIATE 0x0B #define FC_AUTH_MSG_AUTH_DONE 0x0C #define FC_AUTH_DHCHAP_CHALLENGE 0x10 #define FC_AUTH_DHCHAP_REPLY 0x11 #define FC_AUTH_DHCHAP_SUCCESS 0x12 #define FC_AUTH_FCAP_REQUEST 0x13 #define FC_AUTH_FCAP_ACKNOWLEDGE 0x14 #define FC_AUTH_FCAP_CONFIRM 0x15 #define FC_AUTH_FCPAP_INIT 0x16 #define FC_AUTH_FCPAP_ACCEPT 0x17 #define FC_AUTH_FCPAP_COMPLETE 0x18 #define FC_AUTH_NAME_TYPE_WWN 0x1 #define FC_AUTH_PROTO_TYPE_DHCHAP 0x1 #define FC_AUTH_PROTO_TYPE_FCAP 0x2 #define FC_AUTH_DHCHAP_HASH_MD5 0x5 #define FC_AUTH_DHCHAP_HASH_SHA1 0x6 #define FC_AUTH_DHCHAP_PARAM_HASHLIST 0x1 #define FC_AUTH_DHCHAP_PARAM_DHgIDLIST 0x2 /* Initialize the protocol and registered fields */ static int proto_fcsp = -1; static int hf_auth_proto_ver = -1; static int hf_auth_msg_code = -1; static int hf_auth_flags = -1; static int hf_auth_len = -1; static int hf_auth_tid = -1; static int hf_auth_initiator_wwn = -1; static int hf_auth_initiator_name = -1; static int hf_auth_usable_proto = -1; static int hf_auth_rjt_code = -1; static int hf_auth_rjt_codedet = -1; static int hf_auth_responder_wwn = -1; static int hf_auth_responder_name = -1; /* static int hf_auth_dhchap_groupid = -1; */ /* static int hf_auth_dhchap_hashid = -1; */ static int hf_auth_dhchap_chal_len = -1; static int hf_auth_dhchap_val_len = -1; static int hf_auth_dhchap_rsp_len = -1; static int hf_auth_initiator_name_type = -1; static int hf_auth_initiator_name_len = -1; static int 
hf_auth_responder_name_len = -1; static int hf_auth_responder_name_type = -1; static int hf_auth_proto_type = -1; static int hf_auth_proto_param_len = -1; static int hf_auth_dhchap_param_tag = -1; static int hf_auth_dhchap_param_len = -1; static int hf_auth_dhchap_hash_type = -1; static int hf_auth_dhchap_group_type = -1; static int hf_auth_dhchap_dhvalue = -1; static int hf_auth_dhchap_chal_value = -1; static int hf_auth_dhchap_rsp_value = -1; /* Initialize the subtree pointers */ static gint ett_fcsp = -1; static expert_field ei_auth_fcap_undecoded = EI_INIT; static const value_string fcauth_msgcode_vals[] = { {FC_AUTH_MSG_AUTH_REJECT, "AUTH_Reject"}, {FC_AUTH_MSG_AUTH_NEGOTIATE, "AUTH_Negotiate"}, {FC_AUTH_MSG_AUTH_DONE, "AUTH_Done"}, {FC_AUTH_DHCHAP_CHALLENGE, "DHCHAP_Challenge"}, {FC_AUTH_DHCHAP_REPLY, "DHCHAP_Reply"}, {FC_AUTH_DHCHAP_SUCCESS, "DHCHAP_Success"}, {FC_AUTH_FCAP_REQUEST, "FCAP_Request"}, {FC_AUTH_FCAP_ACKNOWLEDGE, "FCAP_Acknowledge"}, {FC_AUTH_FCAP_CONFIRM, "FCAP_Confirm"}, {FC_AUTH_FCPAP_INIT, "FCPAP_Init"}, {FC_AUTH_FCPAP_ACCEPT, "FCPAP_Accept"}, {FC_AUTH_FCPAP_COMPLETE, "FCPAP_Complete"}, {0, NULL}, }; static const value_string fcauth_rjtcode_vals[] = { {0x01, "Authentication Failure"}, {0x02, "Logical Error"}, {0, NULL}, }; static const value_string fcauth_rjtcode_detail_vals[] = { {0x01, "Authentication Mechanism Not Usable"}, {0x02, "DH Group Not Usable"}, {0x03, "Hash Algorithm Not Usable"}, {0x04, "Authentication Protocol Instance Already Started"}, {0x05, "Authentication Failed "}, {0x06, "Incorrect Payload "}, {0x07, "Incorrect Authentication Protocol Message"}, {0x08, "Protocol Reset"}, {0, NULL}, }; static const value_string fcauth_dhchap_param_vals[] = { {FC_AUTH_DHCHAP_PARAM_HASHLIST, "HashList"}, {FC_AUTH_DHCHAP_PARAM_DHgIDLIST, "DHgIDList"}, {0, NULL}, }; static const value_string fcauth_dhchap_hash_algo_vals[] = { {FC_AUTH_DHCHAP_HASH_MD5, "MD5"}, {FC_AUTH_DHCHAP_HASH_SHA1, "SHA-1"}, {0, NULL}, }; static const value_string 
fcauth_name_type_vals[] = { {FC_AUTH_NAME_TYPE_WWN, "WWN"}, {0, NULL}, }; static const value_string fcauth_proto_type_vals[] = { {FC_AUTH_PROTO_TYPE_DHCHAP, "DHCHAP"}, {FC_AUTH_PROTO_TYPE_FCAP, "FCAP"}, {0, NULL}, }; static const value_string fcauth_dhchap_dhgid_vals[] = { {0, "DH NULL"}, {1, "DH Group 1024"}, {2, "DH Group 1280"}, {3, "DH Group 1536"}, {4, "DH Group 2048"}, {0, NULL}, }; /* this format is required because a script is used to build the C function that calls all the protocol registration. */ static void dissect_fcsp_dhchap_auth_param(tvbuff_t *tvb, proto_tree *tree, int offset, gint32 total_len) { guint16 auth_param_tag; guint16 param_len, i; if (tree) { total_len -= 4; while (total_len > 0) { proto_tree_add_item(tree, hf_auth_dhchap_param_tag, tvb, offset, 2, ENC_BIG_ENDIAN); proto_tree_add_item(tree, hf_auth_dhchap_param_len, tvb, offset+2, 2, ENC_BIG_ENDIAN); auth_param_tag = tvb_get_ntohs(tvb, offset); param_len = tvb_get_ntohs(tvb, offset+2)*4; switch (auth_param_tag) { case FC_AUTH_DHCHAP_PARAM_HASHLIST: offset += 4; total_len -= 4; for (i = 0; i < param_len; i += 4) { proto_tree_add_item(tree, hf_auth_dhchap_hash_type, tvb, offset, 4, ENC_BIG_ENDIAN); offset += 4; } break; case FC_AUTH_DHCHAP_PARAM_DHgIDLIST: offset += 4; total_len -= 4; for (i = 0; i < param_len; i += 4) { proto_tree_add_item(tree, hf_auth_dhchap_group_type, tvb, offset, 4, ENC_BIG_ENDIAN); offset += 4; } break; default: /* If we don't recognize the auth_param_tag and the param_len * is 0 then just return to prevent an infinite loop. 
See
 * https://bugs.wireshark.org/bugzilla/show_bug.cgi?id=8359
 */
                /* NOTE(review): the head of this function is above this view;
                 * presumably a zero-length parameter would never advance the
                 * walk below (total_len -= param_len), so bail out here —
                 * see the bug URL above for the apparent motivation. */
                if (param_len == 0) {
                    return;
                }
                break;
            }
            total_len -= param_len;
        }
    }
}

/*
 * Dissect a DHCHAP_Challenge payload (payload starts at offset 12, right
 * after the header fields added in dissect_fcsp): responder name, hash
 * algorithm, DH group, then length-prefixed challenge and DH values.
 */
static void dissect_fcsp_dhchap_challenge(tvbuff_t *tvb, proto_tree *tree)
{
    int     offset = 12;    /* first byte after the FC-SP header */
    guint16 name_type;
    /* NOTE(review): param_len is 16 bits but is assigned from
     * tvb_get_ntohl() below, silently truncating 32-bit on-the-wire
     * lengths — looks like it should be guint32; confirm before changing. */
    guint16 param_len, name_len;

    if (tree) {
        proto_tree_add_item(tree, hf_auth_responder_name_type, tvb, offset, 2, ENC_BIG_ENDIAN);
        name_type = tvb_get_ntohs(tvb, offset);
        proto_tree_add_item(tree, hf_auth_responder_name_len, tvb, offset+2, 2, ENC_BIG_ENDIAN);
        name_len = tvb_get_ntohs(tvb, offset+2);

        /* A WWN responder name is shown as a fixed 8 bytes; any other
         * name type is shown as raw bytes using the advertised length. */
        if (name_type == FC_AUTH_NAME_TYPE_WWN) {
            proto_tree_add_item(tree, hf_auth_responder_wwn, tvb, offset+4, 8, ENC_NA);
        } else {
            proto_tree_add_item(tree, hf_auth_responder_name, tvb, offset+4, name_len, ENC_NA);
        }
        offset += (4+name_len);

        proto_tree_add_item(tree, hf_auth_dhchap_hash_type, tvb, offset, 4, ENC_BIG_ENDIAN);
        proto_tree_add_item(tree, hf_auth_dhchap_group_type, tvb, offset+4, 4, ENC_BIG_ENDIAN);
        proto_tree_add_item(tree, hf_auth_dhchap_chal_len, tvb, offset+8, 4, ENC_BIG_ENDIAN);
        param_len = tvb_get_ntohl(tvb, offset+8);
        proto_tree_add_item(tree, hf_auth_dhchap_chal_value, tvb, offset+12, param_len, ENC_NA);

        /* The DH value (4-byte length + bytes) follows the challenge value. */
        offset += (param_len + 12);
        proto_tree_add_item(tree, hf_auth_dhchap_val_len, tvb, offset, 4, ENC_BIG_ENDIAN);
        param_len = tvb_get_ntohl(tvb, offset);
        proto_tree_add_item(tree, hf_auth_dhchap_dhvalue, tvb, offset+4, param_len, ENC_NA);
    }
}

/*
 * Dissect a DHCHAP_Reply payload: three length-prefixed values in order —
 * response value, DH value, challenge value — each a 4-byte big-endian
 * length followed by that many bytes.
 */
static void dissect_fcsp_dhchap_reply(tvbuff_t *tvb, proto_tree *tree)
{
    int     offset = 12;    /* first byte after the FC-SP header */
    guint32 param_len;

    if (tree) {
        proto_tree_add_item(tree, hf_auth_dhchap_rsp_len, tvb, offset, 4, ENC_BIG_ENDIAN);
        param_len = tvb_get_ntohl(tvb, offset);
        proto_tree_add_item(tree, hf_auth_dhchap_rsp_value, tvb, offset+4, param_len, ENC_NA);

        offset += (param_len + 4);
        proto_tree_add_item(tree, hf_auth_dhchap_val_len, tvb, offset, 4, ENC_BIG_ENDIAN);
        param_len = tvb_get_ntohl(tvb, offset);
        proto_tree_add_item(tree, hf_auth_dhchap_dhvalue, tvb, offset+4, param_len, ENC_NA);

        offset += (param_len + 4);
        proto_tree_add_item(tree, hf_auth_dhchap_chal_len, tvb, offset, 4, ENC_BIG_ENDIAN);
        param_len = tvb_get_ntohl(tvb, offset);
        proto_tree_add_item(tree, hf_auth_dhchap_chal_value, tvb, offset+4, param_len, ENC_NA);
    }
}

/*
 * Dissect a DHCHAP_Success payload: a single length-prefixed response
 * value.
 */
static void dissect_fcsp_dhchap_success(tvbuff_t *tvb, proto_tree *tree)
{
    int     offset = 12;    /* first byte after the FC-SP header */
    guint32 param_len;

    if (tree) {
        proto_tree_add_item(tree, hf_auth_dhchap_rsp_len, tvb, offset, 4, ENC_BIG_ENDIAN);
        param_len = tvb_get_ntohl(tvb, offset);
        proto_tree_add_item(tree, hf_auth_dhchap_rsp_value, tvb, offset+4, param_len, ENC_NA);
    }
}

/*
 * Dissect an AUTH_Negotiate payload: initiator name, then a count of
 * usable protocols followed by that many length-prefixed protocol
 * parameter blocks (DH-CHAP blocks are decoded further; FCAP is not).
 */
static void dissect_fcsp_auth_negotiate(tvbuff_t *tvb, proto_tree *tree)
{
    int     offset = 12;    /* first byte after the FC-SP header */
    /* NOTE(review): proto_type and param_len are 16 bits but are assigned
     * from tvb_get_ntohl() below, truncating 32-bit on-the-wire values —
     * they probably should be guint32; confirm before changing. */
    guint16 name_type, name_len, proto_type, param_len;
    guint32 num_protos, i;

    if (tree) {
        proto_tree_add_item(tree, hf_auth_initiator_name_type, tvb, offset, 2, ENC_BIG_ENDIAN);
        name_type = tvb_get_ntohs(tvb, offset);
        proto_tree_add_item(tree, hf_auth_initiator_name_len, tvb, offset+2, 2, ENC_BIG_ENDIAN);
        name_len = tvb_get_ntohs(tvb, offset+2);

        /* WWN names are shown as a fixed 8 bytes; other name types are
         * shown as raw bytes using the advertised length. */
        if (name_type == FC_AUTH_NAME_TYPE_WWN) {
            proto_tree_add_item(tree, hf_auth_initiator_wwn, tvb, offset+4, 8, ENC_NA);
        } else {
            proto_tree_add_item(tree, hf_auth_initiator_name, tvb, offset+4, name_len, ENC_NA);
        }
        offset += (4+name_len);

        proto_tree_add_item(tree, hf_auth_usable_proto, tvb, offset, 4, ENC_BIG_ENDIAN);
        num_protos = tvb_get_ntohl(tvb, offset);
        offset += 4;

        for (i = 0; i < num_protos; i++) {
            proto_tree_add_item(tree, hf_auth_proto_param_len, tvb, offset, 4, ENC_BIG_ENDIAN);
            param_len = tvb_get_ntohl(tvb, offset);
            offset += 4;

            /* Only descend into the parameter block when it is fully
             * present in the captured data. */
            if (tvb_bytes_exist(tvb, offset, param_len)) {
                proto_type = tvb_get_ntohl(tvb, offset);
                proto_tree_add_item(tree, hf_auth_proto_type, tvb, offset, 4, ENC_BIG_ENDIAN);
                switch (proto_type) {
                case FC_AUTH_PROTO_TYPE_DHCHAP:
                    dissect_fcsp_dhchap_auth_param(tvb, tree, offset+4, param_len);
                    break;
                case FC_AUTH_PROTO_TYPE_FCAP:
                    /* FCAP parameters are not decoded here. */
                    break;
                default:
                    break;
                }
            }
            offset += param_len;
        }
    }
}

/* No per-message fields are decoded for AUTH_Done; intentionally a no-op. */
static void dissect_fcsp_auth_done(tvbuff_t *tvb _U_, proto_tree *tree _U_)
{
}

/*
 * Dissect an AUTH_Reject payload: a one-byte reason code followed by a
 * one-byte reason-code explanation.
 */
static void dissect_fcsp_auth_rjt(tvbuff_t *tvb, proto_tree *tree)
{
    int offset = 12;    /* first byte after the FC-SP header */

    if (tree) {
        proto_tree_add_item(tree, hf_auth_rjt_code, tvb, offset, 1, ENC_BIG_ENDIAN);
        proto_tree_add_item(tree, hf_auth_rjt_codedet, tvb, offset+1, 1, ENC_BIG_ENDIAN);
    }
}

/*
 * Top-level FC-SP dissector: puts the message code in the Info column,
 * adds the common header fields (flags, message code, version, length,
 * transaction id), then dispatches to the per-opcode payload dissector.
 * Returns the number of bytes in the captured tvb.
 */
static int dissect_fcsp(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, void* data _U_)
{
    proto_item *ti = NULL;
    guint8      opcode;
    int         offset = 0;
    proto_tree *fcsp_tree = NULL;

    /* Make entry in the Info column on summary display */
    opcode = tvb_get_guint8(tvb, 2);
    col_add_str(pinfo->cinfo, COL_INFO, val_to_str(opcode, fcauth_msgcode_vals, "0x%x"));

    if (tree) {
        ti = proto_tree_add_protocol_format(tree, proto_fcsp, tvb, 0, tvb_captured_length(tvb), "FC-SP");
        fcsp_tree = proto_item_add_subtree(ti, ett_fcsp);

        proto_tree_add_item(fcsp_tree, hf_auth_flags, tvb, offset+1, 1, ENC_BIG_ENDIAN);
        proto_tree_add_item(fcsp_tree, hf_auth_msg_code, tvb, offset+2, 1, ENC_BIG_ENDIAN);
        proto_tree_add_item(fcsp_tree, hf_auth_proto_ver, tvb, offset+3, 1, ENC_BIG_ENDIAN);
        proto_tree_add_item(fcsp_tree, hf_auth_len, tvb, offset+4, 4, ENC_BIG_ENDIAN);
        proto_tree_add_item(fcsp_tree, hf_auth_tid, tvb, offset+8, 4, ENC_BIG_ENDIAN);

        /* NOTE(review): the per-message dissectors below are handed the
         * caller's tree, not fcsp_tree, so their fields land outside the
         * FC-SP subtree — confirm whether that is intentional. */
        switch (opcode) {
        case FC_AUTH_MSG_AUTH_REJECT:
            dissect_fcsp_auth_rjt(tvb, tree);
            break;
        case FC_AUTH_MSG_AUTH_NEGOTIATE:
            dissect_fcsp_auth_negotiate(tvb, tree);
            break;
        case FC_AUTH_MSG_AUTH_DONE:
            dissect_fcsp_auth_done(tvb, tree);
            break;
        case FC_AUTH_DHCHAP_CHALLENGE:
            dissect_fcsp_dhchap_challenge(tvb, tree);
            break;
        case FC_AUTH_DHCHAP_REPLY:
            dissect_fcsp_dhchap_reply(tvb, tree);
            break;
        case FC_AUTH_DHCHAP_SUCCESS:
            dissect_fcsp_dhchap_success(tvb, tree);
            break;
        case FC_AUTH_FCAP_REQUEST:
        case FC_AUTH_FCAP_ACKNOWLEDGE:
        case FC_AUTH_FCAP_CONFIRM:
        case FC_AUTH_FCPAP_INIT:
        case FC_AUTH_FCPAP_ACCEPT:
        case FC_AUTH_FCPAP_COMPLETE:
            /* FCAP/FCPAP payload decoding is not implemented; mark the
             * remaining bytes with expert info instead. */
            proto_tree_add_expert(fcsp_tree, pinfo, &ei_auth_fcap_undecoded, tvb, offset+12, -1);
            break;
        default:
            break;
        }
    }
    return tvb_captured_length(tvb);
}

/*
 * Register the FC-SP protocol with Wireshark: header fields, the protocol
 * subtree, expert info, and the "fcsp" dissector handle.
 */
void
proto_register_fcsp(void)
{
    /* Setup list of header fields  See Section 1.6.1 for details*/
    static hf_register_info hf[] = {
        { &hf_auth_proto_ver,
          { "Protocol Version", "fcsp.version", FT_UINT8, BASE_HEX, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_msg_code,
          { "Message Code", "fcsp.opcode", FT_UINT8, BASE_HEX,
            VALS(fcauth_msgcode_vals), 0x0, NULL, HFILL}},
        { &hf_auth_flags,
          { "Flags", "fcsp.flags", FT_UINT8, BASE_HEX, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_len,
          { "Packet Length", "fcsp.len", FT_UINT32, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_tid,
          { "Transaction Identifier", "fcsp.tid", FT_UINT32, BASE_HEX, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_initiator_wwn,
          { "Initiator Name (WWN)", "fcsp.initwwn", FT_FCWWN, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_initiator_name,
          { "Initiator Name (Unknown Type)", "fcsp.initname", FT_BYTES, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_initiator_name_type,
          { "Initiator Name Type", "fcsp.initnametype", FT_UINT16, BASE_HEX,
            VALS(fcauth_name_type_vals), 0x0, NULL, HFILL}},
        { &hf_auth_initiator_name_len,
          { "Initiator Name Length", "fcsp.initnamelen", FT_UINT16, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_usable_proto,
          { "Number of Usable Protocols", "fcsp.usableproto", FT_UINT32, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_rjt_code,
          { "Reason Code", "fcsp.rjtcode", FT_UINT8, BASE_DEC,
            VALS(fcauth_rjtcode_vals), 0x0, NULL, HFILL}},
        { &hf_auth_rjt_codedet,
          { "Reason Code Explanation", "fcsp.rjtcodet", FT_UINT8, BASE_DEC,
            VALS(fcauth_rjtcode_detail_vals), 0x0, NULL, HFILL}},
        { &hf_auth_responder_wwn,
          { "Responder Name (WWN)", "fcsp.rspwwn", FT_FCWWN, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_responder_name,
          { "Responder Name (Unknown Type)", "fcsp.rspname", FT_BYTES, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_responder_name_type,
          { "Responder Name Type", "fcsp.rspnametype", FT_UINT16, BASE_HEX,
            VALS(fcauth_name_type_vals), 0x0, NULL, HFILL}},
        /* NOTE(review): the label below reads "Responder Name Type" but the
         * field is the responder name *length* ("fcsp.rspnamelen") — looks
         * like a copy/paste slip in the display string. */
        { &hf_auth_responder_name_len,
          { "Responder Name Type", "fcsp.rspnamelen", FT_UINT16, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
#if 0
        { &hf_auth_dhchap_hashid,
          { "Hash Identifier", "fcsp.dhchap.hashid", FT_UINT32, BASE_HEX, NULL, 0x0,
            NULL, HFILL}},
#endif
#if 0
        { &hf_auth_dhchap_groupid,
          { "DH Group Identifier", "fcsp.dhchap.groupid", FT_UINT32, BASE_HEX, NULL, 0x0,
            NULL, HFILL}},
#endif
        { &hf_auth_dhchap_chal_len,
          { "Challenge Value Length", "fcsp.dhchap.challen", FT_UINT32, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_dhchap_val_len,
          { "DH Value Length", "fcsp.dhchap.vallen", FT_UINT32, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_dhchap_rsp_len,
          { "Response Value Length", "fcsp.dhchap.rsplen", FT_UINT32, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_proto_type,
          { "Authentication Protocol Type", "fcsp.proto", FT_UINT32, BASE_DEC,
            VALS(fcauth_proto_type_vals), 0x0, NULL, HFILL}},
        { &hf_auth_proto_param_len,
          { "Protocol Parameters Length", "fcsp.protoparamlen", FT_UINT32, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_dhchap_param_tag,
          { "Parameter Tag", "fcsp.dhchap.paramtype", FT_UINT16, BASE_HEX,
            VALS(fcauth_dhchap_param_vals), 0x0, NULL, HFILL}},
        { &hf_auth_dhchap_param_len,
          { "Parameter Length", "fcsp.dhchap.paramlen", FT_UINT16, BASE_DEC, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_dhchap_hash_type,
          { "Hash Algorithm", "fcsp.dhchap.hashtype", FT_UINT32, BASE_DEC,
            VALS(fcauth_dhchap_hash_algo_vals), 0x0, NULL, HFILL}},
        { &hf_auth_dhchap_group_type,
          { "DH Group", "fcsp.dhchap.dhgid", FT_UINT32, BASE_DEC,
            VALS(fcauth_dhchap_dhgid_vals), 0x0, NULL, HFILL}},
        { &hf_auth_dhchap_chal_value,
          { "Challenge Value", "fcsp.dhchap.chalval", FT_BYTES, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_dhchap_dhvalue,
          { "DH Value", "fcsp.dhchap.dhvalue", FT_BYTES, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
        { &hf_auth_dhchap_rsp_value,
          { "Response Value", "fcsp.dhchap.rspval", FT_BYTES, BASE_NONE, NULL, 0x0,
            NULL, HFILL}},
    };

    /* Setup protocol subtree array */
    static gint *ett[] = {
        &ett_fcsp,
    };

    /* Expert info entries (used for the undecoded FCAP/FCPAP opcodes). */
    static ei_register_info ei[] = {
        { &ei_auth_fcap_undecoded,
          { "fcsp.fcap_undecoded", PI_UNDECODED, PI_WARN,
            "FCAP Decoding Not Supported", EXPFILL }},
    };

    expert_module_t* expert_fcsp;

    /* Register the protocol name and description */
    proto_fcsp = proto_register_protocol("Fibre Channel Security Protocol", "FC-SP", "fcsp");
    register_dissector("fcsp", dissect_fcsp, proto_fcsp);

    proto_register_field_array(proto_fcsp, hf, array_length(hf));
    proto_register_subtree_array(ett, array_length(ett));
    expert_fcsp = expert_register_protocol(proto_fcsp);
    expert_register_field_array(expert_fcsp, ei, array_length(ei));
}

/*
 * Editor modelines - https://www.wireshark.org/tools/modelines.html
 *
 * Local variables:
 * c-basic-offset: 4
 * tab-width: 8
 * indent-tabs-mode: nil
 * End:
 *
 * vi: set shiftwidth=4 tabstop=8 expandtab:
 * :indentSize=4:tabSize=8:noTabs=true:
 */
{ "pile_set_name": "Github" }
QA output created by 787 log records: 7565 === hinv.ncpu === +++ forwards +++ Note: timezone set to local timezone of host "super.elastic.org" from archive metric: hinv.ncpu archive: archives/bug-1044 host: super.elastic.org start: Fri Jan 10 18:00:00 2014 end: Mon Jan 13 08:59:49 2014 semantics: discrete instantaneous value units: none samples: 63 interval: 3600.00 sec 18:00:00.000 4 19:00:00.000 4 20:00:00.000 4 21:00:00.000 4 22:00:00.000 4 23:00:00.000 4 00:00:00.000 4 01:00:00.000 4 02:00:00.000 4 03:00:00.000 4 04:00:00.000 4 05:00:00.000 4 06:00:00.000 4 07:00:00.000 4 08:00:00.000 4 09:00:00.000 4 10:00:00.000 4 11:00:00.000 4 12:00:00.000 4 13:00:00.000 4 14:00:00.000 4 15:00:00.000 4 16:00:00.000 4 17:00:00.000 4 18:00:00.000 4 19:00:00.000 4 20:00:00.000 4 21:00:00.000 4 22:00:00.000 4 23:00:00.000 4 00:00:00.000 4 01:00:00.000 4 02:00:00.000 4 03:00:00.000 4 04:00:00.000 4 05:00:00.000 4 06:00:00.000 4 07:00:00.000 4 08:00:00.000 4 09:00:00.000 4 10:00:00.000 4 11:00:00.000 4 12:00:00.000 4 13:00:00.000 4 14:00:00.000 4 15:00:00.000 4 16:00:00.000 4 17:00:00.000 4 18:00:00.000 4 19:00:00.000 4 20:00:00.000 4 21:00:00.000 4 22:00:00.000 4 23:00:00.000 4 00:00:00.000 4 01:00:00.000 4 02:00:00.000 4 03:00:00.000 4 04:00:00.000 4 05:00:00.000 4 06:00:00.000 4 07:00:00.000 4 08:00:00.000 4 log reads: 7448 +++ backwards +++ metric[0]: hinv.ncpu sample 1, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 2, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 3, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 4, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 5, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 6, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 7, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 8, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 9, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 10, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 11, delta time=-3600.000 
secs hinv.ncpu: value[0]: 4 sample 12, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 13, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 14, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 15, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 16, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 17, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 18, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 19, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 20, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 21, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 22, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 23, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 24, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 25, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 26, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 27, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 28, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 29, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 30, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 31, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 32, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 33, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 34, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 35, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 36, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 37, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 38, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 39, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 40, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 41, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 42, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 43, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 44, delta time=-3600.000 secs hinv.ncpu: 
value[0]: 4 sample 45, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 46, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 47, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 48, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 49, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 50, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 51, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 52, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 53, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 54, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 55, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 56, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 57, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 58, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 59, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 60, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 61, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 62, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample 63, delta time=-3600.000 secs hinv.ncpu: value[0]: 4 sample[64] pmFetch: End of PCP archive log 64 samples required 7565 log reads === kernel.all.nprocs === +++ forwards +++ Note: timezone set to local timezone of host "super.elastic.org" from archive metric: kernel.all.nprocs archive: archives/bug-1044 host: super.elastic.org start: Fri Jan 10 18:00:00 2014 end: Mon Jan 13 08:59:49 2014 semantics: instantaneous value units: none samples: 63 interval: 3600.00 sec 18:00:00.000 444 19:00:00.000 436 20:00:00.000 436 21:00:00.000 442 22:00:00.000 426 23:00:00.000 1082 00:00:00.000 1283 01:00:00.000 788 02:00:00.000 637 03:00:00.000 639 04:00:00.000 637 05:00:00.000 642 06:00:00.000 635 07:00:00.000 638 08:00:00.000 635 09:00:00.000 651 10:00:00.000 651 11:00:00.000 647 12:00:00.000 636 13:00:00.000 638 14:00:00.000 646 15:00:00.000 656 16:00:00.000 648 17:00:00.000 634 18:00:00.000 634 
19:00:00.000 645 20:00:00.000 636 21:00:00.000 636 22:00:00.000 635 23:00:00.000 636 00:00:00.000 636 01:00:00.000 637 02:00:00.000 633 03:00:00.000 636 04:00:00.000 630 05:00:00.000 638 06:00:00.000 634 07:00:00.000 636 08:00:00.000 638 09:00:00.000 845 10:00:00.000 843 11:00:00.000 688 12:00:00.000 687 13:00:00.000 686 14:00:00.000 689 15:00:00.000 690 16:00:00.000 686 17:00:00.000 686 18:00:00.000 1101 19:00:00.000 1091 20:00:00.000 788 21:00:00.000 788 22:00:00.000 786 23:00:00.000 793 00:00:00.000 786 01:00:00.000 782 02:00:00.000 795 03:00:00.000 786 04:00:00.000 785 05:00:00.000 789 06:00:00.000 789 07:00:00.000 785 08:00:00.000 942 log reads: 7448 +++ backwards +++ metric[0]: kernel.all.nprocs sample 1, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 942 sample 2, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 785 sample 3, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 789 sample 4, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 789 sample 5, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 785 sample 6, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 786 sample 7, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 795 sample 8, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 782 sample 9, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 786 sample 10, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 793 sample 11, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 786 sample 12, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 788 sample 13, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 788 sample 14, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 1091 sample 15, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 1101 sample 16, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 686 sample 17, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 686 sample 18, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 690 sample 19, delta time=-3600.000 
secs kernel.all.nprocs: value[0]: 689 sample 20, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 686 sample 21, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 687 sample 22, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 688 sample 23, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 843 sample 24, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 845 sample 25, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 638 sample 26, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 27, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 634 sample 28, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 638 sample 29, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 630 sample 30, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 31, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 633 sample 32, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 637 sample 33, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 34, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 35, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 635 sample 36, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 37, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 38, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 645 sample 39, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 634 sample 40, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 634 sample 41, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 648 sample 42, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 656 sample 43, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 646 sample 44, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 638 sample 45, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 636 sample 46, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 647 sample 47, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 651 
sample 48, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 651 sample 49, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 635 sample 50, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 638 sample 51, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 635 sample 52, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 642 sample 53, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 637 sample 54, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 639 sample 55, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 637 sample 56, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 788 sample 57, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 1283 sample 58, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 1082 sample 59, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 426 sample 60, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 442 sample 61, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 436 sample 62, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 436 sample 63, delta time=-3600.000 secs kernel.all.nprocs: value[0]: 444 sample[64] pmFetch: End of PCP archive log 64 samples required 7565 log reads === pmcd.numagents === +++ forwards +++ Note: timezone set to local timezone of host "super.elastic.org" from archive metric: pmcd.numagents archive: archives/bug-1044 host: super.elastic.org start: Fri Jan 10 18:00:00 2014 end: Mon Jan 13 08:59:49 2014 semantics: instantaneous value units: none samples: 63 interval: 3600.00 sec 18:00:00.000 No values available 19:00:00.000 No values available 20:00:00.000 No values available 21:00:00.000 No values available 22:00:00.000 No values available 23:00:00.000 No values available 00:00:00.000 No values available 01:00:00.000 No values available 02:00:00.000 No values available 03:00:00.000 No values available 04:00:00.000 No values available 05:00:00.000 No values available 06:00:00.000 No values available 07:00:00.000 No values available 
08:00:00.000 No values available 09:00:00.000 No values available 10:00:00.000 No values available 11:00:00.000 No values available 12:00:00.000 No values available 13:00:00.000 No values available 14:00:00.000 No values available 15:00:00.000 No values available 16:00:00.000 No values available 17:00:00.000 No values available 18:00:00.000 No values available 19:00:00.000 No values available 20:00:00.000 No values available 21:00:00.000 No values available 22:00:00.000 No values available 23:00:00.000 No values available 00:00:00.000 No values available 01:00:00.000 No values available 02:00:00.000 No values available 03:00:00.000 No values available 04:00:00.000 No values available 05:00:00.000 No values available 06:00:00.000 No values available 07:00:00.000 No values available 08:00:00.000 No values available 09:00:00.000 No values available 10:00:00.000 No values available 11:00:00.000 No values available 12:00:00.000 No values available 13:00:00.000 No values available 14:00:00.000 No values available 15:00:00.000 No values available 16:00:00.000 No values available 17:00:00.000 No values available 18:00:00.000 No values available 19:00:00.000 No values available 20:00:00.000 No values available 21:00:00.000 No values available 22:00:00.000 No values available 23:00:00.000 No values available 00:00:00.000 No values available 01:00:00.000 No values available 02:00:00.000 No values available 03:00:00.000 No values available 04:00:00.000 No values available 05:00:00.000 No values available 06:00:00.000 No values available 07:00:00.000 No values available 08:00:00.000 No values available log reads: 13266 +++ backwards +++ metric[0]: pmcd.numagents sample 1, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 2, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 3, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 4, delta time=-3600.000 secs pmcd.numagents: no current 
values no prior values sample 5, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 6, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 7, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 8, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 9, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 10, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 11, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 12, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 13, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 14, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 15, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 16, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 17, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 18, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 19, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 20, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 21, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 22, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 23, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 24, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 25, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 26, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 27, delta time=-3600.000 secs pmcd.numagents: no current 
values no prior values sample 28, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 29, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 30, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 31, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 32, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 33, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 34, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 35, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 36, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 37, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 38, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 39, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 40, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 41, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 42, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 43, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 44, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 45, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 46, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 47, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 48, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 49, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 50, delta time=-3600.000 secs pmcd.numagents: no 
current values no prior values sample 51, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 52, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 53, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 54, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 55, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 56, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 57, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 58, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 59, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 60, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 61, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 62, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample 63, delta time=-3600.000 secs pmcd.numagents: no current values no prior values sample[64] pmFetch: End of PCP archive log 64 samples required 12911 log reads === pmcd.numagents converted to discrete semantics === Note: timezone set to local timezone of host "super.elastic.org" from archive metric: pmcd.numagents archive: TMP host: super.elastic.org start: Fri Jan 10 18:00:00 2014 end: Mon Jan 13 08:59:49 2014 semantics: discrete instantaneous value units: none samples: 63 interval: 3600.00 sec 18:00:00.000 No values available 19:00:00.000 No values available 20:00:00.000 No values available 21:00:00.000 No values available 22:00:00.000 No values available 23:00:00.000 No values available 00:00:00.000 No values available 01:00:00.000 No values available 02:00:00.000 No values available 03:00:00.000 No values available 04:00:00.000 No values available 05:00:00.000 No values available 06:00:00.000 No 
values available 07:00:00.000 No values available 08:00:00.000 No values available 09:00:00.000 No values available 10:00:00.000 No values available 11:00:00.000 4 12:00:00.000 4 13:00:00.000 4 14:00:00.000 4 15:00:00.000 4 16:00:00.000 4 17:00:00.000 4 18:00:00.000 4 19:00:00.000 4 20:00:00.000 4 21:00:00.000 4 22:00:00.000 4 23:00:00.000 4 00:00:00.000 4 01:00:00.000 4 02:00:00.000 4 03:00:00.000 4 04:00:00.000 4 05:00:00.000 4 06:00:00.000 4 07:00:00.000 4 08:00:00.000 4 09:00:00.000 4 10:00:00.000 4 11:00:00.000 4 12:00:00.000 4 13:00:00.000 4 14:00:00.000 4 15:00:00.000 4 16:00:00.000 4 17:00:00.000 4 18:00:00.000 4 19:00:00.000 4 20:00:00.000 4 21:00:00.000 4 22:00:00.000 4 23:00:00.000 4 00:00:00.000 4 01:00:00.000 4 02:00:00.000 4 03:00:00.000 4 04:00:00.000 4 05:00:00.000 4 06:00:00.000 4 07:00:00.000 4 08:00:00.000 4 log reads: 13266 === all metrics at once === pmie: timezone set to local timezone from archives/bug-1044 hinv_ncpu (Fri Jan 10 18:00:00 2014): 4 kernel_all_nprocs (Fri Jan 10 18:00:00 2014): 444 pmcd_numagents (Fri Jan 10 18:00:00 2014): ? hinv_ncpu (Fri Jan 10 19:00:00 2014): 4 kernel_all_nprocs (Fri Jan 10 19:00:00 2014): 436 pmcd_numagents (Fri Jan 10 19:00:00 2014): ? hinv_ncpu (Fri Jan 10 20:00:00 2014): 4 kernel_all_nprocs (Fri Jan 10 20:00:00 2014): 436 pmcd_numagents (Fri Jan 10 20:00:00 2014): ? hinv_ncpu (Fri Jan 10 21:00:00 2014): 4 kernel_all_nprocs (Fri Jan 10 21:00:00 2014): 442 pmcd_numagents (Fri Jan 10 21:00:00 2014): ? hinv_ncpu (Fri Jan 10 22:00:00 2014): 4 kernel_all_nprocs (Fri Jan 10 22:00:00 2014): 426 pmcd_numagents (Fri Jan 10 22:00:00 2014): ? hinv_ncpu (Fri Jan 10 23:00:00 2014): 4 kernel_all_nprocs (Fri Jan 10 23:00:00 2014): 1082 pmcd_numagents (Fri Jan 10 23:00:00 2014): ? hinv_ncpu (Sat Jan 11 00:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 00:00:00 2014): 1283 pmcd_numagents (Sat Jan 11 00:00:00 2014): ? 
hinv_ncpu (Sat Jan 11 01:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 01:00:00 2014): 788 pmcd_numagents (Sat Jan 11 01:00:00 2014): ? hinv_ncpu (Sat Jan 11 02:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 02:00:00 2014): 637 pmcd_numagents (Sat Jan 11 02:00:00 2014): ? hinv_ncpu (Sat Jan 11 03:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 03:00:00 2014): 639 pmcd_numagents (Sat Jan 11 03:00:00 2014): ? hinv_ncpu (Sat Jan 11 04:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 04:00:00 2014): 637 pmcd_numagents (Sat Jan 11 04:00:00 2014): ? hinv_ncpu (Sat Jan 11 05:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 05:00:00 2014): 642 pmcd_numagents (Sat Jan 11 05:00:00 2014): ? hinv_ncpu (Sat Jan 11 06:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 06:00:00 2014): 635 pmcd_numagents (Sat Jan 11 06:00:00 2014): ? hinv_ncpu (Sat Jan 11 07:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 07:00:00 2014): 638 pmcd_numagents (Sat Jan 11 07:00:00 2014): ? hinv_ncpu (Sat Jan 11 08:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 08:00:00 2014): 635 pmcd_numagents (Sat Jan 11 08:00:00 2014): ? hinv_ncpu (Sat Jan 11 09:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 09:00:00 2014): 651 pmcd_numagents (Sat Jan 11 09:00:00 2014): ? hinv_ncpu (Sat Jan 11 10:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 10:00:00 2014): 651 pmcd_numagents (Sat Jan 11 10:00:00 2014): ? hinv_ncpu (Sat Jan 11 11:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 11:00:00 2014): 647 pmcd_numagents (Sat Jan 11 11:00:00 2014): ? hinv_ncpu (Sat Jan 11 12:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 12:00:00 2014): 636 pmcd_numagents (Sat Jan 11 12:00:00 2014): ? hinv_ncpu (Sat Jan 11 13:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 13:00:00 2014): 638 pmcd_numagents (Sat Jan 11 13:00:00 2014): ? hinv_ncpu (Sat Jan 11 14:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 14:00:00 2014): 646 pmcd_numagents (Sat Jan 11 14:00:00 2014): ? 
hinv_ncpu (Sat Jan 11 15:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 15:00:00 2014): 656 pmcd_numagents (Sat Jan 11 15:00:00 2014): ? hinv_ncpu (Sat Jan 11 16:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 16:00:00 2014): 648 pmcd_numagents (Sat Jan 11 16:00:00 2014): ? hinv_ncpu (Sat Jan 11 17:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 17:00:00 2014): 634 pmcd_numagents (Sat Jan 11 17:00:00 2014): ? hinv_ncpu (Sat Jan 11 18:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 18:00:00 2014): 634 pmcd_numagents (Sat Jan 11 18:00:00 2014): ? hinv_ncpu (Sat Jan 11 19:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 19:00:00 2014): 645 pmcd_numagents (Sat Jan 11 19:00:00 2014): ? hinv_ncpu (Sat Jan 11 20:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 20:00:00 2014): 636 pmcd_numagents (Sat Jan 11 20:00:00 2014): ? hinv_ncpu (Sat Jan 11 21:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 21:00:00 2014): 636 pmcd_numagents (Sat Jan 11 21:00:00 2014): ? hinv_ncpu (Sat Jan 11 22:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 22:00:00 2014): 635 pmcd_numagents (Sat Jan 11 22:00:00 2014): ? hinv_ncpu (Sat Jan 11 23:00:00 2014): 4 kernel_all_nprocs (Sat Jan 11 23:00:00 2014): 636 pmcd_numagents (Sat Jan 11 23:00:00 2014): ? hinv_ncpu (Sun Jan 12 00:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 00:00:00 2014): 636 pmcd_numagents (Sun Jan 12 00:00:00 2014): ? hinv_ncpu (Sun Jan 12 01:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 01:00:00 2014): 637 pmcd_numagents (Sun Jan 12 01:00:00 2014): ? hinv_ncpu (Sun Jan 12 02:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 02:00:00 2014): 633 pmcd_numagents (Sun Jan 12 02:00:00 2014): ? hinv_ncpu (Sun Jan 12 03:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 03:00:00 2014): 636 pmcd_numagents (Sun Jan 12 03:00:00 2014): ? hinv_ncpu (Sun Jan 12 04:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 04:00:00 2014): 630 pmcd_numagents (Sun Jan 12 04:00:00 2014): ? 
hinv_ncpu (Sun Jan 12 05:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 05:00:00 2014): 638 pmcd_numagents (Sun Jan 12 05:00:00 2014): ? hinv_ncpu (Sun Jan 12 06:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 06:00:00 2014): 634 pmcd_numagents (Sun Jan 12 06:00:00 2014): ? hinv_ncpu (Sun Jan 12 07:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 07:00:00 2014): 636 pmcd_numagents (Sun Jan 12 07:00:00 2014): ? hinv_ncpu (Sun Jan 12 08:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 08:00:00 2014): 638 pmcd_numagents (Sun Jan 12 08:00:00 2014): ? hinv_ncpu (Sun Jan 12 09:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 09:00:00 2014): 845 pmcd_numagents (Sun Jan 12 09:00:00 2014): ? hinv_ncpu (Sun Jan 12 10:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 10:00:00 2014): 843 pmcd_numagents (Sun Jan 12 10:00:00 2014): ? hinv_ncpu (Sun Jan 12 11:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 11:00:00 2014): 688 pmcd_numagents (Sun Jan 12 11:00:00 2014): ? hinv_ncpu (Sun Jan 12 12:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 12:00:00 2014): 687 pmcd_numagents (Sun Jan 12 12:00:00 2014): ? hinv_ncpu (Sun Jan 12 13:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 13:00:00 2014): 686 pmcd_numagents (Sun Jan 12 13:00:00 2014): ? hinv_ncpu (Sun Jan 12 14:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 14:00:00 2014): 689 pmcd_numagents (Sun Jan 12 14:00:00 2014): ? hinv_ncpu (Sun Jan 12 15:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 15:00:00 2014): 690 pmcd_numagents (Sun Jan 12 15:00:00 2014): ? hinv_ncpu (Sun Jan 12 16:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 16:00:00 2014): 686 pmcd_numagents (Sun Jan 12 16:00:00 2014): ? hinv_ncpu (Sun Jan 12 17:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 17:00:00 2014): 686 pmcd_numagents (Sun Jan 12 17:00:00 2014): ? hinv_ncpu (Sun Jan 12 18:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 18:00:00 2014): 1101 pmcd_numagents (Sun Jan 12 18:00:00 2014): ? 
hinv_ncpu (Sun Jan 12 19:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 19:00:00 2014): 1091 pmcd_numagents (Sun Jan 12 19:00:00 2014): ? hinv_ncpu (Sun Jan 12 20:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 20:00:00 2014): 788 pmcd_numagents (Sun Jan 12 20:00:00 2014): ? hinv_ncpu (Sun Jan 12 21:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 21:00:00 2014): 788 pmcd_numagents (Sun Jan 12 21:00:00 2014): ? hinv_ncpu (Sun Jan 12 22:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 22:00:00 2014): 786 pmcd_numagents (Sun Jan 12 22:00:00 2014): ? hinv_ncpu (Sun Jan 12 23:00:00 2014): 4 kernel_all_nprocs (Sun Jan 12 23:00:00 2014): 793 pmcd_numagents (Sun Jan 12 23:00:00 2014): ? hinv_ncpu (Mon Jan 13 00:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 00:00:00 2014): 786 pmcd_numagents (Mon Jan 13 00:00:00 2014): ? hinv_ncpu (Mon Jan 13 01:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 01:00:00 2014): 782 pmcd_numagents (Mon Jan 13 01:00:00 2014): ? hinv_ncpu (Mon Jan 13 02:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 02:00:00 2014): 795 pmcd_numagents (Mon Jan 13 02:00:00 2014): ? hinv_ncpu (Mon Jan 13 03:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 03:00:00 2014): 786 pmcd_numagents (Mon Jan 13 03:00:00 2014): ? hinv_ncpu (Mon Jan 13 04:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 04:00:00 2014): 785 pmcd_numagents (Mon Jan 13 04:00:00 2014): ? hinv_ncpu (Mon Jan 13 05:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 05:00:00 2014): 789 pmcd_numagents (Mon Jan 13 05:00:00 2014): ? hinv_ncpu (Mon Jan 13 06:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 06:00:00 2014): 789 pmcd_numagents (Mon Jan 13 06:00:00 2014): ? hinv_ncpu (Mon Jan 13 07:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 07:00:00 2014): 785 pmcd_numagents (Mon Jan 13 07:00:00 2014): ? hinv_ncpu (Mon Jan 13 08:00:00 2014): 4 kernel_all_nprocs (Mon Jan 13 08:00:00 2014): 942 pmcd_numagents (Mon Jan 13 08:00:00 2014): ? log reads: 14947
{ "pile_set_name": "Github" }
/* Copyright (c) 2011, Open Knowledge Foundation Ltd. All rights reserved. HTTP Content-Type Autonegotiation. The functions in this package implement the behaviour specified in http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Open Knowledge Foundation Ltd. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package goautoneg import ( "sort" "strconv" "strings" ) // Structure to represent a clause in an HTTP Accept Header type Accept struct { Type, SubType string Q float64 Params map[string]string } // For internal use, so that we can use the sort interface type accept_slice []Accept func (accept accept_slice) Len() int { slice := []Accept(accept) return len(slice) } func (accept accept_slice) Less(i, j int) bool { slice := []Accept(accept) ai, aj := slice[i], slice[j] if ai.Q > aj.Q { return true } if ai.Type != "*" && aj.Type == "*" { return true } if ai.SubType != "*" && aj.SubType == "*" { return true } return false } func (accept accept_slice) Swap(i, j int) { slice := []Accept(accept) slice[i], slice[j] = slice[j], slice[i] } // Parse an Accept Header string returning a sorted list // of clauses func ParseAccept(header string) (accept []Accept) { parts := strings.Split(header, ",") accept = make([]Accept, 0, len(parts)) for _, part := range parts { part := strings.Trim(part, " ") a := Accept{} a.Params = make(map[string]string) a.Q = 1.0 mrp := strings.Split(part, ";") media_range := mrp[0] sp := strings.Split(media_range, "/") a.Type = strings.Trim(sp[0], " ") switch { case len(sp) == 1 && a.Type == "*": a.SubType = "*" case len(sp) == 2: a.SubType = strings.Trim(sp[1], " ") default: continue } if len(mrp) == 1 { accept = append(accept, a) continue } for _, param := range mrp[1:] { sp := strings.SplitN(param, "=", 2) if len(sp) != 2 { continue } token := strings.Trim(sp[0], " ") if token == "q" { a.Q, _ = strconv.ParseFloat(sp[1], 32) } else { a.Params[token] = strings.Trim(sp[1], " ") } } accept = append(accept, a) } slice := accept_slice(accept) sort.Sort(slice) return } // Negotiate the most appropriate content_type given the accept header // and a list of alternatives. 
func Negotiate(header string, alternatives []string) (content_type string) { asp := make([][]string, 0, len(alternatives)) for _, ctype := range alternatives { asp = append(asp, strings.SplitN(ctype, "/", 2)) } for _, clause := range ParseAccept(header) { for i, ctsp := range asp { if clause.Type == ctsp[0] && clause.SubType == ctsp[1] { content_type = alternatives[i] return } if clause.Type == ctsp[0] && clause.SubType == "*" { content_type = alternatives[i] return } if clause.Type == "*" && clause.SubType == "*" { content_type = alternatives[i] return } } } return }
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;

namespace ResourceMetadata.API.ViewModels
{
    /// <summary>
    /// View model pairing one page of items with the total item count,
    /// so API clients can derive paging information.
    /// </summary>
    /// <typeparam name="T">The element type of the paged collection.</typeparam>
    public class PagedCollectionViewModel<T>
    {
        /// <summary>Gets or sets the items contained in the current page.</summary>
        public IEnumerable<T> Data { get; set; }

        /// <summary>Gets or sets the total number of items across all pages.</summary>
        public int TotalCount { get; set; }
    }
}
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <DDDefinition xmlns="http://www.cern.ch/cms/DDL" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.cern.ch/cms/DDL ../../../DetectorDescription/Schema/DDLSchema.xsd"> <SpecParSection label="regions.xml" eval="true"> <SpecPar name="preshower"> <PartSelector path="//SF"/> <Parameter name="CMSCutsRegion" value="PreshowerRegion" eval="false"/> <Parameter name="ProdCutsForElectrons" value="1*mm"/> <Parameter name="ProdCutsForPositrons" value="1*mm"/> <Parameter name="ProdCutsForGamma" value="1*mm"/> </SpecPar> <SpecPar name="preshowerSensitive"> <PartSelector path="//SFSX"/> <PartSelector path="//SFSY"/> <Parameter name="CMSCutsRegion" value="PreshowerSensRegion" eval="false"/> <Parameter name="ProdCutsForElectrons" value="0.1*mm"/> <Parameter name="ProdCutsForPositrons" value="0.1*mm"/> <Parameter name="ProdCutsForGamma" value="0.1*mm"/> </SpecPar> </SpecParSection> </DDDefinition>
{ "pile_set_name": "Github" }
// Code generated by TestPretty. DO NOT EDIT. // GENERATED FILE DO NOT EDIT 1: - SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 10: ---------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 13: ------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND 
i.indisprimary ORDER BY table_name, pk_name, key_seq 25: ------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 26: -------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 27: --------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true 
AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 30: ------------------------------ SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 31: ------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 32: -------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class 
AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 34: ---------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 35: ----------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 37: ------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x 
AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 39: --------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 40: ---------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 42: ------------------------------------------ SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) 
AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 43: ------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 44: -------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 47: ----------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT 
i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 53: ----------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 54: ------------------------------------------------------ SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 56: -------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN 
pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 59: ----------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 60: ------------------------------------------------------------ SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 64: ---------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, 
ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 68: -------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray( i.indkey ) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 71: ----------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY 
table_name, pk_name, key_seq 113: ----------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 143: ----------------------------------------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN ( SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i ) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 150: ------------------------------------------------------------------------------------------------------------------------------------------------------ SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN 
pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 159: --------------------------------------------------------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 172: ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY 
table_name, pk_name, key_seq 174: ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 200: -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 405: 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN (SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq 639: --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- SELECT NULL AS table_cat, n.nspname AS table_schem, ct.relname AS table_name, a.attname AS column_name, (i.keys).n AS key_seq, ci.relname AS pk_name FROM pg_catalog.pg_class AS ct JOIN pg_catalog.pg_attribute AS a ON ct.oid = a.attrelid JOIN pg_catalog.pg_namespace AS n ON ct.relnamespace = n.oid JOIN 
(SELECT i.indexrelid, i.indrelid, i.indisprimary, information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index AS i) AS i ON a.attnum = (i.keys).x AND a.attrelid = i.indrelid JOIN pg_catalog.pg_class AS ci ON ci.oid = i.indexrelid WHERE true AND ct.relname = 'j' AND i.indisprimary ORDER BY table_name, pk_name, key_seq
{ "pile_set_name": "Github" }
package junit4; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import junit.framework.TestCase; public class SecurityManagerTest extends TestCase { private boolean setUpCalled = false; private static boolean tearDownCalled = false; public void setUp() { setUpCalled = true; tearDownCalled = false; System.out.println( "Called setUp" ); } public void tearDown() { setUpCalled = false; tearDownCalled = true; System.out.println( "Called tearDown" ); } public void testSetUp() { assertTrue( "setUp was not called", setUpCalled ); } public void testNotMuch() { } }
{ "pile_set_name": "Github" }
var LazyWrapper = require('./LazyWrapper');
var getFuncName = require('./getFuncName');
var lodash = require('../chain/lodash');

/**
 * Checks if `func` has a lazy counterpart.
 *
 * A function qualifies only when it has a resolvable name, is the canonical
 * lodash method of that name, and the name exists on `LazyWrapper.prototype`.
 *
 * @private
 * @param {Function} func The function to check.
 * @returns {boolean} Returns `true` if `func` has a lazy counterpart, else `false`.
 */
function isLaziable(func) {
  var name = getFuncName(func);
  if (!name || func !== lodash[name]) {
    return false;
  }
  return name in LazyWrapper.prototype;
}

module.exports = isLaziable;
{ "pile_set_name": "Github" }
package com.mucommander.commons.file.impl.s3;

import com.mucommander.commons.file.AbstractFile;
import com.mucommander.commons.file.AbstractFileTest;
import com.mucommander.commons.file.FileOperation;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.io.IOException;

/**
 * {@link AbstractFileTest} implementation exercising the Amazon S3 file backend.
 * The S3 temporary folder where test files are created is taken from the
 * {@link #TEMP_FOLDER_PROPERTY} system property.
 *
 * @author Maxence Bernard
 */
@Test
public class S3FileTest extends AbstractFileTest {

    /** The system property that holds the URI to the temporary S3 folder */
    public final static String TEMP_FOLDER_PROPERTY = "test_properties.s3_test.temp_folder";

    /** Folder under which every test file is created. */
    private static AbstractFile tempFolder;

    @BeforeClass()
    public static void setupTemporaryFolder() {
        tempFolder = getTemporaryFolder(TEMP_FOLDER_PROPERTY);
    }


    /////////////////////////////////////
    // AbstractFileTest implementation //
    /////////////////////////////////////

    @Override
    public AbstractFile getTemporaryFile() throws IOException {
        // A pseudo-unique name keeps concurrent test runs from colliding.
        String filename = getPseudoUniqueFilename(S3FileTest.class.getName());
        return tempFolder.getDirectChild(filename);
    }

    @Override
    public FileOperation[] getSupportedOperations() {
        return new FileOperation[] {
            FileOperation.READ_FILE,
            FileOperation.RANDOM_READ_FILE,
            FileOperation.CREATE_DIRECTORY,
            FileOperation.LIST_CHILDREN,
            FileOperation.DELETE,
            FileOperation.RENAME,
            FileOperation.COPY_REMOTELY,
        };
    }
}
{ "pile_set_name": "Github" }
/*************************************************************************************

   Toolkit for WPF

   Copyright (C) 2007-2020 Xceed Software Inc.

   This program is provided to you under the terms of the XCEED SOFTWARE, INC.
   COMMUNITY LICENSE AGREEMENT (for non-commercial use) as published at
   https://github.com/xceedsoftware/wpftoolkit/blob/master/license.md

   For more features, controls, and fast professional support,
   pick up the Plus Edition at https://xceed.com/xceed-toolkit-plus-for-wpf/

   Stay informed: follow @datagrid on Twitter or Like http://facebook.com/datagrids

  ***********************************************************************************/

namespace Xceed.Wpf.Toolkit.Core.Input
{
  /// <summary>
  /// Keyboard modifier selector used by toolkit input handling. Values cover
  /// "either side" modifiers (e.g. <see cref="Ctrl"/>) as well as their
  /// side-specific variants (e.g. <see cref="LeftCtrl"/>).
  /// NOTE(review): member order defines the underlying integer values — do not reorder.
  /// </summary>
  public enum KeyModifier
  {
    /// <summary>No modifier key.</summary>
    None,
    /// <summary>NOTE(review): semantics not visible in this file — appears to mark the modifier as blocked; confirm against consumers.</summary>
    Blocked,
    /// <summary>Either Ctrl key.</summary>
    Ctrl,
    /// <summary>The left Ctrl key specifically.</summary>
    LeftCtrl,
    /// <summary>The right Ctrl key specifically.</summary>
    RightCtrl,
    /// <summary>Either Shift key.</summary>
    Shift,
    /// <summary>The left Shift key specifically.</summary>
    LeftShift,
    /// <summary>The right Shift key specifically.</summary>
    RightShift,
    /// <summary>Either Alt key.</summary>
    Alt,
    /// <summary>The left Alt key specifically.</summary>
    LeftAlt,
    /// <summary>The right Alt key specifically.</summary>
    RightAlt,
    /// <summary>NOTE(review): presumably requires the modifier state to match exactly — TODO confirm.</summary>
    Exact,
  }
}
{ "pile_set_name": "Github" }
package: go.uber.org/ratelimit import: [] testImport: - package: github.com/stretchr/testify subpackages: - assert - package: github.com/uber-go/atomic
{ "pile_set_name": "Github" }
/* ---------------------------------------------------------------------------- * SAM Software Package License * ---------------------------------------------------------------------------- * Copyright (c) 2012, Atmel Corporation * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following condition is met: * * - Redistributions of source code must retain the above copyright notice, * this list of conditions and the disclaimer below. * * Atmel's name may not be used to endorse or promote products derived from * this software without specific prior written permission. * * DISCLAIMER: THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE * DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* ---------------------------------------------------------------------------- */ #include "sam3n.h" /* Initialize segments */ extern uint32_t _sfixed; extern uint32_t _efixed; extern uint32_t _etext; extern uint32_t _srelocate; extern uint32_t _erelocate; extern uint32_t _szero; extern uint32_t _ezero; extern uint32_t _sstack; extern uint32_t _estack; /** \cond DOXYGEN_SHOULD_SKIP_THIS */ int main(void); /** \endcond */ void __libc_init_array(void); /* Default empty handler */ void Dummy_Handler(void); /* Cortex-M3 core handlers */ void NMI_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void HardFault_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void MemManage_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void BusFault_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void UsageFault_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void SVC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void DebugMon_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void PendSV_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void SysTick_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); /* Peripherals handlers */ void SUPC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void RSTC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void RTC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void RTT_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void WDT_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void PMC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void EFC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void UART0_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void UART1_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void PIOA_Handler ( void ) __attribute__ ((weak, 
alias("Dummy_Handler"))); void PIOB_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); #ifdef _SAM3N_PIOC_INSTANCE_ void PIOC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); #endif /* _SAM3N_PIOC_INSTANCE_ */ void USART0_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); #ifdef _SAM3N_USART1_INSTANCE_ void USART1_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); #endif /* _SAM3N_USART1_INSTANCE_ */ void TWI0_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void TWI1_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void SPI_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void TC0_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void TC1_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void TC2_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); #ifdef _SAM3N_TC1_INSTANCE_ void TC3_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void TC4_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void TC5_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); #endif /* _SAM3N_TC1_INSTANCE_ */ void ADC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void DACC_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); void PWM_Handler ( void ) __attribute__ ((weak, alias("Dummy_Handler"))); /* Exception Table */ __attribute__ ((section(".vectors"))) const DeviceVectors exception_table = { /* Configure Initial Stack Pointer, using linker-generated symbols */ (void*) (&_estack), (void*) Reset_Handler, (void*) NMI_Handler, (void*) HardFault_Handler, (void*) MemManage_Handler, (void*) BusFault_Handler, (void*) UsageFault_Handler, (void*) (0UL), /* Reserved */ (void*) (0UL), /* Reserved */ (void*) (0UL), /* Reserved */ (void*) (0UL), /* Reserved */ (void*) SVC_Handler, (void*) DebugMon_Handler, (void*) (0UL), /* Reserved */ (void*) PendSV_Handler, (void*) 
SysTick_Handler, /* Configurable interrupts */ (void*) SUPC_Handler, /* 0 Supply Controller */ (void*) RSTC_Handler, /* 1 Reset Controller */ (void*) RTC_Handler, /* 2 Real Time Clock */ (void*) RTT_Handler, /* 3 Real Time Timer */ (void*) WDT_Handler, /* 4 Watchdog Timer */ (void*) PMC_Handler, /* 5 PMC */ (void*) EFC_Handler, /* 6 EEFC */ (void*) (0UL), /* 7 Reserved */ (void*) UART0_Handler, /* 8 UART0 */ (void*) UART1_Handler, /* 9 UART1 */ (void*) (0UL), /* 10 Reserved */ (void*) PIOA_Handler, /* 11 Parallel IO Controller A */ (void*) PIOB_Handler, /* 12 Parallel IO Controller B */ #ifdef _SAM3N_PIOC_INSTANCE_ (void*) PIOC_Handler, /* 13 Parallel IO Controller C */ #else (void*) (0UL), /* 13 Reserved */ #endif /* _SAM3N_PIOC_INSTANCE_ */ (void*) USART0_Handler, /* 14 USART 0 */ #ifdef _SAM3N_USART1_INSTANCE_ (void*) USART1_Handler, /* 15 USART 1 */ #else (void*) (0UL), /* 15 Reserved */ #endif /* _SAM3N_USART1_INSTANCE_ */ (void*) (0UL), /* 16 Reserved */ (void*) (0UL), /* 17 Reserved */ (void*) (0UL), /* 18 Reserved */ (void*) TWI0_Handler, /* 19 TWI 0 */ (void*) TWI1_Handler, /* 20 TWI 1 */ (void*) SPI_Handler, /* 21 SPI */ (void*) (0UL), /* 22 Reserved */ (void*) TC0_Handler, /* 23 Timer Counter 0 */ (void*) TC1_Handler, /* 24 Timer Counter 1 */ (void*) TC2_Handler, /* 25 Timer Counter 2 */ #ifdef _SAM3N_TC1_INSTANCE_ (void*) TC3_Handler, /* 26 Timer Counter 3 */ (void*) TC4_Handler, /* 27 Timer Counter 4 */ (void*) TC5_Handler, /* 28 Timer Counter 5 */ #else (void*) (0UL), /* 26 Reserved */ (void*) (0UL), /* 27 Reserved */ (void*) (0UL), /* 28 Reserved */ #endif /* _SAM3N_TC1_INSTANCE_ */ (void*) ADC_Handler, /* 29 ADC controller */ (void*) DACC_Handler, /* 30 DAC controller */ (void*) PWM_Handler /* 31 PWM */ }; /** * \brief This is the code that gets called on processor reset. * To initialize the device, and call the main() routine. 
 */
void Reset_Handler(void)
{
        uint32_t *pSrc, *pDest;

        /* Initialize the relocate segment: copy the image found at its load
         * address (starting at _etext) to its run address (_srelocate), but
         * only when the two differ. */
        pSrc = &_etext;
        pDest = &_srelocate;

        if (pSrc != pDest) {
                for (; pDest < &_erelocate;) {
                        *pDest++ = *pSrc++;
                }
        }

        /* Clear the zero segment (typically .bss) */
        for (pDest = &_szero; pDest < &_ezero;) {
                *pDest++ = 0;
        }

        /* Set the vector table base address */
        pSrc = (uint32_t *) & _sfixed;
        SCB->VTOR = ((uint32_t) pSrc & SCB_VTOR_TBLOFF_Msk);

        /* When the vectors live inside internal RAM, also flag TBLBASE. */
        if (((uint32_t) pSrc >= IRAM_ADDR) && ((uint32_t) pSrc < IRAM_ADDR + IRAM_SIZE)) {
                SCB->VTOR |= (1UL) << SCB_VTOR_TBLBASE_Pos;
        }

        /* Initialize the C library */
        __libc_init_array();

        /* Branch to main function */
        main();

        /* Infinite loop: park the CPU if main() ever returns */
        while (1);
}

/**
 * \brief Default interrupt handler for unused IRQs.
 */
void Dummy_Handler(void)
{
        while (1) {
        }
}
{ "pile_set_name": "Github" }
#!/usr/bin/env bash

# create a clj-1472 patched version of Clojure that overcomes locking issue so that we can compile GraalVM native-image
#
# Flow: fetch the chosen patch from the CLJ-1472 JIRA issue, clone clojure and
# spec.alpha, apply the patch, then cross-build both (spec against patched
# clojure, clojure against rebuilt spec) into the local maven repo.

set -eou pipefail

# constants
JIRA_ISSUE="CLJ-1472"

# Print a highlighted banner line (ANSI colors).
status-line() {
    echo ""
    echo -e "\033[42m \033[30;46m $1 \033[42m \033[0m"
}

# Print a highlighted error line (ANSI colors).
error-line() {
    echo ""
    echo -e "\033[30;43m*\033[41m error: $1 \033[43m*\033[0m"
}

trap 'error-line "Unexpected error at line $LINENO"' ERR

# Verify every external tool this script shells out to is installed.
check-cmd-prerequisites() {
    local is_error=false
    for cmd in git jet mvn clojure curl sed; do
        if ! [ -x "$(command -v ${cmd})" ]; then
            is_error=true
            >&2 echo "! ${cmd} not found"
        fi
    done
    # git extras exits non-zero when absent; probe it without -e tripping
    set +e
    if ! git extras --version &>/dev/null; then
        is_error=true
        >&2 echo "! git-extras not found"
    fi
    set -e
    if [ "$is_error" = true ]; then
        >&2 error-line "prerequisites check failed"
        exit 1
    fi
}

# Emit {:filename :content} EDN for each *.patch attachment on the JIRA issue.
jira-patches() {
    set -eou pipefail
    local jira_issue="$1"
    curl -s -L "https://clojure.atlassian.net/rest/api/latest/issue/${jira_issue}" |
        jet --from json --keywordize |
        jet --query '[:fields :attachment]' |
        jet --query '(filter (re-find #jet/lit ".*\\.patch$" :filename))' |
        jet --query '(map (select-keys [:filename :content]))'
}

# Resolve a patch filename to its download URL (prints "nil" when absent).
jira-patch-url-for-filename() {
    set -eou pipefail
    local jira_patches="$1"
    local jira_patch_filename="$2"
    echo "${jira_patches}" |
        jet --query "(first (filter (= :filename #jet/lit \"${jira_patch_filename}\")))" |
        jet --query ':content' |
        sed -e 's/^"//' -e 's/"$//'
}

jira-print-patch-filenames() {
    local jira_patches="$1"
    echo "${jira_patches}" |
        jet --query '(map :filename)' --pretty
}

patch-version-string() {
    # version string gets new qualifier that includes sha and a representation of patch file
    # Clojure build system uses ant which has some restrictions on versions, so we can't go
    # all willy nilly.
    set -eou pipefail
    local current_full_version="$1"
    local clojure_sha="$2"
    local patch_filename="$3"
    local version
    local snapshot
    local qualifier
    # regex created by looking at examples here: https://octopus.com/blog/maven-versioning-explained
    if [[ $current_full_version =~ ([.0-9]+)([.-]?[a-zA-Z][a-zA-Z0-9]*)?([.-]'SNAPSHOT')?$ ]]; then
        version="${BASH_REMATCH[1]}"
        qualifier="${BASH_REMATCH[2]}"
        snapshot="${BASH_REMATCH[3]}"
    fi
    local new_qualifier
    new_qualifier="patch_${clojure_sha}_$(basename "${patch_filename}" ".patch" | tr - _ | tr '[:upper:]' '[:lower:]')"
    if [ -n "${qualifier}" ]; then
        new_qualifier="${qualifier}_${new_qualifier}"
    else
        new_qualifier="-${new_qualifier}"
    fi
    echo "${version}${new_qualifier}${snapshot}"
}

# All mvn invocations go through here so they run non-interactively.
maven() {
    # shellcheck disable=SC2068
    mvn --batch-mode $@
}

get-pom-version() {
    set -eou pipefail
    # shellcheck disable=SC2016
    maven -q \
        -Dexec.executable=echo \
        -Dexec.args='${project.version}' \
        --non-recursive \
        exec:exec
}

set-pom-version() {
    maven versions:set -DnewVersion="$1"
}

set-pom-property() {
    local name=$1
    local value=$2
    maven versions:set-property -Dproperty="${name}" -DnewVersion="${value}"
}

get-pom-dep-version() {
    set -eou pipefail
    # must be an easier way to do this
    local group_id=$1
    local artifact_id=$2
    local temp_file;temp_file=$(mktemp -t "clj-patcher-dep-version.XXXXXXXXXX")
    maven dependency:list -DincludeArtifactIds="${artifact_id}" \
        -DoutputFile="${temp_file}" -DexcludeTransitive=true -q
    local version;version=$(grep "${group_id}:${artifact_id}" "${temp_file}" | cut -d : -f 4)
    rm "${temp_file}"
    echo "${version}"
}

set-pom-dep-version() {
    local group_id=$1
    local artifact_id=$2
    local version=$3
    maven versions:use-dep-version -Dincludes="${group_id}:${artifact_id}" \
        -DdepVersion="${version}" -DforceVersion=true
}

mvn-clean-install() {
    rm -rf target && maven install -Dmaven.test.skip=true
}

usage() {
    echo "Usage: $(basename "$0") [options...]"
    echo ""
    echo " -h, --help"
    echo ""
    echo " -p, --patch-filename <filename>"
    echo " name of patch file to download from CLJ-1472"
    echo " defaults to the currently recommended clj-1472-5.patch"
    echo ""
    echo " -c, --clojure-commit <commit>"
    echo " choose clojure commit to patch, can be sha or tag"
    echo " specify HEAD for most recent commit"
    echo " defaults to \"clojure-10.0.1\" tag"
    echo ""
    echo " -w, --work-dir <dir name>"
    echo " temporary work directory"
    echo " defaults to system generated temp dir"
    echo " NOTE: for safety, this script will only delete what it creates under specified work dir"
}

# defaults for args
ARG_HELP=false
ARG_INVALID=false
ARG_PATCH_FILENAME="clj-1472-5.patch"
ARG_WORK_DIR_SET=false
ARG_CLOJURE_COMMIT="clojure-1.10.1"

# Hand-rolled option parsing; unknown flags set ARG_INVALID.
while [[ $# -gt 0 ]]
do
    ARG="$1"
    case $ARG in
        -h|--help)
            ARG_HELP=true
            shift
            ;;
        -p|--patch-filename)
            ARG_PATCH_FILENAME="$2"
            shift
            shift
            ;;
        -c|--clojure-commit)
            ARG_CLOJURE_COMMIT="$2"
            shift
            shift
            ;;
        -w|--work-dir)
            ARG_WORK_DIR="$2"
            ARG_WORK_DIR_SET=true
            shift
            shift
            ;;
        *)
            ARG_INVALID=true
            shift
            ;;
    esac
done

if [ ${ARG_HELP} == true ]; then
    usage
    exit 0
fi

if [ ${ARG_INVALID} == true ]; then
    error-line "invalid usage"
    echo ""
    usage
    exit 1
fi

if [ ${ARG_WORK_DIR_SET} == false ]; then
    # some versions of osx require -t?
    WORK_DIR=$(mktemp -d -t "clj-patcher.XXXXXXXXXX")
else
    # add patch-work dir, I am comfortable creating and deleting patch-work
    # under provided dir but not provide work dir itself - too dangerous.
    WORK_DIR="${ARG_WORK_DIR}/patch-work"
    rm -rf "${WORK_DIR}"
    mkdir -p "${WORK_DIR}"
    # a fully qualified path will be turfable work regardless of current working dir
    WORK_DIR=$(cd "${WORK_DIR}";pwd)
fi
# Clean up the work dir no matter how the script exits.
trap 'rm -rf ${WORK_DIR}' EXIT

check-cmd-prerequisites

JIRA_PATCHES=$(jira-patches "${JIRA_ISSUE}")
URL_FOR_PATCH=$(jira-patch-url-for-filename "${JIRA_PATCHES}" "${ARG_PATCH_FILENAME}")
if [ "${URL_FOR_PATCH}" == "nil" ]; then
    error-line "patch file \"${ARG_PATCH_FILENAME}\" not found in jira issue \"${JIRA_ISSUE}\""
    echo "- patches found for ${JIRA_ISSUE}:"
    jira-print-patch-filenames "${JIRA_PATCHES}"
    exit 1
fi

status-line "ok, let's do it"
echo "Apply issue ${JIRA_ISSUE} patch ${ARG_PATCH_FILENAME}"
echo "Temporary work dir: ${WORK_DIR}"
echo "Patch url: ${URL_FOR_PATCH}"

cd "${WORK_DIR}"

#
# clojure clone and patch
#
status-line "clojure - cloning"
git clone https://github.com/clojure/clojure.git
cd clojure
status-line "clojure - resetting to: ${ARG_CLOJURE_COMMIT}"
git reset --hard "${ARG_CLOJURE_COMMIT}"
status-line "clojure - finding versions"
CLOJURE_SHORT_SHA=$(git rev-parse --short=8 HEAD)
CLOJURE_SPEC_VERSION=$(get-pom-dep-version "org.clojure" "spec.alpha")
SPEC_ALPHA_COMMIT="spec.alpha-${CLOJURE_SPEC_VERSION}"
echo "spec alpha commit: ${SPEC_ALPHA_COMMIT}"
# The clojure build system uses ant which is fussy about what goes into a version
# Converting dashes to underscores seems to do the trick
# Also converting to lowercase to normalize a bit
CLOJURE_VERSION="$(patch-version-string "$(get-pom-version)" "${CLOJURE_SHORT_SHA}" "${ARG_PATCH_FILENAME}")"
echo "clojure patch version: ${CLOJURE_VERSION}"
SPEC_VERSION="$(patch-version-string "${CLOJURE_SPEC_VERSION}" "${CLOJURE_SHORT_SHA}" "${ARG_PATCH_FILENAME}")"
echo "spec patch version: ${SPEC_VERSION}"
status-line "clojure - patching with: ${ARG_PATCH_FILENAME}"
curl -L -O "${URL_FOR_PATCH}"
git rebase-patch "${ARG_PATCH_FILENAME}"
status-line "clojure - setting version: ${CLOJURE_VERSION}"
set-pom-version "${CLOJURE_VERSION}"
cd ..

#
# spec build
#
status-line "spec - cloning"
git clone https://github.com/clojure/spec.alpha.git
cd spec.alpha
status-line "spec - resetting to: ${SPEC_ALPHA_COMMIT}"
git reset --hard "${SPEC_ALPHA_COMMIT}"
status-line "spec - setting version: ${SPEC_VERSION}"
set-pom-version "${SPEC_VERSION}"
status-line "spec - building"
mvn-clean-install
cd ..

#
# clojure build
#
status-line "clojure - building"
cd clojure
mvn-clean-install
cd ..

#
# spec rebuild
#
status-line "spec - rebuilding with patched clojure"
cd spec.alpha
set-pom-property "clojure.version" "${CLOJURE_VERSION}"
mvn-clean-install
cd ..

#
# cloure rebuild
#
status-line "clojure - rebuilding with rebuilt spec"
cd clojure
set-pom-dep-version "org.clojure" "spec.alpha" "${SPEC_VERSION}"
mvn-clean-install

echo ""
echo "Installed to local maven repo:"
echo "- org.clojure/clojure ${CLOJURE_VERSION}"
echo "- org.clojure/spec.alpha ${SPEC_VERSION}"

status-line "done"
{ "pile_set_name": "Github" }
## ide-backend-common

Common shared library for ide-backend and ide-backend-server.

It can be compiled using [GHCJS](https://github.com/ghcjs/ghcjs). This comes in handy because it's also used by [stack-ide's API](https://github.com/commercialhaskell/stack-ide/tree/master/stack-ide-api). This allows us to easily talk to stack-ide from GHCJS!
{ "pile_set_name": "Github" }
/*******************************************************************************

  Intel PRO/10GbE Linux driver
  Copyright(c) 1999 - 2006 Intel Corporation.

  This program is free software; you can redistribute it and/or modify it
  under the terms and conditions of the GNU General Public License,
  version 2, as published by the Free Software Foundation.

  This program is distributed in the hope it will be useful, but WITHOUT
  ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
  more details.

  You should have received a copy of the GNU General Public License along with
  this program; if not, write to the Free Software Foundation, Inc.,
  51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.

  The full GNU General Public License is included in this distribution in
  the file called "COPYING".

  Contact Information:
  Linux NICS <[email protected]>
  e1000-devel Mailing List <[email protected]>
  Intel Corporation, 5200 N.E. Elam Young Parkway, Hillsboro, OR 97124-6497

*******************************************************************************/

#ifndef _IXGB_IDS_H_
#define _IXGB_IDS_H_

/**********************************************************************
** The Device and Vendor IDs for 10 Gigabit MACs
**********************************************************************/

/* PCI vendor / subsystem-vendor ID shared by all supported adapters */
#define INTEL_VENDOR_ID                 0x8086
#define INTEL_SUBVENDOR_ID              0x8086

/* 82597EX controller device IDs; the SR/LR/CX4 suffixes denote board
 * variants (media mapping per suffix — confirm against the hardware docs) */
#define IXGB_DEVICE_ID_82597EX          0x1048
#define IXGB_DEVICE_ID_82597EX_SR       0x1A48
#define IXGB_DEVICE_ID_82597EX_LR       0x1B48

/* Subsystem device IDs associated with the 82597EX parts above */
#define IXGB_SUBDEVICE_ID_A11F          0xA11F
#define IXGB_SUBDEVICE_ID_A01F          0xA01F

/* CX4 variant and its subsystem device IDs */
#define IXGB_DEVICE_ID_82597EX_CX4      0x109E
#define IXGB_SUBDEVICE_ID_A00C          0xA00C
#define IXGB_SUBDEVICE_ID_A01C          0xA01C

#endif /* #ifndef _IXGB_IDS_H_ */

/* End of File */
{ "pile_set_name": "Github" }
// ManifestViewer: tool panel that lists every connected peripheral exposing a
// manifest and displays the selected peripheral's manifest text read-only.

auto ManifestViewer::construct() -> void {
  setCollapsible();
  setVisible(false);  //hidden until explicitly shown

  manifestLabel.setText("Manifest Viewer").setFont(Font().setBold());
  manifestList.onChange([&] { eventChange(); });  //selection change re-syncs the view
  manifestView.setEditable(false).setFont(Font().setFamily(Font::Mono));  //read-only, monospaced
}

// Rebuild the peripheral list from the emulator's node tree.
auto ManifestViewer::reload() -> void {
  manifestList.reset();
  for(auto peripheral : higan::Node::enumerate<higan::Node::Peripheral>(emulator->root)) {
    if(!peripheral->manifest()) continue;  //ignore peripherals with no manifest available
    ComboButtonItem item{&manifestList};
    item.setAttribute<higan::Node::Peripheral>("node", peripheral);  //remember the node backing this entry
    item.setText(peripheral->name());
  }
  eventChange();  //sync view with whatever is now selected
}

// Clear the list (and thus the view) when the emulator unloads.
auto ManifestViewer::unload() -> void {
  manifestList.reset();
  eventChange();
}

// Show the manifest of the currently selected peripheral; blank the view when
// there is no selection.
auto ManifestViewer::refresh() -> void {
  if(auto item = manifestList.selected()) {
    if(auto peripheral = item.attribute<higan::Node::Peripheral>("node")) {
      manifestView.setText(peripheral->manifest());
    }
  } else {
    manifestView.setText();  //no selection: empty text
  }
}

auto ManifestViewer::eventChange() -> void {
  refresh();
}

// Refresh when becoming visible so the displayed manifest is never stale.
auto ManifestViewer::setVisible(bool visible) -> ManifestViewer& {
  if(visible) refresh();
  VerticalLayout::setVisible(visible);
  return *this;
}
{ "pile_set_name": "Github" }
/*
 * Copyright 2020 Confluent Inc.
 *
 * Licensed under the Confluent Community License (the "License"); you may not use
 * this file except in compliance with the License.  You may obtain a copy of the
 * License at
 *
 * http://www.confluent.io/confluent-community-license
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OF ANY KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations under the License.
 */

package io.confluent.support.metrics.utils;

import java.util.concurrent.ThreadLocalRandom;

public final class Jitter {

  private Jitter() {
    throw new IllegalStateException("Utility class should not be instantiated");
  }

  /**
   * Adds up to 1% random jitter to a value. If value is 0, returns 0. If value is negative,
   * adds up to 1% of abs(value) to it.
   *
   * <p>Values whose magnitude is below 100 are returned unchanged: 1% of them truncates
   * to 0, and {@link ThreadLocalRandom#nextInt(int)} requires a positive bound.
   *
   * @param value Number to add up to 1% of abs(value) to. Could be negative.
   * @return Value plus a uniformly random amount in [0, abs(value) / 100).
   */
  public static long addOnePercentJitter(final long value) {
    // Fix 1: use Math.abs so that negative values with |value| >= 100 receive jitter,
    // as documented (previously `value < 100` short-circuited every negative value).
    if (value == 0 || Math.abs(value) < 100) {
      return value;
    }
    // Fix 2: divide in long arithmetic BEFORE narrowing to int. The previous
    // `(int) Math.abs(value) / 100` cast first and could overflow for large longs.
    return value + ThreadLocalRandom.current().nextInt((int) (Math.abs(value) / 100));
  }
}
{ "pile_set_name": "Github" }
/**
 * Copyright © 2010-2020 Nokia
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jsonschema2pojo.integration;

import com.thoughtworks.qdox.JavaDocBuilder;
import com.thoughtworks.qdox.model.JavaClass;
import com.thoughtworks.qdox.model.JavaField;
import com.thoughtworks.qdox.model.JavaMethod;
import com.thoughtworks.qdox.model.Type;
import org.jsonschema2pojo.integration.util.Jsonschema2PojoRule;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;

import java.io.File;
import java.io.IOException;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;

/**
 * Verifies that properties declared in a JSON Schema "required" array get a
 * "(Required)" marker in the generated field, getter and setter javadoc, and
 * that properties outside the array do not.
 */
public class RequiredArrayIT extends RequiredIT {

    @ClassRule
    public static Jsonschema2PojoRule classSchemaRule = new Jsonschema2PojoRule();

    /** QDox model of the class generated from requiredArray.json. */
    private static JavaClass classWithRequired;

    @BeforeClass
    public static void generateClasses() throws IOException {
        classSchemaRule.generateAndCompile("/schema/required/requiredArray.json", "com.example");

        File generatedJavaFile = classSchemaRule.generated("com/example/RequiredArray.java");

        JavaDocBuilder javaDocBuilder = new JavaDocBuilder();
        javaDocBuilder.addSource(generatedJavaFile);

        classWithRequired = javaDocBuilder.getClassByName("com.example.RequiredArray");
    }

    /** Javadoc comment attached to the named field of the generated class. */
    private static String fieldComment(String fieldName) {
        JavaField field = classWithRequired.getFieldByName(fieldName);
        return field.getComment();
    }

    /** Javadoc comment attached to the method with the given signature. */
    private static String methodComment(String methodName, Type... parameterTypes) {
        JavaMethod method = classWithRequired.getMethodBySignature(methodName, parameterTypes);
        return method.getComment();
    }

    @Test
    public void requiredAppearsInFieldJavadoc() {
        assertThat(fieldComment("requiredProperty"), containsString("(Required)"));
    }

    @Test
    public void requiredAppearsInGetterJavadoc() {
        assertThat(methodComment("getRequiredProperty"), containsString("(Required)"));
    }

    @Test
    public void requiredAppearsInSetterJavadoc() {
        assertThat(methodComment("setRequiredProperty", new Type("java.lang.String")), containsString("(Required)"));
    }

    @Test
    public void nonRequiredFiedHasNoRequiredText() {
        assertThat(fieldComment("nonRequiredProperty"), not(containsString("(Required)")));
    }

    @Test
    public void notRequiredIsTheDefault() {
        assertThat(fieldComment("defaultNotRequiredProperty"), not(containsString("(Required)")));
    }
}
{ "pile_set_name": "Github" }
1 219 421 468 530 570 618 726 895 973 1044 1218 1257 1360 1427 1631 1786 2015 2170 2313 2355 2399 2453 2608 2665 2796 2799
{ "pile_set_name": "Github" }
<a name="module_symbol-tree"></a> ## symbol-tree **Author:** Joris van der Wel <[email protected]> * [symbol-tree](#module_symbol-tree) * [SymbolTree](#exp_module_symbol-tree--SymbolTree) ⏏ * [new SymbolTree([description])](#new_module_symbol-tree--SymbolTree_new) * [.initialize(object)](#module_symbol-tree--SymbolTree+initialize) ⇒ <code>Object</code> * [.hasChildren(object)](#module_symbol-tree--SymbolTree+hasChildren) ⇒ <code>Boolean</code> * [.firstChild(object)](#module_symbol-tree--SymbolTree+firstChild) ⇒ <code>Object</code> * [.lastChild(object)](#module_symbol-tree--SymbolTree+lastChild) ⇒ <code>Object</code> * [.previousSibling(object)](#module_symbol-tree--SymbolTree+previousSibling) ⇒ <code>Object</code> * [.nextSibling(object)](#module_symbol-tree--SymbolTree+nextSibling) ⇒ <code>Object</code> * [.parent(object)](#module_symbol-tree--SymbolTree+parent) ⇒ <code>Object</code> * [.lastInclusiveDescendant(object)](#module_symbol-tree--SymbolTree+lastInclusiveDescendant) ⇒ <code>Object</code> * [.preceding(object, [options])](#module_symbol-tree--SymbolTree+preceding) ⇒ <code>Object</code> * [.following(object, [options])](#module_symbol-tree--SymbolTree+following) ⇒ <code>Object</code> * [.childrenToArray(parent, [options])](#module_symbol-tree--SymbolTree+childrenToArray) ⇒ <code>Array.&lt;Object&gt;</code> * [.ancestorsToArray(object, [options])](#module_symbol-tree--SymbolTree+ancestorsToArray) ⇒ <code>Array.&lt;Object&gt;</code> * [.treeToArray(root, [options])](#module_symbol-tree--SymbolTree+treeToArray) ⇒ <code>Array.&lt;Object&gt;</code> * [.childrenIterator(parent, [options])](#module_symbol-tree--SymbolTree+childrenIterator) ⇒ <code>Object</code> * [.previousSiblingsIterator(object)](#module_symbol-tree--SymbolTree+previousSiblingsIterator) ⇒ <code>Object</code> * [.nextSiblingsIterator(object)](#module_symbol-tree--SymbolTree+nextSiblingsIterator) ⇒ <code>Object</code> * 
[.ancestorsIterator(object)](#module_symbol-tree--SymbolTree+ancestorsIterator) ⇒ <code>Object</code> * [.treeIterator(root, options)](#module_symbol-tree--SymbolTree+treeIterator) ⇒ <code>Object</code> * [.index(child)](#module_symbol-tree--SymbolTree+index) ⇒ <code>Number</code> * [.childrenCount(parent)](#module_symbol-tree--SymbolTree+childrenCount) ⇒ <code>Number</code> * [.compareTreePosition(left, right)](#module_symbol-tree--SymbolTree+compareTreePosition) ⇒ <code>Number</code> * [.remove(removeObject)](#module_symbol-tree--SymbolTree+remove) ⇒ <code>Object</code> * [.insertBefore(referenceObject, newObject)](#module_symbol-tree--SymbolTree+insertBefore) ⇒ <code>Object</code> * [.insertAfter(referenceObject, newObject)](#module_symbol-tree--SymbolTree+insertAfter) ⇒ <code>Object</code> * [.prependChild(referenceObject, newObject)](#module_symbol-tree--SymbolTree+prependChild) ⇒ <code>Object</code> * [.appendChild(referenceObject, newObject)](#module_symbol-tree--SymbolTree+appendChild) ⇒ <code>Object</code> <a name="exp_module_symbol-tree--SymbolTree"></a> ### SymbolTree ⏏ **Kind**: Exported class <a name="new_module_symbol-tree--SymbolTree_new"></a> #### new SymbolTree([description]) | Param | Default | Description | | --- | --- | --- | | [description] | <code>&#x27;SymbolTree data&#x27;</code> | Description used for the Symbol | <a name="module_symbol-tree--SymbolTree+initialize"></a> #### symbolTree.initialize(object) ⇒ <code>Object</code> You can optionally initialize an object after its creation, to take advantage of V8's fast properties. Also useful if you would like to freeze your object. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - object | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+hasChildren"></a> #### symbolTree.hasChildren(object) ⇒ <code>Boolean</code> Returns `true` if the object has any children. 
Otherwise it returns `false`. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+firstChild"></a> #### symbolTree.firstChild(object) ⇒ <code>Object</code> Return the first child of the given object. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+lastChild"></a> #### symbolTree.lastChild(object) ⇒ <code>Object</code> Return the last child of the given object. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+previousSibling"></a> #### symbolTree.previousSibling(object) ⇒ <code>Object</code> Return the previous sibling of the given object. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+nextSibling"></a> #### symbolTree.nextSibling(object) ⇒ <code>Object</code> Return the nextSibling sibling of the given object. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+parent"></a> #### symbolTree.parent(object) ⇒ <code>Object</code> Return the parent of the given object. 
`O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+lastInclusiveDescendant"></a> #### symbolTree.lastInclusiveDescendant(object) ⇒ <code>Object</code> Find the inclusive descendant that is last in tree order of the given object. `O(n)` (worst case) where n is the depth of the subtree of `object` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+preceding"></a> #### symbolTree.preceding(object, [options]) ⇒ <code>Object</code> Find the preceding object (A) of the given object (B). An object A is preceding an object B if A and B are in the same tree and A comes before B in tree order. `O(n)` (worst case) <br> `O(1)` (amortized when walking the entire tree) **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | Description | | --- | --- | --- | | object | <code>Object</code> | | | [options] | <code>Object</code> | | | [options.root] | <code>Object</code> | If set, `root` must be an inclusive ancestor of the return value (or else null is returned). This check _assumes_ that `root` is also an inclusive ancestor of the given `node` | <a name="module_symbol-tree--SymbolTree+following"></a> #### symbolTree.following(object, [options]) ⇒ <code>Object</code> Find the following object (A) of the given object (B). An object A is following an object B if A and B are in the same tree and A comes after B in tree order. 
`O(n)` (worst case) where n is the amount of objects in the entire tree<br> `O(1)` (amortized when walking the entire tree) **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | Default | Description | | --- | --- | --- | --- | | object | <code>Object</code> | | | | [options] | <code>Object</code> | | | | [options.root] | <code>Object</code> | | If set, `root` must be an inclusive ancestor of the return value (or else null is returned). This check _assumes_ that `root` is also an inclusive ancestor of the given `node` | | [options.skipChildren] | <code>Boolean</code> | <code>false</code> | If set, ignore the children of `object` | <a name="module_symbol-tree--SymbolTree+childrenToArray"></a> #### symbolTree.childrenToArray(parent, [options]) ⇒ <code>Array.&lt;Object&gt;</code> Append all children of the given object to an array. `O(n)` where n is the amount of children of the given `parent` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | Default | Description | | --- | --- | --- | --- | | parent | <code>Object</code> | | | | [options] | <code>Object</code> | | | | [options.array] | <code>Array.&lt;Object&gt;</code> | <code>[]</code> | | | [options.filter] | <code>function</code> | | Function to test each object before it is added to the array. Invoked with arguments (object). Should return `true` if an object is to be included. | | [options.thisArg] | <code>\*</code> | | Value to use as `this` when executing `filter`. | <a name="module_symbol-tree--SymbolTree+ancestorsToArray"></a> #### symbolTree.ancestorsToArray(object, [options]) ⇒ <code>Array.&lt;Object&gt;</code> Append all inclusive ancestors of the given object to an array. 
`O(n)` where n is the amount of ancestors of the given `object` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | Default | Description | | --- | --- | --- | --- | | object | <code>Object</code> | | | | [options] | <code>Object</code> | | | | [options.array] | <code>Array.&lt;Object&gt;</code> | <code>[]</code> | | | [options.filter] | <code>function</code> | | Function to test each object before it is added to the array. Invoked with arguments (object). Should return `true` if an object is to be included. | | [options.thisArg] | <code>\*</code> | | Value to use as `this` when executing `filter`. | <a name="module_symbol-tree--SymbolTree+treeToArray"></a> #### symbolTree.treeToArray(root, [options]) ⇒ <code>Array.&lt;Object&gt;</code> Append all descendants of the given object to an array (in tree order). `O(n)` where n is the amount of objects in the sub-tree of the given `object` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | Default | Description | | --- | --- | --- | --- | | root | <code>Object</code> | | | | [options] | <code>Object</code> | | | | [options.array] | <code>Array.&lt;Object&gt;</code> | <code>[]</code> | | | [options.filter] | <code>function</code> | | Function to test each object before it is added to the array. Invoked with arguments (object). Should return `true` if an object is to be included. | | [options.thisArg] | <code>\*</code> | | Value to use as `this` when executing `filter`. 
| <a name="module_symbol-tree--SymbolTree+childrenIterator"></a> #### symbolTree.childrenIterator(parent, [options]) ⇒ <code>Object</code> Iterate over all children of the given object `O(1)` for a single iteration **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - An iterable iterator (ES6) | Param | Type | Default | | --- | --- | --- | | parent | <code>Object</code> | | | [options] | <code>Object</code> | | | [options.reverse] | <code>Boolean</code> | <code>false</code> | <a name="module_symbol-tree--SymbolTree+previousSiblingsIterator"></a> #### symbolTree.previousSiblingsIterator(object) ⇒ <code>Object</code> Iterate over all the previous siblings of the given object. (in reverse tree order) `O(1)` for a single iteration **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - An iterable iterator (ES6) | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+nextSiblingsIterator"></a> #### symbolTree.nextSiblingsIterator(object) ⇒ <code>Object</code> Iterate over all the next siblings of the given object. 
(in tree order) `O(1)` for a single iteration **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - An iterable iterator (ES6) | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+ancestorsIterator"></a> #### symbolTree.ancestorsIterator(object) ⇒ <code>Object</code> Iterate over all inclusive ancestors of the given object `O(1)` for a single iteration **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - An iterable iterator (ES6) | Param | Type | | --- | --- | | object | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+treeIterator"></a> #### symbolTree.treeIterator(root, options) ⇒ <code>Object</code> Iterate over all descendants of the given object (in tree order). where n is the amount of objects in the sub-tree of the given `root`: `O(n)` (worst case for a single iterator) `O(n)` (amortized, when completing the iterator)<br> **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - An iterable iterator (ES6) | Param | Type | Default | | --- | --- | --- | | root | <code>Object</code> | | | options | <code>Object</code> | | | [options.reverse] | <code>Boolean</code> | <code>false</code> | <a name="module_symbol-tree--SymbolTree+index"></a> #### symbolTree.index(child) ⇒ <code>Number</code> Find the index of the given object (the number of preceding siblings). 
`O(n)` where n is the amount of preceding siblings<br> `O(1)` (amortized, if the tree is not modified) **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Number</code> - The number of preceding siblings, or -1 if the object has no parent | Param | Type | | --- | --- | | child | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+childrenCount"></a> #### symbolTree.childrenCount(parent) ⇒ <code>Number</code> Calculate the number of children. `O(n)` where n is the amount of children<br> `O(1)` (amortized, if the tree is not modified) **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | parent | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+compareTreePosition"></a> #### symbolTree.compareTreePosition(left, right) ⇒ <code>Number</code> Compare the position of an object relative to another object. A bit set is returned: <ul> <li>DISCONNECTED : 1</li> <li>PRECEDING : 2</li> <li>FOLLOWING : 4</li> <li>CONTAINS : 8</li> <li>CONTAINED_BY : 16</li> </ul> The semantics are the same as compareDocumentPosition in DOM, with the exception that DISCONNECTED never occurs with any other bit. where n and m are the amount of ancestors of `left` and `right`; where o is the amount of children of the lowest common ancestor of `left` and `right`: `O(n + m + o)` (worst case) `O(n + m) (amortized, if the tree is not modified) **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> | Param | Type | | --- | --- | | left | <code>Object</code> | | right | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+remove"></a> #### symbolTree.remove(removeObject) ⇒ <code>Object</code> Remove the object from this tree. Has no effect if already removed. 
`O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - removeObject | Param | Type | | --- | --- | | removeObject | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+insertBefore"></a> #### symbolTree.insertBefore(referenceObject, newObject) ⇒ <code>Object</code> Insert the given object before the reference object. `newObject` is now the previous sibling of `referenceObject`. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - newObject **Throws**: - <code>Error</code> If the newObject is already present in this SymbolTree | Param | Type | | --- | --- | | referenceObject | <code>Object</code> | | newObject | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+insertAfter"></a> #### symbolTree.insertAfter(referenceObject, newObject) ⇒ <code>Object</code> Insert the given object after the reference object. `newObject` is now the next sibling of `referenceObject`. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - newObject **Throws**: - <code>Error</code> If the newObject is already present in this SymbolTree | Param | Type | | --- | --- | | referenceObject | <code>Object</code> | | newObject | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+prependChild"></a> #### symbolTree.prependChild(referenceObject, newObject) ⇒ <code>Object</code> Insert the given object as the first child of the given reference object. `newObject` is now the first child of `referenceObject`. 
`O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - newObject **Throws**: - <code>Error</code> If the newObject is already present in this SymbolTree | Param | Type | | --- | --- | | referenceObject | <code>Object</code> | | newObject | <code>Object</code> | <a name="module_symbol-tree--SymbolTree+appendChild"></a> #### symbolTree.appendChild(referenceObject, newObject) ⇒ <code>Object</code> Insert the given object as the last child of the given reference object. `newObject` is now the last child of `referenceObject`. `O(1)` **Kind**: instance method of <code>[SymbolTree](#exp_module_symbol-tree--SymbolTree)</code> **Returns**: <code>Object</code> - newObject **Throws**: - <code>Error</code> If the newObject is already present in this SymbolTree | Param | Type | | --- | --- | | referenceObject | <code>Object</code> | | newObject | <code>Object</code> |
{ "pile_set_name": "Github" }
// License: Apache 2.0. See LICENSE file in root directory. // Copyright(c) 2019 Intel Corporation. All Rights Reserved. #pragma once #include <string> #include <memory> #include <utility> #include <list> #include <opencv2/core/types.hpp> // for cv::Rect namespace openvino_helpers { /* Describe detected face */ class detected_object { size_t _id; // Some unique identifier that assigned to us std::string _label; // Any string assigned to us in the ctor cv::Rect _location; // In the color frame cv::Rect _depth_location; // In the depth frame float _intensity; // Some heuristic calculated on _location, allowing approximate "identity" based on pixel intensity float _depth; public: using ptr = std::shared_ptr< detected_object >; explicit detected_object( size_t id, std::string const & label, cv::Rect const & location, cv::Rect const & depth_location = cv::Rect {}, float intensity = 1, float depth = 0 ); virtual ~detected_object() {} // Update the location and intensity of the face void move( cv::Rect const & location, cv::Rect const & depth_location = cv::Rect {}, float intensity = 1, float depth = 0 ) { _location = location; _depth_location = depth_location; _intensity = intensity; _depth = depth; } cv::Rect const & get_location() const { return _location; } cv::Rect const & get_depth_location() const { return _depth_location; } float get_intensity() const { return _intensity; } float get_depth() const { return _depth; } size_t get_id() const { return _id; } std::string const & get_label () const { return _label; } }; typedef std::list< detected_object::ptr > detected_objects; // Returns a face found approximately in the given location, or null if none detected_object::ptr find_object( cv::Rect rect, detected_objects const & objects ); }
{ "pile_set_name": "Github" }
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build linux,!appengine darwin package imports import "syscall" func direntInode(dirent *syscall.Dirent) uint64 { return uint64(dirent.Ino) }
{ "pile_set_name": "Github" }
//
//  ClientApplication.swift
//  MastodonKit
//
//  Created by Ornithologist Coder on 4/17/17.
//  Copyright © 2017 MastodonKit. All rights reserved.
//

import Foundation

/// Credentials of a registered client application: an ID, a redirect URI,
/// and a client ID/secret pair. Decodable from the server's snake_case
/// JSON via the `CodingKeys` mapping below.
public class ClientApplication: Codable {
    /// The application ID.
    public let id: String

    /// Where the user should be redirected after authorization.
    public let redirectURI: String

    /// The application client ID.
    public let clientID: String

    /// The application client secret.
    public let clientSecret: String

    /// Maps the camelCase property names to the snake_case JSON keys.
    private enum CodingKeys: String, CodingKey {
        case id
        case redirectURI = "redirect_uri"
        case clientID = "client_id"
        case clientSecret = "client_secret"
    }
}
{ "pile_set_name": "Github" }
<script src="dist/js/tether.js"></script> <script src="docs/js/markAttachment.js"></script> <script src="docs/js/intro.js"></script> <link rel="stylesheet" href="docs/css/intro.css"></link> Tether ====== Tether is a JavaScript library for efficiently making an absolutely positioned element stay next to another element on the page. For example, you might want a tooltip or dialog to open, and remain, next to the relevant item on the page. Tether includes the ability to constrain the element within the viewport, its scroll parent, any other element on the page, or a fixed bounding box. When it exceeds those constraints it can be pinned to the edge, flip to the other side of its target, or hide itself. Tether optimizes its location placement to result in the minimum amount of 'jankyness' as the page is scrolled and resized. The page can maintain 60fps scrolling even with dozens or hundreds of tethers on screen (pop open the devtools timeline as you scroll this page). Tether is 5kb minified and gzipped, and supports IE9+, and all modern browsers. <h2 class="projects-header">Projects Built With Tether</h2> <p class="projects-paragraph"> <a href="http://github.hubspot.com/select/docs/welcome"><span>Select</span><img src="http://github.hubspot.com/os-icons/select-icon.png" /></a> <a href="http://github.hubspot.com/drop/docs/welcome"><span>Drop</span><img src="http://github.hubspot.com/os-icons/drop-icon.png" /></a> <a href="http://github.hubspot.com/tooltip/docs/welcome"><span>Tooltip</span><img src="http://github.hubspot.com/os-icons/tooltip-icon.png" /></a> <a href="http://github.hubspot.com/shepherd/docs/welcome"><span>Shepherd</span><img src="http://github.hubspot.com/os-icons/shepherd-icon.png" /></a> </p> Usage ----- The element to be moved is called the 'element'. The element in the page it's to be attached to is called the 'target'. To use Tether, you define a point on the target and a point on the element. 
Tether moves the element to keep those two points on top of each other. That point is called the attachment (we've marked it in the examples with a red <span class="attachment-mark"></span>). For example, if you'd like the element to sit on the left of the target: <pre class="pre-with-output"><code class="lang-javascript" data-example='usage'>new Tether({ element: yellowBox, target: greenBox, attachment: 'top right', targetAttachment: 'top left' }); </code></pre><output data-example='usage'></output> Attachment ---------- You can move the attachment points of both the element and the target. For example, lets move the element's attachment: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: <mark>'bottom left'</mark>, targetAttachment: 'top left' }); </code></pre><output></output> We can also change the target's attachment point: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'bottom left', targetAttachment: <mark>'bottom right'</mark> }); </code></pre><output></output> There are two more attachment points we haven't seen yet, center and middle: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: <mark>'middle center'</mark>, targetAttachment: <mark>'middle center'</mark> }); </code></pre><output></output> All told, Tether provides six built in attachment positions: - left - center - right - top - middle - bottom The syntax of the attachment properties is: `"vertical-attachment horizontal-attachment"`. You must always supply an `attachment`. If you don't supply a `target-attachment`, it is assumed to be the mirror image of `attachment`. ### Offset The six attachment points we provide are not always enough to place the element exactly where you want it. 
To correct this, we provide two more properties, `offset` and `targetOffset`. <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top right', targetAttachment: 'top left', <mark>offset: '0 10px'</mark> }); </code></pre><output></output> As you can see, we've moved the attachment point of the element 10px to the right. We can also move the attachment point of the target: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top right', targetAttachment: 'top left', offset: '0 10px', <mark>targetOffset: '20px 0'</mark> }); </code></pre><output></output> The offset properties also accept percentages. Percentages in `offset` refer to the height and width of the element, `targetOffset` the height and width of the target. <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top right', targetAttachment: 'top left', targetOffset: <mark>'0 75%'</mark> }); </code></pre><output></output> The syntax of the offset properties is `"vertical-offset horizontal-offset"` Tether offers a couple of special attachments, using the `targetModifier` option: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: scrollBox, attachment: 'middle right', targetAttachment: 'middle left', targetModifier: 'scroll-handle' }); </code></pre><output></output> Set the target to `document.body` to have the element follow the page's scroll bar. 
The `targetModifier` `visible` can be used to attach an element to the visible part of an element: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: document.body, attachment: 'middle center', targetAttachment: 'middle center', <mark>targetModifier: 'visible'</mark> }); </code></pre><output deactivated></output> <pre class="pre-with-output"><code class="lang-javascript" data-example="scroll-visible">new Tether({ element: yellowBox, <mark>target: scrollBox</mark>, attachment: 'middle center', targetAttachment: 'middle center', targetModifier: 'visible' }); </code></pre><output class="no-green scroll-page" data-example="scroll-visible"></output> Constraints ----------- If you have tried any of the previous examples, you'll notice that it's pretty easy to scroll the regions in such a way that the element is hanging out on its own, with no target in sight. Constraints allow you to control what happens when the tethered element would have to fall outside of a defined region to maintain the attachment. <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'middle left', targetAttachment: 'middle left', <mark>constraints</mark>: [ { to: 'scrollParent', pin: true } ] }); </code></pre><output></output> We've created a constraint which will keep the element within its scroll parent by 'pinning' it to the edges if it tries to escape. For the sake of the example, we're also highlighting the pinned edge in red. 
Specify an array of sides if you'd only like to pin those edges: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'middle left', targetAttachment: 'middle left', constraints: [ { to: 'scrollParent', pin: <mark>['top']</mark> } ] }); </code></pre><output></output> You might want to allow the element to change its attachment, if doing so would keep more of it within its assigned region: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', constraints: [ { to: 'scrollParent', <mark>attachment: 'together'</mark> } ] }); </code></pre><output></output> If you scroll the example a bit, you'll see it flip the attachment when necessary. You can combine `pin` and `attachment` as well: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', constraints: [ { to: 'scrollParent', attachment: 'together', <mark>pin: true</mark> } ] }); </code></pre><output></output> Attachment will accept any of these values: - `element`: Only change the element's attachment - `target`: Only change the target's attachment - `both`: Change either's attachment (or both), as needed - `together`: Change both the element's and target's attachment at the same time (to 'flip' the element to the other side of the attachment) - `none`: Don't allow changes to attachment (the default) Together is the option you will use most commonly: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top right', targetAttachment: 'bottom left', constraints: [ { to: 'scrollParent', attachment: <mark>'together'</mark> } ] }); </code></pre><output></output> You can also provide different settings for the vertical 
and horizontal attachments: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', constraints: [ { to: 'scrollParent', attachment: <mark>'together none'</mark> } ] }); </code></pre><output></output> Whenever the element is out of the constrained area, we add the `tether-out-of-bounds` class to it. If you add some CSS to make items with that class `display: none`, the tether will hide. <pre class="pre-with-output"><code class="lang-javascript" data-example="hide">new Tether({ element: yellowBox, target: greenBox, attachment: 'middle center', targetAttachment: 'middle center', constraints: [ { to: 'scrollParent' } ] }); </code></pre><output data-example="hide"></output> You can also constrain the element to the viewport, you'll have to scroll the page to see this one. <pre class="pre-with-output"><code class="lang-javascript" data-example="window">new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', constraints: [ { to: <mark>'window'</mark>, attachment: 'together' } ] }); </code></pre><output data-example="window" class="scroll-page"></output> You can, of course, use pin with the window as well to make it always visible no matter where the user scrolls: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', constraints: [ { to: 'window', attachment: 'together', <mark>pin: true</mark> } ] }); </code></pre><output deactivated class="scroll-page visible-enabled"></output> `to` can be any of: - `'scrollParent'` - `'window'` - any DOM element - an array of bound points relative to the body `[X1, Y1, X2, Y2]` You can also provide multiple constraints, keeping in mind that they are processed in the order supplied (the last one always has the final word). 
<pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', constraints: [ { to: <mark>'scrollParent'</mark>, pin: true }, { to: <mark>'window'</mark>, attachment: 'together' } ] }); </code></pre><output></output> Optimization ------------ ### Element Moving The goal of Tether's optimizer is to not have to change the positioning CSS as the page is scrolled or resized. To accomplish this it looks at the last few positions, finds commonalities, and uses them to decide whether to position the element absolutely or with fixed positioning. If the element is fully contained within its scroll parent, its DOM node can also be moved inside the scroll parent, to avoid repaints as the container is scrolled. <pre class="pre-with-output"><code class="lang-javascript" data-example="optimizer">new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left' }); </code></pre><output data-example="optimizer"></output> We are moving where the DOM node is, so if you have CSS which styles elements within the offset parent, you may see some rendering changes. Also note that this optimization works best if the scroll parent is the offset parent. In other words, **the scroll parent should be made position relative, fixed or absolute to enable this optimization.** If you do see stylistic changes occur when the element is moved, you might want to disable this optimization. You can do that by setting `optimizations.moveElement` to false. <pre class="pre-with-output"><code class="lang-javascript" data-example="optimizer2">new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', targetAttachment: 'bottom left', optimizations: { <mark>moveElement: false</mark> } }); </code></pre><output data-example="optimizer2"></output> ### GPU By default tether positions elements using CSS transforms. 
These transforms allow the tethered element to be moved as its own layer to not force a repaint of the underlying page. This method of positioning can cause some issues however, including color shifts and artifacts. If you experience these issues, you can disable this optimization by setting `optimizations.gpu` to false: <pre class="pre-with-output"><code class="lang-javascript" data-example>new Tether({ element: yellowBox, target: greenBox, attachment: 'top left', optimizations: { <mark>gpu: false</mark> } }); </code></pre><output></output> Methods ------- The `Tether` constructor we've been using in these examples returns us a `Tether` object. The `Tether` object has these methods: - `setOptions({ options })` - Update any of the options (such as attachment) - `disable()` - Disable the tethering - `enable()` - Enable the tethering - `destroy()` - Disable and remove all references - `position()` - Manually trigger a repositioning Options ------- The full list of options which can be passed to the `Tether` constructor and `setOptions`: - `element`: The DOM element, jQuery element, or a selector string of an element which will be moved - `target`: The DOM element, jQuery element, or a selector string of an element which the `element` will be attached to - `attachment`: A string of the form `'vert-attachment horiz-attachment'` - `vert-attachment` can be any of `'top'`, `'middle'`, `'bottom'` - `horiz-attachment` can be any of `'left'`, `'center'`, `'right'` - `targetAttachment`: A string similar to `attachment`. The one difference is that, if it's not provided, targetAttachment will assume the mirror image of `attachment`. - `offset`: A string of the form `'vert-offset horiz-offset'` - `vert-offset` and `horiz-offset` can be of the form `"20px"` or `"55%"` - `targetOffset`: A string similar to `offset`, but refering to the offset of the target - `targetModifier`: Can be set to `'visible'` or `'scroll-handle'` - `enabled`: Should the tether be enabled initially? 
Defaults to `true`. - `classes`: A hash of classes which should be changed or disabled - `classPrefix`: The prefix placed at the beginning of the default classes, defaults to `'tether'` - `optimizations`: A hash of optimizations, used to disable them - `constraints`: An array of constraint definition objects. Each definition is of the form: - `to`: A DOM element, bounding box, the string `'window'`, or the string `'scrollParent'` - `pin`: `true` or an array of strings representing the sides of the constraint - `attachment`: A string of the form `"vert-modifier horiz-modifier"`, or a single value representing both - Each modifier should be one of `"none"`, `"together"`, `"element"`, `"target"`, or `"both"`. - `outOfBoundsClass`: An alternative to `"tether-out-of-bounds"`, useful if the class needs to be differentiated from that of another constraint. - `pinnedClass`: An alternative to `"tether-pinned"`, similar to `outOfBoundsClass`. Classes ------- Tether adds a variety of classes to the element and target to allow you to style them based on their tethering. You can change the prefix of the classes with the `classPrefix` option. It is `'tether'` by default, but you could, for example, change it to be `'bill'` if you were building the bill library and all the classes would be `'bill-*'`. ```javascript new Tether({ classPrefix: 'bill' }); ``` The sass/css is similarily configurable, see [tooltip](https://github.com/HubSpot/tooltip/blob/master/sass/tooltip-theme-arrows.sass#L14) for an example of how to make your own prefixed css file. All classes can be changed or disabled with the `classes` option. 
For example, to change the `tether-element` class to be `my-box`: ```javascript new Tether({ classes: { element: 'my-box' } }); ``` You can also disable classes you're not going to use: ```javascript new Tether({ classes: { out-of-bounds: false } }); ``` - `tether-element` is added to the element - `tether-target` is added to the target - `tether-enabled` is added to both elements when tether is not disabled - `tether-element-attached-[left,right,top,bottom,middle,center]` is added to both elements based on the elements attachment, if the element becomes detached (for example, if it's pinned), that class is removed. The class reflects how the element is actually attached, so if a constraint changes the attachment, that change will be reflected in the class. - `tether-target-attached-[left,right,top,bottom,middle,center]` is added to both elements based on the target's attachment. All of the characteristics are the same as for element-attached. ### Constraint-related Classes - `tether-out-of-bounds`, `tether-out-of-bounds-[side]` are added to both the element and the target when the element is placed outside of its constraint. - `tether-pinned`, `tether-pinned-[side]` are added to both the element and target when a constraint has pinned the element to the [side] of the container. Browser Support --------------- Tether supports IE9+, and all modern browsers. Google doesn't support IE8, Microsoft is dropping support in a few months, and not supporting it saves us a whole lot of trouble. If you are interested in adding support, get in touch, we're happy to accept a PR. Contributing ------------ Please contribute! Tether is developed in Coffeescript, but if that's problematic for you, feel free to submit pull requests which just change the JavaScript files, we can adapt them as needed. 
To build Tether, you need: - Node.js #### Instructions - Install the build tool ```bash npm install -g gulp ``` - Install the project ```bash # In the project directory npm install ``` - Build / Watch ```bash gulp ```
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//

#import <Vision/VNEspressoModelClassifier.h>

// Reverse-engineered (class-dump) declaration of Vision's private "SmartCam"
// classifier, a subclass of VNEspressoModelClassifier.  No implementation is
// visible here, so the notes below are inferred from selector names only —
// TODO confirm against the framework binary before relying on them.
//
// NOTE(review): "createDescriprorProcessor..." is the selector exactly as
// dumped (typo included); it must not be "corrected" or callers will break.
__attribute__((visibility("hidden")))
@interface VNSmartCamClassifier : VNEspressoModelClassifier
{
}

+ (void)initDumpDebugIntermediates:(id *)arg1 debugInfo:(id *)arg2;
+ (id)createObservationWithDescriptors:(id)arg1 forRequestRevision:(unsigned long long)arg2;
+ (id)returnAllResultsOptionKey;
+ (Class)espressoModelImageprintClass;
+ (id)classifierResourceTypesToNamesForRevision:(unsigned long long)arg1;
// shared_ptr_* are class-dump's mangled names for C++ std::shared_ptr<...>
// template instantiations whose element types were not recovered.
+ (shared_ptr_b26ea6de)createDescriprorProcessorWithModelPath:(const char *)arg1 nBatch:(int)arg2 computePlatform:(int)arg3 computePath:(int)arg4 options:(struct Options)arg5;
+ (shared_ptr_047f28ed)createClassifierWithDescriptor:(shared_ptr_b26ea6de)arg1 classifierAbsolutePath:(const char *)arg2 computePlatform:(int)arg3 computePath:(int)arg4 labelsFilename:(const char *)arg5 options:(struct Options)arg6;
- (id)processWithOptions:(id)arg1 regionOfInterest:(struct CGRect)arg2 warningRecorder:(id)arg3 error:(id *)arg4;
- (BOOL)completeInitializationAndReturnError:(id *)arg1;

@end
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <item xmlns="http://www.supermemo.net/2006/smux"> <lesson-title>GRE词汇:《再要你命三千》</lesson-title> <chapter-title>List 8</chapter-title> <question-title>根据单词,回想词义</question-title> <question><span style="font-family: Comic Sans MS; font-weight: normal; color: #550000; font-style: normal">dolorous</span><br /> <span style="font-family: 微软雅黑; font-weight: normal; color: #000000; font-style: normal"><span style="color: #550000;">['dɒl(ə)rəs]</span></span><br /> </question> <answer>【考法 1】 adj. 忧伤的: causing, marked by, or expressing misery or grief<br /> 【例】 He lifted a pair of sapphire['sæfaiə]蓝色的, dolorous eyes. 他抬起了一双忧郁的蓝宝石般的眼睛<br /> 【近】 agonized, bemoaning, bewailing, deplorable, grievous ,miserable, plaintive, rueful, sorrowful, woeful<br /> 【反】 happy, joyful, jovial, jubilant 高兴的<br /> <br /> 【记】 来自dole;condole 同情 是 con 同 + 悲伤,同悲,同情;dollar令人悲哀<br /> </answer> <modified>2015-07-30</modified> <template-id>10008</template-id> <question-audio>true</question-audio> </item>
{ "pile_set_name": "Github" }
namespace Surging.Core.SwaggerGen
{
    /// <summary>
    /// Factory abstraction for obtaining <see cref="ISchemaRegistry"/>
    /// instances used during Swagger/OpenAPI document generation.
    /// </summary>
    public interface ISchemaRegistryFactory
    {
        /// <summary>
        /// Creates a schema registry. Whether the instance is fresh or shared
        /// is up to the implementation — not determinable from this interface.
        /// </summary>
        /// <returns>An <see cref="ISchemaRegistry"/> instance.</returns>
        ISchemaRegistry Create();
    }
}
{ "pile_set_name": "Github" }
#!/bin/bash

# This example script activates an interface based on the specified
# configuration.
#
# In the interest of keeping the KVP daemon code free of distro specific
# information; the kvp daemon code invokes this external script to configure
# the interface.
#
# The only argument to this script is the configuration file that is to
# be used to configure the interface.
#
# Each Distro is expected to implement this script in a distro specific
# fashion. For instance on Distros that ship with Network Manager enabled,
# this script can be based on the Network Manager APIs for configuring the
# interface.
#
# This example script is based on a RHEL environment.
#
# Here is the format of the ip configuration file:
#
# HWADDR=macaddr
# DEVICE=interface name
# BOOTPROTO=<protocol> (where <protocol> is "dhcp" if DHCP is configured
#                       or "none" if no boot-time protocol should be used)
#
# IPADDR0=ipaddr1
# IPADDR1=ipaddr2
# IPADDRx=ipaddry (where y = x + 1)
#
# NETMASK0=netmask1
# NETMASKx=netmasky (where y = x + 1)
#
# GATEWAY=ipaddr1
# GATEWAYx=ipaddry (where y = x + 1)
#
# DNSx=ipaddrx (where first DNS address is tagged as DNS1 etc)
#
# IPV6 addresses will be tagged as IPV6ADDR, IPV6 gateway will be
# tagged as IPV6_DEFAULTGW and IPV6 NETMASK will be tagged as
# IPV6NETMASK.
#
# The host can specify multiple ipv4 and ipv6 addresses to be
# configured for the interface. Furthermore, the configuration
# needs to be persistent. A subsequent GET call on the interface
# is expected to return the configuration that is set via the SET
# call.
#

# Robustness fix: refuse to run without a config file argument (the original
# would have appended to a file literally named "" and copied nothing useful).
if [ -z "$1" ]; then
    echo "Usage: $0 <ifcfg-file>" >&2
    exit 1
fi

# Append the distro-specific boilerplate the host-supplied config lacks.
# All expansions of $1 are quoted so paths containing spaces don't word-split.
echo "IPV6INIT=yes" >> "$1"
echo "NM_CONTROLLED=no" >> "$1"
echo "PEERDNS=yes" >> "$1"
echo "ONBOOT=yes" >> "$1"

# Install the config where RHEL's network scripts expect it.
cp "$1" /etc/sysconfig/network-scripts/

# The config file is named "<something>-<interface>" (e.g. ifcfg-eth0);
# take the part after the first dash as the interface name.
interface=$(echo "$1" | awk -F - '{ print $2 }')

# Bounce the interface so the new configuration takes effect; errors are
# deliberately suppressed (interface may already be down / not yet up).
/sbin/ifdown "$interface" 2>/dev/null
/sbin/ifup "$interface" 2>/dev/null
{ "pile_set_name": "Github" }
if (!this.uuid) { // node.js uuid = require('../uuid'); } // // x-platform log/assert shims // function _log(msg, type) { type = type || 'log'; if (typeof(document) != 'undefined') { document.write('<div class="' + type + '">' + msg.replace(/\n/g, '<br />') + '</div>'); } if (typeof(console) != 'undefined') { var color = { log: '\033[39m', warn: '\033[33m', error: '\033[31m' }; console[type](color[type] + msg + color.log); } } function log(msg) {_log(msg, 'log');} function warn(msg) {_log(msg, 'warn');} function error(msg) {_log(msg, 'error');} function assert(res, msg) { if (!res) { error('FAIL: ' + msg); } else { log('Pass: ' + msg); } } // // Unit tests // // Verify ordering of v1 ids created with explicit times var TIME = 1321644961388; // 2011-11-18 11:36:01.388-08:00 function compare(name, ids) { ids = ids.map(function(id) { return id.split('-').reverse().join('-'); }).sort(); var sorted = ([].concat(ids)).sort(); assert(sorted.toString() == ids.toString(), name + ' have expected order'); } // Verify ordering of v1 ids created using default behavior compare('uuids with current time', [ uuid.v1(), uuid.v1(), uuid.v1(), uuid.v1(), uuid.v1() ]); // Verify ordering of v1 ids created with explicit times compare('uuids with time option', [ uuid.v1({msecs: TIME - 10*3600*1000}), uuid.v1({msecs: TIME - 1}), uuid.v1({msecs: TIME}), uuid.v1({msecs: TIME + 1}), uuid.v1({msecs: TIME + 28*24*3600*1000}) ]); assert( uuid.v1({msecs: TIME}) != uuid.v1({msecs: TIME}), 'IDs created at same msec are different' ); // Verify throw if too many ids created var thrown = false; try { uuid.v1({msecs: TIME, nsecs: 10000}); } catch (e) { thrown = true; } assert(thrown, 'Exception thrown when > 10K ids created in 1 ms'); // Verify clock regression bumps clockseq var uidt = uuid.v1({msecs: TIME}); var uidtb = uuid.v1({msecs: TIME - 1}); assert( parseInt(uidtb.split('-')[3], 16) - parseInt(uidt.split('-')[3], 16) === 1, 'Clock regression by msec increments the clockseq' ); // Verify clock 
regression bumps clockseq var uidtn = uuid.v1({msecs: TIME, nsecs: 10}); var uidtnb = uuid.v1({msecs: TIME, nsecs: 9}); assert( parseInt(uidtnb.split('-')[3], 16) - parseInt(uidtn.split('-')[3], 16) === 1, 'Clock regression by nsec increments the clockseq' ); // Verify explicit options produce expected id var id = uuid.v1({ msecs: 1321651533573, nsecs: 5432, clockseq: 0x385c, node: [ 0x61, 0xcd, 0x3c, 0xbb, 0x32, 0x10 ] }); assert(id == 'd9428888-122b-11e1-b85c-61cd3cbb3210', 'Explicit options produce expected id'); // Verify adjacent ids across a msec boundary are 1 time unit apart var u0 = uuid.v1({msecs: TIME, nsecs: 9999}); var u1 = uuid.v1({msecs: TIME + 1, nsecs: 0}); var before = u0.split('-')[0], after = u1.split('-')[0]; var dt = parseInt(after, 16) - parseInt(before, 16); assert(dt === 1, 'Ids spanning 1ms boundary are 100ns apart'); // // Test parse/unparse // id = '00112233445566778899aabbccddeeff'; assert(uuid.unparse(uuid.parse(id.substr(0,10))) == '00112233-4400-0000-0000-000000000000', 'Short parse'); assert(uuid.unparse(uuid.parse('(this is the uuid -> ' + id + id)) == '00112233-4455-6677-8899-aabbccddeeff', 'Dirty parse'); // // Perf tests // var generators = { v1: uuid.v1, v4: uuid.v4 }; var UUID_FORMAT = { v1: /[0-9a-f]{8}-[0-9a-f]{4}-1[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i, v4: /[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}/i }; var N = 1e4; // Get %'age an actual value differs from the ideal value function divergence(actual, ideal) { return Math.round(100*100*(actual - ideal)/ideal)/100; } function rate(msg, t) { log(msg + ': ' + (N / (Date.now() - t) * 1e3 | 0) + ' uuids\/second'); } for (var version in generators) { var counts = {}, max = 0; var generator = generators[version]; var format = UUID_FORMAT[version]; log('\nSanity check ' + N + ' ' + version + ' uuids'); for (var i = 0, ok = 0; i < N; i++) { id = generator(); if (!format.test(id)) { throw Error(id + ' is not a valid UUID string'); } if (id != 
uuid.unparse(uuid.parse(id))) { assert(fail, id + ' is not a valid id'); } // Count digits for our randomness check if (version == 'v4') { var digits = id.replace(/-/g, '').split(''); for (var j = digits.length-1; j >= 0; j--) { var c = digits[j]; max = Math.max(max, counts[c] = (counts[c] || 0) + 1); } } } // Check randomness for v4 UUIDs if (version == 'v4') { // Limit that we get worried about randomness. (Purely empirical choice, this!) var limit = 2*100*Math.sqrt(1/N); log('\nChecking v4 randomness. Distribution of Hex Digits (% deviation from ideal)'); for (var i = 0; i < 16; i++) { var c = i.toString(16); var bar = '', n = counts[c], p = Math.round(n/max*100|0); // 1-3,5-8, and D-F: 1:16 odds over 30 digits var ideal = N*30/16; if (i == 4) { // 4: 1:1 odds on 1 digit, plus 1:16 odds on 30 digits ideal = N*(1 + 30/16); } else if (i >= 8 && i <= 11) { // 8-B: 1:4 odds on 1 digit, plus 1:16 odds on 30 digits ideal = N*(1/4 + 30/16); } else { // Otherwise: 1:16 odds on 30 digits ideal = N*30/16; } var d = divergence(n, ideal); // Draw bar using UTF squares (just for grins) var s = n/max*50 | 0; while (s--) bar += '='; assert(Math.abs(d) < limit, c + ' |' + bar + '| ' + counts[c] + ' (' + d + '% < ' + limit + '%)'); } } } // Perf tests for (var version in generators) { log('\nPerformance testing ' + version + ' UUIDs'); var generator = generators[version]; var buf = new uuid.BufferClass(16); for (var i = 0, t = Date.now(); i < N; i++) generator(); rate('uuid.' + version + '()', t); for (var i = 0, t = Date.now(); i < N; i++) generator('binary'); rate('uuid.' + version + '(\'binary\')', t); for (var i = 0, t = Date.now(); i < N; i++) generator('binary', buf); rate('uuid.' + version + '(\'binary\', buffer)', t); }
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.eagle.topology.entity;

import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.apache.eagle.log.entity.meta.*;
import org.apache.eagle.topology.TopologyConstants;

/**
 * Status snapshot of one HDFS service instance, stored in the
 * "hadoop_topology" HBase table (column family "f", row prefix
 * "hdfsservicestatus") and keyed by the site/hostname/rack/role tags.
 *
 * Column letters (a-h) map to the fields in declaration order below.
 * Each setter calls {@code valueChanged(...)} — inherited from the base
 * entity, presumably to flag the field as modified for persistence;
 * confirm in {@code TopologyBaseAPIEntity}.
 */
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
@Table("hadoop_topology")
@ColumnFamily("f")
@Prefix("hdfsservicestatus")
@Service(TopologyConstants.HDFS_INSTANCE_SERVICE_NAME)
@TimeSeries(false)
@Tags( {TopologyConstants.SITE_TAG, TopologyConstants.HOSTNAME_TAG, TopologyConstants.RACK_TAG, TopologyConstants.ROLE_TAG})
public class HdfsServiceTopologyAPIEntity extends TopologyBaseAPIEntity {

    // Service status string (e.g. up/down wording is defined by the writer,
    // not visible here).
    @Column("a")
    private String status;
    @Column("b")
    private String configuredCapacityTB;
    @Column("c")
    private String usedCapacityTB;
    @Column("d")
    private String numBlocks;
    @Column("e")
    private String numFailedVolumes;
    // Difference between written transaction ids (namenode journal lag);
    // exact semantics depend on the writer — TODO confirm.
    @Column("f")
    private long writtenTxidDiff;
    @Column("g")
    private long lastUpdateTime;
    @Column("h")
    private String version;

    public long getLastUpdateTime() {
        return lastUpdateTime;
    }

    public void setLastUpdateTime(long lastUpdateTime) {
        this.lastUpdateTime = lastUpdateTime;
        valueChanged("lastUpdateTime");
    }

    public String getNumFailedVolumes() {
        return numFailedVolumes;
    }

    public void setNumFailedVolumes(String numFailedVolumes) {
        this.numFailedVolumes = numFailedVolumes;
        valueChanged("numFailedVolumes");
    }

    public String getNumBlocks() {
        return numBlocks;
    }

    public void setNumBlocks(String numBlocks) {
        this.numBlocks = numBlocks;
        valueChanged("numBlocks");
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
        valueChanged("status");
    }

    public String getConfiguredCapacityTB() {
        return configuredCapacityTB;
    }

    public void setConfiguredCapacityTB(String configuredCapacityTB) {
        this.configuredCapacityTB = configuredCapacityTB;
        valueChanged("configuredCapacityTB");
    }

    public String getUsedCapacityTB() {
        return usedCapacityTB;
    }

    public void setUsedCapacityTB(String usedCapacityTB) {
        this.usedCapacityTB = usedCapacityTB;
        valueChanged("usedCapacityTB");
    }

    public long getWrittenTxidDiff() {
        return writtenTxidDiff;
    }

    public void setWrittenTxidDiff(long writtenTxidDiff) {
        this.writtenTxidDiff = writtenTxidDiff;
        valueChanged("writtenTxidDiff");
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
        valueChanged("version");
    }
}
{ "pile_set_name": "Github" }
jest.mock('../content-preview', () => 'ContentPreview'); import React from 'react'; import renderer from 'react-test-renderer'; import {Map, fromJS} from 'immutable'; import ContentPreviewContainer from '../index'; import {createStore, combineReducers} from 'redux'; import {Provider} from 'react-redux'; import {createFormReducer} from '../../../reducer'; import {NAME} from '../../../constants'; const schema = Map({type: 'string', title: 'Name'}); const formName = 'test-form'; const loadedState = { [NAME]: fromJS({ forms: { [formName]: { schema: { type: 'object', properties: { name: schema } }, data: { name: 'Andrew' } } } }) }; const reducer = combineReducers({[NAME]: createFormReducer()}); const store = createStore(reducer, loadedState); describe('ScalarFieldContainer', () => { it('renders', () => { const props = { schema, formName, path: 'name' }; const tree = renderer .create( <Provider store={store}> <ContentPreviewContainer {...props} /> </Provider> ) .toJSON(); expect(tree).toMatchSnapshot(); }); });
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Controls.Primitives;
using System.Windows;

namespace ICSharpCode.XamlDesigner
{
    /// <summary>
    /// A <see cref="ToggleButton"/> that carries an arbitrary payload in
    /// <see cref="Value"/> — judging by the name, intended so a group of these
    /// buttons can represent the members of an enum.
    /// </summary>
    public class EnumButton : ToggleButton
    {
        static EnumButton()
        {
            // Make WPF look up the style keyed on EnumButton (e.g. in
            // Themes/Generic.xaml) instead of the default ToggleButton style.
            DefaultStyleKeyProperty.OverrideMetadata(typeof(EnumButton),
                new FrameworkPropertyMetadata(typeof(EnumButton)));
        }

        /// <summary>Identifies the <see cref="Value"/> dependency property.</summary>
        public static readonly DependencyProperty ValueProperty =
            DependencyProperty.Register("Value", typeof(object), typeof(EnumButton));

        /// <summary>
        /// Gets or sets the value this button represents. Backed by
        /// <see cref="ValueProperty"/>.
        /// </summary>
        public object Value {
            // Fix: dropped the redundant (object) cast — GetValue already
            // returns object.
            get { return GetValue(ValueProperty); }
            set { SetValue(ValueProperty, value); }
        }
    }
}
{ "pile_set_name": "Github" }
{ "word": "Unhealthful", "definitions": [ "Harmful to health." ], "parts-of-speech": "Adjective" }
{ "pile_set_name": "Github" }
/*******************************************************************************
  Header File to describe the DMA descriptors and related definitions. This is
  for DWMAC100 and 1000 cores.

  This program is free software; you can redistribute it and/or modify it
  under the terms and conditions of the GNU General Public License,
  version 2, as published by the Free Software Foundation.

  This program is distributed in the hope it will be useful, but WITHOUT
  ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
  more details.

  The full GNU General Public License is included in this distribution in
  the file called "COPYING".

  Author: Giuseppe Cavallaro <[email protected]>
*******************************************************************************/

#ifndef __DESCS_H__
#define __DESCS_H__

#include <linux/bitops.h>

/* All descriptor words are little-endian as seen by the DMA engine (__le32
 * in the structs below); the RDESn/TDESn names follow the databook. */

/* Normal receive descriptor defines */

/* RDES0 */
#define	RDES0_PAYLOAD_CSUM_ERR	BIT(0)
#define	RDES0_CRC_ERROR		BIT(1)
#define	RDES0_DRIBBLING		BIT(2)
#define	RDES0_MII_ERROR		BIT(3)
#define	RDES0_RECEIVE_WATCHDOG	BIT(4)
#define	RDES0_FRAME_TYPE	BIT(5)
#define	RDES0_COLLISION		BIT(6)
#define	RDES0_IPC_CSUM_ERROR	BIT(7)
#define	RDES0_LAST_DESCRIPTOR	BIT(8)
#define	RDES0_FIRST_DESCRIPTOR	BIT(9)
#define	RDES0_VLAN_TAG		BIT(10)
#define	RDES0_OVERFLOW_ERROR	BIT(11)
#define	RDES0_LENGTH_ERROR	BIT(12)
#define	RDES0_SA_FILTER_FAIL	BIT(13)
#define	RDES0_DESCRIPTOR_ERROR	BIT(14)
#define	RDES0_ERROR_SUMMARY	BIT(15)
#define	RDES0_FRAME_LEN_MASK	GENMASK(29, 16)
#define RDES0_FRAME_LEN_SHIFT	16
#define	RDES0_DA_FILTER_FAIL	BIT(30)
#define	RDES0_OWN		BIT(31)
/* RDES1 */
#define	RDES1_BUFFER1_SIZE_MASK		GENMASK(10, 0)
#define	RDES1_BUFFER2_SIZE_MASK		GENMASK(21, 11)
#define	RDES1_BUFFER2_SIZE_SHIFT	11
#define	RDES1_SECOND_ADDRESS_CHAINED	BIT(24)
#define	RDES1_END_RING			BIT(25)
#define	RDES1_DISABLE_IC		BIT(31)

/* Enhanced receive descriptor defines */

/* RDES0 (similar to normal RDES) */
#define	 ERDES0_RX_MAC_ADDR	BIT(0)

/* RDES1: completely differ from normal desc definitions */
#define	ERDES1_BUFFER1_SIZE_MASK	GENMASK(12, 0)
#define	ERDES1_SECOND_ADDRESS_CHAINED	BIT(14)
#define	ERDES1_END_RING			BIT(15)
#define	ERDES1_BUFFER2_SIZE_MASK	GENMASK(28, 16)
#define ERDES1_BUFFER2_SIZE_SHIFT	16
#define	ERDES1_DISABLE_IC		BIT(31)

/* Normal transmit descriptor defines */
/* TDES0 */
#define	TDES0_DEFERRED			BIT(0)
#define	TDES0_UNDERFLOW_ERROR		BIT(1)
#define	TDES0_EXCESSIVE_DEFERRAL	BIT(2)
#define	TDES0_COLLISION_COUNT_MASK	GENMASK(6, 3)
#define	TDES0_VLAN_FRAME		BIT(7)
#define	TDES0_EXCESSIVE_COLLISIONS	BIT(8)
#define	TDES0_LATE_COLLISION		BIT(9)
#define	TDES0_NO_CARRIER		BIT(10)
#define	TDES0_LOSS_CARRIER		BIT(11)
#define	TDES0_PAYLOAD_ERROR		BIT(12)
#define	TDES0_FRAME_FLUSHED		BIT(13)
#define	TDES0_JABBER_TIMEOUT		BIT(14)
#define	TDES0_ERROR_SUMMARY		BIT(15)
#define	TDES0_IP_HEADER_ERROR		BIT(16)
#define	TDES0_TIME_STAMP_STATUS		BIT(17)
#define	TDES0_OWN			((u32)BIT(31))	/* silence sparse */
/* TDES1 */
#define	TDES1_BUFFER1_SIZE_MASK		GENMASK(10, 0)
#define	TDES1_BUFFER2_SIZE_MASK		GENMASK(21, 11)
#define	TDES1_BUFFER2_SIZE_SHIFT	11
#define	TDES1_TIME_STAMP_ENABLE		BIT(22)
#define	TDES1_DISABLE_PADDING		BIT(23)
#define	TDES1_SECOND_ADDRESS_CHAINED	BIT(24)
#define	TDES1_END_RING			BIT(25)
#define	TDES1_CRC_DISABLE		BIT(26)
#define	TDES1_CHECKSUM_INSERTION_MASK	GENMASK(28, 27)
#define	TDES1_CHECKSUM_INSERTION_SHIFT	27
#define	TDES1_FIRST_SEGMENT		BIT(29)
#define	TDES1_LAST_SEGMENT		BIT(30)
#define	TDES1_INTERRUPT			BIT(31)

/* Enhanced transmit descriptor defines */
/* TDES0 */
#define	ETDES0_DEFERRED			BIT(0)
#define	ETDES0_UNDERFLOW_ERROR		BIT(1)
#define	ETDES0_EXCESSIVE_DEFERRAL	BIT(2)
#define	ETDES0_COLLISION_COUNT_MASK	GENMASK(6, 3)
#define	ETDES0_VLAN_FRAME		BIT(7)
#define	ETDES0_EXCESSIVE_COLLISIONS	BIT(8)
#define	ETDES0_LATE_COLLISION		BIT(9)
#define	ETDES0_NO_CARRIER		BIT(10)
#define	ETDES0_LOSS_CARRIER		BIT(11)
#define	ETDES0_PAYLOAD_ERROR		BIT(12)
#define	ETDES0_FRAME_FLUSHED		BIT(13)
#define	ETDES0_JABBER_TIMEOUT		BIT(14)
#define	ETDES0_ERROR_SUMMARY		BIT(15)
#define	ETDES0_IP_HEADER_ERROR		BIT(16)
#define	ETDES0_TIME_STAMP_STATUS	BIT(17)
#define	ETDES0_SECOND_ADDRESS_CHAINED	BIT(20)
#define	ETDES0_END_RING			BIT(21)
#define	ETDES0_CHECKSUM_INSERTION_MASK	GENMASK(23, 22)
#define	ETDES0_CHECKSUM_INSERTION_SHIFT	22
#define	ETDES0_TIME_STAMP_ENABLE	BIT(25)
#define	ETDES0_DISABLE_PADDING		BIT(26)
#define	ETDES0_CRC_DISABLE		BIT(27)
#define	ETDES0_FIRST_SEGMENT		BIT(28)
#define	ETDES0_LAST_SEGMENT		BIT(29)
#define	ETDES0_INTERRUPT		BIT(30)
#define	ETDES0_OWN			((u32)BIT(31))	/* silence sparse */
/* TDES1 */
#define	ETDES1_BUFFER1_SIZE_MASK	GENMASK(12, 0)
#define	ETDES1_BUFFER2_SIZE_MASK	GENMASK(28, 16)
#define	ETDES1_BUFFER2_SIZE_SHIFT	16

/* Extended Receive descriptor definitions */
/* NOTE(review): GENMASK(h, l) expects h >= l; GENMASK(2, 6) below looks
 * inverted and would not produce a sane mask. Verify the intended bit
 * range (databook puts the IP payload type in the low RDES4 bits) before
 * using this macro. */
#define	ERDES4_IP_PAYLOAD_TYPE_MASK	GENMASK(2, 6)
#define	ERDES4_IP_HDR_ERR		BIT(3)
#define	ERDES4_IP_PAYLOAD_ERR		BIT(4)
#define	ERDES4_IP_CSUM_BYPASSED		BIT(5)
#define	ERDES4_IPV4_PKT_RCVD		BIT(6)
#define	ERDES4_IPV6_PKT_RCVD		BIT(7)
#define	ERDES4_MSG_TYPE_MASK		GENMASK(11, 8)
#define	ERDES4_PTP_FRAME_TYPE		BIT(12)
#define	ERDES4_PTP_VER			BIT(13)
#define	ERDES4_TIMESTAMP_DROPPED	BIT(14)
#define	ERDES4_AV_PKT_RCVD		BIT(16)
#define	ERDES4_AV_TAGGED_PKT_RCVD	BIT(17)
#define	ERDES4_VLAN_TAG_PRI_VAL_MASK	GENMASK(20, 18)
#define	ERDES4_L3_FILTER_MATCH		BIT(24)
#define	ERDES4_L4_FILTER_MATCH		BIT(25)
#define	ERDES4_L3_L4_FILT_NO_MATCH_MASK	GENMASK(27, 26)

/* Extended RDES4 message type definitions (PTP message decoding) */
#define RDES_EXT_NO_PTP			0x0
#define RDES_EXT_SYNC			0x1
#define RDES_EXT_FOLLOW_UP		0x2
#define RDES_EXT_DELAY_REQ		0x3
#define RDES_EXT_DELAY_RESP		0x4
#define RDES_EXT_PDELAY_REQ		0x5
#define RDES_EXT_PDELAY_RESP		0x6
#define RDES_EXT_PDELAY_FOLLOW_UP	0x7
#define RDES_PTP_ANNOUNCE		0x8
#define RDES_PTP_MANAGEMENT		0x9
#define RDES_PTP_SIGNALING		0xa
#define RDES_PTP_PKT_RESERVED_TYPE	0xf

/* Basic descriptor structure for normal and alternate descriptors */
struct dma_desc {
	__le32 des0;
	__le32 des1;
	__le32 des2;
	__le32 des3;
};

/* Extended descriptor structure (e.g. >= databook 3.50a) */
struct dma_extended_desc {
	struct dma_desc basic;	/* Basic descriptors */
	__le32 des4;	/* Extended Status */
	__le32 des5;	/* Reserved */
	__le32 des6;	/* Tx/Rx Timestamp Low */
	__le32 des7;	/* Tx/Rx Timestamp High */
};

/* Transmit checksum insertion control */
#define	TX_CIC_FULL	3	/* Include IP header and pseudoheader */

#endif /* __DESCS_H__ */
{ "pile_set_name": "Github" }
/* Copyright (c) 2017-2020 Hans-Kristian Arntzen
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */

// Logging macros LOGE/LOGW/LOGI with the backend chosen at compile time:
// libretro callback, MSVC (stderr + debugger output window), Android logcat,
// or plain stderr. If GRANITE_LOGGING_QUEUE is defined, every message is
// additionally mirrored into Granite's global message queue via QUEUED_LOG*.
#pragma once

#include <stdio.h>

#ifdef GRANITE_LOGGING_QUEUE
#include "global_managers.hpp"
#include "message_queue.hpp"
#include <string.h>
#include <stdarg.h>

namespace Util
{
// Formats "<tag><message>" into a stack buffer and pushes it onto the global
// message queue. Best-effort: silently drops the message when the queue is
// absent/corked or no payload can be allocated. Trailing '\n' characters are
// stripped before enqueueing.
static inline void queued_log(const char *tag, const char *fmt, ...)
{
	auto *message_queue = ::Granite::Global::message_queue();
	if (!message_queue || !message_queue->is_uncorked())
		return;

	// Copy the tag without its NUL; vsnprintf below terminates the buffer.
	char message_buffer[16 * 1024];
	memcpy(message_buffer, tag, strlen(tag));
	va_list va;
	va_start(va, fmt);
	vsnprintf(message_buffer + strlen(tag), sizeof(message_buffer) - strlen(tag), fmt, va);
	va_end(va);

	// message_size counts the terminating NUL; strip trailing newlines.
	size_t message_size = strlen(message_buffer) + 1;
	while (message_size >= 2 && message_buffer[message_size - 2] == '\n')
	{
		message_buffer[message_size - 2] = '\0';
		message_size--;
	}

	auto message_payload = message_queue->allocate_write_payload(message_size);
	if (message_payload)
	{
		memcpy(static_cast<char *>(message_payload.get_payload_data()), message_buffer, message_size);
		message_queue->push_written_payload(std::move(message_payload));
	}
}
}

#define QUEUED_LOGE(...) do { \
	::Util::queued_log("[ERROR]: ", __VA_ARGS__); \
} while(0)
#define QUEUED_LOGW(...) do { \
	::Util::queued_log("[WARN]: ", __VA_ARGS__); \
} while(0)
#define QUEUED_LOGI(...) do { \
	::Util::queued_log("[INFO]: ", __VA_ARGS__); \
} while(0)
#else
// Queue logging disabled: QUEUED_* macros expand to nothing.
#define QUEUED_LOGE(...)
#define QUEUED_LOGW(...)
#define QUEUED_LOGI(...)
#endif

#if defined(HAVE_LIBRETRO)
#include "libretro.h"
namespace Granite
{
// Log callback installed by the libretro frontend; may be null, in which case
// only the queue (when enabled) sees the message.
extern retro_log_printf_t libretro_log;
}
#define LOGE(...) do { if (::Granite::libretro_log) ::Granite::libretro_log(RETRO_LOG_ERROR, __VA_ARGS__); QUEUED_LOGE(__VA_ARGS__); } while(0)
#define LOGW(...) do { if (::Granite::libretro_log) ::Granite::libretro_log(RETRO_LOG_WARN, __VA_ARGS__); QUEUED_LOGW(__VA_ARGS__); } while(0)
#define LOGI(...) do { if (::Granite::libretro_log) ::Granite::libretro_log(RETRO_LOG_INFO, __VA_ARGS__); QUEUED_LOGI(__VA_ARGS__); } while(0)
#elif defined(_MSC_VER)
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
// MSVC: print to stderr and mirror into the debugger via OutputDebugStringA.
// The format string is pasted twice, so it must be a string literal.
#define LOGE(...) do { \
	fprintf(stderr, "[ERROR]: " __VA_ARGS__); \
	fflush(stderr); \
	char buffer[16 * 1024]; \
	snprintf(buffer, sizeof(buffer), "[ERROR]: " __VA_ARGS__); \
	OutputDebugStringA(buffer); \
	QUEUED_LOGE(__VA_ARGS__); \
} while(false)
#define LOGW(...) do { \
	fprintf(stderr, "[WARN]: " __VA_ARGS__); \
	fflush(stderr); \
	char buffer[16 * 1024]; \
	snprintf(buffer, sizeof(buffer), "[WARN]: " __VA_ARGS__); \
	OutputDebugStringA(buffer); \
	QUEUED_LOGW(__VA_ARGS__); \
} while(false)
#define LOGI(...) do { \
	fprintf(stderr, "[INFO]: " __VA_ARGS__); \
	fflush(stderr); \
	char buffer[16 * 1024]; \
	snprintf(buffer, sizeof(buffer), "[INFO]: " __VA_ARGS__); \
	OutputDebugStringA(buffer); \
	QUEUED_LOGI(__VA_ARGS__); \
} while(false)
#elif defined(ANDROID)
#include <android/log.h>
// Android: route through logcat with the "Granite" tag.
#define LOGE(...) do { __android_log_print(ANDROID_LOG_ERROR, "Granite", __VA_ARGS__); QUEUED_LOGE(__VA_ARGS__); } while(0)
#define LOGW(...) do { __android_log_print(ANDROID_LOG_WARN, "Granite", __VA_ARGS__); QUEUED_LOGW(__VA_ARGS__); } while(0)
#define LOGI(...) do { __android_log_print(ANDROID_LOG_INFO, "Granite", __VA_ARGS__); QUEUED_LOGI(__VA_ARGS__); } while(0)
#else
// Generic fallback: plain stderr.
#define LOGE(...) \
	do \
	{ \
		fprintf(stderr, "[ERROR]: " __VA_ARGS__); \
		fflush(stderr); \
		QUEUED_LOGE(__VA_ARGS__); \
	} while (false)
#define LOGW(...) \
	do \
	{ \
		fprintf(stderr, "[WARN]: " __VA_ARGS__); \
		fflush(stderr); \
		QUEUED_LOGW(__VA_ARGS__); \
	} while (false)
#define LOGI(...) \
	do \
	{ \
		fprintf(stderr, "[INFO]: " __VA_ARGS__); \
		fflush(stderr); \
		QUEUED_LOGI(__VA_ARGS__); \
	} while (false)
#endif
{ "pile_set_name": "Github" }
// Stacked Icons
// -------------------------

// Positioning context for two overlaid icons: a fixed 2em square, with
// line-height matching the height so child glyphs can center vertically.
.#{$fa-css-prefix}-stack {
  position: relative;
  display: inline-block;
  width: 2em;
  height: 2em;
  line-height: 2em;
  vertical-align: middle;
}
// Both stacked layers are taken out of flow and horizontally centered
// across the full width of the stack container.
.#{$fa-css-prefix}-stack-1x, .#{$fa-css-prefix}-stack-2x {
  position: absolute;
  left: 0;
  width: 100%;
  text-align: center;
}
// Regular-size layer: inherit the container's 2em line-height so the glyph
// sits vertically centered.
.#{$fa-css-prefix}-stack-1x { line-height: inherit; }
// Large layer: double the font size so the glyph fills the 2em stack.
.#{$fa-css-prefix}-stack-2x { font-size: 2em; }
// Contrast color for an icon layered on top of a filled glyph
// ($fa-inverse is a variable defined elsewhere in the library).
.#{$fa-css-prefix}-inverse { color: $fa-inverse; }
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=utf-8" /> <meta name="generator" content="JsDoc Toolkit" /> <title>JsDoc Reference - cc.ParticleSystemQuad</title> <style type="text/css"> /* default.css */ body { font: 12px "Lucida Grande", Tahoma, Arial, Helvetica, sans-serif; width: 800px; } .header { clear: both; background-color: #ccc; padding: 8px; } h1 { font-size: 150%; font-weight: bold; padding: 0; margin: 1em 0 0 .3em; } hr { border: none 0; border-top: 1px solid #7F8FB1; height: 1px; } pre.code { display: block; padding: 8px; border: 1px dashed #ccc; } #index { margin-top: 24px; float: left; width: 160px; position: absolute; left: 8px; background-color: #F3F3F3; padding: 8px; } #content { margin-left: 190px; width: 600px; } .classList { list-style-type: none; padding: 0; margin: 0 0 0 8px; font-family: arial, sans-serif; font-size: 1em; overflow: auto; } .classList li { padding: 0; margin: 0 0 8px 0; } .summaryTable { width: 100%; } h1.classTitle { font-size:170%; line-height:130%; } h2 { font-size: 110%; } caption, div.sectionTitle { background-color: #7F8FB1; color: #fff; font-size:130%; text-align: left; padding: 2px 6px 2px 6px; border: 1px #7F8FB1 solid; } div.sectionTitle { margin-bottom: 8px; } .summaryTable thead { display: none; } .summaryTable td { vertical-align: top; padding: 4px; border-bottom: 1px #7F8FB1 solid; border-right: 1px #7F8FB1 solid; } /*col#summaryAttributes {}*/ .summaryTable td.attributes { border-left: 1px #7F8FB1 solid; width: 140px; text-align: right; } td.attributes, .fixedFont { line-height: 15px; color: #002EBE; font-family: "Courier New",Courier,monospace; font-size: 13px; } .summaryTable td.nameDescription { text-align: left; font-size: 13px; line-height: 15px; } .summaryTable td.nameDescription, .description { 
line-height: 15px; padding: 4px; padding-left: 4px; } .summaryTable { margin-bottom: 8px; } ul.inheritsList { list-style: square; margin-left: 20px; padding-left: 0; } .detailList { margin-left: 20px; line-height: 15px; } .detailList dt { margin-left: 20px; } .detailList .heading { font-weight: bold; padding-bottom: 6px; margin-left: 0; } .light, td.attributes, .light a:link, .light a:visited { color: #777; font-style: italic; } .fineprint { text-align: right; font-size: 10px; } </style> </head> <body> <!-- ============================== header ================================= --> <!-- begin static/header.html --> <div id="header"> </div> <!-- end static/header.html --> <!-- ============================== classes index ============================ --> <div id="index"> <!-- begin publish.classesIndex --> <div align="center"><a href="../index.html">Class Index</a> | <a href="../files.html">File Index</a></div> <hr /> <h2>Classes</h2> <ul class="classList"> <li><i><a href="../symbols/_global_.html">_global_</a></i></li> <li><a href="../symbols/cc.%24.html">cc.$</a></li> <li><a href="../symbols/cc.Action.html">cc.Action</a></li> <li><a href="../symbols/cc.ActionEase.html">cc.ActionEase</a></li> <li><a href="../symbols/cc.ActionInstant.html">cc.ActionInstant</a></li> <li><a href="../symbols/cc.ActionInterval.html">cc.ActionInterval</a></li> <li><a href="../symbols/cc.ActionManager.html">cc.ActionManager</a></li> <li><a href="../symbols/cc.ActionTween.html">cc.ActionTween</a></li> <li><a href="../symbols/cc.ActionTweenDelegate.html">cc.ActionTweenDelegate</a></li> <li><a href="../symbols/cc.Animate.html">cc.Animate</a></li> <li><a href="../symbols/cc.Animation.html">cc.Animation</a></li> <li><a href="../symbols/cc.AnimationCache.html">cc.AnimationCache</a></li> <li><a href="../symbols/cc.AnimationFrame.html">cc.AnimationFrame</a></li> <li><a href="../symbols/cc.AppController.html">cc.AppController</a></li> <li><a 
href="../symbols/cc.Application.html">cc.Application</a></li> <li><a href="../symbols/cc.AtlasNode.html">cc.AtlasNode</a></li> <li><a href="../symbols/cc.BezierBy.html">cc.BezierBy</a></li> <li><a href="../symbols/cc.BezierTo.html">cc.BezierTo</a></li> <li><a href="../symbols/cc.BlendFunc.html">cc.BlendFunc</a></li> <li><a href="../symbols/cc.Blink.html">cc.Blink</a></li> <li><a href="../symbols/cc.BMFontConfiguration.html">cc.BMFontConfiguration</a></li> <li><a href="../symbols/cc.c4f.html">cc.c4f</a></li> <li><a href="../symbols/cc.CallFunc.html">cc.CallFunc</a></li> <li><a href="../symbols/cc.CardinalSplineBy.html">cc.CardinalSplineBy</a></li> <li><a href="../symbols/cc.CardinalSplineTo.html">cc.CardinalSplineTo</a></li> <li><a href="../symbols/cc.CatmullRomBy.html">cc.CatmullRomBy</a></li> <li><a href="../symbols/cc.CatmullRomTo.html">cc.CatmullRomTo</a></li> <li><a href="../symbols/cc.Class.html">cc.Class</a></li> <li><a href="../symbols/cc.Color3B.html">cc.Color3B</a></li> <li><a href="../symbols/cc.Color4B.html">cc.Color4B</a></li> <li><a href="../symbols/cc.Color4F.html">cc.Color4F</a></li> <li><a href="../symbols/cc.DelayTime.html">cc.DelayTime</a></li> <li><a href="../symbols/cc.DictMaker.html">cc.DictMaker</a></li> <li><a href="../symbols/cc.Director.html">cc.Director</a></li> <li><a href="../symbols/cc.DisplayLinkDirector.html">cc.DisplayLinkDirector</a></li> <li><a href="../symbols/cc.DOM.html">cc.DOM</a></li> <li><a href="../symbols/cc.DrawingPrimitive.html">cc.DrawingPrimitive</a></li> <li><a href="../symbols/cc.DrawingPrimitiveCanvas.html">cc.DrawingPrimitiveCanvas</a></li> <li><a href="../symbols/cc.EaseBackIn.html">cc.EaseBackIn</a></li> <li><a href="../symbols/cc.EaseBackInOut.html">cc.EaseBackInOut</a></li> <li><a href="../symbols/cc.EaseBackOut.html">cc.EaseBackOut</a></li> <li><a href="../symbols/cc.EaseBounce.html">cc.EaseBounce</a></li> <li><a href="../symbols/cc.EaseBounceIn.html">cc.EaseBounceIn</a></li> <li><a 
href="../symbols/cc.EaseBounceInOut.html">cc.EaseBounceInOut</a></li> <li><a href="../symbols/cc.EaseBounceOut.html">cc.EaseBounceOut</a></li> <li><a href="../symbols/cc.EaseElastic.html">cc.EaseElastic</a></li> <li><a href="../symbols/cc.EaseElasticIn.html">cc.EaseElasticIn</a></li> <li><a href="../symbols/cc.EaseElasticInOut.html">cc.EaseElasticInOut</a></li> <li><a href="../symbols/cc.EaseElasticOut.html">cc.EaseElasticOut</a></li> <li><a href="../symbols/cc.EaseExponentialIn.html">cc.EaseExponentialIn</a></li> <li><a href="../symbols/cc.EaseExponentialInOut.html">cc.EaseExponentialInOut</a></li> <li><a href="../symbols/cc.EaseExponentialOut.html">cc.EaseExponentialOut</a></li> <li><a href="../symbols/cc.EaseIn.html">cc.EaseIn</a></li> <li><a href="../symbols/cc.EaseInOut.html">cc.EaseInOut</a></li> <li><a href="../symbols/cc.EaseOut.html">cc.EaseOut</a></li> <li><a href="../symbols/cc.EaseRateAction.html">cc.EaseRateAction</a></li> <li><a href="../symbols/cc.EaseSineIn.html">cc.EaseSineIn</a></li> <li><a href="../symbols/cc.EaseSineInOut.html">cc.EaseSineInOut</a></li> <li><a href="../symbols/cc.EaseSineOut.html">cc.EaseSineOut</a></li> <li><a href="../symbols/cc.FadeIn.html">cc.FadeIn</a></li> <li><a href="../symbols/cc.FadeOut.html">cc.FadeOut</a></li> <li><a href="../symbols/cc.FadeTo.html">cc.FadeTo</a></li> <li><a href="../symbols/cc.FileUtils.html">cc.FileUtils</a></li> <li><a href="../symbols/cc.FiniteTimeAction.html">cc.FiniteTimeAction</a></li> <li><a href="../symbols/cc.FlipX.html">cc.FlipX</a></li> <li><a href="../symbols/cc.FlipY.html">cc.FlipY</a></li> <li><a href="../symbols/cc.Follow.html">cc.Follow</a></li> <li><a href="../symbols/cc.GridSize.html">cc.GridSize</a></li> <li><a href="../symbols/cc.HashElement.html">cc.HashElement</a></li> <li><a href="../symbols/cc.HashSelectorEntry.html">cc.HashSelectorEntry</a></li> <li><a href="../symbols/cc.HashUpdateEntry.html">cc.HashUpdateEntry</a></li> <li><a href="../symbols/cc.Hide.html">cc.Hide</a></li> 
<li><a href="../symbols/cc.Image.html">cc.Image</a></li> <li><a href="../symbols/cc.IMEDelegate.html">cc.IMEDelegate</a></li> <li><a href="../symbols/cc.IMEDispatcher.html">cc.IMEDispatcher</a></li> <li><a href="../symbols/cc.IMEDispatcher.Impl.html">cc.IMEDispatcher.Impl</a></li> <li><a href="../symbols/cc.inherits-tempCtor.html">cc.inherits-tempCtor</a></li> <li><a href="../symbols/cc.JumpBy.html">cc.JumpBy</a></li> <li><a href="../symbols/cc.JumpTo.html">cc.JumpTo</a></li> <li><a href="../symbols/cc.KeyboardDelegate.html">cc.KeyboardDelegate</a></li> <li><a href="../symbols/cc.KeyboardDispatcher.html">cc.KeyboardDispatcher</a></li> <li><a href="../symbols/cc.KeyboardHandler.html">cc.KeyboardHandler</a></li> <li><a href="../symbols/cc.LabelAtlas.html">cc.LabelAtlas</a></li> <li><a href="../symbols/cc.LabelBMFont.html">cc.LabelBMFont</a></li> <li><a href="../symbols/cc.LabelTTF.html">cc.LabelTTF</a></li> <li><a href="../symbols/cc.Layer.html">cc.Layer</a></li> <li><a href="../symbols/cc.LayerColor.html">cc.LayerColor</a></li> <li><a href="../symbols/cc.LayerGradient.html">cc.LayerGradient</a></li> <li><a href="../symbols/cc.LayerMultiplex.html">cc.LayerMultiplex</a></li> <li><a href="../symbols/cc.LazyLayer.html">cc.LazyLayer</a></li> <li><a href="../symbols/cc.ListEntry.html">cc.ListEntry</a></li> <li><a href="../symbols/cc.Loader.html">cc.Loader</a></li> <li><a href="../symbols/cc.LoaderScene.html">cc.LoaderScene</a></li> <li><a href="../symbols/cc.Menu.html">cc.Menu</a></li> <li><a href="../symbols/cc.MenuItem.html">cc.MenuItem</a></li> <li><a href="../symbols/cc.MenuItemAtlasFont.html">cc.MenuItemAtlasFont</a></li> <li><a href="../symbols/cc.MenuItemFont.html">cc.MenuItemFont</a></li> <li><a href="../symbols/cc.MenuItemImage.html">cc.MenuItemImage</a></li> <li><a href="../symbols/cc.MenuItemLabel.html">cc.MenuItemLabel</a></li> <li><a href="../symbols/cc.MenuItemSprite.html">cc.MenuItemSprite</a></li> <li><a 
href="../symbols/cc.MenuItemToggle.html">cc.MenuItemToggle</a></li> <li><a href="../symbols/cc.MotionStreak.html">cc.MotionStreak</a></li> <li><a href="../symbols/cc.MouseHandler.html">cc.MouseHandler</a></li> <li><a href="../symbols/cc.MoveBy.html">cc.MoveBy</a></li> <li><a href="../symbols/cc.MoveTo.html">cc.MoveTo</a></li> <li><a href="../symbols/cc.Node.html">cc.Node</a></li> <li><a href="../symbols/cc.NSMutableSet.html">cc.NSMutableSet</a></li> <li><a href="../symbols/cc.ParallaxNode.html">cc.ParallaxNode</a></li> <li><a href="../symbols/cc.Particle.html">cc.Particle</a></li> <li><a href="../symbols/cc.Particle.ModeA.html">cc.Particle.ModeA</a></li> <li><a href="../symbols/cc.Particle.ModeB.html">cc.Particle.ModeB</a></li> <li><a href="../symbols/cc.ParticleBatchNode.html">cc.ParticleBatchNode</a></li> <li><a href="../symbols/cc.ParticleExplosion.html">cc.ParticleExplosion</a></li> <li><a href="../symbols/cc.ParticleFire.html">cc.ParticleFire</a></li> <li><a href="../symbols/cc.ParticleFireworks.html">cc.ParticleFireworks</a></li> <li><a href="../symbols/cc.ParticleFlower.html">cc.ParticleFlower</a></li> <li><a href="../symbols/cc.ParticleGalaxy.html">cc.ParticleGalaxy</a></li> <li><a href="../symbols/cc.ParticleMeteor.html">cc.ParticleMeteor</a></li> <li><a href="../symbols/cc.ParticleRain.html">cc.ParticleRain</a></li> <li><a href="../symbols/cc.ParticleSmoke.html">cc.ParticleSmoke</a></li> <li><a href="../symbols/cc.ParticleSnow.html">cc.ParticleSnow</a></li> <li><a href="../symbols/cc.ParticleSpiral.html">cc.ParticleSpiral</a></li> <li><a href="../symbols/cc.ParticleSun.html">cc.ParticleSun</a></li> <li><a href="../symbols/cc.ParticleSystem.html">cc.ParticleSystem</a></li> <li><a href="../symbols/cc.ParticleSystem.ModeA.html">cc.ParticleSystem.ModeA</a></li> <li><a href="../symbols/cc.ParticleSystem.ModeB.html">cc.ParticleSystem.ModeB</a></li> <li><a href="../symbols/cc.ParticleSystemQuad.html">cc.ParticleSystemQuad</a></li> <li><a 
href="../symbols/cc.Place.html">cc.Place</a></li> <li><a href="../symbols/cc.Point.html">cc.Point</a></li> <li><a href="../symbols/cc.PointObject.html">cc.PointObject</a></li> <li><a href="../symbols/cc.PointSprite.html">cc.PointSprite</a></li> <li><a href="../symbols/cc.ProgressFromTo.html">cc.ProgressFromTo</a></li> <li><a href="../symbols/cc.ProgressTimer.html">cc.ProgressTimer</a></li> <li><a href="../symbols/cc.ProgressTo.html">cc.ProgressTo</a></li> <li><a href="../symbols/cc.Quad2.html">cc.Quad2</a></li> <li><a href="../symbols/cc.Quad3.html">cc.Quad3</a></li> <li><a href="../symbols/cc.Rect.html">cc.Rect</a></li> <li><a href="../symbols/cc.RenderTexture.html">cc.RenderTexture</a></li> <li><a href="../symbols/cc.Repeat.html">cc.Repeat</a></li> <li><a href="../symbols/cc.RepeatForever.html">cc.RepeatForever</a></li> <li><a href="../symbols/cc.ReverseTime.html">cc.ReverseTime</a></li> <li><a href="../symbols/cc.RotateBy.html">cc.RotateBy</a></li> <li><a href="../symbols/cc.RotateTo.html">cc.RotateTo</a></li> <li><a href="../symbols/cc.SAXParser.html">cc.SAXParser</a></li> <li><a href="../symbols/cc.ScaleBy.html">cc.ScaleBy</a></li> <li><a href="../symbols/cc.ScaleTo.html">cc.ScaleTo</a></li> <li><a href="../symbols/cc.Scene.html">cc.Scene</a></li> <li><a href="../symbols/cc.Scheduler.html">cc.Scheduler</a></li> <li><a href="../symbols/cc.Sequence.html">cc.Sequence</a></li> <li><a href="../symbols/cc.Set.html">cc.Set</a></li> <li><a href="../symbols/cc.Show.html">cc.Show</a></li> <li><a href="../symbols/cc.Size.html">cc.Size</a></li> <li><a href="../symbols/cc.SkewBy.html">cc.SkewBy</a></li> <li><a href="../symbols/cc.SkewTo.html">cc.SkewTo</a></li> <li><a href="../symbols/cc.Spawn.html">cc.Spawn</a></li> <li><a href="../symbols/cc.Speed.html">cc.Speed</a></li> <li><a href="../symbols/cc.Sprite.html">cc.Sprite</a></li> <li><a href="../symbols/cc.SpriteBatchNode.html">cc.SpriteBatchNode</a></li> <li><a 
href="../symbols/cc.SpriteFrame.html">cc.SpriteFrame</a></li> <li><a href="../symbols/cc.SpriteFrameCache.html">cc.SpriteFrameCache</a></li> <li><a href="../symbols/cc.StandardTouchDelegate.html">cc.StandardTouchDelegate</a></li> <li><a href="../symbols/cc.StandardTouchHandler.html">cc.StandardTouchHandler</a></li> <li><a href="../symbols/cc.TargetedAction.html">cc.TargetedAction</a></li> <li><a href="../symbols/cc.TargetedTouchDelegate.html">cc.TargetedTouchDelegate</a></li> <li><a href="../symbols/cc.TargetedTouchHandler.html">cc.TargetedTouchHandler</a></li> <li><a href="../symbols/cc.Tex2F.html">cc.Tex2F</a></li> <li><a href="../symbols/cc.TextFieldDelegate.html">cc.TextFieldDelegate</a></li> <li><a href="../symbols/cc.TextFieldTTF.html">cc.TextFieldTTF</a></li> <li><a href="../symbols/cc.TextureAtlas.html">cc.TextureAtlas</a></li> <li><a href="../symbols/cc.TextureCache.html">cc.TextureCache</a></li> <li><a href="../symbols/cc.TileMapAtlas.html">cc.TileMapAtlas</a></li> <li><a href="../symbols/cc.Time.html">cc.Time</a></li> <li><a href="../symbols/cc.Timer.html">cc.Timer</a></li> <li><a href="../symbols/cc.TintBy.html">cc.TintBy</a></li> <li><a href="../symbols/cc.TintTo.html">cc.TintTo</a></li> <li><a href="../symbols/cc.TMXLayer.html">cc.TMXLayer</a></li> <li><a href="../symbols/cc.TMXLayerInfo.html">cc.TMXLayerInfo</a></li> <li><a href="../symbols/cc.TMXMapInfo.html">cc.TMXMapInfo</a></li> <li><a href="../symbols/cc.TMXObjectGroup.html">cc.TMXObjectGroup</a></li> <li><a href="../symbols/cc.TMXTiledMap.html">cc.TMXTiledMap</a></li> <li><a href="../symbols/cc.TMXTilesetInfo.html">cc.TMXTilesetInfo</a></li> <li><a href="../symbols/cc.ToggleVisibility.html">cc.ToggleVisibility</a></li> <li><a href="../symbols/cc.Touch.html">cc.Touch</a></li> <li><a href="../symbols/cc.TouchDelegate.html">cc.TouchDelegate</a></li> <li><a href="../symbols/cc.TouchDispatcher.html">cc.TouchDispatcher</a></li> <li><a href="../symbols/cc.TouchHandler.html">cc.TouchHandler</a></li> 
<li><a href="../symbols/cc.TransformValues.html">cc.TransformValues</a></li> <li><a href="../symbols/cc.TransitionCrossFade.html">cc.TransitionCrossFade</a></li> <li><a href="../symbols/cc.TransitionEaseScene.html">cc.TransitionEaseScene</a></li> <li><a href="../symbols/cc.TransitionFade.html">cc.TransitionFade</a></li> <li><a href="../symbols/cc.TransitionFadeBL.html">cc.TransitionFadeBL</a></li> <li><a href="../symbols/cc.TransitionFadeDown.html">cc.TransitionFadeDown</a></li> <li><a href="../symbols/cc.TransitionFadeTR.html">cc.TransitionFadeTR</a></li> <li><a href="../symbols/cc.TransitionFadeUp.html">cc.TransitionFadeUp</a></li> <li><a href="../symbols/cc.TransitionFlipAngular.html">cc.TransitionFlipAngular</a></li> <li><a href="../symbols/cc.TransitionFlipX.html">cc.TransitionFlipX</a></li> <li><a href="../symbols/cc.TransitionFlipY.html">cc.TransitionFlipY</a></li> <li><a href="../symbols/cc.TransitionJumpZoom.html">cc.TransitionJumpZoom</a></li> <li><a href="../symbols/cc.TransitionMoveInB.html">cc.TransitionMoveInB</a></li> <li><a href="../symbols/cc.TransitionMoveInL.html">cc.TransitionMoveInL</a></li> <li><a href="../symbols/cc.TransitionMoveInR.html">cc.TransitionMoveInR</a></li> <li><a href="../symbols/cc.TransitionMoveInT.html">cc.TransitionMoveInT</a></li> <li><a href="../symbols/cc.TransitionPageTurn.html">cc.TransitionPageTurn</a></li> <li><a href="../symbols/cc.TransitionProgress.html">cc.TransitionProgress</a></li> <li><a href="../symbols/cc.TransitionProgressHorizontal.html">cc.TransitionProgressHorizontal</a></li> <li><a href="../symbols/cc.TransitionProgressInOut.html">cc.TransitionProgressInOut</a></li> <li><a href="../symbols/cc.TransitionProgressOutIn.html">cc.TransitionProgressOutIn</a></li> <li><a href="../symbols/cc.TransitionProgressRadialCCW.html">cc.TransitionProgressRadialCCW</a></li> <li><a href="../symbols/cc.TransitionProgressRadialCW.html">cc.TransitionProgressRadialCW</a></li> <li><a 
href="../symbols/cc.TransitionProgressVertical.html">cc.TransitionProgressVertical</a></li> <li><a href="../symbols/cc.TransitionRotoZoom.html">cc.TransitionRotoZoom</a></li> <li><a href="../symbols/cc.TransitionScene.html">cc.TransitionScene</a></li> <li><a href="../symbols/cc.TransitionSceneOriented.html">cc.TransitionSceneOriented</a></li> <li><a href="../symbols/cc.TransitionShrinkGrow.html">cc.TransitionShrinkGrow</a></li> <li><a href="../symbols/cc.TransitionSlideInB.html">cc.TransitionSlideInB</a></li> <li><a href="../symbols/cc.TransitionSlideInL.html">cc.TransitionSlideInL</a></li> <li><a href="../symbols/cc.TransitionSlideInR.html">cc.TransitionSlideInR</a></li> <li><a href="../symbols/cc.TransitionSlideInT.html">cc.TransitionSlideInT</a></li> <li><a href="../symbols/cc.TransitionSplitCols.html">cc.TransitionSplitCols</a></li> <li><a href="../symbols/cc.TransitionSplitRows.html">cc.TransitionSplitRows</a></li> <li><a href="../symbols/cc.TransitionTurnOffTiles.html">cc.TransitionTurnOffTiles</a></li> <li><a href="../symbols/cc.TransitionZoomFlipAngular.html">cc.TransitionZoomFlipAngular</a></li> <li><a href="../symbols/cc.TransitionZoomFlipX.html">cc.TransitionZoomFlipX</a></li> <li><a href="../symbols/cc.TransitionZoomFlipY.html">cc.TransitionZoomFlipY</a></li> <li><a href="../symbols/cc.TurnOffTiles.html">cc.TurnOffTiles</a></li> <li><a href="../symbols/cc.UserDefault.html">cc.UserDefault</a></li> <li><a href="../symbols/cc.V2F_C4B_T2F.html">cc.V2F_C4B_T2F</a></li> <li><a href="../symbols/cc.V2F_C4B_T2F_Quad.html">cc.V2F_C4B_T2F_Quad</a></li> <li><a href="../symbols/cc.V2F_C4F_T2F.html">cc.V2F_C4F_T2F</a></li> <li><a href="../symbols/cc.V2F_C4F_T2F_Quad.html">cc.V2F_C4F_T2F_Quad</a></li> <li><a href="../symbols/cc.V3F_C4B_T2F.html">cc.V3F_C4B_T2F</a></li> <li><a href="../symbols/cc.V3F_C4B_T2F_Quad.html">cc.V3F_C4B_T2F_Quad</a></li> <li><a href="../symbols/cc.Vertex2F.html">cc.Vertex2F</a></li> <li><a 
href="../symbols/cc.Vertex3F.html">cc.Vertex3F</a></li> <li><a href="../symbols/tImageSource.html">tImageSource</a></li> </ul> <hr /> <!-- end publish.classesIndex --> </div> <div id="content"> <!-- ============================== class title ============================ --> <h1 class="classTitle"> Class cc.ParticleSystemQuad </h1> <!-- ============================== class summary ========================== --> <p class="description"> <br />Extends <a href="../symbols/cc.ParticleSystem.html">cc.ParticleSystem</a>.<br /> <br /><i>Defined in: </i> <a href="../symbols/src/Projects_cocos2d-html5_cocos2d_particle_nodes_CCParticleSystemQuad.js.html">CCParticleSystemQuad.js</a>. </p> <!-- ============================== constructor summary ==================== --> <table class="summaryTable" cellspacing="0" summary="A summary of the constructor documented in the class cc.ParticleSystemQuad."> <caption>Class Summary</caption> <thead> <tr> <th scope="col">Constructor Attributes</th> <th scope="col">Constructor Name and Description</th> </tr> </thead> <tbody> <tr> <td class="attributes">&nbsp;</td> <td class="nameDescription" > <div class="fixedFont"> <b><a href="../symbols/cc.ParticleSystemQuad.html#constructor">cc.ParticleSystemQuad</a></b>() </div> <div class="description"><p> CCParticleSystemQuad is a subclass of CCParticleSystem<br/> <br/> It includes all the features of ParticleSystem.</div> </td> </tr> </tbody> </table> <!-- ============================== properties summary ===================== --> <!-- ============================== methods summary ======================== --> <table class="summaryTable" cellspacing="0" summary="A summary of the methods documented in the class cc.ParticleSystemQuad."> <caption>Method Summary</caption> <thead> <tr> <th scope="col">Method Attributes</th> <th scope="col">Method Name and Description</th> </tr> </thead> <tbody> <tr> <td class="attributes">&nbsp;</td> <td class="nameDescription"> <div class="fixedFont"><b><a 
href="../symbols/cc.ParticleSystemQuad.html#clone">clone</a></b>() </div>
<div class="description"></div>
</td> </tr>
<tr>
<td class="attributes">&lt;static&gt; &nbsp;</td>
<td class="nameDescription">
<div class="fixedFont">cc.ParticleSystemQuad.<b><a href="../symbols/cc.ParticleSystemQuad.html#.create">create</a></b>(pListFile) </div>
<div class="description"><p> creates and initializes a CCParticleSystemQuad from a plist file.</p></div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#ctor">ctor</a></b>() </div>
<div class="description">Constructor</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#draw">draw</a></b>(ctx) </div>
<div class="description">draw particle</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#initTexCoordsWithRect">initTexCoordsWithRect</a></b>(pointRect) </div>
<div class="description"><p> initializes the texture with a rectangle measured in Points<br/> pointRect should be in Texture coordinates, not pixel coordinates </p></div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#initWithTotalParticles">initWithTotalParticles</a></b>(numberOfParticles) </div>
<div class="description">Initializes a system with a fixed number of particles</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#listenBackToForeground">listenBackToForeground</a></b>(obj) </div>
<div class="description">listens for the event of coming to foreground on Android</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div
class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#postStep">postStep</a></b>() </div>
<div class="description">override cc.ParticleSystem</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#setBatchNode">setBatchNode</a></b>(batchNode) </div>
<div class="description"></div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#setDisplayFrame">setDisplayFrame</a></b>(spriteFrame) </div>
<div class="description"><p> Sets a new CCSpriteFrame as particle.</p></div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#setTexture">setTexture</a></b>(texture, isCallSuper) </div>
<div class="description">set Texture of Particle System</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#setTextureWithRect">setTextureWithRect</a></b>(texture, rect) </div>
<div class="description">Sets a new texture with a rect.</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#setTotalParticles">setTotalParticles</a></b>(tp) </div>
<div class="description"></div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#setupIndices">setupIndices</a></b>() </div>
<div class="description">initializes the indices for the vertices</div>
</td> </tr>
<tr>
<td class="attributes">&nbsp;</td>
<td class="nameDescription">
<div class="fixedFont"><b><a href="../symbols/cc.ParticleSystemQuad.html#updateQuadWithParticle">updateQuadWithParticle</a></b>(particle, newPosition)
</div> <div class="description">update particle's quad</div> </td> </tr> </tbody> </table> <dl class="inheritsList"> <dt>Methods borrowed from class <a href="../symbols/cc.ParticleSystem.html">cc.ParticleSystem</a>: </dt><dd><a href="../symbols/cc.ParticleSystem.html#addParticle">addParticle</a>, <a href="../symbols/cc.ParticleSystem.html#destroyParticleSystem">destroyParticleSystem</a>, <a href="../symbols/cc.ParticleSystem.html#getAngle">getAngle</a>, <a href="../symbols/cc.ParticleSystem.html#getAngleVar">getAngleVar</a>, <a href="../symbols/cc.ParticleSystem.html#getAtlasIndex">getAtlasIndex</a>, <a href="../symbols/cc.ParticleSystem.html#getBatchNode">getBatchNode</a>, <a href="../symbols/cc.ParticleSystem.html#getBlendFunc">getBlendFunc</a>, <a href="../symbols/cc.ParticleSystem.html#getBoundingBoxToWorld">getBoundingBoxToWorld</a>, <a href="../symbols/cc.ParticleSystem.html#getDrawMode">getDrawMode</a>, <a href="../symbols/cc.ParticleSystem.html#getDuration">getDuration</a>, <a href="../symbols/cc.ParticleSystem.html#getEmissionRate">getEmissionRate</a>, <a href="../symbols/cc.ParticleSystem.html#getEmitterMode">getEmitterMode</a>, <a href="../symbols/cc.ParticleSystem.html#getEndColor">getEndColor</a>, <a href="../symbols/cc.ParticleSystem.html#getEndColorVar">getEndColorVar</a>, <a href="../symbols/cc.ParticleSystem.html#getEndRadius">getEndRadius</a>, <a href="../symbols/cc.ParticleSystem.html#getEndRadiusVar">getEndRadiusVar</a>, <a href="../symbols/cc.ParticleSystem.html#getEndSize">getEndSize</a>, <a href="../symbols/cc.ParticleSystem.html#getEndSizeVar">getEndSizeVar</a>, <a href="../symbols/cc.ParticleSystem.html#getEndSpin">getEndSpin</a>, <a href="../symbols/cc.ParticleSystem.html#getEndSpinVar">getEndSpinVar</a>, <a href="../symbols/cc.ParticleSystem.html#getGravity">getGravity</a>, <a href="../symbols/cc.ParticleSystem.html#getLife">getLife</a>, <a href="../symbols/cc.ParticleSystem.html#getLifeVar">getLifeVar</a>, <a 
href="../symbols/cc.ParticleSystem.html#getOpacityModifyRGB">getOpacityModifyRGB</a>, <a href="../symbols/cc.ParticleSystem.html#getParticleCount">getParticleCount</a>, <a href="../symbols/cc.ParticleSystem.html#getPositionType">getPositionType</a>, <a href="../symbols/cc.ParticleSystem.html#getPosVar">getPosVar</a>, <a href="../symbols/cc.ParticleSystem.html#getRadialAccel">getRadialAccel</a>, <a href="../symbols/cc.ParticleSystem.html#getRadialAccelVar">getRadialAccelVar</a>, <a href="../symbols/cc.ParticleSystem.html#getRotatePerSecond">getRotatePerSecond</a>, <a href="../symbols/cc.ParticleSystem.html#getRotatePerSecondVar">getRotatePerSecondVar</a>, <a href="../symbols/cc.ParticleSystem.html#getShapeType">getShapeType</a>, <a href="../symbols/cc.ParticleSystem.html#getSourcePosition">getSourcePosition</a>, <a href="../symbols/cc.ParticleSystem.html#getSpeed">getSpeed</a>, <a href="../symbols/cc.ParticleSystem.html#getSpeedVar">getSpeedVar</a>, <a href="../symbols/cc.ParticleSystem.html#getStartColor">getStartColor</a>, <a href="../symbols/cc.ParticleSystem.html#getStartColorVar">getStartColorVar</a>, <a href="../symbols/cc.ParticleSystem.html#getStartRadius">getStartRadius</a>, <a href="../symbols/cc.ParticleSystem.html#getStartRadiusVar">getStartRadiusVar</a>, <a href="../symbols/cc.ParticleSystem.html#getStartSize">getStartSize</a>, <a href="../symbols/cc.ParticleSystem.html#getStartSizeVar">getStartSizeVar</a>, <a href="../symbols/cc.ParticleSystem.html#getStartSpin">getStartSpin</a>, <a href="../symbols/cc.ParticleSystem.html#getStartSpinVar">getStartSpinVar</a>, <a href="../symbols/cc.ParticleSystem.html#getTangentialAccel">getTangentialAccel</a>, <a href="../symbols/cc.ParticleSystem.html#getTangentialAccelVar">getTangentialAccelVar</a>, <a href="../symbols/cc.ParticleSystem.html#getTexture">getTexture</a>, <a href="../symbols/cc.ParticleSystem.html#getTotalParticles">getTotalParticles</a>, <a href="../symbols/cc.ParticleSystem.html#init">init</a>, <a 
href="../symbols/cc.ParticleSystem.html#initParticle">initParticle</a>, <a href="../symbols/cc.ParticleSystem.html#initWithDictionary">initWithDictionary</a>, <a href="../symbols/cc.ParticleSystem.html#initWithFile">initWithFile</a>, <a href="../symbols/cc.ParticleSystem.html#isActive">isActive</a>, <a href="../symbols/cc.ParticleSystem.html#isAutoRemoveOnFinish">isAutoRemoveOnFinish</a>, <a href="../symbols/cc.ParticleSystem.html#isBlendAdditive">isBlendAdditive</a>, <a href="../symbols/cc.ParticleSystem.html#isFull">isFull</a>, <a href="../symbols/cc.ParticleSystem.html#resetSystem">resetSystem</a>, <a href="../symbols/cc.ParticleSystem.html#setAngle">setAngle</a>, <a href="../symbols/cc.ParticleSystem.html#setAngleVar">setAngleVar</a>, <a href="../symbols/cc.ParticleSystem.html#setAtlasIndex">setAtlasIndex</a>, <a href="../symbols/cc.ParticleSystem.html#setAutoRemoveOnFinish">setAutoRemoveOnFinish</a>, <a href="../symbols/cc.ParticleSystem.html#setBlendAdditive">setBlendAdditive</a>, <a href="../symbols/cc.ParticleSystem.html#setBlendFunc">setBlendFunc</a>, <a href="../symbols/cc.ParticleSystem.html#setDrawMode">setDrawMode</a>, <a href="../symbols/cc.ParticleSystem.html#setDuration">setDuration</a>, <a href="../symbols/cc.ParticleSystem.html#setEmissionRate">setEmissionRate</a>, <a href="../symbols/cc.ParticleSystem.html#setEmitterMode">setEmitterMode</a>, <a href="../symbols/cc.ParticleSystem.html#setEndColor">setEndColor</a>, <a href="../symbols/cc.ParticleSystem.html#setEndColorVar">setEndColorVar</a>, <a href="../symbols/cc.ParticleSystem.html#setEndRadius">setEndRadius</a>, <a href="../symbols/cc.ParticleSystem.html#setEndRadiusVar">setEndRadiusVar</a>, <a href="../symbols/cc.ParticleSystem.html#setEndSize">setEndSize</a>, <a href="../symbols/cc.ParticleSystem.html#setEndSizeVar">setEndSizeVar</a>, <a href="../symbols/cc.ParticleSystem.html#setEndSpin">setEndSpin</a>, <a href="../symbols/cc.ParticleSystem.html#setEndSpinVar">setEndSpinVar</a>, <a 
href="../symbols/cc.ParticleSystem.html#setGravity">setGravity</a>, <a href="../symbols/cc.ParticleSystem.html#setLife">setLife</a>, <a href="../symbols/cc.ParticleSystem.html#setLifeVar">setLifeVar</a>, <a href="../symbols/cc.ParticleSystem.html#setOpacityModifyRGB">setOpacityModifyRGB</a>, <a href="../symbols/cc.ParticleSystem.html#setParticleCount">setParticleCount</a>, <a href="../symbols/cc.ParticleSystem.html#setPositionType">setPositionType</a>, <a href="../symbols/cc.ParticleSystem.html#setPosVar">setPosVar</a>, <a href="../symbols/cc.ParticleSystem.html#setRadialAccel">setRadialAccel</a>, <a href="../symbols/cc.ParticleSystem.html#setRadialAccelVar">setRadialAccelVar</a>, <a href="../symbols/cc.ParticleSystem.html#setRotatePerSecond">setRotatePerSecond</a>, <a href="../symbols/cc.ParticleSystem.html#setRotatePerSecondVar">setRotatePerSecondVar</a>, <a href="../symbols/cc.ParticleSystem.html#setRotation">setRotation</a>, <a href="../symbols/cc.ParticleSystem.html#setScale">setScale</a>, <a href="../symbols/cc.ParticleSystem.html#setScaleX">setScaleX</a>, <a href="../symbols/cc.ParticleSystem.html#setScaleY">setScaleY</a>, <a href="../symbols/cc.ParticleSystem.html#setShapeType">setShapeType</a>, <a href="../symbols/cc.ParticleSystem.html#setSourcePosition">setSourcePosition</a>, <a href="../symbols/cc.ParticleSystem.html#setSpeed">setSpeed</a>, <a href="../symbols/cc.ParticleSystem.html#setSpeedVar">setSpeedVar</a>, <a href="../symbols/cc.ParticleSystem.html#setStartColor">setStartColor</a>, <a href="../symbols/cc.ParticleSystem.html#setStartColorVar">setStartColorVar</a>, <a href="../symbols/cc.ParticleSystem.html#setStartRadius">setStartRadius</a>, <a href="../symbols/cc.ParticleSystem.html#setStartRadiusVar">setStartRadiusVar</a>, <a href="../symbols/cc.ParticleSystem.html#setStartSize">setStartSize</a>, <a href="../symbols/cc.ParticleSystem.html#setStartSizeVar">setStartSizeVar</a>, <a 
href="../symbols/cc.ParticleSystem.html#setStartSpin">setStartSpin</a>, <a href="../symbols/cc.ParticleSystem.html#setStartSpinVar">setStartSpinVar</a>, <a href="../symbols/cc.ParticleSystem.html#setTangentialAccel">setTangentialAccel</a>, <a href="../symbols/cc.ParticleSystem.html#setTangentialAccelVar">setTangentialAccelVar</a>, <a href="../symbols/cc.ParticleSystem.html#stopSystem">stopSystem</a>, <a href="../symbols/cc.ParticleSystem.html#update">update</a>, <a href="../symbols/cc.ParticleSystem.html#updateWithNoTime">updateWithNoTime</a></dd><dt>Methods borrowed from class <a href="../symbols/cc.Node.html">cc.Node</a>: </dt><dd><a href="../symbols/cc.Node.html#addChild">addChild</a>, <a href="../symbols/cc.Node.html#cleanup">cleanup</a>, <a href="../symbols/cc.Node.html#convertToNodeSpace">convertToNodeSpace</a>, <a href="../symbols/cc.Node.html#convertToNodeSpaceAR">convertToNodeSpaceAR</a>, <a href="../symbols/cc.Node.html#convertTouchToNodeSpace">convertTouchToNodeSpace</a>, <a href="../symbols/cc.Node.html#convertTouchToNodeSpaceAR">convertTouchToNodeSpaceAR</a>, <a href="../symbols/cc.Node.html#convertToWorldSpace">convertToWorldSpace</a>, <a href="../symbols/cc.Node.html#convertToWorldSpaceAR">convertToWorldSpaceAR</a>, <a href="../symbols/cc.Node.html#description">description</a>, <a href="../symbols/cc.Node.html#getActionByTag">getActionByTag</a>, <a href="../symbols/cc.Node.html#getActionManager">getActionManager</a>, <a href="../symbols/cc.Node.html#getAnchorPoint">getAnchorPoint</a>, <a href="../symbols/cc.Node.html#getAnchorPointInPoints">getAnchorPointInPoints</a>, <a href="../symbols/cc.Node.html#getBoundingBox">getBoundingBox</a>, <a href="../symbols/cc.Node.html#getCamera">getCamera</a>, <a href="../symbols/cc.Node.html#getChildByTag">getChildByTag</a>, <a href="../symbols/cc.Node.html#getChildren">getChildren</a>, <a href="../symbols/cc.Node.html#getChildrenCount">getChildrenCount</a>, <a 
href="../symbols/cc.Node.html#getContentSize">getContentSize</a>, <a href="../symbols/cc.Node.html#getGLServerState">getGLServerState</a>, <a href="../symbols/cc.Node.html#getGrid">getGrid</a>, <a href="../symbols/cc.Node.html#getOrderOfArrival">getOrderOfArrival</a>, <a href="../symbols/cc.Node.html#getParent">getParent</a>, <a href="../symbols/cc.Node.html#getPosition">getPosition</a>, <a href="../symbols/cc.Node.html#getPositionX">getPositionX</a>, <a href="../symbols/cc.Node.html#getPositionY">getPositionY</a>, <a href="../symbols/cc.Node.html#getRotation">getRotation</a>, <a href="../symbols/cc.Node.html#getScale">getScale</a>, <a href="../symbols/cc.Node.html#getScaleX">getScaleX</a>, <a href="../symbols/cc.Node.html#getScaleY">getScaleY</a>, <a href="../symbols/cc.Node.html#getScheduler">getScheduler</a>, <a href="../symbols/cc.Node.html#getShaderProgram">getShaderProgram</a>, <a href="../symbols/cc.Node.html#getSkewX">getSkewX</a>, <a href="../symbols/cc.Node.html#getSkewY">getSkewY</a>, <a href="../symbols/cc.Node.html#getTag">getTag</a>, <a href="../symbols/cc.Node.html#getUserData">getUserData</a>, <a href="../symbols/cc.Node.html#getUserObject">getUserObject</a>, <a href="../symbols/cc.Node.html#getVertexZ">getVertexZ</a>, <a href="../symbols/cc.Node.html#getZOrder">getZOrder</a>, <a href="../symbols/cc.Node.html#ignoreAnchorPointForPosition">ignoreAnchorPointForPosition</a>, <a href="../symbols/cc.Node.html#isIgnoreAnchorPointForPosition">isIgnoreAnchorPointForPosition</a>, <a href="../symbols/cc.Node.html#isRunning">isRunning</a>, <a href="../symbols/cc.Node.html#isVisible">isVisible</a>, <a href="../symbols/cc.Node.html#nodeToParentTransform">nodeToParentTransform</a>, <a href="../symbols/cc.Node.html#nodeToWorldTransform">nodeToWorldTransform</a>, <a href="../symbols/cc.Node.html#numberOfRunningActions">numberOfRunningActions</a>, <a href="../symbols/cc.Node.html#onEnter">onEnter</a>, <a 
href="../symbols/cc.Node.html#onEnterTransitionDidFinish">onEnterTransitionDidFinish</a>, <a href="../symbols/cc.Node.html#onExit">onExit</a>, <a href="../symbols/cc.Node.html#onExitTransitionDidStart">onExitTransitionDidStart</a>, <a href="../symbols/cc.Node.html#parentToNodeTransform">parentToNodeTransform</a>, <a href="../symbols/cc.Node.html#pauseSchedulerAndActions">pauseSchedulerAndActions</a>, <a href="../symbols/cc.Node.html#release">release</a>, <a href="../symbols/cc.Node.html#removeAllChildren">removeAllChildren</a>, <a href="../symbols/cc.Node.html#removeAllChildrenWithCleanup">removeAllChildrenWithCleanup</a>, <a href="../symbols/cc.Node.html#removeChild">removeChild</a>, <a href="../symbols/cc.Node.html#removeChildByTag">removeChildByTag</a>, <a href="../symbols/cc.Node.html#removeFromParent">removeFromParent</a>, <a href="../symbols/cc.Node.html#removeFromParentAndCleanup">removeFromParentAndCleanup</a>, <a href="../symbols/cc.Node.html#reorderChild">reorderChild</a>, <a href="../symbols/cc.Node.html#resumeSchedulerAndActions">resumeSchedulerAndActions</a>, <a href="../symbols/cc.Node.html#retain">retain</a>, <a href="../symbols/cc.Node.html#runAction">runAction</a>, <a href="../symbols/cc.Node.html#schedule">schedule</a>, <a href="../symbols/cc.Node.html#scheduleOnce">scheduleOnce</a>, <a href="../symbols/cc.Node.html#scheduleUpdate">scheduleUpdate</a>, <a href="../symbols/cc.Node.html#scheduleUpdateWithPriority">scheduleUpdateWithPriority</a>, <a href="../symbols/cc.Node.html#setActionManager">setActionManager</a>, <a href="../symbols/cc.Node.html#setAnchorPoint">setAnchorPoint</a>, <a href="../symbols/cc.Node.html#setContentSize">setContentSize</a>, <a href="../symbols/cc.Node.html#setGLServerState">setGLServerState</a>, <a href="../symbols/cc.Node.html#setGrid">setGrid</a>, <a href="../symbols/cc.Node.html#setNodeDirty">setNodeDirty</a>, <a href="../symbols/cc.Node.html#setOrderOfArrival">setOrderOfArrival</a>, <a 
href="../symbols/cc.Node.html#setParent">setParent</a>, <a href="../symbols/cc.Node.html#setPosition">setPosition</a>, <a href="../symbols/cc.Node.html#setPositionX">setPositionX</a>, <a href="../symbols/cc.Node.html#setPositionY">setPositionY</a>, <a href="../symbols/cc.Node.html#setScheduler">setScheduler</a>, <a href="../symbols/cc.Node.html#setShaderProgram">setShaderProgram</a>, <a href="../symbols/cc.Node.html#setSkewX">setSkewX</a>, <a href="../symbols/cc.Node.html#setSkewY">setSkewY</a>, <a href="../symbols/cc.Node.html#setTag">setTag</a>, <a href="../symbols/cc.Node.html#setUserData">setUserData</a>, <a href="../symbols/cc.Node.html#setUserObject">setUserObject</a>, <a href="../symbols/cc.Node.html#setVertexZ">setVertexZ</a>, <a href="../symbols/cc.Node.html#setVisible">setVisible</a>, <a href="../symbols/cc.Node.html#setZOrder">setZOrder</a>, <a href="../symbols/cc.Node.html#sortAllChildren">sortAllChildren</a>, <a href="../symbols/cc.Node.html#stopAction">stopAction</a>, <a href="../symbols/cc.Node.html#stopActionByTag">stopActionByTag</a>, <a href="../symbols/cc.Node.html#stopAllActions">stopAllActions</a>, <a href="../symbols/cc.Node.html#transform">transform</a>, <a href="../symbols/cc.Node.html#transformAncestors">transformAncestors</a>, <a href="../symbols/cc.Node.html#unschedule">unschedule</a>, <a href="../symbols/cc.Node.html#unscheduleAllCallbacks">unscheduleAllCallbacks</a>, <a href="../symbols/cc.Node.html#unscheduleUpdate">unscheduleUpdate</a>, <a href="../symbols/cc.Node.html#updateTransform">updateTransform</a>, <a href="../symbols/cc.Node.html#visit">visit</a>, <a href="../symbols/cc.Node.html#worldToNodeTransform">worldToNodeTransform</a></dd> </dl> <!-- ============================== events summary ======================== --> <!-- ============================== constructor details ==================== --> <div class="details"><a name="constructor"> </a> <div class="sectionTitle"> Class Detail </div> <div class="fixedFont"> 
<b>cc.ParticleSystemQuad</b>() </div> <div class="description"> <p> CCParticleSystemQuad is a subclass of CCParticleSystem<br/> <br/> It includes all the features of ParticleSystem.<br/> <br/> Special features and Limitations:<br/> - Particle size can be any float number. <br/> - The system can be scaled <br/> - The particles can be rotated <br/> - It supports subrects <br/> - It supports batched rendering since 1.1<br/> </p> </div> <pre class="code">//create a particle system this._emitter = new cc.ParticleSystemQuad(); this._emitter.initWithTotalParticles(150);</pre> </div> <!-- ============================== field details ========================== --> <!-- ============================== method details ========================= --> <div class="sectionTitle"> Method Detail </div> <a name="clone"> </a> <div class="fixedFont"> <b>clone</b>() </div> <div class="description"> </div> <hr /> <a name=".create"> </a> <div class="fixedFont">&lt;static&gt; <span class="light">{<a href="../symbols/cc.ParticleSystem.html">cc.ParticleSystem</a>}</span> <span class="light">cc.ParticleSystemQuad.</span><b>create</b>(pListFile) </div> <div class="description"> <p> creates an initializes a CCParticleSystemQuad from a plist file.<br/> This plist files can be creted manually or with Particle Designer:<br/> http://particledesigner.71squared.com/<br/> </p> </div> <pre class="code"> //creates an initializes a CCParticleSystemQuad from a plist file. 
var system = cc.ParticleSystemQuad.create("Images/SpinningPeas.plist");</pre> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{String}</span> <b>pListFile</b> </dt> <dd></dd> </dl> <dl class="detailList"> <dt class="heading">Returns:</dt> <dd><span class="light fixedFont">{<a href="../symbols/cc.ParticleSystem.html">cc.ParticleSystem</a>}</span> </dd> </dl> <hr /> <a name="ctor"> </a> <div class="fixedFont"> <b>ctor</b>() </div> <div class="description"> Constructor </div> <hr /> <a name="draw"> </a> <div class="fixedFont"> <b>draw</b>(ctx) </div> <div class="description"> draw particle </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{CanvasContext}</span> <b>ctx</b> </dt> <dd>CanvasContext</dd> </dl> <hr /> <a name="initTexCoordsWithRect"> </a> <div class="fixedFont"> <b>initTexCoordsWithRect</b>(pointRect) </div> <div class="description"> <p> initilizes the texture with a rectangle measured Points<br/> pointRect should be in Texture coordinates, not pixel coordinates </p> </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{<a href="../symbols/cc.Rect.html">cc.Rect</a>}</span> <b>pointRect</b> </dt> <dd></dd> </dl> <hr /> <a name="initWithTotalParticles"> </a> <div class="fixedFont"> <span class="light">{Boolean}</span> <b>initWithTotalParticles</b>(numberOfParticles) </div> <div class="description"> Initializes a system with a fixed number of particles </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{Number}</span> <b>numberOfParticles</b> </dt> <dd></dd> </dl> <dl class="detailList"> <dt class="heading">Returns:</dt> <dd><span class="light fixedFont">{Boolean}</span> </dd> </dl> <hr /> <a name="listenBackToForeground"> </a> <div class="fixedFont"> <b>listenBackToForeground</b>(obj) </div> <div class="description"> listen the event that coming to foreground 
on Android </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{<a href="../symbols/cc.Class.html">cc.Class</a>}</span> <b>obj</b> </dt> <dd></dd> </dl> <hr /> <a name="postStep"> </a> <div class="fixedFont"> <b>postStep</b>() </div> <div class="description"> override cc.ParticleSystem </div> <hr /> <a name="setBatchNode"> </a> <div class="fixedFont"> <b>setBatchNode</b>(batchNode) </div> <div class="description"> </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <b>batchNode</b> </dt> <dd></dd> </dl> <hr /> <a name="setDisplayFrame"> </a> <div class="fixedFont"> <b>setDisplayFrame</b>(spriteFrame) </div> <div class="description"> <p> Sets a new CCSpriteFrame as particle.</br> WARNING: this method is experimental. Use setTexture:withRect instead. </p> </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{<a href="../symbols/cc.SpriteFrame.html">cc.SpriteFrame</a>}</span> <b>spriteFrame</b> </dt> <dd></dd> </dl> <hr /> <a name="setTexture"> </a> <div class="fixedFont"> <b>setTexture</b>(texture, isCallSuper) </div> <div class="description"> set Texture of Particle System </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{HTMLImageElement|HTMLCanvasElement|<a href="../symbols/cc.html#.Texture2D">cc.Texture2D</a>}</span> <b>texture</b> </dt> <dd></dd> <dt> <span class="light fixedFont">{Boolean}</span> <b>isCallSuper</b> </dt> <dd>is direct call super method</dd> </dl> <hr /> <a name="setTextureWithRect"> </a> <div class="fixedFont"> <b>setTextureWithRect</b>(texture, rect) </div> <div class="description"> Sets a new texture with a rect. The rect is in Points. 
</div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{<a href="../symbols/cc.html#.Texture2D">cc.Texture2D</a>}</span> <b>texture</b> </dt> <dd></dd> <dt> <span class="light fixedFont">{<a href="../symbols/cc.Rect.html">cc.Rect</a>}</span> <b>rect</b> </dt> <dd></dd> </dl> <hr /> <a name="setTotalParticles"> </a> <div class="fixedFont"> <b>setTotalParticles</b>(tp) </div> <div class="description"> </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <b>tp</b> </dt> <dd></dd> </dl> <hr /> <a name="setupIndices"> </a> <div class="fixedFont"> <b>setupIndices</b>() </div> <div class="description"> initialices the indices for the vertices </div> <hr /> <a name="updateQuadWithParticle"> </a> <div class="fixedFont"> <b>updateQuadWithParticle</b>(particle, newPosition) </div> <div class="description"> update particle's quad </div> <dl class="detailList"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{<a href="../symbols/cc.Particle.html">cc.Particle</a>}</span> <b>particle</b> </dt> <dd></dd> <dt> <span class="light fixedFont">{<a href="../symbols/cc.Point.html">cc.Point</a>}</span> <b>newPosition</b> </dt> <dd></dd> </dl> <!-- ============================== event details ========================= --> <hr /> </div> <!-- ============================== footer ================================= --> <div class="fineprint" style="clear:both"> Documentation generated by <a href="http://code.google.com/p/jsdoc-toolkit/" target="_blank">JsDoc Toolkit</a> 2.4.0 on Wed Jan 09 2013 07:51:53 GMT-0800 (PST) </div> </body> </html>
{ "pile_set_name": "Github" }
# Huff0 entropy compression This package provides Huff0 encoding and decoding as used in zstd. [Huff0](https://github.com/Cyan4973/FiniteStateEntropy#new-generation-entropy-coders), a Huffman codec designed for modern CPU, featuring OoO (Out of Order) operations on multiple ALU (Arithmetic Logic Unit), achieving extremely fast compression and decompression speeds. This can be used for compressing input with a lot of similar input values to the smallest number of bytes. This does not perform any multi-byte [dictionary coding](https://en.wikipedia.org/wiki/Dictionary_coder) as LZ coders, but it can be used as a secondary step to compressors (like Snappy) that does not do entropy encoding. * [Godoc documentation](https://godoc.org/github.com/klauspost/compress/huff0) THIS PACKAGE IS NOT CONSIDERED STABLE AND API OR ENCODING MAY CHANGE IN THE FUTURE. ## News * Mar 2018: First implementation released. Consider this beta software for now. # Usage This package provides a low level interface that allows to compress single independent blocks. Each block is separate, and there is no built in integrity checks. This means that the caller should keep track of block sizes and also do checksums if needed. Compressing a block is done via the [`Compress1X`](https://godoc.org/github.com/klauspost/compress/huff0#Compress1X) and [`Compress4X`](https://godoc.org/github.com/klauspost/compress/huff0#Compress4X) functions. You must provide input and will receive the output and maybe an error. 
These error values can be returned:

| Error               | Description                                                                 |
|---------------------|-----------------------------------------------------------------------------|
| `<nil>`             | Everything ok, output is returned                                           |
| `ErrIncompressible` | Returned when input is judged to be too hard to compress                    |
| `ErrUseRLE`         | Returned from the compressor when the input is a single byte value repeated |
| `ErrTooBig`         | Returned if the input block exceeds the maximum allowed size (128 KiB)      |
| `(error)`           | An internal error occurred.                                                 |

As can be seen above, some of these errors will be returned even under normal operation, so it is important to handle them.

To reduce allocations you can provide a [`Scratch`](https://godoc.org/github.com/klauspost/compress/huff0#Scratch) object that can be re-used for successive calls. Both compression and decompression accept a `Scratch` object, and the same object can be used for both.

Be aware that when re-using a `Scratch` object, the *output* buffer is also re-used, so if you are still using this you must set the `Out` field in the scratch to nil. The same buffer is used for compression and decompression output.

The `Scratch` object will retain state that allows re-use of previous tables for encoding and decoding.

## Tables and re-use

Huff0 allows for reusing tables from the previous block to save space if that is expected to give better/faster results.

The Scratch object allows you to set a [`ReusePolicy`](https://godoc.org/github.com/klauspost/compress/huff0#ReusePolicy) that controls this behaviour. See the documentation for details. This can be altered between each block.

Do however note that this information is *not* stored in the output block and it is up to the users of the package to record whether [`ReadTable`](https://godoc.org/github.com/klauspost/compress/huff0#ReadTable) should be called, based on the boolean reported back from the CompressXX call.
If you want to store the table separate from the data, you can access them as `OutData` and `OutTable` on the [`Scratch`](https://godoc.org/github.com/klauspost/compress/huff0#Scratch) object. ## Decompressing The first part of decoding is to initialize the decoding table through [`ReadTable`](https://godoc.org/github.com/klauspost/compress/huff0#ReadTable). This will initialize the decoding tables. You can supply the complete block to `ReadTable` and it will return the data part of the block which can be given to the decompressor. Decompressing is done by calling the [`Decompress1X`](https://godoc.org/github.com/klauspost/compress/huff0#Scratch.Decompress1X) or [`Decompress4X`](https://godoc.org/github.com/klauspost/compress/huff0#Scratch.Decompress4X) function. You must provide the output from the compression stage, at exactly the size you got back. If you receive an error back your input was likely corrupted. It is important to note that a successful decoding does *not* mean your output matches your original input. There are no integrity checks, so relying on errors from the decompressor does not assure your data is valid. # Contributing Contributions are always welcome. Be aware that adding public functions will require good justification and breaking changes will likely not be accepted. If in doubt open an issue before writing the PR.
{ "pile_set_name": "Github" }
/**
 * Copyright 2011-2019 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.windgate.core.session;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.io.File;
import java.util.Properties;

import org.junit.Assume;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import com.asakusafw.windgate.core.ProfileContext;
import com.asakusafw.windgate.core.process.ProcessProfile;
import com.asakusafw.windgate.file.session.FileSessionProvider;

/**
 * Test for {@link SessionProfile}.
 */
public class SessionProfileTest {

    /**
     * Temporary folder used as the backing directory for file-based sessions.
     */
    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    /**
     * Test method for {@link SessionProfile#loadFrom(Properties, ProfileContext)}.
     * Loads a profile that declares only the provider class; the resulting
     * configuration must be empty.
     */
    @Test
    public void loadFrom() {
        Properties p = new Properties();
        p.setProperty(SessionProfile.KEY_PROVIDER, FileSessionProvider.class.getName());

        SessionProfile profile = SessionProfile.loadFrom(p, context());
        assertThat(profile.getProviderClass(), is((Object) FileSessionProvider.class));
        assertThat(profile.getConfiguration().size(), is(0));
    }

    /**
     * Test method for {@link SessionProfile#loadFrom(Properties, ProfileContext)}.
     * Loads a profile with one extra configuration entry (the session
     * directory) and checks that it is exposed via the configuration map.
     */
    @Test
    public void loadFrom_configure() {
        String path = new File("target/testing/" + getClass().getSimpleName()).getAbsolutePath();
        Properties p = new Properties();
        p.setProperty(SessionProfile.KEY_PROVIDER, FileSessionProvider.class.getName());
        p.setProperty(SessionProfile.KEY_PREFIX + FileSessionProvider.KEY_DIRECTORY, path);

        SessionProfile profile = SessionProfile.loadFrom(p, context());
        assertThat(profile.getProviderClass(), is((Object) FileSessionProvider.class));
        assertThat(profile.getConfiguration().size(), is(1));
        assertThat(profile.getConfiguration().get(FileSessionProvider.KEY_DIRECTORY), is(path));
    }

    /**
     * Test method for {@link SessionProfile#storeTo(java.util.Properties)}.
     * Round-trips a profile that carries a configuration entry: storing it
     * back into fresh properties must reproduce the original input.
     */
    @Test
    public void storeTo() {
        String path = new File("target/testing/" + getClass().getSimpleName()).getAbsolutePath();
        Properties p = new Properties();
        p.setProperty(SessionProfile.KEY_PROVIDER, FileSessionProvider.class.getName());
        p.setProperty(SessionProfile.KEY_PREFIX + FileSessionProvider.KEY_DIRECTORY, path);
        SessionProfile profile = SessionProfile.loadFrom(p, context());

        Properties restored = new Properties();
        profile.storeTo(restored);
        assertThat(restored, is(p));
    }

    /**
     * Test method for {@link SessionProfile#storeTo(java.util.Properties)}.
     * Round-trips a minimal profile (provider only, no configuration).
     */
    @Test
    public void storeTo_configure() {
        Properties p = new Properties();
        p.setProperty(SessionProfile.KEY_PROVIDER, FileSessionProvider.class.getName());
        SessionProfile profile = SessionProfile.loadFrom(p, context());

        Properties restored = new Properties();
        profile.storeTo(restored);
        assertThat(restored, is(p));
    }

    /**
     * Test method for {@link SessionProfile#removeCorrespondingKeys(java.util.Properties)}.
     * Only keys belonging to the session namespace may be removed; unrelated
     * keys (here, {@code KEY_PROVIDER + "_"}) must survive.
     */
    @Test
    public void removeCorrespondingKeys() {
        Properties p = new Properties();
        p.setProperty(SessionProfile.KEY_PROVIDER, "aaa");
        p.setProperty(SessionProfile.KEY_PROVIDER + "_", "bbb");
        p.setProperty(SessionProfile.KEY_PREFIX + "", "");
        p.setProperty(SessionProfile.KEY_PREFIX + "ccc", "ccc");
        SessionProfile.removeCorrespondingKeys(p);

        Properties answer = new Properties();
        answer.setProperty(SessionProfile.KEY_PROVIDER + "_", "bbb");
        assertThat(p, is(answer));
    }

    /**
     * Test method for {@link SessionProfile#createProvider()}.
     * Creating a session through the provider must lazily create the
     * configured session directory.
     * @throws Exception if failed
     */
    @Test
    public void createProvider() throws Exception {
        // obtain a path that is guaranteed not to exist yet, so we can
        // observe the provider creating it
        File path = folder.newFolder("session");
        Assume.assumeTrue(path.delete());
        Properties p = new Properties();
        p.setProperty(SessionProfile.KEY_PROVIDER, FileSessionProvider.class.getName());
        p.setProperty(SessionProfile.KEY_PREFIX + FileSessionProvider.KEY_DIRECTORY, path.getPath());
        SessionProfile profile = SessionProfile.loadFrom(p, context());

        SessionProvider provider = profile.createProvider();
        try (SessionMirror session = provider.create("hello")) {
            // do nothing
        }
        assertThat(path.isDirectory(), is(true));
    }

    /**
     * Creates the profile context shared by every test case.
     */
    private ProfileContext context() {
        return ProfileContext.system(getClass().getClassLoader());
    }
}
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -triple=i386-pc-solaris2.11 -w -emit-llvm %s -o - | FileCheck %s #pragma redefine_extname fake real #pragma redefine_extname name alias extern int fake(void); int name; // __PRAGMA_REDEFINE_EXTNAME should be defined. This will fail if it isn't... int fish() { return fake() + __PRAGMA_REDEFINE_EXTNAME + name; } // Check that the call to fake() is emitted as a call to real() // CHECK: call i32 @real() // Check that this also works with variables names // CHECK: load i32, i32* @alias // This is a case when redefenition is deferred *and* we have a local of the // same name. PR23923. #pragma redefine_extname foo bar int f() { int foo = 0; return foo; } extern int foo() { return 1; } // CHECK: define i32 @bar() // Check that pragma redefine_extname applies to external declarations only. #pragma redefine_extname foo_static bar_static static int foo_static() { return 1; } int baz() { return foo_static(); } // CHECK-NOT: call i32 @bar_static()
{ "pile_set_name": "Github" }
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4

# MacPorts Portfile for ddclient, a Perl-based dynamic-DNS update client.

PortSystem          1.0
PortGroup           perl5 1.0

# Basic port identification.
name                ddclient
version             3.9.1
platforms           darwin
categories          net
license             GPL
maintainers         {snc @nerdling} openmaintainer

description         Update dynamic DNS entries

long_description    ddclient is a Perl client used to update dynamic \
                    DNS entries for accounts on many dynamic DNS services.

homepage            http://ddclient.sourceforge.net/
master_sites        sourceforge

checksums           rmd160  4b879422b6462241725d62e4a7d247dcd79e4942 \
                    sha256  e4969e15cc491fc52bdcd649d4c2b0e4b1bf0c9f9dba23471c634871acc52470 \
                    size    63469

# Runtime Perl dependencies; curl-ca-bundle supplies the CA certificates
# needed for HTTPS updates.
depends_lib         port:p${perl5.major}-io-socket-ssl
depends_run         port:perl${perl5.major} \
                    port:p${perl5.major}-data-validate-ip \
                    path:share/curl/curl-ca-bundle.crt:curl-ca-bundle

patchfiles          patch-ddclient.diff

# Substitute the MacPorts prefix and the selected Perl interpreter into
# the ddclient script after patching.
post-patch {
    reinplace "s|@@PREFIX@@|${prefix}|g" ${worksrcpath}/${name}
    reinplace "s|^#!.*/perl.*$|#!${perl5.bin}|" ${worksrcpath}/${name}
}

# Pure-Perl script: nothing to configure or build.
use_configure       no
build               {}

# Install the script, a sample configuration, and documentation.
destroot {
    file mkdir ${destroot}${prefix}/etc/${name}
    xinstall -m 555 ${worksrcpath}/sample-etc_ddclient.conf \
        ${destroot}${prefix}/etc/${name}/ddclient.conf.sample
    # Point the sample config's pid/cache location at the MacPorts prefix.
    reinplace "s|/var/run|${prefix}/var/run|" \
        ${destroot}${prefix}/etc/${name}/ddclient.conf.sample
    xinstall -m 755 ${worksrcpath}/${name} ${destroot}${prefix}/sbin
    xinstall -d ${destroot}${prefix}/share/doc/${name}
    file copy {*}[glob ${worksrcpath}/sample-*] \
        ${destroot}${prefix}/share/doc/${name}
    file copy {*}[glob ${worksrcpath}/COPY*] \
        ${destroot}${prefix}/share/doc/${name}
    file copy {*}[glob ${worksrcpath}/README*] \
        ${destroot}${prefix}/share/doc/${name}
    # Keep the (otherwise empty) runtime directory in the destroot.
    destroot.keepdirs ${destroot}${prefix}/var/run
}

# ddclient expects its cache directory to exist at runtime.
post-activate {
    file mkdir ${prefix}/var/cache/${name}
}

# Launchd startup item: start the daemon and stop it via its pid file.
startupitem.create  yes
startupitem.start   ${prefix}/sbin/${name}
startupitem.stop    "/bin/kill \$(cat ${prefix}/var/run/${name}.pid)"

livecheck.regex     "${name}-(\\d+\\.\\d+\\.\\d+)"
{ "pile_set_name": "Github" }
/**
 * Handles opening of and synchronization with the reveal.js
 * notes window.
 *
 * Opens plugin/notes/notes.html in a popup and forwards slide/fragment
 * change events to it via window.postMessage.
 */
var RevealNotes = (function() {

	function openNotes() {
		var notesPopup = window.open( 'plugin/notes/notes.html', 'reveal.js - Notes', 'width=1120,height=850' );

		// Fires when slide is changed
		Reveal.addEventListener( 'slidechanged', function( event ) {
			post('slidechanged');
		} );

		// Fires when a fragment is shown
		Reveal.addEventListener( 'fragmentshown', function( event ) {
			post('fragmentshown');
		} );

		// Fires when a fragment is hidden
		Reveal.addEventListener( 'fragmenthidden', function( event ) {
			post('fragmenthidden');
		} );

		/**
		 * Posts the current slide data to the notes window.
		 *
		 * @param {String} eventType Expecting 'slidechanged', 'fragmentshown'
		 * or 'fragmenthidden' set in the events above to define the needed
		 * slideData.
		 */
		function post( eventType ) {
			var slideElement = Reveal.getCurrentSlide(),
				messageData;

			if( eventType === 'slidechanged' ) {
				var notes = slideElement.querySelector( 'aside.notes' ),
					indexh = Reveal.getIndices().h,
					indexv = Reveal.getIndices().v,
					nextindexh,
					nextindexv;

				// A following sibling inside a SECTION parent means the next
				// slide is the next vertical slide of the current stack;
				// otherwise we advance horizontally.
				if( slideElement.nextElementSibling && slideElement.parentNode.nodeName == 'SECTION' ) {
					nextindexh = indexh;
					nextindexv = indexv + 1;
				} else {
					nextindexh = indexh + 1;
					nextindexv = 0;
				}

				messageData = {
					notes : notes ? notes.innerHTML : '',
					indexh : indexh,
					indexv : indexv,
					nextindexh : nextindexh,
					nextindexv : nextindexv,
					// true when the notes element carries a data-markdown attribute
					markdown : notes ? typeof notes.getAttribute( 'data-markdown' ) === 'string' : false
				};
			}
			else if( eventType === 'fragmentshown' ) {
				messageData = { fragment : 'next' };
			}
			else if( eventType === 'fragmenthidden' ) {
				messageData = { fragment : 'prev' };
			}

			// NOTE(review): for any other eventType, messageData is undefined
			// and "undefined" would be posted; callers above only use the
			// three known types. Target origin '*' assumes the popup content
			// is trusted.
			notesPopup.postMessage( JSON.stringify( messageData ), '*' );
		}

		// Navigate to the current slide when the notes are loaded
		notesPopup.addEventListener( 'load', function( event ) {
			post('slidechanged');
		}, false );
	}

	// If there's a 'notes' query set, open directly
	if( window.location.search.match( /(\?|\&)notes/gi ) !== null ) {
		openNotes();
	}

	// Open the notes when the 's' key is hit
	document.addEventListener( 'keydown', function( event ) {
		// Disregard the event if the target is editable or a
		// modifier is present
		if ( document.querySelector( ':focus' ) !== null || event.shiftKey || event.altKey || event.ctrlKey || event.metaKey ) return;

		// keyCode 83 corresponds to the 's' key
		if( event.keyCode === 83 ) {
			event.preventDefault();
			openNotes();
		}
	}, false );

	return { open: openNotes };
})();
{ "pile_set_name": "Github" }
'use strict';

/*
 Execute this script with:
 node -r esm invert.js
*/

// Image comes from the local source tree; this example is intended to be
// run from inside the repository.
const { Image } = require('../../src');

// A 1x2 RGBA image: each pixel is four channel values (R, G, B, A).
const image = new Image(1, 2, [230, 83, 120, 255, 100, 140, 13, 240]);

// Invert the image's colors. The original object is logged afterwards, so
// invert() is expected to modify the image in place — TODO confirm against
// the Image#invert API.
image.invert();

console.log(image);
{ "pile_set_name": "Github" }
# Points Django at the AppConfig subclass to use for this test application.
# NOTE(review): ``default_app_config`` is deprecated since Django 3.2 and
# removed in Django 4.1 — kept here for compatibility with older versions;
# confirm against the project's supported Django range.
default_app_config = 'select2_many_to_many.apps.TestApp'
{ "pile_set_name": "Github" }
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs/contributors
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package spray.json

import org.scalatest._
import Matchers._

import JsWriter.ops._
import JsReader.ops._

// Exercises the standard JsWriter/JsReader instances for Option and Either:
// None maps to JsNull and Some(x) to the underlying JSON value; an Either
// serialises whichever side it holds (Left(42) -> JsNumber(42),
// Right("Hello") -> JsString("Hello")). Reads return Right(...) on success.
class StandardFormatsSpec extends WordSpec {

  "The optionFormat" should {
    "convert None to JsNull" in {
      None.asInstanceOf[Option[Int]].toJson shouldEqual JsNull
    }
    "convert JsNull to None" in {
      JsNull.as[Option[Int]] shouldEqual Right(None)
    }
    "convert Some(Hello) to JsString(Hello)" in {
      Some("Hello").asInstanceOf[Option[String]].toJson shouldEqual JsString(
        "Hello"
      )
    }
    "convert JsString(Hello) to Some(Hello)" in {
      JsString("Hello").as[Option[String]] shouldEqual Right(Some("Hello"))
    }
  }

  "The eitherFormat" should {
    // Shared fixtures: one value per side of the Either.
    val a: Either[Int, String] = Left(42)
    val b: Either[Int, String] = Right("Hello")

    "convert the left side of an Either value to Json" in {
      a.toJson shouldEqual JsNumber(42)
    }
    "convert the right side of an Either value to Json" in {
      b.toJson shouldEqual JsString("Hello")
    }
    "convert the left side of an Either value from Json" in {
      JsNumber(42).as[Either[Int, String]] shouldEqual Right(Left(42))
    }
    "convert the right side of an Either value from Json" in {
      JsString("Hello").as[Either[Int, String]] shouldEqual Right(
        Right(
          "Hello"
        )
      )
    }
  }
}
{ "pile_set_name": "Github" }
/**************************************************************************** ** ** Copyright (C) 2016 The Qt Company Ltd. ** Copyright (C) 2016 Intel Corporation. ** Contact: https://www.qt.io/licensing/ ** ** This file is part of the QtCore module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see https://www.qt.io/terms-conditions. For further ** information use the contact form at https://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 3 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL3 included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 3 requirements ** will be met: https://www.gnu.org/licenses/lgpl-3.0.html. ** ** GNU General Public License Usage ** Alternatively, this file may be used under the terms of the GNU ** General Public License version 2.0 or (at your option) the GNU General ** Public license version 3 or any later version approved by the KDE Free ** Qt Foundation. The licenses are as published by the Free Software ** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3 ** included in the packaging of this file. Please review the following ** information to ensure the GNU General Public License requirements will ** be met: https://www.gnu.org/licenses/gpl-2.0.html and ** https://www.gnu.org/licenses/gpl-3.0.html. 
**
** $QT_END_LICENSE$
**
****************************************************************************/

#include <QtCore/qglobal.h>

#ifndef QATOMIC_H
#define QATOMIC_H

#include <QtCore/qbasicatomic.h>

QT_BEGIN_NAMESPACE

QT_WARNING_PUSH
QT_WARNING_DISABLE_GCC("-Wextra")

// High-level atomic integer operations
//
// Adds constructors, copy and assignment on top of QBasicAtomicInteger<T>.
// The actual atomic operations are inherited from the base class; the
// declarations inside #ifdef Q_QDOC below exist only so qdoc can document
// the full API and are never compiled into real builds.
template <typename T>
class QAtomicInteger : public QBasicAtomicInteger<T>
{
public:
    // Non-atomic API
#ifdef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
    constexpr QAtomicInteger(T value = 0) Q_DECL_NOTHROW : QBasicAtomicInteger<T>(value) {}
#else
    inline QAtomicInteger(T value = 0) Q_DECL_NOTHROW
    {
        this->_q_value = value;
    }
#endif

    // Copy uses acquire on the source and release on the destination so the
    // copied value is a properly published snapshot.
    inline QAtomicInteger(const QAtomicInteger &other) Q_DECL_NOTHROW
#ifdef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
        : QBasicAtomicInteger<T>()
#endif
    {
        this->storeRelease(other.loadAcquire());
    }

    inline QAtomicInteger &operator=(const QAtomicInteger &other) Q_DECL_NOTHROW
    {
        this->storeRelease(other.loadAcquire());
        return *this;
    }

#ifdef Q_QDOC
    T load() const;
    T loadAcquire() const;
    void store(T newValue);
    void storeRelease(T newValue);

    operator T() const;
    QAtomicInteger &operator=(T);

    static Q_DECL_CONSTEXPR bool isReferenceCountingNative();
    static Q_DECL_CONSTEXPR bool isReferenceCountingWaitFree();

    bool ref();
    bool deref();

    static Q_DECL_CONSTEXPR bool isTestAndSetNative();
    static Q_DECL_CONSTEXPR bool isTestAndSetWaitFree();

    bool testAndSetRelaxed(T expectedValue, T newValue);
    bool testAndSetAcquire(T expectedValue, T newValue);
    bool testAndSetRelease(T expectedValue, T newValue);
    bool testAndSetOrdered(T expectedValue, T newValue);

    static Q_DECL_CONSTEXPR bool isFetchAndStoreNative();
    static Q_DECL_CONSTEXPR bool isFetchAndStoreWaitFree();

    T fetchAndStoreRelaxed(T newValue);
    T fetchAndStoreAcquire(T newValue);
    T fetchAndStoreRelease(T newValue);
    T fetchAndStoreOrdered(T newValue);

    static Q_DECL_CONSTEXPR bool isFetchAndAddNative();
    static Q_DECL_CONSTEXPR bool isFetchAndAddWaitFree();

    T fetchAndAddRelaxed(T valueToAdd);
    T fetchAndAddAcquire(T valueToAdd);
    T fetchAndAddRelease(T valueToAdd);
    T fetchAndAddOrdered(T valueToAdd);

    T fetchAndSubRelaxed(T valueToSub);
    T fetchAndSubAcquire(T valueToSub);
    T fetchAndSubRelease(T valueToSub);
    T fetchAndSubOrdered(T valueToSub);

    T fetchAndOrRelaxed(T valueToOr);
    T fetchAndOrAcquire(T valueToOr);
    T fetchAndOrRelease(T valueToOr);
    T fetchAndOrOrdered(T valueToOr);

    T fetchAndAndRelaxed(T valueToAnd);
    T fetchAndAndAcquire(T valueToAnd);
    T fetchAndAndRelease(T valueToAnd);
    T fetchAndAndOrdered(T valueToAnd);

    T fetchAndXorRelaxed(T valueToXor);
    T fetchAndXorAcquire(T valueToXor);
    T fetchAndXorRelease(T valueToXor);
    T fetchAndXorOrdered(T valueToXor);

    T operator++();
    T operator++(int);
    T operator--();
    T operator--(int);
    T operator+=(T value);
    T operator-=(T value);
    T operator|=(T value);
    T operator&=(T value);
    T operator^=(T value);
#endif
};

// Convenience alias-like class for the most common case: atomic int.
class QAtomicInt : public QAtomicInteger<int>
{
public:
    // Non-atomic API
    // We could use QT_COMPILER_INHERITING_CONSTRUCTORS, but we need only one;
    // the implicit definition for all the others is fine.
#ifdef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
    constexpr
#endif
    QAtomicInt(int value = 0) Q_DECL_NOTHROW : QAtomicInteger<int>(value) {}
};

// High-level atomic pointer operations
//
// Same pattern as QAtomicInteger: convenience constructors/copy/assignment
// over QBasicAtomicPointer<T>, with documentation-only declarations under
// Q_QDOC.
template <typename T>
class QAtomicPointer : public QBasicAtomicPointer<T>
{
public:
#ifdef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
    constexpr QAtomicPointer(T *value = 0) Q_DECL_NOTHROW : QBasicAtomicPointer<T>(value) {}
#else
    inline QAtomicPointer(T *value = 0) Q_DECL_NOTHROW
    {
        this->store(value);
    }
#endif

    inline QAtomicPointer(const QAtomicPointer<T> &other) Q_DECL_NOTHROW
#ifdef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
        : QBasicAtomicPointer<T>()
#endif
    {
        this->storeRelease(other.loadAcquire());
    }

    inline QAtomicPointer<T> &operator=(const QAtomicPointer<T> &other) Q_DECL_NOTHROW
    {
        this->storeRelease(other.loadAcquire());
        return *this;
    }

#ifdef Q_QDOC
    T *load() const;
    T *loadAcquire() const;
    void store(T *newValue);
    void storeRelease(T *newValue);

    static Q_DECL_CONSTEXPR bool isTestAndSetNative();
    static Q_DECL_CONSTEXPR bool isTestAndSetWaitFree();

    bool testAndSetRelaxed(T *expectedValue, T *newValue);
    bool testAndSetAcquire(T *expectedValue, T *newValue);
    bool testAndSetRelease(T *expectedValue, T *newValue);
    bool testAndSetOrdered(T *expectedValue, T *newValue);

    static Q_DECL_CONSTEXPR bool isFetchAndStoreNative();
    static Q_DECL_CONSTEXPR bool isFetchAndStoreWaitFree();

    T *fetchAndStoreRelaxed(T *newValue);
    T *fetchAndStoreAcquire(T *newValue);
    T *fetchAndStoreRelease(T *newValue);
    T *fetchAndStoreOrdered(T *newValue);

    static Q_DECL_CONSTEXPR bool isFetchAndAddNative();
    static Q_DECL_CONSTEXPR bool isFetchAndAddWaitFree();

    T *fetchAndAddRelaxed(qptrdiff valueToAdd);
    T *fetchAndAddAcquire(qptrdiff valueToAdd);
    T *fetchAndAddRelease(qptrdiff valueToAdd);
    T *fetchAndAddOrdered(qptrdiff valueToAdd);
#endif
};

QT_WARNING_POP

#ifdef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
# undef QT_BASIC_ATOMIC_HAS_CONSTRUCTORS
#endif

/*!
    This is a helper for the assignment operators of implicitly
    shared classes. Your assignment operator should look like this:

    \snippet code/src.corelib.thread.qatomic.h 0
*/
template <typename T>
inline void qAtomicAssign(T *&d, T *x)
{
    if (d == x)
        return;
    // Ref the new value before deref'ing the old one; deleting the old
    // object when its refcount drops to zero.
    x->ref.ref();
    if (!d->ref.deref())
        delete d;
    d = x;
}

/*!
    This is a helper for the detach method of implicitly shared
    classes. Your private class needs a copy constructor which copies
    the members and sets the refcount to 1. After that, your detach
    function should look like this:

    \snippet code/src.corelib.thread.qatomic.h 1
*/
template <typename T>
inline void qAtomicDetach(T *&d)
{
    // Already the sole owner: nothing to detach.
    if (d->ref.load() == 1)
        return;
    T *x = d;
    d = new T(*d);
    if (!x->ref.deref())
        delete x;
}

QT_END_NAMESPACE
#endif // QATOMIC_H
{ "pile_set_name": "Github" }
/*
 *  Copyright 2016-2019 Netflix, Inc.
 *
 *     Licensed under the Apache License, Version 2.0 (the "License");
 *     you may not use this file except in compliance with the License.
 *     You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 *     Unless required by applicable law or agreed to in writing, software
 *     distributed under the License is distributed on an "AS IS" BASIS,
 *     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *     See the License for the specific language governing permissions and
 *     limitations under the License.
 *
 */
package com.netflix.hollow.core.type.delegate;

import com.netflix.hollow.api.objects.delegate.HollowObjectDelegate;
import com.netflix.hollow.core.type.BooleanTypeAPI;

/**
 * Delegate giving access to the boolean value of a Hollow Boolean record,
 * addressed by its ordinal.
 */
public interface BooleanDelegate extends HollowObjectDelegate {

    /** Returns the primitive boolean value of the record at the given ordinal. */
    public boolean getValue(int ordinal);

    /** Returns the boxed {@link Boolean} value of the record at the given ordinal. */
    public Boolean getValueBoxed(int ordinal);

    /** Returns the type API for the Boolean type. */
    @Override
    public BooleanTypeAPI getTypeAPI();

}
{ "pile_set_name": "Github" }
<?php

/*
 * This file is part of the Symfony package.
 *
 * (c) Fabien Potencier <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Symfony\Component\Form;

use Symfony\Component\Form\Exception\BadMethodCallException;

/**
 * Wraps errors in forms.
 *
 * @author Bernhard Schussek <[email protected]>
 */
class FormError
{
    // The untranslated template plus its parameters and pluralization value
    // are kept alongside the translated $message so the error can be
    // re-rendered/re-translated by consumers.
    protected $messageTemplate;
    protected $messageParameters;
    protected $messagePluralization;

    private $message;
    private $cause;

    /**
     * The form that spawned this error.
     *
     * @var FormInterface
     */
    private $origin;

    /**
     * Any array key in $messageParameters will be used as a placeholder in
     * $messageTemplate.
     *
     * @param string      $message              The translated error message
     * @param string|null $messageTemplate      The template for the error message
     * @param array       $messageParameters    The parameters that should be
     *                                          substituted in the message template
     * @param int|null    $messagePluralization The value for error message pluralization
     * @param mixed       $cause                The cause of the error
     *
     * @see \Symfony\Component\Translation\Translator
     */
    public function __construct(string $message, string $messageTemplate = null, array $messageParameters = [], int $messagePluralization = null, $cause = null)
    {
        $this->message = $message;
        // When no template is given, the translated message doubles as the template.
        $this->messageTemplate = $messageTemplate ?: $message;
        $this->messageParameters = $messageParameters;
        $this->messagePluralization = $messagePluralization;
        $this->cause = $cause;
    }

    /**
     * Returns the error message.
     *
     * @return string
     */
    public function getMessage()
    {
        return $this->message;
    }

    /**
     * Returns the error message template.
     *
     * @return string
     */
    public function getMessageTemplate()
    {
        return $this->messageTemplate;
    }

    /**
     * Returns the parameters to be inserted in the message template.
     *
     * @return array
     */
    public function getMessageParameters()
    {
        return $this->messageParameters;
    }

    /**
     * Returns the value for error message pluralization.
     *
     * @return int|null
     */
    public function getMessagePluralization()
    {
        return $this->messagePluralization;
    }

    /**
     * Returns the cause of this error.
     *
     * @return mixed The cause of this error
     */
    public function getCause()
    {
        return $this->cause;
    }

    /**
     * Sets the form that caused this error.
     *
     * This method must only be called once.
     *
     * @throws BadMethodCallException If the method is called more than once
     */
    public function setOrigin(FormInterface $origin)
    {
        if (null !== $this->origin) {
            throw new BadMethodCallException('setOrigin() must only be called once.');
        }

        $this->origin = $origin;
    }

    /**
     * Returns the form that caused this error.
     *
     * @return FormInterface|null The form that caused this error
     */
    public function getOrigin()
    {
        return $this->origin;
    }
}
{ "pile_set_name": "Github" }
// Copyright DApps Platform Inc. All rights reserved.

import Foundation
import UIKit
import Eureka
import QRCodeReaderViewController
import TrustCore

/// Callbacks for the main-wallet import flow: either a wallet was imported
/// from a mnemonic, or the user skipped the step.
protocol ImportMainWalletViewControllerDelegate: class {
    func didImportWallet(wallet: WalletInfo, in controller: ImportMainWalletViewController)
    func didSkipImport(in controller: ImportMainWalletViewController)
}

/// Eureka form screen that imports the "main" wallet from a backup phrase
/// (mnemonic), optionally scanned via QR code. On success it derives accounts
/// for all configured networks and notifies the delegate.
final class ImportMainWalletViewController: FormViewController {

    let keystore: Keystore

    /// Row tags used to look form rows back up via `form.rowBy(tag:)`.
    struct Values {
        static let mnemonic = "mnemonic"
        static let password = "password"
    }

    /// Text area holding the backup phrase entered/scanned by the user.
    private var mnemonicRow: TextAreaRow? {
        return form.rowBy(tag: Values.mnemonic)
    }

    // Declared but no password row is added to the form in viewDidLoad;
    // the password lookup below always resolves to nil.
    private var passwordRow: TextFloatLabelRow? {
        return form.rowBy(tag: Values.password)
    }

    weak var delegate: ImportMainWalletViewControllerDelegate?

    init(
        keystore: Keystore
    ) {
        self.keystore = keystore
        super.init(nibName: nil, bundle: nil)
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        title = R.string.localizable.importMainWallet()

        // Right bar: "Skip" plus a QR-code scanner shortcut.
        navigationItem.rightBarButtonItems = [
            UIBarButtonItem(title: R.string.localizable.skip(), style: .plain, target: self, action: #selector(skip)),
            UIBarButtonItem(image: R.image.qr_code_icon(), style: .done, target: self, action: #selector(openReader)),
        ]

        form
            +++ Section()

            // Mnemonic
            +++ Section(footer: ImportSelectionType.mnemonic.footerTitle)
            <<< AppFormAppearance.textArea(tag: Values.mnemonic) {
                $0.placeholder = R.string.localizable.backupPhrase()
                $0.textAreaHeight = .fixed(cellHeight: 140)
                $0.add(rule: RuleRequired())
                $0.cell.textView?.autocapitalizationType = .none
            }

            +++ Section()
            <<< ButtonRow(R.string.localizable.importWalletImportButtonTitle()) {
                $0.title = $0.tag
            }.onCellSelection { [weak self] _, _ in
                self?.importWallet()
            }
    }

    /// Forwards a successfully imported wallet to the delegate.
    func didImport(account: WalletInfo) {
        delegate?.didImportWallet(wallet: account, in: self)
    }

    /// Validates the form, normalises the mnemonic (split on spaces,
    /// trimmed, lowercased) and imports it on a background queue for the
    /// Ethereum derivation path at index 0.
    func importWallet() {
        let validatedError = mnemonicRow?.section?.form?.validate()
        guard let errors = validatedError, errors.isEmpty else { return }

        // Password intentionally empty; the commented expression shows the
        // previous source of the value. NOTE(review): confirm an empty
        // passphrase is intended for all import paths.
        let password = ""//passwordRow?.value ?? ""
        let mnemonicInput = mnemonicRow?.value?.trimmed ?? ""
        let words = mnemonicInput.components(separatedBy: " ").map { $0.trimmed.lowercased() }

        displayLoading(text: R.string.localizable.importWalletImportingIndicatorLabelTitle(), animated: false)

        let importType = ImportType.mnemonic(words: words, password: password, derivationPath: Coin.ethereum.derivationPath(at: 0))

        DispatchQueue.global(qos: .userInitiated).async {
            self.keystore.importWallet(type: importType, coin: .ethereum) { result in
                switch result {
                case .success(let account):
                    // Derive the remaining per-network accounts before
                    // reporting success on the main queue.
                    self.addWallets(wallet: account)
                    DispatchQueue.main.async {
                        self.hideLoading(animated: false)
                        self.didImport(account: account)
                    }
                case .failure(let error):
                    DispatchQueue.main.async {
                        self.hideLoading(animated: false)
                        self.displayError(error: error)
                    }
                }
            }
        }
    }

    /// Adds accounts for every configured server's derivation path (index 0).
    /// Returns false when the wallet has no current underlying wallet.
    @discardableResult func addWallets(wallet: WalletInfo) -> Bool {
        // Create coins based on supported networks
        guard let w = wallet.currentWallet else {
            return false
        }
        let derivationPaths = Config.current.servers.map { $0.derivationPath(at: 0) }
        let _ = keystore.addAccount(to: w, derivationPaths: derivationPaths)
        return true
    }

    /// Presents the QR scanner used to fill the mnemonic field.
    @objc func openReader() {
        let controller = QRCodeReaderViewController()
        controller.delegate = self
        present(controller, animated: true, completion: nil)
    }

    @objc private func skip() {
        self.delegate?.didSkipImport(in: self)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}

extension ImportMainWalletViewController: QRCodeReaderDelegate {

    func readerDidCancel(_ reader: QRCodeReaderViewController!) {
        reader.stopScanning()
        reader.dismiss(animated: true, completion: nil)
    }

    /// Copies the scanned payload into the mnemonic row and dismisses the reader.
    func reader(_ reader: QRCodeReaderViewController!, didScanResult result: String!) {
        reader.stopScanning()
        mnemonicRow?.value = result
        mnemonicRow?.reload()
        reader.dismiss(animated: true)
    }
}
{ "pile_set_name": "Github" }
/* * XFree86 vbe module * Copyright 2000 Egbert Eich * * The mode query/save/set/restore functions from the vesa driver * have been moved here. * Copyright (c) 2000 by Conectiva S.A. (http://www.conectiva.com) * Authors: Paulo César Pereira de Andrade <[email protected]> */ #ifndef _VBE_H #define _VBE_H #include "xf86int10.h" #include "xf86DDC.h" typedef enum { DDC_UNCHECKED, DDC_NONE, DDC_1, DDC_2, DDC_1_2 } ddc_lvl; typedef struct { xf86Int10InfoPtr pInt10; int version; pointer memory; int real_mode_base; int num_pages; Bool init_int10; ddc_lvl ddc; Bool ddc_blank; } vbeInfoRec, *vbeInfoPtr; #define VBE_VERSION_MAJOR(x) *((CARD8*)(&x) + 1) #define VBE_VERSION_MINOR(x) (CARD8)(x) vbeInfoPtr VBEInit(xf86Int10InfoPtr pInt, int entityIndex); vbeInfoPtr VBEExtendedInit(xf86Int10InfoPtr pInt, int entityIndex, int Flags); void vbeFree(vbeInfoPtr pVbe); xf86MonPtr vbeDoEDID(vbeInfoPtr pVbe, pointer pDDCModule); #pragma pack(1) typedef struct vbeControllerInfoBlock { CARD8 VbeSignature[4]; CARD16 VbeVersion; CARD32 OemStringPtr; CARD8 Capabilities[4]; CARD32 VideoModePtr; CARD16 TotalMem; CARD16 OemSoftwareRev; CARD32 OemVendorNamePtr; CARD32 OemProductNamePtr; CARD32 OemProductRevPtr; CARD8 Scratch[222]; CARD8 OemData[256]; } vbeControllerInfoRec, *vbeControllerInfoPtr; #if defined(__GNUC__) || defined(__USLC__) || defined(__SUNPRO_C) #pragma pack() /* All GCC versions recognise this syntax */ #else #pragma pack(0) #endif #ifndef __GNUC__ #define __attribute__(a) #endif typedef struct _VbeInfoBlock VbeInfoBlock; typedef struct _VbeModeInfoBlock VbeModeInfoBlock; typedef struct _VbeCRTCInfoBlock VbeCRTCInfoBlock; /* * INT 0 */ struct _VbeInfoBlock { /* VESA 1.2 fields */ CARD8 VESASignature[4]; /* VESA */ CARD16 VESAVersion; /* Higher byte major, lower byte minor */ /*CARD32*/char *OEMStringPtr; /* Pointer to OEM string */ CARD8 Capabilities[4]; /* Capabilities of the video environment */ /*CARD32*/CARD16 *VideoModePtr; /* pointer to supported Super VGA modes */ CARD16 
TotalMemory; /* Number of 64kb memory blocks on board */ /* if not VESA 2, 236 scratch bytes follow (256 bytes total size) */ /* VESA 2 fields */ CARD16 OemSoftwareRev; /* VBE implementation Software revision */ /*CARD32*/char *OemVendorNamePtr; /* Pointer to Vendor Name String */ /*CARD32*/char *OemProductNamePtr; /* Pointer to Product Name String */ /*CARD32*/char *OemProductRevPtr; /* Pointer to Product Revision String */ CARD8 Reserved[222]; /* Reserved for VBE implementation */ CARD8 OemData[256]; /* Data Area for OEM Strings */ } __attribute__((packed)); /* Return Super VGA Information */ VbeInfoBlock *VBEGetVBEInfo(vbeInfoPtr pVbe); void VBEFreeVBEInfo(VbeInfoBlock *block); /* * INT 1 */ struct _VbeModeInfoBlock { CARD16 ModeAttributes; /* mode attributes */ CARD8 WinAAttributes; /* window A attributes */ CARD8 WinBAttributes; /* window B attributes */ CARD16 WinGranularity; /* window granularity */ CARD16 WinSize; /* window size */ CARD16 WinASegment; /* window A start segment */ CARD16 WinBSegment; /* window B start segment */ CARD32 WinFuncPtr; /* real mode pointer to window function */ CARD16 BytesPerScanline; /* bytes per scanline */ /* Mandatory information for VBE 1.2 and above */ CARD16 XResolution; /* horizontal resolution in pixels or characters */ CARD16 YResolution; /* vertical resolution in pixels or characters */ CARD8 XCharSize; /* character cell width in pixels */ CARD8 YCharSize; /* character cell height in pixels */ CARD8 NumberOfPlanes; /* number of memory planes */ CARD8 BitsPerPixel; /* bits per pixel */ CARD8 NumberOfBanks; /* number of banks */ CARD8 MemoryModel; /* memory model type */ CARD8 BankSize; /* bank size in KB */ CARD8 NumberOfImages; /* number of images */ CARD8 Reserved; /* 1 */ /* reserved for page function */ /* Direct color fields (required for direct/6 and YUV/7 memory models) */ CARD8 RedMaskSize; /* size of direct color red mask in bits */ CARD8 RedFieldPosition; /* bit position of lsb of red mask */ CARD8 
GreenMaskSize; /* size of direct color green mask in bits */ CARD8 GreenFieldPosition; /* bit position of lsb of green mask */ CARD8 BlueMaskSize; /* size of direct color blue mask in bits */ CARD8 BlueFieldPosition; /* bit position of lsb of blue mask */ CARD8 RsvdMaskSize; /* size of direct color reserved mask in bits */ CARD8 RsvdFieldPosition; /* bit position of lsb of reserved mask */ CARD8 DirectColorModeInfo; /* direct color mode attributes */ /* Mandatory information for VBE 2.0 and above */ CARD32 PhysBasePtr; /* physical address for flat memory frame buffer */ CARD32 Reserved32; /* 0 */ /* Reserved - always set to 0 */ CARD16 Reserved16; /* 0 */ /* Reserved - always set to 0 */ /* Mandatory information for VBE 3.0 and above */ CARD16 LinBytesPerScanLine; /* bytes per scan line for linear modes */ CARD8 BnkNumberOfImagePages; /* number of images for banked modes */ CARD8 LinNumberOfImagePages; /* number of images for linear modes */ CARD8 LinRedMaskSize; /* size of direct color red mask (linear modes) */ CARD8 LinRedFieldPosition; /* bit position of lsb of red mask (linear modes) */ CARD8 LinGreenMaskSize; /* size of direct color green mask (linear modes) */ CARD8 LinGreenFieldPosition; /* bit position of lsb of green mask (linear modes) */ CARD8 LinBlueMaskSize; /* size of direct color blue mask (linear modes) */ CARD8 LinBlueFieldPosition; /* bit position of lsb of blue mask (linear modes) */ CARD8 LinRsvdMaskSize; /* size of direct color reserved mask (linear modes) */ CARD8 LinRsvdFieldPosition; /* bit position of lsb of reserved mask (linear modes) */ CARD32 MaxPixelClock; /* maximum pixel clock (in Hz) for graphics mode */ CARD8 Reserved2[189]; /* remainder of VbeModeInfoBlock */ } __attribute__((packed)); /* Return VBE Mode Information */ VbeModeInfoBlock *VBEGetModeInfo(vbeInfoPtr pVbe, int mode); void VBEFreeModeInfo(VbeModeInfoBlock *block); /* * INT2 */ #define CRTC_DBLSCAN (1<<0) #define CRTC_INTERLACE (1<<1) #define CRTC_NHSYNC (1<<2) #define 
CRTC_NVSYNC (1<<3) struct _VbeCRTCInfoBlock { CARD16 HorizontalTotal; /* Horizontal total in pixels */ CARD16 HorizontalSyncStart; /* Horizontal sync start in pixels */ CARD16 HorizontalSyncEnd; /* Horizontal sync end in pixels */ CARD16 VerticalTotal; /* Vertical total in lines */ CARD16 VerticalSyncStart; /* Vertical sync start in lines */ CARD16 VerticalSyncEnd; /* Vertical sync end in lines */ CARD8 Flags; /* Flags (Interlaced, Double Scan etc) */ CARD32 PixelClock; /* Pixel clock in units of Hz */ CARD16 RefreshRate; /* Refresh rate in units of 0.01 Hz */ CARD8 Reserved[40]; /* remainder of ModeInfoBlock */ } __attribute__((packed)); /* VbeCRTCInfoBlock is in the VESA 3.0 specs */ Bool VBESetVBEMode(vbeInfoPtr pVbe, int mode, VbeCRTCInfoBlock *crtc); /* * INT 3 */ Bool VBEGetVBEMode(vbeInfoPtr pVbe, int *mode); /* * INT 4 */ /* Save/Restore Super VGA video state */ /* function values are (values stored in VESAPtr): * 0 := query & allocate amount of memory to save state * 1 := save state * 2 := restore state * * function 0 called automatically if function 1 called without * a previous call to function 0. 
*/ typedef enum { MODE_QUERY, MODE_SAVE, MODE_RESTORE } vbeSaveRestoreFunction; Bool VBESaveRestore(vbeInfoPtr pVbe, vbeSaveRestoreFunction fuction, pointer *memory, int *size, int *real_mode_pages); /* * INT 5 */ Bool VBEBankSwitch(vbeInfoPtr pVbe, unsigned int iBank, int window); /* * INT 6 */ typedef enum { SCANWID_SET, SCANWID_GET, SCANWID_SET_BYTES, SCANWID_GET_MAX } vbeScanwidthCommand; #define VBESetLogicalScanline(pVbe, width) \ VBESetGetLogicalScanlineLength(pVbe, SCANWID_SET, width, \ NULL, NULL, NULL) #define VBESetLogicalScanlineBytes(pVbe, width) \ VBESetGetLogicalScanlineLength(pVbe, SCANWID_SET_BYTES, width, \ NULL, NULL, NULL) #define VBEGetLogicalScanline(pVbe, pixels, bytes, max) \ VBESetGetLogicalScanlineLength(pVbe, SCANWID_GET, 0, \ pixels, bytes, max) #define VBEGetMaxLogicalScanline(pVbe, pixels, bytes, max) \ VBESetGetLogicalScanlineLength(pVbe, SCANWID_GET_MAX, 0, \ pixels, bytes, max) Bool VBESetGetLogicalScanlineLength(vbeInfoPtr pVbe, vbeScanwidthCommand command, int width, int *pixels, int *bytes, int *max); /* * INT 7 */ /* 16 bit code */ Bool VBESetDisplayStart(vbeInfoPtr pVbe, int x, int y, Bool wait_retrace); Bool VBEGetDisplayStart(vbeInfoPtr pVbe, int *x, int *y); /* * INT 8 */ /* if bits is 0, then it is a GET */ int VBESetGetDACPaletteFormat(vbeInfoPtr pVbe, int bits); /* * INT 9 */ /* * If getting a palette, the data argument is not used. It will return * the data. * If setting a palette, it will return the pointer received on success, * NULL on failure. 
*/ CARD32 *VBESetGetPaletteData(vbeInfoPtr pVbe, Bool set, int first, int num, CARD32 *data, Bool secondary, Bool wait_retrace); #define VBEFreePaletteData(data) xfree(data) /* * INT A */ typedef struct _VBEpmi { int seg_tbl; int tbl_off; int tbl_len; } VBEpmi; VBEpmi *VBEGetVBEpmi(vbeInfoPtr pVbe); #define VESAFreeVBEpmi(pmi) xfree(pmi) /* high level helper functions */ typedef struct _vbeModeInfoRec { int width; int height; int bpp; int n; struct _vbeModeInfoRec *next; } vbeModeInfoRec, *vbeModeInfoPtr; vbeModeInfoPtr VBEBuildVbeModeList(vbeInfoPtr pVbe, VbeInfoBlock *vbe); unsigned short VBECalcVbeModeIndex(vbeModeInfoPtr m, DisplayModePtr mode, int bpp); typedef struct { CARD8 *state; CARD8 *pstate; int statePage; int stateSize; int stateMode; } vbeSaveRestoreRec, *vbeSaveRestorePtr; void VBEVesaSaveRestore(vbeInfoPtr pVbe, vbeSaveRestorePtr vbe_sr, vbeSaveRestoreFunction function); int VBEGetPixelClock(vbeInfoPtr pVbe, int mode, int Clock); Bool VBEDPMSSet(vbeInfoPtr pVbe, int mode); struct vbePanelID { short hsize; short vsize; short fptype; char redbpp; char greenbpp; char bluebpp; char reservedbpp; int reserved_offscreen_mem_size; int reserved_offscreen_mem_pointer; char reserved[14]; }; void VBEInterpretPanelID(int scrnIndex, struct vbePanelID *data); struct vbePanelID *VBEReadPanelID(vbeInfoPtr pVbe); #endif
{ "pile_set_name": "Github" }
{ "timeStamp": 1566348244578, "date": "2019-08-21", "name": "Sreekrishnapuram  Grama Panchayat, Palakkad District", "district": "Palakkad", "block": "Sreekrishnapuram", "area": "29.56km", "localbody_code": "G090405", "no_of_wards": 14, "population": 17476, "male": 8153, "female": 9323, "population_density": 591, "sex_ratio": 1144, "literacy_rate": 90.58, "literacy_rate_male": 94.55, "literacy_rate_female": 87.16, "president": "SHAJUSANKARC.N", "wards": [ { "ward_number": "1", "name": "VALAMBILIMANGALAM", "person": { "id": "2015083000101", "name": "UNNIKRISHNAN V.C" } }, { "ward_number": "2", "name": "VALAMBILIMANGALAM EAST", "person": { "id": "2015083000201", "name": "VINOJ V" } }, { "ward_number": "3", "name": "EASWARAMANGALAM", "person": { "id": "2015083000301", "name": "MADHAVIKUTTY C" } }, { "ward_number": "4", "name": "SREEKRISHNAPURAM", "person": { "id": "2015083000401", "name": "SARIKA T" } }, { "ward_number": "5", "name": "MANNAMPATTA", "person": { "id": "2015083000501", "name": "RADHIKA V.K" } }, { "ward_number": "6", "name": "POOZHIYAPARAMBU", "person": { "id": "2015083000601", "name": "SHAJU SANKAR C.N" } }, { "ward_number": "7", "name": "KULAKKATTUKURSSI", "person": { "id": "2015083000701", "name": "RAJAN K" } }, { "ward_number": "8", "name": "PUNNAMPARAMBU", "person": { "id": "2015083000801", "name": "RAJIKA C" } }, { "ward_number": "9", "name": "THALAYINAKKADU", "person": { "id": "2015083000901", "name": "RATHNAKUMAR A.P" } }, { "ward_number": "10", "name": "PARTHALA", "person": { "id": "2015083001001", "name": "JAYASREE C" } }, { "ward_number": "11", "name": "MANGALAMKUNNU", "person": { "id": "2015083001101", "name": "GANGADHARAN P.K" } }, { "ward_number": "12", "name": "RAGAMCORNER", "person": { "id": "2015083001201", "name": "GIREESAN P" } }, { "ward_number": "13", "name": "CHANTHAPURA", "person": { "id": "2015083001301", "name": "RUGMINI M" } }, { "ward_number": "14", "name": "PERUMANGODE", "person": { "id": "2015083001401", "name": "USHAKUMARY" } 
} ] }
{ "pile_set_name": "Github" }
/*
 * JavaScript tracker for Snowplow: tests/functional/integration.spec.js
 *
 * Significant portions copyright 2010 Anthon Pang. Remainder copyright
 * 2012-2020 Snowplow Analytics Ltd. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * * Neither the name of Anthon Pang nor Snowplow Analytics Ltd nor the
 *   names of their contributors may be used to endorse or promote products
 *   derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import util from 'util'
import F from 'lodash/fp'
import { reset, fetchResults, start, stop } from '../micro'

// Debug helper for dumping the collected event log in full depth.
const dumpLog = log => console.log(util.inspect(log, true, null, true))

// Like F.isMatch, but a function on the right-hand side acts as a custom
// predicate for the corresponding left-hand value.
const isMatchWithCB = F.isMatchWith((lt, rt) =>
  F.isFunction(rt) ? rt(lt) : undefined
)

describe('Test that request_recorder logs meet expectations', () => {
  if (
    F.isMatch(
      { version: '12603.3.8', browserName: 'safari' },
      browser.capabilities
    )
  ) {
    // the safari driver sauce uses for safari 10 doesnt support
    // setting cookies, so this whole suite fails
    // https://github.com/webdriverio/webdriverio/issues/2004
    fit('skipping in safari 10', () => {})
  }

  // Events captured from the micro collector, populated in beforeAll.
  let log = []
  let container
  let containerUrl

  // True when at least one logged event matches the (possibly predicate-
  // bearing) partial event description.
  const logContains = ev => F.some(isMatchWithCB(ev), log)

  beforeAll(() => {
    // Start the collector container and discover its mapped host port.
    browser.call(() => {
      return start()
        .then(e => {
          container = e
          return container.inspect()
        })
        .then(info => {
          containerUrl =
            'snowplow-js-tracker.local:' +
            F.get('NetworkSettings.Ports["9090/tcp"][0].HostPort', info)
        })
    })
    // The page under test reads the collector endpoint from this cookie.
    browser.url('/index.html')
    browser.setCookies({ name: 'container', value: containerUrl })
    browser.url('/session-integration.html')
    browser.pause(15000) // Time for requests to get written
    browser.call(() =>
      fetchResults(containerUrl).then(r => {
        log = r
        return Promise.resolve()
      })
    )
  })

  afterAll(() => {
    log = []
    browser.call(() => {
      return stop(container)
    })
  })

  it('should count sessions using cookies', () => {
    expect(
      logContains({
        event: {
          parameters: {
            e: 'pv',
            tna: 'cookieSessionTracker',
            vid: '2',
          },
        },
      })
    ).toBe(true)
  })

  it('should count sessions using local storage', () => {
    expect(
      logContains({
        event: {
          parameters: {
            e: 'pv',
            tna: 'localStorageSessionTracker',
            vid: '2',
          },
        },
      })
    ).toBe(true)
  })

  it('should count sessions using anonymousSessionTracking', () => {
    expect(
      logContains({
        event: {
          parameters: {
            e: 'pv',
            tna: 'anonymousSessionTracker',
            vid: '2',
          },
        },
      })
    ).toBe(true)
  })

  it('should only increment vid outside of session timeout (local storage)', () => {
    const withSingleVid = ev =>
      F.get('event.parameters.tna', ev) === 'localStorageSessionTracker' &&
      F.get('event.parameters.vid', ev) === '1'

    expect(F.size(F.filter(withSingleVid, log))).toBe(2)
  })

  it('should only increment vid outside of session timeout (anonymous session tracking)', () => {
    const withSingleVid = ev =>
      F.get('event.parameters.tna', ev) === 'anonymousSessionTracker' &&
      F.get('event.parameters.vid', ev) === '1'

    expect(F.size(F.filter(withSingleVid, log))).toBe(2)
  })

  it('should only increment vid outside of session timeout (cookie storage)', () => {
    const withSingleVid = ev =>
      F.get('event.parameters.tna', ev) === 'cookieSessionTracker' &&
      F.get('event.parameters.vid', ev) === '1'

    expect(F.size(F.filter(withSingleVid, log))).toBe(2)
  })
})
{ "pile_set_name": "Github" }
{ "animation":{ "frametime":2 } }
{ "pile_set_name": "Github" }
/* * linux/mm/fremap.c * * Explicit pagetable population and nonlinear (random) mappings support. * * started by Ingo Molnar, Copyright (C) 2002, 2003 */ #include <linux/mm.h> #include <linux/swap.h> #include <linux/file.h> #include <linux/mman.h> #include <linux/pagemap.h> #include <linux/swapops.h> #include <linux/rmap.h> #include <linux/module.h> #include <linux/syscalls.h> #include <asm/mmu_context.h> #include <asm/cacheflush.h> #include <asm/tlbflush.h> static void zap_pte(struct mm_struct *mm, struct vm_area_struct *vma, unsigned long addr, pte_t *ptep) { pte_t pte = *ptep; if (pte_present(pte)) { struct page *page; flush_cache_page(vma, addr, pte_pfn(pte)); pte = ptep_clear_flush(vma, addr, ptep); page = vm_normal_page(vma, addr, pte); if (page) { if (pte_dirty(pte)) set_page_dirty(page); page_remove_rmap(page, vma); page_cache_release(page); update_hiwater_rss(mm); dec_mm_counter(mm, file_rss); } } else { if (!pte_file(pte)) free_swap_and_cache(pte_to_swp_entry(pte)); pte_clear_not_present_full(mm, addr, ptep, 0); } } /* * Install a file pte to a given virtual memory address, release any * previously existing mapping. */ static int install_file_pte(struct mm_struct *mm, struct vm_area_struct *vma, unsigned long addr, unsigned long pgoff, pgprot_t prot) { int err = -ENOMEM; pte_t *pte; spinlock_t *ptl; pte = get_locked_pte(mm, addr, &ptl); if (!pte) goto out; if (!pte_none(*pte)) zap_pte(mm, vma, addr, pte); set_pte_at(mm, addr, pte, pgoff_to_pte(pgoff)); /* * We don't need to run update_mmu_cache() here because the "file pte" * being installed by install_file_pte() is not a real pte - it's a * non-present entry (like a swap entry), noting what file offset should * be mapped there when there's a fault (in a non-linear vma where * that's not obvious). 
*/ pte_unmap_unlock(pte, ptl); err = 0; out: return err; } static int populate_range(struct mm_struct *mm, struct vm_area_struct *vma, unsigned long addr, unsigned long size, pgoff_t pgoff) { int err; do { err = install_file_pte(mm, vma, addr, pgoff, vma->vm_page_prot); if (err) return err; size -= PAGE_SIZE; addr += PAGE_SIZE; pgoff++; } while (size); return 0; } /*** * sys_remap_file_pages - remap arbitrary pages of a shared backing store * file within an existing vma. * @start: start of the remapped virtual memory range * @size: size of the remapped virtual memory range * @prot: new protection bits of the range * @pgoff: to be mapped page of the backing store file * @flags: 0 or MAP_NONBLOCKED - the later will cause no IO. * * this syscall works purely via pagetables, so it's the most efficient * way to map the same (large) file into a given virtual window. Unlike * mmap()/mremap() it does not create any new vmas. The new mappings are * also safe across swapout. * * NOTE: the 'prot' parameter right now is ignored, and the vma's default * protection is used. Arbitrary protections might be implemented in the * future. */ asmlinkage long sys_remap_file_pages(unsigned long start, unsigned long size, unsigned long __prot, unsigned long pgoff, unsigned long flags) { struct mm_struct *mm = current->mm; struct address_space *mapping; unsigned long end = start + size; struct vm_area_struct *vma; int err = -EINVAL; int has_write_lock = 0; if (__prot) return err; /* * Sanitize the syscall parameters: */ start = start & PAGE_MASK; size = size & PAGE_MASK; /* Does the address range wrap, or is the span zero-sized? */ if (start + size <= start) return err; /* Can we represent this offset inside this architecture's pte's? */ #if PTE_FILE_MAX_BITS < BITS_PER_LONG if (pgoff + (size >> PAGE_SHIFT) >= (1UL << PTE_FILE_MAX_BITS)) return err; #endif /* We need down_write() to change vma->vm_flags. 
*/ down_read(&mm->mmap_sem); retry: vma = find_vma(mm, start); /* * Make sure the vma is shared, that it supports prefaulting, * and that the remapped range is valid and fully within * the single existing vma. vm_private_data is used as a * swapout cursor in a VM_NONLINEAR vma. */ if (!vma || !(vma->vm_flags & VM_SHARED)) goto out; if (vma->vm_private_data && !(vma->vm_flags & VM_NONLINEAR)) goto out; if (!(vma->vm_flags & VM_CAN_NONLINEAR)) goto out; if (end <= start || start < vma->vm_start || end > vma->vm_end) goto out; /* Must set VM_NONLINEAR before any pages are populated. */ if (!(vma->vm_flags & VM_NONLINEAR)) { /* Don't need a nonlinear mapping, exit success */ if (pgoff == linear_page_index(vma, start)) { err = 0; goto out; } if (!has_write_lock) { up_read(&mm->mmap_sem); down_write(&mm->mmap_sem); has_write_lock = 1; goto retry; } mapping = vma->vm_file->f_mapping; spin_lock(&mapping->i_mmap_lock); flush_dcache_mmap_lock(mapping); vma->vm_flags |= VM_NONLINEAR; vma_prio_tree_remove(vma, &mapping->i_mmap); vma_nonlinear_insert(vma, &mapping->i_mmap_nonlinear); flush_dcache_mmap_unlock(mapping); spin_unlock(&mapping->i_mmap_lock); } err = populate_range(mm, vma, start, size, pgoff); if (!err && !(flags & MAP_NONBLOCK)) { if (unlikely(has_write_lock)) { downgrade_write(&mm->mmap_sem); has_write_lock = 0; } make_pages_present(start, start+size); } /* * We can't clear VM_NONLINEAR because we'd have to do * it after ->populate completes, and that would prevent * downgrading the lock. (Locks can't be upgraded). */ out: if (likely(!has_write_lock)) up_read(&mm->mmap_sem); else up_write(&mm->mmap_sem); return err; }
{ "pile_set_name": "Github" }
# Package Information for pkg-config prefix=@prefix@ exec_prefix=@exec_prefix@ libdir=@libdir@ includedir=@includedir@ Name: libpcrecpp Description: PCRECPP - C++ wrapper for PCRE Version: @PACKAGE_VERSION@ Libs: -L${libdir} -lpcre -lpcrecpp Cflags: -I${includedir}
{ "pile_set_name": "Github" }
from django.conf import settings from django.contrib.auth.decorators import login_required from django.shortcuts import render, redirect from django.views.decorators.http import require_POST import account.views from pycon.finaid.context_processors import financial_aid import symposion.forms from symposion.forms import LanguageForm from symposion.proposals.models import ProposalSection from symposion.utils.signup import generate_username class SignupView(account.views.SignupView): form_class = symposion.forms.SignupForm def create_user(self, form, commit=True): user_kwargs = { "first_name": form.cleaned_data["first_name"], "last_name": form.cleaned_data["last_name"] } return super(SignupView, self).create_user(form, commit=commit, **user_kwargs) def generate_username(self, form): return generate_username(form.cleaned_data['email']) class LoginView(account.views.LoginView): form_class = account.forms.LoginEmailForm @login_required def dashboard(request): if request.session.get("pending-token"): return redirect("speaker_create_token", request.session["pending-token"]) context = {'proposals_are_open': bool(ProposalSection.available()), } context.update(financial_aid(request)) if settings.USE_I18N: context['language_form'] = LanguageForm( initial={'language': request.LANGUAGE_CODE}) return render( request, "dashboard.html", context, ) @require_POST def change_language(request): form = LanguageForm(request.POST) if form.is_valid(): request.session['django_language'] = form.cleaned_data['language'] return redirect(request.POST['next'])
{ "pile_set_name": "Github" }
/* * Copyright (C) 2018 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.example.roomwordssample; import android.arch.persistence.db.SupportSQLiteDatabase; import android.arch.persistence.room.Database; import android.arch.persistence.room.Room; import android.arch.persistence.room.RoomDatabase; import android.content.Context; import android.os.AsyncTask; import android.support.annotation.NonNull; /** * WordRoomDatabase. Includes code to create the database. * After the app creates the database, all further interactions * with it happen through the WordViewModel. */ @Database(entities = {Word.class}, version = 1, exportSchema = false) public abstract class WordRoomDatabase extends RoomDatabase { public abstract WordDao wordDao(); private static WordRoomDatabase INSTANCE; public static WordRoomDatabase getDatabase(final Context context) { if (INSTANCE == null) { synchronized (WordRoomDatabase.class) { if (INSTANCE == null) { // Create database here INSTANCE = Room.databaseBuilder(context.getApplicationContext(), WordRoomDatabase.class, "word_database") // Wipes and rebuilds instead of migrating if no Migration object. // Migration is not part of this practical. .fallbackToDestructiveMigration() .addCallback(sRoomDatabaseCallback) .build(); } } } return INSTANCE; } // This callback is called when the database has opened. // In this case, use PopulateDbAsync to populate the database // with the initial data set if the database has no entries. 
private static RoomDatabase.Callback sRoomDatabaseCallback = new RoomDatabase.Callback(){ @Override public void onOpen (@NonNull SupportSQLiteDatabase db){ super.onOpen(db); new PopulateDbAsync(INSTANCE).execute(); } }; // Populate the database with the initial data set // only if the database has no entries. private static class PopulateDbAsync extends AsyncTask<Void, Void, Void> { private final WordDao mDao; // Initial data set private static String [] words = {"dolphin", "crocodile", "cobra", "elephant", "goldfish", "tiger", "snake"}; PopulateDbAsync(WordRoomDatabase db) { mDao = db.wordDao(); } @Override protected Void doInBackground(final Void... params) { // If we have no words, then create the initial list of words if (mDao.getAnyWord().length < 1) { for (int i = 0; i <= words.length - 1; i++) { Word word = new Word(words[i]); mDao.insert(word); } } return null; } } }
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.adapter.cassandra; import org.apache.calcite.avatica.util.ByteString; import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.linq4j.Enumerator; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rel.type.RelDataTypeSystem; import org.apache.calcite.rel.type.RelProtoDataType; import org.apache.calcite.sql.type.SqlTypeFactoryImpl; import com.datastax.driver.core.LocalDate; import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.Row; import com.datastax.driver.core.TupleValue; import java.nio.ByteBuffer; import java.util.Date; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.stream.IntStream; /** Enumerator that reads from a Cassandra column family. */ class CassandraEnumerator implements Enumerator<Object> { private Iterator<Row> iterator; private Row current; private List<RelDataTypeField> fieldTypes; /** Creates a CassandraEnumerator. 
* * @param results Cassandra result set ({@link com.datastax.driver.core.ResultSet}) * @param protoRowType The type of resulting rows */ CassandraEnumerator(ResultSet results, RelProtoDataType protoRowType) { this.iterator = results.iterator(); this.current = null; final RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT); this.fieldTypes = protoRowType.apply(typeFactory).getFieldList(); } /** Produces the next row from the results. * * @return A new row from the results */ public Object current() { if (fieldTypes.size() == 1) { // If we just have one field, produce it directly return currentRowField(0); } else { // Build an array with all fields in this row Object[] row = new Object[fieldTypes.size()]; for (int i = 0; i < fieldTypes.size(); i++) { row[i] = currentRowField(i); } return row; } } /** Get a field for the current row from the underlying object. * * @param index Index of the field within the Row object */ private Object currentRowField(int index) { final Object o = current.get(index, CassandraSchema.CODEC_REGISTRY.codecFor( current.getColumnDefinitions().getType(index))); return convertToEnumeratorObject(o); } /** Convert an object into the expected internal representation. 
* * @param obj Object to convert, if needed */ private Object convertToEnumeratorObject(Object obj) { if (obj instanceof ByteBuffer) { ByteBuffer buf = (ByteBuffer) obj; byte [] bytes = new byte[buf.remaining()]; buf.get(bytes, 0, bytes.length); return new ByteString(bytes); } else if (obj instanceof LocalDate) { // converts dates to the expected numeric format return ((LocalDate) obj).getMillisSinceEpoch() / DateTimeUtils.MILLIS_PER_DAY; } else if (obj instanceof Date) { return ((Date) obj).toInstant().toEpochMilli(); } else if (obj instanceof LinkedHashSet) { // MULTISET is handled as an array return ((LinkedHashSet) obj).toArray(); } else if (obj instanceof TupleValue) { // STRUCT can be handled as an array final TupleValue tupleValue = (TupleValue) obj; int numComponents = tupleValue.getType().getComponentTypes().size(); return IntStream.range(0, numComponents) .mapToObj(i -> tupleValue.get(i, CassandraSchema.CODEC_REGISTRY.codecFor( tupleValue.getType().getComponentTypes().get(i))) ).map(this::convertToEnumeratorObject) .toArray(); } return obj; } public boolean moveNext() { if (iterator.hasNext()) { current = iterator.next(); return true; } else { return false; } } public void reset() { throw new UnsupportedOperationException(); } public void close() { // Nothing to do here } }
{ "pile_set_name": "Github" }
/* * librdkafka - Apache Kafka C library * * Copyright (c) 2019 Magnus Edenhill * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ /** * Mocks - protocol request handlers * */ #include "rdkafka_int.h" #include "rdbuf.h" #include "rdrand.h" #include "rdkafka_interceptor.h" #include "rdkafka_mock_int.h" #include "rdkafka_transport_int.h" #include "rdkafka_offset.h" /** * @brief Handle ProduceRequest */ static int rd_kafka_mock_handle_Produce (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { const rd_bool_t log_decode_errors = rd_true; rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); int32_t TopicsCnt; rd_kafkap_str_t TransactionalId = RD_KAFKAP_STR_INITIALIZER; int16_t Acks; int32_t TimeoutMs; rd_kafka_resp_err_t all_err; if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) rd_kafka_buf_read_str(rkbuf, &TransactionalId); rd_kafka_buf_read_i16(rkbuf, &Acks); rd_kafka_buf_read_i32(rkbuf, &TimeoutMs); rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Topics */ rd_kafka_buf_write_i32(resp, TopicsCnt); /* Inject error, if any */ all_err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); while (TopicsCnt-- > 0) { rd_kafkap_str_t Topic; int32_t PartitionCnt; rd_kafka_mock_topic_t *mtopic; rd_kafka_buf_read_str(rkbuf, &Topic); rd_kafka_buf_read_i32(rkbuf, &PartitionCnt); mtopic = rd_kafka_mock_topic_find_by_kstr(mcluster, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* Response: #Partitions */ rd_kafka_buf_write_i32(resp, PartitionCnt); while (PartitionCnt-- > 0) { int32_t Partition; rd_kafka_mock_partition_t *mpart = NULL; rd_kafkap_bytes_t records; rd_kafka_resp_err_t err = RD_KAFKA_RESP_ERR_NO_ERROR; int64_t BaseOffset = -1; rd_kafka_buf_read_i32(rkbuf, &Partition); if (mtopic) mpart = rd_kafka_mock_partition_find(mtopic, Partition); rd_kafka_buf_read_bytes(rkbuf, &records); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); if (all_err) err = all_err; else if (!mpart) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; else if (mpart->leader != mconn->broker) err = 
RD_KAFKA_RESP_ERR_NOT_LEADER_FOR_PARTITION; /* Append to partition log */ if (!err) err = rd_kafka_mock_partition_log_append( mpart, &records, &BaseOffset); /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); if (err) { /* Response: BaseOffset */ rd_kafka_buf_write_i64(resp, BaseOffset); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* Response: LogAppendTimeMs */ rd_kafka_buf_write_i64(resp, -1); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 6) { /* Response: LogStartOffset */ rd_kafka_buf_write_i64(resp, -1); } } else { /* Response: BaseOffset */ rd_kafka_buf_write_i64(resp, BaseOffset); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* Response: LogAppendTimeMs */ rd_kafka_buf_write_i64(resp, 1234); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 6) { /* Response: LogStartOffset */ rd_kafka_buf_write_i64( resp, mpart->start_offset); } } } } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: ThrottleTime */ rd_kafka_buf_write_i32(resp, 0); } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle FetchRequest */ static int rd_kafka_mock_handle_Fetch (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { const rd_bool_t log_decode_errors = rd_true; rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_resp_err_t all_err; int32_t ReplicaId, MaxWait, MinBytes, MaxBytes = -1, SessionId = -1, Epoch, TopicsCnt; int8_t IsolationLevel; size_t totsize = 0; rd_kafka_buf_read_i32(rkbuf, &ReplicaId); rd_kafka_buf_read_i32(rkbuf, &MaxWait); rd_kafka_buf_read_i32(rkbuf, &MinBytes); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) rd_kafka_buf_read_i32(rkbuf, &MaxBytes); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 4) rd_kafka_buf_read_i8(rkbuf, &IsolationLevel); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 7) { rd_kafka_buf_read_i32(rkbuf, &SessionId); rd_kafka_buf_read_i32(rkbuf, &Epoch); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: 
ThrottleTime */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ all_err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 7) { /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, all_err); /* Response: SessionId */ rd_kafka_buf_write_i32(resp, SessionId); } rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Topics */ rd_kafka_buf_write_i32(resp, TopicsCnt); while (TopicsCnt-- > 0) { rd_kafkap_str_t Topic; int32_t PartitionCnt; rd_kafka_mock_topic_t *mtopic; rd_kafka_buf_read_str(rkbuf, &Topic); rd_kafka_buf_read_i32(rkbuf, &PartitionCnt); mtopic = rd_kafka_mock_topic_find_by_kstr(mcluster, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* Response: #Partitions */ rd_kafka_buf_write_i32(resp, PartitionCnt); while (PartitionCnt-- > 0) { int32_t Partition, CurrentLeaderEpoch, PartMaxBytes; int64_t FetchOffset, LogStartOffset; rd_kafka_mock_partition_t *mpart = NULL; rd_kafka_resp_err_t err = all_err; rd_bool_t on_follower; size_t partsize = 0; const rd_kafka_mock_msgset_t *mset = NULL; rd_kafka_buf_read_i32(rkbuf, &Partition); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 9) rd_kafka_buf_read_i32(rkbuf, &CurrentLeaderEpoch); rd_kafka_buf_read_i64(rkbuf, &FetchOffset); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 5) rd_kafka_buf_read_i64(rkbuf, &LogStartOffset); rd_kafka_buf_read_i32(rkbuf, &PartMaxBytes); if (mtopic) mpart = rd_kafka_mock_partition_find(mtopic, Partition); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); /* Fetch is directed at follower and this is * the follower broker. 
*/ on_follower = mpart && mpart->follower_id == mconn->broker->id; if (!all_err && !mpart) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; else if (!all_err && mpart->leader != mconn->broker && !on_follower) err = RD_KAFKA_RESP_ERR_NOT_LEADER_FOR_PARTITION; /* Find MessageSet for FetchOffset */ if (!err && FetchOffset != mpart->end_offset) { if (on_follower && FetchOffset <= mpart->end_offset && FetchOffset > mpart->follower_end_offset) err = RD_KAFKA_RESP_ERR_OFFSET_NOT_AVAILABLE; else if (!(mset = rd_kafka_mock_msgset_find( mpart, FetchOffset, on_follower))) err = RD_KAFKA_RESP_ERR_OFFSET_OUT_OF_RANGE; } /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); /* Response: Highwatermark */ rd_kafka_buf_write_i64(resp, mpart ? (on_follower ? mpart->follower_end_offset : mpart->end_offset) : -1); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 4) { /* Response: LastStableOffset */ rd_kafka_buf_write_i64(resp, mpart ? mpart->end_offset : -1); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 5) { /* Response: LogStartOffset */ rd_kafka_buf_write_i64( resp, !mpart ? -1 : (on_follower ? mpart->follower_start_offset : mpart->start_offset)); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 4) { /* Response: #Aborted */ rd_kafka_buf_write_i32(resp, 0); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 11) { int32_t PreferredReadReplica = mpart && mpart->leader == mconn->broker && mpart->follower_id != -1 ? mpart->follower_id : -1; /* Response: PreferredReplica */ rd_kafka_buf_write_i32( resp, PreferredReadReplica); if (PreferredReadReplica != -1) { /* Don't return any data when * PreferredReadReplica is set */ mset = NULL; MaxWait = 0; } } if (mset && partsize < (size_t)PartMaxBytes && totsize < (size_t)MaxBytes) { /* Response: Records */ rd_kafka_buf_write_kbytes(resp, &mset->bytes); partsize += RD_KAFKAP_BYTES_SIZE(&mset->bytes); totsize += RD_KAFKAP_BYTES_SIZE(&mset->bytes); /* FIXME: Multiple messageSets ? 
*/ } else { /* Empty Response: Records: Null */ rd_kafka_buf_write_i32(resp, 0); } } } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 7) { int32_t ForgottenTopicCnt; rd_kafka_buf_read_i32(rkbuf, &ForgottenTopicCnt); while (ForgottenTopicCnt-- > 0) { rd_kafkap_str_t Topic; int32_t ForgPartCnt; rd_kafka_buf_read_str(rkbuf, &Topic); rd_kafka_buf_read_i32(rkbuf, &ForgPartCnt); while (ForgPartCnt-- > 0) { int32_t Partition; rd_kafka_buf_read_i32(rkbuf, &Partition); } } } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 11) { rd_kafkap_str_t RackId; char *rack; rd_kafka_buf_read_str(rkbuf, &RackId); RD_KAFKAP_STR_DUPA(&rack, &RackId); /* Matt might do something sensible with this */ } /* If there was no data, delay up to MaxWait. * This isn't strictly correct since we should cut the wait short * and feed newly produced data if a producer writes to the * partitions, but that is too much of a hassle here since we * can't block the thread. */ if (!totsize && MaxWait > 0) resp->rkbuf_ts_retry = rd_clock() + (MaxWait * 1000); rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle ListOffset */ static int rd_kafka_mock_handle_ListOffset (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { const rd_bool_t log_decode_errors = rd_true; rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_resp_err_t all_err; int32_t ReplicaId, TopicsCnt; int8_t IsolationLevel; rd_kafka_buf_read_i32(rkbuf, &ReplicaId); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) rd_kafka_buf_read_i8(rkbuf, &IsolationLevel); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* Response: ThrottleTime */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ all_err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Topics */ rd_kafka_buf_write_i32(resp, TopicsCnt); while (TopicsCnt-- > 0) { 
rd_kafkap_str_t Topic; int32_t PartitionCnt; rd_kafka_mock_topic_t *mtopic; rd_kafka_buf_read_str(rkbuf, &Topic); rd_kafka_buf_read_i32(rkbuf, &PartitionCnt); mtopic = rd_kafka_mock_topic_find_by_kstr(mcluster, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* Response: #Partitions */ rd_kafka_buf_write_i32(resp, PartitionCnt); while (PartitionCnt-- > 0) { int32_t Partition, CurrentLeaderEpoch; int64_t Timestamp, MaxNumOffsets, Offset = -1; rd_kafka_mock_partition_t *mpart = NULL; rd_kafka_resp_err_t err = all_err; rd_kafka_buf_read_i32(rkbuf, &Partition); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 4) rd_kafka_buf_read_i32(rkbuf, &CurrentLeaderEpoch); rd_kafka_buf_read_i64(rkbuf, &Timestamp); rd_kafka_buf_read_i32(rkbuf, &MaxNumOffsets); if (mtopic) mpart = rd_kafka_mock_partition_find(mtopic, Partition); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); if (!all_err && !mpart) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; else if (!all_err && mpart->leader != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_LEADER_FOR_PARTITION; /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); if (!err && mpart) { if (Timestamp == RD_KAFKA_OFFSET_BEGINNING) Offset = mpart->start_offset; else if (Timestamp == RD_KAFKA_OFFSET_END) Offset = mpart->end_offset; else if (Timestamp < 0) Offset = -1; else /* FIXME: by timestamp */ Offset = -1; } if (rkbuf->rkbuf_reqhdr.ApiVersion == 0) { /* Response: #OldStyleOffsets */ rd_kafka_buf_write_i32(resp, Offset != -1 ? 
1 : 0); /* Response: OldStyleOffsets[0] */ if (Offset != -1) rd_kafka_buf_write_i64(resp, Offset); } else { /* Response: Offset */ rd_kafka_buf_write_i64(resp, Offset); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: Timestamp (FIXME) */ rd_kafka_buf_write_i64(resp, -1); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 4) { /* Response: LeaderEpoch */ rd_kafka_buf_write_i64(resp, -1); } rd_kafka_dbg(mcluster->rk, MOCK, "MOCK", "Topic %.*s [%"PRId32"] returning " "offset %"PRId64" for %s: %s", RD_KAFKAP_STR_PR(&Topic), Partition, Offset, rd_kafka_offset2str(Timestamp), rd_kafka_err2str(err)); } } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle OffsetFetch (fetch committed offsets) */ static int rd_kafka_mock_handle_OffsetFetch (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { const rd_bool_t log_decode_errors = rd_true; rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_mock_broker_t *mrkb; rd_kafka_resp_err_t all_err; int32_t TopicsCnt; rd_kafkap_str_t GroupId; if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) { /* Response: ThrottleTime */ rd_kafka_buf_write_i32(resp, 0); } rd_kafka_buf_read_str(rkbuf, &GroupId); /* Inject error, if any */ all_err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); mrkb = rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId); if (!mrkb && !all_err) all_err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Topics */ rd_kafka_buf_write_i32(resp, TopicsCnt); while (TopicsCnt-- > 0) { rd_kafkap_str_t Topic; int32_t PartitionCnt; rd_kafka_mock_topic_t *mtopic; rd_kafka_buf_read_str(rkbuf, &Topic); rd_kafka_buf_read_i32(rkbuf, &PartitionCnt); mtopic = rd_kafka_mock_topic_find_by_kstr(mcluster, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* Response: #Partitions */ 
rd_kafka_buf_write_i32(resp, PartitionCnt); while (PartitionCnt-- > 0) { int32_t Partition; rd_kafka_mock_partition_t *mpart = NULL; const rd_kafka_mock_committed_offset_t *coff = NULL; rd_kafka_resp_err_t err = all_err; rd_kafka_buf_read_i32(rkbuf, &Partition); if (mtopic) mpart = rd_kafka_mock_partition_find(mtopic, Partition); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); if (!all_err && !mpart) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; if (!err) coff = rd_kafka_mock_committed_offset_find( mpart, &GroupId); /* Response: CommittedOffset */ rd_kafka_buf_write_i64(resp, coff ? coff->offset : -1); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 5) { /* Response: CommittedLeaderEpoch */ rd_kafka_buf_write_i32(resp, -1); } /* Response: Metadata */ rd_kafka_buf_write_kstr(resp, coff ? coff->metadata : NULL); /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); if (coff) rd_kafka_dbg(mcluster->rk, MOCK, "MOCK", "Topic %s [%"PRId32"] returning " "committed offset %"PRId64 " for group %s", mtopic->name, mpart->id, coff->offset, coff->group); else rd_kafka_dbg(mcluster->rk, MOCK, "MOCK", "Topic %.*s [%"PRId32"] has no " "committed offset for group %.*s: " "%s", RD_KAFKAP_STR_PR(&Topic), Partition, RD_KAFKAP_STR_PR(&GroupId), rd_kafka_err2str(err)); } } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* Response: Outer ErrorCode */ rd_kafka_buf_write_i16(resp, all_err); } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle OffsetCommit */ static int rd_kafka_mock_handle_OffsetCommit (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { const rd_bool_t log_decode_errors = rd_true; rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_mock_broker_t *mrkb; rd_kafka_resp_err_t all_err; int32_t GenerationId = -1, TopicsCnt; rd_kafkap_str_t GroupId, MemberId, GroupInstanceId; if 
(rkbuf->rkbuf_reqhdr.ApiVersion >= 3) { /* Response: ThrottleTime */ rd_kafka_buf_write_i32(resp, 0); } rd_kafka_buf_read_str(rkbuf, &GroupId); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { rd_kafka_buf_read_i32(rkbuf, &GenerationId); rd_kafka_buf_read_str(rkbuf, &MemberId); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 7) rd_kafka_buf_read_str(rkbuf, &GroupInstanceId); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2 && rkbuf->rkbuf_reqhdr.ApiVersion <= 4) { int64_t RetentionTimeMs; rd_kafka_buf_read_i64(rkbuf, &RetentionTimeMs); } /* Inject error, if any */ all_err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); mrkb = rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId); if (!mrkb && !all_err) all_err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; if (!all_err) { rd_kafka_mock_cgrp_t *mcgrp; mcgrp = rd_kafka_mock_cgrp_find(mcluster, &GroupId); if (mcgrp) { rd_kafka_mock_cgrp_member_t *member = NULL; if (!RD_KAFKAP_STR_IS_NULL(&MemberId)) member = rd_kafka_mock_cgrp_member_find( mcgrp, &MemberId); if (!member) all_err = RD_KAFKA_RESP_ERR_UNKNOWN_MEMBER_ID; else all_err = rd_kafka_mock_cgrp_check_state( mcgrp, member, rkbuf, GenerationId); } /* FIXME: also check that partitions are assigned to member */ } rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Topics */ rd_kafka_buf_write_i32(resp, TopicsCnt); while (TopicsCnt-- > 0) { rd_kafkap_str_t Topic; int32_t PartitionCnt; rd_kafka_mock_topic_t *mtopic; rd_kafka_buf_read_str(rkbuf, &Topic); rd_kafka_buf_read_i32(rkbuf, &PartitionCnt); mtopic = rd_kafka_mock_topic_find_by_kstr(mcluster, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* Response: #Partitions */ rd_kafka_buf_write_i32(resp, PartitionCnt); while (PartitionCnt-- > 0) { int32_t Partition; rd_kafka_mock_partition_t *mpart = NULL; rd_kafka_resp_err_t err = all_err; int64_t CommittedOffset; rd_kafkap_str_t Metadata; rd_kafka_buf_read_i32(rkbuf, &Partition); if (mtopic) mpart = 
rd_kafka_mock_partition_find(mtopic, Partition); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); if (!all_err && !mpart) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; rd_kafka_buf_read_i64(rkbuf, &CommittedOffset); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 6) { int32_t CommittedLeaderEpoch; rd_kafka_buf_read_i32(rkbuf, &CommittedLeaderEpoch); } if (rkbuf->rkbuf_reqhdr.ApiVersion == 1) { int64_t CommitTimestamp; rd_kafka_buf_read_i64(rkbuf, &CommitTimestamp); } rd_kafka_buf_read_str(rkbuf, &Metadata); if (!err) rd_kafka_mock_commit_offset( mpart, &GroupId, CommittedOffset, &Metadata); /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); } } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle ApiVersionRequest */ static int rd_kafka_mock_handle_ApiVersion (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf); /** * @brief Write a MetadataResponse.Topics. entry to \p resp. * * @param mtopic may be NULL */ static void rd_kafka_mock_buf_write_Metadata_Topic (rd_kafka_buf_t *resp, int16_t ApiVersion, const char *topic, const rd_kafka_mock_topic_t *mtopic, rd_kafka_resp_err_t err) { int i; /* Response: Topics.ErrorCode */ rd_kafka_buf_write_i16(resp, err); /* Response: Topics.Name */ rd_kafka_buf_write_str(resp, topic, -1); if (ApiVersion >= 1) { /* Response: Topics.IsInternal */ rd_kafka_buf_write_bool(resp, rd_false); } /* Response: Topics.#Partitions */ rd_kafka_buf_write_i32(resp, mtopic ? mtopic->partition_cnt : 0); for (i = 0 ; mtopic && i < mtopic->partition_cnt ; i++) { const rd_kafka_mock_partition_t *mpart = &mtopic->partitions[i]; int r; /* Response: ..Partitions.ErrorCode */ rd_kafka_buf_write_i16(resp, 0); /* Response: ..Partitions.PartitionIndex */ rd_kafka_buf_write_i32(resp, mpart->id); /* Response: ..Partitions.Leader */ rd_kafka_buf_write_i32(resp, mpart->leader ? 
mpart->leader->id : -1); if (ApiVersion >= 7) { /* Response: ..Partitions.LeaderEpoch */ rd_kafka_buf_write_i32(resp, -1); } /* Response: ..Partitions.#ReplicaNodes */ rd_kafka_buf_write_i32(resp, mpart->replica_cnt); for (r = 0 ; r < mpart->replica_cnt ; r++) rd_kafka_buf_write_i32( resp, mpart->replicas[r]->id); /* Response: ..Partitions.#IsrNodes */ /* Let Replicas == ISRs for now */ rd_kafka_buf_write_i32(resp, mpart->replica_cnt); for (r = 0 ; r < mpart->replica_cnt ; r++) rd_kafka_buf_write_i32( resp, mpart->replicas[r]->id); if (ApiVersion >= 5) { /* Response: ...OfflineReplicas */ rd_kafka_buf_write_i32(resp, 0); } } } /** * @brief Handle MetadataRequest */ static int rd_kafka_mock_handle_Metadata (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { const rd_bool_t log_decode_errors = rd_true; rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_bool_t AllowAutoTopicCreation = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); const rd_kafka_mock_broker_t *mrkb; rd_kafka_topic_partition_list_t *requested_topics = NULL; rd_bool_t list_all_topics = rd_false; int32_t TopicsCnt; int i; if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) { /* Response: ThrottleTime */ rd_kafka_buf_write_i32(resp, 0); } /* Response: #Brokers */ rd_kafka_buf_write_i32(resp, mcluster->broker_cnt); TAILQ_FOREACH(mrkb, &mcluster->brokers, link) { /* Response: Brokers.Nodeid */ rd_kafka_buf_write_i32(resp, mrkb->id); /* Response: Brokers.Host */ rd_kafka_buf_write_str(resp, mrkb->advertised_listener, -1); /* Response: Brokers.Port */ rd_kafka_buf_write_i32(resp, mrkb->port); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: Brokers.Rack (Matt's going to love this) */ rd_kafka_buf_write_str(resp, mrkb->rack, -1); } } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* Response: ClusterId */ rd_kafka_buf_write_str(resp, mcluster->id, -1); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: ControllerId */ rd_kafka_buf_write_i32(resp, 
mcluster->controller_id); } /* #Topics */ rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); if (TopicsCnt > 0) requested_topics = rd_kafka_topic_partition_list_new(TopicsCnt); else if (rkbuf->rkbuf_reqhdr.ApiVersion == 0 || TopicsCnt == -1) list_all_topics = rd_true; for (i = 0 ; i < TopicsCnt ; i++) { rd_kafkap_str_t Topic; char *topic; rd_kafka_buf_read_str(rkbuf, &Topic); RD_KAFKAP_STR_DUPA(&topic, &Topic); rd_kafka_topic_partition_list_add(requested_topics, topic, RD_KAFKA_PARTITION_UA); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 4) rd_kafka_buf_read_bool(rkbuf, &AllowAutoTopicCreation); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 8) { rd_bool_t IncludeClusterAuthorizedOperations; rd_bool_t IncludeTopicAuthorizedOperations; rd_kafka_buf_read_bool(rkbuf, &IncludeClusterAuthorizedOperations); rd_kafka_buf_read_bool(rkbuf, &IncludeTopicAuthorizedOperations); } if (list_all_topics) { rd_kafka_mock_topic_t *mtopic; /* Response: #Topics */ rd_kafka_buf_write_i32(resp, mcluster->topic_cnt); TAILQ_FOREACH(mtopic, &mcluster->topics, link) { rd_kafka_mock_buf_write_Metadata_Topic( resp, rkbuf->rkbuf_reqhdr.ApiVersion, mtopic->name, mtopic, RD_KAFKA_RESP_ERR_NO_ERROR); } } else if (requested_topics) { /* Response: #Topics */ rd_kafka_buf_write_i32(resp, requested_topics->cnt); for (i = 0 ; i < requested_topics->cnt ; i++) { const rd_kafka_topic_partition_t *rktpar = &requested_topics->elems[i]; rd_kafka_mock_topic_t *mtopic; rd_kafka_resp_err_t err = RD_KAFKA_RESP_ERR_NO_ERROR; mtopic = rd_kafka_mock_topic_find(mcluster, rktpar->topic); if (!mtopic && AllowAutoTopicCreation) mtopic = rd_kafka_mock_topic_auto_create( mcluster, rktpar->topic, -1, &err); else if (!mtopic) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; rd_kafka_mock_buf_write_Metadata_Topic( resp, rkbuf->rkbuf_reqhdr.ApiVersion, rktpar->topic, mtopic, err); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 8) { /* TopicAuthorizedOperations */ rd_kafka_buf_write_i32(resp, INT32_MIN); } } else { /* Response: #Topics: brokers only */ 
rd_kafka_buf_write_i32(resp, 0); } if (rkbuf->rkbuf_reqhdr.ApiVersion >= 8) { /* ClusterAuthorizedOperations */ rd_kafka_buf_write_i32(resp, INT32_MIN); } if (requested_topics) rd_kafka_topic_partition_list_destroy(requested_topics); rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: if (requested_topics) rd_kafka_topic_partition_list_destroy(requested_topics); rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle FindCoordinatorRequest */ static int rd_kafka_mock_handle_FindCoordinator (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafkap_str_t Key; int8_t KeyType = RD_KAFKA_COORD_GROUP; const rd_kafka_mock_broker_t *mrkb = NULL; rd_kafka_resp_err_t err; /* Key */ rd_kafka_buf_read_str(rkbuf, &Key); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* KeyType */ rd_kafka_buf_read_i8(rkbuf, &KeyType); } /* * Construct response */ if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: Throttle */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err && RD_KAFKAP_STR_LEN(&Key) > 0) { mrkb = rd_kafka_mock_cluster_get_coord(mcluster, KeyType, &Key); rd_assert(mrkb); } if (!mrkb && !err) err = RD_KAFKA_RESP_ERR_COORDINATOR_NOT_AVAILABLE; if (err) { /* Response: ErrorCode and ErrorMessage */ rd_kafka_buf_write_i16(resp, err); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) rd_kafka_buf_write_str(resp, rd_kafka_err2str(err), -1); /* Response: NodeId, Host, Port */ rd_kafka_buf_write_i32(resp, -1); rd_kafka_buf_write_str(resp, NULL, -1); rd_kafka_buf_write_i32(resp, -1); } else { /* Response: ErrorCode and ErrorMessage */ rd_kafka_buf_write_i16(resp, 0); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) rd_kafka_buf_write_str(resp, NULL, -1); /* Response: NodeId, Host, Port */ 
rd_kafka_buf_write_i32(resp, mrkb->id); rd_kafka_buf_write_str(resp, mrkb->advertised_listener, -1); rd_kafka_buf_write_i32(resp, mrkb->port); } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle JoinGroupRequest */ static int rd_kafka_mock_handle_JoinGroup (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_mock_broker_t *mrkb; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafkap_str_t GroupId, MemberId, ProtocolType; rd_kafkap_str_t GroupInstanceId = RD_KAFKAP_STR_INITIALIZER; int32_t SessionTimeoutMs; int32_t MaxPollIntervalMs = -1; int32_t ProtocolCnt = 0; int32_t i; rd_kafka_resp_err_t err; rd_kafka_mock_cgrp_t *mcgrp; rd_kafka_mock_cgrp_proto_t *protos = NULL; rd_kafka_buf_read_str(rkbuf, &GroupId); rd_kafka_buf_read_i32(rkbuf, &SessionTimeoutMs); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) rd_kafka_buf_read_i32(rkbuf, &MaxPollIntervalMs); rd_kafka_buf_read_str(rkbuf, &MemberId); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 5) rd_kafka_buf_read_str(rkbuf, &GroupInstanceId); rd_kafka_buf_read_str(rkbuf, &ProtocolType); rd_kafka_buf_read_i32(rkbuf, &ProtocolCnt); if (ProtocolCnt > 1000) { rd_kafka_dbg(mcluster->rk, MOCK, "MOCK", "JoinGroupRequest: ProtocolCnt %"PRId32 " > max allowed 1000", ProtocolCnt); rd_kafka_buf_destroy(resp); return -1; } protos = rd_malloc(sizeof(*protos) * ProtocolCnt); for (i = 0 ; i < ProtocolCnt ; i++) { rd_kafkap_str_t ProtocolName; rd_kafkap_bytes_t Metadata; rd_kafka_buf_read_str(rkbuf, &ProtocolName); rd_kafka_buf_read_bytes(rkbuf, &Metadata); protos[i].name = rd_kafkap_str_copy(&ProtocolName); protos[i].metadata = rd_kafkap_bytes_copy(&Metadata); } /* * Construct response */ if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* Response: Throttle */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ err = 
rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err) { mrkb = rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId); if (!mrkb) err = RD_KAFKA_RESP_ERR_COORDINATOR_NOT_AVAILABLE; else if (mrkb != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; } if (!err) { mcgrp = rd_kafka_mock_cgrp_get(mcluster, &GroupId, &ProtocolType); rd_assert(mcgrp); /* This triggers an async rebalance, the response will be * sent later. */ err = rd_kafka_mock_cgrp_member_add( mcgrp, mconn, resp, &MemberId, &ProtocolType, protos, ProtocolCnt, SessionTimeoutMs); if (!err) { /* .._add() assumes ownership of resp and protos */ protos = NULL; rd_kafka_mock_connection_set_blocking(mconn, rd_true); return 0; } } rd_kafka_mock_cgrp_protos_destroy(protos, ProtocolCnt); /* Error case */ rd_kafka_buf_write_i16(resp, err); /* ErrorCode */ rd_kafka_buf_write_i32(resp, -1); /* GenerationId */ rd_kafka_buf_write_str(resp, NULL, -1); /* ProtocolName */ rd_kafka_buf_write_str(resp, NULL, -1); /* LeaderId */ rd_kafka_buf_write_kstr(resp, NULL); /* MemberId */ rd_kafka_buf_write_i32(resp, 0); /* MemberCnt */ rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); if (protos) rd_kafka_mock_cgrp_protos_destroy(protos, ProtocolCnt); return -1; } /** * @brief Handle HeartbeatRequest */ static int rd_kafka_mock_handle_Heartbeat (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_mock_broker_t *mrkb; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafkap_str_t GroupId, MemberId; rd_kafkap_str_t GroupInstanceId = RD_KAFKAP_STR_INITIALIZER; int32_t GenerationId; rd_kafka_resp_err_t err; rd_kafka_mock_cgrp_t *mcgrp; rd_kafka_mock_cgrp_member_t *member = NULL; rd_kafka_buf_read_str(rkbuf, &GroupId); rd_kafka_buf_read_i32(rkbuf, &GenerationId); rd_kafka_buf_read_str(rkbuf, &MemberId); 
if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) rd_kafka_buf_read_str(rkbuf, &GroupInstanceId); /* * Construct response */ if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: Throttle */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err) { mrkb = rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId); if (!mrkb) err = RD_KAFKA_RESP_ERR_COORDINATOR_NOT_AVAILABLE; else if (mrkb != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; } if (!err) { mcgrp = rd_kafka_mock_cgrp_find(mcluster, &GroupId); if (!mcgrp) err = RD_KAFKA_RESP_ERR_GROUP_ID_NOT_FOUND; } if (!err) { member = rd_kafka_mock_cgrp_member_find(mcgrp, &MemberId); if (!member) err = RD_KAFKA_RESP_ERR_UNKNOWN_MEMBER_ID; } if (!err) err = rd_kafka_mock_cgrp_check_state(mcgrp, member, rkbuf, GenerationId); if (!err) rd_kafka_mock_cgrp_member_active(member); rd_kafka_buf_write_i16(resp, err); /* ErrorCode */ rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle LeaveGroupRequest */ static int rd_kafka_mock_handle_LeaveGroup (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_mock_broker_t *mrkb; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafkap_str_t GroupId, MemberId; rd_kafka_resp_err_t err; rd_kafka_mock_cgrp_t *mcgrp; rd_kafka_mock_cgrp_member_t *member = NULL; rd_kafka_buf_read_str(rkbuf, &GroupId); rd_kafka_buf_read_str(rkbuf, &MemberId); /* * Construct response */ if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: Throttle */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err) { mrkb = rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId); if (!mrkb) 
err = RD_KAFKA_RESP_ERR_COORDINATOR_NOT_AVAILABLE; else if (mrkb != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; } if (!err) { mcgrp = rd_kafka_mock_cgrp_find(mcluster, &GroupId); if (!mcgrp) err = RD_KAFKA_RESP_ERR_GROUP_ID_NOT_FOUND; } if (!err) { member = rd_kafka_mock_cgrp_member_find(mcgrp, &MemberId); if (!member) err = RD_KAFKA_RESP_ERR_UNKNOWN_MEMBER_ID; } if (!err) err = rd_kafka_mock_cgrp_check_state(mcgrp, member, rkbuf, -1); if (!err) rd_kafka_mock_cgrp_member_leave(mcgrp, member); rd_kafka_buf_write_i16(resp, err); /* ErrorCode */ rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle SyncGroupRequest */ static int rd_kafka_mock_handle_SyncGroup (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_mock_broker_t *mrkb; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafkap_str_t GroupId, MemberId; rd_kafkap_str_t GroupInstanceId = RD_KAFKAP_STR_INITIALIZER; int32_t GenerationId, AssignmentCnt; int32_t i; rd_kafka_resp_err_t err; rd_kafka_mock_cgrp_t *mcgrp = NULL; rd_kafka_mock_cgrp_member_t *member = NULL; rd_bool_t is_leader; rd_kafka_buf_read_str(rkbuf, &GroupId); rd_kafka_buf_read_i32(rkbuf, &GenerationId); rd_kafka_buf_read_str(rkbuf, &MemberId); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) rd_kafka_buf_read_str(rkbuf, &GroupInstanceId); rd_kafka_buf_read_i32(rkbuf, &AssignmentCnt); /* * Construct response */ if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* Response: Throttle */ rd_kafka_buf_write_i32(resp, 0); } /* Inject error, if any */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err) { mrkb = rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId); if (!mrkb) err = RD_KAFKA_RESP_ERR_COORDINATOR_NOT_AVAILABLE; else if (mrkb != mconn->broker) err = 
RD_KAFKA_RESP_ERR_NOT_COORDINATOR; } if (!err) { mcgrp = rd_kafka_mock_cgrp_find(mcluster, &GroupId); if (!mcgrp) err = RD_KAFKA_RESP_ERR_GROUP_ID_NOT_FOUND; } if (!err) { member = rd_kafka_mock_cgrp_member_find(mcgrp, &MemberId); if (!member) err = RD_KAFKA_RESP_ERR_UNKNOWN_MEMBER_ID; } if (!err) err = rd_kafka_mock_cgrp_check_state(mcgrp, member, rkbuf, GenerationId); if (!err) rd_kafka_mock_cgrp_member_active(member); is_leader = mcgrp->leader && mcgrp->leader == member; if (!err) { if (AssignmentCnt > 0 && !is_leader) err = RD_KAFKA_RESP_ERR_NOT_LEADER_FOR_PARTITION; /* FIXME */ else if (AssignmentCnt == 0 && is_leader) err = RD_KAFKA_RESP_ERR_INVALID_PARTITIONS; /* FIXME */ } for (i = 0 ; i < AssignmentCnt ; i++) { rd_kafkap_str_t MemberId2; rd_kafkap_bytes_t Metadata; rd_kafka_mock_cgrp_member_t *member2; rd_kafka_buf_read_str(rkbuf, &MemberId2); rd_kafka_buf_read_bytes(rkbuf, &Metadata); if (err) continue; /* Find member */ member2 = rd_kafka_mock_cgrp_member_find(mcgrp, &MemberId2); if (!member2) continue; rd_kafka_mock_cgrp_member_assignment_set(mcgrp, member2, &Metadata); } if (!err) { err = rd_kafka_mock_cgrp_member_sync_set(mcgrp, member, mconn, resp); /* .._sync_set() assumes ownership of resp */ if (!err) return 0; /* Response will be sent when all members * are synchronized */ } /* Error case */ rd_kafka_buf_write_i16(resp, err); /* ErrorCode */ rd_kafka_buf_write_bytes(resp, NULL, -1); /* MemberState */ rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Generate a unique ProducerID */ static void rd_kafka_mock_pid_generate (rd_kafka_mock_cluster_t *mcluster, rd_kafka_pid_t *pid) { pid->id = rd_jitter(1, 900000) * 1000; pid->epoch = 0; } /** * @brief Handle InitProducerId */ static int rd_kafka_mock_handle_InitProducerId (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; const rd_bool_t log_decode_errors = 
rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafkap_str_t TransactionalId; rd_kafka_pid_t pid; int32_t TxnTimeoutMs; rd_kafka_resp_err_t err; /* TransactionalId */ rd_kafka_buf_read_str(rkbuf, &TransactionalId); /* TransactionTimeoutMs */ rd_kafka_buf_read_i32(rkbuf, &TxnTimeoutMs); /* * Construct response */ /* ThrottleTimeMs */ rd_kafka_buf_write_i32(resp, 0); /* Inject error */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err && !RD_KAFKAP_STR_IS_NULL(&TransactionalId) && rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_TXN, &TransactionalId) != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; /* ErrorCode */ rd_kafka_buf_write_i16(resp, err); if (!err) rd_kafka_mock_pid_generate(mcluster, &pid); else rd_kafka_pid_reset(&pid); /* ProducerId */ rd_kafka_buf_write_i64(resp, pid.id); /* ProducerEpoch */ rd_kafka_buf_write_i16(resp, pid.epoch); rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle AddPartitionsToTxn */ static int rd_kafka_mock_handle_AddPartitionsToTxn (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_resp_err_t all_err; rd_kafkap_str_t TransactionalId; rd_kafka_pid_t pid; int32_t TopicsCnt; /* Response: ThrottleTimeMs */ rd_kafka_buf_write_i32(resp, 0); /* TransactionalId */ rd_kafka_buf_read_str(rkbuf, &TransactionalId); /* ProducerId */ rd_kafka_buf_read_i64(rkbuf, &pid.id); /* Epoch */ rd_kafka_buf_read_i16(rkbuf, &pid.epoch); /* #Topics */ rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Results */ rd_kafka_buf_write_i32(resp, TopicsCnt); /* Inject error */ all_err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!all_err && rd_kafka_mock_cluster_get_coord(mcluster, 
RD_KAFKA_COORD_TXN, &TransactionalId) != mconn->broker) all_err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; while (TopicsCnt-- > 0) { rd_kafkap_str_t Topic; int32_t PartsCnt; const rd_kafka_mock_topic_t *mtopic; /* Topic */ rd_kafka_buf_read_str(rkbuf, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* #Partitions */ rd_kafka_buf_read_i32(rkbuf, &PartsCnt); /* Response: #Partitions */ rd_kafka_buf_write_i32(resp, PartsCnt); mtopic = rd_kafka_mock_topic_find_by_kstr(mcluster, &Topic); while (PartsCnt--) { int32_t Partition; rd_kafka_resp_err_t err = all_err; /* Partition */ rd_kafka_buf_read_i32(rkbuf, &Partition); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); if (!mtopic || Partition < 0 || Partition >= mtopic->partition_cnt) err = RD_KAFKA_RESP_ERR_UNKNOWN_TOPIC_OR_PART; else if (mtopic && mtopic->err) err = mtopic->err; /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); } } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle AddOffsetsToTxn */ static int rd_kafka_mock_handle_AddOffsetsToTxn (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_resp_err_t err; rd_kafkap_str_t TransactionalId, GroupId; rd_kafka_pid_t pid; /* TransactionalId */ rd_kafka_buf_read_str(rkbuf, &TransactionalId); /* ProducerId */ rd_kafka_buf_read_i64(rkbuf, &pid.id); /* Epoch */ rd_kafka_buf_read_i16(rkbuf, &pid.epoch); /* GroupIdId */ rd_kafka_buf_read_str(rkbuf, &GroupId); /* Response: ThrottleTimeMs */ rd_kafka_buf_write_i32(resp, 0); /* Inject error */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err && rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_TXN, &TransactionalId) != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; /* 
Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle TxnOffsetCommit */ static int rd_kafka_mock_handle_TxnOffsetCommit (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_resp_err_t err; rd_kafkap_str_t TransactionalId, GroupId; rd_kafka_pid_t pid; int32_t TopicsCnt; /* Response: ThrottleTimeMs */ rd_kafka_buf_write_i32(resp, 0); /* TransactionalId */ rd_kafka_buf_read_str(rkbuf, &TransactionalId); /* GroupId */ rd_kafka_buf_read_str(rkbuf, &GroupId); /* ProducerId */ rd_kafka_buf_read_i64(rkbuf, &pid.id); /* Epoch */ rd_kafka_buf_read_i16(rkbuf, &pid.epoch); /* #Topics */ rd_kafka_buf_read_i32(rkbuf, &TopicsCnt); /* Response: #Results */ rd_kafka_buf_write_i32(resp, TopicsCnt); /* Inject error */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err && rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_GROUP, &GroupId) != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; while (TopicsCnt-- > 0) { rd_kafkap_str_t Topic; int32_t PartsCnt; /* Topic */ rd_kafka_buf_read_str(rkbuf, &Topic); /* Response: Topic */ rd_kafka_buf_write_kstr(resp, &Topic); /* #Partitions */ rd_kafka_buf_read_i32(rkbuf, &PartsCnt); /* Response: #Partitions */ rd_kafka_buf_write_i32(resp, PartsCnt); /* Ignore if the topic or partition exists or not. 
*/ while (PartsCnt-- > 0) { int32_t Partition; int64_t Offset; rd_kafkap_str_t Metadata; /* Partition */ rd_kafka_buf_read_i32(rkbuf, &Partition); /* Response: Partition */ rd_kafka_buf_write_i32(resp, Partition); /* CommittedOffset */ rd_kafka_buf_read_i64(rkbuf, &Offset); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 2) { /* CommittedLeaderEpoch */ int32_t Epoch; rd_kafka_buf_read_i32(rkbuf, &Epoch); } /* CommittedMetadata */ rd_kafka_buf_read_str(rkbuf, &Metadata); /* Response: ErrorCode */ rd_kafka_buf_write_i16(resp, err); } } rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Handle EndTxn */ static int rd_kafka_mock_handle_EndTxn (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; const rd_bool_t log_decode_errors = rd_true; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); rd_kafka_resp_err_t err; rd_kafkap_str_t TransactionalId; rd_kafka_pid_t pid; rd_bool_t committed; /* TransactionalId */ rd_kafka_buf_read_str(rkbuf, &TransactionalId); /* ProducerId */ rd_kafka_buf_read_i64(rkbuf, &pid.id); /* ProducerEpoch */ rd_kafka_buf_read_i16(rkbuf, &pid.epoch); /* Committed */ rd_kafka_buf_read_bool(rkbuf, &committed); /* * Construct response */ /* ThrottleTimeMs */ rd_kafka_buf_write_i32(resp, 0); /* Inject error */ err = rd_kafka_mock_next_request_error(mconn, rkbuf->rkbuf_reqhdr.ApiKey); if (!err && rd_kafka_mock_cluster_get_coord(mcluster, RD_KAFKA_COORD_TXN, &TransactionalId) != mconn->broker) err = RD_KAFKA_RESP_ERR_NOT_COORDINATOR; /* ErrorCode */ rd_kafka_buf_write_i16(resp, err); rd_kafka_mock_connection_send_response(mconn, resp); return 0; err_parse: rd_kafka_buf_destroy(resp); return -1; } /** * @brief Default request handlers */ const struct rd_kafka_mock_api_handler rd_kafka_mock_api_handlers[RD_KAFKAP__NUM] = { [RD_KAFKAP_Produce] = { 0, 7, -1, rd_kafka_mock_handle_Produce }, [RD_KAFKAP_Fetch] = { 0, 
11, -1, rd_kafka_mock_handle_Fetch }, [RD_KAFKAP_Offset] = { 0, 5, -1, rd_kafka_mock_handle_ListOffset }, [RD_KAFKAP_OffsetFetch] = { 0, 5, 6, rd_kafka_mock_handle_OffsetFetch }, [RD_KAFKAP_OffsetCommit] = { 0, 7, 8, rd_kafka_mock_handle_OffsetCommit }, [RD_KAFKAP_ApiVersion] = { 0, 2, 3, rd_kafka_mock_handle_ApiVersion }, [RD_KAFKAP_Metadata] = { 0, 2, 9, rd_kafka_mock_handle_Metadata }, [RD_KAFKAP_FindCoordinator] = { 0, 2, 3, rd_kafka_mock_handle_FindCoordinator }, [RD_KAFKAP_InitProducerId] = { 0, 1, 2, rd_kafka_mock_handle_InitProducerId }, [RD_KAFKAP_JoinGroup] = { 0, 5, 6, rd_kafka_mock_handle_JoinGroup }, [RD_KAFKAP_Heartbeat] = { 0, 3, 4, rd_kafka_mock_handle_Heartbeat }, [RD_KAFKAP_LeaveGroup] = { 0, 1, 4, rd_kafka_mock_handle_LeaveGroup }, [RD_KAFKAP_SyncGroup] = { 0, 3, 4, rd_kafka_mock_handle_SyncGroup }, [RD_KAFKAP_AddPartitionsToTxn] = { 0, 1, -1, rd_kafka_mock_handle_AddPartitionsToTxn }, [RD_KAFKAP_AddOffsetsToTxn] = { 0, 1, -1, rd_kafka_mock_handle_AddOffsetsToTxn }, [RD_KAFKAP_TxnOffsetCommit] = { 0, 2, 3, rd_kafka_mock_handle_TxnOffsetCommit }, [RD_KAFKAP_EndTxn] = { 0, 1, -1, rd_kafka_mock_handle_EndTxn }, }; /** * @brief Handle ApiVersionRequest. * * @remark This is the only handler that needs to handle unsupported ApiVersions. */ static int rd_kafka_mock_handle_ApiVersion (rd_kafka_mock_connection_t *mconn, rd_kafka_buf_t *rkbuf) { rd_kafka_mock_cluster_t *mcluster = mconn->broker->cluster; rd_kafka_buf_t *resp = rd_kafka_mock_buf_new_response(rkbuf); size_t of_ApiKeysCnt; int cnt = 0; rd_kafka_resp_err_t err = RD_KAFKA_RESP_ERR_NO_ERROR; int i; if (!rd_kafka_mock_cluster_ApiVersion_check( mcluster, rkbuf->rkbuf_reqhdr.ApiKey, rkbuf->rkbuf_reqhdr.ApiVersion)) err = RD_KAFKA_RESP_ERR_UNSUPPORTED_VERSION; /* ErrorCode */ rd_kafka_buf_write_i16(resp, err); /* #ApiKeys (updated later) */ /* FIXME: FLEXVER: This is a uvarint and will require more than 1 byte * if the array count exceeds 126. 
*/ if (rkbuf->rkbuf_flags & RD_KAFKA_OP_F_FLEXVER) of_ApiKeysCnt = rd_kafka_buf_write_i8(resp, 0); else of_ApiKeysCnt = rd_kafka_buf_write_i32(resp, 0); for (i = 0 ; i < RD_KAFKAP__NUM ; i++) { if (!mcluster->api_handlers[i].cb || mcluster->api_handlers[i].MaxVersion == -1) continue; if (rkbuf->rkbuf_reqhdr.ApiVersion >= 3) { if (err && i != RD_KAFKAP_ApiVersion) continue; } /* ApiKey */ rd_kafka_buf_write_i16(resp, (int16_t)i); /* MinVersion */ rd_kafka_buf_write_i16( resp, mcluster->api_handlers[i].MinVersion); /* MaxVersion */ rd_kafka_buf_write_i16( resp, mcluster->api_handlers[i].MaxVersion); cnt++; } /* FIXME: uvarint */ if (rkbuf->rkbuf_flags & RD_KAFKA_OP_F_FLEXVER) { rd_assert(cnt <= 126); rd_kafka_buf_update_i8(resp, of_ApiKeysCnt, cnt); } else rd_kafka_buf_update_i32(resp, of_ApiKeysCnt, cnt); if (rkbuf->rkbuf_reqhdr.ApiVersion >= 1) { /* ThrottletimeMs */ rd_kafka_buf_write_i32(resp, 0); } rd_kafka_mock_connection_send_response(mconn, resp); return 0; }
{ "pile_set_name": "Github" }
vc.project.guid = ${vc.project.guidFromName} vc.project.name = TestSuite vc.project.target = TestSuite vc.project.type = testsuite vc.project.pocobase = ..\\.. vc.project.platforms = Win32, x64 vc.project.configurations = debug_shared, release_shared, debug_static_mt, release_static_mt, debug_static_md, release_static_md vc.project.prototype = TestSuite_vs90.vcproj vc.project.compiler.include = ..\\..\\Foundation\\include;..\\..\\Net\\include vc.project.linker.dependencies.Win32 = ws2_32.lib iphlpapi.lib vc.project.linker.dependencies.x64 = ws2_32.lib iphlpapi.lib
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <Template originator="Michael Hutchinson" created="2010/03/10" lastModified="2010/03/10"> <!-- Template Header --> <TemplateConfiguration> <_Name>Preprocessed T4 Template</_Name> <Icon></Icon> <_Category>Text Templating</_Category> <_Description>A T4 template that will be preprocessed into a partial class.</_Description> <DefaultFilename>PreprocessedT4Template</DefaultFilename> <Language>C#</Language> </TemplateConfiguration> <!-- Template Content --> <TemplateFiles> <File name="${Name}.tt" CustomTool="TextTemplatingFilePreprocessor"> <![CDATA[<#@ template language="C#" #> <#@ assembly name="System.Core" #> <#@ import namespace="System.Linq" #> <#@ import namespace="System.Text" #> <#@ import namespace="System.Collections.Generic" #> ]]> </File> </TemplateFiles> </Template>
{ "pile_set_name": "Github" }
/* SPDX-License-Identifier: GPL-2.0-only */ /************************************************************************** * Copyright (c) 2009-2011, Intel Corporation. * All Rights Reserved. * * Authors: * Benjamin Defnet <[email protected]> * Rajesh Poornachandran <[email protected]> * **************************************************************************/ #ifndef _PSB_IRQ_H_ #define _PSB_IRQ_H_ struct drm_crtc; struct drm_device; bool sysirq_init(struct drm_device *dev); void sysirq_uninit(struct drm_device *dev); void psb_irq_preinstall(struct drm_device *dev); int psb_irq_postinstall(struct drm_device *dev); void psb_irq_uninstall(struct drm_device *dev); irqreturn_t psb_irq_handler(int irq, void *arg); int psb_irq_enable_dpst(struct drm_device *dev); int psb_irq_disable_dpst(struct drm_device *dev); void psb_irq_turn_on_dpst(struct drm_device *dev); void psb_irq_turn_off_dpst(struct drm_device *dev); int psb_enable_vblank(struct drm_crtc *crtc); void psb_disable_vblank(struct drm_crtc *crtc); u32 psb_get_vblank_counter(struct drm_crtc *crtc); int mdfld_enable_te(struct drm_device *dev, int pipe); void mdfld_disable_te(struct drm_device *dev, int pipe); #endif /* _PSB_IRQ_H_ */
{ "pile_set_name": "Github" }
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ ({ /* Test that addTab calls a callback function */ testAddTab: { test: [ function(cmp) { var callbackCalled = false; var config = {}; var values = {}; values["title"] = "title"; values["name"] = "name"; values["active"] = true; values["hidden"] = false; values["closable"] = true; config["attributes"] = values; config["descriptor"] = "markup://ui:tabItem"; var tabBar = cmp.find("tabBar"); tabBar.get("e.addTab").setParams({ tab: config, index: 0, callback: function(newTabItem) { var def = newTabItem.getDef().getDescriptor().getQualifiedName(); $A.test.assertEquals("markup://ui:tabItem", def, "The object type of the callback parameter is not correct."); callbackCalled = true; } }).fire(); $A.test.addWaitForWithFailureMessage(true, function(){ return callbackCalled; }, "Callback was not called." ); } ] } })
{ "pile_set_name": "Github" }
# Generated by camel build tools - do NOT edit this file! components=aws-eks groupId=org.apache.camel artifactId=camel-aws-eks version=3.6.0-SNAPSHOT projectName=Camel :: AWS EKS projectDescription=A Camel Amazon EKS Web Service Component
{ "pile_set_name": "Github" }
module github.com/gonvenience/ytbx go 1.12 require ( github.com/BurntSushi/toml v0.3.1 github.com/gonvenience/bunt v1.1.1 github.com/gonvenience/neat v1.2.1 github.com/gonvenience/wrap v1.1.0 github.com/gorilla/mux v1.7.3 github.com/onsi/ginkgo v1.11.0 github.com/onsi/gomega v1.8.1 github.com/spf13/cobra v0.0.5 github.com/virtuald/go-ordered-json v0.0.0-20170621173500-b18e6e673d74 gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2 )
{ "pile_set_name": "Github" }
/* * Copyright (C) 2010-2020 Fabio Cavallo (aka FHorse) * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ #include <string.h> #include "mappers.h" #include "info.h" #include "mem_map.h" #include "save_slot.h" INLINE static void ks7037_update(void); struct _ks7037 { BYTE ind; BYTE reg[8]; } ks7037; struct _ks7037tmp { BYTE *prg_7000; BYTE *prg_B000; } ks7037tmp; void map_init_KS7037(void) { EXTCL_AFTER_MAPPER_INIT(KS7037); EXTCL_CPU_WR_MEM(KS7037); EXTCL_CPU_RD_MEM(KS7037); EXTCL_SAVE_MAPPER(KS7037); mapper.internal_struct[0] = (BYTE *) &ks7037; mapper.internal_struct_size[0] = sizeof(ks7037); memset(&ks7037, 0x00, sizeof(ks7037)); info.prg.ram.banks_8k_plus = 1; info.mapper.extend_rd = TRUE; info.mapper.extend_wr = TRUE; info.mapper.ram_plus_op_controlled_by_mapper = TRUE; } void extcl_after_mapper_init_KS7037(void) { // posso farlo solo dopo il map_prg_ram_init(); ks7037_update(); } void extcl_cpu_wr_mem_KS7037(WORD address, BYTE value) { switch (address & 0xF001) { case 0x6000: case 0x6001: prg.ram_plus_8k[address & 0x0FFF] = value; return; case 0x7000: case 0x7001: case 0xA000: case 0xA001: return; case 0xB000: case 0xB001: ks7037tmp.prg_B000[address & 0x0FFF] = value; return; case 0x8000: case 0x9000: case 0xC000: case 0xD000: case 0xE000: case 0xF000: ks7037.ind = value & 0x07; return; case 0x8001: case 0x9001: case 
0xC001: case 0xD001: case 0xE001: case 0xF001: ks7037.reg[ks7037.ind] = value; ks7037_update(); return; } } BYTE extcl_cpu_rd_mem_KS7037(WORD address, BYTE openbus, UNUSED(BYTE before)) { switch (address & 0xF000) { case 0x6000: return (prg.ram_plus_8k[address & 0x0FFF]); case 0x7000: return (ks7037tmp.prg_7000[address & 0x0FFF]); case 0xA000: return (prg.rom_8k[1][address & 0x0FFF]); case 0xB000: return (ks7037tmp.prg_B000[address & 0x0FFF]); } return (openbus); } BYTE extcl_save_mapper_KS7037(BYTE mode, BYTE slot, FILE *fp) { save_slot_ele(mode, slot, ks7037.ind); save_slot_ele(mode, slot, ks7037.reg); if (mode == SAVE_SLOT_READ) { ks7037_update(); } return (EXIT_OK); } INLINE static void ks7037_update(void) { WORD value; // 0x7000 value = 0x0F; control_bank(info.prg.rom[0].max.banks_4k) ks7037tmp.prg_7000 = prg_chip_byte_pnt(0, value << 12); // 0x8000 - 0x9000 value = ks7037.reg[6]; control_bank(info.prg.rom[0].max.banks_8k) map_prg_rom_8k(1, 0, value); prg.rom_8k[0] = prg_chip_byte_pnt(prg.rom_chip[0], mapper.rom_map_to[0] << 13); // 0xA000 value = 0xFC; control_bank(info.prg.rom[0].max.banks_4k) prg.rom_8k[1] = prg_chip_byte_pnt(prg.rom_chip[0], value << 12); // 0xB000 ks7037tmp.prg_B000 = &prg.ram_plus_8k[1 << 12]; // 0xC000 - 0xD000 value = ks7037.reg[7]; control_bank(info.prg.rom[0].max.banks_8k) map_prg_rom_8k(1, 2, value); prg.rom_8k[2] = prg_chip_byte_pnt(prg.rom_chip[0], mapper.rom_map_to[2] << 13); // mirroring ntbl.bank_1k[0] = &ntbl.data[(ks7037.reg[2] & 0x01) * 0x400]; ntbl.bank_1k[1] = &ntbl.data[(ks7037.reg[4] & 0x01) * 0x400]; ntbl.bank_1k[2] = &ntbl.data[(ks7037.reg[3] & 0x01) * 0x400]; ntbl.bank_1k[3] = &ntbl.data[(ks7037.reg[5] & 0x01) * 0x400]; }
{ "pile_set_name": "Github" }
// Tencent is pleased to support the open source community by making RapidJSON available. // // Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved. // // Licensed under the MIT License (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at // // http://opensource.org/licenses/MIT // // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. // This is a C++ header-only implementation of Grisu2 algorithm from the publication: // Loitsch, Florian. "Printing floating-point numbers quickly and accurately with // integers." ACM Sigplan Notices 45.6 (2010): 233-243. #ifndef RAPIDJSON_DTOA_ #define RAPIDJSON_DTOA_ #include "itoa.h" // GetDigitsLut() #include "diyfp.h" #include "ieee754.h" RAPIDJSON_NAMESPACE_BEGIN namespace internal { #ifdef __GNUC__ RAPIDJSON_DIAG_PUSH RAPIDJSON_DIAG_OFF(effc++) #endif inline void GrisuRound(char* buffer, int len, uint64_t delta, uint64_t rest, uint64_t ten_kappa, uint64_t wp_w) { while (rest < wp_w && delta - rest >= ten_kappa && (rest + ten_kappa < wp_w || /// closer wp_w - rest > rest + ten_kappa - wp_w)) { buffer[len - 1]--; rest += ten_kappa; } } inline unsigned CountDecimalDigit32(uint32_t n) { // Simple pure C++ implementation was faster than __builtin_clz version in this situation. 
if (n < 10) return 1; if (n < 100) return 2; if (n < 1000) return 3; if (n < 10000) return 4; if (n < 100000) return 5; if (n < 1000000) return 6; if (n < 10000000) return 7; if (n < 100000000) return 8; // Will not reach 10 digits in DigitGen() //if (n < 1000000000) return 9; //return 10; return 9; } inline void DigitGen(const DiyFp& W, const DiyFp& Mp, uint64_t delta, char* buffer, int* len, int* K) { static const uint32_t kPow10[] = { 1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000 }; const DiyFp one(uint64_t(1) << -Mp.e, Mp.e); const DiyFp wp_w = Mp - W; uint32_t p1 = static_cast<uint32_t>(Mp.f >> -one.e); uint64_t p2 = Mp.f & (one.f - 1); int kappa = CountDecimalDigit32(p1); // kappa in [0, 9] *len = 0; while (kappa > 0) { uint32_t d = 0; switch (kappa) { case 9: d = p1 / 100000000; p1 %= 100000000; break; case 8: d = p1 / 10000000; p1 %= 10000000; break; case 7: d = p1 / 1000000; p1 %= 1000000; break; case 6: d = p1 / 100000; p1 %= 100000; break; case 5: d = p1 / 10000; p1 %= 10000; break; case 4: d = p1 / 1000; p1 %= 1000; break; case 3: d = p1 / 100; p1 %= 100; break; case 2: d = p1 / 10; p1 %= 10; break; case 1: d = p1; p1 = 0; break; default:; } if (d || *len) buffer[(*len)++] = static_cast<char>('0' + static_cast<char>(d)); kappa--; uint64_t tmp = (static_cast<uint64_t>(p1) << -one.e) + p2; if (tmp <= delta) { *K += kappa; GrisuRound(buffer, *len, delta, tmp, static_cast<uint64_t>(kPow10[kappa]) << -one.e, wp_w.f); return; } } // kappa = 0 for (;;) { p2 *= 10; delta *= 10; char d = static_cast<char>(p2 >> -one.e); if (d || *len) buffer[(*len)++] = static_cast<char>('0' + d); p2 &= one.f - 1; kappa--; if (p2 < delta) { *K += kappa; GrisuRound(buffer, *len, delta, p2, one.f, wp_w.f * kPow10[-kappa]); return; } } } inline void Grisu2(double value, char* buffer, int* length, int* K) { const DiyFp v(value); DiyFp w_m, w_p; v.NormalizedBoundaries(&w_m, &w_p); const DiyFp c_mk = GetCachedPower(w_p.e, K); const DiyFp W = v.Normalize() * 
c_mk; DiyFp Wp = w_p * c_mk; DiyFp Wm = w_m * c_mk; Wm.f++; Wp.f--; DigitGen(W, Wp, Wp.f - Wm.f, buffer, length, K); } inline char* WriteExponent(int K, char* buffer) { if (K < 0) { *buffer++ = '-'; K = -K; } if (K >= 100) { *buffer++ = static_cast<char>('0' + static_cast<char>(K / 100)); K %= 100; const char* d = GetDigitsLut() + K * 2; *buffer++ = d[0]; *buffer++ = d[1]; } else if (K >= 10) { const char* d = GetDigitsLut() + K * 2; *buffer++ = d[0]; *buffer++ = d[1]; } else *buffer++ = static_cast<char>('0' + static_cast<char>(K)); return buffer; } inline char* Prettify(char* buffer, int length, int k) { const int kk = length + k; // 10^(kk-1) <= v < 10^kk if (length <= kk && kk <= 21) { // 1234e7 -> 12340000000 for (int i = length; i < kk; i++) buffer[i] = '0'; buffer[kk] = '.'; buffer[kk + 1] = '0'; return &buffer[kk + 2]; } else if (0 < kk && kk <= 21) { // 1234e-2 -> 12.34 std::memmove(&buffer[kk + 1], &buffer[kk], length - kk); buffer[kk] = '.'; return &buffer[length + 1]; } else if (-6 < kk && kk <= 0) { // 1234e-6 -> 0.001234 const int offset = 2 - kk; std::memmove(&buffer[offset], &buffer[0], length); buffer[0] = '0'; buffer[1] = '.'; for (int i = 2; i < offset; i++) buffer[i] = '0'; return &buffer[length + offset]; } else if (length == 1) { // 1e30 buffer[1] = 'e'; return WriteExponent(kk - 1, &buffer[2]); } else { // 1234e30 -> 1.234e33 std::memmove(&buffer[2], &buffer[1], length - 1); buffer[1] = '.'; buffer[length + 1] = 'e'; return WriteExponent(kk - 1, &buffer[0 + length + 2]); } } inline char* dtoa(double value, char* buffer) { Double d(value); if (d.IsZero()) { if (d.Sign()) *buffer++ = '-'; // -0.0, Issue #289 buffer[0] = '0'; buffer[1] = '.'; buffer[2] = '0'; return &buffer[3]; } else { if (value < 0) { *buffer++ = '-'; value = -value; } int length, K; Grisu2(value, buffer, &length, &K); return Prettify(buffer, length, K); } } #ifdef __GNUC__ RAPIDJSON_DIAG_POP #endif } // namespace internal RAPIDJSON_NAMESPACE_END #endif // RAPIDJSON_DTOA_
{ "pile_set_name": "Github" }
import filters from '../../src/store/filters'; import { SET_FILTERS, SET_FILTER_VALUE } from "../../src/store/filters"; import * as querystringUtils from "sharp/util/querystring"; describe('store filters', () => { beforeEach(() => { jest.restoreAllMocks() }); test('state match snapshot', ()=>{ expect(filters.state()).toMatchSnapshot(); }); describe('mutations', () => { test('SET_FILTERS', () => { const state = {}; const testFilters = []; filters.mutations[SET_FILTERS](state, testFilters); expect(state.filters).toBe(testFilters); }); test('SET_FILTER_VALUE', () => { const state = { values: {} }; filters.mutations[SET_FILTER_VALUE](state, { key:'prop', value:'value' }); expect(state.values.prop).toBe('value'); }); }); describe('getters', () => { test('value', ()=>{ const state = { values: { prop: 'value' } }; expect(filters.getters.value(state)('prop')).toBe('value'); }); test('filters', ()=>{ expect(filters.getters.filters({ filter: null })).toEqual([]); expect(filters.getters.filters({ filters: [{}] })).toEqual([{}]); }); test('filter', ()=>{ expect(filters.getters.filter({ filters: null })('name')).toEqual(undefined); expect( filters.getters.filter({ filters: [{ key:'name' }], })('name') ).toEqual({ key:'name'}); }); test('defaultValue', ()=>{ expect(filters.getters.defaultValue()(null)).toBeUndefined(); expect(filters.getters.defaultValue()({ default:'default' })).toEqual('default'); }); test('isDateRange', ()=>{ expect(filters.getters.isDateRange()(null)).toBe(false); expect(filters.getters.isDateRange()({ type:'daterange' })).toBe(true); }); test('filterQueryKey', ()=>{ expect(filters.getters.filterQueryKey()('key')).toBe('filter_key'); }); test('getQueryParams', ()=>{ const state = { }; const getters = { filterQueryKey: jest.fn(key => `filter_${key}`), filter: jest.fn(key => ({ key })), serializeValue: jest.fn(({ filter, value }) => value), }; expect(filters.getters.getQueryParams(state, getters)({ })).toEqual({}); expect(filters.getters.getQueryParams(state, 
getters)({ type: 'aaa', name: 'bbb', })).toEqual({ 'filter_type': 'aaa', 'filter_name': 'bbb' }); expect(getters.serializeValue).toHaveBeenCalledWith({ filter:{ key:'type' }, value:'aaa' }); expect(getters.serializeValue).toHaveBeenCalledWith({ filter:{ key:'name' }, value:'bbb' }); }); test('getValuesFromQuery', ()=>{ expect(filters.getters.getValuesFromQuery()(null)).toEqual({ }); expect(filters.getters.getValuesFromQuery()({ filter_type: 'aaa', filter_name: 'bbb', custom: 'ccc' })) .toEqual({ type:'aaa', name: 'bbb' }) }); test('resolveFilterValue', ()=>{ const state = {}; const getters = { defaultValue: jest.fn(()=>'defaultValue'), isDateRange: jest.fn(()=>false), }; const resolveFilterValue = (...args) => filters.getters.resolveFilterValue(state, getters)(...args); expect(resolveFilterValue({ filter: { key:'filter' }, value: undefined })).toEqual('defaultValue'); expect(getters.defaultValue).toHaveBeenCalledWith({ key:'filter' }); expect(resolveFilterValue({ filter: {}, value: null })).toEqual('defaultValue'); expect(resolveFilterValue({ filter:{ multiple: true }, value: 3 })).toEqual([3]); expect(resolveFilterValue({ filter: {}, value: 'test' })).toEqual('test'); expect(resolveFilterValue({ filter: { multiple: true }, value: [3] })).toEqual([3]); jest.spyOn(querystringUtils, 'parseRange') .mockImplementation(() => 'parsedRange'); getters.isDateRange.mockReturnValue(true); expect(resolveFilterValue({ filter: {}, value: '2019-06-21..2019-06-24', })).toEqual('parsedRange'); expect(querystringUtils.parseRange) .toHaveBeenCalledWith('2019-06-21..2019-06-24'); }); test('serializeValue', ()=>{ const state = {}; const getters = { isDateRange: jest.fn(()=>false), }; expect(filters.getters.serializeValue(state, getters)({ filter: {}, value: 'val' })).toEqual('val'); jest.spyOn(querystringUtils, 'serializeRange') .mockImplementation(() => 'serializedRange'); getters.isDateRange.mockReturnValue(true); expect(filters.getters.serializeValue(state, getters)({ filter: {}, 
value: { start: 'start', end: 'end', } })).toEqual('serializedRange'); expect(querystringUtils.serializeRange).toHaveBeenCalledWith({ start:'start', end: 'end' }); }); test('nextValues', ()=>{ const state = { values: { type: 'aa' } }; expect(filters.getters.nextValues(state)({ filter: { key:'filter' }, value: 1 })) .toEqual({ type: 'aa', filter: 1 }); expect(filters.getters.nextValues(state)({ filter: { key:'filter', master: true }, value: 1 })) .toEqual({ type: null, filter: 1 }); }); test('nextQuery', ()=>{ const getters = { getQueryParams: jest.fn(()=>'query params'), nextValues: jest.fn(()=>'next values') }; expect(filters.getters.nextQuery(null, getters)({ filter:{ key:'filter' }, value:1 })).toEqual('query params'); expect(getters.getQueryParams).toHaveBeenCalledWith('next values'); expect(getters.nextValues).toHaveBeenCalledWith({ filter:{ key:'filter' }, value:1 }); }); }); describe('actions', () => { test('update', async () => { const commit = jest.fn(); const dispatch = jest.fn(); const testFilters = [{ key:'prop1' }, { key:'prop2' }]; const testValues = { prop1:'aaa', prop2:'bbb' }; filters.actions.update({ commit, dispatch }, { filters: testFilters, values: testValues }); expect(commit).toHaveBeenCalledWith(SET_FILTERS, testFilters); expect(dispatch).toHaveBeenCalledWith('setFilterValue', { filter: { key:'prop1' }, value: 'aaa' }); expect(dispatch).toHaveBeenCalledWith('setFilterValue', { filter: { key:'prop2' }, value: 'bbb' }); dispatch.mockClear(); expect(() => { filters.actions.update({ commit, dispatch }, { filters: null, values: null }); }).not.toThrow(); expect(commit).toHaveBeenCalledWith(SET_FILTERS, null); expect(dispatch).not.toHaveBeenCalled(); }); test('setFilterValue', ()=>{ const commit = jest.fn(); const getters = { resolveFilterValue: jest.fn(()=>'resolvedValue') }; const filter = { key:'filter' }; const value = 'value'; filters.actions.setFilterValue({ commit, getters }, { filter, value }); 
expect(commit).toHaveBeenCalledWith(SET_FILTER_VALUE, { key: 'filter', value: 'resolvedValue' }); expect(getters.resolveFilterValue).toHaveBeenCalledWith({ filter, value }); }); }); });
{ "pile_set_name": "Github" }
# @liaison/store A base class for implementing Liaison stores. ## Installation ``` npm install @liaison/store ``` ## License MIT
{ "pile_set_name": "Github" }
AWS SDK for Go Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. Copyright 2014-2015 Stripe, Inc.
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Persistent data formats for the committers. * * All of these formats share a base class of * {@link org.apache.hadoop.fs.s3a.commit.files.PersistentCommitData}; * the subclasses record * <ol> * <li>The content of a single pending commit * (used by the Magic committer).</li> * <li>The list of all the files uploaded by a staging committer.</li> * <li>The summary information saved in the {@code _SUCCESS} file.</li> * </ol> * * There are no guarantees of stability between versions; these are internal * structures. * * The {@link org.apache.hadoop.fs.s3a.commit.files.SuccessData} file is * the one visible to callers after a job completes; it is an unstable * manifest intended for testing only. * */ @InterfaceAudience.Private @InterfaceStability.Unstable package org.apache.hadoop.fs.s3a.commit.files; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability;
{ "pile_set_name": "Github" }
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8 // basic_iserializer.cpp: // (C) Copyright 2002 Robert Ramey - http://www.rrsd.com . // Use, modification and distribution is subject to the Boost Software // License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org for updates, documentation, and revision history. #include <cstddef> // NULL #define BOOST_ARCHIVE_SOURCE #include <boost/serialization/config.hpp> #include <boost/archive/detail/basic_iserializer.hpp> namespace boost { namespace archive { namespace detail { BOOST_ARCHIVE_DECL basic_iserializer::basic_iserializer( const boost::serialization::extended_type_info & eti ) : basic_serializer(eti), m_bpis(NULL) {} BOOST_ARCHIVE_DECL basic_iserializer::~basic_iserializer(){} } // namespace detail } // namespace archive } // namespace boost
{ "pile_set_name": "Github" }
varReference: - path: metadata/name kind: Certificate - path: metadata/annotations/getambassador.io\/config kind: Service - path: spec/dnsNames kind: Certificate - path: spec/issuerRef/name kind: Certificate - path: metadata/annotations/kubernetes.io\/ingress.global-static-ip-name kind: Ingress - path: spec/commonName kind: Certificate - path: spec/secretName kind: Certificate - path: spec/acme/config/domains kind: Certificate - path: spec/acme/config/http01/ingress kind: Certificate - path: metadata/name kind: Ingress - path: metadata/annotations/certmanager.k8s.io\/issuer kind: Ingress - path: metadata/name kind: CloudEndpoint - path: spec/project kind: CloudEndpoint - path: spec/targetIngress/name kind: CloudEndpoint - path: spec/targetIngress/namespace kind: CloudEndpoint - path: spec/domains kind: ManagedCertificate
{ "pile_set_name": "Github" }
{ "name": "react-swipeable-views-utils", "version": "0.14.0-alpha.0", "description": "react-swipeable-views utility modules", "main": "lib/index.js", "scripts": { "prepublish": "pkgfiles" }, "repository": { "type": "git", "url": "https://github.com/oliviertassinari/react-swipeable-views.git" }, "author": "Olivier Tassinari <[email protected]> (https://github.com/oliviertassinari)", "bugs": { "url": "https://github.com/oliviertassinari/react-swipeable-views/issues" }, "dependencies": { "@babel/runtime": "7.0.0", "keycode": "^2.1.7", "prop-types": "^15.6.0", "react-event-listener": "^0.6.0", "react-swipeable-views-core": "^0.14.0-alpha.0", "shallow-equal": "^1.2.1" }, "devDependencies": { "pkgfiles": "^2.3.2" }, "license": "MIT", "engines": { "node": ">=6.0.0" } }
{ "pile_set_name": "Github" }
/** * Orko - Copyright © 2018-2019 Graham Crockford * * <p>This program is free software: you can redistribute it and/or modify it under the terms of the * GNU Affero General Public License as published by the Free Software Foundation, either version 3 * of the License, or (at your option) any later version. * * <p>This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * <p>You should have received a copy of the GNU Affero General Public License along with this * program. If not, see <http://www.gnu.org/licenses/>. */ package com.gruelbox.orko.app.monolith; import com.gruelbox.orko.auth.Hasher; import io.dropwizard.cli.Cli; import io.dropwizard.cli.Command; import io.dropwizard.setup.Bootstrap; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; class HashCommand extends Command { private static final String SALT_PARM = "salt"; private static final String VALUE_PARM = "value"; HashCommand() { super("hash", "Hashes the specified value using a provided salt"); } @Override public void run(Bootstrap<?> bootstrap, Namespace namespace) throws Exception { Hasher hasher = new Hasher(); String salt = namespace.getString(SALT_PARM); if (salt == null) { salt = hasher.salt(); System.out.println("Salt used: " + salt); System.out.println("Hashed result: " + hasher.hash(namespace.getString(VALUE_PARM), salt)); } else { System.out.print(hasher.hash(namespace.getString(VALUE_PARM), salt)); } } @Override public void configure(Subparser subparser) { subparser .addArgument("--" + SALT_PARM, "-s") .help("An encryption salt. 
If not provided, a new one will be used and returned."); subparser.addArgument(VALUE_PARM).required(true).help("The value for which to create a hash."); } @Override public void onError(Cli cli, Namespace namespace, Throwable e) { cli.getStdErr().println(e.getMessage()); } }
{ "pile_set_name": "Github" }