Dataset columns:
repo_name: string, lengths 4-116
path: string, lengths 3-942
size: string, lengths 1-7
content: string, lengths 3-1.05M
license: string, 15 classes
bryson/packer
builder/virtualbox/common/vboxbundle_config_test.go
638
package common import ( "reflect" "testing" ) func TestVBoxBundleConfigPrepare_VBoxBundle(t *testing.T) { // Test with empty c := new(VBoxBundleConfig) errs := c.Prepare(testConfigTemplate(t)) if len(errs) > 0 { t.Fatalf("err: %#v", errs) } if !reflect.DeepEqual(*c, VBoxBundleConfig{BundleISO: false}) { t.Fatalf("bad: %#v", c) } // Test with a good one c = new(VBoxBundleConfig) c.BundleISO = true errs = c.Prepare(testConfigTemplate(t)) if len(errs) > 0 { t.Fatalf("err: %#v", errs) } expected := VBoxBundleConfig{ BundleISO: true, } if !reflect.DeepEqual(*c, expected) { t.Fatalf("bad: %#v", c) } }
mpl-2.0
clientIO/joint
demo/links/custom-links.html
751
<!DOCTYPE html> <html> <head> <meta charset="utf8"/> <title>Custom Link</title> <link rel="stylesheet" type="text/css" href="../../build/joint.css" /> <style> #paper { display: inline-block; border: 1px solid gray; } </style> </head> <body> <div id="paper"></div> <!-- Dependencies: --> <script src="../../node_modules/jquery/dist/jquery.js"></script> <script src="../../node_modules/lodash/lodash.js"></script> <script src="../../node_modules/backbone/backbone.js"></script> <script src="../../build/joint.js"></script> <script src="./src/custom-links.js"></script> </body> </html>
mpl-2.0
adevress/armadillo
include/armadillo_bits/operator_cube_minus.hpp
4562
// Copyright (C) 2008-2015 Conrad Sanderson // Copyright (C) 2008-2015 NICTA (www.nicta.com.au) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. //! \addtogroup operator_cube_minus //! @{ //! unary - template<typename T1> arma_inline const eOpCube<T1, eop_neg> operator- ( const BaseCube<typename T1::elem_type,T1>& X ) { arma_extra_debug_sigprint(); return eOpCube<T1, eop_neg>(X.get_ref()); } //! cancellation of two consecutive negations: -(-T1) template<typename T1> arma_inline const typename ProxyCube<T1>::stored_type& operator- ( const eOpCube<T1, eop_neg>& X ) { arma_extra_debug_sigprint(); return X.P.Q; } //! BaseCube - scalar template<typename T1> arma_inline const eOpCube<T1, eop_scalar_minus_post> operator- ( const BaseCube<typename T1::elem_type,T1>& X, const typename T1::elem_type k ) { arma_extra_debug_sigprint(); return eOpCube<T1, eop_scalar_minus_post>(X.get_ref(), k); } //! scalar - BaseCube template<typename T1> arma_inline const eOpCube<T1, eop_scalar_minus_pre> operator- ( const typename T1::elem_type k, const BaseCube<typename T1::elem_type,T1>& X ) { arma_extra_debug_sigprint(); return eOpCube<T1, eop_scalar_minus_pre>(X.get_ref(), k); } //! complex scalar - non-complex BaseCube (experimental) template<typename T1> arma_inline const mtOpCube<typename std::complex<typename T1::pod_type>, T1, op_cx_scalar_minus_pre> operator- ( const std::complex<typename T1::pod_type>& k, const BaseCube<typename T1::pod_type, T1>& X ) { arma_extra_debug_sigprint(); return mtOpCube<typename std::complex<typename T1::pod_type>, T1, op_cx_scalar_minus_pre>('j', X.get_ref(), k); } //! non-complex BaseCube - complex scalar (experimental) template<typename T1> arma_inline const mtOpCube<typename std::complex<typename T1::pod_type>, T1, op_cx_scalar_minus_post> operator- ( const BaseCube<typename T1::pod_type, T1>& X, const std::complex<typename T1::pod_type>& k ) { arma_extra_debug_sigprint(); return mtOpCube<typename std::complex<typename T1::pod_type>, T1, op_cx_scalar_minus_post>('j', X.get_ref(), k); } //! subtraction of BaseCube objects with same element type template<typename T1, typename T2> arma_inline const eGlueCube<T1, T2, eglue_minus> operator- ( const BaseCube<typename T1::elem_type,T1>& X, const BaseCube<typename T1::elem_type,T2>& Y ) { arma_extra_debug_sigprint(); return eGlueCube<T1, T2, eglue_minus>(X.get_ref(), Y.get_ref()); } //! 
subtraction of BaseCube objects with different element types template<typename T1, typename T2> inline const mtGlueCube<typename promote_type<typename T1::elem_type, typename T2::elem_type>::result, T1, T2, glue_mixed_minus> operator- ( const BaseCube< typename force_different_type<typename T1::elem_type, typename T2::elem_type>::T1_result, T1>& X, const BaseCube< typename force_different_type<typename T1::elem_type, typename T2::elem_type>::T2_result, T2>& Y ) { arma_extra_debug_sigprint(); typedef typename T1::elem_type eT1; typedef typename T2::elem_type eT2; typedef typename promote_type<eT1,eT2>::result out_eT; promote_type<eT1,eT2>::check(); return mtGlueCube<out_eT, T1, T2, glue_mixed_minus>( X.get_ref(), Y.get_ref() ); } template<typename eT, typename T2> arma_inline Cube<eT> operator- ( const subview_cube_each1<eT>& X, const Base<eT,T2>& Y ) { arma_extra_debug_sigprint(); return subview_cube_each1_aux::operator_minus(X, Y.get_ref()); } template<typename T1, typename eT> arma_inline Cube<eT> operator- ( const Base<eT,T1>& X, const subview_cube_each1<eT>& Y ) { arma_extra_debug_sigprint(); return subview_cube_each1_aux::operator_minus(X.get_ref(), Y); } template<typename eT, typename TB, typename T2> arma_inline Cube<eT> operator- ( const subview_cube_each2<eT,TB>& X, const Base<eT,T2>& Y ) { arma_extra_debug_sigprint(); return subview_cube_each2_aux::operator_minus(X, Y.get_ref()); } template<typename T1, typename eT, typename TB> arma_inline Cube<eT> operator- ( const Base<eT,T1>& X, const subview_cube_each2<eT,TB>& Y ) { arma_extra_debug_sigprint(); return subview_cube_each2_aux::operator_minus(X.get_ref(), Y); } //! @}
mpl-2.0
Yukarumya/Yukarum-Redfoxes
layout/reftests/columns/margin-collapsing-bug616722-1.html
1638
<!DOCTYPE html> <html><head> <meta http-equiv="content-type" content="text/html; charset=UTF-8"> <title>616722-1</title> <style type="text/css"> body,html { margin:0;padding: 10px;} p { margin: 3px 0; background:pink; } div { background-color:lime; margin: 0em; border: 1px solid black; } .no-border { border-style: none; } .columns { -webkit-column-count: 1; -moz-column-count: 1; column-count: 1; } </style> </head> <body> <div class="columns"> <p style="margin-top:10px">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns"> <p style="margin-bottom:10px">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns"> <p style="margin:10px 0">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns"> <p style="margin:20px 0 10px 0">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns"> <p style="margin:20px 0 10px 0"></p> </div> <div class="columns no-border"> <p style="margin-top:10px">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns no-border"> <p style="margin-bottom:10px">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns no-border"> <p style="margin:10px 0">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns no-border"> <p style="margin:20px 0 10px 0">The quick brown fox jumps over the lazy dog.</p> </div> <div class="columns no-border"> <p style="margin:20px 0 10px 0"></p> </div> </body></html>
mpl-2.0
mgax/czl-scrape
munca/src/main/java/ro/code4/czl/scrape/client/representation/PublicationRepresentation.java
4287
package ro.code4.czl.scrape.client.representation; import java.util.List; /** * @author Ionut-Maxim Margelatu ([email protected]) */ public class PublicationRepresentation { private String identifier; private String title; private String type; private String institution; private String date; private String description; private int feedback_days; private ContactRepresentation contact; private List<DocumentRepresentation> documents; public String getIdentifier() { return identifier; } public void setIdentifier(String identifier) { this.identifier = identifier; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getType() { return type; } public void setType(String type) { this.type = type; } public String getInstitution() { return institution; } public void setInstitution(String institution) { this.institution = institution; } public String getDate() { return date; } public void setDate(String date) { this.date = date; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public int getFeedback_days() { return feedback_days; } public void setFeedback_days(int feedback_days) { this.feedback_days = feedback_days; } public ContactRepresentation getContact() { return contact; } public void setContact(ContactRepresentation contact) { this.contact = contact; } public List<DocumentRepresentation> getDocuments() { return documents; } public void setDocuments(List<DocumentRepresentation> documents) { this.documents = documents; } public static final class PublicationRepresentationBuilder { private String identifier; private String title; private String type; private String institution; private String date; private String description; private int feedback_days; private ContactRepresentation contact; private List<DocumentRepresentation> documents; private PublicationRepresentationBuilder() { } public static PublicationRepresentationBuilder aPublicationRepresentation() { return new PublicationRepresentationBuilder(); } public PublicationRepresentationBuilder withIdentifier(String identifier) { this.identifier = identifier; return this; } public PublicationRepresentationBuilder withTitle(String title) { this.title = title; return this; } public PublicationRepresentationBuilder withType(String type) { this.type = type; return this; } public PublicationRepresentationBuilder withInstitution(String institution) { this.institution = institution; return this; } public PublicationRepresentationBuilder withDate(String date) { this.date = date; return this; } public PublicationRepresentationBuilder withDescription(String description) { this.description = description; return this; } public PublicationRepresentationBuilder withFeedback_days(int feedback_days) { this.feedback_days = feedback_days; return this; } public PublicationRepresentationBuilder withContact(ContactRepresentation contact) { this.contact = contact; return this; } public PublicationRepresentationBuilder withDocuments(List<DocumentRepresentation> documents) { this.documents = documents; return this; } public PublicationRepresentation build() { PublicationRepresentation publicationRepresentation = new PublicationRepresentation(); publicationRepresentation.setIdentifier(identifier); publicationRepresentation.setTitle(title); publicationRepresentation.setType(type); publicationRepresentation.setInstitution(institution); publicationRepresentation.setDate(date); 
publicationRepresentation.setDescription(description); publicationRepresentation.setFeedback_days(feedback_days); publicationRepresentation.setContact(contact); publicationRepresentation.setDocuments(documents); return publicationRepresentation; } } }
mpl-2.0
sunclx/anki
anki/latex.py
4659
# -*- coding: utf-8 -*- # Copyright: Damien Elmes <[email protected]> # License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import re, os, shutil, cgi from anki.utils import checksum, call, namedtmp, tmpdir, isMac, stripHTML from anki.hooks import addHook from anki.lang import _ # if you modify these in an add-on, you must make sure to take tmp.tex as the # input, and output tmp.png as the output file latexCmds = [ ["latex", "-interaction=nonstopmode", "tmp.tex"], ["dvipng", "-D", "200", "-T", "tight", "tmp.dvi", "-o", "tmp.png"] # ["dvipng", "-D", "600", "-T", "tight", "-bg", "Transparent", "tmp.dvi", "-o", "tmp.png"] ] build = True # if off, use existing media but don't create new regexps = { "standard": re.compile(r"\[latex\](.+?)\[/latex\]", re.DOTALL | re.IGNORECASE), "expression": re.compile(r"\[\$\](.+?)\[/\$\]", re.DOTALL | re.IGNORECASE), "math": re.compile(r"\[\$\$\](.+?)\[/\$\$\]", re.DOTALL | re.IGNORECASE), } # add standard tex install location to osx if isMac: os.environ['PATH'] += ":/usr/texbin" def stripLatex(text): for match in regexps['standard'].finditer(text): text = text.replace(match.group(), "") for match in regexps['expression'].finditer(text): text = text.replace(match.group(), "") for match in regexps['math'].finditer(text): text = text.replace(match.group(), "") return text def mungeQA(html, type, fields, model, data, col): "Convert TEXT with embedded latex tags to image links." for match in regexps['standard'].finditer(html): html = html.replace(match.group(), _imgLink(col, match.group(1), model)) for match in regexps['expression'].finditer(html): html = html.replace(match.group(), _imgLink( col, "$" + match.group(1) + "$", model)) for match in regexps['math'].finditer(html): html = html.replace(match.group(), _imgLink( col, "\\begin{displaymath}" + match.group(1) + "\\end{displaymath}", model)) return html def _imgLink(col, latex, model): "Return an img link for LATEX, creating if necesssary." txt = _latexFromHtml(col, latex) fname = "latex-%s.png" % checksum(txt.encode("utf8")) link = '<img class=latex src="%s">' % fname if os.path.exists(fname): return link elif not build: return u"[latex]%s[/latex]" % latex else: err = _buildImg(col, txt, fname, model) if err: return err else: return link def _latexFromHtml(col, latex): "Convert entities and fix newlines." latex = re.sub("<br( /)?>|<div>", "\n", latex) latex = stripHTML(latex) return latex def _buildImg(col, latex, fname, model): # add header/footer & convert to utf8 latex = (model["latexPre"] + "\n" + latex + "\n" + model["latexPost"]) latex = latex.encode("utf8") # it's only really secure if run in a jail, but these are the most common tmplatex = latex.replace("\\includegraphics", "") for bad in ("\\write18", "\\readline", "\\input", "\\include", "\\catcode", "\\openout", "\\write", "\\loop", "\\def", "\\shipout"): # don't mind if the sequence is only part of a command bad_re = "\\" + bad + "[^a-zA-Z]" if re.search(bad_re, tmplatex): return _("""\ For security reasons, '%s' is not allowed on cards. 
You can still use \ it by placing the command in a different package, and importing that \ package in the LaTeX header instead.""") % bad # write into a temp file log = open(namedtmp("latex_log.txt"), "w") texpath = namedtmp("tmp.tex") texfile = file(texpath, "w") texfile.write(latex) texfile.close() mdir = col.media.dir() oldcwd = os.getcwd() png = namedtmp("tmp.png") try: # generate png os.chdir(tmpdir()) for latexCmd in latexCmds: if call(latexCmd, stdout=log, stderr=log): return _errMsg(latexCmd[0], texpath) # add to media shutil.copyfile(png, os.path.join(mdir, fname)) return finally: os.chdir(oldcwd) def _errMsg(type, texpath): msg = (_("Error executing %s.") % type) + "<br>" msg += (_("Generated file: %s") % texpath) + "<br>" try: log = open(namedtmp("latex_log.txt", rm=False)).read() if not log: raise Exception() msg += "<small><pre>" + cgi.escape(log) + "</pre></small>" except: msg += _("Have you installed latex and dvipng?") pass return msg # setup q/a filter addHook("mungeQA", mungeQA)
agpl-3.0
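The comment at the top of anki/latex.py above says an add-on may replace `latexCmds` as long as the replacement still reads `tmp.tex` and writes `tmp.png`. A minimal sketch of such an override, reusing the higher-resolution transparent-background dvipng invocation that already appears commented out in the file; the add-on context itself is assumed, not shown in the source:

```python
# Hypothetical add-on snippet (not part of latex.py itself): replace the
# module-level command list with the higher-resolution, transparent-background
# dvipng call that appears commented out in the file. The contract from the
# comment still holds: read tmp.tex, write tmp.png.
import anki.latex

anki.latex.latexCmds = [
    ["latex", "-interaction=nonstopmode", "tmp.tex"],
    ["dvipng", "-D", "600", "-T", "tight", "-bg", "Transparent",
     "tmp.dvi", "-o", "tmp.png"],
]

# anki.latex.build = False  # optional: reuse existing images, never build new ones
```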
Asqatasun/Asqatasun
rules/rules-accessiweb2.2/src/test/resources/testcases/accessiweb22/Aw22Rule06042/AW22.Test.06.04.02-2Failed-02.html
900
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en"> <head> <meta http-equiv="Content-type" content="text/html; charset=utf-8" /> <title>AW22 Test.6.4.2 Failed 02</title> </head> <body> <div> <p class="test-detail"> <span>Does each identical link of type image have the same purpose and target?</span> </p> <a href="my-link.html" > <img src="image.jpg" alt="CLIQUEZ ICI "/> </a> <a href="link2.htm"> <img src="image.jpg" alt="cliquez ici"/> </a> <p class="test-explanation">Failed : The page contains two identical links without context and with different target</p> </div> </body> </html>
agpl-3.0
prusa3d/Slic3r
src/libigl/igl/average_onto_faces.cpp
1017
// This file is part of libigl, a simple c++ geometry processing library. // // Copyright (C) 2013 Alec Jacobson <[email protected]> // // This Source Code Form is subject to the terms of the Mozilla Public License // v. 2.0. If a copy of the MPL was not distributed with this file, You can // obtain one at http://mozilla.org/MPL/2.0/. #include "average_onto_faces.h" template <typename T, typename I> IGL_INLINE void igl::average_onto_faces(const Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> &V, const Eigen::Matrix<I, Eigen::Dynamic, Eigen::Dynamic> &F, const Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> &S, Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic> &SF) { SF = Eigen::Matrix<T, Eigen::Dynamic, Eigen::Dynamic>::Zero(F.rows(),S.cols()); for (int i = 0; i <F.rows(); ++i) for (int j = 0; j<F.cols(); ++j) SF.row(i) += S.row(F(i,j)); SF.array() /= F.cols(); }; #ifdef IGL_STATIC_LIBRARY // Explicit template instantiation #endif
agpl-3.0
ll22145683/cdw11_2
static/content/a/g1/40223153-20160429.md
1006
Title: 40223153 cdw11 report Category:ag1_40223153 Tags: 40223153, cdw11 Author: 40223153 Kick-off of the cdw11 collaborative project <!-- PELICAN_END_SUMMARY --> Pelican blog location: <a href="http://cdw11-ag100.rhcloud.com/static/">http://cdw11-ag100.rhcloud.com/static/</a> Group program: <a href="http://cdw11-ag100.rhcloud.com/option">http://cdw11-ag100.rhcloud.com/option</a> fileuploadform: <a href="http://cdw11-ag100.rhcloud.com/fileuploadform">http://cdw11-ag100.rhcloud.com/fileuploadform</a> imageuploadform: <a href="http://cdw11-ag100.rhcloud.com/imageuploadform">http://cdw11-ag100.rhcloud.com/imageuploadform</a> Each group should complete the assembly drawings for the following 3 part layouts before the end of the CDW11 session: <img src="http://cdw11-ag100.rhcloud.com/images/cdw11_4a.png" width="100%" /> <img src="http://cdw11-ag100.rhcloud.com/images/cdw11_badc.png" width="100%" /> <img src="http://cdw11-ag100.rhcloud.com/images/cdw11_abcd.png" width="100%" />
agpl-3.0
ppy/osu-web
resources/lang/it/livestreams.php
759
<?php // Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the GNU Affero General Public License v3.0. // See the LICENCE file in the repository root for full licence text. return [ 'promote' => [ 'pin' => 'Sei sicuro di voler promuovere questa trasmissione in diretta?', 'unpin' => "Sei sicuro di voler rimuovere la promozione di questa trasmissione in diretta?", ], 'top-headers' => [ 'headline' => 'Livestream', 'description' => 'I dati vengono reperiti da twitch.tv ogni cinque minuti. Sei libero di poter avviare una diretta e di comparire sulla lista! Per maggiori informazioni su come iniziare, controlla :link.', 'link' => 'la pagina della wiki riguardante le trasmissioni', ], ];
agpl-3.0
matematikk-mooc/canvas-lms
db/migrate/20160616151853_moderated_grading_foreign_key_indexes.rb
933
class ModeratedGradingForeignKeyIndexes < ActiveRecord::Migration[4.2] tag :postdeploy disable_ddl_transaction! def change add_index :submission_comments, :provisional_grade_id, where: "provisional_grade_id IS NOT NULL", algorithm: :concurrently add_index :moderated_grading_provisional_grades, :source_provisional_grade_id, name: 'index_provisional_grades_on_source_grade', where: "source_provisional_grade_id IS NOT NULL", algorithm: :concurrently add_index :moderated_grading_selections, :selected_provisional_grade_id, name: 'index_moderated_grading_selections_on_selected_grade', where: "selected_provisional_grade_id IS NOT NULL", algorithm: :concurrently # this index is useless; the index on [assignment_id, student_id] already covers it remove_index :moderated_grading_selections, column: :assignment_id add_index :moderated_grading_selections, :student_id, algorithm: :concurrently end end
agpl-3.0
curoverse/l7g
tools/cglf-tools/dlug/.save/cp2/dlug.go
9203
package dlug // 0,1,2,3,4,5,6,7,8 var ByteLen []int = []int{1,2,3,4,5,6,8,9,17} var PfxBitLen []int = []int{1,2,3,5,5,5,8,8,8} var BitLen []uint = []uint{7,14,21,27,35,43,56,64,128} var Pfx []byte = []byte{0,0x80,0xc0,0xe0,0xe8,0xf0,0xf8,0xf9,0xfa,0xff} func Check(d []byte) bool { if len(d)==0 { return false } idx := GetDlugIndex(d) if idx<0 { return false } if idx>= len(ByteLen) { return false } if len(d) != ByteLen[idx] { return false } return true } func CheckCode(d []byte) int { if len(d)==0 { return -1 } idx := GetDlugIndex(d) if idx<0 { return -2 } if idx>= len(ByteLen) { return -3 } if len(d) != ByteLen[idx] { return -4 } return 0 } func EqualByte(d []byte, b byte) bool { if len(d)==0 { return false } if len(d)==1 { if (d[0]&(0x80)) != 0 { return false } if (d[0]&0x7f) == b { return true } return false } k := GetDlugIndex(d) if k<0 { return false } if d[0]&byte(0xff << (8-byte(PfxBitLen[k]))) != Pfx[k] { return false } n:=len(d) if d[n-1]!=b {return false} for i:=1; i<(n-1); i++ { if d[i]!=0 { return false } } return true } func GetDlugIndex(d []byte) int { if len(d)==0 { return -1 } for i:=0; i<len(ByteLen); i++ { if (d[0] & byte(0xff << (8-byte(PfxBitLen[i])))) == Pfx[i] { return i } } return -2 } func GetByteLen(d []byte) int { if len(d)==0 { return -1 } for i:=0; i<len(ByteLen); i++ { if (d[0] & byte(0xff << (8-byte(PfxBitLen[i])))) == Pfx[i] { return ByteLen[i] } } return -2 } func GetDataBitLen(d []byte) int { if len(d)==0 { return -1 } for i:=0; i<len(ByteLen); i++ { if (d[0] & byte(0xff << (8-byte(PfxBitLen[i])))) == Pfx[i] { return int(BitLen[i]) } } return -2 } func GetPrefixBitLen(d []byte) int { if len(d)==0 { return -1 } for i:=0; i<len(ByteLen); i++ { if (d[0] & byte(0xff << (8-byte(PfxBitLen[i])))) == Pfx[i] { return PfxBitLen[i] } } return -2 } //----------------------- // Marshal Byte Functions //----------------------- func MarshalByte(b byte) []byte { if b<(1<<BitLen[0]) { return []byte{b} } return []byte{ 0x80, b } } func MarshalUint32(u uint32) []byte { if u<(1<<BitLen[0]) { return []byte{ byte(u&0xff) } } if u<(1<<BitLen[1]) { return []byte{ byte(Pfx[1] | byte(0xff & (u>>8))), byte(0xff & u) } } if u<(1<<BitLen[2]) { return []byte{ byte(Pfx[2] | byte(0xff & (u>>16))), byte(0xff & (u>>8)), byte(0xff & u) } } if u<(1<<BitLen[3]) { return []byte{ byte(Pfx[3] | byte(0xff & (u>>24))), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } return []byte{ Pfx[4], byte(0xff & (u>>24)), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } func MarshalUint64(u uint64) []byte { if u<(1<<BitLen[0]) { return []byte{ byte(u&0xff) } } if u<(1<<BitLen[1]) { return []byte{ byte(Pfx[1] | byte(0xff & (u>>8))), byte(0xff & u) } } if u<(1<<BitLen[2]) { return []byte{ byte(Pfx[2] | byte(0xff & (u>>16))), byte(0xff & (u>>8)), byte(0xff & u) } } if u<(1<<BitLen[3]) { return []byte{ byte(Pfx[3] | byte(0xff & (u>>24))), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } if u<(1<<BitLen[4]) { return []byte{ byte(Pfx[4] | byte(0xff & (u>>32))), byte(0xff & (u>>24)), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } if u<(1<<uint64(BitLen[5])) { return []byte{ byte(Pfx[5] | byte(0xff & (u>>40))), byte(0xff & (u>>32)), byte(0xff & (u>>24)), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } if u<(1<<uint64(BitLen[6])) { return []byte{ Pfx[6], byte(0xff & (u>>48)), byte(0xff & (u>>40)), byte(0xff & (u>>32)), byte(0xff & (u>>24)), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } return []byte{ Pfx[7], byte(0xff & (u>>56)), 
byte(0xff & (u>>48)), byte(0xff & (u>>40)), byte(0xff & (u>>32)), byte(0xff & (u>>24)), byte(0xff & (u>>16)), byte(0xff & (u>>8)), byte(0xff & u) } } //--------------------- // Fill Slice Functions //--------------------- func FillSliceByte(s []byte, b byte) int { if len(s) == 0 { return -1 } if b<(1<<BitLen[0]) { s[0] = b; return 1 } if len(s) < 2 { return -1 } s[0] = 0x80 s[1] = b return 2 } func FillSliceUint32(s []byte, u uint32) int { if len(s)==0 { return -1 } if u<(1<<BitLen[0]) { s[0] = byte(u&0xff) return 1 } if len(s)<int(ByteLen[1]) { return -1 } if u<(1<<BitLen[1]) { s[0] = byte(Pfx[1] | byte(0xff & (u>>8))) s[1] = byte(0xff & u) return ByteLen[1] } if len(s)<ByteLen[2] { return -1 } if u<(1<<BitLen[2]) { s[0] = byte(Pfx[2] | byte(0xff & (u>>16))) s[1] = byte(0xff & (u>>8)) s[2] = byte(0xff & u) return ByteLen[2] } if len(s)<ByteLen[3] { return -1 } if u<(1<<BitLen[3]) { s[0] = byte(Pfx[3] | byte(0xff & (u>>24))) s[1] = byte(0xff & (u>>16)) s[2] = byte(0xff & (u>>8)) s[3] = byte(0xff & u) return ByteLen[3] } if len(s)<ByteLen[4] { return -1 } s[0] = Pfx[4] s[1] = byte(0xff & (u>>24)) s[2] = byte(0xff & (u>>16)) s[3] = byte(0xff & (u>>8)) s[4] = byte(0xff & u) return ByteLen[4] } func FillSliceUint64(s []byte, u uint64) int { if len(s)==0 { return -1 } if u<(1<<BitLen[0]) { s[0] = byte(u&0xff) return 1 } if len(s)<ByteLen[1] { return -1 } if u<(1<<BitLen[1]) { s[0] = byte(Pfx[1] | byte(0xff & (u>>8))) s[1] = byte(0xff & u) return ByteLen[1] } if len(s)<ByteLen[2] { return -1 } if u<(1<<BitLen[2]) { s[0] = byte(Pfx[2] | byte(0xff & (u>>16))) s[1] = byte(0xff & (u>>8)) s[2] = byte(0xff & u) return ByteLen[2] } if len(s)<ByteLen[3] { return -1 } if u<(1<<BitLen[3]) { s[0] = byte(Pfx[3] | byte(0xff & (u>>24))) s[1] = byte(0xff & (u>>16)) s[2] = byte(0xff & (u>>8)) s[3] = byte(0xff & u) return ByteLen[3] } if len(s)<ByteLen[4] { return -1 } if u<(1<<BitLen[4]) { s[0] = Pfx[4] | byte(0xff & (u>>32)) s[1] = byte(0xff & (u>>24)) s[2] = byte(0xff & (u>>16)) s[3] = byte(0xff & (u>>8)) s[4] = byte(0xff & u) return ByteLen[4] } if len(s)<ByteLen[5] { return -1 } if u<(1<<uint64(BitLen[5])) { s[0] = Pfx[5] | byte(0xff & (u>>40)) s[1] = byte(0xff & (u>>32)) s[2] = byte(0xff & (u>>24)) s[3] = byte(0xff & (u>>16)) s[4] = byte(0xff & (u>>8)) s[5] = byte(0xff & u) return ByteLen[5] } if len(s)<ByteLen[6] { return -1 } if u<(1<<uint64(BitLen[6])) { s[0] = Pfx[6] s[1] = byte(0xff & (u>>48)) s[2] = byte(0xff & (u>>40)) s[3] = byte(0xff & (u>>32)) s[4] = byte(0xff & (u>>24)) s[5] = byte(0xff & (u>>16)) s[6] = byte(0xff & (u>>8)) s[7] = byte(0xff & u) return ByteLen[6] } if len(s)<ByteLen[7] { return -1 } s[0] = Pfx[7] s[1] = byte(0xff & (u>>56)) s[2] = byte(0xff & (u>>48)) s[3] = byte(0xff & (u>>40)) s[4] = byte(0xff & (u>>32)) s[5] = byte(0xff & (u>>24)) s[6] = byte(0xff & (u>>16)) s[7] = byte(0xff & (u>>8)) s[8] = byte(0xff & u) return ByteLen[7] } //------------------ // Convert Functions //------------------ func ConvertByte(b []byte) (byte, int) { idx := GetDlugIndex(b) if idx<0 { return 0,idx } if idx==0 { return b[0]&0x7f,1 } if len(b) < ByteLen[idx] { return 0,-1 } return b[ByteLen[idx]-1], ByteLen[idx] } func ConvertUint32(b []byte) (uint32, int) { idx := GetDlugIndex(b) if idx<0 { return 0,idx } if idx==0 { return uint32(b[0]&0x7f),1 } if len(b) < ByteLen[idx] { return 0,-1 } if idx==1 { return (uint32(b[0]&(^Pfx[1])) << 8) + uint32(b[1]), 2 } if idx==2 { return (uint32(b[0]&(^Pfx[2])) << 16) + (uint32(b[1])<<8) + uint32(b[2]), 3 } if idx==3 { return (uint32(b[0]&(^Pfx[3])) << 24) + 
(uint32(b[1])<<16) + (uint32(b[2])<<8) + uint32(b[3]), 4 } n := ByteLen[idx] return (uint32(b[n-4])<<24) + (uint32(b[n-3])<<16) + (uint32(b[n-2])<<8) + uint32(b[n-1]), n } func ConvertUint64(b []byte) (uint64, int) { idx := GetDlugIndex(b) if idx<0 { return 0,idx } if idx==0 { return uint64(b[0]&0x7f),1 } if len(b) < ByteLen[idx] { return 0,-1 } if idx==1 { return (uint64(b[0]&(^Pfx[1]))<<8) + uint64(b[1]), 2 } if idx==2 { return (uint64(b[0]&(^Pfx[2])) << 16) + (uint64(b[1])<<8) + uint64(b[2]), 3 } if idx==3 { return (uint64(b[0]&(^Pfx[3])) << 24) + (uint64(b[1])<<16) + (uint64(b[2])<<8) + uint64(b[3]), 4 } if idx==4 { return (uint64(b[0]&(^Pfx[4])) << 32) + (uint64(b[1])<<24) + (uint64(b[2])<<16) + (uint64(b[3])<<8) + uint64(b[4]), 5 } if idx==5 { return (uint64(b[0]&(^Pfx[5])) << 40) + (uint64(b[1])<<32) + (uint64(b[2])<<24) + (uint64(b[3])<<16) + (uint64(b[4])<<8) + uint64(b[5]), 6 } if idx==6 { return (uint64(b[1])<<48) + (uint64(b[2])<<40) + (uint64(b[3])<<32) + (uint64(b[4])<<24) + (uint64(b[5])<<16) + (uint64(b[6])<<8) + uint64(b[7]), 8 } n := ByteLen[idx] return (uint64(b[n-8])<<56) + (uint64(b[n-7])<<48) + (uint64(b[n-6])<<40) + (uint64(b[n-5])<<32) + (uint64(b[n-4])<<24) + (uint64(b[n-3])<<16) + (uint64(b[n-2])<<8) + uint64(b[n-1]), n }
agpl-3.0
ghostx2013/FabricEngine_Backup
Native/Core/MR/ConstArray.cpp
3557
/* * Copyright 2010-2012 Fabric Engine Inc. All rights reserved. */ #include <Fabric/Core/MR/ConstArray.h> #include <Fabric/Core/RT/FixedArrayDesc.h> #include <Fabric/Core/RT/Manager.h> #include <Fabric/Core/RT/VariableArrayDesc.h> #include <Fabric/Base/JSON/Decoder.h> #include <Fabric/Base/JSON/Encoder.h> #include <Fabric/Base/Exception.h> namespace Fabric { namespace MR { RC::Handle<ConstArray> ConstArray::Create( RC::ConstHandle<RT::Manager> const &rtManager, RC::ConstHandle<RT::Desc> const &elementDesc, JSON::Entity const &entity ) { return new ConstArray( rtManager, elementDesc, entity ); } RC::Handle<ConstArray> ConstArray::Create( RC::ConstHandle<RT::Manager> const &rtManager, RC::ConstHandle<RT::ArrayDesc> const &arrayDesc, void const *data ) { return new ConstArray( rtManager, arrayDesc, data ); } ConstArray::ConstArray( RC::ConstHandle<RT::Manager> const &rtManager, RC::ConstHandle<RT::Desc> const &elementDesc, JSON::Entity const &entity ) { entity.requireArray(); m_fixedArrayDesc = rtManager->getFixedArrayOf( elementDesc, entity.value.array.size ); m_data.resize( m_fixedArrayDesc->getAllocSize(), 0 ); m_fixedArrayDesc->decodeJSON( entity, &m_data[0] ); } ConstArray::ConstArray( RC::ConstHandle<RT::Manager> const &rtManager, RC::ConstHandle<RT::ArrayDesc> const &arrayDesc, void const *data ) { RC::ConstHandle<RT::Desc> elementDesc = arrayDesc->getMemberDesc(); size_t count = arrayDesc->getNumMembers( data ); m_fixedArrayDesc = rtManager->getFixedArrayOf( elementDesc, count ); m_data.resize( m_fixedArrayDesc->getAllocSize(), 0 ); for ( size_t i=0; i<count; ++i ) elementDesc->setData( arrayDesc->getImmutableMemberData( data, i ), m_fixedArrayDesc->getMutableMemberData( &m_data[0], i ) ); } ConstArray::~ConstArray() { m_fixedArrayDesc->disposeData( &m_data[0] ); } RC::ConstHandle<RT::Desc> ConstArray::getElementDesc() const { return m_fixedArrayDesc->getMemberDesc(); } const RC::Handle<ArrayProducer::ComputeState> ConstArray::createComputeState() const { return ComputeState::Create( this ); } RC::Handle<ConstArray::ComputeState> ConstArray::ComputeState::Create( RC::ConstHandle<ConstArray> const &constArray ) { return new ComputeState( constArray ); } ConstArray::ComputeState::ComputeState( RC::ConstHandle<ConstArray> const &constArray ) : ArrayProducer::ComputeState( constArray ) , m_constArray( constArray ) { setCount( m_constArray->m_fixedArrayDesc->getNumMembers() ); } void ConstArray::ComputeState::produce( size_t index, void *data ) const { return m_constArray->getElementDesc()->setData( m_constArray->m_fixedArrayDesc->getImmutableMemberData( &m_constArray->m_data[0], index ), data ); } void ConstArray::ComputeState::produceJSON( size_t index, JSON::Encoder &jg ) const { return m_constArray->getElementDesc()->encodeJSON( m_constArray->m_fixedArrayDesc->getImmutableMemberData( &m_constArray->m_data[0], index ), jg ); } RC::ConstHandle<RT::ArrayDesc> ConstArray::getArrayDesc() const { return m_fixedArrayDesc; } void const *ConstArray::getImmutableData() const { return &m_data[0]; } void ConstArray::flush() { } } }
agpl-3.0
roskens/opennms-pre-github
opennms-config-model/src/main/java/org/opennms/netmgt/config/service/Value.java
5047
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2011-2014 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <[email protected]> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ /** * This class was original generated with Castor, but is no longer. */ package org.opennms.netmgt.config.service; //---------------------------------/ //- Imported classes and packages -/ //---------------------------------/ import java.io.Serializable; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlValue; /** * Class Value. * * @version $Revision$ $Date$ */ @XmlRootElement(name = "value") @XmlAccessorType(XmlAccessType.FIELD) public class Value implements Serializable { private static final long serialVersionUID = 8678345448589083586L; // --------------------------/ // - Class/Member Variables -/ // --------------------------/ /** * internal content storage */ @XmlValue private String _content = ""; /** * Field _type. */ @XmlAttribute(name = "type") private String _type; // ----------------/ // - Constructors -/ // ----------------/ public Value() { super(); setContent(""); } public Value(final String type, final String content) { super(); setType(type); setContent(content); } // -----------/ // - Methods -/ // -----------/ /** * Overrides the java.lang.Object.equals method. * * @param obj * @return true if the objects are equal. */ @Override() public boolean equals(final Object obj) { if (this == obj) return true; if (obj instanceof Value) { Value temp = (Value) obj; if (this._content != null) { if (temp._content == null) return false; else if (!(this._content.equals(temp._content))) return false; } else if (temp._content != null) return false; if (this._type != null) { if (temp._type == null) return false; else if (!(this._type.equals(temp._type))) return false; } else if (temp._type != null) return false; return true; } return false; } /** * Returns the value of field 'content'. The field 'content' has the * following description: internal content storage * * @return the value of field 'Content'. */ public String getContent() { return this._content; } /** * Returns the value of field 'type'. * * @return the value of field 'Type'. */ public String getType() { return this._type; } /** * Overrides the java.lang.Object.hashCode method. 
* <p> * The following steps came from <b>Effective Java Programming Language * Guide</b> by Joshua Bloch, Chapter 3 * * @return a hash code value for the object. */ public int hashCode() { int result = 17; if (_content != null) { result = 37 * result + _content.hashCode(); } if (_type != null) { result = 37 * result + _type.hashCode(); } return result; } /** * Sets the value of field 'content'. The field 'content' has the * following description: internal content storage * * @param content * the value of field 'content'. */ public void setContent(final String content) { this._content = content; } /** * Sets the value of field 'type'. * * @param type * the value of field 'type'. */ public void setType(final String type) { this._type = type; } }
agpl-3.0
wlwwt/shopware
vendor/google/proto-client/src/Google/Cloud/Dlp/V2beta1/BucketingConfig_Bucket.php
3854
<?php # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/privacy/dlp/v2beta1/dlp.proto namespace Google\Cloud\Dlp\V2beta1; use Google\Protobuf\Internal\GPBType; use Google\Protobuf\Internal\RepeatedField; use Google\Protobuf\Internal\GPBUtil; /** * Buckets represented as ranges, along with replacement values. Ranges must * be non-overlapping. * * Generated from protobuf message <code>google.privacy.dlp.v2beta1.BucketingConfig.Bucket</code> */ class BucketingConfig_Bucket extends \Google\Protobuf\Internal\Message { /** * Lower bound of the range, inclusive. Type should be the same as max if * used. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value min = 1;</code> */ private $min = null; /** * Upper bound of the range, exclusive; type must match min. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value max = 2;</code> */ private $max = null; /** * Replacement value for this bucket. If not provided * the default behavior will be to hyphenate the min-max range. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value replacement_value = 3;</code> */ private $replacement_value = null; public function __construct() { \GPBMetadata\Google\Privacy\Dlp\V2Beta1\Dlp::initOnce(); parent::__construct(); } /** * Lower bound of the range, inclusive. Type should be the same as max if * used. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value min = 1;</code> * @return \Google\Cloud\Dlp\V2beta1\Value */ public function getMin() { return $this->min; } /** * Lower bound of the range, inclusive. Type should be the same as max if * used. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value min = 1;</code> * @param \Google\Cloud\Dlp\V2beta1\Value $var * @return $this */ public function setMin($var) { GPBUtil::checkMessage($var, \Google\Cloud\Dlp\V2beta1\Value::class); $this->min = $var; return $this; } /** * Upper bound of the range, exclusive; type must match min. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value max = 2;</code> * @return \Google\Cloud\Dlp\V2beta1\Value */ public function getMax() { return $this->max; } /** * Upper bound of the range, exclusive; type must match min. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value max = 2;</code> * @param \Google\Cloud\Dlp\V2beta1\Value $var * @return $this */ public function setMax($var) { GPBUtil::checkMessage($var, \Google\Cloud\Dlp\V2beta1\Value::class); $this->max = $var; return $this; } /** * Replacement value for this bucket. If not provided * the default behavior will be to hyphenate the min-max range. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value replacement_value = 3;</code> * @return \Google\Cloud\Dlp\V2beta1\Value */ public function getReplacementValue() { return $this->replacement_value; } /** * Replacement value for this bucket. If not provided * the default behavior will be to hyphenate the min-max range. * * Generated from protobuf field <code>.google.privacy.dlp.v2beta1.Value replacement_value = 3;</code> * @param \Google\Cloud\Dlp\V2beta1\Value $var * @return $this */ public function setReplacementValue($var) { GPBUtil::checkMessage($var, \Google\Cloud\Dlp\V2beta1\Value::class); $this->replacement_value = $var; return $this; } }
agpl-3.0
alfredoavanzosc/odoo-addons
stock_information/models/sale_order_line.py
1803
# -*- coding: utf-8 -*- # (c) 2016 Alfredo de la Fuente - AvanzOSC # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html from openerp import models, fields, api from dateutil.relativedelta import relativedelta class saleOrderLine(models.Model): _inherit = 'sale.order.line' @api.multi @api.depends('order_id', 'order_id.date_order', 'delay') def _compute_date_planned(self): for line in self: new_date = fields.Date.context_today(self) if line.order_id and line.order_id.date_order: new_date = fields.Datetime.from_string( line.order_id.date_order).date() if line.delay: new_date = (new_date + (relativedelta(days=line.delay))) line.date_planned = new_date date_planned = fields.Date( 'Date planned', compute='_compute_date_planned', store=True, default=_compute_date_planned) def _find_sale_lines_from_stock_information( self, company, to_date, product, location, from_date=None): cond = [('company_id', '=', company.id), ('product_id', '=', product.id), ('date_planned', '<=', to_date), ('state', '=', 'draft')] if from_date: cond.append(('date_planned', '>=', from_date)) sale_lines = self.search(cond) sale_lines = sale_lines.filtered( lambda x: x.order_id.state not in ('cancel', 'except_picking', 'except_invoice', 'done', 'approved')) sale_lines = sale_lines.filtered( lambda x: x.order_id.warehouse_id.lot_stock_id.id == location.id) return sale_lines
agpl-3.0
jondo/paperpile
plack/perl5/linux64/base/unicore/lib/bc/ON.pl
1689
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!! # This file is built by mktables from e.g. UnicodeData.txt. # Any changes made here will be lost! # # BidiClass category 'Other_Neutral' # return <<'END'; 0021 0022 0026 002A 003B 0040 005B 0060 007B 007E 00A1 00A6 00A9 00AB 00AC 00AE 00AF 00B4 00B6 00B8 00BB 00BF 00D7 00F7 02B9 02BA 02C2 02CF 02D2 02DF 02E5 02ED 02EF 02FF 0374 0375 037E 0384 0385 0387 03F6 058A 060E 060F 06E9 07F6 07F9 0BF3 0BF8 0BFA 0CF1 0CF2 0F3A 0F3D 1390 1399 169B 169C 17F0 17F9 1800 180A 1940 1944 1945 19DE 19FF 1FBD 1FBF 1FC1 1FCD 1FCF 1FDD 1FDF 1FED 1FEF 1FFD 1FFE 2010 2027 2035 2043 2045 205E 207C 207E 208C 208E 2100 2101 2103 2106 2108 2109 2114 2116 2118 211E 2123 2125 2127 2129 213A 213B 2140 2144 214A 214D 2153 215F 2190 2211 2214 2335 237B 2394 2396 23E7 2400 2426 2440 244A 2460 2487 24EA 269C 26A0 26AB 26AD 26B2 2701 2704 2706 2709 270C 2727 2729 274B 274D 274F 2752 2756 2758 275E 2761 2794 2798 27AF 27B1 27BE 27C0 27CA 27D0 27EB 27F0 27FF 2900 2B1A 2B20 2B23 2CE5 2CEA 2CF9 2CFF 2E00 2E17 2E1C 2E1D 2E80 2E99 2E9B 2EF3 2F00 2FD5 2FF0 2FFB 3001 3004 3008 3020 3030 3036 3037 303D 303F 309B 309C 30A0 30FB 31C0 31CF 321D 321E 3250 325F 327C 327E 32B1 32BF 32CC 32CF 3377 337A 33DE 33DF 33FF 4DC0 4DFF A490 A4C6 A700 A71A A720 A721 A828 A82B A874 A877 FD3E FD3F FDFD FE10 FE19 FE30 FE4F FE51 FE54 FE56 FE5E FE60 FE61 FE64 FE66 FE68 FE6B FF01 FF02 FF06 FF0A FF1B FF20 FF3B FF40 FF5B FF65 FFE2 FFE4 FFE8 FFEE FFF9 FFFD 10101 10140 1018A 1091F 1D200 1D241 1D245 1D300 1D356 END
agpl-3.0
yipenggao/moose
docs/content/documentation/modules/phase_field/MultiPhase/KKSMultiComponentExample.md
1221
# Kim-Kim-Suzuki Example for three or more components !listing modules/phase_field/examples/kim-kim-suzuki/kks_example_ternary.i When additional chemical components are added to the KKS model, a Cahn-Hilliard equation must be added for each additional component. (For $n$ components, $n-1$ Cahn-Hilliard equations are required). Each additional Cahn-Hilliard equation requires the kernels: * [`KKSSplitCHCRes`](KKSSplitCHCRes.md) * [`CoupledTimeDerivative`](framework/CoupledTimeDerivative.md) * [`SplitCHWRes`](phase_field/SplitCHWRes.md) To enforce the composition and chemical potential constraints, each additional component also requires the kernels * [`KKSPhaseConcentration`](KKSPhaseConcentration.md) * [`KKSPhaseChemicalPotential`](KKSPhaseChemicalPotential.md) The Allen-Cahn equation is also modified when additional components are added. The residual becomes $$ R=-\frac{dh}{d\eta} \left(F_a-F_b- \sum_{i=1}^{n-1} \frac{dF_a}{dc_{ia}}(c_{ia}-c_{ib})\right) + w\frac{dg}{d\eta}. $$ where $n$ is the number of components. A single [`KKSACBulkF`](KKSACBulkF.md) kernel is needed as in the 2-component case, and an additional [`KKSACBulkC`](KKSACBulkC.md) kernel must be added for each additional component.
lgpl-2.1
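For the ternary input referenced in the KKS page above ($n = 3$, hence two solute components and $n-1 = 2$ Cahn-Hilliard equations), the sum in the quoted Allen-Cahn residual contains exactly two terms; writing that case out explicitly:

$$
R=-\frac{dh}{d\eta}\left(F_a-F_b-\frac{dF_a}{dc_{1a}}\,(c_{1a}-c_{1b})-\frac{dF_a}{dc_{2a}}\,(c_{2a}-c_{2b})\right)+w\frac{dg}{d\eta}.
$$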
Adam5Wu/Arduino
cores/esp8266/gdb_hooks.c
1357
/* gdb_hooks.c - Default (no-op) hooks for GDB Stub library Copyright (c) 2018 Ivan Grokhotkov. All right reserved. This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ #include "ets_sys.h" #include "gdb_hooks.h" /* gdb_init and gdb_do_break do not return anything, but since the return value is in register, it doesn't hurt to return a bool, so that the same stub can be used for gdb_present. */ bool ICACHE_RAM_ATTR __gdb_no_op() { return false; } extern void gdb_init(void) __attribute__ ((weak, alias("__gdb_no_op"))); extern void gdb_do_break(void) __attribute__ ((weak, alias("__gdb_no_op"))); extern bool gdb_present(void) __attribute__ ((weak, alias("__gdb_no_op")));
lgpl-2.1
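The comment block in gdb_hooks.c above explains that the three hooks are weak aliases of a shared no-op so that a real GDB stub can take over when one is linked in. A minimal sketch of what such an override looks like; the bodies are placeholders, not the actual GDBStub implementation:

```c
/* Hypothetical override (placeholder bodies, not the real GDBStub code):
 * a library that provides an actual debugger stub defines strong symbols
 * with the same names, which take precedence over the weak no-op aliases
 * declared in gdb_hooks.c. */
#include <stdbool.h>
#include "gdb_hooks.h"

void gdb_init(void)
{
    /* e.g. hook exception handlers and claim the UART */
}

void gdb_do_break(void)
{
    /* e.g. raise a breakpoint so the attached debugger gains control */
}

bool gdb_present(void)
{
    return true;  /* report that a stub is linked in */
}
```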
jbarriosc/ACSUFRO
LGPL/Tools/loki/ws/include/lokiMultiMethods.h
15676
//////////////////////////////////////////////////////////////////////////////// // The Loki Library // Copyright (c) 2001 by Andrei Alexandrescu // This code accompanies the book: // Alexandrescu, Andrei. "Modern C++ Design: Generic Programming and Design // Patterns Applied". Copyright (c) 2001. Addison-Wesley. // Permission to use, copy, modify, distribute and sell this software for any // purpose is hereby granted without fee, provided that the above copyright // notice appear in all copies and that both that copyright notice and this // permission notice appear in supporting documentation. // The author or Addison-Wesley Longman make no representations about the // suitability of this software for any purpose. It is provided "as is" // without express or implied warranty. //////////////////////////////////////////////////////////////////////////////// #ifndef LOKI_MULTIMETHODS_INC_ #define LOKI_MULTIMETHODS_INC_ // $Header: /diskb/tmp/stefano/project2/CVS/ACS/LGPL/Tools/loki/ws/include/lokiMultiMethods.h,v 1.2 2007/02/01 17:29:00 sharring Exp $ #include "lokiTypelist.h" #include "lokiTypeInfo.h" #include "lokiFunctor.h" #include "lokiAssocVector.h" //////////////////////////////////////////////////////////////////////////////// // IMPORTANT NOTE: // The double dispatchers implemented below differ from the excerpts shown in // the book - they are simpler while respecting the same interface. //////////////////////////////////////////////////////////////////////////////// namespace Loki { //////////////////////////////////////////////////////////////////////////////// // class template InvocationTraits (helper) // Helps implementing optional symmetry //////////////////////////////////////////////////////////////////////////////// namespace Private { template <class SomeLhs, class SomeRhs, class Executor, typename ResultType> struct InvocationTraits { static ResultType DoDispatch(SomeLhs& lhs, SomeRhs& rhs, Executor& exec, Int2Type<false>) { return exec.Fire(lhs, rhs); } static ResultType DoDispatch(SomeLhs& lhs, SomeRhs& rhs, Executor& exec, Int2Type<true>) { return exec.Fire(rhs, lhs); } }; } //////////////////////////////////////////////////////////////////////////////// // class template StaticDispatcher // Implements an automatic static double dispatcher based on two typelists //////////////////////////////////////////////////////////////////////////////// template < class Executor, class BaseLhs, class TypesLhs, bool symmetric = true, class BaseRhs = BaseLhs, class TypesRhs = TypesLhs, typename ResultType = void > class StaticDispatcher { template <class SomeLhs> static ResultType DispatchRhs(SomeLhs& lhs, BaseRhs& rhs, Executor exec, NullType) { return exec.OnError(lhs, rhs); } template <class Head, class Tail, class SomeLhs> static ResultType DispatchRhs(SomeLhs& lhs, BaseRhs& rhs, Executor exec, Typelist<Head, Tail>) { if (Head* p2 = dynamic_cast<Head*>(&rhs)) { Int2Type<(symmetric && int(TL::IndexOf<TypesRhs, Head>::value) < int(TL::IndexOf<TypesLhs, SomeLhs>::value))> i2t; typedef Private::InvocationTraits< SomeLhs, Head, Executor, ResultType> CallTraits; return CallTraits::DoDispatch(lhs, *p2, exec, i2t); } return DispatchRhs(lhs, rhs, exec, Tail()); } static ResultType DispatchLhs(BaseLhs& lhs, BaseRhs& rhs, Executor exec, NullType) { return exec.OnError(lhs, rhs); } template <class Head, class Tail> static ResultType DispatchLhs(BaseLhs& lhs, BaseRhs& rhs, Executor exec, Typelist<Head, Tail>) { if (Head* p1 = dynamic_cast<Head*>(&lhs)) { return DispatchRhs(*p1, rhs, exec, 
TypesRhs()); } return DispatchLhs(lhs, rhs, exec, Tail()); } public: static ResultType Go(BaseLhs& lhs, BaseRhs& rhs, Executor exec) { return DispatchLhs(lhs, rhs, exec, TypesLhs()); } }; //////////////////////////////////////////////////////////////////////////////// // class template BasicDispatcher // Implements a logarithmic double dispatcher for functors (or functions) // Doesn't offer automated casts or symmetry //////////////////////////////////////////////////////////////////////////////// template < class BaseLhs, class BaseRhs = BaseLhs, typename ResultType = void, typename CallbackType = ResultType (*)(BaseLhs&, BaseRhs&) > class BasicDispatcher { typedef std::pair<TypeInfo,TypeInfo> KeyType; typedef CallbackType MappedType; typedef AssocVector<KeyType, MappedType> MapType; MapType callbackMap_; void DoAdd(TypeInfo lhs, TypeInfo rhs, CallbackType fun); bool DoRemove(TypeInfo lhs, TypeInfo rhs); public: template <class SomeLhs, class SomeRhs> void Add(CallbackType fun) { DoAdd(typeid(SomeLhs), typeid(SomeRhs), fun); } template <class SomeLhs, class SomeRhs> bool Remove() { return DoRemove(typeid(SomeLhs), typeid(SomeRhs)); } ResultType Go(BaseLhs& lhs, BaseRhs& rhs); }; // Non-inline to reduce compile time overhead... template <class BaseLhs, class BaseRhs, typename ResultType, typename CallbackType> void BasicDispatcher<BaseLhs,BaseRhs,ResultType,CallbackType> ::DoAdd(TypeInfo lhs, TypeInfo rhs, CallbackType fun) { callbackMap_[KeyType(lhs, rhs)] = fun; } template <class BaseLhs, class BaseRhs, typename ResultType, typename CallbackType> bool BasicDispatcher<BaseLhs,BaseRhs,ResultType,CallbackType> ::DoRemove(TypeInfo lhs, TypeInfo rhs) { return callbackMap_.erase(KeyType(lhs, rhs)) == 1; } template <class BaseLhs, class BaseRhs, typename ResultType, typename CallbackType> ResultType BasicDispatcher<BaseLhs,BaseRhs,ResultType,CallbackType> ::Go(BaseLhs& lhs, BaseRhs& rhs) { typename MapType::key_type k(typeid(lhs),typeid(rhs)); typename MapType::iterator i = callbackMap_.find(k); if (i == callbackMap_.end()) { throw std::runtime_error("Function not found"); } return (i->second)(lhs, rhs); } //////////////////////////////////////////////////////////////////////////////// // class template StaticCaster // Implementation of the CastingPolicy used by FunctorDispatcher //////////////////////////////////////////////////////////////////////////////// template <class To, class From> struct StaticCaster { static To& Cast(From& obj) { return static_cast<To&>(obj); } }; //////////////////////////////////////////////////////////////////////////////// // class template DynamicCaster // Implementation of the CastingPolicy used by FunctorDispatcher //////////////////////////////////////////////////////////////////////////////// template <class To, class From> struct DynamicCaster { static To& Cast(From& obj) { return dynamic_cast<To&>(obj); } }; //////////////////////////////////////////////////////////////////////////////// // class template Private::FnDispatcherHelper // Implements trampolines and argument swapping used by FnDispatcher //////////////////////////////////////////////////////////////////////////////// namespace Private { template <class BaseLhs, class BaseRhs, class SomeLhs, class SomeRhs, typename ResultType, class CastLhs, class CastRhs, ResultType (*Callback)(SomeLhs&, SomeRhs&)> struct FnDispatcherHelper { static ResultType Trampoline(BaseLhs& lhs, BaseRhs& rhs) { return Callback(CastLhs::Cast(lhs), CastRhs::Cast(rhs)); } static ResultType TrampolineR(BaseRhs& rhs, BaseLhs& 
lhs) { return Trampoline(lhs, rhs); } }; } //////////////////////////////////////////////////////////////////////////////// // class template FnDispatcher // Implements an automatic logarithmic double dispatcher for functions // Features automated conversions //////////////////////////////////////////////////////////////////////////////// template <class BaseLhs, class BaseRhs = BaseLhs, typename ResultType = void, template <class, class> class CastingPolicy = DynamicCaster, template <class, class, class, class> class DispatcherBackend = BasicDispatcher> class FnDispatcher { DispatcherBackend<BaseLhs, BaseRhs, ResultType, ResultType (*)(BaseLhs&, BaseRhs&)> backEnd_; public: template <class SomeLhs, class SomeRhs> void Add(ResultType (*pFun)(BaseLhs&, BaseRhs&)) { return backEnd_.template Add<SomeLhs, SomeRhs>(pFun); } template <class SomeLhs, class SomeRhs, ResultType (*callback)(SomeLhs&, SomeRhs&)> void Add() { typedef Private::FnDispatcherHelper< BaseLhs, BaseRhs, SomeLhs, SomeRhs, ResultType, CastingPolicy<SomeLhs,BaseLhs>, CastingPolicy<SomeRhs,BaseRhs>, callback> Local; Add<SomeLhs, SomeRhs>(&Local::Trampoline); } template <class SomeLhs, class SomeRhs, ResultType (*callback)(SomeLhs&, SomeRhs&), bool symmetric> void Add(bool = true) // [gcc] dummy bool { typedef Private::FnDispatcherHelper< BaseLhs, BaseRhs, SomeLhs, SomeRhs, ResultType, CastingPolicy<SomeLhs,BaseLhs>, CastingPolicy<SomeRhs,BaseRhs>, callback> Local; Add<SomeLhs, SomeRhs>(&Local::Trampoline); if (symmetric) { Add<SomeRhs, SomeLhs>(&Local::TrampolineR); } } template <class SomeLhs, class SomeRhs> void Remove() { backEnd_.template Remove<SomeLhs, SomeRhs>(); } ResultType Go(BaseLhs& lhs, BaseRhs& rhs) { return backEnd_.Go(lhs, rhs); } }; //////////////////////////////////////////////////////////////////////////////// // class template FunctorDispatcherAdaptor // permits use of FunctorDispatcher under gcc.2.95.2/3 /////////////////////////////////////////////////////////////////////////////// namespace Private { template <class BaseLhs, class BaseRhs, class SomeLhs, class SomeRhs, typename ResultType, class CastLhs, class CastRhs, class Fun, bool SwapArgs> class FunctorDispatcherHelper { Fun fun_; ResultType Fire(BaseLhs& lhs, BaseRhs& rhs,Int2Type<false>) { return fun_(CastLhs::Cast(lhs), CastRhs::Cast(rhs)); } ResultType Fire(BaseLhs& rhs, BaseRhs& lhs,Int2Type<true>) { return fun_(CastLhs::Cast(lhs), CastRhs::Cast(rhs)); } public: FunctorDispatcherHelper(const Fun& fun) : fun_(fun) {} ResultType operator()(BaseLhs& lhs, BaseRhs& rhs) { return Fire(lhs,rhs,Int2Type<SwapArgs>()); } }; } //////////////////////////////////////////////////////////////////////////////// // class template FunctorDispatcher // Implements a logarithmic double dispatcher for functors // Features automated casting //////////////////////////////////////////////////////////////////////////////// template <class BaseLhs, class BaseRhs = BaseLhs, typename ResultType = void, template <class, class> class CastingPolicy = DynamicCaster, template <class, class, class, class> class DispatcherBackend = BasicDispatcher> class FunctorDispatcher { typedef LOKI_TYPELIST_2(BaseLhs&, BaseRhs&) ArgsList; typedef Functor<ResultType, ArgsList, LOKI_DEFAULT_THREADING> FunctorType; DispatcherBackend<BaseLhs, BaseRhs, ResultType, FunctorType> backEnd_; public: template <class SomeLhs, class SomeRhs, class Fun> void Add(const Fun& fun) { typedef Private::FunctorDispatcherHelper< BaseLhs, BaseRhs, SomeLhs, SomeRhs, ResultType, CastingPolicy<SomeLhs, BaseLhs>, 
CastingPolicy<SomeRhs, BaseRhs>, Fun, false> Adapter; backEnd_.template Add<SomeLhs, SomeRhs>(FunctorType(Adapter(fun))); } template <class SomeLhs, class SomeRhs, bool symmetric, class Fun> void Add(const Fun& fun) { Add<SomeLhs,SomeRhs>(fun); if (symmetric) { // Note: symmetry only makes sense where BaseLhs==BaseRhs typedef Private::FunctorDispatcherHelper< BaseLhs, BaseLhs, SomeLhs, SomeRhs, ResultType, CastingPolicy<SomeLhs, BaseLhs>, CastingPolicy<SomeRhs, BaseLhs>, Fun, true> AdapterR; backEnd_.template Add<SomeRhs, SomeLhs>(FunctorType(AdapterR(fun))); } } template <class SomeLhs, class SomeRhs> void Remove() { backEnd_.template Remove<SomeLhs, SomeRhs>(); } ResultType Go(BaseLhs& lhs, BaseRhs& rhs) { return backEnd_.Go(lhs, rhs); } }; } // namespace Loki //////////////////////////////////////////////////////////////////////////////// // Change log: // June 20, 2001: ported by Nick Thurn to gcc 2.95.3. Kudos, Nick!!! // February 2, 2003: fixed dependent names - credit due to Rani Sharoni //////////////////////////////////////////////////////////////////////////////// #endif // $Log: lokiMultiMethods.h,v $ // Revision 1.2 2007/02/01 17:29:00 sharring // updating to newer version of loki library, with support for multi-threading enabled. manually renamed files to avoid name conflicts, by // prepending "loki" to the names of header files. also manually edited lokiThreads.h to #define LOKI_OBJECT_LEVEL_THREADING; this could // also be done with a compile FLAG, perhaps would be better. // // Revision 1.1.28.1 2007/02/01 07:36:57 sharring // // updating loki to newer version for testing in SFI in the hopes of fixing some // multi-threading problems seen in acs logging code for which the stack trace // indicates that loki smart pointers were involved. // // Revision 1.4 2006/01/16 19:05:09 rich_sposato // Added cvs keywords. //
lgpl-2.1
ramiusGitHub/WCF
wcfsetup/install/files/lib/data/bbcode/BBCodeEditor.class.php
812
<?php namespace wcf\data\bbcode; use wcf\data\DatabaseObjectEditor; use wcf\data\IEditableCachedObject; use wcf\system\cache\builder\BBCodeCacheBuilder; /** * Provides functions to edit bbcodes. * * @author Alexander Ebert * @copyright 2001-2015 WoltLab GmbH * @license GNU Lesser General Public License <http://opensource.org/licenses/lgpl-license.php> * @package com.woltlab.wcf * @subpackage data.bbcode * @category Community Framework */ class BBCodeEditor extends DatabaseObjectEditor implements IEditableCachedObject { /** * @see \wcf\data\DatabaseObjectDecorator::$baseClass */ public static $baseClass = 'wcf\data\bbcode\BBCode'; /** * @see \wcf\data\IEditableCachedObject::resetCache() */ public static function resetCache() { BBCodeCacheBuilder::getInstance()->reset(); } }
lgpl-2.1
clutter-project/clutter-box2d
box2d/Documentation/API/classb2_stack_allocator-members.html
2669
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <title>Box2D: Member List</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <link href="doxygen.css" rel="stylesheet" type="text/css"/> </head> <body> <!-- Generated by Doxygen 1.6.3 --> <div class="navigation" id="top"> <div class="tabs"> <ul> <li><a href="index.html"><span>Main&nbsp;Page</span></a></li> <li class="current"><a href="annotated.html"><span>Classes</span></a></li> <li><a href="files.html"><span>Files</span></a></li> </ul> </div> <div class="tabs"> <ul> <li><a href="annotated.html"><span>Class&nbsp;List</span></a></li> <li><a href="hierarchy.html"><span>Class&nbsp;Hierarchy</span></a></li> <li><a href="functions.html"><span>Class&nbsp;Members</span></a></li> </ul> </div> </div> <div class="contents"> <h1>b2StackAllocator Member List</h1>This is the complete list of members for <a class="el" href="classb2_stack_allocator.html">b2StackAllocator</a>, including all inherited members.<table> <tr class="memlist"><td><a class="el" href="classb2_stack_allocator.html#a4b8c515d8e1a1c2d5b151c3a2f96fa19">Allocate</a>(int32 size)</td><td><a class="el" href="classb2_stack_allocator.html">b2StackAllocator</a></td><td></td></tr> <tr class="memlist"><td><a class="el" href="classb2_stack_allocator.html#a00f8771300fe1efeb6fbaab489a8dd0a">b2StackAllocator</a>()</td><td><a class="el" href="classb2_stack_allocator.html">b2StackAllocator</a></td><td></td></tr> <tr class="memlist"><td><a class="el" href="classb2_stack_allocator.html#a3a4384cf5f467828db3022985673db66">Free</a>(void *p)</td><td><a class="el" href="classb2_stack_allocator.html">b2StackAllocator</a></td><td></td></tr> <tr class="memlist"><td><a class="el" href="classb2_stack_allocator.html#a1530b6889eaa679ab1b0e092e4911366">GetMaxAllocation</a>() const </td><td><a class="el" href="classb2_stack_allocator.html">b2StackAllocator</a></td><td></td></tr> <tr class="memlist"><td><a class="el" href="classb2_stack_allocator.html#a9f9bd23569d3521316d2f00b1de454b2">~b2StackAllocator</a>()</td><td><a class="el" href="classb2_stack_allocator.html">b2StackAllocator</a></td><td></td></tr> </table></div> <hr class="footer"/><address style="text-align: right;"><small>Generated on Sat Apr 3 18:18:09 2010 for Box2D by&nbsp; <a href="http://www.doxygen.org/index.html"> <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.6.3 </small></address> </body> </html>
lgpl-2.1
svallaghe/libmesh
contrib/netcdf/4.4.1.1/nc_test4/tst_knmi.c
2561
/** \file Performance test from KNMI. Copyright 2009, UCAR/Unidata. See \ref copyright file for copying and redistribution conditions. */ #include <nc_tests.h> #include "err_macros.h" #include <time.h> #include <sys/time.h> #include <unistd.h> #define FILE_NAME_1 "MSGCPP_CWP_NC3.nc" #define FILE_NAME_2 "MSGCPP_CWP_NC4.nc" #define NDIMS3 3 #define DATA_VAR_NAME "pr" #define NUM_CACHE_TRIES 1 #define LON_DIMID 0 #define LAT_DIMID 1 #define BNDS_DIMID 2 #define TIME_DIMID 3 #define LON_LEN 256 #define LAT_LEN 128 #define BNDS_LEN 2 #define TIME_LEN 1560 #define NUM_TS 1 extern const char* nc_strerror(int ncerr); static int complain(int stat) { if(stat) { fprintf(stderr,"%s\n",nc_strerror(stat)); fflush(stderr); } return stat; } static int read_file(char *filename) { #define CWP "cwp" #define XLEN 3712 #define YLEN 3712 int ncid, varid; struct timeval start_time, end_time, diff_time; short *data; int time_us; printf("**** reading file %s\n", filename); if (gettimeofday(&start_time, NULL)) ERR_RET; if(complain(nc_open(filename, NC_NOWRITE, &ncid))) ERR_RET; if (gettimeofday(&end_time, NULL)) ERR_RET; if (nc4_timeval_subtract(&diff_time, &end_time, &start_time)) ERR_RET; time_us = (int)diff_time.tv_sec * MILLION + (int)diff_time.tv_usec; printf("File open time (us): %d\n", (int)time_us); if (!(data = malloc(sizeof(short) * XLEN * YLEN))) ERR; if (gettimeofday(&start_time, NULL)) ERR_RET; if (nc_inq_varid(ncid, CWP, &varid)) ERR; if (nc_get_var_short(ncid, varid, data)) ERR; if (gettimeofday(&end_time, NULL)) ERR_RET; if (nc4_timeval_subtract(&diff_time, &end_time, &start_time)) ERR_RET; time_us = (int)diff_time.tv_sec * MILLION + (int)diff_time.tv_usec; printf("Data read time (us): %d\n", (int)time_us); free(data); if (nc_close(ncid)) ERR_RET; return 0; } int main(int argc, char **argv) { int c, header = 0, verbose = 0, timeseries = 0; int ncid, varid, storage; char name_in[NC_MAX_NAME + 1]; size_t len; size_t cs[NDIMS3] = {0, 0, 0}; int cache = MEGABYTE; int ndims, dimid[NDIMS3]; float hor_data[LAT_LEN * LON_LEN]; int read_1_us, avg_read_us; float ts_data[TIME_LEN]; size_t start[NDIMS3], count[NDIMS3]; int deflate, shuffle, deflate_level; struct timeval start_time, end_time, diff_time; printf("\n*** Testing netcdf-4 vs. netcdf-3 performance.\n"); if (complain(read_file(FILE_NAME_1))) ERR; if (complain(read_file(FILE_NAME_2))) ERR; SUMMARIZE_ERR; FINAL_RESULTS; }
lgpl-2.1
modolabs/Kurogo-Mobile-Web
app/modules/athletics/AthleticsShellModule.php
3854
<?php /* * Copyright © 2010 - 2013 Modo Labs Inc. All rights reserved. * * The license governing the contents of this file is located in the LICENSE * file located at the root directory of this distribution. If the LICENSE file * is missing, please contact [email protected]. * */ includePackage('News'); includePackage('DateTime'); class AthleticsShellModule extends ShellModule { protected $id = 'athletics'; protected static $defaultEventModel = 'AthleticEventsDataModel'; protected $feeds = array(); protected $navFeeds = array(); public function loadScheduleData() { $scheduleFeeds = $this->getModuleSections('schedule'); $default = $this->getOptionalModuleSection('schedule','module'); foreach ($scheduleFeeds as $index=>&$feedData) { $feedData = array_merge($default, $feedData); } return $scheduleFeeds; } protected function getScheduleFeed($sport) { $scheduleData = $this->loadScheduleData(); if ($feedData = Kurogo::arrayVal($scheduleData, $sport)) { $dataModel = Kurogo::arrayVal($feedData, 'MODEL_CLASS', self::$defaultEventModel); $this->scheduleFeed = AthleticEventsDataModel::factory($dataModel, $feedData); return $this->scheduleFeed; } return null; } protected function getNewsFeed($sport, $gender=null) { if ($sport=='topnews') { $feedData = $this->getNavData('topnews'); } elseif (isset($this->feeds[$sport])) { $feedData = $this->feeds[$sport]; } else { throw new KurogoDataException($this->getLocalizedString('ERROR_INVALID_SPORT', $sport)); } if (isset($feedData['DATA_RETRIEVER']) || isset($feedData['BASE_URL'])) { $dataModel = isset($feedData['MODEL_CLASS']) ? $feedData['MODEL_CLASS'] : 'AthleticNewsDataModel'; $newsFeed = DataModel::factory($dataModel, $feedData); return $newsFeed; } return null; } protected function getNavData($tab) { $data = isset($this->navFeeds[$tab]) ? $this->navFeeds[$tab] : ''; if (!$data) { throw new KurogoDataException($this->getLocalizedString('ERROR_NAV', $tab)); } return $data; } protected function getSportsForGender($gender) { $feeds = array(); foreach ($this->feeds as $key=>$feed) { if (isset($feed['GENDER']) && $feed['GENDER'] == $gender) { $feeds[$key] = $feed; } } return $feeds; } public function getAllControllers() { $controllers = array(); $controllers[] = $this->getNewsFeed('topnews'); foreach (array('men', 'women', 'coed') as $gender) { if ($sportsConfig = $this->getSportsForGender($gender)) { foreach ($sportsConfig as $key => $sportData) { if ($newsFeed = $this->getNewsFeed($key)) { $controllers[] = $newsFeed; } if ($scheduleFeed = $this->getScheduleFeed($key)) { $controllers[] = $scheduleFeed; } } } } return $controllers; } protected function initializeForCommand() { $this->feeds = $this->loadFeedData(); $this->navFeeds = $this->getModuleSections('page-index'); switch($this->command) { case 'fetchAllData': $this->preFetchAllData(); return 0; break; default: $this->invalidCommand(); break; } } }
lgpl-2.1
farseerri/git_code
src/plugins/qnx/blackberryconfigurationmanager.h
4692
/************************************************************************** ** ** Copyright (C) 2014 BlackBerry Limited. All rights reserved. ** ** Contact: BlackBerry ([email protected]) ** Contact: KDAB ([email protected]) ** ** This file is part of Qt Creator. ** ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and Digia. For licensing terms and ** conditions see http://www.qt.io/licensing. For further information ** use the contact form at http://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 or version 3 as published by the Free ** Software Foundation and appearing in the file LICENSE.LGPLv21 and ** LICENSE.LGPLv3 included in the packaging of this file. Please review the ** following information to ensure the GNU Lesser General Public License ** requirements will be met: https://www.gnu.org/licenses/lgpl.html and ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Digia gives you certain additional ** rights. These rights are described in the Digia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ****************************************************************************/ #ifndef BLACKBERRYCONFIGURATIONMANAGER_H #define BLACKBERRYCONFIGURATIONMANAGER_H #include <utils/environment.h> #include <utils/fileutils.h> #include <QSettings> #include <QObject> namespace Utils { class PersistentSettingsWriter; } namespace Qnx { namespace Internal { class BlackBerryApiLevelConfiguration; class BlackBerryRuntimeConfiguration; class QnxPlugin; class BlackBerryConfigurationManager : public QObject { Q_OBJECT public: enum ConfigurationType { ApiLevel = 0x01, Runtime = 0x02 }; Q_DECLARE_FLAGS(ConfigurationTypes, ConfigurationType) static BlackBerryConfigurationManager *instance(); ~BlackBerryConfigurationManager(); bool addApiLevel(BlackBerryApiLevelConfiguration *config); void removeApiLevel(BlackBerryApiLevelConfiguration *config); bool addRuntime(BlackBerryRuntimeConfiguration *runtime); void removeRuntime(BlackBerryRuntimeConfiguration *runtime); QList<BlackBerryApiLevelConfiguration*> apiLevels() const; QList<BlackBerryRuntimeConfiguration *> runtimes() const; QList<BlackBerryApiLevelConfiguration*> manualApiLevels() const; QList<BlackBerryApiLevelConfiguration *> activeApiLevels() const; BlackBerryApiLevelConfiguration *apiLevelFromEnvFile(const Utils::FileName &envFile) const; BlackBerryRuntimeConfiguration *runtimeFromFilePath(const QString &path); BlackBerryApiLevelConfiguration *defaultApiLevel() const; QString barsignerCskPath() const; QString idTokenPath() const; QString barsignerDbPath() const; QString defaultKeystorePath() const; QString defaultDebugTokenPath() const; // returns the environment for the default API level QList<Utils::EnvironmentItem> defaultConfigurationEnv() const; void loadAutoDetectedConfigurations(QFlags<ConfigurationType> types); void setDefaultConfiguration(BlackBerryApiLevelConfiguration *config); bool newestApiLevelEnabled() const; void emitSettingsChanged(); #ifdef WITH_TESTS void initUnitTest(); #endif public slots: void loadSettings(); void saveSettings(); void checkToolChainConfiguration(); signals: void 
settingsLoaded(); void settingsChanged(); private: BlackBerryConfigurationManager(QObject *parent = 0); static BlackBerryConfigurationManager *m_instance; QList<BlackBerryApiLevelConfiguration*> m_apiLevels; QList<BlackBerryRuntimeConfiguration*> m_runtimes; BlackBerryApiLevelConfiguration *m_defaultConfiguration; Utils::PersistentSettingsWriter *m_writer; void saveConfigurations(); void restoreConfigurations(); void loadAutoDetectedApiLevels(); void loadAutoDetectedRuntimes(); void loadManualConfigurations(); void setKitsAutoDetectionSource(); void insertApiLevelByVersion(BlackBerryApiLevelConfiguration* apiLevel); void insertRuntimeByVersion(BlackBerryRuntimeConfiguration* runtime); friend class QnxPlugin; }; } // namespace Internal } // namespace Qnx Q_DECLARE_OPERATORS_FOR_FLAGS(Qnx::Internal::BlackBerryConfigurationManager::ConfigurationTypes) #endif // BLACKBERRYCONFIGURATIONMANAGER_H
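An illustrative caller-side sketch for the configuration manager declared above (not part of Qt Creator; it assumes the plugin has already created the singleton):

// Hypothetical call site inside the QNX plugin:
using namespace Qnx::Internal;

BlackBerryConfigurationManager *mgr = BlackBerryConfigurationManager::instance();
mgr->loadSettings();                                    // restore persisted API levels/runtimes

foreach (BlackBerryApiLevelConfiguration *apiLevel, mgr->activeApiLevels()) {
    Q_UNUSED(apiLevel);                                 // inspect each active API level here
}

if (BlackBerryApiLevelConfiguration *def = mgr->defaultApiLevel())
    mgr->setDefaultConfiguration(def);                  // re-assert the default configuration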
lgpl-2.1
adfernandes/pcp
qa/src/disk_test.c
3204
/* * Copyright (c) 1997-2002 Silicon Graphics, Inc. All Rights Reserved. */ #include <pcp/pmapi.h> #include "libpcp.h" static int listOne[2]; #define LEN(l) (sizeof(l)/sizeof(l[0])) static const char *namelist[] = { "disk.dev.read" }; #define NCONTEXTS 2 int main(int argc, char **argv) { int e, i; int h[NCONTEXTS]; pmID metrics[2]; pmResult *resp; pmInDom diskindom; pmDesc desc; int *inst; char **name; int numinst; int c; int sts; int errflag = 0; pmSetProgname(argv[0]); while ((c = getopt(argc, argv, "D:?")) != EOF) { switch (c) { case 'D': /* debug options */ sts = pmSetDebug(optarg); if (sts < 0) { fprintf(stderr, "%s: unrecognized debug options specification (%s)\n", pmGetProgname(), optarg); errflag++; } break; case '?': default: errflag++; break; } } if (errflag || optind != argc) { fprintf(stderr, "Usage: %s [-D debugspec]\n", pmGetProgname()); exit(1); } for (i=0; i < NCONTEXTS; i++) { if ((h[i] = pmNewContext(PM_CONTEXT_HOST, "localhost")) < 0) { printf("pmNewContext: %s\n", pmErrStr(h[i])); exit(1); } if ((e = pmLookupName(1, namelist, metrics)) < 0) { printf("pmLookupName: %s\n", pmErrStr(e)); exit(1); } if ((e = pmLookupDesc(metrics[0], &desc)) < 0) { printf("pmLookupDesc: %s\n", pmErrStr(e)); exit(1); } } diskindom = desc.indom; if ((numinst = pmGetInDom(diskindom, &inst, &name)) < 0) { printf("pmGetInDom: %s\n", pmErrStr(numinst)); exit(1); } printf("Disks:\n"); for (i = 0; i < numinst; i++) printf("\t[%d]: %d %s\n", i, inst[i], name[i]); for (i=0; i < NCONTEXTS; i++) { pmUseContext(h[i]); listOne[0] = inst[0]; listOne[1] = inst[numinst-1]; pmAddProfile(diskindom, 0, (int *)0); printf("all drives should be included here\n"); if ((e = pmFetch(1, metrics, &resp)) < 0) { printf("pmFetch[2]: %s\n", pmErrStr(e)); } else __pmDumpResult(stdout, resp); pmDelProfile(diskindom, 0, (int *)0); putchar('\n'); printf("no drives should be included here\n"); if ((e = pmFetch(1, metrics, &resp)) < 0) { printf("pmFetch[3]: %s\n", pmErrStr(e)); } else __pmDumpResult(stdout, resp); pmDelProfile(diskindom, 0, (int *)0); pmAddProfile(diskindom, LEN(listOne), listOne); putchar('\n'); printf("only the first and last drive should be included here\n"); if ((e = pmFetch(1, metrics, &resp)) < 0) { printf("pmFetch[0]: %s\n", pmErrStr(e)); } else __pmDumpResult(stdout, resp); pmDelProfile(diskindom, 0, (int *)0); pmAddProfile(diskindom, numinst, inst); pmDelProfile(diskindom, 1, listOne); putchar('\n'); printf("all except drive zero should be included here\n"); if ((e = pmFetch(1, metrics, &resp)) < 0) { printf("pmFetch[1]: %s\n", pmErrStr(e)); } else __pmDumpResult(stdout, resp); pmDelProfile(diskindom, 0, (int *)0); pmAddProfile(diskindom, 1, inst); putchar('\n'); printf("drive zero ONLY should be included here\n"); if ((e = pmFetch(1, metrics, &resp)) < 0) { printf("pmFetch[1]: %s\n", pmErrStr(e)); } else __pmDumpResult(stdout, resp); } exit(0); }
lgpl-2.1
libvirt/libvirt
src/storage_file/storage_file_probe.c
32781
/* * storage_file_probe.c: file utility functions for FS storage backend * * Copyright (C) 2007-2017 Red Hat, Inc. * Copyright (C) 2007-2008 Daniel P. Berrange * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library. If not, see * <http://www.gnu.org/licenses/>. */ #include <config.h> #include <sys/types.h> #include <unistd.h> #include <fcntl.h> #include "internal.h" #include "storage_file_probe.h" #include "storage_source_conf.h" #include "viralloc.h" #include "virbitmap.h" #include "virendian.h" #include "virfile.h" #include "virlog.h" #define VIR_FROM_THIS VIR_FROM_STORAGE VIR_LOG_INIT("storage_file.storagefileprobe"); enum lv_endian { LV_LITTLE_ENDIAN = 1, /* 1234 */ LV_BIG_ENDIAN /* 4321 */ }; enum { BACKING_STORE_OK, BACKING_STORE_INVALID, BACKING_STORE_ERROR, }; #define FILE_TYPE_VERSIONS_LAST 3 struct FileEncryptionInfo { int format; /* Encryption format to assign */ int magicOffset; /* Byte offset of the magic */ const char *magic; /* Optional string of magic */ enum lv_endian endian; /* Endianness of file format */ int versionOffset; /* Byte offset from start of file * where we find version number, * -1 to always fail the version test, * -2 to always pass the version test */ int versionSize; /* Size in bytes of version data (0, 2, or 4) */ int versionNumbers[FILE_TYPE_VERSIONS_LAST]; /* Version numbers to validate. Zeroes are ignored. */ int modeOffset; /* Byte offset of the format native encryption mode */ char modeValue; /* Value expected at offset */ int payloadOffset; /* start offset of the volume data (in 512 byte sectors) */ }; struct FileTypeInfo { int magicOffset; /* Byte offset of the magic */ const char *magic; /* Optional string of file magic * to check at head of file */ enum lv_endian endian; /* Endianness of file format */ int versionOffset; /* Byte offset from start of file * where we find version number, * -1 to always fail the version test, * -2 to always pass the version test */ int versionSize; /* Size in bytes of version data (0, 2, or 4) */ int versionNumbers[FILE_TYPE_VERSIONS_LAST]; /* Version numbers to validate. Zeroes are ignored. 
*/ int sizeOffset; /* Byte offset from start of file * where we find capacity info, * -1 to use st_size as capacity */ int sizeBytes; /* Number of bytes for size field */ int sizeMultiplier; /* A scaling factor if size is not in bytes */ /* Store a COW base image path (possibly relative), * or NULL if there is no COW base image, to RES; * return BACKING_STORE_* */ const struct FileEncryptionInfo *cryptInfo; /* Encryption info */ unsigned long long (*getClusterSize)(const char *buf, size_t buf_size); int (*getBackingStore)(char **res, int *format, const char *buf, size_t buf_size); int (*getFeatures)(virBitmap **features, int format, char *buf, ssize_t len); }; static int cowGetBackingStore(char **, int *, const char *, size_t); static unsigned long long qcow2GetClusterSize(const char *buf, size_t buf_size); static int qcowXGetBackingStore(char **, int *, const char *, size_t); static int qcow2GetFeatures(virBitmap **features, int format, char *buf, ssize_t len); static int vmdk4GetBackingStore(char **, int *, const char *, size_t); static int qedGetBackingStore(char **, int *, const char *, size_t); #define QCOWX_HDR_VERSION (4) #define QCOWX_HDR_BACKING_FILE_OFFSET (QCOWX_HDR_VERSION+4) #define QCOWX_HDR_BACKING_FILE_SIZE (QCOWX_HDR_BACKING_FILE_OFFSET+8) #define QCOWX_HDR_CLUSTER_BITS_OFFSET (QCOWX_HDR_BACKING_FILE_SIZE+4) #define QCOWX_HDR_IMAGE_SIZE (QCOWX_HDR_CLUSTER_BITS_OFFSET+4) #define QCOW1_HDR_CRYPT (QCOWX_HDR_IMAGE_SIZE+8+1+1+2) #define QCOW2_HDR_CRYPT (QCOWX_HDR_IMAGE_SIZE+8) #define QCOW1_HDR_TOTAL_SIZE (QCOW1_HDR_CRYPT+4+8) #define QCOW2_HDR_TOTAL_SIZE (QCOW2_HDR_CRYPT+4+4+8+8+4+4+8) #define QCOW2_HDR_EXTENSION_END 0 #define QCOW2_HDR_EXTENSION_BACKING_FORMAT 0xE2792ACA #define QCOW2v3_HDR_FEATURES_INCOMPATIBLE (QCOW2_HDR_TOTAL_SIZE) #define QCOW2v3_HDR_FEATURES_COMPATIBLE (QCOW2v3_HDR_FEATURES_INCOMPATIBLE+8) #define QCOW2v3_HDR_FEATURES_AUTOCLEAR (QCOW2v3_HDR_FEATURES_COMPATIBLE+8) /* The location of the header size [4 bytes] */ #define QCOW2v3_HDR_SIZE (QCOW2_HDR_TOTAL_SIZE+8+8+8+4) #define QED_HDR_FEATURES_OFFSET (4+4+4+4) #define QED_HDR_IMAGE_SIZE (QED_HDR_FEATURES_OFFSET+8+8+8+8) #define QED_HDR_BACKING_FILE_OFFSET (QED_HDR_IMAGE_SIZE+8) #define QED_HDR_BACKING_FILE_SIZE (QED_HDR_BACKING_FILE_OFFSET+4) #define QED_F_BACKING_FILE 0x01 #define QED_F_BACKING_FORMAT_NO_PROBE 0x04 #define PLOOP_IMAGE_SIZE_OFFSET 36 #define PLOOP_SIZE_MULTIPLIER 512 #define LUKS_HDR_MAGIC_LEN 6 #define LUKS_HDR_VERSION_LEN 2 #define LUKS_HDR_CIPHER_NAME_LEN 32 #define LUKS_HDR_CIPHER_MODE_LEN 32 #define LUKS_HDR_HASH_SPEC_LEN 32 #define LUKS_HDR_PAYLOAD_LEN 4 /* Format described by qemu commit id '3e308f20e' */ #define LUKS_HDR_VERSION_OFFSET LUKS_HDR_MAGIC_LEN #define LUKS_HDR_PAYLOAD_OFFSET (LUKS_HDR_MAGIC_LEN+\ LUKS_HDR_VERSION_LEN+\ LUKS_HDR_CIPHER_NAME_LEN+\ LUKS_HDR_CIPHER_MODE_LEN+\ LUKS_HDR_HASH_SPEC_LEN) static struct FileEncryptionInfo const luksEncryptionInfo[] = { { .format = VIR_STORAGE_ENCRYPTION_FORMAT_LUKS, /* Magic is 'L','U','K','S', 0xBA, 0xBE */ .magicOffset = 0, .magic = "\x4c\x55\x4b\x53\xba\xbe", .endian = LV_BIG_ENDIAN, .versionOffset = LUKS_HDR_VERSION_OFFSET, .versionSize = LUKS_HDR_VERSION_LEN, .versionNumbers = {1}, .modeOffset = -1, .modeValue = -1, .payloadOffset = LUKS_HDR_PAYLOAD_OFFSET, }, { 0 } }; static struct FileEncryptionInfo const qcow1EncryptionInfo[] = { { .format = VIR_STORAGE_ENCRYPTION_FORMAT_QCOW, .magicOffset = 0, .magic = NULL, .endian = LV_BIG_ENDIAN, .versionOffset = -1, .versionSize = 0, .versionNumbers = {}, .modeOffset = QCOW1_HDR_CRYPT, 
.modeValue = 1, .payloadOffset = -1, }, { 0 } }; static struct FileEncryptionInfo const qcow2EncryptionInfo[] = { { .format = VIR_STORAGE_ENCRYPTION_FORMAT_QCOW, .magicOffset = 0, .magic = NULL, .endian = LV_BIG_ENDIAN, .versionOffset = -1, .versionSize = 0, .versionNumbers = {}, .modeOffset = QCOW2_HDR_CRYPT, .modeValue = 1, .payloadOffset = -1, }, { .format = VIR_STORAGE_ENCRYPTION_FORMAT_LUKS, .magicOffset = 0, .magic = NULL, .endian = LV_BIG_ENDIAN, .versionOffset = -1, .versionSize = 0, .versionNumbers = {}, .modeOffset = QCOW2_HDR_CRYPT, .modeValue = 2, .payloadOffset = -1, }, { 0 } }; static struct FileTypeInfo const fileTypeInfo[] = { [VIR_STORAGE_FILE_NONE] = { 0, NULL, LV_LITTLE_ENDIAN, -1, 0, {0}, 0, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_RAW] = { 0, NULL, LV_LITTLE_ENDIAN, -1, 0, {0}, 0, 0, 0, luksEncryptionInfo, NULL, NULL, NULL }, [VIR_STORAGE_FILE_DIR] = { 0, NULL, LV_LITTLE_ENDIAN, -1, 0, {0}, 0, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_BOCHS] = { /*"Bochs Virtual HD Image", */ /* Untested */ 0, NULL, LV_LITTLE_ENDIAN, 64, 4, {0x20000}, 32+16+16+4+4+4+4+4, 8, 1, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_CLOOP] = { /* #!/bin/sh #V2.0 Format modprobe cloop file=$0 && mount -r -t iso9660 /dev/cloop $1 */ /* Untested */ 0, NULL, LV_LITTLE_ENDIAN, -1, 0, {0}, -1, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_DMG] = { /* XXX QEMU says there's no magic for dmg, * /usr/share/misc/magic lists double magic (both offsets * would have to match) but then disables that check. */ 0, NULL, 0, -1, 0, {0}, -1, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_ISO] = { 32769, "CD001", LV_LITTLE_ENDIAN, -2, 0, {0}, -1, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_VPC] = { 0, "conectix", LV_BIG_ENDIAN, 12, 4, {0x10000}, 8 + 4 + 4 + 8 + 4 + 4 + 2 + 2 + 4, 8, 1, NULL, NULL, NULL, NULL }, /* TODO: add getBackingStore function */ [VIR_STORAGE_FILE_VDI] = { 64, "\x7f\x10\xda\xbe", LV_LITTLE_ENDIAN, 68, 4, {0x00010001}, 64 + 5 * 4 + 256 + 7 * 4, 8, 1, NULL, NULL, NULL, NULL}, /* Not direct file formats, but used for various drivers */ [VIR_STORAGE_FILE_FAT] = { 0, NULL, LV_LITTLE_ENDIAN, -1, 0, {0}, 0, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_VHD] = { 0, NULL, LV_LITTLE_ENDIAN, -1, 0, {0}, 0, 0, 0, NULL, NULL, NULL, NULL }, [VIR_STORAGE_FILE_PLOOP] = { 0, "WithouFreSpacExt", LV_LITTLE_ENDIAN, -2, 0, {0}, PLOOP_IMAGE_SIZE_OFFSET, 0, PLOOP_SIZE_MULTIPLIER, NULL, NULL, NULL, NULL }, /* All formats with a backing store probe below here */ [VIR_STORAGE_FILE_COW] = { 0, "OOOM", LV_BIG_ENDIAN, 4, 4, {2}, 4+4+1024+4, 8, 1, NULL, NULL, cowGetBackingStore, NULL }, [VIR_STORAGE_FILE_QCOW] = { 0, "QFI", LV_BIG_ENDIAN, 4, 4, {1}, QCOWX_HDR_IMAGE_SIZE, 8, 1, qcow1EncryptionInfo, NULL, qcowXGetBackingStore, NULL }, [VIR_STORAGE_FILE_QCOW2] = { 0, "QFI", LV_BIG_ENDIAN, 4, 4, {2, 3}, QCOWX_HDR_IMAGE_SIZE, 8, 1, qcow2EncryptionInfo, qcow2GetClusterSize, qcowXGetBackingStore, qcow2GetFeatures }, [VIR_STORAGE_FILE_QED] = { /* https://wiki.qemu.org/Features/QED */ 0, "QED", LV_LITTLE_ENDIAN, -2, 0, {0}, QED_HDR_IMAGE_SIZE, 8, 1, NULL, NULL, qedGetBackingStore, NULL }, [VIR_STORAGE_FILE_VMDK] = { 0, "KDMV", LV_LITTLE_ENDIAN, 4, 4, {1, 2, 3}, 4+4+4, 8, 512, NULL, NULL, vmdk4GetBackingStore, NULL }, }; G_STATIC_ASSERT(G_N_ELEMENTS(fileTypeInfo) == VIR_STORAGE_FILE_LAST); /* qcow2 compatible features in the order they appear on-disk */ enum qcow2CompatibleFeature { QCOW2_COMPATIBLE_FEATURE_LAZY_REFCOUNTS = 0, QCOW2_COMPATIBLE_FEATURE_LAST }; /* conversion to virStorageFileFeature */ 
static const virStorageFileFeature qcow2CompatibleFeatureArray[] = { VIR_STORAGE_FILE_FEATURE_LAZY_REFCOUNTS, }; G_STATIC_ASSERT(G_N_ELEMENTS(qcow2CompatibleFeatureArray) == QCOW2_COMPATIBLE_FEATURE_LAST); /* qcow2 incompatible features in the order they appear on-disk */ enum qcow2IncompatibleFeature { QCOW2_INCOMPATIBLE_FEATURE_DIRTY = 0, QCOW2_INCOMPATIBLE_FEATURE_CORRUPT, QCOW2_INCOMPATIBLE_FEATURE_DATA_FILE, QCOW2_INCOMPATIBLE_FEATURE_COMPRESSION, QCOW2_INCOMPATIBLE_FEATURE_EXTL2, QCOW2_INCOMPATIBLE_FEATURE_LAST }; /* conversion to virStorageFileFeature */ static const virStorageFileFeature qcow2IncompatibleFeatureArray[] = { VIR_STORAGE_FILE_FEATURE_LAST, /* QCOW2_INCOMPATIBLE_FEATURE_DIRTY */ VIR_STORAGE_FILE_FEATURE_LAST, /* QCOW2_INCOMPATIBLE_FEATURE_CORRUPT */ VIR_STORAGE_FILE_FEATURE_LAST, /* QCOW2_INCOMPATIBLE_FEATURE_DATA_FILE */ VIR_STORAGE_FILE_FEATURE_LAST, /* QCOW2_INCOMPATIBLE_FEATURE_COMPRESSION */ VIR_STORAGE_FILE_FEATURE_EXTENDED_L2, /* QCOW2_INCOMPATIBLE_FEATURE_EXTL2 */ }; G_STATIC_ASSERT(G_N_ELEMENTS(qcow2IncompatibleFeatureArray) == QCOW2_INCOMPATIBLE_FEATURE_LAST); static int cowGetBackingStore(char **res, int *format, const char *buf, size_t buf_size) { #define COW_FILENAME_MAXLEN 1024 *res = NULL; *format = VIR_STORAGE_FILE_AUTO; if (buf_size < 4+4+ COW_FILENAME_MAXLEN) return BACKING_STORE_INVALID; if (buf[4+4] == '\0') { /* cow_header_v2.backing_file[0] */ *format = VIR_STORAGE_FILE_NONE; return BACKING_STORE_OK; } *res = g_strndup((const char *)buf + 4 + 4, COW_FILENAME_MAXLEN); return BACKING_STORE_OK; } static int qcow2GetExtensions(const char *buf, size_t buf_size, int *backingFormat) { size_t offset; size_t extension_start; size_t extension_end; int version = virReadBufInt32BE(buf + QCOWX_HDR_VERSION); if (version < 2) { /* QCow1 doesn't have the extensions capability * used to store backing format */ return 0; } if (version == 2) extension_start = QCOW2_HDR_TOTAL_SIZE; else extension_start = virReadBufInt32BE(buf + QCOW2v3_HDR_SIZE); /* * Traditionally QCow2 files had a layout of * * [header] * [backingStoreName] * * Although the backingStoreName typically followed * the header immediately, this was not required by * the format. By specifying a higher byte offset for * the backing file offset in the header, it was * possible to leave space between the header and * start of backingStore. * * This hack is now used to store extensions to the * qcow2 format: * * [header] * [extensions] * [backingStoreName] * * Thus the file region to search for extensions is * between the end of the header (QCOW2_HDR_TOTAL_SIZE) * and the start of the backingStoreName (offset) * * for qcow2 v3 images, the length of the header * is stored at QCOW2v3_HDR_SIZE */ extension_end = virReadBufInt64BE(buf + QCOWX_HDR_BACKING_FILE_OFFSET); if (extension_end > buf_size) return -1; /* * The extensions take format of * * int32: magic * int32: length * byte[length]: payload * * Unknown extensions can be ignored by skipping * over "length" bytes in the data stream. 
*/ offset = extension_start; while (offset < (buf_size-8) && offset < (extension_end-8)) { unsigned int magic = virReadBufInt32BE(buf + offset); unsigned int len = virReadBufInt32BE(buf + offset + 4); offset += 8; if ((offset + len) < offset) break; if ((offset + len) > buf_size) break; switch (magic) { case QCOW2_HDR_EXTENSION_BACKING_FORMAT: { g_autofree char *tmp = NULL; if (!backingFormat) break; tmp = g_new0(char, len + 1); memcpy(tmp, buf + offset, len); tmp[len] = '\0'; *backingFormat = virStorageFileFormatTypeFromString(tmp); if (*backingFormat <= VIR_STORAGE_FILE_NONE) return -1; break; } case QCOW2_HDR_EXTENSION_END: return 0; } offset += len; } return 0; } static unsigned long long qcow2GetClusterSize(const char *buf, size_t buf_size) { int clusterBits = 0; if ((QCOWX_HDR_CLUSTER_BITS_OFFSET + 4) > buf_size) return 0; clusterBits = virReadBufInt32BE(buf + QCOWX_HDR_CLUSTER_BITS_OFFSET); if (clusterBits > 0) return 1ULL << clusterBits; return 0; } static int qcowXGetBackingStore(char **res, int *format, const char *buf, size_t buf_size) { unsigned long long offset; unsigned int size; *res = NULL; *format = VIR_STORAGE_FILE_AUTO; if (buf_size < QCOWX_HDR_BACKING_FILE_OFFSET+8+4) return BACKING_STORE_INVALID; offset = virReadBufInt64BE(buf + QCOWX_HDR_BACKING_FILE_OFFSET); if (offset > buf_size) return BACKING_STORE_INVALID; if (offset == 0) { *format = VIR_STORAGE_FILE_NONE; return BACKING_STORE_OK; } size = virReadBufInt32BE(buf + QCOWX_HDR_BACKING_FILE_SIZE); if (size == 0) { *format = VIR_STORAGE_FILE_NONE; return BACKING_STORE_OK; } if (size > 1023) return BACKING_STORE_INVALID; if (offset + size > buf_size || offset + size < offset) return BACKING_STORE_INVALID; *res = g_new0(char, size + 1); memcpy(*res, buf + offset, size); (*res)[size] = '\0'; if (qcow2GetExtensions(buf, buf_size, format) < 0) return BACKING_STORE_INVALID; return BACKING_STORE_OK; } static int vmdk4GetBackingStore(char **res, int *format, const char *buf, size_t buf_size) { static const char prefix[] = "parentFileNameHint=\""; char *start, *end; size_t len; g_autofree char *desc = NULL; desc = g_new0(char, VIR_STORAGE_MAX_HEADER); *res = NULL; /* * Technically this should have been VMDK, since * VMDK spec / VMware impl only support VMDK backed * by VMDK. 
QEMU isn't following this though and * does probing on VMDK backing files, hence we set * AUTO */ *format = VIR_STORAGE_FILE_AUTO; if (buf_size <= 0x200) return BACKING_STORE_INVALID; len = buf_size - 0x200; if (len > VIR_STORAGE_MAX_HEADER) len = VIR_STORAGE_MAX_HEADER; memcpy(desc, buf + 0x200, len); desc[len] = '\0'; start = strstr(desc, prefix); if (start == NULL) { *format = VIR_STORAGE_FILE_NONE; return BACKING_STORE_OK; } start += strlen(prefix); end = strchr(start, '"'); if (end == NULL) return BACKING_STORE_INVALID; if (end == start) { *format = VIR_STORAGE_FILE_NONE; return BACKING_STORE_OK; } *end = '\0'; *res = g_strdup(start); return BACKING_STORE_OK; } static int qedGetBackingStore(char **res, int *format, const char *buf, size_t buf_size) { unsigned long long flags; unsigned long offset, size; *res = NULL; /* Check if this image has a backing file */ if (buf_size < QED_HDR_FEATURES_OFFSET+8) return BACKING_STORE_INVALID; flags = virReadBufInt64LE(buf + QED_HDR_FEATURES_OFFSET); if (!(flags & QED_F_BACKING_FILE)) { *format = VIR_STORAGE_FILE_NONE; return BACKING_STORE_OK; } /* Parse the backing file */ if (buf_size < QED_HDR_BACKING_FILE_OFFSET+8) return BACKING_STORE_INVALID; offset = virReadBufInt32LE(buf + QED_HDR_BACKING_FILE_OFFSET); if (offset > buf_size) return BACKING_STORE_INVALID; size = virReadBufInt32LE(buf + QED_HDR_BACKING_FILE_SIZE); if (size == 0) return BACKING_STORE_OK; if (offset + size > buf_size || offset + size < offset) return BACKING_STORE_INVALID; *res = g_new0(char, size + 1); memcpy(*res, buf + offset, size); (*res)[size] = '\0'; if (flags & QED_F_BACKING_FORMAT_NO_PROBE) *format = VIR_STORAGE_FILE_RAW; else *format = VIR_STORAGE_FILE_AUTO_SAFE; return BACKING_STORE_OK; } static bool virStorageFileMatchesMagic(int magicOffset, const char *magic, char *buf, size_t buflen) { int mlen; if (magic == NULL) return false; /* Validate magic data */ mlen = strlen(magic); if (magicOffset + mlen > buflen) return false; if (memcmp(buf + magicOffset, magic, mlen) != 0) return false; return true; } static bool virStorageFileMatchesVersion(int versionOffset, int versionSize, const int *versionNumbers, int endian, char *buf, size_t buflen) { int version; size_t i; /* Validate version number info */ if (versionOffset == -1) return false; /* -2 == non-versioned file format, so trivially match */ if (versionOffset == -2) return true; /* A positive versionOffset, requires using a valid versionSize */ if (versionSize != 2 && versionSize != 4) return false; if ((versionOffset + versionSize) > buflen) return false; if (endian == LV_LITTLE_ENDIAN) { if (versionSize == 4) version = virReadBufInt32LE(buf + versionOffset); else version = virReadBufInt16LE(buf + versionOffset); } else { if (versionSize == 4) version = virReadBufInt32BE(buf + versionOffset); else version = virReadBufInt16BE(buf + versionOffset); } for (i = 0; i < FILE_TYPE_VERSIONS_LAST && versionNumbers[i]; i++) { VIR_DEBUG("Compare detected version %d vs one of the expected versions %d", version, versionNumbers[i]); if (version == versionNumbers[i]) return true; } return false; } static int virStorageFileProbeFormatFromBuf(const char *path, char *buf, size_t buflen) { int format = VIR_STORAGE_FILE_RAW; size_t i; int possibleFormat = VIR_STORAGE_FILE_RAW; VIR_DEBUG("path=%s, buf=%p, buflen=%zu", path, buf, buflen); /* First check file magic */ for (i = 0; i < VIR_STORAGE_FILE_LAST; i++) { if (virStorageFileMatchesMagic(fileTypeInfo[i].magicOffset, fileTypeInfo[i].magic, buf, buflen)) { if 
(!virStorageFileMatchesVersion(fileTypeInfo[i].versionOffset, fileTypeInfo[i].versionSize, fileTypeInfo[i].versionNumbers, fileTypeInfo[i].endian, buf, buflen)) { possibleFormat = i; continue; } format = i; goto cleanup; } } if (possibleFormat != VIR_STORAGE_FILE_RAW) VIR_WARN("File %s matches %s magic, but version is wrong. " "Please report new version to [email protected]", path, virStorageFileFormatTypeToString(possibleFormat)); cleanup: VIR_DEBUG("format=%d", format); return format; } static void qcow2GetFeaturesProcessGroup(uint64_t bits, const virStorageFileFeature *featuremap, size_t nfeatures, virBitmap *features) { size_t i; for (i = 0; i < nfeatures; i++) { if ((bits & ((uint64_t) 1 << i)) && featuremap[i] != VIR_STORAGE_FILE_FEATURE_LAST) ignore_value(virBitmapSetBit(features, featuremap[i])); } } static int qcow2GetFeatures(virBitmap **features, int format, char *buf, ssize_t len) { int version = -1; version = virReadBufInt32BE(buf + fileTypeInfo[format].versionOffset); if (version == 2) return 0; if (len < QCOW2v3_HDR_SIZE) return -1; *features = virBitmapNew(VIR_STORAGE_FILE_FEATURE_LAST); qcow2GetFeaturesProcessGroup(virReadBufInt64BE(buf + QCOW2v3_HDR_FEATURES_COMPATIBLE), qcow2CompatibleFeatureArray, G_N_ELEMENTS(qcow2CompatibleFeatureArray), *features); qcow2GetFeaturesProcessGroup(virReadBufInt64BE(buf + QCOW2v3_HDR_FEATURES_INCOMPATIBLE), qcow2IncompatibleFeatureArray, G_N_ELEMENTS(qcow2IncompatibleFeatureArray), *features); return 0; } static bool virStorageFileHasEncryptionFormat(const struct FileEncryptionInfo *info, char *buf, size_t len) { if (!info->magic && info->modeOffset == -1) return false; /* Shouldn't happen - expect at least one */ if (info->magic) { if (!virStorageFileMatchesMagic(info->magicOffset, info->magic, buf, len)) return false; if (info->versionOffset != -1 && !virStorageFileMatchesVersion(info->versionOffset, info->versionSize, info->versionNumbers, info->endian, buf, len)) return false; return true; } else if (info->modeOffset != -1) { int crypt_format; if (info->modeOffset >= len) return false; crypt_format = virReadBufInt32BE(buf + info->modeOffset); if (crypt_format != info->modeValue) return false; return true; } else { return false; } } static int virStorageFileGetEncryptionPayloadOffset(const struct FileEncryptionInfo *info, char *buf) { int payload_offset = -1; if (info->payloadOffset != -1) { if (info->endian == LV_LITTLE_ENDIAN) payload_offset = virReadBufInt32LE(buf + info->payloadOffset); else payload_offset = virReadBufInt32BE(buf + info->payloadOffset); } return payload_offset; } /* Given a header in BUF with length LEN, as parsed from the storage file * assuming it has the given FORMAT, populate information into META * with information about the file and its backing store. Return format * of the backing store as BACKING_FORMAT. PATH and FORMAT have to be * pre-populated in META. * * Note that this function may be called repeatedly on @meta, so it must * clean up any existing allocated memory which would be overwritten. 
*/ int virStorageFileProbeGetMetadata(virStorageSource *meta, char *buf, size_t len) { int format; size_t i; VIR_DEBUG("path=%s, buf=%p, len=%zu, meta->format=%d", meta->path, buf, len, meta->format); if (meta->format == VIR_STORAGE_FILE_AUTO) meta->format = virStorageFileProbeFormatFromBuf(meta->path, buf, len); if (meta->format <= VIR_STORAGE_FILE_NONE || meta->format >= VIR_STORAGE_FILE_LAST) { virReportSystemError(EINVAL, _("unknown storage file meta->format %d"), meta->format); return -1; } if (fileTypeInfo[meta->format].cryptInfo != NULL) { for (i = 0; fileTypeInfo[meta->format].cryptInfo[i].format != 0; i++) { if (virStorageFileHasEncryptionFormat(&fileTypeInfo[meta->format].cryptInfo[i], buf, len)) { int expt_fmt = fileTypeInfo[meta->format].cryptInfo[i].format; if (!meta->encryption) { meta->encryption = g_new0(virStorageEncryption, 1); meta->encryption->format = expt_fmt; } else { if (meta->encryption->format != expt_fmt) { virReportError(VIR_ERR_XML_ERROR, _("encryption format %d doesn't match " "expected format %d"), meta->encryption->format, expt_fmt); return -1; } } meta->encryption->payload_offset = virStorageFileGetEncryptionPayloadOffset(&fileTypeInfo[meta->format].cryptInfo[i], buf); } } } /* XXX we should consider moving virStorageBackendUpdateVolInfo * code into this method, for non-magic files */ if (!fileTypeInfo[meta->format].magic) return 0; /* Optionally extract capacity from file */ if (fileTypeInfo[meta->format].sizeOffset != -1) { if ((fileTypeInfo[meta->format].sizeOffset + 8) > len) return 0; if (fileTypeInfo[meta->format].endian == LV_LITTLE_ENDIAN) meta->capacity = virReadBufInt64LE(buf + fileTypeInfo[meta->format].sizeOffset); else meta->capacity = virReadBufInt64BE(buf + fileTypeInfo[meta->format].sizeOffset); /* Avoid unlikely, but theoretically possible overflow */ if (meta->capacity > (ULLONG_MAX / fileTypeInfo[meta->format].sizeMultiplier)) return 0; meta->capacity *= fileTypeInfo[meta->format].sizeMultiplier; } if (fileTypeInfo[meta->format].getClusterSize != NULL) meta->clusterSize = fileTypeInfo[meta->format].getClusterSize(buf, len); VIR_FREE(meta->backingStoreRaw); if (fileTypeInfo[meta->format].getBackingStore != NULL) { int store = fileTypeInfo[meta->format].getBackingStore(&meta->backingStoreRaw, &format, buf, len); meta->backingStoreRawFormat = format; if (store == BACKING_STORE_INVALID) return 0; if (store == BACKING_STORE_ERROR) return -1; } g_clear_pointer(&meta->features, virBitmapFree); if (fileTypeInfo[meta->format].getFeatures != NULL && fileTypeInfo[meta->format].getFeatures(&meta->features, meta->format, buf, len) < 0) return -1; VIR_FREE(meta->compat); if (meta->format == VIR_STORAGE_FILE_QCOW2 && meta->features) meta->compat = g_strdup("1.1"); return 0; } /** * virStorageFileProbeFormat: * * Probe for the format of 'path', returning the detected * disk format. * * Callers are advised never to trust the returned 'format' * unless it is listed as VIR_STORAGE_FILE_RAW, since a * malicious guest can turn a raw file into any other non-raw * format at will. 
* * Best option: Don't use this function */ int virStorageFileProbeFormat(const char *path, uid_t uid, gid_t gid) { struct stat sb; ssize_t len = VIR_STORAGE_MAX_HEADER; VIR_AUTOCLOSE fd = -1; g_autofree char *header = NULL; if ((fd = virFileOpenAs(path, O_RDONLY, 0, uid, gid, 0)) < 0) { virReportSystemError(-fd, _("Failed to open file '%s'"), path); return -1; } if (fstat(fd, &sb) < 0) { virReportSystemError(errno, _("cannot stat file '%s'"), path); return -1; } /* No header to probe for directories */ if (S_ISDIR(sb.st_mode)) return VIR_STORAGE_FILE_DIR; if (lseek(fd, 0, SEEK_SET) == (off_t)-1) { virReportSystemError(errno, _("cannot set to start of '%s'"), path); return -1; } if ((len = virFileReadHeaderFD(fd, len, &header)) < 0) { virReportSystemError(errno, _("cannot read header '%s'"), path); return -1; } return virStorageFileProbeFormatFromBuf(path, header, len); }
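A short caller-side sketch for the probe entry point above (illustrative only; the image path is made up, and, as the comment warns, a probed non-raw format must not be trusted for untrusted images):

/* Hypothetical caller, not part of libvirt itself. */
#include <stdio.h>

static void
probeExample(void)
{
    const char *path = "/var/lib/libvirt/images/guest.img";   /* made-up path */
    int fmt = virStorageFileProbeFormat(path, 0, 0);           /* run as uid 0 / gid 0 */

    if (fmt < 0) {
        fprintf(stderr, "probing failed\n");
        return;
    }

    /* Only VIR_STORAGE_FILE_RAW can be trusted without an explicit format,
     * since a malicious guest can dress a raw image up as any other format. */
    printf("detected format: %s\n", virStorageFileFormatTypeToString(fmt));
}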
lgpl-2.1
maui-packages/qt-creator
src/plugins/remotelinux/packageuploader.cpp
4820
/**************************************************************************** ** ** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies). ** Contact: http://www.qt-project.org/legal ** ** This file is part of Qt Creator. ** ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and Digia. For licensing terms and ** conditions see http://qt.digia.com/licensing. For further information ** use the contact form at http://qt.digia.com/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 as published by the Free Software ** Foundation and appearing in the file LICENSE.LGPL included in the ** packaging of this file. Please review the following information to ** ensure the GNU Lesser General Public License version 2.1 requirements ** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** In addition, as a special exception, Digia gives you certain additional ** rights. These rights are described in the Digia Qt LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ****************************************************************************/ #include "packageuploader.h" #include <utils/qtcassert.h> #include <ssh/sftpchannel.h> #include <ssh/sshconnection.h> using namespace QSsh; namespace RemoteLinux { namespace Internal { PackageUploader::PackageUploader(QObject *parent) : QObject(parent), m_state(Inactive), m_connection(0) { } PackageUploader::~PackageUploader() { } void PackageUploader::uploadPackage(SshConnection *connection, const QString &localFilePath, const QString &remoteFilePath) { QTC_ASSERT(m_state == Inactive, return); setState(InitializingSftp); emit progress(tr("Preparing SFTP connection...")); m_localFilePath = localFilePath; m_remoteFilePath = remoteFilePath; m_connection = connection; connect(m_connection, SIGNAL(error(QSsh::SshError)), SLOT(handleConnectionFailure())); m_uploader = m_connection->createSftpChannel(); connect(m_uploader.data(), SIGNAL(initialized()), this, SLOT(handleSftpChannelInitialized())); connect(m_uploader.data(), SIGNAL(channelError(QString)), this, SLOT(handleSftpChannelError(QString))); connect(m_uploader.data(), SIGNAL(finished(QSsh::SftpJobId,QString)), this, SLOT(handleSftpJobFinished(QSsh::SftpJobId,QString))); m_uploader->initialize(); } void PackageUploader::cancelUpload() { QTC_ASSERT(m_state == InitializingSftp || m_state == Uploading, return); cleanup(); } void PackageUploader::handleConnectionFailure() { if (m_state == Inactive) return; const QString errorMsg = m_connection->errorString(); setState(Inactive); emit uploadFinished(tr("Connection failed: %1").arg(errorMsg)); } void PackageUploader::handleSftpChannelError(const QString &errorMsg) { QTC_ASSERT(m_state == InitializingSftp || m_state == Inactive, return); if (m_state == Inactive) return; setState(Inactive); emit uploadFinished(tr("SFTP error: %1").arg(errorMsg)); } void PackageUploader::handleSftpChannelInitialized() { QTC_ASSERT(m_state == InitializingSftp || m_state == Inactive, return); if (m_state == Inactive) return; const SftpJobId job = m_uploader->uploadFile(m_localFilePath, m_remoteFilePath, SftpOverwriteExisting); if (job == SftpInvalidJob) { setState(Inactive); emit 
uploadFinished(tr("Package upload failed: Could not open file.")); } else { emit progress(tr("Starting upload...")); setState(Uploading); } } void PackageUploader::handleSftpJobFinished(SftpJobId, const QString &errorMsg) { QTC_ASSERT(m_state == Uploading || m_state == Inactive, return); if (m_state == Inactive) return; if (!errorMsg.isEmpty()) emit uploadFinished(tr("Failed to upload package: %2").arg(errorMsg)); else emit uploadFinished(); cleanup(); } void PackageUploader::cleanup() { m_uploader->closeChannel(); setState(Inactive); } void PackageUploader::setState(State newState) { if (m_state == newState) return; if (newState == Inactive) { if (m_uploader) { disconnect(m_uploader.data(), 0, this, 0); m_uploader.clear(); } if (m_connection) { disconnect(m_connection, 0, this, 0); m_connection = 0; } } m_state = newState; } } // namespace Internal } // namespace RemoteLinux
lgpl-2.1
apetresc/JFreeChart
src/main/java/org/jfree/chart/plot/DefaultDrawingSupplier.java
18055
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2008, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Java is a trademark or registered trademark of Sun Microsystems, Inc. * in the United States and other countries.] * * --------------------------- * DefaultDrawingSupplier.java * --------------------------- * (C) Copyright 2003-2008, by Object Refinery Limited. * * Original Author: David Gilbert (for Object Refinery Limited); * Contributor(s): Jeremy Bowman; * * Changes * ------- * 16-Jan-2003 : Version 1 (DG); * 17-Jan-2003 : Added stroke method, renamed DefaultPaintSupplier * --> DefaultDrawingSupplier (DG) * 27-Jan-2003 : Incorporated code from SeriesShapeFactory, originally * contributed by Jeremy Bowman (DG); * 25-Mar-2003 : Implemented Serializable (DG); * 20-Aug-2003 : Implemented Cloneable and PublicCloneable (DG); * ------------- JFREECHART 1.0.x --------------------------------------------- * 13-Jun-2007 : Added fillPaintSequence (DG); * */ package org.jfree.chart.plot; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Paint; import java.awt.Polygon; import java.awt.Shape; import java.awt.Stroke; import java.awt.geom.Ellipse2D; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.Arrays; import org.jfree.chart.ChartColor; import org.jfree.io.SerialUtilities; import org.jfree.util.PublicCloneable; import org.jfree.util.ShapeUtilities; /** * A default implementation of the {@link DrawingSupplier} interface. All * {@link Plot} instances have a new instance of this class installed by * default. */ public class DefaultDrawingSupplier implements DrawingSupplier, Cloneable, PublicCloneable, Serializable { /** For serialization. */ private static final long serialVersionUID = -7339847061039422538L; /** The default fill paint sequence. */ public static final Paint[] DEFAULT_PAINT_SEQUENCE = ChartColor.createDefaultPaintArray(); /** The default outline paint sequence. */ public static final Paint[] DEFAULT_OUTLINE_PAINT_SEQUENCE = new Paint[] { Color.lightGray}; /** The default fill paint sequence. */ public static final Paint[] DEFAULT_FILL_PAINT_SEQUENCE = new Paint[] { Color.white}; /** The default stroke sequence. */ public static final Stroke[] DEFAULT_STROKE_SEQUENCE = new Stroke[] { new BasicStroke(1.0f, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_BEVEL)}; /** The default outline stroke sequence. 
*/ public static final Stroke[] DEFAULT_OUTLINE_STROKE_SEQUENCE = new Stroke[] {new BasicStroke(1.0f, BasicStroke.CAP_SQUARE, BasicStroke.JOIN_BEVEL)}; /** The default shape sequence. */ public static final Shape[] DEFAULT_SHAPE_SEQUENCE = createStandardSeriesShapes(); /** The paint sequence. */ private transient Paint[] paintSequence; /** The current paint index. */ private int paintIndex; /** The outline paint sequence. */ private transient Paint[] outlinePaintSequence; /** The current outline paint index. */ private int outlinePaintIndex; /** The fill paint sequence. */ private transient Paint[] fillPaintSequence; /** The current fill paint index. */ private int fillPaintIndex; /** The stroke sequence. */ private transient Stroke[] strokeSequence; /** The current stroke index. */ private int strokeIndex; /** The outline stroke sequence. */ private transient Stroke[] outlineStrokeSequence; /** The current outline stroke index. */ private int outlineStrokeIndex; /** The shape sequence. */ private transient Shape[] shapeSequence; /** The current shape index. */ private int shapeIndex; /** * Creates a new supplier, with default sequences for fill paint, outline * paint, stroke and shapes. */ public DefaultDrawingSupplier() { this(DEFAULT_PAINT_SEQUENCE, DEFAULT_FILL_PAINT_SEQUENCE, DEFAULT_OUTLINE_PAINT_SEQUENCE, DEFAULT_STROKE_SEQUENCE, DEFAULT_OUTLINE_STROKE_SEQUENCE, DEFAULT_SHAPE_SEQUENCE); } /** * Creates a new supplier. * * @param paintSequence the fill paint sequence. * @param outlinePaintSequence the outline paint sequence. * @param strokeSequence the stroke sequence. * @param outlineStrokeSequence the outline stroke sequence. * @param shapeSequence the shape sequence. */ public DefaultDrawingSupplier(Paint[] paintSequence, Paint[] outlinePaintSequence, Stroke[] strokeSequence, Stroke[] outlineStrokeSequence, Shape[] shapeSequence) { this.paintSequence = paintSequence; this.fillPaintSequence = DEFAULT_FILL_PAINT_SEQUENCE; this.outlinePaintSequence = outlinePaintSequence; this.strokeSequence = strokeSequence; this.outlineStrokeSequence = outlineStrokeSequence; this.shapeSequence = shapeSequence; } /** * Creates a new supplier. * * @param paintSequence the paint sequence. * @param fillPaintSequence the fill paint sequence. * @param outlinePaintSequence the outline paint sequence. * @param strokeSequence the stroke sequence. * @param outlineStrokeSequence the outline stroke sequence. * @param shapeSequence the shape sequence. * * @since 1.0.6 */ public DefaultDrawingSupplier(Paint[] paintSequence, Paint[] fillPaintSequence, Paint[] outlinePaintSequence, Stroke[] strokeSequence, Stroke[] outlineStrokeSequence, Shape[] shapeSequence) { this.paintSequence = paintSequence; this.fillPaintSequence = fillPaintSequence; this.outlinePaintSequence = outlinePaintSequence; this.strokeSequence = strokeSequence; this.outlineStrokeSequence = outlineStrokeSequence; this.shapeSequence = shapeSequence; } /** * Returns the next paint in the sequence. * * @return The paint. */ public Paint getNextPaint() { Paint result = this.paintSequence[this.paintIndex % this.paintSequence.length]; this.paintIndex++; return result; } /** * Returns the next outline paint in the sequence. * * @return The paint. */ public Paint getNextOutlinePaint() { Paint result = this.outlinePaintSequence[ this.outlinePaintIndex % this.outlinePaintSequence.length]; this.outlinePaintIndex++; return result; } /** * Returns the next fill paint in the sequence. * * @return The paint. 
* * @since 1.0.6 */ public Paint getNextFillPaint() { Paint result = this.fillPaintSequence[this.fillPaintIndex % this.fillPaintSequence.length]; this.fillPaintIndex++; return result; } /** * Returns the next stroke in the sequence. * * @return The stroke. */ public Stroke getNextStroke() { Stroke result = this.strokeSequence[ this.strokeIndex % this.strokeSequence.length]; this.strokeIndex++; return result; } /** * Returns the next outline stroke in the sequence. * * @return The stroke. */ public Stroke getNextOutlineStroke() { Stroke result = this.outlineStrokeSequence[ this.outlineStrokeIndex % this.outlineStrokeSequence.length]; this.outlineStrokeIndex++; return result; } /** * Returns the next shape in the sequence. * * @return The shape. */ public Shape getNextShape() { Shape result = this.shapeSequence[ this.shapeIndex % this.shapeSequence.length]; this.shapeIndex++; return result; } /** * Creates an array of standard shapes to display for the items in series * on charts. * * @return The array of shapes. */ public static Shape[] createStandardSeriesShapes() { Shape[] result = new Shape[10]; double size = 6.0; double delta = size / 2.0; int[] xpoints = null; int[] ypoints = null; // square result[0] = new Rectangle2D.Double(-delta, -delta, size, size); // circle result[1] = new Ellipse2D.Double(-delta, -delta, size, size); // up-pointing triangle xpoints = intArray(0.0, delta, -delta); ypoints = intArray(-delta, delta, delta); result[2] = new Polygon(xpoints, ypoints, 3); // diamond xpoints = intArray(0.0, delta, 0.0, -delta); ypoints = intArray(-delta, 0.0, delta, 0.0); result[3] = new Polygon(xpoints, ypoints, 4); // horizontal rectangle result[4] = new Rectangle2D.Double(-delta, -delta / 2, size, size / 2); // down-pointing triangle xpoints = intArray(-delta, +delta, 0.0); ypoints = intArray(-delta, -delta, delta); result[5] = new Polygon(xpoints, ypoints, 3); // horizontal ellipse result[6] = new Ellipse2D.Double(-delta, -delta / 2, size, size / 2); // right-pointing triangle xpoints = intArray(-delta, delta, -delta); ypoints = intArray(-delta, 0.0, delta); result[7] = new Polygon(xpoints, ypoints, 3); // vertical rectangle result[8] = new Rectangle2D.Double(-delta / 2, -delta, size / 2, size); // left-pointing triangle xpoints = intArray(-delta, delta, delta); ypoints = intArray(0.0, -delta, +delta); result[9] = new Polygon(xpoints, ypoints, 3); return result; } /** * Tests this object for equality with another object. * * @param obj the object (<code>null</code> permitted). * * @return A boolean. 
*/ public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof DefaultDrawingSupplier)) { return false; } DefaultDrawingSupplier that = (DefaultDrawingSupplier) obj; if (!Arrays.equals(this.paintSequence, that.paintSequence)) { return false; } if (this.paintIndex != that.paintIndex) { return false; } if (!Arrays.equals(this.outlinePaintSequence, that.outlinePaintSequence)) { return false; } if (this.outlinePaintIndex != that.outlinePaintIndex) { return false; } if (!Arrays.equals(this.strokeSequence, that.strokeSequence)) { return false; } if (this.strokeIndex != that.strokeIndex) { return false; } if (!Arrays.equals(this.outlineStrokeSequence, that.outlineStrokeSequence)) { return false; } if (this.outlineStrokeIndex != that.outlineStrokeIndex) { return false; } if (!equalShapes(this.shapeSequence, that.shapeSequence)) { return false; } if (this.shapeIndex != that.shapeIndex) { return false; } return true; } /** * A utility method for testing the equality of two arrays of shapes. * * @param s1 the first array (<code>null</code> permitted). * @param s2 the second array (<code>null</code> permitted). * * @return A boolean. */ private boolean equalShapes(Shape[] s1, Shape[] s2) { if (s1 == null) { return s2 == null; } if (s2 == null) { return false; } if (s1.length != s2.length) { return false; } for (int i = 0; i < s1.length; i++) { if (!ShapeUtilities.equal(s1[i], s2[i])) { return false; } } return true; } /** * Handles serialization. * * @param stream the output stream. * * @throws IOException if there is an I/O problem. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); int paintCount = this.paintSequence.length; stream.writeInt(paintCount); for (int i = 0; i < paintCount; i++) { SerialUtilities.writePaint(this.paintSequence[i], stream); } int outlinePaintCount = this.outlinePaintSequence.length; stream.writeInt(outlinePaintCount); for (int i = 0; i < outlinePaintCount; i++) { SerialUtilities.writePaint(this.outlinePaintSequence[i], stream); } int strokeCount = this.strokeSequence.length; stream.writeInt(strokeCount); for (int i = 0; i < strokeCount; i++) { SerialUtilities.writeStroke(this.strokeSequence[i], stream); } int outlineStrokeCount = this.outlineStrokeSequence.length; stream.writeInt(outlineStrokeCount); for (int i = 0; i < outlineStrokeCount; i++) { SerialUtilities.writeStroke(this.outlineStrokeSequence[i], stream); } int shapeCount = this.shapeSequence.length; stream.writeInt(shapeCount); for (int i = 0; i < shapeCount; i++) { SerialUtilities.writeShape(this.shapeSequence[i], stream); } } /** * Restores a serialized object. * * @param stream the input stream. * * @throws IOException if there is an I/O problem. * @throws ClassNotFoundException if there is a problem loading a class. 
*/ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); int paintCount = stream.readInt(); this.paintSequence = new Paint[paintCount]; for (int i = 0; i < paintCount; i++) { this.paintSequence[i] = SerialUtilities.readPaint(stream); } int outlinePaintCount = stream.readInt(); this.outlinePaintSequence = new Paint[outlinePaintCount]; for (int i = 0; i < outlinePaintCount; i++) { this.outlinePaintSequence[i] = SerialUtilities.readPaint(stream); } int strokeCount = stream.readInt(); this.strokeSequence = new Stroke[strokeCount]; for (int i = 0; i < strokeCount; i++) { this.strokeSequence[i] = SerialUtilities.readStroke(stream); } int outlineStrokeCount = stream.readInt(); this.outlineStrokeSequence = new Stroke[outlineStrokeCount]; for (int i = 0; i < outlineStrokeCount; i++) { this.outlineStrokeSequence[i] = SerialUtilities.readStroke(stream); } int shapeCount = stream.readInt(); this.shapeSequence = new Shape[shapeCount]; for (int i = 0; i < shapeCount; i++) { this.shapeSequence[i] = SerialUtilities.readShape(stream); } } /** * Helper method to avoid lots of explicit casts in getShape(). Returns * an array containing the provided doubles cast to ints. * * @param a x * @param b y * @param c z * * @return int[3] with converted params. */ private static int[] intArray(double a, double b, double c) { return new int[] {(int) a, (int) b, (int) c}; } /** * Helper method to avoid lots of explicit casts in getShape(). Returns * an array containing the provided doubles cast to ints. * * @param a x * @param b y * @param c z * @param d t * * @return int[4] with converted params. */ private static int[] intArray(double a, double b, double c, double d) { return new int[] {(int) a, (int) b, (int) c, (int) d}; } /** * Returns a clone. * * @return A clone. * * @throws CloneNotSupportedException if a component of the supplier does * not support cloning. */ public Object clone() throws CloneNotSupportedException { DefaultDrawingSupplier clone = (DefaultDrawingSupplier) super.clone(); return clone; } }
lgpl-2.1
dlatnikov/jagger
chassis/invokers/src/main/java/com/griddynamics/jagger/invoker/http/HttpQuery.java
3338
/*
 * Copyright (c) 2010-2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved
 * http://www.griddynamics.com
 *
 * This library is free software; you can redistribute it and/or modify it under the terms of
 * the GNU Lesser General Public License as published by the Free Software Foundation; either
 * version 2.1 of the License, or any later version.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package com.griddynamics.jagger.invoker.http;

import com.google.common.collect.Maps;
import org.apache.commons.httpclient.methods.RequestEntity;
import org.springframework.beans.factory.annotation.Required;

import java.io.Serializable;
import java.util.Map;

/**
 * @author Alexey Kiselyov
 *         Date: 04.08.11
 */
public class HttpQuery implements Serializable {

    public static enum Method {
        POST, PUT, GET, DELETE, TRACE, HEAD, OPTIONS
    }

    private Method method = Method.GET;
    private Map<String, String> methodParams = Maps.newHashMap();
    private Map<String, Object> clientParams = Maps.newHashMap();

    public HttpQuery() {
    }

    public Method getMethod() {
        return this.method;
    }

    @Required
    public void setMethod(Method method) {
        this.method = method;
    }

    public Map<String, String> getMethodParams() {
        return this.methodParams;
    }

    public void setMethodParams(Map<String, String> params) {
        this.methodParams = params;
    }

    public Map<String, Object> getClientParams() {
        return this.clientParams;
    }

    public void setClientParams(Map<String, Object> clientParams) {
        this.clientParams = clientParams;
    }

    @Override
    public String toString() {
        return "HttpQuery{" +
                "method=" + method +
                ", methodParams=" + methodParams +
                ", clientParams=" + clientParams +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        HttpQuery httpQuery = (HttpQuery) o;

        if (!clientParams.equals(httpQuery.clientParams)) return false;
        if (method != httpQuery.method) return false;
        if (!methodParams.equals(httpQuery.methodParams)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = method.hashCode();
        result = 31 * result + methodParams.hashCode();
        result = 31 * result + clientParams.hashCode();
        return result;
    }
}
lgpl-2.1
timthelion/FreeCAD
src/Mod/Fem/App/FemMesh.h
5629
/*************************************************************************** * Copyright (c) Jürgen Riegel ([email protected]) 2009 * * * * This file is part of the FreeCAD CAx development system. * * * * This library is free software; you can redistribute it and/or * * modify it under the terms of the GNU Library General Public * * License as published by the Free Software Foundation; either * * version 2 of the License, or (at your option) any later version. * * * * This library is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU Library General Public License for more details. * * * * You should have received a copy of the GNU Library General Public * * License along with this library; see the file COPYING.LIB. If not, * * write to the Free Software Foundation, Inc., 59 Temple Place, * * Suite 330, Boston, MA 02111-1307, USA * * * ***************************************************************************/ #ifndef FEM_FEMMESH_H #define FEM_FEMMESH_H #include <App/ComplexGeoData.h> #include <Base/Placement.h> #include <Base/Quantity.h> #include <vector> #include <list> #include <boost/shared_ptr.hpp> class SMESH_Gen; class SMESH_Mesh; class SMESH_Hypothesis; class TopoDS_Shape; class TopoDS_Face; class TopoDS_Edge; class TopoDS_Vertex; class TopoDS_Solid; namespace Fem { typedef boost::shared_ptr<SMESH_Hypothesis> SMESH_HypothesisPtr; /** The representation of a FemMesh */ class AppFemExport FemMesh : public Data::ComplexGeoData { TYPESYSTEM_HEADER(); public: FemMesh(); FemMesh(const FemMesh&); ~FemMesh(); FemMesh &operator=(const FemMesh&); const SMESH_Mesh* getSMesh() const; SMESH_Mesh* getSMesh(); static SMESH_Gen * getGenerator(); void addHypothesis(const TopoDS_Shape & aSubShape, SMESH_HypothesisPtr hyp); void setStanardHypotheses(); void compute(); // from base class virtual unsigned int getMemSize (void) const; virtual void Save (Base::Writer &/*writer*/) const; virtual void Restore(Base::XMLReader &/*reader*/); void SaveDocFile (Base::Writer &writer) const; void RestoreDocFile(Base::Reader &reader); /** @name Subelement management */ //@{ /** Sub type list * List of different subelement types * it is NOT a list of the subelements itself */ virtual std::vector<const char*> getElementTypes(void) const; virtual unsigned long countSubElements(const char* Type) const; /// get the subelement by type and number virtual Data::Segment* getSubElement(const char* Type, unsigned long) const; //@} /** @name search and retrieval */ //@{ /// retrieving by region growing std::set<long> getSurfaceNodes(long ElemId, short FaceId, float Angle=360)const; /// retrieving by solid std::set<int> getNodesBySolid(const TopoDS_Solid &solid) const; /// retrieving by face std::set<int> getNodesByFace(const TopoDS_Face &face) const; /// retrieving by edge std::set<int> getNodesByEdge(const TopoDS_Edge &edge) const; /// retrieving by vertex std::set<int> getNodesByVertex(const TopoDS_Vertex &vertex) const; /// retrieving node IDs by element ID std::list<int> getElementNodes(int id) const; /// retrieving face IDs number by face std::list<int> getFacesByFace(const TopoDS_Face &face) const; /// retrieving volume IDs and face IDs number by face std::list<std::pair<int, int> > getVolumesByFace(const TopoDS_Face &face) const; /// retrieving volume IDs and CalculiX face number by face std::map<int, int> getccxVolumesByFace(const TopoDS_Face &face) const; //@} /** @name Placement 
control */ //@{ /// set the transformation void setTransform(const Base::Matrix4D& rclTrf); /// get the transformation Base::Matrix4D getTransform(void) const; /// Bound box from the shape Base::BoundBox3d getBoundBox(void)const; /// get the volume (when there are volume elements) Base::Quantity getVolume(void)const; //@} /** @name Modification */ //@{ /// Applies a transformation on the real geometric data type void transformGeometry(const Base::Matrix4D &rclMat); //@} struct FemMeshInfo { int numFaces; int numNode; int numTria; int numQuad; int numPoly; int numVolu; int numTetr; int numHexa; int numPyrd; int numPris; int numHedr; }; /// struct FemMeshInfo getInfo(void) const; /// import from files void read(const char *FileName); void write(const char *FileName) const; void writeABAQUS(const std::string &Filename) const; private: void copyMeshData(const FemMesh&); void readNastran(const std::string &Filename); private: /// positioning matrix Base::Matrix4D _Mtrx; SMESH_Mesh *myMesh; std::list<SMESH_HypothesisPtr> hypoth; }; } //namespace Part #endif // FEM_FEMMESH_H
lgpl-2.1
sunblithe/qt-everywhere-opensource-src-4.7.1
examples/tutorials/addressbook/part2/main.cpp
2274
/****************************************************************************
**
** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the examples of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:BSD$
** You may use this file under the terms of the BSD license as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
**   * Redistributions of source code must retain the above copyright
**     notice, this list of conditions and the following disclaimer.
**   * Redistributions in binary form must reproduce the above copyright
**     notice, this list of conditions and the following disclaimer in
**     the documentation and/or other materials provided with the
**     distribution.
**   * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
**     the names of its contributors may be used to endorse or promote
**     products derived from this software without specific prior written
**     permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
** $QT_END_LICENSE$
**
****************************************************************************/

#include <QtGui>
#include "addressbook.h"

//! [main function]
int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    AddressBook addressBook;
    addressBook.show();

    return app.exec();
}
//! [main function]
lgpl-2.1
instarcam/InstarVision-Android
ffmpeg/libavcodec/apedec.c
51236
/* * Monkey's Audio lossless audio decoder * Copyright (c) 2007 Benjamin Zores <[email protected]> * based upon libdemac from Dave Chapman. * * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ #include "libavutil/avassert.h" #include "libavutil/channel_layout.h" #include "libavutil/opt.h" #include "avcodec.h" #include "dsputil.h" #include "bytestream.h" #include "internal.h" #include "get_bits.h" #include "unary.h" /** * @file * Monkey's Audio lossless audio decoder */ #define MAX_CHANNELS 2 #define MAX_BYTESPERSAMPLE 3 #define APE_FRAMECODE_MONO_SILENCE 1 #define APE_FRAMECODE_STEREO_SILENCE 3 #define APE_FRAMECODE_PSEUDO_STEREO 4 #define HISTORY_SIZE 512 #define PREDICTOR_ORDER 8 /** Total size of all predictor histories */ #define PREDICTOR_SIZE 50 #define YDELAYA (18 + PREDICTOR_ORDER*4) #define YDELAYB (18 + PREDICTOR_ORDER*3) #define XDELAYA (18 + PREDICTOR_ORDER*2) #define XDELAYB (18 + PREDICTOR_ORDER) #define YADAPTCOEFFSA 18 #define XADAPTCOEFFSA 14 #define YADAPTCOEFFSB 10 #define XADAPTCOEFFSB 5 /** * Possible compression levels * @{ */ enum APECompressionLevel { COMPRESSION_LEVEL_FAST = 1000, COMPRESSION_LEVEL_NORMAL = 2000, COMPRESSION_LEVEL_HIGH = 3000, COMPRESSION_LEVEL_EXTRA_HIGH = 4000, COMPRESSION_LEVEL_INSANE = 5000 }; /** @} */ #define APE_FILTER_LEVELS 3 /** Filter orders depending on compression level */ static const uint16_t ape_filter_orders[5][APE_FILTER_LEVELS] = { { 0, 0, 0 }, { 16, 0, 0 }, { 64, 0, 0 }, { 32, 256, 0 }, { 16, 256, 1280 } }; /** Filter fraction bits depending on compression level */ static const uint8_t ape_filter_fracbits[5][APE_FILTER_LEVELS] = { { 0, 0, 0 }, { 11, 0, 0 }, { 11, 0, 0 }, { 10, 13, 0 }, { 11, 13, 15 } }; /** Filters applied to the decoded data */ typedef struct APEFilter { int16_t *coeffs; ///< actual coefficients used in filtering int16_t *adaptcoeffs; ///< adaptive filter coefficients used for correcting of actual filter coefficients int16_t *historybuffer; ///< filter memory int16_t *delay; ///< filtered values int avg; } APEFilter; typedef struct APERice { uint32_t k; uint32_t ksum; } APERice; typedef struct APERangecoder { uint32_t low; ///< low end of interval uint32_t range; ///< length of interval uint32_t help; ///< bytes_to_follow resp. 
intermediate value unsigned int buffer; ///< buffer for input/output } APERangecoder; /** Filter histories */ typedef struct APEPredictor { int32_t *buf; int32_t lastA[2]; int32_t filterA[2]; int32_t filterB[2]; int32_t coeffsA[2][4]; ///< adaption coefficients int32_t coeffsB[2][5]; ///< adaption coefficients int32_t historybuffer[HISTORY_SIZE + PREDICTOR_SIZE]; unsigned int sample_pos; } APEPredictor; /** Decoder context */ typedef struct APEContext { AVClass *class; ///< class for AVOptions AVCodecContext *avctx; DSPContext dsp; int channels; int samples; ///< samples left to decode in current frame int bps; int fileversion; ///< codec version, very important in decoding process int compression_level; ///< compression levels int fset; ///< which filter set to use (calculated from compression level) int flags; ///< global decoder flags uint32_t CRC; ///< frame CRC int frameflags; ///< frame flags APEPredictor predictor; ///< predictor used for final reconstruction int32_t *decoded_buffer; int decoded_size; int32_t *decoded[MAX_CHANNELS]; ///< decoded data for each channel int blocks_per_loop; ///< maximum number of samples to decode for each call int16_t* filterbuf[APE_FILTER_LEVELS]; ///< filter memory APERangecoder rc; ///< rangecoder used to decode actual values APERice riceX; ///< rice code parameters for the second channel APERice riceY; ///< rice code parameters for the first channel APEFilter filters[APE_FILTER_LEVELS][2]; ///< filters used for reconstruction GetBitContext gb; uint8_t *data; ///< current frame data uint8_t *data_end; ///< frame data end int data_size; ///< frame data allocated size const uint8_t *ptr; ///< current position in frame data int error; void (*entropy_decode_mono)(struct APEContext *ctx, int blockstodecode); void (*entropy_decode_stereo)(struct APEContext *ctx, int blockstodecode); void (*predictor_decode_mono)(struct APEContext *ctx, int count); void (*predictor_decode_stereo)(struct APEContext *ctx, int count); } APEContext; static void ape_apply_filters(APEContext *ctx, int32_t *decoded0, int32_t *decoded1, int count); static void entropy_decode_mono_0000(APEContext *ctx, int blockstodecode); static void entropy_decode_stereo_0000(APEContext *ctx, int blockstodecode); static void entropy_decode_mono_3860(APEContext *ctx, int blockstodecode); static void entropy_decode_stereo_3860(APEContext *ctx, int blockstodecode); static void entropy_decode_mono_3900(APEContext *ctx, int blockstodecode); static void entropy_decode_stereo_3900(APEContext *ctx, int blockstodecode); static void entropy_decode_stereo_3930(APEContext *ctx, int blockstodecode); static void entropy_decode_mono_3990(APEContext *ctx, int blockstodecode); static void entropy_decode_stereo_3990(APEContext *ctx, int blockstodecode); static void predictor_decode_mono_3800(APEContext *ctx, int count); static void predictor_decode_stereo_3800(APEContext *ctx, int count); static void predictor_decode_mono_3930(APEContext *ctx, int count); static void predictor_decode_stereo_3930(APEContext *ctx, int count); static void predictor_decode_mono_3950(APEContext *ctx, int count); static void predictor_decode_stereo_3950(APEContext *ctx, int count); // TODO: dsputilize static av_cold int ape_decode_close(AVCodecContext *avctx) { APEContext *s = avctx->priv_data; int i; for (i = 0; i < APE_FILTER_LEVELS; i++) av_freep(&s->filterbuf[i]); av_freep(&s->decoded_buffer); av_freep(&s->data); s->decoded_size = s->data_size = 0; return 0; } static av_cold int ape_decode_init(AVCodecContext *avctx) { APEContext 
*s = avctx->priv_data; int i; if (avctx->extradata_size != 6) { av_log(avctx, AV_LOG_ERROR, "Incorrect extradata\n"); return AVERROR(EINVAL); } if (avctx->channels > 2) { av_log(avctx, AV_LOG_ERROR, "Only mono and stereo is supported\n"); return AVERROR(EINVAL); } s->bps = avctx->bits_per_coded_sample; switch (s->bps) { case 8: avctx->sample_fmt = AV_SAMPLE_FMT_U8P; break; case 16: avctx->sample_fmt = AV_SAMPLE_FMT_S16P; break; case 24: avctx->sample_fmt = AV_SAMPLE_FMT_S32P; break; default: avpriv_request_sample(avctx, "%d bits per coded sample", s->bps); return AVERROR_PATCHWELCOME; } s->avctx = avctx; s->channels = avctx->channels; s->fileversion = AV_RL16(avctx->extradata); s->compression_level = AV_RL16(avctx->extradata + 2); s->flags = AV_RL16(avctx->extradata + 4); av_log(avctx, AV_LOG_DEBUG, "Compression Level: %d - Flags: %d\n", s->compression_level, s->flags); if (s->compression_level % 1000 || s->compression_level > COMPRESSION_LEVEL_INSANE || !s->compression_level || (s->fileversion < 3930 && s->compression_level == COMPRESSION_LEVEL_INSANE)) { av_log(avctx, AV_LOG_ERROR, "Incorrect compression level %d\n", s->compression_level); return AVERROR_INVALIDDATA; } s->fset = s->compression_level / 1000 - 1; for (i = 0; i < APE_FILTER_LEVELS; i++) { if (!ape_filter_orders[s->fset][i]) break; FF_ALLOC_OR_GOTO(avctx, s->filterbuf[i], (ape_filter_orders[s->fset][i] * 3 + HISTORY_SIZE) * 4, filter_alloc_fail); } if (s->fileversion < 3860) { s->entropy_decode_mono = entropy_decode_mono_0000; s->entropy_decode_stereo = entropy_decode_stereo_0000; } else if (s->fileversion < 3900) { s->entropy_decode_mono = entropy_decode_mono_3860; s->entropy_decode_stereo = entropy_decode_stereo_3860; } else if (s->fileversion < 3930) { s->entropy_decode_mono = entropy_decode_mono_3900; s->entropy_decode_stereo = entropy_decode_stereo_3900; } else if (s->fileversion < 3990) { s->entropy_decode_mono = entropy_decode_mono_3900; s->entropy_decode_stereo = entropy_decode_stereo_3930; } else { s->entropy_decode_mono = entropy_decode_mono_3990; s->entropy_decode_stereo = entropy_decode_stereo_3990; } if (s->fileversion < 3930) { s->predictor_decode_mono = predictor_decode_mono_3800; s->predictor_decode_stereo = predictor_decode_stereo_3800; } else if (s->fileversion < 3950) { s->predictor_decode_mono = predictor_decode_mono_3930; s->predictor_decode_stereo = predictor_decode_stereo_3930; } else { s->predictor_decode_mono = predictor_decode_mono_3950; s->predictor_decode_stereo = predictor_decode_stereo_3950; } ff_dsputil_init(&s->dsp, avctx); avctx->channel_layout = (avctx->channels==2) ? 
AV_CH_LAYOUT_STEREO : AV_CH_LAYOUT_MONO; return 0; filter_alloc_fail: ape_decode_close(avctx); return AVERROR(ENOMEM); } /** * @name APE range decoding functions * @{ */ #define CODE_BITS 32 #define TOP_VALUE ((unsigned int)1 << (CODE_BITS-1)) #define SHIFT_BITS (CODE_BITS - 9) #define EXTRA_BITS ((CODE_BITS-2) % 8 + 1) #define BOTTOM_VALUE (TOP_VALUE >> 8) /** Start the decoder */ static inline void range_start_decoding(APEContext *ctx) { ctx->rc.buffer = bytestream_get_byte(&ctx->ptr); ctx->rc.low = ctx->rc.buffer >> (8 - EXTRA_BITS); ctx->rc.range = (uint32_t) 1 << EXTRA_BITS; } /** Perform normalization */ static inline void range_dec_normalize(APEContext *ctx) { while (ctx->rc.range <= BOTTOM_VALUE) { ctx->rc.buffer <<= 8; if(ctx->ptr < ctx->data_end) { ctx->rc.buffer += *ctx->ptr; ctx->ptr++; } else { ctx->error = 1; } ctx->rc.low = (ctx->rc.low << 8) | ((ctx->rc.buffer >> 1) & 0xFF); ctx->rc.range <<= 8; } } /** * Calculate culmulative frequency for next symbol. Does NO update! * @param ctx decoder context * @param tot_f is the total frequency or (code_value)1<<shift * @return the culmulative frequency */ static inline int range_decode_culfreq(APEContext *ctx, int tot_f) { range_dec_normalize(ctx); ctx->rc.help = ctx->rc.range / tot_f; return ctx->rc.low / ctx->rc.help; } /** * Decode value with given size in bits * @param ctx decoder context * @param shift number of bits to decode */ static inline int range_decode_culshift(APEContext *ctx, int shift) { range_dec_normalize(ctx); ctx->rc.help = ctx->rc.range >> shift; return ctx->rc.low / ctx->rc.help; } /** * Update decoding state * @param ctx decoder context * @param sy_f the interval length (frequency of the symbol) * @param lt_f the lower end (frequency sum of < symbols) */ static inline void range_decode_update(APEContext *ctx, int sy_f, int lt_f) { ctx->rc.low -= ctx->rc.help * lt_f; ctx->rc.range = ctx->rc.help * sy_f; } /** Decode n bits (n <= 16) without modelling */ static inline int range_decode_bits(APEContext *ctx, int n) { int sym = range_decode_culshift(ctx, n); range_decode_update(ctx, 1, sym); return sym; } #define MODEL_ELEMENTS 64 /** * Fixed probabilities for symbols in Monkey Audio version 3.97 */ static const uint16_t counts_3970[22] = { 0, 14824, 28224, 39348, 47855, 53994, 58171, 60926, 62682, 63786, 64463, 64878, 65126, 65276, 65365, 65419, 65450, 65469, 65480, 65487, 65491, 65493, }; /** * Probability ranges for symbols in Monkey Audio version 3.97 */ static const uint16_t counts_diff_3970[21] = { 14824, 13400, 11124, 8507, 6139, 4177, 2755, 1756, 1104, 677, 415, 248, 150, 89, 54, 31, 19, 11, 7, 4, 2, }; /** * Fixed probabilities for symbols in Monkey Audio version 3.98 */ static const uint16_t counts_3980[22] = { 0, 19578, 36160, 48417, 56323, 60899, 63265, 64435, 64971, 65232, 65351, 65416, 65447, 65466, 65476, 65482, 65485, 65488, 65490, 65491, 65492, 65493, }; /** * Probability ranges for symbols in Monkey Audio version 3.98 */ static const uint16_t counts_diff_3980[21] = { 19578, 16582, 12257, 7906, 4576, 2366, 1170, 536, 261, 119, 65, 31, 19, 10, 6, 3, 3, 2, 1, 1, 1, }; /** * Decode symbol * @param ctx decoder context * @param counts probability range start position * @param counts_diff probability range widths */ static inline int range_get_symbol(APEContext *ctx, const uint16_t counts[], const uint16_t counts_diff[]) { int symbol, cf; cf = range_decode_culshift(ctx, 16); if(cf > 65492){ symbol= cf - 65535 + 63; range_decode_update(ctx, 1, cf); if(cf > 65535) ctx->error=1; return symbol; } /* figure 
out the symbol inefficiently; a binary search would be much better */ for (symbol = 0; counts[symbol + 1] <= cf; symbol++); range_decode_update(ctx, counts_diff[symbol], counts[symbol]); return symbol; } /** @} */ // group rangecoder static inline void update_rice(APERice *rice, unsigned int x) { int lim = rice->k ? (1 << (rice->k + 4)) : 0; rice->ksum += ((x + 1) / 2) - ((rice->ksum + 16) >> 5); if (rice->ksum < lim) rice->k--; else if (rice->ksum >= (1 << (rice->k + 5))) rice->k++; } static inline int get_rice_ook(GetBitContext *gb, int k) { unsigned int x; x = get_unary(gb, 1, get_bits_left(gb)); if (k) x = (x << k) | get_bits(gb, k); return x; } static inline int ape_decode_value_3860(APEContext *ctx, GetBitContext *gb, APERice *rice) { unsigned int x, overflow; overflow = get_unary(gb, 1, get_bits_left(gb)); if (ctx->fileversion > 3880) { while (overflow >= 16) { overflow -= 16; rice->k += 4; } } if (!rice->k) x = overflow; else if(rice->k <= MIN_CACHE_BITS) { x = (overflow << rice->k) + get_bits(gb, rice->k); } else { av_log(ctx->avctx, AV_LOG_ERROR, "Too many bits: %d\n", rice->k); return AVERROR_INVALIDDATA; } rice->ksum += x - (rice->ksum + 8 >> 4); if (rice->ksum < (rice->k ? 1 << (rice->k + 4) : 0)) rice->k--; else if (rice->ksum >= (1 << (rice->k + 5)) && rice->k < 24) rice->k++; /* Convert to signed */ if (x & 1) return (x >> 1) + 1; else return -(x >> 1); } static inline int ape_decode_value_3900(APEContext *ctx, APERice *rice) { unsigned int x, overflow; int tmpk; overflow = range_get_symbol(ctx, counts_3970, counts_diff_3970); if (overflow == (MODEL_ELEMENTS - 1)) { tmpk = range_decode_bits(ctx, 5); overflow = 0; } else tmpk = (rice->k < 1) ? 0 : rice->k - 1; if (tmpk <= 16 || ctx->fileversion < 3910) { if (tmpk > 23) { av_log(ctx->avctx, AV_LOG_ERROR, "Too many bits: %d\n", tmpk); return AVERROR_INVALIDDATA; } x = range_decode_bits(ctx, tmpk); } else if (tmpk <= 32) { x = range_decode_bits(ctx, 16); x |= (range_decode_bits(ctx, tmpk - 16) << 16); } else { av_log(ctx->avctx, AV_LOG_ERROR, "Too many bits: %d\n", tmpk); return AVERROR_INVALIDDATA; } x += overflow << tmpk; update_rice(rice, x); /* Convert to signed */ if (x & 1) return (x >> 1) + 1; else return -(x >> 1); } static inline int ape_decode_value_3990(APEContext *ctx, APERice *rice) { unsigned int x, overflow; int base, pivot; pivot = rice->ksum >> 5; if (pivot == 0) pivot = 1; overflow = range_get_symbol(ctx, counts_3980, counts_diff_3980); if (overflow == (MODEL_ELEMENTS - 1)) { overflow = range_decode_bits(ctx, 16) << 16; overflow |= range_decode_bits(ctx, 16); } if (pivot < 0x10000) { base = range_decode_culfreq(ctx, pivot); range_decode_update(ctx, 1, base); } else { int base_hi = pivot, base_lo; int bbits = 0; while (base_hi & ~0xFFFF) { base_hi >>= 1; bbits++; } base_hi = range_decode_culfreq(ctx, base_hi + 1); range_decode_update(ctx, 1, base_hi); base_lo = range_decode_culfreq(ctx, 1 << bbits); range_decode_update(ctx, 1, base_lo); base = (base_hi << bbits) + base_lo; } x = base + overflow * pivot; update_rice(rice, x); /* Convert to signed */ if (x & 1) return (x >> 1) + 1; else return -(x >> 1); } static void decode_array_0000(APEContext *ctx, GetBitContext *gb, int32_t *out, APERice *rice, int blockstodecode) { int i; int ksummax, ksummin; rice->ksum = 0; for (i = 0; i < 5; i++) { out[i] = get_rice_ook(&ctx->gb, 10); rice->ksum += out[i]; } rice->k = av_log2(rice->ksum / 10) + 1; for (; i < 64; i++) { out[i] = get_rice_ook(&ctx->gb, rice->k); rice->ksum += out[i]; rice->k = av_log2(rice->ksum / ((i + 1) 
* 2)) + 1; } ksummax = 1 << rice->k + 7; ksummin = rice->k ? (1 << rice->k + 6) : 0; for (; i < blockstodecode; i++) { out[i] = get_rice_ook(&ctx->gb, rice->k); rice->ksum += out[i] - out[i - 64]; while (rice->ksum < ksummin) { rice->k--; ksummin = rice->k ? ksummin >> 1 : 0; ksummax >>= 1; } while (rice->ksum >= ksummax) { rice->k++; if (rice->k > 24) return; ksummax <<= 1; ksummin = ksummin ? ksummin << 1 : 128; } } for (i = 0; i < blockstodecode; i++) { if (out[i] & 1) out[i] = (out[i] >> 1) + 1; else out[i] = -(out[i] >> 1); } } static void entropy_decode_mono_0000(APEContext *ctx, int blockstodecode) { decode_array_0000(ctx, &ctx->gb, ctx->decoded[0], &ctx->riceY, blockstodecode); } static void entropy_decode_stereo_0000(APEContext *ctx, int blockstodecode) { decode_array_0000(ctx, &ctx->gb, ctx->decoded[0], &ctx->riceY, blockstodecode); decode_array_0000(ctx, &ctx->gb, ctx->decoded[1], &ctx->riceX, blockstodecode); } static void entropy_decode_mono_3860(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; while (blockstodecode--) *decoded0++ = ape_decode_value_3860(ctx, &ctx->gb, &ctx->riceY); } static void entropy_decode_stereo_3860(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; int blocks = blockstodecode; while (blockstodecode--) *decoded0++ = ape_decode_value_3860(ctx, &ctx->gb, &ctx->riceY); while (blocks--) *decoded1++ = ape_decode_value_3860(ctx, &ctx->gb, &ctx->riceX); } static void entropy_decode_mono_3900(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; while (blockstodecode--) *decoded0++ = ape_decode_value_3900(ctx, &ctx->riceY); } static void entropy_decode_stereo_3900(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; int blocks = blockstodecode; while (blockstodecode--) *decoded0++ = ape_decode_value_3900(ctx, &ctx->riceY); range_dec_normalize(ctx); // because of some implementation peculiarities we need to backpedal here ctx->ptr -= 1; range_start_decoding(ctx); while (blocks--) *decoded1++ = ape_decode_value_3900(ctx, &ctx->riceX); } static void entropy_decode_stereo_3930(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; while (blockstodecode--) { *decoded0++ = ape_decode_value_3900(ctx, &ctx->riceY); *decoded1++ = ape_decode_value_3900(ctx, &ctx->riceX); } } static void entropy_decode_mono_3990(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; while (blockstodecode--) *decoded0++ = ape_decode_value_3990(ctx, &ctx->riceY); } static void entropy_decode_stereo_3990(APEContext *ctx, int blockstodecode) { int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; while (blockstodecode--) { *decoded0++ = ape_decode_value_3990(ctx, &ctx->riceY); *decoded1++ = ape_decode_value_3990(ctx, &ctx->riceX); } } static int init_entropy_decoder(APEContext *ctx) { /* Read the CRC */ if (ctx->fileversion >= 3900) { if (ctx->data_end - ctx->ptr < 6) return AVERROR_INVALIDDATA; ctx->CRC = bytestream_get_be32(&ctx->ptr); } else { ctx->CRC = get_bits_long(&ctx->gb, 32); } /* Read the frame flags if they exist */ ctx->frameflags = 0; if ((ctx->fileversion > 3820) && (ctx->CRC & 0x80000000)) { ctx->CRC &= ~0x80000000; if (ctx->data_end - ctx->ptr < 6) return AVERROR_INVALIDDATA; ctx->frameflags = bytestream_get_be32(&ctx->ptr); } /* Initialize the rice structs */ ctx->riceX.k = 10; ctx->riceX.ksum = (1 << 
ctx->riceX.k) * 16; ctx->riceY.k = 10; ctx->riceY.ksum = (1 << ctx->riceY.k) * 16; if (ctx->fileversion >= 3900) { /* The first 8 bits of input are ignored. */ ctx->ptr++; range_start_decoding(ctx); } return 0; } static const int32_t initial_coeffs_fast_3320[1] = { 375, }; static const int32_t initial_coeffs_a_3800[3] = { 64, 115, 64, }; static const int32_t initial_coeffs_b_3800[2] = { 740, 0 }; static const int32_t initial_coeffs_3930[4] = { 360, 317, -109, 98 }; static void init_predictor_decoder(APEContext *ctx) { APEPredictor *p = &ctx->predictor; /* Zero the history buffers */ memset(p->historybuffer, 0, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; /* Initialize and zero the coefficients */ if (ctx->fileversion < 3930) { if (ctx->compression_level == COMPRESSION_LEVEL_FAST) { memcpy(p->coeffsA[0], initial_coeffs_fast_3320, sizeof(initial_coeffs_fast_3320)); memcpy(p->coeffsA[1], initial_coeffs_fast_3320, sizeof(initial_coeffs_fast_3320)); } else { memcpy(p->coeffsA[0], initial_coeffs_a_3800, sizeof(initial_coeffs_a_3800)); memcpy(p->coeffsA[1], initial_coeffs_a_3800, sizeof(initial_coeffs_a_3800)); } } else { memcpy(p->coeffsA[0], initial_coeffs_3930, sizeof(initial_coeffs_3930)); memcpy(p->coeffsA[1], initial_coeffs_3930, sizeof(initial_coeffs_3930)); } memset(p->coeffsB, 0, sizeof(p->coeffsB)); if (ctx->fileversion < 3930) { memcpy(p->coeffsB[0], initial_coeffs_b_3800, sizeof(initial_coeffs_b_3800)); memcpy(p->coeffsB[1], initial_coeffs_b_3800, sizeof(initial_coeffs_b_3800)); } p->filterA[0] = p->filterA[1] = 0; p->filterB[0] = p->filterB[1] = 0; p->lastA[0] = p->lastA[1] = 0; p->sample_pos = 0; } /** Get inverse sign of integer (-1 for positive, 1 for negative and 0 for zero) */ static inline int APESIGN(int32_t x) { return (x < 0) - (x > 0); } static av_always_inline int filter_fast_3320(APEPredictor *p, const int decoded, const int filter, const int delayA) { int32_t predictionA; p->buf[delayA] = p->lastA[filter]; if (p->sample_pos < 3) { p->lastA[filter] = decoded; p->filterA[filter] = decoded; return decoded; } predictionA = p->buf[delayA] * 2 - p->buf[delayA - 1]; p->lastA[filter] = decoded + (predictionA * p->coeffsA[filter][0] >> 9); if ((decoded ^ predictionA) > 0) p->coeffsA[filter][0]++; else p->coeffsA[filter][0]--; p->filterA[filter] += p->lastA[filter]; return p->filterA[filter]; } static av_always_inline int filter_3800(APEPredictor *p, const int decoded, const int filter, const int delayA, const int delayB, const int start, const int shift) { int32_t predictionA, predictionB, sign; int32_t d0, d1, d2, d3, d4; p->buf[delayA] = p->lastA[filter]; p->buf[delayB] = p->filterB[filter]; if (p->sample_pos < start) { predictionA = decoded + p->filterA[filter]; p->lastA[filter] = decoded; p->filterB[filter] = decoded; p->filterA[filter] = predictionA; return predictionA; } d2 = p->buf[delayA]; d1 = (p->buf[delayA] - p->buf[delayA - 1]) << 1; d0 = p->buf[delayA] + ((p->buf[delayA - 2] - p->buf[delayA - 1]) << 3); d3 = p->buf[delayB] * 2 - p->buf[delayB - 1]; d4 = p->buf[delayB]; predictionA = d0 * p->coeffsA[filter][0] + d1 * p->coeffsA[filter][1] + d2 * p->coeffsA[filter][2]; sign = APESIGN(decoded); p->coeffsA[filter][0] += (((d0 >> 30) & 2) - 1) * sign; p->coeffsA[filter][1] += (((d1 >> 28) & 8) - 4) * sign; p->coeffsA[filter][2] += (((d2 >> 28) & 8) - 4) * sign; predictionB = d3 * p->coeffsB[filter][0] - d4 * p->coeffsB[filter][1]; p->lastA[filter] = decoded + (predictionA >> 11); sign = APESIGN(p->lastA[filter]); p->coeffsB[filter][0] += (((d3 
>> 29) & 4) - 2) * sign; p->coeffsB[filter][1] -= (((d4 >> 30) & 2) - 1) * sign; p->filterB[filter] = p->lastA[filter] + (predictionB >> shift); p->filterA[filter] = p->filterB[filter] + ((p->filterA[filter] * 31) >> 5); return p->filterA[filter]; } static void long_filter_high_3800(int32_t *buffer, int order, int shift, int32_t *coeffs, int32_t *delay, int length) { int i, j; int32_t dotprod, sign; memset(coeffs, 0, order * sizeof(*coeffs)); for (i = 0; i < order; i++) delay[i] = buffer[i]; for (i = order; i < length; i++) { dotprod = 0; sign = APESIGN(buffer[i]); for (j = 0; j < order; j++) { dotprod += delay[j] * coeffs[j]; coeffs[j] -= (((delay[j] >> 30) & 2) - 1) * sign; } buffer[i] -= dotprod >> shift; for (j = 0; j < order - 1; j++) delay[j] = delay[j + 1]; delay[order - 1] = buffer[i]; } } static void long_filter_ehigh_3830(int32_t *buffer, int length) { int i, j; int32_t dotprod, sign; int32_t coeffs[8], delay[8]; memset(coeffs, 0, sizeof(coeffs)); memset(delay, 0, sizeof(delay)); for (i = 0; i < length; i++) { dotprod = 0; sign = APESIGN(buffer[i]); for (j = 7; j >= 0; j--) { dotprod += delay[j] * coeffs[j]; coeffs[j] -= (((delay[j] >> 30) & 2) - 1) * sign; } for (j = 7; j > 0; j--) delay[j] = delay[j - 1]; delay[0] = buffer[i]; buffer[i] -= dotprod >> 9; } } static void predictor_decode_stereo_3800(APEContext *ctx, int count) { APEPredictor *p = &ctx->predictor; int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; int32_t coeffs[256], delay[256]; int start = 4, shift = 10; if (ctx->compression_level == COMPRESSION_LEVEL_HIGH) { start = 16; long_filter_high_3800(decoded0, 16, 9, coeffs, delay, count); long_filter_high_3800(decoded1, 16, 9, coeffs, delay, count); } else if (ctx->compression_level == COMPRESSION_LEVEL_EXTRA_HIGH) { int order = 128, shift2 = 11; if (ctx->fileversion >= 3830) { order <<= 1; shift++; shift2++; long_filter_ehigh_3830(decoded0 + order, count - order); long_filter_ehigh_3830(decoded1 + order, count - order); } start = order; long_filter_high_3800(decoded0, order, shift2, coeffs, delay, count); long_filter_high_3800(decoded1, order, shift2, coeffs, delay, count); } while (count--) { int X = *decoded0, Y = *decoded1; if (ctx->compression_level == COMPRESSION_LEVEL_FAST) { *decoded0 = filter_fast_3320(p, Y, 0, YDELAYA); decoded0++; *decoded1 = filter_fast_3320(p, X, 1, XDELAYA); decoded1++; } else { *decoded0 = filter_3800(p, Y, 0, YDELAYA, YDELAYB, start, shift); decoded0++; *decoded1 = filter_3800(p, X, 1, XDELAYA, XDELAYB, start, shift); decoded1++; } /* Combined */ p->buf++; p->sample_pos++; /* Have we filled the history buffer? 
*/ if (p->buf == p->historybuffer + HISTORY_SIZE) { memmove(p->historybuffer, p->buf, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; } } } static void predictor_decode_mono_3800(APEContext *ctx, int count) { APEPredictor *p = &ctx->predictor; int32_t *decoded0 = ctx->decoded[0]; int32_t coeffs[256], delay[256]; int start = 4, shift = 10; if (ctx->compression_level == COMPRESSION_LEVEL_HIGH) { start = 16; long_filter_high_3800(decoded0, 16, 9, coeffs, delay, count); } else if (ctx->compression_level == COMPRESSION_LEVEL_EXTRA_HIGH) { int order = 128, shift2 = 11; if (ctx->fileversion >= 3830) { order <<= 1; shift++; shift2++; long_filter_ehigh_3830(decoded0 + order, count - order); } start = order; long_filter_high_3800(decoded0, order, shift2, coeffs, delay, count); } while (count--) { if (ctx->compression_level == COMPRESSION_LEVEL_FAST) { *decoded0 = filter_fast_3320(p, *decoded0, 0, YDELAYA); decoded0++; } else { *decoded0 = filter_3800(p, *decoded0, 0, YDELAYA, YDELAYB, start, shift); decoded0++; } /* Combined */ p->buf++; p->sample_pos++; /* Have we filled the history buffer? */ if (p->buf == p->historybuffer + HISTORY_SIZE) { memmove(p->historybuffer, p->buf, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; } } } static av_always_inline int predictor_update_3930(APEPredictor *p, const int decoded, const int filter, const int delayA) { int32_t predictionA, sign; int32_t d0, d1, d2, d3; p->buf[delayA] = p->lastA[filter]; d0 = p->buf[delayA ]; d1 = p->buf[delayA ] - p->buf[delayA - 1]; d2 = p->buf[delayA - 1] - p->buf[delayA - 2]; d3 = p->buf[delayA - 2] - p->buf[delayA - 3]; predictionA = d0 * p->coeffsA[filter][0] + d1 * p->coeffsA[filter][1] + d2 * p->coeffsA[filter][2] + d3 * p->coeffsA[filter][3]; p->lastA[filter] = decoded + (predictionA >> 9); p->filterA[filter] = p->lastA[filter] + ((p->filterA[filter] * 31) >> 5); sign = APESIGN(decoded); p->coeffsA[filter][0] += ((d0 < 0) * 2 - 1) * sign; p->coeffsA[filter][1] += ((d1 < 0) * 2 - 1) * sign; p->coeffsA[filter][2] += ((d2 < 0) * 2 - 1) * sign; p->coeffsA[filter][3] += ((d3 < 0) * 2 - 1) * sign; return p->filterA[filter]; } static void predictor_decode_stereo_3930(APEContext *ctx, int count) { APEPredictor *p = &ctx->predictor; int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; ape_apply_filters(ctx, ctx->decoded[0], ctx->decoded[1], count); while (count--) { /* Predictor Y */ int Y = *decoded1, X = *decoded0; *decoded0 = predictor_update_3930(p, Y, 0, YDELAYA); decoded0++; *decoded1 = predictor_update_3930(p, X, 1, XDELAYA); decoded1++; /* Combined */ p->buf++; /* Have we filled the history buffer? */ if (p->buf == p->historybuffer + HISTORY_SIZE) { memmove(p->historybuffer, p->buf, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; } } } static void predictor_decode_mono_3930(APEContext *ctx, int count) { APEPredictor *p = &ctx->predictor; int32_t *decoded0 = ctx->decoded[0]; ape_apply_filters(ctx, ctx->decoded[0], NULL, count); while (count--) { *decoded0 = predictor_update_3930(p, *decoded0, 0, YDELAYA); decoded0++; p->buf++; /* Have we filled the history buffer? 
*/ if (p->buf == p->historybuffer + HISTORY_SIZE) { memmove(p->historybuffer, p->buf, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; } } } static av_always_inline int predictor_update_filter(APEPredictor *p, const int decoded, const int filter, const int delayA, const int delayB, const int adaptA, const int adaptB) { int32_t predictionA, predictionB, sign; p->buf[delayA] = p->lastA[filter]; p->buf[adaptA] = APESIGN(p->buf[delayA]); p->buf[delayA - 1] = p->buf[delayA] - p->buf[delayA - 1]; p->buf[adaptA - 1] = APESIGN(p->buf[delayA - 1]); predictionA = p->buf[delayA ] * p->coeffsA[filter][0] + p->buf[delayA - 1] * p->coeffsA[filter][1] + p->buf[delayA - 2] * p->coeffsA[filter][2] + p->buf[delayA - 3] * p->coeffsA[filter][3]; /* Apply a scaled first-order filter compression */ p->buf[delayB] = p->filterA[filter ^ 1] - ((p->filterB[filter] * 31) >> 5); p->buf[adaptB] = APESIGN(p->buf[delayB]); p->buf[delayB - 1] = p->buf[delayB] - p->buf[delayB - 1]; p->buf[adaptB - 1] = APESIGN(p->buf[delayB - 1]); p->filterB[filter] = p->filterA[filter ^ 1]; predictionB = p->buf[delayB ] * p->coeffsB[filter][0] + p->buf[delayB - 1] * p->coeffsB[filter][1] + p->buf[delayB - 2] * p->coeffsB[filter][2] + p->buf[delayB - 3] * p->coeffsB[filter][3] + p->buf[delayB - 4] * p->coeffsB[filter][4]; p->lastA[filter] = decoded + ((predictionA + (predictionB >> 1)) >> 10); p->filterA[filter] = p->lastA[filter] + ((p->filterA[filter] * 31) >> 5); sign = APESIGN(decoded); p->coeffsA[filter][0] += p->buf[adaptA ] * sign; p->coeffsA[filter][1] += p->buf[adaptA - 1] * sign; p->coeffsA[filter][2] += p->buf[adaptA - 2] * sign; p->coeffsA[filter][3] += p->buf[adaptA - 3] * sign; p->coeffsB[filter][0] += p->buf[adaptB ] * sign; p->coeffsB[filter][1] += p->buf[adaptB - 1] * sign; p->coeffsB[filter][2] += p->buf[adaptB - 2] * sign; p->coeffsB[filter][3] += p->buf[adaptB - 3] * sign; p->coeffsB[filter][4] += p->buf[adaptB - 4] * sign; return p->filterA[filter]; } static void predictor_decode_stereo_3950(APEContext *ctx, int count) { APEPredictor *p = &ctx->predictor; int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; ape_apply_filters(ctx, ctx->decoded[0], ctx->decoded[1], count); while (count--) { /* Predictor Y */ *decoded0 = predictor_update_filter(p, *decoded0, 0, YDELAYA, YDELAYB, YADAPTCOEFFSA, YADAPTCOEFFSB); decoded0++; *decoded1 = predictor_update_filter(p, *decoded1, 1, XDELAYA, XDELAYB, XADAPTCOEFFSA, XADAPTCOEFFSB); decoded1++; /* Combined */ p->buf++; /* Have we filled the history buffer? 
*/ if (p->buf == p->historybuffer + HISTORY_SIZE) { memmove(p->historybuffer, p->buf, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; } } } static void predictor_decode_mono_3950(APEContext *ctx, int count) { APEPredictor *p = &ctx->predictor; int32_t *decoded0 = ctx->decoded[0]; int32_t predictionA, currentA, A, sign; ape_apply_filters(ctx, ctx->decoded[0], NULL, count); currentA = p->lastA[0]; while (count--) { A = *decoded0; p->buf[YDELAYA] = currentA; p->buf[YDELAYA - 1] = p->buf[YDELAYA] - p->buf[YDELAYA - 1]; predictionA = p->buf[YDELAYA ] * p->coeffsA[0][0] + p->buf[YDELAYA - 1] * p->coeffsA[0][1] + p->buf[YDELAYA - 2] * p->coeffsA[0][2] + p->buf[YDELAYA - 3] * p->coeffsA[0][3]; currentA = A + (predictionA >> 10); p->buf[YADAPTCOEFFSA] = APESIGN(p->buf[YDELAYA ]); p->buf[YADAPTCOEFFSA - 1] = APESIGN(p->buf[YDELAYA - 1]); sign = APESIGN(A); p->coeffsA[0][0] += p->buf[YADAPTCOEFFSA ] * sign; p->coeffsA[0][1] += p->buf[YADAPTCOEFFSA - 1] * sign; p->coeffsA[0][2] += p->buf[YADAPTCOEFFSA - 2] * sign; p->coeffsA[0][3] += p->buf[YADAPTCOEFFSA - 3] * sign; p->buf++; /* Have we filled the history buffer? */ if (p->buf == p->historybuffer + HISTORY_SIZE) { memmove(p->historybuffer, p->buf, PREDICTOR_SIZE * sizeof(*p->historybuffer)); p->buf = p->historybuffer; } p->filterA[0] = currentA + ((p->filterA[0] * 31) >> 5); *(decoded0++) = p->filterA[0]; } p->lastA[0] = currentA; } static void do_init_filter(APEFilter *f, int16_t *buf, int order) { f->coeffs = buf; f->historybuffer = buf + order; f->delay = f->historybuffer + order * 2; f->adaptcoeffs = f->historybuffer + order; memset(f->historybuffer, 0, (order * 2) * sizeof(*f->historybuffer)); memset(f->coeffs, 0, order * sizeof(*f->coeffs)); f->avg = 0; } static void init_filter(APEContext *ctx, APEFilter *f, int16_t *buf, int order) { do_init_filter(&f[0], buf, order); do_init_filter(&f[1], buf + order * 3 + HISTORY_SIZE, order); } static void do_apply_filter(APEContext *ctx, int version, APEFilter *f, int32_t *data, int count, int order, int fracbits) { int res; int absres; while (count--) { /* round fixedpoint scalar product */ res = ctx->dsp.scalarproduct_and_madd_int16(f->coeffs, f->delay - order, f->adaptcoeffs - order, order, APESIGN(*data)); res = (res + (1 << (fracbits - 1))) >> fracbits; res += *data; *data++ = res; /* Update the output history */ *f->delay++ = av_clip_int16(res); if (version < 3980) { /* Version ??? to < 3.98 files (untested) */ f->adaptcoeffs[0] = (res == 0) ? 0 : ((res >> 28) & 8) - 4; f->adaptcoeffs[-4] >>= 1; f->adaptcoeffs[-8] >>= 1; } else { /* Version 3.98 and later files */ /* Update the adaption coefficients */ absres = FFABS(res); if (absres) *f->adaptcoeffs = ((res & (-1<<31)) ^ (-1<<30)) >> (25 + (absres <= f->avg*3) + (absres <= f->avg*4/3)); else *f->adaptcoeffs = 0; f->avg += (absres - f->avg) / 16; f->adaptcoeffs[-1] >>= 1; f->adaptcoeffs[-2] >>= 1; f->adaptcoeffs[-8] >>= 1; } f->adaptcoeffs++; /* Have we filled the history buffer? 
*/ if (f->delay == f->historybuffer + HISTORY_SIZE + (order * 2)) { memmove(f->historybuffer, f->delay - (order * 2), (order * 2) * sizeof(*f->historybuffer)); f->delay = f->historybuffer + order * 2; f->adaptcoeffs = f->historybuffer + order; } } } static void apply_filter(APEContext *ctx, APEFilter *f, int32_t *data0, int32_t *data1, int count, int order, int fracbits) { do_apply_filter(ctx, ctx->fileversion, &f[0], data0, count, order, fracbits); if (data1) do_apply_filter(ctx, ctx->fileversion, &f[1], data1, count, order, fracbits); } static void ape_apply_filters(APEContext *ctx, int32_t *decoded0, int32_t *decoded1, int count) { int i; for (i = 0; i < APE_FILTER_LEVELS; i++) { if (!ape_filter_orders[ctx->fset][i]) break; apply_filter(ctx, ctx->filters[i], decoded0, decoded1, count, ape_filter_orders[ctx->fset][i], ape_filter_fracbits[ctx->fset][i]); } } static int init_frame_decoder(APEContext *ctx) { int i, ret; if ((ret = init_entropy_decoder(ctx)) < 0) return ret; init_predictor_decoder(ctx); for (i = 0; i < APE_FILTER_LEVELS; i++) { if (!ape_filter_orders[ctx->fset][i]) break; init_filter(ctx, ctx->filters[i], ctx->filterbuf[i], ape_filter_orders[ctx->fset][i]); } return 0; } static void ape_unpack_mono(APEContext *ctx, int count) { if (ctx->frameflags & APE_FRAMECODE_STEREO_SILENCE) { /* We are pure silence, so we're done. */ av_log(ctx->avctx, AV_LOG_DEBUG, "pure silence mono\n"); return; } ctx->entropy_decode_mono(ctx, count); /* Now apply the predictor decoding */ ctx->predictor_decode_mono(ctx, count); /* Pseudo-stereo - just copy left channel to right channel */ if (ctx->channels == 2) { memcpy(ctx->decoded[1], ctx->decoded[0], count * sizeof(*ctx->decoded[1])); } } static void ape_unpack_stereo(APEContext *ctx, int count) { int32_t left, right; int32_t *decoded0 = ctx->decoded[0]; int32_t *decoded1 = ctx->decoded[1]; if (ctx->frameflags & APE_FRAMECODE_STEREO_SILENCE) { /* We are pure silence, so we're done. */ av_log(ctx->avctx, AV_LOG_DEBUG, "pure silence stereo\n"); return; } ctx->entropy_decode_stereo(ctx, count); /* Now apply the predictor decoding */ ctx->predictor_decode_stereo(ctx, count); /* Decorrelate and scale to output depth */ while (count--) { left = *decoded1 - (*decoded0 / 2); right = left + *decoded0; *(decoded0++) = left; *(decoded1++) = right; } } static int ape_decode_frame(AVCodecContext *avctx, void *data, int *got_frame_ptr, AVPacket *avpkt) { AVFrame *frame = data; const uint8_t *buf = avpkt->data; APEContext *s = avctx->priv_data; uint8_t *sample8; int16_t *sample16; int32_t *sample24; int i, ch, ret; int blockstodecode; /* this should never be negative, but bad things will happen if it is, so check it just to make sure. */ av_assert0(s->samples >= 0); if(!s->samples){ uint32_t nblocks, offset; int buf_size; if (!avpkt->size) { *got_frame_ptr = 0; return 0; } if (avpkt->size < 8) { av_log(avctx, AV_LOG_ERROR, "Packet is too small\n"); return AVERROR_INVALIDDATA; } buf_size = avpkt->size & ~3; if (buf_size != avpkt->size) { av_log(avctx, AV_LOG_WARNING, "packet size is not a multiple of 4. 
" "extra bytes at the end will be skipped.\n"); } if (s->fileversion < 3950) // previous versions overread two bytes buf_size += 2; av_fast_malloc(&s->data, &s->data_size, buf_size); if (!s->data) return AVERROR(ENOMEM); s->dsp.bswap_buf((uint32_t*)s->data, (const uint32_t*)buf, buf_size >> 2); memset(s->data + (buf_size & ~3), 0, buf_size & 3); s->ptr = s->data; s->data_end = s->data + buf_size; nblocks = bytestream_get_be32(&s->ptr); offset = bytestream_get_be32(&s->ptr); if (s->fileversion >= 3900) { if (offset > 3) { av_log(avctx, AV_LOG_ERROR, "Incorrect offset passed\n"); s->data = NULL; return AVERROR_INVALIDDATA; } if (s->data_end - s->ptr < offset) { av_log(avctx, AV_LOG_ERROR, "Packet is too small\n"); return AVERROR_INVALIDDATA; } s->ptr += offset; } else { init_get_bits(&s->gb, s->ptr, (s->data_end - s->ptr) * 8); if (s->fileversion > 3800) skip_bits_long(&s->gb, offset * 8); else skip_bits_long(&s->gb, offset); } if (!nblocks || nblocks > INT_MAX) { av_log(avctx, AV_LOG_ERROR, "Invalid sample count: %u.\n", nblocks); return AVERROR_INVALIDDATA; } s->samples = nblocks; /* Initialize the frame decoder */ if (init_frame_decoder(s) < 0) { av_log(avctx, AV_LOG_ERROR, "Error reading frame header\n"); return AVERROR_INVALIDDATA; } } if (!s->data) { *got_frame_ptr = 0; return avpkt->size; } blockstodecode = FFMIN(s->blocks_per_loop, s->samples); // for old files coefficients were not interleaved, // so we need to decode all of them at once if (s->fileversion < 3930) blockstodecode = s->samples; /* reallocate decoded sample buffer if needed */ av_fast_malloc(&s->decoded_buffer, &s->decoded_size, 2 * FFALIGN(blockstodecode, 8) * sizeof(*s->decoded_buffer)); if (!s->decoded_buffer) return AVERROR(ENOMEM); memset(s->decoded_buffer, 0, s->decoded_size); s->decoded[0] = s->decoded_buffer; s->decoded[1] = s->decoded_buffer + FFALIGN(blockstodecode, 8); /* get output buffer */ frame->nb_samples = blockstodecode; if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) return ret; s->error=0; if ((s->channels == 1) || (s->frameflags & APE_FRAMECODE_PSEUDO_STEREO)) ape_unpack_mono(s, blockstodecode); else ape_unpack_stereo(s, blockstodecode); emms_c(); if (s->error) { s->samples=0; av_log(avctx, AV_LOG_ERROR, "Error decoding frame\n"); return AVERROR_INVALIDDATA; } switch (s->bps) { case 8: for (ch = 0; ch < s->channels; ch++) { sample8 = (uint8_t *)frame->data[ch]; for (i = 0; i < blockstodecode; i++) *sample8++ = (s->decoded[ch][i] + 0x80) & 0xff; } break; case 16: for (ch = 0; ch < s->channels; ch++) { sample16 = (int16_t *)frame->data[ch]; for (i = 0; i < blockstodecode; i++) *sample16++ = s->decoded[ch][i]; } break; case 24: for (ch = 0; ch < s->channels; ch++) { sample24 = (int32_t *)frame->data[ch]; for (i = 0; i < blockstodecode; i++) *sample24++ = s->decoded[ch][i] << 8; } break; } s->samples -= blockstodecode; *got_frame_ptr = 1; return !s->samples ? avpkt->size : 0; } static void ape_flush(AVCodecContext *avctx) { APEContext *s = avctx->priv_data; s->samples= 0; } #define OFFSET(x) offsetof(APEContext, x) #define PAR (AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_AUDIO_PARAM) static const AVOption options[] = { { "max_samples", "maximum number of samples decoded per call", OFFSET(blocks_per_loop), AV_OPT_TYPE_INT, { .i64 = 4608 }, 1, INT_MAX, PAR, "max_samples" }, { "all", "no maximum. 
decode all samples for each packet at once", 0, AV_OPT_TYPE_CONST, { .i64 = INT_MAX }, INT_MIN, INT_MAX, PAR, "max_samples" }, { NULL}, }; static const AVClass ape_decoder_class = { .class_name = "APE decoder", .item_name = av_default_item_name, .option = options, .version = LIBAVUTIL_VERSION_INT, }; AVCodec ff_ape_decoder = { .name = "ape", .long_name = NULL_IF_CONFIG_SMALL("Monkey's Audio"), .type = AVMEDIA_TYPE_AUDIO, .id = AV_CODEC_ID_APE, .priv_data_size = sizeof(APEContext), .init = ape_decode_init, .close = ape_decode_close, .decode = ape_decode_frame, .capabilities = CODEC_CAP_SUBFRAMES | CODEC_CAP_DELAY | CODEC_CAP_DR1, .flush = ape_flush, .sample_fmts = (const enum AVSampleFormat[]) { AV_SAMPLE_FMT_U8P, AV_SAMPLE_FMT_S16P, AV_SAMPLE_FMT_S32P, AV_SAMPLE_FMT_NONE }, .priv_class = &ape_decoder_class, };
lgpl-2.1
pascalprimer/DataStructure
TestCode/TrainTest/Station.hpp
3065
#ifndef SJTU_STATION_HPP
#define SJTU_STATION_HPP

#include <string>

#include "Date.hpp"
#include "lib/pair.hpp"
#include "lib/vector.hpp"

using std::string;

namespace sjtu {

class Station {
public:
    const static int MAXN = 2000;

    struct Node {
        double price;
        string level;
        int left_n;

        Node(double _price, const string &_level, int _left_n):
            price(_price), level(_level), left_n(_left_n) {
        }
    };

private:
    string location;
    Date arrival_time, departure_time;
    vector<Node> types;
    int dist;

public:
    Station(const string &_location, const Date &_arrival_time, const Date &_departure_time,
            const vector<Node> &_types, int _dist):
        location(_location), arrival_time(_arrival_time), departure_time(_departure_time),
        types(_types), dist(_dist) {
    }

    Station(const string &_location, const Date &_arrival_time, const Date &_departure_time,
            const vector<string> &_type, const vector<double> &_price, int _dist):
        location(_location), arrival_time(_arrival_time), departure_time(_departure_time), dist(_dist) {
        types.clear();
        for (int i = 0; i < (int)_price.size(); ++i) {
            types.push_back(Node(_price[i], _type[i], MAXN));
        }
    }

    const string query_location() const {
        return location;
    }

    const Date query_arrival_time() const {
        return arrival_time;
    }

    const Date query_departure_time() const {
        return departure_time;
    }

    const int query_single_number(const string &request) const {
        for (int i = 0; i < types.size(); ++i) {
            if (types[i].level == request) {
                return types[i].left_n;
            }
        }
        throw Exception("no " + request + " ticket found");
    }

    const double query_single_price(const string &request) const {
        for (int i = 0; i < types.size(); ++i) {
            if (types[i].level == request) {
                return types[i].price;
            }
        }
        throw Exception("no " + request + " ticket found");
    }

    vector<Node> query_types() {
        return types;
    }

    vector<double> query_specified_price(const vector<string> &request) {
        vector<double> ret;
        for (int i = 0; i < (int)request.size(); ++i) {
            try {
                ret.push_back(query_single_price(request[i]));
            } catch (const Exception &exp) {
                throw exp;
            }
        }
        return ret;
    }

    const bool is_same_location(const string &another) const {
        for (int i = 0; i < 6 && i < location.size() && i < another.size(); ++i) {
            if (another[i] != location[i]) {
                return false;
            }
        }
        return true;
    }

    const bool modify_number(const string &request, int delta) {
        for (int i = 0; i < types.size(); ++i) {
            if (types[i].level == request) {
                if (types[i].left_n + delta < 0) {
                    throw Exception(request + " tickets are insufficient");
                    return false;
                }
                types[i].left_n += delta;
                return true;
            }
        }
        throw Exception("no " + request + " ticket found");
        return false;
    }

    const int query_distance() const {
        return dist;
    }

    void go_one_day() {
        arrival_time.go_one_day();
        departure_time.go_one_day();
    }
};

}

#endif
lgpl-3.0
xautlx/s2jh
common-service/src/main/java/lab/s2jh/ctx/FreemarkerService.java
4142
package lab.s2jh.ctx;

import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.Map;

import lab.s2jh.core.exception.ServiceException;
import lab.s2jh.core.service.PropertiesConfigService;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.struts2.ServletActionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;

import freemarker.cache.StringTemplateLoader;
import freemarker.template.Configuration;
import freemarker.template.TemplateException;

@Service
public class FreemarkerService extends Configuration {

    private final static Logger logger = LoggerFactory.getLogger(FreemarkerService.class);

    private static StringTemplateLoader stringTemplateLoader = new StringTemplateLoader();

    public FreemarkerService() {
        this.setDefaultEncoding("UTF-8");
        this.setTemplateLoader(stringTemplateLoader);
    }

    public String processTemplate(String templateName, long version, String templateContents, Map<String, Object> dataMap) {
        Assert.notNull(templateName);
        Assert.notNull(version);
        if (StringUtils.isBlank(templateContents)) {
            return null;
        }
        Object templateSource = stringTemplateLoader.findTemplateSource(templateName);
        if (templateSource == null) {
            logger.debug("Init freemarker template: {}", templateName);
            stringTemplateLoader.putTemplate(templateName, templateContents, version);
        } else {
            long ver = stringTemplateLoader.getLastModified(templateSource);
            if (version > ver) {
                logger.debug("Update freemarker template: {}", templateName);
                stringTemplateLoader.putTemplate(templateName, templateContents, version);
            }
        }
        return processTemplateByName(templateName, dataMap);
    }

    private String processTemplateByName(String templateName, Map<String, Object> dataMap) {
        StringWriter strWriter = new StringWriter();
        try {
            this.getTemplate(templateName).process(dataMap, strWriter);
            strWriter.flush();
        } catch (TemplateException e) {
            throw new ServiceException("error.freemarker.template.process", e);
        } catch (IOException e) {
            throw new ServiceException("error.freemarker.template.process", e);
        }
        return strWriter.toString();
    }

    public String processTemplateByContents(String templateContents, Map<String, Object> dataMap) {
        String templateName = "_" + templateContents.hashCode();
        return processTemplate(templateName, 0, templateContents, dataMap);
    }

    public String processTemplateByFileName(String templateFileName, Map<String, Object> dataMap) {
        String templateDir = PropertiesConfigService.getWebRootRealPath();
        templateDir += File.separator + "WEB-INF" + File.separator + "template" + File.separator + "freemarker";
        File targetTemplateFile = new File(templateDir + File.separator + templateFileName + ".ftl");
        //TODO: additional loading of templates from the classpath could be added here
        logger.debug("Processing freemarker template file: {}", targetTemplateFile.getAbsolutePath());
        long fileVersion = targetTemplateFile.lastModified();
        Object templateSource = stringTemplateLoader.findTemplateSource(templateFileName);
        long templateVersion = 0;
        if (templateSource != null) {
            templateVersion = stringTemplateLoader.getLastModified(templateSource);
        }
        if (fileVersion > templateVersion) {
            try {
                String contents = FileUtils.readFileToString(targetTemplateFile);
                return processTemplate(templateFileName, fileVersion, contents, dataMap);
            } catch (IOException e) {
                throw new ServiceException("error.freemarker.template.process", e);
            }
        } else {
            return processTemplateByName(templateFileName, dataMap);
        }
    }
}
lgpl-3.0
Alfresco/community-edition
projects/repository/source/test-java/org/alfresco/repo/tenant/MultiTNodeServiceInterceptorTest.java
4003
/*
 * #%L
 * Alfresco Repository
 * %%
 * Copyright (C) 2005 - 2016 Alfresco Software Limited
 * %%
 * This file is part of the Alfresco software.
 * If the software was purchased under a paid Alfresco license, the terms of
 * the paid license agreement will prevail. Otherwise, the software is
 * provided under the following open source license terms:
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 * #L%
 */
package org.alfresco.repo.tenant;

import junit.framework.TestCase;

import org.alfresco.repo.transaction.RetryingTransactionHelper.RetryingTransactionCallback;
import org.alfresco.service.transaction.TransactionService;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.ApplicationContextHelper;
import org.alfresco.util.GUID;
import org.junit.experimental.categories.Category;
import org.springframework.context.ApplicationContext;

/**
 * @see MultiTNodeServiceInterceptor
 *
 * @since 3.0
 * @author Derek Hulley
 */
@Category(OwnJVMTestsCategory.class)
public class MultiTNodeServiceInterceptorTest extends TestCase
{
    public static ApplicationContext ctx = ApplicationContextHelper.getApplicationContext();

    private String tenant1 = "tenant-" + GUID.generate();
    private String tenant1Pwd = "pwd1";
    private boolean enableTest = true;

    private TransactionService transactionService;
    private TenantAdminService tenantAdminService;
    private TenantService tenantService;
    private MultiTNodeServiceInterceptor interceptor;

    @Override
    public void setUp() throws Exception
    {
        transactionService = (TransactionService) ctx.getBean("TransactionService");
        tenantAdminService = (TenantAdminService) ctx.getBean("tenantAdminService");
        tenantService = (TenantService) ctx.getBean("tenantService");
        interceptor = (MultiTNodeServiceInterceptor) ctx.getBean("multiTNodeServiceInterceptor");

        // If MT is disabled, then disable all tests
        if (!tenantAdminService.isEnabled())
        {
            enableTest = false;
            return;
        }

        // Create a tenant
        RetryingTransactionCallback<Object> createTenantCallback = new RetryingTransactionCallback<Object>()
        {
            public Object execute() throws Throwable
            {
                tenantAdminService.createTenant(tenant1, tenant1Pwd.toCharArray());
                return null;
            }
        };
        transactionService.getRetryingTransactionHelper().doInTransaction(createTenantCallback, false, true);
    }

    @Override
    public void tearDown() throws Exception
    {
        // If MT is disabled, then disable all tests
        if (!tenantAdminService.isEnabled())
        {
            return;
        }

        // Delete a tenant
        RetryingTransactionCallback<Object> createTenantCallback = new RetryingTransactionCallback<Object>()
        {
            public Object execute() throws Throwable
            {
                tenantAdminService.deleteTenant(tenant1);
                return null;
            }
        };
        transactionService.getRetryingTransactionHelper().doInTransaction(createTenantCallback, false, true);
    }

    /**
     * Control case.
     */
    public void testSetUp()
    {
    }
}
lgpl-3.0
LebedevRI/gegl
tests/buffer/tests/mipmap_set2.c
1275
TEST ()
{
  GeglBuffer   *buffer2, *buffer;
  GeglRectangle bound = {0, 0, 20, 20};
  //GeglRectangle source = {2, 2, 5, 5};
  GeglRectangle dest = {4, 4, 5, 5};
  float        *blank = g_malloc0 (100000);
  gchar        *temp  = g_malloc0 (100000);

  test_start ();

  buffer2 = gegl_buffer_new (&bound, babl_format ("Y float"));
  buffer  = gegl_buffer_new (&bound, babl_format ("Y float"));
  vgrad (buffer2);

  blank[0] = 0.5;
  blank[1] = 0.25;
  blank[2] = 1.0;
  blank[3] = 1.0;
  blank[4] = 1.0;
  blank[5] = 0.2;

  /* we need to expand the width/height to compensate for the level */
  dest.width  *= 2;
  dest.height *= 2;

  gegl_buffer_set (buffer2, &dest, 1, babl_format ("Y float"), blank,
                   GEGL_AUTO_ROWSTRIDE);
  print_buffer (buffer2);

  gegl_buffer_get (buffer2, &bound, 0.5, babl_format ("Y float"), temp,
                   GEGL_AUTO_ROWSTRIDE, GEGL_ABYSS_NONE);
  gegl_buffer_set (buffer, &bound, 0, babl_format ("Y float"), temp,
                   GEGL_AUTO_ROWSTRIDE);
  print_buffer (buffer);

  gegl_buffer_get (buffer2, &bound, 0.25, babl_format ("Y float"), temp,
                   GEGL_AUTO_ROWSTRIDE, GEGL_ABYSS_NONE);
  gegl_buffer_set (buffer, &bound, 0, babl_format ("Y float"), temp,
                   GEGL_AUTO_ROWSTRIDE);
  print_buffer (buffer);

  test_end ();

  g_object_unref (buffer);
  g_object_unref (buffer2);
}
lgpl-3.0
chafca/p2pEngine
build/html/loginForm.html
787
<div id="login"> <input type="button" id="bootstrapSetting" value="Bootstrap Settings" onclick="includeBoostrapSetting();" /> <h1>Sign In</h1> <p class="feedbackBox" onclick="clearFeedback();"></p> <p> <label class="label" id="label_username">Username : </label> <input type="text" id="username" name="username" required="required" placeholder="AliceWonderland" /> </p> <p> <label class="label" id="label_password">Password : </label> <input type="password" id="password" name="password" required="required" placeholder="ex : p4$Sw0r6!" /> </p> <p> <input type="button" id="loginButton" onclick="signIn();" value="Sign In"/> </p> <p> <input type="button" id="registrationButton" onclick="includeRegistration();" value="Registration"/> </p> </div>
lgpl-3.0
ricardorcr/fenixedu-academic
src/main/java/org/fenixedu/academic/domain/Shift.java
22904
/** * Copyright © 2002 Instituto Superior Técnico * * This file is part of FenixEdu Academic. * * FenixEdu Academic is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * FenixEdu Academic is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>. */ package org.fenixedu.academic.domain; import static org.fenixedu.academic.predicate.AccessControl.check; import java.math.BigDecimal; import java.text.Collator; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.apache.commons.lang.StringUtils; import org.fenixedu.academic.domain.degree.DegreeType; import org.fenixedu.academic.domain.exceptions.DomainException; import org.fenixedu.academic.domain.person.RoleType; import org.fenixedu.academic.domain.student.Registration; import org.fenixedu.academic.domain.util.email.ExecutionCourseSender; import org.fenixedu.academic.domain.util.email.Message; import org.fenixedu.academic.domain.util.email.Recipient; import org.fenixedu.academic.predicate.AccessControl; import org.fenixedu.academic.predicate.ResourceAllocationRolePredicates; import org.fenixedu.academic.util.Bundle; import org.fenixedu.academic.util.DiaSemana; import org.fenixedu.academic.util.WeekDay; import org.fenixedu.bennu.core.domain.Bennu; import org.fenixedu.bennu.core.groups.UserGroup; import org.fenixedu.bennu.core.i18n.BundleUtil; import org.joda.time.Duration; import pt.ist.fenixframework.Atomic; import pt.ist.fenixframework.dml.runtime.RelationAdapter; public class Shift extends Shift_Base { public static final Comparator<Shift> SHIFT_COMPARATOR_BY_NAME = new Comparator<Shift>() { @Override public int compare(Shift o1, Shift o2) { return Collator.getInstance().compare(o1.getNome(), o2.getNome()); } }; public static final Comparator<Shift> SHIFT_COMPARATOR_BY_TYPE_AND_ORDERED_LESSONS = new Comparator<Shift>() { @Override public int compare(Shift o1, Shift o2) { final int ce = o1.getExecutionCourse().getNome().compareTo(o2.getExecutionCourse().getNome()); if (ce != 0) { return ce; } final int cs = o1.getShiftTypesIntegerComparator().compareTo(o2.getShiftTypesIntegerComparator()); if (cs != 0) { return cs; } final int cl = o1.getLessonsStringComparator().compareTo(o2.getLessonsStringComparator()); return cl == 0 ? 
DomainObjectUtil.COMPARATOR_BY_ID.compare(o1, o2) : cl; } }; static { Registration.getRelationShiftStudent().addListener(new ShiftStudentListener()); } public Shift(final ExecutionCourse executionCourse, Collection<ShiftType> types, final Integer lotacao) { // check(this, ResourceAllocationRolePredicates.checkPermissionsToManageShifts); super(); setRootDomainObject(Bennu.getInstance()); shiftTypeManagement(types, executionCourse); setLotacao(lotacao); executionCourse.setShiftNames(); if (getCourseLoadsSet().isEmpty()) { throw new DomainException("error.Shift.empty.courseLoads"); } } public void edit(List<ShiftType> newTypes, Integer newCapacity, ExecutionCourse newExecutionCourse, String newName, String comment) { check(this, ResourceAllocationRolePredicates.checkPermissionsToManageShifts); ExecutionCourse beforeExecutionCourse = getExecutionCourse(); final Shift otherShiftWithSameNewName = newExecutionCourse.findShiftByName(newName); if (otherShiftWithSameNewName != null && otherShiftWithSameNewName != this) { throw new DomainException("error.Shift.with.this.name.already.exists"); } if (newCapacity != null && getStudentsSet().size() > newCapacity.intValue()) { throw new DomainException("errors.exception.invalid.finalAvailability"); } setLotacao(newCapacity); shiftTypeManagement(newTypes, newExecutionCourse); beforeExecutionCourse.setShiftNames(); if (!beforeExecutionCourse.equals(newExecutionCourse)) { newExecutionCourse.setShiftNames(); } if (getCourseLoadsSet().isEmpty()) { throw new DomainException("error.Shift.empty.courseLoads"); } setComment(comment); } @Override public Set<StudentGroup> getAssociatedStudentGroupsSet() { Set<StudentGroup> result = new HashSet<StudentGroup>(); for (StudentGroup sg : super.getAssociatedStudentGroupsSet()) { if (sg.getValid()) { result.add(sg); } } return Collections.unmodifiableSet(result); } public void delete() { check(this, ResourceAllocationRolePredicates.checkPermissionsToManageShifts); DomainException.throwWhenDeleteBlocked(getDeletionBlockers()); final ExecutionCourse executionCourse = getExecutionCourse(); for (; !getAssociatedLessonsSet().isEmpty(); getAssociatedLessonsSet().iterator().next().delete()) { ; } for (; !getAssociatedShiftProfessorshipSet().isEmpty(); getAssociatedShiftProfessorshipSet().iterator().next().delete()) { ; } for (; !getShiftDistributionEntriesSet().isEmpty(); getShiftDistributionEntriesSet().iterator().next().delete()) { ; } getAssociatedClassesSet().clear(); getCourseLoadsSet().clear(); if (getShiftGroupingProperties() != null) { getShiftGroupingProperties().delete(); } setRootDomainObject(null); super.deleteDomainObject(); executionCourse.setShiftNames(); } @jvstm.cps.ConsistencyPredicate protected boolean checkRequiredParameters() { return getLotacao() != null && !StringUtils.isEmpty(getNome()); } @Deprecated public ExecutionCourse getDisciplinaExecucao() { return getExecutionCourse(); } public ExecutionCourse getExecutionCourse() { CourseLoad courseLoad = getCourseLoadsSet().iterator().next(); if (courseLoad != null) { return courseLoad.getExecutionCourse(); } else { return null; } } public ExecutionSemester getExecutionPeriod() { return getExecutionCourse().getExecutionPeriod(); } private void shiftTypeManagement(Collection<ShiftType> types, ExecutionCourse executionCourse) { if (executionCourse != null) { getCourseLoadsSet().clear(); for (ShiftType shiftType : types) { CourseLoad courseLoad = executionCourse.getCourseLoadByShiftType(shiftType); if (courseLoad != null) { addCourseLoads(courseLoad); } } } } public 
List<ShiftType> getTypes() { List<ShiftType> result = new ArrayList<ShiftType>(); for (CourseLoad courseLoad : getCourseLoadsSet()) { result.add(courseLoad.getType()); } return result; } public SortedSet<ShiftType> getSortedTypes() { SortedSet<ShiftType> result = new TreeSet<ShiftType>(); for (CourseLoad courseLoad : getCourseLoadsSet()) { result.add(courseLoad.getType()); } return result; } public boolean containsType(ShiftType shiftType) { if (shiftType != null) { for (CourseLoad courseLoad : getCourseLoadsSet()) { if (courseLoad.getType().equals(shiftType)) { return true; } } } return false; } @Override protected void checkForDeletionBlockers(Collection<String> blockers) { super.checkForDeletionBlockers(blockers); if (!getAssociatedStudentGroupsSet().isEmpty()) { blockers.add(BundleUtil.getString(Bundle.RESOURCE_ALLOCATION, "error.deleteShift.with.studentGroups", getNome())); } if (!getStudentsSet().isEmpty()) { blockers.add(BundleUtil.getString(Bundle.RESOURCE_ALLOCATION, "error.deleteShift.with.students", getNome())); } if (!getAssociatedSummariesSet().isEmpty()) { blockers.add(BundleUtil.getString(Bundle.RESOURCE_ALLOCATION, "error.deleteShift.with.summaries", getNome())); } } public BigDecimal getTotalHours() { Collection<Lesson> lessons = getAssociatedLessonsSet(); BigDecimal lessonTotalHours = BigDecimal.ZERO; for (Lesson lesson : lessons) { lessonTotalHours = lessonTotalHours.add(lesson.getTotalHours()); } return lessonTotalHours; } public Duration getTotalDuration() { Duration duration = Duration.ZERO; Collection<Lesson> lessons = getAssociatedLessonsSet(); for (Lesson lesson : lessons) { duration = duration.plus(lesson.getTotalDuration()); } return duration; } public BigDecimal getMaxLessonDuration() { BigDecimal maxHours = BigDecimal.ZERO; for (Lesson lesson : getAssociatedLessonsSet()) { BigDecimal lessonHours = lesson.getUnitHours(); if (maxHours.compareTo(lessonHours) == -1) { maxHours = lessonHours; } } return maxHours; } public BigDecimal getUnitHours() { BigDecimal hours = BigDecimal.ZERO; Collection<Lesson> lessons = getAssociatedLessonsSet(); for (Lesson lesson : lessons) { hours = hours.add(lesson.getUnitHours()); } return hours; } public double getHoursOnSaturdaysOrNightHours(int nightHour) { double hours = 0; Collection<Lesson> lessons = this.getAssociatedLessonsSet(); for (Lesson lesson : lessons) { if (lesson.getDiaSemana().equals(new DiaSemana(DiaSemana.SABADO))) { hours += lesson.getUnitHours().doubleValue(); } else { hours += lesson.hoursAfter(nightHour); } } return hours; } public int getNumberOfLessonInstances() { Collection<Lesson> lessons = getAssociatedLessonsSet(); int totalLessonsDates = 0; for (Lesson lesson : lessons) { totalLessonsDates += lesson.getFinalNumberOfLessonInstances(); } return totalLessonsDates; } public BigDecimal getCourseLoadWeeklyAverage() { BigDecimal weeklyHours = BigDecimal.ZERO; for (CourseLoad courseLoad : getCourseLoadsSet()) { weeklyHours = weeklyHours.add(courseLoad.getWeeklyHours()); } return weeklyHours; } public BigDecimal getCourseLoadTotalHours() { BigDecimal weeklyHours = BigDecimal.ZERO; for (CourseLoad courseLoad : getCourseLoadsSet()) { weeklyHours = weeklyHours.add(courseLoad.getTotalQuantity()); } return weeklyHours; } public void associateSchoolClass(SchoolClass schoolClass) { if (schoolClass == null) { throw new NullPointerException(); } if (!this.getAssociatedClassesSet().contains(schoolClass)) { this.getAssociatedClassesSet().add(schoolClass); } if (!schoolClass.getAssociatedShiftsSet().contains(this)) { 
schoolClass.getAssociatedShiftsSet().add(this); } } public SortedSet<Lesson> getLessonsOrderedByWeekDayAndStartTime() { final SortedSet<Lesson> lessons = new TreeSet<Lesson>(Lesson.LESSON_COMPARATOR_BY_WEEKDAY_AND_STARTTIME); lessons.addAll(getAssociatedLessonsSet()); return lessons; } public String getLessonsStringComparator() { final StringBuilder stringBuilder = new StringBuilder(); for (final Lesson lesson : getLessonsOrderedByWeekDayAndStartTime()) { stringBuilder.append(lesson.getDiaSemana().getDiaSemana().toString()); stringBuilder.append(lesson.getBeginHourMinuteSecond().toString()); } return stringBuilder.toString(); } public Integer getShiftTypesIntegerComparator() { final StringBuilder stringBuilder = new StringBuilder(); for (ShiftType shiftType : getSortedTypes()) { stringBuilder.append(shiftType.ordinal() + 1); } return Integer.valueOf(stringBuilder.toString()); } public boolean reserveForStudent(final Registration registration) { final boolean result = getLotacao().intValue() > getStudentsSet().size(); if (result || isResourceAllocationManager()) { GroupsAndShiftsManagementLog.createLog(getExecutionCourse(), Bundle.MESSAGING, "log.executionCourse.groupAndShifts.shifts.attends.added", registration.getNumber().toString(), getNome(), getExecutionCourse().getNome(), getExecutionCourse().getDegreePresentationString()); addStudents(registration); } return result; } private boolean isResourceAllocationManager() { final Person person = AccessControl.getPerson(); return person != null && RoleType.RESOURCE_ALLOCATION_MANAGER.isMember(person.getUser()); } public SortedSet<ShiftEnrolment> getShiftEnrolmentsOrderedByDate() { final SortedSet<ShiftEnrolment> shiftEnrolments = new TreeSet<ShiftEnrolment>(ShiftEnrolment.COMPARATOR_BY_DATE); shiftEnrolments.addAll(getShiftEnrolmentsSet()); return shiftEnrolments; } public String getClassesPrettyPrint() { StringBuilder builder = new StringBuilder(); int index = 0; for (SchoolClass schoolClass : getAssociatedClassesSet()) { builder.append(schoolClass.getNome()); index++; if (index < getAssociatedClassesSet().size()) { builder.append(", "); } } return builder.toString(); } public String getShiftTypesPrettyPrint() { StringBuilder builder = new StringBuilder(); int index = 0; SortedSet<ShiftType> sortedTypes = getSortedTypes(); for (ShiftType shiftType : sortedTypes) { builder.append(BundleUtil.getString(Bundle.ENUMERATION, shiftType.getName())); index++; if (index < sortedTypes.size()) { builder.append(", "); } } return builder.toString(); } public String getShiftTypesCapitalizedPrettyPrint() { StringBuilder builder = new StringBuilder(); int index = 0; SortedSet<ShiftType> sortedTypes = getSortedTypes(); for (ShiftType shiftType : sortedTypes) { builder.append(shiftType.getFullNameTipoAula()); index++; if (index < sortedTypes.size()) { builder.append(", "); } } return builder.toString(); } public String getShiftTypesCodePrettyPrint() { StringBuilder builder = new StringBuilder(); int index = 0; SortedSet<ShiftType> sortedTypes = getSortedTypes(); for (ShiftType shiftType : sortedTypes) { builder.append(shiftType.getSiglaTipoAula()); index++; if (index < sortedTypes.size()) { builder.append(", "); } } return builder.toString(); } public List<Summary> getExtraSummaries() { List<Summary> result = new ArrayList<Summary>(); Set<Summary> summaries = getAssociatedSummariesSet(); for (Summary summary : summaries) { if (summary.isExtraSummary()) { result.add(summary); } } return result; } private static class ShiftStudentListener extends 
RelationAdapter<Registration, Shift> { @Override public void afterAdd(Registration registration, Shift shift) { if (!shift.hasShiftEnrolment(registration)) { new ShiftEnrolment(shift, registration); } } @Override public void afterRemove(Registration registration, Shift shift) { shift.unEnrolStudent(registration); } } private boolean hasShiftEnrolment(final Registration registration) { for (final ShiftEnrolment shiftEnrolment : getShiftEnrolmentsSet()) { if (shiftEnrolment.hasRegistration(registration)) { return true; } } return false; } public void unEnrolStudent(final Registration registration) { final ShiftEnrolment shiftEnrolment = findShiftEnrolment(registration); if (shiftEnrolment != null) { shiftEnrolment.delete(); } } private ShiftEnrolment findShiftEnrolment(final Registration registration) { for (final ShiftEnrolment shiftEnrolment : getShiftEnrolmentsSet()) { if (shiftEnrolment.getRegistration() == registration) { return shiftEnrolment; } } return null; } public int getCapacityBasedOnSmallestRoom() { int capacity = getAssociatedLessonsSet().stream().filter(Lesson::hasSala) .mapToInt(lesson -> lesson.getSala().getAllocatableCapacity()).min().orElse(0); return capacity + (capacity / 10); } public boolean hasShiftType(final ShiftType shiftType) { for (CourseLoad courseLoad : getCourseLoadsSet()) { if (courseLoad.getType() == shiftType) { return true; } } return false; } public boolean hasSchoolClassForDegreeType(DegreeType degreeType) { for (SchoolClass schoolClass : getAssociatedClassesSet()) { if (schoolClass.getExecutionDegree().getDegreeType() == degreeType) { return true; } } return false; } @Atomic public void removeAttendFromShift(Registration registration, ExecutionCourse executionCourse) { GroupsAndShiftsManagementLog.createLog(getExecutionCourse(), Bundle.MESSAGING, "log.executionCourse.groupAndShifts.shifts.attends.removed", registration.getNumber().toString(), getNome(), getExecutionCourse().getNome(), getExecutionCourse().getDegreePresentationString()); registration.removeShifts(this); ExecutionCourseSender sender = ExecutionCourseSender.newInstance(executionCourse); Collection<Recipient> recipients = Collections.singletonList(new Recipient(UserGroup.of(registration.getPerson().getUser()))); final String subject = BundleUtil.getString(Bundle.APPLICATION, "label.shift.remove.subject"); final String body = BundleUtil.getString(Bundle.APPLICATION, "label.shift.remove.body", getNome()); new Message(sender, sender.getConcreteReplyTos(), recipients, subject, body, ""); } public boolean hasAnyStudentsInAssociatedStudentGroups() { for (final StudentGroup studentGroup : getAssociatedStudentGroupsSet()) { if (studentGroup.getAttendsSet().size() > 0) { return true; } } return false; } public String getPresentationName() { StringBuilder stringBuilder = new StringBuilder(this.getNome()); if (!this.getAssociatedLessonsSet().isEmpty()) { stringBuilder.append(" ( "); for (Iterator<Lesson> iterator = this.getAssociatedLessonsSet().iterator(); iterator.hasNext();) { Lesson lesson = iterator.next(); stringBuilder.append(WeekDay.getWeekDay(lesson.getDiaSemana()).getLabelShort()); stringBuilder.append(" "); stringBuilder.append(lesson.getBeginHourMinuteSecond().toString("HH:mm")); stringBuilder.append(" - "); stringBuilder.append(lesson.getEndHourMinuteSecond().toString("HH:mm")); if (lesson.hasSala()) { stringBuilder.append(" - "); stringBuilder.append(lesson.getSala().getName()); } if (iterator.hasNext()) { stringBuilder.append(" ; "); } } stringBuilder.append(" ) "); } return 
stringBuilder.toString(); } public String getLessonPresentationString() { StringBuilder stringBuilder = new StringBuilder(this.getNome()); if (!this.getAssociatedLessonsSet().isEmpty()) { for (Iterator<Lesson> iterator = this.getAssociatedLessonsSet().iterator(); iterator.hasNext();) { Lesson lesson = iterator.next(); stringBuilder.append(" "); stringBuilder.append(WeekDay.getWeekDay(lesson.getDiaSemana()).getLabelShort()); stringBuilder.append(" "); stringBuilder.append(lesson.getBeginHourMinuteSecond().toString("HH:mm")); stringBuilder.append(" - "); stringBuilder.append(lesson.getEndHourMinuteSecond().toString("HH:mm")); if (lesson.hasSala()) { stringBuilder.append(" - "); stringBuilder.append(lesson.getSala().getName()); } if (iterator.hasNext()) { stringBuilder.append(" ; "); } } } return stringBuilder.toString(); } public List<StudentGroup> getAssociatedStudentGroups(Grouping grouping) { List<StudentGroup> result = new ArrayList<StudentGroup>(); for (StudentGroup studentGroup : getAssociatedStudentGroupsSet()) { if (studentGroup.getGrouping() == grouping) { result.add(studentGroup); } } return result; } public boolean isTotalShiftLoadExceeded() { final BigDecimal totalHours = getTotalHours(); for (final CourseLoad courseLoad : getCourseLoadsSet()) { if (totalHours.compareTo(courseLoad.getTotalQuantity()) == 1) { return true; } } return false; } }
lgpl-3.0
marieke-bijlsma/molgenis
molgenis-data-annotators/src/test/java/org/molgenis/data/annotation/resources/impl/ResourceImplTest.java
2417
package org.molgenis.data.annotation.resources.impl;

import static org.mockito.Mockito.when;

import java.io.File;

import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.molgenis.data.Query;
import org.molgenis.data.annotation.resources.ResourceConfig;
import org.molgenis.data.support.QueryImpl;
import org.molgenis.util.ResourceUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

public class ResourceImplTest
{
	@Mock
	ResourceConfig config;
	@Mock
	TabixRepositoryFactory factory;

	private ResourceImpl resource;

	@BeforeMethod
	public void beforeMethod()
	{
		MockitoAnnotations.initMocks(this);
		resource = new ResourceImpl("cadd_test", config, new TabixVcfRepositoryFactory("cadd"));
	}

	@Test
	public void ifSettingIsNotDefinedResourceIsUnavailable()
	{
		Assert.assertFalse(resource.isAvailable());
	}

	/**
	 * FIXME: reuse in config test
	 *
	 * @Test public void ifSettingBecomesDefinedAndFileExistsResourceBecomesAvailable() {
	 * Assert.assertFalse(resource.isAvailable()); when(molgenisSettings.getProperty("cadd_key",
	 * null)).thenReturn("src/test/resources/cadd_test.vcf.gz"); Assert.assertTrue(resource.isAvailable());
	 * when(molgenisSettings.getProperty("cadd_key", null)).thenReturn("nonsense");
	 * Assert.assertFalse(resource.isAvailable()); when(molgenisSettings.getProperty("cadd_key",
	 * null)).thenReturn("src/test/resources/cadd_test.vcf.gz"); Assert.assertTrue(resource.isAvailable()); }
	 * @Test public void ifDefaultDoesNotExistResourceIsUnavailable() { resource = new ResourceImpl("cadd_test", config,
	 * new TabixVcfRepositoryFactory("cadd")); Assert.assertFalse(resource.isAvailable()); }
	 **/

	@Test
	public void testFindAllReturnsResult()
	{
		File file = ResourceUtils.getFile(getClass(), "/gonl/gonl.chr1.snps_indels.r5.vcf.gz");
		when(config.getFile()).thenReturn(file);

		Query query = QueryImpl.EQ("#CHROM", "1").and().eq("POS", 126108);
		System.out.println(resource.findAll(query));
	}

	@Test
	public void testFindAllReturnsResultFile2()
	{
		File file = ResourceUtils.getFile(getClass(),
				"/ALL.chr1.phase3_shapeit2_mvncall_integrated_v5.20130502.genotypes.vcf.gz");
		when(config.getFile()).thenReturn(file);

		Query query = QueryImpl.EQ("#CHROM", "1").and().eq("POS", 10352);
		System.out.println(resource.findAll(query));
	}
}
lgpl-3.0
deib-polimi/SPF2
sPFFramework/src/main/java/it/polimi/spf/framework/services/SPFServiceRegistry.java
5348
/* * Copyright 2014 Jacopo Aliprandi, Dario Archetti * * This file is part of SPF. * * SPF is free software: you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License as published by the Free * Software Foundation, either version 3 of the License, or (at your option) * any later version. * * SPF is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for * more details. * * You should have received a copy of the GNU Lesser General Public License * along with SPF. If not, see <http://www.gnu.org/licenses/>. * */ package it.polimi.spf.framework.services; import java.util.Collection; import android.content.Context; import android.util.Log; import it.polimi.spf.shared.model.InvocationRequest; import it.polimi.spf.shared.model.InvocationResponse; import it.polimi.spf.shared.model.SPFActivity; import it.polimi.spf.shared.model.SPFServiceDescriptor; /** * Refactored version of {@link ServiceDispatcher} * * @author darioarchetti * */ public class SPFServiceRegistry { private static final String TAG = "ServiceRegistry"; private ActivityConsumerRouteTable mActivityTable; private ServiceRegistryTable mServiceTable; private AppCommunicationAgent mCommunicationAgent; public SPFServiceRegistry(Context context) { mServiceTable = new ServiceRegistryTable(context); mActivityTable = new ActivityConsumerRouteTable(context); mCommunicationAgent = new AppCommunicationAgent(context); } /** * Registers a service. The owner application must be already registered. * * @param descriptor * @return true if the service was registered */ public boolean registerService(SPFServiceDescriptor descriptor) { return mServiceTable.registerService(descriptor) && mActivityTable.registerService(descriptor); } /** * Unregisters a service. * * @param descriptor * - the descriptor of the service to unregister * @return true if the service was removed */ public boolean unregisterService(SPFServiceDescriptor descriptor) { return mServiceTable.unregisterService(descriptor) && mActivityTable.unregisterService(descriptor); } /** * Unregisters all the service of an application * * @param appIdentifier * - the identifier of the app whose service to remove * @return true if all the services where removed. */ public boolean unregisterAllServicesOfApp(String appIdentifier) { return mServiceTable.unregisterAllServicesOfApp(appIdentifier) && mActivityTable.unregisterAllServicesOfApp(appIdentifier); } /** * Retrieves all the services of an app. * * @param appIdentifier * - the id of the app whose service to retrieve * @return the list of its services */ public SPFServiceDescriptor[] getServicesOfApp(String appIdentifier) { return mServiceTable.getServicesOfApp(appIdentifier); } /** * Dispatches an invocation request to the right application. If the * application is not found, an error response is returned. 
* * @param request * @return */ public InvocationResponse dispatchInvocation(InvocationRequest request) { String appName = request.getAppName(); String serviceName = request.getServiceName(); String componentName = mServiceTable.getComponentForService(appName, serviceName); if (componentName == null) { return InvocationResponse.error("Application " + appName + " doesn't have a service named " + serviceName); } AppServiceProxy proxy = mCommunicationAgent.getProxy(componentName); if (proxy == null) { return InvocationResponse.error("Cannot bind to service"); } try { return proxy.executeService(request); } catch (Throwable t) { Log.e("ServiceRegistry", "Error dispatching invocation: ", t); return InvocationResponse.error("Internal error: " + t.getMessage()); } } /** * Dispatches an activity to the right application according to {@link * ActivityConsumerRouteTable#} * * @param activity * @return */ public InvocationResponse sendActivity(SPFActivity activity) { ServiceIdentifier id = mActivityTable.getServiceFor(activity); String componentName = mServiceTable.getComponentForService(id); if (componentName == null) { String msg = "No service to handle " + activity; Log.d(TAG, msg); return InvocationResponse.error(msg); } AppServiceProxy proxy = mCommunicationAgent.getProxy(componentName); if (proxy == null) { String msg = "Can't bind to service " + componentName; Log.d(TAG, msg); return InvocationResponse.error(msg); } try { InvocationResponse r = proxy.sendActivity(activity); Log.v(TAG, "Activity dispatched: " + r); return r; } catch (Throwable t) { Log.e(TAG, "Error dispatching invocation: ", t); return InvocationResponse.error("Internal error: " + t.getMessage()); } } @Deprecated public Collection<ActivityVerb> getVerbSupportList() { return mActivityTable.getVerbSupport(); } public Collection<ActivityVerb> getSupportedVerbs() { return mActivityTable.getVerbSupport(); } public void setDefaultConsumerForVerb(String verb, ServiceIdentifier identifier) { mActivityTable.setDefaultServiceForVerb(verb, identifier); } }
lgpl-3.0
BayRu/log4r
src/log4r/formatter/log4jxmlformatter.rb
1699
# :include: ../rdoc/log4jxmlformatter
#
# == Other Info
#
# Version:: $Id$

require "log4r/formatter/formatter"

require "rubygems"
require "builder"

module Log4r

  class Log4jXmlFormatter < BasicFormatter

    def format(logevent)
      logger    = logevent.fullname.gsub('::', '.')
      timestamp = (Time.now.to_f * 1000).to_i
      level     = LNAMES[logevent.level]

      message   = format_object(logevent.data)
      exception = message if logevent.data.kind_of? Exception

      file, line, method = parse_caller(logevent.tracer[0]) if logevent.tracer

      builder = Builder::XmlMarkup.new
      xml = builder.log4j :event, :logger => logger,
                                  :timestamp => timestamp,
                                  :level => level,
                                  :thread => '' do |e|
        e.log4j :NDC, NDC.get
        e.log4j :message, message
        e.log4j :throwable, exception if exception
        e.log4j :locationInfo, :class => '', :method => method, :file => file, :line => line
        e.log4j :properties do |p|
          MDC.get_context.each do |key, value|
            p.log4j :data, :name => key, :value => value
          end
        end
      end
      xml
    end

    #######
    private
    #######

    def parse_caller(line)
      if /^(.+?):(\d+)(?::in `(.*)')?/ =~ line
        file   = Regexp.last_match[1]
        line   = Regexp.last_match[2].to_i
        method = Regexp.last_match[3]
        [file, line, method]
      else
        []
      end
    end

  end
end
lgpl-3.0
jimdowling/nat-traverser
network/netty/src/main/java/se/sics/gvod/net/MessageCounter.java
2282
package se.sics.gvod.net;

import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;

import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

/**
 * @author <a href="mailto:[email protected]">Bruno de Carvalho</a>
 * @author Steffen Grohsschmiedt
 */
public class MessageCounter extends ChannelDuplexHandler {

    // internal vars ----------------------------------------------------------

    private final String id;
    private final AtomicLong writtenMessages;
    private final AtomicLong readMessages;

    // constructors -----------------------------------------------------------

    public MessageCounter(String id) {
        this.id = id;
        this.writtenMessages = new AtomicLong();
        this.readMessages = new AtomicLong();
    }

    // SimpleChannelHandler ---------------------------------------------------

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        this.readMessages.incrementAndGet();
        super.channelRead(ctx, msg);
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msgs, ChannelPromise promise) throws Exception {
        promise.addListener(new GenericFutureListener<Future<? super Void>>() {

            @Override
            public void operationComplete(Future<? super Void> future) throws Exception {
                // completed writes count against the written-message counter
                MessageCounter.this.writtenMessages.getAndIncrement();
            }
        });
        super.write(ctx, msgs, promise);
    }

//    @Override
//    public void channelUnregistered(ChannelHandlerContext ctx) throws Exception {
//        System.out.println(this.id + ctx.channel() + " -> sent: " + this.getWrittenMessages()
//                + ", recv: " + this.getReadMessages());
//        super.channelUnregistered(ctx);
//    }

    // getters & setters ------------------------------------------------------

    public long getWrittenMessages() {
        return writtenMessages.get();
    }

    public long getReadMessages() {
        return readMessages.get();
    }
}
lgpl-3.0
Soya93/Extract-Refactoring
plugins/yaml/src/org/jetbrains/yaml/completion/YamlKeyCompletionInsertHandler.java
4294
package org.jetbrains.yaml.completion; import com.intellij.codeInsight.completion.InsertHandler; import com.intellij.codeInsight.completion.InsertionContext; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorModificationUtil; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.util.PsiTreeUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.yaml.YAMLElementGenerator; import org.jetbrains.yaml.YAMLTokenTypes; import org.jetbrains.yaml.psi.YAMLDocument; import org.jetbrains.yaml.psi.YAMLKeyValue; import org.jetbrains.yaml.psi.YAMLValue; public abstract class YamlKeyCompletionInsertHandler<T extends LookupElement> implements InsertHandler<T> { @NotNull protected abstract YAMLKeyValue createNewEntry(@NotNull YAMLDocument document, T item); @Override public void handleInsert(InsertionContext context, T item) { final PsiElement currentElement = context.getFile().findElementAt(context.getStartOffset()); assert currentElement != null : "no element at " + context.getStartOffset(); final YAMLDocument holdingDocument = PsiTreeUtil.getParentOfType(currentElement, YAMLDocument.class); assert holdingDocument != null; final YAMLValue oldValue = deleteLookupTextAndRetrieveOldValue(context, currentElement); final YAMLKeyValue created = createNewEntry(holdingDocument, item); context.getEditor().getCaretModel().moveToOffset(created.getTextRange().getEndOffset()); if (oldValue != null) { WriteCommandAction.runWriteCommandAction(context.getProject(), new Runnable() { @Override public void run() { created.setValue(oldValue); } }); } PsiDocumentManager.getInstance(context.getProject()).doPostponedOperationsAndUnblockDocument(context.getDocument()); if (!isCharAtCaret(context.getEditor(), ' ')) { EditorModificationUtil.insertStringAtCaret(context.getEditor(), " "); } else { context.getEditor().getCaretModel().moveCaretRelatively(1, 0, false, false, true); } } @Nullable protected YAMLValue deleteLookupTextAndRetrieveOldValue(InsertionContext context, @NotNull PsiElement elementAtCaret) { final YAMLValue oldValue; if (elementAtCaret.getNode().getElementType() != YAMLTokenTypes.SCALAR_KEY) { deleteLookupPlain(context); return null; } final YAMLKeyValue keyValue = PsiTreeUtil.getParentOfType(elementAtCaret, YAMLKeyValue.class); assert keyValue != null; context.commitDocument(); if (keyValue.getValue() != null) { // Save old value somewhere final YAMLKeyValue dummyKV = YAMLElementGenerator.getInstance(context.getProject()).createYamlKeyValue("foo", "b"); dummyKV.setValue(keyValue.getValue()); oldValue = dummyKV.getValue(); } else { oldValue = null; } context.setTailOffset(keyValue.getTextRange().getEndOffset()); WriteCommandAction.runWriteCommandAction(context.getProject(), new Runnable() { @Override public void run() { keyValue.getParentMapping().deleteKeyValue(keyValue); } }); return oldValue; } private static void deleteLookupPlain(InsertionContext context) { final Document document = context.getDocument(); final CharSequence sequence = document.getCharsSequence(); int offset = context.getStartOffset() - 1; while (offset >= 0) { final char c = sequence.charAt(offset); if (c != ' ' && c != '\t') { if (c == '\n') { offset--; } else { offset = context.getStartOffset() - 1; } break; } offset--; } document.deleteString(offset 
+ 1, context.getTailOffset()); context.commitDocument(); } public static boolean isCharAtCaret(Editor editor, char ch) { final int startOffset = editor.getCaretModel().getOffset(); final Document document = editor.getDocument(); return document.getTextLength() > startOffset && document.getCharsSequence().charAt(startOffset) == ch; } }
apache-2.0
Lucas1313/miesner
www/wp-content/plugins/nextgen-gallery/products/photocrati_nextgen/modules/lightbox/static/shutter_reloaded/nextgen_shutter_reloaded.js
397
jQuery(function($){
    var callback = function(){
        var shutterLinks = {}, shutterSets = {};
        shutterReloaded.Init();
    };

    $(this).bind('refreshed', callback);

    $(document).on('lazy_resources_loaded', function(){
        var flag = 'shutterReloaded';
        if (typeof($(window).data(flag)) == 'undefined') $(window).data(flag, true);
        else return;
        callback();
    });
});
apache-2.0
jagatsingh/bigtop
bigtop-bigpetstore/bigpetstore-data-generator/src/main/java/org/apache/bigtop/bigpetstore/datagenerator/generators/store/StoreLocationIncomePDF.java
2116
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.bigtop.bigpetstore.datagenerator.generators.store;

import java.util.List;

import org.apache.bigtop.bigpetstore.datagenerator.datamodels.inputs.ZipcodeRecord;
import org.apache.bigtop.bigpetstore.datagenerator.framework.pdfs.ProbabilityDensityFunction;

public class StoreLocationIncomePDF implements ProbabilityDensityFunction<ZipcodeRecord>
{
	double incomeNormalizationFactor;
	double minIncome;
	double k;

	public StoreLocationIncomePDF(List<ZipcodeRecord> zipcodeTable, double incomeScalingFactor)
	{
		double maxIncome = 0.0;
		minIncome = Double.MAX_VALUE;
		for(ZipcodeRecord record : zipcodeTable)
		{
			maxIncome = Math.max(maxIncome, record.getMedianHouseholdIncome());
			minIncome = Math.min(minIncome, record.getMedianHouseholdIncome());
		}

		k = Math.log(incomeScalingFactor) / (maxIncome - minIncome);

		incomeNormalizationFactor = 0.0d;
		for(ZipcodeRecord record : zipcodeTable)
		{
			double weight = incomeWeight(record);
			incomeNormalizationFactor += weight;
		}
	}

	private double incomeWeight(ZipcodeRecord record)
	{
		return Math.exp(k * (record.getMedianHouseholdIncome() - minIncome));
	}

	@Override
	public double probability(ZipcodeRecord datum)
	{
		double weight = incomeWeight(datum);
		return weight / this.incomeNormalizationFactor;
	}
}
apache-2.0
squeek502/lit
commands/unclaim.lua
155
return function ()
  local core = require('core')()

  if #args ~= 2 then
    error("Usage: lit unclaim orgname")
  end

  assert(core.unclaim(args[2]))
end
apache-2.0
zer0se7en/netty
buffer/src/test/java/io/netty/buffer/PooledLittleEndianHeapByteBufTest.java
1146
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.buffer;

import java.nio.ByteOrder;

import static org.junit.Assert.*;

/**
 * Tests little-endian heap channel buffers
 */
public class PooledLittleEndianHeapByteBufTest extends AbstractPooledByteBufTest {

    @Override
    protected ByteBuf alloc(int length, int maxCapacity) {
        ByteBuf buffer = PooledByteBufAllocator.DEFAULT.heapBuffer(length, maxCapacity)
                .order(ByteOrder.LITTLE_ENDIAN);
        assertSame(ByteOrder.LITTLE_ENDIAN, buffer.order());
        return buffer;
    }
}
apache-2.0
iLordTony/Strongfit
StrongFit/web/Fuentes/Anagram-fontfacekit/web fonts/anagram_regular_macroman/Anagram-demo.html
24069
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js" type="text/javascript" charset="utf-8"></script> <script src="specimen_files/easytabs.js" type="text/javascript" charset="utf-8"></script> <link rel="stylesheet" href="specimen_files/specimen_stylesheet.css" type="text/css" charset="utf-8" /> <link rel="stylesheet" href="stylesheet.css" type="text/css" charset="utf-8" /> <style type="text/css"> body{ font-family: 'anagramregular'; } </style> <title>Anagram Regular Specimen</title> <script type="text/javascript" charset="utf-8"> $(document).ready(function() { $('#container').easyTabs({defaultContent:1}); }); </script> </head> <body> <div id="container"> <div id="header"> Anagram Regular </div> <ul class="tabs"> <li><a href="#specimen">Specimen</a></li> <li><a href="#layout">Sample Layout</a></li> <li><a href="#installing">Installing Webfonts</a></li> </ul> <div id="main_content"> <div id="specimen"> <div class="section"> <div class="grid12 firstcol"> <div class="huge">AaBb</div> </div> </div> <div class="section"> <div class="glyph_range">A&#x200B;B&#x200b;C&#x200b;D&#x200b;E&#x200b;F&#x200b;G&#x200b;H&#x200b;I&#x200b;J&#x200b;K&#x200b;L&#x200b;M&#x200b;N&#x200b;O&#x200b;P&#x200b;Q&#x200b;R&#x200b;S&#x200b;T&#x200b;U&#x200b;V&#x200b;W&#x200b;X&#x200b;Y&#x200b;Z&#x200b;a&#x200b;b&#x200b;c&#x200b;d&#x200b;e&#x200b;f&#x200b;g&#x200b;h&#x200b;i&#x200b;j&#x200b;k&#x200b;l&#x200b;m&#x200b;n&#x200b;o&#x200b;p&#x200b;q&#x200b;r&#x200b;s&#x200b;t&#x200b;u&#x200b;v&#x200b;w&#x200b;x&#x200b;y&#x200b;z&#x200b;1&#x200b;2&#x200b;3&#x200b;4&#x200b;5&#x200b;6&#x200b;7&#x200b;8&#x200b;9&#x200b;0&#x200b;&amp;&#x200b;.&#x200b;,&#x200b;?&#x200b;!&#x200b;&#64;&#x200b;(&#x200b;)&#x200b;#&#x200b;$&#x200b;%&#x200b;*&#x200b;+&#x200b;-&#x200b;=&#x200b;:&#x200b;;</div> </div> <div class="section"> <div class="grid12 firstcol"> <table class="sample_table"> <tr><td>10</td><td class="size10">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>11</td><td class="size11">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>12</td><td class="size12">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>13</td><td class="size13">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>14</td><td class="size14">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>16</td><td class="size16">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>18</td><td 
class="size18">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>20</td><td class="size20">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>24</td><td class="size24">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>30</td><td class="size30">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>36</td><td class="size36">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>48</td><td class="size48">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>60</td><td class="size60">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>72</td><td class="size72">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> <tr><td>90</td><td class="size90">abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ</td></tr> </table> </div> </div> <div class="section" id="bodycomparison"> <div id="xheight"> <div class="fontbody">&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;&#x25FC;body</div><div class="arialbody">body</div><div class="verdanabody">body</div><div class="georgiabody">body</div></div> <div class="fontbody" style="z-index:1"> body<span>Anagram Regular</span> </div> <div class="arialbody" style="z-index:1"> body<span>Arial</span> </div> <div class="verdanabody" style="z-index:1"> body<span>Verdana</span> </div> <div class="georgiabody" style="z-index:1"> body<span>Georgia</span> </div> </div> <div class="section psample psample_row1" id=""> <div class="grid2 firstcol"> <p class="size10"><span>10.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid3"> <p class="size11"><span>11.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. 
Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid3"> <p class="size12"><span>12.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid4"> <p class="size13"><span>13.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="white_blend"></div> </div> <div class="section psample psample_row2" id=""> <div class="grid3 firstcol"> <p class="size14"><span>14.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid4"> <p class="size16"><span>16.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid5"> <p class="size18"><span>18.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="white_blend"></div> </div> <div class="section psample psample_row3" id=""> <div class="grid5 firstcol"> <p class="size20"><span>20.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid7"> <p class="size24"><span>24.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="white_blend"></div> </div> <div class="section psample psample_row4" id=""> <div class="grid12 firstcol"> <p class="size30"><span>30.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. 
Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="white_blend"></div> </div> <div class="section psample psample_row1 fullreverse"> <div class="grid2 firstcol"> <p class="size10"><span>10.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid3"> <p class="size11"><span>11.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid3"> <p class="size12"><span>12.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid4"> <p class="size13"><span>13.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="black_blend"></div> </div> <div class="section psample psample_row2 fullreverse"> <div class="grid3 firstcol"> <p class="size14"><span>14.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid4"> <p class="size16"><span>16.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid5"> <p class="size18"><span>18.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="black_blend"></div> </div> <div class="section psample fullreverse psample_row3" id=""> <div class="grid5 firstcol"> <p class="size20"><span>20.</span>Aenean lacinia bibendum nulla sed consectetur. 
Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="grid7"> <p class="size24"><span>24.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="black_blend"></div> </div> <div class="section psample fullreverse psample_row4" id="" style="border-bottom: 20px #000 solid;"> <div class="grid12 firstcol"> <p class="size30"><span>30.</span>Aenean lacinia bibendum nulla sed consectetur. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. Nullam id dolor id nibh ultricies vehicula ut id elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Nulla vitae elit libero, a pharetra augue.</p> </div> <div class="black_blend"></div> </div> </div> <div id="layout"> <div class="section"> <div class="grid12 firstcol"> <h1>Lorem Ipsum Dolor</h1> <h2>Etiam porta sem malesuada magna mollis euismod</h2> <p class="byline">By <a href="#link">Aenean Lacinia</a></p> </div> </div> <div class="section"> <div class="grid8 firstcol"> <p class="large">Donec sed odio dui. Morbi leo risus, porta ac consectetur ac, vestibulum at eros. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. </p> <h3>Pellentesque ornare sem</h3> <p>Maecenas sed diam eget risus varius blandit sit amet non magna. Maecenas faucibus mollis interdum. Donec ullamcorper nulla non metus auctor fringilla. Nullam id dolor id nibh ultricies vehicula ut id elit. Nullam id dolor id nibh ultricies vehicula ut id elit. </p> <p>Aenean eu leo quam. Pellentesque ornare sem lacinia quam venenatis vestibulum. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cum sociis natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. </p> <p>Nulla vitae elit libero, a pharetra augue. Praesent commodo cursus magna, vel scelerisque nisl consectetur et. Aenean lacinia bibendum nulla sed consectetur. </p> <p>Nullam quis risus eget urna mollis ornare vel eu leo. Nullam quis risus eget urna mollis ornare vel eu leo. Maecenas sed diam eget risus varius blandit sit amet non magna. Donec ullamcorper nulla non metus auctor fringilla. </p> <h3>Cras mattis consectetur</h3> <p>Aenean eu leo quam. Pellentesque ornare sem lacinia quam venenatis vestibulum. Aenean lacinia bibendum nulla sed consectetur. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Cras mattis consectetur purus sit amet fermentum. </p> <p>Nullam id dolor id nibh ultricies vehicula ut id elit. Nullam quis risus eget urna mollis ornare vel eu leo. Cras mattis consectetur purus sit amet fermentum.</p> </div> <div class="grid4 sidebar"> <div class="box reverse"> <p class="last">Nullam quis risus eget urna mollis ornare vel eu leo. Donec ullamcorper nulla non metus auctor fringilla. Cras mattis consectetur purus sit amet fermentum. Sed posuere consectetur est at lobortis. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
</p> </div> <p class="caption">Maecenas sed diam eget risus varius.</p> <p>Vestibulum id ligula porta felis euismod semper. Integer posuere erat a ante venenatis dapibus posuere velit aliquet. Vestibulum id ligula porta felis euismod semper. Sed posuere consectetur est at lobortis. Maecenas sed diam eget risus varius blandit sit amet non magna. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. </p> <p>Duis mollis, est non commodo luctus, nisi erat porttitor ligula, eget lacinia odio sem nec elit. Aenean lacinia bibendum nulla sed consectetur. Vivamus sagittis lacus vel augue laoreet rutrum faucibus dolor auctor. Aenean lacinia bibendum nulla sed consectetur. Nullam quis risus eget urna mollis ornare vel eu leo. </p> <p>Praesent commodo cursus magna, vel scelerisque nisl consectetur et. Donec ullamcorper nulla non metus auctor fringilla. Maecenas faucibus mollis interdum. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus. </p> </div> </div> </div> <div id="specs"> </div> <div id="installing"> <div class="section"> <div class="grid7 firstcol"> <h1>Installing Webfonts</h1> <p>Webfonts are supported by all major browser platforms but not all in the same way. There are currently four different font formats that must be included in order to target all browsers. This includes TTF, WOFF, EOT and SVG.</p> <h2>1. Upload your webfonts</h2> <p>You must upload your webfont kit to your website. They should be in or near the same directory as your CSS files.</p> <h2>2. Include the webfont stylesheet</h2> <p>A special CSS @font-face declaration helps the various browsers select the appropriate font it needs without causing you a bunch of headaches. Learn more about this syntax by reading the <a href="http://www.fontspring.com/blog/further-hardening-of-the-bulletproof-syntax">Fontspring blog post</a> about it. The code for it is as follows:</p> <code> @font-face{ font-family: 'MyWebFont'; src: url('WebFont.eot'); src: url('WebFont.eot?iefix') format('eot'), url('WebFont.woff') format('woff'), url('WebFont.ttf') format('truetype'), url('WebFont.svg#webfont') format('svg'); } </code> <p>We've already gone ahead and generated the code for you. All you have to do is link to the stylesheet in your HTML, like this:</p> <code>&lt;link rel=&quot;stylesheet&quot; href=&quot;stylesheet.css&quot; type=&quot;text/css&quot; charset=&quot;utf-8&quot; /&gt;</code> <h2>3. Modify your own stylesheet</h2> <p>To take advantage of your new fonts, you must tell your stylesheet to use them. Look at the original @font-face declaration above and find the property called "font-family." The name linked there will be what you use to reference the font. Prepend that webfont name to the font stack in the "font-family" property, inside the selector you want to change. For example:</p> <code>p { font-family: 'MyWebFont', Arial, sans-serif; }</code> <h2>4. Test</h2> <p>Getting webfonts to work cross-browser <em>can</em> be tricky. Use the information in the sidebar to help you if you find that fonts aren't loading in a particular browser.</p> </div> <div class="grid5 sidebar"> <div class="box"> <h2>Troubleshooting<br />Font-Face Problems</h2> <p>Having trouble getting your webfonts to load in your new website? Here are some tips to sort out what might be the problem.</p> <h3>Fonts not showing in any browser</h3> <p>This sounds like you need to work on the plumbing. 
You either did not upload the fonts to the correct directory, or you did not link the fonts properly in the CSS. If you've confirmed that all this is correct and you still have a problem, take a look at your .htaccess file and see if requests are getting intercepted.</p> <h3>Fonts not loading in iPhone or iPad</h3> <p>The most common problem here is that you are serving the fonts from an IIS server. IIS refuses to serve files that have unknown MIME types. If that is the case, you must set the MIME type for SVG to "image/svg+xml" in the server settings. Follow these instructions from Microsoft if you need help.</p> <h3>Fonts not loading in Firefox</h3> <p>The primary reason for this failure? You are still using a version of Firefox older than 3.5. So upgrade already! If that isn't it, then you are very likely serving fonts from a different domain. Firefox requires that all font assets be served from the same domain. Lastly, it is possible that you need to add WOFF to your list of MIME types (if you are serving via IIS).</p> <h3>Fonts not loading in IE</h3> <p>Are you looking at Internet Explorer on an actual Windows machine or are you cheating by using a service like Adobe BrowserLab? Many of these screenshot services do not render @font-face for IE. Best to test it on a real machine.</p> <h3>Fonts not loading in IE9</h3> <p>IE9, like Firefox, requires that fonts be served from the same domain as the website. Make sure that is the case.</p> </div> </div> </div> </div> </div> <div id="footer"> <p>&copy;2010-2011 Fontspring. All rights reserved.</p> </div> </div> </body> </html>
apache-2.0
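The "Installing Webfonts" walkthrough in the specimen page above ends with a "Test" step that has no accompanying code. As a hedged sketch only: the CSS Font Loading API used below is not mentioned anywhere in that page, and 'MyWebFont' is simply the placeholder family name its @font-face example already uses. A quick browser-console check might look like this:

// Hypothetical sanity check for the walkthrough's "Test" step.
// document.fonts is the standard CSS Font Loading API; 'MyWebFont' is the
// placeholder family name from the page's @font-face example, not a real font.
document.fonts.ready.then(function () {
    // true once a face matching this family and size is available for rendering
    var loaded = document.fonts.check('16px "MyWebFont"');
    console.log('MyWebFont loaded:', loaded);
});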
ilinum/intellij-scala
test/org/jetbrains/plugins/scala/annotator/ParametersAnnotatorTest.scala
3448
package org.jetbrains.plugins.scala package annotator import org.intellij.lang.annotations.Language import org.jetbrains.plugins.scala.base.SimpleTestCase import org.jetbrains.plugins.scala.extensions._ import org.jetbrains.plugins.scala.lang.psi.api.statements.ScParameterOwner import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement /** * Pavel.Fatin, 18.05.2010 */ class ParametersAnnotatorTest extends SimpleTestCase { final val Header = "class A; class B; class C;\n" def testFine(): Unit = { assertMatches(messages("def f(a: A) {}")) { case Nil => } assertMatches(messages("def f(a: A*) {}")) { case Nil => } assertMatches(messages("def f(a: A, b: B) {}")) { case Nil => } assertMatches(messages("def f(a: A, b: B*) {}")) { case Nil => } assertMatches(messages("def f(a: A, b: B, c: C*) {}")) { case Nil => } } def testMalformed(): Unit = { assertMatches(messages("def f(a: A*, b: B) {}")) { case Error("a: A*", "*-parameter must come last") :: Nil => } assertMatches(messages("def f(a: A, b: B*, c: C) {}")) { case Error("b: B*", "*-parameter must come last") :: Nil => } assertMatches(messages("def f(a: A*, b: B*) {}")) { case Error("a: A*", "*-parameter must come last") :: Nil => } assertMatches(messages("def f(a: A*, b: B*, c: C) {}")) { case Error("a: A*", "*-parameter must come last") :: Error("b: B*", "*-parameter must come last") :: Nil => } assertMatches(messages("def f(a: A*, c: C)(b: B*, c: C) {}")) { case Error("a: A*", "*-parameter must come last") :: Error("b: B*", "*-parameter must come last") :: Nil => } } def testRepeatedWithDefault(): Unit = { assertMatches(messages("def f(i: Int, js: Int* = 1) {}")) { case Error("(i: Int, js: Int* = 1)", "Parameter section with *-parameter cannot have default arguments") :: Nil => } } def testByName(): Unit = { assertMatches(messages("def f(a: A)(implicit b: => B) {}")) { case Error("b: => B", "implicit parameters may not be call-by-name") :: Nil => } assertMatches(messages("case class D(a: A, b: => B)")) { case Error("b: => B", "case class parameters may not be call-by-name") :: Nil => } assertMatches(messages("class D(a: A, val b: => B)")) { case Error("val b: => B", "'val' parameters may not be call-by-name") :: Nil => } assertMatches(messages("class D(a: A, var b: => B)")) { case Error("var b: => B", "'var' parameters may not be call-by-name") :: Nil => } } def testMissingTypeAnnotation(): Unit = { assertMatches(messages("def test(p1: String, p2 = \"default\") = p1 concat p2")) { //SCL-3799 case Error("p2 = \"default\"", "Missing type annotation for parameter: p2") :: Nil => } } def messages(@Language(value = "Scala", prefix = Header) code: String): List[Message] = { val annotator = new ParametersAnnotator() {} val file = (Header + code).parse val mock = new AnnotatorHolderMock(file) val owner = file.depthFirst().filterByType[ScParameterOwner].collectFirst { case named: ScNamedElement if !Set("A", "B", "C").contains(named.name) => named }.get annotator.annotateParameters(owner.clauses.get, mock) for (p <- owner.parameters) { annotator.annotateParameter(p, mock) } mock.annotations } }
apache-2.0
stoksey69/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201405/LineItemCreativeAssociationOperationErrorReason.java
1675
package com.google.api.ads.dfp.jaxws.v201405; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for LineItemCreativeAssociationOperationError.Reason. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="LineItemCreativeAssociationOperationError.Reason"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="NOT_ALLOWED"/> * &lt;enumeration value="NOT_APPLICABLE"/> * &lt;enumeration value="CANNOT_ACTIVATE_INVALID_CREATIVE"/> * &lt;enumeration value="UNKNOWN"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "LineItemCreativeAssociationOperationError.Reason") @XmlEnum public enum LineItemCreativeAssociationOperationErrorReason { /** * * The operation is not allowed due to permissions * * */ NOT_ALLOWED, /** * * The operation is not applicable to the current state * * */ NOT_APPLICABLE, /** * * Cannot activate an invalid creative * * */ CANNOT_ACTIVATE_INVALID_CREATIVE, /** * * The value returned if the actual value is not exposed by the requested API version. * * */ UNKNOWN; public String value() { return name(); } public static LineItemCreativeAssociationOperationErrorReason fromValue(String v) { return valueOf(v); } }
apache-2.0
wangcy6/storm_app
frame/c++/webrtc-master/media/base/fakevideorenderer.h
4408
/* * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #ifndef MEDIA_BASE_FAKEVIDEORENDERER_H_ #define MEDIA_BASE_FAKEVIDEORENDERER_H_ #include "api/video/video_frame.h" #include "media/base/videosinkinterface.h" #include "rtc_base/criticalsection.h" #include "rtc_base/logging.h" namespace cricket { // Faked video renderer that has a callback for actions on rendering. class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> { public: FakeVideoRenderer() : errors_(0), width_(0), height_(0), rotation_(webrtc::kVideoRotation_0), timestamp_us_(0), num_rendered_frames_(0), black_frame_(false) {} virtual void OnFrame(const webrtc::VideoFrame& frame) { rtc::CritScope cs(&crit_); // TODO(zhurunz) Check with VP8 team to see if we can remove this // tolerance on Y values. Some unit tests produce Y values close // to 16 rather than close to zero, for supposedly black frames. // Largest value observed is 34, e.g., running // PeerConnectionIntegrationTest.SendAndReceive16To9AspectRatio. black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame); // Treat unexpected frame size as error. ++num_rendered_frames_; width_ = frame.width(); height_ = frame.height(); rotation_ = frame.rotation(); timestamp_us_ = frame.timestamp_us(); } int errors() const { return errors_; } int width() const { rtc::CritScope cs(&crit_); return width_; } int height() const { rtc::CritScope cs(&crit_); return height_; } webrtc::VideoRotation rotation() const { rtc::CritScope cs(&crit_); return rotation_; } int64_t timestamp_us() const { rtc::CritScope cs(&crit_); return timestamp_us_; } int num_rendered_frames() const { rtc::CritScope cs(&crit_); return num_rendered_frames_; } bool black_frame() const { rtc::CritScope cs(&crit_); return black_frame_; } private: static bool CheckFrameColorYuv(uint8_t y_min, uint8_t y_max, uint8_t u_min, uint8_t u_max, uint8_t v_min, uint8_t v_max, const webrtc::VideoFrame* frame) { if (!frame || !frame->video_frame_buffer()) { return false; } rtc::scoped_refptr<const webrtc::I420BufferInterface> i420_buffer = frame->video_frame_buffer()->ToI420(); // Y int y_width = frame->width(); int y_height = frame->height(); const uint8_t* y_plane = i420_buffer->DataY(); const uint8_t* y_pos = y_plane; int32_t y_pitch = i420_buffer->StrideY(); for (int i = 0; i < y_height; ++i) { for (int j = 0; j < y_width; ++j) { uint8_t y_value = *(y_pos + j); if (y_value < y_min || y_value > y_max) { return false; } } y_pos += y_pitch; } // U and V int chroma_width = i420_buffer->ChromaWidth(); int chroma_height = i420_buffer->ChromaHeight(); const uint8_t* u_plane = i420_buffer->DataU(); const uint8_t* v_plane = i420_buffer->DataV(); const uint8_t* u_pos = u_plane; const uint8_t* v_pos = v_plane; int32_t u_pitch = i420_buffer->StrideU(); int32_t v_pitch = i420_buffer->StrideV(); for (int i = 0; i < chroma_height; ++i) { for (int j = 0; j < chroma_width; ++j) { uint8_t u_value = *(u_pos + j); if (u_value < u_min || u_value > u_max) { return false; } uint8_t v_value = *(v_pos + j); if (v_value < v_min || v_value > v_max) { return false; } } u_pos += u_pitch; v_pos += v_pitch; } return true; } int errors_; int width_; int height_; webrtc::VideoRotation 
rotation_; int64_t timestamp_us_; int num_rendered_frames_; bool black_frame_; rtc::CriticalSection crit_; }; } // namespace cricket #endif // MEDIA_BASE_FAKEVIDEORENDERER_H_
apache-2.0
dbushong/crush-tools
src/libcrush/test/mempool_test.c
2499
/***************************************** Copyright 2008 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *****************************************/ #include <stdio.h> #include <string.h> #include <crush/mempool.h> #include "unittest.h" int main(int argc, char *argv[]) { int test_int = 0xffffffff; void *ptr_a, *ptr_b; mempool_t *pool = NULL; pool = mempool_create(16); ASSERT_TRUE(pool != NULL, "mempool_create returns valid pointer"); ASSERT_LONG_EQ(16, pool->page_size, "mempool_create sets page size"); ASSERT_LONG_EQ(1, pool->n_pages, "mempool_create initializes one page"); ASSERT_TRUE(pool->pages != NULL, "mempool_create page list not null"); ASSERT_TRUE(pool->pages[0].buffer != NULL, "mempool_create allocates page"); ASSERT_LONG_EQ(0, pool->pages[0].next, "mempool_create initializes page->next"); ptr_a = mempool_alloc(pool, sizeof(test_int)); ASSERT_TRUE(ptr_a != NULL, "mempool_alloc returns valid pointer"); ASSERT_LONG_EQ(sizeof(test_int), pool->pages[0].next, "mempool_alloc sets next location correctly"); *((int *) ptr_a) = test_int; ptr_b = mempool_add(pool, "hello world", strlen("hello world") + 1); ASSERT_TRUE(ptr_b != NULL, "mempool_add returns valid pointer"); ASSERT_STR_EQ("hello world", (char *)ptr_b, "mempool_add copies data into pool"); ASSERT_INT_EQ(test_int, *((int *) ptr_a), "mempool_add doesn't clobber pool data"); ASSERT_TRUE(ptr_b == ptr_a + sizeof(test_int), "mempool_add puts new data in correct place"); ASSERT_LONG_EQ(1, pool->next_unfull, "mempool_alloc detects full buffer"); ASSERT_LONG_EQ(1, pool->n_pages, "mempool_alloc doesn't allocate new pages needlessly"); mempool_add(pool, "goodbye world", strlen("goodbye world") + 1); ASSERT_LONG_EQ(2, pool->n_pages, "mempool_alloc adds new pages as necessary"); mempool_destroy(pool); return 0; }
apache-2.0
john9x/jdbi
core/src/test/java/org/jdbi/v3/core/qualifier/QualifiedConstructorParamThing.java
1607
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdbi.v3.core.qualifier; import java.util.Objects; public class QualifiedConstructorParamThing { private final int id; private final String name; public QualifiedConstructorParamThing(int id, @Reversed String name) { this.id = id; this.name = name; } public int getId() { return id; } public String getName() { return name; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } QualifiedConstructorParamThing that = (QualifiedConstructorParamThing) o; return id == that.id && Objects.equals(name, that.name); } @Override public int hashCode() { return Objects.hash(id, name); } @Override public String toString() { return "QualifiedConstructorParamThing{" + "id=" + id + ", name='" + name + '\'' + '}'; } }
apache-2.0
juanavelez/hazelcast
hazelcast/src/test/java/com/hazelcast/cache/recordstore/CacheRecordStoreTestSupport.java
5087
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.recordstore; import com.hazelcast.cache.impl.AbstractCacheService; import com.hazelcast.cache.impl.CacheRecordStore; import com.hazelcast.cache.impl.ICacheRecordStore; import com.hazelcast.cache.impl.ICacheService; import com.hazelcast.config.CacheConfig; import com.hazelcast.config.Config; import com.hazelcast.config.InMemoryFormat; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.instance.Node; import com.hazelcast.instance.TestUtil; import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder; import com.hazelcast.nio.serialization.Data; import com.hazelcast.spi.NodeEngine; import com.hazelcast.spi.serialization.SerializationService; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.TestHazelcastInstanceFactory; import org.junit.After; import org.junit.Before; import static org.junit.Assert.assertTrue; public abstract class CacheRecordStoreTestSupport extends HazelcastTestSupport { protected static final String CACHE_NAME_PREFIX = "hz:"; protected static final String DEFAULT_CACHE_NAME = "MyCache"; protected static final int DEFAULT_PARTITION_ID = 1; protected static final int CACHE_RECORD_COUNT = 50; protected TestHazelcastInstanceFactory factory; protected HazelcastInstance hz; @Before public void init() { factory = new TestHazelcastInstanceFactory(1); hz = factory.newHazelcastInstance(createConfig()); } @After public void tearDown() { factory.shutdownAll(); } protected Config createConfig() { return new Config(); } protected CacheConfig createCacheConfig(String cacheName, InMemoryFormat inMemoryFormat) { return new CacheConfig() .setName(cacheName) .setManagerPrefix(CACHE_NAME_PREFIX) .setInMemoryFormat(inMemoryFormat); } protected ICacheService getCacheService(HazelcastInstance instance) { Node node = TestUtil.getNode(instance); return node.getNodeEngine().getService(ICacheService.SERVICE_NAME); } protected NodeEngine getNodeEngine(HazelcastInstance instance) { Node node = TestUtil.getNode(instance); return node.getNodeEngine(); } protected ICacheRecordStore createCacheRecordStore(HazelcastInstance instance, String cacheName, int partitionId, InMemoryFormat inMemoryFormat) { NodeEngine nodeEngine = getNodeEngine(instance); ICacheService cacheService = getCacheService(instance); CacheConfig cacheConfig = createCacheConfig(cacheName, inMemoryFormat); cacheService.putCacheConfigIfAbsent(cacheConfig); return new CacheRecordStore(CACHE_NAME_PREFIX + cacheName, partitionId, nodeEngine, (AbstractCacheService) cacheService); } protected ICacheRecordStore createCacheRecordStore(HazelcastInstance instance, InMemoryFormat inMemoryFormat) { return createCacheRecordStore(instance, DEFAULT_CACHE_NAME, DEFAULT_PARTITION_ID, inMemoryFormat); } protected ICacheRecordStore createCacheRecordStore(InMemoryFormat inMemoryFormat) { return createCacheRecordStore(hz, DEFAULT_CACHE_NAME, DEFAULT_PARTITION_ID, 
inMemoryFormat); } protected void putAndGetFromCacheRecordStore(ICacheRecordStore cacheRecordStore, InMemoryFormat inMemoryFormat) { SerializationService serializationService = new DefaultSerializationServiceBuilder().build(); for (int i = 0; i < CACHE_RECORD_COUNT; i++) { cacheRecordStore.put(serializationService.toData(i), "value-" + i, null, null, -1); } if (inMemoryFormat == InMemoryFormat.BINARY || inMemoryFormat == InMemoryFormat.NATIVE) { for (int i = 0; i < CACHE_RECORD_COUNT; i++) { assertTrue(Data.class.isAssignableFrom( cacheRecordStore.get(serializationService.toData(i), null).getClass())); } } else if (inMemoryFormat == InMemoryFormat.OBJECT) { for (int i = 0; i < CACHE_RECORD_COUNT; i++) { assertTrue(String.class.isAssignableFrom( cacheRecordStore.get(serializationService.toData(i), null).getClass())); } } else { throw new IllegalArgumentException("Unsupported in-memory format: " + inMemoryFormat); } } }
apache-2.0
cloudfoundry/v3-cli-plugin
vendor/code.cloudfoundry.org/cli/cf/commands/user/org_users_test.go
15566
package user_test import ( "code.cloudfoundry.org/cli/cf/api/apifakes" "code.cloudfoundry.org/cli/cf/commandregistry" "code.cloudfoundry.org/cli/cf/configuration/coreconfig" "code.cloudfoundry.org/cli/cf/models" "code.cloudfoundry.org/cli/cf/requirements" "code.cloudfoundry.org/cli/cf/requirements/requirementsfakes" "code.cloudfoundry.org/cli/cf/trace/tracefakes" "code.cloudfoundry.org/cli/plugin/models" testcmd "code.cloudfoundry.org/cli/testhelpers/commands" testconfig "code.cloudfoundry.org/cli/testhelpers/configuration" testterm "code.cloudfoundry.org/cli/testhelpers/terminal" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "os" . "code.cloudfoundry.org/cli/testhelpers/matchers" ) var _ = Describe("org-users command", func() { var ( ui *testterm.FakeUI requirementsFactory *requirementsfakes.FakeFactory configRepo coreconfig.Repository userRepo *apifakes.FakeUserRepository deps commandregistry.Dependency ) updateCommandDependency := func(pluginCall bool) { deps.UI = ui deps.Config = configRepo deps.RepoLocator = deps.RepoLocator.SetUserRepository(userRepo) commandregistry.Commands.SetCommand(commandregistry.Commands.FindCommand("org-users").SetDependency(deps, pluginCall)) } BeforeEach(func() { ui = &testterm.FakeUI{} userRepo = new(apifakes.FakeUserRepository) configRepo = testconfig.NewRepositoryWithDefaults() requirementsFactory = new(requirementsfakes.FakeFactory) deps = commandregistry.NewDependency(os.Stdout, new(tracefakes.FakePrinter), "") }) runCommand := func(args ...string) bool { return testcmd.RunCLICommand("org-users", args, requirementsFactory, updateCommandDependency, false, ui) } Describe("requirements", func() { It("fails with usage when invoked without an org name", func() { requirementsFactory.NewLoginRequirementReturns(requirements.Passing{}) runCommand() Expect(ui.Outputs()).To(ContainSubstrings( []string{"Incorrect Usage", "Requires an argument"}, )) }) It("fails when not logged in", func() { requirementsFactory.NewLoginRequirementReturns(requirements.Failing{Message: "not logged in"}) Expect(runCommand("say-hello-to-my-little-org")).To(BeFalse()) }) }) Context("when logged in and given an org with no users in a particular role", func() { var ( user1, user2 models.UserFields ) BeforeEach(func() { org := models.Organization{} org.Name = "the-org" org.GUID = "the-org-guid" user1 = models.UserFields{} user1.Username = "user1" user2 = models.UserFields{} user2.Username = "user2" requirementsFactory.NewLoginRequirementReturns(requirements.Passing{}) organizationReq := new(requirementsfakes.FakeOrganizationRequirement) organizationReq.GetOrganizationReturns(org) requirementsFactory.NewOrganizationRequirementReturns(organizationReq) }) Context("shows friendly messaage when no users in ORG_MANAGER role", func() { It("shows the special users in the given org", func() { userRepo.ListUsersInOrgForRoleStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgManager: {}, models.RoleBillingManager: {user1}, models.RoleOrgAuditor: {user2}, }[roleName] return userFields, nil } runCommand("the-org") Expect(userRepo.ListUsersInOrgForRoleCallCount()).To(Equal(3)) for i, expectedRole := range []models.Role{models.RoleOrgManager, models.RoleBillingManager, models.RoleOrgAuditor} { orgGUID, actualRole := userRepo.ListUsersInOrgForRoleArgsForCall(i) Expect(orgGUID).To(Equal("the-org-guid")) Expect(actualRole).To(Equal(expectedRole)) } Expect(ui.Outputs()).To(BeInDisplayOrder( []string{"Getting users 
in org", "the-org", "my-user"}, []string{"ORG MANAGER"}, []string{" No ORG MANAGER found"}, []string{"BILLING MANAGER"}, []string{" user1"}, []string{"ORG AUDITOR"}, []string{" user2"}, )) }) }) Context("shows friendly messaage when no users in BILLING_MANAGER role", func() { It("shows the special users in the given org", func() { userRepo.ListUsersInOrgForRoleStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgManager: {user1}, models.RoleBillingManager: {}, models.RoleOrgAuditor: {user2}, }[roleName] return userFields, nil } runCommand("the-org") Expect(userRepo.ListUsersInOrgForRoleCallCount()).To(Equal(3)) for i, expectedRole := range []models.Role{models.RoleOrgManager, models.RoleBillingManager, models.RoleOrgAuditor} { orgGUID, actualRole := userRepo.ListUsersInOrgForRoleArgsForCall(i) Expect(orgGUID).To(Equal("the-org-guid")) Expect(actualRole).To(Equal(expectedRole)) } Expect(ui.Outputs()).To(BeInDisplayOrder( []string{"Getting users in org", "the-org", "my-user"}, []string{"ORG MANAGER"}, []string{" user1"}, []string{"BILLING MANAGER"}, []string{" No BILLING MANAGER found"}, []string{"ORG AUDITOR"}, []string{" user2"}, )) }) }) Context("shows friendly messaage when no users in ORG_AUDITOR role", func() { It("shows the special users in the given org", func() { userRepo.ListUsersInOrgForRoleStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgManager: {user1}, models.RoleBillingManager: {user2}, models.RoleOrgAuditor: {}, }[roleName] return userFields, nil } runCommand("the-org") Expect(userRepo.ListUsersInOrgForRoleCallCount()).To(Equal(3)) for i, expectedRole := range []models.Role{models.RoleOrgManager, models.RoleBillingManager, models.RoleOrgAuditor} { orgGUID, actualRole := userRepo.ListUsersInOrgForRoleArgsForCall(i) Expect(orgGUID).To(Equal("the-org-guid")) Expect(actualRole).To(Equal(expectedRole)) } Expect(ui.Outputs()).To(BeInDisplayOrder( []string{"Getting users in org", "the-org", "my-user"}, []string{"ORG MANAGER"}, []string{" user1"}, []string{"BILLING MANAGER"}, []string{" user2"}, []string{"ORG AUDITOR"}, []string{" No ORG AUDITOR found"}, )) }) }) }) Context("when logged in and given an org with users", func() { BeforeEach(func() { org := models.Organization{} org.Name = "the-org" org.GUID = "the-org-guid" user := models.UserFields{Username: "user1"} user2 := models.UserFields{Username: "user2"} user3 := models.UserFields{Username: "user3"} user4 := models.UserFields{Username: "user4"} userRepo.ListUsersInOrgForRoleStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgManager: {user, user2}, models.RoleBillingManager: {user4}, models.RoleOrgAuditor: {user3}, }[roleName] return userFields, nil } requirementsFactory.NewLoginRequirementReturns(requirements.Passing{}) organizationReq := new(requirementsfakes.FakeOrganizationRequirement) organizationReq.GetOrganizationReturns(org) requirementsFactory.NewOrganizationRequirementReturns(organizationReq) }) It("shows the special users in the given org", func() { runCommand("the-org") orgGUID, _ := userRepo.ListUsersInOrgForRoleArgsForCall(0) Expect(orgGUID).To(Equal("the-org-guid")) Expect(ui.Outputs()).To(ContainSubstrings( []string{"Getting users in org", "the-org", "my-user"}, []string{"ORG MANAGER"}, []string{"user1"}, []string{"user2"}, []string{"BILLING 
MANAGER"}, []string{"user4"}, []string{"ORG AUDITOR"}, []string{"user3"}, )) }) Context("when the -a flag is provided", func() { BeforeEach(func() { user := models.UserFields{Username: "user1"} user2 := models.UserFields{Username: "user2"} userRepo.ListUsersInOrgForRoleStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgUser: {user, user2}, }[roleName] return userFields, nil } }) It("lists all org users, regardless of role", func() { runCommand("-a", "the-org") orgGUID, _ := userRepo.ListUsersInOrgForRoleArgsForCall(0) Expect(orgGUID).To(Equal("the-org-guid")) Expect(ui.Outputs()).To(ContainSubstrings( []string{"Getting users in org", "the-org", "my-user"}, []string{"USERS"}, []string{"user1"}, []string{"user2"}, )) }) }) Context("when cc api verson is >= 2.21.0", func() { It("calls ListUsersInOrgForRoleWithNoUAA()", func() { configRepo.SetAPIVersion("2.22.0") runCommand("the-org") Expect(userRepo.ListUsersInOrgForRoleWithNoUAACallCount()).To(BeNumerically(">=", 1)) Expect(userRepo.ListUsersInOrgForRoleCallCount()).To(Equal(0)) }) }) Context("when cc api verson is < 2.21.0", func() { It("calls ListUsersInOrgForRole()", func() { configRepo.SetAPIVersion("2.20.0") runCommand("the-org") Expect(userRepo.ListUsersInOrgForRoleWithNoUAACallCount()).To(Equal(0)) Expect(userRepo.ListUsersInOrgForRoleCallCount()).To(BeNumerically(">=", 1)) }) }) }) Describe("when invoked by a plugin", func() { var ( pluginUserModel []plugin_models.GetOrgUsers_Model ) BeforeEach(func() { configRepo.SetAPIVersion("2.22.0") }) Context("single roles", func() { BeforeEach(func() { org := models.Organization{} org.Name = "the-org" org.GUID = "the-org-guid" // org managers user := models.UserFields{} user.Username = "user1" user.GUID = "1111" user2 := models.UserFields{} user2.Username = "user2" user2.GUID = "2222" // billing manager user3 := models.UserFields{} user3.Username = "user3" user3.GUID = "3333" // auditors user4 := models.UserFields{} user4.Username = "user4" user4.GUID = "4444" userRepo.ListUsersInOrgForRoleWithNoUAAStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgManager: {user, user2}, models.RoleBillingManager: {user4}, models.RoleOrgAuditor: {user3}, models.RoleOrgUser: {user3}, }[roleName] return userFields, nil } requirementsFactory.NewLoginRequirementReturns(requirements.Passing{}) organizationReq := new(requirementsfakes.FakeOrganizationRequirement) organizationReq.GetOrganizationReturns(org) requirementsFactory.NewOrganizationRequirementReturns(organizationReq) pluginUserModel = []plugin_models.GetOrgUsers_Model{} deps.PluginModels.OrgUsers = &pluginUserModel }) It("populates the plugin model with users with single roles", func() { testcmd.RunCLICommand("org-users", []string{"the-org"}, requirementsFactory, updateCommandDependency, true, ui) Expect(pluginUserModel).To(HaveLen(4)) for _, u := range pluginUserModel { switch u.Username { case "user1": Expect(u.Guid).To(Equal("1111")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgManager"})) case "user2": Expect(u.Guid).To(Equal("2222")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgManager"})) case "user3": Expect(u.Guid).To(Equal("3333")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgAuditor"})) case "user4": Expect(u.Guid).To(Equal("4444")) Expect(u.Roles).To(ConsistOf([]string{"RoleBillingManager"})) default: Fail("unexpected user: " + u.Username) } } }) It("populates the 
plugin model with users with single roles -a flag", func() { testcmd.RunCLICommand("org-users", []string{"-a", "the-org"}, requirementsFactory, updateCommandDependency, true, ui) Expect(pluginUserModel).To(HaveLen(1)) Expect(pluginUserModel[0].Username).To(Equal("user3")) Expect(pluginUserModel[0].Guid).To(Equal("3333")) Expect(pluginUserModel[0].Roles[0]).To(Equal("RoleOrgUser")) }) }) Context("multiple roles", func() { BeforeEach(func() { org := models.Organization{} org.Name = "the-org" org.GUID = "the-org-guid" // org managers user := models.UserFields{} user.Username = "user1" user.GUID = "1111" user.IsAdmin = true user2 := models.UserFields{} user2.Username = "user2" user2.GUID = "2222" // billing manager user3 := models.UserFields{} user3.Username = "user3" user3.GUID = "3333" // auditors user4 := models.UserFields{} user4.Username = "user4" user4.GUID = "4444" userRepo.ListUsersInOrgForRoleWithNoUAAStub = func(_ string, roleName models.Role) ([]models.UserFields, error) { userFields := map[models.Role][]models.UserFields{ models.RoleOrgManager: {user, user2, user3, user4}, models.RoleBillingManager: {user2, user4}, models.RoleOrgAuditor: {user, user3}, models.RoleOrgUser: {user, user2, user3, user4}, }[roleName] return userFields, nil } requirementsFactory.NewLoginRequirementReturns(requirements.Passing{}) organizationReq := new(requirementsfakes.FakeOrganizationRequirement) organizationReq.GetOrganizationReturns(org) requirementsFactory.NewOrganizationRequirementReturns(organizationReq) pluginUserModel = []plugin_models.GetOrgUsers_Model{} deps.PluginModels.OrgUsers = &pluginUserModel }) It("populates the plugin model with users with multiple roles", func() { testcmd.RunCLICommand("org-users", []string{"the-org"}, requirementsFactory, updateCommandDependency, true, ui) Expect(pluginUserModel).To(HaveLen(4)) for _, u := range pluginUserModel { switch u.Username { case "user1": Expect(u.Guid).To(Equal("1111")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgManager", "RoleOrgAuditor"})) Expect(u.IsAdmin).To(BeTrue()) case "user2": Expect(u.Guid).To(Equal("2222")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgManager", "RoleBillingManager"})) case "user3": Expect(u.Guid).To(Equal("3333")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgAuditor", "RoleOrgManager"})) case "user4": Expect(u.Guid).To(Equal("4444")) Expect(u.Roles).To(ConsistOf([]string{"RoleBillingManager", "RoleOrgManager"})) default: Fail("unexpected user: " + u.Username) } } }) It("populates the plugin model with users with multiple roles -a flag", func() { testcmd.RunCLICommand("org-users", []string{"-a", "the-org"}, requirementsFactory, updateCommandDependency, true, ui) Expect(pluginUserModel).To(HaveLen(4)) for _, u := range pluginUserModel { switch u.Username { case "user1": Expect(u.Guid).To(Equal("1111")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgUser"})) case "user2": Expect(u.Guid).To(Equal("2222")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgUser"})) case "user3": Expect(u.Guid).To(Equal("3333")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgUser"})) case "user4": Expect(u.Guid).To(Equal("4444")) Expect(u.Roles).To(ConsistOf([]string{"RoleOrgUser"})) default: Fail("unexpected user: " + u.Username) } } }) }) }) })
apache-2.0
devigned/azure-powershell
src/Common/Commands.Common.Network/Version2017_10_01/Models/VirtualNetworkGatewayIPConfiguration.cs
4565
// <auto-generated> // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // </auto-generated> namespace Microsoft.Azure.Management.Internal.Network.Version2017_10_01.Models { using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using System.Linq; /// <summary> /// IP configuration for virtual network gateway /// </summary> [Rest.Serialization.JsonTransformation] public partial class VirtualNetworkGatewayIPConfiguration : SubResource { /// <summary> /// Initializes a new instance of the /// VirtualNetworkGatewayIPConfiguration class. /// </summary> public VirtualNetworkGatewayIPConfiguration() { CustomInit(); } /// <summary> /// Initializes a new instance of the /// VirtualNetworkGatewayIPConfiguration class. /// </summary> /// <param name="id">Resource ID.</param> /// <param name="privateIPAllocationMethod">The private IP allocation /// method. Possible values are: 'Static' and 'Dynamic'. Possible /// values include: 'Static', 'Dynamic'</param> /// <param name="subnet">The reference of the subnet resource.</param> /// <param name="publicIPAddress">The reference of the public IP /// resource.</param> /// <param name="provisioningState">The provisioning state of the /// public IP resource. Possible values are: 'Updating', 'Deleting', /// and 'Failed'.</param> /// <param name="name">The name of the resource that is unique within a /// resource group. This name can be used to access the /// resource.</param> /// <param name="etag">A unique read-only string that changes whenever /// the resource is updated.</param> public VirtualNetworkGatewayIPConfiguration(string id = default(string), string privateIPAllocationMethod = default(string), SubResource subnet = default(SubResource), SubResource publicIPAddress = default(SubResource), string provisioningState = default(string), string name = default(string), string etag = default(string)) : base(id) { PrivateIPAllocationMethod = privateIPAllocationMethod; Subnet = subnet; PublicIPAddress = publicIPAddress; ProvisioningState = provisioningState; Name = name; Etag = etag; CustomInit(); } /// <summary> /// An initialization method that performs custom operations like setting defaults /// </summary> partial void CustomInit(); /// <summary> /// Gets or sets the private IP allocation method. Possible values are: /// 'Static' and 'Dynamic'. Possible values include: 'Static', /// 'Dynamic' /// </summary> [JsonProperty(PropertyName = "properties.privateIPAllocationMethod")] public string PrivateIPAllocationMethod { get; set; } /// <summary> /// Gets or sets the reference of the subnet resource. /// </summary> [JsonProperty(PropertyName = "properties.subnet")] public SubResource Subnet { get; set; } /// <summary> /// Gets or sets the reference of the public IP resource. /// </summary> [JsonProperty(PropertyName = "properties.publicIPAddress")] public SubResource PublicIPAddress { get; set; } /// <summary> /// Gets the provisioning state of the public IP resource. Possible /// values are: 'Updating', 'Deleting', and 'Failed'. /// </summary> [JsonProperty(PropertyName = "properties.provisioningState")] public string ProvisioningState { get; private set; } /// <summary> /// Gets or sets the name of the resource that is unique within a /// resource group. 
This name can be used to access the resource. /// </summary> [JsonProperty(PropertyName = "name")] public string Name { get; set; } /// <summary> /// Gets or sets a unique read-only string that changes whenever the /// resource is updated. /// </summary> [JsonProperty(PropertyName = "etag")] public string Etag { get; set; } } }
apache-2.0
Oleh-Kravchenko/kaa
server/common/server-shared/src/main/java/org/kaaproject/kaa/server/common/plugin/PluginConfig.java
1376
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.server.common.plugin; import org.apache.avro.Schema; /** * Represents the configuration of a particular plugin. * * @author Igor Kulikov * */ public interface PluginConfig { /** * Returns the plugin display name. There is no strict rule for this * name to be unique. * * @return the plugin display name */ String getPluginTypeName(); /** * Returns the class name of the plugin implementation. * * @return the class name of the plugin implementation */ String getPluginClassName(); /** * Returns the avro schema of the plugin configuration. * * @return the avro schema of the plugin configuration */ Schema getPluginConfigSchema(); }
apache-2.0
jakubschwan/jbpm
jbpm-human-task/jbpm-human-task-core/src/main/java/org/jbpm/services/task/impl/model/xml/JaxbFaultData.java
2814
package org.jbpm.services.task.impl.model.xml; import static org.jbpm.services.task.impl.model.xml.AbstractJaxbTaskObject.unsupported; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import org.kie.internal.task.api.model.AccessType; import org.kie.internal.task.api.model.FaultData; import com.fasterxml.jackson.annotation.JsonAutoDetect; @XmlRootElement(name="fault-data") @XmlAccessorType(XmlAccessType.FIELD) @JsonAutoDetect(getterVisibility=JsonAutoDetect.Visibility.NONE, setterVisibility=JsonAutoDetect.Visibility.NONE, fieldVisibility=JsonAutoDetect.Visibility.ANY) public class JaxbFaultData implements FaultData { @XmlElement private AccessType accessType; @XmlElement @XmlSchemaType(name="string") private String type; @XmlElement @XmlSchemaType(name="base64Binary") private byte[] content = null; private Object contentObject; @XmlElement(name="fault-name") @XmlSchemaType(name="string") private String faultName; public JaxbFaultData() { // JAXB constructor } public JaxbFaultData(FaultData faultData) { this.accessType = faultData.getAccessType(); this.content = faultData.getContent(); this.faultName = faultData.getFaultName(); this.type = faultData.getType(); } @Override public AccessType getAccessType() { return accessType; } @Override public void setAccessType( AccessType accessType ) { this.accessType = accessType; } @Override public String getType() { return type; } @Override public void setType( String type ) { this.type = type; } @Override public byte[] getContent() { return content; } @Override public void setContent( byte[] content ) { this.content = content; } @Override public String getFaultName() { return faultName; } @Override public void setFaultName( String faultName ) { this.faultName = faultName; } @Override public Object getContentObject() { return contentObject; } @Override public void setContentObject(Object object) { this.contentObject = object; } @Override public void writeExternal( ObjectOutput out ) throws IOException { unsupported(FaultData.class); } @Override public void readExternal( ObjectInput in ) throws IOException, ClassNotFoundException { unsupported(FaultData.class); } }
apache-2.0
forcedotcom/scrollerjs
src/config.js
6867
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * ======================================= * CONFIGURATION DOCS * ======================================= */ /** * Config object that contains all of the configuration options for * a scroller instance. * * This object is supplied by the implementer when instantiating a scroller. Some * properties have default values if they are not supplied by the implementer. * All the properties with the exception of `enabled` and `scroll` are saved * inside the instance and are accessible through the `this.opts` property. * * You can add your own options to be used by your own plugins. * * @class config * @static * **/ /** * Toggle the state of the scroller. If `enabled:false`, the scroller will not * respond to any gesture events. * * @property {Boolean} enabled * @default true * **/ /** * Define the duration in ms of the transition when the scroller snaps out of the boundaries. * * * @property {Boolean} bounceTime * @default 600 * **/ /** * Use CSS transitions to perform the scrolling. By default this is set to false and * a transition based on `requestAnimationFrame` is used instead. * * Given a position and duration to scroll, it applies a `matrix3d()` transform, * a `transition-timing-function` (by default a cubic-bezier curve), * and a `transition-duration` to make the element scroll. * * Most of the libraries use this CSS technique to create a synthetic scroller. * While this is the simplest and leanest (that is, closest to the browser) implementation * possible, when dealing with large amounts of DOM or really large scroller sizes, * performance will start to degrade due to the massive amounts of GPU, CPU, and memory * needed to manipulate this large and complex region. * * Moreover, this technique does not allow you to have any control over * or give you any position information while scrolling, given that the only event * fired by the browser is a `transitionEnd`, which is triggered once the transition is over. * * **It's recommended to use this configuration when:** * * - The scrolling size is reasonably small * - The content of the scroller is not changing often (little DOM manipulation) * - You don't need position information updates while scrolling * * * @property {Boolean} useCSSTransition * @default false * **/ /** * * Enable dual listeners (mouse and pointer events at the same time). This is useful for devices * where they can handle both types of interactions interchangeably. * This is set to false by default, allowing only one type of input interaction. * * @property {Boolean} dualListeners * @default false * **/ /** * * The minimum number of pixels necessary to start moving the scroller. * This is useful when you want to make sure that the user gesture * has well-defined direction (either horizontal or vertical). * * @property {integer} minThreshold * @default 5 * **/ /** * * The minimum number of pixels necessary to calculate * the direction of the gesture. 
* * Ideally this value should be less than `minThreshold` to be able to * control the action of the scroller based on the direction of the gesture. * For example, you may want to lock the scroller movement if the gesture is horizontal. * * @property {integer} minDirectionThreshold * @default 2 * **/ /** * * Locks the scroller if the direction of the gesture matches one provided. * This property is meant to be used in conjunction with `minThreshold` and `minDirectionThreshold`. * * Valid values: * - horizontal * - vertical * * @property {boolean} lockOnDirection * **/ /** * * Sets the scroller with the height of the items that the scroller contains. * * This property is used only when * `scroll:vertical` and `gpuOptimization: true`. * It helps the scroller calculate the positions of the surfaces * attached to the DOM, which slightly improves the performance of the scroller * (that is, the painting of that surface can occur asynchronously and outside of the JS execution). * * @plugin SurfaceManager * @property {integer} itemHeight * **/ /** * * Sets the scroller with the width of the items that the scroller contains. * * This property is used only when * `scroll:vertical` and `gpuOptimization: true`. * It helps the scroller calculate the positions of the surfaces * attached to the DOM, which slightly improves the performance of the scroller * (that is, the painting of that surface can occur asynchronously and outside of the JS execution). * * @plugin SurfaceManager * @property {integer} itemWidth * **/ /** * * Bind the event handlers to the scroller wrapper. * This is useful when using nested scrollers or when adding some custom logic * in a parent node as the event bubbles up. * * If set to true, once the scroller is out of the wrapper container, it will stop scrolling. * * @property {integer} bindToWrapper * @default false * **/ /** * * Set the direction of the scroll. * By default, vertical scrolling is enabled. * * Valid values: * - horizontal * - vertical * * @property {string} scroll * @default vertical * **/ /** * * Activates pullToRefresh functionality. * Note that you need to include the `PullToRefresh` plugin as part of your scroller bundle, * otherwise this option is ignored. * * @plugin PullToRefresh * @property {boolean} pullToRefresh * @default false **/ /** * * Activates pullToLoadMore functionality. * Note that you need to include the `PullToLoadMore` plugin as part of your scroller bundle, * otherwise this option is ignored. * * @plugin PullToLoadMore * @property {boolean} pullToLoadMore * @default false * **/ /** * * Creates scrollbars on the direction of the scroll. * @plugin Indicators * @property {boolean} scrollbars * @default false * **/ /** * * Scrollbar configuration. * * @plugin Indicators * @property {Object} scrollbarsConfig * @default false * **/ /** * * Activates infiniteLoading. * * @plugin InfiniteLoading * @property {boolean} infiniteLoading * @default false * **/ /** * * Sets the configuration for infiniteLoading. * The `infiniteLoading` option must be set to true. * * @property {Object} infiniteLoadingConfig * **/ /** * * TODO: Debounce * * @property {boolean} debounce * **/ /** * * TODO: GPUOptimization * @plugin SurfaceManager * @property {boolean} gpuOptimization * **/
apache-2.0
mdecourci/assertj-core
src/test/java/org/assertj/core/api/iterable/Iterable_generics_with_varargs_Test.java
1467
/** * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2015 the original author or authors. */ package org.assertj.core.api.iterable; import static java.util.Arrays.asList; import static org.assertj.core.api.Assertions.assertThat; import java.util.List; import org.junit.Test; /** * @author Johannes Schneider (<a href="mailto:[email protected]">[email protected]</a>) */ public class Iterable_generics_with_varargs_Test { @SuppressWarnings({ "unchecked", "rawtypes" }) @Test public void testWithoutGenerics() throws Exception { List strings = asList("a", "b", "c"); assertThat(strings).contains("a", "b"); } @Test public void testConcreteType() throws Exception { List<String> strings = asList("a", "b", "c"); assertThat(strings).contains("a", "b"); } @Test public void testListAssertWithGenerics() throws Exception { List<? extends String> strings = asList("a", "b", "c"); assertThat(strings).contains("a", "b"); } }
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-classroom/v1/1.29.2/com/google/api/services/classroom/model/DriveFile.java
3882
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.classroom.model; /** * Representation of a Google Drive file. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Google Classroom API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class DriveFile extends com.google.api.client.json.GenericJson { /** * URL that can be used to access the Drive item. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String alternateLink; /** * Drive API resource ID. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String id; /** * URL of a thumbnail image of the Drive item. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String thumbnailUrl; /** * Title of the Drive item. * * Read-only. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String title; /** * URL that can be used to access the Drive item. * * Read-only. * @return value or {@code null} for none */ public java.lang.String getAlternateLink() { return alternateLink; } /** * URL that can be used to access the Drive item. * * Read-only. * @param alternateLink alternateLink or {@code null} for none */ public DriveFile setAlternateLink(java.lang.String alternateLink) { this.alternateLink = alternateLink; return this; } /** * Drive API resource ID. * @return value or {@code null} for none */ public java.lang.String getId() { return id; } /** * Drive API resource ID. * @param id id or {@code null} for none */ public DriveFile setId(java.lang.String id) { this.id = id; return this; } /** * URL of a thumbnail image of the Drive item. * * Read-only. * @return value or {@code null} for none */ public java.lang.String getThumbnailUrl() { return thumbnailUrl; } /** * URL of a thumbnail image of the Drive item. * * Read-only. * @param thumbnailUrl thumbnailUrl or {@code null} for none */ public DriveFile setThumbnailUrl(java.lang.String thumbnailUrl) { this.thumbnailUrl = thumbnailUrl; return this; } /** * Title of the Drive item. * * Read-only. * @return value or {@code null} for none */ public java.lang.String getTitle() { return title; } /** * Title of the Drive item. * * Read-only. * @param title title or {@code null} for none */ public DriveFile setTitle(java.lang.String title) { this.title = title; return this; } @Override public DriveFile set(String fieldName, Object value) { return (DriveFile) super.set(fieldName, value); } @Override public DriveFile clone() { return (DriveFile) super.clone(); } }
apache-2.0
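The generated model above is typically used by constructing an instance and chaining its setters; the following minimal Java sketch assumes hypothetical ID, title, and URL values and omits the surrounding Classroom service calls.

import com.google.api.services.classroom.model.DriveFile;

public class DriveFileExample {
  public static void main(String[] args) {
    // Each generated setter returns `this`, so the calls can be chained.
    DriveFile attachment = new DriveFile()
        .setId("1a2b3c")                                                 // hypothetical Drive resource ID
        .setTitle("Lab report template")                                 // hypothetical title
        .setAlternateLink("https://drive.google.com/open?id=1a2b3c")     // hypothetical link
        .setThumbnailUrl("https://drive.google.com/thumbnail?id=1a2b3c");

    // The fields are read-only on the API side, but plain getters locally.
    System.out.println(attachment.getTitle() + " -> " + attachment.getAlternateLink());
  }
}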
raindev/testng
src/main/java/org/testng/ITestResult.java
2332
package org.testng; /** * This class describes the result of a test. * * @author Cedric Beust, May 2, 2004 * @since May 2, 2004 * @version $Revision: 721 $, $Date: 2009-05-23 09:55:46 -0700 (Sat, 23 May 2009) $ * */ public interface ITestResult extends IAttributes, Comparable<ITestResult> { // // Test status // public static final int SUCCESS = 1; public static final int FAILURE = 2; public static final int SKIP = 3; public static final int SUCCESS_PERCENTAGE_FAILURE = 4; public static final int STARTED = 16; /** * @return The status of this result, using one of the constants * above. */ public int getStatus(); public void setStatus(int status); /** * @return The test method this result represents. */ public ITestNGMethod getMethod(); /** * @return The parameters this method was invoked with. */ public Object[] getParameters(); public void setParameters(Object[] parameters); /** * @return The test class this object is a result for. */ public IClass getTestClass(); /** * @return The throwable that was thrown while running the * method, or null if no exception was thrown. */ public Throwable getThrowable(); public void setThrowable(Throwable throwable); /** * @return the start date for this test, in milliseconds. */ public long getStartMillis(); /** * @return the end date for this test, in milliseconds. */ public long getEndMillis(); public void setEndMillis(long millis); /** * @return The name of this TestResult, typically identical to the name * of the method. */ public String getName(); /** * @return true if this test run is a SUCCESS */ public boolean isSuccess(); /** * @return The host where this suite was run, or null if it was run locally. The * returned string has the form: host:port */ public String getHost(); /** * @return The instance on which this method was run. */ public Object getInstance(); /** * If this result's related instance implements ITest or uses @Test(testName=...), returns its test name, otherwise returns null. */ public String getTestName(); public String getInstanceName(); /** * @return the {@link ITestContext} for this test result. */ public ITestContext getTestContext(); }
apache-2.0
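As a rough sketch of how the interface above is usually consumed, the listener below reports timing and failures from an ITestResult; it extends TestListenerAdapter (assumed to be available in the same TestNG distribution) and uses only the accessors declared on ITestResult.

import org.testng.ITestResult;
import org.testng.TestListenerAdapter;

public class TimingListener extends TestListenerAdapter {

  @Override
  public void onTestSuccess(ITestResult result) {
    // Duration is derived from the start/end timestamps exposed by ITestResult.
    long millis = result.getEndMillis() - result.getStartMillis();
    System.out.println(result.getName() + " passed in " + millis + " ms");
  }

  @Override
  public void onTestFailure(ITestResult result) {
    // getStatus() returns one of the int constants declared on the interface (SUCCESS, FAILURE, ...).
    System.out.println(result.getName() + " failed with status " + result.getStatus());
    if (result.getThrowable() != null) {
      result.getThrowable().printStackTrace();
    }
  }
}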
lannka/amphtml
extensions/amp-brightcove/0.1/test/test-amp-brightcove.js
10948
/** * Copyright 2015 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import '../amp-brightcove'; import * as consent from '../../../../src/consent'; import {BaseElement} from '../../../../src/base-element'; import {CONSENT_POLICY_STATE} from '../../../../src/consent-state'; import {CommonSignals} from '../../../../src/common-signals'; import {VideoEvents} from '../../../../src/video-interface'; import { createElementWithAttributes, whenUpgradedToCustomElement, } from '../../../../src/dom'; import {listenOncePromise} from '../../../../src/event-helper'; import {macroTask} from '../../../../testing/yield'; import {parseUrlDeprecated} from '../../../../src/url'; import {user} from '../../../../src/log'; describes.realWin( 'amp-brightcove', { amp: { extensions: ['amp-brightcove'], runtimeOn: true, }, }, (env) => { let win, doc; beforeEach(() => { win = env.win; doc = win.document; // make sync env.sandbox .stub(BaseElement.prototype, 'mutateElement') .callsFake((mutator) => { mutator(); }); }); async function getBrightcoveBuild(attributes) { const element = createElementWithAttributes(doc, 'amp-brightcove', { width: '111', height: '222', ...attributes, }); doc.body.appendChild(element); await whenUpgradedToCustomElement(element); await element.whenBuilt(); return element; } async function getBrightcove(attributes) { const element = await getBrightcoveBuild(attributes); const impl = await element.getImpl(false); await element.signals().whenSignal(CommonSignals.LOAD_START); // Wait for the promise in layoutCallback() to resolve await macroTask(); try { fakePostMessage(impl, {event: 'ready'}); } catch (_) { // This fails when the iframe is not available (after layoutCallback // fails) in which case awaiting the LOAD_END sigal below will throw. 
} await element.signals().whenSignal(CommonSignals.LOAD_END); return element; } function fakePostMessage(impl, info) { impl.handlePlayerMessage_({ origin: 'https://players.brightcove.net', source: impl.element.querySelector('iframe').contentWindow, data: JSON.stringify(info), }); } // https://go.amp.dev/issue/32706 it('should remove `dock`', async () => { const warn = env.sandbox.spy(user(), 'warn'); const element = await getBrightcoveBuild({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', 'dock': '', }); expect(element.hasAttribute('dock')).to.be.false; expect( warn.withArgs( env.sandbox.match.any, env.sandbox.match(/`dock` has been disabled/) ) ).to.have.been.calledOnce; }); // https://go.amp.dev/issue/32706 it('should not warn without `dock`', async () => { const warn = env.sandbox.spy(user(), 'warn'); const element = await getBrightcoveBuild({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', }); expect(element.hasAttribute('dock')).to.be.false; expect(warn).to.not.have.been.called; }); it('renders', () => { return getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe).to.not.be.null; expect(iframe.tagName).to.equal('IFRAME'); expect(iframe.src).to.equal( 'https://players.brightcove.net/1290862519001/default_default' + '/index.html?videoId=ref:amp-test-video&playsinline=true' ); }); }); it('removes iframe after unlayoutCallback', async () => { const bc = await getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', }); const obj = await bc.getImpl(); const iframe = bc.querySelector('iframe'); expect(iframe).to.not.be.null; obj.unlayoutCallback(); expect(bc.querySelector('iframe')).to.be.null; expect(obj.iframe_).to.be.null; }); it('should pass data-param-* attributes to the iframe src', () => { return getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', 'data-param-my-param': 'hello world', }).then((bc) => { const iframe = bc.querySelector('iframe'); const params = parseUrlDeprecated(iframe.src).search.split('&'); expect(params).to.contain('myParam=hello%20world'); }); }); it('should propagate mutated attributes', () => { return getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe.src).to.equal( 'https://players.brightcove.net/1290862519001/default_default' + '/index.html?videoId=ref:amp-test-video&playsinline=true' ); bc.setAttribute('data-account', '12345'); bc.setAttribute('data-video-id', 'abcdef'); bc.mutatedAttributesCallback({ 'data-account': '12345', 'data-video-id': 'abcdef', }); expect(iframe.src).to.equal( 'https://players.brightcove.net/' + '12345/default_default/index.html?videoId=abcdef&playsinline=true' ); }); }); it('should give precedence to playlist id', () => { return getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', 'data-playlist-id': 'ref:test-playlist', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe.src).to.contain('playlistId=ref:test-playlist'); expect(iframe.src).not.to.contain('videoId'); }); }); it('should allow both playlist and video id to be unset', () => { return getBrightcove({ 'data-account': '1290862519001', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe.src).not.to.contain('&playlistId'); 
expect(iframe.src).not.to.contain('&videoId'); }); }); it('should pass referrer', () => { return getBrightcove({ 'data-account': '1290862519001', 'data-referrer': 'COUNTER', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe.src).to.contain('referrer=1'); }); }); it('should force playsinline', () => { return getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', 'data-param-playsinline': 'false', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe.src).to.contain('playsinline=true'); }); }); it('should forward events', async () => { const bc = await getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', }); const impl = await bc.getImpl(); return Promise.resolve() .then(() => { const p = listenOncePromise(bc, VideoEvents.LOAD); fakePostMessage(impl, {event: 'ready', muted: false, playing: false}); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.LOADEDMETADATA); fakePostMessage(impl, { event: 'loadedmetadata', muted: false, playing: false, }); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.AD_START); fakePostMessage(impl, { event: 'ads-ad-started', muted: false, playing: false, }); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.AD_END); fakePostMessage(impl, { event: 'ads-ad-ended', muted: false, playing: false, }); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.PLAYING); fakePostMessage(impl, { event: 'playing', muted: false, playing: true, }); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.MUTED); fakePostMessage(impl, { event: 'volumechange', muted: true, playing: true, }); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.UNMUTED); fakePostMessage(impl, { event: 'volumechange', muted: false, playing: true, }); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.PAUSE); fakePostMessage(impl, {event: 'pause', muted: false, playing: false}); return p; }) .then(() => { const p = listenOncePromise(bc, VideoEvents.ENDED); fakePostMessage(impl, {event: 'ended', muted: false, playing: false}); return p; }); }); it('should propagate consent state to iframe', () => { env.sandbox .stub(consent, 'getConsentPolicyState') .resolves(CONSENT_POLICY_STATE.SUFFICIENT); env.sandbox .stub(consent, 'getConsentPolicySharedData') .resolves({a: 1, b: 2}); env.sandbox.stub(consent, 'getConsentPolicyInfo').resolves('abc'); return getBrightcove({ 'data-account': '1290862519001', 'data-video-id': 'ref:amp-test-video', 'data-block-on-consent': '_till_accepted', }).then((bc) => { const iframe = bc.querySelector('iframe'); expect(iframe.src).to.contain( `ampInitialConsentState=${CONSENT_POLICY_STATE.SUFFICIENT}` ); expect(iframe.src).to.contain( `ampConsentSharedData=${encodeURIComponent( JSON.stringify({a: 1, b: 2}) )}` ); expect(iframe.src).to.contain('ampInitialConsentValue=abc'); }); }); } );
apache-2.0
zachjanicki/osf.io
website/views.py
7843
# -*- coding: utf-8 -*- import logging import itertools import math import urllib import httplib as http from modularodm import Q from modularodm.exceptions import NoResultsFound from flask import request from framework import utils from framework import sentry from framework.auth.core import User from framework.flask import redirect # VOL-aware redirect from framework.routing import proxy_url from framework.exceptions import HTTPError from framework.auth.forms import SignInForm from framework.forms import utils as form_utils from framework.auth.forms import RegistrationForm from framework.auth.forms import ResetPasswordForm from framework.auth.forms import ForgotPasswordForm from framework.auth.decorators import must_be_logged_in from website.models import Guid from website.models import Node, Institution from website.institutions.views import view_institution from website.util import sanitize from website.project import model from website.util import permissions from website.project import new_bookmark_collection logger = logging.getLogger(__name__) def _render_node(node, auth=None): """ :param node: :return: """ perm = None # NOTE: auth.user may be None if viewing public project while not # logged in if auth and auth.user and node.get_permissions(auth.user): perm_list = node.get_permissions(auth.user) perm = permissions.reduce_permissions(perm_list) return { 'title': node.title, 'id': node._primary_key, 'url': node.url, 'api_url': node.api_url, 'primary': node.primary, 'date_modified': utils.iso8601format(node.date_modified), 'category': node.category, 'permissions': perm, # A string, e.g. 'admin', or None, 'archiving': node.archiving, } def _render_nodes(nodes, auth=None, show_path=False): """ :param nodes: :return: """ ret = { 'nodes': [ _render_node(node, auth) for node in nodes ], 'show_path': show_path } return ret def index(): try: #TODO : make this way more robust inst = Institution.find_one(Q('domains', 'eq', request.host.lower())) inst_dict = view_institution(inst._id) inst_dict.update({ 'home': False, 'institution': True, 'redirect_url': '/institutions/{}/'.format(inst._id) }) return inst_dict except NoResultsFound: pass return {'home': True} def find_bookmark_collection(user): bookmark_collection = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id)) if bookmark_collection.count() == 0: new_bookmark_collection(user) return bookmark_collection[0] @must_be_logged_in def dashboard(auth): user = auth.user dashboard_folder = find_bookmark_collection(user) dashboard_id = dashboard_folder._id return {'addons_enabled': user.get_addon_names(), 'dashboard_id': dashboard_id, } def validate_page_num(page, pages): if page < 0 or (pages and page >= pages): raise HTTPError(http.BAD_REQUEST, data=dict( message_long='Invalid value for "page".' )) def paginate(items, total, page, size): pages = math.ceil(total / float(size)) validate_page_num(page, pages) start = page * size paginated_items = itertools.islice(items, start, start + size) return paginated_items, pages @must_be_logged_in def watched_logs_get(**kwargs): user = kwargs['auth'].user try: page = int(request.args.get('page', 0)) except ValueError: raise HTTPError(http.BAD_REQUEST, data=dict( message_long='Invalid value for "page".' )) try: size = int(request.args.get('size', 10)) except ValueError: raise HTTPError(http.BAD_REQUEST, data=dict( message_long='Invalid value for "size".' 
)) total = sum(1 for x in user.get_recent_log_ids()) paginated_logs, pages = paginate(user.get_recent_log_ids(), total, page, size) logs = (model.NodeLog.load(id) for id in paginated_logs) return { "logs": [serialize_log(log) for log in logs], "total": total, "pages": pages, "page": page } def serialize_log(node_log, auth=None, anonymous=False): '''Return a dictionary representation of the log.''' return { 'id': str(node_log._primary_key), 'user': node_log.user.serialize() if isinstance(node_log.user, User) else {'fullname': node_log.foreign_user}, 'contributors': [node_log._render_log_contributor(c) for c in node_log.params.get("contributors", [])], 'action': node_log.action, 'params': sanitize.unescape_entities(node_log.params), 'date': utils.iso8601format(node_log.date), 'node': node_log.original_node.serialize(auth) if node_log.original_node else None, 'anonymous': anonymous } def reproducibility(): return redirect('/ezcuj/wiki') def registration_form(): return form_utils.jsonify(RegistrationForm(prefix='register')) def signin_form(): return form_utils.jsonify(SignInForm()) def forgot_password_form(): return form_utils.jsonify(ForgotPasswordForm(prefix='forgot_password')) def reset_password_form(): return form_utils.jsonify(ResetPasswordForm()) # GUID ### def _build_guid_url(base, suffix=None): url = '/'.join([ each.strip('/') for each in [base, suffix] if each ]) if not isinstance(url, unicode): url = url.decode('utf-8') return u'/{0}/'.format(url) def resolve_guid(guid, suffix=None): """Load GUID by primary key, look up the corresponding view function in the routing table, and return the return value of the view function without changing the URL. :param str guid: GUID primary key :param str suffix: Remainder of URL after the GUID :return: Return value of proxied view function """ # Look up GUID guid_object = Guid.load(guid) if guid_object: # verify that the object implements a GuidStoredObject-like interface. If a model # was once GuidStoredObject-like but that relationship has changed, it's # possible to have referents that are instances of classes that don't # have a deep_url attribute or otherwise don't behave as # expected. if not hasattr(guid_object.referent, 'deep_url'): sentry.log_message( 'Guid `{}` resolved to an object with no deep_url'.format(guid) ) raise HTTPError(http.NOT_FOUND) referent = guid_object.referent if referent is None: logger.error('Referent of GUID {0} not found'.format(guid)) raise HTTPError(http.NOT_FOUND) if not referent.deep_url: raise HTTPError(http.NOT_FOUND) url = _build_guid_url(urllib.unquote(referent.deep_url), suffix) return proxy_url(url) # GUID not found; try lower-cased and redirect if exists guid_object_lower = Guid.load(guid.lower()) if guid_object_lower: return redirect( _build_guid_url(guid.lower(), suffix) ) # GUID not found raise HTTPError(http.NOT_FOUND) ##### Redirects ##### # Redirect /about/ to OSF wiki page # https://github.com/CenterForOpenScience/osf.io/issues/3862 # https://github.com/CenterForOpenScience/community/issues/294 def redirect_about(**kwargs): return redirect('https://osf.io/4znzp/wiki/home/') def redirect_howosfworks(**kwargs): return redirect('/getting-started/') def redirect_getting_started(**kwargs): return redirect('http://help.osf.io/') def redirect_to_home(): # Redirect to support page return redirect('/')
apache-2.0
knative/test-infra
vendor/github.com/google/go-containerregistry/pkg/authn/authn.go
1409
// Copyright 2018 Google LLC All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package authn // AuthConfig contains authorization information for connecting to a Registry // Inlined what we use from github.com/cli/cli/config/types type AuthConfig struct { Username string `json:"username,omitempty"` Password string `json:"password,omitempty"` Auth string `json:"auth,omitempty"` // IdentityToken is used to authenticate the user and get // an access token for the registry. IdentityToken string `json:"identitytoken,omitempty"` // RegistryToken is a bearer token to be sent to a registry RegistryToken string `json:"registrytoken,omitempty"` } // Authenticator is used to authenticate Docker transports. type Authenticator interface { // Authorization returns the value to use in an http transport's Authorization header. Authorization() (*AuthConfig, error) }
apache-2.0
Stratio/streaming-cep-engine
engine/src/test/java/com/stratio/decision/unit/engine/validator/MongoNameRegularExpressionValidatorTest.java
1609
/** * Copyright (C) 2014 Stratio (http://stratio.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.stratio.decision.unit.engine.validator; import com.stratio.decision.commons.messages.StratioStreamingMessage; import com.stratio.decision.exception.RequestValidationException; import com.stratio.decision.functions.validator.MongoStreamNameValidator; public class MongoNameRegularExpressionValidatorTest extends BaseRegularExpressionValidatorTest { private MongoStreamNameValidator mongoStreamNameValidator; @Override public void setUp() { mongoStreamNameValidator = new MongoStreamNameValidator(); } @Override public void test(StratioStreamingMessage message) throws RequestValidationException { mongoStreamNameValidator.validate(message); } @Override public String[] getGoodStrings() { return new String[] { "test_test$etstsdd", "&&&&", "$$$$", "\n\n\n" }; } @Override public String[] getBadStrings() { return new String[] { "*test", "test*", "test test", ">><<<<>", "_____|" }; } }
apache-2.0
MarkDacek/commons-lang
src/test/java/org/apache/commons/lang3/NotImplementedExceptionTest.java
2682
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.lang3; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import org.junit.jupiter.api.Test; /** * Unit tests {@link org.apache.commons.lang3.NotImplementedException}. */ public class NotImplementedExceptionTest { @Test public void testConstructors() { final Throwable nested = new RuntimeException(); final String message = "Not Implemented"; final String code = "CODE"; NotImplementedException nie = new NotImplementedException(message); assertCorrect("Issue in (String)", nie, message, null, null); nie = new NotImplementedException(nested); assertCorrect("Issue in (Throwable)", nie, nested.toString(), nested, null); nie = new NotImplementedException(message, nested); assertCorrect("Issue in (String, Throwable)", nie, message, nested, null); nie = new NotImplementedException(message, code); assertCorrect("Issue in (String, String)", nie, message, null, code); nie = new NotImplementedException(nested, code); assertCorrect("Issue in (Throwable, String)", nie, nested.toString(), nested, code); nie = new NotImplementedException(message, nested, code); assertCorrect("Issue in (String, Throwable, String)", nie, message, nested, code); } private void assertCorrect(final String assertMessage, final NotImplementedException nie, final String message, final Throwable nested, final String code) { assertNotNull(nie, assertMessage + ": target is null"); assertEquals(message, nie.getMessage(), assertMessage + ": Message not equal"); assertEquals(nested, nie.getCause(), assertMessage + ": Nested throwable not equal"); assertEquals(code, nie.getCode(), assertMessage + ": Code not equal"); } }
apache-2.0
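For context, the (String, String) constructor and getCode() exercised in the test above are what application code typically uses for stubbed-out features; a small hedged sketch follows (the service class, method, and code value are invented for illustration).

import org.apache.commons.lang3.NotImplementedException;

public class ReportService {

  /** CSV export is planned but not written yet, so callers get a precise failure. */
  public byte[] exportCsv(String reportId) {
    // Message plus machine-readable code, matching the (String, String) constructor above.
    throw new NotImplementedException("CSV export is not implemented yet", "REPORT-CSV");
  }

  public static void main(String[] args) {
    try {
      new ReportService().exportCsv("weekly");
    } catch (NotImplementedException e) {
      System.out.println(e.getMessage() + " (code=" + e.getCode() + ")");
    }
  }
}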
Qining/gapid
cmd/gapit/stresstest.go
2609
// Copyright (C) 2017 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package main import ( "context" "flag" "math/rand" "sync" "time" "github.com/google/gapid/core/app" "github.com/google/gapid/core/app/crash" "github.com/google/gapid/core/event/task" "github.com/google/gapid/core/log" "github.com/google/gapid/gapis/api" "github.com/google/gapid/gapis/service" "github.com/google/gapid/gapis/service/path" ) type stresstestVerb struct{ StressTestFlags } func init() { app.AddVerb(&app.Verb{ Name: "stress-test", ShortHelp: "Performs evil things on GAPIS to try to break it", Action: &stresstestVerb{}, }) } func (verb *stresstestVerb) Run(ctx context.Context, flags flag.FlagSet) error { if flags.NArg() != 1 { app.Usage(ctx, "Exactly one gfx trace file expected, got %d", flags.NArg()) return nil } client, c, err := getGapisAndLoadCapture(ctx, verb.Gapis, verb.Gapir, flags.Arg(0), verb.CaptureFileFlags) if err != nil { return err } defer client.Close() boxedCapture, err := client.Get(ctx, c.Path(), nil) if err != nil { return log.Err(ctx, err, "Failed to load the capture") } count := int(boxedCapture.(*service.Capture).NumCommands) wg := sync.WaitGroup{} for l := 0; l < 10; l++ { for i := 0; i < 10000; i++ { at := uint64(rand.Intn(count - 1)) duration := time.Second + time.Duration(rand.Intn(int(time.Second*10))) wg.Add(1) const ( getStateAfter = iota getMesh getCount ) method := rand.Intn(getCount) crash.Go(func() { defer wg.Done() ctx, _ := task.WithTimeout(ctx, duration) switch method { case getStateAfter: boxedTree, err := client.Get(ctx, c.Command(at).StateAfter().Tree().Path(), nil) if err == nil { tree := boxedTree.(*service.StateTree) client.Get(ctx, tree.Root.Path(), nil) } case getMesh: boxedMesh, err := client.Get(ctx, c.Command(at).Mesh(path.NewMeshOptions(true)).Path(), nil) if err == nil { mesh := boxedMesh.(*api.Mesh) _ = mesh } } }) } wg.Wait() } return nil }
apache-2.0
rabipanda/tensorflow
tensorflow/contrib/data/python/kernel_tests/filter_dataset_op_test.py
7371
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the experimental input pipeline ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.contrib.data.python.kernel_tests import dataset_serialization_test_base from tensorflow.contrib.data.python.ops import dataset_ops from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import sparse_tensor from tensorflow.python.ops import array_ops from tensorflow.python.ops import functional_ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import test class FilterDatasetTest(test.TestCase): def testFilterDataset(self): components = ( np.arange(7, dtype=np.int64), np.array([[1, 2, 3]], dtype=np.int64) * np.arange( 7, dtype=np.int64)[:, np.newaxis], np.array(37.0, dtype=np.float64) * np.arange(7) ) count = array_ops.placeholder(dtypes.int64, shape=[]) modulus = array_ops.placeholder(dtypes.int64) def _map_fn(x, y, z): return math_ops.square(x), math_ops.square(y), math_ops.square(z) iterator = ( dataset_ops.Dataset.from_tensor_slices(components).map(_map_fn) .repeat(count) .filter(lambda x, _y, _z: math_ops.equal(math_ops.mod(x, modulus), 0)) .make_initializable_iterator()) init_op = iterator.initializer get_next = iterator.get_next() self.assertEqual([c.shape[1:] for c in components], [t.shape for t in get_next]) with self.test_session() as sess: # Test that we can dynamically feed a different modulus value for each # iterator. def do_test(count_val, modulus_val): sess.run(init_op, feed_dict={count: count_val, modulus: modulus_val}) for _ in range(count_val): for i in [x for x in range(7) if x**2 % modulus_val == 0]: result = sess.run(get_next) for component, result_component in zip(components, result): self.assertAllEqual(component[i]**2, result_component) with self.assertRaises(errors.OutOfRangeError): sess.run(get_next) do_test(14, 2) do_test(4, 18) # Test an empty dataset. 
do_test(0, 1) def testFilterRange(self): dataset = dataset_ops.Dataset.range(100).filter( lambda x: math_ops.not_equal(math_ops.mod(x, 3), 2)) iterator = dataset.make_one_shot_iterator() get_next = iterator.get_next() with self.test_session() as sess: self.assertEqual(0, sess.run(get_next)) self.assertEqual(1, sess.run(get_next)) self.assertEqual(3, sess.run(get_next)) def testFilterDict(self): iterator = (dataset_ops.Dataset.range(10) .map(lambda x: {"foo": x * 2, "bar": x ** 2}) .filter(lambda d: math_ops.equal(d["bar"] % 2, 0)) .map(lambda d: d["foo"] + d["bar"]) .make_initializable_iterator()) init_op = iterator.initializer get_next = iterator.get_next() with self.test_session() as sess: sess.run(init_op) for i in range(10): if (i ** 2) % 2 == 0: self.assertEqual(i * 2 + i ** 2, sess.run(get_next)) with self.assertRaises(errors.OutOfRangeError): sess.run(get_next) def testUseStepContainerInFilter(self): input_data = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int64) # Define a predicate that returns true for the first element of # the sequence and not the second, and uses `tf.map_fn()`. def _predicate(xs): squared_xs = functional_ops.map_fn(lambda x: x * x, xs) summed = math_ops.reduce_sum(squared_xs) return math_ops.equal(summed, 1 + 4 + 9) iterator = ( dataset_ops.Dataset.from_tensor_slices([[1, 2, 3], [4, 5, 6]]) .filter(_predicate) .make_initializable_iterator()) init_op = iterator.initializer get_next = iterator.get_next() with self.test_session() as sess: sess.run(init_op) self.assertAllEqual(input_data[0], sess.run(get_next)) with self.assertRaises(errors.OutOfRangeError): sess.run(get_next) def assertSparseValuesEqual(self, a, b): self.assertAllEqual(a.indices, b.indices) self.assertAllEqual(a.values, b.values) self.assertAllEqual(a.dense_shape, b.dense_shape) def testSparse(self): def _map_fn(i): return sparse_tensor.SparseTensorValue( indices=np.array([[0, 0]]), values=(i * np.array([1])), dense_shape=np.array([1, 1])), i def _filter_fn(_, i): return math_ops.equal(i % 2, 0) iterator = ( dataset_ops.Dataset.range(10).map(_map_fn).filter(_filter_fn).map( lambda x, i: x).make_initializable_iterator()) init_op = iterator.initializer get_next = iterator.get_next() with self.test_session() as sess: sess.run(init_op) for i in range(5): actual = sess.run(get_next) self.assertTrue(isinstance(actual, sparse_tensor.SparseTensorValue)) self.assertSparseValuesEqual(actual, _map_fn(i * 2)[0]) with self.assertRaises(errors.OutOfRangeError): sess.run(get_next) class FilterDatasetSerializationTest( dataset_serialization_test_base.DatasetSerializationTestBase): def _build_filter_range_graph(self, div): return dataset_ops.Dataset.range(100).filter( lambda x: math_ops.not_equal(math_ops.mod(x, div), 2)) def testFilterCore(self): div = 3 num_outputs = np.sum([x % 3 is not 2 for x in range(100)]) self.run_core_tests(lambda: self._build_filter_range_graph(div), lambda: self._build_filter_range_graph(div * 2), num_outputs) def _build_filter_dict_graph(self): return dataset_ops.Dataset.range(10).map( lambda x: {"foo": x * 2, "bar": x ** 2}).filter( lambda d: math_ops.equal(d["bar"] % 2, 0)).map( lambda d: d["foo"] + d["bar"]) def testFilterDictCore(self): num_outputs = np.sum([(x**2) % 2 == 0 for x in range(10)]) self.run_core_tests(self._build_filter_dict_graph, None, num_outputs) def _build_sparse_filter(self): def _map_fn(i): return sparse_tensor.SparseTensor( indices=[[0, 0]], values=(i * [1]), dense_shape=[1, 1]), i def _filter_fn(_, i): return math_ops.equal(i % 2, 0) return 
dataset_ops.Dataset.range(10).map(_map_fn).filter(_filter_fn).map( lambda x, i: x) def testSparseCore(self): num_outputs = 5 self.run_core_tests(self._build_sparse_filter, None, num_outputs) if __name__ == "__main__": test.main()
apache-2.0
pperboires/PocDrools
drools-core/src/main/java/org/drools/conflict/AbstractConflictResolver.java
1213
/* * Copyright 2005 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.conflict; import org.drools.spi.Activation; import org.drools.spi.ConflictResolver; /** * Convenience base class for <code>ConflictResolver</code>s. * * * @version $Id: AbstractConflictResolver.java,v 1.1 2004/10/06 13:38:05 * mproctor Exp $ */ public abstract class AbstractConflictResolver implements ConflictResolver { /** * @see ConflictResolver */ public final int compare(final Object existing, final Object adding) { return compare( (Activation) existing, (Activation) adding ); } }
apache-2.0
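To illustrate how the convenience base class above is meant to be extended, here is a sketch of a concrete resolver that orders activations by salience; the getSalience() accessor on Activation is an assumption about this Drools version, and the real distribution ships its own resolver implementations.

package org.drools.conflict;

import org.drools.spi.Activation;

/**
 * Sketch of a resolver that fires higher-salience activations first.
 * Assumes Activation#getSalience() is available; adjust to the actual SPI if it differs.
 */
public class SalienceFirstConflictResolver extends AbstractConflictResolver {

  public int compare(final Activation existing, final Activation adding) {
    // The final compare(Object, Object) in AbstractConflictResolver delegates here.
    return adding.getSalience() - existing.getSalience();
  }
}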
WindCanDie/spark
core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala
4296
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ui import java.util.{Enumeration, Map => JMap} import javax.servlet._ import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper, HttpServletResponse} import scala.collection.JavaConverters._ import org.apache.commons.lang3.StringEscapeUtils import org.apache.spark.{SecurityManager, SparkConf} import org.apache.spark.internal.config.UI._ /** * A servlet filter that implements HTTP security features. The following actions are taken * for every request: * * - perform access control of authenticated requests. * - check request data for disallowed content (e.g. things that could be used to create XSS * attacks). * - set response headers to prevent certain kinds of attacks. * * Request parameters are sanitized so that HTML content is escaped, and disallowed content is * removed. */ private class HttpSecurityFilter( conf: SparkConf, securityMgr: SecurityManager) extends Filter { override def destroy(): Unit = { } override def init(config: FilterConfig): Unit = { } override def doFilter(req: ServletRequest, res: ServletResponse, chain: FilterChain): Unit = { val hreq = req.asInstanceOf[HttpServletRequest] val hres = res.asInstanceOf[HttpServletResponse] hres.setHeader("Cache-Control", "no-cache, no-store, must-revalidate") if (!securityMgr.checkUIViewPermissions(hreq.getRemoteUser())) { hres.sendError(HttpServletResponse.SC_FORBIDDEN, "User is not authorized to access this page.") return } // SPARK-10589 avoid frame-related click-jacking vulnerability, using X-Frame-Options // (see http://tools.ietf.org/html/rfc7034). By default allow framing only from the // same origin, but allow framing for a specific named URI. 
// Example: spark.ui.allowFramingFrom = https://example.com/ val xFrameOptionsValue = conf.getOption("spark.ui.allowFramingFrom") .map { uri => s"ALLOW-FROM $uri" } .getOrElse("SAMEORIGIN") hres.setHeader("X-Frame-Options", xFrameOptionsValue) hres.setHeader("X-XSS-Protection", conf.get(UI_X_XSS_PROTECTION)) if (conf.get(UI_X_CONTENT_TYPE_OPTIONS)) { hres.setHeader("X-Content-Type-Options", "nosniff") } if (hreq.getScheme() == "https") { conf.get(UI_STRICT_TRANSPORT_SECURITY).foreach( hres.setHeader("Strict-Transport-Security", _)) } chain.doFilter(new XssSafeRequest(hreq), res) } } private class XssSafeRequest(req: HttpServletRequest) extends HttpServletRequestWrapper(req) { private val NEWLINE_AND_SINGLE_QUOTE_REGEX = raw"(?i)(\r\n|\n|\r|%0D%0A|%0A|%0D|'|%27)".r private val parameterMap: Map[String, Array[String]] = { super.getParameterMap().asScala.map { case (name, values) => stripXSS(name) -> values.map(stripXSS) }.toMap } override def getParameterMap(): JMap[String, Array[String]] = parameterMap.asJava override def getParameterNames(): Enumeration[String] = { parameterMap.keys.iterator.asJavaEnumeration } override def getParameterValues(name: String): Array[String] = parameterMap.get(name).orNull override def getParameter(name: String): String = { parameterMap.get(name).flatMap(_.headOption).orNull } private def stripXSS(str: String): String = { if (str != null) { // Remove new lines and single quotes, followed by escaping HTML version 4.0 StringEscapeUtils.escapeHtml4(NEWLINE_AND_SINGLE_QUOTE_REGEX.replaceAllIn(str, "")) } else { null } } }
apache-2.0
danilaml/qreal
qrxc/graphicType.cpp
17325
/* Copyright 2007-2016 QReal Research Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "graphicType.h" #include <QtCore/QDebug> #include <qrutils/outFile.h> #include "property.h" #include "label.h" #include "diagram.h" #include "nameNormalizer.h" #include "nodeType.h" #include "edgeType.h" using namespace utils; GraphicType::ContainerProperties::ContainerProperties() : isSortingContainer(false), sizeOfForestalling(4, 0) , sizeOfChildrenForestalling(0), hasMovableChildren(true) , minimizesToChildren(false), maximizesChildren(false) { } GraphicType::GeneralizationProperties::GeneralizationProperties(const QString &name, const QString &overrides) : name(name) { overridePorts = overrides.contains("ports", Qt::CaseInsensitive); overrideLabels = overrides.contains("labels", Qt::CaseInsensitive); overridePictures = overrides.contains("pictures", Qt::CaseInsensitive); if (overrides.contains("all", Qt::CaseInsensitive)) { overridePorts = true; overrideLabels = true; overridePictures = true; } } GraphicType::ResolvingHelper::ResolvingHelper(bool &resolvingFlag) : mResolvingFlag(resolvingFlag) { mResolvingFlag = true; } GraphicType::ResolvingHelper::~ResolvingHelper() { mResolvingFlag = false; } GraphicType::GraphicType(Diagram *diagram) : Type(false, diagram) , mVisible(false) , mWidth(-1) , mHeight(-1) , mCreateChildrenFromMenu(false) , mResolving(false) { } GraphicType::~GraphicType() { foreach (Label *label, mLabels) { delete label; } } void GraphicType::copyFields(GraphicType *type) const { Type::copyFields(type); type->mElement = mElement; type->mGraphics = mGraphics; type->mHeight = mHeight; for (Label *label : mLabels) { type->mLabels.append(label->clone()); } type->mLogic = mLogic; type->mParents = mParents; type->mVisible = mVisible; type->mWidth = mWidth; type->mContainerProperties = mContainerProperties; type->mContains = mContains; type->mExplosions = mExplosions; } void GraphicType::copyLabels(GraphicType *parent) { for (Label *label : parent->mLabels) { mLabels.append(label->clone()); } } QStringList GraphicType::immediateParents() const { QStringList result; for (const GeneralizationProperties &generalization : mParents) { result << generalization.name; } return result; } void GraphicType::generateCommonData(OutFile &out) const { generateName(out); generateFriendlyName(out); generateDiagram(out); generateDescription(out); generateLabels(out); } void GraphicType::generateLabels(OutFile &out) const { for (const Label *label : mLabels) { label->generateCodeForConstructor(out); } } bool GraphicType::init(const QDomElement &element, const QString &context) { mElement = element; if (Type::init(element, context)) { mDescription = element.attribute("description", ""); mAbstract = element.attribute("abstract", ""); mLogic = element.firstChildElement("logic"); if (mLogic.isNull()) { qDebug() << "ERROR: can't find logic tag of graphic type"; return false; } mGraphics = element.firstChildElement("graphics"); return initRoles() && initParents() && initProperties() && initDividability() && 
initContainers() && initGraphics() && initLabels() && initPossibleEdges() && initPortTypes() && initCreateChildrenFromMenu() && initContainerProperties() && initExplosions(); } return false; } bool GraphicType::initParents() { QDomElement parentsElement = mLogic.firstChildElement("generalizations"); if (parentsElement.isNull()) { return true; } for (QDomElement parentElement = parentsElement.firstChildElement("parent") ; !parentElement.isNull() ; parentElement = parentElement.nextSiblingElement("parent")) { const QString parentName = parentElement.attribute("parentName"); if (parentName.isEmpty()) { qWarning() << "ERROR: anonymous parent of node" << qualifiedName(); return false; } const QString overrides = parentElement.attribute("overrides"); for (const auto &parent : mParents) { if (parent.name == parentName) { qWarning() << "ERROR: parent of node" << qualifiedName() << "duplicated"; return false; } } mParents.append({parentName, overrides}); } return true; } bool GraphicType::initProperties() { initRoleProperties(); const QDomElement propertiesElement = mLogic.firstChildElement("properties"); if (propertiesElement.isNull()) { return true; } for (QDomElement propertyElement = propertiesElement.firstChildElement("property") ; !propertyElement.isNull() ; propertyElement = propertyElement.nextSiblingElement("property")) { Property *property = new Property(); if (!property->init(propertyElement)) { delete property; continue; } if (!addProperty(property, "")) { return false; } } return true; } bool GraphicType::initFieldList(const QString &listName, const QString &listElementName , QStringList &resultingList, const QString &fieldName, const bool isNeedToNormalizeAtt) const { QDomElement containerElement = mLogic.firstChildElement(listName); if (containerElement.isNull()) { return true; } for (QDomElement childElement = containerElement.firstChildElement(listElementName) ; !childElement.isNull() ; childElement = childElement.nextSiblingElement(listElementName)) { QString typeName; if (isNeedToNormalizeAtt) { typeName = NameNormalizer::normalize(childElement.attribute(fieldName)); } else { typeName = childElement.attribute(fieldName); } if (typeName.isEmpty()) { qDebug() << "Error: anonymous " << fieldName << " in the " << listName << " list, in " << qualifiedName(); return false; } if (!resultingList.contains(typeName)) { resultingList.append(typeName); } else { qDebug() << "ERROR: " << fieldName << " in the " << listName << " list in " << qualifiedName() << "duplicated"; return false; } } return true; } bool GraphicType::initTypeList(const QString &listName, const QString &listElementName , QStringList &resultingList) const { return initFieldList(listName, listElementName, resultingList, "type", true); } bool GraphicType::initContainers() { return initTypeList("container", "contains", mContains); } bool GraphicType::initContainerProperties() { QDomElement containerElement = mLogic.firstChildElement("container"); if (containerElement.isNull()) { return true; } QDomElement containerPropertiesElement = containerElement.firstChildElement("properties"); if (containerPropertiesElement.isNull()) { return true; } for (QDomElement childElement = containerPropertiesElement.firstChildElement() ; !childElement.isNull() ; childElement = childElement.nextSiblingElement()) { if (childElement.tagName() == "sortContainer") { mContainerProperties.isSortingContainer = true; } else if (childElement.tagName() == "forestalling") { QString sizeAttribute = childElement.attribute("size"); bool isSizeOk = false; 
mContainerProperties.sizeOfForestalling = toIntVector(sizeAttribute, &isSizeOk); if (!isSizeOk) { return false; } } else if (childElement.tagName() == "childrenForestalling") { QString sizeAttribute = childElement.attribute("size"); bool isSizeOk = false; mContainerProperties.sizeOfChildrenForestalling = sizeAttribute.toInt(&isSizeOk); if (!isSizeOk) { return false; } } else if (childElement.tagName() == "minimizeToChildren") { mContainerProperties.minimizesToChildren = true; } else if (childElement.tagName() == "banChildrenMove") { mContainerProperties.hasMovableChildren = false; } else if (childElement.tagName() == "maximizeChildren") { mContainerProperties.maximizesChildren = true; } } return true; } bool GraphicType::initCreateChildrenFromMenu() { if (!mLogic.elementsByTagName("createChildrenFromMenu").isEmpty()) { mCreateChildrenFromMenu = true; } return true; } bool GraphicType::initPossibleEdges() { const QString listName = "possibleEdges"; const QString listElementName = "possibleEdge"; QDomElement containerElement = mLogic.firstChildElement(listName); if (containerElement.isNull()) { return true; } for (QDomElement childElement = containerElement.firstChildElement(listElementName); !childElement.isNull(); childElement = childElement.nextSiblingElement(listElementName)) { QString beginName = NameNormalizer::normalize(childElement.attribute("beginName")); QString endName = NameNormalizer::normalize(childElement.attribute("endName")); QString temp = childElement.attribute("directed"); if (beginName.isEmpty() || endName.isEmpty() || ((temp != "true") && (temp != "false"))) { qWarning() << "Error: one of attributes is incorrect " << "(perhaps, \"beginName\" or \"endName\" is empty or " << "\"directed\" isn't \"true\" or \"false\".')" << qualifiedName(); return false; } const bool directed = temp == "true"; const QString edgeName = NameNormalizer::normalize(qualifiedName()); QPair<QPair<QString, QString>, QPair<bool, QString> > possibleEdge(qMakePair(beginName, endName) , qMakePair(directed, edgeName)); if (!mPossibleEdges.contains(possibleEdge)) { mPossibleEdges.append(possibleEdge); } else { qDebug() << "ERROR: this edge is already in list " << qualifiedName(); return false; } } return true; } bool GraphicType::initExplosions() { const QDomElement explodesTo = mLogic.firstChildElement("explodesTo"); if (explodesTo.isNull()) { return true; } for (QDomElement targetElement = explodesTo.firstChildElement() ; !targetElement.isNull() ; targetElement = targetElement.nextSiblingElement()) { const QString targetName = targetElement.attribute("type"); if (targetName.isEmpty()) { return false; } const bool isReusable = targetElement.attribute("makeReusable", "false").toLower().trimmed() == "true"; const bool immediateLinkage = targetElement.attribute("requireImmediateLinkage", "false").toLower().trimmed() == "true"; mExplosions[targetName] = qMakePair(isReusable, immediateLinkage); } return true; } bool GraphicType::initLabels() { int count = 1; for (QDomElement element = mGraphics.firstChildElement("labels").firstChildElement("label"); !element.isNull(); element = element.nextSiblingElement("label")) { Label *label = new Label(); if (!initLabel(label, element, count)) { delete label; } else { mLabels.append(label); ++count; } } return true; } bool GraphicType::addProperty(Property *property, const QString &roleName) { QString propertyName = this->propertyName(property, roleName); if (propertyName.isEmpty()) { propertyName = property->name(); } if (mProperties.contains(propertyName)) { // This 
will automaticly dispose property in this branch. QScopedPointer<Property> propertyDisposer(property); Q_UNUSED(propertyDisposer) /// @todo Good for overriding parent properties, but bad in multiple inheritance case /// --- we can allow invalid rhomb inheritance. if (mProperties[propertyName] != property && *mProperties[propertyName] != *property) { qWarning() << "Property" << propertyName << "duplicated with different attributes"; return false; } } else { mProperties[propertyName] = property; } return true; } bool GraphicType::isResolving() const { return mResolving; } bool GraphicType::resolve() { if (mResolvingFinished) { return true; } ResolvingHelper helper(mResolving); Q_UNUSED(helper) /// @todo Ensure that parents are not duplicated. for (const GeneralizationProperties &generalization : mParents) { // Parents are searched in "native" context of a type, so if it was imported links must not be broken. const QString qualifiedParentName = generalization.name.contains("::") ? generalization.name : nativeContext() + "::" + generalization.name; Type *parent = mDiagram->findType(qualifiedParentName); if (parent == nullptr) { // Parent was not found in local context, trying to search in global context parent = mDiagram->findType(generalization.name); if (parent == nullptr) { qDebug() << "ERROR: can't find parent" << generalization.name << "for" << qualifiedName(); return false; } } if (parent->isResolving()) { qDebug() << "ERROR: circular inheritance between" << generalization.name << "and" << qualifiedName(); return false; } if (!parent->isResolved()) { if (!parent->resolve()) { return false; } } for (Property *property : parent->properties().values()) { if (!addProperty(property->clone(), "")) { return false; } } GraphicType * const graphicParent = dynamic_cast<GraphicType*>(parent); if (graphicParent != nullptr) { if (!generalization.overrideLabels) { copyLabels(graphicParent); } if (!generalization.overridePictures) { copyPictures(graphicParent); } NodeType* const nodeParent = dynamic_cast<NodeType*>(parent); if (nodeParent != nullptr) { if (!generalization.overridePorts) { copyPorts(nodeParent); } } for (PossibleEdge pEdge : graphicParent->mPossibleEdges) { mPossibleEdges.append(qMakePair(pEdge.first,qMakePair(pEdge.second.first,name()))); } } } for (int i = 0; i < mLabels.size(); ++i) { mLabels.value(i)->changeIndex(i + 1); } mResolvingFinished = true; return true; } void GraphicType::generateName(OutFile &out) const { const QString normalizedName = NameNormalizer::normalize(qualifiedName()); out() << QString("\t\t\tsetName(\"%1\");\n").arg(normalizedName); /// @todo: I don`t know why we check it here but think it can live for now. This should be moved /// into appropriate place later. for (const QPair<QString, QStringList> &part : mDiagram->paletteGroups()) { for (auto part2 : part.second) { if (part2 == normalizedName && mAbstract == "true" ) { qDebug() << "ERROR! Element" << qualifiedName() << "is abstract."; return; } } } } void GraphicType::generateFriendlyName(OutFile &out) const { const QString actualDisplayedName = displayedName().isEmpty() ? 
name() : displayedName(); out() << QString("\t\t\tsetFriendlyName(QObject::tr(\"%1\"));\n").arg(actualDisplayedName); } void GraphicType::generateDiagram(OutFile &out) const { const QString diagramName = NameNormalizer::normalize(mDiagram->name()); out() << QString("\t\t\tsetDiagram(\"%1\");\n").arg(diagramName); } void GraphicType::generateDescription(OutFile &out) const { out() << "\t\t\tsetDescription(QObject::tr(\"" << mDescription << "\"));\n"; } QStringList GraphicType::sortProperties(const QList<QString> &properties) const { QList<QString> result; QStringList propertiesWithRoles; QStringList pureProperties; for (const QString &property : properties) { if (property.contains("!")) { propertiesWithRoles.append(property); } else { pureProperties.append(property); } } propertiesWithRoles.sort(); pureProperties.sort(); result = propertiesWithRoles + pureProperties; return result; } void GraphicType::generatePropertyData(OutFile &out) const { out() << "\t\tvoid initProperties()\n\t\t{\n"; const QStringList keys = sortProperties(mProperties.keys()); for (const QString &key : keys) { Property *property = mProperties[key]; // Validating property names. if (property->name() == "fromPort" || property->name() == "toPort" || property->name() == "from" || property->name() == "to" || property->name() == "name") { qWarning() << "ERROR: predefined property" << property->name() << "should not appear in metamodel, ignored"; continue; } const QString name = key == property->name() ? property->name() : key; const QString stringConstructor = property->type() == "string" ? "QObject::tr" : "QString::fromUtf8"; out() << QString("\t\t\taddProperty(\"%1\", \"%2\", %3(\"%4\"), QObject::tr(\"%5\"), "\ "QObject::tr(\"%6\"), %7);\n").arg(name, property->type(), stringConstructor , property->defaultValue(), property->displayedName(), property->description() , property->isReferenceProperty() ? "true" : "false"); } out() << "\t\t}\n"; } QString GraphicType::resourceName(const QString &resourceType) const { QString name = NameNormalizer::normalize(qualifiedName()); return name + resourceType + ".sdf"; } QStringList GraphicType::containedTypes() const { return mContains; } const QMap<QString, QPair<bool, bool> > &GraphicType::explosions() const { return mExplosions; } QString GraphicType::boolToString(bool value) const { return value ? "true" : "false"; } QVector<int> GraphicType::toIntVector(const QString &s, bool *isOk) const { const QStringList strings = s.split(','); QVector<int> result(4, 0); if (strings.size() != 4) { *isOk = false; return result; } for (int i = 0; i < 4; i++) { result[i] = strings[i].toInt(isOk); if (!*isOk) return result; } return result; }
apache-2.0
jawi/ApacheAnt.docset
Contents/Resources/Documents/api/org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html
14752
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_27) on Mon Jul 08 20:19:16 CEST 2013 --> <TITLE> CustomJUnit4TestAdapterCache (Apache Ant API) </TITLE> <META NAME="date" CONTENT="2013-07-08"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="CustomJUnit4TestAdapterCache (Apache Ant API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/Constants.html" title="class in org.apache.tools.ant.taskdefs.optional.junit"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/DOMUtil.html" title="class in org.apache.tools.ant.taskdefs.optional.junit"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../index.html?org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="CustomJUnit4TestAdapterCache.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;<A HREF="#nested_classes_inherited_from_class_java.util.AbstractMap">NESTED</A>&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> 
DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> <FONT SIZE="-1"> org.apache.tools.ant.taskdefs.optional.junit</FONT> <BR> Class CustomJUnit4TestAdapterCache</H2> <PRE> java.lang.Object <IMG SRC="../../../../../../../resources/inherit.gif" ALT="extended by ">java.util.AbstractMap&lt;K,V&gt; <IMG SRC="../../../../../../../resources/inherit.gif" ALT="extended by ">java.util.HashMap&lt;org.junit.runner.Description,junit.framework.Test&gt; <IMG SRC="../../../../../../../resources/inherit.gif" ALT="extended by ">junit.framework.JUnit4TestAdapterCache <IMG SRC="../../../../../../../resources/inherit.gif" ALT="extended by "><B>org.apache.tools.ant.taskdefs.optional.junit.CustomJUnit4TestAdapterCache</B> </PRE> <DL> <DT><B>All Implemented Interfaces:</B> <DD>java.io.Serializable, java.lang.Cloneable, java.util.Map&lt;org.junit.runner.Description,junit.framework.Test&gt;</DD> </DL> <HR> <DL> <DT><PRE>public class <B>CustomJUnit4TestAdapterCache</B><DT>extends junit.framework.JUnit4TestAdapterCache</DL> </PRE> <P> Provides a custom implementation of the notifier for a JUnit4TestAdapter so that skipped and ignored tests can be reported to the existing <tt>TestListener</tt>s. <P> <P> <DL> <DT><B>See Also:</B><DD><A HREF="../../../../../../../serialized-form.html#org.apache.tools.ant.taskdefs.optional.junit.CustomJUnit4TestAdapterCache">Serialized Form</A></DL> <HR> <P> <!-- ======== NESTED CLASS SUMMARY ======== --> <A NAME="nested_class_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Nested Class Summary</B></FONT></TH> </TR> </TABLE> &nbsp;<A NAME="nested_classes_inherited_from_class_java.util.AbstractMap"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Nested classes/interfaces inherited from class java.util.AbstractMap</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>java.util.AbstractMap.SimpleEntry&lt;K,V&gt;, java.util.AbstractMap.SimpleImmutableEntry&lt;K,V&gt;</CODE></TD> </TR> </TABLE> &nbsp; <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Method Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>static&nbsp;<A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html" title="class in org.apache.tools.ant.taskdefs.optional.junit">CustomJUnit4TestAdapterCache</A></CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html#getInstance()">getInstance</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;org.junit.runner.notification.RunNotifier</CODE></FONT></TD> <TD><CODE><B><A 
HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html#getNotifier(junit.framework.TestResult)">getNotifier</A></B>(junit.framework.TestResult&nbsp;result)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;org.junit.runner.notification.RunNotifier</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html#getNotifier(junit.framework.TestResult, junit.framework.JUnit4TestAdapter)">getNotifier</A></B>(junit.framework.TestResult&nbsp;result, junit.framework.JUnit4TestAdapter&nbsp;adapter)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_junit.framework.JUnit4TestAdapterCache"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class junit.framework.JUnit4TestAdapterCache</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>asTest, asTestList, getDefault</CODE></TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.util.HashMap"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class java.util.HashMap</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>clear, clone, containsKey, containsValue, entrySet, get, isEmpty, keySet, put, putAll, remove, size, values</CODE></TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.util.AbstractMap"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class java.util.AbstractMap</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>equals, hashCode, toString</CODE></TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>finalize, getClass, notify, notifyAll, wait, wait, wait</CODE></TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.util.Map"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from interface java.util.Map</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>equals, hashCode</CODE></TD> </TR> </TABLE> &nbsp; <P> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Method Detail</B></FONT></TH> </TR> </TABLE> <A NAME="getInstance()"><!-- --></A><H3> getInstance</H3> <PRE> public static <A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html" title="class in 
org.apache.tools.ant.taskdefs.optional.junit">CustomJUnit4TestAdapterCache</A> <B>getInstance</B>()</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getNotifier(junit.framework.TestResult, junit.framework.JUnit4TestAdapter)"><!-- --></A><H3> getNotifier</H3> <PRE> public org.junit.runner.notification.RunNotifier <B>getNotifier</B>(junit.framework.TestResult&nbsp;result, junit.framework.JUnit4TestAdapter&nbsp;adapter)</PRE> <DL> <DD><DL> <DT><B>Overrides:</B><DD><CODE>getNotifier</CODE> in class <CODE>junit.framework.JUnit4TestAdapterCache</CODE></DL> </DD> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getNotifier(junit.framework.TestResult)"><!-- --></A><H3> getNotifier</H3> <PRE> public org.junit.runner.notification.RunNotifier <B>getNotifier</B>(junit.framework.TestResult&nbsp;result)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/Constants.html" title="class in org.apache.tools.ant.taskdefs.optional.junit"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../../../../org/apache/tools/ant/taskdefs/optional/junit/DOMUtil.html" title="class in org.apache.tools.ant.taskdefs.optional.junit"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../index.html?org/apache/tools/ant/taskdefs/optional/junit/CustomJUnit4TestAdapterCache.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="CustomJUnit4TestAdapterCache.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;<A 
HREF="#nested_classes_inherited_from_class_java.util.AbstractMap">NESTED</A>&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;CONSTR&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
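<!-- Editor's illustrative sketch, not part of the generated Javadoc: wiring this
     cache into a JUnit4TestAdapter so that ignored/skipped tests are reported to
     the registered TestListeners. "MyJUnit4Tests" is a placeholder class name and
     the JUnit4TestAdapter(Class, JUnit4TestAdapterCache) constructor is assumed
     from the JUnit 3/4 bridge.

       junit.framework.TestResult result = new junit.framework.TestResult();
       junit.framework.JUnit4TestAdapter adapter =
           new junit.framework.JUnit4TestAdapter(MyJUnit4Tests.class,
               CustomJUnit4TestAdapterCache.getInstance());
       adapter.run(result);
-->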
apache-2.0
AlanJager/zstack
sdk/src/main/java/org/zstack/sdk/iam2/entity/StateEvent.java
83
package org.zstack.sdk.iam2.entity; public enum StateEvent { enable, disable, }
apache-2.0
fbudin69500/ITK
Modules/ThirdParty/GDCM/src/gdcm/Source/DataStructureAndEncodingDefinition/gdcmDataSet.h
11051
/*========================================================================= Program: GDCM (Grassroots DICOM). A DICOM library Copyright (c) 2006-2011 Mathieu Malaterre All rights reserved. See Copyright.txt or http://gdcm.sourceforge.net/Copyright.html for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notice for more information. =========================================================================*/ #ifndef GDCMDATASET_H #define GDCMDATASET_H #include "gdcmDataElement.h" #include "gdcmTag.h" #include "gdcmVR.h" #include "gdcmElement.h" #include "gdcmMediaStorage.h" #include <set> #include <iterator> namespace gdcm_ns { class GDCM_EXPORT DataElementException : public std::exception {}; class PrivateTag; /** * \brief Class to represent a Data Set (which contains Data Elements) * \details A Data Set represents an instance of a real world Information Object * \note * DATA SET: * Exchanged information consisting of a structured set of Attribute values * directly or indirectly related to Information Objects. The value of each * Attribute in a Data Set is expressed as a Data Element. * A collection of Data Elements ordered by increasing Data Element Tag * number that is an encoding of the values of Attributes of a real world * object. * \note * Implementation note. If one do: * DataSet ds; * ds.SetLength(0); * ds.Read(is); * setting length to 0 actually means try to read is as if it was a root * DataSet. Other value are undefined (nested dataset with undefined length) * or defined length (different from 0) means nested dataset with defined * length. * * \warning * a DataSet does not have a Transfer Syntax type, only a File does. 
*/ class GDCM_EXPORT DataSet { friend class CSAHeader; public: typedef std::set<DataElement> DataElementSet; typedef DataElementSet::const_iterator ConstIterator; typedef DataElementSet::iterator Iterator; typedef DataElementSet::size_type SizeType; //typedef typename DataElementSet::iterator iterator; ConstIterator Begin() const { return DES.begin(); } Iterator Begin() { return DES.begin(); } ConstIterator End() const { return DES.end(); } Iterator End() { return DES.end(); } const DataElementSet &GetDES() const { return DES; } DataElementSet &GetDES() { return DES; } void Clear() { DES.clear(); assert( DES.empty() ); } SizeType Size() const { return DES.size(); } void Print(std::ostream &os, std::string const &indent = "") const { // CT_Phillips_JPEG2K_Decompr_Problem.dcm has a SQ of length == 0 //int s = DES.size(); //assert( s ); //std::copy(DES.begin(), DES.end(), // std::ostream_iterator<DataElement>(os, "\n")); ConstIterator it = DES.begin(); for( ; it != DES.end(); ++it) { os << indent << *it << "\n"; } } template <typename TDE> unsigned int ComputeGroupLength(Tag const &tag) const { assert( tag.GetElement() == 0x0 ); const DataElement r(tag); ConstIterator it = DES.find(r); unsigned int res = 0; for( ++it; it != DES.end() && it->GetTag().GetGroup() == tag.GetGroup(); ++it) { assert( it->GetTag().GetElement() != 0x0 ); assert( it->GetTag().GetGroup() == tag.GetGroup() ); res += it->GetLength<TDE>(); } return res; } template <typename TDE> VL GetLength() const { if( DES.empty() ) return 0; assert( !DES.empty() ); VL ll = 0; assert( ll == 0 ); ConstIterator it = DES.begin(); for( ; it != DES.end(); ++it) { assert( !(it->GetLength<TDE>().IsUndefined()) ); if ( it->GetTag() != Tag(0xfffe,0xe00d) ) { ll += it->GetLength<TDE>(); } } return ll; } /// Insert a DataElement in the DataSet. 
/// \warning: Tag need to be >= 0x8 to be considered valid data element void Insert(const DataElement& de) { // FIXME: there is a special case where a dataset can have value < 0x8, see: // $ gdcmdump --csa gdcmData/SIEMENS-JPEG-CorruptFrag.dcm if( de.GetTag().GetGroup() >= 0x0008 || de.GetTag().GetGroup() == 0x4 ) { // prevent user error: if( de.GetTag() == Tag(0xfffe,0xe00d) || de.GetTag() == Tag(0xfffe,0xe0dd) || de.GetTag() == Tag(0xfffe,0xe000) ) { } else { InsertDataElement( de ); } } else { gdcmErrorMacro( "Cannot add element with group < 0x0008 and != 0x4 in the dataset: " << de.GetTag() ); } } /// Replace a dataelement with another one void Replace(const DataElement& de) { ConstIterator it = DES.find(de); if( it != DES.end() ) { // detect loop: gdcmAssertAlwaysMacro( &*it != &de ); DES.erase(it); } DES.insert(de); } /// Only replace a DICOM attribute when it is missing or empty void ReplaceEmpty(const DataElement& de) { ConstIterator it = DES.find(de); if( it != DES.end() && it->IsEmpty() ) { // detect loop: gdcmAssertAlwaysMacro( &*it != &de ); DES.erase(it); } DES.insert(de); } /// Completely remove a dataelement from the dataset SizeType Remove(const Tag& tag) { DataElementSet::size_type count = DES.erase(tag); assert( count == 0 || count == 1 ); return count; } /// Return the DataElement with Tag 't' /// \warning: /// This only search at the 'root level' of the DataSet //DataElement& GetDataElement(const Tag &t) { // DataElement r(t); // Iterator it = DES.find(r); // if( it != DES.end() ) // return *it; // return GetDEEnd(); // } const DataElement& GetDataElement(const Tag &t) const { const DataElement r(t); ConstIterator it = DES.find(r); if( it != DES.end() ) return *it; return GetDEEnd(); } const DataElement& operator[] (const Tag &t) const { return GetDataElement(t); } const DataElement& operator() (uint16_t group, uint16_t element) const { return GetDataElement( Tag(group,element) ); } /// Return the private creator of the private tag 't': std::string GetPrivateCreator(const Tag &t) const; /// Look up if private tag 't' is present in the dataset: bool FindDataElement(const PrivateTag &t) const; /// Return the dataelement const DataElement& GetDataElement(const PrivateTag &t) const; // DUMB: this only search within the level of the current DataSet bool FindDataElement(const Tag &t) const { const DataElement r(t); //ConstIterator it = DES.find(r); if( DES.find(r) != DES.end() ) { return true; } return false; } // WARNING: // This only search at the same level as the DataSet is ! 
const DataElement& FindNextDataElement(const Tag &t) const { const DataElement r(t); ConstIterator it = DES.lower_bound(r); if( it != DES.end() ) return *it; return GetDEEnd(); } /// Returns if the dataset is empty bool IsEmpty() const { return DES.empty(); }; DataSet& operator=(DataSet const &val) { DES = val.DES; return *this; } /* template <typename TOperation> void ExecuteOperation(TOperation & operation) { assert( !DES.empty() ); DataElementSet::iterator it = Begin(); for( ; it != End(); ++it) { DataElement &de = (DataElement&)*it; operation( de ); } } */ template <typename TDE, typename TSwap> std::istream &ReadNested(std::istream &is); template <typename TDE, typename TSwap> std::istream &Read(std::istream &is); template <typename TDE, typename TSwap> std::istream &ReadUpToTag(std::istream &is, const Tag &t, std::set<Tag> const & skiptags); template <typename TDE, typename TSwap> std::istream &ReadUpToTagWithLength(std::istream &is, const Tag &t, std::set<Tag> const & skiptags, VL & length); template <typename TDE, typename TSwap> std::istream &ReadSelectedTags(std::istream &is, const std::set<Tag> & tags, bool readvalues = true); template <typename TDE, typename TSwap> std::istream &ReadSelectedTagsWithLength(std::istream &is, const std::set<Tag> & tags, VL & length, bool readvalues = true); template <typename TDE, typename TSwap> std::istream &ReadSelectedPrivateTags(std::istream &is, const std::set<PrivateTag> & tags, bool readvalues = true); template <typename TDE, typename TSwap> std::istream &ReadSelectedPrivateTagsWithLength(std::istream &is, const std::set<PrivateTag> & tags, VL & length, bool readvalues = true); template <typename TDE, typename TSwap> std::ostream const &Write(std::ostream &os) const; template <typename TDE, typename TSwap> std::istream &ReadWithLength(std::istream &is, VL &length); MediaStorage GetMediaStorage() const; protected: /* GetDEEnd is a Win32 only issue, one cannot use a dllexported * static member data in an inline function, otherwise symbol * will get reported as missing in any dll using the inlined function */ const DataElement& GetDEEnd() const; // This function is not safe, it does not check for the value of the tag // so depending whether we are getting called from a dataset or file meta header // the condition is different void InsertDataElement(const DataElement& de) { //if( de.GetTag() == Tag(0xfffe,0xe00d) ) return; //if( de.GetTag() == Tag(0xfffe,0xe0dd) ) return; #ifndef NDEBUG std::pair<Iterator,bool> pr = DES.insert(de); if( pr.second == false ) { gdcmWarningMacro( "DataElement: " << de << " was already found, skipping duplicate entry.\n" "Original entry kept is: " << *pr.first ); } #else DES.insert(de); #endif assert( de.IsEmpty() || de.GetVL() == de.GetValue().GetLength() ); } protected: // Internal function, that will compute the actual Tag (if found) of // a requested Private Tag (XXXX,YY,"PRIVATE") Tag ComputeDataElement(const PrivateTag & t) const; private: DataElementSet DES; static DataElement DEEnd; friend std::ostream& operator<<(std::ostream &_os, const DataSet &val); }; //----------------------------------------------------------------------------- inline std::ostream& operator<<(std::ostream &os, const DataSet &val) { val.Print(os); return os; } #if defined(SWIGPYTHON) || defined(SWIGCSHARP) || defined(SWIGJAVA) || defined(SWIGPHP) /* * HACK: I need this temp class to be able to manipulate a std::set from python, * swig does not support wrapping of simple class like std::set... 
*/ class SWIGDataSet { public: SWIGDataSet(DataSet &des):Internal(des),it(des.Begin()) {} const DataElement& GetCurrent() const { return *it; } void Start() { it = Internal.Begin(); } bool IsAtEnd() const { return it == Internal.End(); } void Next() { ++it; } private: DataSet & Internal; DataSet::ConstIterator it; }; #endif /* SWIG */ /** * \example SimplePrint.cs * This is a C# example on how to use gdcm::SWIGDataSet */ } // end namespace gdcm_ns #include "gdcmDataSet.txx" #endif //GDCMDATASET_H
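/**
 * Editor's illustrative sketch (not part of the original header): minimal use of
 * the DataSet API declared above. DataElement::SetByteValue() is assumed from
 * gdcmDataElement.h; the Patient's Name tag (0010,0010) is an arbitrary example.
 * \code
 * gdcm::DataSet ds;
 * gdcm::DataElement de( gdcm::Tag(0x0010,0x0010) );
 * de.SetByteValue( "DOE^JOHN", 8 );        // assumed from gdcmDataElement.h
 * ds.Insert( de );                         // groups < 0x0008 (except 0x4) are rejected
 * if( ds.FindDataElement( gdcm::Tag(0x0010,0x0010) ) )
 *   {
 *   const gdcm::DataElement &pn = ds.GetDataElement( gdcm::Tag(0x0010,0x0010) );
 *   (void)pn;
 *   }
 * ds.Remove( gdcm::Tag(0x0010,0x0010) );   // returns the number of erased elements
 * \endcode
 */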
apache-2.0
kohsuke/jboss-marshalling
river/src/test/java/org/jboss/marshalling/river/ReadWriteTest.java
2135
/* * JBoss, Home of Professional Open Source. * Copyright 2014 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.marshalling.river; import org.jboss.marshalling.MarshallerFactory; import org.jboss.marshalling.Marshaller; import org.jboss.marshalling.Marshalling; import org.jboss.marshalling.Unmarshaller; import org.jboss.marshalling.MarshallingConfiguration; import java.io.ByteArrayOutputStream; import java.io.ByteArrayInputStream; /** * */ public abstract class ReadWriteTest { public void run() throws Throwable { final MarshallerFactory factory = new RiverMarshallerFactory(); final MarshallingConfiguration configuration = new MarshallingConfiguration(); configure(configuration); final Marshaller marshaller = factory.createMarshaller(configuration); final ByteArrayOutputStream baos = new ByteArrayOutputStream(10240); marshaller.start(Marshalling.createByteOutput(baos)); runWrite(marshaller); marshaller.finish(); final byte[] bytes = baos.toByteArray(); final Unmarshaller unmarshaller = factory.createUnmarshaller(configuration); unmarshaller.start(Marshalling.createByteInput(new ByteArrayInputStream(bytes))); runRead(unmarshaller); unmarshaller.finish(); } public void configure(MarshallingConfiguration configuration) throws Throwable {} public void runWrite(Marshaller marshaller) throws Throwable {}; public void runRead(Unmarshaller unmarshaller) throws Throwable {}; }
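/*
 * Editor's illustrative sketch (not part of the original file): a minimal
 * subclass showing how the runWrite()/runRead() hooks are meant to be
 * overridden; the payload string and the assertion are arbitrary examples.
 */
class StringRoundTripExample extends ReadWriteTest {
    public void runWrite(final Marshaller marshaller) throws Throwable {
        marshaller.writeObject("hello river");
    }

    public void runRead(final Unmarshaller unmarshaller) throws Throwable {
        final Object o = unmarshaller.readObject();
        if (!"hello river".equals(o)) {
            throw new AssertionError("round trip failed: " + o);
        }
    }
    // Usage: new StringRoundTripExample().run();
}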
apache-2.0
nealwangzi/ZFTabBar
ZFTabBar/ZFTabBar/Class/FirstViewController.h
248
//
//  FirstViewController.h
//  TabBarTest
//
//  Created by 任子丰 on 15/9/9.
//  Copyright (c) 2015年 任子丰. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface FirstViewController : UITableViewController

- (void)refrshUI;

@end
apache-2.0
dneuman64/traffic_control
traffic_monitor/experimental/traffic_monitor/manager/lastkbpsstats.go
756
package manager

import (
	ds "github.com/Comcast/traffic_control/traffic_monitor/experimental/traffic_monitor/deliveryservice"
	"sync"
)

type LastStatsThreadsafe struct {
	stats *ds.LastStats
	m     *sync.RWMutex
}

func NewLastStatsThreadsafe() LastStatsThreadsafe {
	s := ds.NewLastStats()
	return LastStatsThreadsafe{m: &sync.RWMutex{}, stats: &s}
}

// Get returns the last KBPS stats object. Callers MUST NOT modify the object. It is not threadsafe for writing. If the object must be modified, callers must call LastStats.Copy() and modify the copy.
func (o *LastStatsThreadsafe) Get() ds.LastStats {
	o.m.RLock()
	defer o.m.RUnlock()
	return *o.stats
}

func (o *LastStatsThreadsafe) Set(s ds.LastStats) {
	o.m.Lock()
	*o.stats = s
	o.m.Unlock()
}
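// exampleUpdate is an editor's illustrative sketch (not part of the original
// file): it shows the read-copy-update pattern required by the contract on
// Get() above. LastStats.Copy() is assumed from the deliveryservice package,
// as referenced in that comment; mutate stands in for arbitrary caller logic.
func exampleUpdate(o *LastStatsThreadsafe, mutate func(*ds.LastStats)) {
	snapshot := o.Get()        // shared data: must not be modified in place
	updated := snapshot.Copy() // assumed deep-enough copy, per the Get() comment
	mutate(&updated)
	o.Set(updated)
}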
apache-2.0
shaneknapp/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2StrategySuite.scala
1898
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.v2 import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.catalyst.plans.PlanTest import org.apache.spark.sql.connector.expressions.{FieldReference, LiteralValue} import org.apache.spark.sql.connector.expressions.filter.{EqualTo => V2EqualTo, Filter => V2Filter} import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.BooleanType class DataSourceV2StrategySuite extends PlanTest with SharedSparkSession { test("SPARK-36644: Push down boolean column filter") { testTranslateFilter('col.boolean, Some(new V2EqualTo(FieldReference("col"), LiteralValue(true, BooleanType)))) } /** * Translate the given Catalyst [[Expression]] into data source [[V2Filter]] * then verify against the given [[V2Filter]]. */ def testTranslateFilter(catalystFilter: Expression, result: Option[V2Filter]): Unit = { assertResult(result) { DataSourceV2Strategy.translateFilterV2(catalystFilter, true) } } }
apache-2.0
jhrcek/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-backend/src/main/java/org/kie/workbench/common/stunner/bpmn/backend/legacy/profile/impl/DefaultProfileImpl.java
13298
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.bpmn.backend.legacy.profile.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import javax.enterprise.context.ApplicationScoped; import javax.servlet.ServletContext; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import bpsim.impl.BpsimFactoryImpl; import com.fasterxml.jackson.core.JsonParseException; import org.eclipse.bpmn2.Bpmn2Package; import org.eclipse.bpmn2.Definitions; import org.eclipse.bpmn2.DocumentRoot; import org.eclipse.bpmn2.util.Bpmn2ResourceFactoryImpl; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl; import org.eclipse.emf.ecore.xmi.XMLResource; import org.jboss.drools.impl.DroolsFactoryImpl; import org.kie.workbench.common.stunner.bpmn.backend.legacy.Bpmn2JsonMarshaller; import org.kie.workbench.common.stunner.bpmn.backend.legacy.Bpmn2JsonUnmarshaller; import org.kie.workbench.common.stunner.bpmn.backend.legacy.plugin.IDiagramPlugin; import org.kie.workbench.common.stunner.bpmn.backend.legacy.plugin.impl.PluginServiceImpl; import org.kie.workbench.common.stunner.bpmn.backend.legacy.profile.IDiagramProfile; import org.kie.workbench.common.stunner.bpmn.backend.legacy.repository.Repository; import org.kie.workbench.common.stunner.bpmn.backend.legacy.resource.JBPMBpmn2ResourceImpl; import org.kie.workbench.common.stunner.bpmn.backend.legacy.util.ConfigurationProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * The implementation of the default profile for Process Designer. 
* @author Antoine Toulme */ @ApplicationScoped public class DefaultProfileImpl implements IDiagramProfile { private static Logger _logger = LoggerFactory.getLogger(DefaultProfileImpl.class); private Map<String, IDiagramPlugin> _plugins = new LinkedHashMap<String, IDiagramPlugin>(); private String _stencilSet; private String _localHistoryEnabled; private String _localHistoryTimeout; private String _repositoryId; private String _repositoryRoot; private String _repositoryName; private String _repositoryHost; private String _repositoryProtocol; private String _repositorySubdomain; private String _repositoryUsr; private String _repositoryPwd; private String _repositoryGlobalDir; public DefaultProfileImpl() { } public DefaultProfileImpl(ServletContext servletContext) { this(servletContext, true); } public DefaultProfileImpl(ServletContext servletContext, boolean initializeLocalPlugins) { if (initializeLocalPlugins) { initializeLocalPlugins(servletContext); } } public String getTitle() { return "Process Designer"; } public String getStencilSet() { return _stencilSet; } public Collection<String> getStencilSetExtensions() { return Collections.emptyList(); } public Collection<String> getPlugins() { return Collections.unmodifiableCollection(_plugins.keySet()); } private void initializeLocalPlugins(ServletContext context) { Map<String, IDiagramPlugin> registry = PluginServiceImpl.getLocalPluginsRegistry(context); //we read the default.xml file and make sense of it. FileInputStream fileStream = null; try { try { fileStream = new FileInputStream(new StringBuilder(context.getRealPath("/")).append("/"). append(ConfigurationProvider.getInstance().getDesignerContext()).append("profiles").append("/").append("default.xml").toString()); } catch (FileNotFoundException e) { throw new RuntimeException(e); } XMLInputFactory factory = XMLInputFactory.newInstance(); XMLStreamReader reader = factory.createXMLStreamReader(fileStream, "UTF-8"); while (reader.hasNext()) { if (reader.next() == XMLStreamReader.START_ELEMENT) { if ("profile".equals(reader.getLocalName())) { for (int i = 0; i < reader.getAttributeCount(); i++) { if ("stencilset".equals(reader.getAttributeLocalName(i))) { _stencilSet = reader.getAttributeValue(i); } } } else if ("plugin".equals(reader.getLocalName())) { String name = null; for (int i = 0; i < reader.getAttributeCount(); i++) { if ("name".equals(reader.getAttributeLocalName(i))) { name = reader.getAttributeValue(i); } } _plugins.put(name, registry.get(name)); } } } } catch (XMLStreamException e) { _logger.error(e.getMessage(), e); throw new RuntimeException(e); // stop initialization } finally { if (fileStream != null) { try { fileStream.close(); } catch (IOException e) { } } ; } } public String getName() { return "default"; } public String getSerializedModelExtension() { return "bpmn"; } public String getRepositoryId() { return _repositoryId; } public String getRepositoryRoot() { return _repositoryRoot; } public String getRepositoryName() { return _repositoryName; } public String getRepositoryHost() { return _repositoryHost; } public String getRepositoryProtocol() { return _repositoryProtocol; } public String getRepositorySubdomain() { return _repositorySubdomain; } public String getRepositoryUsr() { return _repositoryUsr; } public String getRepositoryPwd() { return _repositoryPwd; } public String getRepositoryGlobalDir() { return _repositoryGlobalDir; } public String getRepositoryGlobalDir(String uuid) { return _repositoryGlobalDir; } public String getLocalHistoryEnabled() { return 
_localHistoryEnabled; } public String getLocalHistoryTimeout() { return _localHistoryTimeout; } @Override public String getStoreSVGonSaveOption() { return "false"; } public Repository getRepository() { return null; } @Override public void init(ServletContext context) { } public IDiagramMarshaller createMarshaller() { return new IDiagramMarshaller() { public String parseModel(String jsonModel, String preProcessingData) { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); //Definitions def; Resource res; try { res = unmarshaller.unmarshall(jsonModel, preProcessingData); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); Map saveMap = new HashMap(); saveMap.put(XMLResource.OPTION_ENCODING, "UTF-8"); saveMap.put(XMLResource.OPTION_DEFER_IDREF_RESOLUTION, true); saveMap.put(XMLResource.OPTION_DISABLE_NOTIFY, true); saveMap.put(XMLResource.OPTION_PROCESS_DANGLING_HREF, XMLResource.OPTION_PROCESS_DANGLING_HREF_RECORD); res.save(outputStream, saveMap); return outputStream.toString(); } catch (JsonParseException e) { _logger.error(e.getMessage(), e); } catch (IOException e) { _logger.error(e.getMessage(), e); } return ""; } public Definitions getDefinitions(String jsonModel, String preProcessingData) { try { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); JBPMBpmn2ResourceImpl res = (JBPMBpmn2ResourceImpl) unmarshaller.unmarshall(jsonModel, preProcessingData); return (Definitions) res.getContents().get(0); } catch (JsonParseException e) { _logger.error(e.getMessage(), e); } catch (IOException e) { _logger.error(e.getMessage(), e); } return null; } public Resource getResource(String jsonModel, String preProcessingData) { try { Bpmn2JsonUnmarshaller unmarshaller = new Bpmn2JsonUnmarshaller(); return (JBPMBpmn2ResourceImpl) unmarshaller.unmarshall(jsonModel, preProcessingData); } catch (JsonParseException e) { _logger.error(e.getMessage(), e); } catch (IOException e) { _logger.error(e.getMessage(), e); } return null; } }; } public IDiagramUnmarshaller createUnmarshaller() { return new IDiagramUnmarshaller() { public String parseModel(String xmlModel, IDiagramProfile profile, String preProcessingData) { Bpmn2JsonMarshaller marshaller = new Bpmn2JsonMarshaller(); marshaller.setProfile(profile); try { return marshaller.marshall(getDefinitions(xmlModel), preProcessingData); } catch (Exception e) { _logger.error(e.getMessage(), e); } return ""; } }; } private Definitions getDefinitions(String xml) { try { DroolsFactoryImpl.init(); BpsimFactoryImpl.init(); ResourceSet resourceSet = new ResourceSetImpl(); resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap() .put(Resource.Factory.Registry.DEFAULT_EXTENSION, new Bpmn2ResourceFactoryImpl()); resourceSet.getPackageRegistry().put("http://www.omg.org/spec/BPMN/20100524/MODEL", Bpmn2Package.eINSTANCE); Resource resource = resourceSet.createResource(URI.createURI("inputStream://dummyUriWithValidSuffix.xml")); InputStream is = new ByteArrayInputStream(xml.getBytes("UTF-8")); resource.load(is, Collections.EMPTY_MAP); resource.load(Collections.EMPTY_MAP); return ((DocumentRoot) resource.getContents().get(0)).getDefinitions(); } catch (Throwable t) { t.printStackTrace(); return null; } } public String getStencilSetURL() { return "/org.jbpm.designer.jBPMDesigner/stencilsets/bpmn2.0/bpmn2.0.json"; } public String getStencilSetNamespaceURL() { return "http://b3mn.org/stencilset/bpmn2.0#"; } public String getStencilSetExtensionURL() { return "http://oryx-editor.org/stencilsets/extensions/bpmncosts-2.0#"; } }
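/*
 * Editor's illustrative sketch (not part of the original class): the typical
 * round trip through the profile's marshaller/unmarshaller pair; the JSON
 * model, BPMN2 XML and pre-processing data are supplied by the caller.
 */
class DefaultProfileRoundTripExample {

    static String jsonToBpmn2Xml(String jsonModel, String preProcessingData) {
        DefaultProfileImpl profile = new DefaultProfileImpl();
        return profile.createMarshaller().parseModel(jsonModel, preProcessingData);
    }

    static String bpmn2XmlToJson(String bpmn2Xml, String preProcessingData) {
        DefaultProfileImpl profile = new DefaultProfileImpl();
        return profile.createUnmarshaller().parseModel(bpmn2Xml, profile, preProcessingData);
    }
}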
apache-2.0
seryl/go-docker-registry
src/registry/config/config.go
474
package config

import (
	"encoding/json"
	"os"

	"registry/api"
	"registry/storage"
)

type Config struct {
	API     *api.Config     `json:"api"`
	Storage *storage.Config `json:"storage"`
}

func New(filename string) (*Config, error) {
	// read in config
	cfgFile, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer cfgFile.Close()

	var cfg Config
	if err := json.NewDecoder(cfgFile).Decode(&cfg); err != nil {
		return nil, err
	}
	return &cfg, nil
}
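// exampleLoad is an editor's illustrative sketch (not part of the original
// file): it shows the intended call pattern for New; "config.json" is a
// placeholder path that a real caller would take from a flag or argument.
func exampleLoad() (*Config, error) {
	cfg, err := New("config.json")
	if err != nil {
		return nil, err
	}
	return cfg, nil
}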
apache-2.0
Rikkola/guvnor
droolsjbpm-ide-common/src/main/java/org/drools/ide/common/client/factconstraints/dataprovider/FieldDataProvider.java
1045
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.ide.common.client.factconstraints.dataprovider; import java.util.Map; public interface FieldDataProvider { public void setFactTYpe(String factType); public void setFieldName(String fieldName); public String[] getArgumentKeys(); public Object getArgumentValue(String key); public void setArgumentValue(String key, Object value); public Map<Object,String> getData(); public Object getDefault(); }
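/*
 * Editor's illustrative sketch (not part of the original interface): a minimal,
 * hypothetical in-memory implementation, included only to show the expected
 * call sequence (setFactTYpe()/setFieldName() first, then argument values,
 * then getData()/getDefault()). It is not an implementation shipped with the
 * project.
 */
class InMemoryFieldDataProvider implements FieldDataProvider {

    private String factType;
    private String fieldName;
    private final Map<String, Object> arguments = new java.util.HashMap<String, Object>();
    private final Map<Object, String> data = new java.util.LinkedHashMap<Object, String>();

    public void setFactTYpe(String factType) { this.factType = factType; }

    public void setFieldName(String fieldName) { this.fieldName = fieldName; }

    public String[] getArgumentKeys() { return arguments.keySet().toArray(new String[arguments.size()]); }

    public Object getArgumentValue(String key) { return arguments.get(key); }

    public void setArgumentValue(String key, Object value) { arguments.put(key, value); }

    public Map<Object, String> getData() { return data; }

    public Object getDefault() { return data.isEmpty() ? null : data.keySet().iterator().next(); }
}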
apache-2.0
mtaal/olingo-odata4-jpa
lib/client-core/src/main/java/org/apache/olingo/client/core/http/HttpMerge.java
1741
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.olingo.client.core.http; import java.net.URI; import org.apache.http.annotation.NotThreadSafe; import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; /** * Class identifying MERGE HTTP method. */ @NotThreadSafe public class HttpMerge extends HttpEntityEnclosingRequestBase { public final static String METHOD_NAME = "MERGE"; /** * Constructor. */ public HttpMerge() { super(); } /** * Constructor. * * @param uri request URI. */ public HttpMerge(final URI uri) { super(); setURI(uri); } /** * Constructor. * * @param uri request URI. * @throws IllegalArgumentException if the uri is invalid. */ public HttpMerge(final String uri) { super(); setURI(URI.create(uri)); } /** * Gets HTTP method name. * * @return HTTP method name. */ @Override public String getMethod() { return METHOD_NAME; } }
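/*
 * Editor's illustrative sketch (not part of the original class): issuing a MERGE
 * request with Apache HttpClient 4.x. The endpoint URI and payload are
 * placeholders, and DefaultHttpClient/StringEntity are assumed from the
 * HttpClient 4.x API.
 */
class HttpMergeUsageExample {

    static org.apache.http.HttpResponse merge(final String uri, final String payload)
            throws java.io.IOException {
        final org.apache.http.client.HttpClient client =
                new org.apache.http.impl.client.DefaultHttpClient();
        final HttpMerge request = new HttpMerge(uri);
        request.setEntity(new org.apache.http.entity.StringEntity(payload, "UTF-8"));
        return client.execute(request);
    }
}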
apache-2.0
rectang/lucy-clownfish
runtime/python/src/clownfish/__init__.py
846
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import clownfish._clownfish
from clownfish._clownfish import *
apache-2.0
mitsuhiko/rust
src/test/run-pass/xc_conditions_client.rs
1358
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// xfail-fast
// aux-build:xc_conditions.rs

extern mod xc_conditions;
use xc_conditions::oops;
use xc_conditions::trouble;

// Tests of cross-crate conditions; the condition is
// defined in lib, and we test various combinations
// of `trap` and `raise` in the client or the lib where
// the condition was defined. Also in test #4 we use
// more complex features (generics, traits) in
// combination with the condition.
//
//                     trap    raise
//                     -------------
// xc_conditions  :    client  lib
// xc_conditions_2:    client  client
// xc_conditions_3:    lib     client
// xc_conditions_4:    client  client (with traits)
//
// the trap=lib, raise=lib case isn't tested since
// there's no cross-crate-ness to test in that case.

pub fn main() {
    oops::cond.trap(|_i| 12345).inside(|| {
        let x = trouble();
        assert_eq!(x, 12345);
    })
}
apache-2.0
Brocade-OpenSource/OpenStack-DNRM-Nova
nova/service.py
15048
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Generic Node base class for all workers that run on hosts.""" import os import random import sys from oslo.config import cfg from nova import conductor from nova import context from nova import exception from nova.openstack.common import importutils from nova.openstack.common import log as logging from nova.openstack.common import rpc from nova.openstack.common import service from nova import servicegroup from nova import utils from nova import version from nova import wsgi LOG = logging.getLogger(__name__) service_opts = [ cfg.IntOpt('report_interval', default=10, help='seconds between nodes reporting state to datastore'), cfg.BoolOpt('periodic_enable', default=True, help='enable periodic tasks'), cfg.IntOpt('periodic_fuzzy_delay', default=60, help='range of seconds to randomly delay when starting the' ' periodic task scheduler to reduce stampeding.' ' (Disable by setting to 0)'), cfg.ListOpt('enabled_apis', default=['ec2', 'osapi_compute', 'metadata'], help='a list of APIs to enable by default'), cfg.ListOpt('enabled_ssl_apis', default=[], help='a list of APIs with enabled SSL'), cfg.StrOpt('ec2_listen', default="0.0.0.0", help='IP address for EC2 API to listen'), cfg.IntOpt('ec2_listen_port', default=8773, help='port for ec2 api to listen'), cfg.IntOpt('ec2_workers', default=None, help='Number of workers for EC2 API service'), cfg.StrOpt('osapi_compute_listen', default="0.0.0.0", help='IP address for OpenStack API to listen'), cfg.IntOpt('osapi_compute_listen_port', default=8774, help='list port for osapi compute'), cfg.IntOpt('osapi_compute_workers', default=None, help='Number of workers for OpenStack API service'), cfg.StrOpt('metadata_manager', default='nova.api.manager.MetadataManager', help='OpenStack metadata service manager'), cfg.StrOpt('metadata_listen', default="0.0.0.0", help='IP address for metadata api to listen'), cfg.IntOpt('metadata_listen_port', default=8775, help='port for metadata api to listen'), cfg.IntOpt('metadata_workers', default=None, help='Number of workers for metadata service'), cfg.StrOpt('compute_manager', default='nova.compute.manager.ComputeManager', help='full class name for the Manager for compute'), cfg.StrOpt('console_manager', default='nova.console.manager.ConsoleProxyManager', help='full class name for the Manager for console proxy'), cfg.StrOpt('cert_manager', default='nova.cert.manager.CertManager', help='full class name for the Manager for cert'), cfg.StrOpt('network_manager', default='nova.network.manager.VlanManager', help='full class name for the Manager for network'), cfg.StrOpt('scheduler_manager', default='nova.scheduler.manager.SchedulerManager', help='full class name for the Manager for scheduler'), cfg.IntOpt('service_down_time', default=60, help='maximum time since 
last check-in for up service'), ] CONF = cfg.CONF CONF.register_opts(service_opts) CONF.import_opt('host', 'nova.netconf') class Service(service.Service): """Service object for binaries running on hosts. A service takes a manager and enables rpc by listening to queues based on topic. It also periodically runs tasks on the manager and reports it state to the database services table. """ def __init__(self, host, binary, topic, manager, report_interval=None, periodic_enable=None, periodic_fuzzy_delay=None, periodic_interval_max=None, db_allowed=True, *args, **kwargs): super(Service, self).__init__() self.host = host self.binary = binary self.topic = topic self.manager_class_name = manager # NOTE(russellb) We want to make sure to create the servicegroup API # instance early, before creating other things such as the manager, # that will also create a servicegroup API instance. Internally, the # servicegroup only allocates a single instance of the driver API and # we want to make sure that our value of db_allowed is there when it # gets created. For that to happen, this has to be the first instance # of the servicegroup API. self.servicegroup_api = servicegroup.API(db_allowed=db_allowed) manager_class = importutils.import_class(self.manager_class_name) self.manager = manager_class(host=self.host, *args, **kwargs) self.report_interval = report_interval self.periodic_enable = periodic_enable self.periodic_fuzzy_delay = periodic_fuzzy_delay self.periodic_interval_max = periodic_interval_max self.saved_args, self.saved_kwargs = args, kwargs self.backdoor_port = None self.conductor_api = conductor.API(use_local=db_allowed) self.conductor_api.wait_until_ready(context.get_admin_context()) def start(self): verstr = version.version_string_with_package() LOG.audit(_('Starting %(topic)s node (version %(version)s)'), {'topic': self.topic, 'version': verstr}) self.basic_config_check() self.manager.init_host() self.model_disconnected = False ctxt = context.get_admin_context() try: self.service_ref = self.conductor_api.service_get_by_args(ctxt, self.host, self.binary) self.service_id = self.service_ref['id'] except exception.NotFound: self.service_ref = self._create_service_ref(ctxt) if self.backdoor_port is not None: self.manager.backdoor_port = self.backdoor_port self.conn = rpc.create_connection(new=True) LOG.debug(_("Creating Consumer connection for Service %s") % self.topic) self.manager.pre_start_hook(rpc_connection=self.conn) rpc_dispatcher = self.manager.create_rpc_dispatcher(self.backdoor_port) # Share this same connection for these Consumers self.conn.create_consumer(self.topic, rpc_dispatcher, fanout=False) node_topic = '%s.%s' % (self.topic, self.host) self.conn.create_consumer(node_topic, rpc_dispatcher, fanout=False) self.conn.create_consumer(self.topic, rpc_dispatcher, fanout=True) # Consume from all consumers in a thread self.conn.consume_in_thread() self.manager.post_start_hook() LOG.debug(_("Join ServiceGroup membership for this service %s") % self.topic) # Add service to the ServiceGroup membership group. 
self.servicegroup_api.join(self.host, self.topic, self) if self.periodic_enable: if self.periodic_fuzzy_delay: initial_delay = random.randint(0, self.periodic_fuzzy_delay) else: initial_delay = None self.tg.add_dynamic_timer(self.periodic_tasks, initial_delay=initial_delay, periodic_interval_max= self.periodic_interval_max) def _create_service_ref(self, context): svc_values = { 'host': self.host, 'binary': self.binary, 'topic': self.topic, 'report_count': 0 } service = self.conductor_api.service_create(context, svc_values) self.service_id = service['id'] return service def __getattr__(self, key): manager = self.__dict__.get('manager', None) return getattr(manager, key) @classmethod def create(cls, host=None, binary=None, topic=None, manager=None, report_interval=None, periodic_enable=None, periodic_fuzzy_delay=None, periodic_interval_max=None, db_allowed=True): """Instantiates class and passes back application object. :param host: defaults to CONF.host :param binary: defaults to basename of executable :param topic: defaults to bin_name - 'nova-' part :param manager: defaults to CONF.<topic>_manager :param report_interval: defaults to CONF.report_interval :param periodic_enable: defaults to CONF.periodic_enable :param periodic_fuzzy_delay: defaults to CONF.periodic_fuzzy_delay :param periodic_interval_max: if set, the max time to wait between runs """ if not host: host = CONF.host if not binary: binary = os.path.basename(sys.argv[0]) if not topic: topic = binary.rpartition('nova-')[2] if not manager: manager_cls = ('%s_manager' % binary.rpartition('nova-')[2]) manager = CONF.get(manager_cls, None) if report_interval is None: report_interval = CONF.report_interval if periodic_enable is None: periodic_enable = CONF.periodic_enable if periodic_fuzzy_delay is None: periodic_fuzzy_delay = CONF.periodic_fuzzy_delay service_obj = cls(host, binary, topic, manager, report_interval=report_interval, periodic_enable=periodic_enable, periodic_fuzzy_delay=periodic_fuzzy_delay, periodic_interval_max=periodic_interval_max, db_allowed=db_allowed) return service_obj def kill(self): """Destroy the service object in the datastore.""" self.stop() try: self.conductor_api.service_destroy(context.get_admin_context(), self.service_id) except exception.NotFound: LOG.warn(_('Service killed that has no database entry')) def stop(self): try: self.conn.close() except Exception: pass super(Service, self).stop() def periodic_tasks(self, raise_on_error=False): """Tasks to be run at a periodic interval.""" ctxt = context.get_admin_context() return self.manager.periodic_tasks(ctxt, raise_on_error=raise_on_error) def basic_config_check(self): """Perform basic config checks before starting processing.""" # Make sure the tempdir exists and is writable try: with utils.tempdir(): pass except Exception as e: LOG.error(_('Temporary directory is invalid: %s'), e) sys.exit(1) class WSGIService(object): """Provides ability to launch API from a 'paste' configuration.""" def __init__(self, name, loader=None, use_ssl=False, max_url_len=None): """Initialize, but do not start the WSGI server. :param name: The name of the WSGI server given to the loader. :param loader: Loads the WSGI application using the given name. 
:returns: None """ self.name = name self.manager = self._get_manager() self.loader = loader or wsgi.Loader() self.app = self.loader.load_app(name) self.host = getattr(CONF, '%s_listen' % name, "0.0.0.0") self.port = getattr(CONF, '%s_listen_port' % name, 0) self.workers = getattr(CONF, '%s_workers' % name, None) self.use_ssl = use_ssl self.server = wsgi.Server(name, self.app, host=self.host, port=self.port, use_ssl=self.use_ssl, max_url_len=max_url_len) # Pull back actual port used self.port = self.server.port self.backdoor_port = None def _get_manager(self): """Initialize a Manager object appropriate for this service. Use the service name to look up a Manager subclass from the configuration and initialize an instance. If no class name is configured, just return None. :returns: a Manager instance, or None. """ fl = '%s_manager' % self.name if fl not in CONF: return None manager_class_name = CONF.get(fl, None) if not manager_class_name: return None manager_class = importutils.import_class(manager_class_name) return manager_class() def start(self): """Start serving this service using loaded configuration. Also, retrieve updated port number in case '0' was passed in, which indicates a random port should be used. :returns: None """ if self.manager: self.manager.init_host() self.manager.pre_start_hook() if self.backdoor_port is not None: self.manager.backdoor_port = self.backdoor_port self.server.start() if self.manager: self.manager.post_start_hook() def stop(self): """Stop serving this API. :returns: None """ self.server.stop() def wait(self): """Wait for the service to stop serving this API. :returns: None """ self.server.wait() def process_launcher(): return service.ProcessLauncher() # NOTE(vish): the global launcher is to maintain the existing # functionality of calling service.serve + # service.wait _launcher = None def serve(server, workers=None): global _launcher if _launcher: raise RuntimeError(_('serve() can only be called once')) _launcher = service.launch(server, workers=workers) def wait(): _launcher.wait()
apache-2.0
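The `Service` class above is normally driven from a console-script entry point that calls `Service.create()` and then hands the result to the module-level `serve()`/`wait()` helpers defined at the end of the same file. Below is a minimal, hypothetical sketch of that wiring; the `nova-compute` binary name and the `nova.config.parse_args` bootstrap are assumptions for illustration, not something stated in the record.

```python
import sys

from nova import config   # assumed bootstrap module for nova.conf / CLI flags
from nova import service


def main():
    # Parse configuration before any Service object is built.
    config.parse_args(sys.argv)

    # create() fills in host, topic, manager and the periodic-task settings
    # from configuration; only the binary name is supplied explicitly here.
    server = service.Service.create(binary='nova-compute')

    # serve() hands the server to the global launcher; wait() blocks until
    # the process is asked to stop.
    service.serve(server)
    service.wait()


if __name__ == '__main__':
    main()
```

Because `create()` derives the topic and the manager class from the binary name, the same entry-point shape works for any `nova-*` service.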
apache/jackrabbit-ocm
src/test/java/org/apache/jackrabbit/ocm/manager/collectionconverter/DigesterHashMapTest.java
5619
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.ocm.manager.collectionconverter; import java.util.Map; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.jackrabbit.ocm.DigesterRepositoryTestBase; import org.apache.jackrabbit.ocm.manager.ObjectContentManager; import org.apache.jackrabbit.ocm.testmodel.collection.Element; import org.apache.jackrabbit.ocm.testmodel.collection.HashMapElement; import org.apache.jackrabbit.ocm.testmodel.collection.Main; /** * Test NTCollectionConverterImpl * * @author <a href="mailto:[email protected]">Christophe Lombart</a> */ public class DigesterHashMapTest extends DigesterRepositoryTestBase { public static Test suite() { // All methods starting with "test" will be executed in the test suite. return new TestSuite(DigesterHashMapTest.class); } public void testHashMap() { try { ObjectContentManager ocm = getObjectContentManager(); // -------------------------------------------------------------------------------- // Create and store an object graph in the repository with null hashmap // -------------------------------------------------------------------------------- Main main = new Main(); main.setPath("/test"); main.setText("Main text"); ocm.insert(main); ocm.save(); // -------------------------------------------------------------------------------- // Get the object // -------------------------------------------------------------------------------- main = (Main) ocm.getObject( "/test"); assertTrue("Incorrect text", main.getText().equals("Main text")); assertNull("HashMap is not null", main.getHashMapElement()); // -------------------------------------------------------------------------------- // Update an object graph in the repository // -------------------------------------------------------------------------------- main = new Main(); main.setPath("/test"); main.setText("Main text"); HashMapElement hashMapElement = new HashMapElement(); Element e1 = new Element(); e1.setId("e1"); e1.setText("Element 1"); hashMapElement.addObject(e1); Element e2 = new Element(); e2.setId("e2"); e2.setText("Element 2"); hashMapElement.addObject(e2); main.setHashMapElement(hashMapElement); ocm.update(main); ocm.save(); // -------------------------------------------------------------------------------- // Get the object // -------------------------------------------------------------------------------- main = (Main) ocm.getObject( "/test"); assertNotNull("main.getHashMap() is null", main.getHashMapElement()); assertTrue("Incorrect text", main.getText().equals("Main text")); Map map = (Map) main.getHashMapElement().getObjects(); assertTrue("Incorrect para element", ((Element) map.get("e1")).getText().equals("Element 1")); // 
-------------------------------------------------------------------------------- // Update the object // -------------------------------------------------------------------------------- hashMapElement = new HashMapElement(); e1 = new Element(); e1.setId("e1"); e1.setText("Element 1"); hashMapElement.addObject(e1); e2 = new Element(); e2.setId("e3"); e2.setText("Element 3"); hashMapElement.addObject(e2); Element e3 = new Element(); e3.setId("e4"); e3.setText("Element 4"); hashMapElement.addObject(e3); main.setHashMapElement(hashMapElement); ocm.update(main); ocm.save(); // -------------------------------------------------------------------------------- // Get the object // -------------------------------------------------------------------------------- assertNotNull("main.getElements() is null", main.getHashMapElement()); assertTrue("Incorrect text", main.getText().equals("Main text")); map = (Map) main.getHashMapElement().getObjects(); assertTrue("Incorrect para element", ((Element) map.get("e4")).getText().equals("Element 4")); } catch (Exception e) { e.printStackTrace(); fail("Exception occurs during the unit test : " + e); } } }
apache-2.0
robin13/elasticsearch
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/DataTier.java
6329
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.core; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.IndexSettingProvider; import org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider; import java.util.Arrays; import java.util.HashSet; import java.util.Set; /** * The {@code DataTier} class encapsulates the formalization of the "content", * "hot", "warm", and "cold" tiers as node roles. It contains the * roles themselves as well as helpers for validation and determining if a node * has a tier configured. * * Related: * {@link org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider} */ public class DataTier { public static final String DATA_CONTENT = "data_content"; public static final String DATA_HOT = "data_hot"; public static final String DATA_WARM = "data_warm"; public static final String DATA_COLD = "data_cold"; public static final String DATA_FROZEN = "data_frozen"; public static final Set<String> ALL_DATA_TIERS = new HashSet<>(Arrays.asList(DATA_CONTENT, DATA_HOT, DATA_WARM, DATA_COLD, DATA_FROZEN)); /** * Returns true if the given tier name is a valid tier */ public static boolean validTierName(String tierName) { return DATA_CONTENT.equals(tierName) || DATA_HOT.equals(tierName) || DATA_WARM.equals(tierName) || DATA_COLD.equals(tierName) || DATA_FROZEN.equals(tierName); } /** * Returns true iff the given settings have a data tier setting configured */ public static boolean isExplicitDataTier(Settings settings) { /* * This method can be called before the o.e.n.NodeRoleSettings.NODE_ROLES_SETTING is * initialized. We do not want to trigger initialization prematurely because that will bake * the default roles before plugins have had a chance to register them. Therefore, * to avoid initializing this setting prematurely, we avoid using the actual node roles * setting instance here in favor of the string.
*/ if (settings.hasValue("node.roles")) { return settings.getAsList("node.roles").stream().anyMatch(DataTier::validTierName); } return false; } public static boolean isContentNode(DiscoveryNode discoveryNode) { return discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_CONTENT_NODE_ROLE) || discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_ROLE); } public static boolean isHotNode(DiscoveryNode discoveryNode) { return discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_HOT_NODE_ROLE) || discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_ROLE); } public static boolean isWarmNode(DiscoveryNode discoveryNode) { return discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_WARM_NODE_ROLE) || discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_ROLE); } public static boolean isColdNode(DiscoveryNode discoveryNode) { return discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_COLD_NODE_ROLE) || discoveryNode.getRoles().contains(DiscoveryNodeRole.DATA_ROLE); } public static boolean isFrozenNode(DiscoveryNode discoveryNode) { return isFrozenNode(discoveryNode.getRoles()); } public static boolean isFrozenNode(final Set<DiscoveryNodeRole> roles) { return roles.contains(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE) || roles.contains(DiscoveryNodeRole.DATA_ROLE); } /** * This setting provider injects the setting allocating all newly created indices with * {@code index.routing.allocation.include._tier: "data_hot"} unless the user overrides the * setting while the index is being created (in a create index request for instance) */ public static class DefaultHotAllocationSettingProvider implements IndexSettingProvider { private static final Logger logger = LogManager.getLogger(DefaultHotAllocationSettingProvider.class); @Override public Settings getAdditionalIndexSettings(String indexName, boolean isDataStreamIndex, Settings indexSettings) { Set<String> settings = indexSettings.keySet(); if (settings.contains(DataTierAllocationDecider.INDEX_ROUTING_PREFER)) { // It's okay to put it, it will be removed or overridden by the template/request settings return Settings.builder().put(DataTierAllocationDecider.INDEX_ROUTING_PREFER, DATA_HOT).build(); } else if (settings.stream().anyMatch(s -> s.startsWith(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".")) || settings.stream().anyMatch(s -> s.startsWith(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + ".")) || settings.stream().anyMatch(s -> s.startsWith(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_PREFIX + "."))) { // A different index level require, include, or exclude has been specified, so don't put the setting logger.debug("index [{}] specifies custom index level routing filtering, skipping tier allocation", indexName); return Settings.EMPTY; } else { // Otherwise, put the setting in place by default, the "hot" // tier if the index is part of a data stream, the "content" // tier if it is not. if (isDataStreamIndex) { return Settings.builder().put(DataTierAllocationDecider.INDEX_ROUTING_PREFER, DATA_HOT).build(); } else { return Settings.builder().put(DataTierAllocationDecider.INDEX_ROUTING_PREFER, DATA_CONTENT).build(); } } } } }
apache-2.0
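The javadoc on `DefaultHotAllocationSettingProvider` above describes a three-way decision: respect an explicit tier preference, back off when custom require/include/exclude routing is present, and otherwise default to the hot tier for data-stream backing indices and the content tier for everything else. A language-neutral re-sketch of that branching in Python follows; the literal setting keys are assumptions standing in for `DataTierAllocationDecider.INDEX_ROUTING_PREFER` and the `IndexMetadata` group prefixes, and this is not Elasticsearch code.

```python
# Assumed stand-ins for DataTierAllocationDecider.INDEX_ROUTING_PREFER and the
# IndexMetadata require/exclude/include group prefixes.
TIER_PREFERENCE = "index.routing.allocation.include._tier_preference"
ROUTING_PREFIXES = (
    "index.routing.allocation.require.",
    "index.routing.allocation.exclude.",
    "index.routing.allocation.include.",
)


def additional_index_settings(index_settings, is_data_stream_index):
    keys = set(index_settings)
    if TIER_PREFERENCE in keys:
        # The default is still emitted; the explicit template/request value
        # overrides it later, mirroring the comment in the Java above.
        return {TIER_PREFERENCE: "data_hot"}
    if any(key.startswith(prefix) for key in keys for prefix in ROUTING_PREFIXES):
        # Custom index-level routing filtering wins: inject nothing.
        return {}
    tier = "data_hot" if is_data_stream_index else "data_content"
    return {TIER_PREFERENCE: tier}


# Example: a plain index with no routing settings lands on the content tier.
assert additional_index_settings({}, is_data_stream_index=False) == {
    TIER_PREFERENCE: "data_content"
}
```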
apache/cloudstack-www
source/api/apidocs-4.14/apis/updateVmwareDc.html
7226
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 1.0 Transitional//EN"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <link rel="stylesheet" href="../includes/main.css" type="text/css"> <link rel="shortcut icon" href="../favicon.ico" type="image/x-icon"> <title>Apache CloudStack | The Power Behind Your Cloud</title> </head> <body> <div id="insidetopbg"> <div id="inside_wrapper"> <div class="uppermenu_panel"> <div class="uppermenu_box"></div> </div> <div id="main_master"> <div id="inside_header"> <div class="header_top"> <a class="cloud_logo" href="http://cloudstack.org"></a> <div class="mainemenu_panel"></div> </div> </div> <div id="main_content"> <div class="inside_apileftpanel"> <div class="inside_contentpanel" style="width:930px;"> <div class="api_titlebox"> <div class="api_titlebox_left"> <span> Apache CloudStack 4.14.1.0-SNAPSHOT Root Admin API Reference </span> <p></p> <h1>updateVmwareDc</h1> <p>Updates a VMware datacenter details for a zone</p> </div> <div class="api_titlebox_right"> <a class="api_backbutton" href="../index.html"></a> </div> </div> <div class="api_tablepanel"> <h2>Request parameters</h2> <table class="apitable"> <tr class="hed"> <td style="width:200px;"><strong>Parameter Name</strong></td><td style="width:500px;">Description</td><td style="width:180px;">Required</td> </tr> <tr> <td style="width:200px;"><strong>zoneid</strong></td><td style="width:500px;"><strong>The zone ID</strong></td><td style="width:180px;"><strong>true</strong></td> </tr> <tr> <td style="width:200px;"><i>isrecursive</i></td><td style="width:500px;"><i>Specify if cluster level username/password/url and host level guid need to be updated as well. By default this is true.</i></td><td style="width:180px;"><i>false</i></td> </tr> <tr> <td style="width:200px;"><i>name</i></td><td style="width:500px;"><i>VMware datacenter name.</i></td><td style="width:180px;"><i>false</i></td> </tr> <tr> <td style="width:200px;"><i>password</i></td><td style="width:500px;"><i>The password for specified username.</i></td><td style="width:180px;"><i>false</i></td> </tr> <tr> <td style="width:200px;"><i>username</i></td><td style="width:500px;"><i>The username required to connect to resource.</i></td><td style="width:180px;"><i>false</i></td> </tr> <tr> <td style="width:200px;"><i>vcenter</i></td><td style="width:500px;"><i>The name/IP of vCenter. 
Make sure it is IP address or full qualified domain name for host running vCenter server.</i></td><td style="width:180px;"><i>false</i></td> </tr> </table> </div> <div class="api_tablepanel"> <h2>Response Tags</h2> <table class="apitable"> <tr class="hed"> <td style="width:200px;"><strong>Response Name</strong></td><td style="width:500px;">Description</td> </tr> <tr> <td style="width:200px;"><strong>id</strong></td><td style="width:500px;">The VMware Datacenter ID</td> </tr> <tr> <td style="width:200px;"><strong>name</strong></td><td style="width:500px;">The VMware Datacenter name</td> </tr> <tr> <td style="width:200px;"><strong>vcenter</strong></td><td style="width:500px;">The VMware vCenter name/ip</td> </tr> <tr> <td style="width:200px;"><strong>zoneid</strong></td><td style="width:500px;">the Zone ID associated with this VMware Datacenter</td> </tr> </table> </div> </div> </div> </div> </div> <div id="footer"> <div id="comments_thread"> <script type="text/javascript" src="https://comments.apache.org/show_comments.lua?site=test" async="true"></script> <noscript> <iframe width="930" height="500" src="https://comments.apache.org/iframe.lua?site=test&amp;page=4.2.0/rootadmin"></iframe> </noscript> </div> <div id="footer_mainmaster"> <p> Copyright &copy; 2015 The Apache Software Foundation, Licensed under the <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache License, Version 2.0.</a> <br> Apache, CloudStack, Apache CloudStack, the Apache CloudStack logo, the CloudMonkey logo and the Apache feather logo are trademarks of The Apache Software Foundation. </p> </div> </div> </div> </div> </body> </html>
apache-2.0
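The page above documents only the request parameters and response tags for `updateVmwareDc`; it does not show a call. Below is a hypothetical client-side invocation using Python's `requests`, where the endpoint path, the `apikey`/`signature` scheme and the `sign` helper are assumptions; only the command name and its parameters come from the tables above.

```python
import requests

API_URL = "https://cloudstack.example.com/client/api"  # assumed endpoint


def update_vmware_dc(zoneid, api_key, sign, **optional):
    """optional may carry: name, vcenter, username, password, isrecursive."""
    params = {
        "command": "updateVmwareDc",
        "zoneid": zoneid,          # the only required parameter per the doc
        "response": "json",
        "apikey": api_key,
        **optional,
    }
    params["signature"] = sign(params)   # request signing left to the caller
    resp = requests.get(API_URL, params=params, timeout=30)
    resp.raise_for_status()
    # Per the response tags above, the payload describes id, name, vcenter, zoneid.
    return resp.json()
```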
LosFuzzys/CTFd
CTFd/users.py
2090
from flask import Blueprint, render_template, request, url_for from CTFd.models import Users from CTFd.utils import config from CTFd.utils.decorators import authed_only from CTFd.utils.decorators.visibility import ( check_account_visibility, check_score_visibility, ) from CTFd.utils.helpers import get_errors, get_infos from CTFd.utils.user import get_current_user users = Blueprint("users", __name__) @users.route("/users") @check_account_visibility def listing(): q = request.args.get("q") field = request.args.get("field", "name") if field not in ("name", "affiliation", "website"): field = "name" filters = [] if q: filters.append(getattr(Users, field).like("%{}%".format(q))) users = ( Users.query.filter_by(banned=False, hidden=False) .filter(*filters) .order_by(Users.id.asc()) .paginate(per_page=50) ) args = dict(request.args) args.pop("page", 1) return render_template( "users/users.html", users=users, prev_page=url_for(request.endpoint, page=users.prev_num, **args), next_page=url_for(request.endpoint, page=users.next_num, **args), q=q, field=field, ) @users.route("/profile") @users.route("/user") @authed_only def private(): infos = get_infos() errors = get_errors() user = get_current_user() if config.is_scoreboard_frozen(): infos.append("Scoreboard has been frozen") return render_template( "users/private.html", user=user, account=user.account, infos=infos, errors=errors, ) @users.route("/users/<int:user_id>") @check_account_visibility @check_score_visibility def public(user_id): infos = get_infos() errors = get_errors() user = Users.query.filter_by(id=user_id, banned=False, hidden=False).first_or_404() if config.is_scoreboard_frozen(): infos.append("Scoreboard has been frozen") return render_template( "users/public.html", user=user, account=user.account, infos=infos, errors=errors )
apache-2.0
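The `listing` view above accepts `q` and `field` query parameters and silently falls back to `name` when an unsupported field is requested. A small sketch of exercising that behaviour through Flask's test client, assuming the package's `create_app` factory and a seeded database:

```python
from CTFd import create_app   # assumed application factory

app = create_app()

with app.test_client() as client:
    # "id" is not in ("name", "affiliation", "website"), so the view
    # falls back to filtering on name.
    resp = client.get("/users", query_string={"q": "alice", "field": "id"})
    assert resp.status_code in (200, 302)   # 302 when visibility requires login

    # Explicitly search the affiliation column instead.
    resp = client.get("/users", query_string={"q": "school", "field": "affiliation"})
    assert resp.status_code in (200, 302)
```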
DonLuc/Atajo-TD
provider/node_modules/npm/html/doc/cli/npm-uninstall.html
4572
<!doctype html> <html> <title>npm-uninstall</title> <meta http-equiv="content-type" value="text/html;utf-8"> <link rel="stylesheet" type="text/css" href="../../static/style.css"> <link rel="canonical" href="https://www.npmjs.org/doc/cli/npm-uninstall.html"> <script async=true src="../../static/toc.js"></script> <body> <div id="wrapper"> <h1><a href="../cli/npm-rm.html">npm-rm</a></h1> <p>Remove a package</p> <h2 id="synopsis">SYNOPSIS</h2> <pre><code>npm uninstall [@&lt;scope&gt;/]&lt;package&gt; [--save|--save-dev|--save-optional] npm rm (with any of the previous argument usage) </code></pre><h2 id="description">DESCRIPTION</h2> <p>This uninstalls a package, completely removing everything npm installed on its behalf.</p> <p>Example:</p> <pre><code>npm uninstall sax </code></pre><p>In global mode (ie, with <code>-g</code> or <code>--global</code> appended to the command), it uninstalls the current package context as a global package.</p> <p><code>npm uninstall</code> takes 3 exclusive, optional flags which save or update the package version in your main package.json:</p> <ul> <li><p><code>--save</code>: Package will be removed from your <code>dependencies</code>.</p> </li> <li><p><code>--save-dev</code>: Package will be removed from your <code>devDependencies</code>.</p> </li> <li><p><code>--save-optional</code>: Package will be removed from your <code>optionalDependencies</code>.</p> </li> </ul> <p>Scope is optional and follows the usual rules for <code><a href="../misc/npm-scope.html"><a href="../misc/npm-scope.html">npm-scope(7)</a></a></code>.</p> <p>Examples:</p> <pre><code>npm uninstall sax --save npm uninstall @myorg/privatepackage --save npm uninstall node-tap --save-dev npm uninstall dtrace-provider --save-optional </code></pre><h2 id="see-also">SEE ALSO</h2> <ul> <li><a href="../cli/npm-prune.html"><a href="../cli/npm-prune.html">npm-prune(1)</a></a></li> <li><a href="../cli/npm-install.html"><a href="../cli/npm-install.html">npm-install(1)</a></a></li> <li><a href="../files/npm-folders.html"><a href="../files/npm-folders.html">npm-folders(5)</a></a></li> <li><a href="../cli/npm-config.html"><a href="../cli/npm-config.html">npm-config(1)</a></a></li> <li><a href="../misc/npm-config.html"><a href="../misc/npm-config.html">npm-config(7)</a></a></li> <li><a href="../files/npmrc.html"><a href="../files/npmrc.html">npmrc(5)</a></a></li> </ul> </div> <table border=0 cellspacing=0 cellpadding=0 id=npmlogo> <tr><td style="width:180px;height:10px;background:rgb(237,127,127)" colspan=18>&nbsp;</td></tr> <tr><td rowspan=4 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td><td style="width:40px;height:10px;background:#fff" colspan=4>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=4>&nbsp;</td><td style="width:40px;height:10px;background:#fff" colspan=4>&nbsp;</td><td rowspan=4 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td><td colspan=6 style="width:60px;height:10px;background:#fff">&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=4>&nbsp;</td></tr> <tr><td colspan=2 style="width:20px;height:30px;background:#fff" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:#fff" rowspan=3>&nbsp;</td><td style="width:20px;height:10px;background:#fff" rowspan=4 colspan=2>&nbsp;</td><td style="width:10px;height:20px;background:rgb(237,127,127)" rowspan=2>&nbsp;</td><td 
style="width:10px;height:10px;background:#fff" rowspan=3>&nbsp;</td><td style="width:20px;height:10px;background:#fff" rowspan=3 colspan=2>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:#fff" rowspan=3>&nbsp;</td><td style="width:10px;height:10px;background:rgb(237,127,127)" rowspan=3>&nbsp;</td></tr> <tr><td style="width:10px;height:10px;background:#fff" rowspan=2>&nbsp;</td></tr> <tr><td style="width:10px;height:10px;background:#fff">&nbsp;</td></tr> <tr><td style="width:60px;height:10px;background:rgb(237,127,127)" colspan=6>&nbsp;</td><td colspan=10 style="width:10px;height:10px;background:rgb(237,127,127)">&nbsp;</td></tr> <tr><td colspan=5 style="width:50px;height:10px;background:#fff">&nbsp;</td><td style="width:40px;height:10px;background:rgb(237,127,127)" colspan=4>&nbsp;</td><td style="width:90px;height:10px;background:#fff" colspan=9>&nbsp;</td></tr> </table> <p id="footer">npm-uninstall &mdash; [email protected]</p>
apache-2.0
mrinsss/Full-Repo
tripezi/system/application/libraries/multilanguage/TMXParser.php
4332
<?php /* private variable and functions are prefixed with underscore('_') */ require_once('TranslationContainer.php'); class TMXParser { private $_parser = null; private $_xmlFile = null; private $_currentTuid = null; private $_boolSeg = false; private $_currentLanguage = null; private $_currentPage = null; private $_boolPage = false; private $_currentData = ''; private $_tc = null; private $_masterLanguage = 'en'; public function __construct($xmlFile='', $masterLanguage = '') { if($masterLanguage != '') { $this->_masterLanguage = $masterLanguage; } $this->_xmlFile = $xmlFile; } public function doParse() { $this->_tc = new TranslationContainer($this->_masterLanguage); $this->_parser = xml_parser_create(); xml_set_element_handler($this->_parser, "_startElement", "_endElement"); xml_set_object($this->_parser, $this); xml_parser_set_option($this->_parser, XML_OPTION_CASE_FOLDING, 0); xml_parser_set_option($this->_parser, XML_OPTION_TARGET_ENCODING, 'utf-8'); xml_set_character_data_handler($this->_parser, "_contentElement"); if ($this->_xmlFile === null) { throw new Exception('Translation source xml is set. Use setXML() or constructor to set source xml.'); } if (!is_readable($this->_xmlFile)) { throw new Exception('Translation source xml is not readable.'); } if (!xml_parse($this->_parser, file_get_contents($this->_xmlFile))) { $ex = sprintf('XML error: %s at line %d', xml_error_string(xml_get_error_code($this->_parser)), xml_get_current_line_number($this->_parser)); xml_parser_free($this->_parser); throw new Exception($ex); } return $this->_tc; } private function _startElement($parser, $name, $atrrs) { //echo '<br>name='.$name.'<br>'; //print_r($atrrs); if ($this->_boolSeg != false) { //echo '<p>##############</p>'; $this->_currentData .= "<".$name; foreach($atrrs as $key => $value) { $this->_currentData .= " $key=\"$value\""; } $this->_content .= ">"; } else { switch (strtolower($name)) { case 'tu': if(isset($atrrs['tuid']) === true){ $this->_currentTuid = $atrrs['tuid']; } break; case 'tuv': if(isset($atrrs['xml:lang']) === true){ $this->_currentLanguage = $atrrs['xml:lang']; } break; case 'prop': if( isset($atrrs['type']) === true && $atrrs['type'] == 'page' ){ $this->_boolPage = true; $this->_currentPage = ''; } break; case 'seg': $this->_boolSeg = true; $this->_currentData = ''; break; } } } private function _endElement($parser, $name) { //echo '<br>endname='.$name.'<br>'; if (($this->_boolSeg != false) and ($name !== 'seg')) { //echo '<p>##############</p>'; $this->_currentData .= "</".$name.">"; } else { switch (strtolower($name)) { case 'tu': $this->_currentTuid = null; break; case 'tuv': $this->_currentLanguage = null; break; case 'prop': if( $this->_boolPage ){ $this->_boolPage = false; //$this->_currentPage = ''; } break; case 'seg': $this->_boolSeg = false; if( ($this->_tc !== null) || !$this->_tc->hasId($this->_currentTuid) ) { //$this->_currentData = html_entity_decode($this->_currentData, ENT_QUOTES, 'utf-8'); $this->_currentData = base64_decode($this->_currentData); $this->_tc->addWordTuid($this->_currentTuid, $this->_currentData, $this->_currentLanguage, $this->_currentPage); //echo ' <br> '.$this->_currentData; } break; } } } private function _contentElement($parser, $data) { //echo ' <br>content='.$this->_currentData; if($this->_boolSeg && $this->_currentTuid !== null && $this->_currentLanguage !== null ) { $this->_currentData .= $data; } if( $this->_boolPage ) { $this->_currentPage = $data; } } public function getXML() { return $this->_xmlFile; } public function 
setXML($xmlFile) { $this->_xmlFile = $xmlFile; } public function getMasterLanguage() { return $this->_masterLanguage; } public function setMasterLanguage($lang) { $this->_masterLanguage = $lang; } }
apache-2.0
davidkarlsen/camel
components/camel-undertow/src/test/java/org/apache/camel/component/undertow/rest/RestUndertowHttpGetCorsTest.java
5600
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.undertow.rest; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.undertow.BaseUndertowTest; import org.apache.camel.spi.RestConfiguration; import org.junit.Test; public class RestUndertowHttpGetCorsTest extends BaseUndertowTest { @Test public void testCorsGet() throws Exception { // send OPTIONS first which should not be routed getMockEndpoint("mock:inputGet").expectedMessageCount(0); Exchange out = template.request("http://localhost:" + getPort() + "/users/123/basic", new Processor() { @Override public void process(Exchange exchange) throws Exception { exchange.getIn().setHeader(Exchange.HTTP_METHOD, "OPTIONS"); } }); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_ORIGIN, out.getOut().getHeader("Access-Control-Allow-Origin")); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_METHODS, out.getOut().getHeader("Access-Control-Allow-Methods")); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_HEADERS, out.getOut().getHeader("Access-Control-Allow-Headers")); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_MAX_AGE, out.getOut().getHeader("Access-Control-Max-Age")); assertMockEndpointsSatisfied(); resetMocks(); getMockEndpoint("mock:inputGet").expectedMessageCount(1); // send GET request which should be routed String out2 = fluentTemplate.to("http://localhost:" + getPort() + "/users/123/basic") .withHeader(Exchange.HTTP_METHOD, "GET") .request(String.class); assertEquals("123;Donald Duck", out2); assertMockEndpointsSatisfied(); } @Test public void testCorsPut() throws Exception { // send OPTIONS first which should not be routed getMockEndpoint("mock:inputPut").expectedMessageCount(0); Exchange out = template.request("http://localhost:" + getPort() + "/users/123/basic", new Processor() { @Override public void process(Exchange exchange) throws Exception { exchange.getIn().setHeader(Exchange.HTTP_METHOD, "OPTIONS"); } }); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_ORIGIN, out.getOut().getHeader("Access-Control-Allow-Origin")); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_METHODS, out.getOut().getHeader("Access-Control-Allow-Methods")); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_ALLOW_HEADERS, out.getOut().getHeader("Access-Control-Allow-Headers")); assertEquals(RestConfiguration.CORS_ACCESS_CONTROL_MAX_AGE, out.getOut().getHeader("Access-Control-Max-Age")); assertMockEndpointsSatisfied(); resetMocks(); getMockEndpoint("mock:inputPut").expectedMessageCount(1); // send PUT request which should be routed String out2 = fluentTemplate.to("http://localhost:" + getPort() + "/users/123/basic") .withHeader(Exchange.HTTP_METHOD, "PUT") 
.request(String.class); assertEquals("123;Donald Duck", out2); assertMockEndpointsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { // configure to use undertow on localhost with the given port restConfiguration().component("undertow").host("localhost").port(getPort()).enableCORS(true); // use the rest DSL to define the rest services rest("/users/") .get("{id}/basic") .route() .to("mock:inputGet") .process(new Processor() { public void process(Exchange exchange) throws Exception { String id = exchange.getIn().getHeader("id", String.class); exchange.getOut().setBody(id + ";Donald Duck"); } }).endRest() .put("{id}/basic") .route() .to("mock:inputPut") .process(new Processor() { public void process(Exchange exchange) throws Exception { String id = exchange.getIn().getHeader("id", String.class); exchange.getOut().setBody(id + ";Donald Duck"); } }); } }; } }
apache-2.0
Fabryprog/camel
docs/components/modules/ROOT/examples/core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithMockMultipleEndpointsWithSkipTest.java
3016
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor.interceptor; import org.apache.camel.ContextTestSupport; import org.apache.camel.builder.AdviceWithRouteBuilder; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.seda.SedaEndpoint; import org.apache.camel.reifier.RouteReifier; import org.junit.Test; public class AdviceWithMockMultipleEndpointsWithSkipTest extends ContextTestSupport { // START SNIPPET: e1 // tag::e1[] @Test public void testAdvisedMockEndpointsWithSkip() throws Exception { // advice the first route using the inlined AdviceWith route builder // which has extended capabilities than the regular route builder RouteReifier.adviceWith(context.getRouteDefinitions().get(0), context, new AdviceWithRouteBuilder() { @Override public void configure() throws Exception { // mock sending to direct:foo and direct:bar and skip send to it mockEndpointsAndSkip("direct:foo", "direct:bar"); } }); getMockEndpoint("mock:result").expectedBodiesReceived("Hello World"); getMockEndpoint("mock:direct:foo").expectedMessageCount(1); getMockEndpoint("mock:direct:bar").expectedMessageCount(1); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); // the message was not send to the direct:foo route and thus not sent to the seda endpoint SedaEndpoint seda = context.getEndpoint("seda:foo", SedaEndpoint.class); assertEquals(0, seda.getCurrentQueueSize()); } // end::e1[] // END SNIPPET: e1 // START SNIPPET: route // tag::route[] @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").to("direct:foo").to("direct:bar").to("mock:result"); from("direct:foo").transform(constant("Bye World")).to("seda:foo"); from("direct:bar").transform(constant("Hi World")).to("seda:foo"); } }; } // end::route[] // END SNIPPET: route }
apache-2.0
synaptek/TypeScript
tests/baselines/reference/importCallExpressionDeclarationEmit2.js
339
//// [tests/cases/conformance/dynamicImport/importCallExpressionDeclarationEmit2.ts] //// //// [0.ts] export function foo() { return "foo"; } //// [1.ts] var p1 = import("./0"); //// [0.js] export function foo() { return "foo"; } //// [1.js] var p1 = import("./0"); //// [0.d.ts] export declare function foo(): string;
apache-2.0
holsky/elixir-school
fr/lessons/basics/sigils.md
6080
--- layout: page title: Sigils category: basics order: 10 lang: fr --- Using and creating sigils. {% include toc.html %} ## <a name="vue-d-ensemble-des-sigils"></a>Sigils overview Elixir provides an alternative syntax for representing and working with literal values. A sigil starts with a tilde `~` followed by a character. Elixir ships with a number of sigils out of the box, and it is also possible to create your own when you need to extend the language. The list of available sigils includes: - `~C` Generates a character list **without** escaping or interpolation - `~c` Generates a character list **with** escaping and interpolation - `~R` Generates a regular expression **without** escaping or interpolation - `~r` Generates a regular expression **with** escaping and interpolation - `~S` Generates a string **without** escaping or interpolation - `~s` Generates a string **with** escaping and interpolation - `~W` Generates a word list **without** escaping or interpolation - `~w` Generates a word list **with** escaping and interpolation The list of delimiters includes: - `<...>` A pair of angle brackets - `{...}` A pair of curly braces - `[...]` A pair of square brackets - `(...)` A pair of parentheses - `|...|` A pair of pipes - `/.../` A pair of forward slashes - `"..."` A pair of double quotes - `'...'` A pair of single quotes ### <a name="listes-de-caracteres"></a>Character lists The `~c` and `~C` sigils generate character lists. For example: ```elixir iex> ~c/2 + 7 = #{2 + 7}/ '2 + 7 = 9' iex> ~C/2 + 7 = #{2 + 7}/ '2 + 7 = #{2 + 7}' ``` We can see that the lowercase `~c` interpolates the calculation, whereas the uppercase `~C` does not. We will see that this uppercase / lowercase behaviour is common to all of the built-in sigils. ### <a name="expressions-regulieres"></a>Regular expressions The `~r` and `~R` sigils are used to represent regular expressions. We create them either on the fly or for use with the functions of the `Regex` module. For example: ```elixir iex> re = ~r/elixir/ ~r/elixir/ iex> "Elixir" =~ re false iex> "elixir" =~ re true ``` We can see in the first equality test that `Elixir` does not match the regular expression, because the word is capitalized. But since Elixir uses Perl Compatible Regular Expressions (PCRE), we can append `i` to our sigil's regular expression to turn off case sensitivity: ```elixir iex> re = ~r/elixir/i ~r/elixir/i iex> "Elixir" =~ re true iex> "elixir" =~ re true ``` Furthermore, Elixir provides the [Regex](http://elixir-lang.org/docs/stable/elixir/Regex.html) API, which is built on top of Erlang's regular expression library. Let's use `Regex.split/2` with a regex sigil: ```elixir iex> string = "100_000_000" "100_000_000" iex> Regex.split(~r/_/, string) ["100", "000", "000"] ``` As we can see, the string `"100_000_000"` is split on every underscore thanks to the `~r/_/` sigil. The `Regex.split` function returns a list. ### <a name="chaines-de-caracteres"></a>Strings The `~s` and `~S` sigils are used to generate strings. For example: ```elixir iex> ~s/the cat in the hat on the mat/ "the cat in the hat on the mat" iex> ~S/the cat in the hat on the mat/ "the cat in the hat on the mat" ``` So what is the difference? It is similar to the one we already saw for the character list sigil: interpolation and the use of escape sequences. Take another example: ```elixir iex> ~s/welcome to elixir #{String.downcase "school"}/ "welcome to elixir school" iex> ~S/welcome to elixir #{String.downcase "school"}/ "welcome to elixir \#{String.downcase \"school\"}" ``` ### Word lists The word list sigil can come in very handy from time to time. It can save time and keystrokes, and arguably reduces the complexity of the code. Take the following example: ```elixir iex> ~w/i love elixir school/ ["i", "love", "elixir", "school"] iex> ~W/i love elixir school/ ["i", "love", "elixir", "school"] ``` We can see that what is typed between the delimiters is split on whitespace and put into a list. There is, however, no difference between these two examples. Once again, the difference comes from interpolation and escape sequences. Take the following example: ```elixir iex> ~w/i love #{'e'}lixir school/ ["i", "love", "elixir", "school"] iex> ~W/i love #{'e'}lixir school/ ["i", "love", "\#{'e'}lixir", "school"] ``` ## <a name="creation-de-sigils"></a>Creating sigils One of Elixir's goals is to be an extensible programming language, so it should come as no surprise that it is easy to create your own sigils. In this example we will create a sigil that upcases a string. Since the Elixir standard library already provides a function for this (`String.upcase/1`), we will build our sigil around that function: ```elixir iex> defmodule MySigils do ...> def sigil_u(string, []), do: String.upcase(string) ...> end iex> import MySigils nil iex> ~u/elixir school/ ELIXIR SCHOOL ``` Since there is no existing `~u` sigil, we create it. We first define a module called `MySigils`, and inside it we create a function named `sigil_u`. The `_u` indicates that we want to use `u` as the character after the tilde. The function definition must take two arguments, an input and a character list for the options passed (such as the `i` used with regular expressions).
apache-2.0
caskdata/hadoop_cookbook
recipes/pig.rb
714
# # Cookbook:: hadoop # Recipe:: pig # # Copyright © 2013-2016 Cask Data, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # include_recipe 'hadoop::repo' package hadoop_package('pig') do action :install end
apache-2.0
rcsanchez97/mongo-c-driver
src/libmongoc/src/mongoc/mongoc-stream-private.h
1452
/* * Copyright 2013-2014 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "mongoc-prelude.h" #ifndef MONGOC_STREAM_PRIVATE_H #define MONGOC_STREAM_PRIVATE_H #include "mongoc-iovec.h" #include "mongoc-stream.h" BSON_BEGIN_DECLS #define MONGOC_STREAM_SOCKET 1 #define MONGOC_STREAM_FILE 2 #define MONGOC_STREAM_BUFFERED 3 #define MONGOC_STREAM_GRIDFS 4 #define MONGOC_STREAM_TLS 5 #define MONGOC_STREAM_GRIDFS_UPLOAD 6 #define MONGOC_STREAM_GRIDFS_DOWNLOAD 7 bool mongoc_stream_wait (mongoc_stream_t *stream, int64_t expire_at); bool _mongoc_stream_writev_full (mongoc_stream_t *stream, mongoc_iovec_t *iov, size_t iovcnt, int32_t timeout_msec, bson_error_t *error); mongoc_stream_t * mongoc_stream_get_root_stream (mongoc_stream_t *stream); BSON_END_DECLS #endif /* MONGOC_STREAM_PRIVATE_H */
apache-2.0
Donnerbart/hazelcast
hazelcast/src/test/java/com/hazelcast/cache/merge/CacheMergePolicyProviderTest.java
4605
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.merge; import com.hazelcast.cache.BuiltInCacheMergePolicies; import com.hazelcast.cache.CacheMergePolicy; import com.hazelcast.cache.impl.CacheService; import com.hazelcast.cache.impl.merge.policy.CacheMergePolicyProvider; import com.hazelcast.config.InvalidConfigurationException; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.annotation.ParallelTest; import com.hazelcast.test.annotation.QuickTest; import org.hamcrest.core.IsInstanceOf; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelTest.class}) public class CacheMergePolicyProviderTest extends HazelcastTestSupport { private CacheMergePolicyProvider mergePolicyProvider; @Rule public ExpectedException expected = ExpectedException.none(); @Before public void setup() { CacheService service = getNodeEngineImpl(createHazelcastInstance()).getService(CacheService.SERVICE_NAME); mergePolicyProvider = service.getMergePolicyProvider(); } @Test public void getMergePolicy_NotExistingMergePolicy() { expected.expect(InvalidConfigurationException.class); expected.expectCause(IsInstanceOf.any(ClassNotFoundException.class)); mergePolicyProvider.getMergePolicy("No such policy!"); } @Test public void getMergePolicy_NullPolicy() { expected.expect(InvalidConfigurationException.class); mergePolicyProvider.getMergePolicy(null); } @Test public void getMergePolicy_withClassName_PutIfAbsentCacheMergePolicy() { assertMergePolicyCorrectlyInitialised(PutIfAbsentCacheMergePolicy.class.getName(), PutIfAbsentCacheMergePolicy.class); } @Test public void getMergePolicy_withConstant_PutIfAbsentCacheMergePolicy() { assertMergePolicyCorrectlyInitialised(BuiltInCacheMergePolicies.PUT_IF_ABSENT.name(), PutIfAbsentCacheMergePolicy.class); } @Test public void getMergePolicy_withClassName_LatestAccessCacheMergePolicy() { assertMergePolicyCorrectlyInitialised(LatestAccessCacheMergePolicy.class.getName(), LatestAccessCacheMergePolicy.class); } @Test public void getMergePolicy_withConstant_LatestAccessCacheMergePolicy() { assertMergePolicyCorrectlyInitialised(BuiltInCacheMergePolicies.LATEST_ACCESS.name(), LatestAccessCacheMergePolicy.class); } @Test public void getMergePolicy_withClassName_PassThroughCachePolicy() { assertMergePolicyCorrectlyInitialised(PassThroughCacheMergePolicy.class.getName(), PassThroughCacheMergePolicy.class); } @Test public void getMergePolicy_withConstant_PassThroughCachePolicy() { assertMergePolicyCorrectlyInitialised(BuiltInCacheMergePolicies.PASS_THROUGH.name(), PassThroughCacheMergePolicy.class); } @Test public void 
getMergePolicy_withClassName_HigherHitsMapCachePolicy() { assertMergePolicyCorrectlyInitialised(HigherHitsCacheMergePolicy.class.getName(), HigherHitsCacheMergePolicy.class); } @Test public void getMergePolicy_withConstant_HigherHitsMapCachePolicy() { assertMergePolicyCorrectlyInitialised(BuiltInCacheMergePolicies.HIGHER_HITS.name(), HigherHitsCacheMergePolicy.class); } private void assertMergePolicyCorrectlyInitialised(String mergePolicyName, Class<? extends CacheMergePolicy> expectedMergePolicyClass) { Object mergePolicy = mergePolicyProvider.getMergePolicy(mergePolicyName); assertNotNull(mergePolicy); assertEquals(expectedMergePolicyClass, mergePolicy.getClass()); } }
apache-2.0