package cc.mrbird.security.validate.smscode;

import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.SpringSecurityCoreVersion;

import java.util.Collection;

public class SmsAuthenticationToken extends AbstractAuthenticationToken {

    private static final long serialVersionUID = SpringSecurityCoreVersion.SERIAL_VERSION_UID;

    private final Object principal;

    public SmsAuthenticationToken(String mobile) {
        super(null);
        this.principal = mobile;
        setAuthenticated(false);
    }

    public SmsAuthenticationToken(Object principal, Collection<? extends GrantedAuthority> authorities) {
        super(authorities);
        this.principal = principal;
        super.setAuthenticated(true); // must use super, as we override
    }

    @Override
    public Object getCredentials() {
        return null;
    }

    public Object getPrincipal() {
        return this.principal;
    }

    public void setAuthenticated(boolean isAuthenticated) throws IllegalArgumentException {
        if (isAuthenticated) {
            throw new IllegalArgumentException(
                    "Cannot set this token to trusted - use constructor which takes a GrantedAuthority list instead");
        }
        super.setAuthenticated(false);
    }

    @Override
    public void eraseCredentials() {
        super.eraseCredentials();
    }
}
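The class above defines two constructors: an unauthenticated variant that only carries the mobile number, and a trusted variant that carries the resolved principal plus its authorities. The following is a minimal, hypothetical sketch of how the two are typically wired together in a custom AuthenticationProvider; the SmsAuthenticationProvider name and the injected UserDetailsService are assumptions for illustration, not part of the original project.

package cc.mrbird.security.validate.smscode;

import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.InternalAuthenticationServiceException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;

// Hypothetical provider (not part of the original file): shows the intended
// hand-off between the two SmsAuthenticationToken constructors.
public class SmsAuthenticationProvider implements AuthenticationProvider {

    // Assumed collaborator: any UserDetailsService that can resolve a mobile number.
    private final UserDetailsService userDetailsService;

    public SmsAuthenticationProvider(UserDetailsService userDetailsService) {
        this.userDetailsService = userDetailsService;
    }

    @Override
    public Authentication authenticate(Authentication authentication) throws AuthenticationException {
        // The filter is expected to submit the unauthenticated variant,
        // created with new SmsAuthenticationToken(mobile).
        SmsAuthenticationToken token = (SmsAuthenticationToken) authentication;
        UserDetails user = userDetailsService.loadUserByUsername((String) token.getPrincipal());
        if (user == null) {
            // Defensive check; most implementations throw UsernameNotFoundException instead.
            throw new InternalAuthenticationServiceException("No user found for mobile number");
        }
        // Replace it with the trusted variant, which calls super.setAuthenticated(true).
        SmsAuthenticationToken result = new SmsAuthenticationToken(user, user.getAuthorities());
        result.setDetails(token.getDetails());
        return result;
    }

    @Override
    public boolean supports(Class<?> authentication) {
        return SmsAuthenticationToken.class.isAssignableFrom(authentication);
    }
}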
Texture2D RT : register(t0);
Texture2D<float2> NormalMap : register(t1);
SamplerState samplerState[2] : register(s0);

struct PS_INPUT
{
    float2 uv0 : TEXCOORD0;
};

float4 main( PS_INPUT inPs ) : SV_Target
{
    float2 normal = 2 * (NormalMap.Sample( samplerState[1], inPs.uv0 * 2.5 ).xy - 0.5);

    return RT.Sample( samplerState[0], inPs.uv0 + normal.xy * 0.05 );
}
<!DOCTYPE html> <html lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> <meta name="viewport" content="width=device-width, initial-scale=1"/> <title>BibleVerse Predictor</title> <!-- CSS --> <link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet"> <link href="static/css/materialize.css" type="text/css" rel="stylesheet" media="screen,projection"/> <link href="static/css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.5.0/css/all.css" integrity="sha384-B4dIYHKNBt8Bc12p+WXckhzcICo0wtJAoU8YZTY5qE0Id1GSseTk6S+L3BlXeVIU" crossorigin="anonymous"> </head> </head> <body><div class="navbar-fixed"> <nav class="purple darken-4" role="navigation"> <div class="nav-wrapper container"> <a id="logo-container" href="#" class="brand-logo"><h5>Verse Predictor</h5></a> <ul class="right hide-on-med-and-down"> <li><a href="{{url_for('index')}}">Home</a></li> <li><a href="#aboutapp">About</a></li> </ul> <a href="#" data-target="nav-mobile" class="sidenav-trigger"><i class="material-icons">menu</i></a> </div> </nav> </div> <ul id="nav-mobile" class="sidenav"> <li><a href="{{url_for('index')}}">Home</a></li> <li><a href="#contactme">About</a></li> </ul> <!--Slider --> <section class="slider"> <div id="index-banner" class="parallax-container"> <ul class="slides"> <li> <img src="backgroundblue.png" alt="Unsplashed background img 1" alt=""> <div class="caption center-align"> <h2>Bible Verse Prediction </h2> <h5 class="light grey-text text-lighten-3 hide-on-small-only"> Using Machine Learning </h5> <div class="row center"> <a href="{{url_for('index')}}" id="download-button" class="btn-large waves-effect waves-light blue lighten-1">Refresh</a> </div> </div> </li> <li> <img src="backgroundpurple.png" alt="Unsplashed background img 1"> <div class="caption left-align"> <h2>Bible Passage Prediction</h2> <h5 class="light grey-text text-lighten-3 hide-on-small-only"> Predicting Location of Text </h5> <a href="#aboutapp" class="btn btn-large waves-effect waves-light light-blue lighten-1">Learn More</a> </div> </li> </ul> </div> </section> <!-- Start of Main Section --> <div class="container"> <div class="section"> <!-- Icon Section --> <div class="row"> <div class="input-field col s12 m10"> <div class="icon-block"> <h2 class="center brown-text"><i class="material-icons">chrome_reader_mode</i></h2> <form method="POST" action="/predict"> <textarea name="rawtext" cols="3" rows="5" class="form-control" required="true" placeholder="Enter Text Here"></textarea> <br/> <button class="btn btn-small waves-effect waves-light light-blue lighten-1" type="reset">Clear</button> <button class="btn btn-small waves-effect waves-light purple lighten-1" type="submit">Predict</button> </form> </div> </div> </div> <br/> <div class="row"> <div> <p>Your Verse was : <i>{{ raw_text }}</i></p> </div> </div> </div> </div> <!-- End --> <!-- Result Display--> <!-- SECTION: SOLUTIONS ABOUT --> <section class="section section-solutions-about grey darken-2 "> <div class="container"> <div class="row"> <div class="col s12 m12"> <div class="row"> <div class="col s12"> <ul class="tabs grey darken-2"> <li class="tab col s3"> <a href="#tab1" class="blue-text">Result</a> </li> <li class="tab col s3"> <a href="#tab2" class="blue-text">Sentiment</a> </li> </ul> </div> <div id="tab1" class="col s12"> <h5>Result</h5> <p style="font-style:italic;">Testament of the Bible</p> {% if prediction == 0 %} <button 
class="btn btn-small waves-effect waves-light light-blue lighten-1">Old Testament</button> {% elif prediction == 1 %} <button class="btn btn-small waves-effect waves-light light-green lighten-1">New Testament </button> {%endif%} <p>Score:{{ pred_score }}</p> </div> <div id="tab2" class="col s12"> <h5>Sentiment</h5> <p>Verse Sentiment <button class="btn btn-small waves-effect waves-light light-teal lighten-1"> {{verse_sentiment }}</button> </p> </div> </div> </div> </div> </div> </section> <footer class="page-footer purple darken-4"> <div class="container" id="aboutapp"> <div class="row"> <div class="col l6 s12"> <h5 class="white-text">About Bible Verse Predictor</h5> <p class="grey-text text-lighten-4">Using Machine Learning to location of predict bible verses .</p> </div> <div class="col l3 s12"> <h5 class="white-text">Connect With Me</h5> <ul> <a href="https://facebook.com/jcharistech" target="_blank" class="white-text"> <i class="fab fa-facebook fa-4x"></i> </a> <a href="https://gh.linkedin.com/in/jesiel-emmanuel-agbemabiase-6935b690" target="_blank" class="white-text"> <i class="fab fa-linkedin fa-4x"></i> </a> <a href="https://www.youtube.com/channel/UC2wMHF4HBkTMGLsvZAIWzRg" target="_blank" class="white-text"> <i class="fab fa-youtube-square fa-4x"></i> </a> <a href="https://github.com/Jcharis/" target="_blank" class="white-text"> <i class="fab fa-github-square fa-4x"></i> </a> </ul> </div> </div> </div> <div class="footer-copyright"> <div class="container"> Made by <a class="white-text text-lighten-3" href="https://jcharistech.wordpress.com">Jesse E.Agbe & JCharisTech</a><br/> <a class="white-text text-lighten-3" href="https://jcharistech.wordpress.com">Jesus Saves @JCharisTech</a> </div> </div> </footer> <!-- Scripts--> <script src="https://code.jquery.com/jquery-2.1.1.min.js"></script> <script src="static/js/materialize.js"></script> <script src="static/js/init.js"></script> <!-- Jesse JCharis --> </body> </html>
<?xml version="1.0" encoding="utf-8"?>
<!--
  ~ This file is part of the PhotoEditor Software Development Kit.
  ~
  ~ Copyright (C) 2017 9elements GmbH <[email protected]>
  ~ All rights reserved.
  ~
  ~ Redistribution and use in source and binary forms, without
  ~ modification, are permitted provided that the following license agreement
  ~ is approved and a legal/financial contract was signed by the user.
  ~
  ~ The license agreement can be found under the following link:
  ~
  ~ https://www.photoeditorsdk.com/LICENSE.txt
  -->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
    android:viewportWidth="48"
    android:viewportHeight="48"
    android:width="48dp"
    android:height="48dp">
    <path
        android:pathData="M18 34H30V32H18ZM12 16H36V14H12Zm0 6H36V20H12Zm0 6H36V26H12Z"
        android:fillColor="@color/imgly_icon_color" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<style xmlns="http://purl.org/net/xbiblio/csl" version="1.0" default-locale="en-US">
  <!-- Elsevier, generated from "elsevier" metadata at https://github.com/citation-style-language/journals -->
  <info>
    <title>Biologically Inspired Cognitive Architectures</title>
    <id>http://www.zotero.org/styles/biologically-inspired-cognitive-architectures</id>
    <link href="http://www.zotero.org/styles/biologically-inspired-cognitive-architectures" rel="self"/>
    <link href="http://www.zotero.org/styles/apa" rel="independent-parent"/>
    <category citation-format="author-date"/>
    <issn>2212-683X</issn>
    <updated>2018-02-16T12:00:00+00:00</updated>
    <rights license="http://creativecommons.org/licenses/by-sa/3.0/">This work is licensed under a Creative Commons Attribution-ShareAlike 3.0 License</rights>
  </info>
</style>
package org.geogebra.common.main.exam; import java.util.Date; import org.geogebra.common.factories.FormatFactory; import org.geogebra.common.factories.UtilFactory; import org.geogebra.common.kernel.commands.CmdGetTime; import org.geogebra.common.kernel.commands.CommandDispatcher; import org.geogebra.common.kernel.commands.filter.CommandArgumentFilter; import org.geogebra.common.kernel.commands.filter.ExamCommandFilter; import org.geogebra.common.kernel.commands.selector.CommandFilter; import org.geogebra.common.kernel.commands.selector.CommandFilterFactory; import org.geogebra.common.main.App; import org.geogebra.common.main.Localization; import org.geogebra.common.main.Translation; import org.geogebra.common.main.exam.event.CheatingEvent; import org.geogebra.common.main.exam.event.CheatingEvents; import org.geogebra.common.main.localization.CommandErrorMessageBuilder; import org.geogebra.common.main.settings.CASSettings; import org.geogebra.common.main.settings.Settings; import org.geogebra.common.util.CopyPaste; import org.geogebra.common.util.GTimer; import org.geogebra.common.util.GTimerListener; import org.geogebra.common.util.TimeFormatAdapter; import org.geogebra.common.util.debug.Log; public class ExamEnvironment { private static final long EXAM_START_TIME_NOT_STARTED = -1; /** how long notification for protocol saved is shown */ static public final int EXAM_PROTOCOL_SAVED_NOTIFICATION_DURATION = 5000; /** exam start timestamp (milliseconds) */ private long examStartTime = EXAM_START_TIME_NOT_STARTED; private final CheatingEvents cheatingEvents; private long closed = -1; private boolean hasGraph = false; private TimeFormatAdapter timeFormatter; private CommandArgumentFilter examCommandFilter = new ExamCommandFilter(); private static final CommandFilter noCASFilter = CommandFilterFactory .createNoCasCommandFilter(); /** * application */ protected App app; private Localization localization; private long ignoreBlurUntil = -1; private boolean temporaryBlur; private CommandDispatcher commandDispatcher; private boolean wasCasEnabled; /** * * @param app * application */ public ExamEnvironment(App app) { this.app = app; this.localization = app.getLocalization(); cheatingEvents = new CheatingEvents(); commandDispatcher = app.getKernel().getAlgebraProcessor().getCommandDispatcher(); wasCasEnabled = app.getSettings().getCasSettings().isEnabled(); } /** * @return exam start timestamp */ public long getStart() { return examStartTime; } /** * * @return true if exam is started */ public boolean isStarted() { return examStartTime > 0; } /** * @param time * timestamp in milliseconds */ public void setStart(long time) { examStartTime = time; closed = -1; clearClipboard(); } /** * Start cheating when window left */ public void windowLeft() { if (isStarted()) { cheatingEvents.addWindowLeftEvent(); } } /** * Start cheating when window left, checks if the blur events are * temporarily allowed */ public void checkedWindowLeft() { if (ignoreBlurUntil > System.currentTimeMillis()) { temporaryBlur = true; return; } windowLeft(); } /** * Log end of cheating. 
*/ public void stopCheating() { this.temporaryBlur = false; if (getStart() > 0) { cheatingEvents.addWindowEnteredEvent(); Log.debug("STOPPED CHEATING"); } } /** * @return whether some cheating events occured since exam mode was started */ public boolean isCheating() { return !cheatingEvents.isEmpty(); } /** * @return whether the exam was ended */ public boolean isClosed() { return closed != -1; } /** * @param translation * The translation identifier from the Translation enum. * @return The translation identified by the Translation parameter. */ public String getTranslatedString(Translation translation) { switch (translation) { case EXAM_MODE: return localization.getMenu("exam_menu_entry"); case OK: return localization.getMenu("OK"); case ALERT: return localization.getMenu("exam_alert"); case SHOW_TO_TEACHER: return localization.getMenu("exam_log_show_screen_to_teacher"); case DATE: return localization.getMenu("exam_start_date"); case START_TIME: return localization.getMenu("exam_start_time"); case END_TIME: return localization.getMenu("exam_end_time"); case ACTIVITY: return localization.getMenu("exam_activity"); case EXAM_STARTED: return localization.getMenu("exam_started"); case EXAM_ENDED: return localization.getMenu("exam_ended"); case EXIT: return localization.getMenu("Exit"); case DURATION: return localization.getMenu("Duration"); } return null; } /** * @return The exam date in localized format. */ public String getDate() { // eg "23 October 2015" // don't use \\S for 23rd (not used in eg French) return CmdGetTime.buildLocalizedDate("\\j \\F \\Y", new Date(examStartTime), localization); } /** * @return The exam start time in localized format. */ public String getStartTime() { return getLocalizedTimeOnly(localization, examStartTime); } /** * @return The exam end time in localized format. */ public String getEndTime() { return getLocalizedTimeOnly(localization, closed); } /** * @param withEndTime * Whether the returned log string should contain the elapsed * time as exam end time. * @return The (cheating) activity log. 
*/ public String getActivityLog(boolean withEndTime) { if (cheatingEvents.isEmpty()) { return ""; } ExamLogBuilder logBuilder = new ExamLogBuilder(); appendLogTimes(localization, logBuilder, withEndTime); return logBuilder.toString().trim(); } private static String getLocalizedTimeOnly(Localization loc, long time) { // eg "14:08:48" return CmdGetTime.buildLocalizedDate("\\H:\\i:\\s", new Date(time), loc); } private static String getLocalizedDateOnly(Localization loc, long time) { // eg "Fri 23 October 2015" // don't use \\S for 23rd (not used in eg French) return CmdGetTime.buildLocalizedDate("\\D, \\j \\F \\Y", new Date(time), loc); } /** * @param loc * localization * @param settings * settings * @param builder * log builder */ private void appendSettings(Localization loc, Settings settings, ExamLogBuilder builder) { // Deactivated Views boolean supportsCAS = settings.getCasSettings().isEnabled(); boolean supports3D = settings.supports3D(); if (!hasGraph) { StringBuilder sb = new StringBuilder(); if (!supportsCAS || !supports3D) { sb.append(loc.getMenu("exam_views_deactivated")); sb.append(": "); } if (!supportsCAS) { sb.append(loc.getMenu("Perspective.CAS")); } if (!supportsCAS && !supports3D) { sb.append(", "); } if (!supports3D) { sb.append(loc.getMenu("Perspective.3DGraphics")); } builder.addLine(sb); } } private void appendStartEnd(Localization loc, ExamLogBuilder builder, boolean showEndTime) { // Exam Start Date builder.addField(loc.getMenu("exam_start_date"), getLocalizedDateOnly(loc, examStartTime)); // Exam Start Time builder.addField(loc.getMenu("exam_start_time"), getLocalizedTimeOnly(loc, examStartTime)); // Exam End Time if (showEndTime && closed > 0) { builder.addField(loc.getMenu("exam_end_time"), getLocalizedTimeOnly(loc, closed)); } } /** * * @return elapsed time */ public String getElapsedTime() { return timeToString(System.currentTimeMillis()); } /** * @param loc * localization * @param builder * log builder * @param withEndTime * true if add end timestamp */ public void appendLogTimes(Localization loc, ExamLogBuilder builder, boolean withEndTime) { // Log times StringBuilder sb = new StringBuilder(); sb.append("0:00"); sb.append(' '); sb.append(loc.getMenu("exam_started")); builder.addLine(sb); for (CheatingEvent cheatingEvent : cheatingEvents.getEvents()) { sb.setLength(0); sb.append(timeToString(cheatingEvent.getTime())); sb.append(' '); sb.append(cheatingEvent.getAction().toString(loc)); builder.addLine(sb); } if (withEndTime && closed > 0) { sb.setLength(0); sb.append(timeToString(closed)); // get exit timestamp sb.append(' '); sb.append(loc.getMenu("exam_ended")); builder.addLine(sb); } } /** * NEW LOG DIALOG * * @param loc * localization * @param settings * settings * @return log text */ public String getLog(Localization loc, Settings settings) { ExamLogBuilder sb = new ExamLogBuilder(); getLog(loc, settings, sb); return sb.toString(); } /** * @param loc * localization * @param settings * settings * @param sb * log builder */ public void getLog(Localization loc, Settings settings, ExamLogBuilder sb) { if (!app.isUnbundled()) { appendSettings(loc, settings, sb); } appendStartEnd(loc, sb, true); sb.addField(loc.getMenu("exam_activity"), ""); appendLogTimes(loc, sb, true); } /** * @param loc * localization * @return exam start and end */ public String getLogStartEnd(Localization loc) { return getLogStartEnd(loc, true); } /** * @param loc * localization * @param showEndTime * whether to include end time * @return log with start and end of the exam */ private String 
getLogStartEnd(Localization loc, boolean showEndTime) { ExamLogBuilder sb = new ExamLogBuilder(); appendStartEnd(loc, sb, showEndTime); return sb.toString(); } /** * @param loc * localization * @return log times with description separated by newline */ public String getLogTimes(Localization loc) { return getLogTimes(loc, true); } /** * @param loc * localization * @param showEndTime * whether to show end time * @return log times with description separated by newline */ private String getLogTimes(Localization loc, boolean showEndTime) { ExamLogBuilder sb = new ExamLogBuilder(); appendLogTimes(loc, sb, showEndTime); return sb.toString(); } public void setHasGraph(boolean hasGraph) { this.hasGraph = hasGraph; } /** * @param timestamp * relative timestamp * @return MM:SS */ public String timeToString(long timestamp) { if (examStartTime < 0) { return "0:00"; } int secs = (int) ((timestamp - examStartTime) / 1000); int mins = secs / 60; secs -= mins * 60; String secsS = secs + ""; if (secs < 10) { secsS = "0" + secsS; } return mins + ":" + secsS; } /** * store end time */ public void storeEndTime() { this.closed = System.currentTimeMillis(); } /** * Store end time */ public void exit() { storeEndTime(); restoreCommands(); clearClipboard(); } private void clearClipboard() { CopyPaste copyPaste = app.getCopyPaste(); if (copyPaste != null) { copyPaste.clearClipboard(); } app.copyTextToSystemClipboard(""); } /** * close exam mode and reset CAS etc. * */ public void closeExam() { examStartTime = EXAM_START_TIME_NOT_STARTED; disableExamCommandFilter(); setShowSyntax(true); app.fileNew(); } private void setShowSyntax(boolean showSyntax) { CommandErrorMessageBuilder builder = localization .getCommandErrorMessageBuilder(); builder.setShowingSyntax(showSyntax); } /** * @return calculator name for status bar */ public String getCalculatorNameForStatusBar() { return localization.getMenu(app.getConfig().getAppNameShort()); } /** * @return calculator name for exam log header */ public String getCalculatorNameForHeader() { return localization.getMenu(app.getConfig().getAppNameShort()); } /** * Run this when unlocked task detected; notifies about cheating */ public void taskUnlocked() { if (getStart() > 0) { cheatingEvents.addScreenUnlockedEvent(); Log.debug("STARTED CHEATING: task unlocked"); } } /** * If task was previously unlocked, add cheating end to the log */ public void taskLocked() { if (getStart() > 0) { cheatingEvents.addScreenLockedEvent(); Log.debug("STOPPED CHEATING: task locked"); } } /** * Add airplane mode cheating event */ public void airplaneModeTurnedOff() { if (getStart() > 0) { cheatingEvents.addAirplaneModeDisabledEvent(); } } /** * Add airrplane mode stop-cheating event */ public void airplaneModeTurnedOn() { if (getStart() > 0) { cheatingEvents.addAirplaneModeEnabledEvent(); } } /** * Add Wifi cheating event */ public void wifiEnabled() { if (getStart() > 0) { cheatingEvents.addWifiEnabledEvent(); } } /** * Add Wifi stop-cheating event */ public void wifiDisabled() { if (getStart() > 0) { cheatingEvents.addWifiDisabledEvent(); } } /** * Add Bluetooth cheating event */ public void bluetoothEnabled() { if (getStart() > 0) { cheatingEvents.addBluetoothEnabledEvent(); } } /** * Add Bluetooth stop-cheating event */ public void bluetoothDisabled() { if (getStart() > 0) { cheatingEvents.addBluetoothDisabledEvent(); } } /** * * @return the localized elapsed time string */ public String getElapsedTimeLocalized() { return timeToStringLocalized(System.currentTimeMillis()); } /** * * @param 
timestamp * current timestamp in millis * @return the localized formatted time string */ private String timeToStringLocalized(long timestamp) { if (timeFormatter == null) { timeFormatter = FormatFactory.getPrototype().getTimeFormat(); } if (examStartTime < 0) { return timeFormatter.format(localization.getLocale(), "%02d:%02d", 0); } int millis = (int) (timestamp - examStartTime); return timeFormatter.format(localization.getLocale(), "%02d:%02d", millis); } /** * @return number of cheating events */ public int getEventCount() { return cheatingEvents.size(); } /** * Saves the current command filter into the nonExamCommandFilter field and * sets the exam command filter for the duration of the exam mode. */ private void enableExamCommandFilter() { commandDispatcher.addCommandArgumentFilter(examCommandFilter); } /** * Prepares the exam for starting. */ public void setupExamEnvironment() { enableExamCommandFilter(); setShowSyntax(false); } private void restoreCommands() { if (wasCasEnabled) { enableCAS(); } else { disableCAS(); } } /** * Disables the exam command filter by setting the nonExamCommandFilter to * the CommandDispatcher */ private void disableExamCommandFilter() { commandDispatcher.removeCommandArgumentFilter(examCommandFilter); } /** * @param ignoreBlurFor * expiration timestamp of blur free pass */ public void setIgnoreBlurInterval(int ignoreBlurFor) { this.ignoreBlurUntil = System.currentTimeMillis() + ignoreBlurFor; GTimer timer = UtilFactory.getPrototype() .newTimer(new GTimerListener() { @Override public void onRun() { onBlurTimer(); } }, ignoreBlurFor); timer.start(); } /** * Handler for blur timer */ private void onBlurTimer() { if (temporaryBlur) { windowLeft(); } } /** * Enables/disables CAS commands. * * @param casEnabled * whether CAS is enabled */ public void setCasEnabled(boolean casEnabled) { if (casEnabled) { enableCAS(); } else { disableCAS(); } } private void enableCAS() { getCasSettings().setEnabled(true); commandDispatcher.removeCommandFilter(noCASFilter); } private void disableCAS() { getCasSettings().setEnabled(false); commandDispatcher.addCommandFilter(noCASFilter); } private CASSettings getCasSettings() { return app.getSettings().getCasSettings(); } }
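The ExamEnvironment class above is long, but its public surface reduces to a small lifecycle: install the exam command filter, record a start time, log blur/cheating events, then store an end time and emit the protocol. Below is a hypothetical driver, not part of GeoGebra, that only strings together public methods that appear in the class; the fully initialised App instance is assumed to be provided by the platform front end.

import org.geogebra.common.main.App;
import org.geogebra.common.main.exam.ExamEnvironment;

public class ExamLifecycleSketch {

    public static void runExam(App app) {
        ExamEnvironment exam = new ExamEnvironment(app);

        // Install the exam command filter and hide command syntax in error messages.
        exam.setupExamEnvironment();
        exam.setStart(System.currentTimeMillis());

        // A window blur reported by the platform becomes a cheating event
        // unless it falls inside an "ignore blur" grace interval.
        exam.checkedWindowLeft();
        exam.stopCheating();

        // Ending the exam: store the end time, restore commands, clear the clipboard.
        exam.exit();
        String protocol = exam.getLog(app.getLocalization(), app.getSettings());
        System.out.println(protocol);

        // Finally leave exam mode completely.
        exam.closeExam();
    }
}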
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using System;
using System.Collections.Generic;

namespace Tools.Common.Models
{
    [Serializable]
    public class OutputMetadata
    {
        private List<string> _parameterSets = new List<String>();

        /// <summary>
        /// The output type
        /// </summary>
        public TypeMetadata Type { get; set; }

        /// <summary>
        /// The parameter sets for the given output type
        /// </summary>
        public List<string> ParameterSets
        {
            get { return _parameterSets; }
        }
    }
}
C> \brief \b DGETRF VARIANT: Crout Level 3 BLAS version of the algorithm. * * =========== DOCUMENTATION =========== * * Online html documentation available at * http://www.netlib.org/lapack/explore-html/ * * Definition: * =========== * * SUBROUTINE DGETRF ( M, N, A, LDA, IPIV, INFO) * * .. Scalar Arguments .. * INTEGER INFO, LDA, M, N * .. * .. Array Arguments .. * INTEGER IPIV( * ) * DOUBLE PRECISION A( LDA, * ) * .. * * Purpose * ======= * C>\details \b Purpose: C>\verbatim C> C> DGETRF computes an LU factorization of a general M-by-N matrix A C> using partial pivoting with row interchanges. C> C> The factorization has the form C> A = P * L * U C> where P is a permutation matrix, L is lower triangular with unit C> diagonal elements (lower trapezoidal if m > n), and U is upper C> triangular (upper trapezoidal if m < n). C> C> This is the Crout Level 3 BLAS version of the algorithm. C> C>\endverbatim * * Arguments: * ========== * C> \param[in] M C> \verbatim C> M is INTEGER C> The number of rows of the matrix A. M >= 0. C> \endverbatim C> C> \param[in] N C> \verbatim C> N is INTEGER C> The number of columns of the matrix A. N >= 0. C> \endverbatim C> C> \param[in,out] A C> \verbatim C> A is DOUBLE PRECISION array, dimension (LDA,N) C> On entry, the M-by-N matrix to be factored. C> On exit, the factors L and U from the factorization C> A = P*L*U; the unit diagonal elements of L are not stored. C> \endverbatim C> C> \param[in] LDA C> \verbatim C> LDA is INTEGER C> The leading dimension of the array A. LDA >= max(1,M). C> \endverbatim C> C> \param[out] IPIV C> \verbatim C> IPIV is INTEGER array, dimension (min(M,N)) C> The pivot indices; for 1 <= i <= min(M,N), row i of the C> matrix was interchanged with row IPIV(i). C> \endverbatim C> C> \param[out] INFO C> \verbatim C> INFO is INTEGER C> = 0: successful exit C> < 0: if INFO = -i, the i-th argument had an illegal value C> > 0: if INFO = i, U(i,i) is exactly zero. The factorization C> has been completed, but the factor U is exactly C> singular, and division by zero will occur if it is used C> to solve a system of equations. C> \endverbatim C> * * Authors: * ======== * C> \author Univ. of Tennessee C> \author Univ. of California Berkeley C> \author Univ. of Colorado Denver C> \author NAG Ltd. * C> \date December 2016 * C> \ingroup variantsGEcomputational * * ===================================================================== SUBROUTINE DGETRF ( M, N, A, LDA, IPIV, INFO) * * -- LAPACK computational routine (version 3.1) -- * -- LAPACK is a software package provided by Univ. of Tennessee, -- * -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- * December 2016 * * .. Scalar Arguments .. INTEGER INFO, LDA, M, N * .. * .. Array Arguments .. INTEGER IPIV( * ) DOUBLE PRECISION A( LDA, * ) * .. * * ===================================================================== * * .. Parameters .. DOUBLE PRECISION ONE PARAMETER ( ONE = 1.0D+0 ) * .. * .. Local Scalars .. INTEGER I, IINFO, J, JB, NB * .. * .. External Subroutines .. EXTERNAL DGEMM, DGETF2, DLASWP, DTRSM, XERBLA * .. * .. External Functions .. INTEGER ILAENV EXTERNAL ILAENV * .. * .. Intrinsic Functions .. INTRINSIC MAX, MIN * .. * .. Executable Statements .. * * Test the input parameters. * INFO = 0 IF( M.LT.0 ) THEN INFO = -1 ELSE IF( N.LT.0 ) THEN INFO = -2 ELSE IF( LDA.LT.MAX( 1, M ) ) THEN INFO = -4 END IF IF( INFO.NE.0 ) THEN CALL XERBLA( 'DGETRF', -INFO ) RETURN END IF * * Quick return if possible * IF( M.EQ.0 .OR. 
N.EQ.0 ) $ RETURN * * Determine the block size for this environment. * NB = ILAENV( 1, 'DGETRF', ' ', M, N, -1, -1 ) IF( NB.LE.1 .OR. NB.GE.MIN( M, N ) ) THEN * * Use unblocked code. * CALL DGETF2( M, N, A, LDA, IPIV, INFO ) ELSE * * Use blocked code. * DO 20 J = 1, MIN( M, N ), NB JB = MIN( MIN( M, N )-J+1, NB ) * * Update current block. * CALL DGEMM( 'No transpose', 'No transpose', $ M-J+1, JB, J-1, -ONE, $ A( J, 1 ), LDA, A( 1, J ), LDA, ONE, $ A( J, J ), LDA ) * * Factor diagonal and subdiagonal blocks and test for exact * singularity. * CALL DGETF2( M-J+1, JB, A( J, J ), LDA, IPIV( J ), IINFO ) * * Adjust INFO and the pivot indices. * IF( INFO.EQ.0 .AND. IINFO.GT.0 ) $ INFO = IINFO + J - 1 DO 10 I = J, MIN( M, J+JB-1 ) IPIV( I ) = J - 1 + IPIV( I ) 10 CONTINUE * * Apply interchanges to column 1:J-1 * CALL DLASWP( J-1, A, LDA, J, J+JB-1, IPIV, 1 ) * IF ( J+JB.LE.N ) THEN * * Apply interchanges to column J+JB:N * CALL DLASWP( N-J-JB+1, A( 1, J+JB ), LDA, J, J+JB-1, $ IPIV, 1 ) * CALL DGEMM( 'No transpose', 'No transpose', $ JB, N-J-JB+1, J-1, -ONE, $ A( J, 1 ), LDA, A( 1, J+JB ), LDA, ONE, $ A( J, J+JB ), LDA ) * * Compute block row of U. * CALL DTRSM( 'Left', 'Lower', 'No transpose', 'Unit', $ JB, N-J-JB+1, ONE, A( J, J ), LDA, $ A( J, J+JB ), LDA ) END IF 20 CONTINUE END IF RETURN * * End of DGETRF * END
/*
 * Copyright 2012-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.context.properties.bind.test;

import java.util.ArrayList;
import java.util.List;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.springframework.boot.context.properties.bind.Bindable;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.ConfigurationPropertyName;
import org.springframework.boot.context.properties.source.ConfigurationPropertySource;
import org.springframework.boot.context.properties.source.MockConfigurationPropertySource;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link Binder} using package private Java beans.
 *
 * @author Madhura Bhave
 */
class PackagePrivateBeanBindingTests {

	private List<ConfigurationPropertySource> sources = new ArrayList<>();

	private Binder binder;

	private ConfigurationPropertyName name;

	@BeforeEach
	void setup() {
		this.binder = new Binder(this.sources);
		this.name = ConfigurationPropertyName.of("foo");
	}

	@Test
	void bindToPackagePrivateClassShouldBindToInstance() {
		MockConfigurationPropertySource source = new MockConfigurationPropertySource();
		source.put("foo.bar", "999");
		this.sources.add(source);
		ExamplePackagePrivateBean bean = this.binder
				.bind(this.name, Bindable.of(ExamplePackagePrivateBean.class)).get();
		assertThat(bean.getBar()).isEqualTo(999);
	}

	static class ExamplePackagePrivateBean {

		private int bar;

		int getBar() {
			return this.bar;
		}

		void setBar(int bar) {
			this.bar = bar;
		}

	}

}
//================================================================
/**
 * @packageDocumentation
 * @module std
 */
//================================================================
import { AdaptorContainer } from "../internal/container/linear/AdaptorContainer";
import { List } from "./List";

/**
 * Queue; FIFO (First In First Out).
 *
 * @author Jeongho Nam - https://github.com/samchon
 */
export class Queue<T> extends AdaptorContainer<T, List<T>, Queue<T>>
{
    /* ---------------------------------------------------------
        CONSTRUCTORS
    --------------------------------------------------------- */
    /**
     * Default Constructor.
     */
    public constructor();

    /**
     * Copy Constructor.
     *
     * @param obj Object to copy.
     */
    public constructor(obj: Queue<T>);

    public constructor(obj?: Queue<T>)
    {
        super(new List());

        if (obj !== undefined)
            this.source_.assign(obj.source_.begin(), obj.source_.end());
    }

    /* ---------------------------------------------------------
        ACCESSORS
    --------------------------------------------------------- */
    /**
     * Get the first element.
     *
     * @return The first element.
     */
    public front(): T
    {
        return this.source_.front();
    }

    /**
     * Get the last element.
     *
     * @return The last element.
     */
    public back(): T
    {
        return this.source_.back();
    }

    /**
     * @inheritDoc
     */
    public pop(): void
    {
        this.source_.pop_front();
    }
}
/*============================================================================= Copyright (c) 2001-2003 Joel de Guzman Copyright (c) 2001 Daniel Nuffer http://spirit.sourceforge.net/ Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) =============================================================================*/ #if !defined(BOOST_SPIRIT_PARSER_ID_HPP) #define BOOST_SPIRIT_PARSER_ID_HPP #if defined(BOOST_SPIRIT_DEBUG) # include <ostream> #endif #include <boost/spirit/home/classic/namespace.hpp> /////////////////////////////////////////////////////////////////////////////// namespace boost { namespace spirit { BOOST_SPIRIT_CLASSIC_NAMESPACE_BEGIN /////////////////////////////////////////////////////////////////////////// // // parser_id class // /////////////////////////////////////////////////////////////////////////// class parser_id { public: parser_id() : p(0) {} explicit parser_id(void const* prule) : p(prule) {} parser_id(std::size_t l_) : l(l_) {} bool operator==(parser_id const& x) const { return p == x.p; } bool operator!=(parser_id const& x) const { return !(*this == x); } bool operator<(parser_id const& x) const { return p < x.p; } std::size_t to_long() const { return l; } private: union { void const* p; std::size_t l; }; }; #if defined(BOOST_SPIRIT_DEBUG) inline std::ostream& operator<<(std::ostream& out, parser_id const& rid) { out << (unsigned int)rid.to_long(); return out; } #endif /////////////////////////////////////////////////////////////////////////// // // parser_tag_base class: base class of all parser tags // /////////////////////////////////////////////////////////////////////////// struct parser_tag_base {}; /////////////////////////////////////////////////////////////////////////// // // parser_address_tag class: tags a parser with its address // /////////////////////////////////////////////////////////////////////////// struct parser_address_tag : parser_tag_base { parser_id id() const { return parser_id(reinterpret_cast<std::size_t>(this)); } }; /////////////////////////////////////////////////////////////////////////// // // parser_tag class: tags a parser with an integer ID // /////////////////////////////////////////////////////////////////////////// template <int N> struct parser_tag : parser_tag_base { static parser_id id() { return parser_id(std::size_t(N)); } }; /////////////////////////////////////////////////////////////////////////// // // dynamic_parser_tag class: tags a parser with a dynamically changeable // integer ID // /////////////////////////////////////////////////////////////////////////// class dynamic_parser_tag : public parser_tag_base { public: dynamic_parser_tag() : tag(std::size_t(0)) {} parser_id id() const { return tag.to_long() ? tag : parser_id(reinterpret_cast<std::size_t>(this)); } void set_id(parser_id id_) { tag = id_; } private: parser_id tag; }; /////////////////////////////////////////////////////////////////////////////// BOOST_SPIRIT_CLASSIC_NAMESPACE_END }} // namespace BOOST_SPIRIT_CLASSIC_NS #endif
idx label predict basePredict correct time 0 3 3 3 1 0:00:17.006613 20 7 9 9 0 0:00:15.768338 40 4 8 8 0 0:00:15.816635 60 7 7 4 1 0:00:15.768966 80 8 8 8 1 0:00:15.767851 100 4 5 5 0 0:00:15.778859 120 8 0 0 0 0:00:15.795037 140 6 3 2 0 0:00:15.747020 160 2 0 2 0 0:00:15.841779 180 0 0 0 1 0:00:15.761162 200 5 3 3 0 0:00:15.731568 220 7 7 7 1 0:00:15.770849 240 1 1 1 1 0:00:15.760161 260 8 3 3 0 0:00:15.834975 280 9 9 9 1 0:00:15.758149 300 6 6 4 1 0:00:15.794001 320 3 3 2 1 0:00:15.753198 340 2 4 4 0 0:00:15.731757 360 9 7 5 0 0:00:15.823725 380 6 6 4 1 0:00:15.781116 400 9 9 9 1 0:00:15.813756 420 4 4 4 1 0:00:15.733694 440 1 9 6 0 0:00:15.893474 460 5 5 2 1 0:00:16.109746 480 8 9 9 0 0:00:15.918803 500 4 6 2 0 0:00:15.835166 520 5 7 7 0 0:00:15.783152 540 1 1 8 1 0:00:15.854693 560 0 0 0 1 0:00:15.816215 580 4 4 2 1 0:00:15.804139 600 8 8 3 1 0:00:15.863440 620 7 7 2 1 0:00:15.779746 640 5 3 3 0 0:00:15.852527 660 9 9 3 1 0:00:15.779103 680 9 7 7 0 0:00:15.735242 700 7 7 3 1 0:00:15.811614 720 4 3 3 0 0:00:15.747240 740 2 3 3 0 0:00:15.811367 760 3 4 4 0 0:00:15.806262 780 9 1 1 0 0:00:15.754416 800 7 7 7 1 0:00:15.850817 820 6 8 8 0 0:00:15.813914 840 7 7 7 1 0:00:15.781603 860 4 2 2 0 0:00:16.936308 880 8 8 8 1 0:00:15.765703 900 2 6 4 0 0:00:17.428316 920 6 6 3 1 0:00:16.431894 940 9 9 9 1 0:00:17.408148 960 9 9 8 1 0:00:16.993281 980 2 6 4 0 0:00:15.762751 1000 5 5 5 1 0:00:15.172724 1020 1 1 8 1 0:00:15.206531 1040 7 5 5 0 0:00:15.078798 1060 9 9 4 1 0:00:15.119209 1080 6 6 2 1 0:00:15.053878 1100 7 0 8 0 0:00:15.215411 1120 5 2 2 0 0:00:15.107835 1140 6 4 4 0 0:00:15.049177 1160 8 0 0 0 0:00:15.130823 1180 3 3 3 1 0:00:15.135932 1200 8 0 0 0 0:00:15.120498 1220 9 6 6 0 0:00:15.096501 1240 2 6 6 0 0:00:15.112365 1260 1 9 9 0 0:00:15.105390 1280 3 5 5 0 0:00:15.139254 1300 4 3 3 0 0:00:15.110811 1320 7 2 2 0 0:00:15.068123 1340 1 1 1 1 0:00:15.131471 1360 7 7 2 1 0:00:15.112650 1380 4 5 5 0 0:00:15.139103 1400 5 3 3 0 0:00:15.197915 1420 4 7 7 0 0:00:15.097469 1440 0 8 8 0 0:00:15.119591 1460 6 6 2 1 0:00:15.114040 1480 1 5 5 0 0:00:15.116668 1500 1 1 1 1 0:00:15.214616 1520 7 7 2 1 0:00:15.131469 1540 8 8 8 1 0:00:15.040836 1560 7 7 7 1 0:00:15.081785 1580 6 1 3 0 0:00:15.214633 1600 8 6 6 0 0:00:15.069880 1620 5 8 8 0 0:00:15.173596 1640 7 3 3 0 0:00:15.098742 1660 3 5 5 0 0:00:15.231892 1680 6 6 3 1 0:00:15.099768 1700 5 2 2 0 0:00:15.163946 1720 4 2 2 0 0:00:15.212739 1740 1 9 9 0 0:00:15.070929 1760 0 8 8 0 0:00:15.143667 1780 7 7 2 1 0:00:15.217926 1800 4 0 8 0 0:00:15.151120 1820 8 9 9 0 0:00:15.179174 1840 8 4 4 0 0:00:15.124723 1860 8 8 3 1 0:00:15.075681 1880 1 1 8 1 0:00:15.122062 1900 8 8 8 1 0:00:15.170066 1920 2 2 2 1 0:00:15.111339 1940 5 2 2 0 0:00:15.185480 1960 2 5 5 0 0:00:15.084495 1980 9 9 9 1 0:00:15.310220 2000 1 6 2 0 0:00:15.186791 2020 9 9 9 1 0:00:15.079906 2040 0 0 8 1 0:00:15.129778 2060 5 3 3 0 0:00:15.193574 2080 1 9 9 0 0:00:15.202846 2100 2 2 2 1 0:00:15.120208 2120 4 8 8 0 0:00:15.160057 2140 9 9 9 1 0:00:15.202020 2160 0 8 8 0 0:00:15.048985 2180 4 4 4 1 0:00:15.164629 2200 0 2 2 0 0:00:15.074054 2220 1 1 8 1 0:00:15.234924 2240 9 1 8 0 0:00:15.145341 2260 4 4 3 1 0:00:15.115723 2280 4 3 3 0 0:00:15.171268 2300 3 4 2 0 0:00:15.153174 2320 5 2 2 0 0:00:15.118551 2340 7 7 7 1 0:00:15.125410 2360 7 5 5 0 0:00:15.134077 2380 8 8 8 1 0:00:15.141507 2400 0 0 4 1 0:00:15.231925 2420 8 5 5 0 0:00:15.154871 2440 7 7 0 1 0:00:15.186295 2460 9 7 7 0 0:00:15.188515 2480 8 8 8 1 0:00:15.057060 2500 4 2 2 0 0:00:15.145513 2520 1 1 7 1 0:00:15.146271 2540 2 5 5 
0 0:00:15.186620 2560 7 7 4 1 0:00:15.167380 2580 6 5 5 0 0:00:15.170221 2600 8 8 8 1 0:00:15.151345 2620 1 1 8 1 0:00:15.218717 2640 7 7 4 1 0:00:15.068364 2660 3 2 2 0 0:00:15.125862 2680 0 0 0 1 0:00:15.065900 2700 9 1 1 0 0:00:15.073216 2720 3 3 2 1 0:00:15.095082 2740 1 1 7 1 0:00:15.195364 2760 2 2 2 1 0:00:15.113329 2780 7 7 5 1 0:00:15.197055 2800 4 6 6 0 0:00:15.238991 2820 6 6 2 1 0:00:15.148457 2840 3 3 3 1 0:00:15.114833 2860 5 6 6 0 0:00:15.162941 2880 4 4 4 1 0:00:15.138142 2900 3 5 2 0 0:00:15.179240 2920 2 3 3 0 0:00:15.219820 2940 3 0 0 0 0:00:15.127609 2960 9 9 8 1 0:00:15.112810 2980 8 1 1 0 0:00:15.073894 3000 5 5 5 1 0:00:15.095220 3020 1 1 8 1 0:00:15.203490 3040 7 7 7 1 0:00:15.164073 3060 7 7 5 1 0:00:15.123513 3080 1 1 2 1 0:00:15.210784 3100 0 0 8 1 0:00:15.086549 3120 4 2 2 0 0:00:15.074731 3140 8 8 8 1 0:00:15.304421 3160 6 2 2 0 0:00:15.207726 3180 3 3 5 1 0:00:15.078677 3200 5 7 7 0 0:00:15.168534 3220 3 -1 6 0 0:00:15.229010 3240 4 1 8 0 0:00:15.181231 3260 7 7 6 1 0:00:15.084061 3280 3 5 0 0 0:00:15.175402 3300 4 4 2 1 0:00:15.133478 3320 2 7 7 0 0:00:15.151562 3340 6 6 2 1 0:00:15.178191 3360 4 6 6 0 0:00:15.110077 3380 8 8 8 1 0:00:15.133629 3400 6 4 4 0 0:00:15.203174 3420 5 3 3 0 0:00:15.226623 3440 2 3 3 0 0:00:15.251697 3460 1 1 9 1 0:00:15.162175 3480 2 2 2 1 0:00:15.205021 3500 1 9 9 0 0:00:15.168433 3520 1 1 1 1 0:00:15.315767 3540 6 6 3 1 0:00:15.153031 3560 1 9 7 0 0:00:15.067661 3580 4 2 2 0 0:00:15.114244 3600 4 2 2 0 0:00:15.267894 3620 0 6 6 0 0:00:15.175539 3640 6 2 2 0 0:00:15.173419 3660 0 0 8 1 0:00:15.271695 3680 0 4 6 0 0:00:15.042876 3700 3 3 5 1 0:00:15.157066 3720 2 2 2 1 0:00:15.128600 3740 0 0 8 1 0:00:15.186843 3760 7 7 4 1 0:00:15.153405 3780 4 2 2 0 0:00:15.203688 3800 9 9 3 1 0:00:15.173628 3820 0 2 2 0 0:00:15.215873 3840 6 3 3 0 0:00:15.163819 3860 7 7 7 1 0:00:15.264664 3880 6 4 4 0 0:00:15.168763 3900 3 6 6 0 0:00:15.185854 3920 7 5 5 0 0:00:15.209036 3940 6 5 4 0 0:00:15.114873 3960 2 2 2 1 0:00:15.153767 3980 9 7 7 0 0:00:15.113395 4000 8 7 0 0 0:00:15.179067 4020 8 9 8 0 0:00:15.154297 4040 0 0 0 1 0:00:15.120984 4060 6 -1 3 0 0:00:15.103771 4080 1 1 8 1 0:00:15.110377 4100 7 7 7 1 0:00:15.075842 4120 4 4 8 1 0:00:15.194456 4140 5 5 2 1 0:00:15.093427 4160 5 2 2 0 0:00:15.165773 4180 0 8 8 0 0:00:15.138125 4200 4 2 2 0 0:00:15.147083 4220 4 0 0 0 0:00:15.171925 4240 7 0 8 0 0:00:15.114398 4260 8 8 8 1 0:00:15.116431 4280 8 1 1 0 0:00:15.143709 4300 8 8 8 1 0:00:15.171285 4320 1 8 8 0 0:00:15.234917 4340 0 0 0 1 0:00:15.116606 4360 6 9 9 0 0:00:15.173972 4380 9 8 8 0 0:00:15.117331 4400 3 4 4 0 0:00:15.186894 4420 5 5 7 1 0:00:15.154780 4440 2 2 2 1 0:00:15.158611 4460 9 9 8 1 0:00:15.151740 4480 9 9 3 1 0:00:15.195751 4500 3 5 5 0 0:00:15.162339 4520 3 6 6 0 0:00:15.083717 4540 9 9 9 1 0:00:15.112941 4560 1 1 8 1 0:00:15.190938 4580 6 4 4 0 0:00:15.179779 4600 4 7 7 0 0:00:15.257747 4620 7 3 3 0 0:00:15.137691 4640 2 2 2 1 0:00:15.124223 4660 7 4 6 0 0:00:15.132792 4680 9 1 1 0 0:00:15.150493 4700 6 5 5 0 0:00:15.314369 4720 8 0 0 0 0:00:15.134940 4740 5 3 3 0 0:00:15.105361 4760 3 2 2 0 0:00:15.185040 4780 0 0 8 1 0:00:15.061346 4800 9 9 9 1 0:00:15.158175 4820 3 3 7 1 0:00:15.122016 4840 0 0 0 1 0:00:15.117323 4860 5 5 3 1 0:00:15.145562 4880 0 8 2 0 0:00:15.151156 4900 3 6 6 0 0:00:15.145103 4920 7 7 7 1 0:00:15.077224 4940 6 4 2 0 0:00:15.188172 4960 4 3 3 0 0:00:15.179898 4980 1 9 0 0 0:00:15.113958 5000 7 7 7 1 0:00:15.243343 5020 8 8 8 1 0:00:15.091596 5040 3 1 2 0 0:00:15.108754 5060 6 2 2 0 0:00:15.136831 5080 7 
7 7 1 0:00:15.168833 5100 3 3 3 1 0:00:15.065542 5120 9 -1 8 0 0:00:15.235082 5140 8 8 8 1 0:00:15.170609 5160 0 0 8 1 0:00:15.146235 5180 9 7 4 0 0:00:15.178035 5200 3 3 3 1 0:00:15.147407 5220 0 0 8 1 0:00:15.132568 5240 1 9 4 0 0:00:15.140859 5260 0 0 0 1 0:00:15.157227 5280 0 8 8 0 0:00:15.150483 5300 9 9 5 1 0:00:15.266348 5320 7 7 7 1 0:00:15.121798 5340 3 4 4 0 0:00:15.296785 5360 9 9 3 1 0:00:15.307180 5380 1 9 9 0 0:00:15.201502 5400 9 9 8 1 0:00:15.044454 5420 6 6 3 1 0:00:15.113121 5440 9 9 9 1 0:00:15.198647 5460 0 0 0 1 0:00:15.150794 5480 1 1 4 1 0:00:15.247099 5500 8 8 8 1 0:00:15.063635 5520 4 3 3 0 0:00:15.090645 5540 5 6 4 0 0:00:15.167309 5560 3 5 5 0 0:00:15.113066 5580 5 3 3 0 0:00:15.297490 5600 6 9 4 0 0:00:15.132943 5620 3 6 6 0 0:00:15.078855 5640 2 2 2 1 0:00:15.090907 5660 9 8 8 0 0:00:15.144448 5680 2 3 3 0 0:00:15.123205 5700 3 5 5 0 0:00:15.108288 5720 9 9 9 1 0:00:15.135392 5740 6 6 5 1 0:00:15.172531 5760 2 2 8 1 0:00:15.142733 5780 7 7 7 1 0:00:15.183871 5800 2 4 4 0 0:00:15.051004 5820 5 3 2 0 0:00:15.035904 5840 2 2 5 1 0:00:15.113738 5860 0 8 8 0 0:00:15.044392 5880 0 0 8 1 0:00:15.109531 5900 4 0 0 0 0:00:15.088833 5920 8 8 8 1 0:00:15.089457 5940 7 4 4 0 0:00:15.173644 5960 2 6 6 0 0:00:14.990377 5980 9 9 2 1 0:00:15.072296 6000 8 9 4 0 0:00:15.092058 6020 6 6 2 1 0:00:15.112925 6040 2 7 7 0 0:00:15.077942 6060 3 4 2 0 0:00:15.018127 6080 1 9 9 0 0:00:15.051692 6100 1 1 8 1 0:00:15.057749 6120 5 5 3 1 0:00:15.074768 6140 9 9 1 1 0:00:15.196387 6160 2 5 5 0 0:00:15.050609 6180 0 0 2 1 0:00:15.157455 6200 3 3 2 1 0:00:15.112204 6220 2 5 5 0 0:00:15.085054 6240 2 1 4 0 0:00:15.141927 6260 2 6 6 0 0:00:15.226939 6280 8 5 5 0 0:00:15.027734 6300 1 1 2 1 0:00:15.174110 6320 0 0 8 1 0:00:15.161060 6340 3 3 3 1 0:00:15.034455 6360 2 4 2 0 0:00:15.113282 6380 3 7 4 0 0:00:15.117521 6400 0 8 8 0 0:00:15.111902 6420 7 7 7 1 0:00:15.118282 6440 3 0 4 0 0:00:15.159242 6460 3 1 3 0 0:00:15.186540 6480 0 0 2 1 0:00:15.128688 6500 7 8 8 0 0:00:15.155974 6520 6 2 2 0 0:00:15.089327 6540 8 8 8 1 0:00:15.357308 6560 6 6 2 1 0:00:15.084530 6580 1 7 4 0 0:00:15.205669 6600 7 7 7 1 0:00:15.101343 6620 7 7 3 1 0:00:15.068436 6640 5 3 3 0 0:00:15.155825 6660 0 0 8 1 0:00:15.052382 6680 3 5 2 0 0:00:15.077789 6700 6 0 0 0 0:00:15.093222 6720 2 2 4 1 0:00:15.088293 6740 2 2 6 1 0:00:15.031607 6760 5 5 3 1 0:00:15.154673 6780 7 7 7 1 0:00:15.052531 6800 6 4 2 0 0:00:15.122501 6820 1 1 1 1 0:00:15.137576 6840 9 9 1 1 0:00:15.131993 6860 0 2 2 0 0:00:15.105551 6880 2 7 7 0 0:00:15.101035 6900 3 7 7 0 0:00:15.060305 6920 5 2 2 0 0:00:15.131227 6940 1 1 1 1 0:00:15.040082 6960 9 8 8 0 0:00:15.237485 6980 0 8 8 0 0:00:15.165979 7000 2 8 8 0 0:00:15.047893 7020 7 7 7 1 0:00:15.107531 7040 7 8 8 0 0:00:15.127222 7060 8 8 8 1 0:00:15.115343 7080 5 4 4 0 0:00:15.061876 7100 9 5 3 0 0:00:15.031659 7120 7 5 5 0 0:00:15.169089 7140 4 3 3 0 0:00:15.146684 7160 5 5 5 1 0:00:15.100684 7180 6 6 3 1 0:00:15.109329 7200 4 2 2 0 0:00:15.108881 7220 9 9 8 1 0:00:15.145039 7240 0 8 8 0 0:00:15.182961 7260 1 1 8 1 0:00:15.091022 7280 1 9 5 0 0:00:15.099834 7300 3 4 4 0 0:00:15.126048 7320 8 0 0 0 0:00:15.123819 7340 2 4 4 0 0:00:15.132019 7360 2 4 4 0 0:00:15.064690 7380 7 7 2 1 0:00:15.143434 7400 3 5 3 0 0:00:15.088862 7420 3 4 2 0 0:00:15.056116 7440 3 3 2 1 0:00:15.129198 7460 5 5 5 1 0:00:15.105122 7480 1 8 8 0 0:00:15.081316 7500 6 6 6 1 0:00:15.176369 7520 4 4 2 1 0:00:15.087893 7540 5 5 5 1 0:00:15.193069 7560 0 0 4 1 0:00:15.076979 7580 8 8 8 1 0:00:15.077159 7600 8 9 2 0 0:00:15.111286 7620 
5 3 4 0 0:00:15.091764 7640 9 8 8 0 0:00:15.148988 7660 7 4 4 0 0:00:15.133014 7680 3 5 5 0 0:00:15.094122 7700 6 6 4 1 0:00:15.104465 7720 5 2 2 0 0:00:15.074580 7740 4 2 2 0 0:00:15.056031 7760 2 9 6 0 0:00:15.118605 7780 3 8 8 0 0:00:15.108394 7800 0 0 8 1 0:00:15.178158 7820 5 8 8 0 0:00:15.292265 7840 1 1 1 1 0:00:15.196943 7860 8 8 2 1 0:00:15.091528 7880 0 4 2 0 0:00:15.036736 7900 0 8 8 0 0:00:15.053866 7920 9 9 2 1 0:00:15.098643 7940 2 4 4 0 0:00:15.044581 7960 0 0 8 1 0:00:15.014183 7980 3 3 2 1 0:00:15.047683 8000 9 9 8 1 0:00:15.059526 8020 6 3 3 0 0:00:15.000188 8040 2 2 2 1 0:00:15.075985 8060 6 6 5 1 0:00:15.130198 8080 4 6 2 0 0:00:15.130053 8100 6 2 2 0 0:00:15.110796 8120 8 9 9 0 0:00:15.112062 8140 5 4 4 0 0:00:15.163192 8160 6 6 6 1 0:00:15.034504 8180 4 4 2 1 0:00:15.174437 8200 3 4 4 0 0:00:15.074488 8220 6 7 3 0 0:00:15.004592 8240 7 7 4 1 0:00:15.069523 8260 1 1 1 1 0:00:15.121330 8280 4 5 5 0 0:00:15.144710 8300 5 2 2 0 0:00:15.085654 8320 7 4 4 0 0:00:15.049064 8340 5 3 3 0 0:00:15.140270 8360 7 7 7 1 0:00:15.177059 8380 9 9 8 1 0:00:15.145665 8400 0 8 8 0 0:00:15.040423 8420 3 6 6 0 0:00:15.066425 8440 5 3 2 0 0:00:15.139798 8460 8 8 8 1 0:00:15.086075 8480 2 2 4 1 0:00:15.197523 8500 4 4 2 1 0:00:15.075444 8520 8 1 8 0 0:00:15.106404 8540 4 2 2 0 0:00:15.235061 8560 7 7 4 1 0:00:15.175405 8580 3 2 2 0 0:00:15.104087 8600 3 2 4 0 0:00:15.117052 8620 3 4 4 0 0:00:15.111407 8640 1 1 8 1 0:00:15.085284 8660 7 7 7 1 0:00:15.122522 8680 3 8 8 0 0:00:15.133996 8700 3 7 7 0 0:00:15.125834 8720 2 0 0 0 0:00:15.084394 8740 2 2 5 1 0:00:15.160688 8760 8 8 8 1 0:00:15.084788 8780 4 4 2 1 0:00:15.095371 8800 0 0 2 1 0:00:15.131981 8820 2 2 2 1 0:00:15.122861 8840 2 7 7 0 0:00:15.058310 8860 2 2 6 1 0:00:15.052907 8880 1 1 4 1 0:00:15.117930 8900 2 4 4 0 0:00:15.011915 8920 6 6 2 1 0:00:15.063017 8940 6 3 3 0 0:00:15.071470 8960 0 0 2 1 0:00:15.096987 8980 9 9 9 1 0:00:15.070340 9000 8 8 8 1 0:00:15.220846 9020 7 7 7 1 0:00:15.188173 9040 2 5 3 0 0:00:15.089607 9060 9 9 3 1 0:00:15.083805 9080 3 2 2 0 0:00:15.131578 9100 9 5 3 0 0:00:15.055555 9120 3 0 0 0 0:00:15.131123 9140 7 6 2 0 0:00:15.100854 9160 5 3 3 0 0:00:15.090460 9180 5 5 5 1 0:00:15.110565 9200 8 8 8 1 0:00:15.094365 9220 8 8 8 1 0:00:15.038104 9240 8 8 8 1 0:00:15.043156 9260 5 5 5 1 0:00:15.111029 9280 9 8 8 0 0:00:15.101587 9300 5 2 2 0 0:00:15.095212 9320 9 9 8 1 0:00:15.127853 9340 1 1 1 1 0:00:15.167255 9360 5 0 0 0 0:00:15.053834 9380 5 3 3 0 0:00:15.111635 9400 6 6 2 1 0:00:15.053699 9420 5 5 5 1 0:00:15.039689 9440 8 8 8 1 0:00:15.037478 9460 3 7 7 0 0:00:15.173676 9480 2 4 4 0 0:00:15.117239 9500 9 9 5 1 0:00:15.155564 9520 1 1 1 1 0:00:15.096143 9540 5 3 3 0 0:00:15.003508 9560 9 7 5 0 0:00:15.074769 9580 1 1 4 1 0:00:15.208468 9600 8 0 8 0 0:00:15.133180 9620 4 4 4 1 0:00:15.074862 9640 5 2 2 0 0:00:15.101118 9660 6 6 6 1 0:00:15.095451 9680 8 8 8 1 0:00:15.149221 9700 0 0 6 1 0:00:15.047616 9720 8 1 4 0 0:00:15.103400 9740 3 6 2 0 0:00:15.101487 9760 9 3 3 0 0:00:15.276959 9780 4 2 2 0 0:00:15.097571 9800 1 1 1 1 0:00:14.978163 9820 0 8 8 0 0:00:15.161626 9840 4 5 3 0 0:00:15.168517 9860 0 5 5 0 0:00:15.079943 9880 7 8 3 0 0:00:15.061665 9900 8 8 8 1 0:00:15.094760 9920 6 6 4 1 0:00:15.128899 9940 4 5 3 0 0:00:15.190511 9960 2 0 0 0 0:00:15.071107 9980 0 0 0 1 0:00:15.028454
open Pcre

let read_whole_channel ch =
  let size = 4096 in
  let strbuf = Bytes.create size in
  let buf = Buffer.create 65536 in
  let len = ref size in
  while !len <> 0 do
    len := input ch strbuf 0 size;
    Buffer.add_subbytes buf strbuf 0 !len
  done;
  Buffer.contents buf

let () =
  let str = read_whole_channel stdin in
  let str = qreplace ~pat:"/\\*(.|\n)*?\\*/" str in
  let str = qreplace_first ~pat:"^(\n|\\s)+" str in
  let str = qreplace ~pat:"\n+((\n|\\s)\n)*" ~templ:"\n" str in
  print_string str
describe('fab.html', () => {
  beforeEach(() => {
    browser.get('/bindings/angular2/examples/fab.html');
  });

  it('should have ons-fab elements', () => {
    expect($('ons-fab').isPresent()).toBeTruthy();
  });
});
[
  {
    "request": {
      "body": "{\"ordType\":\"Limit\",\"orderQty\":\"1000000\",\"price\":\"10000.5\",\"side\":\"Buy\",\"symbol\":\"XBTH19\",\"timeInForce\":\"GoodTillCancel\"}",
      "headers": {
        "Content-Type": "application/json",
        "api-nonce": "***",
        "api-key": "***",
        "api-signature": "***"
      },
      "method": "post",
      "options": [],
      "request_body": "",
      "url": "https://testnet.bitmex.com/api/v1/order"
    },
    "response": {
      "binary": false,
      "body": "{\"error\":{\"message\":\"The system is currently overloaded. Please try again later.\",\"name\":\"HTTPError\"}}",
      "headers": {
        "Date": "Fri, 14 Dec 2018 05:56:24 GMT",
        "Content-Type": "application/json; charset=utf-8",
        "Content-Length": "115",
        "Connection": "keep-alive",
        "X-RateLimit-Limit": "300",
        "X-RateLimit-Remaining": "299",
        "X-RateLimit-Reset": "1544766984",
        "X-Powered-By": "Profit",
        "ETag": "W/\"73-Hv1mXKgyWgBBzbesQar8Uy2ViJ4\"",
        "Strict-Transport-Security": "max-age=31536000; includeSubDomains"
      },
      "status_code": 503,
      "type": "ok"
    }
  }
]
git https://gitlab.com/DataLinkDroid/omer-count.git
package staticcheck import "honnef.co/go/tools/lint" var Docs = map[string]*lint.Documentation{ "SA1000": { Title: `Invalid regular expression`, Since: "2017.1", }, "SA1001": { Title: `Invalid template`, Since: "2017.1", }, "SA1002": { Title: `Invalid format in time.Parse`, Since: "2017.1", }, "SA1003": { Title: `Unsupported argument to functions in encoding/binary`, Text: `The encoding/binary package can only serialize types with known sizes. This precludes the use of the int and uint types, as their sizes differ on different architectures. Furthermore, it doesn't support serializing maps, channels, strings, or functions. Before Go 1.8, bool wasn't supported, either.`, Since: "2017.1", }, "SA1004": { Title: `Suspiciously small untyped constant in time.Sleep`, Text: `The time.Sleep function takes a time.Duration as its only argument. Durations are expressed in nanoseconds. Thus, calling time.Sleep(1) will sleep for 1 nanosecond. This is a common source of bugs, as sleep functions in other languages often accept seconds or milliseconds. The time package provides constants such as time.Second to express large durations. These can be combined with arithmetic to express arbitrary durations, for example '5 * time.Second' for 5 seconds. If you truly meant to sleep for a tiny amount of time, use 'n * time.Nanosecond' to signal to Staticcheck that you did mean to sleep for some amount of nanoseconds.`, Since: "2017.1", }, "SA1005": { Title: `Invalid first argument to exec.Command`, Text: `os/exec runs programs directly (using variants of the fork and exec system calls on Unix systems). This shouldn't be confused with running a command in a shell. The shell will allow for features such as input redirection, pipes, and general scripting. The shell is also responsible for splitting the user's input into a program name and its arguments. For example, the equivalent to ls / /tmp would be exec.Command("ls", "/", "/tmp") If you want to run a command in a shell, consider using something like the following – but be aware that not all systems, particularly Windows, will have a /bin/sh program: exec.Command("/bin/sh", "-c", "ls | grep Awesome")`, Since: "2017.1", }, "SA1006": { Title: `Printf with dynamic first argument and no further arguments`, Text: `Using fmt.Printf with a dynamic first argument can lead to unexpected output. The first argument is a format string, where certain character combinations have special meaning. If, for example, a user were to enter a string such as Interest rate: 5% and you printed it with fmt.Printf(s) it would lead to the following output: Interest rate: 5%!(NOVERB). Similarly, forming the first parameter via string concatenation with user input should be avoided for the same reason. When printing user input, either use a variant of fmt.Print, or use the %s Printf verb and pass the string as an argument.`, Since: "2017.1", }, "SA1007": { Title: `Invalid URL in net/url.Parse`, Since: "2017.1", }, "SA1008": { Title: `Non-canonical key in http.Header map`, Text: `Keys in http.Header maps are canonical, meaning they follow a specific combination of uppercase and lowercase letters. Methods such as http.Header.Add and http.Header.Del convert inputs into this canonical form before manipulating the map. When manipulating http.Header maps directly, as opposed to using the provided methods, care should be taken to stick to canonical form in order to avoid inconsistencies. 
The following piece of code demonstrates one such inconsistency: h := http.Header{} h["etag"] = []string{"1234"} h.Add("etag", "5678") fmt.Println(h) // Output: // map[Etag:[5678] etag:[1234]] The easiest way of obtaining the canonical form of a key is to use http.CanonicalHeaderKey.`, Since: "2017.1", }, "SA1010": { Title: `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results`, Text: `If n >= 0, the function returns at most n matches/submatches. To return all results, specify a negative number.`, Since: "2017.1", }, "SA1011": { Title: `Various methods in the strings package expect valid UTF-8, but invalid input is provided`, Since: "2017.1", }, "SA1012": { Title: `A nil context.Context is being passed to a function, consider using context.TODO instead`, Since: "2017.1", }, "SA1013": { Title: `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second`, Since: "2017.1", }, "SA1014": { Title: `Non-pointer value passed to Unmarshal or Decode`, Since: "2017.1", }, "SA1015": { Title: `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions`, Since: "2017.1", }, "SA1016": { Title: `Trapping a signal that cannot be trapped`, Text: `Not all signals can be intercepted by a process. Speficially, on UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are never passed to the process, but instead handled directly by the kernel. It is therefore pointless to try and handle these signals.`, Since: "2017.1", }, "SA1017": { Title: `Channels used with os/signal.Notify should be buffered`, Text: `The os/signal package uses non-blocking channel sends when delivering signals. If the receiving end of the channel isn't ready and the channel is either unbuffered or full, the signal will be dropped. To avoid missing signals, the channel should be buffered and of the appropriate size. For a channel used for notification of just one signal value, a buffer of size 1 is sufficient.`, Since: "2017.1", }, "SA1018": { Title: `strings.Replace called with n == 0, which does nothing`, Text: `With n == 0, zero instances will be replaced. To replace all instances, use a negative number, or use strings.ReplaceAll.`, Since: "2017.1", }, "SA1019": { Title: `Using a deprecated function, variable, constant or field`, Since: "2017.1", }, "SA1020": { Title: `Using an invalid host:port pair with a net.Listen-related function`, Since: "2017.1", }, "SA1021": { Title: `Using bytes.Equal to compare two net.IP`, Text: `A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The length of the slice for an IPv4 address, however, can be either 4 or 16 bytes long, using different ways of representing IPv4 addresses. In order to correctly compare two net.IPs, the net.IP.Equal method should be used, as it takes both representations into account.`, Since: "2017.1", }, "SA1023": { Title: `Modifying the buffer in an io.Writer implementation`, Text: `Write must not modify the slice data, even temporarily.`, Since: "2017.1", }, "SA1024": { Title: `A string cutset contains duplicate characters`, Text: `The strings.TrimLeft and strings.TrimRight functions take cutsets, not prefixes. A cutset is treated as a set of characters to remove from a string. For example, strings.TrimLeft("42133word", "1234")) will result in the string "word" – any characters that are 1, 2, 3 or 4 are cut from the left of the string. 
In order to remove one string from another, use strings.TrimPrefix instead.`, Since: "2017.1", }, "SA1025": { Title: `It is not possible to use (*time.Timer).Reset's return value correctly`, Since: "2019.1", }, "SA1026": { Title: `Cannot marshal channels or functions`, Since: "2019.2", }, "SA1027": { Title: `Atomic access to 64-bit variable must be 64-bit aligned`, Text: `On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to arrange for 64-bit alignment of 64-bit words accessed atomically. The first word in a variable or in an allocated struct, array, or slice can be relied upon to be 64-bit aligned. You can use the structlayout tool to inspect the alignment of fields in a struct.`, Since: "2019.2", }, "SA1028": { Title: `sort.Slice can only be used on slices`, Text: `The first argument of sort.Slice must be a slice.`, Since: "2020.1", }, "SA1029": { Title: `Inappropriate key in call to context.WithValue`, Text: `The provided key must be comparable and should not be of type string or any other built-in type to avoid collisions between packages using context. Users of WithValue should define their own types for keys. To avoid allocating when assigning to an interface{}, context keys often have concrete type struct{}. Alternatively, exported context key variables' static type should be a pointer or interface.`, Since: "2020.1", }, "SA2000": { Title: `sync.WaitGroup.Add called inside the goroutine, leading to a race condition`, Since: "2017.1", }, "SA2001": { Title: `Empty critical section, did you mean to defer the unlock?`, Text: `Empty critical sections of the kind mu.Lock() mu.Unlock() are very often a typo, and the following was intended instead: mu.Lock() defer mu.Unlock() Do note that sometimes empty critical sections can be useful, as a form of signaling to wait on another goroutine. Many times, there are simpler ways of achieving the same effect. When that isn't the case, the code should be amply commented to avoid confusion. Combining such comments with a //lint:ignore directive can be used to suppress this rare false positive.`, Since: "2017.1", }, "SA2002": { Title: `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed`, Since: "2017.1", }, "SA2003": { Title: `Deferred Lock right after locking, likely meant to defer Unlock instead`, Since: "2017.1", }, "SA3000": { Title: `TestMain doesn't call os.Exit, hiding test failures`, Text: `Test executables (and in turn 'go test') exit with a non-zero status code if any tests failed. When specifying your own TestMain function, it is your responsibility to arrange for this, by calling os.Exit with the correct code. The correct code is returned by (*testing.M).Run, so the usual way of implementing TestMain is to end it with os.Exit(m.Run()).`, Since: "2017.1", }, "SA3001": { Title: `Assigning to b.N in benchmarks distorts the results`, Text: `The testing package dynamically sets b.N to improve the reliability of benchmarks and uses it in computations to determine the duration of a single operation. 
Benchmark code must not alter b.N as this would falsify results.`, Since: "2017.1", }, "SA4000": { Title: `Boolean expression has identical expressions on both sides`, Since: "2017.1", }, "SA4001": { Title: `&*x gets simplified to x, it does not copy x`, Since: "2017.1", }, "SA4002": { Title: `Comparing strings with known different sizes has predictable results`, Since: "2017.1", }, "SA4003": { Title: `Comparing unsigned values against negative values is pointless`, Since: "2017.1", }, "SA4004": { Title: `The loop exits unconditionally after one iteration`, Since: "2017.1", }, "SA4005": { Title: `Field assignment that will never be observed. Did you mean to use a pointer receiver?`, Since: "2017.1", }, "SA4006": { Title: `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?`, Since: "2017.1", }, "SA4008": { Title: `The variable in the loop condition never changes, are you incrementing the wrong variable?`, Since: "2017.1", }, "SA4009": { Title: `A function argument is overwritten before its first use`, Since: "2017.1", }, "SA4010": { Title: `The result of append will never be observed anywhere`, Since: "2017.1", }, "SA4011": { Title: `Break statement with no effect. Did you mean to break out of an outer loop?`, Since: "2017.1", }, "SA4012": { Title: `Comparing a value against NaN even though no value is equal to NaN`, Since: "2017.1", }, "SA4013": { Title: `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.`, Since: "2017.1", }, "SA4014": { Title: `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either`, Since: "2017.1", }, "SA4015": { Title: `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful`, Since: "2017.1", }, "SA4016": { Title: `Certain bitwise operations, such as x ^ 0, do not do anything useful`, Since: "2017.1", }, "SA4017": { Title: `A pure function's return value is discarded, making the call pointless`, Since: "2017.1", }, "SA4018": { Title: `Self-assignment of variables`, Since: "2017.1", }, "SA4019": { Title: `Multiple, identical build constraints in the same file`, Since: "2017.1", }, "SA4020": { Title: `Unreachable case clause in a type switch`, Text: `In a type switch like the following type T struct{} func (T) Read(b []byte) (int, error) { return 0, nil } var v interface{} = T{} switch v.(type) { case io.Reader: // ... case T: // unreachable } the second case clause can never be reached because T implements io.Reader and case clauses are evaluated in source order. Another example: type T struct{} func (T) Read(b []byte) (int, error) { return 0, nil } func (T) Close() error { return nil } var v interface{} = T{} switch v.(type) { case io.Reader: // ... case io.ReadCloser: // unreachable } Even though T has a Close method and thus implements io.ReadCloser, io.Reader will always match first. The method set of io.Reader is a subset of io.ReadCloser. Thus it is impossible to match the second case without matching the first case. Structurally equivalent interfaces A special case of the previous example are structurally identical interfaces. Given these declarations type T error type V error func doSomething() error { err, ok := doAnotherThing() if ok { return T(err) } return U(err) } the following type switch will have an unreachable case clause: switch doSomething().(type) { case T: // ... 
case V: // unreachable } T will always match before V because they are structurally equivalent and therefore doSomething()'s return value implements both.`, Since: "2019.2", }, "SA4021": { Title: `x = append(y) is equivalent to x = y`, Since: "2019.2", }, "SA4022": { Title: `Comparing the address of a variable against nil`, Text: `Code such as 'if &x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.`, Since: "2020.1", }, "SA5000": { Title: `Assignment to nil map`, Since: "2017.1", }, "SA5001": { Title: `Defering Close before checking for a possible error`, Since: "2017.1", }, "SA5002": { Title: `The empty for loop (for {}) spins and can block the scheduler`, Since: "2017.1", }, "SA5003": { Title: `Defers in infinite loops will never execute`, Text: `Defers are scoped to the surrounding function, not the surrounding block. In a function that never returns, i.e. one containing an infinite loop, defers will never execute.`, Since: "2017.1", }, "SA5004": { Title: `for { select { ... with an empty default branch spins`, Since: "2017.1", }, "SA5005": { Title: `The finalizer references the finalized object, preventing garbage collection`, Text: `A finalizer is a function associated with an object that runs when the garbage collector is ready to collect said object, that is when the object is no longer referenced by anything. If the finalizer references the object, however, it will always remain as the final reference to that object, preventing the garbage collector from collecting the object. The finalizer will never run, and the object will never be collected, leading to a memory leak. That is why the finalizer should instead use its first argument to operate on the object. That way, the number of references can temporarily go to zero before the object is being passed to the finalizer.`, Since: "2017.1", }, "SA5006": { Title: `Slice index out of bounds`, Since: "2017.1", }, "SA5007": { Title: `Infinite recursive call`, Text: `A function that calls itself recursively needs to have an exit condition. Otherwise it will recurse forever, until the system runs out of memory. This issue can be caused by simple bugs such as forgetting to add an exit condition. It can also happen "on purpose". Some languages have tail call optimization which makes certain infinite recursive calls safe to use. Go, however, does not implement TCO, and as such a loop should be used instead.`, Since: "2017.1", }, "SA5008": { Title: `Invalid struct tag`, Since: "2019.2", }, "SA5009": { Title: `Invalid Printf call`, Since: "2019.2", }, "SA5010": { Title: `Impossible type assertion`, Text: `Some type assertions can be statically proven to be impossible. This is the case when the method sets of both arguments of the type assertion conflict with each other, for example by containing the same method with different signatures. The Go compiler already applies this check when asserting from an interface value to a concrete type. If the concrete type misses methods from the interface, or if function signatures don't match, then the type assertion can never succeed. This check applies the same logic when asserting from one interface to another. If both interface types contain the same method but with different signatures, then the type assertion can never succeed, either.`, Since: "2020.1", }, "SA5011": { Title: `Possible nil pointer dereference`, Text: `A pointer is being dereferenced unconditionally, while also being checked against nil in another place. 
This suggests that the pointer may be nil and dereferencing it may panic. This is commonly a result of improperly ordered code or missing return statements. Consider the following examples: func fn(x *int) { fmt.Println(*x) // This nil check is equally important for the previous dereference if x != nil { foo(*x) } } func TestFoo(t *testing.T) { x := compute() if x == nil { t.Errorf("nil pointer received") } // t.Errorf does not abort the test, so if x is nil, the next line will panic. foo(*x) } Staticcheck tries to deduce which functions abort control flow. For example, it is aware that a function will not continue execution after a call to panic or log.Fatal. However, sometimes this detection fails, in particular in the presence of conditionals. Consider the following example: func Log(msg string, level int) { fmt.Println(msg) if level == levelFatal { os.Exit(1) } } func Fatal(msg string) { Log(msg, levelFatal) } func fn(x *int) { if x == nil { Fatal("unexpected nil pointer") } fmt.Println(*x) } Staticcheck will flag the dereference of x, even though it is perfectly safe. Staticcheck is not able to deduce that a call to Fatal will exit the program. For the time being, the easiest workaround is to modify the definition of Fatal like so: func Fatal(msg string) { Log(msg, levelFatal) panic("unreachable") } We also hard-code functions from common logging packages such as logrus. Please file an issue if we're missing support for a popular package.`, Since: "2020.1", }, "SA6000": { Title: `Using regexp.Match or related in a loop, should use regexp.Compile`, Since: "2017.1", }, "SA6001": { Title: `Missing an optimization opportunity when indexing maps by byte slices`, Text: `Map keys must be comparable, which precludes the use of byte slices. This usually leads to using string keys and converting byte slices to strings. Normally, a conversion of a byte slice to a string needs to copy the data and causes allocations. The compiler, however, recognizes m[string(b)] and uses the data of b directly, without copying it, because it knows that the data can't change during the map lookup. This leads to the counter-intuitive situation that k := string(b) println(m[k]) println(m[k]) will be less efficient than println(m[string(b)]) println(m[string(b)]) because the first version needs to copy and allocate, while the second one does not. For some history on this optimization, check out commit f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.`, Since: "2017.1", }, "SA6002": { Title: `Storing non-pointer values in sync.Pool allocates memory`, Text: `A sync.Pool is used to avoid unnecessary allocations and reduce the amount of work the garbage collector has to do. When passing a value that is not a pointer to a function that accepts an interface, the value needs to be placed on the heap, which means an additional allocation. Slices are a common thing to put in sync.Pools, and they're structs with 3 fields (length, capacity, and a pointer to an array). In order to avoid the extra allocation, one should store a pointer to the slice instead. See the comments on https://go-review.googlesource.com/c/go/+/24371 that discuss this problem.`, Since: "2017.1", }, "SA6003": { Title: `Converting a string to a slice of runes before ranging over it`, Text: `You may want to loop over the runes in a string. Instead of converting the string to a slice of runes and looping over that, you can loop over the string itself. That is, for _, r := range s {} and for _, r := range []rune(s) {} will yield the same values. 
The first version, however, will be faster and avoid unnecessary memory allocations. Do note that if you are interested in the indices, ranging over a string and over a slice of runes will yield different indices. The first one yields byte offsets, while the second one yields indices in the slice of runes.`, Since: "2017.1", }, "SA6005": { Title: `Inefficient string comparison with strings.ToLower or strings.ToUpper`, Text: `Converting two strings to the same case and comparing them like so if strings.ToLower(s1) == strings.ToLower(s2) { ... } is significantly more expensive than comparing them with strings.EqualFold(s1, s2). This is due to memory usage as well as computational complexity. strings.ToLower will have to allocate memory for the new strings, as well as convert both strings fully, even if they differ on the very first byte. strings.EqualFold, on the other hand, compares the strings one character at a time. It doesn't need to create two intermediate strings and can return as soon as the first non-matching character has been found. For a more in-depth explanation of this issue, see https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/`, Since: "2019.2", }, "SA9001": { Title: `Defers in range loops may not run when you expect them to`, Since: "2017.1", }, "SA9002": { Title: `Using a non-octal os.FileMode that looks like it was meant to be in octal.`, Since: "2017.1", }, "SA9003": { Title: `Empty body in an if or else branch`, Since: "2017.1", }, "SA9004": { Title: `Only the first constant has an explicit type`, Text: `In a constant declaration such as the following: const ( First byte = 1 Second = 2 ) the constant Second does not have the same type as the constant First. This construct shouldn't be confused with const ( First byte = iota Second ) where First and Second do indeed have the same type. The type is only passed on when no explicit value is assigned to the constant. When declaring enumerations with explicit values it is therefore important not to write const ( EnumFirst EnumType = 1 EnumSecond = 2 EnumThird = 3 ) This discrepancy in types can cause various confusing behaviors and bugs. Wrong type in variable declarations The most obvious issue with such incorrect enumerations expresses itself as a compile error: package pkg const ( EnumFirst uint8 = 1 EnumSecond = 2 ) func fn(useFirst bool) { x := EnumSecond if useFirst { x = EnumFirst } } fails to compile with ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment Losing method sets A more subtle issue occurs with types that have methods and optional interfaces. Consider the following: package main import "fmt" type Enum int func (e Enum) String() string { return "an enum" } const ( EnumFirst Enum = 1 EnumSecond = 2 ) func main() { fmt.Println(EnumFirst) fmt.Println(EnumSecond) } This code will output an enum 2 as EnumSecond has no explicit type, and thus defaults to int.`, Since: "2019.1", }, "SA9005": { Title: `Trying to marshal a struct with no public fields nor custom marshaling`, Text: `The encoding/json and encoding/xml packages only operate on exported fields in structs, not unexported ones. It is usually an error to try to (un)marshal structs that only consist of unexported fields. This check will not flag calls involving types that define custom marshaling behavior, e.g. via MarshalJSON methods. It will also not flag empty structs.`, Since: "2019.2", }, }
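The SA1006 entry above is easiest to see with a concrete snippet. The following is an illustrative sketch, not part of the staticcheck sources; it only assumes the standard fmt package.

// Illustrative sketch (not from the staticcheck repository): demonstrates the
// SA1006 pattern documented above, where a dynamic string is used as the
// Printf format, followed by two safe alternatives.
package main

import "fmt"

func main() {
	input := "Interest rate: 5%" // pretend this came from a user

	fmt.Printf(input) // flagged by SA1006; prints "Interest rate: 5%!(NOVERB)"
	fmt.Println()

	fmt.Print(input) // safe: no format string is involved
	fmt.Println()

	fmt.Printf("%s\n", input) // safe: the input is passed as an argument to %s
}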
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITBitXorGenerator_h
#define JITBitXorGenerator_h

#if ENABLE(JIT)

#include "JITBitBinaryOpGenerator.h"

namespace JSC {

class JITBitXorGenerator : public JITBitBinaryOpGenerator {
public:
    JITBitXorGenerator(const SnippetOperand& leftOperand, const SnippetOperand& rightOperand,
        JSValueRegs result, JSValueRegs left, JSValueRegs right, GPRReg unused = InvalidGPRReg)
        : JITBitBinaryOpGenerator(leftOperand, rightOperand, result, left, right, unused)
    { }

    void generateFastPath(CCallHelpers&);
};

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITBitXorGenerator_h
{ "pile_set_name": "Github" }
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
{
  'includes': [
    '../../coreconf/config.gypi',
    '../../cmd/platlibs.gypi'
  ],
  'targets': [
    {
      'target_name': 'fipstest',
      'type': 'executable',
      'sources': [
        'fipstest.c'
      ],
      'dependencies': [
        '<(DEPTH)/exports.gyp:nss_exports',
        '<(DEPTH)/lib/sqlite/sqlite.gyp:sqlite3'
      ]
    }
  ],
  'target_defaults': {
    'defines': [
      'NSS_USE_STATIC_LIBS'
    ]
  },
  'variables': {
    'module': 'nss',
    'use_static_libs': 1
  }
}
{ "pile_set_name": "Github" }
centralService_first:
  service_code: "centralServiceCode"
  target_service_id: <%= ActiveRecord::Fixtures.identify(:identifier_firstCentralService) %>
{ "pile_set_name": "Github" }
#N canvas 100 100 1856 460 10; #X text 20 8 Created with makeorgan.pl; #X obj 928 40 inlet; #X obj 928 80 unpack f f f; #X obj 928 120 mod 88; #X obj 928 160 pack f f f; #X obj 928 360 *~ 0.0113636363636364; #X obj 928 200 route 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87; #X obj 928 420 outlet~; #X obj 48 260 voice~; #X obj 68 261 voice~; #X obj 88 262 voice~; #X obj 108 263 voice~; #X obj 128 264 voice~; #X obj 148 265 voice~; #X obj 168 266 voice~; #X obj 188 267 voice~; #X obj 208 268 voice~; #X obj 228 269 voice~; #X obj 248 270 voice~; #X obj 268 271 voice~; #X obj 288 272 voice~; #X obj 308 273 voice~; #X obj 328 274 voice~; #X obj 348 275 voice~; #X obj 368 276 voice~; #X obj 388 277 voice~; #X obj 408 277 voice~; #X obj 428 278 voice~; #X obj 448 279 voice~; #X obj 468 280 voice~; #X obj 488 281 voice~; #X obj 508 281 voice~; #X obj 528 282 voice~; #X obj 548 283 voice~; #X obj 568 284 voice~; #X obj 588 284 voice~; #X obj 608 285 voice~; #X obj 628 285 voice~; #X obj 648 286 voice~; #X obj 668 286 voice~; #X obj 688 287 voice~; #X obj 708 287 voice~; #X obj 728 288 voice~; #X obj 748 288 voice~; #X obj 768 288 voice~; #X obj 788 289 voice~; #X obj 808 289 voice~; #X obj 828 289 voice~; #X obj 848 289 voice~; #X obj 868 289 voice~; #X obj 888 289 voice~; #X obj 908 289 voice~; #X obj 928 289 voice~; #X obj 948 289 voice~; #X obj 968 289 voice~; #X obj 988 289 voice~; #X obj 1008 289 voice~; #X obj 1028 289 voice~; #X obj 1048 289 voice~; #X obj 1068 289 voice~; #X obj 1088 288 voice~; #X obj 1108 288 voice~; #X obj 1128 288 voice~; #X obj 1148 287 voice~; #X obj 1168 287 voice~; #X obj 1188 286 voice~; #X obj 1208 286 voice~; #X obj 1228 285 voice~; #X obj 1248 285 voice~; #X obj 1268 284 voice~; #X obj 1288 284 voice~; #X obj 1308 283 voice~; #X obj 1328 282 voice~; #X obj 1348 281 voice~; #X obj 1368 281 voice~; #X obj 1388 280 voice~; #X obj 1408 279 voice~; #X obj 1428 278 voice~; #X obj 1448 277 voice~; #X obj 1468 277 voice~; #X obj 1488 276 voice~; #X obj 1508 275 voice~; #X obj 1528 274 voice~; #X obj 1548 273 voice~; #X obj 1568 272 voice~; #X obj 1588 271 voice~; #X obj 1608 270 voice~; #X obj 1628 269 voice~; #X obj 1648 268 voice~; #X obj 1668 267 voice~; #X obj 1688 266 voice~; #X obj 1708 265 voice~; #X obj 1728 264 voice~; #X obj 1748 263 voice~; #X obj 1768 262 voice~; #X obj 1788 261 voice~; #X connect 1 0 2 0; #X connect 2 0 3 0; #X connect 2 1 4 1; #X connect 2 2 4 2; #X connect 3 0 4 0; #X connect 4 0 6 0; #X connect 5 0 7 0; #X connect 6 0 8 0; #X connect 6 1 9 0; #X connect 6 2 10 0; #X connect 6 3 11 0; #X connect 6 4 12 0; #X connect 6 5 13 0; #X connect 6 6 14 0; #X connect 6 7 15 0; #X connect 6 8 16 0; #X connect 6 9 17 0; #X connect 6 10 18 0; #X connect 6 11 19 0; #X connect 6 12 20 0; #X connect 6 13 21 0; #X connect 6 14 22 0; #X connect 6 15 23 0; #X connect 6 16 24 0; #X connect 6 17 25 0; #X connect 6 18 26 0; #X connect 6 19 27 0; #X connect 6 20 28 0; #X connect 6 21 29 0; #X connect 6 22 30 0; #X connect 6 23 31 0; #X connect 6 24 32 0; #X connect 6 25 33 0; #X connect 6 26 34 0; #X connect 6 27 35 0; #X connect 6 28 36 0; #X connect 6 29 37 0; #X connect 6 30 38 0; #X connect 6 31 39 0; #X connect 6 32 40 0; #X connect 6 33 41 0; #X connect 6 34 42 0; #X connect 6 35 43 0; #X connect 6 36 44 0; #X connect 6 37 45 0; #X connect 6 38 46 0; 
#X connect 6 39 47 0; #X connect 6 40 48 0; #X connect 6 41 49 0; #X connect 6 42 50 0; #X connect 6 43 51 0; #X connect 6 44 52 0; #X connect 6 45 53 0; #X connect 6 46 54 0; #X connect 6 47 55 0; #X connect 6 48 56 0; #X connect 6 49 57 0; #X connect 6 50 58 0; #X connect 6 51 59 0; #X connect 6 52 60 0; #X connect 6 53 61 0; #X connect 6 54 62 0; #X connect 6 55 63 0; #X connect 6 56 64 0; #X connect 6 57 65 0; #X connect 6 58 66 0; #X connect 6 59 67 0; #X connect 6 60 68 0; #X connect 6 61 69 0; #X connect 6 62 70 0; #X connect 6 63 71 0; #X connect 6 64 72 0; #X connect 6 65 73 0; #X connect 6 66 74 0; #X connect 6 67 75 0; #X connect 6 68 76 0; #X connect 6 69 77 0; #X connect 6 70 78 0; #X connect 6 71 79 0; #X connect 6 72 80 0; #X connect 6 73 81 0; #X connect 6 74 82 0; #X connect 6 75 83 0; #X connect 6 76 84 0; #X connect 6 77 85 0; #X connect 6 78 86 0; #X connect 6 79 87 0; #X connect 6 80 88 0; #X connect 6 81 89 0; #X connect 6 82 90 0; #X connect 6 83 91 0; #X connect 6 84 92 0; #X connect 6 85 93 0; #X connect 6 86 94 0; #X connect 6 87 95 0; #X connect 8 0 5 0; #X connect 9 0 5 0; #X connect 10 0 5 0; #X connect 11 0 5 0; #X connect 12 0 5 0; #X connect 13 0 5 0; #X connect 14 0 5 0; #X connect 15 0 5 0; #X connect 16 0 5 0; #X connect 17 0 5 0; #X connect 18 0 5 0; #X connect 19 0 5 0; #X connect 20 0 5 0; #X connect 21 0 5 0; #X connect 22 0 5 0; #X connect 23 0 5 0; #X connect 24 0 5 0; #X connect 25 0 5 0; #X connect 26 0 5 0; #X connect 27 0 5 0; #X connect 28 0 5 0; #X connect 29 0 5 0; #X connect 30 0 5 0; #X connect 31 0 5 0; #X connect 32 0 5 0; #X connect 33 0 5 0; #X connect 34 0 5 0; #X connect 35 0 5 0; #X connect 36 0 5 0; #X connect 37 0 5 0; #X connect 38 0 5 0; #X connect 39 0 5 0; #X connect 40 0 5 0; #X connect 41 0 5 0; #X connect 42 0 5 0; #X connect 43 0 5 0; #X connect 44 0 5 0; #X connect 45 0 5 0; #X connect 46 0 5 0; #X connect 47 0 5 0; #X connect 48 0 5 0; #X connect 49 0 5 0; #X connect 50 0 5 0; #X connect 51 0 5 0; #X connect 52 0 5 0; #X connect 53 0 5 0; #X connect 54 0 5 0; #X connect 55 0 5 0; #X connect 56 0 5 0; #X connect 57 0 5 0; #X connect 58 0 5 0; #X connect 59 0 5 0; #X connect 60 0 5 0; #X connect 61 0 5 0; #X connect 62 0 5 0; #X connect 63 0 5 0; #X connect 64 0 5 0; #X connect 65 0 5 0; #X connect 66 0 5 0; #X connect 67 0 5 0; #X connect 68 0 5 0; #X connect 69 0 5 0; #X connect 70 0 5 0; #X connect 71 0 5 0; #X connect 72 0 5 0; #X connect 73 0 5 0; #X connect 74 0 5 0; #X connect 75 0 5 0; #X connect 76 0 5 0; #X connect 77 0 5 0; #X connect 78 0 5 0; #X connect 79 0 5 0; #X connect 80 0 5 0; #X connect 81 0 5 0; #X connect 82 0 5 0; #X connect 83 0 5 0; #X connect 84 0 5 0; #X connect 85 0 5 0; #X connect 86 0 5 0; #X connect 87 0 5 0; #X connect 88 0 5 0; #X connect 89 0 5 0; #X connect 90 0 5 0; #X connect 91 0 5 0; #X connect 92 0 5 0; #X connect 93 0 5 0; #X connect 94 0 5 0; #X connect 95 0 5 0;
{ "pile_set_name": "Github" }
' Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. ' SPDX-License-Identifier: CC-BY-ND-2.0 (For details, see https://github.com/awslabs/aws-icons-for-plantuml/blob/master/LICENSE) sprite $Satellite [64x64/16z] { xTO5ekn644DHbPggfEn_kuyPsqxoSUum73FXPtKf7by3RHvj07psqqVJ6wzZkGxuYxzuttRQspdwIFcSTpK40qoVyXKnvvYBIEzx--7PFHP1n2ArwrspCO9z oPLaPVN8slJM0DZ7w0I6N4NSQkzvPs2Kxwg1I4yacwKjxuLMjkwoYyY9HMhFEz7bbpFJa0s1jkSLrEi1YbHfONh3Uw2iLpanRUrvHRVUv1xoG6ru9_AzJ-J4 j3Ry27tGux--yrvexqVl1IphU-y9TB_LIQ1Pttid1b__qre4isrvaMxq2rmZm2MpRtdCVoRaMseh1n4zRGjpopDMObcgDLeW-f8jUQ5slAi12CRjSnif-sc- jE7bsn63SsN9zlDQoJqlsMvHjb_laDu__bkCUiM3XyuVBc8-7pydA1_niZeuIpVL37JvwFdBsO2u2MZf4-S_kzReVQoopvq_VT_b-k5xV_57U-s7wc3vof_e l_DWEnEVHnkdmf_GDG } AWSEntityColoring(Satellite) !define Satellite(e_alias, e_label, e_techn) AWSEntity(e_alias, e_label, e_techn, #3B48CC, Satellite, Satellite) !define Satellite(e_alias, e_label, e_techn, e_descr) AWSEntity(e_alias, e_label, e_techn, e_descr, #3B48CC, Satellite, Satellite) !define SatelliteParticipant(p_alias, p_label, p_techn) AWSParticipant(p_alias, p_label, p_techn, #3B48CC, Satellite, Satellite) !define SatelliteParticipant(p_alias, p_label, p_techn, p_descr) AWSParticipant(p_alias, p_label, p_techn, p_descr, #3B48CC, Satellite, Satellite)
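' A minimal usage sketch for the Satellite macros defined above. This is not part
' of the distributed file; the file names below and the surrounding AWSCommon.puml
' include (which defines AWSEntity/AWSParticipant) are assumptions.
@startuml
!include AWSCommon.puml
!include Satellite.puml

Satellite(mySat, "Imaging satellite", "Ground Station contact")
@enduml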
{ "pile_set_name": "Github" }
/*******************************************************************************
 * Copyright (c)2007 Actuate Corporation.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *  Actuate Corporation - initial API and implementation
 *******************************************************************************/

package org.eclipse.birt.report.engine.api;

import java.util.Collection;

/*
 *
 */
public interface ICascadingParameterSelectionChoice extends IParameterSelectionChoice
{
	Collection getChildSelectionList( );
}
{ "pile_set_name": "Github" }
/*********************************************************************** A JavaScript tokenizer / parser / beautifier / compressor. https://github.com/mishoo/UglifyJS2 -------------------------------- (C) --------------------------------- Author: Mihai Bazon <[email protected]> http://mihai.bazon.net/blog Distributed under the BSD license: Copyright 2012 (c) Mihai Bazon <[email protected]> Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ***********************************************************************/ "use strict"; function DEFNODE(type, props, methods, base) { if (arguments.length < 4) base = AST_Node; if (!props) props = []; else props = props.split(/\s+/); var self_props = props; if (base && base.PROPS) props = props.concat(base.PROPS); var code = "return function AST_" + type + "(props){ if (props) { "; for (var i = props.length; --i >= 0;) { code += "this." + props[i] + " = props." 
+ props[i] + ";"; } var proto = base && new base; if (proto && proto.initialize || (methods && methods.initialize)) code += "this.initialize();"; code += "}}"; var ctor = new Function(code)(); if (proto) { ctor.prototype = proto; ctor.BASE = base; } if (base) base.SUBCLASSES.push(ctor); ctor.prototype.CTOR = ctor; ctor.PROPS = props || null; ctor.SELF_PROPS = self_props; ctor.SUBCLASSES = []; if (type) { ctor.prototype.TYPE = ctor.TYPE = type; } if (methods) for (i in methods) if (methods.hasOwnProperty(i)) { if (/^\$/.test(i)) { ctor[i.substr(1)] = methods[i]; } else { ctor.prototype[i] = methods[i]; } } ctor.DEFMETHOD = function(name, method) { this.prototype[name] = method; }; exports["AST_" + type] = ctor; return ctor; }; var AST_Token = DEFNODE("Token", "type value line col pos endline endcol endpos nlb comments_before file raw", { }, null); var AST_Node = DEFNODE("Node", "start end", { clone: function() { return new this.CTOR(this); }, $documentation: "Base class of all AST nodes", $propdoc: { start: "[AST_Token] The first token of this node", end: "[AST_Token] The last token of this node" }, _walk: function(visitor) { return visitor._visit(this); }, walk: function(visitor) { return this._walk(visitor); // not sure the indirection will be any help } }, null); AST_Node.warn_function = null; AST_Node.warn = function(txt, props) { if (AST_Node.warn_function) AST_Node.warn_function(string_template(txt, props)); }; /* -----[ statements ]----- */ var AST_Statement = DEFNODE("Statement", null, { $documentation: "Base class of all statements", }); var AST_Debugger = DEFNODE("Debugger", null, { $documentation: "Represents a debugger statement", }, AST_Statement); var AST_Directive = DEFNODE("Directive", "value scope quote", { $documentation: "Represents a directive, like \"use strict\";", $propdoc: { value: "[string] The value of this directive as a plain string (it's not an AST_String!)", scope: "[AST_Scope/S] The scope that this directive affects", quote: "[string] the original quote character" }, }, AST_Statement); var AST_SimpleStatement = DEFNODE("SimpleStatement", "body", { $documentation: "A statement consisting of an expression, i.e. 
a = 1 + 2", $propdoc: { body: "[AST_Node] an expression node (should not be instanceof AST_Statement)" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.body._walk(visitor); }); } }, AST_Statement); function walk_body(node, visitor) { if (node.body instanceof AST_Statement) { node.body._walk(visitor); } else node.body.forEach(function(stat){ stat._walk(visitor); }); }; var AST_Block = DEFNODE("Block", "body", { $documentation: "A body of statements (usually bracketed)", $propdoc: { body: "[AST_Statement*] an array of statements" }, _walk: function(visitor) { return visitor._visit(this, function(){ walk_body(this, visitor); }); } }, AST_Statement); var AST_BlockStatement = DEFNODE("BlockStatement", null, { $documentation: "A block statement", }, AST_Block); var AST_EmptyStatement = DEFNODE("EmptyStatement", null, { $documentation: "The empty statement (empty block or simply a semicolon)", _walk: function(visitor) { return visitor._visit(this); } }, AST_Statement); var AST_StatementWithBody = DEFNODE("StatementWithBody", "body", { $documentation: "Base class for all statements that contain one nested body: `For`, `ForIn`, `Do`, `While`, `With`", $propdoc: { body: "[AST_Statement] the body; this should always be present, even if it's an AST_EmptyStatement" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.body._walk(visitor); }); } }, AST_Statement); var AST_LabeledStatement = DEFNODE("LabeledStatement", "label", { $documentation: "Statement with a label", $propdoc: { label: "[AST_Label] a label definition" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.label._walk(visitor); this.body._walk(visitor); }); } }, AST_StatementWithBody); var AST_IterationStatement = DEFNODE("IterationStatement", null, { $documentation: "Internal class. All loops inherit from it." }, AST_StatementWithBody); var AST_DWLoop = DEFNODE("DWLoop", "condition", { $documentation: "Base class for do/while statements", $propdoc: { condition: "[AST_Node] the loop condition. Should not be instanceof AST_Statement" } }, AST_IterationStatement); var AST_Do = DEFNODE("Do", null, { $documentation: "A `do` statement", _walk: function(visitor) { return visitor._visit(this, function(){ this.body._walk(visitor); this.condition._walk(visitor); }); } }, AST_DWLoop); var AST_While = DEFNODE("While", null, { $documentation: "A `while` statement", _walk: function(visitor) { return visitor._visit(this, function(){ this.condition._walk(visitor); this.body._walk(visitor); }); } }, AST_DWLoop); var AST_For = DEFNODE("For", "init condition step", { $documentation: "A `for` statement", $propdoc: { init: "[AST_Node?] the `for` initialization code, or null if empty", condition: "[AST_Node?] the `for` termination clause, or null if empty", step: "[AST_Node?] the `for` update clause, or null if empty" }, _walk: function(visitor) { return visitor._visit(this, function(){ if (this.init) this.init._walk(visitor); if (this.condition) this.condition._walk(visitor); if (this.step) this.step._walk(visitor); this.body._walk(visitor); }); } }, AST_IterationStatement); var AST_ForIn = DEFNODE("ForIn", "init name object", { $documentation: "A `for ... in` statement", $propdoc: { init: "[AST_Node] the `for/in` initialization code", name: "[AST_SymbolRef?] 
the loop variable, only if `init` is AST_Var", object: "[AST_Node] the object that we're looping through" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.init._walk(visitor); this.object._walk(visitor); this.body._walk(visitor); }); } }, AST_IterationStatement); var AST_With = DEFNODE("With", "expression", { $documentation: "A `with` statement", $propdoc: { expression: "[AST_Node] the `with` expression" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); this.body._walk(visitor); }); } }, AST_StatementWithBody); /* -----[ scope and functions ]----- */ var AST_Scope = DEFNODE("Scope", "directives variables functions uses_with uses_eval parent_scope enclosed cname", { $documentation: "Base class for all statements introducing a lexical scope", $propdoc: { directives: "[string*/S] an array of directives declared in this scope", variables: "[Object/S] a map of name -> SymbolDef for all variables/functions defined in this scope", functions: "[Object/S] like `variables`, but only lists function declarations", uses_with: "[boolean/S] tells whether this scope uses the `with` statement", uses_eval: "[boolean/S] tells whether this scope contains a direct call to the global `eval`", parent_scope: "[AST_Scope?/S] link to the parent scope", enclosed: "[SymbolDef*/S] a list of all symbol definitions that are accessed from this scope or any subscopes", cname: "[integer/S] current index for mangling variables (used internally by the mangler)", }, }, AST_Block); var AST_Toplevel = DEFNODE("Toplevel", "globals", { $documentation: "The toplevel scope", $propdoc: { globals: "[Object/S] a map of name -> SymbolDef for all undeclared names", }, wrap_enclose: function(arg_parameter_pairs) { var self = this; var args = []; var parameters = []; arg_parameter_pairs.forEach(function(pair) { var splitAt = pair.lastIndexOf(":"); args.push(pair.substr(0, splitAt)); parameters.push(pair.substr(splitAt + 1)); }); var wrapped_tl = "(function(" + parameters.join(",") + "){ '$ORIG'; })(" + args.join(",") + ")"; wrapped_tl = parse(wrapped_tl); wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){ if (node instanceof AST_Directive && node.value == "$ORIG") { return MAP.splice(self.body); } })); return wrapped_tl; }, wrap_commonjs: function(name, export_all) { var self = this; var to_export = []; if (export_all) { self.figure_out_scope(); self.walk(new TreeWalker(function(node){ if (node instanceof AST_SymbolDeclaration && node.definition().global) { if (!find_if(function(n){ return n.name == node.name }, to_export)) to_export.push(node); } })); } var wrapped_tl = "(function(exports, global){ '$ORIG'; '$EXPORTS'; global['" + name + "'] = exports; }({}, (function(){return this}())))"; wrapped_tl = parse(wrapped_tl); wrapped_tl = wrapped_tl.transform(new TreeTransformer(function before(node){ if (node instanceof AST_Directive) { switch (node.value) { case "$ORIG": return MAP.splice(self.body); case "$EXPORTS": var body = []; to_export.forEach(function(sym){ body.push(new AST_SimpleStatement({ body: new AST_Assign({ left: new AST_Sub({ expression: new AST_SymbolRef({ name: "exports" }), property: new AST_String({ value: sym.name }), }), operator: "=", right: new AST_SymbolRef(sym), }), })); }); return MAP.splice(body); } } })); return wrapped_tl; } }, AST_Scope); var AST_Lambda = DEFNODE("Lambda", "name argnames uses_arguments", { $documentation: "Base class for functions", $propdoc: { name: "[AST_SymbolDeclaration?] 
the name of this function", argnames: "[AST_SymbolFunarg*] array of function arguments", uses_arguments: "[boolean/S] tells whether this function accesses the arguments array" }, _walk: function(visitor) { return visitor._visit(this, function(){ if (this.name) this.name._walk(visitor); this.argnames.forEach(function(arg){ arg._walk(visitor); }); walk_body(this, visitor); }); } }, AST_Scope); var AST_Accessor = DEFNODE("Accessor", null, { $documentation: "A setter/getter function. The `name` property is always null." }, AST_Lambda); var AST_Function = DEFNODE("Function", null, { $documentation: "A function expression" }, AST_Lambda); var AST_Defun = DEFNODE("Defun", null, { $documentation: "A function definition" }, AST_Lambda); /* -----[ JUMPS ]----- */ var AST_Jump = DEFNODE("Jump", null, { $documentation: "Base class for “jumps” (for now that's `return`, `throw`, `break` and `continue`)" }, AST_Statement); var AST_Exit = DEFNODE("Exit", "value", { $documentation: "Base class for “exits” (`return` and `throw`)", $propdoc: { value: "[AST_Node?] the value returned or thrown by this statement; could be null for AST_Return" }, _walk: function(visitor) { return visitor._visit(this, this.value && function(){ this.value._walk(visitor); }); } }, AST_Jump); var AST_Return = DEFNODE("Return", null, { $documentation: "A `return` statement" }, AST_Exit); var AST_Throw = DEFNODE("Throw", null, { $documentation: "A `throw` statement" }, AST_Exit); var AST_LoopControl = DEFNODE("LoopControl", "label", { $documentation: "Base class for loop control statements (`break` and `continue`)", $propdoc: { label: "[AST_LabelRef?] the label, or null if none", }, _walk: function(visitor) { return visitor._visit(this, this.label && function(){ this.label._walk(visitor); }); } }, AST_Jump); var AST_Break = DEFNODE("Break", null, { $documentation: "A `break` statement" }, AST_LoopControl); var AST_Continue = DEFNODE("Continue", null, { $documentation: "A `continue` statement" }, AST_LoopControl); /* -----[ IF ]----- */ var AST_If = DEFNODE("If", "condition alternative", { $documentation: "A `if` statement", $propdoc: { condition: "[AST_Node] the `if` condition", alternative: "[AST_Statement?] the `else` part, or null if not present" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.condition._walk(visitor); this.body._walk(visitor); if (this.alternative) this.alternative._walk(visitor); }); } }, AST_StatementWithBody); /* -----[ SWITCH ]----- */ var AST_Switch = DEFNODE("Switch", "expression", { $documentation: "A `switch` statement", $propdoc: { expression: "[AST_Node] the `switch` “discriminant”" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); walk_body(this, visitor); }); } }, AST_Block); var AST_SwitchBranch = DEFNODE("SwitchBranch", null, { $documentation: "Base class for `switch` branches", }, AST_Block); var AST_Default = DEFNODE("Default", null, { $documentation: "A `default` switch branch", }, AST_SwitchBranch); var AST_Case = DEFNODE("Case", "expression", { $documentation: "A `case` switch branch", $propdoc: { expression: "[AST_Node] the `case` expression" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); walk_body(this, visitor); }); } }, AST_SwitchBranch); /* -----[ EXCEPTIONS ]----- */ var AST_Try = DEFNODE("Try", "bcatch bfinally", { $documentation: "A `try` statement", $propdoc: { bcatch: "[AST_Catch?] the catch block, or null if not present", bfinally: "[AST_Finally?] 
the finally block, or null if not present" }, _walk: function(visitor) { return visitor._visit(this, function(){ walk_body(this, visitor); if (this.bcatch) this.bcatch._walk(visitor); if (this.bfinally) this.bfinally._walk(visitor); }); } }, AST_Block); var AST_Catch = DEFNODE("Catch", "argname", { $documentation: "A `catch` node; only makes sense as part of a `try` statement", $propdoc: { argname: "[AST_SymbolCatch] symbol for the exception" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.argname._walk(visitor); walk_body(this, visitor); }); } }, AST_Block); var AST_Finally = DEFNODE("Finally", null, { $documentation: "A `finally` node; only makes sense as part of a `try` statement" }, AST_Block); /* -----[ VAR/CONST ]----- */ var AST_Definitions = DEFNODE("Definitions", "definitions", { $documentation: "Base class for `var` or `const` nodes (variable declarations/initializations)", $propdoc: { definitions: "[AST_VarDef*] array of variable definitions" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.definitions.forEach(function(def){ def._walk(visitor); }); }); } }, AST_Statement); var AST_Var = DEFNODE("Var", null, { $documentation: "A `var` statement" }, AST_Definitions); var AST_Const = DEFNODE("Const", null, { $documentation: "A `const` statement" }, AST_Definitions); var AST_VarDef = DEFNODE("VarDef", "name value", { $documentation: "A variable declaration; only appears in a AST_Definitions node", $propdoc: { name: "[AST_SymbolVar|AST_SymbolConst] name of the variable", value: "[AST_Node?] initializer, or null of there's no initializer" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.name._walk(visitor); if (this.value) this.value._walk(visitor); }); } }); /* -----[ OTHER ]----- */ var AST_Call = DEFNODE("Call", "expression args", { $documentation: "A function call expression", $propdoc: { expression: "[AST_Node] expression to invoke as function", args: "[AST_Node*] array of arguments" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); this.args.forEach(function(arg){ arg._walk(visitor); }); }); } }); var AST_New = DEFNODE("New", null, { $documentation: "An object instantiation. 
Derives from a function call since it has exactly the same properties" }, AST_Call); var AST_Seq = DEFNODE("Seq", "car cdr", { $documentation: "A sequence expression (two comma-separated expressions)", $propdoc: { car: "[AST_Node] first element in sequence", cdr: "[AST_Node] second element in sequence" }, $cons: function(x, y) { var seq = new AST_Seq(x); seq.car = x; seq.cdr = y; return seq; }, $from_array: function(array) { if (array.length == 0) return null; if (array.length == 1) return array[0].clone(); var list = null; for (var i = array.length; --i >= 0;) { list = AST_Seq.cons(array[i], list); } var p = list; while (p) { if (p.cdr && !p.cdr.cdr) { p.cdr = p.cdr.car; break; } p = p.cdr; } return list; }, to_array: function() { var p = this, a = []; while (p) { a.push(p.car); if (p.cdr && !(p.cdr instanceof AST_Seq)) { a.push(p.cdr); break; } p = p.cdr; } return a; }, add: function(node) { var p = this; while (p) { if (!(p.cdr instanceof AST_Seq)) { var cell = AST_Seq.cons(p.cdr, node); return p.cdr = cell; } p = p.cdr; } }, _walk: function(visitor) { return visitor._visit(this, function(){ this.car._walk(visitor); if (this.cdr) this.cdr._walk(visitor); }); } }); var AST_PropAccess = DEFNODE("PropAccess", "expression property", { $documentation: "Base class for property access expressions, i.e. `a.foo` or `a[\"foo\"]`", $propdoc: { expression: "[AST_Node] the “container” expression", property: "[AST_Node|string] the property to access. For AST_Dot this is always a plain string, while for AST_Sub it's an arbitrary AST_Node" } }); var AST_Dot = DEFNODE("Dot", null, { $documentation: "A dotted property access expression", _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); }); } }, AST_PropAccess); var AST_Sub = DEFNODE("Sub", null, { $documentation: "Index-style property access, i.e. `a[\"foo\"]`", _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); this.property._walk(visitor); }); } }, AST_PropAccess); var AST_Unary = DEFNODE("Unary", "operator expression", { $documentation: "Base class for unary expressions", $propdoc: { operator: "[string] the operator", expression: "[AST_Node] expression that this unary operator applies to" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.expression._walk(visitor); }); } }); var AST_UnaryPrefix = DEFNODE("UnaryPrefix", null, { $documentation: "Unary prefix expression, i.e. `typeof i` or `++i`" }, AST_Unary); var AST_UnaryPostfix = DEFNODE("UnaryPostfix", null, { $documentation: "Unary postfix expression, i.e. `i++`" }, AST_Unary); var AST_Binary = DEFNODE("Binary", "left operator right", { $documentation: "Binary expression, i.e. `a + b`", $propdoc: { left: "[AST_Node] left-hand side expression", operator: "[string] the operator", right: "[AST_Node] right-hand side expression" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.left._walk(visitor); this.right._walk(visitor); }); } }); var AST_Conditional = DEFNODE("Conditional", "condition consequent alternative", { $documentation: "Conditional expression using the ternary operator, i.e. `a ? 
b : c`", $propdoc: { condition: "[AST_Node]", consequent: "[AST_Node]", alternative: "[AST_Node]" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.condition._walk(visitor); this.consequent._walk(visitor); this.alternative._walk(visitor); }); } }); var AST_Assign = DEFNODE("Assign", null, { $documentation: "An assignment expression — `a = b + 5`", }, AST_Binary); /* -----[ LITERALS ]----- */ var AST_Array = DEFNODE("Array", "elements", { $documentation: "An array literal", $propdoc: { elements: "[AST_Node*] array of elements" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.elements.forEach(function(el){ el._walk(visitor); }); }); } }); var AST_Object = DEFNODE("Object", "properties", { $documentation: "An object literal", $propdoc: { properties: "[AST_ObjectProperty*] array of properties" }, _walk: function(visitor) { return visitor._visit(this, function(){ this.properties.forEach(function(prop){ prop._walk(visitor); }); }); } }); var AST_ObjectProperty = DEFNODE("ObjectProperty", "key value", { $documentation: "Base class for literal object properties", $propdoc: { key: "[string] the property name converted to a string for ObjectKeyVal. For setters and getters this is an arbitrary AST_Node.", value: "[AST_Node] property value. For setters and getters this is an AST_Function." }, _walk: function(visitor) { return visitor._visit(this, function(){ this.value._walk(visitor); }); } }); var AST_ObjectKeyVal = DEFNODE("ObjectKeyVal", "quote", { $documentation: "A key: value object property", $propdoc: { quote: "[string] the original quote character" } }, AST_ObjectProperty); var AST_ObjectSetter = DEFNODE("ObjectSetter", null, { $documentation: "An object setter property", }, AST_ObjectProperty); var AST_ObjectGetter = DEFNODE("ObjectGetter", null, { $documentation: "An object getter property", }, AST_ObjectProperty); var AST_Symbol = DEFNODE("Symbol", "scope name thedef", { $propdoc: { name: "[string] name of this symbol", scope: "[AST_Scope/S] the current scope (not necessarily the definition scope)", thedef: "[SymbolDef/S] the definition of this symbol" }, $documentation: "Base class for all symbols", }); var AST_SymbolAccessor = DEFNODE("SymbolAccessor", null, { $documentation: "The name of a property accessor (setter/getter function)" }, AST_Symbol); var AST_SymbolDeclaration = DEFNODE("SymbolDeclaration", "init", { $documentation: "A declaration symbol (symbol in var/const, function name or argument, symbol in catch)", $propdoc: { init: "[AST_Node*/S] array of initializers for this declaration." 
} }, AST_Symbol); var AST_SymbolVar = DEFNODE("SymbolVar", null, { $documentation: "Symbol defining a variable", }, AST_SymbolDeclaration); var AST_SymbolConst = DEFNODE("SymbolConst", null, { $documentation: "A constant declaration" }, AST_SymbolDeclaration); var AST_SymbolFunarg = DEFNODE("SymbolFunarg", null, { $documentation: "Symbol naming a function argument", }, AST_SymbolVar); var AST_SymbolDefun = DEFNODE("SymbolDefun", null, { $documentation: "Symbol defining a function", }, AST_SymbolDeclaration); var AST_SymbolLambda = DEFNODE("SymbolLambda", null, { $documentation: "Symbol naming a function expression", }, AST_SymbolDeclaration); var AST_SymbolCatch = DEFNODE("SymbolCatch", null, { $documentation: "Symbol naming the exception in catch", }, AST_SymbolDeclaration); var AST_Label = DEFNODE("Label", "references", { $documentation: "Symbol naming a label (declaration)", $propdoc: { references: "[AST_LoopControl*] a list of nodes referring to this label" }, initialize: function() { this.references = []; this.thedef = this; } }, AST_Symbol); var AST_SymbolRef = DEFNODE("SymbolRef", null, { $documentation: "Reference to some symbol (not definition/declaration)", }, AST_Symbol); var AST_LabelRef = DEFNODE("LabelRef", null, { $documentation: "Reference to a label symbol", }, AST_Symbol); var AST_This = DEFNODE("This", null, { $documentation: "The `this` symbol", }, AST_Symbol); var AST_Constant = DEFNODE("Constant", null, { $documentation: "Base class for all constants", getValue: function() { return this.value; } }); var AST_String = DEFNODE("String", "value quote", { $documentation: "A string literal", $propdoc: { value: "[string] the contents of this string", quote: "[string] the original quote character" } }, AST_Constant); var AST_Number = DEFNODE("Number", "value literal", { $documentation: "A number literal", $propdoc: { value: "[number] the numeric value", literal: "[string] numeric value as string (optional)" } }, AST_Constant); var AST_RegExp = DEFNODE("RegExp", "value", { $documentation: "A regexp literal", $propdoc: { value: "[RegExp] the actual regexp" } }, AST_Constant); var AST_Atom = DEFNODE("Atom", null, { $documentation: "Base class for atoms", }, AST_Constant); var AST_Null = DEFNODE("Null", null, { $documentation: "The `null` atom", value: null }, AST_Atom); var AST_NaN = DEFNODE("NaN", null, { $documentation: "The impossible value", value: 0/0 }, AST_Atom); var AST_Undefined = DEFNODE("Undefined", null, { $documentation: "The `undefined` value", value: (function(){}()) }, AST_Atom); var AST_Hole = DEFNODE("Hole", null, { $documentation: "A hole in an array", value: (function(){}()) }, AST_Atom); var AST_Infinity = DEFNODE("Infinity", null, { $documentation: "The `Infinity` value", value: 1/0 }, AST_Atom); var AST_Boolean = DEFNODE("Boolean", null, { $documentation: "Base class for booleans", }, AST_Atom); var AST_False = DEFNODE("False", null, { $documentation: "The `false` atom", value: false }, AST_Boolean); var AST_True = DEFNODE("True", null, { $documentation: "The `true` atom", value: true }, AST_Boolean); /* -----[ TreeWalker ]----- */ function TreeWalker(callback) { this.visit = callback; this.stack = []; this.directives = Object.create(null); }; TreeWalker.prototype = { _visit: function(node, descend) { this.push(node); var ret = this.visit(node, descend ? 
function(){ descend.call(node); } : noop); if (!ret && descend) { descend.call(node); } this.pop(node); return ret; }, parent: function(n) { return this.stack[this.stack.length - 2 - (n || 0)]; }, push: function (node) { if (node instanceof AST_Lambda) { this.directives = Object.create(this.directives); } else if (node instanceof AST_Directive) { this.directives[node.value] = this.directives[node.value] ? "up" : true; } this.stack.push(node); }, pop: function(node) { this.stack.pop(); if (node instanceof AST_Lambda) { this.directives = Object.getPrototypeOf(this.directives); } }, self: function() { return this.stack[this.stack.length - 1]; }, find_parent: function(type) { var stack = this.stack; for (var i = stack.length; --i >= 0;) { var x = stack[i]; if (x instanceof type) return x; } }, has_directive: function(type) { var dir = this.directives[type]; if (dir) return dir; var node = this.stack[this.stack.length - 1]; if (node instanceof AST_Scope) { for (var i = 0; i < node.body.length; ++i) { var st = node.body[i]; if (!(st instanceof AST_Directive)) break; if (st.value == type) return true; } } }, in_boolean_context: function() { var stack = this.stack; var i = stack.length, self = stack[--i]; while (i > 0) { var p = stack[--i]; if ((p instanceof AST_If && p.condition === self) || (p instanceof AST_Conditional && p.condition === self) || (p instanceof AST_DWLoop && p.condition === self) || (p instanceof AST_For && p.condition === self) || (p instanceof AST_UnaryPrefix && p.operator == "!" && p.expression === self)) { return true; } if (!(p instanceof AST_Binary && (p.operator == "&&" || p.operator == "||"))) return false; self = p; } }, loopcontrol_target: function(label) { var stack = this.stack; if (label) for (var i = stack.length; --i >= 0;) { var x = stack[i]; if (x instanceof AST_LabeledStatement && x.label.name == label.name) { return x.body; } } else for (var i = stack.length; --i >= 0;) { var x = stack[i]; if (x instanceof AST_Switch || x instanceof AST_IterationStatement) return x; } } };
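A small usage sketch for the walker defined above. It is not part of ast.js itself; it assumes the bundled uglify-js 2.x package, which exports these classes.

// Illustrative sketch (not part of ast.js): walking a parsed AST with the
// TreeWalker defined above, via the uglify-js 2.x package exports.
var UglifyJS = require("uglify-js"); // assumes uglify-js 2.x is installed

var ast = UglifyJS.parse("function add(a, b) { return a + b; }");

ast.walk(new UglifyJS.TreeWalker(function (node) {
    // AST_SymbolRef covers references such as `a` and `b` inside the body.
    if (node instanceof UglifyJS.AST_SymbolRef) {
        console.log("symbol reference:", node.name);
    }
}));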
{ "pile_set_name": "Github" }
# The MIT License
#
# Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Reginaldo L. Russinholi, Cleiber Silva
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

Schedule=Agenda
{ "pile_set_name": "Github" }
package main // Serves human-readable status information over http. import ( "html/template" "net/http" "time" ) var ( templ = template.Must(template.New("status").Parse(templText)) upSince = time.Now() ) func HandleStatus(w http.ResponseWriter, r *http.Request) { RLock() defer RUnlock() if r.URL.Path != "/" { http.Error(w, "Does not compute", http.StatusNotFound) return } err := templ.Execute(w, &status{}) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } } type status struct{} // dummy type to define template methods on func (*status) IPRange() string { return *flag_scan + ": " + *flag_ports } func (*status) Ports() string { return *flag_ports } func (*status) ThisAddr() string { return thisAddr } func (*status) Uptime() time.Duration { return Since(time.Now(), upSince) } func (*status) MumaxVersion() string { return MumaxVersion } func (*status) GPUs() []string { return GPUs } func (*status) Processes() map[string]*Process { return Processes } func (*status) Users() map[string]*User { return Users } func (*status) NextUser() string { return nextUser() } func (*status) Peers() map[string]*Peer { return peers } func (*status) FS(a string) string { return FS(a) } const templText = ` {{define "Job"}} <tr class={{.Status}}> <td class={{.Status}}> [<a class={{.Status}} href="http://{{.FS .ID}}">{{.LocalPath}}</a>] </td> <td class={{.Status}}> [{{with .Output}}<a href="http://{{$.FS $.Output}}">.out</a>{{end}}] </td> <td class={{.Status}}> [{{with .Output}}<a onclick='doEvent("rm", "{{$.ID}}")'>rm</a>{{end}}]</td> <td class={{.Status}}> [{{with .Host}}<a href="http://{{.}}">{{.}}</a>{{end}}] </td> <td class={{.Status}}> [{{with .ExitStatus}}{{if eq . "0"}} OK {{else}}<a class={{$.Status}} href="http://{{$.FS $.Output}}stdout.txt">FAIL</a>{{end}}{{end}}] </td> <td class={{.Status}}> [{{with .Output}}{{$.Duration}}{{end}}{{with .RequeCount}} {{.}}x re-queued{{end}}{{with .Error}} {{.}}{{end}}] </td> </tr> {{end}} <html> <head> <style> body{font-family:monospace; margin-left:5%; margin-top:1em} p{margin-left: 2em} h3{margin-left: 2em} a{text-decoration: none; color:#0000AA} a:hover{text-decoration: underline; cursor: hand;} .FAILED{color:red; font-weight:bold} .RUNNING{font-weight: bold; color:blue} .QUEUED{color:black} .FINISHED{color: grey} .active, .collapsible:hover {cursor:pointer; font-weight:normal; background-color:#eee; width:50%;} </style> </head> <script> function doEvent(method, arg){ try{ var req = new XMLHttpRequest(); var URL = "http://" + window.location.hostname + ":" + window.location.port + "/do/" + method + "/" + arg; req.open("GET", URL, false); req.send(null); }catch(e){ alert(e); } location.reload(); } function refreshPage () { document.location.reload(true); } setTimeout(refreshPage, 60000); </script> <body> <h1>{{.ThisAddr}}</h1> Uptime: {{.Uptime}} <br/> <h2>Peer nodes</h2> <b>scan</b> {{.IPRange}}<br/> <b>ports</b> {{.Ports}}<br/> <button onclick='doEvent("Rescan", "")'>Rescan</button> <br/> {{range $k,$v := .Peers}} <a href="http://{{$k}}">{{$k}}</a> <br/> {{end}} <h2>Compute service</h2><p> <b>mumax:</b> {{with .MumaxVersion}} {{.}} {{else}} not available<br/> {{end}} <br/> {{with .GPUs}} {{range $i, $v := .}} <b>GPU{{$i}}</b>: {{$v}}<br/> {{end}} {{else}} No GPUs available<br/> {{end}} </p> <h3>Running jobs</h3><p> <table> {{range $k,$v := .Processes}} <tr> <td> [<a href="http://{{$.FS $k}}">{{$k}}</a>] </td> <td> [{{$v.Duration}}]</td> <td> [<a href="http://{{$v.GUI}}">GUI</a>]</td> <td> <button onclick='doEvent("Kill", 
"{{$k}}")'>kill</button> </td> </tr> {{end}} </table> </p> <h2>Queue service</h2><p> <h3>Users</h3><p> <table> {{range $k,$v := .Users}} <tr> <td>{{$k}}</td><td>{{$v.FairShare}} GPU-seconds</td><td>{{with .HasJob}} has {{else}} no {{end}} queued jobs</td> </tr>{{end}} </table> <b>Next job for:</b> {{.NextUser}} </p> <h3>Jobs</h3> <button onclick='doEvent("LoadJobs", "")'>Reload all</button> (consider reloading just your own files). <br/> <button onclick='doEvent("WakeupWatchdog", "")'>Wake-up Watchdog</button> (re-queue dead simulations right now). {{range $k,$v := .Users}} <a id="{{$k}}"></a> <h3 title="Click to show/hide" class="collapsible" onclick='this.classList.toggle("active");var cont=this.nextElementSibling;if (cont.style.display==="none") {cont.style.display="block"; window.location.hash = "{{$k}}";} else cont.style.display = "none";'> &dtrif; {{$k}}</h3><p> <b>Jobs</b> <button onclick='doEvent("LoadUserJobs", "{{$k}}")'>Reload</button> (only needed when you changed your files on disk) <table> {{range $v.Jobs}} {{template "Job" .}} {{end}} </table> </p> {{end}} </p> <script> //let's collapse all job lists. var collapsibleElements = document.getElementsByClassName("collapsible"); var hash = self.location.hash; for (var i = 0; i < collapsibleElements.length; i++) { if(hash=="" || !collapsibleElements[i].textContent.includes(hash.split("#")[1]) ) { //If there's an anchor link. Let's open that user ! collapsibleElements[i].classList.toggle("active"); var cont = collapsibleElements[i].nextElementSibling; cont.style.display=(cont.style.display==="none"?"block":"none"); } } </script> </body> </html> `
{ "pile_set_name": "Github" }
/* * Copyright (c) 2008 open80211s Ltd. * Author: Luis Carlos Cobo <[email protected]> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. */ #include "mesh.h" #define TEST_FRAME_LEN 8192 #define MAX_METRIC 0xffffffff #define ARITH_SHIFT 8 /* Number of frames buffered per destination for unresolved destinations */ #define MESH_FRAME_QUEUE_LEN 10 #define MAX_PREQ_QUEUE_LEN 64 /* Destination only */ #define MP_F_DO 0x1 /* Reply and forward */ #define MP_F_RF 0x2 static inline u32 u32_field_get(u8 *preq_elem, int offset, bool ae) { if (ae) offset += 6; return get_unaligned_le32(preq_elem + offset); } /* HWMP IE processing macros */ #define AE_F (1<<6) #define AE_F_SET(x) (*x & AE_F) #define PREQ_IE_FLAGS(x) (*(x)) #define PREQ_IE_HOPCOUNT(x) (*(x + 1)) #define PREQ_IE_TTL(x) (*(x + 2)) #define PREQ_IE_PREQ_ID(x) u32_field_get(x, 3, 0) #define PREQ_IE_ORIG_ADDR(x) (x + 7) #define PREQ_IE_ORIG_DSN(x) u32_field_get(x, 13, 0); #define PREQ_IE_LIFETIME(x) u32_field_get(x, 17, AE_F_SET(x)); #define PREQ_IE_METRIC(x) u32_field_get(x, 21, AE_F_SET(x)); #define PREQ_IE_DST_F(x) (*(AE_F_SET(x) ? x + 32 : x + 26)) #define PREQ_IE_DST_ADDR(x) (AE_F_SET(x) ? x + 33 : x + 27) #define PREQ_IE_DST_DSN(x) u32_field_get(x, 33, AE_F_SET(x)); #define PREP_IE_FLAGS(x) PREQ_IE_FLAGS(x) #define PREP_IE_HOPCOUNT(x) PREQ_IE_HOPCOUNT(x) #define PREP_IE_TTL(x) PREQ_IE_TTL(x) #define PREP_IE_ORIG_ADDR(x) (x + 3) #define PREP_IE_ORIG_DSN(x) u32_field_get(x, 9, 0); #define PREP_IE_LIFETIME(x) u32_field_get(x, 13, AE_F_SET(x)); #define PREP_IE_METRIC(x) u32_field_get(x, 17, AE_F_SET(x)); #define PREP_IE_DST_ADDR(x) (AE_F_SET(x) ? x + 27 : x + 21) #define PREP_IE_DST_DSN(x) u32_field_get(x, 27, AE_F_SET(x)); #define PERR_IE_DST_ADDR(x) (x + 2) #define PERR_IE_DST_DSN(x) u32_field_get(x, 8, 0); #define MSEC_TO_TU(x) (x*1000/1024) #define DSN_GT(x, y) ((long) (y) - (long) (x) < 0) #define DSN_LT(x, y) ((long) (x) - (long) (y) < 0) #define net_traversal_jiffies(s) \ msecs_to_jiffies(s->u.mesh.mshcfg.dot11MeshHWMPnetDiameterTraversalTime) #define default_lifetime(s) \ MSEC_TO_TU(s->u.mesh.mshcfg.dot11MeshHWMPactivePathTimeout) #define min_preq_int_jiff(s) \ (msecs_to_jiffies(s->u.mesh.mshcfg.dot11MeshHWMPpreqMinInterval)) #define max_preq_retries(s) (s->u.mesh.mshcfg.dot11MeshHWMPmaxPREQretries) #define disc_timeout_jiff(s) \ msecs_to_jiffies(sdata->u.mesh.mshcfg.min_discovery_timeout) enum mpath_frame_type { MPATH_PREQ = 0, MPATH_PREP, MPATH_PERR }; static int mesh_path_sel_frame_tx(enum mpath_frame_type action, u8 flags, u8 *orig_addr, __le32 orig_dsn, u8 dst_flags, u8 *dst, __le32 dst_dsn, u8 *da, u8 hop_count, u8 ttl, __le32 lifetime, __le32 metric, __le32 preq_id, struct ieee80211_sub_if_data *sdata) { struct ieee80211_local *local = sdata->local; struct sk_buff *skb = dev_alloc_skb(local->hw.extra_tx_headroom + 400); struct ieee80211_mgmt *mgmt; u8 *pos; int ie_len; if (!skb) return -1; skb_reserve(skb, local->hw.extra_tx_headroom); /* 25 is the size of the common mgmt part (24) plus the size of the * common action part (1) */ mgmt = (struct ieee80211_mgmt *) skb_put(skb, 25 + sizeof(mgmt->u.action.u.mesh_action)); memset(mgmt, 0, 25 + sizeof(mgmt->u.action.u.mesh_action)); mgmt->frame_control = cpu_to_le16(IEEE80211_FTYPE_MGMT | IEEE80211_STYPE_ACTION); memcpy(mgmt->da, da, ETH_ALEN); memcpy(mgmt->sa, sdata->dev->dev_addr, ETH_ALEN); /* BSSID is left zeroed, wildcard value */ 
mgmt->u.action.category = MESH_PATH_SEL_CATEGORY; mgmt->u.action.u.mesh_action.action_code = action; switch (action) { case MPATH_PREQ: ie_len = 37; pos = skb_put(skb, 2 + ie_len); *pos++ = WLAN_EID_PREQ; break; case MPATH_PREP: ie_len = 31; pos = skb_put(skb, 2 + ie_len); *pos++ = WLAN_EID_PREP; break; default: kfree_skb(skb); return -ENOTSUPP; break; } *pos++ = ie_len; *pos++ = flags; *pos++ = hop_count; *pos++ = ttl; if (action == MPATH_PREQ) { memcpy(pos, &preq_id, 4); pos += 4; } memcpy(pos, orig_addr, ETH_ALEN); pos += ETH_ALEN; memcpy(pos, &orig_dsn, 4); pos += 4; memcpy(pos, &lifetime, 4); pos += 4; memcpy(pos, &metric, 4); pos += 4; if (action == MPATH_PREQ) { /* destination count */ *pos++ = 1; *pos++ = dst_flags; } memcpy(pos, dst, ETH_ALEN); pos += ETH_ALEN; memcpy(pos, &dst_dsn, 4); ieee80211_tx_skb(sdata, skb, 1); return 0; } /** * mesh_send_path error - Sends a PERR mesh management frame * * @dst: broken destination * @dst_dsn: dsn of the broken destination * @ra: node this frame is addressed to */ int mesh_path_error_tx(u8 *dst, __le32 dst_dsn, u8 *ra, struct ieee80211_sub_if_data *sdata) { struct ieee80211_local *local = sdata->local; struct sk_buff *skb = dev_alloc_skb(local->hw.extra_tx_headroom + 400); struct ieee80211_mgmt *mgmt; u8 *pos; int ie_len; if (!skb) return -1; skb_reserve(skb, local->hw.extra_tx_headroom); /* 25 is the size of the common mgmt part (24) plus the size of the * common action part (1) */ mgmt = (struct ieee80211_mgmt *) skb_put(skb, 25 + sizeof(mgmt->u.action.u.mesh_action)); memset(mgmt, 0, 25 + sizeof(mgmt->u.action.u.mesh_action)); mgmt->frame_control = cpu_to_le16(IEEE80211_FTYPE_MGMT | IEEE80211_STYPE_ACTION); memcpy(mgmt->da, ra, ETH_ALEN); memcpy(mgmt->sa, sdata->dev->dev_addr, ETH_ALEN); /* BSSID is left zeroed, wildcard value */ mgmt->u.action.category = MESH_PATH_SEL_CATEGORY; mgmt->u.action.u.mesh_action.action_code = MPATH_PERR; ie_len = 12; pos = skb_put(skb, 2 + ie_len); *pos++ = WLAN_EID_PERR; *pos++ = ie_len; /* mode flags, reserved */ *pos++ = 0; /* number of destinations */ *pos++ = 1; memcpy(pos, dst, ETH_ALEN); pos += ETH_ALEN; memcpy(pos, &dst_dsn, 4); ieee80211_tx_skb(sdata, skb, 1); return 0; } void ieee80211s_update_metric(struct ieee80211_local *local, struct sta_info *stainfo, struct sk_buff *skb) { struct ieee80211_tx_info *txinfo = IEEE80211_SKB_CB(skb); struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; int failed; if (!ieee80211_is_data(hdr->frame_control)) return; failed = !(txinfo->flags & IEEE80211_TX_STAT_ACK); /* moving average, scaled to 100 */ stainfo->fail_avg = ((80 * stainfo->fail_avg + 5) / 100 + 20 * failed); if (stainfo->fail_avg > 95) mesh_plink_broken(stainfo); } static u32 airtime_link_metric_get(struct ieee80211_local *local, struct sta_info *sta) { struct ieee80211_supported_band *sband; /* This should be adjusted for each device */ int device_constant = 1 << ARITH_SHIFT; int test_frame_len = TEST_FRAME_LEN << ARITH_SHIFT; int s_unit = 1 << ARITH_SHIFT; int rate, err; u32 tx_time, estimated_retx; u64 result; sband = local->hw.wiphy->bands[local->hw.conf.channel->band]; if (sta->fail_avg >= 100) return MAX_METRIC; if (sta->last_tx_rate.flags & IEEE80211_TX_RC_MCS) return MAX_METRIC; err = (sta->fail_avg << ARITH_SHIFT) / 100; /* bitrate is in units of 100 Kbps, while we need rate in units of * 1Mbps. This will be corrected on tx_time computation. 
*/ rate = sband->bitrates[sta->last_tx_rate.idx].bitrate; tx_time = (device_constant + 10 * test_frame_len / rate); estimated_retx = ((1 << (2 * ARITH_SHIFT)) / (s_unit - err)); result = (tx_time * estimated_retx) >> (2 * ARITH_SHIFT) ; return (u32)result; } /** * hwmp_route_info_get - Update routing info to originator and transmitter * * @sdata: local mesh subif * @mgmt: mesh management frame * @hwmp_ie: hwmp information element (PREP or PREQ) * * This function updates the path routing information to the originator and the * transmitter of a HWMP PREQ or PREP frame. * * Returns: metric to frame originator or 0 if the frame should not be further * processed * * Notes: this function is the only place (besides user-provided info) where * path routing information is updated. */ static u32 hwmp_route_info_get(struct ieee80211_sub_if_data *sdata, struct ieee80211_mgmt *mgmt, u8 *hwmp_ie) { struct ieee80211_local *local = sdata->local; struct mesh_path *mpath; struct sta_info *sta; bool fresh_info; u8 *orig_addr, *ta; u32 orig_dsn, orig_metric; unsigned long orig_lifetime, exp_time; u32 last_hop_metric, new_metric; bool process = true; u8 action = mgmt->u.action.u.mesh_action.action_code; rcu_read_lock(); sta = sta_info_get(local, mgmt->sa); if (!sta) { rcu_read_unlock(); return 0; } last_hop_metric = airtime_link_metric_get(local, sta); /* Update and check originator routing info */ fresh_info = true; switch (action) { case MPATH_PREQ: orig_addr = PREQ_IE_ORIG_ADDR(hwmp_ie); orig_dsn = PREQ_IE_ORIG_DSN(hwmp_ie); orig_lifetime = PREQ_IE_LIFETIME(hwmp_ie); orig_metric = PREQ_IE_METRIC(hwmp_ie); break; case MPATH_PREP: /* Originator here refers to the MP that was the destination in * the Path Request. The draft refers to that MP as the * destination address, even though usually it is the origin of * the PREP frame. We divert from the nomenclature in the draft * so that we can easily use a single function to gather path * information from both PREQ and PREP frames. */ orig_addr = PREP_IE_ORIG_ADDR(hwmp_ie); orig_dsn = PREP_IE_ORIG_DSN(hwmp_ie); orig_lifetime = PREP_IE_LIFETIME(hwmp_ie); orig_metric = PREP_IE_METRIC(hwmp_ie); break; default: rcu_read_unlock(); return 0; } new_metric = orig_metric + last_hop_metric; if (new_metric < orig_metric) new_metric = MAX_METRIC; exp_time = TU_TO_EXP_TIME(orig_lifetime); if (memcmp(orig_addr, sdata->dev->dev_addr, ETH_ALEN) == 0) { /* This MP is the originator, we are not interested in this * frame, except for updating transmitter's path info. */ process = false; fresh_info = false; } else { mpath = mesh_path_lookup(orig_addr, sdata); if (mpath) { spin_lock_bh(&mpath->state_lock); if (mpath->flags & MESH_PATH_FIXED) fresh_info = false; else if ((mpath->flags & MESH_PATH_ACTIVE) && (mpath->flags & MESH_PATH_DSN_VALID)) { if (DSN_GT(mpath->dsn, orig_dsn) || (mpath->dsn == orig_dsn && action == MPATH_PREQ && new_metric > mpath->metric)) { process = false; fresh_info = false; } } } else { mesh_path_add(orig_addr, sdata); mpath = mesh_path_lookup(orig_addr, sdata); if (!mpath) { rcu_read_unlock(); return 0; } spin_lock_bh(&mpath->state_lock); } if (fresh_info) { mesh_path_assign_nexthop(mpath, sta); mpath->flags |= MESH_PATH_DSN_VALID; mpath->metric = new_metric; mpath->dsn = orig_dsn; mpath->exp_time = time_after(mpath->exp_time, exp_time) ? 
mpath->exp_time : exp_time; mesh_path_activate(mpath); spin_unlock_bh(&mpath->state_lock); mesh_path_tx_pending(mpath); /* draft says preq_id should be saved to, but there does * not seem to be any use for it, skipping by now */ } else spin_unlock_bh(&mpath->state_lock); } /* Update and check transmitter routing info */ ta = mgmt->sa; if (memcmp(orig_addr, ta, ETH_ALEN) == 0) fresh_info = false; else { fresh_info = true; mpath = mesh_path_lookup(ta, sdata); if (mpath) { spin_lock_bh(&mpath->state_lock); if ((mpath->flags & MESH_PATH_FIXED) || ((mpath->flags & MESH_PATH_ACTIVE) && (last_hop_metric > mpath->metric))) fresh_info = false; } else { mesh_path_add(ta, sdata); mpath = mesh_path_lookup(ta, sdata); if (!mpath) { rcu_read_unlock(); return 0; } spin_lock_bh(&mpath->state_lock); } if (fresh_info) { mesh_path_assign_nexthop(mpath, sta); mpath->flags &= ~MESH_PATH_DSN_VALID; mpath->metric = last_hop_metric; mpath->exp_time = time_after(mpath->exp_time, exp_time) ? mpath->exp_time : exp_time; mesh_path_activate(mpath); spin_unlock_bh(&mpath->state_lock); mesh_path_tx_pending(mpath); } else spin_unlock_bh(&mpath->state_lock); } rcu_read_unlock(); return process ? new_metric : 0; } static void hwmp_preq_frame_process(struct ieee80211_sub_if_data *sdata, struct ieee80211_mgmt *mgmt, u8 *preq_elem, u32 metric) { struct ieee80211_if_mesh *ifmsh = &sdata->u.mesh; struct mesh_path *mpath; u8 *dst_addr, *orig_addr; u8 dst_flags, ttl; u32 orig_dsn, dst_dsn, lifetime; bool reply = false; bool forward = true; /* Update destination DSN, if present */ dst_addr = PREQ_IE_DST_ADDR(preq_elem); orig_addr = PREQ_IE_ORIG_ADDR(preq_elem); dst_dsn = PREQ_IE_DST_DSN(preq_elem); orig_dsn = PREQ_IE_ORIG_DSN(preq_elem); dst_flags = PREQ_IE_DST_F(preq_elem); if (memcmp(dst_addr, sdata->dev->dev_addr, ETH_ALEN) == 0) { forward = false; reply = true; metric = 0; if (time_after(jiffies, ifmsh->last_dsn_update + net_traversal_jiffies(sdata)) || time_before(jiffies, ifmsh->last_dsn_update)) { dst_dsn = ++ifmsh->dsn; ifmsh->last_dsn_update = jiffies; } } else { rcu_read_lock(); mpath = mesh_path_lookup(dst_addr, sdata); if (mpath) { if ((!(mpath->flags & MESH_PATH_DSN_VALID)) || DSN_LT(mpath->dsn, dst_dsn)) { mpath->dsn = dst_dsn; mpath->flags |= MESH_PATH_DSN_VALID; } else if ((!(dst_flags & MP_F_DO)) && (mpath->flags & MESH_PATH_ACTIVE)) { reply = true; metric = mpath->metric; dst_dsn = mpath->dsn; if (dst_flags & MP_F_RF) dst_flags |= MP_F_DO; else forward = false; } } rcu_read_unlock(); } if (reply) { lifetime = PREQ_IE_LIFETIME(preq_elem); ttl = ifmsh->mshcfg.dot11MeshTTL; if (ttl != 0) mesh_path_sel_frame_tx(MPATH_PREP, 0, dst_addr, cpu_to_le32(dst_dsn), 0, orig_addr, cpu_to_le32(orig_dsn), mgmt->sa, 0, ttl, cpu_to_le32(lifetime), cpu_to_le32(metric), 0, sdata); else ifmsh->mshstats.dropped_frames_ttl++; } if (forward) { u32 preq_id; u8 hopcount, flags; ttl = PREQ_IE_TTL(preq_elem); lifetime = PREQ_IE_LIFETIME(preq_elem); if (ttl <= 1) { ifmsh->mshstats.dropped_frames_ttl++; return; } --ttl; flags = PREQ_IE_FLAGS(preq_elem); preq_id = PREQ_IE_PREQ_ID(preq_elem); hopcount = PREQ_IE_HOPCOUNT(preq_elem) + 1; mesh_path_sel_frame_tx(MPATH_PREQ, flags, orig_addr, cpu_to_le32(orig_dsn), dst_flags, dst_addr, cpu_to_le32(dst_dsn), sdata->dev->broadcast, hopcount, ttl, cpu_to_le32(lifetime), cpu_to_le32(metric), cpu_to_le32(preq_id), sdata); ifmsh->mshstats.fwded_mcast++; ifmsh->mshstats.fwded_frames++; } } static void hwmp_prep_frame_process(struct ieee80211_sub_if_data *sdata, struct ieee80211_mgmt *mgmt, u8 *prep_elem, 
u32 metric) { struct mesh_path *mpath; u8 *dst_addr, *orig_addr; u8 ttl, hopcount, flags; u8 next_hop[ETH_ALEN]; u32 dst_dsn, orig_dsn, lifetime; /* Note that we divert from the draft nomenclature and denominate * destination to what the draft refers to as origininator. So in this * function destnation refers to the final destination of the PREP, * which corresponds with the originator of the PREQ which this PREP * replies */ dst_addr = PREP_IE_DST_ADDR(prep_elem); if (memcmp(dst_addr, sdata->dev->dev_addr, ETH_ALEN) == 0) /* destination, no forwarding required */ return; ttl = PREP_IE_TTL(prep_elem); if (ttl <= 1) { sdata->u.mesh.mshstats.dropped_frames_ttl++; return; } rcu_read_lock(); mpath = mesh_path_lookup(dst_addr, sdata); if (mpath) spin_lock_bh(&mpath->state_lock); else goto fail; if (!(mpath->flags & MESH_PATH_ACTIVE)) { spin_unlock_bh(&mpath->state_lock); goto fail; } memcpy(next_hop, mpath->next_hop->sta.addr, ETH_ALEN); spin_unlock_bh(&mpath->state_lock); --ttl; flags = PREP_IE_FLAGS(prep_elem); lifetime = PREP_IE_LIFETIME(prep_elem); hopcount = PREP_IE_HOPCOUNT(prep_elem) + 1; orig_addr = PREP_IE_ORIG_ADDR(prep_elem); dst_dsn = PREP_IE_DST_DSN(prep_elem); orig_dsn = PREP_IE_ORIG_DSN(prep_elem); mesh_path_sel_frame_tx(MPATH_PREP, flags, orig_addr, cpu_to_le32(orig_dsn), 0, dst_addr, cpu_to_le32(dst_dsn), mpath->next_hop->sta.addr, hopcount, ttl, cpu_to_le32(lifetime), cpu_to_le32(metric), 0, sdata); rcu_read_unlock(); sdata->u.mesh.mshstats.fwded_unicast++; sdata->u.mesh.mshstats.fwded_frames++; return; fail: rcu_read_unlock(); sdata->u.mesh.mshstats.dropped_frames_no_route++; return; } static void hwmp_perr_frame_process(struct ieee80211_sub_if_data *sdata, struct ieee80211_mgmt *mgmt, u8 *perr_elem) { struct mesh_path *mpath; u8 *ta, *dst_addr; u32 dst_dsn; ta = mgmt->sa; dst_addr = PERR_IE_DST_ADDR(perr_elem); dst_dsn = PERR_IE_DST_DSN(perr_elem); rcu_read_lock(); mpath = mesh_path_lookup(dst_addr, sdata); if (mpath) { spin_lock_bh(&mpath->state_lock); if (mpath->flags & MESH_PATH_ACTIVE && memcmp(ta, mpath->next_hop->sta.addr, ETH_ALEN) == 0 && (!(mpath->flags & MESH_PATH_DSN_VALID) || DSN_GT(dst_dsn, mpath->dsn))) { mpath->flags &= ~MESH_PATH_ACTIVE; mpath->dsn = dst_dsn; spin_unlock_bh(&mpath->state_lock); mesh_path_error_tx(dst_addr, cpu_to_le32(dst_dsn), sdata->dev->broadcast, sdata); } else spin_unlock_bh(&mpath->state_lock); } rcu_read_unlock(); } void mesh_rx_path_sel_frame(struct ieee80211_sub_if_data *sdata, struct ieee80211_mgmt *mgmt, size_t len) { struct ieee802_11_elems elems; size_t baselen; u32 last_hop_metric; /* need action_code */ if (len < IEEE80211_MIN_ACTION_SIZE + 1) return; baselen = (u8 *) mgmt->u.action.u.mesh_action.variable - (u8 *) mgmt; ieee802_11_parse_elems(mgmt->u.action.u.mesh_action.variable, len - baselen, &elems); switch (mgmt->u.action.u.mesh_action.action_code) { case MPATH_PREQ: if (!elems.preq || elems.preq_len != 37) /* Right now we support just 1 destination and no AE */ return; last_hop_metric = hwmp_route_info_get(sdata, mgmt, elems.preq); if (!last_hop_metric) return; hwmp_preq_frame_process(sdata, mgmt, elems.preq, last_hop_metric); break; case MPATH_PREP: if (!elems.prep || elems.prep_len != 31) /* Right now we support no AE */ return; last_hop_metric = hwmp_route_info_get(sdata, mgmt, elems.prep); if (!last_hop_metric) return; hwmp_prep_frame_process(sdata, mgmt, elems.prep, last_hop_metric); break; case MPATH_PERR: if (!elems.perr || elems.perr_len != 12) /* Right now we support only one destination per PERR */ return; 
hwmp_perr_frame_process(sdata, mgmt, elems.perr); default: return; } } /** * mesh_queue_preq - queue a PREQ to a given destination * * @mpath: mesh path to discover * @flags: special attributes of the PREQ to be sent * * Locking: the function must be called from within a rcu read lock block. * */ static void mesh_queue_preq(struct mesh_path *mpath, u8 flags) { struct ieee80211_sub_if_data *sdata = mpath->sdata; struct ieee80211_if_mesh *ifmsh = &sdata->u.mesh; struct mesh_preq_queue *preq_node; preq_node = kmalloc(sizeof(struct mesh_preq_queue), GFP_ATOMIC); if (!preq_node) { printk(KERN_DEBUG "Mesh HWMP: could not allocate PREQ node\n"); return; } spin_lock(&ifmsh->mesh_preq_queue_lock); if (ifmsh->preq_queue_len == MAX_PREQ_QUEUE_LEN) { spin_unlock(&ifmsh->mesh_preq_queue_lock); kfree(preq_node); if (printk_ratelimit()) printk(KERN_DEBUG "Mesh HWMP: PREQ node queue full\n"); return; } memcpy(preq_node->dst, mpath->dst, ETH_ALEN); preq_node->flags = flags; list_add_tail(&preq_node->list, &ifmsh->preq_queue.list); ++ifmsh->preq_queue_len; spin_unlock(&ifmsh->mesh_preq_queue_lock); if (time_after(jiffies, ifmsh->last_preq + min_preq_int_jiff(sdata))) ieee80211_queue_work(&sdata->local->hw, &ifmsh->work); else if (time_before(jiffies, ifmsh->last_preq)) { /* avoid long wait if did not send preqs for a long time * and jiffies wrapped around */ ifmsh->last_preq = jiffies - min_preq_int_jiff(sdata) - 1; ieee80211_queue_work(&sdata->local->hw, &ifmsh->work); } else mod_timer(&ifmsh->mesh_path_timer, ifmsh->last_preq + min_preq_int_jiff(sdata)); } /** * mesh_path_start_discovery - launch a path discovery from the PREQ queue * * @sdata: local mesh subif */ void mesh_path_start_discovery(struct ieee80211_sub_if_data *sdata) { struct ieee80211_if_mesh *ifmsh = &sdata->u.mesh; struct mesh_preq_queue *preq_node; struct mesh_path *mpath; u8 ttl, dst_flags; u32 lifetime; spin_lock_bh(&ifmsh->mesh_preq_queue_lock); if (!ifmsh->preq_queue_len || time_before(jiffies, ifmsh->last_preq + min_preq_int_jiff(sdata))) { spin_unlock_bh(&ifmsh->mesh_preq_queue_lock); return; } preq_node = list_first_entry(&ifmsh->preq_queue.list, struct mesh_preq_queue, list); list_del(&preq_node->list); --ifmsh->preq_queue_len; spin_unlock_bh(&ifmsh->mesh_preq_queue_lock); rcu_read_lock(); mpath = mesh_path_lookup(preq_node->dst, sdata); if (!mpath) goto enddiscovery; spin_lock_bh(&mpath->state_lock); if (preq_node->flags & PREQ_Q_F_START) { if (mpath->flags & MESH_PATH_RESOLVING) { spin_unlock_bh(&mpath->state_lock); goto enddiscovery; } else { mpath->flags &= ~MESH_PATH_RESOLVED; mpath->flags |= MESH_PATH_RESOLVING; mpath->discovery_retries = 0; mpath->discovery_timeout = disc_timeout_jiff(sdata); } } else if (!(mpath->flags & MESH_PATH_RESOLVING) || mpath->flags & MESH_PATH_RESOLVED) { mpath->flags &= ~MESH_PATH_RESOLVING; spin_unlock_bh(&mpath->state_lock); goto enddiscovery; } ifmsh->last_preq = jiffies; if (time_after(jiffies, ifmsh->last_dsn_update + net_traversal_jiffies(sdata)) || time_before(jiffies, ifmsh->last_dsn_update)) { ++ifmsh->dsn; sdata->u.mesh.last_dsn_update = jiffies; } lifetime = default_lifetime(sdata); ttl = sdata->u.mesh.mshcfg.dot11MeshTTL; if (ttl == 0) { sdata->u.mesh.mshstats.dropped_frames_ttl++; spin_unlock_bh(&mpath->state_lock); goto enddiscovery; } if (preq_node->flags & PREQ_Q_F_REFRESH) dst_flags = MP_F_DO; else dst_flags = MP_F_RF; spin_unlock_bh(&mpath->state_lock); mesh_path_sel_frame_tx(MPATH_PREQ, 0, sdata->dev->dev_addr, cpu_to_le32(ifmsh->dsn), dst_flags, mpath->dst, 
cpu_to_le32(mpath->dsn), sdata->dev->broadcast, 0, ttl, cpu_to_le32(lifetime), 0, cpu_to_le32(ifmsh->preq_id++), sdata); mod_timer(&mpath->timer, jiffies + mpath->discovery_timeout); enddiscovery: rcu_read_unlock(); kfree(preq_node); } /** * mesh_nexthop_lookup - put the appropriate next hop on a mesh frame * * @skb: 802.11 frame to be sent * @sdata: network subif the frame will be sent through * * Returns: 0 if the next hop was found. Nonzero otherwise. If no next hop is * found, the function will start a path discovery and queue the frame so it is * sent when the path is resolved. This means the caller must not free the skb * in this case. */ int mesh_nexthop_lookup(struct sk_buff *skb, struct ieee80211_sub_if_data *sdata) { struct sk_buff *skb_to_free = NULL; struct mesh_path *mpath; struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; u8 *dst_addr = hdr->addr3; int err = 0; rcu_read_lock(); mpath = mesh_path_lookup(dst_addr, sdata); if (!mpath) { mesh_path_add(dst_addr, sdata); mpath = mesh_path_lookup(dst_addr, sdata); if (!mpath) { sdata->u.mesh.mshstats.dropped_frames_no_route++; err = -ENOSPC; goto endlookup; } } if (mpath->flags & MESH_PATH_ACTIVE) { if (time_after(jiffies, mpath->exp_time - msecs_to_jiffies(sdata->u.mesh.mshcfg.path_refresh_time)) && !memcmp(sdata->dev->dev_addr, hdr->addr4, ETH_ALEN) && !(mpath->flags & MESH_PATH_RESOLVING) && !(mpath->flags & MESH_PATH_FIXED)) { mesh_queue_preq(mpath, PREQ_Q_F_START | PREQ_Q_F_REFRESH); } memcpy(hdr->addr1, mpath->next_hop->sta.addr, ETH_ALEN); } else { struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb); if (!(mpath->flags & MESH_PATH_RESOLVING)) { /* Start discovery only if it is not running yet */ mesh_queue_preq(mpath, PREQ_Q_F_START); } if (skb_queue_len(&mpath->frame_queue) >= MESH_FRAME_QUEUE_LEN) skb_to_free = skb_dequeue(&mpath->frame_queue); info->flags |= IEEE80211_TX_INTFL_NEED_TXPROCESSING; skb_queue_tail(&mpath->frame_queue, skb); if (skb_to_free) mesh_path_discard_frame(skb_to_free, sdata); err = -ENOENT; } endlookup: rcu_read_unlock(); return err; } void mesh_path_timer(unsigned long data) { struct ieee80211_sub_if_data *sdata; struct mesh_path *mpath; rcu_read_lock(); mpath = (struct mesh_path *) data; mpath = rcu_dereference(mpath); if (!mpath) goto endmpathtimer; sdata = mpath->sdata; if (sdata->local->quiescing) { rcu_read_unlock(); return; } spin_lock_bh(&mpath->state_lock); if (mpath->flags & MESH_PATH_RESOLVED || (!(mpath->flags & MESH_PATH_RESOLVING))) mpath->flags &= ~(MESH_PATH_RESOLVING | MESH_PATH_RESOLVED); else if (mpath->discovery_retries < max_preq_retries(sdata)) { ++mpath->discovery_retries; mpath->discovery_timeout *= 2; mesh_queue_preq(mpath, 0); } else { mpath->flags = 0; mpath->exp_time = jiffies; mesh_path_flush_pending(mpath); } spin_unlock_bh(&mpath->state_lock); endmpathtimer: rcu_read_unlock(); }
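/*
 * Added illustration (not part of the original kernel file): a standalone
 * user-space sketch of the fail_avg moving average maintained in
 * ieee80211s_update_metric().  Every failed (un-ACKed) frame pulls the
 * average towards 100, while every acknowledged frame decays it by roughly
 * 20%; once the average exceeds 95 the peer link is declared broken.
 * Guarded by #if 0 because it is illustrative only.
 */
#if 0
#include <stdio.h>

int main(void)
{
	int fail_avg = 0;
	int i;

	for (i = 1; i <= 15; i++) {
		int failed = 1;	/* pretend every frame is lost */

		fail_avg = (80 * fail_avg + 5) / 100 + 20 * failed;
		printf("after %2d failures: fail_avg = %d\n", i, fail_avg);
	}
	return 0;
}
#endif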
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//

#import <Intents/INIntentResolutionResult.h>

@interface INTaskResolutionResult : INIntentResolutionResult
{
}

+ (id)confirmationRequiredWithTaskToConfirm:(id)arg1;
+ (id)disambiguationWithTasksToDisambiguate:(id)arg1;
+ (id)successWithResolvedTask:(id)arg1;
- (id)_vocabularyValueForObject:(id)arg1 slotDescription:(id)arg2;
- (id)_intentSlotValueForObject:(id)arg1 slotDescription:(id)arg2;

@end
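// Added illustration (not part of the dumped header): typical SiriKit usage of the
// resolution-result factory methods declared above when an intent handler resolves a
// task parameter.  The `task` value is hypothetical.
//
//     INTask *task = ...;  // a task matching the user's request
//     INTaskResolutionResult *ok  = [INTaskResolutionResult successWithResolvedTask:task];
//     INTaskResolutionResult *ask = [INTaskResolutionResult confirmationRequiredWithTaskToConfirm:task];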
{ "pile_set_name": "Github" }
CHANGELOG
=========

### v1.0

* Initial release.
{ "pile_set_name": "Github" }
//! -*- mode: c++; coding: utf-8; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4; show-trailing-whitespace: t -*- vim:fenc=utf-8:ft=cpp:et:sw=4:ts=4:sts=4 //! //! This file is part of the Feel++ library //! //! This library is free software; you can redistribute it and/or //! modify it under the terms of the GNU Lesser General Public //! License as published by the Free Software Foundation; either //! version 2.1 of the License, or (at your option) any later version. //! //! This library is distributed in the hope that it will be useful, //! but WITHOUT ANY WARRANTY; without even the implied warranty of //! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU //! Lesser General Public License for more details. //! //! You should have received a copy of the GNU Lesser General Public //! License along with this library; if not, write to the Free Software //! Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA //! //! @file //! @author Christophe Prud'homme <[email protected]> //! @date 25 Oct 2017 //! @copyright 2019 Feel++ Consortium #ifndef FEELPP_FILTERS_DETAILS_MESHCONTIGUOUSNUMBERINGMAPPING_HPP #define FEELPP_FILTERS_DETAILS_MESHCONTIGUOUSNUMBERINGMAPPING_HPP 1 namespace Feel { namespace detail { template <typename MeshType,typename StorageNodeValueType> struct MeshContiguousNumberingMapping { using mesh_type = MeshType; using mesh_ptrtype = std::shared_ptr<mesh_type>; using index_type = typename mesh_type::index_type; using storage_node_value_type = StorageNodeValueType; using range_element_type = elements_reference_wrapper_t<mesh_type>; using point_ref_type = boost::reference_wrapper< typename mesh_type::point_type const>; explicit MeshContiguousNumberingMapping( mesh_type* mesh, bool interprocessPointAreDuplicated = false ) : M_mesh( mesh ), M_interprocessPointAreDuplicated( interprocessPointAreDuplicated ) { this->updateForUse(); } void updateForUse() { mesh_type* mesh = M_mesh; rank_type currentPid = mesh->worldComm().localRank(); rank_type worldSize = mesh->worldComm().localSize(); if ( M_partIdToRangeElement.empty() ) { std::map<int,int> collectionOfMarkersFlag; auto const en_part = mesh->endParts(); for ( auto it_part = mesh->beginParts() ; it_part!=en_part;++it_part ) collectionOfMarkersFlag[it_part->first] = it_part->first; auto allRanges = collectionOfMarkedelements( mesh, collectionOfMarkersFlag ); for ( auto const& [part,rangeElt] : allRanges ) { std::string markerName = mesh->markerName( part ); if ( markerName.empty() || !mesh->hasElementMarker( markerName ) ) markerName = ""; M_partIdToRangeElement[part] = std::make_tuple(markerName, rangeElt ); } } // point id -> ( ( map of idsInOtherPart ), ( vector of ( marker, element id, id in elt) ) ) std::unordered_map<index_type, std::tuple< std::map<rank_type, index_type>, std::vector< std::tuple<int,index_type,uint16_type>> >> dataPointsInterProcess; for ( auto const& [part,nameAndRangeElt] : M_partIdToRangeElement ) { auto const& rangeElt = std::get<1>( nameAndRangeElt ); index_type nEltInRange = nelements(rangeElt); auto & elementIdToContiguous = M_elementIdToContiguous[part]; auto & pointIdsInElements = M_pointIdsInElements[part]; auto & pointIdToContiguous = M_pointIdToContiguous[part]; pointIdsInElements.resize( nEltInRange*mesh_type::element_type::numPoints, invalid_v<index_type> ); index_type countPtId = 0, countEltId = 0; for ( auto const& eltWrap : rangeElt ) { auto const& elt = unwrap_ref( eltWrap ); index_type eltId = elt.id(); auto [itElt,eltIsInserted] = 
elementIdToContiguous.try_emplace( eltId, countEltId++ ); DCHECK( eltIsInserted ) << "something wrong, element already inserted"; index_type newEltId = itElt->second; for ( uint16_type j = 0; j < mesh_type::element_type::numPoints; j++ ) { auto const& pt = elt.point( j ); index_type ptid = pt.id(); auto const& ptIdnOthersPartitions = pt.idInOthersPartitions(); if ( M_interprocessPointAreDuplicated || ptIdnOthersPartitions.empty() ) // not a interprocess point { auto [itPt,isInserted] = pointIdToContiguous.try_emplace( ptid,std::make_pair(countPtId,boost::cref(pt)) ); if ( isInserted ) ++countPtId; index_type newPtId = itPt->second.first; DCHECK( (newEltId*mesh_type::element_type::numPoints+j) < pointIdsInElements.size() ) << "invalid size : " << (newEltId*mesh_type::element_type::numPoints+j) << " vs " << pointIdsInElements.size(); pointIdsInElements[newEltId*mesh_type::element_type::numPoints+j] = newPtId; } else { auto infoIpElt = std::make_tuple( part, newEltId/*eltId*/, j ); auto itFindPtIP = dataPointsInterProcess.find( ptid ); if ( itFindPtIP == dataPointsInterProcess.end() ) { dataPointsInterProcess.emplace( ptid, std::make_tuple( ptIdnOthersPartitions, std::vector< std::tuple<int,index_type,uint16_type>>( { infoIpElt } ) ) ); } else std::get<1>( itFindPtIP->second ).push_back( infoIpElt ); } } } } // --------------------------------------------------------------------------------------- // // treatment of interprocess point std::map<int,std::map<rank_type,std::map<index_type,index_type>>> dataPointsNotInProcess; if ( !M_interprocessPointAreDuplicated && worldSize > 1 ) { std::map<rank_type, std::vector<std::pair<int,index_type>>> dataToSend; std::map<rank_type, std::vector<std::pair<int,index_type>>> dataToRecv; for ( auto const& [ ptId, dataIP ] : dataPointsInterProcess ) { auto const& dataIdInOtherPartition = std::get<0>( dataIP ); auto const& dataEltInfo = std::get<1>( dataIP ); for ( auto const& [ procId, ptIdOtherPart ] : dataIdInOtherPartition ) { for ( auto const& [marker,eltId,ptIdInElt ] : dataEltInfo ) dataToSend[procId].push_back( std::make_pair(marker, ptIdOtherPart) ); } } int neighborSubdomains = mesh->neighborSubdomains().size(); int nbRequest = 2 * neighborSubdomains; mpi::request* reqs = new mpi::request[nbRequest]; int cptRequest = 0; std::map<rank_type,size_type> sizeRecv; // get size of data to transfer for ( rank_type neighborRank : mesh->neighborSubdomains() ) { reqs[cptRequest++] = mesh->worldComm().localComm().isend( neighborRank , 0, (size_type)dataToSend[neighborRank].size() ); reqs[cptRequest++] = mesh->worldComm().localComm().irecv( neighborRank , 0, sizeRecv[neighborRank] ); } // wait all requests mpi::wait_all(reqs, reqs + cptRequest); cptRequest = 0; for ( rank_type neighborRank : mesh->neighborSubdomains() ) { int nSendData = dataToSend[neighborRank].size(); if ( nSendData > 0 ) reqs[cptRequest++] = mesh->worldComm().localComm().isend( neighborRank, 0, &(dataToSend[neighborRank][0]), nSendData ); int nRecvData = sizeRecv[neighborRank]; dataToRecv[neighborRank].resize( nRecvData ); if ( nRecvData > 0 ) reqs[cptRequest++] = mesh->worldComm().localComm().irecv( neighborRank, 0, &(dataToRecv[neighborRank][0]), nRecvData ); } // wait all requests mpi::wait_all( reqs, reqs + cptRequest ); std::map<int,std::map<index_type, std::set<rank_type> > > treatRecv; // others process for ( auto const& [pid, ptData ] : dataToRecv ) { for ( auto const& [marker, ptId] : ptData ) treatRecv[marker][ptId].insert( pid ); } // self process for ( auto const& [ ptId, dataIP ] 
: dataPointsInterProcess ) { auto const& dataEltInfo = std::get<1>( dataIP ); for ( auto const& [marker,eltId,ptIdInElt ] : dataEltInfo ) treatRecv[marker][ptId].insert( currentPid ); } // determine which process have points std::map<int,std::set<index_type>> ipPointsOnCurrentProcess; for ( auto const& [marker, mapIdToPids] : treatRecv ) { for ( auto const& [ptId,allPids] : mapIdToPids ) if ( currentPid == *allPids.begin() ) ipPointsOnCurrentProcess[marker].insert( ptId ); } // update M_pointIdToContiguous std::map<rank_type,std::vector<boost::tuple<int,index_type,index_type>>> dataToReSend; std::map<rank_type,std::vector<boost::tuple<int,index_type,index_type>>> dataToReRecv; for ( auto const& [marker, ptIds] : ipPointsOnCurrentProcess ) { auto & pointIdToContiguous = M_pointIdToContiguous[marker]; auto & pointIdsInElements = M_pointIdsInElements[marker]; index_type countPtId = pointIdToContiguous.size(); for ( index_type ptId : ptIds ) { auto const& pt = mesh->point( ptId ); auto [itPt,isInserted] = pointIdToContiguous.try_emplace( ptId,std::make_pair(countPtId,boost::cref(pt)) ); index_type newPtId = itPt->second.first; if ( isInserted ) { ++countPtId; for (auto const& [opid, optId] : pt.idInOthersPartitions() ) dataToReSend[opid].push_back( boost::make_tuple( marker,optId,newPtId ) ); } auto itFindDataIP = dataPointsInterProcess.find( ptId ); CHECK( itFindDataIP != dataPointsInterProcess.end() ) << "point not register"; auto const& infoEltAssociated = std::get<1>( itFindDataIP->second ); for ( auto const& [ marker2, newEltId, j ] : infoEltAssociated ) { if ( marker != marker2 ) continue; pointIdsInElements[newEltId*mesh_type::element_type::numPoints+j] = newPtId; } } } // get size of data to transfer cptRequest = 0; for ( rank_type neighborRank : mesh->neighborSubdomains() ) { reqs[cptRequest++] = mesh->worldComm().localComm().isend( neighborRank , 0, (size_type)dataToReSend[neighborRank].size() ); reqs[cptRequest++] = mesh->worldComm().localComm().irecv( neighborRank , 0, sizeRecv[neighborRank] ); } // wait all requests mpi::wait_all(reqs, reqs + cptRequest); cptRequest = 0; for ( rank_type neighborRank : mesh->neighborSubdomains() ) { int nSendData = dataToReSend[neighborRank].size(); if ( nSendData > 0 ) reqs[cptRequest++] = mesh->worldComm().localComm().isend( neighborRank, 0, &(dataToReSend[neighborRank][0]), nSendData ); int nRecvData = sizeRecv[neighborRank]; dataToReRecv[neighborRank].resize( nRecvData ); if ( nRecvData > 0 ) reqs[cptRequest++] = mesh->worldComm().localComm().irecv( neighborRank, 0, &(dataToReRecv[neighborRank][0]), nRecvData ); } // wait all requests mpi::wait_all( reqs, reqs + cptRequest ); // delete reqs because finish comm delete[] reqs; for ( auto const& [pid, dataByProc] : dataToReRecv ) { for ( auto const& dataPt : dataByProc ) { int marker = boost::get<0>( dataPt ); index_type ptId = boost::get<1>( dataPt ); index_type newPtId = boost::get<2>( dataPt ); dataPointsNotInProcess[marker][pid][ptId] = newPtId; } } } // --------------------------------------------------------------------------------------- // // build nodes vector for ( auto const& [markerId,pointIdToContiguous] : M_pointIdToContiguous ) { auto & nodes = M_nodes[markerId]; nodes.resize( 3*pointIdToContiguous.size(),0 ); for ( auto const& [ptId,ptData] : pointIdToContiguous ) { index_type newPtId = ptData.first; auto const& pt = unwrap_ref( ptData.second ); for ( uint16_type d=0 ; d<mesh_type::nRealDim ;++d ) { DCHECK( (3*newPtId+d) < nodes.size() ) << "invalid size : " << (3*newPtId+d) << " 
vs " << nodes.size() ; nodes[3*newPtId+d] = pt.node()[d]; } } } // --------------------------------------------------------------------------------------- // // information in world comm int k=0; std::vector<boost::tuple<index_type,index_type>> nPointElementByMarker( M_pointIdToContiguous.size() ); for ( auto const& [marker,pointIdToContiguous] : M_pointIdToContiguous ) { nPointElementByMarker[k] = boost::make_tuple( pointIdToContiguous.size(), M_elementIdToContiguous.find( marker)->second.size() ); ++k; } std::vector<std::vector<boost::tuple<index_type,index_type>>> recvInfos; mpi::all_gather( mesh->worldComm().comm(), nPointElementByMarker, recvInfos ); k=0; for ( auto const& [marker,pointIdToContiguous] : M_pointIdToContiguous ) { auto & numberOfPointElement = M_numberOfPointElement[marker]; numberOfPointElement.resize( worldSize ); index_type startPtId = 0, startEltId = 0; for ( rank_type p=0;p<worldSize;++p ) { index_type nPt = boost::get<0>( recvInfos[p][k] ); index_type nElt = boost::get<1>( recvInfos[p][k] ); numberOfPointElement[p] = std::make_tuple( startPtId, nPt, startEltId, nElt ); startPtId+=nPt; startEltId+=nElt; } ++k; M_numberOfPointElementAllProcess[marker] = std::make_tuple( startPtId,startEltId ); } // --------------------------------------------------------------------------------------- // // shift ids for ( auto const& [marker,pointIdToContiguous] : M_pointIdToContiguous ) { index_type spi = this->startPointIds(marker,currentPid); index_type sei = this->startElementIds(marker,currentPid); for ( auto & [ptId,newPtData] : M_pointIdToContiguous[marker] ) newPtData.first += spi; for ( auto & [eltId,newEltId] : M_elementIdToContiguous[marker] ) newEltId += sei; for ( index_type & ptId : M_pointIdsInElements[marker] ) { if ( ptId != invalid_v<index_type> ) ptId += spi; } } // --------------------------------------------------------------------------------------- // // update M_pointIdsInElements with interprocess points for ( auto const& [marker, dataByMarker ] : dataPointsNotInProcess ) { auto & pointIdsInElements = M_pointIdsInElements[marker]; for( auto const& [pid,dataByProc] : dataByMarker ) { index_type shiftPointId = this->startPointIds( marker, pid ); for ( auto const& [ ptId,newPtId ] : dataByProc ) { auto itFindPtIp = dataPointsInterProcess.find( ptId ); CHECK( itFindPtIp != dataPointsInterProcess.end() ) << "invalid point "; auto const& infosElt = std::get<1>( itFindPtIp->second ); // up pointIdsInElements for (auto const& [ marker2, newEltId, j ] : infosElt ) { if ( marker == marker2 ) pointIdsInElements[newEltId*mesh_type::element_type::numPoints+j] = newPtId + shiftPointId; } } } } } const mesh_type* mesh() const { return M_mesh; } std::map<int,std::tuple<std::string,range_element_type>> const& partIdToRangeElement() const { return M_partIdToRangeElement; } std::string const& name( int part ) const { auto itFindPart = M_partIdToRangeElement.find( part ); CHECK( itFindPart != M_partIdToRangeElement.end() ) << "part not registerd"; return std::get<0>( itFindPart->second ); } range_element_type const& rangeElement( int part ) const { auto itFindPart = M_partIdToRangeElement.find( part ); CHECK( itFindPart != M_partIdToRangeElement.end() ) << "part not registerd"; return std::get<1>( itFindPart->second ); } std::unordered_map<index_type,std::pair<index_type,point_ref_type>> const& pointIdToContiguous( int part ) const { auto itFindData = M_pointIdToContiguous.find( part ); CHECK( itFindData != M_pointIdToContiguous.end() ) << "part not registerd"; return 
itFindData->second; } index_type pointIdToContiguous( int part, index_type ptId ) const { auto itFindData = M_pointIdToContiguous.find( part ); if ( itFindData == M_pointIdToContiguous.end() ) return invalid_v<index_type>; auto const& data = itFindData->second; auto itFindPt = data.find( ptId ); if ( itFindPt == data.end() ) return invalid_v<index_type>; return itFindPt->second.first; } std::unordered_map<index_type,index_type> const& elementIdToContiguous( int part ) const { auto itFindData = M_elementIdToContiguous.find( part ); CHECK( itFindData == M_elementIdToContiguous.end() ) << "part not registerd"; return itFindData->second; } index_type elementIdToContiguous( int part, index_type eltId ) const { auto itFindData = M_elementIdToContiguous.find( part ); if ( itFindData == M_elementIdToContiguous.end() ) return invalid_v<index_type>; auto const& data = itFindData->second; auto itFindElt = data.find( eltId ); if ( itFindElt == data.end() ) return invalid_v<index_type>; return itFindElt->second; } std::vector<index_type> const& pointIdsInElements( int part ) const { auto itFindPointIdsInElements = M_pointIdsInElements.find( part ); CHECK( itFindPointIdsInElements != M_pointIdsInElements.end() ) << "part not registerd"; return itFindPointIdsInElements->second; } std::vector<storage_node_value_type> const& nodes( int part ) const { auto itFindNodes = M_nodes.find( part ); CHECK( itFindNodes != M_nodes.end() ) << "part not registerd"; return itFindNodes->second; } index_type startPointIds( int part, rank_type therank ) const { return genericInfo<0>( part, therank ); } index_type numberOfPoint( int part, rank_type therank ) const { return genericInfo<1>( part, therank ); } index_type startElementIds( int part, rank_type therank ) const { return genericInfo<2>( part, therank ); } index_type numberOfElement( int part, rank_type therank ) const { return genericInfo<3>( part, therank ); } index_type numberOfPointAllProcess( int part ) const { return genericInfoAllProcess<0>( part ); } index_type numberOfElementAllProcess( int part ) const { return genericInfoAllProcess<1>( part ); } void updateNodesCoordinates() { rank_type currentPid = M_mesh->worldComm().localRank(); for ( auto const& [part,pointIdToContiguous] : M_pointIdToContiguous ) { index_type spi = this->startPointIds(part,currentPid); auto & nodes = M_nodes[part]; CHECK( nodes.size() == 3*pointIdToContiguous.size() ) << "wrong size"; //nodes.resize( 3*pointIdToContiguous.size(),0 ); for ( auto const& [ptId,ptData] : pointIdToContiguous ) { index_type newPtId = ptData.first - spi; auto const& pt = unwrap_ref( ptData.second ); for ( uint16_type d=0 ; d<mesh_type::nRealDim ;++d ) { CHECK( (3*newPtId+d) < nodes.size() ) << "invalid size : " << (3*newPtId+d) << " vs " << nodes.size() ; nodes[3*newPtId+d] = pt.node()[d]; } } } } //! 
reorder the nodes ids in the element and put the new ordering in arg \newPointsIdsInElt template <typename TheNodeIndexType> void updateOrderingOfPointsIdsInElt( int part, rank_type therank, std::vector<TheNodeIndexType> & newPointsIdsInElt, std::vector<uint16_type> const& mappingWithThisKindOfElement, int shiftId = 0, int nPointsUsedInElt = mesh_type::element_type::numPoints ) const { CHECK( nPointsUsedInElt <= mappingWithThisKindOfElement.size() ) << "incomplete ordering"; index_type _nElt = this->numberOfElement( part,therank ); auto const& pointsIdsInElt_B = this->pointIdsInElements( part ); newPointsIdsInElt.resize( _nElt*nPointsUsedInElt ); for ( int k=0;k<_nElt;++k ) { for ( uint16_type p=0;p<nPointsUsedInElt;++p ) newPointsIdsInElt[ k*nPointsUsedInElt + mappingWithThisKindOfElement[p] ] = pointsIdsInElt_B[ k*mesh_type::element_type::numPoints+p ] + shiftId; } } private : template <int TupleId> index_type genericInfo( int part, rank_type therank ) const { auto itFindNumberOfPointElement = M_numberOfPointElement.find( part ); CHECK( itFindNumberOfPointElement!= M_numberOfPointElement.end()) << "invalid part"; CHECK( therank < itFindNumberOfPointElement->second.size() ) << "invalid rank"; return std::get<TupleId>( itFindNumberOfPointElement->second[therank] ); } template <int TupleId> index_type genericInfoAllProcess( int part ) const { auto itFindNumberOfPointElementAllProcess = M_numberOfPointElementAllProcess.find( part ); CHECK( itFindNumberOfPointElementAllProcess != M_numberOfPointElementAllProcess.end()) << "invalid part"; return std::get<TupleId>( itFindNumberOfPointElementAllProcess->second ); } private: mesh_type* M_mesh; bool M_interprocessPointAreDuplicated; std::map<int,std::tuple<std::string,range_element_type>> M_partIdToRangeElement; std::map<int,std::unordered_map<index_type,std::pair<index_type,point_ref_type> >> M_pointIdToContiguous; std::map<int,std::unordered_map<index_type,index_type>> M_elementIdToContiguous; std::map<int,std::vector<index_type>> M_pointIdsInElements; std::map<int,std::vector<storage_node_value_type>> M_nodes; std::map<int,std::vector<std::tuple<index_type,index_type,index_type,index_type>>> M_numberOfPointElement; std::map<int,std::tuple<index_type,index_type>> M_numberOfPointElementAllProcess; }; template <typename T> struct MeshPoints { template <typename MeshType, typename IteratorType> MeshPoints( MeshType* mesh, const WorldComm&, IteratorType it, IteratorType en, const bool outer = false, const bool renumber = false, const bool fill = false, const int startIndex = 1 ); int translatePointIds( std::vector<int32_t>& ids ); int translateElementIds( std::vector<int32_t>& ids ); int globalNumberOfPoints() const { return global_npts; } int globalNumberOfElements() const { return global_nelts; } std::vector<int> numberOfPoints, numberOfElements; int global_nelts{0}, global_npts{0}; std::vector<int32_t> ids; std::unordered_map<int32_t, int32_t> new2old; std::unordered_map<int32_t, int32_t> old2new; std::unordered_map<int32_t, int32_t> nodemap; std::vector<T> coords; std::vector<int32_t> elemids; std::vector<int32_t> elem; size_type offsets_pts, global_offsets_pts; size_type offsets_elts, global_offsets_elts; }; //! //! Builds information around faces/elements for exporting data //! @param mesh The mesh from which data is extracted //! @param it Starting iterator over the faces/elements //! @param en Endoing iterator over the faces/elements //! @param outer If false, the vertices are place in an x1 y1 z1 ... xn yn zn order, otherwise in the x1 ... 
xn y1 ... yn z1 ... zn //! @param renumber If true, the vertices will be renumbered with maps to keep the correspondance between the twoi, otherwise the original ids are kept //! @param fill It true, the method will generate points coordinates that are 3D, even if the point is specified with 1D or 2D coordinates (filled with 0) //! @param Specify the startIndex of the renumbered points (typically set to 0 or 1, but no restriction). This is only used when renumber is true, otherwise it is not used. //! template <typename T> template <typename MeshType, typename IteratorType> MeshPoints<T>::MeshPoints( MeshType* mesh, const WorldComm& worldComm, IteratorType it, IteratorType en, const bool outer, const bool renumber, const bool fill, const int startIndex ) { std::set<int> nodeset; size_type p = 0; auto elt_it = it; //! Gather all the vertices of which the elements are made up with into a std::set */ //! build up correspondance arrays between index in nodeset and previous id */ for ( auto eit = it; eit != en; ++eit ) { auto const& elt = boost::unwrap_ref( *eit ); for ( size_type j = 0; j < MeshType::element_type::numPoints; j++ ) { int pid = elt.point( j ).id(); auto ins = nodeset.insert( pid ); if ( ins.second ) { if ( renumber ) { ids.push_back( p + startIndex ); } else { ids.push_back( pid ); } //! old id -> new id */ old2new[pid] = ids[p]; //! old id -> new id */ new2old[ids[p]] = pid; //! old id -> index of the new id */ nodemap[pid] = p; ++p; } } } CHECK( p == ids.size() ) << "Invalid number of points " << ids.size() << "!=" << p; int nv = ids.size(); coords.resize( 3 * nv, 0 ); auto pit = ids.begin(); auto pen = ids.end(); //! for( auto i = 0; i < nv; ++i ) //! put coords of each point into the coords array */ //! if outer is true, the coords are placed like: x1 x2 ... xn y1 y2 ... yn z1 z2 ... zn */ //! otherwise, the coords are placed like: x1 y1 z1 x2 y2 z2 ... xn yn zn */ for ( int i = 0; pit != pen; ++pit, ++i ) { //! CHECK( *pit > 0 ) << "invalid id " << *pit; //! LOG(INFO) << "p " << i << "/" << nv << " =" << *pit; //! int pid = (renumber)?nodemap[*pit]+1:*pit; int pid = *pit; auto const& p = mesh->point( new2old[*pit] ); if ( outer ) { coords[i] = (T)p.node()[0]; } else { coords[3 * i] = (T)p.node()[0]; } if ( MeshType::nRealDim >= 2 ) { if ( outer ) { coords[nv + i] = ( T )( p.node()[1] ); } else { coords[3 * i + 1] = ( T )( p.node()[1] ); } } //! Fill 2nd components with 0 if told to do so */ else { if ( fill ) { if ( outer ) { coords[nv + i] = (T)0; } else { coords[3 * i + 1] = (T)0; } } } if ( MeshType::nRealDim >= 3 ) { if ( outer ) { coords[2 * nv + i] = ( T )( p.node()[2] ); } else { coords[3 * i + 2] = ( T )( p.node()[2] ); } } //! Fill 3nd components with 0 if told to do so */ else { if ( fill ) { if ( outer ) { coords[2 * nv + i] = (T)0; } else { coords[3 * i + 2] = (T)0; } } } } //! number of local elements */ int __ne = std::distance( it, en ); //! only do this resize if we have at least one element in the iterator */ //! otherwise it will segfault */ if ( it != en ) { elem.resize( __ne * MeshType::element_type::numPoints ); //! elem.resize( __ne*mesh->numLocalVertices() ); elemids.resize( __ne ); } //! build the array containing the id of each vertex for each element */ elt_it = it; size_type e = 0; for ( ; elt_it != en; ++elt_it, ++e ) { auto const& elt = boost::unwrap_ref( *elt_it ); elemids[e] = elt.id() + 1; //! std::cout << "LocalV = " << elt.numLocalVertices << std::endl; //! 
for ( size_type j = 0; j < mesh->numLocalVertices(); j++ ) for ( size_type j = 0; j < MeshType::element_type::numPoints; j++ ) { //! std::cout << "LocalVId = " << j << " " << e*elt.numLocalVertices+j << std::endl; //! std::cout << elt.point( j ).id() << std::endl; //! ensight id start at 1 elem[e * MeshType::element_type::numPoints + j] = old2new[elt.point( j ).id()]; #if 0 DCHECK( (elem[e*mesh->numLocalVertices()+j] > 0) && (elem[e*mesh->numLocalVertices()+j] <= nv ) ) << "Invalid entry : " << elem[e*mesh->numLocalVertices()+j] << " at index : " << e*mesh->numLocalVertices()+j << " element : " << e << " vertex : " << j; #endif } } #if 0 CHECK( e==__ne) << "Invalid number of elements, e= " << e << " should be " << __ne; std::for_each( elem.begin(), elem.end(), [=]( int e ) { CHECK( ( e > 0) && e <= __nv ) << "invalid entry e = " << e << " nv = " << nv; } ); #endif //! gather the number of points and elements fo each process */ std::vector<int> ost{nv, __ne}; std::vector<std::vector<int>> ospe; mpi::all_gather( worldComm.comm(), ost, ospe ); //! copy information about number of points/elements //! per process in a local array */ for ( size_type i = 0; i < ospe.size(); i++ ) { numberOfPoints.push_back( ospe[i][0] ); numberOfElements.push_back( ospe[i][1] ); } //! compute offsets to shift the point and element ids */ //! regarding to the processor rank */ offsets_pts = 0; global_offsets_pts = 0; offsets_elts = 0; global_offsets_elts = 0; for ( size_type i = 0; i < ospe.size(); i++ ) { if ( i < worldComm.localRank() ) { offsets_pts += ospe[i][0]; offsets_elts += ospe[i][1]; } global_offsets_pts += ospe[i][0]; global_offsets_elts += ospe[i][1]; } global_npts = global_offsets_pts; global_nelts = global_offsets_elts; //! //! std::cout << "local offset pts : " << offsets_pts << std::endl; //! std::cout << "local offset elts : " << offsets_elts << std::endl; //! std::cout << "global offset pts : " << global_offsets_pts << std::endl; //! std::cout << "global offset elts : " << global_offsets_elts << std::endl; //! std::cout << "done with offsets" << std::endl; } //! //! Translate the list of points ids to the new global layout //! @param ids Array of local point ids to be translated //! template <typename T> int MeshPoints<T>::translatePointIds( std::vector<int32_t>& ptids ) { for ( int i = 0; i < ptids.size(); i++ ) { ptids[i] = offsets_pts + old2new[ptids[i]]; } return 0; } //! //! Translate the list of element ids to the new global layout //! @param ids Array of local point ids to be translated //! template <typename T> int MeshPoints<T>::translateElementIds( std::vector<int32_t>& elids ) { for ( int i = 0; i < elids.size(); i++ ) { elids[i] = offsets_elts + elids[i]; } return 0; } } // namespace detail } // namespace Feel #endif
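// Added illustration (not part of the original header): a minimal sketch of how an
// exporter typically drives MeshPoints.  `mesh` and the element iterators `it`/`en`
// are assumed to be provided by the caller; the float storage type is arbitrary.
//
//     Feel::detail::MeshPoints<float> mp( mesh.get(), mesh->worldComm(), it, en,
//                                         /*outer*/ true, /*renumber*/ true, /*fill*/ true );
//     std::cout << mp.globalNumberOfPoints() << " points / "
//               << mp.globalNumberOfElements() << " elements across all ranks\n";
//     // shift locally numbered point ids into the contiguous global numbering:
//     std::vector<int32_t> ptids = mp.ids;
//     mp.translatePointIds( ptids );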
{ "pile_set_name": "Github" }
{
  "version": "1.0",
  "examples": {
  }
}
{ "pile_set_name": "Github" }
//
//  RACUnarySequence.h
//  ReactiveCocoa
//
//  Created by Justin Spahr-Summers on 2013-05-01.
//  Copyright (c) 2013 GitHub, Inc. All rights reserved.
//

#import "RACSequence.h"

// Private class representing a sequence of exactly one value.
@interface RACUnarySequence : RACSequence
@end
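// Added illustration (not part of the original header): this private class is what a
// one-element sequence looks like to user code.  The sketch below goes through the
// public RACSequence API rather than instantiating RACUnarySequence directly, and the
// literal value is arbitrary.
//
//     RACSequence *one = [RACSequence return:@42];  // a single-value sequence
//     NSLog(@"head = %@", one.head);                // 42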
{ "pile_set_name": "Github" }
/* * Copyright (C) 2015-2020 Lightbend Inc. <https://www.lightbend.com> */ package akka.stream.impl import java.util.concurrent.atomic.AtomicReference import scala.annotation.tailrec import scala.util.control.NonFatal import org.reactivestreams.Processor import org.reactivestreams.Publisher import org.reactivestreams.Subscriber import org.reactivestreams.Subscription import akka.annotation.InternalApi import akka.stream._ import akka.stream.impl.Stages.DefaultAttributes import akka.util.OptionVal /** * INTERNAL API */ @InternalApi private[stream] object StreamLayout { // compile-time constant final val Debug = false /** * This is the only extension point for the sealed type hierarchy: composition * (i.e. the module tree) is managed strictly within this file, only leaf nodes * may be declared elsewhere. */ trait AtomicModule[+S <: Shape, +M] extends Graph[S, M] } /** * INTERNAL API */ @InternalApi private[stream] object VirtualProcessor { // intentional syntax to make compile time constant final val Debug = false sealed trait HasActualSubscriber { def subscriber: Subscriber[Any] } case object Inert { val subscriber = new CancellingSubscriber[Any] } final case class Both(subscriber: Subscriber[Any]) extends HasActualSubscriber final case class Establishing( subscriber: Subscriber[Any], onCompleteBuffered: Boolean = false, onErrorBuffered: OptionVal[Throwable] = OptionVal.None) extends HasActualSubscriber object Establishing { def create(s: Subscriber[_]) = Establishing(s.asInstanceOf[Subscriber[Any]]) } } /** * INTERNAL API * * This is a transparent processor that shall consume as little resources as * possible. Due to the possibility of receiving uncoordinated inputs from both * downstream and upstream, this needs an atomic state machine which looks a * little like this: * * * +--------+ (2) +---------------+ * | null +------------>+ Subscriber | * +---+----+ +-----+---------+ * | | * (1)| | (1) * v v * +---+----------+ (2) +-----+---------+ * | Subscription +------>+ Establishing | * +---+----------+ +-----+---------+ * | | * | | (4) * | v * | +-----+---------+ --- * | (3) | Both | | (5) * | +-----+---------+ <-- * | | * | | * v v * +---+----------+ (2) +-----+---------+ --- * | Publisher +-----> | Inert | | (5, *) * +--------------+ +---------------+ <-- * * * The idea is to keep the major state in only one atomic reference. The actions * that can happen are: * * (1) onSubscribe * (2) subscribe * (3) onError / onComplete * (4) establishing subscription completes * (5) onNext * (*) Inert can be reached also by cancellation after which onNext is still fine * so we just silently ignore possible spec violations here * * Any event that occurs in a state where no matching outgoing arrow can be found * is a spec violation, leading to the shutdown of this processor (meaning that * the state is updated such that all following actions match that of a failed * Publisher or a cancelling Subscriber, and the non-guilty party is informed if * already connected). * * request() can only be called after the Subscriber has received the Subscription * and that also means that onNext() will only happen after having transitioned into * the Both state as well. The Publisher state means that if the real * Publisher terminates before we get the Subscriber, we can just forget about the * real one and keep an already finished one around for the Subscriber. 
* * The Subscription that is offered to the Subscriber must cancel the original * Publisher if things go wrong (like `request(0)` coming in from downstream) and * it must ensure that we drop the Subscriber reference when `cancel` is invoked. */ @InternalApi private[stream] final class VirtualProcessor[T] extends AtomicReference[AnyRef] with Processor[T, T] { import ReactiveStreamsCompliance._ import VirtualProcessor._ override def toString: String = s"VirtualProcessor(${this.hashCode()})" if (VirtualProcessor.Debug) println(s"created: $this") override def subscribe(s: Subscriber[_ >: T]): Unit = { @tailrec def rec(sub: Subscriber[Any]): Unit = { get() match { case null => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(null).subscribe.rec($s) -> sub") if (!compareAndSet(null, s)) rec(sub) case subscription: Subscription => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($subscription).subscribe.rec($s) -> Establishing(sub)") val establishing = Establishing(sub, false) if (compareAndSet(subscription, establishing)) establishSubscription(establishing, subscription) else rec(sub) case pub: Publisher[_] => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($pub).subscribe.rec($s) -> Inert") if (compareAndSet(pub, Inert)) pub.subscribe(sub) else rec(sub) case other => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($other).subscribe.rec($s): rejectAdditionalSubscriber") rejectAdditionalSubscriber(sub, "VirtualProcessor") } } if (s == null) { val ex = subscriberMustNotBeNullException try rec(Inert.subscriber) finally throw ex // must throw NPE, rule 2:13 } else rec(s.asInstanceOf[Subscriber[Any]]) } override def onSubscribe(s: Subscription): Unit = { @tailrec def rec(obj: AnyRef): Unit = { get() match { case null => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(null).onSubscribe.rec($obj) -> ${obj.getClass}") if (!compareAndSet(null, obj)) rec(obj) case subscriber: Subscriber[_] => obj match { case subscription: Subscription => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($subscriber).onSubscribe.rec($obj) -> Establishing") val establishing = Establishing.create(subscriber) if (compareAndSet(subscriber, establishing)) establishSubscription(establishing, subscription) else rec(obj) case pub: Publisher[_] => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($subscriber).onSubscribe.rec($obj) -> INert") getAndSet(Inert) match { case Inert => // nothing to be done case _ => pub.subscribe(subscriber.asInstanceOf[Subscriber[Any]]) } } case state @ _ => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(_).onSubscribe.rec($s) spec violation") // spec violation tryCancel(s, new IllegalStateException(s"VirtualProcessor in wrong state [$state]. 
Spec violation")) } } if (s == null) { val ex = subscriptionMustNotBeNullException try rec(ErrorPublisher(ex, "failed-VirtualProcessor")) finally throw ex // must throw NPE, rule 2:13 } else rec(s) } private def establishSubscription(establishing: Establishing, subscription: Subscription): Unit = { val wrapped = new WrappedSubscription(subscription) try { if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode.establishSubscription(wrapped)") establishing.subscriber.onSubscribe(wrapped) // while we were establishing some stuff could have happened // most likely case, nobody changed it while we where establishing if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode.establishSubscription.rec($establishing) -> Both") if (compareAndSet(establishing, Both(establishing.subscriber))) { // cas won - life is good // Requests will be only allowed once onSubscribe has returned to avoid reentering on an onNext before // onSubscribe completed wrapped.ungateDemandAndRequestBuffered() } else { // changed by someone else get() match { case Establishing(sub, _, OptionVal.Some(error)) => // there was an onError while establishing if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode.establishSubscription.rec(Establishing(buffered-error) -> Inert") tryOnError(sub, error) set(Inert) case Establishing(sub, true, _) => // there was on onComplete while we were establishing if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode.establishSubscription.rec(Establishing(buffered-complete) -> Inert") tryOnComplete(sub) set(Inert) case Inert => tryCancel(subscription, new IllegalStateException("VirtualProcessor was already subscribed to.")) case other => throw new IllegalStateException( s"Unexpected state while establishing: [$other], if this ever happens it is a bug.") } } } catch { case NonFatal(ex) => set(Inert) tryCancel(subscription, ex) tryOnError(establishing.subscriber, ex) } } override def onError(t: Throwable): Unit = { /* * `ex` is always a reasonable Throwable that we should communicate downstream, * but if `t` was `null` then the spec requires us to throw an NPE (which `ex` * will be in this case). */ @tailrec def rec(ex: Throwable): Unit = get() match { case null => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(null).onError(${ex.getMessage}) -> ErrorPublisher") if (!compareAndSet(null, ErrorPublisher(ex, "failed-VirtualProcessor"))) rec(ex) case s: Subscription => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($s).onError(${ex.getMessage}) -> ErrorPublisher") if (!compareAndSet(s, ErrorPublisher(ex, "failed-VirtualProcessor"))) rec(ex) case Both(s) => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(Both($s)).onError(${ex.getMessage}) -> ErrorPublisher") set(Inert) tryOnError(s, ex) case s: Subscriber[_] => // spec violation if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($s).onError(${ex.getMessage}) -> Inert") getAndSet(Inert) match { case Inert => // nothing to be done case _ => ErrorPublisher(ex, "failed-VirtualProcessor").subscribe(s) } case est @ Establishing(_, false, OptionVal.None) => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($est).onError(${ex.getMessage}), loop") if (!compareAndSet(est, est.copy(onErrorBuffered = OptionVal.Some(ex)))) rec(ex) case other => // spec violation or cancellation race, but nothing we can do if (VirtualProcessor.Debug) println( s"VirtualPublisher#$hashCode($other).onError(${ex.getMessage}). 
spec violation or cancellation race") } val ex = if (t == null) exceptionMustNotBeNullException else t rec(ex) // must throw NPE, rule 2.13 if (t == null) throw ex } @tailrec override def onComplete(): Unit = { get() match { case null => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(null).onComplete -> EmptyPublisher") if (!compareAndSet(null, EmptyPublisher)) onComplete() case s: Subscription => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($s).onComplete -> EmptyPublisher") if (!compareAndSet(s, EmptyPublisher)) onComplete() case _ @Both(s) => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($s).onComplete -> Inert") set(Inert) tryOnComplete(s) case s: Subscriber[_] => // spec violation if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($s).onComplete -> Inert") set(Inert) EmptyPublisher.subscribe(s) case est @ Establishing(_, false, OptionVal.None) => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($est).onComplete -> Establishing with buffered complete") if (!est.onCompleteBuffered && !compareAndSet(est, est.copy(onCompleteBuffered = true))) onComplete() case other => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($other).onComplete spec violation") // spec violation or cancellation race, but nothing we can do } } override def onNext(t: T): Unit = if (t == null) { val ex = elementMustNotBeNullException if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode.onNext(null)") @tailrec def rec(): Unit = get() match { case x @ (null | _: Subscription) => if (!compareAndSet(x, ErrorPublisher(ex, "failed-VirtualProcessor"))) rec() case s: Subscriber[_] => try s.onError(ex) catch { case NonFatal(_) => } finally set(Inert) case Both(s) => try s.onError(ex) catch { case NonFatal(_) => } finally set(Inert) case _ => // spec violation or cancellation race, but nothing we can do } rec() throw ex // must throw NPE, rule 2:13 } else { @tailrec def rec(): Unit = { get() match { case h: HasActualSubscriber => val s = h.subscriber try { if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(${h.getClass.getName}($s)).onNext($t).rec()") s.onNext(t) } catch { case NonFatal(e) => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(Both($s)).onNext($t) threw, spec violation -> Inert") set(Inert) throw new IllegalStateException("Subscriber threw exception, this is in violation of rule 2:13", e) } case s: Subscriber[_] => // spec violation if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($s).onNext($t).rec(): spec violation -> Inert") val ex = new IllegalStateException(noDemand) getAndSet(Inert) match { case Inert => // nothing to be done case _ => ErrorPublisher(ex, "failed-VirtualProcessor").subscribe(s) } throw ex case Inert | _: Publisher[_] => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(Inert|Publisher).onNext($t).rec(): nop") // nothing to be done case other => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode($other).onNext($t).rec() -> ErrorPublisher") val pub = ErrorPublisher(new IllegalStateException(noDemand), "failed-VirtualPublisher") if (!compareAndSet(other, pub)) rec() else throw pub.t } } rec() } private def noDemand = "spec violation: onNext was signaled from upstream without demand" object WrappedSubscription { sealed trait SubscriptionState { def demand: Long } case object PassThrough extends SubscriptionState { override def demand: Long = 0 } case class Buffering(demand: Long) extends SubscriptionState val 
NoBufferedDemand = Buffering(0) } // Extdending AtomicReference to make the hot memory location share the same cache line with the Subscription private class WrappedSubscription(real: Subscription) extends AtomicReference[WrappedSubscription.SubscriptionState](WrappedSubscription.NoBufferedDemand) with Subscription { import WrappedSubscription._ // Release def ungateDemandAndRequestBuffered(): Unit = { if (VirtualProcessor.Debug) println( s"VirtualPublisher#${VirtualProcessor.this.hashCode}.WrappedSubscription($real).ungateDemandAndRequestBuffered") // Ungate demand val requests = getAndSet(PassThrough).demand // And request buffered demand if (requests > 0) real.request(requests) } override def request(n: Long): Unit = { if (n < 1) { if (VirtualProcessor.Debug) println(s"VirtualPublisher#${VirtualProcessor.this.hashCode}.WrappedSubscription($real).request($n)") tryCancel(real, new IllegalArgumentException(s"Demand must not be < 1 but was $n")) VirtualProcessor.this.getAndSet(Inert) match { case Both(subscriber) => rejectDueToNonPositiveDemand(subscriber) case est: Establishing => rejectDueToNonPositiveDemand(est.subscriber) case Inert => // another failure has won the race case _ => // this cannot possibly happen, but signaling errors is impossible at this point } } else { // NOTE: At this point, batched requests might not have been dispatched, i.e. this can reorder requests. // This does not violate the Spec though, since we are a "Processor" here and although we, in reality, // proxy downstream requests, it is virtually *us* that emit the requests here and we are free to follow // any pattern of emitting them. // The only invariant we need to keep is to never emit more requests than the downstream emitted so far. @tailrec def bufferDemand(n: Long): Unit = { val current = get() if (current eq PassThrough) { if (VirtualProcessor.Debug) println( s"VirtualPublisher#${VirtualProcessor.this.hashCode}WrappedSubscription($real).bufferDemand($n) passthrough") real.request(n) } else if (!compareAndSet(current, Buffering(current.demand + n))) { if (VirtualProcessor.Debug) println( s"VirtualPublisher#${VirtualProcessor.this.hashCode}WrappedSubscription($real).bufferDemand($n) buffering") bufferDemand(n) } } bufferDemand(n) } } override def cancel(): Unit = { if (VirtualProcessor.Debug) println(s"VirtualPublisher#${VirtualProcessor.this.hashCode}WrappedSubscription.cancel() -> Inert") VirtualProcessor.this.set(Inert) real.cancel() } } } /** * INTERNAL API * * The implementation of `Sink.asPublisher` needs to offer a `Publisher` that * defers to the upstream that is connected during materialization. This would * be trivial if it were not for materialized value computations that may even * spawn the code that does `pub.subscribe(sub)` in a Future, running concurrently * with the actual materialization. Therefore we implement a minimal shell here * that plugs the downstream and the upstream together as soon as both are known. * Using a VirtualProcessor would technically also work, but it would defeat the * purpose of subscription timeouts—the subscription would always already be * established from the Actor’s perspective, regardless of whether a downstream * will ever be connected. * * One important consideration is that this `Publisher` must not retain a reference * to the `Subscriber` after having hooked it up with the real `Publisher`, hence * the use of `Inert.subscriber` as a tombstone. 
*/ @InternalApi private[impl] class VirtualPublisher[T] extends AtomicReference[AnyRef] with Publisher[T] { import ReactiveStreamsCompliance._ import VirtualProcessor.Inert override def subscribe(subscriber: Subscriber[_ >: T]): Unit = { requireNonNullSubscriber(subscriber) if (VirtualProcessor.Debug) println(s"$this.subscribe: $subscriber") @tailrec def rec(): Unit = { get() match { case null => if (!compareAndSet(null, subscriber)) rec() // retry case pub: Publisher[_] => if (compareAndSet(pub, Inert.subscriber)) { pub.asInstanceOf[Publisher[T]].subscribe(subscriber) } else rec() // retry case _: Subscriber[_] => rejectAdditionalSubscriber(subscriber, "Sink.asPublisher(fanout = false)") } } rec() // return value is boolean only to make the expressions above compile } @tailrec final def registerPublisher(pub: Publisher[_]): Unit = { if (VirtualProcessor.Debug) println(s"$this.registerPublisher: $pub") get() match { case null => if (!compareAndSet(null, pub)) registerPublisher(pub) // retry case sub: Subscriber[r] => set(Inert.subscriber) pub.asInstanceOf[Publisher[r]].subscribe(sub) case p: Publisher[_] => throw new IllegalStateException( s"internal error, already registered [$p], yet attempted to register 2nd publisher [$pub]!") case unexpected => throw new IllegalStateException(s"internal error, unexpected state: $unexpected") } } // this is when the subscription timeout hits, implemented like this to // avoid allocating a separate object for that def onSubscriptionTimeout(am: Materializer, mode: StreamSubscriptionTimeoutTerminationMode): Unit = { import StreamSubscriptionTimeoutTerminationMode._ get() match { case null | _: Publisher[_] => mode match { case CancelTermination => subscribe(new CancellingSubscriber[T]) case WarnTermination => am.logger.warning("Subscription timeout for {}", this) case NoopTermination => // never happens } case _ => // we're ok } } override def toString: String = s"VirtualPublisher(state = ${get()})" } /** * INTERNAL API */ @InternalApi private[akka] final case class ProcessorModule[In, Out, Mat]( val createProcessor: () => (Processor[In, Out], Mat), attributes: Attributes = DefaultAttributes.processor) extends StreamLayout.AtomicModule[FlowShape[In, Out], Mat] { val inPort = Inlet[In]("ProcessorModule.in") val outPort = Outlet[Out]("ProcessorModule.out") override val shape = new FlowShape(inPort, outPort) override def withAttributes(attributes: Attributes) = copy(attributes = attributes) override def toString: String = f"ProcessorModule [${System.identityHashCode(this)}%08x]" override private[stream] def traversalBuilder = LinearTraversalBuilder.fromModule(this, attributes).makeIsland(ProcessorModuleIslandTag) }
{ "pile_set_name": "Github" }
{% if request.is_ajax() %} {%- import "shuup/front/macros/general.jinja" as macros with context -%} {%- import "shuup/front/macros/category.jinja" as category_macros with context -%} {{ category_macros.render_product_list(supplier) }} {% else %} {% extends "shuup/front/base.jinja" %} {% block extrameta %} {% if category %} {{ macros.render_metadata(category, category.name, category.description, "category") }} {% endif %} {% endblock %} {% block title %}{{ (category.name if category else _("Categories")) }}{% endblock %} {% block content_title %}{{ (category.name if category else "") }}{% endblock %} {% block breadcrumb %} {% if category %} {% call macros.breadcrumb() %} {{ macros.render_breadcrumb_item("/", _("Home")) }} {% for c in category.get_ancestors() if c.is_visible(customer=request.customer) %} {{ macros.render_breadcrumb_item(shuup.urls.model_url(c), c.name) }} {% endfor %} {{ macros.render_breadcrumb_item(None, category.name, True) }} {% endcall %} {% endif %} {% endblock %} {% block content %} {% placeholder "category_top" %}{% endplaceholder %} <script>window.PRODUCT_LIST_FILTERS = {{ form.fields.keys()|list|json }};</script> {{ category_macros.render_information() }} {{ category_macros.render_products_section(supplier) }} {% endblock %} {% endif %} {% block extrajs %} {{ category_macros.render_extrajs() }} {% endblock %}
{ "pile_set_name": "Github" }
import java.util.*; public class gen_stress_javacert { public static void main(String[] args) throws Exception { new gen_stress_javacert().go(); } void go() throws Exception { stress(); } int k; int n; int m; int[] p; static class Pair { int wi; int ni; Pair(int wi, int ni) { this.wi = wi; this.ni = ni; } @Override public String toString() { return wi + " " + ni + " " + (100.0*(ni - wi) / ni); } } int w; List<List<Pair>> llpall = new ArrayList<List<Pair>>(); List<List<Pair>> llp = new ArrayList<List<Pair>>(); int soldiff; int maxsteps; Random rnd = new Random(); void stress() { n = 100; m = 10; wi = new int[m]; ni = new int[m]; p = new int[m]; prebuildPairs(); presortPairs(); while (true) { checkOne(); } } long timelimit; private void checkOne() { randomSol(); selectPairs(); soldiff = Integer.MAX_VALUE; long time = System.currentTimeMillis(); timelimit = time + 10000L; int steps = find(0, 0, 0, Integer.MAX_VALUE, Integer.MIN_VALUE); time = System.currentTimeMillis() - time; assert soldiff < Integer.MAX_VALUE; if (steps > maxsteps) { System.out.println(); System.out.printf("%d %d %d%n", k, n, m); for (int i = 0; i < m; i++) { System.out.printf("%d %d %d%n", p[i], wi[i], ni[i]); } System.out.printf("%d steps in %d ms%n", steps, time); maxsteps = steps; } else System.out.println("."); } int[] wi; int[] ni; private void randomSol() { int ns = 0; w = 0; int epr = rnd.nextInt(20); for (int i = 0; i < m; i++) { int rem = m - i - 1; ni[i] = rem == 0 ? n - ns : rnd.nextInt(10) > epr ? Math.min(n - ns - rem, Math.round(n / m)) : 1 + rnd.nextInt(n - ns - rem); wi[i] = rnd.nextInt(ni[i] + 1); p[i] = (int)Math.round(100.0 * (ni[i] - wi[i]) / ni[i]); ns += ni[i]; w += wi[i]; } k = n - w; } private void selectPairs() { llp.clear(); for (int i = 0; i < m; i++) { llp.add(llpall.get(p[i])); } } private void prebuildPairs() { for (int i = 0; i <= n; i++) { llpall.add(new ArrayList<Pair>()); } for (int ni = 1; ni <= n; ni++) { for (int wi = 0; wi <= ni; wi++) { int ppp = 100 * (ni - wi); int pp = (2 * ppp + ni) / (2 * ni); if (2 * ppp % ni == 0 && 2 * ppp / ni % 2 == 1 && pp % 2 == 1) pp--; llpall.get(pp).add(new Pair(wi, ni)); } } } private void presortPairs() { final double mid = n / m; for (int i = 0; i <= n; i++) { Collections.sort(llpall.get(i), new Comparator<Pair>() { public int compare(Pair p1, Pair p2) { double d1 = Math.abs(p1.ni - mid); double d2 = Math.abs(p2.ni - mid); return Double.compare(d1, d2); } }); } } int find(int i, int ws, int ns, int minni, int maxni) { if (ns > n || ws > w) return 1; int diff = maxni - minni; if (diff >= soldiff) return 1; if (i == m) { if (ns < n || ws < w) return 1; soldiff = diff; return 1; } if (System.currentTimeMillis() > timelimit) return 1; int res = 0; for (Pair p : llp.get(i)) { res += find(i + 1, ws + p.wi, ns + p.ni, Math.min(minni, p.ni), Math.max(maxni, p.ni)); } return res; } }
{ "pile_set_name": "Github" }
# --- Do not remove these libs --- from freqtrade.strategy.interface import IStrategy from typing import Dict, List from functools import reduce from pandas import DataFrame, DatetimeIndex, merge # -------------------------------- import talib.abstract as ta import freqtrade.vendor.qtpylib.indicators as qtpylib import numpy # noqa # DO NOT USE, just playing with smooting and graphs! class SmoothOperator(IStrategy): """ author@: Gert Wohlgemuth idea: The concept is about combining several common indicators, with a heavily smoothing, while trying to detect a none completed peak shape. """ # Minimal ROI designed for the strategy. # we only sell after 100%, unless our sell points are found before minimal_roi = { "0": 0.10 } # Optimal stoploss designed for the strategy # This attribute will be overridden if the config file contains "stoploss" # should be converted to a trailing stop loss stoploss = -0.05 # Optimal ticker interval for the strategy ticker_interval = '5m' # resample factor to establish our general trend. Basically don't buy if a trend is not given resample_factor = 12 def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame: # resampled dataframe to establish if we are in an uptrend, downtrend or sideways trend dataframe = StrategyHelper.resample(dataframe, self.ticker_interval, self.resample_factor) ################################################################################## # required for entry and exit # CCI dataframe['cci'] = ta.CCI(dataframe, timeperiod=20) dataframe['rsi'] = ta.RSI(dataframe, timeperiod=14) dataframe['adx'] = ta.ADX(dataframe) dataframe['mfi'] = ta.MFI(dataframe) dataframe['mfi_smooth'] = ta.EMA(dataframe, timeperiod=11, price='mfi') dataframe['cci_smooth'] = ta.EMA(dataframe, timeperiod=11, price='cci') dataframe['rsi_smooth'] = ta.EMA(dataframe, timeperiod=11, price='rsi') ################################################################################## # required for graphing bollinger = qtpylib.bollinger_bands(dataframe['close'], window=20, stds=2) dataframe['bb_lowerband'] = bollinger['lower'] dataframe['bb_upperband'] = bollinger['upper'] dataframe['bb_middleband'] = bollinger['mid'] # MACD macd = ta.MACD(dataframe) dataframe['macd'] = macd['macd'] dataframe['macdsignal'] = macd['macdsignal'] dataframe['macdhist'] = macd['macdhist'] ################################################################################## # required for entry bollinger = qtpylib.bollinger_bands(dataframe['close'], window=20, stds=1.6) dataframe['entry_bb_lowerband'] = bollinger['lower'] dataframe['entry_bb_upperband'] = bollinger['upper'] dataframe['entry_bb_middleband'] = bollinger['mid'] dataframe['bpercent'] = (dataframe['close'] - dataframe['bb_lowerband']) / ( dataframe['bb_upperband'] - dataframe['bb_lowerband']) * 100 dataframe['bsharp'] = (dataframe['bb_upperband'] - dataframe['bb_lowerband']) / ( dataframe['bb_middleband']) # these seem to be kind useful to measure when bands widen # but than they are directly based on the moving average dataframe['bsharp_slow'] = ta.SMA(dataframe, price='bsharp', timeperiod=11) dataframe['bsharp_medium'] = ta.SMA(dataframe, price='bsharp', timeperiod=8) dataframe['bsharp_fast'] = ta.SMA(dataframe, price='bsharp', timeperiod=5) ################################################################################## # rsi and mfi are slightly weighted dataframe['mfi_rsi_cci_smooth'] = (dataframe['rsi_smooth'] * 1.125 + dataframe['mfi_smooth'] * 1.125 + dataframe[ 'cci_smooth']) / 3 
dataframe['mfi_rsi_cci_smooth'] = ta.TEMA(dataframe, timeperiod=21, price='mfi_rsi_cci_smooth') # playgound dataframe['candle_size'] = (dataframe['close'] - dataframe['open']) * ( dataframe['close'] - dataframe['open']) / 2 # helps with pattern recognition dataframe['average'] = (dataframe['close'] + dataframe['open'] + dataframe['high'] + dataframe['low']) / 4 dataframe['sma_slow'] = ta.SMA(dataframe, timeperiod=200, price='close') dataframe['sma_medium'] = ta.SMA(dataframe, timeperiod=100, price='close') dataframe['sma_fast'] = ta.SMA(dataframe, timeperiod=50, price='close') return dataframe def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame: dataframe.loc[ ( # protection against pump and dump # (dataframe['volume'] < (dataframe['volume'].rolling(window=30).mean().shift(1) * 20)) # # & (dataframe['macd'] < dataframe['macdsignal']) # & (dataframe['macd'] > 0) # # spike below entry band for 3 consecutive ticks # & (dataframe['low'] < dataframe['entry_bb_lowerband']) # & (dataframe['low'].shift(1) < dataframe['bb_lowerband'].shift(1)) # & (dataframe['low'].shift(2) < dataframe['bb_lowerband'].shift(2)) # # pattern recognition # & ( # (dataframe['close'] > dataframe['open']) # | (dataframe['CDLHAMMER'] == 100) # | (dataframe['CDLINVERTEDHAMMER'] == 100) # | (dataframe['CDLDRAGONFLYDOJI'] == 100) # ) # bottom curve detection # & (dataframe['mfi_rsi_cci_smooth'] < 0) # # | ( # simple v bottom shape (lopsided to the left to increase reactivity) # which has to be below a very slow average # this pattern only catches a few, but normally very good buy points ( (dataframe['average'].shift(5) > dataframe['average'].shift(4)) & (dataframe['average'].shift(4) > dataframe['average'].shift(3)) & (dataframe['average'].shift(3) > dataframe['average'].shift(2)) & (dataframe['average'].shift(2) > dataframe['average'].shift(1)) & (dataframe['average'].shift(1) < dataframe['average'].shift(0)) & (dataframe['low'].shift(1) < dataframe['bb_middleband']) & (dataframe['cci'].shift(1) < -100) & (dataframe['rsi'].shift(1) < 30) ) | # buy in very oversold conditions ( (dataframe['low'] < dataframe['bb_middleband']) & (dataframe['cci'] < -200) & (dataframe['rsi'] < 30) & (dataframe['mfi'] < 30) ) | # etc tends to trade like this # over very long periods of slowly building up coins # does not happen often, but once in a while ( (dataframe['mfi'] < 10) & (dataframe['cci'] < -150) & (dataframe['rsi'] < dataframe['mfi']) ) ) & # ensure we have an overall uptrend (dataframe['close'] > dataframe) ), 'buy'] = 1 return dataframe def populate_sell_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame: # different strategy used for sell points, due to be able to duplicate it to 100% dataframe.loc[ ( ( # This generates very nice sale points, and mostly sit's one stop behind # the top of the peak ( (dataframe['mfi_rsi_cci_smooth'] > 100) & (dataframe['mfi_rsi_cci_smooth'].shift(1) > dataframe['mfi_rsi_cci_smooth']) & (dataframe['mfi_rsi_cci_smooth'].shift(2) < dataframe['mfi_rsi_cci_smooth'].shift(1)) & (dataframe['mfi_rsi_cci_smooth'].shift(3) < dataframe['mfi_rsi_cci_smooth'].shift(2)) ) | # This helps with very long, sideways trends, to get out of a market before # it dumps ( StrategyHelper.eight_green_candles(dataframe) ) | # in case of very overbought market, like some one pumping # sell ( (dataframe['cci'] > 200) & (dataframe['rsi'] > 70) ) ) ), 'sell'] = 1 return dataframe class StrategyHelper: """ simple helper class to predefine a couple of patterns for our strategy """ 
@staticmethod def seven_green_candles(dataframe): """ evaluates if we are having 7 green candles in a row :param self: :param dataframe: :return: """ return ( (dataframe['open'] < dataframe['close']) & (dataframe['open'].shift(1) < dataframe['close'].shift(1)) & (dataframe['open'].shift(2) < dataframe['close'].shift(2)) & (dataframe['open'].shift(3) < dataframe['close'].shift(3)) & (dataframe['open'].shift(4) < dataframe['close'].shift(4)) & (dataframe['open'].shift(5) < dataframe['close'].shift(5)) & (dataframe['open'].shift(6) < dataframe['close'].shift(6)) & (dataframe['open'].shift(7) < dataframe['close'].shift(7)) ) @staticmethod def eight_green_candles(dataframe): """ evaluates if we are having 8 green candles in a row :param self: :param dataframe: :return: """ return ( (dataframe['open'] < dataframe['close']) & (dataframe['open'].shift(1) < dataframe['close'].shift(1)) & (dataframe['open'].shift(2) < dataframe['close'].shift(2)) & (dataframe['open'].shift(3) < dataframe['close'].shift(3)) & (dataframe['open'].shift(4) < dataframe['close'].shift(4)) & (dataframe['open'].shift(5) < dataframe['close'].shift(5)) & (dataframe['open'].shift(6) < dataframe['close'].shift(6)) & (dataframe['open'].shift(7) < dataframe['close'].shift(7)) & (dataframe['open'].shift(8) < dataframe['close'].shift(8)) ) @staticmethod def eight_red_candles(dataframe, shift=0): """ evaluates if we are having 8 red candles in a row :param self: :param dataframe: :param shift: shift the pattern by n :return: """ return ( (dataframe['open'].shift(shift) > dataframe['close'].shift(shift)) & (dataframe['open'].shift(1 + shift) > dataframe['close'].shift(1 + shift)) & (dataframe['open'].shift(2 + shift) > dataframe['close'].shift(2 + shift)) & (dataframe['open'].shift(3 + shift) > dataframe['close'].shift(3 + shift)) & (dataframe['open'].shift(4 + shift) > dataframe['close'].shift(4 + shift)) & (dataframe['open'].shift(5 + shift) > dataframe['close'].shift(5 + shift)) & (dataframe['open'].shift(6 + shift) > dataframe['close'].shift(6 + shift)) & (dataframe['open'].shift(7 + shift) > dataframe['close'].shift(7 + shift)) & (dataframe['open'].shift(8 + shift) > dataframe['close'].shift(8 + shift)) ) @staticmethod def four_green_one_red_candle(dataframe): """ evaluates if we are having a red candle and 4 previous green :param self: :param dataframe: :return: """ return ( (dataframe['open'] > dataframe['close']) & (dataframe['open'].shift(1) < dataframe['close'].shift(1)) & (dataframe['open'].shift(2) < dataframe['close'].shift(2)) & (dataframe['open'].shift(3) < dataframe['close'].shift(3)) & (dataframe['open'].shift(4) < dataframe['close'].shift(4)) ) @staticmethod def four_red_one_green_candle(dataframe): """ evaluates if we are having a green candle and 4 previous red :param self: :param dataframe: :return: """ return ( (dataframe['open'] < dataframe['close']) & (dataframe['open'].shift(1) > dataframe['close'].shift(1)) & (dataframe['open'].shift(2) > dataframe['close'].shift(2)) & (dataframe['open'].shift(3) > dataframe['close'].shift(3)) & (dataframe['open'].shift(4) > dataframe['close'].shift(4)) ) @staticmethod def resample( dataframe, interval, factor): # defines the reinforcement logic # resampled dataframe to establish if we are in an uptrend, downtrend or sideways trend df = dataframe.copy() df = df.set_index(DatetimeIndex(df['date'])) ohlc_dict = { 'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last' } df = df.resample(str(int(interval[:-1]) * factor) + 'min', plotoschow=ohlc_dict) df['resample_sma'] = 
ta.SMA(df, timeperiod=25, price='close')
        df = df.drop(columns=['open', 'high', 'low', 'close'])
        df = df.resample(interval[:-1] + 'min')
        df = df.interpolate(method='time')
        df['date'] = df.index
        df.index = range(len(df))
        dataframe = merge(dataframe, df, on='date', how='left')
        return dataframe
{ "pile_set_name": "Github" }
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build arm,!gccgo,!appengine,!nacl

package poly1305

// This function is implemented in sum_arm.s
//go:noescape
func poly1305_auth_armv6(out *[16]byte, m *byte, mlen uint32, key *[32]byte)

// Sum generates an authenticator for m using a one-time key and puts the
// 16-byte result into out. Authenticating two different messages with the same
// key allows an attacker to forge messages at will.
func Sum(out *[16]byte, m []byte, key *[32]byte) {
	var mPtr *byte
	if len(m) > 0 {
		mPtr = &m[0]
	}
	poly1305_auth_armv6(out, mPtr, uint32(len(m)), key)
}
{ "pile_set_name": "Github" }
const webpack = require('webpack'); const path = require('path'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const ExtractTextPlugin = require('extract-text-webpack-plugin'); const autoprefixer = require('autoprefixer'); const CopyWebpackPlugin = require('copy-webpack-plugin'); const nodeEnv = process.env.NODE_ENV || 'development'; const isProduction = nodeEnv === 'production'; const jsSourcePath = path.join(__dirname, './src'); const buildPath = path.join(__dirname, './dist'); const assetsDirName = 'assets'; const assetsPath = path.join(__dirname, './' + assetsDirName); const sourcePath = path.join(__dirname, './src'); // Common plugins const plugins = [ // new webpack.optimize.CommonsChunkPlugin({ // name: 'vendor', // filename: 'vendor.js', // minChunks(module) { // const context = module.context; // return context && context.indexOf('node_modules') >= 0; // }, // }), new webpack.DefinePlugin({ 'process.env': { NODE_ENV: JSON.stringify(nodeEnv), }, }), new webpack.NamedModulesPlugin(), new HtmlWebpackPlugin({ template: path.join(sourcePath, 'index.html'), path: buildPath, filename: 'index.html', }), new webpack.LoaderOptionsPlugin({ options: { postcss: [ autoprefixer({ browsers: [ 'last 2 version', 'ie >= 10', ], }), ], context: sourcePath, }, }), ]; // Common rules const rules = [ { test: /\.js$/, exclude: /node_modules/, use: [ 'babel-loader', ], }, { test: /\.(ttf|eot|svg|woff|woff2|otf)(\?v=\d+\.\d+\.\d+)?/, use: [ { loader: 'url-loader?limit=20480', }, ], }, { test: /\.(svg|png|jpg|jpeg|gif|fsh|vsh|json)(\?v=\d+\.\d+\.\d+)?$/, include: assetsPath, use: [ { loader: 'file-loader?limit=20480', }, ], }, ]; if (isProduction) { // Production plugins plugins.push( new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false, screw_ie8: true, conditionals: true, unused: true, comparisons: true, sequences: true, dead_code: true, evaluate: true, if_return: true, join_vars: true, }, output: { comments: false, }, }), new ExtractTextPlugin('jsorrery.css'), new CopyWebpackPlugin([ { from: assetsPath, to: buildPath + '/' + assetsDirName, } ]) ); // Production rules rules.push( { test: /\.scss$/, loader: ExtractTextPlugin.extract({ fallback: 'style-loader', use: 'css-loader!postcss-loader!sass-loader', }), } ); } else { // Development plugins plugins.push( new webpack.HotModuleReplacementPlugin() ); // Development rules rules.push( { test: /\.scss$/, use: [ 'style-loader', // Using source maps breaks urls in the CSS loader // https://github.com/webpack/css-loader/issues/232 // This comment solves it, but breaks testing from a local network // https://github.com/webpack/css-loader/issues/232#issuecomment-240449998 // 'css-loader?sourceMap', 'css-loader', { loader: 'postcss-loader', options: { sourceMap: true }, }, 'sass-loader?sourceMap', ], } ); } module.exports = { devtool: isProduction ? false : 'source-map', context: jsSourcePath, entry: { js: './index.js', }, output: { path: buildPath, publicPath: isProduction ? '' : '/', filename: 'jsorrery.js', }, module: { rules, }, resolve: { extensions: ['.webpack-loader.js', '.web-loader.js', '.loader.js', '.js'], modules: [ path.resolve(__dirname, 'node_modules'), jsSourcePath, ], alias: { }, }, plugins, devServer: { contentBase: isProduction ? 
buildPath : sourcePath, historyApiFallback: true, port: 2018, compress: isProduction, inline: !isProduction, hot: !isProduction, host: '0.0.0.0', //to make sure that any host will work (provided it points to 127.0.0.1 and has the correct port) disableHostCheck: true, stats: { assets: true, children: false, chunks: false, hash: false, modules: false, publicPath: false, timings: true, version: false, warnings: true, colors: { green: '\u001b[32m', }, }, headers: { "Access-Control-Allow-Origin": "*", } }, };
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en" ng-app="jpm"> <head> <meta charset="utf-8" /> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1" /> <meta name="viewport" content="width=device-width, initial-scale=1" /> <link href="/releases/4.3.0/css/style.css" rel="stylesheet" /> <script src="https://code.jquery.com/jquery-3.4.1.min.js"></script> <script src="/js/releases.js"></script> <!-- Begin Jekyll SEO tag v2.5.0 --> <title>generate</title> <meta name="generator" content="Jekyll v3.8.5" /> <meta property="og:title" content="generate" /> <meta property="og:locale" content="en_US" /> <meta name="description" content="Description" /> <meta property="og:description" content="Description" /> <script type="application/ld+json"> {"@type":"WebPage","url":"/releases/4.3.0/commands/generate.html","headline":"generate","description":"Description","@context":"http://schema.org"}</script> <!-- End Jekyll SEO tag --> </head> <body> <ul class="container12 menu-bar"> <li span=11><a class=menu-link href="/releases/4.3.0/"><img class=menu-logo src='/releases/4.3.0/img/bnd-80x40-white.png'></a> <a href="/releases/4.3.0/chapters/110-introduction.html">Intro </a><a href="/releases/4.3.0/chapters/800-headers.html">Headers </a><a href="/releases/4.3.0/chapters/825-instructions-ref.html">Instructions </a><a href="/releases/4.3.0/chapters/855-macros-ref.html">Macros </a><a href="/releases/4.3.0/chapters/400-commands.html">Commands </a><div class="releases"><button class="dropbtn">4.3.0</button><div class="dropdown-content"></div></div> <li class=menu-link span=1> <a href="https://github.com/bndtools/bnd" target="_"><img style="position:absolute;top:0;right:0;margin:0;padding:0;z-index:100" src="https://camo.githubusercontent.com/38ef81f8aca64bb9a64448d0d70f1308ef5341ab/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f6461726b626c75655f3132313632312e706e67" alt="Fork me on GitHub" data-canonical-src="https://s3.amazonaws.com/github/ribbons/forkme_right_darkblue_121621.png"></a> </ul> <ul class=container12> <li span=3> <div> <ul class="side-nav"> <li><a href="/releases/4.3.0/chapters/110-introduction.html">Introduction</a> <li><a href="/releases/4.3.0/chapters/120-install.html">How to install bnd</a> <li><a href="/releases/4.3.0/chapters/123-tour-workspace.html">Guided Tour</a> <li><a href="/releases/4.3.0/chapters/125-tour-features.html">Guided Tour Workspace & Projects</a> <li><a href="/releases/4.3.0/chapters/130-concepts.html">Concepts</a> <li><a href="/releases/4.3.0/chapters/140-best-practices.html">Best practices</a> <li><a href="/releases/4.3.0/chapters/150-build.html">Build</a> <li><a href="/releases/4.3.0/chapters/155-project-setup.html">Project Setup</a> <li><a href="/releases/4.3.0/chapters/160-jars.html">Generating JARs</a> <li><a href="/releases/4.3.0/chapters/170-versioning.html">Versioning</a> <li><a href="/releases/4.3.0/chapters/180-baselining.html">Baselining</a> <li><a href="/releases/4.3.0/chapters/200-components.html">Service Components</a> <li><a href="/releases/4.3.0/chapters/210-metatype.html">Metatype</a> <li><a href="/releases/4.3.0/chapters/220-contracts.html">Contracts</a> <li><a href="/releases/4.3.0/chapters/230-manifest-annotations.html">Bundle Annotations</a> <li><a href="/releases/4.3.0/chapters/235-accessor-properties.html">Accessor Properties</a> <li><a href="/releases/4.3.0/chapters/240-spi-annotations.html">SPI Annotations</a> <li><a href="/releases/4.3.0/chapters/250-resolving.html">Resolving Dependencies</a> <li><a 
href="/releases/4.3.0/chapters/300-launching.html">Launching</a> <li><a href="/releases/4.3.0/chapters/305-startlevels.html">Startlevels</a> <li><a href="/releases/4.3.0/chapters/310-testing.html">Testing</a> <li><a href="/releases/4.3.0/chapters/315-launchpad-testing.html">Testing with Launchpad</a> <li><a href="/releases/4.3.0/chapters/320-packaging.html">Packaging Applications</a> <li><a href="/releases/4.3.0/chapters/330-jpms.html">JPMS Libraries</a> <li><a href="/releases/4.3.0/chapters/390-wrapping.html">Wrapping Libraries to OSGi Bundles</a> <li><a href="/releases/4.3.0/chapters/395-generating-documentation.html">Generating Documentation</a> <li><a href="/releases/4.3.0/chapters/400-commands.html">Commands</a> <li><a href="/releases/4.3.0/chapters/600-developer.html">For Developers</a> <li><a href="/releases/4.3.0/chapters/700-tools.html">Tools bound to bnd</a> <li><a href="/releases/4.3.0/chapters/800-headers.html">Headers</a> <li><a href="/releases/4.3.0/chapters/820-instructions.html">Instruction Reference</a> <li><a href="/releases/4.3.0/chapters/825-instructions-ref.html">Instruction Index</a> <li><a href="/releases/4.3.0/chapters/850-macros.html">Macro Reference</a> <li><a href="/releases/4.3.0/chapters/855-macros-ref.html">Macro Index</a> <li><a href="/releases/4.3.0/chapters/870-plugins.html">Plugins</a> <li><a href="/releases/4.3.0/chapters/880-settings.html">Settings</a> <li><a href="/releases/4.3.0/chapters/900-errors.html">Errors</a> <li><a href="/releases/4.3.0/chapters/910-warnings.html">Warnings</a> <li><a href="/releases/4.3.0/chapters/920-faq.html">Frequently Asked Questions</a> </ul> </div> <li span=9> <div class=notes-margin> <h1> generate</h1> <h2 id="description">Description</h2> <p>Generate autocompletion file for bash</p> <h2 id="synopsis">Synopsis</h2> <p>generate</p> <h2 id="options">Options</h2> <h2 id="examples">Examples</h2> </div> </ul> <nav class=next-prev> <a href='/releases/4.3.0/commands/find.html'></a> <a href='/releases/4.3.0/commands/grep.html'></a> </nav> <footer class="container12" style="border-top: 1px solid black;padding:10px 0"> <ul span=12 row> <li span=12> <ul> <li><a href="/releases/4.3.0/">GitHub</a> </ul> </ul> </footer> </body> </html>
{ "pile_set_name": "Github" }
大石忍最新番号 【LEZM-001】禁断の同窓会 トリプルレズベスト 15人 4時間 【NACX-012】五十路熟女の絶倫性交12人 【HQIS-065】ヘンリー塚本原作 母(ぎぼ)と息子 【GIGL-500】不意の乳揉みで全身が性感帯のように敏感になってしまった巨乳おばさんが発情したらレズの誘いも拒まない 【NASS-825】五十路美熟女濃厚中出し10人 【GOJU-048】上品な五十路女たちのえげつない肉食セックス大全集 5時間 【NATR-574】五十路母が息子に手ほどき性教育 大石忍 【AST-48】近親相姦 五十路のお母さんに膣中出し 四時間12人スペシャルコレクション第五章 【YLW-4429】夏だ!裸だ!青姦だ! 【MLW-5040】五十路 美熟女ベスト 大石忍 4時間 【YLW-4419】おんな一人旅 旅先でハメちゃう淫らな熟女たち 【HQIS-027】ヘンリー塚本原作 義母(おふくろ)という名の女 男なしでは生きられぬ未亡人の母/二度目の母を眠らせてレイプ!/娘は妊娠中!…婿を誘惑する母 【GIGL-372】ワケあり女の一人旅で訪れた先で不意に見知らぬ男のチ○ポを見てしまったおばさんは、内心ヤりたい欲望を抑えきれず拒むフリをしても挿入を許してしまう 【GIGL-366】ま、まさか、50過ぎの母親の裸体で勃起するなんて…決して裕福ではない母子家庭でシングルマザーとして懸命にボクを育ててくれた母との温泉旅行。二人っきりの混浴風呂で久しぶりに見た母さんの熟れた乳房に目が釘付けに…4 【MSJR-003】回春エステでビンビンに勃起したチ○ポをハメちゃう五十路マッサージおばさん 【GOJU-010】女性用大人のおもちゃを訪問販売された五十路妻たち 【AKBS-034】6人の五十路女が叫ぶ! いく!いく!いぐぅ! 【MLW-5039】嫁の母 濡れすぎて発情 12人4時間 【GEKS-009】嫁の母~淫らな性愛 【AWD-93】近親相姦 母のお尻 五十路母の色白完熟尻 大石忍 【QIZZX-003】おばさん家庭教師~お子さんの童貞卒業させてあげます~DX 3 10人4時間 【HQIS-012】ヘンリー塚本原作 色っぽい人妻 【AVOP-211】元祖 時間よ止まれ!~いつでもどこでもハレンチ天国~ 【RUNG-2002】五十路 美熟汁グショグショオナニー オマ○コかきまわしてマン汁タレながし! 23人4時間 【MLW-2153】母さんが抜いてあげるわよ。寝ている息子をムリヤリ発射! 【MLW-2151】巨乳キャリアOLの逆セクハラ~部長はセックスモンスター 大石忍 【NASS-455】彼女の母に…~大人の色気に魅せられて~ 【MLW-2145】隣の奥さん 玄関先でムリヤリ発射 たまってるんじゃないの?抜いてあげるわよ。 【NASS-432】嫁の母に… ~禁断の家族交尾~ 12人4時間30分 【LES-1004】熟女レズ発情 禁断の同窓会 大石忍 花島瑞江 加納綾子 【MLW-2139】密室の母と子 奪われた五十路母 大石忍 【NASS-389】嫁の母に… ~禁断の中出し性交~ 【MLW-2137】熟した女の禁断交尾 【DVDES-929】SEXのハードルが異常に低い世界 11 【GMED-099】推定五十代の美しい人妻がジム仲間の男と疲労回復マッサージを体験!「密着ポジションでリンパを刺激し合いフルボッキチ●ポとビジョビジョマ●コが擦れ始め旦那に内緒で不倫生中出し!」 【MLW-2135】嫁の母 ~禁断の欲情~ 【YUM-1004】熟れた女の熟れ乳 発情 4時間 20人 【JLZ-06】熟女レズ 自慰と放尿 大石忍 本間夏子 【MLW-2124】背徳交尾~五十路義母の発情 大石忍 【BKD-134】母子交尾 ~奥鬼怒路~ 大石忍</a>2015-09-14ルビー$$$旅路110分钟【LEZM-001】禁断の同窓会 トリプルレズベスト 15人 4時間 【NACX-012】五十路熟女の絶倫性交12人 【HQIS-065】ヘンリー塚本原作 母(ぎぼ)と息子 【GIGL-500】不意の乳揉みで全身が性感帯のように敏感になってしまった巨乳おばさんが発情したらレズの誘いも拒まない 【NASS-825】五十路美熟女濃厚中出し10人 【GOJU-048】上品な五十路女たちのえげつない肉食セックス大全集 5時間 【NATR-574】五十路母が息子に手ほどき性教育 大石忍 【AST-48】近親相姦 五十路のお母さんに膣中出し 四時間12人スペシャルコレクション第五章 【YLW-4429】夏だ!裸だ!青姦だ! 【MLW-5040】五十路 美熟女ベスト 大石忍 4時間 【YLW-4419】おんな一人旅 旅先でハメちゃう淫らな熟女たち 【HQIS-027】ヘンリー塚本原作 義母(おふくろ)という名の女 男なしでは生きられぬ未亡人の母/二度目の母を眠らせてレイプ!/娘は妊娠中!…婿を誘惑する母 【GIGL-372】ワケあり女の一人旅で訪れた先で不意に見知らぬ男のチ○ポを見てしまったおばさんは、内心ヤりたい欲望を抑えきれず拒むフリをしても挿入を許してしまう 【GIGL-366】ま、まさか、50過ぎの母親の裸体で勃起するなんて…決して裕福ではない母子家庭でシングルマザーとして懸命にボクを育ててくれた母との温泉旅行。二人っきりの混浴風呂で久しぶりに見た母さんの熟れた乳房に目が釘付けに…4 【MSJR-003】回春エステでビンビンに勃起したチ○ポをハメちゃう五十路マッサージおばさん 【GOJU-010】女性用大人のおもちゃを訪問販売された五十路妻たち 【AKBS-034】6人の五十路女が叫ぶ! いく!いく!いぐぅ! 【MLW-5039】嫁の母 濡れすぎて発情 12人4時間 【GEKS-009】嫁の母~淫らな性愛 【AWD-93】近親相姦 母のお尻 五十路母の色白完熟尻 大石忍 【QIZZX-003】おばさん家庭教師~お子さんの童貞卒業させてあげます~DX 3 10人4時間 【HQIS-012】ヘンリー塚本原作 色っぽい人妻 【AVOP-211】元祖 時間よ止まれ!~いつでもどこでもハレンチ天国~ 【RUNG-2002】五十路 美熟汁グショグショオナニー オマ○コかきまわしてマン汁タレながし! 23人4時間 【MLW-2153】母さんが抜いてあげるわよ。寝ている息子をムリヤリ発射! 【MLW-2151】巨乳キャリアOLの逆セクハラ~部長はセックスモンスター 大石忍 【NASS-455】彼女の母に…~大人の色気に魅せられて~ 【MLW-2145】隣の奥さん 玄関先でムリヤリ発射 たまってるんじゃないの?抜いてあげるわよ。 【NASS-432】嫁の母に… ~禁断の家族交尾~ 12人4時間30分 【LES-1004】熟女レズ発情 禁断の同窓会 大石忍 花島瑞江 加納綾子 【MLW-2139】密室の母と子 奪われた五十路母 大石忍 【NASS-389】嫁の母に… ~禁断の中出し性交~ 【MLW-2137】熟した女の禁断交尾 【DVDES-929】SEXのハードルが異常に低い世界 11 【GMED-099】推定五十代の美しい人妻がジム仲間の男と疲労回復マッサージを体験!「密着ポジションでリンパを刺激し合いフルボッキチ●ポとビジョビジョマ●コが擦れ始め旦那に内緒で不倫生中出し!」 【MLW-2135】嫁の母 ~禁断の欲情~ 【YUM-1004】熟れた女の熟れ乳 発情 4時間 20人 【JLZ-06】熟女レズ 自慰と放尿 大石忍 本間夏子 【MLW-2124】背徳交尾~五十路義母の発情 大石忍 【BKD-134】母子交尾 ~奥鬼怒路~ 大石忍</a>2015-09-14ルビー$$$旅路110分钟
{ "pile_set_name": "Github" }
/**************************************************************************** * Core Library Version 1.7, August 2004 * Copyright (c) 1995-2004 Exact Computation Project * All rights reserved. * * This file is part of CORE (http://cs.nyu.edu/exact/core/). * You can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation, * either version 3 of the License, or (at your option) any later version. * * Licensees holding a valid commercial license may use this file in * accordance with the commercial license agreement provided with the * software. * * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. * * * $URL$ * $Id$ ***************************************************************************/ /****************************************************************** * Core Library Version 1.7, August 2004 * Copyright (c) 1995-2002 Exact Computation Project * * File: LinearAlgebra.h * Synopsis: * Linear Algebra Extension of Core Library introducing * class Vector * class Matrix * * Written by * Shubin Zhao ([email protected]) (2001) * * WWW URL: http://cs.nyu.edu/exact/ * Email: [email protected] * * $Id$ *****************************************************************/ #ifndef CORE_LINEAR_ALGEBRA_H #define CORE_LINEAR_ALGEBRA_H #ifndef CORE_LEVEL # define CORE_LEVEL 3 #endif #include <cstdarg> #include <CGAL/CORE/CORE.h> class Vector; class Matrix; //////////////////////////////////////////////////////////////////////// // Class Vector // Generic vectors // Operations implemented: addition, subtraction, dot product //////////////////////////////////////////////////////////////////////// class Vector { private: int dim; double* _rep; public: class RangeException { }; class ArithmeticException { }; explicit Vector(int); Vector(); Vector(double, double); Vector(double, double, double); Vector(const Vector&); Vector(int, double *); ~Vector(); const Vector& operator=(const Vector&); bool operator==(const Vector&); bool operator!=(const Vector&); const Vector& operator+=(const Vector&); const Vector& operator-=(const Vector&); const Vector& operator*=(double); const double& operator[](int) const; double& operator[](int); double norm() const; double maxnorm() const; double infnorm() const; double dimension() const {return dim;} bool isZero() const; Vector cross(const Vector &v) const; static Vector crossProduct(int, ...); friend Vector operator+(const Vector&, const Vector&); friend Vector operator-(const Vector&, const Vector&); friend Vector operator-(const Vector&); friend Vector operator*(const Vector&, double); friend Vector operator*(double, const Vector&); friend Vector operator*(const Matrix&, const Vector&); friend Vector operator*(const Vector&, const Matrix&); friend double dotProduct(const Vector&, const Vector&); friend std::istream& operator>>(std::istream&, Vector&); friend std::ostream& operator<<(std::ostream&, const Vector&); }; //////////////////////////////////////////////////////////////////////// // Class Matrix // Generic matrices // Operations implemented: addition, subtraction, multiplication //////////////////////////////////////////////////////////////////////// class Matrix { private: int dim1, dim2; double* _rep; public: class RangeException { }; class ArithmeticException { }; explicit Matrix(int); Matrix(int, int); Matrix(int, int, double *); Matrix(double, double, double, double); Matrix(double, double, 
double, double, double, double, double, double, double); Matrix(const Matrix&); ~Matrix(); Matrix& operator=(const Matrix&); bool operator==(const Matrix&); bool operator!=(const Matrix&); const Matrix& operator+=(const Matrix&); const Matrix& operator-=(const Matrix&); const Matrix& operator*=(double); const double& operator()(int, int) const; double& operator()(int, int); // added by chen li // const Vector& row(int i) const; // const Vector& col(int i) const; Matrix matrixAlgebraRemainder(int, int) const; double valueAlgebraRemainder(int, int) const; const Matrix& transpose(); double determinant() const; int dimension_1() const { return dim1; } int dimension_2() const { return dim2; } friend Matrix operator+(const Matrix&, const Matrix&); friend Matrix operator-(const Matrix&, const Matrix&); friend Matrix operator*(const Matrix&, double); friend Matrix operator*(double, const Matrix&); friend Vector operator*(const Vector&, const Matrix&); friend Vector operator*(const Matrix&, const Vector&); friend Matrix operator*(const Matrix&, const Matrix&); friend Matrix transpose(const Matrix&); friend double det(const double a, const double b, const double c, const double d); friend double det(const Vector u, const Vector & v); // u,v are 2d vectors friend std::istream& operator>>(std::istream&, Matrix&); friend std::ostream& operator<<(std::ostream&, const Matrix&); }; //Matrix #endif
{ "pile_set_name": "Github" }
#ifndef CARYLL_INCLUDE_TABLE_CFF_H #define CARYLL_INCLUDE_TABLE_CFF_H #include "table-common.h" #include "head.h" #include "glyf.h" typedef struct { scale_t a; scale_t b; scale_t c; scale_t d; VQ x; VQ y; } cff_FontMatrix; typedef struct { arity_t blueValuesCount; OWNING double *blueValues; arity_t otherBluesCount; OWNING double *otherBlues; arity_t familyBluesCount; OWNING double *familyBlues; arity_t familyOtherBluesCount; OWNING double *familyOtherBlues; double blueScale; double blueShift; double blueFuzz; double stdHW; double stdVW; arity_t stemSnapHCount; OWNING double *stemSnapH; arity_t stemSnapVCount; OWNING double *stemSnapV; bool forceBold; uint32_t languageGroup; double expansionFactor; double initialRandomSeed; double defaultWidthX; double nominalWidthX; } cff_PrivateDict; typedef struct _table_CFF table_CFF; struct _table_CFF { // Name sds fontName; // General properties bool isCID; sds version; sds notice; sds copyright; sds fullName; sds familyName; sds weight; bool isFixedPitch; double italicAngle; double underlinePosition; double underlineThickness; double fontBBoxTop; double fontBBoxBottom; double fontBBoxLeft; double fontBBoxRight; double strokeWidth; OWNING cff_PrivateDict *privateDict; OWNING cff_FontMatrix *fontMatrix; // CID-only operators sds cidRegistry; sds cidOrdering; uint32_t cidSupplement; double cidFontVersion; double cidFontRevision; uint32_t cidCount; uint32_t UIDBase; // CID FDArray tableid_t fdArrayCount; OWNING table_CFF **fdArray; }; extern caryll_RefElementInterface(table_CFF) table_iCFF; // CFF and glyf typedef struct { OWNING table_CFF *meta; OWNING table_glyf *glyphs; } table_CFFAndGlyf; #endif
{ "pile_set_name": "Github" }
import { assert, expect } from 'chai'; import 'mocha'; import { Index } from '../lib/index'; import { DataFrame } from '../lib/dataframe'; import { ArrayIterable } from '../lib/iterables/array-iterable'; import { DeflateRaw } from 'zlib'; describe('DataFrame melt', () => { var df: DataFrame; beforeEach(()=> { df = new DataFrame([ {'A': 'a', 'B': 1, 'C': 2, 'D': 5}, {'A': 'b', 'B': 3, 'C': 4, 'D': 6}, {'A': 'c', 'B': 5, 'C': 6, 'D': 7}, ]); }); it('can melt a dataframe with a single id column and 2 value columns passed as input', () => { expect(df.melt('A', ['B', 'C']).toArray()).to.eql([ {'A': 'a', 'variable': 'B', 'value': 1}, {'A': 'b', 'variable': 'B', 'value': 3}, {'A': 'c', 'variable': 'B', 'value': 5}, {'A': 'a', 'variable': 'C', 'value': 2}, {'A': 'b', 'variable': 'C', 'value': 4}, {'A': 'c', 'variable': 'C', 'value': 6} ]); }); it('can melt a dataframe with single id columns and single value column passed as input', () => { expect(df.melt('A', 'B').toArray()).to.eql([ {'A': 'a', 'variable': 'B', 'value': 1}, {'A': 'b', 'variable': 'B', 'value': 3}, {'A': 'c', 'variable': 'B', 'value': 5} ]); }); it('can melt a dataframe with multiple id columns and single value column passed as input', () => { expect(df.melt(['A', 'B'], 'C').toArray()).to.eql([ {'A': 'a', 'B': 1, 'variable': 'C', 'value': 2}, {'A': 'b', 'B': 3, 'variable': 'C', 'value': 4}, {'A': 'c', 'B': 5, 'variable': 'C', 'value': 6}, ]); }); it('can melt a dataframe with multiple id columns and multiple value columns passed as input', () => { expect(df.melt(['A', 'B'], ['C', 'D']).toArray()).to.eql([ {'A': 'a', 'B': 1, 'variable': 'C', 'value': 2}, {'A': 'b', 'B': 3, 'variable': 'C', 'value': 4}, {'A': 'c', 'B': 5, 'variable': 'C', 'value': 6}, {'A': 'a', 'B': 1, 'variable': 'D', 'value': 5}, {'A': 'b', 'B': 3, 'variable': 'D', 'value': 6}, {'A': 'c', 'B': 5, 'variable': 'D', 'value': 7}, ]); }); it('can melt an empty dataframe', () => { df = new DataFrame([]); expect(df.melt('A', ['B', 'C']).toArray()).to.eql([]); }); it('should return an empty dataframe if no value columns are passed', () => { expect(df.melt('A', []).toArray()).to.eql([]); }); it('can melt a dataframe if no id columns are passed', () => { expect(df.melt([], ['B', 'C']).toArray()).to.eql([ {'variable': 'B', 'value': 1}, {'variable': 'B', 'value': 3}, {'variable': 'B', 'value': 5}, {'variable': 'C', 'value': 2}, {'variable': 'C', 'value': 4}, {'variable': 'C', 'value': 6} ]); }); it('should return an empty dataframe if no id columns and no value columns are passed', () => { expect(df.melt([], []).toArray()).to.eql([]); }); });
{ "pile_set_name": "Github" }
// Source code: https://github.com/CoderMJLee/MJRefresh
// Source code: http://code4app.com/ios/%E5%BF%AB%E9%80%9F%E9%9B%86%E6%88%90%E4%B8%8B%E6%8B%89%E4%B8%8A%E6%8B%89%E5%88%B7%E6%96%B0/52326ce26803fabc46000000
//  MJRefreshHeader.h
//  MJRefreshExample
//
//  Created by MJ Lee on 15/3/4.
//  Copyright (c) 2015 小码哥. All rights reserved.
//  Pull-down-to-refresh control: monitors the state of the user's pull-down gesture.

#import "MJRefreshComponent.h"

@interface MJRefreshHeader : MJRefreshComponent

/** Create a header with a refreshing block */
+ (instancetype)headerWithRefreshingBlock:(MJRefreshComponentRefreshingBlock)refreshingBlock;
/** Create a header with a target/action pair */
+ (instancetype)headerWithRefreshingTarget:(id)target refreshingAction:(SEL)action;

/** The key used to store the time of the last successful pull-to-refresh */
@property (copy, nonatomic) NSString *lastUpdatedTimeKey;

/** The time of the last successful pull-to-refresh */
@property (strong, nonatomic, readonly) NSDate *lastUpdatedTime;

/** How much of the scrollView's contentInset.top to ignore */
@property (assign, nonatomic) CGFloat ignoredScrollViewContentInsetTop;

@end
{ "pile_set_name": "Github" }
// passthrough vertex shader
// no vertex transformation
#version 410 core

in vec4 ciPosition;
in vec3 ciColor;

out vec3 vColor;

void main()
{
	vColor = ciColor;
	gl_Position = ciPosition;
}
{ "pile_set_name": "Github" }
#region PDFsharp - A .NET library for processing PDF
//
// Authors:
//   Stefan Lange
//
// Copyright (c) 2005-2019 empira Software GmbH, Cologne Area (Germany)
//
// http://www.pdfsharp.com
// http://sourceforge.net/projects/pdfsharp
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#endregion

// ReSharper disable InconsistentNaming

namespace PdfSharp.Drawing
{
    ///<summary>
    /// Determines whether rendering is based on GDI+ or WPF.
    /// For internal use in hybrid builds only.
    /// </summary>
    enum XGraphicTargetContext
    {
        // NETFX_CORE_TODO
        NONE = 0,

        /// <summary>
        /// Rendering does not depend on a particular technology.
        /// </summary>
        CORE = 1,

        /// <summary>
        /// Renders using GDI+.
        /// </summary>
        GDI = 2,

        /// <summary>
        /// Renders using WPF (including Silverlight).
        /// </summary>
        WPF = 3,

        /// <summary>
        /// Universal Windows Platform.
        /// </summary>
        UWP = 10,
    }
}
{ "pile_set_name": "Github" }
# Net::SMTP.pm # # Copyright (c) 1995-2004 Graham Barr <[email protected]>. All rights reserved. # This program is free software; you can redistribute it and/or # modify it under the same terms as Perl itself. package Net::SMTP; require 5.001; use strict; use vars qw($VERSION @ISA); use Socket 1.3; use Carp; use IO::Socket; use Net::Cmd; use Net::Config; $VERSION = "2.31"; @ISA = qw(Net::Cmd IO::Socket::INET); sub new { my $self = shift; my $type = ref($self) || $self; my ($host, %arg); if (@_ % 2) { $host = shift; %arg = @_; } else { %arg = @_; $host = delete $arg{Host}; } my $hosts = defined $host ? $host : $NetConfig{smtp_hosts}; my $obj; my $h; foreach $h (@{ref($hosts) ? $hosts : [$hosts]}) { $obj = $type->SUPER::new( PeerAddr => ($host = $h), PeerPort => $arg{Port} || 'smtp(25)', LocalAddr => $arg{LocalAddr}, LocalPort => $arg{LocalPort}, Proto => 'tcp', Timeout => defined $arg{Timeout} ? $arg{Timeout} : 120 ) and last; } return undef unless defined $obj; $obj->autoflush(1); $obj->debug(exists $arg{Debug} ? $arg{Debug} : undef); unless ($obj->response() == CMD_OK) { $obj->close(); return undef; } ${*$obj}{'net_smtp_exact_addr'} = $arg{ExactAddresses}; ${*$obj}{'net_smtp_host'} = $host; (${*$obj}{'net_smtp_banner'}) = $obj->message; (${*$obj}{'net_smtp_domain'}) = $obj->message =~ /\A\s*(\S+)/; unless ($obj->hello($arg{Hello} || "")) { $obj->close(); return undef; } $obj; } sub host { my $me = shift; ${*$me}{'net_smtp_host'}; } ## ## User interface methods ## sub banner { my $me = shift; return ${*$me}{'net_smtp_banner'} || undef; } sub domain { my $me = shift; return ${*$me}{'net_smtp_domain'} || undef; } sub etrn { my $self = shift; defined($self->supports('ETRN', 500, ["Command unknown: 'ETRN'"])) && $self->_ETRN(@_); } sub auth { my ($self, $username, $password) = @_; eval { require MIME::Base64; require Authen::SASL; } or $self->set_status(500, ["Need MIME::Base64 and Authen::SASL todo auth"]), return 0; my $mechanisms = $self->supports('AUTH', 500, ["Command unknown: 'AUTH'"]); return unless defined $mechanisms; my $sasl; if (ref($username) and UNIVERSAL::isa($username, 'Authen::SASL')) { $sasl = $username; $sasl->mechanism($mechanisms); } else { die "auth(username, password)" if not length $username; $sasl = Authen::SASL->new( mechanism => $mechanisms, callback => { user => $username, pass => $password, authname => $username, } ); } # We should probably allow the user to pass the host, but I don't # currently know and SASL mechanisms that are used by smtp that need it my $client = $sasl->client_new('smtp', ${*$self}{'net_smtp_host'}, 0); my $str = $client->client_start; # We dont support sasl mechanisms that encrypt the socket traffic. 
# todo that we would really need to change the ISA hierarchy # so we dont inherit from IO::Socket, but instead hold it in an attribute my @cmd = ("AUTH", $client->mechanism); my $code; push @cmd, MIME::Base64::encode_base64($str, '') if defined $str and length $str; while (($code = $self->command(@cmd)->response()) == CMD_MORE) { @cmd = ( MIME::Base64::encode_base64( $client->client_step(MIME::Base64::decode_base64(($self->message)[0])), '' ) ); } $code == CMD_OK; } sub hello { my $me = shift; my $domain = shift || "localhost.localdomain"; my $ok = $me->_EHLO($domain); my @msg = $me->message; if ($ok) { my $h = ${*$me}{'net_smtp_esmtp'} = {}; my $ln; foreach $ln (@msg) { $h->{uc $1} = $2 if $ln =~ /(\w+)\b[= \t]*([^\n]*)/; } } elsif ($me->status == CMD_ERROR) { @msg = $me->message if $ok = $me->_HELO($domain); } return undef unless $ok; $msg[0] =~ /\A\s*(\S+)/; return ($1 || " "); } sub supports { my $self = shift; my $cmd = uc shift; return ${*$self}{'net_smtp_esmtp'}->{$cmd} if exists ${*$self}{'net_smtp_esmtp'}->{$cmd}; $self->set_status(@_) if @_; return; } sub _addr { my $self = shift; my $addr = shift; $addr = "" unless defined $addr; if (${*$self}{'net_smtp_exact_addr'}) { return $1 if $addr =~ /^\s*(<.*>)\s*$/s; } else { return $1 if $addr =~ /(<[^>]*>)/; $addr =~ s/^\s+|\s+$//sg; } "<$addr>"; } sub mail { my $me = shift; my $addr = _addr($me, shift); my $opts = ""; if (@_) { my %opt = @_; my ($k, $v); if (exists ${*$me}{'net_smtp_esmtp'}) { my $esmtp = ${*$me}{'net_smtp_esmtp'}; if (defined($v = delete $opt{Size})) { if (exists $esmtp->{SIZE}) { $opts .= sprintf " SIZE=%d", $v + 0; } else { carp 'Net::SMTP::mail: SIZE option not supported by host'; } } if (defined($v = delete $opt{Return})) { if (exists $esmtp->{DSN}) { $opts .= " RET=" . ((uc($v) eq "FULL") ? "FULL" : "HDRS"); } else { carp 'Net::SMTP::mail: DSN option not supported by host'; } } if (defined($v = delete $opt{Bits})) { if ($v eq "8") { if (exists $esmtp->{'8BITMIME'}) { $opts .= " BODY=8BITMIME"; } else { carp 'Net::SMTP::mail: 8BITMIME option not supported by host'; } } elsif ($v eq "binary") { if (exists $esmtp->{'BINARYMIME'} && exists $esmtp->{'CHUNKING'}) { $opts .= " BODY=BINARYMIME"; ${*$me}{'net_smtp_chunking'} = 1; } else { carp 'Net::SMTP::mail: BINARYMIME option not supported by host'; } } elsif (exists $esmtp->{'8BITMIME'} or exists $esmtp->{'BINARYMIME'}) { $opts .= " BODY=7BIT"; } else { carp 'Net::SMTP::mail: 8BITMIME and BINARYMIME options not supported by host'; } } if (defined($v = delete $opt{Transaction})) { if (exists $esmtp->{CHECKPOINT}) { $opts .= " TRANSID=" . 
_addr($me, $v); } else { carp 'Net::SMTP::mail: CHECKPOINT option not supported by host'; } } if (defined($v = delete $opt{Envelope})) { if (exists $esmtp->{DSN}) { $v =~ s/([^\041-\176]|=|\+)/sprintf "+%02x", ord($1)/sge; $opts .= " ENVID=$v"; } else { carp 'Net::SMTP::mail: DSN option not supported by host'; } } if (defined($v = delete $opt{ENVID})) { # expected to be in a format as required by RFC 3461, xtext-encoded if (exists $esmtp->{DSN}) { $opts .= " ENVID=$v"; } else { carp 'Net::SMTP::mail: DSN option not supported by host'; } } if (defined($v = delete $opt{AUTH})) { # expected to be in a format as required by RFC 2554, # rfc2821-quoted and xtext-encoded, or <> if (exists $esmtp->{AUTH}) { $v = '<>' if !defined($v) || $v eq ''; $opts .= " AUTH=$v"; } else { carp 'Net::SMTP::mail: AUTH option not supported by host'; } } if (defined($v = delete $opt{XVERP})) { if (exists $esmtp->{'XVERP'}) { $opts .= " XVERP"; } else { carp 'Net::SMTP::mail: XVERP option not supported by host'; } } carp 'Net::SMTP::recipient: unknown option(s) ' . join(" ", keys %opt) . ' - ignored' if scalar keys %opt; } else { carp 'Net::SMTP::mail: ESMTP not supported by host - options discarded :-('; } } $me->_MAIL("FROM:" . $addr . $opts); } sub send { my $me = shift; $me->_SEND("FROM:" . _addr($me, $_[0])) } sub send_or_mail { my $me = shift; $me->_SOML("FROM:" . _addr($me, $_[0])) } sub send_and_mail { my $me = shift; $me->_SAML("FROM:" . _addr($me, $_[0])) } sub reset { my $me = shift; $me->dataend() if (exists ${*$me}{'net_smtp_lastch'}); $me->_RSET(); } sub recipient { my $smtp = shift; my $opts = ""; my $skip_bad = 0; if (@_ && ref($_[-1])) { my %opt = %{pop(@_)}; my $v; $skip_bad = delete $opt{'SkipBad'}; if (exists ${*$smtp}{'net_smtp_esmtp'}) { my $esmtp = ${*$smtp}{'net_smtp_esmtp'}; if (defined($v = delete $opt{Notify})) { if (exists $esmtp->{DSN}) { $opts .= " NOTIFY=" . join(",", map { uc $_ } @$v); } else { carp 'Net::SMTP::recipient: DSN option not supported by host'; } } if (defined($v = delete $opt{ORcpt})) { if (exists $esmtp->{DSN}) { $opts .= " ORCPT=" . $v; } else { carp 'Net::SMTP::recipient: DSN option not supported by host'; } } carp 'Net::SMTP::recipient: unknown option(s) ' . join(" ", keys %opt) . ' - ignored' if scalar keys %opt; } elsif (%opt) { carp 'Net::SMTP::recipient: ESMTP not supported by host - options discarded :-('; } } my @ok; my $addr; foreach $addr (@_) { if ($smtp->_RCPT("TO:" . _addr($smtp, $addr) . $opts)) { push(@ok, $addr) if $skip_bad; } elsif (!$skip_bad) { return 0; } } return $skip_bad ? @ok : 1; } BEGIN { *to = \&recipient; *cc = \&recipient; *bcc = \&recipient; } sub data { my $me = shift; if (exists ${*$me}{'net_smtp_chunking'}) { carp 'Net::SMTP::data: CHUNKING extension in use, must call bdat instead'; } else { my $ok = $me->_DATA() && $me->datasend(@_); $ok && @_ ? $me->dataend : $ok; } } sub bdat { my $me = shift; if (exists ${*$me}{'net_smtp_chunking'}) { my $data = shift; $me->_BDAT(length $data) && $me->rawdatasend($data) && $me->response() == CMD_OK; } else { carp 'Net::SMTP::bdat: CHUNKING extension is not in use, call data instead'; } } sub bdatlast { my $me = shift; if (exists ${*$me}{'net_smtp_chunking'}) { my $data = shift; $me->_BDAT(length $data, "LAST") && $me->rawdatasend($data) && $me->response() == CMD_OK; } else { carp 'Net::SMTP::bdat: CHUNKING extension is not in use, call data instead'; } } sub datafh { my $me = shift; return unless $me->_DATA(); return $me->tied_fh; } sub expand { my $me = shift; $me->_EXPN(@_) ? 
($me->message) : (); } sub verify { shift->_VRFY(@_) } sub help { my $me = shift; $me->_HELP(@_) ? scalar $me->message : undef; } sub quit { my $me = shift; $me->_QUIT; $me->close; } sub DESTROY { # ignore } ## ## RFC821 commands ## sub _EHLO { shift->command("EHLO", @_)->response() == CMD_OK } sub _HELO { shift->command("HELO", @_)->response() == CMD_OK } sub _MAIL { shift->command("MAIL", @_)->response() == CMD_OK } sub _RCPT { shift->command("RCPT", @_)->response() == CMD_OK } sub _SEND { shift->command("SEND", @_)->response() == CMD_OK } sub _SAML { shift->command("SAML", @_)->response() == CMD_OK } sub _SOML { shift->command("SOML", @_)->response() == CMD_OK } sub _VRFY { shift->command("VRFY", @_)->response() == CMD_OK } sub _EXPN { shift->command("EXPN", @_)->response() == CMD_OK } sub _HELP { shift->command("HELP", @_)->response() == CMD_OK } sub _RSET { shift->command("RSET")->response() == CMD_OK } sub _NOOP { shift->command("NOOP")->response() == CMD_OK } sub _QUIT { shift->command("QUIT")->response() == CMD_OK } sub _DATA { shift->command("DATA")->response() == CMD_MORE } sub _BDAT { shift->command("BDAT", @_) } sub _TURN { shift->unsupported(@_); } sub _ETRN { shift->command("ETRN", @_)->response() == CMD_OK } sub _AUTH { shift->command("AUTH", @_)->response() == CMD_OK } 1; __END__ =head1 NAME Net::SMTP - Simple Mail Transfer Protocol Client =head1 SYNOPSIS use Net::SMTP; # Constructors $smtp = Net::SMTP->new('mailhost'); $smtp = Net::SMTP->new('mailhost', Timeout => 60); =head1 DESCRIPTION This module implements a client interface to the SMTP and ESMTP protocol, enabling a perl5 application to talk to SMTP servers. This documentation assumes that you are familiar with the concepts of the SMTP protocol described in RFC821. A new Net::SMTP object must be created with the I<new> method. Once this has been done, all SMTP commands are accessed through this object. The Net::SMTP class is a subclass of Net::Cmd and IO::Socket::INET. =head1 EXAMPLES This example prints the mail domain name of the SMTP server known as mailhost: #!/usr/local/bin/perl -w use Net::SMTP; $smtp = Net::SMTP->new('mailhost'); print $smtp->domain,"\n"; $smtp->quit; This example sends a small message to the postmaster at the SMTP server known as mailhost: #!/usr/local/bin/perl -w use Net::SMTP; $smtp = Net::SMTP->new('mailhost'); $smtp->mail($ENV{USER}); $smtp->to('postmaster'); $smtp->data(); $smtp->datasend("To: postmaster\n"); $smtp->datasend("\n"); $smtp->datasend("A simple test message\n"); $smtp->dataend(); $smtp->quit; =head1 CONSTRUCTOR =over 4 =item new ( [ HOST ] [, OPTIONS ] ) This is the constructor for a new Net::SMTP object. C<HOST> is the name of the remote host to which an SMTP connection is required. C<HOST> is optional. If C<HOST> is not given then it may instead be passed as the C<Host> option described below. If neither is given then the C<SMTP_Hosts> specified in C<Net::Config> will be used. C<OPTIONS> are passed in a hash like fashion, using key and value pairs. Possible options are: B<Hello> - SMTP requires that you identify yourself. This option specifies a string to pass as your mail domain. If not given localhost.localdomain will be used. B<Host> - SMTP host to connect to. It may be a single scalar, as defined for the C<PeerAddr> option in L<IO::Socket::INET>, or a reference to an array with hosts to try in turn. The L</host> method will return the value which was used to connect to the host. 
B<LocalAddr> and B<LocalPort> - These parameters are passed directly to IO::Socket to allow binding the socket to a local port. B<Timeout> - Maximum time, in seconds, to wait for a response from the SMTP server (default: 120) B<ExactAddresses> - If true the all ADDRESS arguments must be as defined by C<addr-spec> in RFC2822. If not given, or false, then Net::SMTP will attempt to extract the address from the value passed. B<Debug> - Enable debugging information Example: $smtp = Net::SMTP->new('mailhost', Hello => 'my.mail.domain', Timeout => 30, Debug => 1, ); # the same $smtp = Net::SMTP->new( Host => 'mailhost', Hello => 'my.mail.domain', Timeout => 30, Debug => 1, ); # Connect to the default server from Net::config $smtp = Net::SMTP->new( Hello => 'my.mail.domain', Timeout => 30, ); =back =head1 METHODS Unless otherwise stated all methods return either a I<true> or I<false> value, with I<true> meaning that the operation was a success. When a method states that it returns a value, failure will be returned as I<undef> or an empty list. =over 4 =item banner () Returns the banner message which the server replied with when the initial connection was made. =item domain () Returns the domain that the remote SMTP server identified itself as during connection. =item hello ( DOMAIN ) Tell the remote server the mail domain which you are in using the EHLO command (or HELO if EHLO fails). Since this method is invoked automatically when the Net::SMTP object is constructed the user should normally not have to call it manually. =item host () Returns the value used by the constructor, and passed to IO::Socket::INET, to connect to the host. =item etrn ( DOMAIN ) Request a queue run for the DOMAIN given. =item auth ( USERNAME, PASSWORD ) Attempt SASL authentication. =item mail ( ADDRESS [, OPTIONS] ) =item send ( ADDRESS ) =item send_or_mail ( ADDRESS ) =item send_and_mail ( ADDRESS ) Send the appropriate command to the server MAIL, SEND, SOML or SAML. C<ADDRESS> is the address of the sender. This initiates the sending of a message. The method C<recipient> should be called for each address that the message is to be sent to. The C<mail> method can some additional ESMTP OPTIONS which is passed in hash like fashion, using key and value pairs. Possible options are: Size => <bytes> Return => "FULL" | "HDRS" Bits => "7" | "8" | "binary" Transaction => <ADDRESS> Envelope => <ENVID> # xtext-encodes its argument ENVID => <ENVID> # similar to Envelope, but expects argument encoded XVERP => 1 AUTH => <submitter> # encoded address according to RFC 2554 The C<Return> and C<Envelope> parameters are used for DSN (Delivery Status Notification). The submitter address in C<AUTH> option is expected to be in a format as required by RFC 2554, in an RFC2821-quoted form and xtext-encoded, or <> . =item reset () Reset the status of the server. This may be called after a message has been initiated, but before any data has been sent, to cancel the sending of the message. =item recipient ( ADDRESS [, ADDRESS, [...]] [, OPTIONS ] ) Notify the server that the current message should be sent to all of the addresses given. Each address is sent as a separate command to the server. Should the sending of any address result in a failure then the process is aborted and a I<false> value is returned. It is up to the user to call C<reset> if they so desire. The C<recipient> method can also pass additional case-sensitive OPTIONS as an anonymous hash using key and value pairs. 
Possible options are: Notify => ['NEVER'] or ['SUCCESS','FAILURE','DELAY'] (see below) ORcpt => <ORCPT> SkipBad => 1 (to ignore bad addresses) If C<SkipBad> is true the C<recipient> will not return an error when a bad address is encountered and it will return an array of addresses that did succeed. $smtp->recipient($recipient1,$recipient2); # Good $smtp->recipient($recipient1,$recipient2, { SkipBad => 1 }); # Good $smtp->recipient($recipient1,$recipient2, { Notify => ['FAILURE','DELAY'], SkipBad => 1 }); # Good @goodrecips=$smtp->recipient(@recipients, { Notify => ['FAILURE'], SkipBad => 1 }); # Good $smtp->recipient("$recipient,$recipient2"); # BAD Notify is used to request Delivery Status Notifications (DSNs), but your SMTP/ESMTP service may not respect this request depending upon its version and your site's SMTP configuration. Leaving out the Notify option usually defaults an SMTP service to its default behavior equivalent to ['FAILURE'] notifications only, but again this may be dependent upon your site's SMTP configuration. The NEVER keyword must appear by itself if used within the Notify option and "requests that a DSN not be returned to the sender under any conditions." {Notify => ['NEVER']} $smtp->recipient(@recipients, { Notify => ['NEVER'], SkipBad => 1 }); # Good You may use any combination of these three values 'SUCCESS','FAILURE','DELAY' in the anonymous array reference as defined by RFC3461 (see http://rfc.net/rfc3461.html for more information. Note: quotations in this topic from same.). A Notify parameter of 'SUCCESS' or 'FAILURE' "requests that a DSN be issued on successful delivery or delivery failure, respectively." A Notify parameter of 'DELAY' "indicates the sender's willingness to receive delayed DSNs. Delayed DSNs may be issued if delivery of a message has been delayed for an unusual amount of time (as determined by the Message Transfer Agent (MTA) at which the message is delayed), but the final delivery status (whether successful or failure) cannot be determined. The absence of the DELAY keyword in a NOTIFY parameter requests that a "delayed" DSN NOT be issued under any conditions." {Notify => ['SUCCESS','FAILURE','DELAY']} $smtp->recipient(@recipients, { Notify => ['FAILURE','DELAY'], SkipBad => 1 }); # Good ORcpt is also part of the SMTP DSN extension according to RFC3461. It is used to pass along the original recipient that the mail was first sent to. The machine that generates a DSN will use this address to inform the sender, because he can't know if recipients get rewritten by mail servers. It is expected to be in a format as required by RFC3461, xtext-encoded. =item to ( ADDRESS [, ADDRESS [...]] ) =item cc ( ADDRESS [, ADDRESS [...]] ) =item bcc ( ADDRESS [, ADDRESS [...]] ) Synonyms for C<recipient>. =item data ( [ DATA ] ) Initiate the sending of the data from the current message. C<DATA> may be a reference to a list or a list. If specified the contents of C<DATA> and a termination string C<".\r\n"> is sent to the server. And the result will be true if the data was accepted. If C<DATA> is not specified then the result will indicate that the server wishes the data to be sent. The data must then be sent using the C<datasend> and C<dataend> methods described in L<Net::Cmd>. =item expand ( ADDRESS ) Request the server to expand the given address Returns an array which contains the text read from the server. =item verify ( ADDRESS ) Verify that C<ADDRESS> is a legitimate mailing address. Most sites usually disable this feature in their SMTP service configuration. 
Use "Debug => 1" option under new() to see if disabled. =item help ( [ $subject ] ) Request help text from the server. Returns the text or undef upon failure =item quit () Send the QUIT command to the remote SMTP server and close the socket connection. =back =head1 ADDRESSES Net::SMTP attempts to DWIM with addresses that are passed. For example an application might extract The From: line from an email and pass that to mail(). While this may work, it is not recommended. The application should really use a module like L<Mail::Address> to extract the mail address and pass that. If C<ExactAddresses> is passed to the constructor, then addresses should be a valid rfc2821-quoted address, although Net::SMTP will accept accept the address surrounded by angle brackets. funny user@domain WRONG "funny user"@domain RIGHT, recommended <"funny user"@domain> OK =head1 SEE ALSO L<Net::Cmd> =head1 AUTHOR Graham Barr <[email protected]> =head1 COPYRIGHT Copyright (c) 1995-2004 Graham Barr. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut
{ "pile_set_name": "Github" }
// Copyright 2012 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build !windows package test // Session functional tests. import ( "bytes" "errors" "fmt" "io" "strings" "testing" "golang.org/x/crypto/ssh" ) func TestRunCommandSuccess(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() err = session.Run("true") if err != nil { t.Fatalf("session failed: %v", err) } } func TestHostKeyCheck(t *testing.T) { server := newServer(t) defer server.Shutdown() conf := clientConfig() hostDB := hostKeyDB() conf.HostKeyCallback = hostDB.Check // change the keys. hostDB.keys[ssh.KeyAlgoRSA][25]++ hostDB.keys[ssh.KeyAlgoDSA][25]++ hostDB.keys[ssh.KeyAlgoECDSA256][25]++ conn, err := server.TryDial(conf) if err == nil { conn.Close() t.Fatalf("dial should have failed.") } else if !strings.Contains(err.Error(), "host key mismatch") { t.Fatalf("'host key mismatch' not found in %v", err) } } func TestRunCommandStdin(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() r, w := io.Pipe() defer r.Close() defer w.Close() session.Stdin = r err = session.Run("true") if err != nil { t.Fatalf("session failed: %v", err) } } func TestRunCommandStdinError(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() r, w := io.Pipe() defer r.Close() session.Stdin = r pipeErr := errors.New("closing write end of pipe") w.CloseWithError(pipeErr) err = session.Run("true") if err != pipeErr { t.Fatalf("expected %v, found %v", pipeErr, err) } } func TestRunCommandFailed(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() err = session.Run(`bash -c "kill -9 $$"`) if err == nil { t.Fatalf("session succeeded: %v", err) } } func TestRunCommandWeClosed(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } err = session.Shell() if err != nil { t.Fatalf("shell failed: %v", err) } err = session.Close() if err != nil { t.Fatalf("shell failed: %v", err) } } func TestFuncLargeRead(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("unable to create new session: %s", err) } stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } err = session.Start("dd if=/dev/urandom bs=2048 count=1024") if err != nil { t.Fatalf("unable to execute remote command: %s", err) } buf := new(bytes.Buffer) n, err := io.Copy(buf, stdout) if err != nil { t.Fatalf("error reading from remote stdout: %s", err) } if n != 2048*1024 { t.Fatalf("Expected %d bytes but read only %d from remote command", 2048, n) } } func TestKeyChange(t *testing.T) { server := newServer(t) 
defer server.Shutdown() conf := clientConfig() hostDB := hostKeyDB() conf.HostKeyCallback = hostDB.Check conf.RekeyThreshold = 1024 conn := server.Dial(conf) defer conn.Close() for i := 0; i < 4; i++ { session, err := conn.NewSession() if err != nil { t.Fatalf("unable to create new session: %s", err) } stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } err = session.Start("dd if=/dev/urandom bs=1024 count=1") if err != nil { t.Fatalf("unable to execute remote command: %s", err) } buf := new(bytes.Buffer) n, err := io.Copy(buf, stdout) if err != nil { t.Fatalf("error reading from remote stdout: %s", err) } want := int64(1024) if n != want { t.Fatalf("Expected %d bytes but read only %d from remote command", want, n) } } if changes := hostDB.checkCount; changes < 4 { t.Errorf("got %d key changes, want 4", changes) } } func TestInvalidTerminalMode(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() if err = session.RequestPty("vt100", 80, 40, ssh.TerminalModes{255: 1984}); err == nil { t.Fatalf("req-pty failed: successful request with invalid mode") } } func TestValidTerminalMode(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } stdin, err := session.StdinPipe() if err != nil { t.Fatalf("unable to acquire stdin pipe: %s", err) } tm := ssh.TerminalModes{ssh.ECHO: 0} if err = session.RequestPty("xterm", 80, 40, tm); err != nil { t.Fatalf("req-pty failed: %s", err) } err = session.Shell() if err != nil { t.Fatalf("session failed: %s", err) } stdin.Write([]byte("stty -a && exit\n")) var buf bytes.Buffer if _, err := io.Copy(&buf, stdout); err != nil { t.Fatalf("reading failed: %s", err) } if sttyOutput := buf.String(); !strings.Contains(sttyOutput, "-echo ") { t.Fatalf("terminal mode failure: expected -echo in stty output, got %s", sttyOutput) } } func TestWindowChange(t *testing.T) { server := newServer(t) defer server.Shutdown() conn := server.Dial(clientConfig()) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("session failed: %v", err) } defer session.Close() stdout, err := session.StdoutPipe() if err != nil { t.Fatalf("unable to acquire stdout pipe: %s", err) } stdin, err := session.StdinPipe() if err != nil { t.Fatalf("unable to acquire stdin pipe: %s", err) } tm := ssh.TerminalModes{ssh.ECHO: 0} if err = session.RequestPty("xterm", 80, 40, tm); err != nil { t.Fatalf("req-pty failed: %s", err) } if err := session.WindowChange(100, 100); err != nil { t.Fatalf("window-change failed: %s", err) } err = session.Shell() if err != nil { t.Fatalf("session failed: %s", err) } stdin.Write([]byte("stty size && exit\n")) var buf bytes.Buffer if _, err := io.Copy(&buf, stdout); err != nil { t.Fatalf("reading failed: %s", err) } if sttyOutput := buf.String(); !strings.Contains(sttyOutput, "100 100") { t.Fatalf("terminal WindowChange failure: expected \"100 100\" stty output, got %s", sttyOutput) } } func testOneCipher(t *testing.T, cipher string, cipherOrder []string) { server := newServer(t) defer server.Shutdown() conf := clientConfig() conf.Ciphers = []string{cipher} // 
Don't fail if sshd doesn't have the cipher. conf.Ciphers = append(conf.Ciphers, cipherOrder...) conn, err := server.TryDial(conf) if err != nil { t.Fatalf("TryDial: %v", err) } defer conn.Close() numBytes := 4096 // Exercise sending data to the server if _, _, err := conn.Conn.SendRequest("drop-me", false, make([]byte, numBytes)); err != nil { t.Fatalf("SendRequest: %v", err) } // Exercise receiving data from the server session, err := conn.NewSession() if err != nil { t.Fatalf("NewSession: %v", err) } out, err := session.Output(fmt.Sprintf("dd if=/dev/zero of=/dev/stdout bs=%d count=1", numBytes)) if err != nil { t.Fatalf("Output: %v", err) } if len(out) != numBytes { t.Fatalf("got %d bytes, want %d bytes", len(out), numBytes) } } var deprecatedCiphers = []string{ "aes128-cbc", "3des-cbc", "arcfour128", "arcfour256", } func TestCiphers(t *testing.T) { var config ssh.Config config.SetDefaults() cipherOrder := append(config.Ciphers, deprecatedCiphers...) for _, ciph := range cipherOrder { t.Run(ciph, func(t *testing.T) { testOneCipher(t, ciph, cipherOrder) }) } } func TestMACs(t *testing.T) { var config ssh.Config config.SetDefaults() macOrder := config.MACs for _, mac := range macOrder { server := newServer(t) defer server.Shutdown() conf := clientConfig() conf.MACs = []string{mac} // Don't fail if sshd doesn't have the MAC. conf.MACs = append(conf.MACs, macOrder...) if conn, err := server.TryDial(conf); err == nil { conn.Close() } else { t.Fatalf("failed for MAC %q", mac) } } } func TestKeyExchanges(t *testing.T) { var config ssh.Config config.SetDefaults() kexOrder := config.KeyExchanges for _, kex := range kexOrder { server := newServer(t) defer server.Shutdown() conf := clientConfig() // Don't fail if sshd doesn't have the kex. conf.KeyExchanges = append([]string{kex}, kexOrder...) conn, err := server.TryDial(conf) if err == nil { conn.Close() } else { t.Errorf("failed for kex %q", kex) } } } func TestClientAuthAlgorithms(t *testing.T) { for _, key := range []string{ "rsa", "dsa", "ecdsa", "ed25519", } { server := newServer(t) conf := clientConfig() conf.SetDefaults() conf.Auth = []ssh.AuthMethod{ ssh.PublicKeys(testSigners[key]), } conn, err := server.TryDial(conf) if err == nil { conn.Close() } else { t.Errorf("failed for key %q", key) } server.Shutdown() } }
{ "pile_set_name": "Github" }
fileFormatVersion: 2
guid: 5a3598dafa118754db95756064347da7
NativeFormatImporter:
  userData: 
{ "pile_set_name": "Github" }
// Copyright 2013 Mozilla Corporation. All rights reserved.
// This code is governed by the license found in the LICENSE file.

/**
 * @description Tests that Intl.DateTimeFormat.supportedLocalesOf
 *     doesn't access arguments that it's not given.
 * @author Norbert Lindenberg
 */

$INCLUDE("testIntl.js");

taintDataProperty(Object.prototype, "1");

new Intl.DateTimeFormat("und");
{ "pile_set_name": "Github" }
import { NODE_VISIBLE } from "./globals";
import { getTraitFromNode } from "./treeMiscHelpers";

/**
 * traverse the tree to get state counts for supplied traits.
 * @param {Array} nodes - list of nodes
 * @param {Array} traits - list of traits to count across the tree
 * @param {Array | false} visibility - if Array provided then only consider visible nodes. If false, consider all nodes.
 * @param {bool} terminalOnly - only consider terminal / leaf nodes?
 * @return {obj} keys: the traits. Values: an object mapping trait values -> INT
 */
export const countTraitsAcrossTree = (nodes, traits, visibility, terminalOnly) => {
  const counts = {};
  traits.forEach((trait) => {counts[trait] = new Map();});

  nodes.forEach((node) => {
    traits.forEach((trait) => {                      // traits are "country" or "author" etc
      const value = getTraitFromNode(node, trait);   // value is "USA", "black" etc

      if (terminalOnly && node.hasChildren) {
        return;
      }

      if (visibility && visibility[node.arrayIdx] !== NODE_VISIBLE) {
        return;
      }

      const currentValue = counts[trait].get(value) || 0;
      counts[trait].set(value, currentValue+1);
    });
  });
  return counts;
};

/**
 * for each node, calculate the number of subtending tips which are visible
 * side effects: n.tipCount for each node
 * @param root - deserialized JSON root to begin traversal
 */
export const calcTipCounts = (node, visibility) => {
  node.tipCount = 0;
  if (typeof node.children !== "undefined") {
    for (let i = 0; i < node.children.length; i++) {
      calcTipCounts(node.children[i], visibility);
      node.tipCount += node.children[i].tipCount;
    }
  } else {
    node.tipCount = visibility[node.arrayIdx] === NODE_VISIBLE ? 1 : 0;
  }
};

/**
 * calculate the total number of tips in the tree
 * @param {Array} nodes flat list of all nodes
 */
export const calcTotalTipsInTree = (nodes) => {
  let count = 0;
  nodes.forEach((n) => {
    if (!n.hasChildren) count++;
  });
  return count;
};
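A short usage sketch may help when reading countTraitsAcrossTree above. It is not taken from this repository: the import path, the `tree.nodes` list, the `visibility` array, and the "country" trait are assumed inputs whose shapes are only described by the JSDoc comments.

// Hypothetical call site (assumed inputs, not constructed here): count how many
// visible terminal (leaf) nodes carry each "country" value, then read one entry.
import { countTraitsAcrossTree } from "./treeCountingHelpers"; // file name is an assumption

const counts = countTraitsAcrossTree(tree.nodes, ["country"], visibility, true);
const visibleFromUSA = counts.country.get("USA") || 0; // counts.country is a Map of value -> int
console.log(`visible tips with country=USA: ${visibleFromUSA}`);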
{ "pile_set_name": "Github" }
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2

EAPI=7

# ebuild generated by hackport 0.6.4.9999

CABAL_FEATURES="lib profile haddock hoogle hscolour"
inherit haskell-cabal

DESCRIPTION="Lambdabot core functionality"
HOMEPAGE="https://wiki.haskell.org/Lambdabot"
SRC_URI="https://hackage.haskell.org/package/${P}/${P}.tar.gz"

LICENSE="GPL-2"
SLOT="0/${PV}"
KEYWORDS="~amd64 ~x86"
IUSE=""

RDEPEND=">=dev-haskell/dependent-map-0.2:=[profile?]
	>=dev-haskell/dependent-sum-0.3:=[profile?]
	>=dev-haskell/dependent-sum-template-0.0.0.1:=[profile?]
	>=dev-haskell/edit-distance-0.2:=[profile?]
	>=dev-haskell/haskeline-0.7:=[profile?] <dev-haskell/haskeline-0.8:=[profile?]
	>=dev-haskell/hslogger-1.2.1:=[profile?]
	>=dev-haskell/http-4000:=[profile?]
	>=dev-haskell/lifted-base-0.2:=[profile?]
	>=dev-haskell/monad-control-1.0:=[profile?]
	>=dev-haskell/mtl-2:=[profile?]
	>=dev-haskell/network-2.7:=[profile?] <dev-haskell/network-3.2:=[profile?]
	>=dev-haskell/network-bsd-2.7:=[profile?] <dev-haskell/network-bsd-2.9:=[profile?]
	>=dev-haskell/parsec-3:=[profile?]
	>=dev-haskell/prim-uniq-0.1:=[profile?] <dev-haskell/prim-uniq-0.2:=[profile?]
	>=dev-haskell/random-1:=[profile?]
	>=dev-haskell/random-fu-0.2.6.2:=[profile?]
	>=dev-haskell/random-source-0.3:=[profile?]
	>=dev-haskell/regex-tdfa-1.1:=[profile?]
	>=dev-haskell/safesemaphore-0.9:=[profile?]
	>=dev-haskell/split-0.2:=[profile?]
	>=dev-haskell/syb-0.3:=[profile?]
	>=dev-haskell/transformers-base-0.4:=[profile?]
	>=dev-haskell/utf8-string-0.3:=[profile?]
	>=dev-haskell/zlib-0.5:=[profile?]
	>=dev-lang/ghc-7.8.2:=
"
DEPEND="${RDEPEND}
	>=dev-haskell/cabal-1.18.1.3
"

PATCHES=( "${FILESDIR}"/${PN}-5.1.0.4-regex-base-0.94.patch )

src_prepare() {
	default

	# Setup.hs is not compatible with cabal-3
	rm Setup.hs || die

	cabal_chdeps \
		'dependent-map == 0.2.*' 'dependent-map >= 0.2' \
		'dependent-sum >= 0.3 && < 0.6' 'dependent-sum >= 0.3'
}
{ "pile_set_name": "Github" }
# Primitives for screen control. # Require Linux and a modern terminal. == code enable-screen-grid-mode: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (flush Stdout) (flush Stderr) # switch to second screen buffer (write 1 Esc) (write 1 "[?1049h") # (clear-real-screen) $enable-screen-grid-mode:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return enable-screen-type-mode: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # switch to first screen buffer (write 1 Esc) (write 1 "[?1049l") $enable-screen-type-mode:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return real-screen-size: # -> nrows/eax: int, ncols/ecx: int # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # . save registers 52/push-edx 53/push-ebx 56/push-esi 57/push-edi # (_maybe-open-terminal) # var window-size-info/esi: (addr winsize) # winsize is a type from the Linux kernel. We don't care how large it is. 81 5/subop/subtract %esp 0x40/imm32 89/<- %esi 4/r32/esp # ioctl(*Terminal-file-descriptor, TIOCGWINSZ, window-size-info) 89/<- %edx 6/r32/esi b9/copy-to-ecx 0x5413/imm32/TIOCGWINSZ 8b/-> *Terminal-file-descriptor 3/r32/ebx e8/call syscall_ioctl/disp32 # some bitworking to extract 2 16-bit shorts 8b/-> *esi 0/r32/eax 81 4/subop/and %eax 0xffff/imm32 8b/-> *esi 1/r32/ecx c1/shift 5/subop/logical-right %ecx 0x10/imm8 $real-screen-size:end: # . reclaim locals 81 0/subop/add %esp 0x40/imm32 # . restore registers 5f/pop-to-edi 5e/pop-to-esi 5b/pop-to-ebx 5a/pop-to-edx # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return clear-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[H") (write 1 Esc) (write 1 "[2J") $clear-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return # row and col count from the top-left as (1, 1) move-cursor-on-real-screen: # row: int, column: int # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # . save registers 51/push-ecx # var buf/ecx: (stream byte 32) 81 5/subop/subtract %esp 0x20/imm32 68/push 0x20/imm32/size 68/push 0/imm32/read 68/push 0/imm32/write 89/<- %ecx 4/r32/esp # construct directive in buf (write %ecx Esc) (write %ecx "[") (write-int32-decimal %ecx *(ebp+8)) (write %ecx ";") (write-int32-decimal %ecx *(ebp+0xc)) (write %ecx "H") # flush (write-stream 2 %ecx) $move-cursor-on-real-screen:end: # . reclaim locals 81 0/subop/add %esp 0x2c/imm32 # . restore registers 59/pop-to-ecx # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return print-string-to-real-screen: # s: (addr array byte) # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 *(ebp+8)) $print-string-to-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return print-slice-to-real-screen: # s: (addr slice) # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write-slice-buffered Stdout *(ebp+8)) (flush Stdout) $print-slice-to-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return print-stream-to-real-screen: # s: (addr stream byte) # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write-stream-data Stdout *(ebp+8)) (flush Stdout) $print-stream-to-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return # print a grapheme in utf-8 (only up to 4 bytes so far) print-grapheme-to-real-screen: # c: grapheme # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # . 
save registers 50/push-eax # var curr/eax: byte = 0 b8/copy-to-eax 0/imm32 # curr = *(ebp+8) 8a/byte-> *(ebp+8) 0/r32/al # if (curr == 0) return 3d/compare-eax-and 0/imm32 74/jump-if-= $print-grapheme-to-real-screen:end/disp8 # (print-byte-to-real-screen %eax) # curr = *(ebp+9) 8a/byte-> *(ebp+9) 0/r32/al # if (curr == 0) return 3d/compare-eax-and 0/imm32 74/jump-if-= $print-grapheme-to-real-screen:end/disp8 # (print-byte-to-real-screen %eax) # curr = *(ebp+10) 8a/byte-> *(ebp+0xa) 0/r32/al # if (curr == 0) return 3d/compare-eax-and 0/imm32 74/jump-if-= $print-grapheme-to-real-screen:end/disp8 # (print-byte-to-real-screen %eax) # curr = *(ebp+11) 8a/byte-> *(ebp+0xb) 0/r32/al # if (curr == 0) return 3d/compare-eax-and 0/imm32 74/jump-if-= $print-grapheme-to-real-screen:end/disp8 # (print-byte-to-real-screen %eax) $print-grapheme-to-real-screen:end: # . restore registers 58/pop-to-eax # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return print-byte-to-real-screen: # c: byte # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # . save registers 51/push-ecx # var s/ecx: (addr array byte) ff 6/subop/push *(ebp+8) 68/push 1/imm32/size 89/<- %ecx 4/r32/esp (write 1 %ecx) $print-byte-to-real-screen:end: # . reclaim locals 81 0/subop/add %esp 8/imm32 # . restore registers 59/pop-to-ecx # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return print-int32-hex-to-real-screen: # n: int # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write-int32-hex-buffered Stdout *(ebp+8)) (flush Stdout) $print-int32-hex-to-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return print-int32-decimal-to-real-screen: # n: int # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write-int32-decimal-buffered Stdout *(ebp+8)) (flush Stdout) $print-int32-decimal-to-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return write-int32-decimal-buffered: # f: (addr buffered-file), n: int # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # . save registers 51/push-ecx # var ecx: (stream byte 16) 81 5/subop/subtract %esp 0x10/imm32 68/push 0x10/imm32/size 68/push 0/imm32/read 68/push 0/imm32/write 89/<- %ecx 4/r32/esp (write-int32-decimal %ecx *(ebp+0xc)) (write-stream-data *(ebp+8) %ecx) $write-int32-decimal-buffered:end: # . reclaim locals 81 0/subop/add %esp 0x1c/imm32 # . restore registers 59/pop-to-ecx # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return reset-formatting-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "(B") (write 1 Esc) (write 1 "[m") $reset-formatting-on-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return start-color-on-real-screen: # fg: int, bg: int # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # . save registers 51/push-ecx # var buf/ecx: (stream byte 32) 81 5/subop/subtract %esp 0x20/imm32 68/push 0x20/imm32/size 68/push 0/imm32/read 68/push 0/imm32/write 89/<- %ecx 4/r32/esp # construct directive in buf # . set fg (write %ecx Esc) (write %ecx "[38;5;") (write-int32-decimal %ecx *(ebp+8)) (write %ecx "m") # . set bg (write %ecx Esc) (write %ecx "[48;5;") (write-int32-decimal %ecx *(ebp+0xc)) (write %ecx "m") # flush (write-stream 2 %ecx) $start-color-on-real-screen:end: # . reclaim locals 81 0/subop/add %esp 0x2c/imm32 # . restore registers 59/pop-to-ecx # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return start-bold-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[1m") $start-bold-on-real-screen:end: # . 
epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return start-underline-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[4m") $start-underline-on-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return start-reverse-video-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[7m") $start-reverse-video-on-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return # might require enabling blinking in your terminal program start-blinking-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[5m") $start-blinking-on-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return hide-cursor-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[?25l") $hide-cursor-on-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return show-cursor-on-real-screen: # . prologue 55/push-ebp 89/<- %ebp 4/r32/esp # (write 1 Esc) (write 1 "[?12l") (write 1 Esc) (write 1 "[?25h") $show-cursor-on-real-screen:end: # . epilogue 89/<- %esp 5/r32/ebp 5d/pop-to-ebp c3/return # This is a low-level detail; I don't think everything should be a file. # # Open "/dev/tty" if necessary and cache its file descriptor in Terminal-file-descriptor # where later primitives can use it. _maybe-open-terminal: 81 7/subop/compare *Terminal-file-descriptor -1/imm32 75/jump-if-!= $_maybe-open-terminal:epilogue/disp8 # . save registers 50/push-eax 51/push-ecx 53/push-ebx # open("/dev/tty", O_RDWR) bb/copy-to-ebx Terminal-filename/imm32 b9/copy-to-ecx 2/imm32/O_RDWR e8/call syscall_open/disp32 89/<- *Terminal-file-descriptor 0/r32/eax $_maybe-open-terminal:end: # . restore registers 5b/pop-to-ebx 59/pop-to-ecx 58/pop-to-eax $_maybe-open-terminal:epilogue: c3/return == data Terminal-file-descriptor: # (addr int) -1/imm32 Esc: # (addr array byte) # size 1/imm32 # data 0x1b Terminal-filename: # (addr kernel-string) # "/dev/null" 2f/slash 64/d 65/e 76/v 2f/slash 74/t 74/t 79/y 0/nul
{ "pile_set_name": "Github" }
[ssh_colors]
background = #2c3643
cursor = #b4b1b1
foreground = #ffffff
color0 = #080200
color1 = #fa5e5b
color2 = #16c98d
color3 = #ffc83f
color4 = #288ad6
color5 = #d34590
color6 = #28ddde
color7 = #e7e7e7
color8 = #6f6b68
color9 = #fa5e5b
color10 = #16c98d
color11 = #feef6d
color12 = #278ad6
color13 = #d34590
color14 = #27dede
color15 = #ffffff
colorBD = #ffffff
colorIT =
colorUL =
{ "pile_set_name": "Github" }
package org.fossasia.openevent.general.event import android.content.Intent import android.net.Uri import android.os.Build import android.os.Bundle import android.provider.CalendarContract import android.text.Editable import android.text.TextWatcher import android.view.LayoutInflater import android.view.Menu import android.view.MenuInflater import android.view.MenuItem import android.view.View import android.view.ViewGroup import android.widget.ImageView import androidx.appcompat.app.AlertDialog import androidx.core.content.ContextCompat import androidx.core.view.isVisible import androidx.databinding.DataBindingUtil import androidx.fragment.app.Fragment import androidx.lifecycle.Observer import androidx.navigation.Navigation.findNavController import androidx.navigation.fragment.FragmentNavigatorExtras import androidx.navigation.fragment.navArgs import androidx.recyclerview.widget.LinearLayoutManager import com.squareup.picasso.Picasso import kotlinx.android.synthetic.main.content_event.view.alreadyRegisteredLayout import kotlinx.android.synthetic.main.content_event.view.eventDateDetailsFirst import kotlinx.android.synthetic.main.content_event.view.eventDateDetailsSecond import kotlinx.android.synthetic.main.content_event.view.eventDescription import kotlinx.android.synthetic.main.content_event.view.eventImage import kotlinx.android.synthetic.main.content_event.view.eventLocationLinearLayout import kotlinx.android.synthetic.main.content_event.view.eventName import kotlinx.android.synthetic.main.content_event.view.eventOrganiserDescription import kotlinx.android.synthetic.main.content_event.view.eventTimingLinearLayout import kotlinx.android.synthetic.main.content_event.view.feedbackBtn import kotlinx.android.synthetic.main.content_event.view.feedbackProgress import kotlinx.android.synthetic.main.content_event.view.feedbackRv import kotlinx.android.synthetic.main.content_event.view.imageMap import kotlinx.android.synthetic.main.content_event.view.nestedContentEventScroll import kotlinx.android.synthetic.main.content_event.view.noFeedBackTv import kotlinx.android.synthetic.main.content_event.view.priceRangeTextView import kotlinx.android.synthetic.main.content_event.view.seeFeedbackTextView import kotlinx.android.synthetic.main.content_event.view.seeMore import kotlinx.android.synthetic.main.content_event.view.seeMoreOrganizer import kotlinx.android.synthetic.main.content_event.view.sessionContainer import kotlinx.android.synthetic.main.content_event.view.sessionsRv import kotlinx.android.synthetic.main.content_event.view.shimmerSimilarEvents import kotlinx.android.synthetic.main.content_event.view.similarEventsContainer import kotlinx.android.synthetic.main.content_event.view.similarEventsRecycler import kotlinx.android.synthetic.main.content_event.view.socialLinkContainer import kotlinx.android.synthetic.main.content_event.view.socialLinksRecycler import kotlinx.android.synthetic.main.content_event.view.speakerRv import kotlinx.android.synthetic.main.content_event.view.speakersContainer import kotlinx.android.synthetic.main.content_event.view.sponsorsRecyclerView import kotlinx.android.synthetic.main.content_event.view.sponsorsSummaryContainer import kotlinx.android.synthetic.main.content_event.view.ticketPriceLinearLayout import kotlinx.android.synthetic.main.content_fetching_event_error.view.retry import kotlinx.android.synthetic.main.dialog_feedback.view.feedback import kotlinx.android.synthetic.main.dialog_feedback.view.feedbackTextInputLayout import 
kotlinx.android.synthetic.main.dialog_feedback.view.feedbackrating import kotlinx.android.synthetic.main.fragment_event.view.buttonTickets import kotlinx.android.synthetic.main.fragment_event.view.container import kotlinx.android.synthetic.main.fragment_event.view.eventErrorCard import org.fossasia.openevent.general.R import org.fossasia.openevent.general.common.EventClickListener import org.fossasia.openevent.general.common.FavoriteFabClickListener import org.fossasia.openevent.general.common.SessionClickListener import org.fossasia.openevent.general.common.SpeakerClickListener import org.fossasia.openevent.general.databinding.FragmentEventBinding import org.fossasia.openevent.general.event.EventUtils.loadMapUrl import org.fossasia.openevent.general.event.similarevent.SimilarEventsListAdapter import org.fossasia.openevent.general.feedback.FeedbackRecyclerAdapter import org.fossasia.openevent.general.feedback.LIMITED_FEEDBACK_NUMBER import org.fossasia.openevent.general.sessions.SessionRecyclerAdapter import org.fossasia.openevent.general.social.SocialLinksRecyclerAdapter import org.fossasia.openevent.general.speakers.SpeakerRecyclerAdapter import org.fossasia.openevent.general.sponsor.SponsorClickListener import org.fossasia.openevent.general.sponsor.SponsorRecyclerAdapter import org.fossasia.openevent.general.utils.EVENT_IDENTIFIER import org.fossasia.openevent.general.utils.Utils import org.fossasia.openevent.general.utils.Utils.progressDialog import org.fossasia.openevent.general.utils.Utils.setToolbar import org.fossasia.openevent.general.utils.Utils.show import org.fossasia.openevent.general.utils.extensions.nonNull import org.fossasia.openevent.general.utils.extensions.setSharedElementEnterTransition import org.fossasia.openevent.general.utils.nullToEmpty import org.fossasia.openevent.general.utils.stripHtml import org.jetbrains.anko.design.longSnackbar import org.jetbrains.anko.design.snackbar import org.koin.androidx.viewmodel.ext.android.viewModel import timber.log.Timber const val EVENT_DETAIL_FRAGMENT = "eventDetailFragment" class EventDetailsFragment : Fragment() { private val eventViewModel by viewModel<EventDetailsViewModel>() private val safeArgs: EventDetailsFragmentArgs by navArgs() private val feedbackAdapter = FeedbackRecyclerAdapter(true) private val speakersAdapter = SpeakerRecyclerAdapter() private val sponsorsAdapter = SponsorRecyclerAdapter() private val sessionsAdapter = SessionRecyclerAdapter() private val socialLinkAdapter = SocialLinksRecyclerAdapter() private val similarEventsAdapter = SimilarEventsListAdapter() private var hasSimilarEvents: Boolean = false private lateinit var rootView: View private lateinit var binding: FragmentEventBinding private val LINE_COUNT: Int = 3 private val LINE_COUNT_ORGANIZER: Int = 2 private var menuActionBar: Menu? = null private var currentEvent: Event? = null override fun onCreateView( inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle? ): View? 
{ setSharedElementEnterTransition() binding = DataBindingUtil.inflate(inflater, R.layout.fragment_event, container, false) val progressDialog = progressDialog(context, getString(R.string.loading_message)) rootView = binding.root setToolbar(activity) setHasOptionsMenu(true) setupOrder() setupEventOverview() setupSocialLinks() setupFeedback() setupSessions() setupSpeakers() setupSponsors() setupSimilarEvents() rootView.buttonTickets.setOnClickListener { val ticketUrl = currentEvent?.ticketUrl if (!ticketUrl.isNullOrEmpty() && Uri.parse(ticketUrl).host != getString(R.string.FRONTEND_HOST)) { Utils.openUrl(requireContext(), ticketUrl) } else { loadTicketFragment() } } eventViewModel.popMessage .nonNull() .observe(viewLifecycleOwner, Observer { rootView.snackbar(it) showEventErrorScreen(it == getString(R.string.error_fetching_event_message)) }) eventViewModel.progress .nonNull() .observe(viewLifecycleOwner, Observer { progressDialog.show(it) }) rootView.retry.setOnClickListener { currentEvent?.let { eventViewModel.loadEvent(it.id) } } return rootView } private fun setupOrder() { if (eventViewModel.orders.value == null) eventViewModel.loadOrders() eventViewModel.orders .nonNull() .observe(viewLifecycleOwner, Observer { it.forEach { order -> if (order.event?.id == safeArgs.eventId) { rootView.alreadyRegisteredLayout.isVisible = true rootView.alreadyRegisteredLayout.setOnClickListener { order.identifier?.let { identifier -> EventDetailsFragmentDirections.actionEventDetailsToOrderDetail( eventId = safeArgs.eventId, orderId = order.id, orderIdentifier = identifier ) }?.let { navigation -> findNavController(rootView).navigate(navigation) } } return@forEach } } }) } private fun setupEventOverview() { eventViewModel.event .nonNull() .observe(viewLifecycleOwner, Observer { currentEvent = it loadEvent(it) if (eventViewModel.similarEvents.value == null) { val eventTopicId = it.eventTopic?.id ?: 0 val eventLocation = it.searchableLocationName ?: it.locationName eventViewModel.fetchSimilarEvents(it.id, eventTopicId, eventLocation) } if (eventViewModel.eventFeedback.value == null) eventViewModel.fetchEventFeedback(it.id) if (eventViewModel.eventSessions.value == null) eventViewModel.fetchEventSessions(it.id) if (eventViewModel.eventSpeakers.value == null) eventViewModel.fetchEventSpeakers(it.id) if (eventViewModel.eventSponsors.value == null) eventViewModel.fetchEventSponsors(it.id) if (eventViewModel.socialLinks.value == null) eventViewModel.fetchSocialLink(it.id) if (eventViewModel.priceRange.value == null) eventViewModel.syncTickets(it) // Update favorite icon and external event url menu option activity?.invalidateOptionsMenu() Timber.d("Fetched events of id ${it.id}") showEventErrorScreen(false) setHasOptionsMenu(true) }) eventViewModel.priceRange .nonNull() .observe(viewLifecycleOwner, Observer { rootView.ticketPriceLinearLayout.isVisible = true rootView.priceRangeTextView.text = it }) val eventIdentifier = arguments?.getString(EVENT_IDENTIFIER) val event = eventViewModel.event.value when { event != null -> { currentEvent = event loadEvent(event) } !eventIdentifier.isNullOrEmpty() -> eventViewModel.loadEventByIdentifier(eventIdentifier) else -> eventViewModel.loadEvent(safeArgs.eventId) } // Set toolbar title to event name if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { rootView.nestedContentEventScroll.setOnScrollChangeListener { _, _, scrollY, _, _ -> if (scrollY > rootView.eventName.height + rootView.eventImage.height) /*Toolbar title set to name of Event if scrolled more than combined height 
of eventImage and eventName views*/ setToolbar(activity, eventViewModel.event.value?.name ?: "") else // Toolbar title set to an empty string setToolbar(activity) } } } private fun setupSocialLinks() { val socialLinkLinearLayoutManager = LinearLayoutManager(context) socialLinkLinearLayoutManager.orientation = LinearLayoutManager.HORIZONTAL rootView.socialLinksRecycler.layoutManager = socialLinkLinearLayoutManager rootView.socialLinksRecycler.adapter = socialLinkAdapter eventViewModel.socialLinks.observe(viewLifecycleOwner, Observer { socialLinkAdapter.addAll(it) rootView.socialLinkContainer.isVisible = it.isNotEmpty() }) } private fun setupFeedback() { rootView.feedbackRv.layoutManager = LinearLayoutManager(context) rootView.feedbackRv.adapter = feedbackAdapter eventViewModel.feedbackProgress .nonNull() .observe(viewLifecycleOwner, Observer { rootView.feedbackProgress.isVisible = it rootView.feedbackBtn.isVisible = !it }) eventViewModel.eventFeedback.observe(viewLifecycleOwner, Observer { feedbackAdapter.addAll(it) if (it.isEmpty()) { rootView.feedbackRv.isVisible = false rootView.noFeedBackTv.isVisible = true rootView.seeFeedbackTextView.isVisible = false } else { rootView.feedbackRv.isVisible = true rootView.noFeedBackTv.isVisible = false rootView.seeFeedbackTextView.isVisible = it.size >= LIMITED_FEEDBACK_NUMBER } }) eventViewModel.submittedFeedback .nonNull() .observe(viewLifecycleOwner, Observer { if (feedbackAdapter.itemCount < LIMITED_FEEDBACK_NUMBER) feedbackAdapter.add(it) else rootView.seeFeedbackTextView.isVisible = true rootView.feedbackRv.isVisible = true rootView.noFeedBackTv.isVisible = false }) rootView.feedbackBtn.setOnClickListener { checkForAuthentication() } } private fun setupSpeakers() { val linearLayoutManager = LinearLayoutManager(context) linearLayoutManager.orientation = LinearLayoutManager.HORIZONTAL rootView.speakerRv.layoutManager = linearLayoutManager rootView.speakerRv.adapter = speakersAdapter eventViewModel.eventSpeakers.observe(viewLifecycleOwner, Observer { speakersAdapter.addAll(it) rootView.speakersContainer.isVisible = it.isNotEmpty() }) val speakerClickListener: SpeakerClickListener = object : SpeakerClickListener { override fun onClick(speakerId: Long) { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToSpeaker(speakerId)) } } speakersAdapter.apply { onSpeakerClick = speakerClickListener } } private fun setupSessions() { val linearLayoutManagerSessions = LinearLayoutManager(context) linearLayoutManagerSessions.orientation = LinearLayoutManager.HORIZONTAL rootView.sessionsRv.layoutManager = linearLayoutManagerSessions rootView.sessionsRv.adapter = sessionsAdapter eventViewModel.eventSessions.observe(viewLifecycleOwner, Observer { sessionsAdapter.addAll(it) rootView.sessionContainer.isVisible = it.isNotEmpty() }) val sessionClickListener: SessionClickListener = object : SessionClickListener { override fun onClick(sessionId: Long) { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToSession(sessionId)) } } sessionsAdapter.apply { onSessionClick = sessionClickListener } } private fun setupSponsors() { val sponsorLinearLayoutManager = LinearLayoutManager(context) sponsorLinearLayoutManager.orientation = LinearLayoutManager.HORIZONTAL rootView.sponsorsRecyclerView.layoutManager = sponsorLinearLayoutManager rootView.sponsorsRecyclerView.adapter = sponsorsAdapter eventViewModel.eventSponsors.observe(viewLifecycleOwner, Observer { sponsors -> sponsorsAdapter.addAll(sponsors) 
rootView.sponsorsSummaryContainer.isVisible = sponsors.isNotEmpty() }) val sponsorClickListener: SponsorClickListener = object : SponsorClickListener { override fun onClick() { moveToSponsorSection() } } sponsorsAdapter.apply { onSponsorClick = sponsorClickListener } rootView.sponsorsSummaryContainer.setOnClickListener { moveToSponsorSection() } } private fun setupSimilarEvents() { eventViewModel.similarEventsProgress .nonNull() .observe(viewLifecycleOwner, Observer { rootView.shimmerSimilarEvents.isVisible = it if (it) { rootView.shimmerSimilarEvents.startShimmer() rootView.similarEventsContainer.isVisible = true } else { rootView.shimmerSimilarEvents.stopShimmer() if (!similarEventsAdapter.currentList.isNullOrEmpty()) { hasSimilarEvents = true } rootView.similarEventsContainer.isVisible = hasSimilarEvents } }) val similarLinearLayoutManager = LinearLayoutManager(context) similarLinearLayoutManager.orientation = LinearLayoutManager.HORIZONTAL rootView.similarEventsRecycler.layoutManager = similarLinearLayoutManager rootView.similarEventsRecycler.adapter = similarEventsAdapter eventViewModel.similarEvents .nonNull() .observe(viewLifecycleOwner, Observer { similarEvents -> similarEventsAdapter.submitList(similarEvents) }) } private fun loadEvent(event: Event) { val startsAt = EventUtils.getEventDateTime(event.startsAt, event.timezone) val endsAt = EventUtils.getEventDateTime(event.endsAt, event.timezone) binding.event = event binding.executePendingBindings() // Set Cover Image Picasso.get() .load(event.originalImageUrl) .placeholder(R.drawable.header) .into(rootView.eventImage) // Organizer Section if (!event.ownerName.isNullOrEmpty()) { val organizerDescriptionListener = View.OnClickListener { rootView.eventOrganiserDescription.toggle() rootView.seeMoreOrganizer.text = if (rootView.eventOrganiserDescription.isExpanded) getString(R.string.see_less) else getString(R.string.see_more) } rootView.eventOrganiserDescription.post { if (rootView.eventOrganiserDescription.lineCount > LINE_COUNT_ORGANIZER) { rootView.seeMoreOrganizer.isVisible = true // Set up toggle organizer description rootView.seeMoreOrganizer.setOnClickListener(organizerDescriptionListener) rootView.eventOrganiserDescription.setOnClickListener(organizerDescriptionListener) } } } // About event on-click val aboutEventOnClickListener = View.OnClickListener { currentEvent?.let { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToAboutEvent(it.id)) } } // Event Description Section val description = event.description.nullToEmpty().stripHtml() if (!description.isNullOrEmpty()) { rootView.eventDescription.post { if (rootView.eventDescription.lineCount > LINE_COUNT) { rootView.seeMore.isVisible = true // start about fragment rootView.eventDescription.setOnClickListener(aboutEventOnClickListener) rootView.seeMore.setOnClickListener(aboutEventOnClickListener) } } } // load location to map val mapClickListener = View.OnClickListener { startMap(event) } if (!event.locationName.isNullOrEmpty() && hasCoordinates(event)) { rootView.imageMap.setOnClickListener(mapClickListener) rootView.eventLocationLinearLayout.setOnClickListener(mapClickListener) Picasso.get() .load(eventViewModel.loadMap(event)) .placeholder(R.drawable.ic_map_black) .error(R.drawable.ic_map_black) .into(rootView.imageMap) } else { rootView.imageMap.isVisible = false } // Date and Time section rootView.eventDateDetailsFirst.text = EventUtils.getFormattedEventDateTimeRange(startsAt, endsAt) rootView.eventDateDetailsSecond.text = 
EventUtils.getFormattedEventDateTimeRangeSecond(startsAt, endsAt) // Add event to Calendar val dateClickListener = View.OnClickListener { startCalendar(event) } rootView.eventTimingLinearLayout.setOnClickListener(dateClickListener) } private fun hasCoordinates(event: Event): Boolean { return event.longitude != null && event.longitude != 0.00 && event.latitude != null && event.latitude != 0.00 } override fun onViewCreated(view: View, savedInstanceState: Bundle?) { super.onViewCreated(view, savedInstanceState) eventViewModel.connection .nonNull() .observe(viewLifecycleOwner, Observer { isConnected -> if (isConnected) { val currentFeedback = eventViewModel.eventFeedback.value if (currentFeedback == null) { currentEvent?.let { eventViewModel.fetchEventFeedback(it.id) } } else { feedbackAdapter.addAll(currentFeedback) if (currentFeedback.isEmpty()) { rootView.feedbackRv.isVisible = false rootView.noFeedBackTv.isVisible = true } else { rootView.feedbackRv.isVisible = true rootView.noFeedBackTv.isVisible = false } } val currentSpeakers = eventViewModel.eventSpeakers.value if (currentSpeakers == null) { currentEvent?.let { eventViewModel.fetchEventSpeakers(it.id) } } else { speakersAdapter.addAll(currentSpeakers) rootView.speakersContainer.isVisible = currentSpeakers.isNotEmpty() } val currentSessions = eventViewModel.eventSessions.value if (currentSessions == null) { currentEvent?.let { eventViewModel.fetchEventSessions(it.id) } } else { sessionsAdapter.addAll(currentSessions) rootView.sessionContainer.isVisible = currentSessions.isNotEmpty() } val currentSponsors = eventViewModel.eventSponsors.value if (currentSponsors == null) { currentEvent?.let { eventViewModel.fetchEventSponsors(it.id) } } else { sponsorsAdapter.addAll(currentSponsors) rootView.sponsorsSummaryContainer.isVisible = currentSponsors.isNotEmpty() } val currentSocialLinks = eventViewModel.socialLinks.value if (currentSocialLinks == null) { currentEvent?.let { eventViewModel.fetchSocialLink(it.id) } } else { socialLinkAdapter.addAll(currentSocialLinks) rootView.socialLinkContainer.isVisible = currentSocialLinks.isNotEmpty() } } }) val eventClickListener: EventClickListener = object : EventClickListener { override fun onClick(eventID: Long, imageView: ImageView) { findNavController(rootView) .navigate(EventDetailsFragmentDirections.actionSimilarEventsToEventDetails(eventID), FragmentNavigatorExtras(imageView to "eventDetailImage")) } } val redirectToLogin = object : RedirectToLogin { override fun goBackToLogin() { redirectToLogin() } } val favFabClickListener: FavoriteFabClickListener = object : FavoriteFabClickListener { override fun onClick(event: Event, itemPosition: Int) { if (eventViewModel.isLoggedIn()) { event.favorite = !event.favorite eventViewModel.setFavorite(event, event.favorite) similarEventsAdapter.notifyItemChanged(itemPosition) } else { EventUtils.showLoginToLikeDialog(requireContext(), layoutInflater, redirectToLogin, event.originalImageUrl, event.name) } } } similarEventsAdapter.apply { onEventClick = eventClickListener onFavFabClick = favFabClickListener } rootView.seeFeedbackTextView.setOnClickListener { currentEvent?.let { findNavController(rootView).navigate(EventDetailsFragmentDirections.actionEventDetailsToFeedback(it.id)) } } } override fun onOptionsItemSelected(item: MenuItem): Boolean { return when (item.itemId) { android.R.id.home -> { activity?.onBackPressed() true } R.id.add_to_calendar -> { // Add event to Calendar currentEvent?.let { startCalendar(it) } true } R.id.report_event -> { 
currentEvent?.let { reportEvent(it) } true } R.id.open_external_event_url -> { currentEvent?.externalEventUrl?.let { Utils.openUrl(requireContext(), it) } true } R.id.favorite_event -> { currentEvent?.let { if (eventViewModel.isLoggedIn()) { it.favorite = !it.favorite eventViewModel.setFavorite(it, it.favorite) currentEvent = it } else { EventUtils.showLoginToLikeDialog(requireContext(), layoutInflater, object : RedirectToLogin { override fun goBackToLogin() { redirectToLogin() } }, it.originalImageUrl, it.name) } } true } R.id.call_for_speakers -> { currentEvent?.let { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToSpeakersCall(it.identifier, it.id, it.timezone)) } true } R.id.event_share -> { currentEvent?.let { EventUtils.share(it, requireContext()) } return true } R.id.code_of_conduct -> { currentEvent?.let { event -> findNavController(rootView) .navigate(EventDetailsFragmentDirections.actionEventDetailsToConductCode(event.id)) } return true } R.id.open_faqs -> { currentEvent?.let { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToFaq(it.id)) } return true } else -> super.onOptionsItemSelected(item) } } private fun startCalendar(event: Event) { val intent = Intent(Intent.ACTION_INSERT) intent.type = "vnd.android.cursor.item/event" intent.putExtra(CalendarContract.Events.TITLE, event.name) intent.putExtra(CalendarContract.Events.DESCRIPTION, event.description?.stripHtml()) intent.putExtra(CalendarContract.Events.EVENT_LOCATION, event.locationName) intent.putExtra(CalendarContract.Events.CALENDAR_TIME_ZONE, event.timezone) intent.putExtra(CalendarContract.EXTRA_EVENT_BEGIN_TIME, EventUtils.getTimeInMilliSeconds(event.startsAt, event.timezone)) intent.putExtra(CalendarContract.EXTRA_EVENT_END_TIME, EventUtils.getTimeInMilliSeconds(event.endsAt, event.timezone)) startActivity(intent) } private fun reportEvent(event: Event) { val email = "[email protected]" val subject = "Report of ${event.name} (${event.identifier})" val body = "Let us know what's wrong" val emailIntent = Intent(Intent.ACTION_SENDTO, Uri.parse("mailto:$email")) emailIntent.putExtra(Intent.EXTRA_SUBJECT, subject) emailIntent.putExtra(Intent.EXTRA_TEXT, body) startActivity(Intent.createChooser(emailIntent, "Chooser Title")) } override fun onCreateOptionsMenu(menu: Menu, inflater: MenuInflater) { inflater.inflate(R.menu.event_details, menu) menuActionBar = menu } override fun onPrepareOptionsMenu(menu: Menu) { currentEvent?.let { currentEvent -> if (currentEvent.externalEventUrl.isNullOrBlank()) menu.findItem(R.id.open_external_event_url).isVisible = false if (currentEvent.codeOfConduct.isNullOrBlank()) menu.findItem(R.id.code_of_conduct).isVisible = false setFavoriteIconFilled(currentEvent.favorite) } super.onPrepareOptionsMenu(menu) } override fun onDestroyView() { super.onDestroyView() Picasso.get().cancelRequest(rootView.eventImage) speakersAdapter.onSpeakerClick = null sponsorsAdapter.onSponsorClick = null sessionsAdapter.onSessionClick = null similarEventsAdapter.apply { onEventClick = null onFavFabClick = null } } private fun loadTicketFragment() { val currency = currentEvent?.paymentCurrency ?: "USD" currentEvent?.let { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToTickets(it.id, currency)) } } private fun startMap(event: Event) { // start map intent val mapUrl = loadMapUrl(event) val mapIntent = Intent(Intent.ACTION_VIEW, Uri.parse(mapUrl)) val packageManager = activity?.packageManager if 
(packageManager != null && mapIntent.resolveActivity(packageManager) != null) { startActivity(mapIntent) } } private fun setFavoriteIconFilled(filled: Boolean) { val id = when { filled -> R.drawable.ic_baseline_favorite_white else -> R.drawable.ic_baseline_favorite_border_white } menuActionBar?.findItem(R.id.favorite_event)?.icon = ContextCompat.getDrawable(requireContext(), id) } private fun showEventErrorScreen(show: Boolean) { rootView.container.isVisible = !show rootView.eventErrorCard.isVisible = show val menuItemSize = menuActionBar?.size() ?: 0 for (i in 0 until menuItemSize) { menuActionBar?.getItem(i)?.isVisible = !show } } private fun checkForAuthentication() { if (eventViewModel.isLoggedIn()) writeFeedback() else { rootView.nestedContentEventScroll.longSnackbar(getString(R.string.log_in_first)) redirectToLogin() } } private fun redirectToLogin() { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToAuth(getString(R.string.log_in_first), EVENT_DETAIL_FRAGMENT)) } private fun writeFeedback() { val layout = layoutInflater.inflate(R.layout.dialog_feedback, null) val alertDialog = AlertDialog.Builder(requireContext()) .setTitle(getString(R.string.submit_feedback)) .setView(layout) .setPositiveButton(getString(R.string.submit)) { _, _ -> currentEvent?.let { eventViewModel.submitFeedback(layout.feedback.text.toString(), layout.feedbackrating.rating, it.id) } } .setNegativeButton(getString(R.string.cancel)) { dialog, _ -> dialog.cancel() } .show() alertDialog.getButton(AlertDialog.BUTTON_POSITIVE).isEnabled = false layout.feedback.addTextChangedListener(object : TextWatcher { override fun afterTextChanged(p0: Editable?) { if (layout.feedback.text.toString().isNotEmpty()) { layout.feedbackTextInputLayout.error = null alertDialog.getButton(AlertDialog.BUTTON_POSITIVE).isEnabled = true layout.feedbackTextInputLayout.isErrorEnabled = false } else { layout.feedbackTextInputLayout.error = getString(R.string.cant_be_empty) } } override fun beforeTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) { /*Implement here*/ } override fun onTextChanged(p0: CharSequence?, p1: Int, p2: Int, p3: Int) { /*Implement here*/ } }) } private fun moveToSponsorSection() { currentEvent?.let { findNavController(rootView).navigate(EventDetailsFragmentDirections .actionEventDetailsToSponsor(it.id)) } } }
{ "pile_set_name": "Github" }
/* * Copyright (C) 2010-2020 Structr GmbH * * This file is part of Structr <http://structr.org>. * * Structr is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * Structr is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Structr. If not, see <http://www.gnu.org/licenses/>. */ package org.structr.test.web.advanced; import java.io.IOException; import java.io.InputStream; import java.util.List; import org.testng.annotations.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.structr.common.AccessMode; import org.structr.common.Permission; import org.structr.common.SecurityContext; import org.structr.common.error.FrameworkException; import org.structr.core.app.App; import org.structr.core.app.StructrApp; import org.structr.core.entity.AbstractNode; import org.structr.core.entity.Principal; import org.structr.core.graph.NodeAttribute; import org.structr.core.graph.Tx; import org.structr.core.property.PropertyKey; import org.structr.test.web.StructrUiTest; import org.structr.web.common.FileHelper; import org.structr.web.common.ImageHelper; import org.structr.web.entity.Folder; import org.structr.web.entity.Image; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.fail; public class ImageTest extends StructrUiTest { private static final Logger logger = LoggerFactory.getLogger(ImageTest.class.getName()); private static final String Base64ImageData = 
"iVBORw0KGgoAAAANSUhEUgAAAGQAAAA7CAYAAACNOi92AAAGcklEQVR42u2ca0/bVhjH+/X2eu/2FfYNJu3FJm3Spk2rqq1Sq95ooS1sXYGiQEnKnYQ2BCiEkBDIxXZsx87NuZAr/JfnSGZh0MQOYSTlPNJfcY7PObbP7xyf25PcArdLmWEYqNfrPcvv1k0uzEKhgHw+fyltbGwgFotdOp9qtdpbILlcbiBr99HR0SeVzWbPxaHvmUwGqVTqNEyW5bb5FIvFtufNOOeAECmqNXZFN+7xeJBMJrtKTw/Zj0BmZ2eZHj58iDdv3uDevXt49eoVhoaGWPjo6CgeP36MO3fusOPh4WE4nU52/tGjR3j58iUmJydx//59aJpmHwgVbLtEVDMo49baQS2DwkulUtc1hK57HUb33u6+qIISNFMUn0THdI6O3W430un06aunNb4Zjz47lUFXQKiWLC4uMvovXryAw+FgtYdqDYXNzMywz/HxcUxMTODp06d49uwZS9ePQGq1WsdXSSdNrT3AQWzv0vmcnJzYB2K+U+mTagUdU2uh77qun75bzZpEcSjMSssbVHMmfoZRU65mlFWpVLqiS83yr4U/zrzK7KhcLnMgvR72Tgnf3sihMwfCgXAgHAgHwoF8VkDYePzgBzY57OVCW78aPSM9K2k69mN/AaGbo4WxXDXBvtNNXpXRsNwsCJK5IGfFaC7Umva/srMW1/qMzsRPKNZT/QWEZrzboQ/NAqpcCITmGe0Kw2rBmHnT9RonNWhGvFk7ZUsqFA3IqgQ9lUQkdoi4EIUkC5ASAuJiFLlC2nJe6bzSvH4VESkAh/gdSvV0/wGpn1SarURCykigUEuekW5IKFdK7OElWUSiKVESoCZlaHoSSV2FoiaahaXBKGTOpc+VVbbWQzP6o3oO2/seFOoa3OIwltS7lqQbYrPSeLG2sYjltXdYeT8Ht3ceS24XYkoQQsZvOa/d5Lvm9XVkchrmldv9B4RWbNnrpFzAtjzbvOnfz2hFeAJR24dr0YHl9y54fAtYWHnLjilsfXsVq945+PxuSJm9c+nnY3eRrYoMLL0aj4+Pbb8eO8W1k5f5vCSn+Gt/ASGjAjJlLpJ1+7AXxaVBA1lAXsJhav10lZZappWOl+LSWtunREs+dpZvKE9z68Ap/NJ/QDpZo9Gw3Ie0A3KYX8Fu1mEJcGscWmtrZ1SJzGtYGVG2VrrJ8PeoHZcHC0ivgPo1J7zKn9jT5xmYdoqlt1CsZeAPbiJtJKHocWz51+Hd9MC3tYZgeAfBg21sbntZv7SlTHXMk/RRcaDcMCDIB8g3+7dIytfTZx2oPfV9Yx53A1/gY3oc0YKnrRLZEKrHRVQaBWQNHbliCnuhHUSFcBOGH6GDAA6iQRZWbRQR0lc65kna11ZZvqmcykZavR7mDxSQUG4O77Uhyx0v9TEkGp2ZxxeJOmerfQi9smj+Y6Y1d/puJJBgzoWt9N+WBxrUsVuV3ZGlqYsGMTcGSCA7g43U6Ge9LDNQQMYiX+PJ/lccSL/YnrSGj7E5DqSXw1da5DOdIOyKPAQjkUjX6UntJpM3Dog5Kmnn8ODz+U49VUzvFvIFo2MKTyQSEEURkiSxczTLpmM6R7NuK04VHIgNIOT9R35eIyMjcLlc7Pvz588xNjaGqakp5udFvmGvX79m4eRBSOHkQUh+YDTc5UBsDBepVpu1365UVYWiKF2np1ZEcw4OpEfmj6/Ad/iWd+r9YpG8uzkXmeZAOBAOhAPhQDgQDmSQgdCuX7agMY+P6/o9CQfSYrQRRJtNako4sylkd3vYVL/ORwYKyAkaiKmBM0C63bG7Soe+GwOEjFqJlAlgTv4N8/JteMQRhIUd5kq0G/ZhJ+TFh61lvFtywL0+j4XVt5h2TmBueRprviX4dlaxF9lEUFti6UkR3YdiqXDqTcKBWDBaB/vXrafEXG9I6bzKCpNEzndSQkQqrTHPxHzRYM55qqpAViTmiCdKcWRzaeapSOnJDZT1S1UB9Ubt2lvOwP9xAC0WdtOHtLoGsT6lVkQ4usuB9E//dIx8TeVA+gVIvphlPle99iLhQLo08rWinxbwURYHwoFwIANgO5lJPAh92VPHaQ7kEpaqRDER/Ya3kP/DaC+ffHfbKR6PM+cJ868+2okDuaRRIZq/ePqUzD8n6xTvqpdWOJCmyLcrHA5DEATm88WBXDMQmhi2/tUUB3LFRnsmvA8ZQDN/3ctHWdw4EA6EmyX7B3U7uu30qAOXAAAAAElFTkSuQmCC"; @Test public void testThumbnailGeneration() { final PropertyKey passwordKey = StructrApp.key(Principal.class, "password"); Principal tester1 = null; Principal tester2 = null; Principal tester3 = null; try (final Tx tx = app.tx()) { tester1 = app.create(Principal.class, new NodeAttribute<>(Principal.name, "tester1"), new NodeAttribute<>(passwordKey, "test")); tester2 = app.create(Principal.class, new NodeAttribute<>(Principal.name, "tester2"), new NodeAttribute<>(passwordKey, "test")); tester3 = app.create(Principal.class, new NodeAttribute<>(Principal.name, "tester3"), new NodeAttribute<>(passwordKey, "test")); final Folder folder1 = FileHelper.createFolderPath(securityContext, "/Test1"); folder1.setProperty(AbstractNode.visibleToAuthenticatedUsers, true); folder1.grant(Permission.write, tester1); folder1.grant(Permission.write, tester2); folder1.grant(Permission.write, tester3); final Folder folder2 = FileHelper.createFolderPath(securityContext, "/Test1/Subtest2"); folder2.setProperty(AbstractNode.visibleToAuthenticatedUsers, true); folder2.grant(Permission.write, tester1); folder2.grant(Permission.write, tester2); folder2.grant(Permission.write, tester3); final Folder folder3 = FileHelper.createFolderPath(securityContext, "/Test1/Subtest3"); 
folder3.setProperty(AbstractNode.visibleToAuthenticatedUsers, true); folder3.grant(Permission.write, tester1); folder3.grant(Permission.write, tester2); folder3.grant(Permission.write, tester3); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } final SecurityContext ctx1 = SecurityContext.getInstance(tester1, AccessMode.Backend); final SecurityContext ctx2 = SecurityContext.getInstance(tester2, AccessMode.Backend); final SecurityContext ctx3 = SecurityContext.getInstance(tester3, AccessMode.Backend); final App app1 = StructrApp.getInstance(ctx1); final App app2 = StructrApp.getInstance(ctx2); final App app3 = StructrApp.getInstance(ctx3); try (final Tx tx = app1.tx()) { createImage(ctx1, "tester1 - image01.png", "/"); createImage(ctx1, "tester1 - image02.png", "/"); createImage(ctx1, "tester1 - image03.png", "/Test1"); createImage(ctx1, "tester1 - image04.png", "/Test1/Subtest2"); createImage(ctx1, "tester1 - image05.png", "/Test1/Subtest3"); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } try (final Tx tx = app2.tx()) { createImage(ctx2, "tester2 - image01.png", "/"); createImage(ctx2, "tester2 - image02.png", "/"); createImage(ctx2, "tester2 - image03.png", "/Test1"); createImage(ctx2, "tester2 - image04.png", "/Test1/Subtest2"); createImage(ctx2, "tester2 - image05.png", "/Test1/Subtest3"); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } try (final Tx tx = app3.tx()) { createImage(ctx3, "tester3 - image01.png", "/"); createImage(ctx3, "tester3 - image02.png", "/"); createImage(ctx3, "tester3 - image03.png", "/Test1"); createImage(ctx3, "tester3 - image04.png", "/Test1/Subtest2"); createImage(ctx3, "tester3 - image05.png", "/Test1/Subtest3"); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } try (final Tx tx = app.tx()) { System.out.println("############# Folders:"); final List<Folder> folders = app.nodeQuery(Folder.class).sort(StructrApp.key(Folder.class, "path")).getAsList(); folders.stream().forEach(f -> { System.out.println(f.getPath()); }); assertEquals("Invalid number of folders after thumbnail creation", 7, folders.size()); assertEquals("Invalid folder path", "/._structr_thumbnails", folders.get(0).getPath()); assertEquals("Invalid folder path", "/._structr_thumbnails/Test1", folders.get(1).getPath()); assertEquals("Invalid folder path", "/._structr_thumbnails/Test1/Subtest2", folders.get(2).getPath()); assertEquals("Invalid folder path", "/._structr_thumbnails/Test1/Subtest3", folders.get(3).getPath()); assertEquals("Invalid folder path", "/Test1", folders.get(4).getPath()); assertEquals("Invalid folder path", "/Test1/Subtest2", folders.get(5).getPath()); assertEquals("Invalid folder path", "/Test1/Subtest3", folders.get(6).getPath()); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } } /* * Test is disabled because we cannot prevent users from creating identical folder paths. If that happens in a production system, the administrator should consider enabling the application.filesystem.enabled flag. 
@Test public void testFolderPaths() { final PropertyKey passwordKey = StructrApp.key(Principal.class, "password"); Principal tester1 = null; Principal tester2 = null; Principal tester3 = null; try (final Tx tx = app.tx()) { tester1 = app.create(Principal.class, new NodeAttribute<>(Principal.name, "tester1"), new NodeAttribute<>(passwordKey, "test")); tester2 = app.create(Principal.class, new NodeAttribute<>(Principal.name, "tester2"), new NodeAttribute<>(passwordKey, "test")); tester3 = app.create(Principal.class, new NodeAttribute<>(Principal.name, "tester3"), new NodeAttribute<>(passwordKey, "test")); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } final SecurityContext ctx1 = SecurityContext.getInstance(tester1, AccessMode.Backend); final SecurityContext ctx2 = SecurityContext.getInstance(tester2, AccessMode.Backend); final SecurityContext ctx3 = SecurityContext.getInstance(tester3, AccessMode.Backend); final App app1 = StructrApp.getInstance(ctx1); final App app2 = StructrApp.getInstance(ctx2); final App app3 = StructrApp.getInstance(ctx3); try (final Tx tx = app1.tx()) { FileHelper.createFolderPath(ctx1, "/Test1/data"); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } try (final Tx tx = app2.tx()) { FileHelper.createFolderPath(ctx2, "/Test1/data"); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } try (final Tx tx = app3.tx()) { FileHelper.createFolderPath(ctx3, "/Test1/data"); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } try (final Tx tx = app.tx()) { System.out.println("############# Folders:"); app.nodeQuery(Folder.class).getAsList().stream().forEach(f -> { System.out.println(f.getPath()); }); tx.success(); } catch (FrameworkException fex) { fail("Unexpected exception."); } } */ private void createImage(final SecurityContext securityContext, final String name, final String folderPath) throws FrameworkException { try( final InputStream is = ImageTest.class.getResourceAsStream("/test/thumbtest.png")) { final Image image = ImageHelper.createImage(securityContext, is, "image/png", Image.class, name, false); final Folder path = FileHelper.createFolderPath(securityContext, folderPath); // set path image.setParent(path); // request thumbnail creation image.getProperty(StructrApp.key(Image.class, "tnMid")); } catch (IOException ioex) { ioex.printStackTrace(); } } }
{ "pile_set_name": "Github" }
/* * Azure Blockchain Workbench REST API * The Azure Blockchain Workbench REST API is a Workbench extensibility point, which allows developers to create and manage blockchain applications, manage users and organizations within a consortium, integrate blockchain applications into services and platforms, perform transactions on a blockchain, and retrieve transactional and contract data from a blockchain. * * OpenAPI spec version: v1 * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ package io.swagger.client; public class Configuration { private static ApiClient defaultApiClient = new ApiClient(); /** * Get the default API client, which would be used when creating API * instances without providing an API client. * * @return Default API client */ public static ApiClient getDefaultApiClient() { return defaultApiClient; } /** * Set the default API client, which would be used when creating API * instances without providing an API client. * * @param apiClient API client */ public static void setDefaultApiClient(ApiClient apiClient) { defaultApiClient = apiClient; } }
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.collections4.multimap; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.HashMap; import java.util.Arrays; import java.util.ArrayList; import junit.framework.Test; import org.apache.commons.collections4.BulkTest; import org.apache.commons.collections4.ListValuedMap; import org.apache.commons.collections4.MultiValuedMap; /** * Test ArrayListValuedHashMap * * @since 4.1 */ public class ArrayListValuedHashMapTest<K, V> extends AbstractMultiValuedMapTest<K, V> { public ArrayListValuedHashMapTest(final String testName) { super(testName); } public static Test suite() { return BulkTest.makeSuite(ArrayListValuedHashMapTest.class); } // ----------------------------------------------------------------------- @Override public ListValuedMap<K, V> makeObject() { return new ArrayListValuedHashMap<>(); } // ----------------------------------------------------------------------- @SuppressWarnings("unchecked") public void testListValuedMapAdd() { final ListValuedMap<K, V> listMap = makeObject(); assertTrue(listMap.get((K) "whatever") instanceof List); final List<V> list = listMap.get((K) "A"); list.add((V) "a1"); assertEquals(1, listMap.size()); assertTrue(listMap.containsKey("A")); } @SuppressWarnings("unchecked") public void testListValuedMapAddViaListIterator() { final ListValuedMap<K, V> listMap = makeObject(); final ListIterator<V> listIt = listMap.get((K) "B").listIterator(); assertFalse(listIt.hasNext()); listIt.add((V) "b1"); listIt.add((V) "b2"); listIt.add((V) "b3"); assertEquals(3, listMap.size()); assertTrue(listMap.containsKey("B")); // As ListIterator always adds before the current cursor assertFalse(listIt.hasNext()); } @SuppressWarnings("unchecked") public void testListValuedMapRemove() { final ListValuedMap<K, V> listMap = makeObject(); final List<V> list = listMap.get((K) "A"); list.add((V) "a1"); list.add((V) "a2"); list.add((V) "a3"); assertEquals(3, listMap.size()); assertEquals("a1", list.remove(0)); assertEquals(2, listMap.size()); assertEquals("a2", list.remove(0)); assertEquals(1, listMap.size()); assertEquals("a3", list.remove(0)); assertEquals(0, listMap.size()); assertFalse(listMap.containsKey("A")); } @SuppressWarnings("unchecked") public void testListValuedMapRemoveViaListIterator() { final ListValuedMap<K, V> listMap = makeObject(); ListIterator<V> listIt = listMap.get((K) "B").listIterator(); listIt.add((V) "b1"); listIt.add((V) "b2"); assertEquals(2, listMap.size()); assertTrue(listMap.containsKey("B")); listIt = listMap.get((K) "B").listIterator(); while (listIt.hasNext()) { listIt.next(); listIt.remove(); } assertFalse(listMap.containsKey("B")); listIt.add((V) "b1"); listIt.add((V) "b2"); assertTrue(listMap.containsKey("B")); 
assertEquals(2, listMap.get((K) "B").size()); } @SuppressWarnings({ "unchecked", "rawtypes" }) public void testEqualsHashCodeContract() { final MultiValuedMap map1 = makeObject(); final MultiValuedMap map2 = makeObject(); map1.put("a", "a1"); map1.put("a", "a2"); map2.put("a", "a1"); map2.put("a", "a2"); assertEquals(map1, map2); assertEquals(map1.hashCode(), map2.hashCode()); map2.put("a", "a2"); assertNotSame(map1, map2); assertNotSame(map1.hashCode(), map2.hashCode()); } @SuppressWarnings({ "unchecked", "rawtypes" }) public void testListValuedMapEqualsHashCodeContract() { final ListValuedMap map1 = makeObject(); final ListValuedMap map2 = makeObject(); map1.put("a", "a1"); map1.put("a", "a2"); map2.put("a", "a1"); map2.put("a", "a2"); assertEquals(map1, map2); assertEquals(map1.hashCode(), map2.hashCode()); map1.put("b", "b1"); map1.put("b", "b2"); map2.put("b", "b2"); map2.put("b", "b1"); assertNotSame(map1, map2); assertNotSame(map1.hashCode(), map2.hashCode()); } public void testArrayListValuedHashMap() { ListValuedMap<K, V> listMap = null; ListValuedMap<K, V> listMap1 = null; final Map<K, V> map = new HashMap<>(); final Map<K, V> map1 = new HashMap<>(); map.put((K) "A", (V) "W"); map.put((K) "B", (V) "X"); map.put((K) "C", (V) "F"); listMap = new ArrayListValuedHashMap<>(map); assertEquals(1, listMap.get((K) "A").size()); assertEquals(1, listMap.get((K) "B").size()); assertEquals(1, listMap.get((K) "C").size()); listMap1 = new ArrayListValuedHashMap<>(map1); assertEquals("{}", listMap1.toString()); } public void testTrimToSize(){ final ArrayListValuedHashMap<K, V> listMap = new ArrayListValuedHashMap<>(4); assertEquals("{}", listMap.toString()); listMap.put((K) "A", (V) "W"); listMap.put((K) "A", (V) "X"); listMap.put((K) "B", (V) "F"); assertEquals(2, listMap.get((K) "A").size()); assertEquals(1, listMap.get((K) "B").size()); listMap.trimToSize(); assertEquals(2, listMap.get((K) "A").size()); assertEquals(1, listMap.get((K) "B").size()); } public void testWrappedListAdd() { final ListValuedMap<K, V> listMap = makeObject(); final List<V> listA = listMap.get((K) "A"); listA.add(0, (V) "W"); listA.add(1, (V) "X"); listA.add(2, (V) "F"); assertEquals("{A=[W, X, F]}", listMap.toString()); listMap.get((K) "A").set(1, (V) "Q"); assertEquals("{A=[W, Q, F]}", listMap.toString()); } public void testWrappedListAddAll() { final ListValuedMap<K, V> listMap = makeObject(); final List<V> listA = listMap.get((K) "A"); final List<V> list = Arrays.asList((V) "W", (V) "X", (V) "F"); listA.addAll(0, list); assertEquals("{A=[W, X, F]}", listMap.toString()); final List<V> list1 = Arrays.asList((V) "Q", (V) "Q", (V) "L"); listA.addAll(3, list1); assertEquals("{A=[W, X, F, Q, Q, L]}", listMap.toString()); assertEquals("W", listMap.get((K) "A").get(0)); assertEquals("X", listMap.get((K) "A").get(1)); assertEquals("F", listMap.get((K) "A").get(2)); assertEquals("Q", listMap.get((K) "A").get(3)); assertEquals("Q", listMap.get((K) "A").get(4)); assertEquals("L", listMap.get((K) "A").get(5)); assertEquals(0, listMap.get((K) "A").indexOf("W")); assertEquals(2, listMap.get((K) "A").indexOf("F")); assertEquals(-1, listMap.get((K) "A").indexOf("C")); assertEquals(3, listMap.get((K) "A").indexOf("Q")); assertEquals(4, listMap.get((K) "A").lastIndexOf("Q")); assertEquals(-1, listMap.get((K) "A").lastIndexOf("A")); final List<V> list2 = new ArrayList<>(); listMap.get((K) "B").addAll(0, list2); assertEquals("{A=[W, X, F, Q, Q, L]}", listMap.toString()); final List<V> list3 = listMap.get((K) "A").subList(1, 4); 
assertEquals(3, list3.size()); assertEquals("Q", list3.get(2)); } public void testValuesListIteratorMethods(){ final ListValuedMap<K, V> listMap = makeObject(); final List<V> listA = listMap.get((K) "A"); final List<V> list = Arrays.asList((V) "W", (V) "X", (V) "F", (V) "Q", (V) "Q", (V) "F"); listA.addAll(0, list); final ListIterator<V> it = listMap.get((K) "A").listIterator(1); assertTrue(it.hasNext()); assertEquals("X", it.next()); assertEquals("F", it.next()); assertTrue(it.hasPrevious()); assertEquals("F", it.previous()); assertEquals(2, it.nextIndex()); assertEquals(1, it.previousIndex()); it.set((V) "Z"); assertEquals("Z", it.next()); assertEquals("Q", it.next()); } // public void testCreate() throws Exception { // writeExternalFormToDisk((java.io.Serializable) makeObject(), // "src/test/resources/data/test/ArrayListValuedHashMap.emptyCollection.version4.1.obj"); // writeExternalFormToDisk((java.io.Serializable) makeFullMap(), // "src/test/resources/data/test/ArrayListValuedHashMap.fullCollection.version4.1.obj"); // } }
{ "pile_set_name": "Github" }
# This migration comes from spree_auth (originally 20150416152553) class AddMissingIndicesOnUser < ActiveRecord::Migration[4.2] def change unless index_exists?(:spree_users, :bill_address_id) add_index :spree_users, :bill_address_id end unless index_exists?(:spree_users, :ship_address_id) add_index :spree_users, :ship_address_id end end end
{ "pile_set_name": "Github" }
#!/usr/bin/env python # -*- coding: utf-8 -*- """Setup for ipypublish.""" import io from importlib import import_module from setuptools import setup, find_packages with open("requirements.txt") as f: requirements = f.read().splitlines() with io.open("README.md") as readme: readme_str = readme.read() setup( name="ipypublish", version=import_module("ipypublish").__version__, description=( "A workflow for creating and editing publication ready " "scientific reports, from one or more Jupyter Notebooks" ), long_description=readme_str, long_description_content_type="text/markdown", install_requires=requirements, extras_require={ "sphinx": {"sphinx>=1.8", "sphinxcontrib-bibtex"}, "tests": { "pytest>=3.6", "pytest-regressions", "pytest-cov", "coverage", "pillow", "nbsphinx>=0.5,<0.6", "ipykernel", "sphinx>=1.6,<3", "sphinxcontrib-bibtex", "texsoup<=0.1.4", }, "code_style": [ "black==19.3b0", "pre-commit==1.17.0", "flake8<3.8.0,>=3.7.0", "doc8<0.9.0,>=0.8.0", "pygments", # required by doc8 ], "science": {"matplotlib", "numpy", "pandas", "sympy"}, "rtd": { "recommonmark>=0.5", "pytest>=4.4", "pillow", "numpy", "matplotlib", "pandas", "sympy<1.3", "sphinx>=1.8", "sphinxcontrib-bibtex", "ipykernel", "ipywidgets>=7.5,<8", }, }, license="MIT", author="Chris Sewell", author_email="[email protected]", url="https://github.com/chrisjsewell/ipypublish", classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: Web Environment", "Intended Audience :: End Users/Desktop", "Intended Audience :: Science/Research", "Intended Audience :: Financial and Insurance Industry", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Scientific/Engineering", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Utilities", "Framework :: Sphinx :: Extension", ], keywords="python, jupyter-notebook, nbconvert, pandoc, latex, pdf", zip_safe=True, packages=find_packages(), include_package_data=True, entry_points={ "console_scripts": [ "nbpublish = ipypublish.frontend.nbpublish:run", "nbpresent = ipypublish.frontend.nbpresent:run", "ipubpandoc = ipypublish.filters_pandoc.main:pandoc_filters", ], "ipypublish.postprocessors": [ "remove-blank-lines = ipypublish.postprocessors.stream_modify:RemoveBlankLines", "remove-trailing-space = ipypublish.postprocessors.stream_modify:RemoveTrailingSpace", "filter-output-files = ipypublish.postprocessors.stream_modify:FilterOutputFiles", "fix-slide-refs = ipypublish.postprocessors.stream_modify:FixSlideReferences", "pdf-export = ipypublish.postprocessors.pdfexport:PDFExport", "write-stream = ipypublish.postprocessors.to_stream:WriteStream", "write-text-file = ipypublish.postprocessors.file_actions:WriteTextFile", "remove-folder = ipypublish.postprocessors.file_actions:RemoveFolder", "write-resource-files = ipypublish.postprocessors.file_actions:WriteResourceFiles", "copy-resource-paths = ipypublish.postprocessors.file_actions:CopyResourcePaths", "reveal-server = ipypublish.postprocessors.reveal_serve:RevealServer", "run-sphinx = ipypublish.postprocessors.sphinx:RunSphinx [sphinx]", "convert-bibgloss = ipypublish.postprocessors.convert_bibgloss:ConvertBibGloss", ], }, )
{ "pile_set_name": "Github" }
//----------------------------------------------- // Copyright 2009 Wellcome Trust Sanger Institute // Written by Jared Simpson ([email protected]) // Released under the GPL license //----------------------------------------------- // // index - Build a BWT/FM-index for a set of reads // #ifndef INDEX_H #define INDEX_H #include <getopt.h> #include "config.h" #include "SuffixArray.h" int indexMain(int argc, char** argv); void indexInMemorySAIS(); void indexInMemoryBCR(); void indexInMemoryRopebwt(); void indexOnDisk(); void buildIndexForTable(std::string outfile, const ReadTable* pRT, bool isReverse); void parseIndexOptions(int argc, char** argv); #endif
{ "pile_set_name": "Github" }
; Adapted from: https://nsis.sourceforge.io/Another_String_Replace_(and_Slash/BackSlash_Converter) (2019-08-31) !macro _StrSlashConstructor out in Push "${in}" Push "\" Call StrSlash Pop ${out} !macroend !define StrSlash '!insertmacro "_StrSlashConstructor"' ; Push $filenamestring (e.g. 'c:\this\and\that\filename.htm') ; Push "\" ; Call StrSlash ; Pop $R0 ; ;Now $R0 contains 'c:/this/and/that/filename.htm' Function StrSlash Exch $R3 ; $R3 = needle ("\" or "/") Exch Exch $R1 ; $R1 = String to replacement in (haystack) Push $R2 ; Replaced haystack Push $R4 ; $R4 = not $R3 ("/" or "\") Push $R6 Push $R7 ; Scratch reg StrCpy $R2 "" StrLen $R6 $R1 StrCpy $R4 "\" StrCmp $R3 "/" loop StrCpy $R4 "/" loop: StrCpy $R7 $R1 1 StrCpy $R1 $R1 $R6 1 StrCmp $R7 $R3 found StrCpy $R2 "$R2$R7" StrCmp $R1 "" done loop found: StrCpy $R2 "$R2$R4" StrCmp $R1 "" done loop done: StrCpy $R3 $R2 Pop $R7 Pop $R6 Pop $R4 Pop $R2 Pop $R1 Exch $R3 FunctionEnd
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.macro.runtime; import org.apache.groovy.lang.annotation.Incubating; import org.codehaus.groovy.ast.expr.MethodCallExpression; import org.codehaus.groovy.control.CompilationUnit; import org.codehaus.groovy.control.SourceUnit; /** * Macro method context. Every macro method must be an extension method of it. * * @since 2.5.0 */ @Incubating public class MacroContext { private final MethodCallExpression call; private final SourceUnit sourceUnit; private final CompilationUnit compilationUnit; public MacroContext(CompilationUnit compilationUnit, SourceUnit sourceUnit, MethodCallExpression call) { this.compilationUnit = compilationUnit; this.sourceUnit = sourceUnit; this.call = call; } /** * * @return original method call expression */ public MethodCallExpression getCall() { return call; } /** * * @return current source unit */ public SourceUnit getSourceUnit() { return sourceUnit; } /** * * @return current compilation unit */ public CompilationUnit getCompilationUnit() { return compilationUnit; } }
{ "pile_set_name": "Github" }
<div class='sheet-4colrow'> <div class='sheet-col'> <label>Player name</label> <label>Street name</label> <label>Real name</label> </div> <div class='sheet-col'> <input type="text" name="attr_player_name" /><br> <input type="text" name="attr_street_name" /><br> <input type="text" name="attr_real_name" /> </div> <div class='sheet-col'> <label>Organization</label> <label>Home city</label> </div> <div class='sheet-col'> <input type="text" name="attr_home_city" /><br /> <input type="text" name="attr_organization" /> </div> </div> <br> <h1>Attributes</h1> <div class='sheet-6colrow sheet-section'> <div class='sheet-col'> <label>Body</label> <label>Dexterity</label> <label>Strength</label> </div> <div class='sheet-col'> <input type="number" name="attr_body" disabled="true" value="@{Dexterity}+@{Strength}" /><button type='roll' value='/roll 4dF+@{Body} Rolling Body'></button><br> <input type="number" name="attr_dexterity" value="0" /><button type='roll' value='/roll 4dF+@{Dexterity} Rolling Dexterity'></button><br> <input type="number" name="attr_strength" value="0" /><button type='roll' value='/roll 4dF+@{Strength} Rolling Strength'></button> </div> <div class='sheet-col'> <label>Mind</label> <label>Focus</label> <label>Spirit</label> </div> <div class='sheet-col'> <input type="number" name="attr_mind" disabled="true" value="@{Focus}+@{Spirit}"><button type='roll' value='/roll 4dF+@{Mind} Rolling Mind'></button><br> <input type="number" name="attr_focus" value="0" /><button type='roll' value='/roll 4dF+@{Focus} Rolling Focus'></button><br> <input type="number" name="attr_spirit" value="0" /><button type='roll' value='/roll 4dF+@{Spirit} Rolling Spirit'></button> </div> <div class='sheet-col'> <label>Persona</label> <label>Presence</label> <label>Status</label> </div> <div class='sheet-col'> <input type="number" name="attr_persona" disabled="true" value="@{Presence}+@{Status}"><button type='roll' value='/roll 4dF+@{Persona} Rolling Persona'></button><br> <input type="number" name="attr_presence" value="0" /><button type='roll' value='/roll 4dF+@{Presence} Rolling Presence'></button><br> <input type="number" name="attr_status" value="0" /><button type='roll' value='/roll 4dF+@{Status} Rolling Status'></button> </div> </div> <div class='sheet-2colrow'> <div class='sheet-col'> <label>Total ODFs <input type="number" name="attr_odf" /></label> <label>Total DDFs <input type="number" name="attr_ddf" /></label> </div> <div class='sheet-col'> <label>Luck <input type="number" name="attr_luck" /></label> <label>BP <input type="number" name="attr_bp" /></label> </div> </div> <div class='sheet-2colrow'> <div class='sheet-col' style='display: inline-block'> <h1>Gifts / Powers</h1> <div class='sheet-section'> <fieldset class="repeating_gifts"> <input type="text" name="attr_gift" class='gift'/> </fieldset> </div> </div> <div class='sheet-col' style='display: inline-block'> <h1>Faults</h1> <div class='sheet-section'> <fieldset class="repeating_faults"> <input type="text" name="attr_fault" class='fault' /> </fieldset> </div> </div> </div> <br> <h1>Skills</h1> <div class='sheet-section'> <span class="sheet-span-header" style='padding: 0px; width: 250px'>Skill name</span> <span class="sheet-span-header" style='padding: 0px; width: 120px'>Linked attribute</span> <span class="sheet-span-header" style='padding: 0px; width: 90px'>Skill level</span> <span class="sheet-span-header" style='padding: 0px; width: 280px'>Specialization</span> <fieldset class="repeating_skills repitem"> <div class='sheet-4colrow'> <div 
class='sheet-col' style='width: 250px'><input type="text" name="attr_skill-name" style='width: 250px' /></div> <div class='sheet-col' style='width: 120px'> <select name="attr_linked_attribute" style='width: 120px'> <option value="Dexterity">Dexterity</option> <option value="Strength">Strength</option> <option value="Focus">Focus</option> <option value="Spirit">Spirit</option> <option value="Presence">Presence</option> <option value="Status">Status</option> </select> </div> <div class='sheet-col' class='width: 90px'><input type="number" name="attr_skill-level" style='width: 90px' /></div> <div class='sheet-col' class='width: 280px'><input type="text" name="attr_skill-specialization" style='width: 280px' /></div> <button type="roll" value="/roll 4dF+@{skill-level} Rolling @{skill-name}"> Roll </button> </div> </fieldset> </div> <br> <h1>Wound Track</h1> <div class='sheet-7colrow sheet-section repitem'> <div class='sheet-col'> <p>Damage Dealt</p> <p>Physical Track</p> <p>Wound Level</p> <p>Mental Track</p> </div> <div class='sheet-col sheet-damage'> <p>0</p> <p>-</p> <p>None</p> <p>-</p> </div> <div class='sheet-col sheet-damage'> <p>1-2</p> <p><input type="checkbox" name="attr_phys_scratch1" /><input type="checkbox" name="attr_phys_scratch2" /><input type="checkbox" name="attr_phys_scratch3" /></p> <p>Scratched</p> <p><input type="checkbox" name="attr_mental_scratch1" /><input type="checkbox" name="attr_mental_scratch2" /><input type="checkbox" name="attr_mental_scratch3" /></p> </div> <div class='sheet-col sheet-damage'> <p>3-4</p> <p><input type="checkbox" name="attr_phys_hurt" /></p> <p>Hurt</p> <p><input type="checkbox" name="attr_mental_hurt" /></p> </div> <div class='sheet-col sheet-damage'> <p>5-6</p> <p><input type="checkbox" name="attr_phys_veryhurt"/></p> <p>Very Hurt</p> <p><input type="checkbox" name="attr_mental_veryhurt" /></p> </div> <div class='sheet-col sheet-damage'> <p>7-8</p> <p><input type="checkbox" name="attr_phys_incapacitated" /></p> <p>Incapacitated</p> <p><input type="checkbox" name="attr_mental_incapacitated" /></p> </div> <div class='sheet-col sheet-damage'> <p>9+</p> <p><input type="checkbox" name="attr_phys_neardeath" /></p> <p>Near Death</p> <p><input type="checkbox" name="attr_mental_neardeath" /></p> </div> </div> <br /> <!-- Second page --> <h1>Gear</h1> <div class='sheet-section'> <span class="sheet-span-header" style='padding: 0px; width: 260px'>Name</span> <span class="sheet-span-header" style='padding: 0px; width: 100px'>ODF / DDF</span> <span class="sheet-span-header" style='padding: 0px; width: 160px'>Gifts</span> <span class="sheet-span-header" style='padding: 0px; width: 160px'>Faults</span> <span class="sheet-span-header" style='padding: 0px; width: 100px'>Cost</span> <fieldset class='repeating_gear'> <div class='sheet-5colrow'> <div class='sheet-col' style='width: 260px'> <input type='text' name='attr_gear-name' style='width: 260px'/> </div> <div class='sheet-col' style='width: 100px'> <input type='text' name='attr_gear-ODF-DDF' style='width: 100px'/> </div> <div class='sheet-col' style='width: 160px'> <input type='text' name='attr_gear-gifts' style='width: 160px'/> </div> <div class='sheet-col' style='width: 160px'> <input type='text' name='attr_gear-faults' style='width: 160px'/> </div> <div class='sheet-col' style='width: 100px'> <input type='number' name='attr_gear-cost' style='width: 100px'/> </div> </div> </fieldset> </div> <br /> <h1>Cyberware, psionic powers and magic devices</h1> <textarea name='attr_cyberware-psi-magic'></textarea> 
<h1>Vehicle</h1> <div class='sheet-section'> <fieldset class='repeating_vehicle'> <div style='border-bottom-width: 2px; border-bottom-style: solid;'> <div style='display: inline-block'> <label style='text-align: center'>Name</label><input type='text' name='attr_vehicle-name' style='width: 30em'/> </div> <div style='display: inline-block'> <label style='text-align: center'>Size</label><input type='text' name='attr_vehicle-size' style='width: 6em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Speed</label><input type='text' name='attr_vehicle-speed' style='width: 6em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Handling</label><input type='text' name='attr_vehicle-handling' style='width: 6em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Weapons</label><input type='text' name='attr_vehicle-weapons' style='width: 6em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Durability</label><input type='text' name='attr_vehicle-durability' style='width: 6em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Gifts</label><input type='text' name='attr_vehicle-gifts' style='width: 25em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Faults</label><input type='text' name='attr_vehicle-faults' style='width: 25em' /> </div> <div style='display: inline-block'> <label style='text-align: center'>Cost</label><input type='text' name='attr_vehicle-cost' style='width: 6em' /> </div> <div> <br /> </fieldset> </div> <br /> <div class='sheet-2colrow'> <div class='sheet-col' style='width:74%'> <h1>Background & Notes</h1> <textarea name='attr_background-notes'></textarea> </div> <div class='sheet-col' style='width: 20%'> <h1 style='font-size: 150%'>Trait Ladder</h1> <div class='sheet-section'> <table> <tr> <td>Astonishing</td> <td>+7</td> </tr> <tr> <td>Extraordinary</td> <td>+6</td> </tr> <tr> <td>Phenomenal</td> <td>+5</td> </tr> <tr> <td>Wonderful</td> <td>+4</td> </tr> <tr> <td>Superb</td> <td>+3</td> </tr> <tr> <td>Great</td> <td>+2</td> </tr> <tr> <td>Good</td> <td>+1</td> </tr> <tr> <td>Fair</td> <td>0</td> </tr> <tr> <td>Mediocre</td> <td>-1</td> </tr> <tr> <td>Poor</td> <td>-2</td> </tr> <tr> <td>Abysmal</td> <td>-3</td> </tr> </table> </div> </div> </div>
{ "pile_set_name": "Github" }
<?php /* * * ____ _ _ __ __ _ __ __ ____ * | _ \ ___ ___| | _____| |_| \/ (_)_ __ ___ | \/ | _ \ * | |_) / _ \ / __| |/ / _ \ __| |\/| | | '_ \ / _ \_____| |\/| | |_) | * | __/ (_) | (__| < __/ |_| | | | | | | | __/_____| | | | __/ * |_| \___/ \___|_|\_\___|\__|_| |_|_|_| |_|\___| |_| |_|_| * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * @author PocketMine Team * @link http://www.pocketmine.net/ * * */ declare(strict_types=1); namespace pocketmine\network\mcpe\protocol; #include <rules/DataPacket.h> use pocketmine\network\mcpe\NetworkSession; use pocketmine\network\mcpe\protocol\types\inventory\InventoryTransactionChangedSlotsHack; use pocketmine\network\mcpe\protocol\types\NetworkInventoryAction; use function count; class InventoryTransactionPacket extends DataPacket{ public const NETWORK_ID = ProtocolInfo::INVENTORY_TRANSACTION_PACKET; public const TYPE_NORMAL = 0; public const TYPE_MISMATCH = 1; public const TYPE_USE_ITEM = 2; public const TYPE_USE_ITEM_ON_ENTITY = 3; public const TYPE_RELEASE_ITEM = 4; public const USE_ITEM_ACTION_CLICK_BLOCK = 0; public const USE_ITEM_ACTION_CLICK_AIR = 1; public const USE_ITEM_ACTION_BREAK_BLOCK = 2; public const RELEASE_ITEM_ACTION_RELEASE = 0; //bow shoot public const RELEASE_ITEM_ACTION_CONSUME = 1; //eat food, drink potion public const USE_ITEM_ON_ENTITY_ACTION_INTERACT = 0; public const USE_ITEM_ON_ENTITY_ACTION_ATTACK = 1; /** @var int */ public $requestId; /** @var InventoryTransactionChangedSlotsHack[] */ public $requestChangedSlots; /** @var int */ public $transactionType; /** @var bool */ public $hasItemStackIds; /** @var NetworkInventoryAction[] */ public $actions = []; /** @var \stdClass */ public $trData; protected function decodePayload(){ $this->requestId = $this->readGenericTypeNetworkId(); $this->requestChangedSlots = []; if($this->requestId !== 0){ for($i = 0, $len = $this->getUnsignedVarInt(); $i < $len; ++$i){ $this->requestChangedSlots[] = InventoryTransactionChangedSlotsHack::read($this); } } $this->transactionType = $this->getUnsignedVarInt(); $this->hasItemStackIds = $this->getBool(); for($i = 0, $count = $this->getUnsignedVarInt(); $i < $count; ++$i){ $this->actions[] = $action = (new NetworkInventoryAction())->read($this, $this->hasItemStackIds); } $this->trData = new \stdClass(); switch($this->transactionType){ case self::TYPE_NORMAL: case self::TYPE_MISMATCH: //Regular ComplexInventoryTransaction doesn't read any extra data break; case self::TYPE_USE_ITEM: $this->trData->actionType = $this->getUnsignedVarInt(); $this->getBlockPosition($this->trData->x, $this->trData->y, $this->trData->z); $this->trData->face = $this->getVarInt(); $this->trData->hotbarSlot = $this->getVarInt(); $this->trData->itemInHand = $this->getSlot(); $this->trData->playerPos = $this->getVector3(); $this->trData->clickPos = $this->getVector3(); $this->trData->blockRuntimeId = $this->getUnsignedVarInt(); break; case self::TYPE_USE_ITEM_ON_ENTITY: $this->trData->entityRuntimeId = $this->getEntityRuntimeId(); $this->trData->actionType = $this->getUnsignedVarInt(); $this->trData->hotbarSlot = $this->getVarInt(); $this->trData->itemInHand = $this->getSlot(); $this->trData->playerPos = $this->getVector3(); $this->trData->clickPos = $this->getVector3(); break; case self::TYPE_RELEASE_ITEM: $this->trData->actionType = $this->getUnsignedVarInt(); 
$this->trData->hotbarSlot = $this->getVarInt(); $this->trData->itemInHand = $this->getSlot(); $this->trData->headPos = $this->getVector3(); break; default: throw new \UnexpectedValueException("Unknown transaction type $this->transactionType"); } } protected function encodePayload(){ $this->writeGenericTypeNetworkId($this->requestId); if($this->requestId !== 0){ $this->putUnsignedVarInt(count($this->requestChangedSlots)); foreach($this->requestChangedSlots as $changedSlots){ $changedSlots->write($this); } } $this->putUnsignedVarInt($this->transactionType); $this->putBool($this->hasItemStackIds); $this->putUnsignedVarInt(count($this->actions)); foreach($this->actions as $action){ $action->write($this, $this->hasItemStackIds); } switch($this->transactionType){ case self::TYPE_NORMAL: case self::TYPE_MISMATCH: break; case self::TYPE_USE_ITEM: $this->putUnsignedVarInt($this->trData->actionType); $this->putBlockPosition($this->trData->x, $this->trData->y, $this->trData->z); $this->putVarInt($this->trData->face); $this->putVarInt($this->trData->hotbarSlot); $this->putSlot($this->trData->itemInHand); $this->putVector3($this->trData->playerPos); $this->putVector3($this->trData->clickPos); $this->putUnsignedVarInt($this->trData->blockRuntimeId); break; case self::TYPE_USE_ITEM_ON_ENTITY: $this->putEntityRuntimeId($this->trData->entityRuntimeId); $this->putUnsignedVarInt($this->trData->actionType); $this->putVarInt($this->trData->hotbarSlot); $this->putSlot($this->trData->itemInHand); $this->putVector3($this->trData->playerPos); $this->putVector3($this->trData->clickPos); break; case self::TYPE_RELEASE_ITEM: $this->putUnsignedVarInt($this->trData->actionType); $this->putVarInt($this->trData->hotbarSlot); $this->putSlot($this->trData->itemInHand); $this->putVector3($this->trData->headPos); break; default: throw new \InvalidArgumentException("Unknown transaction type $this->transactionType"); } } public function handle(NetworkSession $session) : bool{ return $session->handleInventoryTransaction($this); } }
{ "pile_set_name": "Github" }
# Copyright (C) 2014 WebDevStudios # This file is distributed under the same license as the CMB2 (beta) package. # Translators: # Translators: msgid "" msgstr "" "Project-Id-Version: CMB2\n" "Report-Msgid-Bugs-To: http://wordpress.org/support/plugin/cmb2\n" "POT-Creation-Date: 2014-12-23 14:37:49+00:00\n" "PO-Revision-Date: 2014-12-23 21:54+0000\n" "Last-Translator: FxB <[email protected]>\n" "Language-Team: Indonesian (http://www.transifex.com/projects/p/cmb2/language/id/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: id\n" "Plural-Forms: nplurals=1; plural=0;\n" "X-Generator: grunt-wp-i18n 0.4.9\n" "X-Poedit-Basepath: ../\n" "X-Poedit-Bookmarks: \n" "X-Poedit-Country: United States\n" "X-Poedit-KeywordsList: __;_e;_x:1,2c;_ex:1,2c;_n:1,2;_nx:1,2,4c;_n_noop:1,2;_nx_noop:1,2,3c;esc_attr__;esc_html__;esc_attr_e;esc_html_e;esc_attr_x:1,2c;esc_html_x:1,2c;\n" "X-Poedit-SearchPath-0: .\n" "X-Poedit-SourceCharset: UTF-8\n" "X-Textdomain-Support: yes\n" #: CMB2.php:100 msgid "Metabox configuration is required to have an ID parameter" msgstr "" #: CMB2.php:287 msgid "Click to toggle" msgstr "" #: example-functions.php:53 msgid "Test Metabox" msgstr "" #: example-functions.php:61 example-functions.php:295 msgid "Test Text" msgstr "" #: example-functions.php:62 example-functions.php:73 example-functions.php:80 #: example-functions.php:87 example-functions.php:95 example-functions.php:102 #: example-functions.php:114 example-functions.php:120 #: example-functions.php:127 example-functions.php:142 #: example-functions.php:150 example-functions.php:157 #: example-functions.php:163 example-functions.php:169 #: example-functions.php:181 example-functions.php:192 #: example-functions.php:203 example-functions.php:214 #: example-functions.php:222 example-functions.php:229 #: example-functions.php:237 example-functions.php:243 #: example-functions.php:255 example-functions.php:296 #: example-functions.php:362 example-functions.php:369 #: example-functions.php:375 example-functions.php:381 #: example-functions.php:387 example-functions.php:393 #: example-functions.php:399 example-functions.php:417 msgid "field description (optional)" msgstr "" #: example-functions.php:72 msgid "Test Text Small" msgstr "" #: example-functions.php:79 msgid "Test Text Medium" msgstr "" #: example-functions.php:86 msgid "Website URL" msgstr "" #: example-functions.php:94 msgid "Test Text Email" msgstr "" #: example-functions.php:101 msgid "Test Time" msgstr "" #: example-functions.php:107 example-functions.php:108 msgid "Time zone" msgstr "" #: example-functions.php:113 msgid "Test Date Picker" msgstr "" #: example-functions.php:119 msgid "Test Date Picker (UNIX timestamp)" msgstr "" #: example-functions.php:126 msgid "Test Date/Time Picker Combo (UNIX timestamp)" msgstr "" #: example-functions.php:141 msgid "Test Money" msgstr "" #: example-functions.php:149 msgid "Test Color Picker" msgstr "" #: example-functions.php:156 msgid "Test Text Area" msgstr "" #: example-functions.php:162 msgid "Test Text Area Small" msgstr "" #: example-functions.php:168 msgid "Test Text Area for Code" msgstr "" #: example-functions.php:174 msgid "Test Title Weeeee" msgstr "" #: example-functions.php:175 msgid "This is a title description" msgstr "" #: example-functions.php:180 msgid "Test Select" msgstr "" #: example-functions.php:185 example-functions.php:196 #: example-functions.php:207 msgid "Option One" msgstr "" #: example-functions.php:186 example-functions.php:197 #: 
example-functions.php:208 msgid "Option Two" msgstr "" #: example-functions.php:187 example-functions.php:198 #: example-functions.php:209 msgid "Option Three" msgstr "" #: example-functions.php:191 msgid "Test Radio inline" msgstr "" #: example-functions.php:202 msgid "Test Radio" msgstr "" #: example-functions.php:213 msgid "Test Taxonomy Radio" msgstr "" #: example-functions.php:221 msgid "Test Taxonomy Select" msgstr "" #: example-functions.php:228 msgid "Test Taxonomy Multi Checkbox" msgstr "" #: example-functions.php:236 msgid "Test Checkbox" msgstr "" #: example-functions.php:242 msgid "Test Multi Checkbox" msgstr "" #: example-functions.php:247 msgid "Check One" msgstr "" #: example-functions.php:248 msgid "Check Two" msgstr "" #: example-functions.php:249 msgid "Check Three" msgstr "" #: example-functions.php:254 msgid "Test wysiwyg" msgstr "" #: example-functions.php:261 msgid "Test Image" msgstr "" #: example-functions.php:262 msgid "Upload an image or enter a URL." msgstr "" #: example-functions.php:267 msgid "Multiple Files" msgstr "" #: example-functions.php:268 msgid "Upload or add multiple images/attachments." msgstr "" #: example-functions.php:274 msgid "oEmbed" msgstr "" #: example-functions.php:275 msgid "" "Enter a youtube, twitter, or instagram URL. Supports services listed at <a " "href=\"http://codex.wordpress.org/Embeds\">http://codex.wordpress.org/Embeds</a>." msgstr "" #: example-functions.php:287 msgid "About Page Metabox" msgstr "" #: example-functions.php:308 msgid "Repeating Field Group" msgstr "" #: example-functions.php:314 msgid "Generates reusable form entries" msgstr "" #: example-functions.php:316 msgid "Entry {#}" msgstr "" #: example-functions.php:317 msgid "Add Another Entry" msgstr "" #: example-functions.php:318 msgid "Remove Entry" msgstr "" #: example-functions.php:355 msgid "User Profile Metabox" msgstr "" #: example-functions.php:361 msgid "Extra Info" msgstr "" #: example-functions.php:368 msgid "Avatar" msgstr "" #: example-functions.php:374 msgid "Facebook URL" msgstr "" #: example-functions.php:380 msgid "Twitter URL" msgstr "" #: example-functions.php:386 msgid "Google+ URL" msgstr "" #: example-functions.php:392 msgid "Linkedin URL" msgstr "" #: example-functions.php:398 msgid "User Field" msgstr "" #: example-functions.php:412 msgid "Theme Options Metabox" msgstr "" #: example-functions.php:416 msgid "Site Background Color" msgstr "" #: includes/CMB2_Ajax.php:37 msgid "Please Try Again" msgstr "" #: includes/CMB2_Ajax.php:133 tests/test-cmb-types.php:734 msgid "Remove Embed" msgstr "" #: includes/CMB2_Ajax.php:137 msgid "No oEmbed Results Found for %s. 
View more info at" msgstr "" #: includes/CMB2_Field.php:765 msgid "Add Group" msgstr "" #: includes/CMB2_Field.php:766 msgid "Remove Group" msgstr "" #: includes/CMB2_Field.php:787 msgid "None" msgstr "" #: includes/CMB2_Field.php:788 msgid "All" msgstr "" #: includes/CMB2_Types.php:290 msgid "Add Row" msgstr "" #: includes/CMB2_Types.php:349 includes/CMB2_Types.php:805 #: includes/CMB2_Types.php:882 includes/CMB2_hookup.php:173 #: tests/test-cmb-types.php:147 tests/test-cmb-types.php:155 #: tests/test-cmb-types.php:683 tests/test-cmb-types.php:708 msgid "Remove" msgstr "" #: includes/CMB2_Types.php:683 includes/CMB2_Types.php:731 msgid "No terms" msgstr "" #: includes/CMB2_Types.php:775 includes/CMB2_Types.php:830 #: tests/test-cmb-types.php:659 tests/test-cmb-types.php:685 #: tests/test-cmb-types.php:695 tests/test-cmb-types.php:710 msgid "Add or Upload File" msgstr "" #: includes/CMB2_Types.php:796 includes/CMB2_Types.php:877 #: includes/CMB2_hookup.php:172 msgid "Remove Image" msgstr "" #: includes/CMB2_Types.php:805 includes/CMB2_Types.php:882 #: includes/CMB2_hookup.php:174 tests/test-cmb-types.php:684 #: tests/test-cmb-types.php:709 msgid "File:" msgstr "" #: includes/CMB2_Types.php:805 includes/CMB2_Types.php:882 #: includes/CMB2_hookup.php:175 tests/test-cmb-types.php:682 #: tests/test-cmb-types.php:707 msgid "Download" msgstr "" #: includes/CMB2_hookup.php:123 includes/CMB2_hookup.php:160 msgid "Clear" msgstr "" #: includes/CMB2_hookup.php:124 msgid "Default" msgstr "" #: includes/CMB2_hookup.php:125 msgid "Select Color" msgstr "" #: includes/CMB2_hookup.php:126 msgid "Current Color" msgstr "" #: includes/CMB2_hookup.php:150 msgid "mm/dd/yy" msgstr "" #: includes/CMB2_hookup.php:151 msgid "Sunday, Monday, Tuesday, Wednesday, Thursday, Friday, Saturday" msgstr "" #: includes/CMB2_hookup.php:152 msgid "Su, Mo, Tu, We, Th, Fr, Sa" msgstr "" #: includes/CMB2_hookup.php:153 msgid "Sun, Mon, Tue, Wed, Thu, Fri, Sat" msgstr "" #: includes/CMB2_hookup.php:154 msgid "" "January, February, March, April, May, June, July, August, September, " "October, November, December" msgstr "" #: includes/CMB2_hookup.php:155 msgid "Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec" msgstr "" #: includes/CMB2_hookup.php:156 msgid "Next" msgstr "Berikutnya" #: includes/CMB2_hookup.php:157 msgid "Prev" msgstr "" #: includes/CMB2_hookup.php:158 msgid "Today" msgstr "" #: includes/CMB2_hookup.php:159 msgid "Done" msgstr "" #: includes/CMB2_hookup.php:171 msgid "Use this file" msgstr "" #: includes/CMB2_hookup.php:176 msgid "Select / Deselect All" msgstr "" #: includes/helper-functions.php:243 msgid "Save" msgstr "" #. Plugin Name of the plugin/theme msgid "CMB2 (beta)" msgstr "" #. Plugin URI of the plugin/theme msgid "https://github.com/WebDevStudios/CMB2" msgstr "" #. Description of the plugin/theme msgid "" "CMB2 will create metaboxes and forms with custom fields that will blow your " "mind." msgstr "" #. Author of the plugin/theme msgid "WebDevStudios" msgstr "" #. Author URI of the plugin/theme msgid "http://webdevstudios.com" msgstr ""
{ "pile_set_name": "Github" }
{ "name": "PACE", "main": "pace.js", "version": "1.0.2", "homepage": "http://github.hubspot.com/pace/docs/welcome", "authors": [ "Zack Bloom <[email protected]>", "Adam Schwartz <[email protected]>" ], "description": "Automatic page load progress bar", "keywords": [ "loading", "load", "pageload", "progress", "activity", "ajax", "spinner", "progress", "bar", "automatic", "client-side" ], "license": "MIT", "ignore": [ ".*", "Gruntfile.coffee", "bower_components", "docs", "node_modules", "package.json", "templates", "tests" ] }
{ "pile_set_name": "Github" }
function on_activate(parent, ability)
  local max_dist = parent:stats().attack_distance
  local targets = parent:targets():without_self()

  local targeter = parent:create_targeter(ability)
  targeter:set_selection_attackable()
  targeter:set_free_select(max_dist)
  targeter:set_shape_cone(parent:center_x(), parent:center_y(), 1.0, max_dist, math.pi / 8)
  targeter:add_all_effectable(targets)
  targeter:activate()
end

function on_target_select(parent, ability, targets)
  local targets = targets:to_table()
  for i = 1, #targets do
    parent:anim_weapon_attack(targets[i])
  end

  ability:activate(parent)
end
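As a purely illustrative aside (Python, not the game engine's actual implementation of set_shape_cone), a cone-shaped selection like the one in the script above can be approximated by a distance check plus an angular check against a facing direction; the half-angle of pi/8 and the 6-unit range below are example numbers only.

import math

def in_cone(origin_x, origin_y, facing, half_angle, max_dist, px, py):
    """Return True if (px, py) lies within max_dist of the origin and
    within +/- half_angle of the facing direction (angles in radians)."""
    dx, dy = px - origin_x, py - origin_y
    if math.hypot(dx, dy) > max_dist:
        return False
    # Smallest signed difference between the point's bearing and the facing.
    diff = (math.atan2(dy, dx) - facing + math.pi) % (2 * math.pi) - math.pi
    return abs(diff) <= half_angle

# Half-angle pi/8 as in the script above, reaching 6 units.
print(in_cone(0.0, 0.0, 0.0, math.pi / 8, 6.0, 5.0, 1.0))  # True: ~11 degrees off axis
print(in_cone(0.0, 0.0, 0.0, math.pi / 8, 6.0, 2.0, 2.0))  # False: 45 degrees off axis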
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tools.idea.uibuilder.actions

import com.intellij.openapi.editor.textarea.TextComponentEditorImpl
import com.intellij.openapi.project.Project
import javax.swing.text.JTextComponent

/**
 * TODO Write documentation
 */
class MorphEditor(project: Project?, textComponent: JTextComponent) :
    TextComponentEditorImpl(project, textComponent) {
}
{ "pile_set_name": "Github" }
using System;

using NUnit.Framework;

using BLToolkit.DataAccess;
using BLToolkit.Mapping;

namespace HowTo.DataAccess
{
	[TestFixture]
	public class MultiplePrimaryKey
	{
		[TableName("Person")]
		public class Person
		{
			[MapField("PersonID"), NonUpdatable]
			public int ID;

			// These fields are not the real primary key of the table.
			// They are made the primary key for demonstration purposes only.
			//
			[/*[a]*/PrimaryKey(1)/*[/a]*/]
			public string FirstName;
			[/*[a]*/PrimaryKey(2)/*[/a]*/]
			public string LastName;

			public string MiddleName;
		}

		[Test]
		public void Test()
		{
			SqlQuery<Person> query = new SqlQuery<Person>();

			Person person = query./*[a]*/SelectByKey("Tester", "Testerson")/*[/a]*/;

			Assert.IsNotNull(person);
		}
	}
}
{ "pile_set_name": "Github" }
foo
{ "pile_set_name": "Github" }
/*
 * Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazonaws.services.dynamodbv2.model.transform;

import com.amazonaws.services.dynamodbv2.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.amazonaws.util.json.AwsJsonReader;

/**
 * JSON unmarshaller for POJO GlobalSecondaryIndexUpdate
 */
class GlobalSecondaryIndexUpdateJsonUnmarshaller implements
        Unmarshaller<GlobalSecondaryIndexUpdate, JsonUnmarshallerContext> {

    public GlobalSecondaryIndexUpdate unmarshall(JsonUnmarshallerContext context) throws Exception {
        AwsJsonReader reader = context.getReader();
        if (!reader.isContainer()) {
            reader.skipValue();
            return null;
        }
        GlobalSecondaryIndexUpdate globalSecondaryIndexUpdate = new GlobalSecondaryIndexUpdate();

        reader.beginObject();
        while (reader.hasNext()) {
            String name = reader.nextName();
            if (name.equals("Update")) {
                globalSecondaryIndexUpdate
                        .setUpdate(UpdateGlobalSecondaryIndexActionJsonUnmarshaller.getInstance()
                                .unmarshall(context));
            } else if (name.equals("Create")) {
                globalSecondaryIndexUpdate
                        .setCreate(CreateGlobalSecondaryIndexActionJsonUnmarshaller.getInstance()
                                .unmarshall(context));
            } else if (name.equals("Delete")) {
                globalSecondaryIndexUpdate
                        .setDelete(DeleteGlobalSecondaryIndexActionJsonUnmarshaller.getInstance()
                                .unmarshall(context));
            } else {
                reader.skipValue();
            }
        }
        reader.endObject();
        return globalSecondaryIndexUpdate;
    }

    private static GlobalSecondaryIndexUpdateJsonUnmarshaller instance;

    public static GlobalSecondaryIndexUpdateJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new GlobalSecondaryIndexUpdateJsonUnmarshaller();
        return instance;
    }
}
{ "pile_set_name": "Github" }
// Copyright 2020 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include "common/assert.h"
#include "common/logging/log.h"
#include "core/core.h"
#include "core/frontend/applets/controller.h"
#include "core/hle/service/hid/controllers/npad.h"
#include "core/hle/service/hid/hid.h"
#include "core/hle/service/sm/sm.h"

namespace Core::Frontend {

ControllerApplet::~ControllerApplet() = default;

DefaultControllerApplet::~DefaultControllerApplet() = default;

void DefaultControllerApplet::ReconfigureControllers(std::function<void()> callback,
                                                     ControllerParameters parameters) const {
    LOG_INFO(Service_HID, "called, deducing the best configuration based on the given parameters!");

    auto& npad = Core::System::GetInstance()
                     .ServiceManager()
                     .GetService<Service::HID::Hid>("hid")
                     ->GetAppletResource()
                     ->GetController<Service::HID::Controller_NPad>(Service::HID::HidController::NPad);

    auto& players = Settings::values.players;

    const std::size_t min_supported_players =
        parameters.enable_single_mode ? 1 : parameters.min_players;

    // Disconnect Handheld first.
    npad.DisconnectNPadAtIndex(8);

    // Deduce the best configuration based on the input parameters.
    for (std::size_t index = 0; index < players.size() - 2; ++index) {
        // First, disconnect all controllers regardless of the value of keep_controllers_connected.
        // This makes it easy to connect the desired controllers.
        npad.DisconnectNPadAtIndex(index);

        // Only connect the minimum number of required players.
        if (index >= min_supported_players) {
            continue;
        }

        // Connect controllers based on the following priority list from highest to lowest priority:
        // Pro Controller -> Dual Joycons -> Left Joycon/Right Joycon -> Handheld
        if (parameters.allow_pro_controller) {
            npad.AddNewControllerAt(
                npad.MapSettingsTypeToNPad(Settings::ControllerType::ProController), index);
        } else if (parameters.allow_dual_joycons) {
            npad.AddNewControllerAt(
                npad.MapSettingsTypeToNPad(Settings::ControllerType::DualJoyconDetached), index);
        } else if (parameters.allow_left_joycon && parameters.allow_right_joycon) {
            // Assign left joycons to even player indices and right joycons to odd player indices.
            // We do this since Captain Toad Treasure Tracker expects a left joycon for Player 1 and
            // a right Joycon for Player 2 in 2 Player Assist mode.
            if (index % 2 == 0) {
                npad.AddNewControllerAt(
                    npad.MapSettingsTypeToNPad(Settings::ControllerType::LeftJoycon), index);
            } else {
                npad.AddNewControllerAt(
                    npad.MapSettingsTypeToNPad(Settings::ControllerType::RightJoycon), index);
            }
        } else if (index == 0 && parameters.enable_single_mode && parameters.allow_handheld &&
                   !Settings::values.use_docked_mode) {
            // We should *never* reach here under any normal circumstances.
            npad.AddNewControllerAt(npad.MapSettingsTypeToNPad(Settings::ControllerType::Handheld),
                                    index);
        } else {
            UNREACHABLE_MSG("Unable to add a new controller based on the given parameters!");
        }
    }

    callback();
}

} // namespace Core::Frontend
{ "pile_set_name": "Github" }
"""Create portable serialized representations of Python objects. See module cPickle for a (much) faster implementation. See module copy_reg for a mechanism for registering custom picklers. See module pickletools source for extensive comments. Classes: Pickler Unpickler Functions: dump(object, file) dumps(object) -> string load(file) -> object loads(string) -> object Misc variables: __version__ format_version compatible_formats """ __version__ = "$Revision: 72223 $" # Code version from types import * from copy_reg import dispatch_table from copy_reg import _extension_registry, _inverted_registry, _extension_cache import marshal import sys import struct import re __all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler", "Unpickler", "dump", "dumps", "load", "loads"] # These are purely informational; no code uses these. format_version = "2.0" # File format version we write compatible_formats = ["1.0", # Original protocol 0 "1.1", # Protocol 0 with INST added "1.2", # Original protocol 1 "1.3", # Protocol 1 with BINFLOAT added "2.0", # Protocol 2 ] # Old format versions we can read # Keep in synch with cPickle. This is the highest protocol number we # know how to read. HIGHEST_PROTOCOL = 2 # Why use struct.pack() for pickling but marshal.loads() for # unpickling? struct.pack() is 40% faster than marshal.dumps(), but # marshal.loads() is twice as fast as struct.unpack()! mloads = marshal.loads class PickleError(Exception): """A common base class for the other pickling exceptions.""" pass class PicklingError(PickleError): """This exception is raised when an unpicklable object is passed to the dump() method. """ pass class UnpicklingError(PickleError): """This exception is raised when there is a problem unpickling an object, such as a security violation. Note that other exceptions may also be raised during unpickling, including (but not necessarily limited to) AttributeError, EOFError, ImportError, and IndexError. """ pass # An instance of _Stop is raised by Unpickler.load_stop() in response to # the STOP opcode, passing the object that is the result of unpickling. class _Stop(Exception): def __init__(self, value): self.value = value # Jython has PyStringMap; it's a dict subclass with string keys try: from org.python.core import PyStringMap except ImportError: PyStringMap = None # UnicodeType may or may not be exported (normally imported from types) try: UnicodeType except NameError: UnicodeType = None # Pickle opcodes. See pickletools.py for extensive docs. The listing # here is in kind-of alphabetical order of 1-character pickle code. # pickletools groups them by purpose. MARK = '(' # push special markobject on stack STOP = '.' 
# every pickle ends with STOP POP = '0' # discard topmost stack item POP_MARK = '1' # discard stack top through topmost markobject DUP = '2' # duplicate top stack item FLOAT = 'F' # push float object; decimal string argument INT = 'I' # push integer or bool; decimal string argument BININT = 'J' # push four-byte signed int BININT1 = 'K' # push 1-byte unsigned int LONG = 'L' # push long; decimal string argument BININT2 = 'M' # push 2-byte unsigned int NONE = 'N' # push None PERSID = 'P' # push persistent object; id is taken from string arg BINPERSID = 'Q' # " " " ; " " " " stack REDUCE = 'R' # apply callable to argtuple, both on stack STRING = 'S' # push string; NL-terminated string argument BINSTRING = 'T' # push string; counted binary string argument SHORT_BINSTRING = 'U' # " " ; " " " " < 256 bytes UNICODE = 'V' # push Unicode string; raw-unicode-escaped'd argument BINUNICODE = 'X' # " " " ; counted UTF-8 string argument APPEND = 'a' # append stack top to list below it BUILD = 'b' # call __setstate__ or __dict__.update() GLOBAL = 'c' # push self.find_class(modname, name); 2 string args DICT = 'd' # build a dict from stack items EMPTY_DICT = '}' # push empty dict APPENDS = 'e' # extend list on stack by topmost stack slice GET = 'g' # push item from memo on stack; index is string arg BINGET = 'h' # " " " " " " ; " " 1-byte arg INST = 'i' # build & push class instance LONG_BINGET = 'j' # push item from memo on stack; index is 4-byte arg LIST = 'l' # build list from topmost stack items EMPTY_LIST = ']' # push empty list OBJ = 'o' # build & push class instance PUT = 'p' # store stack top in memo; index is string arg BINPUT = 'q' # " " " " " ; " " 1-byte arg LONG_BINPUT = 'r' # " " " " " ; " " 4-byte arg SETITEM = 's' # add key+value pair to dict TUPLE = 't' # build tuple from topmost stack items EMPTY_TUPLE = ')' # push empty tuple SETITEMS = 'u' # modify dict by adding topmost key+value pairs BINFLOAT = 'G' # push float; arg is 8-byte float encoding TRUE = 'I01\n' # not an opcode; see INT docs in pickletools.py FALSE = 'I00\n' # not an opcode; see INT docs in pickletools.py # Protocol 2 PROTO = '\x80' # identify pickle protocol NEWOBJ = '\x81' # build object by applying cls.__new__ to argtuple EXT1 = '\x82' # push object from extension registry; 1-byte index EXT2 = '\x83' # ditto, but 2-byte index EXT4 = '\x84' # ditto, but 4-byte index TUPLE1 = '\x85' # build 1-tuple from stack top TUPLE2 = '\x86' # build 2-tuple from two topmost stack items TUPLE3 = '\x87' # build 3-tuple from three topmost stack items NEWTRUE = '\x88' # push True NEWFALSE = '\x89' # push False LONG1 = '\x8a' # push long from < 256 bytes LONG4 = '\x8b' # push really big long _tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3] __all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)]) del x # Pickling machinery class Pickler: def __init__(self, file, protocol=None): """This takes a file-like object for writing a pickle data stream. The optional protocol argument tells the pickler to use the given protocol; supported protocols are 0, 1, 2. The default protocol is 0, to be backwards compatible. (Protocol 0 is the only protocol that can be written to a file opened in text mode and read back successfully. When using a protocol higher than 0, make sure the file is opened in binary mode, both when pickling and unpickling.) Protocol 1 is more efficient than protocol 0; protocol 2 is more efficient than protocol 1. Specifying a negative protocol version selects the highest protocol version supported. 
The higher the protocol used, the more recent the version of Python needed to read the pickle produced. The file parameter must have a write() method that accepts a single string argument. It can thus be an open file object, a StringIO object, or any other custom object that meets this interface. """ if protocol is None: protocol = 0 if protocol < 0: protocol = HIGHEST_PROTOCOL elif not 0 <= protocol <= HIGHEST_PROTOCOL: raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL) self.write = file.write self.memo = {} self.proto = int(protocol) self.bin = protocol >= 1 self.fast = 0 def clear_memo(self): """Clears the pickler's "memo". The memo is the data structure that remembers which objects the pickler has already seen, so that shared or recursive objects are pickled by reference and not by value. This method is useful when re-using picklers. """ self.memo.clear() def dump(self, obj): """Write a pickled representation of obj to the open file.""" if self.proto >= 2: self.write(PROTO + chr(self.proto)) self.save(obj) self.write(STOP) def memoize(self, obj): """Store an object in the memo.""" # The Pickler memo is a dictionary mapping object ids to 2-tuples # that contain the Unpickler memo key and the object being memoized. # The memo key is written to the pickle and will become # the key in the Unpickler's memo. The object is stored in the # Pickler memo so that transient objects are kept alive during # pickling. # The use of the Unpickler memo length as the memo key is just a # convention. The only requirement is that the memo values be unique. # But there appears no advantage to any other scheme, and this # scheme allows the Unpickler memo to be implemented as a plain (but # growable) array, indexed by memo key. if self.fast: return assert id(obj) not in self.memo memo_len = len(self.memo) self.write(self.put(memo_len)) self.memo[id(obj)] = memo_len, obj # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i. def put(self, i, pack=struct.pack): if self.bin: if i < 256: return BINPUT + chr(i) else: return LONG_BINPUT + pack("<i", i) return PUT + repr(i) + '\n' # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i. 
def get(self, i, pack=struct.pack): if self.bin: if i < 256: return BINGET + chr(i) else: return LONG_BINGET + pack("<i", i) return GET + repr(i) + '\n' def save(self, obj): # Check for persistent id (defined by a subclass) pid = self.persistent_id(obj) if pid is not None: self.save_pers(pid) return # Check the memo x = self.memo.get(id(obj)) if x: self.write(self.get(x[0])) return # Check the type dispatch table t = type(obj) f = self.dispatch.get(t) if f: f(self, obj) # Call unbound method with explicit self return # Check copy_reg.dispatch_table reduce = dispatch_table.get(t) if reduce: rv = reduce(obj) else: # Check for a class with a custom metaclass; treat as regular class try: issc = issubclass(t, TypeType) except TypeError: # t is not a class (old Boost; see SF #502085) issc = 0 if issc: self.save_global(obj) return # Check for a __reduce_ex__ method, fall back to __reduce__ reduce = getattr(obj, "__reduce_ex__", None) if reduce: rv = reduce(self.proto) else: reduce = getattr(obj, "__reduce__", None) if reduce: rv = reduce() else: raise PicklingError("Can't pickle %r object: %r" % (t.__name__, obj)) # Check for string returned by reduce(), meaning "save as global" if type(rv) is StringType: self.save_global(obj, rv) return # Assert that reduce() returned a tuple if type(rv) is not TupleType: raise PicklingError("%s must return string or tuple" % reduce) # Assert that it returned an appropriately sized tuple l = len(rv) if not (2 <= l <= 5): raise PicklingError("Tuple returned by %s must have " "two to five elements" % reduce) # Save the reduce() output and finally memoize the object self.save_reduce(obj=obj, *rv) def persistent_id(self, obj): # This exists so a subclass can override it return None def save_pers(self, pid): # Save a persistent id reference if self.bin: self.save(pid) self.write(BINPERSID) else: self.write(PERSID + str(pid) + '\n') def save_reduce(self, func, args, state=None, listitems=None, dictitems=None, obj=None): # This API is called by some subclasses # Assert that args is a tuple or None if not isinstance(args, TupleType): raise PicklingError("args from reduce() should be a tuple") # Assert that func is callable if not hasattr(func, '__call__'): raise PicklingError("func from reduce should be callable") save = self.save write = self.write # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__": # A __reduce__ implementation can direct protocol 2 to # use the more efficient NEWOBJ opcode, while still # allowing protocol 0 and 1 to work normally. For this to # work, the function returned by __reduce__ should be # called __newobj__, and its first argument should be a # new-style class. The implementation for __newobj__ # should be as follows, although pickle has no way to # verify this: # # def __newobj__(cls, *args): # return cls.__new__(cls, *args) # # Protocols 0 and 1 will pickle a reference to __newobj__, # while protocol 2 (and above) will pickle a reference to # cls, the remaining args tuple, and the NEWOBJ code, # which calls cls.__new__(cls, *args) at unpickling time # (see load_newobj below). If __reduce__ returns a # three-tuple, the state from the third tuple item will be # pickled regardless of the protocol, calling __setstate__ # at unpickling time (see load_build below). # # Note that no standard __newobj__ implementation exists; # you have to provide your own. 
This is to enforce # compatibility with Python 2.2 (pickles written using # protocol 0 or 1 in Python 2.3 should be unpicklable by # Python 2.2). cls = args[0] if not hasattr(cls, "__new__"): raise PicklingError( "args[0] from __newobj__ args has no __new__") if obj is not None and cls is not obj.__class__: raise PicklingError( "args[0] from __newobj__ args has the wrong class") args = args[1:] save(cls) save(args) write(NEWOBJ) else: save(func) save(args) write(REDUCE) if obj is not None: # If the object is already in the memo, this means it is # recursive. In this case, throw away everything we put on the # stack, and fetch the object back from the memo. if id(obj) in self.memo: write(POP + self.get(self.memo[id(obj)][0])) else: self.memoize(obj) # More new special cases (that work with older protocols as # well): when __reduce__ returns a tuple with 4 or 5 items, # the 4th and 5th item should be iterators that provide list # items and dict items (as (key, value) tuples), or None. if listitems is not None: self._batch_appends(listitems) if dictitems is not None: self._batch_setitems(dictitems) if state is not None: save(state) write(BUILD) # Methods below this point are dispatched through the dispatch table dispatch = {} def save_none(self, obj): self.write(NONE) dispatch[NoneType] = save_none def save_bool(self, obj): if self.proto >= 2: self.write(obj and NEWTRUE or NEWFALSE) else: self.write(obj and TRUE or FALSE) dispatch[bool] = save_bool def save_int(self, obj, pack=struct.pack): if self.bin: # If the int is small enough to fit in a signed 4-byte 2's-comp # format, we can store it more efficiently than the general # case. # First one- and two-byte unsigned ints: if obj >= 0: if obj <= 0xff: self.write(BININT1 + chr(obj)) return if obj <= 0xffff: self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8)) return # Next check for 4-byte signed ints: high_bits = obj >> 31 # note that Python shift sign-extends if high_bits == 0 or high_bits == -1: # All high bits are copies of bit 2**31, so the value # fits in a 4-byte signed int. self.write(BININT + pack("<i", obj)) return # Text pickle, or int too big to fit in signed 4-byte format. 
self.write(INT + repr(obj) + '\n') dispatch[IntType] = save_int def save_long(self, obj, pack=struct.pack): if self.proto >= 2: bytes = encode_long(obj) n = len(bytes) if n < 256: self.write(LONG1 + chr(n) + bytes) else: self.write(LONG4 + pack("<i", n) + bytes) return self.write(LONG + repr(obj) + '\n') dispatch[LongType] = save_long def save_float(self, obj, pack=struct.pack): if self.bin: self.write(BINFLOAT + pack('>d', obj)) else: self.write(FLOAT + repr(obj) + '\n') dispatch[FloatType] = save_float def save_string(self, obj, pack=struct.pack): if self.bin: n = len(obj) if n < 256: self.write(SHORT_BINSTRING + chr(n) + obj) else: self.write(BINSTRING + pack("<i", n) + obj) else: self.write(STRING + repr(obj) + '\n') self.memoize(obj) dispatch[StringType] = save_string def save_unicode(self, obj, pack=struct.pack): if self.bin: encoding = obj.encode('utf-8') n = len(encoding) self.write(BINUNICODE + pack("<i", n) + encoding) else: obj = obj.replace("\\", "\\u005c") obj = obj.replace("\n", "\\u000a") self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n') self.memoize(obj) dispatch[UnicodeType] = save_unicode if StringType is UnicodeType: # This is true for Jython def save_string(self, obj, pack=struct.pack): unicode = obj.isunicode() if self.bin: if unicode: obj = obj.encode("utf-8") l = len(obj) if l < 256 and not unicode: self.write(SHORT_BINSTRING + chr(l) + obj) else: s = pack("<i", l) if unicode: self.write(BINUNICODE + s + obj) else: self.write(BINSTRING + s + obj) else: if unicode: obj = obj.replace("\\", "\\u005c") obj = obj.replace("\n", "\\u000a") obj = obj.encode('raw-unicode-escape') self.write(UNICODE + obj + '\n') else: self.write(STRING + repr(obj) + '\n') self.memoize(obj) dispatch[StringType] = save_string def save_tuple(self, obj): write = self.write proto = self.proto n = len(obj) if n == 0: if proto: write(EMPTY_TUPLE) else: write(MARK + TUPLE) return save = self.save memo = self.memo if n <= 3 and proto >= 2: for element in obj: save(element) # Subtle. Same as in the big comment below. if id(obj) in memo: get = self.get(memo[id(obj)][0]) write(POP * n + get) else: write(_tuplesize2code[n]) self.memoize(obj) return # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple # has more than 3 elements. write(MARK) for element in obj: save(element) if id(obj) in memo: # Subtle. d was not in memo when we entered save_tuple(), so # the process of saving the tuple's elements must have saved # the tuple itself: the tuple is recursive. The proper action # now is to throw away everything we put on the stack, and # simply GET the tuple (it's already constructed). This check # could have been done in the "for element" loop instead, but # recursive tuples are a rare thing. get = self.get(memo[id(obj)][0]) if proto: write(POP_MARK + get) else: # proto 0 -- POP_MARK not available write(POP * (n+1) + get) return # No recursion. self.write(TUPLE) self.memoize(obj) dispatch[TupleType] = save_tuple # save_empty_tuple() isn't used by anything in Python 2.3. However, I # found a Pickler subclass in Zope3 that calls it, so it's not harmless # to remove it. def save_empty_tuple(self, obj): self.write(EMPTY_TUPLE) def save_list(self, obj): write = self.write if self.bin: write(EMPTY_LIST) else: # proto 0 -- can't use EMPTY_LIST write(MARK + LIST) self.memoize(obj) self._batch_appends(iter(obj)) dispatch[ListType] = save_list # Keep in synch with cPickle's BATCHSIZE. Nothing will break if it gets # out of synch, though. 
_BATCHSIZE = 1000 def _batch_appends(self, items): # Helper to batch up APPENDS sequences save = self.save write = self.write if not self.bin: for x in items: save(x) write(APPEND) return r = xrange(self._BATCHSIZE) while items is not None: tmp = [] for i in r: try: x = items.next() tmp.append(x) except StopIteration: items = None break n = len(tmp) if n > 1: write(MARK) for x in tmp: save(x) write(APPENDS) elif n: save(tmp[0]) write(APPEND) # else tmp is empty, and we're done def save_dict(self, obj): write = self.write if self.bin: write(EMPTY_DICT) else: # proto 0 -- can't use EMPTY_DICT write(MARK + DICT) self.memoize(obj) self._batch_setitems(obj.iteritems()) dispatch[DictionaryType] = save_dict if not PyStringMap is None: dispatch[PyStringMap] = save_dict def _batch_setitems(self, items): # Helper to batch up SETITEMS sequences; proto >= 1 only save = self.save write = self.write if not self.bin: for k, v in items: save(k) save(v) write(SETITEM) return r = xrange(self._BATCHSIZE) while items is not None: tmp = [] for i in r: try: tmp.append(items.next()) except StopIteration: items = None break n = len(tmp) if n > 1: write(MARK) for k, v in tmp: save(k) save(v) write(SETITEMS) elif n: k, v = tmp[0] save(k) save(v) write(SETITEM) # else tmp is empty, and we're done def save_inst(self, obj): cls = obj.__class__ memo = self.memo write = self.write save = self.save if hasattr(obj, '__getinitargs__'): args = obj.__getinitargs__() len(args) # XXX Assert it's a sequence _keep_alive(args, memo) else: args = () write(MARK) if self.bin: save(cls) for arg in args: save(arg) write(OBJ) else: for arg in args: save(arg) write(INST + cls.__module__ + '\n' + cls.__name__ + '\n') self.memoize(obj) try: getstate = obj.__getstate__ except AttributeError: stuff = obj.__dict__ else: stuff = getstate() _keep_alive(stuff, memo) save(stuff) write(BUILD) dispatch[InstanceType] = save_inst def save_global(self, obj, name=None, pack=struct.pack): write = self.write memo = self.memo if name is None: name = obj.__name__ module = getattr(obj, "__module__", None) if module is None: module = whichmodule(obj, name) try: __import__(module) mod = sys.modules[module] klass = getattr(mod, name) except (ImportError, KeyError, AttributeError): raise PicklingError( "Can't pickle %r: it's not found as %s.%s" % (obj, module, name)) else: if klass is not obj: raise PicklingError( "Can't pickle %r: it's not the same object as %s.%s" % (obj, module, name)) if self.proto >= 2: code = _extension_registry.get((module, name)) if code: assert code > 0 if code <= 0xff: write(EXT1 + chr(code)) elif code <= 0xffff: write("%c%c%c" % (EXT2, code&0xff, code>>8)) else: write(EXT4 + pack("<i", code)) return write(GLOBAL + module + '\n' + name + '\n') self.memoize(obj) dispatch[ClassType] = save_global dispatch[FunctionType] = save_global dispatch[BuiltinFunctionType] = save_global dispatch[TypeType] = save_global # Pickling helpers def _keep_alive(x, memo): """Keeps a reference to the object x in the memo. Because we remember objects by their id, we have to assure that possibly temporary objects are kept alive by referencing them. We store a reference at the id of the memo, which should normally not be used unless someone tries to deepcopy the memo itself... """ try: memo[id(memo)].append(x) except KeyError: # aha, this is the first one :-) memo[id(memo)]=[x] # A cache for whichmodule(), mapping a function object to the name of # the module in which the function was found. 
classmap = {} # called classmap for backwards compatibility def whichmodule(func, funcname): """Figure out the module in which a function occurs. Search sys.modules for the module. Cache in classmap. Return a module name. If the function cannot be found, return "__main__". """ # Python functions should always get an __module__ from their globals. mod = getattr(func, "__module__", None) if mod is not None: return mod if func in classmap: return classmap[func] for name, module in sys.modules.items(): if module is None: continue # skip dummy package entries if name != '__main__' and getattr(module, funcname, None) is func: break else: name = '__main__' classmap[func] = name return name # Unpickling machinery class Unpickler: def __init__(self, file): """This takes a file-like object for reading a pickle data stream. The protocol version of the pickle is detected automatically, so no proto argument is needed. The file-like object must have two methods, a read() method that takes an integer argument, and a readline() method that requires no arguments. Both methods should return a string. Thus file-like object can be a file object opened for reading, a StringIO object, or any other custom object that meets this interface. """ self.readline = file.readline self.read = file.read self.memo = {} def load(self): """Read a pickled object representation from the open file. Return the reconstituted object hierarchy specified in the file. """ self.mark = object() # any new unique object self.stack = [] self.append = self.stack.append read = self.read dispatch = self.dispatch try: while 1: key = read(1) dispatch[key](self) except _Stop, stopinst: return stopinst.value # Return largest index k such that self.stack[k] is self.mark. # If the stack doesn't contain a mark, eventually raises IndexError. # This could be sped by maintaining another stack, of indices at which # the mark appears. For that matter, the latter stack would suffice, # and we wouldn't need to push mark objects on self.stack at all. # Doing so is probably a good thing, though, since if the pickle is # corrupt (or hostile) we may get a clue from finding self.mark embedded # in unpickled objects. 
def marker(self): stack = self.stack mark = self.mark k = len(stack)-1 while stack[k] is not mark: k = k-1 return k dispatch = {} def load_eof(self): raise EOFError dispatch[''] = load_eof def load_proto(self): proto = ord(self.read(1)) if not 0 <= proto <= 2: raise ValueError, "unsupported pickle protocol: %d" % proto dispatch[PROTO] = load_proto def load_persid(self): pid = self.readline()[:-1] self.append(self.persistent_load(pid)) dispatch[PERSID] = load_persid def load_binpersid(self): pid = self.stack.pop() self.append(self.persistent_load(pid)) dispatch[BINPERSID] = load_binpersid def load_none(self): self.append(None) dispatch[NONE] = load_none def load_false(self): self.append(False) dispatch[NEWFALSE] = load_false def load_true(self): self.append(True) dispatch[NEWTRUE] = load_true def load_int(self): data = self.readline() if data == FALSE[1:]: val = False elif data == TRUE[1:]: val = True else: try: val = int(data) except ValueError: val = long(data) self.append(val) dispatch[INT] = load_int def load_binint(self): self.append(mloads('i' + self.read(4))) dispatch[BININT] = load_binint def load_binint1(self): self.append(ord(self.read(1))) dispatch[BININT1] = load_binint1 def load_binint2(self): self.append(mloads('i' + self.read(2) + '\000\000')) dispatch[BININT2] = load_binint2 def load_long(self): self.append(long(self.readline()[:-1], 0)) dispatch[LONG] = load_long def load_long1(self): n = ord(self.read(1)) bytes = self.read(n) self.append(decode_long(bytes)) dispatch[LONG1] = load_long1 def load_long4(self): n = mloads('i' + self.read(4)) bytes = self.read(n) self.append(decode_long(bytes)) dispatch[LONG4] = load_long4 def load_float(self): self.append(float(self.readline()[:-1])) dispatch[FLOAT] = load_float def load_binfloat(self, unpack=struct.unpack): self.append(unpack('>d', self.read(8))[0]) dispatch[BINFLOAT] = load_binfloat def load_string(self): rep = self.readline()[:-1] for q in "\"'": # double or single quote if rep.startswith(q): if len(rep) < 2 or not rep.endswith(q): raise ValueError, "insecure string pickle" rep = rep[len(q):-len(q)] break else: raise ValueError, "insecure string pickle" self.append(rep.decode("string-escape")) dispatch[STRING] = load_string def load_binstring(self): len = mloads('i' + self.read(4)) self.append(self.read(len)) dispatch[BINSTRING] = load_binstring def load_unicode(self): self.append(unicode(self.readline()[:-1],'raw-unicode-escape')) dispatch[UNICODE] = load_unicode def load_binunicode(self): len = mloads('i' + self.read(4)) self.append(unicode(self.read(len),'utf-8')) dispatch[BINUNICODE] = load_binunicode def load_short_binstring(self): len = ord(self.read(1)) self.append(self.read(len)) dispatch[SHORT_BINSTRING] = load_short_binstring def load_tuple(self): k = self.marker() self.stack[k:] = [tuple(self.stack[k+1:])] dispatch[TUPLE] = load_tuple def load_empty_tuple(self): self.stack.append(()) dispatch[EMPTY_TUPLE] = load_empty_tuple def load_tuple1(self): self.stack[-1] = (self.stack[-1],) dispatch[TUPLE1] = load_tuple1 def load_tuple2(self): self.stack[-2:] = [(self.stack[-2], self.stack[-1])] dispatch[TUPLE2] = load_tuple2 def load_tuple3(self): self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])] dispatch[TUPLE3] = load_tuple3 def load_empty_list(self): self.stack.append([]) dispatch[EMPTY_LIST] = load_empty_list def load_empty_dictionary(self): self.stack.append({}) dispatch[EMPTY_DICT] = load_empty_dictionary def load_list(self): k = self.marker() self.stack[k:] = [self.stack[k+1:]] dispatch[LIST] = 
load_list def load_dict(self): k = self.marker() d = {} items = self.stack[k+1:] for i in range(0, len(items), 2): key = items[i] value = items[i+1] d[key] = value self.stack[k:] = [d] dispatch[DICT] = load_dict # INST and OBJ differ only in how they get a class object. It's not # only sensible to do the rest in a common routine, the two routines # previously diverged and grew different bugs. # klass is the class to instantiate, and k points to the topmost mark # object, following which are the arguments for klass.__init__. def _instantiate(self, klass, k): args = tuple(self.stack[k+1:]) del self.stack[k:] instantiated = 0 if (not args and type(klass) is ClassType and not hasattr(klass, "__getinitargs__")): try: value = _EmptyClass() value.__class__ = klass instantiated = 1 except RuntimeError: # In restricted execution, assignment to inst.__class__ is # prohibited pass if not instantiated: try: value = klass(*args) except TypeError, err: raise TypeError, "in constructor for %s: %s" % ( klass.__name__, str(err)), sys.exc_info()[2] self.append(value) def load_inst(self): module = self.readline()[:-1] name = self.readline()[:-1] klass = self.find_class(module, name) self._instantiate(klass, self.marker()) dispatch[INST] = load_inst def load_obj(self): # Stack is ... markobject classobject arg1 arg2 ... k = self.marker() klass = self.stack.pop(k+1) self._instantiate(klass, k) dispatch[OBJ] = load_obj def load_newobj(self): args = self.stack.pop() cls = self.stack[-1] obj = cls.__new__(cls, *args) self.stack[-1] = obj dispatch[NEWOBJ] = load_newobj def load_global(self): module = self.readline()[:-1] name = self.readline()[:-1] klass = self.find_class(module, name) self.append(klass) dispatch[GLOBAL] = load_global def load_ext1(self): code = ord(self.read(1)) self.get_extension(code) dispatch[EXT1] = load_ext1 def load_ext2(self): code = mloads('i' + self.read(2) + '\000\000') self.get_extension(code) dispatch[EXT2] = load_ext2 def load_ext4(self): code = mloads('i' + self.read(4)) self.get_extension(code) dispatch[EXT4] = load_ext4 def get_extension(self, code): nil = [] obj = _extension_cache.get(code, nil) if obj is not nil: self.append(obj) return key = _inverted_registry.get(code) if not key: raise ValueError("unregistered extension code %d" % code) obj = self.find_class(*key) _extension_cache[code] = obj self.append(obj) def find_class(self, module, name): # Subclasses may override this __import__(module) mod = sys.modules[module] klass = getattr(mod, name) return klass def load_reduce(self): stack = self.stack args = stack.pop() func = stack[-1] value = func(*args) stack[-1] = value dispatch[REDUCE] = load_reduce def load_pop(self): del self.stack[-1] dispatch[POP] = load_pop def load_pop_mark(self): k = self.marker() del self.stack[k:] dispatch[POP_MARK] = load_pop_mark def load_dup(self): self.append(self.stack[-1]) dispatch[DUP] = load_dup def load_get(self): self.append(self.memo[self.readline()[:-1]]) dispatch[GET] = load_get def load_binget(self): i = ord(self.read(1)) self.append(self.memo[repr(i)]) dispatch[BINGET] = load_binget def load_long_binget(self): i = mloads('i' + self.read(4)) self.append(self.memo[repr(i)]) dispatch[LONG_BINGET] = load_long_binget def load_put(self): self.memo[self.readline()[:-1]] = self.stack[-1] dispatch[PUT] = load_put def load_binput(self): i = ord(self.read(1)) self.memo[repr(i)] = self.stack[-1] dispatch[BINPUT] = load_binput def load_long_binput(self): i = mloads('i' + self.read(4)) self.memo[repr(i)] = self.stack[-1] dispatch[LONG_BINPUT] = 
load_long_binput def load_append(self): stack = self.stack value = stack.pop() list = stack[-1] list.append(value) dispatch[APPEND] = load_append def load_appends(self): stack = self.stack mark = self.marker() list = stack[mark - 1] list.extend(stack[mark + 1:]) del stack[mark:] dispatch[APPENDS] = load_appends def load_setitem(self): stack = self.stack value = stack.pop() key = stack.pop() dict = stack[-1] dict[key] = value dispatch[SETITEM] = load_setitem def load_setitems(self): stack = self.stack mark = self.marker() dict = stack[mark - 1] for i in range(mark + 1, len(stack), 2): dict[stack[i]] = stack[i + 1] del stack[mark:] dispatch[SETITEMS] = load_setitems def load_build(self): stack = self.stack state = stack.pop() inst = stack[-1] setstate = getattr(inst, "__setstate__", None) if setstate: setstate(state) return slotstate = None if isinstance(state, tuple) and len(state) == 2: state, slotstate = state if state: try: d = inst.__dict__ try: for k, v in state.iteritems(): d[intern(k)] = v # keys in state don't have to be strings # don't blow up, but don't go out of our way except TypeError: d.update(state) except RuntimeError: # XXX In restricted execution, the instance's __dict__ # is not accessible. Use the old way of unpickling # the instance variables. This is a semantic # difference when unpickling in restricted # vs. unrestricted modes. # Note, however, that cPickle has never tried to do the # .update() business, and always uses # PyObject_SetItem(inst.__dict__, key, value) in a # loop over state.items(). for k, v in state.items(): setattr(inst, k, v) if slotstate: for k, v in slotstate.items(): setattr(inst, k, v) dispatch[BUILD] = load_build def load_mark(self): self.append(self.mark) dispatch[MARK] = load_mark def load_stop(self): value = self.stack.pop() raise _Stop(value) dispatch[STOP] = load_stop # Helper class for load_inst/load_obj class _EmptyClass: pass # Encode/decode longs in linear time. import binascii as _binascii def encode_long(x): r"""Encode a long to a two's complement little-endian binary string. Note that 0L is a special case, returning an empty string, to save a byte in the LONG1 pickling context. >>> encode_long(0L) '' >>> encode_long(255L) '\xff\x00' >>> encode_long(32767L) '\xff\x7f' >>> encode_long(-256L) '\x00\xff' >>> encode_long(-32768L) '\x00\x80' >>> encode_long(-128L) '\x80' >>> encode_long(127L) '\x7f' >>> """ if x == 0: return '' if x > 0: ashex = hex(x) assert ashex.startswith("0x") njunkchars = 2 + ashex.endswith('L') nibbles = len(ashex) - njunkchars if nibbles & 1: # need an even # of nibbles for unhexlify ashex = "0x0" + ashex[2:] elif int(ashex[2], 16) >= 8: # "looks negative", so need a byte of sign bits ashex = "0x00" + ashex[2:] else: # Build the 256's-complement: (1L << nbytes) + x. The trick is # to find the number of bytes in linear time (although that should # really be a constant-time task). ashex = hex(-x) assert ashex.startswith("0x") njunkchars = 2 + ashex.endswith('L') nibbles = len(ashex) - njunkchars if nibbles & 1: # Extend to a full byte. 
nibbles += 1 nbits = nibbles * 4 x += 1L << nbits assert x > 0 ashex = hex(x) njunkchars = 2 + ashex.endswith('L') newnibbles = len(ashex) - njunkchars if newnibbles < nibbles: ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:] if int(ashex[2], 16) < 8: # "looks positive", so need a byte of sign bits ashex = "0xff" + ashex[2:] if ashex.endswith('L'): ashex = ashex[2:-1] else: ashex = ashex[2:] assert len(ashex) & 1 == 0, (x, ashex) binary = _binascii.unhexlify(ashex) return binary[::-1] def decode_long(data): r"""Decode a long from a two's complement little-endian binary string. >>> decode_long('') 0L >>> decode_long("\xff\x00") 255L >>> decode_long("\xff\x7f") 32767L >>> decode_long("\x00\xff") -256L >>> decode_long("\x00\x80") -32768L >>> decode_long("\x80") -128L >>> decode_long("\x7f") 127L """ nbytes = len(data) if nbytes == 0: return 0L ashex = _binascii.hexlify(data[::-1]) n = long(ashex, 16) # quadratic time before Python 2.3; linear now if data[-1] >= '\x80': n -= 1L << (nbytes * 8) return n # Shorthands try: from cStringIO import StringIO except ImportError: from StringIO import StringIO def dump(obj, file, protocol=None): Pickler(file, protocol).dump(obj) def dumps(obj, protocol=None): file = StringIO() Pickler(file, protocol).dump(obj) return file.getvalue() def load(file): return Unpickler(file).load() def loads(str): file = StringIO(str) return Unpickler(file).load() # Doctest def _test(): import doctest return doctest.testmod() if __name__ == "__main__": _test()
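The module above ends with the dump/dumps/load/loads shorthands; as a minimal round-trip sketch (Python 2 syntax to match the module itself — the Point class and the values pickled here are illustrative, not part of pickle):

import pickle
from StringIO import StringIO

class Point(object):
    def __init__(self, x, y):
        self.x = x
        self.y = y

original = {'name': u'demo', 'points': [Point(1, 2), Point(3, 4)]}

# In-memory round trip with the highest protocol this module supports (2).
data = pickle.dumps(original, protocol=pickle.HIGHEST_PROTOCOL)
copy = pickle.loads(data)
assert copy['name'] == u'demo'
assert copy['points'][0].x == 1 and copy['points'][1].y == 4

# dump()/load() work the same way against any file-like object; protocol 0
# (the default) stays compatible with files opened in text mode.
buf = StringIO()
pickle.dump(original, buf)
buf.seek(0)
assert pickle.load(buf)['points'][0].y == 2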
{ "pile_set_name": "Github" }
fileFormatVersion: 2
guid: 5b83bcc92c73944e491d002a47b331c7
timeCreated: 1498706735
licenseType: Pro
NativeFormatImporter:
  mainObjectFileID: 11400000
userData:
assetBundleName:
assetBundleVariant:
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns="http://quartznet.sourceforge.net/JobSchedulingData" targetNamespace="http://quartznet.sourceforge.net/JobSchedulingData" elementFormDefault="qualified" version="2.0"> <xs:element name="job-scheduling-data"> <xs:annotation> <xs:documentation>Root level node</xs:documentation> </xs:annotation> <xs:complexType> <xs:sequence maxOccurs="unbounded"> <xs:element name="pre-processing-commands" type="pre-processing-commandsType" minOccurs="0" maxOccurs="1"> <xs:annotation> <xs:documentation>Commands to be executed before scheduling the jobs and triggers in this file.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="processing-directives" type="processing-directivesType" minOccurs="0" maxOccurs="1"> <xs:annotation> <xs:documentation>Directives to be followed while scheduling the jobs and triggers in this file.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="schedule" minOccurs="0" maxOccurs="unbounded"> <xs:complexType> <xs:sequence maxOccurs="unbounded"> <xs:element name="job" type="job-detailType" minOccurs="0" maxOccurs="unbounded" /> <xs:element name="trigger" type="triggerType" minOccurs="0" maxOccurs="unbounded" /> </xs:sequence> </xs:complexType> </xs:element> </xs:sequence> <xs:attribute name="version" type="xs:string"> <xs:annotation> <xs:documentation>Version of the XML Schema instance</xs:documentation> </xs:annotation> </xs:attribute> </xs:complexType> </xs:element> <xs:complexType name="pre-processing-commandsType"> <xs:sequence maxOccurs="unbounded"> <xs:element name="delete-jobs-in-group" type="xs:string" minOccurs="0" maxOccurs="unbounded"> <xs:annotation> <xs:documentation>Delete all jobs, if any, in the identified group. "*" can be used to identify all groups. Will also result in deleting all triggers related to the jobs.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="delete-triggers-in-group" type="xs:string" minOccurs="0" maxOccurs="unbounded"> <xs:annotation> <xs:documentation>Delete all triggers, if any, in the identified group. "*" can be used to identify all groups. Will also result in deletion of related jobs that are non-durable.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="delete-job" minOccurs="0" maxOccurs="unbounded"> <xs:annotation> <xs:documentation>Delete the identified job if it exists (will also result in deleting all triggers related to it).</xs:documentation> </xs:annotation> <xs:complexType> <xs:sequence> <xs:element name="name" type="xs:string" /> <xs:element name="group" type="xs:string" minOccurs="0" /> </xs:sequence> </xs:complexType> </xs:element> <xs:element name="delete-trigger" minOccurs="0" maxOccurs="unbounded"> <xs:annotation> <xs:documentation>Delete the identified trigger if it exists (will also result in deletion of related jobs that are non-durable).</xs:documentation> </xs:annotation> <xs:complexType> <xs:sequence> <xs:element name="name" type="xs:string" /> <xs:element name="group" type="xs:string" minOccurs="0" /> </xs:sequence> </xs:complexType> </xs:element> </xs:sequence> </xs:complexType> <xs:complexType name="processing-directivesType"> <xs:sequence> <xs:element name="overwrite-existing-data" type="xs:boolean" minOccurs="0" default="true"> <xs:annotation> <xs:documentation>Whether the existing scheduling data (with same identifiers) will be overwritten. 
If false, and ignore-duplicates is not false, and jobs or triggers with the same names already exist as those in the file, an error will occur.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="ignore-duplicates" type="xs:boolean" minOccurs="0" default="false"> <xs:annotation> <xs:documentation>If true (and overwrite-existing-data is false) then any job/triggers encountered in this file that have names that already exist in the scheduler will be ignored, and no error will be produced.</xs:documentation> </xs:annotation> </xs:element> <xs:element name="schedule-trigger-relative-to-replaced-trigger" type="xs:boolean" minOccurs="0" default="false"> <xs:annotation> <xs:documentation>If true trigger's start time is calculated based on earlier run time instead of fixed value. Trigger's start time must be undefined for this to work.</xs:documentation> </xs:annotation> </xs:element> </xs:sequence> </xs:complexType> <xs:complexType name="job-detailType"> <xs:annotation> <xs:documentation>Define a JobDetail</xs:documentation> </xs:annotation> <xs:sequence> <xs:element name="name" type="xs:string" /> <xs:element name="group" type="xs:string" minOccurs="0" /> <xs:element name="description" type="xs:string" minOccurs="0" /> <xs:element name="job-type" type="xs:string" /> <xs:sequence minOccurs="0"> <xs:element name="durable" type="xs:boolean" /> <xs:element name="recover" type="xs:boolean" /> </xs:sequence> <xs:element name="job-data-map" type="job-data-mapType" minOccurs="0" /> </xs:sequence> </xs:complexType> <xs:complexType name="job-data-mapType"> <xs:annotation> <xs:documentation>Define a JobDataMap</xs:documentation> </xs:annotation> <xs:sequence minOccurs="0" maxOccurs="unbounded"> <xs:element name="entry" type="entryType" /> </xs:sequence> </xs:complexType> <xs:complexType name="entryType"> <xs:annotation> <xs:documentation>Define a JobDataMap entry</xs:documentation> </xs:annotation> <xs:sequence> <xs:element name="key" type="xs:string" /> <xs:element name="value" type="xs:string" /> </xs:sequence> </xs:complexType> <xs:complexType name="triggerType"> <xs:annotation> <xs:documentation>Define a Trigger</xs:documentation> </xs:annotation> <xs:choice> <xs:element name="simple" type="simpleTriggerType" /> <xs:element name="cron" type="cronTriggerType" /> <xs:element name="calendar-interval" type="calendarIntervalTriggerType" /> </xs:choice> </xs:complexType> <xs:complexType name="abstractTriggerType" abstract="true"> <xs:annotation> <xs:documentation>Common Trigger definitions</xs:documentation> </xs:annotation> <xs:sequence> <xs:element name="name" type="xs:string" /> <xs:element name="group" type="xs:string" minOccurs="0" /> <xs:element name="description" type="xs:string" minOccurs="0" /> <xs:element name="job-name" type="xs:string" /> <xs:element name="job-group" type="xs:string" minOccurs="0" /> <xs:element name="priority" type="xs:nonNegativeInteger" minOccurs="0" /> <xs:element name="calendar-name" type="xs:string" minOccurs="0" /> <xs:element name="job-data-map" type="job-data-mapType" minOccurs="0" /> <xs:sequence minOccurs="0"> <xs:choice> <xs:element name="start-time" type="xs:dateTime" /> <xs:element name="start-time-seconds-in-future" type="xs:nonNegativeInteger" /> </xs:choice> <xs:element name="end-time" type="xs:dateTime" minOccurs="0" /> </xs:sequence> </xs:sequence> </xs:complexType> <xs:complexType name="simpleTriggerType"> <xs:annotation> <xs:documentation>Define a SimpleTrigger</xs:documentation> </xs:annotation> <xs:complexContent> <xs:extension 
base="abstractTriggerType"> <xs:sequence> <xs:element name="misfire-instruction" type="simple-trigger-misfire-instructionType" minOccurs="0" /> <xs:sequence minOccurs="0"> <xs:element name="repeat-count" type="repeat-countType" /> <xs:element name="repeat-interval" type="xs:nonNegativeInteger" /> </xs:sequence> </xs:sequence> </xs:extension> </xs:complexContent> </xs:complexType> <xs:complexType name="cronTriggerType"> <xs:annotation> <xs:documentation>Define a CronTrigger</xs:documentation> </xs:annotation> <xs:complexContent> <xs:extension base="abstractTriggerType"> <xs:sequence> <xs:element name="misfire-instruction" type="cron-trigger-misfire-instructionType" minOccurs="0" /> <xs:element name="cron-expression" type="cron-expressionType" /> <xs:element name="time-zone" type="xs:string" minOccurs="0" /> </xs:sequence> </xs:extension> </xs:complexContent> </xs:complexType> <xs:complexType name="calendarIntervalTriggerType"> <xs:annotation> <xs:documentation>Define a DateIntervalTrigger</xs:documentation> </xs:annotation> <xs:complexContent> <xs:extension base="abstractTriggerType"> <xs:sequence> <xs:element name="misfire-instruction" type="date-interval-trigger-misfire-instructionType" minOccurs="0" /> <xs:element name="repeat-interval" type="xs:nonNegativeInteger" /> <xs:element name="repeat-interval-unit" type="interval-unitType" /> </xs:sequence> </xs:extension> </xs:complexContent> </xs:complexType> <xs:simpleType name="cron-expressionType"> <xs:annotation> <xs:documentation> Cron expression (see JavaDoc for examples) Special thanks to Chris Thatcher ([email protected]) for the regular expression! Regular expressions are not my strong point but I believe this is complete, with the caveat that order for expressions like 3-0 is not legal but will pass, and month and day names must be capitalized. If you want to examine the correctness look for the [\s] to denote the seperation of individual regular expressions. This is how I break them up visually to examine them: SECONDS: ( ((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?) | (([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9])) | ([\?]) | ([\*]) ) [\s] MINUTES: ( ((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?) | (([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9])) | ([\?]) | ([\*]) ) [\s] HOURS: ( ((([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?,)*([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?) | (([\*]|[0-9]|[0-1][0-9]|[2][0-3])/([0-9]|[0-1][0-9]|[2][0-3])) | ([\?]) | ([\*]) ) [\s] DAY OF MONTH: ( ((([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?,)*([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?(C)?) | (([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])/([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(C)?) | (L(-[0-9])?) | (L(-[1-2][0-9])?) | (L(-[3][0-1])?) | (LW) | ([1-9]W) | ([1-3][0-9]W) | ([\?]) | ([\*]) )[\s] MONTH: ( ((([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?,)*([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?) | (([1-9]|0[1-9]|1[0-2])/([1-9]|0[1-9]|1[0-2])) | (((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?,)*(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?) | ((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)/(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)) | ([\?]) | ([\*]) )[\s] DAY OF WEEK: ( (([1-7](-([1-7]))?,)*([1-7])(-([1-7]))?) 
| ([1-7]/([1-7])) | (((MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?,)*(MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?(C)?) | ((MON|TUE|WED|THU|FRI|SAT|SUN)/(MON|TUE|WED|THU|FRI|SAT|SUN)(C)?) | (([1-7]|(MON|TUE|WED|THU|FRI|SAT|SUN))(L|LW)?) | (([1-7]|MON|TUE|WED|THU|FRI|SAT|SUN)#([1-7])?) | ([\?]) | ([\*]) ) YEAR (OPTIONAL): ( [\s]? ([\*])? | ((19[7-9][0-9])|(20[0-9][0-9]))? | (((19[7-9][0-9])|(20[0-9][0-9]))/((19[7-9][0-9])|(20[0-9][0-9])))? | ((((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?,)*((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?)? ) </xs:documentation> </xs:annotation> <xs:restriction base="xs:string"> <xs:pattern value="(((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?)|(([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9]))|([\?])|([\*]))[\s](((([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?,)*([0-9]|[0-5][0-9])(-([0-9]|[0-5][0-9]))?)|(([\*]|[0-9]|[0-5][0-9])/([0-9]|[0-5][0-9]))|([\?])|([\*]))[\s](((([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?,)*([0-9]|[0-1][0-9]|[2][0-3])(-([0-9]|[0-1][0-9]|[2][0-3]))?)|(([\*]|[0-9]|[0-1][0-9]|[2][0-3])/([0-9]|[0-1][0-9]|[2][0-3]))|([\?])|([\*]))[\s](((([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?,)*([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(-([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1]))?(C)?)|(([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])/([1-9]|[0][1-9]|[1-2][0-9]|[3][0-1])(C)?)|(L(-[0-9])?)|(L(-[1-2][0-9])?)|(L(-[3][0-1])?)|(LW)|([1-9]W)|([1-3][0-9]W)|([\?])|([\*]))[\s](((([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?,)*([1-9]|0[1-9]|1[0-2])(-([1-9]|0[1-9]|1[0-2]))?)|(([1-9]|0[1-9]|1[0-2])/([1-9]|0[1-9]|1[0-2]))|(((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?,)*(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)(-(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))?)|((JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)/(JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC))|([\?])|([\*]))[\s]((([1-7](-([1-7]))?,)*([1-7])(-([1-7]))?)|([1-7]/([1-7]))|(((MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?,)*(MON|TUE|WED|THU|FRI|SAT|SUN)(-(MON|TUE|WED|THU|FRI|SAT|SUN))?(C)?)|((MON|TUE|WED|THU|FRI|SAT|SUN)/(MON|TUE|WED|THU|FRI|SAT|SUN)(C)?)|(([1-7]|(MON|TUE|WED|THU|FRI|SAT|SUN))?(L|LW)?)|(([1-7]|MON|TUE|WED|THU|FRI|SAT|SUN)#([1-7])?)|([\?])|([\*]))([\s]?(([\*])?|(19[7-9][0-9])|(20[0-9][0-9]))?| (((19[7-9][0-9])|(20[0-9][0-9]))/((19[7-9][0-9])|(20[0-9][0-9])))?| ((((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?,)*((19[7-9][0-9])|(20[0-9][0-9]))(-((19[7-9][0-9])|(20[0-9][0-9])))?)?)" /> </xs:restriction> </xs:simpleType> <xs:simpleType name="repeat-countType"> <xs:annotation> <xs:documentation>Number of times to repeat the Trigger (-1 for indefinite)</xs:documentation> </xs:annotation> <xs:restriction base="xs:integer"> <xs:minInclusive value="-1" /> </xs:restriction> </xs:simpleType> <xs:simpleType name="simple-trigger-misfire-instructionType"> <xs:annotation> <xs:documentation>Simple Trigger Misfire Instructions</xs:documentation> </xs:annotation> <xs:restriction base="xs:string"> <xs:pattern value="SmartPolicy" /> <xs:pattern value="RescheduleNextWithExistingCount" /> <xs:pattern value="RescheduleNextWithRemainingCount" /> <xs:pattern value="RescheduleNowWithExistingRepeatCount" /> <xs:pattern value="RescheduleNowWithRemainingRepeatCount" /> <xs:pattern value="FireNow" /> <xs:pattern value="IgnoreMisfirePolicy" /> </xs:restriction> </xs:simpleType> 
<xs:simpleType name="cron-trigger-misfire-instructionType"> <xs:annotation> <xs:documentation>Cron Trigger Misfire Instructions</xs:documentation> </xs:annotation> <xs:restriction base="xs:string"> <xs:pattern value="SmartPolicy" /> <xs:pattern value="DoNothing" /> <xs:pattern value="FireOnceNow" /> <xs:pattern value="IgnoreMisfirePolicy" /> </xs:restriction> </xs:simpleType> <xs:simpleType name="date-interval-trigger-misfire-instructionType"> <xs:annotation> <xs:documentation>Date Interval Trigger Misfire Instructions</xs:documentation> </xs:annotation> <xs:restriction base="xs:string"> <xs:pattern value="SmartPolicy" /> <xs:pattern value="DoNothing" /> <xs:pattern value="FireOnceNow" /> <xs:pattern value="IgnoreMisfirePolicy" /> </xs:restriction> </xs:simpleType> <xs:simpleType name="interval-unitType"> <xs:annotation> <xs:documentation>Interval Units</xs:documentation> </xs:annotation> <xs:restriction base="xs:string"> <xs:pattern value="Day" /> <xs:pattern value="Hour" /> <xs:pattern value="Minute" /> <xs:pattern value="Month" /> <xs:pattern value="Second" /> <xs:pattern value="Week" /> <xs:pattern value="Year" /> </xs:restriction> </xs:simpleType> </xs:schema>
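The Quartz job-scheduling schema ends above. As an illustrative aid only, the following is a minimal instance document sketched against the job-detailType and cronTriggerType definitions shown here. The root element, the schedule wrapper, the version attribute, and the target namespace belong to the part of the schema not included in this excerpt and are assumed from the usual Quartz.NET layout; every job name, job type, and cron expression below is an invented placeholder rather than anything taken from the original file.

<?xml version="1.0" encoding="UTF-8"?>
<!-- Hypothetical example instance; the namespace and all names are assumptions. -->
<job-scheduling-data xmlns="http://quartznet.sourceforge.net/JobSchedulingData" version="2.0">
  <schedule>
    <job>
      <name>sampleJob</name>
      <group>sampleGroup</group>
      <description>Illustrative job definition</description>
      <job-type>Example.Jobs.SampleJob, Example.Jobs</job-type>
      <durable>true</durable>
      <recover>false</recover>
    </job>
    <trigger>
      <cron>
        <name>sampleTrigger</name>
        <job-name>sampleJob</job-name>
        <job-group>sampleGroup</job-group>
        <misfire-instruction>SmartPolicy</misfire-instruction>
        <cron-expression>0 0/5 * * * ?</cron-expression>
      </cron>
    </trigger>
  </schedule>
</job-scheduling-data>

Note that the element order inside the cron trigger follows abstractTriggerType first (name, job-name, job-group) and only then the cronTriggerType extension (misfire-instruction, cron-expression), as the sequence definitions above require.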
{ "pile_set_name": "Github" }
/** * This file has no copyright assigned and is placed in the Public Domain. * This file is part of the mingw-w64 runtime package. * No warranty is given; refer to the file DISCLAIMER.PD within this package. */ #if !defined(MAPIDEFS_H) && !defined(WABDEFS_H) #define WABDEFS_H #if defined (NT) && !defined (_WINNT) #warning NT defined but not _WINNT. You must define _WINNT to ensure " "successful compile of Windows NT code. #define _WINNT #endif #ifndef _WINDOWS_ #define INC_OLE2 #define INC_RPC #define _OLE_H_ #include <windows.h> #endif #ifndef _OLEERROR_H_ #include <objerror.h> #endif #include <objbase.h> #include <stddef.h> #ifndef MAPI_DIM #define MAPI_DIM 1 #endif #ifndef STDMAPIINITCALLTYPE #define STDMAPIINITCALLTYPE __cdecl #define STDINITMETHODIMP HRESULT __cdecl #define STDINITMETHODIMP_(type) type __cdecl #endif #ifdef __cplusplus extern "C" { #endif #ifndef __WCHAR_DEFINED #define __WCHAR_DEFINED typedef WORD WCHAR; #endif #if defined(UNICODE) typedef WCHAR TCHAR; #else typedef char TCHAR; #endif typedef WCHAR *LPWSTR; typedef const WCHAR *LPCWSTR; typedef TCHAR *LPTSTR; typedef const TCHAR *LPCTSTR; typedef BYTE *LPBYTE; typedef ULONG *LPULONG; #ifndef __LHANDLE #define __LHANDLE typedef unsigned __LONG32 LHANDLE,*LPLHANDLE; #endif #if !defined(_WINBASE_) && !defined(_FILETIME_) #define _FILETIME_ typedef struct _FILETIME { DWORD dwLowDateTime; DWORD dwHighDateTime; } FILETIME,*LPFILETIME; #endif #ifndef BEGIN_INTERFACE #define BEGIN_INTERFACE #endif #define MAPI_MODIFY ((ULONG) 0x00000001) #define MAPI_ACCESS_MODIFY ((ULONG) 0x00000001) #define MAPI_ACCESS_READ ((ULONG) 0x00000002) #define MAPI_ACCESS_DELETE ((ULONG) 0x00000004) #define MAPI_ACCESS_CREATE_HIERARCHY ((ULONG) 0x00000008) #define MAPI_ACCESS_CREATE_CONTENTS ((ULONG) 0x00000010) #define MAPI_ACCESS_CREATE_ASSOCIATED ((ULONG) 0x00000020) #define MAPI_UNICODE ((ULONG) 0x80000000) #if defined(UNICODE) #define fMapiUnicode MAPI_UNICODE #else #define fMapiUnicode 0 #endif #define hrSuccess 0 #ifndef MAPI_ORIG #define MAPI_ORIG 0 #define MAPI_TO 1 #define MAPI_CC 2 #define MAPI_BCC 3 #define MAPI_P1 0x10000000 #define MAPI_SUBMITTED 0x80000000 #endif #define MAPI_SHORTTERM 0x80 #define MAPI_NOTRECIP 0x40 #define MAPI_THISSESSION 0x20 #define MAPI_NOW 0x10 #define MAPI_NOTRESERVED 0x08 #define MAPI_COMPOUND 0x80 typedef struct { BYTE abFlags[4]; BYTE ab[MAPI_DIM]; } ENTRYID,*LPENTRYID; #define CbNewENTRYID(_cb) (offsetof(ENTRYID,ab) + (_cb)) #define CbENTRYID(_cb) (offsetof(ENTRYID,ab) + (_cb)) #define SizedENTRYID(_cb,_name) struct _ENTRYID_ ## _name { BYTE abFlags[4]; BYTE ab[_cb]; } _name typedef struct _MAPIUID { BYTE ab[16]; } MAPIUID,*LPMAPIUID; #define IsEqualMAPIUID(lpuid1,lpuid2) (!memcmp(lpuid1,lpuid2,sizeof(MAPIUID))) #define MAPI_STORE ((ULONG) 0x00000001) #define MAPI_ADDRBOOK ((ULONG) 0x00000002) #define MAPI_FOLDER ((ULONG) 0x00000003) #define MAPI_ABCONT ((ULONG) 0x00000004) #define MAPI_MESSAGE ((ULONG) 0x00000005) #define MAPI_MAILUSER ((ULONG) 0x00000006) #define MAPI_ATTACH ((ULONG) 0x00000007) #define MAPI_DISTLIST ((ULONG) 0x00000008) #define MAPI_PROFSECT ((ULONG) 0x00000009) #define MAPI_STATUS ((ULONG) 0x0000000A) #define MAPI_SESSION ((ULONG) 0x0000000B) #define MAPI_FORMINFO ((ULONG) 0x0000000C) #ifndef cchProfileNameMax #define cchProfileNameMax 64 #define cchProfilePassMax 64 #endif #define MV_FLAG 0x1000 #define PT_UNSPECIFIED ((ULONG) 0) #define PT_NULL ((ULONG) 1) #define PT_I2 ((ULONG) 2) #define PT_LONG ((ULONG) 3) #define PT_R4 ((ULONG) 4) #define PT_DOUBLE ((ULONG) 5) #define 
PT_CURRENCY ((ULONG) 6) #define PT_APPTIME ((ULONG) 7) #define PT_ERROR ((ULONG) 10) #define PT_BOOLEAN ((ULONG) 11) #define PT_OBJECT ((ULONG) 13) #define PT_I8 ((ULONG) 20) #define PT_STRING8 ((ULONG) 30) #define PT_UNICODE ((ULONG) 31) #define PT_SYSTIME ((ULONG) 64) #define PT_CLSID ((ULONG) 72) #define PT_BINARY ((ULONG) 258) #define PT_SHORT PT_I2 #define PT_I4 PT_LONG #define PT_FLOAT PT_R4 #define PT_R8 PT_DOUBLE #define PT_LONGLONG PT_I8 #if defined(UNICODE) #define PT_TSTRING PT_UNICODE #define PT_MV_TSTRING (MV_FLAG|PT_UNICODE) #define LPSZ lpszW #define LPPSZ lppszW #define MVSZ MVszW #else #define PT_TSTRING PT_STRING8 #define PT_MV_TSTRING (MV_FLAG|PT_STRING8) #define LPSZ lpszA #define LPPSZ lppszA #define MVSZ MVszA #endif #define PROP_TYPE_MASK ((ULONG)0x0000FFFF) #define PROP_TYPE(ulPropTag) (((ULONG)(ulPropTag))&PROP_TYPE_MASK) #define PROP_ID(ulPropTag) (((ULONG)(ulPropTag))>>16) #define PROP_TAG(ulPropType,ulPropID) ((((ULONG)(ulPropID))<<16)|((ULONG)(ulPropType))) #define PROP_ID_NULL 0 #define PROP_ID_INVALID 0xFFFF #define PR_NULL PROP_TAG(PT_NULL,PROP_ID_NULL) #define CHANGE_PROP_TYPE(ulPropTag,ulPropType) (((ULONG)0xFFFF0000 & ulPropTag) | ulPropType) #define PT_MV_I2 (MV_FLAG|PT_I2) #define PT_MV_LONG (MV_FLAG|PT_LONG) #define PT_MV_R4 (MV_FLAG|PT_R4) #define PT_MV_DOUBLE (MV_FLAG|PT_DOUBLE) #define PT_MV_CURRENCY (MV_FLAG|PT_CURRENCY) #define PT_MV_APPTIME (MV_FLAG|PT_APPTIME) #define PT_MV_SYSTIME (MV_FLAG|PT_SYSTIME) #define PT_MV_STRING8 (MV_FLAG|PT_STRING8) #define PT_MV_BINARY (MV_FLAG|PT_BINARY) #define PT_MV_UNICODE (MV_FLAG|PT_UNICODE) #define PT_MV_CLSID (MV_FLAG|PT_CLSID) #define PT_MV_I8 (MV_FLAG|PT_I8) #define PT_MV_SHORT PT_MV_I2 #define PT_MV_I4 PT_MV_LONG #define PT_MV_FLOAT PT_MV_R4 #define PT_MV_R8 PT_MV_DOUBLE #define PT_MV_LONGLONG PT_MV_I8 #define MV_INSTANCE 0x2000 #define MVI_FLAG (MV_FLAG | MV_INSTANCE) #define MVI_PROP(tag) ((tag) | MVI_FLAG) typedef struct _SPropTagArray { ULONG cValues; ULONG aulPropTag[MAPI_DIM]; } SPropTagArray,*LPSPropTagArray; #define CbNewSPropTagArray(_ctag) (offsetof(SPropTagArray,aulPropTag) + (_ctag)*sizeof(ULONG)) #define CbSPropTagArray(_lparray) (offsetof(SPropTagArray,aulPropTag) + (UINT)((_lparray)->cValues)*sizeof(ULONG)) #define SizedSPropTagArray(_ctag,_name) struct _SPropTagArray_ ## _name { ULONG cValues; ULONG aulPropTag[_ctag]; } _name typedef struct _SPropValue SPropValue; #ifndef _tagCY_DEFINED #define _tagCY_DEFINED #define _CY_DEFINED typedef union tagCY { struct { unsigned __LONG32 Lo; __LONG32 Hi; }; LONGLONG int64; } CY; #endif typedef CY CURRENCY; typedef struct _SBinary { ULONG cb; LPBYTE lpb; } SBinary,*LPSBinary; typedef struct _SShortArray { ULONG cValues; short int *lpi; } SShortArray; typedef struct _SGuidArray { ULONG cValues; GUID *lpguid; } SGuidArray; typedef struct _SRealArray { ULONG cValues; float *lpflt; } SRealArray; typedef struct _SLongArray { ULONG cValues; LONG *lpl; } SLongArray; typedef struct _SLargeIntegerArray { ULONG cValues; LARGE_INTEGER *lpli; } SLargeIntegerArray; typedef struct _SDateTimeArray { ULONG cValues; FILETIME *lpft; } SDateTimeArray; typedef struct _SAppTimeArray { ULONG cValues; double *lpat; } SAppTimeArray; typedef struct _SCurrencyArray { ULONG cValues; CURRENCY *lpcur; } SCurrencyArray; typedef struct _SBinaryArray { ULONG cValues; SBinary *lpbin; } SBinaryArray; typedef struct _SDoubleArray { ULONG cValues; double *lpdbl; } SDoubleArray; typedef struct _SWStringArray { ULONG cValues; LPWSTR *lppszW; } SWStringArray; typedef struct _SLPSTRArray { 
ULONG cValues; LPSTR *lppszA; } SLPSTRArray; typedef union _PV { short int i; LONG l; ULONG ul; float flt; double dbl; unsigned short int b; CURRENCY cur; double at; FILETIME ft; LPSTR lpszA; SBinary bin; LPWSTR lpszW; LPGUID lpguid; LARGE_INTEGER li; SShortArray MVi; SLongArray MVl; SRealArray MVflt; SDoubleArray MVdbl; SCurrencyArray MVcur; SAppTimeArray MVat; SDateTimeArray MVft; SBinaryArray MVbin; SLPSTRArray MVszA; SWStringArray MVszW; SGuidArray MVguid; SLargeIntegerArray MVli; SCODE err; LONG x; } __UPV; typedef struct _SPropValue { ULONG ulPropTag; ULONG dwAlignPad; union _PV Value; } SPropValue,*LPSPropValue; typedef struct _SPropProblem { ULONG ulIndex; ULONG ulPropTag; SCODE scode; } SPropProblem,*LPSPropProblem; typedef struct _SPropProblemArray { ULONG cProblem; SPropProblem aProblem[MAPI_DIM]; } SPropProblemArray,*LPSPropProblemArray; #define CbNewSPropProblemArray(_cprob) (offsetof(SPropProblemArray,aProblem) + (_cprob)*sizeof(SPropProblem)) #define CbSPropProblemArray(_lparray) (offsetof(SPropProblemArray,aProblem) + (UINT) ((_lparray)->cProblem*sizeof(SPropProblem))) #define SizedSPropProblemArray(_cprob,_name) struct _SPropProblemArray_ ## _name { ULONG cProblem; SPropProblem aProblem[_cprob]; } _name typedef SBinaryArray ENTRYLIST,*LPENTRYLIST; typedef struct { ULONG cb; BYTE abEntry[MAPI_DIM]; } FLATENTRY,*LPFLATENTRY; typedef struct { ULONG cEntries; ULONG cbEntries; BYTE abEntries[MAPI_DIM]; } FLATENTRYLIST,*LPFLATENTRYLIST; typedef struct { ULONG cb; BYTE ab[MAPI_DIM]; } MTSID,*LPMTSID; typedef struct { ULONG cMTSIDs; ULONG cbMTSIDs; BYTE abMTSIDs[MAPI_DIM]; } FLATMTSIDLIST,*LPFLATMTSIDLIST; #define CbNewFLATENTRY(_cb) (offsetof(FLATENTRY,abEntry) + (_cb)) #define CbFLATENTRY(_lpentry) (offsetof(FLATENTRY,abEntry) + (_lpentry)->cb) #define CbNewFLATENTRYLIST(_cb) (offsetof(FLATENTRYLIST,abEntries) + (_cb)) #define CbFLATENTRYLIST(_lplist) (offsetof(FLATENTRYLIST,abEntries) + (_lplist)->cbEntries) #define CbNewMTSID(_cb) (offsetof(MTSID,ab) + (_cb)) #define CbMTSID(_lpentry) (offsetof(MTSID,ab) + (_lpentry)->cb) #define CbNewFLATMTSIDLIST(_cb) (offsetof(FLATMTSIDLIST,abMTSIDs) + (_cb)) #define CbFLATMTSIDLIST(_lplist) (offsetof(FLATMTSIDLIST,abMTSIDs) + (_lplist)->cbMTSIDs) typedef struct _ADRENTRY { ULONG ulReserved1; ULONG cValues; LPSPropValue rgPropVals; } ADRENTRY,*LPADRENTRY; typedef struct _ADRLIST { ULONG cEntries; ADRENTRY aEntries[MAPI_DIM]; } ADRLIST,*LPADRLIST; #define CbNewADRLIST(_centries) (offsetof(ADRLIST,aEntries) + (_centries)*sizeof(ADRENTRY)) #define CbADRLIST(_lpadrlist) (offsetof(ADRLIST,aEntries) + (UINT)(_lpadrlist)->cEntries*sizeof(ADRENTRY)) #define SizedADRLIST(_centries,_name) struct _ADRLIST_ ## _name { ULONG cEntries; ADRENTRY aEntries[_centries]; } _name typedef struct _SRow { ULONG ulAdrEntryPad; ULONG cValues; LPSPropValue lpProps; } SRow,*LPSRow; typedef struct _SRowSet { ULONG cRows; SRow aRow[MAPI_DIM]; } SRowSet,*LPSRowSet; #define CbNewSRowSet(_crow) (offsetof(SRowSet,aRow) + (_crow)*sizeof(SRow)) #define CbSRowSet(_lprowset) (offsetof(SRowSet,aRow) + (UINT)((_lprowset)->cRows*sizeof(SRow))) #define SizedSRowSet(_crow,_name) struct _SRowSet_ ## _name { ULONG cRows; SRow aRow[_crow]; } _name typedef SCODE (WINAPI ALLOCATEBUFFER)(ULONG cbSize,LPVOID *lppBuffer); typedef SCODE (WINAPI ALLOCATEMORE)(ULONG cbSize,LPVOID lpObject,LPVOID *lppBuffer); typedef ULONG (WINAPI FREEBUFFER)(LPVOID lpBuffer); typedef ALLOCATEBUFFER *LPALLOCATEBUFFER; typedef ALLOCATEMORE *LPALLOCATEMORE; typedef FREEBUFFER *LPFREEBUFFER; #if defined(MAPI_IF) 
&& (!defined(__cplusplus) || defined(CINTERFACE)) #define DECLARE_MAPI_INTERFACE(iface) typedef struct iface##Vtbl iface##Vtbl,*iface; struct iface##Vtbl #define DECLARE_MAPI_INTERFACE_(iface,baseiface) DECLARE_MAPI_INTERFACE(iface) #define DECLARE_MAPI_INTERFACE_PTR(iface,piface) typedef struct iface##Vtbl iface##Vtbl,*iface,**piface; #else #define DECLARE_MAPI_INTERFACE(iface) DECLARE_INTERFACE(iface) #define DECLARE_MAPI_INTERFACE_(iface,baseiface) DECLARE_INTERFACE_(iface,baseiface) #ifdef __cplusplus #define DECLARE_MAPI_INTERFACE_PTR(iface,piface) struct iface; typedef iface *piface #else #define DECLARE_MAPI_INTERFACE_PTR(iface,piface) typedef struct iface iface,*piface #endif #endif #define MAPIMETHOD(method) MAPIMETHOD_(HRESULT,method) #define MAPIMETHOD_(type,method) STDMETHOD_(type,method) #define MAPIMETHOD_DECLARE(type,method,prefix) STDMETHODIMP_(type) prefix##method #define MAPIMETHOD_TYPEDEF(type,method,prefix) typedef type (WINAPI prefix##method##_METHOD) #define MAPI_IUNKNOWN_METHODS(IPURE) MAPIMETHOD(QueryInterface) (THIS_ REFIID riid,LPVOID *ppvObj) IPURE; MAPIMETHOD_(ULONG,AddRef) (THIS) IPURE; MAPIMETHOD_(ULONG,Release) (THIS) IPURE; #undef IMPL #define IMPL typedef const IID *LPCIID; DECLARE_MAPI_INTERFACE_PTR(IMsgStore,LPMDB); DECLARE_MAPI_INTERFACE_PTR(IMAPIFolder,LPMAPIFOLDER); DECLARE_MAPI_INTERFACE_PTR(IMessage,LPMESSAGE); DECLARE_MAPI_INTERFACE_PTR(IAttach,LPATTACH); DECLARE_MAPI_INTERFACE_PTR(IAddrBook,LPADRBOOK); DECLARE_MAPI_INTERFACE_PTR(IABContainer,LPABCONT); DECLARE_MAPI_INTERFACE_PTR(IMailUser,LPMAILUSER); DECLARE_MAPI_INTERFACE_PTR(IDistList,LPDISTLIST); DECLARE_MAPI_INTERFACE_PTR(IMAPIStatus,LPMAPISTATUS); DECLARE_MAPI_INTERFACE_PTR(IMAPITable,LPMAPITABLE); DECLARE_MAPI_INTERFACE_PTR(IProfSect,LPPROFSECT); DECLARE_MAPI_INTERFACE_PTR(IMAPIProp,LPMAPIPROP); DECLARE_MAPI_INTERFACE_PTR(IMAPIContainer,LPMAPICONTAINER); DECLARE_MAPI_INTERFACE_PTR(IMAPIAdviseSink,LPMAPIADVISESINK); DECLARE_MAPI_INTERFACE_PTR(IMAPIProgress,LPMAPIPROGRESS); DECLARE_MAPI_INTERFACE_PTR(IProviderAdmin,LPPROVIDERADMIN); typedef struct _MAPIERROR { ULONG ulVersion; LPTSTR lpszError; LPTSTR lpszComponent; ULONG ulLowLevelError; ULONG ulContext; } MAPIERROR,*LPMAPIERROR; #define fnevCriticalError ((ULONG) 0x00000001) #define fnevNewMail ((ULONG) 0x00000002) #define fnevObjectCreated ((ULONG) 0x00000004) #define fnevObjectDeleted ((ULONG) 0x00000008) #define fnevObjectModified ((ULONG) 0x00000010) #define fnevObjectMoved ((ULONG) 0x00000020) #define fnevObjectCopied ((ULONG) 0x00000040) #define fnevSearchComplete ((ULONG) 0x00000080) #define fnevTableModified ((ULONG) 0x00000100) #define fnevStatusObjectModified ((ULONG) 0x00000200) #define fnevReservedForMapi ((ULONG) 0x40000000) #define fnevExtended ((ULONG) 0x80000000) #define TABLE_CHANGED 1 #define TABLE_ERROR 2 #define TABLE_ROW_ADDED 3 #define TABLE_ROW_DELETED 4 #define TABLE_ROW_MODIFIED 5 #define TABLE_SORT_DONE 6 #define TABLE_RESTRICT_DONE 7 #define TABLE_SETCOL_DONE 8 #define TABLE_RELOAD 9 typedef struct _ERROR_NOTIFICATION { ULONG cbEntryID; LPENTRYID lpEntryID; SCODE scode; ULONG ulFlags; LPMAPIERROR lpMAPIError; } ERROR_NOTIFICATION; typedef struct _NEWMAIL_NOTIFICATION { ULONG cbEntryID; LPENTRYID lpEntryID; ULONG cbParentID; LPENTRYID lpParentID; ULONG ulFlags; LPTSTR lpszMessageClass; ULONG ulMessageFlags; } NEWMAIL_NOTIFICATION; typedef struct _OBJECT_NOTIFICATION { ULONG cbEntryID; LPENTRYID lpEntryID; ULONG ulObjType; ULONG cbParentID; LPENTRYID lpParentID; ULONG cbOldID; LPENTRYID lpOldID; ULONG cbOldParentID; 
LPENTRYID lpOldParentID; LPSPropTagArray lpPropTagArray; } OBJECT_NOTIFICATION; typedef struct _TABLE_NOTIFICATION { ULONG ulTableEvent; HRESULT hResult; SPropValue propIndex; SPropValue propPrior; SRow row; ULONG ulPad; } TABLE_NOTIFICATION; typedef struct _EXTENDED_NOTIFICATION { ULONG ulEvent; ULONG cb; LPBYTE pbEventParameters; } EXTENDED_NOTIFICATION; typedef struct { ULONG cbEntryID; LPENTRYID lpEntryID; ULONG cValues; LPSPropValue lpPropVals; } STATUS_OBJECT_NOTIFICATION; typedef struct _NOTIFICATION { ULONG ulEventType; ULONG ulAlignPad; union { ERROR_NOTIFICATION err; NEWMAIL_NOTIFICATION newmail; OBJECT_NOTIFICATION obj; TABLE_NOTIFICATION tab; EXTENDED_NOTIFICATION ext; STATUS_OBJECT_NOTIFICATION statobj; } info; } NOTIFICATION,*LPNOTIFICATION; #define MAPI_IMAPIADVISESINK_METHODS(IPURE) MAPIMETHOD_(ULONG,OnNotify) (THIS_ ULONG cNotif,LPNOTIFICATION lpNotifications) IPURE; #undef INTERFACE #define INTERFACE IMAPIAdviseSink DECLARE_MAPI_INTERFACE_(IMAPIAdviseSink,IUnknown) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIADVISESINK_METHODS(PURE) }; typedef __LONG32 (WINAPI NOTIFCALLBACK) (LPVOID lpvContext,ULONG cNotification,LPNOTIFICATION lpNotifications); typedef NOTIFCALLBACK *LPNOTIFCALLBACK; #define szMAPINotificationMsg "MAPI Notify window message" #define MAPI_TOP_LEVEL ((ULONG) 0x00000001) #define MAPI_IMAPIPROGRESS_METHODS(IPURE) MAPIMETHOD(Progress) (THIS_ ULONG ulValue,ULONG ulCount,ULONG ulTotal) IPURE; MAPIMETHOD(GetFlags) (THIS_ ULONG *lpulFlags) IPURE; MAPIMETHOD(GetMax) (THIS_ ULONG *lpulMax) IPURE; MAPIMETHOD(GetMin) (THIS_ ULONG *lpulMin) IPURE; MAPIMETHOD(SetLimits) (THIS_ LPULONG lpulMin,LPULONG lpulMax,LPULONG lpulFlags) IPURE; #undef INTERFACE #define INTERFACE IMAPIProgress DECLARE_MAPI_INTERFACE_(IMAPIProgress,IUnknown) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROGRESS_METHODS(PURE) }; #define MAPI_ERROR_VERSION __MSABI_LONG(0x00000000) #define KEEP_OPEN_READONLY ((ULONG) 0x00000001) #define KEEP_OPEN_READWRITE ((ULONG) 0x00000002) #define FORCE_SAVE ((ULONG) 0x00000004) #define MAPI_CREATE ((ULONG) 0x00000002) #define STREAM_APPEND ((ULONG) 0x00000004) #define MAPI_MOVE ((ULONG) 0x00000001) #define MAPI_NOREPLACE ((ULONG) 0x00000002) #define MAPI_DECLINE_OK ((ULONG) 0x00000004) #ifndef MAPI_DIALOG #define MAPI_DIALOG ((ULONG) 0x00000008) #endif #ifndef MAPI_USE_DEFAULT #define MAPI_USE_DEFAULT 0x00000040 #endif #define MAPI_NO_STRINGS ((ULONG) 0x00000001) #define MAPI_NO_IDS ((ULONG) 0x00000002) #define MNID_ID 0 #define MNID_STRING 1 typedef struct _MAPINAMEID { LPGUID lpguid; ULONG ulKind; union { LONG lID; LPWSTR lpwstrName; } Kind; } MAPINAMEID,*LPMAPINAMEID; #define MAPI_IMAPIPROP_METHODS(IPURE) MAPIMETHOD(GetLastError) (THIS_ HRESULT hResult,ULONG ulFlags,LPMAPIERROR *lppMAPIError) IPURE; MAPIMETHOD(SaveChanges) (THIS_ ULONG ulFlags) IPURE; MAPIMETHOD(GetProps) (THIS_ LPSPropTagArray lpPropTagArray,ULONG ulFlags,ULONG *lpcValues,LPSPropValue *lppPropArray) IPURE; MAPIMETHOD(GetPropList) (THIS_ ULONG ulFlags,LPSPropTagArray *lppPropTagArray) IPURE; MAPIMETHOD(OpenProperty) (THIS_ ULONG ulPropTag,LPCIID lpiid,ULONG ulInterfaceOptions,ULONG ulFlags,LPUNKNOWN *lppUnk) IPURE; MAPIMETHOD(SetProps) (THIS_ ULONG cValues,LPSPropValue lpPropArray,LPSPropProblemArray *lppProblems) IPURE; MAPIMETHOD(DeleteProps) (THIS_ LPSPropTagArray lpPropTagArray,LPSPropProblemArray *lppProblems) IPURE; MAPIMETHOD(CopyTo) (THIS_ ULONG ciidExclude,LPCIID rgiidExclude,LPSPropTagArray lpExcludeProps,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,LPCIID 
lpInterface,LPVOID lpDestObj,ULONG ulFlags,LPSPropProblemArray *lppProblems) IPURE; MAPIMETHOD(CopyProps) (THIS_ LPSPropTagArray lpIncludeProps,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,LPCIID lpInterface,LPVOID lpDestObj,ULONG ulFlags,LPSPropProblemArray *lppProblems) IPURE; MAPIMETHOD(GetNamesFromIDs) (THIS_ LPSPropTagArray *lppPropTags,LPGUID lpPropSetGuid,ULONG ulFlags,ULONG *lpcPropNames,LPMAPINAMEID **lpppPropNames) IPURE; MAPIMETHOD(GetIDsFromNames) (THIS_ ULONG cPropNames,LPMAPINAMEID *lppPropNames,ULONG ulFlags,LPSPropTagArray *lppPropTags) IPURE; #undef INTERFACE #define INTERFACE IMAPIProp DECLARE_MAPI_INTERFACE_(IMAPIProp,IUnknown) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) }; #define TBLSTAT_COMPLETE ((ULONG) 0) #define TBLSTAT_QCHANGED ((ULONG) 7) #define TBLSTAT_SORTING ((ULONG) 9) #define TBLSTAT_SORT_ERROR ((ULONG) 10) #define TBLSTAT_SETTING_COLS ((ULONG) 11) #define TBLSTAT_SETCOL_ERROR ((ULONG) 13) #define TBLSTAT_RESTRICTING ((ULONG) 14) #define TBLSTAT_RESTRICT_ERROR ((ULONG) 15) #define TBLTYPE_SNAPSHOT ((ULONG) 0) #define TBLTYPE_KEYSET ((ULONG) 1) #define TBLTYPE_DYNAMIC ((ULONG) 2) #define TABLE_SORT_ASCEND ((ULONG) 0x00000000) #define TABLE_SORT_DESCEND ((ULONG) 0x00000001) #define TABLE_SORT_COMBINE ((ULONG) 0x00000002) typedef struct _SSortOrder { ULONG ulPropTag; ULONG ulOrder; } SSortOrder,*LPSSortOrder; typedef struct _SSortOrderSet { ULONG cSorts; ULONG cCategories; ULONG cExpanded; SSortOrder aSort[MAPI_DIM]; } SSortOrderSet,*LPSSortOrderSet; #define CbNewSSortOrderSet(_csort) (offsetof(SSortOrderSet,aSort) + (_csort)*sizeof(SSortOrder)) #define CbSSortOrderSet(_lpset) (offsetof(SSortOrderSet,aSort) + (UINT)((_lpset)->cSorts*sizeof(SSortOrder))) #define SizedSSortOrderSet(_csort,_name) struct _SSortOrderSet_ ## _name { ULONG cSorts; ULONG cCategories; ULONG cExpanded; SSortOrder aSort[_csort]; } _name typedef ULONG BOOKMARK; #define BOOKMARK_BEGINNING ((BOOKMARK) 0) #define BOOKMARK_CURRENT ((BOOKMARK) 1) #define BOOKMARK_END ((BOOKMARK) 2) #define FL_FULLSTRING ((ULONG) 0x00000000) #define FL_SUBSTRING ((ULONG) 0x00000001) #define FL_PREFIX ((ULONG) 0x00000002) #define FL_IGNORECASE ((ULONG) 0x00010000) #define FL_IGNORENONSPACE ((ULONG) 0x00020000) #define FL_LOOSE ((ULONG) 0x00040000) typedef struct _SRestriction *LPSRestriction; #define RES_AND ((ULONG) 0x00000000) #define RES_OR ((ULONG) 0x00000001) #define RES_NOT ((ULONG) 0x00000002) #define RES_CONTENT ((ULONG) 0x00000003) #define RES_PROPERTY ((ULONG) 0x00000004) #define RES_COMPAREPROPS ((ULONG) 0x00000005) #define RES_BITMASK ((ULONG) 0x00000006) #define RES_SIZE ((ULONG) 0x00000007) #define RES_EXIST ((ULONG) 0x00000008) #define RES_SUBRESTRICTION ((ULONG) 0x00000009) #define RES_COMMENT ((ULONG) 0x0000000A) #define RELOP_LT ((ULONG) 0) #define RELOP_LE ((ULONG) 1) #define RELOP_GT ((ULONG) 2) #define RELOP_GE ((ULONG) 3) #define RELOP_EQ ((ULONG) 4) #define RELOP_NE ((ULONG) 5) #define RELOP_RE ((ULONG) 6) #define BMR_EQZ ((ULONG) 0) #define BMR_NEZ ((ULONG) 1) typedef struct _SAndRestriction { ULONG cRes; LPSRestriction lpRes; } SAndRestriction; typedef struct _SOrRestriction { ULONG cRes; LPSRestriction lpRes; } SOrRestriction; typedef struct _SNotRestriction { ULONG ulReserved; LPSRestriction lpRes; } SNotRestriction; typedef struct _SContentRestriction { ULONG ulFuzzyLevel; ULONG ulPropTag; LPSPropValue lpProp; } SContentRestriction; typedef struct _SBitMaskRestriction { ULONG relBMR; ULONG ulPropTag; ULONG ulMask; } SBitMaskRestriction; typedef struct 
_SPropertyRestriction { ULONG relop; ULONG ulPropTag; LPSPropValue lpProp; } SPropertyRestriction; typedef struct _SComparePropsRestriction { ULONG relop; ULONG ulPropTag1; ULONG ulPropTag2; } SComparePropsRestriction; typedef struct _SSizeRestriction { ULONG relop; ULONG ulPropTag; ULONG cb; } SSizeRestriction; typedef struct _SExistRestriction { ULONG ulReserved1; ULONG ulPropTag; ULONG ulReserved2; } SExistRestriction; typedef struct _SSubRestriction { ULONG ulSubObject; LPSRestriction lpRes; } SSubRestriction; typedef struct _SCommentRestriction { ULONG cValues; LPSRestriction lpRes; LPSPropValue lpProp; } SCommentRestriction; typedef struct _SRestriction { ULONG rt; union { SComparePropsRestriction resCompareProps; SAndRestriction resAnd; SOrRestriction resOr; SNotRestriction resNot; SContentRestriction resContent; SPropertyRestriction resProperty; SBitMaskRestriction resBitMask; SSizeRestriction resSize; SExistRestriction resExist; SSubRestriction resSub; SCommentRestriction resComment; } res; } SRestriction; #define TBL_ALL_COLUMNS ((ULONG) 0x00000001) #define TBL_LEAF_ROW ((ULONG) 1) #define TBL_EMPTY_CATEGORY ((ULONG) 2) #define TBL_EXPANDED_CATEGORY ((ULONG) 3) #define TBL_COLLAPSED_CATEGORY ((ULONG) 4) #define TBL_NOWAIT ((ULONG) 0x00000001) #define TBL_ASYNC ((ULONG) 0x00000001) #define TBL_BATCH ((ULONG) 0x00000002) #define DIR_BACKWARD ((ULONG) 0x00000001) #define TBL_NOADVANCE ((ULONG) 0x00000001) #define MAPI_IMAPITABLE_METHODS(IPURE) MAPIMETHOD(GetLastError) (THIS_ HRESULT hResult,ULONG ulFlags,LPMAPIERROR *lppMAPIError) IPURE; MAPIMETHOD(Advise) (THIS_ ULONG ulEventMask,LPMAPIADVISESINK lpAdviseSink,ULONG *lpulConnection) IPURE; MAPIMETHOD(Unadvise) (THIS_ ULONG ulConnection) IPURE; MAPIMETHOD(GetStatus) (THIS_ ULONG *lpulTableStatus,ULONG *lpulTableType) IPURE; MAPIMETHOD(SetColumns) (THIS_ LPSPropTagArray lpPropTagArray,ULONG ulFlags) IPURE; MAPIMETHOD(QueryColumns) (THIS_ ULONG ulFlags,LPSPropTagArray *lpPropTagArray) IPURE; MAPIMETHOD(GetRowCount) (THIS_ ULONG ulFlags,ULONG *lpulCount) IPURE; MAPIMETHOD(SeekRow) (THIS_ BOOKMARK bkOrigin,LONG lRowCount,LONG *lplRowsSought) IPURE; MAPIMETHOD(SeekRowApprox) (THIS_ ULONG ulNumerator,ULONG ulDenominator) IPURE; MAPIMETHOD(QueryPosition) (THIS_ ULONG *lpulRow,ULONG *lpulNumerator,ULONG *lpulDenominator) IPURE; MAPIMETHOD(FindRow) (THIS_ LPSRestriction lpRestriction,BOOKMARK bkOrigin,ULONG ulFlags) IPURE; MAPIMETHOD(Restrict) (THIS_ LPSRestriction lpRestriction,ULONG ulFlags) IPURE; MAPIMETHOD(CreateBookmark) (THIS_ BOOKMARK *lpbkPosition) IPURE; MAPIMETHOD(FreeBookmark) (THIS_ BOOKMARK bkPosition) IPURE; MAPIMETHOD(SortTable) (THIS_ LPSSortOrderSet lpSortCriteria,ULONG ulFlags) IPURE; MAPIMETHOD(QuerySortOrder) (THIS_ LPSSortOrderSet *lppSortCriteria) IPURE; MAPIMETHOD(QueryRows) (THIS_ LONG lRowCount,ULONG ulFlags,LPSRowSet *lppRows) IPURE; MAPIMETHOD(Abort) (THIS) IPURE; MAPIMETHOD(ExpandRow) (THIS_ ULONG cbInstanceKey,LPBYTE pbInstanceKey,ULONG ulRowCount,ULONG ulFlags,LPSRowSet *lppRows,ULONG *lpulMoreRows) IPURE; MAPIMETHOD(CollapseRow) (THIS_ ULONG cbInstanceKey,LPBYTE pbInstanceKey,ULONG ulFlags,ULONG *lpulRowCount) IPURE; MAPIMETHOD(WaitForCompletion) (THIS_ ULONG ulFlags,ULONG ulTimeout,ULONG *lpulTableStatus) IPURE; MAPIMETHOD(GetCollapseState) (THIS_ ULONG ulFlags,ULONG cbInstanceKey,LPBYTE lpbInstanceKey,ULONG *lpcbCollapseState,LPBYTE *lppbCollapseState) IPURE; MAPIMETHOD(SetCollapseState) (THIS_ ULONG ulFlags,ULONG cbCollapseState,LPBYTE pbCollapseState,BOOKMARK *lpbkLocation) IPURE; #undef INTERFACE #define 
INTERFACE IMAPITable DECLARE_MAPI_INTERFACE_(IMAPITable,IUnknown) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPITABLE_METHODS(PURE) }; #define PS_PROFILE_PROPERTIES_INIT { 0x98,0x15,0xAC,0x08,0xAA,0xB0,0x10,0x1A,0x8C,0x93,0x08,0x00,0x2B,0x2A,0x56,0xC2 } #define MAPI_IPROFSECT_METHODS(IPURE) #undef INTERFACE #define INTERFACE IProfSect DECLARE_MAPI_INTERFACE_(IProfSect,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IPROFSECT_METHODS(PURE) }; #define MAPI_STORE_PROVIDER ((ULONG) 33) #define MAPI_AB ((ULONG) 34) #define MAPI_AB_PROVIDER ((ULONG) 35) #define MAPI_TRANSPORT_PROVIDER ((ULONG) 36) #define MAPI_SPOOLER ((ULONG) 37) #define MAPI_PROFILE_PROVIDER ((ULONG) 38) #define MAPI_SUBSYSTEM ((ULONG) 39) #define MAPI_HOOK_PROVIDER ((ULONG) 40) #define STATUS_VALIDATE_STATE ((ULONG) 0x00000001) #define STATUS_SETTINGS_DIALOG ((ULONG) 0x00000002) #define STATUS_CHANGE_PASSWORD ((ULONG) 0x00000004) #define STATUS_FLUSH_QUEUES ((ULONG) 0x00000008) #define STATUS_DEFAULT_OUTBOUND ((ULONG) 0x00000001) #define STATUS_DEFAULT_STORE ((ULONG) 0x00000002) #define STATUS_PRIMARY_IDENTITY ((ULONG) 0x00000004) #define STATUS_SIMPLE_STORE ((ULONG) 0x00000008) #define STATUS_XP_PREFER_LAST ((ULONG) 0x00000010) #define STATUS_NO_PRIMARY_IDENTITY ((ULONG) 0x00000020) #define STATUS_NO_DEFAULT_STORE ((ULONG) 0x00000040) #define STATUS_TEMP_SECTION ((ULONG) 0x00000080) #define STATUS_OWN_STORE ((ULONG) 0x00000100) #define STATUS_NEED_IPM_TREE ((ULONG) 0x00000800) #define STATUS_PRIMARY_STORE ((ULONG) 0x00001000) #define STATUS_SECONDARY_STORE ((ULONG) 0x00002000) #define STATUS_AVAILABLE ((ULONG) 0x00000001) #define STATUS_OFFLINE ((ULONG) 0x00000002) #define STATUS_FAILURE ((ULONG) 0x00000004) #define STATUS_INBOUND_ENABLED ((ULONG) 0x00010000) #define STATUS_INBOUND_ACTIVE ((ULONG) 0x00020000) #define STATUS_INBOUND_FLUSH ((ULONG) 0x00040000) #define STATUS_OUTBOUND_ENABLED ((ULONG) 0x00100000) #define STATUS_OUTBOUND_ACTIVE ((ULONG) 0x00200000) #define STATUS_OUTBOUND_FLUSH ((ULONG) 0x00400000) #define STATUS_REMOTE_ACCESS ((ULONG) 0x00800000) #define SUPPRESS_UI ((ULONG) 0x00000001) #define REFRESH_XP_HEADER_CACHE ((ULONG) 0x00010000) #define PROCESS_XP_HEADER_CACHE ((ULONG) 0x00020000) #define FORCE_XP_CONNECT ((ULONG) 0x00040000) #define FORCE_XP_DISCONNECT ((ULONG) 0x00080000) #define CONFIG_CHANGED ((ULONG) 0x00100000) #define ABORT_XP_HEADER_OPERATION ((ULONG) 0x00200000) #define SHOW_XP_SESSION_UI ((ULONG) 0x00400000) #define UI_READONLY ((ULONG) 0x00000001) #define FLUSH_UPLOAD ((ULONG) 0x00000002) #define FLUSH_DOWNLOAD ((ULONG) 0x00000004) #define FLUSH_FORCE ((ULONG) 0x00000008) #define FLUSH_NO_UI ((ULONG) 0x00000010) #define FLUSH_ASYNC_OK ((ULONG) 0x00000020) #define MAPI_IMAPISTATUS_METHODS(IPURE) MAPIMETHOD(ValidateState) (THIS_ ULONG ulUIParam,ULONG ulFlags) IPURE; MAPIMETHOD(SettingsDialog) (THIS_ ULONG ulUIParam,ULONG ulFlags) IPURE; MAPIMETHOD(ChangePassword) (THIS_ LPTSTR lpOldPass,LPTSTR lpNewPass,ULONG ulFlags) IPURE; MAPIMETHOD(FlushQueues) (THIS_ ULONG ulUIParam,ULONG cbTargetTransport,LPENTRYID lpTargetTransport,ULONG ulFlags) IPURE; #undef INTERFACE #define INTERFACE IMAPIStatus DECLARE_MAPI_INTERFACE_(IMAPIStatus,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMAPISTATUS_METHODS(PURE) }; #define MAPI_BEST_ACCESS ((ULONG) 0x00000010) #define WAB_LOCAL_CONTAINERS 0x00100000 #define WAB_PROFILE_CONTENTS 0x00200000 #define CONVENIENT_DEPTH ((ULONG) 0x00000001) #define SEARCH_RUNNING 
((ULONG) 0x00000001) #define SEARCH_REBUILD ((ULONG) 0x00000002) #define SEARCH_RECURSIVE ((ULONG) 0x00000004) #define SEARCH_FOREGROUND ((ULONG) 0x00000008) #define STOP_SEARCH ((ULONG) 0x00000001) #define RESTART_SEARCH ((ULONG) 0x00000002) #define RECURSIVE_SEARCH ((ULONG) 0x00000004) #define SHALLOW_SEARCH ((ULONG) 0x00000008) #define FOREGROUND_SEARCH ((ULONG) 0x00000010) #define BACKGROUND_SEARCH ((ULONG) 0x00000020) #define MAPI_IMAPICONTAINER_METHODS(IPURE) MAPIMETHOD(GetContentsTable) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(GetHierarchyTable) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(OpenEntry) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,LPCIID lpInterface,ULONG ulFlags,ULONG *lpulObjType,LPUNKNOWN *lppUnk) IPURE; MAPIMETHOD(SetSearchCriteria) (THIS_ LPSRestriction lpRestriction,LPENTRYLIST lpContainerList,ULONG ulSearchFlags) IPURE; MAPIMETHOD(GetSearchCriteria) (THIS_ ULONG ulFlags,LPSRestriction *lppRestriction,LPENTRYLIST *lppContainerList,ULONG *lpulSearchState)IPURE; #undef INTERFACE #define INTERFACE IMAPIContainer DECLARE_MAPI_INTERFACE_(IMAPIContainer,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMAPICONTAINER_METHODS(PURE) }; typedef struct _flaglist { ULONG cFlags; ULONG ulFlag[MAPI_DIM]; } FlagList,*LPFlagList; #define AB_RECIPIENTS ((ULONG) 0x00000001) #define AB_SUBCONTAINERS ((ULONG) 0x00000002) #define AB_MODIFIABLE ((ULONG) 0x00000004) #define AB_UNMODIFIABLE ((ULONG) 0x00000008) #define AB_FIND_ON_OPEN ((ULONG) 0x00000010) #define AB_NOT_DEFAULT ((ULONG) 0x00000020) #define CREATE_CHECK_DUP_STRICT ((ULONG) 0x00000001) #define CREATE_CHECK_DUP_LOOSE ((ULONG) 0x00000002) #define CREATE_REPLACE ((ULONG) 0x00000004) #define CREATE_MERGE ((ULONG) 0x00000008) #define WAB_IGNORE_PROFILES 0x00800000 #define MAPI_UNRESOLVED ((ULONG) 0x00000000) #define MAPI_AMBIGUOUS ((ULONG) 0x00000001) #define MAPI_RESOLVED ((ULONG) 0x00000002) #define MAPI_IABCONTAINER_METHODS(IPURE) MAPIMETHOD(CreateEntry) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulCreateFlags,LPMAPIPROP *lppMAPIPropEntry) IPURE; MAPIMETHOD(CopyEntries) (THIS_ LPENTRYLIST lpEntries,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(DeleteEntries) (THIS_ LPENTRYLIST lpEntries,ULONG ulFlags) IPURE; MAPIMETHOD(ResolveNames) (THIS_ LPSPropTagArray lpPropTagArray,ULONG ulFlags,LPADRLIST lpAdrList,LPFlagList lpFlagList) IPURE; #undef INTERFACE #define INTERFACE IABContainer DECLARE_MAPI_INTERFACE_(IABContainer,IMAPIContainer) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMAPICONTAINER_METHODS(PURE) MAPI_IABCONTAINER_METHODS(PURE) }; #define MAPI_SEND_NO_RICH_INFO ((ULONG) 0x00010000) #define MAPI_DIAG(_code) ((LONG) _code) #define MAPI_DIAG_NO_DIAGNOSTIC MAPI_DIAG(-1) #define MAPI_DIAG_OR_NAME_UNRECOGNIZED MAPI_DIAG(0) #define MAPI_DIAG_OR_NAME_AMBIGUOUS MAPI_DIAG(1) #define MAPI_DIAG_MTS_CONGESTED MAPI_DIAG(2) #define MAPI_DIAG_LOOP_DETECTED MAPI_DIAG(3) #define MAPI_DIAG_RECIPIENT_UNAVAILABLE MAPI_DIAG(4) #define MAPI_DIAG_MAXIMUM_TIME_EXPIRED MAPI_DIAG(5) #define MAPI_DIAG_EITS_UNSUPPORTED MAPI_DIAG(6) #define MAPI_DIAG_CONTENT_TOO_LONG MAPI_DIAG(7) #define MAPI_DIAG_IMPRACTICAL_TO_CONVERT MAPI_DIAG(8) #define MAPI_DIAG_PROHIBITED_TO_CONVERT MAPI_DIAG(9) #define MAPI_DIAG_CONVERSION_UNSUBSCRIBED MAPI_DIAG(10) #define MAPI_DIAG_PARAMETERS_INVALID MAPI_DIAG(11) #define MAPI_DIAG_CONTENT_SYNTAX_IN_ERROR MAPI_DIAG(12) #define MAPI_DIAG_LENGTH_CONSTRAINT_VIOLATD 
MAPI_DIAG(13) #define MAPI_DIAG_NUMBER_CONSTRAINT_VIOLATD MAPI_DIAG(14) #define MAPI_DIAG_CONTENT_TYPE_UNSUPPORTED MAPI_DIAG(15) #define MAPI_DIAG_TOO_MANY_RECIPIENTS MAPI_DIAG(16) #define MAPI_DIAG_NO_BILATERAL_AGREEMENT MAPI_DIAG(17) #define MAPI_DIAG_CRITICAL_FUNC_UNSUPPORTED MAPI_DIAG(18) #define MAPI_DIAG_CONVERSION_LOSS_PROHIB MAPI_DIAG(19) #define MAPI_DIAG_LINE_TOO_LONG MAPI_DIAG(20) #define MAPI_DIAG_PAGE_TOO_LONG MAPI_DIAG(21) #define MAPI_DIAG_PICTORIAL_SYMBOL_LOST MAPI_DIAG(22) #define MAPI_DIAG_PUNCTUATION_SYMBOL_LOST MAPI_DIAG(23) #define MAPI_DIAG_ALPHABETIC_CHARACTER_LOST MAPI_DIAG(24) #define MAPI_DIAG_MULTIPLE_INFO_LOSSES MAPI_DIAG(25) #define MAPI_DIAG_REASSIGNMENT_PROHIBITED MAPI_DIAG(26) #define MAPI_DIAG_REDIRECTION_LOOP_DETECTED MAPI_DIAG(27) #define MAPI_DIAG_EXPANSION_PROHIBITED MAPI_DIAG(28) #define MAPI_DIAG_SUBMISSION_PROHIBITED MAPI_DIAG(29) #define MAPI_DIAG_EXPANSION_FAILED MAPI_DIAG(30) #define MAPI_DIAG_RENDITION_UNSUPPORTED MAPI_DIAG(31) #define MAPI_DIAG_MAIL_ADDRESS_INCORRECT MAPI_DIAG(32) #define MAPI_DIAG_MAIL_OFFICE_INCOR_OR_INVD MAPI_DIAG(33) #define MAPI_DIAG_MAIL_ADDRESS_INCOMPLETE MAPI_DIAG(34) #define MAPI_DIAG_MAIL_RECIPIENT_UNKNOWN MAPI_DIAG(35) #define MAPI_DIAG_MAIL_RECIPIENT_DECEASED MAPI_DIAG(36) #define MAPI_DIAG_MAIL_ORGANIZATION_EXPIRED MAPI_DIAG(37) #define MAPI_DIAG_MAIL_REFUSED MAPI_DIAG(38) #define MAPI_DIAG_MAIL_UNCLAIMED MAPI_DIAG(39) #define MAPI_DIAG_MAIL_RECIPIENT_MOVED MAPI_DIAG(40) #define MAPI_DIAG_MAIL_RECIPIENT_TRAVELLING MAPI_DIAG(41) #define MAPI_DIAG_MAIL_RECIPIENT_DEPARTED MAPI_DIAG(42) #define MAPI_DIAG_MAIL_NEW_ADDRESS_UNKNOWN MAPI_DIAG(43) #define MAPI_DIAG_MAIL_FORWARDING_UNWANTED MAPI_DIAG(44) #define MAPI_DIAG_MAIL_FORWARDING_PROHIB MAPI_DIAG(45) #define MAPI_DIAG_SECURE_MESSAGING_ERROR MAPI_DIAG(46) #define MAPI_DIAG_DOWNGRADING_IMPOSSIBLE MAPI_DIAG(47) #define MAPI_IMAILUSER_METHODS(IPURE) #undef INTERFACE #define INTERFACE IMailUser DECLARE_MAPI_INTERFACE_(IMailUser,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMAILUSER_METHODS(PURE) }; #define MAPI_IDISTLIST_METHODS(IPURE) MAPIMETHOD(CreateEntry) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulCreateFlags,LPMAPIPROP *lppMAPIPropEntry) IPURE; MAPIMETHOD(CopyEntries) (THIS_ LPENTRYLIST lpEntries,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(DeleteEntries) (THIS_ LPENTRYLIST lpEntries,ULONG ulFlags) IPURE; MAPIMETHOD(ResolveNames) (THIS_ LPSPropTagArray lpPropTagArray,ULONG ulFlags,LPADRLIST lpAdrList,LPFlagList lpFlagList) IPURE; #undef INTERFACE #define INTERFACE IDistList DECLARE_MAPI_INTERFACE_(IDistList,IMAPIContainer) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMAPICONTAINER_METHODS(PURE) MAPI_IDISTLIST_METHODS(PURE) }; #define FOLDER_ROOT ((ULONG) 0x00000000) #define FOLDER_GENERIC ((ULONG) 0x00000001) #define FOLDER_SEARCH ((ULONG) 0x00000002) #define MESSAGE_MOVE ((ULONG) 0x00000001) #define MESSAGE_DIALOG ((ULONG) 0x00000002) #define OPEN_IF_EXISTS ((ULONG) 0x00000001) #define DEL_MESSAGES ((ULONG) 0x00000001) #define FOLDER_DIALOG ((ULONG) 0x00000002) #define DEL_FOLDERS ((ULONG) 0x00000004) #define DEL_ASSOCIATED ((ULONG) 0x00000008) #define FOLDER_MOVE ((ULONG) 0x00000001) #define COPY_SUBFOLDERS ((ULONG) 0x00000010) #define GENERATE_RECEIPT_ONLY ((ULONG) 0x00000010) #define MSGSTATUS_HIGHLIGHTED ((ULONG) 0x00000001) #define MSGSTATUS_TAGGED ((ULONG) 0x00000002) #define MSGSTATUS_HIDDEN ((ULONG) 0x00000004) #define MSGSTATUS_DELMARKED 
((ULONG) 0x00000008) #define MSGSTATUS_REMOTE_DOWNLOAD ((ULONG) 0x00001000) #define MSGSTATUS_REMOTE_DELETE ((ULONG) 0x00002000) #define RECURSIVE_SORT ((ULONG) 0x00000002) #define FLDSTATUS_HIGHLIGHTED ((ULONG) 0x00000001) #define FLDSTATUS_TAGGED ((ULONG) 0x00000002) #define FLDSTATUS_HIDDEN ((ULONG) 0x00000004) #define FLDSTATUS_DELMARKED ((ULONG) 0x00000008) #define MAPI_IMAPIFOLDER_METHODS(IPURE) MAPIMETHOD(CreateMessage) (THIS_ LPCIID lpInterface,ULONG ulFlags,LPMESSAGE *lppMessage) IPURE; MAPIMETHOD(CopyMessages) (THIS_ LPENTRYLIST lpMsgList,LPCIID lpInterface,LPVOID lpDestFolder,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(DeleteMessages) (THIS_ LPENTRYLIST lpMsgList,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(CreateFolder) (THIS_ ULONG ulFolderType,LPTSTR lpszFolderName,LPTSTR lpszFolderComment,LPCIID lpInterface,ULONG ulFlags,LPMAPIFOLDER *lppFolder) IPURE; MAPIMETHOD(CopyFolder) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,LPCIID lpInterface,LPVOID lpDestFolder,LPTSTR lpszNewFolderName,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(DeleteFolder) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(SetReadFlags) (THIS_ LPENTRYLIST lpMsgList,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(GetMessageStatus) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulFlags,ULONG *lpulMessageStatus) IPURE; MAPIMETHOD(SetMessageStatus) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulNewStatus,ULONG ulNewStatusMask,ULONG *lpulOldStatus) IPURE; MAPIMETHOD(SaveContentsSort) (THIS_ LPSSortOrderSet lpSortCriteria,ULONG ulFlags) IPURE; MAPIMETHOD(EmptyFolder) (THIS_ ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; #undef INTERFACE #define INTERFACE IMAPIFolder DECLARE_MAPI_INTERFACE_(IMAPIFolder,IMAPIContainer) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMAPICONTAINER_METHODS(PURE) MAPI_IMAPIFOLDER_METHODS(PURE) }; #define STORE_ENTRYID_UNIQUE ((ULONG) 0x00000001) #define STORE_READONLY ((ULONG) 0x00000002) #define STORE_SEARCH_OK ((ULONG) 0x00000004) #define STORE_MODIFY_OK ((ULONG) 0x00000008) #define STORE_CREATE_OK ((ULONG) 0x00000010) #define STORE_ATTACH_OK ((ULONG) 0x00000020) #define STORE_OLE_OK ((ULONG) 0x00000040) #define STORE_SUBMIT_OK ((ULONG) 0x00000080) #define STORE_NOTIFY_OK ((ULONG) 0x00000100) #define STORE_MV_PROPS_OK ((ULONG) 0x00000200) #define STORE_CATEGORIZE_OK ((ULONG) 0x00000400) #define STORE_RTF_OK ((ULONG) 0x00000800) #define STORE_RESTRICTION_OK ((ULONG) 0x00001000) #define STORE_SORT_OK ((ULONG) 0x00002000) #define STORE_HAS_SEARCHES ((ULONG) 0x01000000) #define LOGOFF_NO_WAIT ((ULONG) 0x00000001) #define LOGOFF_ORDERLY ((ULONG) 0x00000002) #define LOGOFF_PURGE ((ULONG) 0x00000004) #define LOGOFF_ABORT ((ULONG) 0x00000008) #define LOGOFF_QUIET ((ULONG) 0x00000010) #define LOGOFF_COMPLETE ((ULONG) 0x00010000) #define LOGOFF_INBOUND ((ULONG) 0x00020000) #define LOGOFF_OUTBOUND ((ULONG) 0x00040000) #define LOGOFF_OUTBOUND_QUEUE ((ULONG) 0x00080000) #define MSG_LOCKED ((ULONG) 0x00000001) #define MSG_UNLOCKED ((ULONG) 0x00000000) #define FOLDER_IPM_SUBTREE_VALID ((ULONG) 0x00000001) #define FOLDER_IPM_INBOX_VALID ((ULONG) 0x00000002) #define FOLDER_IPM_OUTBOX_VALID ((ULONG) 0x00000004) #define FOLDER_IPM_WASTEBASKET_VALID ((ULONG) 0x00000008) #define FOLDER_IPM_SENTMAIL_VALID ((ULONG) 0x00000010) #define FOLDER_VIEWS_VALID ((ULONG) 
0x00000020) #define FOLDER_COMMON_VIEWS_VALID ((ULONG) 0x00000040) #define FOLDER_FINDER_VALID ((ULONG) 0x00000080) #define MAPI_IMSGSTORE_METHODS(IPURE) MAPIMETHOD(Advise) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulEventMask,LPMAPIADVISESINK lpAdviseSink,ULONG *lpulConnection) IPURE; MAPIMETHOD(Unadvise) (THIS_ ULONG ulConnection) IPURE; MAPIMETHOD(CompareEntryIDs) (THIS_ ULONG cbEntryID1,LPENTRYID lpEntryID1,ULONG cbEntryID2,LPENTRYID lpEntryID2,ULONG ulFlags,ULONG *lpulResult) IPURE; MAPIMETHOD(OpenEntry) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,LPCIID lpInterface,ULONG ulFlags,ULONG *lpulObjType,LPUNKNOWN *lppUnk) IPURE; MAPIMETHOD(SetReceiveFolder) (THIS_ LPTSTR lpszMessageClass,ULONG ulFlags,ULONG cbEntryID,LPENTRYID lpEntryID) IPURE; MAPIMETHOD(GetReceiveFolder) (THIS_ LPTSTR lpszMessageClass,ULONG ulFlags,ULONG *lpcbEntryID,LPENTRYID *lppEntryID,LPTSTR *lppszExplicitClass) IPURE; MAPIMETHOD(GetReceiveFolderTable) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(StoreLogoff) (THIS_ ULONG *lpulFlags) IPURE; MAPIMETHOD(AbortSubmit) (THIS_ ULONG cbEntryID,LPENTRYID lpEntryID,ULONG ulFlags) IPURE; MAPIMETHOD(GetOutgoingQueue) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(SetLockState) (THIS_ LPMESSAGE lpMessage,ULONG ulLockState) IPURE; MAPIMETHOD(FinishedMsg) (THIS_ ULONG ulFlags,ULONG cbEntryID,LPENTRYID lpEntryID) IPURE; MAPIMETHOD(NotifyNewMail) (THIS_ LPNOTIFICATION lpNotification) IPURE; #undef INTERFACE #define INTERFACE IMsgStore DECLARE_MAPI_INTERFACE_(IMsgStore,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMSGSTORE_METHODS(PURE) }; #define FORCE_SUBMIT ((ULONG) 0x00000001) #define MSGFLAG_READ ((ULONG) 0x00000001) #define MSGFLAG_UNMODIFIED ((ULONG) 0x00000002) #define MSGFLAG_SUBMIT ((ULONG) 0x00000004) #define MSGFLAG_UNSENT ((ULONG) 0x00000008) #define MSGFLAG_HASATTACH ((ULONG) 0x00000010) #define MSGFLAG_FROMME ((ULONG) 0x00000020) #define MSGFLAG_ASSOCIATED ((ULONG) 0x00000040) #define MSGFLAG_RESEND ((ULONG) 0x00000080) #define MSGFLAG_RN_PENDING ((ULONG) 0x00000100) #define MSGFLAG_NRN_PENDING ((ULONG) 0x00000200) #define SUBMITFLAG_LOCKED ((ULONG) 0x00000001) #define SUBMITFLAG_PREPROCESS ((ULONG) 0x00000002) #define MODRECIP_ADD ((ULONG) 0x00000002) #define MODRECIP_MODIFY ((ULONG) 0x00000004) #define MODRECIP_REMOVE ((ULONG) 0x00000008) #define SUPPRESS_RECEIPT ((ULONG) 0x00000001) #define CLEAR_READ_FLAG ((ULONG) 0x00000004) #define GENERATE_RECEIPT_ONLY ((ULONG) 0x00000010) #define CLEAR_RN_PENDING ((ULONG) 0x00000020) #define CLEAR_NRN_PENDING ((ULONG) 0x00000040) #define ATTACH_DIALOG ((ULONG) 0x00000001) #define SECURITY_SIGNED ((ULONG) 0x00000001) #define SECURITY_ENCRYPTED ((ULONG) 0x00000002) #define PRIO_URGENT ((__LONG32) 1) #define PRIO_NORMAL ((__LONG32) 0) #define PRIO_NONURGENT ((__LONG32) -1) #define SENSITIVITY_NONE ((ULONG) 0x00000000) #define SENSITIVITY_PERSONAL ((ULONG) 0x00000001) #define SENSITIVITY_PRIVATE ((ULONG) 0x00000002) #define SENSITIVITY_COMPANY_CONFIDENTIAL ((ULONG) 0x00000003) #define IMPORTANCE_LOW ((__LONG32) 0) #define IMPORTANCE_NORMAL ((__LONG32) 1) #define IMPORTANCE_HIGH ((__LONG32) 2) #define MAPI_IMESSAGE_METHODS(IPURE) MAPIMETHOD(GetAttachmentTable) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(OpenAttach) (THIS_ ULONG ulAttachmentNum,LPCIID lpInterface,ULONG ulFlags,LPATTACH *lppAttach) IPURE; MAPIMETHOD(CreateAttach) (THIS_ LPCIID lpInterface,ULONG ulFlags,ULONG *lpulAttachmentNum,LPATTACH *lppAttach) IPURE; 
MAPIMETHOD(DeleteAttach) (THIS_ ULONG ulAttachmentNum,ULONG ulUIParam,LPMAPIPROGRESS lpProgress,ULONG ulFlags) IPURE; MAPIMETHOD(GetRecipientTable) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(ModifyRecipients) (THIS_ ULONG ulFlags,LPADRLIST lpMods) IPURE; MAPIMETHOD(SubmitMessage) (THIS_ ULONG ulFlags) IPURE; MAPIMETHOD(SetReadFlag) (THIS_ ULONG ulFlags) IPURE; #undef INTERFACE #define INTERFACE IMessage DECLARE_MAPI_INTERFACE_(IMessage,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IMESSAGE_METHODS(PURE) }; #define NO_ATTACHMENT ((ULONG) 0x00000000) #define ATTACH_BY_VALUE ((ULONG) 0x00000001) #define ATTACH_BY_REFERENCE ((ULONG) 0x00000002) #define ATTACH_BY_REF_RESOLVE ((ULONG) 0x00000003) #define ATTACH_BY_REF_ONLY ((ULONG) 0x00000004) #define ATTACH_EMBEDDED_MSG ((ULONG) 0x00000005) #define ATTACH_OLE ((ULONG) 0x00000006) #define MAPI_IATTACH_METHODS(IPURE) #undef INTERFACE #define INTERFACE IAttach DECLARE_MAPI_INTERFACE_(IAttach,IMAPIProp) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPIPROP_METHODS(PURE) MAPI_IATTACH_METHODS(PURE) }; #define GET_ADRPARM_VERSION(ulFlags) (((ULONG)ulFlags) & 0xF0000000) #define SET_ADRPARM_VERSION(ulFlags,ulVersion) (((ULONG)ulVersion) | (((ULONG)ulFlags) & 0x0FFFFFFF)) #define ADRPARM_HELP_CTX ((ULONG) 0x00000000) #define DIALOG_MODAL ((ULONG) 0x00000001) #define DIALOG_SDI ((ULONG) 0x00000002) #define DIALOG_OPTIONS ((ULONG) 0x00000004) #define ADDRESS_ONE ((ULONG) 0x00000008) #define AB_SELECTONLY ((ULONG) 0x00000010) #define AB_RESOLVE ((ULONG) 0x00000020) #define DT_MAILUSER ((ULONG) 0x00000000) #define DT_DISTLIST ((ULONG) 0x00000001) #define DT_FORUM ((ULONG) 0x00000002) #define DT_AGENT ((ULONG) 0x00000003) #define DT_ORGANIZATION ((ULONG) 0x00000004) #define DT_PRIVATE_DISTLIST ((ULONG) 0x00000005) #define DT_REMOTE_MAILUSER ((ULONG) 0x00000006) #define DT_MODIFIABLE ((ULONG) 0x00010000) #define DT_GLOBAL ((ULONG) 0x00020000) #define DT_LOCAL ((ULONG) 0x00030000) #define DT_WAN ((ULONG) 0x00040000) #define DT_NOT_SPECIFIC ((ULONG) 0x00050000) #define DT_FOLDER ((ULONG) 0x01000000) #define DT_FOLDER_LINK ((ULONG) 0x02000000) typedef WINBOOL (WINAPI ACCELERATEABSDI)(ULONG ulUIParam,LPVOID lpvmsg); typedef ACCELERATEABSDI *LPFNABSDI; typedef void (WINAPI DISMISSMODELESS)(ULONG ulUIParam,LPVOID lpvContext); typedef DISMISSMODELESS *LPFNDISMISS; typedef SCODE (WINAPI *LPFNBUTTON)(ULONG ulUIParam,LPVOID lpvContext,ULONG cbEntryID,LPENTRYID lpSelection,ULONG ulFlags); typedef struct _ADRPARM { ULONG cbABContEntryID; LPENTRYID lpABContEntryID; ULONG ulFlags; LPVOID lpReserved; ULONG ulHelpContext; LPTSTR lpszHelpFileName; LPFNABSDI lpfnABSDI; LPFNDISMISS lpfnDismiss; LPVOID lpvDismissContext; LPTSTR lpszCaption; LPTSTR lpszNewEntryTitle; LPTSTR lpszDestWellsTitle; ULONG cDestFields; ULONG nDestFieldFocus; LPTSTR *lppszDestTitles; ULONG *lpulDestComps; LPSRestriction lpContRestriction; LPSRestriction lpHierRestriction; } ADRPARM,*LPADRPARM; #define MAPI_ONE_OFF_NO_RICH_INFO 0x0001 #define MAPI_DEFERRED_ERRORS ((ULONG) 0x00000008) #define MAPI_ASSOCIATED ((ULONG) 0x00000040) #define MDB_NO_DIALOG ((ULONG) 0x00000001) #define MDB_WRITE ((ULONG) 0x00000004) #define MDB_TEMPORARY ((ULONG) 0x00000020) #define MDB_NO_MAIL ((ULONG) 0x00000080) #define AB_NO_DIALOG ((ULONG) 0x00000001) #define MAPI_ENABLED ((ULONG) 0x00000000) #define MAPI_DISABLED ((ULONG) 0x00000001) #define MAPI_IMAPICONTROL_METHODS(IPURE) MAPIMETHOD(GetLastError) (THIS_ HRESULT hResult,ULONG ulFlags,LPMAPIERROR 
*lppMAPIError) IPURE; MAPIMETHOD(Activate) (THIS_ ULONG ulFlags,ULONG ulUIParam) IPURE; MAPIMETHOD(GetState) (THIS_ ULONG ulFlags,ULONG *lpulState) IPURE; #undef INTERFACE #define INTERFACE IMAPIControl DECLARE_MAPI_INTERFACE_(IMAPIControl,IUnknown) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IMAPICONTROL_METHODS(PURE) }; DECLARE_MAPI_INTERFACE_PTR(IMAPIControl,LPMAPICONTROL); #define DT_MULTILINE ((ULONG) 0x00000001) #define DT_EDITABLE ((ULONG) 0x00000002) #define DT_REQUIRED ((ULONG) 0x00000004) #define DT_SET_IMMEDIATE ((ULONG) 0x00000008) #define DT_PASSWORD_EDIT ((ULONG) 0x00000010) #define DT_ACCEPT_DBCS ((ULONG) 0x00000020) #define DT_SET_SELECTION ((ULONG) 0x00000040) #define DTCT_LABEL ((ULONG) 0x00000000) #define DTCT_EDIT ((ULONG) 0x00000001) #define DTCT_LBX ((ULONG) 0x00000002) #define DTCT_COMBOBOX ((ULONG) 0x00000003) #define DTCT_DDLBX ((ULONG) 0x00000004) #define DTCT_CHECKBOX ((ULONG) 0x00000005) #define DTCT_GROUPBOX ((ULONG) 0x00000006) #define DTCT_BUTTON ((ULONG) 0x00000007) #define DTCT_PAGE ((ULONG) 0x00000008) #define DTCT_RADIOBUTTON ((ULONG) 0x00000009) #define DTCT_MVLISTBOX ((ULONG) 0x0000000B) #define DTCT_MVDDLBX ((ULONG) 0x0000000C) typedef struct _DTBLLABEL { ULONG ulbLpszLabelName; ULONG ulFlags; } DTBLLABEL,*LPDTBLLABEL; #define SizedDtblLabel(n,u) struct _DTBLLABEL_ ## u { DTBLLABEL dtbllabel; TCHAR lpszLabelName[n]; } u typedef struct _DTBLEDIT { ULONG ulbLpszCharsAllowed; ULONG ulFlags; ULONG ulNumCharsAllowed; ULONG ulPropTag; } DTBLEDIT,*LPDTBLEDIT; #define SizedDtblEdit(n,u) struct _DTBLEDIT_ ## u { DTBLEDIT dtbledit; TCHAR lpszCharsAllowed[n]; } u #define MAPI_NO_HBAR ((ULONG) 0x00000001) #define MAPI_NO_VBAR ((ULONG) 0x00000002) typedef struct _DTBLLBX { ULONG ulFlags; ULONG ulPRSetProperty; ULONG ulPRTableName; } DTBLLBX,*LPDTBLLBX; typedef struct _DTBLCOMBOBOX { ULONG ulbLpszCharsAllowed; ULONG ulFlags; ULONG ulNumCharsAllowed; ULONG ulPRPropertyName; ULONG ulPRTableName; } DTBLCOMBOBOX,*LPDTBLCOMBOBOX; #define SizedDtblComboBox(n,u) struct _DTBLCOMBOBOX_ ## u { DTBLCOMBOBOX dtblcombobox; TCHAR lpszCharsAllowed[n]; } u typedef struct _DTBLDDLBX { ULONG ulFlags; ULONG ulPRDisplayProperty; ULONG ulPRSetProperty; ULONG ulPRTableName; } DTBLDDLBX,*LPDTBLDDLBX; typedef struct _DTBLCHECKBOX { ULONG ulbLpszLabel; ULONG ulFlags; ULONG ulPRPropertyName; } DTBLCHECKBOX,*LPDTBLCHECKBOX; #define SizedDtblCheckBox(n,u) struct _DTBLCHECKBOX_ ## u { DTBLCHECKBOX dtblcheckbox; TCHAR lpszLabel[n]; } u typedef struct _DTBLGROUPBOX { ULONG ulbLpszLabel; ULONG ulFlags; } DTBLGROUPBOX,*LPDTBLGROUPBOX; #define SizedDtblGroupBox(n,u) struct _DTBLGROUPBOX_ ## u { DTBLGROUPBOX dtblgroupbox; TCHAR lpszLabel[n]; } u typedef struct _DTBLBUTTON { ULONG ulbLpszLabel; ULONG ulFlags; ULONG ulPRControl; } DTBLBUTTON,*LPDTBLBUTTON; #define SizedDtblButton(n,u) struct _DTBLBUTTON_ ## u { DTBLBUTTON dtblbutton; TCHAR lpszLabel[n]; } u typedef struct _DTBLPAGE { ULONG ulbLpszLabel; ULONG ulFlags; ULONG ulbLpszComponent; ULONG ulContext; } DTBLPAGE,*LPDTBLPAGE; #define SizedDtblPage(n,n1,u) struct _DTBLPAGE_ ## u { DTBLPAGE dtblpage; TCHAR lpszLabel[n]; TCHAR lpszComponent[n1]; } u typedef struct _DTBLRADIOBUTTON { ULONG ulbLpszLabel; ULONG ulFlags; ULONG ulcButtons; ULONG ulPropTag; __LONG32 lReturnValue; } DTBLRADIOBUTTON,*LPDTBLRADIOBUTTON; #define SizedDtblRadioButton(n,u) struct _DTBLRADIOBUTTON_ ## u { DTBLRADIOBUTTON dtblradiobutton; TCHAR lpszLabel[n]; } u typedef struct _DTBLMVLISTBOX { ULONG ulFlags; ULONG ulMVPropTag; } DTBLMVLISTBOX,*LPDTBLMVLISTBOX; typedef 
struct _DTBLMVDDLBX { ULONG ulFlags; ULONG ulMVPropTag; } DTBLMVDDLBX,*LPDTBLMVDDLBX; #define UI_SERVICE 0x00000002 #define SERVICE_UI_ALWAYS 0x00000002 #define SERVICE_UI_ALLOWED 0x00000010 #define UI_CURRENT_PROVIDER_FIRST 0x00000004 #define MAPI_IPROVIDERADMIN_METHODS(IPURE) MAPIMETHOD(GetLastError) (THIS_ HRESULT hResult,ULONG ulFlags,LPMAPIERROR *lppMAPIError) IPURE; MAPIMETHOD(GetProviderTable) (THIS_ ULONG ulFlags,LPMAPITABLE *lppTable) IPURE; MAPIMETHOD(CreateProvider) (THIS_ LPTSTR lpszProvider,ULONG cValues,LPSPropValue lpProps,ULONG ulUIParam,ULONG ulFlags,MAPIUID *lpUID) IPURE; MAPIMETHOD(DeleteProvider) (THIS_ LPMAPIUID lpUID) IPURE; MAPIMETHOD(OpenProfileSection) (THIS_ LPMAPIUID lpUID,LPCIID lpInterface,ULONG ulFlags,LPPROFSECT *lppProfSect) IPURE; #undef INTERFACE #define INTERFACE IProviderAdmin DECLARE_MAPI_INTERFACE_(IProviderAdmin,IUnknown) { BEGIN_INTERFACE MAPI_IUNKNOWN_METHODS(PURE) MAPI_IPROVIDERADMIN_METHODS(PURE) }; typedef HANDLE HANDLE_16; typedef WPARAM WPARAM_16; #define EXTERN_C_16 #define WINAPI_16 #define CALLBACK_16 #define EXPORT_16 #define LOADDS_16 #define HUGEP_16 #define APIENTRY_16 WINAPI #define IF_WIN16(x) #define IF_NOT_WIN16(x) x #define IF_WIN32(x) x #ifdef __cplusplus } #endif #endif
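Client code consumes the definitions above mainly through the property-tag macros (PROP_TAG, PROP_TYPE, PROP_ID) and the Sized* helpers. The short C snippet below is a sketch of that usage and is not part of the header: it assumes a mingw-w64 (or other Windows) toolchain where <wabdefs.h> and the Windows headers are available, and the property id 0x0037 is used purely as an example value (it is conventionally the message-subject id, which real code would normally take from mapitags.h rather than spell out by hand).

#include <stdio.h>
#include <wabdefs.h>   /* the header excerpted above; it pulls in <windows.h> itself */

int main(void)
{
    /* Build a property tag from a type and an id, then take it apart again. */
    ULONG tag = PROP_TAG(PT_UNICODE, 0x0037);
    printf("type=%lu id=0x%04lx\n",
           (unsigned long)PROP_TYPE(tag),   /* 31 == PT_UNICODE            */
           (unsigned long)PROP_ID(tag));    /* 0x0037, the id passed in    */

    /* Declare a fixed-size tag array with the Sized... pattern; cValues must
       match the number of slots actually used. */
    SizedSPropTagArray(2, tags) = { 2, { PROP_TAG(PT_UNICODE, 0x0037), PR_NULL } };

    return (int)(tags.cValues - 2);   /* 0 on success */
}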
{ "pile_set_name": "Github" }
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!159 &1
EditorSettings:
  m_ObjectHideFlags: 0
  serializedVersion: 9
  m_ExternalVersionControlSupport: Hidden Meta Files
  m_SerializationMode: 2
  m_LineEndingsForNewScripts: 1
  m_DefaultBehaviorMode: 1
  m_PrefabRegularEnvironment: {fileID: 0}
  m_PrefabUIEnvironment: {fileID: 0}
  m_SpritePackerMode: 0
  m_SpritePackerPaddingPower: 1
  m_EtcTextureCompressorBehavior: 0
  m_EtcTextureFastCompressor: 2
  m_EtcTextureNormalCompressor: 2
  m_EtcTextureBestCompressor: 5
  m_ProjectGenerationIncludedExtensions: txt;xml;fnt;cd;asmdef;asmref;rsp;asmref
  m_ProjectGenerationRootNamespace: 
  m_CollabEditorSettings:
    inProgressEnabled: 1
  m_EnableTextureStreamingInEditMode: 1
  m_EnableTextureStreamingInPlayMode: 1
  m_AsyncShaderCompilation: 1
  m_EnterPlayModeOptionsEnabled: 0
  m_EnterPlayModeOptions: 3
  m_ShowLightmapResolutionOverlay: 1
  m_UseLegacyProbeSampleCount: 1
  m_AssetPipelineMode: 1
  m_CacheServerMode: 0
  m_CacheServerEndpoint: 
  m_CacheServerNamespacePrefix: default
  m_CacheServerEnableDownload: 1
  m_CacheServerEnableUpload: 1
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.stratos.autoscaler.context.cluster; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.stratos.autoscaler.context.InstanceContext; import org.apache.stratos.autoscaler.context.partition.ClusterLevelPartitionContext; import org.apache.stratos.autoscaler.pojo.policy.autoscale.LoadAverage; import org.apache.stratos.autoscaler.pojo.policy.autoscale.MemoryConsumption; import org.apache.stratos.autoscaler.pojo.policy.autoscale.RequestsInFlight; import org.apache.stratos.autoscaler.rule.AutoscalerRuleEvaluator; import org.apache.stratos.common.constants.StratosConstants; import org.apache.stratos.messaging.domain.topology.Member; import org.drools.runtime.StatefulKnowledgeSession; import org.drools.runtime.rule.FactHandle; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /* * It holds the runtime data of a VM cluster */ public class ClusterInstanceContext extends InstanceContext { private static final Log log = LogFactory.getLog(ClusterInstanceContext.class); //partition algorithm private final String partitionAlgorithm; // Map<PartitionId, Partition Context> protected Map<String, ClusterLevelPartitionContext> partitionCtxts; //boolean values to keep whether the requests in flight parameters are reset or not private boolean rifReset, averageRifReset, gradientRifReset, secondDerivativeRifRest; //boolean values to keep whether the memory consumption parameters are reset or not private boolean memoryConsumptionReset, averageMemoryConsumptionReset, gradientMemoryConsumptionReset, secondDerivativeMemoryConsumptionRest; //boolean values to keep whether the load average parameters are reset or not private boolean loadAverageReset, averageLoadAverageReset, gradientLoadAverageReset, secondDerivativeLoadAverageRest; //boolean values to keep whether average requests served per instance parameters are reset or not private boolean averageRequestServedPerInstanceReset; //Following information will keep events details private RequestsInFlight requestsInFlight; private MemoryConsumption memoryConsumption; private LoadAverage loadAverage; private int scaleDownRequestsCount = 0; private float averageRequestsServedPerInstance; private float requestsServedPerInstance; private int minInstanceCount = 0, maxInstanceCount = 0; private int requiredInstanceCountBasedOnStats; private int requiredInstanceCountBasedOnDependencies; //details required for partition selection algorithms private int currentPartitionIndex; private String networkPartitionId; private String clusterId; private boolean hasScalingDependants; private boolean groupScalingEnabledSubtree; private StatefulKnowledgeSession minCheckKnowledgeSession; private 
StatefulKnowledgeSession maxCheckKnowledgeSession; private StatefulKnowledgeSession obsoleteCheckKnowledgeSession; private StatefulKnowledgeSession scaleCheckKnowledgeSession; private StatefulKnowledgeSession dependentScaleCheckKnowledgeSession; private AutoscalerRuleEvaluator autoscalerRuleEvaluator; private FactHandle minCheckFactHandle; private FactHandle maxCheckFactHandle; private FactHandle obsoleteCheckFactHandle; private FactHandle scaleCheckFactHandle; private FactHandle dependentScaleCheckFactHandle; public ClusterInstanceContext(String clusterInstanceId, String partitionAlgo, int min, int max, String networkPartitionId, String clusterId, boolean hasScalingDependants, boolean groupScalingEnabledSubtree) { super(clusterInstanceId); this.networkPartitionId = networkPartitionId; this.clusterId = clusterId; this.minInstanceCount = min; this.maxInstanceCount = max; this.partitionAlgorithm = partitionAlgo; partitionCtxts = new HashMap<String, ClusterLevelPartitionContext>(); requestsInFlight = new RequestsInFlight(); loadAverage = new LoadAverage(); memoryConsumption = new MemoryConsumption(); requiredInstanceCountBasedOnStats = minInstanceCount; requiredInstanceCountBasedOnDependencies = minInstanceCount; this.hasScalingDependants = hasScalingDependants; this.groupScalingEnabledSubtree = groupScalingEnabledSubtree; autoscalerRuleEvaluator = AutoscalerRuleEvaluator.getInstance(); this.obsoleteCheckKnowledgeSession = autoscalerRuleEvaluator.getStatefulSession( StratosConstants.OBSOLETE_CHECK_DROOL_FILE); this.scaleCheckKnowledgeSession = autoscalerRuleEvaluator.getStatefulSession( StratosConstants.SCALE_CHECK_DROOL_FILE); this.minCheckKnowledgeSession = autoscalerRuleEvaluator.getStatefulSession( StratosConstants.MIN_CHECK_DROOL_FILE); this.maxCheckKnowledgeSession = autoscalerRuleEvaluator.getStatefulSession( StratosConstants.MAX_CHECK_DROOL_FILE); this.dependentScaleCheckKnowledgeSession = autoscalerRuleEvaluator.getStatefulSession( StratosConstants.DEPENDENT_SCALE_CHECK_DROOL_FILE); } public List<ClusterLevelPartitionContext> getPartitionCtxts() { return new ArrayList<ClusterLevelPartitionContext>(partitionCtxts.values()); } public void setPartitionCtxt(Map<String, ClusterLevelPartitionContext> partitionCtxt) { this.partitionCtxts = partitionCtxt; } // public ClusterLevelPartitionContext getNetworkPartitionCtxt(String PartitionId) { // return partitionCtxts.get(PartitionId); // } public ClusterLevelPartitionContext[] getPartitionCtxtsAsAnArray() { return partitionCtxts.values().toArray(new ClusterLevelPartitionContext[partitionCtxts.size()]); } public boolean partitionCtxtAvailable(String partitionId) { return partitionCtxts.containsKey(partitionId); } public void addPartitionCtxt(ClusterLevelPartitionContext ctxt) { this.partitionCtxts.put(ctxt.getPartitionId(), ctxt); } public void removePartitionCtxt(String partitionId) { if (partitionCtxts.containsKey(partitionId)) { partitionCtxts.remove(partitionId); } } public ClusterLevelPartitionContext getPartitionCtxt(String id) { return partitionCtxts.get(id); } public ClusterLevelPartitionContext getPartitionCtxt(Member member) { log.info("Getting [Partition] " + member.getPartitionId()); String partitionId = member.getPartitionId(); return partitionCtxts.get(partitionId); } public int getActiveMemberCount() { int activeMemberCount = 0; for (ClusterLevelPartitionContext partitionContext : partitionCtxts.values()) { activeMemberCount += partitionContext.getActiveMemberCount(); } return activeMemberCount; } public int 
getPendingMemberCount() { int activeMemberCount = 0; for (ClusterLevelPartitionContext partitionContext : partitionCtxts.values()) { activeMemberCount += partitionContext.getPendingMembers().size(); } return activeMemberCount; } public int getNonTerminatedMemberCount() { int nonTerminatedMemberCount = 0; for (ClusterLevelPartitionContext partitionContext : partitionCtxts.values()) { nonTerminatedMemberCount += partitionContext.getNonTerminatedMemberCount(); } return nonTerminatedMemberCount; } public int getMinInstanceCount() { return minInstanceCount; } public void setMinInstanceCount(int minInstanceCount) { this.minInstanceCount = minInstanceCount; } public int getMaxInstanceCount() { return maxInstanceCount; } public void setMaxInstanceCount(int maxInstanceCount) { this.maxInstanceCount = maxInstanceCount; } @Override public String toString() { return "NetworkPartitionContext [id=" + id + "partitionAlgorithm=" + partitionAlgorithm + ", minInstanceCount=" + minInstanceCount + ", maxInstanceCount=" + maxInstanceCount + "]"; } public int getCurrentPartitionIndex() { return currentPartitionIndex; } public void setCurrentPartitionIndex(int currentPartitionIndex) { this.currentPartitionIndex = currentPartitionIndex; } public float getAverageRequestsServedPerInstance() { return averageRequestsServedPerInstance; } public void setAverageRequestsServedPerInstance(float averageRequestServedPerInstance) { this.averageRequestsServedPerInstance = averageRequestServedPerInstance; averageRequestServedPerInstanceReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Average Requesets Served Per Instance stats are reset, ready to do scale check " + "[network partition] %s", this.id)); } } public float getRequestsServedPerInstance() { return requestsServedPerInstance; } public float getAverageRequestsInFlight() { return requestsInFlight.getAverage(); } public void setAverageRequestsInFlight(float averageRequestsInFlight) { requestsInFlight.setAverage(averageRequestsInFlight); averageRifReset = true; if (secondDerivativeRifRest && gradientRifReset) { rifReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Requests in flights stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public float getRequestsInFlightSecondDerivative() { return requestsInFlight.getSecondDerivative(); } public void setRequestsInFlightSecondDerivative(float requestsInFlightSecondDerivative) { requestsInFlight.setSecondDerivative(requestsInFlightSecondDerivative); secondDerivativeRifRest = true; if (averageRifReset && gradientRifReset) { rifReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Requests in flights stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public float getRequestsInFlightGradient() { return requestsInFlight.getGradient(); } public void setRequestsInFlightGradient(float requestsInFlightGradient) { requestsInFlight.setGradient(requestsInFlightGradient); gradientRifReset = true; if (secondDerivativeRifRest && averageRifReset) { rifReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Requests in flights stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public boolean isRifReset() { return rifReset; } public void setRifReset(boolean rifReset) { this.rifReset = rifReset; this.averageRifReset = rifReset; this.gradientRifReset = rifReset; this.secondDerivativeRifRest = rifReset; } public float getAverageMemoryConsumption() { return memoryConsumption.getAverage(); } 
public void setAverageMemoryConsumption(float averageMemoryConsumption) { memoryConsumption.setAverage(averageMemoryConsumption); averageMemoryConsumptionReset = true; if (secondDerivativeMemoryConsumptionRest && gradientMemoryConsumptionReset) { memoryConsumptionReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Memory consumption stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public float getMemoryConsumptionSecondDerivative() { return memoryConsumption.getSecondDerivative(); } public void setMemoryConsumptionSecondDerivative(float memoryConsumptionSecondDerivative) { memoryConsumption.setSecondDerivative(memoryConsumptionSecondDerivative); secondDerivativeMemoryConsumptionRest = true; if (averageMemoryConsumptionReset && gradientMemoryConsumptionReset) { memoryConsumptionReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Memory consumption stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public float getMemoryConsumptionGradient() { return memoryConsumption.getGradient(); } public void setMemoryConsumptionGradient(float memoryConsumptionGradient) { memoryConsumption.setGradient(memoryConsumptionGradient); gradientMemoryConsumptionReset = true; if (secondDerivativeMemoryConsumptionRest && averageMemoryConsumptionReset) { memoryConsumptionReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Memory consumption stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public boolean isMemoryConsumptionReset() { return memoryConsumptionReset; } public void setMemoryConsumptionReset(boolean memoryConsumptionReset) { this.memoryConsumptionReset = memoryConsumptionReset; this.averageMemoryConsumptionReset = memoryConsumptionReset; this.gradientMemoryConsumptionReset = memoryConsumptionReset; this.secondDerivativeMemoryConsumptionRest = memoryConsumptionReset; } public float getAverageLoadAverage() { return loadAverage.getAverage(); } public void setAverageLoadAverage(float averageLoadAverage) { loadAverage.setAverage(averageLoadAverage); averageLoadAverageReset = true; if (secondDerivativeLoadAverageRest && gradientLoadAverageReset) { loadAverageReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Load average stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public float getLoadAverageSecondDerivative() { return loadAverage.getSecondDerivative(); } public void setLoadAverageSecondDerivative(float loadAverageSecondDerivative) { loadAverage.setSecondDerivative(loadAverageSecondDerivative); secondDerivativeLoadAverageRest = true; if (averageLoadAverageReset && gradientLoadAverageReset) { loadAverageReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Load average stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public float getLoadAverageGradient() { return loadAverage.getGradient(); } public void setLoadAverageGradient(float loadAverageGradient) { loadAverage.setGradient(loadAverageGradient); gradientLoadAverageReset = true; if (secondDerivativeLoadAverageRest && averageLoadAverageReset) { loadAverageReset = true; if (log.isDebugEnabled()) { log.debug(String.format("Load average stats are reset, ready to do scale check [network partition] %s" , this.id)); } } } public boolean isLoadAverageReset() { return loadAverageReset; } public void setLoadAverageReset(boolean loadAverageReset) { this.loadAverageReset = loadAverageReset; this.averageLoadAverageReset = 
loadAverageReset; this.gradientLoadAverageReset = loadAverageReset; this.secondDerivativeLoadAverageRest = loadAverageReset; } /* public Map<String, ClusterLevelPartitionContext> getPartitionCtxts() { return partitionCtxts; } public ClusterLevelPartitionContext getPartitionCtxt(String partitionId) { return partitionCtxts.get(partitionId); } public void addPartitionContext(ClusterLevelPartitionContext partitionContext) { partitionCtxts.put(partitionContext.getPartitionId(), partitionContext); }*/ public String getPartitionAlgorithm() { return partitionAlgorithm; } /*public int getNonTerminatedMemberCountOfPartition(String partitionId) { if (partitionCtxts.containsKey(partitionId)) { return getPartitionCtxt(partitionId).getNonTerminatedMemberCount(); } return 0; } public int getActiveMemberCount(String currentPartitionId) { if (partitionCtxts.containsKey(currentPartitionId)) { return getPartitionCtxt(currentPartitionId).getActiveMemberCount(); } return 0; } */ public int getScaleDownRequestsCount() { return scaleDownRequestsCount; } public void resetScaleDownRequestsCount() { this.scaleDownRequestsCount = 0; } public void increaseScaleDownRequestsCount() { this.scaleDownRequestsCount += 1; } public float getRequiredInstanceCountBasedOnStats() { return requiredInstanceCountBasedOnStats; } public void setRequiredInstanceCountBasedOnStats(int requiredInstanceCountBasedOnStats) { this.requiredInstanceCountBasedOnStats = requiredInstanceCountBasedOnStats; } public int getRequiredInstanceCountBasedOnDependencies() { return requiredInstanceCountBasedOnDependencies; } public void setRequiredInstanceCountBasedOnDependencies(int requiredInstanceCountBasedOnDependencies) { this.requiredInstanceCountBasedOnDependencies = requiredInstanceCountBasedOnDependencies; } public String getNetworkPartitionId() { return networkPartitionId; } public int getActiveMembers() { int activeMembers = 0; for (ClusterLevelPartitionContext partitionContext : partitionCtxts.values()) { activeMembers += partitionContext.getActiveInstanceCount(); } return activeMembers; } public boolean isAverageRequestServedPerInstanceReset() { return averageRequestServedPerInstanceReset; } public boolean hasScalingDependants() { return hasScalingDependants; } public String getClusterId() { return clusterId; } public boolean isInGroupScalingEnabledSubtree() { return groupScalingEnabledSubtree; } public StatefulKnowledgeSession getMinCheckKnowledgeSession() { return minCheckKnowledgeSession; } public void setMinCheckKnowledgeSession( StatefulKnowledgeSession minCheckKnowledgeSession) { this.minCheckKnowledgeSession = minCheckKnowledgeSession; } public StatefulKnowledgeSession getMaxCheckKnowledgeSession() { return maxCheckKnowledgeSession; } public StatefulKnowledgeSession getObsoleteCheckKnowledgeSession() { return obsoleteCheckKnowledgeSession; } public void setObsoleteCheckKnowledgeSession( StatefulKnowledgeSession obsoleteCheckKnowledgeSession) { this.obsoleteCheckKnowledgeSession = obsoleteCheckKnowledgeSession; } public StatefulKnowledgeSession getScaleCheckKnowledgeSession() { return scaleCheckKnowledgeSession; } public void setScaleCheckKnowledgeSession( StatefulKnowledgeSession scaleCheckKnowledgeSession) { this.scaleCheckKnowledgeSession = scaleCheckKnowledgeSession; } public StatefulKnowledgeSession getDependentScaleCheckKnowledgeSession() { return dependentScaleCheckKnowledgeSession; } public void setDependentScaleCheckKnowledgeSession(StatefulKnowledgeSession dependentScaleCheckKnowledgeSession) { 
this.dependentScaleCheckKnowledgeSession = dependentScaleCheckKnowledgeSession; } public FactHandle getMinCheckFactHandle() { return minCheckFactHandle; } public void setMinCheckFactHandle(FactHandle minCheckFactHandle) { this.minCheckFactHandle = minCheckFactHandle; } public FactHandle getObsoleteCheckFactHandle() { return obsoleteCheckFactHandle; } public void setObsoleteCheckFactHandle(FactHandle obsoleteCheckFactHandle) { this.obsoleteCheckFactHandle = obsoleteCheckFactHandle; } public FactHandle getScaleCheckFactHandle() { return scaleCheckFactHandle; } public void setScaleCheckFactHandle(FactHandle scaleCheckFactHandle) { this.scaleCheckFactHandle = scaleCheckFactHandle; } public FactHandle getMaxCheckFactHandle() { return maxCheckFactHandle; } public void setMaxCheckFactHandle(FactHandle maxCheckFactHandle) { this.maxCheckFactHandle = maxCheckFactHandle; } public FactHandle getDependentScaleCheckFactHandle() { return dependentScaleCheckFactHandle; } public void setDependentScaleCheckFactHandle(FactHandle dependentScaleCheckFactHandle) { this.dependentScaleCheckFactHandle = dependentScaleCheckFactHandle; } }
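// Illustrative usage sketch (not part of the Stratos sources): the requests-in-flight
// "reset" flag only becomes true once the average, gradient and second derivative have
// all been fed in, which is what gates the "ready to do scale check" state logged above.
//
//     ClusterInstanceContext ctx = ...; // context of the cluster instance being monitored
//     ctx.setAverageRequestsInFlight(12.5f);          // averageRifReset = true
//     ctx.setRequestsInFlightGradient(0.4f);          // gradientRifReset = true
//     ctx.setRequestsInFlightSecondDerivative(0.1f);  // all three set -> ctx.isRifReset() == true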
{ "pile_set_name": "Github" }
## pachctl completion zsh

Print or install the zsh completion code.

### Synopsis

Print or install the zsh completion code.

```
pachctl completion zsh [flags]
```

### Options

```
  -h, --help          help for zsh
      --install       Install the completion.
      --path string   Path to install the completions to. (default "_pachctl")
```

### Options inherited from parent commands

```
      --no-color   Turn off colors.
  -v, --verbose    Output verbose logs
```
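### Examples

For example, combining the flags documented above (the install path shown is only illustrative):

```
# print the completion code to stdout
pachctl completion zsh

# install the completion, e.g. into a directory that is on your zsh fpath
pachctl completion zsh --install --path ~/.zsh/completions/_pachctl
```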
{ "pile_set_name": "Github" }
file(REMOVE_RECURSE
  "rviz_cloud_annotation_com_autogen"
  "CMakeFiles/rviz_cloud_annotation_com_autogen.dir/AutogenOldSettings.txt"
  "rviz_cloud_annotation_plugin_autogen"
  "CMakeFiles/rviz_cloud_annotation_plugin_autogen.dir/AutogenOldSettings.txt"
  "rviz_cloud_annotation_node_autogen"
  "CMakeFiles/rviz_cloud_annotation_node_autogen.dir/AutogenOldSettings.txt"
  "CMakeFiles/rviz_cloud_annotation_plugin_autogen"
  "rviz_cloud_annotation_plugin_autogen/mocs_compilation.cpp"
)

# Per-language clean rules from dependency scanning.
foreach(lang )
  include(CMakeFiles/rviz_cloud_annotation_plugin_autogen.dir/cmake_clean_${lang}.cmake OPTIONAL)
endforeach()
{ "pile_set_name": "Github" }
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef BASE_TASK_THREAD_POOL_CAN_RUN_POLICY_TEST_H_ #define BASE_TASK_THREAD_POOL_CAN_RUN_POLICY_TEST_H_ #include "base/synchronization/atomic_flag.h" #include "base/synchronization/waitable_event.h" #include "base/task/thread_pool/task_tracker.h" #include "base/task/thread_pool/test_utils.h" #include "base/task_runner.h" #include "base/test/bind_test_util.h" #include "base/test/test_timeouts.h" #include "base/threading/platform_thread.h" #include "build/build_config.h" namespace base { namespace internal { namespace test { // Verify that tasks only run when allowed by the CanRunPolicy. |target| is the // object on which DidUpdateCanRunPolicy() must be called after updating the // CanRunPolicy in |task_tracker|. |create_task_runner| is a function that // receives a TaskPriority and returns a TaskRunner. |task_tracker| is the // TaskTracker. template <typename Target, typename CreateTaskRunner> void TestCanRunPolicyBasic(Target* target, CreateTaskRunner create_task_runner, TaskTracker* task_tracker) { AtomicFlag foreground_can_run; WaitableEvent foreground_did_run; AtomicFlag best_effort_can_run; WaitableEvent best_effort_did_run; task_tracker->SetCanRunPolicy(CanRunPolicy::kNone); target->DidUpdateCanRunPolicy(); const auto user_visible_task_runner = create_task_runner(TaskPriority::USER_VISIBLE); user_visible_task_runner->PostTask(FROM_HERE, BindLambdaForTesting([&]() { EXPECT_TRUE(foreground_can_run.IsSet()); foreground_did_run.Signal(); })); const auto best_effort_task_runner = create_task_runner(TaskPriority::BEST_EFFORT); best_effort_task_runner->PostTask(FROM_HERE, BindLambdaForTesting([&]() { EXPECT_TRUE(best_effort_can_run.IsSet()); best_effort_did_run.Signal(); })); PlatformThread::Sleep(TestTimeouts::tiny_timeout()); foreground_can_run.Set(); task_tracker->SetCanRunPolicy(CanRunPolicy::kForegroundOnly); target->DidUpdateCanRunPolicy(); foreground_did_run.Wait(); PlatformThread::Sleep(TestTimeouts::tiny_timeout()); best_effort_can_run.Set(); task_tracker->SetCanRunPolicy(CanRunPolicy::kAll); target->DidUpdateCanRunPolicy(); best_effort_did_run.Wait(); } // Verify that if a task was allowed to run by the CanRunPolicy when it was // posted, but the CanRunPolicy is updated to disallow it from running before it // starts running, it doesn't run. |target| is the object on which // DidUpdateCanRunPolicy() must be called after updating the CanRunPolicy in // |task_tracker|. |create_task_runner| is a function that receives a // TaskPriority and returns a *Sequenced*TaskRunner. |task_tracker| is the // TaskTracker. template <typename Target, typename CreateTaskRunner> void TestCanRunPolicyChangedBeforeRun(Target* target, CreateTaskRunner create_task_runner, TaskTracker* task_tracker) { constexpr struct { // Descriptor for the test case. const char* descriptor; // Task priority being tested. TaskPriority priority; // Policy that disallows running tasks with |priority|. CanRunPolicy disallow_policy; // Policy that allows running tasks with |priority|. 
CanRunPolicy allow_policy; } kTestCases[] = { {"BestEffort/kNone/kAll", TaskPriority::BEST_EFFORT, CanRunPolicy::kNone, CanRunPolicy::kAll}, {"BestEffort/kForegroundOnly/kAll", TaskPriority::BEST_EFFORT, CanRunPolicy::kForegroundOnly, CanRunPolicy::kAll}, {"UserVisible/kNone/kForegroundOnly", TaskPriority::USER_VISIBLE, CanRunPolicy::kNone, CanRunPolicy::kForegroundOnly}, {"UserVisible/kNone/kAll", TaskPriority::USER_VISIBLE, CanRunPolicy::kNone, CanRunPolicy::kAll}}; for (auto& test_case : kTestCases) { SCOPED_TRACE(test_case.descriptor); WaitableEvent first_task_started; WaitableEvent first_task_blocked; AtomicFlag second_task_can_run; task_tracker->SetCanRunPolicy(test_case.allow_policy); target->DidUpdateCanRunPolicy(); const auto task_runner = create_task_runner(test_case.priority); task_runner->PostTask( FROM_HERE, BindLambdaForTesting([&]() { first_task_started.Signal(); test::WaitWithoutBlockingObserver(&first_task_blocked); })); task_runner->PostTask(FROM_HERE, BindLambdaForTesting([&]() { EXPECT_TRUE(second_task_can_run.IsSet()); })); first_task_started.Wait(); task_tracker->SetCanRunPolicy(test_case.disallow_policy); target->DidUpdateCanRunPolicy(); first_task_blocked.Signal(); PlatformThread::Sleep(TestTimeouts::tiny_timeout()); second_task_can_run.Set(); task_tracker->SetCanRunPolicy(test_case.allow_policy); target->DidUpdateCanRunPolicy(); task_tracker->FlushForTesting(); } } // Regression test for https://crbug.com/950383 template <typename Target, typename CreateTaskRunner> void TestCanRunPolicyLoad(Target* target, CreateTaskRunner create_task_runner, TaskTracker* task_tracker) { constexpr struct { // Descriptor for the test case. const char* descriptor; // Task priority being tested. TaskPriority priority; // Policy that allows running tasks with |priority|. CanRunPolicy allow_policy; // Policy that disallows running tasks with |priority|. CanRunPolicy disallow_policy; } kTestCases[] = { {"BestEffort/kAll/kNone", TaskPriority::BEST_EFFORT, CanRunPolicy::kAll, CanRunPolicy::kNone}, {"BestEffort/kAll/kForegroundOnly", TaskPriority::BEST_EFFORT, CanRunPolicy::kAll, CanRunPolicy::kForegroundOnly}, {"UserVisible/kForegroundOnly/kNone", TaskPriority::USER_VISIBLE, CanRunPolicy::kForegroundOnly, CanRunPolicy::kNone}, {"UserVisible/kAll/kNone", TaskPriority::USER_VISIBLE, CanRunPolicy::kAll, CanRunPolicy::kNone}}; for (auto& test_case : kTestCases) { SCOPED_TRACE(test_case.descriptor); task_tracker->SetCanRunPolicy(test_case.allow_policy); target->DidUpdateCanRunPolicy(); const auto task_runner = create_task_runner(test_case.priority); // Post less tasks on iOS to avoid timeouts. const size_t kLargeNumber = #if defined(OS_IOS) 16; #else 256; #endif for (size_t i = 0; i < kLargeNumber; ++i) task_runner->PostTask(FROM_HERE, DoNothing()); // Change the CanRunPolicy concurrently with running tasks. // This should not cause crashes. for (size_t i = 0; i < kLargeNumber; ++i) { task_tracker->SetCanRunPolicy(test_case.disallow_policy); target->DidUpdateCanRunPolicy(); task_tracker->SetCanRunPolicy(test_case.allow_policy); target->DidUpdateCanRunPolicy(); } task_tracker->FlushForTesting(); } } } // namespace test } // namespace internal } // namespace base #endif // BASE_TASK_THREAD_POOL_CAN_RUN_POLICY_TEST_H_
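// Illustrative only: a minimal sketch of how a thread-group test might call the helpers
// above. The fixture name, member names and CreatePooledTaskRunner() factory are
// assumptions for the example, not part of this header.
//
//   TEST_F(ThreadGroupImplTest, CanRunPolicyBasic) {
//     test::TestCanRunPolicyBasic(
//         thread_group_.get(),
//         [this](TaskPriority priority) { return CreatePooledTaskRunner(priority); },
//         &task_tracker_);
//   }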
{ "pile_set_name": "Github" }
package com.developerphil.adbidea.ui import com.android.ddmlib.IDevice import com.developerphil.adbidea.ObjectGraph import com.developerphil.adbidea.preference.ProjectPreferences import com.intellij.openapi.project.Project import com.intellij.openapi.ui.DialogWrapper import com.intellij.openapi.util.Disposer import org.jetbrains.android.facet.AndroidFacet import org.jetbrains.android.util.AndroidBundle import org.joor.Reflect import javax.swing.JCheckBox import javax.swing.JComponent import javax.swing.JPanel /** * https://android.googlesource.com/platform/tools/adt/idea/+/refs/heads/mirror-goog-studio-master-dev/android/src/com/android/tools/idea/run/DeviceChooserDialog.java */ class DeviceChooserDialog(facet: AndroidFacet) : DialogWrapper(facet.module.project, true) { lateinit var myPanel: JPanel lateinit var myDeviceChooserWrapper: JPanel lateinit var useSameDeviceSCheckBox: JCheckBox private val myProject: Project private val myDeviceChooser: MyDeviceChooser private val projectPreferences: ProjectPreferences val selectedDevices: Array<IDevice> get() = myDeviceChooser.selectedDevices init { title = AndroidBundle.message("choose.device.dialog.title") myProject = facet.module.project projectPreferences = myProject.getComponent(ObjectGraph::class.java).projectPreferences okAction.isEnabled = false myDeviceChooser = MyDeviceChooser(true, okAction, facet, null) Disposer.register(myDisposable, myDeviceChooser) myDeviceChooser.addListener(object : DeviceChooserListener { override fun selectedDevicesChanged() { updateOkButton() } }) myDeviceChooserWrapper.add(myDeviceChooser.panel) myDeviceChooser.init(projectPreferences.getSelectedDeviceSerials()) init() updateOkButton() } private fun persistSelectedSerialsToPreferences() { projectPreferences.saveSelectedDeviceSerials(myDeviceChooser.selectedDevices.map { it.serialNumber }.toList()) } private fun updateOkButton() { okAction.isEnabled = selectedDevices.isNotEmpty() } override fun getPreferredFocusedComponent(): JComponent? { return try { myDeviceChooser.preferredFocusComponent } catch (e: NoSuchMethodError) { // that means that we are probably on a preview version of android studio or in intellij 13 Reflect.on(myDeviceChooser).call("getDeviceTable").get<JComponent>() } } override fun doOKAction() { myDeviceChooser.finish() persistSelectedSerialsToPreferences() super.doOKAction() } override fun getDimensionServiceKey() = javaClass.canonicalName override fun createCenterPanel(): JComponent = myPanel fun useSameDevices() = useSameDeviceSCheckBox.isSelected }
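// Illustrative usage sketch (not part of this file): how the dialog might be shown from an
// action; the facet lookup and what is done with the selected devices are assumptions.
//
//     val dialog = DeviceChooserDialog(facet)
//     if (dialog.showAndGet()) {                    // DialogWrapper blocks until OK/Cancel
//         val devices: Array<IDevice> = dialog.selectedDevices
//         devices.forEach { device -> /* run the adb command against this device */ }
//     }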
{ "pile_set_name": "Github" }
/** * \file src/opr-mm/include/megbrain/opr/io_remote.h * MegEngine is Licensed under the Apache License, Version 2.0 (the "License") * * Copyright (c) 2014-2020 Megvii Inc. All rights reserved. * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ #pragma once #include "megbrain/graph.h" #include "megbrain/opr/internal/mixin_base.h" #include "megbrain/opr/group_manager.h" #include "megray.h" namespace mgb { namespace opr { /*! * \brief base class for remote I/O nodes */ MGB_DEFINE_CLS_WITH_SUPER(RemoteIOBase, cg::SingleCNOperatorNodeBase) // { public: const std::string& key() const { return m_key; } std::shared_ptr<GroupClient> group_client() const { return m_group_client; } protected: std::string m_key; std::shared_ptr<GroupClient> m_group_client; std::shared_ptr<MegRay::Communicator> m_megray_comm; std::shared_ptr<MegRay::Context> m_megray_ctx; bool m_init = false; using Super::Super; }; /*! * \brief send a variable to remote address; a virtual output is produced * for expressing dependency */ MGB_DEFINE_OPR_CLASS(RemoteSend, RemoteIOBase) // { public: RemoteSend(const std::string& key, VarNode* var, std::shared_ptr<GroupClient> group_client, bool is_grad, const OperatorNodeConfig& config); static SymbolVar make( const std::string& key, SymbolVar var, std::shared_ptr<GroupClient> group_client, bool is_grad, const OperatorNodeConfig& config = {}); bool is_grad() const { return m_is_grad; } private: HostTensorND m_output_val; bool m_is_grad; void scn_do_execute() override; void init_output_static_infer_desc() override; NodeProp* do_make_node_prop() const override; }; /*! * \brief receive a variable from remote address; target computing node * of the var must be specified in config */ MGB_DEFINE_OPR_CLASS(RemoteRecv, RemoteIOBase) // { public: RemoteRecv(const std::string& key, cg::ComputingGraph& graph, std::shared_ptr<GroupClient> group_client, const OperatorNodeConfig& config, const TensorShape& shape, DType dtype); static SymbolVar make( const std::string& key, cg::ComputingGraph& graph, std::shared_ptr<GroupClient> group_client, const OperatorNodeConfig& config, const TensorShape& shape, DType dtype); private: const TensorShape m_shape; const DType m_dtype; const CompNode m_comp_node; DeviceTensorND m_dev_buffer; void scn_do_execute() override; void init_output_static_infer_desc() override; NodeProp* do_make_node_prop() const override; }; } // namespace opr } // namespace mgb // vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}}
{ "pile_set_name": "Github" }
/*M/////////////////////////////////////////////////////////////////////////////////////// // // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. // // By downloading, copying, installing or using the software you agree to this license. // If you do not agree to this license, do not download, install, // copy or use the software. // // // Intel License Agreement // For Open Source Computer Vision Library // // Copyright (C) 2000, Intel Corporation, all rights reserved. // Third party copyrights are property of their respective owners. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // * Redistribution's of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistribution's in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * The name of Intel Corporation may not be used to endorse or promote products // derived from this software without specific prior written permission. // // This software is provided by the copyright holders and contributors "as is" and // any express or implied warranties, including, but not limited to, the implied // warranties of merchantability and fitness for a particular purpose are disclaimed. // In no event shall the Intel Corporation or contributors be liable for any direct, // indirect, incidental, special, exemplary, or consequential damages // (including, but not limited to, procurement of substitute goods or services; // loss of use, data, or profits; or business interruption) however caused // and on any theory of liability, whether in contract, strict liability, // or tort (including negligence or otherwise) arising in any way out of // the use of this software, even if advised of the possibility of such damage. // //M*/ #ifndef __OPENCV_OLD_AUX_HPP__ #define __OPENCV_OLD_AUX_HPP__ //#if defined(__GNUC__) //#warning "This is a deprecated opencv header provided for compatibility. Please include a header from a corresponding opencv module" //#endif #include <cvaux.h> #endif
{ "pile_set_name": "Github" }
# Troubleshooting Applications

This document describes how one can troubleshoot common issues when deploying or debugging an application that fails to start or deploy.

## Application has a Dockerfile, but a Buildpack Deployment Occurs

When you deploy an application to Workflow using `git push deis master` and the [Builder][] attempts to deploy using the Buildpack workflow, check the following steps:

1. Are you deploying the correct project?
2. Are you pushing the correct git branch (`git push deis <branch>`)?
3. Is the `Dockerfile` in the project's root directory?
4. Have you committed the `Dockerfile` to the project?

## Application was Deployed, but is Failing to Start

If you deployed your application but it is failing to start, you can use `deis logs` to check why the application fails to boot.

Sometimes, the application container may fail to boot without logging any information about the error. This typically occurs when the healthcheck configured for the application fails. In this case, you can start by [troubleshooting using kubectl][troubleshooting-kubectl]. You can inspect the application's current state by examining the pod deployed in the application's namespace. To do that, run

    $ kubectl --namespace=myapp get pods
    NAME                         READY     STATUS             RESTARTS   AGE
    myapp-cmd-1585713350-3brbo   0/1       CrashLoopBackOff   2          43s

We can then describe the pod and determine why it is failing to boot:

    Events:
      FirstSeen  LastSeen  Count  From                         SubobjectPath               Type     Reason     Message
      ---------  --------  -----  ----                         -------------               ------   ------     -------
      43s        43s       1      {default-scheduler }                                     Normal   Scheduled  Successfully assigned myapp-cmd-1585713350-3brbo to kubernetes-node-1
      41s        41s       1      {kubelet kubernetes-node-1}  spec.containers{myapp-cmd}  Normal   Created    Created container with docker id b86bd851a61f
      41s        41s       1      {kubelet kubernetes-node-1}  spec.containers{myapp-cmd}  Normal   Started    Started container with docker id b86bd851a61f
      37s        35s       1      {kubelet kubernetes-node-1}  spec.containers{myapp-cmd}  Warning  Unhealthy  Liveness probe failed: Get http://10.246.39.13:8000/healthz: dial tcp 10.246.39.13:8000: getsockopt: connection refused

In this instance, we set the healthcheck initial delay timeout for the application at 1 second, which is too aggressive. The application needs some time to set up the API server after the container has booted. By increasing the healthcheck initial delay timeout to 10 seconds, the application is able to boot and responds correctly.

See [Custom Health Checks][healthchecks] for more information on how to customize the application's health checks to better suit the application's needs.

[builder]: ../understanding-workflow/components.md#builder
[healthchecks]: ../applications/managing-app-configuration.md#custom-health-checks
[troubleshooting-kubectl]: kubectl.md

## Application or Workflow component fails to connect to external services

If a Workflow pod is trying to connect to external services hosted in the cloud alongside the Kubernetes cluster (i.e., an off-cluster database), the connection might fail depending on the Kubernetes automatic firewall configuration. Since Kubernetes 1.9.x, Google Cloud has implemented new automatic firewall rules. To troubleshoot issues with the automatic firewall, Google Cloud/GKE provides a [guide][gke-autofirewall-guide] covering the auto firewall rules.

[gke-autofirewall-guide]: https://cloud.google.com/kubernetes-engine/docs/troubleshooting#autofirewall
{ "pile_set_name": "Github" }
# Lunar

### Intelligent adaptive brightness for your external display

*Note: Lunar changes the actual (physical) brightness and contrast of the monitor.*
*It doesn't use a software overlay.*

## Table of Contents

- [Installation methods](#installation-methods)
- [Features](#features)
- [Known to work list](#tested-and-known-to-work-with-the-following-types-of-connections)
- [Troubleshooting](#troubleshooting)
- [Caveats](#caveats)
- [Contributing](#contributing)

## Installation methods

- Download DMG from [Official website](https://lunar.fyi)
- Download DMG from the [Releases page](https://github.com/alin23/Lunar/releases)
- `brew cask install lunar`

![Display page](Images/display.png)
![Settings page](Images/settings.png)
![Hotkeys page](Images/hotkeys.png)

## Features

- **Sync-based Adaptive Brightness** (and contrast) based on the built-in light sensor of the Macbook or iMac
- **Location-based Adaptive Brightness** (and contrast) based on the sunrise/sunset times in your location
- **App Exception** list if you need more brightness for specific activities (watching movies, design work)
    - individual settings per display
- **Manual controls** hotkeys for setting brightness and contrast that respect the min/max values per monitor

It doesn't interfere at all with the native adaptive brightness that macOS implements for the built-in display.

## Tested and known to work with the following types of connections

- HDMI (1.0 - 2.1)
- DisplayPort (1.0 - 2.0)
- Thunderbolt 3 (USB Type-C)
- Thunderbolt 2 (mini DisplayPort)
- VGA
- Adapters that forward DDC messages properly

## Troubleshooting

1. If Lunar doesn't start at all, try installing the [Swift 5 Runtime Support](https://support.apple.com/kb/DL1998?locale=en_US) from Apple
    - Some older macOS versions don't have these libraries pre-installed and Lunar requires them
2. If Lunar freezes your system, make sure you have the latest version installed
    - Version 2.9.1 was trying to read the monitor brightness periodically through DDC, and if the monitor didn't support that, the system froze
3. If you activated the *Read Monitor Brightness Periodically* option and your system freezes when using Lunar
    - Make sure Lunar is not running
    - Open Terminal.app
    - Run the following command: `defaults write site.lunarapp.Lunar refreshValues 0`
    - If the above doesn't work, you can reset Lunar settings by deleting the following file:
        - `~/Library/Preferences/site.lunarapp.Lunar.plist`
    - If Lunar starts at login and freezes the computer before you can do anything, try doing the above in [Safe Mode](https://support.apple.com/en-us/HT201262)
4. If you get system lag or occasional UI freezes, this might be caused by a slow DDC response from the monitor
    - Make sure *Smooth Transition* is turned off in Lunar preferences
    - If you are using *Sync* mode, set the *Polling Interval* to a bigger value like 5 seconds to avoid making DDC requests too often
5. If you don't get the Location Permissions prompt, run the following commands in a terminal and restart Lunar:

    ```shell
    sudo defaults delete /var/db/locationd/clients.plist site.lunarapp.Lunar
    sudo pkill -9 locationd
    ```

## Caveats

- Lunar *usually* doesn't work with monitors connected through USB hubs/docks/adapters **because a lot of them don't forward DDC messages properly**
- Sync mode doesn't work when the Macbook lid is closed because the light sensor is completely covered

### Contributing

Run `make dev` to prepare the dev environment.
{ "pile_set_name": "Github" }
package typings.devexpressAspnetcoreBootstrap.global.DevExpress.AspNetCore

import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation._

@JSGlobal("DevExpress.AspNetCore.BootstrapListBox")
@js.native
class BootstrapListBox ()
  extends typings.devexpressAspnetcoreBootstrap.DevExpress.AspNetCore.BootstrapListBox
{ "pile_set_name": "Github" }
DROP TABLE vet_specialties IF EXISTS;
DROP TABLE vets IF EXISTS;
DROP TABLE specialties IF EXISTS;
DROP TABLE visits IF EXISTS;
DROP TABLE pets IF EXISTS;
DROP TABLE types IF EXISTS;
DROP TABLE owners IF EXISTS;

CREATE TABLE vets (
  id         INTEGER IDENTITY PRIMARY KEY,
  first_name VARCHAR(30),
  last_name  VARCHAR(30)
);
CREATE INDEX vets_last_name ON vets (last_name);

CREATE TABLE specialties (
  id   INTEGER IDENTITY PRIMARY KEY,
  name VARCHAR(80)
);
CREATE INDEX specialties_name ON specialties (name);

CREATE TABLE vet_specialties (
  vet_id       INTEGER NOT NULL,
  specialty_id INTEGER NOT NULL
);
ALTER TABLE vet_specialties ADD CONSTRAINT fk_vet_specialties_vets FOREIGN KEY (vet_id) REFERENCES vets (id);
ALTER TABLE vet_specialties ADD CONSTRAINT fk_vet_specialties_specialties FOREIGN KEY (specialty_id) REFERENCES specialties (id);

CREATE TABLE types (
  id   INTEGER IDENTITY PRIMARY KEY,
  name VARCHAR(80)
);
CREATE INDEX types_name ON types (name);

CREATE TABLE owners (
  id         INTEGER IDENTITY PRIMARY KEY,
  first_name VARCHAR(30),
  last_name  VARCHAR_IGNORECASE(30),
  address    VARCHAR(255),
  city       VARCHAR(80),
  telephone  VARCHAR(20)
);
CREATE INDEX owners_last_name ON owners (last_name);

CREATE TABLE pets (
  id         INTEGER IDENTITY PRIMARY KEY,
  name       VARCHAR(30),
  birth_date DATE,
  type_id    INTEGER NOT NULL,
  owner_id   INTEGER NOT NULL
);
ALTER TABLE pets ADD CONSTRAINT fk_pets_owners FOREIGN KEY (owner_id) REFERENCES owners (id);
ALTER TABLE pets ADD CONSTRAINT fk_pets_types FOREIGN KEY (type_id) REFERENCES types (id);
CREATE INDEX pets_name ON pets (name);

CREATE TABLE visits (
  id          INTEGER IDENTITY PRIMARY KEY,
  pet_id      INTEGER NOT NULL,
  visit_date  DATE,
  description VARCHAR(255)
);
ALTER TABLE visits ADD CONSTRAINT fk_visits_pets FOREIGN KEY (pet_id) REFERENCES pets (id);
CREATE INDEX visits_pet_id ON visits (pet_id);
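-- Illustrative sample rows (not part of the schema file): they show how the foreign keys
-- chain together: a pet references an owner and a type, and a visit references a pet.
-- The literal id values assume HSQLDB identity columns starting at 0.
--
-- INSERT INTO types (name) VALUES ('dog');
-- INSERT INTO owners (first_name, last_name, address, city, telephone)
--     VALUES ('George', 'Franklin', '110 W. Liberty St.', 'Madison', '6085551023');
-- INSERT INTO pets (name, birth_date, type_id, owner_id) VALUES ('Leo', '2010-09-07', 0, 0);
-- INSERT INTO visits (pet_id, visit_date, description) VALUES (0, '2013-01-01', 'rabies shot');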
{ "pile_set_name": "Github" }
/***************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. ****************************************************************/ package org.apache.cayenne.exp.property; import java.util.Collection; import java.util.Map; import org.apache.cayenne.Persistent; import org.apache.cayenne.exp.Expression; import org.apache.cayenne.exp.ExpressionFactory; import org.apache.cayenne.exp.parser.ASTPath; /** * Property that represents to-many relationship mapped on {@link Map}. * * @see org.apache.cayenne.exp.property * @since 4.2 */ public class MapProperty<K, V extends Persistent> extends BaseProperty<Map<K, V>> implements RelationshipProperty<Map<K, V>> { protected Class<K> keyType; protected Class<V> entityType; /** * Constructs a new property with the given name and expression * * @param name of the property (will be used as alias for the expression) * @param expression expression for property * @param keyType type of keys of the property * @param entityType type of related entities * @see PropertyFactory#createMap(String, Expression, Class, Class) */ protected MapProperty(String name, Expression expression, Class<K> keyType, Class<V> entityType) { super(name, expression, Map.class); this.keyType = keyType; this.entityType = entityType; } /** * <p>Create new "flat" property for toMany relationship.</p> * <p> * Example: * <pre>{@code * List<Object[]> result = ObjectSelect * .columnQuery(Artist.class, Artist.ARTIST_NAME, Artist.PAINTING_ARRAY.flat(Painting.class)) * .select(context); * }</pre> * </p> */ public EntityProperty<V> flat() { return PropertyFactory.createEntity(ExpressionFactory.fullObjectExp(getExpression()), getEntityType()); } // TODO: move all *contains* methods to RelationshipProperty once Property class is removed /** * @return An expression representing equality to a value. */ public Expression contains(V value) { return ExpressionFactory.matchExp(getExpression(), value); } /** * @return An expression representing inequality to a value. */ public Expression notContains(V value) { return ExpressionFactory.noMatchExp(getExpression(), value); } /** * @return An expression for finding objects with values in the given set. */ @SafeVarargs public final Expression contains(V firstValue, V... moreValues) { int moreValuesLength = moreValues != null ? moreValues.length : 0; Object[] values = new Object[moreValuesLength + 1]; values[0] = firstValue; if (moreValuesLength > 0) { System.arraycopy(moreValues, 0, values, 1, moreValuesLength); } return ExpressionFactory.inExp(getExpression(), values); } /** * @return An expression for finding objects with values in the given set. 
*/ public Expression contains(Collection<V> values) { return ExpressionFactory.inExp(getExpression(), values); } /** * @return An expression for finding objects with values not in the given set. */ public Expression notContains(Collection<V> values) { return ExpressionFactory.notInExp(getExpression(), values); } /** * @return An expression for finding objects with values not in the given set. */ @SafeVarargs public final Expression notContains(V firstValue, V... moreValues) { int moreValuesLength = moreValues != null ? moreValues.length : 0; Object[] values = new Object[moreValuesLength + 1]; values[0] = firstValue; if (moreValuesLength > 0) { System.arraycopy(moreValues, 0, values, 1, moreValuesLength); } return ExpressionFactory.notInExp(getExpression(), values); } /** * @param id object id * @return An expression for finding object with given id. */ public Expression containsId(Object id) { return ExpressionFactory.matchExp(getExpression(), id); } /** * @return An expression for finding objects with given id set */ public Expression containsId(Object firstId, Object... moreId) { int moreValuesLength = moreId != null ? moreId.length : 0; Object[] values = new Object[moreValuesLength + 1]; values[0] = firstId; if (moreValuesLength > 0) { System.arraycopy(moreId, 0, values, 1, moreValuesLength); } return ExpressionFactory.inExp(getExpression(), values); } /** * @return An expression for finding objects with given id set. */ public Expression containsId(Collection<Object> ids) { return ExpressionFactory.inExp(getExpression(), ids); } /** * @param id object id * @return An expression for finding object without given id. */ public Expression notContainsId(Object id) { return ExpressionFactory.noMatchExp(getExpression(), id); } /** * @return An expression for finding objects without given id set. */ public Expression notContainsId(Object firstId, Object... moreId) { int moreValuesLength = moreId != null ? moreId.length : 0; Object[] values = new Object[moreValuesLength + 1]; values[0] = firstId; if (moreValuesLength > 0) { System.arraycopy(moreId, 0, values, 1, moreValuesLength); } return ExpressionFactory.notInExp(getExpression(), values); } /** * @return An expression for finding objects without given id set. */ public Expression notContainsId(Collection<Object> ids) { return ExpressionFactory.notInExp(getExpression(), ids); } /** * {@inheritDoc} */ @Override public MapProperty<K, V> alias(String alias) { ASTPath exp = PropertyUtils.createPathExp(this.getName(), alias, getExpression().getPathAliases()); return PropertyFactory.createMap(exp.getPath(), exp, this.getKeyType(), this.getEntityType()); } /** * {@inheritDoc} */ @Override public MapProperty<K, V> outer() { return getName().endsWith("+") ? this : PropertyFactory.createMap(getName() + "+", getKeyType(), getEntityType()); } /** * @return type of keys in represented attribute */ protected Class<K> getKeyType() { return keyType; } /** * @return type of object entity in represented attribute */ protected Class<V> getEntityType() { return entityType; } /** * @return property that will be translated relative to parent query */ public MapProperty<K, V> enclosing() { return PropertyFactory.createMap(null, ExpressionFactory.enclosingObjectExp(getExpression()), getKeyType(), getEntityType()); } }
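// Illustrative usage sketch (not part of Cayenne itself): how a generated map-typed
// property might be used in a query. The Artist.PAINTING_MAP property and the
// somePainting/context variables are assumptions for the example.
//
//     List<Artist> artists = ObjectSelect.query(Artist.class)
//             .where(Artist.PAINTING_MAP.contains(somePainting))
//             .select(context);
//
//     // or select the related objects themselves via the "flat" property
//     List<Object[]> rows = ObjectSelect
//             .columnQuery(Artist.class, Artist.ARTIST_NAME, Artist.PAINTING_MAP.flat())
//             .select(context);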
{ "pile_set_name": "Github" }
package http import ( "fmt" "strconv" "strings" "time" "go-common/library/ecode" "go-common/library/log" bm "go-common/library/net/http/blademaster" "go-common/library/net/metadata" "go-common/library/stat" ) // logger is logger middleware func logger() bm.HandlerFunc { const noUser = "no_user" return func(c *bm.Context) { now := time.Now() ip := metadata.String(c, metadata.RemoteIP) req := c.Request path := req.URL.Path params := req.Form c.Next() mid, _ := c.Get("mid") userI, _ := c.Get("user") err := c.Error cerr := ecode.Cause(err) dt := time.Since(now) // user user, ok := userI.(string) if !ok || user == "" { user = noUser } realPath := "" if strings.HasPrefix(path, "/x/internal/shorturl") { realPath = path[1:] } else { realPath = "shorturl" } stat.HTTPServer.Incr(user, realPath, strconv.FormatInt(int64(cerr.Code()), 10)) stat.HTTPServer.Timing(user, int64(dt/time.Millisecond), realPath) lf := log.Infov errmsg := "" if err != nil { errmsg = err.Error() lf = log.Errorv } lf(c, log.KV("method", req.Method), log.KV("mid", mid), log.KV("ip", ip), log.KV("user", user), log.KV("path", path), log.KV("params", params.Encode()), log.KV("ret", cerr.Code()), log.KV("msg", cerr.Message()), log.KV("stack", fmt.Sprintf("%+v", err)), log.KV("err", errmsg), ) } }
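// Illustrative only (not part of this file): how the middleware might be attached when
// routes are registered; the engine setup and handler below are assumptions for the example.
//
//     func initRouter(e *bm.Engine) {
//         e.Use(logger())
//         e.GET("/x/internal/shorturl/short", shortenHandler)
//     }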
{ "pile_set_name": "Github" }
// Copyright (c) Microsoft Corporation. All Rights Reserved. // Licensed under the MIT License. using System; using System.Windows; namespace InteractiveDataDisplay.WPF { /// <summary> /// Performs transformations between data values and plot coordinates. /// </summary> public abstract class DataTransform : DependencyObject { /// <summary>Gets range of valid data values. <see cref="DataToPlot"/> method returns NaN for /// values outside this range. /// </summary> public Range Domain { get; private set; } /// <summary> /// Initializes a new instance of <see cref="DataTransform"/> class. /// </summary> /// <param name="domain">A range of valid data.</param> protected DataTransform(Range domain) { Domain = domain; } /// <summary> /// Converts value from data to plot coordinates. /// </summary> /// <param name="dataValue">A value in data coordinates.</param> /// <returns>Value converted to plot coordinates or NaN if <paramref name="dataValue"/> /// falls outside of <see cref="Domain"/>.</returns> public abstract double DataToPlot(double dataValue); /// <summary> /// Converts value from plot coordinates to data. /// </summary> /// <param name="plotValue">A value in plot coordinates.</param> /// <returns>Value converted to data coordinates or NaN if no value in data coordinates /// matches <paramref name="plotValue"/>.</returns> public abstract double PlotToData(double plotValue); /// <summary>Identity transformation</summary> public static readonly DataTransform Identity = new IdentityDataTransform(); } /// <summary> /// Provides identity transformation between data and plot coordinates. /// </summary> public class IdentityDataTransform : DataTransform { /// <summary> /// Initializes a new instance of <see cref="IdentityDataTransform"/> class. /// </summary> public IdentityDataTransform() : base(new Range(double.MinValue, double.MaxValue)) { } /// <summary> /// Returns a value in data coordinates without convertion. /// </summary> /// <param name="dataValue">A value in data coordinates.</param> /// <returns></returns> public override double DataToPlot(double dataValue) { return dataValue; } /// <summary> /// Returns a value in plot coordinates without convertion. /// </summary> /// <param name="plotValue">A value in plot coordinates.</param> /// <returns></returns> public override double PlotToData(double plotValue) { return plotValue; } } /// <summary> /// Represents a mercator transform, used in maps. /// Transforms y coordinates. /// </summary> public sealed class MercatorTransform : DataTransform { /// <summary> /// Initializes a new instance of the <see cref="MercatorTransform"/> class. /// </summary> public MercatorTransform() : base(new Range(-85, 85)) { CalcScale(maxLatitude); } /// <summary> /// Initializes a new instance of the <see cref="MercatorTransform"/> class. /// </summary> /// <param name="maxLatitude">The maximal latitude.</param> public MercatorTransform(double maxLatitude) : base(new Range(-maxLatitude, maxLatitude)) { this.maxLatitude = maxLatitude; CalcScale(maxLatitude); } private void CalcScale(double inputMaxLatitude) { double maxLatDeg = inputMaxLatitude; double maxLatRad = maxLatDeg * Math.PI / 180; scale = maxLatDeg / Math.Log(Math.Tan(maxLatRad / 2 + Math.PI / 4)); } private double scale; /// <summary> /// Gets the scale. /// </summary> /// <value>The scale.</value> public double Scale { get { return scale; } } private double maxLatitude = 85; /// <summary> /// Gets the maximal latitude. 
/// </summary> /// <value>The max latitude.</value> public double MaxLatitude { get { return maxLatitude; } } /// <summary> /// Converts value from mercator to plot coordinates. /// </summary> /// <param name="dataValue">A value in mercator coordinates.</param> /// <returns>Value converted to plot coordinates.</returns> public override double DataToPlot(double dataValue) { if (-maxLatitude <= dataValue && dataValue <= maxLatitude) { dataValue = scale * Math.Log(Math.Tan(Math.PI * (dataValue + 90) / 360)); } return dataValue; } /// <summary> /// Converts value from plot to mercator coordinates. /// </summary> /// <param name="plotValue">A value in plot coordinates.</param> /// <returns>Value converted to mercator coordinates.</returns> public override double PlotToData(double plotValue) { if (-maxLatitude <= plotValue && plotValue <= maxLatitude) { double e = Math.Exp(plotValue / scale); plotValue = 360 * Math.Atan(e) / Math.PI - 90; } return plotValue; } } /// <summary> /// Provides linear transform u = <see cref="Scale"/> * d + <see cref="Offset"/> from data value d to plot coordinate u. /// </summary> public sealed class LinearDataTransform : DataTransform { /// <summary> /// Gets or sets the scale factor. /// </summary> public double Scale { get { return (double)GetValue(ScaleProperty); } set { SetValue(ScaleProperty, value); } } /// <summary> /// Identifies the <see cref="Scale"/> dependency property. /// </summary> public static readonly DependencyProperty ScaleProperty = DependencyProperty.Register("Scale", typeof(double), typeof(LinearDataTransform), new PropertyMetadata(1.0)); /// <summary> /// Gets or sets the distance to translate an value. /// </summary> public double Offset { get { return (double)GetValue(OffsetProperty); } set { SetValue(OffsetProperty, value); } } /// <summary> /// Identifies the <see cref="Offset"/> dependency property. /// </summary> public static readonly DependencyProperty OffsetProperty = DependencyProperty.Register("Offset", typeof(double), typeof(LinearDataTransform), new PropertyMetadata(0.0)); /// <summary> /// Initializes a new instance of the <see cref="LinearDataTransform"/> class. /// </summary> public LinearDataTransform() : base(new Range(double.MinValue, double.MaxValue)) { } /// <summary> /// Transforms a value according to defined <see cref="Scale"/> and <see cref="Offset"/>. /// </summary> /// <param name="dataValue">A value in data coordinates.</param> /// <returns>Transformed value.</returns> public override double DataToPlot(double dataValue) { return dataValue * Scale + Offset; } /// <summary> /// Returns a value in data coordinates from its transformed value. /// </summary> /// <param name="plotValue">Transformed value.</param> /// <returns>Original value or NaN if <see cref="Scale"/> is 0.</returns> public override double PlotToData(double plotValue) { if (Scale != 0) { return (plotValue - Offset) / Scale; } else return double.NaN; } } }
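// Illustrative usage sketch (not part of this file): converting between data and plot
// coordinates with the transforms declared above; the numeric values are arbitrary.
//
//     var linear = new LinearDataTransform { Scale = 2.0, Offset = 1.0 };
//     double plot = linear.DataToPlot(3.0);    // 3 * 2 + 1 = 7
//     double data = linear.PlotToData(plot);   // back to 3
//
//     var mercator = new MercatorTransform();
//     double y = mercator.DataToPlot(51.5);    // latitude in degrees -> plot coordinate
//     double lat = mercator.PlotToData(y);     // approximately 51.5 again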
{ "pile_set_name": "Github" }
<?php namespace Doctrine\DBAL\Platforms; use Doctrine\DBAL\LockMode; use Doctrine\DBAL\Schema\Column; use Doctrine\DBAL\Schema\ColumnDiff; use Doctrine\DBAL\Schema\ForeignKeyConstraint; use Doctrine\DBAL\Schema\Identifier; use Doctrine\DBAL\Schema\Index; use Doctrine\DBAL\Schema\Table; use Doctrine\DBAL\Schema\TableDiff; use Doctrine\DBAL\Types; use InvalidArgumentException; use function array_merge; use function array_unique; use function array_values; use function count; use function crc32; use function dechex; use function explode; use function func_get_args; use function implode; use function is_array; use function is_bool; use function is_numeric; use function is_string; use function preg_match; use function preg_replace; use function sprintf; use function str_replace; use function stripos; use function stristr; use function strlen; use function strpos; use function strtoupper; use function substr; use function substr_count; /** * The SQLServerPlatform provides the behavior, features and SQL dialect of the * Microsoft SQL Server database platform. */ class SQLServerPlatform extends AbstractPlatform { /** * {@inheritdoc} */ public function getCurrentDateSQL() { return $this->getConvertExpression('date', 'GETDATE()'); } /** * {@inheritdoc} */ public function getCurrentTimeSQL() { return $this->getConvertExpression('time', 'GETDATE()'); } /** * Returns an expression that converts an expression of one data type to another. * * @param string $dataType The target native data type. Alias data types cannot be used. * @param string $expression The SQL expression to convert. * * @return string */ private function getConvertExpression($dataType, $expression) { return sprintf('CONVERT(%s, %s)', $dataType, $expression); } /** * {@inheritdoc} */ protected function getDateArithmeticIntervalExpression($date, $operator, $interval, $unit) { $factorClause = ''; if ($operator === '-') { $factorClause = '-1 * '; } return 'DATEADD(' . $unit . ', ' . $factorClause . $interval . ', ' . $date . ')'; } /** * {@inheritDoc} */ public function getDateDiffExpression($date1, $date2) { return 'DATEDIFF(day, ' . $date2 . ',' . $date1 . ')'; } /** * {@inheritDoc} * * Microsoft SQL Server prefers "autoincrement" identity columns * since sequences can only be emulated with a table. */ public function prefersIdentityColumns() { return true; } /** * {@inheritDoc} * * Microsoft SQL Server supports this through AUTO_INCREMENT columns. */ public function supportsIdentityColumns() { return true; } /** * {@inheritDoc} */ public function supportsReleaseSavepoints() { return false; } /** * {@inheritdoc} */ public function supportsSchemas() { return true; } /** * {@inheritdoc} */ public function getDefaultSchemaName() { return 'dbo'; } /** * {@inheritDoc} */ public function supportsColumnCollation() { return true; } /** * {@inheritDoc} */ public function hasNativeGuidType() { return true; } /** * {@inheritDoc} */ public function getCreateDatabaseSQL($name) { return 'CREATE DATABASE ' . $name; } /** * {@inheritDoc} */ public function getDropDatabaseSQL($name) { return 'DROP DATABASE ' . $name; } /** * {@inheritDoc} */ public function supportsCreateDropDatabase() { return true; } /** * {@inheritDoc} */ public function getCreateSchemaSQL($schemaName) { return 'CREATE SCHEMA ' . $schemaName; } /** * {@inheritDoc} */ public function getDropForeignKeySQL($foreignKey, $table) { if (! $foreignKey instanceof ForeignKeyConstraint) { $foreignKey = new Identifier($foreignKey); } if (! 
$table instanceof Table) { $table = new Identifier($table); } $foreignKey = $foreignKey->getQuotedName($this); $table = $table->getQuotedName($this); return 'ALTER TABLE ' . $table . ' DROP CONSTRAINT ' . $foreignKey; } /** * {@inheritDoc} */ public function getDropIndexSQL($index, $table = null) { if ($index instanceof Index) { $index = $index->getQuotedName($this); } elseif (! is_string($index)) { throw new InvalidArgumentException('AbstractPlatform::getDropIndexSQL() expects $index parameter to be string or \Doctrine\DBAL\Schema\Index.'); } if (! isset($table)) { return 'DROP INDEX ' . $index; } if ($table instanceof Table) { $table = $table->getQuotedName($this); } return sprintf( <<<SQL IF EXISTS (SELECT * FROM sysobjects WHERE name = '%s') ALTER TABLE %s DROP CONSTRAINT %s ELSE DROP INDEX %s ON %s SQL , $index, $table, $index, $index, $table ); } /** * {@inheritDoc} */ protected function _getCreateTableSQL($tableName, array $columns, array $options = []) { $defaultConstraintsSql = []; $commentsSql = []; // @todo does other code breaks because of this? // force primary keys to be not null foreach ($columns as &$column) { if (isset($column['primary']) && $column['primary']) { $column['notnull'] = true; } // Build default constraints SQL statements. if (isset($column['default'])) { $defaultConstraintsSql[] = 'ALTER TABLE ' . $tableName . ' ADD' . $this->getDefaultConstraintDeclarationSQL($tableName, $column); } if (empty($column['comment']) && ! is_numeric($column['comment'])) { continue; } $commentsSql[] = $this->getCreateColumnCommentSQL($tableName, $column['name'], $column['comment']); } $columnListSql = $this->getColumnDeclarationListSQL($columns); if (isset($options['uniqueConstraints']) && ! empty($options['uniqueConstraints'])) { foreach ($options['uniqueConstraints'] as $name => $definition) { $columnListSql .= ', ' . $this->getUniqueConstraintDeclarationSQL($name, $definition); } } if (isset($options['primary']) && ! empty($options['primary'])) { $flags = ''; if (isset($options['primary_index']) && $options['primary_index']->hasFlag('nonclustered')) { $flags = ' NONCLUSTERED'; } $columnListSql .= ', PRIMARY KEY' . $flags . ' (' . implode(', ', array_unique(array_values($options['primary']))) . ')'; } $query = 'CREATE TABLE ' . $tableName . ' (' . $columnListSql; $check = $this->getCheckDeclarationSQL($columns); if (! empty($check)) { $query .= ', ' . $check; } $query .= ')'; $sql = [$query]; if (isset($options['indexes']) && ! empty($options['indexes'])) { foreach ($options['indexes'] as $index) { $sql[] = $this->getCreateIndexSQL($index, $tableName); } } if (isset($options['foreignKeys'])) { foreach ((array) $options['foreignKeys'] as $definition) { $sql[] = $this->getCreateForeignKeySQL($definition, $tableName); } } return array_merge($sql, $commentsSql, $defaultConstraintsSql); } /** * {@inheritDoc} */ public function getCreatePrimaryKeySQL(Index $index, $table) { $flags = ''; if ($index->hasFlag('nonclustered')) { $flags = ' NONCLUSTERED'; } return 'ALTER TABLE ' . $table . ' ADD PRIMARY KEY' . $flags . ' (' . $this->getIndexFieldDeclarationListSQL($index) . ')'; } /** * Returns the SQL statement for creating a column comment. * * SQL Server does not support native column comments, * therefore the extended properties functionality is used * as a workaround to store them. 
* The property name used to store column comments is "MS_Description" * which provides compatibility with SQL Server Management Studio, * as column comments are stored in the same property there when * specifying a column's "Description" attribute. * * @param string $tableName The quoted table name to which the column belongs. * @param string $columnName The quoted column name to create the comment for. * @param string $comment The column's comment. * * @return string */ protected function getCreateColumnCommentSQL($tableName, $columnName, $comment) { if (strpos($tableName, '.') !== false) { [$schemaSQL, $tableSQL] = explode('.', $tableName); $schemaSQL = $this->quoteStringLiteral($schemaSQL); $tableSQL = $this->quoteStringLiteral($tableSQL); } else { $schemaSQL = "'dbo'"; $tableSQL = $this->quoteStringLiteral($tableName); } return $this->getAddExtendedPropertySQL( 'MS_Description', $comment, 'SCHEMA', $schemaSQL, 'TABLE', $tableSQL, 'COLUMN', $columnName ); } /** * Returns the SQL snippet for declaring a default constraint. * * @param string $table Name of the table to return the default constraint declaration for. * @param mixed[] $column Column definition. * * @return string * * @throws InvalidArgumentException */ public function getDefaultConstraintDeclarationSQL($table, array $column) { if (! isset($column['default'])) { throw new InvalidArgumentException("Incomplete column definition. 'default' required."); } $columnName = new Identifier($column['name']); return ' CONSTRAINT ' . $this->generateDefaultConstraintName($table, $column['name']) . $this->getDefaultValueDeclarationSQL($column) . ' FOR ' . $columnName->getQuotedName($this); } /** * {@inheritDoc} */ public function getUniqueConstraintDeclarationSQL($name, Index $index) { $constraint = parent::getUniqueConstraintDeclarationSQL($name, $index); $constraint = $this->_appendUniqueConstraintDefinition($constraint, $index); return $constraint; } /** * {@inheritDoc} */ public function getCreateIndexSQL(Index $index, $table) { $constraint = parent::getCreateIndexSQL($index, $table); if ($index->isUnique() && ! $index->isPrimary()) { $constraint = $this->_appendUniqueConstraintDefinition($constraint, $index); } return $constraint; } /** * {@inheritDoc} */ protected function getCreateIndexSQLFlags(Index $index) { $type = ''; if ($index->isUnique()) { $type .= 'UNIQUE '; } if ($index->hasFlag('clustered')) { $type .= 'CLUSTERED '; } elseif ($index->hasFlag('nonclustered')) { $type .= 'NONCLUSTERED '; } return $type; } /** * Extend unique key constraint with required filters * * @param string $sql * * @return string */ private function _appendUniqueConstraintDefinition($sql, Index $index) { $fields = []; foreach ($index->getQuotedColumns($this) as $field) { $fields[] = $field . ' IS NOT NULL'; } return $sql . ' WHERE ' . implode(' AND ', $fields); } /** * {@inheritDoc} */ public function getAlterTableSQL(TableDiff $diff) { $queryParts = []; $sql = []; $columnSql = []; $commentsSql = []; foreach ($diff->addedColumns as $column) { if ($this->onSchemaAlterTableAddColumn($column, $diff, $columnSql)) { continue; } $columnDef = $column->toArray(); $queryParts[] = 'ADD ' . $this->getColumnDeclarationSQL($column->getQuotedName($this), $columnDef); if (isset($columnDef['default'])) { $queryParts[] = $this->getAlterTableAddDefaultConstraintClause($diff->name, $column); } $comment = $this->getColumnComment($column); if (empty($comment) && ! 
is_numeric($comment)) { continue; } $commentsSql[] = $this->getCreateColumnCommentSQL( $diff->name, $column->getQuotedName($this), $comment ); } foreach ($diff->removedColumns as $column) { if ($this->onSchemaAlterTableRemoveColumn($column, $diff, $columnSql)) { continue; } $queryParts[] = 'DROP COLUMN ' . $column->getQuotedName($this); } foreach ($diff->changedColumns as $columnDiff) { if ($this->onSchemaAlterTableChangeColumn($columnDiff, $diff, $columnSql)) { continue; } $column = $columnDiff->column; $comment = $this->getColumnComment($column); $hasComment = ! empty($comment) || is_numeric($comment); if ($columnDiff->fromColumn instanceof Column) { $fromComment = $this->getColumnComment($columnDiff->fromColumn); $hasFromComment = ! empty($fromComment) || is_numeric($fromComment); if ($hasFromComment && $hasComment && $fromComment !== $comment) { $commentsSql[] = $this->getAlterColumnCommentSQL( $diff->name, $column->getQuotedName($this), $comment ); } elseif ($hasFromComment && ! $hasComment) { $commentsSql[] = $this->getDropColumnCommentSQL($diff->name, $column->getQuotedName($this)); } elseif ($hasComment) { $commentsSql[] = $this->getCreateColumnCommentSQL( $diff->name, $column->getQuotedName($this), $comment ); } } // Do not add query part if only comment has changed. if ($columnDiff->hasChanged('comment') && count($columnDiff->changedProperties) === 1) { continue; } $requireDropDefaultConstraint = $this->alterColumnRequiresDropDefaultConstraint($columnDiff); if ($requireDropDefaultConstraint) { $queryParts[] = $this->getAlterTableDropDefaultConstraintClause( $diff->name, $columnDiff->oldColumnName ); } $columnDef = $column->toArray(); $queryParts[] = 'ALTER COLUMN ' . $this->getColumnDeclarationSQL($column->getQuotedName($this), $columnDef); if (! isset($columnDef['default']) || (! $requireDropDefaultConstraint && ! $columnDiff->hasChanged('default'))) { continue; } $queryParts[] = $this->getAlterTableAddDefaultConstraintClause($diff->name, $column); } foreach ($diff->renamedColumns as $oldColumnName => $column) { if ($this->onSchemaAlterTableRenameColumn($oldColumnName, $column, $diff, $columnSql)) { continue; } $oldColumnName = new Identifier($oldColumnName); $sql[] = "sp_RENAME '" . $diff->getName($this)->getQuotedName($this) . '.' . $oldColumnName->getQuotedName($this) . "', '" . $column->getQuotedName($this) . "', 'COLUMN'"; // Recreate default constraint with new column name if necessary (for future reference). if ($column->getDefault() === null) { continue; } $queryParts[] = $this->getAlterTableDropDefaultConstraintClause( $diff->name, $oldColumnName->getQuotedName($this) ); $queryParts[] = $this->getAlterTableAddDefaultConstraintClause($diff->name, $column); } $tableSql = []; if ($this->onSchemaAlterTable($diff, $tableSql)) { return array_merge($tableSql, $columnSql); } foreach ($queryParts as $query) { $sql[] = 'ALTER TABLE ' . $diff->getName($this)->getQuotedName($this) . ' ' . $query; } $sql = array_merge($sql, $commentsSql); if ($diff->newName !== false) { $sql[] = "sp_RENAME '" . $diff->getName($this)->getQuotedName($this) . "', '" . $diff->getNewName()->getName() . "'"; /** * Rename table's default constraints names * to match the new table name. * This is necessary to ensure that the default * constraints can be referenced in future table * alterations as the table name is encoded in * default constraints' names. */ $sql[] = "DECLARE @sql NVARCHAR(MAX) = N''; " . "SELECT @sql += N'EXEC sp_rename N''' + dc.name + ''', N''' " . "+ REPLACE(dc.name, '" . 
$this->generateIdentifierName($diff->name) . "', " . "'" . $this->generateIdentifierName($diff->newName) . "') + ''', ''OBJECT'';' " . 'FROM sys.default_constraints dc ' . 'JOIN sys.tables tbl ON dc.parent_object_id = tbl.object_id ' . "WHERE tbl.name = '" . $diff->getNewName()->getName() . "';" . 'EXEC sp_executesql @sql'; } $sql = array_merge( $this->getPreAlterTableIndexForeignKeySQL($diff), $sql, $this->getPostAlterTableIndexForeignKeySQL($diff) ); return array_merge($sql, $tableSql, $columnSql); } /** * Returns the SQL clause for adding a default constraint in an ALTER TABLE statement. * * @param string $tableName The name of the table to generate the clause for. * @param Column $column The column to generate the clause for. * * @return string */ private function getAlterTableAddDefaultConstraintClause($tableName, Column $column) { $columnDef = $column->toArray(); $columnDef['name'] = $column->getQuotedName($this); return 'ADD' . $this->getDefaultConstraintDeclarationSQL($tableName, $columnDef); } /** * Returns the SQL clause for dropping an existing default constraint in an ALTER TABLE statement. * * @param string $tableName The name of the table to generate the clause for. * @param string $columnName The name of the column to generate the clause for. * * @return string */ private function getAlterTableDropDefaultConstraintClause($tableName, $columnName) { return 'DROP CONSTRAINT ' . $this->generateDefaultConstraintName($tableName, $columnName); } /** * Checks whether a column alteration requires dropping its default constraint first. * * Different to other database vendors SQL Server implements column default values * as constraints and therefore changes in a column's default value as well as changes * in a column's type require dropping the default constraint first before being to * alter the particular column to the new definition. * * @param ColumnDiff $columnDiff The column diff to evaluate. * * @return bool True if the column alteration requires dropping its default constraint first, false otherwise. */ private function alterColumnRequiresDropDefaultConstraint(ColumnDiff $columnDiff) { // We can only decide whether to drop an existing default constraint // if we know the original default value. if (! $columnDiff->fromColumn instanceof Column) { return false; } // We only need to drop an existing default constraint if we know the // column was defined with a default value before. if ($columnDiff->fromColumn->getDefault() === null) { return false; } // We need to drop an existing default constraint if the column was // defined with a default value before and it has changed. if ($columnDiff->hasChanged('default')) { return true; } // We need to drop an existing default constraint if the column was // defined with a default value before and the native column type has changed. return $columnDiff->hasChanged('type') || $columnDiff->hasChanged('fixed'); } /** * Returns the SQL statement for altering a column comment. * * SQL Server does not support native column comments, * therefore the extended properties functionality is used * as a workaround to store them. * The property name used to store column comments is "MS_Description" * which provides compatibility with SQL Server Management Studio, * as column comments are stored in the same property there when * specifying a column's "Description" attribute. * * @param string $tableName The quoted table name to which the column belongs. * @param string $columnName The quoted column name to alter the comment for. 
* @param string $comment The column's comment. * * @return string */ protected function getAlterColumnCommentSQL($tableName, $columnName, $comment) { if (strpos($tableName, '.') !== false) { [$schemaSQL, $tableSQL] = explode('.', $tableName); $schemaSQL = $this->quoteStringLiteral($schemaSQL); $tableSQL = $this->quoteStringLiteral($tableSQL); } else { $schemaSQL = "'dbo'"; $tableSQL = $this->quoteStringLiteral($tableName); } return $this->getUpdateExtendedPropertySQL( 'MS_Description', $comment, 'SCHEMA', $schemaSQL, 'TABLE', $tableSQL, 'COLUMN', $columnName ); } /** * Returns the SQL statement for dropping a column comment. * * SQL Server does not support native column comments, * therefore the extended properties functionality is used * as a workaround to store them. * The property name used to store column comments is "MS_Description" * which provides compatibility with SQL Server Management Studio, * as column comments are stored in the same property there when * specifying a column's "Description" attribute. * * @param string $tableName The quoted table name to which the column belongs. * @param string $columnName The quoted column name to drop the comment for. * * @return string */ protected function getDropColumnCommentSQL($tableName, $columnName) { if (strpos($tableName, '.') !== false) { [$schemaSQL, $tableSQL] = explode('.', $tableName); $schemaSQL = $this->quoteStringLiteral($schemaSQL); $tableSQL = $this->quoteStringLiteral($tableSQL); } else { $schemaSQL = "'dbo'"; $tableSQL = $this->quoteStringLiteral($tableName); } return $this->getDropExtendedPropertySQL( 'MS_Description', 'SCHEMA', $schemaSQL, 'TABLE', $tableSQL, 'COLUMN', $columnName ); } /** * {@inheritdoc} */ protected function getRenameIndexSQL($oldIndexName, Index $index, $tableName) { return [sprintf( "EXEC sp_RENAME N'%s.%s', N'%s', N'INDEX'", $tableName, $oldIndexName, $index->getQuotedName($this) ), ]; } /** * Returns the SQL statement for adding an extended property to a database object. * * @link http://msdn.microsoft.com/en-us/library/ms180047%28v=sql.90%29.aspx * * @param string $name The name of the property to add. * @param string|null $value The value of the property to add. * @param string|null $level0Type The type of the object at level 0 the property belongs to. * @param string|null $level0Name The name of the object at level 0 the property belongs to. * @param string|null $level1Type The type of the object at level 1 the property belongs to. * @param string|null $level1Name The name of the object at level 1 the property belongs to. * @param string|null $level2Type The type of the object at level 2 the property belongs to. * @param string|null $level2Name The name of the object at level 2 the property belongs to. * * @return string */ public function getAddExtendedPropertySQL( $name, $value = null, $level0Type = null, $level0Name = null, $level1Type = null, $level1Name = null, $level2Type = null, $level2Name = null ) { return 'EXEC sp_addextendedproperty ' . 'N' . $this->quoteStringLiteral($name) . ', N' . $this->quoteStringLiteral($value) . ', ' . 'N' . $this->quoteStringLiteral($level0Type) . ', ' . $level0Name . ', ' . 'N' . $this->quoteStringLiteral($level1Type) . ', ' . $level1Name . ', ' . 'N' . $this->quoteStringLiteral($level2Type) . ', ' . $level2Name; } /** * Returns the SQL statement for dropping an extended property from a database object. * * @link http://technet.microsoft.com/en-gb/library/ms178595%28v=sql.90%29.aspx * * @param string $name The name of the property to drop. 
* @param string|null $level0Type The type of the object at level 0 the property belongs to. * @param string|null $level0Name The name of the object at level 0 the property belongs to. * @param string|null $level1Type The type of the object at level 1 the property belongs to. * @param string|null $level1Name The name of the object at level 1 the property belongs to. * @param string|null $level2Type The type of the object at level 2 the property belongs to. * @param string|null $level2Name The name of the object at level 2 the property belongs to. * * @return string */ public function getDropExtendedPropertySQL( $name, $level0Type = null, $level0Name = null, $level1Type = null, $level1Name = null, $level2Type = null, $level2Name = null ) { return 'EXEC sp_dropextendedproperty ' . 'N' . $this->quoteStringLiteral($name) . ', ' . 'N' . $this->quoteStringLiteral($level0Type) . ', ' . $level0Name . ', ' . 'N' . $this->quoteStringLiteral($level1Type) . ', ' . $level1Name . ', ' . 'N' . $this->quoteStringLiteral($level2Type) . ', ' . $level2Name; } /** * Returns the SQL statement for updating an extended property of a database object. * * @link http://msdn.microsoft.com/en-us/library/ms186885%28v=sql.90%29.aspx * * @param string $name The name of the property to update. * @param string|null $value The value of the property to update. * @param string|null $level0Type The type of the object at level 0 the property belongs to. * @param string|null $level0Name The name of the object at level 0 the property belongs to. * @param string|null $level1Type The type of the object at level 1 the property belongs to. * @param string|null $level1Name The name of the object at level 1 the property belongs to. * @param string|null $level2Type The type of the object at level 2 the property belongs to. * @param string|null $level2Name The name of the object at level 2 the property belongs to. * * @return string */ public function getUpdateExtendedPropertySQL( $name, $value = null, $level0Type = null, $level0Name = null, $level1Type = null, $level1Name = null, $level2Type = null, $level2Name = null ) { return 'EXEC sp_updateextendedproperty ' . 'N' . $this->quoteStringLiteral($name) . ', N' . $this->quoteStringLiteral($value) . ', ' . 'N' . $this->quoteStringLiteral($level0Type) . ', ' . $level0Name . ', ' . 'N' . $this->quoteStringLiteral($level1Type) . ', ' . $level1Name . ', ' . 'N' . $this->quoteStringLiteral($level2Type) . ', ' . $level2Name; } /** * {@inheritDoc} */ public function getEmptyIdentityInsertSQL($quotedTableName, $quotedIdentifierColumnName) { return 'INSERT INTO ' . $quotedTableName . 
' DEFAULT VALUES'; } /** * {@inheritDoc} */ public function getListTablesSQL() { // "sysdiagrams" table must be ignored as it's internal SQL Server table for Database Diagrams // Category 2 must be ignored as it is "MS SQL Server 'pseudo-system' object[s]" for replication return "SELECT name FROM sysobjects WHERE type = 'U' AND name != 'sysdiagrams' AND category != 2 ORDER BY name"; } /** * {@inheritDoc} */ public function getListTableColumnsSQL($table, $database = null) { return "SELECT col.name, type.name AS type, col.max_length AS length, ~col.is_nullable AS notnull, def.definition AS [default], col.scale, col.precision, col.is_identity AS autoincrement, col.collation_name AS collation, CAST(prop.value AS NVARCHAR(MAX)) AS comment -- CAST avoids driver error for sql_variant type FROM sys.columns AS col JOIN sys.types AS type ON col.user_type_id = type.user_type_id JOIN sys.objects AS obj ON col.object_id = obj.object_id JOIN sys.schemas AS scm ON obj.schema_id = scm.schema_id LEFT JOIN sys.default_constraints def ON col.default_object_id = def.object_id AND col.object_id = def.parent_object_id LEFT JOIN sys.extended_properties AS prop ON obj.object_id = prop.major_id AND col.column_id = prop.minor_id AND prop.name = 'MS_Description' WHERE obj.type = 'U' AND " . $this->getTableWhereClause($table, 'scm.name', 'obj.name'); } /** * {@inheritDoc} */ public function getListTableForeignKeysSQL($table, $database = null) { return 'SELECT f.name AS ForeignKey, SCHEMA_NAME (f.SCHEMA_ID) AS SchemaName, OBJECT_NAME (f.parent_object_id) AS TableName, COL_NAME (fc.parent_object_id,fc.parent_column_id) AS ColumnName, SCHEMA_NAME (o.SCHEMA_ID) ReferenceSchemaName, OBJECT_NAME (f.referenced_object_id) AS ReferenceTableName, COL_NAME(fc.referenced_object_id,fc.referenced_column_id) AS ReferenceColumnName, f.delete_referential_action_desc, f.update_referential_action_desc FROM sys.foreign_keys AS f INNER JOIN sys.foreign_key_columns AS fc INNER JOIN sys.objects AS o ON o.OBJECT_ID = fc.referenced_object_id ON f.OBJECT_ID = fc.constraint_object_id WHERE ' . $this->getTableWhereClause($table, 'SCHEMA_NAME (f.schema_id)', 'OBJECT_NAME (f.parent_object_id)'); } /** * {@inheritDoc} */ public function getListTableIndexesSQL($table, $currentDatabase = null) { return "SELECT idx.name AS key_name, col.name AS column_name, ~idx.is_unique AS non_unique, idx.is_primary_key AS [primary], CASE idx.type WHEN '1' THEN 'clustered' WHEN '2' THEN 'nonclustered' ELSE NULL END AS flags FROM sys.tables AS tbl JOIN sys.schemas AS scm ON tbl.schema_id = scm.schema_id JOIN sys.indexes AS idx ON tbl.object_id = idx.object_id JOIN sys.index_columns AS idxcol ON idx.object_id = idxcol.object_id AND idx.index_id = idxcol.index_id JOIN sys.columns AS col ON idxcol.object_id = col.object_id AND idxcol.column_id = col.column_id WHERE " . $this->getTableWhereClause($table, 'scm.name', 'tbl.name') . ' ORDER BY idx.index_id ASC, idxcol.key_ordinal ASC'; } /** * {@inheritDoc} */ public function getCreateViewSQL($name, $sql) { return 'CREATE VIEW ' . $name . ' AS ' . $sql; } /** * {@inheritDoc} */ public function getListViewsSQL($database) { return "SELECT name FROM sysobjects WHERE type = 'V' ORDER BY name"; } /** * Returns the where clause to filter schema and table name in a query. * * @param string $table The full qualified name of the table. * @param string $schemaColumn The name of the column to compare the schema to in the where clause. * @param string $tableColumn The name of the column to compare the table to in the where clause. 
* * @return string */ private function getTableWhereClause($table, $schemaColumn, $tableColumn) { if (strpos($table, '.') !== false) { [$schema, $table] = explode('.', $table); $schema = $this->quoteStringLiteral($schema); $table = $this->quoteStringLiteral($table); } else { $schema = 'SCHEMA_NAME()'; $table = $this->quoteStringLiteral($table); } return sprintf('(%s = %s AND %s = %s)', $tableColumn, $table, $schemaColumn, $schema); } /** * {@inheritDoc} */ public function getDropViewSQL($name) { return 'DROP VIEW ' . $name; } /** * {@inheritDoc} * * @deprecated Use application-generated UUIDs instead */ public function getGuidExpression() { return 'NEWID()'; } /** * {@inheritDoc} */ public function getLocateExpression($str, $substr, $startPos = false) { if ($startPos === false) { return 'CHARINDEX(' . $substr . ', ' . $str . ')'; } return 'CHARINDEX(' . $substr . ', ' . $str . ', ' . $startPos . ')'; } /** * {@inheritDoc} */ public function getModExpression($expression1, $expression2) { return $expression1 . ' % ' . $expression2; } /** * {@inheritDoc} */ public function getTrimExpression($str, $pos = TrimMode::UNSPECIFIED, $char = false) { if (! $char) { switch ($pos) { case TrimMode::LEADING: $trimFn = 'LTRIM'; break; case TrimMode::TRAILING: $trimFn = 'RTRIM'; break; default: return 'LTRIM(RTRIM(' . $str . '))'; } return $trimFn . '(' . $str . ')'; } /** Original query used to get those expressions declare @c varchar(100) = 'xxxBarxxx', @trim_char char(1) = 'x'; declare @pat varchar(10) = '%[^' + @trim_char + ']%'; select @c as string , @trim_char as trim_char , stuff(@c, 1, patindex(@pat, @c) - 1, null) as trim_leading , reverse(stuff(reverse(@c), 1, patindex(@pat, reverse(@c)) - 1, null)) as trim_trailing , reverse(stuff(reverse(stuff(@c, 1, patindex(@pat, @c) - 1, null)), 1, patindex(@pat, reverse(stuff(@c, 1, patindex(@pat, @c) - 1, null))) - 1, null)) as trim_both; */ $pattern = "'%[^' + " . $char . " + ']%'"; if ($pos === TrimMode::LEADING) { return 'stuff(' . $str . ', 1, patindex(' . $pattern . ', ' . $str . ') - 1, null)'; } if ($pos === TrimMode::TRAILING) { return 'reverse(stuff(reverse(' . $str . '), 1, patindex(' . $pattern . ', reverse(' . $str . ')) - 1, null))'; } return 'reverse(stuff(reverse(stuff(' . $str . ', 1, patindex(' . $pattern . ', ' . $str . ') - 1, null)), 1, patindex(' . $pattern . ', reverse(stuff(' . $str . ', 1, patindex(' . $pattern . ', ' . $str . ') - 1, null))) - 1, null))'; } /** * {@inheritDoc} */ public function getConcatExpression() { $args = func_get_args(); return '(' . implode(' + ', $args) . ')'; } /** * {@inheritDoc} */ public function getListDatabasesSQL() { return 'SELECT * FROM sys.databases'; } /** * {@inheritDoc} */ public function getListNamespacesSQL() { return "SELECT name FROM sys.schemas WHERE name NOT IN('guest', 'INFORMATION_SCHEMA', 'sys')"; } /** * {@inheritDoc} */ public function getSubstringExpression($value, $from, $length = null) { if ($length !== null) { return 'SUBSTRING(' . $value . ', ' . $from . ', ' . $length . ')'; } return 'SUBSTRING(' . $value . ', ' . $from . ', LEN(' . $value . ') - ' . $from . ' + 1)'; } /** * {@inheritDoc} */ public function getLengthExpression($column) { return 'LEN(' . $column . ')'; } /** * {@inheritDoc} */ public function getSetTransactionIsolationSQL($level) { return 'SET TRANSACTION ISOLATION LEVEL ' . $this->_getTransactionIsolationLevelSQL($level); } /** * {@inheritDoc} */ public function getIntegerTypeDeclarationSQL(array $field) { return 'INT' . 
$this->_getCommonIntegerTypeDeclarationSQL($field); } /** * {@inheritDoc} */ public function getBigIntTypeDeclarationSQL(array $field) { return 'BIGINT' . $this->_getCommonIntegerTypeDeclarationSQL($field); } /** * {@inheritDoc} */ public function getSmallIntTypeDeclarationSQL(array $field) { return 'SMALLINT' . $this->_getCommonIntegerTypeDeclarationSQL($field); } /** * {@inheritDoc} */ public function getGuidTypeDeclarationSQL(array $field) { return 'UNIQUEIDENTIFIER'; } /** * {@inheritDoc} */ protected function getVarcharTypeDeclarationSQLSnippet($length, $fixed) { return $fixed ? ($length ? 'NCHAR(' . $length . ')' : 'CHAR(255)') : ($length ? 'NVARCHAR(' . $length . ')' : 'NVARCHAR(255)'); } /** * {@inheritdoc} */ protected function getBinaryTypeDeclarationSQLSnippet($length, $fixed) { return $fixed ? 'BINARY(' . ($length ?: 255) . ')' : 'VARBINARY(' . ($length ?: 255) . ')'; } /** * {@inheritdoc} */ public function getBinaryMaxLength() { return 8000; } /** * {@inheritDoc} */ public function getClobTypeDeclarationSQL(array $field) { return 'VARCHAR(MAX)'; } /** * {@inheritDoc} */ protected function _getCommonIntegerTypeDeclarationSQL(array $columnDef) { return ! empty($columnDef['autoincrement']) ? ' IDENTITY' : ''; } /** * {@inheritDoc} */ public function getDateTimeTypeDeclarationSQL(array $fieldDeclaration) { return 'DATETIME'; } /** * {@inheritDoc} */ public function getDateTypeDeclarationSQL(array $fieldDeclaration) { return 'DATETIME'; } /** * {@inheritDoc} */ public function getTimeTypeDeclarationSQL(array $fieldDeclaration) { return 'DATETIME'; } /** * {@inheritDoc} */ public function getBooleanTypeDeclarationSQL(array $field) { return 'BIT'; } /** * {@inheritDoc} */ protected function doModifyLimitQuery($query, $limit, $offset = null) { $where = []; if ($offset > 0) { $where[] = sprintf('doctrine_rownum >= %d', $offset + 1); } if ($limit !== null) { $where[] = sprintf('doctrine_rownum <= %d', $offset + $limit); $top = sprintf('TOP %d', $offset + $limit); } else { $top = 'TOP 9223372036854775807'; } if (empty($where)) { return $query; } // We'll find a SELECT or SELECT distinct and prepend TOP n to it // Even if the TOP n is very large, the use of a CTE will // allow the SQL Server query planner to optimize it so it doesn't // actually scan the entire range covered by the TOP clause. $selectPattern = '/^(\s*SELECT\s+(?:DISTINCT\s+)?)(.*)$/im'; $replacePattern = sprintf('$1%s $2', $top); $query = preg_replace($selectPattern, $replacePattern, $query); if (stristr($query, 'ORDER BY')) { // Inner order by is not valid in SQL Server for our purposes // unless it's in a TOP N subquery. $query = $this->scrubInnerOrderBy($query); } // Build a new limited query around the original, using a CTE return sprintf( 'WITH dctrn_cte AS (%s) ' . 'SELECT * FROM (' . 'SELECT *, ROW_NUMBER() OVER (ORDER BY (SELECT 0)) AS doctrine_rownum FROM dctrn_cte' . ') AS doctrine_tbl ' . 'WHERE %s ORDER BY doctrine_rownum ASC', $query, implode(' AND ', $where) ); } /** * Remove ORDER BY clauses in subqueries - they're not supported by SQL Server. * Caveat: will leave ORDER BY in TOP N subqueries. 
* * @param string $query * * @return string */ private function scrubInnerOrderBy($query) { $count = substr_count(strtoupper($query), 'ORDER BY'); $offset = 0; while ($count-- > 0) { $orderByPos = stripos($query, ' ORDER BY', $offset); if ($orderByPos === false) { break; } $qLen = strlen($query); $parenCount = 0; $currentPosition = $orderByPos; while ($parenCount >= 0 && $currentPosition < $qLen) { if ($query[$currentPosition] === '(') { $parenCount++; } elseif ($query[$currentPosition] === ')') { $parenCount--; } $currentPosition++; } if ($this->isOrderByInTopNSubquery($query, $orderByPos)) { // If the order by clause is in a TOP N subquery, do not remove // it and continue iteration from the current position. $offset = $currentPosition; continue; } if ($currentPosition >= $qLen - 1) { continue; } $query = substr($query, 0, $orderByPos) . substr($query, $currentPosition - 1); $offset = $orderByPos; } return $query; } /** * Check an ORDER BY clause to see if it is in a TOP N query or subquery. * * @param string $query The query * @param int $currentPosition Start position of ORDER BY clause * * @return bool true if ORDER BY is in a TOP N query, false otherwise */ private function isOrderByInTopNSubquery($query, $currentPosition) { // Grab query text on the same nesting level as the ORDER BY clause we're examining. $subQueryBuffer = ''; $parenCount = 0; // If $parenCount goes negative, we've exited the subquery we're examining. // If $currentPosition goes negative, we've reached the beginning of the query. while ($parenCount >= 0 && $currentPosition >= 0) { if ($query[$currentPosition] === '(') { $parenCount--; } elseif ($query[$currentPosition] === ')') { $parenCount++; } // Only yank query text on the same nesting level as the ORDER BY clause. $subQueryBuffer = ($parenCount === 0 ? $query[$currentPosition] : ' ') . $subQueryBuffer; $currentPosition--; } return (bool) preg_match('/SELECT\s+(DISTINCT\s+)?TOP\s/i', $subQueryBuffer); } /** * {@inheritDoc} */ public function supportsLimitOffset() { return false; } /** * {@inheritDoc} */ public function convertBooleans($item) { if (is_array($item)) { foreach ($item as $key => $value) { if (! is_bool($value) && ! is_numeric($item)) { continue; } $item[$key] = $value ? 1 : 0; } } elseif (is_bool($item) || is_numeric($item)) { $item = $item ? 1 : 0; } return $item; } /** * {@inheritDoc} */ public function getCreateTemporaryTableSnippetSQL() { return 'CREATE TABLE'; } /** * {@inheritDoc} */ public function getTemporaryTableName($tableName) { return '#' . 
$tableName; } /** * {@inheritDoc} */ public function getDateTimeFormatString() { return 'Y-m-d H:i:s.000'; } /** * {@inheritDoc} */ public function getDateFormatString() { return 'Y-m-d H:i:s.000'; } /** * {@inheritDoc} */ public function getTimeFormatString() { return 'Y-m-d H:i:s.000'; } /** * {@inheritDoc} */ public function getDateTimeTzFormatString() { return $this->getDateTimeFormatString(); } /** * {@inheritDoc} */ public function getName() { return 'mssql'; } /** * {@inheritDoc} */ protected function initializeDoctrineTypeMappings() { $this->doctrineTypeMapping = [ 'bigint' => 'bigint', 'numeric' => 'decimal', 'bit' => 'boolean', 'smallint' => 'smallint', 'decimal' => 'decimal', 'smallmoney' => 'integer', 'int' => 'integer', 'tinyint' => 'smallint', 'money' => 'integer', 'float' => 'float', 'real' => 'float', 'double' => 'float', 'double precision' => 'float', 'smalldatetime' => 'datetime', 'datetime' => 'datetime', 'char' => 'string', 'varchar' => 'string', 'text' => 'text', 'nchar' => 'string', 'nvarchar' => 'string', 'ntext' => 'text', 'binary' => 'binary', 'varbinary' => 'binary', 'image' => 'blob', 'uniqueidentifier' => 'guid', ]; } /** * {@inheritDoc} */ public function createSavePoint($savepoint) { return 'SAVE TRANSACTION ' . $savepoint; } /** * {@inheritDoc} */ public function releaseSavePoint($savepoint) { return ''; } /** * {@inheritDoc} */ public function rollbackSavePoint($savepoint) { return 'ROLLBACK TRANSACTION ' . $savepoint; } /** * {@inheritdoc} */ public function getForeignKeyReferentialActionSQL($action) { // RESTRICT is not supported, therefore falling back to NO ACTION. if (strtoupper($action) === 'RESTRICT') { return 'NO ACTION'; } return parent::getForeignKeyReferentialActionSQL($action); } /** * {@inheritDoc} */ public function appendLockHint($fromClause, $lockMode) { switch (true) { case $lockMode === LockMode::NONE: return $fromClause . ' WITH (NOLOCK)'; case $lockMode === LockMode::PESSIMISTIC_READ: return $fromClause . ' WITH (HOLDLOCK, ROWLOCK)'; case $lockMode === LockMode::PESSIMISTIC_WRITE: return $fromClause . ' WITH (UPDLOCK, ROWLOCK)'; default: return $fromClause; } } /** * {@inheritDoc} */ public function getForUpdateSQL() { return ' '; } /** * {@inheritDoc} */ protected function getReservedKeywordsClass() { return Keywords\SQLServerKeywords::class; } /** * {@inheritDoc} */ public function quoteSingleIdentifier($str) { return '[' . str_replace(']', '][', $str) . ']'; } /** * {@inheritDoc} */ public function getTruncateTableSQL($tableName, $cascade = false) { $tableIdentifier = new Identifier($tableName); return 'TRUNCATE TABLE ' . $tableIdentifier->getQuotedName($this); } /** * {@inheritDoc} */ public function getBlobTypeDeclarationSQL(array $field) { return 'VARBINARY(MAX)'; } /** * {@inheritDoc} */ public function getDefaultValueDeclarationSQL($field) { if (! isset($field['default'])) { return empty($field['notnull']) ? ' NULL' : ''; } if (! isset($field['type'])) { return " DEFAULT '" . $field['default'] . "'"; } $type = $field['type']; if ($type instanceof Types\PhpIntegerMappingType) { return ' DEFAULT ' . $field['default']; } if ($type instanceof Types\PhpDateTimeMappingType && $field['default'] === $this->getCurrentTimestampSQL()) { return ' DEFAULT ' . $this->getCurrentTimestampSQL(); } if ($type instanceof Types\BooleanType) { return " DEFAULT '" . $this->convertBooleans($field['default']) . "'"; } return " DEFAULT '" . $field['default'] . 
"'"; } /** * {@inheritdoc} * * Modifies column declaration order as it differs in Microsoft SQL Server. */ public function getColumnDeclarationSQL($name, array $field) { if (isset($field['columnDefinition'])) { $columnDef = $this->getCustomTypeDeclarationSQL($field); } else { $collation = isset($field['collation']) && $field['collation'] ? ' ' . $this->getColumnCollationDeclarationSQL($field['collation']) : ''; $notnull = isset($field['notnull']) && $field['notnull'] ? ' NOT NULL' : ''; $unique = isset($field['unique']) && $field['unique'] ? ' ' . $this->getUniqueFieldDeclarationSQL() : ''; $check = isset($field['check']) && $field['check'] ? ' ' . $field['check'] : ''; $typeDecl = $field['type']->getSQLDeclaration($field, $this); $columnDef = $typeDecl . $collation . $notnull . $unique . $check; } return $name . ' ' . $columnDef; } /** * Returns a unique default constraint name for a table and column. * * @param string $table Name of the table to generate the unique default constraint name for. * @param string $column Name of the column in the table to generate the unique default constraint name for. * * @return string */ private function generateDefaultConstraintName($table, $column) { return 'DF_' . $this->generateIdentifierName($table) . '_' . $this->generateIdentifierName($column); } /** * Returns a hash value for a given identifier. * * @param string $identifier Identifier to generate a hash value for. * * @return string */ private function generateIdentifierName($identifier) { // Always generate name for unquoted identifiers to ensure consistency. $identifier = new Identifier($identifier); return strtoupper(dechex(crc32($identifier->getName()))); } }
{ "pile_set_name": "Github" }
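A small illustration of the SQL snippets produced by a few methods of the SQLServerPlatform class above. This is not part of the Doctrine source; it assumes doctrine/dbal is autoloaded and only calls methods visible in the class.

<?php
use Doctrine\DBAL\Platforms\SQLServerPlatform;

$platform = new SQLServerPlatform();

echo $platform->getConcatExpression('first_name', "' '", 'last_name'), PHP_EOL; // (first_name + ' ' + last_name)
echo $platform->getLengthExpression('name'), PHP_EOL;                           // LEN(name)
echo $platform->getSubstringExpression('name', 1, 3), PHP_EOL;                  // SUBSTRING(name, 1, 3)
echo $platform->quoteSingleIdentifier('order'), PHP_EOL;                        // [order]
echo $platform->getTruncateTableSQL('users'), PHP_EOL;                          // TRUNCATE TABLE users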
define(function() { return (/\?/); });
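A brief usage sketch for the one-line AMD module above, which exports the regular expression /\?/. This is not from the original source, and the module id 'hasQuery' is a hypothetical name standing in for whatever id the module is actually registered under.

define(['hasQuery'], function (QUERY_RE) {
    // QUERY_RE is the exported /\?/ regular expression.
    var url = 'scripts/widget.js?bust=123';
    var hasQueryString = QUERY_RE.test(url); // true, the URL contains a '?'
    return hasQueryString;
});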
{ "pile_set_name": "Github" }
package typings.devtoolsProtocol.mod.Protocol.Storage import typings.devtoolsProtocol.mod.Protocol.Browser.BrowserContextID import scala.scalajs.js import scala.scalajs.js.`|` import scala.scalajs.js.annotation._ @js.native trait GetCookiesRequest extends js.Object { /** * Browser context to use when called on the browser endpoint. */ var browserContextId: js.UndefOr[BrowserContextID] = js.native } object GetCookiesRequest { @scala.inline def apply(): GetCookiesRequest = { val __obj = js.Dynamic.literal() __obj.asInstanceOf[GetCookiesRequest] } @scala.inline implicit class GetCookiesRequestOps[Self <: GetCookiesRequest] (val x: Self) extends AnyVal { @scala.inline def duplicate: Self = (js.Dynamic.global.Object.assign(js.Dynamic.literal(), x)).asInstanceOf[Self] @scala.inline def combineWith[Other <: js.Any](other: Other): Self with Other = (js.Dynamic.global.Object.assign(js.Dynamic.literal(), x, other.asInstanceOf[js.Any])).asInstanceOf[Self with Other] @scala.inline def set(key: String, value: js.Any): Self = { x.asInstanceOf[js.Dynamic].updateDynamic(key)(value) x } @scala.inline def setBrowserContextId(value: BrowserContextID): Self = this.set("browserContextId", value.asInstanceOf[js.Any]) @scala.inline def deleteBrowserContextId: Self = this.set("browserContextId", js.undefined) } }
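A minimal sketch of constructing the request type defined above. It is not part of the generated bindings; contextId stands for an existing BrowserContextID value obtained elsewhere, and only members visible in this file are used.

import typings.devtoolsProtocol.mod.Protocol.Browser.BrowserContextID
import typings.devtoolsProtocol.mod.Protocol.Storage.GetCookiesRequest

object GetCookiesRequestSketch {
  // Request scoped to one browser context (the setter comes from GetCookiesRequestOps).
  def scoped(contextId: BrowserContextID): GetCookiesRequest =
    GetCookiesRequest().setBrowserContextId(contextId)

  // Request with browserContextId left undefined, i.e. the default context.
  val default: GetCookiesRequest = GetCookiesRequest()
}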
{ "pile_set_name": "Github" }
package User::pwent; use 5.006; our $VERSION = '1.00'; use strict; use warnings; use Config; use Carp; our(@EXPORT, @EXPORT_OK, %EXPORT_TAGS); BEGIN { use Exporter (); @EXPORT = qw(getpwent getpwuid getpwnam getpw); @EXPORT_OK = qw( pw_has $pw_name $pw_passwd $pw_uid $pw_gid $pw_gecos $pw_dir $pw_shell $pw_expire $pw_change $pw_class $pw_age $pw_quota $pw_comment $pw_expire ); %EXPORT_TAGS = ( FIELDS => [ grep(/^\$pw_/, @EXPORT_OK), @EXPORT ], ALL => [ @EXPORT, @EXPORT_OK ], ); } use vars grep /^\$pw_/, @EXPORT_OK; # # XXX: these mean somebody hacked this module's source # without understanding the underlying assumptions. # my $IE = "[INTERNAL ERROR]"; # Class::Struct forbids use of @ISA sub import { goto &Exporter::import } use Class::Struct qw(struct); struct 'User::pwent' => [ name => '$', # pwent[0] passwd => '$', # pwent[1] uid => '$', # pwent[2] gid => '$', # pwent[3] # you'll only have one/none of these three change => '$', # pwent[4] age => '$', # pwent[4] quota => '$', # pwent[4] # you'll only have one/none of these two comment => '$', # pwent[5] class => '$', # pwent[5] # you might not have this one gecos => '$', # pwent[6] dir => '$', # pwent[7] shell => '$', # pwent[8] # you might not have this one expire => '$', # pwent[9] ]; # init our groks hash to be true if the built platform knew how # to do each struct pwd field that perl can ever under any circumstances # know about. we do not use /^pw_?/, but just the tails. sub _feature_init { our %Groks; # whether build system knew how to do this feature for my $feep ( qw{ pwage pwchange pwclass pwcomment pwexpire pwgecos pwpasswd pwquota } ) { my $short = $feep =~ /^pw(.*)/ ? $1 : do { # not cluck, as we know we called ourselves, # and a confession is probably imminent anyway warn("$IE $feep is a funny struct pwd field"); $feep; }; exists $Config{ "d_" . $feep } || confess("$IE Configure doesn't d_$feep"); $Groks{$short} = defined $Config{ "d_" . $feep }; } # assume that any that are left are always there for my $feep (grep /^\$pw_/s, @EXPORT_OK) { $feep =~ /^\$pw_(.*)/; $Groks{$1} = 1 unless defined $Groks{$1}; } } # With arguments, reports whether one or more fields are all implemented # in the build machine's struct pwd pw_*. May be whitespace separated. # We do not use /^pw_?/, just the tails. # # Without arguments, returns the list of fields implemented on build # machine, space separated in scalar context. # # Takes exception to being asked whether this machine's struct pwd has # a field that Perl never knows how to provide under any circumstances. # If the module does this idiocy to itself, the explosion is noisier. # sub pw_has { our %Groks; # whether build system knew how to do this feature my $cando = 1; my $sploder = caller() ne __PACKAGE__ ? \&croak : sub { confess("$IE @_") }; if (@_ == 0) { my @valid = sort grep { $Groks{$_} } keys %Groks; return wantarray ? @valid : "@valid"; } for my $feep (map { split } @_) { defined $Groks{$feep} || $sploder->("$feep is never a valid struct pwd field"); $cando &&= $Groks{$feep}; } return $cando; } sub _populate (@) { return unless @_; my $pwob = new(); # Any that haven't been pw_had are assumed on "all" platforms of # course, this may not be so, but you can't get here otherwise, # since the underlying core call already took exception to your # impudence. 
$pw_name = $pwob->name ( $_[0] ); $pw_passwd = $pwob->passwd ( $_[1] ) if pw_has("passwd"); $pw_uid = $pwob->uid ( $_[2] ); $pw_gid = $pwob->gid ( $_[3] ); if (pw_has("change")) { $pw_change = $pwob->change ( $_[4] ); } elsif (pw_has("age")) { $pw_age = $pwob->age ( $_[4] ); } elsif (pw_has("quota")) { $pw_quota = $pwob->quota ( $_[4] ); } if (pw_has("class")) { $pw_class = $pwob->class ( $_[5] ); } elsif (pw_has("comment")) { $pw_comment = $pwob->comment( $_[5] ); } $pw_gecos = $pwob->gecos ( $_[6] ) if pw_has("gecos"); $pw_dir = $pwob->dir ( $_[7] ); $pw_shell = $pwob->shell ( $_[8] ); $pw_expire = $pwob->expire ( $_[9] ) if pw_has("expire"); return $pwob; } sub getpwent ( ) { _populate(CORE::getpwent()) } sub getpwnam ($) { _populate(CORE::getpwnam(shift)) } sub getpwuid ($) { _populate(CORE::getpwuid(shift)) } sub getpw ($) { ($_[0] =~ /^\d+\z/s) ? &getpwuid : &getpwnam } _feature_init(); 1; __END__ =head1 NAME User::pwent - by-name interface to Perl's built-in getpw*() functions =head1 SYNOPSIS use User::pwent; $pw = getpwnam('daemon') || die "No daemon user"; if ( $pw->uid == 1 && $pw->dir =~ m#^/(bin|tmp)?\z#s ) { print "gid 1 on root dir"; } $real_shell = $pw->shell || '/bin/sh'; for (($fullname, $office, $workphone, $homephone) = split /\s*,\s*/, $pw->gecos) { s/&/ucfirst(lc($pw->name))/ge; } use User::pwent qw(:FIELDS); getpwnam('daemon') || die "No daemon user"; if ( $pw_uid == 1 && $pw_dir =~ m#^/(bin|tmp)?\z#s ) { print "gid 1 on root dir"; } $pw = getpw($whoever); use User::pwent qw/:DEFAULT pw_has/; if (pw_has(qw[gecos expire quota])) { .... } if (pw_has("name uid gid passwd")) { .... } print "Your struct pwd has: ", scalar pw_has(), "\n"; =head1 DESCRIPTION This module's default exports override the core getpwent(), getpwuid(), and getpwnam() functions, replacing them with versions that return C<User::pwent> objects. This object has methods that return the similarly named structure field name from the C's passwd structure from F<pwd.h>, stripped of their leading "pw_" parts, namely C<name>, C<passwd>, C<uid>, C<gid>, C<change>, C<age>, C<quota>, C<comment>, C<class>, C<gecos>, C<dir>, C<shell>, and C<expire>. The C<passwd>, C<gecos>, and C<shell> fields are tainted when running in taint mode. You may also import all the structure fields directly into your namespace as regular variables using the :FIELDS import tag. (Note that this still overrides your core functions.) Access these fields as variables named with a preceding C<pw_> in front their method names. Thus, C<< $passwd_obj->shell >> corresponds to $pw_shell if you import the fields. The getpw() function is a simple front-end that forwards a numeric argument to getpwuid() and the rest to getpwnam(). To access this functionality without the core overrides, pass the C<use> an empty import list, and then access function functions with their full qualified names. The built-ins are always still available via the C<CORE::> pseudo-package. =head2 System Specifics Perl believes that no machine ever has more than one of C<change>, C<age>, or C<quota> implemented, nor more than one of either C<comment> or C<class>. Some machines do not support C<expire>, C<gecos>, or allegedly, C<passwd>. You may call these methods no matter what machine you're on, but they return C<undef> if unimplemented. You may ask whether one of these was implemented on the system Perl was built on by asking the importable C<pw_has> function about them. 
This function returns true if all parameters are supported fields on the build platform, false if one or more were not, and raises an exception if you asked about a field that Perl never knows how to provide. Parameters may be in a space-separated string, or as separate arguments. If you pass no parameters, the function returns the list of C<struct pwd> fields supported by your build platform's C library, as a list in list context, or a space-separated string in scalar context. Note that just because your C library had a field doesn't necessarily mean that it's fully implemented on that system. Interpretation of the C<gecos> field varies between systems, but traditionally holds 4 comma-separated fields containing the user's full name, office location, work phone number, and home phone number. An C<&> in the gecos field should be replaced by the user's properly capitalized login C<name>. The C<shell> field, if blank, must be assumed to be F</bin/sh>. Perl does not do this for you. The C<passwd> is one-way hashed garble, not clear text, and may not be unhashed save by brute-force guessing. Secure systems use more a more secure hashing than DES. On systems supporting shadow password systems, Perl automatically returns the shadow password entry when called by a suitably empowered user, even if your underlying vendor-provided C library was too short-sighted to realize it should do this. See passwd(5) and getpwent(3) for details. =head1 NOTE While this class is currently implemented using the Class::Struct module to build a struct-like class, you shouldn't rely upon this. =head1 AUTHOR Tom Christiansen =head1 HISTORY =over 4 =item March 18th, 2000 Reworked internals to support better interface to dodgey fields than normal Perl function provides. Added pw_has() field. Improved documentation. =back
{ "pile_set_name": "Github" }
// This file is part of Eigen, a lightweight C++ template library // for linear algebra. // // Copyright (C) 2008-2009 Gael Guennebaud <[email protected]> // Copyright (C) 2006-2008 Benoit Jacob <[email protected]> // // This Source Code Form is subject to the terms of the Mozilla // Public License v. 2.0. If a copy of the MPL was not distributed // with this file, You can obtain one at http://mozilla.org/MPL/2.0/. #ifndef EIGEN_META_H #define EIGEN_META_H namespace Eigen { namespace internal { /** \internal * \file Meta.h * This file contains generic metaprogramming classes which are not specifically related to Eigen. * \note In case you wonder, yes we're aware that Boost already provides all these features, * we however don't want to add a dependency to Boost. */ struct true_type { enum { value = 1 }; }; struct false_type { enum { value = 0 }; }; template<bool Condition, typename Then, typename Else> struct conditional { typedef Then type; }; template<typename Then, typename Else> struct conditional <false, Then, Else> { typedef Else type; }; template<typename T, typename U> struct is_same { enum { value = 0 }; }; template<typename T> struct is_same<T,T> { enum { value = 1 }; }; template<typename T> struct remove_reference { typedef T type; }; template<typename T> struct remove_reference<T&> { typedef T type; }; template<typename T> struct remove_pointer { typedef T type; }; template<typename T> struct remove_pointer<T*> { typedef T type; }; template<typename T> struct remove_pointer<T*const> { typedef T type; }; template <class T> struct remove_const { typedef T type; }; template <class T> struct remove_const<const T> { typedef T type; }; template <class T> struct remove_const<const T[]> { typedef T type[]; }; template <class T, unsigned int Size> struct remove_const<const T[Size]> { typedef T type[Size]; }; template<typename T> struct remove_all { typedef T type; }; template<typename T> struct remove_all<const T> { typedef typename remove_all<T>::type type; }; template<typename T> struct remove_all<T const&> { typedef typename remove_all<T>::type type; }; template<typename T> struct remove_all<T&> { typedef typename remove_all<T>::type type; }; template<typename T> struct remove_all<T const*> { typedef typename remove_all<T>::type type; }; template<typename T> struct remove_all<T*> { typedef typename remove_all<T>::type type; }; template<typename T> struct is_arithmetic { enum { value = false }; }; template<> struct is_arithmetic<float> { enum { value = true }; }; template<> struct is_arithmetic<double> { enum { value = true }; }; template<> struct is_arithmetic<long double> { enum { value = true }; }; template<> struct is_arithmetic<bool> { enum { value = true }; }; template<> struct is_arithmetic<char> { enum { value = true }; }; template<> struct is_arithmetic<signed char> { enum { value = true }; }; template<> struct is_arithmetic<unsigned char> { enum { value = true }; }; template<> struct is_arithmetic<signed short> { enum { value = true }; }; template<> struct is_arithmetic<unsigned short>{ enum { value = true }; }; template<> struct is_arithmetic<signed int> { enum { value = true }; }; template<> struct is_arithmetic<unsigned int> { enum { value = true }; }; template<> struct is_arithmetic<signed long> { enum { value = true }; }; template<> struct is_arithmetic<unsigned long> { enum { value = true }; }; template <typename T> struct add_const { typedef const T type; }; template <typename T> struct add_const<T&> { typedef T& type; }; template <typename T> struct is_const { enum { 
value = 0 }; }; template <typename T> struct is_const<T const> { enum { value = 1 }; }; template<typename T> struct add_const_on_value_type { typedef const T type; }; template<typename T> struct add_const_on_value_type<T&> { typedef T const& type; }; template<typename T> struct add_const_on_value_type<T*> { typedef T const* type; }; template<typename T> struct add_const_on_value_type<T* const> { typedef T const* const type; }; template<typename T> struct add_const_on_value_type<T const* const> { typedef T const* const type; }; /** \internal Allows to enable/disable an overload * according to a compile time condition. */ template<bool Condition, typename T> struct enable_if; template<typename T> struct enable_if<true,T> { typedef T type; }; /** \internal * A base class do disable default copy ctor and copy assignement operator. */ class noncopyable { noncopyable(const noncopyable&); const noncopyable& operator=(const noncopyable&); protected: noncopyable() {} ~noncopyable() {} }; /** \internal * Convenient struct to get the result type of a unary or binary functor. * * It supports both the current STL mechanism (using the result_type member) as well as * upcoming next STL generation (using a templated result member). * If none of these members is provided, then the type of the first argument is returned. FIXME, that behavior is a pretty bad hack. */ template<typename T> struct result_of {}; struct has_none {int a[1];}; struct has_std_result_type {int a[2];}; struct has_tr1_result {int a[3];}; template<typename Func, typename ArgType, int SizeOf=sizeof(has_none)> struct unary_result_of_select {typedef ArgType type;}; template<typename Func, typename ArgType> struct unary_result_of_select<Func, ArgType, sizeof(has_std_result_type)> {typedef typename Func::result_type type;}; template<typename Func, typename ArgType> struct unary_result_of_select<Func, ArgType, sizeof(has_tr1_result)> {typedef typename Func::template result<Func(ArgType)>::type type;}; template<typename Func, typename ArgType> struct result_of<Func(ArgType)> { template<typename T> static has_std_result_type testFunctor(T const *, typename T::result_type const * = 0); template<typename T> static has_tr1_result testFunctor(T const *, typename T::template result<T(ArgType)>::type const * = 0); static has_none testFunctor(...); // note that the following indirection is needed for gcc-3.3 enum {FunctorType = sizeof(testFunctor(static_cast<Func*>(0)))}; typedef typename unary_result_of_select<Func, ArgType, FunctorType>::type type; }; template<typename Func, typename ArgType0, typename ArgType1, int SizeOf=sizeof(has_none)> struct binary_result_of_select {typedef ArgType0 type;}; template<typename Func, typename ArgType0, typename ArgType1> struct binary_result_of_select<Func, ArgType0, ArgType1, sizeof(has_std_result_type)> {typedef typename Func::result_type type;}; template<typename Func, typename ArgType0, typename ArgType1> struct binary_result_of_select<Func, ArgType0, ArgType1, sizeof(has_tr1_result)> {typedef typename Func::template result<Func(ArgType0,ArgType1)>::type type;}; template<typename Func, typename ArgType0, typename ArgType1> struct result_of<Func(ArgType0,ArgType1)> { template<typename T> static has_std_result_type testFunctor(T const *, typename T::result_type const * = 0); template<typename T> static has_tr1_result testFunctor(T const *, typename T::template result<T(ArgType0,ArgType1)>::type const * = 0); static has_none testFunctor(...); // note that the following indirection is needed for gcc-3.3 enum 
{FunctorType = sizeof(testFunctor(static_cast<Func*>(0)))}; typedef typename binary_result_of_select<Func, ArgType0, ArgType1, FunctorType>::type type; }; /** \internal In short, it computes int(sqrt(\a Y)) with \a Y an integer. * Usage example: \code meta_sqrt<1023>::ret \endcode */ template<int Y, int InfX = 0, int SupX = ((Y==1) ? 1 : Y/2), bool Done = ((SupX-InfX)<=1 ? true : ((SupX*SupX <= Y) && ((SupX+1)*(SupX+1) > Y))) > // use ?: instead of || just to shut up a stupid gcc 4.3 warning class meta_sqrt { enum { MidX = (InfX+SupX)/2, TakeInf = MidX*MidX > Y ? 1 : 0, NewInf = int(TakeInf) ? InfX : int(MidX), NewSup = int(TakeInf) ? int(MidX) : SupX }; public: enum { ret = meta_sqrt<Y,NewInf,NewSup>::ret }; }; template<int Y, int InfX, int SupX> class meta_sqrt<Y, InfX, SupX, true> { public: enum { ret = (SupX*SupX <= Y) ? SupX : InfX }; }; /** \internal determines whether the product of two numeric types is allowed and what the return type is */ template<typename T, typename U> struct scalar_product_traits { enum { Defined = 0 }; }; template<typename T> struct scalar_product_traits<T,T> { enum { // Cost = NumTraits<T>::MulCost, Defined = 1 }; typedef T ReturnType; }; template<typename T> struct scalar_product_traits<T,std::complex<T> > { enum { // Cost = 2*NumTraits<T>::MulCost, Defined = 1 }; typedef std::complex<T> ReturnType; }; template<typename T> struct scalar_product_traits<std::complex<T>, T> { enum { // Cost = 2*NumTraits<T>::MulCost, Defined = 1 }; typedef std::complex<T> ReturnType; }; // FIXME quick workaround around current limitation of result_of // template<typename Scalar, typename ArgType0, typename ArgType1> // struct result_of<scalar_product_op<Scalar>(ArgType0,ArgType1)> { // typedef typename scalar_product_traits<typename remove_all<ArgType0>::type, typename remove_all<ArgType1>::type>::ReturnType type; // }; template<typename T> struct is_diagonal { enum { ret = false }; }; template<typename T> struct is_diagonal<DiagonalBase<T> > { enum { ret = true }; }; template<typename T> struct is_diagonal<DiagonalWrapper<T> > { enum { ret = true }; }; template<typename T, int S> struct is_diagonal<DiagonalMatrix<T,S> > { enum { ret = true }; }; } // end namespace internal } // end namespace Eigen #endif // EIGEN_META_H
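A few compile-time checks illustrating the metaprogramming helpers above. This is not part of Eigen; it assumes a C++11 compiler for static_assert and that these internal helpers, being implementation details, may change between Eigen versions.

#include <Eigen/Core>   // brings in the Eigen::internal meta helpers shown above

using namespace Eigen::internal;

// conditional<> selects one of two types at compile time.
static_assert(is_same<conditional<true,  int, float>::type, int  >::value, "picks Then");
static_assert(is_same<conditional<false, int, float>::type, float>::value, "picks Else");

// remove_all<> strips const, references and pointers in one pass.
static_assert(is_same<remove_all<const int*&>::type, int>::value, "strips cv, ref and ptr");

// meta_sqrt<Y>::ret is int(sqrt(Y)) computed at compile time.
static_assert(meta_sqrt<1023>::ret == 31, "31 * 31 = 961 <= 1023 < 1024 = 32 * 32");

int main() { return 0; }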
{ "pile_set_name": "Github" }
/**
 * Copyright (c) 2008 Andrew Wilson <[email protected]>.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 * Created 10-Aug-2008 00:42:42
 */
package pcgen.core.term;

import pcgen.cdom.enumeration.ObjectKey;
import pcgen.cdom.reference.CDOMSingleRef;
import pcgen.core.PCClass;
import pcgen.core.PCStat;
import pcgen.core.PlayerCharacter;

public class PCSPellBaseStatScoreEvaluatorTermEvaluator extends BasePCTermEvaluator implements TermEvaluator
{
	private final String classKey;

	public PCSPellBaseStatScoreEvaluatorTermEvaluator(String originalText, String classKey)
	{
		this.originalText = originalText;
		this.classKey = classKey;
	}

	@Override
	public Float resolve(PlayerCharacter pc)
	{
		final PCClass aClass = pc.getClassKeyed(classKey);

		if (aClass == null)
		{
			return 0.0f;
		}

		CDOMSingleRef<PCStat> ss = aClass.get(ObjectKey.SPELL_STAT);
		if (ss == null)
		{
			return 10.0f;
		}
		return (float) pc.getDisplay().getStatModFor(ss.get());
	}

	@Override
	public boolean isSourceDependant()
	{
		return true;
	}

	public boolean isStatic()
	{
		return false;
	}
}
{ "pile_set_name": "Github" }
#!/usr/bin/env bash

export VW_BIN=/opt/vw/vowpalwabbit

TRAIN="cat ../data/train.csv"
TEST="cat ../data/test.csv"
JAVA_BIN="java -Xmx4g -cp ../display-ad-java/target/*:. com.sigaphi.kaggle.displayad"

echo "import data into redis ..."
$TRAIN | $JAVA_BIN.ToRedis
$TEST | $JAVA_BIN.ToRedis

echo "making vw input files ..."
$TRAIN | $JAVA_BIN.FeaturesToVw | gzip > train.vw.gz
$TEST | $JAVA_BIN.FeaturesToVw | gzip > test.vw.gz

echo "training model 1 ..."
python ../scripts/vw_run.py quad_11 3 6000000
python ../scripts/vw_run.py quad_13 3 100000
python ../scripts/vw_run.py quad_12 1 10000
mv prediction_test.txt prediction_test_1.txt

echo "training model 2 ..."
python ../scripts/vw_run.py poly_1 6 1
mv prediction_test.txt prediction_test_2.txt

echo "training model 3 ..."
python ../scripts/vw_run.py poly_2 6 10000
mv prediction_test.txt prediction_test_3.txt

echo "training model 4 ..."
python ../scripts/vw_run.py poly_3 6 100000
mv prediction_test.txt prediction_test_4.txt

echo "making a submission file"
cat <(echo "Id,p1,p2,p3,p4") <(paste -d"," <(zcat test.vw.gz | cut -f1 | cut -d"," -f1) prediction_test_1.txt prediction_test_2.txt prediction_test_3.txt prediction_test_4.txt) | python ../scripts/submit.py
{ "pile_set_name": "Github" }
*> \brief \b SLADIV
*
*  =========== DOCUMENTATION ===========
*
* Online html documentation available at
*            http://www.netlib.org/lapack/explore-html/
*
*> \htmlonly
*> Download SLADIV + dependencies
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.tgz?format=tgz&filename=/lapack/lapack_routine/sladiv.f">
*> [TGZ]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.zip?format=zip&filename=/lapack/lapack_routine/sladiv.f">
*> [ZIP]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.txt?format=txt&filename=/lapack/lapack_routine/sladiv.f">
*> [TXT]</a>
*> \endhtmlonly
*
*  Definition:
*  ===========
*
*       SUBROUTINE SLADIV( A, B, C, D, P, Q )
*
*       .. Scalar Arguments ..
*       REAL               A, B, C, D, P, Q
*       ..
*
*
*> \par Purpose:
*  =============
*>
*> \verbatim
*>
*> SLADIV performs complex division in real arithmetic
*>
*>                       a + i*b
*>            p + i*q = ---------
*>                       c + i*d
*>
*> The algorithm is due to Robert L. Smith and can be found
*> in D. Knuth, The art of Computer Programming, Vol.2, p.195
*> \endverbatim
*
*  Arguments:
*  ==========
*
*> \param[in] A
*> \verbatim
*>          A is REAL
*> \endverbatim
*>
*> \param[in] B
*> \verbatim
*>          B is REAL
*> \endverbatim
*>
*> \param[in] C
*> \verbatim
*>          C is REAL
*> \endverbatim
*>
*> \param[in] D
*> \verbatim
*>          D is REAL
*>          The scalars a, b, c, and d in the above expression.
*> \endverbatim
*>
*> \param[out] P
*> \verbatim
*>          P is REAL
*> \endverbatim
*>
*> \param[out] Q
*> \verbatim
*>          Q is REAL
*>          The scalars p and q in the above expression.
*> \endverbatim
*
*  Authors:
*  ========
*
*> \author Univ. of Tennessee
*> \author Univ. of California Berkeley
*> \author Univ. of Colorado Denver
*> \author NAG Ltd.
*
*> \date November 2011
*
*> \ingroup auxOTHERauxiliary
*
*  =====================================================================
      SUBROUTINE SLADIV( A, B, C, D, P, Q )
*
*  -- LAPACK auxiliary routine (version 3.4.0) --
*  -- LAPACK is a software package provided by Univ. of Tennessee,    --
*  -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..--
*     November 2011
*
*     .. Scalar Arguments ..
      REAL               A, B, C, D, P, Q
*     ..
*
*  =====================================================================
*
*     .. Local Scalars ..
      REAL               E, F
*     ..
*     .. Intrinsic Functions ..
      INTRINSIC          ABS
*     ..
*     .. Executable Statements ..
*
      IF( ABS( D ).LT.ABS( C ) ) THEN
         E = D / C
         F = C + D*E
         P = ( A+B*E ) / F
         Q = ( B-A*E ) / F
      ELSE
         E = C / D
         F = D + C*E
         P = ( B+A*E ) / F
         Q = ( -A+B*E ) / F
      END IF
*
      RETURN
*
*     End of SLADIV
*
      END
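The Purpose block above describes Smith's scaling for complex division in real arithmetic. As a cross-check of that arithmetic, here is a small, hypothetical stand-alone C++ sketch of the same update rules; it is not part of LAPACK, and the function name sladiv_like and the sample inputs are made up for illustration.

// Hypothetical stand-alone sketch of the Smith scaling used by SLADIV:
// compute (a + i*b) / (c + i*d) without forming c*c + d*d directly.
#include <cmath>
#include <cstdio>

static void sladiv_like(float a, float b, float c, float d, float &p, float &q) {
  if (std::fabs(d) < std::fabs(c)) {
    float e = d / c;          // |e| <= 1, so dividing by c is the safe choice
    float f = c + d * e;      // f = (c*c + d*d) / c
    p = (a + b * e) / f;
    q = (b - a * e) / f;
  } else {
    float e = c / d;
    float f = d + c * e;      // f = (c*c + d*d) / d
    p = (b + a * e) / f;
    q = (-a + b * e) / f;
  }
}

int main() {
  float p, q;
  // (1 + 2i) / (3 + 4i) = (11 + 2i) / 25 = 0.44 + 0.08i
  sladiv_like(1.0f, 2.0f, 3.0f, 4.0f, p, q);
  std::printf("p = %g, q = %g\n", p, q);  // expect p ~= 0.44, q ~= 0.08
  return 0;
}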
{ "pile_set_name": "Github" }