content
stringlengths
10
4.9M
BACKGROUND The use of new, high-priced therapies in intensive care medicine should be assessed by a questionnaire. METHODS Questionnaires were sent to 849 intensive care units in Germany. The use of three new strategies was surveyed: treatment of methicillin-resistant Staphylococcus aureus (MRSA) patients (using linezolid), of severe bleeding (using rFVIIa), and of severe sepsis (using activated protein C). RESULTS Approximately 39 % of the questionnaires were answered and analyzed. All three new strategies were only rarely or very rarely used in Germany, even in universities and hospitals with more than 1000 beds. This appears to be very astonishing because all substances were subject to extensive marketing campaigns, received scientific prizes (linezolid) or were strongly recommended by some scientific societies (aPC). One major concern about the use of the new approaches was based on the high pricing. Prices were assessed as excessive or very excessive. In spite of a mass of information about the substances, many of the intensivists judged the scientific basis as weak and not justifying the use of the costly substances. CONCLUSIONS Modern, costly pharmaceutical approaches in intensive care medicine are widely not accepted in Germany. Especially a tight financial corset hinders most intensivists from using these strategies that may be life-saving in some patients. A solution to this problem is urgently required, which can be reached only in intensive exchange with all who are responsible for this dilemma.
<filename>pkg/controllers/health.go package controllers import ( "fmt" "net/http" "github.com/cechaney/burrow/pkg/core" ) func healthHandler(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/html") w.WriteHeader(http.StatusOK) fmt.Fprintf(w, "OK") } //GetHealthController builds and returns the healthController func GetHealthController() core.Controller { controller := core.Controller{ Name: "health", Path: "/health", Handler: healthHandler, } return controller }
// ReplaceFailedProcessGroups flags failed processes groups for removal and returns an indicator // of whether any processes were thus flagged. func ReplaceFailedProcessGroups(log logr.Logger, cluster *fdbtypes.FoundationDBCluster, adminClient fdbadminclient.AdminClient) bool { logger := log.WithValues("namespace", cluster.Namespace, "cluster", cluster.Name, "reconciler", "replaceFailedProcessGroups") if !pointer.BoolDeref(cluster.Spec.AutomationOptions.Replacements.Enabled, false) { return false } maxReplacements := getMaxReplacements(cluster, cluster.GetMaxConcurrentAutomaticReplacements()) hasReplacement := false for _, processGroupStatus := range cluster.Status.ProcessGroups { if maxReplacements <= 0 { return hasReplacement } needsReplacement, missingTime := processGroupStatus.NeedsReplacement(*cluster.Spec.AutomationOptions.Replacements.FailureDetectionTimeSeconds) if needsReplacement && *cluster.Spec.AutomationOptions.Replacements.Enabled { if len(processGroupStatus.Addresses) == 0 { hasDesiredFaultTolerance, err := internal.HasDesiredFaultTolerance(adminClient, cluster) if err != nil { log.Error(err, "Could not fetch if cluster has desired fault tolerance") continue } if !hasDesiredFaultTolerance { log.Info( "Skip process group with missing address", "processGroupID", processGroupStatus.ProcessGroupID, "failureTime", time.Unix(missingTime, 0).UTC().String()) continue } processGroupStatus.ExclusionSkipped = true log.Info( "Replace process group with missing address", "processGroupID", processGroupStatus.ProcessGroupID, "failureTime", time.Unix(missingTime, 0).UTC().String()) } logger.Info("Replace process group", "processGroupID", processGroupStatus.ProcessGroupID, "reason", fmt.Sprintf("automatic replacement detected failure time: %s", time.Unix(missingTime, 0).UTC().String())) processGroupStatus.MarkForRemoval() hasReplacement = true maxReplacements-- } } return hasReplacement }
Update on the SKA offset optics design for the U.S. Technology Development Project The U.S. design concept for the Square Kilometre Array (SKA) program is based on utilizing a large number of small-diameter dish antennas in the 12 to 15 meter diameter range. The Technology Development Project (TDP) is planning to design and build the first of these antennas to provide a demonstration of the technology and a solid base on which to estimate costs. The latest considerations for selecting both the optics and feed design are presented.
/**
 * Adds a user provider.
 *
 * @param userProvider
 *          the user provider to add
 */
@Reference(
    name = "userProviders",
    cardinality = ReferenceCardinality.AT_LEAST_ONE,
    policy = ReferencePolicy.DYNAMIC,
    unbind = "removeUserProvider"
)
protected synchronized void addUserProvider(UserProvider userProvider) {
  logger.debug("Adding {} to the list of user providers", userProvider);
  // The in-memory provider goes to the front of the list; everything else
  // is appended (add at size() is equivalent to a plain append).
  int insertAt = InMemoryUserAndRoleProvider.PROVIDER_NAME.equals(userProvider.getName())
      ? 0
      : userProviders.size();
  userProviders.add(insertAt, userProvider);
}
<gh_stars>10-100 package server import ( "github.com/504dev/logr/config" . "github.com/504dev/logr/logger" "github.com/gin-contrib/static" "github.com/gin-gonic/gin" "io" "os" ) func ListenHTTP() error { gin.ForceConsoleColor() gin.DefaultWriter = io.MultiWriter(os.Stdout, GinWritter) // TODO react r := NewRouter() r.Use(static.Serve("/", static.LocalFile("./frontend/dist", false))) r.GET("/", frontend) r.GET("/demo", frontend) r.GET("/login", frontend) r.GET("/jwt/:token", frontend) r.GET("/dashboards", frontend) r.GET("/dashboard/*rest", frontend) return r.Run(config.Get().Bind.Http) } func frontend(c *gin.Context) { c.File("./frontend/dist/index.html") }
/// A custom widget containing all the fields necessary for configuring one boundary condition of a simulation class BCSelector : public Gtk::Frame { private: Gtk::Grid grid; Gtk::Label pressureTypeLabel; Gtk::ComboBoxText pressureTypeSelector; AnnotatedEntry pEntry; Gtk::Label velocityTypeLabel; Gtk::ComboBoxText velocityTypeSelector; AnnotatedEntry uxEntry; AnnotatedEntry uyEntry; BoundaryCond bc; VoidFunc inputChangeHandler; void onPressureTypeChange(); void onVelocityTypeChange(); void setEntryFields(); public: BCSelector(const str& atDescriptor, const VoidFunc& inputChangeHandler, StyleManager *styleManager); bool hasValidInput() const; BoundaryCond getBc() const; void setBc(const BoundaryCond& bc); }
def parse_data(content, raincontent, latitude=52.091579, longitude=5.119734, timeframe=60, usexml=False):
    """Dispatch parsing of weather + rain data to the XML or JSON parser.

    Args:
        content: raw weather data payload (format depends on ``usexml``).
        raincontent: raw precipitation data payload.
        latitude: location latitude (defaults look like a Dutch location —
            TODO confirm intent).
        longitude: location longitude.
        timeframe: forecast window in minutes (default 60).
        usexml: when True parse ``content`` as XML, otherwise as JSON.

    Returns:
        Whatever the selected ``parse_xml_data``/``parse_json_data`` returns.
    """
    if usexml:
        return parse_xml_data(content, raincontent, latitude, longitude, timeframe)
    # No `else` needed after a return — fall through to the JSON parser.
    return parse_json_data(content, raincontent, latitude, longitude, timeframe)
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2006, Red Hat Middleware LLC, and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.jboss.tm;

import javax.transaction.Status;
import javax.transaction.SystemException;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
import javax.transaction.xa.XAResource;

import org.jboss.util.NestedRuntimeException;

/**
 * TxUtils.java has utility methods for determining transaction status
 * in various useful ways.
 *
 * Each predicate is offered in four overloads (Transaction,
 * TransactionManager, UserTransaction, raw int status); the object
 * overloads delegate to the int overload and wrap SystemException in an
 * unchecked NestedRuntimeException.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * @version $Revision: 63567 $
 */
public class TxUtils
{
   /** Transaction Status Strings, indexed by the status code itself
       (see getStatusAsString below, which indexes this array directly). */
   private static final String[] TxStatusStrings =
   {
      "STATUS_ACTIVE",
      "STATUS_MARKED_ROLLBACK",
      "STATUS_PREPARED",
      "STATUS_COMMITTED",
      "STATUS_ROLLEDBACK",
      "STATUS_UNKNOWN",
      "STATUS_NO_TRANSACTION",
      "STATUS_PREPARING",
      "STATUS_COMMITTING",
      "STATUS_ROLLING_BACK"
   };

   /**
    * Do now allow instances of this class
    */
   private TxUtils()
   {
   }

   /** Returns true if the transaction is non-null and in STATUS_ACTIVE. */
   public static boolean isActive(Transaction tx)
   {
      // A null transaction is by definition not active.
      if (tx == null)
         return false;

      try
      {
         int status = tx.getStatus();
         return isActive(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the manager's current transaction is active. */
   public static boolean isActive(TransactionManager tm)
   {
      try
      {
         return isActive(tm.getTransaction());
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the user transaction is in STATUS_ACTIVE. */
   public static boolean isActive(UserTransaction ut)
   {
      try
      {
         int status = ut.getStatus();
         return isActive(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Core predicate: active means exactly STATUS_ACTIVE. */
   public static boolean isActive(int status)
   {
      return status == Status.STATUS_ACTIVE;
   }

   /** Returns true if the transaction is non-null and not yet committed
       (active or marked rollback-only). */
   public static boolean isUncommitted(Transaction tx)
   {
      if (tx == null)
         return false;

      try
      {
         int status = tx.getStatus();
         return isUncommitted(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the manager's current transaction is uncommitted. */
   public static boolean isUncommitted(TransactionManager tm)
   {
      try
      {
         return isUncommitted(tm.getTransaction());
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the user transaction is uncommitted. */
   public static boolean isUncommitted(UserTransaction ut)
   {
      try
      {
         int status = ut.getStatus();
         return isUncommitted(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Core predicate: uncommitted = ACTIVE or MARKED_ROLLBACK. */
   public static boolean isUncommitted(int status)
   {
      return status == Status.STATUS_ACTIVE || status == Status.STATUS_MARKED_ROLLBACK;
   }

   /** Returns true if the transaction has finished (note: a null
       transaction counts as completed, unlike the other predicates). */
   public static boolean isCompleted(Transaction tx)
   {
      if (tx == null)
         return true;

      try
      {
         int status = tx.getStatus();
         return isCompleted(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the manager's current transaction is completed. */
   public static boolean isCompleted(TransactionManager tm)
   {
      try
      {
         return isCompleted(tm.getTransaction());
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the user transaction is completed. */
   public static boolean isCompleted(UserTransaction ut)
   {
      try
      {
         int status = ut.getStatus();
         return isCompleted(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Core predicate: completed = COMMITTED, ROLLEDBACK or NO_TRANSACTION. */
   public static boolean isCompleted(int status)
   {
      return status == Status.STATUS_COMMITTED
          || status == Status.STATUS_ROLLEDBACK
          || status == Status.STATUS_NO_TRANSACTION;
   }

   /** Returns true if the transaction is non-null and rolling back or
       rolled back (including rollback-only). */
   public static boolean isRollback(Transaction tx)
   {
      if (tx == null)
         return false;

      try
      {
         int status = tx.getStatus();
         return isRollback(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the manager's current transaction is in rollback. */
   public static boolean isRollback(TransactionManager tm)
   {
      try
      {
         return isRollback(tm.getTransaction());
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Returns true if the user transaction is in rollback. */
   public static boolean isRollback(UserTransaction ut)
   {
      try
      {
         int status = ut.getStatus();
         return isRollback(status);
      }
      catch (SystemException error)
      {
         throw new NestedRuntimeException(error);
      }
   }

   /** Core predicate: rollback = MARKED_ROLLBACK, ROLLING_BACK or ROLLEDBACK. */
   public static boolean isRollback(int status)
   {
      return status == Status.STATUS_MARKED_ROLLBACK
          || status == Status.STATUS_ROLLING_BACK
          || status == Status.STATUS_ROLLEDBACK;
   }

   /**
    * Converts a tx Status index to a String
    *
    * @see javax.transaction.Status
    *
    * @param status the Status index
    * @return status as String or "STATUS_INVALID"
    */
   public static String getStatusAsString(int status)
   {
      // Assumes the Status constants form a contiguous range so they can
      // index TxStatusStrings directly; anything outside is invalid.
      if (status >= Status.STATUS_ACTIVE && status <= Status.STATUS_ROLLING_BACK)
      {
         return TxStatusStrings[status];
      }
      else
      {
         return "STATUS_INVALID";
      }
   }

   /**
    * Converts a XAResource flag to a String
    *
    * @see javax.transaction.xa.XAResource
    *
    * @param flags the flags passed in to start(), end(), recover()
    * @return the flags in String form
    */
   public static String getXAResourceFlagsAsString(int flags)
   {
      if (flags == XAResource.TMNOFLAGS)
      {
         return "|TMNOFLAGS";
      }
      else
      {
         // Build a "|FLAG|FLAG..." string from whichever bits are set.
         StringBuffer sbuf = new StringBuffer(64);

         if ((flags & XAResource.TMONEPHASE) != 0)
         {
            sbuf.append("|TMONEPHASE");
         }
         if ((flags & XAResource.TMJOIN) != 0)
         {
            sbuf.append("|TMJOIN");
         }
         if ((flags & XAResource.TMRESUME) != 0)
         {
            sbuf.append("|TMRESUME");
         }
         if ((flags & XAResource.TMSUCCESS) != 0)
         {
            sbuf.append("|TMSUCCESS");
         }
         if ((flags & XAResource.TMFAIL) != 0)
         {
            sbuf.append("|TMFAIL");
         }
         if ((flags & XAResource.TMSUSPEND) != 0)
         {
            sbuf.append("|TMSUSPEND");
         }
         if ((flags & XAResource.TMSTARTRSCAN) != 0)
         {
            sbuf.append("|TMSTARTRSCAN");
         }
         if ((flags & XAResource.TMENDRSCAN) != 0)
         {
            sbuf.append("|TMENDRSCAN");
         }

         return sbuf.toString();
      }
   }
}
/**
 * A convenience class to make constructor parameters less opaque.
 */
public static class Builder {
    // Default values used when a setter is not called.
    private double openAtGoodRatio = 0.5;
    private double closeAtGoodRatio = 0.75;
    private int minSliceCount = 10;
    private int evalEveryNMillis = 100;
    private int resetAfterNMillis = 99;
    private int failAfterNBadResets = 0;

    /** Sets openAtGoodRatio and returns this builder for chaining. */
    public Builder setOpenAtGoodRatio(double value) {
        this.openAtGoodRatio = value;
        return this;
    }

    /** Sets closeAtGoodRatio and returns this builder for chaining. */
    public Builder setCloseAtGoodRatio(double value) {
        this.closeAtGoodRatio = value;
        return this;
    }

    /** Sets minSliceCount and returns this builder for chaining. */
    public Builder setMinSliceCount(int value) {
        this.minSliceCount = value;
        return this;
    }

    /** Sets evalEveryNMillis and returns this builder for chaining. */
    public Builder setEvalEveryNMillis(int value) {
        this.evalEveryNMillis = value;
        return this;
    }

    /** Sets resetAfterNMillis and returns this builder for chaining. */
    public Builder setResetAfterNMillis(int value) {
        this.resetAfterNMillis = value;
        return this;
    }

    /** Sets failAfterNBadResets and returns this builder for chaining. */
    public Builder setFailAfterNBadResets(int value) {
        this.failAfterNBadResets = value;
        return this;
    }

    /** Assembles a TimedRatioPolicy from the collected settings. */
    public TimedRatioPolicy build() {
        return new TimedRatioPolicy(
            openAtGoodRatio, closeAtGoodRatio, minSliceCount,
            evalEveryNMillis, resetAfterNMillis, failAfterNBadResets);
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Copyright (c) 2013-2016, <NAME>. All rights reserved.
 */

#include "base/CCEventListenerKeyboard.h"
#include "base/CCEventListenerMouse.h"
#include "base/CCEventListenerTouch.h"
#include "renderer/CCTextureCache.h"
#include "2d/CCMenuItem.h"
#include "2d/CCSprite.h"
#include "2d/CCMenu.h"
#include "core/XConfig.h"
#include "core/CCSX.h"
#include "XScene.h"
#include "XLayer.h"

NS_ALIAS(cx, fusii::ccsx)
NS_BEGIN(fusii)

//////////////////////////////////////////////////////////////////////////////
// Initialize this layer, attach it to the parent scene at z-order `zx`, then
// run the three decoration hooks (pre / UI / post) in order.
bool XLayer::initEx(XScene *par, int zx) {
  if (c::Layer::init()) {
    par->addLayer(this, zx);
    decoPre();
    decoUI();
    decoPost();
    return true;
  } else {
    return false;
  }
}

//////////////////////////////////////////////////////////////////////////////
// Remember the parent scene object
//
XScene* XLayer::getSceneX() {
  return static_cast<XScene*>( getParent());
}

//////////////////////////////////////////////////////////////////////////////
// Constructor: register this instance via bind() — presumably a project-level
// registration hook; confirm against the XLayer header.
XLayer::XLayer() {
  bind(this);
}

//////////////////////////////////////////////////////////////////////////////
// Report whether a key is currently pressed; key codes outside [0,256)
// always report false.
bool XLayer::keyPoll(KEYCODE key) {
  int k= (int) key;
  return k >= 0 && k < 256 ? _keys[k] : false;
}

//////////////////////////////////////////////////////////////////////////////
// Turn off keyboard/mouse/touch listeners and clear recorded key state.
// Each toggle is wrapped in its own try/catch so one failure cannot stop
// the others from being disabled.
void XLayer::disableListeners() {
  //CCLOG("disabling event handlers");
  try { setKeyboardEnabled(false); } catch (...) { }
  try { setMouseEnabled(false); } catch (...) { }
  try { setTouchEnabled(false); } catch (...) { }
  _keys.fill(false);
}

//////////////////////////////////////////////////////////////////////////////
// Re-register the listeners suited to the platform: keyboard + mouse on
// desktop, touch otherwise. Any previously installed listeners are dropped
// first via disableListeners().
void XLayer::enableListeners() {
  disableListeners();
  try {
    if (cx::isDesktop()) setKeyboardEnabled(true);
  } catch (...) {
    CCLOG("failed to init-keys");
  }
  try {
    if (cx::isDesktop()) setMouseEnabled(true);
  } catch (...) {
    CCLOG("failed to init-mouse");
  }
  try {
    if (!cx::isDesktop()) setTouchEnabled(true);
  } catch (...) {
    CCLOG("failed to init-touch");
  }
  //CCLOG("enabled event handlers");
}

//////////////////////////////////////////////////////////////////////////////
// Fan a touches-began event out to the multi- or single-touch handler
// depending on the configured dispatch mode.
void XLayer::onTouchesBegan(const VecTouches &ts, c::Event*) {
  if (_tMode == c::Touch::DispatchMode::ALL_AT_ONCE) {
    onTouchStart(ts);
  } else {
    onTouchStart(ts[0]);
  }
}

//////////////////////////////////////////////////////////////////////////////
// Same dispatch-mode fan-out for touch movement.
void XLayer::onTouchesMoved(const VecTouches &ts, c::Event*) {
  if (_tMode == c::Touch::DispatchMode::ALL_AT_ONCE) {
    onTouchMotion(ts);
  } else {
    onTouchMotion(ts[0]);
  }
}

//////////////////////////////////////////////////////////////////////////////
// Same dispatch-mode fan-out for touch end.
void XLayer::onTouchesEnded(const VecTouches &ts, c::Event*) {
  if (_tMode == c::Touch::DispatchMode::ALL_AT_ONCE) {
    onTouchEnd(ts);
  } else {
    onTouchEnd(ts[0]);
  }
}

//////////////////////////////////////////////////////////////////////////////
// Mouse-motion hook; default implementation does nothing.
void XLayer::onMouseMotion(const CCT_PT &loc) {
}

//////////////////////////////////////////////////////////////////////////////
// Multi-touch start; subclasses using ALL_AT_ONCE mode must override
// (the default throws).
bool XLayer::onTouchStart(const VecTouches &ts) {
  throw "you need to implement this!";
}

//////////////////////////////////////////////////////////////////////////////
// Single-touch start; default accepts the touch.
bool XLayer::onTouchStart(c::Touch *tap) {
  return true;
}

//////////////////////////////////////////////////////////////////////////////
// Multi-touch end; subclasses using ALL_AT_ONCE mode must override
// (the default throws).
void XLayer::onTouchEnd(const VecTouches &ts) {
  throw "you need to implement this!";
}

//////////////////////////////////////////////////////////////////////////////
// Single-touch end; default does nothing.
void XLayer::onTouchEnd(c::Touch *tap) {
}
//////////////////////////////////////////////////////////////////////////////
// Multi-touch motion; subclasses using ALL_AT_ONCE mode must override
// (the default throws).
void XLayer::onTouchMotion(const VecTouches &ts) {
  throw "you need to implement this!";
}

//////////////////////////////////////////////////////////////////////////////
// Single-touch motion; default does nothing (clamping sample kept below
// as commented-out reference code).
void XLayer::onTouchMotion(c::Touch *tap) {
  //auto bx= MGMS()->getEnclosureBox();
  //auto loc= tap->getLocation();
  //auto pos= cx::clamp(loc, bx);
  //c->setPos(pos.x, pos.y);
}

//////////////////////////////////////////////////////////////////////////////
// Record key-down state for later polling via keyPoll().
void XLayer::onKeyPressed(KEYCODE k, c::Event*) {
  int n= (int)k;
  if (n >= 0 && n < _keys.size()) {
    this->_keys[n]= true;
  }
}

//////////////////////////////////////////////////////////////////////////////
// Record key-up state.
void XLayer::onKeyReleased(KEYCODE k, c::Event*) {
  int n= (int)k;
  if (n >= 0 && n < _keys.size()) {
    this->_keys[n]=false;
  }
}

//////////////////////////////////////////////////////////////////////////////
// Mouse button pressed: mark the down state and, for the configured button,
// let onMouseStart() decide whether this layer becomes the drag target.
void XLayer::onMouseDown(c::Event *event) {
  // Ignore re-entrant down events while a button is already held.
  if (_mouseDown) { return; }
  auto e= (c::EventMouse*)event;
  _mouseTarget=false;
  _mouseDown=true;
  if (e->getMouseButton() == _mouseBtn) {
    _mouseTarget=onMouseStart(e->getLocationInView());
  }
}

//////////////////////////////////////////////////////////////////////////////
// Mouse button released: deliver a click for the configured button, then
// reset the drag/down state.
void XLayer::onMouseUp(c::Event *event) {
  auto e= (c::EventMouse*)event;
  if (e->getMouseButton() == _mouseBtn) {
    onMouseClick(e->getLocationInView());
  }
  _mouseTarget=false;
  _mouseDown=false;
}

//////////////////////////////////////////////////////////////////////////////
// Default mouse-down hook: claim the mouse target.
bool XLayer::onMouseStart(const CCT_PT &loc) {
  return true;
  //CCLOG("mouse Down!");
}

//////////////////////////////////////////////////////////////////////////////
// Default click hook: no-op.
void XLayer::onMouseClick(const CCT_PT &loc) {
  //CCLOG("mouse Up!");
}

//////////////////////////////////////////////////////////////////////////////
// Forward motion to onMouseMotion() only while this layer is the drag
// target and the configured button is still the one being moved.
void XLayer::onMouseMove(c::Event *event) {
  auto e= (c::EventMouse*)event;
  if (_mouseTarget &&
      _mouseBtn == e->getMouseButton()) {
    onMouseMotion(e->getLocationInView());
  }
}

//////////////////////////////////////////////////////////////////////////////
// Scroll hook; default does nothing.
void XLayer::onMouseScroll(c::Event*) {
}

//////////////////////////////////////////////////////////////////////////////
// Enable/disable the shared mouse listener.
// NOTE(review): N_NIL(_mouseListener) appears to bail out when a listener
// is already installed (avoiding double registration) — confirm the macro's
// semantics; S__NIL presumably resets the pointer to null on teardown.
void XLayer::setMouseEnabled(bool enabled) {
  if (_mouseEnabled != enabled) {
    _mouseEnabled = enabled;
    if (enabled) {
      if (N_NIL(_mouseListener)) { return; }
      auto n = c::EventListenerMouse::create();
      _mouseListener = n;
      n->onMouseScroll = CC_CALLBACK_1(XLayer::onMouseScroll, this);
      n->onMouseMove = CC_CALLBACK_1(XLayer::onMouseMove, this);
      n->onMouseUp = CC_CALLBACK_1(XLayer::onMouseUp, this);
      n->onMouseDown = CC_CALLBACK_1(XLayer::onMouseDown, this);
      _eventDispatcher->addEventListenerWithSceneGraphPriority(n, this);
    } else {
      _eventDispatcher->removeEventListener(_mouseListener);
      S__NIL(_mouseListener)
    }
  }
}

NS_END
Bishop John Magee faced criticism over his handling of allegations of sex abuse in his diocese [PA Wire] Bishop John Magee faced criticism over his handling of allegations of sex abuse in his diocese [PA Wire] "As I depart, I want to offer once again my sincere apologies to any person who has been abused by any priest of the diocese of Cloyne during my time as bishop or at any time," he said. 'Decades of abuse' Magee, aged 73, had been a senior figure in the Vatican, previously serving as a private secretary to three successive Roman Catholic popes before being assigned to Cloyne. He had apologised when the report into clerical child abuse by National Board for Safeguarding Children was first published at the end of 2008, but refused to resign. Ken Murray, an Irish journalist, told Al Jazeera the image of the Catholic church in Ireland had "taken a bashing" over sex abuse allegations. "This is yet another bad day for the image of the Catholic church in this country," he said. Irish government-ordered investigations have documented decades of child abuse and cover-ups in the Catholic church. Three reports published between 2005 and 2009 have documented how thousands of Irish children suffered rape, molestation and other abuse by priests in their parishes and by nuns and brothers in boarding schools and orphanages. Irish bishops did not report a single case to police until 1996 after victims began to sue the church. On Saturday, the pope apologised for decades of abuse, but took no action against bishops blamed for cover-ups. Magee's resignation is the second from an Irish bishop to be accepted by Rome. Donal Murray, former bishop of Limerick, had his resignation accepted within 10 days when he offered to quit last December. He was criticised in an investigation into the Dublin Archdiocese over failures to report child abuse.
It's unsettled, unpredictable and wide open. This election campaign is not only a three-way race – most voters are still up for grabs. Only 40 per cent of Canadians have picked a party and say that's the only one they'll consider. Most voters, three-fifths of the electorate, are still considering voting for two or more parties, or aren't sure who they might pick, according to Nanos Research data from rolling surveys that provide an insight into potential swing votes. In fact, 20 per cent of those surveyed would still consider voting for any one of three or more parties. It's a sign that the relatively tight, nose-to-nose horse race masks a volatile electorate. Voters are still flirting with more than one option, and big swings are possible. Story continues below advertisement "Although people have leanings, they're open to changing their mind," said pollster Nik Nanos. "A campaign is a trial by fire for all the federal party leaders, and what this shows is how much damage a leader can do to his own campaign if he missteps." It's not the same for every party. After a decade in office, Mr. Harper's Conservatives have the most devoted group of supporters, who are less likely to see other parties as an option. In fact, even when asked to list a second choice, 31.5 per cent of Conservative voters say they have none – more than the backers of any other party. At its core, Mr. Nanos said, the Conservatives have "an almost unshakeable base" that has stuck with the party through episodes like the Mike Duffy scandal. But the Tories also have limited growth potential compared with the other two major parties: the pool of people who say they would consider voting Conservative is smaller. Both the New Democrats and the Liberals have less committed support – and there's a lot of crossover potential among supporters of the two parties. New Democrats tend to see the Liberals as their second choice, and vice versa. 
"There's a lot of cross-pollination between New Democrat and Liberal supporters, who are probably just seeing either of those parties as vehicles to try to stop Stephen Harper," Mr. Nanos said. The Nanos Research data also provides some potential clues as to why Mr. Harper's Conservatives focused their pre-election advertising on knocking back Liberal Leader Justin Trudeau, rather than taking aim at the new front-runner, the NDP's Thomas Mulcair. Story continues below advertisement Story continues below advertisement One reason is that weakening the NDP before the campaign would be more likely to help the Liberals than the Tories. Only 12 per cent of NDP supporters list the Conservatives as their second choice, while 40.6 per cent see the Liberals as the best alternative – so if voters leave Mr. Mulcair, the lion's share can be expected to turn to Mr. Trudeau. When the Liberals fall, however, the Conservatives can expect to do somewhat better, because 29.2 per cent of Liberal supporters rate the Tories second. That still leaves Mr. Harper with a tough challenge to win a majority government, however. He doesn't have that kind of level of support yet, Mr. Nanos noted, and in order to emerge as the clear winner, he needs both Mr. Mulcair and Mr. Trudeau to do poorly. Right now, besting one of them mostly helps the other. His Conservatives are rarely voters' second choice. For Mr. Mulcair, that's also the best path to victory: he clearly has to target Mr. Harper, the incumbent, but his party is most likely to gain if the Liberals lose support. More than 45 per cent who favour the Liberals see the NDP as second choice, and only 29 per cent would choose the Conservatives. By contrast, Mr. Trudeau's Liberals are the most popular second choice, and can gain from either the left or right – so if the Tories decline, the Liberals are likely to gain. 
The Nanos data, based on rolling surveys of 1,000 Canadians conducted between June 28 and July 25, is something of a flip side of standard horse-race poll numbers, which show the party the respondents intend to vote for, plus a number of undecided voters. Respondents were asked both who they would consider voting for, and to rank their choices. It shows most voters are still open to a second choice – and will track how voters' choices solidify during the campaign. There is also another 6.4 per cent who say they aren't considering any party, or are unsure how to answer – usually those who are confused or uninterested in politics. "These are the people who are very unlikely to vote," Mr. Nanos said.
"""Timing benchmarks for mongoframes document selection.

Compares three selection strategies: a flat select with no dereferencing,
a select that expands embedded sub-documents, and a select that also
dereferences documents referenced from another collection.
"""

from mongoframes import *

# Project-local helpers: DB connection, fixture generation, and a simple timer.
from __init__ import build_test_data, connect_to_db, time_it


# Define models

class Company(Frame):
    # Top-level document. `departments` presumably holds embedded Department
    # sub-documents (expanded via a `$sub` projection below) -- confirm against
    # the fixture builder.
    _fields = {
        'name',
        'departments',
        'address',
        'tel',
        'website_url'
        }


class Department(SubFrame):
    # Embedded (sub-)document stored inside Company.departments.
    _fields = {
        'name',
        'year_end',
        'annual_budget'
        }


class Employee(Frame):
    # Top-level document. `company` appears to reference a Company document
    # (dereferenced via a `$ref` projection below).
    _fields = {
        'first_name',
        'last_name',
        'dob',
        'role',
        'tel',
        'email',
        'annual_salary',
        'ssn',
        'company',
        'department'
        }


# Define tests

def test_flat_select():
    """Select all employees no dereferencing"""
    Employee.many()

def test_embedded_document_select():
    """Select all companies no dereferencing"""
    # `$sub` expands the embedded Department documents within each Company.
    Company.many(projection={'departments': {'$sub': Department}})

def test_full_select():
    """Select all employees and their referenced companies"""
    # `$ref` dereferences each employee's Company, and the nested `$sub`
    # expands that company's embedded departments in the same query.
    Employee.many(projection={
        'company': {
            '$ref': Company,
            'departments': {'$sub': Department}
            }
        })


if __name__ == "__main__":

    # Connect to the database
    connect_to_db()

    # Build the test data
    build_test_data()

    # Run the tests
    # (the first two tests are currently disabled; only the full
    # dereferencing select is timed)
    #time_it(test_flat_select, 100)
    #time_it(test_embedded_document_select, 100)
    time_it(test_full_select, 100)
/**
 * Notifies a worker that it may resume sending requests, handing it the
 * current open-requests credit. The id of the resume request is recorded so
 * its acknowledgement can be tracked later.
 *
 * @param workerId id of the worker to send the resume signal to
 */
private void sendResumeSignal(int workerId) {
  // A credit of 0 would tell the worker to stay paused; treat it as a
  // programming error and bail out.
  if (maxOpenRequestsPerWorker == 0) {
    LOG.warn("sendResumeSignal: method called while the max open requests " +
        "for worker " + workerId + " is still 0");
    return;
  }
  WritableRequest resumeRequest =
      new SendResumeRequest(maxOpenRequestsPerWorker);
  Long requestId = nettyClient.doSend(workerId, resumeRequest);
  checkState(requestId != null);
  if (LOG.isDebugEnabled()) {
    LOG.debug("sendResumeSignal: sending signal to worker " + workerId +
        " with credit=" + maxOpenRequestsPerWorker + ", ID=" +
        (requestId & 0xFFFF));
  }
  resumeRequestsId.get(workerId).add(requestId);
}
/**
 * Saves emails and notifies components so they can refresh their views with new data.
 *
 * @author Nilhcem
 * @since 1.0
 */
public final class MailSaver extends Observable {
	private static final Logger LOGGER = LoggerFactory.getLogger(MailSaver.class);
	private static final String LINE_SEPARATOR = System.getProperty("line.separator");

	// This can be a static variable since it is Thread Safe
	private static final Pattern SUBJECT_PATTERN = Pattern.compile("^Subject: (.*)$");

	// Used only inside the synchronized block of saveEmailAndNotify (via
	// saveEmailToFile), so the non-thread-safe SimpleDateFormat is safe here.
	// NOTE(review): "hh" is the 12-hour clock; "HH" (24-hour) was likely
	// intended -- as written, filenames can collide across AM/PM. Confirm
	// before changing, since existing saved files use this pattern.
	private final SimpleDateFormat dateFormat = new SimpleDateFormat("ddMMyyhhmmssSSS");

	/**
	 * Saves incoming email in file system and notifies observers.
	 * <p>
	 * If relay domains are configured, emails whose recipient does not end with
	 * one of those domains are silently dropped (logged at debug level only).
	 * </p>
	 *
	 * @param from the user who send the email.
	 * @param to the recipient of the email.
	 * @param data an InputStream object containing the email.
	 * @see com.nilhcem.fakesmtp.gui.MainPanel#addObservers to see which observers will be notified
	 */
	public void saveEmailAndNotify(String from, String to, InputStream data) {
		List<String> relayDomains = UIModel.INSTANCE.getRelayDomains();

		// Relay-domain filtering: keep the email only if "to" ends with one of
		// the configured domains; null means "no filtering, accept everything".
		if (relayDomains != null) {
			boolean matches = false;
			for (String domain : relayDomains) {
				if (to.endsWith(domain)) {
					matches = true;
					break;
				}
			}

			if (!matches) {
				LOGGER.debug("Destination {} doesn't match relay domains", to);
				return;
			}
		}

		// We move everything that we can move outside the synchronized block to limit the impact
		EmailModel model = new EmailModel();
		model.setFrom(from);
		model.setTo(to);
		String mailContent = convertStreamToString(data);
		model.setSubject(getSubjectFromStr(mailContent));
		model.setEmailStr(mailContent);

		// Serialize saving + notification against deleteEmails() (see getLock()).
		synchronized (getLock()) {
			String filePath = saveEmailToFile(mailContent);

			model.setReceivedDate(new Date());
			model.setFilePath(filePath);

			setChanged();
			notifyObservers(model);
		}
	}

	/**
	 * Deletes all received emails from file system.
	 * <p>
	 * No-op in memory mode, since nothing was written to disk. Failures to
	 * delete individual files are logged and do not abort the loop.
	 * </p>
	 */
	public void deleteEmails() {
		Map<Integer, String> mails = UIModel.INSTANCE.getListMailsMap();
		if (ArgsHandler.INSTANCE.memoryModeEnabled()) {
			return;
		}
		for (String value : mails.values()) {
			File file = new File(value);
			if (file.exists()) {
				try {
					if (!file.delete()) {
						LOGGER.error("Impossible to delete file {}", value);
					}
				} catch (SecurityException e) {
					LOGGER.error("", e);
				}
			}
		}
	}

	/**
	 * Returns a lock object.
	 * <p>
	 * This lock will be used to make the application thread-safe, and
	 * avoid receiving and deleting emails in the same time.
	 * </p>
	 *
	 * @return a lock object <i>(which is actually the current instance of the {@code MailSaver} object)</i>.
	 */
	public Object getLock() {
		return this;
	}

	/**
	 * Converts an {@code InputStream} into a {@code String} object.
	 * <p>
	 * Originally the first lines (SubEtha SMTP "received" information) were
	 * skipped, but {@code lineNbToStartCopy} is currently {@code 0}, so every
	 * line of the stream is copied verbatim.
	 * </p>
	 *
	 * @param is the InputStream to be converted.
	 * @return the converted string object, containing data from the InputStream passed in parameters.
	 */
	private String convertStreamToString(InputStream is) {
		// Currently 0: no leading lines are skipped. (A historical value of 4
		// would skip SubEtha SMTP's additional "received" header lines.)
		final long lineNbToStartCopy = 0;
		BufferedReader reader = new BufferedReader(new InputStreamReader(is, Charset.forName(I18n.UTF8)));
		StringBuilder sb = new StringBuilder();

		String line;
		long lineNb = 0;
		try {
			while ((line = reader.readLine()) != null) {
				if (++lineNb > lineNbToStartCopy) {
					sb.append(line).append(LINE_SEPARATOR);
				}
			}
		} catch (IOException e) {
			LOGGER.error("", e);
		}
		return sb.toString();
	}

	/**
	 * Saves the content of the email passed in parameters in a file.
	 * <p>
	 * The file name is the receive timestamp, with a numeric suffix appended
	 * on collision. Returns {@code null} in memory mode (nothing is written).
	 * </p>
	 *
	 * @param mailContent the content of the email to be saved.
	 * @return the path of the created file.
	 */
	private String saveEmailToFile(String mailContent) {
		if (ArgsHandler.INSTANCE.memoryModeEnabled()) {
			return null;
		}
		String filePath = String.format("%s%s%s", UIModel.INSTANCE.getSavePath(), File.separator,
				dateFormat.format(new Date()));

		// Create file
		// NOTE: because of the post-increment, the first collision suffix is
		// "2" (suffix "1" is never produced).
		int i = 0;
		File file = null;
		while (file == null || file.exists()) {
			String iStr;
			if (i++ > 0) {
				iStr = Integer.toString(i);
			} else {
				iStr = "";
			}
			file = new File(filePath + iStr + Configuration.INSTANCE.get("emails.suffix"));
		}

		// Copy String to file
		// NOTE(review): this writeStringToFile overload uses the platform
		// default charset; consider the explicit-charset variant -- confirm.
		try {
			FileUtils.writeStringToFile(file, mailContent);
		} catch (IOException e) {
			// If we can't save file, we display the error in the SMTP logs
			Logger smtpLogger = LoggerFactory.getLogger(org.subethamail.smtp.server.Session.class);
			smtpLogger.error("Error: Can't save email: {}", e.getMessage());
		}
		return file.getAbsolutePath();
	}

	/**
	 * Gets the subject from the email data passed in parameters.
	 * <p>
	 * Scans the raw content line by line for the first line matching
	 * {@code "Subject: ..."}; folded (multi-line) subjects are not unfolded.
	 * </p>
	 *
	 * @param data a string representing the email content.
	 * @return the subject of the email, or an empty subject if not found.
	 */
	private String getSubjectFromStr(String data) {
		try {
			BufferedReader reader = new BufferedReader(new StringReader(data));

			String line;
			while ((line = reader.readLine()) != null) {
				Matcher matcher = SUBJECT_PATTERN.matcher(line);
				if (matcher.matches()) {
					return matcher.group(1);
				}
			}
		} catch (IOException e) {
			LOGGER.error("", e);
		}
		return "";
	}
}
Dosage reduction and discontinuation of biological disease-modifying antirheumatic drugs in patients with rheumatoid arthritis, psoriatic arthritis and axial spondyloarthritis: protocol for a pragmatic, randomised controlled trial (the BIOlogical Dose OPTimisation (BIODOPT) trial) Introduction The The BIOlogical Dose OPTimisation (BIODOPT) trial is a pragmatic, multicentre, randomised controlled, open-label, parallel-group, equivalence study designed to evaluate tapering of biological disease-modifying antirheumatic drugs (bDMARDs) in patients with rheumatoid arthritis (RA), psoriatic arthritis (PsA) and axial spondyloarthritis (axSpA) in sustained clinical remission or low disease activity (LDA). Traditionally, these patients maintain standard dosage of bDMARD lifelong; however, recent studies indicate that a significant proportion of patients in sustained remission or LDA can taper their bDMARD and maintain stable disease activity. Thus, this trial aims to evaluate whether a disease activity-guided tapering strategy for bDMARDs will enable a significant dosage reduction while maintaining disease activity compared with usual care. From the individual patient’s standpoint as well as from a societal perspective, it would be advantageous if bDMARDs could be reduced or even discontinued while maintaining disease activity. Methods and analysis A total of 180 patients with RA, PsA or axSpA treated with bDMARDs and in clinical remission/LDA during the past 12 months will be enrolled from four centres in Denmark. Patients will be randomised in a ratio of 2:1 to either disease activity-guided tapering of bDMARDs (intervention group) or continuation of bDMARDs as usual care (control group). The primary objective is the difference between the two groups in the proportion of patients who have reduced their inclusion dosage of bDMARDs to 50% or less while maintaining stable disease activity at 18 months follow-up. 
Ethics and dissemination The study is approved by the ethics committee of Northern Jutland, Denmark (N-20170073) and by the Danish Medicine Agency. Patient research partner KHH contributed to refinement of the protocol and approved the final manuscript. Results will be disseminated through publication in international peer-reviewed journals. Trial registration number 2017-001970-41; Pre-results. Hypothesis: the central issue with tapering is to find the best way to assess what the minimal effective dose is in a patient. As this is done open label, it can be biased by nocebo and attribution effects. Disease activity guided tapering indeed underperforms, so many patients don't taper optimally. For example, in blinded studies half dosed etanercept is equal to full dose, but in DRESS 40% of patient could not dose reduce at all. I think that using a more patient centered approach might aggravate this problem. So how do you think to counter this increase nocebo and attribution effects? In fact, reading your intervention and flare criteria, I don't think you really use a more patient centered approach than usual DAS28 /ASDAS bases treat to target, with usual SDM. When the research question (which is not entirely spelled out) is whether patient centered tapering is better than protocol guided, then a DRESS/STRASS like comparator group is preferably above usual care. Primary outcome: this is I think somewhat ambiguously stated in the abstract. You state that you are comparing % of patients successfully tapered (< 50% of DDD) and being in remission, however, it seems you use a continuous outcome of disease activity. The power for superiority in % <50% DDD is obviously very high, because this is an open door. In patients who are being tapered more will use a lower dose than patients who are not tapered. You do not fully explain the 0.5 NI margin, but I think its fine. Being in remission is a suboptimal performing flare criterion, e.g. it is much to specific. 
When you include patients in deep remission, regression to the mean will result in many patients not being in deep remission anymore. See van der Maas 2013 OMERACT flare criteria paper. You analyse the mean in DAS28/ASDAS change, so this might not be an issue, but when talking about patients in remission it is. Patients: for RA patients this work has been done in the last year, see recent systematic reviews on the subject. Doing this for PsA and axSpA is important, because evidence is limited there. Consider focusing on those diseases. Remission as inclusion, especially this deep remission, is troublesome. Firstly, those patients are rare; many patients are in LDA or normal remission, and treatment is not intensified. So a tapering study should preferably include all patients who have reached their treatment goal, and more often than not, that is not deep remission. Also, patients themselves identify the patient acceptable symptom state as being around a DAS28 of between 3.0 and 3.5. Finally, a somewhat higher disease activity at baseline is not a predictor (effect modifier) for successful tapering (Tweehuysen et al. A&R 2017). So, there is no scientific or clinical reason not to include LDA patients, and excluding them limits generalisability. Introduction: -please include a review on bDMARD dose reduction, for example the Cochrane, Verhoef et al 2017, or the ARD Fautrel paper 2017. You for example do not mention the STRASS Fautrel study. Did you check trial registers on recruiting PsA/axSpA bDMARD tapering studies? There are I think two of them, one being DRESS PS in our center (Dutch trial register NTR). -line 58: This is a misconception I think. Many patients cannot stop directly, but this does not mean these patients could stop when they were slowly tapered. There are no data to suggest that, as successful stopping seems more rare even in tapering than in stopping trials. The benefit of tapering is to identify a third group, patients who can be tapered but not stopped. 
See also the Fautrel den Broeder paper 2015 best practices, -page 5 line 5: see my earlier comments. Patient guided tapering might motivate people better, but for sure it will lead to more expectation bias, nocebo and attribution. Please discuss these potential drawbacks. Of note, also in DRESS and STRASS tapering to the next step was Shared decision making between patients and physicians. Methods: -you use a validated OMERACT Das28 flare criterion to assess flare and increase treatment, So, how is this different from the DRESS and STRASS studies? I don't see any more patient centeredness than in those studies -For PsA , DAS28 is a difficult to interpret measure. It misses out on skin, enthesis and axial complaints. Do you have any data on DAS28 based flare criteria in PsA? -Also, in PsA and axSpA -more so than in RA -extraarticular other associated disease might activate, like uveitis, psoriasis, IBD. Please add a strategy to cope with such an event. -Comparator strategy: in a pragmatic study, ofcourse also in the UC group tapering can happen. However, with a NI or equivalence study, the main analyses are done per protocol, because in contrast to superiority studies, this is the more conservative approach. Do you have a per protocol analyses plan, because you state ITT as primary analysis? CONSORT now I think has reporting guidelines extension for NI and equivalence studies, please check these. -You use three strata: in the analyses it is state of art to also correct for those strata, and preferably to show equivalence per stratum, at least drug and disease. Is this contemplated? -Multilevel analyses are fine, although you did not use a ML sample size calculation (I think for 2 levels they exist, not for n levels.) But the assumptions seem fine Minor comments: - The word novel (title) is overrated and used too often, suggest changing it to 'new' - The intervention is a combination of dose reduction and patient participation. 
It is not clear what % is expected to be due to patient education/participation and what due to the dose reduction. Since these are two interventions in one arm it should have been taken into account in the power calculation. Another way is to educate also the usual care patients. • The sample size was calculated based the expected results form previous studies in RA. In your study however, you include also PSA and axSPA, both indications that have shown to get flares after discontinuation bDMARDs. Therefor the expected % used in your power calculation is an overestimation that results in a smaller needed sample size than it should be. In my opinion this should be taken into account in the sample size calculation. Recalculated is advised. • Another misinterpretation as you mentioned is using the DAS as flare outcome for PSA. However, especially these patients might stop dose reduction because of skin activity even when the joints are not inflamed. Please leave your comments for the authors below This study by Uhrenholt et al will compare biologic dose reduction strategy with disease activity and patient report of symptoms considered and usual care. This will be interesting, but it needs some clarification. R1, Q1: How will patient report of symptoms in the intervention group be considered by their physicians and what "usual care" in the control group will be like should be explained more in detail. Otherwise, the difference between the groups is unclear. R1, A1: Thank you for bringing to our attention that the process of patient reporting of symptoms of arthritis flare in the intervention group compared to the control group needed clarification. 
Action item: the following item is inserted under the section "Intervention (BIODOPT) group" on page 8 of the revised manuscript: "The BIODOPT tapering algorithm is patient-centered with focus on the patient's own perception of arthritis activity during tapering; thereby, making the patient an equal partner in their disease management as expected and required by modern society. Compared to a strict tapering algorithm focusing only on e.g. DAS28crp or joint swelling, the patient-centered BIODOPT algorithm is expected to enhance patient motivation during tapering; thereby, possibly increasing compliance. If the patient has symptoms of flare due to tapering but the arthritis is in remission assessed by the physician, the patient is generally advised to continue tapering according to the BIODOPT algorithm but may remain at the current dose (or even go back one step in the algorithm) after agreement between the patient and the physician." Action item: the following item is inserted under the section "Comparator (Control) group" on page 9 of the revised manuscript: "In contrast to the intervention group, the small portion of patients tapering bDMARDs in the control group will not follow a patient-centered algorithm." R1, Q2: Sample size calculation is a bit of concern. Should it be done in each disease separately? The authors referred to the results of the DRESS study in the calculation, but it was targeting rheumatoid arthritis. It is not clear that the results can be applied to PsA and AS. Is it all right that drop-out was not taken into consideration? R1, A2: Thank you for raising this concern as we have discussed the same aspect during the initial phase when designing the trial protocol. The sample size calculation for primary endpoint 1A was inspired from the RA-DRESS trial as it is one of the very few randomised, non-inferiority/equivalence trials exploring a disease activity guided tapering algorithm among patients with inflammatory arthritis. 
Based on the available literature (no randomised non-inferiority/equivalence study exploring a disease activity guided tapering algorithm is to our knowledge done in PsA or axSpA) it seems reasonable to assume that the percentage of patients with PsA or axSpA meeting primary endpoint 1A will be the same as for RA, which is now described in the revised introduction. In contrast to the DRESS study, the BIODOPT trial has two primary endpoints (1A and 1B) and both endpoints must be met i.e. proving a statistically significant reduction in biologics while maintaining an equivalent disease state. Drop-out considerations is now described in the paper. Action item: the following item is inserted under the section "Introduction" on page 4 of the revised manuscript: "However, a non-inferiority trial including AS in remission on adalimumab, etanercept, infliximab or golimumab, which was stopped prematurely due to funding problems, found that prolonging the dosing interval of anti-TNF by 25% was non-inferior to full dosage anti-TNF as LDA was maintained in 81.3% of patients in the tapered group and 83.8% of patient in the full dose group. (22) In addition, Cantini et al. showed in a RCT study that 90.4% of patients with AS on full dose etanercept and 86.3% of patients with AS on half dose etanercept was still in remission after a mean follow up of 21-22 months. (32) Furthermore, prospective observational studies in AS and axSpA have proven that a large proportion of patient maintain remission/LDA after tapering of bDMARDs. (33,34) In PsA, a prospective observation study has shown that 72% of patients treated with 25 mg etanercept maintained remission 1 year after a progressive dosage reduction with 21% receiving weekly dosage and 51% receiving a dosage every-other-week. 
(27) Additionally, in a case-control study including patients with PsA and RA in remission on adalimumab, the proportion of patients maintaining remission was statistically significant higher among patients with PsA (88.6%) than RA (17.6%) after 50% dosage reduction. (28)" Action item: the following item is inserted under the section "Sample size and power considerations" on page 11-12 of the revised manuscript: "This assumption is inspired from the DRESS-RA trial, which it is one of the very few randomised, non-inferiority/equivalence trials exploring a disease activity guided tapering algorithm among patients with inflammatory arthritis. (30) No randomised, non-inferiority/equivalence study exploring a disease activity guided tapering algorithm is to our knowledge done in PsA or axSpA, but based on the available literature it seems reasonable to assume that the percentage of patients with PsA or axSpA meeting primary endpoint 1A will be the same as for RA (22,27,28,32). Thus, the sample size calculation was not done for each disease separately." "Drop-out considerations is based on primary endpoint 1B, as this is the endpoint with the lowest power (0.868). In a two one-sided test analysis for additive equivalence of two-sample normal means with bounds -0.5 and 0.5 for the mean difference and a significance level of 0.05, assuming a mean difference of 0 and a common standard deviation of 1, a total sample size of 156 assuming an allocation ratio of 2 to 1 is required to obtain a power of at least 0.8 (power = 0.802). Thus, 24 dropouts (180-156) are allowed corresponding to 13%." R1, Q3: The remission criteria for PsA is based on DAPSA, but the monitoring and flare definition is based on DAS28-CRP. It is inconsistent. R1, A3: Thank you for this comment. DAPSA was chosen as inclusion (remission) criteria for PsA as it is a stringent index that contains 66/68 joint count and is one of the suggested efficacy scores by EULAR. 
With the stringent DAPSA remission criteria we believe that only patients in true remission is included which may not be the case if the DAS28crp was used as that index only contain 28 joint count. The (RA validated) DAS28 flare criteria was chosen for PsA as there currently are no flare criteria for PsA; however, we acknowledge that it would be desirably to monitor the patients with PsA using a PsA validated criteria e.g. DAPSA if this was possible. Action item: the following item is inserted under the section "Discussion" on page 16 of the revised manuscript: "However, we acknowledge that it would be desirably to monitor patients with PsA using a PsA validated flare criteria e.g. a DAPSA based flare criteria as DAPSA is used as remission criteria for enrolment in this trial. Nevertheless, DAPSA will be calculated for patients with PsA for each trial visit for further sub analysis." R1, Q4: Introduction section should be more refined. There are many studies investigating the possibility of bDMARDs tapering. R1, A4: Thank you for making it clear that the introduction needed refinement. The introduction is now revised for clarity including description of several tapering trials. Thus, the STRASS study is now included in the introduction as suggested by reviewer 2 together with two additional reviews on tapering of bDMARD in the reference list. Action item: the following references is inserted in the section "Introduction" of the revised manuscript: Please leave your comments for the authors below This protocol describes a strategy study on dose tapering of bDMARD, using a more patient centered approach. Some designs features are very nice, but I have some comments regarding what I think to be some important issues in design of the study. General issues R2, Q1: Hypothesis: the central issue with tapering is to find the best way to assess what the minimal effective dose is in a patient. As this is done open label, it can be biased by nocebo and attribution effects. 
Disease activity guided tapering indeed underperforms, so many patients don't taper optimally. For example, in blinded studies half dosed etanercept is equal to full dose, but in DRESS 40% of patient could not dose reduce at all. I think that using a more patient centered approach might aggravate this problem. So how do you think to counter this increase nocebo and attribution effects? In fact, reading your intervention and flare criteria, I don't think you really use a more patient centered approach than usual DAS28 /ASDAS bases treat to target, with usual SDM. R2, A2: Thank you for this comment. In the initial phase of development of this trial, we examined different possibilities for blinding (e.g. sham spacing device); however, we did not find a feasible solution. Consequently, the trial became an open-label trial with no blinding of patients or research personnel to the intervention. We acknowledge, that open-label tapering studies incl. this trial can be biased by e.g. nocebo and/or attribution effects resulting in fewer patients being able to taper their bDMARD when compared with blinded studies as described by reviewer 2. However, compared to a strict tapering algorithm focusing only on e.g. DAS28crp or joint swelling (no patient involvement/shared decision making during tapering), the patient-centered BIODOPT tapering algorithm is expected to enhance patient motivation during tapering; thereby, possibly decreasing a nocebo effect. For clarity, the intervention incl. the patient-centered algorithm has now been described in greater detail in the section "Intervention (BIODOPT) group". Action item: the following item is inserted under the section "Intervention (BIODOPT) group" on page 8 of the revised manuscript: "The BIODOPT tapering algorithm is patient-centered with focus on the patient's own perception of arthritis activity during tapering; thereby, making the patient an equal partner in their disease management as expected and required by modern society. 
Compared to a strict tapering algorithm focusing only on e.g. DAS28crp or joint swelling, the patient-centered BIODOPT algorithm is expected to enhance patient motivation during tapering; thereby, possibly increasing compliance. If the patient has symptoms of flare due to tapering but the arthritis is in remission assessed by the physician, the patient is generally advised to continue tapering according to the BIODOPT algorithm but may remain at the current dose (or even go back one step in the algorithm) after agreement between the patient and the physician." Action item: the following item is inserted under the section "Discussion" on page 15 of the revised manuscript: "The BIODOPT trial evaluates a patient-centered tapering algorithm as it seems plausible that taking the patient's own assessment of arthritis activity into consideration during tapering will result in increased motivation and better adherence to the algorithm; thereby, possibly resulting in a higher rate of successful dosage reduction compared to previous studies with a stringent tapering algorithm without patient-involvement. Furthermore, a patient-centered tapering algorithm might minimise the risk of major flare as previous research in RA has shown that patients with flare have a significant higher score of several PROMs compared to patients without flare. (70)" "However, a limitation is that the trial personnel and the patients are not blinded to the intervention groups; thus, this could potentially lead to bias e.g. nocebo, expectation and/or attribution bias which would affect interpretation of the trial results." R2, Q3: When the research question (which is not entirely spelled out) is whether patient centered tapering is better than protocol guided, then a DRESS/STRASS like comparator group is preferably above usual care. R2, A3: Thank you for making it clear, that the research question needed to be clarified. 
The research question is whether a patient-centered tapering algorithm is better than usual care practise; thus, not if it is better than a protocol guided tapering algorithm with no patient-involvement, which is why a DRESS/STRASS like comparator group was not chosen as the control group. Action item: the following item is inserted/revised under the section "Introduction" on page 4 of the revised manuscript: "We hypothesise, that a patient-centered tapering algorithm for bDMARDs will reduce dosage of biologics while disease activity remains stable." "Thus, the aim of this study is to evaluate whether a patient-centered tapering strategy for bDMARDs will enable a significant dosage reduction while maintaining disease activity assessed 18 months from baseline compared with usual care." R2, Q4: Primary outcome: this is I think somewhat ambiguously stated in the abstract. You state that you are comparing % of patients successfully tapered (< 50% of DDD) and being in remission, however, it seems you use a continuous outcome of disease activity. R2, A4: Thank you for bringing to our attention that the primary objective needed to be written more precisely in the abstract and this section is now revised. Action item: the following item is revised under the section "Abstract" on page 2 of the revised manuscript: "The primary objective is the difference between the two groups in the proportion of patients who have reduced their inclusion dose of bDMARDs to 50% or less, while maintaining stable disease activity at 18 months follow-up." R2, Q5: The power for superiority in % <50% DDD is obviously very high, because this is an open door. In patients who are being tapered more will use a lower dose than patients who are not tapered. R2, A5: We agree that the power for superiority of 0.992 appears extremely high; however, the sample size was determined from the power of the co-primary endpoint with the least statistical power i.e. primary endpoint 1B. 
Action item: the following item is revised under the section "Sample size and power considerations" on page 12 of the revised manuscript: "The sample size was determined from the power of the co-primary endpoint with the least statistical power i.e. primary endpoint 1B which is why the power for superiority of 0.992 appears extremely high." R2, Q6: You do not fully explain the 0.5 NI margin, but I think its fine. R2, A6: Thank you for agreeing that the predefined margin of equivalence at ± 0.5 points is explained sufficiently even though it is not described in detail. Thus, as stated in the paper the margin was determined based on "less than half of the effect" that would be considered a clinically relevant reduction in DAS28crp level (∆DAS28crp > 1.2) or ASDAS level (∆ASDAS > 1.1) corresponding to a clinically unimportant change in arthritis disease activity. R2, Q7: Being in remission is a suboptimal performing flare criterion, e.g. it is much too specific. When you include patients in deep remission, regression to the mean will result in many patient not being in deep remission anymore. See van der Maas 2013 OMERACT flare criteria paper. You analayse the mean in DAS28/ASDAS change, so this might not be an issue, but when talking about patients in remission it is. R2, A7: Thank you for bringing to our attention that the primary objective was not described clearly in the paper. The primary objective are to evaluate whether a patient-centered tapering strategy for bDMARDs will enable a significant dosage reduction while maintaining disease activity assessed 18 months from baseline compared with usual care. Thus, we analyse the mean change in Das28crp/ASDAS and not if the patients maintain remission or not. 
Action item: the following item is revised under the section "Primary outcome" on page 11 of the revised manuscript: "The primary objective are to evaluate whether a patient-centered tapering strategy for bDMARDs will enable a significant dosage reduction while maintaining disease activity assessed 18 months from baseline compared with usual care. Thus, there are two primary efficacy endpoints: 1A Superiority: The proportion of patients who at 18 months are reduced to 50% or less of their inclusion dose of bDMARD. 1B Equivalence: Disease activity assessed 18 months from baseline. The primary objective is met if a statistically significant reduction in biologics is demonstrated while maintaining an equivalent disease state." R2, Q8: Patients: for RA patients this work has been done in the last year, see recent systematic reviews on the subject. Doing this for PsA and axSpA is important, because evidence is limited there. Consider focusing on those diseases. R2, A8: Thank you for this comment. We acknowledge, that randomised tapering studies have been performed in patients with RA during the last years; however, this RCT tapering trial include seven different bDMARDs, which to our knowledge has not been examined previously in RA, nor PsA or axSpA. Action item: the following item is revised under the section "Discussion" on page 15 of the revised manuscript: "The BIODOPT trial is to our knowledge the first randomised, equivalence trial exploring a disease activity guided tapering algorithm of 7 different bDMARDs including biosimilars among patients with inflammatory arthritis." R2, Q9: Remission as inclusion, especially this deep remission is troublesome. Firstly, those patients are rare, many patients are in LDA or normal remission, and treatment is not intensified. So a tapering study should included preferably all patient who have reached their treatment goal, and more often than not, it is not deep remission. 
Also, patients themselves identify the patient acceptable symptom state as being around DAS28 of between 3.0 and 3.5. Finally, a somewhat higher disease activity @ baseline is not a predictor (Effectmodifier) for successful tapering (tweehuijsen et al A &R 2017). So, there is no scientific or clinical reason not to include LDA patients, and it limits generalisability. R2, A9: Thank you for this comment. Previously, it have been discussed if disease activity at baseline could be an effect modification for successful tapering as results from different studies were conflicting. We chose sustained remission without swollen joints and with no steroid use within the last 12 months as an inclusion criteria to ensure that the included patients was in deep (true) remission before starting tapering of bDMARDs. However, as reviewer 2 points out, Tweehuysen et al. did not find evidence that disease activity at baseline is an effect modification for successful tapering. Consequently, we will strongly consider inclusion of patients in LDA with no swollen joints to improve the generalisability of the trial results. Action item: the following item is revised under the section "Discussion" on page 16 of the revised manuscript: "In existing studies, it have been discussed if disease activity at baseline could be an effect modification for successful tapering but the results were conflicting. (71) However, in a recent systematic review by Tweehuysen et al. it was concluded, that disease activity at baseline, i.e. remission or LDA, was not an effect modification for successful tapering of bDMARDs. (71) Consequently, we will strongly consider inclusion of patients in LDA with no swollen joints to improve the generalisability of the trial results." R2, Q10: Introduction: -please include a review on bDMARD dose reduction, for example the Cochrane, Verhoef et al 2017, or the ARD Fautrel paper 2017. 
You for example do not mention the STRASS Fautrel study R2, A10: Thank you for making it clear that the introduction needed refinement. Two of the suggested reviews on bDMARD tapering are added to the reference list in the introduction and the introduction has been revised with inclusion of several trials including the STRASS trial. Action item: the following item is inserted in the section "Introduction" on page 4 of the revised manuscript: "Fautrel et al. did not disprove the null hypothesis of non-inferiority in the STRASS study due to insufficient recruitment; however, in the tapering group 18 months from baseline adalimumab or etanercept were successfully stopped in 39.1% and successfully tapered in 35.9% while standard dose had to be maintained in 20.3%. (31)" Action item: the following references are inserted in the section "Introduction" of the revised manuscript: R2, A11: Thank you for your question. Yes, we checked trial registers (clinicaltrials.gov) in the initial phase of the development of this protocol (winter 2017) and found that there were only a few studies initiated on this topic; thus, a need for further investigations. R2, Q12: -line 58: This is a misconception I think. Many patients cannot stop directly, but this does not mean these patients could stop when they were slowly tapered. There are no data to suggest that, as successful stopping seems more rare even in tapering than in stopping trials. The benefit of tapering is to identify a third group, patients who can be tapered but not stopped. See also the Fautrel den Broeder paper 2015 best practices, R2, A12: Thank you for bringing to our attention that the sentence needed to be clarified. The sentence has been rewritten as described below. Action item: the following item is revised under the section "Introduction" on page 3 of the revised manuscript: "Thus, abrupt discontinuation of bDMARDs results in flare in a significant proportion of patients. 
Another approach is to use a tapering algorithm to gradually reduce dosage or increase the dosage interval of bDMARDs to identify patients who can taper or even discontinue their bDMARD." R2, Q13: -page 5 line 5: see my earlier comments. Patient guided tapering might motivate people better, but for sure it will lead to more expectation bias, nocebo and attribution. Please discuss these potential drawbacks. Of note, also in DRESS and STRASS tapering to the next step was Shared decision making between patients and physicians. R2, A13: Thank you for the comment. The potential bias of the open-label design is now discussed in greater detail in the discussion section. The following item is revised under the section "Discussion" on page 15 of the revised manuscript: "However, a limitation is that the trial personnel and the patients are not blinded to the intervention groups; thus, this could potentially lead to bias e.g. nocebo, expectation and/or attribution bias which would affect interpretation of the trial results." R2, Q14: Methods: -you use a validated OMERACT Das28 flare criterion to assess flare and increase treatment, So, how is this different from the DRESS and STRASS studies? I don't see any more patient centeredness than in those studies R2, A14: Thank you for bringing to our attention, that the patient-centered algorithm needed clarification. In the BIODOPT trial, the patient is an equal partner in the shared decision during tapering of bDMARDs; thus, the patient can stop tapering prematurely and maintain the current dose or go one step back in the algorithm if he/she believe to have symptoms of activity even though the physician does not judge the arthritis to have flared. In contrast, the patients in the tapering group of the DRESS and STRASS studies were given a dose reduction advice. 
Action item: the following item is inserted under the section "Intervention (BIODOPT) group" on page 8 of the revised manuscript: "The BIODOPT tapering algorithm is patient-centered with focus on the patient's own perception of arthritis activity during tapering; thereby, making the patient an equal partner in their disease management as expected and required by modern society. Compared to a strict tapering algorithm focusing only on e.g. DAS28crp or joint swelling, the patient-centered BIODOPT algorithm is expected to enhance patient motivation during tapering; thereby, possibly increasing compliance. If the patient has symptoms of flare due to tapering but the arthritis is in remission assessed by the physician, the patient is generally advised to continue tapering according to the BIODOPT algorithm but may remain at the current dose (or even go back one step in the algorithm) after agreement between the patient and the physician." R2, Q15: -For PsA , DAS28 is a difficult to interpret measure. It misses out on skin, enthesis and axial complaints. Do you have any data on DAS28 based flare criteria in PsA? R2, A15: Thank you for this comment. The (RA validated) DAS28 flare criteria was chosen for PsA as there currently are no flare criteria for PsA; however, we acknowledge that it would be desirably to monitor the patients with PsA using a PsA validated criteria (if this was possible). Previously, several tapering studies have used DAS28 ≥ 3.2 as a flare criteria in patients with PsA. I this trial, PsA essential outcomes as skin involvement, nail involvement, enthesitis and dactylitis will be monitored; hence, sub analysis will gather information about these outcomes in patients with PsA who taper their biologics. Furthermore, sub analysis will also involve information about reason for escalation dosage of bDMARDs e.g. skin psoriasis flare. 
Action item: the following item is inserted under the section "Discussion" on page 16 of the revised manuscript: "However, we acknowledge that it would be desirably to monitor patients with PsA using a PsA validated flare criteria e.g. a DAPSA based flare criteria as DAPSA is used as remission criteria for enrolment in this trial. Nevertheless, DAPSA will be calculated for patients with PsA for each trial visit for further sub analysis. PsA and axSpA essential outcomes as skin involvement, nail involvement, enthesitis and dactylitis will be monitored; hence, sub analysis will be performed and information about reason for dosage escalation of bDMARDs will be collected e.g. skin psoriasis flare." R2, Q16: -Also, in PsA and axSpA -more so than in RA -extra-articular other associated disease might activate, like uveitis, psoriasis, IBD. Please add a strategy to cope with such an event. R2, Q16: Thank you for bringing to our attention that this needed to be specified in the article. We have now described the study procedure regarding psoriasis, uveitis or IBD flare under the section "Flare criteria". Action item: the following item is inserted under the section "Flare criteria" on page 7 of the revised manuscript: "If a patient has symptoms of psoriasis, uveitis or IBD flare during tapering, the relevant department is contacted for dialogue and expert opinion in particular indication for bDMARD dosage escalation." R2, Q17: -Comparator strategy: in a pragmatic study, of course also in the UC group tapering can happen. However, with a NI or equivalence study, the main analyses are done per protocol, because in contrast to superiority studies, this is the more conservative approach. Do you have a per protocol analyses plan, because you state ITT as primary analysis? CONSORT now I think has reporting guidelines extension for NI and equivalence studies, please check these. 
R2, A17: Thank you for making it clear that the section "statistical analysis plan" needed to be refined. This protocol was developed in accordance with the EQUATOR recommendations (i.e. the CONSORT statements incl. the CONSORT statement for equivalence trials) which is now made clear in the paper. All analyses will be conducted according to the intention-to-treat (ITT) principle, which is one of the two recommended analyses for equivalence trials by CONSORT (the other is per protocol). From the CONSORT non-inferiority/equivalence statement: "It should be indicated whether the conclusion relating to noninferiority or equivalence is based on ITT or perprotocol analysis or both and whether those conclusions are stable with respect to different types of analyses (eg, ITT, per-protocol)." Action item: the following item is inserted under the section "Statistical analysis" on page 12-13 of the revised manuscript: "All descriptive statistics and tests will be reported in accordance to the recommendations of the EQUATOR network (58) including the CONSORT statements. (59,60) Thus, all data analyses will be carried out according to a pre-established statistical analysis plan. Data will be analysed by the intention-to-treat principle, which is one of the recommended analyses for equivalence trials by CONSORT. (60) For the equivalence analyses (i.e. according to disease activity), imputations will not be used to replace missing data in the primary analyses, but will be included in a sensitivity analysis to assess the effect of missing data." R2, Q18: -You use three strata: in the analyses it is state of art to also correct for those strata, and preferably to show equivalence per stratum, at least drug and disease. Is this contemplated? R2, Q18: Thank you for the question. As stated in the paper, the primary analysis model will include group (i.e. 
intervention vs usual care), diagnosis, bDMARD failure history, centre status, and time point (4, 8, 12 months from baseline) as fixed effects, with the baseline value of the relevant variable (e.g. disease activity ) as a covariate. Thus, the primary analysis will be stratified according to diagnosis group (RA, PsA or axSpA) but not to bDMARD drug. However, we will explore the effect of bDMARD drug in sub analyses. Action item: the following item is inserted under the section "Statistical analysis" on page 14 of the revised manuscript: "In addition, exploratory sub analysis will be performed to evaluate the effect of e.g. bDMARD drug." R2, Q19: -Multilevel analyses are fine, although you did not use a ML sample size calculation (I think for 2 levels they exist, not for n levels.) But the assumptions seem fine R2, A19: Thank you for raising this question. As stated in the DELTA 2 guideline for reporting sample size calculations in randomised controlled trials: under the conventional approach with a standard trial design and unadjusted statistical analysis, the core items that need to be stated are the primary outcome, the target difference appropriately specified according to the outcome type, the associated nuisance parameter, and the statistical significance and power. Even if the planned statistical analyses (e.g. hierarchical/multilevel analyses) deviates from the conventional approach (e.g. twosample t-tests), the core principles and reporting of sample size can be modified to provide sufficient detail to ensure that the sample size estimation is equally sophisticated. However, the key principles remain the same. Thus, the sample size calculation in this trial is done in accordance with the DELTA 2 guideline. 
ACPA, IgM-RF, ANA X HLA-B27 2 X Biomarker collection for biobank X X X X X X X Blood human chorion gonadotropin (B-hcg) 8 X Imaging X-ray hands and feet 5 X X X-ray SI joints 2 X X 1 : Registered electronically through the touch screen in the outpatient clinic R3, Q1: • The primary outcome is the difference in percentage of patients that have at least a 50% dose reduction after 18 months of follow-up while staying in remission. The intervention is a combination of dose reduction and patient participation. It is not clear what % is expected to be due to patient education/participation and what due to the dose reduction. Since these are two interventions in one arm it should have been taken into account in the power calculation. Another way is to educate also the usual care patients. Thank you for making it clear, that the patient education needed to be explained in greater detail in the protocol paper. It should now be clear that both groups are educated at baseline about symptoms of arthritis flare and to contact their outpatient clinic if such symptoms occur. This is why the patient education was not taken into consideration in the sample size calculation. This trial evaluates a patient-centered disease activity guided tapering algorithm compared with usual care practise. Previously, other models for dosage reduction of bDMARDs have been tested in patients with RA, PsA or axSpA; thus, this trial contributes with a new suggestion on how to taper patients with inflammatory arthritis. Action item: the following item is inserted under the section "Interventions" on page 7 of the revised manuscript: "At baseline, patients in both trial groups are educated about symptoms of flare by research personnel e.g. increasing peripheral joint pain and/or joint swelling and/or increasing inflammatory back pain. If such symptoms occur, the patients are advised to contact the rheumatology outpatient clinic for a consult within 7 days." 
R3, Q2: • The sample size was calculated based the expected results form previous studies in RA. In your study however, you include also PSA and axSPA, both indications that have shown to get flares after discontinuation bDMARDs. Therefor the expected % used in your power calculation is an overestimation that results in a smaller needed sample size than it should be. In my opinion this should be taken into account in the sample size calculation. Recalculated is advised. R3, A2: Thank you for raising this concern as we have discussed the same aspect during the initial phase when designing the trial protocol. The sample size calculation for primary endpoint 1A was inspired from the RA-DRESS trial as it is one of the very few randomised, non-inferiority/equivalence trials exploring a disease activity guided tapering algorithm among patients with inflammatory arthritis. Based on the available literature (no randomised non-inferiority/equivalence study exploring a disease activity guided tapering algorithm is to our knowledge done in PsA or axSpA) it seems reasonable to assume that the percentage of patients with PsA or axSpA meeting primary endpoint 1A will be the same as for RA as described in the revised introduction. We acknowledge (as mentioned in the introduction of this paper), that abrupt discontinuation of bDMARDs in patients with PsA and axSpA leads to flare up in a larger proportion of patients compared to patients with RA. However, the literature exploring tapering in axSpA and PsA have not proven a very high flare rate as described in the revised introduction. Thus, the sample size calculation was not done for each disease group separately as it seems reasonable to assume that the proportion of patients who will meet primary endpoint 1A is equal among the three groups. In contrast to the DRESS study, the BIODOPT trial has two primary endpoints (1A and 1B) and both endpoints must be met i.e. 
proving a statistically significant reduction in biologics while maintaining an equivalent disease state. Action item: the following item is inserted under the section "Introduction" on page 4 of the revised manuscript: "However, a non-inferiority trial including AS in remission on adalimumab, etanercept, infliximab or golimumab, which was stopped prematurely due to funding problems, found that prolonging the dosing interval of anti-TNF by 25% was non-inferior to full dosage anti-TNF as LDA was maintained in 81.3% of patients in the tapered group and 83.8% of patient in the full dose group. (22) Action item: the following item is inserted under the section "Sample size considerations" on page 11-12 of the revised manuscript: "This assumption is inspired from the DRESS-RA trial, which it is one of the very few randomised, non-inferiority/equivalence trials exploring a disease activity guided tapering algorithm among patients with inflammatory arthritis. (30) No randomised non-inferiority/equivalence study exploring a disease activity guided tapering algorithm is done in PsA or axSpA, but based on the available literature it seems reasonable to assume that the percentage of patients with PsA or axSpA meeting primary endpoint 1A will be the same as for RA (26,27,29,32). Thus, the sample size calculation was not done for each disease separately." R3, Q3: • Another misinterpretation as you mentioned is using the DAS as flare outcome for PSA. However, especially these patients might stop dose reduction because of skin activity even when the joints are not inflamed. If there are more PSA patients than RA and/or AxSPA in your study population, this may underestimate your results. I suggest to include only RA patients R3, A3: Thank you for the comment. 
The (RA validated) DAS28 flare criteria was chosen for PsA as there currently are no flare criteria for PsA; however, we acknowledge that it would be desirable to monitor the patients with PsA using PsA validated criteria (if this was possible). In this trial, PsA essential outcomes such as skin involvement, nail involvement, enthesitis and dactylitis will be monitored; hence, sub analyses will gather information about these outcomes in patients with PsA who taper their biologics. Furthermore, sub analysis will also contain information about the reason for dosage escalation of bDMARD e.g. skin psoriasis flare. Currently, 69 patients diagnosed with RA, PsA or axSpA are included in the BIODOPT trial. The percentage of patients with PsA in the trial group is not greater than the percentage of patients with RA and/or axSpA; thus, we do not anticipate that using the DAS28 flare criteria for patients with PsA will result in an underestimation of the primary objective. Action item: the following item is inserted under the section "Discussion" on page 16 of the revised manuscript: "However, we acknowledge that it would be desirable to monitor patients with PsA using a PsA validated flare criteria e.g. a DAPSA based flare criteria as DAPSA is used as remission criteria for enrolment in this trial. Nevertheless, DAPSA will be calculated for patients with PsA for each trial visit for further sub analysis." R3, Q4: • In your article you have to mention whether the patients are/are not allowed to taper conventional DMARDs. Especially since tapering cDMARDs also has an effect on the primary outcome R3, Q4: Thank you for bringing this to our attention. A comment about concomitant synthetic DMARD has been added under the section "Interventions". 
Action item: the following item is inserted under the section "Interventions" on page 7 of the revised manuscript: "Baseline concomitant synthetic DMARD and/or NSAID dose are maintained throughout the study period; however, dosage can be reduced or discontinued if the patient experience severe side effects." GENERAL COMMENT S The authors have made a number of changes that improve the manuscript and the RCT itself. I have still 3 issues that should be resolved 1/ RA EULAR guideline specifically mentioned tapering as important and evidence based intervention, so to say that no guidelines mention tapering is incorrect. https://www.ncbi.nlm.nih.gov/pubmed/28264816 2/CONSORT is a reporting guideline, and does not advocate ITT or per protocol for NI or equivalence trials. However, in their suggested reporting, they mention per protocol, not intention to treat, as the primary analyses. http://www.consortstatement.org/Media/Default/Downloads/Extensions/CONSORT%20Extension%20fo r%20Non-inferiority%20and%20Equivalence%20Trials.pdf So, to state that CONSORT suggest ITT for NI/EQ studies is not correct 3/ The strong focus on patient centered tapering is strange: -All treat to target trials use a target, but also judgement on treatment changes both from the physician and the patient. There are no studies in which patients have to follow DAS28 score guided advices 100%. Protocol adherence in DRESS was approximately 75-80%, the same in the Best study (88%) and lower in IMPROVED study (65%). Patients can also drop out the study at any time, and stop tapering. When you mention protocolised tapering as having no patient involvement, I think this reflects limited experience with clinical strategy trials in rheumatology. All treatment decisions are "DAS28"inspired" but are made in shared decision making. I think Bruno Fautrel would tell you the same for the STRASS study ;). So patient centered tapering positioned against "protocolised" tapering, is "distinction without a difference". 
https://en.wikipedia.org/wiki/Distinction_without_a_difference -in your study, there is no process metnioned that leads to more patient involvement other than "physicians and patients decide together". There a some studies on implementation of Shared decision making, and SDM tools have been available. If you include these, I could appriciate more the distinction with usual T2T trials. -When tapering is done more according to patient preference, whatever that means, then I could see that more patients would be willing to taper. But that is not what you study. - The statement that tapering would yield better results when patient are more involved in the decision making is very strange, because it suggest that often patients want to taper more, but are withheld by pohysicians or higher disease activity scores. This is to me unconceivable, and also not supported by evidence. You also rightfully state state that patient experienced flaring is characterized by more subjective complaints, so, more patient centered tapering would thus lead to less yield from tapering. There are no data to my knowledge that more patient involvement leads to less nocebo effects. Thus 1/ the focus on patient centered being different from other studies seems inappropriate and 2/ it will never lead to more tapering, but only to less tapering. I suggest to put less emphasis on the SDM part, and more on PsA/axSpA tapering and the several bDMARDS that are included. REVIEWER Tsutomu Takeuchi Keio University School of Medicine REVIEW RETURNED 12-Mar-2019 GENERAL COMMENTS The authors extensively revised the manuscript according to the reviewers' suggestions. The reviewer have no further comments. 
Moreover, the CONSORT statement for NI and EQ trials from 2010 mention both per protocol and ITT analysis but does not favour one over the other (page 2603): Non-inferiority/Equivalence CONSORT (Piaggio, G et al) "It should be indicated whether the conclusion relating to noninferiority or equivalence is based on ITT or per protocol analysis or both and whether those conclusions are stable with respect to different types of analyses (eg, ITT, per-protocol)." The primary (1A and 1B) and secondary endpoints in the BIODOPT trial will be conducted based on the intention-to-treat population; however, for these analyses we will not impute (i.e. "substitute") outcomes to replace missing data. According to the as observed principle, only patients with actual data collected in the database will be included in the analyses (independent of protocol violations). Action item: the following item is inserted under the section "Statistical analysis" on page 12-13of the revised manuscript: "The analyses for the primary and secondary endpoints will be conducted according to the ITT principle; i.e. based on the full analysis set (all randomised individuals independent of protocol violations) with outcome data available (as observed) (62). (62) For the equivalence analyses (i.e. according to disease activity), imputations will not be used to replace missing data in the primary analyses, but will be included in a sensitivity analysis to assess the effect of missing data. Thus, ITT analyses with replacement of missing data as well as analysis on "per protocol" individuals will only be performed to explore the robustness of our findings." R2, Q3: 3/ The strong focus on patient centered tapering is strange: -All treat to target trials use a target, but also judgement on treatment changes both from the physician and the patient. There are no studies in which patients have to follow DAS28 score guided advices 100%. 
Protocol adherence in DRESS was approximately 75-80%, the same in the Best study (88%) and lower in IMPROVED study (65%). Patients can also drop out the study at any time, and stop tapering. When you mention protocolised tapering as having no patient involvement, I think this reflects limited experience with clinical strategy trials in rheumatology. All treatment decisions are "DAS28"inspired" but are made in shared decision making. I think Bruno Fautrel would tell you the same for the STRASS study ;). So patient centered tapering positioned against "protocolised" tapering, is "distinction without a difference". https://en.wikipedia.org/wiki/Distinction_without_a_difference -in your study, there is no process menioned that leads to more patient involvement other than "physicians and patients decide together". There a some studies on implementation of Shared decision making, and SDM tools have been available. If you include these, I could appriciate more the distinction with usual T2T trials. -When tapering is done more according to patient preference, whatever that means, then I could see that more patients would be willing to taper. But that is not what you study. -The statement that tapering would yield better results when patient are more involved in the decision making is very strange, because it suggest that often patients want to taper more, but are withheld by pohysicians or higher disease activity scores. This is to me unconceivable, and also not supported by evidence. You also rightfully state state that patient experienced flaring is characterized by more subjective complaints, so, more patient centered tapering would thus lead to less yield from tapering. There are no data to my knowledge that more patient involvement leads to less nocebo effects.
/**
 * This class represents a color weighting for a Symbol. Only mana Symbols will have nonzero weights
 * for any type of mana. For any given mana Symbol, the sum of all of its color weights should be
 * 1.
 * <p>
 * This class is simply a data structure that holds a {@link ManaType} and its weight for a symbol to
 * make it easier to populate the Symbol's weight map.
 *
 * @author Alec Roelke
 */
public class ColorWeight
{
    /**
     * Color for the weight.
     */
    public final ManaType color;

    /**
     * The weight of the color.
     */
    public final double weight;

    /**
     * Create a new ColorWeight.
     *
     * @param c color of the new ColorWeight
     * @param w weight of the new ColorWeight
     */
    public ColorWeight(ManaType c, double w)
    {
        color = c;
        weight = w;
    }

    /**
     * Two ColorWeights are equal exactly when they hold the same color and the same weight.
     *
     * @param other object to compare with
     * @return true if other is a ColorWeight with equal color and weight
     */
    @Override
    public boolean equals(Object other)
    {
        // BUG FIX: the original returned false for (other == this), violating the
        // reflexivity requirement of Object.equals (x.equals(x) must be true).
        if (other == this)
            return true;
        if (other == null)
            return false;
        if (other.getClass() != getClass())
            return false;
        ColorWeight o = (ColorWeight)other;
        // NOTE(review): == on doubles means NaN weights never compare equal even to
        // themselves; acceptable as long as weights are always real numbers in [0, 1].
        return o.color.equals(color) && o.weight == weight;
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(color, weight);
    }
}
import { Box, useMediaQuery } from '@chakra-ui/react';
import React, { ReactNode, useEffect } from 'react';
import { useAnimation } from 'framer-motion';
import { useMeasure } from 'react-use';
import { BREAKPOINT_MD_QUERY } from '../../constants/breakpoints';
import useResizeEventListener from '../../hooks/useResizeEventListener';
import { Left, Right } from './styles';

export interface SplitScreenProps {
  /** Content rendered in the left pane. */
  left: ReactNode;
  /** Content rendered in the right pane; on mobile it slides over the left pane. */
  right: ReactNode;
  /** On mobile: whether the right pane is expanded to cover the left pane. */
  mobileIsOpen?: boolean;
  /** On mobile: hide the left pane entirely and keep the right pane static. */
  hideLeftOnMobile?: boolean;
  /** Percentage of the width given to the left pane; the right pane gets the rest. */
  apportionment?: number;
}

/**
 * Two-pane layout component.
 *
 * On desktop both panes sit side by side, split according to `apportionment`.
 * On mobile (below BREAKPOINT_MD_QUERY) the right pane is animated with
 * framer-motion between a "closed" position (offset by --boemly-space-28 from
 * the top) and an "open" position just below the measured container height,
 * unless `hideLeftOnMobile` disables the mobile behavior altogether.
 */
export const SplitScreen: React.FC<SplitScreenProps> = ({
  left,
  right,
  mobileIsOpen = false,
  hideLeftOnMobile = false,
  apportionment = 42,
}: SplitScreenProps) => {
  // Keeps the --viewport-height CSS variable in sync on window resizes.
  useResizeEventListener();

  const [mobile] = useMediaQuery(BREAKPOINT_MD_QUERY);
  // `height` is the measured height of the wrapping div; it drives the
  // "mobileOpen" top offset so the right pane leaves a 40px peek.
  const [ref, { height }] = useMeasure<HTMLDivElement>();
  const controls = useAnimation();

  // Animation targets for the right pane in each layout state.
  const variants = {
    desktop: { top: 0, height: '100%' },
    mobileClosed: { top: 'var(--boemly-space-28)', height: 'calc(100% - var(--boemly-space-28))' },
    mobileOpen: { top: height - 40, height: 'calc(100% - var(--boemly-space-28))' },
  };

  const mobileAndLeftNotHidden = mobile && !hideLeftOnMobile;

  // Re-run the animation whenever the open state or the measured height changes.
  // NOTE(review): `mobile`, `hideLeftOnMobile`, and `controls` are read here but
  // omitted from the dependency array — presumably intentional, but confirm that
  // a breakpoint change without a height change cannot leave the pane in the
  // wrong position.
  useEffect(() => {
    if (mobileAndLeftNotHidden) {
      controls.start(mobileIsOpen ? 'mobileOpen' : 'mobileClosed');
    } else {
      controls.start('desktop');
    }
  }, [mobileIsOpen, height]);

  return (
    <div ref={ref}>
      <Box
        position="relative"
        height="calc(var(--viewport-height, 1vh) * 100 - var(--header-height))"
        marginTop="var(--header-height)"
        overflow="hidden"
      >
        <Left apportionment={apportionment}>{(!mobile || !hideLeftOnMobile) && left}</Left>
        <Right
          variants={variants}
          initial="desktop"
          animate={controls}
          hideLeftOnMobile={hideLeftOnMobile}
          apportionment={100 - apportionment}
        >
          {right}
        </Right>
      </Box>
    </div>
  );
};
/**
 * Checks for events. Only used if checkForIncomingEvents() has not
 * been overridden.
 * <p>
 * This default implementation performs no polling and always returns an
 * immutable empty list, so legacy subclasses must override it to supply
 * real events.
 *
 * @deprecated Use checkForIncomingEvents() instead.
 * @return list of events; never {@code null}, empty by default
 */
@Deprecated
protected List<String> checkForEvents() {
    return Collections.emptyList();
}
// BrailleCircleCellOpts sets options on the cells that contain the circle. // Cell options on a braille canvas can only be set on the entire cell, not per // pixel. func BrailleCircleCellOpts(cOpts ...cell.Option) BrailleCircleOption { return brailleCircleOption(func(opts *brailleCircleOptions) { opts.cellOpts = cOpts }) }
extern crate skim; use skim::prelude::*; /// This example illustrates downcasting custom structs that implement /// `SkimItem` after calling `Skim::run_with`. #[derive(Debug, Clone)] struct Item { text: String, } impl SkimItem for Item { fn text(&self) -> Cow<str> { Cow::Borrowed(&self.text) } fn preview(&self, _context: PreviewContext) -> ItemPreview { ItemPreview::Text(self.text.to_owned()) } } pub fn main() { let options = SkimOptionsBuilder::default() .height(Some("50%")) .multi(true) .preview(Some("")) .build() .unwrap(); let (tx, rx): (SkimItemSender, SkimItemReceiver) = unbounded(); tx.send(Arc::new(Item { text: "a".to_string() })).unwrap(); tx.send(Arc::new(Item { text: "b".to_string() })).unwrap(); tx.send(Arc::new(Item { text: "c".to_string() })).unwrap(); drop(tx); let selected_items = Skim::run_with(&options, Some(rx)) .map(|out| out.selected_items) .unwrap_or_else(Vec::new) .iter() .map(|selected_item| (**selected_item).as_any().downcast_ref::<Item>().unwrap().to_owned()) .collect::<Vec<Item>>(); for item in selected_items { println!("{:?}", item); } }
import requests
import json

# OAuth2 client-credentials grant payload for the GS identity service.
# NOTE(security): credentials are hard-coded here; move them to environment
# variables or a secrets store before sharing or committing this script.
auth_data = {
    "grant_type": "client_credentials",
    "client_id": "86092afbdb44404fa54b97442e7c8c6a",
    "client_secret": "bb640134957816330efd8adfaf150e30c5907c63d757169377573bc4dd259018",
    "scope": "read_product_data"
}

# create session instance (reused for both requests so the auth header set
# below applies to the data query as well)
session = requests.Session()

# Exchange the client credentials for an access token.
auth_response = session.post(
    "https://idfs.gs.com/as/token.oauth2", data=auth_data)
# Fail fast with a clear HTTP error instead of a KeyError on "access_token".
auth_response.raise_for_status()
access_token_dict = auth_response.json()
# NOTE(security): this prints the bearer token to stdout.
print(access_token_dict)
access_token = access_token_dict["access_token"]

# update session headers with access token
session.headers.update({"Authorization": "Bearer " + access_token})

# Query one year of the USCANFPP_MINI dataset for the listed GS identifiers.
request_url = "https://api.marquee.gs.com/v1/data/USCANFPP_MINI/query"
request_query = {
    "where": {
        "gsid": ["75154", "193067", "194688", "902608", "85627"]
    },
    "startDate": "2017-01-15",
    "endDate": "2018-01-15"
}
response = session.post(url=request_url, json=request_query)
response.raise_for_status()
results = response.json()
print(results)
// lineDiff reports, via t, the differences between the lines of a and b.
// It aborts the test if the two strings do not have the same number of
// lines (a positional diff would be meaningless), and otherwise reports
// each pair of lines that differ.
func lineDiff(t *testing.T, a, b string) {
	aslice := strings.Split(a, "\n")
	bslice := strings.Split(b, "\n")
	if len(aslice) != len(bslice) {
		// t.Fatal* stops this test immediately, so no explicit return is
		// needed; include both counts to make the failure actionable.
		t.Fatalf("Can't diff text, mismatched number of lines: %d != %d\n", len(aslice), len(bslice))
	}
	for i, s := range aslice {
		if s != bslice[i] {
			t.Errorf("Line %d: %q != %q\n", i+1, s, bslice[i])
		}
	}
}
from math import log2, ceil, pow  # retained for compatibility; integer math below supersedes them

# For each first value n (1..N, each weighted 1/N), add (1/2)**num to the
# answer, where num is the smallest number of doublings with n * 2**num >= K.
N, K = map(int, input().split())
ans = 0.0
for n in range(1, N + 1):
    # ceil(K / n) computed with integer division: float-based
    # ceil(log2(ceil(K/n))) can be off by one for large K due to rounding.
    need = -(-K // n)
    # smallest num with 2**num >= need; bit_length of need-1 gives exactly
    # ceil(log2(need)) for need >= 1 (and 0 when need <= 1).
    num = max(need - 1, 0).bit_length()
    ans += (1 / N) * 0.5 ** num
print(ans)
/**
 * The LocationComponent on android implements both location tracking and display of user's current location.
 * LocationComponentManager attempts to separate that, so that Camera can ask for location tracking independent of display of user current location.
 * And NativeUserLocation can ask for display of user's current location - independent of Camera's user tracking.
 */
public class LocationComponentManager {
    private RCTMGLMapView mMapView = null;
    private MapboxMap mMap = null;

    private LocationManager mLocationManager = null;
    private LocationComponentPlugin mLocationComponent = null;
    private Context mContext = null;

    private int mCameraMode = CameraMode.NONE;
    private @RenderMode.Mode int mRenderMode = RenderMode.COMPASS;

    // While following the user, rotates the camera to match the indicator's bearing.
    private OnIndicatorBearingChangedListener mLocationBearingChangedListener = new OnIndicatorBearingChangedListener() {
        @Override
        public void onIndicatorBearingChanged(double v) {
            if (mFollowUserLocation) {
                mMapView.getMapboxMap().setCamera(new CameraOptions.Builder().bearing(v).build());
            }
        }
    };

    private boolean bearingListenerInstalled = false;

    // While following the user, re-centers the camera and the gesture focal
    // point on the indicator's position.
    private OnIndicatorPositionChangedListener mLocationPositionChangeListener = new OnIndicatorPositionChangedListener() {
        @Override
        public void onIndicatorPositionChanged(@NonNull Point point) {
            if (mFollowUserLocation) {
                mMapView.getMapboxMap().setCamera(new CameraOptions.Builder().center(point).build());
                GesturesUtils.getGestures(mMapView).setFocalPoint(mMapView.getMapboxMap().pixelForCoordinate(point));
                // sendUserLocationUpdateEvent(point);
            }
        }
    };

    public LocationComponentManager(RCTMGLMapView rctmglMapView, Context context) {
        mMapView = rctmglMapView;
        mMap = mMapView.getMapboxMap();
        mContext = context;

        mLocationManager = LocationManager.getInstance(context);
    }

    // Whether the user's location should be displayed on the map.
    private boolean mShowUserLocation = false;

    // Whether the camera should follow the user's location.
    private boolean mFollowUserLocation = false;

    // Whether the user's location is currently being displayed (last applied state).
    private boolean mShowingUserLocation = false;

    /** Requests showing/hiding the user's location puck and applies the new state. */
    public void showUserLocation(boolean showUserLocation) {
        mShowUserLocation = showUserLocation;
        stateChanged();
    }

    /** Requests that the camera start/stop following the user and applies the new state. */
    public void setFollowUserLocation(boolean followUserLocation) {
        mFollowUserLocation = followUserLocation;
        stateChanged();
    }

    /**
     * Sets the camera mode and installs/removes the bearing and position
     * listeners accordingly: bearing tracking is off for NONE/TRACKING,
     * position tracking is off for the NONE* modes.
     */
    public void setCameraMode(@CameraMode.Mode int cameraMode) {
        mCameraMode = cameraMode;
        stateChanged();

        LocationComponentPlugin locationComponent = LocationComponentUtils.getLocationComponent(mMapView);

        if (mCameraMode == CameraMode.NONE || mCameraMode == CameraMode.TRACKING) {
            locationComponent.removeOnIndicatorBearingChangedListener(mLocationBearingChangedListener);
        } else {
            locationComponent.addOnIndicatorBearingChangedListener(mLocationBearingChangedListener);
        }

        if (mCameraMode == CameraMode.NONE || mCameraMode == CameraMode.NONE_COMPASS || mCameraMode == CameraMode.NONE_GPS) {
            locationComponent.removeOnIndicatorPositionChangedListener(mLocationPositionChangeListener);
        } else {
            locationComponent.addOnIndicatorPositionChangedListener(mLocationPositionChangeListener);
        }
    }

    /** Re-applies puck options so a changed tint color takes effect. */
    public void tintColorChanged() {
        applyOptions(mShowingUserLocation, mLocationComponent);
    }

    public void setRenderMode(@RenderMode.Mode int renderMode) {
        mRenderMode = renderMode;
    }

    /*
    public void addOnCameraTrackingChangedListener(OnCameraTrackingChangedListener onCameraTrackingChangedListener) {
        // mLocationComponent.addOnCameraTrackingChangedListener(onCameraTrackingChangedListener);
    }*/

    // Applies the current mShowUserLocation/mFollowUserLocation flags to the
    // location component.
    @SuppressLint("MissingPermission")
    private void stateChanged() {
        mLocationComponent.setEnabled((mFollowUserLocation || mShowUserLocation));

        if (mShowingUserLocation != mShowUserLocation) {
            updateShowUserLocation(mShowUserLocation);
        }

        if (mFollowUserLocation) {
            if (!mShowUserLocation) {
                // mLocationComponent.setRenderMode(RenderMode.GPS);
            } else {
                // mLocationComponent.setRenderMode(mRenderMode);
            }
            mLocationComponent.onStart();
        } else {
            // mLocationComponent.setCameraMode(CameraMode.NONE);
        }
        // NOTE(review): setEnabled is called a second time here with the same
        // value as at the top of this method — looks redundant; confirm before
        // removing.
        mLocationComponent.setEnabled(mFollowUserLocation || mShowUserLocation);
    }

    public boolean hasLocationComponent() {
        return (mLocationComponent != null);
    }

    /** Updates the component for the given style using the currently requested visibility. */
    public void update(@NonNull Style style) {
        update(mShowUserLocation, style);
    }

    /**
     * Lazily creates the location component (wiring in our location provider)
     * on first use, then applies the requested visibility.
     */
    public void update(boolean displayUserLocation, @NonNull Style style) {
        if (mLocationComponent == null) {
            mLocationComponent = LocationComponentUtils.getLocationComponent(mMapView);
            mLocationComponent.setLocationProvider(mLocationManager.getProvider());
            mShowingUserLocation = displayUserLocation;
        }

        updateShowUserLocation(displayUserLocation);
    }

    // Applies puck options only when the displayed state actually changes.
    private void updateShowUserLocation(boolean displayUserLocation) {
        if (mShowingUserLocation != displayUserLocation) {
            applyOptions(displayUserLocation, mLocationComponent);
            mShowingUserLocation = displayUserLocation;
        }
    }

    /**
     * Configures the puck drawables. When hidden, all images are replaced by
     * an empty drawable and pulsing is disabled (the component itself stays
     * enabled so position/bearing events keep flowing); when shown, the
     * standard Mapbox icons are used, tinted with the map view's tint color
     * if one is set.
     */
    private void applyOptions(boolean displayUserLocation, LocationComponentPlugin locationComponent) {
        locationComponent.setEnabled(true);
        if (!displayUserLocation) {
            LocationPuck2D locationPuck = new LocationPuck2D();
            Drawable empty = AppCompatResources.getDrawable(mContext, R.drawable.empty);
            locationPuck.setBearingImage(empty);
            locationPuck.setShadowImage(empty);
            locationPuck.setTopImage(empty);
            locationComponent.setLocationPuck(locationPuck);
            locationComponent.setPulsingEnabled(false);
        } else {
            int mapboxBlueColor = Color.parseColor("#4A90E2");

            LocationPuck2D locationPuck = new LocationPuck2D();

            Drawable topImage = AppCompatResources.getDrawable(mContext, R.drawable.mapbox_user_icon);
            Integer tintColor = mMapView.getTintColor();
            if (tintColor != null) {
                VectorDrawable drawable = (VectorDrawable)topImage;
                drawable.setTint(tintColor);
                topImage = drawable;
            }
            locationPuck.setTopImage(topImage);

            Drawable bearingImage = AppCompatResources.getDrawable(mContext, R.drawable.mapbox_user_stroke_icon);
            locationPuck.setBearingImage(bearingImage);

            Drawable shadowImage = AppCompatResources.getDrawable(mContext, R.drawable.mapbox_user_icon_shadow);
            locationPuck.setShadowImage(shadowImage);

            locationComponent.setLocationPuck(locationPuck);
            locationComponent.setPulsingEnabled(true);
            if (tintColor != null) {
                locationComponent.setPulsingColor(tintColor);
            } else {
                locationComponent.setPulsingColor(mapboxBlueColor);
            }
        }
    }
}
/**
 * Stores the JVM property value of https.cipherSuites and then removes the
 * property entirely (it is cleared, not set to an empty string).
 * This ensures that the value of https.cipherSuites does
 * not affect the result of tests.
 */
static void storeHttpsCipherSuites() {
    String cipherSuites = System.getProperty(HTTPS_CIPHER_SUITES_KEY);
    if (cipherSuites != null) {
        LOG.info(
            "Found value for property {}: {}",
            HTTPS_CIPHER_SUITES_KEY,
            cipherSuites);
        // Remember the original value in a static field so it is not lost
        // after the property is cleared below.
        cipherSuitesPropertyValue = cipherSuites;
    }
    System.clearProperty(HTTPS_CIPHER_SUITES_KEY);
}
There were a lot of smiling faces in the Canadiens’ locker room following a rare Sunday morning practice in Brossard. The players had reason to be feeling good after Saturday night’s 3-0 win over the Buffalo Sabres at the Bell Centre with Carey Price stopping all 36 shots he faced in his first game back in goal after missing 10 games with a lower-body injury. The win ended a five-game losing streak (0-3-2) for the Canadiens, and Price’s performance was a reason for optimism going forward, even if it came against the worst team in the Eastern Conference. Price had allowed four or more goals in seven of his first 11 starts, and the shutout improved his record to 4-7-1 with a 3.44 goals-against average and .890 save percentage. The Canadiens improved their record to 9-12-3 heading into Monday’s game against the Columbus Blue Jackets at the Bell Centre (7:30 p.m., TSN2, RDS, TSN Radio 690). More important, the Canadiens moved within four points of the Detroit Red Wings for third place in the Atlantic Division, which earns a playoff spot. After Monday’s game against the Blue Jackets, the Canadiens will play the Ottawa Senators Wednesday night at the Bell Centre and then play back-to-back games against the Red Wings Thursday in Detroit and Saturday in Montreal. “We realize that this week three of the four games are against teams we want to catch (in the standings), so it’s a big week for us,” Julien said about the games against Ottawa and Detroit. The Canadiens play only six Monday night games all season, including four at the Bell Centre, so Sunday is normally a day off for the players. But not this weekend with so many games on the schedule this week and little time for full practices. “It was a good, quick skate, and now we can all go listen to Gally (Brendan Gallagher) text us about the Grey Cup because none of us are going to watch,” captain Max Pacioretty said after practice with a big smile. 
“He’s a big CFL fan … the only one.” Pacioretty said the other Canadiens players are more into the NFL and their Fantasy League. Pacioretty has now gone six games without a point and is a team-worst minus-12, but is happy to be back playing on a line with Phillip Danault at centre and Andrew Shaw on the right wing after a failed experiment by Julien putting the captain with Jonathan Drouin and Alex Galchenyuk. “I really like playing with Phil,” said Pacioretty, who has 7-5-12 totals this season. “I think with matchups and stuff it benefits my game a little bit. I think I play a bit like a perfectionist. I feel like you have to build up good shifts in order to cash in on a goal, and it’s hard for me to play a game where sometimes you’re in your own end and then you get that one opportunity and you feel like you should put it in the back of the net. I’ve never really played that way and that was difficult sometimes when I was on the other line and we were getting different matchups. Probably easier matchups, but I felt like we were getting sheltered a bit. So this is just comfortable for me, and it’s all about me trying to find my game, and I think going back to what makes me feel comfortable can help.” When asked why it didn’t work with Drouin and Galchenyuk, Pacioretty said: “I think all three of us try and play the same way. Those two don’t really go to the net a lot and Jo is definitely a perimeter player. I was trying to go to the net, and that’s not really my speciality.” Paul Byron took Pacioretty’s spot with Drouin and Galchenyuk Saturday night, and all three registered two points. Galchenyuk ended an 11-game goal-less drought and added an assist, while Byron scored short-handed and had an assist and Drouin picked up two assists. “I think Pauly works well with them because he can hang out at the crease and kind of create time and space for them,” Pacioretty said. 
“I can do that at times, but I’m more of a distance shooter and I have to get my distance shots in order to do that as well. I think they’re benefiting from good matchups right now. We’ll be the first ones to tell you individually we didn’t have great games against Dallas and Nashville (last week). Those are two good teams that didn’t give us any time and space. Last night was different. Our line and their line found a little bit of time and space. I think both lines felt more comfortable and hopefully we can roll with this now.” Pacioretty said he was headed home after practice Sunday to watch the NFL action and wait for Gallagher’s texts about the Grey Cup game. On Monday night, the captain will be looking to end his six-game pointless streak. [email protected] twitter.com/StuCowan1
/*******************************************************************************
 * Copyright (c) 2009, 2021 IBM Corp., Ian Craggs
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v2.0
 * and Eclipse Distribution License v1.0 which accompany this distribution.
 *
 * The Eclipse Public License is available at
 *    https://www.eclipse.org/legal/epl-2.0/
 * and the Eclipse Distribution License is available at
 *   http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *    Ian Craggs - initial API and implementation and/or initial documentation
 *******************************************************************************/

/**
 * @file
 * \brief Trace messages
 *
 */

#include "Messages.h"
#include "Log.h"

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "Heap.h"

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))

#define max_msg_len 120

/* Format strings for protocol-level trace messages (log level
 * TRACE_PROTOCOL). Looked up positionally by index in Messages_get, so the
 * order of entries must not change; the "was NNN" comments record each
 * message's historical index. */
static const char *protocol_message_list[] =
{
	"%d %s -> CONNECT version %d clean: %d (%d)", /* 0, was 131, 68 and 69 */
	"%d %s <- CONNACK rc: %d", /* 1, was 132 */
	"%d %s -> CONNACK rc: %d (%d)", /* 2, was 138 */
	"%d %s <- PINGREQ", /* 3, was 35 */
	"%d %s -> PINGRESP (%d)", /* 4 */
	"%d %s <- DISCONNECT", /* 5 */
	"%d %s <- SUBSCRIBE msgid: %d", /* 6, was 39 */
	"%d %s -> SUBACK msgid: %d (%d)", /* 7, was 40 */
	"%d %s <- UNSUBSCRIBE msgid: %d", /* 8, was 41 */
	"%d %s -> UNSUBACK msgid: %d (%d)", /* 9 */
	"%d %s -> PUBLISH msgid: %d qos: %d retained: %d rc %d payload len(%d): %.*s", /* 10, was 42 */
	"%d %s <- PUBLISH msgid: %d qos: %d retained: %d payload len(%d): %.*s", /* 11, was 46 */
	"%d %s -> PUBACK msgid: %d (%d)", /* 12, was 47 */
	"%d %s -> PUBREC msgid: %d (%d)", /* 13, was 48 */
	"%d %s <- PUBACK msgid: %d", /* 14, was 49 */
	"%d %s <- PUBREC msgid: %d", /* 15, was 53 */
	"%d %s -> PUBREL msgid: %d (%d)", /* 16, was 57 */
	"%d %s <- PUBREL msgid %d", /* 17, was 58 */
	"%d %s -> PUBCOMP msgid %d (%d)", /* 18, was 62 */
	"%d %s <- PUBCOMP msgid:%d", /* 19, was 63 */
	"%d %s -> PINGREQ (%d)", /* 20, was 137 */
	"%d %s <- PINGRESP", /* 21, was 70 */
	"%d %s -> SUBSCRIBE msgid: %d (%d)", /* 22, was 72 */
	"%d %s <- SUBACK msgid: %d", /* 23, was 73 */
	"%d %s <- UNSUBACK msgid: %d", /* 24, was 74 */
	"%d %s -> UNSUBSCRIBE msgid: %d (%d)", /* 25, was 106 */
	"%d %s <- CONNECT", /* 26 */
	"%d %s -> PUBLISH qos: 0 retained: %d rc: %d payload len(%d): %.*s", /* 27 */
	"%d %s -> DISCONNECT (%d)", /* 28 */
	"Socket error for client identifier %s, socket %d, peer address %s; ending connection", /* 29 */
	"%d %s <- DISCONNECT (%d)", /* 30 */
};

/* Format strings for all other (non-protocol) trace messages, also looked up
 * positionally by index in Messages_get. */
static const char *trace_message_list[] =
{
	"Failed to remove client from bstate->clients", /* 0 */
	"Removed client %s from bstate->clients, socket %d", /* 1 */
	"Packet_Factory: unhandled packet type %d", /* 2 */
	"Packet %s received from client %s for message identifier %d, but no record of that message identifier found", /* 3 */
	"Packet %s received from client %s for message identifier %d, but message is wrong QoS, %d", /* 4 */
	"Packet %s received from client %s for message identifier %d, but message is in wrong state", /* 5 */
	"%s received from client %s for message id %d - removing publication", /* 6 */
	"Trying %s again for client %s, socket %d, message identifier %d", /* 7 */
	"", /* 8 */
	"(%lu) %*s(%d)> %s:%d", /* 9 */
	"(%lu) %*s(%d)< %s:%d", /* 10 */
	"(%lu) %*s(%d)< %s:%d (%d)", /* 11 */
	"Storing unsent QoS 0 message", /* 12 */
};

/**
 * Get a log message by its index
 * @param index the integer index
 * @param log_level the log level, used to determine which message list to use
 * @return the message format string, or NULL if the index is out of range
 * for the selected list
 */
const char* Messages_get(int index, enum LOG_LEVELS log_level)
{
	const char *msg = NULL;

	/* TRACE_PROTOCOL selects the protocol list; every other level uses the
	 * general trace list. */
	if (log_level == TRACE_PROTOCOL)
		msg = (index >= 0 && index < ARRAY_SIZE(protocol_message_list)) ?
				protocol_message_list[index] : NULL;
	else
		msg = (index >= 0 && index < ARRAY_SIZE(trace_message_list)) ?
				trace_message_list[index] : NULL;
	return msg;
}
import { CellPosition, toCursor } from "app/client/components/CellPosition";
import {
  Disposable, dom, Emitter, Holder, IDisposable, IDisposableOwner, IDomArgs,
  MultiHolder, styled, TagElem
} from "grainjs";
import { GristDoc } from "app/client/components/GristDoc";
import { ITooltipControl, showTooltip, tooltipCloseButton } from "app/client/ui/tooltips";
import { FieldEditorStateEvent } from "app/client/widgets/FieldEditor";
import { colors, testId } from "app/client/ui2018/cssVars";
import { cssLink } from "app/client/ui2018/links";

/**
 * Component that keeps track of editor's state (draft value). If user hits an escape button
 * by accident, this component will provide a way to continue the work.
 * Each editor can report its current state, that will be remembered and restored
 * when user wishes to continue his work.
 * Each document can have only one draft at a particular time, that
 * is cleared when changes occur on any other cell or the cursor navigates away from a cell.
 *
 * This component is built as a plugin for GristDoc. GristDoc, FieldBuilder, FieldEditor were just
 * extended in order to provide some public interface that this object plugs into.
 * To disable the drafts, simply remove it from GristDoc.
 */
export class Drafts extends Disposable {
  constructor(
    doc: GristDoc
  ) {
    super();

    // Here are all the parts that play some role in this feature

    // Cursor will navigate the cursor on a view to a proper cell
    const cursor: Cursor = CursorAdapter.create(this, doc);
    // Storage will remember last draft
    const storage: Storage = StorageAdapter.create(this);
    // Notification will show notification with button to undo discard
    const notification: Notification = NotificationAdapter.create(this, doc);
    // Tooltip will hover above the editor and offer to continue from last edit
    const tooltip: Tooltip = TooltipAdapter.create(this, doc);
    // Editor will restore its previous state and inform about keyboard events
    const editor: Editor = EditorAdapter.create(this, doc);

    // Here is the main use case describing how parts are connected
    const when = makeWhen(this);

    // When user cancels the editor
    when(editor.cellCancelled, (ev: StateChanged) => {
      // if the state of the editor hasn't changed
      if (!ev.modified) {
        // close the tooltip and notification
        tooltip.close();
        notification.close();
        // don't store the draft - we assume that user
        // actually wanted to discard the draft by pressing
        // escape again
        return;
      }
      // Show notification
      notification.showUndoDiscard();
      // Save draft in memory
      storage.save(ev);
      // Make sure that tooltip is not visible
      tooltip.close();
    });

    // When user clicks notification to continue with the draft
    when(notification.pressed, async () => {
      // if the draft is there
      const draft = storage.get();
      if (draft) {
        // restore the position of a cell
        await cursor.goToCell(draft.position);
        // activate the editor
        await editor.activate();
        // and restore last draft
        editor.setState(draft.state);
      }
      // We don't need the draft any more.
      // If user presses escape one more time it will be created
      // once again
      storage.clear();
      // Close the notification
      notification.close();
      // tooltip is not visible here, and will be shown
      // when editor is activated
    });

    // When user doesn't do anything while the notification is visible
    // remove the draft when it disappears
    when(notification.disappeared, () => {
      storage.clear();
    });

    // When editor is activated (user typed something or double clicked a cell)
    when(editor.activated, (pos: CellPosition) => {
      // if there was a draft for a cell
      if (storage.hasDraftFor(pos)) {
        // show tooltip to continue with a draft
        tooltip.showContinueDraft();
      }
      // make sure that notification is not visible
      notification.close();
    });

    // When editor is modified, close tooltip after some time
    when(editor.cellModified, (_: StateChanged) => {
      tooltip.scheduleClose();
    });

    // When user saves a cell
    when(editor.cellSaved, (_: StateChanged) => {
      // just close everything and clear draft
      storage.clear();
      tooltip.close();
      notification.close();
    });

    // When a user clicks a tooltip to continue with a draft
    when(tooltip.click, () => {
      const draft = storage.get();
      // if there was a draft
      if (draft) {
        // restore the draft
        editor.setState(draft.state);
      }
      // close the tooltip
      tooltip.close();
    });
  }
}

///////////////////////////////////////////////////////////
// Roles definition that abstract the way this feature interacts with Grist

/**
 * Cursor role can navigate the cursor to a proper cell
 */
interface Cursor {
  goToCell(pos: CellPosition): Promise<void>;
}

/**
 * Editor role represents active editor that is attached to a cell.
 */
interface Editor {
  // Occurs when user triggers the save operation (by the enter key, clicking away)
  cellSaved: TypedEmitter<StateChanged>;
  // Occurs when user changes the content of the active editor
  cellModified: TypedEmitter<StateChanged>;
  // Occurs when user typed something on a cell or double clicked it
  activated: TypedEmitter<CellPosition>;
  // Occurs when user cancels the edit (mainly by the escape key or by icon on mobile)
  cellCancelled: TypedEmitter<StateChanged>;
  // Editor can restore its state
  setState(state: any): void;
  // Editor can be shown up to the user on active cell
  activate(): Promise<void>;
}

/**
 * Notification that is shown to the user on the right bottom corner
 */
interface Notification {
  // Occurs when user clicked the notification
  pressed: Signal;
  // Occurs when notification disappears with no action from a user
  disappeared: Signal;
  // Notification can be closed if it is visible
  close(): void;
  // Show notification to the user, to inform him that he can continue with the draft
  showUndoDiscard(): void;
}

/**
 * Storage abstraction. Is responsible for storing latest
 * draft (position and state)
 */
interface Storage {
  // Retrieves latest draft data
  get(): State | null;
  // Stores latest draft data
  save(ev: State): void;
  // Checks if there is draft data at the position
  hasDraftFor(position: CellPosition): boolean;
  // Removes draft data
  clear(): void;
}

/**
 * Tooltip role is responsible for showing a tooltip over the active field editor
 * with information that a draft is available, and a button to continue with the draft
 */
interface Tooltip {
  // Occurs when user clicks the button on the tooltip - so he wants
  // to continue with the draft
  click: Signal;
  // Show tooltip over active cell editor
  showContinueDraft(): void;
  // Close tooltip
  close(): void;
  // Close tooltip after some time
  scheduleClose(): void;
}

/**
 * Schema of the information that is stored in the storage.
 */
interface State {
  // State of the editor
  state: any;
  // Cell position where the draft was created
  position: CellPosition;
}

/**
 * Event that is emitted when editor state has changed
 */
interface StateChanged extends State {
  modified: boolean;
}

///////////////////////////////////////////////////////////
// Here are all the adapters for the roles above. They
// abstract the way this feature interacts with the GristDoc

class CursorAdapter extends Disposable implements Cursor {
  constructor(private _doc: GristDoc) {
    super();
  }
  public async goToCell(pos: CellPosition): Promise<void> {
    await this._doc.recursiveMoveToCursorPos(toCursor(pos, this._doc.docModel), true);
  }
}

class StorageAdapter extends Disposable implements Storage {
  // Single in-memory slot: only one draft exists per document at a time.
  private _memory: State | null;

  public get(): State | null {
    return this._memory;
  }
  public save(ev: State) {
    this._memory = ev;
  }
  public hasDraftFor(position: CellPosition): boolean {
    const item = this._memory;
    if (item && CellPosition.equals(item.position, position)) {
      return true;
    }
    return false;
  }
  public clear(): void {
    this._memory = null;
  }
}

class NotificationAdapter extends Disposable implements Notification {
  public readonly pressed: Signal;
  public readonly disappeared: Signal;
  // True when the notification was closed by a click or explicit close();
  // used to distinguish "disappeared on its own" in onDispose below.
  private _hadAction = false;
  private _holder = Holder.create(this);

  constructor(private _doc: GristDoc) {
    super();
    this.pressed = this.autoDispose(new Emitter());
    this.disappeared = this.autoDispose(new Emitter());
  }

  public close(): void {
    this._hadAction = true;
    this._holder.clear();
  }

  public showUndoDiscard() {
    const notifier = this._doc.app.topAppModel.notifier;
    const notification = notifier.createUserMessage("Undo discard", {
      message: () =>
        discardNotification(
          dom.on("click", () => {
            this._hadAction = true;
            this.pressed.emit();
          })
        )
    });
    notification.onDispose(() => {
      if (!this._hadAction) {
        this.disappeared.emit();
      }
    });
    this._holder.autoDispose(notification);
    this._hadAction = false;
  }
}

class TooltipAdapter extends Disposable implements Tooltip {
  public readonly click: Signal;

  // there can be only one tooltip at a time
  private _tooltip: ITooltipControl | null = null;
  private _scheduled = false;

  constructor(private _doc: GristDoc) {
    super();
    this.click = this.autoDispose(new Emitter());

    // make sure that the tooltip is closed when this object gets disposed
    this.onDispose(() => {
      this.close();
    });
  }

  public scheduleClose(): void {
    if (this._tooltip && !this._scheduled) {
      this._scheduled = true;
      // Wrap close() so that closing by any path also cancels the timer.
      const origClose = this._tooltip.close;
      this._tooltip.close = () => { clearTimeout(timer); origClose(); };
      const timer = setTimeout(this._tooltip.close, 6000);
    }
  }

  public showContinueDraft(): void {
    // close tooltip if there was a previous one
    this.close();

    // get the editor dom
    const editorDom = this._doc.activeEditor.get()?.getDom();
    if (!editorDom) {
      return;
    }

    // attach the tooltip
    this._tooltip = showTooltip(
      editorDom,
      cellTooltip(() => this.click.emit()));
  }

  public close(): void {
    this._scheduled = false;
    this._tooltip?.close();
    this._tooltip = null;
  }
}

class EditorAdapter extends Disposable implements Editor {
  public readonly cellSaved: TypedEmitter<StateChanged> = this.autoDispose(new Emitter());
  public readonly cellModified: TypedEmitter<StateChanged> = this.autoDispose(new Emitter());
  public readonly activated: TypedEmitter<CellPosition> = this.autoDispose(new Emitter());
  public readonly cellCancelled: TypedEmitter<StateChanged> = this.autoDispose(new Emitter());

  private _holder = Holder.create<MultiHolder>(this);

  constructor(private _doc: GristDoc) {
    super();

    // observe active editor
    this.autoDispose(_doc.activeEditor.addListener((editor) => {
      if (!editor) {
        return;
      }

      // when the editor is created we assume that it is visible to the user
      this.activated.emit(editor.cellPosition());

      // Auto dispose the previous MultiHolder along with all the previous listeners, and create a
      // new MultiHolder for the new ones.
      const mholder = MultiHolder.create(this._holder);

      mholder.autoDispose(editor.changeEmitter.addListener((e: FieldEditorStateEvent) => {
        this.cellModified.emit({
          position: e.position,
          state: e.currentState,
          modified: e.wasModified
        });
      }));

      // when user presses escape
      mholder.autoDispose(editor.cancelEmitter.addListener((e: FieldEditorStateEvent) => {
        this.cellCancelled.emit({
          position: e.position,
          state: e.currentState,
          modified: e.wasModified
        });
      }));

      // when user presses enter to save the value
      mholder.autoDispose(editor.saveEmitter.addListener((e: FieldEditorStateEvent) => {
        this.cellSaved.emit({
          position: e.position,
          state: e.currentState,
          modified: e.wasModified
        });
      }));
    }));
  }

  public setState(state: any): void {
    // rebuild active editor with a state from a draft
    this._doc.activeEditor.get()?.rebuildEditor(undefined, Number.POSITIVE_INFINITY, state);
  }

  public async activate() {
    // open up the editor at current position
    await this._doc.activateEditorAtCursor({});
  }
}

///////////////////////////////////////////////////////////
// Ui components

// Cell tooltip to restore the draft - it is visible over active editor
const styledTooltip = styled('div', `
  display: flex;
  align-items: center;
  --icon-color: ${colors.lightGreen};

  & > .${cssLink.className} {
    margin-left: 8px;
  }
`);

function cellTooltip(clb: () => any) {
  return function (ctl: ITooltipControl) {
    return styledTooltip(
      cssLink('Restore last edit',
        dom.on('mousedown', (ev) => {
          ev.preventDefault();
          ctl.close();
          clb();
        }),
        testId('draft-tooltip'),
      ),
      tooltipCloseButton(ctl),
    );
  };
}

// Discard notification dom
const styledNotification = styled('div', `
  cursor: pointer;
  color: ${colors.lightGreen};
  &:hover {
    text-decoration: underline;
  }
`);

function discardNotification(...args: IDomArgs<TagElem<"div">>) {
  return styledNotification(
    "Undo Discard",
    testId("draft-notification"),
    ...args
  );
}

///////////////////////////////////////////////////////////
// Internal implementations - not relevant to main use case

// helper method to listen to the Emitter and dispose the listener with a parent
function makeWhen(owner: IDisposableOwner) {
  return function <T extends EmitterType<any>>(emitter: T, handler: EmitterHandler<T>) {
    owner.autoDispose(emitter.addListener(handler as any));
  };
}

// Default emitter is not typed, this augments the Emitter interface
interface TypedEmitter<T> {
  emit(item: T): void;
  addListener(clb: (e: T) => any): IDisposable;
}

interface Signal {
  emit(): void;
  addListener(clb: () => any): IDisposable;
}

type EmitterType<T> = T extends TypedEmitter<infer E> ? TypedEmitter<E> : Signal;
type EmitterHandler<T> = T extends TypedEmitter<infer E> ? ((e: E) => any) : () => any;
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <string.h>
#include <ctype.h>

int correspond (char);
char correspond2 (int);
void conversaoAB (char*, int);
void conversaoBA (char*, int);

/*
 * Reads a number of test cases, then one spreadsheet cell reference per line.
 * References in "A1" style (e.g. BC23) are converted to "R1C1" style
 * (e.g. R23C55) and vice versa, and the result is printed.
 */
int main (void)
{
    int numCasos, tamEntrada;
    int i, j;
    int c;                       /* int, not char: fgetc must be able to return EOF */
    char *entrada = NULL, tipo;

    scanf ("%d ", &numCasos);

    for (j = 0; j < numCasos; j++) {
        /* Read one whole line into a dynamically grown buffer.
           On exit, tamEntrada is the string length INCLUDING the '\0'
           (that convention is relied upon by the conversion routines). */
        tamEntrada = 0;
        while ((c = fgetc(stdin)) != '\n' && c != EOF) {
            tamEntrada++;
            entrada = realloc(entrada, sizeof(char) * tamEntrada);
            if (entrada == NULL)
                return 1;
            entrada[tamEntrada - 1] = (char) c;
        }
        tamEntrada++;
        entrada = realloc(entrada, sizeof(char) * tamEntrada);
        if (entrada == NULL)
            return 1;
        entrada[tamEntrada - 1] = '\0';

        /* Detect the format: skip the leading letters; if another letter
           shows up after the first digits (as in R23C55), it is "type B",
           otherwise (as in BC23) it is "type A". */
        for (i = 0; isalpha((unsigned char) entrada[i]); i++)
            ;
        tipo = 'A';
        for (; i < tamEntrada; i++) {
            if (isalpha((unsigned char) entrada[i])) {
                tipo = 'B';
                break;
            }
        }

        if (tipo == 'A')
            conversaoAB(entrada, tamEntrada);
        else
            conversaoBA(entrada, tamEntrada);

        free(entrada);
        entrada = NULL;
    }

    return 0;
}

/*
 * Converts a "BC23"-style reference into "R23C55" style and prints it.
 * The column name is a bijective base-26 numeral (A=1 ... Z=26), evaluated
 * here with integer Horner arithmetic instead of the old pow()/floor() hack.
 */
void conversaoAB (char *entrada, int tamEntrada)
{
    int posicNumero, numGerado = 0;

    (void) tamEntrada;  /* kept for interface compatibility; entrada is NUL-terminated */

    /* "BC" -> 2*26 + 3 = 55 */
    for (posicNumero = 0; isalpha((unsigned char) entrada[posicNumero]); posicNumero++)
        numGerado = numGerado * 26 + correspond(entrada[posicNumero]);

    /* The row digits start right after the letters. */
    printf("R%sC%d\n", &entrada[posicNumero], numGerado);
}

/*
 * Converts a "R23C55"-style reference into "BC23" style and prints it.
 * First splits the input into the row digits (between 'R' and 'C') and the
 * column digits (after 'C'), then renders the column as bijective base 26.
 *
 * BUG FIX: the previous version passed a non-NUL-terminated buffer to atoi()
 * for the row digits, which is undefined behavior.
 */
void conversaoBA (char *entrada, int tamEntrada)
{
    char numerosLinha[tamEntrada], numerosColuna[tamEntrada];
    char letras[16];             /* an int column needs at most 7 letters */
    int numeroLinha, numeroColuna;
    int numLetras = 0;
    int i, j;

    /* Row digits sit between the leading 'R' and the 'C'. */
    for (i = 1, j = 0; !isalpha((unsigned char) entrada[i]); i++, j++)
        numerosLinha[j] = entrada[i];
    numerosLinha[j] = '\0';      /* atoi requires a terminated string */

    /* Column digits are everything after the 'C'. */
    for (i += 1, j = 0; i < tamEntrada && entrada[i] != '\0'; i++, j++)
        numerosColuna[j] = entrada[i];
    numerosColuna[j] = '\0';

    numeroLinha  = atoi(numerosLinha);
    numeroColuna = atoi(numerosColuna);

    /* Bijective base 26: there is no zero digit, so a remainder of 0 means
       the digit is 'Z' and we must borrow one from the quotient.
       55 -> "BC", 26 -> "Z", 27 -> "AA", 52 -> "AZ", 702 -> "ZZ". */
    while (numeroColuna > 0) {
        int resto = numeroColuna % 26;
        if (resto == 0) {
            letras[numLetras++] = 'Z';
            numeroColuna = numeroColuna / 26 - 1;
        } else {
            letras[numLetras++] = correspond2(resto);
            numeroColuna /= 26;
        }
    }

    /* Digits were produced least-significant first; print them reversed. */
    for (i = numLetras - 1; i >= 0; i--)
        putchar(letras[i]);
    printf("%d\n", numeroLinha);
}

/*
 * Maps 'A'..'Z' to 1..26; returns 0 for anything else (matching the old
 * switch's default). Assumes an ASCII-contiguous uppercase alphabet, which
 * holds on all mainstream platforms.
 */
int correspond (char letr)
{
    if (letr >= 'A' && letr <= 'Z')
        return letr - 'A' + 1;
    return 0;
}

/*
 * Inverse of correspond: maps 1..26 to 'A'..'Z'.
 * Falls back to 'A' for out-of-range input, matching the old switch.
 */
char correspond2 (int num)
{
    if (num >= 1 && num <= 26)
        return (char) ('A' + num - 1);
    return 'A';
}
<reponame>versionwen/faststart package com.wenxin.learn.faststart.web.service; import com.wenxin.learn.faststart.web.entity.LoginLog; import com.baomidou.mybatisplus.extension.service.IService; /** * <p> * 后台用户登录日志表 服务类 * </p> * * @author version * @since 2020-10-10 */ public interface LoginLogService extends IService<LoginLog> { }
class Solution {
public:
    // Minimum number of insertions into arr so that target becomes a
    // subsequence of it. Equivalent to len(target) minus the longest
    // increasing subsequence of arr's elements mapped to their (1-based)
    // positions in target, computed with patience sorting in O(n log n).
    int minOperations(vector<int>& target, vector<int>& arr) {
        // Map each target value to its 1-based index (values are distinct).
        map<int, int> pos;
        for (int i = 0; i < (int)target.size(); ++i)
            pos[target[i]] = i + 1;

        // tails[k] = smallest possible tail of a strictly increasing
        // subsequence of length k+1.
        vector<int> tails;
        for (int x : arr) {
            auto found = pos.find(x);
            if (found == pos.end())
                continue;  // value absent from target: never useful
            auto it = lower_bound(tails.begin(), tails.end(), found->second);
            if (it == tails.end())
                tails.push_back(found->second);
            else
                *it = found->second;
        }
        return target.size() - tails.size();
    }
};
# Reads N and a sequence, then decides whether the sequence can be arranged so
# that every adjacent product is a multiple of 4. Only the counts matter:
#   - odd numbers must each be "absorbed" next to a multiple of 4;
#   - numbers that are even but not multiples of 4 are only safe when none of
#     them has to sit between odds.
_ = int(input())
values = list(map(int, input().split()))

odd_count = sum(1 for v in values if v % 2 != 0)
mult4_count = sum(1 for v in values if v % 4 == 0)
even_not4_count = sum(1 for v in values if v % 2 == 0 and v % 4 != 0)

if mult4_count >= odd_count:
    # Enough multiples of 4 to separate every odd number.
    print('Yes')
elif even_not4_count == 0 and mult4_count >= odd_count - 1:
    # No "half-even" values: odds and multiples of 4 can strictly alternate.
    print('Yes')
else:
    print('No')
/**
 * A {@code LongSparseArray} of booleans that can be flattened into and restored
 * from an Android {@code Parcel}.
 *
 * @deprecated Since v3.5.0, use the LongSparseBooleanArray in gto-support-androidx-collection instead
 */
@Deprecated
public class LongSparseBooleanArray extends LongSparseArray<Boolean> implements Parcelable {
    /** Recreates instances from a Parcel produced by {@link #writeToParcel}. */
    public static final Parcelable.Creator<LongSparseBooleanArray> CREATOR =
            new Parcelable.Creator<LongSparseBooleanArray>() {
                @Override
                public LongSparseBooleanArray createFromParcel(@NonNull final Parcel source) {
                    // load data from the Parcel: count first, then the parallel key/value arrays
                    final int size = source.readInt();
                    final long[] keys = new long[size];
                    source.readLongArray(keys);
                    final boolean[] values = new boolean[size];
                    source.readBooleanArray(values);

                    // create & return the sparse array
                    final LongSparseBooleanArray array = new LongSparseBooleanArray();
                    for (int i = 0; i < size; i++) {
                        array.put(keys[i], values[i]);
                    }
                    return array;
                }

                @Override
                public LongSparseBooleanArray[] newArray(final int size) {
                    return new LongSparseBooleanArray[size];
                }
            };

    /** Creates an empty array with the default initial capacity. */
    public LongSparseBooleanArray() {}

    /** Creates an empty array pre-sized to hold {@code initialCapacity} mappings. */
    public LongSparseBooleanArray(final int initialCapacity) {
        super(initialCapacity);
    }

    @Override
    public int describeContents() {
        // no special content (e.g. file descriptors) inside
        return 0;
    }

    @Override
    public void writeToParcel(@NonNull final Parcel dest, final int flags) {
        // read the data into a parcelable format: two parallel primitive arrays
        final int size = size();
        final long[] keys = new long[size];
        final boolean[] values = new boolean[size];
        for (int i = 0; i < size; i++) {
            keys[i] = keyAt(i);
            values[i] = valueAt(i);
        }

        // write data to the Parcel (size first so createFromParcel can pre-size)
        dest.writeInt(size);
        dest.writeLongArray(keys);
        dest.writeBooleanArray(values);
    }
}
def gradient(r, mu):
    """Gradient of the inverse-square acceleration field at position ``r``.

    Returns the 3x3 matrix ``3*mu*(r r^T)/|r|^5 - (mu/|r|^3) I``.

    Parameters
    ----------
    r : array-like, shape (3,)
        Position vector.
    mu : float
        Gravitational parameter (presumably GM of the central body -- confirm
        units against the caller).
    """
    dist = norm(r)
    outer_term = (3.0 * mu / dist**5) * np.outer(r, r)
    central_term = (mu / dist**3) * np.identity(3)
    return outer_term - central_term
It’s hard to appreciate just how quickly and thoroughly Twitter has taken over the world. Just seven years ago, in 2006, it was an idea sketched out on a pad of paper. Now, the service is used by an estimated 554 million users—a number that amounts to nearly 8 percent of all humans on the planet—and an estimated 170 billion tweets have been sent, with that number climbing by roughly 58 million every single day. All these tweets provide an invaluable source of news, entertainment, conversation and connection between people. But for scientists, they’re also valuable as something rather different: raw data. Because Twitter features an open API (which allows for tweets to be downloaded as raw, analyzable data) and many tweets are geotagged, researchers can use billions of these tweets and analyze them by location to learn more about the geography of humans across the planet. Last fall, as part of the Global Twitter Heartbeat, a University of Illinois team analyzed the language and location of over a billion tweets from across the U.S. to create sophisticated maps of things like positive and negative emotions expressed during Hurricane Sandy, or support for Barack Obama or Mitt Romney during the Presidential election. As Joshua Keating noted on Foreign Policy‘s War of Ideas blog, members of the same group, led by Kalev Leetaru, have recently gone one step further. As published in a new study earlier this week in the online journal First Monday, they analyzed the locations and languages of 46,672,798 tweets posted between October 23 and November 30 of last year to create a stunning portrait of human activity around the planet, shown at the top of the post. They made use of the Twitter decahose, a data stream that captures a random 10 percent of all tweets worldwide at any given time (which totaled 1,535,929,521 for the time period), and simply focused on the tweets with associated geographic data. 
As the researchers note, the geographic density of tweets in many regions—especially in the Western world, where computers, mobile devices, and Twitter are all used at peak levels—closely matches rates of electrification and lighting use. As a result, the maps of tweets (such as the detail view of the continental U.S., below) end up looking a lot like satellite images of artificial light at night. As a test to see how well tweets matched artificial light use, they created the composite map below, in which tweets are shown as red dots and nighttime lighting is shown as blue. Areas where they correspond in frequency (and effectively cancel each other out) are shown as white, and areas where one outweighs the other remain red or blue. Many areas end up looking pretty white, with some key exceptions: Iran and China, where Twitter is banned, are noticeably blue, while many countries with relatively low electrification rates (but where Twitter is still popular) appear as red. The project got even more interesting when the researchers used an automated system to break down tweets by language. The most common language on Twitter is English, which is represented in 38.25 percent of all Tweets. After that came Japanese (11.84 percent), Spanish (11.37 percent), Indonesian (8.84 percent), Norwegian (7.74 percent) and Portuguese (5.58 percent). The team constructed a map of all tweets written in the 26 most popular languages, with each represented by a different color, below: While most countries’ tweets are dominated by their official languages, many are revealed to include tweets in a variety of other languages. 
Look closely enough, and you’ll see a rainbow of colors subtly popping out from the grey dots (English tweets) that blanket the U.S.: Among other analyses, the research team even looked at the geography of retweeting and referencing—the average distance between a user and someone he or she retweets, as well as the average distance between that user and someone he or she simply references in a tweet. On average, the distance for a retweet was 1,115 miles and 1,118 for a reference. But, counterintuitively, there was a positive relationship between the number of times a given user retweeted or referenced another user and their distance: Pairs of users with just a handful of interactions, on the whole, were more likely to be closer together (500-600 miles apart) than those with dozens of retweets and references between them. This indicates that users who live far apart are more likely to use Twitter to interact on a regular basis. One explanation might be that the entities with the most followers—and thus the most references and retweets—are often celebrities, organizations or corporations, users that people are familiar with but don’t actually have a personal relationship with. A global map of retweets between users is below: The paper went into even more detail on other data associated with tweets: the ratio between mainstream news coverage and number of tweets in a country (Europe and the U.S. get disproportionate media coverage, while Latin America and Indonesia are overlooked), the places Twitter has added the most users recently (the Middle East and Spain) and the places where users have, on average, the most followers (South America and the West Coast). There are a few caveats to all this data. For one, though the tweets analyzed number in the tens of millions, they are still just 0.3 percent of all tweets sent, so they might not adequately represent all Twitter patterns, especially if users who enable geotagging behave differently than others. 
Additionally, in the fast-changing world of Twitter, some trends might have already changed significantly since last fall. But as Twitter continues to grow and as more data become available, it stands to reason that this sort of analysis will only become more popular for demographers, computer scientists and other researchers.
from setuptools import setup, find_packages
from setuptools.command.test import test
from multiprocessing import freeze_support
import os
import sys
import unittest


def discover_and_run_tests():
    """Discover all test_*.py files under ./test and run them with unittest."""
    # get setup.py directory
    setup_file = sys.modules['__main__'].__file__
    setup_dir = os.path.abspath(os.path.dirname(setup_file))

    # use the default shared TestLoader instance
    test_loader = unittest.defaultTestLoader

    # use the basic test runner that outputs to sys.stderr
    test_runner = unittest.TextTestRunner()

    print(os.path.join(setup_dir, "test"))
    # automatically discover all tests
    test_suite = test_loader.discover(os.path.join(setup_dir, "test"), pattern='test_*.py')
    print(test_suite)

    # run the test suite
    test_runner.run(test_suite)


class DiscoverTest(test):
    """setuptools `test` command that delegates to unittest discovery."""

    def finalize_options(self):
        test.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # called by setuptools after the build step; runs the discovered suite
        discover_and_run_tests()


if __name__ == "__main__":
    # freeze_support() is needed on Windows when child processes are spawned
    freeze_support()
    assert sys.version_info >= (3, 6), "Minimum Python >= 3.6 is required!"
    setup(
        name = "irradpy",
        version = "1.5.0",
        keywords = ("MERRA2", "Clear Sky Model", "Solar Energy"),
        description = "Download tool for MERRA2 dataset for Clear Sky Model.",
        long_description = "This is a automated tool for MERRA2 data collection and filtering, for the analysis of Clear Sky Model.",
        license = "MIT Licence",
        url = "https://github.com/BXYMartin/Python-irradpy",
        author = "<NAME>, <NAME>, <NAME>",
        author_email = "<EMAIL>",
        packages = find_packages(exclude=['test', 'util']),
        include_package_data = True,
        platforms = "any",
        install_requires = [
            "pydap >= 3.0",
            "xarray >= 0.10.0",
            "config >= 0.4.0",
            "scipy >= 1.0.0",
            "utils >= 1.0.0",
            "netCDF4 >= 1.5.0",
            "numpy >= 1.10.0",
            "pathlib >= 1.0",
            "typing >= 3.5.0",
            "requests >= 2.0.0",
            "argparse >= 1.0",
            "cython >= 0.29.0",
            "pandas >= 0.20.0",
        ],
        scripts = [],
        # wire the custom `python setup.py test` command defined above
        cmdclass = {'test': DiscoverTest},
        entry_points = {
            'console_scripts': [
                'merra2_downloader = irradpy.downloader.socket:main'
            ]
        }
    )
// ParseTyped processes the given data and returns a map containing the values // of all named fields converted to their corresponding types. If no typehint is // given, the value will be converted to string. // The given pattern is compiled on every call to this function. // If you want to call this function more than once consider using Compile. func (grok Grok) ParseTyped(pattern string, data []byte) (map[string]interface{}, error) { complied, err := grok.Compile(pattern) if err != nil { return nil, err } return complied.ParseTyped(data) }
/* Created: 2020/03/14 - 2020/03/15.
   Counts how many squares of an H x W board a piece can reach. */
#include <stdio.h>

int main(void)
{
    long h, w;

    scanf("%ld %ld", &h, &w);

    /* Corner case: on a single row or column the piece cannot move at all,
       so exactly one square is reachable. E.g. (H, W) = (1, 10^9) must print
       1, not 500000000. */
    if (h == 1 || w == 1) {
        printf("1\n");
        return 0;
    }

    /* Otherwise the answer is ceil(H*W / 2): half the squares, rounded up
       when both dimensions are odd. This single expression matches the old
       three-branch computation:
         - H even:           H/2 * W        == H*W/2
         - W even:           H * W/2        == H*W/2
         - both odd:         H*W/2 + 1      == (H*W + 1)/2                  */
    printf("%ld\n", (h * w + 1) / 2);
    return 0;
}
from unittest.mock import Mock

import pytest

from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.commands import commands_dict, create_command
from pip._internal.exceptions import CommandError
from tests.conftest import InMemoryPip
from tests.lib import PipTestEnvironment


def test_run_method_should_return_success_when_finds_command_name() -> None:
    """
    Test HelpCommand.run for existing command
    """
    options_mock = Mock()
    args = ["freeze"]
    help_cmd = create_command("help")
    status = help_cmd.run(options_mock, args)
    assert status == SUCCESS


def test_run_method_should_return_success_when_command_name_not_specified() -> None:
    """
    Test HelpCommand.run when there are no args
    """
    options_mock = Mock()
    help_cmd = create_command("help")
    status = help_cmd.run(options_mock, [])
    assert status == SUCCESS


def test_run_method_should_raise_command_error_when_command_does_not_exist() -> None:
    """
    Test HelpCommand.run for non-existing command
    """
    options_mock = Mock()
    args = ["mycommand"]
    help_cmd = create_command("help")
    # unknown command names surface as CommandError rather than a status code
    with pytest.raises(CommandError):
        help_cmd.run(options_mock, args)


def test_help_command_should_exit_status_ok_when_command_exists(
    script: PipTestEnvironment,
) -> None:
    """
    Test `help` command for existing command
    """
    result = script.pip("help", "freeze")
    assert result.returncode == SUCCESS


def test_help_command_should_exit_status_ok_when_no_cmd_is_specified(
    script: PipTestEnvironment,
) -> None:
    """
    Test `help` command for no command
    """
    result = script.pip("help")
    assert result.returncode == SUCCESS


def test_help_command_should_exit_status_error_when_cmd_does_not_exist(
    script: PipTestEnvironment,
) -> None:
    """
    Test `help` command for non-existing command
    """
    result = script.pip("help", "mycommand", expect_error=True)
    assert result.returncode == ERROR


def test_help_command_redact_auth_from_url(script: PipTestEnvironment) -> None:
    """
    Test `help` on various subcommands redact auth from url
    """
    # the password embedded in the index URL must never be echoed in help output
    script.environ["PIP_INDEX_URL"] = "https://user:[email protected]"
    result = script.pip("install", "--help")
    assert result.returncode == SUCCESS
    assert "secret" not in result.stdout


def test_help_command_redact_auth_from_url_with_extra_index_url(
    script: PipTestEnvironment,
) -> None:
    """
    Test `help` on various subcommands redact auth from url with extra index url
    """
    script.environ["PIP_INDEX_URL"] = "https://user:[email protected]"
    script.environ["PIP_EXTRA_INDEX_URL"] = "https://user:[email protected]"
    result = script.pip("install", "--help")
    assert result.returncode == SUCCESS
    assert "secret" not in result.stdout


def test_help_commands_equally_functional(in_memory_pip: InMemoryPip) -> None:
    """
    Test if `pip help` and 'pip --help' behave the same way.
    """
    results = list(map(in_memory_pip.pip, ("help", "--help")))
    results.append(in_memory_pip.pip())

    out = map(lambda x: x.stdout, results)
    ret = map(lambda x: x.returncode, results)

    msg = '"pip --help" != "pip help" != "pip"'
    assert len(set(out)) == 1, "output of: " + msg
    assert sum(ret) == 0, "exit codes of: " + msg
    assert all(len(o) > 0 for o in out)

    # every registered command must render the same help via both spellings
    for name in commands_dict:
        assert (
            in_memory_pip.pip("help", name).stdout
            == in_memory_pip.pip(name, "--help").stdout
            != ""
        )
/*
Copyright IBM Corp. All Rights Reserved.

SPDX-License-Identifier: Apache-2.0
*/

package fabricconfig

import (
	"fmt"
	"io/ioutil"
	"time"

	"gopkg.in/yaml.v2"

	"github.com/hyperledger/fabric-test/tools/operator/launcher/nl"
	"github.com/hyperledger/fabric-test/tools/operator/networkspec"
	"github.com/hyperledger/fabric-test/tools/operator/paths"

	docker "github.com/fsouza/go-dockerclient"
)

// Core mirrors the structure of a Fabric peer's core.yaml. Fields are
// pointers so that absent sections are omitted when re-marshalled.
type Core struct {
	Logging    *Logging    `yaml:"logging,omitempty"`
	Peer       *Peer       `yaml:"peer,omitempty"`
	VM         *VM         `yaml:"vm,omitempty"`
	Chaincode  *Chaincode  `yaml:"chaincode,omitempty"`
	Ledger     *Ledger     `yaml:"ledger,omitempty"`
	Operations *Operations `yaml:"operations,omitempty"`
	Metrics    *Metrics    `yaml:"metrics,omitempty"`
}

// Logging holds the peer's log format; unrecognized keys survive round-trips
// via the inline ExtraProperties map.
type Logging struct {
	Format string `yaml:"format,omitempty"`

	ExtraProperties map[string]interface{} `yaml:",inline,omitempty"`
}

// Peer maps the `peer:` section of core.yaml.
type Peer struct {
	ID                     string          `yaml:"id,omitempty"`
	NetworkID              string          `yaml:"networkId,omitempty"`
	ListenAddress          string          `yaml:"listenAddress,omitempty"`
	ChaincodeListenAddress string          `yaml:"chaincodeListenAddress,omitempty"`
	ChaincodeAddress       string          `yaml:"chaincodeAddress,omitempty"`
	Address                string          `yaml:"address,omitempty"`
	AddressAutoDetect      bool            `yaml:"addressAutoDetect"`
	Keepalive              *Keepalive      `yaml:"keepalive,omitempty"`
	Gossip                 *Gossip         `yaml:"gossip,omitempty"`
	Events                 *Events         `yaml:"events,omitempty"`
	TLS                    *TLS            `yaml:"tls,omitempty"`
	Authentication         *Authentication `yaml:"authentication,omitempty"`
	FileSystemPath         string          `yaml:"fileSystemPath,omitempty"`
	BCCSP                  *BCCSP          `yaml:"BCCSP,omitempty"`
	MSPConfigPath          string          `yaml:"mspConfigPath,omitempty"`
	LocalMSPID             string          `yaml:"localMspId,omitempty"`
	Deliveryclient         *DeliveryClient `yaml:"deliveryclient,omitempty"`
	LocalMspType           string          `yaml:"localMspType,omitempty"`
	Handlers               *Handlers       `yaml:"handlers,omitempty"`
	ValidatorPoolSize      int             `yaml:"validatorPoolSize,omitempty"`
	Discovery              *Discovery      `yaml:"discovery,omitempty"`
	Limits                 *Limits         `yaml:"limits,omitempty"`

	ExtraProperties map[string]interface{} `yaml:",inline,omitempty"`
}

type Keepalive struct {
	MinInterval    time.Duration    `yaml:"minInterval,omitempty"`
	Client         *ClientKeepalive `yaml:"client,omitempty"`
	DeliveryClient *ClientKeepalive `yaml:"deliveryClient,omitempty"`
}

type ClientKeepalive struct {
	Interval time.Duration `yaml:"interval,omitempty"`
	Timeout  time.Duration `yaml:"timeout,omitempty"`
}

// Gossip maps `peer.gossip`; note the leader-election booleans deliberately
// lack omitempty so that explicit `false` values are emitted.
type Gossip struct {
	Bootstrap                  string        `yaml:"bootstrap,omitempty"`
	UseLeaderElection          bool          `yaml:"useLeaderElection"`
	OrgLeader                  bool          `yaml:"orgLeader"`
	MembershipTrackerInterval  time.Duration `yaml:"membershipTrackerInterval,omitempty"`
	Endpoint                   string        `yaml:"endpoint,omitempty"`
	MaxBlockCountToStore       int           `yaml:"maxBlockCountToStore,omitempty"`
	MaxPropagationBurstLatency time.Duration `yaml:"maxPropagationBurstLatency,omitempty"`
	MaxPropagationBurstSize    int           `yaml:"maxPropagationBurstSize,omitempty"`
	PropagateIterations        int           `yaml:"propagateIterations,omitempty"`
	PropagatePeerNum           int           `yaml:"propagatePeerNum,omitempty"`
	PullInterval               time.Duration `yaml:"pullInterval,omitempty"`
	PullPeerNum                int           `yaml:"pullPeerNum,omitempty"`
	RequestStateInfoInterval   time.Duration `yaml:"requestStateInfoInterval,omitempty"`
	PublishStateInfoInterval   time.Duration `yaml:"publishStateInfoInterval,omitempty"`
	StateInfoRetentionInterval time.Duration `yaml:"stateInfoRetentionInterval,omitempty"`
	PublishCertPeriod          time.Duration `yaml:"publishCertPeriod,omitempty"`
	DialTimeout                time.Duration `yaml:"dialTimeout,omitempty"`
	ConnTimeout                time.Duration `yaml:"connTimeout,omitempty"`
	RecvBuffSize               int           `yaml:"recvBuffSize,omitempty"`
	SendBuffSize               int           `yaml:"sendBuffSize,omitempty"`
	DigestWaitTime             time.Duration `yaml:"digestWaitTime,omitempty"`
	RequestWaitTime            time.Duration `yaml:"requestWaitTime,omitempty"`
	ResponseWaitTime           time.Duration `yaml:"responseWaitTime,omitempty"`
	AliveTimeInterval          time.Duration `yaml:"aliveTimeInterval,omitempty"`
	AliveExpirationTimeout     time.Duration `yaml:"aliveExpirationTimeout,omitempty"`
	ReconnectInterval          time.Duration `yaml:"reconnectInterval,omitempty"`
	ExternalEndpoint           string        `yaml:"externalEndpoint,omitempty"`
	Election                   *GossipElection `yaml:"election,omitempty"`
	PvtData                    *GossipPvtData  `yaml:"pvtData,omitempty"`
	State                      *GossipState    `yaml:"state,omitempty"`
}

type GossipElection struct {
	StartupGracePeriod       time.Duration `yaml:"startupGracePeriod,omitempty"`
	MembershipSampleInterval time.Duration `yaml:"membershipSampleInterval,omitempty"`
	LeaderAliveThreshold     time.Duration `yaml:"leaderAliveThreshold,omitempty"`
	LeaderElectionDuration   time.Duration `yaml:"leaderElectionDuration,omitempty"`
}

type GossipPvtData struct {
	PullRetryThreshold                         time.Duration                   `yaml:"pullRetryThreshold,omitempty"`
	TransientstoreMaxBlockRetention            int                             `yaml:"transientstoreMaxBlockRetention,omitempty"`
	PushAckTimeout                             time.Duration                   `yaml:"pushAckTimeout,omitempty"`
	BtlPullMargin                              int                             `yaml:"btlPullMargin,omitempty"`
	ReconcileBatchSize                         int                             `yaml:"reconcileBatchSize,omitempty"`
	ReconcileSleepInterval                     time.Duration                   `yaml:"reconcileSleepInterval,omitempty"`
	ReconciliationEnabled                      bool                            `yaml:"reconciliationEnabled"`
	SkipPullingInvalidTransactionsDuringCommit bool                            `yaml:"skipPullingInvalidTransactionsDuringCommit"`
	ImplicitCollDisseminationPolicy            ImplicitCollDisseminationPolicy `yaml:"implicitCollectionDisseminationPolicy"`
}

type ImplicitCollDisseminationPolicy struct {
	RequiredPeerCount int `yaml:"requiredPeerCount,omitempty"`
	// do not tag omitempty in order to override MaxPeerCount default with 0
	MaxPeerCount int `yaml:"maxPeerCount"`
}

type GossipState struct {
	Enabled         bool          `yaml:"enabled"`
	CheckInterval   time.Duration `yaml:"checkInterval,omitempty"`
	ResponseTimeout time.Duration `yaml:"responseTimeout,omitempty"`
	BatchSize       int           `yaml:"batchSize,omitempty"`
	BlockBufferSize int           `yaml:"blockBufferSize,omitempty"`
	MaxRetries      int           `yaml:"maxRetries,omitempty"`
}

type Events struct {
	Address    string        `yaml:"address,omitempty"`
	Buffersize int           `yaml:"buffersize,omitempty"`
	Timeout    time.Duration `yaml:"timeout,omitempty"`
	Timewindow time.Duration `yaml:"timewindow,omitempty"`
	Keepalive  *Keepalive    `yaml:"keepalive,omitempty"`
}

// TLS is shared by the peer, docker, and operations sections.
type TLS struct {
	Enabled            bool      `yaml:"enabled"`
	ClientAuthRequired bool      `yaml:"clientAuthRequired"`
	CA                 *FileRef  `yaml:"ca,omitempty"`
	Cert               *FileRef  `yaml:"cert,omitempty"`
	Key                *FileRef  `yaml:"key,omitempty"`
	RootCert           *FileRef  `yaml:"rootcert,omitempty"`
	ClientRootCAs      *FilesRef `yaml:"clientRootCAs,omitempty"`
	ClientKey          *FileRef  `yaml:"clientKey,omitempty"`
	ClientCert         *FileRef  `yaml:"clientCert,omitempty"`
}

type FileRef struct {
	File string `yaml:"file,omitempty"`
}

type FilesRef struct {
	Files []string `yaml:"files,omitempty"`
}

type Authentication struct {
	Timewindow time.Duration `yaml:"timewindow,omitempty"`
}

type BCCSP struct {
	Default string            `yaml:"Default,omitempty"`
	SW      *SoftwareProvider `yaml:"SW,omitempty"`
}

type SoftwareProvider struct {
	Hash     string `yaml:"Hash,omitempty"`
	Security int    `yaml:"Security,omitempty"`
}

type DeliveryClient struct {
	ReconnectTotalTimeThreshold time.Duration      `yaml:"reconnectTotalTimeThreshold,omitempty"`
	AddressOverrides            []*AddressOverride `yaml:"addressOverrides,omitempty"`
}

type AddressOverride struct {
	From        string `yaml:"from"`
	To          string `yaml:"to"`
	CACertsFile string `yaml:"caCertsFile"`
}

type Service struct {
	Enabled       bool   `yaml:"enabled"`
	ListenAddress string `yaml:"listenAddress,omitempty"`
}

type Handlers struct {
	AuthFilters []Handler  `yaml:"authFilters,omitempty"`
	Decorators  []Handler  `yaml:"decorators,omitempty"`
	Endorsers   HandlerMap `yaml:"endorsers,omitempty"`
	Validators  HandlerMap `yaml:"validators,omitempty"`
}

type Handler struct {
	Name    string `yaml:"name,omitempty"`
	Library string `yaml:"library,omitempty"`
}

type HandlerMap map[string]Handler

type Discovery struct {
	Enabled                      bool    `yaml:"enabled"`
	AuthCacheEnabled             bool    `yaml:"authCacheEnabled"`
	AuthCacheMaxSize             int     `yaml:"authCacheMaxSize,omitempty"`
	AuthCachePurgeRetentionRatio float64 `yaml:"authCachePurgeRetentionRatio"`
	OrgMembersAllowedAccess      bool    `yaml:"orgMembersAllowedAccess"`
}

type Limits struct {
	Concurrency *Concurrency `yaml:"concurrency,omitempty"`
}

type Concurrency struct {
	EndorserService int `yaml:"endorserService,omitempty"`
	DeliverService  int `yaml:"deliverService,omitempty"`
}

type VM struct {
	Endpoint string  `yaml:"endpoint,omitempty"`
	Docker   *Docker `yaml:"docker,omitempty"`
}

type Docker struct {
	TLS          *TLS               `yaml:"tls,omitempty"`
	AttachStdout bool               `yaml:"attachStdout"`
	HostConfig   *docker.HostConfig `yaml:"hostConfig,omitempty"`
}

// Chaincode maps the `chaincode:` section, including per-language runtimes.
type Chaincode struct {
	Builder          string            `yaml:"builder,omitempty"`
	Pull             bool              `yaml:"pull"`
	Golang           *Golang           `yaml:"golang,omitempty"`
	Java             *Java             `yaml:"java,omitempty"`
	Node             *Node             `yaml:"node,omitempty"`
	InstallTimeout   time.Duration     `yaml:"installTimeout,omitempty"`
	StartupTimeout   time.Duration     `yaml:"startupTimeout,omitempty"`
	ExecuteTimeout   time.Duration     `yaml:"executeTimeout,omitempty"`
	Mode             string            `yaml:"mode,omitempty"`
	Keepalive        int               `yaml:"keepalive,omitempty"`
	System           SystemFlags       `yaml:"system,omitempty"`
	Logging          *Logging          `yaml:"logging,omitempty"`
	ExternalBuilders []ExternalBuilder `yaml:"externalBuilders"`

	ExtraProperties map[string]interface{} `yaml:",inline,omitempty"`
}

type Golang struct {
	Runtime     string `yaml:"runtime,omitempty"`
	DynamicLink bool   `yaml:"dynamicLink"`

	ExtraProperties map[string]interface{} `yaml:",inline,omitempty"`
}

type Java struct {
	Runtime string `yaml:"runtime,omitempty"`

	ExtraProperties map[string]interface{} `yaml:",inline,omitempty"`
}

type Node struct {
	Runtime string `yaml:"runtime,omitempty"`

	ExtraProperties map[string]interface{} `yaml:",inline,omitempty"`
}

type ExternalBuilder struct {
	EnvironmentWhitelist []string `yaml:"environmentWhitelist,omitempty"`
	Name                 string   `yaml:"name,omitempty"`
	Path                 string   `yaml:"path,omitempty"`
}

// SystemFlags toggles the system chaincodes by name.
type SystemFlags struct {
	NEWLIFECYCLE string `yaml:"_lifecycle,omitempty"`
	CSCC         string `yaml:"cscc,omitempty"`
	LSCC         string `yaml:"lscc,omitempty"`
	ESCC         string `yaml:"escc,omitempty"`
	VSCC         string `yaml:"vscc,omitempty"`
	QSCC         string `yaml:"qscc,omitempty"`
}

type Ledger struct {
	// Blockchain - not sure if it's needed
	State   *StateConfig   `yaml:"state,omitempty"`
	History *HistoryConfig `yaml:"history,omitempty"`
}

type StateConfig struct {
	StateDatabase string         `yaml:"stateDatabase,omitempty"`
	CouchDBConfig *CouchDBConfig `yaml:"couchDBConfig,omitempty"`
}

type CouchDBConfig struct {
	CouchDBAddress          string        `yaml:"couchDBAddress,omitempty"`
	Username                string        `yaml:"username,omitempty"`
	Password                string        `yaml:"password,omitempty"`
	MaxRetries              int           `yaml:"maxRetries,omitempty"`
	MaxRetriesOnStartup     int           `yaml:"maxRetriesOnStartup,omitempty"`
	RequestTimeout          time.Duration `yaml:"requestTimeout,omitempty"`
	QueryLimit              int           `yaml:"queryLimit,omitempty"`
	MaxBatchUpdateSize      int           `yaml:"maxBatchUpdateSize,omitempty"`
	// NOTE(review): the tag below seems to drop the 'r' in "After"
	// ("warmIndexesAfteNBlocks") -- confirm against Fabric's core.yaml key
	// before changing, since fixing it silently alters the emitted YAML.
	WarmIndexesAfterNBlocks int           `yaml:"warmIndexesAfteNBlocks,omitempty"`
}

type HistoryConfig struct {
	EnableHistoryDatabase bool `yaml:"enableHistoryDatabase"`
}

type Operations struct {
	ListenAddress string `yaml:"listenAddress,omitempty"`
	TLS           *TLS   `yaml:"tls"`
}

type Metrics struct {
	Provider string  `yaml:"provider"`
	Statsd   *Statsd `yaml:"statsd,omitempty"`
}

type Statsd struct {
	Network       string        `yaml:"network,omitempty"`
	Address       string        `yaml:"address,omitempty"`
	WriteInterval time.Duration `yaml:"writeInterval,omitempty"`
	Prefix        string        `yaml:"prefix,omitempty"`
}

//CoreConfig --
// CoreConfig loads ./sampleconfig/core.yaml and overlays operator-specific
// defaults (endpoints, TLS file paths, gossip/leader-election flags, chaincode
// runtime images) driven by the given network spec.
func CoreConfig(nsConfig networkspec.Config) (Core, error) {
	var coreConfig Core
	filePath := "./sampleconfig/core.yaml"
	contents, err := ioutil.ReadFile(filePath)
	if err != nil {
		return coreConfig, err
	}
	err = yaml.Unmarshal(contents, &coreConfig)
	if err != nil {
		return coreConfig, err
	}
	// NOTE(review): the overrides below dereference nested pointers
	// (coreConfig.VM, .Peer.Gossip.State, ...) and assume the sample
	// core.yaml populates every one of them -- a missing section would panic.
	coreConfig.VM.Endpoint = "localhost:2375"
	coreConfig.Peer.ChaincodeListenAddress = "0.0.0.0:7052"
	if nsConfig.GossipEnable {
		coreConfig.Peer.Gossip.State.Enabled = true
		coreConfig.Peer.Gossip.UseLeaderElection = true
		coreConfig.Peer.Gossip.OrgLeader = false
	} else {
		// with gossip off, every peer is a static org leader
		coreConfig.Peer.Gossip.State.Enabled = false
		coreConfig.Peer.Gossip.UseLeaderElection = false
		coreConfig.Peer.Gossip.OrgLeader = true
	}
	if nsConfig.TLS == "true" || nsConfig.TLS == "mutual" {
		coreConfig.Peer.TLS.Enabled = true
	} else {
		coreConfig.Peer.TLS.Enabled = false
	}
	coreConfig.Peer.TLS.Cert.File = "/etc/hyperledger/fabric/artifacts/tls/server.crt"
	coreConfig.Peer.TLS.Key.File = "/etc/hyperledger/fabric/artifacts/tls/server.key"
	coreConfig.Peer.ChaincodeAddress = "localhost:7052"
	// "1500s" is a constant, so the parse error is deliberately ignored
	ccExecuteTimeout, _ := time.ParseDuration("1500s")
	coreConfig.Chaincode.ExecuteTimeout = ccExecuteTimeout
	coreConfig.Peer.MSPConfigPath = "/etc/hyperledger/fabric/artifacts/msp"
	coreConfig.Peer.FileSystemPath = "/shared/data"
	coreConfig.Operations.TLS.Enabled = false
	coreConfig.Metrics.Provider = "prometheus"
	if nsConfig.DBType == "couchdb" {
		coreConfig.Ledger.State.StateDatabase = "CouchDB"
	}
	// Point each chaincode runtime at the image matching the requested org/tag.
	coreConfig.Chaincode.Builder = nl.DockerImage("ccenv", nsConfig.DockerOrg, nsConfig.DockerTag, nsConfig.DockerImages.Ccenv)
	coreConfig.Chaincode.Golang.Runtime = nl.DockerImage("baseos", nsConfig.DockerOrg, nsConfig.DockerTag, nsConfig.DockerImages.Baseos)
	coreConfig.Chaincode.Java.Runtime = nl.DockerImage("javaenv", nsConfig.DockerOrg, nsConfig.DockerTag, nsConfig.DockerImages.Javaenv)
	coreConfig.Chaincode.Node.Runtime = nl.DockerImage("nodeenv", nsConfig.DockerOrg, nsConfig.DockerTag, nsConfig.DockerImages.Nodeenv)
	return coreConfig, nil
}

//GenerateCorePeerConfig --
// GenerateCorePeerConfig specializes the given Core for one named peer
// (addresses, MSP id, gossip endpoints, CouchDB address, operations port) and
// writes it as core-<name>.yaml under that peer's crypto-config directory.
// NOTE(review): unconditionally writes Ledger.State.CouchDBConfig, so it
// presumes CouchDBConfig is non-nil even for non-couchdb networks -- confirm
// the sample core.yaml always carries that section.
func GenerateCorePeerConfig(name, orgName, mspID, artifactsLocation string, port int32, metricsPort int32, coreConfig Core) error {
	coreConfig.Peer.ListenAddress = fmt.Sprintf("0.0.0.0:%d", port)
	coreConfig.Peer.TLS.RootCert.File = fmt.Sprintf("/etc/hyperledger/fabric/artifacts/msp/tlscacerts/tlsca.%s-cert.pem", orgName)
	coreConfig.Peer.ID = name
	coreConfig.Peer.Gossip.ExternalEndpoint = fmt.Sprintf("%s:%d", name, port)
	coreConfig.Peer.Address = fmt.Sprintf("%s:%d", name, port)
	coreConfig.Peer.LocalMSPID = mspID
	coreConfig.Peer.Gossip.Bootstrap = fmt.Sprintf("%s:%d", name, port)
	coreConfig.Ledger.State.CouchDBConfig.CouchDBAddress = fmt.Sprintf("couchdb-%s:5984", name)
	coreConfig.Ledger.State.CouchDBConfig.Username = "admin"
	coreConfig.Ledger.State.CouchDBConfig.Password = "adminpw"
	coreConfig.Operations.ListenAddress = fmt.Sprintf(":%d", metricsPort)
	d, err := yaml.Marshal(&coreConfig)
	if err != nil {
		return err
	}
	cryptoConfigPath := paths.CryptoConfigDir(artifactsLocation)
	path := paths.JoinPath(cryptoConfigPath, fmt.Sprintf("peerOrganizations/%s/peers/%s.%s", orgName, name, orgName))
	inputPath := paths.JoinPath(path, fmt.Sprintf("core-%s.yaml", name))
	err = ioutil.WriteFile(inputPath, d, 0644)
	if err != nil {
		return err
	}
	return nil
}
def copy(self, ID, CAS=None, **data): new = super().__new__(self.__class__) getfield = getattr setfield = setattr for field in self.__slots__: value = getfield(self, field, None) setfield(new, field, copy_maybe(value)) for field in _names[:-1]: setfield(new, field, None) new._ID = ID new._CAS = CAS or ID TDependentProperty.RAISE_PROPERTY_CALCULATION_ERROR = False new._init_energies(new.Cn, new.Hvap, new.Psat, new.Hfus, new.Sfus, new.Tm, new.Tb, new.eos, new.phase_ref) TDependentProperty.RAISE_PROPERTY_CALCULATION_ERROR = True new._label_handles() for i,j in data.items(): setfield(new, i , j) return new
N = int(input()) a = list(map(int, input().split())) brick = [0] number = 1 import sys if min(a) > 1: print('-1') sys.exit() for i in range(N): if a[i] == number: brick.append(number) number += 1 print(N - max(brick))
package common import ( "errors" "github.com/aws/aws-sdk-go/aws/credentials" "log" "os" ) func GetCredentialsFromEnvironment(accessKeyId, secretKey string) (*credentials.Credentials, error) { accessKeyVal := os.Getenv(accessKeyId) secretKeyVal := os.Getenv(secretKey) if accessKeyVal == "" || secretKeyVal == "" { log.Println("Could not load " + accessKeyId + " (or) " + secretKey + " from local env") return nil, errors.New("Could not load " + accessKeyId + " (or) " + secretKey + " from local env") } else { return credentials.NewStaticCredentials(accessKeyVal, secretKeyVal, ""), nil } }
<filename>server/memfs.py #!/usr/bin/env python """In-memory filesystem, pre-populated with a couple of files. Not complete (e.g. you can remove directories which aren't empty), but works well enough to use emacs and gcc.""" # This file should be available from # http://www.pobox.com/~asl2/software/Pinefs # and is licensed under the X Consortium license: # Copyright (c) 2003, <NAME>, <EMAIL> # All rights reserved. # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, and/or sell copies of the Software, and to permit persons # to whom the Software is furnished to do so, provided that the above # copyright notice(s) and this permission notice appear in all copies of # the Software and that both the above copyright notice(s) and this # permission notice appear in supporting documentation. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT # OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL # INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING # FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, # NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION # WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # Except as contained in this notice, the name of a copyright holder # shall not be used in advertising or otherwise to promote the sale, use # or other dealings in this Software without prior written authorization # of the copyright holder. 
import rfc1094 import fsbase import array class FileObj(fsbase.FileObj): fileid_ctr = fsbase.Ctr () def __init__ (self, **kw): self.fileid = self.fileid_ctr.next () if not kw.has_key ('data'): kw ['data'] = '' if kw['type'] == rfc1094.NFDIR and not kw.has_key ('dir'): kw['dir'] = {} self.data = array.array ('b') self.data.fromstring (kw['data']) del kw['data'] for k,v in kw.items (): setattr (self, k, v) fsbase.FileObj.__init__ (self) def read (self, offset, count): return (self.data [offset: offset + count]).tostring () def write (self, offset, newdata): n = array.array ('b') n.fromstring (newdata) if offset > len (self.data): extend_len = offset - len (self.data) fill = array.array ('b') fill.fromlist ([0] * extend_len) self.data.extend (fill) self.data [offset:offset + len (newdata)] = n self.set_size () def get_dir (self): return self.dir def truncate (self): self.data = array.array ('b') def mk_link (self, name, from_fh): self.dir [name] = from_fh class FileSystem: def __init__ (self, fh_ctr = fsbase.Ctr ()): self._fh_ctr = fh_ctr self._fils = {} self._root, _ = self.create_fil (None, '', type = rfc1094.NFDIR, size = 4) self.create_fil (self._root, 'foo', type = rfc1094.NFREG, data = 40 * 'A') self.create_fil (self._root, 'bar', type = rfc1094.NFREG, data = 20 * 'B') dir_fh, _ = self.create_fil (self._root, 'dir', type = rfc1094.NFDIR, size = 4) self.create_fil (self._root, 'baz', type = rfc1094.NFREG, data = 20 * 'C') def mount (self, dirpath): if dirpath == '/': return self._root return None def _register (self, fil): fh = self._fh_ctr.next_fh () self._fils [fh] = fil return fh def get_fil (self, fh): return self._fils.get (fh, None) def _add_fil (self, dir_fh, name, new_fh): dir_fil = self.get_fil (dir_fh) assert (dir_fil <> None) dir = dir_fil.get_dir () if dir.has_key (name): raise fsbase.NFSError (rfc1094.NFSERR_EXIST) dir_fil.get_dir ()[name] = new_fh def create_fil (self, dir_fh, name, **kw): fil = FileObj (**kw) fh = self._register (fil) if dir_fh <> 
None: # if dir_fh == None, we're creating root directory self._add_fil (dir_fh, name, fh) return fh, fil def rename (self, old_dir, old_name, new_dir, new_name): from_dir_fil = self.get_fil (old_dir) to_dir_fil = self.get_fil (new_dir) move_fil = from_dir_fil.get_dir() [old_name] to_dir_fil.get_dir () [new_name] = move_fil del from_dir_fil.get_dir() [old_name] def remove (self, dir_fh, name): dir_fil = self.get_fil (dir_fh) if dir_fil == None: # XXX should raise error? return fh = dir_fil.get_dir ().get (name, None) if fh == None: raise fsbase.NFSError (rfc1094.NFSERROR_NOENT) fil = self.get_fil (fh) if fil.type == rfc1094.NFDIR: if fil.dir <> {}: raise fsbase.NFSError (rfc1094.NFSERR_NOTEMPTY) del self._fils [fh] del dir_fil.get_dir() [name]
/**
 * JAXB adapter for {@link Geometry}, in order to integrate the value in an element complying with OGC/ISO standard.
 * The geometry element names are usually prefixed by {@code gml:}.
 *
 * <p>The default implementation does almost nothing. The geometry objects will <strong>not</strong>
 * create the expected {@link JAXBElement} type. This class is only a hook to be extended by more
 * specialized subclasses in GML modules.</p>
 *
 * @author Guilhem Legal (Geomatys)
 * @since 0.3
 * @version 0.3
 * @module
 */
public class GM_Object extends XmlAdapter<GM_Object, Geometry> {
    /**
     * The Geometry value covered by a {@code gml:**} element.
     */
    @XmlElementRef(name = "AbstractGeometry", namespace = Namespaces.GML, type = JAXBElement.class)
    protected JAXBElement<? extends Geometry> geometry;

    /**
     * Empty constructor for JAXB and subclasses only.
     */
    public GM_Object() {
    }

    /**
     * Converts an adapter read from an XML stream to the GeoAPI interface which will
     * contain this value. JAXB calls automatically this method at unmarshalling time.
     *
     * @param value The adapter for a geometry value.
     * @return An instance of the GeoAPI interface which represents the geometry value,
     *         or {@code null} if the adapter or its wrapped element is null.
     */
    @Override
    public final Geometry unmarshal(final GM_Object value) {
        if (value != null) {
            final JAXBElement<? extends Geometry> g = value.geometry;
            if (g != null) {
                return g.getValue();
            }
        }
        return null;
    }

    /**
     * Converts a GeoAPI interface to the appropriate adapter for the way it will be
     * marshalled into an XML file or stream. JAXB calls automatically this method at
     * marshalling time.
     *
     * @param value The geometry value, here the interface.
     * @return The adapter for the given value, or {@code null} if the value is null.
     */
    @Override
    public final GM_Object marshal(final Geometry value) {
        if (value == null) {
            return null;
        }
        return wrap(value);
    }

    /**
     * Returns the geometry value to be covered by a {@code gml:**} element.
     * The default implementation returns {@code null} in all cases. Subclasses
     * must override this method in order to provide useful marshalling.
     *
     * @param value The value to marshal.
     * @return The adapter which covers the geometry value.
     */
    protected GM_Object wrap(Geometry value) {
        return null;
    }
}
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gitiles;

import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;

import com.google.common.net.HttpHeaders;
import com.google.gitiles.GitilesView.Type;

import org.eclipse.jgit.internal.storage.dfs.DfsRepository;
import org.eclipse.jgit.internal.storage.dfs.DfsRepositoryDescription;
import org.eclipse.jgit.internal.storage.dfs.InMemoryRepository;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.io.IOException;

import javax.servlet.ServletException;

/** Tests for the view filter. */
@RunWith(JUnit4.class)
public class ViewFilterTest {
  // In-memory repository served by TestViewFilter; recreated per test.
  private TestRepository<DfsRepository> repo;

  @Before
  public void setUp() throws Exception {
    repo = new TestRepository<DfsRepository>(
        new InMemoryRepository(new DfsRepositoryDescription("repo")));
  }

  @Test
  public void noCommand() throws Exception {
    assertThat(getView("/").getType()).isEqualTo(Type.HOST_INDEX);
    assertThat(getView("/repo").getType()).isEqualTo(Type.REPOSITORY_INDEX);
    assertThat(getView("/repo/+")).isNull();
    assertThat(getView("/repo/+/")).isNull();
  }

  @Test
  public void autoCommand() throws Exception {
    RevCommit parent = repo.commit().create();
    RevCommit master = repo.branch("refs/heads/master").commit().parent(parent).create();
    String hex = master.name();
    // A branch whose name is a hex prefix of the commit, to exercise ambiguity.
    String hexBranch = hex.substring(0, 10);
    repo.branch(hexBranch).commit().create();

    assertThat(getView("/repo/+/master").getType()).isEqualTo(Type.REVISION);
    assertThat(getView("/repo/+/" + hexBranch).getType()).isEqualTo(Type.REVISION);
    assertThat(getView("/repo/+/" + hex).getType()).isEqualTo(Type.REVISION);
    assertThat(getView("/repo/+/" + hex.substring(0, 7)).getType()).isEqualTo(Type.REVISION);
    assertThat(getView("/repo/+/master/").getType()).isEqualTo(Type.PATH);
    assertThat(getView("/repo/+/" + hex + "/").getType()).isEqualTo(Type.PATH);
    assertThat(getView("/repo/+/" + hex + "/index.c").getType()).isEqualTo(Type.PATH);
    assertThat(getView("/repo/+/" + hex + "/index.md").getType()).isEqualTo(Type.DOC);
    assertThat(getView("/repo/+/master^..master").getType()).isEqualTo(Type.DIFF);
    assertThat(getView("/repo/+/master^..master/").getType()).isEqualTo(Type.DIFF);
    assertThat(getView("/repo/+/" + parent.name() + ".." + hex + "/").getType())
        .isEqualTo(Type.DIFF);
  }

  @Test
  public void hostIndex() throws Exception {
    GitilesView view = getView("/");
    assertThat(view.getType()).isEqualTo(Type.HOST_INDEX);
    assertThat(view.getHostName()).isEqualTo("test-host");
    assertThat(view.getRepositoryName()).isNull();
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isNull();
  }

  @Test
  public void repositoryIndex() throws Exception {
    GitilesView view = getView("/repo");
    assertThat(view.getType()).isEqualTo(Type.REPOSITORY_INDEX);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isNull();
  }

  @Test
  public void refs() throws Exception {
    GitilesView view;

    view = getView("/repo/+refs");
    assertThat(view.getType()).isEqualTo(Type.REFS);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+refs/");
    assertThat(view.getType()).isEqualTo(Type.REFS);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+refs/heads");
    assertThat(view.getType()).isEqualTo(Type.REFS);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("heads");

    view = getView("/repo/+refs/heads/");
    assertThat(view.getType()).isEqualTo(Type.REFS);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("heads");

    view = getView("/repo/+refs/heads/master");
    assertThat(view.getType()).isEqualTo(Type.REFS);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("heads/master");
  }

  @Test
  public void describe() throws Exception {
    GitilesView view;
    assertThat(getView("/repo/+describe")).isNull();
    assertThat(getView("/repo/+describe/")).isNull();

    view = getView("/repo/+describe/deadbeef");
    assertThat(view.getType()).isEqualTo(Type.DESCRIBE);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("deadbeef");

    view = getView("/repo/+describe/refs/heads/master~3^~2");
    assertThat(view.getType()).isEqualTo(Type.DESCRIBE);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("refs/heads/master~3^~2");
  }

  @Test
  public void showBranches() throws Exception {
    RevCommit master = repo.branch("refs/heads/master").commit().create();
    RevCommit stable = repo.branch("refs/heads/stable").commit().create();
    GitilesView view;

    view = getView("/repo/+show/master");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/heads/master");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("heads/master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/refs/heads/master");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("refs/heads/master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/stable");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("stable");
    assertThat(view.getRevision().getId()).isEqualTo(stable);
    assertThat(view.getPathPart()).isNull();

    assertThat(getView("/repo/+show/stable..master")).isNull();
  }

  @Test
  public void ambiguousBranchAndTag() throws Exception {
    RevCommit branch = repo.branch("refs/heads/name").commit().create();
    RevCommit tag = repo.branch("refs/tags/name").commit().create();
    GitilesView view;

    // A bare name resolves to the tag when both a branch and a tag match.
    view = getView("/repo/+show/name");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("name");
    assertThat(view.getRevision().getId()).isEqualTo(tag);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/heads/name");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("heads/name");
    assertThat(view.getRevision().getId()).isEqualTo(branch);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/refs/heads/name");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("refs/heads/name");
    assertThat(view.getRevision().getId()).isEqualTo(branch);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/tags/name");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("tags/name");
    assertThat(view.getRevision().getId()).isEqualTo(tag);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+show/refs/tags/name");
    assertThat(view.getType()).isEqualTo(Type.REVISION);
    assertThat(view.getRevision().getName()).isEqualTo("refs/tags/name");
    assertThat(view.getRevision().getId()).isEqualTo(tag);
    assertThat(view.getPathPart()).isNull();
  }

  @Test
  public void path() throws Exception {
    RevCommit master = repo.branch("refs/heads/master").commit().create();
    repo.branch("refs/heads/stable").commit().create();
    GitilesView view;

    view = getView("/repo/+show/master/");
    assertThat(view.getType()).isEqualTo(Type.PATH);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+show/master/foo");
    assertThat(view.getType()).isEqualTo(Type.PATH);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("foo");

    view = getView("/repo/+show/master/foo/");
    assertThat(view.getType()).isEqualTo(Type.PATH);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("foo");

    view = getView("/repo/+show/master/foo/bar");
    assertThat(view.getType()).isEqualTo(Type.PATH);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("foo/bar");

    assertThat(getView("/repo/+show/stable..master/foo")).isNull();
  }

  @Test
  public void doc() throws Exception {
    RevCommit master = repo.branch("refs/heads/master").commit().create();
    repo.branch("refs/heads/stable").commit().create();
    GitilesView view;

    view = getView("/repo/+doc/master/");
    assertThat(view.getType()).isEqualTo(Type.DOC);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+doc/master/index.md");
    assertThat(view.getType()).isEqualTo(Type.DOC);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("index.md");

    view = getView("/repo/+doc/master/foo/");
    assertThat(view.getType()).isEqualTo(Type.DOC);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("foo");

    view = getView("/repo/+doc/master/foo/bar.md");
    assertThat(view.getType()).isEqualTo(Type.DOC);
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getPathPart()).isEqualTo("foo/bar.md");

    assertThat(getView("/repo/+doc/stable..master/foo")).isNull();
  }

  @Test
  public void multipleSlashes() throws Exception {
    repo.branch("refs/heads/master").commit().create();
    // Doubled slashes collapse for index views but invalidate revision paths.
    assertThat(getView("//").getType()).isEqualTo(Type.HOST_INDEX);
    assertThat(getView("//repo").getType()).isEqualTo(Type.REPOSITORY_INDEX);
    assertThat(getView("//repo//").getType()).isEqualTo(Type.REPOSITORY_INDEX);
    assertThat(getView("/repo/+//master")).isNull();
    assertThat(getView("/repo/+/refs//heads//master")).isNull();
    assertThat(getView("/repo/+//master//")).isNull();
    assertThat(getView("/repo/+//master/foo//bar")).isNull();
  }

  @Test
  public void diff() throws Exception {
    RevCommit parent = repo.commit().create();
    RevCommit master = repo.branch("refs/heads/master").commit().parent(parent).create();
    GitilesView view;

    view = getView("/repo/+diff/master^..master");
    assertThat(view.getType()).isEqualTo(Type.DIFF);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+diff/master^..master/");
    assertThat(view.getType()).isEqualTo(Type.DIFF);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+diff/master^..master/foo");
    assertThat(view.getType()).isEqualTo(Type.DIFF);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("foo");

    view = getView("/repo/+diff/refs/heads/master^..refs/heads/master");
    assertThat(view.getType()).isEqualTo(Type.DIFF);
    assertThat(view.getRevision().getName()).isEqualTo("refs/heads/master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("refs/heads/master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("");
  }

  @Test
  public void diffAgainstEmptyCommit() throws Exception {
    RevCommit master = repo.branch("refs/heads/master").commit().create();
    GitilesView view = getView("/repo/+diff/master^!");
    assertThat(view.getType()).isEqualTo(Type.DIFF);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("");
  }

  @Test
  public void log() throws Exception {
    RevCommit parent = repo.commit().create();
    RevCommit master = repo.branch("refs/heads/master").commit().parent(parent).create();
    GitilesView view;

    view = getView("/repo/+log");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+log/");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+log/master");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+log/master/");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+log/master/foo");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("foo");

    view = getView("/repo/+log/master^..master");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+log/master^..master/");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("");

    view = getView("/repo/+log/master^..master/foo");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("foo");

    view = getView("/repo/+log/refs/heads/master^..refs/heads/master");
    assertThat(view.getType()).isEqualTo(Type.LOG);
    assertThat(view.getRevision().getName()).isEqualTo("refs/heads/master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision().getName()).isEqualTo("refs/heads/master^");
    assertThat(view.getOldRevision().getId()).isEqualTo(parent);
    assertThat(view.getPathPart()).isEqualTo("");
  }

  @Test
  public void archive() throws Exception {
    RevCommit master = repo.branch("refs/heads/master").commit().create();
    repo.branch("refs/heads/branch").commit().create();
    GitilesView view;

    assertThat(getView("/repo/+archive")).isNull();
    assertThat(getView("/repo/+archive/")).isNull();
    assertThat(getView("/repo/+archive/master..branch")).isNull();
    assertThat(getView("/repo/+archive/master.foo")).isNull();
    assertThat(getView("/repo/+archive/master.zip")).isNull();
    assertThat(getView("/repo/+archive/master/.tar.gz")).isNull();
    assertThat(getView("/repo/+archive/master/foo/.tar.gz")).isNull();

    view = getView("/repo/+archive/master.tar.gz");
    assertThat(view.getType()).isEqualTo(Type.ARCHIVE);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getExtension()).isEqualTo(".tar.gz");
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+archive/master.tar.bz2");
    assertThat(view.getType()).isEqualTo(Type.ARCHIVE);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getExtension()).isEqualTo(".tar.bz2");
    assertThat(view.getPathPart()).isNull();

    view = getView("/repo/+archive/master/foo/bar.tar.gz");
    assertThat(view.getType()).isEqualTo(Type.ARCHIVE);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getExtension()).isEqualTo(".tar.gz");
    assertThat(view.getPathPart()).isEqualTo("foo/bar");
  }

  @Test
  public void blame() throws Exception {
    RevCommit master = repo.branch("refs/heads/master").commit().create();
    repo.branch("refs/heads/branch").commit().create();
    GitilesView view;

    assertThat(getView("/repo/+blame")).isNull();
    assertThat(getView("/repo/+blame/")).isNull();
    assertThat(getView("/repo/+blame/master")).isNull();
    assertThat(getView("/repo/+blame/master..branch")).isNull();

    view = getView("/repo/+blame/master/foo/bar");
    assertThat(view.getType()).isEqualTo(Type.BLAME);
    assertThat(view.getRepositoryName()).isEqualTo("repo");
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getRevision().getId()).isEqualTo(master);
    assertThat(view.getOldRevision()).isEqualTo(Revision.NULL);
    assertThat(view.getPathPart()).isEqualTo("foo/bar");
  }

  @Test
  public void testNormalizeParents() throws Exception {
    RevCommit parent = repo.commit().create();
    RevCommit master = repo.branch("refs/heads/master").commit().parent(parent).create();
    GitilesView view;

    assertThat(getView("/repo/+/master").toUrl()).isEqualTo("/b/repo/+/master");
    assertThat(getView("/repo/+/" + master.name()).toUrl()).isEqualTo("/b/repo/+/" + master.name());
    // Parent expressions in single-revision views redirect to the resolved SHA.
    assertThat(getRedirectUrl("/repo/+/master~")).isEqualTo("/b/repo/+/" + parent.name());
    assertThat(getRedirectUrl("/repo/+/master^")).isEqualTo("/b/repo/+/" + parent.name());

    view = getView("/repo/+log/master~..master/");
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getOldRevision().getName()).isEqualTo("master~");

    view = getView("/repo/+log/master^!/");
    assertThat(view.getRevision().getName()).isEqualTo("master");
    assertThat(view.getOldRevision().getName()).isEqualTo("master^");
  }

  // Serves the path through TestViewFilter and returns the 302 Location header.
  private String getRedirectUrl(String pathAndQuery) throws ServletException, IOException {
    TestViewFilter.Result result = TestViewFilter.service(repo, pathAndQuery);
    assertThat(result.getResponse().getStatus()).isEqualTo(302);
    return result.getResponse().getHeader(HttpHeaders.LOCATION);
  }

  // Serves the path and returns the parsed view; fails on any redirect status.
  private GitilesView getView(String pathAndQuery) throws ServletException, IOException {
    TestViewFilter.Result result = TestViewFilter.service(repo, pathAndQuery);
    FakeHttpServletResponse resp = result.getResponse();
    assertWithMessage("expected non-redirect status, got " + resp.getStatus())
        .that(resp.getStatus() < 300 || resp.getStatus() >= 400).isTrue();
    return result.getView();
  }
}
// this call to the parent scope. public class LexicalSearchConstInstr extends OneOperandResultBaseInstr implements FixedArityInstr { private RubySymbol constantName; // Constant caching private final ConstantLookupSite site; public LexicalSearchConstInstr(Variable result, Operand definingScope, RubySymbol constantName) { super(Operation.LEXICAL_SEARCH_CONST, result, definingScope); assert result != null: "LexicalSearchConstInstr result is null"; this.constantName = constantName; this.site = new ConstantLookupSite(constantName); } public Operand getDefiningScope() { return getOperand1(); } public String getId() { return constantName.idString(); } public RubySymbol getName() { return constantName; } @Override public String[] toStringNonOperandArgs() { return new String[] { "name: " + constantName}; } @Override public Instr clone(CloneInfo ii) { return new LexicalSearchConstInstr(ii.getRenamedVariable(result), getDefiningScope().cloneForInlining(ii), constantName); } @Override public void encode(IRWriterEncoder e) { super.encode(e); e.encode(getName()); } public static LexicalSearchConstInstr decode(IRReaderDecoder d) { return new LexicalSearchConstInstr(d.decodeVariable(), d.decodeOperand(), d.decodeSymbol()); } @Override public Object interpret(ThreadContext context, StaticScope currScope, DynamicScope currDynScope, IRubyObject self, Object[] temp) { return site.lexicalSearchConst(context, (StaticScope) getDefiningScope().retrieve(context, self, currScope, currDynScope, temp)); } @Override public void visit(IRVisitor visitor) { visitor.LexicalSearchConstInstr(this); } }
def threaded_reader(items_to_read, reader, max_threads=4): thread_pool = [] def thread_process(): try: source = source_queue.pop(0) except IndexError: source = None while source: source_reader = reader.read_from_source(source) for chunk in dictset.page_dictset(source_reader, 256): reply_queue.put(chunk) try: source = source_queue.pop(0) except IndexError: source = None source_queue = items_to_read.copy() t = min(len(source_queue), max_threads, 8) reply_queue = queue.Queue(t * 8) for _ in range(t): thread = threading.Thread(target=thread_process) thread.daemon = True thread.start() thread_pool.append(thread) time.sleep(0.01) while any([t.is_alive() for t in thread_pool]) or not(reply_queue.empty()): try: records = reply_queue.get(timeout=10) yield from records except queue.Empty: pass
import EncodingDown from "encoding-down";
import {LevelDb} from "level";
import {CodecOptions} from "level-codec";
import {LevelUp} from "levelup";

// Opens (or creates) a LevelDB store at `location`; `options` configures
// key/value codecs. K and V are the decoded key and value types.
declare function level<K, V>(location: string, options?: CodecOptions): LevelDb<K, V>;

declare namespace level {
    // A levelup handle wrapping an encoding-down store.
    type LevelDb<K = unknown, V = unknown> = LevelUp<EncodingDown<K, V>>;
}

export = level;
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package kg.apc.perfmon.metrics.jmx;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

/**
 * Aggregates every JMX data-provider test into one JUnit 3 suite.
 *
 * @author undera
 */
public class JmxSuite extends TestCase {

    public JmxSuite(String testName) {
        super(testName);
    }

    /**
     * Builds the composite suite containing all JMX-related member suites.
     */
    public static Test suite() {
        TestSuite jmxSuite = new TestSuite("JmxSuite");
        Test[] members = {
            MemoryDataProviderTest.suite(),
            CompilerDataProviderTest.suite(),
            MemoryPoolDataProviderTest.suite(),
            GCDataProviderTest.suite(),
            AbstractJMXDataProviderTest.suite(),
            ClassesDataProviderTest.suite(),
            JMXConnectorHelperTest.suite(),
        };
        for (Test member : members) {
            jmxSuite.addTest(member);
        }
        return jmxSuite;
    }

    protected void setUp() throws Exception {
        super.setUp();
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }
}
// Copyright 2017 The Walk Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package main

import (
	"github.com/dearzhp/walk"
	. "github.com/dearzhp/walk/declarative"
)

func main() {
	// Declarative UI: a window with a five-vertex gradient-mesh background,
	// hosting a GradientComposite whose orientation and endpoint colors are
	// data-bound to the controls declared below.
	MainWindow{
		Title:   "Walk GradientComposite Example",
		MinSize: Size{400, 0},
		// Background mesh: four corner vertices plus a white center
		// (index 2), triangulated as four triangles fanning around it.
		Background: GradientBrush{
			Vertexes: []walk.GradientVertex{
				{X: 0, Y: 0, Color: walk.RGB(255, 255, 127)},
				{X: 1, Y: 0, Color: walk.RGB(127, 191, 255)},
				{X: 0.5, Y: 0.5, Color: walk.RGB(255, 255, 255)},
				{X: 1, Y: 1, Color: walk.RGB(127, 255, 127)},
				{X: 0, Y: 1, Color: walk.RGB(255, 127, 127)},
			},
			Triangles: []walk.GradientTriangle{
				{0, 1, 2},
				{1, 3, 2},
				{3, 4, 2},
				{4, 0, 2},
			},
		},
		Layout: HBox{Margins: Margins{100, 100, 100, 100}},
		Children: []Widget{
			GradientComposite{
				Border: true,
				// "rgb" in the bindings is resolved through Functions below.
				Vertical: Bind("verticalCB.Checked"),
				Color1:   Bind("rgb(c1RedSld.Value, c1GreenSld.Value, c1BlueSld.Value)"),
				Color2:   Bind("rgb(c2RedSld.Value, c2GreenSld.Value, c2BlueSld.Value)"),
				Layout:   HBox{},
				Children: []Widget{
					GroupBox{
						Title:  "Gradient Parameters",
						Layout: VBox{},
						Children: []Widget{
							CheckBox{Name: "verticalCB", Text: "Vertical", Checked: true},
							GroupBox{
								Title:  "Color1",
								Layout: Grid{Columns: 2},
								Children: []Widget{
									Label{Text: "Red:"},
									Slider{Name: "c1RedSld", Tracking: true, MaxValue: 255, Value: 95},
									Label{Text: "Green:"},
									Slider{Name: "c1GreenSld", Tracking: true, MaxValue: 255, Value: 191},
									Label{Text: "Blue:"},
									Slider{Name: "c1BlueSld", Tracking: true, MaxValue: 255, Value: 255},
								},
							},
							GroupBox{
								Title:  "Color2",
								Layout: Grid{Columns: 2},
								Children: []Widget{
									Label{Text: "Red:"},
									Slider{Name: "c2RedSld", Tracking: true, MaxValue: 255, Value: 239},
									Label{Text: "Green:"},
									Slider{Name: "c2GreenSld", Tracking: true, MaxValue: 255, Value: 63},
									Label{Text: "Blue:"},
									Slider{Name: "c2BlueSld", Tracking: true, MaxValue: 255, Value: 0},
								},
							},
						},
					},
				},
			},
		},
		// Expression function used by the Color1/Color2 bindings above;
		// binding arguments arrive as float64 and are narrowed to bytes.
		Functions: map[string]func(args ...interface{}) (interface{}, error){
			"rgb": func(args ...interface{}) (interface{}, error) {
				return walk.RGB(byte(args[0].(float64)), byte(args[1].(float64)), byte(args[2].(float64))), nil
			},
		},
	}.Run()
}
<filename>libdtn-common/src/main/java/io/left/rightmesh/libdtn/common/data/UnknownExtensionBlock.java package io.left.rightmesh.libdtn.common.data; import io.left.rightmesh.libdtn.common.data.blob.Blob; /** * UnknownExtensionBlock is used to create a generic Extension CanonicalBlock in case when a block * type is unknown. * * @author <NAME> on 20/07/18. */ public class UnknownExtensionBlock extends BlockBlob { /** * Constructor: creates an empty UnknownExtensionBlock. * * @param type of the block */ public UnknownExtensionBlock(int type) { super(type); } /** * Constructor: creates an UnknownExtensionBlock with a Blob as data. * * @param type of the block * @param data payload */ public UnknownExtensionBlock(int type, Blob data) { super(type, data); } }
#ifndef HTTPD_HTTPD_REQ_PARSER_H
#define HTTPD_HTTPD_REQ_PARSER_H

#include <glib.h>

/* Parse states reported by req_parser_status(). */
#define REQ_PARSER_PARSE_OK 0
#define REQ_PARSER_PARSE_PARTIAL_REQUEST 1
#define REQ_PARSER_PARSE_INVALID_REQUEST 2

/* HTTP method codes reported by req_parser_method(). */
#define REQ_PARSER_METHOD_INVALID -1
#define REQ_PARSER_METHOD_GET 0
#define REQ_PARSER_METHOD_HEAD 1
#define REQ_PARSER_METHOD_POST 2
#define REQ_PARSER_METHOD_OPTIONS 3

/* Opaque incremental HTTP request parser. */
typedef struct _req_parser req_parser;

/* Allocates a fresh parser; release with req_parser_destroy(). */
req_parser *req_parser_create();
void req_parser_destroy(req_parser *r);

/* Current parse state: one of the REQ_PARSER_PARSE_* codes. */
int req_parser_status(req_parser *r);

/* Accessors for the parsed request pieces. */
int req_parser_method(req_parser *r);
GString *req_parser_path(req_parser *r);
GString *req_parser_fragment(req_parser *r);
GString *req_parser_body(req_parser *r);
GTree *req_parser_query(req_parser *r);
GTree *req_parser_headers(req_parser *r);

/* Clears parser state so the same instance can parse another request. */
void req_parser_reset(req_parser *r);

/* Feeds `len` bytes of raw request data; may be called repeatedly as data
 * arrives. NOTE(review): the int return is presumably a REQ_PARSER_PARSE_*
 * code — confirm against the implementation. */
int req_parser_add_text(req_parser *r, const char *data, size_t len);

#endif //HTTPD_HTTPD_REQ_PARSER_H
<reponame>AnimeshRy/erashare-frontend<gh_stars>1-10 export const sizeInMb = (bytes: number): string => { const marker = 1024; // Change to 1000 if required const decimal = 2; // Change as required let kiloBytes = marker; // One Kilobyte is 1024 bytes let megaBytes = marker * marker; // One MB is 1024 KB let gigaBytes = marker * marker * marker; // One GB is 1024 MB let teraBytes = marker * marker * marker * marker; // One TB is 1024 GB // return bytes if less than a KB if (bytes < kiloBytes) return bytes + " Bytes"; // return KB if less than a MB else if (bytes < megaBytes) return (bytes / kiloBytes).toFixed(decimal) + " KB"; // return MB if less than a GB else if (bytes < gigaBytes) return (bytes / megaBytes).toFixed(decimal) + " MB"; // return GB if less than a TB else return (bytes / gigaBytes).toFixed(decimal) + " GB"; }
def _checkConvObjective(self, traj): if len(self._optPointHistory[traj]) < 2 or (self._convergenceCriteria['objective'] < 0): return False o1, _ = self._optPointHistory[traj][-1] o2, _ = self._optPointHistory[traj][-2] delta = o2[self._objectiveVar]-o1[self._objectiveVar] converged = abs(delta) < self._convergenceCriteria['objective'] self.raiseADebug(self.convFormat.format(name='objective', conv=str(converged), got=delta, req=self._convergenceCriteria['objective'])) return converged
async def _register_verifiers(self, client: IndyClient, limit_agents: set):
    """
    Register verifier services selected by configuration.

    Reads the "verifiers" section of the service configuration, defaults
    each entry's "id" to its config key, and registers every verifier
    whose id is selected (all of them when ``limit_agents`` is None).

    Fix: the original also accumulated a ``verifier_ids`` list that was
    never used; it has been removed.

    Args:
        client: Indy client used to perform the registrations
        limit_agents: set of agent ids to restrict registration to, or None
    """
    verifiers = []
    config_verifiers = self.services_config("verifiers")
    if not config_verifiers:
        LOGGER.debug("No verifiers defined by configuration")
    for verifier_key, verifier_cfg in config_verifiers.items():
        # Fall back to the config key when no explicit id is given.
        if not verifier_cfg.get("id"):
            verifier_cfg["id"] = verifier_key
        if limit_agents is None or verifier_cfg["id"] in limit_agents:
            verifiers.append(verifier_cfg)
    if verifiers:
        for verifier_cfg in verifiers:
            await self._register_verifier(client, verifier_cfg)
    elif config_verifiers:
        # Verifiers were configured but none matched the AGENTS filter.
        LOGGER.info("No defined verifiers referenced by AGENTS")
/*
 * Revision History:
 *     Initial: 2018/10/15 <NAME>
 */
package handler

import (
	"time"

	"github.com/TechCatsLab/apix/http/server"
	log "github.com/TechCatsLab/logging/logrus"
	"github.com/dgrijalva/jwt-go"
	"github.com/morgances/matchmaking/backend/constant"
	"github.com/morgances/matchmaking/backend/img"
	"github.com/morgances/matchmaking/backend/model"
	"github.com/zh1014/comment/response"
)

type (
	// post is the JSON payload returned to clients for a single post,
	// combining the post row with author profile fields.
	post struct {
		ID            uint32    `json:"id"`
		OpenID        string    `json:"open_id"`
		Content       string    `json:"content"`
		Date          time.Time `json:"date"`
		Commend       uint32    `json:"commend"`
		Images        []string  `json:"Images"`
		NickName      string    `json:"nick_name"`
		VIP           bool      `json:"vip"`
		Age           uint8     `json:"age"`
		Location      string    `json:"location"`
		Height        string    `json:"height"`
		Constellation string    `json:"constellation"`
	}
)

// CreatePost stores a new post authored by the JWT-authenticated caller and
// saves any uploaded images keyed by the new post id.
func CreatePost(this *server.Context) error {
	// req form-data: image_num title content
	// The "user" context value is the parsed JWT; open_id identifies the author.
	openid, ok := this.Request().Context().Value("user").(*jwt.Token).Claims.(jwt.MapClaims)["open_id"].(string)
	if !ok {
		return response.WriteStatusAndDataJSON(this, constant.ErrInternalServerError, nil)
	}
	content := this.FormValue("content")
	post := &model.Post{
		OpenID:  openid,
		Content: content,
	}
	postId, err := model.PostService.Insert(post)
	if err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrMysql, nil)
	}
	if err = img.SavePostImages(postId, this.Request()); err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrSaveImage, nil)
	}
	return response.WriteStatusAndDataJSON(this, constant.ErrSucceed, nil)
}

// GetReviewedPost returns posts selected by FindMany(true) — presumably the
// reviewed/approved ones (confirm in model.PostService) — including author
// profile fields and image URLs.
func GetReviewedPost(this *server.Context) error {
	var (
		err  error
		resp []post
	)
	rawPosts, err := model.PostService.FindMany(true)
	if err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrMysql, nil)
	}
	for _, rawPost := range rawPosts {
		post := post{}
		post.ID = rawPost.ID
		post.OpenID = rawPost.OpenID
		post.Content = rawPost.Content
		post.Date = rawPost.DateTime
		post.Commend = rawPost.Commend
		post.NickName = rawPost.NickName
		post.VIP = rawPost.VIP
		post.Age = rawPost.Age
		post.Location = rawPost.Location
		post.Height = rawPost.Height
		post.Constellation = rawPost.Constellation
		post.Images = img.GetPostImgs(post.ID)
		resp = append(resp, post)
	}
	return response.WriteStatusAndDataJSON(this, constant.ErrSucceed, resp)
}

// GetMyPost returns the authenticated caller's own posts; profile fields
// are left empty since the caller is the author.
func GetMyPost(this *server.Context) error {
	var (
		resp []post
	)
	openid, ok := this.Request().Context().Value("user").(*jwt.Token).Claims.(jwt.MapClaims)["open_id"].(string)
	if !ok {
		return response.WriteStatusAndDataJSON(this, constant.ErrInternalServerError, nil)
	}
	rawPosts, err := model.PostService.FindByOpenID(openid)
	if err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrMysql, nil)
	}
	for _, rawPost := range rawPosts {
		post := post{}
		post.ID = rawPost.ID
		post.OpenID = rawPost.OpenID
		post.Content = rawPost.Content
		post.Date = rawPost.DateTime
		post.Commend = rawPost.Commend
		post.Images = img.GetPostImgs(post.ID)
		resp = append(resp, post)
	}
	return response.WriteStatusAndDataJSON(this, constant.ErrSucceed, resp)
}

// CommendPost increments the commend counter of the targeted post.
func CommendPost(this *server.Context) error {
	var (
		err error
		req targetID
	)
	if err = this.JSONBody(&req); err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrInvalidParam, nil)
	}
	if err = this.Validate(&req); err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrInvalidParam, nil)
	}
	if err = model.PostService.Commend(req.TargetID); err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrMysql, nil)
	}
	return response.WriteStatusAndDataJSON(this, constant.ErrSucceed, nil)
}

// DeletePost removes one of the caller's own posts together with its images.
func DeletePost(this *server.Context) error {
	var (
		req targetID
	)
	openid, ok := this.Request().Context().Value("user").(*jwt.Token).Claims.(jwt.MapClaims)["open_id"].(string)
	if !ok {
		return response.WriteStatusAndDataJSON(this, constant.ErrInternalServerError, nil)
	}
	err := this.JSONBody(&req)
	if err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrInvalidParam, nil)
	}
	if err = this.Validate(&req); err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrInvalidParam, nil)
	}
	// Deletion is scoped by openid so users can only delete their own posts.
	if err = model.PostService.DeleteByOpenIDAndID(openid, req.TargetID); err != nil {
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrMysql, nil)
	}
	if err = img.ClearPostImages(req.TargetID); err != nil {
		// make a log but tell user succeed, because it succeed in database
		log.Error(err)
		return response.WriteStatusAndDataJSON(this, constant.ErrSucceed, nil)
	}
	return response.WriteStatusAndDataJSON(this, constant.ErrSucceed, nil)
}
Hello again — apparently I disappear for ages and come back in spurts. Just yesterday I wrote a small blog post about a few issues I've encountered recently; you can read Skin Deep here. And today I'm at it again, writing stuff and things, though it's not my strength. We are, after all, here to help each other and share information. Over the past two days I noticed a new trend that I find worrying, to say the least. While going through requests for review copies for the ongoing Skin Fair, I noticed a pattern with some bloggers (see point 1). Besides blogging I do other things: like everyone else in SL I enjoy trying new stuff and helping friends, and so I also do CSR and support work for a friend's shop, which happens to be in this event. With that job comes the responsibility of accepting and reviewing all blogger applications, whether for the shop's regular blogger list or for event-specific requests. Funnily enough, I'm not the only blogger who does this either — most shop blogger applications are not reviewed by the creators themselves, at least not initially; someone else will be in charge of it, either a CSR/manager or a partner. People don't seem to realize that, and they apply as if talking to the owner, sometimes in inappropriate ways. But I'm not going to talk about how you should fill in an application — I'm going to talk about what I look at when I see yours. 1. I check your profile — I look at it to get an idea of who you are and how you present yourself. Do you look professional and polished about what you do? Is your profile truthful? Would we be OK associating with it? This seems pretty basic, right?
But sometimes I find the complete opposite of these simple things. Today I saw the profile of a blogger who, in her picks, thanked all her "sponsors" and claimed that several events also sponsored her. That is an untruthful profile: having blogged one edition of an event does not mean it sponsors or supports your blog, and most of the events on that list don't even carry official bloggers at all. I get it — you want to show your past experience with events — but present it properly: say "I officially blogged for X event in 2013" and link to their list of official bloggers if one is available. Trust me, it's not worth lying about; Google will give the correct answer in seconds. 2. Your blog. I get that blogs are very different — we all have our own style and way of showing it, and believe me, that goes into consideration; different audiences look for different things. But so do shops: why apply to blog poses if you don't credit poses? Why apply for casual clothes if you only do RP-related posts? Some people do mix styles and love going outside their comfort zones — SL is about dressing up, and goodness knows I've been casual, sci-fi, goth, and fairy — but not all bloggers do that. Some are very specific in their style, and maybe they should consider that style when filling out applications. 3. Your extras: how diligent are you with other platforms? Are you on Flickr, Plurk, Tumblr, Facebook, Twitter, or any of the other thousands of social media sites? For me that's a plus — it means you're organized and looking for new audiences. At the end of the day, these are the things I personally look at when you present yourself to me, and I'm not even talking about the quality of your work or all the other small details that are usually covered in the application itself: how old your blog is, how often you blog, and the overall quality of your photos and/or writing.
How (and whether) you tag your posts, and whether you add your pictures to the store's Flickr groups — these are more subjective points, or are asked in the application itself, and trust me, I will check whether you answered truthfully. Remember: all bloggers are different, and so are store owners, CSRs, and managers — they all ask different things of you as a blogger. But being truthful, organized, and polished are the basics. When we fail at those, we give all other bloggers a bad reputation — and it's so easy to be all of those things, whether you're blogging a street corner in a dark alley or a runway on the finest built sim.
/*
 * MIT License
 *
 * Copyright (c) 2019 Matt
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package me.mattstudios.mf.base;

import com.google.common.primitives.Doubles;
import com.google.common.primitives.Floats;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import me.mattstudios.mf.base.components.CommandData;
import me.mattstudios.mf.base.components.ParameterResolver;
import me.mattstudios.mf.base.components.TypeResult;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Sound;
import org.bukkit.World;
import org.bukkit.entity.Player;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * Resolves raw command arguments into typed values for command methods.
 * A resolver per supported class is registered up front; unresolvable
 * input yields a TypeResult whose resolved value is null.
 */
@SuppressWarnings({"WeakerAccess", "UnstableApiUsage"})
public final class ParameterHandler {

    // The map of registered parameters.
    private final Map<Class<?>, ParameterResolver> registeredTypes = new HashMap<>();

    // Registers all the parameters;
    ParameterHandler() {
        register(Short.class, arg -> {
            final Integer integer = Ints.tryParse(String.valueOf(arg));
            return integer == null ? new TypeResult(arg) : new TypeResult(integer.shortValue(), arg);
        });
        register(Integer.class, arg -> new TypeResult(Ints.tryParse(String.valueOf(arg)), arg));
        register(Long.class, arg -> new TypeResult(Longs.tryParse(String.valueOf(arg)), arg));
        register(Float.class, arg -> new TypeResult(Floats.tryParse(String.valueOf(arg)), arg));
        register(Double.class, arg -> new TypeResult(Doubles.tryParse(String.valueOf(arg)), arg));

        // Fix: primitives now resolve like their boxed types. Previously only
        // boolean had a primitive registration, so command methods declaring
        // int/long/float/double/short parameters were not recognized.
        register(short.class, registeredTypes.get(Short.class));
        register(int.class, registeredTypes.get(Integer.class));
        register(long.class, registeredTypes.get(Long.class));
        register(float.class, registeredTypes.get(Float.class));
        register(double.class, registeredTypes.get(Double.class));

        register(String.class, arg -> arg instanceof String ? new TypeResult(arg, arg) : new TypeResult(arg));
        register(String[].class, arg -> {
            if (arg instanceof String[]) return new TypeResult(arg, arg);
            // Will most likely never happen.
            return new TypeResult(arg);
        });
        register(Boolean.class, arg -> new TypeResult(Boolean.valueOf(String.valueOf(arg)), arg));
        register(boolean.class, arg -> new TypeResult(Boolean.valueOf(String.valueOf(arg)), arg));
        register(Player.class, arg -> new TypeResult(Bukkit.getPlayer(String.valueOf(arg)), arg));
        register(Material.class, arg -> new TypeResult(Material.matchMaterial(String.valueOf(arg)), arg));
        register(Sound.class, arg -> {
            // Case-insensitive lookup over the Sound enum names.
            final String soundValue = Arrays.stream(Sound.values())
                    .map(Enum::name)
                    .filter(name -> name.equalsIgnoreCase(String.valueOf(arg)))
                    .findFirst().orElse(null);

            return soundValue == null ? new TypeResult(null, arg) : new TypeResult(Sound.valueOf(soundValue), arg);
        });
        register(World.class, arg -> new TypeResult(Bukkit.getWorld(String.valueOf(arg)), arg));
    }

    /**
     * Registers the new class type of parameters and their results.
     *
     * @param clss              The class type to be added.
     * @param parameterResolver The built in method that returns the value wanted.
     */
    public void register(final Class<?> clss, final ParameterResolver parameterResolver) {
        registeredTypes.put(clss, parameterResolver);
    }

    /**
     * Gets a specific type result based on a class type.
     * Callers must first check {@link #isRegisteredType(Class)}; an
     * unregistered class would throw a NullPointerException here.
     *
     * @param clss       The class to check.
     * @param object     The input object of the functional interface.
     * @param subCommand The command base class.
     * @param paramName  The parameter name from the command method.
     * @return The output object of the functional interface.
     */
    Object getTypeResult(final Class<?> clss, final Object object, final CommandData subCommand, final String paramName) {
        final TypeResult result = registeredTypes.get(clss).resolve(object);
        subCommand.getCommandBase().addArgument(paramName, result.getArgumentName());

        return result.getResolvedValue();
    }

    /**
     * Checks if the class has already been registered or not.
     *
     * @param clss The class type to check.
     * @return Returns true if it contains.
     */
    boolean isRegisteredType(final Class<?> clss) {
        return registeredTypes.get(clss) != null;
    }
}
/**
 * Writes a 32-bit int to the output stream in little-endian byte order
 * (least-significant byte first). writeByte emits only the low 8 bits of
 * its argument, so no masking is required.
 */
public void writeIntLE(int i) throws IOException {
    for (int shift = 0; shift < 32; shift += 8) {
        outputStream.writeByte(i >> shift);
    }
}
// Test-only counter types used to exercise the Absorb machinery.

/// Operation that adds its payload to an `i32` absorber.
#[cfg(test)]
#[derive(Debug)]
pub struct CounterAddOp(pub i32);

#[cfg(test)]
impl Absorb<CounterAddOp> for i32 {
    fn absorb_first(&mut self, operation: &mut CounterAddOp, _: &Self) {
        *self += operation.0;
    }

    fn sync_with(&mut self, first: &Self) {
        *self = *first
    }
}

/// Counter operation used to exercise operation-log compression; the const
/// parameter is forwarded as `Absorb::MAX_COMPRESS_RANGE` below.
#[cfg(test)]
#[derive(Debug, Eq, PartialEq)]
pub enum CompressibleCounterOp<const MAX_COMPRESS_RANGE: usize> {
    Set(i32),
    Add(i32),
    Sub(i32),
}

#[cfg(test)]
impl<const MAX_COMPRESS_RANGE: usize> Absorb<CompressibleCounterOp<MAX_COMPRESS_RANGE>> for i32 {
    fn absorb_first(
        &mut self,
        operation: &mut CompressibleCounterOp<MAX_COMPRESS_RANGE>,
        _: &Self,
    ) {
        match operation {
            CompressibleCounterOp::Set(v) => *self = *v,
            CompressibleCounterOp::Add(v) => *self += *v,
            CompressibleCounterOp::Sub(v) => *self -= *v,
        }
    }

    fn sync_with(&mut self, first: &Self) {
        *self = *first
    }

    const MAX_COMPRESS_RANGE: usize = MAX_COMPRESS_RANGE;

    // Compression rules:
    //   Add+Add and Sub+Sub fold into a single op (magnitudes accumulate);
    //   Add vs Sub commute, so they are reported Independent;
    //   a later Set replaces whatever precedes it (Compressed);
    //   anything following a Set is order-dependent on it (Dependent).
    fn try_compress(
        prev: &mut CompressibleCounterOp<MAX_COMPRESS_RANGE>,
        next: CompressibleCounterOp<MAX_COMPRESS_RANGE>,
    ) -> TryCompressResult<CompressibleCounterOp<MAX_COMPRESS_RANGE>> {
        match (prev, next) {
            (CompressibleCounterOp::Add(prev), CompressibleCounterOp::Add(next)) => {
                *prev += next;
                TryCompressResult::Compressed
            }
            (CompressibleCounterOp::Sub(prev), CompressibleCounterOp::Sub(next)) => {
                *prev += next;
                TryCompressResult::Compressed
            }
            (CompressibleCounterOp::Add(_), next @ CompressibleCounterOp::Sub(_)) => {
                TryCompressResult::Independent(next)
            }
            (CompressibleCounterOp::Sub(_), CompressibleCounterOp::Add(next)) => {
                TryCompressResult::Independent(CompressibleCounterOp::Add(next))
            }
            (CompressibleCounterOp::Set(_), next) => TryCompressResult::Dependent(next),
            (prev, CompressibleCounterOp::Set(next)) => {
                *prev = CompressibleCounterOp::Set(next);
                TryCompressResult::Compressed
            }
        }
    }
}
Tim Lincecum needed 148 pitches for his first career no-hitter Saturday night. The Detroit Tigers might be interested in a smaller workload from "The Freak." FoxSports.com's Jon Morosi says the Tigers are among teams that have inquired about acquiring Lincecum from the San Francisco Giants, presumably to be used out of the bullpen. Lincecum, a two-time Cy Young award winner, shined out of the bullpen in the World Series against Detroit last fall, throwing 4 2/3 scoreless innings with eight strikeouts and just one walk. He was shifted to relief in the playoffs after going 10-15 with a 5.18 ERA in 2012, and is 5-9 with a 4.26 ERA in a return to the rotation this season. Lincecum is in the final season of a contract that will pay him $22 million in 2013, so any team trading for the pending free agent may want some financial assistance. Or, it's entirely possible the defending world champions don't become sellers at the trade deadline. San Francisco is 6.5 games out of first place in the NL West. Detroit is in search of reliable options to bridge the gap between the starting rotation and setup man Drew Smyly and closer Joaquin Benoit. — Email Josh Slagter at [email protected]. Download the "Detroit Tigers" MLive app (iPhone or Android) for the latest news and updates. Follow @JoshSlagter
Optimum filter design for partial-response class-IV transmission systems The design of a mixed digital/analog transmit filter and a variable analog receive filter for partial-response class-IV signaling over metallic cables is discussed. Optimum filters are analytically determined for maximum cable length and additive Gaussian noise. Since the transfer function of the overall channel is given, these filters are independent of crosstalk. The optimum filter characteristics are approximated by those of single realizable filters. A simple analog equalizer section with independently controllable bandwidth and gain parameters is optimized by simulated annealing so that it could best compensate for variations of cable length when used as part of the receive filter. Simulated annealing is also employed to determine the remaining fixed section of the receive filter and the analog transmit filter. The digital section of the transmit filter is designed to achieve precise signal shaping over the considered range of cable lengths.
def take_dep(): departments = ["AE", "AG", "AR", "BT", "CE", "CH", "CS", "CY", "EC", "EE", "EX", "GG", "HS", "IE", "IM", "MA", "ME", "MF", "MI", "MT", "NA", "PH", "QD"] dep = raw_input("Enter Department (e.g \"CE\" for civil) : ") dep = dep.upper() while dep not in departments: print "Please enter a valid department!" dep = raw_input("Enter Valid Department again : ") dep = dep.upper() return dep
def predict(self, x):
    """
    Return the predicted class index for every sample in ``x``.

    Runs a forward pass and picks, per row of the network output, the
    column with the highest activation.
    """
    outputs, _state = self.forward_propagation(x)
    return np.argmax(outputs, axis=1)
/*
  SampleEnergyField.hpp

  energy field with values computed and stored on the samples

  <NAME>
  06/15/2009
*/

#ifndef _SAMPLE_ENERGY_FIELD_HPP
#define _SAMPLE_ENERGY_FIELD_HPP

#include "DiscreteEnergyField.hpp"
#include "EnergySampleStore.hpp"

// Discrete energy field whose energy values live on the samples themselves,
// backed by an external EnergySampleStore supplied at construction.
class SampleEnergyField : public DiscreteEnergyField
{
public:
    SampleEnergyField(const Domain & domain, const SingleBlob & blob, const int num_class, const ClassWeight & class_weight, const KernelSize & kernel_size, EnergySampleStore & store);
    virtual ~SampleEnergyField(void);

    // Replace the field content with the given samples.
    virtual int Set(const vector<Sample> & samples);
    virtual int Set(const vector<EnergySample> & samples);

    // Read back the stored samples (copy or pointer views).
    virtual int Get(vector<Sample> & samples) const;
    virtual int Get(vector<EnergySample> & samples) const;
    virtual const vector<const EnergySample *> & Get(void) const;
    virtual const vector<EnergySample *> & Get(void);

    // Energy value evaluated at the query sample.
    virtual float Get(const EnergySample & query) const;

    // Extremal ("peak") sample of the field, optionally including fixed
    // samples; the second overload searches relative to `query`.
    // NOTE(review): exact peak semantics (max vs min, neighborhood of
    // query) are defined by the implementation — confirm in the .cpp.
    virtual const EnergySample * Peak(const bool including_fixed) const;
    virtual const EnergySample * Peak(const EnergySample & query, const bool including_fixed) const;

protected:
    // Shared peak search over an explicit candidate set.
    virtual const EnergySample * Peak(const vector<const EnergySample *> & candidates, const bool including_fixed) const;

protected:
    // Mutable scratch samples reused across const evaluations.
    mutable Sample _center, _query, _sample;
};

#endif
So I really loved that Leonard Balsera came up with some cool beer names for the Beer Baron in episode one of Titansgrave. It appears that the Internet agrees with me, and a lot of folks have asked me if we're going to release a homebrew recipe for The Old Chaotic Neutral. I would love to do that, but I'm not sure what style it will be … so maybe you can help me out? What type of beer is Old Chaotic Neutral? Pale Ale / IPA / Stout / Brown. I'll leave this poll open for a few days and get to work when a clear consensus emerges. If you have further ideas or thoughts, please let me know in the comments.
// Writes into section on specified offset // If offset points beyond section, it is resized to contain it void WriteAsRaw(std::string& data, uint64_t number, int64_t offset) { if (offset < 0) { LOG(FATAL) << "Trying to Write raw on negative offset"; } if (static_cast<std::string::size_type>(offset) + 3 >= data.size()) { data.resize(offset + 3, '\0'); } for (int i = 0; i < 4; ++i) { data[offset + i] = (number >> (i * 8)); } }
def new_file(self, event=None):
    """
    Open a fresh editor tab in response to a "new file" action.

    Records the interaction for tracing, creates a new PyEditorFrame,
    attaches it to the editor notebook labeled with its file name, and
    emits a "created file" tracing statement.
    """
    tracing.user_is_interacting()
    editor = PyEditorFrame(self.editor_list)
    tab_label = editor.get_file_name()
    self.editor_list.add(editor, self.main_view.editor_widget, text=tab_label)
    tracing.send_statement("created", "file")
class TestQuantityMimics:
    """Test Quantity Mimics that are not ndarray subclasses."""

    @pytest.mark.parametrize('Mimic', (QuantityMimic, QuantityMimic2))
    def test_mimic_input(self, Mimic):
        """Quantity construction from a mimic picks up its unit and values."""
        value = np.arange(10.)
        mimic = Mimic(value, u.m)
        # No explicit unit: the mimic's own unit (m) is adopted.
        q = u.Quantity(mimic)
        assert q.unit == u.m
        assert np.all(q.value == value)
        # Explicit unit: values are converted (m -> cm multiplies by 100).
        q2 = u.Quantity(mimic, u.cm)
        assert q2.unit == u.cm
        assert np.all(q2.value == 100 * value)

    @pytest.mark.parametrize('Mimic', (QuantityMimic, QuantityMimic2))
    def test_mimic_setting(self, Mimic):
        """Item assignment from a mimic converts into the target's unit."""
        mimic = Mimic([1., 2.], u.m)
        q = u.Quantity(np.arange(10.), u.cm)
        q[8:] = mimic
        # Untouched elements keep their values; assigned ones are m -> cm.
        assert np.all(q[:8].value == np.arange(8.))
        assert np.all(q[8:].value == [100., 200.])

    def test_mimic_function_unit(self):
        """Mimics carrying a function unit (dex) round-trip correctly."""
        mimic = QuantityMimic([1., 2.], u.dex(u.cm/u.s**2))
        # Dex accepts the mimic directly.
        d = u.Dex(mimic)
        assert isinstance(d, u.Dex)
        assert d.unit == u.dex(u.cm/u.s**2)
        assert np.all(d.value == [1., 2.])
        # Quantity with subok=True may return the Dex subclass...
        q = u.Quantity(mimic, subok=True)
        assert isinstance(q, u.Dex)
        assert q.unit == u.dex(u.cm/u.s**2)
        assert np.all(q.value == [1., 2.])
        # ...but a plain Quantity cannot hold a function unit.
        with pytest.raises(u.UnitTypeError):
            u.Quantity(mimic)
@Test
public void testConfigureCacheUpdatePeriodWithPropertiesFileSet() {
    // Factory whose default properties simulate a properties file declaring
    // both update periods (cache=15, model=30).
    factory = new PropertiesAndFilterConfigWroConfigurationFactory(filterConfig) {
        @Override
        protected Properties newDefaultProperties() {
            final Properties props = new Properties();
            props.put(ConfigConstants.cacheUpdatePeriod.name(), "15");
            props.put(ConfigConstants.modelUpdatePeriod.name(), "30");
            return props;
        }
    };
    // The filter-config init-param (10) must override the properties value (15).
    Mockito.when(filterConfig.getInitParameter(ConfigConstants.cacheUpdatePeriod.name())).thenReturn("10");
    final WroConfiguration config = factory.create();
    Assert.assertEquals(10, config.getCacheUpdatePeriod());
    Assert.assertEquals(true, config.isDebug());
    // No init-param for modelUpdatePeriod, so the properties value wins.
    // (The original asserted getCacheUpdatePeriod()==10 twice; the duplicate
    // assertion has been removed.)
    Assert.assertEquals(30, config.getModelUpdatePeriod());
}
/**
 * A transformation that computes the complement of an automaton.
 * <p>
 * This transformation computes the complement of an automaton: Terminal states
 * are inverted and missing transitions are added.
 *
 * @author nono
 * @version $Id: Complement.java 2 2006-08-24 14:41:48Z oqube $
 */
public class Complement<L, Tr extends Transition<L>, T extends Builder<L, Tr, T>>
        implements UnaryTransformation<L, Tr, T> {

    /*
     * (non-Javadoc)
     *
     * @see rationals.transformations.UnaryTransformation#transform(rationals.Automaton)
     */
    public Automaton<L, Tr, T> transform(Automaton<L, Tr, T> a) {
        Automaton<L, Tr, T> ret = new Automaton<>();
        // Worklist of states of `a` still to be processed.
        List<State> todo = new ArrayList<>();
        // Maps states of `a` to their counterparts in `ret`.
        Map<State, State> sm = new HashMap<>();
        // NOTE(review): `done` is populated but never consulted; states
        // reachable via several paths may be re-processed (the rebuild is
        // idempotent per state, but this looks like an unused guard).
        Set<State> done = new HashSet<>();
        Set<State> s = a.initials();
        todo.addAll(s);
        while (!todo.isEmpty()) {
            State st = todo.remove(0);
            State ns = sm.get(st);
            if (ns == null) {
                // Complement: the terminal flag is inverted on every state.
                ns = ret.addState(st.isInitial(), !st.isTerminal());
                sm.put(st, ns);
            }
            done.add(st);
            for (Iterator<L> it = a.alphabet().iterator(); it.hasNext();) {
                L l = it.next();
                Set<Transition<L>> ends = a.delta(st, l);
                if (ends.isEmpty())
                    // Missing transition: complete the automaton by adding a
                    // self-loop on this letter.
                    // NOTE(review): the usual construction routes missing
                    // letters to a dedicated non-terminal sink; a self-loop on
                    // the state itself is only equivalent when the state is
                    // non-terminal in `ret` — confirm this is intended.
                    try {
                        ret.addTransition(new Transition<>(ns, l, ns));
                    } catch (NoSuchStateException e) {
                    }
                else {
                    for (Iterator<Transition<L>> i = ends.iterator(); i.hasNext();) {
                        State end = i.next().end();
                        State ne = sm.get(end);
                        if (ne == null) {
                            ne = ret.addState(end.isInitial(), !end.isTerminal());
                            sm.put(end, ne);
                            todo.add(end);
                        }
                        try {
                            ret.addTransition(new Transition<>(ns, l, ne));
                        } catch (NoSuchStateException e) {
                            // Both endpoints were added to `ret` above.
                        }
                    }
                }
            }
        }
        return ret;
    }
}
//bwas25q2
// For each test case: split the digit string into its even and odd digits
// (order preserved), then merge the two subsequences, always emitting the
// smaller front digit first. Equal fronts cannot occur across the two lists
// since an even and an odd digit are never equal.
#include <bits/stdc++.h>
using namespace std;

int main() {
    int t;
    scanf("%d", &t);
    while (t--) {
        string digits;
        cin >> digits;
        string evens, odds;
        for (char c : digits) {
            (((c - '0') % 2 == 0) ? evens : odds) += c;
        }
        string merged;
        merged.reserve(digits.size());
        size_t e = 0, o = 0;
        while (e < evens.size() && o < odds.size()) {
            merged += (evens[e] < odds[o]) ? evens[e++] : odds[o++];
        }
        merged += evens.substr(e);
        merged += odds.substr(o);
        printf("%s\n", merged.c_str());
    }
    return 0;
}
package listener

import (
	"context"
	"net"
	"sync"
	"time"

	"github.com/dynamicgo/mesh"
	"github.com/dynamicgo/mesh-libp2p-network/netwrapper"
	"github.com/dynamicgo/slf4go"
	host "github.com/libp2p/go-libp2p-host"
	inet "github.com/libp2p/go-libp2p-net"
	protocol "github.com/libp2p/go-libp2p-protocol"
	manet "github.com/multiformats/go-multiaddr-net"
)

// libp2pListener adapts a libp2p protocol stream handler to the net.Listener
// interface: incoming streams are handed to Accept through the conn channel.
type libp2pListener struct {
	sync.Mutex
	slf4go.Logger
	addr     net.Addr
	conn     chan net.Conn
	ctx      context.Context
	cancel   context.CancelFunc
	closed   bool // guarded by the embedded mutex; makes Close idempotent
	protocol protocol.ID
	host     host.Host
}

// Listen registers a stream handler for the given protocol on host and
// exposes the incoming streams as a net.Listener.
func Listen(ctx context.Context, host host.Host, protocol protocol.ID) net.Listener {
	ctx, cancel := context.WithCancel(ctx)

	listener := &libp2pListener{
		Logger:   slf4go.Get("libp2p-listener"),
		conn:     make(chan net.Conn),
		ctx:      ctx,
		cancel:   cancel,
		protocol: protocol,
		host:     host,
	}

	// BUG FIX: the original indexed host.Addrs()[0] unconditionally (panics
	// on a host with no addresses) and discarded the ToNetAddr error. Fall
	// back to a nil addr with a warning instead.
	if addrs := host.Addrs(); len(addrs) > 0 {
		addr, err := manet.ToNetAddr(addrs[0])
		if err != nil {
			listener.WarnF("convert multiaddr %s to net.Addr: %s", addrs[0], err)
		} else {
			listener.addr = addr
		}
	} else {
		listener.WarnF("host has no listen addresses")
	}

	host.SetStreamHandler(listener.protocol, listener.streamHandler)

	return listener
}

// streamHandler delivers an incoming stream to Accept, retrying once per
// second until the hand-off succeeds or the listener is closed.
func (listener *libp2pListener) streamHandler(stream inet.Stream) {
	conn := &netwrapper.StreamConn{Stream: stream}

	for {
		if listener.sendStream(conn) {
			return
		}
		time.Sleep(time.Second)
	}
}

// sendStream attempts a non-blocking hand-off of conn to Accept. It reports
// true when the caller should stop retrying (delivered, or listener closed)
// and false when no Accept call was ready.
func (listener *libp2pListener) sendStream(conn net.Conn) bool {
	listener.Lock()
	defer listener.Unlock()

	select {
	case <-listener.ctx.Done():
		listener.WarnF("handle input stream on closed listener %s", listener.addr)
		return true
	default:
	}

	select {
	case listener.conn <- conn:
		return true
	default:
		return false
	}
}

// Accept blocks until a stream arrives or the listener is closed.
func (listener *libp2pListener) Accept() (net.Conn, error) {
	conn, ok := <-listener.conn

	if !ok {
		return nil, mesh.ErrNetworkClosed
	}

	return conn, nil
}

// Close unregisters the stream handler, cancels pending hand-offs and closes
// the conn channel so Accept returns mesh.ErrNetworkClosed.
//
// BUG FIX: the original closed the channel unconditionally, so a second
// Close call panicked; the closed flag makes Close idempotent.
func (listener *libp2pListener) Close() error {
	listener.Lock()
	defer listener.Unlock()

	if listener.closed {
		return nil
	}
	listener.closed = true

	listener.host.RemoveStreamHandler(listener.protocol)
	listener.cancel()
	close(listener.conn)

	return nil
}

// Addr returns the listener's address. It may be nil when the host had no
// addresses (or conversion failed) at construction time.
func (listener *libp2pListener) Addr() net.Addr {
	return listener.addr
}
# Lint as: python3
"""Tests for main_heatmap."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl.testing import absltest
from absl.testing import parameterized

import main_heatmap
import numpy as np
import pandas as pd

# Representative Stackdriver logs URL. The tests treat it as an opaque
# string; only its presence in the dataframe matters.
SAMPLE_LOGS_LINK = 'https://console.cloud.google.com/logs?project=xl-ml-test&advancedFilter=resource.type%3Dk8s_container%0Aresource.labels.project_id%3Dxl-ml-test%0Aresource.labels.location=us-central1-b%0Aresource.labels.cluster_name=xl-ml-test%0Aresource.labels.namespace_name=automated%0Aresource.labels.pod_name:pt-1.5-cpp-ops-func-v2-8-1587398400&dateRangeUnbound=backwardInTime'


def _get_values_for_failures(values, statuses):
  # Keep values[i] for every i where statuses[i] == 'failure'.
  return [zipped[0] for zipped in zip(
      values, statuses) if zipped[1] == 'failure']


class MainHeatmapTest(parameterized.TestCase):

  @parameterized.named_parameters(
      ('all_success_all_oob', {
          'job_statuses': ['success', 'success', 'success'],
          'metric_statuses': ['failure', 'failure', 'failure'],
          'expected_overall_statuses': ['failure', 'failure', 'failure'],
          'expected_job_status_abbrevs': ['M', 'M', 'M']}),
      ('all_success_some_oob', {
          'job_statuses': ['success', 'success', 'success'],
          'metric_statuses': ['failure', 'failure', 'success'],
          'expected_overall_statuses': ['failure', 'failure', 'success'],
          'expected_job_status_abbrevs': ['M', 'M', '']}),
      ('all_success_none_oob', {
          'job_statuses': ['success', 'success', 'success'],
          'metric_statuses': ['success', 'success', 'success'],
          'expected_overall_statuses': ['success', 'success', 'success'],
          'expected_job_status_abbrevs': ['', '', '']}),
      ('some_success_some_oob', {
          'job_statuses': ['success', 'failure', 'success'],
          'metric_statuses': ['success', 'success', 'failure'],
          'expected_overall_statuses': ['success', 'failure', 'failure'],
          'expected_job_status_abbrevs': ['', 'F', 'M']}),
  )
  def test_process_dataframes(self, args_dict):
    """Checks overall status, abbreviations and failure explanations.

    Builds a synthetic job-status dataframe and a matching metric-status
    dataframe (only rows whose metric_status is 'failure', mirroring the
    production SQL query), runs process_dataframes, and compares against
    the expectations from the parameterized args_dict.
    """
    job_statuses = args_dict['job_statuses']
    metric_statuses = args_dict['metric_statuses']
    assert len(job_statuses) == len(metric_statuses)
    job_status_df = pd.DataFrame({
        'test_name': pd.Series(['test{}'.format(n) for n in range(
            len(job_statuses))]),
        'run_date': pd.Series(['2020-04-{:02d}'.format(n) for n in range(
            len(job_statuses))]),
        'job_status': pd.Series(job_statuses),
        'logs_link': pd.Series([SAMPLE_LOGS_LINK for _ in job_statuses]),
        'logs_download_command': pd.Series(
            ['my command'] + ['' for _ in job_statuses[1:]]),
    })

    # The SQL query in the real code only returns rows where metrics were
    # out of bounds. These oobs rows correspond to 'failure' for
    # metric_statuses in this test.
    # Even rows get a 'loss' above its upper bound; odd rows get an 'acc'
    # below its lower bound.
    metric_names = ['acc' if n % 2 else 'loss' for n in range(
        len(job_status_df))]
    metric_values = [98.0 if n % 2 else 0.6 for n in range(
        len(job_status_df))]
    metric_upper_bounds = [np.nan if n % 2 else 0.5 for n in range(
        len(job_status_df))]
    metric_lower_bounds = [99.0 if n % 2 else np.nan for n in range(
        len(job_status_df))]
    metric_status_df = pd.DataFrame({
        'test_name': pd.Series(_get_values_for_failures(
            job_status_df['test_name'].tolist(), metric_statuses)),
        'run_date': pd.Series(_get_values_for_failures(
            job_status_df['run_date'].tolist(), metric_statuses)),
        'metric_name': pd.Series(_get_values_for_failures(
            metric_names, metric_statuses)),
        'metric_value': pd.Series(_get_values_for_failures(
            metric_values, metric_statuses)),
        'metric_upper_bound': pd.Series(_get_values_for_failures(
            metric_upper_bounds, metric_statuses)),
        'metric_lower_bound': pd.Series(_get_values_for_failures(
            metric_lower_bounds, metric_statuses)),
    })

    # Process the dataframes and make sure the overall_status matches
    # the expected overall_status.
    df = main_heatmap.process_dataframes(job_status_df, metric_status_df)
    self.assertEqual(df['overall_status'].tolist(),
                     args_dict['expected_overall_statuses'])
    self.assertEqual(df['job_status_abbrev'].tolist(),
                     args_dict['expected_job_status_abbrevs'])

    # We only want to display metrics as a top-level failure if the job
    # succeeded. For failed jobs, it's not so helpful to know that the
    # metrics were out of bounds.
    metrics_failure_explanations = df['failed_metrics'].tolist()
    for i, expl_list in enumerate(metrics_failure_explanations):
      job_status = job_statuses[i]
      metric_status = metric_statuses[i]
      if job_status == 'success' and metric_status == 'failure':
        self.assertGreaterEqual(len(expl_list), 1)
        for expl in expl_list:
          self.assertTrue('outside' in expl)
      else:
        self.assertFalse(expl_list)

    commands = df['logs_download_command'].tolist()
    # If the command is already populated, it should be left alone.
    self.assertEqual(commands[0], 'my command')

  def test_process_dataframes_no_job_status(self):
    """An input frame lacking the job_status column yields an empty result."""
    job_status_df = pd.DataFrame({
        'test_name': pd.Series(['a', 'b']),
        'run_date': pd.Series(['2020-04-10', '2020-04-11']),
        'logs_link': pd.Series(['c', 'd']),
        'logs_download_command': pd.Series(['e', 'f']),
    })
    df = main_heatmap.process_dataframes(job_status_df, pd.DataFrame())
    self.assertTrue(df.empty)
    df = main_heatmap.process_dataframes(pd.DataFrame(), pd.DataFrame())
    self.assertTrue(df.empty)

  def test_make_plot(self):
    """Smoke test: a plot with at least one renderer is produced."""
    input_df = pd.DataFrame({
        'test_name': pd.Series(['test1', 'test2', 'test3']),
        'run_date': pd.Series(['2020-04-21', '2020-04-20', '2020-04-19']),
        'job_status': pd.Series(['success', 'success', 'failure']),
        'logs_link': pd.Series([SAMPLE_LOGS_LINK] * 3),
        'job_status_abbrev': pd.Series(['f', 'f', '']),
        'overall_status': pd.Series(['failure', 'success', 'failure']),
    })
    # Make sure nothing crashes and we are able generate some kind of plot.
    plot = main_heatmap.make_plot(input_df)
    self.assertTrue(plot is not None and len(plot.renderers) > 0)

  def test_make_plot_empty_data(self):
    """Smoke test: empty input must not raise."""
    input_df = pd.DataFrame()
    # Make sure nothing crashes.
    plot = main_heatmap.make_plot(input_df)


if __name__ == '__main__':
  absltest.main()
The Mall of America has 520 stores, 18,000 parking spaces, shark tanks and amusement park rides. Starting this week, it also has a poet. Twenty-seven-year-old Brian Sonia-Wallace beat out 4,000 others for a shot at the Mall of America writer in residence position, celebrating the Bloomington, Minnesota, mall's 25th birthday. "I took a big risk," Sonia-Wallace tells Here & Now's Robin Young. "The initial application was just a couple paragraphs — biographical info, what you think you'll do. And then they asked for a follow-up application, 800 words, and they called it an 'essay,' but I thought, 'What I'm gonna be proposing is poetry, so I should write an 800-word poem outlining what I'm planning on doing,' and so that's what I did." It was an all-or-nothing strategy, he says. "Either they'll love it, or they'll put it aside and say, 'We don't know what to do with this.' And it paid off." Sonia-Wallace, who calls himself the "rent poet," isn't new to crafting poetry in offbeat locations. He participated in an initiative called "Poets at the Polls" in downtown Los Angeles on Election Day, conducting "poetic exit interviews" with voters. He's also participated in the Amtrak Residency program, and worked with the National Parks System, Shuar Nation of Ecuador and Dollar Shave Club. Sonia-Wallace says his poetry in his new position will focus less on shopping and retail, and more on the Mall of America as a unique backdrop for human stories. "I don't expect many retail outlets will actually make it into the poems. And I'm a little bit smug about that," he says. "I think that that's a nice thing, to be able to write in a mall and think about, 'What are the human stories that happened there?' Like who has their first kiss, who's just visiting, who's the bored teenager in the headphones who hates it and thinks that commercialism is the downfall of society?" Sonia-Wallace says he likes to think of the approach as "site-specific poetry." 
"I think it is a really interesting situation that I play on a lot with my poetry that, as online shopping and online experiences get easier and easier, how do you create a physical experience, an in-person experience, for people?"
Lynnfield, MA – Out of an abundance of caution and with an emphasis on its customers' wellness and safety, HP Hood LLC is voluntarily recalling certain code dates of protein drinks from its Sacramento, CA, facility, due to the potential for premature product spoilage. HP Hood is voluntarily recalling specific products after identifying a possible packaging defect that may result in product spoilage during transport and handling. Consumers may notice that, in some cases, the packaging is bloated and product inside may have an off taste or odor. Consumers should not use the product, since it does not meet its high quality standards. The recalled products are limited to plastic bottles of 14 oz. and 10 oz. MUSCLE MILK® Genuine, MUSCLE MILK® Pro Series, MUSCLE MILK® 100 Calorie, with Best By dates of November 21, 2016 through May 23, 2017, with an "HS" in the code date. This recall applies only to the products listed below. The Best By and code dates are printed on the top of the lid of single serve bottles. No confirmed reports have been received of any consumer illnesses or injuries to date. If a consumer has any of the MUSCLE MILK® products listed, they should return it to the store where it was purchased for an exchange, or call Customer Relations at 1-877-446-7635 Monday – Thursday 7:45 AM – 4:00 PM CST or Friday 7:45 AM – 2:45 PM CST.
PRODUCT FLAVOR UPC (case) UPC (unit) MUSCLE MILK Genuine Muscle Milk 14oz Banana 8-76063-00223-3 8-76063-00203-5 Muscle Milk 14oz Chocolate 8-76063-00221-9 8-76063-00201-1 Muscle Milk 14oz Cookies N Crème 8-76063-00224-0 8-76063-00204-2 Muscle Milk 14oz Strawberry 8-76063-00229-5 8-76063-00209-7 Muscle Milk 14oz Vanilla Crème 8-76063-00222-6 8-76063-00202-8 Muscle Milk 14oz Caramel Kick 8-76063-00214-1 8-76063-00219-6 Muscle Milk 10oz Chocolate 8-76063-00271-4 8-76063-00261-5 Muscle Milk 10oz Vanilla Crème 8-76063-00272-1 8-76063-00262-2 Muscle Milk Pro Series 40 14 oz Crushin' Cookies 8-76063-00293-6 8-76063-00283-7 Muscle Milk Pro Series 40 14 oz Intense Vanilla 8-76063-00291-2 8-76063-00281-3 Muscle Milk Pro Series 40 14 oz Knock Out Chocolate 8-76063-00290-5 8-76063-00280-6 Muscle Milk Pro Series 40 14 oz Going Bananas 8-76063-00294-3 8-76063-00284-4 Muscle Milk 100 Calorie 14 oz Chocolate 8-76063-00575-3 8-76063-00565-4 This recall is being initiated out of an abundance of caution, with the knowledge of the US Food and Drug Administration. The products at issue can be identified by an "HS" designation in the lot code on the bottle cap. ###
package cn.edu.sysu.workflow.activiti.admission.timewheel;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

import java.util.concurrent.atomic.AtomicInteger;

/**
 * Task executed by the time wheel: POSTs the buffered process variables to
 * the Activiti engine endpoint and logs the round-trip time.
 *
 * @author: <NAME>
 * @create: 2019/12/13
 **/
public class ActivitiTask implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(ActivitiTask.class);

    /** Target engine endpoint URL. */
    private String url;

    /** Form variables submitted with the request. */
    private MultiValueMap<String, Object> variables;

    private RestTemplate restTemplate;

    // Kept public/static: presumably used as counters by other components —
    // confirm against call sites before removing.
    public static AtomicInteger increment = new AtomicInteger(0);
    public static AtomicInteger decrement = new AtomicInteger(0);

    public ActivitiTask(String url, MultiValueMap<String, Object> variables, RestTemplate restTemplate) {
        this.url = url;
        this.variables = variables;
        this.restTemplate = restTemplate;
    }

    @Override
    public void run() {
        try {
            long start = System.currentTimeMillis();
            ResponseEntity<String> result = restTemplate.postForEntity(url, variables, String.class);
            long end = System.currentTimeMillis();
            // Also log the status code so the response (previously an unused
            // local) carries diagnostic value.
            logger.info("request response time: {}ms, status: {}", end - start, result.getStatusCode());
        } catch (Exception e) {
            // BUG FIX: was e.printStackTrace(), which bypasses the logging
            // framework; route failures through the logger instead.
            logger.error("engine request to " + url + " failed", e);
        }
    }
}
# Configuration constants for the misspell-correction / Rasa chat front end.

# Paths to the misspelling resources used by the spell-correction module.
misspell_inverted_index_file = "./misspell_inverted_index.string"
misspell_keywords_file = "./misspells.txt"

# Endpoint of the local Rasa REST webhook.
RASA_API_SERVER = "http://localhost:8000/webhooks/rest/webhook"

# Sending "/restart" ends the current session; the same message doubles as
# the session-restart signal.
session_ending_message = "/restart"
session_restart_signal = session_ending_message

##############################
# Default Response Templates |
##############################
# Romanized-Urdu fallback: "Sorry, could you enter your question again?"
default_fallback_response = "Sorry, kia ap apna swal dobara enter kr sakty hen?"
from django.core.exceptions import ImproperlyConfigured


class MissingStorageModule(ImproperlyConfigured):
    """Raised when the configured storage module cannot be resolved.

    (Name-based reading — confirm against the code that raises it.)
    """
    pass


class MissingStorageClass(ImproperlyConfigured):
    """Raised when the storage module lacks the configured class.

    (Name-based reading — confirm against the code that raises it.)
    """
    pass


class NoFileStorageConfigured(ImproperlyConfigured):
    """Raised when no file storage backend is configured at all.

    (Name-based reading — confirm against the code that raises it.)
    """
    pass
<reponame>gabrielboliveira/mqtt-temperature<filename>src/config_sample.h<gh_stars>0 #define DEBUG_ENABLED true #define FLASH_BUILTIN_LED true #define READING_DELAY 10000 // reading delay in milliseconds // Wifi: SSID and password const PROGMEM char* WIFI_SSID = "Wifi-SSID"; const PROGMEM char* WIFI_PASSWORD = "<PASSWORD>"; // MQTT: ID, server IP, port, username and password #define MQTT_VERSION MQTT_VERSION_3_1_1 const PROGMEM char* MQTT_CLIENT_ID = "esp-dht-22"; const PROGMEM char* MQTT_SERVER_IP = "mqtt-server"; const PROGMEM uint16_t MQTT_SERVER_PORT = 1883; const PROGMEM char* MQTT_USER = "mqtt-user"; const PROGMEM char* MQTT_PASSWORD = "<PASSWORD>"; const PROGMEM char* MQTT_HUMIDITY_TOPIC = "sensor/temperature"; const PROGMEM char* MQTT_TEMPERATURE_TOPIC = "sensor/humidity"; // DHT sensor #define DHTPIN 14 // which digital pin we're connected to #define DHTTYPE DHT22 // DHT22 or DHT11
def allergen_get_name_list_from_id_list(allergen_id_list):
    """Map allergen document ids to their display names.

    Args:
        allergen_id_list: iterable of allergen ``_id`` values to look up.

    Returns:
        List of the ``name`` field of each matching allergen document.
        Ids with no matching document are skipped.

    BUG FIX: ``find_one`` returns ``None`` for an unknown id; the previous
    version then raised ``TypeError`` on ``None["name"]``. Unknown ids are
    now skipped (silently — raise instead if callers need strictness).
    """
    allergen_name_list = []
    for allergen_id in allergen_id_list:
        allergen = mongo.db.allergens.find_one({"_id": allergen_id})
        if allergen is None:
            continue
        allergen_name_list.append(allergen["name"])
    return allergen_name_list
import { IMailSendPasswordResetDTO } from "./IMailSendPasswordResetDTO";

/**
 * Outbound-mail gateway contract.
 *
 * Implementations deliver the password-reset message described by `data`.
 * The boolean result presumably signals delivery success — confirm against
 * the concrete implementations before relying on it.
 */
export interface IMailRepositoryDTO {
  sendPasswordReset(data: IMailSendPasswordResetDTO): Promise<boolean>;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.orc.impl;

import org.junit.jupiter.api.Test;

import java.nio.ByteBuffer;
import java.util.Objects;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

// Tests for RecordReaderUtils.ChunkReader: how neighbouring/overlapping disk
// ranges are coalesced into one read, and how the read buffer is distributed
// back onto the individual chunks.
class TestRecordReaderUtils {

  // Base ranges: two adjacent ranges (1000-2000, 2000-3000), a 1000-byte gap,
  // a range that fully contains the next one (4000-5000 contains 4100-4200),
  // another gap, and a final range at 8000.
  private final BufferChunkList rangeList = new TestOrcLargeStripe.RangeBuilder()
      .range(1000, 1000)
      .range(2000, 1000)
      .range(4000, 1000)
      .range(4100, 100)
      .range(8000, 1000).build();

  // BufferChunk.equals alone is not strict enough here: also pin the exact
  // offset and length of the chunk.
  private static void assertChunkEquals(BufferChunk expected, BufferChunk actual) {
    assertTrue(Objects.equals(expected, actual) &&
        expected.getOffset() == actual.getOffset() &&
        expected.getLength() == actual.getLength());
  }

  @Test
  public void testDeterminationOfSingleRead() {
    // With no slack allowed, only the directly adjacent first two ranges are
    // combined, and (almost) no extra bytes are read.
    BufferChunk toChunk = RecordReaderUtils.ChunkReader.create(rangeList.get(), 0).getTo();
    assertChunkEquals(rangeList.get(1), toChunk);
    assertTrue(RecordReaderUtils.ChunkReader.create(rangeList.get(), toChunk)
        .getExtraBytesFraction() < 0.001);

    // Allowing 1000 bytes of slack bridges the 3000-4000 gap as well, at the
    // cost of a noticeable extra-bytes fraction.
    toChunk = RecordReaderUtils.ChunkReader.create(rangeList.get(), 1000).getTo();
    assertChunkEquals(rangeList.get(3), toChunk);
    assertTrue(RecordReaderUtils.ChunkReader.create(rangeList.get(), toChunk)
        .getExtraBytesFraction() >= .2);

    // 999 bytes of slack is just short of the 1000-byte gap: no bridging.
    toChunk = RecordReaderUtils.ChunkReader.create(rangeList.get(), 999).getTo();
    assertChunkEquals(rangeList.get(1), toChunk);
    assertTrue(RecordReaderUtils.ChunkReader.create(rangeList.get(), toChunk)
        .getExtraBytesFraction() < 0.001);
  }

  @Test
  public void testNoGapCombine() {
    // findSingleRead only merges zero-gap neighbours.
    BufferChunk toChunk = RecordReaderUtils.findSingleRead(rangeList.get());
    assertChunkEquals(rangeList.get(1), toChunk);
  }

  @Test
  public void testReadExtraBytes() {
    // Keep the whole read buffer (including the gap bytes) on the chunks.
    RecordReaderUtils.ChunkReader chunkReader =
        RecordReaderUtils.ChunkReader.create(rangeList.get(), 1000);
    assertChunkEquals(rangeList.get(3), chunkReader.getTo());
    populateAndValidateChunks(chunkReader, false);
  }

  @Test
  public void testRemoveBytes() {
    // Shrink the buffer to just the requested bytes after the read.
    RecordReaderUtils.ChunkReader chunkReader =
        RecordReaderUtils.ChunkReader.create(rangeList.get(), 1000);
    assertChunkEquals(rangeList.get(3), chunkReader.getTo());
    populateAndValidateChunks(chunkReader, true);
  }

  @Test
  public void testRemoveBytesSmallerOverlapFirst() {
    // Same as testRemoveBytes but the smaller of the two 4000-based ranges
    // comes first in the list.
    BufferChunkList rangeList = new TestOrcLargeStripe.RangeBuilder()
        .range(1000, 1000)
        .range(2000, 1000)
        .range(4000, 100)
        .range(4000, 1000)
        .range(8000, 1000).build();
    RecordReaderUtils.ChunkReader chunkReader =
        RecordReaderUtils.ChunkReader.create(rangeList.get(), 1000);
    assertChunkEquals(rangeList.get(3), chunkReader.getTo());
    populateAndValidateChunks(chunkReader, true);
  }

  @Test
  public void testRemoveBytesWithOverlap() {
    // Partially overlapping range (1800-2200 vs 2000-3000) in the mix.
    BufferChunkList rangeList = new TestOrcLargeStripe.RangeBuilder()
        .range(1000, 1000)
        .range(1800, 400)
        .range(2000, 1000)
        .range(4000, 100)
        .range(4000, 1000)
        .range(8000, 1000).build();
    RecordReaderUtils.ChunkReader chunkReader =
        RecordReaderUtils.ChunkReader.create(rangeList.get(), 1000);
    assertChunkEquals(rangeList.get(4), chunkReader.getTo());
    populateAndValidateChunks(chunkReader, true);
  }

  @Test
  public void testExtraBytesReadWithinThreshold() {
    // With the extra-bytes threshold at 1.0 the buffer is kept as-is even
    // though readBytes > reqBytes.
    BufferChunkList rangeList = new TestOrcLargeStripe.RangeBuilder()
        .range(1000, 1000)
        .range(1800, 400)
        .range(2000, 1000)
        .range(4000, 100)
        .range(4000, 1000)
        .range(8000, 1000).build();
    RecordReaderUtils.ChunkReader chunkReader =
        RecordReaderUtils.ChunkReader.create(rangeList.get(), 1000);
    assertChunkEquals(rangeList.get(4), chunkReader.getTo());
    chunkReader.populateChunks(
        makeByteBuffer(chunkReader.getReadBytes(), chunkReader.getFrom().getOffset()),
        false, 1.0);
    validateChunks(chunkReader);
    assertNotEquals(chunkReader.getReadBytes(), chunkReader.getReqBytes());
    assertEquals(chunkReader.getReadBytes(), chunkReader.getFrom().getData().array().length);
  }

  // Fills a buffer with the deterministic pattern (i + offset) % 127 so that
  // validateChunks can recompute each chunk's expected first byte from its
  // offset alone.
  private ByteBuffer makeByteBuffer(int length, long offset) {
    byte[] readBytes = new byte[length];
    for (int i = 0; i < readBytes.length; i++) {
      readBytes[i] = (byte) ((i + offset) % Byte.MAX_VALUE);
    }
    return ByteBuffer.wrap(readBytes);
  }

  // Populates the chunks either as-is (keep the full read buffer) or with
  // size reduction, then validates the chunk data.
  private void populateAndValidateChunks(RecordReaderUtils.ChunkReader chunkReader,
                                         boolean withRemove) {
    if (withRemove) {
      assertTrue(chunkReader.getReadBytes() > chunkReader.getReqBytes());
    }
    ByteBuffer bytes = makeByteBuffer(chunkReader.getReadBytes(),
                                      chunkReader.getFrom().getOffset());
    if (withRemove) {
      chunkReader.populateChunksReduceSize(bytes, false);
      assertEquals(chunkReader.getReqBytes(),
                   chunkReader.getFrom().getData().array().length);
    } else {
      chunkReader.populateChunksAsIs(bytes);
      assertEquals(chunkReader.getReadBytes(),
                   chunkReader.getFrom().getData().array().length);
    }
    validateChunks(chunkReader);
  }

  // Walks every chunk from 'from' to 'to' and checks it has data whose first
  // byte matches the deterministic pattern produced by makeByteBuffer.
  private void validateChunks(RecordReaderUtils.ChunkReader chunkReader) {
    BufferChunk current = chunkReader.getFrom();
    while (current != chunkReader.getTo().next) {
      assertTrue(current.hasData());
      assertEquals(current.getOffset() % Byte.MAX_VALUE,
                   current.getData().get(),
                   String.format("Failed for %s", current));
      current = (BufferChunk) current.next;
    }
  }
}
/// Converts a list of feedback answers to the format expected by the TMC server. pub fn prepare_feedback_form(feedback: Vec<FeedbackAnswer>) -> HashMap<String, String> { let mut form = HashMap::new(); for (i, answer) in feedback.into_iter().enumerate() { form.insert( format!("answers[{}][question_id]", i), answer.question_id.to_string(), ); form.insert(format!("answers[{}][answer]", i), answer.answer); } form }
def subsetFileMapping(self, file_type=None, sample_id=None):
    """Return self.sampleFileMapping narrowed by optional filters.

    Applies utils.subsetBy once per filter column: "FILE_TYPE" with
    file_type and "ID" with sample_id (a None value leaves that
    dimension unfiltered, matching utils.subsetBy's handling).
    """
    mapping = self.sampleFileMapping
    for column, value in (("FILE_TYPE", file_type), ("ID", sample_id)):
        mapping = utils.subsetBy(mapping, column, value)
    return mapping
/*
 * USE - UML based specification environment
 * Copyright (C) 1999-2010 <NAME>, University of Bremen
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
package org.tzi.use.analysis.coverage;

import java.util.Stack;

import org.tzi.use.uml.mm.MAssociation;
import org.tzi.use.uml.mm.MAttribute;
import org.tzi.use.uml.mm.MClass;
import org.tzi.use.uml.mm.MNavigableElement;
import org.tzi.use.uml.mm.MOperation;
import org.tzi.use.uml.ocl.expr.ExpAllInstances;
import org.tzi.use.uml.ocl.expr.ExpAny;
import org.tzi.use.uml.ocl.expr.ExpAsType;
import org.tzi.use.uml.ocl.expr.ExpAttrOp;
import org.tzi.use.uml.ocl.expr.ExpBagLiteral;
import org.tzi.use.uml.ocl.expr.ExpClosure;
import org.tzi.use.uml.ocl.expr.ExpCollect;
import org.tzi.use.uml.ocl.expr.ExpCollectNested;
import org.tzi.use.uml.ocl.expr.ExpCollectionLiteral;
import org.tzi.use.uml.ocl.expr.ExpConstBoolean;
import org.tzi.use.uml.ocl.expr.ExpConstEnum;
import org.tzi.use.uml.ocl.expr.ExpConstInteger;
import org.tzi.use.uml.ocl.expr.ExpConstReal;
import org.tzi.use.uml.ocl.expr.ExpConstString;
import org.tzi.use.uml.ocl.expr.ExpConstUnlimitedNatural;
import org.tzi.use.uml.ocl.expr.ExpEmptyCollection;
import org.tzi.use.uml.ocl.expr.ExpExists;
import org.tzi.use.uml.ocl.expr.ExpForAll;
import org.tzi.use.uml.ocl.expr.ExpIf;
import org.tzi.use.uml.ocl.expr.ExpIsKindOf;
import org.tzi.use.uml.ocl.expr.ExpIsTypeOf;
import org.tzi.use.uml.ocl.expr.ExpIsUnique;
import org.tzi.use.uml.ocl.expr.ExpIterate;
import org.tzi.use.uml.ocl.expr.ExpLet;
import org.tzi.use.uml.ocl.expr.ExpNavigation;
import org.tzi.use.uml.ocl.expr.ExpObjAsSet;
import org.tzi.use.uml.ocl.expr.ExpObjOp;
import org.tzi.use.uml.ocl.expr.ExpObjRef;
import org.tzi.use.uml.ocl.expr.ExpObjectByUseId;
import org.tzi.use.uml.ocl.expr.ExpOclInState;
import org.tzi.use.uml.ocl.expr.ExpOne;
import org.tzi.use.uml.ocl.expr.ExpOrderedSetLiteral;
import org.tzi.use.uml.ocl.expr.ExpQuery;
import org.tzi.use.uml.ocl.expr.ExpRange;
import org.tzi.use.uml.ocl.expr.ExpReject;
import org.tzi.use.uml.ocl.expr.ExpSelect;
import org.tzi.use.uml.ocl.expr.ExpSelectByKind;
import org.tzi.use.uml.ocl.expr.ExpSelectByType;
import org.tzi.use.uml.ocl.expr.ExpSequenceLiteral;
import org.tzi.use.uml.ocl.expr.ExpSetLiteral;
import org.tzi.use.uml.ocl.expr.ExpSortedBy;
import org.tzi.use.uml.ocl.expr.ExpStdOp;
import org.tzi.use.uml.ocl.expr.ExpTupleLiteral;
import org.tzi.use.uml.ocl.expr.ExpTupleSelectOp;
import org.tzi.use.uml.ocl.expr.ExpUndefined;
import org.tzi.use.uml.ocl.expr.ExpVariable;
import org.tzi.use.uml.ocl.expr.Expression;
import org.tzi.use.uml.ocl.expr.ExpressionVisitor;
import org.tzi.use.uml.ocl.expr.ExpressionWithValue;
import org.tzi.use.uml.ocl.expr.VarDecl;
import org.tzi.use.uml.ocl.expr.VarDeclList;
import org.tzi.use.uml.ocl.type.ObjectType;

/**
 * Abstract visitor implementation.
 * <p>
 * Walks an OCL expression tree and reports every model element the expression
 * touches (classes, attributes, operations, associations, association ends)
 * through the abstract {@code add*Coverage} callbacks.
 *
 * @author <NAME>
 */
public abstract class AbstractCoverageVisitor implements ExpressionVisitor {

	/** When true, operation calls with a defined body are expanded into the body. */
	protected final boolean expandOperations;

	public AbstractCoverageVisitor(boolean expandOperations) {
		this.expandOperations = expandOperations;
	}

	protected abstract void addClassCoverage(MClass cls);

	protected abstract void addAssociationEndCoverage(MNavigableElement dst);

	protected abstract void addAssociationCoverage(MAssociation assoc);

	protected abstract void addAttributeCoverage(MClass sourceClass, MAttribute att);

	protected abstract void addOperationCoverage(MClass sourceClass, MOperation att);

	@Override
	public void visitAllInstances(ExpAllInstances exp) {
		addClassCoverage(exp.getSourceType().cls());
	}

	@Override
	public void visitAny(ExpAny exp) {
		visitQuery(exp);
	}

	@Override
	public void visitAsType(ExpAsType exp) {
		// Needed?
	}

	@Override
	public void visitAttrOp(ExpAttrOp exp) {
		exp.objExp().processWithVisitor(this);
		addAttributeCoverage(((ObjectType) exp.objExp().type()).cls(), exp.attr());
	}

	@Override
	public void visitBagLiteral(ExpBagLiteral exp) {
		visitCollectionLiteral(exp);
	}

	@Override
	public void visitCollect(ExpCollect exp) {
		visitQuery(exp);
	}

	@Override
	public void visitCollectNested(ExpCollectNested exp) {
		visitQuery(exp);
	}

	@Override
	public void visitConstBoolean(ExpConstBoolean exp) {}

	@Override
	public void visitConstEnum(ExpConstEnum exp) {
		// TODO: Coverage?
	}

	@Override
	public void visitConstInteger(ExpConstInteger exp) {}

	@Override
	public void visitConstReal(ExpConstReal exp) {}

	@Override
	public void visitConstString(ExpConstString exp) {}

	@Override
	public void visitEmptyCollection(ExpEmptyCollection exp) {}

	@Override
	public void visitExists(ExpExists exp) {
		visitQuery(exp);
	}

	@Override
	public void visitForAll(ExpForAll exp) {
		visitQuery(exp);
	}

	@Override
	public void visitIf(ExpIf exp) {
		exp.getCondition().processWithVisitor(this);
		exp.getThenExpression().processWithVisitor(this);
		exp.getElseExpression().processWithVisitor(this);
	}

	@Override
	public void visitIsKindOf(ExpIsKindOf exp) {
		exp.getSourceExpr().processWithVisitor(this);
		if (exp.getTargetType().isObjectType()) {
			addClassCoverage(((ObjectType) exp.getTargetType()).cls());
		}
	}

	@Override
	public void visitIsTypeOf(ExpIsTypeOf exp) {
		// NOTE(review): unlike visitIsKindOf, the target type is not added to
		// the class coverage here — confirm whether that is intentional.
		exp.getSourceExpr().processWithVisitor(this);
	}

	@Override
	public void visitIsUnique(ExpIsUnique exp) {
		visitQuery(exp);
	}

	@Override
	public void visitIterate(ExpIterate exp) {
		visitQuery(exp);
	}

	@Override
	public void visitLet(ExpLet exp) {
		exp.getVarExpression().processWithVisitor(this);
		exp.getInExpression().processWithVisitor(this);
	}

	@Override
	public void visitNavigation(ExpNavigation exp) {
		exp.getObjectExpression().processWithVisitor(this);
		addAssociationCoverage(exp.getDestination().association());
		addAssociationEndCoverage(exp.getDestination());
	}

	@Override
	public void visitObjAsSet(ExpObjAsSet exp) {
		exp.getObjectExpression().processWithVisitor(this);
	}

	// Guards against endless expansion of (mutually) recursive operations
	// when expandOperations is enabled.
	private Stack<MOperation> operationStack = new Stack<MOperation>();

	@Override
	public void visitObjOp(ExpObjOp exp) {
		for (Expression ex : exp.getArguments()) {
			ex.processWithVisitor(this);
		}
		// Argument 0 is the receiver expression of the operation call.
		addOperationCoverage(((ObjectType) exp.getArguments()[0].type()).cls(),
				exp.getOperation());

		if (expandOperations && exp.getOperation().hasExpression()
				&& !operationStack.contains(exp.getOperation())) {
			operationStack.push(exp.getOperation());
			exp.getOperation().expression().processWithVisitor(this);
			operationStack.pop();
		}
	}

	@Override
	public void visitObjRef(ExpObjRef exp) {
		// BUG FIX: this previously called exp.processWithVisitor(this), which
		// dispatches straight back to visitObjRef and recursed until
		// StackOverflowError. A literal object reference has no
		// sub-expressions to visit, so it is treated as a leaf here.
	}

	@Override
	public void visitOne(ExpOne exp) {
		visitQuery(exp);
	}

	@Override
	public void visitOrderedSetLiteral(ExpOrderedSetLiteral exp) {
		visitCollectionLiteral(exp);
	}

	@Override
	public void visitQuery(ExpQuery exp) {
		exp.getRangeExpression().processWithVisitor(this);
		exp.getQueryExpression().processWithVisitor(this);
	}

	@Override
	public void visitReject(ExpReject exp) {
		visitQuery(exp);
	}

	@Override
	public void visitWithValue(ExpressionWithValue exp) {}

	@Override
	public void visitSelect(ExpSelect exp) {
		visitQuery(exp);
	}

	@Override
	public void visitSequenceLiteral(ExpSequenceLiteral exp) {
		visitCollectionLiteral(exp);
	}

	@Override
	public void visitSetLiteral(ExpSetLiteral exp) {
		visitCollectionLiteral(exp);
	}

	@Override
	public void visitSortedBy(ExpSortedBy exp) {
		visitQuery(exp);
	}

	@Override
	public void visitStdOp(ExpStdOp exp) {
		for (Expression expArg : exp.args()) {
			expArg.processWithVisitor(this);
		}
	}

	@Override
	public void visitTupleLiteral(ExpTupleLiteral exp) {
		for (ExpTupleLiteral.Part part : exp.getParts()) {
			part.getExpression().processWithVisitor(this);
		}
	}

	@Override
	public void visitTupleSelectOp(ExpTupleSelectOp exp) {}

	@Override
	public void visitUndefined(ExpUndefined exp) {}

	@Override
	public void visitVariable(ExpVariable exp) {
		if (exp.type().isTrueObjectType()) {
			addClassCoverage(((ObjectType) exp.type()).cls());
		}
	}

	protected void visitCollectionLiteral(ExpCollectionLiteral exp) {
		for (Expression ex : exp.getElemExpr()) {
			ex.processWithVisitor(this);
		}
	}

	@Override
	public void visitClosure(ExpClosure expClosure) {
		visitQuery(expClosure);
	}

	@Override
	public void visitOclInState(ExpOclInState expOclInState) {
		expOclInState.getSourceExpr().processWithVisitor(this);
	}

	@Override
	public void visitVarDeclList(VarDeclList varDeclList) {
		for (int i = 0; i < varDeclList.size(); ++i) {
			varDeclList.varDecl(i).processWithVisitor(this);
		}
	}

	@Override
	public void visitVarDecl(VarDecl varDecl) {}

	@Override
	public void visitObjectByUseId(ExpObjectByUseId expObjectByUseId) {
		addClassCoverage(expObjectByUseId.getSourceType().cls());
		expObjectByUseId.getIdExpression().processWithVisitor(this);
	}

	@Override
	public void visitConstUnlimitedNatural(
			ExpConstUnlimitedNatural expressionConstUnlimitedNatural) {}

	@Override
	public void visitSelectByKind(ExpSelectByKind expSelectByKind) {
		if (expSelectByKind.type().elemType().isTrueObjectType()) {
			addClassCoverage(((ObjectType) expSelectByKind.type().elemType()).cls());
		}
		expSelectByKind.getSourceExpression().processWithVisitor(this);
	}

	@Override
	public void visitExpSelectByType(ExpSelectByType expSelectByType) {
		if (expSelectByType.type().elemType().isTrueObjectType()) {
			addClassCoverage(((ObjectType) expSelectByType.type().elemType()).cls());
		}
		expSelectByType.getSourceExpression().processWithVisitor(this);
	}

	@Override
	public void visitRange(ExpRange exp) {
		exp.getStart().processWithVisitor(this);
		exp.getEnd().processWithVisitor(this);
	}
}
##############################################################################
#
# Copyright (c) 2004 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
Viewlet implementations.
"""
__docformat__ = 'restructuredtext'

import os
import sys

import zope.interface
from zope.traversing import api
from zope.publisher.browser import BrowserView
from zope.viewlet import interfaces
from zope.browserpage import simpleviewclass
from zope.browserpage import ViewPageTemplateFile


@zope.interface.implementer(interfaces.IViewlet)
class ViewletBase(BrowserView):
    """Viewlet adapter class used in meta directive as a mixin class."""

    def __init__(self, context, request, view, manager):
        super(ViewletBase, self).__init__(context, request)
        # The containing view acts as this viewlet's location parent.
        self.__parent__ = view
        self.context = context
        self.request = request
        self.manager = manager

    def update(self):
        # Default update step is a no-op; subclasses may prepare state here.
        pass

    def render(self):
        raise NotImplementedError(
            '`render` method must be implemented by subclass.')


class SimpleAttributeViewlet(ViewletBase):
    """
    A viewlet that uses a method named in :attr:`__page_attribute__`
    to produce its content.
    """

    #: The name of the attribute of this object that will be used
    #: in `render` to produce our content. Must not be set to ``"render"``.
    __page_attribute__ = None

    def render(self, *args, **kw):
        # If a class doesn't provide its own call, then get the attribute
        # given by the browser default.
        attr = self.__page_attribute__
        if attr == 'render':
            # Delegating to 'render' would recurse forever; refuse.
            raise AttributeError("render")
        meth = getattr(self, attr)
        return meth(*args, **kw)


class simple(simpleviewclass.simple):
    """Simple viewlet class supporting the ``render()`` method."""

    render = simpleviewclass.simple.__call__


def SimpleViewletClass(template, offering=None, bases=(), attributes=None,
                       name=u''):
    """A function that can be used to generate a viewlet from a set of
    information.
    """

    # Get the current frame
    if offering is None:
        offering = sys._getframe(1).f_globals

    # Create the base class hierarchy
    bases += (simple, ViewletBase)

    attrs = {'index': ViewPageTemplateFile(template, offering),
             '__name__': name}
    if attributes:
        attrs.update(attributes)

    # Generate a derived view class.
    class_ = type("SimpleViewletClass from %s" % template, bases, attrs)

    return class_


class ResourceViewletBase(object):
    """A simple viewlet for inserting references to resources.

    This is an abstract class that is expected to be used as a base only.
    """
    _path = None

    def getURL(self):
        """
        Retrieve the resource for our path using the
        :class:`++resource++ namespace <zope.traversing.namespace.resource>`
        and call it, returning the results.

        Commonly, the found resource will be an
        :class:`zope.browserresource.interfaces.IResource`, which, when
        called, will adapt itself to
        :class:`zope.traversing.browser.interfaces.IAbsoluteURL` and return
        the string value of the absolute URL.
        """
        resource = api.traverse(self.context, '++resource++' + self._path,
                                request=self.request)
        return resource()

    def render(self, *args, **kw):
        # ``index`` is supplied by the concrete class built in
        # JavaScriptViewlet/CSSViewlet below.
        return self.index(*args, **kw)


def JavaScriptViewlet(path):
    """Create a viewlet that can simply insert a javascript link."""
    src = os.path.join(os.path.dirname(__file__), 'javascript_viewlet.pt')

    klass = type('JavaScriptViewlet',
                 (ResourceViewletBase, ViewletBase),
                 {'index': ViewPageTemplateFile(src),
                  '_path': path})

    return klass


class CSSResourceViewletBase(ResourceViewletBase):

    _media = 'all'
    _rel = 'stylesheet'

    def getMedia(self):
        return self._media

    def getRel(self):
        return self._rel


def CSSViewlet(path, media="all", rel="stylesheet"):
    """Create a viewlet that can simply insert a CSS link."""
    src = os.path.join(os.path.dirname(__file__), 'css_viewlet.pt')

    klass = type('CSSViewlet',
                 (CSSResourceViewletBase, ViewletBase),
                 {'index': ViewPageTemplateFile(src),
                  '_path': path,
                  '_media': media,
                  '_rel': rel})

    return klass


class ResourceBundleViewletBase(object):
    """A simple viewlet for inserting references to different resources.

    This is an abstract class that is expected to be used as a base only.
    """
    _paths = None

    #: A callable (usually a template) that is used to implement
    #: the `render` method.
    index = None

    def getResources(self):
        """
        Retrieve all the resources in our desired paths using the
        :class:`++resource++ namespace <zope.traversing.namespace.resource>`
        """
        resources = []
        append = resources.append
        for path in self._paths:
            append(api.traverse(self.context, '++resource++' + path,
                                request=self.request))
        return resources

    def render(self, *args, **kw):
        return self.index(*args, **kw)


def JavaScriptBundleViewlet(paths):
    """Create a viewlet that can simply insert javascript links."""
    src = os.path.join(
        os.path.dirname(__file__), 'javascript_bundle_viewlet.pt')

    klass = type('JavaScriptBundleViewlet',
                 (ResourceBundleViewletBase, ViewletBase),
                 {'index': ViewPageTemplateFile(src),
                  '_paths': paths})

    return klass


class CSSResourceBundleViewletBase(object):
    """A simple viewlet for inserting css references to different resources.

    There is a sequence of dict used for the different resource
    descriptions. The sequence uses the following format:

    ({path:'the path', media:'all', rel:'stylesheet'},...)

    The default values for media is ``all`` and the default value for rel is
    ``stylesheet``. The path must be set there is no default value for the
    path attribute.

    This is an abstract class that is expected to be used as a base only.
    """
    _items = None

    def getResources(self):
        """
        Retrieve all the resources for our desired items' paths using the
        :class:`++resource++ namespace <zope.traversing.namespace.resource>`
        and return a list of dictionaries.

        The dictionaries are like those passed to the constructor with the
        defaults filled in, except that ``path`` has been replaced with
        ``url``. The ``url`` object is as described for
        `ResourceViewletBase.getURL`.
        """
        resources = []
        append = resources.append
        for item in self._items:
            info = {}
            info['url'] = api.traverse(self.context,
                                       '++resource++' + item.get('path'),
                                       request=self.request)
            info['media'] = item.get('media', 'all')
            info['rel'] = item.get('rel', 'stylesheet')
            append(info)
        return resources

    def render(self, *args, **kw):
        return self.index(*args, **kw)


def CSSBundleViewlet(items):
    """
    Create a viewlet that can simply insert css links.

    :param items: A sequence of dictionaries as described in
        `CSSResourceBundleViewletBase`.
    """
    src = os.path.join(os.path.dirname(__file__), 'css_bundle_viewlet.pt')

    klass = type('CSSBundleViewlet',
                 (CSSResourceBundleViewletBase, ViewletBase),
                 {'index': ViewPageTemplateFile(src),
                  '_items': items})

    return klass
<reponame>alfaz-003/nestjs_Project-API-<filename>src/account/account-purchase/account-purchaseSchema.ts import * as mongoose from 'mongoose'; export const accountPurchase = new mongoose.Schema({ p_order : {type: mongoose.Schema.Types.ObjectId , ref: 'PurcahseOrder'} }) export interface accountPurchase extends mongoose.Document { p_order : {type: mongoose.Schema.Types.ObjectId , ref: 'PurcahseOrder'} }
/**
 * Verifies that {@code EmbeddedMaven} can build the "jar sample" project.
 *
 * <p>NOTE(review): {@code pathToJarSamplePom} and the {@code verify*} helper
 * methods are resolved through imports outside this chunk — presumably
 * static imports from a shared test-utilities class; confirm against the
 * full file.</p>
 *
 * @author <a href="mailto:[email protected]">Matous Jobanek</a>
 */
public class PomEquippedEmbeddedMavenForJarSampleTestCase {

    /** Runs {@code clean verify} with Maven 3.3.9 and checks exactly one jar is produced. */
    @Test
    public void testJarSampleBuild() {
        BuiltProject builtProject = EmbeddedMaven
            .forProject(pathToJarSamplePom)
            .setGoals("clean", "verify")
            .useMaven3Version("3.3.9")
            .build();

        verifyJarSampleSimpleBuild(builtProject);
        verifyJarSampleContainsOnlyOneJar(builtProject);
    }

    /**
     * Runs {@code clean package} with the {@code test-classes} profile and
     * checks the build also yields test classes.
     */
    @Test
    public void testJarSampleBuildWithTestClasses() {
        BuiltProject builtProject = EmbeddedMaven
            .forProject(pathToJarSamplePom)
            .setGoals("clean", "package")
            .setProfiles("test-classes")
            .useMaven3Version("3.3.9")
            .build();

        verifyJarSampleSimpleBuild(builtProject);
        // (sic: "Jas" — the helper is presumably declared with this typo
        // elsewhere; renaming it is out of scope for this file.)
        verifyJasSampleContainsAlsoTestClasses(builtProject);
    }
}
from PIL import ImageFont, ImageDraw

from form_generator.tools import get_class_id
from form_generator.Box import Box


class Button:
    """A drawable rounded-rectangle button with a centered text label.

    The button is outlined with four corner arcs and four straight edges,
    flood-filled with the form background colour, and labelled in the
    centre.  ``width`` may grow at draw time so the label always fits.
    """

    def __init__(self, width, height, arc_radius, form_background_color,
                 label_inside, label_inside_color, font_size):
        # Requested geometry. ``width`` may be widened by draw() to fit the
        # label; ``overall_width`` preserves the originally requested value.
        self.width = width
        self.height = height
        self.arc_radius = arc_radius
        self.label_inside = label_inside
        self.label_inside_color = label_inside_color
        # Border and fill both reuse the form background colour.
        self.border_color = form_background_color
        self.background_color = form_background_color
        # Fraction of the button height used as the font size.
        self.font_size = font_size
        self.overall_width = self.width

    def draw(self, x, y, img, image_draw):
        """Draw the button onto ``img`` and return its bounding box.

        :param x: the x position of the element
        :param y: the y position of the element
        :param img: The image element we are drawing on
        :param image_draw: the PIL ImageDraw object
        :return: a single-element list with the button's labelled Box
            (inflated by 1px on every side).
        """
        font_size = int(self.height * self.font_size)
        # NOTE(review): hard-coded "arial.ttf" only resolves where Arial is
        # on the font search path (typically Windows).
        font = ImageFont.truetype("arial.ttf", font_size)
        # box containing arc must be twice the size of the radius
        arc_size_for_corner = self.arc_radius * 2
        text_width, text_height = image_draw.textsize(self.label_inside, font)
        # Widen the button if the label would overflow (10px padding).
        self.width = max(text_width + 10, self.width)

        if self.arc_radius > 0:
            image_draw.arc((x, y, x + arc_size_for_corner, y + arc_size_for_corner),
                           180, 270, fill=self.border_color)  # top-left
            image_draw.arc((x, y + self.height - arc_size_for_corner,
                            x + arc_size_for_corner, y + self.height),
                           90, 180, fill=self.border_color)  # bottom-left
            image_draw.arc((x + self.width - arc_size_for_corner, y,
                            x + self.width, y + arc_size_for_corner),
                           -90, 0, fill=self.border_color)  # top-right
            image_draw.arc((x + self.width - arc_size_for_corner,
                            y + self.height - arc_size_for_corner,
                            x + self.width, y + self.height),
                           0, 90, fill=self.border_color)  # bottom-right
            image_draw.line([(x + self.arc_radius, y),
                             (x + self.width - self.arc_radius, y)],
                            fill=self.border_color)  # top line
            image_draw.line([(x + self.width, y + self.arc_radius),
                             (x + self.width, y + self.height - self.arc_radius)],
                            fill=self.border_color)  # right line
            image_draw.line([(x + self.arc_radius, y + self.height),
                             (x + self.width - self.arc_radius, y + self.height)],
                            fill=self.border_color)  # bottom line
            image_draw.line([(x, y + self.arc_radius),
                             (x, y + self.height - self.arc_radius)],
                            fill=self.border_color)  # left line

        # Fill the interior from the button's centre point.
        # Fix: use integer division — Pillow's floodfill indexes pixels with
        # the seed coordinates, so they must be ints, and true division
        # produced floats.
        # NOTE(review): when arc_radius <= 0 no border is drawn above, so
        # this fill has no boundary and would flood the whole image; callers
        # appear to always use a positive radius — confirm.
        ImageDraw.floodfill(img,
                            ((2 * x + self.width) // 2,
                             (2 * y + self.height) // 2),
                            value=self.background_color)
        image_draw = ImageDraw.Draw(img)
        image_draw.text((x + (self.width - text_width) / 2,
                         y + (self.height - text_height) / 2),
                        self.label_inside, font=font,
                        fill=self.label_inside_color)

        return [Box(get_class_id('button'), x - 1, y - 1,
                    self.width + 2, self.height + 2)]

    def get_width(self):
        # The originally requested width; draw() may widen self.width to fit
        # the label but overall_width is never updated.
        return self.overall_width
// MutationEffectFromString returns a MutationEffect from the provided string func MutationEffectFromString(mutationString string) (MutationEffect, error) { switch strings.ToLower(mutationString) { case "inventory": return MutationEffectInventory, nil case "xray": return MutationEffectXRay, nil case "increasedvision": return MutationEffectIncreasedVision, nil case "heightenedHearing": return MutationEffectHeightenedHearing, nil case "nightVision": return MutationEffectNightVision, nil case "regeneration": return MutationEffectRegeneration, nil case "confusion": return MutationEffectConfusion, nil case "pyrokinesis": return MutationEffectPyrokinesis, nil case "push": return MutationEffectPush, nil case "teleport": return MutationEffectTeleport, nil case "teleportOther": return MutationEffectTeleportOther, nil case "burrowingClaws": return MutationEffectBurrowingClaws, nil default: return MutationEffectUnknown, fmt.Errorf("Unknown mutation '%s'", mutationString) } }
/**
 * Activity that collects symptom input from the user and (for now) only
 * acknowledges the submit action with a toast.
 *
 * Created by BANGALORE on 5/13/2016.
 */
public class UserSymptomInput extends AppCompatActivity{

    // Inflates the symptom-input layout and wires the submit button.
    // NOTE(review): the click listener registered here duplicates the
    // submit(View) handler below — both show the same toast. If the layout
    // also declares android:onClick="submit" on the button, the toast will
    // appear twice; confirm against the layout XML.
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.user_symptom_input);
        // super.onCreateDrawer();
        //setContentView(R.layout.activity_main2);
       /* Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);*/

        Button submit = (Button) findViewById(R.id.submitButton);
        submit.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Placeholder feedback until real submission logic exists.
                Toast.makeText(UserSymptomInput.this, "Submit button clicked", Toast.LENGTH_SHORT).show();
            }
        });
    }//end onCreate

    /*public boolean onCreateOptionsMenu(Menu menu){
        getMenuInflater().inflate(R.menu.menu_login,menu);
        return true;
    }*/

    // Click handler, presumably referenced from the layout via
    // android:onClick="submit" — see the duplicate-toast note above.
    public void submit(View v){
        switch(v.getId()){
            case R.id.submitButton:
                Toast.makeText(UserSymptomInput.this, "Submit button clicked", Toast.LENGTH_SHORT).show();
                break;
        }
    }
}
def wait(self) -> None:
    """Throttle: block until a randomly chosen delay has elapsed since the
    last recorded call.

    A delay is drawn from ``self.delays`` and compared with the time that has
    passed since ``self.last_call_timestamp``; if the gap is not yet large
    enough, sleep for the (rounded) remainder.  Note that this method does
    not itself refresh ``last_call_timestamp``.
    """
    required_gap = random.choice(self.delays)
    elapsed = round(time.time(), 2) - self.last_call_timestamp
    if elapsed < required_gap:
        time.sleep(round(required_gap - elapsed, 2))
/*
Problem :

You are given a doubly linked list which in addition to the next and previous
pointers, it could have a child pointer, which may or may not point to a separate
doubly linked list. These child lists may have one or more children of their own,
and so on, to produce a multilevel data structure, as shown in the example below.

Flatten the list so that all the nodes appear in a single-level, doubly linked
list. You are given the head of the first level of the list.

Example 1:
Input: head = [1,2,3,4,5,6,null,null,null,7,8,9,10,null,null,11,12]
Output: [1,2,3,7,8,11,12,9,10,4,5,6]

Example 2:
Input: head = [1,2,null,3]
Output: [1,3,2]
*/

/*
Analysis ::
    Time Complexity  :: O(n) — every node belongs to at most one immediate
        child list, so the tail-finding walks over all splices sum to at most
        n steps in total.
    Space Complexity :: O(1) — the splice is performed in place with a few
        pointers; no recursion or auxiliary stack is used. (The original
        comment claimed O(n), which would apply to a recursive solution.)
*/

/*
// Definition for a Node.
class Node {
    public int val;
    public Node prev;
    public Node next;
    public Node child;
};
*/

class Solution {
    // Flattens the multilevel list iteratively: whenever the current node
    // has a child, the entire child list is spliced between the node and
    // its next, then traversal continues into the spliced-in nodes (so
    // nested children are handled when they are reached).
    public Node flatten(Node head) {
        if(head == null) return head;

        Node temp = head;
        while(temp != null){
            if(temp.child == null){
                // No child here: simply advance.
                temp = temp.next;
                continue;
            }
            // Find the tail of the child list so it can be linked back to
            // the remainder of the current level.
            Node child = temp.child;
            while(child.next != null) child = child.next;

            // Splice: tail of child list -> old next, and current -> child.
            child.next = temp.next;
            if(temp.next != null) temp.next.prev = child;
            temp.next = temp.child;
            temp.child.prev = temp;
            // Clear the child pointer as required by the flattened form.
            temp.child = null;
        }
        return head;
    }
}
/**
 * Command that finds a target, spins up the shooter, aims, and shoots at a the correct speed for the target distance.
 */
public class ShootCommand extends CommandBase {

    private final ShooterSubsystem shooterSubsystem;
    private final ShooterLimelightSubsystem limelightSubsystem;
    private final DriveTrainSubsystem driveTrainSubsystem;
    private final IndexerSubsystem indexerSubsystem;

    // Last measured distance to the target; 0 means "never seen a target".
    private double lastTargetDistance = 0;

    /**
     * Creates the command and reserves every subsystem it touches.
     *
     * @param shooterSubsystem   flywheel/shooter control
     * @param limelightSubsystem vision pipeline used for target distance
     * @param driveTrainSubsystem drivetrain (reserved; stopped on end)
     * @param indexerSubsystem   ball indexer that feeds the shooter
     */
    public ShootCommand(
            ShooterSubsystem shooterSubsystem,
            ShooterLimelightSubsystem limelightSubsystem,
            DriveTrainSubsystem driveTrainSubsystem,
            IndexerSubsystem indexerSubsystem) {
        this.shooterSubsystem = shooterSubsystem;
        this.limelightSubsystem = limelightSubsystem;
        this.driveTrainSubsystem = driveTrainSubsystem;
        this.indexerSubsystem = indexerSubsystem;
        addRequirements(shooterSubsystem, limelightSubsystem, driveTrainSubsystem, indexerSubsystem);
    }

    /** Turns the limelight on and forgets any distance from a previous run. */
    @Override
    public void initialize() {
        limelightSubsystem.enable();
        lastTargetDistance = 0;
    }

    /**
     * Each cycle: refresh the target distance when visible, spin the shooter
     * up for that distance, and feed balls only once the shooter reports it
     * is ready.
     */
    @Override
    public void execute() {
        // If the target is visible, get the new distance. If the target isn't visible we'll use the last known distance.
        if (limelightSubsystem.getTargetAcquired()) {
            lastTargetDistance = limelightSubsystem.getDistanceToTarget();
        }

        // If we have a target distance, spin up and shoot
        if (lastTargetDistance > 0) {
            shooterSubsystem.prepareToShoot(lastTargetDistance);
            // TODO aim left-to-right
            if (shooterSubsystem.isReadyToShoot()) {
                // TODO do we want to make sure the target is currently visible before shooting?
                indexerSubsystem.shoot();
            } else {
                // Hold balls back while the flywheel is still spinning up.
                indexerSubsystem.stop();
            }
        } else {
            // No target has ever been visible, so stop
            shooterSubsystem.stop();
        }
    }

    /** Never finishes on its own; it runs until interrupted. */
    @Override
    public boolean isFinished() {
        return false;
    }

    /** Shuts down the limelight and every actuator this command drove. */
    @Override
    public void end(boolean interrupted) {
        limelightSubsystem.disable();
        shooterSubsystem.stop();
        driveTrainSubsystem.stop();
        indexerSubsystem.stop();
    }
}
<filename>wrappers/serial/calibration/modelpartition.py """ Radio interferometric calibration using an expectation maximisation algorithm See the SDP document "Model Partition Calibration View Packet" """ from processing_components.calibration.modelpartition import create_modelpartition from processing_components.calibration.modelpartition import solve_modelpartition
# This script extracted from https://github.com/socialize/django-tastypie/commit/6a98aa4ff344105f8b5090a2d4f2d407bccae089 # Inheriting django-tastypie LICENSE for this file only # Copyright (c) 2010, <NAME> # All rights reserved. # The following notice applies to this file only, and has no bearing # on other material in the package which contains the file. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the tastypie nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL tastypie BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from django.http import HttpRequest
#from docutils.parsers.rst import Parser
#from docutils.core import Publisher
from docutils.core import publish_doctree
from django.template import Context, Template
from sphinx.util.compat import Directive
import os
import json

def setup(app):
    # Sphinx extension entry point: registers the directive under the name
    # ``tastydoc``.
    app.add_directive('tastydoc', TastyDirective)

import tastypie

class TastyDirective(Directive):
    # this enables content in the directive
    has_content = True

    def run(self):
        # The directive's first content line is a dotted path to a tastypie
        # Api instance, e.g. ``myproject.urls.v1_api``: split it into the
        # module path and the attribute name.
        module_parts = self.content[0].split(".")
        module = ".".join(module_parts[0:len(module_parts) - 1])
        member = module_parts[len(module_parts) - 1]
        # fromlist forces __import__ to return the leaf module rather than
        # the top-level package.
        api_module = __import__(module, fromlist = ['a'])
        api = api_module.__dict__[member]
        #parser = Parser()
        #publisher = Publisher()
        # Fake a request so the Api can render its top-level JSON listing.
        request = HttpRequest()
        top_level_response = api.top_level(request, None)
        top_level_doc = json.loads(top_level_response.content)

        # Enrich each registered resource's entry with its schema, allowed
        # methods, ordering/filtering metadata and docstring.
        for name in sorted(api._registry.keys()):
            resource_dict = top_level_doc[name]
            resource = api._registry[name]
            resource_dict['schema'] = resource.build_schema()
            resource_dict['schema']['field_list'] = [{'name': field, 'meta': meta} for field, meta in resource_dict['schema']['fields'].items()]
            # Override tastypie's generic help text for well-known fields.
            # NOTE(review): in the two strings below the doubled quotes end
            # and restart the literal (adjacent-string concatenation), so the
            # rendered help text reads e.g. "List of {'verb':, 'state':}" —
            # presumably the author intended escaped quotes; confirm before
            # changing, as the emitted docs depend on the exact text.
            for field, field_meta in resource_dict['schema']['fields'].items():
                if field == 'id':
                    field_meta['help_text'] = "Integer record identifier, unique for objects of this type"
                elif field == 'content_type_id':
                    field_meta['help_text'] = "Integer type identifier"
                elif field == 'state' and field_meta['help_text'] == tastypie.fields.CharField.help_text:
                    field_meta['help_text'] = "Unicode string, may be set based on ``available_transitions`` field"
                elif field == 'immutable_state' and field_meta['help_text'] == tastypie.fields.BooleanField.help_text:
                    field_meta['help_text'] = "If ``true``, this object may not have its state modified by the user (monitoring only)"
                elif field == 'resource_uri':
                    field_meta['help_text'] = "URL for this object"
                elif field == 'available_transitions':
                    field_meta['help_text'] = "List of {'verb':"", 'state':""} for possible states (for use with POST)"
                elif field == 'available_jobs':
                    field_meta['help_text'] = "List of {'args':{}, 'class_name':"", 'confirmation':"", verb: ""} for possible " \
                                              "non-state-change jobs (for use with the ``command`` resource)"
                elif field == 'label':
                    field_meta['help_text'] = "Non-unique human readable name for presentation"

            resource_dict['list_allowed_methods'] = [m.upper() for m in resource._meta.list_allowed_methods]
            resource_dict['detail_allowed_methods'] = [m.upper() for m in resource._meta.detail_allowed_methods]
            resource_dict['ordering'] = resource._meta.ordering
            resource_dict['filtering'] = resource._meta.filtering

            # Replace tastypie's filtering sentinels with readable phrases.
            for field, methods in resource_dict['filtering'].items():
                if methods == tastypie.constants.ALL_WITH_RELATIONS:
                    resource_dict['filtering'][field] = ["including dereferenced attributes"]
                if methods == tastypie.constants.ALL:
                    resource_dict['filtering'][field] = ["any filter type"]

            resource_dict['doc'] = resource.__doc__

        # Render the collected endpoint data through a Django template that
        # produces reStructuredText, then parse it into docutils nodes.
        path = os.path.dirname(__file__)
        rst_template = open(path + "/tasty-endpoint-template.rst").read()

        template_vars = {
            'endpoints': top_level_doc,
        }

        django_template = Template(rst_template)
        output_rst = django_template.render(Context(template_vars))
        #open('dump.rst', 'w').write(output_rst)
        doctree = publish_doctree(output_rst)
        return doctree.children