import dash_core_components as dcc
import dash_html_components as html
from dash_docs import tools
from dash_docs import styles
from dash_docs import reusable_components as rc
examples = tools.load_examples(__file__)
layout = html.Div([
rc.Markdown('''
# Building responsive Cytoscape graphs
Starting with v0.2.0, you can make your Cytoscape graph responsive:
```
cyto.Cytoscape(
id='cytoscape',
...,
responsive=True
)
```
The following app shows this new feature in action:
'''),
rc.Markdown(
examples['usage-responsive-graph.py'][0],
style=styles.code_container
)
])
|
/*
* FilmTime.h
*/
#ifndef SRC_FILMTIME_H_
#define SRC_FILMTIME_H_
#include "Film.h"
using namespace std;
class FilmTime {
unsigned hour;
Film *film;
unsigned roomID;
public:
FilmTime(unsigned h, Film *f, unsigned id);
virtual ~FilmTime();
unsigned getHour() const;
unsigned getRoomID() const;
Film* getFilm() const;
void setFilm(Film* f);
void setHour(unsigned h){hour = h;}
bool operator == (const FilmTime &ft) const;
bool operator<(const FilmTime & ft1) const; //TODO: Implement a correct version of the operator (see the sketch after this header)
};
#endif /* SRC_FILMTIME_H_ */
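// A minimal sketch of the missing operator< for the TODO above; this definition
// would live in FilmTime.cpp. Ordering by hour first and then by room ID is an
// assumption about the intended semantics, not the project's confirmed choice.
bool FilmTime::operator<(const FilmTime &ft) const {
if (hour != ft.hour) return hour < ft.hour;
return roomID < ft.roomID;
}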
|
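# Note: this class is excerpted from a larger generated module; it assumes
# `import typing as t` and a module-level `to_json` serialization helper
# (dispatching on the value's type) are defined earlier in the file.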
class DestinyCharacterCustomization:
"""Raw data about the customization options chosen for a character's face
and appearance.
You can look up the relevant class/race/gender combo in
DestinyCharacterCustomizationOptionDefinition for the character, and
then look up these values within the CustomizationOptions found to
pull some data about their choices. Warning: not all of that data is
meaningful. Some data has useful icons. Others have nothing, and are
only meant for 3D rendering purposes (which we sadly do not expose
yet)
"""
decal_color: int
decal_index: int
eye_color: int
face: int
feature_colors: t.Sequence[int]
feature_index: int
hair_colors: t.Sequence[int]
hair_index: int
lip_color: int
personality: int
skin_color: int
wear_helmet: bool
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"personality": to_json(self.personality),
"face": to_json(self.face),
"skinColor": to_json(self.skin_color),
"lipColor": to_json(self.lip_color),
"eyeColor": to_json(self.eye_color),
"hairColors": to_json(self.hair_colors),
"featureColors": to_json(self.feature_colors),
"decalColor": to_json(self.decal_color),
"wearHelmet": to_json(self.wear_helmet),
"hairIndex": to_json(self.hair_index),
"featureIndex": to_json(self.feature_index),
"decalIndex": to_json(self.decal_index),
} |
/**
* Searches for ETL jobs that are ready to run and updates the time of their next run.
* @param message the actor message; only the "checking" trigger is handled
* @throws Exception
*/
@Override
public void onReceive(Object message) throws Exception {
if (message.equals("checking")) {
runDueJobs();
}
}
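// A sketch of how the "checking" trigger is typically produced (an assumption:
// the scheduler call below uses the classic Akka API, and the initial delay and
// interval are illustrative, not taken from this project):
//
// getContext().system().scheduler().schedule(
// Duration.Zero(), // fire immediately, then...
// Duration.create(1, TimeUnit.MINUTES), // ...re-check every minute
// getSelf(), "checking",
// getContext().dispatcher(), null);
|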
/*
* ***** BEGIN LICENSE BLOCK *****
* Zimbra Collaboration Suite Server
* Copyright (C) 2011, 2012, 2013, 2014, 2016 Synacor, Inc.
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software Foundation,
* version 2 of the License.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License along with this program.
* If not, see <https://www.gnu.org/licenses/>.
* ***** END LICENSE BLOCK *****
*/
package com.zimbra.qa.unittest.prov;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.zimbra.cs.account.NamedEntry;
public class Verify {
public static void verifySameId(NamedEntry entry1, NamedEntry entry2)
throws Exception {
assertNotNull(entry1);
assertNotNull(entry2);
assertEquals(entry1.getId(), entry2.getId());
}
public static void verifySameEntry(NamedEntry entry1, NamedEntry entry2)
throws Exception {
verifySameId(entry1, entry2);
assertEquals(entry1.getName(), entry2.getName());
}
// verify list contains all the entries
// if checkCount == true, verify the count matches too
public static void verifyEntries(List<NamedEntry> list, NamedEntry[] entries,
boolean checkCount) throws Exception {
try {
if (checkCount)
assertEquals(list.size(), entries.length);
Set<String> ids = new HashSet<String>();
for (NamedEntry entry : list)
ids.add(entry.getId());
for (NamedEntry entry : entries) {
assertTrue(ids.contains(entry.getId()));
ids.remove(entry.getId());
}
// make sure all ids in list are present in entries
if (checkCount)
assertEquals(ids.size(), 0);
} catch (AssertionError e) {
System.out.println();
System.out.println("===== verifyEntries failed =====");
System.out.println("Message: " + e.getMessage());
System.out.println();
System.out.println("list contains " + list.size() + " entries:");
for (NamedEntry entry : list) {
System.out.println(" " + entry.getName());
}
System.out.println();
System.out.println("entries contains " + entries.length + " entries:");
for (NamedEntry entry : entries) {
System.out.println(" " + entry.getName());
}
System.out.println();
throw e;
}
}
// verify list of NamedEntry contains all the ids
// if checkCount == true, verify the count matches too
public static void verifyEntriesById(List<NamedEntry> list, String[] ids,
boolean checkCount)
throws Exception {
Set<String> idsInList = new HashSet<String>();
for (NamedEntry entry : list)
idsInList.add(entry.getId());
verifyEntries(idsInList, ids, checkCount);
}
// verify list of NamedEntry contains all the names
// if checkCount == true, verify the count matches too
public static void verifyEntriesByName(List<NamedEntry> list, String[] names,
boolean checkCount)
throws Exception {
Set<String> namesInList = new HashSet<String>();
for (NamedEntry entry : list)
namesInList.add(entry.getName());
verifyEntries(namesInList, names, checkCount);
}
// verify list contains all the names
// if checkCount == true, verify the count matches too
public static void verifyEntries(Set<String> list, String[] names,
boolean checkCount)
throws Exception {
try {
if (checkCount) {
assertEquals(names.length, list.size());
}
for (String name : names) {
assertTrue(list.contains(name));
}
} catch (AssertionError e) {
System.out.println();
System.out.println("===== verifyEntries failed =====");
System.out.println("Message: " + e.getMessage());
System.out.println();
System.out.println("list contains " + list.size() + " entries:");
for (String name : list) {
System.out.println(" " + name);
}
System.out.println();
System.out.println("entries contains " + names.length + " entries:");
for (String name : names) {
System.out.println(" " + name);
}
System.out.println();
throw e;
}
}
public static void verifyEquals(Collection<String> expected, Collection<String> actual)
throws Exception {
try {
assertEquals(expected.size(), actual.size());
for (String entry : expected) {
if (!actual.contains(entry)) {
System.out.println("missing entry: " + entry);
}
assertTrue(actual.contains(entry));
}
} catch (AssertionError e) {
dump(e, expected, actual);
throw e;
}
}
public static void verifyEquals(Set<String> expected, String[] actual)
throws Exception {
verifyEquals(expected, Sets.newHashSet(Arrays.asList(actual)));
}
public static void verifyEquals(List<String> expected, List<String> actual)
throws Exception {
try {
assertEquals(expected.size(), actual.size());
for (int i = 0; i < expected.size(); i++) {
assertEquals(expected.get(i), actual.get(i));
}
} catch (AssertionError e) {
dump(e, expected, actual);
throw e;
}
}
public static void verifyEquals(List<String> expected, String[] actual)
throws Exception {
verifyEquals(expected, Arrays.asList(actual));
}
private static void dump(AssertionError e, Collection<String> expected, Collection<String> actual) {
System.out.println();
System.out.println("===== verifyEquals failed =====");
System.out.println("Message: " + e.getMessage());
System.out.println();
System.out.println(String.format("expected (size=%d)", expected.size()));
for (String str : expected) {
System.out.println(" " + str);
}
System.out.println();
System.out.println(String.format("actual (size=%d)", actual.size()));
for (String str : actual) {
System.out.println(" " + str);
}
System.out.println();
}
public static void verifyEquals(Set<String> expected, List<NamedEntry> actual) {
try {
assertEquals(expected.size(), actual.size());
for (NamedEntry entry : actual) {
assertTrue(expected.contains(entry.getName()));
}
} catch (AssertionError e) {
System.out.println();
System.out.println("===== verifyEquals failed =====");
System.out.println("Message: " + e.getMessage());
System.out.println();
System.out.println(String.format("expected (size=%d)", expected.size()));
for (String name : expected)
System.out.println(" " + name);
System.out.println();
System.out.println(String.format("actual (size=%d)", actual.size()));
for (NamedEntry entry : actual)
System.out.println(" " + entry.getName());
System.out.println();
throw e;
}
}
public static void verifyEquals(List<? extends NamedEntry> expected, List<? extends NamedEntry> actual,
boolean orderMatters) {
try {
if (expected == null) {
expected = new ArrayList<NamedEntry>();
}
int size = expected.size();
assertEquals(expected.size(), actual.size());
List<String> expectedIds = Lists.newArrayList();
List<String> expectedNames = Lists.newArrayList();
for (NamedEntry entry : expected) {
expectedIds.add(entry.getId());
expectedNames.add(entry.getName());
}
List<String> actualIds = Lists.newArrayList();
List<String> actualNames = Lists.newArrayList();
for (NamedEntry entry : actual) {
actualIds.add(entry.getId());
actualNames.add(entry.getName());
}
for (int i = 0; i < size; i++) {
if (orderMatters) {
assertEquals(expectedIds.get(i), actualIds.get(i));
assertEquals(expectedNames.get(i), actualNames.get(i));
} else {
assertTrue(actualIds.contains(expectedIds.get(i)));
assertTrue(actualNames.contains(expectedNames.get(i)));
}
}
} catch (AssertionError e) {
System.out.println();
System.out.println("===== verifyEquals failed =====");
System.out.println("Message: " + e.getMessage());
System.out.println();
System.out.println(String.format("expected (size=%d)", expected.size()));
for (NamedEntry entry : expected) {
System.out.println(" " + entry.getName() + " (" + entry.getId() + ")");
}
System.out.println();
System.out.println(String.format("actual (size=%d)", actual.size()));
for (NamedEntry entry : actual) {
System.out.println(" " + entry.getName() + " (" + entry.getId() + ")");
}
System.out.println();
throw e;
}
}
public static String makeResultStr(Object... objs) {
StringBuilder sb = new StringBuilder();
for (Object obj : objs) {
if (sb.length() > 0) {
sb.append(":");
}
if (obj != null) {
if (obj instanceof Collection) {
for (Object o : (Collection) obj) {
sb.append("(");
sb.append(o.toString());
sb.append(")");
}
} else {
sb.append(obj.toString());
}
} else {
sb.append("null");
}
}
return sb.toString();
}
public static void appendResultStr(StringBuilder appendTo, Object... objs) {
if (appendTo.length() > 0) {
appendTo.append(":");
}
appendTo.append(makeResultStr(objs));
}
}
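// Example usage in a JUnit test (a hypothetical sketch; `prov`, the query, and
// the account names are made up for illustration):
//
// List<NamedEntry> accounts = prov.searchAccounts(query);
// Verify.verifyEntriesByName(accounts,
// new String[] { "user1@example.com", "user2@example.com" },
// true /* checkCount */);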
|
// toChunk finds a nonce such that, when the trojan chunk fields are hashed, the result falls in the neighbourhood of one of the given targets.
// It iterates over nonces until the BMT hash of the serialized trojan chunk fields yields a chunk address that has one of the targets as its prefix.
// The function returns a new chunk whose address is the matching hash and whose data is the serialization of the trojan chunk fields
// that correctly hash into that address.
func (m *Message) toChunk(targets Targets, span []byte) (swarm.Chunk, error) {
nonce := make([]byte, NonceSize)
if _, err := rand.Read(nonce); err != nil {
return nil, err
}
nonceInt := new(big.Int).SetBytes(nonce)
targetsLen := len(targets[0])
b, err := m.MarshalBinary()
if err != nil {
return nil, err
}
for start := time.Now(); ; {
s := append(append(span, nonce...), b...)
hash1, err := hashBytes(s)
if err != nil {
return nil, err
}
if contains(targets, hash1[:targetsLen]) {
return swarm.NewChunk(swarm.NewAddress(hash1), s), nil
}
nonceInt.Add(nonceInt, big.NewInt(1))
if nonceInt.BitLen() > (NonceSize * swarm.SpanSize) {
if time.Since(start) > (MinerTimeout * time.Second) {
break
}
nonceInt = big.NewInt(0)
}
nonce = padBytesLeft(nonceInt.Bytes())
}
return nil, ErrMinerTimeout
}
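// The helpers used above (hashBytes, contains, padBytesLeft) are defined
// elsewhere in the package. A minimal sketch of padBytesLeft, assuming the
// input never exceeds NonceSize bytes (the BitLen check above resets longer
// values): big.Int.Bytes() drops leading zeros, so the slice is left-padded
// back to the fixed nonce width.
func padBytesLeft(b []byte) []byte {
padded := make([]byte, NonceSize)
copy(padded[NonceSize-len(b):], b)
return padded
}
|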
/**
* Handles the population of installed and uninstalled extensions on the "Core Types and Extensions" page.
* This method always tries to pick up newly registered extensions from the Registry.
* <br/>
* Optionally, the user may have triggered the synchronise action, which updates the default vocabularies to use the
* latest versions, and synchronises all installed extensions and vocabularies with the registry to ensure their
* content is up-to-date.
*
* @return struts2 result
*/
public String list() {
if (synchronise) {
try {
synchronise();
addActionMessage(getText("admin.extensions.synchronise.success"));
} catch (Exception e) {
String errorMsg = e.getMessage();
if (e instanceof RegistryException) {
errorMsg = RegistryException.logRegistryException(((RegistryException)e), this);
}
addActionWarning(getText("admin.extensions.synchronise.error", new String[] {errorMsg}));
LOG.error(e);
}
}
extensions = extensionManager.list();
updateIsLatest(extensions);
newExtensions = getLatestExtensionVersions();
for (Extension e : extensions) {
newExtensions.remove(e);
}
for (Extension ex : extensions) {
if (lastSynchronised == null || lastSynchronised.before(ex.getModified())) {
lastSynchronised = ex.getModified();
}
}
return SUCCESS;
} |
#include <IMGraph.h>
#include <tuple>
#include <cassert>
IMGraph::IMGraph(const std::vector<degree_t> &degreeSequence) {
#ifndef NDEBUG
if (!std::is_sorted(degreeSequence.begin(), degreeSequence.end(), std::greater<degree_t>())) {
STXXL_MSG("WARNING: degree sequence is not sorted in descending order, performance of IMGraph will be degraded!");
}
#endif
// we store the first H (inspired by the H-index, but a bit different) edges in an adjacency matrix
// we choose H such that the entries in the adjacency array would need as much memory as the adjacency matrix needs
// note that each entry in the adjacency array needs 64 bits while an entry in the adjacency matrix needs only 1 bit
int_t sum = 0;
_h = degreeSequence.size();
bool has_h = false;
constexpr node_t maxh = sizeof(_h) == 4 ? 46330ll : 3037000400ll;
for (size_t i = 0; i < degreeSequence.size(); ++i) {
// TODO adjust factor if less memory shall be consumed
if (!has_h && ((sum + degreeSequence[i]) * 32 < static_cast<int_t>((i+1)*(i+1)) || i == maxh)) {
has_h = true;
_h = i;
_first_head.reserve(degreeSequence.size() + 1 - _h);
_last_head.reserve(degreeSequence.size() + 1 - _h);
sum = 0;
}
if (has_h) {
_first_head.push_back(sum);
_last_head.push_back(sum);
}
sum += degreeSequence[i];
}
if (UNLIKELY(sum >= IMGraph::maxEdges())) {
throw std::runtime_error("Error, too many edges for internal graph. The internal graph supports at most 2 billion edges");
}
STXXL_MSG("Putting first " << _h << " of in total " << degreeSequence.size() << " nodes in adjacency matrix");
_first_head.push_back(sum);
_last_head.push_back(sum);
_head.resize(sum);
assert(_first_head.size() == degreeSequence.size() + 1 - _h);
assert(_last_head.size() == degreeSequence.size() + 1 - _h);
_adjacency_matrix.resize(_h * _h);
}
SwapResult IMGraph::swapEdges(const edgeid_t eid0, const edgeid_t eid1, bool direction) {
SwapResult result;
edge_t e[2] = {getEdge(eid0), getEdge(eid1)};
edge_t t[2];
std::tie(t[0], t[1]) = _swap_edges(e[0], e[1], direction);
result.edges[0] = t[0];
result.edges[1] = t[1];
// check for conflict: loop
if (t[0].first == t[0].second || t[1].first == t[1].second) {
result.loop = true;
} else { // check for conflict edges
result.loop = false;
for (unsigned char pos = 0; pos < 2; ++pos) {
result.conflictDetected[pos] = hasEdge(t[pos].first, t[pos].second);
}
}
result.performed = !result.loop && !(result.conflictDetected[0] || result.conflictDetected[1]);
if (result.performed) {
auto &idx0 = _edge_index[eid0];
auto &idx1 = _edge_index[eid1];
// reset target as we need the non-normalized target edges
t[0] = e[0];
t[1] = e[1];
if (!direction) { // reverse first edge
std::swap(idx0.first, idx0.second);
std::swap(t[0].first, t[0].second);
}
// execute swap
std::swap(idx0.second, idx1.second);
std::swap(t[0].first, t[1].first);
// update adjacency matrix
for (unsigned char pos = 0; pos < 2; ++pos) {
if (e[pos].first < _h && e[pos].second < _h) {
_adjacency_matrix[e[pos].first * _h + e[pos].second] = false;
_adjacency_matrix[e[pos].second * _h + e[pos].first] = false;
}
if (t[pos].first < _h && t[pos].second < _h) {
_adjacency_matrix[t[pos].first * _h + t[pos].second] = true;
_adjacency_matrix[t[pos].second * _h + t[pos].first] = true;
}
}
// update adjacency array or index
if (!idx0.first.index_is_node) {
_head[idx0.first.index] = t[0].first;
} else {
idx0.first.index = t[0].first;
}
if (!idx0.second.index_is_node) {
_head[idx0.second.index] = t[0].second;
} else {
idx0.second.index = t[0].second;
}
if (!idx1.first.index_is_node) {
_head[idx1.first.index] = t[1].first;
} else {
idx1.first.index = t[1].first;
}
if (!idx1.second.index_is_node) {
_head[idx1.second.index] = t[1].second;
} else {
idx1.second.index = t[1].second;
}
// normalize direction of edges in the index
if (t[0].first > t[0].second) {
std::swap(idx0.first, idx0.second);
#ifndef NDEBUG // we only need the normalized edge for the assertion below
t[0].normalize();
#endif
}
if (t[1].first > t[1].second) {
std::swap(idx1.first, idx1.second);
#ifndef NDEBUG
t[1].normalize();
#endif
}
// make sure the edge is correctly stored
assert(t[0] == getEdge(eid0) && t[1] == getEdge(eid1));
assert(hasEdge(t[0].first, t[0].second) && hasEdge(t[1].first, t[1].second));
// make sure our swap implementation is the same as _swap_edges
assert(t[0] == result.edges[0] && t[1] == result.edges[1]);
}
result.normalize();
return result;
}
IMGraph::IMEdgeStream IMGraph::getEdges() const {
return IMEdgeStream(*this);
}
|
May is usually the last “good” month in terms of new releases, its purpose being to toss out a few more games to keep us occupied and out of the deadly summer sun until things pick back up around August or September. Around this time last year, I was eagerly waiting for a chance to be the bad guy for a bit by murdering fools as Jason Voorhees in Mortal Kombat X or stepping into the bloodied boots of The Executioner in the third DLC release for The Evil Within. This month sort of has its own theme going on too, but it’s more about re-releases. Let’s have a look.
Sylvio Remastered
With Sylvio Remastered, developer Stroboskop hopes to breathe new life into their open-world horror game about a woman who uses the voices of the dead to uncover and vanquish an evil curse and the wicked family cult that have taken up residence in an abandoned park.
The remaster replaces the old game engine with Unity 5, and in addition it brings a remade GUI, an improved EVP mechanic, and full controller support. It’s available free-of-charge to owners of the original Sylvio, who can get it now in the form of an update.
Release Date: May 2 (PC)
The Park
Funcom’s The Park was met with a mostly warm reception when it released on Steam back in October, and now it’s headed to consoles. The game is a spin-off of the pseudo-horror MMO The Secret World — also definitely worth checking out — that follows a mother as she spends a night in an abandoned amusement park searching for her son. Its relatively short (3-4 hour) length keeps the game from outstaying its welcome, and the atmosphere is top notch.
Release Date: May 3 (PS4, XBO)
Neverending Nightmares
Matt Gilgenbach’s nightmarish psychological horror game Neverending Nightmares is finally bringing its unique brand of terror to the PS4 and PS Vita today (May 4 for Europe). If you haven’t played this one yet, you probably should. Just know that this game is not for the squeamish or weak of stomach.
Release Date: May 3 (PS4, Vita)
Doom
The series that defined a decade is getting a reboot, and it looks glorious. I’ve spent some time with the new Doom and from what I’ve seen, it looks as if id Software might’ve actually found a way to modernize everything we love about the series — the fast-paced combat, addictive arena-based multiplayer, badass weapons, intimidating enemies and the buckets of gore that coat just about everything — without losing what made these games so influential in the first place.
And sure, the lack of co-op in the story mode is disappointing, but only until the community gets cozy with the SnapMap modding tools shipping with all versions of the game and starts churning out custom co-op campaigns. Based on my experience with games like LittleBigPlanet, that’ll take about a week.
Release Date: May 13 (PC, PS4, XBO)
Dead Island Definitive Edition
The development troubles with Dead Island 2 have caused serious problems for the already struggling franchise, and despite having considerably more time than its main competitor, Deep Silver is losing the war for zombie fans’ cash monies to the wildly successful Dying Light, which was developed by Dead Island creator Techland. The Dead Island: Definitive Edition isn’t going to change that, but it should sate our appetites for undead tomfoolery as we continue to wait for Sumo Digital to finish Dead Island 2.
Release Date: May 31 (PC, PS4, XBO)
Oxenfree
For the unfamiliar, Oxenfree is an always charming, often thoughtful and occasionally even deeply unsettling video game debut from Night School Studio. “Oxenfree is all humor, horror and heart,” writes my younger self in a review of the PC version. “It doesn’t need to rely on gimmicks, and with the exception of some light backtracking that could’ve easily been solved by a jog button, no attempt is made to pad its 3-5 hour running time with irrelevant errands and tedium.”
Don’t overlook this one. I promise you won’t regret it.
Release Date: May 31 (PS4)
Asemblance
Asemblance is a PS4 exclusive psychological horror game — the first in a planned series — with a surreal narrative that blends “The Twilight Zone”, “The X-Files”, and “Black Mirror”.
Release Date: TBA May (PS4)
Crowdfunding Campaigns
Voracious readers of Bloody Disgusting’s video game section will recognize Ghost Theory as the game I desperately want to play because while I’m super into the paranormal, I’m not quite interested enough to go and record otherworldly voices or seek out floating orbs to take selfies with.
This game aims to solve that problem by adapting the daily life of a clairvoyant paranormal investigator into a video game. The team at Dreadlocks is even visiting real “haunted” locales all over the world so they can recreate each one to be fully explorable in-game, with or without a virtual reality headset. It’s an incredibly promising, and original, approach that sets this supernatural horror game apart from the rest.
Ghost Theory needs to raise another ~$30k to reach its $71k funding goal, and there are only nine days left to get there. If you’d like to give them a hand, you can support it over here.
from test.sdk_mock.mockSecuredWebsocketCore import mockSecuredWebsocketCoreNoRealHandshake
from test.sdk_mock.mockSecuredWebsocketCore import MockSecuredWebSocketCoreNoSocketIO
from test.sdk_mock.mockSecuredWebsocketCore import MockSecuredWebSocketCoreWithRealHandshake
from test.sdk_mock.mockSSLSocket import mockSSLSocket
import struct
import socket
import pytest
try:
from configparser import ConfigParser # Python 3+
except ImportError:
from ConfigParser import ConfigParser
class TestWssCore:
# Websocket Constants
_OP_CONTINUATION = 0x0
_OP_TEXT = 0x1
_OP_BINARY = 0x2
_OP_CONNECTION_CLOSE = 0x8
_OP_PING = 0x9
_OP_PONG = 0xa
def _generateStringOfAs(self, length):
return "a" * length
def _printByteArray(self, src):
for i in range(0, len(src)):
print(hex(src[i]))
print("")
def _encodeFrame(self, rawPayload, opCode, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1):
ret = bytearray()
# FIN+RSV1+RSV2+RSV3
F = (FIN & 0x01) << 3
R1 = (RSV1 & 0x01) << 2
R2 = (RSV2 & 0x01) << 1
R3 = (RSV3 & 0x01)
FRRR = (F | R1 | R2 | R3) << 4
# Op byte
opByte = FRRR | opCode
ret.append(opByte)
# Payload Length bytes
maskBit = masked
payloadLength = len(rawPayload)
if payloadLength <= 125:
ret.append((maskBit << 7) | payloadLength)
elif payloadLength <= 0xffff: # 16-bit unsigned int
ret.append((maskBit << 7) | 126)
ret.extend(struct.pack("!H", payloadLength))
elif payloadLength <= 0x7fffffffffffffff: # 64-bit unsigned int (most significant bit must be 0)
ret.append((maskBit << 7) | 127)
ret.extend(struct.pack("!Q", payloadLength))
else: # Overflow
raise ValueError("Exceeds the maximum number of bytes for a single websocket frame.")
if maskBit == 1:
# Mask key bytes
maskKey = bytearray(b"1234")
ret.extend(maskKey)
# Mask the payload
payloadBytes = bytearray(rawPayload)
if maskBit == 1:
for i in range(0, payloadLength):
payloadBytes[i] ^= maskKey[i % 4]
ret.extend(payloadBytes)
# Return the assembled wss frame
return ret
def setup_method(self, method):
self._dummySSLSocket = mockSSLSocket()
# Wss Handshake
def test_WssHandshakeTimeout(self):
self._dummySSLSocket.refreshReadBuffer(bytearray()) # Empty bytes to read from socket
with pytest.raises(socket.error):
self._dummySecuredWebsocket = \
MockSecuredWebSocketCoreNoSocketIO(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Constructor
def test_InvalidEndpointPattern(self):
with pytest.raises(ValueError):
self._dummySecuredWebsocket = MockSecuredWebSocketCoreWithRealHandshake(None, "ThisIsNotAValidIoTEndpoint!", 1234)
def test_BJSEndpointPattern(self):
bjsStyleEndpoint = "blablabla.iot.cn-north-1.amazonaws.com.cn"
unexpectedExceptionMessage = "Invalid endpoint pattern for wss: %s" % bjsStyleEndpoint
# Garbage wss handshake response to ensure the test code gets past endpoint pattern validation
self._dummySSLSocket.refreshReadBuffer(b"GarbageWssHandshakeResponse")
try:
self._dummySecuredWebsocket = MockSecuredWebSocketCoreWithRealHandshake(self._dummySSLSocket, bjsStyleEndpoint, 1234)
except ValueError as e:
if str(e) == unexpectedExceptionMessage:
raise AssertionError("Encountered unexpected exception when initializing wss core with BJS style endpoint", e)
# Wss I/O
def test_WssReadComplete(self):
# Config mockSSLSocket to contain a Wss frame
rawPayload = b"If you can see me, this is good."
# This frame is unmasked (masked=0), as server-to-client frames should be
# securedWebsocketCore should be able to decode it and get the raw payload back
coolFrame = self._encodeFrame(rawPayload, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=0)
# self._printByteArray(coolFrame)
self._dummySSLSocket.refreshReadBuffer(coolFrame)
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Read it back:
readItBack = self._dummySecuredWebsocket.read(len(rawPayload)) # Basically read everything
assert rawPayload == readItBack
def test_WssReadFragmented(self):
rawPayloadFragmented = b"I am designed to be fragmented..."
# This frame is unmasked (masked=0), as server-to-client frames should be
# securedWebsocketCore should be able to decode it and get the raw payload back
stop1 = 4
stop2 = 9
coolFrame = self._encodeFrame(rawPayloadFragmented, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=0)
# self._printByteArray(coolFrame)
coolFramePart1 = coolFrame[0:stop1]
coolFramePart2 = coolFrame[stop1:stop2]
coolFramePart3 = coolFrame[stop2:len(coolFrame)]
# Config mockSSLSocket to contain a fragmented Wss frame
self._dummySSLSocket.setReadFragmented()
self._dummySSLSocket.addReadBufferFragment(coolFramePart1)
self._dummySSLSocket.addReadBufferFragment(coolFramePart2)
self._dummySSLSocket.addReadBufferFragment(coolFramePart3)
self._dummySSLSocket.loadFirstFragmented()
# In this way, reading from SSLSocket will result in 3 sslError, simulating the situation where data is not ready
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Read it back:
readItBack = bytearray()
while len(readItBack) != len(rawPayloadFragmented):
try:
# Will be interrupted due to faked socket I/O Error
# Should be able to read back the complete payload
readItBack += self._dummySecuredWebsocket.read(len(rawPayloadFragmented)) # Basically read everything
except:
pass
assert rawPayloadFragmented == readItBack
def test_WssReadlongFrame(self):
# Config mockSSLSocket to contain a Wss frame
rawPayloadLong = bytearray(self._generateStringOfAs(300), 'utf-8') # 300 bytes of raw payload, will use extended payload length bytes in encoding
# This frame is unmasked (masked=0), as server-to-client frames should be
# securedWebsocketCore should be able to decode it and get the raw payload back
coolFrame = self._encodeFrame(rawPayloadLong, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=0)
# self._printByteArray(coolFrame)
self._dummySSLSocket.refreshReadBuffer(coolFrame)
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Read it back:
readItBack = self._dummySecuredWebsocket.read(len(rawPayloadLong)) # Basically read everything
assert rawPayloadLong == readItBack
def test_WssReadReallylongFrame(self):
# Config mockSSLSocket to contain a Wss frame
# Maximum allowed length of a wss payload is greater than maximum allowed payload length of a MQTT payload
rawPayloadLong = bytearray(self._generateStringOfAs(0xffff + 3), 'utf-8') # 0xffff + 3 bytes of raw payload, will use extended payload length bytes in encoding
# This frame is unmasked (masked=0), as server-to-client frames should be
# securedWebsocketCore should be able to decode it and get the raw payload back
coolFrame = self._encodeFrame(rawPayloadLong, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=0)
# self._printByteArray(coolFrame)
self._dummySSLSocket.refreshReadBuffer(coolFrame)
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Read it back:
readItBack = self._dummySecuredWebsocket.read(len(rawPayloadLong)) # Basically read everything
assert rawPayloadLong == readItBack
def test_WssWriteComplete(self):
ToBeWritten = b"Write me to the cloud."
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Fire the write op
self._dummySecuredWebsocket.write(ToBeWritten)
ans = self._encodeFrame(ToBeWritten, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1)
# self._printByteArray(ans)
assert ans == self._dummySSLSocket.getWriteBuffer()
def test_WssWriteFragmented(self):
ToBeWritten = b"Write me to the cloud again."
# Configure SSLSocket to perform interrupted write op
self._dummySSLSocket.setFlipWriteError()
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Fire the write op
with pytest.raises(socket.error) as e:
self._dummySecuredWebsocket.write(ToBeWritten)
assert "Not ready for write op" == e.value.strerror
lengthWritten = self._dummySecuredWebsocket.write(ToBeWritten)
ans = self._encodeFrame(ToBeWritten, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1)
assert lengthWritten == len(ToBeWritten)
assert ans == self._dummySSLSocket.getWriteBuffer()
# Wss Client Behavior
def test_ClientClosesConnectionIfServerResponseIsMasked(self):
ToBeWritten = b"I am designed to be masked."
maskedFrame = self._encodeFrame(ToBeWritten, self._OP_BINARY, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1)
self._dummySSLSocket.refreshReadBuffer(maskedFrame)
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Now read it back
with pytest.raises(socket.error) as e:
self._dummySecuredWebsocket.read(len(ToBeWritten))
assert "Server response masked, closing connection and try again." == e.value.strerror
# Verify that a closing frame from the client is on its way
closingFrame = self._encodeFrame(b"", self._OP_CONNECTION_CLOSE, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1)
assert closingFrame == self._dummySSLSocket.getWriteBuffer()
def test_ClientClosesConnectionIfServerResponseHasReserveBitsSet(self):
ToBeWritten = b"I am designed to be masked."
maskedFrame = self._encodeFrame(ToBeWritten, self._OP_BINARY, FIN=1, RSV1=1, RSV2=0, RSV3=0, masked=1)
self._dummySSLSocket.refreshReadBuffer(maskedFrame)
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Now read it back
with pytest.raises(socket.error) as e:
self._dummySecuredWebsocket.read(len(ToBeWritten))
assert "RSV bits set with NO negotiated extensions." == e.value.strerror
# Verify that a closing frame from the client is on its way
closingFrame = self._encodeFrame(b"", self._OP_CONNECTION_CLOSE, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1)
assert closingFrame == self._dummySSLSocket.getWriteBuffer()
def test_ClientSendsPONGIfReceivedPING(self):
PINGFrame = self._encodeFrame(b"", self._OP_PING, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=0)
self._dummySSLSocket.refreshReadBuffer(PINGFrame)
# Init securedWebsocket with this mockSSLSocket
self._dummySecuredWebsocket = \
mockSecuredWebsocketCoreNoRealHandshake(self._dummySSLSocket, "data.iot.region.amazonaws.com", 1234)
# Now read it back, this must be in the next round of paho MQTT packet reading
# Should fail since we only have a PING to read, it never contains a valid MQTT payload
with pytest.raises(socket.error) as e:
self._dummySecuredWebsocket.read(5)
assert "Not a complete MQTT packet payload within this wss frame." == e.value.strerror
# Verify that PONG frame from the client is on its way
PONGFrame = self._encodeFrame(b"", self._OP_PONG, FIN=1, RSV1=0, RSV2=0, RSV3=0, masked=1)
assert PONGFrame == self._dummySSLSocket.getWriteBuffer()
|
/**
* Base netty SimpleChannelInboundHandler
*
* @param <V> the type of the accepted message object (POJO)
*
* @author ykalay
*
* @since 1.0
*/
public abstract class BaseNettyHandler<V> extends SimpleChannelInboundHandler<V> {
private static final Logger log = LoggerFactory.getLogger(BaseNettyHandler.class.getName());
private static final boolean DEFAULT_AUTO_RELEASE = true;
private static final AttributeKey<String> URL_ATTR = AttributeKey.valueOf("uri_attr");
protected BaseNettyHandler() {
this(DEFAULT_AUTO_RELEASE);
}
protected BaseNettyHandler(boolean autoRelease) {
super(autoRelease);
}
public static void sendResponseWithNoBody(Channel channel, HttpResponseStatus httpResponseStatus) {
final HttpResponse responseBody = new DefaultHttpResponse(
HttpVersion.HTTP_1_1,
httpResponseStatus);
channel.writeAndFlush(responseBody)
.addListener(ChannelFutureListener.CLOSE);
}
public static void sendJsonResponseWithBody(Channel channel, TunnelHttpResponse tunnelHttpResponse) {
FullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1,
tunnelHttpResponse.getHttpResponseStatus(), Unpooled.wrappedBuffer(tunnelHttpResponse.getBody()));
response.headers().set(CONTENT_TYPE, "application/json");
response.headers().set(CONTENT_LENGTH, response.content().readableBytes());
channel.writeAndFlush(response)
.addListener(ChannelFutureListener.CLOSE);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
log.warn("Exception caught", cause); // SLF4J: passing the Throwable as the last argument logs its stack trace ({0}-style placeholders are not supported)
}
public static void setURIAsAttr(String uri, Channel channel) {
channel.attr(URL_ATTR).set(uri);
}
public static String getURIFromAttr(Channel channel) {
return channel.attr(URL_ATTR).get();
}
} |
// NewPutLolChatV1SettingsByKeyParams creates a new PutLolChatV1SettingsByKeyParams object
// with the default values initialized.
func NewPutLolChatV1SettingsByKeyParams() *PutLolChatV1SettingsByKeyParams {
return &PutLolChatV1SettingsByKeyParams{
timeout: cr.DefaultTimeout,
}
} |
Soy versus cow's milk in infants with a biparental history of atopic disease: development of atopic disease and immunoglobulins from birth to 4 years of age
Forty‐eight children with a biparental history of atopic disease were followed from birth to 4 years of age. One group was fed soy and the other cow's milk from weaning to 9 months of age. Two‐thirds of the children developed symptoms of atopic disease, with no significant difference between the groups. No difference was found in the serum immunoglobulins (IgE antibodies, IgA, IgG and IgM) during the observation period. The soy-fed children showed transiently lower levels of IgG antibodies to cow's milk but higher levels of IgG antibodies to soy protein. Six children showed cow's milk intolerance and a further five had symptoms possibly related to the use of cow's milk. Withholding cow's milk during the first 9 months did not reduce the incidence of symptoms of cow's milk intolerance from birth to 4 years of age. Thus, no benefit was found from replacing cow's milk with soy. Prolonged breast feeding seems most rational for infants at risk of developing atopic disease, even though the present study did not show evidence of a prophylactic effect of breast milk against the development of atopic disease.
from dango import dcog, Cog
from .common import utils
from .common import extras
@dcog()
class UsesCommon(Cog):
def __init__(self, config):
self.b = utils.dummy()
|
import type { ComponentClass, FunctionComponent, ReactNode } from 'react';
import { Children } from 'react';
import getElementName from './getElementName.js';
// eslint-disable-next-line max-lines-per-function
const groupByType = (
children: ReactNode | ReactNode[],
// eslint-disable-next-line @typescript-eslint/no-explicit-any
types: readonly (ComponentClass<any> | FunctionComponent | string)[] = [],
rest = 'rest',
): Record<string, ReactNode[]> => {
const typeNames: string[] = types.map((type) => (typeof type === 'string' ? type : type.name));
return Children.toArray(children).reduce(
(groups: Readonly<Record<string, ReactNode[]>>, child: ReactNode) => {
const newGroups = { ...groups };
const elementName = getElementName(child);
const key = elementName !== null && typeNames.includes(elementName) ? elementName : rest;
if (typeof newGroups[key] === 'undefined') {
newGroups[key] = [];
}
newGroups[key] = [...newGroups[key], child];
return newGroups;
},
{},
);
};
export default groupByType;
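// Example usage (a hypothetical sketch; `Row` and `Col` stand in for real
// components passed as children):
//
// const groups = groupByType(children, [Row, Col], 'rest');
// // => { Row: [...], Col: [...], rest: [...] }, grouped by component name,
// // with children of any unlisted type collected under the 'rest' key.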
|
import numpy as np
def top_k(a, k, reverse=False):
"""Return indices (as from np.unravel_index) of the k smallest entries of `a`,
or of the k largest entries if reverse=True."""
flat = a.ravel()
if flat.size <= k:
# Fewer than k elements in total: return the indices of every element.
return np.unravel_index(np.arange(flat.size), a.shape)
if reverse:
top_k_inds = np.argpartition(flat, len(flat)-k)[-k:]
else:
top_k_inds = np.argpartition(flat, k)[:k]
return np.unravel_index(top_k_inds, a.shape)
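# Example (illustrative): for a = np.array([[9, 1], [5, 3]]) and k=2,
# top_k(a, 2) returns index arrays selecting the two smallest entries (1 and 3),
# while top_k(a, 2, reverse=True) selects the two largest (9 and 5);
# the returned tuple can be used to index `a` directly, e.g. a[top_k(a, 2)].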
|
/**
* A helper method that allows a lot of multidim data types to be filled with Infinite values
*
* @param alg
* @param a
*/
public static <T extends Algebra<T,U> & Infinite<U>, U, W extends RawData<U>>
void compute(T alg, W a)
{
U value = alg.construct();
alg.infinite().call(value);
Fill.compute(alg, value, a.rawData());
} |
McKayla Maroney, 22, filed the lawsuit against USA Gymnastics on Wednesday
She claims officials paid her to sign a non-disclosure agreement to keep secret the allegations she was abused by team doctor Larry Nassar
Gold medalist revealed on Twitter in October that Nassar had allegedly molested her for seven years beginning when she was 13
Maroney reportedly received $1.25 million in the settlement to pay for psychological treatment
Lawsuit says she suffers from anxiety and depression from the alleged abuse
It also alleges that Maroney lost millions of dollars because the trauma of the alleged sex abuse drove her from the sport
Olympic gold medalist McKayla Maroney has filed a lawsuit against USA Gymnastics and the United States Olympic Committee accusing them of trying to silence her claims of sexual abuse.
Maroney claims in the lawsuit filed on Wednesday that officials paid her to sign a confidential non-disclosure agreement to keep secret the allegations that she had been sexually abused by former team doctor Larry Nassar.
The 22-year-old revealed on Twitter in October that Nassar had allegedly molested her for seven years beginning when she was 13.
The suit, filed in Los Angeles Superior Court, says the confidentiality agreement was signed as part of a financial settlement that was finalized as the allegations against Nassar surfaced.
Olympic gold medalist McKayla Maroney filed the lawsuit against USA Gymnastics and the United States Olympic Committee on Wednesday in Los Angeles Superior Court
Maroney says she accepted the settlement in December 2016 after 'years of psychological trauma' and sexual abuse. The terms weren't disclosed in court papers.
Sources told the Los Angeles Times that Maroney received $1.25 million in the settlement to pay for psychological treatment.
The lawsuit alleges that the settlement was illegal and 'for the purpose of silencing a known victim of Nassar.'
Her lawyer John Manly said Maroney entered willingly into the agreement but is now seeking to have her released from it. Manly, who wasn't part of the negotiations at the time, says it violated California law because child sex abuse victims in the state cannot be forced to sign non-disclosure agreements as part of settlements.
'I want people to understand that this kid had no choice. She couldn't function. She couldn't work,' Manly told ESPN.
'They (USAG) were willing to sacrifice the health and well-being of one of the most famous gymnasts in the world because they didn't want the world to know they were protecting a pedophile doctor.
'We're basically saying USAG and its lawyers violated the law by asking McKayla to agree to it and that she should be free to talk about her abuse to whomever she wants, whenever she wants.'
The court documents detail some of the alleged abuse Maroney was subjected to, including instances where Nassar allegedly inserted 'his bare, ungloved hand into her vagina' as he claimed to be performing medical treatment.
Maroney is said to suffer from anxiety and depression and is terrified that Nassar took photos of her alleged sexual abuse, according to the lawsuit.
The 22-year-old revealed on Twitter in October that former team doctor Larry Nassar had allegedly molested her for seven years beginning when she was 13. They are pictured in 2013
'Nassar would continuously, obsessively and compulsively photograph McKayla Maroney and is believed to have possessed thousands of photographs of McKayla Maroney competing in gymnastics events, training, in everyday situations,' the court papers state.
'McKayla Maroney alleges that she believes photographs were taken of her while Nassar was sexually abusing her under the guise of treatment. McKayla Maroney is further informed and believes, and on that basis alleges, that these photographs were shared by Nassar with other pedophiles for their sexual gratification.
'McKayla Maroney continues to worry, distress, experience concern, anxiety, and depression over whether Nassar's photographs of her are still circulating through the internet, and whether they are possessed by other pedophiles and sexual deviants, and whether she will ever know how widely these photographs have been shared or whether they will eventually surface later in her lifetime.'
The suit also alleges that Maroney lost millions of dollars because the trauma of the alleged sex abuse drove her from the sport.
In addition to USA Gymnastics and the US Olympic Committee, the suit also seeks damages from Michigan State University where Nassar worked for decades.
Nassar is currently in prison in Michigan after he was sentenced earlier this month to 60 years for possession of child pornography.
The pornography, which included more than 37,000 images and videos depicting children as young as infants, was discovered last year while Nassar was under investigation for assault.
He is still awaiting trial on separate criminal sexual conduct charges in addition to being sued by over 125 women in civil court who claim he sexually assaulted them.
Maroney (pictured center at the 2012 Olympics after the team won gold) alleges that Nassar repeatedly abused her from the age of 13 until she left the sport last year
Maroney was praised by USA Gymnastics after speaking out and on her birthday last month the organization dedicated a post to her on social media
The allegations of sex abuse first surfaced in August 2016 when two gymnasts accused him of assault.
More than 140 females have since filed complaints against him, including decorated gymnasts Aly Raisman and Gabby Douglas.
Maroney went public in October alleging that Nassar had abused her from the age of 13 until she left gymnastics last year.
She said the abuse happened at many high-profile competitions, including the 2012 London Olympic Games where she won gold and silver medals.
Maroney claims that Nassar, who spent nearly 30 years as an osteopath with the USA Gymnastics program, first molested her when she was 13 at a National Team training camp in Texas.
'Dr. Nassar told me that I was receiving 'medically necessary treatment that he had been performing on patients for over 30 years',' Maroney said of her abuse.
'It seemed whenever and wherever this man could find the chance, I was 'treated',' she added. 'It happened in London before my team and I won the gold medal, and it happened before I won my silver.'
Maroney claims that the worst abuse happened during the 2011 world gymnastics championships in Tokyo.
Maroney claims that the worst abuse happened during the 2011 world gymnastics championships in Tokyo where she won gold (pictured above)
She wrote that Nassar had given her a sleeping pill on the flight to the Japanese city, and she didn't wake until she was in his hotel room, alone, where he was performing a 'treatment'.
'I thought I was going to die that night,' Maroney wrote.
Several former athletes have accused Nassar of inserting un-gloved fingers into their bodies and fondling their breasts as part of his treatment.
Nassar served as the US gymnastics team's doctor through four Olympic Games.
Following her post, Maroney was praised by USA Gymnastics and on her birthday last month the organization dedicated a post to her on social media.
'On her birthday, we celebrate not only the talent it took to deliver the best vaults in the World but also McKayla's incredible bravery and strength to come forward,' they wrote.
USA Gymnastics has not commented on the lawsuit.
The president of USA Gymnastics, Steve Penny, resigned in March after repeatedly being urged to quit for allegedly being slow to notify authorities about sexual abuse allegations in the organization.
The organization launched an independent review of its policies in the wake of the allegations against Nassar and reporting by the Indianapolis Star that highlighted chronic mishandling of abuse allegations against coaches and staff at some of its over 3,500 clubs across the country. |
def insert_documents(self, data_dir_path: str, latest=True):
dict_list = self.__get_data(data_dir_path)
transformed_data_list = self._transform_data(dict_list)
if latest:
latest_insert = self.__get_last_insert()
if latest_insert:
latest_index = self.__get_index(transformed_data_list, latest_insert)  # reuse the result fetched above
if latest_index == len(transformed_data_list) - 1:
print("Local data and Cosmos DB in sync")
return
elif latest_index < len(transformed_data_list) - 1:
print(
"Migrating from index {}:{}".format(
latest_index, len(transformed_data_list) - 1
)
)
transformed_data_list = transformed_data_list[latest_index + 1 :]
elif latest_index > len(transformed_data_list) - 1:
print("Cosmos DB ahead of local data.")
return
else:
print(
"No data in container, migrating all {} "
"document from local storage".format(len(transformed_data_list))
)
for item in tqdm(transformed_data_list, desc="Migrating to Cosmos"):
try:
self.container.create_item(body=item)
except CosmosHttpResponseError as http_error:
tqdm.write(
"Could not insert {} from {}. Error code: {}".format(
item["web_name"], item["download_time"], http_error.message
)
) |
def predict(self, df, summed=True):
test_df = self.add_time_day(df)
test_df = self.add_hdd(test_df)
test_df = self.add_cdd(test_df)
if 'energy' not in test_df:
test_df = test_df.assign(energy=[0.0 for xx in test_df['tempF']])
weekday_df = test_df.loc[test_df['day_of_week'].isin(self.weekdays)]
weekday_pred = self.model_res_weekday.predict(weekday_df)
weekend_df = test_df.loc[test_df['day_of_week'].isin(self.weekends)]
weekend_pred = self.model_res_weekend.predict(weekend_df)
prediction = pd.concat([weekday_pred, weekend_pred])
prediction = prediction.sort_index()  # sort_index returns a new object; reassign to keep the sorted result
variance = self.compute_variance(test_df)
if summed:
prediction = np.sum(prediction)
variance = np.sum(variance)
return prediction, variance |
import json
class GameStats():
def __init__(self, ai_game):
filename="saved_data.json"
try:
with open(filename) as f:
self.high_score = json.load(f)
except (FileNotFoundError, json.JSONDecodeError):
# No saved data yet: create the file and start the high score at zero.
with open(filename, "w") as f:
json.dump(0, f)
self.high_score = 0
self.settings = ai_game.settings
self.reset_stats()
self.game_active = False
def reset_stats(self):
self.ships_left = self.settings.ship_limit
self.score = 0
self.level = 1
|
import { Entity } from 'core/entities/entity';
import { MathUtils } from 'core/utils/math.utils';
// export interface GridEntity {
// id: number;
// }
export interface GridClient {
position: number[];
dimensions: number[];
cells: {
min: number[],
max: number[],
nodes: GridCell[][]
};
queryId: number;
entity?: Entity;
}
interface GridCell {
next: GridCell;
previous: GridCell;
client: GridClient;
}
export class SpatialHashGrid {
private cells: GridCell[][];
private dimensions: number[];
private bounds: number[][];
private queryIds = 0;
constructor(bounds: number[][], dimensions: number[]) {
const [x, y] = dimensions;
// creates an X-by-Y grid: X entries, each an array of Y cells initialized to null
this.cells = [...Array(x)].map(_ => [...Array(y)].map(_ => (null)));
this.dimensions = dimensions;
this.bounds = bounds;
}
public newClient(position: number[], dimensions: number[]): GridClient {
const client: GridClient = {
position,
dimensions,
cells: {
min: null,
max: null,
nodes: null
},
queryId: -1
};
this.insert(client);
return client;
}
public updateClient(client: GridClient): void {
const [posX, posY] = client.position;
const [width, height] = client.dimensions;
const indexInit = this.getCellIndex([posX - width / 2, posY - height / 2]);
const indexEnd = this.getCellIndex([posX + width / 2, posY + height / 2]);
if (client.cells.min[0] == indexInit[0] &&
client.cells.min[1] == indexInit[1] &&
client.cells.max[0] == indexEnd[0] &&
client.cells.max[1] == indexEnd[1]) {
return;
}
this.removeClient(client);
this.insert(client);
}
public removeClient(client: GridClient): void {
const indexInit = client.cells.min;
const indexEnd = client.cells.max;
for (let xInit = indexInit[0], xEnd = indexEnd[0]; xInit <= xEnd; xInit++) {
for (let yInit = indexInit[1], yEnd = indexEnd[1]; yInit <= yEnd; yInit++) {
const xi = xInit - indexInit[0];
const yi = yInit - indexInit[1];
const node = client.cells.nodes[xi][yi];
if (node.next) {
node.next.previous = node.previous;
}
if (node.previous) {
node.previous.next = node.next;
}
if (!node.previous) {
this.cells[xInit][yInit] = node.next;
}
}
}
client.cells.min = null;
client.cells.max = null;
client.cells.nodes = null;
}
public findNearby(position: number[], bounds: number[]): GridClient[] {
const [posX, posY] = position;
const [width, height] = bounds;
const indexInit = this.getCellIndex([posX - width / 2, posY - height / 2]);
const indexEnd = this.getCellIndex([posX + width / 2, posY + height / 2]);
const clients: GridClient[] = [];
const queryId = this.queryIds++;
for (let xInit = indexInit[0], xEnd = indexEnd[0]; xInit <= xEnd; xInit++) {
for (let yInit = indexInit[1], yEnd = indexEnd[1]; yInit <= yEnd; yInit++) {
let head = this.cells[xInit][yInit];
while (head) {
const client = head.client;
head = head.next;
if (client.queryId !== queryId) {
client.queryId = queryId;
clients.push(client);
}
}
}
}
return clients;
}
private insert(client: GridClient): void {
const [posX, posY] = client.position;
const [width, height] = client.dimensions;
const indexInit = this.getCellIndex([posX - width / 2, posY - height / 2]);
const indexEnd = this.getCellIndex([posX + width / 2, posY + height / 2]);
const nodes: GridCell[][] = [];
for (let xInit = indexInit[0], xEnd = indexEnd[0]; xInit <= xEnd; xInit++) {
nodes.push([]);
for (let yInit = indexInit[1], yEnd = indexEnd[1]; yInit <= yEnd; yInit++) {
const xIndex = xInit - indexInit[0];
const head: GridCell = {
next: null,
previous: null,
client
};
nodes[xIndex].push(head);
head.next = this.cells[xInit][yInit];
if (this.cells[xInit][yInit]) {
this.cells[xInit][yInit].previous = head;
}
this.cells[xInit][yInit] = head;
}
}
client.cells.min = indexInit;
client.cells.max = indexEnd;
client.cells.nodes = nodes;
}
private getCellIndex(position: number[]): number[] {
const x = MathUtils.sat(
(position[0] - this.bounds[0][0]) /
(this.bounds[1][0] - this.bounds[0][0])
);
const y = MathUtils.sat(
(position[1] - this.bounds[0][1]) /
(this.bounds[1][1] - this.bounds[0][1])
);
const xIndex = Math.floor(x * (this.dimensions[0] - 1));
const yIndex = Math.floor(y * (this.dimensions[1] - 1));
return [xIndex, yIndex];
}
}
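// Example usage (a sketch; the bounds, grid dimensions, and positions are
// illustrative):
//
// const grid = new SpatialHashGrid([[0, 0], [1000, 1000]], [100, 100]);
// const client = grid.newClient([500, 500], [10, 10]);
// client.position = [510, 505];
// grid.updateClient(client); // re-bucket the client after it moves
// const nearby = grid.findNearby([500, 500], [50, 50]);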
|
// runServe runs the main HTTP service
func runServe(cmd *cobra.Command, cmdArgs []string) error {
if runSettings == nil {
return errors.New("nil runSettings")
}
logrus.WithFields(logrus.Fields{
"groups": runSettings.LockGroups,
}).Debug("lock groups")
airlock := server.Airlock{*runSettings}
// Buffered so a signal arriving before the final receive is not dropped,
// as recommended by the signal.Notify documentation.
stopCh := make(chan os.Signal, 1)
signal.Notify(stopCh, os.Interrupt, syscall.SIGTERM)
if runSettings.StatusEnabled {
if err := airlock.RegisterMetrics(); err != nil {
return err
}
statusMux := http.NewServeMux()
statusMux.Handle(status.MetricsEndpoint, status.Metrics())
statusService := http.Server{
Addr: fmt.Sprintf("%s:%d", runSettings.StatusAddress, runSettings.StatusPort),
Handler: statusMux,
}
go runService(stopCh, statusService, airlock)
defer statusService.Close()
logrus.WithFields(logrus.Fields{
"address": runSettings.StatusAddress,
"port": runSettings.StatusPort,
}).Info("status service")
} else {
logrus.Warn("status service disabled")
}
serviceMux := http.NewServeMux()
serviceMux.Handle(server.PreRebootEndpoint, airlock.PreReboot())
serviceMux.Handle(server.SteadyStateEndpoint, airlock.SteadyState())
mainService := http.Server{
Addr: fmt.Sprintf("%s:%d", runSettings.ServiceAddress, runSettings.ServicePort),
Handler: serviceMux,
}
logrus.WithFields(logrus.Fields{
"address": runSettings.ServiceAddress,
"port": runSettings.ServicePort,
}).Info("main service")
go runService(stopCh, mainService, airlock)
defer mainService.Close()
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
go airlock.RunConsistencyChecker(ctx)
<-stopCh
return nil
} |
/**
* Looks for a classic macro and returns true if it finds the macro and the macro is paired or unpaired (based on the pair parameter).
*/
public static boolean checkPairMacro(PsiBuilder builder, int level, Parser parser) {
if (builder.getTokenType() != T_MACRO_OPEN_TAG_OPEN) return false;
PsiBuilder.Marker marker = builder.mark();
String macroName = getMacroName(builder);
boolean pair = parser == LatteParser.TRUE_parser_;
boolean result;
LatteTagSettings tag = getTag(builder);
if (tag == null || tag.getType() == LatteTagSettings.Type.AUTO_EMPTY) {
result = pair == isPair(macroName, builder);
} else if (macroName.equals("_")) {
boolean emptyArgs = true;
builder.advanceLexer();
while (emptyArgs && nextTokenIsFast(builder, T_MACRO_ARGS, T_MACRO_ARGS_NUMBER, T_MACRO_ARGS_STRING, T_MACRO_ARGS_VAR, T_PHP_METHOD)) {
emptyArgs = (builder.getTokenText().trim().length() == 0);
builder.advanceLexer();
}
result = (emptyArgs == pair);
} else {
result = pair ? (LatteTagSettings.Type.PAIR == tag.getType()) : LatteTagSettings.Type.unpairedSet.contains(tag.getType()); // tag is non-null in this branch
}
marker.rollbackTo();
return result;
} |
// AddServiceAgreement adds a Service Agreement which includes a job that needs
// to be scheduled.
func (app *ChainlinkApplication) AddServiceAgreement(sa *models.ServiceAgreement) error {
err := app.Store.CreateServiceAgreement(sa)
if err != nil {
return err
}
app.Scheduler.AddJob(sa.JobSpec)
logger.ErrorIf(app.FluxMonitor.AddJob(sa.JobSpec))
logger.ErrorIf(app.JobSubscriber.AddJob(sa.JobSpec, nil))
return nil
} |
OTTAWA — Struggling for relevance in the smartphone business, BlackBerry is turning to Google’s popular mobile software for a helping hand.
BlackBerry said on Friday that it would make a phone that runs the Android operating system, confirming a rumor that has been circulating for months. The company released another sour earnings report on Friday as well that underscored the importance of finding a successful product.
The move to Android should address at least one limit on the appeal of BlackBerry’s smartphones: a lack of apps. The Android phone will be able to run the more than one million apps already created for that software.
The new BlackBerry phone is widely expected to have a large touch-screen face and a pullout physical keyboard. While he did not talk about the design, John S. Chen, the company’s chief executive, said the phone would be called the Priv, an allusion to privacy and privilege. He added that his company was working with Google to give it additional security compared with other Android phones. |
// downloadMetadataLegacy downloads a snapshot of the KV store from the server, and extracts
// a bucket manifest from the result. KV is written to a local file; the extracted manifest
// is tracked as a slice for additional processing.
//
// NOTE: This should _not_ be used against an InfluxDB instance running v2.1.0 or later, as
// it will fail to capture metadata stored in SQL.
func (c *Client) downloadMetadataLegacy(ctx context.Context, params *Params) error {
log.Println("INFO: Downloading legacy KV snapshot")
rawResp, err := c.GetBackupKV(ctx).Execute()
if err != nil {
return fmt.Errorf("failed to download KV snapshot: %w", err)
}
defer rawResp.Body.Close()
kvName := filepath.Join(params.Path, fmt.Sprintf("%s.bolt", c.baseName))
tmpKv := fmt.Sprintf("%s.tmp", kvName)
defer os.RemoveAll(tmpKv)
if err := func() error {
f, err := os.Create(tmpKv)
if err != nil {
return err
}
defer f.Close()
_, err = io.Copy(f, rawResp.Body)
return err
}(); err != nil {
return fmt.Errorf("failed to save downloaded KV snapshot: %w", err)
}
log.Println("INFO: Extracting bucket manifest from legacy KV snapshot")
c.bucketMetadata, err = br.ExtractBucketMetadata(tmpKv)
if err != nil {
return fmt.Errorf("failed to extract bucket metadata from downloaded KV snapshot: %w", err)
}
if err := func() error {
if params.Compression == br.NoCompression {
return os.Rename(tmpKv, kvName)
}
tmpIn, err := os.Open(tmpKv)
if err != nil {
return err
}
defer tmpIn.Close()
kvName = kvName + ".gz"
out, err := os.Create(kvName)
if err != nil {
return err
}
defer out.Close()
gzw := gzip.NewWriter(out)
defer gzw.Close()
_, err = io.Copy(gzw, tmpIn)
return err
}(); err != nil {
return fmt.Errorf("failed to rename downloaded KV snapshot: %w", err)
}
fi, err := os.Stat(kvName)
if err != nil {
return fmt.Errorf("failed to inspect local KV snapshot: %w", err)
}
c.manifest.KV = br.ManifestFileEntry{
FileName: fi.Name(),
Size: fi.Size(),
Compression: params.Compression,
}
return nil
}
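
// Illustrative call site for the function above (the path and compression
// choice are placeholders, not taken from the real CLI):
//
//	params := &Params{Path: "/tmp/backup", Compression: br.NoCompression}
//	if err := client.downloadMetadataLegacy(ctx, params); err != nil {
//		log.Fatal(err)
//	}
|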
def OnDropFiles(self, x, y, filenames):
    try:
        for filename in filenames:
            self._docManager.CreateDocument(filename, wx.lib.docview.DOC_SILENT)
    except Exception:
        msgTitle = wx.GetApp().GetAppName()
        if not msgTitle:
            msgTitle = _("File Error")
        wx.MessageBox("Could not open '%s'. '%s'" % (wx.lib.docview.FileNameFromPath(filename), sys.exc_info()[1]),
                      msgTitle,
                      wx.OK | wx.ICON_EXCLAMATION,
                      self._docManager.FindSuitableParent()) |
Acute blindness in a dog caused by an explosive blast.
A 3-year-old, intact male, mixed breed dog was presented with a complaint of acute blindness. Ten days previously, the area where the dog was walking came under a rocket attack, and a rocket landed and exploded 300 meters away from the dog. Physical examination was unremarkable. Ophthalmoscopic examination revealed posterior segment fibrin clots and extensive vitreal hemorrhage in the right eye. A total retinal detachment (360 degrees retinal dialysis) with no evidence of hemorrhage was noted in the left eye. There was no sign of any penetrating ocular trauma, and it was assumed that the posterior segment findings were primary injuries caused by the blast wave itself. Following anti-inflammatory treatment, partial vision was restored in the right eye. Surgical re-attachment of the retina was discussed and declined by the owner. This report describes, for the first time, vitreal hemorrhage and retinal detachment as the sole injuries caused by an explosive blast wave. |
<filename>tower-web-macros/src/derive/attr.rs
use http::StatusCode;
use http::header::{HeaderName, HeaderValue};
use syn;
use quote::quote;
#[derive(Debug)]
pub(crate) struct Attribute {
pub kind: Kind,
pub source: syn::Attribute,
}
#[derive(Debug)]
pub(crate) enum Kind {
Status(Option<StatusCode>),
Header {
name: Option<HeaderName>,
value: Option<HeaderValue>,
},
Template(String),
Either
}
impl Attribute {
pub(crate) fn is_web_attribute(attr: &syn::Attribute) -> bool {
attr.path.segments.len() == 1 && attr.path.segments[0].ident == "web"
}
pub(crate) fn from_ast(attrs: &[syn::Attribute])
-> Result<Vec<Attribute>, String>
{
use syn::{Meta, NestedMeta};
let mut ret = vec![];
for attr in attrs {
if !Attribute::is_web_attribute(attr) {
continue;
}
let meta = match attr.parse_meta() {
Ok(meta) => meta,
Err(err) => return Err(format!("failed parsing attribute: {}", err)),
};
let source = attr.clone();
match meta {
Meta::List(meta_list) => {
for meta in &meta_list.nested {
let meta = match meta {
NestedMeta::Meta(meta) => meta,
NestedMeta::Lit(_) => {
unimplemented!("unexpected attribute literal; file={}; line={}", file!(), line!())
}
};
let attr = match meta {
Meta::Path(path) => {
let meta = path.get_ident().expect("invalid struct-level annotation");
if meta == "header" {
Attribute::header_from_word(&source)
} else if meta == "status" {
Attribute::status_from_word(&source)
} else if meta == "either" {
Attribute::either_from_word(&source)
} else if meta == "template" {
let actual = quote!(#meta);
return Err(format!("invalid struct level `template` annotation. The attribute must be formatted as:\n\n\
`#[web(template = \"foo\")]`\n\n\
Actual: {}", actual.to_string()));
} else {
unimplemented!("error handling");
}
}
Meta::List(meta) => {
if meta.path.is_ident("header") {
Attribute::header_from_list(meta, &source)
} else {
let actual = quote!(#meta);
return Err(format!("invalid struct level `status` annotation. The attribute must be in one of \
the following formats:\n\n\
`#[web(status)]`\n\
`#[web(status = \"201\")]`\n\n\
Actual: {}", actual.to_string()));
}
}
Meta::NameValue(meta) => {
if meta.path.is_ident("status") {
Attribute::status_from_name_value(meta, &source)
} else if meta.path.is_ident("template") {
Attribute::template_from_name_value(meta, &source)
} else if meta.path.is_ident("header") {
unimplemented!("unexpected attribute; {:?}", meta);
} else {
unimplemented!("unexpected attribute; {:?}", meta);
}
}
};
ret.push(attr);
}
}
_ => {
unimplemented!("file={}; line={}", file!(), line!());
}
}
}
Ok(ret)
}
fn status_from_word(source: &syn::Attribute) -> Attribute {
Attribute {
kind: Kind::Status(None),
source: source.clone(),
}
}
fn header_from_word(source: &syn::Attribute) -> Attribute {
Attribute {
kind: Kind::Header {
name: None,
value: None,
},
source: source.clone()
}
}
fn either_from_word(source: &syn::Attribute) -> Attribute {
Attribute {
kind: Kind::Either,
source: source.clone(),
}
}
fn status_from_name_value(
meta: &syn::MetaNameValue,
source: &syn::Attribute
) -> Attribute
{
use syn::Lit;
let kind = match meta.lit {
Lit::Str(ref lit_str) => {
let lit_str = lit_str.value();
let bytes = lit_str.as_bytes();
let status = StatusCode::from_bytes(bytes)
.unwrap();
Kind::Status(Some(status))
}
ref meta => unimplemented!("unsupported meta: {:?}", meta),
};
Attribute {
kind,
source: source.clone(),
}
}
fn template_from_name_value(
meta: &syn::MetaNameValue,
source: &syn::Attribute,
) -> Attribute
{
use syn::Lit;
let kind = match meta.lit {
Lit::Str(ref lit_str) => {
let lit_str = lit_str.value();
Kind::Template(lit_str)
}
ref meta => unimplemented!("unsupported meta: {:?}", meta),
};
Attribute {
kind,
source: source.clone(),
}
}
fn header_from_list(meta: &syn::MetaList, source: &syn::Attribute) -> Attribute {
use syn::{NestedMeta, Meta, Lit};
let mut name = None;
let mut value = None;
for meta in &meta.nested {
match meta {
NestedMeta::Meta(Meta::NameValue(meta)) => {
if meta.path.is_ident("name") {
match meta.lit {
Lit::Str(ref v) => {
let hdr = v.value()
.parse()
.unwrap(); // TODO: Error handling
name = Some(hdr);
}
_ => unimplemented!("file={}; line={}", file!(), line!()),
}
} else if meta.path.is_ident("value") {
match meta.lit {
Lit::Str(ref lit_str) => {
let lit_str = lit_str.value();
let bytes = lit_str.as_bytes();
let hdr_val = HeaderValue::from_bytes(bytes)
.unwrap();
value = Some(hdr_val);
}
_ => unimplemented!("file={}; line={}", file!(), line!()),
}
} else {
unimplemented!("file={}; line={}", file!(), line!());
}
}
meta => unimplemented!("unsupported meta: {:?}", meta),
}
}
Attribute {
kind: Kind::Header { name, value },
source: source.clone(),
}
}
}
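
// Illustrative input that this parser accepts (the derive name is an
// assumption from the wider crate; the `web` attributes match the grammar
// handled above):
//
//     #[derive(Response)]
//     #[web(status = "201")]
//     #[web(header(name = "x-hello", value = "world"))]
//     struct CreatedResponse {
//         msg: String,
//     }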
|
//
// TalkingDataEAuth.h
// TalkingDataSDK
//
// Created by Robin on 7/13/16.
// Copyright © 2016 TendCloud. All rights reserved.
//
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, TDEAuthType) {
    TDEAuthTypeApplyCode = 0,   // request an auth code
    TDEAuthTypeChecker,         // check whether an account is verified
    TDEAuthTypePhoneMatch,      // check whether an account matches a phone number
    TDEAuthTypeBind,            // bind account verification
    TDEAuthTypeUnBind           // unbind account verification
};
typedef NS_ENUM(NSInteger, TDAuthCodeType) {
    TDAuthCodeTypeSMS = 0,      // SMS verification
    TDAuthCodeTypeVoice         // voice verification
};
// Delegate callbacks are delivered on your main thread
@protocol TalkingDataEAuthDelegate <NSObject>
- (void)onRequestSuccess:(TDEAuthType)type requestId:(NSString *)requestId phoneNumber:(NSString *)phoneNumber phoneNumSeg:(NSArray *)phoneNumSeg;
- (void)onRequestFailed:(TDEAuthType)type errorCode:(NSInteger)errorCode errorMessage:(NSString *)errorMessage;
@optional
- (void)onRequestSuccess:(TDEAuthType)type;
@end
@interface TalkingDataEAuth : NSObject
/**
 *  Initialize EAuth
 *
 *  @param appID
 *         The AppID assigned by TalkingData
 *  @param secretID
 *         The SecretID assigned by TalkingData
 */
+ (void)initEAuth:(NSString *)appID secretId:(NSString *)secretID;
/**
 *  Enable or disable log output
 *
 *  @param enable
 *         whether logging is enabled
 */
+ (void)setLogEnabled:(BOOL)enable;
/**
 *  Get the DeviceID used by the SDK
 *
 *  @return DeviceID
 */
+ (NSString *)getDeviceId;
/**
 *  Request an auth code
 *
 *  @param countryCode
 *         Country calling code, e.g. 86 for China
 *  @param mobile
 *         Phone number requesting the auth code
 *  @param type
 *         Delivery method for the auth code
 *  @param acctName
 *         Username used when the user logs in
 *  @param smsId
 *         SMS template ID
 *  @param delegate
 *         Async callback for the auth-code request
 */
+ (void)applyAuthCode:(NSString *)countryCode
mobile:(NSString *)mobile
authCodeType:(TDAuthCodeType)type
accountName:(NSString *)acctName
smsId:(NSString *)smsId
delegate:(id<TalkingDataEAuthDelegate>)delegate;
/**
 *  Resend the auth code
 *
 *  @param countryCode
 *         Country calling code, e.g. 86 for China
 *  @param mobile
 *         Phone number requesting the auth code
 *  @param type
 *         Delivery method for the auth code
 *  @param acctName
 *         Username used when the user logs in
 *  @param smsId
 *         SMS template ID
 *  @param requestId
 *         ID returned by the original auth-code request
 *  @param delegate
 *         Async callback for the auth-code request
 */
+ (void)reapplyAuthCode:(NSString *)countryCode
mobile:(NSString *)mobile
authCodeType:(TDAuthCodeType)type
accountName:(NSString *)acctName
smsId:(NSString *)smsId
requestId:(NSString *)requestId
delegate:(id<TalkingDataEAuthDelegate>)delegate;
/**
 *  Request an SMS auth code
 *
 *  @param countryCode
 *         Country calling code, e.g. 86 for China
 *  @param mobile
 *         Phone number requesting the auth code
 *  @param acctName
 *         Username used when the user logs in
 *  @param delegate
 *         Async callback for the auth-code request
 */
+ (void)applyAuthCode:(NSString *)countryCode
mobile:(NSString *)mobile
accountName:(NSString *)acctName
delegate:(id<TalkingDataEAuthDelegate>)delegate;
/**
 *  Check whether an account is verified
 *
 *  @param acctName
 *         Username used when the user logs in
 *  @param delegate
 *         Async callback for the request
 */
+ (void)isVerifyAccount:(NSString *)acctName
delegate:(id<TalkingDataEAuthDelegate>)delegate;
/**
 *  Check whether a phone number matches an account
 *
 *  @param countryCode
 *         Country calling code, e.g. 86 for China
 *  @param mobile
 *         Phone number requesting the auth code
 *  @param acctName
 *         Username used when the user logs in
 *  @param delegate
 *         Async callback for the request
 */
+ (void)isMobileMatchAccount:(NSString *)acctName
countryCode:(NSString *)countryCode
mobile:(NSString *)mobile
delegate:(id<TalkingDataEAuthDelegate>)delegate;
/**
 *  Bind real-name verification
 *
 *  @param countryCode
 *         Country calling code, e.g. 86 for China
 *  @param mobile
 *         Phone number to verify
 *  @param authCode
 *         SMS auth code
 *  @param acctName
 *         Username used when the user logs in
 *  @param delegate
 *         Async callback for the bind request
 */
+ (void)bindEAuth:(NSString *)countryCode
mobile:(NSString *)mobile
authCode:(NSString *)authCode
accountName:(NSString *)acctName
delegate:(id<TalkingDataEAuthDelegate>)delegate;
/**
 *  Unbind real-name verification
 *
 *  @param countryCode
 *         Country calling code, e.g. 86 for China
 *  @param mobile
 *         Phone number to verify
 *  @param acctName
 *         Username used when the user logs in
 *  @param delegate
 *         Async callback for the unbind request
 */
+ (void)unbindEAuth:(NSString *)countryCode
mobile:(NSString *)mobile
accountName:(NSString *)acctName
delegate:(id<TalkingDataEAuthDelegate>)delegate;
@end
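
// Illustrative usage (credentials and numbers are placeholders, not values
// from this header):
//
//   [TalkingDataEAuth initEAuth:@"yourAppID" secretId:@"yourSecretID"];
//   [TalkingDataEAuth applyAuthCode:@"86"
//                             mobile:@"13800000000"
//                       authCodeType:TDAuthCodeTypeSMS
//                        accountName:@"user01"
//                              smsId:nil
//                           delegate:self];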
|
Power Flow Calculation in Smart Distribution Networks Based on Machine Learning and Fractional Differential Equations
Abstract Based on the theory of fractional differential equations, this paper proposes a simple recursive, iterative scheme for power flow calculation in purely radial networks. The network hierarchy formed by the ADT stack is determined through a breadth-first ordering, which defines the branch sequence for the forward and backward sweeps in the smart distribution network power flow calculation. The Jacobian matrix remains unchanged throughout the calculation, and the interval model is more practical and computationally simpler than the point model. The results show that the power flow calculation method based on fractional differential equations is efficient.
<reponame>Develop-MOPH/his-connection
/// <reference path="../../../typings.d.ts" />
import * as fastify from 'fastify';
const router = (fastify, { }, next) => {
fastify.get('/', async (req: fastify.Request, reply: fastify.Reply) => {
reply.send({
ok: true,
apiCode: 'RP506',
apiName: 'Report 506',
apiDesc: 'Report 506 ระบาดวิทยา',
version: "1.0.0",
hospcode: process.env.HOSPCODE
});
})
next();
}
module.exports = router;
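// Illustrative registration from the app bootstrap (the file path and prefix
// are assumptions, not taken from this repository):
//   fastify.register(require('./routes/refer/rp506'), { prefix: '/rp506' });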
|
A Time-Series Analysis of Firearm Purchasing After Mass Shooting Events in the United States
Importance Increased understanding of public response to mass shootings could guide public health planning regarding firearms. Objectives To test the hypothesis that mass shootings are associated with gun purchasing in the United States and to determine factors associated with gun purchasing changes. Design and Setting In a cross-sectional study, monthly data on US background checks for all firearm purchases, handgun permits, and long gun permits between November 1, 1998, and April 30, 2016, were obtained from the National Instant Criminal Background Check System. All mass shootings resulting in 5 or more individuals injured or killed during the study period were also identified. Interrupted autoregressive integrated moving average time-series modeling was used to identify events associated with changes in gun purchase volume. Then, logistic regression was used to identify event characteristics associated with changes in gun purchases. Analyses were performed between June 6, 2016, and February 5, 2019. Exposures For the time-series analysis, each mass shooting was modeled as an exposure. In the logistic regression, examined factors were the shooter’s race/ethnicity, the region in the United States in which a shooting occurred, whether a shooting was school related, fatalities, handgun use, long gun use, automatic or semiautomatic gun use, media coverage level, and state political affiliation. Main Outcomes and Measures Identification of major mass shootings significantly associated with changes in gun purchases, and the identification of event-specific factors associated with changes in gun purchases. Results Between November 1998 and April 2016, 124 major mass shootings and 233 996 385 total background checks occurred. A total of 26 shootings (21.0%) were associated with increases in gun purchases and 22 shootings (17.7%) were associated with decreases in gun purchasing. Shootings receiving extensive media coverage were associated with handgun purchase increases (odds ratio, 5.28; 95% CI, 1.30-21.41; P = .02). Higher-fatality shootings had an inverse association with handgun purchase decreases (odds ratio, 0.73; 95% CI, 0.53-1.00; P = .049). Conclusions and Relevance The findings of this study suggest an association between mass shootings and changes in gun purchases, observed on a comprehensive timescale. Identification of media coverage and fatalities as significant factors underlying this association invites further study into the mechanisms driving gun purchase changes, holding implications for public health response to future gun violence.
Introduction
Gun violence in the United States constitutes a serious public health crisis, causing more than 30 000 deaths annually. 1 In 2017, more deaths were attributable to firearm injuries than to motor vehicle traffic crashes (12.2 vs 11.9 deaths per 100 000 people). 2 Mass shootings contribute to only a small fraction of this mortality and morbidity burden, at less than 1% of US firearm deaths. 3 Nevertheless, these events offer an important lens for understanding connections between gun violence and public opinion, with implications for gun violence prevention as a whole.
Although relatively rare compared with other forms of gun violence, mass shootings are extremely high profile. 4 These shootings often receive high media coverage; for instance, mass shootings such as the Sandy Hook Elementary School shooting were voted the top news topic of 2012 by the Associated Press, higher even than the presidential election. 5 As a result, the fear that mass shootings inspire in the public has been disproportionate to their frequency. 6,7 Surveys have shown that most US citizens view mass shootings as indications of underlying societal issues, which have the power to alter people's fear of victimization and perceptions of gun control policies. Changes in gun purchases are one way in which these attitude changes after mass shootings may translate to behavior. Sharp increases in gun purchases after mass shootings, including shootings in Newtown, Connecticut; San Bernardino, California; Orlando, Florida; and Parkland, Florida, have been documented by the media. Empirical studies of this association have been narrower in scope but offer evidence that such an association may exist. Studies specifically examining the 2012 Sandy Hook Elementary School shooting and the 2015 San Bernardino shooting have demonstrated large increases in the volume of gun sales after the events, with excess purchases of up to 3 million guns nationally after the Sandy Hook Elementary School shooting. 15,16 Another study observing a larger sample, yet limited to 6 highly publicized mass shootings, also found evidence that such an association between mass shootings and gun purchasing was possible. 17 Because these studies' scopes are limited to select shootings, especially those receiving the most extensive media coverage, there is a need for empirical examination of these effects on a larger timescale and using a broader sample of shootings.
The first of 2 main hypotheses for the increase in gun sales associated with mass shootings is that people will buy additional guns because of increased fear of victimization. Most US citizens believe that it is somewhat or very likely that their own community could experience a mass shooting. 18 Because personal protection is the most common reason for gun ownership cited by US gun owners, it is plausible that the increased anxiety from these events could also drive increased gun ownership. 19 However, the evidence is mixed on whether perceived risk of victimization actually translates to gun purchases. 20,21 Increases in gun sales were observed after the September 11, 2001 (9/11), World Trade Center attacks, an event that provoked fear nationally. 22 However, while surveys of gun owners and nonowners after the 2016 Orlando Pulse nightclub shooting demonstrated that nonowners perceived increased risk, neither group reported significantly increased gun purchasing intentions. 21 The second main mechanism by which gun purchases might increase in response to a mass shooting is that people will buy additional guns if they believe that gun control measures will restrict their future ability to do so. Mass shootings are frequently followed by calls for gun control, whether from politicians (eg, President Barack Obama's mention of the Sandy Hook Elementary shooting in his 2013 State of the Union address) or from the public (including student-led activism after the Marjory Stoneman Douglas High School shooting). 23,24 In response, people concerned about potential legislation resulting from this advocacy may be motivated to buy guns or related paraphernalia, especially those that might be restricted. For example, after the 2017 Las Vegas Route 91 Harvest festival shooting, gun stores and distributors also received much higher demands for bump stock accessories of the same types used by the shooter, which were eventually banned in December 2018. 25,26 Similar increases in purchases have been empirically observed in response to the 2008 and 2012 elections of a Democratic president 27 ; the reverse effect, decreases in gun purchasing after the 2016 presidential election (colloquially termed the "Trump slump"), has also been reported, with the reasoning being that gun legislation is less likely to occur with a conservative president and Congress. 28,29 Changes in gun purchasing behavior can have serious associations with morbidity and mortality.
For instance, states that experienced increases in firearm sales after the Sandy Hook Elementary School shooting also experienced significant increases in accidental firearm deaths. 16 These associations could potentially occur across all forms of gun violence, as associations between increased availability of guns and increased risk of suicide, homicide victimization, and unintentional injury have been observed. This association is not without complication, however, as there has been mixed evidence on whether rates of gun ownership are associated with rates of crime. 33,34 In addition, there may be moderating factors between gun purchasing and injury, such as whether the purchaser is a new or existing gun owner. 35 We aimed to identify, by examining more than 100 major mass shootings that took place in the United States during the past 2 decades, whether these shootings were associated with significant changes in gun purchasing behavior. We also sought to identify which characteristics of the mass shootings were associated with these changes.
Data Sources
We obtained data from the National Instant Criminal Background Check System on the number of background checks completed between November 1, 1998, and April 30, 2016. 36 This database is maintained by the Federal Bureau of Investigation and used by Federal Firearms Licensees, as well as private sellers in some states, to determine customer eligibility for firearm purchases. 9 Although not all gun purchases are subject to background checks under federal law, and even successful background checks may not always lead to purchases, background check data have been used extensively in the literature as a proxy for intent to purchase firearms. 16,17,27 Data are aggregated monthly for all US states and territories, including such categories as permit purchases, redemptions, returns, or rentals of handguns or long guns. In this analysis we considered background checks for all gun-related purchases, handgun permits, and long gun permits on a national basis. Although background checks for handgun permits and long gun permits are included within the total background checks category, we chose to also examine these 2 categories separately to disentangle the 2 hypothesized mechanisms for changes in gun purchases. Gun buyers motivated by fear of crime are especially likely to buy handguns 37,38; conversely, because gun control efforts after mass shootings frequently single out assault-style weapons, especially semiautomatic rifles such as the AR-15, gun buyers motivated by fear of gun control might be more likely to buy long guns. This study followed the Strengthening the Reporting of Observational Studies in Epidemiology (STROBE) reporting guideline. This study was considered exempt from review by the University of Pennsylvania Institutional Review Board because the data were aggregated and anonymized.
We obtained data on mass shooting events from the Stanford Mass Shootings in America database, which contains information on all shootings with 3 or more individuals injured or killed between August 1966 and April 2016 (as of June 2016 data collection). 10 We included only shootings occurring since November 1998, ensuring the same timeframe as the National Instant Criminal Background Check System background checks data. We also used a narrower definition of major mass shooting: 5 or more individuals injured or killed. This higher morbidity threshold limited the sample to a feasible number of interruptions for the autoregressive integrated moving average (ARIMA) modeling step (N = 124); it also increased sample specificity to events that would feasibly reach the public's attention as a mass shooting (as the public's reaction to a mass shooting might be different from their reaction to gang-related violence, for instance) but still remained sensitive enough to capture a large number of events. Three nonshooting events were also included in the analysis: the 9/11 World Trade Center attacks, the November 2008 presidential election of Barack Obama, and President Obama's November 2012 reelection. These events were included because of their national significance, extensive media coverage, and literature demonstrating associated changes in gun purchasing after they took place. 22,27 Failing to control for these major events in this way could have hampered our ability to test with accuracy whether mass shootings during the same general period were associated with changes in firearm purchasing behavior.
We obtained data on media coverage for each shooting from the LexisNexis Academic Newspapers & Wires database, a widely used news archive providing coverage of major US and international newspapers. 42,43 For each event, we searched for newspaper articles published within 1 month of the event, recording the number of articles returned. The search parameters were the title of the shooting as given in the Stanford database (eg, "Sandy Hook elementary school") "AND shoot! AND gun!", omitting generic place descriptors such as "building" on a case-by-case basis to ensure result relevance. The maximum number of results returned by the LexisNexis search was 1000 articles.
Time-Series Analysis
Seasonal ARIMA modeling was used to identify events associated with concurrent changes in the volume of all 3 background check categories: total gun purchase related, handgun permit, and long gun. ARIMA modeling is a form of interrupted time-series analysis, widely used in the public health intervention literature and considered one of the strongest possible quasiexperimental designs. 44,45 The iterative modeling process consisted of first identifying the ARIMA(p, d, q)(P, D, Q)_m model that best fit the background checks time-series, in which the (p, d, q) parameters refer to the order and differencing degree of nonseasonal autoregressive and moving-average components and the (P, D, Q)_m parameters refer to the order and differencing degree of seasonal components, as well as the period of seasonality (m). 46 We then used indicator variables for each mass shooting ("interruption") to test the null hypothesis that each event was not associated with a significant change relative to the forecasted change in the volume of background checks; the multi-interruption approach that we used is similar to that used in the analysis by Wallace 17 of select mass shootings, as well as intervention evaluations in other public health areas. 47,48 Interruption effect patterns were tested using the following 5 different transfer functions testing different temporal patterns of gun purchase changes, which were informed by previous studies and mechanistic hypotheses: (1) zero-order function to a step variable (immediate permanent effect), (2) a first-order "pulse" function with a 3-month duration (immediate temporary effect lasting 3 months), (3) a first-order pulse function with a 5-month duration (immediate temporary effect lasting 5 months), (4) a first-order pulse function with a 5-month duration and a 1-month lag (immediate temporary effect lasting 5 months, starting 1 month after event occurrence), and (5) a combination of 2 first-order pulses in opposite directions of a total 5-month duration (gradual effect and dissipation over 5 months). The best-fit transfer function was selected based on the lowest Akaike information criterion value. Interruptions were added stepwise chronologically to the ARIMA model and retained if the coefficient's P value remained < .20. Because of the monthly nature of the background check outcome data, only 1 interruption was tested per month with multiple shootings and 1 effect size obtained for all shootings in that month. Regardless of significance, events occurring the month immediately after an event with the maximum level of media coverage (≥1000 articles) were excluded unless they also had the maximum level, as a longer-duration effect was anticipated from events with the highest level of media coverage. After modeling, event interruptions with P ≥ .10 were removed from the final model in order to achieve greater statistical power.
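As a concrete illustration of this interrupted time-series setup (a minimal sketch, not the authors' code; the file name, column name, event date, and ARIMA orders are all placeholders), a single pulse-type interruption can be fit as an exogenous regressor:

import pandas as pd
from statsmodels.tsa.statespace.sarimax import SARIMAX

# Hypothetical monthly background-check series indexed by month.
checks = pd.read_csv("nics_monthly.csv", index_col="month", parse_dates=True)

# 5-month pulse starting at the event month (placeholder: December 2012).
pulse = pd.Series(0.0, index=checks.index, name="pulse")
pulse.loc["2012-12":"2013-04"] = 1.0

model = SARIMAX(checks["total"], exog=pulse.to_frame(),
                order=(1, 1, 1), seasonal_order=(1, 1, 1, 12))
fit = model.fit(disp=False)
print(fit.params["pulse"], fit.pvalues["pulse"])  # interruption effect and P value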
Logistic Regression
After the time-series analysis, we used logistic regression to identify demographic, firearm-related, and event-specific characteristics associated with changes in gun purchasing. Tested outcomes were whether a given shooting was significantly associated with any change, an increase, or a decrease in total, handgun permit, or long gun permit background checks. Tested factors were the shooter's race/ethnicity, the region of the United States in which a shooting occurred, whether a shooting was school related, the number of fatalities, use of handguns, use of long guns, use of automatic or semiautomatic guns, whether a shooting received the highest degree of media coverage (as measured by ≥1000 articles returned by a LexisNexis search), and the political party of the governor at the time in the state in which a shooting occurred (as a proxy for regional political affiliations).
Missing or unknown values were coded as NA (not applicable) values and not included in the analyses. All P values were from 2-sided tests and results were deemed statistically significant at P < .05, and multivariate logistic regression was planned in the event that multiple factors were found to be significant for a given outcome. Data analyses were performed in R (R Project for Statistical Computing) between June 6, 2016, and February 5, 2019.
Results
A total of 233 996 385 background checks occurred during the study period (Figure 1), with an upward trend exhibited in all 3 examined background check categories: total gun-related purchases, handgun permits, and long gun permits. These data also displayed strong seasonality, with annual peaks typically occurring in late fall. A total of 124 mass shootings occurred during the study period (Table 1).
The full form and parameters of the best-fit ARIMA models for each type of background check examined can be found in Table 2. The selected transfer function used to model interruptions was a first-order pulse function with a 5-month duration (immediate temporary effect lasting 5 months).
In total, 51 of the 127 tested events (Table 3) were associated with changes in gun purchasing; 26 shootings were associated with increases and 22 shootings were associated with decreases. That such effects may occur has been posited extensively in media venues but has not been evaluated as a research question with the comprehensive approach performed herein, to our knowledge. The increases observed were consistent with previous studies examining smaller sets of shootings and specific political events. 27 However, the decreases in gun purchases observed were unexpected based on previous literature. We also identified that the shootings that receive a high amount of media coverage are more likely to be associated with significant increases in handgun purchasing and shootings with more fatalities are more likely to be associated with significant decreases in handgun purchasing.
Background checks associated with handgun vs long gun permits were examined separately to differentiate between the 2 major hypotheses underlying increases in gun purchases. Because of the connection between handgun ownership and self-defense, the logistic regression findings are relevant primarily to the fear of victimization hypothesis. 37 The association between media coverage and increased handgun purchases is supported by the finding by Wallace 17 that the increase in gun purchases associated with a shooting varies with the degree of news coverage. As the media coverage that mass shootings receive is disproportionate and frequently sensationalized, it thus inspires fear and motivates gun purchases for self-defense. 49 Our finding that shootings with more fatalities were associated with decreases in handgun purchases also lends credence to the association between media, public perception of victimization, and gun purchases; because shootings with higher fatalities tend to receive both more and longer news articles, the increased effect on the public's anxiety might counteract a decrease in purchases of guns for self-defense that would otherwise occur. 8,50 As the shooter's race/ethnicity and ideological motivation are also associated with an increase in the extent of media coverage as well as with its content, media focus on the perpetrator rather than victims may also contribute to increasing handgun purchases. 51 The association of the 22 mass shootings with decreases in 1 or more categories of gun purchases was an unexpected finding of the study. One potential hypothesis is regression to the mean, especially given that some shooting events associated with decreases in gun purchasing occurred closely after extremely high-profile shootings associated with large increases in all forms of gun purchases (eg, the Sandy Hook Elementary School shooting, which was followed 2 months later by the Los Angeles Police Department shooting by Christopher Dorner and by the Ladera Ranch, California, shooting). Because previous research finding only increases in gun purchases has focused on select mass shootings rather than a broader sample of mass shootings, it is also possible that lower-profile mass shootings may result in different changes in public perceptions, although this possibility is as yet unexplored. However, there were no shootings associated with decreases in all categories of background checks, as opposed to 7 events associated with increases in all categories, so the mechanisms underlying decreases of gun purchases may operate on a smaller scale than the mechanisms underlying increases of gun purchases.
Limitations
One limitation of this study is that it is not possible to draw causal conclusions from the findings without more in-depth investigation and elimination of confounders. Three nonshooting events (the 9/11 attacks and the 2008 and 2012 presidential elections) were included in the model, as described above.
[Figure: Events associated with significant gun purchase changes are labeled (n = 51).]
Because of the frequent focus on assault-style weapons in the media and gun control advocacy, it was hypothesized that use of automatic or semiautomatic guns during a shooting would increase the likelihood of increases in both handgun and long gun purchases. 53 However, our analysis did not find a significant association between the type of gun used in a shooting and changes in any category of gun purchase. This finding may have been due to power issues from the sample of mass shootings (N = 124); as this analysis is intended as primarily hypothesis generating, we will be highlighting those estimates with larger odds ratios but nonsignificant results for future follow-up.
Because the examination of gun purchases was performed at a national level, there may have been event effects specific to region or urbanization level that were not captured. For example, the effects of a mass shooting might be greater in areas closer to the shooting location. If gun purchase behavior is most affected only among persons living locally to a given mass shooting event, that would likely have created bias in our results toward the null; associations found to be positive at the national level would likely have a larger signal had they been measured at the local level.
Although this study examined only gun purchases that could be captured by available background check data, also examining private gun sales could allow for a greater understanding of the effects of mass shootings on overall levels of gun ownership. An estimated 50% of private gun acquisitions, which occur outside of a gun shop or pawn shop, are not preceded by a background check, because fewer than half of US states extend background check requirements past the federal mandate (with varying levels of coverage). 54,55 Google Trends could give a proxy for people's interest in privately selling their guns, given a lack of direct measures of these data, and has previously been used as a proxy for gun acquisition. 16
Conclusions
Although data limitations prevented this analysis from examining the potential effects of mass shootings that occurred after 2016 (including the October 2018 shooting at the Pittsburgh Tree of Life synagogue), the 2016 election and conservative presidency, and the March for Our Lives gun control activism that began after the Marjory Stoneman Douglas High School shooting, we believe this study's results suggest that gun purchasing behavior during later time periods may also have fluctuated based on certain key events. The analysis not only identified that a large proportion (38.7%) of the tested major mass shootings were associated with changes in gun purchase, presenting a more comprehensive look at the phenomenon, but also identified significant associations between media coverage, incident severity, and gun purchasing behavior. Given these conclusions, understanding the mechanisms underlying these changes is an important component of the public health community's response to these shootings. |
<reponame>sang89vh/springmvc-template
package com.faq.mbackend.dto.out;
/**
* Created by jack on 4/2/16.
*/
public class ParseSessionOutVO extends BaseOutVO {
private String objectId;
private String sessionToken;
private String user;
private String restricted;
private String expiresAt;
private String updatedAt;
private String createdAt;
private String createdWith;
public String getCreatedWith() {
return createdWith;
}
public void setCreatedWith(String createdWith) {
this.createdWith = createdWith;
}
public String getObjectId() {
return objectId;
}
public void setObjectId(String objectId) {
this.objectId = objectId;
}
public String getSessionToken() {
return sessionToken;
}
public void setSessionToken(String sessionToken) {
this.sessionToken = sessionToken;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getRestricted() {
return restricted;
}
public void setRestricted(String restricted) {
this.restricted = restricted;
}
public String getExpiresAt() {
return expiresAt;
}
public void setExpiresAt(String expiresAt) {
this.expiresAt = expiresAt;
}
public String getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(String updatedAt) {
this.updatedAt = updatedAt;
}
public String getCreatedAt() {
return createdAt;
}
public void setCreatedAt(String createdAt) {
this.createdAt = createdAt;
}
}
|
// NumberInt is a helper function to convert an expected integer that is returned from a json Unmarshal as a Number,
// into an actual integer without returning any errors. If there is an error, it just returns 0. Use this when you absolutely
// know you are expecting an integer. Can convert strings too.
func NumberInt(i interface{}) int {
switch n := i.(type) {
case json.Number:
v, _ := n.Int64()
return int(v)
case string:
v, _ := strconv.Atoi(n)
return v
}
return 0
}
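
// Illustrative use with a decoder configured via UseNumber, so numeric JSON
// values arrive as json.Number rather than float64:
//
//	var m map[string]interface{}
//	dec := json.NewDecoder(strings.NewReader(`{"age": 42}`))
//	dec.UseNumber()
//	_ = dec.Decode(&m)
//	age := NumberInt(m["age"]) // 42
|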
Quantum studies of the vibrations in H3O2- and D3O2-.
The vibrations of H3O2- and D3O2- are investigated using diffusion Monte Carlo (DMC) and vibrational configuration-interaction approaches, as implemented in the program MULTIMODE. These studies use the potential surface recently developed by Huang et al. The focus of this work is on the vibrational ground state and fundamentals which occur between 100 and 3700 cm(-1). In most cases, excellent agreement is obtained between the fundamental frequencies calculated by the two approaches. This serves to demonstrate the power of both methods for treating this very anharmonic system. Based on the results of the MULTIMODE and DMC treatments, the extent and nature of the couplings in H3O2- and D3O2- are investigated. |
package org.highmed.dsf.bpe.start;
import static org.highmed.dsf.bpe.start.ConstantsExampleStarters.TTP_DOCKER_FHIR_BASE_URL;
public class Ping3MedicFromTtpDockerExampleStarter extends AbstractPing3MedicFromTtpExampleStarter
{
// Environment variable "DSF_CLIENT_CERTIFICATE_PATH" or args[0]: the path to the client-certificate
// highmed-dsf/dsf-tools/dsf-tools-test-data-generator/cert/Webbrowser_Test_User/Webbrowser_Test_User_certificate.p12
// Environment variable "DSF_CLIENT_CERTIFICATE_PASSWORD" or args[1]: the password of the client-certificate
// password
public static void main(String[] args) throws Exception
{
new Ping3MedicFromTtpDockerExampleStarter().main(args, TTP_DOCKER_FHIR_BASE_URL);
}
}
|
An Improved Channel Access Algorithm for IEEE 802.15.4 WPAN
The IEEE 802.15.4 standard achieves low-power transmission in low-rate, short-distance wireless personal area networks (WPANs). Its CSMA/CA contention mechanism is subject to collisions, and when a collision occurs the algorithm retransmits, so reducing the retransmission count is important. In this paper, we propose a channel access algorithm for IEEE 802.15.4 LR-WPAN. For performance analysis, we use the OPNET network simulator. The proposed algorithm decreases transmission delay, energy consumption, and dropped packets while increasing throughput, so it is more efficient than the IEEE 802.15.4 standard. |
// Copy constructor: clones the node itself and calls deepCopy on the children.
ParamNode::ParamNode(const ParamNode &node) :
XmlNode()
{
*this = node;
this->ClearChildren();
for (int i=0; i<node.GetNumChildren(); i++) {
ParamNode *child = node.GetChild(i);
ParamNode *newchild = child->deepCopy();
this->AddChild(newchild);
}
} |
/**
* Created by rachaelmahon on 12/04/2017.
*/
import java.util.Scanner;

public class main_Menu {
    private Book_Manager library = new Book_Manager(); // assumes Book_Manager has a no-arg constructor
void runMenu(){
displayOptions();
String selection = getInput();
interpretMenuSelection(selection);
}
void displayOptions() {
System.out.println("Main Menu - Please type the number of your selection \n" +
"1. Display available books \n" +
"2. Checkout book\n" +
"3. Return book \n" +
"4. Exit\n");
}
String getInput() {
Scanner menu_selection = new Scanner(System.in);
return menu_selection.nextLine();
}
void interpretMenuSelection(String input) {
if(input.equals("1")) {
library.displayBooks();
runMenu();
} else if(input.equals("2")){
checkoutBook();
runMenu();
} else if(input.equals("3")){
returnBook();
runMenu();
} else if(input.equals("4")) {
quitApplication();
} else {
invalidMenuOption();
runMenu();
}
}
void checkoutBook() {
System.out.println("What is the title of the book you wish to checkout?:");
String book = getInput();
library.checkoutBook(book);
}
void returnBook() {
System.out.println("What is the title of the book you wish to return?:");
String book = getInput();
library.returnBook(book);
}
void quitApplication () {
System.out.println("Goodbye!");
}
void invalidMenuOption () {
System.out.println("Your selection is invalid. Please select a valid menu option.");
}
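    // Illustrative entry point for this menu (assumes it is launched directly):
    // public static void main(String[] args) { new main_Menu().runMenu(); }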
} |
import os
import numpy as np
import pandas as pd
import matplotlib as mpl
from matplotlib import pyplot as plt
from matplotlib import gridspec
from svphase.learn.features import PreloadFeatures
from svphase.analyze.compare_model_stats import get_model_and_version, main_per_class as compare_model_stats
from svphase.learn.evaluation import ClassLabel
from svphase.utils.config import FONT, COLORS
def plot(models_dir, model, model_versions, rlabels, title, low_ylim=0.4, high_ylim=1.0):
assert len(rlabels)==len(model_versions)
	print(models_dir, model, model_versions)
n = len(rlabels)
	model_stats = list(map("{0}/{1}.{2}.pkl.stat".format, [models_dir,]*n, [model,]*n, model_versions))
excluded_counts = {}
	for v in model_versions:
		feat = PreloadFeatures("{0}/feats.{1}".format(models_dir, v))
		excluded_counts[v] = sum(feat.get_nonzero()==False)
	print("Comparing Models", model_stats)
statsdf = compare_model_stats(model_stats)
original_order = pd.DataFrame(np.arange(len(model_stats)), index=pd.MultiIndex.from_tuples(map(get_model_and_version, model_stats)), columns=['original_idx'])
#print original_order
statsdf = pd.concat([statsdf, original_order], axis=1).reset_index().set_index('original_idx').sort_index()
statsdf = statsdf.set_index('version')
excl_c_k, excl_c_v = zip(*excluded_counts.items())
	print(excl_c_k, excl_c_v)
excl_c = pd.DataFrame([excl_c_v,], index=['tpred:excl.',], columns=excl_c_k).T
statsdf = pd.concat([statsdf, excl_c], axis=1)
	print(statsdf)
fig = plt.figure(figsize=(8,6.8),facecolor='white', edgecolor='none')
mpl.rc('font', **FONT)
outer_grid = gridspec.GridSpec(1,1,wspace=0.0, hspace=0.0)
outer_grid.update(left=0.08, right=0.92, hspace=0.25)
tax = fig.add_subplot(outer_grid[0])
tax.set_title(title, size='x-large', y=1.05)
tax.set_ylabel('Avg. test accuracy', size='x-large', labelpad=40)
tax.set_frame_on(False)
tax.set_xticks([])
tax.set_yticks([])
gs = gridspec.GridSpecFromSubplotSpec(len(rlabels),1, subplot_spec=outer_grid[0],hspace=0.35)
label_obj = ClassLabel()
order = ['tpred:'+c for c in label_obj.classes]
order = order[:-1] + ['tpred:excl.',] + order[-1:]
class_labels = [c[6:] for c in order]
rename_labels = dict(zip(class_labels, class_labels))
rename_labels['hom']='hom.'
rename_labels['inc']='incorrect'
	class_labels = list(map(rename_labels.get, class_labels))
white_space = 0.01
	for i, (v,stats), rlabel in zip(range(len(rlabels)), statsdf.iterrows(), rlabels):
ax = fig.add_subplot(gs[i,0])
		print(rlabel)
width = stats[order]
cumsum = width.cumsum()
width /= float(cumsum[-1])
left = np.zeros(len(width), dtype=float)
left[1:] = width.cumsum()[:-1]
left += white_space
width = width-2*white_space
		avg_c = list(map(lambda c:'avg'+c[5:], order))
		std_c = list(map(lambda c:'std'+c[5:], order))
		print(v, stats, left, stats[avg_c].values, stats[std_c].values)
ax.bar(left, np.ones(len(left),dtype=float), width=width, color=COLORS['bg'], linewidth=0)
ax.bar(left, stats[avg_c].values, width=width, yerr=stats[std_c].values, color=COLORS['cor'], linewidth=0)
#ylim_for_count = min(stats[avg_c][stats[avg_c].values>0])-0.05
for px, w, c,c_w in zip(left, width, class_labels, stats[order]):
ax.text(px+ w/2, low_ylim, c, ha='center', va='bottom', size='large')
ax.text(px+ w/2, high_ylim, "%d"%c_w, ha='center', va='bottom', size='small')
ax.set_xticks([])
#ax.set_xticklabels(tuple(labels), rotation=80, multialignment='center')
ax.set_ylim(low_ylim, high_ylim)
ax.set_frame_on(False)
ax.get_yaxis().tick_left()
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%0.2f'))
ax.get_xaxis().tick_bottom()
ax.set_title(rlabel, loc='left', y=1.09)
outer_grid.tight_layout(fig)
plt.show()
if __name__=='__main__':
import argparse
parser = argparse.ArgumentParser(description='Plot test accuracies for 3 subsets')
parser.add_argument('--excl', type=str, default=None, help='Exclusion count per column(separated by commas)')
parser.add_argument('title', help="title")
parser.add_argument('rlabels', help="row labels for each model. (separated by commas)")
parser.add_argument('model_dir', help="model directory")
parser.add_argument('model', help="model directory")
parser.add_argument('model_versions', nargs='+', help="stat files generated by each model")
args = parser.parse_args()
plot(args.model_dir, args.model, args.model_versions, args.rlabels.split(','), args.title)
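# Illustrative invocation (all names are placeholders; one row label per model version):
#   python plot_subset_accuracy.py "Trio test accuracy" "WGS,Hi-C" models rf wgs hic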
|
import java.util.Scanner;
public class kantai {
public static void main(String[] args)
{
Scanner sc=new Scanner(System.in);
int n=sc.nextInt();
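        // (n + r) % 4 for r in {0, 1, 2} picks the letter: 1 -> A, 3 -> B, 2 -> C, 0 -> D.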
int val1 = n%4;
int val2=(n+1)%4;
int val3=(n+2)%4;
if(val1==1)
{
System.out.println("0 A");
}
else if(val2==1)
{
System.out.println("1 A");
}
else if(val3==1)
{
System.out.println("2 A");
}
else if(val1==3)
{
System.out.println("0 B");
}
else if(val2==3)
{
System.out.println("1 B");
}
else if(val3==3)
{
System.out.println("2 B");
}
else if(val1==2)
{
System.out.println("0 C");
}
else if(val2==2)
{
System.out.println("1 C");
}
else if(val3==2)
{
System.out.println("2 C");
}
else if(val1==0)
{
System.out.println("0 D");
}
else if(val2==0)
{
System.out.println("1 D");
}
else if(val3==0)
{
System.out.println("2 D");
}
}
}
|
<gh_stars>0
/**
* @(#)ClientLogger.java
*
*
*/
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Observable;
import java.util.Observer;
/**
* This class represents a client logger component which is responsible for storing text messages in
* log files. Show events are expected to carry a
* <code>String</code> object as its parameter that is to be stored. This component need to
* subscribe to those events to receive them, which is done at the time of creation.
*
*/
public class ClientLogger implements Observer {
BufferedWriter logger;
/**
 * Constructs a client logger component. A new client logger component subscribes to show events
 * at the time of creation. Since the client output and client logger components do the same
 * thing with different output streams, no separate logger event is needed: show events are
 * reused to notify the logger to record output.
*/
public ClientLogger(){
EventBus.subscribeTo(EventBus.EV_SHOW, this);
try{
logger = new BufferedWriter(new FileWriter("system.log"));
}catch (IOException e){
e.printStackTrace();
}
}
/**
* Event handler of this client logger component. On receiving a show event, the attached
* <code>String</code> object is stored into the log file.
*
* @param event an event object. (caution: not to be directly referenced)
* @param param a parameter object of the event. (to be cast to appropriate data type)
*/
@Override
public void update(Observable event, Object param) {
try {
logger.write((String) param+"\n");
logger.flush();
}catch (Exception e){
e.printStackTrace();
}
}
/**
* Close the writer before the program exits.
* @throws Throwable
*/
@Override
protected void finalize() throws Throwable {
super.finalize();
logger.close();
}
}
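
// Illustrative wiring (assumes EventBus exposes a publish/notify method for
// EV_SHOW; only subscribeTo is visible in this file):
//   ClientLogger logger = new ClientLogger();
//   EventBus.publish(EventBus.EV_SHOW, "client started");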
|
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;
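// DP over the strings: nor[i] / rev[i] = minimum total reversal cost to keep the
// first i+1 strings sorted with the i-th string unreversed / reversed.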
int main()
{
ll n, cost[100005] , nor[100005], rev[100005];
string str[100005], revstr[100005];
cin>>n;
for (int i = 0; i < n; i++)
{
cin>>cost[i];
}
for (int i = 0; i < n; i++)
{
cin>>str[i];
revstr[i] = str[i];
reverse(revstr[i].begin(),revstr[i].end());
nor[i] = 1e18;
rev[i] = 1e18;
}
nor[0] = 0;
rev[0] = cost[0];
for (int i = 1; i < n; i++)
{
bool x = false;
if (str[i] >= str[i-1])
{
x = true;
nor[i] = nor[i-1];
}
if (str[i] >= revstr[i-1] && rev[i-1] != 1e18)
{
x = true;
nor[i] = min(nor[i],rev[i-1]);
}
if (revstr[i] >= str[i-1])
{
x = true;
rev[i] = nor[i-1]+cost[i];
}
if (revstr[i] >= revstr[i-1] && rev[i-1] != 1e18)
{
x = true;
rev[i] = min(rev[i-1]+cost[i],rev[i]);
}
if(x == false)
{
cout << -1;
return 0;
}
}
ll ans = min(nor[n-1],rev[n-1]);
cout << ans;
return 0;
}
|
/**
* Factory to create the different mock object for the tests
*
* @author ssommerf
*
*/
public class Mockery {
public static Client createBeerClientMock() throws IOException {
Client client = mock(Client.class);
WebResource webResource = mock(WebResource.class);
when(client.resource("http://api.brewerydb.com/v2/")).thenReturn(
webResource);
IBreweryDBConnectorConfiguration fakeConfig = createConfigMock();
when(webResource.path(anyString())).thenReturn(webResource);
createPageMock(webResource, 1, fakeConfig);
createPageMock(webResource, 2, fakeConfig);
createPageMock(webResource, 3, fakeConfig);
createWithBreweriesMock(webResource, 1, fakeConfig);
createWithBreweriesMock(webResource, 2, fakeConfig);
createBreweriesMock(webResource, 1, fakeConfig);
createBreweriesMock(webResource, 2, fakeConfig);
createBreweriesMock(webResource, 3, fakeConfig);
createGetBeerByIdMock(client, "cBLTUw", webResource, fakeConfig);
createGetBreweryByIdMock(client, "Klgom2", webResource, fakeConfig);
WebResource searchResource = mock(WebResource.class);
when(webResource.path("search/")).thenReturn(searchResource);
when(webResource.path("search/upc")).thenReturn(searchResource);
createSearchMock(searchResource, "Haus", 1, fakeConfig);
createSearchMock(searchResource, "Haus", 2, fakeConfig);
createSearchMock(searchResource, "Haus", 3, fakeConfig);
createSearchByUPCMock(searchResource, "606905008303", fakeConfig);
return client;
}
private static void createSearchByUPCMock(WebResource searchResource,
String upc, IBreweryDBConnectorConfiguration fakeConfig) throws UniformInterfaceException, ClientHandlerException, IOException {
MultivaluedMap<String, String> map = new MultivaluedMapImpl();
map.add("code", upc);
map.add("key", fakeConfig.getApiKey());
WebResource mock = mock(WebResource.class);
when(searchResource.queryParams(map)).thenReturn(mock);
when(mock.get(BeerSearchResultPage.class)).thenReturn(
createBeerSearchResultPageForUPC(upc));
}
private static BeerSearchResultPage createBeerSearchResultPageForUPC(
String upc) throws IOException {
InputStream stream = Mockery.class.getResourceAsStream("/search/searchByUPC" + upc + ".json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BeerSearchResultPage.class);
}
private static void createSearchMock(WebResource searchResource,
String query, int page, IBreweryDBConnectorConfiguration fakeConfig)
throws UniformInterfaceException, ClientHandlerException,
IOException {
MultivaluedMap<String, String> map = new MultivaluedMapImpl();
map.add("key", fakeConfig.getApiKey());
map.add("q", query);
map.add("type", "beer");
if (page != 1) {
map.add("p", page + "");
}
WebResource mock = mock(WebResource.class);
when(searchResource.queryParams(map)).thenReturn(mock);
when(mock.get(BeerSearchResultPage.class)).thenReturn(
createBeerSearchResultPage(query, page));
}
private static BeerSearchResultPage createBeerSearchResultPage(
String query, int page) throws IOException {
InputStream stream = Mockery.class.getResourceAsStream("/search/search"
+ query + "Page" + page + ".json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BeerSearchResultPage.class);
}
public static IBreweryDBConnectorConfiguration createConfigMock() {
IBreweryDBConnectorConfiguration fakeConfig = mock(IBreweryDBConnectorConfiguration.class);
when(fakeConfig.getApiKey()).thenReturn("jjjjkkkkjjjkkkk");
return fakeConfig;
}
private static void createGetBreweryByIdMock(Client client, String string,
WebResource webResource, IBreweryDBConnectorConfiguration fakeConfig)
throws UniformInterfaceException, ClientHandlerException,
IOException {
MultivaluedMap<String, String> withLocationsMap = new MultivaluedMapImpl();
withLocationsMap.add("key", fakeConfig.getApiKey());
withLocationsMap.add("withLocations", "Y");
MultivaluedMap<String, String> withoutLocationsMap = new MultivaluedMapImpl();
withoutLocationsMap.add("key", fakeConfig.getApiKey());
WebResource resource1 = mock(WebResource.class);
when(webResource.path("brewery/" + string + "/")).thenReturn(resource1);
when(resource1.queryParams(withLocationsMap)).thenReturn(resource1);
when(resource1.queryParams(withoutLocationsMap)).thenReturn(resource1);
when(resource1.get(BreweryResult.class)).thenReturn(
createBreweryMock(string));
WebResource resource2 = mock(WebResource.class);
when(webResource.path("brewery/" + string + "x/"))
.thenReturn(resource2);
when(resource2.queryParams(withLocationsMap)).thenReturn(resource2);
when(resource2.queryParams(withoutLocationsMap)).thenReturn(resource2);
ClientResponse r = mock(ClientResponse.class);
when(r.getStatus()).thenReturn(404);
Throwable objectNotFound = new UniformInterfaceException(r);
when(resource2.get(BreweryResult.class)).thenThrow(objectNotFound);
}
private static BreweryResult createBreweryMock(String string)
throws IOException {
InputStream stream = Mockery.class
.getResourceAsStream("/breweries/brewery" + string + ".json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BreweryResult.class);
}
private static void createBreweriesMock(WebResource webResource, int page,
IBreweryDBConnectorConfiguration fakeConfig)
throws UniformInterfaceException, ClientHandlerException,
IOException {
WebResource resource1 = mock(WebResource.class);
MultivaluedMap<String, String> withLocationsMap = new MultivaluedMapImpl();
withLocationsMap.add("key", fakeConfig.getApiKey());
withLocationsMap.add("withLocations", "Y");
if (page > 1) {
withLocationsMap.add("p", page + "");
}
when(webResource.queryParams(withLocationsMap)).thenReturn(resource1);
when(resource1.get(BreweryResultPage.class)).thenReturn(
createBreweriesWithLocations(page));
WebResource resource2 = mock(WebResource.class);
MultivaluedMap<String, String> withoutLocationsMap = new MultivaluedMapImpl();
withoutLocationsMap.add("key", fakeConfig.getApiKey());
if (page > 1) {
withoutLocationsMap.add("p", page + "");
}
when(webResource.queryParams(withoutLocationsMap))
.thenReturn(resource2);
when(resource2.get(BreweryResultPage.class)).thenReturn(
createBreweries(page));
}
private static BreweryResultPage createBreweries(int page)
throws IOException {
InputStream stream = Mockery.class
.getResourceAsStream("/breweries/breweryPage" + page + ".json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BreweryResultPage.class);
}
private static BreweryResultPage createBreweriesWithLocations(int page)
throws IOException {
InputStream stream = Mockery.class
.getResourceAsStream("/breweries/breweryPage" + page
+ "WithLocations.json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BreweryResultPage.class);
}
private static void createWithBreweriesMock(WebResource webResource,
int page, IBreweryDBConnectorConfiguration fakeConfig)
throws UniformInterfaceException, ClientHandlerException,
IOException {
MultivaluedMap<String, String> mapPage1 = new MultivaluedMapImpl();
mapPage1.add("key", fakeConfig.getApiKey());
mapPage1.add("withBreweries", "Y");
if (page > 1) {
mapPage1.add("p", page + "");
}
WebResource r1 = mock(WebResource.class);
when(webResource.path("beers/")).thenReturn(beersResource);
when(beersResource.queryParams(mapPage1)).thenReturn(r1);
when(r1.get(BeerResultPage.class)).thenReturn(
createPageWithBreweries(page));
}
private static BeerResultPage createPageWithBreweries(int page)
throws IOException {
InputStream stream = Mockery.class
.getResourceAsStream("/beers/beersPage" + page
+ "WithBreweries.json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BeerResultPage.class);
}
private static void createGetBeerByIdMock(Client client, String string,
WebResource resource, IBreweryDBConnectorConfiguration fakeConfig)
throws UniformInterfaceException, ClientHandlerException,
IOException {
MultivaluedMap<String, String> map = new MultivaluedMapImpl();
map.add("key", fakeConfig.getApiKey());
WebResource r1 = mock(WebResource.class);
when(resource.path("beer/" + string + "/")).thenReturn(r1);
when(r1.queryParams(map)).thenReturn(r1);
when(r1.get(BeerResult.class)).thenReturn(createBeerResult(string));
WebResource r2 = mock(WebResource.class);
when(resource.path("beer/" + string + "x/")).thenReturn(r2);
when(r2.queryParams(map)).thenReturn(r2);
ClientResponse response = mock(ClientResponse.class);
when(response.getStatus()).thenReturn(404);
Throwable ex2 = new UniformInterfaceException(response);
when(r2.get(BeerResult.class)).thenThrow(ex2);
WebResource r3 = mock(WebResource.class);
MultivaluedMap<String, String> map2 = new MultivaluedMapImpl();
map2.add("key", fakeConfig.getApiKey() + "x");
when(r1.queryParams(map2)).thenReturn(r3);
ClientResponse r = mock(ClientResponse.class);
when(r.getStatus()).thenReturn(401);
Throwable ex = new UniformInterfaceException(r);
when(r3.get(BeerResult.class)).thenThrow(ex);
}
	// shared "beers/" resource mock, reused by the page and withBreweries helpers
	private static WebResource beersResource = mock(WebResource.class);
private static void createPageMock(WebResource webResource, int page,
IBreweryDBConnectorConfiguration fakeConfig) throws IOException {
MultivaluedMap<String, String> map = new MultivaluedMapImpl();
map.add("key", fakeConfig.getApiKey());
if (page > 1) {
map.add("p", page + "");
}
WebResource r1 = mock(WebResource.class);
when(webResource.path("beers/")).thenReturn(beersResource);
when(beersResource.queryParams(map)).thenReturn(r1);
when(r1.get(BeerResultPage.class)).thenReturn(createPage(page));
}
private static BeerResult createBeerResult(String beerId)
throws IOException {
InputStream stream = Mockery.class.getResourceAsStream("/beers/beer"
+ beerId + ".json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BeerResult.class);
}
private static BeerResultPage createPage(int i) throws IOException {
InputStream stream = Mockery.class
.getResourceAsStream("/beers/beersPage" + i + ".json");
StringWriter writer = new StringWriter();
IOUtils.copy(stream, writer, "UTF-8");
String theString = writer.toString();
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(theString, BeerResultPage.class);
}
} |
/**
* Keeps the first n elements of the original map, deletes the remaining
* elements and returns the sum of all deleted elements' values.
*/
public static <K> double removeAndSumUpAfter(Map<K, Double> input, int n) {
int length = Math.min(n, input.size());
Iterator<Entry<K, Double>> it = input.entrySet().iterator();
for (int i = 0; i < length; i++) {
it.next();
}
double sum = 0;
while (it.hasNext()) {
sum += it.next().getValue();
it.remove();
}
return sum;
} |
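// Usage sketch (assuming a LinkedHashMap so the iteration order is
// deterministic):
//   Map<String, Double> scores = new LinkedHashMap<>();
//   scores.put("a", 1.0); scores.put("b", 2.0); scores.put("c", 3.0);
//   double dropped = removeAndSumUpAfter(scores, 1);
//   // dropped == 5.0 and scores now only contains {a=1.0}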
def delete(self, request, id):
api.keystone.group_delete(request, id) |
// Check whether the string contains the character, starting the search at
// the given offset (0 by default, i.e. the beginning of the string)
bool dataBase::haschar(char c, string s, int offset) {
for (unsigned int i = offset; i < s.length(); i++)
if (c == s[i])
return true;
return false;
} |
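// Usage sketch (assuming the default offset of 0 declared in the header):
//   dataBase db;
//   db.haschar('o', "hello");    // true
//   db.haschar('h', "hello", 1); // false: the search starts at index 1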
#include <iostream>
#include <cstdio>
#include <cstring>
#include <cmath>
#include <queue>
using namespace std;
int n,k;
struct cookie{
    int need;
    int has;
    // std::priority_queue is a max-heap, so this ordering keeps the child
    // with the lowest has/need ratio (fewest whole servings) on top
    bool operator < (const cookie &a)const{
        return (a.has / a.need) < (has / need);
    }
}cook[10010];
priority_queue<cookie>q;
int main(){
    int i;
    scanf("%d%d",&n,&k);
    for(i = 1; i <= n; i++){
        scanf("%d",&cook[i].need);
    }
    for(i = 1; i <= n; i++){
        scanf("%d",&cook[i].has);
        q.push(cook[i]);
    }
    // greedily hand each of the k extra cookies to the currently worst-off child
    while(k){
        cookie temp = q.top();
        q.pop();
        temp.has++;
        k--;
        q.push(temp);
    }
    // the answer is the minimum number of whole servings any child can make
    int mincook = q.top().has/q.top().need;
    printf("%d\n",mincook);
    return 0;
}
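// Example (a sketch): n=1, k=3, need={2}, has={5}. All three extra cookies
// go to the single child, so the printed answer is (5+3)/2 = 4.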
|
import Vue from "vue";
import VueRouter, { NavigationGuardNext, Route, RouteConfig } from "vue-router";
import Home from "../views/Home.vue";
import store from "../store";
Vue.use(VueRouter);
const MAIN_URL = `//${process.env.VUE_APP_MAIN_URL}`;
const TENANT_URL = process.env.VUE_APP_FULL_TENANT_URL;
const routes: Array<RouteConfig> = [
{
path: "/",
name: "Home",
component: Home,
meta: {
title: "DossierFacile, le dossier de location numérique de l’État",
description:
"Créez un dossier de location en ligne complet et vérifié par l'Etat pour trouver votre appartement ou votre logement",
hideForAuth: true
}
},
{
path: "/signup",
name: "Signup",
meta: {
title: "Création de compte - DossierFacile",
description: "Créez votre compte en quelques clics sur DossierFacile",
hideForAuth: true
},
component: () =>
import(/* webpackChunkName: "signup" */ "../views/SignupPage.vue")
},
{
path: "/login",
redirect: () => {
window.location.replace(`${TENANT_URL}/account`);
return "/account";
}
},
{
path: "/forgotten-password",
name: "ForgottenPassword",
meta: {
title: "Mot de passe oublié - DossierFacile",
description:
"Accédez à la procédure de mot de passe oublié pour votre compte DossierFacile",
hideForAuth: true
},
component: () =>
import(
/* webpackChunkName: "forgottenPassword" */ "../views/ForgottenPasswordPage.vue"
)
},
{
path: "/profile",
name: "Profile",
meta: {
title: "Édition du profil - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(/* webpackChunkName: "profile" */ "../views/NameInformation.vue")
},
{
path: "/nom-locataire",
name: "TenantName",
meta: {
title: "Édition du profil - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(/* webpackChunkName: "profile" */ "../views/NameInformation.vue")
},
{
path: "/type-locataire",
name: "TenantType",
meta: {
title: "Édition du profil - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(/* webpackChunkName: "profile" */ "../views/TypeInformation.vue")
},
{
path: "/documents-locataire/:substep",
name: "TenantDocuments",
meta: {
title: "Édition du profil - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(/* webpackChunkName: "profile" */ "../views/TenantDocument.vue")
},
{
path: "/choix-garant",
name: "GuarantorChoice",
meta: {
title: "Édition du garant - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(
/* webpackChunkName: "profile" */ "../views/GuarantorChoicePage.vue"
)
},
{
path: "/liste-garants",
name: "GuarantorList",
meta: {
title: "Édition du garant - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(/* webpackChunkName: "profile" */ "../views/GuarantorListPage.vue")
},
{
path: "/validation-dossier",
name: "ValidateFile",
meta: {
title: "Édition du garant - DossierFacile",
requiresAuth: true,
hideFooter: true
},
component: () =>
import(/* webpackChunkName: "profile" */ "../views/ValidateFilePage.vue")
},
{
path: "/info-garant/:substep/:guarantorId?",
name: "GuarantorDocuments",
meta: {
title: "Édition du garant - DossierFacile",
requiresAuth: true,
hideFooter: true
},
    beforeEnter: async (to, from, next) => {
      if (
        (!store.state.selectedGuarantor.firstName ||
          !store.state.selectedGuarantor.lastName) &&
        to.params.substep !== "0"
      ) {
        next({ name: "GuarantorDocuments", params: { substep: "0" } });
        return;
      }
      next();
    },
component: () =>
import(
/* webpackChunkName: "profile" */ "../views/GuarantorDocumentsPage.vue"
)
},
{
path: "/public-file/:token",
name: "File",
meta: {
title: "Dossier - DossierFacile"
},
component: () =>
import(/* webpackChunkName: "file" */ "../views/PublicFile.vue")
},
{
path: "/file/:token",
name: "PublicFile",
meta: {
title: "Dossier - DossierFacile"
},
component: () => import(/* webpackChunkName: "file" */ "../views/File.vue")
},
{
path: "/source/:source",
name: "Source",
meta: {
title: "Source - DossierFacile"
},
beforeEnter: (to, from, next) => {
if ((Vue as any).$keycloak.authenticated) {
(Vue as any).$keycloak
.updateToken(70)
.then(() => {
store.dispatch("loadUser").then(() => {
next();
});
})
.catch((err: any) => {
console.error(err);
});
} else {
next();
}
},
component: () =>
import(/* webpackChunkName: "source" */ "../views/Source.vue")
},
{
path: "/lier-source/:source",
name: "SourceLink",
meta: {
title: "Source - DossierFacile",
requiresAuth: true
},
component: () =>
import(/* webpackChunkName: "source" */ "../views/SourceLink.vue")
},
{
path: "/account",
name: "Account",
meta: {
title: "Mon compte - DossierFacile",
requiresAuth: true
},
component: () =>
import(/* webpackChunkName: "account" */ "../views/Account.vue")
},
{
path: "/messaging",
name: "Messages",
meta: {
title: "Messages - DossierFacile",
requiresAuth: true
},
component: () =>
import(/* webpackChunkName: "messages" */ "../views/Messages.vue")
},
{
path: "/confirmAccount/:token",
name: "Confirm",
meta: {
title: "Confirmation de compte - DossierFacile",
hideForAuth: true
},
component: () =>
import(
/* webpackChunkName: "confirmAccount" */ "../views/ConfirmAccount.vue"
)
},
{
path: "/ajout-couple/:token",
name: "Couple",
meta: {
title: "Confirmation de compte - DossierFacile"
},
component: () =>
import(/* webpackChunkName: "register" */ "../views/JoinCouple.vue")
},
{
path: "/ajout-groupe/:token",
name: "Group",
meta: {
title: "Confirmation de compte - DossierFacile"
},
component: () =>
import(/* webpackChunkName: "register" */ "../views/JoinGroup.vue")
},
{
path: "/reset-password/:token",
name: "Password",
meta: {
title: "Nouveau mot de passe - DossierFacile",
hideForAuth: true
},
component: () =>
import(
/* webpackChunkName: "changePassword" */ "../views/ChangePasswordPage.vue"
)
},
{
path: "/inscription-locataire/:token",
name: "Inscription",
meta: {
title: "Inscription locataire - DossierFacile"
},
beforeEnter: (to, from, next) => {
if ((Vue as any).$keycloak.authenticated) {
(Vue as any).$keycloak
.updateToken(70)
.then(() => {
store.dispatch("loadUser").then(() => {
next();
});
})
.catch((err: any) => {
console.error(err);
});
} else {
next();
}
},
component: () =>
import(
/* webpackChunkName: "inscriptionLocataire" */ "../views/OwnerShare.vue"
)
},
{
path: "/locataire",
redirect: () => {
window.location.replace(`${MAIN_URL}`);
return "/info-proprietaire";
}
},
{
path: "*",
name: "404",
meta: {
title: "404 - DossierFacile"
},
component: () => import(/* webpackChunkName: "404" */ "../views/404.vue")
}
];
const router = new VueRouter({
mode: "history",
base: process.env.BASE_URL,
routes,
scrollBehavior() {
document.getElementById("app")?.scrollIntoView();
}
});
function keepGoing(to: Route, next: NavigationGuardNext<Vue>) {
if (
to.matched.some((record: { path: string }) => {
return record.path === "/account";
}) &&
store.state.user?.status === "INCOMPLETE"
) {
store.dispatch("firstProfilePage");
return;
}
document.title = to.meta?.title;
if (to.meta?.description) {
const tag = document.querySelector('meta[name="description"]');
tag?.setAttribute("content", to.meta.description);
const prop = document.querySelector('meta[property="og:description"]');
prop?.setAttribute("content", to.meta.description);
const title = document.querySelector('meta[property="og:title"]');
title?.setAttribute("content", to.meta.title);
}
next();
}
router.beforeEach((to, from, next) => {
if (to.matched.some(record => record.meta.hideFooter)) {
store.commit("isFunnel", true);
} else {
store.commit("isFunnel", false);
}
const lang = Vue.$cookies.get("lang") === "en" ? "en" : "fr";
store.dispatch("setLang", lang);
if (to.matched.some(record => record.meta.requiresAuth)) {
if (!(Vue as any).$keycloak.authenticated) {
// The page is protected and the user is not authenticated. Force a login.
(Vue as any).$keycloak.login({
redirectUri: TENANT_URL + to.fullPath
});
} else {
// The user was authenticated, and has the app role
store.dispatch("loadUser").then(() => {
keepGoing(to, next);
});
setInterval(() => {
(Vue as any).$keycloak.updateToken(60).catch((err: any) => {
console.error(err);
});
}, 45000);
}
  } else if (to.matched.some(record => record.meta.hideForAuth)) {
    if ((Vue as any).$keycloak.authenticated) {
      // redirect and stop; calling next() twice for one navigation is an error
      next({ name: "Profile" });
      return;
    }
    keepGoing(to, next);
  } else {
    keepGoing(to, next);
  }
});
export default router;
|
/// This takes the walker and turns it into a reference to the root
pub fn root_into_ref(mut self) -> &'a mut BasicTree<D, T> {
// go to the root
self.go_to_root();
let (tel, _, _) = self.destructure();
RecRef::into_ref(tel)
} |
def delete_command(
hass,
entity_id=ENTITY_MATCH_ALL,
device=None,
command=None,
):
data = {}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
if device:
data[ATTR_DEVICE] = device
if command:
data[ATTR_COMMAND] = command
hass.services.call(DOMAIN, SERVICE_DELETE_COMMAND, data) |
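# Usage sketch (hypothetical entity/device/command values; DOMAIN and the
# ATTR_*/SERVICE_* constants come from the surrounding module):
# delete_command(hass, entity_id="remote.living_room", device="tv", command="power")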
// NewMessage returns a *ProtoMessage created from a
// *descriptor.DescriptorProto
func NewMessage(msg *descriptor.DescriptorProto) (*ProtoMessage, error) {
newMsg := ProtoMessage{}
newMsg.Name = *msg.Name
for _, field := range msg.Field {
newField := MessageField{}
newField.Number = int(field.GetNumber())
newField.Name = *field.Name
newField.Type.Name = getCorrectTypeName(field)
label := int32(field.GetLabel())
lname := descriptor.FieldDescriptorProto_Label_name[label]
newField.Label = lname
if *field.Type == descriptor.FieldDescriptorProto_TYPE_MESSAGE {
desc := gengo.ObjectNamed(field.GetTypeName())
if d, ok := desc.(*generator.Descriptor); ok && d.GetOptions().GetMapEntry() {
newField.IsMap = true
}
}
if *field.Type == descriptor.FieldDescriptorProto_TYPE_ENUM {
obj := gengo.ObjectNamed(field.GetTypeName())
enum, ok := obj.(*generator.EnumDescriptor)
if !ok {
				return nil, fmt.Errorf("unknown enum type: %v", obj.TypeName())
}
var err error
newField.Type.Enum, err = NewEnum(enum.EnumDescriptorProto)
if err != nil {
return nil, errors.Wrapf(err, "could not create custom enum %q", obj.TypeName())
}
}
newMsg.Fields = append(newMsg.Fields, &newField)
}
return &newMsg, nil
} |
/**
* Playback service control implementation
*/
public final class PlaybackServiceControlImpl implements PlaybackServiceControl {
private final Context context;
public PlaybackServiceControlImpl(@NonNull final Context context) {
this.context = context;
}
@Override
public void resendState() {
context.sendBroadcast(PlaybackServiceIntentFactory.intentResendState());
}
@Override
public void playPause() {
context.startService(PlaybackServiceIntentFactory.intentPlayPause(context));
}
@Override
public void play(@NotNull final List<Media> queue, final int position) {
context.startService(PlaybackServiceIntentFactory.intentPlay(context, queue, position));
}
@Override
public void playAnything() {
context.startService(PlaybackServiceIntentFactory.intentPlayAnything(context));
}
@Override
public void pause() {
context.startService(PlaybackServiceIntentFactory.intentPause(context));
}
@Override
public void stop() {
context.startService(PlaybackServiceIntentFactory.intentStop(context));
}
@Override
public void stopWithError(@NonNull final CharSequence errorMessage) {
context.startService(PlaybackServiceIntentFactory.intentStopWithError(context, errorMessage));
}
@Override
public void prev() {
context.startService(PlaybackServiceIntentFactory.intentPrev(context));
}
@Override
public void next() {
context.startService(PlaybackServiceIntentFactory.intentNext(context));
}
@Override
public void seek(final float positionPercent) {
context.startService(PlaybackServiceIntentFactory.intentSeek(context, positionPercent));
}
} |
#include<iostream>
using namespace std;
const int n=100 ;
int s[n][3];
int main(){
    int m;
    cin>>m;
    for(int i=0;i<m;i++){
        for(int j=0;j<3;j++){
            cin>>s[i][j];
        }
    }
    // sum each column over the m rows that were actually read
    // (the original loop ran over all n slots, relying on zero-initialization)
    int a,b,c;
    a=0;
    b=0;
    c=0;
    for(int i=0;i<m;i++){
        a=a+s[i][0];
        b=b+s[i][1];
        c=c+s[i][2];
    }
    if(a==0&&b==0&&c==0){
        cout<<"YES";
    }
    else{
        cout<<"NO";
    }
    return 0;
}
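// Example (a sketch): m=2 with rows (1,2,3) and (-1,-2,-3) sums to zero in
// every column, so the program prints "YES".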
|
package models
import (
"time"
"go.mongodb.org/mongo-driver/bson/primitive"
)
// MODELS
type User struct {
ID primitive.ObjectID `json:"_id,omitempty" bson:"_id,omitempty"`
Name string `json:"name,omitempty" bson:"name,omitempty"`
Email string `json:"email,omitempty" bson:"email,omitempty"`
Password string `json:"password,omitempty" bson:"password,omitempty"`
}
type Post struct {
ID primitive.ObjectID `json:"_id,omitempty" bson:"_id,omitempty"`
Caption string `json:"caption" bson:"caption,omitempty"`
Image string `json:"image" bson:"image,omitempty"`
Timestamp time.Time `json:"timestamp" bson:"timestamp,omitempty"`
Author string `json:"author" bson:"author,omitempty"`
}
|
def is_wc_src_module(self, module_name, modules_conf):
return (module_name.split(" ")[0] in modules_conf["wildcard_src_modules"]) |
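# Usage sketch (hypothetical config dict; the key name is taken from the code
# above). Only the first whitespace-separated token of the module name is
# compared:
# modules_conf = {"wildcard_src_modules": ["foo", "bar"]}
# self.is_wc_src_module("foo 1.2.3", modules_conf)  # True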
We’ve been on our current server setup for 14 months and hadn’t had a single unplanned outage until yesterday. Even before the majority of the country was awake and at their computers or browsing the web on their phones, Mark May’s demotion broke our servers.
I’m not going to hide from the fact that I’m an Ohio State alum and that this is something that brought us great joy. We typically attempt to adhere to some level of professionalism in running this site (at least sometimes), and while a couple of times I paused in my zealous celebration of May’s demotion feeling a bit guilty (I treated myself to an ice cream sundae and watched an hour of Seinfeld when I should have been working to celebrate #MarkMayVictoryDay), the fact is ESPN and May subjected Ohio State fans, Notre Dame fans, and countless college football fans to such an unbelievably long tenure of hell that it’s only human nature to delight in the era officially ending.
This isn’t just a hot take from one Buckeye who let May get under his skin. This was an OVERWHELMING sentiment that melted our server and led to headlines like:
– “ESPN takes Mark May off “College Football Final,” Ohio State fans rejoice”
– “Mark May Removal Solidifies Divorce of Worst TV Marriage Since Kim Kardashian and Kris Humphries”
– “Ohio State fans rejoice: Mark May is off ESPN show”
-“Bon Voyage Mark May: Bidding Adieu To Ohio State’s Most Notorious Troll”
If you’re unsure why May was so reviled, the last link will give you the goods on May’s history as the ultimate heel to Ohio State fans, although many other fanbases found themselves unfortunate victims of his revolting and relentless shtick.
The funny thing is that the initial reporting of this change came from the Sports Business Journal, in somewhat of a planned safe release for such news aimed at a less informed business audience. It ran with the headline “New ESPN college football lineup: Virk, Galloway, Kanell,” a bit of misdirection that downplayed the story that caught most people’s eye.
Yes, there is a story in Virk getting a well-deserved promotion. Same for Galloway, whose status as a Buckeye made the news sweeter, to the point that many joked Ohio State fans were enjoying this development more than winning the National Championship, which, given the history of May and Ohio State fans, is frankly not that much of a stretch. The second narrative from this development is that, yes, Danny Kanell is now under the microscope as a questionable analyst whose noted past remarks on certain teams and conferences will draw ire. But the real story is May being demoted. And while it’s not being spun as a demotion, that’s what it is. Although May’s move to what looks like ABC could be spun as a lateral move or readjustment, his time on camera will be much less, and he’ll be further constrained by the platform and the talent he’s paired with, where scenes like the one below just won’t happen.
May isn’t totally gone. My guess is that this is a soft landing for now until his contract expires. With that said, I hope ESPN and others have learned a lesson in respecting their audience, where the line of decency is, and what the consequences are for crossing it.
While I’m ecstatic this move was made, it should have been made YEARS ago, and while a large amount of that opinion is derived from my Buckeye education, in a college football world where fans are becoming more and more tribal toward each other, the applauding of May’s demotion rang out from all corners of the web and social media. This was a rare instance when almost all college football fans could agree on something.
Hypothetically, it’s interesting to ponder whether this move would have happened without Rece Davis getting a promotion and Lou Holtz retiring; without those changes, there would be no good way to camouflage May being shuffled elsewhere, and his departure would stick out as an obvious demotion. Another question to ponder is what if Ohio State didn’t win the national championship in a run that thoroughly embarrassed May and ESPN every step of the way. One of the oldest adages in the media world is that the media should never become the story. But that’s where May’s antics got ESPN: from getting booed off stage, to indefensible agenda-driven opinions that couldn’t be ignored, to the awkward moment below, to the fact that Meyer felt the need to specifically lead fans in calling out May while celebrating the championship win.
May isn’t the first person to push all in with a “let’s see how much pissing people off can advance my career” route. In political commentary and, hell, politics itself, it’s a viable strategy. In fact, May essentially followed the exact same path that saw the career demise of his former studio partner, Trev Alberts, who fell from ESPN to CSTV before finding firm footing as the Athletic Director of the University of Nebraska-Omaha Mavericks. May somehow thought his toxic brand of punditry was immune to the same fate, and for a long period of time it was.
I just saw Mark May and Trev Alberts flying down the highway in a convertible. Thelma-and-Louise style. — sportsMonkey (@monkeyPi) June 1, 2015
The one difference between May and Alberts is that Alberts apparently just did it as part of “show business.”
I don’t know what’s worse: Alberts’ admission of purposefully and disingenuously riling up fans for ratings (ESPN has a role in this too) or May taking it to the extreme.
What May and ESPN did, I would never wish on any fanbase of any team. While strong, consistently negative hot takes pointed in one direction are nothing new, spanning the likes of Skip Bayless, Jim Rome, Colin Cowherd, and so many national personalities, two things stuck out about May that led me to the opinion that his presence on ESPN was the network’s biggest crime.
1) It’s not just that May was a record on repeat, infuriating whatever fanbase was low-hanging fruit; it’s HOW he did it. He didn’t just put his “thoughts” out there with no filter. He had a way of licking his lips, grinning, and then vomiting/screaming some ridiculous toxic opinion while his co-workers cringed on camera. He clearly got satisfaction out of it, and it was a constant theme for him on social media, where the majority of his tweets and Facebook posts were below-the-belt jabs at Ohio State fans and other fanbases mixed in with mostly incoherent drivel. May’s social media presence was such a dumpster fire that he just stopped entirely in the fall of 2013, a move that many believe was a request, or at least a shared understanding, made by ESPN. You can surmise that ESPN was uncomfortable with how deranged May sounded on social media, but in spite of that, they let him continue as a key personality for two seasons on television.
2) May was not tucked away in some safe corner far away from his detractors. He was inescapable to college football fans. His role on College Football Final meant that in order to get a full scope of highlights and ramifications from at times 50-60+ games (somewhat of a nightcap to all the fun anarchy of a good college football Saturday), May was there in peak form, spitting fire at whatever was closest to him that seemed like it would burn easily. But it was WAY worse than that.
May was around almost all day on Saturday, and because of this, an Ohio State fan, a Notre Dame fan, or fans of whatever team he was enjoying pummeling would frequently get this experience.
– 5-10 minutes right before kickoff of the game you wanted to watch in which May shits all over your team and picks it to lose (despite it being favored).
– At halftime, when your team was winning and specifically proving almost all “analysis” wrong, May would do the halftime show selectively nitpicking any issue from the first half, despite the fact that you could be winning by a significant margin in the face of his prediction of a loss.
– The post-game wrap-up in which May would criticize the team in question (despite his prediction having been proved wrong, often by a wide margin), saying the win was not impressive, the team got lucky, and it would likely implode sometime soon. This cycle played out frequently for fans of many teams.
May was so dedicated to his shit-mongering that at one point we tracked down his record of predictions when picking against Ohio State. I believe at the time he had picked against them 29 times and was right only 5 times. When it came to picking Ohio State games, Mark May was basically worse than Dwight Howard shooting a three.
Fans of any team should never repeatedly have this experience, in which the pregame, postgame, and halftime coverage consists of someone telling you that you suck regardless of what occurs. That’s what May’s entire shtick was, and I would never wish that experience on any fan of any team in any sport. It’s beneath ESPN, and it hurt their brand to the point where there was collateral damage from fans who held ESPN accountable for the experience.
Many people whom I wouldn’t consider overly crazy told me they boycotted ESPN because of May outside of live games. They wouldn’t even watch 30 for 30 despite hearing good things. These were mostly Ohio State fans, but fans of other teams chimed in as well, saying they were so turned off they were through with ESPN. One friend even told me he cracked while watching Fox Sports Live: “Why do I care what Andy Roddick has to say about college football?” To which his wife interjected, “Well, it sure as hell beats listening to Mark May.”
I conveyed this to Scott Van Pelt on Twitter (whom I do want to applaud for his accessibility and candidness in engaging there) a while ago as he was poking fun at various fanbases and how they could often develop crazy theories about ESPN, a totally legitimate point. I never believed in any of these theories, but I pointed out to him that having Mark May front and center, given his track record and coupled with things like ESPN’s investment in the SEC Network, gave fans just enough dots to connect when buying into any heavy-handed, top-down bias theory implemented across all platforms and programs.
One question neither Van Pelt nor anyone else could ever really answer was “Is there an ESPN NFL personality who is that constantly, zealously critical of a particular team?” The reality is that there was no high-profile personality who trolled any pro team at this level (I think to some degree pro athletes, ownership, and league personnel would attempt to intervene), but in the case of May vs. Ohio State or some of his other prized punching bags, it was acceptable and likely encouraged, regardless of the fact that he was mostly critical of student athletes, who are largely unable to reply.
But prayers were answered and May is now gone (how funny is it that this was announced June 1, literally hours after the month of May ended), and I’d wager he’s only about halfway down his fall from grace at this point. The bottom line is that enough was enough for ESPN. They couldn’t ignore how hated he was and figured out that despite the hot-take eyeballs he could deliver, ESPN’s brand and business were suffering a great deal elsewhere. Despite the legitimate qualms about Kanell picking up the torch here, I can’t help but celebrate May’s removal as a major presence from my fall Saturdays.
https://twitter.com/TomOrr4/status/605357703221932032
While May tainted a lot of the college football fan experience for me and others, the possible silver lining is that, unlike with Trev Alberts, maybe we’ve learned something. The pro leagues have never had a mainstream television personality who was as big of a shit-mongerer as May, and they’ve done just fine without blatantly turning fans into enemies or conspiracy theorists. I hope ESPN and others have figured out that while there is some level of short-term gain in stirring the pot with toxic overtones, the juice just isn’t worth the squeeze. The biggest test of this theory will be to see what happens with Kanell taking over College Football Final. If Kanell continues his trolling of SEC fans (perhaps as someone ESPN could point to in deflecting questions of bias over the SEC Network), then we’ll know ESPN is still invested in angering and antagonizing fans instead of informing them, merely with different characters.
Sure, there may be some good theater in creating a monster and forcing (I use this word because again, May was inescapable) fans to look on in agony, but the price-tag of pursuing such a strategy is one not even ESPN could afford. Let’s hope the expensive lesson in brand erosion and collateral damage is one that sticks and isn’t one that has to be retaught. |
package java_integration.fixtures;
/**
 * This simulates the class side of a singleton/class pair.
* object ScalaSingleton { def hello = "Hello" }
* class ScalaSingleton { def hello = "Goodbye" }
*/
public final class ScalaSingleton {
public String hello() {
return "Goodbye";
}
} |
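// Usage sketch: from plain Java only the class side is visible, so
//   new ScalaSingleton().hello(); // "Goodbye"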
///
/// Reloads the styles so that the attached widget will show the changes
///
pub fn reload_if_needed(&mut self) {
if self.need_refresh {
// Refresh the stylesheet
let style_sheet = self.style_sheet();
self.style_provider.load_from_data(style_sheet.as_bytes()).unwrap();
// No longer need a refresh
self.need_refresh = false;
}
} |
def kill(self, exception=GreenletExit, block=True, timeout=None):
timer = Timeout._start_new_or_dummy(timeout)
try:
while self.greenlets:
for greenlet in list(self.greenlets):
if greenlet in self.dying:
continue
try:
kill = greenlet.kill
except AttributeError:
_kill(greenlet, exception)
else:
kill(exception, block=False)
self.dying.add(greenlet)
if not block:
break
joinall(self.greenlets)
except Timeout as ex:
if ex is not timer:
raise
finally:
timer.cancel() |
def dataset_ids(self):
if not hasattr(self, "_dataset_ids"):
self._dataset_ids = list(self.catalog)
return self._dataset_ids |
/**
* @author: crazycatzhang
* @date: 2020/8/4 7:26 PM
 * @description: Solve the Tower of Hanoi through divide and conquer
*/
public class HanoiTower {
public static void main(String[] args) {
hanoiTower(5, 'A', 'B', 'C');
System.out.println(count);
}
private static int count = 0;
    //Divide-and-conquer: move num plates from a to c, using b as the buffer
public static void hanoiTower(int num, char a, char b, char c) {
if (num == 1) {
System.out.println("The 1 plate from " + a + " to " + c);
count++;
} else {
hanoiTower(num - 1, a, c, b);
System.out.println("The " + num + " plate from " + a + " to " + c);
count++;
hanoiTower(num - 1, b, a, c);
}
}
} |
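// A quick sanity check on the recurrence: moving num plates takes
// 2^num - 1 moves, so the run above with num = 5 prints count = 31.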
// Puts the path in correct orientation for FreeCAD.
// The path is made to run clockwise around the positive Z-axis.
// The shape is also returned to the origin, and therefore must be shifted
// by the offset at a later stage.
void MODEL3D::three_dim_model::arrange_path(ClipperLib::Path &to_arrange, std::vector<int> &offset)
{
if ( ClipperLib::Orientation(to_arrange)==true )
ClipperLib::ReversePath(to_arrange);
int x_lowest = to_arrange[0].X;
auto i_end = to_arrange.end();
for ( auto i = to_arrange.begin(); i!= i_end; i++ )
{
if ( i->X < x_lowest)
x_lowest = i->X;
}
ClipperLib::Path xlow_vec;
for ( auto i = to_arrange.begin(); i!=i_end; i++ )
{
if ( i->X == x_lowest )
{
ClipperLib::IntPoint pt;
pt.X = i->X;
pt.Y = i->Y;
xlow_vec.push_back(pt);
}
}
int y_lowest = xlow_vec[0].Y;
auto j_end = xlow_vec.end();
for ( auto j = xlow_vec.begin(); j!= j_end; j++ )
{
if ( j->Y < y_lowest)
y_lowest = j->Y;
}
ClipperLib::IntPoint target;
target.X = x_lowest;
target.Y = y_lowest;
auto loc = std::find(to_arrange.begin(), to_arrange.end(), target);
ClipperLib::Path arranged;
arranged.insert( arranged.end(), loc, to_arrange.end());
arranged.insert( arranged.end(), to_arrange.begin(), loc);
offset.push_back(target.X);
offset.push_back(target.Y);
auto arranged_end = arranged.end();
for(auto arranged_it = arranged.begin(); arranged_it != arranged_end; arranged_it++)
{
arranged_it->X = arranged_it->X - x_lowest;
arranged_it->Y = arranged_it->Y - y_lowest;
}
to_arrange = arranged;
} |
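// Usage sketch (hypothetical caller): given the square path
//   {(10,10),(10,20),(20,20),(20,10)}
// the path is rotated to begin at its lowest-X/lowest-Y vertex (10,10),
// shifted so that vertex sits at the origin, and {10, 10} is appended to
// `offset` so the shape can be translated back later.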
/*
* Copyright (c) 2013 <NAME>
*
* This file is part of GamingAnywhere (GA).
*
* GA is free software; you can redistribute it and/or modify it
* under the terms of the 3-clause BSD License as published by the
* Free Software Foundation: http://directory.fsf.org/wiki/License:BSD_3Clause
*
* GA is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*
* You should have received a copy of the 3-clause BSD License along with GA;
* if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <pthread.h>
#ifndef WIN32
#include <dlfcn.h>
#endif
#ifdef __linux__
#include <X11/extensions/XTest.h>
#endif
#include "ga-common.h"
#include "ga-conf.h"
#include "vsource.h"
#include "controller.h"
#include "dpipe.h"
#include "ga-hook-common.h"
#include "ga-hook-gl.h"
#ifndef WIN32
#include "ga-hook-lib.h"
#endif
#include "ctrl-sdl.h"
#include "sdl12-event.h"
#include "sdl12-video.h"
#include "sdl12-mouse.h"
#include <map>
using namespace std;
#ifndef WIN32
#ifdef __cplusplus
extern "C" {
#endif
void glFlush();
#ifdef __linux__
void glXSwapBuffers( Display *dpy, GLXDrawable drawable );
#endif
#ifdef __cplusplus
}
#endif
#endif
// For duplicate frame generation (since OpenGL does not deliver updates if nothing changes)
static struct timeval previous_frame_tv = { 0 };
static vsource_frame_t previous_frame = { 0 };
pthread_cond_t new_frame_captured_cond = PTHREAD_COND_INITIALIZER;
pthread_mutex_t new_frame_captured_mutex = PTHREAD_MUTEX_INITIALIZER;
pthread_mutex_t pipe_access_mutex = PTHREAD_MUTEX_INITIALIZER;
static struct timeval initialTv = { 0 };
static int frame_interval;
static int pts = 0;
// for hooking
t_glFlush old_glFlush = NULL;
#ifdef __linux__
t_glXSwapBuffers old_glXSwapBuffers = NULL;
Display *display = NULL;
Window window = 0;
#endif
// Thanks to https://gist.github.com/BinaryPrison/1112092 for this code.
static inline uint32_t __iter_div_u64_rem(uint64_t dividend, uint32_t divisor, uint64_t *remainder)
{
uint32_t ret = 0;
while (dividend >= divisor) {
/* The following asm() prevents the compiler from
optimising this loop into a modulo operation. */
asm("" : "+rm"(dividend));
dividend -= divisor;
ret++;
}
*remainder = dividend;
return ret;
}
#define NSEC_PER_SEC 1000000000L
static inline void timespec_add_ns(struct timespec *a, uint64_t ns)
{
a->tv_sec += __iter_div_u64_rem(a->tv_nsec + ns, NSEC_PER_SEC, &ns);
a->tv_nsec = ns;
}
static void *
frame_injection_threadproc(void *arg)
{
ga_error("Frame injection thread entered.\n");
// Set minimum framerate to maintain:
struct RTSPConf *rtspconf = rtspconf_global();
unsigned int minimum_frame_rate = rtspconf->video_fps * 0.8; // maintain at least 80% of configured frame rate
while (1)
{
// Wait until a frame is captured
struct timeval tv;
struct timespec to;
gettimeofday(&tv, NULL);
to.tv_sec = tv.tv_sec;
to.tv_nsec = tv.tv_usec * 1000;
timespec_add_ns(&to, NSEC_PER_SEC / minimum_frame_rate);
		// pthread_cond_timedwait() requires the mutex to be held by the caller
		pthread_mutex_lock(&new_frame_captured_mutex);
		int wait_result = pthread_cond_timedwait(&new_frame_captured_cond, &new_frame_captured_mutex, &to);
		pthread_mutex_unlock(&new_frame_captured_mutex);
struct timeval now;
gettimeofday(&now, NULL);
if (wait_result == ETIMEDOUT &&
previous_frame_tv.tv_sec > 0 &&
tvdiff_us(&now, &previous_frame_tv) > 1000000 / minimum_frame_rate &&
previous_frame.realsize > 0)
{
pthread_mutex_lock(&pipe_access_mutex);
// Inject previous frame again
dpipe_buffer_t *data = dpipe_get(g_pipe[0]);
vsource_frame_t *frame = (vsource_frame_t *) data->pointer;
vsource_dup_frame(&previous_frame, frame);
// Generate presentation time stamp
struct timeval repeat_tv;
gettimeofday(&repeat_tv, NULL);
long long new_pts = tvdiff_us(&repeat_tv, &initialTv)/frame_interval;
			frame->imgpts = pts++; // a plain counter is used here instead of new_pts
// duplicate from channel 0 to other channels
ga_hook_capture_dupframe(frame);
dpipe_store(g_pipe[0], data);
pthread_mutex_unlock(&pipe_access_mutex);
// ga_error("Frame injected.\n");
}
}
}
void
x11_replay_callback(void *msg, int msglen) {
sdlmsg_t *smsg = (sdlmsg_t*) msg;
#ifdef __linux__
if (display == NULL || window == 0) {
ga_error("Unable to replay event due to missing display or window.\n");
return;
}
/*XEvent event;
memset(&event, 0x00, sizeof(event));
bool success = XQueryPointer(
display,
window,
&event.xbutton.root,
&event.xbutton.window,
&event.xbutton.x_root,
&event.xbutton.y_root,
&event.xbutton.x,
&event.xbutton.y,
&event.xbutton.state);
//ga_error("Mouse readings x_root=%u, y_root=%u, x=%u, y=%u, state=%u\n", event.xbutton.x_root, event.xbutton.y_root, event.xbutton.x, event.xbutton.y, event.xbutton.state);
// ga_error((success) ? "YES" : "NO");
if(!success) {
ga_error("Cannot query pointer attributes which are required for the relaying of events.\n");
return;
}
*/
sdlmsg_mouse_t *msgm = (sdlmsg_mouse_t*) msg;
XTestGrabControl(display, True);
switch(smsg->msgtype) {
case SDL_EVENT_MSGTYPE_MOUSEMOTION:
ga_error("Mouse movement, x: %u y: %u\n", ntohs(msgm->mousex), ntohs(msgm->mousey));
{
int new_root_x, new_root_y;
Window child;
/*bool success = */XTranslateCoordinates(display, window, DefaultRootWindow(display), ntohs(msgm->mousex), ntohs(msgm->mousey), &new_root_x, &new_root_y, &child);
//ga_error("new_x=%d, new_y=%d\n", new_root_x, new_root_y);
XTestFakeMotionEvent (display, 0, new_root_x, new_root_y, CurrentTime);
/* Display* xdisplay1 = XOpenDisplay(NULL);
Window root = DefaultRootWindow(display);
XWarpPointer(xdisplay1, None, root, 0, 0, 0, 0, msgm->mousex), ntohs(msgm->mousey))*/
}
break;
case SDL_EVENT_MSGTYPE_MOUSEKEY:
ga_error("Mouse button event btn=%u pressed=%d\n", msgm->mousebutton, msgm->is_pressed);
/*event.type = (msgm->is_pressed == 1) ? ButtonPress : ButtonRelease;
if (event.type == ButtonRelease)
event.xbutton.state = 0x100;
event.xbutton.button = msgm->mousebutton;
event.xbutton.same_screen = True;
if(XSendEvent(display, window, True, 0xfff, &event) == 0) ga_error("Error\n");*/
XTestFakeButtonEvent (display, 1, (msgm->is_pressed == 1), CurrentTime);
// XFlush(display);
break;
}
XSync(display, True);
XTestGrabControl(display, False);
/*if(smsg->msgtype == SDL_EVENT_MSGTYPE_MOUSEMOTION) {
sdlmsg_mouse_t *msgm = (sdlmsg_mouse_t*) msg;
ga_error("Mouse movement, x: %u y: %u, %s\n", ntohs(msgm->mousex), ntohs(msgm->mousey), (msgm->is_pressed != 0) ? "pressed" : "");
}*/
// sdlmsg_ntoh(smsg);
// sdl12_hook_replay(smsg);
// ga_error("Hello World");
#endif
return;
}
static void
gl_global_init() {
#ifndef WIN32
static int initialized = 0;
pthread_t ga_server_thread;
pthread_t frame_injection_thread;
if(initialized != 0)
return;
//
// override controller
// sdl12_mapinit();
// sdlmsg_replay_init(NULL);
ctrl_server_setreplay(x11_replay_callback);
no_default_controller = 1;
if(pthread_create(&ga_server_thread, NULL, ga_server, NULL) != 0) {
ga_error("ga_hook: create thread failed.\n");
exit(-1);
}
if(pthread_create(&frame_injection_thread, NULL, frame_injection_threadproc, NULL) != 0) {
ga_error("ga_hook: create thread for frame injection failed.\n");
}
initialized = 1;
#endif
return;
}
static void
gl_hook_symbols() {
#ifndef WIN32
void *handle = NULL;
char *ptr, soname[2048];
if((ptr = getenv("LIBVIDEO")) == NULL) {
strncpy(soname, "libGL.so.1", sizeof(soname));
} else {
strncpy(soname, ptr, sizeof(soname));
}
if((handle = dlopen(soname, RTLD_LAZY)) == NULL) {
ga_error("dlopen '%s' failed: %s\n", soname, strerror(errno));
exit(-1);
}
// for hooking
old_glFlush = (t_glFlush)
ga_hook_lookup_or_quit(handle, "glFlush");
ga_error("hook-gl: hooked glFlush.\n");
#ifdef __linux__
old_glXSwapBuffers = (t_glXSwapBuffers)ga_hook_lookup_or_quit(handle, "glXSwapBuffers");
ga_error("hook_gl: hooked glXSwapBuffers\n");
#endif
// indirect hook
if((ptr = getenv("HOOKVIDEO")) == NULL)
goto quit;
strncpy(soname, ptr, sizeof(soname));
if((handle = dlopen(soname, RTLD_LAZY)) != NULL) {
hook_lib_generic(soname, handle, "glFlush", (void*) hook_glFlush);
#ifdef __linux__
hook_lib_generic(soname, handle, "glXSwapBuffers", (void*) hook_glXSwapBuffers);
#endif
}
ga_error("hook-gl: hooked into %s.\n", soname);
quit:
#endif
return;
}
void
copyFrame() {
#ifdef __linux__
static int frame_interval;
static struct timeval initialTv, captureTv;
static int frameLinesize;
static unsigned char *frameBuf;
static int sb_initialized = 0;
static int global_initialized = 0;
//
GLint vp[4];
int vp_x, vp_y, vp_width, vp_height;
int i;
//
dpipe_buffer_t *data;
vsource_frame_t *frame;
//
if(global_initialized == 0) {
gl_global_init();
global_initialized = 1;
}
// capture the screen
glGetIntegerv(GL_VIEWPORT, vp);
vp_x = vp[0];
vp_y = vp[1];
vp_width = vp[2];
vp_height = vp[3];
//
if(vp_width < 16 || vp_height < 16) {
return;
}
//
//ga_error("XXX hook_gl: viewport (%d,%d)-(%d,%d)\n",
// vp_x, vp_y, vp_width, vp_height);
//
if(ga_hook_capture_prepared(vp_width, vp_height, 1) < 0)
return;
//
if(sb_initialized == 0) {
		frame_interval = 1000000/video_fps;	// in units of us (microseconds)
frame_interval++;
gettimeofday(&initialTv, NULL);
frameBuf = (unsigned char*) malloc(encoder_width * encoder_height * 4);
if(frameBuf == NULL) {
ga_error("allocate frame failed.\n");
return;
}
frameLinesize = game_width * 4;
sb_initialized = 1;
} else {
gettimeofday(&captureTv, NULL);
}
//
if (enable_server_rate_control && ga_hook_video_rate_control() < 0) {
return;
}
//
pthread_mutex_lock(&pipe_access_mutex);
do {
unsigned char *src, *dst;
//
frameLinesize = game_width<<2;
//
data = dpipe_get(g_pipe[0]);
frame = (vsource_frame_t*) data->pointer;
frame->pixelformat = PIX_FMT_RGBA;
frame->realwidth = game_width;
frame->realheight = game_height;
frame->realstride = frameLinesize;
frame->realsize = game_height * frameLinesize;
		frame->linesize[0] = frameLinesize;	/* frame->stride */
// read a block of pixels from the framebuffer (backbuffer)
glReadBuffer(GL_BACK);
glReadPixels(0, 0, game_width, game_height, GL_RGBA, GL_UNSIGNED_BYTE, frameBuf);
// image is upside down!
src = frameBuf + frameLinesize * (game_height - 1);
dst = frame->imgbuf;
for(i = 0; i < frame->realheight; i++) {
bcopy(src, dst, frameLinesize);
dst += frameLinesize/*frame->stride*/;
src -= frameLinesize;
}
frame->imgpts = tvdiff_us(&captureTv, &initialTv)/frame_interval;
frame->timestamp = captureTv;
} while(0);
// duplicate from channel 0 to other channels
ga_hook_capture_dupframe(frame);
dpipe_store(g_pipe[0], data);
previous_frame_tv = captureTv;
if (frame->imgbufsize > previous_frame.imgbufsize) {
free(previous_frame.imgbuf);
previous_frame.imgbuf = (unsigned char*)malloc(frame->imgbufsize);
previous_frame.imgbufsize = frame->imgbufsize;
}
vsource_dup_frame(frame, &previous_frame);
pthread_mutex_unlock(&pipe_access_mutex);
pthread_cond_broadcast(&new_frame_captured_cond);
#endif
}
#ifdef WIN32
WINAPI
#endif
void
hook_glFlush() {
//
if(old_glFlush == NULL) {
gl_hook_symbols();
}
old_glFlush();
//
copyFrame();
return;
}
#ifdef __linux__
void
hook_glXSwapBuffers(Display *dpy, GLXDrawable drawable) {
display = dpy;
window = drawable;
//
if(old_glXSwapBuffers == NULL) {
gl_hook_symbols();
}
old_glXSwapBuffers(dpy, drawable);
//printf("JACKPOT");fflush(stdout);
//
copyFrame();
return;
}
#endif
#ifndef WIN32 /* POSIX interfaces */
void
glFlush() {
hook_glFlush();
}
#ifdef __linux__
void
glXSwapBuffers( Display *dpy, GLXDrawable drawable ){
hook_glXSwapBuffers(dpy, drawable);
}
#endif
__attribute__((constructor))
static void
gl_hook_loaded(void) {
ga_error("ga-hook-gl loaded!\n");
if(ga_hook_init() < 0) {
ga_error("ga_hook: init failed.\n");
exit(-1);
}
return;
}
#endif /* ! WIN32 */
|
/* Copyright (C) <NAME>, 2003.
* All rights reserved worldwide.
*
* This software is provided "as is" without express or implied
* warranties. You may freely copy and compile this source into
* applications you distribute provided that the copyright text
* below is included in the resulting source code, for example:
* "Portions Copyright (C) <NAME>, 2003"
*/
#include "std_all.h"
bool SeparateString (const char str[], int num, char liney, char *return1)
{
	int l = 0;
	int out = 0;
	return1[0] = 0;
	for (unsigned int k = 0; str[k] != 0; k++)
	{
		if (str[k] == liney)
		{
			l++;
			if (l == num+1)
				break;
			//a new field starts, so reset the output buffer
			out = 0;
			return1[0] = 0;
		} else
		{
			//append by index; the old sprintf(return1, "%s%c", return1, ...)
			//aliased the buffer with itself, which is undefined behavior
			return1[out++] = str[k];
			return1[out] = 0;
		}
	}
	if (l < num)
	{
		return1[0] = 0;
		return(false);
	}
	return true;
}
//snippet from Zahlman's post on gamedev: http://www.gamedev.net/community/forums/topic.asp?topic_id=372125
void StringReplace(const std::string& what, const std::string& with, std::string& in)
{
size_t pos = 0;
size_t whatLen = what.length();
size_t withLen = with.length();
while ((pos = in.find(what, pos)) != std::string::npos)
{
in.replace(pos, whatLen, with);
pos += withLen;
}
}
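//usage sketch:
//  std::string s = "a-b-c";
//  StringReplace("-", "+", s); // s == "a+b+c"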
string ToLowerCaseString (const string & s)
{
string d (s);
for (unsigned int i=0; i < d.length(); i++)
{
d[i] = tolower(d[i]);
}
return d;
} // end of tolower
void winall_create_url_file(char url[])
{
//create temp.url
//delete old file if applicable
_unlink("temp.url");
char st_file[30], st_text[1024];
strcpy(st_file, "temp.url");
sprintf(st_text, "[InternetShortcut]\n");
add_text(st_text, st_file);
sprintf(st_text, "URL=http://%s\n",url);
add_text(st_text, st_file);
}
void winall_create_url_file_full(char url[])
{
//create temp.url
//delete old file if applicable
_unlink("temp.url");
char st_file[30], st_text[1024];
strcpy(st_file, "temp.url");
sprintf(st_text, "[InternetShortcut]\n");
add_text(st_text, st_file);
sprintf(st_text, "URL=%s\n",url);
add_text(st_text, st_file);
}
void create_url_file(char url[255])
{
//create temp.url
//delete old file if applicable
_unlink("temp.url");
char st_file[30], st_text[255];
strcpy(st_file, "temp.url");
sprintf(st_text, "[InternetShortcut]\n");
add_text(st_text, st_file);
sprintf(st_text, "URL=http://%s\n",url);
add_text(st_text, st_file);
}
//returns true if this is running on 95, 98 or ME
bool WindowsIs9xVersion()
{
OSVERSIONINFOEX winfo;
ZeroMemory(&winfo, sizeof(OSVERSIONINFOEX));
winfo.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
if (GetVersionEx((OSVERSIONINFO *)&winfo) == 0)
{
//Get Windows version failed
return FALSE;
}
if (winfo.dwPlatformId == VER_PLATFORM_WIN32_WINDOWS)
{
if(winfo.dwMinorVersion < 10)
{
//WinVer = W95; //Windows 95
return true;
}
else if (winfo.dwMinorVersion < 90)
{
// WinVer = W98; //Windows 98
return true;
}
else
{
// WinVer = WME; //Windows ME
return true;
}
}
return false;
}
//yes I will templatize these.. tomorrow
bool force_range(int * i_original, int i_min, int i_max)
{
	if (in_range(*i_original, i_min, i_max)) return false;
	if (*i_original < i_min) *i_original = i_min;
	if (*i_original > i_max) *i_original = i_max;
	return true; //the number was clamped
}
bool lforce_range(long * l_original, long l_min, long l_max)
{
	if (in_range(*l_original, l_min, l_max)) return false;
	if (*l_original < l_min) *l_original = l_min;
	if (*l_original > l_max) *l_original = l_max;
	return true; //the number was clamped
}
float lerp_float(float f_origin, float f_target, float f_percent)
{
	return (f_origin - ((f_origin-f_target)*f_percent));
}
bool fforce_range(float * i_original, float i_min, float i_max)
{
	if (in_range_float(*i_original, i_min, i_max)) return false;
	if (*i_original < i_min) *i_original = i_min;
	if (*i_original > i_max) *i_original = i_max;
	return true; //changed number
}
//this lets you apply a number to a number to make it closer to a target
//it will not go past the target number.
void set_float_with_target(float *p_float, float f_target, float f_friction)
{
if (*p_float != f_target)
{
if (*p_float > f_target)
{
*p_float -= f_friction;
if (*p_float < f_target) *p_float = f_target;
} else
{
*p_float += f_friction;
if (*p_float > f_target) *p_float = f_target;
}
}
}
//note to self: templatize this later or something
void set_long_with_target(long *p_long, long f_target, long f_friction)
{
if (*p_long != f_target)
{
if (*p_long > f_target)
{
*p_long -= f_friction;
if (*p_long < f_target) *p_long = f_target;
} else
{
*p_long += f_friction;
if (*p_long > f_target) *p_long = f_target;
}
}
}
//example, to get how many days have passed since Feb 1st, 2002 you would do this:
//i_days = GetDaysSinceDate(2, 1, 2002);
int GetDaysSinceDate(int i_month,int i_day, int i_year)
{
time_t ltime;
time( <ime );
// { 0, 0, 0, 25, 4, 100, 0 } expires on 00:00:00 25/5/2000 note the month!
tm expire = { 0, 0, 0, i_day, i_month-1, i_year-1900, 0 }; //Month is 0-11 btw
tm today = *localtime( <ime );
long time_now = (long)today.tm_mday + (long)today.tm_mon * 30 + today.tm_year*365;
long time_exp = (long)expire.tm_mday +(long)expire.tm_mon * 30 + expire.tm_year * 365;
long time_passed = time_now - time_exp;
//the totals above are already rough day counts, so just take the difference
if (time_passed == 0) return 0; //same date, nothing elapsed
return (time_passed);
}
int GetDaysSinceDate(time_t from_time)
{
time_t ltime;
time( <ime );
// { 0, 0, 0, 25, 4, 100, 0 } expires on 00:00:00 25/5/2000 note the month!
tm expire = *localtime( &from_time );
tm today = *localtime( <ime );
long time_now = (long)today.tm_mday + (long)today.tm_mon * 30 + today.tm_year*365;
long time_exp = (long)expire.tm_mday +(long)expire.tm_mon * 30 + expire.tm_year * 365;
long time_passed = time_now - time_exp;
//the totals above are already rough day counts, so just take the difference
if (time_passed == 0) return 0; //same date, nothing elapsed
return (time_passed);
}
#ifndef _UNICODE
char *show_date_month_and_day(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static char st_date[255];
int result = GetDateFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
"MMM d",
st_date,
255);
if (result == 0)
{
LogError("Get date function failed.\n");
return NULL;
}
return st_date;
}
char *show_army_time(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static char st_date[255];
int result = GetTimeFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
"HH':'mm'",
st_date,
255);
if (result == 0)
{
LogError("Get date function failed.\n");
return NULL;
}
return st_date;
}
char *show_date(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static char st_date[255];
int result = GetDateFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
"dddd, MMMM dd yyyy",
st_date,
255);
if (result == 0)
{
LogError("Get date function failed.\n");
return NULL;
}
return st_date;
}
char *show_time(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static char st_date[255];
int result = GetTimeFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
"h':'mm' 'tt",
st_date,
255);
if (result == 0)
{
LogError("Get date function failed.\n");
return NULL;
}
return st_date;
}
char *show_small_date(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static char st_date[255];
int result = GetDateFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
"MM-dd",
st_date,
255);
if (result == 0)
{
LogError("Get date function failed.\n");
return NULL;
}
return st_date;
}
#else
TCHAR *show_date(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static TCHAR st_date[255];
int result = GetDateFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
L"dddd, MMMM dd yyyy",
st_date,
255);
if (result == 0)
{
log_error("Get date function failed.\n");
return NULL;
}
return st_date;
}
TCHAR *show_time(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static TCHAR st_date[255];
int result = GetTimeFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
L"h':'mm' 'tt",
st_date,
255);
if (result == 0)
{
log_error("Get date function failed.\n");
return NULL;
}
return st_date;
}
TCHAR *show_small_date(void)
{
SYSTEMTIME lp_system;
GetLocalTime(&lp_system);
static TCHAR st_date[255];
int result = GetDateFormat(LOCALE_USER_DEFAULT, //locale
NULL, //flags
&lp_system, //system time
L"MM-dd",
st_date,
255);
if (result == 0)
{
log_error("Get date function failed.\n");
return NULL;
}
return st_date;
}
#endif
|
def is_valid_bst(self, node, lower=float('-inf'), upper=float('inf')):
if node is None:
return True
if (node.val <= lower or node.val >= upper):
return False
return self.is_valid_bst(node.left, lower, node.val) and self.is_valid_bst(node.right, node.val, upper) |
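# Usage sketch (TreeNode is a hypothetical node class and Solution a
# hypothetical host class; the validator only relies on val/left/right):
#
# class TreeNode:
#     def __init__(self, val, left=None, right=None):
#         self.val, self.left, self.right = val, left, right
#
# root = TreeNode(2, TreeNode(1), TreeNode(3))
# Solution().is_valid_bst(root)  # True: 1 < 2 < 3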
// Requires WIFI Extension 2.0 firmware 2.1.0.
//
// Returns the mesh configuration as set by SetWifi2MeshConfiguration.
//
// .. versionadded:: 2.4.2$nbsp;(Firmware)
func (device *MasterBrick) GetWifi2MeshConfiguration() (enable bool, rootIP [4]uint8, rootSubnetMask [4]uint8, rootGateway [4]uint8, routerBSSID [6]uint8, groupID [6]uint8, groupSSIDPrefix string, gatewayIP [4]uint8, gatewayPort uint16, err error) {
var buf bytes.Buffer
resultBytes, err := device.device.Get(uint8(FunctionGetWifi2MeshConfiguration), buf.Bytes())
if err != nil {
return enable, rootIP, rootSubnetMask, rootGateway, routerBSSID, groupID, groupSSIDPrefix, gatewayIP, gatewayPort, err
}
if len(resultBytes) > 0 {
var header PacketHeader
header.FillFromBytes(resultBytes)
if header.Length != 55 {
return enable, rootIP, rootSubnetMask, rootGateway, routerBSSID, groupID, groupSSIDPrefix, gatewayIP, gatewayPort, fmt.Errorf("Received packet of unexpected size %d, instead of %d", header.Length, 55)
}
if header.ErrorCode != 0 {
return enable, rootIP, rootSubnetMask, rootGateway, routerBSSID, groupID, groupSSIDPrefix, gatewayIP, gatewayPort, DeviceError(header.ErrorCode)
}
resultBuf := bytes.NewBuffer(resultBytes[8:])
binary.Read(resultBuf, binary.LittleEndian, &enable)
binary.Read(resultBuf, binary.LittleEndian, &rootIP)
binary.Read(resultBuf, binary.LittleEndian, &rootSubnetMask)
binary.Read(resultBuf, binary.LittleEndian, &rootGateway)
binary.Read(resultBuf, binary.LittleEndian, &routerBSSID)
binary.Read(resultBuf, binary.LittleEndian, &groupID)
groupSSIDPrefix = ByteSliceToString(resultBuf.Next(16))
binary.Read(resultBuf, binary.LittleEndian, &gatewayIP)
binary.Read(resultBuf, binary.LittleEndian, &gatewayPort)
}
return enable, rootIP, rootSubnetMask, rootGateway, routerBSSID, groupID, groupSSIDPrefix, gatewayIP, gatewayPort, nil
} |
#include <math.h>
#include <bits/stdc++.h>
using namespace std;
int main(int argc, const char * argv[]) {
// insert code here...
int N = 0 ;
cin >> N;
int t[N+1],x[N+1],y[N+1];
//initialize
t[0]= 0;
x[0]= 0;
y[0]= 0;
for(int i=0;i<N;i++){
cin >> t[i+1]>>x[i+1]>>y[i+1];
}
    bool flg = false;
    for(int i = 0; i < N; i++){
        int dt = t[i+1] - t[i];                              // time available for this leg
        int dist0 = x[i+1] + y[i+1];                         // Manhattan distance from the origin
        int dist = abs(x[i+1]-x[i]) + abs(y[i+1]-y[i]);      // Manhattan distance for this leg
        // Each step moves exactly one cell, so the elapsed time and the
        // Manhattan distance from the origin must have the same parity.
        if(t[i+1] % 2 != dist0 % 2) {
            flg = false;
            break;
        }
        else if( dt < dist ) {   // not enough time to cover this leg
            flg = false;
            break;
        }
        else{
            flg = true;
        }
    }
if(flg)
cout << "Yes"<<endl;
else
cout << "No"<<endl;
}
|
/**
* A simple command to run the slice based component labeling and connecting on each slice first
*
* @param labeledImage
* @param neighborSize
* @param mKernel
* @return
*/
private static Tuple2<DTImg<long[]>, Long> scanAndMerge(DTImg<long[]> labeledImage,
D3int neighborSize,
BaseTIPLPluginIn.morphKernel mKernel,
final TimingObject inTO) {
JavaPairRDD<D3int, OmnidirectionalMap> connectedGroups = slicesToConnections
(labeledImage, neighborSize, mKernel, inTO);
JavaPairRDD<D3int, Map<Long, Long>> mergeCmds = connectedGroups.mapValues(new Function<OmnidirectionalMap, Map<Long, Long>>() {
@Override
public Map<Long, Long> call(OmnidirectionalMap arg0)
throws Exception {
final long start = System.currentTimeMillis();
Map<Long, Long> outList = groupListToMerges(arg0);
inTO.timeElapsed.$plus$eq((double) (System.currentTimeMillis() - start));
inTO.mapOperations.$plus$eq(1);
return outList;
}
});
System.out.println("Merges per slice");
long totalMerges = 0;
for (Long cVal : mergeCmds.values().map(new Function<Map<Long, Long>, Long>() {
@Override
public Long call(Map<Long, Long> arg0) throws Exception {
return (long) arg0.size();
}
}).collect()) {
System.out.println("\t" + cVal);
totalMerges += cVal;
}
JavaPairRDD<D3int, TImgSlice<long[]>> newlabeledImage = labeledImage.getBaseImg().
join(mergeCmds, SparkGlobal.getPartitioner(labeledImage))
.mapValues(new Function<Tuple2<TImgSlice<long[]>, Map<Long, Long>>,
TImgSlice<long[]>>() {
@Override
public TImgSlice<long[]> call(
Tuple2<TImgSlice<long[]>, Map<Long, Long>> inTuple) throws Exception {
final long start = System.currentTimeMillis();
final TImgSlice<long[]> cBlock = inTuple._1();
final long[] curSlice = cBlock.get();
final long[] outSlice = new long[curSlice.length];
final Map<Long, Long> mergeCommands = inTuple._2();
for (int i = 0; i < curSlice.length; i++) {
if (curSlice[i] > 0) outSlice[i] = mergeCommands.get(curSlice[i]);
}
inTO.mapOperations.$plus$eq(1);
inTO.timeElapsed.$plus$eq((double) (System.currentTimeMillis() - start));
return new TImgSlice<long[]>(outSlice, cBlock);
}
});
return
new Tuple2<DTImg<long[]>, Long>(
DTImg.WrapRDD(labeledImage, newlabeledImage, TImgTools.IMAGETYPE_LONG),
totalMerges);
} |
def load_state(self, state_dict: State, strict: bool = False) -> None:
missing_keys: List[str] = []
unexpected_keys: List[str] = []
error_msgs: List[str] = []
metadata = getattr(state_dict, '_metadata', None)
state_dict = state_dict.copy()
if metadata is not None:
state_dict._metadata = metadata
def load(module, prefix=''):
local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
if isinstance(module, torch.nn.Module):
module_load_fn = module._load_from_state_dict
else:
module_load_fn = module._load_state_dict_hook
module_load_fn(state_dict, prefix, local_metadata, True, missing_keys,
unexpected_keys, error_msgs)
for name, child in module.__dict__.items():
if child is not None and isinstance(child, Component):
if not isinstance(child, (torch.optim.Optimizer,
torch.optim.lr_scheduler._LRScheduler)):
load(child, prefix + name + STATE_DICT_DELIMETER)
if isinstance(module, torch.nn.Module):
for name, child in module._modules.items():
if child is not None:
load(child, prefix + name + STATE_DICT_DELIMETER)
load(self)
if strict:
if len(unexpected_keys) > 0:
error_msgs.insert(0, 'Unexpected key(s) in state_dict: '
f'{", ".join(f"{k}" for k in unexpected_keys)}. ')
if len(missing_keys) > 0:
error_msgs.insert(0, 'Missing key(s) in state_dict: '
f'{", ".join(f"{k}" for k in missing_keys)}. ')
if len(error_msgs) > 0:
newline_tab = '\n\t'
raise RuntimeError('Error(s) in loading state_dict for '
f'{self.__class__.__name__}:{newline_tab}'
f'{newline_tab.join(error_msgs)}') |
<filename>ci-droid-tasks-consumer-services/src/main/java/com/societegenerale/cidroid/tasks/consumer/services/eventhandlers/RebaseHandler.java
package com.societegenerale.cidroid.tasks.consumer.services.eventhandlers;
import com.societegenerale.cidroid.tasks.consumer.services.GitCommit;
import com.societegenerale.cidroid.tasks.consumer.services.Rebaser;
import com.societegenerale.cidroid.tasks.consumer.services.RemoteSourceControl;
import com.societegenerale.cidroid.tasks.consumer.services.model.SourceControlEvent;
import com.societegenerale.cidroid.tasks.consumer.services.model.github.Comment;
import com.societegenerale.cidroid.tasks.consumer.services.model.github.PullRequest;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.tuple.Pair;
import java.util.List;
import java.util.Map;
import static java.util.stream.Collectors.toMap;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
@Slf4j
public class RebaseHandler implements PushEventHandler {
private Rebaser rebaser;
private RemoteSourceControl gitHub;
public RebaseHandler(Rebaser rebaser, RemoteSourceControl gitHub) {
this.rebaser = rebaser;
this.gitHub=gitHub;
}
@Override
public void handle(SourceControlEvent event, List<PullRequest> pullRequests) {
log.info("handling rebase for {} PRs on repo {}", pullRequests.size(), event.getRepository().getUrl());
Map<PullRequest, List<GitCommit>> rebasedCommits = pullRequests.stream()
// rebase only the mergeable PRs
.filter(PullRequest::getMergeable)
// we probably don't have the rights to push anything on the forked repo to rebase the PR,
// so not even trying to rebase if PR originates from a forked repo
.filter(this::keepPullRequestOnlyIfNotMadeFromFork)
.map(rebaser::rebase)
.collect(toMap(Pair::getKey, Pair::getValue));
for (Map.Entry<PullRequest, List<GitCommit>> commitsForSinglePr : rebasedCommits.entrySet()) {
PullRequest pr = commitsForSinglePr.getKey();
            List<GitCommit> rebasedCommitsForPr = commitsForSinglePr.getValue();
if(isNotEmpty(pr.getWarningMessageDuringRebasing())){
log.info("adding a warn comment on PR {}", pr.getNumber());
gitHub.addCommentOnPR(pr.getRepo().getFullName(), pr.getNumber(), new Comment("There was a problem during the rebase/push process :\n"+pr.getWarningMessageDuringRebasing()));
if(!rebasedCommitsForPr.isEmpty()){
log.warn("since PR was marked with a warn message, no rebased commits should be reported.. please check what happened - a bug ??");
}
}
if (!rebasedCommitsForPr.isEmpty()) {
String comment = buildPrComment(rebasedCommitsForPr);
log.info("adding an INFO comment on PR {}", pr.getNumber());
gitHub.addCommentOnPR(pr.getRepo().getFullName(), pr.getNumber(), new Comment(comment));
}
}
}
private boolean keepPullRequestOnlyIfNotMadeFromFork(PullRequest pr) {
if(pr.isMadeFromForkedRepo()){
log.info("PR {} on repo {} is made from a forked repo - not trying to rebase it",pr.getNumber(),pr.getRepo().getName());
return false;
}
else{
return true;
}
}
private String buildPrComment(List<GitCommit> commits) {
StringBuilder sb = new StringBuilder("CI-droid has rebased below ").append(commits.size()).append(" commit(s):\n");
for (GitCommit commit : commits) {
sb.append("- ").append(commit.getCommitId()).append(" / ").append(commit.getCommitMessage()).append("\n");
}
return sb.toString();
}
}
|
<reponame>folly-systems/amazon-chime-sdk-component-library-react<gh_stars>0
import SpeakerSelection from './SpeakerSelection';
import MicSelection from './MicSelection';
import CameraSelection from './CameraSelection';
import QualitySelection from './CameraSelection/QualitySelection';
export { SpeakerSelection, MicSelection, CameraSelection, QualitySelection };
|
// Having appends a HAVING clause to the statement.
func (b *SelectBuilder) Having(exprOrMap interface{}, args ...interface{}) *SelectBuilder {
handleExprType(exprOrMap, args, func(expr string, args ...interface{}) {
expr, args = handleShortNotation(expr, args)
b.HavingFragments = append(b.HavingFragments, &whereFragment{expr, args})
})
return b
} |
/* Reset the manual failover state. This works for both masters and slaves
* as all the state about manual failover is cleared.
*
* The function can be used both to initialize the manual failover state at
* startup or to abort a manual failover in progress. */
void resetManualFailover(void) {
if (server.cluster->mf_end && clientsArePaused()) {
server.clients_pause_end_time = 0;
clientsArePaused();
}
server.cluster->mf_end = 0;
server.cluster->mf_can_start = 0;
server.cluster->mf_slave = NULL;
server.cluster->mf_master_offset = 0;
} |
def chain_without_block_validation(
base_db,
genesis_state):
overrides = {
'import_block': import_block_without_validation,
'validate_block': lambda self, block: None,
}
SpuriousDragonVMForTesting = SpuriousDragonVM.configure(validate_seal=lambda block: None)
klass = MiningChain.configure(
__name__='TestChainWithoutBlockValidation',
vm_configuration=(
(eth_constants.GENESIS_BLOCK_NUMBER, SpuriousDragonVMForTesting),
),
chain_id=1337,
**overrides,
)
genesis_params = {
'block_number': eth_constants.GENESIS_BLOCK_NUMBER,
'difficulty': eth_constants.GENESIS_DIFFICULTY,
'gas_limit': 3141592,
'parent_hash': eth_constants.GENESIS_PARENT_HASH,
'coinbase': eth_constants.GENESIS_COINBASE,
'nonce': eth_constants.GENESIS_NONCE,
'mix_hash': eth_constants.GENESIS_MIX_HASH,
'extra_data': eth_constants.GENESIS_EXTRA_DATA,
'timestamp': 1501851927,
}
chain = klass.from_genesis(base_db, genesis_params, genesis_state)
return chain |
<reponame>polymerfi/bridge-v2
import {
Checkbox,
Divider,
FormControl,
FormControlLabel,
FormLabel,
Grid,
IconButton,
TextField,
Typography,
} from "@material-ui/core";
import { Solana } from "@renproject/chains-solana";
import React, {
FunctionComponent,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { useTranslation } from "react-i18next";
import { useDispatch, useSelector } from "react-redux";
import { useHistory } from "react-router-dom";
import {
ActionButton,
ActionButtonWrapper,
} from "../../../components/buttons/Buttons";
import { NumberFormatText } from "../../../components/formatting/NumberFormatText";
import { BackArrowIcon } from "../../../components/icons/RenIcons";
import { CheckboxWrapper } from "../../../components/inputs/InputHelpers";
import {
PaperActions,
PaperContent,
PaperHeader,
PaperNav,
PaperTitle,
} from "../../../components/layout/Paper";
import { CenteredProgress } from "../../../components/progress/ProgressHelpers";
import { TooltipWithIcon } from "../../../components/tooltips/TooltipWithIcon";
import {
AssetInfo,
LabelWithValue,
MiddleEllipsisText,
SmallSpacedDivider,
SpacedDivider,
} from "../../../components/typography/TypographyHelpers";
import { Debug } from "../../../components/utils/Debug";
import { paths } from "../../../pages/routes";
import {
BridgeChain,
getChainConfig,
getCurrencyConfig,
toMintedCurrency,
} from "../../../utils/assetConfigs";
import { useFetchFees } from "../../fees/feesHooks";
import { getTransactionFees } from "../../fees/feesUtils";
import { $exchangeRates } from "../../marketData/marketDataSlice";
import { findExchangeRate } from "../../marketData/marketDataUtils";
import { $renNetwork } from "../../network/networkSlice";
import { TransactionFees } from "../../transactions/components/TransactionFees";
import { setCurrentTxId } from "../../transactions/transactionsSlice";
import {
createTxQueryString,
getReleaseAssetDecimals,
LocationTxState,
TxConfigurationStepProps,
TxType,
} from "../../transactions/transactionsUtils";
import { useShakePaper } from "../../ui/uiHooks";
import { useSelectedChainWallet } from "../../wallet/walletHooks";
import { $wallet, setWalletPickerOpened } from "../../wallet/walletSlice";
import { SolanaTokenAccountModal } from "../components/SolanaAccountChecker";
import { $mint } from "../mintSlice";
import {
createMintTransaction,
preValidateMintTransaction,
} from "../mintUtils";
export const MintFeesStep: FunctionComponent<TxConfigurationStepProps> = ({
onPrev,
}) => {
const { t } = useTranslation();
const dispatch = useDispatch();
const history = useHistory();
const { walletConnected, account, provider } = useSelectedChainWallet();
const [mintingInitialized, setMintingInitialized] = useState(false);
const { currency } = useSelector($mint);
const [amountValue, setAmountValue] = useState("");
const { chain } = useSelector($wallet);
const network = useSelector($renNetwork);
const exchangeRates = useSelector($exchangeRates);
const currencyUsdRate = findExchangeRate(exchangeRates, currency);
const handleAmountChange = useCallback((event) => {
const newValue = event.target.value.replace(",", ".");
if (!isNaN(newValue)) {
setAmountValue(newValue);
}
}, []);
const lockCurrencyConfig = getCurrencyConfig(currency);
const decimals = getReleaseAssetDecimals(
lockCurrencyConfig.sourceChain,
currency
);
const amount = Number(amountValue);
const hasAmount = amount !== 0;
const amountUsd = amount * currencyUsdRate;
const { fees, pending } = useFetchFees(currency, TxType.MINT);
const { conversionTotal } = getTransactionFees({
amount,
fees,
type: TxType.MINT,
decimals,
});
const conversionFormatted = conversionTotal;
const { GreyIcon } = lockCurrencyConfig;
const targetCurrencyAmountUsd = conversionFormatted * currencyUsdRate;
const destinationChainConfig = getChainConfig(chain);
const destinationChainNativeCurrencyConfig = getCurrencyConfig(
destinationChainConfig.nativeCurrency
);
const mintedCurrency = toMintedCurrency(currency);
const mintedCurrencyConfig = getCurrencyConfig(mintedCurrency);
const [ackChecked, setAckChecked] = useState(false);
const [touched, setTouched] = useState(false);
const showAckError = !ackChecked && touched;
const handleAckCheckboxChange = useCallback((event) => {
setTouched(true);
setAckChecked(event.target.checked);
}, []);
useShakePaper(showAckError);
const tx = useMemo(
() =>
createMintTransaction({
currency: currency,
destAddress: account,
mintedCurrency: toMintedCurrency(currency),
mintedCurrencyChain: chain,
userAddress: account,
network: network,
}),
[currency, account, chain, network]
);
const txValid = preValidateMintTransaction(tx);
const canInitializeMinting = ackChecked && txValid;
const [showSolanaModal, setShowSolanaModal] = useState(false);
const [checkingSolana, setCheckingSolana] = useState(false);
const [hasSolanaTokenAccount, setSolanaTokenAccount] = useState<any>();
const onSolanaAccountCreated = useCallback(
(a) => {
setSolanaTokenAccount(a);
},
[setSolanaTokenAccount]
);
// FIXME: we might want to extract the solana logic in a nicer manner
useEffect(() => {
if (chain === BridgeChain.SOLC && canInitializeMinting) {
if (hasSolanaTokenAccount) {
setMintingInitialized(true);
return;
}
if (hasSolanaTokenAccount === false && !showSolanaModal) {
setShowSolanaModal(true);
setMintingInitialized(false);
}
}
}, [
canInitializeMinting,
checkingSolana,
showSolanaModal,
chain,
hasSolanaTokenAccount,
setMintingInitialized,
setShowSolanaModal,
]);
const handleConfirm = useCallback(async () => {
if (walletConnected) {
setTouched(true);
if (canInitializeMinting) {
if (chain === BridgeChain.SOLC) {
setCheckingSolana(true);
if (!hasSolanaTokenAccount) {
setSolanaTokenAccount(
await new Solana(provider, network).getAssociatedTokenAccount(
currency
)
);
}
setCheckingSolana(false);
} else {
setMintingInitialized(true);
}
} else {
setMintingInitialized(false);
}
} else {
setTouched(false);
setMintingInitialized(false);
dispatch(setWalletPickerOpened(true));
}
}, [
dispatch,
chain,
walletConnected,
canInitializeMinting,
network,
provider,
currency,
hasSolanaTokenAccount,
setSolanaTokenAccount,
setMintingInitialized,
]);
const onMintTxCreated = useCallback(
async (tx) => {
history.push({
pathname: paths.MINT_TRANSACTION,
search: "?" + createTxQueryString(tx),
state: {
txState: { newTx: true },
} as LocationTxState,
});
dispatch(setCurrentTxId(tx.id));
},
[dispatch, history]
);
// there is a dependency loop, because we depend on the number
// of txes to determine the dayIndex, which updates when we create
// a new tx, leading to multiple txes being created for the same
// parameters.
// This flag prevents that
const [creatingMintTx, setCreatingMintTx] = useState(false);
useEffect(() => {
if (mintingInitialized && !creatingMintTx) {
setCreatingMintTx(true);
onMintTxCreated(tx).catch(console.error).finally();
}
}, [onMintTxCreated, mintingInitialized, tx, creatingMintTx]);
return (
<>
{showSolanaModal && (
<SolanaTokenAccountModal
currency={currency}
provider={provider}
network={network}
onCreated={onSolanaAccountCreated}
/>
)}
<PaperHeader>
<PaperNav>
<IconButton onClick={onPrev}>
<BackArrowIcon />
</IconButton>
</PaperNav>
<PaperTitle>{t("mint.fees-title")}</PaperTitle>
<PaperActions />
</PaperHeader>
<PaperContent bottomPadding>
<Grid container alignItems="flex-end">
<Grid item xs={7}>
<Typography variant="body1" gutterBottom>
{t("fees.calculator-label")}
</Typography>
</Grid>
<Grid item xs={5}>
<TextField
label={t("fees.calculator-amount-label")}
variant="filled"
color="primary"
value={amountValue || ""}
onChange={handleAmountChange}
/>
</Grid>
</Grid>
<SmallSpacedDivider />
<Typography variant="body1" gutterBottom>
{t("mint.details-label")}
</Typography>
<LabelWithValue
label={t("mint.sending-label")}
labelTooltip={t("mint.sending-tooltip")}
value={
hasAmount ? (
<NumberFormatText value={amount} spacedSuffix={currency} />
) : (
currency
)
}
valueEquivalent={
hasAmount ? (
<NumberFormatText
value={amountUsd}
spacedSuffix="USD"
decimalScale={2}
fixedDecimalScale
/>
) : (
""
)
}
/>
<LabelWithValue
label={t("mint.to-label")}
labelTooltip={t("mint.to-tooltip")}
value={destinationChainConfig.full}
/>
<LabelWithValue
label={t("mint.recipient-address-label")}
labelTooltip={t("mint.recipient-address-tooltip")}
value={
<MiddleEllipsisText hoverable>{tx.userAddress}</MiddleEllipsisText>
}
/>
<SpacedDivider />
<Typography variant="body1" gutterBottom>
{t("fees.label")}
</Typography>
<TransactionFees
chain={chain}
amount={amount}
currency={currency}
type={TxType.MINT}
/>
</PaperContent>
<Debug it={{ amount, hasAmount, mintingInitialized }} />
<Divider />
<PaperContent darker topPadding bottomPadding>
{walletConnected &&
(pending ? (
<CenteredProgress />
) : (
<AssetInfo
label={t("mint.receiving-label")}
value={
<NumberFormatText
value={conversionFormatted}
spacedSuffix={mintedCurrencyConfig.short}
/>
}
valueEquivalent={
<NumberFormatText
prefix=" = $"
value={targetCurrencyAmountUsd}
spacedSuffix="USD"
decimalScale={2}
fixedDecimalScale
/>
}
Icon={<GreyIcon fontSize="inherit" />}
/>
))}
<CheckboxWrapper>
<FormControl error={showAckError}>
<FormControlLabel
control={
<Checkbox
checked={ackChecked}
onChange={handleAckCheckboxChange}
name="ack"
color="primary"
/>
}
label={
<FormLabel htmlFor="ack" component={Typography}>
<Typography
variant="caption"
color={showAckError ? "inherit" : "textPrimary"}
>
{t("mint.fees-ack-message", {
currency: destinationChainNativeCurrencyConfig.short,
})}
<TooltipWithIcon
title={
<>
{t("mint.fees-ack-tooltip", {
chain: destinationChainConfig.full,
currency:
destinationChainNativeCurrencyConfig.short,
})}
</>
}
/>
</Typography>
</FormLabel>
}
/>
</FormControl>
</CheckboxWrapper>
<ActionButtonWrapper>
<ActionButton
onClick={handleConfirm}
disabled={
(walletConnected ? !ackChecked || mintingInitialized : false) ||
checkingSolana ||
hasSolanaTokenAccount === false
}
>
{!walletConnected
? t("wallet.connect")
: t("mint.view-gateway-button-label", {
currency: lockCurrencyConfig.short,
})}
</ActionButton>
</ActionButtonWrapper>
</PaperContent>
<Debug it={{ tx }} />
</>
);
};
|
import type { NotLocalizedEndRoute } from '../routes';
import { createRouterBuilder } from './createRouterBuilder';
test('home', () => {
const builder = createRouterBuilder();
const ref = Symbol('ref');
builder.add('/', ref);
const router = builder.createRouter();
const rr = router.get('/') as NotLocalizedEndRoute;
expect(rr).toBeDefined();
expect(rr.ref).toBe(ref);
expect(rr.path.namedParams.length).toBe(0);
expect(rr.path.regExp.source).toBe('^\\/$');
expect(rr.path.toPath()).toBe('/');
});
test('should throw when key is used twice', () => {
const ref = Symbol('ref');
const builder = createRouterBuilder();
builder.add('/path1', ref, 'samekey');
expect(() => {
builder.add('/path1', ref, 'samekey');
}).toThrow('"samekey" is already used');
});
test('should throw when add localized is called but no locales were defined', () => {
const ref = Symbol('ref');
const builder = createRouterBuilder();
expect(() => {
builder.addLocalized({ en: '/path1' }, ref);
}).toThrow('Invalid locales');
expect(() => {
builder.addLocalizedSegment({ en: '/path1' }, () => {});
}).toThrow('Invalid locales');
expect(() => {
builder.addSegment('/path1', (segmentBuilder) => {
segmentBuilder.addLocalized({ en: '/path2' }, ref);
});
}).toThrow('Invalid locales');
expect(() => {
builder.addSegment('/path1', (segmentBuilder) => {
segmentBuilder.addLocalizedSegment({ en: '/path2' }, () => {});
});
}).toThrow('Invalid locales');
});
if (process.env.NODE_ENV !== 'production') {
test('should throw when no ref is provided', () => {
const builder = createRouterBuilder(['en']);
expect(() => {
builder.add('/', undefined);
}).toThrow('Invalid ref: "undefined"');
expect(() => {
builder.add('/', null);
}).toThrow('Invalid ref: "null"');
expect(() => {
builder.addLocalized({ en: '/' }, undefined);
}).toThrow('Invalid ref: "undefined"');
expect(() => {
builder.addLocalized({ en: '/' }, null);
}).toThrow('Invalid ref: "null"');
});
}
describe('default key is path', () => {
const ref = Symbol('ref');
test('add', () => {
const builder = createRouterBuilder();
builder.add('/path1', ref);
expect(builder.createRouter().get('/path1')).toBeDefined();
});
test('addLocalized', () => {
const builder = createRouterBuilder(['en', 'fr']);
builder.addLocalized({ en: '/en', fr: '/fr' }, ref);
expect(builder.createRouter().get('/en')).toBeDefined();
});
test('segment add', () => {
const builder = createRouterBuilder();
builder.addSegment('/sgmt', (segmentBuilder) => {
segmentBuilder.add('/path1', ref);
});
expect(builder.createRouter().get('/sgmt/path1')).toBeDefined();
});
test('segment addLocalized', () => {
const builder = createRouterBuilder(['en', 'fr']);
builder.addSegment('/sgmt', (segmentBuilder) => {
segmentBuilder.addLocalized({ en: '/en', fr: '/fr' }, ref);
});
expect(builder.createRouter().get('/sgmt/en')).toBeDefined();
});
});
|
ST. LOUIS -- Blues goalie Jaroslav Halak departed Saturday night's game against the San Jose Sharks after a second-period collision with a teammate. Defenseman Barret Jackman crashed into Halak after a sliding attempt to break up a pass play to San Jose's Martin Havlat 1:07 into the second period in Game 2 of the Western Conference Quarterfinals. Halak went down, got to his knees and onto his skates without help and appeared to be staying in the game -- but then skated off under his own power to the Blues' dressing room. He was replaced by Brian Elliott. After the game, Blues coach Ken Hitchcock told the media that Halak had a lower-body injury but would travel with the team to San Jose for Games 3 and 4. "We'll evaluate him tomorrow, take him on the trip and see how he feels," Hitchcock said. |
<gh_stars>0
import numpy as np
from loguru import logger
from config import JOB_TRIGGER_CONFIG, JOBS_CONFIG, SYSTEM_CONFIG
class JobGenerator:
@staticmethod
def get_value(key):
        def get_normal_value(key):
            if isinstance(JOBS_CONFIG[key]["range"][-1], int):
                value_size = (
                    JOBS_CONFIG[key]["range"][-1] - JOBS_CONFIG[key]["range"][0]
                )
                mean_value = np.mean(JOBS_CONFIG[key]["range"])
                normal_value = int(
                    np.random.normal(loc=mean_value, scale=value_size * 0.1)
                )
                # numeric range: the sampled value is the result itself
                return normal_value
            value_size = len(JOBS_CONFIG[key]["range"])
            mean_value = int(np.mean(range(value_size)))
            normal_value = int(
                np.random.normal(loc=mean_value, scale=value_size * 0.1)
            )
            # categorical range: the sampled value is an index into the range
            return JOBS_CONFIG[key]["range"][normal_value]
def get_uniform_value(key):
if isinstance(JOBS_CONFIG[key]["range"][-1], int):
return int(
np.random.uniform(
low=JOBS_CONFIG[key]["range"][0],
high=JOBS_CONFIG[key]["range"][-1],
)
)
index = int(
np.random.uniform(low=0, high=len(JOBS_CONFIG[key]["range"]) - 1)
)
return JOBS_CONFIG[key]["range"][index]
def get_slope_value(key):
""" e.g. range: 1~3 -> probabilites = 1/6, 2/6, 3/6
"""
if isinstance(JOBS_CONFIG[key]["range"][-1], int):
value_size = (
JOBS_CONFIG[key]["range"][-1] - JOBS_CONFIG[key]["range"][0]
) + 1
else:
value_size = len(JOBS_CONFIG[key]["range"])
probabilites = np.arange(1, value_size + 1) / (
sum(range(1, value_size + 1))
)
if JOBS_CONFIG[key]["slope"] == "DESC":
probabilites = probabilites[::-1]
if isinstance(JOBS_CONFIG[key]["range"][-1], int):
if (JOBS_CONFIG[key]["range"][-1] - JOBS_CONFIG[key]["range"][0]) == 0:
return JOBS_CONFIG[key]["range"][0]
return int(
np.random.choice(
np.arange(
JOBS_CONFIG[key]["range"][0],
JOBS_CONFIG[key]["range"][-1] + 1,
),
p=probabilites,
)
)
else:
index = np.random.choice(np.arange(0, value_size), p=probabilites)
return JOBS_CONFIG[key]["range"][index]
value_map = {
"default": JOBS_CONFIG[key]["default"],
"normal": get_normal_value(key),
"uniform": get_uniform_value(key),
"slope": get_slope_value(key),
}
return value_map[JOBS_CONFIG[key]["distribution"]]
@classmethod
def get_job(cls, trigger_time=None):
if not trigger_time:
trigger_time = int(cls.get_value("random_trigger_time"))
return {
"job_type": cls.get_value("job_type"),
"trigger_time": trigger_time,
"schedule_time": cls.get_value("schedule_time"),
"computing_time": cls.get_value("computing_time"),
"executors": cls.get_value("executors"),
"cpu": cls.get_value("cpu"),
"mem": cls.get_value("mem"),
}
@classmethod
def get_schedule_jobs(cls):
logger.warning(f"Exp Time: {SYSTEM_CONFIG['EXP_TIME']}")
logger.warning(
f"Schedule Job Interval: {JOB_TRIGGER_CONFIG['SCHEDULE_JOB_INTERVAL']}"
)
jobs = []
for schedule_trigger_time in JOB_TRIGGER_CONFIG["SCHEDULE_JOB_INTERVAL"]:
start_time = 1
while start_time < SYSTEM_CONFIG["EXP_TIME"]:
job = cls.get_job(start_time)
start_time += schedule_trigger_time
jobs.append(job)
return jobs
@classmethod
def gen(cls):
# random on-demand jobs
jobs = [cls.get_job() for _ in range(JOB_TRIGGER_CONFIG["RANDOM_JOB_NUM"])]
# pre-defined schedule jobs
schedule_jobs = cls.get_schedule_jobs()
return sorted([*jobs, *schedule_jobs], key=lambda x: x["trigger_time"])
|
package datawave.query.testframework;
import datawave.ingest.csv.mr.input.CSVRecordReader;
import datawave.ingest.data.RawRecordContainer;
import datawave.ingest.data.TypeRegistry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import java.io.File;
import java.io.IOException;
import java.net.URI;
/**
* Loads a CSV test file for ingestion.
*/
public class CSVTestFileLoader implements TestFileLoader {
private final URI uri;
private final Configuration conf;
CSVTestFileLoader(URI u, Configuration cf) {
this.uri = u;
this.conf = cf;
}
@Override
public void loadTestData(SequenceFile.Writer seqFile) throws IOException {
TypeRegistry.reset();
TypeRegistry.getInstance(this.conf);
Path path = new Path(this.uri);
File file = new File(this.uri);
FileSplit split = new FileSplit(path, 0, file.length(), null);
TaskAttemptContext ctx = new TaskAttemptContextImpl(this.conf, new TaskAttemptID());
try (CSVRecordReader reader = new CSVRecordReader()) {
reader.initialize(split, ctx);
while (reader.nextKeyValue()) {
RawRecordContainer raw = reader.getEvent();
seqFile.append(new Text(), raw);
}
}
}
}
|
/**
* Forces the tab pane associated with this list item to be shown
*
* @param fireEvents true=fire show/hide events, false=don't fire show/hide events
*/
public void showTab(final boolean fireEvents) {
showTab(anchor.getElement());
if (fireEvents) {
fireEvent(new TabShowEvent(this, null));
}
} |
def story(self) -> Optional[HashtagStory]:
if not self._insta.authenticated:
raise AuthenticationRequired()
logger.info("Retrieving story of #{0}".format(self.tagname))
variables = {"tag_names": [self.tagname], "precomposed_overlay": False, "show_story_viewer_list": False}
data = self._insta._graphql_query(QUERYHASH_REELITEMS, variables)["reels_media"]
if not data:
logger.warning("No visible story is avaliable now for this hashtag.")
return
return HashtagStory(data[0]) |
Minimally invasive localization of light source in tissue with an equidistant measurement
Bioluminescence techniques provide a way of observing biological processes in vivo and are considered promising for guiding tumor resection. However, the related techniques, such as bioluminescence imaging and bioluminescence tomography, share a common limitation in imaging depth. In this paper, a minimally invasive method is introduced to detect the emission of a light source beneath deep tissue. An equidistant measurement derived from the diffusion equation and a spherical light source model is proposed to localize the light source. Using this method, we obtained an analytic solution for the 3D position of the light source that holds when the insertion passes through the center of the light source, without requiring the parameters of the tissue to be known. Finally, we put forward a feasible design of a minimally invasive probe for clinical use.
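For context, here is a minimal sketch of the steady-state diffusion model that such methods typically start from; this is an assumed standard form, and the exact spherical-source formulation and boundary conditions in the paper may differ:
% Steady-state diffusion approximation for photon transport in tissue,
% with the infinite-medium point-source Green's function (assumed form):
\nabla \cdot \big( D \, \nabla \Phi(\mathbf{r}) \big) - \mu_a \, \Phi(\mathbf{r}) = -S(\mathbf{r}), \qquad
\Phi(r) = \frac{P}{4 \pi D r} \, e^{-\mu_{\mathrm{eff}} r}, \qquad
\mu_{\mathrm{eff}} = \sqrt{\mu_a / D}
Because the fluence Phi(r) depends only on the source-detector distance r for a spherical source, two measurement points giving equal readings must be equidistant from the source; intersecting several such constraints is presumably what lets the 3D position be recovered without knowing D or mu_a. |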
#ifndef STATE_INCLUDES_H
#define STATE_INCLUDES_H
#include "BackgroundFadeInStateClass.h"
#include "LogoZoomInStateClass.h"
#include "StartButtonZoomInStateClass.h"
#include "LegendAppearStateClass.h"
#include "StartButtonWaitStateClass.h"
#include "LegendVanishStateClass.h"
#include "LogoAndStartButtonVanishStateClass.h"
#include "BackgroundFadeOutStateClass.h"
#include "ShutDownStateClass.h"
#include "LegendDisappearStateClass.h"
#include "StartButtonVanishStateClass.h"
#include "MenuAppearStateClass.h"
#include "MenuVanishStateClass.h"
#include "StartButtonAppearStateClass.h"
#include "MenuWaitStateClass.h"
#include "MenuDisappearStateClass.h"
#include "BoardAppearStateClass.h"
#include "LogoShrinkStateClass.h"
#include "BoardWaitStateClass.h"
#include "LogoEnlargeStateClass.h"
#include "BoardDisappearStateClass.h"
#include "BoardCellChangeDisappearStateClass.h"
#include "BoardCellChangeAppearStateClass.h"
#include "BoardCellUncoverStateClass.h"
#include "BoardCellExplodeStateClass.h"
#include "GameLostAppearStateClass.h"
#include "GameLostVisibleWaitStateClass.h"
#include "GameLostDisappearStateClass.h"
#include "GameLostNotVisibleWaitStateClass.h"
#include "GameLostVanishStateClass.h"
#include "GameWonAppearStateClass.h"
#include "GameWonVisibleWaitStateClass.h"
#include "GameWonDisappearStateClass.h"
#include "GameWonNotVisibleWaitStateClass.h"
#include "GameWonVanishStateClass.h"
#include "BestScoresTableAppearStateClass.h"
#include "BestScoresTableWaitStateClass.h"
#include "BestScoresTableDisappearStateClass.h"
#endif
|
import FWCore.ParameterSet.Config as cms
from RecoVertex.BeamSpotProducer.BeamSpotNominalCollision2_cfi import *
|
<reponame>intensifier/NeoAxisEngine<gh_stars>0
// Copyright (C) NeoAxis Group Ltd. 8 Copthall, Roseau Valley, 00152 Commonwealth of Dominica.
#include "VHACD.h"
/////////////////////////////////////////////////////////////////////////////////////////////////////////////
EXPORT VHACD::IVHACD* VHACD_Compute64(const double* const points, const uint32_t pointCount, const uint32_t* const triangles, const uint32_t triangleCount,
int maxConvexHulls,//Maximum number of convex hulls to produce
int maxConvexTriangles,//Controls the maximum number of triangles per convex-hull (default = 64, range = 4 - 1024)
double minConvexVolume,//Controls the adaptive sampling of the generated convex-hulls (default=0.0001, range=0.0-0.01)
bool convexApproximation,//Enable / disable approximation when computing convex-hulls (default = 1)
int maxResolution,//Maximum number of voxels generated during the voxelization stage (default = 100, 000, range = 10, 000 - 16, 000, 000)
double maxConcavity,//Maximum allowed concavity (default=0.0025, range=0.0-1.0)
double alpha,//Controls the bias toward clipping along symmetry planes (default=0.05, range=0.0-1.0)
double beta,//Controls the bias toward clipping along revolution axes (default=0.05, range=0.0-1.0)
int planeDownsampling,//Controls the granularity of the search for the \"best\" clipping plane (default=4, range=1-16)
int hullDownsampling,//Controls the precision of the convex-hull generation process during the clipping plane selection stage (default=4, range=1-16)
bool normalizeMesh,//Enable / disable normalizing the mesh before applying the convex decomposition (default = 0)
bool tetrahedronMode//0: voxel-based approximate convex decomposition, 1: tetrahedron-based approximate convex decomposition (default=0)
)
{
VHACD::IVHACD::Parameters desc;
VHACD::IVHACD* myHACD = 0;
//myHACD = VHACD::CreateVHACD_ASYNC();
myHACD = VHACD::CreateVHACD();
desc.Init(maxConvexHulls, maxConvexTriangles, minConvexVolume, convexApproximation, maxResolution, maxConcavity, alpha, beta, planeDownsampling, hullDownsampling, normalizeMesh, tetrahedronMode);
bool success = myHACD->Compute(points, pointCount, triangles, triangleCount, desc);
if (!success)
{
myHACD->Release();
return NULL;
}
return myHACD;
}
EXPORT VHACD::IVHACD* VHACD_Compute32(const float* const points, const uint32_t pointCount, const uint32_t* const triangles, const uint32_t triangleCount,
int maxConvexHulls,//Maximum number of convex hulls to produce
int maxConvexTriangles,//Controls the maximum number of triangles per convex-hull (default = 64, range = 4 - 1024)
double minConvexVolume,//Controls the adaptive sampling of the generated convex-hulls (default=0.0001, range=0.0-0.01)
bool convexApproximation,//Enable / disable approximation when computing convex-hulls (default = 1)
int maxResolution,//Maximum number of voxels generated during the voxelization stage (default = 100, 000, range = 10, 000 - 16, 000, 000)
double maxConcavity,//Maximum allowed concavity (default=0.0025, range=0.0-1.0)
double alpha,//Controls the bias toward clipping along symmetry planes (default=0.05, range=0.0-1.0)
double beta,//Controls the bias toward clipping along revolution axes (default=0.05, range=0.0-1.0)
int planeDownsampling,//Controls the granularity of the search for the \"best\" clipping plane (default=4, range=1-16)
int hullDownsampling,//Controls the precision of the convex-hull generation process during the clipping plane selection stage (default=4, range=1-16)
bool normalizeMesh,//Enable / disable normalizing the mesh before applying the convex decomposition (default = 0)
bool tetrahedronMode//0: voxel-based approximate convex decomposition, 1: tetrahedron-based approximate convex decomposition (default=0)
)
{
VHACD::IVHACD::Parameters desc;
VHACD::IVHACD* myHACD = VHACD::CreateVHACD();
//VHACD::IVHACD* myHACD = VHACD::CreateVHACD_ASYNC();
desc.Init(maxConvexHulls, maxConvexTriangles, minConvexVolume, convexApproximation, maxResolution, maxConcavity, alpha, beta, planeDownsampling, hullDownsampling, normalizeMesh, tetrahedronMode);
bool success = myHACD->Compute(points, pointCount, triangles, triangleCount, desc);
if (!success)
{
myHACD->Release();
return NULL;
}
return myHACD;
}
EXPORT void VHACD_Cancel(VHACD::IVHACD* objHACD)
{
if (objHACD)
objHACD->Cancel();
}
EXPORT void VHACD_Delete(VHACD::IVHACD* objHACD)
{
if (objHACD)
{
//objHACD->Clean();
objHACD->Release();
}
}
// In synchronous mode (non-multi-threaded) the state is always 'ready'
// In asynchronous mode, this returns true if the background thread is not still actively computing a new solution.
// In an asynchronous config the 'IsReady' call will report any update or log messages in the caller's current thread.
EXPORT bool VHACD_IsReady(VHACD::IVHACD* objHACD)
{
	if (objHACD)
		return objHACD->IsReady();
	return true;
}
EXPORT int VHACD_GetClusterCount(VHACD::IVHACD* objHACD)
{
if (objHACD)
return (int)objHACD->GetNConvexHulls();
return 0;
}
EXPORT void VHACD_GetBufferSize(VHACD::IVHACD* objHACD, int cluster, int* pointCount, int* triangleCount)
{
VHACD::IVHACD::ConvexHull hull;
objHACD->GetConvexHull(cluster, hull);
*pointCount = (int)hull.m_nPoints;
*triangleCount = (int)hull.m_nTriangles;
}
// copy buffers
EXPORT void VHACD_GetBuffer64(VHACD::IVHACD* objHACD, int cluster, double* points, int* triangles)
{
VHACD::IVHACD::ConvexHull hull;
objHACD->GetConvexHull(cluster, hull);
memcpy(points, hull.m_points, sizeof(double) * hull.m_nPoints * 3);
memcpy(triangles, hull.m_triangles, sizeof(int) * hull.m_nTriangles * 3);
}
// copy buffers
EXPORT void VHACD_GetBuffer32(VHACD::IVHACD* objHACD, int cluster, float* points, int* triangles)
{
VHACD::IVHACD::ConvexHull hull;
objHACD->GetConvexHull(cluster, hull);
double* pPtr = hull.m_points;
for (int i = 0; i < hull.m_nPoints; i++)
{
*points++ = (float)*pPtr++;
*points++ = (float)*pPtr++;
*points++ = (float)*pPtr++;
}
memcpy(triangles, hull.m_triangles, sizeof(int) * hull.m_nTriangles * 3);
}
//EXPORT double* VHACD_GetConvexVertices(VHACD::IVHACD* objHACD, int cluster, int* vertexCount)
//{
// if (!objHACD)
// {
// *vertexCount = 0;
//
// return 0;
// }
//
// VHACD::IVHACD::ConvexHull hull;
//
// objHACD->GetConvexHull(cluster, hull);
//
// *vertexCount = hull.m_nPoints;
//
// return hull.m_points;
//}
//
//EXPORT uint32_t* VHACD_GetConvexFaces(VHACD::IVHACD* objHACD, int cluster, int* triangleCount)
//{
// if (!objHACD)
// {
// *triangleCount = 0;
//
// return 0;
// }
//
// VHACD::IVHACD::ConvexHull hull;
//
// objHACD->GetConvexHull(cluster, hull);
//
// *triangleCount = hull.m_nTriangles;
//
// return hull.m_triangles;
//}
|
<reponame>stulzq/sofa-registry<filename>server/server/session/src/test/java/com/alipay/sofa/registry/server/session/slot/SlotTableCacheImplTest.java<gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alipay.sofa.registry.server.session.slot;
import com.alipay.sofa.registry.common.model.slot.Slot;
import com.alipay.sofa.registry.common.model.slot.SlotTable;
import com.alipay.sofa.registry.common.model.slot.func.SlotFunctionRegistry;
import com.alipay.sofa.registry.server.session.AbstractSessionServerTestBase;
import com.alipay.sofa.registry.server.shared.slot.SlotTableRecorder;
import com.alipay.sofa.registry.util.DatumVersionUtil;
import com.google.common.collect.Sets;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import org.assertj.core.util.Lists;
import org.junit.Assert;
import org.junit.Test;
public class SlotTableCacheImplTest extends AbstractSessionServerTestBase {
private SlotTableCacheImpl slotTableCache = new SlotTableCacheImpl();
@Test
public void testSlotOf() {
int i = slotTableCache.slotOf("foo#@#DEFAULT_INSTANCE_ID#@#SOFA_APP");
String dataInfoId = randomString();
int slotId = slotTableCache.slotOf(dataInfoId);
Assert.assertEquals(SlotFunctionRegistry.getFunc().slotOf(dataInfoId), slotId);
AtomicBoolean concurrentResult = new AtomicBoolean(true);
new ConcurrentExecutor(10, executors)
.execute(
new Runnable() {
@Override
public void run() {
String dataInfoId = randomString();
if (SlotFunctionRegistry.getFunc().slotOf(dataInfoId)
!= slotTableCache.slotOf(dataInfoId)) {
concurrentResult.set(false);
}
}
});
Assert.assertTrue(concurrentResult.get());
}
@Test
public void testGetInfo() {
slotTableCache.updateSlotTable(randomSlotTable());
String dataInfoId = randomString();
Assert.assertEquals(
slotTableCache.getSlot(SlotFunctionRegistry.getFunc().slotOf(dataInfoId)),
slotTableCache.getSlot(dataInfoId));
Assert.assertSame(
slotTableCache.getSlot(SlotFunctionRegistry.getFunc().slotOf(dataInfoId)),
slotTableCache.getSlot(dataInfoId));
Assert.assertNull(slotTableCache.getLeader(13685));
Assert.assertNotNull(slotTableCache.getLeader(3));
}
@Test
public void testRecorders() throws InterruptedException {
SlotTable slotTable = randomSlotTable();
AtomicReference<SlotTable> ref = new AtomicReference<>();
slotTableCache.setRecorders(
Lists.newArrayList(
new SlotTableRecorder() {
@Override
public void record(SlotTable slotTable) {
ref.set(slotTable);
}
}));
CountDownLatch latch = new CountDownLatch(1);
executors.execute(
new Runnable() {
@Override
public void run() {
slotTableCache.updateSlotTable(slotTable);
latch.countDown();
}
});
latch.await();
Assert.assertNotNull(ref.get());
Assert.assertEquals(slotTable, ref.get());
}
@Test
public void testUpdateSlotTable() {
Set<Long> epoches = Sets.newConcurrentHashSet();
AtomicBoolean result = new AtomicBoolean(true);
ConsumerProducer.builder()
.consumerNum(10)
.consumer(
new Runnable() {
@Override
public void run() {
try {
epoches.add(slotTableCache.getEpoch());
} catch (Throwable th) {
result.set(false);
}
}
})
.producerNum(1)
.producer(
new Runnable() {
@Override
public void run() {
try {
slotTableCache.updateSlotTable(randomSlotTable());
} catch (Throwable th) {
result.set(false);
}
}
})
.executor(executors)
.build()
.execute(null);
Assert.assertTrue(result.get());
Assert.assertTrue(epoches.size() > 0);
}
@Test
public void testCurrentSlotTable() throws InterruptedException {
slotTableCache.updateSlotTable(randomSlotTable());
Assert.assertNotSame(
slotTableCache.getCurrentSlotTable(), slotTableCache.getCurrentSlotTable());
Assert.assertEquals(slotTableCache.getCurrentSlotTable(), slotTableCache.getCurrentSlotTable());
SlotTable prev = slotTableCache.getCurrentSlotTable();
CountDownLatch latch = new CountDownLatch(1);
executors.execute(
new Runnable() {
@Override
public void run() {
slotTableCache.updateSlotTable(randomSlotTable());
latch.countDown();
}
});
latch.await();
Assert.assertNotEquals(prev, slotTableCache.getCurrentSlotTable());
}
@Test
public void testWillNotUpdateLowerEpoch() {
slotTableCache.updateSlotTable(randomSlotTable());
slotTableCache.updateSlotTable(new SlotTable(123, randomSlotTable().getSlots()));
Assert.assertNotEquals(123, slotTableCache.getCurrentSlotTable().getEpoch());
}
@Test
public void testBlankSlot() {
SlotTable correct = randomSlotTable();
List<Slot> incorrect = correct.getSlots();
incorrect.add(new Slot(13684, "", DatumVersionUtil.nextId(), Lists.newArrayList()));
SlotTable slotTable = new SlotTable(correct.getEpoch(), incorrect);
slotTableCache.updateSlotTable(slotTable);
}
}
|
//Source: http://cryptography.wikia.com/wiki/Linear_feedback_shift_register
//This implementation has a special optimization: it limits the range of random
//numbers to near the maximum power of two.
//This method returns a binary factor that depends on the generator polynomial.
void findShiftFactor()
{
byte result = 4;
m_shiftFactor = 2;
while ((m_shiftFactor < 8) && (result < m_maxExcluded))
{
result <<= 1;
m_shiftFactor += 1;
}
if (m_shiftFactor == 8)
m_shiftFactor = (1 << 7) | (1 << 5) | (1 << 4) | (1 << 3);
else if (m_shiftFactor == 5)
m_shiftFactor = (1 << 4) | (1 << 2);
else
m_shiftFactor = (1 << (m_shiftFactor - 1)) | (1 << (m_shiftFactor - 2));
} |
// DeleteOffsets deletes offsets for the given group.
//
// Originally, offset commits were persisted in Kafka for some retention time.
// This posed problematic for infrequently committing consumers, so the
// retention time concept was removed in Kafka v2.1 in favor of deleting
// offsets for a group only when the group became empty. However, if a group
// stops consuming from a topic, then the offsets will persist and lag
// monitoring for the group will notice an ever increasing amount of lag for
// these no-longer-consumed topics. Thus, Kafka v2.4 introduced an OffsetDelete
// request to allow admins to manually delete offsets for no longer consumed
// topics.
//
// This method requires talking to Kafka v2.4+. This returns an *AuthErr if the
// user is not authorized to delete offsets in the group at all. This does not
// return on per-topic authorization failures, instead, per-topic authorization
// failures are included in the responses.
func (cl *Client) DeleteOffsets(ctx context.Context, group string, s TopicsSet) (DeleteOffsetsResponses, error) {
if len(s) == 0 {
return nil, nil
}
req := kmsg.NewPtrOffsetDeleteRequest()
req.Group = group
for t, ps := range s {
rt := kmsg.NewOffsetDeleteRequestTopic()
rt.Topic = t
for p := range ps {
rp := kmsg.NewOffsetDeleteRequestTopicPartition()
rp.Partition = p
rt.Partitions = append(rt.Partitions, rp)
}
req.Topics = append(req.Topics, rt)
}
resp, err := req.RequestWith(ctx, cl.cl)
if err != nil {
return nil, err
}
if err := maybeAuthErr(resp.ErrorCode); err != nil {
return nil, err
}
if err := kerr.ErrorForCode(resp.ErrorCode); err != nil {
return nil, err
}
r := make(DeleteOffsetsResponses)
for _, t := range resp.Topics {
rt := make(map[int32]error)
r[t.Topic] = rt
for _, p := range t.Partitions {
rt[p.Partition] = kerr.ErrorForCode(p.ErrorCode)
}
}
return r, nil
} |
<gh_stars>100-1000
/** Copyright 2020 Alibaba Group Holding Limited.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef EXAMPLES_GNN_SAMPLER_UTIL_H_
#define EXAMPLES_GNN_SAMPLER_UTIL_H_
#include <grape/grape.h>
#include <grape/io/line_parser_base.h>
enum class RandomStrategy { Random, EdgeWeight, TopK };
void split(const std::string& str, char delim,
std::vector<std::string>& ret_strs) {
ret_strs.clear();
size_t start;
size_t end = 0;
while ((start = str.find_first_not_of(delim, end)) != std::string::npos) {
end = str.find(delim, start);
ret_strs.push_back(str.substr(start, end - start));
}
}
void parse_hop_and_num(const std::string& hop_and_num_str,
std::vector<uint32_t>& nums_of_hop,
std::vector<uint32_t>& hop_size) {
std::vector<std::string> hop_params;
split(hop_and_num_str, '-', hop_params);
for (auto& hop : hop_params) {
nums_of_hop.push_back(std::stoul(hop));
}
hop_size.resize(nums_of_hop.size() + 1);
hop_size[0] = 1;
for (size_t i = 0; i < nums_of_hop.size(); ++i) {
hop_size[i + 1] = hop_size[i] * nums_of_hop[i];
}
hop_size[0] = 0;
for (size_t i = 1; i < hop_size.size(); ++i) {
hop_size[i] = hop_size[i - 1] + hop_size[i];
}
}
#endif // EXAMPLES_GNN_SAMPLER_UTIL_H_
|
def batchmeanpsnr(truth, pred):
    # Average the per-sample mean PSNR over the whole batch.
    batchmean_psnr = 0
    for i in range(len(pred)):
        # lfpsnrs expects uint8 images in [0, 255]; inputs are assumed to be in [0, 1].
        _, meanpsnr = lfpsnrs(np.uint8(truth[i]*255.), np.uint8(pred[i]*255.))
        batchmean_psnr += meanpsnr
    batchmean_psnr = batchmean_psnr / len(pred)
    return batchmean_psnr |
<filename>_src/section_3/packtemr/src/main/java/com/tomekl007/EMR.java
package com.tomekl007;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.PropertiesCredentials;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClient;
import com.amazonaws.services.elasticmapreduce.model.AddJobFlowStepsRequest;
import com.amazonaws.services.elasticmapreduce.model.AddJobFlowStepsResult;
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig;
import com.amazonaws.services.elasticmapreduce.model.StepConfig;
import com.amazonaws.services.elasticmapreduce.util.StepFactory;
import java.io.IOException;
public class EMR {
public static void main(String[] args) {
AWSCredentials credentials = null;
try {
credentials = new PropertiesCredentials(
EMR.class.getResourceAsStream("AwsCredentials.properties"));
} catch (IOException e1) {
System.out.println("Credentials were not properly entered into AwsCredentials.properties.");
System.out.println(e1.getMessage());
System.exit(-1);
}
AmazonElasticMapReduce client = new AmazonElasticMapReduceClient(credentials);
// predefined steps. See StepFactory for list of predefined steps
StepConfig hive = new StepConfig("Hive", new StepFactory().newInstallHiveStep());
// A custom step
HadoopJarStepConfig hadoopConfig1 = new HadoopJarStepConfig()
.withJar("s3://mybucket/my-jar-location1")
.withMainClass("com.tomekl007.EMR") // optional main class, this can be omitted if jar above has a manifest
.withArgs("--verbose"); // optional list of arguments
StepConfig customStep = new StepConfig("Step1", hadoopConfig1);
AddJobFlowStepsResult result = client.addJobFlowSteps(new AddJobFlowStepsRequest()
.withJobFlowId("j-1HTE8WKS7SODR")
.withSteps(hive, customStep));
System.out.println(result.getStepIds());
}
}
|
The following blog post, unless otherwise noted, was written by a member of Gamasutra's community.
The thoughts and opinions expressed are those of the writer and not Gamasutra or its parent company.
THE SHORT VERSION: When trying to select among many small objects with a ray, e.g. in VR games that employ a virtual laser pointer, consider using a "weighted angle" algorithm that picks the object with the smallest angle (between the pointer's forward direction and the vector between the pointer and the object) multiplied by the distance of the object from the pointer. This allows for forgiving, flexible, and intuitive selection that allows the player to pick out distant objects yet still privileges closer ones.
THE LONG VERSION:
This idea is pretty simple. I don't imagine that I'm the first to see it! But it wasn't obvious to me, and I was never introduced to it before, so I wanted to offer a public explanation. Hopefully I can save some people the trouble of working it out themselves. :)
Imagine a game with lots of small UI elements that you need to select from. Maybe this is a complicated UI or a crowded battlefield that you're clicking on, or maybe it's a VR game where you're using a laser pointer to select objects. (With several headsets that use 3DOF pointer controllers, this is pretty common.) When you point that ray into a group of objects, how do you determine which object it's pointing at? What are you "hovering over"?
As it happens, I’m making exactly that sort of game. I haven't publicly announced it yet, but my next VR game, Astraeus, involves using a laser pointer to manipulate a dense network composed of scores of small objects. Here's what it looks like when I reduce that to just 3 objects (a nearby blue sphere, a somewhat distant orange sphere, and a very distant purple sphere):
So how do we pick which object we're pointing at? The simplest solution would be to cast a ray against physics colliders and pick the object that the ray hits. But for small objects and a jittery laser pointer, that could be very difficult. Maybe we could boost the size of the colliders beyond the actual size of the object? Here's a visualization of the selection area for each object (the shaded pixels indicate that if we clicked on that point, we'd select the object with the same color):
That does help a bit, but it's limited. Small objects that are far away are still difficult to select. Maybe we could boost the size of the colliders even further:
But this introduces an occlusion problem. The collision radius of the nearby blue sphere is covering up the collision for the more distant purple sphere. Even if the player were pointing directly at the purple sphere, we wouldn't be able to select it, which would be a pain.
One alternative is to select objects based on angle. You could check all the objects in front of the player and calculate the angle between the player's pointing ray (the laser pointer) and the ray that extends from the pointer's position to the object. Then you could say that the object with the smallest angle is the one that we're pointing at:
Now we don't have to be hovering precisely over the object to select it! This algorithm never has occlusion issues, and it lets you more easily select distant objects.
However, it's kind of weird too. One of these objects is very nearby, and the others are further away. It's not intuitive that a very distant object should have the same priority as the closest. The player is more likely to be trying to select the closer object, and ignoring it in favor of something deep in the background would be super frustrating.
The solution is to multiply the angle by the distance to the object. Thus, if an object is twice as far away as another, your pointing (i.e. the angle) needs to be twice as precise. With the weighted angle algorithm, nearer objects are easier to select, but you can still always select any object if you're pointing directly at it (i.e. if the angle is near zero).
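To make that concrete, here's a minimal sketch of the weighted-angle pick in plain Python (this is not my game's actual code; obj.position, the vector format, and the 15-degree cutoff are illustrative assumptions):
import math

def angle_between(v1, v2):
    # Angle in radians between two 3D vectors.
    dot = sum(a * b for a, b in zip(v1, v2))
    norms = math.sqrt(sum(a * a for a in v1)) * math.sqrt(sum(b * b for b in v2))
    return math.acos(max(-1.0, min(1.0, dot / norms)))

def pick_target(pointer_pos, pointer_forward, objects, max_angle=math.radians(15)):
    # Return the object minimizing angle * distance, or None if nothing qualifies.
    best, best_score = None, float("inf")
    for obj in objects:
        to_obj = [o - p for o, p in zip(obj.position, pointer_pos)]
        dist = math.sqrt(sum(c * c for c in to_obj))
        if dist < 1e-6:
            return obj  # pointer is sitting right on the object
        angle = angle_between(pointer_forward, to_obj)
        if angle > max_angle:
            continue  # too far off-axis to count as pointing at it
        score = angle * dist  # the weighted angle
        if score < best_score:
            best, best_score = obj, score
    return best
With this, pointing anywhere near a lone object selects it, but when several objects compete, closeness to the pointer wins out over distant background objects.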
This might seem pretty basic so far, or like more trouble than it's worth, but most scenes in my game are not so simple. What happens if we have dozens upon dozens of objects to choose from?
If we just use large invisible colliders and raycast against them, we get a suboptimal solution with a lot of potentially frustrating occlusion problems:
If we pick based on smallest angle, selection is a lot more forgiving, but distant objects are overprivileged, especially on the edges:
If we use weighted angles (multiplying by distance), we prioritize the nearer objects again, and the selection radii for each object smooth out and become a little more sensible. It feels a bit more intuitive:
There are several other ways we can improve this. First, this algorithm completely ignores occlusion; this is better than having objects blocked by invisible walls, but in some cases, this leads to the ability to select an object that is completely hidden behind another. We could solve this by first performing a direct raycast upon realistically-sized collision geometry; if there's a perfect hit, then we can forget about selecting by angle.
Second, multiplying by distance is just a way of giving added weight to that particular variable. We could also add a bonus for physically larger objects. Or, if a certain UI element is being highlighted, we could give it extra weight so it's easier to select than other objects.
Third, we can weigh the variables differently. If distance is especially important (like if distant objects are very unlikely to be the player's intended target), we could multiply the angle by the square of the distance.
Lastly, we can add a minimum angle threshold, so that the player isn't always considered to be hovering over something.
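Sketched out, those extensions might combine like this (purely illustrative: raycastHit(), sizeOf(), priorityOf(), and the constant are hypothetical stand-ins for whatever your engine and game data actually provide):

// Hypothetical stand-ins -- swap in your engine's real queries:
int raycastHit(const Vec3&, const Vec3&) { return -1; }  // index hit by a physics ray, or -1
float sizeOf(const Object&) { return 1.0f; }             // object radius or scale
float priorityOf(const Object&) { return 1.0f; }         // e.g. 2.0f for a highlighted object

const float kMaxSelectAngle = 0.35f;  // radians; beyond this, nothing is hovered

int pickExtended(const Vec3& origin, const Vec3& forward,
                 const std::vector<Object>& objects) {
    // 1) A direct hit on realistically-sized colliders wins outright,
    //    which rules out selecting something hidden behind another object.
    int directHit = raycastHit(origin, forward);
    if (directHit >= 0) {
        return directHit;
    }
    int best = -1;
    float bestScore = std::numeric_limits<float>::max();
    for (std::size_t i = 0; i < objects.size(); ++i) {
        float angle = angleToObject(origin, forward, objects[i].center);
        // 2) Minimum-angle threshold: objects far off-axis are never candidates.
        if (angle > kMaxSelectAngle) {
            continue;
        }
        Vec3 d = {objects[i].center.x - origin.x,
                  objects[i].center.y - origin.y,
                  objects[i].center.z - origin.z};
        float distance = std::sqrt(dot(d, d));
        // 3) Weight by distance (or distance * distance to punish far objects
        //    harder), then discount by size and priority so larger or more
        //    important objects score lower, i.e. are easier to select.
        float score = angle * distance / (sizeOf(objects[i]) * priorityOf(objects[i]));
        if (score < bestScore) {
            bestScore = score;
            best = static_cast<int>(i);
        }
    }
    return best;
}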
For my game, I decided to add a minimum angle threshold, multiply the angle by distance, give double weight to blue spheres (the planets owned by the player), and scale the angle based on the sphere's size. The result is that A) the player doesn't need to be pointing directly at an object to select it, B) nearby spheres, larger spheres, and blue spheres are much easier to select, and C) careful selection of smaller and more distant objects is still possible:
In my experience, this works very well!
I highly recommend this method, especially combined with very obvious highlighting when "hovering" over an object. The danger of a "forgiving" selection algorithm is that the player might accidentally select something that they aren't directly pointing at. Using a more intuitive-feeling algorithm is enormously helpful, but you still need to communicate clearly what is being selected.
Let me know if you use the weighted angle algorithm in your game! I'm curious to know if this helped. :)
- E McNeill (@E_McNeill) |
// Frees all of the cached models
int ModelManager::freeAllModels(void)
{
	// Delete every model instance owned by the cache.
	std::map< ModelID, IModel* >::iterator i;
	for(i = modelList.begin(); i != modelList.end(); ++i)
		delete i->second;
	// Drop the now-dangling pointers and reset the bookkeeping.
	modelList.clear();
	modelIdList.clear();
	numModels = 0;
return 0;
} |
package org.smartregister.reveal.application;
import android.content.Intent;
import android.util.Log;
import org.smartregister.Context;
import org.smartregister.CoreLibrary;
import org.smartregister.configurableviews.ConfigurableViewsLibrary;
import org.smartregister.configurableviews.helper.JsonSpecHelper;
import org.smartregister.location.helper.LocationHelper;
import org.smartregister.receiver.SyncStatusBroadcastReceiver;
import org.smartregister.repository.Repository;
import org.smartregister.reveal.activity.LoginActivity;
import org.smartregister.reveal.repository.RevealRepository;
import org.smartregister.reveal.util.Utils;
import org.smartregister.sync.DrishtiSyncScheduler;
import org.smartregister.view.activity.DrishtiApplication;
import org.smartregister.view.receiver.TimeChangedBroadcastReceiver;
import static org.smartregister.util.Log.logError;
import static org.smartregister.util.Log.logInfo;
public class RevealApplication extends DrishtiApplication implements TimeChangedBroadcastReceiver.OnTimeChangedListener {
private static final String TAG = RevealApplication.class.getCanonicalName();
private static JsonSpecHelper jsonSpecHelper;
private String password;
public static synchronized RevealApplication getInstance() {
return (RevealApplication) mInstance;
}
    public static JsonSpecHelper getJsonSpecHelper() {
        return jsonSpecHelper;
    }
@Override
public void onCreate() {
super.onCreate();
mInstance = this;
context = Context.getInstance();
context.updateApplicationContext(getApplicationContext());
// Initialize Modules
CoreLibrary.init(context);
ConfigurableViewsLibrary.init(context, getRepository());
LocationHelper.init(Utils.ALLOWED_LEVELS, Utils.DEFAULT_LOCATION_LEVEL);
try {
Utils.saveLanguage("en");
} catch (Exception e) {
Log.e(TAG, e.getMessage());
}
        jsonSpecHelper = new JsonSpecHelper(this);
}
@Override
public Repository getRepository() {
try {
if (repository == null) {
repository = new RevealRepository(getInstance().getApplicationContext(), context);
}
} catch (UnsatisfiedLinkError e) {
logError("Error on getRepository: " + e);
}
return repository;
}
public String getPassword() {
if (password == null) {
String username = getContext().userService().getAllSharedPreferences().fetchRegisteredANM();
password = getContext().userService().getGroupId(username);
}
return password;
}
@Override
public void logoutCurrentUser() {
        Intent intent = new Intent(getApplicationContext(), LoginActivity.class);
        intent.addCategory(Intent.CATEGORY_HOME);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP);
getApplicationContext().startActivity(intent);
context.userService().logoutSession();
}
public Context getContext() {
return context;
}
protected void cleanUpSyncState() {
try {
DrishtiSyncScheduler.stop(getApplicationContext());
context.allSharedPreferences().saveIsSyncInProgress(false);
} catch (Exception e) {
Log.e(TAG, e.getMessage());
}
}
@Override
public void onTerminate() {
logInfo("Application is terminating. Stopping Sync scheduler and resetting isSyncInProgress setting.");
cleanUpSyncState();
TimeChangedBroadcastReceiver.destroy(this);
SyncStatusBroadcastReceiver.destroy(this);
super.onTerminate();
}
@Override
public void onTimeChanged() {
context.userService().forceRemoteLogin();
logoutCurrentUser();
}
@Override
public void onTimeZoneChanged() {
context.userService().forceRemoteLogin();
logoutCurrentUser();
}
}
|
package com.oath.cyclops.jackson;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import cyclops.container.control.Eval;
import org.junit.Test;
public class EvalTest {
Eval<Integer> some = Eval.now(10);
@Test
public void roundTrip() {
String json = JacksonUtil.serializeToJson(Eval.now(10));
System.out.println("Json " + json);
Eval<Integer> des = JacksonUtil.convertFromJson(json,
Eval.class);
assertThat(des,
equalTo(some));
}
@Test
public void some() {
assertThat(JacksonUtil.serializeToJson(Eval.now(5)),
equalTo("5"));
}
}
|
Electronic shell structure in Ga12 icosahedra and the relation to the bulk forms of gallium.
The electronic structure of known cluster compounds with a cage-like icosahedral Ga(12) centre is studied by first-principles theoretical methods, based on density functional theory. We consider these hollow metalloid nanostructures in the context of the polymorphism of the bulk, and identify a close relation to the α phase of gallium. This previously unrecognised connection is established using the electron localisation function, which reveals the ubiquitous presence of radially-pointing covalent bonds around the Ga(12) centre--analogous to the covalent bonds between buckled deltahedral planes in α-Ga. Furthermore, we find prominent superatom shell structure in these clusters, despite their hollow icosahedral motif and the presence of covalent bonds. The exact nature of the electronic shell structure is contrasted with simple electron shell models based on jellium, and we demonstrate how the interplay between gallium dimerisation, ligand- and crystal-field effects can alter the splitting of the partially filled 1F shell. Finally, in the unique compound where the Ga(12) centre is bridged by six phosphorus ligands, the electronic structure most closely resembles that of δ-Ga and there are no well-defined superatom orbitals. The results of this comprehensive study bring new insights into the nature of chemical bonding in metalloid gallium compounds and the relation to bulk gallium metal, and they may also guide the development of more general models for ligand-protected clusters. |
// Copyright (c) 2018 Baidu, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <map>
#include "scan_node.h"
#include "filter_node.h"
#include "join_node.h"
#include "schema_factory.h"
#include "scalar_fn_call.h"
#include "slot_ref.h"
#include "runtime_state.h"
#include "rocksdb_scan_node.h"
#include "redis_scan_node.h"
namespace baikaldb {
int ScanNode::init(const pb::PlanNode& node) {
int ret = 0;
ret = ExecNode::init(node);
if (ret < 0) {
DB_WARNING("ExecNode::init fail, ret:%d", ret);
return ret;
}
_tuple_id = node.derive_node().scan_node().tuple_id();
_table_id = node.derive_node().scan_node().table_id();
if (node.derive_node().scan_node().has_engine()) {
_engine = node.derive_node().scan_node().engine();
}
return 0;
}
int ScanNode::open(RuntimeState* state) {
int ret = 0;
ret = ExecNode::open(state);
if (ret < 0) {
DB_WARNING_STATE(state, "ExecNode::open fail:%d", ret);
return ret;
}
_tuple_desc = state->get_tuple_desc(_tuple_id);
return 0;
}
void ScanNode::close(RuntimeState* state) {
ExecNode::close(state);
}
ScanNode* ScanNode::create_scan_node(const pb::PlanNode& node) {
if (node.derive_node().scan_node().has_engine()) {
pb::Engine engine = node.derive_node().scan_node().engine();
        switch (engine) {
        case pb::ROCKSDB:
            return new RocksdbScanNode;
        case pb::REDIS:
            return new RedisScanNode;
        default:
            break;
        }
} else {
return new RocksdbScanNode;
}
return nullptr;
}
}
/* vim: set ts=4 sw=4 sts=4 tw=100 */
|