Since revelations that Democratic presidential candidate Hillary Clinton used an unsecured private email server to conduct all of her government business during her time as Secretary of State came to light earlier this year, we've heard every excuse imaginable for the practice. As we've learned in recent weeks that top secret, classified information was sent, received, and stored on the server, the excuses have gotten worse.
First, Clinton claimed during a press conference in March that there was "no classified information."
"I did not email any classified material to anyone on my email. There is no classified material. So I'm certainly well-aware of the classification requirements and did not send classified material," Clinton said during the press conference.
Then the story became that Clinton never sent or received any information that was marked classified.
"I did not send classified material, and I did not receive any material that was marked or designated classified," Clinton said in New Hampshire recently.
Earlier this week, Clinton Press Secretary Brian Fallon tried to ease tensions surrounding a criminal FBI investigation into Clinton's personal email use and mishandling of top secret classified information by claiming she was a "passive recipient of unwitting information that subsequently became classified."
But a new report from Reuters shows that according to basic rules, the information contained on Clinton's private server was classified from the beginning, not after the fact or when team Clinton decided it was a convenient time for it to be so.
In the small fraction of emails made public so far, Reuters has found at least 30 email threads from 2009, representing scores of individual emails, that include what the State Department's own "Classified" stamps now identify as so-called 'foreign government information.' The U.S. government defines this as any information, written or spoken, provided in confidence to U.S. officials by their foreign counterparts.
This sort of information, which the department says Clinton both sent and received in her emails, is the only kind that must be "presumed" classified, in part to protect national security and the integrity of diplomatic interactions, according to U.S. regulations examined by Reuters.
"It's born classified," said J. William Leonard, a former director of the U.S. government's Information Security Oversight Office (ISOO). Leonard was director of ISOO, part of the White House's National Archives and Records Administration, from 2002 until 2008, and worked for both the Bill Clinton and George W. Bush administrations.
"If a foreign minister just told the secretary of state something in confidence, by U.S. rules that is classified at the moment it's in U.S. channels and U.S. possession," he said in a telephone interview, adding that for the State Department to say otherwise was "blowing smoke."
In other words, Clinton knew she was handling classified information on a personal server and didn't care. Further, she shared classified information with her aides through unsecured personal accounts with little regard for protecting national security.
HRC has tried blaming: -GOP -@nytimes -email senders -Obama bureaucracy -phony labeling issue -phony GOP precedent Only fault is hers — Ron Fournier (@ron_fournier) August 21, 2015
If Lady Justice is truly blind, Clinton will be held to the same standard as everyone else when it comes to mishandling top secret information. General David Petraeus has suffered the consequences as have a number of military service members who have improperly handled classified information. There is no doubt she should be indicted. |
package main
import (
"fmt"
"sync"
"time"
)
// getRemoteData simulates a remote fetch by sleeping for the given number of
// milliseconds, then marks this goroutine's work as done on the WaitGroup.
func getRemoteData(ms time.Duration, wg *sync.WaitGroup) {
defer wg.Done()
duration := ms * time.Millisecond
time.Sleep(duration)
fmt.Println("retrieving data in:", duration)
}
func main() {
var wg sync.WaitGroup
wg.Add(4) // one count per goroutine launched below
go getRemoteData(1000, &wg)
go getRemoteData(800, &wg)
go getRemoteData(650, &wg)
go getRemoteData(100, &wg)
wg.Wait() // block until all four goroutines have called Done
fmt.Println("finished getting all the data")
}
|
#![deny(warnings)]
extern crate cotli_helper;
pub mod support;
macro_rules! assert_formation_dps {
($expected:expr, $formation:expr) => {
assert_dps_eq!($expected, $formation.total_dps(&Default::default()));
}
}
macro_rules! assert_dps_eq {
($expected:expr, $dps:expr) => {
assert_eq!($expected, &$dps.to_string());
}
}
mod crusaders;
|
// https://leetcode.com/problems/shuffle-an-array/
#include <vector>
#include <cstdlib>
using namespace std;
class Solution {
public:
Solution(vector<int> nums)
: nums_(nums) , result_(nums)
{
}
vector<int> reset() {
return nums_;
}
// Fisher-Yates shuffle: swap each element with a uniformly random earlier element (or itself).
vector<int> shuffle() {
for (int i = result_.size()-1; i > 0; --i) {
swap(result_[i], result_[rand()%(i+1)]);
}
return result_;
}
private:
vector<int> nums_;
vector<int> result_;
};
|
/**
* @return speech-part pred for verb agreeing with SUBJECT-PHRASE
* and INTERNAL-CONSTRAINTS
*/
@LispMethod(comment = "@return speech-part pred for verb agreeing with SUBJECT-PHRASE\r\nand INTERNAL-CONSTRAINTS")
public static SubLObject verb_pred_for_subject(final SubLObject subject_phrase, SubLObject internal_constraints) {
if (internal_constraints == UNPROVIDED) {
internal_constraints = NIL;
}
internal_constraints = pph_strengthen_verb_constraints(internal_constraints);
if (NIL != pph_utilities.pph_genl_posP(pph_phrase.pph_phrase_category(subject_phrase, UNPROVIDED), $$Pronoun, UNPROVIDED)) {
return verb_pred_for_subject_pronoun(subject_phrase, internal_constraints);
}
return verb_pred_for_subject_int(subject_phrase, internal_constraints);
} |
package mysql
// Category represents a category record.
type Category struct {
Id int `gorm:"not null;primary_key;AUTO_INCREMENT" json:"id"` // auto-increment primary key
Pid int `gorm:"not null;" json:"pid"` // parent category id
Title string `gorm:"not null;" json:"title"` // category name
Intro string `gorm:"not null;" json:"intro"` // description
Icon string `gorm:"not null;" json:"icon"` // category icon
Cnt int `gorm:"not null;" json:"cnt"` // number of document projects under this category
Sort int `gorm:"not null;" json:"sort"` // sort order
Status bool `gorm:"not null;" json:"status"` // category status: true means visible, false means hidden
//PrintBookCount int `orm:"default(0)" json:"print_book_count"`
//WikiCount int `orm:"default(0)" json:"wiki_count"`
//ArticleCount int `orm:"default(0)" json:"article_count"`
}
func (m Category) TableName() string {
return "category"
}
func NewCategory() *Category {
return &Category{}
}
|
//////////////////////////////////////////////////////////////////////////////////////
//
// (C) Daniel Strano and the Qrack contributors 2017-2021. All rights reserved.
//
// This is a multithreaded, universal quantum register simulation, allowing
// (nonphysical) register cloning and direct measurement of probability and
// phase, to leverage what advantages classical emulation of qubits can have.
//
// Licensed under the GNU Lesser General Public License V3.
// See LICENSE.md in the project root or https://www.gnu.org/licenses/lgpl-3.0.en.html
// for details.
#pragma once
#include "qfactory.hpp"
#include <fstream>
#include <iomanip>
#include <sstream>
#include <string>
/* A quick-and-dirty epsilon for clamping floating point values. */
#define QRACK_TEST_EPSILON 0.9
/*
* Default engine type to run the tests with. Global because catch doesn't
* support parameterization.
*/
extern enum Qrack::QInterfaceEngine testEngineType;
extern enum Qrack::QInterfaceEngine testSubEngineType;
extern enum Qrack::QInterfaceEngine testSubSubEngineType;
extern qrack_rand_gen_ptr rng;
extern bool enable_normalization;
extern bool disable_t_injection;
extern bool disable_reactive_separation;
extern bool enable_weak_sampling;
extern bool disable_terminal_measurement;
extern bool use_host_dma;
extern bool disable_hardware_rng;
extern bool async_time;
extern bool sparse;
extern int device_id;
extern bitLenInt max_qubits;
extern bool single_qubit_run;
extern std::string mOutputFileName;
extern std::ofstream mOutputFile;
extern bool isBinaryOutput;
extern int benchmarkSamples;
extern int benchmarkDepth;
extern int benchmarkMaxMagic;
extern int benchmarkShots;
extern int timeout;
extern std::vector<int64_t> devList;
extern bool optimal;
extern bool optimal_single;
/* Declare the stream-to-probability prior to including catch.hpp. */
namespace Qrack {
inline std::ostream& outputPerBitProbs(std::ostream& os, Qrack::QInterfacePtr qftReg);
inline std::ostream& outputProbableResult(std::ostream& os, Qrack::QInterfacePtr qftReg);
inline std::ostream& outputIndependentBits(std::ostream& os, Qrack::QInterfacePtr qftReg);
inline std::ostream& operator<<(std::ostream& os, Qrack::QInterfacePtr qftReg)
{
if (os.flags() & std::ios_base::showpoint) {
os.unsetf(std::ios_base::showpoint);
return outputPerBitProbs(os, qftReg);
}
if (os.flags() & std::ios_base::showbase) {
os.unsetf(std::ios_base::showbase);
return outputIndependentBits(os, qftReg);
}
return outputProbableResult(os, qftReg);
}
inline std::ostream& outputPerBitProbs(std::ostream& os, Qrack::QInterfacePtr qftReg)
{
os << "[\n";
for (int i = qftReg->GetQubitCount() - 1; i >= 0; i--) {
os << "\t " << std::setw(2) << i << "]: " << qftReg->Prob(i) << std::endl;
}
return os;
}
inline std::ostream& outputProbableResult(std::ostream& os, Qrack::QInterfacePtr qftReg)
{
bitCapInt i;
float maxProb = 0;
bitCapInt maxProbIdx = 0;
// Iterate through all possible values of the bit array
for (i = 0; i < qftReg->GetMaxQPower(); i++) {
float prob = (float)qftReg->ProbAll(i);
if (prob > maxProb) {
maxProb = prob;
maxProbIdx = i;
}
// if (prob != ZERO_R1) {
// std::cout<<"(Perm "<<(int)i<<" "<<prob<<std::endl;
// }
}
os << qftReg->GetQubitCount() << "/";
// Print the resulting maximum probability bit pattern.
for (i = qftReg->GetMaxQPower() >> 1UL; i > 0; i >>= 1UL) {
if (i & maxProbIdx) {
os << "1";
} else {
os << "0";
}
}
// And print the probability, for interest.
os << ":" << maxProb;
return os;
}
inline std::ostream& outputIndependentBits(std::ostream& os, Qrack::QInterfacePtr qftReg)
{
os << "" << qftReg->GetQubitCount() << "/";
for (int j = qftReg->GetQubitCount() - 1; j >= 0; j--) {
os << (int)(qftReg->Prob(j) > QRACK_TEST_EPSILON);
}
return os;
}
} // namespace Qrack
#include "catch.hpp"
/*
* A fixture to create a unique QInterface test, of the appropriate type, for
* each executing test case.
*/
class QInterfaceTestFixture {
protected:
Qrack::QInterfacePtr qftReg;
public:
QInterfaceTestFixture();
};
class ProbPattern : public Catch::MatcherBase<Qrack::QInterfacePtr> {
bitLenInt start;
bitLenInt length;
bitCapInt mask;
public:
ProbPattern(bitLenInt s, bitLenInt l, bitCapInt m)
: start(s)
, length(l)
, mask(m)
{
}
virtual bool match(Qrack::QInterfacePtr const& qftReg) const override
{
if (length == 0) {
((ProbPattern*)this)->length = qftReg->GetQubitCount();
}
if (length > sizeof(mask) * 8) {
WARN("requested length " << length << " larger than possible bitmap " << sizeof(mask) * 8);
return false;
}
for (bitLenInt j = 0; j < length; j++) {
/* Consider anything more than a 50% probability as a '1'. */
bool bit = (qftReg->Prob(j + start) > QRACK_TEST_EPSILON);
if (bit == !(mask & (1ULL << j))) {
return false;
}
}
return true;
}
virtual std::string describe() const override
{
std::ostringstream ss;
ss << "matches bit pattern [" << (int)start << "," << start + length << "]: " << (int)length << "/";
for (int j = (length - 1); j >= 0; j--) {
ss << !!((int)(mask & (1ULL << j)));
}
return ss.str();
}
};
inline ProbPattern HasProbability(bitLenInt s, bitLenInt l, bitCapInt m) { return ProbPattern(s, l, m); }
inline ProbPattern HasProbability(bitCapInt m) { return ProbPattern(0, 0, m); }
|
//! This allows multiple apps to write their own flash region.
//!
//! All write requests from userland are checked to ensure that they are only
//! trying to write their own flash space, and not the TBF header either.
//!
//! This driver can handle non page aligned writes.
//!
//! Userland apps should allocate buffers in flash when they are compiled to
//! ensure that there is room to write to. This should be accomplished by
//! declaring `const` buffers.
//!
//! Usage
//! -----
//!
//! ```
//! pub static mut APP_FLASH_BUFFER: [u8; 512] = [0; 512];
//! let app_flash = static_init!(
//! capsules::app_flash_driver::AppFlash<'static>,
//! capsules::app_flash_driver::AppFlash::new(nv_to_page,
//! kernel::Grant::create(), &mut APP_FLASH_BUFFER));
//! ```
use core::cmp;
use kernel::common::cells::{OptionalCell, TakeCell};
use kernel::hil;
use kernel::{AppId, AppSlice, Callback, Driver, Grant, ReturnCode, Shared};
/// Syscall driver number.
pub const DRIVER_NUM: usize = 0x50000;
#[derive(Default)]
pub struct App {
callback: Option<Callback>,
buffer: Option<AppSlice<Shared, u8>>,
pending_command: bool,
flash_address: usize,
}
pub struct AppFlash<'a> {
driver: &'a hil::nonvolatile_storage::NonvolatileStorage,
apps: Grant<App>,
current_app: OptionalCell<AppId>,
buffer: TakeCell<'static, [u8]>,
}
impl AppFlash<'a> {
pub fn new(
driver: &'a hil::nonvolatile_storage::NonvolatileStorage,
grant: Grant<App>,
buffer: &'static mut [u8],
) -> AppFlash<'a> {
AppFlash {
driver: driver,
apps: grant,
current_app: OptionalCell::empty(),
buffer: TakeCell::new(buffer),
}
}
// Check to see if we are doing something. If not, go ahead and do this
// command. If so, this is queued and will be run when the pending command
// completes.
fn enqueue_write(&self, flash_address: usize, appid: AppId) -> ReturnCode {
self.apps
.enter(appid, |app, _| {
// Check that this is a valid range in the app's flash.
let flash_length = app.buffer.as_mut().map_or(0, |app_buffer| app_buffer.len());
let (app_flash_start, app_flash_end) = appid.get_editable_flash_range();
if flash_address < app_flash_start
|| flash_address >= app_flash_end
|| flash_address + flash_length >= app_flash_end
{
return ReturnCode::EINVAL;
}
if self.current_app.is_none() {
self.current_app.set(appid);
app.buffer
.as_mut()
.map_or(ReturnCode::ERESERVE, |app_buffer| {
// Copy contents to internal buffer and write it.
self.buffer.take().map_or(ReturnCode::ERESERVE, |buffer| {
let length = cmp::min(buffer.len(), app_buffer.len());
let d = &mut app_buffer.as_mut()[0..length];
for (i, c) in buffer.as_mut()[0..length].iter_mut().enumerate() {
*c = d[i];
}
self.driver.write(buffer, flash_address, length)
})
})
} else {
// Queue this request for later.
if app.pending_command == true {
ReturnCode::ENOMEM
} else {
app.pending_command = true;
app.flash_address = flash_address;
ReturnCode::SUCCESS
}
}
}).unwrap_or_else(|err| err.into())
}
}
impl hil::nonvolatile_storage::NonvolatileStorageClient for AppFlash<'a> {
fn read_done(&self, _buffer: &'static mut [u8], _length: usize) {}
fn write_done(&self, buffer: &'static mut [u8], _length: usize) {
// Put our write buffer back.
self.buffer.replace(buffer);
// Notify the current application that the command finished.
self.current_app.take().map(|appid| {
let _ = self.apps.enter(appid, |app, _| {
app.callback.map(|mut cb| {
cb.schedule(0, 0, 0);
});
});
});
// Check if there are any pending events.
for cntr in self.apps.iter() {
let started_command = cntr.enter(|app, _| {
if app.pending_command {
app.pending_command = false;
self.current_app.set(app.appid());
let flash_address = app.flash_address;
app.buffer.as_mut().map_or(false, |app_buffer| {
self.buffer.take().map_or(false, |buffer| {
if app_buffer.len() != 512 {
false
} else {
// Copy contents to internal buffer and write it.
let length = cmp::min(buffer.len(), app_buffer.len());
let d = &mut app_buffer.as_mut()[0..length];
for (i, c) in buffer.as_mut()[0..length].iter_mut().enumerate() {
*c = d[i];
}
self.driver.write(buffer, flash_address, length)
== ReturnCode::SUCCESS
}
})
})
} else {
false
}
});
if started_command {
break;
}
}
}
}
impl Driver for AppFlash<'a> {
/// Setup buffer to write from.
///
/// ### `allow_num`
///
/// - `0`: Set write buffer. This entire buffer will be written to flash.
fn allow(
&self,
appid: AppId,
allow_num: usize,
slice: Option<AppSlice<Shared, u8>>,
) -> ReturnCode {
match allow_num {
0 => self
.apps
.enter(appid, |app, _| {
app.buffer = slice;
ReturnCode::SUCCESS
}).unwrap_or_else(|err| err.into()),
_ => ReturnCode::ENOSUPPORT,
}
}
/// Setup callbacks.
///
/// ### `subscribe_num`
///
/// - `0`: Set a write_done callback.
fn subscribe(
&self,
subscribe_num: usize,
callback: Option<Callback>,
app_id: AppId,
) -> ReturnCode {
match subscribe_num {
0 => self
.apps
.enter(app_id, |app, _| {
app.callback = callback;
ReturnCode::SUCCESS
}).unwrap_or_else(|err| err.into()),
_ => ReturnCode::ENOSUPPORT,
}
}
/// App flash control.
///
/// ### `command_num`
///
/// - `0`: Driver check.
/// - `1`: Write the memory from the `allow` buffer to the address in flash.
fn command(&self, command_num: usize, arg1: usize, _: usize, appid: AppId) -> ReturnCode {
match command_num {
0 =>
/* This driver exists. */
{
ReturnCode::SUCCESS
}
// Write to flash from the allowed buffer.
1 => {
let flash_address = arg1;
self.enqueue_write(flash_address, appid)
}
_ => ReturnCode::ENOSUPPORT,
}
}
}
|
/**
* \file
*
* \brief Instance description for TWI4
*
* Copyright (c) 2017 Atmel Corporation, a wholly owned subsidiary of Microchip Technology Inc.
*
* \license_start
*
* \page License
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* \license_stop
*
*/
/* file generated from device description version 2017-07-05T18:00:00Z */
#ifndef _SAMG55_TWI4_INSTANCE_H_
#define _SAMG55_TWI4_INSTANCE_H_
/* ========== Register definition for TWI4 peripheral ========== */
#if (defined(__ASSEMBLER__) || defined(__IAR_SYSTEMS_ASM__))
#define REG_TWI4_CR (0x4001C600) /**< (TWI4) Control Register */
#define REG_TWI4_MMR (0x4001C604) /**< (TWI4) Master Mode Register */
#define REG_TWI4_SMR (0x4001C608) /**< (TWI4) Slave Mode Register */
#define REG_TWI4_IADR (0x4001C60C) /**< (TWI4) Internal Address Register */
#define REG_TWI4_CWGR (0x4001C610) /**< (TWI4) Clock Waveform Generator Register */
#define REG_TWI4_SR (0x4001C620) /**< (TWI4) Status Register */
#define REG_TWI4_IER (0x4001C624) /**< (TWI4) Interrupt Enable Register */
#define REG_TWI4_IDR (0x4001C628) /**< (TWI4) Interrupt Disable Register */
#define REG_TWI4_IMR (0x4001C62C) /**< (TWI4) Interrupt Mask Register */
#define REG_TWI4_RHR (0x4001C630) /**< (TWI4) Receive Holding Register */
#define REG_TWI4_THR (0x4001C634) /**< (TWI4) Transmit Holding Register */
#define REG_TWI4_SMBTR (0x4001C638) /**< (TWI4) SMBus Timing Register */
#define REG_TWI4_ACR (0x4001C640) /**< (TWI4) Alternative Command Register */
#define REG_TWI4_FILTR (0x4001C644) /**< (TWI4) Filter Register */
#define REG_TWI4_SWMR (0x4001C64C) /**< (TWI4) SleepWalking Matching Register */
#define REG_TWI4_WPMR (0x4001C6E4) /**< (TWI4) Write Protection Mode Register */
#define REG_TWI4_WPSR (0x4001C6E8) /**< (TWI4) Write Protection Status Register */
#define REG_TWI4_RPR (0x4001C700) /**< (TWI4) Receive Pointer Register */
#define REG_TWI4_RCR (0x4001C704) /**< (TWI4) Receive Counter Register */
#define REG_TWI4_TPR (0x4001C708) /**< (TWI4) Transmit Pointer Register */
#define REG_TWI4_TCR (0x4001C70C) /**< (TWI4) Transmit Counter Register */
#define REG_TWI4_RNPR (0x4001C710) /**< (TWI4) Receive Next Pointer Register */
#define REG_TWI4_RNCR (0x4001C714) /**< (TWI4) Receive Next Counter Register */
#define REG_TWI4_TNPR (0x4001C718) /**< (TWI4) Transmit Next Pointer Register */
#define REG_TWI4_TNCR (0x4001C71C) /**< (TWI4) Transmit Next Counter Register */
#define REG_TWI4_PTCR (0x4001C720) /**< (TWI4) Transfer Control Register */
#define REG_TWI4_PTSR (0x4001C724) /**< (TWI4) Transfer Status Register */
#else
#define REG_TWI4_CR (*(__O uint32_t*)0x4001C600U) /**< (TWI4) Control Register */
#define REG_TWI4_MMR (*(__IO uint32_t*)0x4001C604U) /**< (TWI4) Master Mode Register */
#define REG_TWI4_SMR (*(__IO uint32_t*)0x4001C608U) /**< (TWI4) Slave Mode Register */
#define REG_TWI4_IADR (*(__IO uint32_t*)0x4001C60CU) /**< (TWI4) Internal Address Register */
#define REG_TWI4_CWGR (*(__IO uint32_t*)0x4001C610U) /**< (TWI4) Clock Waveform Generator Register */
#define REG_TWI4_SR (*(__I uint32_t*)0x4001C620U) /**< (TWI4) Status Register */
#define REG_TWI4_IER (*(__O uint32_t*)0x4001C624U) /**< (TWI4) Interrupt Enable Register */
#define REG_TWI4_IDR (*(__O uint32_t*)0x4001C628U) /**< (TWI4) Interrupt Disable Register */
#define REG_TWI4_IMR (*(__I uint32_t*)0x4001C62CU) /**< (TWI4) Interrupt Mask Register */
#define REG_TWI4_RHR (*(__I uint32_t*)0x4001C630U) /**< (TWI4) Receive Holding Register */
#define REG_TWI4_THR (*(__O uint32_t*)0x4001C634U) /**< (TWI4) Transmit Holding Register */
#define REG_TWI4_SMBTR (*(__IO uint32_t*)0x4001C638U) /**< (TWI4) SMBus Timing Register */
#define REG_TWI4_ACR (*(__IO uint32_t*)0x4001C640U) /**< (TWI4) Alternative Command Register */
#define REG_TWI4_FILTR (*(__IO uint32_t*)0x4001C644U) /**< (TWI4) Filter Register */
#define REG_TWI4_SWMR (*(__IO uint32_t*)0x4001C64CU) /**< (TWI4) SleepWalking Matching Register */
#define REG_TWI4_WPMR (*(__IO uint32_t*)0x4001C6E4U) /**< (TWI4) Write Protection Mode Register */
#define REG_TWI4_WPSR (*(__I uint32_t*)0x4001C6E8U) /**< (TWI4) Write Protection Status Register */
#define REG_TWI4_RPR (*(__IO uint32_t*)0x4001C700U) /**< (TWI4) Receive Pointer Register */
#define REG_TWI4_RCR (*(__IO uint32_t*)0x4001C704U) /**< (TWI4) Receive Counter Register */
#define REG_TWI4_TPR (*(__IO uint32_t*)0x4001C708U) /**< (TWI4) Transmit Pointer Register */
#define REG_TWI4_TCR (*(__IO uint32_t*)0x4001C70CU) /**< (TWI4) Transmit Counter Register */
#define REG_TWI4_RNPR (*(__IO uint32_t*)0x4001C710U) /**< (TWI4) Receive Next Pointer Register */
#define REG_TWI4_RNCR (*(__IO uint32_t*)0x4001C714U) /**< (TWI4) Receive Next Counter Register */
#define REG_TWI4_TNPR (*(__IO uint32_t*)0x4001C718U) /**< (TWI4) Transmit Next Pointer Register */
#define REG_TWI4_TNCR (*(__IO uint32_t*)0x4001C71CU) /**< (TWI4) Transmit Next Counter Register */
#define REG_TWI4_PTCR (*(__O uint32_t*)0x4001C720U) /**< (TWI4) Transfer Control Register */
#define REG_TWI4_PTSR (*(__I uint32_t*)0x4001C724U) /**< (TWI4) Transfer Status Register */
#endif /* (defined(__ASSEMBLER__) || defined(__IAR_SYSTEMS_ASM__)) */
/* ========== Instance Parameter definitions for TWI4 peripheral ========== */
#define TWI4_DMAC_ID_TX 26
#define TWI4_DMAC_ID_RX 12
#endif /* _SAMG55_TWI4_INSTANCE_H_ */
|
// IO data for a device by family and name
pub fn device_iodb(&mut self, family: &str, device: &str) -> &DeviceIOData {
let key = (family.to_string(), device.to_string());
if !self.iodbs.contains_key(&key) {
let io_json_buf = self.read_file(&format!("{}/{}/iodb.json", family, device));
let io = serde_json::from_str(&io_json_buf).unwrap();
self.iodbs.insert(key.clone(), io);
}
self.iodbs.get(&key).unwrap()
} |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.program.model.pcode;
import java.util.ArrayList;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressFactory;
import ghidra.program.model.data.DataType;
import ghidra.program.model.listing.VariableStorage;
import ghidra.util.exception.InvalidInputException;
/**
* Interface for classes that build PcodeOps and Varnodes
*/
public interface PcodeFactory {
/**
* @return Address factory
*/
public AddressFactory getAddressFactory();
/**
* @return pcode data type manager used to convert strings to Ghidra data types
*/
public PcodeDataTypeManager getDataTypeManager();
/**
* Create a new Varnode with the given size and location
*
* @param sz size of the Varnode
* @param addr location of the Varnode
*
* @return a new varnode
*/
public Varnode newVarnode(int sz, Address addr);
/**
* Create a new Varnode with the given size and location.
* Associate the Varnode with a specific reference id so that it can be retrieved,
* using just the id, via getRef();
* @param sz size of the Varnode
* @param addr location of the Varnode
* @param refId is the specific reference id
* @return the new Varnode
*/
public Varnode newVarnode(int sz, Address addr, int refId);
/**
* Create a storage object representing a value split across multiple physical locations.
* The sequence of physical locations are passed in as an array of Varnodes and the storage
* object is returned. The storage is also assigned an Address in the join address space,
* which can be retrieved by calling the getJoinAddress() method. The join Address can
* be used to create a Varnode that represents the logical whole created by concatenating
* the Varnode pieces.
* @param pieces is the array of Varnode pieces to join
* @return the VariableStorage representing the whole
* @throws InvalidInputException if a valid storage object cannot be created
*/
public VariableStorage getJoinStorage(Varnode[] pieces) throws InvalidInputException;
/**
* Get the address (in the "join" space) corresponding to the given multi-piece storage.
* The storage must have been previously registered by a previous call to getJoinStorage().
* If the storage is not multi-piece or was not registered, null is returned.
* @param storage is the multi-piece storage
* @return the corresponding "join" address
*/
public Address getJoinAddress(VariableStorage storage);
/**
* Build a storage object for a particular Varnode
* @param vn is the Varnode
* @return the storage object
* @throws InvalidInputException if valid storage cannot be created
*/
public VariableStorage buildStorage(Varnode vn) throws InvalidInputException;
/**
* Return a Varnode given its reference id, or null if the id is not registered.
* The id must have previously been registered via newVarnode().
* @param refid is the reference id
* @return the matching Varnode or null
*/
public Varnode getRef(int refid);
/**
* Get a PcodeOp given a reference id. The reference id corresponds to the op's
* SequenceNumber.getTime() field. Return null if no op matching the id has been registered
* via newOp().
* @param refid is the reference id
* @return the matching PcodeOp or null
*/
public PcodeOp getOpRef(int refid);
/**
* Get the high symbol matching the given id that has been registered with this object
* @param symbolId is the given id
* @return the matching HighSymbol or null
*/
public HighSymbol getSymbol(long symbolId);
/**
* Mark (or unmark) the given Varnode as an input (to its function)
* @param vn is the given Varnode
* @param val is true if the Varnode should be marked
* @return the altered Varnode, which may not be the same object passed in
*/
public Varnode setInput(Varnode vn, boolean val);
/**
* Mark (or unmark) the given Varnode with the "address tied" property
* @param vn is the given Varnode
* @param val is true if the Varnode should be marked
*/
public void setAddrTied(Varnode vn, boolean val);
/**
* Mark (or unmark) the given Varnode with the "persistent" property
* @param vn is the given Varnode
* @param val is true if the Varnode should be marked
*/
public void setPersistent(Varnode vn, boolean val);
/**
* Mark (or unmark) the given Varnode with the "unaffected" property
* @param vn is the given Varnode
* @param val is true if the Varnode should be marked
*/
public void setUnaffected(Varnode vn, boolean val);
/**
* Mark (or unmark) the given Varnode with the "volatile" property
* @param vn is the given Varnode
* @param val is true if the Varnode should be marked volatile
*/
public void setVolatile(Varnode vn, boolean val);
/**
* Associate a specific merge group with the given Varnode
* @param vn is the given Varnode
* @param val is the merge group
*/
public void setMergeGroup(Varnode vn, short val);
/**
* Attach a data-type to the given Varnode
* @param vn is the given Varnode
* @param type is the data-type
*/
public void setDataType(Varnode vn, DataType type);
/**
* Create a new PcodeOp given its opcode, sequence number, and input and output Varnodes
* @param sq is the sequence number
* @param opc is the opcode
* @param inputs is the array of input Varnodes, which may be empty
* @param output is the output Varnode, which may be null
* @return the new PcodeOp
*/
public PcodeOp newOp(SequenceNumber sq, int opc, ArrayList<Varnode> inputs, Varnode output);
}
|
/**
* Set: Jabba's Palace
* Type: Interrupt
* Subtype: Lost
* Title: Jabba's Palace Sabacc
*/
public class Card6_156 extends AbstractLostInterrupt {
public Card6_156() {
super(Side.DARK, 3, "Jabba's Palace Sabacc", Uniqueness.UNIQUE);
setLore("Jabba has won the service of many of his guards and other henchbeings through games of chance.");
setGameText("Requirements: A gambler, gangster, smuggler or information broker at a Jabba's Palace site. Wild cards (1-6): Passenger Deck and deserts. Clone cards: Aliens and Jabba's Palace sites (gamblers and Jabba may use clone cards as 4's.) Stakes: One character weapon or non-unique alien.");
addIcons(Icon.JABBAS_PALACE);
}
@Override
protected List<PlayInterruptAction> getGameTextTopLevelActions(final String playerId, SwccgGame game, final PhysicalCard self) {
final String opponent = game.getOpponent(playerId);
final Filter characterFilter = Filters.and(Filters.or(Filters.gambler, Filters.gangster, Filters.smuggler, Filters.information_broker), Filters.at(Filters.Jabbas_Palace_site));
// Check condition(s)
if (GameConditions.canPlaySabacc(game)
&& GameConditions.canSpot(game, self, Filters.and(Filters.your(self), characterFilter))) {
final PlayInterruptAction action = new PlayInterruptAction(game, self);
action.setText("Play sabacc");
// Choose target(s)
action.appendTargeting(
new ChooseCardOnTableEffect(action, playerId, "Choose character to play sabacc", Filters.and(Filters.your(self), characterFilter)) {
@Override
protected void cardSelected(final PhysicalCard playersCharacter) {
// After this point do not allow action to be aborted
action.setAllowAbort(false);
action.appendTargeting(
new ChooseCardsOnTableEffect(action, opponent, "Choose character to play sabacc", 0, 1, Filters.and(Filters.opponents(self), characterFilter)) {
@Override
protected void cardsSelected(Collection<PhysicalCard> selectedCards) {
String actionText;
final PhysicalCard opponentsCharacter = selectedCards.size() == 1 ? selectedCards.iterator().next() : null;
if (opponentsCharacter != null) {
action.addAnimationGroup(playersCharacter, opponentsCharacter);
actionText = "Have " + GameUtils.getCardLink(playersCharacter) + " play sabacc against " + GameUtils.getCardLink(opponentsCharacter);
} else {
action.addAnimationGroup(playersCharacter);
actionText = "Have " + GameUtils.getCardLink(playersCharacter) + " play sabacc against an unseen adversary";
}
// Allow response(s)
action.allowResponses(actionText,
new RespondablePlayCardEffect(action) {
@Override
protected void performActionResults(Action targetingAction) {
Filter wildCards = Filters.or(Filters.Passenger_Deck, Filters.desert);
Filter cloneCards = Filters.or(Filters.alien, Filters.Jabbas_Palace_site);
Map<Filterable, Integer> cloneCardPerks = new HashMap<Filterable, Integer>();
cloneCardPerks.put(Filters.or(Filters.gambler, Filters.Jabba), 4);
Filter stakes = Filters.or(Filters.character_weapon, Filters.and(Filters.non_unique, Filters.alien));
// Perform result(s)
action.appendEffect(
new PlaySabaccEffect(action, playersCharacter, opponentsCharacter, wildCards, 1, 6, cloneCards, cloneCardPerks, stakes));
}
}
);
}
}
);
}
}
);
return Collections.singletonList(action);
}
return null;
}
} |
import { Card, CardActions, CardContent, CardMedia, IconButton, makeStyles, Theme, Typography } from '@material-ui/core'
import Icon from '@mdi/react'
import { mdiHeart, mdiHeartOutline } from '@mdi/js'
import { Restaurant } from 'src/types'
import { useLoggedUserContext } from 'src/pages/LoggedUserContext'
type DishCardProps = {
_id: string
imageUrl: string
name: string
restaurant: Restaurant
isFav: boolean
handleFav: (dishId: string) => Promise<void>
className?: string
}
const DishCard = ({ _id, name, imageUrl, className, restaurant, isFav, handleFav }: DishCardProps) => {
const { user } = useLoggedUserContext()
const classes = useStyles()
return (
<Card className={className}>
<CardMedia component="img" image={imageUrl} alt={name} className={classes.cardImage} />
<CardContent className={classes.cardContent}>
<Typography gutterBottom variant="h6" component="h2" className={classes.restaurantName}>
{restaurant.name}
</Typography>
<Typography gutterBottom variant="body1" component="p">
{name}
</Typography>
<Typography gutterBottom variant="subtitle1" component="p" className={classes.restaurantAddress}>
{restaurant.address}
</Typography>
</CardContent>
{user && (
<CardActions>
<IconButton aria-label="add to favorites" onClick={() => handleFav(_id)}>
<Icon path={isFav ? mdiHeart : mdiHeartOutline} size={1} className={classes.favIcon} />
</IconButton>
</CardActions>
)}
</Card>
)
}
const useStyles = makeStyles(({ palette, spacing }: Theme) => ({
cardImage: {
height: spacing(25),
objectFit: 'cover',
},
cardContent: {
height: spacing(20),
},
restaurantName: {
textTransform: 'uppercase',
color: palette.primary.main,
fontSize: spacing(2),
},
restaurantAddress: {
fontSize: spacing(1.5),
},
favIcon: {
color: palette.primary.main,
},
}))
export default DishCard
|
def from_json(klass, json_str):
result = klass(dcm_meta_ecode, json_str)
result.check_valid()
return result |
export { AggregateConversationArgs } from "./AggregateConversationArgs";
export { CreateConversationArgs } from "./CreateConversationArgs";
export { CreateManyConversationArgs } from "./CreateManyConversationArgs";
export { DeleteConversationArgs } from "./DeleteConversationArgs";
export { DeleteManyConversationArgs } from "./DeleteManyConversationArgs";
export { FindFirstConversationArgs } from "./FindFirstConversationArgs";
export { FindManyConversationArgs } from "./FindManyConversationArgs";
export { FindUniqueConversationArgs } from "./FindUniqueConversationArgs";
export { GroupByConversationArgs } from "./GroupByConversationArgs";
export { UpdateConversationArgs } from "./UpdateConversationArgs";
export { UpdateManyConversationArgs } from "./UpdateManyConversationArgs";
export { UpsertConversationArgs } from "./UpsertConversationArgs";
|
/**
* Copyright (c) 2020 Bytedance Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// Format a posthtml node object into a magic HtmlTagObject
import posthtml from 'posthtml';
import { Matcher, StringMatcher, Node } from 'posthtml/types/posthtml';
import { HtmlTagObject } from '@magic-microservices/magic/dist/src/utils/htmlTag';
type Maybe<T> = void | T;
type MaybeArray<T> = T | T[];
function format2MagicTagObj(postHtmlTagObj: Node): HtmlTagObject {
return {
attributes: postHtmlTagObj.attrs as Record<string, string>,
tagName: postHtmlTagObj.tag as string,
innerHTML: postHtmlTagObj.content?.join(''),
};
}
export interface ICustomDOMMatcher {
outputJsonKey: 'styles' | 'scripts';
matcher: MaybeArray<Matcher<StringMatcher, Maybe<Record<string, StringMatcher>>>>;
}
export type IPortalHtmlParserResult = Record<
ICustomDOMMatcher['outputJsonKey'],
HtmlTagObject[]
>;
// Parse & classify assets
function getAsserts(
tree: Node,
result: IPortalHtmlParserResult,
matchers?: ICustomDOMMatcher[],
): void {
const baseRules: ICustomDOMMatcher[] = [
{
outputJsonKey: 'scripts',
matcher: { tag: 'script' },
},
{
outputJsonKey: 'styles',
matcher: { tag: 'link', attrs: { rel: 'stylesheet' } },
},
{
outputJsonKey: 'styles',
matcher: { tag: 'style' },
},
];
[...(matchers || []), ...baseRules].forEach(({ matcher, outputJsonKey }) => {
tree.match(matcher, (node) => {
const magicObj = format2MagicTagObj(node);
if (!result[outputJsonKey]) {
result[outputJsonKey] = [];
}
result[outputJsonKey].push(magicObj);
return node;
});
});
}
export function portalHtmlParser(
html: string,
matchers?: ICustomDOMMatcher[],
): IPortalHtmlParserResult {
const result = {
styles: [],
scripts: [],
};
// Parse the html and classify the assets
posthtml()
.use((tree) => getAsserts(tree, result, matchers))
.process(html, { sync: true });
return result;
}
|
Predicting Interfacial Strengthening Behaviour of Particulate-Reinforced MMC — A Micro-mechanistic Approach
The fracture properties of particulate-reinforced metal matrix composites (MMCs) are influenced by several factors, such as particle size, inter-particle spacing and volume fraction of the reinforcement. In addition, complex microstructural mechanisms, such as precipitation hardening induced by heat treatment processing, affect the fracture toughness of MMCs. Precipitates that are formed at the particle/matrix interface region lead to improvement of the interfacial strength, and hence enhancement of the macroscopic strength properties of the composite material. In this paper, a micro-mechanics model, based on thermodynamic principles, is proposed to determine the fracture strength of the interface at a segregated state in MMCs. This model uses energy considerations to express the fracture toughness of the interface in terms of interfacial critical strain energy release rate and elastic modulus. The interfacial fracture toughness is further expressed as a function of the macroscopic fracture toughness and mechanical properties of the composite, using a toughening mechanism model based on crack deflection and interface cracking. Mechanical testing is also performed to obtain macroscopic data, such as the fracture strength, elastic modulus and fracture toughness of the composite, which are used as input to the model. Based on the experimental data and the analysis, the interfacial strength is determined for SiC particle-reinforced aluminium matrix composites subjected to different heat treatment processing conditions.
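The energy argument sketched in the abstract can be written compactly with the Irwin-type relation between fracture toughness and critical strain energy release rate. The fragment below is an illustrative sketch only; the interfacial symbols (K_IC^int, G_IC^int, E_int) are assumed notation for this note and are not taken from the paper.
% Plane-stress Irwin relation applied to the interface region (notation assumed for illustration):
%   K_IC^int : interfacial fracture toughness
%   G_IC^int : interfacial critical strain energy release rate
%   E_int    : effective elastic modulus of the interface region
\[
K_{IC}^{\mathrm{int}} = \sqrt{E_{\mathrm{int}}\, G_{IC}^{\mathrm{int}}}
\qquad \text{(plane stress; use } E_{\mathrm{int}}/(1-\nu^{2}) \text{ in place of } E_{\mathrm{int}} \text{ for plane strain)}
\]
|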
def match_equals(regex, string, values):
if string is None:
return False
matched = regex.findall(string)
if not matched:
return False
if isinstance(values, compat.basestring) and not isinstance(values, Sequence):
values = (values,)
return matched[0] in values |
// dup is for testing only. It is a recursive copy.
func (m *Map) dup() *Map {
var nm = &Map{
numEnts: m.numEnts,
root: m.root.dup(),
}
return nm
} |
/**
* Ensures that we can not add an element of the wrong type in a sublist.
*/
@Test
@DependsOnMethod("testAddWrongType")
public void testAddWrongTypeToSublist() {
final CheckedArrayList<String> list = new CheckedArrayList<>(String.class);
assertTrue(list.add("One"));
assertTrue(list.add("Two"));
assertTrue(list.add("Three"));
testAddWrongType(list.subList(1, 3));
} |
#include <iostream>
#include <string>
using namespace std;
int getGreatestCommonDivisor(int a, int b)
{
if (b == 0)
{
return a;
}
return getGreatestCommonDivisor(b, a % b);
}
void reduce(int numerator, int denominator)
{
int gcd = getGreatestCommonDivisor(numerator, denominator);
cout << "The fraction " << numerator << "/" << denominator
<< " can be reduced to " << numerator / gcd << "/"
<< denominator / gcd << endl;
}
int main(int argc, char *argv[])
{
string choice, numeratorInput, denominatorInput;
int numerator, denominator;
while (true)
{
cout << "Choose an action: " << endl
<< "1 - Reduce" << endl
<< "2 - Quit" << endl
<< "Input: ";
getline(cin, choice);
switch (choice[0])
{
case '1':
cout << "Enter the numerator as an integer: ";
getline(cin, numeratorInput);
numerator = stoi(numeratorInput);
cout << "Enter the denominator as an integer: ";
getline(cin, denominatorInput);
denominator = stoi(denominatorInput);
reduce(numerator, denominator);
break;
case '2':
return 0;
default:
cout << "Invalid input";
break;
}
}
} |
import warnings
import numpy as np

def check_neighbor_connectivity(gpm, warn=False):
check.gpm_sanity(gpm)
genotypes = np.array(gpm.data.index,dtype=int)
try:
include_mask = gpm.neighbors.loc[:,"include"] == True
except KeyError:
include_mask = np.ones(len(gpm.neighbors),dtype=bool)
non_self_mask = gpm.neighbors.loc[:,"target"] != gpm.neighbors.loc[:,"source"]
non_self_mask = np.logical_and(include_mask,non_self_mask)
source = np.unique(gpm.neighbors.loc[non_self_mask,"source"])
target = np.unique(gpm.neighbors.loc[non_self_mask,"target"])
all_neighbors = np.union1d(source,target)
isolated = np.setdiff1d(genotypes,all_neighbors)
not_a_source = np.setdiff1d(np.setdiff1d(genotypes,source),isolated)
not_targeted = np.setdiff1d(np.setdiff1d(genotypes,target),isolated)
num_not_a_source = not_a_source.shape[0]
num_not_targeted = not_targeted.shape[0]
num_isolated = isolated.shape[0]
w = None
if num_isolated > 0 or num_not_a_source > 0 or num_not_targeted > 0:
w = "Some genotypes do not have non-self neighbors. Genotypes with no\n"
w += "neighbors will be isolated, either never visited or trapping\n"
w += "individuals that start with that genotype. To speed the \n"
w += "calculation, consider removing individuals with no neighbors.\n"
w += "Genotypes that only act as sources but not targets will never\n"
w += "be visited unless they are in the initial population; genotypes\n"
w += "that act as targets but never sources are sinks that trap\n"
w += "individuals who acquire that genotype.\n\n"
if num_isolated > 0:
w += f" number isolated: {num_isolated}\n"
if num_not_targeted > 0:
w += f" number never targeted: {num_not_targeted}\n"
if num_not_a_source > 0:
w += f" number never a source: {num_not_a_source}\n"
w += "\n"
if w is not None and warn:
warnings.warn(w)
return not_a_source, not_targeted, isolated |
/**
* @file lv_spinbox.c
*
*/
/*********************
* INCLUDES
*********************/
#include "lv_spinbox.h"
#if LV_USE_SPINBOX
#include "../../../misc/lv_assert.h"
/*********************
* DEFINES
*********************/
#define MY_CLASS &lv_spinbox_class
/**********************
* TYPEDEFS
**********************/
/**********************
* STATIC PROTOTYPES
**********************/
static void lv_spinbox_constructor(const lv_obj_class_t * class_p, lv_obj_t * obj);
static void lv_spinbox_event(const lv_obj_class_t * class_p, lv_event_t * e);
static void lv_spinbox_updatevalue(lv_obj_t * obj);
/**********************
* STATIC VARIABLES
**********************/
const lv_obj_class_t lv_spinbox_class = {
.constructor_cb = lv_spinbox_constructor,
.event_cb = lv_spinbox_event,
.width_def = LV_DPI_DEF,
.instance_size = sizeof(lv_spinbox_t),
.editable = LV_OBJ_CLASS_EDITABLE_TRUE,
.base_class = &lv_textarea_class
};
/**********************
* MACROS
**********************/
/**********************
* GLOBAL FUNCTIONS
**********************/
lv_obj_t * lv_spinbox_create(lv_obj_t * parent)
{
LV_LOG_INFO("begin");
lv_obj_t * obj = lv_obj_class_create_obj(MY_CLASS, parent);
lv_obj_class_init_obj(obj);
return obj;
}
/*=====================
* Setter functions
*====================*/
/**
* Set spinbox value
* @param obj pointer to spinbox
* @param i value to be set
*/
void lv_spinbox_set_value(lv_obj_t * obj, int32_t i)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
if(i > spinbox->range_max) i = spinbox->range_max;
if(i < spinbox->range_min) i = spinbox->range_min;
spinbox->value = i;
lv_spinbox_updatevalue(obj);
}
/**
* Set spinbox rollover function
* @param spinbox pointer to spinbox
* @param b true or false to enable or disable (default)
*/
void lv_spinbox_set_rollover(lv_obj_t * obj, bool b)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
spinbox->rollover = b;
}
/**
* Set spinbox digit format (digit count and decimal format)
* @param spinbox pointer to spinbox
* @param digit_count number of digits excluding the decimal separator and the sign
* @param separator_position number of digits before the decimal point. If 0, the decimal point is not
* shown
*/
void lv_spinbox_set_digit_format(lv_obj_t * obj, uint8_t digit_count, uint8_t separator_position)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
if(digit_count > LV_SPINBOX_MAX_DIGIT_COUNT) digit_count = LV_SPINBOX_MAX_DIGIT_COUNT;
if(separator_position >= digit_count) separator_position = 0;
if(separator_position > LV_SPINBOX_MAX_DIGIT_COUNT) separator_position = LV_SPINBOX_MAX_DIGIT_COUNT;
if(digit_count < LV_SPINBOX_MAX_DIGIT_COUNT) {
int64_t max_val = lv_pow(10, digit_count);
if(spinbox->range_max > max_val - 1) spinbox->range_max = max_val - 1;
if(spinbox->range_min < - max_val + 1) spinbox->range_min = - max_val + 1;
}
spinbox->digit_count = digit_count;
spinbox->dec_point_pos = separator_position;
lv_spinbox_updatevalue(obj);
}
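/* Illustrative usage sketch (added for clarity; not part of the original LVGL source). It only
 * uses functions defined in this file, and 'parent' is assumed to be an existing object. With
 * digit_count = 5 and separator_position = 2, the value 12345 is rendered as "12.345"; a leading
 * sign is added when range_min < 0 (see lv_spinbox_updatevalue below).
 *
 *   lv_obj_t * sb = lv_spinbox_create(parent);
 *   lv_spinbox_set_digit_format(sb, 5, 2);   // 5 digits total, decimal point after 2 digits
 *   lv_spinbox_set_range(sb, 0, 99999);      // non-negative range, so no sign is shown
 *   lv_spinbox_set_value(sb, 12345);         // the text area now shows "12.345"
 */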
/**
* Set spinbox step
* @param spinbox pointer to spinbox
* @param step steps on increment/decrement
*/
void lv_spinbox_set_step(lv_obj_t * obj, uint32_t step)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
spinbox->step = step;
lv_spinbox_updatevalue(obj);
}
/**
* Set spinbox value range
* @param spinbox pointer to spinbox
* @param range_min minimum value, inclusive
* @param range_max maximum value, inclusive
*/
void lv_spinbox_set_range(lv_obj_t * obj, int32_t range_min, int32_t range_max)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
spinbox->range_max = range_max;
spinbox->range_min = range_min;
if(spinbox->value > spinbox->range_max) spinbox->value = spinbox->range_max;
if(spinbox->value < spinbox->range_min) spinbox->value = spinbox->range_min;
lv_spinbox_updatevalue(obj);
}
/**
* Set cursor position to a specific digit for editing
* @param spinbox pointer to spinbox
* @param pos selected position in spinbox
*/
void lv_spinbox_set_pos(lv_obj_t * obj, uint8_t pos)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
int32_t step_limit;
step_limit = LV_MAX(spinbox->range_max, (spinbox->range_min < 0 ? (-spinbox->range_min) : spinbox->range_min));
int32_t new_step = spinbox->step * lv_pow(10, pos);
if(pos <= 0) spinbox->step = 1;
else if(new_step <= step_limit) spinbox->step = new_step;
lv_spinbox_updatevalue(obj);
}
/**
* Set direction of digit step when clicking an encoder button while in editing mode
* @param spinbox pointer to spinbox
* @param direction the direction (LV_DIR_RIGHT or LV_DIR_LEFT)
*/
void lv_spinbox_set_digit_step_direction(lv_obj_t * obj, lv_dir_t direction)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
spinbox->digit_step_dir = direction;
lv_spinbox_updatevalue(obj);
}
/*=====================
* Getter functions
*====================*/
/**
* Get the spinbox numeral value (user has to convert to float according to its digit format)
* @param obj pointer to spinbox
* @return value integer value of the spinbox
*/
int32_t lv_spinbox_get_value(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
return spinbox->value;
}
/**
* Get the spinbox step value (user has to convert to float according to its digit format)
* @param obj pointer to spinbox
* @return value integer step value of the spinbox
*/
int32_t lv_spinbox_get_step(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
return spinbox->step;
}
/*=====================
* Other functions
*====================*/
/**
* Select next lower digit for editing
* @param obj pointer to spinbox
*/
void lv_spinbox_step_next(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
int32_t new_step = spinbox->step / 10;
if((new_step) > 0)
spinbox->step = new_step;
else
spinbox->step = 1;
lv_spinbox_updatevalue(obj);
}
/**
* Select next higher digit for editing
* @param obj pointer to spinbox
*/
void lv_spinbox_step_prev(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
int32_t step_limit;
step_limit = LV_MAX(spinbox->range_max, (spinbox->range_min < 0 ? (-spinbox->range_min) : spinbox->range_min));
int32_t new_step = spinbox->step * 10;
if(new_step <= step_limit) spinbox->step = new_step;
lv_spinbox_updatevalue(obj);
}
/**
* Get spinbox rollover function status
* @param obj pointer to spinbox
*/
bool lv_spinbox_get_rollover(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
return spinbox->rollover;
}
/**
* Increment spinbox value by one step
* @param obj pointer to spinbox
*/
void lv_spinbox_increment(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
if(spinbox->value + spinbox->step <= spinbox->range_max) {
/*Special mode when zero crossing*/
if((spinbox->value + spinbox->step) > 0 && spinbox->value < 0) spinbox->value = -spinbox->value;
spinbox->value += spinbox->step;
}
else {
/*Rollover?*/
if((spinbox->rollover) && (spinbox->value == spinbox->range_max))
spinbox->value = spinbox->range_min;
else
spinbox->value = spinbox->range_max;
}
lv_spinbox_updatevalue(obj);
}
/**
* Decrement spinbox value by one step
* @param obj pointer to spinbox
*/
void lv_spinbox_decrement(lv_obj_t * obj)
{
LV_ASSERT_OBJ(obj, MY_CLASS);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
if(spinbox->value - spinbox->step >= spinbox->range_min) {
/*Special mode when zero crossing*/
if((spinbox->value - spinbox->step) < 0 && spinbox->value > 0) spinbox->value = -spinbox->value;
spinbox->value -= spinbox->step;
}
else {
/*Rollover?*/
if((spinbox->rollover) && (spinbox->value == spinbox->range_min))
spinbox->value = spinbox->range_max;
else
spinbox->value = spinbox->range_min;
}
lv_spinbox_updatevalue(obj);
}
/**********************
* STATIC FUNCTIONS
**********************/
static void lv_spinbox_constructor(const lv_obj_class_t * class_p, lv_obj_t * obj)
{
LV_UNUSED(class_p);
LV_LOG_TRACE("begin");
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
/*Initialize the allocated 'ext'*/
spinbox->value = 0;
spinbox->dec_point_pos = 0;
spinbox->digit_count = 5;
spinbox->step = 1;
spinbox->range_max = 99999;
spinbox->range_min = -99999;
spinbox->rollover = false;
spinbox->digit_step_dir = LV_DIR_RIGHT;
lv_textarea_set_one_line(obj, true);
lv_textarea_set_cursor_click_pos(obj, true);
lv_spinbox_updatevalue(obj);
LV_LOG_TRACE("Spinbox constructor finished");
}
static void lv_spinbox_event(const lv_obj_class_t * class_p, lv_event_t * e)
{
LV_UNUSED(class_p);
/*Call the ancestor's event handler*/
lv_res_t res = LV_RES_OK;
res = lv_obj_event_base(MY_CLASS, e);
if(res != LV_RES_OK) return;
lv_event_code_t code = lv_event_get_code(e);
lv_obj_t * obj = lv_event_get_target(e);
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
if(code == LV_EVENT_RELEASED) {
/*If released with an ENCODER then move to the next digit*/
lv_indev_t * indev = lv_indev_get_act();
if(lv_indev_get_type(indev) == LV_INDEV_TYPE_ENCODER) {
if(lv_group_get_editing(lv_obj_get_group(obj))) {
if(spinbox->digit_count > 1) {
if(spinbox->digit_step_dir == LV_DIR_RIGHT) {
if(spinbox->step > 1) {
lv_spinbox_step_next(obj);
}
else {
/*Restart from the MSB*/
spinbox->step = lv_pow(10, spinbox->digit_count - 2);
lv_spinbox_step_prev(obj);
}
}
else {
if(spinbox->step < lv_pow(10, spinbox->digit_count - 1)) {
lv_spinbox_step_prev(obj);
}
else {
/*Restart from the LSB*/
spinbox->step = 10;
lv_spinbox_step_next(obj);
}
}
}
}
}
/*The cursor has been positioned to a digit.
* Set `step` accordingly*/
else {
const char * txt = lv_textarea_get_text(obj);
size_t txt_len = strlen(txt);
if(txt[spinbox->ta.cursor.pos] == '.') {
lv_textarea_cursor_left(obj);
}
else if(spinbox->ta.cursor.pos == (uint32_t)txt_len) {
lv_textarea_set_cursor_pos(obj, txt_len - 1);
}
else if(spinbox->ta.cursor.pos == 0 && spinbox->range_min < 0) {
lv_textarea_set_cursor_pos(obj, 1);
}
size_t len = spinbox->digit_count - 1;
uint16_t cp = spinbox->ta.cursor.pos;
if(spinbox->ta.cursor.pos > spinbox->dec_point_pos && spinbox->dec_point_pos != 0) cp--;
uint32_t pos = len - cp;
if(spinbox->range_min < 0) pos++;
spinbox->step = 1;
uint16_t i;
for(i = 0; i < pos; i++) spinbox->step *= 10;
}
}
else if(code == LV_EVENT_KEY) {
lv_indev_type_t indev_type = lv_indev_get_type(lv_indev_get_act());
uint32_t c = *((uint32_t *)lv_event_get_param(e)); /*uint32_t because can be UTF-8*/
if(c == LV_KEY_RIGHT) {
if(indev_type == LV_INDEV_TYPE_ENCODER)
lv_spinbox_increment(obj);
else
lv_spinbox_step_next(obj);
}
else if(c == LV_KEY_LEFT) {
if(indev_type == LV_INDEV_TYPE_ENCODER)
lv_spinbox_decrement(obj);
else
lv_spinbox_step_prev(obj);
}
else if(c == LV_KEY_UP) {
lv_spinbox_increment(obj);
}
else if(c == LV_KEY_DOWN) {
lv_spinbox_decrement(obj);
}
else {
lv_textarea_add_char(obj, c);
}
}
}
static void lv_spinbox_updatevalue(lv_obj_t * obj)
{
lv_spinbox_t * spinbox = (lv_spinbox_t *)obj;
char buf[LV_SPINBOX_MAX_DIGIT_COUNT + 8];
lv_memset_00(buf, sizeof(buf));
char * buf_p = buf;
uint8_t cur_shift_left = 0;
if(spinbox->range_min < 0) { /*A sign is shown only when negative values are possible*/
/*Add the sign*/
(*buf_p) = spinbox->value >= 0 ? '+' : '-';
buf_p++;
}
else {
/*Cursor need shift to left*/
cur_shift_left++;
}
int32_t i;
char digits[LV_SPINBOX_MAX_DIGIT_COUNT + 4];
/*Convert the number to a string (the sign is already handled so always convert a positive number)*/
lv_snprintf(digits, sizeof(digits), "%" LV_PRId32, LV_ABS(spinbox->value));
/*Add leading zeros*/
int lz_cnt = spinbox->digit_count - (int)strlen(digits);
if(lz_cnt > 0) {
for(i = (uint16_t)strlen(digits); i >= 0; i--) {
digits[i + lz_cnt] = digits[i];
}
for(i = 0; i < lz_cnt; i++) {
digits[i] = '0';
}
}
int32_t intDigits;
intDigits = (spinbox->dec_point_pos == 0) ? spinbox->digit_count : spinbox->dec_point_pos;
/*Add the decimal part*/
for(i = 0; i < intDigits && digits[i] != '\0'; i++) {
(*buf_p) = digits[i];
buf_p++;
}
if(spinbox->dec_point_pos != 0) {
/*Insert the decimal point*/
(*buf_p) = '.';
buf_p++;
for(/*Leave i*/; i < spinbox->digit_count && digits[i] != '\0'; i++) {
(*buf_p) = digits[i];
buf_p++;
}
}
/*Refresh the text*/
lv_textarea_set_text(obj, (char *)buf);
/*Set the cursor position*/
int32_t step = spinbox->step;
uint8_t cur_pos = (uint8_t)spinbox->digit_count;
while(step >= 10) {
step /= 10;
cur_pos--;
}
if(cur_pos > intDigits) cur_pos++; /*Skip the decimal point*/
cur_pos -= cur_shift_left;
lv_textarea_set_cursor_pos(obj, cur_pos);
}
#endif /*LV_USE_SPINBOX*/
|
import { getStyle, hexToRgba } from '@coreui/coreui/dist/js/coreui-utilities';
import { CustomTooltips } from '@coreui/coreui-plugin-chartjs-custom-tooltips';
import { NguCarouselConfig, NguCarouselStore, NguCarousel } from '@ngu/carousel';
import { Component, Input, OnInit, ChangeDetectorRef, ChangeDetectionStrategy } from '@angular/core';
import { Observable, interval } from 'rxjs';
import { startWith, take, map } from 'rxjs/operators';
import { slider } from './seller.animation'
import { Product } from '../../model/product.model';
@Component({
templateUrl: 'sellers.component.html',
styles: [`
h1{
min-height: 200px;
background-color: #ccc;
text-align: center;
line-height: 200px;
}
.leftRs {
position: absolute;
margin: auto;
top: 0;
bottom: 0;
width: 50px;
height: 50px;
box-shadow: 1px 2px 10px -1px rgba(0, 0, 0, .3);
border-radius: 999px;
left: 0;
}
.rightRs {
position: absolute;
margin: auto;
top: 0;
bottom: 0;
width: 50px;
height: 50px;
box-shadow: 1px 2px 10px -1px rgba(0, 0, 0, .3);
border-radius: 999px;
right: 0;
}
.product-style {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, "Noto Sans", sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
font-size: 0.9rem;
}
.text-purse {
color: #20853b;
}
`]
})
export class SellersComponent implements OnInit {
imgags: any[] = [
{
name: "Rice 1 Sack",
price: 1200,
photoUrl: "https://ph-test-11.slatic.net/p/74051f2d619ce66bd578ae226fb772dd.jpg"
},
{
name: "Flour 100 gram",
price: 300,
photoUrl: "https://ph-test-11.slatic.net/p/9435dca2a5076718bde6b941e225dc79.jpg"
},
{
name: "Sugar 100 gram",
price: 200,
photoUrl: "https://ph-test-11.slatic.net/p/093c396396e93a2474c7b9c634ce882d.jpg"
},
{
name: "Salt 100 gram",
price: 100,
photoUrl: "https://ph-test-11.slatic.net/p/395754f51ab30a672040e842fb7b8bd5.jpg"
},
{
name: "Cooking Coil 2.84L",
price: 497,
photoUrl: "https://ph-live-01.slatic.net/original/3638b840c19207f8fbb0a02b144534ce.jpg"
},
{
name: "Tucino 150grams",
price: 35,
photoUrl: "https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcQ276bpGthCbdY4SfUg--nnMEpCSkmyXqWGT8xKj4eYnq7ZFcP3"
},
{
name: "Embutido 150grams",
price: 35,
photoUrl: "https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcQ9ZzfzR2z-KWJKG50IyNg_uL8_aqS-RSwQYAWDbKDAX4496Jrp"
}
];
imgags2: any[] = [
{
name: "<NAME>",
price: 270,
photoUrl: "https://ph-live-01.slatic.net/original/2ddf3262631191ea84b5ffb57b5df89c.jpg"
},
{
name: "<NAME>",
price: 75,
photoUrl: "https://ph-test-11.slatic.net/p/d1738f5ea12360ed436b5b7873a1c02c.jpg"
},
{
name: "Premium Strawberry Jam 16oz Set of 2",
price: 549,
photoUrl: "https://ph-test-11.slatic.net/p/12/premium-strawberry-jam-16oz-set-of-2-9369-59592331-e8e552d14ae6c845141d90fcea4747ea-catalog_233.jpg"
},
{
name: "Banana Chips with Honey 500g",
price: 399,
photoUrl: "https://ph-test-11.slatic.net/p/7c79af41b101df0a7d6a1d0805672f80.jpg"
},
{
name: "<NAME> (450grams, 6 pcs)",
price: 250,
photoUrl: "https://ph-test-11.slatic.net/p/7d2ca9f52e0313d312940a7c9dc84a57.jpg"
},
{
name: "Aaleyah's Smooth Peanut Butter 600g (BIG)",
price: 135,
photoUrl: "https://ph-test-11.slatic.net/p/4450e7b49fa4808340e704f68e7cbe81.jpg"
}
// {
// name: "<NAME>",
// price: 35,
// photoUrl: "https://encrypted-tbn0.gstatic.com/images?q=tbn%3AANd9GcQ9ZzfzR2z-KWJKG50IyNg_uL8_aqS-RSwQYAWDbKDAX4496Jrp"
// }
];
@Input() name: string;
public carouselTileItems$: Observable<Product[]>;
public carouselTileItems2$: Observable<Product[]>;
public carouselTileConfig: NguCarouselConfig = {
grid: { xs: 1, sm: 1, md: 1, lg: 5, all: 0 },
speed: 250,
point: {
visible: true
},
touch: true,
loop: true,
interval: { timing: 1500 },
animation: 'lazy'
};
tempData: any[];
constructor(private cdr: ChangeDetectorRef) {}
ngOnInit() {
this.tempData = [];
this.carouselTileItems$ = interval(500).pipe(
startWith(-1),
take(10),
map(val => {
const data = this.imgags;
return data;
})
);
this.carouselTileItems2$ = interval(500).pipe(
startWith(-1),
take(10),
map(val => {
const data = this.imgags2;
return data;
})
);
}
}
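// A minimal sketch (not taken from the original sources) of the shape the imported
// Product model ('../../model/product.model') would need for the Observable<Product[]>
// typing above to hold. It is inferred from the object literals in this component and
// is an assumption, not the actual contents of the model file.
interface ProductSketch {
    name: string;
    price: number;
    photoUrl: string;
}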
/**
* Viewport for diagram panes that is in charge of painting the background
* image or page.
*/
public class Viewport extends JViewport {
/**
* Paints the background.
*
* @param g
* The graphics object to paint the background on.
*/
public void paint(Graphics g) {
if (isPageVisible())
paintBackgroundPages((Graphics2D) g);
else
setBackground(graph.getBackground());
if (getBackgroundImage() != null)
paintBackgroundImage((Graphics2D) g);
setOpaque(!isPageVisible() && getBackgroundImage() == null);
super.paint(g);
setOpaque(true);
}
/**
* Hook for subclassers to paint the background image.
*
* @param g2
* The graphics object to paint the image on.
*/
protected void paintBackgroundImage(Graphics2D g2) {
// Clears the background
if (!isPageVisible()) {
g2.setColor(graph.getBackground());
g2.fillRect(0, 0, graph.getWidth(), graph.getHeight());
}
// Paints the image
AffineTransform tmp = g2.getTransform();
Point offset = getViewPosition();
g2.translate(-offset.x, -offset.y);
g2.scale(graph.getScale(), graph.getScale());
Image img = getBackgroundImage().getImage();
g2.drawImage(img, 0, 0, graph);
g2.setTransform(tmp);
}
/**
* Hook for subclassers to paint the background page(s).
*
* @param g2
* The graphics object to paint the background page(s) on.
*/
protected void paintBackgroundPages(Graphics2D g2) {
// Point2D p = graph.toScreen(new
// Point2D.Double(pageFormat.getWidth(), pageFormat.getHeight()));
// Dimension pSize = graph.getPreferredSize();
// int w = (int) (p.getX() * pageScale);
// int h = (int) (p.getY() * pageScale);
// // int cols = (int) Math.max(Math.ceil((double) (pSize.width - 5)
// /
// // (double) w),
// // 1);
// // int rows = (int) Math.max(Math.ceil((double) (pSize.height -
// 5) /
// // (double)
// // h), 1);
// int cols = (int) Math.max(Math.ceil((double) (pSize.width ) /
// (double) w), 1);
// int rows = (int) Math.max(Math.ceil((double) (pSize.height) /
// (double) h), 1);
// g2.setColor(graph.getHandleColor());
//
// // Draws the pages.
// Point offset = getViewPosition();
// g2.translate(-offset.x, -offset.y);
// g2.fillRect(0, 0, graph.getWidth(), graph.getHeight());
// g2.setColor(Color.darkGray);
// g2.fillRect(3, 3, cols * w, rows * h);
// g2.setColor(getGraph().getBackground());
// g2.fillRect(1, 1, cols * w - 1, rows * h - 1);
// // Draws the pagebreaks.
// Stroke previousStroke = g2.getStroke();
// g2.setStroke(new BasicStroke(1, BasicStroke.CAP_BUTT,
// BasicStroke.JOIN_MITER, 10.0f, new float[] { 1, 2 },
// 0));
// g2.setColor(Color.darkGray);
// for (int i = 1; i < cols; i++)
// g2.drawLine(i * w, 1, i * w, rows * h - 1);
// for (int i = 1; i < rows; i++)
// g2.drawLine(1, i * h, cols * w - 1, i * h);
//
// // Restores the graphics.
// g2.setStroke(previousStroke);
// g2.translate(offset.x, offset.y);
// g2.clipRect(0, 0, cols * w - 1 - offset.x, rows * h - 1 -
// offset.y);
}
}
#!/usr/bin/env python
'''test/sample/scan.py
This is the test case of scanner.
'''
from filt.scanner import BaseScanner
class SampleScanner(BaseScanner):
'''
This is the sample scanner.
'''
def scan(self, target, signature):
'''
Check whether target and signature are the same.
'''
if target == signature:
return (True, 'same')
else:
return (False, 'different')
if __name__ == '__main__':
# run scanner
sample_scanner = SampleScanner()
sample_scanner.run()
/* Generated by Nim Compiler v0.17.0 */
/* (c) 2017 <NAME> */
/* The generated code is subject to the original license. */
/* Compiled for: MacOSX, amd64, clang */
/* Command for C compiler:
clang -c -w -I/usr/local/Cellar/nim/0.17.0/nim/lib -o /Users/pakchoi/Workspace/campfire/src/campfire/nimcache/stdlib_strutils.o /Users/pakchoi/Workspace/campfire/src/campfire/nimcache/stdlib_strutils.c */
#define NIM_NEW_MANGLING_RULES
#define NIM_INTBITS 64
#include "nimbase.h"
#include <stdio.h>
#include <string.h>
#undef linux
#undef near
typedef struct NimStringDesc NimStringDesc;
typedef struct TGenericSeq TGenericSeq;
typedef struct Slice_WC2BEYwxNBO9aTbMc8sJqPg Slice_WC2BEYwxNBO9aTbMc8sJqPg;
typedef struct ValueError_Gi06FkNeykJn7mrqRZYrkA ValueError_Gi06FkNeykJn7mrqRZYrkA;
typedef struct Exception Exception;
typedef struct TNimObject TNimObject;
typedef struct TNimType TNimType;
typedef struct TNimNode TNimNode;
typedef struct Cell_1zcF9cV8XIAtbN8h5HRUB8g Cell_1zcF9cV8XIAtbN8h5HRUB8g;
typedef struct CellSeq_Axo1XVm9aaQueTOldv8le5w CellSeq_Axo1XVm9aaQueTOldv8le5w;
typedef struct GcHeap_1TRH1TZMaVZTnLNcIHuNFQ GcHeap_1TRH1TZMaVZTnLNcIHuNFQ;
typedef struct GcStack_7fytPA5bBsob6See21YMRA GcStack_7fytPA5bBsob6See21YMRA;
typedef struct MemRegion_x81NhDv59b8ercDZ9bi85jyg MemRegion_x81NhDv59b8ercDZ9bi85jyg;
typedef struct SmallChunk_tXn60W2f8h3jgAYdEmy5NQ SmallChunk_tXn60W2f8h3jgAYdEmy5NQ;
typedef struct LLChunk_XsENErzHIZV9bhvyJx56wGw LLChunk_XsENErzHIZV9bhvyJx56wGw;
typedef struct BigChunk_Rv9c70Uhp2TytkX7eH78qEg BigChunk_Rv9c70Uhp2TytkX7eH78qEg;
typedef struct IntSet_EZObFrE3NC9bIb3YMkY9crZA IntSet_EZObFrE3NC9bIb3YMkY9crZA;
typedef struct Trunk_W0r8S0Y3UGke6T9bIUWnnuw Trunk_W0r8S0Y3UGke6T9bIUWnnuw;
typedef struct AvlNode_IaqjtwKhxLEpvDS9bct9blEw AvlNode_IaqjtwKhxLEpvDS9bct9blEw;
typedef struct HeapLinks_PDV1HBZ8CQSQJC9aOBFNRSg HeapLinks_PDV1HBZ8CQSQJC9aOBFNRSg;
typedef struct TY_ujsjpB2O9cjj3uDHsXbnSzg TY_ujsjpB2O9cjj3uDHsXbnSzg;
typedef struct GcStat_0RwLoVBHZPfUAcLczmfQAg GcStat_0RwLoVBHZPfUAcLczmfQAg;
typedef struct CellSet_jG87P0AI9aZtss9ccTYBIISQ CellSet_jG87P0AI9aZtss9ccTYBIISQ;
typedef struct PageDesc_fublkgIY4LG3mT51LU2WHg PageDesc_fublkgIY4LG3mT51LU2WHg;
typedef struct BaseChunk_Sdq7WpT6qAH858F5ZEdG3w BaseChunk_Sdq7WpT6qAH858F5ZEdG3w;
typedef struct FreeCell_u6M5LHprqzkn9axr04yg9bGQ FreeCell_u6M5LHprqzkn9axr04yg9bGQ;
struct TGenericSeq {
NI len;
NI reserved;
};
struct NimStringDesc {
TGenericSeq Sup;
NIM_CHAR data[SEQ_DECL_SIZE];
};
struct Slice_WC2BEYwxNBO9aTbMc8sJqPg {
NI a;
NI b;
};
typedef NU8 FloatFormatMode_pNvEoaqwoxKyoASD11vSgQ;
typedef NIM_CHAR TY_9bPFPkkEEeeNM9bKgiV8Q49cg[6];
typedef NIM_CHAR TY_Kn8ZOVsorvcNLB4isUeMzQ[2501];
typedef NIM_CHAR TY_vB7L5YKNrMfbzoUubCEcYg[3];
typedef NU8 TNimKind_jIBKr1ejBgsfM33Kxw4j7A;
typedef NU8 TNimTypeFlag_v8QUszD1sWlSIWZz7mC4bQ_Set;
typedef N_NIMCALL_PTR(void, TY_ojoeKfW4VYIm36I9cpDTQIg) (void* p, NI op);
typedef N_NIMCALL_PTR(void*, TY_WSm2xU5ARYv9aAR4l0z9c9auQ) (void* p);
struct TNimType {
NI size;
TNimKind_jIBKr1ejBgsfM33Kxw4j7A kind;
TNimTypeFlag_v8QUszD1sWlSIWZz7mC4bQ_Set flags;
TNimType* base;
TNimNode* node;
void* finalizer;
TY_ojoeKfW4VYIm36I9cpDTQIg marker;
TY_WSm2xU5ARYv9aAR4l0z9c9auQ deepcopy;
};
struct TNimObject {
TNimType* m_type;
};
struct Exception {
TNimObject Sup;
Exception* parent;
NCSTRING name;
NimStringDesc* message;
NimStringDesc* trace;
};
struct ValueError_Gi06FkNeykJn7mrqRZYrkA {
Exception Sup;
};
typedef NU8 TNimNodeKind_unfNsxrcATrufDZmpBq4HQ;
struct TNimNode {
TNimNodeKind_unfNsxrcATrufDZmpBq4HQ kind;
NI offset;
TNimType* typ;
NCSTRING name;
NI len;
TNimNode** sons;
};
struct Cell_1zcF9cV8XIAtbN8h5HRUB8g {
NI refcount;
TNimType* typ;
};
struct GcStack_7fytPA5bBsob6See21YMRA {
void* bottom;
};
struct CellSeq_Axo1XVm9aaQueTOldv8le5w {
NI len;
NI cap;
Cell_1zcF9cV8XIAtbN8h5HRUB8g** d;
};
typedef SmallChunk_tXn60W2f8h3jgAYdEmy5NQ* TY_SiRwrEKZdLgxqz9a9aoVBglg[512];
typedef Trunk_W0r8S0Y3UGke6T9bIUWnnuw* TY_lh2A89ahMmYg9bCmpVaplLbA[256];
struct IntSet_EZObFrE3NC9bIb3YMkY9crZA {
TY_lh2A89ahMmYg9bCmpVaplLbA data;
};
typedef AvlNode_IaqjtwKhxLEpvDS9bct9blEw* TY_0aOLqZchNi8nWtMTi8ND8w[2];
struct AvlNode_IaqjtwKhxLEpvDS9bct9blEw {
TY_0aOLqZchNi8nWtMTi8ND8w link;
NI key;
NI upperBound;
NI level;
};
struct TY_ujsjpB2O9cjj3uDHsXbnSzg {
BigChunk_Rv9c70Uhp2TytkX7eH78qEg* Field0;
NI Field1;
};
typedef TY_ujsjpB2O9cjj3uDHsXbnSzg TY_LzOv2eCDGiceMKQstCLmhw[30];
struct HeapLinks_PDV1HBZ8CQSQJC9aOBFNRSg {
NI len;
TY_LzOv2eCDGiceMKQstCLmhw chunks;
HeapLinks_PDV1HBZ8CQSQJC9aOBFNRSg* next;
};
struct MemRegion_x81NhDv59b8ercDZ9bi85jyg {
NI minLargeObj;
NI maxLargeObj;
TY_SiRwrEKZdLgxqz9a9aoVBglg freeSmallChunks;
LLChunk_XsENErzHIZV9bhvyJx56wGw* llmem;
NI currMem;
NI maxMem;
NI freeMem;
NI lastSize;
BigChunk_Rv9c70Uhp2TytkX7eH78qEg* freeChunksList;
IntSet_EZObFrE3NC9bIb3YMkY9crZA chunkStarts;
AvlNode_IaqjtwKhxLEpvDS9bct9blEw* root;
AvlNode_IaqjtwKhxLEpvDS9bct9blEw* deleted;
AvlNode_IaqjtwKhxLEpvDS9bct9blEw* last;
AvlNode_IaqjtwKhxLEpvDS9bct9blEw* freeAvlNodes;
NIM_BOOL locked;
NIM_BOOL blockChunkSizeIncrease;
NI nextChunkSize;
AvlNode_IaqjtwKhxLEpvDS9bct9blEw bottomData;
HeapLinks_PDV1HBZ8CQSQJC9aOBFNRSg heapLinks;
};
struct GcStat_0RwLoVBHZPfUAcLczmfQAg {
NI stackScans;
NI cycleCollections;
NI maxThreshold;
NI maxStackSize;
NI maxStackCells;
NI cycleTableSize;
NI64 maxPause;
};
struct CellSet_jG87P0AI9aZtss9ccTYBIISQ {
NI counter;
NI max;
PageDesc_fublkgIY4LG3mT51LU2WHg* head;
PageDesc_fublkgIY4LG3mT51LU2WHg** data;
};
struct GcHeap_1TRH1TZMaVZTnLNcIHuNFQ {
GcStack_7fytPA5bBsob6See21YMRA stack;
NI cycleThreshold;
CellSeq_Axo1XVm9aaQueTOldv8le5w zct;
CellSeq_Axo1XVm9aaQueTOldv8le5w decStack;
CellSeq_Axo1XVm9aaQueTOldv8le5w tempStack;
NI recGcLock;
MemRegion_x81NhDv59b8ercDZ9bi85jyg region;
GcStat_0RwLoVBHZPfUAcLczmfQAg stat;
CellSet_jG87P0AI9aZtss9ccTYBIISQ marked;
CellSeq_Axo1XVm9aaQueTOldv8le5w additionalRoots;
};
typedef NU8 TY_nmiMWKVIe46vacnhAFrQvw_Set[32];
struct BaseChunk_Sdq7WpT6qAH858F5ZEdG3w {
NI prevSize;
NI size;
};
struct SmallChunk_tXn60W2f8h3jgAYdEmy5NQ {
BaseChunk_Sdq7WpT6qAH858F5ZEdG3w Sup;
SmallChunk_tXn60W2f8h3jgAYdEmy5NQ* next;
SmallChunk_tXn60W2f8h3jgAYdEmy5NQ* prev;
FreeCell_u6M5LHprqzkn9axr04yg9bGQ* freeList;
NI free;
NI acc;
NF data;
};
struct LLChunk_XsENErzHIZV9bhvyJx56wGw {
NI size;
NI acc;
LLChunk_XsENErzHIZV9bhvyJx56wGw* next;
};
struct BigChunk_Rv9c70Uhp2TytkX7eH78qEg {
BaseChunk_Sdq7WpT6qAH858F5ZEdG3w Sup;
BigChunk_Rv9c70Uhp2TytkX7eH78qEg* next;
BigChunk_Rv9c70Uhp2TytkX7eH78qEg* prev;
NF data;
};
typedef NI TY_9a8QARi5WsUggNU9bom7kzTQ[8];
struct Trunk_W0r8S0Y3UGke6T9bIUWnnuw {
Trunk_W0r8S0Y3UGke6T9bIUWnnuw* next;
NI key;
TY_9a8QARi5WsUggNU9bom7kzTQ bits;
};
struct PageDesc_fublkgIY4LG3mT51LU2WHg {
PageDesc_fublkgIY4LG3mT51LU2WHg* next;
NI key;
TY_9a8QARi5WsUggNU9bom7kzTQ bits;
};
struct FreeCell_u6M5LHprqzkn9axr04yg9bGQ {
FreeCell_u6M5LHprqzkn9axr04yg9bGQ* next;
NI zeroField;
};
N_NIMCALL(void, reverse_LoixoqZetR6FfezoPedx8w)(NimStringDesc** a, NI aLen_0);
N_NIMCALL(void, reverse_XQiN4wExsmIg8NFBmG3ObA)(NimStringDesc** a, NI aLen_0, NI first, NI last);
static N_INLINE(NIM_BOOL, contains_I9cy9aN2znlBRynMcXN4pBGgstrutils)(NIM_CHAR* a, NI aLen_0, NIM_CHAR item);
static N_INLINE(NI, find_b3HPX1XboPhUmnxkTjazFQstrutils)(NIM_CHAR* a, NI aLen_0, NIM_CHAR item);
N_NOINLINE(void, raiseIndexError)(void);
static N_INLINE(NI, addInt)(NI a, NI b);
N_NOINLINE(void, raiseOverflow)(void);
static N_INLINE(void, nimFrame)(TFrame* s);
N_NOINLINE(void, stackOverflow_II46IjNZztN9bmbxUD8dt8g)(void);
static N_INLINE(void, popFrame)(void);
static N_INLINE(NIM_BOOL, contains_tKnjuJQDI4zGjoGUKWyD2wstrutils)(Slice_WC2BEYwxNBO9aTbMc8sJqPg s, NI value);
N_NIMCALL(NF, round_FL9bhksfuQsfLDCxRHuknsg)(NF x, NI places);
static N_INLINE(void, stareq__7kHiltrvRlcg6wSYR3CxAwstrutils)(NF* x, NF y);
static N_INLINE(void, pluseq__7kHiltrvRlcg6wSYR3CxAw_2strutils)(NF* x, NF y);
N_NIMCALL(NimStringDesc*, nsuformatBiggestFloat)(NF f, FloatFormatMode_pNvEoaqwoxKyoASD11vSgQ format, NI precision, NIM_CHAR decimalSep);
N_NIMCALL(NimStringDesc*, mnewString)(NI len);
N_NIMCALL(NimStringDesc*, mnewString)(NI len);
N_NIMCALL(NimStringDesc*, nimIntToStr)(NI x);
static N_INLINE(void, appendChar)(NimStringDesc* dest, NIM_CHAR c);
static N_INLINE(void, appendString)(NimStringDesc* dest, NimStringDesc* src);
static N_INLINE(void, copyMem_E1xtACub5WcDa3vbrIXbwgsystem)(void* dest, void* source, NI size);
N_NIMCALL(NimStringDesc*, rawNewString)(NI space);
N_NIMCALL(NI, npuParseInt)(NimStringDesc* s, NI* number, NI start);
N_NIMCALL(void*, newObj)(TNimType* typ, NI size);
static N_INLINE(void, asgnRefNoCycle)(void** dest, void* src);
static N_INLINE(Cell_1zcF9cV8XIAtbN8h5HRUB8g*, usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem)(void* usr);
static N_INLINE(void, rtlAddZCT_MV4BBk6J1qu70IbBxwEn4w_2system)(Cell_1zcF9cV8XIAtbN8h5HRUB8g* c);
N_NOINLINE(void, addZCT_fCDI7oO1NNVXXURtxSzsRw)(CellSeq_Axo1XVm9aaQueTOldv8le5w* s, Cell_1zcF9cV8XIAtbN8h5HRUB8g* c);
static N_INLINE(void, asgnRef)(void** dest, void* src);
static N_INLINE(void, incRef_9cAA5YuQAAC3MVbnGeV86swsystem)(Cell_1zcF9cV8XIAtbN8h5HRUB8g* c);
static N_INLINE(void, decRef_MV4BBk6J1qu70IbBxwEn4wsystem)(Cell_1zcF9cV8XIAtbN8h5HRUB8g* c);
N_NIMCALL(void, raiseException)(Exception* e, NCSTRING ename);
static N_INLINE(NI, chckRange)(NI i, NI a, NI b);
N_NOINLINE(void, raiseRangeError)(NI64 val);
static N_INLINE(NI, subInt)(NI a, NI b);
N_NIMCALL(NIM_CHAR, nsuToLowerAsciiChar)(NIM_CHAR c);
N_NIMCALL(NimStringDesc*, rawNewString)(NI cap);
N_NIMCALL(void, nsuAddf)(NimStringDesc** s, NimStringDesc* formatstr, NimStringDesc** a, NI aLen_0);
N_NOINLINE(void, invalidFormatString_61EJWW6vRISEo9a8gt0tusw)(void);
N_NIMCALL(NimStringDesc*, copyStringRC1)(NimStringDesc* src);
static N_INLINE(void, nimGCunrefNoCycle)(void* p);
N_NIMCALL(NimStringDesc*, resizeString)(NimStringDesc* dest, NI addlen);
N_NIMCALL(NimStringDesc*, addChar)(NimStringDesc* s, NIM_CHAR c);
N_NIMCALL(NI, findNormalized_SW1VCMDsxPTtzxnYrf3N6w)(NimStringDesc* x, NimStringDesc** inArray, NI inArrayLen_0);
N_NIMCALL(NI, nsuCmpIgnoreStyle)(NimStringDesc* a, NimStringDesc* b);
N_NIMCALL(NimStringDesc*, copyStrLast)(NimStringDesc* s, NI start, NI last);
N_NIMCALL(NimStringDesc*, copyStrLast)(NimStringDesc* s, NI first, NI last);
N_NIMCALL(void, failedAssertImpl_aDmpBTs9cPuXp0Mp9cfiNeyA)(NimStringDesc* msg);
N_NIMCALL(NimStringDesc*, copyString)(NimStringDesc* src);
extern TFrame* framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw;
extern TNimType NTI_yCEN9anxCD6mzBxGjuaRBdg_;
extern TNimType NTI_Gi06FkNeykJn7mrqRZYrkA_;
extern GcHeap_1TRH1TZMaVZTnLNcIHuNFQ gch_IcYaEuuWivYAS86vFMTS3Q;
NIM_CONST TY_vB7L5YKNrMfbzoUubCEcYg floatFormatToChar_WVISPus3ZqCVSuP9bzn9cVxw = {103,
102,
101}
;
STRING_LITERAL(TM_JGc9b9bh2D3nTdUR7TGyq8aA_4, "invalid integer: ", 17);
STRING_LITERAL(TM_JGc9b9bh2D3nTdUR7TGyq8aA_11, "invalid format string", 21);
STRING_LITERAL(TM_JGc9b9bh2D3nTdUR7TGyq8aA_12, "len(a) == L string modified while iterating over it", 51);
STRING_LITERAL(TM_JGc9b9bh2D3nTdUR7TGyq8aA_13, "", 0);
static N_INLINE(NI, addInt)(NI a, NI b) {
NI result;
{ result = (NI)0;
result = (NI)((NU64)(a) + (NU64)(b));
{
NIM_BOOL T3_;
T3_ = (NIM_BOOL)0;
T3_ = (((NI) 0) <= (NI)(result ^ a));
if (T3_) goto LA4_;
T3_ = (((NI) 0) <= (NI)(result ^ b));
LA4_: ;
if (!T3_) goto LA5_;
goto BeforeRet_;
}
LA5_: ;
raiseOverflow();
}BeforeRet_: ;
return result;
}
static N_INLINE(void, nimFrame)(TFrame* s) {
NI T1_;
T1_ = (NI)0;
{
if (!(framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw == NIM_NIL)) goto LA4_;
T1_ = ((NI) 0);
}
goto LA2_;
LA4_: ;
{
T1_ = ((NI) ((NI16)((*framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw).calldepth + ((NI16) 1))));
}
LA2_: ;
(*s).calldepth = ((NI16) (T1_));
(*s).prev = framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw;
framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw = s;
{
if (!((*s).calldepth == ((NI16) 2000))) goto LA9_;
stackOverflow_II46IjNZztN9bmbxUD8dt8g();
}
LA9_: ;
}
static N_INLINE(void, popFrame)(void) {
framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw = (*framePtr_HRfVMH3jYeBJz6Q6X9b6Ptw).prev;
}
static N_INLINE(NI, find_b3HPX1XboPhUmnxkTjazFQstrutils)(NIM_CHAR* a, NI aLen_0, NIM_CHAR item) {
NI result;
nimfr_("find", "system.nim")
{ result = (NI)0;
{
NIM_CHAR i;
NI i_2;
i = (NIM_CHAR)0;
nimln_(2052, "system.nim");
i_2 = ((NI) 0);
{
nimln_(2053, "system.nim");
while (1) {
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_2;
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_3;
if (!(i_2 < aLen_0)) goto LA3;
nimln_(2054, "system.nim");
if ((NU)(i_2) >= (NU)(aLen_0)) raiseIndexError();
i = a[i_2];
nimln_(2286, "system.nim");
{
if (!((NU8)(i) == (NU8)(item))) goto LA6_;
goto BeforeRet_;
}
LA6_: ;
nimln_(2287, "system.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_2 = addInt(result, ((NI) 1));
result = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_2);
nimln_(2055, "system.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_3 = addInt(i_2, ((NI) 1));
i_2 = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_3);
} LA3: ;
}
}
nimln_(2288, "system.nim");
result = ((NI) -1);
}BeforeRet_: ;
popFrame();
return result;
}
static N_INLINE(NIM_BOOL, contains_I9cy9aN2znlBRynMcXN4pBGgstrutils)(NIM_CHAR* a, NI aLen_0, NIM_CHAR item) {
NIM_BOOL result;
NI T1_;
nimfr_("contains", "system.nim")
{ result = (NIM_BOOL)0;
nimln_(2293, "system.nim");
T1_ = (NI)0;
T1_ = find_b3HPX1XboPhUmnxkTjazFQstrutils(a, aLen_0, item);
result = (((NI) 0) <= T1_);
goto BeforeRet_;
}BeforeRet_: ;
popFrame();
return result;
}
static N_INLINE(NIM_BOOL, contains_tKnjuJQDI4zGjoGUKWyD2wstrutils)(Slice_WC2BEYwxNBO9aTbMc8sJqPg s, NI value) {
NIM_BOOL result;
NIM_BOOL T1_;
nimfr_("contains", "system.nim")
result = (NIM_BOOL)0;
nimln_(1146, "system.nim");
T1_ = (NIM_BOOL)0;
T1_ = (((NI) (s.a)) <= ((NI) (value)));
if (!(T1_)) goto LA2_;
T1_ = (((NI) (value)) <= ((NI) (s.b)));
LA2_: ;
result = T1_;
popFrame();
return result;
}
static N_INLINE(void, stareq__7kHiltrvRlcg6wSYR3CxAwstrutils)(NF* x, NF y) {
nimfr_("*=", "system.nim")
nimln_(3485, "system.nim");
(*x) = ((NF)((*x)) * (NF)(y));
popFrame();
}
static N_INLINE(void, pluseq__7kHiltrvRlcg6wSYR3CxAw_2strutils)(NF* x, NF y) {
nimfr_("+=", "system.nim")
nimln_(3475, "system.nim");
(*x) = ((NF)((*x)) + (NF)(y));
popFrame();
}
N_NIMCALL(NimStringDesc*, nsuformatBiggestFloat)(NF f, FloatFormatMode_pNvEoaqwoxKyoASD11vSgQ format, NI precision, NIM_CHAR decimalSep) {
NimStringDesc* result;
TY_9bPFPkkEEeeNM9bKgiV8Q49cg frmtstr;
TY_Kn8ZOVsorvcNLB4isUeMzQ buf;
int L;
nimfr_("formatBiggestFloat", "strutils.nim")
result = (NimStringDesc*)0;
L = (int)0;
frmtstr[(((NI) 0))- 0] = 37;
{
if (!(((NI) 0) < ((NI) (precision)))) goto LA3_;
frmtstr[(((NI) 1))- 0] = 35;
frmtstr[(((NI) 2))- 0] = 46;
frmtstr[(((NI) 3))- 0] = 42;
frmtstr[(((NI) 4))- 0] = floatFormatToChar_WVISPus3ZqCVSuP9bzn9cVxw[(format)- 0];
frmtstr[(((NI) 5))- 0] = 0;
L = sprintf(((NCSTRING) (buf)), ((NCSTRING) (frmtstr)), precision, f);
}
goto LA1_;
LA3_: ;
{
frmtstr[(((NI) 1))- 0] = floatFormatToChar_WVISPus3ZqCVSuP9bzn9cVxw[(format)- 0];
frmtstr[(((NI) 2))- 0] = 0;
L = sprintf(((NCSTRING) (buf)), ((NCSTRING) (frmtstr)), f);
}
LA1_: ;
result = mnewString(((NI) (L)));
{
int i;
int i_2;
i = (int)0;
i_2 = ((int) 0);
{
while (1) {
if (!(i_2 < L)) goto LA8;
i = i_2;
{
if (!(((NU8)(buf[(i)- 0])) == ((NU8)(46)) || ((NU8)(buf[(i)- 0])) == ((NU8)(44)))) goto LA11_;
result->data[i] = decimalSep;
}
goto LA9_;
LA11_: ;
{
result->data[i] = buf[(i)- 0];
}
LA9_: ;
i_2 += ((NI) 1);
} LA8: ;
}
}
popFrame();
return result;
}
N_NIMCALL(NimStringDesc*, nsuformatFloat)(NF f, FloatFormatMode_pNvEoaqwoxKyoASD11vSgQ format, NI precision, NIM_CHAR decimalSep) {
NimStringDesc* result;
nimfr_("formatFloat", "strutils.nim")
result = (NimStringDesc*)0;
result = nsuformatBiggestFloat(f, format, precision, decimalSep);
popFrame();
return result;
}
static N_INLINE(void, appendChar)(NimStringDesc* dest, NIM_CHAR c) {
(*dest).data[((*dest).Sup.len)- 0] = c;
(*dest).data[((NI)((*dest).Sup.len + ((NI) 1)))- 0] = 0;
(*dest).Sup.len += ((NI) 1);
}
static N_INLINE(void, copyMem_E1xtACub5WcDa3vbrIXbwgsystem)(void* dest, void* source, NI size) {
void* T1_;
T1_ = (void*)0;
T1_ = memcpy(dest, source, ((size_t) (size)));
}
static N_INLINE(void, appendString)(NimStringDesc* dest, NimStringDesc* src) {
copyMem_E1xtACub5WcDa3vbrIXbwgsystem(((void*) ((&(*dest).data[((*dest).Sup.len)- 0]))), ((void*) ((*src).data)), ((NI) ((NI)((*src).Sup.len + ((NI) 1)))));
(*dest).Sup.len += (*src).Sup.len;
}
N_NIMCALL(NimStringDesc*, nsuIntToStr)(NI x, NI minchars) {
NimStringDesc* result;
nimfr_("intToStr", "strutils.nim")
result = (NimStringDesc*)0;
result = nimIntToStr((x > 0? (x) : -(x)));
{
NI i;
NI colontmp_;
NI res;
i = (NI)0;
colontmp_ = (NI)0;
colontmp_ = (NI)(((NI) (minchars)) - (result ? result->Sup.len : 0));
res = ((NI) 1);
{
while (1) {
NimStringDesc* T4_;
if (!(res <= colontmp_)) goto LA3;
i = res;
T4_ = (NimStringDesc*)0;
T4_ = rawNewString(result->Sup.len + 1);
appendChar(T4_, 48);
appendString(T4_, result);
result = T4_;
res += ((NI) 1);
} LA3: ;
}
}
{
NimStringDesc* T9_;
if (!(x < ((NI) 0))) goto LA7_;
T9_ = (NimStringDesc*)0;
T9_ = rawNewString(result->Sup.len + 1);
appendChar(T9_, 45);
appendString(T9_, result);
result = T9_;
}
LA7_: ;
popFrame();
return result;
}
N_NIMCALL(NimStringDesc*, nsuRepeatChar)(NIM_CHAR c, NI count) {
NimStringDesc* result;
nimfr_("repeat", "strutils.nim")
result = (NimStringDesc*)0;
result = mnewString(count);
{
NI i;
NI colontmp_;
NI res;
i = (NI)0;
colontmp_ = (NI)0;
colontmp_ = (NI)(((NI) (count)) - ((NI) 1));
res = ((NI) 0);
{
while (1) {
if (!(res <= ((NI) (colontmp_)))) goto LA3;
i = ((NI) (res));
result->data[i] = c;
res += ((NI) 1);
} LA3: ;
}
}
popFrame();
return result;
}
static N_INLINE(Cell_1zcF9cV8XIAtbN8h5HRUB8g*, usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem)(void* usr) {
Cell_1zcF9cV8XIAtbN8h5HRUB8g* result;
nimfr_("usrToCell", "gc.nim")
result = (Cell_1zcF9cV8XIAtbN8h5HRUB8g*)0;
nimln_(138, "gc.nim");
result = ((Cell_1zcF9cV8XIAtbN8h5HRUB8g*) ((NI)((NU64)(((NI) (usr))) - (NU64)(((NI)sizeof(Cell_1zcF9cV8XIAtbN8h5HRUB8g))))));
popFrame();
return result;
}
static N_INLINE(void, rtlAddZCT_MV4BBk6J1qu70IbBxwEn4w_2system)(Cell_1zcF9cV8XIAtbN8h5HRUB8g* c) {
nimfr_("rtlAddZCT", "gc.nim")
nimln_(216, "gc.nim");
addZCT_fCDI7oO1NNVXXURtxSzsRw((&gch_IcYaEuuWivYAS86vFMTS3Q.zct), c);
popFrame();
}
static N_INLINE(void, asgnRefNoCycle)(void** dest, void* src) {
nimfr_("asgnRefNoCycle", "gc.nim")
nimln_(288, "gc.nim");
{
Cell_1zcF9cV8XIAtbN8h5HRUB8g* c;
nimln_(363, "system.nim");
nimln_(288, "gc.nim");
if (!!((src == NIM_NIL))) goto LA3_;
nimln_(289, "gc.nim");
c = usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem(src);
nimln_(290, "gc.nim");
(*c).refcount += ((NI) 8);
}
LA3_: ;
nimln_(291, "gc.nim");
{
Cell_1zcF9cV8XIAtbN8h5HRUB8g* c_2;
nimln_(363, "system.nim");
nimln_(291, "gc.nim");
if (!!(((*dest) == NIM_NIL))) goto LA7_;
nimln_(292, "gc.nim");
c_2 = usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem((*dest));
nimln_(293, "gc.nim");
{
(*c_2).refcount -= ((NI) 8);
if (!((NU64)((*c_2).refcount) < (NU64)(((NI) 8)))) goto LA11_;
nimln_(294, "gc.nim");
rtlAddZCT_MV4BBk6J1qu70IbBxwEn4w_2system(c_2);
}
LA11_: ;
}
LA7_: ;
nimln_(295, "gc.nim");
(*dest) = src;
popFrame();
}
static N_INLINE(void, incRef_9cAA5YuQAAC3MVbnGeV86swsystem)(Cell_1zcF9cV8XIAtbN8h5HRUB8g* c) {
nimfr_("incRef", "gc.nim")
nimln_(196, "gc.nim");
(*c).refcount = (NI)((NU64)((*c).refcount) + (NU64)(((NI) 8)));
popFrame();
}
static N_INLINE(void, decRef_MV4BBk6J1qu70IbBxwEn4wsystem)(Cell_1zcF9cV8XIAtbN8h5HRUB8g* c) {
nimfr_("decRef", "gc.nim")
nimln_(223, "gc.nim");
{
(*c).refcount -= ((NI) 8);
if (!((NU64)((*c).refcount) < (NU64)(((NI) 8)))) goto LA3_;
nimln_(224, "gc.nim");
rtlAddZCT_MV4BBk6J1qu70IbBxwEn4w_2system(c);
}
LA3_: ;
popFrame();
}
static N_INLINE(void, asgnRef)(void** dest, void* src) {
nimfr_("asgnRef", "gc.nim")
nimln_(281, "gc.nim");
{
Cell_1zcF9cV8XIAtbN8h5HRUB8g* T5_;
nimln_(363, "system.nim");
nimln_(281, "gc.nim");
if (!!((src == NIM_NIL))) goto LA3_;
T5_ = (Cell_1zcF9cV8XIAtbN8h5HRUB8g*)0;
T5_ = usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem(src);
incRef_9cAA5YuQAAC3MVbnGeV86swsystem(T5_);
}
LA3_: ;
nimln_(282, "gc.nim");
{
Cell_1zcF9cV8XIAtbN8h5HRUB8g* T10_;
nimln_(363, "system.nim");
nimln_(282, "gc.nim");
if (!!(((*dest) == NIM_NIL))) goto LA8_;
T10_ = (Cell_1zcF9cV8XIAtbN8h5HRUB8g*)0;
T10_ = usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem((*dest));
decRef_MV4BBk6J1qu70IbBxwEn4wsystem(T10_);
}
LA8_: ;
nimln_(283, "gc.nim");
(*dest) = src;
popFrame();
}
N_NIMCALL(NI, nsuParseInt)(NimStringDesc* s) {
NI result;
NI L;
nimfr_("parseInt", "strutils.nim")
result = (NI)0;
L = npuParseInt(s, (&result), ((NI) 0));
{
NIM_BOOL T3_;
ValueError_Gi06FkNeykJn7mrqRZYrkA* e;
NimStringDesc* T7_;
T3_ = (NIM_BOOL)0;
T3_ = !((L == (s ? s->Sup.len : 0)));
if (T3_) goto LA4_;
T3_ = (L == ((NI) 0));
LA4_: ;
if (!T3_) goto LA5_;
e = (ValueError_Gi06FkNeykJn7mrqRZYrkA*)0;
e = (ValueError_Gi06FkNeykJn7mrqRZYrkA*) newObj((&NTI_yCEN9anxCD6mzBxGjuaRBdg_), sizeof(ValueError_Gi06FkNeykJn7mrqRZYrkA));
(*e).Sup.Sup.m_type = (&NTI_Gi06FkNeykJn7mrqRZYrkA_);
T7_ = (NimStringDesc*)0;
T7_ = rawNewString(s->Sup.len + 17);
appendString(T7_, ((NimStringDesc*) &TM_JGc9b9bh2D3nTdUR7TGyq8aA_4));
appendString(T7_, s);
asgnRefNoCycle((void**) (&(*e).Sup.message), T7_);
asgnRef((void**) (&(*e).Sup.parent), NIM_NIL);
raiseException((Exception*)e, "ValueError");
}
LA5_: ;
popFrame();
return result;
}
static N_INLINE(NI, chckRange)(NI i, NI a, NI b) {
NI result;
{ result = (NI)0;
{
NIM_BOOL T3_;
T3_ = (NIM_BOOL)0;
T3_ = (a <= i);
if (!(T3_)) goto LA4_;
T3_ = (i <= b);
LA4_: ;
if (!T3_) goto LA5_;
result = i;
goto BeforeRet_;
}
goto LA1_;
LA5_: ;
{
raiseRangeError(((NI64) (i)));
}
LA1_: ;
}BeforeRet_: ;
return result;
}
static N_INLINE(NI, subInt)(NI a, NI b) {
NI result;
{ result = (NI)0;
result = (NI)((NU64)(a) - (NU64)(b));
{
NIM_BOOL T3_;
T3_ = (NIM_BOOL)0;
T3_ = (((NI) 0) <= (NI)(result ^ a));
if (T3_) goto LA4_;
T3_ = (((NI) 0) <= (NI)(result ^ (NI)((NU64) ~(b))));
LA4_: ;
if (!T3_) goto LA5_;
goto BeforeRet_;
}
LA5_: ;
raiseOverflow();
}BeforeRet_: ;
return result;
}
N_NIMCALL(NIM_CHAR, nsuToLowerAsciiChar)(NIM_CHAR c) {
NIM_CHAR result;
nimfr_("toLowerAscii", "strutils.nim")
result = (NIM_CHAR)0;
nimln_(209, "strutils.nim");
{
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_6;
if (!(((NU8)(c)) >= ((NU8)(65)) && ((NU8)(c)) <= ((NU8)(90)))) goto LA3_;
nimln_(210, "strutils.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_6 = addInt(((NI) (((NU8)(c)))), ((NI) 32));
result = ((NIM_CHAR) (((NI)chckRange((NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_6), ((NI) 0), ((NI) 255)))));
}
goto LA1_;
LA3_: ;
{
nimln_(212, "strutils.nim");
result = c;
}
LA1_: ;
popFrame();
return result;
}
N_NIMCALL(NimStringDesc*, nsuToLowerAsciiStr)(NimStringDesc* s) {
NimStringDesc* result;
nimfr_("toLowerAscii", "strutils.nim")
result = (NimStringDesc*)0;
nimln_(221, "strutils.nim");
result = mnewString(((NI)chckRange((s ? s->Sup.len : 0), ((NI) 0), ((NI) IL64(9223372036854775807)))));
{
NI i;
NI colontmp_;
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_5;
NI res;
i = (NI)0;
colontmp_ = (NI)0;
nimln_(222, "strutils.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_5 = subInt((s ? s->Sup.len : 0), ((NI) 1));
colontmp_ = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_5);
nimln_(1955, "system.nim");
res = ((NI) 0);
{
nimln_(1956, "system.nim");
while (1) {
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_7;
if (!(res <= colontmp_)) goto LA3;
nimln_(1957, "system.nim");
i = res;
if ((NU)(i) > (NU)(result->Sup.len)) raiseIndexError();
nimln_(223, "strutils.nim");
if ((NU)(i) > (NU)(s->Sup.len)) raiseIndexError();
result->data[i] = nsuToLowerAsciiChar(s->data[i]);
nimln_(1976, "system.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_7 = addInt(res, ((NI) 1));
res = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_7);
} LA3: ;
}
}
popFrame();
return result;
}
N_NIMCALL(NI, nsuCmpIgnoreCase)(NimStringDesc* a, NimStringDesc* b) {
NI result;
NI i;
NI m;
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_10;
nimfr_("cmpIgnoreCase", "strutils.nim")
{ result = (NI)0;
nimln_(410, "strutils.nim");
i = ((NI) 0);
nimln_(411, "strutils.nim");
m = (((a ? a->Sup.len : 0) <= (b ? b->Sup.len : 0)) ? (a ? a->Sup.len : 0) : (b ? b->Sup.len : 0));
{
nimln_(412, "strutils.nim");
while (1) {
NIM_CHAR T3_;
NIM_CHAR T4_;
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_8;
NI TM_JGc9b9bh2D3nTdUR7TGyq8aA_9;
if (!(i < m)) goto LA2;
nimln_(413, "strutils.nim");
if ((NU)(i) > (NU)(a->Sup.len)) raiseIndexError();
T3_ = (NIM_CHAR)0;
T3_ = nsuToLowerAsciiChar(a->data[i]);
if ((NU)(i) > (NU)(b->Sup.len)) raiseIndexError();
T4_ = (NIM_CHAR)0;
T4_ = nsuToLowerAsciiChar(b->data[i]);
TM_JGc9b9bh2D3nTdUR7TGyq8aA_8 = subInt(((NI) (((NU8)(T3_)))), ((NI) (((NU8)(T4_)))));
result = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_8);
nimln_(414, "strutils.nim");
{
nimln_(363, "system.nim");
nimln_(414, "strutils.nim");
if (!!((result == ((NI) 0)))) goto LA7_;
goto BeforeRet_;
}
LA7_: ;
nimln_(415, "strutils.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_9 = addInt(i, ((NI) 1));
i = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_9);
} LA2: ;
}
nimln_(416, "strutils.nim");
TM_JGc9b9bh2D3nTdUR7TGyq8aA_10 = subInt((a ? a->Sup.len : 0), (b ? b->Sup.len : 0));
result = (NI)(TM_JGc9b9bh2D3nTdUR7TGyq8aA_10);
}BeforeRet_: ;
popFrame();
return result;
}
static N_INLINE(void, nimGCunrefNoCycle)(void* p) {
Cell_1zcF9cV8XIAtbN8h5HRUB8g* c;
nimfr_("nimGCunrefNoCycle", "gc.nim")
nimln_(270, "gc.nim");
c = usrToCell_yB9aH5WIlwd0xkYrcdPeXrQsystem(p);
nimln_(272, "gc.nim");
{
(*c).refcount -= ((NI) 8);
if (!((NU64)((*c).refcount) < (NU64)(((NI) 8)))) goto LA3_;
nimln_(273, "gc.nim");
rtlAddZCT_MV4BBk6J1qu70IbBxwEn4w_2system(c);
}
LA3_: ;
popFrame();
}
N_NOINLINE(void, invalidFormatString_61EJWW6vRISEo9a8gt0tusw)(void) {
ValueError_Gi06FkNeykJn7mrqRZYrkA* e;
NimStringDesc* T1_;
nimfr_("invalidFormatString", "strutils.nim")
e = (ValueError_Gi06FkNeykJn7mrqRZYrkA*)0;
e = (ValueError_Gi06FkNeykJn7mrqRZYrkA*) newObj((&NTI_yCEN9anxCD6mzBxGjuaRBdg_), sizeof(ValueError_Gi06FkNeykJn7mrqRZYrkA));
(*e).Sup.Sup.m_type = (&NTI_Gi06FkNeykJn7mrqRZYrkA_);
T1_ = (NimStringDesc*)0;
T1_ = (*e).Sup.message; (*e).Sup.message = copyStringRC1(((NimStringDesc*) &TM_JGc9b9bh2D3nTdUR7TGyq8aA_11));
if (T1_) nimGCunrefNoCycle(T1_);
asgnRef((void**) (&(*e).Sup.parent), NIM_NIL);
raiseException((Exception*)e, "ValueError");
popFrame();
}
N_NIMCALL(NI, nsuCmpIgnoreStyle)(NimStringDesc* a, NimStringDesc* b) {
NI result;
NI i;
NI j;
nimfr_("cmpIgnoreStyle", "strutils.nim")
result = (NI)0;
i = ((NI) 0);
j = ((NI) 0);
{
while (1) {
NIM_CHAR aa;
NIM_CHAR bb;
{
while (1) {
if (!((NU8)(a->data[i]) == (NU8)(95))) goto LA4;
i += ((NI) 1);
} LA4: ;
}
{
while (1) {
if (!((NU8)(b->data[j]) == (NU8)(95))) goto LA6;
j += ((NI) 1);
} LA6: ;
}
aa = nsuToLowerAsciiChar(a->data[i]);
bb = nsuToLowerAsciiChar(b->data[j]);
result = (NI)(((NI) (((NU8)(aa)))) - ((NI) (((NU8)(bb)))));
{
NIM_BOOL T9_;
T9_ = (NIM_BOOL)0;
T9_ = !((result == ((NI) 0)));
if (T9_) goto LA10_;
T9_ = ((NU8)(aa) == (NU8)(0));
LA10_: ;
if (!T9_) goto LA11_;
goto LA1;
}
LA11_: ;
i += ((NI) 1);
j += ((NI) 1);
}
} LA1: ;
popFrame();
return result;
}
N_NIMCALL(NI, findNormalized_SW1VCMDsxPTtzxnYrf3N6w)(NimStringDesc* x, NimStringDesc** inArray, NI inArrayLen_0) {
NI result;
NI i;
nimfr_("findNormalized", "strutils.nim")
{ result = (NI)0;
i = ((NI) 0);
{
while (1) {
if (!(i < (inArrayLen_0-1))) goto LA2;
{
NI T5_;
T5_ = (NI)0;
T5_ = nsuCmpIgnoreStyle(x, inArray[i]);
if (!(T5_ == ((NI) 0))) goto LA6_;
result = i;
goto BeforeRet_;
}
LA6_: ;
i += ((NI) 2);
} LA2: ;
}
result = ((NI) -1);
goto BeforeRet_;
}BeforeRet_: ;
popFrame();
return result;
}
N_NIMCALL(void, nsuAddf)(NimStringDesc** s, NimStringDesc* formatstr, NimStringDesc** a, NI aLen_0) {
NI i;
NI num;
nimfr_("addf", "strutils.nim")
i = ((NI) 0);
num = ((NI) 0);
{
while (1) {
if (!(i < (formatstr ? formatstr->Sup.len : 0))) goto LA2;
{
if (!((NU8)(formatstr->data[i]) == (NU8)(36))) goto LA5_;
switch (((NU8)(formatstr->data[(NI)(i + ((NI) 1))]))) {
case 35:
{
{
if (!((NU64)((aLen_0-1)) < (NU64)(num))) goto LA10_;
invalidFormatString_61EJWW6vRISEo9a8gt0tusw();
}
LA10_: ;
(*s) = resizeString((*s), a[num]->Sup.len + 0);
appendString((*s), a[num]);
i += ((NI) 2);
num += ((NI) 1);
}
break;
case 36:
{
(*s) = addChar((*s), 36);
i += ((NI) 2);
}
break;
case 49 ... 57:
case 45:
{
NI j;
NIM_BOOL negative;
NI idx;
j = ((NI) 0);
i += ((NI) 1);
negative = ((NU8)(formatstr->data[i]) == (NU8)(45));
{
if (!negative) goto LA16_;
i += ((NI) 1);
}
LA16_: ;
{
while (1) {
if (!(((NU8)(formatstr->data[i])) >= ((NU8)(48)) && ((NU8)(formatstr->data[i])) <= ((NU8)(57)))) goto LA19;
j = (NI)((NI)((NI)(j * ((NI) 10)) + ((NI) (((NU8)(formatstr->data[i]))))) - ((NI) 48));
i += ((NI) 1);
} LA19: ;
}
{
if (!!(negative)) goto LA22_;
idx = (NI)(j - ((NI) 1));
}
goto LA20_;
LA22_: ;
{
idx = (NI)(aLen_0 - j);
}
LA20_: ;
{
if (!((NU64)((aLen_0-1)) < (NU64)(idx))) goto LA27_;
invalidFormatString_61EJWW6vRISEo9a8gt0tusw();
}
LA27_: ;
(*s) = resizeString((*s), a[idx]->Sup.len + 0);
appendString((*s), a[idx]);
}
break;
case 123:
{
NI j_2;
NI x;
NimStringDesc* T32_;
j_2 = (NI)(i + ((NI) 1));
{
while (1) {
if (!!((((NU8)(formatstr->data[j_2])) == ((NU8)(0)) || ((NU8)(formatstr->data[j_2])) == ((NU8)(125))))) goto LA31;
j_2 += ((NI) 1);
} LA31: ;
}
T32_ = (NimStringDesc*)0;
T32_ = copyStrLast(formatstr, (NI)(i + ((NI) 2)), (NI)(j_2 - ((NI) 1)));
x = findNormalized_SW1VCMDsxPTtzxnYrf3N6w(T32_, a, aLen_0);
{
NIM_BOOL T35_;
T35_ = (NIM_BOOL)0;
T35_ = (((NI) 0) <= x);
if (!(T35_)) goto LA36_;
T35_ = (x < (aLen_0-1));
LA36_: ;
if (!T35_) goto LA37_;
(*s) = resizeString((*s), a[(NI)(x + ((NI) 1))]->Sup.len + 0);
appendString((*s), a[(NI)(x + ((NI) 1))]);
}
goto LA33_;
LA37_: ;
{
invalidFormatString_61EJWW6vRISEo9a8gt0tusw();
}
LA33_: ;
i = (NI)(j_2 + ((NI) 1));
}
break;
case 97 ... 122:
case 65 ... 90:
case 128 ... 255:
case 95:
{
NI j_3;
NI x_2;
NimStringDesc* T43_;
j_3 = (NI)(i + ((NI) 1));
{
while (1) {
if (!(((NU8)(formatstr->data[j_3])) >= ((NU8)(97)) && ((NU8)(formatstr->data[j_3])) <= ((NU8)(122)) || ((NU8)(formatstr->data[j_3])) >= ((NU8)(65)) && ((NU8)(formatstr->data[j_3])) <= ((NU8)(90)) || ((NU8)(formatstr->data[j_3])) >= ((NU8)(48)) && ((NU8)(formatstr->data[j_3])) <= ((NU8)(57)) || ((NU8)(formatstr->data[j_3])) >= ((NU8)(128)) && ((NU8)(formatstr->data[j_3])) <= ((NU8)(255)) || ((NU8)(formatstr->data[j_3])) == ((NU8)(95)))) goto LA42;
j_3 += ((NI) 1);
} LA42: ;
}
T43_ = (NimStringDesc*)0;
T43_ = copyStrLast(formatstr, (NI)(i + ((NI) 1)), (NI)(j_3 - ((NI) 1)));
x_2 = findNormalized_SW1VCMDsxPTtzxnYrf3N6w(T43_, a, aLen_0);
{
NIM_BOOL T46_;
T46_ = (NIM_BOOL)0;
T46_ = (((NI) 0) <= x_2);
if (!(T46_)) goto LA47_;
T46_ = (x_2 < (aLen_0-1));
LA47_: ;
if (!T46_) goto LA48_;
(*s) = resizeString((*s), a[(NI)(x_2 + ((NI) 1))]->Sup.len + 0);
appendString((*s), a[(NI)(x_2 + ((NI) 1))]);
}
goto LA44_;
LA48_: ;
{
invalidFormatString_61EJWW6vRISEo9a8gt0tusw();
}
LA44_: ;
i = j_3;
}
break;
default:
{
invalidFormatString_61EJWW6vRISEo9a8gt0tusw();
}
break;
}
}
goto LA3_;
LA5_: ;
{
(*s) = addChar((*s), formatstr->data[i]);
i += ((NI) 1);
}
LA3_: ;
} LA2: ;
}
popFrame();
}
N_NIMCALL(NimStringDesc*, nsuFormatOpenArray)(NimStringDesc* formatstr, NimStringDesc** a, NI aLen_0) {
NimStringDesc* result;
nimfr_("%", "strutils.nim")
result = (NimStringDesc*)0;
result = rawNewString(((NI) ((NI)((formatstr ? formatstr->Sup.len : 0) + (NI)((NU64)(aLen_0) << (NU64)(((NI) 4)))))));
nsuAddf((&result), formatstr, a, aLen_0);
popFrame();
return result;
}
N_NIMCALL(NIM_BOOL, allCharsInSet_wVfr4F6j4mVzI8ggLoMVdw)(NimStringDesc* s, TY_nmiMWKVIe46vacnhAFrQvw_Set theSet) {
NIM_BOOL result;
nimfr_("allCharsInSet", "strutils.nim")
{ result = (NIM_BOOL)0;
{
NIM_CHAR c;
NI i;
NI L;
c = (NIM_CHAR)0;
i = ((NI) 0);
L = (s ? s->Sup.len : 0);
{
while (1) {
if (!(i < L)) goto LA3;
c = s->data[i];
{
if (!!(((theSet[(NU)(((NU8)(c)))>>3] &(1U<<((NU)(((NU8)(c)))&7U)))!=0))) goto LA6_;
result = NIM_FALSE;
goto BeforeRet_;
}
LA6_: ;
i += ((NI) 1);
{
if (!!(((s ? s->Sup.len : 0) == L))) goto LA10_;
failedAssertImpl_aDmpBTs9cPuXp0Mp9cfiNeyA(((NimStringDesc*) &TM_JGc9b9bh2D3nTdUR7TGyq8aA_12));
}
LA10_: ;
} LA3: ;
}
}
result = NIM_TRUE;
goto BeforeRet_;
}BeforeRet_: ;
popFrame();
return result;
}
N_NIMCALL(NI, nsuFindChar)(NimStringDesc* s, NIM_CHAR sub, NI start, NI last) {
NI result;
NI last_2;
void* found;
nimfr_("find", "strutils.nim")
{ result = (NI)0;
{
if (!(((NI) (last)) == ((NI) 0))) goto LA3_;
last_2 = (s ? (s->Sup.len-1) : -1);
}
goto LA1_;
LA3_: ;
{
last_2 = ((NI) (last));
}
LA1_: ;
found = memchr(((void*) ((&s->data[start]))), sub, (NI)((NI)(last_2 - ((NI) (start))) + ((NI) 1)));
{
if (!!((found == 0))) goto LA8_;
result = (NI)((NU64)(((NI) (found))) - (NU64)(((NI) (s->data))));
goto BeforeRet_;
}
LA8_: ;
result = ((NI) -1);
goto BeforeRet_;
}BeforeRet_: ;
popFrame();
return result;
}
N_NIMCALL(NIM_BOOL, nsuStartsWith)(NimStringDesc* s, NimStringDesc* prefix) {
NIM_BOOL result;
NI i;
nimfr_("startsWith", "strutils.nim")
{ result = (NIM_BOOL)0;
i = ((NI) 0);
{
while (1) {
{
if (!((NU8)(prefix->data[i]) == (NU8)(0))) goto LA5_;
result = NIM_TRUE;
goto BeforeRet_;
}
LA5_: ;
{
if (!!(((NU8)(s->data[i]) == (NU8)(prefix->data[i])))) goto LA9_;
result = NIM_FALSE;
goto BeforeRet_;
}
LA9_: ;
i += ((NI) 1);
}
}
}BeforeRet_: ;
popFrame();
return result;
}
N_NIMCALL(NimStringDesc*, nsuJoinSep)(NimStringDesc** a, NI aLen_0, NimStringDesc* sep) {
NimStringDesc* result;
nimfr_("join", "strutils.nim")
result = (NimStringDesc*)0;
{
NI L;
if (!(((NI) 0) < aLen_0)) goto LA3_;
L = (NI)((sep ? sep->Sup.len : 0) * (NI)(aLen_0 - ((NI) 1)));
{
NI i;
NI colontmp_;
NI res;
i = (NI)0;
colontmp_ = (NI)0;
colontmp_ = (aLen_0-1);
res = ((NI) 0);
{
while (1) {
if (!(res <= colontmp_)) goto LA7;
i = res;
L += (a[i] ? a[i]->Sup.len : 0);
res += ((NI) 1);
} LA7: ;
}
}
result = rawNewString(((NI) (L)));
result = resizeString(result, a[((NI) 0)]->Sup.len + 0);
appendString(result, a[((NI) 0)]);
{
NI i_2;
NI colontmp__2;
NI res_2;
i_2 = (NI)0;
colontmp__2 = (NI)0;
colontmp__2 = (aLen_0-1);
res_2 = ((NI) 1);
{
while (1) {
if (!(res_2 <= colontmp__2)) goto LA10;
i_2 = res_2;
result = resizeString(result, sep->Sup.len + 0);
appendString(result, sep);
result = resizeString(result, a[i_2]->Sup.len + 0);
appendString(result, a[i_2]);
res_2 += ((NI) 1);
} LA10: ;
}
}
}
goto LA1_;
LA3_: ;
{
result = copyString(((NimStringDesc*) &TM_JGc9b9bh2D3nTdUR7TGyq8aA_13));
}
LA1_: ;
popFrame();
return result;
}
N_NIMCALL(NimStringDesc*, nsuFormatVarargs)(NimStringDesc* formatstr, NimStringDesc** a, NI aLen_0) {
NimStringDesc* result;
nimfr_("format", "strutils.nim")
result = (NimStringDesc*)0;
result = rawNewString(((NI) ((NI)((formatstr ? formatstr->Sup.len : 0) + aLen_0))));
nsuAddf((&result), formatstr, a, aLen_0);
popFrame();
return result;
}
N_NIMCALL(NimStringDesc*, nsuAlignString)(NimStringDesc* s, NI count, NIM_CHAR padding) {
NimStringDesc* result;
nimfr_("align", "strutils.nim")
result = (NimStringDesc*)0;
{
NI spaces;
if (!((s ? s->Sup.len : 0) < ((NI) (count)))) goto LA3_;
result = mnewString(count);
spaces = (NI)(((NI) (count)) - (s ? s->Sup.len : 0));
{
NI i;
NI colontmp_;
NI res;
i = (NI)0;
colontmp_ = (NI)0;
colontmp_ = (NI)(spaces - ((NI) 1));
res = ((NI) 0);
{
while (1) {
if (!(res <= colontmp_)) goto LA7;
i = res;
result->data[i] = padding;
res += ((NI) 1);
} LA7: ;
}
}
{
NI i_2;
NI colontmp__2;
NI res_2;
i_2 = (NI)0;
colontmp__2 = (NI)0;
colontmp__2 = (NI)(((NI) (count)) - ((NI) 1));
res_2 = spaces;
{
while (1) {
if (!(res_2 <= ((NI) (colontmp__2)))) goto LA10;
i_2 = ((NI) (res_2));
result->data[i_2] = s->data[(NI)(((NI) (i_2)) - spaces)];
res_2 += ((NI) 1);
} LA10: ;
}
}
}
goto LA1_;
LA3_: ;
{
result = copyString(s);
}
LA1_: ;
popFrame();
return result;
}
NIM_EXTERNC N_NOINLINE(void, stdlib_strutilsInit000)(void) {
nimfr_("strutils", "strutils.nim")
popFrame();
}
NIM_EXTERNC N_NOINLINE(void, stdlib_strutilsDatInit000)(void) {
}
// Only system apps are expected on the Java class path, so scanning it yields exactly the system apps
private static Set<Class<? extends App>> findSystemApps() {
Reflections reflections = new Reflections(
new ConfigurationBuilder()
.setUrls(ClasspathHelper.forJavaClassPath())
.setScanners(new SubTypesScanner(true))
);
Set<Class<? extends App>> clazzes = reflections.getSubTypesOf(App.class);
clazzes.removeIf(clazz -> !Modifier.isPublic(clazz.getModifiers()));
return clazzes;
}
Micropatterned Polymeric Nanosheets for Local Delivery of an Engineered Epithelial Monolayer
Like a carpet for cells, micropatterned polymeric nanosheets are developed toward local cell delivery. The nanosheets direct morphogenesis of retinal pigment epithelial (RPE) cells and allow for the injection of an engineered RPE monolayer through syringe needles without the loss of cell viability. Such an ultrathin carrier has the promise of a minimally invasive delivery of cells into narrow tissue spaces.
import * as React from "react";
import { render } from "react-dom";
import { MapContext } from "./Map";
export interface CustomOverlayProps {
options: kakao.maps.CustomOverlayOptions;
visible: boolean;
}
export class CustomOverlay extends React.PureComponent<CustomOverlayProps> {
public static contextType = MapContext;
public context!: React.ContextType<typeof MapContext>;
private readonly customOverlay: kakao.maps.CustomOverlay;
constructor(props: CustomOverlayProps) {
super(props);
this.customOverlay = new kakao.maps.CustomOverlay(this.props.options);
}
public componentDidMount() {
const { children, visible, options } = this.props;
const map = this.context;
this.customOverlay.setMap(map);
// Initially keep the overlay hidden (visible = false),
this.customOverlay.setVisible(false);
if (children) {
const div = document.createElement("div");
render(<React.Fragment>{children}</React.Fragment>, div);
this.customOverlay.setContent(div);
this.customOverlay.setPosition(options.position);
// then, after a short delay, setting visible = true makes it appear at the correct position.
const handle = setTimeout(() => {
this.customOverlay.setVisible(visible);
clearTimeout(handle);
});
}
}
public componentDidUpdate(prevProps: Readonly<CustomOverlayProps>) {
const { options, visible } = this.props;
const { options: prevOptions } = prevProps;
if (prevOptions !== options) {
if (
typeof options.map !== "undefined" &&
prevOptions.map !== options.map
) {
this.customOverlay.setMap(options.map);
}
if (prevOptions.position !== options.position) {
this.customOverlay.setPosition(options.position);
}
if (
typeof options.zIndex !== "undefined" &&
prevOptions.zIndex !== options.zIndex
) {
this.customOverlay.setZIndex(options.zIndex);
}
this.customOverlay.setVisible(visible);
}
}
public componentWillUnmount() {
this.customOverlay.setMap(null);
}
public render() {
return null;
}
}
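// --- Usage sketch, not part of the original file ---
// Assumptions: a <Map> component is exported from "./Map" and provides the MapContext
// consumed by CustomOverlay, and the kakao maps SDK is loaded globally on the page.
// The children are rendered into a detached <div> and attached to the map as the
// overlay content; toggling `visible` shows or hides it after the initial delayed reveal.
import { Map } from "./Map";

export const CustomOverlayExample: React.FC = () => (
    <Map>
        <CustomOverlay
            visible={true}
            options={{ position: new kakao.maps.LatLng(37.5665, 126.978) }}
        >
            <div className="overlay">Seoul City Hall</div>
        </CustomOverlay>
    </Map>
);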
/**
* Created by kev on 16-04-18.
*/
export interface IView {
draw(time:number):void;
addChild(view:IView):void;
removeChild(view:IView):void;
onResize(width?:number, height?:number) : void;
setPos(x:number, y:number, z ?:number) : void;
show():void;
hide():Promise<IView>;
destroy():void;
}
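// A minimal sketch (not from the original project) of a class implementing IView,
// showing how the contract above composes: draw/resize/destroy fan out to children,
// and hide() resolves with the view so callers can await a hide transition.
export class BaseView implements IView {
    protected children: IView[] = [];
    protected x = 0;
    protected y = 0;
    protected z = 0;

    draw(time: number): void {
        for (const child of this.children) child.draw(time);
    }

    addChild(view: IView): void {
        this.children.push(view);
    }

    removeChild(view: IView): void {
        const index = this.children.indexOf(view);
        if (index !== -1) this.children.splice(index, 1);
    }

    onResize(width?: number, height?: number): void {
        for (const child of this.children) child.onResize(width, height);
    }

    setPos(x: number, y: number, z?: number): void {
        this.x = x;
        this.y = y;
        if (z !== undefined) this.z = z;
    }

    show(): void {
        // e.g. make the underlying display object visible / start an intro tween
    }

    hide(): Promise<IView> {
        // resolve immediately here; a real view would resolve once its outro finishes
        return Promise.resolve(this);
    }

    destroy(): void {
        for (const child of this.children) child.destroy();
        this.children = [];
    }
}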
/**
* Subclass of {@link MongoException} representing a cursor-not-found exception.
*/
public class MongoCursorNotFoundException extends MongoException {
private static final long serialVersionUID = -4415279469780082174L;
private final long cursorId;
private final ServerAddress serverAddress;
/**
* @param cursorId the ID of the cursor that was not found
* @param serverAddress the address of the server the cursor was on
*/
MongoCursorNotFoundException(final long cursorId, final ServerAddress serverAddress) {
super(-5, "Cursor " + cursorId + " not found on server " + serverAddress);
this.cursorId = cursorId;
this.serverAddress = serverAddress;
}
/**
* Get the cursor id that wasn't found.
*
* @return the ID of the cursor
*/
public long getCursorId() {
return cursorId;
}
/**
* The server address where the cursor is.
*
* @return the ServerAddress representing the server the cursor was on.
*/
public ServerAddress getServerAddress() {
return serverAddress;
}
}
/**
* @author David Withers
*
*/
public class ChangeDataflowInputPortDepthEditTest {
private static Edits edits;
@BeforeClass
public static void createEditsInstance() {
edits = new EditsImpl();
}
private DataflowInputPortImpl dataflowInputPort;
private int depth;
private int granularDepth;
@Before
public void setUp() throws Exception {
depth = 3;
granularDepth = 1;
dataflowInputPort = new DataflowInputPortImpl("port name", depth,
granularDepth, null);
}
@Test
public void testDoEditAction() throws EditException {
int newDepth = 2;
Edit<DataflowInputPort> edit = edits
.getChangeDataflowInputPortDepthEdit(dataflowInputPort,
newDepth);
assertEquals(depth, dataflowInputPort.getDepth());
assertEquals(granularDepth, dataflowInputPort.getGranularInputDepth());
edit.doEdit();
assertEquals(newDepth, dataflowInputPort.getDepth());
assertEquals(granularDepth, dataflowInputPort.getGranularInputDepth());
}
@Test
public void testCreateDataflowInputPortEdit() {
Edit<DataflowInputPort> edit = edits
.getChangeDataflowInputPortDepthEdit(dataflowInputPort, 0);
assertEquals(dataflowInputPort, edit.getSubject());
}
}
/**
* Returns the total size of the object and of its children
* @return the total size of the object and of its children
*/
public long totalSize() {
if (totalSize < 0)
totalSize = computeTotalSize();
return totalSize;
}
package gossip
import (
"context"
"fmt"
"time"
"github.com/libp2p/go-libp2p-core/peer"
"github.com/pkg/errors"
"github.com/gohornet/hornet/pkg/dag"
"github.com/gohornet/hornet/pkg/metrics"
"github.com/gohornet/hornet/pkg/model/hornet"
"github.com/gohornet/hornet/pkg/model/milestone"
"github.com/gohornet/hornet/pkg/model/storage"
"github.com/gohornet/hornet/pkg/model/syncmanager"
"github.com/gohornet/hornet/pkg/p2p"
"github.com/gohornet/hornet/pkg/profile"
"github.com/iotaledger/hive.go/events"
"github.com/iotaledger/hive.go/objectstorage"
"github.com/iotaledger/hive.go/protocol/message"
"github.com/iotaledger/hive.go/serializer/v2"
"github.com/iotaledger/hive.go/syncutils"
"github.com/iotaledger/hive.go/workerpool"
iotago "github.com/iotaledger/iota.go/v3"
"github.com/iotaledger/iota.go/v3/pow"
)
const (
WorkerQueueSize = 50000
)
var (
workerCount = 64
ErrInvalidTimestamp = errors.New("invalid timestamp")
ErrMessageNotSolid = errors.New("msg is not solid")
ErrMessageBelowMaxDepth = errors.New("msg is below max depth")
)
func MessageProcessedCaller(handler interface{}, params ...interface{}) {
handler.(func(msg *storage.Message, requests Requests, proto *Protocol))(params[0].(*storage.Message), params[1].(Requests), params[2].(*Protocol))
}
// Broadcast defines a message which should be broadcasted.
type Broadcast struct {
// The message data to broadcast.
MsgData []byte
// The IDs of the peers to exclude from broadcasting.
ExcludePeers map[peer.ID]struct{}
}
func BroadcastCaller(handler interface{}, params ...interface{}) {
handler.(func(b *Broadcast))(params[0].(*Broadcast))
}
// MessageProcessorEvents are the events fired by the MessageProcessor.
type MessageProcessorEvents struct {
// Fired when a message was fully processed.
MessageProcessed *events.Event
// Fired when a message is meant to be broadcasted.
BroadcastMessage *events.Event
}
// The Options for the MessageProcessor.
type Options struct {
MinPoWScore float64
NetworkID uint64
ProtocolVersion byte
BelowMaxDepth milestone.Index
WorkUnitCacheOpts *profile.CacheOpts
}
// MessageProcessor processes submitted messages in parallel and fires appropriate completion events.
type MessageProcessor struct {
// used to access the node storage.
storage *storage.Storage
// used to determine the sync status of the node.
syncManager *syncmanager.SyncManager
// contains requests for needed messages.
requestQueue RequestQueue
// used to manage connected peers.
peeringManager *p2p.Manager
// shared server metrics instance.
serverMetrics *metrics.ServerMetrics
// Deserialization parameters including byte costs
deSeriParas *iotago.DeSerializationParameters
// holds the message processor options.
opts Options
// events of the message processor.
Events MessageProcessorEvents
// cache that holds processed incoming messages.
workUnits *objectstorage.ObjectStorage
// worker pool for incoming messages.
wp *workerpool.WorkerPool
// mutex to secure the shutdown flag.
shutdownMutex syncutils.RWMutex
// indicates that the message processor was shut down.
shutdown bool
}
// NewMessageProcessor creates a new processor which parses messages.
func NewMessageProcessor(
dbStorage *storage.Storage,
syncManager *syncmanager.SyncManager,
requestQueue RequestQueue,
peeringManager *p2p.Manager,
serverMetrics *metrics.ServerMetrics,
deSeriParas *iotago.DeSerializationParameters,
opts *Options) (*MessageProcessor, error) {
proc := &MessageProcessor{
storage: dbStorage,
syncManager: syncManager,
requestQueue: requestQueue,
peeringManager: peeringManager,
serverMetrics: serverMetrics,
deSeriParas: deSeriParas,
opts: *opts,
Events: MessageProcessorEvents{
MessageProcessed: events.NewEvent(MessageProcessedCaller),
BroadcastMessage: events.NewEvent(BroadcastCaller),
},
}
wuCacheOpts := opts.WorkUnitCacheOpts
cacheTime, err := time.ParseDuration(wuCacheOpts.CacheTime)
if err != nil {
return nil, err
}
leakDetectionMaxConsumerHoldTime, err := time.ParseDuration(wuCacheOpts.LeakDetectionOptions.MaxConsumerHoldTime)
if err != nil {
return nil, err
}
proc.workUnits = objectstorage.New(
nil,
// defines the factory function for WorkUnits.
func(key []byte, data []byte) (objectstorage.StorableObject, error) {
return newWorkUnit(key, proc), nil
},
objectstorage.CacheTime(cacheTime),
objectstorage.PersistenceEnabled(false),
objectstorage.KeysOnly(true),
objectstorage.StoreOnCreation(false),
objectstorage.ReleaseExecutorWorkerCount(wuCacheOpts.ReleaseExecutorWorkerCount),
objectstorage.LeakDetectionEnabled(wuCacheOpts.LeakDetectionOptions.Enabled,
objectstorage.LeakDetectionOptions{
MaxConsumersPerObject: wuCacheOpts.LeakDetectionOptions.MaxConsumersPerObject,
MaxConsumerHoldTime: leakDetectionMaxConsumerHoldTime,
}),
)
proc.wp = workerpool.New(func(task workerpool.Task) {
p := task.Param(0).(*Protocol)
data := task.Param(2).([]byte)
switch task.Param(1).(message.Type) {
case MessageTypeMessage:
proc.processMessage(p, data)
case MessageTypeMessageRequest:
proc.processMessageRequest(p, data)
case MessageTypeMilestoneRequest:
proc.processMilestoneRequest(p, data)
}
task.Return(nil)
}, workerpool.WorkerCount(workerCount), workerpool.QueueSize(WorkerQueueSize))
return proc, nil
}
// Run runs the processor and blocks until the shutdown signal is triggered.
func (proc *MessageProcessor) Run(ctx context.Context) {
proc.wp.Start()
<-ctx.Done()
proc.Shutdown()
}
// Shutdown signals the internal worker pool and object storage
// to shut down and sets the shutdown flag.
func (proc *MessageProcessor) Shutdown() {
proc.shutdownMutex.Lock()
defer proc.shutdownMutex.Unlock()
proc.shutdown = true
proc.wp.StopAndWait()
proc.workUnits.Shutdown()
}
// Process submits the given message to the processor for processing.
func (proc *MessageProcessor) Process(p *Protocol, msgType message.Type, data []byte) {
proc.wp.Submit(p, msgType, data)
}
// Emit triggers MessageProcessed and BroadcastMessage events for the given message.
// All messages passed to this function must be checked with "DeSeriModePerformValidation" before.
// We also check that the parents are solid and not below max depth (BMD) before we broadcast the message, otherwise
// this message would be seen as invalid gossip by other peers.
func (proc *MessageProcessor) Emit(msg *storage.Message) error {
if msg.ProtocolVersion() != proc.opts.ProtocolVersion {
return fmt.Errorf("msg has invalid protocol version %d instead of %d", msg.ProtocolVersion(), proc.opts.ProtocolVersion)
}
essence := msg.TransactionEssence()
if essence != nil && essence.NetworkID != proc.opts.NetworkID {
return fmt.Errorf("transaction contained in msg has invalid network ID %d instead of %d", essence.NetworkID, proc.opts.NetworkID)
}
score := pow.Score(msg.Data())
if score < proc.opts.MinPoWScore {
return fmt.Errorf("msg has insufficient PoW score %0.2f", score)
}
cmi := proc.syncManager.ConfirmedMilestoneIndex()
checkParentFunc := func(messageID hornet.MessageID) error {
cachedMsgMeta := proc.storage.CachedMessageMetadataOrNil(messageID) // meta +1
if cachedMsgMeta == nil {
// parent not found
entryPointIndex, exists := proc.storage.SolidEntryPointsIndex(messageID)
if !exists {
return ErrMessageNotSolid
}
if (cmi - entryPointIndex) > proc.opts.BelowMaxDepth {
// the parent is below max depth
return ErrMessageBelowMaxDepth
}
// message is a SEP and not below max depth
return nil
}
defer cachedMsgMeta.Release(true)
if !cachedMsgMeta.Metadata().IsSolid() {
// if the parent is not solid, the message itself can't be solid
return ErrMessageNotSolid
}
// we pass a background context here to not prevent emitting messages at shutdown (COO etc).
_, ocri, err := dag.ConeRootIndexes(context.Background(), proc.storage, cachedMsgMeta.Retain(), cmi) // meta +
if err != nil {
return err
}
if (cmi - ocri) > proc.opts.BelowMaxDepth {
// the parent is below max depth
return ErrMessageBelowMaxDepth
}
return nil
}
for _, parentMsgID := range msg.Parents() {
err := checkParentFunc(parentMsgID)
if err != nil {
return err
}
}
proc.Events.MessageProcessed.Trigger(msg, (Requests)(nil), (*Protocol)(nil))
proc.Events.BroadcastMessage.Trigger(&Broadcast{MsgData: msg.Data()})
return nil
}
// WorkUnitsSize returns the size of WorkUnits currently cached.
func (proc *MessageProcessor) WorkUnitsSize() int {
return proc.workUnits.GetSize()
}
// gets a CachedWorkUnit or creates a new one if it does not exist yet.
func (proc *MessageProcessor) workUnitFor(receivedTxBytes []byte) (cachedWorkUnit *CachedWorkUnit, newlyAdded bool) {
return &CachedWorkUnit{
proc.workUnits.ComputeIfAbsent(receivedTxBytes, func(_ []byte) objectstorage.StorableObject { // cachedWorkUnit +1
newlyAdded = true
return newWorkUnit(receivedTxBytes, proc)
}),
}, newlyAdded
}
// processes the given milestone request by parsing it and then replying to the peer with it.
func (proc *MessageProcessor) processMilestoneRequest(p *Protocol, data []byte) {
msIndex, err := ExtractRequestedMilestoneIndex(data)
if err != nil {
proc.serverMetrics.InvalidRequests.Inc()
// drop the connection to the peer
_ = proc.peeringManager.DisconnectPeer(p.PeerID, errors.WithMessage(err, "processMilestoneRequest failed"))
return
}
// peers can request the latest milestone we know
if msIndex == LatestMilestoneRequestIndex {
msIndex = proc.syncManager.LatestMilestoneIndex()
}
cachedMessage := proc.storage.MilestoneCachedMessageOrNil(msIndex) // message +1
if cachedMessage == nil {
// can't reply if we don't have the wanted milestone
return
}
defer cachedMessage.Release(true) // message -1
cachedRequestedData, err := cachedMessage.Message().Message().Serialize(serializer.DeSeriModeNoValidation, iotago.ZeroRentParas)
if err != nil {
// can't reply if serialization fails
return
}
msg, err := NewMessageMsg(cachedRequestedData)
if err != nil {
// can't reply if serialization fails
return
}
p.Enqueue(msg)
}
// processes the given message request by parsing it and then replying to the peer with it.
func (proc *MessageProcessor) processMessageRequest(p *Protocol, data []byte) {
if len(data) != iotago.MessageIDLength {
return
}
cachedMessage := proc.storage.CachedMessageOrNil(hornet.MessageIDFromSlice(data)) // message +1
if cachedMessage == nil {
// can't reply if we don't have the requested message
return
}
defer cachedMessage.Release(true) // message -1
cachedRequestedData, err := cachedMessage.Message().Message().Serialize(serializer.DeSeriModeNoValidation, iotago.ZeroRentParas)
if err != nil {
// can't reply if serialization fails
return
}
msg, err := NewMessageMsg(cachedRequestedData)
if err != nil {
// can't reply if serialization fails
return
}
p.Enqueue(msg)
}
// gets or creates a new WorkUnit for the given message and then processes the WorkUnit.
func (proc *MessageProcessor) processMessage(p *Protocol, data []byte) {
cachedWorkUnit, newlyAdded := proc.workUnitFor(data) // workUnit +1
// force release if not newly added, so the cache time is only active the first time the message is received.
defer cachedWorkUnit.Release(!newlyAdded) // workUnit -1
workUnit := cachedWorkUnit.WorkUnit()
workUnit.addReceivedFrom(p)
proc.processWorkUnit(workUnit, p)
}
// tries to process the WorkUnit by first checking in what state it is.
// if the WorkUnit is invalid (because the underlying message is invalid), the given peer is punished.
// if the WorkUnit is already completed, and the message was requested, this function emits a MessageProcessed event.
// it is safe to call this function for the same WorkUnit multiple times.
func (proc *MessageProcessor) processWorkUnit(wu *WorkUnit, p *Protocol) {
processRequests := func(wu *WorkUnit, msg *storage.Message, isMilestonePayload bool) Requests {
var requests Requests
// mark the message as received
request := proc.requestQueue.Received(msg.MessageID())
if request != nil {
requests = append(requests, request)
}
if isMilestonePayload {
// mark the milestone as received
msRequest := proc.requestQueue.Received(milestone.Index(msg.Milestone().Index))
if msRequest != nil {
requests = append(requests, msRequest)
}
}
wu.requested = requests.HasRequest()
return requests
}
wu.processingLock.Lock()
switch {
case wu.Is(Hashing):
wu.processingLock.Unlock()
return
case wu.Is(Invalid):
wu.processingLock.Unlock()
proc.serverMetrics.InvalidMessages.Inc()
// drop the connection to the peer
_ = proc.peeringManager.DisconnectPeer(p.PeerID, errors.New("peer sent an invalid message"))
return
case wu.Is(Hashed):
wu.processingLock.Unlock()
// we need to check for requests here again because there is a race condition
// between processing received messages and enqueuing requests.
requests := processRequests(wu, wu.msg, wu.msg.IsMilestone())
		if wu.requested {
			proc.Events.MessageProcessed.Trigger(wu.msg, requests, p)
		}
if proc.storage.ContainsMessage(wu.msg.MessageID()) {
proc.serverMetrics.KnownMessages.Inc()
p.Metrics.KnownMessages.Inc()
}
return
}
wu.UpdateState(Hashing)
wu.processingLock.Unlock()
// build HORNET representation of the message
msg, err := storage.MessageFromBytes(wu.receivedMsgBytes, serializer.DeSeriModePerformValidation, proc.deSeriParas)
if err != nil {
wu.UpdateState(Invalid)
wu.punish(errors.WithMessagef(err, "peer sent an invalid message"))
return
}
	// check the protocol version and network ID of the message
if msg.ProtocolVersion() != proc.opts.ProtocolVersion {
wu.UpdateState(Invalid)
wu.punish(errors.New("peer sent a message with an invalid protocol version"))
return
}
essence := msg.TransactionEssence()
if essence != nil && essence.NetworkID != proc.opts.NetworkID {
wu.UpdateState(Invalid)
wu.punish(errors.New("peer sent a message containing a transaction with an invalid network ID"))
return
}
isMilestonePayload := msg.IsMilestone()
// mark the message as received
requests := processRequests(wu, msg, isMilestonePayload)
// validate PoW score
if !wu.requested && pow.Score(wu.receivedMsgBytes) < proc.opts.MinPoWScore {
wu.UpdateState(Invalid)
wu.punish(errors.New("peer sent a message with insufficient PoW score"))
return
}
// safe to set the msg here, because it is protected by the state "Hashing"
wu.msg = msg
wu.UpdateState(Hashed)
// increase the known message count for all other peers
wu.increaseKnownTxCount(p)
// do not process gossip if we are not in sync.
// we ignore all received messages if we didn't request them and it's not a milestone.
// otherwise these messages would get evicted from the cache, and it's heavier to load them
// from the storage than to request them again.
if !wu.requested && !proc.syncManager.IsNodeAlmostSynced() && !isMilestonePayload {
return
}
proc.Events.MessageProcessed.Trigger(msg, requests, p)
}
func (proc *MessageProcessor) Broadcast(cachedMsgMeta *storage.CachedMetadata) {
proc.shutdownMutex.RLock()
defer proc.shutdownMutex.RUnlock()
defer cachedMsgMeta.Release(true)
if proc.shutdown {
// do not broadcast if the message processor was shut down
return
}
if !proc.syncManager.IsNodeSyncedWithinBelowMaxDepth() {
// no need to broadcast messages if the node is not sync within "below max depth"
return
}
// we pass a background context here to not prevent broadcasting messages at shutdown (COO etc).
_, ocri, err := dag.ConeRootIndexes(context.Background(), proc.storage, cachedMsgMeta.Retain(), proc.syncManager.ConfirmedMilestoneIndex())
if err != nil {
return
}
if (proc.syncManager.LatestMilestoneIndex() - ocri) > proc.opts.BelowMaxDepth {
// the solid message was below max depth in relation to the latest milestone index, do not broadcast
return
}
cachedMsg := proc.storage.CachedMessageOrNil(cachedMsgMeta.Metadata().MessageID())
if cachedMsg == nil {
return
}
defer cachedMsg.Release(true)
cachedWorkUnit, _ := proc.workUnitFor(cachedMsg.Message().Data()) // workUnit +1
defer cachedWorkUnit.Release(true) // workUnit -1
wu := cachedWorkUnit.WorkUnit()
if wu.requested {
// no need to broadcast if the message was requested
return
}
// if the workunit was already evicted, it may happen that
// we send the message back to peers which already sent us the same message.
// we should never access the "msg", because it may not be set in this context.
	// broadcast the message to all peers that didn't send it to us yet
proc.Events.BroadcastMessage.Trigger(wu.broadcast())
}
|
package gov.healthit.chpl.dto;
import java.io.Serializable;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import gov.healthit.chpl.entity.developer.DeveloperStatusEventEntity;
import lombok.Data;
import lombok.NoArgsConstructor;
@JsonIgnoreProperties(ignoreUnknown = true)
@Data
@NoArgsConstructor
public class DeveloperStatusEventDTO implements Serializable {
private static final long serialVersionUID = -2492374479266782228L;
private Long id;
private Long developerId;
private DeveloperStatusDTO status;
private Date statusDate;
private String reason;
private Boolean deleted;
public DeveloperStatusEventDTO(final DeveloperStatusEventEntity entity) {
this();
this.id = entity.getId();
this.developerId = entity.getDeveloperId();
this.status = new DeveloperStatusDTO(entity.getDeveloperStatus());
this.statusDate = entity.getStatusDate();
this.reason = entity.getReason();
this.setDeleted(entity.getDeleted());
}
public DeveloperStatusEventDTO(final DeveloperStatusEventDTO dto) {
this.id = dto.getId();
this.developerId = dto.getDeveloperId();
this.status = dto.getStatus(); // Shallow copy
this.statusDate = dto.getStatusDate();
this.reason = dto.getReason();
}
/**
     * Return true iff this DTO matches a different one.
*
* @param anotherStatusEvent
* the different one
* @return true iff this matches
*/
public boolean matches(final DeveloperStatusEventDTO anotherStatusEvent) {
boolean result = false;
if (this.getId() != null && anotherStatusEvent.getId() != null
&& this.getId().longValue() == anotherStatusEvent.getId().longValue()) {
return true;
}
return result;
}
// Not all attributes have been included. The attributes being used were selected so the DeveloperManager could
// determine equality when updating a Developer
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
DeveloperStatusEventDTO other = (DeveloperStatusEventDTO) obj;
if (developerId == null) {
if (other.developerId != null) {
return false;
}
} else if (!developerId.equals(other.developerId)) {
return false;
}
if (id == null) {
if (other.id != null) {
return false;
}
} else if (!id.equals(other.id)) {
return false;
}
if (reason == null) {
if (other.reason != null) {
return false;
}
} else if (!reason.equals(other.reason)) {
return false;
}
if (status == null) {
if (other.status != null) {
return false;
}
} else if (!status.equals(other.status)) {
return false;
}
if (statusDate == null) {
if (other.statusDate != null) {
return false;
}
} else if (!statusDate.equals(other.statusDate)) {
return false;
}
return true;
}
// Not all attributes have been included. The attributes being used were selected so the DeveloperManager could
// determine equality when updating a Developer
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((developerId == null) ? 0 : developerId.hashCode());
result = prime * result + ((id == null) ? 0 : id.hashCode());
result = prime * result + ((reason == null) ? 0 : reason.hashCode());
result = prime * result + ((status == null) ? 0 : status.hashCode());
result = prime * result + ((statusDate == null) ? 0 : statusDate.hashCode());
return result;
}
}
|
n=int(input())
x=input()
if n%2==1:
ans=x[0]
for i in range(1,n):
if i%2==1:
ans=x[i]+ans
else:
ans=ans+x[i]
print(ans)
else:
# ans=x[int(n/2)-1]+x[int(n/2)]
ans=x[:2]
for i in range(2,n):
if i % 2==0:
ans=x[i]+ans
else:
ans=ans+x[i]
print(ans)
# n=int(input())
# x=input()
# # x='a'+x
# # i=n
# ans=[0]*(n)
# for i in range(n):
# # ans[(i+1)]
# if (n-i)%2==1:
# ans[int((n-i)/2)+1]=x[i]
# else:
# ans[int((n-i)/2)-1]=x[i]
# # while i > 0:
# # if i%2==0:
# # ans[i]=x[int(i/2)-1]
# # else:
# # ans[i]=x[int(i/2)+1]
# # # ans+=x[int(i/2)]
# # i-=1
# print(ans)
|
/**
* Module declaring some example configuration and a _cat action that uses
* it.
*/
public static class ConfiguredExampleModule extends AbstractModule {
@Override
protected void configure() {
bind(ExamplePluginConfiguration.class).asEagerSingleton();
Multibinder<AbstractCatAction> catActionMultibinder = Multibinder.newSetBinder(binder(), AbstractCatAction.class);
catActionMultibinder.addBinding().to(ExampleCatAction.class).asEagerSingleton();
}
} |
package com.jun.timer.router.Strategy;
import com.jun.timer.common.RpcResponse;
import com.jun.timer.dto.JobParams;
import com.jun.timer.dto.LogDto;
import com.jun.timer.router.BaseRouter;
import com.jun.timer.utils.ApplicationContextHolder;
import com.jun.timer.utils.CommonUtils;
import java.util.Date;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
/**
* Created by xunaiyang on 2017/9/11.
*/
public class RouteByRound extends BaseRouter {
private static ConcurrentHashMap<Integer, Integer> routeCountEachJob = new ConcurrentHashMap<Integer, Integer>();
private static long CACHE_VALID_TIME = 0;
private static int count(int jobId) {
// cache clear
if (System.currentTimeMillis() > CACHE_VALID_TIME) {
routeCountEachJob.clear();
CACHE_VALID_TIME = System.currentTimeMillis() + 1000*60*60*24;
}
// count++
Integer count = routeCountEachJob.get(jobId);
        count = (count==null || count>1000000)?(new Random().nextInt(100)):++count; // start from a random value on first use to ease the initial load
routeCountEachJob.put(jobId, count);
return count;
}
@Override
public String routeStrategy(Integer jobId, List<String> addressList) {
return addressList.get(count(jobId)%addressList.size());
}
@Override
public RpcResponse routeRun(JobParams jobParams, List<String> addressList, LogDto logDto) {
String address = routeStrategy(jobParams.getJobId(), addressList);
logDto.setId(null);
logDto.setAddress(address);
logDto.setStartTime(new Date());
logDto.setEndTime(new Date());
Integer logId = ApplicationContextHolder.getLogApplication().recordLogInfo(CommonUtils.getTenantId(logDto.getJobName()), logDto);
jobParams.setLogId(logId);
logDto.setId(logId);
return runExcutor(jobParams, address);
}
}
|
/**
* A melee attack from enemy to a target
*
**/
public class MeleeAttackEvent extends TimeEvent<EnemyEntity> {
private float range = 1.5f;
private Class target;
/**
* Default constructor for serialization
*/
public MeleeAttackEvent() {
        // Empty for serialization purposes
}
/**
* Constructor for melee attack event, set up to repeat an attack according to
* attackSpeed
*
* @param attackSpeed
* the delay between attacks
*
*/
public MeleeAttackEvent(int attackSpeed, Class target) {
setDoReset(true);
setResetAmount(attackSpeed);
this.target = target;
reset();
}
/**
* Creates action as per TimeEvent in which the enemy this event belongs to attacks an
* entity of the target class provided if within melee range.
*
* @param enemy
* The enemy that this melee attack belongs to
*/
@Override
public void action(EnemyEntity enemy) {
Optional<AbstractEntity> foundTarget = WorldUtil.getClosestEntityOfClass(target, enemy.getPosX(), enemy.getPosY());
// no target exists or target is out of range
if (!foundTarget.isPresent() || enemy.distanceTo(foundTarget.get()) > range) {
return;
}
GameManager.get().getWorld()
.addEntity(new MeleeAttack(target,
new Vector3(enemy.getPosX() + 0.5f, enemy.getPosY() + 0.5f, enemy.getPosZ()),
new Vector3(foundTarget.get().getPosX(), foundTarget.get().getPosY(), foundTarget.get().getPosZ()), 1, 4));
}
/**
* @return a copy of this MeleeAttackEvent
*/
@Override
public TimeEvent<EnemyEntity> copy() {
return new MeleeAttackEvent(getResetAmount(), this.target);
}
/**
* @return string representation of melee attack
*/
@Override
public String toString() {
return String.format("Melee attack with %d attackspeed", this.getResetAmount());
}
} |
Laurie Kilmartin's father is going to pass away soon.
Diagnosed with lung cancer, Mr. Kilmartin was admitted to hospice on February 20th. Laurie, a comedian and finalist on Last Comic Standing, has been live-tweeting her experience watching her dad die before her eyes.
Kilmartin's tweets hit all of the stages of grief. There's sadness of course. And there's love. And due to Kilmartin's nature, there's humor.
Here's a sample of some of her tweets:
She even reached out to Glenn Beck; her dad is a huge fan:
Dad and daughter didn't always see eye to eye on politics, but it's clear this family has humor:
She tweeted what it was like breaking the news to her young son:
Comedian Patton Oswalt saw what she was doing and tweeted out to his 1.6 million followers:
Laurie Kilmartin's follower count has gone up significantly in the last couple of days as people have started to follow her updates on her father's health. She is also not the first person to live-tweet the death of a parent. NPR's Scott Simon live-tweeted the death of his mother back in July of 2013; a loving and very emotional tribute. |
/// Score for likelihood of being valid English plaintext.
/// Uses a chi-squared score based on English letter frequencies.
/// Scores are always positive; lower is better, 0.0 is best.
pub fn score_alphabetic(v: &[u8]) -> f32 {
let mut char_freq: HashMap<u8, f32> = HashMap::new();
let total_count = v.len() as f32;
for c in v.iter() {
let count = char_freq.entry(c.to_ascii_lowercase()).or_insert(0.0);
*count += 1.00;
}
// modified chi-squared
let mut score: f32 = 0.0;
for (c, count) in char_freq.iter() {
score += match CHAR_FREQ.get(c) {
Some(ref_freq) => ((count / total_count) - ref_freq).powi(2) / ref_freq,
None => {
if ignore_char(c) {
0.0
} else {
1.0
}
}
        };
}
score
} |
export type PluginOptionValue = string | boolean | number | string[];
export interface PluginOption {
[key: string]: PluginOptionValue | PluginOption | PluginOption[] | undefined;
}
export enum ServiceProvider {
GitHub = 'github',
GitLab = 'gitlab',
}
export enum ChangeLevel {
Major = 'major',
Minor = 'minor',
Patch = 'patch',
}
export enum ExclusionType {
AuthorLogin = 'authorLogin',
CommitType = 'commitType',
CommitScope = 'commitScope',
CommitSubject = 'commitSubject',
}
export interface ConfigOptions {
provider: ServiceProvider;
filePath: string;
types: Map<string, ChangeLevel>;
exclusions: Map<ExclusionType, string[]>;
plugins: Map<string, PluginOption>;
}
export class Config {
public readonly filePath: string;
public readonly provider: ServiceProvider;
private types: Map<string, ChangeLevel>;
private plugins: Map<string, PluginOption>;
private exclusions: Map<ExclusionType, string[]>;
public constructor(options: ConfigOptions) {
this.provider = options.provider;
this.filePath = options.filePath;
this.types = options.types;
this.plugins = options.plugins;
this.exclusions = options.exclusions;
}
public getPlugin(name: string): PluginOption | undefined {
return this.plugins.get(name);
}
public getPlugins(): [string, PluginOption][] {
return [...this.plugins.entries()];
}
public getTypes(): [string, ChangeLevel][] {
return [...this.types.entries()];
}
public getExclusions(): [ExclusionType, string[]][] {
return [...this.exclusions.entries()];
}
}
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-unticked-promoted-constructors #-}
-- |
-- Module : Control.Protocol
-- Copyright : (c) <NAME>
-- License : BSD3
--
-- Maintainer : <EMAIL>
-- Stability : experimental
-- Portability : non-portable
--
-- This module provides type 'Protocol' to model distributed multi-party protocols,
-- ensuring the continuity of the associated resource state transitions on the type level
-- for all protocol commands and scenarios.
--
-- It accepts a user-defined data type for protocol commands (a GADT of kind 'Command') and
-- the list of protocol participants to create protocol type.
--
-- Command type serves as a single abstract api between all participants - it is defined
-- to allow decoupling type-level requirements for participants implementations.
--
-- Function '(->:)' wraps commands into free parameterized monad (see <https://hackage.haskell.org/package/freer-indexed freer-indexed>)
-- so that they can be chained into type-aligned scenarios, optionally written using do notation.
-- These scenarios can be interpreted in any monad using 'runProtocol' function,
-- e.g. to print protocol description or diagram or to execute system-level integration tests.
--
-- See protocol definition and scenario examples in @<./Control-Protocol-Example.html Control.Protocol.Example>@.
module Control.Protocol
( Command,
Protocol,
ProtocolCmd,
(->:),
runProtocol,
)
where
import Control.Protocol.Internal
import Control.XFreer
import Data.Kind
import Data.Singletons
-- | Defines the kind of the command data type used by 'Protocol':
--
-- * @party@ - the type (normally enumerable) that defines parties of the protocol (here it is used as a kind). This type should be singletonized.
-- * @state@ - the kind of the protocol resource state.
--
-- The first type-level tuple in command constructors defines the party that sends the command, with the initial and final resource states for that party.
--
-- The second tuple defines the party that executes the command (e.g., provides some api) with its initial and final states.
--
-- See @<./Control-Protocol-Example.html Control.Protocol.Example>@ for command example.
type Command party state = (party, state, state) -> (party, state, state) -> Type -> Type
-- | Protocol data type that wraps a command and explicitly adds command participants.
--
-- Its only constructor that is not exported adds command participants
-- and combines participant states in type-level list so that they can be chained
-- in type-aligned sequence of commands:
--
-- > ProtocolCmd ::
-- > Sing (from :: p) ->
-- > Sing (to :: p) ->
-- > cmd '(from, Prj ps s from, fs') '(to, Prj ps s to, ts') a ->
-- > ProtocolCmd cmd ps s (Inj ps (Inj ps s from fs') to ts') a
--
-- Here:
--
-- * @from@ - type of party that sends the command.
-- * @to@ - type of party that executes command (e.g., provides some API).
--
-- 'Protocol' type synonym should be used to construct this type, and function '(->:)' should be used instead of the constructor.
data ProtocolCmd (cmd :: Command p k) (parties :: [p]) (s :: [k]) (s' :: [k]) (a :: Type) where
ProtocolCmd ::
Sing (from :: p) ->
Sing (to :: p) ->
cmd '(from, Prj ps s from, fs') '(to, Prj ps s to, ts') a ->
ProtocolCmd cmd ps s (Inj ps (Inj ps s from fs') to ts') a
-- | Type synonym to create protocol data type ('ProtocolCmd' wrapped in 'XFree' - parameterized free monad):
--
-- * @cmd@ - user-defined command type constructor that should have the kind 'Command'.
-- * @parties@ - type-level list of participants - it defines the order of participant states in the combined state of the system described by the protocol.
type Protocol cmd parties = XFree (ProtocolCmd cmd parties)
infix 6 ->:
-- | Function that wraps command into 'ProtocolCmd' type converted into free parameterized monad.
(->:) ::
-- | party that sends command
Sing from ->
-- | party that executes command
Sing to ->
-- | command - its initial states for both parties are projected from system state
cmd '(from, Prj ps s from, fs') '(to, Prj ps s to, ts') a ->
-- | final protocol state injects final states of both participants
Protocol cmd ps s (Inj ps (Inj ps s from fs') to ts') a
(->:) f t c = xfree $ ProtocolCmd f t c
-- | 'runProtocol' interprets protocol scenario in any monad,
-- using passed 'runCmd' function that interprets individual commands.
runProtocol ::
forall m cmd ps s s' a.
Monad m =>
-- | function to interpret a command
(forall from to b. (Sing (P from) -> Sing (P to) -> cmd from to b -> m b)) ->
-- | protocol scenario (see example in @'Control.Protocol.Example.Scenario'@)
Protocol cmd ps s s' a ->
m a
runProtocol runCmd = loop
where
loop :: forall s1 s2 b. Protocol cmd ps s1 s2 b -> m b
loop (Pure x) = return x
loop (Bind c f) = run c >>= loop . f
run :: forall s1 s2 b. ProtocolCmd cmd ps s1 s2 b -> m b
run (ProtocolCmd from to cmd) = runCmd from to cmd
|
import { Module } from '@nestjs/common';
import { ArticalController } from './artical.controller';
import { ArticalService } from './artical.service';
@Module({
controllers: [ArticalController],
providers: [ArticalService]
})
export class ArticalModule {}
|
// read reads the given byte data into the given drawing.
func read(r io.Reader, d *Drawing) error {
version, err := readHeader(r)
if err != nil {
return err
}
d.Version = version
nLayers, err := readNumber(r)
if err != nil {
return err
}
d.Layers = make([]Layer, nLayers)
for i := uint32(0); i < nLayers; i++ {
		nStrokes, err := readNumber(r)
		if err != nil {
			return err
		}
		d.Layers[i].Strokes = make([]Stroke, nStrokes)
for j := uint32(0); j < nStrokes; j++ {
s, err := readStroke(r, version)
if err != nil {
return err
}
d.Layers[i].Strokes[j] = s
}
}
return nil
} |
def build_proxy_options(fname):
print('\nMULTI')
outfile = fname.replace(os.path.splitext(fname)[1], '.mp4')
try:
command = [
FFMPEG_PATH, '-i', fname,
'-y',
'-loglevel', 'warning',
            '-map', '0:1', '-map', '0:2',
'-c:v', 'h264',
'-b:v', VIDEO_BR,
'-crf', CRF_VALUE,
'-pix_fmt', 'yuv420p',
'-vf', 'scale=320:240',
'-sws_flags', 'lanczos',
'-preset', PRESET,
'-c:a', 'aac',
'-ac', '2',
'-b:a', AUDIO_BR,
'{}{}'.format(dest_dir, outfile)
]
print('\nBuilding proxy file: {}'.format(outfile))
p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, errors = p.communicate()
print(output)
except Exception as FE:
print(FE, Exception)
logging.error('Proxy Build Error: {} : {}'.format(FE, outfile)) |
//+--------------------------------------------------------------------------
//
// Function: DeleteChildren
//
// Synopsis: Deletes only the children of a single target
//
// Arguments: credentialObject
// basePathsInfo
// pIADs: IADs pointer to the object
// *pfErrorReported: Will be set to true if DeleteChildren
// takes care of reporting the error itself
//
// Returns: HRESULT : error code to be returned from command-line app
// Could be almost any ADSI error
// Returns S_OK if there are no children
//
// History: 26-Sep-2000 JonN Created
//
//---------------------------------------------------------------------------
HRESULT DeleteChildren( CDSCmdCredentialObject& credentialObject,
IADs* pIADs,
bool* pfErrorReported )
{
ENTER_FUNCTION(LEVEL5_LOGGING, DeleteChildren);
ASSERT( pIADs && pfErrorReported );
if ( !pIADs || !pfErrorReported )
return E_POINTER;
CComQIPtr<IDirectorySearch,&IID_IDirectorySearch> spSearch( pIADs );
ASSERT( !!spSearch );
if ( !spSearch )
return E_FAIL;
HRESULT hr = SetSearchPreference(spSearch, ADS_SCOPE_ONELEVEL);
ASSERT( !FAILED(hr) );
if (FAILED(hr))
return hr;
LPWSTR pAttrs[1] = { L"aDSPath" };
ADS_SEARCH_HANDLE SearchHandle = NULL;
hr = spSearch->ExecuteSearch (L"(objectClass=*)",
pAttrs,
1,
&SearchHandle);
if (FAILED(hr))
{
DEBUG_OUTPUT(MINIMAL_LOGGING,
L"DeleteChildren: ExecuteSearch failure: 0x%08x",
hr);
return hr;
}
while ( hr = spSearch->GetNextRow( SearchHandle ),
SUCCEEDED(hr) && hr != S_ADS_NOMORE_ROWS )
{
CComBSTR sbstrADsPathThisItem;
hr = RetrieveStringColumn( spSearch,
SearchHandle,
pAttrs[0],
sbstrADsPathThisItem );
ASSERT( !FAILED(hr) );
if (FAILED(hr))
break;
CComPtr<IADsDeleteOps> spDeleteOps;
HRESULT hrThisItem = DSCmdOpenObject(credentialObject,
sbstrADsPathThisItem,
IID_IADsDeleteOps,
(void**)&spDeleteOps,
true);
if (FAILED(hrThisItem))
{
DEBUG_OUTPUT(
MINIMAL_LOGGING,
L"DeleteChildren: DsCmdOpenObject(%s) failure: 0x%08x",
sbstrADsPathThisItem, hrThisItem);
}
else
{
ASSERT( !!spDeleteOps );
hrThisItem = spDeleteOps->DeleteObject( NULL );
if (FAILED(hrThisItem))
{
DEBUG_OUTPUT(
MINIMAL_LOGGING,
L"DeleteChildren: DeleteObject(%s) failure: 0x%08x",
sbstrADsPathThisItem, hrThisItem);
}
}
if (!FAILED(hrThisItem))
continue;
if (!FAILED(hr))
hr = hrThisItem;
CComBSTR sbstrDN;
CPathCracker pathcracker;
HRESULT hr2 = pathcracker.Set( sbstrADsPathThisItem, ADS_SETTYPE_FULL );
ASSERT( !FAILED(hr2) );
if (FAILED(hr2))
break;
hr2 = pathcracker.Retrieve( ADS_FORMAT_X500_DN, &sbstrDN );
ASSERT( !FAILED(hr2) );
if (FAILED(hr2))
break;
DisplayErrorMessage(g_pszDSCommandName,
sbstrDN,
hrThisItem);
*pfErrorReported = true;
if (!fContinue)
break;
}
if (hr != S_ADS_NOMORE_ROWS)
{
DEBUG_OUTPUT(FULL_LOGGING,
L"DeleteChildren: abandoning search");
(void) spSearch->AbandonSearch( SearchHandle );
}
return (hr == S_ADS_NOMORE_ROWS) ? S_OK : hr;
} |
def add_fulltext_index(self, collection_name, fields, min_length=None):
_collection = self.get_collection(collection_name)
return _collection.add_fulltext_index(fields=fields, min_length=min_length) |
from django.shortcuts import render,redirect
# Create your views here.
from apps.ponentes.forms import PonentesForm
from apps.ponentes.models import Ponente
from django.views.generic import ListView
class ponentes_list(ListView):
model = Ponente
template_name = 'ponentes/index.html'
paginate_by = 10
|
import { Component, OnDestroy, OnInit } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Subscription } from 'rxjs';
import { mergeMap } from 'rxjs/operators';
import { Player } from '../interfaces/player';
import { Team } from '../interfaces/team';
import { League } from '../interfaces/league';
import { PlayerService } from '../services/player/player.service';
import { TeamService } from '../services/team/team.service';
import { LeagueService } from '../services/league/league.service';
import { PopoverController } from '@ionic/angular';
import { DetailsPagePopoverComponent } from '../components/details-page-popover/details-page-popover.component';
@Component({
selector: 'app-player-details',
templateUrl: './player-details.page.html',
styleUrls: ['./player-details.page.scss'],
})
export class PlayerDetailsPage implements OnInit, OnDestroy {
playerDetails: Player;
teamDetails: Team;
leagueDetails: League;
playerDetailsSuscriptor: Subscription;
constructor(
private activatedRoute: ActivatedRoute,
private popoverCtrl: PopoverController,
private playerService: PlayerService,
private teamService: TeamService,
private leagueService: LeagueService,
) { }
ngOnInit(): void {
let id = this.activatedRoute.snapshot.paramMap.get('id');
this.playerDetailsSuscriptor = this.playerService.getPlayerById(id).pipe(
mergeMap(
playerResults => {
this.playerDetails = playerResults;
return this.teamService.getTeamById(playerResults["teamId"]);
}
),
mergeMap(
teamResults => {
this.teamDetails = teamResults;
return this.leagueService.getLeagueById(teamResults["Liga"]);
}
)
)
.subscribe(
leagueResults => {
this.leagueDetails = leagueResults;
}
);
}
async showPopover(event: Event): Promise<void> {
let popover = await this.popoverCtrl.create({
event,
component: DetailsPagePopoverComponent,
componentProps: {
item: this.playerDetails,
itemType: "player"
}
})
return await popover.present();
}
ngOnDestroy(): void {
if (this.playerDetailsSuscriptor) this.playerDetailsSuscriptor.unsubscribe();
}
}
|
def convolve(self, other):
if self.data.size == 0 or other.data.size == 0:
return signal()
data = np.convolve(self.data, other.data)
start = self.start + other.start
return signal(data, start) |
Short-term hydro-thermal coordination based on interior point nonlinear programming and genetic algorithms
This paper presents a combined primal-dual logarithmic-barrier interior point and genetic algorithm for short-term hydro-thermal coordination. The genetic algorithm is used to compute the optimal on/off status of thermal units, while the interior point module deals with the optimal solution of the hydraulically-coupled short-term economic dispatch of thermal and hydro units. Inter-temporal constraints both due to cascaded reservoirs and maximum up and down ramps of thermal units are included in the latter module. Results from realistic cases based on the Spanish power system are reported. |
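As a very rough illustration of the decomposition described above (the three-unit data, the proportional dispatch that stands in for the primal-dual logarithmic-barrier interior-point stage, and the mutation-only genetic algorithm are simplifications made for illustration, not the authors' formulation), the outer loop below searches over hourly on/off schedules while an inner routine prices each candidate commitment:

import random

# hypothetical 3-unit system: (min MW, max MW, cost per MWh when committed)
UNITS = [(50, 200, 20.0), (40, 150, 25.0), (30, 100, 32.0)]
DEMAND = [260, 300, 220, 180]  # MW per hour, made up for the example

def dispatch_cost(schedule):
    """Stand-in for the inner economic dispatch (the paper solves this stage
    with a primal-dual logarithmic-barrier interior-point method)."""
    total = 0.0
    for load, status in zip(DEMAND, schedule):
        on = [u for u, s in zip(UNITS, status) if s]
        cap = sum(pmax for _, pmax, _ in on)
        if cap < load:
            return float("inf")  # infeasible commitment for this hour
        for _, pmax, cost in on:
            total += cost * load * pmax / cap  # crude proportional dispatch
    return total

def random_schedule():
    return [[random.randint(0, 1) for _ in UNITS] for _ in DEMAND]

def mutate(schedule):
    child = [hour[:] for hour in schedule]
    child[random.randrange(len(DEMAND))][random.randrange(len(UNITS))] ^= 1
    return child

# outer loop: the GA searches over on/off schedules, fitness = dispatch cost
population = [random_schedule() for _ in range(30)]
for _ in range(200):
    population.sort(key=dispatch_cost)
    elite = population[:10]
    population = elite + [mutate(random.choice(elite)) for _ in range(20)]

best = min(population, key=dispatch_cost)
print("best commitment:", best, "estimated cost:", dispatch_cost(best))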
// Eval takes a list of expressions
// representing a solo or product term
// of tensors in abstract index notation
// and returns the resulting Tensor.
//
// Note that shmensor Tensors are lazy, so computation
// is only performed when you call Reify().
//
// Consider verbose mode boolean to explore what's happening.
func Eval(t ...Expression) Tensor {
if len(t) == 0 {
return Tensor{}
}
head, tail := *t[0].t, t[1:]
for _, elt := range tail {
head = Product(head, *elt.t)
}
var signature []string
var indices []string
for _, elt := range t {
s := strings.Split(elt.t.signature, "")
signature = append(signature, s...)
i := strings.Split(elt.indices, "")
indices = append(indices, i...)
}
sortedIndices := make([]string, len(indices))
copy(sortedIndices, indices)
sort.Strings(sortedIndices)
var toContract []string
var last string
for i, elt := range sortedIndices {
if elt == last {
if i < len(sortedIndices)-1 {
if sortedIndices[i+1] == elt {
log.Fatalf("too many repeated indices %v", indices)
}
}
toContract = append(toContract, elt)
}
last = elt
}
for _, index := range toContract {
a := -1
b := -1
for j, letter := range indices {
if letter == index {
if b == -1 && a != -1 {
b = j
break
}
if a == -1 {
a = j
}
}
}
head = Trace(head, a, b)
indices = append(indices[:b], indices[b+1:]...)
signature = append(signature[:b], signature[b+1:]...)
indices = append(indices[:a], indices[a+1:]...)
signature = append(signature[:a], signature[a+1:]...)
}
return head
} |
n = int(input())
taegrt = ["3","5","7"]
ans = 0
count_num = []
def dfs(s):
global count_num
global taegrt
global ans
for val in taegrt:
tmp = s+val
if int(tmp) > n:
return
if tmp in count_num:
continue
count_num.append(tmp)
#print(tmp)
if "7" in list(tmp) and "5" in list(tmp) and "3" in list(tmp):
ans += 1
dfs(tmp)
for i in taegrt:
dfs(i)
print(ans)
|
#!/usr/bin/env python
import json
from os import listdir
from os.path import isfile, join
from collections import namedtuple
from ciscoconfparse import CiscoConfParse
"""
Analysis of Ansible ios_facts.json file specific to configuration of DHCP snooping.
"""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
HostRecord = namedtuple("HostRecord", "host int_list")
DHCP_SNOOP_VLAN = "500"
OUT_DIR = "inventories"
OUT_FILE = "inventory-dhcp-snoop.txt"
def main():
device_fact_files = find_files("device_details")
devices_to_change = process_configuration(device_fact_files)
inventory_lines = output_preprocess(devices_to_change)
create_inventory_file(inventory_lines, join(OUT_DIR, OUT_FILE))
def output_preprocess(data):
return_list = []
for item in data:
line = "{} interfaces=\"{}\"".format(item.host, item.int_list)
return_list.append(line + "\n")
return return_list
def create_inventory_file(data, filename):
"""Creates ansible inventory files with interface variables"""
with open(filename, 'w') as fh:
fh.write("[ci_list]\n")
fh.writelines(data)
def find_files(path):
""" Find log in the defined directory which end with "iosfacts.json"
"""
return [join(path, f) for f in listdir(path) if isfile(join(path, f)) if f.endswith("iosfacts.json")]
def process_configuration(filename_list, vlan=DHCP_SNOOP_VLAN):
"""Find ansible_net_config in the JSON and pass it to analyze_ios_config_file.
    Return, for each device that has access ports in the DHCP snooping VLAN, the trunk interfaces to configure
"""
devices_to_configure = []
for file in filename_list:
with open(file, 'r') as fh:
data_dic = json.load(fh)
try:
hostname = data_dic["ansible_facts"]["ansible_net_hostname"].lower()
except TypeError:
return False
result = analyze_ios_config_file(data_dic, vlan)
if result is not None and len(result) > 0:
devices_to_configure.append(HostRecord(hostname, result))
return devices_to_configure
def find_interface_beloging_to_vlan(ios_config, vlan="1"):
""" Parse the IOS configuration and return list of interfaces configured in a specific VLAN.
Return list of interfaces or None
"""
parse = CiscoConfParse(ios_config.splitlines())
found_interfaces = []
for obj in parse.find_objects(r"interface"):
if obj.re_search_children(r"switchport\saccess\svlan\s{}".format(vlan)):
found_interfaces.append(obj.text.split()[1])
if len(found_interfaces) == 0:
return None
return found_interfaces
def analyze_ios_config_file(data_dic, vlan):
"""Parse Cisco IOS configuration"""
config_file = data_dic["ansible_facts"]["ansible_net_config"]
if find_interface_beloging_to_vlan(config_file, vlan=vlan) is not None:
return find_trunk_interfaces(config_file)
def find_trunk_interfaces(ios_config):
"""Parse the IOS config and return list of interfaces configured as trunk"""
parse = CiscoConfParse(ios_config.splitlines())
found_interfaces = []
for obj in parse.find_objects(r"interface"):
if obj.re_search_children(r"switchport\smode\strunk"):
found_interfaces.append(obj.text.split()[1])
if len(found_interfaces) == 0:
return None
return found_interfaces
if __name__ == '__main__':
main()
|
Optimizing Recommendation Algorithms Using Self-Similarity Matrices for Music Streaming Services
Most music recommendation algorithms, such as the proprietary ones used to curate music at streaming companies, rely mainly on song-rating data to recommend songs to users. This does not always provide a great music experience for the many users who pay for these services. As an example, there are over 35 million songs on Spotify alone, and according to a recent statistical report released by Spotify, about 4 million of them have never been played, not even once. Clearly this is not good news for content creators who trust and pay Spotify as a platform to distribute their music. The root of the problem lies in the recommendation algorithms adopted by many music streaming platforms like Spotify: they fail to recommend music users would enjoy, which leaves the platform with "dark music", i.e. music that has never received a single play. We propose a framework that provides a strategy music recommendation algorithms can adopt to give users a better music experience. We utilize self-similarity matrices, developed in R, to visualize patterns of repetition in song lyrics extracted as text. The lyrics of a song played by the user are extracted, a visual pattern print of the song is generated, and this pattern is compared against the patterns of many other songs on the platform, especially ones that have never been played before. If the comparison similarity index is above 70 percent, the song is recommended to the user; songs with higher similarity indices get priority. We believe this will ensure great music experiences for listeners and also benefit content creators, since the likelihood that their music will reach users will be higher.
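A rough, hypothetical sketch of that comparison pipeline in Python (the paper's own tooling is in R; the lyric source, the per-line word-overlap measure, the 32x32 resampling and the catalogue structure are all assumptions made for illustration, not details taken from the abstract):

import numpy as np

def self_similarity(lyrics: str) -> np.ndarray:
    """Self-similarity matrix over a song's lyric lines, using word overlap
    (Jaccard) between every pair of lines as a stand-in repetition measure."""
    lines = [l.strip().lower() for l in lyrics.splitlines() if l.strip()]
    n = len(lines)
    m = np.zeros((n, n))
    for i in range(n):
        for j in range(n):
            a, b = set(lines[i].split()), set(lines[j].split())
            m[i, j] = len(a & b) / max(len(a | b), 1)
    return m

def resample(m: np.ndarray, size: int = 32) -> np.ndarray:
    """Nearest-neighbour resample so patterns of different lengths compare."""
    if m.shape[0] == 0:
        return np.zeros((size, size))
    idx = np.linspace(0, m.shape[0] - 1, size).astype(int)
    return m[np.ix_(idx, idx)]

def similarity_index(lyrics_a: str, lyrics_b: str) -> float:
    a = resample(self_similarity(lyrics_a))
    b = resample(self_similarity(lyrics_b))
    return 1.0 - float(np.abs(a - b).mean())  # 1.0 = identical repetition pattern

def recommend(played_lyrics: str, unplayed_catalogue: dict, threshold: float = 0.70):
    """Recommend unplayed songs whose pattern clears the similarity threshold."""
    scored = [(similarity_index(played_lyrics, lyr), title)
              for title, lyr in unplayed_catalogue.items()]
    return [title for score, title in sorted(scored, reverse=True) if score >= threshold]

With a catalogue of unplayed songs keyed by title, recommend(current_lyrics, catalogue) would return the titles whose repetition pattern matches the currently played song above the 70 percent threshold, highest similarity first.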
def approve_assignment(self, assignment_id):
try:
return self._is_ok(
self.mturk.approve_assignment(AssignmentId=assignment_id)
)
except ClientError as ex:
raise MTurkServiceException(
"Failed to approve assignment {}: {}".format(
assignment_id, str(ex))
) |
/**
* @author Francisco Sanchez
*/
@RunWith(MockitoJUnitRunner.class) public class DefaultResmiServiceTest {
private static final long DATE = 1234;
private static final String _UPDATED_AT = "_updatedAt";
private static final String _CREATED_AT = "_createdAt";
private static final String DOMAIN = "DOMAIN";
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.forLanguageTag("ES"));
String TYPE = "resource:TYPE";
String RELATION_TYPE = "relation:TYPE";
String ID = "test";
String USER_ID = "123";
String RELATION_URI = "RELATION_URI";
@Mock
MongoResmiDao resmiDao;
CollectionParameters collectionParametersMock;
@Mock ResourceQuery resourceQueryMock;
@Mock List<ResourceQuery> resourceQueriesMock;
@Mock Search resourceSearchMock;
@Mock Pagination paginationMock;
@Mock Sort sortMock;
@Mock RelationParameters relationParametersMock;
private DefaultResmiService defaultResmiService;
@Before
public void setup() {
defaultResmiService = new DefaultResmiService(resmiDao, Clock.systemUTC());
when(relationParametersMock.getAggregation()).thenReturn(Optional.empty());
when(relationParametersMock.getQueries()).thenReturn(Optional.ofNullable(resourceQueriesMock));
when(relationParametersMock.getQueries()).thenReturn(Optional.ofNullable(resourceQueriesMock));
when(relationParametersMock.getSearch()).thenReturn(Optional.ofNullable(resourceSearchMock));
when(relationParametersMock.getPagination()).thenReturn(paginationMock);
when(relationParametersMock.getSort()).thenReturn(Optional.ofNullable(sortMock));
collectionParametersMock = relationParametersMock;
reset(resmiDao);
}
@Test
public void findTest() throws BadConfigurationException, InvalidApiParamException {
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE);
JsonArray fakeResult = new JsonArray();
when(
resmiDao.findCollection(eq(resourceUri), eq(Optional.of(resourceQueriesMock)), any(), eq(Optional.of(paginationMock)), eq(Optional.of(sortMock)))).thenReturn(fakeResult);
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonArray result = defaultResmiService.findCollection(resourceUri, Optional.of(collectionParametersMock));
assertThat(fakeResult).isEqualTo(result);
}
@Test
public void findResourceByIdTest() throws BadConfigurationException {
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, ID);
JsonObject fakeResult = new JsonObject();
when(resmiDao.findResource(eq(resourceUri))).thenReturn(fakeResult);
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonObject result = defaultResmiService.findResource(resourceUri);
assertThat(fakeResult).isEqualTo(result);
}
@Test
public void findRelationTest() throws BadConfigurationException, InvalidApiParamException {
JsonElement fakeResult = new JsonObject();
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, ID, RELATION_TYPE, "test");
when(
resmiDao.findRelation(eq(resourceUri), eq(Optional.of(resourceQueriesMock)), any(), eq(Optional.of(paginationMock)), eq(Optional.of(sortMock)))).thenReturn(fakeResult);
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
when(relationParametersMock.getPredicateResource()).thenReturn(Optional.of("test"));
JsonElement result = defaultResmiService.findRelation(resourceUri, Optional.of(relationParametersMock));
assertThat(fakeResult).isEqualTo(result);
}
@Test
public void countCollectionTest() throws BadConfigurationException, InvalidApiParamException {
JsonElement fakeResult = new JsonObject();
when(resmiDao.count(eq(new ResourceUri(DOMAIN, TYPE)), eq(resourceQueriesMock))).thenReturn(fakeResult);
when(collectionParametersMock.getAggregation()).thenReturn(Optional.of(new Count("*")));
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonElement result = defaultResmiService.aggregate(new ResourceUri(DOMAIN, TYPE), collectionParametersMock);
assertThat(fakeResult).isEqualTo(result);
}
@Test
public void countRelationTest() throws BadConfigurationException, InvalidApiParamException {
JsonElement fakeResult = new JsonObject();
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, ID, RELATION_TYPE);
when(resmiDao.count(eq(resourceUri), eq(resourceQueriesMock))).thenReturn(fakeResult);
when(collectionParametersMock.getAggregation()).thenReturn(Optional.of(new Count("*")));
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonElement result = defaultResmiService.aggregate(resourceUri, relationParametersMock);
assertThat(fakeResult).isEqualTo(result);
}
@Test
public void saveResourceTest() throws StartsWithUnderscoreException {
ResourceUri uri = new ResourceUri(DOMAIN, TYPE);
JsonObject fakeResult = new JsonObject();
defaultResmiService.saveResource(uri, fakeResult, Optional.of(USER_ID));
assertThat(fakeResult.get(DefaultResmiService.ID).getAsString()).startsWith(USER_ID);
assertThat(fakeResult.get(_CREATED_AT)).isNotNull();
assertThat(fakeResult.get(_UPDATED_AT)).isNotNull();
verify(resmiDao).saveResource(uri, fakeResult);
}
@Test(expected = StartsWithUnderscoreException.class)
public void saveResourceWithUnderscoreTest() throws StartsWithUnderscoreException {
ResourceUri uri = new ResourceUri(DOMAIN, TYPE);
JsonObject fakeResult = new JsonObject();
fakeResult.add("_test", new JsonPrimitive("123"));
defaultResmiService.saveResource(uri, fakeResult, Optional.of(USER_ID));
}
@Test
public void upsertTest() throws StartsWithUnderscoreException {
String id = "123";
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, id);
JsonObject fakeResult = new JsonObject();
defaultResmiService.updateResource(resourceUri, fakeResult);
assertThat(fakeResult.get(_CREATED_AT)).isNotNull();
assertThat(fakeResult.get(_UPDATED_AT)).isNotNull();
verify(resmiDao).updateResource(resourceUri, fakeResult);
}
@Test
public void upsertWithDatesTest() throws StartsWithUnderscoreException, ParseException {
String id = "123";
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, id);
JsonObject fakeResult = new JsonObject();
fakeResult.addProperty(_CREATED_AT, DATE);
fakeResult.addProperty(_UPDATED_AT, DATE);
defaultResmiService.updateResource(resourceUri, fakeResult);
assertThat(fakeResult.get(_CREATED_AT)).isNotNull();
assertThat(extractMillis(fakeResult.get(_UPDATED_AT).getAsString())).isNotEqualTo(DATE);
verify(resmiDao).updateResource(resourceUri, fakeResult);
}
@Test(expected = StartsWithUnderscoreException.class)
public void upsertWithUnderscoreTest() throws StartsWithUnderscoreException {
String id = "123";
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, id);
JsonObject fakeResult = new JsonObject();
fakeResult.add("_test", new JsonPrimitive("123"));
defaultResmiService.updateResource(resourceUri, fakeResult);
}
@Test
public void createRelationTest() throws NotFoundException, StartsWithUnderscoreException {
String resourceId = "test";
JsonObject jsonObject = new JsonObject();
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, resourceId, RELATION_TYPE, RELATION_URI);
defaultResmiService.upsertRelation(resourceUri, jsonObject);
assertThat(jsonObject.get(_CREATED_AT)).isNotNull();
assertThat(jsonObject.get(_UPDATED_AT)).isNotNull();
verify(resmiDao).upsertRelation(resourceUri, jsonObject);
}
private long extractMillis(String date) throws ParseException {
date = date.replace("ISODate(", "").replace(")", "").replace("T", " ").replace("Z", "");
return formatter.parse(date).getTime();
}
@Test(expected = StartsWithUnderscoreException.class)
public void createRelationWithUnderscoreTest() throws NotFoundException, StartsWithUnderscoreException {
String resourceId = "test";
JsonObject jsonObject = new JsonObject();
jsonObject.add("_test", new JsonPrimitive("123"));
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, resourceId, RELATION_TYPE, RELATION_URI);
defaultResmiService.upsertRelation(resourceUri, jsonObject);
}
@Test
public void moveElementTest() throws NotFoundException {
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE, ID, RELATION_TYPE, RELATION_URI);
RelationMoveOperation relationMoveOperation = new RelationMoveOperation(1);
defaultResmiService.moveRelation(resourceUri, relationMoveOperation);
verify(resmiDao).moveRelation(resourceUri, relationMoveOperation);
}
@Test
public void deleteResourceByIdTest() throws NotFoundException {
ResourceUri uri = new ResourceUri(DOMAIN, TYPE, ID);
defaultResmiService.deleteResource(uri);
verify(resmiDao).deleteResource(uri);
}
@Test
public void deleteRelationTest() throws NotFoundException {
ResourceUri uri = new ResourceUri(DOMAIN, TYPE, ID, RELATION_TYPE, "dst");
defaultResmiService.deleteRelation(uri, Optional.empty());
verify(resmiDao).deleteRelation(uri, Optional.empty());
}
@Test
public void ensureCollectionIndexTest() throws NotFoundException {
Index index = mock(Index.class);
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE);
defaultResmiService.ensureIndex(resourceUri, index);
verify(resmiDao).ensureIndex(resourceUri, index);
}
@Test
public void ensureRelationIndexTest() throws NotFoundException {
Index index = mock(Index.class);
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE).setRelation(RELATION_TYPE);
defaultResmiService.ensureIndex(resourceUri, index);
verify(resmiDao).ensureIndex(resourceUri, index);
}
@Test
public void averageTest() throws BadConfigurationException, InvalidApiParamException {
JsonElement fakeResult = new JsonObject();
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE);
when(resmiDao.average(eq(resourceUri), eq(resourceQueriesMock), eq("testField"))).thenReturn(fakeResult);
when(collectionParametersMock.getAggregation()).thenReturn(Optional.of(new Average("testField")));
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonElement result = defaultResmiService.aggregate(resourceUri, collectionParametersMock);
assertThat(result).isEqualTo(fakeResult);
}
@Test
public void maxTest() throws BadConfigurationException, InvalidApiParamException {
JsonElement fakeResult = new JsonObject();
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE);
when(resmiDao.max(eq(resourceUri), eq(resourceQueriesMock), eq("testField"))).thenReturn(fakeResult);
when(collectionParametersMock.getAggregation()).thenReturn(Optional.of(new Max("testField")));
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonElement result = defaultResmiService.aggregate(resourceUri, collectionParametersMock);
assertThat(result).isEqualTo(fakeResult);
}
@Test
public void minTest() throws BadConfigurationException, InvalidApiParamException {
JsonElement fakeResult = new JsonObject();
ResourceUri resourceUri = new ResourceUri(DOMAIN, TYPE);
when(resmiDao.min(eq(resourceUri), eq(resourceQueriesMock), eq("testField"))).thenReturn(fakeResult);
when(collectionParametersMock.getAggregation()).thenReturn(Optional.of(new Min("testField")));
when(collectionParametersMock.getSearch()).thenReturn(Optional.empty());
JsonElement result = defaultResmiService.aggregate(resourceUri, collectionParametersMock);
assertThat(result).isEqualTo(fakeResult);
}
} |
/**
* Returns all client software manifests that are broken and cannot be read.
*/
public Collection<Manifest.Key> listBroken() {
Collection<Manifest.Key> result = new ArrayList<>();
Set<Key> keys = hive.execute(new ManifestListOperation().setManifestName(MANIFEST_PREFIX));
for (Key key : keys) {
ClientSoftwareConfiguration software = read(key);
if (software == null) {
result.add(key);
}
}
return result;
} |
/**
* Initializes {@code budgetListPanelHandle} with a {@code BudgetListPanel} backed by {@code backingList}.
* Also shows the {@code Stage} that displays only {@code BudgetListPanel}.
*/
private void initUi(ObservableList<Budget> backingList) {
BudgetListPanel budgetListPanel =
new BudgetListPanel(backingList, selectedBudget, selectedBudget::set);
uiPartRule.setUiPart(budgetListPanel);
budgetListPanelHandle = new BudgetListPanelHandle(getChildNode(budgetListPanel.getRoot(),
BudgetListPanelHandle.BUDGET_LIST_VIEW_ID));
} |
def _read_rules(silent=False):
global _VOWEL_SET, _CONSONANT_SET
if silent == False:
print("Reading phonological rule data ...")
fin = pathlib.PurePath(__file__).parent / _DEF_DATA / _DEF_RULES
config = configparser.ConfigParser(allow_no_value=True)
if pathlib.Path(fin).exists() == False:
if silent == False:
print("Rule data not found. Ignoring phonological rules.")
config["group"] = {}
for b in _BLOCKS:
config[b] = {}
return config
config.read(fin)
if "group" not in config:
config["group"] = {}
for b in _BLOCKS:
if b not in config:
config[b] = {}
_VOWEL_SET = list(_VOWELS)
_CONSONANT_SET = list(_CONSONANTS)
for g in config["group"]:
vow = True
con = True
for c in g:
if c not in _VOWELS:
vow = False
if c not in _CONSONANTS:
con = False
if vow == True:
_VOWEL_SET.append(g)
if con == True:
_CONSONANT_SET.append(g)
_VOWEL_SET = tuple(_VOWEL_SET)
_CONSONANT_SET = tuple(_CONSONANT_SET)
if silent == False:
print("Phonological rules loaded!")
return config |
/**
* Created by nitinagarwal on 3/12/17.
*/
public class ListFragmentViewImpl implements ListFragmentView {
View mFragemntVideoListView;
VideoListAdapter mVideoListAdapter;
ObservableListView mListView;
public ListFragmentViewImpl(Context context, ViewGroup container, LayoutInflater inflater)
{
mFragemntVideoListView = inflater.inflate(R.layout.tab_videolist, container, false);
mVideoListAdapter = new VideoListAdapter(context, R.layout.tab_child);
mListView = (ObservableListView) mFragemntVideoListView.findViewById(R.id.ListView);
mListView.setAdapter(mVideoListAdapter);
}
@Override
public ObservableListView getListView() {
return mListView;
}
@Override
public void bindVideoList(List<String> videos, VideoListInfo videoListInfo)
{
mVideoListAdapter.bindVideoList(videos, videoListInfo);
mVideoListAdapter.notifyDataSetChanged();
}
@Override
public View getRootView() {
return mFragemntVideoListView;
}
@Override
public Bundle getViewState() {
return null;
}
} |
import * as React from "react";
import styled from "styled-components";
import Typo from "../Typo";
import Box from "../Box";
import Image, { ImageRatio } from "../Image";
import { getListItemStyle } from "./getStyled";
import { GlobProps } from "../common/props";
const Container = styled.li`
${getListItemStyle}
`;
const ItemMeta = styled.div`
display: flex;
flex: 1;
align-self: flex-start;
padding: 0.5rem 0;
`;
const ActionGroup = styled.div`
align-items: center;
margin: 0.4rem;
display: inline-flex;
> * {
&:not(:last-child) {
margin-right: 0.2rem;
}
}
`;
const Thumbnail = styled.div`
width: 3rem;
height: 3rem;
position: relative;
`;
export interface ListItemProps extends GlobProps {
key?: string | number;
thumbnail?: string;
title?: string | React.ReactNode;
description?: string | React.ReactNode;
actions?: React.ReactNode;
hasDivider?: boolean;
children?: React.ReactNode;
}
export const ListItem: React.FC<ListItemProps> = ({
children,
thumbnail,
title,
description,
actions,
hasDivider = false,
}) => {
return (
<Container hasDivider={hasDivider}>
<ItemMeta>
{thumbnail && (
<Thumbnail>
<ImageRatio paddingBottom={100} minWidth={36}>
<Image data-src={thumbnail} />
</ImageRatio>
</Thumbnail>
)}
<Box alignItems="center" m="0 0.8rem">
{title && (
<Typo tag="h4" fontWeight={600} mb={0}>
{title}
</Typo>
)}
{description && <div>{description}</div>}
</Box>
</ItemMeta>
{children}
{actions && <ActionGroup>{actions}</ActionGroup>}
</Container>
);
};
|
//+build gofuzz
package gossip
import (
"bytes"
"sync"
_ "github.com/dvyukov/go-fuzz/go-fuzz-defs"
"github.com/ethereum/go-ethereum/p2p"
"github.com/ethereum/go-ethereum/p2p/enode"
"github.com/mugambocoin/mugambo-foundation/evmcore"
"github.com/mugambocoin/mugambo-foundation/integration/makegenesis"
"github.com/mugambocoin/mugambo-foundation/inter"
"github.com/mugambocoin/mugambo-foundation/utils"
)
const (
fuzzHot int = 1 // if the fuzzer should increase priority of the given input during subsequent fuzzing;
fuzzCold int = -1 // if the input must not be added to corpus even if gives new coverage;
fuzzNoMatter int = 0 // otherwise.
)
var (
fuzzedPM *ProtocolManager
)
func FuzzPM(data []byte) int {
var err error
if fuzzedPM == nil {
fuzzedPM, err = makeFuzzedPM()
if err != nil {
panic(err)
}
}
msg, err := newFuzzMsg(data)
if err != nil {
return fuzzCold
}
peer := p2p.NewPeer(enode.RandomID(enode.ID{}, 1), "fake-node-1", []p2p.Cap{})
input := &fuzzMsgReadWriter{msg}
other := fuzzedPM.newPeer(ProtocolVersion, peer, input)
err = fuzzedPM.handleMsg(other)
if err != nil {
return fuzzNoMatter
}
return fuzzHot
}
func makeFuzzedPM() (pm *ProtocolManager, err error) {
const (
genesisStakers = 3
genesisBalance = 1e18
genesisStake = 2 * 4e6
)
genStore := makegenesis.FakeGenesisStore(genesisStakers, utils.ToMgb(genesisBalance), utils.ToMgb(genesisStake))
genesis := genStore.GetGenesis()
config := DefaultConfig()
store := NewMemStore()
blockProc := DefaultBlockProc(genesis)
_, err = store.ApplyGenesis(blockProc, genesis)
if err != nil {
return
}
var (
network = genesis.Rules
heavyCheckReader HeavyCheckReader
gasPowerCheckReader GasPowerCheckReader
// TODO: init
)
mu := new(sync.RWMutex)
feed := new(ServiceFeed)
checkers := makeCheckers(config.HeavyCheck, network.EvmChainConfig().ChainID, &heavyCheckReader, &gasPowerCheckReader, store)
processEvent := func(e *inter.EventPayload) error {
return nil
}
txpool := evmcore.NewTxPool(config.TxPool, network.EvmChainConfig(), &EvmStateReader{
ServiceFeed: feed,
store: store,
})
pm, err = NewProtocolManager(
config,
feed,
txpool,
mu,
checkers,
store,
processEvent,
nil)
if err != nil {
return
}
pm.Start(3)
return
}
type fuzzMsgReadWriter struct {
msg *p2p.Msg
}
func newFuzzMsg(data []byte) (*p2p.Msg, error) {
if len(data) < 1 {
return nil, ErrEmptyMessage
}
var (
codes = []uint64{
HandshakeMsg,
EvmTxsMsg,
ProgressMsg,
NewEventIDsMsg,
GetEventsMsg,
EventsMsg,
RequestEventsStream,
EventsStreamResponse,
}
code = codes[int(data[0])%len(codes)]
)
data = data[1:]
return &p2p.Msg{
Code: code,
Size: uint32(len(data)),
Payload: bytes.NewReader(data),
}, nil
}
func (rw *fuzzMsgReadWriter) ReadMsg() (p2p.Msg, error) {
return *rw.msg, nil
}
func (rw *fuzzMsgReadWriter) WriteMsg(p2p.Msg) error {
return nil
}
|
#![cfg_attr(feature = "cargo-clippy", allow(clippy::match_wild_err_arm))]
/// Read list of domain names from the command line or a file
extern crate clap;
extern crate futures;
use clap::{App, Arg};
use futures::{stream, StreamExt};
use reqwest::Client;
use std::io::{self, BufRead};
use std::time::Duration;
/// A port number for the probe
type Port = u16;
/// The two possible protocols for a probe
#[derive(Debug, Copy, Clone, PartialEq)]
enum Protocol {
Http,
Https,
}
/// A probe is composed of a probe and a protocol.
#[derive(Debug, Copy, Clone, PartialEq)]
struct Probe {
protocol: Protocol,
port: Port,
}
impl Probe {
/// Create a new probe from a protocol and port
fn new(protocol: Protocol, port: Port) -> Probe {
Probe { protocol, port }
}
/// Create a new http probe for the port.
fn new_http(port: Port) -> Probe {
Probe::new(Protocol::Http, port)
}
/// Create a new https probe for the port.
fn new_https(port: Port) -> Probe {
Probe::new(Protocol::Https, port)
}
/// Returns true if the port is the default for the protocol.
fn is_default_port(&self) -> bool {
match self {
Probe {
protocol: Protocol::Http,
port: 80,
} => true,
Probe {
protocol: Protocol::Https,
port: 443,
} => true,
_ => false,
}
}
}
#[tokio::main]
async fn main() {
    let default_probes: Vec<Probe> = vec![Probe::new_http(80), Probe::new_https(443)];
let app = App::new("hprobe")
.version("0.1")
.about("A fast http probe")
.arg(
Arg::with_name("probes")
.short("p")
.long("probe")
.value_name("PROBE")
.help("Protocol port pair <http|https>:<port>")
.takes_value(true)
.multiple(true)
.required(false),
)
.arg(
Arg::with_name("suppress_default")
.short("s")
.long("Suppress-default")
.value_name("SUPPRESS")
.help("do not process the default http and https ports")
.takes_value(false)
.required(false),
)
.arg(
Arg::with_name("timeout")
.short("t")
.long("timeout")
.value_name("TIMEOUT")
.help("The timeout for the connect phase (ms)")
.takes_value(true)
.required(false)
.default_value("1000"),
)
.arg(
Arg::with_name("concurrency")
.short("c")
.long("concurrency")
.value_name("CONCURRENCY")
.help("The number of concurrent requests")
.takes_value(true)
.required(false)
.default_value("20"),
)
.arg(
Arg::with_name("proxy_all")
.long("proxy-all")
.value_name("PROXY_ALL")
.help("The url of the proxy to for all requests.")
.takes_value(true)
.required(false),
)
.arg(
Arg::with_name("proxy_http")
.long("proxy-http")
.value_name("PROXY_HTTP")
.help("The url of the proxy to for http requests.")
.takes_value(true)
.conflicts_with("proxy_all")
.required(false),
)
.arg(
Arg::with_name("proxy_https")
.long("proxy-https")
.value_name("PROXY_HTTPS")
.help("The url of the proxy to for https requests.")
.takes_value(true)
.conflicts_with("proxy_all")
.required(false),
)
.arg(
Arg::with_name("accept_invalid_certs")
.short("k")
.long("insecure")
.help("Accept invalid certificates.")
.takes_value(false)
.required(false),
)
.arg(
Arg::with_name("user_agent")
.short("u")
.long("user-agent")
.required(false)
.help("Set the requests USER-AGENT header")
.takes_value(true)
.required(false),
)
.arg(
Arg::with_name("async-dns")
.hidden(!cfg!(feature = "async-dns"))
.short("a")
.long("async-dns")
.required(false)
.help("Use asynchronous DNS resolution")
.takes_value(false)
.required(false),
);
let command = app.get_matches();
let probe_args: Option<Vec<_>> = command.values_of("probes").map(|x| x.collect());
let run_default = !command.is_present("suppress_default");
let timeout = command.value_of("timeout").unwrap();
let concurrency = command.value_of("concurrency").unwrap();
let concurrency_amount = match concurrency.parse::<usize>() {
Ok(c) => c,
Err(_) => panic!(
"-c --concurrency parameter was not a integer: {}",
concurrency
),
};
let (mut probes, errors) = match probe_args {
Some(p) => parse_probes(p),
None => (vec![], vec![]),
};
if !errors.is_empty() {
panic!("Invalid Probe arguments -p {:?}", errors);
}
let timeout_duration = match timeout.parse::<u64>().map(Duration::from_millis) {
Ok(t) => t,
Err(_) => panic!("-t --timeout parameter was not a number: {}, ", timeout),
};
if run_default {
        probes.extend_from_slice(&default_probes)
}
let mut client_builder = Client::builder().connect_timeout(timeout_duration);
if let Some(url) = command.value_of("proxy_all") {
match reqwest::Proxy::all(url) {
Ok(proxy) => client_builder = client_builder.proxy(proxy),
Err(_) => panic!("Error parsing --proxy-all: {}", url),
}
};
if let Some(url) = command.value_of("proxy_http") {
match reqwest::Proxy::http(url) {
Ok(proxy) => client_builder = client_builder.proxy(proxy),
Err(_) => panic!("Error parsing --proxy-http: {}", url),
}
};
if let Some(url) = command.value_of("proxy_https") {
match reqwest::Proxy::https(url) {
Ok(proxy) => client_builder = client_builder.proxy(proxy),
Err(_) => panic!("Error parsing -proxy-https: {}", url),
}
};
if let Some(user_agent) = command.value_of("user_agent") {
client_builder = client_builder.user_agent(user_agent)
};
client_builder =
client_builder.danger_accept_invalid_certs(command.is_present("accept_invalid_certs"));
#[cfg(feature = "async-dns")]
{
client_builder = client_builder.trust_dns(command.is_present("async-dns"));
}
let client = client_builder.build().unwrap();
let stdin = io::stdin();
stream::iter(stdin.lock().lines())
.flat_map(|line| {
let line = line.unwrap();
stream::iter(&probes).map(move |probe| probe_to_url(&line, probe))
})
.map(|line| {
let client = &client;
async move { client.get(&line).send().await.map(|r| (line, r)) }
})
.buffer_unordered(concurrency_amount)
.for_each(|b| async {
match b {
Ok((r, _res)) => println!("{}", r),
Err(e) => eprintln!("Got an error: {}", e),
}
})
.await;
}
/// Format the host and probe into a URL, dropping the
/// port number if it is the default for the protocol.
fn probe_to_url(host: &str, probe: &Probe) -> String {
match probe.protocol {
Protocol::Http if probe.is_default_port() => format!("http://{}", host),
Protocol::Http => format!("http://{}:{}", host, probe.port),
Protocol::Https if probe.is_default_port() => format!("https://{}", host),
Protocol::Https => format!("https://{}:{}", host, probe.port),
}
}
/// Parse the probes passed from the command line
/// the format is `<protocol>:<port>` where protocol can be http or https,
/// and port can be a number between 0 and 65535.
fn parse_probes(probes: Vec<&str>) -> (Vec<Probe>, Vec<String>) {
let (probes, errors): (Vec<_>, Vec<_>) = probes
.iter()
.map(|p| {
let parts: Vec<&str> = p.split(':').collect();
if parts.len() == 2 {
match parts[1].parse::<u16>() {
Ok(port) if parts[0] == "http" => Ok(Probe::new_http(port)),
Ok(port) if parts[0] == "https" => Ok(Probe::new_https(port)),
_ => Err(format!("Error parsing probe: {}", p)),
}
} else {
Err(format!("Error parsing probe: {}", p))
}
})
.partition(Result::is_ok);
let probes: Vec<_> = probes.into_iter().map(Result::unwrap).collect();
let errors: Vec<_> = errors.into_iter().map(Result::unwrap_err).collect();
(probes, errors)
}
#[cfg(test)]
mod tests {
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn test_probe_to_url_default_http() {
assert_eq!(
probe_to_url("demo.com", &Probe::new_http(80)),
"http://demo.com"
);
}
#[test]
fn test_probe_to_url_default_https() {
assert_eq!(
probe_to_url("demo.com", &Probe::new_https(443)),
"https://demo.com"
);
}
#[test]
fn test_parse_probe_valid_http() {
assert_eq!(
parse_probes(vec!["http:8080"]),
(vec![Probe::new_http(8080)], vec![])
);
}
#[test]
fn test_parse_probe_valid_https() {
assert_eq!(
parse_probes(vec!["https:8080"]),
(vec![Probe::new_https(8080)], vec![])
);
}
#[test]
fn test_parse_probe_invalid_port() {
assert_eq!(
parse_probes(vec!["https:65536"]),
(
vec![],
vec![String::from("Error parsing probe: https:65536")]
)
);
}
#[test]
fn test_parse_probe_invalid_protocol() {
assert_eq!(
parse_probes(vec!["ftp:21"]),
(vec![], vec![String::from("Error parsing probe: ftp:21")])
);
}
}
|
def make_user_count_table(results):
symptom_table = []
total_very_active_n_obs = 0
total_very_active_ids = set()
total_overall_n_obs = 0
total_overall_user_ids = set()
for k in results.keys():
very_active_logger_data = results[k]['no_substratification']
non_very_active_logger_data = results[k]['ALL_OMITTED_USERS_ROBUSTNESS_CHECK_ONLY'][True]
pretty_feature_name = CANONICAL_PRETTY_SYMPTOM_NAMES[k].replace('\n', ' ')
feature_type = determine_mood_behavior_or_vital_sign(k)
very_active_n_obs = 1.0*very_active_logger_data['overall_n_obs']
non_active_n_obs = 1.0*non_very_active_logger_data['overall_n_obs']
overall_n_obs = very_active_n_obs + non_active_n_obs
very_active_user_ids = very_active_logger_data['unique_user_ids']
non_active_user_ids = non_very_active_logger_data['unique_user_ids']
assert len(very_active_user_ids.intersection(non_active_user_ids)) == 0
overall_user_ids = very_active_user_ids.union(non_active_user_ids)
very_active_mu = very_active_logger_data['overall_positive_frac']
overall_mu = (very_active_logger_data['overall_positive_frac'] * very_active_n_obs +
non_very_active_logger_data['overall_positive_frac'] * non_active_n_obs) / overall_n_obs
if feature_type == 'Vital sign':
very_active_mu = '%2.1f' % very_active_mu
overall_mu = '%2.1f' % overall_mu
else:
very_active_mu = '%2.1f%%' % (very_active_mu * 100.)
overall_mu = '%2.1f%%' % (overall_mu * 100.)
total_very_active_n_obs += very_active_n_obs
total_overall_n_obs += overall_n_obs
total_very_active_ids = total_very_active_ids.union(very_active_user_ids)
total_overall_user_ids = total_overall_user_ids.union(overall_user_ids)
symptom_table.append({'Dimension':pretty_feature_name,
'Category':feature_type,
'N Obs (LTLs)':"{:,.0f}".format(very_active_n_obs),
'unformatted_n_obs':very_active_n_obs,
'N Users (LTLs)':"{:,.0f}".format(len(very_active_user_ids)),
'Mean value (LTLs)':very_active_mu,
'N Obs (overall)':"{:,.0f}".format(overall_n_obs),
'N Users (overall)':"{:,.0f}".format(len(overall_user_ids)),
'Mean value (overall)':overall_mu})
symptom_table.append({'Dimension':'-',
'Category':'All combined',
'N Obs (LTLs)':"{:,.0f}".format(total_very_active_n_obs),
'unformatted_n_obs':total_very_active_n_obs,
'N Users (LTLs)':"{:,.0f}".format(len(total_very_active_ids)),
'Mean value (LTLs)':'-',
'N Obs (overall)':"{:,.0f}".format(total_overall_n_obs),
'N Users (overall)':"{:,.0f}".format(len(total_overall_user_ids)),
'Mean value (overall)':'-'
})
symptom_table = pd.DataFrame(symptom_table)
symptom_table = (symptom_table.sort_values(by=['Category', 'unformatted_n_obs'])[::-1]
[['Category',
'Dimension',
'Mean value (LTLs)',
'Mean value (overall)',
'N Obs (LTLs)',
'N Obs (overall)',
'N Users (LTLs)',
'N Users (overall)']])
print symptom_table.to_latex(index=False).replace('\\\\', '\\\\ \\hline') |
def process_news(keyword, start, end, force_refresh=False, cache_time=CACHE_TIME):
if type(start) == str:
start = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S")
if type(end) == str:
end = datetime.strptime(end, "%Y-%m-%dT%H:%M:%S")
items = newsdb.find({"keywords": keyword, "pubdate": {"$gte": start, "$lt": end}},
projection={"title": True, "pubdate": True, "description": True, "flag": True,
"source": True, "link": True, "nid": True, "_id": False})
news = sorted([it for it in items], key=lambda x: x["pubdate"], reverse=True)
return news |
A survey was conducted in Hungary on the subject of sex predetermination among the teaching profession. 1500 subjects with no children were asked to answer a questionnaire. Almost 70% of the subjects questioned desired boys. The majority of people wanted 2 children. Among the people who wanted only 1 child, 65% preferred having a boy. Among the people who wanted 2 children, 56.5% wanted boys. Finally, among the people who wanted 3 children, 60.5% wanted boys. In this group, the preference for boys was stronger among the women than among the men.
|
from .nwbconverter import NWBConverter
from .datainterfaces import *
from .tools import spikeinterface, roiextractors, neo
from .tools.yaml_conversion_specification import run_conversion_from_yaml
|
#include <cstdio>
#include <cstring>
#include <algorithm>
#include <iostream>
using namespace std;
const int N = 200000;
int a[N];
int c[N];
int s[N];
// Fenwick tree (binary indexed tree) over values 1..N-1:
// getSum(x) returns how many values <= x have been recorded so far.
int getSum(int x) {
	int res = 0;
	while(x > 0) {
		res += s[x];
		x -= x&(-x);
	}
	return res;
}
// update(x) records one occurrence of the value x.
void update(int x) {
	while(x < N) {
		s[x] ++;
		x += x&(-x);
	}
}
int main() {
int n, mx = 0;
cin >> n;
for(int i = 1; i <= n; ++ i) {
int v;
cin >> v;
int x = getSum(v);
mx = max(mx, v);
if(x == i-2) {
c[mx] ++;
}
if(x == i-1) {
c[v] --;
}
update(v);
}
int res = 1;
for(int i = 1; i <= n; ++ i) {
if(c[i] > c[res]) {
res = i;
}
}
cout << res << endl;
return 0;
}
|
import pytest
import os
import subprocess
import glob
from conftest import *
import common_libs.utilities as ut
import mysql.connector
@ithemal
class TestStats:
def test_getbenchmarks(self):
script = os.environ['ITHEMAL_HOME'] + '/learning/pytorch/stats/getbenchmarks.py'
database = '--database=testIthemal'
config = '--config=test_data/db_config.cfg'
arch = '--arch=1'
args = ['python',script, database, config, arch]
proc = subprocess.Popen(args,stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False)
stdout, stderr = proc.communicate()
success = False
for line in stdout.split('\n'):
if line == 'Total 44 2934 0':
success = True
assert success
|
def all_glyphs(self, ignore=['invisible']):
_all_glyphs = []
self.import_encoding()
for group in self.libs['groups']['order']:
if group not in ignore:
_all_glyphs += self.libs['groups']['glyphs'][group]
return _all_glyphs |
In vitro suppression of drug‐induced methaemoglobin formation by Intralipid® in whole human blood: observations relevant to the ‘lipid sink theory’ *
To provide further evidence for the lipid sink theory, we have developed an in vitro model to assess the effect of Intralipid® 20% on methaemoglobin formation by drugs of varying lipid solubility. Progressively increasing Intralipid concentrations from 4 to 24 mg.ml−1 suppressed methaemoglobin formation by the lipid soluble drug glyceryl trinitrate in a dose‐dependent manner (p < 0.001). Both dose and timing of administration of Intralipid to blood previously incubated with glyceryl trinitrate for 10 and 40 min resulted in significant suppression of methaemoglobin formation (p < 0.0001 and p < 0.05, respectively). Mathematical modelling demonstrated that the entire process of methaemoglobin formation by glyceryl trinitrate was slowed down in the presence of Intralipid. Intralipid did not significantly suppress methaemoglobin formation induced by 2‐amino‐5‐hydroxytoluene (partially lipid soluble) or sodium nitrite (lipid insoluble; both p > 0.5). This work may assist determination of the suitability of drugs taken in overdose for which Intralipid might be deployed. |
Drainage of forests in Finland
It is said that the forest industry is the backbone of the national economy in Finland. This was in particular the case in the twentieth century. That is also why the drainage of forests has played an important role in the national forestry policy. The most intensive period of forest drainage started at the beginning of the 1960s and lasted for 20 years. During that period the annual drained area was on average 200 000 ha. This article presents a short overview of the history of forest drainage in Finland in its heyday and also illustrates some current water conservation measures in the drainage of forested areas. Copyright © 2007 John Wiley & Sons, Ltd. |
/**
* (Solaris) platform specific handling for file: URLs .
* urls must not contain a hostname in the authority field
* other than "localhost".
*
* This implementation could be updated to map such URLs
* on to /net/host/...
*
* @author Michael McMahon
*/
public class FileURLMapper {
URL url;
String path;
public FileURLMapper (URL url) {
this.url = url;
}
/**
     * @return the platform-specific path corresponding to the URL
* so long as the URL does not contain a hostname in the authority field.
*/
public String getPath () {
if (path != null) {
return path;
}
String host = url.getHost();
if (host == null || "".equals(host) || "localhost".equalsIgnoreCase (host)) {
path = url.getFile();
path = ParseUtil.decode (path);
}
return path;
}
/**
* Checks whether the file identified by the URL exists.
*/
public boolean exists () {
String s = getPath ();
if (s == null) {
return false;
} else {
File f = new File (s);
return f.exists();
}
}
} |
/* Copyright 2019 The MLPerf Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
/// \file
/// \brief A C API wrapping the C++ loadgen. Not tested. Needs work.
/// \details The C API allows a C or Python client to easily create
/// a SystemUnderTest without having to expose the SystemUnderTest class
/// directly.
/// ConstructSUT instead works with a set of function pointers that are
/// called from an underlying trampoline class.
#ifndef SYSTEM_UNDER_TEST_C_API_H_
#define SYSTEM_UNDER_TEST_C_API_H_
#include <stddef.h>
#include <stdint.h>
#include "../query_sample.h"
#include "../test_settings.h"
namespace mlperf {
namespace c {
/// \brief Optional opaque client data that creators of SUTs and QSLs can have
/// the loadgen pass back to their callback invocations.
/// Helps avoid global variables.
typedef uintptr_t ClientData;
typedef void (*IssueQueryCallback)(ClientData, const QuerySample*, size_t);
typedef void (*FlushQueriesCallback)();
typedef void (*ReportLatencyResultsCallback)(ClientData, const int64_t*,
size_t);
typedef void (*ResponseCallback)(ClientData, QuerySampleResponse*);
/// \brief SUT calls this function to report query result back to loadgen
void QuerySamplesComplete(QuerySampleResponse* responses,
size_t response_count);
void QuerySamplesCompleteResponseCb(QuerySampleResponse* responses,
size_t response_count,
ResponseCallback response_cb,
ClientData client_data);
/// \brief Create an opaque SUT pointer based on C callbacks.
void* ConstructSUT(ClientData client_data, const char* name, size_t name_length,
IssueQueryCallback issue_cb,
FlushQueriesCallback flush_queries_cb,
ReportLatencyResultsCallback report_latency_results_cb);
/// \brief Destroys the SUT created by ConstructSUT.
void DestroySUT(void* sut);
typedef void (*LoadSamplesToRamCallback)(ClientData, const QuerySampleIndex*,
size_t);
typedef void (*UnloadSamplesFromRamCallback)(ClientData,
const QuerySampleIndex*, size_t);
/// \brief Create an opaque QSL pointer based on C callbacks.
void* ConstructQSL(ClientData client_data, const char* name, size_t name_length,
size_t total_sample_count, size_t performance_sample_count,
LoadSamplesToRamCallback load_samples_to_ram_cb,
UnloadSamplesFromRamCallback unload_samples_from_ram_cb);
/// \brief Destroys the QSL created by ConstructQSL.
void DestroyQSL(void* qsl);
/// \brief Run tests on a SUT created by ConstructSUT().
/// \details This is the C entry point. See mlperf::StartTest for the C++ entry
/// point.
void StartTest(void* sut, void* qsl, const TestSettings& settings);
///
/// \brief Register a thread for query issuing in Server scenario.
/// \details This is the C entry point. See mlperf::RegisterIssueQueryThread for the C++ entry
/// point.
///
void RegisterIssueQueryThread();
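/// \brief Example usage (illustrative sketch only, not part of the original
/// header). The field names of QuerySample and QuerySampleResponse are
/// assumptions based on query_sample.h and should be checked against it.
///
/// \code
///   void IssueQuery(mlperf::c::ClientData, const mlperf::QuerySample* samples,
///                   size_t count) {
///     // Answer each sample immediately with an empty response.
///     for (size_t i = 0; i < count; ++i) {
///       mlperf::QuerySampleResponse response{samples[i].id, 0, 0};
///       mlperf::c::QuerySamplesComplete(&response, 1);
///     }
///   }
///   void FlushQueries() {}
///   void ReportLatencies(mlperf::c::ClientData, const int64_t*, size_t) {}
///   void LoadSamples(mlperf::c::ClientData, const mlperf::QuerySampleIndex*, size_t) {}
///   void UnloadSamples(mlperf::c::ClientData, const mlperf::QuerySampleIndex*, size_t) {}
///
///   int main() {
///     const char sut_name[] = "example_sut";
///     const char qsl_name[] = "example_qsl";
///     void* sut = mlperf::c::ConstructSUT(0, sut_name, sizeof(sut_name) - 1,
///                                         IssueQuery, FlushQueries, ReportLatencies);
///     void* qsl = mlperf::c::ConstructQSL(0, qsl_name, sizeof(qsl_name) - 1,
///                                         1024, 1024, LoadSamples, UnloadSamples);
///     mlperf::TestSettings settings;
///     mlperf::c::StartTest(sut, qsl, settings);
///     mlperf::c::DestroyQSL(qsl);
///     mlperf::c::DestroySUT(sut);
///     return 0;
///   }
/// \endcode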
} // namespace c
} // namespace mlperf
#endif // SYSTEM_UNDER_TEST_C_API_H_
|
// ansi/ansi.go
package ansi
// All ANSI escape codes used to add colors in the terminal
var RESET string = "\u001B[0m"
var WHITE string = "\u001B[37m"
var RED string = "\u001B[31m"
var YELLOW string = "\u001B[33m"
var GREEN string = "\u001B[32m"
var BLUE string = "\u001B[34m"
var CYAN string = "\u001B[36m"
var PURPLE string = "\u001B[35m"
|
package platform
import (
"errors"
"log"
"sync"
"github.com/spf13/viper"
"go.uber.org/zap"
)
var (
internalConfig *config
mutex sync.Mutex
ErrInvalidConfigFilePath = errors.New("Invalid config file path for settings platform.log.logfilepath")
)
func writePlatformConfiguration(conf config) error {
viper.Set("platform", conf)
err := viper.WriteConfig()
if err != nil {
log.Println("Error writing config: ", err.Error())
return err
}
return nil
}
func getPlatformConfiguration() (*config, error) {
	if internalConfig == nil {
		mutex.Lock()
		// Unlock on every return path below.
		defer mutex.Unlock()
		if internalConfig == nil {
			viper.SetConfigType("yml")
			viper.AddConfigPath(".")
			viper.SetConfigName("config")
			err := viper.ReadInConfig()
			if err != nil {
				log.Println("Unable to read config file: ", err.Error())
				return internalConfig, err
			}
			err = viper.UnmarshalKey("platform", &internalConfig)
			if err != nil {
				log.Println("Error reading config: ", err.Error())
				return internalConfig, err
			}
		}
		err := internalConfig.checkPlatformConfiguration()
		if err != nil {
			log.Println("Config file incorrect: ", err.Error())
			return internalConfig, err
		}
	}
	return internalConfig, nil
}
func GetComponentConfiguration(key string, object interface{}) error {
err := viper.UnmarshalKey(key, &object)
if err != nil {
Logger.Error("Unable to read component configuration", zap.String("configkey", key), zap.Error(err))
return err
}
return nil
}
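// exampleComponentConfigUsage is an illustrative sketch only (not part of the
// original package): it shows how a caller could read its own section of the
// config file through GetComponentConfiguration. The key name and struct
// fields are assumptions invented for the example; it reuses the package's
// Logger the same way the existing code does.
func exampleComponentConfigUsage() {
	type exampleComponentConfig struct {
		Endpoint string
		Retries  int
	}
	var cfg exampleComponentConfig
	if err := GetComponentConfiguration("examplecomponent", &cfg); err != nil {
		Logger.Error("could not load example component configuration", zap.Error(err))
	}
}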
// Config ... Platform configuration
type config struct {
Log struct {
Level string
FilePath string
//MegaBytes
MaxSize int
MaxBackups int
// Days
MaxAge int
}
HTTP struct {
Server struct {
ListeningAddress string
TLSCertFileName string
TLSKeyFileName string
TLSEnabled bool
AllowCorsForLocalDevelopment bool
}
Clients []httpClientConfig
}
Auth struct {
Server struct {
OAuth struct {
Enabled bool
IdpWellKnownURL string
ClientID string
AllowedAlgorithms []string
}
Basic struct {
Enabled bool
AllowedUsers map[string]string
}
}
Client struct {
OAuth struct {
OwnTokens []ownTokenConfig
}
}
}
Component struct {
ComponentName string
}
Database struct {
BoltDB struct {
Enabled bool
FileName string
}
}
Vault struct {
Enabled bool
AddressList []string
IsLocalAgent bool
InsecureSkipVerify bool
CaCert string
TimeoutSeconds int64
MaxRetries int
Token struct {
Enabled bool
TokenPath string
Token string
}
Cert struct {
Enabled bool
CertFile string
KeyFile string
}
}
}
// HTTPClientConfig ... For HTTP client configuration
type httpClientConfig struct {
ID string
TLSVerify bool
MaxIdleConnections int
RequestTimeout int
}
// OwnTokenConfig ... Will need to secure the credentials in the future
type ownTokenConfig struct {
ID string
IdpWellKnownURL string
ClientID string
ClientSecret string
Username string
Password string
}
func (conf *config) checkPlatformConfiguration() error {
if len(conf.Log.FilePath) < 1 {
log.Println("Configuration Log.FiePath is empty. Defaulting to ./default.log")
conf.Log.FilePath = "./default.log"
}
if conf.Log.MaxAge == 0 {
log.Println("Configuration Log.MaxAge is empty. Defaulting to 10")
conf.Log.MaxAge = 10
}
if conf.Log.MaxSize == 0 {
log.Println("Configuration Log.MaxSize is empty. Defaulting to 51200")
conf.Log.MaxSize = 51200
}
return nil
}
|
/**
* Persists a given SimpleXML-annotated object into a {@link OutputStream}.
*
* @param object a SimpleXML-annotated object
* @param stream a stream containing object serialized in XML
*/
public static void persistInStream(Object object, OutputStream stream) {
Serializer serializer = new Persister(new AnnotationStrategy());
try {
serializer.write(object, stream);
} catch (Exception e) {
e.printStackTrace();
}
if (StudioConfig.getInstance().printStreamContents()) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
try {
serializer.write(object, outputStream);
} catch (Exception e) {
e.printStackTrace();
}
System.out.println(outputStream.toString());
}
} |
def reencode_images(
sample_collection,
ext=".png",
force_reencode=True,
delete_originals=False,
num_workers=None,
skip_failures=False,
):
fov.validate_image_collection(sample_collection)
_transform_images(
sample_collection,
ext=ext,
force_reencode=force_reencode,
delete_originals=delete_originals,
num_workers=num_workers,
skip_failures=skip_failures,
) |
package redmine
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net"
"net/http"
"regexp"
"strconv"
"strings"
"time"
"github.com/pkg/errors"
"github.com/qarea/redminems/entities"
)
const (
redmineType = "REDMINE"
slash = '/'
dateFormat = "2006-01-02"
get = "GET"
post = "POST"
put = "PUT"
)
var (
validateStatusOK = validateStatus(http.StatusOK)
validateStatusCreated = validateStatus(http.StatusCreated)
errNotFound = errors.New("not found")
)
type requestOpts struct {
httpClient *http.Client
ctx context.Context
tracker entities.Tracker
resource string
method string
body interface{}
result interface{}
validateStatusFunc func(int) error
}
func redmineRequest(opts requestOpts) error {
resp, err := authRequest(opts)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode == http.StatusNotFound {
return errNotFound
}
if resp.StatusCode == http.StatusUnauthorized {
return entities.ErrCredentials
}
if resp.StatusCode == http.StatusForbidden {
return entities.ErrForbidden
}
b, err := ioutil.ReadAll(resp.Body)
if err != nil {
return errors.Wrap(err, "failed read body")
}
if resp.StatusCode == 422 {
return errors.Wrapf(toExternalServiceErr(b), "invalid object passed to redmine, response body: %s", string(b))
}
if resp.StatusCode == http.StatusInternalServerError {
return errors.Wrapf(entities.ErrRemoteServer, "redmine internal error, body: %s", string(b))
}
if opts.validateStatusFunc != nil {
if err := opts.validateStatusFunc(resp.StatusCode); err != nil {
return errors.Wrapf(err, "validation function failed: body: %s", string(b))
}
}
if opts.result == nil {
return nil
}
if err := json.Unmarshal(b, opts.result); err != nil {
return errors.Wrapf(err, "status code: %d, failed to unmarshal: %s", resp.StatusCode, string(b))
}
return nil
}
func validateStatus(expected int) func(s int) error {
return func(s int) error {
if s != expected {
return errors.Errorf("expected status code: %d, actual: %d", expected, s)
}
return nil
}
}
func authRequest(opts requestOpts) (*http.Response, error) {
if opts.tracker.Type != redmineType {
return nil, errors.Wrapf(entities.ErrTrackerType, "invalid type: %s", opts.tracker.Type)
}
var bodyBytes []byte
if opts.body != nil {
var err error
bodyBytes, err = json.Marshal(opts.body)
if err != nil {
return nil, errors.Wrapf(err, "fail to marshal body %+v", opts.body)
}
}
url := fullURL(opts.tracker, opts.resource)
req, err := http.NewRequest(opts.method, url, bytes.NewReader(bodyBytes))
if err != nil {
return nil, errors.Wrapf(err, "failed to create http request for url %s", url)
}
req.Header.Set("Content-Type", "application/json")
req.SetBasicAuth(opts.tracker.Credentials.Login, opts.tracker.Credentials.Password)
resp, err := opts.httpClient.Do(req.WithContext(opts.ctx))
if opts.ctx.Err() != nil {
return nil, opts.ctx.Err()
}
if err, ok := err.(net.Error); ok && err.Timeout() {
return nil, errors.Wrapf(entities.ErrTimeout, "http request timeout on URL %s", url)
}
if err != nil {
return nil, errors.Wrapf(entities.ErrTrackerURL, "http request failed err %v", err)
}
return resp, nil
}
func secondsToDate(sec int64) string {
if sec == 0 {
return ""
}
return time.Unix(sec, 0).UTC().Format(dateFormat)
}
func dateToSeconds(date string) int64 {
if date == "" {
return 0
}
t, err := time.Parse(dateFormat, date)
if err != nil {
log.ERR("Failed to parse date %s, expected format %s", date, dateFormat)
return 0
}
return t.Unix()
}
func toExternalServiceErr(b []byte) error {
var redmineError errorsResult
if err := json.Unmarshal(b, &redmineError); err != nil {
return errors.Wrapf(err, "failed to unmarshal error response body %s", string(b))
}
return entities.NewTrackerValidationErr(strings.Join(redmineError.Errors, ". "))
}
func toIssues(ir issuesRoot, tr entities.Tracker) []entities.Issue {
issues := make([]entities.Issue, len(ir.Issues))
for i, issue := range ir.Issues {
issues[i] = toIssue(issue, tr)
}
return issues
}
func reportToTimeEntry(rep entities.Report) *timeEntryRoot {
return &timeEntryRoot{timeEntry{
ActivityID: rep.ActivityID,
IssueID: int64(rep.IssueID),
Hours: secondsToHours(rep.Duration),
Comments: rep.Comments,
SpentOn: secondsToDate(rep.Started),
}}
}
func toIssue(i issue, tr entities.Tracker) entities.Issue {
var t entities.TypeID
var pid entities.ProjectID
if i.Tracker != nil {
t = entities.TypeID{
ID: i.Tracker.ID,
Name: i.Tracker.Name,
}
}
if i.Project != nil {
pid = entities.ProjectID(i.Project.ID)
}
issueID := entities.IssueID(i.ID)
return entities.Issue{
ID: issueID,
Title: i.Subject,
Type: t,
Description: i.Description,
Estimate: hoursToSeconds(i.EstimatedHours),
DueDate: dateToSeconds(i.DueDate),
ProjectID: pid,
Done: entities.Progress(i.DoneRatio),
Spent: hoursToSeconds(i.SpentHours),
URL: fullURL(tr, issueByID(issueID)),
}
}
func toIssueRoot(i entities.Issue) *issueRoot {
return &issueRoot{
Issue: issue{
DoneRatio: int(i.Done),
ID: int64(i.ID),
Subject: i.Title,
DueDate: secondsToDate(i.DueDate),
EstimatedHours: secondsToHours(i.Estimate),
TrackerID: i.Type.ID,
},
}
}
func secondsToHours(sec int64) float64 {
return float64(sec) / 3600
}
func hoursToSeconds(h float64) int64 {
return int64(h * 3600)
}
func toProjects(pr projectsRoot) []entities.Project {
ps := make([]entities.Project, len(pr.Projects))
for i, p := range pr.Projects {
ps[i] = toProject(p)
}
return ps
}
func toProject(p project) entities.Project {
return entities.Project{
ID: entities.ProjectID(p.ID),
Title: p.Name,
Description: p.Description,
IssueTypes: idNamesToTypeID(p.Trackers),
}
}
func addActivities(ps []entities.Project, activityTypes timeEntryActivitiesRoot) {
for i := range ps {
ps[i].ActivityTypes = idNamesToTypeID(activityTypes.TimeEntryActivities)
}
}
func addLinks(ps []entities.Project, tr entities.Tracker) {
for i := range ps {
ps[i].Link = fullURL(tr, projectByIDLink(ps[i].ID))
}
}
func idNamesToTypeID(ids []idName) []entities.TypeID {
var types []entities.TypeID
for _, id := range ids {
types = append(types, entities.TypeID{
ID: id.ID,
Name: id.Name,
})
}
return types
}
func fullURL(tr entities.Tracker, resource string) string {
return removeLastSlash(tr.URL) + resource
}
func removeLastSlash(url string) string {
l := len(url)
if l == 0 {
return url
}
lastChar := l - 1
if url[lastChar] == slash {
return url[:lastChar]
}
return url
}
var issueIDRegexp = regexp.MustCompile(`/issues/([0-9]+)`)
func issueIDFromURL(url entities.IssueURL) (entities.IssueID, error) {
strID := issueIDRegexp.FindStringSubmatch(string(url))
if strID == nil {
return 0, errors.Wrapf(entities.ErrIssueURL, "invalid issue URL %s", url)
}
id, err := strconv.ParseInt(strID[1], 10, 0)
if err != nil {
return 0, errors.Wrapf(entities.ErrIssueURL, "failed to parse id from URL %s", url)
}
return entities.IssueID(id), nil
}
func copyHeadersOnRedirect(req *http.Request, via []*http.Request) error {
if len(via) >= 10 {
return fmt.Errorf("too many redirects")
}
if len(via) == 0 {
return nil
}
for attr, val := range via[0].Header {
if _, ok := req.Header[attr]; !ok {
req.Header[attr] = val
}
}
return nil
}
|
def inspect_variables(text):
inspector = ExpressionInspector()
inspector.compile(text)
return inspector.variables |
/**
* Assume that we have a group called member2RepDS and members called rep1 and rep2, where
* rep1 is the current leader. The ZooKeeper nodes would appear as follows. Nodes marked with
* [p] are persistent, with [e] are ephemeral:
* <code><pre>
* /
* [p] GroupLeadershipConn/
* [p] groups/
* [p] member2RepDS/
* [e] leader : rep1
* [p] members/
* [e] rep1
* [e] rep2
* </pre></code>
*
*
* @author Mitch Stuart
* @version $Revision: 269419 $
*/
public class GroupLeadershipConnectionZkClientImpl implements GroupLeadershipConnection
{
private static final Logger LOG = Logger.getLogger(GroupLeadershipConnectionZkClientImpl.class);
private enum LeadershipAction
{
NONE,
TOOK_LEADERSHIP,
RETAINED_LEADERSHIP,
DECLINED_LEADERSHIP
}
protected static final String GROUPS_ZK_PATH_PART = "groups";
protected static final String MEMBERS_ZK_PATH_PART = "members";
protected static final String LEADER_ZK_PATH_PART = "leader";
protected static final String SHARED_DATA_ZK_PATH_PART = "shareddata";
protected static String makeUniqueMemberName()
{
return UUID.randomUUID().toString();
}
protected static String makeGroupsNodePath(String baseZkPath)
{
return baseZkPath + "/" +
GROUPS_ZK_PATH_PART;
}
/**
* Package-private - use externally for <b>TESTING ONLY</b>
*
*/
static String makeGroupNodePath(String baseZkPath,String groupName)
{
return makeGroupsNodePath(baseZkPath) + "/" +
groupName;
}
/**
* Package-private - use externally for <b>TESTING ONLY</b>
*
*/
static String makeGroupSharedDataNodePath(String baseZkPath,String groupName,String sharedDataPath)
{
return makeGroupsNodePath(baseZkPath) + "/" +
groupName + "/" + sharedDataPath;
}
/**
* Package-private - use externally for <b>TESTING ONLY</b>
*
*/
static String makeMembersNodePath(String baseZkPath,String groupName)
{
return makeGroupNodePath(baseZkPath,groupName) + "/" +
MEMBERS_ZK_PATH_PART;
}
/**
* Package-private - use externally for <b>TESTING ONLY</b>
*
*/
static String makeMemberNodePath(String baseZkPath,String groupName, String memberName)
{
return makeGroupNodePath(baseZkPath,groupName) + "/" +
MEMBERS_ZK_PATH_PART + "/" +
memberName;
}
/**
* Package-private - use externally for <b>TESTING ONLY</b>
*
*/
static String makeLeaderNodePath(String baseZkPath,String groupName)
{
return makeGroupNodePath(baseZkPath,groupName) + "/" +
LEADER_ZK_PATH_PART;
}
private final ZkClient _zkClient;
/**
* Constructor
*/
public GroupLeadershipConnectionZkClientImpl(ZkClient zkClient)
{
_zkClient = zkClient;
}
protected String ensurePersistentPathExists(String path)
{
if (!_zkClient.exists(path))
{
_zkClient.createPersistent(path, true);
}
return path;
}
protected String ensurePersistentGroupsZkPathExists(String baseZkPath)
{
String groupsZkPath = makeGroupsNodePath(baseZkPath);
return ensurePersistentPathExists(groupsZkPath);
}
protected String ensurePersistentMembersZkPathExists(String baseZkPath,String groupName)
{
String membersZkPath = makeMembersNodePath(baseZkPath,groupName);
return ensurePersistentPathExists(membersZkPath);
}
@Override
public GroupLeadershipSession joinGroup(final String baseZkPath,
final String groupName,
String memberName,
final AcceptLeadershipCallback acceptLeadershipCallback)
{
final String resolvedMemberName = memberName == null ? makeUniqueMemberName() : memberName;
LOG.info("joinGroup: groupName=" + groupName +
"; memberName=" + memberName +
"; resolvedMemberName=" + resolvedMemberName);
ensurePersistentMembersZkPathExists(baseZkPath,groupName);
final GroupLeadershipSessionZkClientImpl groupLeadershipSession =
new GroupLeadershipSessionZkClientImpl(baseZkPath,groupName, resolvedMemberName, acceptLeadershipCallback);
//Create and set call backs for data and child changes;
IZkChildListener childListener = new IZkChildListener()
{
@Override
public void handleChildChange(String parentPath, List<String> currentChildren)
throws Exception
{
takeLeadershipIfNeeded(groupLeadershipSession);
}
};
//Data change call back ; try and get notified on creation of the ephemeral member node
IZkDataListener dataListener = new IZkDataListener()
{
@Override
public void handleDataChange(String dataPath, Object data) throws Exception
{
LOG.info("LeaderElection: Data change in: " + dataPath);
takeLeadershipIfNeeded(groupLeadershipSession);
}
@Override
public void handleDataDeleted(String dataPath) throws Exception
{
relinquishLeadership(groupLeadershipSession);
}
};
groupLeadershipSession.setListener(childListener);
groupLeadershipSession.setDataListener(dataListener);
    //create an ephemeral node for the member
String memberPath = makeMemberNodePath(baseZkPath,groupName, memberName);
String path = _zkClient.createEphemeralSequential(memberPath, groupLeadershipSession.getUniqueID());
groupLeadershipSession.setMemberZkName(memberName + path.substring(path.length()-10));
String dataWatchPath = makeMemberNodePath(baseZkPath,groupName,groupLeadershipSession.getMemberZkName());
LOG.info("Created path:" + path + " zkName = " + groupLeadershipSession.getMemberZkName() + " data watch path: " + dataWatchPath);
//subscribe for data changes in this node; in case the node is not created at time takeLeadership is invoked
//could be a back door to trigger takeLeadershipIfNeeded without a restart
_zkClient.subscribeDataChanges(dataWatchPath,dataListener);
takeLeadershipIfNeeded(groupLeadershipSession);
return groupLeadershipSession;
}
public boolean createBasePath(String baseZkPath,String groupName) {
//create top level path for leadership etc
String path = makeGroupNodePath(baseZkPath, groupName);
String paths = ensurePersistentPathExists(path);
return paths != null;
}
@Override
public GroupLeadershipSession joinGroupWithoutLeadershipDuties(
String domainName, String groupName, String name)
{
GroupLeadershipSessionZkClientImpl groupLeadershipSession =
new GroupLeadershipSessionZkClientImpl(domainName,groupName, name, null);
return groupLeadershipSession;
}
protected void leaveGroup(GroupLeadershipSession groupLeadershipSession)
{
LOG.info("leaveGroup: groupName=" + groupLeadershipSession.getGroupName() +
"; memberName=" + groupLeadershipSession.getMemberName());
if (! (groupLeadershipSession instanceof GroupLeadershipSessionZkClientImpl) )
{
throw new IllegalArgumentException("groupLeadershipSession must be an instance of: " +
GroupLeadershipSessionZkClientImpl.class.getName());
}
GroupLeadershipSessionZkClientImpl zkLeadershipSession =
(GroupLeadershipSessionZkClientImpl) groupLeadershipSession;
zkLeadershipSession.unsubscribeChanges();
zkLeadershipSession.unsubscribeDataChanges();
ensurePersistentMembersZkPathExists(zkLeadershipSession.getBasePathName(),zkLeadershipSession.getGroupName());
// ZkClient simply returns false (does not throw an exception) if the node does not exist
_zkClient.delete(makeMemberNodePath(zkLeadershipSession.getBasePathName(),zkLeadershipSession.getGroupName(),zkLeadershipSession.getMemberZkName()));
if (zkLeadershipSession.isLeader())
{
String leaderZkPath = makeLeaderNodePath(zkLeadershipSession.getBasePathName(),zkLeadershipSession.getGroupName());
// ZkClient simply returns false (does not throw an exception) if the node does not exist
_zkClient.delete(leaderZkPath);
}
}
@Override
public String getLeaderName(String baseZkPath,String groupName)
{
String leader = getLeaderZkName(baseZkPath,groupName);
if (leader != null && leader.length() > 10)
{
leader = leader.substring(0,leader.length()-10);
}
return leader;
}
public String getLeaderZkName(String baseZkPath,String groupName)
{
String leaderZkPath = makeLeaderNodePath(baseZkPath,groupName);
String currentLeader = _zkClient.readData(leaderZkPath, true);
return currentLeader;
}
@Override
public boolean isLeader(String baseZkPath, String groupName, String memberName)
{
String leaderZkName = getLeaderZkName(baseZkPath,groupName);
if (leaderZkName != null)
{
return memberName.equals(leaderZkName);
}
return false;
}
@Override
public List<String> getGroupNames(String baseZkPath)
{
String groupsZkPath = ensurePersistentGroupsZkPathExists(baseZkPath);
List<String> groupNames = _zkClient.getChildren(groupsZkPath);
return groupNames;
}
@Override
public GroupsLeadershipInfo getGroupsLeadershipInfo(String baseZkPath)
{
List<String> sortedGroupNames = getGroupNames(baseZkPath);
Collections.sort(sortedGroupNames);
// This list will be sorted by group name, since it is added to in order
List<GroupLeadershipInfo> sortedGroupLeadershipInfos = new ArrayList<GroupLeadershipInfo>();
for (String groupName : sortedGroupNames)
{
GroupLeadershipInfo groupLeadershipInfo = getGroupLeadershipInfo(baseZkPath,groupName);
sortedGroupLeadershipInfos.add(groupLeadershipInfo);
}
GroupsLeadershipInfo groupsLeadershipInfo = new GroupsLeadershipInfo(
Collections.unmodifiableList(sortedGroupNames),
Collections.unmodifiableList(sortedGroupLeadershipInfos));
return groupsLeadershipInfo;
}
@Override
public GroupLeadershipInfo getGroupLeadershipInfo(String baseZkPath, String groupName)
{
String membersZkPath = ensurePersistentMembersZkPathExists(baseZkPath,groupName);
List<String> sortedMemberNames = _zkClient.getChildren(membersZkPath);
for (int i=0; i < sortedMemberNames.size(); ++i)
{
sortedMemberNames.set(i,sortedMemberNames.get(i).substring(0,sortedMemberNames.get(i).length()-10));
}
Collections.sort(sortedMemberNames);
String leaderName = getLeaderName(baseZkPath,groupName);
// This list will be sorted by member name, since it is added to in order
List<GroupMemberInfo> sortedMemberInfos = new ArrayList<GroupMemberInfo>();
for (String memberName : sortedMemberNames)
{
GroupMemberInfo groupMemberInfo = new GroupMemberInfo(groupName,
memberName,
memberName.equals(leaderName));
sortedMemberInfos.add(groupMemberInfo);
}
GroupLeadershipInfo groupLeadershipInfo = new GroupLeadershipInfo(groupName,
leaderName,
Collections.unmodifiableList(sortedMemberNames),
Collections.unmodifiableList(sortedMemberInfos));
return groupLeadershipInfo;
}
private void takeLeadershipIfNeeded(GroupLeadershipSessionZkClientImpl groupLeadershipSession)
{
String baseZkPath = groupLeadershipSession.getBasePathName();
String groupName = groupLeadershipSession.getGroupName();
String memberName = groupLeadershipSession.getMemberName();
//debug aid
String leaderZkPath = makeLeaderNodePath(baseZkPath,groupName);
String currentLeader = _zkClient.readData(leaderZkPath, true);
String membersZkPath = ensurePersistentMembersZkPathExists(baseZkPath,groupName);
List<String> currentMembers = _zkClient.getChildren(membersZkPath);
LOG.info("takeLeadershipIfNeeded: groupName=" + groupName +
"; memberName=" + memberName +
"; currentLeader=" + currentLeader +
"; members=" + currentMembers);
LeadershipAction leadershipAction = LeadershipAction.DECLINED_LEADERSHIP;
//current members have sequential numbers appended to path
String newLeaderZkName = groupLeadershipSession.subscribeChangesOnHigherPriorityLeader();
if (newLeaderZkName != null)
{
if (newLeaderZkName.equals(groupLeadershipSession.getMemberZkName()))
{
if (currentLeader == null || !currentLeader.equals(newLeaderZkName))
{
takeLeadership(groupLeadershipSession);
leadershipAction = LeadershipAction.TOOK_LEADERSHIP;
groupLeadershipSession.doAcceptLeadership();
}
else
{
LOG.info("takeLeadershipIfNeeded: Current Leader is same as this node; the new leader! Do nothing! Leader= " + currentLeader + " node=" + newLeaderZkName);
}
}
else
{
LOG.info("takeLeaderShipIfNeeded: " + memberName + " declined leadership in favour of leader " + newLeaderZkName);
}
}
else
{
LOG.info("takeLeadershipIfNeeded: No nodes seen yet! Declined leadership at " + groupLeadershipSession.getMemberName());
}
//debug aid
List<String> uponExitMembers = _zkClient.getChildren(membersZkPath);
String uponExitLeader = _zkClient.readData(leaderZkPath, true);
String msg =
"takeLeadershipIfNeeded: " + leadershipAction +
"; groupName=" + groupName +
"; memberName=" + memberName +
"; uponExitLeader=" + uponExitLeader +
"; uponExitMembers=" + uponExitMembers;
if (leadershipAction == LeadershipAction.NONE)
{
LOG.warn("Unexpected leadershipAction: " + msg);
}
else
{
LOG.info(msg);
}
}
private void relinquishLeadership(GroupLeadershipSessionZkClientImpl groupLeadershipSession)
{
LOG.info("Relinquishing leadership: " + groupLeadershipSession.getMemberZkName());
groupLeadershipSession.unsubscribeChanges();
groupLeadershipSession.unsubscribeDataChanges();
}
private boolean takeLeadership(GroupLeadershipSessionZkClientImpl groupLeadershipSession)
{
String leaderZkPath = makeLeaderNodePath(groupLeadershipSession.getBasePathName(),groupLeadershipSession.getGroupName());
//if leader node doesn't exist , leader is ready to take control
if (_zkClient.exists(leaderZkPath))
{
LOG.info(leaderZkPath + " exists! Issuing delete at " + groupLeadershipSession.getMemberZkName());
//issue delete ; could be redundant; but still;
_zkClient.delete(leaderZkPath);
}
_zkClient.createEphemeral(leaderZkPath, groupLeadershipSession.getMemberZkName());
//leader doesn't follow anyone else
groupLeadershipSession.unsubscribeChanges();
return true;
}
/**
* Package-private - use externally for <b>TESTING ONLY</b>
*
* @return the zkClient instance
*/
ZkClient getZkClient()
{
return _zkClient;
}
@Override
public void close()
{
_zkClient.close();
}
protected class GroupLeadershipSessionZkClientImpl implements GroupLeadershipSession
{
private final String _groupName;
private final String _memberName;
private final String _basePathName;
private final AcceptLeadershipCallback _acceptLeadershipCallback;
private final String _uniqueID;
private IZkChildListener _childListener;
private boolean _hasLeftGroup;
private final Logger LOG = Logger.getLogger(GroupLeadershipSessionZkClientImpl.class);
private IZkDataListener _dataListener;
private final ZkSeqComparator _comparator;
//zk derived state
private String _memberZkName;
private String _listeningMemberZkName;
private String _sharedDataPath = SHARED_DATA_ZK_PATH_PART;
/**
* Constructor
*/
public GroupLeadershipSessionZkClientImpl(String basePathName,
String groupName,
String memberName,
AcceptLeadershipCallback acceptLeadershipCallback)
{
_groupName = groupName;
_memberName = memberName;
_basePathName = basePathName;
_acceptLeadershipCallback = acceptLeadershipCallback;
_uniqueID = UUID.randomUUID().toString();
_comparator = new ZkSeqComparator();
_memberZkName = null;
_listeningMemberZkName = null;
}
public String getListeningMemberZkName()
{
return _listeningMemberZkName;
}
public void setListeningMemberZkName(String listenPath)
{
_listeningMemberZkName = listenPath;
}
public void setMemberZkName(String memberZkPath)
{
_memberZkName = memberZkPath;
}
public String getMemberZkName()
{
return _memberZkName;
}
protected void setDataListener(IZkDataListener dataListener)
{
_dataListener = dataListener;
}
public String getUniqueID()
{
return _uniqueID;
}
@Override
public GroupLeadershipConnection getGroupLeadershipConnection()
{
return GroupLeadershipConnectionZkClientImpl.this;
}
protected GroupLeadershipConnectionZkClientImpl getGroupLeadershipConnectionInternal()
{
return GroupLeadershipConnectionZkClientImpl.this;
}
@Override
public String getGroupName()
{
return _groupName;
}
@Override
public String getMemberName()
{
return _memberName;
}
public AcceptLeadershipCallback getAcceptLeadershipCallback()
{
return _acceptLeadershipCallback;
}
protected void setListener(IZkChildListener childListener)
{
_childListener = childListener;
}
protected void subscribeChanges(String zkMemberName)
{
unsubscribeChanges();
String memberPath = makeMemberNodePath(_basePathName,_groupName,zkMemberName);
LOG.info("Adding listener on: " + memberPath + " by: " + this.getMemberZkName());
// No exception is thrown if the path does not exist; the listener will be called if/when
// the path comes into existence
_zkClient.subscribeChildChanges(memberPath, _childListener);
_listeningMemberZkName = zkMemberName;
}
/** Return zk name (with seq number) of first node seen in order and set watches for failover **/
protected String subscribeChangesOnHigherPriorityLeader()
{
boolean done = false;
List<String> memberZkNames = null;
while (!done)
{
String membersZkPath = ensurePersistentMembersZkPathExists(_basePathName,_groupName);
memberZkNames = _zkClient.getChildren(membersZkPath);
sortByLeaderPriority(memberZkNames);
int idx = memberZkNames.indexOf(getMemberZkName());
if (idx > 0)
{
do
{
//search for a node higher in the order that still is 'seen' by this node
--idx;
String higherPriorityLeaderZkName = memberZkNames.get(idx);
//set anticipatory subscription on the adjacent node; then check the possibility that the node on which the watch was set is no longer present
if (!higherPriorityLeaderZkName.equals(getListeningMemberZkName()))
{
subscribeChanges(higherPriorityLeaderZkName);
}
String higherPriorityLeaderZkPath = makeMemberNodePath(_basePathName,_groupName,higherPriorityLeaderZkName);
done = _zkClient.exists(higherPriorityLeaderZkPath);
//debug aid
if (!done)
{
LOG.info("subscribeChangesOnHigherPriorityLeader: " + getMemberZkName() + "Potential race avoided! " + higherPriorityLeaderZkPath + " not present! ");
}
}
while (!done && (idx > 0));
}
else
{
//this node is the leader; or this node has not been created yet
done=true;
}
}
return ( (memberZkNames!=null) && (memberZkNames.size() > 0))? memberZkNames.get(0):null;
}
protected void unsubscribeChanges()
{
if (_listeningMemberZkName != null)
{
ensurePersistentMembersZkPathExists(_basePathName,_groupName);
LOG.info("Removing listener on: " + getListeningMemberZkName() + " by: " + getMemberZkName());
// No exception is thrown if node or listener does not exist
_zkClient.unsubscribeChildChanges(makeCurrentListeningOnMemberPath(), _childListener);
}
}
protected String makeCurrentListeningOnMemberPath()
{
String currentListeningMemberPath = makeMemberNodePath(_basePathName,_groupName, _listeningMemberZkName);
return currentListeningMemberPath;
}
protected void unsubscribeDataChanges()
{
if (_dataListener != null)
{
LOG.info("Removing data listener on: " + getMemberZkName());
_zkClient.unsubscribeDataChanges(getMemberZkName(),_dataListener);
}
}
protected void sortByLeaderPriority(List<String> memberNames)
{
//use the fact that sequential nodes were created
Collections.sort(memberNames,_comparator);
}
@Override
public boolean isLeader()
{
return getGroupLeadershipConnection().isLeader(getBasePathName(),getGroupName(), getMemberZkName());
}
@Override
public String getLeaderName()
{
return getGroupLeadershipConnection().getLeaderName(getBasePathName(),getGroupName());
}
public String getLeaderZkName()
{
GroupLeadershipConnection conn = getGroupLeadershipConnection();
if (conn instanceof GroupLeadershipConnectionZkClientImpl)
{
return ((GroupLeadershipConnectionZkClientImpl) conn).getLeaderZkName(getBasePathName(),getGroupName());
}
return null;
}
@Override
public void leaveGroup()
{
if (_hasLeftGroup)
{
throw new IllegalStateException("This session has already left the group: " + this);
}
getGroupLeadershipConnectionInternal().leaveGroup(this);
_hasLeftGroup = true;
}
protected void doAcceptLeadership()
{
_acceptLeadershipCallback.doAcceptLeadership(this);
}
@Override
public GroupLeadershipInfo getGroupLeadershipInfo()
{
return getGroupLeadershipConnection().getGroupLeadershipInfo(getBasePathName(),getGroupName());
}
@Override
public String toString()
{
return "basePathName=" + _basePathName + " ; groupName=" + _groupName +
"; memberName=" + _memberName +
"; uniqueID=" + _uniqueID +
"; listeningOnMemberName=" + _listeningMemberZkName +
"; hasLeftGroup=" + _hasLeftGroup +
"; leadershipInfo=" + getGroupLeadershipInfo();
}
@Override
public String getBasePathName()
{
return _basePathName;
}
@Override
public Object readGroupData()
{
return readGroupData(null);
}
@Override
public Object readGroupData(String key)
{
String groupNodePath = makeGroupSharedDataNodePath(this.getBasePathName(),this.getGroupName(),this.getSharedDataPath());
if (key != null) groupNodePath = groupNodePath + "/" + key;
//don't throw exception; set that flag to 'true'
if (LOG.isDebugEnabled())
LOG.debug("reading data from: "+groupNodePath);
//don't throw exception; set that flag to 'true'
return _zkClient.readData(groupNodePath, true);
}
@Override
public boolean writeGroupData(Object obj)
{
return writeGroupData(null,obj);
}
protected void createPersistentSharedDataNode()
{
String groupNodePath = makeGroupSharedDataNodePath(this.getBasePathName(),this.getGroupName(),this.getSharedDataPath());
if (!_zkClient.exists(groupNodePath))
{
LOG.info("creating data node: "+groupNodePath);
_zkClient.createPersistent(groupNodePath,null);
}
}
@Override
public boolean writeGroupData(String key, Object obj)
{
String groupNodePath = makeGroupSharedDataNodePath(this.getBasePathName(),this.getGroupName(),this.getSharedDataPath());
createPersistentSharedDataNode();
if (null != key) {
groupNodePath = groupNodePath + "/" + key;
if (!_zkClient.exists(groupNodePath))
{
LOG.info("creating data node: "+groupNodePath);
_zkClient.createPersistent(groupNodePath,obj);
return true;
}
}
if (LOG.isDebugEnabled())
LOG.debug("writing data to: "+groupNodePath);
_zkClient.writeData(groupNodePath, obj);
return true;
}
@Override
public boolean removeGroupData()
{
return removeGroupData(null);
}
@Override
public boolean removeGroupData(String key)
{
String groupNodePath = makeGroupSharedDataNodePath(this.getBasePathName(),this.getGroupName(),this.getSharedDataPath());
if (key == null) {
LOG.info("removing data node: "+groupNodePath);
return _zkClient.deleteRecursive(groupNodePath);
} else {
LOG.info("removing data node: "+groupNodePath+"/"+key);
return _zkClient.delete(groupNodePath+"/"+key);
}
}
@Override
public String getSharedDataPath()
{
return _sharedDataPath;
}
@Override
public void setSharedDataPath(String sharedDataPath)
{
_sharedDataPath = sharedDataPath;
}
@Override
public List<String> getKeysOfGroupData()
{
String groupNodePath = makeGroupSharedDataNodePath(this.getBasePathName(),this.getGroupName(),this.getSharedDataPath());
if (_zkClient.exists(groupNodePath)) {
return _zkClient.getChildren(groupNodePath);
}
return null;
}
}
class ZkSeqComparator implements Comparator<String>
{
@Override
public int compare(String s1,String s2)
{
String s = s1.substring(s1.length()-10);
return s.compareTo(s2.substring(s2.length()-10));
}
}
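  /*
   * Illustrative usage sketch (not part of the original class; the ZooKeeper
   * connection string is an assumption, and the group/member names follow the
   * example in the class javadoc). A member joins a group, supplies a callback
   * that is invoked when it becomes leader, and later leaves the group:
   *
   *   ZkClient zkClient = new ZkClient("localhost:2181");
   *   GroupLeadershipConnection conn =
   *       new GroupLeadershipConnectionZkClientImpl(zkClient);
   *   GroupLeadershipSession session = conn.joinGroup(
   *       "/GroupLeadershipConn", "member2RepDS", "rep1",
   *       new AcceptLeadershipCallback() {
   *         public void doAcceptLeadership(GroupLeadershipSession s) {
   *           LOG.info("Accepted leadership as " + s.getMemberName());
   *         }
   *       });
   *   // ... do work; the callback fires if/when this member becomes leader ...
   *   session.leaveGroup();
   *   conn.close();
   */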
} |
Do you have a terrified Cinderella rug I could buy?
Designer:
I’m making the designs for that new Cinderella rug. Are we doing the standard smile?
Client:
No, I’ve got a new idea. I don’t want a smile. I want a slightly concerned frown. Like one of those moments on the Bachelor when the guy says something really dumb and the contestants are trying to pretend he didn’t? I want Cinderella’s mouth to say, “What did I get myself into? I barely know this guy. I decided to marry him after one dance? Who does that? That’s how marriages start on the Jersey Shore. I only know two things about this guy: He’s good at dancing and he has my shoe. Is this really who I want raising my children?” Can you make the smile look like that?
Designer:
I think so. We call it the “old turkey” in the design industry. It’s the look you make when you smell old turkey in your fridge and you can’t remember when you bought it. You really want a turkey sandwich but you’re perched on the razor’s edge of food poisoning. Is the turkey still good? Does turkey always smell this gross and you’ve just never noticed? You’d call your wife and ask her but she’s super tired of you calling her in the middle of the day with turkey-related questions, so instead you just scrunch up your face and say “ehhhhh.”
Client:
Perfect. And for the eyes, I want her cutting them to the side trying to get the attention of a friend who can save her from a bad date. She met the guy on Tinder and he’s committed a significant amount of “Face Fraud.”
Designer:
What’s that?
Client:
Face Fraud is when you use a photo from 5 years or older as your profile pic. You find your best photo and post it regardless of if it looks like you anymore. Anyway, she’s on this date with this guy from Tinder and she regrets it. The guy didn’t mention how many ferrets he owns and he owns a lot. He won’t stop talking about them and keeps saying, “They really don’t smell once you get to know them.” But that’s a lie. And she knows it, Cinderella knows it. She just saw Belle walk into the bar and she’s hoping that if she can cut her eyes and express enough panic/terror in them, Belle will come over and help her escape the date. But Belle is busy talking to a cup and a plate so it’s a real challenge. She’s cutting those eyes, just hoping. Can you do that look?
Designer:
Definitely.
Client:
Great. This is going to be one amazing rug.
Tweet this please: |
/**
* @Author mcrwayfun
* @Description
* @Date Created in 2018/6/5
*/
public class Solution {
public boolean isSymmetric(TreeNode root) {
return root == null || (root.left == null && root.right == null) || isSymmetricHelp(root.left, root.right);
}
private boolean isSymmetricHelp(TreeNode left, TreeNode right) {
if (left == null && right == null)
return true;
if (left == null || right == null)
return false;
if (left.val != right.val)
return false;
return isSymmetricHelp(left.left, right.right) && isSymmetricHelp(left.right, right.left);
}
public static void main(String[] args) {
/**
* 1
* / \
* 2 2
* / \ / \
* 3 4 4 3
*/
TreeNode root = new TreeNode(1);
root.left = new TreeNode(2);
root.left.left = new TreeNode(3);
root.left.right = new TreeNode(4);
root.right = new TreeNode(2);
root.right.left = new TreeNode(4);
root.right.right = new TreeNode(3);
boolean symmetric = new Solution().isSymmetric(root);
System.out.println(symmetric);
}
} |
/**
* Check that fonts present in the Resources dictionary match with PDF/A-1 rules
*
* @param context
* @param resources
* @throws ValidationException
*/
protected void validateFonts(PreflightContext context, PDResources resources) throws ValidationException
{
Map<String, PDFont> mapOfFonts = getFonts(resources.getCOSObject(), context);
if (mapOfFonts != null)
{
for (Entry<String, PDFont> entry : mapOfFonts.entrySet())
{
ContextHelper.validateElement(context, entry.getValue(), FONT_PROCESS);
}
}
} |
Use of Wehner Schulze to predict skid resistance of Irish surfacing materials
This paper details the first assessment of asphalt mixes used in Ireland using the Wehner Schulze test equipment. The mixes assessed were 10mm SMA, 14mm SMA and hot rolled asphalt (HRA) made with PSV 62 greywacke aggregate. The Wehner Schulze was developed about 30 years ago in Germany and is currently being considered as a European Standard test method. The equipment used was located at IFSTTAR in Nantes, France. Asphalt test specimens (305mm x 305mm x 50mm) were prepared using a Cooper roller compactor. 225mm diameter cores were then extracted from the slabs for testing on the WS machine. The results found the 10mm and 14mm to have higher friction coefficient values compared to the hot rolled asphalt. Analysis of the data suggests that this is probably due to differing types of contact between the asphalt surface and the Wehner Schulze polishing rollers and rubber pads used for friction measurement. The investigation suggests that the tire / asphalt surface interface needs further research to help explain both the laboratory and on-site measurement of friction. |
/**
* @author Bas Leijdekkers
*/
public class DuplicateCharacterInClassInspection extends LocalInspectionTool {
@Override
public @NotNull PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
return new DuplicateCharacterInClassVisitor(holder);
}
private static class DuplicateCharacterInClassVisitor extends RegExpElementVisitor {
private final ProblemsHolder myHolder;
DuplicateCharacterInClassVisitor(@NotNull ProblemsHolder holder) {
myHolder = holder;
}
@Override
public void visitRegExpClass(RegExpClass regExpClass) {
final HashSet<Object> seen = new HashSet<>();
for (RegExpClassElement element : regExpClass.getElements()) {
checkForDuplicates(element, seen);
}
}
private void checkForDuplicates(RegExpClassElement element, Set<Object> seen) {
if (element instanceof RegExpChar) {
final RegExpChar regExpChar = (RegExpChar)element;
final int value = regExpChar.getValue();
if (value != -1 && !seen.add(value)) {
myHolder.registerProblem(regExpChar,
RegExpBundle.message("warning.duplicate.character.0.inside.character.class", regExpChar.getText()),
new DuplicateCharacterInClassFix(regExpChar));
}
}
else if (element instanceof RegExpSimpleClass) {
final RegExpSimpleClass regExpSimpleClass = (RegExpSimpleClass)element;
final RegExpSimpleClass.Kind kind = regExpSimpleClass.getKind();
if (!seen.add(kind)) {
final String text = regExpSimpleClass.getText();
myHolder.registerProblem(regExpSimpleClass,
RegExpBundle.message("warning.duplicate.predefined.character.class.0.inside.character.class", text),
new DuplicateCharacterInClassFix(regExpSimpleClass));
}
}
}
}
private static final class DuplicateCharacterInClassFix implements LocalQuickFix {
private final String myText;
private DuplicateCharacterInClassFix(RegExpElement predefinedCharacterClass) {
myText = predefinedCharacterClass.getText();
}
@Override
public @NotNull String getName() {
return RegExpBundle.message("inspection.quick.fix.remove.duplicate.0.from.character.class", myText);
}
@Override
public @NotNull String getFamilyName() {
return RegExpBundle.message("inspection.quick.fix.remove.duplicate.element.from.character.class");
}
@Override
public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
descriptor.getPsiElement().delete();
}
}
} |
def main():
nmxy=input().split(' ')
army=input().split(' ')
vest=input().split(' ')
result=0
detail=[]
j=0
for i in range(len(army)):
while j<int(nmxy[1]) and int(army[i])-int(nmxy[2])> int(vest[j]):
j+=1
if j<int(nmxy[1]):
if int(army[i])+int(nmxy[3])>= int(vest[j]):
result+=1
detail.append((i+1,j+1))
j+=1
else:
break
print(result)
for i in detail:
print(i[0],i[1])
if __name__ == "__main__":
main() |
package router
import (
"net/http"
"time"
"github.com/sarmerer/forum/api/config"
"github.com/sarmerer/forum/api/controllers"
"github.com/sarmerer/forum/api/middleware"
)
type route struct {
URI string
Handler func(http.ResponseWriter, *http.Request)
Method string
MinRole int
NeedAuth bool
Activity bool
RateLimit limit
}
type limit struct {
Requests int
PerTime time.Duration
Cooldown time.Duration
}
// SetupRoutes sets handlers with middleware chains to API routes
func (mux *Router) SetupRoutes() {
routes := apiRoutes
for _, route := range routes {
seq := []middleware.Middlewares{
middleware.Logger,
middleware.SetHeaders,
middleware.SetContext,
}
if (limit{} != route.RateLimit) {
limiter := middleware.RateLimit(route.RateLimit.Requests, route.RateLimit.PerTime, route.RateLimit.Cooldown)
seq = append(seq, limiter)
}
if route.NeedAuth {
seq = append(seq, middleware.AuthorizedOnly)
}
if route.MinRole == config.RoleModer {
seq = append(seq, middleware.ModerOrHigher)
}
if route.MinRole == config.RoleAdmin {
seq = append(seq, middleware.AdminOnly)
}
if route.Activity {
seq = append(seq, middleware.UpdateUserActivity)
}
mux.HandleFunc(route.URI, route.Method, middleware.Chain(route.Handler, seq...))
}
}
var apiRoutes = []route{
/* -------------------------------------------------------------------------- */
/* Auth routes */
/* -------------------------------------------------------------------------- */
{
URI: "/api/oauth",
Handler: controllers.OAuthHandler,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/auth/verify",
Handler: controllers.VerifyEmail,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
RateLimit: limit{Requests: 3, PerTime: time.Second},
},
{
URI: "/api/auth/send-verification",
Handler: controllers.SendVerification,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
RateLimit: limit{Requests: 1, PerTime: time.Minute, Cooldown: 1 * time.Minute},
},
{
URI: "/api/auth/signin",
Handler: controllers.SignIn,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
RateLimit: limit{Requests: 10, PerTime: time.Minute, Cooldown: 2 * time.Minute},
},
{
URI: "/api/auth/signup",
Handler: controllers.SignUp,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/auth/signout",
Handler: controllers.LogOut,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: true,
},
{
URI: "/api/auth/me",
Handler: controllers.Me,
Method: http.MethodGet,
MinRole: config.RoleUser,
NeedAuth: true,
},
/* -------------------------------------------------------------------------- */
/* User routes */
/* -------------------------------------------------------------------------- */
{
URI: "/api/users",
Handler: controllers.GetUsers,
Method: http.MethodGet,
MinRole: config.RoleAdmin,
NeedAuth: true,
},
{
URI: "/api/user/find",
Handler: controllers.FindUser,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/user/update",
Handler: controllers.UpdateUser,
Method: http.MethodPut,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/user/delete",
Handler: controllers.DeleteUser,
Method: http.MethodDelete,
MinRole: config.RoleUser,
NeedAuth: true,
},
/* -------------------------------------------------------------------------- */
/* Post routes */
/* -------------------------------------------------------------------------- */
{
URI: "/api/post/find",
Handler: controllers.FindPost,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/posts",
Handler: controllers.GetPosts,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/post/create",
Handler: controllers.CreatePost,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/post/update",
Handler: controllers.UpdatePost,
Method: http.MethodPut,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/post/delete",
Handler: controllers.DeletePost,
Method: http.MethodDelete,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/post/rate",
Handler: controllers.RatePost,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
/* -------------------------------------------------------------------------- */
/* Categories routes */
/* -------------------------------------------------------------------------- */
{
URI: "/api/categories",
Handler: controllers.GetAllCategories,
Method: http.MethodGet,
MinRole: config.RoleUser,
NeedAuth: false,
},
/* -------------------------------------------------------------------------- */
/* Comment routes */
/* -------------------------------------------------------------------------- */
{
URI: "/api/comments",
Handler: controllers.GetComments,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/comments/find",
Handler: controllers.FindComments,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: false,
},
{
URI: "/api/comment/add",
Handler: controllers.CreateComment,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/comment/update",
Handler: controllers.UpdateComment,
Method: http.MethodPut,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/comment/delete",
Handler: controllers.DeleteComment,
Method: http.MethodDelete,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
{
URI: "/api/comment/rate",
Handler: controllers.RateComment,
Method: http.MethodPost,
MinRole: config.RoleUser,
NeedAuth: true,
Activity: true,
},
/* -------------------------------------------------------------------------- */
/* Images server */
/* -------------------------------------------------------------------------- */
{
URI: "/api/images",
Handler: controllers.ServeImage,
Method: http.MethodGet,
MinRole: config.RoleUser,
},
{
URI: "/api/image/upload",
Handler: controllers.UploadImage,
Method: http.MethodPost,
MinRole: config.RoleUser,
},
}
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 23 13:24:54 2020
@author: timhe
"""
# General Python modules
import numpy as np
import os
import glob
import pandas as pd
from osgeo import gdal
def Nearest_Interpolate(Dir_in, Startdate, Enddate, Dir_out=None):
"""
This function aggregates hourly tiff files into daily tiff files
(it calculates the total sum of the 24 hourly rasters for each day).
Parameters
----------
Dir_in : str
Path to the input data
Startdate : str
Contains the start date of the model 'yyyy-mm-dd'
Enddate : str
Contains the end date of the model 'yyyy-mm-dd'
Dir_out : str
Path to the output data, default is same as Dir_in
"""
# import WA+ modules
import watertools.General.data_conversions as DC
import watertools.General.raster_conversions as RC
# Change working directory
os.chdir(Dir_in)
# Define end and start date
Dates = pd.date_range(Startdate, Enddate, freq='D')
# Find all hourly files
files = glob.glob('*hourly*.tif')
# Get array information and define projection
geo_out, proj, size_X, size_Y = RC.Open_array_info(files[0])
if int(proj.split('"')[-2]) == 4326:
proj = "WGS84"
# Get the No Data Value
dest = gdal.Open(files[0])
NDV = dest.GetRasterBand(1).GetNoDataValue()
# Define output directory
if Dir_out is None:
Dir_out = Dir_in
if not os.path.exists(Dir_out):
os.makedirs(Dir_out)
# loop over the months and sum the days
for date in Dates:
Year = date.year
Month = date.month
Day = date.day
files_one_day = glob.glob('*hourly*%d.%02d.%02d*.tif' % (Year, Month, Day))
# Create empty arrays
Daily_data = np.zeros([size_Y, size_X])
if len(files_one_day) != 24:
print("One hour is missing!!! day %s month %s year %s" %(Day, Month, Year))
for file_one_year in files_one_day:
file_path = os.path.join(Dir_in, file_one_year)
Hour_data = RC.Open_tiff_array(file_path)
Hour_data[np.isnan(Hour_data)] = 0.0
Hour_data[Hour_data == -9999] = 0.0
Daily_data += Hour_data
# Define output name
output_name = os.path.join(Dir_out, file_one_year
.replace('hourly', 'daily')
.replace('hour', 'day'))
output_name = output_name[:-19] + '%d.%02d.%02d.tif' % (date.year, date.month, date.day)
# Save tiff file
DC.Save_as_tiff(output_name, Daily_data, geo_out, proj)
return
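A minimal usage sketch is shown below; the directory paths and date range are hypothetical, and the function expects hourly GeoTIFFs whose file names contain 'hourly' and a 'yyyy.mm.dd' date, as in the glob pattern above.

# Hypothetical call: aggregate hourly GeoTIFFs into daily totals for January 2020.
# The paths are placeholders, not part of the original module.
if __name__ == "__main__":
    Nearest_Interpolate(
        Dir_in="/data/precip/hourly",
        Startdate="2020-01-01",
        Enddate="2020-01-31",
        Dir_out="/data/precip/daily",
    )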
|
// Hash generates a hashed byte array from the given username and salt using scrypt
// (N=16384, r=8, p=1); the scrypt import and the keyLength constant are assumed to be defined elsewhere in the package.
func Hash(userName, salt string) ([]byte, error) {
var (
hash []byte
err error
)
hash, err = scrypt.Key([]byte(userName), []byte(salt), 16384, 8, 1, keyLength)
if err != nil {
return nil, err
}
return hash, err
} |
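For comparison, the same key derivation can be sketched in Python with the standard library's hashlib.scrypt, using the same cost parameters as the Go function above; the 32-byte output length is an assumption, since the Go snippet's keyLength constant is not shown.

import hashlib

# Minimal sketch mirroring the Go Hash function's scrypt parameters (N=16384, r=8, p=1).
# The 32-byte key length is an assumption; the original keyLength constant is not shown.
def hash_key(user_name: str, salt: str, key_length: int = 32) -> bytes:
    return hashlib.scrypt(
        user_name.encode("utf-8"),
        salt=salt.encode("utf-8"),
        n=16384,  # CPU/memory cost factor
        r=8,      # block size
        p=1,      # parallelization factor
        dklen=key_length,
    )

# Deriving the key twice with identical inputs yields identical bytes.
assert hash_key("alice", "somesalt") == hash_key("alice", "somesalt")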
# imdbRating.py
####Python Script to find the IMDB rating of movies and TV series#####
import requests
import bs4 as bs
#### Function to confirm the name of the movie or TV series to get the correct rating ####
def cnfm_page(link):
try:
name_link = 'https://www.imdb.com'+link
req2 = requests.get(name_link)
soup2 = bs.BeautifulSoup(req2.content,'lxml')
title = soup2.find('div',{'class':'title_wrapper'}).find('h1')
plot = soup2.find('div',{'class':'plot_summary_wrapper'}).find('div',{'class','summary_text'})
print (title.text)
print ('With plot: {}'.format(plot.text.strip()))
ret = input('Is this what you are searching for(yes or no): ')
if ret=='yes' or ret=='Yes':
return True
if ret=='no' or ret=='No':
return False
except:
print ('XXXXXXX NO SUCH NAME EXIST.....ENTER THE VALID NAME XXXXXXXX')
exit()
#### Function to get the rating after the confirmation of the movie or TV series name ####
def getrating(link):
try:
comp_link = 'https://www.imdb.com'+link
req3 = requests.get(comp_link)
soup3 = bs.BeautifulSoup(req3.content,'lxml')
rating = soup3.find('div',{'class':'ratingValue'}).find('strong')['title']
return rating
except:
print ('XXXXXXX NO SUCH NAME EXIST.....ENTER THE VALID NAME XXXXXXXX')
exit()
name = input("Enter the movie or TV series name: ")
url = 'http://www.imdb.com/find?ref_=nv_sr_fn&q={}&s=all'.format(name)
req = requests.get(url)
soup = bs.BeautifulSoup(req.content,'lxml')
search_links = soup.find('div',{'class':'findSection'}).find('table').find_all('tr')# this will give the list of all possible links for the given name
Slinks_list = []
for link in search_links:
Slinks_list.append(link.find('a').get('href'))
for link in Slinks_list:
val =cnfm_page(link)
if val is True:
break;
req_link = link  # link of the movie which we are looking for
rating = getrating(req_link)
print ('Rating-- {}'.format(rating)) |
import { Type } from "class-transformer";
import { String } from "typescript-string-operations";
import { Position } from "vscode-languageserver";
import { AliasHelper } from "../../../aliases/AliasHelper";
import { AliasKey } from "../../../aliases/AliasKey";
import { FormattingHelper } from "../../../helper/FormattingHelper";
import { HoverContent } from "../../../helper/HoverContent";
import { CompletionContainer } from "../../../provider/code-completion/CompletionContainer";
import { GenericNode } from "../GenericNode";
import { IndexRange } from "../IndexRange";
import { ActionErrorNode } from "./ActionErrorNode";
import { ConditionNode } from "./operation/ConditionNode";
import { ConnectedOperationNode } from "./operation/ConnectedOperationNode";
import { OperationNode } from "./operation/OperationNode";
export class RuleNode extends GenericNode {
@Type(() => ConditionNode, {
discriminator: {
property: "type",
subTypes: [
{ value: OperationNode, name: "OperationNode" },
{ value: ConnectedOperationNode, name: "ConnectedOperationNode" }
]
}
})
private condition: ConditionNode | null;
@Type(() => ActionErrorNode)
private errorNode: ActionErrorNode | null;
constructor(
errorNode: ActionErrorNode | null,
condition: ConditionNode | null,
line: string[],
range: IndexRange
) {
super(line, range);
this.errorNode = errorNode;
this.condition = condition;
}
public get $errorNode(): ActionErrorNode | null {
return this.errorNode;
}
public set $errorNode(value: ActionErrorNode | null) {
this.errorNode = value;
}
public get $condition(): ConditionNode | null {
return this.condition;
}
public set $condition(value: ConditionNode | null) {
this.condition = value;
}
public getRelevantChildren(): GenericNode[] {
const childList: GenericNode[] = this.getChildren();
if (!!this.$errorNode) {
childList.push(this.$errorNode);
}
return childList;
}
public getChildren(): GenericNode[] {
const childList: GenericNode[] = [];
if (!!this.condition) {
childList.push(this.condition);
}
return childList;
}
public getHoverContent(): HoverContent {
const content: HoverContent = new HoverContent(this.$range, "Rule");
return content;
}
public getCompletionContainer(position: Position): CompletionContainer {
// If the following condition holds, the position is inside the error-node, where we don't want completion
if (
!!this.errorNode &&
!!this.errorNode.$range &&
!this.errorNode.$range.startsAfter(position) &&
(!this.condition || !this.condition.$constrained)
) {
return CompletionContainer.init().emptyTransition();
}
if (!this.condition) {
return CompletionContainer.init().operandTransition();
}
if (!this.condition.$range.startsAfter(position)) {
const container: CompletionContainer = this.condition.getCompletionContainer(
position
);
if (this.condition.isComplete() && this.errorNode == null) {
container.thenKeywordTransition();
}
return container;
} else if (this.condition.$constrained) {
return this.condition.getCompletionContainer(position);
}
return CompletionContainer.init().emptyTransition();
}
public getBeautifiedContent(aliasesHelper: AliasHelper): string {
const ruleString: string = this.$lines.join("\n");
if (!this.condition) {
return ruleString;
}
const splittedRule: string[] = ruleString.split(
this.condition.$lines.join("\n")
);
let returnString: string = "";
if (!String.IsNullOrWhiteSpace(splittedRule[0])) {
returnString +=
FormattingHelper.removeDuplicateWhitespaceFromLine(splittedRule[0]) +
" ";
}
const conditionString: string = this.condition.getBeautifiedContent(
aliasesHelper
);
returnString += conditionString;
if (!String.IsNullOrWhiteSpace(returnString)) {
returnString += "\n";
}
if (!String.IsNullOrWhiteSpace(splittedRule[1])) {
returnString += FormattingHelper.removeDuplicateWhitespaceFromLine(
splittedRule[1]
);
}
if (this.condition.$constrained) {
return returnString;
}
// keywords inside a not constrained rule should be right-justified
const relevantKeys: AliasKey[] = [
AliasKey.AND,
AliasKey.OR,
AliasKey.THEN,
AliasKey.IF
];
const relevantKeywords: string[] = aliasesHelper.getKeywordsByAliasKeys(
...relevantKeys
);
const thenKeyword: string | null = aliasesHelper.getKeywordByAliasKey(
AliasKey.THEN
);
const highestLength: number = Math.max.apply(
null,
relevantKeywords.map(word => word.length)
);
const splittedLines = returnString.split("\n");
let isErrorMessage: boolean = false;
returnString = "";
for (const line of splittedLines) {
let spaceLength: number = highestLength + 1;
const startingKeyword = relevantKeywords.filter(key =>
line.toLowerCase().startsWith(key.toLowerCase())
);
if (startingKeyword.length > 0 && !isErrorMessage) {
if (startingKeyword[0] === thenKeyword) {
isErrorMessage = true;
}
spaceLength = highestLength - startingKeyword[0].length;
}
returnString +=
FormattingHelper.generateSpaces(spaceLength) + line.trim() + "\n";
}
return returnString;
}
}
|
Yogi Adityanath said Rahul doesn't even know how to sit in a temple.
Yogi Adityanath, the Chief Minister of Uttar Pradesh, has derided the temple visits during Rahul Gandhi's campaign in Gujarat, claiming that the Congress leader was told off at a temple about "sitting as if he's doing Namaaz". The saffron-robed priest-politician was replying to a question at an event hosted by a private news channel in Lucknow when he made the comment. "Rahul Gandhi is going to different temples in Gujarat. I am very happy 'unki buddhi shuddh ho rahi hai' (his mind is cleansing)... I feel like laughing and also feel sad that 'us bechare ko yeh bhi nahin maloom ki mandir main kaise baitha jaata hai' (poor thing, he doesn't even know how to sit in a temple). When he had gone to the Kashi Vishwanath temple, he was sitting as if he was about to offer namaaz. The priest had to tell him - this is a temple, not a mosque," Yogi Adityanath said. Gujarat votes on December 9 and 14, and the results will be declared on December 18. Yogi Adityanath sought to remind the Congress leader that his party's UPA government at the centre had told the Supreme Court that the Hindu gods Ram and Krishna were imaginary. "If they are imaginary, then what is Rahul Gandhi doing visiting so many temples?" he questioned. Before he was chosen by the ruling BJP to lead its government in Uttar Pradesh after a huge election win, Yogi Adityanath was the Lok Sabha parliamentarian from Gorakhpur in eastern Uttar Pradesh, a town where he is also head priest at the famous Gorakhnath temple. In May, just over a month after Adityanath took charge, his government told a court that he cannot be prosecuted in a riots case dating back to 2007, in which he has been accused of making an inflammatory speech. The state home department had denied permission to prosecute the Chief Minister, the court was told. Yogi Adityanath is accused of making a provocative speech on January 27, 2007, and inciting riots in Gorakhpur. Since his elevation to the top post in UP, Yogi Adityanath has emerged as a star campaigner for his party and is widely seen as its Hindutva mascot. |
import os
from os.path import exists

# NOTE: DOWNLOAD_DIR (the target directory) and log (the module logger) are defined
# elsewhere in the original module and are not shown in this snippet.

def allocate(filename: str, size: int) -> str:
    if '/' in filename:
        raise ValueError(
            f"allocate(): Argument must NOT contain path! ('{filename}')"
        )
    else:
        filepath = os.path.join(DOWNLOAD_DIR, filename)

    # Terminate on filename conflict
    if exists(filepath):
        raise ValueError(
            f"File '{filepath}' already exists!"
        )

    # Create empty target file:
    #   1. Create new file
    #   2. seek to the size-1 byte position
    #   3. write 1 byte
    # NOTE: Will NOT work as expected on some systems/filesystems.
    # Linux + ReiserFS will NOT allocate all the space,
    # although it APPEARS that the file is as big as it should be.
    try:
        with open(filepath, "wb") as f:
            f.seek(size - 1)
            f.write(b"\0")
        log.debug(f"allocate(): {filepath} {size} bytes")
    except Exception as e:
        # If the file was created, remove it
        try:
            os.remove(filepath)
        except OSError:
            pass
        log.error(f"allocate(): Creating '{filepath}' failed!")
        raise e
    return filepath |
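A short usage sketch follows; it assumes DOWNLOAD_DIR and log are configured as in the original module. On most filesystems the pre-allocated file will be sparse, which is exactly the behaviour the NOTE in the function warns about (st_blocks is a Unix-only attribute).

# Hypothetical usage: pre-allocate a 10 MiB download slot and inspect the result.
path = allocate("example.bin", 10 * 1024 * 1024)
st = os.stat(path)
print(st.st_size)          # apparent size: 10485760 bytes
print(st.st_blocks * 512)  # actual disk usage; much smaller while the file is sparse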
// src/framework/handler/get-framework-detail-handler.spec.ts
import {CachedItemStore} from '../../key-value-store';
import {FileService} from '../../util/file/def/file-service';
import {ApiService} from '../../api';
import {of} from 'rxjs';
import {Channel, Framework, FrameworkDetailsRequest, FrameworkService, FrameworkServiceConfig} from '..';
import { GetFrameworkDetailsHandler } from './get-framework-detail-handler';
describe('GetFrameworkDetailsHandler', () => {
let getFrameworkDetailsHandler: GetFrameworkDetailsHandler;
const mockframeworkService: Partial<FrameworkService> = {};
const mockApiService: Partial<ApiService> = {};
const mockFileService: Partial<FileService> = {};
const mockCacheItemStore: Partial<CachedItemStore> = {};
const mockFrameworkServiceConfig: Partial<FrameworkServiceConfig> = {};
beforeAll(() => {
getFrameworkDetailsHandler = new GetFrameworkDetailsHandler(
mockframeworkService as FrameworkService,
mockApiService as ApiService,
mockFrameworkServiceConfig as FrameworkServiceConfig,
mockFileService as FileService,
mockCacheItemStore as CachedItemStore
);
});
beforeEach(() => {
jest.clearAllMocks();
});
it('should return an instance of GetFrameworkDetailsHandler', () => {
expect(getFrameworkDetailsHandler).toBeTruthy();
});
it('should run handle function from the getFrameworkDetailsHandler including fetchFromServer', () => {
// arrange
const request: FrameworkDetailsRequest = {
frameworkId: 'SOME_FRAMEWORK_ID',
requiredCategories: []
};
const framework: Framework = {
name: 'SOME_NAME',
identifier: 'SOME_IDENTIFIER'
};
const GET_FRAMEWORK_DETAILS_ENDPOINT = '/read';
mockApiService.fetch = jest.fn().mockImplementation(() => of({ body: {result: framework}}));
mockCacheItemStore.getCached = jest.fn().mockImplementation((a, b, c, d, e) => d());
mockFileService.readFileFromAssets = jest.fn().mockImplementation(() => []);
spyOn(mockApiService, 'fetch').and.returnValue(of({
body: {
result: {
response: 'SAMPLE_RESPONSE'
}
}
}));
// act
getFrameworkDetailsHandler.handle(request).subscribe(() => {
// assert
expect(request.frameworkId).toBe('SOME_FRAMEWORK_ID');
expect(mockCacheItemStore.getCached).toHaveBeenCalled();
expect(mockApiService.fetch).toHaveBeenCalled();
});
});
it('should run handle function from the getFrameworkDetailsHandler using fetchFromFile', () => {
// arrange
const request: FrameworkDetailsRequest = {
frameworkId: 'SOME_FRAMEWORK_ID',
requiredCategories: []
};
const framework: Framework = {
name: 'SOME_NAME',
identifier: 'SOME_IDENTIFIER'
};
const GET_FRAMEWORK_DETAILS_ENDPOINT = '/read';
mockApiService.fetch = jest.fn().mockImplementation(() => of(''));
mockCacheItemStore.getCached = jest.fn().mockImplementation((a, b, c, d, e) => e());
mockFileService.readFileFromAssets = jest.fn().mockImplementation(() => []);
// act
getFrameworkDetailsHandler.handle(request).subscribe(() => {
// assert
expect(request.frameworkId).toBe('SOME_FRAMEWORK_ID');
expect(mockCacheItemStore.getCached).toHaveBeenCalled();
expect(mockFileService.readFileFromAssets).toHaveBeenCalled();
});
});
});
|
/**
* Created by raffaelemontella on 12/02/2017.
*/
public class Pgn01F801Parser extends PgnParser implements Pgn01F801 {
public static final String LOG_TAG="PGN01F801";
private Double latitude;
private Double longitude;
/*
private boolean startLat = true;
private boolean startLon = true;
double previousLat = 0;
double previousLon = 0;
static final double ALPHA = 1 - 1.0 / 6;
*/
public static Pgn01F801Parser newParser() {
return new Pgn01F801Parser("$PCDIN,01F801,000C8286,00,A2C9190A2C81B603*7A");
}
public Pgn01F801Parser(String sentence) {
super(sentence);
}
@Override
public void encode() {
}
@Override
public void decode() {
// Degrees = (X HB * 65536 + X LB ) * .0000001
double latitude=get4ByteInt()* .0000001;
double longitude=get4ByteInt()* .0000001;
if (latitude>=-90 && latitude<=90 && longitude>=-180 && longitude<=180) {
this.latitude=latitude;
this.longitude=longitude;
}
}
@Override
public void parse(SignalKModel signalKModel, String src) {
Log.d(LOG_TAG,"latitude:"+latitude+" longitude:"+longitude);
if (longitude!=null && latitude!=null) {
signalKModel.putPosition(SignalKConstants.vessels_dot_self_dot + SignalKConstants.nav_position, latitude, longitude, 0.0, src, now);
}
/*
if (startLat) {
previousLat = latitude;
startLat = false;
}
previousLat = Util.movingAverage(ALPHA, previousLat, latitude);
if (startLon) {
previousLon = longitude;
startLon = false;
}
previousLon = Util.movingAverage(ALPHA, previousLon, longitude);
signalKModel.putPosition(SignalKConstants.vessels_dot_self_dot + SignalKConstants.nav_position, previousLat, previousLon, 0.0, src, now);
*/
}
@Override
public ArrayList<Sentence> asSentences() {
return null;
}
@Override
public double getLatitude() {
return latitude;
}
@Override
public double getLongitude() {
return longitude;
}
} |
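The decode step above turns a 4-byte integer into degrees by scaling with 1e-7. A small Python sketch of the same arithmetic is shown below; the little-endian byte order is an assumption about the wire format, not something stated in the snippet.

import struct

# Sketch of the decode above: a 4-byte signed integer scaled by 1e-7 gives degrees.
# Little-endian byte order is assumed here for illustration.
def decode_degrees(raw4: bytes) -> float:
    value = struct.unpack("<i", raw4)[0]
    return value * 1e-7

# Example: 0x2C81B603 interpreted little-endian -> about 6.23 degrees (illustrative only).
print(decode_degrees(bytes.fromhex("2C81B603")))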
t=int(input())
ss="codeforces"
s=""
for x in range(2,60):
for i in range(0,10):
ans=pow(x,i)*pow((x-1),(10-i))
if(ans>=t):
for j in range(0,i):
ch=ss[j]
for a in range(0,x):
s=s+ch
for j in range(i,10):
ch=ss[j]
for a in range(0,x-1):
s=s+ch
break
if(s!=""):
break
print(s) |