content
stringlengths 10
4.9M
|
---|
//------------------------------------------------------------------------------
// Function Name: siMhlTx_AudioSet()
// Function Description: Set the 9022/4 audio interface to basic audio.
//
// Accepts: none
// Returns: 0 (callers treat non-zero as an error code; this path always
//          reports success)
// Globals: mhlTxAv; also reads AVmode.audio_mode to select the mode
//------------------------------------------------------------------------------
uint8_t siMhlTx_AudioSet (void)
{
    TX_DEBUG_PRINT(("[MHL]: >>siMhlTx_AudioSet()\n"));
    // Program the requested audio mode, then refresh the ACR N value
    // (presumably Audio Clock Regeneration — confirm in SetACRNValue).
    SetAudioMode(AVmode.audio_mode);
    SetACRNValue();
    return 0;
}
def calculate_pressure_drop(self, density, flow, viscosity, length):
    """Return the Darcy-Weisbach pressure drop over a pipe segment.

    Computes f * (L/D) * (rho * v^2 / 2) using the instance's velocity and
    friction-factor helpers and its pipe diameter.
    """
    velocity = self.calculate_velocity(flow)
    friction = self.calculate_friction_factor(velocity, viscosity)
    dynamic_pressure = density * (velocity ** 2) / 2
    return friction * (length / self.diameter) * dynamic_pressure
package main
import (
"fmt"
"strings"
"math/rand"
"crypto/md5"
"path"
"time"
jwt "jangled/sjwt"
"jangled/util"
"github.com/bwmarrin/snowflake"
"github.com/globalsign/mgo/bson"
"github.com/valyala/fasthttp"
)
// User flags
// Bit flags for User.Flags; combine with bitwise OR.
const (
	USER_FLAG_NONE         = 0
	USER_FLAG_STAFF        = 1 << 0
	USER_FLAG_PARTNER      = 1 << 1
	USER_FLAG_EARLYADOPTER = 1 << 24
	// The rest are unused
)

// User premium subscription type
// Values for User.PremiumType; USER_PREMIUM_NONE means no subscription.
const (
	USER_PREMIUM_NONE = 0
	// USER_PREMIUM_NITRO_CLASSIC = 1
	USER_PREMIUM_BOLT = 2
)

// UserSettings is a Discord-compatible structure containing a user's settings
// This struct is safe to be returned by an API call
type UserSettings struct {
	Locale     string `json:"locale"` // e.g. "en-US"
	AfkTimeout int    `json:"afk_timeout"`
	Theme      string `json:"theme"`
	Status     string `json:"status"`
	// TODO: the rest
}

// User is a Discord-compatible structure containing information on a user
// This struct is not safe to be returned by an API call: it carries
// credentials (PasswordHash, JwtSecret) and private contact details.
type User struct {
	ID            snowflake.ID `bson:"_id"`
	Username      string       `bson:"username"`
	Discriminator string       `bson:"discriminator"` // zero-padded 4-digit string (see CreateUser)
	Email         string       `bson:"email,omitempty"`
	Bot           bool         `bson:"bot"`
	Avatar        string       `bson:"avatar"` // avatar hash, filename without extension (see SetAvatar)
	MfaEnabled    bool         `bson:"mfa_enabled"`
	Verified      bool         `bson:"verified"` // presumably email verification — confirm
	Flags         int          `bson:"flags"`        // USER_FLAG_* bitmask
	PremiumType   int          `bson:"premium_type"` // USER_PREMIUM_* value
	PremiumSince  int          `bson:"premium_since"` // presumably a Unix timestamp — confirm
	Phone         string       `bson:"phone"`
	LastSession   int          `bson:"last_session"`
	PasswordHash  string       `bson:"password_hash"` // produced by util.CryptPass
	Settings      *UserSettings `bson:"user_settings"`
	Presence      *gwPktDataUpdateStatus `bson:"presence"`
	// LastMessageIDs maps channel ID -> last read message ID (see MarkRead).
	LastMessageIDs map[snowflake.ID]snowflake.ID `bson:"read_last_message_ids"`
	// JwtSecret is the per-user random component of the token signing
	// secret (see GetTokenSecret).
	JwtSecret string `bson:"jwt_secret"`
}
// CreateUser creates a user with the given credentials, assigns it a random
// 4-digit discriminator, and inserts it into the "users" collection.
// Insertion is retried up to 100 times with a fresh discriminator each time
// (a unique index on username+discriminator is assumed to reject duplicates);
// the last insert error is returned if every attempt fails.
func CreateUser(username, email, password string) (*User, error) {
	c := DB.Core.C("users")
	usr := &User{
		ID:           flake.Generate(),
		Username:     username,
		Email:        email,
		PasswordHash: util.CryptPass(password),
		Settings: &UserSettings{
			Locale: "en-US",
		},
	}
	var err error
	for tries := 0; tries < 100; tries++ {
		// BUG FIX: draw a fresh discriminator on every attempt. The
		// original generated one value before the loop, so all 100
		// retries re-inserted the same username#discriminator pair.
		usr.Discriminator = fmt.Sprintf("%04d", 1+rand.Intn(9998))
		if err = c.Insert(usr); err == nil {
			return usr, nil
		}
	}
	return nil, err
}
// GetUserByID returns the user with the given snowflake ID, or an error if
// no such document exists.
func GetUserByID(ID snowflake.ID) (*User, error) {
	var usr User
	c := DB.Core.C("users")
	if err := c.Find(bson.M{"_id": ID}).One(&usr); err != nil {
		// BUG FIX: return nil on error instead of a partially-populated
		// *User with only the ID set, which callers could mistake for a
		// real record.
		return nil, err
	}
	usr.ID = ID
	return &usr, nil
}
// GetUserByEmail returns the user whose account email matches the given
// address, or an error if no such user exists.
func GetUserByEmail(email string) (*User, error) {
	var usr User
	users := DB.Core.C("users")
	if err := users.Find(bson.M{"email": email}).One(&usr); err != nil {
		return nil, err
	}
	return &usr, nil
}
// GetUserByToken returns a user using an authentication token.
// In staging mode the token is a raw numeric user ID; otherwise it is a JWT
// whose subject is the user's snowflake ID, verified against the user's
// per-account secret.
func GetUserByToken(token string) (*User, error) {
	if *flgStaging {
		// TODO: snowflake.ParseString
		var i snowflake.ID
		n, err := fmt.Sscanf(token, "%d", &i)
		if n != 1 {
			return nil, fmt.Errorf("Bad ID")
		}
		if err != nil {
			return nil, err
		}
		return GetUserByID(i)
	}
	// Production path: parse and validate the JWT claims first (structure
	// and expiry), then load the user the subject names, and only then
	// verify the signature — the secret is per-user.
	claims, err := jwt.Parse(token)
	if err != nil {
		return nil, err
	}
	if err := claims.Validate(); err != nil {
		return nil, err
	}
	subj, err := claims.GetSubject()
	if err != nil {
		return nil, err
	}
	uid, err := snowflake.ParseString(subj)
	if err != nil {
		return nil, err
	}
	user, err := GetUserByID(uid)
	if err != nil {
		return nil, err
	}
	if !jwt.Verify(token, user.GetTokenSecret()) {
		return nil, fmt.Errorf("Invalid token")
	}
	return user, nil
	// NOTE: the original ended with an unreachable "Not implemented"
	// return after an if/else where both branches returned; flattening
	// the else removed it.
}
// GetUserByHttpRequest returns a user using a fasthttp.RequestCtx.
// Specifically, it attempts to authorize the request using a token.
// If ctxvar names a route variable holding a user ID, that user is returned
// instead (with "" or "@me" meaning the authenticated user).
func GetUserByHttpRequest(c *fasthttp.RequestCtx, ctxvar string) (*User, error) {
	b := c.Request.Header.Peek("Authorization")
	if b == nil {
		return nil, fmt.Errorf("No authorization token supplied")
	}
	a := string(b)
	// BUG FIX: strip the auth scheme as a prefix only. strings.Replace
	// with -1 would also mangle any token that happened to contain
	// "Bot " or "Bearer " in its body.
	a = strings.TrimPrefix(a, "Bot ")
	a = strings.TrimPrefix(a, "Bearer ")
	user, err := GetUserByToken(a)
	if err != nil {
		return nil, err
	}
	if ctxvar != "" {
		// BUG FIX: comma-ok assertion instead of panicking when the
		// route variable is absent or not a string.
		uid2, ok := c.UserValue(ctxvar).(string)
		if !ok {
			return nil, fmt.Errorf("Missing route parameter")
		}
		if uid2 == "" || uid2 == "@me" {
			return user, nil
		}
		snow, err := snowflake.ParseString(uid2)
		if err != nil {
			return nil, err
		}
		user, err = GetUserByID(snow)
		if err != nil {
			return nil, err
		}
	}
	return user, nil
}
// GetTokenSecret returns the per-user secret used to sign and verify this
// user's JWTs: the password hash concatenated with the stored random
// JwtSecret, so changing either one invalidates all outstanding tokens.
func (u *User) GetTokenSecret() []byte {
	return []byte(u.PasswordHash + u.JwtSecret)
}
// IssueToken signs and returns a new JWT for this user, valid for the given
// duration from now. The subject claim is the user's snowflake ID and the
// token is signed with the user's per-account secret (see GetTokenSecret).
func (u *User) IssueToken(duration time.Duration) string {
	c := jwt.New()
	c.SetSubject(u.ID.String())
	// Informational claim carrying the user's username#discriminator tag.
	c.Set("tag", u.Username+"#"+u.Discriminator)
	c.SetIssuedAt(time.Now())
	c.SetExpiresAt(time.Now().Add(duration))
	c.SetTokenID() // presumably assigns a unique jti claim — confirm in sjwt
	return c.Generate(u.GetTokenSecret())
}
// ToAPI returns a version of the User struct that can be returned by API calls
// When safe is true, private fields (email, phone, verified flag) are
// omitted; pass safe=false only when the requester is the user themself.
func (u *User) ToAPI(safe bool) *APITypeUser {
	u2 := &APITypeUser{
		ID:            u.ID,
		Username:      u.Username,
		Discriminator: u.Discriminator,
		AvatarHash:    u.Avatar,
		Bot:           u.Bot,
		// NOTE(review): hard-coded true instead of u.MfaEnabled —
		// confirm this is intentional and not a bug.
		MfaEnabled:  true,
		Flags:       u.Flags,
		PremiumType: u.PremiumType,
	}
	if u.Settings != nil {
		u2.Locale = u.Settings.Locale
	}
	if u.PremiumType != USER_PREMIUM_NONE {
		u2.Premium = true
	}
	if !safe {
		// Private fields, only exposed to the account owner.
		if u.Phone != "" {
			u2.Phone = &u.Phone
			u2.Mobile = true
		}
		u2.Email = u.Email
		u2.Verified = &u.Verified
	}
	return u2
}
// StartTyping broadcasts a typing-start gateway event for this user in the
// given channel, timestamped with the current time.
func (u *User) StartTyping(c *Channel) error {
	return StartTypingForUser(u.ID, &gwEvtDataTypingStart{
		ChannelID: c.ID,
		GuildID:   c.GuildID,
		UserID:    u.ID,
		Timestamp: time.Now().Unix(),
	})
}
// MarkRead records mid as the last-read message ID for channel cid on this
// user (in memory only; call Save to persist).
func (u *User) MarkRead(cid, mid snowflake.ID) {
	// BUG FIX: the original tested `!= nil`, which re-allocated (wiping
	// all read state) whenever the map already existed, and panicked on
	// the nil-map write below when it did not.
	if u.LastMessageIDs == nil {
		u.LastMessageIDs = map[snowflake.ID]snowflake.ID{}
	}
	u.LastMessageIDs[cid] = mid
}
// Channels returns the DM channels this user is a recipient of.
func (u *User) Channels() ([]*Channel, error) {
	channels := []*Channel{}
	query := DB.Core.C("channels").Find(bson.M{"recipient_ids": u.ID})
	if err := query.All(&channels); err != nil {
		return nil, err
	}
	return channels, nil
}
// Save persists this user to the "users" collection, inserting the document
// if it does not exist yet (upsert keyed on the user's ID).
func (u *User) Save() error {
	c := DB.Core.C("users")
	_, err := c.UpsertId(u.ID, u)
	return err
}
// SetTag updates this user's username and discriminator, both in the
// database and on the struct. An empty discriminator requests a random
// 4-digit one: up to 100 candidates are tried until the update succeeds
// (a uniqueness constraint on username+discriminator is assumed to reject
// taken pairs); the last error is returned if all attempts fail.
func (u *User) SetTag(username, discriminator string) error {
	c := DB.Core.C("users")
	if discriminator != "" {
		err := c.UpdateId(u.ID, bson.M{"$set": bson.M{"username": username, "discriminator": discriminator}})
		if err == nil {
			u.Username = username
			u.Discriminator = discriminator
		}
		return err
	}
	var err error
	for tries := 0; tries < 100; tries++ {
		// BUG FIX: draw a fresh discriminator on every attempt; the
		// original generated one value before the loop and retried
		// the exact same pair 100 times.
		dscm := fmt.Sprintf("%04d", 1+rand.Intn(9998))
		if err = c.UpdateId(u.ID, bson.M{"$set": bson.M{"username": username, "discriminator": dscm}}); err == nil {
			u.Username = username
			u.Discriminator = dscm
			return nil
		}
	}
	return err
}
// SetAvatar uploads the avatar encoded in dataURL to the file store
// (converted to PNG, capped at 2048x2048), stores the resulting filename,
// and records the hash (filename without extension) on the struct.
func (u *User) SetAvatar(dataURL string) error {
	c := DB.Core.C("users")
	// Content-address the file by the MD5 of the data URL so identical
	// uploads map to the same object key.
	imgFp := fmt.Sprintf("%x", md5.Sum([]byte(dataURL)))
	fullpath, err := ImageDataURLUpload(gFileStore, "/avatars/"+u.ID.String()+"/"+imgFp+".png", dataURL, ImageUploadOptions{MaxWidth: 2048, MaxHeight: 2048, ForcePNG: true})
	if err != nil {
		return err
	}
	bp := path.Base(fullpath)
	// BUG FIX: TrimRight treats its second argument as a *character set*
	// and would strip any trailing '.', 'p', 'n' or 'g' from the hash
	// itself; TrimSuffix removes exactly the extension.
	u.Avatar = strings.TrimSuffix(bp, path.Ext(bp))
	// BUG FIX: propagate the database error instead of discarding it.
	// NOTE(review): the DB stores the name with extension while the
	// struct holds it without — confirm that asymmetry is intended.
	return c.UpdateId(u.ID, bson.M{"$set": bson.M{"avatar": bp}})
}
// Guilds returns all guilds this user is a member of.
func (u *User) Guilds() ([]*Guild, error) {
	return GetGuildsByUserID(u.ID)
}
/*
The following code is for testing.
It is not used in production.
*/
// Initialize dummy users in database
// InitUserStaging inserts two fixed test accounts (IDs 42 and 43) so
// staging mode has known credentials to authenticate with.
// NOTE(review): Insert errors are ignored — presumably acceptable for test
// setup (reruns would fail on the duplicate _id); confirm.
func InitUserStaging() {
	c := DB.Core.C("users")
	c.Insert(&User{
		ID:            42,
		Username:      "test1",
		Discriminator: "1234",
		Email:         "<EMAIL>",
		PasswordHash:  util.CryptPass("<PASSWORD>"),
		Flags:         USER_FLAG_STAFF | USER_FLAG_EARLYADOPTER,
		Settings: &UserSettings{
			Locale: "en-US",
		},
	})
	c.Insert(&User{
		ID:            43,
		Username:      "hello",
		Discriminator: "4242",
		Email:         "<EMAIL>",
		PasswordHash:  util.CryptPass("<PASSWORD>"),
		Flags:         USER_FLAG_EARLYADOPTER,
		Settings: &UserSettings{
			Locale: "en-US",
		},
	})
}
|
def plot_df_feature_dists(model_dir, df1, df2, df1_name, df2_name, feature, class_labels):
    """Overlay per-class Gaussian fits of `feature` for two DataFrames.

    For each class in `class_labels`, fits a normal distribution to the
    feature values of that class in `df1` and in `df2`, then plots both PDFs
    over a shared x-range so the two datasets can be compared visually.

    Args:
        model_dir: output directory. NOTE(review): currently unused here —
            confirm whether the figure should be saved into it.
        df1, df2: DataFrames with TARGET_LABEL and `feature` columns.
        df1_name, df2_name: legend suffixes identifying the two DataFrames.
        feature: name of the feature column to plot.
        class_labels: class names; relabel_df is applied to both frames first.
    """
    df1 = relabel_df(df1, class_labels)
    df2 = relabel_df(df2, class_labels)
    # BUG FIX (cleanup): the original bound `f, ax` and computed `bins`
    # without ever using them; both removed.
    plt.subplots(figsize=(FIG_WIDTH, FIG_HEIGHT), dpi=DPI)
    min_value = min(df1[feature].min(), df2[feature].min())
    max_value = max(df1[feature].max(), df2[feature].max())
    x = np.linspace(min_value, max_value, 100)
    # Two adjacent tab20 colors per class: one for each DataFrame.
    colors = plt.get_cmap('tab20').colors
    color_index = 0
    for class_name in class_labels:
        df1_vals = df1[df1[TARGET_LABEL] == class_name][feature].values
        mean1, std1 = norm.fit(df1_vals)
        df2_vals = df2[df2[TARGET_LABEL] == class_name][feature].values
        mean2, std2 = norm.fit(df2_vals)
        plt.plot(x, norm.pdf(x, mean1, std1), color=colors[color_index],
                 label=class_name + " " + df1_name)
        plt.plot(x, norm.pdf(x, mean2, std2), color=colors[color_index + 1],
                 label=class_name + " " + df2_name)
        color_index += 2
    plt.xlabel(feature.capitalize(), fontsize=LAB_S)
    plt.ylabel("Normalized density", fontsize=LAB_S)
    plt.legend()
University of Minnesota student Amir-Pouyan Shiva got the letter just before New Year's: TCF Bank would be closing the account he and his wife had maintained for five years.
"This letter is to notify you that TCF is exercising its right under the terms of your account contract to discontinue our banking relationship," the Dec. 26 letter begins.
Shiva, a Ph.D. student in anthropology, asked around and quickly realized he was not alone. About a dozen students have reported receiving the letter, according to one U official. Shiva counts more. All of them have one thing in common: They're from Iran.
"We're just ordinary students," said Shiva, who is here on a student visa. "It's not fair."
The university's agreement with TCF Financial Corp. grants the Wayzata-based company the exclusive right to offer checking accounts linked to the university's photo ID cards. Nearly 30,700 students and employees have signed up, worth about $1 million a year in royalties, which the university puts toward student programming and scholarships.
A TCF Bank spokesman said the letters -- sent to other customers and "not just foreign students" -- were triggered by its investigations into transactions that might have violated federal sanctions. They're part of its regular monitoring, required by law, of more than a million checking accounts, said spokesman Jason Korstange.
He wouldn't elaborate on how the transactions allegedly violated sanctions. The U.S. government has a long list of rules and procedures governing transactions that involve Iran as part of its sanctions over the country's alleged pursuit of a nuclear weapons program and state sponsorship of terrorism.
The bank has encouraged the students to call a number or visit a branch. "If indeed the transactions can be explained," Korstange said, "then we'll keep the account open." Shiva, for one, visited a bank on Monday to see if he could work things out.
'A concern'
The university was surprised by word of the letters. "Given our relationship with TCF Bank, we would have expected that TCF representatives would have communicated with us ... prior to students receiving the letter," said university spokeswoman Patty Mattern. "That's a concern we've shared with them."
Shiva and other students approached Prof. William Beeman, who found the bank's explanation "implausible."
"The students have compared notes," he said. "Many of the students have had no money transfers at all. They have not had their accounts overdrawn. Their banking record is spotless.
"The irregularity might be that they're Iranian."
But Korstange said that the bank has "absolutely not" targeted international students and that "there were plenty of Iranian students at the university who did not get this letter." The bank must constantly remain vigilant, he said, responding to a long list of federal requirements.
A spokesman with the U.S. Treasury Department declined to comment on the specifics without knowing more about the accounts. Generally, "we expect banks to practice due diligence in order to adhere to the appropriate laws," said spokesman John Sullivan. "That being said, the focus of our sanctions program is on the Iranian government and its illicit activities, not students who are legally studying in the U.S."
'Challenging situation'
Korstange acknowledged that the letter "was probably not perfectly written" and should have included a process to contest the account's closure.
Barbara Kappler, assistant dean of international student and scholar services, said she met with nine students last week, to discuss options, including opening accounts with other banks.
"I'm concerned for the students," Kappler said. "They have found this to be a challenging situation, understandably."
The university's Twin Cities campus has 67 students from Iran. Most are working on Ph.D.s, Mattern said.
The National Iranian American Council has increasingly heard reports from Iranians and Iranian-Americans who've had their bank accounts closed or were blocked from opening them, said policy director Jamal Abdi. But "never on such a systematic level," he said.
The nonprofit council, based in Washington D.C., opposes broad sanctions "that punish ordinary people," according to its website, in favor of more targeted ones.
"The pattern we're seeing is private companies judging that it's not in their interest to do any business that is any way related to Iran," Abdi said. "This over-enforcement or mis-enforcement of the sanctions is actually leading to discrimination."
Jenna Ross • 612-673-7168 Twitter: @ByJenna |
// ListRatings returns a string of all Records sorted by rating in descending
// order. Records with the same rating are sorted by title in ascending order.
func (l *Library) ListRatings() string {
if len(l.byTitle) == 0 {
return msgLibraryEmpty
}
records := l.sortedRecords()
sort.SliceStable(records, func(i, j int) bool {
return records[i].rating > records[j].rating
})
return SprintRecords(records)
} |
def match(self, regex):
    """Apply a compiled regular expression to this object's text.

    Returns the match object from ``regex.match`` (anchored at the start of
    ``self.text``), or ``None`` when there is no match.
    """
    return regex.match(self.text)
def forward(
    self,
    x: torch.Tensor,
    M_W: torch.Tensor = None,
    M_b: torch.Tensor = None,
    U: torch.Tensor = None,
    V: torch.Tensor = None,
    B: torch.Tensor = None,
    n_samples: int = 100,
    delta: float = 1.0,
    apply_softmax: bool = True
):
    """Monte-Carlo predictive forward pass over a Gaussian last-layer posterior.

    Runs the wrapped model on ``x`` while capturing the *input* of the last
    layer (``phi``) via a forward hook, forms a Gaussian over the last
    layer's outputs with mean ``phi @ M_W + M_b`` and per-example covariance
    ``diag(phi @ U @ phi.T) * V + B``, then averages ``n_samples`` sampled
    outputs (softmaxed when ``apply_softmax`` is True).

    Any of ``M_W``..``B`` left as None falls back to the corresponding
    attribute on ``self``. ``delta`` is currently unused.
    NOTE(review): assumes ``phi`` is 2-D (batch, features) — confirm.
    """
    if M_W is None:
        M_W = self.M_W
    if M_b is None:
        M_b = self.M_b
    if U is None:
        U = self.U
    if V is None:
        V = self.V
    if B is None:
        B = self.B
    py = []
    # Temporary hook to grab the last layer's input during the model pass.
    phi_container = {}
    def phi_hook_fn(module, input, output):
        phi_container[0] = input[0]
    handle = self.last_layer.register_forward_hook(phi_hook_fn)
    self.model.forward(x)
    handle.remove()
    phi = phi_container[0]
    mu_pred = phi @ M_W + M_b
    # Per-example scalar phi U phi^T scales the shared factor V; B is added
    # to every example's covariance.
    Cov_pred = torch.diag(phi @ U @ phi.t()).view(-1, 1, 1) * V.unsqueeze(0) + B.unsqueeze(0)
    # NOTE(review): torch.cholesky is deprecated in newer torch releases;
    # torch.linalg.cholesky is the modern equivalent.
    scale_tril = torch.cholesky(Cov_pred)
    post_pred = MultivariateNormal(mu_pred, scale_tril=scale_tril)
    # Average n_samples reparameterized draws from the predictive Gaussian.
    py_ = 0
    for _ in range(n_samples):
        f_s = post_pred.rsample()
        py_ += torch.softmax(f_s, 1) if apply_softmax else f_s
    py_ /= n_samples
    py.append(py_)
    return torch.cat(py, dim=0)
<reponame>maximilianharr/code_snippets
/*
 * No-op stubs for the legacy VGA register files (MISC/CRTC/SEQ/GC/ATT):
 * every read stub returns 0 and every write stub discards its arguments.
 * They exist only to populate the svgalib I/O hook table installed by
 * rendition_mapio() below.
 */
static int __svgalib_rendition_inmisc(void)
{
    return 0;
}
static void __svgalib_rendition_outmisc(int i)
{
}
static int __svgalib_rendition_incrtc(int i)
{
    return 0;
}
static void __svgalib_rendition_outcrtc(int i, int d)
{
}
static int __svgalib_rendition_inseq(int index)
{
    return 0;
}
static void __svgalib_rendition_outseq(int index, int val)
{
}
static int __svgalib_rendition_ingra(int index)
{
    return 0;
}
static void __svgalib_rendition_outgra(int index, int val)
{
}
static int __svgalib_rendition_inis1(void)
{
    return 0;
}
static int __svgalib_rendition_inatt(int index)
{
    return 0;
}
static void __svgalib_rendition_outatt(int index, int val)
{
}
static void __svgalib_rendition_attscreen(int i)
{
}
/*
 * Read one palette entry: select index i on the DAC read-address register,
 * then three successive data-port reads return the R, G and B components
 * (the DAC auto-advances between reads).
 */
static void __svgalib_rendition_inpal(int i, int *r, int *g, int *b)
{
    OUT(DACRAMREADADR, i );
    *r=IN(DACRAMDATA);
    *g=IN(DACRAMDATA);
    *b=IN(DACRAMDATA);
}
/*
 * Write one palette entry: select index i on the DAC write-address
 * register, then write R, G and B with three successive data-port writes.
 */
static void __svgalib_rendition_outpal(int i, int r, int g, int b)
{
    OUT(DACRAMWRITEADR, i );
    OUT(DACRAMDATA, r );
    OUT(DACRAMDATA, g );
    OUT(DACRAMDATA, b );
}
/*
 * Install this driver's register-access functions into svgalib's global
 * I/O hook table. The legacy-VGA stubs are only installed for a secondary
 * card (the primary presumably keeps the default VGA accessors — confirm);
 * the palette hooks are installed unconditionally.
 */
static void rendition_mapio(void)
{
    if(__svgalib_secondary) {
        __svgalib_inmisc=__svgalib_rendition_inmisc;
        __svgalib_outmisc=__svgalib_rendition_outmisc;
        __svgalib_incrtc=__svgalib_rendition_incrtc;
        __svgalib_outcrtc=__svgalib_rendition_outcrtc;
        __svgalib_inseq=__svgalib_rendition_inseq;
        __svgalib_outseq=__svgalib_rendition_outseq;
        __svgalib_ingra=__svgalib_rendition_ingra;
        __svgalib_outgra=__svgalib_rendition_outgra;
        __svgalib_inatt=__svgalib_rendition_inatt;
        __svgalib_outatt=__svgalib_rendition_outatt;
        __svgalib_attscreen=__svgalib_rendition_attscreen;
        __svgalib_inis1=__svgalib_rendition_inis1;
    }
    __svgalib_inpal=__svgalib_rendition_inpal;
    __svgalib_outpal=__svgalib_rendition_outpal;
}
|
#pragma once
#include <util/random/fast.h>
#include <util/ysaveload.h>
#include <util/generic/vector.h>
// Wrapper around TFastRng64 that counts how many values have been drawn,
// so the generator can be serialized and later restored to the exact same
// position in its random stream.
struct TRestorableFastRng64 : public TCommonRNG<ui64, TRestorableFastRng64> {
    // Forwards any seed source accepted by TFastRng64::TArgs.
    template <typename T>
    TRestorableFastRng64(T&& seedSource)
        : SeedArgs(std::forward<T>(seedSource))
        , Rng(SeedArgs)
    {
    }
    // Serializes the seed material plus the call count; the generator's
    // internal state is not stored — Load() rebuilds it by re-seeding and
    // advancing.
    inline void Save(IOutputStream* s) const {
        ::SaveMany(
            s,
            SeedArgs.Seed1,
            SeedArgs.Seed2,
            SeedArgs.Seq1,
            SeedArgs.Seq2,
            CallCount);
    }
    inline void Load(IInputStream* s) {
        ::LoadMany(
            s,
            SeedArgs.Seed1,
            SeedArgs.Seed2,
            SeedArgs.Seq1,
            SeedArgs.Seq2,
            CallCount);
        // Re-seed in place (placement new over the existing member;
        // assumes TFastRng64 is trivially destructible — confirm) and
        // fast-forward past the values already consumed before Save().
        new (&Rng) TFastRng64(SeedArgs);
        if (CallCount > 0) {
            Rng.Advance(CallCount);
        }
    }
    inline ui64 GenRand() noexcept {
        ++CallCount;
        return Rng.GenRand();
    }
    // Skips delta values, keeping CallCount consistent with the stream.
    inline void Advance(ui64 delta) noexcept {
        CallCount += delta;
        Rng.Advance(delta);
    }
    ui64 GetCallCount() const {
        return CallCount;
    }
private:
    TFastRng64::TArgs SeedArgs;
    TFastRng64 Rng;
    ui64 CallCount = 0; // values drawn or skipped since seeding
};
TVector<ui64> GenRandUI64Vector(int size, ui64 randomSeed);
|
A Tibetan man in his mid thirties named as Sangye Khar set fire to himself and died today outside a police station in Amchok, Sangchu (the Tibetan area of Amdo), according to Tibetan sources in exile. His body was taken away by paramilitary police despite protests from Tibetans, and the situation in the area is tense, according to the same sources. Sangye Khar is the 134th Tibetan to set fire to himself in Tibet since 2009, and chose the day of a major religious anniversary marked by prayer ceremonies across Tibet.
Large deployments of armed troops with riot shields were on the streets of central Lhasa last night and today (December 16), as Tibetans gathered to mark a major Tibetan religious festival, the anniversary of the death of Tsongkhapa, founder of the Gelugpa (Yellow Hat) school of Tibetan Buddhism. Images show crowds of pilgrims by the holy Jokhang temple in a powerful display of devotion, and troops in camouflage gear with riot shields and firemen in red uniforms with fire extinguishers at the scene.
The religious festival falls on the 25th of the 10th Tibetan month (coinciding with December 16 this year) and is traditionally marked by lighting of butter-lamps. Both the Dalai Lama and the Panchen Lama belong to the Gelugpa (Yellow Hat) order of Tibetan Buddhism, founded by teacher and scholar Tsongkhapa (1357-1419). The festival is observed in the Jokhang temple in Lhasa, one of Tibet’s holiest sites, as the images from today show.
The mass gathering of both pilgrims and troops follow earlier major demonstrations of armed force at the time of peaceful religious festivals in Tibet over the past two years. In 2012, similar images emerged from Lhasa at the time of the festival of troops and firemen gathered outside the Jokhang temple, including troops in black uniforms, with some in camouflage uniform.
In 2013, massed ranks of armed troops confronted pilgrims attending peaceful gatherings in major monasteries in eastern Tibet. Vivid images disseminated on social media showed higher numbers of Tibetans than usual gathering to pray at major monasteries – Labrang and Kumbum – during the Monlam (prayer festival), also associated with Tsongkhapa, as troops stand guard or encircle the pilgrims. (Images at: Thousands of Tibetan pilgrims face troops at religious ceremonies in eastern Tibet). The numbers of pilgrims who continue to gather to pray despite such an intimidating paramilitary presence are compelling testimony to Tibetan resilience and determination to assert their religious identity.
Self-immolation of Sangye Khar
Sangye Khar’s self-immolation today in Amchok, Sangchu (Chinese: Xiahe) county, Kanlho (Chinese: Gannan) Tibetan Autonomous Prefecture, Gansu province, follows the self-immolations of three Tibetans, two of them outside monasteries, on the same Tsongkhapa anniversary in December, 2012. Sangye Khar is believed to be from a semi-nomadic area in Kanlho, Gansu. In 2012, on the same Tsongkhapa anniversary, 23-year old farmer Pema Dorjee, set fire to himself and died close to the main assembly hall of Shitsang Garser monastery in Luchu on the day of the Tsongkhapa commemoration in 2012. (ICT report, Three Tibetans self-immolate in two days during important Buddhist anniversary: images of troops in Lhasa as Tibetans pray ).
Details of the self-immolation were sketchy due to security restrictions in the area. The last two self-immolations in Tibet also occurred outside police stations. Tibetan student Lhamo Tashi set fire to himself and died on September 17 (2014) outside a government Public Security Bureau headquarters in Tsoe City, northeastern Tibet, where he was studying. The day before, on September 16, a 42-year-old Tibetan, Kunchok, set himself on fire outside a police station in the Golog (Chinese: Guoluo) Tibetan Autonomous Prefecture in Qinghai Province. |
/**
 * Creates a new {@link Header} that reads from {@link XMLStreamReader}.
 *
 * <p>
 * Note that the header implementation will read the entire data
 * into memory anyway, so this might not be as efficient as you might hope.
 *
 * @param soapVersion selects the SOAP 1.1 or SOAP 1.2 header implementation
 * @param reader stream positioned at the header element to read
 * @throws XMLStreamException if reading from the stream fails
 */
public static Header create( SOAPVersion soapVersion, XMLStreamReader reader ) throws XMLStreamException {
    switch(soapVersion) {
    case SOAP_11:
        return new StreamHeader11(reader);
    case SOAP_12:
        return new StreamHeader12(reader);
    default:
        // Unreachable unless a new SOAPVersion constant is added without
        // updating this switch.
        throw new AssertionError();
    }
}
#include <stdlib.h>
#include <sys/types.h>
#include <limits.h>
#ifdef HAVE_CATCHABLE_SEGV
# include <signal.h>
#endif
#define TEST_NAME "sodium_utils2"
#include "cmptest.h"
#ifdef __SANITIZE_ADDRESS__
# warning The sodium_utils2 test is expected to fail with address sanitizer
#endif
#undef sodium_malloc
#undef sodium_free
#undef sodium_allocarray
/*
 * Handler installed (near the end of main) for SIGSEGV/SIGBUS/SIGABRT.
 * Reaching it means the deliberate out-of-bounds write at the end of main
 * faulted as intended, so it reports success, restores the default handlers
 * and exits 0.
 */
__attribute__((noreturn)) static void
segv_handler(int sig)
{
    (void) sig; /* unused */
    printf("Intentional segfault / bus error caught\n");
    printf("OK\n");
#ifdef SIG_DFL
# ifdef SIGSEGV
    signal(SIGSEGV, SIG_DFL);
# endif
# ifdef SIGBUS
    signal(SIGBUS, SIG_DFL);
# endif
# ifdef SIGABRT
    signal(SIGABRT, SIG_DFL);
# endif
#endif
    exit(0);
}
/*
 * Exercises the sodium_malloc()/sodium_allocarray()/sodium_free() guarded
 * allocator: oversized requests must fail, zero-sized ones must succeed,
 * many allocate/fill/protect/free cycles must survive, and finally a
 * one-byte write past the end of an allocation must fault (caught by
 * segv_handler, which exits 0).
 */
int
main(void)
{
    void *buf;
    size_t size;
    unsigned int i;
    /* Requests near SIZE_MAX must be rejected... */
    if (sodium_malloc(SIZE_MAX - 1U) != NULL) {
        return 1;
    }
    /* ...but a zero-byte allocation must succeed. */
    if (sodium_malloc(0U) == NULL) {
        return 1;
    }
    /* count * size overflow must be rejected. */
    if (sodium_allocarray(SIZE_MAX / 2U + 1U, SIZE_MAX / 2U) != NULL) {
        return 1;
    }
    /* Degenerate array shapes are valid and freeable. */
    sodium_free(sodium_allocarray(0U, 0U));
    sodium_free(sodium_allocarray(0U, 1U));
    sodium_free(sodium_allocarray(1U, 0U));
    buf = sodium_allocarray(1000U, 50U);
    memset(buf, 0, 50000U);
    sodium_free(buf);
    sodium_free(sodium_malloc(0U));
    sodium_free(NULL); /* freeing NULL must be a no-op, as with free(3) */
    /* Stress: random sizes, fill, revoke access, free. */
    for (i = 0U; i < 10000U; i++) {
        size = 1U + randombytes_uniform(100000U);
        buf = sodium_malloc(size);
        assert(buf != NULL);
        memset(buf, i, size);
        sodium_mprotect_noaccess(buf);
        sodium_free(buf);
    }
    printf("OK\n");
    /* From here on, a crash is the expected (passing) outcome. */
#ifdef SIG_DFL
# ifdef SIGSEGV
    signal(SIGSEGV, segv_handler);
# endif
# ifdef SIGBUS
    signal(SIGBUS, segv_handler);
# endif
# ifdef SIGABRT
    signal(SIGABRT, segv_handler);
# endif
#endif
    size = 1U + randombytes_uniform(100000U);
    buf = sodium_malloc(size);
    assert(buf != NULL);
    /* old versions of asan emit a warning because they don't support mlock*() */
#ifndef __SANITIZE_ADDRESS__
    sodium_mprotect_readonly(buf);
    sodium_mprotect_readwrite(buf);
#endif
#if defined(HAVE_CATCHABLE_SEGV) && !defined(__EMSCRIPTEN__) && !defined(__SANITIZE_ADDRESS__)
    /* Deliberate one-byte overflow past the allocation: should not return. */
    sodium_memzero(((unsigned char *) buf) + size, 1U);
    sodium_mprotect_noaccess(buf);
    sodium_free(buf);
    printf("Overflow not caught\n");
#else
    /* Platforms where the fault cannot be caught: simulate success. */
    segv_handler(0);
#endif
    return 0;
}
|
/*
 * Given a symbol, returns its element kind for attribute purpose
 * (the CorAttributeTargets category an attribute on this symbol applies
 * to). Aggregate-related symbols delegate to the owning AGGSYM.
 */
CorAttributeTargets SYM::getElementKind()
{
    switch (kind) {
    case SK_METHSYM:
        // Constructors and ordinary methods are distinct attribute targets.
        return (asMETHSYM()->isCtor() ? catConstructor : catMethod);
    case SK_PROPSYM:
        return catProperty;
    case SK_MEMBVARSYM:
        return catField;
    case SK_TYVARSYM:
        return catGenericParameter;
    case SK_AGGTYPESYM:
        // Not expected here; assert in debug, degrade to catField in release.
        ASSERT(!"Bad Symbol type: SK_AGGTYPESYM");
        return catField;
    case SK_EVENTSYM:
        return catEvent;
    case SK_AGGSYM:
        return asAGGSYM()->getElementKind();
    case SK_AGGDECLSYM:
        return asAGGDECLSYM()->Agg()->getElementKind();
    case SK_GLOBALATTRSYM:
        // Global attributes carry their target kind explicitly.
        return this->asGLOBALATTRSYM()->elementKind;
    default:
        ASSERT(!"Bad Symbol type");
    }
    return (CorAttributeTargets) 0; // reached only for unknown kinds in release builds
}
/**
 * Ambient type declaration for the book feature module.
 * NOTE(review): appears to be a generated .d.ts stub (e.g. an Angular or
 * NestJS module) — the implementation lives in the compiled output.
 */
export declare class BookModule {
}
|
<gh_stars>0
from typing import List
SHIP = 1
SPACE = 0
def calculate_hit_probability(rows: List[List[int]]) -> float:
flattened = [
column
for row in rows
for column in row]
spaces = len(flattened)
ships = flattened.count(SHIP)
return ships / spaces
# pylint: disable=unused-argument
def getHitProbability(R: int, C: int, G: List[List[int]]) -> float:
    """Contest-style wrapper: R and C (row/column counts) are part of the
    required signature but redundant, since G carries its own shape."""
    return calculate_hit_probability(G)
# pylint: enable=unused-argument
|
from abc import ABC, abstractmethod
from enum import Enum
from typing import Any, Sequence, TYPE_CHECKING, overload
if TYPE_CHECKING:
from .session import Session
from khl.message import Msg
class Command(ABC):
    """Abstract base for bot commands.

    Subclasses supply ``trigger``/``help`` and implement the abstract
    coroutines below; the owning bot is injected via ``set_bot``.

    NOTE(review): ``execute`` is declared twice — at class-creation time the
    second definition replaces the first, and ``@overload`` has no runtime
    effect, so only the ``Session`` form is actually abstract. Confirm
    whether the first signature is dead code.
    """
    class Types(Enum):
        # Command presentation style — TODO confirm exact semantics.
        MENU = 'MENU'
        APP = 'APP'
    trigger: str  # text that invokes this command
    help: str     # help text shown to users
    __bot: Any    # owning bot; name-mangled, set via set_bot()
    use_help: bool = True
    with_reply: bool = True
    with_mention: bool = True
    def __init__(self) -> None:
        # Default command name is the subclass name.
        self.name = self.__class__.__name__
    @overload
    @abstractmethod
    async def execute(self, command_str: str, args: Sequence[str],
                      msg: 'Msg') -> Any:
        """Handle a raw invocation (command string, args, source message)."""
        raise NotImplementedError
    @abstractmethod
    async def execute(self, session: 'Session') -> Any:
        """Handle an invocation wrapped in a Session."""
        raise NotImplementedError
    @abstractmethod
    async def run_func(self, session: 'Session') -> Any:
        """Run the command body for the given session."""
        raise NotImplementedError
    @abstractmethod
    async def func(self, session: 'Session') -> Any:
        """The command's core behavior."""
        raise NotImplementedError
    @property
    def bot(self):
        # Read-only access to the bot injected via set_bot().
        return self.__bot
    def set_bot(self, bot: Any):
        self.__bot = bot
|
/**
 * Converts and returns the padding as a {@link XYEdges} instance.
 *
 * <p>Delegates the actual per-edge computation to
 * {@code setEdges(XYEdges, int)} on a freshly created instance.
 *
 * @param availableWidth
 *            the available width
 * @return the created {@link XYEdges} instance
 */
public XYEdges toEdges(int availableWidth) {
    XYEdges edges = new XYEdges();
    setEdges(edges, availableWidth);
    return edges;
}
/**
 * Returns a {@link Lookup lookup object} with full capabilities to emulate all
 * supported bytecode behaviors, including <a href="MethodHandles.Lookup.html#privacc">
 * private access</a>, on a target class.
 * This method checks that a caller, specified as a {@code Lookup} object, is allowed to
 * do <em>deep reflection</em> on the target class. If {@code m1} is the module containing
 * the {@link Lookup#lookupClass() lookup class}, and {@code m2} is the module containing
 * the target class, then this check ensures that
 * <ul>
 * <li>{@code m1} {@link Module#canRead reads} {@code m2}.</li>
 * <li>{@code m2} {@link Module#isOpen(String,Module) opens} the package containing
 * the target class to at least {@code m1}.</li>
 * <li>The lookup has the {@link Lookup#MODULE MODULE} lookup mode.</li>
 * </ul>
 * <p>
 * If there is a security manager, its {@code checkPermission} method is called to
 * check {@code ReflectPermission("suppressAccessChecks")}.
 * @apiNote The {@code MODULE} lookup mode serves to authenticate that the lookup object
 * was created by code in the caller module (or derived from a lookup object originally
 * created by the caller). A lookup object with the {@code MODULE} lookup mode can be
 * shared with trusted parties without giving away {@code PRIVATE} and {@code PACKAGE}
 * access to the caller.
 * @param targetClass the target class
 * @param lookup the caller lookup object
 * @return a lookup object for the target class, with private access
 * @throws IllegalArgumentException if {@code targetClass} is a primitive type or array class
 * @throws NullPointerException if {@code targetClass} or {@code lookup} is {@code null}
 * @throws IllegalAccessException if the access check specified above fails
 * @throws SecurityException if denied by the security manager
 * @since 9
 * @spec JPMS
 * @see Lookup#dropLookupMode
 */
public static Lookup privateLookupIn(Class<?> targetClass, Lookup lookup) throws IllegalAccessException {
    SecurityManager sm = System.getSecurityManager();
    if (sm != null) sm.checkPermission(ACCESS_PERMISSION);
    if (targetClass.isPrimitive())
        throw new IllegalArgumentException(targetClass + " is a primitive class");
    if (targetClass.isArray())
        throw new IllegalArgumentException(targetClass + " is an array class");
    Module targetModule = targetClass.getModule();
    Module callerModule = lookup.lookupClass().getModule();
    if (!callerModule.canRead(targetModule))
        throw new IllegalAccessException(callerModule + " does not read " + targetModule);
    if (targetModule.isNamed()) {
        String pn = targetClass.getPackageName();
        assert !pn.isEmpty() : "unnamed package cannot be in named module";
        if (!targetModule.isOpen(pn, callerModule))
            throw new IllegalAccessException(targetModule + " does not open " + pn + " to " + callerModule);
    }
    if ((lookup.lookupModes() & Lookup.MODULE) == 0)
        throw new IllegalAccessException("lookup does not have MODULE lookup mode");
    // Unnamed-to-named deep reflection may be logged for --illegal-access diagnostics.
    if (!callerModule.isNamed() && targetModule.isNamed()) {
        IllegalAccessLogger logger = IllegalAccessLogger.illegalAccessLogger();
        if (logger != null) {
            logger.logIfOpenedForIllegalAccess(lookup, targetClass);
        }
    }
    return new Lookup(targetClass);
}
On August 17th, a new form of marketplace, the Luxury Auction, was added in Black Desert Online KR and the first auction took place shortly after. The Auction House is located in the southwest of Calpheon, inside the Banquet Hall. Extremely rare items that could not be found in BDO before will be put up for the auction and everyone can participate in the auction. If you become the highest bidder, you will get one of the most rare items in BDO.
If you want to join the auction, visit Calpheon and talk to Franco Russo. Russo will have one item available for auction at a time, and the player who bids the highest price will receive the item.
The auction is held for 10 days, and the remaining time for the auction will always be displayed at the top of the Auction House window. When a higher bidder appears before the current auction ends, the auction will be extended for 1 minute. The auction will be extended every time a higher bidder appears, meaning that an auction that involves an extremely popular item could go on endlessly.
The highest bidder’s name will not be revealed to anyone. You will get your money back if you are not the final bidder, but the time it takes to get your money back from the auction house is unknown. Also, changing the server will not change the item on the auction house, as players from different servers will be competing for the same item.
Franco Russo is currently selling a Combat/Skill EXP Transfer Ticket at the auction house. The item will allow you to choose two characters and swap their EXP. For example, if you have a level 55 Warrior and a level 60 Dark Knight and use the EXP Transfer Ticket on them, you will have a level 60 Warrior and a level 55 Dark Knight as a result. When transferring EXP from one character to another, the two characters must not be the same level, must unequip Awakening weapons, and must be over level 50. Also, awakening quests should be done separately. The minimum price for Combat/Skill EXP Transfer Ticket is 2 billion silver and the auction will take place for 10 days starting from August 18th 18:00 (KST).
▲ The Auction House is located in the southwest of Calpheon, inside the Banquet Hall.
▲ The auction is held for 10 days, 1 item at a time. |
/*
 * fill_adv_template_from_key will set the advertising data based on the remaining bytes from the advertised key
 *
 * Copies key[6..27] (22 bytes) into the payload starting at offset 7, and
 * stores the top bits of key[0] in the final template byte. NOTE(review):
 * presumably the first 6 key bytes travel in the BLE address itself
 * (Offline Finding / OpenHaystack-style layout) — confirm offsets.
 */
void fill_adv_template_from_key(char key[28]) {
    memcpy(&offline_finding_adv_template[7], &key[6], 22);
    /* key[0] >> 6: with a plain (possibly signed) char this can
     * sign-extend — TODO confirm key bytes are effectively unsigned. */
    offline_finding_adv_template[29] = key[0] >> 6;
}
//determine the effect this upgrade would have on the max values
// maxValue is the upgrade strength expressed as a percentage of the
// current maximum weapon damage.
void updateMaxWeaponStats(UWORD maxValue)
{
	UDWORD currentMaxValue = getMaxWeaponDamage();

	// NOTE(review): integer division makes `maxValue / 100` zero for any
	// maxValue below 100, so this guard only passes for upgrades of 100%
	// or more, while the increment below applies the exact percentage
	// (currentMaxValue * maxValue / 100). The two look inconsistent -
	// confirm the intended threshold before changing balance behaviour.
	if (currentMaxValue < (currentMaxValue + maxValue / 100))
	{
		// raise the stored maximum by maxValue percent of its current value
		currentMaxValue += currentMaxValue * maxValue / 100;
		setMaxWeaponDamage(currentMaxValue);
	}
}
/**
 * Convenience builder that assembles a fully configured Bukkit
 * {@link ItemStack} (display name, lore, potion effects, item flags) in a
 * single constructor call. Retrieve the result with {@link #getItem()}.
 *
 * All constructors delegate the shared meta handling to
 * {@link #applyMeta(String, ArrayList, boolean)} to avoid the previous
 * copy-paste duplication across six constructors.
 */
public class CreateItem {

    /** The item being assembled; exposed through {@link #getItem()}. */
    ItemStack item;

    /**
     * Creates an item of the given material with a display name and lore.
     *
     * @param displayName name shown on the item, or null to keep the default
     * @param material    the item material
     * @param amount      stack size
     * @param lore        lore lines to attach
     */
    public CreateItem(String displayName, Material material, int amount, ArrayList<String> lore)
    {
        item = new ItemStack(material, amount);
        applyMeta(displayName, lore, false);
    }

    /**
     * Creates an item by numeric id/data with a display name and lore.
     *
     * @param displayName name shown on the item, or null to keep the default
     * @param itemID      legacy numeric item id
     * @param itemData    legacy data value (truncated to a byte, as before)
     * @param amount      stack size
     * @param lore        lore lines to attach
     */
    public CreateItem(String displayName, int itemID, int itemData, int amount, ArrayList<String> lore)
    {
        item = new ItemStack(itemID, amount, (byte) itemData);
        applyMeta(displayName, lore, false);
    }

    /**
     * Creates an item of the given material with a display name and no lore.
     */
    public CreateItem(String displayName, Material material, int amount)
    {
        item = new ItemStack(material, amount);
        applyMeta(displayName, null, false);
    }

    /**
     * Creates an item by numeric id/data with a display name and no lore.
     */
    public CreateItem(String displayName, int itemID, int itemData, int amount)
    {
        item = new ItemStack(itemID, amount, (byte) itemData);
        applyMeta(displayName, null, false);
    }

    /**
     * Creates a (possibly splash) potion item with lore.
     *
     * @param displayName name shown on the item, or null to keep the default
     * @param potion      potion type
     * @param level       potion tier
     * @param lore        lore lines to attach
     * @param splash      whether the potion is a splash potion
     * @param hideFlags   hide the potion-effect tooltip when true
     */
    public CreateItem(String displayName, PotionType potion, int level, ArrayList<String> lore, boolean splash, boolean hideFlags)
    {
        Potion createPot = new Potion(potion, level);
        createPot.setSplash(splash);
        item = createPot.toItemStack(1);
        applyMeta(displayName, lore, hideFlags);
    }

    /**
     * Creates a (possibly splash) potion item without lore.
     */
    public CreateItem(String displayName, PotionType potion, int level, boolean splash, boolean hideFlags)
    {
        Potion createPot = new Potion(potion, level);
        createPot.setSplash(splash);
        item = createPot.toItemStack(1);
        applyMeta(displayName, null, hideFlags);
    }

    /**
     * Applies the meta configuration shared by every constructor: optional
     * display name, optional lore, and optional hiding of potion effects.
     * A null lore leaves the freshly created item's (empty) lore untouched,
     * matching the previous per-constructor behaviour.
     */
    private void applyMeta(String displayName, ArrayList<String> lore, boolean hideFlags)
    {
        ItemMeta meta = item.getItemMeta();
        if (displayName != null)
            meta.setDisplayName(displayName);
        if (lore != null)
            meta.setLore(lore);
        if (hideFlags)
            meta.addItemFlags(ItemFlag.HIDE_POTION_EFFECTS);
        item.setItemMeta(meta);
    }

    /**
     * @return the configured item stack
     */
    public ItemStack getItem()
    {
        return this.item;
    }
}
|
Environmental activist Balbir Singh Seechewal’s model of cleaning water bodies, successfully employed in Punjab, is being adopted by more than 1,600 villages situated on the banks of Ganga for the river’s rejuvenation.
Seechewal shared this information while talking to the media on the sidelines of a seminar, ‘Challenges and Strategies for Management of Water in Rural Areas’, organised by the National Bank for Agriculture and Rural Development (Nabard).
Seechewal said sarpanches of 1,657 villages located near Ganga in Uttarakhand, Bihar, Uttar Pradesh, West Bengal and Jharkhand had seen the effectiveness of the model and were impressed by it. “Under Seechewal model, sewage passes through different wells for removing impurity, and then the water is shifted to a pond for irrigating crops. This model is very simple and we have found that water with total dissolved solids (TDS) of up to 2,000 can be used for irrigation,” he said.
The noted activist is credited with rejuvenation of 160-km-long Kali Bein rivulet, which originates in Hoshiarpur and traverses the Doaba region to join the Sutlej-Beas confluence near Harike wetland. Union minister Uma Bharti reportedly met Seechewal last year, and said the model used for Kali Bein will be replicated for cleaning Ganga.
Read: To clean Yamuna in Delhi, AAP govt to use Seechewal model from Punjab
First Published: Jul 13, 2016 18:51 IST |
<reponame>huynhsontung/monitoror
package delivery
import (
"net/http"
"github.com/labstack/echo/v4"
"github.com/monitoror/monitoror/models"
"github.com/monitoror/monitoror/monitorable/stripe"
stripeModels "github.com/monitoror/monitoror/monitorable/stripe/models"
)
// StripeDelivery exposes the Stripe monitorable over HTTP by delegating to
// its usecase layer.
type StripeDelivery struct {
	stripeUsecase stripe.Usecase
}

// NewStripeDelivery wires the given Stripe usecase into a fresh HTTP
// delivery handler.
func NewStripeDelivery(u stripe.Usecase) *StripeDelivery {
	return &StripeDelivery{stripeUsecase: u}
}
// GetCount serves the Stripe count tile: it binds and validates the query
// parameters, asks the usecase for the tile, and renders it as JSON.
func (d *StripeDelivery) GetCount(c echo.Context) error {
	params := &stripeModels.CountParams{}
	if err := c.Bind(params); err != nil || !params.IsValid() {
		return models.QueryParamsError
	}

	tile, err := d.stripeUsecase.Count(params)
	if err != nil {
		return err
	}

	return c.JSON(http.StatusOK, tile)
}
|
// An InstanceKlass is the VM level representation of a Java class.
// InstanceMirrorKlass is the klass used for java.lang.Class instances
// ("mirrors"); their size varies per instance, so it is read from the
// mirror object itself rather than from the klass.
public class InstanceMirrorKlass extends InstanceKlass {
  static {
    // Re-resolve the VM type info whenever the VM is (re)initialized so
    // the cached lookup always matches the currently attached VM.
    VM.registerVMInitializedObserver(new Observer() {
        public void update(Observable o, Object data) {
          initialize(VM.getVM().getTypeDataBase());
        }
      });
  }

  private static synchronized void initialize(TypeDataBase db) throws WrongTypeException {
    // Just make sure it's there for now
    Type type = db.lookupType("InstanceMirrorKlass");
  }

  public InstanceMirrorKlass(Address addr) {
    super(addr);
  }

  // Object size in bytes: the per-instance oop size recorded on the
  // java.lang.Class instance (presumably in heap words - confirm against
  // the VM side) multiplied by the word/address size.
  public long getObjectSize(Oop o) {
    return java_lang_Class.getOopSize(o) * VM.getVM().getAddressSize();
  }

  public void iterateNonStaticFields(OopVisitor visitor, Oop obj) {
    super.iterateNonStaticFields(visitor, obj);
    // Fetch the real klass from the mirror object
    Klass klass = java_lang_Class.asKlass(obj);
    // The reflected class's static fields live in the mirror instance, so
    // they are visited as part of this object's fields.
    if (klass instanceof InstanceKlass) {
      ((InstanceKlass)klass).iterateStaticFields(visitor);
    }
  }
}
<filename>components/camel-docker/src/test/java/org/apache/camel/component/docker/headers/CreateContainerCmdHeaderTest.java<gh_stars>1-10
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.docker.headers;
import java.util.Map;
import com.github.dockerjava.api.command.CreateContainerCmd;
import com.github.dockerjava.api.model.Capability;
import com.github.dockerjava.api.model.ExposedPort;
import com.github.dockerjava.api.model.HostConfig;
import com.github.dockerjava.api.model.Volume;
import com.github.dockerjava.api.model.VolumesFrom;
import org.apache.camel.component.docker.DockerConstants;
import org.apache.camel.component.docker.DockerOperation;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.Mockito;
/**
* Validates Create Container Request headers are parsed properly
*/
/**
 * Validates Create Container Request headers are parsed properly:
 * every DOCKER_* header placed on the exchange must be forwarded exactly
 * once to the corresponding docker-java command-builder method.
 */
public class CreateContainerCmdHeaderTest extends BaseDockerHeaderTest<CreateContainerCmd> {

    // Mocked docker-java command builder the component is expected to populate.
    @Mock
    private CreateContainerCmd mockObject;

    @Test
    public void createContainerHeaderTest() {

        // Arrange: one representative value for each supported header.
        String image = "busybox";
        ExposedPort exposedPort = ExposedPort.tcp(22);
        boolean tty = true;
        String name = "cameldocker";
        String workingDir = "/opt";
        boolean disableNetwork = false;
        String domainName = "apache.org";
        String hostname = "dockerjava";
        String user = "docker";
        boolean stdInOpen = false;
        boolean stdInOnce = false;
        boolean attachStdErr = true;
        boolean attachStdOut = true;
        boolean attachStdIn = false;
        Long memoryLimit = 2048L;
        Long swapMemory = 512L;
        Integer cpuShares = 512;
        Volume volumes = new Volume("/example");
        VolumesFrom volumesFromContainer = new VolumesFrom("/etc");
        String env = "FOO=bar";
        String cmd = "whoami";
        HostConfig hostConfig = new HostConfig();
        Capability capAdd = Capability.NET_BROADCAST;
        Capability capDrop = Capability.BLOCK_SUSPEND;
        String[] entrypoint = new String[]{"sleep", "9999"};
        String portSpecs = "80";
        String dns = "8.8.8.8";

        Map<String, Object> headers = getDefaultParameters();
        headers.put(DockerConstants.DOCKER_IMAGE, image);
        headers.put(DockerConstants.DOCKER_EXPOSED_PORTS, exposedPort);
        headers.put(DockerConstants.DOCKER_TTY, tty);
        headers.put(DockerConstants.DOCKER_NAME, name);
        headers.put(DockerConstants.DOCKER_WORKING_DIR, workingDir);
        headers.put(DockerConstants.DOCKER_DISABLE_NETWORK, disableNetwork);
        headers.put(DockerConstants.DOCKER_HOSTNAME, hostname);
        headers.put(DockerConstants.DOCKER_USER, user);
        headers.put(DockerConstants.DOCKER_STD_IN_OPEN, stdInOpen);
        headers.put(DockerConstants.DOCKER_STD_IN_ONCE, stdInOnce);
        headers.put(DockerConstants.DOCKER_ATTACH_STD_IN, attachStdIn);
        headers.put(DockerConstants.DOCKER_ATTACH_STD_ERR, attachStdErr);
        headers.put(DockerConstants.DOCKER_ATTACH_STD_OUT, attachStdOut);
        headers.put(DockerConstants.DOCKER_MEMORY_LIMIT, memoryLimit);
        headers.put(DockerConstants.DOCKER_MEMORY_SWAP, swapMemory);
        headers.put(DockerConstants.DOCKER_CPU_SHARES, cpuShares);
        headers.put(DockerConstants.DOCKER_VOLUMES, volumes);
        headers.put(DockerConstants.DOCKER_VOLUMES_FROM, volumesFromContainer);
        headers.put(DockerConstants.DOCKER_ENV, env);
        headers.put(DockerConstants.DOCKER_CMD, cmd);
        headers.put(DockerConstants.DOCKER_HOST_CONFIG, hostConfig);
        headers.put(DockerConstants.DOCKER_CAP_ADD, capAdd);
        headers.put(DockerConstants.DOCKER_CAP_DROP, capDrop);
        headers.put(DockerConstants.DOCKER_ENTRYPOINT, entrypoint);
        headers.put(DockerConstants.DOCKER_PORT_SPECS, portSpecs);
        headers.put(DockerConstants.DOCKER_DNS, dns);
        headers.put(DockerConstants.DOCKER_DOMAIN_NAME, domainName);

        // Act: route an exchange carrying all create-container headers.
        template.sendBodyAndHeaders("direct:in", "", headers);

        // Assert: every header value must reach the builder exactly once.
        Mockito.verify(dockerClient, Mockito.times(1)).createContainerCmd(image);
        Mockito.verify(mockObject, Mockito.times(1)).withExposedPorts(Matchers.eq(exposedPort));
        Mockito.verify(mockObject, Mockito.times(1)).withTty(Matchers.eq(tty));
        Mockito.verify(mockObject, Mockito.times(1)).withName(Matchers.eq(name));
        Mockito.verify(mockObject, Mockito.times(1)).withWorkingDir(workingDir);
        Mockito.verify(mockObject, Mockito.times(1)).withNetworkDisabled(disableNetwork);
        Mockito.verify(mockObject, Mockito.times(1)).withHostName(hostname);
        Mockito.verify(mockObject, Mockito.times(1)).withUser(user);
        Mockito.verify(mockObject, Mockito.times(1)).withStdinOpen(stdInOpen);
        Mockito.verify(mockObject, Mockito.times(1)).withStdInOnce(stdInOnce);
        Mockito.verify(mockObject, Mockito.times(1)).withAttachStderr(attachStdErr);
        Mockito.verify(mockObject, Mockito.times(1)).withAttachStdin(attachStdIn);
        Mockito.verify(mockObject, Mockito.times(1)).withAttachStdout(attachStdOut);
        Mockito.verify(mockObject, Mockito.times(1)).withMemoryLimit(memoryLimit);
        Mockito.verify(mockObject, Mockito.times(1)).withMemorySwap(swapMemory);
        Mockito.verify(mockObject, Mockito.times(1)).withCpuShares(cpuShares);
        Mockito.verify(mockObject, Mockito.times(1)).withVolumes(volumes);
        Mockito.verify(mockObject, Mockito.times(1)).withVolumesFrom(volumesFromContainer);
        Mockito.verify(mockObject, Mockito.times(1)).withEnv(env);
        Mockito.verify(mockObject, Mockito.times(1)).withCmd(cmd);
        Mockito.verify(mockObject, Mockito.times(1)).withHostConfig(hostConfig);
        Mockito.verify(mockObject, Mockito.times(1)).withCapAdd(capAdd);
        Mockito.verify(mockObject, Mockito.times(1)).withCapDrop(capDrop);
        Mockito.verify(mockObject, Mockito.times(1)).withEntrypoint(entrypoint);
        Mockito.verify(mockObject, Mockito.times(1)).withPortSpecs(portSpecs);
        Mockito.verify(mockObject, Mockito.times(1)).withDns(dns);
        Mockito.verify(mockObject, Mockito.times(1)).withDomainName(domainName);
    }

    @Override
    protected void setupMocks() {
        // Any image name returns the mocked command so the verifications
        // above can observe the builder calls.
        Mockito.when(dockerClient.createContainerCmd(Matchers.anyString())).thenReturn(mockObject);
    }

    @Override
    protected DockerOperation getOperation() {
        return DockerOperation.CREATE_CONTAINER;
    }
}
|
/**
 * @brief Delete entity
 *
 * Strips all components from the entity, queues the id for later reuse,
 * and decrements the live-entity count. A null entity is ignored.
 *
 * @param entity Entity
 */
void deleteEntity(Entity entity) {
  if (entity != EntityNull) {
    deleteAllEntityComponents(entity);
    mDeleted.push_back(entity);
    --this->mNumEntities;
  }
}
/**
* Updates only large icon in notification panel when bitmap is decoded
*
* @param bitmap the large icon in the notification panel
*/
private void updateNotificationLargeIcon(Bitmap bitmap)
{
notificationBuilder.setLargeIcon(bitmap);
NotificationManager notificationManager = (NotificationManager) getSystemService(Context
.NOTIFICATION_SERVICE);
notificationManager.notify(NOTIFICATION_ID, notificationBuilder.build());
} |
<reponame>cbbfcd/leva
import { useEffect, useRef } from 'react'
import { debounce } from '../utils'
/**
 * React hook that returns a canvas ref and its 2d-context ref, re-invoking
 * `fn(canvas, ctx)` (debounced at 250ms) on every window resize. The canvas
 * backing store is sized to its CSS size times devicePixelRatio so drawings
 * stay crisp on high-DPI displays.
 *
 * NOTE(review): the resize effect is declared before the effect that assigns
 * `ctx.current`, so the very first (debounced) call relies on the 250ms delay
 * having elapsed for the context to be non-null - confirm this ordering is
 * intentional.
 */
export function useCanvas2d(
  fn: Function
): [React.RefObject<HTMLCanvasElement>, React.RefObject<CanvasRenderingContext2D>] {
  const canvas = useRef<HTMLCanvasElement>(null)
  const ctx = useRef<CanvasRenderingContext2D | null>(null)

  useEffect(() => {
    // Resize the drawing buffer to layout size * DPR, then redraw.
    const handleCanvas = debounce(() => {
      canvas.current!.width = canvas.current!.offsetWidth * window.devicePixelRatio
      canvas.current!.height = canvas.current!.offsetHeight * window.devicePixelRatio
      fn(canvas.current, ctx.current)
    }, 250)
    window.addEventListener('resize', handleCanvas)
    // Initial (debounced) draw once mounted.
    handleCanvas()
    return () => window.removeEventListener('resize', handleCanvas)
  }, [fn])

  useEffect(() => {
    // Cache the 2d context once; the ref identity is stable across renders.
    ctx.current = canvas.current!.getContext('2d')
  }, [])

  return [canvas, ctx]
}
|
import { Action, ActionCreator, Dispatch } from "redux";
import axios from "axios";
import { Reducer } from "redux";
import {
USER_ADD_JOURNAL_FAIL,
USER_ADD_JOURNAL_REQUEST,
USER_ADD_JOURNAL_SUCCESS,
USER_CREATE_JOURNAL_GROUP_FAIL,
USER_CREATE_JOURNAL_GROUP_REQUEST,
USER_CREATE_JOURNAL_GROUP_SUCCESS,
USER_JOURNAL_DELETE_FAIL,
USER_JOURNAL_DELETE_REQUEST,
USER_JOURNAL_DELETE_SUCCESS,
USER_JOURNAL_FAIL,
USER_JOURNAL_GROUP_DELETE_FAIL,
USER_JOURNAL_GROUP_DELETE_REQUEST,
USER_JOURNAL_GROUP_DELETE_SUCCESS,
USER_JOURNAL_GROUP_DETAILS_EDIT_FAIL,
USER_JOURNAL_GROUP_DETAILS_EDIT_REQUEST,
USER_JOURNAL_GROUP_DETAILS_EDIT_SUCCESS,
USER_JOURNAL_REQUEST,
USER_JOURNAL_SUCCESS,
} from "../Constants/journal.constants";
import { JournalGroup } from "../../Interfaces/Interfaces";
const journalInitialState = { Journals: [] };
// const addPriorityInitialState = { message: "" };
type getJournalAction = {
type: string;
payload: JournalGroup[];
};
// type addPriorityAction = {
// type: string;
// payload: { message: "" };
// };
/**
 * Reducer for the journal fetch lifecycle: request sets a loading flag,
 * success stores the payload under `data`, failure stores it under `error`,
 * and any other action leaves the state untouched.
 */
export const getJournalReducers = (
  state = journalInitialState,
  action: getJournalAction
) => {
  const { type, payload } = action;
  if (type === USER_JOURNAL_REQUEST) {
    return { loading: true };
  }
  if (type === USER_JOURNAL_SUCCESS) {
    return { loading: false, data: payload };
  }
  if (type === USER_JOURNAL_FAIL) {
    return { loading: false, error: payload };
  }
  return state;
};
// export const addPriorityReducers = (
// state = addPriorityInitialState,
// action: addPriorityAction
// ) => {
// switch (action.type) {
// case USER_CREATE_PRIORITY_REQUEST:
// return { loading: true };
// case USER_CREATE_PRIORITY_SUCCESS:
// return { loading: false, data: action.payload };
// case USER_CREATE_PRIORITY_FAIL:
// return { loading: false, error: action.payload };
// default:
// return state;
// }
// };
// export const editPriorityReducers = (
// state = addPriorityInitialState,
// action: addPriorityAction
// ) => {
// switch (action.type) {
// case USER_PRIORITY_DETAILS_EDIT_REQUEST:
// return { loading: true };
// case USER_PRIORITY_DETAILS_EDIT_SUCCESS:
// return { loading: false, data: action.payload };
// case USER_PRIORITY_DETAILS_EDIT_FAIL:
// return { loading: false, error: action.payload };
// default:
// return state;
// }
// };
// export const deletePriorityReducers = (
// state = addPriorityInitialState,
// action: addPriorityAction
// ) => {
// switch (action.type) {
// case USER_PRIORITY_DELETE_REQUEST:
// return { loading: true };
// case USER_PRIORITY_DELETE_SUCCESS:
// return { loading: false, data: action.payload };
// case USER_PRIORITY_DELETE_FAIL:
// return { loading: false, error: action.payload };
// default:
// return state;
// }
// };
|
#pragma once

/* Maximum number of rules that can be held at once. */
#define RULES_LIMIT 100
/* Threshold in [0,1] at which an outcome is neither a win nor a loss. */
#define BREAK_EVEN_POINT (long double) 0.5f // break-even point is in [0,1]
class Led8x8Motion:
    """ Display motion in various rooms of the house on an 8x8 RGB matrix.

    Each monitored area is a dict entry mapping an MQTT-style topic to a
    draw method, an origin pixel, and a countdown of "ticks of recent
    motion". display() ages the countdowns and colors each active area by
    recency; motion_detected() restarts an area's countdown.
    """

    def __init__(self, matrix8x8):
        """ create initial conditions and saving display and I2C lock """
        self.matrix = matrix8x8
        # self.matrix.begin()
        # BRIGHTNESS is a module-level constant (not visible here)
        self.matrix.set_brightness(BRIGHTNESS)
        # off-screen 8x8 RGB frame buffer, pushed to the LEDs in display()
        self.matrix_image = Image.new('RGB', (8, 8))
        self.matrix_draw = ImageDraw.Draw(self.matrix_image)
        # topic -> {"method", "row", "column", "seconds"}; filled by reset()
        self.dispatch = {}
        # number of areas currently showing motion; recomputed in display()
        self.motions = 0
        self.reset()

    def draw_two(self, color, row, column):
        """ display a small room or area (a 1x2 pixel strip) """
        # print("draw_two color=",color)
        self.matrix_draw.line((row, column, row, column+1), fill=color)

    def draw_four(self, color, row, column):
        """ draw a medium or large area (a 2x2 pixel square) """
        # print("draw_four color=",color)
        self.matrix_draw.line((row, column, row, column+1), fill=color)
        self.matrix_draw.line((row+1, column, row+1, column+1), fill=color)

    def reset(self,):
        """ initialize to starting state and set brightness """
        # NOTE(review): motions starts at 8 here but is recomputed from the
        # countdowns on every display() pass - confirm the initial 8 matters.
        self.motions = 8
        # one entry per monitored area: how to draw it, where it sits on the
        # grid, and how many ticks of recent motion remain
        self.dispatch = {
            "diy/perimeter/front/motion":
                {"method": self.draw_two, "row" : 0, "column" : 3, "seconds" : 10},
            "diy/main/hallway/motion":
                {"method": self.draw_two, "row" : 2, "column" : 3, "seconds" : 10},
            "diy/main/dining/motion":
                {"method": self.draw_four, "row" : 3, "column" : 0, "seconds" : 10},
            "diy/main/garage/motion":
                {"method": self.draw_four, "row" : 0, "column" : 6, "seconds" : 10},
            "diy/main/living/motion":
                {"method": self.draw_four, "row" : 3, "column" : 6, "seconds" : 10},
            "diy/upper/guest/motion":
                {"method": self.draw_four, "row" : 6, "column" : 0, "seconds" : 10},
            "diy/upper/study/motion":
                {"method": self.draw_four, "row" : 6, "column" : 6, "seconds" : 10},
            "diy/upper/stairs/motion":
                {"method": self.draw_two, "row" : 5, "column" : 3, "seconds" : 10}
        }

    def display(self,):
        ''' age each area's countdown and render it by recency:
            red (>50 ticks left), yellow (>30), green (>0), off otherwise '''
        # paced by UPDATE_RATE_SECONDS (module-level constant); one tick per call
        time.sleep(UPDATE_RATE_SECONDS)
        # clear the whole frame before redrawing the active areas
        self.matrix_draw.rectangle((0, 0, 7, 7), outline=(0, 0, 0), fill=(0, 0, 0))
        self.motions = 0
        for key in self.dispatch:
            self.dispatch[key]["seconds"] = self.dispatch[key]["seconds"] - 1
            if self.dispatch[key]["seconds"] > 50:
                # very recent motion (countdown started at 60): red
                self.motions += 1
                self.dispatch[key]["method"]((255, 0, 0),
                                             self.dispatch[key]["row"],
                                             self.dispatch[key]["column"])
            elif self.dispatch[key]["seconds"] > 30:
                # recent motion: yellow
                self.motions += 1
                self.dispatch[key]["method"]((255, 255, 0),
                                             self.dispatch[key]["row"],
                                             self.dispatch[key]["column"])
            elif self.dispatch[key]["seconds"] > 0:
                # fading motion: green
                self.motions += 1
                self.dispatch[key]["method"]((0, 255, 0),
                                             self.dispatch[key]["row"],
                                             self.dispatch[key]["column"])
            else:
                # clamp so expired areas do not count down forever
                self.dispatch[key]["seconds"] = 0
        self.matrix.set_image(self.matrix_image)
        self.matrix.write_display()

    def motion_detected(self, topic):
        ''' set timer to countdown occupancy '''
        # a matching topic restarts that area's countdown at 60 ticks
        for key in self.dispatch:
            if key == topic:
                self.dispatch[key]["seconds"] = 60
        # print("motion_detected topic=",topic)
<gh_stars>1-10
import tensorflow as tf

# Cluster definition with a single parameter-server job at localhost:2223.
cluster = tf.train.ClusterSpec({
    "ps": [
        "ps0.localhost:2223"
    ]})

# Run this process as ps task 0 and block forever serving parameter requests.
server = tf.train.Server(cluster, job_name="ps", task_index=0)
server.join()
|
import { Component, OnInit } from '@angular/core';
import {FormControl, FormGroup} from '@angular/forms';
import {NgbModal} from '@ng-bootstrap/ng-bootstrap';
import {UserModalComponent} from './user-modal/user-modal.component';
import {UserService} from '../../service/user.service';
import {User} from '../../models/user';
import {ModalComfirmComponent} from '../../common/modal-comfirm/modal-comfirm.component';
import {formatDate} from '@angular/common';
import {Role} from '../../models/role';
import {HelperService} from '../../service/helper.service';
import {AlertService} from '../../service/alert.service';
@Component({
selector: 'app-user',
templateUrl: './user.component.html',
styleUrls: ['./user.component.scss'],
})
/**
 * Admin screen listing users with search, role filtering, pagination and
 * add/edit/delete actions driven by ng-bootstrap modals.
 *
 * The previously duplicated modal-result handling (addNew/edit/delete all
 * repeated the same then/catch -> search() block) is consolidated into
 * private helpers; the public API is unchanged.
 */
export class UserComponent implements OnInit {
  page = 1;
  pageSize = 3;
  /** Role filter options; index 0 is the synthetic "All" entry (id 0). */
  lstRoles: Role[] = [{id: 0, name: 'All'}];
  lstUsers: User[] = [];
  searchForm: FormGroup;
  headers = ['No', 'Email', 'Fullname', 'Phonenumber', 'Role', 'Date registered', 'Action'];

  constructor(private modalService: NgbModal,
              private apiService: UserService,
              private helperService: HelperService,
              protected alertService: AlertService,
  ) {}

  ngOnInit() {
    this.getRoles();
    // Search criteria; role_user 0 means "All" (matches lstRoles[0]).
    this.searchForm = new FormGroup ({
      email: new FormControl (''),
      phonenumber: new FormControl(''),
      fullname: new FormControl(''),
      role_user: new FormControl(0),
    });
    this.search();
  }

  /** Loads the role list and appends it after the "All" entry. */
  getRoles () {
    this.helperService.getAllRole()
      .subscribe(rs => this.lstRoles = this.lstRoles.concat(rs));
  }

  /** Runs the search and normalizes each user's createDate to yyyy-MM-dd. */
  search () {
    this.apiService.search(this.searchForm)
      .subscribe(rs => this.lstUsers = rs.map(item => ({
        ...item,
        createDate: formatDate(item.createDate, 'yyyy-MM-dd', 'en')
      })));
  }

  /** Opens the modal with a blank user. */
  addNew () {
    this.openUserModal(new User(), 'Add new user');
  }

  /** Opens the modal pre-populated with the selected user. */
  edit (u: User) {
    this.openUserModal(u, 'Edit user');
  }

  /** Opens a confirmation modal that deletes the user via the API service. */
  delete (u: User) {
    const modalRef = this.modalService.open(ModalComfirmComponent);
    modalRef.componentInstance.id = u.id;
    modalRef.componentInstance.content = 'Are u sure want to delete this user?';
    modalRef.componentInstance.service = this.apiService;
    this.refreshOnClose(modalRef);
  }

  /** Shared add/edit modal setup. */
  private openUserModal(user: User, title: string) {
    const modalRef = this.modalService.open(UserModalComponent);
    modalRef.componentInstance.u = user;
    modalRef.componentInstance.title = title;
    this.refreshOnClose(modalRef);
  }

  /** Re-runs the search whether the modal is closed or dismissed. */
  private refreshOnClose(modalRef: any) {
    modalRef.result.then(() => this.search(), () => this.search());
  }
}
|
# Read n grades; report the minimum number of grades that must be redone
# (raised to 5) so the average reaches at least 4.5.
n = int(input())
grades = [int(x) for x in input().split()]

# An average of 4.5 over n grades is the same as a total of n * 9 / 2.
if sum(grades) >= n * 9 / 2:
    print(0)
else:
    deficit = n * (9 / 2) - sum(grades)
    # Redoing a grade g gains (5 - g) points; take the biggest gains first.
    gains = sorted((5 - g for g in grades), reverse=True)
    covered = 0
    for redone, gain in enumerate(gains, start=1):
        covered += gain
        if covered >= deficit:
            print(redone)
            break
|
from typing import Optional

from arc import CLI

# Command-line app with two demo commands that simply print their argument.
cli = CLI()


@cli.command()
def c1(val: Optional[int]):
    """Print the given value (explicitly optional integer)."""
    print(val)


@cli.command()
def c2(val: Optional[int] = None):
    """Print the given value; defaults to None when omitted."""
    print(val)


cli()
The scheme will be similar to Apple’s existing “Made for iPhone” label, given to compatible headphones, speakers and other accessories, but with a new brand and logo. Apple may also provide additional checks and assurances that certified products are not vulnerable to hackers.
The Cupertino-based company was likely to emphasise the privacy protections built into its smart home system, one person familiar with Apple’s plans told the FT, given heightened sensitivity about technology companies’ access to personal information amid revelations about US intelligence agencies’ online surveillance programmes.
Apple will launch a new software platform at WWDC next week that will let the iPhone directly control lights, security systems and other household gadgets, according to a report from The Financial Times. The piece claims Apple will make a "big play" in smart home technology by working with device makers to give them official approval — much as the "Made for iPhone" program does for accessories like headphones and speaker docks — and by providing software support in iOS 8. The piece notes that plans can always change, but that Apple has launched a number of initiatives over the past year or two, including CarPlay and iBeacon, to expand the reach of the iPhone and have it interact more with the real world and the electronic devices in it. Apple's retail stores already sell a number of components of the so-called "Internet of Things", including the Google-owned Nest Thermostat (pictured above), Philips Hue lightbulbs and Belkin WeMo Wi-Fi enabled light switches.
/***********************************************************
LIMITEngine Header File
Copyright (C), LIMITGAME, 2020
-----------------------------------------------------------
@file Archive.h
@brief Archive for saving resource
@author minseob (https://github.com/rasidin)
***********************************************************/
#pragma once
#include "Core/Object.h"
#include <LEIntVector2.h>
#include <LEIntVector3.h>
#include <LEIntVector4.h>
#include <LEFloatVector2.h>
#include <LEFloatVector3.h>
#include <LEFloatVector4.h>
#include <LEFloatMatrix4x4.h>
namespace LimitEngine {
class Archive : public Object<LimitEngineMemoryCategory::Common>
{
    // Direction of serialization: a Loading archive reads from a
    // caller-owned buffer; a Saving archive grows (and owns) its own.
    enum class DataMode {
        Loading = 0,
        Saving,
    } mDataMode;
public:
    // Saving archive: starts empty, allocates as data is appended.
    Archive() : mDataMode(DataMode::Saving), mData(nullptr), mDataSize(0u), mDataOffset(0u){}
    // Loading archive: wraps an existing buffer. The buffer is NOT copied
    // and NOT freed on destruction - the caller retains ownership.
    Archive(void *InData, size_t InSize) : mDataMode(DataMode::Loading), mData(InData), mDataSize(InSize), mDataOffset(0u) {}
    virtual ~Archive() {
        // Only a Saving archive owns its buffer (see the two constructors).
        if (mData && mDataMode == DataMode::Saving)
            MemoryAllocator::Free(mData);
        mData = nullptr;
        mDataSize = 0u;
    }

    // Stream operators; per-type definitions are provided elsewhere.
    template<typename T>
    Archive& operator << (T &Data);
    template<typename T>
    Archive& operator << (T *Data);

    // Reads or writes Data as raw bytes depending on the archive mode.
    // Only safe for trivially copyable types.
    template<typename T>
    void SerializeData(T &Data) {
        if (IsLoading()) {
            Data = *(T*)GetData(sizeof(T));
        }
        else {
            memcpy(AddSize(sizeof(T)), &Data, sizeof(T));
        }
    }
    bool IsLoading() const { return mDataMode == DataMode::Loading; }
    bool IsSaving() const { return mDataMode == DataMode::Saving; }

    // Grows the buffer by mSize bytes (allocate new, copy old, free old)
    // and returns a pointer to the newly appended region.
    // NOTE(review): every append reallocates and copies the whole buffer,
    // so a long save is O(n^2) - acceptable only if saves are infrequent.
    void* AddSize(size_t mSize) {
        size_t orgSize = mDataSize;
        mDataSize += mSize;
        void *newData = MemoryAllocator::Alloc(mDataSize);
        if (orgSize)
            memcpy(newData, mData, mDataSize - mSize);
        MemoryAllocator::Free(mData);
        mData = newData;
        return (uint8*)newData + orgSize;
    }
    // Returns the current read cursor and advances it by mSize bytes.
    // No bounds checking: callers must not read past mDataSize.
    void* GetData(size_t mSize) {
        size_t orgSize = mDataOffset;
        mDataOffset += mSize;
        return (uint8*)mData + orgSize;
    }
private:
    void *mData;         // serialized bytes (owned only in Saving mode)
    size_t mDataSize;    // total bytes stored / available
    size_t mDataOffset;  // read cursor (used in Loading mode)

    friend class ResourceManager;
};
} |
<reponame>liupangzi/codekata
class Solution(object):
    def kthSmallest(self, matrix, k):
        """
        Return the k-th smallest element of a row- and column-sorted matrix.

        Binary-searches the value range [matrix min, matrix max] and, for each
        candidate value, counts how many entries are <= it.

        :type matrix: List[List[int]]
        :type k: int
        :rtype: int
        """
        low, high = matrix[0][0], matrix[len(matrix) - 1][len(matrix[0]) - 1] + 1
        while low < high:
            # Use floor division: the original `/` is float division under
            # Python 3, which made `low` a float and returned a float result.
            mid = (high + low) // 2
            if self.binary_search(matrix, mid) < k:
                low = mid + 1
            else:
                high = mid
        return low

    def binary_search(self, matrix, mid):
        """Count entries <= mid by walking from the bottom-left corner.

        Runs in O(rows + cols). Uses len(matrix) for both bounds, so it
        assumes a square matrix (as the original did).
        """
        i, j, result = len(matrix) - 1, 0, 0
        while i >= 0 and j <= len(matrix) - 1:
            if matrix[i][j] > mid:
                i -= 1
            else:
                j += 1
                result += (i + 1)
        return result
|
package com.octo.android.robospice.persistence.springandroid.xml;
import java.io.File;
import java.io.IOException;
import org.simpleframework.xml.Serializer;
import org.simpleframework.xml.core.Persister;
import android.app.Application;
import com.octo.android.robospice.persistence.exception.CacheCreationException;
import com.octo.android.robospice.persistence.exception.CacheLoadingException;
import com.octo.android.robospice.persistence.exception.CacheSavingException;
import com.octo.android.robospice.persistence.springandroid.SpringAndroidObjectPersister;
/**
 * Object persister that caches objects as XML files using the Simple XML
 * framework's {@link Persister}.
 */
public final class SimpleSerializerObjectPersister<T> extends SpringAndroidObjectPersister<T> {

    // ============================================================================================
    // ATTRIBUTES
    // ============================================================================================
    /** Simple XML serializer used for both reading and writing cache files. */
    Serializer serializer = new Persister();

    // ============================================================================================
    // CONSTRUCTOR
    // ============================================================================================
    /**
     * @param application Android application, used to locate cache storage
     * @param clazz       type of the cached objects
     * @param cacheFolder cache directory, or null for the default location
     */
    public SimpleSerializerObjectPersister(Application application, Class<T> clazz, File cacheFolder)
        throws CacheCreationException {
        super(application, clazz, cacheFolder);
        // The field initializer already creates the Persister; the previous
        // redundant re-assignment here has been removed.
    }

    public SimpleSerializerObjectPersister(Application application, Class<T> clazz) throws CacheCreationException {
        this(application, clazz, null);
    }

    // ============================================================================================
    // METHODS
    // ============================================================================================
    /** Deserializes a cached XML string back into an instance of the handled class. */
    @Override
    protected T deserializeData(String xml) throws CacheLoadingException {
        try {
            return serializer.read(getHandledClass(), xml);
        } catch (Exception e) {
            throw new CacheLoadingException(e);
        }
    }

    /** Serializes data to the cache file for the given key. */
    @Override
    protected void saveData(T data, Object cacheKey) throws IOException, CacheSavingException {
        try {
            serializer.write(data, getCacheFile(cacheKey));
        } catch (Exception e) {
            // Wrap the root cause instead of throwing a fixed (and often
            // inaccurate) message; mirrors deserializeData's handling.
            throw new CacheSavingException(e);
        }
    }
}
|
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef CEPH_RADOS_H
#define CEPH_RADOS_H
/*
* Data types for the Ceph distributed object storage layer RADOS
* (Reliable Autonomic Distributed Object Store).
*/
#include <linux/ceph/msgr.h>
/*
* fs id
*/
struct ceph_fsid {
	unsigned char fsid[16];   /* 128-bit cluster/filesystem identifier */
};

/* Byte-wise total order over fsids; returns <0, 0 or >0 like memcmp. */
static inline int ceph_fsid_compare(const struct ceph_fsid *a,
				    const struct ceph_fsid *b)
{
	return memcmp(a, b, sizeof(*a));
}
/*
* ino, object, etc.
*/
typedef __le64 ceph_snapid_t;
#define CEPH_SNAPDIR ((__u64)(-1)) /* reserved for hidden .snap dir */
#define CEPH_NOSNAP ((__u64)(-2)) /* "head", "live" revision */
#define CEPH_MAXSNAP ((__u64)(-3)) /* largest valid snapid */
struct ceph_timespec {
__le32 tv_sec;
__le32 tv_nsec;
} __attribute__ ((packed));
/*
* object layout - how objects are mapped into PGs
*/
#define CEPH_OBJECT_LAYOUT_HASH 1
#define CEPH_OBJECT_LAYOUT_LINEAR 2
#define CEPH_OBJECT_LAYOUT_HASHINO 3
/*
* pg layout -- how PGs are mapped onto (sets of) OSDs
*/
#define CEPH_PG_LAYOUT_CRUSH 0
#define CEPH_PG_LAYOUT_HASH 1
#define CEPH_PG_LAYOUT_LINEAR 2
#define CEPH_PG_LAYOUT_HYBRID 3
#define CEPH_PG_MAX_SIZE 32 /* max # osds in a single pg */
/*
* placement group.
* we encode this into one __le64.
*/
struct ceph_pg_v1 {
__le16 preferred; /* preferred primary osd */
__le16 ps; /* placement seed */
__le32 pool; /* object pool */
} __attribute__ ((packed));
/*
* pg_pool is a set of pgs storing a pool of objects
*
* pg_num -- base number of pseudorandomly placed pgs
*
* pgp_num -- effective number when calculating pg placement. this
* is used for pg_num increases. new pgs result in data being "split"
* into new pgs. for this to proceed smoothly, new pgs are intiially
* colocated with their parents; that is, pgp_num doesn't increase
* until the new pgs have successfully split. only _then_ are the new
* pgs placed independently.
*
* lpg_num -- localized pg count (per device). replicas are randomly
* selected.
*
* lpgp_num -- as above.
*/
#define CEPH_NOPOOL ((__u64) (-1)) /* pool id not defined */
#define CEPH_POOL_TYPE_REP 1
#define CEPH_POOL_TYPE_RAID4 2 /* never implemented */
#define CEPH_POOL_TYPE_EC 3
/*
* stable_mod func is used to control number of placement groups.
* similar to straight-up modulo, but produces a stable mapping as b
* increases over time. b is the number of bins, and bmask is the
* containing power of 2 minus 1.
*
* b <= bmask and bmask=(2**n)-1
* e.g., b=12 -> bmask=15, b=123 -> bmask=127
*/
/*
 * Map x into [0, b): reduce modulo the covering power of two, then fold
 * values landing in the unoccupied top half [b, bmask] down by halving
 * the mask. Callers guarantee b <= bmask and bmask = 2^n - 1 (see the
 * comment block above).
 */
static inline int ceph_stable_mod(int x, int b, int bmask)
{
	if ((x & bmask) < b)
		return x & bmask;
	else
		return x & (bmask >> 1);
}
/*
* object layout - how a given object should be stored.
*/
struct ceph_object_layout {
struct ceph_pg_v1 ol_pgid; /* raw pg, with _full_ ps precision. */
__le32 ol_stripe_unit; /* for per-object parity, if any */
} __attribute__ ((packed));
/*
* compound epoch+version, used by storage layer to serialize mutations
*/
struct ceph_eversion {
__le64 version;
__le32 epoch;
} __attribute__ ((packed));
/*
* osd map bits
*/
/* status bits */
#define CEPH_OSD_EXISTS (1<<0)
#define CEPH_OSD_UP (1<<1)
#define CEPH_OSD_AUTOOUT (1<<2) /* osd was automatically marked out */
#define CEPH_OSD_NEW (1<<3) /* osd is new, never marked in */
extern const char *ceph_osd_state_name(int s);
/* osd weights. fixed point value: 0x10000 == 1.0 ("in"), 0 == "out" */
#define CEPH_OSD_IN 0x10000
#define CEPH_OSD_OUT 0
/* osd primary-affinity. fixed point value: 0x10000 == baseline */
#define CEPH_OSD_MAX_PRIMARY_AFFINITY 0x10000
#define CEPH_OSD_DEFAULT_PRIMARY_AFFINITY 0x10000
/*
 * osd map flag bits (cluster-wide state carried in the OSDMap)
 */
#define CEPH_OSDMAP_NEARFULL (1<<0) /* sync writes (near ENOSPC),
					not set since ~luminous */
#define CEPH_OSDMAP_FULL (1<<1) /* no data writes (ENOSPC),
					not set since ~luminous */
#define CEPH_OSDMAP_PAUSERD (1<<2) /* pause all reads */
#define CEPH_OSDMAP_PAUSEWR (1<<3) /* pause all writes */
#define CEPH_OSDMAP_PAUSEREC (1<<4) /* pause recovery */
#define CEPH_OSDMAP_NOUP (1<<5) /* block osd boot */
#define CEPH_OSDMAP_NODOWN (1<<6) /* block osd mark-down/failure */
#define CEPH_OSDMAP_NOOUT (1<<7) /* block osd auto mark-out */
#define CEPH_OSDMAP_NOIN (1<<8) /* block osd auto mark-in */
#define CEPH_OSDMAP_NOBACKFILL (1<<9) /* block osd backfill */
#define CEPH_OSDMAP_NORECOVER (1<<10) /* block osd recovery and backfill */
#define CEPH_OSDMAP_NOSCRUB (1<<11) /* block periodic scrub */
#define CEPH_OSDMAP_NODEEP_SCRUB (1<<12) /* block periodic deep-scrub */
#define CEPH_OSDMAP_NOTIERAGENT (1<<13) /* disable tiering agent */
#define CEPH_OSDMAP_NOREBALANCE (1<<14) /* block osd backfill unless pg is degraded */
#define CEPH_OSDMAP_SORTBITWISE (1<<15) /* use bitwise hobject_t sort */
#define CEPH_OSDMAP_REQUIRE_JEWEL (1<<16) /* require jewel for booting osds */
#define CEPH_OSDMAP_REQUIRE_KRAKEN (1<<17) /* require kraken for booting osds */
#define CEPH_OSDMAP_REQUIRE_LUMINOUS (1<<18) /* require l for booting osds */
#define CEPH_OSDMAP_RECOVERY_DELETES (1<<19) /* deletes performed during recovery instead of peering */
/*
 * The error code to return when an OSD can't handle a write
 * because it is too large.
 */
#define OSD_WRITETOOBIG EMSGSIZE
/*
 * osd ops
 *
 * WARNING: do not use these op codes directly. Use the helpers
 * defined below instead. In certain cases, op code behavior was
 * redefined, resulting in special-cases in the helpers.
 */
/* an opcode is MODE | TYPE | nr: mode nibble in bits 12-15, type in bits 8-11 */
#define CEPH_OSD_OP_MODE 0xf000
#define CEPH_OSD_OP_MODE_RD 0x1000
#define CEPH_OSD_OP_MODE_WR 0x2000
#define CEPH_OSD_OP_MODE_RMW 0x3000
#define CEPH_OSD_OP_MODE_SUB 0x4000
#define CEPH_OSD_OP_MODE_CACHE 0x8000
#define CEPH_OSD_OP_TYPE 0x0f00
#define CEPH_OSD_OP_TYPE_LOCK 0x0100
#define CEPH_OSD_OP_TYPE_DATA 0x0200
#define CEPH_OSD_OP_TYPE_ATTR 0x0300
#define CEPH_OSD_OP_TYPE_EXEC 0x0400
#define CEPH_OSD_OP_TYPE_PG 0x0500
#define CEPH_OSD_OP_TYPE_MULTI 0x0600 /* multiobject */
/* compose an opcode without (OP1) or with (OP) a type nibble */
#define __CEPH_OSD_OP1(mode, nr) \
	(CEPH_OSD_OP_MODE_##mode | (nr))
#define __CEPH_OSD_OP(mode, type, nr) \
	(CEPH_OSD_OP_MODE_##mode | CEPH_OSD_OP_TYPE_##type | (nr))
/*
 * X-macro table of every OSD operation: each entry is
 * f(NAME, opcode, "printable-name").  It is expanded below to generate
 * the CEPH_OSD_OP_* enum, and elsewhere to implement
 * ceph_osd_op_name().  Every line must remain backslash-continued.
 */
#define __CEPH_FORALL_OSD_OPS(f) \
	/** data **/ \
	/* read */ \
	f(READ, __CEPH_OSD_OP(RD, DATA, 1), "read") \
	f(STAT, __CEPH_OSD_OP(RD, DATA, 2), "stat") \
	f(MAPEXT, __CEPH_OSD_OP(RD, DATA, 3), "mapext") \
	\
	/* fancy read */ \
	f(MASKTRUNC, __CEPH_OSD_OP(RD, DATA, 4), "masktrunc") \
	f(SPARSE_READ, __CEPH_OSD_OP(RD, DATA, 5), "sparse-read") \
	\
	f(NOTIFY, __CEPH_OSD_OP(RD, DATA, 6), "notify") \
	f(NOTIFY_ACK, __CEPH_OSD_OP(RD, DATA, 7), "notify-ack") \
	\
	/* versioning */ \
	f(ASSERT_VER, __CEPH_OSD_OP(RD, DATA, 8), "assert-version") \
	\
	f(LIST_WATCHERS, __CEPH_OSD_OP(RD, DATA, 9), "list-watchers") \
	\
	f(LIST_SNAPS, __CEPH_OSD_OP(RD, DATA, 10), "list-snaps") \
	\
	/* sync */ \
	f(SYNC_READ, __CEPH_OSD_OP(RD, DATA, 11), "sync_read") \
	\
	/* write */ \
	f(WRITE, __CEPH_OSD_OP(WR, DATA, 1), "write") \
	f(WRITEFULL, __CEPH_OSD_OP(WR, DATA, 2), "writefull") \
	f(TRUNCATE, __CEPH_OSD_OP(WR, DATA, 3), "truncate") \
	f(ZERO, __CEPH_OSD_OP(WR, DATA, 4), "zero") \
	f(DELETE, __CEPH_OSD_OP(WR, DATA, 5), "delete") \
	\
	/* fancy write */ \
	f(APPEND, __CEPH_OSD_OP(WR, DATA, 6), "append") \
	f(SETTRUNC, __CEPH_OSD_OP(WR, DATA, 8), "settrunc") \
	f(TRIMTRUNC, __CEPH_OSD_OP(WR, DATA, 9), "trimtrunc") \
	\
	f(TMAPUP, __CEPH_OSD_OP(RMW, DATA, 10), "tmapup") \
	f(TMAPPUT, __CEPH_OSD_OP(WR, DATA, 11), "tmapput") \
	f(TMAPGET, __CEPH_OSD_OP(RD, DATA, 12), "tmapget") \
	\
	f(CREATE, __CEPH_OSD_OP(WR, DATA, 13), "create") \
	f(ROLLBACK, __CEPH_OSD_OP(WR, DATA, 14), "rollback") \
	\
	f(WATCH, __CEPH_OSD_OP(WR, DATA, 15), "watch") \
	\
	/* omap */ \
	f(OMAPGETKEYS, __CEPH_OSD_OP(RD, DATA, 17), "omap-get-keys") \
	f(OMAPGETVALS, __CEPH_OSD_OP(RD, DATA, 18), "omap-get-vals") \
	f(OMAPGETHEADER, __CEPH_OSD_OP(RD, DATA, 19), "omap-get-header") \
	f(OMAPGETVALSBYKEYS, __CEPH_OSD_OP(RD, DATA, 20), "omap-get-vals-by-keys") \
	f(OMAPSETVALS, __CEPH_OSD_OP(WR, DATA, 21), "omap-set-vals") \
	f(OMAPSETHEADER, __CEPH_OSD_OP(WR, DATA, 22), "omap-set-header") \
	f(OMAPCLEAR, __CEPH_OSD_OP(WR, DATA, 23), "omap-clear") \
	f(OMAPRMKEYS, __CEPH_OSD_OP(WR, DATA, 24), "omap-rm-keys") \
	f(OMAP_CMP, __CEPH_OSD_OP(RD, DATA, 25), "omap-cmp") \
	\
	/* tiering */ \
	f(COPY_FROM, __CEPH_OSD_OP(WR, DATA, 26), "copy-from") \
	f(COPY_FROM2, __CEPH_OSD_OP(WR, DATA, 45), "copy-from2") \
	f(COPY_GET_CLASSIC, __CEPH_OSD_OP(RD, DATA, 27), "copy-get-classic") \
	f(UNDIRTY, __CEPH_OSD_OP(WR, DATA, 28), "undirty") \
	f(ISDIRTY, __CEPH_OSD_OP(RD, DATA, 29), "isdirty") \
	f(COPY_GET, __CEPH_OSD_OP(RD, DATA, 30), "copy-get") \
	f(CACHE_FLUSH, __CEPH_OSD_OP(CACHE, DATA, 31), "cache-flush") \
	f(CACHE_EVICT, __CEPH_OSD_OP(CACHE, DATA, 32), "cache-evict") \
	f(CACHE_TRY_FLUSH, __CEPH_OSD_OP(CACHE, DATA, 33), "cache-try-flush") \
	\
	/* convert tmap to omap */ \
	f(TMAP2OMAP, __CEPH_OSD_OP(RMW, DATA, 34), "tmap2omap") \
	\
	/* hints */ \
	f(SETALLOCHINT, __CEPH_OSD_OP(WR, DATA, 35), "set-alloc-hint") \
	\
	/** multi **/ \
	f(CLONERANGE, __CEPH_OSD_OP(WR, MULTI, 1), "clonerange") \
	f(ASSERT_SRC_VERSION, __CEPH_OSD_OP(RD, MULTI, 2), "assert-src-version") \
	f(SRC_CMPXATTR, __CEPH_OSD_OP(RD, MULTI, 3), "src-cmpxattr") \
	\
	/** attrs **/ \
	/* read */ \
	f(GETXATTR, __CEPH_OSD_OP(RD, ATTR, 1), "getxattr") \
	f(GETXATTRS, __CEPH_OSD_OP(RD, ATTR, 2), "getxattrs") \
	f(CMPXATTR, __CEPH_OSD_OP(RD, ATTR, 3), "cmpxattr") \
	\
	/* write */ \
	f(SETXATTR, __CEPH_OSD_OP(WR, ATTR, 1), "setxattr") \
	f(SETXATTRS, __CEPH_OSD_OP(WR, ATTR, 2), "setxattrs") \
	f(RESETXATTRS, __CEPH_OSD_OP(WR, ATTR, 3), "resetxattrs") \
	f(RMXATTR, __CEPH_OSD_OP(WR, ATTR, 4), "rmxattr") \
	\
	/** subop **/ \
	f(PULL, __CEPH_OSD_OP1(SUB, 1), "pull") \
	f(PUSH, __CEPH_OSD_OP1(SUB, 2), "push") \
	f(BALANCEREADS, __CEPH_OSD_OP1(SUB, 3), "balance-reads") \
	f(UNBALANCEREADS, __CEPH_OSD_OP1(SUB, 4), "unbalance-reads") \
	f(SCRUB, __CEPH_OSD_OP1(SUB, 5), "scrub") \
	f(SCRUB_RESERVE, __CEPH_OSD_OP1(SUB, 6), "scrub-reserve") \
	f(SCRUB_UNRESERVE, __CEPH_OSD_OP1(SUB, 7), "scrub-unreserve") \
	f(SCRUB_STOP, __CEPH_OSD_OP1(SUB, 8), "scrub-stop") \
	f(SCRUB_MAP, __CEPH_OSD_OP1(SUB, 9), "scrub-map") \
	\
	/** lock **/ \
	f(WRLOCK, __CEPH_OSD_OP(WR, LOCK, 1), "wrlock") \
	f(WRUNLOCK, __CEPH_OSD_OP(WR, LOCK, 2), "wrunlock") \
	f(RDLOCK, __CEPH_OSD_OP(WR, LOCK, 3), "rdlock") \
	f(RDUNLOCK, __CEPH_OSD_OP(WR, LOCK, 4), "rdunlock") \
	f(UPLOCK, __CEPH_OSD_OP(WR, LOCK, 5), "uplock") \
	f(DNLOCK, __CEPH_OSD_OP(WR, LOCK, 6), "dnlock") \
	\
	/** exec **/ \
	/* note: the RD bit here is wrong; see special-case below in helper */ \
	f(CALL, __CEPH_OSD_OP(RD, EXEC, 1), "call") \
	\
	/** pg **/ \
	f(PGLS, __CEPH_OSD_OP(RD, PG, 1), "pgls") \
	f(PGLS_FILTER, __CEPH_OSD_OP(RD, PG, 2), "pgls-filter") \
	f(PG_HITSET_LS, __CEPH_OSD_OP(RD, PG, 3), "pg-hitset-ls") \
	f(PG_HITSET_GET, __CEPH_OSD_OP(RD, PG, 4), "pg-hitset-get")
/* CEPH_OSD_OP_* opcode constants, generated from the table above */
enum {
#define GENERATE_ENUM_ENTRY(op, opcode, str) CEPH_OSD_OP_##op = (opcode),
__CEPH_FORALL_OSD_OPS(GENERATE_ENUM_ENTRY)
#undef GENERATE_ENUM_ENTRY
};
/* true if op's type nibble marks it as a lock op */
static inline int ceph_osd_op_type_lock(int op)
{
	return (op & CEPH_OSD_OP_TYPE) == CEPH_OSD_OP_TYPE_LOCK;
}
/* true if op's type nibble marks it as a data op */
static inline int ceph_osd_op_type_data(int op)
{
	return (op & CEPH_OSD_OP_TYPE) == CEPH_OSD_OP_TYPE_DATA;
}
/* true if op's type nibble marks it as an xattr op */
static inline int ceph_osd_op_type_attr(int op)
{
	return (op & CEPH_OSD_OP_TYPE) == CEPH_OSD_OP_TYPE_ATTR;
}
/* true if op's type nibble marks it as an exec (class call) op */
static inline int ceph_osd_op_type_exec(int op)
{
	return (op & CEPH_OSD_OP_TYPE) == CEPH_OSD_OP_TYPE_EXEC;
}
/* true if op's type nibble marks it as a pg op */
static inline int ceph_osd_op_type_pg(int op)
{
	return (op & CEPH_OSD_OP_TYPE) == CEPH_OSD_OP_TYPE_PG;
}
/* true if op's type nibble marks it as a multiobject op */
static inline int ceph_osd_op_type_multi(int op)
{
	return (op & CEPH_OSD_OP_TYPE) == CEPH_OSD_OP_TYPE_MULTI;
}
/* true if op's mode nibble is exactly SUB (inter-osd subop) */
static inline int ceph_osd_op_mode_subop(int op)
{
	return (op & CEPH_OSD_OP_MODE) == CEPH_OSD_OP_MODE_SUB;
}
/*
 * true if op has the RD mode bit set.  CALL is special-cased: the op
 * table advertises it with the RD bit, but it must not be treated as a
 * read (see the note in __CEPH_FORALL_OSD_OPS).
 */
static inline int ceph_osd_op_mode_read(int op)
{
	return (op & CEPH_OSD_OP_MODE_RD) &&
		op != CEPH_OSD_OP_CALL;
}
/* true if op has the WR mode bit set (op may modify the object) */
static inline int ceph_osd_op_mode_modify(int op)
{
	return op & CEPH_OSD_OP_MODE_WR;
}
/*
 * Note: the following tmap opcodes are also defined in the ceph
 * librados.h; any modification here must be mirrored there.
 */
#define CEPH_OSD_TMAP_HDR 'h'
#define CEPH_OSD_TMAP_SET 's'
#define CEPH_OSD_TMAP_CREATE 'c' /* create key */
#define CEPH_OSD_TMAP_RM 'r'
#define CEPH_OSD_TMAP_RMSLOPPY 'R'
extern const char *ceph_osd_op_name(int op);
/*
 * osd op flags (per-message flags carried on an OSD request/reply)
 *
 * An op may be READ, WRITE, or READ|WRITE.
 */
enum {
	CEPH_OSD_FLAG_ACK = 0x0001, /* want (or is) "ack" ack */
	CEPH_OSD_FLAG_ONNVRAM = 0x0002, /* want (or is) "onnvram" ack */
	CEPH_OSD_FLAG_ONDISK = 0x0004, /* want (or is) "ondisk" ack */
	CEPH_OSD_FLAG_RETRY = 0x0008, /* resend attempt */
	CEPH_OSD_FLAG_READ = 0x0010, /* op may read */
	CEPH_OSD_FLAG_WRITE = 0x0020, /* op may write */
	CEPH_OSD_FLAG_ORDERSNAP = 0x0040, /* EOLDSNAP if snapc is out of order */
	CEPH_OSD_FLAG_PEERSTAT_OLD = 0x0080, /* DEPRECATED msg includes osd_peer_stat */
	CEPH_OSD_FLAG_BALANCE_READS = 0x0100,
	CEPH_OSD_FLAG_PARALLELEXEC = 0x0200, /* execute op in parallel */
	CEPH_OSD_FLAG_PGOP = 0x0400, /* pg op, no object */
	CEPH_OSD_FLAG_EXEC = 0x0800, /* op may exec */
	CEPH_OSD_FLAG_EXEC_PUBLIC = 0x1000, /* DEPRECATED op may exec (public) */
	CEPH_OSD_FLAG_LOCALIZE_READS = 0x2000, /* read from nearby replica, if any */
	CEPH_OSD_FLAG_RWORDERED = 0x4000, /* order wrt concurrent reads */
	CEPH_OSD_FLAG_IGNORE_CACHE = 0x8000, /* ignore cache logic */
	CEPH_OSD_FLAG_SKIPRWLOCKS = 0x10000, /* skip rw locks */
	CEPH_OSD_FLAG_IGNORE_OVERLAY = 0x20000, /* ignore pool overlay */
	CEPH_OSD_FLAG_FLUSH = 0x40000, /* this is part of flush */
	CEPH_OSD_FLAG_MAP_SNAP_CLONE = 0x80000, /* map snap direct to clone id */
	CEPH_OSD_FLAG_ENFORCE_SNAPC = 0x100000, /* use snapc provided even if
						   pool uses pool snaps */
	CEPH_OSD_FLAG_REDIRECTED = 0x200000, /* op has been redirected */
	CEPH_OSD_FLAG_KNOWN_REDIR = 0x400000, /* redirect bit is authoritative */
	CEPH_OSD_FLAG_FULL_TRY = 0x800000, /* try op despite full flag */
	CEPH_OSD_FLAG_FULL_FORCE = 0x1000000, /* force op despite full flag */
};
/* per-op flags (ceph_osd_op::flags) */
enum {
	CEPH_OSD_OP_FLAG_EXCL = 1, /* EXCL object create */
	CEPH_OSD_OP_FLAG_FAILOK = 2, /* continue despite failure */
	CEPH_OSD_OP_FLAG_FADVISE_RANDOM = 0x4, /* the op is random */
	CEPH_OSD_OP_FLAG_FADVISE_SEQUENTIAL = 0x8, /* the op is sequential */
	CEPH_OSD_OP_FLAG_FADVISE_WILLNEED = 0x10,/* data will be accessed in
						    the near future */
	CEPH_OSD_OP_FLAG_FADVISE_DONTNEED = 0x20,/* data will not be accessed
						    in the near future */
	CEPH_OSD_OP_FLAG_FADVISE_NOCACHE = 0x40,/* data will be accessed only
						   once by this client */
};
/* errno aliases used by the OSD protocol */
#define EOLDSNAPC ERESTART /* ORDERSNAP flag set; writer has old snapc*/
#define EBLOCKLISTED ESHUTDOWN /* blocklisted */
/* xattr comparison operators (ceph_osd_op::xattr.cmp_op) */
enum {
	CEPH_OSD_CMPXATTR_OP_NOP = 0,
	CEPH_OSD_CMPXATTR_OP_EQ = 1,
	CEPH_OSD_CMPXATTR_OP_NE = 2,
	CEPH_OSD_CMPXATTR_OP_GT = 3,
	CEPH_OSD_CMPXATTR_OP_GTE = 4,
	CEPH_OSD_CMPXATTR_OP_LT = 5,
	CEPH_OSD_CMPXATTR_OP_LTE = 6
};
/* xattr comparison modes (ceph_osd_op::xattr.cmp_mode) */
enum {
	CEPH_OSD_CMPXATTR_MODE_STRING = 1,
	CEPH_OSD_CMPXATTR_MODE_U64 = 2
};
/* flags for the COPY_FROM/COPY_FROM2 ops (ceph_osd_op::copy_from.flags) */
enum {
	CEPH_OSD_COPY_FROM_FLAG_FLUSH = 1, /* part of a flush operation */
	CEPH_OSD_COPY_FROM_FLAG_IGNORE_OVERLAY = 2, /* ignore pool overlay */
	CEPH_OSD_COPY_FROM_FLAG_IGNORE_CACHE = 4, /* ignore osd cache logic */
	CEPH_OSD_COPY_FROM_FLAG_MAP_SNAP_CLONE = 8, /* map snap direct to
						     * cloneid */
	CEPH_OSD_COPY_FROM_FLAG_RWORDERED = 16, /* order with write */
	CEPH_OSD_COPY_FROM_FLAG_TRUNCATE_SEQ = 32, /* send truncate_{seq,size} */
};
/* watch sub-operations (ceph_osd_op::watch.op) */
enum {
	CEPH_OSD_WATCH_OP_UNWATCH = 0,
	CEPH_OSD_WATCH_OP_LEGACY_WATCH = 1,
	/* note: use only ODD ids to prevent pre-giant code from
	   interpreting the op as UNWATCH */
	CEPH_OSD_WATCH_OP_WATCH = 3,
	CEPH_OSD_WATCH_OP_RECONNECT = 5,
	CEPH_OSD_WATCH_OP_PING = 7,
};
const char *ceph_osd_watch_op_name(int o);
/* allocation hint flags (ceph_osd_op::alloc_hint.flags) */
enum {
	CEPH_OSD_ALLOC_HINT_FLAG_SEQUENTIAL_WRITE = 1,
	CEPH_OSD_ALLOC_HINT_FLAG_RANDOM_WRITE = 2,
	CEPH_OSD_ALLOC_HINT_FLAG_SEQUENTIAL_READ = 4,
	CEPH_OSD_ALLOC_HINT_FLAG_RANDOM_READ = 8,
	CEPH_OSD_ALLOC_HINT_FLAG_APPEND_ONLY = 16,
	CEPH_OSD_ALLOC_HINT_FLAG_IMMUTABLE = 32,
	CEPH_OSD_ALLOC_HINT_FLAG_SHORTLIVED = 64,
	CEPH_OSD_ALLOC_HINT_FLAG_LONGLIVED = 128,
	CEPH_OSD_ALLOC_HINT_FLAG_COMPRESSIBLE = 256,
	CEPH_OSD_ALLOC_HINT_FLAG_INCOMPRESSIBLE = 512,
};
/* backoff message sub-operations */
enum {
	CEPH_OSD_BACKOFF_OP_BLOCK = 1,
	CEPH_OSD_BACKOFF_OP_ACK_BLOCK = 2,
	CEPH_OSD_BACKOFF_OP_UNBLOCK = 3,
};
/*
 * an individual object operation.  each may be accompanied by some data
 * payload.  Wire struct: little-endian, packed; exactly one member of
 * the union is meaningful, selected by the op code.
 */
struct ceph_osd_op {
	__le16 op; /* CEPH_OSD_OP_* */
	__le32 flags; /* CEPH_OSD_OP_FLAG_* */
	union {
		/* read/write/truncate/zero and other extent ops */
		struct {
			__le64 offset, length;
			__le64 truncate_size;
			__le32 truncate_seq;
		} __attribute__ ((packed)) extent;
		/* xattr get/set/compare */
		struct {
			__le32 name_len;
			__le32 value_len;
			__u8 cmp_op; /* CEPH_OSD_CMPXATTR_OP_* */
			__u8 cmp_mode; /* CEPH_OSD_CMPXATTR_MODE_* */
		} __attribute__ ((packed)) xattr;
		/* CALL: execute an object class method */
		struct {
			__u8 class_len;
			__u8 method_len;
			__u8 argc;
			__le32 indata_len;
		} __attribute__ ((packed)) cls;
		/* PGLS listing cursor */
		struct {
			__le64 cookie, count;
		} __attribute__ ((packed)) pgls;
		struct {
			__le64 snapid;
		} __attribute__ ((packed)) snap;
		/* WATCH registration */
		struct {
			__le64 cookie;
			__le64 ver; /* no longer used */
			__u8 op; /* CEPH_OSD_WATCH_OP_* */
			__le32 gen; /* registration generation */
		} __attribute__ ((packed)) watch;
		struct {
			__le64 cookie;
		} __attribute__ ((packed)) notify;
		struct {
			__le64 offset, length;
			__le64 src_offset;
		} __attribute__ ((packed)) clonerange;
		struct {
			__le64 expected_object_size;
			__le64 expected_write_size;
			__le32 flags; /* CEPH_OSD_OP_ALLOC_HINT_FLAG_* */
		} __attribute__ ((packed)) alloc_hint;
		struct {
			__le64 snapid;
			__le64 src_version;
			__u8 flags; /* CEPH_OSD_COPY_FROM_FLAG_* */
			/*
			 * CEPH_OSD_OP_FLAG_FADVISE_*: fadvise flags
			 * for src object, flags for dest object are in
			 * ceph_osd_op::flags.
			 */
			__le32 src_fadvise_flags;
		} __attribute__ ((packed)) copy_from;
	};
	__le32 payload_len;
} __attribute__ ((packed));
#endif
|
// Distributes the recipients to the rooms
func Distributor(ws *websocket.Conn, address string) {
var handled bool = false
for key, main := range Rooms {
if len(main.users) < ROOM_CAPACITY {
main.users[address] = &Client{ws, address, address}
roomChan <- &RoomCh{address, key}
handled = true
}
if (handled == true) && (len(main.users) == 0) {
CloseRoom(key)
}
}
if handled != true {
CreateRoom(&Client{ws, address, address}, address)
}
go func() {
WriteCommand(&wsCommand{ws, "cmd", Rooms})
WriteMessage(&wsMessage{ws, address, "msg", SERVER_NAME, "Welcome"})
for msg := range messagesQueue {
WriteMessage(msg)
}
}()
} |
package nemesis.form;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import p.URLEncodedParser;
/**
 * Default {@link Form} implementation.
 *
 * A subclass declares its fields via the {@code define(...)} methods,
 * then {@link #map(HttpServletRequest)} parses the request parameters
 * (query string and URL-encoded body), runs the per-element validators
 * and finally the whole-form validators.  Failures are collected in
 * {@link #getParseExceptions()} / {@link #getValidationExceptions()}
 * rather than thrown.
 *
 * @author <EMAIL>
 */
public class StandardForm implements Form {

    /** Maximum number of body bytes handed to the URL-encoded parser. */
    public static final int MAX_LENGTH = Integer.MAX_VALUE;

    /** Parser per field name; also defines which fields are read at all. */
    Map<String, FormElementParser> parsers;
    /** Optional per-field validators, keyed by field name. */
    Map<String, FormElementValidator> elementValidators;
    /** Parsed values per field name (a field may be multi-valued). */
    Map<String, List<Object>> params;
    /** Element- and form-level validation failures collected during map(). */
    List<ValidationException> validationExceptions;
    /** Parse failures collected during map(). */
    List<ParseException> parseExceptions;
    /** Validators that inspect the form as a whole. */
    List<FormValidator> formValidators;

    public StandardForm() {
        params = new HashMap<>();
        parsers = new HashMap<>();
        elementValidators = new HashMap<>();
        validationExceptions = new ArrayList<>();
        formValidators = new ArrayList<>();
        parseExceptions = new ArrayList<>();
    }

    /** Declares a field with its parser. */
    protected void define(String field, FormElementParser parser) {
        parsers.put(field, parser);
    }

    /** Declares a field with its parser and a per-element validator. */
    protected void define(String field, FormElementParser parser, FormElementValidator validator) {
        define(field, parser);
        elementValidators.put(field, validator);
    }

    /** Registers a whole-form validator. */
    protected void define(FormValidator validator) {
        formValidators.add(validator);
    }

    @Override
    public void map(HttpServletRequest request) {
        fillForm(request);
        validateElements();
        validateForm();
    }

    /**
     * Collects raw string parameters (query string plus URL-encoded body),
     * then parses the values of every declared field.
     */
    void fillForm(HttpServletRequest request) {
        Map<String, List<String>> paramMap = new HashMap<>();
        request.getParameterMap().forEach((k, v) -> {
            paramMap.put((String) k, Arrays.asList((String[]) v));
        });
        URLEncodedParser parser = new URLEncodedParser();
        try {
            // Body parameters override same-named query parameters.
            parser.parse(request.getInputStream(), MAX_LENGTH).forEach((k, v) -> {
                paramMap.put(k, v);
            });
        } catch (IOException ex) {
            // Best-effort: an unreadable/absent body simply leaves the
            // container-provided parameters in place.
        }
        parsers.keySet().forEach((key) -> {
            List<Object> list = new ArrayList<>();
            List<String> requestParams = paramMap.get(key);
            if (requestParams != null) {
                requestParams.forEach((s) -> {
                    try {
                        list.add(parsers.get(key).parse(s, request.getLocale()));
                    } catch (ParseException ex) {
                        parseExceptions.add(ex);
                    }
                });
                params.put(key, list);
            }
        });
    }

    /** Runs each field's validator against the field's first parsed value. */
    void validateElements() {
        elementValidators.keySet().forEach((key) -> {
            try {
                elementValidators.get(key).validate(key, get(key));
            } catch (ValidationException ex) {
                validationExceptions.add(ex);
            }
        });
    }

    /** Runs the whole-form validators. */
    void validateForm() {
        formValidators.forEach((validator) -> {
            try {
                validator.validate(this);
            } catch (ValidationException ex) {
                validationExceptions.add(ex);
            }
        });
    }

    /**
     * Returns the first parsed value of a field, or null when the field is
     * absent or none of its values could be parsed.
     */
    protected <DesiredType> DesiredType get(String field) {
        List<Object> entry = params.get(field);
        // Bug fix: when every raw value failed to parse, the field maps to
        // an EMPTY list and entry.get(0) used to throw
        // IndexOutOfBoundsException; treat that case as "no value".
        if (entry == null || entry.isEmpty()) {
            return null;
        }
        return (DesiredType) entry.get(0);
    }

    /** Returns all parsed values of a field (empty list when absent). */
    protected <DesiredType> DesiredType getValues(String field) {
        if (params.get(field) == null) {
            return (DesiredType) Collections.EMPTY_LIST;
        }
        return (DesiredType) params.get(field);
    }

    public List<ValidationException> getValidationExceptions() {
        return validationExceptions;
    }

    public List<ParseException> getParseExceptions() {
        return parseExceptions;
    }

    public Map<String, FormElementValidator> getElementValidators() {
        return elementValidators;
    }

    public Map<String, FormElementParser> getParsers() {
        return parsers;
    }

    public List<FormValidator> getFormValidators() {
        return formValidators;
    }

    /** True when map() produced neither parse nor validation failures. */
    public boolean isValid() {
        return parseExceptions.isEmpty() && validationExceptions.isEmpty();
    }
}
|
/*
 *	mdcommit() -- Commit a transaction.
 *
 *	All changes to magnetic disk relations must be forced to stable
 *	storage.  This routine walks the private table of file descriptors
 *	and syncs every descriptor that has unsynced writes, clearing its
 *	dirty flag on success.
 *
 *	Returns SM_SUCCESS, or SM_FAIL (with errno set as appropriate) as
 *	soon as any sync fails.
 */
int
mdcommit()
{
	MdfdVec *vec;
	int fd;

	for (fd = 0; fd < CurFd; fd++) {
		for (vec = &Md_fdvec[fd]; vec != (MdfdVec *) NULL; vec = vec->mdfd_chain) {
			if (!(vec->mdfd_flags & MDFD_DIRTY))
				continue;
			if (FileSync(vec->mdfd_vfd) < 0)
				return (SM_FAIL);
			vec->mdfd_flags &= ~MDFD_DIRTY;
		}
	}

	return (SM_SUCCESS);
}
Ellen Page's filmography and actor connections
Ellen Page has starred in 32 movies. The 5 most recent movies Ellen Page was in are listed below.
Next is the list of 964 actors/actresses that Ellen Page has worked with spread over 49 pages. The list is sorted by the people Ellen Page has worked most frequently with. When you find the person you are looking for, click on the link to view a list of movies they have worked in together.
If you decide instead to click on a movie with Ellen Page, it will display the top-billed cast. If a cast member has starred with Ellen Page in other movies, a list of those movies will display beneath that cast member.
See full filmography
Rainn Wilson and Ellen Page have starred in 4 movies together. Their first film was Juno in 2007. The most recent movie that Rainn Wilson and Ellen Page starred together was Robodog in 2016
Allison Janney and Ellen Page have starred in 3 movies together. Their first film was Juno in 2007. The most recent movie that Allison Janney and Ellen Page starred together was Tallulah in 2016
Kevin Curran and Ellen Page have starred in 3 movies together. Their first film was Marion Bridge in 2002. The most recent movie that Kevin Curran and Ellen Page starred together was Love That Boy in 2003
Elissa Sursara and Ellen Page have starred in 3 movies together. Their first film was Marion Bridge in 2002. The most recent movie that Elissa Sursara and Ellen Page starred together was Touch & Go in 2003
Maury Chaykin and Ellen Page have starred in 3 movies together. Their first film was The Crying Booth in 2002. The most recent movie that Maury Chaykin and Ellen Page starred together was Wilby Wonderful in 2004
Sandra Oh and Ellen Page have starred in 3 movies together. Their first film was Wilby Wonderful in 2004. The most recent movie that Sandra Oh and Ellen Page starred together was Window Horses in 2016
Famke Janssen and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that Famke Janssen and Ellen Page starred together was X-Men: Days of Future Past in 2014
Kevin Bacon and Ellen Page have starred in 2 movies together. Their first film was Super in 2010. The most recent movie that Kevin Bacon and Ellen Page starred together was A Look Behind the Scenes: Super in 2011
Kelsey Grammer and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that Kelsey Grammer and Ellen Page starred together was X-Men: Days of Future Past in 2014
Liv Tyler and Ellen Page have starred in 2 movies together. Their first film was Super in 2010. The most recent movie that Liv Tyler and Ellen Page starred together was A Look Behind the Scenes: Super in 2011
Evan Peters and Ellen Page have starred in 2 movies together. Their first film was An American Crime in 2007. The most recent movie that Evan Peters and Ellen Page starred together was X-Men: Days of Future Past in 2014
Cameron Bright and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that Cameron Bright and Ellen Page starred together was Juno in 2007
James Gunn and Ellen Page have starred in 2 movies together. Their first film was Super in 2010. The most recent movie that James Gunn and Ellen Page starred together was A Look Behind the Scenes: Super in 2011
Hugh Jackman and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that Hugh Jackman and Ellen Page starred together was X-Men: Days of Future Past in 2014
Anna Paquin and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that Anna Paquin and Ellen Page starred together was X-Men: Days of Future Past in 2014
Cillian Murphy and Ellen Page have starred in 2 movies together. Their first film was Inception in 2010.
Julian Richings and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that Julian Richings and Ellen Page starred together was The Tracey Fragments in 2007
Rick LaCour and Ellen Page have starred in 2 movies together. Their first film was Super in 2010. The most recent movie that Rick LaCour and Ellen Page starred together was The East in 2013
James Marsden and Ellen Page have starred in 2 movies together. Their first film was X-Men: The Last Stand in 2006. The most recent movie that James Marsden and Ellen Page starred together was X-Men: Days of Future Past in 2014
Joseph Rutten and Ellen Page have starred in 2 movies together. Their first film was Marion Bridge in 2002. The most recent movie that Joseph Rutten and Ellen Page starred together was Touch & Go in 2003
1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
#
# Copyright (c) 2020-2023, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from cuml.dask.common.dask_arr_utils import to_dask_cudf
import pytest
from cuml.dask.datasets.blobs import make_blobs
from cuml.dask.common.input_utils import DistributedDataHandler
import dask.array as da
from cuml.internals.safe_imports import gpu_only_import
cp = gpu_only_import("cupy")
@pytest.mark.mg
@pytest.mark.parametrize("nrows", [1e4])
@pytest.mark.parametrize("ncols", [10])
@pytest.mark.parametrize("n_parts", [2, 23])
@pytest.mark.parametrize("input_type", ["dataframe", "array", "series"])
@pytest.mark.parametrize("colocated", [True, False])
def test_extract_partitions_worker_list(
    nrows, ncols, n_parts, input_type, colocated, client
):
    """DistributedDataHandler must expose exactly one GPU future per
    input partition, for every supported input type."""
    # A series input is produced through the dataframe path and narrowed
    # to its first column afterwards.
    adj_input_type = "dataframe" if input_type == "series" else input_type

    X_arr, y_arr = make_blobs(
        n_samples=int(nrows), n_features=ncols, n_parts=n_parts
    )

    if adj_input_type == "dataframe":
        X = to_dask_cudf(X_arr)
        y = to_dask_cudf(y_arr)
    elif adj_input_type == "array":
        X, y = X_arr, y_arr
    if input_type == "series":
        X = X[X.columns[0]]

    data = (X, y) if colocated else X
    ddh = DistributedDataHandler.create(data, client)

    parts = [part for _worker, part in ddh.gpu_futures]
    assert len(parts) == n_parts
@pytest.mark.mg
@pytest.mark.parametrize("nrows", [24])
@pytest.mark.parametrize("ncols", [2])
@pytest.mark.parametrize("n_parts", [2, 23])
@pytest.mark.parametrize("input_type", ["dataframe", "array", "series"])
@pytest.mark.parametrize("colocated", [True, False])
def test_extract_partitions_shape(
    nrows, ncols, n_parts, input_type, colocated, client
):
    """Each extracted GPU partition must keep the row count of the
    corresponding input partition (for X, and for y when colocated)."""
    # Series inputs are generated via the dataframe path and narrowed to
    # a single column below.
    adj_input_type = "dataframe" if input_type == "series" else input_type
    X_arr, y_arr = make_blobs(
        n_samples=nrows, n_features=ncols, n_parts=n_parts
    )
    if adj_input_type == "dataframe" or input_type == "dataframe":
        X = to_dask_cudf(X_arr)
        y = to_dask_cudf(y_arr)
    elif input_type == "array":
        X, y = X_arr, y_arr
    if input_type == "series":
        X = X[X.columns[0]]
    # Expected per-partition lengths, computed from the collection itself.
    if input_type == "dataframe" or input_type == "series":
        X_len_parts = X.map_partitions(len).compute()
        y_len_parts = y.map_partitions(len).compute()
    elif input_type == "array":
        X_len_parts = X.chunks[0]
        y_len_parts = y.chunks[0]
    if colocated:
        ddh = DistributedDataHandler.create((X, y), client)
        parts = [part.result() for worker, part in ddh.gpu_futures]
        for i in range(len(parts)):
            assert (parts[i][0].shape[0] == X_len_parts[i]) and (
                parts[i][1].shape[0] == y_len_parts[i]
            )
    else:
        ddh = DistributedDataHandler.create(X, client)
        parts = [part.result() for worker, part in ddh.gpu_futures]
        for i in range(len(parts)):
            assert parts[i].shape[0] == X_len_parts[i]
@pytest.mark.mg
@pytest.mark.parametrize("nrows", [24])
@pytest.mark.parametrize("ncols", [2])
@pytest.mark.parametrize("n_parts", [2, 12])
@pytest.mark.parametrize("X_delayed", [True, False])
@pytest.mark.parametrize("y_delayed", [True, False])
@pytest.mark.parametrize("colocated", [True, False])
def test_extract_partitions_futures(
    nrows, ncols, n_parts, X_delayed, y_delayed, colocated, client
):
    """One GPU future per chunk must be produced whether the inputs are
    delayed dask arrays or already-persisted futures."""
    X = cp.random.standard_normal((nrows, ncols))
    y = cp.random.standard_normal((nrows,))
    # nrows / n_parts gives n_parts equal chunks (both 2 and 12 divide 24).
    X = da.from_array(X, chunks=(nrows / n_parts, -1))
    y = da.from_array(y, chunks=(nrows / n_parts,))
    if not X_delayed:
        X = client.persist(X)
    if not y_delayed:
        y = client.persist(y)
    if colocated:
        ddh = DistributedDataHandler.create((X, y), client)
    else:
        ddh = DistributedDataHandler.create(X, client)
    parts = list(map(lambda x: x[1], ddh.gpu_futures))
    assert len(parts) == n_parts
|
def size_in_um_for_plot(self) -> Tuple[float, float, float, float]:
    """Return plot extents ``(x_min, x_max, y_min, y_max)`` in micrometres.

    The horizontal extent is the full scan width; the vertical extent is
    scaled by the fraction of scan lines actually captured,
    ``(lines - missing_lines) / lines``.  Attributes are assumed to hold
    metres (hence the 1e6 factor) -- TODO confirm against the enclosing
    class.
    """
    x_max_um = self.max_width * 1e6
    y_max_um = self.max_height * (self.lines - self.missing_lines) / self.lines * 1e6
    return (0.0, x_max_um, 0.0, y_max_um)
Study on spatial correlation mechanism of industries between different major functional areas based on grey target theory
It is an important measure for China to implement the strategy of major functional areas (MFA) to promote the optimization and upgrading of industry and the development of regional integration. In order to study the spatial correlation structure between the optimization development zone and the key development zone, this paper chooses the Beijing-Tianjin-Hebei Metropolitan Region (BTHMR) and the Ha-Chang City Group (HCCG) as examples. Based on a cross-regional input-output table and Social Network Analysis (SNA), a regional industrial spatial correlation network model is established. By analyzing the characteristics of the network structure and the function of each block, the industrial spatial correlation structure between the optimization development zone and the key development zone is studied. Furthermore, with grey target contribution analysis, the major causes of the formation of the industrial spatial correlation structure are discussed based on the characteristics of the industries in the spatial correlation network.
/**
* @className: XmlHandler
* @description:
* @author: onnoA
* @date: 2021/9/23
**/
public class XmlHandler {
//整个xml的节点数据(把设置的每个层级XmlFormat都添加到这个集合中)
public List<XmlFormat> documentElements = new ArrayList<XmlFormat>();
/** 生成Dom树
* 返回Document(整个Dom Tree)
* */
private Document createDom(){
DocumentBuilderFactory factory = null;
DocumentBuilder builder = null;
Document document = null;
try {
factory = DocumentBuilderFactory.newInstance(); //创建DocumentBuilderFactory工厂对象
builder = factory.newDocumentBuilder(); //通过工厂对象, 创建DocumentBuilder制作对象
document = builder.newDocument(); //通过制作对象, 创建一个Document对象,该对象代表一个XML文件
document.setXmlStandalone(true); //设置XMLstandalone, true为没有dtd和schema作为该XML的说明文档
//创建根节点
Element root = document.createElement(documentElements.get(0).getRootName());
document.appendChild(root);
//循环创建整个DOM树
for(int i = 0; i < documentElements.size(); i++){
XmlFormat format = documentElements.get(i); //获取xml的一个完整层级
//循环创建层级节点
for(Entry<String, Object> entryMultiply : format.getMultiplyNames().entrySet()){
if(entryMultiply.getValue() != null && !entryMultiply.getValue().toString().equalsIgnoreCase("")){ // 层级节点键/值(没有属性节点)
Element multiplyNode = document.createElement(entryMultiply.getKey()); // 创建一个层级节点
multiplyNode.setTextContent(entryMultiply.getValue().toString()); // 设置该层级节点的值
root.appendChild(multiplyNode);
} else { // 层级节点键(带有属性节点)
Element multiplyNode = document.createElement(entryMultiply.getKey()); // 创建一个层级节点
root.appendChild(multiplyNode);
//循环创建属性节点
for(Entry<String, Object> entryProperty : format.getPropertyNames().entrySet()){
Element propertyNode = document.createElement(entryProperty.getKey()); // 创建一个属性节点
propertyNode.setTextContent(entryProperty.getValue().toString()); // 设置该属性节点的值
multiplyNode.appendChild(propertyNode);
}
root.appendChild(multiplyNode); //设置层级节点到根节点
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
return document;
}
/** 生成xml字符串
* 参数: Document树对象
* 返回String: 整个xml字符串
* */
private String createXmlToString(Document document){
String xmlString = null;
try {
// 创建TransformerFactory工厂对象
TransformerFactory transFactory = TransformerFactory.newInstance();
// 通过工厂对象, 创建Transformer对象
Transformer transformer = transFactory.newTransformer();
transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
//使Xml自动换行, 并自动缩进
transformer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, "");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4"); //中间的参数网址固定写法(这里还没搞懂)
transformer.setOutputProperty(OutputKeys.INDENT, "yes"); //是否设置缩进(indent: yes|no)
// 创建DOMSource对象并将Document加载到其中
DOMSource domSource = new DOMSource(document);
ByteArrayOutputStream bos = new ByteArrayOutputStream();
// 使用Transformer的transform()方法将DOM树转换成XML
transformer.transform(domSource, new StreamResult(bos));
xmlString = bos.toString();
} catch (TransformerException e) {
e.printStackTrace();
}
return xmlString;
}
/** Writes the DOM Document to an XML file at the given path.
 * Params: url - target file path; document - the DOM tree to write.
 * Returns: a success/failure message.
 * */
private String createXmlToFile(String url, Document document){
    String message = null;
    try{
        TransformerFactory transFactory = TransformerFactory.newInstance();
        Transformer transformer = transFactory.newTransformer();
        // Match createXmlToString(): explicit UTF-8 plus pretty-printing
        transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        transformer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, "");
        transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
        transformer.setOutputProperty(OutputKeys.INDENT, "yes");
        DOMSource domSource = new DOMSource(document);
        File file = new File(url);
        if (!file.exists()) {
            file.createNewFile();  // create the target file if absent
        }
        // BUGFIX: try-with-resources closes the stream; the previous code
        // leaked the FileOutputStream on both success and failure paths
        try (FileOutputStream out = new FileOutputStream(file)) {
            transformer.transform(domSource, new StreamResult(out));
        }
        message = "生成本地XML成功!";
    }catch(Exception e){
        e.printStackTrace();
        message = "生成本地XML失败!";
    }
    return message;
}
/** Interface method (overload): builds the XML as an in-memory string.
 * Returns: the XML string, or null when no format/content is registered.
 * */
public String createXml(){
    if (documentElements == null || documentElements.size() == 0) {
        return null;  // nothing registered, nothing to serialize
    }
    Document document = createDom();         // build the DOM tree
    return createXmlToString(document);      // serialize it to a string
}
/** Interface method (overload): writes the XML to a local file.
 * Param: url - destination file path.
 * Returns: success/failure message, or null when no content is registered.
 * */
public String createXml(String url){
    if (documentElements == null || documentElements.size() == 0) {
        return null;  // nothing registered, nothing to write
    }
    Document document = createDom();          // build the DOM tree
    return createXmlToFile(url, document);    // write it to disk
}
} |
Sentiment Analysis Using Machine Learning Algorithms and Text Mining to Detect Symptoms of Mental Difficulties Over Social Media
A recent British study of people between the ages of 14 and 35 has shown that social media has a negative impact on mental health. The purpose of the paper is to detect the behavior of people with mental disorders on social media, in order to help Twitter users overcome mental health problems such as anxiety, phobia, depression, paranoia, etc. For this, the author used text mining and machine learning algorithms (naïve Bayes, k-nearest neighbours) to analyse tweets. The obtained results were validated using different evaluation measures such as f-measure, recall, precision, entropy, etc.
// New returns an empty circular buffer with the given capacity.
func New(cap int) *Circbuf {
if cap < 1 {
panic("runtime error: circbuf.New: len out of range")
}
return &Circbuf{len: 0, cap: cap, items: make([]interface{}, cap)}
} |
Three known attempts to make a map of x-risks prevention in the field of science exist:
1. First is the list from the Global Catastrophic Risks Institute in 2012-2013, and many links there are already not working:
2. The second was done by S. Armstrong in 2014
3. And the most beautiful and useful map was created by Andrew Critch. But its ecosystem ignores organizations which have a different view of the nature of global risks (that is, they share the value of x-risks prevention, but have another world view).
In my map I have tried to add all currently active organizations which share the value of global risks prevention.
It also regards some active independent people as organizations, if they have an important blog or field of research, but not all people are mentioned in the map. If you think that you (or someone) should be in it, please write to me at [email protected]
I used only open sources and public statements to learn about people and organizations, so I can’t provide information on the underlying net of relations.
I tried to give all organizations a short description based on its public statement and also my opinion about its activity.
In general it seems that all small organizations are focused on their collaboration with larger ones, that is MIRI and FHI, and small organizations tend to ignore each other; this is easily explainable by social signaling theory. Another explanation is that larger organizations have a greater ability to make contacts.
It also appears that there are several organizations with similar goal statements.
It looks like the most cooperation exists in the field of AI safety, but most of the structure of this cooperation is not visible to the external viewer, in contrast to Wikipedia, where contributions of all individuals are visible.
It seems that the community in general lacks three things: a united internet forum for public discussion, an x-risks wikipedia and an x-risks related scientific journal.
Ideally, a forum should be used to brainstorm ideas, a scientific journal to publish the best ideas, peer review them and present them to the outer scientific community, and a wiki to collect results.
Currently it seems more like each organization is interested in creating its own research and hoping that someone will read it. Each small organization seems to want to be the only one to present the solutions to global problems and gain full attention from the UN and governments. It raises the problem of noise and rivalry; and also raises the problem of possible incompatible solutions, especially in AI safety.
The pdf is here: http://immortality-roadmap.com/riskorg5.pdf |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileChooser.actions;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.util.ExecUtil;
import com.intellij.ide.lightEdit.LightEditCompatible;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.fileChooser.FileSystemTree;
import com.intellij.openapi.util.NullableLazyValue;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.SystemProperties;
import org.jetbrains.annotations.Nullable;
import java.io.File;
public class GotoDesktopDirAction extends FileChooserAction implements LightEditCompatible {
  /** Lazily resolved, cached desktop directory (null when it cannot be found). */
  private final NullableLazyValue<VirtualFile> myDesktopDirectory = new NullableLazyValue<>() {
    @Nullable
    @Override
    protected VirtualFile compute() {
      return getDesktopDirectory();
    }
  };

  @Override
  protected void actionPerformed(final FileSystemTree tree, AnActionEvent e) {
    final VirtualFile desktop = myDesktopDirectory.getValue();
    if (desktop == null) return;
    // select the desktop node in the chooser tree, then expand it
    tree.select(desktop, () -> tree.expand(desktop, null));
  }

  @Override
  protected void update(FileSystemTree tree, AnActionEvent e) {
    VirtualFile desktop = myDesktopDirectory.getValue();
    e.getPresentation().setEnabled(desktop != null && tree.isUnderRoots(desktop));
  }

  /** Resolves ~/Desktop, falling back to {@code xdg-user-dir DESKTOP} on XDG systems. */
  @Nullable
  private static VirtualFile getDesktopDirectory() {
    File dir = new File(SystemProperties.getUserHome(), "Desktop");
    if (!dir.isDirectory() && SystemInfo.hasXdgOpen()) {
      String xdgPath = ExecUtil.execAndReadLine(new GeneralCommandLine("xdg-user-dir", "DESKTOP"));
      if (xdgPath != null) {
        dir = new File(xdgPath);
      }
    }
    if (!dir.isDirectory()) {
      return null;
    }
    return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(dir);
  }
}
|
<filename>src/components/constants.ts
/** Validation message builders (Portuguese locale). */
export const message = {
  required(name: string | undefined) {
    return `${name} é um campo obrigatório`;
  },
  email() {
    return `Este email é inválido`;
  },
};
|
<reponame>jlifeby/seasoncard-web<filename>service-mongo/src/main/java/com/jlife/abon/service/UserService.java<gh_stars>1-10
package com.jlife.abon.service;
import com.jlife.abon.entity.Card;
import com.jlife.abon.entity.Client;
import com.jlife.abon.entity.PhoneChanging;
import com.jlife.abon.entity.User;
import com.jlife.abon.enumeration.Role;
import com.jlife.abon.enumeration.UserState;
import com.jlife.abon.error.ApiErrorCode;
import com.jlife.abon.error.NotAllowedException;
import com.jlife.abon.error.ResourceNotFoundException;
import com.jlife.abon.interfaces.Existence;
import com.jlife.abon.service.sms.SmsService;
import org.apache.commons.lang3.RandomStringUtils;
import org.jetbrains.annotations.NotNull;
import org.joda.time.DateTime;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import static com.jlife.abon.util.SecurityUtilKt.encodeBCrypt;
/**
* @author <NAME>
*/
@Service
public class UserService extends AbstractService {
private static final int LIMIT_RECOVERING_PASSWORD_WITH_PHONE = 3;
@Autowired
@Qualifier(DEFAULT_SMS_SERVICE)
private SmsService smsService;
public User getUser(String userId) {
User user = userRepository.findOne(userId);
if (user == null) {
throw new ResourceNotFoundException(ApiErrorCode.USER_NOT_FOUND, userId);
}
return user;
}
public User getUserByEmail(String email) {
User user = userRepository.findByEmail(email);
if (user == null) {
throw new ResourceNotFoundException(ApiErrorCode.USER_NOT_FOUND, email);
}
return user;
}
/**
* Returns id to create confirmation link
*
* @param userId
* @param newEmail
* @return
*/
public String setNewEmail(String userId, String newEmail) {
User userWithThisEmail = userRepository.findByEmail(newEmail);
if (userWithThisEmail != null) {
if (userWithThisEmail.getId().equals(userId)) {
throw new NotAllowedException(ApiErrorCode.YOU_ALREADY_USE_THIS_EMAIL, newEmail);
}
throw new NotAllowedException(ApiErrorCode.USER_ALREADY_EXISTS_WITH_EMAIL, newEmail);
}
User user = this.getUser(userId);
user.setNewEmail(newEmail);
String confirmationId = UUID.randomUUID().toString();
user.setConfirm(confirmationId);
Set<UserState> userStates = user.getUserStates();
userStates.add(UserState.WAITING_FOR_CONFIRM_EMAIL);
user.setUserStates(userStates);
userRepository.save(user);
return confirmationId;
}
public void confirmChangingEmail(String confirmationId) {
User user = userRepository.findByConfirm(confirmationId);
if (user == null) {
throw new ResourceNotFoundException(ApiErrorCode.OBJECT_TO_CONFIRM_DOS_NOT_EXIST, confirmationId);
}
String newEmail = user.getNewEmail();
User userWithThisEmail = userRepository.findByEmail(newEmail);
if (userWithThisEmail != null) {
throw new NotAllowedException(ApiErrorCode.USER_ALREADY_EXISTS_WITH_EMAIL, newEmail);
}
Set<UserState> userStates = user.getUserStates();
userStates.remove(UserState.WAITING_FOR_CONFIRM_EMAIL);
userStates.add(UserState.CONFIRMED_EMAIL);
user.setUserStates(userStates);
user.setConfirm(null);
user.setEmail(newEmail);
user.setNewEmail(null);
userRepository.save(user);
}
/**
* Update password for user
*
* @param userId
* @param newPassword
* @return
*/
public void setNewPasswordByUser(String userId, String newPassword) {
User user = getUser(userId);
if (!user.hasAlreadyConfirmedEmail()) {
throw new NotAllowedException(ApiErrorCode.USER_SHOULD_CONFIRM_EMAIL_BEFORE_CHANGING_PASSWORD, userId);
}
String hashedPassword = new BCryptPasswordEncoder().encode(newPassword);
Set<UserState> userStates = user.getUserStates();
user.setPassword(hashedPassword);
if (!userStates.contains(UserState.CHANGED_INIT_PASSWORD)) {
userStates.add(UserState.CHANGED_INIT_PASSWORD);
user.setUserStates(userStates);
}
userRepository.save(user);
}
/**
* Update password for user by external admin, business logic
*
* @param userId
* @param newPassword
* @return
*/
public void setNewPasswordByExternal(String userId, String newPassword) {
User user = getUser(userId);
String hashedPassword = <PASSWORD>BCrypt(newPassword);
user.setPassword(hashedPassword);
userRepository.save(user);
}
public boolean authenticate(String userId, String currentPassword) {
User user = userRepository.findOne(userId);
String oldPassword = user.getPassword();
BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
return encoder.matches(currentPassword, oldPassword);
}
/**
* Returns id to recover password link
*
* @param email
* @return
*/
public String setRecoveringId(String email) {
User user = this.getUserByEmail(email);
String recoveringId = UUID.randomUUID().toString();
user.setRecoveringId(recoveringId);
userRepository.save(user);
return recoveringId;
}
/**
* Sets new password for user using secret recovering id
*
* @param recoveringId
* @param newPassword
* @return
*/
public void setNewPasswordByRecovering(String recoveringId, String newPassword) {
User user = userRepository.findByRecoveringId(recoveringId);
if (user == null) {
throw new ResourceNotFoundException(ApiErrorCode.RECOVERING_IS_NOT_ACTIVE_OR_DOS_NOT_EXIST, recoveringId);
}
String hashedPassword = new BCryptPasswordEncoder().encode(newPassword);
user.setPassword(<PASSWORD>Password);
user.setRecoveringId(null);
userRepository.save(user);
}
/**
* Checks for existing recovering id
*
* @param recoveringId
* @return
*/
public boolean isExistRecoveringId(String recoveringId) {
User user = userRepository.findByRecoveringId(recoveringId);
return user != null;
}
/**
* Mark user as accepted user agreement.
*
* @param userId
*/
public void acceptAgreementByUser(String userId) {
User user = userRepository.findOne(userId);
Set<UserState> userStates = user.getUserStates();
if (!userStates.contains(UserState.ACCEPTED_AGREEMENT)) {
userStates.add(UserState.ACCEPTED_AGREEMENT);
user.setUserStates(userStates);
}
userRepository.save(user);
}
public User getUserByCardUUD(Long cardUUID) {
User user = userRepository.findOneByCardUUID(cardUUID);
if (user == null) {
throw new ResourceNotFoundException(ApiErrorCode.USER_NOT_FOUND_WITH_CARD_UUID, String.valueOf(cardUUID));
}
return user;
}
public void recoverPasswordWithPhone(String phone, Long cardUUID) {
if (smsService.countRecoveringRequestsForToday(phone) >= LIMIT_RECOVERING_PASSWORD_WITH_PHONE) {
throw new NotAllowedException(ApiErrorCode.ACHIEVED_LIMIT_OF_RECOVERING_PASSWORD_FOR_PHONE, phone,
String.valueOf(LIMIT_RECOVERING_PASSWORD_WITH_PHONE));
}
User existedUser = userRepository.findOneByPhoneAndCardUUID(phone, cardUUID);
if (existedUser == null) {
throw new ResourceNotFoundException(ApiErrorCode.USER_NOT_FOUND_WITH_PHONE_AND_CARD_UUID, phone, String.valueOf(cardUUID));
}
String password = <PASSWORD>StringUtils.randomAlphanumeric(6).toLowerCase();
String hashedPassword = new BCryptPasswordEncoder().encode(password);
existedUser.setPassword(hashedPassword);
userRepository.save(existedUser);
smsService.sendRecoveringPasswordBySms(phone, existedUser.getId(), cardUUID, password);
}
public User updateUserSettings(String userId, boolean allowedSmsReceiving, boolean allowedEmailReceiving) {
User user = getUser(userId);
user.setAllowedEmailReceiving(allowedEmailReceiving);
user.setAllowedSmsReceiving(allowedSmsReceiving);
return userRepository.save(user);
}
public User setNewPhone(long cardUUID, String newPhone, String newPassword, String companyId) {
User user = getUserByCardUUD(cardUUID);
PhoneChanging phoneChanging = new PhoneChanging();
phoneChanging.setOldPhone(user.getPhone());
Client client = clientRepository.findOneByCompanyIdAndCardUUID(companyId, cardUUID);
if (client == null) {
throw new NotAllowedException(ApiErrorCode.CLIENT_NOT_FOUND_WITH_CARD_UUID, String.valueOf(cardUUID));
}
user.setPhone(newPhone);
String hashedPassword = <PASSWORD>(<PASSWORD>Password);
user.setPassword(<PASSWORD>Password);
List<PhoneChanging> phoneChangings = user.getPhoneChangings();
phoneChanging.setNewPhone(newPhone);
phoneChangings.add(phoneChanging);
phoneChanging.setCreatedBy(this.auditor.getCurrentAuditor());
phoneChanging.setCreatedDate(new DateTime());
user.setPhoneChangings(phoneChangings);
return userRepository.save(user);
}
@NotNull
public User selfRegisterFromClient(@NotNull Client client) {
Assert.notNull(client.getName());
Assert.notNull(client.getPhone());
Assert.notNull(client.getCompanyId());
if (client.getEmail() != null && !this.isFreeEmailForUser(client.getEmail())) {
throw new NotAllowedException(ApiErrorCode.USER_ALREADY_EXISTS_WITH_EMAIL, client.getEmail());
}
Card nextFreeVirtualCard = cardService.occupyFreeVirtualCardForSelfRegister(client.getCompanyId());
User user = new User();
user.setName(client.getName());
user.setLastName(client.getLastName());
user.setPhone(client.getPhone());
user.setNewEmail(client.getEmail());
user.setLogoPath(client.getLogoPath());
user.setPotential(true);
user.setCardUUID(nextFreeVirtualCard.getCardUUID());
user.getRoles().add(Role.ROLE_USER.name());
user.setStatus(Existence.ACTIVE_STATUS);
User storedUser = userRepository.save(user);
nextFreeVirtualCard.setUserId(storedUser.getId());
nextFreeVirtualCard.setInitializingCompany(client.getCompanyId());
cardRepository.save(nextFreeVirtualCard);
return storedUser;
}
public boolean isFreeEmailForUser(String email) {
User userWithThisEmail = userRepository.findByEmail(email);
return userWithThisEmail == null;
}
@NotNull
public User makeUserReal(long cardUUID) {
User user = getUserByCardUUD(cardUUID);
user.setPotential(false);
return userRepository.save(user);
}
}
|
import { Stream, TransformOptions } from "stream";
/**
 * Options for {@link LineTransform}. When `autoDetect` is true, the first
 * chunk decides whether CRLF-to-LF conversion is needed at all.
 */
interface LineTransformOptions extends TransformOptions {
  autoDetect: boolean;
}
/**
 * Transform stream that converts CRLF ("\r\n") line endings into bare LF
 * ("\n"), passing everything else through unchanged. A trailing '\r' at a
 * chunk boundary is buffered until the next chunk reveals whether it is part
 * of a CRLF pair.
 */
export default class LineTransform extends Stream.Transform {
  private savedR: any; // a '\r' seen at the very end of the previous chunk, pending a peek at the next byte
  private autoDetect: boolean; // when true, the first chunk decides whether transformation is needed
  private transformNeeded = true; // false => chunks are passed through untouched
  private skipBytes = 0; // bytes still to drop from the head of the stream
  constructor(options?: LineTransformOptions) {
    super(options);
    this.savedR = null;
    this.autoDetect = options?.autoDetect || false;
  }
  // Pass-through used once auto-detection decided no conversion is needed.
  nullTransform(chunk: Buffer, encoding: BufferEncoding, cb: Function) {
    this.push(chunk);
    cb();
  }
  _transform(chunk: Buffer, encoding: BufferEncoding, cb: Function) {
    // Auto-detection (first chunk only): a leading '\n' (0x0a) means the
    // source already emits bare LF endings -- skip that byte and pass data
    // through untransformed; otherwise assume CRLF input and drop 2 bytes.
    // NOTE(review): the exact skip counts (1 vs 2) encode a protocol detail
    // not visible in this file -- confirm against the callers.
    if (this.autoDetect) {
      if (chunk[0] === 0x0a) {
        this.transformNeeded = false;
        this.skipBytes = 1;
      } else {
        this.skipBytes = 2;
      }
      this.autoDetect = false;
    }
    // Drop any remaining head bytes (may span several chunks).
    if (this.skipBytes) {
      const skip = Math.min(chunk.length, this.skipBytes);
      chunk = chunk.slice(skip);
      this.skipBytes -= skip;
    }
    if (!chunk.length) {
      return cb();
    }
    if (!this.transformNeeded) {
      return this.nullTransform(chunk, encoding, cb);
    }
    let lo = 0;
    let hi = 0;
    // A '\r' saved from the previous chunk: emit it only if NOT followed by
    // '\n' (i.e. it was a lone carriage return, not part of a CRLF pair).
    if (this.savedR) {
      if (chunk[0] !== 0x0a) {
        this.push(this.savedR);
      }
      this.savedR = null;
    }
    const last = chunk.length - 1;
    // Scan for CRLF pairs; push the bytes before each '\r' and restart after
    // it, so the '\r' is dropped while the '\n' is kept.
    while (hi <= last) {
      if (chunk[hi] === 0x0d) {
        if (hi === last) {
          // chunk ends in '\r' -- defer the decision to the next chunk
          this.savedR = chunk.slice(last);
          break;
        } else if (chunk[hi + 1] === 0x0a) {
          this.push(chunk.slice(lo, hi));
          lo = hi + 1;
        }
      }
      hi += 1;
    }
    // Flush the remaining tail of the chunk.
    if (hi !== lo) {
      this.push(chunk.slice(lo, hi));
    }
    cb();
  }
  _flush(cb: Function) {
    // End of stream: a pending lone '\r' is real data, emit it.
    if (this.savedR) {
      this.push(this.savedR);
    }
    return cb();
  }
}
// GetRoomIssue returns an alert/true from ctx if there is one, and false if there isn't.
func GetRoomIssue(ctx context.Context) (structs.RoomIssue, bool) {
v, ok := ctx.Value(roomIssue).(structs.RoomIssue)
return v, ok
} |
import * as _ from 'lodash';
import {Outcome, StateVariables} from '../types';
/** Recursively makes every property of T optional. */
export type DeepPartial<T> = {
  [P in keyof T]?: DeepPartial<T[P]>;
};

/** Post-processing hook applied to the value a fixture constructs. */
type Modifier<T, S extends T = T> = (result: T, props?: any) => S;

/**
 * A fixture accepts two optional, positional arguments
 * - the first, mergeProps, will merge properties into the defaults
 * - the second, extendProps, will overwrite properties of the defaults
 *
 * An example of a property that you wouldn't want to merge is if you want to set a property
 * to `undefined`. In this case, merging the defaults {foo: {bar: 'baz'}} into {foo: undefined}
 * will result in {foo: {bar: 'baz'}}, but extending {foo: undefined} by the defaults
 * {foo: {bar: 'baz'}} will yield {foo: undefined}
 */
export type Fixture<T> = (mergeProps?: DeepPartial<T>, extendProps?: DeepPartial<T>) => T;
/**
 * Fixture factory: builds a {@link Fixture} around a set of default values.
 *
 * @param defaults default values the fixture starts from
 * @param modifier optional function that can post-process the constructed value
 *                 (e.g. overwrite an outcome array, or recompute a state hash
 *                 after the merged props changed the state)
 *
 * The returned fixture deep-merges `mergeProps` into a clone of the defaults,
 * then shallow-extends the result with `extendProps` (useful to force a
 * property to `undefined`, which a deep merge would ignore), and finally runs
 * the modifier.
 */
export const fixture = function <T>(defaults: T, modifier: Modifier<T> = _.identity): Fixture<T> {
  return (mergeProps?: DeepPartial<T>, extendProps?: DeepPartial<T>): T => {
    const merged = _.merge(_.cloneDeep(defaults), mergeProps);
    const extended = _.extend(merged, extendProps);
    return modifier(extended, mergeProps) as T;
  };
};
// We don't want to deep merge outcomes
// TODO: Should we just make the default outcome empty, making this function unnecessary?
export function overwriteOutcome<T extends StateVariables>(
  result: T,
  props?: {outcome: Outcome}
): T {
  const outcome = props?.outcome;
  if (outcome) {
    result.outcome = outcome;
  }
  return result;
}
|
//==========================================================================
// INSPECTOR.CC - part of
//
// OMNeT++/OMNEST
// Discrete System Simulation in C++
//
// Implementation of
// inspectors
//
//==========================================================================
/*--------------------------------------------------------------*
Copyright (C) 1992-2017 Andras Varga
Copyright (C) 2006-2017 OpenSim Ltd.
This file is distributed WITHOUT ANY WARRANTY. See the file
`license' for details on this and other legal matters.
*--------------------------------------------------------------*/
#include "mainwindow.h"
#include <cstring>
#include <cmath>
#include <cassert>
#include <algorithm>
#include <QtWidgets/QMenu>
#include <QtWidgets/QToolButton>
#include <QtGui/QContextMenuEvent>
#include <QtWidgets/QBoxLayout>
#include <QtWidgets/QToolBar>
#include "common/stringutil.h"
#include "common/stlutil.h"
#include "omnetpp/cobject.h"
#include "qtenvapp.h"
#include "qtenvdefs.h"
#include "qtutil.h"
#include "inspector.h"
#include "inspectorfactory.h"
#include "inspectorutil.h"
#include "genericobjectinspector.h"
using namespace omnetpp::common;
namespace omnetpp {
namespace qtenv {
//TODO these two functions are likely not needed any more
// Maps an inspector type code to its symbolic name; unknown codes yield "?".
const char *insptypeNameFromCode(int code)
{
    switch (code) {
        case INSP_DEFAULT: return "INSP_DEFAULT";
        case INSP_OBJECT: return "INSP_OBJECT";
        case INSP_GRAPHICAL: return "INSP_GRAPHICAL";
        case INSP_LOG: return "INSP_LOG";
        case INSP_OBJECTTREE: return "INSP_OBJECTTREE";
        default: return "?";
    }
}
// Inverse of insptypeNameFromCode(); returns -1 for an unknown name.
int insptypeCodeFromName(const char *name)
{
    if (strcmp(name, "INSP_DEFAULT") == 0) return INSP_DEFAULT;
    if (strcmp(name, "INSP_OBJECT") == 0) return INSP_OBJECT;
    if (strcmp(name, "INSP_GRAPHICAL") == 0) return INSP_GRAPHICAL;
    if (strcmp(name, "INSP_LOG") == 0) return INSP_LOG;
    if (strcmp(name, "INSP_OBJECTTREE") == 0) return INSP_OBJECTTREE;
    return -1;
}
//----
// About the WindowType of toplevel inspectors:
//
// For quite a while, we used Qt::Dialog. This had a few problems:
// - With certain window managers, all buttons on the title bar were missing.
// (see https://dev.omnetpp.org/bugs/view.php?id=953 )
// - On macOS, they weren't always kept on top of the MainWindow.
//
// Adding Qt::WindowStaysOnTopHint to a Qt::Dialog isn't a solution either,
// because on macOS, they will stay on top of _all_ the windows at all times,
// not just the MainWindow.
//
// With Qt::Tool, they have thinner border, and disappear on Mac when the
// MainWindow loses focus (the Qt::WA_MacAlwaysShowToolWindow flag does
// the same as above, not what we want), but this is the closest we ould get...
//
// We have also tried a few others (Drawer, Popup, etc...), but none were quite right.
//
// Tkenv did something else, it put Inspector windows on a so-called
// Utility Level, so they work best (and move with the MainWindow on Mac),
// but we couldn't find a way to do that without ugly ObjC code.
//
// This is a well-known and seemingly unsolved issue, see
// http://stackoverflow.com/questions/32216498 as well.
//
// Constructs an inspector. When isTopLevel is true it becomes a floating
// Qt::Tool window (see the long comment above about window type choice) that
// deletes itself on close and gains a Ctrl+W close shortcut; otherwise it is
// embedded into 'parent' via a zero-margin grid layout.
Inspector::Inspector(QWidget *parent, bool isTopLevel, InspectorFactory *f)
    : QWidget(parent, isTopLevel ? Qt::Tool : Qt::Widget)
{
    inspectDropdownMenu = new QMenu(this);
    copyDropdownMenu = new QMenu(this);
    factory = f;
    object = nullptr;
    type = f->getInspectorType();

    if (isTopLevel) {
        setAttribute(Qt::WA_DeleteOnClose);

        // allow closing the floating inspector with the usual keyboard shortcut
        QAction *closeAction = new QAction("Close Inspector", this);
        closeAction->setShortcut(Qt::CTRL + Qt::Key_W);
        connect(closeAction, &QAction::triggered, this, &QWidget::close);
        addAction(closeAction);
    }
    else {
        // embedded: the parent gets a margin-less layout holding only this widget
        auto layout = new QGridLayout(parent);
        parent->setLayout(layout);
        layout->setMargin(0);
        layout->addWidget(this, 0, 0, 1, 1);
    }
}
// Persists window geometry (toplevel inspectors only), frees the dropdown
// menus, and unregisters this inspector from the Qtenv application.
Inspector::~Inspector()
{
    if (isToplevelInspector())
        setPref(PREF_GEOM, geometry());
    delete inspectDropdownMenu;
    delete copyDropdownMenu;
    getQtenv()->inspectorDeleted(this);
}
// Returns the (demangled) C++ class name of this inspector instance.
const char *Inspector::getClassName() const
{
    return common::opp_typename(typeid(*this));
}
// Whether this inspector can display the given object; delegates to the
// factory that created the inspector.
bool Inspector::supportsObject(cObject *object) const
{
    return factory->supportsObject(object);
}
void Inspector::createInspectDropdownMenu()
{
if (!object)
return;
auto typeList = InspectorUtil::supportedInspTypes(object);
inspectDropdownMenu->clear();
inspectDropdownMenu->addAction("View in Embedded Object Inspector",
getQtenv()->getMainObjectInspector(), SLOT(goUpInto()))
->setData(QVariant::fromValue(object));
inspectDropdownMenu->addSeparator();
for(auto type : typeList) {
bool state = type == this->type;
QString label = InspectorUtil::getInspectMenuLabel(type);
QAction *action = inspectDropdownMenu->addAction(label, getQtenv(), SLOT(inspect()));
action->setDisabled(state);
action->setData(QVariant::fromValue(InspectActionData{object, type}));
}
}
void Inspector::createCopyDropdownMenu()
{
if (!object)
return;
copyDropdownMenu->clear();
QAction *action = copyDropdownMenu->addAction("Copy Pointer With Cast (for Debugger)", getQtenv(), SLOT(utilitiesSubMenu()));
action->setData(QVariant::fromValue(CopyActionData{object, COPY_PTRWITHCAST}));
action = copyDropdownMenu->addAction("Copy Pointer Value (for Debugger)", getQtenv(), SLOT(utilitiesSubMenu()));
action->setData(QVariant::fromValue(CopyActionData{object, COPY_PTR}));
copyDropdownMenu->addSeparator();
action = copyDropdownMenu->addAction("Copy Full Path", getQtenv(), SLOT(utilitiesSubMenu()));
action->setData(QVariant::fromValue(CopyActionData{object, COPY_FULLPATH}));
action = copyDropdownMenu->addAction("Copy Name", getQtenv(), SLOT(utilitiesSubMenu()));
action->setData(QVariant::fromValue(CopyActionData{object, COPY_FULLNAME}));
action = copyDropdownMenu->addAction("Copy Class Name", getQtenv(), SLOT(utilitiesSubMenu()));
action->setData(QVariant::fromValue(CopyActionData{object, COPY_CLASSNAME}));
}
// Switches the inspected object, rebuilding the context menus and notifying
// listeners. Throws if the new object is of an unsupported class.
// Note: doSetObject() is always followed by refresh(), see setObject().
void Inspector::doSetObject(cObject *obj)
{
    if (obj != nullptr && !supportsObject(obj))
        throw cRuntimeError("Inspector %s doesn't support objects of class %s", getClassName(), obj->getClassName());

    cObject *previous = object;
    object = obj;

    if (findObjects)
        findObjects->setData(QVariant::fromValue(object));

    // rebuild the object-dependent context menus
    createInspectDropdownMenu();
    createCopyDropdownMenu();

    Q_EMIT inspectedObjectChanged(object, previous);
}
// Like doSetObject(), but routes any exception to the Qtenv error dialog
// instead of letting it propagate.
void Inspector::doSetObjectSafe(cObject *obj)
{
    try {
        doSetObject(obj);
    }
    catch (std::exception& ex) {
        getQtenv()->showException(ex);
    }
}
// Makes a toplevel inspector visible and brings it to the front with focus.
// The show/raise/activateWindow sequence is the standard Qt recipe and its
// order matters.
void Inspector::showWindow()
{
    ASSERT(isToplevelInspector());
    show();
    raise();
    activateWindow();
}
void Inspector::refresh()
{
if (isToplevelInspector())
refreshTitle();
if (goBackAction)
goBackAction->setEnabled(canGoBack());
if (goForwardAction)
goForwardAction->setEnabled(canGoForward());
if (goUpAction) {
if (object) {
cObject *parent = dynamic_cast<cComponent *>(object) ? ((cComponent *)object)->getParentModule() : object->getOwner();
goUpAction->setEnabled(parent);
}
else {
goUpAction->setEnabled(false);
}
}
}
// Settings key under which an inspector's window geometry is persisted.
const QString Inspector::PREF_GEOM = "geom";

// Builds the settings key for a per-inspector-type preference.
// NOTE(review): the "-" appended right after the type number produces a double
// dash before the "-toplevel"/"-embedded" suffix (e.g. "type1--toplevel-geom"),
// unlike getFullPrefKeyPerType() below. This looks accidental, but changing
// the format would orphan users' already-saved preferences -- confirm before
// "fixing" it.
QString Inspector::getFullPrefKey(const QString& pref, bool topLevel) const
{
    return "InspectorPreferences/type" + QString::number(type) + "-"
            + (topLevel ? "-toplevel" : "-embedded") + "-"
            + pref;
}
// Builds the settings key for a preference that is additionally specialized by
// the short type name of the inspected object ("nullptr" when none). The key
// format is persisted in user settings, so it must stay byte-stable.
QString Inspector::getFullPrefKeyPerType(const QString& pref, bool topLevel) const
{
    return "InspectorPreferences/type" + QString::number(type)
            + (topLevel ? "-toplevel" : "-embedded") + "-"
            + (object ? getObjectShortTypeName(object, STRIPNAMESPACE_NONE) : "nullptr") + "-"
            + pref;
}
// Reads a stored preference. Toplevel inspectors fall back to the embedded
// variant of the same key when no toplevel value was stored yet; if neither
// exists, defaultValue is returned.
QVariant Inspector::getPref(const QString& pref, const QVariant& defaultValue, bool perType) const
{
    bool topLevel = isTopLevel();
    QString key = perType
            ? getFullPrefKeyPerType(pref, topLevel)
            : getFullPrefKey(pref, topLevel);

    QVariant value = getQtenv()->getPref(key);

    if (topLevel && !value.isValid()) {
        // inherit from the embedded inspector's setting
        QString embeddedKey = perType
                ? getFullPrefKeyPerType(pref, false)
                : getFullPrefKey(pref, false);
        value = getQtenv()->getPref(embeddedKey);
    }

    return value.isValid() ? value : defaultValue;
}
void Inspector::setPref(const QString& pref, const QVariant& value, bool perType)
{
QString key = perType ? getFullPrefKeyPerType(pref, isTopLevel()) : getFullPrefKey(pref, isTopLevel());
getQtenv()->setPref(key, value);
}
void Inspector::refreshTitle()
{
QString title = getQtenv()->getWindowTitlePrefix();
if (!object)
title += " N/A";
else {
std::string fullPath = object->getFullPath();
if (fullPath.length() <= 60)
title += QString("(%1) %2").arg(getObjectShortTypeName(object).left(40)).arg(fullPath.c_str());
//sprintf(newTitle, "%s(%.40s) %s", prefix, , fullPath.c_str());
else
title += QString("(%1) ...%2").arg(getObjectShortTypeName(object).left(40)).arg(fullPath.c_str()+fullPath.length()-55);
//sprintf(newTitle, "%s(%.40s) ...%s", prefix, getObjectShortTypeName(object), fullPath.c_str()+fullPath.length()-55);
}
setWindowTitle(title);
}
// Called when a simulation object is deleted: drop it from the navigation
// history, and if it was the one being inspected, clear the inspector.
void Inspector::objectDeleted(cObject *obj)
{
    if (object == obj) {
        doSetObjectSafe(nullptr);
        refresh();
    }
    removeFromToHistory(obj);
}
// Public entry point for navigating the inspector to an object. On first use
// the window is positioned and shown; afterwards, switching to a different
// object records the old one on the back-history and clears the forward one.
void Inspector::setObject(cObject *obj)
{
    if (isNew) {
        doSetObjectSafe(obj);
        if (isToplevelInspector()) {
            loadInitialGeometry();
            showWindow();
        }
        refresh();
        isNew = false;
        return;
    }

    if (obj == object)
        return;  // already inspecting it

    if (object != nullptr) {
        historyBack.push_back(object);
        historyForward.clear();  // a fresh navigation invalidates "forward"
    }
    doSetObjectSafe(obj);
    refresh();
}
// Purges the given (typically just-deleted) object from both navigation
// history stacks; remove() is the helper from common/stlutil.h.
void Inspector::removeFromToHistory(cObject *obj)
{
    remove(historyBack, obj);
    remove(historyForward, obj);
}
void Inspector::loadInitialGeometry()
{
ASSERT(isToplevelInspector());
adjustSize();
auto geom = getPref(PREF_GEOM, QRect()).toRect();
if (!geom.isNull())
setGeometry(geom);
}
// Default preferred size for inspector widgets.
QSize Inspector::sizeHint() const
{
    return QSize(400, 300);
}
// True when a "forward" navigation target exists in the history.
bool Inspector::canGoForward()
{
    return !historyForward.empty();
}
// True when a "back" navigation target exists in the history.
bool Inspector::canGoBack()
{
    return !historyBack.empty();
}
void Inspector::goForward()
{
if (!historyForward.empty()) {
cObject *newObj = historyForward.back();
historyForward.pop_back();
if (object != nullptr)
historyBack.push_back(object);
doSetObjectSafe(newObj);
refresh();
}
}
void Inspector::goBack()
{
if (!historyBack.empty()) {
cObject *newObj = historyBack.back();
historyBack.pop_back();
if (object != nullptr)
historyForward.push_back(object);
doSetObjectSafe(newObj);
refresh();
}
}
void Inspector::inspectParent()
{
cObject *parentPtr = dynamic_cast<cComponent *>(object) ? ((cComponent *)object)->getParentModule() : object->getOwner();
if (parentPtr == nullptr)
return;
// inspect in current inspector if possible (and allowed), otherwise open a new one
if (supportsObject(parentPtr)) { // TODO && $config(reuse-inspectors)
setObject(parentPtr);
}
else
getQtenv()->inspect(parentPtr);
}
// Opens the Find/Inspect Objects dialog rooted at the inspected object.
void Inspector::findObjectsWithin()
{
    Q_EMIT getQtenv()->getMainWindow()->showFindObjectsDialog(object);
}
void Inspector::goUpInto() // XXX weird name
{
QVariant variant = static_cast<QAction *>(QObject::sender())->data();
if (variant.isValid()) {
cObject *object = variant.value<cObject *>();
setObject(object);
}
}
// Persists window geometry and unregisters this inspector when its
// toplevel window is closed, so the saved inspector list excludes it.
void Inspector::closeEvent(QCloseEvent *)
{
    ASSERT(isToplevelInspector());
    setPref(PREF_GEOM, geometry());
    // We have to call this prematurely, so the stored inspector list won't contain it.
    // The dtor will also call it, sure, but it is idempotent.
    getQtenv()->inspectorDeleted(this);
    getQtenv()->storeInspectors(false);
}
// Populates a toplevel inspector's toolbar: back/forward/parent navigation,
// the "Inspect" and "Copy" dropdown buttons (backed by the prebuilt
// inspectDropdownMenu / copyDropdownMenu), and the object finder (Ctrl+S).
void Inspector::addTopLevelToolBarActions(QToolBar *toolbar)
{
    // general
    goBackAction = toolbar->addAction(QIcon(":/tools/back"), "Back", this, SLOT(goBack()));
    goForwardAction = toolbar->addAction(QIcon(":/tools/forward"), "Forward", this, SLOT(goForward()));
    goUpAction = toolbar->addAction(QIcon(":/tools/parent"), "Go to parent module", this, SLOT(inspectParent()));
    toolbar->addSeparator();
    // NOTE(review): the dynamic_casts below assume QToolBar creates a
    // QToolButton widget for each added action; toolButton would be null
    // (and crash on setMenu) otherwise — confirm this invariant holds.
    QAction *action = toolbar->addAction(QIcon(":/tools/inspectas"), "Inspect");
    QToolButton* toolButton = dynamic_cast<QToolButton *>(toolbar->widgetForAction(action));
    toolButton->setMenu(inspectDropdownMenu);
    toolButton->setPopupMode(QToolButton::InstantPopup);
    action = toolbar->addAction(QIcon(":/tools/copyptr"), "Copy name, type or pointer");
    toolButton = dynamic_cast<QToolButton *>(toolbar->widgetForAction(action));
    toolButton->setMenu(copyDropdownMenu);
    toolButton->setPopupMode(QToolButton::InstantPopup);
    toolbar->addAction(QIcon(":/tools/findobj"), "Find/inspect objects", this, SLOT(findObjectsWithin()))
            ->setShortcut(Qt::CTRL | Qt::Key_S);
}
} // namespace qtenv
} // namespace omnetpp
|
def _construct_full_path_generator(dirs: List[str]):
    """Build a function mapping a relative path to its purged full path.

    Empty/falsy entries in ``dirs`` are discarded. When no directory
    components remain, ``_purge_path`` itself is returned; otherwise the
    returned callable joins the surviving components with the given path
    (skipping falsy pieces) and purges the result.
    """
    prefix = [d for d in dirs if d]
    if not prefix:
        return _purge_path

    def full_path_func(path):
        parts = [p for p in prefix + [path] if p]
        return _purge_path(os.path.join(*parts))

    return full_path_func
/**
 * Contract for value-style equality against another instance of T, for
 * types where reference identity (===) is not the right comparison.
 */
export interface IEquatable<T> {
    /** Returns true when this instance is considered equal to `other`. */
    equalTo(other: T): boolean;
}
|
/**
 * Calcite {@link SchemaFactory} used for the evaluator.
 * This class is public because Calcite uses reflection to instantiate it, there is no reason to use it anywhere else
 * in Gobblin.
 */
public static class PESchemaFactory implements SchemaFactory {
    /**
     * Builds the schema from the Calcite operand map: REFERENCE_INTERFACES
     * holds a comma-separated list of fully-qualified interface names, and
     * OPERATOR_ID an integer identifier passed through to each table.
     */
    @Override
    public Schema create(SchemaPlus parentSchema, String name, Map<String, Object> operand) {
        try {
            // Resolve each configured interface name to a Class; failures
            // surface as ReflectiveOperationException below.
            List<Class<?>> referenceInterfaces = new ArrayList<>();
            for (String iface : Splitter.on(",").splitToList(operand.get(REFERENCE_INTERFACES).toString())) {
                referenceInterfaces.add(Class.forName(iface));
            }
            int operatorIdentifier = Integer.parseInt(operand.get(OPERATOR_ID).toString());
            // The anonymous schema captures the (effectively final) locals
            // above; one PETable per interface, keyed by upper-cased
            // simple name.
            return new AbstractSchema() {
                @Override
                protected Map<String, Table> getTableMap() {
                    HashMap<String, Table> map = new HashMap<>();
                    for (Class<?> iface : referenceInterfaces) {
                        map.put(iface.getSimpleName().toUpperCase(),
                                new PETable(iface, operatorIdentifier));
                    }
                    return map;
                }
            };
        } catch (ReflectiveOperationException roe) {
            // Class.forName failure is a configuration error; fail fast.
            throw new RuntimeException(roe);
        }
    }
}
// Copyright Verizon Media. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package ai.vespa.feed.client;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import java.io.File;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.OptionalInt;
/**
 * Parses and validates the vespa-feed-client command line arguments, and
 * exposes typed accessors for each option.
 *
 * @author bjorncs
 */
class CliArguments {

    private static final Options optionsDefinition = createOptions();

    private static final String BENCHMARK_OPTION = "benchmark";
    private static final String CA_CERTIFICATES_OPTION = "ca-certificates";
    private static final String CERTIFICATE_OPTION = "certificate";
    private static final String CONNECTIONS_OPTION = "connections";
    private static final String DISABLE_SSL_HOSTNAME_VERIFICATION_OPTION = "disable-ssl-hostname-verification";
    private static final String ENDPOINT_OPTION = "endpoint";
    private static final String FILE_OPTION = "file";
    private static final String HEADER_OPTION = "header";
    private static final String HELP_OPTION = "help";
    private static final String MAX_STREAMS_PER_CONNECTION = "max-streams-per-connection";
    private static final String PRIVATE_KEY_OPTION = "private-key";
    private static final String ROUTE_OPTION = "route";
    private static final String TIMEOUT_OPTION = "timeout";
    private static final String TRACE_OPTION = "trace";
    private static final String VERBOSE_OPTION = "verbose";
    private static final String VERSION_OPTION = "version";
    private static final String STDIN_OPTION = "stdin";

    private final CommandLine arguments;

    private CliArguments(CommandLine arguments) throws CliArgumentsException {
        validateArgumentCombination(arguments);
        this.arguments = arguments;
    }

    /** Parses raw argv into a validated CliArguments instance. */
    static CliArguments fromRawArgs(String[] rawArgs) throws CliArgumentsException {
        CommandLineParser parser = new DefaultParser();
        try {
            return new CliArguments(parser.parse(optionsDefinition, rawArgs));
        } catch (ParseException e) {
            throw new CliArgumentsException(e);
        }
    }

    /**
     * Cross-option validation: endpoint is mandatory (unless only help/version
     * is requested), exactly one of --file/--stdin must be given, and
     * --certificate/--private-key must be given together.
     */
    private static void validateArgumentCombination(CommandLine args) throws CliArgumentsException {
        if (!args.hasOption(HELP_OPTION) && !args.hasOption(VERSION_OPTION)) {
            if (!args.hasOption(ENDPOINT_OPTION)) {
                throw new CliArgumentsException("Endpoint must be specified");
            }
            // Equality means "both" or "neither" — either way the feed source is ambiguous.
            if (args.hasOption(FILE_OPTION) == args.hasOption(STDIN_OPTION)) {
                throw new CliArgumentsException(String.format("Either option '%s' or '%s' must be specified", FILE_OPTION, STDIN_OPTION));
            }
            // Inequality means exactly one of the pair was supplied.
            if (args.hasOption(CERTIFICATE_OPTION) != args.hasOption(PRIVATE_KEY_OPTION)) {
                throw new CliArgumentsException(
                        String.format("Both '%s' and '%s' must be specified together", CERTIFICATE_OPTION, PRIVATE_KEY_OPTION));
            }
        } else if (args.hasOption(HELP_OPTION) && args.hasOption(VERSION_OPTION)) {
            throw new CliArgumentsException(String.format("Cannot specify both '%s' and '%s'", HELP_OPTION, VERSION_OPTION));
        }
    }

    /** The feed endpoint as a URI; wraps parse/URI-syntax failures. */
    URI endpoint() throws CliArgumentsException {
        try {
            return ((URL) arguments.getParsedOptionValue(ENDPOINT_OPTION)).toURI();
        } catch (ParseException | URISyntaxException e) {
            throw new CliArgumentsException("Invalid endpoint: " + e.getMessage(), e);
        }
    }

    boolean helpSpecified() { return has(HELP_OPTION); }

    boolean versionSpecified() { return has(VERSION_OPTION); }

    OptionalInt connections() throws CliArgumentsException { return intValue(CONNECTIONS_OPTION); }

    OptionalInt maxStreamsPerConnection() throws CliArgumentsException { return intValue(MAX_STREAMS_PER_CONNECTION); }

    /**
     * Client certificate + private key pair, or empty when neither was given.
     * (validateArgumentCombination guarantees they only appear together.)
     */
    Optional<CertificateAndKey> certificateAndKey() throws CliArgumentsException {
        Path certificateFile = fileValue(CERTIFICATE_OPTION).orElse(null);
        Path privateKeyFile = fileValue(PRIVATE_KEY_OPTION).orElse(null);
        if (privateKeyFile == null && certificateFile == null) return Optional.empty();
        return Optional.of(new CertificateAndKey(certificateFile, privateKeyFile));
    }

    Optional<Path> caCertificates() throws CliArgumentsException { return fileValue(CA_CERTIFICATES_OPTION); }

    Optional<Path> inputFile() throws CliArgumentsException {
        return fileValue(FILE_OPTION);
    }

    /**
     * Parses repeated --header arguments of the form 'Name: value' into a map.
     * A single leading/trailing quote character (left over from shell quoting)
     * is stripped before splitting on the first ':'.
     * NOTE(review): only the value is trim()'d — the header name keeps any
     * surrounding whitespace; confirm that is intended.
     */
    Map<String, String> headers() throws CliArgumentsException {
        String[] rawArguments = arguments.getOptionValues(HEADER_OPTION);
        if (rawArguments == null) return Collections.emptyMap();
        Map<String, String> headers = new HashMap<>();
        for (String rawArgument : rawArguments) {
            if (rawArgument.startsWith("\"") || rawArgument.startsWith("'")) {
                rawArgument = rawArgument.substring(1);
            }
            if (rawArgument.endsWith("\"") || rawArgument.endsWith("'")) {
                rawArgument = rawArgument.substring(0, rawArgument.length() - 1);
            }
            int colonIndex = rawArgument.indexOf(':');
            if (colonIndex == -1) throw new CliArgumentsException("Invalid header: '" + rawArgument + "'");
            headers.put(rawArgument.substring(0, colonIndex), rawArgument.substring(colonIndex + 1).trim());
        }
        return Collections.unmodifiableMap(headers);
    }

    boolean sslHostnameVerificationDisabled() { return has(DISABLE_SSL_HOSTNAME_VERIFICATION_OPTION); }

    boolean benchmarkModeEnabled() { return has(BENCHMARK_OPTION); }

    Optional<String> route() { return stringValue(ROUTE_OPTION); }

    OptionalInt traceLevel() throws CliArgumentsException { return intValue(TRACE_OPTION); }

    /** --timeout is given in (possibly fractional) seconds; converted to a Duration. */
    Optional<Duration> timeout() throws CliArgumentsException {
        OptionalDouble timeout = doubleValue(TIMEOUT_OPTION);
        return timeout.isPresent()
                ? Optional.of(Duration.ofMillis((long)(timeout.getAsDouble()*1000)))
                : Optional.empty();
    }

    boolean verboseSpecified() { return has(VERBOSE_OPTION); }

    boolean readFeedFromStandardInput() { return has(STDIN_OPTION); }

    // Typed accessor helpers below: each returns empty when the option is
    // absent and wraps commons-cli ParseException into CliArgumentsException.

    private OptionalInt intValue(String option) throws CliArgumentsException {
        try {
            Number number = (Number) arguments.getParsedOptionValue(option);
            return number != null ? OptionalInt.of(number.intValue()) : OptionalInt.empty();
        } catch (ParseException e) {
            throw newInvalidValueException(option, e);
        }
    }

    private Optional<Path> fileValue(String option) throws CliArgumentsException {
        try {
            File certificateFile = (File) arguments.getParsedOptionValue(option);
            if (certificateFile == null) return Optional.empty();
            return Optional.of(certificateFile.toPath());
        } catch (ParseException e) {
            throw newInvalidValueException(option, e);
        }
    }

    private Optional<String> stringValue(String option) { return Optional.ofNullable(arguments.getOptionValue(option)); }

    private OptionalDouble doubleValue(String option) throws CliArgumentsException {
        try {
            Number number = (Number) arguments.getParsedOptionValue(option);
            return number != null ? OptionalDouble.of(number.doubleValue()) : OptionalDouble.empty();
        } catch (ParseException e) {
            throw newInvalidValueException(option, e);
        }
    }

    private boolean has(String option) { return arguments.hasOption(option); }

    private static CliArgumentsException newInvalidValueException(String option, ParseException cause) {
        return new CliArgumentsException(String.format("Invalid value for '%s': %s", option, cause.getMessage()), cause);
    }

    /** Declares every supported option (names, arity, value type, help text). */
    private static Options createOptions() {
        return new Options()
                .addOption(Option.builder()
                        .longOpt(HELP_OPTION)
                        .build())
                .addOption(Option.builder()
                        .longOpt(VERSION_OPTION)
                        .build())
                .addOption(Option.builder()
                        .longOpt(ENDPOINT_OPTION)
                        .desc("URI to feed endpoint")
                        .hasArg()
                        .type(URL.class)
                        .build())
                .addOption(Option.builder()
                        .longOpt(HEADER_OPTION)
                        .desc("HTTP header on the form 'Name: value'")
                        .hasArgs()
                        .build())
                .addOption(Option.builder()
                        .longOpt(FILE_OPTION)
                        .type(File.class)
                        .desc("Path to feed file in JSON format")
                        .hasArg()
                        .build())
                .addOption(Option.builder()
                        .longOpt(CONNECTIONS_OPTION)
                        .desc("Number of concurrent HTTP/2 connections")
                        .hasArg()
                        .type(Number.class)
                        .build())
                .addOption(Option.builder()
                        .longOpt(MAX_STREAMS_PER_CONNECTION)
                        .desc("Number of concurrent streams per HTTP/2 connection")
                        .hasArg()
                        .type(Number.class)
                        .build())
                .addOption(Option.builder()
                        .longOpt(CERTIFICATE_OPTION)
                        .desc("Path to PEM encoded X.509 certificate file")
                        .type(File.class)
                        .hasArg()
                        .build())
                .addOption(Option.builder()
                        .longOpt(PRIVATE_KEY_OPTION)
                        .desc("Path to PEM/PKCS#8 encoded private key file")
                        .type(File.class)
                        .hasArg()
                        .build())
                .addOption(Option.builder()
                        .longOpt(CA_CERTIFICATES_OPTION)
                        .desc("Path to file containing CA X.509 certificates encoded as PEM")
                        .type(File.class)
                        .hasArg()
                        .build())
                .addOption(Option.builder()
                        .longOpt(DISABLE_SSL_HOSTNAME_VERIFICATION_OPTION)
                        .desc("Disable SSL hostname verification")
                        .build())
                .addOption(Option.builder()
                        .longOpt(BENCHMARK_OPTION)
                        .desc("Enable benchmark mode")
                        .build())
                .addOption(Option.builder()
                        .longOpt(ROUTE_OPTION)
                        .desc("Target Vespa route for feed operations")
                        .hasArg()
                        .build())
                .addOption(Option.builder()
                        .longOpt(TIMEOUT_OPTION)
                        .desc("Feed operation timeout (in seconds)")
                        .hasArg()
                        .type(Number.class)
                        .build())
                .addOption(Option.builder()
                        .longOpt(TRACE_OPTION)
                        .desc("The trace level of network traffic. Disabled by default (=0)")
                        .hasArg()
                        .type(Number.class)
                        .build())
                .addOption(Option.builder()
                        .longOpt(STDIN_OPTION)
                        .desc("Read JSON input from standard input")
                        .build())
                .addOption(Option.builder()
                        .longOpt(VERBOSE_OPTION)
                        .build());
    }

    /** Prints commons-cli generated usage text for all options to the given stream. */
    void printHelp(OutputStream out) {
        HelpFormatter formatter = new HelpFormatter();
        PrintWriter writer = new PrintWriter(out);
        formatter.printHelp(
                writer,
                formatter.getWidth(),
                "vespa-feed-client <options>",
                "Vespa feed client",
                optionsDefinition,
                formatter.getLeftPadding(),
                formatter.getDescPadding(),
                "");
        writer.flush();
    }

    /** Signals invalid or inconsistent command line input. */
    static class CliArgumentsException extends Exception {
        CliArgumentsException(String message, Throwable cause) { super(message, cause); }
        CliArgumentsException(Throwable cause) { super(cause.getMessage(), cause); }
        CliArgumentsException(String message) { super(message); }
    }

    /** Value pair for the --certificate / --private-key options. */
    static class CertificateAndKey {
        final Path certificateFile;
        final Path privateKeyFile;

        CertificateAndKey(Path certificateFile, Path privateKeyFile) {
            this.certificateFile = certificateFile;
            this.privateKeyFile = privateKeyFile;
        }
    }
}
|
<reponame>TSDBBench/Overlord<filename>MakeDebianIso.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Repacks a Debian installer ISO into an unattended-install ISO: the image
# is extracted, the given preseed file is injected into the installer
# initrd, boot options are tweaked (shorter timeout, IPv6 disabled),
# checksums are regenerated and a new ISO is written to the output folder.
# NOTE: Python 2 only (uses `except Exception, e` syntax and fabric.api.local).
__author__ = '<NAME>'
__version__ = "0.01"
import argparse
import logging
import Util
import os
import shutil
from fabric.api import *
# Suffix appended to the generated ISO file name.
outputFileNameSuffix="-autoinstall"
# External tools that must be on PATH for the repack pipeline to work.
neededTools = ["which", "sed","7z", "genisoimage", "gzip", "find", "cpio", "md5sum"]
# Configure ArgumentParser
parser = argparse.ArgumentParser(prog="MakeDebianIso.py",version=__version__,description="Bla", formatter_class=argparse.RawDescriptionHelpFormatter, epilog="")
parser.add_argument("-t", "--tmpfolder", metavar="TMPFOLDER", required=True, help="path to tmp space (> 5 gb)")
parser.add_argument("-i", "--isofile", metavar="ISOFILE", required=True, help="path to Debian Iso")
parser.add_argument("-f", "--outputfolder", metavar="OUTPUTFOLDER", required=True, help="path to outputfolder (will be created if not existent)")
parser.add_argument("-p", "--presseedfile", metavar="PRESEEDFILE", required=True, help="path to preseedfile")
parser.add_argument("-o", "--overwrite", action="store_true", help="overwrite if ISO is existing")
parser.add_argument("-l", "--log", action='store_true', help="Be more verbose, log vagrant output.")
args = parser.parse_args()
# Configure Logging
logLevel = logging.WARN
if args.log:
    logLevel = logging.DEBUG
logging.basicConfig(level=logLevel)
logger = logging.getLogger(__name__)
# Check that all required external tools are available on PATH.
for tool in neededTools:
    with settings(warn_only=True), hide('output','running','warnings'):
        ret=local("which '%s'" %(tool))
        if ret.return_code != 0:
            logger.error("'%s' not found, please install." %(tool))
            exit(-1)
# Validate input files and create/clean the tmp and output folders.
if not (Util.check_file_exists(args.presseedfile) and Util.check_file_readable(args.presseedfile)):
    logger.error("Preseedfile %s does not exist or is not readable." %(args.presseedfile))
    exit(-1)
if not (Util.check_file_exists(args.isofile) and Util.check_file_readable(args.isofile)):
    logger.error("Isofile %s does not exist or is not readable." %(args.isofile))
    exit(-1)
if not Util.check_folder(args.outputfolder,logger,False,True):
    if not Util.create_folder(args.outputfolder) or not Util.check_folder(args.outputfolder,logger):
        logger.error("Can't create %s." %(args.outputfolder))
        exit(-1)
if not Util.check_folder(args.tmpfolder,logger,False,True):
    if not Util.create_folder(args.tmpfolder) or not Util.check_folder(args.tmpfolder,logger):
        logger.error("Can't create %s." %(args.tmpfolder))
        exit(-1)
else:
    # tmp folder already exists: wipe and recreate it so no stale files remain.
    Util.delete_folder(args.tmpfolder,logger)
    if not Util.create_folder(args.tmpfolder) or not Util.check_folder(args.tmpfolder,logger):
        logger.error("Can't create %s." %(args.tmpfolder))
        exit(-1)
# Derive the output ISO name: <iso base>-<preseed suffixes>-autoinstall.<ext>
isoFileName=os.path.basename(args.isofile)
isoFileBase="default"
if len(os.path.splitext(args.isofile)) > 0:
    isoFileBase=os.path.splitext(isoFileName)[0]
isoFileExt=".iso"
if len(os.path.splitext(args.isofile)) > 1:
    isoFileExt=os.path.splitext(isoFileName)[1]
preseedSuffix=""
if len(os.path.splitext(os.path.basename(args.presseedfile))[0].split("-")) > 1:
    for suffix in os.path.splitext(os.path.basename(args.presseedfile))[0].split("-")[1:]:
        preseedSuffix += "-%s" %(suffix)
newIsoFileName="%s%s%s%s" %(isoFileBase,preseedSuffix,outputFileNameSuffix,isoFileExt)
isoPath=os.path.join(args.tmpfolder,isoFileName)
if Util.check_file_exists(os.path.join(args.outputfolder,newIsoFileName)):
    if args.overwrite:
        if not Util.delete_file(os.path.join(args.outputfolder,newIsoFileName),logger):
            logger.error("Error while deleting %s." %(os.path.join(args.outputfolder,newIsoFileName)))
            exit(-1)
    else:
        logger.error("Outputfile %s does exist." %(os.path.join(args.outputfolder,newIsoFileName)))
        exit(-1)
# Copy Iso to tmpfolder
try:
    shutil.copy(args.isofile, isoPath)
except Exception, e:
    logger.error("Can't copy %s to %s." %(args.isofile,args.tmpfolder), exc_info=True)
    exit(-1)
# Unpack it with 7z
with lcd(args.tmpfolder), settings(warn_only=True), hide('output','running','warnings'):
    ret=local("7z x '%s'" %(isoFileName), capture=True)
    if not ret.succeeded:
        logger.error("Can't unpack %s. Error: %s" %(isoPath,ret.stderr))
        exit(-1)
# Delete ISO
if not Util.delete_file(isoPath, logger):
    logger.error("Can't delete %s." %(isoPath))
    exit(-1)
# mkdir irmod, unzip initrd.gz
if not Util.create_folder(os.path.join(args.tmpfolder,"irmod")):
    logger.error("Can't create %s." %(os.path.join(args.tmpfolder,"irmod")))
    exit(-1)
with lcd(os.path.join(args.tmpfolder,"irmod")), settings(warn_only=True), hide('output','running','warnings'):
    ret=local("gzip -d < ../install.amd/initrd.gz | cpio --extract --verbose --make-directories --no-absolute-filenames")
    # NOTE(review): cpio is expected to exit with 2 here (warnings about
    # trailing junk are normal); any other code is treated as failure — confirm.
    if ret.return_code != 2:
        logger.error("Can't unpack %s. Error: %s" %(os.path.join(args.tmpfolder,"install.amd/initrd.gz"),ret.stderr))
        exit(-1)
# Copy Pressed file in place
if not Util.copy_file(args.presseedfile, os.path.join(args.tmpfolder,"irmod","preseed.cfg"),logger):
    logger.error("Can't copy %s to %s." %(args.presseedfile, os.path.join(args.tmpfolder,"irmod","preseed.cfg")))
    exit(-1)
# Pack & Compress it again
with lcd(os.path.join(args.tmpfolder,"irmod")), settings(warn_only=True), hide('output','running','warnings'):
    ret=local("find . | cpio -H newc --create --verbose | gzip -9 > ../install.amd/initrd.gz")
    if not ret.succeeded:
        logger.error("Can't pack %s. Error: %s" %(os.path.join(args.tmpfolder,"install.amd/initrd.gz"),ret.stderr))
        exit(-1)
# Delete irmod
if not Util.delete_folder(os.path.join(args.tmpfolder,"irmod"),logger):
    logger.error("Can't delete %s." %(os.path.join(args.tmpfolder,"irmod")))
    exit(-1)
# Running sed + generating md5sum + genisoimage
with lcd(args.tmpfolder), settings(warn_only=True), hide('output','running','warnings'):
    # Shorten the boot menu timeout and disable IPv6 on the kernel command line.
    ret=local("sed -i \"s/timeout 0/timeout 2/g\" isolinux/isolinux.cfg")
    if not ret.succeeded:
        logger.error("Error while sed run on %s. Error: %s" %(os.path.join(args.tmpfolder,"isolinux/isolinux.cfg"),ret.stderr))
        exit(-1)
    ret=local("sed -i \"s/quiet/ipv6.disable=1 quiet/g\" isolinux/txt.cfg")
    if not ret.succeeded:
        logger.error("Error while sed run on %s. Error: %s" %(os.path.join(args.tmpfolder,"isolinux/txt.cfg"),ret.stderr))
        exit(-1)
    ret=local("md5sum `find -follow -type f` > md5sum.txt")
    if not ret.succeeded:
        logger.error("Error while md5sum+find run. Error: %s" %(ret.stderr))
        exit(-1)
    ret=local("genisoimage -o \"%s\" -r -J -no-emul-boot -boot-load-size 4 -boot-info-table -b isolinux/isolinux.bin -c isolinux/boot.cat ." %(os.path.join(args.tmpfolder,newIsoFileName)))
    if not ret.succeeded:
        logger.error("Error while genisoimage run. Error: %s" %(ret.stderr))
        exit(-1)
# Copy Iso, delete tmpfolder
if os.path.join(args.tmpfolder,newIsoFileName) != os.path.join(args.outputfolder,newIsoFileName):
    if not Util.copy_file(os.path.join(args.tmpfolder,newIsoFileName),os.path.join(args.outputfolder,newIsoFileName),logger):
        logger.error("Could not copy %s to %s." %(os.path.join(args.tmpfolder,newIsoFileName),os.path.join(args.outputfolder,newIsoFileName)))
        exit(-1)
if not Util.delete_folder(args.tmpfolder,logger):
    logger.error("Can't delete %s." %(args.tmpfolder))
    exit(-1)
logger.info("Done.")
exit(0)
package org.redquark.leetcoding.challenge;
/**
 * @author <NAME>
 * <p>
 * Given an array of strings strs, return the length of the longest uncommon
 * subsequence between them, or -1 if no such subsequence exists.
 * <p>
 * An uncommon subsequence of an array of strings is a string that is a
 * subsequence of exactly one of them. A subsequence of s is obtained by
 * deleting any number of characters from s (possibly none).
 * <p>
 * Key observation: if an uncommon subsequence exists, some whole input
 * string is one — so it suffices to check, for each string, whether it is
 * a subsequence of any *other* string.
 * <p>
 * Constraints: 1 <= strs.length <= 50, 1 <= strs[i].length <= 10,
 * lowercase English letters only.
 */
public class Problem27_LongestUncommonSubsequenceII {

    /**
     * Returns the length of the longest string in strs that is not a
     * subsequence of any other entry, or -1 when every string is.
     */
    public int findLUSLength(String[] strs) {
        // Special case
        if (strs == null || strs.length == 0) {
            return 0;
        }
        int longest = -1;
        for (int candidate = 0; candidate < strs.length; candidate++) {
            // Is the candidate a subsequence of some *other* entry?
            boolean containedElsewhere = false;
            for (int other = 0; other < strs.length; other++) {
                if (other == candidate) {
                    continue;
                }
                if (isSubsequenceFound(strs[candidate], strs[other])) {
                    containedElsewhere = true;
                    break;
                }
            }
            if (!containedElsewhere) {
                longest = Math.max(longest, strs[candidate].length());
            }
        }
        return longest;
    }

    /** Returns true when a is a subsequence of b (identical strings count). */
    private boolean isSubsequenceFound(String a, String b) {
        if (a.equals(b)) {
            return true;
        }
        // Two-pointer scan: advance through b, consuming matching chars of a.
        int ai = 0;
        for (int bi = 0; ai < a.length() && bi < b.length(); bi++) {
            if (a.charAt(ai) == b.charAt(bi)) {
                ai++;
            }
        }
        return ai == a.length();
    }
}
|
// Records the creation flags of an extension grouped by
// Extension::InitFromValueFlags.
//
// Each set bit of creation_flags() is reported by its bit *index* (not the
// flag value) so the "Extensions.LoadCreationFlags" histogram stays a dense
// linear range [0, kInitFromValueFlagBits).
void RecordCreationFlags(const Extension* extension) {
  for (int i = 0; i < Extension::kInitFromValueFlagBits; ++i) {
    int flag = 1 << i;
    if (extension->creation_flags() & flag) {
      UMA_HISTOGRAM_EXACT_LINEAR("Extensions.LoadCreationFlags", i,
                                 Extension::kInitFromValueFlagBits);
    }
  }
}
// Variable size guards against block size changing from SetBlockSize()
// or large requests greater than the standard block size.
//
// Allocates a raw buffer of `size` bytes for this block and initializes it
// as an unlinked list node (Next == nullptr); the owning heap chains blocks
// together. NOTE(review): Data appears to be released elsewhere (presumably
// the heap's teardown) — confirm ownership before changing this.
vtkHeapBlock(size_t size)
  : Next(nullptr)
  , Size(size)
{
  this->Data = new char[size];
}
// Repairs a corrupted digital clock reading. Input: the clock format
// (12 or 24) followed by a time "H:M"; output: the reading with each
// out-of-range component fixed by changing its leading digit.
#include <cstdio>
#include <iostream>
using namespace std;

int main() {  // fix: implicit-int `main()` is ill-formed in standard C++
    int t, h, m;
    scanf("%d%d:%d", &t, &h, &m);
    // Minutes must be 0..59: keep the last digit, force the tens digit to 1.
    if (m >= 60) m = m % 10 + 10;
    // Hours out of range for the 12-hour format (valid 1..12):
    // keep the last digit if nonzero, otherwise use 10.
    if (t == 12 && (h < 1 || h > 12)) {
        if (h % 10) h %= 10;
        else h = 10;
    }
    // Same repair for the 24-hour format (valid 0..23).
    if (t == 24 && (h < 0 || h > 23)) {
        if (h % 10) h %= 10;
        else h = 10;
    }
    // Print zero-padded "HH:MM".
    if (h < 10) cout << 0;
    cout << h << ":";
    if (m < 10) cout << 0;
    cout << m;
}
|
/**
* @author <a href="mailto:[email protected]">Marek Posolda</a>
*/
public class LDAPObject {
private static final Logger logger = Logger.getLogger(LDAPObject.class);
private String uuid;
private LDAPDn dn;
private String rdnAttributeName;
private final List<String> objectClasses = new LinkedList<>();
// NOTE: names of read-only attributes are lower-cased to avoid case sensitivity issues
private final List<String> readOnlyAttributeNames = new LinkedList<>();
private final Map<String, Set<String>> attributes = new HashMap<>();
// Copy of "attributes" containing lower-cased keys
private final Map<String, Set<String>> lowerCasedAttributes = new HashMap<>();
public String getUuid() {
return uuid;
}
public void setUuid(String uuid) {
this.uuid = uuid;
}
public LDAPDn getDn() {
return dn;
}
public void setDn(LDAPDn dn) {
this.dn = dn;
}
public List<String> getObjectClasses() {
return objectClasses;
}
public void setObjectClasses(Collection<String> objectClasses) {
this.objectClasses.clear();
this.objectClasses.addAll(objectClasses);
}
public List<String> getReadOnlyAttributeNames() {
return readOnlyAttributeNames;
}
public void addReadOnlyAttributeName(String readOnlyAttribute) {
readOnlyAttributeNames.add(readOnlyAttribute.toLowerCase());
}
public void removeReadOnlyAttributeName(String readOnlyAttribute) {
readOnlyAttributeNames.remove(readOnlyAttribute.toLowerCase());
}
public String getRdnAttributeName() {
return rdnAttributeName;
}
public void setRdnAttributeName(String rdnAttributeName) {
this.rdnAttributeName = rdnAttributeName;
}
public void setSingleAttribute(String attributeName, String attributeValue) {
Set<String> asSet = new LinkedHashSet<>();
asSet.add(attributeValue);
setAttribute(attributeName, asSet);
}
public void setAttribute(String attributeName, Set<String> attributeValue) {
attributes.put(attributeName, attributeValue);
lowerCasedAttributes.put(attributeName.toLowerCase(), attributeValue);
}
// Case-insensitive
public String getAttributeAsString(String name) {
Set<String> attrValue = lowerCasedAttributes.get(name.toLowerCase());
if (attrValue == null || attrValue.size() == 0) {
return null;
} else if (attrValue.size() > 1) {
logger.warnf("Expected String but attribute '%s' has more values '%s' on object '%s' . Returning just first value", name, attrValue, dn);
}
return attrValue.iterator().next();
}
// Case-insensitive. Return null if there is not value of attribute with given name or set with all values otherwise
public Set<String> getAttributeAsSet(String name) {
Set<String> values = lowerCasedAttributes.get(name.toLowerCase());
return (values == null) ? null : new LinkedHashSet<>(values);
}
public Map<String, Set<String>> getAttributes() {
return attributes;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!getClass().isInstance(obj)) {
return false;
}
LDAPObject other = (LDAPObject) obj;
return getUuid() != null && other.getUuid() != null && getUuid().equals(other.getUuid());
}
@Override
public int hashCode() {
int result = getUuid() != null ? getUuid().hashCode() : 0;
result = 31 * result + (getUuid() != null ? getUuid().hashCode() : 0);
return result;
}
@Override
public String toString() {
return "LDAP Object [ dn: " + dn + " , uuid: " + uuid + ", attributes: " + attributes + ", readOnly attribute names: " + readOnlyAttributeNames + " ]";
}
} |
<reponame>viveksiddineni/Thrizer-Admin-test
import {
Component,
OnInit,
Input,
ContentChildren,
QueryList,
TemplateRef,
ElementRef,
ViewChild,
AfterViewInit,
Output,
EventEmitter,
ContentChild,
AfterContentInit,
} from "@angular/core";
import { ForDirective } from "../for";
import { Pagination } from "projects/thrizer-admin-app/src/app/models/pagination";
import { TableComponent } from "../table/table.component";
import { Router } from "@angular/router";
import { Config } from "./listing.types";
import { MatDialog } from "@angular/material/dialog";
@Component({
  selector: "app-listing",
  templateUrl: "./listing.component.html",
  styleUrls: ["./listing.component.scss"],
})
// Generic listing shell: wires pagination (inherited from Pagination),
// search, a filter dialog and a projected TableComponent together, and
// re-emits page/sort/filter/search changes to the host via changeListEvent.
export class ListingComponent
  extends Pagination
  implements OnInit, AfterViewInit, AfterContentInit {
  // Number of currently active filter fields (shown as a badge).
  filterNumbers: any;
  // Snapshot of the filter values before the dialog opens, to detect changes.
  prevFilterdata: any = {};
  @Input() config: Config;
  // Fired whenever page/limit/sort/filter/search change; payload is the
  // merged page options object.
  @Output() changeListEvent: EventEmitter<any> = new EventEmitter();
  // Re-emits row-selection changes from the projected table.
  @Output() selectionChange: EventEmitter<any> = new EventEmitter();
  get label(): string {
    return this.config.label;
  }
  get totalList(): number {
    return this.config.total;
  }
  get hasSearch(): boolean {
    // Guard: config may not be bound yet during early change detection.
    return this.config ? this.config.options.search : false;
  }
  @Input() placeholder;
  @Output() setSearch = new EventEmitter();
  search = "";
  clear: boolean = false;
  // Content-projected templates marked with the ForDirective.
  @ContentChildren(ForDirective) templates: QueryList<ForDirective>;
  get actions(): TemplateRef<any> {
    // Template named "list-actions" (if projected) is rendered in the header.
    const actionsTemplate =
      this.templates &&
      this.templates.find(({ name }) => name === "list-actions");
    return actionsTemplate ? actionsTemplate.ref : null;
  }
  get size() {
    return this.limit;
  }
  @ViewChild("searchField", { static: false }) _searchRef: ElementRef<
    HTMLInputElement
  > = new ElementRef<HTMLInputElement>(null);
  // The projected data table; subscribed to in ngAfterContentInit.
  @ContentChild(TableComponent, { static: false }) childTable: TableComponent;
  ngAfterContentInit() {
    // this.footer now points to the instance of `FooterComponent`
    this.childTable.changeTableEvent.subscribe((data) => {
      console.log("changeTableEvent", data);
      this.sortData(data);
    });
    this.childTable.changeTableSelectionEvent.subscribe((data) => {
      this.emitSelection(data);
    });
  }
  constructor(private dialog: MatDialog, private router: Router) {
    super();
  }
  ngOnInit() {}
  ngAfterViewInit() {}
  // Opens the configured filter dialog; on close, re-queries the list only
  // when the returned filter values actually differ from the previous ones.
  onFilterHandler() {
    if (this.config && this.config.options && this.config.options.filter) {
      if (this.filterOptions) {
        this.prevFilterdata = this.filterOptions;
      } else {
        this.prevFilterdata = {};
      }
      const subscription = this.dialog
        .open(this.config.options.filter, {
          panelClass: "openState",
          width: "400px",
          disableClose: false,
          position: {
            right: "20px",
          },
          autoFocus: false,
          data:
            this.filterOptions && Object.keys(this.filterOptions).length > 0
              ? this.filterOptions
              : null,
        })
        .afterClosed()
        .subscribe((filterData) => {
          console.log(filterData);
          // Deep-compare via JSON to detect an actual change in filters.
          if (
            filterData &&
            JSON.stringify(this.prevFilterdata) !== JSON.stringify(filterData)
          ) {
            this.filterNumbers = Object.keys(filterData).length;
            this.page = 1; // new filters restart pagination
            this.filterOptions = filterData;
            this._emitEvent();
          } else {
            if (filterData) {
              this.filterNumbers = Object.keys(filterData).length;
            } else {
              this.filterNumbers = 0;
            }
          }
          subscription.unsubscribe();
        });
    }
  }
  onSearchFieldHandler(event: MouseEvent) {
    // Keep clicks inside the search field from bubbling to outer handlers.
    event.stopPropagation();
  }
  onPageHandler(data) {
    this.pageOptionsOnChange = data;
    this.resetSelection();
    this._emitEvent();
  }
  resetSearch() {
    this.search = "";
    this._searchRef.nativeElement.value = "";
    this._emitEvent();
  }
  searchData(event) {
    // Only re-query when the term changed; new searches restart pagination.
    if (this.search !== event) {
      this.search = event;
      this.page = 1;
      this._emitEvent();
    }
  }
  private _emitEvent() {
    console.log(this.validPageOptions,);
    this.changeListEvent.emit({ ...this.validPageOptions });
  }
  private emitSelection(event) {
    this.selectionChange.emit(event);
  }
  // Unchecks every row checkbox in the projected table (if selectable).
  resetSelection() {
    if (this.childTable.selection) {
      this.childTable.checkBoxList.map(
        (checkbox) => (checkbox.checked = false)
      );
    }
  }
  // Applies a sort change coming from the table and re-queries from page 1.
  sortData(event) {
    // this.selectionChange.emit(event);
    this.sortOptions = event;
    this.resetPages();
    this._emitEvent();
  }
}
|
def remove_temp_data():
    """Delete leftover temporary files from a previous run, if present."""
    for temp_file in ('bbc_sitemap.txt', 'temp_data.txt'):
        if os.path.isfile(temp_file):
            os.remove(temp_file)
// JobStatus returns the current status of job.
// If the PID is unavailable (i.e. the process is not running), 0 will be returned.
// An error will be returned if the job is unknown (i.e. it has no config in /etc/init).
//
// Note: the response struct is returned alongside err in all cases; callers
// must check err before trusting its fields.
func (*UpstartService) JobStatus(ctx context.Context, request *platform.JobStatusRequest) (*platform.JobStatusResponse, error) {
	goal, state, pid, err := upstart.JobStatus(ctx, request.JobName)
	return &platform.JobStatusResponse{
		Goal: string(goal),
		State: string(state),
		Pid: int32(pid),
	}, err
}
<reponame>feberhard/ACO
/*
* //==============================================\\
* || Project: Ant Colony Optimization ||
* || Authors: <NAME>, <NAME>, ||
* || <NAME> <NAME> ||
* || Date: 05.12.2016 ||
* \\==============================================//
*/
// start vs-code task: ctrl+shift+b
// variables
// Global simulation state shared across the ACO modules.
var pixelSize = 25;                  // edge length of one grid cell, in canvas pixels
var field: Cell[][];                 // the simulation grid [col][row]
var randomField: number[];
var colorArray: string[][][]; // [ants][food][antDirection]
var canvas: HTMLCanvasElement;
var ctx: CanvasRenderingContext2D;
// Default settings; `config` below starts as a deep copy so user changes
// can always be reset back to these values.
var default_config = {
    // colors
    groundColor: "#A08556", // brown
    foodColor: "#00CC00", // green
    // foodColor: "#246D25", // green
    antToFoodColor: "#000000", // black
    antToNestColor: "#CCCC00", // yellow
    nestColor: "#DD0000", // red
    obstacleColor: "#0000FF", // blue
    ToNestPheromoneColor: "#FFDDDD",
    ToFoodPheromoneColor: "#DDFFDD",
    // ant
    antPopulation: 20,
    initialPheromoneStrength: 300,
    // food
    foodSources: 1,
    maxFood: 200,
    minDistanceToNest: 1,
    // nest
    nests: 1,
    // cell
    maxPheromone: 300,
    maxAnts: 100,
    obstacles: 20,
    // general
    fps: 100,
    minimisationAlgorithmEnabled: false,
    minimisationSpreadValue: 1,
    minimisationSpreadPercentage: 0,
    fieldWidth: 20,
    fieldHeight: 20,
    canvasSize: 800, // width and height of the larger side of the canvas, smaller canvas side and pixelsize depend on this
    seperatePheromoneView: true,
    squareCellScore: true
};
// Deep copy via JSON round-trip (plain data object, so this is safe).
var config = JSON.parse(JSON.stringify(default_config));
// Per-run counters; reset by copying default_statistics the same way.
var default_statistics = {
    foodInSources: 0,
    foodInNests: 0,
    antsWithFood: 0,
};
var statistics = JSON.parse(JSON.stringify(default_statistics));
// One grid cell: holds ants (up to maxAnts), food, pheromone levels, and may
// be a nest or (maxAnts == 0) an obstacle.
class Cell {
    public ants: Ant[];
    public maxAnts: number; // capacity; 0 marks this cell as an obstacle
    public toNestPheromone: number;
    public toFoodPheromone: number;
    // TODO probably make food a number which decreases every time an ant eats from it
    public food: number;
    public nest: boolean;

    // Display color: obstacles and nests use fixed colors; everything else is
    // looked up in the precomputed blend table (see initColors()).
    get color(): string {
        if (this.maxAnts == 0) {
            return config.obstacleColor;
        }
        if (this.nest) {
            return config.nestColor;
        }
        var antCount = this.ants.length;
        var foodCount = this.food;
        var antDirection = this.ants.length > 0 ? this.ants[0].direction : 0;
        // NOTE(review): occupied cells are drawn at full ant intensity (maxAnts)
        // rather than the actual count — presumably for visibility; the
        // commented-out line below would use the real count. Confirm intent.
        return colorArray[this.ants.length > 0 ? this.maxAnts : 0][foodCount][antDirection];
        // return colorArray[antCount][foodCount][antDirection];
    }

    constructor(public col: number, public row: number, maxAnts: number, food: number = 0, nest: boolean = false) {
        this.ants = [];
        this.maxAnts = maxAnts;
        this.food = food;
        this.nest = nest;
        this.toNestPheromone = 0;
        this.toFoodPheromone = 0;
    }

    public canAddAnt() {
        return this.ants.length < this.maxAnts;
    }

    // Adds the ant if there is capacity; returns whether it was added.
    public addAnt(ant: Ant) {
        if (this.canAddAnt()) {
            this.ants.push(ant);
            return true;
        }
        return false;
    }

    // A cell can become an obstacle only if it is empty of nest/food and not
    // already an obstacle.
    public canAddObstacle() {
        return !this.nest && !this.food && this.maxAnts != 0;
    }

    public addFood(food: number = config.maxFood) {
        this.food = Math.min(this.food + food, config.maxFood);
    }

    public takeFood() {
        this.food = Math.max(this.food - 1, 0);
    }

    public setNest() {
        this.nest = true;
    }

    // Marks every ant in the cell as (not) moved for the current tick.
    public setMoved(moved: boolean) {
        this.ants.forEach(a => a.moved = moved);
    }

    // Pheromone evaporation: both levels decay by 1 per tick, floored at 0.
    public decreasePheromone() {
        this.toFoodPheromone = Math.max(this.toFoodPheromone - 1, 0);
        this.toNestPheromone = Math.max(this.toNestPheromone - 1, 0);
    }

    public addtoNestPheromone(pheromone: number) {
        this.toNestPheromone = Math.min(this.toNestPheromone + pheromone, config.maxPheromone);
    }

    public addToFoodPheromone(pheromone: number) {
        this.toFoodPheromone = Math.min(this.toFoodPheromone + pheromone, config.maxPheromone);
    }
}
// Travel phase of an ant; also indexes the last dimension of colorArray.
const enum Direction {
    toFood = 0,
    toNest = 1
}
/**
 * A single ant. Ants start out heading toward food and carry a pheromone
 * budget that weakens with every step they take.
 */
class Ant {
    public moved: boolean;
    public direction: Direction;
    public pheromoneStrength: number;
    public hasFood: boolean = false;

    constructor(pheromoneStrength: number) {
        this.pheromoneStrength = pheromoneStrength;
        this.direction = Direction.toFood;
    }

    // Each step costs 2 units of pheromone strength, floored at zero.
    public decreasePheromoneStrength() {
        this.pheromoneStrength = Math.max(0, this.pheromoneStrength - 2);
    }
}
// Precompute colorArray[antCount][foodCount][direction]: for every possible
// ant/food occupancy the ant color, food color and ground color are blended
// proportionally to their relative intensity.
function initColors() {
    // ant
    // 0 1 2 3 4 5
    // ------------
    // food 0 | g b
    //      1 | f
    // g = ground/brown, b = black, f = food/green
    var maxAnts = config.maxAnts;
    var maxFood = config.maxFood;
    colorArray = new Array<string[][]>(maxAnts + 1);
    for (var i = 0; i < maxAnts + 1; i++) {
        colorArray[i] = new Array<string[]>(maxFood + 1);
        var antPercent = i / maxAnts;
        for (var j = 0; j < maxFood + 1; j++) {
            colorArray[i][j] = new Array<string>(2);
            var foodBonus = j == 0 ? 0 : Math.ceil(maxFood * 0.2);
            var foodPercent = Math.min(j + foodBonus, maxFood) / maxFood; // increase visibility of food a bit
            // antColor * antPercent blended with foodColor * foodPercent
            // e.g.
            // 1) 1 ant, 0 food (max 1 ants per cell) => 100% antColor
            // 2) 1 ant, 1 food (max 4 ants per cell, max 2 food per cell) => 25% antColor blended with 50% foodColor
            var antFoodColorToFood = shadeBlendConvert(antPercent + foodPercent > 0 ? antPercent / (antPercent + foodPercent) : 0, config.foodColor, config.antToFoodColor);
            var antFoodColorToNest = shadeBlendConvert(antPercent + foodPercent > 0 ? antPercent / (antPercent + foodPercent) : 0, config.foodColor, config.antToNestColor);
            // blend antFoodColor with groundColor
            // e.g.
            // 1) 1 ant, 0 food (max 1 ants per cell) => 100% antColor
            // 2) 1 ant, 1 food (max 4 ants per cell, max 2 food per cell) => 75% antFoodColor blended with 25% groundColor
            var cellColorToFood = shadeBlendConvert(Math.min(antPercent + foodPercent, 1), config.groundColor, antFoodColorToFood);
            var cellColorToNest = shadeBlendConvert(Math.min(antPercent + foodPercent, 1), config.groundColor, antFoodColorToNest);
            colorArray[i][j][Direction.toFood] = cellColorToFood;
            colorArray[i][j][Direction.toNest] = cellColorToNest;
            // ctx.fillStyle = cellColor;
            // ctx.fillRect(i * pixelSize, j * pixelSize, pixelSize, pixelSize);
        }
    }
}
var pauseFlag = false;
var startCount = 0; // increase after every press on start button to check if the current loop has to be canceled
// One simulation tick, self-scheduling via requestAnimationFrame and throttled
// to config.fps with setTimeout. The loop stops when paused or when its
// currentStartCount is stale (the simulation was restarted).
function gameloop(currentStartCount) {
    setTimeout(function () {
        if (pauseFlag || currentStartCount != startCount) {
            return;
        }
        requestAnimationFrame(gameloop.bind(this, currentStartCount));
        updateField();
        drawClearField();
        drawField();
        updateStatistics();
    }, 1000 / config.fps);
}
// Push the current simulation counters into the HTML status fields.
function updateStatistics() {
    const totalAnts = config.antPopulation * config.nests;
    setHtmlInputValue('antsWithoutFood', totalAnts - statistics.antsWithFood);
    setHtmlInputValue('antsWithFood', statistics.antsWithFood);
    setHtmlInputValue('foodInSources', statistics.foodInSources);
    setHtmlInputValue('foodInNests', statistics.foodInNests);
}
// Restore the statistics to their defaults, recompute the initial food supply
// and refresh the UI.
function resetStatistics() {
    const fresh = JSON.parse(JSON.stringify(default_statistics));
    fresh.foodInSources = config.foodSources * config.maxFood;
    statistics = fresh;
    updateStatistics();
}
// Set up the canvas, the field and helper structures from the current config.
function init() {
    // Fit the larger field dimension into config.canvasSize.
    pixelSize = Math.min(config.canvasSize / config.fieldWidth, config.canvasSize / config.fieldHeight);
    canvas = <HTMLCanvasElement>document.getElementById("my-canvas");
    ctx = canvas.getContext("2d");
    canvas.width = config.fieldWidth * pixelSize;
    canvas.height = config.fieldHeight * pixelSize;
    canvas.style.width = canvas.width + "px";
    canvas.style.height = canvas.height + "px";
    // Fresh grid of empty cells.
    field = new Array(config.fieldWidth);
    for (var i = 0; i < config.fieldWidth; i++) {
        field[i] = new Array(config.fieldHeight);
        for (var j = 0; j < config.fieldHeight; j++) {
            field[i][j] = new Cell(i, j, config.maxAnts);
        }
    }
    // Linear cell indices, shuffled each tick to randomize update order.
    randomField = new Array(config.fieldWidth * config.fieldHeight);
    for (var i = 0; i < config.fieldWidth * config.fieldHeight; i++) {
        randomField[i] = i;
    }
    initColors();
}
// function initRandomValues(field: Cell[][]) {
// for (var i = 0; i < config.obstacles; i++) {
// var x = Math.floor(Math.random() * fieldWidth);
// var y = Math.floor(Math.random() * fieldHeight);
// field[x][y].maxAnts = 0;
// }
// // for (var i = 2; i < fieldWidth - 5; i++) {
// // var x = i;
// // var y = fieldHeight - i;
// // field[x][y].maxAnts = 0;
// // field[x][y+1].maxAnts = 0;
// // }
// field[Math.round(fieldWidth / 4)][Math.round(fieldWidth / 4)].addFood();
// field[Math.round(fieldWidth / 4)][Math.round(fieldWidth / 4)].maxAnts = config.maxAnts;
// field[fieldWidth - Math.round(fieldWidth / 4)][fieldHeight - Math.round(fieldWidth / 4)].setNest();
// field[fieldWidth - Math.round(fieldWidth / 4)][fieldHeight - Math.round(fieldWidth / 4)].maxAnts = config.maxAnts;
// for (var i = 0; i < config.antPopulation; i++) {
// field[fieldWidth - Math.round(fieldWidth / 4)][fieldHeight - Math.round(fieldWidth / 4)].addAnt(new Ant(config.initialPheromoneStrength));
// }
// }
// Smallest Manhattan distance from the food source to any of the given nests.
// Returns Number.MAX_VALUE when the nest list is empty.
function calcMinDistance(foodSource: [number, number], nests: Array<[number, number]>) {
    let best = Number.MAX_VALUE;
    for (const nest of nests) {
        const distance = Math.abs(foodSource[0] - nest[0]) + Math.abs(foodSource[1] - nest[1]);
        if (distance < best) {
            best = distance;
        }
    }
    return best;
}
// Randomly place nests (with their initial ant population), food sources and
// obstacles on the field, honouring config.minDistanceToNest where possible.
function initRandomValues(field: Cell[][]) {
    const maxDistanceToNest = config.fieldHeight + config.fieldWidth;
    if (config.minDistanceToNest > maxDistanceToNest) {
        alert("Reset 'min. distance to nest' to max. allowed value (" + maxDistanceToNest + ")");
        config.minDistanceToNest = maxDistanceToNest;
        setHtmlInputValue('minDistanceToNest', config.minDistanceToNest);
    }
    // With a single nest and a large min distance, restrict the nest placement
    // area so a valid food position can still exist.
    var maxX = config.fieldWidth;
    var maxY = config.fieldHeight;
    if (config.minDistanceToNest > maxDistanceToNest / 2 && config.nests == 1) {
        maxX = config.fieldWidth - config.minDistanceToNest * config.fieldWidth / maxDistanceToNest;
        maxY = config.fieldHeight - config.minDistanceToNest * config.fieldHeight / maxDistanceToNest;
    }
    // Place nests and populate each with config.antPopulation ants.
    var count = 0;
    var nests = new Array<[number, number]>(config.nests);
    while (count < config.nests) {
        var x = Math.floor(Math.random() * maxX);
        var y = Math.floor(Math.random() * maxY);
        if (field[x][y].canAddObstacle()) {
            field[x][y].setNest();
            nests.push([x, y]);
            for (var i = 0; i < config.antPopulation; i++) {
                field[x][y].addAnt(new Ant(config.initialPheromoneStrength));
            }
            count++;
        }
    }
    // Place food sources at least minDistanceToNest away from every nest,
    // giving up on the distance constraint after maxTries attempts.
    count = 0;
    var tries = 0;
    const maxTries = 100000;
    while (count < config.foodSources) {
        var x = Math.floor(Math.random() * config.fieldWidth);
        var y = Math.floor(Math.random() * config.fieldHeight);
        tries++;
        // NOTE(review): due to && binding tighter than ||, once tries exceeds
        // maxTries the food is placed regardless of canAddObstacle() — i.e.
        // possibly on a nest/obstacle. Confirm whether that is intended.
        if (field[x][y].canAddObstacle() && calcMinDistance([x, y], nests) >= config.minDistanceToNest || tries > maxTries) {
            if (tries > maxTries) {
                alert("Couldn't find a solution for 'min. distance to nest = " + config.minDistanceToNest + "'");
            }
            field[x][y].addFood();
            count++;
            tries = 0;
        }
    }
    // Place obstacles (cells with maxAnts = 0) on remaining free cells.
    count = 0;
    while (count < config.obstacles) {
        var x = Math.floor(Math.random() * config.fieldWidth);
        var y = Math.floor(Math.random() * config.fieldHeight);
        if (field[x][y].canAddObstacle()) {
            field[x][y].maxAnts = 0;
            count++;
        }
    }
    return true;
}
// Score a candidate cell for an ant: -1 for full cells, the maximum safe
// integer for the ant's goal cell (nest or food, depending on its direction),
// otherwise the matching pheromone level.
function getCellScoreAnt(cell: Cell, ant: Ant) {
    if (!cell.canAddAnt()) {
        return -1; // no room on cell
    }
    if (ant.direction === Direction.toNest) {
        // return Number.MAX_VALUE;
        return cell.nest ? Number.MAX_SAFE_INTEGER : cell.toNestPheromone;
    }
    // return Number.MAX_VALUE;
    return cell.food ? Number.MAX_SAFE_INTEGER : cell.toFoodPheromone;
}
// get best neighbouring cell for the ant to move (can also be the cell he is currently in)
// get best neighbouring cell for the ant to move (can also be the cell he is currently in)
// All existing neighbours are scored with scoreFunction, then one is picked by
// roulette-wheel selection over the (optionally squared) normalized scores.
function getBestCell(field: Cell[][], x: number, y: number, ant: Ant, scoreFunction: (cell: Cell, ant: Ant) => number) {
    // Neighbour layout around index 4 (the current cell):
    // 0 1 2
    // _____
    // 0 | 0 1 2
    // 1 | 3 4 5
    // 2 | 6 7 8
    // var neighbourCells = [1, 3, 5, 7]; // top, left, right, bottom
    var neighbourCells = [0, 1, 2, 3, 5, 6, 7, 8];
    neighbourCells = randomizeArray(neighbourCells); // shuffle so ties break randomly
    var neighbours = [];
    var neighbourScores = [];
    for (var i = 0; i < neighbourCells.length; i++) {
        var n = neighbourCells[i];
        var nx = Math.floor(n / 3);
        var ny = n % 3;
        var rx = nx - 1 + x;
        var ry = ny - 1 + y;
        if (rx < 0 || rx >= config.fieldWidth || ry < 0 || ry >= config.fieldHeight) {
            continue; // neighbour lies outside the field
        }
        var cell = field[rx][ry];
        var score = scoreFunction(cell, ant);
        neighbours.push(cell);
        neighbourScores.push(score);
    }
    // Shift scores by +1 (full cells scored -1 become 0), optionally square to
    // sharpen the preference, then normalize into selection probabilities.
    neighbourScores.forEach((a, index) => neighbourScores[index] = a + 1); // add + 1
    if (config.squareCellScore) {
        neighbourScores.forEach((a, index) => neighbourScores[index] = a * a); // square it
    }
    var scoreSum = neighbourScores.reduce((a, b) => a + b, 0); // sum them
    neighbourScores.forEach((a, index) => neighbourScores[index] = a / scoreSum); // divide by sum
    // Roulette-wheel draw.
    var random = Math.random();
    var selectedIndex;
    for (selectedIndex = 0; selectedIndex < neighbourScores.length; selectedIndex++) {
        random -= neighbourScores[selectedIndex];
        if (random <= 0) {
            break;
        }
    }
    return neighbours[selectedIndex];
}
function getBestCellAnt(field: Cell[][], x: number, y: number, ant: Ant) {
return getBestCell(field, x, y, ant, this.getCellScoreAnt);
}
/**
 * Pheromone-minimisation step for cell (x, y): removes a small amount of
 * pheromone from the cell and redistributes it to those neighbours whose
 * pheromone level lies below the local average.
 * @param minToNest true: operate on toNestPheromone, false: on toFoodPheromone
 */
function minimisationAlgorithm(field: Cell[][], x: number, y: number, minToNest: boolean) {
    var cell = field[x][y];
    // Collect all in-bounds cells of the 3x3 block (including the cell itself, index 4).
    var neighbourCells = [0, 1, 2, 3, 4, 5, 6, 7, 8];
    var neighbours = [];
    // (removed a no-op `neighbours.pop()` on the freshly-created empty array)
    for (var i = 0; i < neighbourCells.length; i++) {
        var n = neighbourCells[i];
        var nx = Math.floor(n / 3);
        var ny = n % 3;
        var rx = nx - 1 + x;
        var ry = ny - 1 + y;
        if (rx < 0 || rx >= config.fieldWidth || ry < 0 || ry >= config.fieldHeight) {
            continue;
        }
        neighbours.push(field[rx][ry]);
    }
    // Amount to take from this cell: a fixed value plus a percentage of its level.
    var toSpread = config.minimisationSpreadValue;
    if (minToNest) {
        toSpread += cell.toNestPheromone * config.minimisationSpreadPercentage / 100;
        if (cell.toNestPheromone - toSpread <= 0)
            return; // not enough pheromone left to spread
        cell.toNestPheromone -= toSpread;
    } else {
        toSpread += cell.toFoodPheromone * config.minimisationSpreadPercentage / 100;
        if (cell.toFoodPheromone - toSpread <= 0)
            return;
        cell.toFoodPheromone -= toSpread;
    }
    // Iteratively drop neighbours already above the running average so the
    // spread only raises below-average cells.
    var eliminated = true;
    var av_q = 0;
    while (eliminated) {
        eliminated = false;
        av_q = 0;
        neighbours.forEach(element => {
            if (minToNest)
                av_q += element.toNestPheromone;
            else
                av_q += element.toFoodPheromone;
        });
        av_q += toSpread;
        av_q /= neighbours.length;
        for (i = 0; i < neighbours.length; i++) {
            if (minToNest) {
                if (neighbours[i].toNestPheromone > av_q) {
                    eliminated = true;
                    neighbours.splice(i, 1);
                    i--;
                }
            }
            else {
                if (neighbours[i].toFoodPheromone > av_q) {
                    eliminated = true;
                    neighbours.splice(i, 1);
                    i--;
                }
            }
        }
    }
    if (neighbours.length <= 0)
        return;
    // Raise every remaining neighbour up to the computed average.
    neighbours.forEach(element => {
        if (minToNest) {
            var f = av_q - element.toNestPheromone;
            element.toNestPheromone += Math.abs(f);
        }
        else {
            var f = av_q - element.toFoodPheromone;
            element.toFoodPheromone += Math.abs(f);
        }
    });
}
// Process cell (x, y) for one tick: optionally run pheromone minimisation,
// then let each not-yet-moved ant pick a neighbouring cell, move there,
// deposit pheromone and pick up / deliver food.
function transition(field: Cell[][], x: number, y: number) {
    var cell = field[x][y];
    if (config.minimisationAlgorithmEnabled) {
        minimisationAlgorithm(field, x, y, true);
        minimisationAlgorithm(field, x, y, false);
    }
    for (var a = 0; a < cell.ants.length; a++) {
        var ant = cell.ants[a];
        if (ant.moved === true) {
            // NOTE(review): this returns from the whole cell rather than
            // skipping just this ant (continue) — confirm whether remaining
            // ants in the cell are intentionally frozen for this tick.
            return;
        }
        ant.moved = true;
        var bestCell = getBestCellAnt(field, x, y, ant);
        if (bestCell != null && bestCell != cell) { // move ant
            if (bestCell.addAnt(ant)) { // add to better cell
                cell.ants.splice(a, 1); // remove from current cell
                a--;
                if (ant.direction === Direction.toFood) {
                    // Searching ants mark the way back to the nest.
                    bestCell.addtoNestPheromone(ant.pheromoneStrength);
                    if (bestCell.food && !ant.hasFood) { // take food
                        ant.direction = Direction.toNest;
                        bestCell.takeFood();
                        ant.hasFood = true;
                        ant.pheromoneStrength = Math.max(config.initialPheromoneStrength, ant.pheromoneStrength);
                        //ant.pheromoneStrength = config.initialPheromoneStrength;
                        statistics.foodInSources--;
                        statistics.antsWithFood++;
                    }
                } else {
                    // Returning ants mark the way to the food source.
                    bestCell.addToFoodPheromone(ant.pheromoneStrength);
                    if (bestCell.nest && ant.hasFood) { // deliver food
                        ant.direction = Direction.toFood;
                        ant.hasFood = false;
                        ant.pheromoneStrength = config.initialPheromoneStrength;
                        statistics.foodInNests++;
                        statistics.antsWithFood--;
                    }
                }
                ant.decreasePheromoneStrength();
            }
        }
    }
}
// Advance the simulation by one tick: move ants in random cell order, then
// clear per-tick flags and evaporate pheromone on every cell.
function updateField() {
    randomField = randomizeArray(randomField);
    // Visit cells in shuffled order so no direction is systematically favoured.
    for (const n of randomField) {
        const y = Math.floor(n / config.fieldWidth);
        const x = n % config.fieldWidth;
        transition(field, x, y);
    }
    // Second pass: allow every ant to move again next tick, let pheromone decay.
    for (let i = 0; i < config.fieldWidth; i++) {
        for (let j = 0; j < config.fieldHeight; j++) {
            const cell = field[i][j];
            if (cell != null) {
                cell.setMoved(false);
                cell.decreasePheromone();
            }
        }
    }
}
// Paint the ground color over the field area; in separate-pheromone mode the
// two extra view quadrants are cleared as well.
function drawClearField() {
    const ps = config.seperatePheromoneView ? pixelSize / 2 : pixelSize;
    const w = config.fieldWidth * ps;
    const h = config.fieldHeight * ps;
    ctx.fillStyle = config.groundColor;
    ctx.fillRect(0, 0, w, h);
    if (config.seperatePheromoneView) {
        ctx.fillRect(w, 0, w, h); // to-food pheromone quadrant
        ctx.fillRect(0, h, w, h); // to-nest pheromone quadrant
    }
}
// Render the field. In seperatePheromoneView mode the canvas is split into
// quadrants (top-left: field, top-right: to-food pheromone, bottom-left:
// to-nest pheromone) and pixelSize is temporarily halved for the layout.
function drawField() {
    var savedPixelSize = pixelSize;
    var offsetX, offsetY;
    if (config.seperatePheromoneView) {
        pixelSize = pixelSize / 2;
        offsetX = config.fieldWidth * pixelSize;
        offsetY = config.fieldHeight * pixelSize;
    }
    ctx.font = pixelSize / 2 + "px Courier New";
    for (var i = 0; i < config.fieldWidth; i++) {
        for (var j = 0; j < config.fieldHeight; j++) {
            var cell = field[i][j];
            var cellColor = cell.color;
            ctx.fillStyle = cellColor;
            ctx.fillRect(i * pixelSize, j * pixelSize, pixelSize, pixelSize);
            if (config.seperatePheromoneView) { // seperatePheromoneView
                // Pheromone levels rendered as alpha-blended overlays plus the
                // numeric value, each in its own quadrant.
                if (cell.toFoodPheromone > 0) {
                    // var toFoodColorValue = Math.round((cell.toFoodPheromone / config.maxPheromone) * 200) + 55; // skip to dark values
                    // ctx.fillStyle = "rgba(0, " + toFoodColorValue + ", 0, 1.0)";
                    var toFoodColorPercent = cell.toFoodPheromone / config.maxPheromone;
                    ctx.fillStyle = "rgba(0, 120, 0, " + toFoodColorPercent + ")";
                    ctx.fillRect(i * pixelSize + offsetX, j * pixelSize, pixelSize, pixelSize);
                    ctx.fillStyle = "#000000";
                    ctx.fillText("" + Math.ceil(field[i][j].toFoodPheromone), (i) * pixelSize + offsetX, (j) * pixelSize + 0.6 * pixelSize);
                }
                if (cell.toNestPheromone > 0) {
                    // var toNestColorValue = Math.round((cell.toNestPheromone / config.maxPheromone) * 200) + 55;// skip to dark values
                    // ctx.fillStyle = "rgba(" + toNestColorValue + ", 0, 0, 1.0)";
                    var toNestColorPercent = cell.toNestPheromone / config.maxPheromone;
                    ctx.fillStyle = "rgba(120, 0, 0, " + toNestColorPercent + ")";
                    ctx.fillRect(i * pixelSize, j * pixelSize + offsetY, pixelSize, pixelSize);
                    ctx.fillStyle = "#000000";
                    ctx.fillText("" + Math.ceil(field[i][j].toNestPheromone), (i) * pixelSize, (j) * pixelSize + 0.6 * pixelSize + offsetY);
                }
                // Mirror notable cells (ants, food, nests, obstacles) into both
                // pheromone quadrants for orientation.
                if (cell.ants.length > 0 || cell.food > 0 || cell.nest == true || cell.maxAnts == 0) {
                    ctx.fillStyle = cellColor;
                    ctx.fillRect(i * pixelSize + offsetX, j * pixelSize, pixelSize, pixelSize);
                    ctx.fillRect(i * pixelSize, j * pixelSize + offsetY, pixelSize, pixelSize);
                }
            }
            else { // no seperatePheromoneView
                // Overlay the pheromone values as text directly on the field.
                if (field[i][j].toFoodPheromone > 0) {
                    ctx.fillStyle = config.ToFoodPheromoneColor;
                    ctx.fillText("" + Math.ceil(field[i][j].toFoodPheromone), (i) * pixelSize, (j) * pixelSize + pixelSize);
                }
                if (field[i][j].toNestPheromone > 0) {
                    ctx.fillStyle = config.ToNestPheromoneColor;
                    ctx.fillText("" + Math.ceil(field[i][j].toNestPheromone), (i) * pixelSize, (j) * pixelSize + 0.5 * pixelSize);
                }
            }
        }
    }
    if (config.seperatePheromoneView) {
        // Divider lines and axis labels between the quadrants.
        ctx.fillStyle = "#000000";
        var lineWidth = pixelSize / 4;
        ctx.lineWidth = lineWidth;
        var lineOffset = lineWidth / 2;
        ctx.beginPath();
        ctx.moveTo(offsetX + lineOffset, 0);
        ctx.lineTo(offsetX + lineOffset, offsetY + lineWidth);
        ctx.stroke();
        ctx.beginPath();
        ctx.moveTo(0, offsetY + lineOffset);
        ctx.lineTo(offsetX + lineWidth, offsetY + lineOffset);
        ctx.stroke();
        ctx.font = "48px Helvetica";
        ctx.fillText("← To Nest", offsetX + pixelSize, offsetY * 1.6);
        ctx.fillText("↑ To Food", offsetX * 1.25, offsetY * 1.2);
        pixelSize = savedPixelSize; // restore for the next frame
    }
}
// Sync the fps slider into the config and mirror it into the number display.
function changeFps() {
    const fps = <number>getValueFromHMTLInput('fps');
    config.fps = fps;
    setHtmlInputValue('fpsNumber', fps);
}
// Replace the active config with a deep copy of the defaults and refresh the form.
function restoreDefaultConfig() {
    const fresh = JSON.parse(JSON.stringify(default_config));
    config = fresh;
    fillHtmlInputs(fresh);
}
// Read the UI configuration, (re)build the field and start a fresh game loop.
function startSimulation() {
    pauseFlag = false;
    readConfigurationValues(config);
    init();
    initRandomValues(field);
    // Bumping startCount invalidates any still-scheduled older loop.
    startCount++;
    gameloop(startCount);
    resetStatistics();
    startButton.value = "Restart simulation";
    pauseButton.style.display = "inline-block";
    resumeButton.style.display = "none";
}
// Pause the running simulation and swap the pause/resume buttons.
function pauseSimulation() {
    pauseFlag = true;
    resumeButton.style.display = "inline-block";
    pauseButton.style.display = "none";
}
// Resume a paused simulation and restart the animation loop with the current run id.
function resumeSimulation() {
    pauseFlag = false;
    resumeButton.style.display = "none";
    pauseButton.style.display = "inline-block";
    requestAnimationFrame(gameloop.bind(this, startCount));
}
// UI control references, resolved once the DOM is ready.
var startButton: HTMLButtonElement;
var pauseButton: HTMLButtonElement;
var resumeButton: HTMLButtonElement;
window.onload = function () {
    // Populate the settings form with the current config and grab the buttons.
    fillHtmlInputs(config);
    startButton = <HTMLButtonElement>document.getElementById("btn-start-simulation");
    pauseButton = <HTMLButtonElement>document.getElementById("btn-pause-simulation");
    resumeButton = <HTMLButtonElement>document.getElementById("btn-resume-simulation");
};
|
package shimV2
import (
"encoding/json"
"io/ioutil"
"log"
"os"
)
// PubsubMessage mirrors the JSON payload of a Pub/Sub push message.
type PubsubMessage struct {
	Attributes  map[string]string `json:"attributes,omitempty"`
	Data        string            `json:"data,omitempty"`
	MessageId   string            `json:"messageId,omitempty"`
	PublishTime string            `json:"publishTime,omitempty"`
}

// PubsubHandlerFunc is the callback invoked for each decoded message.
type PubsubHandlerFunc func(message PubsubMessage)

// HandlePubSubMessage reads a JSON-encoded PubsubMessage from stdin and passes
// it to h. A read or decode failure terminates the process via log.Fatal.
func HandlePubSubMessage(h PubsubHandlerFunc) {
	raw, err := ioutil.ReadAll(os.Stdin)
	if err != nil {
		log.Fatal(err)
	}
	var msg PubsubMessage
	if err := json.Unmarshal(raw, &msg); err != nil {
		log.Fatal(err)
	}
	h(msg)
}
|
<filename>Std/Data/Text.hs
{-# LANGUAGE MagicHash, UnboxedTuples #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE UnliftedFFITypes #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE PatternSynonyms #-}
{-|
Module      : Std.Data.Text
Description : Unicode text processing
Copyright   : (c) <NAME>, 2017-2018
License     : BSD
Maintainer  : <EMAIL>
Stability   : experimental
Portability : non-portable

A 'Text' simply wraps a 'Bytes' that are UTF-8 encoded codepoints, you can use 'validate' \/ 'validateMaybe' to construct a 'Text'.
-}

-- This module only re-exports; all definitions live in the submodules
-- imported at the bottom of the file.
module Std.Data.Text (
  -- * Text type
    Text, getUTF8Bytes
  , validate, validateMaybe
  -- * Basic creating
  , empty, singleton, copy
  -- * Building text
  , replicate, cycleN
  -- * Conversion between list
  , pack, packN, packR, packRN
  , unpack, unpackR
  -- * Conversion between codepoint vector
  , fromVector
  , toVector
  -- * Basic interface
  , null
  , length
  , append
  , map', imap'
  , foldl', ifoldl'
  , foldr', ifoldr'
  , concat, concatMap
  -- ** Special folds
  , count, all, any
  -- * Searching by equality
  , elem, notElem
  -- * Slice manipulation
  , cons, snoc
  , uncons, unsnoc
  , headMaybe, tailMayEmpty
  , lastMaybe, initMayEmpty
  , inits, tails
  , take, drop, takeR, dropR
  , slice
  , splitAt
  , takeWhile, takeWhileR, dropWhile, dropWhileR, dropAround
  , break, span
  , breakR, spanR, breakOn, breakOnAll
  , group, groupBy
  , stripPrefix, stripSuffix
  , split, splitWith, splitOn
  , isPrefixOf, isSuffixOf, isInfixOf
  , commonPrefix
  , words, lines, unwords, unlines
  , padLeft, padRight
  -- * Transform
  , reverse
  , intersperse
  , intercalate
  , intercalateElem
  , transpose
  -- * Search
  -- ** element-wise search
  , find, findR
  , filter, partition
  -- * Unicode processing
  -- ** normalization
  , NormalizationResult(..), NormalizeMode(..)
  , isNormalized, isNormalizedTo, normalize, normalizeTo
  -- ** Case conversion
  -- $case
  , Locale, localeDefault, localeLithuanian, localeTurkishAndAzeriLatin
  , caseFold, caseFoldWith, toLower, toLowerWith, toUpper, toUpperWith, toTitle, toTitleWith
  -- ** Unicode category
  , isCategory, spanCategory
  , Category
  , categoryLetterUppercase
  , categoryLetterLowercase
  , categoryLetterTitlecase
  , categoryLetterOther
  , categoryLetter
  , categoryCaseMapped
  , categoryMarkNonSpacing
  , categoryMarkSpacing
  , categoryMarkEnclosing
  , categoryMark
  , categoryNumberDecimal
  , categoryNumberLetter
  , categoryNumberOther
  , categoryNumber
  , categoryPunctuationConnector
  , categoryPunctuationDash
  , categoryPunctuationOpen
  , categoryPunctuationClose
  , categoryPunctuationInitial
  , categoryPunctuationFinal
  , categoryPunctuationOther
  , categoryPunctuation
  , categorySymbolMath
  , categorySymbolCurrency
  , categorySymbolModifier
  , categorySymbolOther
  , categorySymbol
  , categorySeparatorSpace
  , categorySeparatorLine
  , categorySeparatorParagraph
  , categorySeparator
  , categoryControl
  , categoryFormat
  , categorySurrogate
  , categoryPrivateUse
  , categoryUnassigned
  , categoryCompatibility
  -- NOTE(review): name appears to be missing a trailing 'r'
  -- ("...Cluster"); it must match the definition in Std.Data.Text.Base,
  -- so confirm there before renaming.
  , categoryIgnoreGraphemeCluste
  , categoryIscntrl
  , categoryIsprint
  , categoryIsspace
  , categoryIsblank
  , categoryIsgraph
  , categoryIspunct
  , categoryIsalnum
  , categoryIsalpha
  , categoryIsupper
  , categoryIslower
  , categoryIsdigit
  , categoryIsxdigit
  ) where

import Std.Data.Text.Base
import Std.Data.Text.Search
import Std.Data.Text.Extra
import Prelude ()
|
<gh_stars>0
# Copyright (c) 2016-2021, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
import bpy
import math
from mathutils import *
from .error import DazError, reportError
from .utils import *
#-------------------------------------------------------------
# Formula
#-------------------------------------------------------------
class Formula:
    """Mixin for assets that carry DAZ 'formulas' blocks.

    A formula drives one channel (a morph value, a bone transform, ...)
    from another. This class parses the raw formula structs, can apply
    the simple push/push/mult form directly at build time, and can
    translate formulas into driver expression dicts for later use.
    """

    def __init__(self):
        self.formulas = []   # raw formula structs from the DAZ file
        self.built = False

    def parse(self, struct):
        """Store the raw formula list from the DAZ json, if formulas are enabled."""
        if (LS.useFormulas and
            "formulas" in struct.keys()):
            self.formulas = struct["formulas"]

    def build(self, context, inst):
        """Evaluate each simple formula and apply morph-value outputs."""
        from .modifier import Morph
        from .node import Node
        for formula in self.formulas:
            ref,key,value = self.computeFormula(formula)
            if ref is None:
                continue
            asset = self.getAsset(ref)
            if asset is None:
                continue
            # Only morph "value" channels are applied at build time.
            if key == "value" and isinstance(asset, Morph):
                asset.build(context, inst, value)

    def postbuild(self, context, inst):
        """After building, forward node-channel formula results to instances.

        Only runs in morph-only mode (LS.useMorphOnly).
        """
        from .modifier import Morph
        from .node import Node
        if not LS.useMorphOnly:
            return
        for formula in self.formulas:
            ref,key,value = self.computeFormula(formula)
            if ref is None:
                continue
            asset = self.getAsset(ref)
            if isinstance(asset, Morph):
                pass    # morphs were already handled in build()
            elif isinstance(asset, Node):
                inst = asset.getInstance(ref, self.caller)
                if inst:
                    inst.formulate(key, value)

    def computeFormula(self, formula):
        """Evaluate a formula of exactly three operations (push, push, mult).

        Returns (output_ref, channel_key, value), or (None, None, 0) when the
        formula has a different shape or cannot be evaluated.
        """
        if len(formula["operations"]) != 3:
            return None,None,0
        stack = []
        for struct in formula["operations"]:
            op = struct["op"]
            if op == "push":
                if "url" in struct.keys():
                    # Push the current value of the referenced asset.
                    ref,key = self.getRefKey(struct["url"])
                    if ref is None or key != "value":
                        return None,None,0
                    asset = self.getAsset(ref)
                    if not hasattr(asset, "value"):
                        return None,None,0
                    stack.append(asset.value)
                elif "val" in struct.keys():
                    # Push a literal value.
                    data = struct["val"]
                    stack.append(data)
                else:
                    reportError("Cannot push %s" % struct.keys(), trigger=(1,5), force=True)
            elif op == "mult":
                x = stack[-2]*stack[-1]
                stack = stack[:-2]
                stack.append(x)
            else:
                reportError("Unknown formula %s %s" % (op, struct.items()), trigger=(1,5), force=True)
        if len(stack) == 1:
            ref,key = self.getRefKey(formula["output"])
            return ref,key,stack[0]
        else:
            raise DazError("Stack error %s" % stack)
            # NOTE(review): unreachable after the raise above; kept from original.
            return None,None,0

    def evalFormulas(self, rig, mesh):
        """Translate all formulas into an expression dict keyed by output name."""
        # NOTE(review): `success` is assigned but never updated or returned.
        success = False
        exprs = {}
        for formula in self.formulas:
            self.evalFormula(formula, exprs, rig, mesh)
        if not exprs and GS.verbosity > 3:
            print("Could not parse formulas", self.formulas)
        return exprs

    def evalFormula(self, formula, exprs, rig, mesh):
        """Parse one formula into exprs[output][path][idx]; returns False on failure."""
        from .bone import getTargetName
        from .modifier import ChannelAsset
        words = unquote(formula["output"]).split("#")
        fileref = words[0].split(":",1)[-1]
        driven = words[-1]
        output,channel = driven.split("?")
        if channel == "value":
            # Property (morph value) outputs need a rig or mesh to attach to.
            if mesh is None and rig is None:
                if GS.verbosity > 2:
                    print("Cannot drive properties", output)
                    print("  ", unquote(formula["output"]))
                return False
            pb = None
        else:
            # Transform-channel outputs must resolve to a pose bone.
            output1 = getTargetName(output, rig)
            if output1 is None:
                reportError("Missing bone (evalFormula): %s" % output, trigger=(2,4))
                return False
            else:
                output = output1
            if output not in rig.pose.bones.keys():
                return False
            pb = rig.pose.bones[output]
        path,idx,default = self.parseChannel(channel)
        if output not in exprs.keys():
            exprs[output] = {"*fileref" : (fileref, channel)}
        if path not in exprs[output].keys():
            exprs[output][path] = {}
        if idx not in exprs[output][path].keys():
            # Skeleton expression; filled in by evalStage/evalOperations below.
            exprs[output][path][idx] = {
                "factor" : 0,
                "factor2" : 0,
                "prop" : None,
                "bone" : None,
                "bone2" : None,
                "path" : None,
                "comp" : -1,
                "comp2" : -1,
                "mult" : None}
        expr = exprs[output][path][idx]
        if "stage" in formula.keys():
            self.evalStage(formula, expr)
        else:
            self.evalOperations(formula, expr)

    def evalStage(self, formula, expr):
        """Handle staged formulas: a 'mult' stage contributes a multiplier property."""
        if formula["stage"] == "mult":
            opers = formula["operations"]
            prop,type,path,comp = self.evalUrl(opers[0])
            if type == "value":
                expr["mult"] = prop

    def evalOperations(self, formula, expr):
        """Fill expr's input slots (prop / bone / bone2) from the operation list."""
        opers = formula["operations"]
        prop,type,path,comp = self.evalUrl(opers[0])
        factor = "factor"
        if type == "value":
            if expr["prop"] is None:
                expr["prop"] = prop
        elif expr["bone"] is None:
            expr["bone"] = prop
            expr["comp"] = comp
        else:
            # Second bone input uses the secondary factor slot.
            expr["bone2"] = prop
            factor = "factor2"
            expr["comp2"] = comp
        expr["path"] = path
        self.evalMainOper(opers, expr, factor)

    def evalUrl(self, oper):
        """Decode a push operation's url into (prop, type, path, component)."""
        if "url" not in oper.keys():
            print(oper)
            raise RuntimeError("BUG: Operation without URL")
        url = oper["url"].split("#")[-1]
        prop,type = url.split("?")
        prop = unquote(prop)
        path,comp,default = self.parseChannel(type)
        return prop,type,path,comp

    def evalMainOper(self, opers, expr, factor):
        """Extract the formula's scaling: a constant factor or spline points."""
        if len(opers) == 1:
            expr[factor] = 1
            return
        oper = opers[-1]
        op = oper["op"]
        if op == "mult":
            expr[factor] = opers[1]["val"]
        elif op == "spline_tcb":
            expr["points"] = [opers[n]["val"] for n in range(1,len(opers)-2)]
        elif op == "spline_linear":
            expr["points"] = [opers[n]["val"] for n in range(1,len(opers)-2)]
        else:
            reportError("Unknown formula %s" % opers, trigger=(2,6))
            return

    def parseChannel(self, channel):
        """Split a channel string into (attribute, component index, default value)."""
        if channel == "value":
            return channel, 0, 0.0
        elif channel == "general_scale":
            return channel, 0, 1.0
        attr,comp = channel.split("/")
        idx = getIndex(comp)
        if attr in ["rotation", "translation", "scale", "center_point", "end_point"]:
            default = Vector((0,0,0))
        elif attr in ["orientation"]:
            return None, 0, Vector()
        else:
            msg = ("Unknown attribute: %s" % attr)
            reportError(msg)
            # NOTE(review): if reportError returns instead of raising,
            # `default` is unbound on the return below -> NameError. Confirm.
        return attr, idx, default

    def getExprValue(self, expr, key):
        """Return expr['factor'][key] if present, else None."""
        if ("factor" in expr.keys() and
            key in expr["factor"].keys()):
            return expr["factor"][key]
        else:
            return None

    def getRefKey(self, string):
        """Split 'scheme:path?channel' into [reference, channel key]."""
        base = string.split(":",1)[-1]
        return base.rsplit("?",1)
|
/**
 * Simple data holder pairing a notification with the handback object that was
 * supplied when the listener was registered (presumably the JMX
 * Notification/handback pair — confirm against the enclosing class's imports).
 */
private static class QueueEntry
{
    /** The notification to deliver. */
    public Notification notification;
    /** Opaque caller context, passed back to the listener unchanged. */
    public Object handback;

    public QueueEntry(Notification notification, Object handback)
    {
        this.notification = notification;
        this.handback = handback;
    }
}
package abd.p1.model;

import javax.persistence.Entity;

/**
 * JPA entity representing a friend request ("solicitud de amistad").
 * Specializes Mensaje without adding fields of its own.
 */
@Entity
public class Solicitud_amistad extends Mensaje {

    /** No-argument constructor required by JPA. */
    public Solicitud_amistad() {
    }
}
|
/**************************************************************************************
* Copyright (c) 2016-2017, ARM Limited or its affiliates. All rights reserved *
* *
* This file and the related binary are licensed under the following license: *
* *
* ARM Object Code and Header Files License, v1.0 Redistribution. *
* *
* Redistribution and use of object code, header files, and documentation, without *
* modification, are permitted provided that the following conditions are met: *
* *
* 1) Redistributions must reproduce the above copyright notice and the *
* following disclaimer in the documentation and/or other materials *
* provided with the distribution. *
* *
* 2) Unless to the extent explicitly permitted by law, no reverse *
* engineering, decompilation, or disassembly of is permitted. *
* *
* 3) Redistribution and use is permitted solely for the purpose of *
* developing or executing applications that are targeted for use *
* on an ARM-based product. *
* *
* DISCLAIMER. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND *
* CONTRIBUTORS "AS IS." ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT *
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT, *
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE *
* COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, *
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED *
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR *
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF *
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING *
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
**************************************************************************************/
#ifndef SW_CRYS_RSA_SHARED_TYPES_H
#define SW_CRYS_RSA_SHARED_TYPES_H
#ifdef __cplusplus
extern "C"
{
#endif
/** @file
* @brief This file contains the types for the CCSW RSA module.
*/
/************************ Defines ******************************/
/************************************************************************/
/* the following definitions are only relevant for RSA code on SW */
/************************************************************************/
/* Define the maximal allowed width of the exponentiation sliding window
in range 2...6. This define is actual for projects on soft platform.
To minimize code size use the minimum value. To optimize performance
choose the maximum value */
/* The valid key size in bits */
#define SW_CRYS_RSA_MIN_VALID_KEY_SIZE_VALUE_IN_BITS 512
#ifndef CRYS_NO_RSA_MAX_KEY_SIZE_4096_BIT_SUPPORT
#define SW_CRYS_RSA_MAX_VALID_KEY_SIZE_VALUE_IN_BITS 4096
#define SW_CRYS_RSA_MAX_KEY_GENERATION_SIZE_BITS 4096
#else
#ifndef CRYS_NO_RSA_MAX_KEY_SIZE_3072_BIT_SUPPORT
#define SW_CRYS_RSA_MAX_VALID_KEY_SIZE_VALUE_IN_BITS 3072
#define SW_CRYS_RSA_MAX_KEY_GENERATION_SIZE_BITS 3072
#else
#define SW_CRYS_RSA_MAX_VALID_KEY_SIZE_VALUE_IN_BITS 2048
#define SW_CRYS_RSA_MAX_KEY_GENERATION_SIZE_BITS 2048
#endif
#endif
/* Define the size of the exponentiation temp buffer, used in LLF_PKI exponentiation and NON DEPENDED on
width of the sliding window. The size defined in units equaled to maximal RSA modulus size */
#define PKI_EXP_CONST_TEMP_BUFF_SIZE_IN_MODULUS_UNITS 7
#define PKI_EXP_SLIDING_WINDOW_MAX_VALUE 2
/* The maximum buffer size for the 'H' value */
#define SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS ((SW_CRYS_RSA_MAX_VALID_KEY_SIZE_VALUE_IN_BITS + 64UL ) / 32 )
/* definition of PKI_KEY_GEN_TEMP_BUFF_SIZE_WORDS IS DEPENDED on width of the sliding window*/
#if( PKI_EXP_SLIDING_WINDOW_MAX_VALUE > 2 )
#define PKI_KEY_GEN_TEMP_BUFF_SIZE_WORDS \
((4 + (1<<(PKI_EXP_SLIDING_WINDOW_MAX_VALUE-2))) * SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS)
#else
#define PKI_KEY_GEN_TEMP_BUFF_SIZE_WORDS \
(16 * SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS)
#endif
#ifndef PKI_EXP_WINDOW_TEMP_BUFFER_SIZE_IN_MODULUS_UNITS
#define PKI_EXP_WINDOW_TEMP_BUFFER_SIZE_IN_MODULUS_UNITS (3 + (1 << (PKI_EXP_SLIDING_WINDOW_MAX_VALUE-1)))
#endif
/* Define the size of the temp buffer, used in LLF_PKI exponentiation and DEPENDED on
width of the sliding window in words */
#if (PKI_EXP_CONST_TEMP_BUFF_SIZE_IN_MODULUS_UNITS > PKI_EXP_WINDOW_TEMP_BUFFER_SIZE_IN_MODULUS_UNITS )
#define PKI_EXP_TEMP_BUFFER_SIZE_IN_WORDS \
(PKI_EXP_CONST_TEMP_BUFF_SIZE_IN_MODULUS_UNITS * SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS + 2 )
#else
#define PKI_EXP_TEMP_BUFFER_SIZE_IN_WORDS \
(PKI_EXP_WINDOW_TEMP_BUFFER_SIZE_IN_MODULUS_UNITS * SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS + 2 )
#endif
/* the RSA data type */
typedef struct SW_Shared_CRYS_RSAPrimeData_t {
/* The aligned input and output data buffers */
uint32_t DataIn[SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS];
uint32_t DataOut[SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS];
/* #include specific fields that are used by the low level */
struct {
union {
struct { /* Temporary buffers used for the exponent calculation */
uint32_t Tempbuff1[PKI_EXP_TEMP_BUFFER_SIZE_IN_WORDS];
uint32_t Tempbuff2[SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS * 2];
/* Temporary buffer for self-test support */
uint32_t TempBuffer[SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS];
}NonCrt;
struct { /* Temporary buffers used for the exponent calculation */
uint32_t Tempbuff1[PKI_EXP_TEMP_BUFFER_SIZE_IN_WORDS];
uint32_t Tempbuff2[SW_CRYS_RSA_MAXIMUM_MOD_BUFFER_SIZE_IN_WORDS * 2];
}Crt;
}Data;
}LLF;
}SW_Shared_CRYS_RSAPrimeData_t;
#ifdef __cplusplus
}
#endif
#endif
|
import { prerunHookWithOptions } from '@cenk1cenk2/boilerplate-oclif'

// oclif prerun hook supplied by the boilerplate.
// NOTE(review): registerExitListeners presumably wires process exit/signal
// handlers for cleanup -- confirm against the boilerplate-oclif docs.
export default prerunHookWithOptions({ registerExitListeners: true })
|
def _get_plugin_from_registry(self, trans, visualization_name):
if not trans.app.visualizations_registry:
raise HTTPNotFound('No visualization registry (possibly disabled in galaxy.ini)')
return trans.app.visualizations_registry.get_plugin(visualization_name) |
<reponame>RwdrQP/scrawler_test<filename>scrawler_baidu.py
import scrawler
def main():
    """Download the Baidu front page, report the status, and save the page."""
    url = "http://www.baidu.com/"
    print("downloading " + url)
    html = scrawler.download(url)

    # Map each known status code to its report string.
    status_messages = {
        scrawler.page.ERROR_EXCEPT: "exception",
        scrawler.page.ERROR_INPUT: "input none",
        scrawler.page.ERROR_SERVER: "server error",
        scrawler.page.ERROR_OTHER: "other error",
        scrawler.page.ERROR_OK: "ok",
    }
    message = status_messages.get(html.status)
    if message is not None:
        print(message)

    # The page is saved regardless of the reported status.
    scrawler.save("/home/anonymous/webscrawler", "baidu.html", html)


main()
|
// Validate checks the model, struct and any custom validations
func (p *Person) Validate() error {
p.BeforeValidate()
return validation.ValidateStruct(p,
validation.Field(&p.Email, validation.Required, is.Email),
validation.Field(&p.FirstName, validation.Length(0, 50)),
validation.Field(&p.LastName, validation.Length(0, 50)),
validation.Field(&p.MiddleName, validation.Length(0, 50)),
)
} |
REALISATION AND PERFORMANCE OF THE ADJUSTED NUCLEAR DATA LIBRARY ERALIB1 FOR CALCULATING FAST REACTORS
The adjusted nuclear data library ERALIB1 is described in this paper. It is the first step in the process towards a unique data set which will be valid for all applications (core neutronics, shielding, fuel cycle) and for all types of fission reactor (thermal, epithermal, fast). It has been derived from a 1968-group application library based on JEF2.2 and a large integral data base containing the ad hoc required data to validate the cross sections for the major nuclear processes. The consistency of the integral and microscopic information is demonstrated by using the rules of information theory and a simple recipe to identify the inconsistent integral data. The energy scheme used for the statistically consistent adjustment procedure has been designed to optimize the decoupling of cross section effects. The performance of ERALIB1 for fast reactor applications is considered to be satisfactory. Nevertheless the integral data base needs to be enlarged in order to widen the applicability of the library.

INTRODUCTION
<reponame>chriskim06/go-sdk<filename>reflectutil/doc.go
/*
Copyright (c) 2021 - Present. <NAME>, Inc. All rights reserved
Use of this source code is governed by a MIT license that can be found in the LICENSE file.
*/
/*
Package reflectutil includes helpers for working with the golang reflection api.
*/
package reflectutil // import "github.com/blend/go-sdk/reflectutil"
|
def apply_lineage(func: T) -> T:
    """Decorator that records task lineage after ``func`` runs.

    Serialized outlets/inlets are pushed to XCom and, when a lineage
    backend is configured, forwarded to it as well.
    """
    backend = get_backend()

    @wraps(func)
    def wrapper(self, context, *args, **kwargs):
        self.log.debug("Lineage called with inlets: %s, outlets: %s", self.inlets, self.outlets)
        ret_val = func(self, context, *args, **kwargs)

        task_ref = f"{self.dag_id}.{self.task_id}"
        outlets = [unstructure(_to_dataset(o, task_ref)) for o in self.outlets]
        inlets = [unstructure(_to_dataset(i, None)) for i in self.inlets]

        # Push each non-empty side of the lineage to XCom.
        for key, value, present in (
            (PIPELINE_OUTLETS, outlets, self.outlets),
            (PIPELINE_INLETS, inlets, self.inlets),
        ):
            if present:
                self.xcom_push(
                    context, key=key, value=value, execution_date=context['ti'].execution_date
                )

        if backend:
            backend.send_lineage(operator=self, inlets=self.inlets, outlets=self.outlets, context=context)

        return ret_val

    return cast(T, wrapper)
/* cf_client_rc_alloc
 * Allocate a reference-counted memory region. This region will be filled
 * with uint8_ts of value zero.
 *
 * Layout: [ cf_client_rc_counter | user region of sz bytes ]; the counter
 * starts at 1 for the caller, and the returned pointer addresses the user
 * region.  Returns NULL on allocation failure. */
void *
cf_client_rc_alloc(size_t sz)
{
	uint8_t *addr;
	size_t asz = sizeof(cf_client_rc_counter) + sz;

	/* BUG FIX: the contract above promises zero-filled memory, but the
	 * original malloc() returned uninitialized storage -- use calloc(). */
	addr = (uint8_t*)calloc(1, asz);
	if (NULL == addr)
		return(NULL);

	cf_atomic_int_set((cf_client_rc_counter *)addr, 1);

	uint8_t *base = addr + sizeof(cf_client_rc_counter);
	return(base);
}
<gh_stars>1000+
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: github.com/solo-io/gloo/projects/gloo/api/v1/options/tcp/tcp.proto
package tcp
import (
bytes "bytes"
fmt "fmt"
math "math"
time "time"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
types "github.com/gogo/protobuf/types"
_ "github.com/solo-io/protoc-gen-ext/extproto"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
var _ = time.Kitchen
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// Contains various settings for Envoy's tcp proxy filter.
// See here for more information: https://www.envoyproxy.io/docs/envoy/v1.10.0/api-v2/config/filter/network/tcp_proxy/v2/tcp_proxy.proto#envoy-api-msg-config-filter-network-tcp-proxy-v2-tcpproxy
//
// NOTE(review): generated by protoc-gen-gogo -- change tcp.proto and
// regenerate instead of editing this file by hand.
type TcpProxySettings struct {
	MaxConnectAttempts   *types.UInt32Value `protobuf:"bytes,1,opt,name=max_connect_attempts,json=maxConnectAttempts,proto3" json:"max_connect_attempts,omitempty"`
	IdleTimeout          *time.Duration     `protobuf:"bytes,2,opt,name=idle_timeout,json=idleTimeout,proto3,stdduration" json:"idle_timeout,omitempty"`
	XXX_NoUnkeyedLiteral struct{}           `json:"-"`
	XXX_unrecognized     []byte             `json:"-"`
	XXX_sizecache        int32              `json:"-"`
}
// Standard gogo/protobuf message boilerplate: lifecycle, text rendering,
// descriptor lookup and wire-format hooks.  Generated code -- do not edit.
func (m *TcpProxySettings) Reset()         { *m = TcpProxySettings{} }
func (m *TcpProxySettings) String() string { return proto.CompactTextString(m) }
func (*TcpProxySettings) ProtoMessage()    {}
func (*TcpProxySettings) Descriptor() ([]byte, []int) {
	return fileDescriptor_7eab2eea37fe83e7, []int{0}
}
func (m *TcpProxySettings) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_TcpProxySettings.Unmarshal(m, b)
}
func (m *TcpProxySettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_TcpProxySettings.Marshal(b, m, deterministic)
}
func (m *TcpProxySettings) XXX_Merge(src proto.Message) {
	xxx_messageInfo_TcpProxySettings.Merge(m, src)
}
func (m *TcpProxySettings) XXX_Size() int {
	return xxx_messageInfo_TcpProxySettings.Size(m)
}
func (m *TcpProxySettings) XXX_DiscardUnknown() {
	xxx_messageInfo_TcpProxySettings.DiscardUnknown(m)
}

var xxx_messageInfo_TcpProxySettings proto.InternalMessageInfo
// GetMaxConnectAttempts returns the configured connection-attempt cap, or
// nil when unset.  Safe to call on a nil receiver, like all generated getters.
func (m *TcpProxySettings) GetMaxConnectAttempts() *types.UInt32Value {
	if m != nil {
		return m.MaxConnectAttempts
	}
	return nil
}

// GetIdleTimeout returns the configured idle timeout, or nil when unset.
func (m *TcpProxySettings) GetIdleTimeout() *time.Duration {
	if m != nil {
		return m.IdleTimeout
	}
	return nil
}
// Register the message type and the file descriptor with the proto runtime
// so reflection and (un)marshalling can locate them by name.
func init() {
	proto.RegisterType((*TcpProxySettings)(nil), "tcp.options.gloo.solo.io.TcpProxySettings")
}
func init() {
	proto.RegisterFile("github.com/solo-io/gloo/projects/gloo/api/v1/options/tcp/tcp.proto", fileDescriptor_7eab2eea37fe83e7)
}
// Gzipped FileDescriptorProto for tcp.proto.  Opaque generated data --
// never edit these bytes by hand; regenerate from the .proto instead.
var fileDescriptor_7eab2eea37fe83e7 = []byte{
	// 297 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0xb1, 0x4a, 0xc4, 0x40,
	0x10, 0x86, 0x59, 0x39, 0x2c, 0x72, 0x16, 0x12, 0xae, 0x38, 0x0f, 0x39, 0xc5, 0xca, 0xc6, 0x5d,
	0xbc, 0x6b, 0x6d, 0x8c, 0x22, 0xd8, 0x88, 0x9c, 0xa7, 0x85, 0xcd, 0xb1, 0xd9, 0x1b, 0xd7, 0xd5,
	0x24, 0xb3, 0xec, 0x4e, 0x34, 0xbe, 0x89, 0x4f, 0x20, 0x3e, 0x82, 0x6f, 0x23, 0xf8, 0x0e, 0xf6,
	0xb2, 0xd9, 0x58, 0xa9, 0x60, 0x11, 0x98, 0xc9, 0xff, 0xff, 0xdf, 0xbf, 0x4c, 0x92, 0x69, 0x43,
	0xb7, 0x75, 0xce, 0x15, 0x96, 0xc2, 0x63, 0x81, 0x7b, 0x06, 0x85, 0x2e, 0x10, 0x85, 0x75, 0x78,
	0x07, 0x8a, 0x7c, 0xdc, 0xa4, 0x35, 0xe2, 0x61, 0x5f, 0xa0, 0x25, 0x83, 0x95, 0x17, 0xa4, 0x6c,
	0xf8, 0xb8, 0x75, 0x48, 0x98, 0x0e, 0xc3, 0xd8, 0x49, 0x3c, 0xd8, 0x79, 0x20, 0x71, 0x83, 0xa3,
	0x81, 0x46, 0x8d, 0xad, 0x49, 0x84, 0x29, 0xfa, 0x47, 0x63, 0x8d, 0xa8, 0x0b, 0x10, 0xed, 0x96,
	0xd7, 0x37, 0xe2, 0xd1, 0x49, 0x6b, 0xc1, 0xf9, 0xbf, 0xf4, 0x65, 0xed, 0x64, 0xa0, 0x77, 0x7a,
	0x0a, 0x0d, 0x45, 0x28, 0x34, 0x14, 0xff, 0xed, 0xbc, 0xb0, 0x64, 0x7d, 0xae, 0xec, 0xb9, 0xc3,
	0xe6, 0xe9, 0x02, 0x88, 0x4c, 0xa5, 0x7d, 0x7a, 0x96, 0x0c, 0x4a, 0xd9, 0x2c, 0x14, 0x56, 0x15,
	0x28, 0x5a, 0x48, 0x22, 0x28, 0x2d, 0xf9, 0x21, 0xdb, 0x66, 0xbb, 0xfd, 0xc9, 0x26, 0x8f, 0x3d,
	0xfc, 0xbb, 0x87, 0x5f, 0x9e, 0x56, 0x34, 0x9d, 0x5c, 0xc9, 0xa2, 0x86, 0x59, 0x5a, 0xca, 0xe6,
	0x28, 0x06, 0x0f, 0xbb, 0x5c, 0x9a, 0x25, 0x6b, 0x66, 0x59, 0xc0, 0x82, 0x4c, 0x09, 0x58, 0xd3,
	0x70, 0xa5, 0xe5, 0x6c, 0xfc, 0xe0, 0x1c, 0x77, 0xef, 0xcd, 0x7a, 0xcf, 0xef, 0x5b, 0x6c, 0xd6,
	0x0f, 0xa1, 0x79, 0xcc, 0x64, 0x27, 0x6f, 0x9f, 0x3d, 0xf6, 0xfa, 0x31, 0x66, 0xd7, 0x07, 0xff,
	0x3b, 0xbd, 0xbd, 0xd7, 0xbf, 0x9c, 0x3f, 0x5f, 0x6d, 0xdb, 0xa6, 0x5f, 0x01, 0x00, 0x00, 0xff,
	0xff, 0x09, 0x13, 0x7f, 0x40, 0xc1, 0x01, 0x00, 0x00,
}
// Equal reports deep equality with another TcpProxySettings; the argument
// may be *TcpProxySettings, TcpProxySettings, or nil.
// Generated by protoc-gen-gogo -- do not edit by hand.
func (this *TcpProxySettings) Equal(that interface{}) bool {
	if that == nil {
		return this == nil
	}

	// Normalize the argument to a *TcpProxySettings.
	that1, ok := that.(*TcpProxySettings)
	if !ok {
		that2, ok := that.(TcpProxySettings)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		return this == nil
	} else if this == nil {
		return false
	}
	if !this.MaxConnectAttempts.Equal(that1.MaxConnectAttempts) {
		return false
	}
	// Durations compare by value; nil on only one side means unequal.
	if this.IdleTimeout != nil && that1.IdleTimeout != nil {
		if *this.IdleTimeout != *that1.IdleTimeout {
			return false
		}
	} else if this.IdleTimeout != nil {
		return false
	} else if that1.IdleTimeout != nil {
		return false
	}
	// Unknown wire-format bytes must match too.
	if !bytes.Equal(this.XXX_unrecognized, that1.XXX_unrecognized) {
		return false
	}
	return true
}
|
/**
* Creates a list of distinct CHProfiles with different u-turn costs that can be used by the tests.
* There is always a profile with infinite u-turn costs and one with u-turn-costs = 50.
*/
private List<CHConfig> createCHConfigs() {
Set<CHConfig> configs = new LinkedHashSet<>(5);
configs.add(CHConfig.edgeBased("p0", new ShortestWeighting(encoder, new DefaultTurnCostProvider(encoder, turnCostStorage, INFINITE_U_TURN_COSTS))));
configs.add(CHConfig.edgeBased("p1", new ShortestWeighting(encoder, new DefaultTurnCostProvider(encoder, turnCostStorage, 50))));
long seed = System.nanoTime();
Random rnd = new Random(seed);
while (configs.size() < 5) {
int uTurnCosts = 10 + rnd.nextInt(90);
configs.add(CHConfig.edgeBased("p" + configs.size(), new ShortestWeighting(encoder, new DefaultTurnCostProvider(encoder, turnCostStorage, uTurnCosts))));
}
return new ArrayList<>(configs);
} |
import os
import pandas as pd
# Pickled study cohort produced by an earlier pipeline step.
input_path="output/cohort.pickle"
# Backend name; defaults to "expectations" (dummy data) when unset.
backend = os.getenv("OPENSAFELY_BACKEND", "expectations")
output_path = "output/" + backend + "/tables"
os.makedirs(output_path, exist_ok=True)
cohort = pd.read_pickle(input_path)
def count_prevalences(cohort):
    """Tabulate subgroup counts and percentages per wave and write CSVs.

    For each wave grouping ("wave" and "wave2") this builds a table with one
    row per wave plus a "total" row, one column per subgroup flag and per
    high-level ethnicity category, rounds every count down to a multiple of 7
    (presumably for disclosure control -- confirm), appends percentage
    columns, and writes the result under ``output_path``.
    """
    for group_type in ["", "2"]:
        wave_col = f"wave{group_type}"
        pop_total = cohort["patient_id"].count()
        cols = ["vacc_group", "decline_group", "decline_total_group",
                "other_reason_group", "declined_accepted_group", "vaccinated_and_declined_group",
                "preg_group", "sevment_group", "learndis_group", "immuno_group"]

        prevalences = pd.DataFrame(
            {"total": cohort.groupby([wave_col])["patient_id"].count()}
        )
        for col in cols:
            prevalences[col] = (
                cohort[cohort[col]].groupby([wave_col])["patient_id"].count()
            )

        # Append an all-wave "total" row.  DataFrame.append was deprecated and
        # removed in pandas 2.0, so build the row explicitly and concat it.
        totals = cohort[cols].sum()
        totals["total"] = pop_total
        totals = totals.rename("total")
        prevalences = pd.concat([prevalences, totals.to_frame().T])

        for high_level_ethnicity_category in [1, 2, 3, 4, 5, 6]:
            subset = cohort[cohort["high_level_ethnicity"] == high_level_ethnicity_category]
            ethnicity_col = f"ethnicity_{high_level_ethnicity_category}"
            prevalences[ethnicity_col] = (
                subset.groupby([wave_col])["patient_id"].count()
            )
            # Single .loc assignment instead of the original chained
            # df[col].loc[...] = x, which pandas may apply to a temporary
            # copy (SettingWithCopyWarning) and silently drop.
            prevalences.loc["total", ethnicity_col] = subset["patient_id"].count()

        prevalences.fillna(0, inplace=True)
        # Round each count down to a multiple of 7.
        prevalences = ((prevalences // 7) * 7).astype(int)

        # Snapshot the columns: we add "<col>_percent" columns while iterating.
        for c in list(prevalences.columns):
            prevalences[f"{c}_percent"] = (100 * prevalences[c] / prevalences["total"]).round(1)
        prevalences.fillna(0, inplace=True)

        prevalences.to_csv(output_path + f"/prevalences{group_type}.csv")


count_prevalences(cohort)
|
Depletion of central catecholamines alters amphetamine- and fenfluramine-induced taste aversions in the rat.
Conditioned taste aversions induced by pairing the consumption of saccharin with an amphetamine injection are attenuated in rats with depletion of central catecholamines caused by intraventricular administration of 6-hydroxydopamine (6-OHDA). The hypothesis that dopamine (DA) depletion is responsible for this effect was tested. The reduction in conditioning caused by intraventricular 6-OHDA could not be duplicated either with injections of 6-OHDA into the substantia nigra (Experiment 1) or with intraventricular 6-OHDA injections in animals pretreated with desmethylimipramine (Experiment 2). Both treatments, however, produced large depletions of telencephalic DA. 6-Hydroxydopa infusions caused a preferential loss of telencephalic norepinephrine (NE) but also failed to alter taste aversion learning. It is concluded that the effect of intraventricular 6-OHDA on amphetamine-induced aversions was the result of depletion of both NE and DA. In a third experiment the generality of the effect was examined by pairing saccharin consumption with injections of the amphetamine congener fenfluramine. Depletion of both NE and DA failed to alter fenfluramine-induced aversions. Infusion of 6-OHDA into the substantia nigra, however, retarded the extinction of such an aversion. Evidence is discussed for a peripheral site of action for fenfluramine in the conditioned aversion paradigm. |
/**
* Chapter: 8
* Exercise: 8-04 - The standard library function int fseek(FILE *fp, long offset, int origin) is identical to lseek except
* that fp is a file pointer instead of a file descriptor and the return value is an int status, not a position. Write fseek.
* Make sure that your fseek coordinates properly with the buffering done for the other function of the library.
**/
#include <unistd.h>
#include <fcntl.h>
#include <stdlib.h>
#include "syscalls.h"
#define PERMS 0666
/* Static table of open streams; slots 0-2 are pre-wired to the standard
 * descriptors, the rest start out free (no _READ/_WRITE flag set).
 * NOTE(review): initializer order appears to be { cnt, ptr, base, flag, fd }
 * per the FILE definition in syscalls.h -- confirm against that header. */
FILE _iob[OPEN_MAX] = { /* stdin, stdout, stderr */
    { 0, (char *) 0, (char *) 0, _READ, 0 },
    { 0, (char *) 0, (char *) 0, _WRITE, 1 },
    { 0, (char *) 0, (char *) 0, _WRITE | _UNBUF, 2 }
};
FILE *_fopen(char *, char *);
int _fillbuf(FILE *);
int _flushbuf(int, FILE *);
int fflush(FILE *);
int fclose (FILE *);
int fseek(FILE *, long, int);
/* Minimal `cat`: with no arguments copy stdin to stdout, otherwise
 * concatenate the named files.  Exits 1 on open failure or stdout error. */
int main(int argc, char *argv[]){
    int c;

    if (argc == 1) {
        while ((c = getchar()) != EOF)
            putchar(c);
    } else {
        FILE *fp;
        while (--argc > 0) {
            if ((fp = _fopen(*++argv, "r")) == NULL)
                return 1;
            while ((c = getc(fp)) != EOF)
                putchar(c);
            fclose(fp);
        }
    }

    if (ferror(stdout))
        return 1;
    fclose(stdout);
    return 0;
}
/* _fopen: open file, return ptr */
FILE *_fopen(char *name, char *mode) {
int fd;
FILE *fp;
if (*mode != 'r' && *mode != 'w' && *mode != 'a') {
return NULL;
}
for (fp = _iob; fp < _iob + OPEN_MAX; fp++){
if ((fp->flag & (_READ | _WRITE)) == 0){
break;
}
}
if ( fp >= _iob + OPEN_MAX){
return NULL;
}
if (*mode == 'w'){
fd = creat(name, PERMS);
} else if (*mode == 'a') {
if ((fd = open(name, O_WRONLY, 0)) == -1){
fd = creat(name, PERMS);
}
lseek(fd, 0L, 2);
} else {
fd = open(name, O_RDONLY, 0);
}
if (fd == -1){
return NULL;
}
fp->fd = fd;
fp->cnt = 0;
fp->base = NULL;
fp->flag = (*mode == 'r') ? _READ : _WRITE;
return fp;
}
/* _fillbuf: allocate and fill input buffer */
int _fillbuf(FILE *fp) {
int bufsize;
if ((fp->flag & (_READ | _EOF | _ERR)) != _READ){
return EOF;
}
bufsize = (fp->flag & _UNBUF) ? 1 : BUFSIZ;
if (fp->base == NULL){
if ((fp->base = (char *) malloc(bufsize)) == NULL){
return EOF;
}
}
fp->ptr = fp->base;
fp->cnt = read(fileno(fp), fp->ptr, bufsize);
if (--fp->cnt < 0) {
if (fp->cnt == -1){
fp->flag |= _EOF;
} else {
fp->flag |= _ERR;
}
fp->cnt = 0;
return EOF;
}
return (unsigned char) *fp->ptr++;
}
/* _flushbuf: allocate and flush output buffer */
int _flushbuf(int x, FILE *fp) {
int bufsize;
if ((fp->flag & (_WRITE | _ERR)) != _WRITE){
return EOF;
}
if (fp->flag & _UNBUF) {
bufsize = 1;
} else if (fp->flag & _EOF) {
bufsize = BUFSIZ - fp->cnt;
} else {
bufsize = BUFSIZ;
}
if (fp->base == NULL) {
if ((fp->base = (char *) malloc(bufsize)) == NULL){
return EOF;
}
*fp->base = '\0';
}
fp->ptr = fp->base;
if (bufsize == 1) {
*fp->ptr = x;
}
if (*fp->ptr != '\0' || bufsize == 1) {
fp->cnt = write(fileno(fp), fp->ptr, bufsize);
if (fp->cnt != bufsize) {
if (fp->cnt == -1) {
fp->flag |= _EOF;
} else {
fp->flag |= _ERR;
}
fp->cnt = 0;
return EOF;
}
}
if (bufsize != 1) {
*fp->ptr = x;
fp->cnt = BUFSIZ - 1;
}
return (unsigned char) *fp->ptr++ ;
}
/* fflush: on output stream, write unwritten buffered data. On input stream,
* the effect is undefined. NULL flushes all output streams. */
/* fflush: on output stream, write unwritten buffered data. On input stream,
 * the effect is undefined. NULL flushes all output streams.
 * Returns 0 on success, EOF on error (including non-writable streams). */
int fflush(FILE *fp) {
    FILE *cond;

    if (fp == NULL) {           /* flush every stream in the table */
        fp = _iob;
        cond = _iob + OPEN_MAX;
    } else {
        cond = fp + 1;          /* just this one stream */
    }
    for (; fp < cond; fp++) {
        if ((fp->flag & (_WRITE | _EOF | _ERR)) != _WRITE){
            return EOF;
        }
        /* BUG FIX: a write stream that has never been written to still has
         * base == NULL and ptr == NULL; dereferencing ptr below would
         * crash.  Nothing is buffered, so treat it as already flushed. */
        if (fp->base == NULL) {
            continue;
        }
        /* Setting _EOF tells _flushbuf to flush only the partial buffer. */
        fp->flag |= _EOF;
        if (_flushbuf(*fp->ptr, fp) < 0){
            return EOF;
        }
        *fp->ptr = '\0';
        fp->cnt = 0;
    }
    return 0;
}
/* fclose: flushes unwritten date from stream, discard unread buffered input,
* frees allocated memory, and closes stream. */
/* fclose: flushes unwritten data from stream, discards unread buffered
 * input, frees allocated memory, and closes the stream.
 * Returns 0 on success, EOF on any error. */
int fclose (FILE *fp) {
    int rc = 0;

    if (fp == NULL) {
        return EOF;
    }
    if ((fp->flag & (_WRITE | _EOF | _ERR)) == _WRITE){
        if (fflush(fp) < 0){
            rc = EOF;   /* BUG FIX: keep going -- still free and close */
        }
    }
    free(fp->base);
    fp->base = fp->ptr = NULL;
    fp->cnt = 0;
    if (close(fileno(fp)) < 0) {
        rc = EOF;
    }
    /* BUG FIX: clear the flags so _fopen can reuse this _iob slot; the
     * original left _READ/_WRITE set, leaking the slot permanently. */
    fp->flag = 0;
    return rc;
}
/*
* classes.hxx
*/
#ifndef _CLASSES_
#define _CLASSES_
/* IUnknownMETHODS( ClassName )
 * Expands to out-of-line definitions of the three IUnknown methods for a
 * tear-off interface class: each simply delegates to the owning MyObject
 * (pObject), so the whole aggregate shares one reference count and one
 * QueryInterface implementation. */
#define IUnknownMETHODS( ClassName ) \
HRESULT STDMETHODCALLTYPE \
ClassName::QueryInterface ( \
 REFIID iid, \
 void ** ppv ) \
{ \
 return pObject->QueryInterface( iid, ppv ); \
} \
 \
ULONG STDMETHODCALLTYPE \
ClassName::AddRef() \
{ \
 return pObject->AddRef(); \
} \
 \
ULONG STDMETHODCALLTYPE \
ClassName::Release() \
{ \
 return pObject->Release(); \
}
class MyObject;
//
// PersistFile class.
//
// Tear-off implementing IPersistFile on behalf of MyObject.  The IUnknown
// methods delegate to the owner (see IUnknownMETHODS), so the aggregate
// keeps a single identity and reference count.
class PersistFile : public IPersistFile
{
private:

    MyObject * pObject;     // back pointer to the owning object

public:

    PersistFile( MyObject * pObj );

    // IUnknown -- delegated to pObject
    HRESULT __stdcall QueryInterface(
        REFIID iid,
        void ** ppv );

    ULONG __stdcall AddRef();
    ULONG __stdcall Release();

    // IPersist
    HRESULT __stdcall GetClassID(
        CLSID * pClassID );

    // IPersistFile
    HRESULT __stdcall IsDirty();

    HRESULT __stdcall Load(
        LPCOLESTR pszFileName,
        DWORD dwMode );

    HRESULT __stdcall Save(
        LPCOLESTR pszFileName,
        BOOL fRemember );

    HRESULT __stdcall SaveCompleted(
        LPCOLESTR pszFileName );

    HRESULT __stdcall GetCurFile(
        LPOLESTR * ppszFileName );
};
//
// PersistStorage class.
//
// Tear-off implementing IPersistStorage on behalf of MyObject; IUnknown is
// delegated to the owner exactly as in PersistFile.
class PersistStorage : public IPersistStorage
{
private:

    MyObject * pObject;     // back pointer to the owning object

public:

    PersistStorage( MyObject * pObj );

    // IUnknown -- delegated to pObject
    HRESULT __stdcall QueryInterface(
        REFIID iid,
        void ** ppv );

    ULONG __stdcall AddRef();
    ULONG __stdcall Release();

    // IPersist
    HRESULT __stdcall GetClassID(
        CLSID *pClassID );

    // IPersistStorage
    HRESULT __stdcall IsDirty();

    HRESULT __stdcall InitNew(
        IStorage *pStg );

    HRESULT __stdcall Load(
        IStorage *pStg );

    HRESULT __stdcall Save(
        IStorage *pStgSave,
        BOOL fSameAsLoad );

    HRESULT __stdcall SaveCompleted(
        IStorage *pStgNew );

    HRESULT __stdcall HandsOffStorage();
};
//
// Goober class.
//
// Tear-off implementing the custom IGoober interface; IUnknown is delegated
// to the owning MyObject like the other tear-offs.
//
// FIX: inherit publicly, matching PersistFile/PersistStorage above.  With
// `class`, base access defaults to private, which hides the IGoober base
// from outside code (implicit Goober* -> IGoober* conversions fail).
class Goober : public IGoober
{
private:

    MyObject * pObject;     // back pointer to the owning object

public:

    Goober( MyObject * pObj );

    // IUnknown -- delegated to pObject
    HRESULT __stdcall QueryInterface(
        REFIID iid,
        void ** ppv );

    ULONG __stdcall AddRef();
    ULONG __stdcall Release();

    // IGoober
    HRESULT __stdcall Ping();
};
//
// MyObject class.
//
// The outer COM object: owns the tear-off interface members and carries the
// shared reference count that their IUnknown methods delegate to.
class MyObject : public IUnknown
{
private:

    ulong Refs;                         // shared reference count
    int ActivationType;                 // value supplied at construction
    PersistFile PersistFileObj;         // tear-off: IPersistFile
    PersistStorage PersistStorageObj;   // tear-off: IPersistStorage
    Goober GooberObj;                   // tear-off: IGoober

public:

    MyObject( int ActType );
    ~MyObject();

    // IUnknown
    HRESULT __stdcall QueryInterface(
        REFIID iid,
        void ** ppv );

    ULONG __stdcall AddRef();
    ULONG __stdcall Release();

    // Returns the activation type given to the constructor.
    inline int GetActivationType() { return ActivationType; }
};
#endif
|
/*
* Okay, the login command's use is officially deprecated. Instead, you're supposed
* to use the AUTHENTICATE command with some SASL mechanism. I will include it,
* but I'll also include a check of the login_disabled flag, which will set whether or
* not this command is accepted by the command processor. Eventually, the master will
* know whether or not to accept the login command.
*/
// Incrementally parses LOGIN command data (possibly spread over several
// packets via IMAP literals) and, once both arguments are present,
// executes the login.  May be invoked repeatedly for one command.
IMAP_RESULTS LoginHandler::receiveData(INPUT_DATA_STRUCT &input) {
    IMAP_RESULTS result = IMAP_OK;

    if (!m_session->isLoginEnabled()) {
        m_session->responseText("Login Disabled");
        return IMAP_NO;
    }
    else {
        result = IMAP_OK;
        // If a literal is outstanding, consume its bytes first.
        if (0 < m_parseBuffer->literalLength()) {
            size_t dataUsed = m_parseBuffer->addLiteralToParseBuffer(input);
            if (dataUsed <= input.dataLen) {
                if (2 < (input.dataLen - dataUsed)) {
                    // Drop the trailing CRLF that follows the literal.
                    input.dataLen -= 2;
                    input.data[input.dataLen] = '\0';
                }
            }
            else {
                // Literal continues past this packet; wait for more data.
                result = IMAP_IN_LITERAL;
            }
        }
        // Parse astring arguments until the input is exhausted.
        if ((0 == m_parseBuffer->literalLength()) && (input.parsingAt < input.dataLen)) {
            do {
                switch (m_parseBuffer->astring(input, false, NULL)) {
                case ImapStringGood:
                    ++m_parseStage;     // one more argument parsed
                    if ((input.parsingAt < input.dataLen) && (' ' == input.data[input.parsingAt])) {
                        ++input.parsingAt;  // skip the argument separator
                    }
                    break;

                case ImapStringBad:
                    result = IMAP_BAD;
                    break;

                case ImapStringPending:
                    result = IMAP_NOTDONE;  // needs a literal continuation
                    break;
                }
            } while((IMAP_OK == result) && (input.parsingAt < input.dataLen));
        }
    }

    // Map the parse outcome to the command result / response text.
    switch(result) {
    case IMAP_OK:
        if (2 == m_parseStage) {
            result = execute();     // both expected arguments present
        }
        else {
            m_session->responseText("Malformed Command");
            result = IMAP_BAD;
        }
        break;

    case IMAP_NOTDONE:
        m_session->responseText("Ready for Literal");
        break;

    case IMAP_BAD:
        m_session->responseText("Malformed Command");
        break;

    case IMAP_IN_LITERAL:
        break;      // no response yet; more literal data expected

    default:
        m_session->responseText("Failed");
        break;
    }
    return result;
}
/**
* Uses {@link #getProcessPackageDirectoryResult} to look for a package in the directory specified
* by {@code recursivePkgKey}, does some work as specified by {@link PackageDirectoryConsumer} if
* such a package exists, then recursively does work in each non-excluded subdirectory as
* specified by {@link #getSkyKeyForSubdirectory}, and finally aggregates the {@link
* PackageDirectoryConsumer} value along with values from each subdirectory as specified by {@link
* #aggregateWithSubdirectorySkyValues}, and returns that aggregation.
*
* <p>Returns null if {@code env.valuesMissing()} is true, checked after each call to one of
* {@link RecursiveDirectoryTraversalFunction}'s abstract methods that were given {@code env}.
*
* <p>Will propagate {@link com.google.devtools.build.lib.packages.NoSuchPackageException} during
* a no-keep-going evaluation
*/
@Nullable
public final ReturnT visitDirectory(RecursivePkgKey recursivePkgKey, Environment env)
    throws InterruptedException, ProcessPackageDirectorySkyFunctionException {
  ProcessPackageDirectoryResult processPackageDirectoryResult =
      getProcessPackageDirectoryResult(recursivePkgKey, env);
  // Skyframe restart protocol: bail out (returning null) whenever deps are
  // not yet available; this method will be re-invoked later.
  if (env.valuesMissing()) {
    return null;
  }
  Iterable<SkyKey> childDeps = processPackageDirectoryResult.getChildDeps();
  ConsumerT consumer = getInitialConsumer();

  Map<SkyKey, SkyValue> subdirectorySkyValuesFromDeps;
  if (processPackageDirectoryResult.packageExists()) {
    PathFragment rootRelativePath = recursivePkgKey.getRootedPath().getRootRelativePath();
    SkyKey packageErrorMessageKey =
        PackageErrorMessageValue.key(
            PackageIdentifier.create(recursivePkgKey.getRepositoryName(), rootRelativePath));
    // Request the subdirectory values and the package's error status in a
    // single batch so a restart only happens once.
    Map<SkyKey, SkyValue> dependentSkyValues =
        env.getValues(Iterables.concat(childDeps, ImmutableList.of(packageErrorMessageKey)));
    if (env.valuesMissing()) {
      return null;
    }
    PackageErrorMessageValue pkgErrorMessageValue =
        (PackageErrorMessageValue) dependentSkyValues.get(packageErrorMessageKey);
    switch (pkgErrorMessageValue.getResult()) {
      case NO_ERROR:
        consumer.notePackage(rootRelativePath);
        break;
      case ERROR:
        // Package loads but has errors: report and still note it.
        env.getListener()
            .handle(Event.error("package contains errors: " + rootRelativePath.getPathString()));
        consumer.notePackage(rootRelativePath);
        break;
      case NO_SUCH_PACKAGE_EXCEPTION:
        // Package failed to load entirely: report and record the error.
        String msg = pkgErrorMessageValue.getNoSuchPackageExceptionMessage();
        env.getListener().handle(Event.error(msg));
        consumer.notePackageError(msg);
        break;
      default:
        throw new IllegalStateException(pkgErrorMessageValue.getResult().toString());
    }
    // Strip the error-message entry so only subdirectory values remain.
    subdirectorySkyValuesFromDeps =
        ImmutableMap.copyOf(
            Maps.filterKeys(
                dependentSkyValues, Predicates.not(Predicates.equalTo(packageErrorMessageKey))));
  } else {
    subdirectorySkyValuesFromDeps = env.getValues(childDeps);
  }
  if (env.valuesMissing()) {
    return null;
  }
  return aggregateWithSubdirectorySkyValues(
      consumer,
      union(
          subdirectorySkyValuesFromDeps,
          processPackageDirectoryResult.getAdditionalValuesToAggregate()));
}
Author: Marshall Schott
I’d reckon most of us own (and have broken) a hydrometer or three. I received my first one in the kit I purchased at the genesis of my obsession with this hobby. It’s a fantastic tool that allows us to determine fairly accurately how much sugar is in our wort/beer based on its density (click here for a more thorough explanation), thereby providing the ability to predict the percentage of alcohol in our finished beer. Some people don’t care, which I appreciate, though can only barely relate with because I like knowing stuff like final gravity (FG) and the ABV of my beer. As helpful as they are, hydrometers do have some downsides. Since they’re made of thin glass, they are fragile and very easy to break. They also require a fairly sizable amount of wort, at least enough to fill a hydrometer jar about 75%, and the liquid has to be a specific temperature to provide the most accurate reading. After doing some research a few years ago, I discovered another tool purported to provide similarly accurate measures of specific gravity while requiring a significantly smaller sample of wort and it also had the ability to automatically correct for temperature: the refractometer.
Whereas hydrometers measure specific gravity (SG) based on the density of a liquid, refractometers determine SG using refraction of light. Basically, light is passed through a very small sample of wort and the extent to which that light refracts (bends), called the refractive index, allows us to predict the amount of sugar in our wort. I shopped around for a bit and ultimately settled on the refractometer I currently use today.
There are a ton of options out there ranging drastically in price, though all look and appear to function very similarly. My guess is they’re all likely made by the same factory in another country. I chose the one I did because, at less than $30 shipped, I felt like I could justify the purchase as an interesting experiment if it didn’t work, plus I really liked that it was dual-scale, providing measurements in both brix and SG, no conversion needed. I was definitely skeptical that this product would be nearly as accurate as my trusty ol’ hydrometer. I’d heard of a few folks who simply couldn’t get their refractometers to match their hydrometer readings and who ultimately ended up shelving the unit for what they knew worked. But I had to give it a shot. I was certainly enticed by the very dorky idea of being able to measure the SG of my wort using a single drop at any point during the brew day.
Given my doubt, I spent probably too much time researching the pros and cons of refractometers during the 2 day period between ordering and receiving the unit. I’ve already discussed many of the pros, most of which can be summed up as “ease of use.” It was the cons, which were few and far between, that got me worried. Some people seemed to get similar readings from multiple samples of the same wort, others complained about how their refractometer samples were never aligned with their hydrometer readings, and most frequently was the concern that refractometers were terrible at providing final gravity readings. Had I made a mistake?
I received my refractometer and immediately began playing with it, first by calibrating it using a drop of pre-boiled water– either my water was slightly fermentable or my refractometer was out of whack. Using the small adjustment screwdriver that was included in the package, I made the proper adjustments so the water read 1.000 SG (0 brix). I tested it again using tap water, bottled water, and hose water, all of which resulted in the same 1.000 reading. Cool. I then measured the SG of my kids’ apple juice multiple times and it came in at 1.048 on each attempt. Watered down maple syrup, juice squeezed from a lemon, saliva– all came back with similar results on each measure. At this point, I felt confident in the consistency of the product. It was time to put it to the test on a brew day. I made a batch of wort and, for the first time, measured my pre-boil OG, something that felt like a hassle with a hydrometer. There was something exciting about knowing I’d hit the numbers predicted by BeerSmith. Once the boil was complete and the wort was chilled, I drew off a hydrometer sample to compare with the refractometer sample and found they were exactly the same. I’ve done this many times since and continue to get similar results.
As far as using a refractometer pre-fermentation, I was sold, it was so easy and accurate. The time came on that first batch to check FG, which I was convinced wouldn’t be accurate at all. I measured SG using both tools and, sure enough, they were remarkably different. While the hydrometer indicated I’d reached my target FG of 1.012, the refractometer was reading something closer to 1.024 (6.1 brix). I learned that alcohol impacts the refraction of light, hence the inaccurate reading, and that there were ways to calculate for that impact to determine a more accurate FG using a refractometer. Back to Google I went, searching for a calculator that would allow me to enter the information obtained from my refractometer and spit out a FG that matched my hydrometer. Rather quickly, I stumbled on Sean Terril’s killer website where he has a refractometer calculator that appeared to do exactly what I was looking for. His calculator requires brix, which was fine since my refractometer is dual-scale. I plugged in all the required information, pressed calculate, and the estimated FG came out to 1.0117, close enough to the 1.012 my hydrometer indicated to make me a believer. I’ve tested Sean’s calculator numerous times since and have always yielded similarly accurate results. Still, I’ve tended to stick to measuring FG with my hydrometer, mostly because I enjoy sampling the beer before packaging. If you’re concerned about FG measurements with a refractometer, know that it is absolutely possible using a good calculator.
All in all, I really like my refractometer and use it multiple times every time I brew, I’ve absolutely no regrets with this purchase. I’ve easily used it over 100 times with no issue. It has a solid feel and has proven to be quite durable. If mine were to break, I wouldn’t think twice about purchasing the same one again. Is it necessary? Not at all. But it certainly is functional and I’d argue it does increase the ease of a brew day. For those who struggle to get consistently accurate results, here’s what I’ve found seems to work pretty well:
– Calibrate the refractometer using pre-boiled or bottled water every brew day.
– Make sure your wort is mixed really well before pulling a sample, especially if you brew with extract.
– Use a brewing spoon to collect wort, allow a couple drops to fall onto the sample plate, then put the cover down and gently tap to evenly disperse the liquid.
– If the sample is from hot wort, either gently blow the plate for a few seconds or let it sit for about a minute to stabilize the temperature of the wort before taking a reading.
– Rinse and dry the refractometer immediately after every use.
That’s about it. I’m not sure I’m doing anything special, but my refractometer readings are usually spot-on with what BeerSmith predicts as well as what my hydrometer indicates. And that works for me!
Follow Brülosophy on:
| Read More |
18 Ideas to Help Simplify Your Brew Day
7 Considerations for Making Better Homebrew
List of completed exBEERiments
How-to: Harvest yeast from starters
How-to: Make a lager in less than a month
| Good Deals |
10% Off Chapman Equipment ThermoBarrels using code: THINKBEERDRINKBEER03
Brand New 5 gallon ball lock kegs discounted to $75 at Adventures in Homebrewing
ThermoWorks Super-Fast Pocket Thermometer On Sale for $19 – $10 discount
Sale and Clearance Items at MoreBeer.com
If you enjoy this stuff and feel compelled to support Brulosophy.com, please check out the Support Us page for details on how you can very easily do so. Thanks!
Share this: Facebook
Twitter
Pinterest
Tumblr
Email
Like this: Like Loading... |
import Control.Applicative
import Control.Monad
import Data.List
import Data.Array
-- Counts the elements consumed by a sieve-like pass: the head of the
-- list is counted, then every later element it evenly divides is
-- discarded, and the pass repeats on the remainder.  The accumulator
-- carries the count found so far.
process acc [] = acc
process acc (x:rest) = process (acc + 1) survivors
  where
    -- Keep only elements NOT evenly divisible by x.  (`mod _ x > 0`
    -- is preserved verbatim: for a negative x, Haskell's `mod` result
    -- is non-positive, so `> 0` and `/= 0` are not interchangeable.)
    survivors = filter (\v -> mod v x > 0) rest
main = do
    -- First input line (an element count) is read and discarded.
    _ <- getLine
    -- Second line: whitespace-separated integers, parsed and sorted
    -- ascending before the sieve count runs over them.
    values <- fmap (sort . map read . words) getLine :: IO [Int]
    print (process 0 values)
|
package leavehomesafely.model;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
 * Thin client for the OpenWeatherMap "current weather" REST endpoint.
 *
 * The API key is read from the OWM_API_KEY environment variable; the
 * queried location id and the metric/German-language options are fixed.
 */
public class OWMAPI {
    /**
     * Fetches the current weather for the hard-coded location id.
     *
     * @return the parsed JSON response body, or {@code null} if the
     *         request or parsing failed for any reason (callers already
     *         depend on the null-on-failure contract, so it is kept).
     */
    public static JSONObject getCurrentWeather () {
        HttpURLConnection conn = null;
        try {
            int loc_id = 2867714; // OpenWeatherMap city id
            String key = "&APPID=" + System.getenv("OWM_API_KEY");
            //String key = "&APPID=4219aa4c3208d03913982923614a7d6d"; //ID ist hier nur zu Debug und lokalen Testzwecken eingetragen!
            String metric = "&units=metric";
            String lang = "&lang=de";
            URL url = new URL("http://api.openweathermap.org/data/2.5/weather?id=" + loc_id + metric + lang + key);
            conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            conn.setRequestProperty("Accept", "application/json");
            if (conn.getResponseCode() != 200) {
                throw new RuntimeException("Failed : HTTP error code : "
                        + conn.getResponseCode());
            }
            // Read the ENTIRE body: the previous single readLine() call
            // silently truncated any response spanning multiple lines.
            StringBuilder body = new StringBuilder();
            // try-with-resources guarantees the reader (and underlying
            // stream) is closed even if reading or parsing throws.
            try (BufferedReader br = new BufferedReader(new InputStreamReader(
                    (conn.getInputStream())))) {
                String line;
                while ((line = br.readLine()) != null) {
                    body.append(line);
                }
            }
            return new JSONObject(body.toString());
        } catch (Exception e) {
            // Previously swallowed silently, which made failures (bad
            // key, network down, malformed JSON) undiagnosable.
            e.printStackTrace();
        } finally {
            // Release the connection on every path, not just on success.
            if (conn != null) {
                conn.disconnect();
            }
        }
        return null;
    }
}
|
/**
 * Spring XML namespace handler for the Tequila custom schema.
 *
 * Maps each custom XML element to the config class that backs it, so
 * Spring can turn {@code <tequila:application>}, {@code <tequila:registry>}
 * and {@code <tequila:job>} elements into bean definitions.
 */
public class TequilaNamespaceHandler extends NamespaceHandlerSupport{
    public void init() {
        // Each parser is constructed with the target config class and a
        // flag (true) whose exact meaning is defined by
        // TequilaBeanDefinitionParser — presumably "an id is required";
        // TODO confirm against that class.
        registerBeanDefinitionParser("application",new TequilaBeanDefinitionParser(ApplicationConfig.class,true));
        registerBeanDefinitionParser("registry",new TequilaBeanDefinitionParser(RegistryConfig.class,true));
        registerBeanDefinitionParser("job",new TequilaBeanDefinitionParser(JobConfig.class,true));
    }
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.