/*
* Copyright 2006 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.jelly.apt.decorations.declaration;
import com.sun.mirror.declaration.AnnotationMirror;
import com.sun.mirror.declaration.AnnotationTypeElementDeclaration;
import com.sun.mirror.declaration.AnnotationValue;
import com.sun.mirror.type.AnnotationType;
import com.sun.mirror.util.SourcePosition;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* A decorated annotation mirror provides:
* <p/>
* <ul>
* <li>properties for each of its elements
* </ul>
*
* @author <NAME>
*/
public class DecoratedAnnotationMirror extends HashMap<String, Object> implements AnnotationMirror {
private AnnotationMirror delegate;
private final HashMap<String, Object> allElementValues;
public DecoratedAnnotationMirror(AnnotationMirror delegate) {
if (delegate == null) {
throw new IllegalArgumentException("A delegate must be provided.");
}
this.delegate = delegate;
AnnotationType annotationType = delegate.getAnnotationType();
Collection<AnnotationTypeElementDeclaration> allElements = annotationType.getDeclaration() != null? annotationType.getDeclaration().getMethods() : Collections.<AnnotationTypeElementDeclaration>emptyList();
Map<AnnotationTypeElementDeclaration, AnnotationValue> elementValues = getElementValues();
put("annotationType", annotationType);
put("position", delegate.getPosition());
put("elementValues", elementValues);
allElementValues = new HashMap<String, Object>();
for (AnnotationTypeElementDeclaration element : allElements) {
if (elementValues.containsKey(element)) {
Object value = elementValues.get(element).getValue();
allElementValues.put(element.getSimpleName(), value);
put(element.getSimpleName(), value);
}
else {
AnnotationValue defaultValue = element.getDefaultValue();
if (defaultValue == null) {
throw new IllegalStateException(delegate.getPosition() + ": the element '" + element.getSimpleName() + "' must have a value specified.");
}
Object value = defaultValue.getValue();
allElementValues.put(element.getSimpleName(), value);
put(element.getSimpleName(), value);
}
}
}
public AnnotationType getAnnotationType() {
return this.delegate.getAnnotationType();
}
public SourcePosition getPosition() {
return this.delegate.getPosition();
}
public Map<AnnotationTypeElementDeclaration, AnnotationValue> getElementValues() {
return Collections.unmodifiableMap(this.delegate.getElementValues());
}
public Map<String, Object> getAllElementValues() {
return allElementValues;
}
public boolean equals(Object o) {
return this.delegate.equals(o);
}
}
|
def _reference_elems_in_nodes(self):
model = self.model
for ni, elems in self._node_elems.items():
model.nodes[ni].in_elements = elems |
<filename>weather/services/weather.py
from typing import Dict, Any
import aiohttp
_KEY: str = ""
def global_init(api_key: str) -> None:
"""Initializes a service."""
global _KEY
_KEY = api_key
if not _KEY:
print("Warning: No weather API key.")
print(
"If you want the weather part of the API to work, please get your own API key (free)."
)
print(
"It's available at https://api.openweathermap.org -- just sign up."
)
print()
async def now(zip_code: str, country_code: str) -> Dict[str, Any]:
"""Obtains weather using zip and country codes."""
url: str = f"https://api.openweathermap.org/data/2.5/weather?zip={zip_code},{country_code}&appid={_KEY}"
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
response.raise_for_status()
return await response.json()
|
def ignore(self):
if self._ignore is None:
self._ignore = self._sync_client._get_ingore(
self.gid, self.nid, self.path)
return self._ignore |
import { NucleoPrimitiveType } from './../_types/NucleoPrimitiveType';
type UserFormatValidation<T> = (arg: T) => boolean;
export class NucleoCustomPrimitive<T> {
Type: string;
userFormatValidation: UserFormatValidation<T>;
nativeType: string;
serialize(value: T):boolean {
if (typeof value !== 'boolean' && (typeof value !== this.nativeType || this.formatValidation(value))) {
return false;
}
return true;
}
formatValidation(value: T):boolean {
if (this.userFormatValidation) {
return !this.userFormatValidation(value);
}
return false;
}
}
export class NucleoStringAssertion<T> extends NucleoCustomPrimitive<T> {
constructor(userFormatValidation:UserFormatValidation<T>) {
super();
this.Type = 'NucleoString';
this.nativeType = 'string';
this.userFormatValidation = userFormatValidation;
}
}
export class NucleoNumberAssertion<T> extends NucleoCustomPrimitive<T> {
constructor(userFormatValidation:UserFormatValidation<T>) {
super();
this.Type = 'NucleoNumber';
this.nativeType = 'number';
this.userFormatValidation = userFormatValidation;
}
}
export const NucleoString: NucleoPrimitiveType<string> = {
Type: 'NucleoString',
serialize: (value: string):boolean => {
if (typeof value !== 'string') {
return false;
}
return true;
}
};
export const NucleoNumber: NucleoPrimitiveType<number> = {
Type: 'NucleoNumber',
serialize: (value: number):boolean => {
if (typeof value !== 'number') {
return false;
}
return true;
}
};
export const NucleoBoolean: NucleoPrimitiveType<boolean> = {
Type: 'NucleoBoolean',
serialize: (value: boolean):boolean => {
if (typeof value !== 'boolean') {
return false;
}
return true;
}
};
|
He's cute, fun, smart and you can't stop thinking about him. You're already three steps ahead of the game, mentally planning weddings, children and that giant house with the white picket fence. Finally, after all these years, the perfect relationship has finally been found. Happily ever after sure feels good, huh?
Not so fast. While many men may seem ideal after just a few weeks of dating, upon closer inspection, there can be warning signs that you should avoid a relationship with this person. It is important to recognize these warning signs before it's too late. The alternative is waking up one day down the road divorced with five kids and fifty thousand dollars in debt, watching re-runs of "Honey Boo Boo" on Nick at Nite.
OK, maybe I'm exaggerating. But the truth is, as much as we often ignore the warning signs of a potentially bad relationship early on in the dating process, these issues don't go away. Most behaviors only grow worse over time. So, instead of jumping into that long-term relationship with a man you suspect may be wrong for you, let's take a look at fifteen types of guys to avoid getting into long-term relationships with in the first place.
1. The set-in-his-ways guy.
These men will only become more rigid over time. Dating someone who refuses to do anything new makes for a long-term relationship that is both boring and one-sided. Assuming that he will eventually change and open up to your hobbies is misguided. This will likely never happen.
2. The pick-up-after-me guy.
You are his partner, not his maid. Men who expect you to clean up their dishes, pick up their clothes and take care of them as if they are little children on a consistent basis need a reality check, and will likely treat you as if you are their mother for the rest of their lives.
3. The always-looking-for-a-deal guy.
Be wary of men who constantly look for deals and comment on prices early on in the dating process. While everyone likes a good deal, real men will not make this known when courting a woman. These are signs that he will likely be very cheap throughout the duration of the relationship.
4. The I-need-to-watch-sports-all-the-time guy.
There's nothing wrong with sitting on the couch and watching sports. Many guys do. But this should never trump the responsibilities of a relationship or take precedence over family obligations. There is a difference between loving sports and having a childish obsession with them. Choose a man who knows the difference.
5. The what's-for-dinner guy.
Expecting a home cooked meal every single night makes for a relationship that will likely feel unbalanced in the long run. Again, you are not his maid. There should be contributions from both parties, even if that contribution isn't always evenly divided. It's the effort that counts.
6. The I-get-increasingly-less-romantic-with-each-date guy.
Many men pull out all the stops early on in the dating process, but by the fifth or sixth date, the laziness starts to creep in. While no man should always be expected to plan five-star dates, at the same time, the romance shouldn't just completely fall off a cliff at once. These changes in effort can be very foretelling of how he will be a year or two down the road.
7. The I-had-sex-and-now-I-don't-have-to-try-anymore guy.
Always pay close attention to how a guy's behavior changes once he has had sex with you for the first time. If he truly cares about you, his good behaviors will grow stronger. Those who back down and start getting lazier after having sex for the first time were probably only after one thing to begin with.
8. The I-will-let-you-pay-for-some-of-my-bills guy.
Anyone, regardless of gender, who asks for help paying bills early on in a relationship should raise red flags. Don't let yourself be used. By paying for him early on, you are setting the tone that it will be this way for your entire future.
9. The lack-of-ambition guy.
Be very wary of men who talk up a big game of what they plan on doing with their lives. Some men with no ambition whatsoever like to talk up a good game, but at the end of the day, they are just saying what they know women want to hear. There is nothing wrong with a guy who wants to grow into a better person, just make sure that he's genuine.
10. The I'm-in-my-late-twenties-or-older-and-still-live-with-my-mother guy.
In my book, living at home up until age 25 is acceptable, provided the guy was going through schooling or saving up money. But anyone who has been working for a couple of years and still lives with his mother past this age is never going to grow up. Period.
11. The "sorry, I'm not a big phone person," guy.
Some men may not like being tied down to a phone, but responding to your text messages or phone calls shouldn't be annoying; it's common courtesy and respect. This is a warning sign of future selfish behavior.
12. The over-controlling guy.
The opposite of number eleven, the over-controlling guy must know where "his woman" is every second of the day and approve of who she is hanging out with. Run from this type of man immediately.
13. The I-don't-know-what-I'm-looking-for guy.
One day, he wants a relationship. The next day, he wants kids. Two months later, he's not sure about either. Oftentimes when this line is pulled, it's code for "I'm looking for a marriage and kids, just not with you." Steer clear.
14. The let's-just-stay-in guy.
If you like being indoors more than being outdoors, this may be a good match for you. But men who are constantly suggesting that they want to just stay in and watch a movie early on in the dating game are likely the lazy type, or only out for sex. This will only get worse over time. |
// KnownPackages returns a list of all known packages
// in the package graph that could potentially be imported
// by the given file.
func KnownPackages(ctx context.Context, snapshot Snapshot, fh VersionedFileHandle) ([]string, error) {
pkg, pgf, err := GetParsedFile(ctx, snapshot, fh, NarrowestPackage)
if err != nil {
return nil, fmt.Errorf("GetParsedFile: %w", err)
}
alreadyImported := map[string]struct{}{}
for _, imp := range pgf.File.Imports {
alreadyImported[imp.Path.Value] = struct{}{}
}
pkgs, err := snapshot.CachedImportPaths(ctx)
if err != nil {
return nil, err
}
var (
seen = make(map[string]struct{})
paths []string
)
for path, knownPkg := range pkgs {
gofiles := knownPkg.CompiledGoFiles()
if len(gofiles) == 0 || gofiles[0].File.Name == nil {
continue
}
pkgName := gofiles[0].File.Name.Name
if pkgName == "main" {
continue
}
if knownPkg.ForTest() != "" {
continue
}
if _, ok := alreadyImported[path]; ok {
continue
}
if _, ok := seen[path]; ok {
continue
}
seen[path] = struct{}{}
if !IsValidImport(pkg.PkgPath(), path) {
continue
}
if isDirectlyCyclical(pkg, knownPkg) {
continue
}
paths = append(paths, path)
seen[path] = struct{}{}
}
err = snapshot.RunProcessEnvFunc(ctx, func(o *imports.Options) error {
var mu sync.Mutex
ctx, cancel := context.WithTimeout(ctx, time.Millisecond*80)
defer cancel()
return imports.GetAllCandidates(ctx, func(ifix imports.ImportFix) {
mu.Lock()
defer mu.Unlock()
if _, ok := seen[ifix.StmtInfo.ImportPath]; ok {
return
}
paths = append(paths, ifix.StmtInfo.ImportPath)
}, "", pgf.URI.Filename(), pkg.GetTypes().Name(), o.Env)
})
if err != nil {
event.Error(ctx, "imports.GetAllCandidates", err)
}
sort.Slice(paths, func(i, j int) bool {
importI, importJ := paths[i], paths[j]
iHasDot := strings.Contains(importI, ".")
jHasDot := strings.Contains(importJ, ".")
if iHasDot && !jHasDot {
return false
}
if jHasDot && !iHasDot {
return true
}
return importI < importJ
})
return paths, nil
} |
<filename>rest/webhooks/edit_message.go
package webhooks
import (
"fmt"
"github.com/gompus/gompus/models/channel"
"github.com/gompus/gompus/models/channel/embed"
"github.com/gompus/gompus/models/channel/mentions"
"github.com/gompus/gompus/models/channel/message"
"github.com/gompus/gompus/rest/client"
"github.com/gompus/snowflake"
)
type EditMessageParams struct {
// ThreadID is the id of the thread the message is in.
ThreadID snowflake.Snowflake
// Content is the message content.
Content string
// Embeds contains embedded rich content.
Embeds []embed.Embed
// AllowedMentions are the message's allowed mentions.
AllowedMentions mentions.Allowed
// Components contains the components to include with the message.
Components []interface{}
// Files contains the files to include with the message.
Files []byte
// Payload contains the JSON encoded body of non-file params.
Payload string
// Attachments contains attached files.
Attachments []channel.Attachment
}
func (p EditMessageParams) queryParams() editMessageQueryParams {
return editMessageQueryParams{
ThreadID: p.ThreadID,
}
}
type editMessageQueryParams struct {
ThreadID snowflake.Snowflake `json:"thread_id,omitempty"`
}
func (p EditMessageParams) jsonParams() editMessageJsonParams {
return editMessageJsonParams{
Content: p.Content,
Embeds: p.Embeds,
AllowedMentions: p.AllowedMentions,
Components: p.Components,
Files: p.Files,
Payload: p.Payload,
Attachments: p.Attachments,
}
}
type editMessageJsonParams struct {
Content string `json:"content,omitempty"`
Embeds []embed.Embed `json:"embeds,omitempty"`
AllowedMentions mentions.Allowed `json:"allowed_mentions,omitempty"`
Components []interface{} `json:"components,omitempty"`
Files []byte `json:"files,omitempty"`
Payload string `json:"payload_json,omitempty"`
Attachments []channel.Attachment `json:"attachments,omitempty"`
}
// EditMessage edits the message with the given id that was
// sent on the webhook with the given id.
func EditMessage(token string, hookID, messageID snowflake.Snowflake, params EditMessageParams) (msg message.Message, err error) {
return msg, client.PATCH(client.Request{
Path: fmt.Sprintf("/webhooks/%s/%s/messages/%s", hookID, token, messageID),
Query: client.GenerateQuery(params.queryParams()),
Body: params.jsonParams(),
})
}
|
def default_setup(output_dir, project_name, args):
if comm.is_main_process() and output_dir:
PathManager.mkdirs(output_dir)
rank = comm.get_rank()
setup_logger(output_dir, distributed_rank=rank, name=project_name)
setup_logger(output_dir, distributed_rank=rank, name="dlengine")
logger = logging.getLogger(__name__)
logger.info("Rank of current process: {}. World size: {}".format(rank, comm.get_world_size()))
logger.info("Environment info:\n" + collect_env_info())
logger.info("Command line arguments: " + str(args))
seed_all_rng() |
/**
* Deletes a set of remote repositories. The service returns a collection of exceptions, so that a non-
* <code>null</code> entry indicates an issue during the deletion of the corresponding repository in the
* input
*
* @param remoteRepositories
* @return
*/
@STServiceOperation(method = RequestMethod.POST)
@PreAuthorize("@auth.isAuthorized('pm', 'D')")
public List<ExceptionDAO> deleteRemoteRepositories(
@JsonSerialized List<RepositorySummary.RemoteRepositorySummary> remoteRepositories) {
List<Exception> caughtExceptions = new ArrayList<>();
for (RemoteRepositorySummary remote : remoteRepositories) {
String repoID = remote.getRepositoryId();
String serverURL = remote.getServerURL();
String username = remote.getUsername();
String password = remote.getPassword();
RepositoryManager repoManager;
if (username != null) {
repoManager = RemoteRepositoryManager.getInstance(serverURL, username, password);
} else {
repoManager = RemoteRepositoryManager.getInstance(serverURL);
}
try {
Exception ex = null;
try {
repoManager.removeRepository(repoID);
} catch (RepositoryException | RepositoryConfigException e) {
ex = e;
}
caughtExceptions.add(ex);
} finally {
if (repoManager != null) {
repoManager.shutDown();
}
}
}
return caughtExceptions.stream().map(ExceptionDAO::valueOf).collect(Collectors.toList());
} |
We have discussed several different ways in which bitcoin creates an intrinsic value for itself. We also have discussed the absolute value that cryptography offers, and how States conjure up fiat money through legal violence via valor impositus, rather than creating money with real bonitas intrinseca value of metals that are coined. Now that we have an understanding of the above concepts, we can discuss how bitcoin is a commodity money, made from rare unique bits of data that create the whole cohesive framework that makes up bitcoin.
Satoshium
For this post I am going to pretend that bitcoins are real coins that are minted from a new metal called Satoshium. This metal is ugly, has few uses, and cannot be physically touched, as it is invisible–overall it is pretty useless. However, this new metal is very, very divisible and malleable, and it can be transported over any digital communication channel. All Satoshium that will ever come into existence is created through the coinbase reward that ‘mints’ bitcoin units, which happens during the process of ‘bitcoin mining’.
The reasons for fictionalizing this alloy Satoshium are severalfold:
1) To elaborate on the very important distinction between the legal creation of currency out of nothing, and how that is different from the minting of coins which must gain their value from the material the coins are made from. This is the distinction between legal tender under the force of law (valor impositus) and the nominal value states create out of thin air (the expansion of the money supply), versus natural (bonitas intrinseca) money, which derives its value from no enforcement or organization of men, but from the intrinsic use-value the object possesses in itself. This is seen most frequently with precious metals, but also with objects of use value like cigarettes.
2) Bitcoins can be used for much more than just money. When bitcoin units are creatively destroyed (proof-of-burn, colored coins, etc.) it is similar to melting down coins to use the metal for something more useful. Contracts, identity, transparent taxation, autonomous agents, etc. can all be created from Satoshium, or the outright destruction of bitcoin units.
3) Bitcoin is really several dynamic systems working together (payment, identity, proof-of-existence, ownership, PGP system, etc.), each with its own purpose. This is in addition to the fact that ‘bitcoins’–what one could think of as the cassacious coin, and what I refer to as ‘bitcoin units’–are separate from owning a bitcoin address with no money in it.
Today we shall cover only the first item, and discuss how the bits of data that create the individual bitcoins have their own unique values that are not found within the laws of men, but in the laws of math.
Satoshium Mining
Let us think of Satoshium as similar to gold or silver, with a few notable exceptions. Satoshium is rarer than gold, with only 2,100 trillion units (0.00000001 BTC–the smallest bitcoin unit, ‘a satoshi’) of Satoshium that can ever exist. Today there are about 1,350 trillion units of Satoshium that have been discovered through the ‘Satoshium mining’ process. More and more people are mining every day with better and better mining equipment–which is making it harder for current miners to find the mining reward. We will come back to how bitcoins are ‘minted’ from satoshium using the coinbase reward process later in this post.
Another unique trait about satoshium is that it has a very, very steady inflation rate. For every 10 minutes of satoshium mining that is done on the bitcoin network (combining all of the mining power that everyone is using to find satoshium–be it 9 computers, or 9 billion) there are 5 billion units of satoshium discovered. After the first 4 years of mining, this amount was reduced by 1/2, to 2.5 billion units for the same 10 minute block of total work by the network. This amount will continue to divide in half every 4 years until there are no more units to be divided, which will be somewhere around 2138. Today there is much less satoshium available for mining than there was even just a few years ago–this is similar to the real deflation that metals, like gold, silver, and platinum experience over time, as they also have a finite supply.
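As a rough back-of-the-envelope sketch of what that halving schedule implies (this is purely illustrative, not protocol code; it assumes the commonly cited figure of roughly 210,000 ten-minute blocks per four-year halving period):

// Sketch: total satoshium supply implied by the halving schedule described above.
var blocksPerHalving = 210000;      // roughly four years' worth of ten-minute blocks
var rewardPerBlock = 5000000000;    // 5 billion satoshium units (50 BTC) at the start
var totalUnits = 0;
while (rewardPerBlock > 0) {
  totalUnits += blocksPerHalving * rewardPerBlock;
  rewardPerBlock = Math.floor(rewardPerBlock / 2); // the reward halves every period
}
console.log(totalUnits);        // roughly 2,100 trillion satoshium units
console.log(totalUnits / 1e8);  // roughly 21 million bitcoins

The sum converges on the 2,100 trillion unit cap mentioned above, which is where the familiar 21 million bitcoin figure comes from.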
Satoshium mining has become very difficult because so much mining energy is competing for this limited number of bitcoin units; the little guy can no longer mine satoshium on his own–it is simply too hard. This would be like trying to mine for gold with only a pick and shovel, while the guy next to you has a gold mining operation–you are not going to win. To resolve this, people discovered that if they ‘pool’ their work, everyone can share in the reward of satoshium mining based upon how much work they are doing for the bitcoin network.
The Minting of Bit-coins
Satoshium is really the coinbase reward, which is the raw material that bitcoins are minted from (fun fact, the only way you can truly destroy a bitcoin is through not claiming the full coinbase reward). In the same manner that gold is just a hunk of metal before it is minted into a coin, so is satoshium to bitcoin. When someone is rewarded for satoshium mining, those satoshium units are grouped into chunks of 100 million units and ‘minted’ into a single bitcoin. This is the coinbase reward process. This ‘mints’ satoshium units into bitcoins based upon what the block reward is at that time, and pays that reward of new coins out to a new bitcoin address. This is the ‘minting’ process and how the bitcoin network creates new bitcoins.
Though each bitcoin is created equally, the data that comprises each individual bitcoin is unique and different. Each bitcoin address has unique identifying properties that differentiate each individual bitcoin to its owner, but to no one else. This is similar to the serial number that is unique to each dollar bill. This means that the history of that particular bitcoin (or subdivisions of that bitcoin) can be tracked, and it can only be spent when the unique private key authorizes its movement. If properly secured, it is impossible to ‘hack’ a bitcoin address and take the money from that address, as only the private key will be able to move it. This is why there are several bitcoin addresses that have tens of millions of dollars in them, and not a single one has been hacked.
The mining process is also what ensures that there are no ‘double-spend’ attacks. In lay terms, a double-spend attack is similar to check-kiting, where one spends the balance in a checking account twice before the bank can check to make sure the funds are there. We won’t go into details about this right now, but just be aware that the mining process also acts as the gatekeeper to the transference of funds, and offers mathematical assurance that no coins can be stolen or double-spent.
The Money Supply of Bitcoin
The reason for us fictionalizing the metal Satoshium is to make clear the distinction between fiat currencies, which are made from nothing, and commodity money, which must derive its value from an object’s intrinsic worth–the value is found in the money itself, not vice-versa. A fiat currency is a scrip certificate of exchange issued from a central bank. The scrip itself (such as a $20 bill) is just worthless paper–there is no bonitas intrinseca about it. An infinite number of these scrips can be created, as their values are created and set by the dollar accounting system controlled by the Third Bank of the United States (also known as The Fed–a misleading term that I hate). Each one of these scrips can be redeemed for goods and services for the nominal value printed on it, because it is legal tender–one must accept fiat money in exchange for goods or services. If not, you will face the wrath of the law.
Historically, one could exchange the nominal value of these worthless papers for a precise measure of commodity money, such as gold or silver. However, fiat currencies no longer have any sort of value backing them–since 1973 they have been free-floating. Governments are now free to print as much money as they like, which they are happily doing. This is because there has been a low-level currency war going on since 2008, and it is starting to intensify. This means that while there still is a finite, natural supply of all physical objects, there now is twice as much money (in the case of the US) that can purchase those same objects.
This is how governments expand the money supply to create inflation. This bleeds the value of the hard-earned savings of common people, in order to further enrich the current ruling class. All people in all nations are now facing these political calamities that will make us all economic casualties.
Velocity of Money
When more units of a currency are injected into circulation, this causes the total number of units within the system to increase. If the velocity of money were normal today, this would mean that the prices of everything would double overnight–but they have not. This is because the velocity of money is at historic lows, at less than 1/2 of what it normally is. This is not a mistake, but a response to the QE of the FED.
Let us compare this to how bitcoins are ‘minted’. Bitcoins derive their value from the bonitas intrinseca, the real economic work that has been performed in the Satoshium mining process, and the use-value that Satoshium has. Each and every single bitcoin in existence must have come from a coinbase reward–there is no other way to create bitcoins. In order to create the coinbase reward, real computational work that takes real energy–no different from the energy used to dig gold from the ground–must be performed.
With Satoshium mining, this ‘work’ is done in the form of solving very, very, very complex mathematical problems that secure the network from ever being corrupted. This gives each bitcoin unit equal, market-based value due to the fact that it costs real energy to produce bitcoin today. There is no way to modify the number of bitcoin units that can be created (unlike fiat money), as bitcoins can only come from the coinbase reward, and that is hardcoded into bitcoin. This assures all bitcoiners that no one can ever just change the supply of bitcoin in the way the US can, or any other central bank can for its currency (I’m looking at you, Japan and EU).
Bitcoin as a Currency
For us to understand bitcoin as a currency, let us think of a bitcoin paper wallet for the moment. This is a piece of paper that has the private key of a bitcoin address printed on it. When one inputs the private key of that address into a bitcoin client, they can access and transfer the bitcoin found in that address. This is a currency bill in the most fundamental sense of the word, as it is not that piece of paper that has any value, but what it represents. What has value is the private key, as that can access the bitcoin–not the paper itself. The paper has only exchange value, not use-value. This is how banking classically existed for centuries, with banking bills representing some value of gold until 1973, when the dollar dropped its peg to gold.
Although bitcoin is called a digital currency, that is a bit of a misnomer. Bitcoin is not a currency but a commodity-money. Bitcoins must come from the coinbase reward process, and that process can only be done through the electrical labor of mining. Thus, like physical coins, a bitcoin can only be created when the correct ‘bits’ are ‘minted’ into bitcoins. Bitcoins cannot just be created willy-nilly–real computational work must be done, and real energy expended to mint bitcoins.
This is why we have differentiated the creation of bitcoin units from that of Satoshium mining. If we are to mint coins, physical or otherwise, we must have something to mint; we cannot make coins from nothing! And this is the very place where commodity monies are different from fiat money–fiat money does not represent anything other than the law, whereas bitcoins ARE something–very special data sets verified by the bitcoin network.
Bitcoin is a Commodity Money
Bitcoin is a commodity money because of the cryptography that bitcoin is built on top of. This has created the contract that limits the supply of bitcoin units and protects the bitcoin payment network. It is cryptography that creates the immutability and fungibility of bitcoin units and the imperium of the bitcoin network. This immutability creates a use-value for bitcoin, which also creates its exchange value. Furthermore, the ‘satoshium’ units of bitcoin can be broken down and used for all sorts of other contractual functions. By understanding bitcoin as a commodity money, we can see that the true value bitcoin has is outside of the legal constructs of the state.
The internet now has money that is loyal to no political body, or statist organizations; but to digital ideals alone. This is not just the economic base of a new epoch, but a political one as well. Bitcoin is the economic praxis that will allow for humans to create a new class consciousness. We can use the internet to help us create a new society, and we can use bitcoin as the economic mode to create that new world.
Bitcoin is not about money, and has nothing to do with money. Bitcoin is about political power, sovereignty, and the freedom of economic exchange. This is in direct and antagonistic relations to any and all states. Bitcoin seeks to destroy the old institutions of political power, and replace them with new digitized, decentralized ones.
Once people start to see and reject the corrupt and worthless scrips of the states, there is going to be a great unraveling unlike anything we have seen before. The crisis will collapse the value of all fiat money until it is nearly worthless, and the value of cryptocurrencies will explode. There will be chaos, and there will be anarchy–but these are the conditions of creative destruction that we must have in order to rebuild something better in place of this corrupt and wicked system called state capitalism.
—
Next: Bitcoin and The History of Money |
/**
* Created by luba on 10/3/17.
*/
public class LoginActivity extends OAuthLoginActionBarActivity<TwitterClient> {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
}
// Inflate the menu; this adds items to the action bar if it is present.
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.login, menu);
return true;
}
// OAuth authenticated successfully, launch primary authenticated activity
// i.e Display application "homepage"
@Override
public void onLoginSuccess() {
if (CheckNetwork.isOnline()) {
TwitterClient mTwitterClient = TwitterApplication.getRestClient();
Toast.makeText(LoginActivity.this, "Loading user credentials", Toast.LENGTH_SHORT).show();
mTwitterClient.getUserCredentials(new UserCredentialsCallback() {
@Override
public void onSuccess(User user) {
if (user != null) {
UserInfo.getInstance().setUserInfo(user);
}
}
@Override
public void onError(Error error) {
Toast.makeText(LoginActivity.this, error.getMessage(), Toast.LENGTH_LONG).show();
}
});
} else {
// Get profile from shared preferences
}
Intent i = new Intent(this, TwitterActivity.class);
startActivity(i);
}
// OAuth authentication flow failed, handle the error
// i.e Display an error dialog or toast
@Override
public void onLoginFailure(Exception e) {
e.printStackTrace();
}
// Click handler method for the button used to start OAuth flow
// Uses the client to initiate OAuth authorization
// This should be tied to a button used to login
public void loginToRest(View view) {
getClient().connect();
}
} |
<filename>modules/web/src/com/haulmont/charts/web/gui/ChartLocaleHelper.java<gh_stars>1-10
/*
* Copyright (c) 2008-2019 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.charts.web.gui;
import com.haulmont.charts.gui.amcharts.model.DayOfWeek;
import com.haulmont.charts.gui.amcharts.model.Month;
import com.haulmont.cuba.core.global.AppBeans;
import com.haulmont.cuba.core.global.Messages;
import java.util.*;
public final class ChartLocaleHelper {
private ChartLocaleHelper() {
}
public static Map<String, Object> getChartLocaleMap(Locale locale) {
Map<String, Object> chartLocaleMap = new LinkedHashMap<>();
Messages messages = AppBeans.get(Messages.class);
// day of week
List<String> dayNames = new LinkedList<>();
List<String> shortDayNames = new LinkedList<>();
for (DayOfWeek day : DayOfWeek.values()) {
dayNames.add(messages.getMainMessage("amcharts.dayNames." + day.name(), locale));
shortDayNames.add(messages.getMainMessage("amcharts.shortDayNames." + day.name(), locale));
}
chartLocaleMap.put("dayNames", dayNames);
chartLocaleMap.put("shortDayNames", shortDayNames);
// months
List<String> monthNames = new LinkedList<>();
List<String> shortMonthNames = new LinkedList<>();
for (Month m : Month.values()) {
monthNames.add(messages.getMainMessage("amcharts.monthNames." + m.name(), locale));
shortMonthNames.add(messages.getMainMessage("amcharts.shortMonthNames." + m.name(), locale));
}
chartLocaleMap.put("monthNames", monthNames);
chartLocaleMap.put("shortMonthNames", shortMonthNames);
// formatting time
chartLocaleMap.put("am", messages.getMainMessage("amcharts.am", locale));
chartLocaleMap.put("pm", messages.getMainMessage("amcharts.pm", locale));
return chartLocaleMap;
}
public static Map<String, String> getExportLocaleMap(Locale locale) {
Map<String, String> exportLocaleMap = new LinkedHashMap<>();
Messages messages = AppBeans.get(Messages.class);
exportLocaleMap.put("fallback.save.text", messages.getMainMessage("fallback.save.text", locale));
exportLocaleMap.put("fallback.save.image", messages.getMainMessage("fallback.save.image", locale));
exportLocaleMap.put("capturing.delayed.menu.label", messages.getMainMessage("capturing.delayed.menu.label", locale));
exportLocaleMap.put("capturing.delayed.menu.title", messages.getMainMessage("capturing.delayed.menu.title", locale));
exportLocaleMap.put("menu.label.print", messages.getMainMessage("menu.label.print", locale));
exportLocaleMap.put("menu.label.undo", messages.getMainMessage("menu.label.undo", locale));
exportLocaleMap.put("menu.label.redo", messages.getMainMessage("menu.label.redo", locale));
exportLocaleMap.put("menu.label.cancel", messages.getMainMessage("menu.label.cancel", locale));
exportLocaleMap.put("menu.label.save.image", messages.getMainMessage("menu.label.save.image", locale));
exportLocaleMap.put("menu.label.save.data", messages.getMainMessage("menu.label.save.data", locale));
exportLocaleMap.put("menu.label.draw", messages.getMainMessage("menu.label.draw", locale));
exportLocaleMap.put("menu.label.draw.change", messages.getMainMessage("menu.label.draw.change", locale));
exportLocaleMap.put("menu.label.draw.add", messages.getMainMessage("menu.label.draw.add", locale));
exportLocaleMap.put("menu.label.draw.shapes", messages.getMainMessage("menu.label.draw.shapes", locale));
exportLocaleMap.put("menu.label.draw.colors", messages.getMainMessage("menu.label.draw.colors", locale));
exportLocaleMap.put("menu.label.draw.widths", messages.getMainMessage("menu.label.draw.widths", locale));
exportLocaleMap.put("menu.label.draw.opacities", messages.getMainMessage("menu.label.draw.opacities", locale));
exportLocaleMap.put("menu.label.draw.text", messages.getMainMessage("menu.label.draw.text", locale));
exportLocaleMap.put("menu.label.draw.modes", messages.getMainMessage("menu.label.draw.modes", locale));
exportLocaleMap.put("menu.label.draw.modes.pencil", messages.getMainMessage("menu.label.draw.modes.pencil", locale));
exportLocaleMap.put("menu.label.draw.modes.line", messages.getMainMessage("menu.label.draw.modes.line", locale));
exportLocaleMap.put("menu.label.draw.modes.arrow", messages.getMainMessage("menu.label.draw.modes.arrow", locale));
exportLocaleMap.put("label.saved.from", messages.getMainMessage("label.saved.from", locale));
return exportLocaleMap;
}
}
|
Yes, it has long been established that foreign-born players can thrive here, in an N.B.A. of opulence and opportunity. But what about European-trained coaches? We checked in with Maurizio Gherardini, a longtime Italian league general manager who worked in the Toronto Raptors’ front office as an assistant general manager for seven years before taking over the Turkish power Fenerbahce Ulker.
“I’ve been privileged enough in my years at Benetton Treviso to work with three N.B.A.-to-become coaches — Mike D’Antoni, David Blatt and Ettore Messina,” he said. “The commitment by an N.B.A. franchise to go for a Euro coach is more complicated, even if he may eventually be a great strategist or an innovator.”
He added: “D’Antoni was initially perceived not much of an option when he first started in Denver, but then in Phoenix he developed a basketball style that somehow changed the overall quality of the game — and it’s not a surprise that the Warriors and Steve Kerr somehow have developed their game on the basis of the basketball played those years in Phoenix.
“Messina has taken the path of showing his value on a prestigious bench like the Spurs’ in order to increase his chances. Blatt coached the best player in the game in Cleveland, but he was perceived more European than American and, bottom line, it will always take a strong commitment by an organization to go for a foreign coach as such a decision needs to be bought into by your top players who, most of the time, are not European.”
Too often, old-school perception weighs more heavily than performance. Had Blatt coached at Kentucky, he would have American court cred. James never bought into him. His teammates followed their leader.
That doesn’t mean Blatt would not be a good fit with the Knicks. Jackson’s original coaching choice, Golden State’s Kerr, was about to hire Blatt as an assistant before Cleveland signed him. He no doubt has endorsed Blatt to Jackson. |
Want to know why we can’t have nice things? Because apparently, the public — or at least parts of New York — is not to be trusted.
Eight months after the appearance of the first LinkNYC hubs, which are — or were — internet kiosks meant to help bring the Big Apple into the 21st century, the city has taken a step back. Some of these kiosks were not used to “save data on their mobile plans, call relatives across the country, and get a much-needed quick charge” as they were originally intended. Instead, they were used to watch pornography.
Per an announcement from the LinkNYC team, “We … know that some users have been monopolizing the Link tablets and using them inappropriately, preventing others from being able to use them while frustrating the residents and businesses around them. The kiosks were never intended for anyone’s extended, personal use and we want to ensure that Links are accessible and a welcome addition to New York City neighborhoods.” Which, as it turns out, they were not, particularly along Manhattan’s 8th Avenue, where abuse of the kiosks was particularly rampant.
The decision was released shortly after Motherboard published its own report examining the prevalence of homeless men stationing themselves in front of the LinkNYC hubs to use the internet. Evidently, the hubs have also become hotspots for public drinking and drug use — the opposite of the hubs’ original intention to “improve the quality of life.”
“As New Yorkers ourselves, we want LinkNYC to provide the best possible experience for Link users and the communities around them,” LinkNYC said. As of Wednesday, that includes, “removing web browsing on all Link tablets while we work with the City and community to explore potential solutions, like time limits” that mitigate some of the problems the hubs are experiencing.
“Other tablet features — free phone calls, maps, device charging, and access to 311 and 911 — will continue to work as they did before, and nothing is changing about LinkNYC’s superfast Wi-Fi,” the team notes. “As planned, we will continue to improve the Link experience and add new features for people to enjoy while they’re on the go.” |
ECMA Harmony and the Future of JavaScript
Posted in: javascript
New stuff I liked about the language
Object.defineProperty(obj, "length", {
  get: function () { return this.computeLength(); },
  set: function (value) { this.changeLength(value); }
});

Object.defineProperty(Array.prototype, "inject", {
  value: function (memo, iterator, context) {
    iterator = iterator.bind(context);
    this.each(function (value, index) {
      memo = iterator(memo, value, index);
    });
    return memo;
  },
  configurable: false,
  enumerable: false,
  writable: false
});

[1, 2, 3].inject(0, function (a, b) { return a + b; }); // 6
var Person = function () {};
Person.prototype.eat = function () { alert("eating"); };

// Ninja extends Person
var Ninja = function () {};
Ninja.prototype = Object.create(Person.prototype, {
  doKungFu: { value: function () { alert("wootoo"); } }
});

var n = new Ninja();
n.eat();      // eating
n.doKungFu(); // wootoo

var Point = function (x, y) {
  this.x = x;
  this.y = y;
};
Point.prototype.distanceToOrigin = function () {
  return Math.sqrt(this.x * this.x + this.y * this.y);
};

// Complex extends Point
var Complex = function (x, y) {
  Point.call(this, x, y); // call the superclass
};
Complex.prototype = Object.create(Point.prototype, {
  add: { value: function (complex) { this.x += complex.x; this.y += complex.y; } }
});
Object.preventExtensions prevents an object from being extended (i.e. adding new properties to it). Still, the properties it already has can be deleted and their values can be changed.
Object.seal does everything Object.preventExtensions does and also sets configurable=false for its properties, so they can't be deleted. The property values can still be changed, though.
Object.freeze makes the object completely immutable. Object.freeze does the same as Object.preventExtensions and Object.seal but also sets writable=false for all object properties.
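To make the difference between these three levels concrete, here is a minimal sketch (the object and property names are only illustrative, not from the talk or the spec):

var config = { host: "localhost", port: 8080 };
Object.preventExtensions(config);
config.timeout = 30;            // ignored: no new properties can be added
delete config.port;             // still allowed
config.host = "127.0.0.1";      // still allowed

var sealed = Object.seal({ host: "localhost", port: 8080 });
delete sealed.port;             // ignored: properties are now non-configurable
sealed.host = "127.0.0.1";      // still allowed

var frozen = Object.freeze({ host: "localhost", port: 8080 });
frozen.host = "127.0.0.1";      // ignored: properties are now non-writable too

console.log(Object.isExtensible(frozen)); // false
console.log(Object.isSealed(frozen));     // true
console.log(Object.isFrozen(frozen));     // true

(In strict mode the ignored operations above throw a TypeError instead of failing silently.)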
I like to see language implementors (or creators in this case) talk about the design challenges or choices they're facing in the next version of their languages. In this video Brendan Eich, the creator of JavaScript, talks about the ES 3.1/4/5 thingy and also explains what features will be added to JavaScript in the near future (at least for the ECMA standard). I'll add a couple of comments about the JS features I liked below the video.

Most of the things I liked about the new features of the language are related to Meta-Programming. These new features describe new behaviors in object properties, like getters and setters, but they are also related to object and property mutability, configuration, visibility, etc.

Getters and setters were implemented by B.E. more than nine years ago at Mozilla, but a new syntax is introduced in the standard by adding a static function to the Object class. In the first example above, the defineProperty static method of the Object class adds the length property to the obj object. The get and set methods will be called when accessing or modifying the length property. Object.getOwnPropertyDescriptor retrieves the property descriptor of the defined property.

The Object.defineProperty method can also be used to define instance methods. The inject method defined above does something similar to what inject_into or reduce do in other languages.

If configurable=true, the property can be deleted or have its descriptor changed in other ways. You can make a property non-enumerable by setting enumerable=false, and it won't be detected in a for in loop (or in any other "prop" in obj expression). This means that, for example, we could augment Object.prototype with methods without having to iterate through them in a for in loop. If writable is set to false, the value of the property cannot be changed.

Still no support is added for classical inheritance patterns (which makes me happy, I must confess). Instead, the differential inheritance pattern gets a function that had (somewhat) been implemented by frameworks like Closure, MooTools and others with the inherits and $merge functions. Object.create can be used for implementing prototypical inheritance: you can create a new class A that inherits from B by cloning an object and augmenting it with a Properties object, as in the Ninja example above. However, don't forget to call the superclass constructor in your subclass constructor, as the Complex example does.

Mutability is nice, but sometimes we need to make our objects immutable for design reasons or for security reasons. The methods above change the mutability level of an object.

I hope this was useful to you. There are a lot more interesting language features to come, so you can read the ECMA draft if you're interested in knowing more about this new version.
def scores_for_groups(self, group_ids, channels, flatten=True):
waveforms = [self.waveforms_for_group(g, channels) for g in group_ids]
lengths = np.hstack([np.ones(w.shape[0])*g for g, w in zip(group_ids,
waveforms)])
return self._reduce_dimension(np.vstack(waveforms),
flatten=flatten), lengths |
<reponame>chrisjob1021/GoCodingChallenges
// Longest Palindromic Subsequence
// Dynamic Programming
//
// Time complexity: O(n2)
// Space complexity: O(n2)
//
// References:
// https://leetcode.com/problems/longest-palindromic-subsequence/description/
// https://github.com/mission-peace/interview/blob/master/src/com/interview/dynamic/LongestPalindromicSubsequence.java
//
// Acknowledgements:
// <NAME> @mission-peace
// His video explaining this algorithm: https://youtu.be/_nCsPn7_OgI
package longpalsubseq
import (
"fmt"
)
// I wish to avoid converting our integers to float64, just for the sake of using math.Max.
// Instead, let's create a simple helper function to return the max of two integers.
func max(x, y int) int {
if x > y {
return x
} else {
return y
}
}
func printLongestPalindromeSubseq(s string, T [][]int) string {
// Create result slice of bytes that represent the length of the longest palindromic subsequence.
// In each slice, we will store the bytes representing the character at each position.
res := make([]byte, T[0][len(s)-1])
// The length of the longest palindromic subsequence is stored at T[0][N-1], where N is the length of the input string. We will start there and work backwards.
i := 0
j := T[0][len(s)-1]
// Let's also setup two pointers that represent the left and right side of our resulting string.
// In most cases, we're going to add strings to both sides simultaneously.
// "l" represents the left side and "r" represents the right side.
l := 0
r := len(res) - 1
for i <= j {
// This handles the case where the original string's slice increased the size of the longest palindromic substring.
// We verify that the characters at the start and end of the slice match.
if s[i] == s[j] && T[i][j] == T[i+1][j-1]+2 {
res[l] = s[i]
// Move l toward the right, since we added an item to the result on the left side.
l++
res[r] = s[j]
// Move r toward the left, since we added an item to the result on the right side.
r--
i++
j--
} else if T[i][j] == T[i+1][j] {
// If T[i][j] equals the value of T[i+1][j], then we can assume that the longest palindromic subsequence was contained in the slice represented by T[i+1][j].
// Add the left side of slice represented by T[i+1][j]
res[l] = s[i+1]
// Move l toward the right, since we added an item to the result on the left side.
l++
// If T[i+1][j] is greater than 1, then we have to add the left and right side of its represented slice.
if T[i+1][j] > 1 {
res[r] = s[j]
// Move r toward the left, since we added an item to the result on the right side.
r--
}
// Backtrack from T[i][j] to where our solution came from (T[i+1][j])
i++
} else if T[i][j] == T[i][j-1] {
// If T[i][j] equals the value of T[i][j-1], then we can assume that the longest palindromic subsequence was contained in the slice represented by T[i][j-1].
// If T[i][j-1] is greater than 1, then we have to add the left and right side of its represented slice.
if T[i][j-1] > 0 {
res[l] = s[i]
// Move l toward the right, since we added an item to the result on the left side.
l++
}
// Add the right side of slice represented by T[i][j-1]
res[r] = s[j-1]
// Move r toward the left, since we added an item to the result on the right side.
r--
// Backtrack from T[i][j] to where our solution came from (T[i][j-1])
j--
} else if i == j {
// We need to handle a case where the longest palindromic subsequence is of odd length.
// For example "abdba": up to this point, we will have added "ab ba" to our resulting string.
// We would need to add the single character represented at T[i][j].
res[len(res)/2] = s[i]
break
}
}
return fmt.Sprintf("%s", res)
}
func longestPalindromeSubseq(s string) (int, [][]int) {
// We are going to build a N*N 2D matrix that represents the longest palindromic subsequence for every slice of the input string.
// N represents the length of the input string.
T := make([][]int, len(s))
for i, _ := range T {
T[i] = make([]int, len(s))
// While we are creating the result matrix, let's also account for slice of length 1.
// For example, if input is "bbbab", longest palindromic substring at s[0], s[1], s[2] and so on are all 1.
T[i][i] = 1
}
// "length" represents the length of the slice of the input string we will review.
// Start at 2, because slices of length 1 were already handled when the matrix was initialized above.
for length := 2; length <= len(s); length++ {
// Now let's iterate through slices of input string of equal size to length
for i := 0; i <= len(s)-length; i++ {
// Example: "ab", "i" = s[0] ("a") and "j" = s[1] ("b")
// Subtract 1 since resulting array is indexed starting at 0.
j := i + length - 1
if length == 2 {
// Consider example of "aa"
// A slice of s[0..1] has a palindrome of "aa" or length 2.
if s[i] == s[j] {
T[i][j] = 2
} else {
// Now consider above example of "ab"
// A slice of s[0..1] has a palindrome of "a" or "b", equaling length 1.
T[i][j] = 1
}
} else if s[i] == s[j] {
// Consider a length 3 slice of "abad".
// If we start with s[0..2] or "aba", the resulting palindrome would be length 3.
// We can reach that value by adding 2 to the result of s[1..2] ("ba").
T[i][j] = T[i+1][j-1] + 2
} else {
// Consider a length 3 slice of "adbb".
// Let's move forward to the 2nd of the length 3 slices in this example.
// Evaluating "dbb", the longest palindrome is of length 2 in that slice.
// To reach that value, we take either the max of the palindromes at s[1..2] ("db") or s[2..3] ("bb").
// The first is of length 1 and the latter is of length 2.
T[i][j] = max(T[i+1][j], T[i][j-1])
}
}
}
// Returning T so that we can pass it to our function to print the longest palindromic subsequence.
return T[0][len(s)-1], T
}
|
/**
* Returns the sum of all rates leaving to successor states.
*
* @return sum of all rates leaving to successor states
*/
double sumRates()
{
if (null == succRates) {
return 0.0;
}
double sumRates = 0.0;
for (int rateNr = 0; rateNr < succRates.length; rateNr++) {
sumRates += succRates[rateNr];
}
return sumRates;
} |
<reponame>enfoTek/tomato.linksys.e2000.nvram-mod<filename>tools-src/gnu/gcc/libjava/java/security/interfaces/DSAKeyPairGenerator.java
/* DSAKeyPairGenerator.java -- Initialize a DSA key generator
Copyright (C) 1998 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
02111-1307 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package java.security.interfaces;
import java.security.SecureRandom;
import java.security.InvalidParameterException;
/**
* This interface contains methods for initializing a Digital Signature
* Algorithm key generation engine. The initialize methods may be called
* any number of times. If no explicit initialization call is made, then
* the engine defaults to generating 1024-bit keys using pre-calculated
* base, prime, and subprime values.
*
* @version 0.0
*
* @author <NAME> (<EMAIL>)
*/
public interface DSAKeyPairGenerator
{
/**
* Initializes the key generator with the specified DSA parameters and
* random bit source
*
* @param params The DSA parameters to use
* @param random The random bit source to use
*
* @exception InvalidParameterException If the parameters passed are not valid
*/
public abstract void initialize(DSAParams params, SecureRandom random)
throws InvalidParameterException;
/**
* Initializes the key generator to a given modulus. If the <code>genParams</code>
* value is <code>true</code> then new base, prime, and subprime values
* will be generated for the given modulus. If not, the pre-calculated
* values will be used. If no pre-calculated values exist for the specified
* modulus, an exception will be thrown. It is guaranteed that there will
* always be pre-calculated values for all modulus values between 512 and
* 1024 bits inclusive.
*
* @param modlen The modulus length
* @param genParams <code>true</code> to generate new DSA parameters, <code>false</code> otherwise
* @param random The random bit source to use
*
* @exception InvalidParameterException If a parameter is invalid
*/
public abstract void initialize(int modlen, boolean genParams,
SecureRandom random)
throws InvalidParameterException;
}
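A brief usage sketch follows (added for illustration; it is not part of the GNU Classpath sources). It assumes the provider object returned for the "DSA" algorithm also implements this interface, which the instanceof guard checks at run time; the class name and the choice of a 1024-bit modulus are assumptions of the sketch.
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.interfaces.DSAKeyPairGenerator;
class DSAKeyPairGeneratorExample
{
  public static void main(String[] args) throws NoSuchAlgorithmException
  {
    KeyPairGenerator kpg = KeyPairGenerator.getInstance("DSA");
    if (kpg instanceof DSAKeyPairGenerator)
      {
        // Ask for the pre-calculated parameters of a 1024-bit modulus
        // instead of generating fresh ones (genParams == false).
        ((DSAKeyPairGenerator) kpg).initialize(1024, false, new SecureRandom());
      }
    KeyPair pair = kpg.generateKeyPair();
    System.out.println("Generated a " + pair.getPublic().getAlgorithm() + " key pair");
  }
}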
|
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_EXPRESSION_TYPE_COLLECTOR_H_
#define V8_EXPRESSION_TYPE_COLLECTOR_H_
#include "src/ast/ast-expression-visitor.h"
namespace v8 {
namespace internal {
// A Visitor over an AST that collects a human readable string summarizing
// structure and types. Used for testing of the typing information attached
// to the expression nodes of an AST.
struct ExpressionTypeEntry {
int depth;
const char* kind;
const AstRawString* name;
Bounds bounds;
};
class ExpressionTypeCollector : public AstExpressionVisitor {
public:
ExpressionTypeCollector(Isolate* isolate, FunctionLiteral* root,
ZoneVector<ExpressionTypeEntry>* dst);
void Run();
protected:
void VisitExpression(Expression* expression);
private:
ZoneVector<ExpressionTypeEntry>* result_;
};
} // namespace internal
} // namespace v8
#endif // V8_EXPRESSION_TYPE_COLLECTOR_H_
|
/**
* Transfer events from the primary event storage to the working area
*/
private void fill()
{
double current = this.highmark;
if (!primary.isEmpty())
{
if (this.working.size() > Simulator.MAX_BUFFER)
{
if (!overrun)
{
System.err.println(" System arrivals faster than system exits");
System.err.println(" Completed: "
+ this.completed.size());
this.overrun = true;
}
this.primary.addAll(this.working);
this.working.clear();
this.primary.sort(Comparator
.comparingDouble(Event::getCompleted));
}
else
{
this.overrun = false;
}
for (int index = 0; index < this.capacity; index++)
{
if (this.primary.isEmpty())
{
break;
}
else
{
final Event event = primary.remove(0);
this.working.add(event);
current = event.getCompleted();
}
}
}
this.highmark = current;
} |
def send_check_success(self, msg_dct):
self.log.debug('_send_check_success')
response = self.send_json(msg_dct)
if response.get('status', '') == 'success':
return True
return False |
/**
* Vigenere Cipher is a method of encrypting alphabetic text. It uses a simple form of polyalphabetic
* substitution. A polyalphabetic cipher is any cipher based on substitution, using multiple
 * substitution alphabets. The encryption of the original text is done using the Vigenère square or
* Vigenère table.
*/
package ciphers;
public class vigenere {
public static String encrypt(final String message, final String key)
{
String result = "";
for (int i = 0, j = 0; i < message.length(); i++) {
char c = message.charAt(i);
if (Character.isLetter(c)){
if(Character.isUpperCase(c)) {
result += (char) ((c + key.toUpperCase().charAt(j) - 2 * 'A') % 26 + 'A');
} else {
result += (char) ((c + key.toLowerCase().charAt(j) - 2 * 'a') % 26 + 'a');
}
} else {
result+=c;
}
j = ++j % key.length();
}
return result;
}
public static String decrypt( final String message, final String key)
{
String result ="";
for(int i = 0, j = 0; i < message.length(); i++){
char c = message.charAt(i);
if (Character.isLetter(c)){
if(Character.isUpperCase(c)) {
result += ((char)('Z'-(25-(c-key.toUpperCase().charAt(j)))%26));
} else {
result += ((char)('z'-(25-(c-key.toLowerCase().charAt(j)))%26));
}
} else {
result+=c;
}
j = ++j % key.length();
}
return result;
}
public static void main (String [] args){
String text="Hello World!";
String key="itsakey";
System.out.println(text);
String ciphertext=encrypt(text, key);
System.out.println(ciphertext);
System.out.println(decrypt(ciphertext, key));
}
}
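As a worked check of the index arithmetic used above: encrypting 'H' (72) with the key letter 'I' (73) gives (72 + 73 - 2 * 'A') % 26 + 'A' = (145 - 130) % 26 + 65 = 80, i.e. 'P'; decrypting 'P' with 'I' gives 'Z' - (25 - ('P' - 'I')) % 26 = 90 - (25 - 7) % 26 = 90 - 18 = 72, i.e. 'H' again. Subtracting 2 * 'A' (or 2 * 'a') simply maps both letters into the 0-25 range before the modular shift.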
|
/**
* Tests checking a connector's name for usage by another.
*/
@Test
@DisplayName("Tests checking a connector's name for usage by another.")
@SneakyThrows(Exception.class)
void testCheckConnectorName() {
mockMvc.perform(get("/api/connector/check/" + Base64.getEncoder().encodeToString("connector-name".getBytes()) + "/connector-id"))
.andExpect(status().isOk())
.andExpect(content().string("false"));
TestConnector testConnector = new TestConnector();
testConnector.setId("connector-id");
when(connectorManager.loadByName("connector name")).thenReturn(testConnector);
mockMvc.perform(get("/api/connector/check/" + Base64.getEncoder().encodeToString("connector name".getBytes()) + "/connector-id"))
.andExpect(status().isOk())
.andExpect(content().string("false"));
mockMvc.perform(get("/api/connector/check/" + Base64.getEncoder().encodeToString("connector name".getBytes()) + "/other-connector-id"))
.andExpect(status().isOk())
.andExpect(content().string("true"));
} |
j=raw_input()
j=list(j.split())
j=map(int,j)
count=0
n,m,a,b=j[0],j[1],j[2],j[3]
if ((n/m)*b)<=((n/m)*m)*a:
count+=(n/m)*b
p=((n%m)*a)
if b<=p:
count+=b
else:count+=p
print count
else:
print n*a
|
/* Starshatter OpenSource Distribution
Copyright (c) 1997-2004, Destroyer Studios LLC.
All Rights Reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name "Destroyer Studios" nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
SUBSYSTEM: Stars.exe
FILE: Shield.cpp
AUTHOR: <NAME>
OVERVIEW
========
Weapon class
*/
#include "MemDebug.h"
#include "Shield.h"
#include "Shot.h"
#include "WeaponDesign.h"
#include "Game.h"
// +----------------------------------------------------------------------+
static char* shield_name[] = {
"sys.shield.none",
"sys.shield.deflector",
"sys.shield.grav",
"sys.shield.hyper"
};
static int shield_value[] = {
0, 2, 2, 3
};
// +----------------------------------------------------------------------+
Shield::Shield(SUBTYPE shield_type)
: System(SHIELD, shield_type, "shield", shield_value[shield_type], 100, 0),
shield_cutoff(0.0f), shield_capacitor(false), shield_bubble(false),
deflection_cost(1.0f), shield_curve(0.05f)
{
name = Game::GetText(shield_name[shield_type]);
abrv = Game::GetText("sys.shield.abrv");
power_flags = POWER_WATTS | POWER_CRITICAL;
energy = 0.0f;
power_level = 0.0f;
shield_level = 0.0f;
switch (shield_type) {
default:
case DEFLECTOR:
capacity = sink_rate = 2.0e3f;
shield_factor = 0.05f;
break;
case GRAV_SHIELD:
capacity = sink_rate = 7.0e3f;
shield_factor = 0.01f;
break;
case HYPER_SHIELD:
capacity = sink_rate = 10.0e3f;
shield_factor = 0.003f;
break;
}
emcon_power[0] = 0;
emcon_power[1] = 0;
emcon_power[2] = 100;
}
// +----------------------------------------------------------------------+
Shield::Shield(const Shield& s)
: System(s), shield_factor(s.shield_factor), requested_power_level(0.0f),
shield_cutoff(s.shield_cutoff), shield_capacitor(s.shield_capacitor),
shield_bubble(s.shield_bubble), deflection_cost(s.deflection_cost),
shield_curve(s.shield_curve)
{
power_flags = s.power_flags;
energy = 0.0f;
power_level = 0.0f;
shield_level = 0.0f;
Mount(s);
}
// +--------------------------------------------------------------------+
Shield::~Shield()
{ }
void
Shield::SetShieldCapacitor(bool c)
{
shield_capacitor = c;
if (shield_capacitor) {
power_flags = POWER_CRITICAL;
shield_curve = 0.05f;
}
else {
power_flags = POWER_WATTS | POWER_CRITICAL;
shield_curve = 0.25f;
}
}
// +--------------------------------------------------------------------+
void
Shield::ExecFrame(double seconds)
{
System::ExecFrame(seconds);
if (power_level < requested_power_level) {
power_level += (float) (seconds * 0.10); // ten seconds to charge up
if (power_level > requested_power_level)
power_level = (float) requested_power_level;
}
else if (power_level > requested_power_level) {
power_level -= (float) (seconds * 0.20); // five seconds to power down
if (power_level < requested_power_level)
power_level = (float) requested_power_level;
}
if (power_level < 0.01 && !shield_capacitor) {
shield_level = 0.0f;
energy = 0.0f;
}
}
// +----------------------------------------------------------------------+
void
Shield::Distribute(double delivered_energy, double seconds)
{
System::Distribute(delivered_energy, seconds);
if (shield_capacitor) {
if (shield_cutoff > 0 && shield_cutoff < 0.999) {
float cutoff = shield_cutoff * capacity;
if (energy > cutoff)
shield_level = (energy-cutoff)/(capacity-cutoff);
else
shield_level = 0.0f;
}
else {
shield_level = energy/capacity;
}
}
else {
shield_level = energy/sink_rate;
energy = 0.0f;
}
if (shield_level < 0)
shield_level = 0;
}
// +--------------------------------------------------------------------+
double
Shield::DeflectDamage(Shot* shot, double damage)
{
double filter = 1;
double penetration = 5;
double leak = 0;
if (shot)
penetration = shot->Design()->penetration;
filter = 1 - shield_factor * penetration;
if (filter < 0)
filter = 0;
else if (filter > 1)
filter = 1;
if (shield_capacitor) {
if (shield_cutoff > 0 && shield_level < 1e-6) {
leak = damage;
energy -= (float) (damage * deflection_cost);
}
else {
leak = damage * (1 - pow(shield_level, shield_curve) * filter * availability);
double deflected = damage - leak;
energy -= (float) deflected * deflection_cost;
}
}
else {
leak = damage * (1 - pow(shield_level, shield_curve) * filter * availability);
}
return leak;
}
// +--------------------------------------------------------------------+
void
Shield::SetPowerLevel(double level)
{
if (level > 100)
level = 100;
else if (level < 0)
level = 0;
level /= 100;
if (requested_power_level != level) {
// if the system is on emergency override power,
// do not let the EMCON system use this method
// to drop it back to normal power:
if (power_level > 1 && level == 1) {
requested_power_level = (float) power_level;
return;
}
requested_power_level = (float) level;
}
}
void
Shield::SetNetShieldLevel(int level)
{
if (level > 100) level = 100;
else if (level < 0) level = 0;
requested_power_level = (float) (level/100.0);
power_level = requested_power_level;
}
void
Shield::DoEMCON(int index)
{
int e = GetEMCONPower(index);
if (power_level * 100 > e || emcon != index) {
if (e == 0) {
PowerOff();
}
else if (emcon != index) {
PowerOn();
if (power_level * 100 > e)
SetPowerLevel(e);
}
}
emcon = index;
}
|
Waste treatment plant. Image: Flickr user h080
Never really known as a friendly group, the Italian mafia has been brutally murdering people for hundreds of years. Most of the time, these killings are quick—some henchmen waylay the target, squeeze a trigger a few times and make their getaway. Yet according to a new report released last week by the Italian National Institute of Health, a few local Italian mobs have been slowly killing dozens of innocent people for decades by way of a multibillion dollar toxic waste disposal racket.
In 2014, the Italian parliament mandated that the National Institute of Health conduct an investigation into the higher-than-normal rates of death and cancer in 55 municipalities of the Naples and Caserta regions of southern Italy. This region has garnered the nickname "the Land of Fires" thanks to the frequency with which toxic waste is burned by the local Camorra mob, a practice which also gave rise to the region's other nickname: the Triangle of Death.
The Camorra mob has been running a multi-billion dollar racket in which they dispose of toxic waste for businesses in Italy's industrial north since at least the early 1990s. By skirting environmental regulations the mob is able to dispose of these hazardous industrial materials for a fraction of the cost of legal disposal, and the industrialists in the north are smart enough to not ask questions about what happens to their garbage once it leaves their hands.
An estimated 10 million tons of toxic waste has been buried and burned at a number of dump sites throughout Naples in the last 20 years. This has led to the widespread contamination of the underground wells which irrigate the farmland in the Naples region, which provides vegetables for much of Italy's center and south.
In recent years this has led police to sequester dozens of fields because their irrigation wells were found to contain high levels of lead, arsenic and the industrial solvent tetrachloride. The poisoning of the well water has led to what the new NIH report describes as a "critical" health crisis in the region which is characterized by "excessive" rates of tumors in newborn infants, particularly brain tumors.
Local residents have long known about the Camorra mob's racket and have been complaining about the resulting health crisis for years to authorities. This culminated in 2013 with a massive protest numbering well over 30,000 people calling for an end to the mafia's dumping. The public outrage prompted the Italian parliament to order an NIH investigation into the matter in 2014. Last week's report was an update to the 2014 report and blames the higher than average rates of cancer and death in the Naples region on "ascertained or suspected exposure to a combination of environmental contaminants that can be emitted or released from illegal dump hazardous waste sites or the uncontrolled burning of both urban and hazardous waste."
The report is a step in the right direction, but for many locals it only confirms what they have known and suffered for years, Reverend Maurizio Patriciello, a priest in the area, pointed out.
"Can we claim victory? Absolutely not," he wrote in Avvenire, the newspaper of the Italian bishops' conference, on Saturday. "In this shameful, sad and painful story, we have lost everything. The government above all." |
WASHINGTON, D.C. -- Green Party leaders and candidates said today that Greens are firmly on the side of sanctuary cities that are defying President Trump's immigration crackdown.
The Green Party has called for a halt to raids by Immigration and Customs Enforcement (ICE) targeting undocumented immigrants.
Green Party of the United States
http://www.gp.org
@GreenPartyUS
For Immediate Release:
Wednesday, October 4, 2017
Contact:
Scott McLarty, Media Director, 202-904-7614, [email protected]
"We need more sanctuary cites and more public officials refusing to cooperate. However the White House and agencies like ICE try to spin it, these raids and incidents of harassment are a violation of basic human rights. The focus on undocumented Latinx immigrants betrays a streak of racism behind the crackdowns," said Darlene Elias, co-chair of the Green Party of the United States, co-chair of the party's Latinx Caucus, and Massachusetts Green.
The ICE raids, which intensified last week with 500 arrests across the U.S., netted mostly immigrants with minor and nonviolent offenses on their records, despite the agency's claim that they're seeking violent criminals.
The Green Party opposes Mr. Trump's plans for a border wall and in September demanded reversal of the president's order canceling Deferred Action for Childhood Arrivals (DACA).
Greens endorse amnesty and a quick path to U.S. citizenship for undocumented immigrants. The party's platform states that "undocumented immigrants who are already residing and working in the United States, and their families, should be granted a legal status which includes the chance to become U.S. citizens."
See also:
Sanctuary city crackdown: 50 immigrants arrested in Massachusetts this week, more than 500 nationwide
MassLive.com, September 29, 2017
Green Party condemns President Trump's cancellation of DACA
Press release: Green Party of the United States, September 7, 2017
Green Party condemns the "Trump Wall of Shame" and other racist executive orders targeting immigrants
Press release: Green Party of the United States, January 27, 2017
MORE INFORMATION
Green Party of the United States http://www.gp.org
202-319-7191
@GreenPartyUS
Green candidate database and campaign information
News Center
Ballot Access
Video
Green Papers
Google+
Twitter
Livestream
YouTube
Facebook
Green merchandise
Green Pages: The official publication of record of the Green Party of the United States
~ END ~ |
if __name__ == '__main__':
a = [int(e) for e in input().split(' ')]
s = 17
for e in a:
if e in [5, 7]:
s = s - e
r = 'YES' if s == 0 else 'NO'
print(r)
|
Apple may begin encrypting data backed up to its iCloud storage service sooner than anticipated. In a beta version of iOS 9.3, the iPhone maker has modified the restore process. Now, some users are prompted to enter the passcode for the iPhone or other Apple device that created the iCloud backup, according to a setup screen spotted by Reddit user Vista980622.
“Access to your account is protected by the passcode for ‘iPhone.’ Your passcode is encrypted and cannot be read by Apple," the new screen reads. But it appears that the feature is only enabled for some devices using Apple’s two-step authentication, which asks users to verify their identity with a six-digit verification code sent to their mobile number or one of their other iOS devices or Mac computers.
For now it doesn’t appear that Apple has rolled out the feature widely just yet. Apple iOS devices running a beta version of the software were tested by International Business Times and received no such prompts after reinstalling the operating system.
Apple is rumored to be working on upgraded security features to its iCloud service, which would encrypt users’ data with the passcodes of their devices, making it so that even the company can't access user data or hand over the keys to the government if they requested them. But in doing so, it could make it more difficult for Apple to help customers retrieve data if they have forgotten the passcode.
At the same time, Apple is also set to battle it out with the FBI on Tuesday in federal district court in Riverside, California, over whether the agency and court has the power to compel the company to create backdoor software to bypass the security features found on an iPhone 5C that was used by one of the San Bernardino killers, Syed Rizwan Farook. |
from django.db import models
##EXAMPLE: USING MD5 TO ENCRYPT (HASH) A PASSWORD
#from django.db.models.functions import MD5
#Author.objects.create(name='<NAME>')
#author = Author.objects.annotate(name_md5=MD5('name')).get()
#print(author.name_md5)
#NOTE: IF THIS IS USED, THE SAME TRANSFORMATION MUST ALSO BE APPLIED WHEN VERIFYING THE PASSWORD
# Create your models here.
class Empleado(models.Model):
nombres = models.CharField('Nombres', blank=True, max_length=50, help_text='Ej: <NAME>')
apellidos = models.CharField('Apellidos', blank=True, max_length=50, help_text='Ej: <NAME>')
cedula = models.PositiveIntegerField('Cedula', primary_key=True, blank=True, help_text='Ej: 0123456789')
telefono = models.PositiveSmallIntegerField('Telefono', help_text='Ej: 0987654321')
rol = models.CharField('Rol', blank=True, max_length=35, help_text='Ej: Vendedor|Cliente')
def __str__(self):
return self.nombres
class Usuario(models.Model):
user = models.CharField('User', blank=True, max_length=30)
    password = models.CharField('Password', blank=True, max_length=20)
empleadoFK = models.ForeignKey(Empleado, on_delete=models.CASCADE)
def __str__(self):
return self.user
class TipoProducto(models.Model):
#idTipoProducto = models.AutoField(primary_key=True)
tipo = models.CharField('Tipo', blank=True, max_length=50)
descripcion = models.TextField('Descripcion')
def __str__(self):
return self.tipo
class Producto(models.Model):
userFK = models.ForeignKey(Usuario, on_delete=models.CASCADE)
imagePath = models.ImageField('Imagen', blank=True)
precio = models.FloatField('Precio', blank=True, help_text='Ej: 10.50')
detalle = models.CharField('Detalle', max_length=80)
descripcion = models.TextField('Descripcion')
tipoProductoFK = models.ForeignKey(TipoProducto, on_delete=models.CASCADE)
vistas = models.PositiveIntegerField('Vistas')
likes = models.PositiveIntegerField('Likes')
def __str__(self):
        return str(self.imagePath)
class TipoLocal(models.Model):
local = models.CharField('TipoLocal', max_length=50)
def __str__(self):
return self.local
class Local(models.Model):
nombreLocal = models.CharField('Local', primary_key=True, max_length=50, blank=True)
direccion = models.CharField('Direccion', max_length=60, blank=True)
latitud = models.FloatField('Latitud')
longitud = models.FloatField('Longitud')
telefono = models.PositiveSmallIntegerField('Telefono')
tipoLocalFK = models.ForeignKey(TipoLocal, on_delete=models.CASCADE, related_name='+')
def __str__(self):
return self.nombreLocal
class EmpleadoLocal(models.Model):
idLocal = models.ForeignKey(Local, on_delete=models.CASCADE)
idEmpleado = models.ForeignKey(Empleado, on_delete=models.CASCADE)
def __str__(self):
        return str(self.idLocal)
|
def _examples_to_batch(self, data: Dict[ColumnName, OneorMore[ArrayLike]]) -> ProcessedBatch:
in_data = {}
tgt_data = {}
for k, batch in data.items():
fld = self.dataset.fields[k]
if isinstance(fld, (tuple, list)):
for f, v in zip(fld, batch):
data_dict = tgt_data if f.is_target else in_data
if k not in data_dict: data_dict[k] = []
data_dict[k].append(f.transform_batch(v, device=self.device, train=self.dataset.train))
else:
tsr = fld.transform_batch(batch, device=self.device, train=self.dataset.train)
if fld.is_target: tgt_data[k] = tsr
else: in_data[k] = tsr
return in_data, tgt_data |
/* Copyright 2002, Red Hat Inc. */
#include <mqueue.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <sys/ipc.h>
#include <sys/sem.h>
#include <string.h>
#include <stdlib.h>
#include <machine/weakalias.h>
#define _LIBC 1
#include <sys/lock.h>
#undef _LIBC
#include "mqlocal.h"
int
mq_unlink (const char *name)
{
int size;
int saved_errno;
char *real_name;
char *ptr;
int i, rc;
int semid, msgqid;
key_t key;
/* ignore opening slash if present */
if (*name == '/')
++name;
size = strlen(name);
if ((real_name = (char *)malloc (size + sizeof(MSGQ_PREFIX))) == NULL)
{
errno = ENOSPC;
return -1;
}
/* use given name to create shared memory file name - we convert any
slashes to underscores so we don't have to create directories */
memcpy (real_name, MSGQ_PREFIX, sizeof(MSGQ_PREFIX) - 1);
memcpy (real_name + sizeof(MSGQ_PREFIX) - 1, name, size + 1);
ptr = real_name + sizeof(MSGQ_PREFIX) - 1;
for (i = 0; i < size; ++i)
{
if (*ptr == '/')
*ptr = '_';
++ptr;
}
/* get key and then unlink shared memory file */
if ((key = ftok(real_name, 255)) == (key_t)-1)
return -1;
rc = unlink (real_name);
if (rc == 0)
{
/* try to remove semaphore and msg queues associated with shared memory file */
saved_errno = errno;
semid = semget (key, 6, 0);
if (semid != -1)
semctl (semid, 0, IPC_RMID);
msgqid = msgget (key, 0);
if (msgqid != -1)
msgctl (msgqid, IPC_RMID, NULL);
errno = saved_errno;
}
return rc;
}
|
import { ComplexBase } from '@syncfusion/ej2-react-base';
import { AggregateRowModel } from '@syncfusion/ej2-treegrid';
/**
 * `AggregateDirective` represents an aggregate row of the React TreeGrid.
* It must be contained in a TreeGrid component(`TreeGridComponent`).
* ```tsx
* <TreeGridComponent dataSource={data} allowPaging={true} allowSorting={true}>
* <ColumnsDirective>
* <ColumnDirective field='ID' width='100'></ColumnDirective>
* <ColumnDirective field='name' headerText='Name' width='100'></ColumnDirective>
* </ColumnsDirective>
* <AggregatesDirective>
* <AggregateDirective>
* <AggregateColumnsDirective>
 *               <AggregateColumnDirective field='ID' type='Min'></AggregateColumnDirective>
 *           </AggregateColumnsDirective>
* </AggregateDirective>
* </AggregatesDirective>
* </TreeGridComponent>
* ```
*/
export class AggregateDirective extends ComplexBase<AggregateRowModel & { children?: React.ReactNode }, AggregateRowModel> {
public static moduleName: string = 'aggregate';
}
export class AggregatesDirective extends ComplexBase<{}, {}> {
public static propertyName: string = 'aggregates';
public static moduleName: string = 'aggregates';
}
|
Let’s face it; any job is stressful. Money makes the world go 'round, and working to earn enough money to survive, or maintain a lifestyle, however luxurious or not, is a source of daily struggle for everyone. While money concerns are not the sole source of stress in one’s profession, they are a large one, but there are other factors that make a job stressful; long hours, high client demand, and the emotional trauma of the job. While everyone deals with stress on the job, even in high amounts, on a daily basis many people are able to deal with and overcome this stress one way or another. There are some professions with higher stress levels than others, however, and sadly, there are workers within those professions that do not deal with the stress and demands of their job as successfully as others. These are the ten professions with the highest suicide rates in America.
10 Scientists
Via universitypost.dk
Perhaps a surprise on this list, but the work of a scientist can be quite stressful. The push to discover and innovate as well as to constantly publish new findings creates a lot of stress in a field where the competition is rather fierce, and sometimes, even cut-throat. Suicide rates amongst scientists are surprising, with scientists having a likelihood of committing suicide at a rate 1.28 times higher than the general population. For every 45 male suicides in the field, there are an estimated five female suicides per year, with one prominent example of a female chemist (chemists being the scientists most likely to take their own lives) committing suicide by swallowing cyanide upon refusal of a research grant.
9 Pharmacists
Via programs.mkedu.org
Perhaps another surprising entry on this list, pharmacists are not immune to the acute stress their job heaps upon them. Generally, a pharmacist is responsible for running their own business, and is responsible for patient welfare by prescribing them their medications. Furthermore, when some patients cannot afford to pay for their medications, or insurance will not cover said medications, the pharmacist often takes the brunt of a patient’s ire. Moreover, perhaps most difficultly, a pharmacist is constantly involved in the high-pressure world of the big pharmaceutical companies, sometimes treated as a sales rep as opposed to a health care professional. Pharmacists also have a substance abuse rate of nearly 20% higher than average, all of which contribute to the rate of suicide amongst pharmacists to be 1.29 higher than average.
8 Farm Workers
Via foxnews.com
One of the lowest paying jobs in America, with a salary of less than $20,000 annually; working in agriculture can be extremely stressful. Not only is the work hard labour, it is also dangerous, working with heavy machinery. There were 216 farm accident fatalities in 2012 alone, prompting Forbes magazine to rank farming as one of the nation's deadliest jobs. Beyond the stress of farming and the workplace hazards, a farmer is also at the mercy of nature, and nature can be cruel. When the earth doesn’t cooperate, than a farmer’s livelihood can be completely in jeopardy, resulting in a suicide rate that is 1.32 higher than average.
7 Electricians
Via electricianinottawa.com
There may be a scientific explanation for the higher than average suicide rates amongst electricians, a rate that is 1.36 higher than average. While being an electrician can be a lucrative profession, it can also be difficult when the economy is bad and work is scarce. Beyond the stressful economic factors that may affect electricians, there have been recent studies that have posited that an electrician’s long-term exposure to electromagnetic fields could ultimately affect brain chemistry. The electromagnetic fields may affect melatonin production in the brain, which can potentially lead to depression, potentially culminating in suicide.
6 Real Estate Agents
Via jkgroves.com
The world of real estate can be a high risk, high reward profession, with all of the accompanying stress expected of a career that can make you millions of dollars, or leave you broke. Especially since the housing crisis in 2008 when housing prices plummeted, the world of real estate has become extremely unpredictable. The lack of stability in real estate, particularly not knowing when the next paycheck may arrive if the housing market is weak, may very well be one of the main reasons why real estate agents commit suicide at a rate of 1.38 higher than the average person. Not only is suicide a risk as a real estate agent, but over one-third of all job-related deaths among real estate agents are murders.
5 Police Officers
Via highlandstoday.com
Thus far, police officers may be the least surprising entry on this list. The amount of stress, both physical and emotional, that a police officer endures during the course of their career can sometimes create emotional duress. Studies have suggested that police officers are more than twice as likely to show signs of depression during their careers than those in other professions, and are four times more likely to get less than 6 hours of a sleep a night, all the while dealing with violence and crime on a daily basis. While suicide amongst police officers is more common than other professions in America, it is a problem that plagues women and African-American men more often than white men, with suicide rates amongst women 2.03 higher and amongst African-American men 2.55 times higher than average.
4 Lawyers
Via mesrianilawgroup.yolasite.com
According to studies, before even graduating from law school, a reported 40% of law students already suffer from depression. Once practicing, lawyers are nearly four times more likely to suffer from depression than the average American. The extremely stressful environment a career in law demands, with long hours, poor public opinion and difficult cases and clients, are considered the biggest reasons why lawyers commit suicide at a rate of 1.33 times higher than the national average. Suicide among lawyers has become such a concern that many states have implemented mental health programs that are required for their lawyers.
3 Financial Workers
Via whistleblower.org
Another profession that may not come as a surprise: the correlation between financial workers and suicide has been visible to the public since the Great Depression, when bankrupted stock brokers leapt from buildings. As the economy goes, so goes the average rate of suicide amongst workers in the financial sector. The suicide rate amongst financial workers in America is 1.51 times higher than average, which is not entirely surprising given the economic landscape of the country post-2008. In the first three months of 2014 alone, there were already 11 reported suicides amongst those in finance.
2 Dentists
Via freewebsitehosting4u.info
Not many, if anyone, enjoys going to the dentist. Though this may not actively contribute to the likelihood of a dentist committing suicide at a rate of 1.67 times higher than the American average, it may compound the extremely high stress nature of the job and amongst all professions, dentistry is considered one of the most stressful. Being a dentist can be a lucrative, rewarding profession, but it also brings with it long hours, reluctant, if not downright difficult patients, and no guarantee of success or stability. Because of this, studies suggest that dentists are more likely to suffer from mental disorders, but are also more reluctant to seek treatment for disorders, perhaps explaining the higher than average suicide rate.
1 Doctors
Via healthindya.com
Doctors are 1.87 times more likely to commit suicide than the average American. While suicide accounts for roughly 2% of all deaths amongst the general population in the United States, 4% of all physician deaths are by suicide. The high stress nature of the job, like all jobs on this list, is the number one factor in the suicide rate amongst doctors. Doctors also have extra difficulty; when a doctor suffers from depression or another mental disorder, they are reluctant to seek treatment, potentially fearing for their practice if word of their own need for professional help were to be revealed. There have also been theories that suggest that because doctors are trained in medicine, they simply are more adept at actually committing suicide, knowing how to achieve their desired result, and knowing what drugs to administer to do so. While this theory hasn’t been proven, it may also be a factor that helps explain the high rate of suicide amongst doctors. |
// Index pushes a list of documents to the respective partitions. It first breaks the list into
// sub-lists based on the partitions, and then pushes them in parallel to all sub-indexes
func (i *DistributedIndex) Index(docs []index.Document, options interface{}) error {
splitDocs := make([][]index.Document, len(i.partitions))
for _, d := range docs {
splitDocs[i.part.PartitionFor(d.Id)] = append(splitDocs[i.part.PartitionFor(d.Id)], d)
}
var err error
var wg sync.WaitGroup
for x, split := range splitDocs {
wg.Add(1)
go func(x int, split []index.Document) {
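			// Note: err below is written from multiple goroutines without
			// synchronization; guarding it with a sync.Mutex or using
			// errgroup.Group would make the error collection race-free
			// (added remark, not part of the original code).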
			if e := i.partitions[x].Index(split, options); e != nil {
err = e
}
wg.Done()
}(x, split)
}
wg.Wait()
return err
} |
// NewAdmonitionParagraph returns a new Paragraph with an extra admonition attribute
func NewAdmonitionParagraph(lines []interface{}, admonitionKind AdmonitionKind, attributes interface{}) (Paragraph, error) {
log.Debugf("new admonition paragraph")
attrs, err := NewAttributes(attributes)
if err != nil {
return Paragraph{}, errors.Wrapf(err, "failed to initialize an Admonition Paragraph element")
}
p, err := NewParagraph(lines, attrs)
if err != nil {
return Paragraph{}, err
}
p.Attributes = p.Attributes.Set(AttrAdmonitionKind, admonitionKind)
return p, nil
} |
// NewConfig create a Cfg from config file
func NewConfig(p string) (*Cfg, error) {
rawConfig, err := ioutil.ReadFile(p)
if err != nil {
return nil, err
}
var config Config
err = json.Unmarshal(rawConfig, &config)
if err != nil {
return nil, err
}
return &Cfg{Path: p, RawConfig: rawConfig, Config: &config}, nil
} |
import { ECS, System } from '@curbl/ecs';
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.ECS = new ECS();
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
window.System = System;
|
def minimal_subregion(self) -> Tuple[int, int, 'ArrayMetadata']:
subregion_origin = self._full_min_offset
subregion_size = self._full_max_offset - self._full_min_offset
new_metadata = ArrayMetadata(
self.shape, self.dtype,
strides=self.strides,
first_element_offset=self.first_element_offset - self._full_min_offset,
buffer_size=subregion_size)
return subregion_origin, subregion_size, new_metadata |
import { Module } from '@nestjs/common';
import { BijusController } from './bijus.controller';
import { BijusService } from './bijus.service';
@Module({
controllers: [BijusController],
providers: [BijusService],
})
export class BijusModule {}
|
def startServer(
self,
persistFilename: str = "networktables.ini",
listenAddress: str = "",
port: int = constants.NT_DEFAULT_PORT,
):
return self._api.startServer(persistFilename, listenAddress, port) |
Inbound Recruiting: 6 Ways the Internet Will Impact Talent Acquisition in 2016
Mike Roberts
The internet has completely transformed the way job seekers discover and apply to jobs. Research shows that more than seven in ten candidates now start their search on Google, rather than going directly to a job board. Additionally, social media and other internet-driven technologies are playing a more crucial role in their search than ever before.
While the list of internet-related impacts to the candidate side of the process is vast, it’s important to point out what this all means for recruiters and the talent acquisition function as we settle into 2016. If a more significant part of the job search process is, in fact, taking place online, then recruiters need both the skills and competencies and overarching IT support to meet candidates there.
In this post, we will inspect this transformation by focusing on a number of trends and predictions around what will happen in 2016. In particular, we will discuss connections between the world of internet marketing and recruitment marketing—mashed up into the idea of inbound recruiting.
1. Career sites will continue to dominate as a source of hire
In 2015, Bersin by Deloitte and a number of other firms highlighted a changing of the guard in source of hire. There was a time when job boards dominated as the top source of hire, but a combination of a better economy, evolving internet-user expectations and preferences, the increasing power of employer branding, and many other factors has made the career site the centerpiece for recruitment efforts.
As we’ve discussed in the past, a hub-and-spoke model is emerging, where the career site is the hub and sources like Google, job boards, social media, employer branding content, and recruitment marketing emails are the spokes. Each spoke—or source—in this model drives inbound interest and traffic.
We feel this trend will only pick up steam, particularly as more companies continue to focus on both the strength of their career site and optimizing various channels attracting candidates to it. TA teams will continue to adopt more digital marketing strategies—especially at the top of the funnel. Social recruiting, career site SEO, and more all gained ground in the list of priorities in 2015 and will play a central role in the future.
2. Recruiters will play less of a role in early phases of the job search process
Similar to the way consumers now do most of their research online and even buy things without ever going into a store or talking to a salesperson, the process of searching for a job and submitting an application is following this path. Research from the Talent Board showed that in 2014, 78.6% of job seekers became aware of a career opportunity through their own job search, not a recruiter.
As the job search only becomes more internet-focused, this trend is likely to continue. New attention to top-of-the-funnel recruiting strategies will offset the role of recruiters in the early phase of the job search process.
Although this may seem like bad news for recruiters, it’s not. In sales processes involving complex products or services, which are similar to the process of hiring a skilled worker, digital marketing has not made salespeople obsolete. Rather, their job has evolved, as we’ll discuss in the next point.
3. Disintermediation will allow recruiters to focus on engagement
This whole movement toward a self-service, find-jobs-on-your-own model means recruiters can focus less on menial sourcing strategies and more on working to engage and build relationships with quality candidates.
Inbound marketing helps to shift the burden of sourcing off salespeople, because consumers and buyers are being pulled into sites via content, SEO, social, and so on. The same thing is starting to happen in talent acquisition, and we expect the growing focus on inbound recruiting to result in improved candidate experience as well as better candidates and recruiter productivity in 2016.
Two areas in particular, social recruiting and employer branding content, will likely get the most attention as resources are shuffled around. Awareness of the impact of these strategies, coupled with freed-up resources, will drive new types of benefits for companies in 2016.
4. New partnerships between marketing and recruiting will be forged
Research from LinkedIn shows 47% of recruiting teams partner with marketing to build their employer brand. Considering marketing has vast experience in building brands on the internet, in addition to attracting, nurturing, and converting buyers, it only makes sense for relationships between the two departments to deepen.
We expect more collaboration and knowledge transfer to take place between marketing and recruiting in 2016. Companies that can enable this cross-functional interaction will see the impact in their inbound recruiting performance.
Transfer of knowledge will go both ways, as both functions have unique experiences trying to attract and convert strangers into buyers/candidates.
5. Replacement/covering up of candidate-facing ATS will accelerate
One of the biggest opportunities for improvement, and yet the most often overlooked (or ignored) aspect of inbound recruiting, is the candidate-facing UX and UI of the legacy ATS. For candidates, there is nothing more off-putting and conversion-rate-killing than the back-office look and feel of applicant tracking system apply flows.
At a time where consumer brands are competing based on user experience, job seekers’ patience for poor career site candidate experiences is fading rapidly. As awareness continues to raise around this topic, it is expected that the laggards still presenting a legacy ATS experience to candidates will do something about it this year—whether that’s via ATS integration or entirely ripping and replacing the system.
Some companies still don’t even offer the option to apply via mobile. It’s hard to believe, but it’s true. Those guilty of this are more than likely simply putting too much faith in their ATS, when in reality the vendors behind those ATS’ aren’t moving as fast as today’s dynamic candidate expectations.
6. “Data-driven recruiting” will become more than a topic of conversation
Used effectively, data can support and guide decision-making and help talent acquisition teams understand where to allocate their resources. As recruiters start to think more like internet marketers, it’s only natural that they will use data in more strategic ways. 2015 will be remembered as the year a majority of recruiting leaders became aware of the power of data. 2016 will be the year many will make it part of their strategy.
We also expect more recruiting teams than ever to go above and beyond ad-hoc performance management, deploying next-generation recruiting analytics solutions. Adoption will be driven largely by awareness, but also by the simplicity of SaaS delivery and subscription models, as well as the recent and ongoing consumerization of analytics.
Those are our thoughts on how the internet will impact talent acquisition in 2016. Let us know what you think on Twitter at @Jibe. |
The Canadian government is warning Washington it may retaliate if the U.S. brings in a new, more restrictive trade rule on how beef and pork product exports are labelled.
The U.S. Department of Agriculture proposal announced Friday calls for foreign producers to list on the package of meat products where the animal was born, raised and slaughtered, as well as other packaging restrictions.
"The proposed changes will increase the discrimination against exports of cattle and hogs from Canada and increase damages to Canadian industry," Canada's Agriculture Minister Gerry Ritz said in a news release from Ottawa.
The U.S. said the new rule would help it comply with a World Trade Organization decision last year that found its country-of-origin labelling regulation discriminated against Canada and Mexico.
The Canadian government has said one year after that regulation was imposed by Washington in 2008, shipments of Canadian cattle into the U.S. dropped by half and exports of slaughter hogs declined by 58 per cent.
Ritz called the new proposed rule "extremely" disappointing and said it would make the discrimination worse, hurting Canadian producers and the meat industry even more badly.
He said Ottawa will respond if the U.S. doesn't back off, but did not say how.
"Our government will consider all options, including retaliatory measures, should the U.S. not achieve compliance by May 23, 2013, as mandated by the WTO."
The Canadian Cattlemen's Association and the Canadian Pork Council issued statements Friday saying they share Ottawa's concerns.
The pork council estimates the U.S. government's current meat labelling rule has already cost Canada about $1 billion per year in reduced beef and cattle exports.
"The proposed rule is supposed to remove discrimination found by the WTO after a lengthy expensive challenge by Canada," the council said in a release.
"It does not do this, indeed, it exacerbates the problems."
The USDA said its country-of-origin rule is designed to help consumers make informed decisions about the food products they buy. |
package rs.math.oop1.z090204.interfejsi.z02.ljudi;
class Nastavnik extends Covek implements Deskripcija, Kvalitet {
private String predmet;
private int godinaStaza;
public Nastavnik(String ime, String prezime, String predmet, int godinaStaza) {
super(ime, prezime);
this.predmet = predmet;
this.godinaStaza = godinaStaza;
}
@Override
public String toString() {
return "" +
super.toString() +
", predmet: '" + predmet + '\'' +
", godina staza: " + godinaStaza +
"";
}
@Override
public void predstaviSe() {
System.out.println(this);
}
@Override
public int jeDobar() {
return 100;
}
@Override
public int jePosten() {
return 100;
}
} |
package fusion.oapt.general.graphToOnto;
import java.io.File;
import java.io.FileWriter;
import agg.xt_basis.GraGra;
import org.apache.jena.rdf.model.*;
import org.apache.jena.ontology.*;
public class Test
{
public static void main(String[] args) throws Exception
{
//*********** Load the graph which corresponds to the ontology
//System.out.println( "Enter the path of your graph");
//Scanner sc = new Scanner(System.in);
String fileName="ontologyOnGGX.ggx";
//fileName= sc.nextLine();
GraGra gragra;
gragra = new GraGra(true);
	gragra.setFileName(fileName);
gragra.load(fileName);
gragra.setName("GRammar");
if (gragra == null)
System.out.println("Grammar: " + fileName + " inexistant! ");
// create the owl file
String fileOGName =fileName.substring(0,fileName.indexOf("."))+".owl";
new File(fileOGName);
OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
/**** Transformation *****/
//start the process of transformation
long begin = System.currentTimeMillis();
//new GraphToOntology(model, gragra);
long end = System.currentTimeMillis();
float time = ((float) (end-begin)) / 1000f;
System.out.println( "\nExecution time of transformation is : "+ time);
// save the result of the transformation
FileWriter fw = new FileWriter(fileOGName);
System.out.println("The OWL ontology is saved in " + fileOGName);
model.write( fw, "RDF/XML-ABBREV");
}
}
|
Photo by Caitlin Abrams: Gavin Kaysen at Spoon and Stable
Semiotics is the study of how images and words—a phrase like “hottest restaurant ever”—derive meaning from their context. For instance, if I were to write, “Old Fashioneds are hot,” it would have a very different meaning than “the surface of the sun is hot.” A headline stating, “Soup is Hot” would contain an embedded wink, while “Venus is Hot” sends us in an entirely different orbit. Semioticians would posit that all communication is, in fact, the human mind using bits of whatever is at hand (paint, sturgeon, E-flat, lace) to create meaning in the mind of the beholder by summoning context. Without context, not only can meaning not be clear, it can’t even exist.
So let me tell you! I was in The Hottest Restaurant Ever, having an Old Fashioned, and thinking about how to put this review into useful context. Spoon and Stable is, of course, The Hottest Restaurant Ever, opened by Gavin Kaysen in late November in the North Loop neighborhood of Minneapolis.
For context: Kaysen grew up in Minnesota and left to have the starriest cooking career imaginable, winning a James Beard Award, being named a Food & Wine Best New Chef, competing on Iron Chef, and coaching the U.S. team in the Olympics of cooking: the Bocuse d’Or. The chain of conquered summits year after year prompted whispers in his home state. Will he come back? He says he might, he says he might. He has kids now, and everyone comes back to raise kids. Then the announcement: He would come back. Then, he came back. Then the poaching commenced from Minneapolis’s finest restaurant, La Belle Vie. First went the sommelier (Bill Summerville), then the pastry chef (Diane Yang), and local media (myself included) turned all eyes, all blogs on Spoon and Stable. Headlines gushed forth: The front door was installed, plans for pot roast were announced, Thomas Keller and Daniel Boulud flew in for the opening! Kaysen was featured in this very magazine before he had served a paying customer a single dish. The day the phone line opened, reservations booked solid, for months. Customers lined up outside in the falling snow in late afternoons hoping to get bar tables, which are first come, first served. I was one of those customers. I ordered the pot roast.
The menu comprises half a dozen appetizers, three pastas, which come as main courses or served in smaller portions, and half a dozen entrées, priced $25 to $39. There’s a bar menu of casual snacks as well (the full dinner menu is also available at the bar), and an elaborate dessert program. I tried most of the menu and found a few things that were very good, a great many which were very dull, and came away with little idea of how to put this all into a meaningful, useful context for a savvy Minneapolis restaurant-goer. Do you like hot restaurants? Then you’ll certainly like going to Spoon and Stable. But you already knew that. Do you mostly chase extraordinary plates of exquisite cuisine? Then Spoon and Stable may strike you as faltering.
There were some extraordinary plates: scallop crudo was a sensuous intensity of fresh scallops firmed up a bit with lime zest and salt, then decorated with charred scallion vinaigrette, a chiffonade of shiso leaves, compressed vinegared green apples, and crackling slips of garlic and fresno chili peppers. Each bite was like a waltz-step of lush pleasure followed by a tap-dance snap and crackle of vibrant spice. Delightful. The variation on salmon gravlax, with dewy but taut salmon paired with jewels of roast beets and orange was just as lively and engrossing. Pasta and risotto dishes were excellent. The perfectly chewy ropes of bucatini glistened with sea urchin cream and were studded with sea-fragrant clams, culminating in a charmingly highbrow and peasant-soul-satisfying combination that is so hard to achieve. Crispy potatoes, thrice-cooked in a process of baking, tearing, butter, garlic, and best-kitchens-in-the-world magic, are so good they could become a modern classic: Eat those right away.
But there were more dull dishes than magical ones. Dorothy’s Pot Roast (named for Kaysen’s beloved grandmother), which I tried again and again, tasted unseasoned, despite the fancy chanterelle mushrooms tucked beneath it, the raw slices of gossamer thin-sliced carrot on top, and a fancy rosemary broth poured tableside. The slow-cooked cod, seared Arctic char, and poached sturgeon were all too subtle, inducing yawns from the table. I didn’t have an entrée from the dinner menu I can honestly recommend.
Likewise, most desserts were forgettable. The apple crisp with elements of olive oil cake and coconut sorbet came together as little more than sweet; a grape frangipane with white wine sorbet was so delicate it tasted like nothing in particular. The exception was a buttercup squash custard with a pumpkin-pie-like ring of delicate custard containing a tiny lake of luxuriant sauce: pierce the wall of custard and the sauce tumbles out among quince balls and toasted marshmallow cream hillocks. It’s charming, and the tartness of the quince plays beautifully with the different flavors of sweet cream and pie spice. Why does the food here tend to want to erase itself so much of the time?
It does this less so in the bar. There, arancini, Italian-style, breaded rice balls, are filled with a liquid fontina cheese that oozes out, making it easy to blot up the black truffled crumbs that act as the little nest on the plate. Duck meatloaf sliders, made by combining duck meat and foie gras into a succulent, beautifully seared puck, then sliding them onto tiny brioche toasts, are indulgent and crave-worthy. They go well with the restrained, but darn-near-perfect cocktails by bartender Robb Jones. His roasted pineapple accent to the Old Fashioned is clever, adding a twist without the clutter.
The wine list, by Bill Summerville, is a wonder of adventure and economy, with $26 bottles of good Chianti as well as extensive offerings by single houses, such as the Austrian Weingut Prager and Italian Emidio Pepe. I particularly enjoyed the ease of “Bill’s Pick,” a nightly wine by the glass that lets diners spelunk through the wilds of Summerville’s educated taste, without risk of bottle cost. Summerville appears at most tables a few times during the evening as part of the service in the dining room, which is both courtly and gracious. Securing a table here does lead to the satisfying sense that you are in the center of the world, and possibly its king. This feeling is quadrupled when the kitchen sends out a tuft of fresh cotton candy the size of a cheerleader’s pompom to celebrate birthdays and anniversaries: Every eye turns to the huge pouf, and however much you felt like the king of the world, with the cotton candy, you feel it more so.
Poufs and kingly feelings notwithstanding, I’ll probably return with greatest excitement to the bar, to pile up appetizers and eat duck sliders. The restaurant’s food really is at its best when it’s riffing, playing, and being relaxed. While I’m in the bar, I’ll no doubt get to think more about context.
One night, I sat beside a table of Medtronic executives who compared each and every course with Bar La Grassa. OK, I thought. That’s their context. Another night I was beside a pair of very pretty young 20-somethings, one of whom went positively bonkers, sending things back: “I thought this was going to be better!” she sputtered. The kitchen graciously offered to replace her food, even though there was nothing particularly wrong with it. I happened to stand next to her later at the valet, and she was still ranting in disbelief. That’s her context, I thought. Kaysen told me later that he’s talked to customers who thought the menu would be structured like a tasting menu, like at La Belle Vie. Another context. I’ve tried and tried to imagine what I’d think if Spoon and Stable was a new restaurant by a comparative nobody, some promising sous chef from Tilia, let’s say. If that were the context I’d think: This kid shows lots of promise; he really can cook; he is one to watch! Is that, in the context of Kaysen’s career, a withering insult?
There’s a lot of promise here. At its best—in the bar—it seems to thrive because it doesn’t have any baggage. Baggage, of course, is another word we use for context when it’s bumming us out. Because even context has a context, and I think it’s inarguable that no other restaurant in the history of Minnesota has opened with so much context, throwing a spotlight on the complications of modern life, in which we all are trapped in a cultural quicksand of our own creation, with nothing to save us but our shared experience in the insistent human condition of needing to eat every day. But now we can do that eating at Spoon and Stable, which is The Hottest Restaurant Ever, and more—and less—than that, too.
211 N. 1st St., Mpls., 612-224-9850, spoonandstable.com |
/**
* This is only needed for arduino nodes. The Serial connection works on a
* different thread than the JavaFX Main thread.
*/
@Override
public void stop() throws Exception {
super.stop();
System.exit(0);
} |
import sys
inp = sys.stdin.readline
mod = 10 ** 9 + 7
def solve():
N = int(inp())
d = [set() for _ in range(N + 1)]
for i in range(N - 1):
u, v = map(int, inp().split())
d[u].add((v, i))
d[v].add((u, i))
G = [set() for _ in range(N + 1)]
st = [2]
vis = set([2])
tp_sort = []
while st:
v = st.pop()
tp_sort.append(v)
for u, i in d[v]:
if u in vis:
continue
G[v].add((u, i))
vis.add(u)
st.append(u)
sz = [0] * (N + 1)
cnt = [0] * (N - 1)
for v in tp_sort[::-1]:
cur = 1
for u, i in G[v]:
cur += sz[u]
cnt[i] = sz[u] * (N - sz[u])
sz[v] = cur
M = int(inp())
P = list(map(int, inp().split()))
cnt.sort()
cnt = cnt[::-1]
if M <= N - 1:
P.sort()
P = P[::-1]
for _ in range(N - 1 - M):
P.append(1)
else:
P.sort()
for _ in range(M - (N - 1)):
p = P.pop()
P[-1] = P[-1] * p % mod
P = P[::-1]
ans = 0
for c, p in zip(cnt, P):
ans = (ans + c * p) % mod
print(ans)
for _ in range(int(input())):
solve()
|
Natural Pathogens of Laboratory Mice, Rats, and Rabbits and Their Effects on Research
SUMMARY Laboratory mice, rats, and rabbits may harbor a variety of viral, bacterial, parasitic, and fungal agents. Frequently, these organisms cause no overt signs of disease. However, many of the natural pathogens of these laboratory animals may alter host physiology, rendering the host unsuitable for many experimental uses. While the number and prevalence of these pathogens have declined considerably, many still turn up in laboratory animals and represent unwanted variables in research. Investigators using mice, rats, and rabbits in biomedical experimentation should be aware of the profound effects that many of these agents can have on research. |
/**
* @param topLevel true if this is a top-level type where primitive types
* like 'int' are forbidden. Recursive calls pass 'false' to support
* arrays like {@code int[]}.
*/
private static void typeToString(Type type, StringBuilder result, boolean topLevel) {
if (type instanceof Class) {
Class<?> c = (Class<?>) type;
if (c.isArray()) {
typeToString(c.getComponentType(), result, false);
result.append("[]");
} else if (c.isPrimitive()) {
if (topLevel) {
throw new UnsupportedOperationException("Uninjectable type " + c.getName());
}
result.append(c.getName());
} else {
result.append(c.getName());
}
} else if (type instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType) type;
typeToString(parameterizedType.getRawType(), result, true);
Type[] arguments = parameterizedType.getActualTypeArguments();
result.append("<");
for (int i = 0; i < arguments.length; i++) {
if (i != 0) {
result.append(", ");
}
typeToString(arguments[i], result, true);
}
result.append(">");
} else if (type instanceof GenericArrayType) {
GenericArrayType genericArrayType = (GenericArrayType) type;
typeToString(genericArrayType.getGenericComponentType(), result, false);
result.append("[]");
} else {
throw new UnsupportedOperationException("Uninjectable type " + type);
}
} |
// Saves the rotations to a file
void GuardarArchivoRotaciones(String s, int Cod1, String Nom1, int Cod2, String Nom2){
int numero;
try{
InputStreamReader isr=new InputStreamReader(System.in);
BufferedReader br= new BufferedReader(isr);
FileWriter fw = new FileWriter("Rotaciones Autores AVL.txt",true);
BufferedWriter bw = new BufferedWriter(fw);
PrintWriter salida = new PrintWriter(bw);
salida.println("Rotacion "+s+" ");
salida.println("Argumentos: "+ Cod1 +" " +Nom1+" y " + Cod2+" " + Nom2);
salida.println("---------------------------------------------------");
rotaciones=rotaciones+"Rotacion "+s+"\n"+"Argumentos: "+ Cod1 +" " +Nom1+" y " + Cod2+" " + Nom2+"\n"+"---------------------------------------------------"+"\n";
salida.close();
}
catch(java.io.IOException ioex){
}
} |
def make_model(self):
self.model = m = make_model_with_hh(self.time_step)
hh = m.get_reaction("hh")
self.soma = m.Segment(None, [0,0,0], self.soma_diameter)
self.axon = m.Segment.make_section(self.soma,
[0,0,self.axon_length], self.axon_diameter,
maximum_segment_length=self.length_step)
self.segments = [self.soma] + self.axon
self.hh = [hh(seg, scale=1) for seg in self.segments]
if True:
print("Number of Locations:", len(self.model))
sa_units = self.soma.get_database_class().get("surface_area").get_units()
sa = self.soma.surface_area
print("Soma surface area:", sa, sa_units)
sa += sum(x.surface_area for x in self.axon)
print("Total surface area:", sa, sa_units) |
def subarray_dimensions(self, keys):
if len(keys) != len(self.dims):
raise ValueError("Number of keys must be equal to the number of" +
" dimensions. (Got " + str(len(keys)) + "/"
+ str(len(self.dims)) + ")")
newDims = DimensionHelper()
for key, dim in zip(keys, self.dims):
newDim = dim.subdimension(key)
if newDim is not None:
newDims.dims.append(newDim)
return newDims |
def add_searcher_variables(md, g, start: list, vertices_t: dict, deadline: int):
[X, Y] = ext.init_dict_variables(2)
S, m = ext.get_set_searchers(start)
var_for_test = {}
list_x_name = []
list_y_name = []
T_ext = ext.get_set_time_u_0(deadline)
for s in S:
for t in T_ext:
v_t = vertices_t.get((s, t))
for v in v_t:
dummy_x_name = "x[%d,%d,%d]" % (s, v, t)
if dummy_x_name not in list_x_name:
X[s, v, t] = md.addVar(vtype="BINARY", name=dummy_x_name)
list_x_name.append(dummy_x_name)
my_next_v = cm.get_next_vertices(g, s, v, t, vertices_t, T_ext)
if my_next_v is not None:
for u in my_next_v:
dummy_y_name = "y[%d,%d,%d,%d]" % (s, v, u, t)
if dummy_y_name not in list_y_name:
Y[s, v, u, t] = md.addVar(vtype="BINARY", name=dummy_y_name)
list_y_name.append(dummy_y_name)
var_for_test.update({'x': list_x_name})
var_for_test.update({'y': list_y_name})
my_vars = {'x': X, 'y': Y}
return my_vars, var_for_test |
Thou Shall Not Harm All Living Beings: Feminism, Jainism, and Animals
In this paper, I critically develop the Jain concept of nonharm as a feminist philosophical concept that calls for a change in our relation to living beings, specifically to animals. I build on the work of Josephine Donovan, Carol J. Adams, Jacques Derrida, Kelly Oliver, and Lori Gruen to argue for a change from an ethic of care and dialogue to an ethic of carefulness and nonpossession. I expand these discussions by considering the Jain philosophy of nonharm (ahimsa) in relation to feminist and other theories that advocate noneating of animals, “humane killing,” and “less harm.” Finally, I propose that a feminist appropriation of the Jain concept of nonharm helps us develop a feminist ethic of nonharm to all living beings. |
def load_dataset(pickle_path,only_one=False):
clips = {}
if(only_one):
pkl = sorted(os.listdir(pickle_path))[0]
clips["1"] = pickle.load(open(pickle_path+"/"+pkl,"rb"))
print("loaded clips from ",pkl)
else:
fold = 1
for pkl in sorted(os.listdir(pickle_path)):
clips["{0}".format(fold)] = pickle.load(open(pickle_path+"/"+pkl,"rb"))
print("loaded clips from ",pkl)
fold+=1
return clips |
// TestWatcher verifies that database server properly detects and applies
// changes to database resources.
func TestWatcher(t *testing.T) {
ctx := context.Background()
testCtx := setupTestContext(ctx, t)
db0, err := makeStaticDatabase("db0", nil)
require.NoError(t, err)
reconcileCh := make(chan types.Databases)
testCtx.setupDatabaseServer(ctx, t, agentParams{
Databases: []types.Database{db0},
ResourceMatchers: []services.ResourceMatcher{
{Labels: types.Labels{
"group": []string{"a"},
}},
},
OnReconcile: func(d types.Databases) {
reconcileCh <- d
},
})
assertReconciledResource(t, reconcileCh, types.Databases{db0})
db1, err := makeDynamicDatabase("db1", map[string]string{"group": "a"})
require.NoError(t, err)
err = testCtx.authServer.CreateDatabase(ctx, db1)
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0, db1})
db0Updated, err := makeDynamicDatabase("db0", map[string]string{"group": "a", types.OriginLabel: types.OriginDynamic})
require.NoError(t, err)
err = testCtx.authServer.CreateDatabase(ctx, db0Updated)
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0, db1})
db2, err := makeDynamicDatabase("db2", map[string]string{"group": "b"})
require.NoError(t, err)
err = testCtx.authServer.CreateDatabase(ctx, db2)
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0, db1})
db2.SetStaticLabels(map[string]string{"group": "a", types.OriginLabel: types.OriginDynamic})
err = testCtx.authServer.UpdateDatabase(ctx, db2)
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0, db1, db2})
db2.SetURI("localhost:2345")
err = testCtx.authServer.UpdateDatabase(ctx, db2)
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0, db1, db2})
db1.SetStaticLabels(map[string]string{"group": "c", types.OriginLabel: types.OriginDynamic})
err = testCtx.authServer.UpdateDatabase(ctx, db1)
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0, db2})
err = testCtx.authServer.DeleteDatabase(ctx, db2.GetName())
require.NoError(t, err)
assertReconciledResource(t, reconcileCh, types.Databases{db0})
} |
/**
* @return Whether the calling package is the current package.
*/
protected boolean isSelfPackage() {
final boolean isSelfPackage;
if (BuildConfig.DEBUG && ContextUtil.isTestContext(getContext())) {
isSelfPackage = true;
} else {
isSelfPackage = getContext().getPackageName().equals(getCallingPackage());
}
return isSelfPackage;
} |
def dstnct(n):
st=set()
ct=0
while(n!=0):
st.add(n%10)
n=n//10
ct+=1
if ct==len(st):
return True
else:
return False
n=int(input())
c=n
while(True):
c+=1
if dstnct(c):
print(c)
break
|
Genetic inhibition of interleukin-6 receptor signaling and Covid-19
There are few effective therapeutic options for the treatment of severe acute respiratory syndrome coronavirus 2 (SARS-CoV-2) infection. Early evidence has suggested that IL-6R blockers may confer benefit, particularly in severe coronavirus disease 2019 (Covid-19). We leveraged large-scale human genetic data to investigate whether IL6-R blockade may confer therapeutic benefit in Covid-19. A genetic instrument consisting of seven genetic variants in or close to IL6R was recently shown to be linked to altered levels of c-reactive protein (CRP), fibrinogen, circulating IL-6 and soluble IL-6R, concordant to known effects of pharmacological IL-6R blockade. We investigated the effect of these IL6R variants on risk of hospitalization for Covid-19 and other SARS-CoV-2-related outcomes using data from The Covid-19 Host Genetics Initiative. The IL6R variants were strongly associated with serum CRP levels in UK Biobank. Meta-analysis of scaled estimates revealed a lower risk of rheumatoid arthritis (OR 0.93 per 0.1 SD lower CRP, 95% CI, 0.90-0.96, P = 9.5 x 10-7), recapitulating this established indication for IL-6R blockers (e.g. tocilizumab and sarilumab). The IL-6R instrument was associated with lower risk of hospitalization for Covid-19 (OR 0.88 per 0.1 SD lower CRP, 95% CI, 0.78-0.99, P = 0.03). We found a consistent association when using a population-based control group (i.e. all non-cases; OR 0.91 per 0.1 SD lower CRP, 95% CI, 0.87-0.96, P = 4.9 x 10-4). Evaluation of further SARS-CoV-2-related outcomes suggested association with risk of SARS-CoV-2 infection, with no evidence of association with Covid-19 complicated by death or requiring respiratory support. We performed several sensitivity analyses to evaluate the robustness of our findings. Our results serve as genetic evidence for the potential efficacy of IL-6R blockade in Covid-19. Ongoing large-scale RCTs of IL-6R blockers will be instrumental in identifying the settings, including stage of disease, in which these agents may be effective.
There are few effective therapeutic options for the treatment of severe acute respiratory syndrome coronavirus 2 (SARS-CoV-2) infection. Interleukin-6 receptor (IL-6R) blockade has been proposed as one potential therapeutic strategy and more than 40 clinical trials for anti-IL-6R antibodies (including tocilizumab and sarilumab) in the setting of SARS-CoV-2 infection are currently underway. Early evidence from observational studies and open-label, uncontrolled trials has suggested that IL-6R blockers may confer benefit, particularly in severe coronavirus disease 2019 (Covid-19). 1 We leveraged large-scale human genetic data 2 to investigate whether IL-6R blockade may confer therapeutic benefit in Covid-19. A genetic instrument consisting of seven genetic variants in or close to IL6R (pairwise r² ≤ 0.1) was recently shown to be linked to altered levels of C-reactive protein (CRP), fibrinogen, circulating IL-6 and soluble IL-6R, concordant with known effects of pharmacological IL-6R blockade. 3 We investigated the effect of these IL6R variants on risk of hospitalization for Covid-19 and other SARS-CoV-2-related outcomes using data from The Covid-19 Host Genetics Initiative. 4
The IL6R variants were strongly associated with serum CRP levels in UK Biobank (Figure 1A). Meta-analysis of scaled estimates revealed a lower risk of rheumatoid arthritis (OR 0.93 per 0.1 SD lower CRP, 95% CI, 0.90-0.96, P = 9.5 × 10^-7), recapitulating this established indication for IL-6R blockers (e.g. tocilizumab and sarilumab). The IL-6R instrument was associated with lower risk of hospitalization for Covid-19 (OR 0.88 per 0.1 SD lower CRP, 95% CI, 0.78-0.99, P = 0.03). We found a consistent association when using a population-based control group (i.e. all non-cases; OR 0.91 per 0.1 SD lower CRP, 95% CI, 0.87-0.96, P = 4.9 × 10^-4).
Evaluation of further SARS-CoV-2-related outcomes suggested association with risk of SARS-CoV-2 infection, with no evidence of association with Covid-19 complicated by death or requiring respiratory support (Figure 1C). We performed several sensitivity analyses to evaluate the robustness of our findings (see Supplementary Appendix for further details, including phenotype definitions and results).
Our findings show that IL6R variants mimicking therapeutic inhibition of IL-6R are associated with lower risk of being hospitalized for Covid-19, a phenotype that correlates with disease severity (e.g. requiring supplemental oxygen is a typical reason for hospitalization). This suggests that pharmacological IL-6R blockade may be expected to lead to reduced Covid-19 severity. We also found an association with lower risk of SARS-CoV-2 infection. Whilst the latter finding may suggest that IL-6R blockade lowers susceptibility to SARS-CoV-2 infection, these phenotypes may be biased by symptom severity (e.g. individuals with more severe symptoms may be more likely to present for testing, to be offered testing or to have a positive test). The lack of association with very severe Covid-19 requiring respiratory support or leading to death may bear relevance in the context of the recently announced failure of sarilumab (a recombinant human monoclonal antibody against IL-6R) in a phase III RCT in patients requiring mechanical ventilation. 6 However, the genetic analysis with very severe Covid-19 was based on the fewest cases (536), which limits robust inference.
Our results serve as genetic evidence for the potential efficacy of IL-6R blockade in Covid-19.
Ongoing large-scale RCTs of IL-6R blockers will be instrumental in identifying the settings, including stage of disease, in which these agents may be effective.
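To make the scaling concrete, the sketch below shows one standard way such per-variant estimates could be combined: a fixed-effect inverse-variance weighted (IVW) meta-analysis of variant-level log-odds ratios expressed per 0.1 SD lower CRP. The seven effect sizes and standard errors are illustrative placeholders, not the study's data, and IVW is an assumed method rather than a description of the letter's exact procedure.

import numpy as np

# Hypothetical per-variant effects (log-OR for Covid-19 hospitalization per 0.1 SD
# lower CRP) and standard errors for seven IL6R variants; the values are placeholders.
beta = np.array([-0.15, -0.10, -0.08, -0.20, -0.05, -0.12, -0.18])
se = np.array([0.10, 0.09, 0.11, 0.12, 0.08, 0.10, 0.13])

# Fixed-effect inverse-variance weighted (IVW) combination of the scaled estimates.
w = 1.0 / se ** 2
beta_ivw = np.sum(w * beta) / np.sum(w)
se_ivw = np.sqrt(1.0 / np.sum(w))

or_ivw = np.exp(beta_ivw)
ci_low = np.exp(beta_ivw - 1.96 * se_ivw)
ci_high = np.exp(beta_ivw + 1.96 * se_ivw)
print(f"OR {or_ivw:.2f} (95% CI {ci_low:.2f}-{ci_high:.2f}) per 0.1 SD lower CRP")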
// Solution 2: DFS
#include <vector>
#include <climits>
using namespace std;

class Solution {
vector<vector<int>> vis, res;
vector<int> dir = {0, 1, 0, -1, 0};
void dfs(vector<vector<int>>& mat, int x, int y, int pre, int type) {
if (x < 0 || x >= mat.size() || y < 0 || y >= mat[0].size() || mat[x][y] < pre || vis[x][y] == 3 || vis[x][y] == type)
return;
vis[x][y] += type;
if (vis[x][y] == 3)
res.push_back({x, y});
for (int i = 0; i < 4; ++i) {
int dx = x + dir[i], dy = y + dir[i+1];
dfs(mat, dx, dy, mat[x][y], type);
}
}
public:
vector<vector<int>> pacificAtlantic(vector<vector<int>>& mat) {
int m = mat.size(), n = m == 0 ? 0 : mat[0].size();
vis = vector<vector<int>> (m, vector<int> (n, 0));
for (int i = 0; i < m; ++i) {
dfs(mat, i, 0, INT_MIN, 1);
dfs(mat, i, n-1, INT_MIN, 2);
}
for (int i = 0; i < n; ++i) {
dfs(mat, 0, i, INT_MIN, 1);
dfs(mat, m-1, i, INT_MIN, 2);
}
return res;
}
};
<gh_stars>0
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.slack;
import org.apache.camel.Consumer;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.component.slack.helper.SlackMessage;
import org.apache.camel.support.ScheduledPollEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
import org.json.simple.JSONObject;
/**
* The slack component allows you to send messages to Slack.
*/
@UriEndpoint(firstVersion = "2.16.0", scheme = "slack", title = "Slack", syntax = "slack:channel", label = "social")
public class SlackEndpoint extends ScheduledPollEndpoint {
@UriPath
@Metadata(required = "true")
private String channel;
@UriParam(label = "producer")
private String webhookUrl;
@UriParam(label = "producer", secret = true)
private String username;
@UriParam(label = "producer")
private String iconUrl;
@UriParam(label = "producer")
private String iconEmoji;
@UriParam(label = "consumer", secret = true)
private String token;
@UriParam(label = "consumer", defaultValue = "10")
private String maxResults = "10";
@UriParam(label = "consumer", defaultValue = "https://slack.com")
private String serverUrl = "https://slack.com";
/**
* Constructor for SlackEndpoint
*
* @param uri the full component url
* @param channelName the channel or username the message is directed at
* @param component the component that was created
*/
public SlackEndpoint(String uri, String channelName, SlackComponent component) {
super(uri, component);
this.webhookUrl = component.getWebhookUrl();
this.channel = channelName;
}
@Override
public Producer createProducer() throws Exception {
SlackProducer producer = new SlackProducer(this);
return producer;
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
if (ObjectHelper.isEmpty(token)) {
throw new RuntimeCamelException("Missing required endpoint configuration: token must be defined for Slack consumer");
}
SlackConsumer consumer = new SlackConsumer(this, processor);
configureConsumer(consumer);
return consumer;
}
@Override
public boolean isSingleton() {
return true;
}
/**
* The incoming webhook URL
*/
public void setWebhookUrl(String webhookUrl) {
this.webhookUrl = webhookUrl;
}
public String getWebhookUrl() {
return webhookUrl;
}
public String getChannel() {
return channel;
}
/**
* The channel name (syntax #name) or slackuser (syntax @userName) to send a
* message directly to an user.
*/
public void setChannel(String channel) {
this.channel = channel;
}
public String getUsername() {
return username;
}
/**
* This is the username that the bot will have when sending messages to a
* channel or user.
*/
public void setUsername(String username) {
this.username = username;
}
public String getIconUrl() {
return iconUrl;
}
/**
* The avatar that the component will use when sending message to a channel
* or user.
*/
public void setIconUrl(String iconUrl) {
this.iconUrl = iconUrl;
}
public String getIconEmoji() {
return iconEmoji;
}
/**
* Use a Slack emoji as an avatar
*/
public void setIconEmoji(String iconEmoji) {
this.iconEmoji = iconEmoji;
}
public String getToken() {
return token;
}
/**
* The token to use
*/
public void setToken(String token) {
this.token = token;
}
public String getMaxResults() {
return maxResults;
}
/**
* The Max Result for the poll
*/
public void setMaxResults(String maxResult) {
this.maxResults = maxResult;
}
public String getServerUrl() {
return serverUrl;
}
/**
* The Server URL of the Slack instance
*/
public void setServerUrl(String serverUrl) {
this.serverUrl = serverUrl;
}
public Exchange createExchange(JSONObject object) {
return createExchange(getExchangePattern(), object);
}
public Exchange createExchange(ExchangePattern pattern, JSONObject object) {
Exchange exchange = super.createExchange(pattern);
SlackMessage slackMessage = new SlackMessage();
String text = (String)object.get("text");
String username = (String)object.get("username");
slackMessage.setText(text);
slackMessage.setUsername(username);
if (ObjectHelper.isNotEmpty((JSONObject)object.get("icons"))) {
JSONObject icons = (JSONObject)object.get("icons");
if (ObjectHelper.isNotEmpty((String)icons.get("emoji"))) {
slackMessage.setIconEmoji((String)icons.get("emoji"));
}
}
Message message = exchange.getIn();
message.setBody(slackMessage);
return exchange;
}
}
|
Orthogonal Hyphenation of Planar and Liquid Chromatography for Mass Spectrometry of Biomarkers Out of the Bioassay Matrix (NP-HPTLC-UV/Vis/FLD-Bioassay-RP/IEX-HPLC-UV/Vis-ESI-MS).
Bioprofiling on the planar chromatogram with in situ biological/enzymatic assays is a powerful bioanalytical screening tool for the non-targeted detection of known and especially unknown/unidentified bioactive compounds, directly in multicomponent mixtures (e.g., foods, spices and botanicals). However, together with the bioactive zone, the adsorbed bioassay medium is eluted into the mass spectrometer (MS) and interfering the evaluation. Another sample track without bioassay has thus been handled in parallel. Hence, for a direct zone elution from the bioautogram, different setups were investigated to reduce the impact of the bioassay medium load. Biocompatible filter, orthogonal reversed phase/cation exchange columns (RP/IEX-HPLC), UV/Vis detector and Rheodyne valve were installed between the zone eluting interface (after normal-phase high-performance thin-layer chromatography-multi-imaging-bioassay, NP-HPTLC-UV/Vis/FLD-bioassay) and the MS. For the negative electrospray ionization mode (ESI-), an RP-18e-HPLC column and valve switch were exploited. After gradient optimization, the RP-column retarded the eluted polar compounds and split-off the salts of the bioassay medium in the first minutes. This reduced the bioassay load and separated analyte signals thereof. However, most bioassay medium mass signals were predominantly detectable in ESI+-MS. Here, the reduction of bioassay matrix signals was achieved by integrating a mixed mode RP/IEX column. Finally, two different super-hyphenations were successfully proven: NP-HPLC-UV/Vis/FLD-bioassay-RP-HPLC-UV/Vis-ESI--MS with valve switch and NP-HPLC-UV/Vis/FLD-bioassay-RP/IEX-HPLC-UV/Vis-ESI+/--MS with or without it. Although the original bioprofiling (NP-HPTLC-UV/Vis/FLD-bioassay) was prolonged from 3 min to 13 min per sample, such super-hyphenations covering chemistry/biology/mass spectrometry are considered as efficient non-target bioanalytical tool for fast analysis of complex samples. |
import numpy as np

def make_distributions(sample, nums):
distdict = {}
sample = list(sample)
for n in nums:
rnsamp = np.random.choice(list(sample),(n,200000))
prosamp = np.sum(rnsamp,axis=0)
distdict[str(n)]=prosamp
return distdict |
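As a usage sketch (illustrative values only, assuming numpy is available as imported above), the function can be fed a skewed population and a few sample sizes to compare the resulting distributions of sums:

# Illustrative input: 1,000 draws from a skewed (exponential) population.
population = np.random.exponential(scale=1.0, size=1000)

# Distributions of sums of n draws, for a few sample sizes.
dists = make_distributions(population, nums=[1, 5, 30])
for n, sums in dists.items():
    print(n, sums.mean(), sums.std())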
import Vue from 'vue';
import VueRouter from 'vue-router';
Vue.use(VueRouter);
// Work around the console error thrown when navigating to the current route again
const originalPush = VueRouter.prototype.push;
VueRouter.prototype.push = function (location: string): any {
return (originalPush.call(this, location) as any).catch((err: any) => err);
};
export default new VueRouter({
mode: 'history',
base: process.env.BASE_URL,
routes: [
    // Login
{
path: '/login',
name: 'login',
component: () => import('@/pages/home/login.vue')
},
// Dashboard
{
path: '/',
name: 'Dashboard',
component: () => import('@/pages/index.vue')
},
    // Base data
{
      // Organization info
path: '/base/organization',
name: 'organization-list',
component: () => import('@/pages/base/organization/index.vue')
},
{
      // User info
path: '/base/user',
name: 'user-list',
component: () => import('@/pages/base/user/index.vue')
},
{
      // Position info
path: '/base/position',
name: 'position-list',
component: () => import('@/pages/base/position/index.vue')
},
{
      // Role info
path: '/base/role',
name: 'role-list',
component: () => import('@/pages/base/role/index.vue')
},
{
      // Claim types - list
path: '/base/claim-type',
name: 'claim-type-list',
component: () => import('@/pages/base/claim-type/index.vue')
},
{
      // Claim types - add
path: '/base/claim-type/add',
name: 'claim-type-add',
component: () => import('@/pages/base/claim-type/add.vue')
},
{
      // Claim types - edit
path: '/base/claim-type/edit',
name: 'claim-type-edit',
component: () => import('@/pages/base/claim-type/edit.vue')
},
    // System settings
{
      // Module info
path: '/system/module',
name: 'module-list',
component: () => import('@/pages/system/module/index.vue')
},
{
      // API info
path: '/system/api',
name: 'api-list',
component: () => import('@/pages/system/api/index.vue')
},
{
      // Permission info
path: '/system/permission',
name: 'permission-list',
component: () => import('@/pages/system/permission/index.vue')
},
{
      // Data dictionary info
path: '/system/setting',
name: 'setting-list',
component: () => import('@/pages/system/setting/index.vue')
},
{
      // Data dictionary info - add
path: '/system/setting/add',
name: 'setting-add',
component: () => import('@/pages/system/setting/add.vue')
},
{
      // Data dictionary info - edit
path: '/system/setting/edit/:id',
name: 'setting-edit',
component: () => import('@/pages/system/setting/edit.vue')
},
    // Authentication and authorization
{
      // Clients
path: '/ids/client',
name: 'client-list',
component: () => import('@/pages/ids/client/index.vue')
},
{
      // Clients - add
path: '/ids/client/add',
name: 'client-add',
component: () => import('@/pages/ids/client/add.vue')
},
{
      // Clients - edit
path: '/ids/client/edit/:id',
name: 'client-edit',
component: () => import('@/pages/ids/client/edit.vue')
},
{
      // Identity resources
path: '/ids/identity-resource',
name: 'identity-resource-list',
component: () => import('@/pages/ids/identity-resource/index.vue')
},
{
      // API resources
path: '/ids/api-resource',
name: 'api-resource-list',
component: () => import('@/pages/ids/api-resource/index.vue')
},
]
}); |
/**
* Get a set of empty configured {@link BatchPoints}
* @return A set of {@link BatchPoints}
*/
private BatchPoints batchPoints() {
return BatchPoints
.database(this.database)
.tag("async","true")
.consistency(InfluxDB.ConsistencyLevel.ALL)
.build();
} |
# **PSEUDO RSA**
# LIBRARIES
from Crypto.PublicKey import RSA
import os
import math
import sympy
#-------------------------------------------------------------------------------
# Filepaths
my_info_path = "./My_info/" # Folder where my public key and encrypted files are stored.
my_private_path = "./My_private_info/" # Folder where my deduced private keys will be stored.
decrypted_path = "./Decrypted/" # Folder where my decrypted files will be stored.
#-------------------------------------------------------------------------------
def second_degree(a,b,c):
'''
Objective:
    - Find the solutions of a second-degree polynomial equation.
Input:
- a,b,c: coefficients of the equation
Output:
- The two results (integers).
'''
inside_sqrt = b**2 - 4*a*c
res1 = (-b + math.isqrt(inside_sqrt))//(2*a)
res2 = (-b - math.isqrt(inside_sqrt)) //(2*a)
return res1,res2
def main():
# First, I will read my public key.
with open(my_info_path+"victor.novelle_pubkeyRSA_pseudo.pem",'r') as f:
my_public_key = RSA.import_key(f.read())
# Extract the modulus.
modulus = my_public_key.n
# Then,we obtain the number of bits and extract B.
block_size = modulus.bit_length()//4
B = int(bin(modulus)[3*block_size +2:],2)
carry = 0
found = False
# rxs and r+s computation.
while not found:
A = int(bin(modulus)[2:block_size+2],2)
        A -= carry # We subtract the carry.
C = int(bin(modulus)[block_size+2:3*block_size+2],2)
C += (1<<(2*block_size))*carry # And add it to the middle section.
AB = rs = (A << block_size) + B # rs
BA = (B << block_size) + A
# Finding r + s
r_s = math.isqrt(C-BA+2*AB)
if r_s**2 == C-BA+2*AB: found = True
else: carry+= 1
assert carry < 3
# Now we can solve the second degree equation.
r,s = second_degree(1,-r_s,rs)
# Now, we can compute p and q and thus, decrypt the files.
p = (r << block_size) + s
q = (s << block_size) + r
# First, the selection of the necessary variables from the Public Key is executed.
publicExponent = my_public_key.e
modulus = my_public_key.n
Phimodulus = (p-1) * (q-1)
# Then, we compute d.
privateExponent = int(sympy.gcdex(publicExponent,Phimodulus)[0])
if privateExponent < 0:
privateExponent += Phimodulus
# Now that we have all the components, we can create the private key.
# Following Crypto.PublicKey.RSA.construct documentation:
# https://pycryptodome.readthedocs.io/en/latest/src/public_key/rsa.html
privateKey = RSA.construct((modulus,publicExponent,privateExponent,p,q))
with open(my_private_path+"victor.novelle_privkeyRSA_pseudo.pem",'wb') as f:
f.write(privateKey.export_key())
    # Now, we proceed to decrypt the original file, first obtaining the key encrypted with RSA and then decrypting the AES.
os.system("openssl rsautl -decrypt -in "+my_info_path+"victor.novelle_RSA_pseudo.enc -out "+decrypted_path+"/AES_key_pseudo.txt -inkey "+my_private_path+"victor.novelle_privkeyRSA_pseudo.pem")
os.system("openssl enc -d -aes-128-cbc -pbkdf2 -kfile "+decrypted_path+"/AES_key_pseudo.txt -in "+my_info_path+"victor.novelle_AES_pseudo.enc -out "+decrypted_path+"/og_file_pseudo.jpeg")
print("Files decrypted!")
main()
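As a small illustration of why the block decomposition above works, the following check (with toy values for r, s and the block size, not real key material) verifies the exact identity n = (r·s << 2b) + ((r² + s²) << b) + r·s that the recovery of A, B and C relies on; the carry loop in main() exists precisely because reading these blocks off n's bit pattern must also account for carries between them.

# Illustrative check of the block structure exploited above; r, s and the block
# size b are toy values, not real key material.
r, s, b = 0b1011, 0b0110, 4
p = (r << b) + s
q = (s << b) + r
n = p * q
# Exact arithmetic identity (always true); recovering the blocks from n's binary
# representation additionally requires the carry handling done in main().
assert n == (r * s << (2 * b)) + ((r * r + s * s) << b) + r * s
print("block identity holds for n =", n)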
|
/**
* Methods for working with deployments. Much of the actual work of {@link EventDefinitionDeployer} is done by orchestrating the different pieces of work this class does; by having them here, we allow
* other deployers to make use of them.
*/
public class EventDefinitionDeploymentHelper {
/**
* Verifies that no two event definitions share the same key, to prevent database unique index violation.
*
* @throws FlowableException
* if any two event definitions have the same key
*/
public void verifyEventDefinitionsDoNotShareKeys(Collection<EventDefinitionEntity> eventDefinitions) {
Set<String> keySet = new LinkedHashSet<>();
for (EventDefinitionEntity eventDefinition : eventDefinitions) {
if (keySet.contains(eventDefinition.getKey())) {
throw new FlowableException("The deployment contains event definition with the same key, this is not allowed");
}
keySet.add(eventDefinition.getKey());
}
}
/**
     * Updates all the event definition entities to match the deployment's values for tenant and deployment id.
*/
public void copyDeploymentValuesToEventDefinitions(EventDeploymentEntity deployment, List<EventDefinitionEntity> eventDefinitions) {
String tenantId = deployment.getTenantId();
String deploymentId = deployment.getId();
for (EventDefinitionEntity eventDefinition : eventDefinitions) {
// event definition inherits the tenant id
if (tenantId != null) {
eventDefinition.setTenantId(tenantId);
}
eventDefinition.setDeploymentId(deploymentId);
}
}
/**
     * Updates all the event definition entities to have the correct resource names.
*/
public void setResourceNamesOnEventDefinitions(ParsedDeployment parsedDeployment) {
for (EventDefinitionEntity eventDefinition : parsedDeployment.getAllEventDefinitions()) {
String resourceName = parsedDeployment.getResourceForEventDefinition(eventDefinition).getName();
eventDefinition.setResourceName(resourceName);
}
}
/**
* Gets the persisted event definition that matches this one for tenant and key.
* If none is found, returns null. This method assumes that the tenant and key are properly set on the
* event definition entity.
*/
public EventDefinitionEntity getMostRecentVersionOfEventDefinition(EventDefinitionEntity eventDefinition) {
String key = eventDefinition.getKey();
String tenantId = eventDefinition.getTenantId();
EventDefinitionEntityManager eventDefinitionEntityManager = CommandContextUtil.getEventRegistryConfiguration().getEventDefinitionEntityManager();
EventDefinitionEntity existingDefinition = null;
if (tenantId != null && !tenantId.equals(EventRegistryEngineConfiguration.NO_TENANT_ID)) {
existingDefinition = eventDefinitionEntityManager.findLatestEventDefinitionByKeyAndTenantId(key, tenantId);
} else {
existingDefinition = eventDefinitionEntityManager.findLatestEventDefinitionByKey(key);
}
return existingDefinition;
}
/**
     * Gets the persisted version of the already-deployed event definition. Note that this is different from {@link #getMostRecentVersionOfEventDefinition} as it looks specifically for an event definition that is already persisted and
     * attached to a particular deployment, rather than the latest event definition across all deployments.
*/
public EventDefinitionEntity getPersistedInstanceOfEventDefinition(EventDefinitionEntity eventDefinition) {
String deploymentId = eventDefinition.getDeploymentId();
if (StringUtils.isEmpty(eventDefinition.getDeploymentId())) {
throw new FlowableIllegalArgumentException("Provided event definition must have a deployment id.");
}
EventDefinitionEntityManager eventDefinitionEntityManager = CommandContextUtil.getEventRegistryConfiguration().getEventDefinitionEntityManager();
EventDefinitionEntity persistedEventDefinition = null;
if (eventDefinition.getTenantId() == null || EventRegistryEngineConfiguration.NO_TENANT_ID.equals(eventDefinition.getTenantId())) {
persistedEventDefinition = eventDefinitionEntityManager.findEventDefinitionByDeploymentAndKey(deploymentId, eventDefinition.getKey());
} else {
persistedEventDefinition = eventDefinitionEntityManager.findEventDefinitionByDeploymentAndKeyAndTenantId(deploymentId,
eventDefinition.getKey(), eventDefinition.getTenantId());
}
return persistedEventDefinition;
}
} |
import React from 'react';
import TooltipReadme from '!!raw-loader!../../../../vx-tooltip/Readme.md';
import Tooltip from '../../../../vx-tooltip/src/tooltips/Tooltip';
import TooltipWithBounds from '../../../../vx-tooltip/src/tooltips/TooltipWithBounds';
import useTooltip from '../../../../vx-tooltip/src/hooks/useTooltip';
import useTooltipInPortal from '../../../../vx-tooltip/src/hooks/useTooltipInPortal';
import Portal from '../../../../vx-tooltip/src/Portal';
import DocPage from '../../components/DocPage';
import TooltipTile from '../../components/Gallery/TooltipTile';
import DotsTile from '../../components/Gallery/DotsTile';
import BarStackHorizontalTile from '../../components/Gallery/BarStackHorizontalTile';
import StatsPlotTile from '../../components/Gallery/StatsPlotTile';
import AreaTile from '../../components/Gallery/AreaTile';
const examples = [TooltipTile, DotsTile, BarStackHorizontalTile, StatsPlotTile, AreaTile];
const components = [TooltipWithBounds, Tooltip, Portal, useTooltip, useTooltipInPortal];
export default () => (
<DocPage components={components} examples={examples} readme={TooltipReadme} vxPackage="tooltip" />
);
|
module Instances where
import Test.QuickCheck (Arbitrary, arbitrary, elements)
import Test.QuickCheck.Checkers (quickBatch, EqProp, (=-=), eq)
import Test.QuickCheck.Classes (applicative, functor)
-- | Pair instances
data Pair a = Pair a a deriving (Eq, Show)
instance Functor Pair where
fmap f (Pair x y) = Pair (f x) (f y)
instance Applicative Pair where
pure x = Pair x x
Pair f1 f2 <*> Pair x1 x2 = Pair (f1 x1) (f2 x2)
instance Arbitrary a => Arbitrary (Pair a) where
arbitrary = Pair <$> arbitrary <*> arbitrary
instance (Eq a) => EqProp (Pair a) where (=-=) = eq
-- | Two instances
data Two a b = Two a b deriving (Eq, Show)
instance Functor (Two a) where
fmap f (Two a b) = Two a (f b)
instance Monoid a => Applicative (Two a) where
pure = Two mempty
Two a f <*> Two b x = Two (a `mappend` b) (f x)
instance (Arbitrary a, Arbitrary b) => Arbitrary (Two a b) where
arbitrary = Two <$> arbitrary <*> arbitrary
instance (Eq a, Eq b) => EqProp (Two a b) where (=-=) = eq
-- | Three instances
data Three a b c = Three a b c deriving (Eq, Show)
instance Functor (Three a b) where
fmap f (Three a b c) = Three a b (f c)
instance (Monoid a, Monoid b) => Applicative (Three a b) where
pure = Three mempty mempty
Three a b f <*> Three c d x = Three (a `mappend` c) (b `mappend` d) (f x)
instance (Arbitrary a, Arbitrary b, Arbitrary c) =>
Arbitrary (Three a b c) where
arbitrary = Three <$> arbitrary <*> arbitrary <*> arbitrary
instance (Eq a, Eq b, Eq c) => EqProp (Three a b c) where (=-=) = eq
-- | Three' instances
data Three' a b = Three' a b b deriving (Eq, Show)
instance Functor (Three' a) where
fmap f (Three' a b1 b2) = Three' a (f b1) (f b2)
instance Monoid a => Applicative (Three' a) where
pure x = Three' mempty x x
Three' a f1 f2 <*> Three' b x1 x2 = Three' (a `mappend` b) (f1 x1) (f2 x2)
instance (Arbitrary a, Arbitrary b) => Arbitrary (Three' a b) where
arbitrary = Three' <$> arbitrary <*> arbitrary <*> arbitrary
instance (Eq a, Eq b) => EqProp (Three' a b) where (=-=) = eq
-- | Four instances
data Four a b c d = Four a b c d deriving (Eq, Show)
instance Functor (Four a b c) where
fmap f (Four a b c d) = Four a b c (f d)
instance (Monoid a, Monoid b, Monoid c) =>
Applicative (Four a b c) where
pure = Four mempty mempty mempty
Four a1 b1 c1 f <*> Four a2 b2 c2 x = Four (a1 `mappend` a2) (b1 `mappend` b2) (c1 `mappend` c2) (f x)
instance (Arbitrary a, Arbitrary b, Arbitrary c, Arbitrary d) =>
Arbitrary (Four a b c d) where
arbitrary = Four <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
instance (Eq a, Eq b, Eq c, Eq d) => EqProp (Four a b c d) where (=-=) = eq
-- | Four' instances
data Four' a b = Four' a a a b deriving (Eq, Show)
instance Functor (Four' a) where
fmap f (Four' a1 a2 a3 b) = Four' a1 a2 a3 (f b)
instance (Monoid a) => Applicative (Four' a) where
pure = Four' mempty mempty mempty
Four' a1 b1 c1 f <*> Four' a2 b2 c2 x = Four' (a1 `mappend` a2) (b1 `mappend` b2) (c1 `mappend` c2) (f x)
instance (Arbitrary a, Arbitrary b) => Arbitrary (Four' a b) where
arbitrary = Four' <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
instance (Eq a, Eq b) => EqProp (Four' a b) where (=-=) = eq
main = do
putStrLn "Pair tests"
quickBatch $ functor (undefined :: Pair (Char, Int, Float))
quickBatch $ applicative (undefined :: Pair (Char, Int, Float))
putStrLn "Two tests"
quickBatch $ functor (undefined :: Two (Maybe String) (Char, Int, Float))
quickBatch $ applicative (undefined :: Two (Maybe String) (Char, Int, Float))
putStrLn "Three tests"
quickBatch $ functor (undefined :: Three [Int] (Maybe String) (Char, Int, Float))
quickBatch $ applicative (undefined :: Three [Int] (Maybe String) (Char, Int, Float))
putStrLn "Three' tests"
quickBatch $ functor (undefined :: Three' [Int] (Char, Int, Float))
quickBatch $ applicative (undefined :: Three' [Int] (Char, Int, Float))
putStrLn "Four tests"
quickBatch $ functor (undefined :: Four String [Int] (Maybe String) (Char, Int, Float))
quickBatch $ applicative (undefined :: Four String [Int] (Maybe String) (Char, Int, Float))
putStrLn "Four' tests"
quickBatch $ functor (undefined :: Four' String (Char, Int, Float))
quickBatch $ applicative (undefined :: Four' String (Char, Int, Float))
|
// This file is part of libigl, a simple c++ geometry processing library.
//
// Copyright (C) 2015 Alec Jacobson <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla Public License
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
#ifndef IGL_COPYLEFT_PROGRESSIVE_HULLS_COST_AND_PLACEMENT_H
#define IGL_COPYLEFT_PROGRESSIVE_HULLS_COST_AND_PLACEMENT_H
#include <Eigen/Core>
#include "../igl_inline.h"
namespace igl
{
namespace copyleft
{
// A "cost and placement" compatible with `igl::decimate` implementing the
// "progressive hulls" algorithm in "Silhouette clipping" [Sander et al.
    // 2000]. This implementation fixes an issue where the original linear
// program becomes unstable for flat patches by introducing a small
// quadratic energy term pulling the collapsed edge toward its midpoint.
// This function is not really meant to be called directly but rather
// passed to `igl::decimate` as a handle.
//
// Inputs:
// e index of edge to be collapsed
// V #V by 3 list of vertex positions
// F #F by 3 list of faces indices into V
// E #E by 3 list of edges indices into V
    //   EMAP #F*3 list of indices into E, mapping each directed edge to its
    //     unique edge in E
// EF #E by 2 list of edge flaps, EF(e,0)=f means e=(i-->j) is the edge of
// F(f,:) opposite the vth corner, where EI(e,0)=v. Similarly EF(e,1) "
// e=(j->i)
// EI #E by 2 list of edge flap corners (see above).
// Outputs:
// cost cost of collapsing edge e
// p position to place collapsed vertex
//
IGL_INLINE void progressive_hulls_cost_and_placement(
const int e,
const Eigen::MatrixXd & V,
const Eigen::MatrixXi & F,
const Eigen::MatrixXi & E,
const Eigen::VectorXi & EMAP,
const Eigen::MatrixXi & EF,
const Eigen::MatrixXi & EI,
double & cost,
Eigen::RowVectorXd & p);
}
}
#ifndef IGL_STATIC_LIBRARY
# include "progressive_hulls_cost_and_placement.cpp"
#endif
#endif
|
Derivatives and the financialisation of the Italian state
ABSTRACT The existing literature on financialisation has devoted insufficient attention to how governments wield the market-based practices and technologies of financial innovation to pursue statecraft objectives. Because of this inattention, scholars have missed the opportunity to examine a crucial facet of the financialisation of the state. To remedy this limitation, the present article investigates how and why the Italian government designed derivatives-based strategies during the 1993–9 period. It argues that these tactics gained momentum in the context of the political struggles that developed in Italy beginning in the late 1980s. In particular, the study shows how a neoliberal-reformist alliance came to power and used financial innovation to comply with the Economic and Monetary Union (EMU) admission criteria. EMU dynamics enhanced the power position of the neoliberal-reformist coalition vis-à-vis the country's traditional political and business establishment. This work offers insights that go beyond the specificities of the Italian case. It encourages further research on how governments in other countries simultaneously exposed state institutions to financial speculation and gained access to a range of new instruments through which they could manage state affairs in a financialised manner. |
SAN JOSE, Calif. – San Jose Earthquakes defender Kip Colvey, midfielders Anibal Godoy and Vako, and forwards Marco Ureña and Chris Wondolowski have been called up to their respective national teams for the upcoming 2018 FIFA World Cup Qualifiers and international friendlies.
Wondolowski, 34, returns to the U.S. Men’s National Team to face his Quakes teammate Anibal Godoy and Panama on Oct. 6 at Orlando City Stadium in Orlando, Fla. at 4:35 p.m. PT (ESPN2, Univision). The United States will then take on Trinidad & Tobago at Ato Boldon Stadium in Courva, Trinidad & Tobago at 5 p.m. on Oct. 10 (beIN SPORT, UNIVERSO). The U.S. is in fourth place in the qualifying group, sitting behind Mexico, Costa Rica and Panama with a 2-3-3 record and will need to move into the top three to secure an automatic bid for the 2018 FIFA World Cup and avoid the inter-continental playoff.
Since making his international debut in 2011, Wondolowski has made 35 appearances, scoring 11 goals and providing three assists for the U.S. He recently scored his 133rd career MLS goal and is now tied with Jaime Moreno for the third most goals in MLS history, just one shy of second.
"It's a roster we've selected that we believe will give us success in these next two games. We have a pretty balanced roster with experienced players," said head coach Bruce Arena. "The players always give the commitment, that's never an issue. The issue is whether we get the results we need, and I think we're positioned to do that. I think if it comes down to where we need four or six points in our last two games, I think that's fine and we'll get it done."
U.S. ROSTER BY POSITION (Club; WCQ Caps/Goals):
GOALKEEPERS (3): Brad Guzan (Atlanta United FC; 15/0), Tim Howard (Colorado Rapids; 37/0), Nick Rimando (Real Salt Lake; 0/0)
DEFENDERS (9): DaMarcus Beasley (Houston Dynamo; 35/6), Matt Besler (Sporting Kansas City; 12/1), Geoff Cameron (Stoke City/ENG; 24/2), Omar Gonzalez (Pachuca/MEX; 16/0), Michael Orozco (Club Tijuana/MEX; 6/1), Tim Ream (Fulham/ENG; 6/0), Jorge Villafaña (Santos Laguna/MEX; 4/0), DeAndre Yedlin (Newcastle United/ENG; (8/0), Graham Zusi (Sporting Kansas City; 20/3)
MIDFIELDERS (10): Kellyn Acosta (FC Dallas; 5/0),Juan Agudelo (New England Revolution; 0/0), Paul Arriola (D.C. United; 5/1), Alejandro Bedoya (Philadelphia Union; 9/0), Michael Bradley (Toronto FC/CAN; 38/7), Benny Feilhaber (Sporting Kansas City; 6/0), Dax McCarty (Chicago Fire; 0/0), Darlington Nagbe (Portland Timbers; 9/0), Christian Pulisic (Borussia Dortmund/GER; 11/5), Gyasi Zardes (LA Galaxy; 4/1)
FORWARDS (4): Jozy Altidore (Toronto FC/CAN; 39/16), Clint Dempsey (Seattle Sounders FC; 41/18), Chris Wondolowski (San Jose Earthquakes; 0/0), Bobby Wood (Hamburg/GER; 12/4)
Colvey, 23, and the New Zealand National Team will face Japan in an international friendly on Oct. 6 at Toyota Stadium in Nagoya, Japan. The friendly comes in preparation for the All Whites’ intercontinental playoff against South America’s fifth-ranked nation in FIFA World Cup Qualifying.
Colvey most recently joined New Zealand in September for World Cup Qualifiers against the Solomon Islands where they earned a 6-1 victory on Sept. 1, and a 2-2 draw on Sept. 5 to become the last team standing in the Oceania region’s 2018 FIFA World Cup Qualifying campaign.
Godoy, 27, returns to the Panama National Team to take on Quakes teammate Chris Wondolowski and the United States on Oct. 6 at 4:35 p.m. PT at Orlando City Stadium in Orlando, Fla.. Panama then faces Marco Ureña and Costa Rica on Oct. 10 at 5 p.m. PT at Estadio Rommel Fernandez in Panama City, Panama. Panama currently sits in third place in the fifth and final round (hexagonal) of qualifying for CONCACAF behind Mexico and Costa Rica with 10 points and a 2-2-4 record.
Godoy last joined Los Canaleros for World Cup Qualifying matches against Mexico and Trinidad & Tobago, playing the full 90 minutes in the 3-0 win against T&T and the 1-0 loss to the Mexico. He has made a total of 83 appearances with the Panama National Team and has scored one goal and provided three assists dating back to 2010.
Vako, 24, joins the Georgia National Team to face Wales at Boris Paichadze Dinamo Arena in Tbilisi, Georgia on Oct. 6 at 9 a.m. PT, and then Serbia at FK Crvena Zvezda in Belgrade, Serbia on Oct. 9 at 11:45 a.m. PT.
Georgia currently sits in fifth place in Group D with a 0-3-5 record. Vako last joined Georgia for a pair of qualifiers in September, scoring in the 1-1 draw with Ireland and playing 92 minutes in the 1-1 draw with Austria. In his senior national team career, Vako has scored six goals in 26 appearances.
Ureña, 27, will link up with the Costa Rica National Team as they take on Honduras on Oct. 6 at 7 p.m. PT at Estadio Nacional de Costa Rica in San Jose, Costa Rica, and Panama on Oct. 10 at 5 p.m. PT at Estadio Rommel Fernandez in Panama City, Panama. The Ticos are in second place in the Hex behind Mexico with a 4-1-3 record and are just one point shy of securing a place in the 2018 FIFA World Cup in Russia.
Ureña most recently joined the Costa Rica National Team for a pair of qualifiers in September against the U.S. and Mexico, and proved to be the difference for the Ticos. The Quakes forward scored both goals in Costa Rica’s 2-0 win over the U.S., and scored the lone goal in the 1-1 draw with Mexico. In total, he has made 56 appearances for the senior squad, scoring 14 goals and providing four assists. |
<gh_stars>0
import asyncio
import traceback
import websockets
import logging
from jsonrpc import JsonRpcRequest, JsonRpcResponse, error
from .response_stream import ResponseStream
from .app import app
from controllers import *
class WebsocketServer:
"""
Websocket server core class. It will handle receiving and sending messages
"""
def __init__(self, port):
self.port = port
self.start_server = websockets.serve(
self.handler, "localhost", self.port)
def run_forever(self):
"""
        Start the server and run the event loop forever.
"""
print("Starting websocket server...")
asyncio.get_event_loop().run_until_complete(self.start_server)
print("Started server")
asyncio.get_event_loop().run_forever()
async def handler(self, websocket, _):
"""
Websocket handler
"""
connection = WebsocketConnection(websocket)
await connection.listen()
class WebsocketConnection:
def __init__(self, websocket):
self.websocket = websocket
async def listen(self):
while True:
message = await self.websocket.recv()
try:
request = self.parse_request(message)
except error.JsonRpcParseError as parse_error:
response = JsonRpcResponse(
request=None,
error=parse_error,
)
await self.send_response(response)
except websockets.exceptions.ConnectionClosed:
logging.info("Websocket connection closed.")
else:
await self.process_request(request)
async def process_request(self, request: JsonRpcRequest):
try:
print("< {0} {1}".format(request.request_id, request.method))
result = await app.call_procedure(request)
response = JsonRpcResponse(
request=request,
result=result,
)
await self.send_response(response)
except error.JsonRpcError as rpc_error:
response = JsonRpcResponse(
request=request,
error=rpc_error,
)
await self.send_response(response)
except Exception as e:
print("Error", type(e), e.args)
print(traceback.format_exc())
response = JsonRpcResponse(
request=request,
error=error.JsonRpcError(500, "Server internal error", str(e)),
)
await self.send_response(response)
def parse_request(self, message: str) -> JsonRpcRequest:
return JsonRpcRequest.from_json(self, message)
async def send_response(self, response: JsonRpcResponse):
data = response.to_json()
print("Sending response")
await self.websocket.send(data)
print("> {}".format(data))
def __handle_response_stream(self, request: JsonRpcRequest, stream: ResponseStream):
stream.onData(lambda data, last: self.__send_stream_response(request, data, last))
async def __send_stream_response(self, request: JsonRpcRequest, data, last: bool):
response = JsonRpcResponse(
request=request,
result=data,
stream=not last,
)
await self.send_response(response)
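
A minimal usage sketch for the server class above; the import path and port are assumptions (the module's own relative imports imply it lives inside a package), so adjust them to the actual project layout.

# Hypothetical entry point; module path and port are illustrative.
from websocket_server import WebsocketServer

if __name__ == "__main__":
    server = WebsocketServer(port=8765)
    server.run_forever()  # blocks and serves JSON-RPC requests over websockets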
|
import * as React from "react"
import Seo from "../../../components/seo"
import Layout from "../../../components/layout"
import Control from "../../../components/useForm/Control"
export default ({ location }: { location: any }) => {
return (
<Layout location={location} defaultLang="en">
<Seo title="useForm - control" location={location} />
<Control currentLanguage="en" />
</Layout>
)
}
|
/**
* Put new keys into use.
* This method will initialize the ciphers, digests, macs and compression
* according to the negotiated server and client proposals.
*
* @throws Exception if an error occurs
*/
@SuppressWarnings("checkstyle:VariableDeclarationUsageDistance")
protected void receiveNewKeys() throws Exception {
byte[] k = kex.getK();
byte[] h = kex.getH();
Digest hash = kex.getHash();
boolean debugEnabled = log.isDebugEnabled();
if (sessionId == null) {
sessionId = h.clone();
if (debugEnabled) {
log.debug("receiveNewKeys({}) session ID={}", this, BufferUtils.toHex(':', sessionId));
}
}
Buffer buffer = new ByteArrayBuffer();
buffer.putMPInt(k);
buffer.putRawBytes(h);
buffer.putByte((byte) 0x41);
buffer.putRawBytes(sessionId);
int pos = buffer.available();
byte[] buf = buffer.array();
hash.update(buf, 0, pos);
byte[] iv_c2s = hash.digest();
int j = pos - sessionId.length - 1;
buf[j]++;
hash.update(buf, 0, pos);
byte[] iv_s2c = hash.digest();
buf[j]++;
hash.update(buf, 0, pos);
byte[] e_c2s = hash.digest();
buf[j]++;
hash.update(buf, 0, pos);
byte[] e_s2c = hash.digest();
buf[j]++;
hash.update(buf, 0, pos);
byte[] mac_c2s = hash.digest();
buf[j]++;
hash.update(buf, 0, pos);
byte[] mac_s2c = hash.digest();
String value = getNegotiatedKexParameter(KexProposalOption.S2CENC);
Cipher s2ccipher = ValidateUtils.checkNotNull(NamedFactory.create(getCipherFactories(), value), "Unknown s2c cipher: %s", value);
e_s2c = resizeKey(e_s2c, s2ccipher.getBlockSize(), hash, k, h);
s2ccipher.init(isServer ? Cipher.Mode.Encrypt : Cipher.Mode.Decrypt, e_s2c, iv_s2c);
value = getNegotiatedKexParameter(KexProposalOption.S2CMAC);
Mac s2cmac = NamedFactory.create(getMacFactories(), value);
if (s2cmac == null) {
throw new SshException(SshConstants.SSH2_DISCONNECT_MAC_ERROR, "Unknown s2c MAC: " + value);
}
mac_s2c = resizeKey(mac_s2c, s2cmac.getBlockSize(), hash, k, h);
s2cmac.init(mac_s2c);
value = getNegotiatedKexParameter(KexProposalOption.S2CCOMP);
Compression s2ccomp = NamedFactory.create(getCompressionFactories(), value);
if (s2ccomp == null) {
throw new SshException(SshConstants.SSH2_DISCONNECT_COMPRESSION_ERROR, "Unknown s2c compression: " + value);
}
value = getNegotiatedKexParameter(KexProposalOption.C2SENC);
Cipher c2scipher = ValidateUtils.checkNotNull(NamedFactory.create(getCipherFactories(), value), "Unknown c2s cipher: %s", value);
e_c2s = resizeKey(e_c2s, c2scipher.getBlockSize(), hash, k, h);
c2scipher.init(isServer ? Cipher.Mode.Decrypt : Cipher.Mode.Encrypt, e_c2s, iv_c2s);
value = getNegotiatedKexParameter(KexProposalOption.C2SMAC);
Mac c2smac = NamedFactory.create(getMacFactories(), value);
if (c2smac == null) {
throw new SshException(SshConstants.SSH2_DISCONNECT_MAC_ERROR, "Unknown c2s MAC: " + value);
}
mac_c2s = resizeKey(mac_c2s, c2smac.getBlockSize(), hash, k, h);
c2smac.init(mac_c2s);
value = getNegotiatedKexParameter(KexProposalOption.C2SCOMP);
Compression c2scomp = NamedFactory.create(getCompressionFactories(), value);
if (c2scomp == null) {
throw new SshException(SshConstants.SSH2_DISCONNECT_COMPRESSION_ERROR, "Unknown c2s compression: " + value);
}
if (isServer) {
outCipher = s2ccipher;
outMac = s2cmac;
outCompression = s2ccomp;
inCipher = c2scipher;
inMac = c2smac;
inCompression = c2scomp;
} else {
outCipher = c2scipher;
outMac = c2smac;
outCompression = c2scomp;
inCipher = s2ccipher;
inMac = s2cmac;
inCompression = s2ccomp;
}
outCipherSize = outCipher.getIVSize();
outCompression.init(Compression.Type.Deflater, -1);
inCipherSize = inCipher.getIVSize();
inMacResult = new byte[inMac.getBlockSize()];
inCompression.init(Compression.Type.Inflater, -1);
int inBlockSize = inCipher.getBlockSize();
int outBlockSize = outCipher.getBlockSize();
int avgCipherBlockSize = Math.min(inBlockSize, outBlockSize);
long recommendedByteRekeyBlocks = 1L << Math.min((avgCipherBlockSize * Byte.SIZE) / 4, 63);
maxRekeyBlocks.set(this.getLongProperty(FactoryManager.REKEY_BLOCKS_LIMIT, recommendedByteRekeyBlocks));
if (debugEnabled) {
log.debug("receiveNewKeys({}) inCipher={}, outCipher={}, recommended blocks limit={}, actual={}",
this, inCipher, outCipher, recommendedByteRekeyBlocks, maxRekeyBlocks);
}
inBytesCount.set(0L);
outBytesCount.set(0L);
inPacketsCount.set(0L);
outPacketsCount.set(0L);
inBlocksCount.set(0L);
outBlocksCount.set(0L);
lastKeyTimeValue.set(System.currentTimeMillis());
firstKexPacketFollows = null;
} |
package renderer
import (
"fmt"
"io/ioutil"
"net/http"
"path/filepath"
"github.com/julienschmidt/httprouter"
)
// RenderConfig stores variables pulled from the environment
type RenderConfig struct {
StaticDir string
ConfigDir string
}
// RenderServer is the heart of this package. It routes requests to functions and does logging.
type RenderServer struct {
router *httprouter.Router // HTTP request routing
staticDir string // Location of general file resources (js, images)
configDir string
}
// ServeHTTP fulfill's RenderServer's obligation to the Handler interface.
func (a *RenderServer) ServeHTTP(w http.ResponseWriter, r *http.Request) {
a.router.ServeHTTP(w, r)
}
func (a *RenderServer) singleFile(filename string) func(http.ResponseWriter, *http.Request, httprouter.Params) {
return func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
fileBytes, err := ioutil.ReadFile(filename)
if err != nil {
http.Error(w, "404 - Not found.", http.StatusNotFound)
} else {
fmt.Fprint(w, string(fileBytes))
}
}
}
// New creates a root handler for the server.
func New(conf *RenderConfig) http.Handler {
router := httprouter.New()
app := &RenderServer{
router,
conf.StaticDir,
conf.ConfigDir,
}
router.GET("/", app.singleFile(filepath.Join(conf.StaticDir, "index.html")))
router.GET("/favicon.png", app.singleFile(filepath.Join(conf.ConfigDir, "favicon.png")))
router.GET("/logo.svg", app.singleFile(filepath.Join(conf.ConfigDir, "logo.svg")))
router.GET("/config.json", app.singleFile(filepath.Join(conf.ConfigDir, "config.json")))
router.GET("/manifest.json", app.singleFile(filepath.Join(conf.StaticDir, "manifest.json")))
router.ServeFiles("/static/css/*filepath", http.Dir(filepath.Join(conf.StaticDir, "static/css")))
router.ServeFiles("/static/js/*filepath", http.Dir(filepath.Join(conf.StaticDir, "static/js")))
router.ServeFiles("/static/media/*filepath", http.Dir(filepath.Join(conf.StaticDir, "static/media")))
router.NotFound = http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
fileBytes, err := ioutil.ReadFile(filepath.Join(conf.StaticDir, "index.html"))
if err != nil {
http.Error(rw, "404 - Not found.", http.StatusNotFound)
} else {
fmt.Fprint(rw, string(fileBytes))
}
})
return app
}
|
/**
* Holds the source triplesMap and Subject-, Object- or PredicateMap for a specific (provenanced) term.
*/
public class Metadata {
private Term triplesMap;
private Term sourceMap;
public Metadata() {
}
public Metadata(Term triplesMap) {
this(triplesMap, null);
}
public Metadata(Term triplesMap, Term sourceMap) {
this.triplesMap = triplesMap;
this.sourceMap = sourceMap;
}
Term getTriplesMap() {
return triplesMap;
}
Term getSourceMap() {
return sourceMap;
}
public void setSourceMap(Term sourceMap) {
this.sourceMap = sourceMap;
}
} |
class Output_Subtable:
"""Specific subtable that deals with extrapolator output"""
def store_output(self,
all_ases,
attack,
number_of_attackers,
adopt_policy,
percent,
percent_iter,
round_num,
extra_bash_arg_1,
extra_bash_arg_2,
extra_bash_arg_3,
extra_bash_arg_4,
extra_bash_arg_5,
table_names):
"""Stores output in the simulation results table"""
# All ases for that subtable
subtable_ases = {x["asn"]: x for x in self.Forwarding_Table.get_all()}
        # We don't want to track the attacker or victim; deleting them is faster than filtering in a dict comprehension
for uncountable_asn in [attack.attacker, attack.victim]:
if uncountable_asn in subtable_ases:
del subtable_ases[uncountable_asn]
# Insert the trial data into the simulation results table
with Simulation_Results_Table() as db:
db.insert(self.table.name,
attack,
number_of_attackers,
adopt_policy,
percent,
percent_iter,
round_num,
extra_bash_arg_1,
extra_bash_arg_2,
extra_bash_arg_3,
extra_bash_arg_4,
extra_bash_arg_5,
self._get_traceback_data(subtable_ases,
all_ases,
attack),
self._get_control_plane_data(attack),
self._get_visible_hijack_data(table_names, attack, round_num))
def _get_traceback_data(self, subtable_ases, all_ases, attack):
"""Gets the data plane data through tracing back"""
# NOTE: this can easily be changed to SQL. See super optimized folder.
conds = {x: {y: 0 for y in AS_Types.list_values()}
for x in Data_Plane_Conditions.list_values()}
# For all the ases in the subtable
for og_asn, og_as_data in subtable_ases.items():
asn, as_data = og_asn, og_as_data
looping = True
# SHOULD NEVER BE LONGER THAN 64
# Done to catch extrapolator loops
for i in range(64):
# Conds are end conditions. See README.
if (condition := as_data["received_from_asn"]) in conds:
conds[condition][og_as_data["impliment"]] += 1
looping = False
break
else:
asn = as_data["received_from_asn"]
as_data = all_ases[asn]
# NEEDED FOR EXR DEVS
# If it ends the for loop and didn't change looping...
if looping:
loop_data = [all_ases, og_asn, og_as_data, attack]
self._print_loop_debug_data(*loop_data)
return conds
def _get_visible_hijack_data(self, t_names, attack, round_num):
"""Gets visible hijacks using sql for speed"""
# NOTE: this will automatically remove attackers and victims
# Since they will have nothing in their rib
conds = {}
        # Looks at the AS one hop before the final AS. If that AS received the
        # route from the attacker's announcement, count it as a visible hijacking
# We must include prefix and origin here due to preventive ann
# And other blackhole mechanisms
attacker_ann = []
        # NOTE: This won't work for path manipulation attacks.
        # An assert for this used to live in the attack class, but it has since been removed.
for prefix in attack.attacker_prefixes:
sql = "(all_ases.prefix = '{}' AND all_ases.origin = {})".format(
prefix, attack.attacker)
attacker_ann.append(sql)
attacker_sql = " OR ".join(attacker_ann)
with Simulation_Extrapolator_Forwarding_Table(round_num=round_num) as db:
for adopt_val in AS_Types.__members__.values():
sql = f"""SELECT COUNT(*) FROM
{self.Forwarding_Table.name} og
INNER JOIN {db.name}
all_ases
ON og.received_from_asn = all_ases.asn
WHERE og.as_type = {adopt_val.value} AND ({attacker_sql})
"""
conds[adopt_val] = self.Forwarding_Table.get_count(sql)
return conds
def _print_loop_debug_data(self, all_ases, og_asn, og_as_data, attack):
"""Prints debug information for whenever the exr breaks"""
loop_str_list = []
loop_asns_set = set()
asn, as_data = og_asn, og_as_data
for i in range(64):
asn_str = f"ASN:{asn:<8}: {as_data}"
loop_str_list.append(asn_str)
asn = as_data["received_from_asn"]
as_data = all_ases[asn]
if asn in loop_asns_set:
err_str = "\n" * 20 + "Loop:\n\t"
err_str += "\n\t".join(loop_str_list) + str(attack) + "\n" * 20
logging.error(err_str)
sys.exit(1)
else:
loop_asns_set.add(asn)
def _get_control_plane_data(self, attack):
"""Gets control plane data using sql for speed"""
conds = {x: {y: 0 for y in AS_Types.list_values()}
for x in C_Plane_Conds.list_values()}
for adopt_val in AS_Types.list_values():
            # NOTE: we no longer filter by prefix because an attack can now have
            # multiple prefixes; filtering by origin alone should be sufficient
            # NOTE: This will break for path manipulation attacks
sql = (f"SELECT COUNT(*) FROM {self.Forwarding_Table.name}"
" WHERE origin = %s AND asn != %s"
f" AND asn != %s AND impliment = {bool(adopt_val)}")
conds[C_Plane_Conds.RECV_ATK_PREF_ORIGIN.value][adopt_val] =\
self.Forwarding_Table.get_count(sql, [attack.attacker,
attack.attacker,
attack.victim])
conds[C_Plane_Conds.RECV_ONLY_VIC_PREF_ORIGIN.value][adopt_val] =\
self.Forwarding_Table.get_count(sql, [attack.victim,
attack.attacker,
attack.victim])
conds[C_Plane_Conds.RECV_BHOLE.value][adopt_val] =\
self.Forwarding_Table.get_count(sql,
[Data_Plane_Conditions.BHOLED.value,
attack.attacker,
attack.victim])
no_rib_sql = """SELECT COUNT(*) FROM {0}
LEFT JOIN {1} ON {0}.asn = {1}.asn
WHERE {1}.asn IS NULL AND {0}.impliment = {2}
AND {0}.asn != {3} AND {0}.asn != {4}
""".format(self.Input_Table.name,
self.Forwarding_Table.name,
bool(adopt_val),
attack.attacker,
attack.victim)
# I had defaulted these to 0, but I don't think that should happen
# So lets error if it does
assert attack.attacker is not None
assert attack.victim is not None
conds[C_Plane_Conds.NO_RIB.value][adopt_val] =\
self.Forwarding_Table.get_count(no_rib_sql)
return conds |
package main
import (
"testing"
)
func Test_redisStore(t *testing.T) {
s := &redisStore{url: "redis://localhost:6379"}
s.open()
s.ping()
k := s.put("", "bar")
if got := s.get(k); got != "bar" {
t.Errorf("expected %q, got %q", "bar", got)
}
}
func Test_mapStore(t *testing.T) {
s := &mapStore{}
s.open()
k := s.put("", "bar")
if got := s.get(k); got != "bar" {
t.Errorf("expected %q, got %q", "bar", got)
}
}
|
/**
* @author bartosz walacik
*/
public class CustomToNativeAppenderAdapter<T, C extends PropertyChange> implements PropertyChangeAppender<C> {
private final CustomPropertyComparator<T, C> delegate;
private final Class<T> propertyJavaClass;
public CustomToNativeAppenderAdapter(CustomPropertyComparator<T, C> delegate, Class<T> propertyJavaClass) {
this.delegate = delegate;
this.propertyJavaClass = propertyJavaClass;
}
@Override
public boolean supports(JaversType propertyType) {
return propertyType.getBaseJavaType().equals(propertyJavaClass);
}
@Override
public C calculateChanges(NodePair pair, JaversProperty property) {
T leftValue = (T)pair.getLeftPropertyValue(property);
T rightValue = (T)pair.getRightPropertyValue(property);
return delegate.compare(leftValue, rightValue, pair.createPropertyChangeMetadata(property), property).orElse(null);
}
@Override
public int priority() {
return HIGH_PRIORITY;
}
} |
#include<stdio.h>
#include<stdlib.h>
// utility function to print the elements of an array of n numbers
void arrayprinter ( int A[], int n)
{
int i;
for(i=0;i<n;i++)
{
printf("%d ",A[i]);
}
printf("\n");
}
// utility function to swap two numbers via their respective pointers
void swap ( int *t1, int *t2)
{
int temp = *t1;
*t1 = *t2;
*t2 = temp;
}
// helper for quicksort: picks the last element as the pivot, moves numbers
// smaller than the pivot to the left side of the array, and then places the
// pivot in its correct (final) position
int partition(int A[], int left, int right)
{
int i,pivot = A[right],j;
j = left;
for(i= left; i < right;i++)
{
if( pivot > A[i] )
{
swap(&A[j],&A[i]);
j++;
}
}
swap(&A[j],&A[right]);
return j;
}
// main quicksort routine, called recursively on the partitions to the left and right of the pivot
void quicksort(int A[], int left, int right)
{
if( left < right )
{
int partin = partition ( A,left,right);
quicksort(A,left,partin - 1);
quicksort(A, partin + 1, right);
}
}
// main body
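// Example run: for the input "5" followed by "3 1 4 1 5", the program prints "1 1 3 4 5"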
int main()
{
int j,n;
scanf("%d",&n);
int A[n],temp;
// loop for taking input of numbers from user
for(j=0;j<n;j++)
{
scanf("%d",&temp);
A[j] = temp;
}
    // sort the array with quicksort, then print it with the arrayprinter utility
quicksort(A,0,n-1);
arrayprinter(A,n);
}
|
<filename>98test/httptest/src/tests/hello_test.go
package tests
import (
"net/http"
"net/http/httptest"
"testing"
. "github.com/smartystreets/goconvey/convey"
)
func TestHelloController(t *testing.T) {
w := httptest.NewRecorder()
Convey("Test GetHello", t, func() {
r, _ := http.NewRequest(http.MethodGet, "/hello?name=Lyric", nil)
http.DefaultServeMux.ServeHTTP(w, r)
So(w.Code, ShouldEqual, 200)
So(w.Body.String(), ShouldEqual, "Hello,Lyric")
})
}
func TestSpec(t *testing.T) {
// Only pass t into top-level Convey calls
Convey("Given some integer with a starting value", t, func() {
Convey("When the integer is incremented", func() {
So(1, ShouldEqual, 1)
})
})
}
|
<reponame>yonch/wireless
# Copyright (c) 2012 <NAME>
# This code is released under the MIT license (see LICENSE file).
##
# \ingroup simulator
# \brief Aggregates multiple factories to be queried jointly for components
class FactoryCollection(object):
"""
This class is intended to make adding new functionality modular through adding factories.
A factory collection maintains several factory objects, and has a factory method make().
When using make(), the caller passes a name of a factory function. This factory function is
called in all factories that contain a function with that name, until one of them returns
non-null.
The collection maintains a cache of factory results for each factory function name.
self._factories is a dictionary from factory function name to the factories that contain
that function.
self._cache is a dictionary from factory function name to a tuple (args, val) where args are
the arguments of the cached factory call and val is the object that was produced by the
call.
"""
def __init__(self, function_names):
"""
C'tor
@param function_names: a list of strings, each a name of a factory function
"""
self._factories = dict([(name, []) for name in function_names])
self._cache = dict([(name, (None, None)) for name in function_names])
def add_factory(self, factory):
# for each factory function name:
for func_name in self._factories.keys():
# if factory has an attribute func_name
if hasattr(factory, func_name):
# add the factory function into self._factories under the function name
self._factories[func_name].append(getattr(factory, func_name))
def make(self, func_name, *args):
# Cache lookup
cached_args, cached_val = self._cache[func_name]
if cached_args == args:
return cached_val
# Call factory functions until a non-null value is returned
val = None
for factory_func in self._factories[func_name]:
val = factory_func(*args)
if val is not None:
# update cache
self._cache[func_name] = (args, val)
# return produced value
return val
# Couldn't find a result
raise RuntimeError, "No '%s' factory produced an object" % func_name
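
# Usage sketch (illustrative only): the factory class and the 'make_channel'
# function name below are hypothetical and not part of this module.
if __name__ == '__main__':
    class _DemoChannelFactory(object):
        def make_channel(self, name):
            # return a non-null object only for names this factory understands
            return ('channel', name) if name == 'awgn' else None

    collection = FactoryCollection(['make_channel'])
    collection.add_factory(_DemoChannelFactory())
    # queries each registered factory until one returns non-null, then caches the result
    print(collection.make('make_channel', 'awgn'))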
|
<filename>services/fetcher-go/grpc/server.go
package grpc
import (
"context"
pb "github.com/wafuwafu13/Hatena-Intern-2020/services/fetcher-go/pb/fetcher"
"github.com/wafuwafu13/Hatena-Intern-2020/services/fetcher-go/fetcher"
healthpb "google.golang.org/grpc/health/grpc_health_v1"
)
// Server is the implementation of pb.FetcherServer
type Server struct {
pb.UnimplementedFetcherServer
healthpb.UnimplementedHealthServer
}
// NewServer creates a gRPC server
func NewServer() *Server {
return &Server{}
}
// Fetch retrieves the page title from the received URL and returns it
func (s *Server) Fetch(ctx context.Context, in *pb.FetcherRequest) (*pb.FetcherReply, error) {
title, err := fetcher.Fetch(ctx, in.Src)
if err != nil {
return nil, err
}
return &pb.FetcherReply{Title: title}, nil
}
|
package com.packtpub.reactive.common.checked;
public interface CheckedFunc0<R> {
R call() throws Exception;
} |
/**
* Get access to general information about LensKit.
*
* @since 2.1
* @author <a href="http://www.grouplens.org">GroupLens Research</a>
*/
public final class LenskitInfo {
private static final Logger logger = LoggerFactory.getLogger(LenskitInfo.class);
private static SoftReference<Set<String>> revisionSet;
private LenskitInfo() {
}
private static Set<String> loadRevisionSet() {
ImmutableSet.Builder<String> revisions = ImmutableSet.builder();
InputStream input = LenskitInfo.class.getResourceAsStream("/META-INF/lenskit/git-commits.lst");
if (input != null) {
try (Reader reader = new InputStreamReader(input, Charsets.UTF_8);
BufferedReader lines = new BufferedReader(reader)) {
String line;
while ((line = lines.readLine()) != null) {
revisions.add(StringUtils.trim(line));
}
} catch (IOException e) {
logger.warn("Could not read Git revision list", e);
} finally {
try {
input.close();
} catch (IOException e) {
logger.error("error closing git-commit list", e);
}
}
} else {
logger.warn("cannot find LensKit revision list");
}
Set<String> revset = revisions.build();
logger.debug("have {} active revisions", revset.size());
return revset;
}
/**
* Get the set of revisions LensKit is built from. This is in the order returned by {@code git log},
* so the head revision is first.
* @return The set of revisions included in this build of LensKit.
*/
@Nonnull
public static synchronized Set<String> getRevisions() {
Set<String> revisions = revisionSet == null ? null : revisionSet.get();
if (revisions == null) {
revisions = loadRevisionSet();
revisionSet = new SoftReference<>(revisions);
}
return revisions;
}
/**
* Query whether this version of LensKit includes a particular revision.
*
* @param revision The revision to query.
* @return {@code true} if the LensKit source is descended from {@code revision}.
*/
public static boolean includesRevision(String revision) {
return getRevisions().contains(revision);
}
/**
* Get the HEAD revision from which LensKit was built.
* @return The revision from which this version of LensKit was built.
*/
@Nonnull
public static String getHeadRevision() {
Iterator<String> iter = getRevisions().iterator();
if (iter.hasNext()) {
return iter.next();
} else {
return "UNKNOWN";
}
}
/**
* Get the current LensKit version.
* @return The LensKit version.
*/
@Nonnull
public static String lenskitVersion() {
Properties props = new Properties();
try (InputStream stream = LenskitInfo.class.getResourceAsStream("/META-INF/lenskit/version.properties")) {
props.load(stream);
} catch (IOException e) {
logger.warn("could not load LensKit version properties", e);
}
return props.getProperty("lenskit.version", "UNKNOWN");
}
} |
As Ukraine's antigovernment revolt spreads like wildfire around the country, a senior official has for the first time threatened to call a state of emergency – which could bring the army onto the streets of Kiev and other strife-plagued cities.
Ukrainian Justice Minister Olena Lukash, who woke up Monday morning to find her downtown Kiev ministry occupied by protesters, said in a televised statement, "If the protesters do not leave the Justice Ministry building.... I will ask the National Security and Defense Council of Ukraine to impose a state of emergency." She complained that an opposition-led mob had forced its way into the building overnight, using bats to smash windows, and that water sprayed during the melee had turned the ministry into "a veritable ice-skating rink."
It's the fourth major government building to be occupied in Kiev, while protesters around the more nationalist-minded west of the country have taken over administration buildings and even forced out Kiev-appointed governors in several regions. In recent days the unrest has even spread to Ukraine's eastern provinces, which were formerly considered to be bastions of pro-government sentiment.
Ukraine's ruling Party of Regions, whose leader is embattled President Viktor Yanukovych, warned Monday that the situation has spun out of control, and that "fascist youths" are trying to stage an "anti-constitutional" coup d'état. The party, which holds a narrow majority in parliament, accused protest leaders of inciting street violence in an effort to overturn a democratically elected president and constitutionally legitimate government.
"The very existence of the independent Ukraine is in jeopardy now. Trying to carry out a coup, which will divide the country, they are not thinking about the life and future of millions of peaceful citizens who are not involved in this chaos," the party's statement says.
New offer
Over the weekend Mr. Yanukovych attempted to co-opt the moderate leaders of the protest movement – especially the leader of the liberal UDAR party, boxing champion Vitaly Klitschko and the chairman of the Fatherland association, Arseny Yatseniuk – by offering them top positions in a newly reshuffled government.
So far the opposition leaders have resisted the offer, saying that their key demands have not yet been met.
"This was a poisoned offer from Yanukovych, designed to divide our demonstrators' movement," Mr. Klitschko, who was offered the post of deputy prime minister, told German Sunday paper Bild am Sonntag yesterday. "We will continue to negotiate and are still demanding early elections."
Opposition leaders want the government's resignation, repeal of draconian antiprotest laws passed by parliament two weeks ago, early presidential and parliamentary elections, and a full amnesty for all protesters who have been arrested by police over the past two turbulent months.
Mr. Yatseniuk, who would become prime minister under the president's offer, told journalists that he is awaiting the outcome of an emergency session of the Supreme Rada [parliament], which is slated for Tuesday.
"Tuesday is the judgment day; we believe not a single [one] of their words. We believe only actions and result," Yatseniuk is quoted as saying.
But some experts warn that moderate opposition leaders may have completely lost control over the more radical elements of the protest movement, which have spurned calls for a truce during negotiations with the government and continue to build barricades, occupy government buildings, and otherwise prepare for more violent confrontations with police.
"The president has already agreed to the government's resignation, and giving top posts to the leaders of the opposition. The fact that they haven't answered is really strange, since this was their principal demand" just a few days ago, says Dmitro Vydrin, a former government adviser and independent political expert. He suggests that opposition leaders are afraid, or incapable of reining in the most radical elements, who are now driving events in the streets.
On Monday, Klitschko attempted to convince members of the radical Spilna Sprava, or Right Deed, movement to vacate the freshly occupied Justice Ministry. According to Russian media, the protesters rejected his entreaties.
Spilna Sprava did leave the ministry later, saying they did not want to provoke authorities, but warned they could return depending on the outcome of Tuesday's parliamentary session.
Ukraine divided?
"Ukraine is now facing the very real threat of civil war," says Valery Kamchatny, a former Rada deputy and independent political expert. "Even if the government does introduce a state of emergency, half of the country is already de facto beyond government control. And there is no guarantee the army would obey orders to intervene. The army has no motivation to defend the present authorities; I've talked with military people who absolutely rule that out."
On Monday Ukraine's Defense Minister, Pavlo Lebedev, issued a statement saying that any military role in Ukraine's internal strife would be "impossible."
"The army will strictly follow the Constitution and the laws of Ukraine, which clearly specify its role, functions and tasks," Mr. Lebedev said.
Sergei Markov, a pro-Kremlin analyst who has been a frequent adviser to Russian President Vladimir Putin in the past, accuses European and US politicians of being partly to blame for the unrest because they have encouraged protesters to act as though they are confronting a dictatorship rather than a democratically elected government. He says Western leaders should tell Ukrainian opposition politicians to renounce street violence and return to constitutional political methods – fresh presidential elections are slated for 2015.
"The principal reason for this crisis is the weakness of the Ukrainian state," he says. Far from being the tough dictatorship it is depicted as in the West, the Yanukovych government has shown itself to be incapable of enforcing elementary civil order, or in negotiating stable relationships with its main neighbors, Russia and the European Union.
"This government is simply not able to stop the kind of provocations and violent attacks that would be dealt with severely in any Western country. Leaders of the EU and the US should stop encouraging this, just because they think they're blocking Russia's geopolitical interests, and realize what this is leading to. Russia and the EU urgently need to put aside differences, and cooperate to help Ukraine return to the path of legal, constitutional order. Otherwise, I fear very bloody events are in the offing." |
# Reads two times in HH:MM format and prints (first - second) modulo 24 hours.
z = [list(map(int, input().split(':'))) for i in range(2)]
# Hours remaining until midnight from the second time, plus the first time's hours
a = 24 - z[1][0]
b = a + z[0][0]
if z[0][1] < z[1][1]:
    # Borrow an hour when the first time's minutes are smaller
    b -= 1
    d = 60 - (z[1][1] - z[0][1])
else:
    d = z[0][1] - z[1][1]
# Wrap the hour count back into the 0-23 range
if b > 23:
    b = abs(b - 24)
# Zero-pad both fields to two digits and print as HH:MM
b = '%d' % b
d = '%d' % d
if len(b) < 2:
    b = '0' + b
if len(d) < 2:
    d = '0' + d
print(b + ':' + d)
/**
* CSV reader implementation based on Camel implementation (see BindyCsvDataFormat) where some issues have been fixed.
*
* @param <T> Bean type.
*/
public class CsvJpaReader<T> extends AbstractCsvReader<T> {
/**
* optional entity manager
*/
private final EntityManager em;
/**
* Cache of fetched foreign keys, property value based.
*/
private final Map<Class<?>, Map<String, Object>> foreignCache = new HashMap<>();
/**
* Cache of fetched foreign keys, index value based.
*/
private final Map<Class<?>, List<?>> foreignCacheRows = new HashMap<>();
/**
* Simple override.
*
* @param reader Input reader.
* @param clazz Class of bean to build.
* @param headers Headers, an ordered property list.
* @param em The {@link EntityManager} used to get properties and foreign keys.
*/
public CsvJpaReader(final Reader reader, final EntityManager em, final Class<T> clazz, final String... headers) {
super(reader, clazz, headers);
this.em = em;
}
@Override
protected void setForeignProperty(final T bean, final String fqname, final String rawValue, final int fkeyIndex) {
final var name = fqname.substring(0, fkeyIndex);
final var field = getField(clazz, name);
final var fkeyName = fqname.substring(fkeyIndex + 1);
// Collection management
if (field.getType().isAssignableFrom(Set.class)) {
// Set support
beanUtilsBean.setProperty(bean, name, newCollection(rawValue, name, field, fkeyName, new HashSet<>()));
} else if (field.getType().isAssignableFrom(List.class)) {
// List support
beanUtilsBean.setProperty(bean, name, newCollection(rawValue, name, field, fkeyName, new ArrayList<>()));
} else {
// Simple property
beanUtilsBean.setProperty(bean, name, getForeignProperty(rawValue, name, field,
TypeUtils.getRawType(field.getGenericType(), bean.getClass()), fkeyName));
}
}
private Object getForeignProperty(final String rawValue, final String name, final Field field, final Class<?> type,
final String fkName) {
final Object foreignEntity;
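        // A trailing '!' on the foreign key property name forces a direct JPA query instead of the per-type cache lookup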
if (fkName.charAt(fkName.length() - 1) == '!') {
foreignEntity = readFromEm(rawValue, type, fkName.substring(0, fkName.length() - 1));
} else {
foreignEntity = readFromCache(rawValue, type, fkName);
}
if (foreignEntity == null) {
throw new IllegalArgumentException("Missing foreign key " + field.getDeclaringClass().getSimpleName() + "#"
+ name + "." + StringUtils.removeEnd(fkName, "!") + " = " + rawValue);
}
return foreignEntity;
}
private Collection<Object> newCollection(final String rawValue, final String masterPropertyName, final Field field,
String propertyName, final Collection<Object> arrayList) {
final var generic = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0];
for (final var item : rawValue.split(",")) {
arrayList.add(getForeignProperty(item, masterPropertyName, field, generic, propertyName));
}
return arrayList;
}
/**
* Read from entity manager.
*/
private Object readFromEm(final String rawValue, final Class<?> type, final String propertyName) {
// On by one foreign key
final List<?> resultList;
if (isRowNumber(type, propertyName)) {
// search referenced entity with a filter on row number
resultList = em.createQuery(from(type)).setFirstResult(Integer.parseInt(rawValue) - 1).setMaxResults(1)
.getResultList();
} else {
// search referenced entity with a filter on propertyName
resultList = em
.createQuery(String.format("%s WHERE %s LIKE '%s'", from(type), propertyName, rawValue), type)
.setMaxResults(1).getResultList();
}
return resultList.stream().findFirst().orElse(null);
}
/**
* Return a "FROM" query based on the given filed type.
*/
private String from(final Class<?> type) {
return "FROM " + type.getName();
}
/**
* Read from already read entities.
*/
private Object readFromCache(final String rawValue, final Class<?> type, final String propertyName) {
// Special fetching mode
if (isRowNumber(type, propertyName)) {
// search referenced entity with a filter on row number
return readFromRowNumberCache(rawValue, type);
}
// search referenced entity with a filter based on natural join
return readFromJoinCache(rawValue, type, propertyName);
}
/**
 * Read from already fetched entities, keyed by the given join property.
*/
private Object readFromJoinCache(final String rawValue, final Class<?> type, final String propertyName) {
ensureCache(type, propertyName);
return foreignCache.get(type).get(rawValue);
}
/**
 * Read all rows once, then match the raw value, interpreted as a 1-based row number, to the corresponding entity.
*/
private Object readFromRowNumberCache(final String rawValue, final Class<?> type) {
final var index = Integer.parseInt(rawValue);
if (!foreignCacheRows.containsKey(type)) {
foreignCacheRows.put(type, readAll(type));
}
if (index <= foreignCacheRows.get(type).size() && index > 0) {
return foreignCacheRows.get(type).get(index - 1);
}
return null;
}
/**
* Initialize or update cache.
*/
private void ensureCache(final Class<?> type, final String propertyName) {
if (!foreignCache.containsKey(type) || type == clazz) {
foreignCache.put(type, buildMap(readAll(type), propertyName));
}
}
/**
 * Indicates whether this field uses an auto-generated key.
*/
private boolean isRowNumber(final Class<?> type, final String propertyName) {
return getField(type, propertyName).getAnnotation(GeneratedValue.class) != null;
}
private List<?> readAll(final Class<?> type) {
return em.createQuery(from(type)).getResultList();
}
/**
* Return a map where key is the foreign key and value is the entity.
*/
private Map<String, Object> buildMap(final List<?> list, final String property) {
final var result = new HashMap<String, Object>();
for (final var item : list) {
final var value = beanUtilsBean.getProperty(item, property);
if (value != null) {
result.put(String.valueOf(value), item);
}
}
return result;
}
} |
Think about it—an inexpensive Linux PC crammed inside a wall-wart plug. Something like this SheevaPlug could open up a whole new framework of managing and sharing data that could be accessible to the masses.
Inside the SheevaPlug you will find a 1.2GHz, ARM-based Sheeva embedded processor, 512Mbytes of FLASH, 512Mbytes of DRAM, gigabit ethernet and a USB 2.0 port. Plug it into a wall socket and bam—a fully-functional Linux server that can store digital media accessible remotely via the internet or function as a remote print or web server. Plus, it supports "multiple standard Linux 2.6 kernel distributions" and it operates on only 5-watts of power. So running and developing software for the platform should be easy (and there should be substantial savings on your power bill).
Currently, open source tinkerers can get the $99 SheevaPlug development kit on the Marvell website. Several products based on the design (like the Pogoplug) are expected to arrive in the near future with price tags ranging from $79-$99. Eventually, Marvell expects the price for these devices to dip below $50. [Marvell and WSJ via Tech Report via Slashgear] |
/// Simple config file value deserialization from a string to a `PrefValue`.
/// Treats the first character of the string read in from the config file as a
/// type tag: for "i" and "b" the tag is discarded and the remainder is parsed
/// into an `IntegerVal` or `BoolVal`; an "s" tag, an unparsable value, or any
/// other input falls back to a `StringVal` containing the whole string.
///
fn simple_deser(s: &str) -> PrefValue {
if s.len() > 1 {
match &s[0..1] {
"s" => {
StringVal(s.to_string())
},
"i" => {
if let Ok(v) = s[1..].parse::<i32>() {
IntegerVal(v)
} else { StringVal(s.to_string()) }
},
"b" => {
if let Ok(v) = s[1..].parse::<bool>() {
BoolVal(v)
} else { StringVal(s.to_string()) }
},
_ => { StringVal(s.to_string()) },
}
} else {
StringVal(s.to_string())
}
} |
EUGENE -- The Devon Allen Express charged on at full speed Monday inside Autzen Stadium, a four-day run by Oregon's young receiver that began on the track at Hayward Field and has yet to be derailed by hurdles or defenders with bad intentions alike.
After winning the 100 meters and 110- and 400-meter hurdles Friday and Saturday at the Oregon Relays, Allen was named Monday as one of the standouts of UO's second spring football scrimmage after he caught a touchdown pass and both covered and returned kickoffs well.
"He had a great day today," said junior starting quarterback Marcus Mariota, who played sparingly in the scrimmage as backups earned a majority of the repetitions at all positions. "He made some key catches in situations and was able to break some tackles and just get out and run. Dude’s an incredible athlete. The way he’s able to jump around from football to track speaks a lot about his abilities."
Photo: Oregon receiver Devon Allen talks to members of the media after spring football practice outside the Football Performance Center in Eugene, Ore., on April 1, 2014. Thomas Boyd/The Oregonian
The redshirt freshman from Phoenix's dual-sport spring has offered the first tantalizing glimpse into the abilities of the 6-foot, 190-pound receiver, who set Arizona's state prep records in the hurdles races in 2012 and whose development is all the more important following top wideout Bralon Addison's knee injury two weeks ago.
In a singlet, Allen has often looked unstoppable on the track. His three victories over the weekend -- he ran a wind-aided 10.55 seconds in the 100, for one -- once again showed off his speed for anyone who wanted to watch.
Yet the closed doors of football practice makes his development in pads and a helmet more of a mystery. Oregon coach Mark Helfrich said Monday that outsiders weren't the only folks having trouble gauging his progress on the gridiron. As is the case with another Duck receiver garnering much attention this spring, Johnathan Loyd, blocking and route-running have been speed bumps in Allen's rapid rise.
"He’s always been that guy, you know, in track and field there might be a hurdle in the way but there’s not too much else to think about," Helfrich said. "In football there’s a few more moving parts, so for him to cut it loose and play at his track speed is starting to happen more and more and as a result of that he’s making more and more plays."
But if Allen came out of the proverbial starting blocks slowly on the football field by redshirting last fall, he's hitting the home stretch of spring football at a remarkable clip as he chases down both playing time and the growing hype surrounding him.
"I think just the speed is a big part of my game," said Allen, who admitted to being "just a little tired" after his track/football weekend combination. "The faster you are the easier the game can be. … I'm starting to get into a rhythm."
Senior cornerback Dior Mathis also raced over the weekend at the Oregon Relays and on Monday "made a couple plays on special teams as both a cover guy and a returner," Helfrich said.
Oregon doesn't treat its two-sport athletes any differently than another football player during practice, Helfrich said. The only exceptions might be that Allen or Mathis have different sets of recovery exercises on an off-day from football practice.
"You wouldn’t know that those guys are doing anything off the field by the way they’re practicing right now," Helfrich said.
Allen and Mathis each say that football comes first, as both a spring priority and their overarching passion.
Still, there's no ignoring how Allen -- rated the 16th-best wideout in his 2013 recruiting class -- has burst onto the scene on the track, where he's growing accustomed to coming in first. Addison's injury means UO's top four receivers from 2013 are now gone -- meaning there are four fewer hurdles for a young guy like Allen to clear in order to get playing time.
"It's a different passion and just being on the field with your teammates is a kind of a different feeling," Allen said. "I enjoy it more."
-- Andrew Greif | |
Michael Walter Mitchell, shot neighbor due to voices in his head. (Photo: Maricopa County Sheriff's Office)
Michael Walter Mitchell, 27, of Glendale, Arizona, says he shot his neighbor because the voices in his head told him to do it. Mitchell even called the police himself to report that he had shot the man downstairs.
When officers arrived they discovered Pedro Gomez inside his apartment with several gunshot wounds. He was dead, according to an Arizona Republic report.
Mitchell said that he went to Gomez’s apartment because he heard a woman asking for his help. He knocked on the door and shot the man who answered right away. Mitchell also told officers that he found Gomez’s girlfriend and children in the apartment but didn’t shoot them because “it is not right for someone to point a gun and shoot women and children,” police documents show.
Mitchell told officers he had fallen on “tough times” when his girlfriend broke up with him and he was fired.
It is unclear whether Mitchell legally acquired the gun he used.
NUMERICAL SIMULATION OF THE EFFECTS OF BUOYANCY FORCES AND FLOW CONTROL DEVICES ON FLUID FLOW AND HEAT TRANSFER PHENOMENA OF LIQUID STEEL IN A TUNDISH
Fluid flow and heat transfer of liquid steel in a tundish of a continuous slab caster were numerically simulated. The importance of natural convection was established through a dimensionless number given by the ratio Gr/Re². Buoyancy forces proved to be as important as inertial ones, especially at the extremes of the tundish, far from the entering liquid jet. The usual flow control devices such as weirs and dams were not as effective as turbulence inhibitors, a kind of impact pad with lips, interior squared angles, and a square cross section. The simulations indicated that this device helps to decrease fluid turbulence in the zone of the entering liquid jet and damps step inputs of cold or hot steel, allowing for better control of the casting temperature.
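For context, the ratio quoted above is the standard Richardson-number criterion for judging whether natural convection matters; the definitions below are the usual textbook forms, stated here for reference and not taken from the paper itself:

$$\mathrm{Gr} = \frac{g\,\beta\,\Delta T\,L^{3}}{\nu^{2}}, \qquad \mathrm{Re} = \frac{U\,L}{\nu}, \qquad \frac{\mathrm{Gr}}{\mathrm{Re}^{2}} = \frac{g\,\beta\,\Delta T\,L}{U^{2}}$$

where g is gravity, β the thermal expansion coefficient, ΔT a characteristic temperature difference, L a characteristic length, U a characteristic velocity, and ν the kinematic viscosity. When Gr/Re² is of order one or larger, buoyancy-driven convection is comparable to the forced flow, which is consistent with the behaviour reported far from the inlet jet.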
#ifndef SMART_INPUT_FILTER_DEBOUNCE_INPUT
#define SMART_INPUT_FILTER_DEBOUNCE_INPUT
#include "filter.h"
// Debounces an input such as a switch. Responds immediately if the input
// hasn't changed in a while (so that it catches the initial switch with no
// latency).
class DebounceInput : public Filter<bool, bool> {
public:
DebounceInput(uint32_t pin);
// Whether the input rose this cycle. Reset on the next call to Run.
bool Rose();
// Whether the input fell this cycle. Reset on the next call to Run.
bool Fell();
protected:
bool DoRun() override;
bool ReadFromSensor() override;
private:
bool stable_state = false;
bool current_state = false;
bool rose = false;
bool fell = false;
uint32_t state_started_at_millis = 0;
uint32_t last_successful_change_at_millis = 0;
const uint32_t pin;
const uint16_t kDebounceTimeMillis = 10;
#ifndef ARDUINO
int digitalRead(uint32_t pin);
#endif
};
#endif
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.sql.client.cli;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import org.apache.samza.sql.client.interfaces.ExecutorException;
import org.apache.samza.sql.client.util.CliUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Main entry of the program.
*/
public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static void main(String[] args) {
// Get configuration file path
String configFilePath = null;
for(int i = 0; i < args.length; ++i) {
switch(args[i]) {
case "-conf":
if(i + 1 < args.length) {
configFilePath = args[i + 1];
i++;
}
break;
default:
LOG.warn("Unknown parameter {}", args[i]);
break;
}
}
CliEnvironment environment = new CliEnvironment();
StringBuilder messageBuilder = new StringBuilder();
if(!CliUtil.isNullOrEmpty(configFilePath)) {
LOG.info("Configuration file path is: {}", configFilePath);
try {
FileReader fileReader = new FileReader(configFilePath);
BufferedReader bufferedReader = new BufferedReader(fileReader);
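        // Each non-comment line is expected to be a single "key=value" pair; lines starting with '#' or '[' are skipped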
String line;
while ((line = bufferedReader.readLine()) != null) {
if (line.startsWith("#") || line.startsWith("[")) {
continue;
}
String[] strs = line.split("=");
if (strs.length != 2) {
continue;
}
String key = strs[0].trim().toLowerCase();
String value = strs[1].trim();
try {
int result = environment.setEnvironmentVariable(key, value);
if (result == -1) { // CliEnvironment doesn't recognize the key.
LOG.warn("Unknowing shell environment variable: {}", key);
} else if (result == -2) { // Invalid value
LOG.warn("Unknowing shell environment value: {}", value);
}
} catch(ExecutorException e) {
messageBuilder.append("Warning: Failed to create executor: ").append(value).append('\n');
messageBuilder.append("Warning: Using default executor " + CliConstants.DEFAULT_EXECUTOR_CLASS);
LOG.error("Failed to create user specified executor {}", value, e);
}
}
} catch (IOException e) {
LOG.error("Error in opening and reading the configuration file {}", e.toString());
}
}
environment.finishInitialization();
CliShell shell;
try {
shell = new CliShell(environment);
} catch (ExecutorException e) {
System.out.println("Unable to initialize executor. Shell must exit. ");
LOG.error("Unable to initialize executor.", e);
return;
}
shell.open(messageBuilder.toString());
}
}
|
<reponame>calebmarchent/fizz
/*
* Copyright (c) 2018-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#pragma once
#include <functional>
#include <map>
#include <string>
#include <vector>
namespace fizz {
namespace tool {
int fizzClientCommand(const std::vector<std::string>& args);
int fizzServerCommand(const std::vector<std::string>& args);
const std::map<std::string, std::function<int(const std::vector<std::string>&)>>
fizzUtilities = {{"client", &fizzClientCommand},
{"s_client", &fizzClientCommand},
{"server", &fizzServerCommand},
{"s_server", &fizzServerCommand}};
} // namespace tool
} // namespace fizz
|