content
stringlengths 10
4.9M
|
---|
<gh_stars>0
package com.unidev.templatecore.jmx;
import com.unidev.templatecore.Core;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedOperationParameter;
import org.springframework.jmx.export.annotation.ManagedOperationParameters;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.stereotype.Component;
@Component
@ManagedResource(objectName = "UniDev:name=CoreMBean")
public class CoreMBean {

    @Autowired
    private Core core;

    /**
     * JMX-exposed operation: echoes the supplied argument and appends the
     * result of {@link Core#invokeMe()}.
     *
     * @param argument value echoed back on the first line of the response
     * @return the argument line followed by the core invocation result
     */
    @ManagedOperation(description = "Invoke me method")
    @ManagedOperationParameters(
            @ManagedOperationParameter(name = "argument", description = "Variable argument")
    )
    public String invokeMe(String argument) {
        return "Argument : " + argument + "\n" + core.invokeMe();
    }
}
|
import warnings
#refer this link to learn about warnings categories
#https://docs.python.org/3/library/warnings.html
warnings.simplefilter('default') # also can be enabled through command line flag python3 -Wd
class WarningExample:
    """Demonstrates Python warning categories on successive sort API versions."""

    def __init__(self, input: list):
        # Values are kept as given; every sort variant returns a sorted copy.
        self.input = input

    def sortv1(self):
        """Deprecated sort variant; emits a DeprecationWarning, then sorts."""
        warnings.warn("sortv1 is deprecated, use sortv3 instead",
                      category=DeprecationWarning)
        return sorted(self.input)

    def sortv2(self):
        """Soon-to-be-deprecated variant; emits a PendingDeprecationWarning."""
        warnings.warn(
            "sortv2 will be deprecated in version 3, use sortv3 instead",
            category=PendingDeprecationWarning,
        )
        return sorted(self.input)

    def sortv3(self):
        """Current sort API: returns a sorted copy of the stored values."""
        return sorted(self.input)
if __name__ == '__main__':
    # Exercise each sort variant; the deprecated ones emit warnings on stderr.
    demo = WarningExample([10, 4, 2, 1, 5])
    for variant in (demo.sortv1, demo.sortv2, demo.sortv3):
        print(variant())
|
<reponame>rpatil524/Finance-Python<gh_stars>100-1000
# -*- coding: utf-8 -*-
u"""
Created on 2016-4-1
@author: cheng.li
"""
from PyFin.POpt.Optimizer import OptTarget
from PyFin.POpt.Optimizer import portfolio_optimization
__all__ = ['OptTarget',
'portfolio_optimization']
|
// generate_keypair generates a keypair for the Private Metadata construction.
// |out_x| and |out_y| are set to the secret half of the keypair, while
// |*out_pub| is set to the public half of the keypair. It returns one on
// success and zero on failure.
static int generate_keypair(EC_SCALAR *out_x, EC_SCALAR *out_y,
                            EC_POINT **out_pub, const EC_GROUP *group) {
  // Second group element used alongside the base point. |h| is owned locally
  // and must be freed on every exit path below.
  EC_POINT *h = get_h();
  if (h == NULL) {
    return 0;
  }

  // Additional data for the nonzero-scalar sampler; all zeros here since no
  // caller-specific entropy is mixed in.
  static const uint8_t kDefaultAdditionalData[32] = {0};
  EC_RAW_POINT tmp1, tmp2;
  EC_POINT *pub = EC_POINT_new(group);
  // EC_POINT_free(NULL) is a no-op, so freeing |pub| below is safe even when
  // EC_POINT_new itself failed.
  if (pub == NULL ||
      !ec_random_nonzero_scalar(group, out_x, kDefaultAdditionalData) ||
      !ec_random_nonzero_scalar(group, out_y, kDefaultAdditionalData) ||
      !ec_point_mul_scalar_base(group, &tmp1, out_x) ||  // tmp1 = x * G
      !ec_point_mul_scalar(group, &tmp2, &h->raw, out_y)) {  // tmp2 = y * H
    EC_POINT_free(h);
    EC_POINT_free(pub);
    // NOTE(review): ERR_R_MALLOC_FAILURE is reported for all failures here,
    // including scalar-sampling/multiplication errors -- confirm intended.
    OPENSSL_PUT_ERROR(TRUST_TOKEN, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  // pub = tmp1 + tmp2 = x*G + y*H: the public half of the keypair.
  group->meth->add(group, &pub->raw, &tmp1, &tmp2);
  *out_pub = pub;
  EC_POINT_free(h);
  return 1;
}
/**
 * Graphics item that draws one sequencing trace beneath the linear sequence
 * view: the called bases with quality ("phred") bars along the top, a
 * baseline, and the four per-base trace curves (A/C/G/T) with peak labels.
 *
 * @author Johan Henriksson
 */
public class QGraphicsLinTraceItem extends QGraphicsRectItem
    {
    // Sequence the trace is placed against
    public AnnotatedSequence seq;
    // Leftmost sequence character position shown on this view line
    public int cposLeft;
    // Top y pixel coordinate where this item starts drawing
    public int currentY;
    public ViewLinearSequence view;
    // NOTE(review): never read within this class -- presumably used by the
    // owning track; confirm before removing
    LinTrackTraces track;
    // Height of one character cell, in pixels
    double charHeight;
    QFont fontSequence;
    // Trace data plus its placement (from/to) on the sequence
    PlacedTrace trace;

    /** Height of one text line, in pixels. */
    public double fonth()
        {
        return charHeight-1;
        }

    /**
     * Map a trace level at the given peak index to a y pixel coordinate.
     * The level is normalized by the trace's local average around the index
     * and clamped to the display height before being subtracted from baseY.
     */
    public double mapPeakY(int index, double level)
        {
        level/=trace.getLocalAverage(index);
        double v=Math.min(dispHeight,level*scaleY);
        double y=baseY-v;
        return y;
        }

    /** Map a trace sample index to an x pixel coordinate in view space. */
    private double mapPeakX(double i)
        {
        int offsetletter=trace.getFrom()-cposLeft;
        return view.mapCharToX(i*scaleX+offsetletter);
        }

    //Set later -- computed at the start of paint() and read by mapPeakX/Y
    private int dispHeight;
    private int baseY;
    private double scaleX;
    private double scaleY;

    /**
     * Paint the full trace row: base letters and phred bars on top, lines
     * from each letter down to its peak, then the four trace curves with
     * letters under matching peaks.
     */
    public void paint(QPainter painter, QStyleOptionGraphicsItem option, QWidget widget)
        {
        int charsPerLine=view.charsPerLine;
        SequenceTrace st=trace.getTrace();
        // NOTE(review): fontMismatch is created but never applied below --
        // possibly leftover from the commented-out mismatch pen; confirm
        QFont fontMismatch=new QFont(fontSequence);
        fontMismatch.setBold(true);
        QPen penText=new QPen();
//		QPen penMismatch=new QPen();
//		penMismatch.setColor(QColor.fromRgb(255, 0, 0));
        QBrush brushPhred=new QBrush(QColor.darkGray);
        QBrush brushNone=new QBrush(QColor.transparent);
        QPen penBaseline=new QPen();
        penBaseline.setColor(QColor.lightGray);
        // Visible letter range clamped to this view line.
        // NOTE(review): mixes trace.getFrom() and the field trace.from --
        // presumably equivalent; confirm
        int seqletterFrom=Math.max(0,trace.getFrom()-cposLeft);
        int seqletterTo=Math.min(view.charsPerLine,trace.getTo()-cposLeft);
        int traceletterFrom=Math.max(0,cposLeft-trace.from);
        int traceletterTo=Math.min(st.getNumBases(), cposLeft+view.charsPerLine-trace.getFrom());
        int lastPeakIndex=st.basecalls.get(st.basecalls.size()-1).peakIndex;
        // Layout constants: curve area height, its baseline, and the
        // basecall-index-to-sample scale
        dispHeight=150;
        baseY=currentY+20+dispHeight;
        scaleX=st.getNumBases()/(double)lastPeakIndex;
        scaleY=50;
        double phredheight=20;
        double texty=currentY+fonth()+phredheight;
        double phredy=currentY+phredheight;
        //Draw aligned text on top
        painter.setFont(fontSequence);
        painter.setBrush(brushPhred);
        for(int i=0;i<charsPerLine;i++)
            {
            int index=cposLeft+i-trace.from;
            if(index>=0 && index<st.getNumBases())
                {
                int cpos=cposLeft + i;
                if(cpos>=seq.getLength())
                    break;
                SequenceTraceBaseCall cb=st.basecalls.get(index);
                //Draw base
                double x=view.mapCharToX(i);
                double xrect1=x+1;
                double xrect2=view.mapCharToX(i+1)-1;
                painter.setPen(penText);
                painter.drawText(new QPointF(x, texty), ""+cb.base);
                //Draw phred (bar grows upward: negative height, scaled by max probability)
                painter.drawRect(new QRectF(xrect1,phredy,xrect2-xrect1,-cb.getProb()*phredheight/trace.maxProb));
                //Draw line down to peak
                double x1=x+view.charWidth/3;
                double x2=mapPeakX(cb.peakIndex);
                double liney=mapPeakY(cb.peakIndex, st.getMaxLevel(cb.peakIndex))-1;
                painter.setPen(penBaseline);
                painter.drawLine(
                        new QPointF(x1,texty),
                        new QPointF(x2,liney));
                }
            }
        //Colors for each base, in A,C,G,T order
        QColor[] colorACGT=new QColor[]{
                QColor.darkGreen,
                QColor.blue,
                QColor.black,
                QColor.red
        };
        //Draw the trace lines
        painter.setPen(QColor.black);
        painter.drawLine(
                new QPointF(view.mapCharToX(seqletterFrom),baseY),
                new QPointF(view.mapCharToX(seqletterTo),baseY));
        painter.setBrush(brushNone);
        for(int curcol=0;curcol<4;curcol++)
            {
            char[] letters=new char[]{'A','C','G','T'};
            int level[]=st.getLevel(letters[curcol]);
            painter.setPen(colorACGT[curcol]);
            // One polyline per channel across the visible sample window
            QPainterPath path=new QPainterPath();
            boolean first=true;
            int levelfrom=(int)(traceletterFrom/scaleX);
            int levelto=Math.min((int)((traceletterTo+1)/scaleX),st.getLevelLength());
            for(int i=levelfrom;i<levelto;i++)
                {
                double y=mapPeakY(i, level[i]);
                double x=mapPeakX(i);
                if(first)
                    path.moveTo(x,y);
                else
                    path.lineTo(x,y);
                first=false;
                }
            painter.drawPath(path);
            // Letters below the baseline, under peaks of the matching base
            double y=baseY+fonth();
            for(SequenceTraceBaseCall cb:st.basecalls)
                {
                if(cb.peakIndex>levelfrom)
                    {
                    if(cb.peakIndex>levelto)
                        break;
                    if(cb.base==letters[curcol])
                        {
                        double x=mapPeakX(cb.peakIndex)-view.charWidth/3;
                        painter.drawText(new QPointF(x,y), ""+cb.base);
                        }
                    }
                }
            }
        }

    /** Fixed-height bounding box; width is effectively unbounded. */
    @Override
    public QRectF boundingRect()
        {
        double h=250;//fonth()*2;
        return new QRectF(0,currentY, 100000, h);
        }
    }
def cert_fingerprint(self) -> 'outputs.CertificateFingerprintResponse':
    """Return the certificate fingerprint output of this resource.

    NOTE(review): likely exposed as a pulumi ``@property`` getter in the
    full file -- confirm against the enclosing resource class.
    """
    return pulumi.get(self, "cert_fingerprint")
// ToMap returns a map of keys and values.
func (s *Sequence[K, V]) ToMap() map[K]V {
m := make(map[K]V, len(s.keys)*4/3+1)
for _, k := range s.keys {
m[k] = s.data[k]
}
return m
} |
<reponame>itok/SwaggerViewer
//
// SwaggerViewer-Bridging-Header.h
// SwaggerViewer
//
// Created by itok on 2018/03/03.
//
#ifndef SwaggerViewer_Bridging_Header_h
#define SwaggerViewer_Bridging_Header_h
#import <GCDWebServer/GCDWebServer.h>
#endif /* SwaggerViewer_Bridging_Header_h */
|
Quality assurance and impact measurement of university affiliated programs.
Provisions in the Developmental Disabilities Bill of Rights and Assistance Act (PL 101-496) emphasize the need to measure the impact of federally supported services in promoting the independence and productivity of clients and their integration in community settings. As a part of this overall emphasis, the Administration on Developmental Disabilities (ADD) has initiated a Quality Enhancement System for use in the University Affiliated Programs (UAPs) encompassing both process (formative) and outcome or impact (summative) evaluation. Whereas formative measures and techniques have been used for many years and are well developed, measures and techniques to determine impact have received much less attention in the professional literature. Characteristics of successful UAPs were outlined as a basis for suggesting appropriate measures of impact of UAP programs on clients and their families, on host and affiliated universities, and on the service system. Some barriers to implementation of impact measures in UAPs were discussed. |
import hashlib
import io
import logging
import os
import pilkit.processors
import pilkit.utils
from flask import current_app
from PIL import Image, ImageColor, ImageDraw, ImageFont
from . import cache, constants, exc, storage, utils
from ._compat import b, cairosvg
from .configuration import Config
logger = logging.getLogger('flask_resize')
def format_to_ext(format):
    """Return the file extension used for the given image format constant.

    Raises ``KeyError`` for formats other than JPEG, PNG and SVG.
    """
    extension_by_format = {
        constants.JPEG: 'jpg',
        constants.PNG: 'png',
        constants.SVG: 'svg',
    }
    return extension_by_format[format]
def image_data(img, format, **save_options):
    """Serialize a PIL Image in the given format and return the raw bytes."""
    buffer = io.BytesIO()
    img.save(buffer, format, **save_options)
    return buffer.getvalue()
def _get_package_path(relpath):
"""Get the full path for a file within the package
Args:
relpath (str):
A path contained within the flask_resize
Returns:
str: Full path for the file requested
"""
pkgdir = os.path.dirname(__file__)
return os.path.join(pkgdir, 'fonts', relpath)
def make_opaque(img, bgcolor):
    """Flatten an image onto a solid background color.

    Args:
        img (PIL.Image):
            Image to alter.
        bgcolor (str):
            A :func:`parse_rgb` parseable value to use as background color.

    Returns:
        PIL.Image:
            A new image with the background color applied.
    """
    rgb = ImageColor.getrgb(utils.parse_rgb(bgcolor))
    return pilkit.processors.MakeOpaque(background_color=rgb).process(img)
def convert_svg(bdata):
    """Rasterize SVG bytes into a PIL Image via CairoSVG.

    Raises :class:`exc.CairoSVGImportError` when CairoSVG is not installed.
    """
    if cairosvg is None:
        raise exc.CairoSVGImportError(
            "CairoSVG must be installed for SVG input file support. "
            "Package found @ https://pypi.python.org/pypi/CairoSVG."
        )
    png_bytes = cairosvg.svg2png(bytestring=bdata)
    return Image.open(io.BytesIO(png_bytes))
def _measure_text(draw, text, font):
    """Measure rendered text size in pixels.

    ``ImageDraw.textsize`` was deprecated in Pillow 9.2 and removed in
    Pillow 10; prefer ``textbbox`` when available and fall back for old
    Pillow versions.
    """
    if hasattr(draw, 'textbbox'):
        left, top, right, bottom = draw.textbbox((0, 0), text, font=font)
        return right - left, bottom - top
    return draw.textsize(text, font=font)


def create_placeholder_image(width=None, height=None, message=None):
    """
    Create a placeholder image of the specified width and height, with an
    optional text centered in it.

    Args:
        width (Optional[:class:`str`]):
            Width to use for the image. Will use `height` if not provided.
        height (Optional[:class:`str`]):
            Height to use for the image. Will use `width` if not provided.
        message (Optional[:class:`str`]):
            Text to add to the center of the placeholder image.

    Raises:
        :class:`exc.MissingDimensionsError`:
            If neither `width` nor `height` are provided.

    Returns:
        PIL.Image:
            The placeholder image.
    """
    if width is None and height is None:
        raise exc.MissingDimensionsError("Specify at least one of `width` "
                                         "or `height`")
    # A missing dimension falls back to the other one (square placeholder).
    placeholder_width = width or height
    placeholder_height = height or width
    placeholder_text = '{}x{}'.format(placeholder_width, placeholder_height)
    if message is not None:
        placeholder_text += u' ({})'.format(message)
    text_fill = (255, ) * 3   # white text
    bg_fill = (220, ) * 3     # light gray background
    img = Image.new('RGB', (placeholder_width, placeholder_height), bg_fill)
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype(_get_package_path('DroidSans.ttf'), size=36)
    text_width, text_height = _measure_text(draw, placeholder_text, font)
    # Center the text within the placeholder.
    draw.text((((placeholder_width - text_width) / 2),
               ((placeholder_height - text_height) / 2)),
              text=placeholder_text, font=font, fill=text_fill)
    return img
class ResizeTarget:
    """One generated (resized/converted) image target.

    Knows how to compute its unique storage key from the resize options,
    look itself up in cache/storage, and generate the output image from the
    source on demand.
    """

    def __init__(
        self,
        image_store,
        source_image_relative_url,
        dimensions=None,
        format=None,
        quality=80,
        fill=False,
        bgcolor=None,
        upscale=True,
        progressive=True,
        name_hashing_method=constants.DEFAULT_NAME_HASHING_METHOD,
        # NOTE(review): mutable-ish default evaluated once and shared across
        # instances -- harmless if NoopCache is stateless; confirm.
        cache_store=cache.NoopCache(),
        target_directory=constants.DEFAULT_TARGET_DIRECTORY,
        use_placeholder=False,
    ):
        self.source_image_relative_url = source_image_relative_url
        self.use_placeholder = use_placeholder
        # Either or both of width/height may be None (auto).
        self.width, self.height = (
            utils.parse_dimensions(dimensions) if dimensions is not None
            else (None, None)
        )
        self.format = utils.parse_format(source_image_relative_url, format)
        self.quality = quality
        self.fill = fill
        self.bgcolor = (
            utils.parse_rgb(bgcolor, include_number_sign=False)
            if bgcolor is not None else None
        )
        self.upscale = upscale
        self.progressive = progressive
        self.name_hashing_method = name_hashing_method
        self.target_directory = target_directory
        self.image_store = image_store
        self.cache_store = cache_store
        self._validate_arguments()
        # Key under which the generated image is stored and cached.
        self.unique_key = self._generate_unique_key()

    def _validate_arguments(self):
        # A source path or the placeholder fallback must be available.
        if not self.source_image_relative_url and not self.use_placeholder:
            raise exc.EmptyImagePathError()
        # Fill mode crops/pads to an exact box, so both sides are required.
        if self.fill and not all([self.width, self.height]):
            raise exc.MissingDimensionsError(
                'Fill requires both width and height to be set.'
            )

    @property
    def file_extension(self):
        # Extension derived from the *output* format, not the source file.
        return format_to_ext(self.format)

    def _get_generate_unique_key_args(self):
        # Values hashed into this target's unique key.
        # NOTE(review): 'fill' contributes twice below; the first entry looks
        # like a leftover, but removing it would change every generated key
        # and invalidate existing caches -- confirm before cleaning up.
        return [
            self.source_image_relative_url,
            self.format,
            self.quality if self.format == constants.JPEG else '',
            self.width or 'auto',
            self.height or 'auto',
            'fill' if self.fill else '',
            'fill' if self.fill else 'no-fill',
            'upscale' if self.upscale else 'no-upscale',
            self.bgcolor or '',
        ]

    def _generate_unique_key(self):
        # Key layout: <target_directory>/<hash-of-options>.<ext>
        cache_key_args = self._get_generate_unique_key_args()
        hash = hashlib.new(self.name_hashing_method)
        hash.update(b(''.join(str(a) for a in cache_key_args)))
        name = hash.hexdigest()
        return '.'.join([
            '/'.join([self.target_directory, name]),
            self.file_extension
        ])

    def get_cached_path(self):
        """Return the key if cached, otherwise raise :class:`exc.CacheMiss`."""
        if self.cache_store.exists(self.unique_key):
            logger.debug('Fetched from cache: {}'.format(self.unique_key))
            return self.unique_key
        else:
            msg = '`{}` is not cached.'.format(self.unique_key)
            logger.debug(msg)
            raise exc.CacheMiss(msg)

    def get_path(self):
        """Return the key if the image exists in storage; backfill the cache.

        Raises :class:`exc.ImageNotFoundError` when the image is missing.
        """
        if self.image_store.exists(self.unique_key):
            # As the generated image might've been created on another instance,
            # we'll store the path in cache key here so we won't have to
            # manually check the path again.
            self.cache_store.add(self.unique_key)
            logger.debug('Found non-cached image: {}'.format(self.unique_key))
            return self.unique_key
        else:
            raise exc.ImageNotFoundError(self.unique_key)

    def get_generated_image(self):
        """Return the raw bytes of the generated image from storage."""
        return self.image_store.get(self.unique_key)

    @property
    def source_format(self):
        """Uppercased extension of the source image (e.g. ``'PNG'``)."""
        if not self.source_image_relative_url:
            # Missing path means only a placeholder could be generated
            return constants.PNG
        else:
            fmt = os.path.splitext(self.source_image_relative_url)[1]
            assert fmt.startswith('.')
            # NOTE(review): yields e.g. 'JPG' for '.jpg' files -- confirm
            # this matches the values in `constants` it is compared against.
            return fmt[1:].upper()

    def _generate_impl(self):
        """Produce the output image bytes: load (or placeholder), resize,
        apply background, and encode with format-specific options."""
        try:
            source_data = self.image_store.get(
                self.source_image_relative_url
            )
        except exc.ImageNotFoundError:
            if self.use_placeholder:
                source_data = self.generate_placeholder(
                    'Source image `{}` not found'.format(
                        self.source_image_relative_url
                    )
                )
            else:
                raise
        # SVG sources must be rasterized before PIL can work on them.
        if self.source_format == constants.SVG:
            img = convert_svg(source_data)
        else:
            fp = io.BytesIO(source_data)
            img = Image.open(fp)
        if self.width or self.height:
            resize_to_fit_kw = dict(
                width=self.width,
                height=self.height,
                upscale=self.upscale
            )
            if self.fill:
                # Matte color fills the area the source doesn't cover.
                if self.bgcolor:
                    mat_color = ImageColor.getrgb(self.bgcolor)
                elif self.format == constants.JPEG:
                    mat_color = (255, 255, 255, 255)  # White
                else:
                    mat_color = (0, 0, 0, 0)  # Transparent
                resize_to_fit_kw['mat_color'] = mat_color
            processor = pilkit.processors.ResizeToFit(**resize_to_fit_kw)
            img = processor.process(img)
        # Preserve the source's color profile in the output.
        options = {
            'icc_profile': img.info.get('icc_profile'),
        }
        if self.format == constants.JPEG:
            options.update(
                quality=int(self.quality),
                progressive=self.progressive
            )
        if self.bgcolor is not None:
            img = make_opaque(img, self.bgcolor)
        img, save_kwargs = pilkit.utils.prepare_image(img, self.format)
        save_kwargs.update(options)
        options = save_kwargs
        return image_data(img, self.format, **options)

    def generate(self):
        """Generate and persist the image under a cache transaction.

        The transaction guards against two workers generating the same key
        concurrently; the loser raises :class:`exc.GenerateInProgress`.
        On any generation error the partial image and cache entry are
        removed before re-raising.
        """
        with self.cache_store.transaction(
            self.unique_key
        ) as transaction_successful:
            if transaction_successful:
                logger.info('Generating image: {}'.format(self.unique_key))
            else:
                logger.error(
                    'GenerateInProgress error for: {}'.format(self.unique_key)
                )
                raise exc.GenerateInProgress(self.unique_key)
            try:
                data = self._generate_impl()
                self.image_store.save(self.unique_key, data)
            except Exception as e:
                logger.info(
                    'Exception occurred - removing {} from cache and '
                    'image store. Exception was: {}'
                    .format(self.unique_key, e)
                )
                # Best-effort cleanup: a failed delete must not mask the
                # original error.
                try:
                    self.image_store.delete(self.unique_key)
                except Exception as e2:
                    logger.warning(
                        'Another exception occurred while doing error cleanup '
                        'for: {}. The exception was: {}'
                        .format(self.unique_key, e2)
                    )
                    pass
                self.cache_store.remove(self.unique_key)
                raise e
            else:
                self.cache_store.add(self.unique_key)
                return data

    def generate_placeholder(self, message):
        """Return PNG bytes of a placeholder image carrying *message*."""
        img = create_placeholder_image(self.width, self.height, message)
        return image_data(img, 'PNG')
class Resizer:
    """Factory for creating the resize function.

    Holds the storage backend, cache store and URL configuration; calling
    the instance resolves (or generates) a resized image and returns its URL.
    """

    def __init__(
        self,
        storage_backend,
        cache_store,
        base_url,
        name_hashing_method=constants.DEFAULT_NAME_HASHING_METHOD,
        target_directory=constants.DEFAULT_TARGET_DIRECTORY,
        raise_on_generate_in_progress=False,
        noop=False
    ):
        self.storage_backend = storage_backend
        self.cache_store = cache_store
        self.base_url = base_url
        self.name_hashing_method = name_hashing_method
        self.target_directory = target_directory
        self.raise_on_generate_in_progress = raise_on_generate_in_progress
        # When noop is set, __call__ returns the input URL untouched.
        self.noop = noop
        self._fix_base_url()

    def _fix_base_url(self):
        # Normalize so URL concatenation/stripping below is consistent.
        if not self.base_url.endswith('/'):
            self.base_url += '/'

    def __call__(
        self,
        image_url,
        dimensions=None,
        format=None,
        quality=80,
        fill=False,
        bgcolor=None,
        upscale=True,
        progressive=True,
        placeholder=False
    ):
        """Method for resizing, converting and caching images

        Args:
            image_url (str):
                URL for the image to resize. A URL relative to `base_url`
            dimensions (str, Sequence[:class:`int`, :class:`int`]):
                Width and height to use when generating the new image.
                Uses the format of :func:`parse_dimensions`. No resizing
                is done if None is passed in.
            format (Optional[:class:`str`]):
                Format to convert into. Defaults to using the same format as
                the original image. An exception to this default is when the
                source image is of type SVG/SVGZ, then PNG is used as default.
            quality (int):
                Quality of the output image, if the format is JPEG.
                Defaults to 80.
            fill (bool):
                Fill the entire width and height that was specified if True,
                otherwise keep the original image dimensions.
                Defaults to False.
            bgcolor (Optional[:class:`str`]):
                If specified this color will be used as background.
            upscale (bool):
                Whether or not to allow the image to become bigger than the
                original if the request width and/or height is bigger than its
                dimensions. Defaults to True.
            progressive (bool):
                Whether to use progressive encoding or not when JPEG is the
                output format. Defaults to True.
            placeholder (bool):
                Whether to show a placeholder if the specified ``image_url``
                couldn't be found.

        Raises:
            :class:`exc.EmptyImagePathError`:
                If an empty image path was received.
            :class:`exc.ImageNotFoundError`:
                If the image could not be found.
            :class:`exc.MissingDimensionsError`:
                If ``fill`` argument was True, but width or height was
                not passed.

        Returns:
            str:
                URL to the generated and cached image.

        Usage:
            Generate an image from the supplied image URL that will fit
            within an area of 600px width and 400px height::

                resize('somedir/kittens.png', '600x400')

            Resize and crop so that the image will fill the entire area::

                resize('somedir/kittens.png', '300x300', fill=1)

            Convert to JPG::

                resize('somedir/kittens.png', '300x300', format='jpg')
        """
        if self.noop:
            return image_url
        # Accept absolute URLs under base_url by stripping the prefix.
        if image_url and image_url.startswith(self.base_url):
            image_url = image_url[len(self.base_url):]
        target = ResizeTarget(
            self.storage_backend,
            image_url,
            dimensions=dimensions,
            format=format,
            quality=quality,
            fill=fill,
            bgcolor=bgcolor,
            upscale=upscale,
            progressive=progressive,
            use_placeholder=placeholder,
            cache_store=self.cache_store,
            name_hashing_method=self.name_hashing_method,
            target_directory=self.target_directory,
        )
        # Resolution order: cache hit -> storage hit -> generate on demand.
        try:
            relative_url = target.get_cached_path()
        except exc.CacheMiss:
            try:
                relative_url = target.get_path()
            except exc.ImageNotFoundError:
                try:
                    target.generate()
                except exc.GenerateInProgress:
                    # Another worker is generating this image; optimistically
                    # return the expected key unless configured to raise.
                    if self.raise_on_generate_in_progress:
                        raise
                    else:
                        relative_url = target.unique_key
                else:
                    relative_url = target.get_path()
        # NOTE(review): os.path.join is used to build a URL; on Windows this
        # produces backslashes -- confirm intended.
        return os.path.join(self.base_url, relative_url)
def make_resizer(config):
    """Build a :class:`Resizer` from a :class:`Config` instance."""
    storage_backend = storage.make(config)
    cache_store = cache.make(config)
    return Resizer(
        storage_backend=storage_backend,
        cache_store=cache_store,
        base_url=config.url,
        name_hashing_method=config.hash_method,
        target_directory=config.target_directory,
        raise_on_generate_in_progress=config.raise_on_generate_in_progress,
        noop=config.noop,
    )
class Resize(object):
    """
    Glue between a Flask app and the ``Resizer``: reads the app's
    configuration, builds a resizer, and registers it as the ``resize``
    jinja filter.

    Args:
        app (Any[flask.Flask, None]):
            A Flask app can be passed in immediately if not using the app
            factory pattern.
    """

    def __init__(self, app=None):
        if app is None:
            return
        self.init_app(app)

    def __call__(self, *args, **kwargs):
        # Delegate to the resizer bound to the currently active app.
        return current_app.resize(*args, **kwargs)

    def init_app(self, app):
        """Initialize Flask-Resize for *app*.

        Args:
            app (:class:`flask.Flask`):
                The Flask app to configure.

        Raises:
            RuntimeError:
                A setting wasn't specified, or was invalid.
        """
        overrides = {'RESIZE_RAISE_ON_GENERATE_IN_PROGRESS': app.debug}
        config = Config.from_dict(app.config, default_overrides=overrides)
        app.resize = app.jinja_env.filters['resize'] = make_resizer(config)
|
<gh_stars>10-100
//! `vst_window` provides a cross-platform API for implementing VST plugin editor windows.
mod event;
mod platform;
pub use event::{MouseButton, WindowEvent};
pub use platform::{setup, EditorWindow, EventSource};
|
def module_by_name(self, module, station_name=None):
station = self.station_by_name(station_name)
if station is None:
return None
if station["module_name"] == module:
return station
if station["_id"] == module:
return station
for mod in station["modules"]:
if "module_name" not in mod:
mod["module_name"] = "noname"
if mod["module_name"] == module:
return mod
if mod["_id"] == module:
return mod
return None |
On the face of the papal summer palace, which looms over the public square of a sleepy town near Rome, is a large clock from pre-Napoleonic times. It has one hand rather than two, and a face divided into six hours instead of 12. Its purpose is to remind the world that time passes very differently at the Vatican. Or so I tell myself, as I try to rationalize how the past decade of my life has been consumed by the writing of a single Vatican novel. As a husband and father now approaching 40 years old, I find myself bracing for the publication of a book I began writing as a 27-year-old bachelor.
The novel’s genesis traces to 2003, when I came upon the surprising fact that our modern notion of Jesus’ physical appearance – the bearded, long-haired man of Christian art – goes back to about 400 AD, before which no one seems to have agreed what Jesus looked like. The Bible offers no description, so where had this image come from? Around the same time in history, mysterious relics appeared in the Christian East, purporting to be divine portraits of Jesus not made by human hands. In 1978 a British scholar proposed that the cloth we know today as the Shroud of Turin might in fact be the most famous of these early relics: an image widely known and revered in early Christendom. Even though carbon-dating tests declared the Shroud a medieval fake almost 20 years ago, millions of faithful continue to travel to Turin during the Shroud’s periodic expositions, making this single cloth more popular than any museum on earth. Increasingly, they share a conviction that today’s Turin Shroud is indeed that celebrated relic of times past. Is it possible, then, that the Shroud is the most influential image in Christian history? That, when it first emerged, it was considered so authoritative that all subsequent images of Jesus can be traced to it?
Advertisement:
This question would never have spawned a decade of research and writing had it not been for a decision I made alongside it: that the protagonist of my novel would be a Catholic priest, and that my setting would be the world capital of Jesus imagery, where ultimate control of the Turin Shroud now rests: the Vatican.
This was ambitious. I’m not a Catholic; in fact, I’m not a practicing Christian of any kind (though a decade of researching this novel has nudged me closer to becoming one). What I saw, in the question of Jesus’ physical appearance, was an entry-point into the greater mystery of what’s really knowable about Jesus. If we put aside traditional pieties as well as conspiracy theories, what are we left with? A man who is as much a historical construct as our artistic imagery of him? Or is it possible that my fictional priest knew something I didn’t? That it wasn’t his spiritual journey I was storyboarding, but my own?
In retrospect, my interest in searching for a reality beneath outward appearances has an obvious source. My previous novel, "The Rule of Four," had become a media phenomenon in 2004. My coauthor and I had spent the six years after college writing it, during most of which I’d lived in rural southwestern Virginia, supplementing scant wages teaching SAT classes with my wife’s veterinary-school loans, which together were usually enough to spare us from charging groceries on our credit card. Then, suddenly, I was on "Good Morning America" and the front page of the New York Times. What happened after that, I still have trouble understanding.
We became symbols of everything and anything. The NBA ran advertisements showing professional basketball players reading "The Rule of Four." People magazine gave us a prominent review in the “50 Most Beautiful” issue, and the Wall Street Journal followed up with an article about our exercise regimens, so that, at a literary society dinner soon afterward, when I found myself seated beside Jenny Craig of the eponymous weight-loss company, her first words to me were: “Ian, I’ve read all about you. And you’ve done such a good job staying trim.” After USA Today reported that President Bush was reading "The Rule of Four," we received letters of congratulations from congressmen, including the Senate majority leader, which were followed by an invitation to what the Department of Homeland Security called a “Red Cell” session, seeking our input into imaginative ways terrorists might try to reprise 9/11. The longer this continued, the less there was any correspondence between our inner lives and outward appearances. Who I was bore no resemblance to how the world seemed to see me.
Money deepened the problem. Earlier that year, my wife and I had been lucky to qualify for a loan on our used Toyota. Now the first royalty check could’ve bought a Jaguar for every day of the week. When Random House offered $2 million for two more books, to be doled out in payments for years to come, all memory of financial hardship vanished into the mist of unreality that now shrouded my life. My wife and I put a down payment on a large house that seemed, in those heady days of the booming real-estate market, a sound investment. Then we settled down to start a family – and I settled down to work.
For the next five years we would never worry about bills. We would have a nanny. We would enjoy a weekly date night at a restaurant nice enough to offer a separate wine menu. Above all, I would have an almost unlimited budget for researching my novel.
Advertisement:
The most important thing I learned about the Vatican during my first year of research was that its reputation for secrecy is largely a myth. Considering that leading newspapers from around the globe send full-time reporters to cover a 108-acre country with a population measured in the hundreds, few stones go unturned. If you want to know what it’s like to be a cardinal inside that most secret of places – a conclave – there’s a book on that. If you want to know what each bishop does inside the Holy See bureaucracy, there’s a book on that. I didn’t realize what a godsend these books were until I found myself needing to know things that even journalists didn’t know or weren’t allowed to report on. For while there were books on those topics too, they weren’t the kind I could buy at Amazon.
In February of 1960, the New Yorker published an anecdote about a scholar working at the Vatican Library who asks a clerk to bring him a particular ancient manuscript. (Fetching your own books at the Vatican Library is forbidden.) The clerk vanishes for a while, then returns with a slip of paper on which is written, “Missing since 1530.” There’s a fat kernel of truth in this story. The search for information about the Vatican is maddening not because that information is scarce but because it’s oceanic, bottomless, centuries deep – and because, just when you think you’re on the cusp of a discovery, you drop head-first into the abyss.
I knew my novel would take place during the waning months of Pope John Paul II’s papacy, and would involve one of his dying wishes for the Catholic Church – to reunite it with Christianity’s second-biggest institution, the Orthodox Church – so I’d envisioned a private scene at his bedside. Like almost everything else in the novel, I wanted this to be grounded in near-photographic realism, and since John Paul was famously open about his private life, I assumed it would be easy to find out what his bedroom looked like.
I was wrong. In the dozens of Vatican coffee-table books I’d stockpiled for image-sleuthing, I found nothing. Nothing, either, in a search of Google Books. I found a few details in the authorized biography of John Paul, but the details were vague: the pope’s bedroom had a full-size bed, a large table of books, and a photo of his parents. Considering that the author of this biography had been given wide-ranging access, these slim pickings were a bad indicator. Without realizing it, I was stepping into one of dozens of rabbit holes I would encounter over the coming decade. The trail led in two directions: down dark alleys into tabloid journalism, and into archived records from past papacies. I went to work.
Advertisement:
A bookstore in Paris shipped me a hard-to-find copy of an Italian paparazzo’s memoir of breaking into the papal summer residence around 1980 to shoot pictures of John Paul in his private swimming pool, a book that also contained furtive telephoto shots of John Paul pacing his hidden roof deck at the Vatican. I scoured these pictures but couldn’t find so much as an oblique glance into the pontifical bedroom just below. So I drove to the Library of Congress to dig up microfilm of the Sunday supplement to the Vatican’s own newspaper that had allegedly published, in September of 1978, an article describing every room in the pope’s apartments. But the article turned out to be nothing more than a photo of the pope’s palace taken from down in Saint Peter’s Square – effectively a tourist snapshot – in which each window was labeled “library” or “study” or “bedroom.” In the coming months, while focusing on other tasks, I returned again and again to the question of John Paul’s bedroom. In 2005, after the conclave of that year, Le Figaro had published an excellent map of the private papal apartments, yet about the details of the papal bedroom there was presque rien. A 1971 book by the director of the Vatican Press Office, tantalizingly titled "L’Appartamento Pontificio," yielded even less.
Finally, a stroke of luck: Every pope has a pair of priest-secretaries who live with him in his apartments. A shop in Florence sold me a book written by the priest-secretary of John XXIII, the beloved pope of the early 1960s. In it was the most detailed description I’d yet found of the pope’s bedroom – and it rang a bell. Online, I’d come across a former papal photographer who’d begun selling his old photos in digital format. Among them, I’d spotted a picture of Pope John in a room that was a dead ringer for this description, except that there had been no bed in it. I went back and studied every shot from the roll. At last I found it: the pope standing beside a simple wooden bed that I now recognized as his own.
The walls are plain. There are no frescoes or towering canvases in gold frames. The pope has hung a few photos and small icons, wildly undersized for the almost 15-foot ceilings. One icon is placed so as to cover up a large dark patch where something much bigger was once hung: Apparently it has been a while since anyone repainted the walls. In one corner of the bedroom is a ponderous wooden wardrobe resembling a confession booth. Charmingly, its door has been left open, with a white robe dangling out. A nightgown? Or the pope’s spare cassock? On the floor beside the bed, where the pope curls his toes each morning to greet the world, is a thin rug with zigzag borders and a long tassel fringe. It matches nothing else in the room.
Advertisement:
These small details are the ones that change with every papacy. The constants are the plain walls, the checkerboard floor, the simple wooden door leading to the bathroom, perhaps the gauzy ceiling-height drapery over the towering windows. Yet it was the small changeable details that brought me closer to what I’d really wanted to know: There is no papal interior decorator; the gilded lily that is the Vatican stops at the door to this room. The pope’s bedroom is simple, unadorned. Like the bedrooms of other priests, it betrays a life in which material possessions and the cultivation of taste for taste’s sake have been discouraged. It is perhaps the only truly personal place this man has. Feeling gratified and somehow touched, I made a small file for this information and stowed it away. When the time came, I would be ready to write my scene.
In the years that followed, I would buy 600 research books on the Vatican, all of them aimed at solving one question or another in this way. The books would arrive at my door from almost every country in Europe, including the Vatican itself. My private obsession to know the history and appearance of every building within the pope’s walls, and as much as possible about the important rooms within them, provided a welcome distraction from the harder work at hand: understanding what Catholics believe about Jesus. For, in order to do that, I could no longer rely just on books.
Today, looking back on it, the terror of reaching out to my first priest seems overwrought. In the time since that first interview, I have traded phone calls and emails with Holy See diplomats, Vatican priests, Church lawyers, the wives of Eastern Catholic clergy, the Jesuit former editor of America magazine, and the papal caretaker of the Shroud of Turin. That first time, though, unnerved me.
Advertisement:
On Jan. 3, 2008, I emailed a priest whose address I had found online. I had chosen him for his credentials. He had studied at the Gregorian University in Rome while living at the Pontifical North American College: this combination is, for American Catholics, the priestly Harvard. He had firsthand experience of Vatican cardinals and Pope John Paul II. He was now dean of an East Coast seminary. And his specialty, evidenced by scholarly books under his name, was the interpretation of scripture. I introduced myself as a novelist writing a Vatican novel from the perspective of a priest. I asked a few questions about his experience as a seminarian. Then I slid in my questions about how priest-candidates were trained to interpret the Bible.
Three hours later came the reply.
Dear Mr. Caldwell,
Puzzled as I am about why one would decide to write about the experiences of a demographic about which one knows professedly next to nothing, I am intrigued enough to offer the following.
Advertisement:
Four paragraphs of answers ensued, addressing all of my questions about seminary, and exactly none of my questions about the Bible. Then: “To pursue these matters any further, I would prefer to have the opportunity to discuss your project with you via phone.”
That evening, I called him. I have no record of the length of the conversation. But my transcript, typed as we spoke, runs to four pages. It includes this pregnant single paragraph:
First Year includes synoptic gospels: start with the Resurrection narratives. Professor was very honest about the challenges, that the details don’t correspond, the number of angels, of women, the time of day. Insistent on psychological evidence, that people so disheartened became so strong in faith.
What this means, unpacked, is: first-year men at elite Catholic seminaries in Rome are required to take a class on the synoptic gospels, the first three books of the New Testament – Matthew, Mark and Luke. The word "synoptic," meaning roughly “from the same eye,” is used to describe these three books because of their near-verbatim similarity to each other in many passages, as if we are witnessing the events of Jesus’ life through a single set of eyes. Yet at the crucial moment of all Christian history – the resurrection of Jesus, when he is found to have risen from the dead after his crucifixion – a careful comparison of the synoptic gospels shows that they do not quite agree about several details. These include the “number of angels” found standing by Jesus’ empty tomb, the number “of women” who returned to find that tomb empty, and “the time of day” when they made this discovery. To my deep surprise, these discrepancies were what a professor of scripture was pointing out to a group of future priests, at the most storied pontifical university in Rome, on the very first day of their first year of class.
Advertisement:
Surely this was an isolated incident. Even pontifical universities have their share of eccentric professors who refuse to toe the party line; John Paul’s Latin expert, who in his off-hours from Vatican service taught classes at the same legendary Jesuit school in Rome, was notorious for going off-script and criticizing Catholic positions on issues ranging from the modern use of Latin to the Church’s “obsession with sex” and abortion. Perhaps what I’d heard from this single priest was a fluke.
It wasn’t. Other priests at other seminaries reported that their first day of Synoptics went the same way. In fact, some priests felt this was the whole spirit of the class: to challenge men to discard the simple verities of altar-boy life and search in darkness for the way back toward the light. Grope, and plunge onward, and relearn the whole map of faith. I was fascinated. Even a brief glance at the Catholic Encyclopedia from the early 20th century shows a Catholic Church entrenched against modernity: the story of Noah’s flood is still a literal fact, and the only role of modern science is to verify that a big boxy ark made according to the instructions God gives Noah would float quite well, thank you very much. Now, less than a hundred years later, priests were being trained to put scripture under a microscope. To allow, and even insist, that parts of the gospels were not meant to be read as factual history. I was mesmerized by the potential implications of this approach for any understanding of Jesus as a historical figure. So I asked every priest I interviewed to tell me, as best he could remember, the exact books he read in seminary. Then I bought those books, and made them the foundation of my own portrait of Jesus.
Meanwhile, my private life was fomenting revolution in my professional one. In 2005, my first son was born. In 2007, my second. Soon, writing a novel from the perspective of a celibate priest seemed as though it would require an almost total immersion in method acting. Parenthood is the Copernican Revolution of life, a man’s discovery that his universe no longer revolves around himself. It is often suggested that when the real Copernicus discovered the earth travels round the sun, Christians promptly buried their heads in the sand. Yet in a psychological sense, religious people are much better prepared to accept the selfless reorientation around the other that is the basic experience of having children. Most parish priests I’ve met over the past decade lead lives of selflessness that would put even the most devoted biological fathers to shame. Vatican priests, however, are not parish priests.
The typical Vatican priest has no congregation. In place of baptisms and youth ministries and spiritual instruction to the young, he works at a desk. He answers to a bishop, as all priests do; but he does so in a bureaucracy. In short, he is nobody’s shepherd.
Advertisement:
This I couldn’t accept. My daily life increasingly revolved around bibs and feedings, specialized knowledge of teething biscuits, seething hatred of hinges and floorboards that squeaked during my escape from a napping child’s crib. As time passed, and I reached the dawn of preschool with my older son, I was spellbound by the act of handing a child keys to the nuclear armament of the mind: how to read. We made trips to the library, forays into children’s literature, nervous incursions into the lawless frontiers of the English language. When another year passed, and the grade-school years came creeping in – the age of PTA and volunteerism – into my life came the weird gripping beauty of the way a soccer game or chess match can reveal a boy’s hidden character. The revelation that my wife and I were coming to know our children through their choices. Day by day I found it impossible to write about anything without somehow writing about this.
There are married Catholic priests in the world. I knew this. They are not merely the rogue ones who occasionally make headlines for keeping families on the side; there are thousands of legitimate clerics who have wives and children with the full blessing of the pope. Many of these men belong to what are called the Eastern Catholic churches, descended from the ancient Eastern tradition in which married men may be ordained. When married priests aren’t celebrating liturgies, or blessing houses for parishioners, or rushing to the hospital to pray with the sick and dying, they’re often pushing their children on swing-sets, cleaning up crayon marks on living-room walls, and admiring the women across their kitchen tables and marriage beds who, on top of all the usual challenges of motherhood, shoulder a parish’s expectations that they and their children will be examples to the community. Almost all Catholic priests know what it is to live on a shoestring, but only a married priest knows how much more threadbare the shoestring seems when the fate of a whole family dangles from it.
I know this, today, because interviews with Eastern priests and their wives introduced me to the private struggle of combining one type of fatherhood with another. They also helped me understand the pressures I would be heaping on my protagonist when I made him a married priest in the overwhelmingly celibate world of the Vatican. “We get an extra $120 a month for each child,” I was told by one priest. “I think they might get a supplement like that at the Vatican too.” He was right. But as a father myself, I had to ask: is that enough to make ends meet? The priest turned to his wife, as if he wasn’t quite sure himself how she did it. She shrugged. “I take odd jobs sometimes,” she said. “When I can. For a little extra money.”
In September of 2008, my wife and I pulled our nest egg out of the stock market and, seeking safe harbor, used part of it to pay down debt on our house. The remainder we earmarked as rainy-day funds for what appeared to be a long incoming storm. The new global recession had thus disguised an ominous sign of trouble closer to home. This was our first tacit acknowledgment that the "Rule of Four" money was not bottomless. That we were beginning to experience cash-flow problems.
Advertisement:
Still, my novel was missing a final ingredient: though I had created a Vatican tableau with a vivid Eastern Catholic protagonist, the stakes of the novel needed a final essential force to electrify them. A more emphatic way to talk about sin and redemption.
One night, in a dream, I found myself inside the Sistine Chapel. Above me towered Michelangelo’s Last Judgment. Here it was: the missing piece. In a novel about transgression and reconciliation, what I needed was judgment. Not merely a crime and its investigation, but a verdict of innocence or guilt.
The gospels themselves contain a courtroom drama: Jesus is arrested, tried, convicted. He is judged and brutally punished. This is one of the great symbolic reversals of Christianity: that we, who will someday stand before Jesus in hope of mercy, once judged him ourselves and found him wanting. The freight I could carry with this single idea told me it was exactly what I’d been looking for: I would take my priest – his heart so anchored to his family that he had accepted career compromise and social ostracism at the Vatican for their sake – and threaten him with a judgment that would entail almost unfathomable personal loss. To do it, I would put him in a Church courtroom.
The Vatican has two codes of law. One is a criminal code modeled after the Italian one, which, in turn, was recently remodeled after American practice: innocent until proven guilty, and a trial characterized by competing theories of the crime. Under this code, a Vatican criminal can be sent to Rebibbia, the maximum security Italian penitentiary where Pope John Paul’s would-be assassin, Mehmet Ali Agca, was sent. But this code of law and its stakes – earthly life and death – were the wrong ones for my novel.
Advertisement:
The Vatican’s second code, called canon law, governs the Catholic Church itself. Though incapable of sending a man to prison, it is empowered to dole out excommunication, priestly banishment, and under radical circumstances, dismissal from the clerical state: what the non-Catholic world calls defrocking. This is a terrifying prospect with almost impossible implications. In the pages of my novel, I will later compare it to a mother’s being declared childless, or a person’s being declared inhuman, because the mark of the priesthood is, Catholics believe, indelible. A priest cannot actually be unmade; he can only be erased from the roll of the living clergy, shunned, rendered a ghost by the institution that frames the terms of his life as no civil government could. This is, for a priest, the true highest penalty in a canon court, and perhaps the most existential one possible.
Because canonical tribunals work in secrecy, however, it is very difficult to learn how they operate. The most common examples of canonical courts treating grave crimes are from recent priest-abuse trials, so Church lawyers are especially reluctant to share any sort of information, even in the hypothetical, with novelists. Only with great trouble was I able to find the canonical equivalent of a legal defense fund for embattled priests, whose lead attorney I begged for a referral to someone, anyone, willing to help me understand the system. Finally he referred me to a canonist – a specialist in Catholic law – who happened to be writing a book on the subject. After allowing me to win her trust, this woman taught me the ground rules of a fundamentally different system of justice. She also referred me, in turn, to a canon-law expert in Rome so highly placed that I was never even permitted to know his name, a man who stringently took me to task for technical errors, and who reminded me that artistic license could lead to “yet another horrible calumny” by a novelist against Catholicism. Civil court and Church court, I discovered, have this in common: the terror that lawyers wield over the uninitiated. But there the resemblances end.
In a Church trial, there is no presumption of innocence. The accused may not face his accuser. Evidence can be admitted even if it is improperly obtained, so long as the judges feel it has value. The Church may refuse to honor requests for information bearing on the accused’s innocence. Defense and prosecution do not compete; the judges ask all questions. And there is an unfamiliar gray area: between innocent and guilty lies a third possible verdict, “accusations unproven.” Its existence was, to me, unexpected and galvanizing. Though this verdict wisely reflects the challenges of finding a definite truth, it seems at odds with the spiritual needs of the individual Christian to know whether he is accounted among the wheat or the chaff. Without innocence, how can there be vindication? And without guilt, how can there be forgiveness?
Here, at last, my workshop of tools was complete. In my Eastern Catholic priest I had the raw stone, and in the pressures of a canonical trial I had the chisel. In my mind’s eye I had the Vatican, visible down to the most granular level of detail. And guiding my hands I had a half-decade’s apprenticeship in Catholicism. At long last, I was ready to write.
I plunged headlong onto the blank page. By 2010, I had written a quarter of the novel. But now money was becoming a pressing concern. The portion of our nest egg we had reserved for a rainy day was eaten through. Sensing that I was close to rounding the corner, I asked my accountant about the legality of cashing out my retirement account. Legal, she advised. But not wise: a home equity loan would be better. Yet we both knew that no bank would extend such a loan to a writer with no income to show in his two most recent tax returns. Against counsel, I began withdrawals, projecting that they would be enough for my family to live on until 2011. By then, I would have finished a half-manuscript, a contractual milestone for which Random House would pay me for the first time in more than five years.
All went according to plan. In late 2011, I returned to New York with the finished half-novel. None too soon, either; with the bottom of our retirement accounts in sight, only one source of funds remained, and it was the Rubicon we must not cross: our children’s college savings.
Yet I had overlooked something. Blindly, inconceivably, after seven years of work on a novel about Christianity, I had failed to take into account the role played in human life by forces beyond our control.
The publishing world was in straits. Earlier that year, the second-biggest bookseller in America, Borders, had filed for bankruptcy. My editor returned with the news that I would not be paid. Instead, Random House would exercise its option to terminate my contract for failure to deliver on time. This was, in a world where authors routinely finish books long after contracts demand them, staggering. Even more staggering was the news that Random House’s lawyers would pursue repayment of the signing money they had issued me as far back as 2004. My editor, a Random House employee herself, vowed not to let this pass. It was cruel. I had been working hard, and in good faith, and she knew it.
But the lawyers now held a power beyond any realm of art or reason. These were hard times, and cash was king. My literary agent was forced to sail into the headwind of the most unfavorable publishing market in memory, in order to find my half-manuscript a new home. Four major publishers vied for the rights, but none could venture the sums of better days. I made Simon & Schuster my new home, but almost half of my new contract with them would be diverted to reimburse Random House for what had been only the initial payment of my old one. The money that remained would somehow have to keep my family afloat.
There is an old pagan phrase to describe a writer’s gravest sin. Deus ex machina. “God from the machine.” An ending in which the author, unable to find a plausible resolution to the problems he has created for himself, calls for the arrival of a god to descend on the stage and resolve all.
But at the Vatican, every play must end with a deus ex machina. God takes the stage before the beginning of the first act, and adjudicates every ending. The author, if he chooses any other way, is guilty of failing to know his subject.
According to canon law, there is no legal authority in Christendom higher than the pope. His power is “supreme, full, immediate, and universal.” He may overturn the verdict of even the supreme court of Catholicism. In the words of the code itself, “No appeal or recourse is permitted against a sentence or decree of the Roman Pontiff.” This majestic, breathtaking absolutism is an emblem of the far greater, far more mysterious potency of God, author of law and source of judgment.
So, as the trial comes to a close, the Eastern priest waits to hear the verdict. He waits, and waits. How does the story end? What becomes of the man in the darkness who, with everything he loves at stake, convinces himself to grope and plunge onward? The man who believes, heart and soul, that he will come out of present darkness and emerge into future light?
I have spent twelve years in the midst of the gospels. Enough time to be able to recognize their echoes in art and in life. As I write this, my finished novel is emerging from the press. As publication day looms, the question returns: how does the story end?
As it started, of course. It is, and has always been, a matter of faith. |
//
// Copyright (C) 2022 <NAME> and other RDKit contributors
//
// @@ All Rights Reserved @@
// This file is part of the RDKit.
// The contents are covered by the terms of the BSD license
// which is included in the file license.txt, found at the root
// of the RDKit source tree.
//
//
// Original author: <NAME> (CozChemIx).
//
#include <GraphMol/MolDraw2D/DrawTextNotFT.h>
#include <GraphMol/MolDraw2D/MolDraw2DHelpers.h>
namespace RDKit {
namespace MolDraw2D_detail {
// ****************************************************************************
// Intermediate base class for text drawers that do not use FreeType; simply
// forwards the font-size limits to the DrawText base class.
DrawTextNotFT::DrawTextNotFT(double max_fnt_sz, double min_fnt_sz)
    : DrawText(max_fnt_sz, min_fnt_sz) {}

// ****************************************************************************
DrawTextNotFT::~DrawTextNotFT() {}
// ****************************************************************************
// Shifts the per-glyph rects of a string so that the alignment reference
// point implied by talign ends up at the origin.  rects and draw_modes are
// parallel arrays: draw_modes[i] says whether glyph i is normal text or a
// super-/subscript.
void DrawTextNotFT::alignString(
    TextAlignType talign, const std::vector<TextDrawType> &draw_modes,
    std::vector<std::shared_ptr<StringRect>> &rects) const {
  // std::string comes in with rects aligned with first char with its
  // left hand and bottom edges at 0 on y and x respectively.
  // Adjust relative to that so that the relative alignment point is at
  // (0,0).
  if (talign == TextAlignType::MIDDLE) {
    // A string with exactly one normal (non-sub/superscript) glyph is
    // centred by treating it as START-aligned on that glyph instead.
    size_t num_norm = count(draw_modes.begin(), draw_modes.end(),
                            TextDrawType::TextDrawNormal);
    if (num_norm == 1) {
      talign = TextAlignType::START;
    }
  }

  Point2D align_trans, align_offset;
  if (talign == TextAlignType::START || talign == TextAlignType::END) {
    // Align on the first (START) or last (END) glyph drawn in Normal mode;
    // the loop keeps overwriting align_char so the last Normal glyph wins
    // for END, and breaks out early on the first one for START.
    size_t align_char = 0;
    for (size_t i = 0; i < rects.size(); ++i) {
      if (draw_modes[i] == TextDrawType::TextDrawNormal) {
        align_char = i;
        if (talign == TextAlignType::START) {
          break;
        }
      }
    }
    align_trans = rects[align_char]->trans_;
    align_offset = rects[align_char]->offset_;
  } else {
    // centre on the middle of the Normal text. The super- or subscripts
    // should be at the ends.
    double x_min = std::numeric_limits<double>::max();
    double x_max = std::numeric_limits<double>::lowest();
    align_offset.x = align_offset.y = 0.0;
    int num_norm = 0;
    for (size_t i = 0; i < rects.size(); ++i) {
      if (draw_modes[i] == TextDrawType::TextDrawNormal) {
        Point2D tl, tr, br, bl;
        rects[i]->calcCorners(tl, tr, br, bl, 0.0);
        // sometimes the rect is in a coordinate frame where +ve y is down,
        // sometimes it's up. For these purposes, we don't care so long as
        // the y_max is larger than the y_min. We probably don't need to do
        // all the tests for x_min and x_max;
        x_min = std::min({bl.x, tr.x, x_min});
        x_max = std::max({bl.x, tr.x, x_max});
        align_offset += rects[i]->offset_;
        ++num_norm;
      }
    }
    align_trans.x = (x_max - x_min) / 2.0;
    align_trans.y = 0.0;
    // Average the offsets over the Normal glyphs.
    // NOTE(review): if draw_modes contains no TextDrawNormal entry this
    // divides by zero -- presumably callers guarantee at least one; confirm.
    align_offset /= num_norm;
  }

  // Apply the shift to every glyph; note offset_ is overwritten (not
  // adjusted) with the shared alignment offset.
  for (auto r : rects) {
    r->trans_ -= align_trans;
    r->offset_ = align_offset;
  }
}
} // namespace MolDraw2D_detail
} // namespace RDKit
|
Influence of different fillers on the properties of an experimental vinyl polysiloxane.
The aim of the study was to evaluate the effect of the incorporation of different fillers on an experimental vinyl polysiloxane (VPS) at two different concentrations, 20% and 40%. Different fillers were added to an experimental VPS. The study was developed in two stages: (i) incorporation of fillers in different concentrations: (a) 20 wt% fillers, and (b) 40 wt%. The fillers were added to experimental VPS and mixed with a speed mixer; (ii) characterization of experimental VPS; after the base paste and catalyst paste were mixed, the experimental VPS was used to make specimens specifically for each test, which were stored at 23°C for 24 hours. The tests were designed according to the specific standardization for the analysis of tensile strength, detail reproduction, Shore A hardness, and elastic recovery. For analysis of filler size pattern, scanning electron microscopy at 1500× magnification was used. The aerosil OX-50 40% (AE) and pure aluminum hydroxide 40% (PAH) groups presented the highest tensile strength and Shore A hardness values. However, those were the only groups that did not present continuous detail reproduction of a 20 μm line intersection. The differences in elastic recovery were not statistically significant. The undesirable characteristics of VPS (lowest Shore A hardness and tensile strength) were observed when acrylic polymer (AP) or fiberglass (FG) was added to its composition, in both concentrations, 20% and 40%. In groups AE and PAH, agglomerates of nanofillers were shown in SEM micrography, while the other groups presented different filler shapes and sizes.
from collections import Counter
from operator import mul

# Count triples (i, j, k) with i < j < k such that s[i], s[j], s[k] are
# pairwise distinct AND the indices are not equally spaced (j - i != k - j).
# NOTE(review): the per-position counting below multiplies the counts of the
# two characters other than s[i], which is only correct when s contains
# exactly 3 distinct characters (e.g. AtCoder ABC162 D "RGB Triplets", where
# s is over {R, G, B}) -- confirm against the problem statement.
n = int(input())
s = input()
c = Counter(s)  # running character counts of the suffix s[i:]
if n < 3:
    # Fewer than three positions: no triple can exist.
    print(0)
    exit()
elif len(c.keys()) < 3:
    # Fewer than three distinct characters: no all-distinct triple exists.
    print(0)
    exit()
ans = 0
for i in range(n):
    x = s[i]
    # Treat i as the smallest index of a triple: each pairing of one
    # occurrence of each of the two other characters within s[i:] gives a
    # valid (unordered) pair of larger indices, since s[i] itself is x.
    ans += mul(*[v for k, v in c.items()if k != x])
    c[x] -= 1  # drop position i so c describes s[i+1:] on the next pass
    # Subtract the equally spaced triples (i, j, 2j - i) counted above.
    for j in range(i+1, n):
        y = s[j]
        if x == y:
            continue
        idx_diff = j-i
        if n <= j+idx_diff:
            continue  # the third index would fall off the end of s
        z = s[j+idx_diff]
        if x != z and y != z:
            ans -= 1
print(ans)
|
package optional;
import java.util.Optional;
import java.util.Random;
/**
 * Created by kennylbj on 2017/4/29.
 * A car may or may not have a Hud.
 */
final class Car {
    private static final Random random = new Random(System.nanoTime());

    private Hud hud;

    Car() {
        // On average two out of three cars come equipped with a HUD.
        boolean hasHud = random.nextInt(3) != 0;
        if (hasHud) {
            hud = new Hud();
        }
    }

    /** Returns the HUD if this car has one, otherwise an empty Optional. */
    Optional<Hud> getHud() {
        return Optional.ofNullable(hud);
    }
}
|
// Get return the config source with the specified name
func (m *MutableSourceList) Get(name string) Source {
for _, s := range m.sources {
if name == s.Name() {
return s
}
}
return nil
} |
<filename>src/app/services/general.service.spec.ts
import { TestBed } from '@angular/core/testing';
import { GeneralService } from './general.service';
import { CubicajeGeneral } from '../models/CubicajeGeneral';
import { TipoVehiculoCargaTerrestre } from 'app/models/TipoVehiculoCargaTerrestre';
import { DimensionObjeto } from 'app/models/DimensionObjeto';
import { CubicajeRecipiente } from 'app/models/CubicajeRecipiente';
import { CubicajeGranel } from 'app/models/CubicajeGranel';
// Unit tests for GeneralService's load-volume ("cubicaje") calculations.
// Expected numeric results are precomputed fixtures for the given inputs.
describe('GeneralService', () => {
  let service: GeneralService;

  beforeEach(() => {
    // The service needs no mocked providers; resolve it from a bare module.
    TestBed.configureTestingModule({});
    service = TestBed.inject(GeneralService);
  });

  it('should be created', () => {
    expect(service).toBeTruthy();
  });

  it('should be result for CubicajeGeneral', () => {
    // General cargo: 15000 boxes of 1.5 x 2 x 2 in a container of length 40,
    // vehicle payload 22000.
    let cubicajeGeneral = new CubicajeGeneral();
    cubicajeGeneral.cantidadTransportar = 15000;
    cubicajeGeneral.tipoVehiculoCargaTerrestre = new TipoVehiculoCargaTerrestre();
    cubicajeGeneral.tipoVehiculoCargaTerrestre.peso = 22000;
    cubicajeGeneral.largoContenedor = 40;
    cubicajeGeneral.dimensionCaja = new DimensionObjeto();
    cubicajeGeneral.dimensionCaja.largo = 1.5;
    cubicajeGeneral.dimensionCaja.ancho = 2;
    cubicajeGeneral.dimensionCaja.alto = 2;
    cubicajeGeneral.dimensionCaja.capacidad = 45.45;
    const result = service.calculateCubicajeGeneral(cubicajeGeneral);
    expect(result.cantidadPorContenedor).toBe(416);
    expect(result.cantidadContenedoresNecesarios).toBe(37);
  })

  it('should be result for CubicajeRecipiente', () => {
    // Liquid in discrete recipients: density 0.789, 60000 units to move,
    // recipient of 2 x 2 x 3.5 with capacity 50.
    let cubicajeRecipiente = new CubicajeRecipiente();
    cubicajeRecipiente.densidadLiquido = 0.789;
    cubicajeRecipiente.cantidadTransportar = 60000;
    cubicajeRecipiente.tipoVehiculoCargaTerrestre = new TipoVehiculoCargaTerrestre();
    cubicajeRecipiente.tipoVehiculoCargaTerrestre.peso = 22000;
    cubicajeRecipiente.largoContenedor = 40;
    cubicajeRecipiente.dimensionRecipiente = new DimensionObjeto();
    cubicajeRecipiente.dimensionRecipiente.largo = 2;
    cubicajeRecipiente.dimensionRecipiente.ancho = 2;
    cubicajeRecipiente.dimensionRecipiente.alto = 3.5;
    cubicajeRecipiente.dimensionRecipiente.capacidad = 50;
    const result = service.calculateCubicajeRecipientes(cubicajeRecipiente);
    expect(result.cantidadPorContenedor).toBe(147);
    expect(result.cantidadGalonesPorContenedor).toBe(7350);
    expect(result.cantidadContenedoresNecesarios).toBe(9);
  })

  it('should be result for CubicajeGranel', () => {
    // Bulk liquid: cistern capacity 7000, 60000 units to move.
    let cubicajeGranel = new CubicajeGranel();
    cubicajeGranel.densidadLiquido = 0.789;
    cubicajeGranel.cantidadTransportar = 60000;
    cubicajeGranel.capacidadCisterna = 7000;
    cubicajeGranel.tipoVehiculoCargaTerrestre = new TipoVehiculoCargaTerrestre();
    cubicajeGranel.tipoVehiculoCargaTerrestre.peso = 22000;
    const result = service.calculateCubicajeGranel(cubicajeGranel);
    expect(result.cantidadCisternas).toBe(9);
  })
});
|
<filename>khayyamtriangle/khayyamtriangle.cpp<gh_stars>0
#include<iostream>
using namespace std;
long int fact(int);
// Prints the first n rows of Khayyam's (Pascal's) triangle, one row per
// line, each value followed by a single space.
//
// Each row is built incrementally from C(i,0) = 1 using
//   C(i, j+1) = C(i, j) * (i - j) / (j + 1)
// instead of full factorials. This fixes two issues with the original:
//  * fact(i)/(fact(j)*fact(i-j)) overflows `long` for rows >= 21, while the
//    incremental form only overflows when the entry itself does (row ~66
//    for long long);
//  * printf was used without including <cstdio>.
int main()
{
	int n;
	cin >> n;
	for (int i = 0; i < n; i++) {
		long long value = 1;  // C(i, 0)
		for (int j = 0; j <= i; j++) {
			cout << value << ' ';
			// The product is always divisible by (j + 1), so the
			// integer division is exact.
			value = value * (i - j) / (j + 1);
		}
		cout << endl;
	}
	return 0;
}
// Factorial of n computed iteratively: the product 1 * 2 * ... * n.
// Returns 1 for n == 0 or n == 1.
long fact(int n)
{
	long result = 1;
	for (int i = 2; i <= n; ++i) {
		result *= i;
	}
	return result;
}
/**
* Provides acess to a single ASN Standards Document (and individual standards
* contained within) via the AsnDocument and AsnNode classes. Provides lists of
* AsnStandardsNodes for use in UI JSP.
*
* @author ostwald
*/
public class AsnStandardsManager implements StandardsManager {
String xmlFormat;
String xpath;
List availableDocs = null;
private StandardsRegistry standardsRegistry = null;
private String defaultDocKey = null;
/**
* Constructor for the AsnStandardsManager object
*
* @param xmlFormat format of framework for this standardsManager
* @param xpath field for which standards are managed
* @param source AsnDocument file
* @exception Exception if AsnDocument file cannot be processed
*/
public AsnStandardsManager(String xmlFormat, String xpath, File source) throws Exception {
this.xmlFormat = xmlFormat;
this.xpath = xpath;
this.standardsRegistry = StandardsRegistry.getInstance();
try {
} catch (Exception e) {
throw new Exception("AsnDocument could not be initialized: " + e.getMessage());
}
init(source);
}
/**
* Initialize the AsnStandardsManager by populating the standardsMap and tree from the provided AsnDocument file.
*
* @param source AsnDocument file
* @exception Exception if the source file cannot be processed
*/
public void init(File source) throws Exception {
AsnDocInfo docInfo = this.standardsRegistry.register(source.getAbsolutePath());
this.setDefaultDocKey(docInfo.key);
this.availableDocs = new ArrayList();
this.availableDocs.add(docInfo);
}
/**
* Gets the standardsRegistry attribute of the AsnStandardsManager object
*
* @return The standardsRegistry value
*/
public StandardsRegistry getStandardsRegistry() {
return this.standardsRegistry;
}
/**
* Gets the defaultDocKey attribute of the AsnStandardsManager object
*
* @return The defaultDocKey value
*/
public String getDefaultDocKey() {
return this.defaultDocKey;
}
/**
* Sets the defaultDocKey attribute of the AsnStandardsManager object
*
* @param docKey The new defaultDocKey value
*/
public void setDefaultDocKey(String docKey) {
this.defaultDocKey = docKey;
}
/**
* Gets the availableDocs (avaliable ASN Standards Documents) attribute of the AsnStandardsManager object
*
* @return The availableDocs value
*/
public List getAvailableDocs() {
return this.availableDocs;
}
/**
* Sets the availableDocs attribute of the AsnStandardsManager object
*
* @param docs The new availableDocs value
*/
public void setAvailableDocs(List docs) {
this.availableDocs = docs;
}
/**
* Gets the xpath attribute of the AsnStandardsManager object
*
* @return The xpath value
*/
public String getXmlFormat() {
return xmlFormat;
}
/**
* Gets the xpath attribute of the AsnStandardsManager object
*
* @return The xpath value
*/
public String getXpath() {
return xpath;
}
protected void setXpath(String xpath) {
this.xpath = xpath;
}
/**
* Gets the rendererTag attribute of the AsnStandardsManager object
*
* @return The rendererTag value
*/
public String getRendererTag() {
return "standards_MultiBox";
}
/** prints debugging information about this AsnStandardsManager */
public void report() {
prtln("\n----------------------");
prtln("xmlFormat: " + this.getXmlFormat());
prtln("xpath: " + this.getXpath());
prtln("rendererTag: " + this.getRendererTag());
}
/**
* Description of the Method
*
* @param s Description of the Parameter
*/
private static void prtln(String s) {
SchemEditUtils.prtln(s, "AsnStandardsManager");
// System.out.println(s);
}
} |
def store_file(filename, raw):
    """Write the string ``raw`` to ``filename`` as UTF-8, creating parents.

    Args:
        filename: Destination path; missing parent directories are created.
        raw: Text content to write (the file is overwritten if it exists).
    """
    file_path = os.path.dirname(filename)
    # Guard against '' (a bare filename with no directory component):
    # os.path.exists('') is False, so the original code then called
    # os.makedirs('') which raises FileNotFoundError.  exist_ok=True already
    # covers the "directory exists" case, so no exists() pre-check is needed.
    if file_path:
        os.makedirs(file_path, exist_ok=True)
    # Context manager guarantees the handle is closed even if write() raises.
    with open(filename, 'w', encoding='utf8') as file:
        file.write(raw)
6th episode of the third season of The Office
"Diwali" is the sixth episode of the third season of the American comedy television series The Office and the show's 34th overall. It was written by Mindy Kaling, who also acts in the show as Kelly Kapoor, and directed by Miguel Arteta. The episode first aired on November 2, 2006, on NBC, twelve days after the actual Diwali holiday.
The series depicts the everyday lives of office employees in the Scranton, Pennsylvania, branch of the fictional Dunder Mifflin Paper Company. In the episode, Kelly Kapoor (Mindy Kaling) invites the office to a Diwali celebration, where Ryan Howard (B. J. Novak) struggles to make a good impression on Kelly's parents and Michael Scott (Steve Carell) considers taking his relationship with Carol Stills (Nancy Carell) to the next step. Meanwhile, at the Stamford branch, Jim Halpert (John Krasinski) and Andy Bernard (Ed Helms) get drunk while working late, leading Karen Filippelli (Rashida Jones) to give Jim a ride home.
Kaling and executive producer Greg Daniels came up with the episode idea after Kaling held a Diwali party in 2005 for series staff. She undertook research about Indian culture and the resulting episode was substituted in place of one that would have been Halloween-themed. The episode was directed by Miguel Arteta. According to Nielsen Media Research, an estimated 8.8 million viewers tuned in; it earned a 4.2/10 rating among adults aged 18 to 49, placing first in this demographic group. Critical reception to the episode was largely positive, with many praising Kaling's writing, Carell's performance, and other comedic elements. As a result of the episode, The Office became the first American comedy series to depict the holiday, and several commentators have credited it with helping introduce Indian customs to American audiences.
Synopsis [ edit ]
Kelly Kapoor (Mindy Kaling) invites the entire office staff to a celebration of Diwali, the Hindu Festival of Lights, which Michael Scott (Steve Carell) mistakenly believes to be an Indian version of Halloween. Ryan Howard (B. J. Novak) fails to make a favorable impression on Kelly's parents, who in turn try to set up Kelly with a young doctor. Initially reluctant to attend the festival, Pam Beesly (Jenna Fischer) gives in and ends up enjoying herself. Inspired by a conversation with Kelly's parents about Hindu marriage customs, Michael publicly proposes to Carol (Nancy Carell). Uncomfortable, Carol declines his offer and leaves the room. She later explains that this is only their ninth date, and drives home, leaving Michael to find a ride.
Meanwhile, in Stamford, Jim Halpert (John Krasinski) decides to bike to work. Working late, Andy Bernard (Ed Helms), Karen Filippelli (Rashida Jones), and Jim turn a late night of paperwork into an excuse to drink, although Karen, unbeknownst to Jim and Andy, pours her shots into her wastebasket. Pam sends a text message to Jim, who does not answer because he is passed out on his desk. At the end of the evening, Michael mistakenly thinks that he and Pam have a connection, and is rejected when he tries to kiss her. She gives him a ride home on the condition that he sit in the back seat. Jim tries to bicycle home but is too drunk and an amused Karen gives Jim a ride.
Production [ edit ]
Writing and filming [ edit ]
Mindy Kaling drew upon her own cultural background to write the episode.
"Diwali" was written by Mindy Kaling and directed by Miguel Arteta. While the episode is partly set at West Scranton High School,[1] filming actually took place at Grant High School in Los Angeles, the same building where Freaks and Geeks was shot. That series' co-creator, Paul Feig, has directed several episodes of The Office, including "Email Surveillance".[2]
Kaling used her own Indian cultural background as inspiration when writing the episode, which portrays the Indian festival of Diwali. She and her friend, My Name Is Earl writer Vali Chandrasekaran, had held a Diwali party in 2005 for the cast of their two shows. According to Kaling, NBC was "so excited. They were like, 'What the hell is this holiday? I've never heard of it'."[3] The following year, while discussing the possible creation of a Halloween episode, she and executive producer Greg Daniels agreed to instead create one that was Diwali-themed, as the two holidays fall near each other.[4]
When it came time to write the episode however, Kaling had to perform a Google search, as she had forgotten many details. She explained, "I was a little embarrassed with how little I knew about it. I'm Hindu, but I'm not really a practicing Hindu, so I had to do a lot of research."[5] Kaling was pleased to learn more about her culture, at first being concerned that Daniels "and people I work with were a little antsy about assigning the Hindu writer the Indian episode. I didn't want to feel like they pigeonholed me, but I felt like I'd done enough episodes that it was okay."[5] She found that making the episode "was kind of the perfect meeting of being the child of immigrants and writing for a comedy show."[6] The series writers have since joked that they should do a Diwali-themed episode every year in the same vein as the Christmas specials.[6]
When creating the story, Kaling knew that "a lot of stuff has to happen" in the midst of the festival, including Michael and Carol's relationship becoming "very roller-coastery." As with other series storylines, this development was decided upon before Kaling began writing the episode. She stated, "But it was always sort of built in that the peak of 'Diwali' would be this incredibly romantic overture [and its uncomfortable aftermath]."[4] Kaling was pleased that much of "Diwali" took place at Stamford, especially as she was able to explore the contrasts between the two offices.[5] She also liked that the episode contained "romance, people getting dressed up in costumes, lots of food, smooching and making out ... and little girls making fun of B.J. Novak."[5]
In her weekly blog for TV Guide, actress Kate Flannery wrote that "Mindy shares that B.J. Novak talent of being able to write an incredibly funny episode ("Hot Girl," "The Injury") and be in it. Diwali is the Indian festival of lights. So this week Dunder-Mifflin gets a spicy taste of Indian culture in a hurry. Michael decides the office needs an outing to the local high school for the annual Indian Diwali party."[2]
Casting [ edit ]
Kaling cast her own mother and father, a doctor and an architect respectively, as the parents of her character.[2][7] Kaling was proud of their acting, though she admitted that their presence on set was at first "mortifying." She commented, "Of course I sort of lapsed back into that pre-teen attitude of every single thing my parents do embarrasses me. But they were great about it. They were such pros – they had all these scenes with Steve Carell and they were completely unafraid. They got along effortlessly."[5] Kaling injected elements of Romeo and Juliet and Pride and Prejudice into her character's relationship with Ryan, with her parents attempting to persuade her to date an Indian doctor. She remarked, "Kelly's family has, like, nine daughters, and if only they could marry them off it would be wonderful. But Kelly is the oldest and most idiotic of them all."[4]
Though he wanted to avoid becoming an actor, Chandrasekaran was cast by Kaling as the "suitable boy" Kelly's parents want her to marry. Chandrasekaran later stated, "I asked Mindy to write as few lines as possible. Acting is not a skill that I have particularly a lot of."[8] Other guest stars included Creed Bratton as Creed Bratton, Charles Esten as Josh Porter, Ed Helms as Andy Bernard, and Rashida Jones as Karen Filippelli.[1] Nancy Carell, Steve Carell's wife, reprised her role as Carol Stills, the girlfriend of his character.[5]
Deleted scenes [ edit ]
The third season DVD contained a number of deleted scenes, including Angela complaining about attending the Diwali party, Michael hoping that the office does not embarrass themselves, Kelly explaining that Diwali is an "Indian Halloween" to Michael, an initially angry Jan being supportive about the office attending the festival, Michael explaining to the office that Ben Kingsley liberated India and then became an actor, Carol meeting Ryan and Dwight, and Angela leaving the party with Dwight.[9]
Reception [ edit ]
"Diwali" first aired on November 2, 2006 in the United States on NBC during November sweeps week.[10][11] According to Nielsen Media Research, an estimated 8.8 million viewers watched the episode, and it earned a 4.2/10 ratings share among adults aged 18 to 49. In other words, it was seen by 4.2 percent of all 18- to 49-year-olds, and 10 percent of all 18- to 49-year-olds watching television at the time of the broadcast. This was the series' highest rating since the season premiere. The episode also ranked first in its timeslot among both adults and men aged 18 to 34.[12]
"Diwali" received generally positive reviews from television critics. Entertainment Weekly columnist Abby West thought the episode "hit all the show's major buttons," focusing on Michael while also furthering the Jim/Karen and Pam/Roy storylines as well as showcasing the other characters.[11] West added that "Kaling gets what makes the show work, and she pulled it together while displaying great timing."[11] AOL TV's Michael Sciannamea considered the episode "a classic," though he thought that "the Stamford scenes don't seem to add much to the story other than the burgeoning flirtation between Jim and Karen."[13] Sciannamea concluded that it was "a terrific episode. The scenes inside the hall were hysterical, especially the dancing."[13]
Brian Zoromski of IGN rated "Diwali" 9.3 out of 10, an indication of an "amazing" episode. He wrote that "Michael completely stole the episode with a truly dizzying number of uncomfortable situations. Daily Show vet Steve Carell has become the king of the uncomfortable moment, throwing himself completely into situations viewers know won't end well."[14] Like West, Zoromski also appreciated the other characters' moments, such as Angela's "xenophobic" comments during the festivities.[14] Television Without Pity gave the episode an A grade.[15]
Cultural impact [ edit ]
As a result of this episode, The Office became the first American comedy series to depict the Diwali holiday.[5] Lauren Markoe of The Huffington Post has credited the episode with helping to introduce Diwali to the American public, writing that it "represents perhaps the brightest spotlight ever shone on Diwali in the United States."[16] In their 2012 book Diversity in U.S. Mass Media, Catherine A. Luther, Carolyn Ringer Lepre, and Naeemah Clark noted that "Diwali" "introduces the audience to Indian dancing and singing, a tradition rarely seen on television."[17] |
<reponame>butyr/leetcode-blind-75-questions
from typing import List
import pytest
from .solution import Solution
@pytest.mark.parametrize(
    "inputs, expected",
    [
        (
            ["eat", "tea", "tan", "ate", "nat", "bat"],
            [["bat"], ["nat", "tan"], ["ate", "eat", "tea"]],
        ),
    ],
)
def test_solution(inputs, expected):
    # Group words that are anagrams of each other.  Both actual and expected
    # groupings are normalized (sorted within and across groups) before
    # comparison, since group and member ordering is unspecified.
    sut = Solution()
    actual = sut.group_anagram(inputs)
    assert normalize(actual) == normalize(expected)
def normalize(anagrams: List[List[str]]) -> List[List[str]]:
    """Return a canonical form of an anagram grouping.

    Words inside each group are sorted, then the groups themselves are
    sorted, so two groupings compare equal regardless of ordering.
    """
    return sorted(sorted(group) for group in anagrams)
|
/*!
 * Initializes the MCP23S17 given its HW selected address, see datasheet for
 * Address selection.
 * @param address Selected address (0 to 7); larger values are clamped to 7
 * @param ss the slave select (chip select) pin
 */
void TeelSys_MCP23S17::begin(uint8_t address, uint8_t ss) {
  // Only 3 address pins exist, so clamp to the highest valid address.
  if (address > 7) {
    address = 7;
  }
  _address = address;
  _ss = ss;
  _ss_default = false;
#ifdef SS
  // If the caller chose the platform's default SS pin, let the SPI library
  // manage it instead of driving it manually.
  if(_ss == SS) {
    _ss_default = true;
  }
#endif
  if(!_ss_default){
    // Custom chip-select pin: configure it and deselect the device (HIGH).
    ::pinMode(_ss, OUTPUT);
    ::digitalWrite(_ss, HIGH);
  }
  SPI.begin();
  // Reset to a known state (register map shared with the MCP23017, hence the
  // MCP23017_* constant names): all pins inputs, interrupts disabled,
  // pull-ups disabled on both ports -- per the MCP23x17 datasheet defaults.
  writeRegister(MCP23017_IODIRA, 0xff);
  writeRegister(MCP23017_IODIRB, 0xff);
  writeRegister(MCP23017_GPINTENA, 0x00);
  writeRegister(MCP23017_GPINTENB, 0x00);
  writeRegister(MCP23017_GPPUA, 0x00);
  writeRegister(MCP23017_GPPUB, 0x00);
}
This week, Paul McCartney filed a lawsuit in a New York court against publisher Sony/ATV in an effort to regain his ownership stake in the Beatles publishing catalog in what could become one of the biggest legal struggles in recent music history. If successful, it would end a long and painful battle for McCartney over the ownership of his own songs, one that has involved everyone from early Beatles manager Brian Epstein to Michael Jackson, who bought the Beatles catalog in 1985 as part of a $47.5 million deal for ATV, a situation that has long been painful for McCartney.
Though the fight has been ongoing for decades, it can be confusing to track the ownership of one of the most valuable catalogs in music history, and particularly how one of its main contributors was cut out of his own creations. Below is a brief timeline of the long and winding ownership tale of the Beatles catalog over the years.
1963: In March 1963, the Beatles' debut album Please Please Me was officially released, and Epstein sought a publisher for the songs written by McCartney, John Lennon, George Harrison and Ringo Starr. The company that resulted was called Northern Songs, majority-owned by publisher Dick James with Epstein, Lennon and McCartney, with the latter two songwriters owning 20 percent of the business apiece.
1965: Northern Songs became a public company, with Lennon and McCartney each owning a 15 percent stake and Harrison and Starr splitting a small percentage. Harrison later wrote 1968's "Only a Northern Song" about his dissatisfaction with the diminished cut he received in the deal.
1969: After relations between the Beatles and James deteriorated, James sold his stake in Northern Songs to ATV Music, owned by Lew Grade, and despite Lennon and McCartney's attempts to offer a counter bid, ATV gained control of the catalog. Later that year, the duo sold their remaining shares to ATV, leaving them without a stake in the publishing of their own songs (they both controlled their own respective songwriting shares).
1985: ATV Music, having been acquired by Robert Holmes a Court, was put up for sale. Michael Jackson, who had famously been told about the value of publishing by McCartney during the sessions for their 1982 collaboration "Say, Say, Say," purchased ATV's 4,000-song catalog for $47.5 million, becoming the owner of the approximately 250 Lennon-McCartney songs, as well as tracks by Bruce Springsteen, the Rolling Stones, Elvis Presley and more. McCartney, to say the least, was not pleased.
1995: Amid reported financial issues, Jackson sold half of ATV to Sony for approximately $100 million, and together the two formed Sony/ATV Music Publishing, with Jackson and Sony each owning 50 percent of the company.
2006: With financial problems mounting and a potential bankruptcy appearing imminent, Sony negotiated a deal on Jackson's behalf to reduce loan payments on his debts, and as part of the negotiations, retained an option to purchase 50 percent of Jackson's 50 percent ownership in the future, which would give Sony 75 percent ownership of the catalog. According to the New York Times, the catalog was valued at the time around $1 billion, and if Jackson had gone bankrupt, his share of the company could have gone to auction in the proceedings that followed.
2009: Following Jackson's sudden death at the age of 50, his share of the catalog came under the control of his estate, run by Jackson's attorney John Branca and industry veteran John McClain.
2016: After announcing their intention to trigger their purchase option of Jackson's stake in the company, Sony officially agreed to buy out the Jackson estate's full 50 percent of Sony/ATV for $750 million, making Sony the sole owner of the Lennon-McCartney catalog as well as Sony/ATV's 750,000 songs.
2018?: McCartney's lawsuit this week was filed with an eye on the U.S. Copyright Act of 1976, which allowed songwriters to retain the publishers' share of their copyrighted works released before 1978 after a 56-year period (comprised of two 28-year terms); for the earliest Beatles songs, that term will be up in 2018, with the later songs eligible by 2026. That reversion back to McCartney is not assured, however; Duran Duran lost a similar suit against Sony in December.
***One additional note about Lennon's side of the issue: A provision of that law states that if the songwriter were to die within the first 28-year period, the writer's heirs would be eligible to recapture that publishing share at the end of that first period, which in the Beatles' case would be 1990. Lennon died in 1980, and sources told Billboard in 2009 that Sony cut a deal with his wife Yoko Ono prior to the expiration of that period to retain ownership of Lennon's share for the duration of the copyright period, which covers the 70 years after the owner's death; in this case, 2050. |
Extended Space Vector Method for Calculation of Induction Motor Parameters
Abstract In this article, a novel method for the determination of spatial vectors of multi-phase windings is proposed. The proposed method is applied to squirrel-cage induction motors and it allows for the calculation of the rotor current space vectors with any slot number (even or odd), which cannot be determined by classical expressions. On the basis of current space vectors and current flux linkages, the proposed method determines the flux linkage phasors. The current and flux linkage phasors of the stator and rotor make it possible to determine the leakage and magnetizing inductances of the rotor and stator for any operating point on the basis of finite-element analysis of the motor model without any further calculations. The new method, besides the motor parameters, allows determination of the motor torque components developed by higher harmonic components of current, which significantly affect the resultant motor torque. The validity of the proposed method is verified by comparison of motor parameters determined from finite-element analysis with the parameters determined by running no-load and locked rotor tests on an actual motor.
export type Identity<T> = (t: T) => T;
export const identity = <T>(t: T) => t; |
<gh_stars>1-10
use ::toyunda_player::playlist::Playlist;
use ::toyunda_player::playing_state::PlayingState;
/// Serializable snapshot of the Toyunda player's overall state.
#[derive(Debug,Serialize)]
pub struct State {
    /// The current playlist.
    pub playlist: Playlist,
    /// The player's current playing state (variants defined in
    /// `playing_state.rs`).
    pub playing_state: PlayingState,
    /// Whether subtitles are currently displayed.
    pub display_subtitles: bool,
    /// Optional flag to quit once playback finishes; omitted from the
    /// serialized output when `None` (see the serde attribute below).
    #[serde(skip_serializing_if="Option::is_none")]
    pub quit_when_finished: Option<bool>,
    /// Whether to pause before advancing to the next playlist entry.
    pub pause_before_next: bool,
}
|
<reponame>carloszv/app<filename>src/providers/model-service.spec.ts
import { inject, TestBed } from '@angular/core/testing';
import { BaseRequestOptions, Http, HttpModule } from '@angular/http';
import { Storage } from '@ionic/storage';
import { Platform } from 'ionic-angular';
import { ImsBackendMock } from '../mocks/ims-backend-mock';
import { PlatformMock, StorageMock } from './../mocks/mocks';
import { AuthService } from './auth-service';
import { ImsService } from './ims-service';
import { ModelService } from './model-service';
import { SettingService } from './setting-service';
// Unit tests for ModelService. Each spec wires the service against
// ImsBackendMock (which replaces Angular's Http backend) and asserts that
// the service resolves metadata fields / table URLs from the mock's canned
// responses.
describe('Provider: ModelService', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      declarations: [],
      providers: [
        ModelService,
        ImsService,
        ImsBackendMock,
        BaseRequestOptions,
        AuthService,
        SettingService,
        // Replace platform/storage with lightweight test doubles.
        { provide: Storage, useClass: StorageMock },
        { provide: Platform, useClass: PlatformMock },
        {
          provide: Http,
          // Route all Http traffic through the mock backend.
          useFactory: (imsBackendMock, options) =>
            new Http(imsBackendMock, options),
          deps: [ImsBackendMock, BaseRequestOptions]
        }
      ],
      imports: [HttpModule]
    });
  });
  // Authenticate with the mock credential before every spec so the service
  // calls are authorized against the mocked backend.
  beforeEach(inject([AuthService, ImsBackendMock], (authService: AuthService, imsBackendMock: ImsBackendMock) => {
    authService.setCurrentCredential(imsBackendMock.credential);
  }));
  it('Should get image table metadata fields', inject([ModelService, ImsBackendMock], (modelService: ModelService, imsBackendMock: ImsBackendMock) => {
    modelService.getMetadataFieldsOfImageTable(imsBackendMock.modelArchiveName).subscribe(
      metadataTableFields => expect(metadataTableFields).toEqual(imsBackendMock.modelFields),
      fail);
  }));
  it('Should get parent image table metadata fields', inject([ModelService, ImsBackendMock], (modelService: ModelService, imsBackendMock: ImsBackendMock) => {
    modelService.getMetadataFieldsOfParentImageTable(imsBackendMock.modelArchiveName).subscribe(
      metadataTableFields => expect(metadataTableFields).toEqual(imsBackendMock.parentImageModelFields),
      fail);
  }));
  it('Should get link to image table metadata fields', inject([ModelService, ImsBackendMock], (modelService: ModelService, imsBackendMock: ImsBackendMock) => {
    modelService.getModelImageTableUrl(imsBackendMock.modelArchiveName).subscribe(
      url => expect(url).toEqual(imsBackendMock.modelImageTableFieldsUrl),
      fail);
  }));
  it('Should get link to parent image table metadata fields', inject([ModelService, ImsBackendMock], (modelService: ModelService, imsBackendMock: ImsBackendMock) => {
    modelService.getModelParentImageTableUrl(imsBackendMock.modelArchiveName).subscribe(
      url => expect(url).toEqual(imsBackendMock.modelTableFieldsUrl),
      fail);
  }));
  it('Should get model tables', inject([ModelService, ImsBackendMock], (modelService: ModelService, imsBackendMock: ImsBackendMock) => {
    modelService.getModelTables(imsBackendMock.modelArchiveName).subscribe(
      modelTables => expect(modelTables).toEqual(imsBackendMock.modelTables),
      fail);
  }));
  it('Should get link to model tables', inject([ModelService, ImsBackendMock], (modelService: ModelService, imsBackendMock: ImsBackendMock) => {
    modelService.getModelArchiveUrl(imsBackendMock.modelArchiveName).subscribe(
      url => expect(url).toEqual(imsBackendMock.modelTablesUrl),
      fail);
  }));
});
|
<reponame>TGITS/programming-workouts<filename>exercism/python/darts/darts.py
from math import sqrt
def score(x, y):
    """Return the dart score for a throw that lands at (x, y).

    Scoring rings, by distance d from the origin:
    d > 10 -> 0 (missed the board), 5 < d <= 10 -> 1 (outer circle),
    1 < d <= 5 -> 5 (middle circle), d <= 1 -> 10 (inner circle).

    The distance is computed once instead of once per comparison, as the
    value is loop-invariant across the ring checks.
    """
    distance = distance_from_origin(x, y)
    if distance > 10:
        return 0
    elif distance > 5:
        return 1
    elif distance > 1:
        return 5
    else:
        return 10


def distance_from_origin(x, y):
    """Return the Euclidean distance from the origin to the point (x, y)."""
    return sqrt(x**2 + y**2)
|
<gh_stars>1-10
export class Meeting {
constructor(
public time: string,
public date: string,
public placeName: string,
public streetAddress: string,
public cityState: string,
public notes: string
) {}
}
|
<reponame>leesangmin144/length-extension-attack<gh_stars>0
"""
* Filename: sha256.py
* Author: Akif-G (mehmetgultekin At sabanciuniv.edu)
* Copyright:
* Disclaimer: This code is presented "as is" without any guarantees.
* Details: Implementation of the SHA-256 hashing algorithm.
SHA-256 is one of the three algorithms in the SHA2
specification. The others, SHA-384 and SHA-512, are not
offered in this implementation.
For a better vision; Algorithm specification can be found here:
* http://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf
with some explanation:
* https://tools.ietf.org/html/rfc4634#section-6.2
Used Wikipedia as a source Of Pseudocode :
* https://en.wikipedia.org/wiki/SHA-2#Pseudocode
Block size:512 bit
Output size: 256 bit
! note: this algorithm is known as susceptible for Length extension attack
: This attack is valid when the data and resulting signiture,that is computed with a secret key, is known while secretkey -salt- is not known. In vulnerable databases' with secret validation methods, attacker can use this algorithm, which is also aimed to be implemented in this repository.
: For more
* https://en.wikipedia.org/wiki/Length_extension_attack
* https://blog.skullsecurity.org/2012/everything-you-need-to-know-about-hash-length-extension-attacks
"""
### Useful functions for bit/byte/binary/decimal/hexadecimal operations ###
def convertToBites(string):
    """Encode a text string (UTF-8) and return its bytes as a list of ints.

    Not part of the hashing algorithm itself; the rest of the module works
    on this list-of-byte-values representation for explanation ease.
    """
    return list(string.encode())
def wordConverter(arrayOfElems):
    """Pack a sequence of byte values (big-endian order) into one integer.

    Given four 8-bit values this yields a 32-bit word; an empty sequence
    yields 0. Not part of the hashing algorithm proper — just a helper for
    the list-of-bytes representation used throughout this module.
    """
    word = 0
    for byte in arrayOfElems:
        word = (word << 8) + byte
    return word
def Lengthwith64bit(Length):
    """Return ``Length`` encoded as a big-endian 64-bit quantity, as a list
    of 8 integer byte values (the form the rest of this module works with).

    Used by ``padding()`` to append the message bit-length, as SHA-256's
    padding rule requires.

    Raises:
        ValueError: if ``Length`` is negative or does not fit in 64 bits.

    Bug fixed: the original guard compared ``len(bin(Length))`` (the number
    of bits, at most ~64) against ``2**64``, so it could never trigger and
    oversized lengths were silently mis-encoded.
    """
    if not 0 <= Length < 2**64:
        raise ValueError('value is bigger than 2**64')
    # int.to_bytes produces exactly the 8 big-endian bytes the manual
    # bit-string slicing used to reconstruct.
    return list(Length.to_bytes(8, 'big'))
### MACRO FUNCTIONS ####
# these functions are going to be used in hashing process. They are not a part of the Hashing algorithm, but Can be assumed as core elements of Sha256
block_size = 64
digest_size = 32


def CH(x, y, z):
    """SHA-256 "choose" function.

    choose(x, y, z) = (x AND y) XOR ((NOT x) AND z)

    For each bit position the result takes y's bit where x has a 1 and z's
    bit where x has a 0 — x acts as a per-bit selector between y and z.
    """
    selected_from_y = x & y
    selected_from_z = (~x) & z
    return selected_from_y ^ selected_from_z
def MAJ(x, y, z):
    """SHA-256 "majority" function.

    majority(x, y, z) = (x AND y) XOR (x AND z) XOR (y AND z)

    Each result bit equals the majority value of the three corresponding
    input bits (set iff at least two of the three are set).
    """
    xy = x & y
    xz = x & z
    yz = y & z
    return xy ^ xz ^ yz
def ROTR(n, x):
    """Circular right-rotation of the 32-bit word ``x`` by ``n`` positions.

    rotateRight(n, x) = (x >> n) OR (x << (32 - n)), reduced to 32 bits.

    Args:
        n: rotation amount; must satisfy 0 <= n < 32.
        x: a 32-bit word (0 <= x < 2**32).

    Raises:
        ValueError: if ``n`` is outside [0, 32).

    Bug fixed: the original wrapped the expression in try/except, but for
    n >= 32 Python's shifts raise nothing — the guard never fired and a
    silently wrong rotation was returned. The range is now checked
    explicitly before computing.
    """
    if not 0 <= n < 32:
        raise ValueError('n should be less than 32 in sha256 for RotateRight %s()' % (n))
    return ((x >> n) | (x << (32 - n))) & 0xFFFFFFFF
def SHR(n, x):
    """Logical right shift of the 32-bit word ``x`` by ``n`` positions.

    SHR^n(x) = x >> n, defined by the spec for a w-bit (digest_size) word
    and an integer n with 0 <= n < w.

    Raises:
        ValueError: if ``n`` is outside [0, 32).

    Bugs fixed: the original try/except never fired for n >= 32 (shifting
    raises nothing there), and its error message said "RotateRight" —
    copy-pasted from ROTR. The range is now checked explicitly and the
    message names the right operation.
    """
    if not 0 <= n < 32:
        raise ValueError('n should be less than 32 in sha256 for ShiftRight %s()' % (n))
    return x >> n
def BSIG0(x):
    # BSIG0(x) = ROTR^2(x) XOR ROTR^13(x) XOR ROTR^22(x)
    # Big-sigma-0: applied to working variable 'a' in each compression round
    # (see T2 in Sha256.hash).
    return ROTR(2,x) ^ROTR(13,x)^ROTR(22,x)


def BSIG1(x):
    # BSIG1(x) = ROTR^6(x) XOR ROTR^11(x) XOR ROTR^25(x)
    # Big-sigma-1: applied to working variable 'e' in each compression round
    # (see T1 in Sha256.hash).
    return ROTR(6,x) ^ROTR(11,x)^ROTR(25,x)


def SSIG0(x):
    # SSIG0(x) = ROTR^7(x) XOR ROTR^18(x) XOR SHR^3(x)
    # Small-sigma-0: used while expanding the message schedule W (words 16..63).
    return ROTR(7,x) ^ROTR(18,x)^SHR(3,x)


def SSIG1(x):
    # SSIG1(x) = ROTR^17(x) XOR ROTR^19(x) XOR SHR^10(x)
    # Small-sigma-1: used while expanding the message schedule W (words 16..63).
    return ROTR(17,x) ^ROTR(19,x)^SHR(10,x)
class Sha256:
    """
    algorithm can be defined in two stages:

    preprocessing:
        Preprocessing involves padding a message, parsing the padded message into m-bit blocks, and setting initialization values to be used in the hash computation.

    hash computation:
        The hash computation generates a message schedule from the padded message and uses that schedule, along with functions, constants, and word operations to iteratively generate a series of hash values.

    The computed digest is exposed as the ``sha256`` attribute (a hex string).
    """

    ### PreProcessing ###
    def __init__(self, message, originHash=None, salt=None):
        # message: the data to hash; must be a str.
        # originHash: optional digest of a previously hashed message, given
        #   as 64 hex characters in a bytes-like object. When supplied, the
        #   eight 32-bit state words are restored from it so hashing resumes
        #   from that state (length-extension attack support).
        # salt: optional secret prefix; padding() prepends it to message.
        if message is not None:
            if type(message) is not str:
                raise TypeError('%s() argument 1 must be string, not %s' % (self.__class__.__name__, type(message).__name__))
        ##
        ##
        #Constants and H(0) : will be used in Hash Processing.
        #these can not be changed, offered by NSA:
        """
        constants: These words represent the first thirty-two bits of the fractional parts of the cube roots of the first sixtyfour prime numbers,in hex. See below:
        primes:
        2 3 5 7 11 13 17 19 23 29 31 37 41 43 47 53 59 61 67 71
        73 79 83 89 97 101 103 107 109 113 127 131 137 139 149 151 157 163 167 173
        179 181 191 193 197 199 211 223 227 229 233 239 241 251 257 263 269 271 277 281
        283 293 307 311
        """
        self._K = [0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
                   0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
                   0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
                   0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
                   0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
                   0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
                   0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
                   0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
                   0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
                   0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
                   0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
                   0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
                   0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
                   0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
                   0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
                   0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2]
        """
        For SHA-256, the initial hash value, H(0), consists of the following eight 32-bit words, in hex. These words were obtained by taking the first thirty-two bits of the fractional parts of the square roots of the first eight prime numbers.
        primes:
        2 3 5 7 11 13 17 19
        """
        self.initialHashValues = [0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
                                  0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19]
        ##
        ##
        """ Added code for length extension attack (1/3) """
        if(originHash is not None):
            import io
            originHash = io.BytesIO(originHash)
            # Restore each of the eight 32-bit state words from 8 hex
            # characters of the supplied digest.
            for idx in range(0, 8):
                self.initialHashValues[idx] = int(originHash.read(8), 16)
        # Bit length of the block already consumed by the original hash.
        # NOTE(review): hard-coded to one 512-bit block and added to the
        # message length unconditionally in padding(); fresh hashes (with
        # originHash=None) therefore deviate from standard SHA-256, and
        # secrets longer than one block are not handled — confirm this class
        # is used for the extension attack only.
        self._firstBlockLen = 512
        # Timeline of the processes:
        padded=self.padding(message,salt)
        #print("pad:",padded)
        parsed=self.parsing(padded)
        #print(parsed)
        """ Convert list to str (2/3) """
        # Join the eight hex words (with their '0x' prefix sliced off) into
        # the final digest string.
        # NOTE(review): hex() drops leading zeros, so any state word below
        # 0x10000000 contributes fewer than 8 characters and the digest can
        # come out shorter than 64 chars — verify against a reference
        # implementation. Also note hash() is re-run once per word here.
        tmp = ''
        for idx in range(0, 8):
            tmp += self.hash(parsed)[idx][2:]
        self.sha256 = tmp

    def padding(self, message=None, salt=None):
        """
        MD-compliant padding:
        The purpose of this padding is to ensure that the padded message is a multiple of 512 bits.
        For this padding, as a standard, message needs to have length 0<=L<2^64.
        Returns the padded message as a flat list of byte values.
        """
        if len(message)>=(2**64):
            raise ValueError('for padding, message length needs to be less than 2**64')
        #convert to bits (As list/array)
        if salt is not None:
            message=salt+message
        bites=convertToBites(message)
        #print("message",message)
        #print("inbits: " , bites)
        #add 1 to it
        Length=len(bites)*8  #since our input was consisted by 8-bits bytes (string)
        bites.append(int('10000000',2))
        #add "0" for smallest, non-negative number; while L = 448 mod(512),since last 64 is for length...
        while (len(bites)*8)%512 !=448:
            bites.append(0)
        #print("appended 0: " , bites,"\nLength",len(bites))
        #append the length of the message, in bites
        #note that: we used bin() to ease since there is no contribution of it for the understanding of the problem...
        #after converting it to bin we will know how many 0 we also need to use:
        #note that: implementations based on digest_size(32) would be more understandable in this sense, will consider.
        #LenghtArray=Lenthwith64bit(Length)
        """ Added code for length extension attack (3/3) """
        # The recorded bit length also covers the block(s) the original hash
        # already consumed (see _firstBlockLen in __init__).
        LenghtArray=Lengthwith64bit(Length + self._firstBlockLen)
        for i in LenghtArray:
            bites.append(i)
        #!! to be able to see the result of padding re-open this::
        #print('with length: ',len(bites),'\nresulting padding:',bites)
        #return bites
        return bites

    def parsing(self,message):
        """
        Parse the padded message into N 512-bit message blocks, M(1), , …, M(N) .
        where any M(n) is expressed as sixteen 32-bit words, the first 32 bits of message block i are denoted M(0(i)), the next 32 bits are M(n(i)) , and so on up to M(15(i)).
        Returns a list of N rows, each a list of sixteen 32-bit integer words.
        """
        #create 512 bit objects as Matrix , which any object includes 32 bites
        width=int(512/32)  #actually 16, as we previously described
        height= int((len(message)*8)/512)
        #print("width:",width,"\theight:",height)
        Matrix = [[0 for x in range(width)] for y in range(height)]
        #here we need to implement a conversion since our word length was 8 bites(bytes) in convertTobites...
        for column in range(len(Matrix)):
            for word in range(len(Matrix[column])):
                # Each 32-bit word is packed from 4 consecutive byte values.
                first=(column*16+word)*4
                Matrix[column][word]=wordConverter( [ message[first], message[first+1], message[first+2], message[first+3] ] )
        #parse every object into 16, 32-bit object
        #did already while convertToBites automatically
        #!! to be able to see the result of parsing re-open this::
        #print("resulting parsing:")
        #for i in Matrix:
        #    print(i)
        #return bit matrix
        return Matrix

    ### Hash Computation ###
    #Hashing algorithm that uses Macro Functions
    def hash(self, preprocessed):
        """
        Merkle–Damgard construction:
        Merkle Damgard construction is an hashing algorithm that builds collision resistant hash fuctions.
        Note that:
            * In some parts of the implementation here you will see ( & 0xFFFFFFFF ), this is implemented since we only want 8decimal, with overflows...
            * since digest_size -word length in bits- is 32 bits. 32bit=2^32=16^8, so we need 8 decimals in hexadecimals.
            * if there is any addition or extraction on words(aka. 32-bits digest sized...) you need to use it...
        Returns the final state as a list of eight hex strings (hex() form).
        """
        #for ease transfer the values of inital hashvalues to Array, which we also use both intermediate, and final values...
        H=self.initialHashValues.copy()
        messageBlocks=[]
        #preprocessed ( as array ) contains N many, 512-Bit structure (every particular one of them defined as "M" here). M contains 16 many 32-bit words.
        for M in range(len(preprocessed)):
            #preparing the Message Schedule W
            W=[0 for words in range(64)]
            for i in range(len(W)):
                if i <16: #0 to 15
                    W[i]=preprocessed[M][i]
                else: #15 to 63
                    # '&' binds looser than '+' in Python, so the mask below
                    # reduces the whole sum modulo 2**32, as intended.
                    W[i]=SSIG1(W[i-2]) + W[i-7] + SSIG0(W[i-15]) + W[i-16] & 0xFFFFFFFF
            #initialize 8 working variables , mentioned as a,b,c,d,e,f,g,h
            a= H[ 0 ]
            b= H[ 1 ]
            c= H[ 2 ]
            d= H[ 3 ]
            e= H[ 4 ]
            f= H[ 5 ]
            g= H[ 6 ]
            h= H[ 7 ]
            #Perform The Main Hash computation
            for t in range(64):
                T1 = h + BSIG1(e) + CH(e,f,g) + self._K[t] + W[t]
                T2 = BSIG0(a) + MAJ(a,b,c)
                h = g
                g = f
                f = e
                e = d + T1 & 0xFFFFFFFF
                d = c
                c = b
                b = a
                a = T1 + T2 & 0xFFFFFFFF
                #to be able to see every iteration as a list re-open this::
                #print(M,".",t,":\t", hex(a),hex(b),hex(c),hex(d),hex(e),hex(f),hex(g),hex(h))
            #Compute the intermediate hash value H(i):
            H[ 0 ]= a + H[ 0 ] &0xFFFFFFFF
            H[ 1 ]= b + H[ 1 ] &0xFFFFFFFF
            H[ 2 ]= c + H[ 2 ] &0xFFFFFFFF
            H[ 3 ]= d + H[ 3 ] &0xFFFFFFFF
            H[ 4 ]= e + H[ 4 ] &0xFFFFFFFF
            H[ 5 ]= f + H[ 5 ] &0xFFFFFFFF
            H[ 6 ]= g + H[ 6 ] &0xFFFFFFFF
            H[ 7 ]= h + H[ 7 ] &0xFFFFFFFF
            messageBlocks.append(H.copy())
        #After the above computations have been sequentially performed for all of the blocks in the message, the final output is calculated.
        #print(messageBlocks)
        lastHash=messageBlocks[len(messageBlocks)-1]
        asHex=[0 for i in range(len(lastHash))]
        #for print as hex
        for e in range(len(lastHash)):
            asHex[e]=hex(lastHash[e])
        return asHex
"""
### tests ###
#one block
test=Sha256("abc")
for i in test.sha256:
print(i)
#multi block
print("\t**********************************************************************************************************")
test2=Sha256("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq")
for i in test2.sha256:
print(i)
#long input len=1.000.000
print("\t**********************************************************************************************************")
a=""
for i in range(1000000):
a+="a"
test3=Sha256(a)
for i in test3.sha256:
print(i)
"""
|
/*!
 * @brief Initializes the DAC peripheral according to the specified parameters in the dacConfig.
 *
 * @param channel: the selected DAC channel.
 *                 This parameter can be:
 *                 @arg DAC_Channel_1: DAC Channel1 selected
 *                 @arg DAC_Channel_2: DAC Channel2 selected
 *
 * @param dacConfig: pointer to a DAC_Config_T structure that contains
 *                   the configuration information for the specified DAC channel.
 *
 * @retval None
 *
 * @note DAC_Channel_2 is only for APM32F072 and APM32F091 devices
 */
void DAC_Config(uint32_t channel, DAC_Config_T* dacConfig)
{
    uint32_t tmpcfg;

    /* Combine the per-channel configuration bits into a single word, then
     * shift it into the selected channel's bit field of CTRL. */
    tmpcfg = (dacConfig->trigger | dacConfig->waveGeneration |
              dacConfig->maskAmplitudeSelect | dacConfig->outputBuff);

    /* NOTE(review): this plain assignment overwrites the ENTIRE CTRL
     * register rather than read-modify-writing only the selected channel's
     * bits; configuring one channel clears the other channel's settings.
     * Confirm whether a masked update (clear channel field, then OR in
     * tmpcfg << channel) was intended. */
    DAC->CTRL = (tmpcfg << channel);
}
// ReadLogConfigFile read log config from local config file.
func ReadLogConfigFile(configFile string) (LoggerProperties, error) {
fileByte, err := ioutil.ReadFile(configFile)
if err != nil {
return LoggerProperties{}, err
}
lp := LoggerProperties{}
err = yaml.Unmarshal(fileByte, &lp)
if err != nil {
return LoggerProperties{}, err
}
return lp, nil
} |
/**
 * An application wide thread-local context that maintains the active URI for the current request.
 */
@ApplicationScoped
class WebContextHolder {

  /** Holds the {@link UriInfo} of the request being served on the current thread. */
  private final ThreadLocal<UriInfo> localUri = new ThreadLocal<>();

  /**
   * Binds the given {@link UriInfo} to the current thread.
   *
   * @param info the URI information of the request now being processed
   */
  void setUriInfo(UriInfo info) {
    localUri.set(info);
  }

  /**
   * Removes the thread-local binding. Call this when request processing ends so the previous
   * request's state does not leak onto a reused (pooled) thread.
   */
  void clear() {
    localUri.remove();
  }

  /**
   * A request scoped bean that returns the {@link UriInfo} of the request active in the current
   * thread.
   *
   * @return The {@link UriInfo} of the active request.
   */
  @RequestScoped
  @Produces
  UriInfo uriInfo() {
    return localUri.get();
  }
}
/*
* SonarQube
* Copyright (C) 2009-2023 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.ce.task.projectanalysis.period;
/**
 * Repository of the period used to compute differential measures.
 * Here are the steps to retrieve the period :
 * - Read the period property ${@link org.sonar.core.config.CorePropertyDefinitions#LEAK_PERIOD}
 * - Try to find the matching snapshots from the property
 * - If a snapshot is found, the period is added to the repository
 */
public interface PeriodHolder {

  /**
   * Finds out whether the holder contains a Period.
   *
   * @return {@code true} when a Period has been set on this holder
   * @throws IllegalStateException if the periods haven't been initialized
   */
  boolean hasPeriod();

  /**
   * Finds out whether the holder contains a Period with a date.
   *
   * @return {@code true} when the held Period carries a date
   * @throws IllegalStateException if the periods haven't been initialized
   */
  boolean hasPeriodDate();

  /**
   * Retrieve the period from the Holder.
   *
   * @return the held Period, never {@code null}
   * @throws IllegalStateException if the period hasn't been initialized
   * @throws IllegalStateException if there is no period
   */
  Period getPeriod();
}
|
// Run starts the scheduler's main loop and blocks until the scheduler is
// stopped (done channel) or its context is cancelled.
//
// It stores the etcd client, creates a priority queue rooted at
// common.QueuePrefix, and starts watchDone in a separate goroutine to
// observe completed work concurrently. The loop then calls process()
// whenever the ready channel fires.
func (s *Scheduler) Run(client *clientv3.Client) error {
	s.client = client
	s.queue = recipe.NewPriorityQueue(client, common.QueuePrefix)
	s.logger.Infof("starting main scheduler")

	// Observe completions concurrently with the scheduling loop.
	go s.watchDone()

	for {
		select {
		case <-s.ready:
			// Something became ready to schedule; try to dispatch work.
			s.process()
		case <-s.ctx.Done():
			// Context cancelled: propagate the cancellation reason.
			return s.ctx.Err()
		case <-s.done:
			// Graceful shutdown requested.
			return nil
		}
	}
}
<reponame>spaceleo1/slow-mandelbulb-explorer
#ifndef CUDAMAIN_H
#define CUDAMAIN_H

#include <SFML/Graphics/Color.hpp>
#include <iostream>

#include "global.h"
#include "vec3d.h"
#include "camera.h"

// Device-side framebuffer: W*H pixels, 4 bytes (RGBA channels) per pixel.
// NOTE(review): a non-inline variable definition in a header violates the
// one-definition rule if this header is included from more than one
// translation unit — confirm it is included exactly once.
sf::Uint8* d_pixels;

// Allocates the device framebuffer. Call once before the first update().
void init() {
    cudaMalloc(&d_pixels, W * H * 4 * sizeof(sf::Uint8));
}

// Renders one pixel per iteration into d_pixels; the trailing
// `id += gridDim.x * blockDim.x` makes this a grid-stride loop, so any
// launch configuration covers all n pixels.
// NOTE(review): `figure` is taken by reference and `d_camera` by pointer;
// both must refer to device-accessible memory for the kernel to read them.
// Verify at the call sites.
template<typename T>
__global__ void kernel(sf::Uint8* d_pixels, int n, T& figure, Camera* d_camera) {
    int id = blockIdx.x * blockDim.x + threadIdx.x;
    while (id < n) {
        // getPixel yields the four channel values of pixel (x, y) in a float4.
        float4 color = figure.getPixel(id % W, id / W, d_camera);
        d_pixels[4 * id] = color.x;
        d_pixels[4 * id + 1] = color.y;
        d_pixels[4 * id + 2] = color.z;
        d_pixels[4 * id + 3] = color.w;
        id += gridDim.x * blockDim.x;
    }
}

// Launches the render kernel, then copies the finished frame into the host
// buffer `pixels` (must hold at least W*H*4 bytes).
template<typename T>
void update(sf::Uint8* pixels, T& figure, Camera* d_camera) {
    kernel<<<blocks, threads>>>(d_pixels, W * H, figure, d_camera);
    cudaMemcpy((void**) pixels, d_pixels, W * H * 4 * sizeof(sf::Uint8), cudaMemcpyDeviceToHost);
}

// Frees the device framebuffer allocated by init().
void destruct() {
    cudaFree(d_pixels);
}

#endif
  /**
   * Adds a single value to the nluRelations list.
   *
   * <p>The backing list is lazily created on first use, so repeated calls
   * accumulate values in insertion order.
   *
   * @param nluRelations the nluRelations value to add; must not be null
   *   (validated via the SDK's {@code Validator.notNull})
   * @return the GetSearchMatchesOptions builder
   */
  public Builder addNluRelations(String nluRelations) {
    com.ibm.cloud.sdk.core.util.Validator.notNull(nluRelations,
      "nluRelations cannot be null");
    if (this.nluRelations == null) {
      this.nluRelations = new ArrayList<String>();
    }
    this.nluRelations.add(nluRelations);
    return this;
  }
from .ops import (
s_add_e,
s_mul_e,
s_sub_e,
s_add_e_sum,
s_mul_e_sum,
s_sub_e_sum,
s_add_e_mean,
s_mul_e_mean,
s_sub_e_mean,
s_add_t,
s_mul_t,
s_dot_t,
s_sub_t,
s_div_t,
)
|
// String returns a string representation of the TokenType.
func (t TokenType) String() string {
switch t {
case ErrorToken:
return "Error"
case TextToken:
return "Text"
case StartTagToken:
return "StartTag"
case EndTagToken:
return "EndTag"
case SelfClosingTagToken:
return "SelfClosingTag"
case CommentToken:
return "Comment"
case DoctypeToken:
return "Doctype"
}
return "Invalid(" + strconv.Itoa(int(t)) + ")"
} |
Effect of Phlogopite on Plant Growth Under Phosphorus Deficiency
The release of potassium (K) from a K-bearing trioctahedral mica (phlogopite, <0.25 mm), was investigated in pot experiments with pak-choi, maize and radish under different phosphorus (P) regimes. In the nutrient solution culture, where phlogopite was the sole K source, P-starvation led to a significant increase in the total K uptake in pak-choi and radish plants, and the net release of K from phlogopite was significantly stimulated. The x-ray diffractometry showed that the degree of vermiculitization induced by these two species under P deficiency was much stronger than that found in normal growth conditions. Similar results were obtained in a pot experiment with soil and application of phlogopite as K fertilizer, where three treatments were prepared: A control without K application, a KCl treatment (100 mg K kg−1 soil) and a phlogopite treatment (50 g phlogopite kg−1 soil, yielding 100 mg exchangeable-K kg−1 soil). Maize and pak-choi were grown in triplicated pots for 5 weeks. After 3 weeks, no further P was supplied to half of the pots of the phlogopite treatment. The shoot dry weight of plants grown in the control and phlogopite treatments were similar, but they were significantly lower than the KCl treatment. The total K uptake in plants grown in phlogopite treatments reached the 70% of that supplied with KCl. Therefore, phlogopite could be used as a slow-release K fertilizer to a certain extent. |
<reponame>int-e/vulkan
{-# language CPP #-}
-- No documentation found for Chapter "Promoted_From_VK_KHR_device_groupAndVK_KHR_bind_memory2"
module Vulkan.Core11.Promoted_From_VK_KHR_device_groupAndVK_KHR_bind_memory2 ( BindBufferMemoryDeviceGroupInfo(..)
, BindImageMemoryDeviceGroupInfo(..)
, StructureType(..)
, ImageCreateFlagBits(..)
, ImageCreateFlags
) where
import Foreign.Marshal.Alloc (allocaBytesAligned)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Cont (evalContT)
import Data.Vector (generateM)
import qualified Data.Vector (imapM_)
import qualified Data.Vector (length)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Word (Word32)
import Data.Kind (Type)
import Control.Monad.Trans.Cont (ContT(..))
import Data.Vector (Vector)
import Vulkan.CStruct.Utils (advancePtrBytes)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.Core10.FundamentalTypes (Rect2D)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO))
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO))
import Vulkan.Core10.Enums.ImageCreateFlagBits (ImageCreateFlagBits(..))
import Vulkan.Core10.Enums.ImageCreateFlagBits (ImageCreateFlags)
import Vulkan.Core10.Enums.StructureType (StructureType(..))
-- | VkBindBufferMemoryDeviceGroupInfo - Structure specifying device within a
-- group to bind to
--
-- = Members
--
-- If the @pNext@ list of
-- 'Vulkan.Core11.Promoted_From_VK_KHR_bind_memory2.BindBufferMemoryInfo'
-- includes a 'BindBufferMemoryDeviceGroupInfo' structure, then that
-- structure determines how memory is bound to buffers across multiple
-- devices in a device group.
--
-- = Description
--
-- The 'BindBufferMemoryDeviceGroupInfo' structure is defined as:
--
-- - @sType@ is the type of this structure.
--
-- - @pNext@ is @NULL@ or a pointer to a structure extending this
-- structure.
--
-- - @deviceIndexCount@ is the number of elements in @pDeviceIndices@.
--
-- - @pDeviceIndices@ is a pointer to an array of device indices.
--
-- If @deviceIndexCount@ is greater than zero, then on device index i the
-- buffer is attached to the instance of @memory@ on the physical device
-- with device index pDeviceIndices[i].
--
-- If @deviceIndexCount@ is zero and @memory@ comes from a memory heap with
-- the
-- 'Vulkan.Core10.Enums.MemoryHeapFlagBits.MEMORY_HEAP_MULTI_INSTANCE_BIT'
-- bit set, then it is as if @pDeviceIndices@ contains consecutive indices
-- from zero to the number of physical devices in the logical device, minus
-- one. In other words, by default each physical device attaches to its own
-- instance of @memory@.
--
-- If @deviceIndexCount@ is zero and @memory@ comes from a memory heap
-- without the
-- 'Vulkan.Core10.Enums.MemoryHeapFlagBits.MEMORY_HEAP_MULTI_INSTANCE_BIT'
-- bit set, then it is as if @pDeviceIndices@ contains an array of zeros.
-- In other words, by default each physical device attaches to instance
-- zero.
--
-- == Valid Usage
--
-- - #VUID-VkBindBufferMemoryDeviceGroupInfo-deviceIndexCount-01606#
-- @deviceIndexCount@ /must/ either be zero or equal to the number of
-- physical devices in the logical device
--
-- - #VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-01607# All
-- elements of @pDeviceIndices@ /must/ be valid device indices
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkBindBufferMemoryDeviceGroupInfo-sType-sType# @sType@ /must/
-- be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO'
--
-- - #VUID-VkBindBufferMemoryDeviceGroupInfo-pDeviceIndices-parameter# If
-- @deviceIndexCount@ is not @0@, @pDeviceIndices@ /must/ be a valid
-- pointer to an array of @deviceIndexCount@ @uint32_t@ values
--
-- = See Also
--
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data BindBufferMemoryDeviceGroupInfo = BindBufferMemoryDeviceGroupInfo
  { -- No documentation found for Nested "VkBindBufferMemoryDeviceGroupInfo" "pDeviceIndices"
    deviceIndices :: Vector Word32 }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BindBufferMemoryDeviceGroupInfo)
#endif
deriving instance Show BindBufferMemoryDeviceGroupInfo

-- Marshalling to the 32-byte, 8-byte-aligned C struct: sType at offset 0,
-- pNext (always NULL here) at offset 8, deviceIndexCount at 16, and the
-- pDeviceIndices pointer at 24.
instance ToCStruct BindBufferMemoryDeviceGroupInfo where
  withCStruct x f = allocaBytesAligned 32 8 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p BindBufferMemoryDeviceGroupInfo{..} f = evalContT $ do
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO)
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    -- The count is derived from the vector, never supplied by the caller.
    lift $ poke ((p `plusPtr` 16 :: Ptr Word32)) ((fromIntegral (Data.Vector.length $ (deviceIndices)) :: Word32))
    -- The indices array is allocated for the lifetime of the continuation.
    pPDeviceIndices' <- ContT $ allocaBytesAligned @Word32 ((Data.Vector.length (deviceIndices)) * 4) 4
    lift $ Data.Vector.imapM_ (\i e -> poke (pPDeviceIndices' `plusPtr` (4 * (i)) :: Ptr Word32) (e)) (deviceIndices)
    lift $ poke ((p `plusPtr` 24 :: Ptr (Ptr Word32))) (pPDeviceIndices')
    lift $ f
  cStructSize = 32
  cStructAlignment = 8
  pokeZeroCStruct p f = evalContT $ do
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO)
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    pPDeviceIndices' <- ContT $ allocaBytesAligned @Word32 ((Data.Vector.length (mempty)) * 4) 4
    lift $ Data.Vector.imapM_ (\i e -> poke (pPDeviceIndices' `plusPtr` (4 * (i)) :: Ptr Word32) (e)) (mempty)
    lift $ poke ((p `plusPtr` 24 :: Ptr (Ptr Word32))) (pPDeviceIndices')
    lift $ f

-- Unmarshalling: read the count at offset 16, then copy that many Word32s
-- from the pointer stored at offset 24 back into a Vector.
instance FromCStruct BindBufferMemoryDeviceGroupInfo where
  peekCStruct p = do
    deviceIndexCount <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32))
    pDeviceIndices <- peek @(Ptr Word32) ((p `plusPtr` 24 :: Ptr (Ptr Word32)))
    pDeviceIndices' <- generateM (fromIntegral deviceIndexCount) (\i -> peek @Word32 ((pDeviceIndices `advancePtrBytes` (4 * (i)) :: Ptr Word32)))
    pure $ BindBufferMemoryDeviceGroupInfo
             pDeviceIndices'

instance Zero BindBufferMemoryDeviceGroupInfo where
  zero = BindBufferMemoryDeviceGroupInfo
           mempty
-- | VkBindImageMemoryDeviceGroupInfo - Structure specifying device within a
-- group to bind to
--
-- = Members
--
-- If the @pNext@ list of
-- 'Vulkan.Core11.Promoted_From_VK_KHR_bind_memory2.BindImageMemoryInfo'
-- includes a 'BindImageMemoryDeviceGroupInfo' structure, then that
-- structure determines how memory is bound to images across multiple
-- devices in a device group.
--
-- = Description
--
-- The 'BindImageMemoryDeviceGroupInfo' structure is defined as:
--
-- - @sType@ is the type of this structure.
--
-- - @pNext@ is @NULL@ or a pointer to a structure extending this
-- structure.
--
-- - @deviceIndexCount@ is the number of elements in @pDeviceIndices@.
--
-- - @pDeviceIndices@ is a pointer to an array of device indices.
--
-- - @splitInstanceBindRegionCount@ is the number of elements in
-- @pSplitInstanceBindRegions@.
--
-- - @pSplitInstanceBindRegions@ is a pointer to an array of
-- 'Vulkan.Core10.FundamentalTypes.Rect2D' structures describing which
-- regions of the image are attached to each instance of memory.
--
-- If @deviceIndexCount@ is greater than zero, then on device index i
-- @image@ is attached to the instance of the memory on the physical device
-- with device index pDeviceIndices[i].
--
-- Let N be the number of physical devices in the logical device. If
-- @splitInstanceBindRegionCount@ is greater than zero, then
-- @pSplitInstanceBindRegions@ is an array of N2 rectangles, where the
-- image region specified by the rectangle at element i*N+j in resource
-- instance i is bound to the memory instance j. The blocks of the memory
-- that are bound to each sparse image block region use an offset in
-- memory, relative to @memoryOffset@, computed as if the whole image were
-- being bound to a contiguous range of memory. In other words,
-- horizontally adjacent image blocks use consecutive blocks of memory,
-- vertically adjacent image blocks are separated by the number of bytes
-- per block multiplied by the width in blocks of @image@, and the block at
-- (0,0) corresponds to memory starting at @memoryOffset@.
--
-- If @splitInstanceBindRegionCount@ and @deviceIndexCount@ are zero and
-- the memory comes from a memory heap with the
-- 'Vulkan.Core10.Enums.MemoryHeapFlagBits.MEMORY_HEAP_MULTI_INSTANCE_BIT'
-- bit set, then it is as if @pDeviceIndices@ contains consecutive indices
-- from zero to the number of physical devices in the logical device, minus
-- one. In other words, by default each physical device attaches to its own
-- instance of the memory.
--
-- If @splitInstanceBindRegionCount@ and @deviceIndexCount@ are zero and
-- the memory comes from a memory heap without the
-- 'Vulkan.Core10.Enums.MemoryHeapFlagBits.MEMORY_HEAP_MULTI_INSTANCE_BIT'
-- bit set, then it is as if @pDeviceIndices@ contains an array of zeros.
-- In other words, by default each physical device attaches to instance
-- zero.
--
-- == Valid Usage
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01633# At
-- least one of @deviceIndexCount@ and @splitInstanceBindRegionCount@
-- /must/ be zero
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-deviceIndexCount-01634#
-- @deviceIndexCount@ /must/ either be zero or equal to the number of
-- physical devices in the logical device
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-01635# All
-- elements of @pDeviceIndices@ /must/ be valid device indices
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-splitInstanceBindRegionCount-01636#
-- @splitInstanceBindRegionCount@ /must/ either be zero or equal to the
-- number of physical devices in the logical device squared
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-01637#
-- Elements of @pSplitInstanceBindRegions@ that correspond to the same
-- instance of an image /must/ not overlap
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-offset-01638# The @offset.x@
-- member of any element of @pSplitInstanceBindRegions@ /must/ be a
-- multiple of the sparse image block width
-- ('Vulkan.Core10.SparseResourceMemoryManagement.SparseImageFormatProperties'::@imageGranularity.width@)
-- of all non-metadata aspects of the image
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-offset-01639# The @offset.y@
-- member of any element of @pSplitInstanceBindRegions@ /must/ be a
-- multiple of the sparse image block height
-- ('Vulkan.Core10.SparseResourceMemoryManagement.SparseImageFormatProperties'::@imageGranularity.height@)
-- of all non-metadata aspects of the image
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-extent-01640# The
-- @extent.width@ member of any element of @pSplitInstanceBindRegions@
-- /must/ either be a multiple of the sparse image block width of all
-- non-metadata aspects of the image, or else @extent.width@ +
-- @offset.x@ /must/ equal the width of the image subresource
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-extent-01641# The
-- @extent.height@ member of any element of @pSplitInstanceBindRegions@
-- /must/ either be a multiple of the sparse image block height of all
-- non-metadata aspects of the image, or else @extent.height@ +
-- @offset.y@ /must/ equal the width of the image subresource
--
-- == Valid Usage (Implicit)
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-sType-sType# @sType@ /must/
-- be
-- 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO'
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-pDeviceIndices-parameter# If
-- @deviceIndexCount@ is not @0@, @pDeviceIndices@ /must/ be a valid
-- pointer to an array of @deviceIndexCount@ @uint32_t@ values
--
-- - #VUID-VkBindImageMemoryDeviceGroupInfo-pSplitInstanceBindRegions-parameter#
-- If @splitInstanceBindRegionCount@ is not @0@,
-- @pSplitInstanceBindRegions@ /must/ be a valid pointer to an array of
-- @splitInstanceBindRegionCount@
-- 'Vulkan.Core10.FundamentalTypes.Rect2D' structures
--
-- = See Also
--
-- 'Vulkan.Core10.FundamentalTypes.Rect2D',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
-- | Haskell analogue of @VkBindImageMemoryDeviceGroupInfo@.  The two counts
-- (@deviceIndexCount@, @splitInstanceBindRegionCount@) are derived from the
-- vector lengths during marshalling.
data BindImageMemoryDeviceGroupInfo = BindImageMemoryDeviceGroupInfo
  { -- | Corresponds to @pDeviceIndices@: one device index per physical
    -- device, or empty to select the defaults described above.
    deviceIndices :: Vector Word32
  , -- | Corresponds to @pSplitInstanceBindRegions@: N*N regions describing
    -- which parts of the image are attached to each memory instance.
    splitInstanceBindRegions :: Vector Rect2D
  }
  deriving (Typeable)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (BindImageMemoryDeviceGroupInfo)
#endif
deriving instance Show BindImageMemoryDeviceGroupInfo
-- C layout (size 48, alignment 8): 0 = sType, 8 = pNext,
-- 16 = deviceIndexCount, 24 = pDeviceIndices,
-- 32 = splitInstanceBindRegionCount, 40 = pSplitInstanceBindRegions
instance ToCStruct BindImageMemoryDeviceGroupInfo where
  withCStruct x f = allocaBytesAligned 48 8 $ \p -> pokeCStruct p x (f p)
  pokeCStruct p BindImageMemoryDeviceGroupInfo{..} f = evalContT $ do
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO)
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    lift $ poke ((p `plusPtr` 16 :: Ptr Word32)) ((fromIntegral (Data.Vector.length $ (deviceIndices)) :: Word32))
    -- both arrays are ContT-scoped so they stay alive while 'f' runs
    pPDeviceIndices' <- ContT $ allocaBytesAligned @Word32 ((Data.Vector.length (deviceIndices)) * 4) 4
    lift $ Data.Vector.imapM_ (\i e -> poke (pPDeviceIndices' `plusPtr` (4 * (i)) :: Ptr Word32) (e)) (deviceIndices)
    lift $ poke ((p `plusPtr` 24 :: Ptr (Ptr Word32))) (pPDeviceIndices')
    lift $ poke ((p `plusPtr` 32 :: Ptr Word32)) ((fromIntegral (Data.Vector.length $ (splitInstanceBindRegions)) :: Word32))
    -- Rect2D occupies 16 bytes per element
    pPSplitInstanceBindRegions' <- ContT $ allocaBytesAligned @Rect2D ((Data.Vector.length (splitInstanceBindRegions)) * 16) 4
    lift $ Data.Vector.imapM_ (\i e -> poke (pPSplitInstanceBindRegions' `plusPtr` (16 * (i)) :: Ptr Rect2D) (e)) (splitInstanceBindRegions)
    lift $ poke ((p `plusPtr` 40 :: Ptr (Ptr Rect2D))) (pPSplitInstanceBindRegions')
    lift $ f
  cStructSize = 48
  cStructAlignment = 8
  -- zero form still writes sType/pNext for pNext-chain validity
  pokeZeroCStruct p f = evalContT $ do
    lift $ poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO)
    lift $ poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
    pPDeviceIndices' <- ContT $ allocaBytesAligned @Word32 ((Data.Vector.length (mempty)) * 4) 4
    lift $ Data.Vector.imapM_ (\i e -> poke (pPDeviceIndices' `plusPtr` (4 * (i)) :: Ptr Word32) (e)) (mempty)
    lift $ poke ((p `plusPtr` 24 :: Ptr (Ptr Word32))) (pPDeviceIndices')
    pPSplitInstanceBindRegions' <- ContT $ allocaBytesAligned @Rect2D ((Data.Vector.length (mempty)) * 16) 4
    lift $ Data.Vector.imapM_ (\i e -> poke (pPSplitInstanceBindRegions' `plusPtr` (16 * (i)) :: Ptr Rect2D) (e)) (mempty)
    lift $ poke ((p `plusPtr` 40 :: Ptr (Ptr Rect2D))) (pPSplitInstanceBindRegions')
    lift $ f
instance FromCStruct BindImageMemoryDeviceGroupInfo where
  peekCStruct p = do
    deviceIndexCount <- peek @Word32 ((p `plusPtr` 16 :: Ptr Word32))
    pDeviceIndices <- peek @(Ptr Word32) ((p `plusPtr` 24 :: Ptr (Ptr Word32)))
    pDeviceIndices' <- generateM (fromIntegral deviceIndexCount) (\i -> peek @Word32 ((pDeviceIndices `advancePtrBytes` (4 * (i)) :: Ptr Word32)))
    splitInstanceBindRegionCount <- peek @Word32 ((p `plusPtr` 32 :: Ptr Word32))
    pSplitInstanceBindRegions <- peek @(Ptr Rect2D) ((p `plusPtr` 40 :: Ptr (Ptr Rect2D)))
    pSplitInstanceBindRegions' <- generateM (fromIntegral splitInstanceBindRegionCount) (\i -> peekCStruct @Rect2D ((pSplitInstanceBindRegions `advancePtrBytes` (16 * (i)) :: Ptr Rect2D)))
    pure $ BindImageMemoryDeviceGroupInfo
             pDeviceIndices' pSplitInstanceBindRegions'
instance Zero BindImageMemoryDeviceGroupInfo where
  zero = BindImageMemoryDeviceGroupInfo
           mempty
           mempty
|
<reponame>jrcichra/SoulMagic
package com.rainbowluigi.soulmagic.util;
/**
 * Mod-wide constants for Soul Magic.
 */
public class Reference {

    /** The mod identifier used for registries and resource locations. */
    public static final String MOD_ID = "soulmagic";

    /** Constants holder; not meant to be instantiated. */
    private Reference() {
    }
}
|
A monkey fell onto a transformer at the Gitaru hydroelectric power station in Kenya and caused a nationwide blackout on Tuesday.
KenGen, the operator of Gitaru dam and Kenya's largest electricity producer, posted a photo on Facebook of the dastardly monkey still perched atop the transformer. Using our crack zoological skills, it appears to be a vervet monkey, which is native to Kenya.
The monkey climbed onto the roof of the Gitaru power station and then dropped onto a transformer, tripping it. This caused the other transformers to overload and trip, which in turn caused the whole power station to go offline. The loss of 180MW from the hydro plant was enough to cause a national blackout, according to KenGen.
It isn't clear if the nationwide blackout was caused by a further cascade of transformer trips, or if it was more of a brownout as the grid tried to deal with a rapid reduction in power generation.
Power was restored to the nation four hours later—and, yes, the monkey survived and has now been taken in by the Kenya Wildlife Service.
KenGen says that its power installations are secured by electric fencing "which keeps away marauding animals," but presumably the little blighter found a way through it. The company apologised for the "isolated incident" and said that it's "looking at ways of further enhancing security at all our power plants." |
///Initialize by an array with array-length. "keys" is the array for
///initializing keys. "keyLength" is its length.
///
///This follows the reference MT19937 init_by_array() routine: the state is
///first seeded with a fixed scalar seed, then mixed with the key material
///using the Knuth-style multipliers 1664525 and 1566083941.
void MersenneTwister::Initialize(uint keys[], uint keyLength)
{
  // Reference seed value from the published init_by_array algorithm.
  const uint cInitialSeed = 19650218;
  Initialize(cInitialSeed);
  uint i = 1;
  uint j = 0;
  // First pass: iterate max(cN, keyLength) times so that every state word
  // and every key element is folded in at least once.
  uint k = cN > keyLength ? cN : keyLength;
  for(; k != 0; k--)
  {
    const uint h = i - 1;
    // Non-linear mix of the previous state word with the next key word.
    mValues[i] = (mValues[i] ^ ((mValues[h] ^ (mValues[h] >> 30)) * 1664525)) +
      keys[j] + j;
    mValues[i] &= cAllMask; // keep to 32 bits (cAllMask presumably 0xffffffff -- confirm)
    ++i;
    ++j;
    if(i >= cN)
    {
      // Wrap around the state array, carrying the last word into slot 0.
      mValues[0] = mValues[cN - 1];
      i = 1;
    }
    if(j >= keyLength)
    {
      j = 0; // reuse the key array cyclically
    }
  }
  // Second pass: cN-1 further mixing rounds without the key material.
  for(k = cN - 1; k != 0; --k)
  {
    const uint h = i - 1;
    mValues[i] = (mValues[i] ^ ((mValues[h] ^ (mValues[h] >> 30)) * 1566083941)) - i;
    mValues[i] &= cAllMask;
    ++i;
    if(i >= cN)
    {
      mValues[0] = mValues[cN - 1];
      i = 1;
    }
  }
  // Force the MSB of the first word so the state is guaranteed non-zero.
  mValues[0] = cUpperMask;
}
<reponame>betagouv/preuve-covoiturage
// $schema: 'http://json-schema.org/draft-07/schema#',
// $id: 'https://geojson.org/schema/MultiPolygon.json',
// title: 'GeoJSON MultiPolygon',
// JSON Schema (draft-07) for a GeoJSON MultiPolygon geometry.
// Nesting: polygons -> linear rings -> positions; each ring requires at
// least 4 positions (RFC 7946; ring closure itself is not validated here).
export const MultiPolygonSchema = {
  type: 'object',
  required: ['type', 'coordinates'],
  properties: {
    type: {
      type: 'string',
      enum: ['MultiPolygon'],
    },
    coordinates: {
      type: 'array',
      items: {
        // one polygon: an array of linear rings
        type: 'array',
        items: {
          // one linear ring: at least 4 positions
          type: 'array',
          minItems: 4,
          items: {
            // one position: at least [longitude, latitude]
            type: 'array',
            minItems: 2,
            items: {
              type: 'number',
            },
          },
        },
      },
    },
    // optional bounding box: at least [west, south, east, north]
    bbox: {
      type: 'array',
      minItems: 4,
      items: {
        type: 'number',
      },
    },
  },
};
|
import { useSelector } from "react-redux";
import styled from "styled-components";
import {
EventDto,
EventType,
EventPayloadEntity,
} from "../../database/chatLogReducer";
import { Group, Player } from "../../database/GroupReducer";
import { RootState } from "../../database/Store";
import { System } from "../../database/SystemReducer";
import { getEntityTileConfig } from "../../services/SystemService";
import EntityTile from "../generic/EntityTile";
// Props for a single chat message row.
interface $MessageProps {
  event: EventDto; // the chat event to render
  visible: boolean; // NOTE(review): not referenced in the component body -- dead prop?
  opacity: number; // fade level (forwarded to MessageContainer)
}

// Renders one chat message: the sender's avatar with a hover tooltip
// containing either the message text or an entity tile, depending on the
// event type.
const Message = ({ event, visible, opacity }: $MessageProps) => {
  const system: System = useSelector((state: RootState) => state.system);
  const liveGroup: Group = useSelector((state: RootState) => state.group);
  // Resolve the sender by matching the event's uuid against the group's
  // players; fall back to the local user ("me") when no player matches.
  const foundPlayer: Player[] = liveGroup.players.filter(
    (player) => player.peerAddress === event.uuid
  );
  const playerImg: string =
    foundPlayer.length > 0 ? foundPlayer[0].pic : liveGroup.me.pic;
  const playerName: string =
    foundPlayer.length > 0 ? foundPlayer[0].name : liveGroup.me.name;

  // Chooses the tooltip content based on the event type.
  const renderMessageContentType = () => {
    switch (event.type) {
      case EventType.Message:
        // plain text chat message
        return (
          <>
            <b>{playerName}:</b> {event.payload}
          </>
        );
      case EventType.Entity:
        // shared entity (e.g. an item or monster) rendered as a tile
        const eventPayload: EventPayloadEntity =
          event.payload as EventPayloadEntity;
        return (
          <EntityTile
            configs={Object.getOwnPropertyNames(
              getEntityTileConfig(system, eventPayload.entityName)
            )}
            entity={eventPayload.entity}
            entityName={eventPayload.entityName}
            isChatTile
          />
        );
      default:
        return <>default</>;
    }
  };

  return (
    <MessageContainer opacity={opacity}>
      <PlayerImg src={playerImg}></PlayerImg>
      <Tooltip>{renderMessageContentType()}</Tooltip>
    </MessageContainer>
  );
};
export default Message;
// Tooltip bubble shown to the left of the avatar while the message row is
// hovered (MessageContainer's :hover rule makes it visible).
const Tooltip = styled.span`
  /* was 'visibility: "hidden";' -- a quoted value is invalid CSS and is
     dropped by the browser, so the tooltip was never actually hidden,
     only faded via opacity */
  visibility: hidden;
  width: fit-content;
  max-width: 300px;
  padding: 5px;
  margin-top: -5px;
  z-index: 200;
  position: absolute;
  right: 80px;
  font-size: 16px;
  text-align: center;
  background-color: ${({ theme }) => theme.highlight};
  color: #fff;
  border-radius: 10px;
  opacity: 0;
  transition: 0.3s;
  /* right-pointing arrow attached to the bubble */
  &::after {
    content: "";
    position: absolute;
    top: 50%;
    left: 100%;
    margin-top: -5px;
    border-width: 5px;
    border-style: solid;
    border-color: transparent transparent transparent
      ${({ theme }) => theme.highlight};
  }
`;
// Row wrapper: fades the whole message according to its age-driven opacity
// (clamped to a 0.02 floor so the row stays barely visible and hoverable),
// and reveals the Tooltip while hovered.
const MessageContainer = styled.div<{ opacity: number }>`
  opacity: ${(props) => (props.opacity <= 0 ? 0.02 : props.opacity)};
  width: 100%;
  display: flex;
  justify-content: center;
  align-items: center;
  &:hover {
    opacity: 1;
  }
  &:hover ${Tooltip} {
    visibility: visible;
    opacity: 1;
  }
`;

// Circular avatar rendered from the player's picture URL.
const PlayerImg = styled.div<{ src: string }>`
  height: 50px;
  width: 50px;
  border-radius: 50px;
  border: ${({ theme }) => theme.highlight} 2px solid;
  background-image: url(${(props) => props.src});
  background-repeat: no-repeat;
  background-position: center; /* Center the image */
  background-size: cover;
`;
|
/**
 * Handles key presses in the filter viewer: pressing Delete with no
 * modifier keys removes the selected filters.
 *
 * @param event
 *            the key event from the filter viewer
 */
private void handleFilterViewerKeyPress(KeyEvent event) {
    if (event.character == SWT.DEL && event.stateMask == 0) {
        removeFilters();
    }
}
/*
* Copyright (c) 2016 <NAME>
*
* This software may be modified and distributed under the terms
* of the MIT License. See the LICENSE file for details.
*/
package com.achow101.bittipaddr.client;
import com.achow101.bittipaddr.shared.AddrReq;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.user.client.ui.*;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.TextArea;
/**
* Entry point classes define <code>onModuleLoad()</code>
*/
public class bittipaddr implements EntryPoint {
private boolean edited = false;
/**
* This is the entry point method.
*/
public void onModuleLoad() {
// Add textboxes
TextBox unitLookupBox = new TextBox();
TextBox unitPassBox = new TextBox();
TextBox xpubBox = new TextBox();
xpubBox.setWidth("600");
TextArea addrsArea = new TextArea();
addrsArea.setWidth("300");
addrsArea.setHeight("300");
// Checkbox to enable editing with lookup
CheckBox submitEdit = new CheckBox("Submit changes after clicking button");
CheckBox allowEdit = new CheckBox("Allow editing the unit");
// Add text elements
HTML output = new HTML();
// Create Button
Button submitBtn = new Button("Submit");
submitBtn.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event) {
// Clear previous output
output.setHTML("");
// Get entered data and some prelim checking
String xpub = xpubBox.getText();
String pass = unitPassBox.getText();
String unit = unitLookupBox.getText();
String[] addrs = addrsArea.getText().split("\n");
if(!xpub.isEmpty() && !addrs[0].isEmpty() && unit.isEmpty() && pass.isEmpty())
{
output.setHTML("<p style=\"color:red;\">Cannot set both xpub and a list of addresses</p>");
return;
}
// Send to server
AddrReq req = new AddrReq();
if(!unit.isEmpty())
{
req.setId(unit);
req.setPassword(<PASSWORD>);
req.setEditable(allowEdit.getValue());
if(edited)
{
if(xpub.isEmpty())
{
output.setHTML("<p style=\"color:red;\">Must have an xpub. Set as \"NONE\" (without quotes) if no xpub</p>");
return;
}
req.setEdited();
req.setAddresses(addrs);
req.setXpub(xpub.isEmpty() ? "NONE" : xpub);
}
}
else if(!xpub.isEmpty())
{
req = new AddrReq(xpub);
}
else if(addrs.length != 0)
{
req = new AddrReq(addrs);
}
bittipaddrService.App.getInstance().addAddresses(req, new AddAddrAsyncCallback(output, xpubBox, addrsArea));
}
});
// Add to html
RootPanel.get("submitBtn").add(submitBtn);
RootPanel.get("unitLookup").add(unitLookupBox);
RootPanel.get("unitPass").add(unitPassBox);
RootPanel.get("enterxpub").add(xpubBox);
RootPanel.get("enterAddrList").add(addrsArea);
RootPanel.get("completedReqOutput").add(output);
RootPanel.get("edit").add(submitEdit);
RootPanel.get("allowEdit").add(allowEdit);
}
private class AddAddrAsyncCallback implements AsyncCallback<String> {
private HTML outhtml;
private TextBox xpubBox;
private TextArea addrsArea;
public AddAddrAsyncCallback(HTML outhtml, TextBox xpubBox, TextArea addrsArea) {
this.outhtml = outhtml;
this.xpubBox = xpubBox;
this.addrsArea = addrsArea;
}
public void onSuccess(String result) {
// Check for editable, will begin with PLAIN keyword
if(result.startsWith("PLAIN"))
{
result = result.substring(result.indexOf("\n") + 1);
xpubBox.setText(result.substring(0, result.indexOf("\n")));
result = result.substring(result.indexOf("\n") + 1);
addrsArea.setText(result);
edited = true;
}
else
outhtml.setHTML(result);
}
public void onFailure(Throwable throwable) {
outhtml.setHTML("<p style=\"color:red;\">Failed to receive answer from server!</p>");
}
}
}
|
class TimestampedImage:
    """Timestamped image with an accompanying camera model.

    NOTE(review): the fields below are bare class-level annotations; this
    class is presumably decorated with ``@dataclass`` at its definition
    site (the decorator is not visible in this chunk) -- confirm.

    Args:
        img: (H,W,C) image.
        camera_model: Pinhole camera model with intrinsics and extrinsics.
        timestamp_ns: Nanosecond timestamp.
    """

    # Image data; byte-valued per the NDArrayByte alias, (H,W,C) per the docstring.
    img: NDArrayByte
    # Camera intrinsics + extrinsics associated with this capture.
    camera_model: PinholeCamera
    # Capture time in nanoseconds.
    timestamp_ns: int
Nitrogen-protected microwave-assisted extraction of ascorbic acid from fruit and vegetables.
In this study, nitrogen-protected microwave-assisted extraction (NPMAE), in which microwave-assisted extraction was performed under nitrogen protection, was initially developed and combined with HPLC separation for the determination of ascorbic acid (AA), an oxidizable component, from fruit and vegetables. The extraction conditions of NPMAE were investigated by extraction of AA from guava, and the chosen conditions were as follows: extraction solvent of 0.25% metaphosphoric acid solution, solid/liquid ratio of 1:10 g/mL, microwave power of 400 W and irradiation time of 10 min. Subsequently, this novel NPMAE method was evaluated by extraction of AA from different fruit and vegetables, such as guava, yellow pepper, green pepper and cayenne pepper. Compared with conventional MAE and solvent extraction methods, the oxidation of AA was significantly reduced or prevented in the process of NPMAE, providing higher extraction yield of AA. These results suggested the potential of NPMAE method for the extraction of oxidizable compounds from different spices of matrices. |
import compression from 'fastify-compress';
import cors from 'fastify-cors';
import helmet from 'fastify-helmet';
import rateLimit from 'fastify-rate-limit';
import { FastifyAdapter, NestFastifyApplication } from '@nestjs/platform-fastify';
import { Injectable } from '@nestjs/common';
import { AppConfigService } from '../config.service';
@Injectable()
export class FastifyService {
  // Builds the Fastify HTTP adapter passed to NestFactory.create().
  // NOTE(review): the '+' coercions yield NaN when the env vars are unset;
  // presumably they are guaranteed by deployment config -- confirm.
  buildFastifyHttpAdapter = () => {
    return new FastifyAdapter({
      ignoreTrailingSlash: true,
      caseSensitive: true,
      bodyLimit: +process.env.FASTIFY_BODY_SIZE_LIMIT,
      maxParamLength: +process.env.FASTIFY_MAX_PARAM_LENGTH,
      trustProxy: process.env.HOST_NAME === 'Heroku', // Enable https over Heroku: https://www.fastify.io/docs/latest/Server/#trustproxy,
      logger: { prettyPrint: true },
    });
  };

  // Registers security/perf plugins (helmet, CORS, rate limiting,
  // compression) on an already-created Nest application.
  registerFastifyPlugins = (app: NestFastifyApplication) => {
    const configService = app.get(AppConfigService);
    const { FASTIFY_RATE_LIMIT_MAX_NUM_CONNECTIONS, FASTIFY_RATE_LIMIT_TIME_WINDOW_MS } =
      configService.serverConfig;
    // https://docs.nestjs.com/techniques/security#helmet
    // https://github.com/fastify/fastify-helmet
    app.register(helmet, {
      contentSecurityPolicy: {
        directives: {
          defaultSrc: [`'self'`],
          styleSrc: [`'self'`, `'unsafe-inline'`],
          imgSrc: [`'self'`, 'data:', 'validator.swagger.io'],
          scriptSrc: [`'self'`, `https: 'unsafe-inline'`],
        },
      },
    });
    // https://github.com/fastify/fastify-cors
    // NOTE(review): wildcard origin allows any site to call this API -- confirm intended.
    app.register(cors, { origin: '*' });
    // https://docs.nestjs.com/techniques/security#rate-limiting
    // https://github.com/fastify/fastify-rate-limit
    app.register(rateLimit, {
      max: FASTIFY_RATE_LIMIT_MAX_NUM_CONNECTIONS,
      timeWindow: FASTIFY_RATE_LIMIT_TIME_WINDOW_MS,
    });
    // https://docs.nestjs.com/techniques/compression#use-with-fastify
    // https://github.com/fastify/fastify-compress
    app.register(compression, { encodings: ['gzip', 'deflate'] });
  };
}
|
#!/usr/bin/env python2.7
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Upload docker images to Google Container Registry."""
from __future__ import print_function
import argparse
import atexit
import os
import shutil
import subprocess
import tempfile
# Command-line interface.
# NOTE(review): the description says 'Run interop tests.' but this script
# uploads docker images -- presumably a copy/paste leftover; confirm before
# changing the user-facing help text.
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('--gcr_path',
                  default='gcr.io/grpc-testing',
                  help='Path of docker images in Google Container Registry')
argp.add_argument('--gcr_tag',
                  default='latest',
                  help='the tag string for the images to upload')
argp.add_argument('--with_files',
                  default=[],
                  nargs='+',
                  help='additional files to include in the docker image')
argp.add_argument('--with_file_dest',
                  default='/var/local/image_info',
                  help='Destination directory for with_files inside docker image')
argp.add_argument('--images',
                  default=[],
                  nargs='+',
                  help='local docker images in the form of repo:tag ' +
                  '(i.e. grpc_interop_java:26328ad8) to upload')
argp.add_argument('--keep',
                  action='store_true',
                  help='keep the created local images after uploading to GCR')
args = argp.parse_args()
def upload_to_gcr(image):
  """Tags and pushes a docker image to Google Container Registry.

  image: docker image name, i.e. grpc_interop_java:26328ad8

  A docker image image_foo:tag_old will be uploaded as
      <gcr_path>/image_foo:<gcr_tag>
  after inserting extra with_files under with_file_dest in the image. The
  original image name is stored as label original_name:"image_foo:tag_old".

  Returns True on success, False when the image name cannot be parsed or
  the push to GCR fails.
  """
  tag_idx = image.find(':')
  if tag_idx == -1:
    print('Failed to parse docker image name %s' % image)
    return False
  new_tag = '%s/%s:%s' % (args.gcr_path, image[:tag_idx], args.gcr_tag)
  lines = ['FROM ' + image]
  lines.append('LABEL original_name="%s"' % image)
  temp_dir = tempfile.mkdtemp()
  # Clean up the temp directory on exit; use shutil.rmtree instead of
  # shelling out to 'rm -rf' (ignore_errors=True keeps exit-time cleanup
  # best-effort, matching the old behaviour).
  atexit.register(shutil.rmtree, temp_dir, True)
  # Copy with_files inside the tmp directory, which will be the docker build
  # context.
  for f in args.with_files:
    shutil.copy(f, temp_dir)
    lines.append('COPY %s %s/' % (os.path.basename(f), args.with_file_dest))
  # Create a Dockerfile.
  with open(os.path.join(temp_dir, 'Dockerfile'), 'w') as f:
    f.write('\n'.join(lines))
  build_cmd = ['docker', 'build', '--rm', '--tag', new_tag, temp_dir]
  subprocess.check_output(build_cmd)
  if not args.keep:
    atexit.register(lambda: subprocess.call(['docker', 'rmi', new_tag]))
  # Upload to GCR. Previously a push failure was silently ignored and the
  # function still returned True; propagate the push result instead.
  if args.gcr_path:
    return subprocess.call(['gcloud', 'docker', '--', 'push', new_tag]) == 0
  return True
# Upload every requested image; per-image success/failure is signalled by
# upload_to_gcr's return value (currently not aggregated into an exit code).
for image in args.images:
  upload_to_gcr(image)
|
class PageParser:
"""
Class that compiles all regexes prehand for performance and then extracts module information from a string.
:param to_be_ignored: tokens that should be ignored
"""
def __init__(self, to_be_ignored: list = []):
self.to_be_ignored = [(re.compile(regex[0]), regex[1]) for regex in to_be_ignored]
self.blank_pattern = re.compile(r'^(?![\s\S])')
self.course_id_pattern = re.compile(r'(?P<id>\d\d\s*-\s*\w\w\s*-\s*\d\d\d\d)(\s*-\s*(?P<type>\w\w))?')
self.cp_pattern = re.compile(r'(\d+)\s*CP')
self.module_description_flag = re.compile('Modulbeschreibung')
self.module_name_flag = re.compile('Modulname')
self.module_nr_flag = re.compile('Modul Nr.')
self.m_person_flag = re.compile('Modulverantwortliche Person')
self.cp_flag = re.compile('Kreditpunkte')
self.arbeitsaufwand_flag = re.compile('Arbeitsaufwand')
self.turnus_flag = re.compile('Angebotsturnus')
self.language_flag = re.compile('Sprache')
self.course_nr_flag = re.compile('Kurs Nr.')
self.sws_flag = re.compile('SWS')
self.content_flag = re.compile(r'Lerninhalt')
self.objectives_flag = re.compile(r'(3\s*)?Qualifikationsziele\s*/\s*Lernergebnisse(\s*/\s*Kompetenzen)?')
self.prerequisites_flag = re.compile(r'(4\s+)?Voraussetzung\s+für\s+die\s+Teilnahme')
self.every_semester = re.compile('jedes Semester|Jedes Semester')
self.winter_semester = re.compile('Wintersemester')
self.summer_semster = re.compile('Sommersemester')
self.number_pattern = re.compile(r'\d+')
self.remove_1 = re.compile(r'\s+-\s+(und)?')
self.type_mapping = {
'iv': 'Integrierte Lehrveranstaltung',
'pj': 'Projektseminar',
'pl': 'Praktikum in der Lehre',
'pp': 'Projektpraktikum',
'pr': 'Praktikum',
'se': 'Seminar',
'tt': 'Tutorium',
'ue': 'Übung',
'vl': 'Vorlesung',
'vu': 'Vorlseung und Übung'
}
def process_one_line_str(self, string):
"""
Process one line string such as name of the module
:param string: string to be processed
:return: processed string
"""
string = re.sub('\n', '', string.strip())
slashand = self.remove_1.search(string)
if slashand and slashand.group(1):
return self.remove_1.sub(r'- \g<1>', string)
return self.remove_1.sub("-", string)
def parse(self, page_pypdf):
"""
Extract a module from PDF pages
:param page_pypdf: pdf page where module information should be extracted
:return: module extracted from pdf page
"""
name = self.parse_name(page_pypdf)
cp: int = self.parse_cp(page_pypdf)
turnus = self.parse_turnus(page_pypdf)
_id, _type = self.parse_course_nr(page_pypdf)
category = self.parse_category(page_pypdf)
content = self.parse_content(page_pypdf)
objectives = self.parse_objectives(page_pypdf)
return Module(_id, name, cp, _type, turnus, category, content, objectives)
def parse_name(self, page):
"""
Parse the name of the module on the page
:param page: page to be searched on
:return: name of the module
"""
res = self.module_name_flag.search(page)
if res:
m_name_start = res.span()[1]
m_name_end = self.module_nr_flag.search(page, m_name_start).span()[0]
string = page[m_name_start: m_name_end]
return re.sub(r'\n', '', self.process_one_line_str(string))
return None
def parse_cp(self, page):
"""
Parse credit points of module
:param page:
:return: credit points of the module
"""
res = self.cp_pattern.search(page)
if res:
return res.group(1)
return -1
def parse_turnus(self, page):
"""
Parse in which half of a year the module is offered(refer to Turnus Enum)
:param page:
:return: turnus of the module
"""
res = self.turnus_flag.search(page)
if res:
turnus_start = res.span()[1]
turnus_end = self.language_flag.search(page, turnus_start)
if not turnus_end:
turnus_end = self.m_person_flag.search(page, turnus_start)
if turnus_end:
turnus_end = turnus_end.span()[0]
processed = self.process_one_line_str(page[turnus_start: turnus_end])
if self.summer_semster.search(processed):
ret = Turnus.SOMMER
elif self.winter_semester.search(processed):
ret = Turnus.WINTER
elif self.every_semester.search(processed):
ret = Turnus.BOTH
else:
ret = Turnus.NA
return ret
return None
def parse_course_nr(self, page):
    """
    Parse the course number (and course type) of the module.

    When the page contains two course-id matches, the second one is used
    (the first is presumably a header entry -- TODO confirm against the PDF
    layout).

    :param page: page text to be searched
    :return: tuple ``(course_id, course_type)``; ``course_type`` may be None,
             and ``(None, None)`` is returned when no course id is found.
    """
    res = self.course_id_pattern.search(page)
    if res is None:
        # Previously this path re-ran the identical search (which had just
        # failed), dereferenced its None result (AttributeError), and would
        # have returned a bare string instead of a 2-tuple -- callers unpack
        # two values, so return a tuple consistently.
        return None, None
    res2 = self.course_id_pattern.search(page, res.span()[1])
    if res2:
        _id = re.sub(r'\s', '', res2.group('id'))
        _type = re.sub(r'\s', '', res2.group('type'))
        return _id, _type
    return re.sub(r'\s', '', res.group('id')), None
def parse_category(self, page):
    """
    Parse the category of module. This method is not implemented and should be overwritten if
    categories should be parsed.

    :param page: page text (unused in this base implementation)
    :return: category of the module (always the empty string here)
    """
    # Deliberate no-op default; subclasses override this hook.
    return ''
def parse_content(self, page):
    """
    Parse the content description of the module.

    :param page: page text to be searched
    :return: content description, or None if the content marker is absent
    """
    res = self.content_flag.search(page)
    if res:
        content_start = res.span()[1]
        # Guard against a missing objectives marker instead of crashing on
        # ``None.span()`` -- mirrors the defensive handling already used in
        # ``parse_objectives``; with no end marker, read to end of page.
        end_match = self.objectives_flag.search(page, content_start)
        content_end = end_match.span()[0] if end_match else None
        content = process_description(page[content_start: content_end]).strip()
        # Strip configured boilerplate patterns from the description.
        for ignore in self.to_be_ignored:
            content = ignore[0].sub(ignore[1], content)
        return content.strip()
    return None
def parse_objectives(self, page):
    """
    Parse the objectives description of the module.

    :param page: page text to be searched
    :return: objectives description, or None if the objectives marker is absent
    """
    res = self.objectives_flag.search(page)
    if res is None:
        return None
    start = res.span()[1]
    # The objectives end at the prerequisites marker if one exists;
    # otherwise the text runs to the end of the page.
    end_match = self.prerequisites_flag.search(page)
    end = end_match.span()[0] if end_match else None
    objectives = process_description(page[start:end]).strip()
    # Strip configured boilerplate patterns from the description.
    for ignore in self.to_be_ignored:
        objectives = ignore[0].sub(ignore[1], objectives)
    return objectives.strip()
/* char *T1_GetFullName( FontID): Get the Full Name from
   the font dictionary associated with the specified font, or NULL if
   an error occurs. The returned pointer refers to a static buffer that
   is overwritten by the next call. */
char *T1_GetFullName( int FontID)
{
  static char fullname[MAXPSNAMELEN];
  size_t len;

  if (T1_CheckForFontID(FontID)!=1){
    T1_errno=T1ERR_INVALID_FONTID;
    return(NULL);
  }

  len = pFontBase->pFontArray[FontID].pType1Data->fontInfoP[FULLNAME].value.len;
  /* Clamp the copy length so an oversized dictionary entry cannot overflow
     the static buffer (the previous code copied value.len bytes and wrote
     the terminator at value.len unconditionally). */
  if (len > MAXPSNAMELEN - 1)
    len = MAXPSNAMELEN - 1;
  strncpy(fullname,
          (char *)(pFontBase->pFontArray[FontID].pType1Data->fontInfoP[FULLNAME].value.data.nameP),
          len);
  fullname[len]=0;
  return(fullname);
}
<gh_stars>10-100
/*
* Copyright 2020 Xiaomi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xiaomi.youpin.gwdash.bo;
import java.util.Objects;
/**
 * Request payload describing a single API debug invocation: which API to hit
 * ({@code aid}/{@code url}), how ({@code httpMethod}/{@code headers}/{@code params})
 * and the request timeout.
 */
public class ApiDebugParam {

    private Integer aid;
    private String url;
    private String httpMethod;
    private String headers;
    private Integer timeout;
    private String params;

    public Integer getAid() {
        return aid;
    }

    public void setAid(Integer aid) {
        this.aid = aid;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public String getHttpMethod() {
        return httpMethod;
    }

    public void setHttpMethod(String httpMethod) {
        this.httpMethod = httpMethod;
    }

    public String getHeaders() {
        return headers;
    }

    public void setHeaders(String headers) {
        this.headers = headers;
    }

    public Integer getTimeout() {
        return timeout;
    }

    public void setTimeout(Integer timeout) {
        this.timeout = timeout;
    }

    public String getParams() {
        return params;
    }

    public void setParams(String params) {
        this.params = params;
    }

    @Override
    public String toString() {
        // Produces exactly the same representation as the original
        // string-concatenation implementation.
        StringBuilder sb = new StringBuilder("ApiDebugParam{");
        sb.append("aid=").append(aid);
        sb.append(", url='").append(url).append('\'');
        sb.append(", httpMethod='").append(httpMethod).append('\'');
        sb.append(", headers='").append(headers).append('\'');
        sb.append(", timeout=").append(timeout);
        sb.append(", params='").append(params).append('\'');
        sb.append('}');
        return sb.toString();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Keep the strict getClass() comparison of the original (subclasses
        // are never equal to instances of this class).
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        ApiDebugParam other = (ApiDebugParam) o;
        return Objects.equals(aid, other.aid)
                && Objects.equals(url, other.url)
                && Objects.equals(httpMethod, other.httpMethod)
                && Objects.equals(headers, other.headers)
                && Objects.equals(timeout, other.timeout)
                && Objects.equals(params, other.params);
    }

    @Override
    public int hashCode() {
        return Objects.hash(aid, url, httpMethod, headers, timeout, params);
    }
}
|
<filename>server/rdbms/src/main/java/cn/surveyking/server/impl/RepoTemplateServiceImpl.java<gh_stars>0
package cn.surveyking.server.impl;
import cn.surveyking.server.domain.model.RepoTemplate;
import cn.surveyking.server.mapper.RepoTemplateMapper;
import cn.surveyking.server.service.BaseService;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* @author javahuang
* @date 2022/4/29
*/
@Service
@Transactional(rollbackFor = Exception.class)
public class RepoTemplateServiceImpl extends BaseService<RepoTemplateMapper, RepoTemplate> {
    // Marker service: all CRUD behaviour is inherited from BaseService. This
    // class only binds the RepoTemplate entity to its mapper and makes every
    // public method transactional, rolling back on any Exception.
}
|
Close video Shadow of Flynn legal jeopardy falls on Trump, Sessions Matt Miller, former Justice Department spokesman, talks with Rachel Maddow about the nature of the case against disgraced Trump NSA Mike Flynn and whether that has implications for any obstruction of justice case that might be made against Trump. share tweet email save Embed
At last year’s Republican National Convention, retired Gen. Michael Flynn, a prominent member of Donald Trump’s inner circle, delivered a speech in which he chanted, “Lock her up!” in reference to Hillary Clinton.
“If I did a tenth of what she did, I would be in jail,” Flynn added at the time.
It’s a quote to keep in mind today.
Former U.S. National Security Adviser Michael Flynn was charged with making false statements to the FBI and is expected to plead guilty Friday morning in federal court in Washington. […] Flynn, who was fired after just 24 days on the job, was one of the first Trump associates to come under scrutiny in the federal probe now led by Special Counsel Robert Mueller into possible collusion between Moscow and the Trump campaign.
It’s a development that’s worth pausing to appreciate: the former White House national security adviser is facing a criminal charge for lying to the FBI about his conversations with a Russian official – following Russia’s attack that helped put Donald Trump in power.
This comes roughly a month after Mueller’s special counsel team indicted former Trump campaign manager Paul Manafort and his longtime associate Rick Gates. The same day, we already learned that George Papadopoulos, one of only a handful of Trump foreign policy advisers, pleaded guilty to lying to federal agents about his contacts with Kremlin-connected Russians, and he’s now struck a cooperation agreement with Mueller’s team.
After those charges were reported in late October, Trump World could plausibly claim that those officials worked on the president’s campaign, but not in the president’s White House. As of today, that’s no longer the case: Flynn worked with Trump in the West Wing before being fired three weeks into his tenure.
Today’s news comes a week after we learned that Flynn’s lawyers cut off communications with Trump’s lawyers, which was widely seen as evidence that Flynn was either “cooperating with prosecutors or negotiating a deal.”
What’s more, NBC News reported a few weeks ago that Mueller’s team has “gathered enough evidence to bring charges” against Flynn and his son. One possible avenue for Flynn would be to flip on someone higher up the ladder.
There are still some questions that need to be answered. Today’s news suggests Flynn may now be working with Mueller’s team – in other words, he’s likely “flipped” – but what Flynn has to offer is far from clear.
While that comes into focus, there’s still a core truth to consider: when a former White House national security adviser pleads guilty to lying to the FBI about talks with a foreign adversary, it’s a story that should jolt the political world. If you’ve been inclined to believe the president’s assurances that the entire Trump-Russia scandal is “fake news,” today’s developments offer profound evidence to the contrary. |
The full details of the soundtrack album for the superhero movie Wonder Woman have been revealed. The album features the film’s original music composed by Rupert Gregson-Williams (Hacksaw Ridge, The Legend of Tarzan, The Crown, Over the Hedge). Also included is the new song To Be Human by Sia feat. Labrinth. The soundtrack will be released physically on June 2, 2017 and is now available for pre-order on Amazon. Wonder Woman is directed by Patty Jenkins and stars Gal Gadot, Chris Pine, Connie Nielsen, Robin Wright, David Thewlis, Danny Huston, Elena Anaya, Ewen Bremner and Saïd Taghmaoui. The movie based on the DC Comics character will be released nationwide on June 2. Visit the official movie website to learn more about the film.
Here’s the album track list:
1. Amazons Of Themyscira
2. History Lesson
3. Angel On The Wing
4. Ludendorff, Enough!
5. Pain, Loss & Love
6. No Man’s Land
7. Fausta
8. Wonder Woman’s Wrath
9. The God Of War
10. We Are All To Blame
11. Hell Hath No Fury
12. Lightning Strikes
13. Trafalgar Celebration
14. Action Reaction
15. To Be Human – Sia (feat. Labrinth) |
/**
* Checks whether the specified {@code collection} contains null values, throws {@link IllegalStateException} with a customized error message if it has.
*
* @param collection the collection to be checked.
* @param argumentName the name of the argument to be used in the error message.
* @return the {@code collection}.
* @throws java.lang.NullPointerException if {@code collection} is null.
* @throws java.lang.IllegalStateException if {@code collection} contains null values.
* @see #requireNonNull(Object, String)
*/
public static <T, L extends List<T>> L requireNonNullEntries(final L collection, final String argumentName) {
requireNonNull(collection, argumentName);
final String msg = String.format(NOT_NULL_ENTRIES_MSG_FORMAT, argumentName);
for (final Object value : collection) {
requireState(value != null, msg);
}
return collection;
} |
Domain name troubles continue for The Pirate Bay this week. Just a few days after the site switched on its new .ms domain name, it's already been suspended by the Montserratian .MS registry. The torrent site now has to look out for new alternatives to keep the hydra going.
The Pirate Bay has had its fair share of domain name issues in recent months.
The site previously burnt through a ‘hydra’ of six domain names after it sailed away from its .se mainstay. Ironically, the Swedish TLD turned out to be more resilient than any of the alternatives.
In an apparent attempt to diversify TPB switched on a new domain name last week. However, the Montserrat-based ThePirateBay.ms didn’t last long either and has already been suspended.
The .MS domain now has the infamous “serverhold” status which suggests that the responsible registry interfered. The status has been used before when domain names were flagged for copyright issues, and strips the domain name of its DNS entries.
“This status code is set by your domain’s Registry Operator. Your domain is not activated in the DNS,” ICANN writes.
As a result of the issue ThePirateBay.ms is no longer accessible. TorrentFreak reached out to the responsible registry for a comment but at the time of writing we haven’t heard back.
The suspension means that TPB is back to square one again, leaving ThePirateBay.org and ThePirateBay.se as the only stable domain names. Whether the Pirate Bay team has any plans to add any new domains is unknown at this point.
The site’s operators previously informed TF that they have plenty of domain names prepared, so it’s likely that one or more will become active in the future. |
/**
* Test of isQueued method, of class LockableResource.
*/
@Test
public void testIsQueued_0args() {
System.out.println("isQueued");
boolean expResult = false;
boolean result = instance.isQueued();
assertEquals(expResult, result);
} |
//-----------------------------------------------------------------------------
// Purpose: Passes this call down into the server vehicle where the tests are done.
//          Defaults to true when no drivable vehicle is attached.
//-----------------------------------------------------------------------------
bool CBaseServerVehicle::PassengerShouldReceiveDamage( CTakeDamageInfo &info )
{
	// Same call pattern as before: GetDrivableVehicle() is evaluated for the
	// check and again for the dispatch.
	return GetDrivableVehicle() ? GetDrivableVehicle()->PassengerShouldReceiveDamage( info ) : true;
}
def _observe(self, operator, *data_frames): |
// ExportResourceData provides resource data to export for the container.
func (p *staticplus) ExportResourceData(c cache.Container) map[string]string {
a, ok := p.allocations[c.GetCacheID()]
if !ok {
p.Warn("can't find allocation for container %s", c.PrettyName())
return nil
}
data := map[string]string{}
if a.shared != 0 {
data[policy.ExportSharedCPUs] = p.shared.String()
}
if a != nil && !a.exclusive.IsEmpty() {
isolated := a.exclusive.Intersection(p.sys.Isolated()).String()
if isolated != "" {
data[policy.ExportIsolatedCPUs] = isolated
}
exclusive := a.exclusive.Difference(p.sys.Isolated()).String()
if exclusive != "" {
data[policy.ExportExclusiveCPUs] = exclusive
}
}
return data
} |
<reponame>dora1998/FileSaveLineBot
package main
import (
"fmt"
"github.com/joho/godotenv"
"github.com/line/line-bot-sdk-go/linebot"
"github.com/line/line-bot-sdk-go/linebot/httphandler"
"golang.org/x/net/context"
"google.golang.org/appengine"
"log"
"net/http"
"os"
)
// Greeting (Japanese) sent when the bot is added to a group.
// NOTE(review): the constant name contains a typo ("MESSEAGE"); renaming it
// would touch all usages, so it is only flagged here.
const MESSEAGE_GROUP_JOINED="こんにちは!しおパンダbotだぱん!\nファイルが送信されたら、Dropboxに自動で取っておくから是非使って欲しいぱん。"

// Shared LINE webhook handler, initialised in main().
var botHandler *httphandler.WebhookHandler
// main loads the LINE credentials, wires the webhook/task HTTP handlers and
// starts the server on $PORT (default 8080).
func main() {
	err := godotenv.Load("line.env")
	if err != nil {
		panic(err)
	}

	botHandler, err = httphandler.New(
		os.Getenv("LINE_BOT_CHANNEL_SECRET"),
		os.Getenv("LINE_BOT_CHANNEL_TOKEN"),
	)
	// Previously this error was silently ignored; a missing/invalid channel
	// secret would then cause a nil-pointer dereference on the next line.
	if err != nil {
		panic(err)
	}
	botHandler.HandleEvents(handleCallback)

	http.Handle("/callback", botHandler)
	http.HandleFunc("/task", handleTask)

	port := os.Getenv("PORT")
	if port == "" {
		port = "8080"
		log.Printf("Defaulting to port %s", port)
	}

	log.Printf("Listening on port %s", port)
	log.Fatal(http.ListenAndServe(fmt.Sprintf(":%s", port), nil))
}
// handleCallback receives LINE webhook events and forwards the relevant ones
// for processing (originally intended to be dispatched through a TaskQueue).
// NOTE(review): the log strings below contain typos ("recieved", "Messeage",
// "ReplayMessage"); they are runtime output and therefore left unchanged here.
func handleCallback(evs []*linebot.Event, r *http.Request) {
	c := newContext(r)
	bot, err := newLINEBot(c)
	if err != nil {
		errorf(c, "newLINEBot: %v", err)
		return
	}
	for _, e := range evs {
		logf(c, "Webhook recieved.\nEventType: %s\nMesseage: %#v", e.Type, e.Message)
		switch e.Type {
		case linebot.EventTypeJoin:
			// When the bot joins a group, reply with the greeting right away.
			_, err := bot.ReplyMessage(
				e.ReplyToken,
				linebot.NewTextMessage(MESSEAGE_GROUP_JOINED),
			).Do()
			if err != nil {
				errorf(c, "ReplayMessage: %v", err)
				continue
			}
			taskData := &ReceivedMessage{
				Type: e.Type,
				Message: nil,
				Source: *e.Source,
				Timestamp: e.Timestamp,
				ReplyToken: e.ReplyToken,
			}
			replyMessage(c, bot, taskData)
		case linebot.EventTypeMessage:
			var taskData *ReceivedMessage = nil
			// Only image and file messages are handled; build the payload
			// that would be handed to the TaskQueue.
			switch mes := e.Message.(type) {
			case *linebot.FileMessage:
				logf(c, "FileMessage Received.")
				taskData = &ReceivedMessage{
					Type: e.Type,
					Message: &MessageBody{
						ID: mes.ID,
						FileName: &mes.FileName,
					},
					Source: *e.Source,
					Timestamp: e.Timestamp,
					ReplyToken: e.ReplyToken,
				}
			case *linebot.ImageMessage:
				logf(c, "ImageMessage Received.")
				taskData = &ReceivedMessage{
					Type: e.Type,
					Message: &MessageBody{
						ID: mes.ID,
						FileName: nil,
					},
					Source: *e.Source,
					Timestamp: e.Timestamp,
					ReplyToken: e.ReplyToken,
				}
			}

			// Only messages that need saving are forwarded.
			// TODO: tasks added to the TaskQueue are currently not executed,
			// so the message is processed directly for now.
			if taskData != nil {
				//j, err := json.Marshal(taskData)
				//if err != nil {
				//	errorf(c, "json.Marshal: %v", err)
				//	return
				//}
				//b64data := base64.StdEncoding.EncodeToString(j)
				//t := taskqueue.NewPOSTTask("/task", map[string][]string{"data": {b64data}})
				//taskqueue.Add(c, t, "")
				//logf(c, "TaskQueue Sent.")
				replyMessage(c, bot, taskData)
			}
		}
	}
}
// logf logs an informational message. The context argument is currently
// unused (kept for App Engine logging API compatibility).
func logf(c context.Context, format string, args ...interface{}) {
	log.Printf(format, args...)
}
// errorf logs an error. It deliberately does NOT terminate the process:
// callers (see handleCallback) continue or return after calling it, which the
// previous log.Fatalf implementation (os.Exit) made unreachable — a single
// bad webhook event would have killed the whole server.
func errorf(c context.Context, format string, args ...interface{}) {
	log.Printf(format, args...)
}
// newContext builds an App Engine context from the incoming request.
func newContext(r *http.Request) context.Context {
	return appengine.NewContext(r)
}
// newLINEBot creates a LINE bot client from the channel credentials in the
// environment. The context argument is unused since the urlfetch-based client
// (commented out below) was dropped.
func newLINEBot(c context.Context) (*linebot.Client, error) {
	//return botHandler.NewClient(
	//	linebot.WithHTTPClient(urlfetch.Client(c)),
	//)
	return linebot.New(
		os.Getenv("LINE_BOT_CHANNEL_SECRET"),
		os.Getenv("LINE_BOT_CHANNEL_TOKEN"),
	)
}
// isDevServer reports whether the app is running under the local
// development app server (signalled via RUN_WITH_DEVAPPSERVER).
func isDevServer() bool {
	return os.Getenv("RUN_WITH_DEVAPPSERVER") != ""
}
<reponame>donhui/kubesphere<filename>pkg/apiserver/authorization/union/union_test.go
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package union
import (
"errors"
"fmt"
"k8s.io/apiserver/pkg/authentication/user"
"kubesphere.io/kubesphere/pkg/apiserver/authorization/authorizer"
"reflect"
"testing"
)
// mockAuthzHandler is a stub authorizer that always returns the canned
// decision and error it was constructed with.
type mockAuthzHandler struct {
	decision authorizer.Decision
	err      error
}

// Authorize implements authorizer.Authorizer with the fixed values.
func (mock *mockAuthzHandler) Authorize(a authorizer.Attributes) (authorizer.Decision, string, error) {
	return mock.decision, "", mock.err
}
func TestAuthorizationSecondPasses(t *testing.T) {
handler1 := &mockAuthzHandler{decision: authorizer.DecisionNoOpinion}
handler2 := &mockAuthzHandler{decision: authorizer.DecisionAllow}
authzHandler := New(handler1, handler2)
authorized, _, _ := authzHandler.Authorize(nil)
if authorized != authorizer.DecisionAllow {
t.Errorf("Unexpected authorization failure")
}
}
func TestAuthorizationFirstPasses(t *testing.T) {
handler1 := &mockAuthzHandler{decision: authorizer.DecisionAllow}
handler2 := &mockAuthzHandler{decision: authorizer.DecisionNoOpinion}
authzHandler := New(handler1, handler2)
authorized, _, _ := authzHandler.Authorize(nil)
if authorized != authorizer.DecisionAllow {
t.Errorf("Unexpected authorization failure")
}
}
func TestAuthorizationNonePasses(t *testing.T) {
handler1 := &mockAuthzHandler{decision: authorizer.DecisionNoOpinion}
handler2 := &mockAuthzHandler{decision: authorizer.DecisionNoOpinion}
authzHandler := New(handler1, handler2)
authorized, _, _ := authzHandler.Authorize(nil)
if authorized == authorizer.DecisionAllow {
t.Errorf("Expected failed authorization")
}
}
// TestAuthorizationError verifies that an error returned by every authorizer
// surfaces from the union.
func TestAuthorizationError(t *testing.T) {
	handler1 := &mockAuthzHandler{err: fmt.Errorf("foo")}
	handler2 := &mockAuthzHandler{err: fmt.Errorf("foo")}
	authzHandler := New(handler1, handler2)
	_, _, err := authzHandler.Authorize(nil)
	if err == nil {
		// The previous message printed err with %v even though err is
		// provably nil here ("Expected error: <nil>"), which was misleading.
		t.Errorf("Expected an error, got nil")
	}
}
// mockAuthzRuleHandler is a stub rule resolver returning canned resource and
// non-resource rules, or the configured error.
type mockAuthzRuleHandler struct {
	resourceRules    []authorizer.ResourceRuleInfo
	nonResourceRules []authorizer.NonResourceRuleInfo
	err              error
}

// RulesFor implements the rule-resolver interface; the third return value
// ("incomplete") is always false for this mock.
func (mock *mockAuthzRuleHandler) RulesFor(user user.Info, namespace string) ([]authorizer.ResourceRuleInfo, []authorizer.NonResourceRuleInfo, bool, error) {
	if mock.err != nil {
		return []authorizer.ResourceRuleInfo{}, []authorizer.NonResourceRuleInfo{}, false, mock.err
	}
	return mock.resourceRules, mock.nonResourceRules, false, nil
}
// TestAuthorizationResourceRules verifies that the union rule resolver simply
// concatenates the resource rules of its delegates in order.
func TestAuthorizationResourceRules(t *testing.T) {
	handler1 := &mockAuthzRuleHandler{
		resourceRules: []authorizer.ResourceRuleInfo{
			&authorizer.DefaultResourceRuleInfo{
				Verbs:     []string{"*"},
				APIGroups: []string{"*"},
				Resources: []string{"bindings"},
			},
			&authorizer.DefaultResourceRuleInfo{
				Verbs:     []string{"get", "list", "watch"},
				APIGroups: []string{"*"},
				Resources: []string{"*"},
			},
		},
	}
	handler2 := &mockAuthzRuleHandler{
		resourceRules: []authorizer.ResourceRuleInfo{
			&authorizer.DefaultResourceRuleInfo{
				Verbs:     []string{"*"},
				APIGroups: []string{"*"},
				Resources: []string{"events"},
			},
			&authorizer.DefaultResourceRuleInfo{
				Verbs:         []string{"get"},
				APIGroups:     []string{"*"},
				Resources:     []string{"*"},
				ResourceNames: []string{"foo"},
			},
		},
	}

	// Expected: handler1's rules followed by handler2's rules, unchanged.
	expected := []authorizer.DefaultResourceRuleInfo{
		{
			Verbs:     []string{"*"},
			APIGroups: []string{"*"},
			Resources: []string{"bindings"},
		},
		{
			Verbs:     []string{"get", "list", "watch"},
			APIGroups: []string{"*"},
			Resources: []string{"*"},
		},
		{
			Verbs:     []string{"*"},
			APIGroups: []string{"*"},
			Resources: []string{"events"},
		},
		{
			Verbs:         []string{"get"},
			APIGroups:     []string{"*"},
			Resources:     []string{"*"},
			ResourceNames: []string{"foo"},
		},
	}

	authzRulesHandler := NewRuleResolvers(handler1, handler2)

	rules, _, _, _ := authzRulesHandler.RulesFor(nil, "")
	actual := getResourceRules(rules)
	if !reflect.DeepEqual(expected, actual) {
		t.Errorf("Expected: \n%#v\n but actual: \n%#v\n", expected, actual)
	}
}
// TestAuthorizationNonResourceRules verifies that the union rule resolver
// concatenates the non-resource rules of its delegates in order.
func TestAuthorizationNonResourceRules(t *testing.T) {
	handler1 := &mockAuthzRuleHandler{
		nonResourceRules: []authorizer.NonResourceRuleInfo{
			&authorizer.DefaultNonResourceRuleInfo{
				Verbs:           []string{"get"},
				NonResourceURLs: []string{"/api"},
			},
		},
	}

	handler2 := &mockAuthzRuleHandler{
		nonResourceRules: []authorizer.NonResourceRuleInfo{
			&authorizer.DefaultNonResourceRuleInfo{
				Verbs:           []string{"get"},
				NonResourceURLs: []string{"/api/*"},
			},
		},
	}

	// Expected: handler1's rule followed by handler2's rule, unchanged.
	expected := []authorizer.DefaultNonResourceRuleInfo{
		{
			Verbs:           []string{"get"},
			NonResourceURLs: []string{"/api"},
		},
		{
			Verbs:           []string{"get"},
			NonResourceURLs: []string{"/api/*"},
		},
	}

	authzRulesHandler := NewRuleResolvers(handler1, handler2)

	_, rules, _, _ := authzRulesHandler.RulesFor(nil, "")
	actual := getNonResourceRules(rules)
	if !reflect.DeepEqual(expected, actual) {
		t.Errorf("Expected: \n%#v\n but actual: \n%#v\n", expected, actual)
	}
}
// getResourceRules converts the ResourceRuleInfo interface values into
// concrete DefaultResourceRuleInfo structs so they can be compared with
// reflect.DeepEqual in the tests above.
func getResourceRules(infos []authorizer.ResourceRuleInfo) []authorizer.DefaultResourceRuleInfo {
	rules := make([]authorizer.DefaultResourceRuleInfo, 0, len(infos))
	for _, info := range infos {
		rules = append(rules, authorizer.DefaultResourceRuleInfo{
			Verbs:         info.GetVerbs(),
			APIGroups:     info.GetAPIGroups(),
			Resources:     info.GetResources(),
			ResourceNames: info.GetResourceNames(),
		})
	}
	return rules
}
// getNonResourceRules converts the NonResourceRuleInfo interface values into
// concrete DefaultNonResourceRuleInfo structs for DeepEqual comparison.
func getNonResourceRules(infos []authorizer.NonResourceRuleInfo) []authorizer.DefaultNonResourceRuleInfo {
	rules := make([]authorizer.DefaultNonResourceRuleInfo, 0, len(infos))
	for _, info := range infos {
		rules = append(rules, authorizer.DefaultNonResourceRuleInfo{
			Verbs:           info.GetVerbs(),
			NonResourceURLs: info.GetNonResourceURLs(),
		})
	}
	return rules
}
// TestAuthorizationUnequivocalDeny exercises the union's first-opinion-wins
// semantics: the first authorizer that returns Allow or Deny decides, and an
// explicit Deny (even one accompanied by an error) short-circuits later
// authorizers.
func TestAuthorizationUnequivocalDeny(t *testing.T) {
	cs := []struct {
		authorizers []authorizer.Authorizer
		decision    authorizer.Decision
	}{
		{
			// No authorizers at all: no opinion.
			authorizers: []authorizer.Authorizer{},
			decision:    authorizer.DecisionNoOpinion,
		},
		{
			// Allow comes before Deny, so the union allows.
			authorizers: []authorizer.Authorizer{
				&mockAuthzHandler{decision: authorizer.DecisionNoOpinion},
				&mockAuthzHandler{decision: authorizer.DecisionAllow},
				&mockAuthzHandler{decision: authorizer.DecisionDeny},
			},
			decision: authorizer.DecisionAllow,
		},
		{
			// Deny comes before Allow, so the union denies.
			authorizers: []authorizer.Authorizer{
				&mockAuthzHandler{decision: authorizer.DecisionNoOpinion},
				&mockAuthzHandler{decision: authorizer.DecisionDeny},
				&mockAuthzHandler{decision: authorizer.DecisionAllow},
			},
			decision: authorizer.DecisionDeny,
		},
		{
			// A failing-closed authorizer (Deny + error) still denies.
			authorizers: []authorizer.Authorizer{
				&mockAuthzHandler{decision: authorizer.DecisionNoOpinion},
				&mockAuthzHandler{decision: authorizer.DecisionDeny, err: errors.New("webhook failed closed")},
				&mockAuthzHandler{decision: authorizer.DecisionAllow},
			},
			decision: authorizer.DecisionDeny,
		},
	}

	for i, c := range cs {
		t.Run(fmt.Sprintf("case %v", i), func(t *testing.T) {
			authzHandler := New(c.authorizers...)

			decision, _, _ := authzHandler.Authorize(nil)
			if decision != c.decision {
				t.Errorf("Unexpected authorization failure: %v, expected: %v", decision, c.decision)
			}
		})
	}
}
|
/**
* Use the provided ConstantFolder model to create a SB_Folder to add to the
* parent node as a child. Copies recursive behavior of
* {@link #addBehavior(Behavior, DefaultMutableTreeNode)}
*/
private void addConstantFolder(ConstantFolder constantFolderModel,
DefaultMutableTreeNode parentNode) {
SB_Folder folder = new SB_Folder(constantFolderModel);
DefaultMutableTreeNode node = new DefaultMutableTreeNode(folder);
insertNodeInto(node, parentNode);
ConstantFolderGroup children
= constantFolderModel.getConstantChildren();
if (children != null) {
for (Object child : children.getConstantOrConstantFolder()) {
if (child instanceof Constant) {
addConstant((Constant) child, node);
} else {
addConstantFolder((ConstantFolder) child, node);
}
}
}
} |
/**
* Base implementation for capturing data from an {@linkplain com.adaptris.core.AdaptrisMessage} and storing it in a jdbc database.
*
*/
public abstract class JdbcDataCaptureServiceImpl extends JdbcServiceWithParameters {
@NotNull
@InputFieldHint(style = "SQL", expression = true)
private String statement = null;
@AdvancedConfig(rare = true)
@InputFieldDefault(value = "false")
private Boolean saveReturnedKeys = null;
@InputFieldDefault(value = "")
@InputFieldHint(style = "BLANKABLE")
@AffectsMetadata
private String rowsUpdatedMetadataKey;
protected transient DatabaseActor actor;
public JdbcDataCaptureServiceImpl() {
super();
actor = new DatabaseActor();
}
@Override
protected void initJdbcService() throws CoreException {}
@Override
protected void startService() throws CoreException {}
@Override
protected void stopService() {
actor.destroy();
}
@Override
protected void closeJdbcService() {
}
/**
* SQL statement to perform.
*
* @param s the statement.
*/
public void setStatement(String s) {
statement = s;
}
/**
* The configured Statement.
*
* @return the SQL statement.
*/
public String getStatement() {
return statement;
}
public boolean saveReturnedKeys() {
return BooleanUtils.toBooleanDefaultIfNull(getSaveReturnedKeys(), false);
}
/**
* Store any return value from the SQL statement as metadata.
*
* @param save the falg.
*/
public void setSaveReturnedKeys(Boolean save) {
saveReturnedKeys = save;
}
/**
* Get the configured flag.
*
* @return the flag.
*/
public Boolean getSaveReturnedKeys() {
return saveReturnedKeys;
}
public String getRowsUpdatedMetadataKey() {
return rowsUpdatedMetadataKey;
}
/**
* Set the metadata key which will contain the number of rows updated by this service.
* <p>
* The precise value will depend on the statement(s) being executed; this is simply an aggregation
* of the values returned by {@link Statement#executeUpdate(String)}.
* </p>
*
* @param key the metadata key, if set this metadata will contain the number of rows affected.
*/
public void setRowsUpdatedMetadataKey(String key) {
rowsUpdatedMetadataKey = key;
}
protected void updateMetadata(AdaptrisMessage msg, long value) {
if (!isBlank(getRowsUpdatedMetadataKey())) {
msg.addMetadata(getRowsUpdatedMetadataKey(), String.valueOf(value));
}
}
protected void saveKeys(AdaptrisMessage msg, Statement stmt) throws SQLException {
ResultSet rs = null;
Statement savedKeysQuery = null;
try {
if (saveReturnedKeys()) {
if (!actor.isOldJbc()) {
rs = stmt.getGeneratedKeys();
rs.next();
ResultSetMetaData rsmd = rs.getMetaData();
for (int i = 1; i <= rsmd.getColumnCount(); i++) {
String name = rsmd.getColumnName(i);
String value = rs.getObject(name).toString();
msg.addMetadata(name, value);
}
}
}
}
finally {
JdbcUtil.closeQuietly(savedKeysQuery);
JdbcUtil.closeQuietly(rs);
}
}
protected DatabaseActor configureActor(AdaptrisMessage msg) throws SQLException {
Connection c = getConnection(msg);
if (!c.equals(actor.getSqlConnection())) {
actor.reInitialise(c);
}
return actor;
}
@Override
protected void prepareService() throws CoreException {}
protected class DatabaseActor {
private PreparedStatement insertStatement = null;
private String lastInsertStatement = "";
private Connection sqlConnection;
private boolean oldJDBC;
DatabaseActor() {
}
void reInitialise(Connection c) throws SQLException {
destroy();
sqlConnection = c;
}
public PreparedStatement getInsertStatement(AdaptrisMessage msg) throws SQLException {
String currentStatement = getParameterApplicator().prepareParametersToStatement(msg.resolve(getStatement()));
if (!lastInsertStatement.equals(currentStatement) || insertStatement == null) {
JdbcUtil.closeQuietly(insertStatement);
insertStatement = prepare(currentStatement);
lastInsertStatement = currentStatement;
}
return insertStatement;
}
private PreparedStatement prepare(String statement) throws SQLException {
PreparedStatement result = null;
if (saveReturnedKeys()) {
try {
result = prepareStatement(sqlConnection, statement, Statement.RETURN_GENERATED_KEYS);
}
catch (Throwable error) {
oldJDBC = true;
result = prepareStatement(sqlConnection, statement);
}
}
else {
result = prepareStatement(sqlConnection, statement);
}
return result;
}
void destroy() {
JdbcUtil.closeQuietly(insertStatement, sqlConnection);
sqlConnection = null;
insertStatement = null;
}
public Connection getSqlConnection() {
return sqlConnection;
}
boolean isOldJbc() {
return oldJDBC;
}
}
} |
<reponame>kalik1/summoner-fe
import { ServerStatus } from './server.status.class';
/**
 * Data model describing a managed server instance as exchanged with the
 * backend API.
 */
export class Server {
  /** Unique identifier of the server. */
  id: string;
  /** Human-readable server name. */
  name: string;
  /** Optional command line used to start the server. */
  cmd?: string;
  // NOTE(review): the next two fields are untyped (`any`); tighten them once
  // the backend contract is known.
  image?: any;
  Instance?: any;
  /** Port the server itself listens on. */
  serverPort: number;
  /** Port used for management traffic. */
  managePort: number;
  /** Current runtime status, if known. */
  status?: ServerStatus;
}
|
def _GetAliasForIdentifier(identifier, alias_map):
ns = identifier.split('.', 1)[0]
aliased_symbol = alias_map.get(ns)
if aliased_symbol:
return aliased_symbol + identifier[len(ns):] |
ELISA detection of specific functional antibodies in human serum to Escherichia coli, tetanus toxoid, and diphtheria-tetanus toxoids: normal values for IgG, IgA, and IgM.
An inexpensive, easily performed enzyme-linked immunosorbent assay (ELISA) was developed to measure specific IgG, IgA, and IgM antibodies to the common antigens Escherichia coli, diphtheria-tetanus toxoid, and tetanus toxoid. Normal values were established. Classical antibody deficiency disease states were confirmed and delineated by these assays. Additionally, several instances were discovered when functional antibody levels were abnormal when the serum immunoglobulin levels were normal. The use of ELISA assays for antibodies to common antigens provides a useful technique to measure and monitor isotype responses of the humoral immune system. |
import * as React from 'react'
/**
 * Embeds the Spreadshop storefront by injecting its loader script into the
 * document body; the script replaces the #myShop placeholder content.
 */
export const Shop: React.FC = () => {
    React.useEffect(() => {
        const script = document.createElement('script');
        script.src = "https://shop.myspreadshop.com/shopfiles/shopclient/shopclient.nocache.js";
        script.async = true;
        document.body.appendChild(script);
        // Remove the injected script when the component unmounts.
        return () => {
            document.body.removeChild(script);
        };
        // Empty dependency array: the original omitted it, so the script was
        // re-appended to the body on every render.
    }, []);

    return (
        <div id="myShop">
            <a href="https://trashmobeco.myspreadshop.com">trashmobeco</a>
        </div>
    );
};
|
def create_schedule_abc(user_model=None):
    """
    Build and return an abstract Django model base class for schedules.

    :param user_model: model (or dotted path) used for the ``owner`` foreign
        key; defaults to ``settings.AUTH_USER_MODEL``.
    :return: the abstract ``AbstractBaseSchedule`` model class.
    """
    user_model = user_model or settings.AUTH_USER_MODEL

    class AbstractBaseSchedule(models.Model, AbstractSchedule):
        # NOTE(review): ForeignKey is missing the on_delete argument, which is
        # mandatory on Django >= 2.0 -- confirm the targeted Django version.
        owner = models.ForeignKey(
            user_model, related_name='schedules',
            db_index=True, blank=True, null=True,
        )
        # URL to be fetched on each scheduled run.
        url = models.URLField()
        # Interval between runs.
        cycle = models.IntegerField(default=DEFAULT_SCHEDULE_CYCLE)
        # Schedules are created disabled by default.
        enabled = models.BooleanField(default=False)
        type = models.CharField(max_length=SCHEDULE_TYPE_MAX_LENGTH,
                                default=DEFAULT_SCHEDULE_TYPE)
        # Free-form per-schedule options.
        options = JSONField(default=DEFAULT_OPTIONS)

        class Meta:
            abstract = True
            # A user may schedule each URL at most once.
            unique_together = (('owner', 'url'),)

    return AbstractBaseSchedule
// Copyright 2013, 2020 <NAME>.
//
// SPDX-License-Identifier: Apache-2.0
package camutil
import (
"bytes"
"context"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
"os/exec"
"strings"
"sync"
"github.com/hashicorp/go-retryablehttp"
"perkeep.org/pkg/auth"
"perkeep.org/pkg/blob"
"perkeep.org/pkg/blobserver/localdisk"
"perkeep.org/pkg/client"
"perkeep.org/pkg/schema"
)
// Log is a pluggable structured logging hook (keyvals style); the default
// implementation discards everything.
var Log = func(keyvals ...interface{}) error { return nil }
// Downloader is the struct for downloading file/dir blobs.
// It embeds a blob.Fetcher for retrieval and keeps the underlying client
// plus extra args.
type Downloader struct {
	cl *client.Client
	blob.Fetcher
	args []string
}
// Cache of clients keyed by server address, guarded by cachedClientMtx.
var (
	cachedClient    = make(map[string]*client.Client, 1)
	cachedClientMtx sync.Mutex
)
// NewClient returns a new client for the given server. Auth is set up according
// to the client config (~/.config/camlistore/client-config.json)
// and the environment variables. Clients are cached per server address.
func NewClient(server string) (*client.Client, error) {
	if server == "" {
		server = "http://localhost:3179"
	}
	cachedClientMtx.Lock()
	defer cachedClientMtx.Unlock()
	c, ok := cachedClient[server]
	if ok {
		return c, nil
	}

	var authIncluded bool
	opts := make([]client.ClientOption, 0, 4)
	if !strings.Contains(server, "://") {
		opts = append(opts, client.OptionServer(server), client.OptionInsecure(true))
	} else if strings.HasPrefix(server, "file://") {
		// Local blob directory instead of a remote server.
		bs, err := localdisk.New(server[7:])
		if err != nil {
			return nil, err
		}
		opts = append(opts, client.OptionUseStorageClient(bs))
	} else {
		URL, err := url.Parse(server)
		if err != nil {
			return nil, err
		}
		opts = append(opts, client.OptionServer(server))
		if URL.Scheme == "http" {
			opts = append(opts, client.OptionInsecure(true))
		}
		if u := URL.User; u != nil && u.Username() != "" {
			passwd, _ := u.Password()
			opts = append(opts, client.OptionAuthMode(auth.NewBasicAuth(u.Username(), passwd)))
			authIncluded = true
		}
	}
	var err error
	if c, err = client.New(opts...); err != nil {
		return nil, err
	}
	// Wrap the client's HTTP transport with retry support.
	rc := retryablehttp.NewClient()
	clcl := *c.HTTPClient()
	rc.HTTPClient = &clcl
	rc.Logger = nil
	rc.CheckRetry = func(ctx context.Context, resp *http.Response, err error) (bool, error) {
		retry, err := retryablehttp.ErrorPropagatedRetryPolicy(ctx, resp, err)
		if !retry || err == nil {
			return retry, err
		}
		// resp can be nil when the request failed before any response arrived
		// (connection refused, timeout, ...); the previous code dereferenced
		// resp.Body without this guard and would panic in that case.
		if resp != nil && resp.Body != nil {
			b, _ := ioutil.ReadAll(io.LimitReader(resp.Body, 2000))
			err = fmt.Errorf("%w: %s", err, string(b))
		}
		return retry, err
	}
	c.SetHTTPClient(rc.StandardClient())

	if !authIncluded {
		if err = c.SetupAuth(); err != nil {
			return nil, err
		}
	}
	cachedClient[server] = c
	return c, nil
}
var (
	// cachedDownloader memoizes Downloaders per server string
	// (guarded by cachedDownloaderMtx).
	cachedDownloader    = make(map[string]*Downloader, 1)
	cachedDownloaderMtx sync.Mutex
)
// The followings are copied from camlistore.org/cmd/camget
// NewDownloader creates a new Downloader (client + properties + disk cache)
// for the server. Downloaders are cached per server string.
func NewDownloader(server string, noCache bool) (*Downloader, error) {
	cachedDownloaderMtx.Lock()
	defer cachedDownloaderMtx.Unlock()
	if cached, ok := cachedDownloader[server]; ok {
		return cached, nil
	}

	cl, err := NewClient(server)
	if err != nil {
		return nil, err
	}
	d := &Downloader{cl: cl}

	// Local file:// stores need no blob cache at all.
	if strings.HasPrefix(server, "file://") {
		d.Fetcher = d.cl
		cachedDownloader[server] = d
		return d, nil
	}

	if noCache {
		d.Fetcher = d.cl
	} else {
		// Put a 512 MiB on-disk cache in front of the client.
		if d.Fetcher, err = NewBadgerCache(d.cl, 512<<20); err != nil {
			return nil, fmt.Errorf("setup local disk cache: %w", err)
		}
		if Verbose {
			Log("msg", "Using temp blob cache directory "+d.Fetcher.(*BadgerCache).Root)
		}
	}

	d.args = []string{}
	if server != "" {
		d.args = []string{"-server=" + server}
	}

	cachedDownloader[server] = d
	return d, nil
}
// Close closes the downloader (the underlying client)
func (down *Downloader) Close() {
	if down == nil || down.Fetcher == nil {
		return
	}
	// Clean up the fetcher's resources when it supports it (e.g. disk cache).
	type cleaner interface{ Clean() }
	if c, ok := down.Fetcher.(cleaner); ok {
		c.Clean()
	}
}
// ParseBlobNames parses the blob names, appending to items, and returning
// the expanded slice, and error if happened.
// This uses blob.Parse, and can decode base64-encoded refs as a plus.
func ParseBlobNames(items []blob.Ref, names []string) ([]blob.Ref, error) {
	for _, name := range names {
		ref, ok := blob.Parse(name)
		if !ok {
			// Not a canonical ref; try the base64-encoded form.
			var parseErr error
			if ref, parseErr = Base64ToRef(name); parseErr != nil {
				return nil, parseErr
			}
		}
		items = append(items, ref)
	}
	return items, nil
}
// Base64ToRef decodes a base64-encoded blobref.
//
// The accepted form is "<hashname>-<urlsafe-base64 digest>"; it is converted
// to the canonical "<hashname>-<hex digest>" form and parsed with blob.Parse.
func Base64ToRef(arg string) (br blob.Ref, err error) {
	// Only the first 128 bytes can matter (a digest is at most 64 bytes,
	// i.e. 128 hex characters) — mirrors the original truncation.
	s := arg
	if len(s) > 128 {
		s = s[:128]
	}
	i := strings.IndexByte(s, '-')
	if i < 0 {
		err = fmt.Errorf("no - in %q", arg)
		return
	}
	hashName, data := strings.ToLower(s[:i]), s[i+1:]
	// BUG FIX: the original decoded into a fixed 64-byte buffer, which could
	// be overrun by long base64 input; size the buffer from the input.
	raw := make([]byte, base64.URLEncoding.DecodedLen(len(data)))
	var n int
	if n, err = base64.URLEncoding.Decode(raw, []byte(data)); err != nil {
		err = fmt.Errorf("cannot decode %q as base64: %w", data, err)
		return
	}
	refStr := hashName + "-" + hex.EncodeToString(raw[:n])
	br, ok := blob.Parse(refStr)
	if !ok {
		// BUG FIX: the original wrapped err with %w here, but err is always
		// nil on this path, producing a useless "%!w(<nil>)" in the message.
		err = fmt.Errorf("cannot parse %q as blobref", refStr)
		return
	}
	return br, nil
}
// Start starts the downloads of the blobrefs.
// Just the JSON schema if contents is false, else the content of the blob.
//
// Each blob is fetched through down.Fetcher first; on failure the external
// pk-get command is used as a fallback.
func (down *Downloader) Start(ctx context.Context, contents bool, items ...blob.Ref) (io.ReadCloser, error) {
	readers := make([]io.Reader, 0, len(items))
	closers := make([]io.Closer, 0, len(items))
	var (
		rc  io.ReadCloser
		err error
	)
	for _, br := range items {
		if contents {
			rc, err = schema.NewFileReader(ctx, down.Fetcher, br)
			if err == nil {
				rc.(*schema.FileReader).LoadAllChunks()
			}
		} else {
			var b *blob.Blob
			b, err = blob.FromFetcher(ctx, down.Fetcher, br)
			if err == nil {
				var r io.Reader
				r, err = b.ReadAll(ctx)
				rc = ioutil.NopCloser(r)
			} else if errors.Is(err, os.ErrNotExist) {
				return nil, fmt.Errorf("%v: %w", br, err)
			} else {
				Log("error", err)
			}
		}
		if err == nil && rc != nil {
			readers = append(readers, rc)
			closers = append(closers, rc)
			continue
		}

		// Fallback: shell out to pk-get.
		Log("msg", "downloading", "blob", br, "error", err)
		args := append(make([]string, 0, len(down.args)+3), down.args...)
		if contents {
			args = append(args, "-contents=true")
		}
		if InsecureTLS {
			args = append(args, "-insecure=true")
		}
		args = append(args, br.String())
		// BUG FIX: the original combined StdoutPipe with Run, but Run waits
		// for the process to exit before the pipe is read — large outputs
		// deadlock and the pipe is closed before it can be consumed.
		// Collect stdout into a buffer instead.
		c := exec.Command(cmdPkGet, args...)
		var outBuf, errBuf bytes.Buffer
		c.Stdout = &outBuf
		c.Stderr = &errBuf
		Log("msg", "calling "+cmdPkGet, "args", args)
		if err = c.Run(); err != nil {
			return nil, fmt.Errorf("call %s %q: %s: %w", cmdPkGet, args, errBuf.String(), err)
		}
		rc = ioutil.NopCloser(bytes.NewReader(outBuf.Bytes()))
		readers = append(readers, rc)
		closers = append(closers, rc)
	}
	if len(readers) == 0 {
		return nil, io.EOF
	}
	return struct {
		io.Reader
		io.Closer
	}{io.MultiReader(readers...),
		multiCloser{closers},
	}, nil
}
// Save saves contents of the blobs into destDir as files
func (down *Downloader) Save(ctx context.Context, destDir string, contents bool, items ...blob.Ref) error {
	for _, ref := range items {
		err := smartFetch(ctx, down.Fetcher, destDir, ref)
		if err == nil {
			continue
		}
		Log("msg", "Save", "error", err)
		return err
	}
	return nil
}
// fetch retrieves the raw blob br from src, wrapping any failure with context.
func fetch(ctx context.Context, src blob.Fetcher, br blob.Ref) (io.ReadCloser, error) {
	rc, _, err := src.Fetch(ctx, br)
	if err == nil {
		return rc, nil
	}
	return nil, fmt.Errorf("fetch %s: %w", br, err)
}
var _ = io.Closer(multiCloser{})
type multiCloser struct {
closers []io.Closer
}
func (mc multiCloser) Close() error {
var err error
for _, c := range mc.closers {
if closeErr := c.Close(); closeErr != nil && err == nil {
err = closeErr
}
}
return err
}
|
def _calc_node_expanding_priority(self, search_node: SearchNode) -> float:
    """Weighted-A* expansion priority: (1 - w) * g + w * h."""
    weight = self.heuristic_weight
    heuristic_estimate = self.heuristic_function.estimate(search_node.state)
    return (1 - weight) * search_node.g_cost + weight * heuristic_estimate
/**
* Servlet Filter implementation class LoginFilter
*/
@WebFilter("/LoginFilter")
public class LoginFilter extends HttpFilter {
private String redirectUrl;
private String uncheckedUrls;
private String sessionKey;
@Override
protected void init() {
// TODO Auto-generated method stub
ServletContext servletContext=getFilterConfig().getServletContext();
redirectUrl=servletContext.getInitParameter("rediretPage");
uncheckedUrls=servletContext.getInitParameter("uncheckedUrls");
sessionKey=servletContext.getInitParameter("userSessionKey");
}
@Override
public void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain filterChain)
throws IOException, ServletException {
// TODO Auto-generated method stub
System.out.println("-----34 dofilter-----------");
//1.获取请求的url
String servletPath=request.getServletPath();
//2.检查获取的servletpath是否为不需要检查的url的一部分,若是则直接放行,方法结束
List<String> urls=Arrays.asList(uncheckedUrls.split(","));
if(urls.contains(servletPath)) {
filterChain.doFilter(request, response);
return;
}
//3.获取sessionid对比,若不一样则重定向到redirecturl
Object sessionid=request.getSession().getAttribute(sessionKey);
System.out.println("--------------sessionkey:"+sessionid);
System.out.println("--------------sessionid:"+request.getSession().getId());
if(sessionid!=request.getSession().getId()) {
response.sendRedirect( request.getContextPath()+redirectUrl);
return;
}
//4.若sessionid相同则放行,允许访问
filterChain.doFilter(request, response);
}
} |
Paris, March 2
The provisional Franco-Moroccan agreement signed today by the Foreign Minister, M. Pineau, and the Moroccan Premier, Si Bekkai, has given such wide satisfaction to the Moroccans that at first sight it is difficult to see what the French have retained.
The agreement recognises that the Treaty of Fez, which created the Protectorate in 1912, has become outdated, and declares that negotiations will henceforth be between equal and sovereign States, with Morocco having the right to an army and a Foreign Ministry of its own. It recognises the right of the Sultan to legislate without any French veto, and provides for negotiations for handing over parts of the administration hitherto reserved to the French.
France recognises the integrity of Moroccan territory and promises to secure that it is recognised by others - a reference to the Spanish zone and to Tangier. The Resident-General will at once become a High Commissioner. And Morocco is to be represented forthwith on the committee which decides monetary policy for the franc zone.
Ratification Question
The Moroccan delegates are jubilant, and in a public statement to-night declare that Morocco is now released from all traces of wardship. They say that the Sultan can once more be proud of his nation and that the French have shown themselves still to be people of the principles of 1789. They claim that the provision that the agreement is subject to ratification by the National Assembly is a mere formality.
The Sultan will be able to claim complete victory when he returns to Rabat with, for the first time in his life, the right to rule without anyone else's permission. But reports from Morocco indicate that he will need all the prestige that he can mobilise to overcome the drift to anarchy. Though France recognises his right to an army, at present he does not dispose of one.
In making these enormous concessions, the French Government is influenced by several considerations. First of all there is the collapse of the old class of notables with the disappearance of the Glaoui.
The Sultan undoubtedly needs an answer to his extremists, but he also needs an instrument of administration and for the maintenance of order. The French Government has obtained the following points: -
The French High Commissioner will still have a right to see all decrees affecting Frenchmen or foreigners. The status of the French Army in Morocco remains provisionally unchanged. The transfer of administrative services hitherto French is to be regulated by agreement. The rights and liberties of French citizens in Morocco are to be guaranteed. Finally: the Moroccan Government admits that foreign policy, security, economics, and cultural affairs are all aspects of the inter-dependence of the two countries which is to be regulated by the new treaty.
How interdependence is to be interpreted is, of course, an open question and many Moroccans will demand that it be whittled down to very little: but there is at least a starting-point here for negotiation. |
From behind the Iron Curtain to Hybrid Warfare Networks (Echoes of Churchill’s Fulton Speech)
Aim. Based on the analysis of Winston Churchill’s Fulton speech and the project “The Unthinkable” initiated by him, we can trace the causes of the unavoidability and, moreover, the aggravation of the confrontation between the collective West and modern Russia. Tasks. Analysis of the course of the history of the post-war world from the unification of the Western world against the Soviet Union during the Cold War, initiated by Winston Churchill’s Fulton speech, as well as the revival of the “Iron Curtain” — a global barrier between the collective West and the entire non-Western world entangled in a network of hybrid warfare. Methods. The historical method and the method of civilizational and cultural analysis were applied; content analysis and event analysis of Churchill’s Fulton speech were carried out, allowing us to analyze the intentions and identify the true goals of the author and the hidden meaning of his speech, which affects the global political discourse. Results. It is argued that it is necessary to take into account historical analogies that make it possible to reveal the deep meaning of strategic planning of the policy of peace as opposed to the policy of war both in the Eurasian space and throughout the world. Conclusions. Judgments are formulated about the growing geopolitical tension between the collective West and the entire non-Western world in the networks of hybrid warfare.
import pandas as pd
# TODO: confirm these changes don't affect other areas of the code
def merge_county_and_hosp(df_county, df_hospital):
    """Merge hospital-level records with county-level COVID outcomes.

    Args:
        df_county: county-level frame keyed by 'countyFIPS' (5-char strings),
            containing the outcome columns 'tot_cases' and 'tot_deaths'.
        df_hospital: hospital-level frame with a (possibly NaN, numeric)
            'countyFIPS' column plus 'Hospital Employees', 'Hospital Name',
            'Facility Name' and 'CMS Certification Number'.

    Returns:
        Hospital-level frame with county outcomes attached, county-wide
        employee totals, each hospital's employee share of its county, sorted
        by deaths then employees; rows with duplicated CMS numbers dropped.
    """
    outcomes = ['tot_cases', 'tot_deaths']

    # Work on a copy so the caller's frame is never mutated.
    df_hospital = df_hospital.copy()

    # Normalize countyFIPS to 5-char zero-padded strings, leaving NaNs as-is.
    # BUG FIX: the original chained assignment df[col][mask] = ... triggers
    # SettingWithCopy and, because the dtype changes (float -> str), silently
    # fails to update the frame, breaking the merge key. Cast the column to
    # object first, then assign via .loc.
    fips = df_hospital['countyFIPS']
    valid = ~fips.isna()
    df_hospital['countyFIPS'] = fips.astype(object)
    df_hospital.loc[valid, 'countyFIPS'] = (
        fips[valid].astype(int).astype(str).str.zfill(5))

    df = df_hospital.merge(df_county, how='left', on='countyFIPS')
    # Hospitals with no county match get zero counts rather than NaN.
    df[outcomes] = df[outcomes].fillna(0)

    # Aggregate hospital employees by county and attach the county total.
    # (The original also merged this into a local df_county copy that was
    # never used or returned; that dead store is dropped.)
    total_emp_county = df.groupby('countyFIPS').agg({'Hospital Employees': 'sum'})
    total_emp_county = total_emp_county.rename(
        columns={'Hospital Employees': 'Hospital Employees in County'})
    df = pd.merge(df, total_emp_county, how='left', on='countyFIPS')

    df = df.sort_values(by=['tot_deaths', 'Hospital Employees'], ascending=False)
    # Prefer the CMS 'Facility Name' over the original 'Hospital Name'.
    df = df.drop(columns='Hospital Name')
    df = df.rename(columns={'Facility Name': 'Hospital Name'})
    # Drop ALL rows sharing a CMS number (keep=False), not just later ones.
    df = df.drop_duplicates('CMS Certification Number', keep=False)

    # Fraction of employees out of all county hospitals.
    df['Frac Hospital Employees of County'] = (
        df['Hospital Employees'] / df['Hospital Employees in County'])
    df = df.loc[:, ~df.columns.duplicated()]
    return df
|
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package javaguide.advanced.embedding;
import java.io.IOException;
import org.junit.Test;
import play.libs.ws.WSClient;
import play.libs.ws.WSResponse;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.CompletionStage;
import java.util.function.Consumer;
//#imports
import play.routing.RoutingDsl;
import play.server.Server;
import static play.mvc.Controller.*;
//#imports
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
/**
 * Documentation code samples for embedding a Play server inside a plain Java
 * application. The {@code //#...} marker comments delimit snippets that are
 * extracted into the published docs — do not move, rename, or reformat them.
 */
public class JavaEmbeddingPlay {

    /** Starts an embedded server with one route and verifies an HTTP round trip. */
    @Test
    public void simple() throws Exception {
        //#simple
        Server server = Server.forRouter((components) ->
            RoutingDsl.fromComponents(components)
                .GET("/hello/:to").routingTo((request, to) ->
                    ok("Hello " + to)
                )
                .build()
        );
        //#simple
        try {
            withClient(ws -> {
                //#http-port
                CompletionStage<WSResponse> response = ws.url(
                    "http://localhost:" + server.httpPort() + "/hello/world"
                ).get();
                //#http-port
                try {
                    assertThat(response.toCompletableFuture().get(10, TimeUnit.SECONDS).getBody(), equalTo("Hello world"));
                } catch (Exception e) {
                    // Rethrow unchecked: the Consumer lambda cannot throw checked exceptions.
                    throw new RuntimeException(e);
                }
            });
        } finally {
            //#stop
            server.stop();
            //#stop
        }
    }

    /** Same round trip as {@link #simple()}; exists to host the "config" doc snippet. */
    @Test
    public void config() throws Exception {
        //#config
        Server server = Server.forRouter((components) ->
            RoutingDsl.fromComponents(components)
                .GET("/hello/:to").routingTo((request, to) ->
                    ok("Hello " + to)
                )
                .build()
        );
        //#config
        try {
            withClient(ws -> {
                try {
                    assertThat(ws.url("http://localhost:" + server.httpPort() + "/hello/world").get().toCompletableFuture().get(10,
                        TimeUnit.SECONDS).getBody(), equalTo("Hello world"));
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
            );
        } finally {
            server.stop();
        }
    }

    /** Runs {@code callback} with a WS test client on port 19000, closing it afterwards. */
    private void withClient(Consumer<WSClient> callback) throws IOException {
        try (WSClient client = play.test.WSTestClient.newClient(19000)) {
            callback.accept(client);
        }
    }
}
|
Heat-kernel estimates for random walk among random conductances with heavy tail
We study models of discrete-time, symmetric, $\Z^{d}$-valued random walks in random environments, driven by a field of i.i.d. random nearest-neighbor conductances $\omega_{xy}\in[0,1]$, with polynomial tail near 0 with exponent $\gamma>0$. We first prove for all $d\geq5$ that the return probability shows an anomalous decay (non-Gaussian) that approaches (up to sub-polynomial terms) a random constant times $n^{-2}$ when we push the power $\gamma$ to zero. In contrast, we prove that the heat-kernel decay is as close as we want, in a logarithmic sense, to the standard decay $n^{-d/2}$ for large values of the parameter $\gamma$.
Introduction and results
The main purpose of this work is the derivation of heat-kernel bounds for random walks (X n ) n∈N among polynomial lower tail random conductances with exponent γ > 0, on Z d , d > 4. We show that the heat-kernel exhibits opposite behaviors, anomalous and standard, for small and large values of γ.
Random walks in reversible random environments are driven by the transition matrix P ω (x, y) = ω xy π ω (x) . (1.1) where (ω xy ) is a family of random (non-negative) conductances subject to the symmetry condition ω xy = ω yx . The sum π ω (x) = y ω xy defines an invariant, reversible measure for the corresponding discrete-time Markov chain. In most situations ω xy are non-zero only for nearest neighbors on Z d and are sampled from a shift-invariant, ergodic or even i.i.d. measure Q. c 2009 by Omar Boukhadra. Provided for non-commercial research and education use. Not for reproduction, distribution or commercial use.
One has then the standard local-CLT like decay of the heat-kernel (c 1 , c 2 are absolute constants), as proved by Delmotte : (1.2) Once the assumption of uniform ellipticity is relaxed, matters get more complicated. The most-intensely studied example is the simple random walk on the infinite cluster of supercritical bond percolation on Z d , d ≥ 2. This corresponds to ω xy ∈ {0, 1} i.i.d. with Q(ω b = 1) > p c (d) where p c (d) is the percolation threshold (cf. ). Here an annealed invariance principle has been obtained by De Masi, Ferrari, Goldstein and Wick - in the late 1980s. More recently, Mathieu and Rémy proved the on-diagonal (i.e., x = y) version of the heat-kernel upper bound (1.2)-a slightly weaker version of which was also obtained by Heicklen and Hoffman -and, soon afterwards, Barlow proved the full upper and lower bounds on P n ω (x, y) of the form (1.2). (Both these results hold for n exceeding some random time defined relative to the environment in the vicinity of x and y.) Heat-kernel upper bounds were then used in the proofs of quenched invariance principles by Sidoravicius and Sznitman for d ≥ 4, and for all d ≥ 2 by Berger and Biskup and Mathieu and Piatnitski .
We consider in our case a family of symmetric, irreducible, nearest-neighbor Markov chains on Z d , d ≥ 5, driven by a field of i.i.d. bounded random conductances ω xy ∈ and subject to the symmetry condition ω xy = ω yx . These are constructed as follows. Let Ω be the set of functions ω : Z d × Z d → R + such that ω xy > 0 iff x ∼ y, and ω xy = ω yx ( x ∼ y means that x and y are nearest neighbors). We call elements of Ω environments.
We choose the family where γ > 0 is a parameter. Therefore, the conductances are Q-a.s. positive.
In a recent paper, Fontes and Mathieu studied continuous-time random walks on Z d which are defined by generators L ω of the form with conductances given by for i.i.d. random variables ω(x) > 0 satisfying (1.3). For these cases, it was found that the annealed heat-kernel, dQ(ω)P ω 0 (X t = 0), exhibits an anomalous decay, for γ < d/2. Explicitly, from , Theorem 4.3, we have In addition, in a more recent paper, Berger, Biskup, Hoffman and Kozma , provided universal upper bounds on the quenched heat-kernel by considering the nearest-neighbor simple random walk on Z d , d ≥ 2, driven by a field of i.i.d. bounded random conductances ω xy ∈ . The conductance law is i.i.d. subject to the condition that the probability of ω xy > 0 exceeds the threshold p c (d) for bond percolation on Z d . For environments in which the origin is connected to infinity by bonds with positive conductances, they studied the decay of the 2n-step return probability P 2n ω (0, 0). They have proved that P 2n ω (0, 0) is bounded by a random constant times n −d/2 in d = 2, 3, while it is o(n −2 ) in d ≥ 5 and O(n −2 log n) in d = 4. More precisely, from , Theorem 2.1, we have for almost every ω ∈ {0 ∈ C ∞ } (C ∞ represents the set of sites that have a path to infinity along bonds with positive conductances), and for all n ≥ 1.
The distributions that they use in part (1) of Theorem 1.1 have a tail near zero of the general form Berger, Biskup , Hoffman and Kozma called attention to the fact that the construction of an estimate of the anomalous heat-kernel decay for random walk among polynomial lower tail random conductances on Z d , seems to require subtle control of heat-kernel lower bounds which go beyond the estimates that can be easily pulled out from the literature. In the present paper, we give a response to this question and show that every distribution with an appropriate power-law decay near zero, can serve as such example, and that when we push the power to zero. The lower bound obtained for the return probability approaches (up to sub-polynomial terms) the upper bound supplied by and that for all d ≥ 5.
Here is our first main result whose proof is given in section 2 : There exists a positive constant δ(γ) depending only on d and γ such that Q-a.s., there exists C = C(ω) < ∞ and for all n ≥ 1 (1.9) The proof tells us in fact, with (1.5), that for d ≥ 5 we have almost surely (1.10) (2) As we were reminded by M. Biskup and T.M. Prescott, the invariance principle (CLT) (cf Theorem 2.1. in and Theorem 1.3 in ) automatically implies the "usual" lower bound on the heat-kernel under weaker conditions on the conductances. Indeed, the Markov property and reversibility of X yield Cauchy-Schwarz then gives Now the invariance principle implies that P ω 0 (|X n | ≤ √ n) 2 has a positive limit as n → ∞ and the Spatial Ergodic Theorem shows that |C ∞ ∩ d | grows proportionally to n d/2 . Hence we get Note that, in d = 2, 3, this complements nicely the "universal" upper bounds derived in . In d = 4, the decay is at most n −2 log n and at least n −2 .
The result of Fontes and Mathieu (1.4) (cf. , Theorem 4.3) encourages us to believe that the quenched heat-kernel has a standard decay when γ ≥ d/2, but the construction seems to require subtle control of heat-kernel upper bounds. In the second result of this paper whose proof is given in section 3, we prove, for all d ≥ 5, that the heat-kernel decay is as close as we want, in a logarithmic sense, to the standard decay n −d/2 for large values of the parameter γ. For the cases where d = 2, 3, we have a standard decay of the quenched return probability under weaker conditions on the conductances (see Remark 1.3).
Theorem 1.4 Let d ≥ 5. There exists a positive constant δ(γ) depending only on d and γ such that Q-a.s., In what follows,, we refer to P ω x (·) as the quenched law of the random walk X = (X n ) n≥0 on ((Z d ) N , G) with transitions given in (1.1) in the environment ω, where G is the σ−algebra generated by cylinder functions, and let P := Q ⊗ P ω 0 be the so-called annealed semi-direct product measure law defined by where F denote the Borel σ−algebra on Ω (which is the same as the σ−algebra generated by cylinder functions).
Anomalous heat-kernel decay
In this section we provide the proof of Theorem 1.2.
We consider a family of bounded nearest-neighbor conductances The law Q of the ω's will be i.i.d. subject to the conditions given in (1.3).
We prove this lower bound by following a different approach of the one adopted by Berger, Biskup , Hoffman and Kozma to prove (1.6-1.7). In fact, they prove that in a box of side length ℓ n there exists a configuration where a strong bond with conductance of order 1, is separated from other sites by bonds of strength 1/n, and (at least) one of these "weak" bonds is connected to the origin by a "strong" path not leaving the box. Then the probability that the walk is back to the origin at time n is bounded below by the probability that the walk goes directly towards the above pattern (this costs e O(ℓn) of probability) then crosses the weak bond (which costs 1/n), spends time n − 2ℓ n on the strong bond (which costs only O(1) of probability), then crosses a weak bond again (another factor of 1/n) and then heads towards the origin to get there on time (another e O(ℓn) term). The cost of this strategy is O(1)e O(ℓn) n −2 so if ℓ n = o(log n) then we get leading order n −2 .
Our method for proving Theorem 1.2 is, in fact, simple -we note that due to the reversibility of the walk and with a good use of Cauchy-Schwartz, one does not need to condition on the exact path of the walk, but rather show that the walker has a relatively large probability of staying within a small box around the origin. Our objective will consist in showing that for almost every ω, the probability that the random walk when started at the origin is at time n inside the box B n δ = d , is greater than c/n (where c is a constant and δ = δ(γ) ↓ 0). Hence we will get P 2n ω (0, 0)/π(0) ≥ c/n 2+δd by virtue of the following inequality which, for almost every environment ω, derives from the reversibility of X, Cauchy-Schwarz inequality and (1.3) : (2.1) In order to do this, our strategy is to show that the random walk meets a trap, with positive probability, before getting out from d , where, by definition, a trap is an edge of conductance of order 1 that can be reached only by crossing an edge of order 1/n. The random walk, being imprisoned in the trap inside the box d , will not get out from this box before time n with positive probability. Then the Markov property yields P ω 0 (X n ∈ d ) ≥ c/n. Thus, we will be brought to follow the walk until it finds a specific configuration in the environment.
First, we will need to prove one lemma. Let B N = d be the box centered at the origin and of radius 3N and define ∂B N to be its inner boundary, that is, the set of vertices in B N which are adjacent to some vertex not in B N . We Now, let α, ξ be positive constants such that Q(ω b ≥ ξ) > 0. Define A N (x) to be the event that the configuration near x, y = x + ǫ(x)ê i 0 and z = x + 2ǫ(x)ê i 0 is as follows: (1) 1 2 N −α < ω xy ≤ N −α .
(3) every other bond emanating out of y or z has ω b ≤ N −α .
The event A N (x) so constructed involves a collection of 4d − 1 bonds that will be denoted by C(x), i.e.
If the bonds of the collection C(x) satisfy the conditions of the event A N (x), we agree to call it a trap that we will denote by P N .
The lemma says then that : The occurrence of the event A N (X H k ) means that the random walk X has met a trap P N situated outside of the box B k when it has hit for the first time the boundary of the box B k .
Let q N be the Q-probability of having the configuration of the trap P N . We have q N = Q(A N (x)) = P , ∀x ∈ ∂B k and ∀k ≤ N − 1. Indeed, by virtue of the i.i.d. character of the conductances and the Markov property, when the random walk hits the boundary of B k for the first time at some element x, the probability that the collection C(x) constitutes a trap, i.e., satisfies the conditions of the event A N (x), depends only on the edges of the collection C(x), which have not been visited before. Let k 1 < k 2 ≤ N − 1 and x ∈ ∂B k 2 , we have then since the events {A k 1 N , X H k 2 = x} and A N (x) depend respectively on the conductances of the bonds of B k 2 and the conductances of the bonds of the collection C(x) which is situated outside the box B k 2 when x ∈ ∂B k 2 . Thus With some adaptations, this reasoning remains true in the case of more than two events A k N . We come now to the proof of Theorem 1.2.
Proof of Theorem 1.2. Let d ≥ 5 and γ > 0. Set α = 1−ǫ (4d−2)γ for arbitrary positive constant ǫ < 1 (the constant α is the same used in the definition of the event A N (x)). As seen before (cf. (2.1)), for almost every environment ω, the reversibility of X, Cauchy-Schwarz inequality and (1.3) give By the assumption (1.3) on the conductances and the definition of the event A N (x), the probability of having the configuration of the trap P N is greater than cN −(1−ǫ) (where c is a constant that we use henceforth as a generic constant). Indeed, when N is large enough, we have Consider now the following event The event Λ N so defined may be interpreted as follows : at least, one among the N disjoint collections C(X H k ), k ≤ N − 1, constitutes a trap P N . The events A k N being independent by lemma 2.1, we have
Chebychev inequality and (2.3) then give
It results by Borel-Cantelli lemma that for almost every ω, there exists N 0 ≥ 1 such that for each N ≥ N 0 , the event A N (x) occurs inside the box B N with positive probability (greater than 1/2) on the path of X, for some x ∈ B N −1 . For almost every ω, one may say that X meets with positive probability a trap P N at some site x ∈ B N −1 before getting outside of B N .
Suppose that N ≥ N 0 and let n be such that N α ≤ n < (N + 1) α . Define to be the rank of the first among the N collections C(X H k ), k ≤ N − 1, that constitutes a trap P N . If D N = k, the random variable D N so defined depends only on the steps of X up to time H k . Thus, if D N = k, we have X H k ∈ B N −1 and C(X H k ) constitutes a trap P N . So, if we set X H k = x, the bond (of the trap P N ) will have then a conductance of order N −α . In this case, the probability for the random walk, when started at X H k = x, to cross the bond is by the property (1) of the definition of the event A N (x) above greater than (2.5) Here we use the fact that π ω (x) ≤ 2d by virtue of (1.3). This implies by the Markov property and by (2.5) that (2.6) If the trap P N retains enough the random walk X, we will have H N ≥ n, when it starts at y (always the same y = x + ǫ(x)ê i 0 of the collection C(x)). Let {X j steps outside of the trap P N } and we say "X j steps outside of the trap P N ", when X j+1 is on a site of the border of the trap P N , i.e. X j+1 = y ±ê i , ∀i = i 0 , or X j+1 = x (resp. X j+1 = z ±ê i , The complement of E N is in fact the event that X does not leave the trap during its first n jumps, i.e. X jumps n times, starting at y, in turn on z and y, which, according to the configuration of the trap, costs for each jump a probability greater than ξ ξ + (2d − 1)N −α . Then, we have by the Markov property and since by the choice of N α ≤ n < (N + 1) α (2.7) So, putting this in (2.6), we obtain Now, according to (2.4), we have P ω 0 (D N ≤ N − 1) ≥ 1 / 2 . Then we deduce A fortiori, we have Thus, for all N ≥ N 0 , by replacing the last inequality in (2.2), we obtain where δ(γ) := d(4d − 2)γ/(1 − ǫ). When we let ǫ −→ 0, we get (1.10).
Standard heat-kernel decay
We give here the proof of Theorem 1.4. Let us first give some definitions and fix some notations besides those seen before. Consider a Markov chain on a countable state-space V with transition probability denoted by P(x, y) and invariant measure denoted by π. Define Q(x, y) = π(x)P(x, y) and for each S 1 , S 2 ⊂ V , let (3.1) and use it to define the isoperimetric profile (Here π(S) is the measure of S.) It is easy to check that we may restrict the infimum to sets S that are connected in the graph structure induced on V by P.
To prove Theorem 1.4, we combine basically two facts. On the one hand, we use Theorem 2 of Morris and Peres and all x ∈ V . Let ǫ > 0 and x, y ∈ V . Then x, y ∈ B N +1 , x ∼ y}. Call Z d e the set of even points of Z d , i.e., the points x := (x 1 , . . . , x d ) such that | d i=1 x i | = 2k, with k ∈ N (0 ∈ N), and equip it with the graph structure defined by : two points x, y ∈ Z d e ⊂ Z d are neighbors when they are separated in Z d by two steps, i.e.
We operate the following modification on the environment ω by definingω b = 1 on every bond b / ∈ B N +1 andω b = ω b otherwise. Then, we will adapt the machinery above to the following setting V = Z d e , P = P 2 ω and π = πω, (3.6) with the objects in (3.1-3.3) denoted by Qω, Φ (ω) S and Φω(r). So, the random walk associated with P 2 ω moves on the even points. On the other hand, we need to know the following standard fact that gives a lower bound of the conductances of the box B N . For a proof, see , Lemma 3.6.
Thus, for arbitrary µ > 0, we can write Q−a.s., for all N large enough (3.8) Our next step involves extraction of appropriate bounds on surface and volume terms.
The proof of lemma 3.2 will be a consequence of the following well-known fact of isoperimetric inequalities on Z d (see , Chapter I, § 4). For any connected Λ ⊂ Z d , let ∂Λ denote the set of edges between Λ and Z d \ Λ. Then, there exists a constant κ such that |∂Λ| ≥ κ|Λ| Then, Lemma 3.1 gives a.s. inf b∈B N ω(b) > α and by virtue of (3.10), we have |∂Λ| ≥ κ|Λ| d−1 d , then (3.9) will follow from (3.11-3.12). It remains to prove (3.11-3.12). The bound (3.12) is implied by πω(x) ≤ 2d. For (3.11), since P 2 ω represents two steps of a random walk, we get a lower bound on Q ω (Λ, Z d e \ Λ) by picking a site x ∈ Λ which has a neighbor y ∈ Z d that has a neighbor z ∈ Z d e on the outer boundary of Λ. By Lemma 3.1, if x or z ∈ B N +1 , the relevant contribution is bounded by πω(x)P 2 ω (x, z) ≥ πω(x)ω xy πω(x)ω yz πω(y) ≥ α 2 2d . (3.13) For the case where x, z / ∈ Z d e ∩ B N +1 , clearly the left-hand side of (3.13) is bounded by 1/(2d) > α 2 /(2d). Once Λ has at least two elements, we can do this for (y, z) ranging over all bonds in ∂Λ, so summing over (y, z) we get (3.11).
Now we get what we need to estimate the decay of P 2n ω (0, 0). Proof of Theorem 1.4. Let d ≥ 5, γ > 8d and choose µ > 0 such that Let n = ⌊N/2⌋, N ≫ 1, and consider the random walk onω. We will derive a bound on Φ (ω) Λ for connected Λ ⊂ Z d e . Henceforth c denotes a generic constant. Observe that (3.9) implies Φ (ω) Λ ≥ cα 2 πω(Λ) −1/d . (3.14) Then, we conclude that Φω(r) ≥ cα 2 r −1/d (3.15) The relevant integral is thus bounded by for some constant c > 0. Setting ǫ proportional to n 4d 2 γ +4µd− d 2 , and noting σ ≥ α 2 /(2d), the right-hand side is less than n and by setting δ(γ) = 4d 2 /γ, we will get , ∀x ∈ Z d e . (3.17) As the random walk will not leave the box B N by time 2n, we can replaceω by ω in (3.17), and since P 2n ω (0, x) = 0 for each x / ∈ B N , then after letting µ → 0, we get lim sup This proves the claim for even n; for odd n we just concatenate this with a single step of the random walk.
Bencherif-Madani for his support. I also would like to thank the referees for their careful reading and comments that led to an improvement of the paper. |
package info.kyorohiro.helloworld.util;

/**
 * Minimal lock abstraction: implementations supply a matched pair of
 * acquire/release operations. Callers are expected to pair every
 * {@link #beginLock()} with an {@link #endLock()} (typically in a
 * try/finally block) to avoid leaking the lock.
 */
public interface SimpleLockInter {
	/** Acquires the lock. Must be balanced by a later call to {@link #endLock()}. */
	void beginLock();
	/** Releases the lock previously acquired by {@link #beginLock()}. */
	void endLock();
}
|
/*++
Copyright (c) 2015 Microsoft Corporation
--*/
#include "tactic/arith/arith_bounds_tactic.h"
#include "ast/arith_decl_plugin.h"
struct arith_bounds_tactic : public tactic {
    ast_manager& m;
    arith_util   a;

    arith_bounds_tactic(ast_manager& m):
        m(m),
        a(m)
    {
    }

    ast_manager& get_manager() { return m; }

    void operator()(/* in */  goal_ref const & in,
                    /* out */ goal_ref_buffer & result) override {
        bounds_arith_subsumption(in, result);
    }

    char const* name() const override { return "arith_bounds"; }

    tactic* translate(ast_manager & mgr) override {
        return alloc(arith_bounds_tactic, mgr);
    }

    void checkpoint() {
        tactic::checkpoint(m);
    }

    // Bound record for an expression: the numeric bound r contributed by the
    // goal formula at index idx; is_strict distinguishes '<'/'>' from '<='/'>='.
    struct info { rational r; unsigned idx; bool is_strict;};

    // When proofs are enabled, justify replacing formula j by true:
    // formula i implies it via an arithmetic theory lemma.
    void mk_proof(proof_ref& pr, goal_ref const& s, unsigned i, unsigned j) {
        if (s->proofs_enabled()) {
            proof* th_lemma = m.mk_th_lemma(a.get_family_id(), m.mk_implies(s->form(i), s->form(j)), 0, nullptr);
            pr = m.mk_modus_ponens(s->pr(i), th_lemma);
        }
    }

    // Normalize e (possibly under a negation) into e1 <= e2 (is_strict == false)
    // or e1 < e2 (is_strict == true). Returns false if e is not an inequality.
    bool is_le_or_lt(expr* e, expr*& e1, expr*& e2, bool& is_strict) {
        bool is_negated = m.is_not(e, e);
        if ((!is_negated && (a.is_le(e, e1, e2) || a.is_ge(e, e2, e1))) ||
            (is_negated  && (a.is_lt(e, e2, e1) || a.is_gt(e, e1, e2)))) {
            is_strict = false;
            return true;
        }
        if ((!is_negated && (a.is_lt(e, e1, e2) || a.is_gt(e, e2, e1))) ||
            (is_negated  && (a.is_le(e, e2, e1) || a.is_ge(e, e1, e2)))) {
            is_strict = true;
            return true;
        }
        return false;
    }

    /**
       \brief Basic arithmetic subsumption simplification based on bounds.

       Scans the goal for inequalities with a numeral on one side. For each
       bounded expression only the tightest upper and lower bound is kept;
       every subsumed inequality is replaced by 'true' (with a justifying
       proof when proofs are enabled) and later eliminated by elim_true().
    */
    void bounds_arith_subsumption(goal_ref const& g, goal_ref_buffer& result) {
        info inf;
        rational r;
        goal_ref s(g); // initialize result.
        obj_map<expr, info> lower, upper;
        expr* e1, *e2;
        TRACE("arith_subsumption", s->display(tout); );
        for (unsigned i = 0; i < s->size(); ++i) {
            checkpoint();
            expr* lemma = s->form(i);
            bool is_strict = false;
            bool is_upper  = false;
            if (!is_le_or_lt(lemma, e1, e2, is_strict)) {
                continue;
            }
            // Normalized form: e1 <= e2 or e1 < e2.
            if (a.is_numeral(e2, r)) {
                is_upper = true;        // e1 <= r : upper bound on e1
            }
            else if (a.is_numeral(e1, r)) {
                is_upper = false;       // r <= e2 : lower bound on e2
            }
            else {
                continue;               // no numeral side; not tracked
            }
            proof_ref new_pr(m);
            if (is_upper && upper.find(e1, inf)) {
                if (inf.r > r || (inf.r == r && is_strict && !inf.is_strict)) {
                    // New bound is tighter: retire the previously stored one.
                    mk_proof(new_pr, s, i, inf.idx);
                    s->update(inf.idx, m.mk_true(), new_pr);
                    inf.r = r;
                    inf.is_strict = is_strict;
                    inf.idx = i;
                    upper.insert(e1, inf);
                }
                else {
                    // Stored bound subsumes the new one.
                    mk_proof(new_pr, s, inf.idx, i);
                    s->update(i, m.mk_true(), new_pr);
                }
            }
            else if (is_upper) {
                inf.r = r;
                inf.is_strict = is_strict;
                inf.idx = i;
                upper.insert(e1, inf);
            }
            else if (!is_upper && lower.find(e2, inf)) {
                if (inf.r < r || (inf.r == r && is_strict && !inf.is_strict)) {
                    // New bound is tighter: retire the previously stored one.
                    mk_proof(new_pr, s, i, inf.idx);
                    s->update(inf.idx, m.mk_true(), new_pr);
                    inf.r = r;
                    inf.is_strict = is_strict;
                    inf.idx = i;
                    lower.insert(e2, inf);
                }
                else {
                    // Stored bound subsumes the new one.
                    mk_proof(new_pr, s, inf.idx, i);
                    // Fix: the proof was previously dropped here, unlike the
                    // symmetric upper-bound branch above.
                    s->update(i, m.mk_true(), new_pr);
                }
            }
            else {
                inf.r = r;
                inf.is_strict = is_strict;
                inf.idx = i;
                lower.insert(e2, inf);
            }
        }
        s->elim_true();
        result.push_back(s.get());
        TRACE("arith_subsumption", s->display(tout); );
    }

    void cleanup() override {}
};
// Factory entry point for the arith_bounds tactic. The params_ref argument
// is accepted for uniformity with the other tactic factories but is
// currently unused by this tactic.
tactic * mk_arith_bounds_tactic(ast_manager & m, params_ref const & p) {
    return alloc(arith_bounds_tactic, m);
}
|
/**
* Removes a data, parameter or directory.
*
* @param pathname name of the data to remove
*
* @return the data that was removed
*
* @throws DataAccessException
*/
public Data deleteData(String pathname) throws DataAccessException
{
Data res = null;
boolean largest = taskManager.lock();
try
{
res = _operateData(pathname.trim(), 3, null, 0, true);
}
finally
{
taskManager.unlock(largest);
}
return res;
} |
package org.schoellerfamily.gedbrowser.datamodel.util;
import org.schoellerfamily.gedbrowser.datamodel.Attribute;
import org.schoellerfamily.gedbrowser.datamodel.Child;
import org.schoellerfamily.gedbrowser.datamodel.Family;
import org.schoellerfamily.gedbrowser.datamodel.Husband;
import org.schoellerfamily.gedbrowser.datamodel.Person;
import org.schoellerfamily.gedbrowser.datamodel.Root;
import org.schoellerfamily.gedbrowser.datamodel.Wife;
/**
 * Builder contract for creating {@link Family} objects and wiring
 * {@link Person} objects into them as husband, wife or child.
 *
 * @author <NAME>
 */
public interface FamilyBuilder {
    /**
     * Get the root object of the data set.
     *
     * @return the root object
     */
    Root getRoot();

    /**
     * Create an empty family with no ID, members or events.
     *
     * @return the family
     */
    Family createFamily();

    /**
     * Encapsulate creating family with the given ID.
     *
     * @param idString the id string for the family
     * @return the family
     */
    Family createFamily(String idString);

    /**
     * Create a dated event attached to the given family.
     *
     * @param family the family the event is for
     * @param type the type of event
     * @param dateString the date of the event
     * @return the created event
     */
    Attribute createFamilyEvent(Family family, String type, String dateString);

    /**
     * Create an undated event attached to the given family.
     *
     * @param family the family the event is for
     * @param type the type of event
     * @return the created event
     */
    Attribute createFamilyEvent(Family family, String type);

    /**
     * Add a person as the husband in a family.
     *
     * @param family the family
     * @param person the person
     * @return the husband object
     */
    Husband addHusbandToFamily(Family family, Person person);

    /**
     * Add a person as the wife in a family.
     *
     * @param family the family
     * @param person the person
     * @return the wife object
     */
    Wife addWifeToFamily(Family family, Person person);

    /**
     * Add a person as a child in a family.
     *
     * @param family the family
     * @param person the person
     * @return the Child object
     */
    Child addChildToFamily(Family family, Person person);
}
|
# Collect every item currently visible to the hero.
items = hero.findItems()
# Announce the first item to Bruno and the second to Matilda.
gem0 = items[0]
hero.say("Bruno " + gem0)
hero.say("Matilda " + items[1])
# Walk to the position of the third item.
gem1 = items[2]
hero.moveXY(gem1.pos.x, gem1.pos.y)
|
/**********************************
* SCAENA FRAMEWORK
* Author: <NAME>
* License: MIT - 2016
**********************************/
#pragma once
#include "TextureRequest.h"
#include <glm/glm.hpp>
#include <list>
#include <vector>
using namespace std;
class FrameBuffer;
typedef glm::vec2 vec2;
// Describes the attachments wanted for a FrameBuffer: a set of color
// texture requests plus an optional depth buffer request. A hash code
// summarizing the composition lets matching frame buffers be reused.
class FrameBufferRequest{

private:
	bool requiredDepthBuffer;                // whether a depth attachment is required
	TextureRequest* depthRequest;            // depth attachment description
	vector<TextureRequest*> textureRequests; // color attachment descriptions
	unsigned int hashCode;                   // fingerprint of the request composition

	void addTextureRequestInformationToList(TextureRequest* textureRequest, list<unsigned int>* textureInformation);
	void generateHashCode();

public:
	FrameBufferRequest();
	virtual ~FrameBufferRequest();

	bool isRequiredDepthBuffer(){ return this->requiredDepthBuffer; }

	void addTextureRequest(int width, int height, unsigned int format, unsigned int internalFormat);
	void addTextureRequest(TextureRequest* textureRequest);
	vector<TextureRequest*>* getTextureRequests(){ return &this->textureRequests; }

	void setDepthBufferRequest(TextureRequest* depthRequest);
	TextureRequest* getDepthBufferRequest(){ return this->depthRequest; }

	// Evaluates whether the given frame buffer matches this request.
	bool matches(FrameBuffer* frameBuffer);

	// Returns the hash code identifying the request's composition.
	unsigned int getHashCode(){ return this->hashCode; }
};
import { Component, OnInit } from '@angular/core';
import { FormBuilder, Validators, FormGroup } from '@angular/forms';
import { Router } from '@angular/router';
// Shape of the companies table: column headers plus one string array per row.
declare interface TableData {
    headerRow: string[];
    dataRows: string[][];
}

// Shape of the officers table: column headers plus one string array per row.
declare interface OfficerData {
    headerRow: string[];
    dataRows: string[][];
}
@Component({
selector: 'users-cmp',
moduleId: module.id,
templateUrl: 'users.component.html'
})
export class UsersComponent{
public tableData1: TableData;
public officerData1: OfficerData;
ngOnInit() {
this.tableData1 = {
headerRow: ['ID', 'Company Name', 'Address', 'Business Purpose', 'Status', 'Action'],
dataRows: [
['1', 'British American Tobacco', 'Uhuru Estate, Uyo', "Ciggarette Production", "Excise Trade Licence"],
['2', '<NAME>', 'Jeje Street, Ota Ogun State', 'Alcholic drinks Production', 'Revised Entry of Premise Licence']
]
}
this.officerData1 = {
headerRow: ['ID', 'Name', 'Rank'],
dataRows: [
['1', "<NAME>", 'Inspector'],
['2', '<NAME>', 'Seagent']
]
}
}
}
|
n, m = map(int, raw_input().split())
l1 = map(int, raw_input().split())
l2 = map(int, raw_input().split())
ans = 10000000000000000000
for i in range(n):
cur = -10000000000000000000
for j in range(n):
if j != i:
for k in range(m):
cur = max(cur, l1[j]*l2[k])
ans = min(ans, cur)
print ans
|
/**
 * <pre>
 * Withdraw withdraws a candidate from the leader election
 * </pre>
 */
// NOTE(review): this appears to be gRPC-generated stub code; prefer
// regenerating from the .proto definition over editing it by hand.
public com.google.common.util.concurrent.ListenableFuture<atomix.primitive.election.v1.Primitive.WithdrawResponse> withdraw(
    atomix.primitive.election.v1.Primitive.WithdrawRequest request) {
  // Issues a unary call on the channel and returns the async response future.
  return io.grpc.stub.ClientCalls.futureUnaryCall(
      getChannel().newCall(getWithdrawMethod(), getCallOptions()), request);
}
  def validate_refresh_authorization_roles(self, unique_role, query_options, clients):
    """Verifies that "refresh authorization" picks up role changes made outside Impala.

    Creates two roles through Impala, then drops one and creates another
    directly via the Sentry shell (bypassing Impala). Until the refresh runs,
    the stale role list is still visible; after executing
    "refresh authorization", every client in `clients` must observe the
    externally applied changes.

    Args:
      unique_role: prefix used to derive the role names exercised here.
      query_options: options passed with the "refresh authorization" statement.
      clients: clients whose view of "show roles" is validated after refresh.
    """
    try:
      self.client.execute("create role %s_internal1" % unique_role)
      self.client.execute("create role %s_internal2" % unique_role)

      # Drop a role via the Sentry shell so Impala's cached view goes stale.
      role = "%s_internal1" % unique_role
      subprocess.check_call(
        ["/bin/bash", "-c",
         "%s/bin/sentryShell --conf %s/sentry-site.xml -dr -r %s" %
         (os.getenv("SENTRY_HOME"), os.getenv("SENTRY_CONF_DIR"), role)],
        stdout=sys.stdout, stderr=sys.stderr)
      # The dropped role is still visible until authorization is refreshed.
      result = self.execute_query_expect_success(self.client, "show roles")
      assert any(role in x for x in result.data)
      self.execute_query_expect_success(self.client, "refresh authorization",
                                        query_options=query_options)
      for client in clients:
        result = self.execute_query_expect_success(client, "show roles")
        assert not any(role in x for x in result.data)

      # Create a role externally; it must appear only after a refresh.
      role = "%s_external" % unique_role
      subprocess.check_call(
        ["/bin/bash", "-c",
         "%s/bin/sentryShell --conf %s/sentry-site.xml -cr -r %s" %
         (os.getenv("SENTRY_HOME"), os.getenv("SENTRY_CONF_DIR"), role)],
        stdout=sys.stdout, stderr=sys.stderr)
      # The new role is not visible until authorization is refreshed.
      result = self.execute_query_expect_success(self.client, "show roles")
      assert not any(role in x for x in result.data)
      self.execute_query_expect_success(self.client, "refresh authorization",
                                        query_options=query_options)
      for client in clients:
        result = self.execute_query_expect_success(client, "show roles")
        assert any(role in x for x in result.data)
    finally:
      # Always drop every role this test may have created.
      for suffix in ["internal1", "internal2", "external"]:
        self.role_cleanup("%s_%s" % (unique_role, suffix))
/*
************************************************************************************************************************
* DELAY TASK 'n' TICKS
*
* Description: This function is called to delay execution of the currently running task until the specified number of
* system ticks expires. This, of course, directly equates to delaying the current task for some time to
* expire. No delay will result if the specified delay is 0. If the specified delay is greater than 0
* then, a context switch will result.
*
* Arguments : dly is a value in 'clock ticks' that the task will either delay for or, the target match value
* of the tick counter (OSTickCtr). Note that specifying 0 means the task is not to delay.
*
* depending on the option argument, the task will wake up when OSTickCtr reaches:
*
* OS_OPT_TIME_DLY : OSTickCtr + dly
* OS_OPT_TIME_TIMEOUT : OSTickCtr + dly
* OS_OPT_TIME_MATCH : dly
* OS_OPT_TIME_PERIODIC : OSTCBCurPtr->TickCtrPrev + dly
*
* opt specifies whether 'dly' represents absolute or relative time; default option marked with *** :
*
* *** OS_OPT_TIME_DLY specifies a relative time from the current value of OSTickCtr.
* OS_OPT_TIME_TIMEOUT same as OS_OPT_TIME_DLY.
* OS_OPT_TIME_MATCH indicates that 'dly' specifies the absolute value that OSTickCtr
* must reach before the task will be resumed.
* OS_OPT_TIME_PERIODIC indicates that 'dly' specifies the periodic value that OSTickCtr
* must reach before the task will be resumed.
*
* p_err is a pointer to a variable that will contain an error code from this call.
*
* OS_ERR_NONE The call was successful and the delay occurred
* OS_ERR_OPT_INVALID If you specified an invalid option for this function
* OS_ERR_OS_NOT_RUNNING If uC/OS-III is not running yet
* OS_ERR_SCHED_LOCKED Can't delay when the scheduler is locked
* OS_ERR_TIME_DLY_ISR If you called this function from an ISR
* OS_ERR_TIME_ZERO_DLY If the effective delay is zero
* OS_ERR_TICK_DISABLED If kernel ticks are disabled
*
* Returns : none
*
* Note(s) : none
************************************************************************************************************************
*/
void  OSTimeDly (OS_TICK   dly,
                 OS_OPT    opt,
                 OS_ERR   *p_err)
{
#if (OS_CFG_TICK_EN > 0u)
    CPU_SR_ALLOC();
#endif

#ifdef OS_SAFETY_CRITICAL
    /* Cannot report an error through a null error pointer. */
    if (p_err == (OS_ERR *)0) {
        OS_SAFETY_CRITICAL_EXCEPTION();
        return;
    }
#endif

#if (OS_CFG_TICK_EN == 0u)
    /* Kernel ticks are compiled out, so delays cannot be serviced. */
    *p_err = OS_ERR_TICK_DISABLED;
    return;
#else
#if (OS_CFG_CALLED_FROM_ISR_CHK_EN > 0u)
    /* Delaying blocks the caller; blocking is illegal inside an ISR. */
    if (OSIntNestingCtr > 0u) {
        *p_err = OS_ERR_TIME_DLY_ISR;
        return;
    }
#endif

#if (OS_CFG_INVALID_OS_CALLS_CHK_EN > 0u)
    /* The kernel must be running before a task can be delayed. */
    if (OSRunning != OS_STATE_OS_RUNNING) {
        *p_err = OS_ERR_OS_NOT_RUNNING;
        return;
    }
#endif

    /* With the scheduler locked no context switch could ever occur. */
    if (OSSchedLockNestingCtr > 0u) {
        *p_err = OS_ERR_SCHED_LOCKED;
        return;
    }

    /* Validate the time option; see the header comment for semantics. */
    switch (opt) {
        case OS_OPT_TIME_DLY:
        case OS_OPT_TIME_TIMEOUT:
        case OS_OPT_TIME_PERIODIC:
        case OS_OPT_TIME_MATCH:
             break;

        default:
            *p_err = OS_ERR_OPT_INVALID;
             return;
    }

#if (OS_CFG_TICK_EN > 0u)
    CPU_CRITICAL_ENTER();
    /* Place the current task on the tick (delay) list. */
    OS_TickListInsertDly(OSTCBCurPtr,
                         dly,
                         opt,
                         p_err);
    if (*p_err != OS_ERR_NONE) {
        CPU_CRITICAL_EXIT();
        return;
    }
    /* Make the task unready, then let the scheduler pick the next task. */
    OS_RdyListRemove(OSTCBCurPtr);
    CPU_CRITICAL_EXIT();
    OSSched();
#endif
#endif
}
// TestBadYaml verifies that NewSecretDescriptorList returns a non-nil
// error when the object list YAML is malformed.
func TestBadYaml(t *testing.T) {

	objects := `
- objectName: secret1
  objectType: secretsmanager
- {`

	_, err := NewSecretDescriptorList(objects)
	if err == nil {
		t.Fatalf("Expected error but got none.")
	}
}
Three new scalarane-based sesterterpenes from the tropical marine sponge Strepsichordaia lendenfeldi
From the dichloromethane extract of the tropical marine sponge Strepsichordaia lendenfeldi collected from the Great Barrier Reef, Australia, three new (1, 2, and 9) and seven known (3-8 and 10) scalarane-based sesterterpenes were isolated. All molecular structures were secured by spectroscopic methods, particularly 1D and 2D NMR, and accurate mass measurement. |
// Swap swaps the elements with indexes i and j.
func (t Tracks) Swap(i int, j int) {
tmp := *t[i]
tmp.ID = t[j].ID
t[j].ID = t[i].ID
t[i] = t[j]
t[j] = &tmp
} |
def create_competitions_csv(input_path='athlete_events.csv',
                            output_path='competitions.csv'):
    """Extract the distinct Games from an athlete-events dump.

    Reads ``input_path`` (athlete_events.csv layout: Games name in column 8,
    year in column 9, season in column 10, host city in column 11), assigns
    each distinct Games a sequential integer ID in order of first appearance,
    and writes one ``[id, year, season, city]`` row per Games to
    ``output_path``.

    The file paths are parameterized with defaults matching the original
    hard-coded names, so existing callers are unaffected.

    Returns:
        dict mapping each Games name to its ``[id, year, season, city]`` row.
    """
    competitions = {}
    # newline='' is the csv-module-recommended mode for reading/writing.
    with open(input_path, newline='') as csvfile:
        reader = csv.reader(csvfile)
        next(reader)  # skip the header row
        game_id = 1
        for row in reader:
            game = row[8]
            # Only parse and record the first occurrence of each Games.
            if game not in competitions:
                competitions[game] = [game_id, int(row[9]), row[10], row[11]]
                game_id += 1
    with open(output_path, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        for game in competitions.values():
            writer.writerow(game)
    return competitions
#include<bits/stdc++.h>
using namespace std;
// For each of F test cases, read N and k and print N^k mod 1e9+7.
// Improvements over the original: the ~15 unused locals are removed, N is
// reduced modulo p up front so the multiplications cannot overflow long long
// (assumes N >= 0), and binary exponentiation replaces the O(k) loop.
int main()
{
    const long long MOD = 1000000007LL;
    long long F;
    cin >> F;
    for (long long f = 0; f < F; f++)
    {
        long long N, k;
        cin >> N >> k;
        long long base = N % MOD;   // keep every factor below MOD
        long long result = 1;       // N^0 == 1, matching the original for k == 0
        for (long long e = k; e > 0; e >>= 1)
        {
            if (e & 1)
            {
                result = (result * base) % MOD;
            }
            base = (base * base) % MOD;
        }
        cout << result << endl;
    }
    return 0;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.