content
stringlengths 10
4.9M
|
---|
#include <bits/stdc++.h>
#include <unistd.h>
#include "../util/helpers.cpp"
using namespace std;
// https://adventofcode.com/2019/day/16 (Flawed Frequency Transmission)
// One phase of the "Flawed Frequency Transmission" algorithm (AoC 2019 day 16):
// output digit i is |sum over k of v[k] * pattern_i[k]| % 10, where pattern_i is
// the base pattern with each entry repeated (i + 1) times and the very first
// pattern value skipped.
// NOTE(review): phase_num is never read; it appears to exist only for debugging.
vector<int> do_phase(const vector<int> &v, const vector<int> &base_pattern, int phase_num)
{
    vector<int> out;
    for (size_t i = 0; i < v.size(); i++)
    {
        int temp = 0; // accumulator for output digit i
        // build the full pattern for this rotation
        vector<int> pattern; // NOTE(review): unused — the pattern value is read
                             // directly from base_pattern[j % base_pattern.size()]
        int counter = 0; // 1-based position in v; v[counter - 1] is the element used
        for (size_t j = 0; counter <= v.size(); j++)
        {
            // cout << "j = " << j << ", counter = " << counter << endl;
            size_t k = 0; // repeats of base-pattern entry j consumed so far
            do
            {
                if (j == 0 && k == 0)
                {
                    // skip the first in the pattern
                }
                else
                {
                    // cout << "counter = " << counter << endl;
                    // cout << " " << v[counter - 1] << " * " << base_pattern[j % base_pattern.size()] << " + ";
                    temp += v[counter - 1] * base_pattern[j % base_pattern.size()];
                }
                counter++;
                k++;
            // base-pattern entry j is repeated (i + 1) times for output digit i
            } while (k <= i && counter <= v.size());
        }
        // only the least significant digit of the absolute sum is kept
        temp = abs(temp);
        // cout << " = " << temp << endl;
        out.push_back(temp % 10);
    }
    return out;
}
/**
 * Runs 100 FFT phases over the signal and extracts the 8-digit message.
 *
 * @param v        the input signal as individual digits
 * @param part_two when true, the signal is first repeated 10,000 times and the
 *                 message is read at the offset given by the first seven digits
 *                 of the *input* signal (per the puzzle statement)
 * @return the 8-digit message as a string
 */
string solve(vector<int> v, bool part_two)
{
    const int num_phases = 100;
    const vector<int> base_pattern = {0, 1, 0, -1};
    // BUGFIX: the part-two offset must be taken from the first seven digits of
    // the original input signal, BEFORE any phases run (previously it was read
    // from the transformed signal after 100 phases, which is wrong).
    size_t offset = 0;
    if (part_two)
    {
        string offset_digits;
        for (int i = 0; i < 7; i++)
        {
            offset_digits += static_cast<char>('0' + v[i]);
        }
        offset = stoul(offset_digits);
        // build the real signal (original signal repeated 10k times)
        // NOTE(review): the brute-force O(n^2) phase below is impractically
        // slow at this size; a triangular-sum shortcut is the usual approach.
        vector<int> temp(v);
        v.resize(v.size() * 10000);
        for (size_t i = 0; i < v.size(); i++)
        {
            v[i] = temp[i % temp.size()];
        }
    }
    for (int i = 0; i < num_phases; i++)
    {
        cout << "doing phase " << i + 1 << endl;
        v = do_phase(v, base_pattern, i + 1);
        // cout << "after " << i + 1 << " phase: " << v << endl;
    }
    // extract the message: 8 digits starting at the offset (0 for part one)
    string out;
    for (size_t i = 0; i < 8; i++)
    {
        out += static_cast<char>('0' + v[(i + offset) % v.size()]);
    }
    return out;
}
// Entry point: reads the digit string from stdin and prints the part-two answer.
int main()
{
    string line;
    getline(cin, line);
    // sample inputs:
    // line = "12345678";
    // line = "19617804207202209144916044189917";
    // line = "03036732577212944063491565474664";
    vector<int> digits;
    digits.reserve(line.size());
    for (char ch : line)
    {
        digits.push_back(ch - '0');
    }
    // cout << "Part 1: \n" << solve(digits, false) << endl;
    cout << "Part 2: " << solve(digits, true) << endl;
}
A hazmat transportation monitoring system based on Global Positioning System / Beidou Navigation Satellite System and RS485 bus
With the growing demand for road transport of hazmat in the past few years, accidents have occurred frequently during dangerous goods transportation, and road transportation safety issues have become increasingly prominent. Therefore, it is urgent to make full use of technical measures for information collection and transmission to monitor the state of dangerous goods. A real-time system is put forward in this paper, which can monitor the state of hazmat during transportation by using Global Positioning System (GPS)/BeiDou Navigation Satellite System (BDS) technology and sensor acquisition modules based on RS485 bus technology. Combining the above technical schemes with General Packet Radio Service (GPRS) radio transmission, Radio Frequency Identification (RFID) and Geographic Information System (GIS) technology, the system can monitor the position information and crucial state information of the dangerous goods during transportation. The system can fundamentally solve the safety issues of hazmat transport, and it is helpful for achieving scientific management in the future. |
<reponame>zann1x/FrameGraph<gh_stars>0
// Copyright (c) 2018-2019, <NAME>. For more information see 'LICENSE'
#include "SphericalCubeMath.h"
using namespace FG;
#define TEST CHECK_FATAL
template <typename Projection>
static void Test_ForwardInverseProjection ()
{
static constexpr uint lod = 12;
static constexpr double err = 0.0001;
for (uint face = 0; face < 6; ++face)
{
for (uint y = 1; y < lod+2; ++y)
for (uint x = 1; x < lod+2; ++x)
{
const double2 ncoord = double2{ double(x)/(lod+2), double(y)/(lod+2) } * 2.0 - 1.0;
const double3 forward = Projection::Forward( ncoord, ECubeFace(face) );
auto[inv, inv_face] = Projection::Inverse( forward );
TEST( uint(inv_face) == face );
TEST(Equals( ncoord.x, inv.x, err ));
TEST(Equals( ncoord.y, inv.y, err ));
}
}
}
// Runs the forward/inverse round-trip test for every projection type.
extern void UnitTest_SphericalCubeMath ()
{
    Test_ForwardInverseProjection< OriginCube >();
    Test_ForwardInverseProjection< IdentitySphericalCube >();
    Test_ForwardInverseProjection< TangentialSphericalCube >();

    // NOTE(review): removed six unused debug locals (c0..c2, b0..b2) that probed
    // IdentitySphericalCube::Forward just outside the [-1, +1] coordinate range;
    // they were never inspected and only produced unused-variable warnings.
    FG_LOGI( "UnitTest_SphericalCubeMath" );
}
|
#include<stdio.h>
/* Per-turn decision log: 1 = STRIKE, 0 = HEAL. */
int choice[10000000];

/*
 * Greedy fight simulation: strike whenever the hero survives the counter-hit
 * (or when one strike finishes the foe), otherwise heal. Prints the number of
 * turns followed by one action per line.
 */
int main(void)
{
    int hero_hp, hero_atk, heal_amount;
    int foe_hp, foe_atk;
    scanf("%d%d%d", &hero_hp, &hero_atk, &heal_amount);
    scanf("%d%d", &foe_hp, &foe_atk);

    /* The foe still attacks on a heal turn, so each HEAL nets heal - attack HP. */
    int net_heal = heal_amount - foe_atk;
    int turns = 0;

    for (int turn = 0; ; ++turn)
    {
        if (foe_hp - hero_atk <= 0)
        {
            /* One strike finishes the foe: take it and stop. */
            choice[turn] = 1;
            turns = turn + 1;
            break;
        }
        if (hero_hp - foe_atk <= 0)
        {
            /* Trading blows would kill the hero first: heal instead. */
            hero_hp += net_heal;
            choice[turn] = 0;
        }
        else
        {
            hero_hp -= foe_atk;
            foe_hp -= hero_atk;
            choice[turn] = 1;
        }
    }

    printf("%d\n", turns);
    for (int i = 0; i < turns; ++i)
    {
        if (choice[i])
            printf("STRIKE\n");
        else
            printf("HEAL\n");
    }
}
<reponame>apokalipsys/tenprintcover-java
package org.apoka.tenprintcover;
import org.apoka.graphics.Image;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import java.io.IOException;
/**
 * Command-line tool that generates a "10 PRINT"-style book cover image.
 * Required options: title (-t), author (-a) and output file (-o); the
 * subtitle (-s) is optional and defaults to an empty string.
 */
public class TenPrintCover {

    /** Book title (required). */
    @Option(name = "-t", usage = "Book title", required = true, metaVar = "TITLE")
    String title;

    /** Optional book subtitle; normalized to "" when absent. */
    @Option(name = "-s", usage = "Book subtitle", metaVar = "SUBTITLE")
    String subtitle;

    /** Author(s) of the book (required). */
    @Option(name = "-a", usage = "Author(s) of the book", required = true, metaVar = "AUTHOR")
    String author;

    /** Destination filename for the generated PNG image (required). */
    @Option(name = "-o", usage = "Filename of the cover image in PNG format", required = true, metaVar = "FILE")
    String outfile;

    /**
     * Parses the command line, draws a 400x600 cover and saves it to
     * {@link #outfile}. On a parse error the usage text is printed to
     * stderr and the method returns without drawing anything.
     *
     * @param args raw command-line arguments
     * @throws IOException if saving the image fails
     */
    public void doMain(String[] args) throws IOException {
        CmdLineParser parser = new CmdLineParser(this);
        try {
            parser.parseArgument(args);
        } catch (CmdLineException e) {
            System.err.println(e.getMessage());
            parser.printUsage(System.err);
            return;
        }
        if(subtitle == null) {
            subtitle = "";
        }
        Cover cover = new Cover(title, subtitle, author, 400, 600);
        Image image = cover.draw();
        image.save(outfile);
    }

    /** JVM entry point; delegates to {@link #doMain(String[])}. */
    public static void main(String... args) throws IOException {
        new TenPrintCover().doMain(args);
    }
}
|
package cmd
import (
"github.com/mitchellh/go-homedir"
log "github.com/sirupsen/logrus"
)
const (
	// defaultPort is the port used when no port is configured.
	defaultPort = 5126
)

var (
	// homeDir is the current user's home directory. Resolution errors are
	// deliberately discarded, leaving it empty — NOTE(review): confirm
	// callers tolerate an empty homeDir.
	homeDir, _ = homedir.Dir()
)

// setDebugMode raises the global logrus level to Debug.
func setDebugMode() {
	log.SetLevel(log.DebugLevel)
}
|
<reponame>imle/go-firmata
package firmata
import (
"fmt"
"reflect"
"testing"
)
// TestByteConversion exhaustively verifies that splitting a byte into its
// two-byte (7-bit) form and recombining it is lossless for all 256 values.
func TestByteConversion(t *testing.T) {
	for v := uint16(0x00); v <= 0xFF; v++ {
		b := byte(v)
		t.Run(fmt.Sprintf("0x%02X", v), func(t *testing.T) {
			lo, hi := ByteToTwoByte(b)
			round := TwoByteToByte(lo, hi)
			if round != b {
				t.Errorf("ByteToTwoByte(0x%02X) = 0x%02X, 0x%02X => TwoByteToByte() = 0x%02X", v, lo, hi, round)
			}
		})
	}
}
// TestTwoByteString checks that two-byte (7-bit) encoded data decodes back
// into the expected string, including nil and empty inputs.
func TestTwoByteString(t *testing.T) {
	cases := []struct {
		name  string
		bytes []byte
		want  string
	}{
		{name: "nil", bytes: nil, want: ""},
		{name: "empty", bytes: []byte{}, want: ""},
		{
			name: "test string",
			bytes: ByteSliceToTwoByteRepresentation([]byte{
				0x74, 0x65, 0x73, 0x74, 0x20, 0x73, 0x74, 0x72, 0x69, 0x6E, 0x67,
			}),
			want: "test string",
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := TwoByteString(tc.bytes)
			if got != tc.want {
				t.Errorf("TwoByteString() = %v, want %v", got, tc.want)
			}
		})
	}
}
// TestByteSliceTo2ByteRepresentation verifies the 7-bit encoding: each input
// byte becomes two output bytes — its low 7 bits, then its high bit — and
// nil/empty inputs both yield an empty (non-nil) slice.
func TestByteSliceTo2ByteRepresentation(t *testing.T) {
	tests := []struct {
		name     string
		input    []byte
		expected []byte
	}{
		{
			name:     "nil",
			input:    nil,
			expected: []byte{},
		},
		{
			name:     "empty",
			input:    []byte{},
			expected: []byte{},
		},
		{
			// low 7 bits carried in the first byte of each pair
			name:     "7 lsb set",
			input:    []byte{0b01111111, 0b11111111},
			expected: []byte{0b01111111, 0b00000000, 0b01111111, 0b00000001},
		},
		{
			// high bit carried in the second byte of each pair
			name:     "7 lsb not set",
			input:    []byte{0b00000000, 0b10000000},
			expected: []byte{0b00000000, 0b00000000, 0b00000000, 0b00000001},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := ByteSliceToTwoByteRepresentation(tt.input); !reflect.DeepEqual(got, tt.expected) {
				t.Errorf("ByteSliceToTwoByteRepresentation() = %v, want %v", got, tt.expected)
			}
		})
	}
}
// TestTwoByteRepresentationToByteSlice verifies the inverse of the 7-bit
// encoding: each (low-7-bits, high-bit) pair collapses back into one byte.
// A trailing unpaired byte is passed through unchanged, and nil/empty inputs
// both yield an empty (non-nil) slice.
func TestTwoByteRepresentationToByteSlice(t *testing.T) {
	tests := []struct {
		name     string
		input    []byte
		expected []byte
	}{
		{
			name:     "nil",
			input:    nil,
			expected: []byte{},
		},
		{
			name:     "empty",
			input:    []byte{},
			expected: []byte{},
		},
		{
			name:     "7 lsb set",
			input:    []byte{0b01111111, 0b00000000, 0b01111111, 0b00000001},
			expected: []byte{0b01111111, 0b11111111},
		},
		{
			name:     "7 lsb not set",
			input:    []byte{0b00000000, 0b00000000, 0b00000000, 0b00000001},
			expected: []byte{0b00000000, 0b10000000},
		},
		{
			// odd-length input: the final byte has no high-bit partner
			name:     "only 1 byte",
			input:    []byte{0b01000000},
			expected: []byte{0b01000000},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := TwoByteRepresentationToByteSlice(tt.input); !reflect.DeepEqual(got, tt.expected) {
				t.Errorf("TwoByteRepresentationToByteSlice() = %v, want %v", got, tt.expected)
			}
		})
	}
}
|
#ifndef VOXEL_TYPE_H
#define VOXEL_TYPE_H
// A unique identifier for each type of voxel.
// Voxel types, if they are still part of the design, are meant for host usage only,
// and not for mapping textures to the OpenCL kernel.
// This type is still experimental and might be removed.
// -------------------------------
// Voxel types are used instead of integer IDs, since it's easier to name specific
// kinds of voxels. Depending on the climate and season, a certain voxel type might
// actually be realized as one of many things, such as either dirt, wet dirt, or
// snow. The intent with voxel types is to have independent generic voxels whose
// actual appearance can be changed with the flip of a switch somewhere else in the
// code. If there are certain climates or seasons that don't have a mapping for a
// certain voxel type then just duplicate a related voxel type's mapping.
// There's an implicit mapping of voxel types to only wall and flat texture names,
// since voxels shouldn't be using any other texture types than those.
// If a certain voxel type, like "bed", doesn't make sense for a location like a
// dungeon, then just use a "None" or "Dev" texture type or something to indicate
// that the texture used in its place should just be blank, since it wouldn't ever be
// used anyway. It's better to use a blank placeholder than to find something that's
// "close enough" in this case.
// The sides of pits and rivers aren't really their own voxel. I'm not sure how that
// will fit into the voxel generation. Maybe it could be a second-pass triangle
// generation algorithm that checks adjacencies between voxel faces. I'll need to
// look more into this.
enum class VoxelType
{
	// Empty space; no geometry.
	Air,

	// Ground variants.
	Ground1,
	Ground2,
	Ground3,
	Ground4,

	// Plain old walls and their associated half walls.
	Wall1,
	Wall2,
	Wall3,
	Wall4,
	HalfWall1,
	HalfWall2,
	HalfWall3,
	HalfWall4,

	// Four diagonals for each wall texture?

	// Bridges. Multiple bridge voxel types are necessary to act as stairs,
	// or perhaps slightly raised platforms for altars in dungeons. Let's say
	// each one is a quarter the height of a voxel.
	Bridge1,
	Bridge2,
	Bridge3,
	Bridge4,

	// Beds (direction dependent, i.e., NorthSouth/EastWest).
	Bed1,
	Bed2,

	// Tables and shelves. There are multiple heights of shelves, like in the
	// Halls of Colossus.
	Table,
	Shelf1,
	Shelf2,
	Shelf3,

	// Specific buildings...
	// Tavern1, Mages1, palace gates, etc..

	// Liquid (probably either water or lava. No ice). There should be a voxel
	// underneath them, since they are just liquid by themselves.
	Liquid
};
#endif
|
/**
 * Base class for all standard forms whose models are created using "properties"
 * of some kind that are defined by a single string. Subclasses must implement
 * {@link #createPropertyModel(String)} to map those strings to Wicket models.
 * Based on that method, this class provides factory methods for the form
 * components.
 *
 * @param <T> the model type
 */
public abstract class AbstractPropertyBasedStandardFormPanel<T> extends StandardFormPanel {

    /**
     * Constructor.
     * @param id the wicket id
     * @param model the bean model
     * @param stateless whether to use a stateless form
     */
    public AbstractPropertyBasedStandardFormPanel(final String id, final IModel<T> model, final boolean stateless) {
        super(id, model, stateless);
    }

    /**
     * Getter method for the model.
     * @return the model passed at construction, cast back to its declared type
     */
    @SuppressWarnings("unchecked")
    public final IModel<T> getModel() {
        return (IModel<T>)getDefaultModel();
    }

    /**
     * Creates a model for a property being edited.
     * @param <P> the property type
     * @param propertyName the name of the property
     * @return the property model
     */
    public abstract <P> PropertyModel<P> createPropertyModel(String propertyName);

    /**
     * Adds a string-typed text field for a bean property.
     * @param label the label for the text field
     * @param propertyName the name of the bean property
     * @return a configurator that can be used to further configure the added components
     */
    public final SingleFormComponentElementConfigurator addTextField(final String label, final String propertyName) {
        return addTextField(label, this.<String>createPropertyModel(propertyName));
    }

    /**
     * Adds a text field using the specified type and model.
     * @param <P> the property type
     * @param label the label for the text field
     * @param type the model type
     * @param propertyName the name of the bean property
     * @return a configurator that can be used to further configure the added components
     */
    public final <P> SingleFormComponentElementConfigurator addTextField(final String label, final Class<P> type, final String propertyName) {
        return addTextField(label, type, this.<P>createPropertyModel(propertyName));
    }

    /**
     * Adds an email text field using the specified model.
     * @param label the label for the text field
     * @param propertyName the name of the bean property
     * @return a configurator that can be used to further configure the added components
     */
    public final SingleFormComponentElementConfigurator addEmailTextField(final String label, final String propertyName) {
        return addEmailTextField(label, this.<String>createPropertyModel(propertyName));
    }

    /**
     * Adds a password field using the specified model.
     * @param label the label for the password field
     * @param propertyName the name of the bean property
     * @return a configurator that can be used to further configure the added components
     */
    public final SingleFormComponentElementConfigurator addPasswordTextField(final String label, final String propertyName) {
        return addPasswordTextField(label, this.<String>createPropertyModel(propertyName));
    }

    /**
     * Adds a text area using the specified model.
     * @param label the label for the text area
     * @param propertyName the name of the bean property
     * @param rows number of visible rows
     * @return a configurator that can be used to further configure the added components
     */
    public final SingleFormComponentElementConfigurator addTextArea(final String label, final String propertyName, final int rows) {
        return addTextArea(label, this.<String>createPropertyModel(propertyName), rows);
    }
}
// Test closing the browser while inspecting an extension popup with dev tools.
// The interesting assertion is implicit: closing the window while DevTools is
// attached to the popup must not crash or hang the test.
IN_PROC_BROWSER_TEST_F(BrowserActionInteractiveViewsTest,
                       MAYBE_CloseBrowserWithDevTools) {
  // Bail out on configurations where popup tests should not run
  // (presumably platform-dependent — the MAYBE_ prefix suggests conditional
  // disabling; confirm against the ShouldRunPopupTest() implementation).
  if (!ShouldRunPopupTest())
    return;
  // Install the test extension that provides a browser-action popup.
  ASSERT_TRUE(LoadExtension(test_data_dir_.AppendASCII(
      "browser_action/popup")));
  const Extension* extension = GetSingleLoadedExtension();
  ASSERT_TRUE(extension) << message_;
  // Open the popup under DevTools and wait for its main frame to load.
  content::WindowedNotificationObserver frame_observer(
      content::NOTIFICATION_LOAD_COMPLETED_MAIN_FRAME,
      content::NotificationService::AllSources());
  BrowserActionTestUtil::Create(browser())->InspectPopup(0);
  frame_observer.Wait();
  EXPECT_TRUE(BrowserActionTestUtil::Create(browser())->HasPopup());
  // Close the browser window with the popup + DevTools still open.
  chrome::CloseWindow(browser());
}
def process_spatial_coordinates(
    x: Union[float, int, np.ndarray, None] = None,
    y: Union[float, int, np.ndarray, None] = None,
    z: Union[float, int, np.ndarray, None] = None,
    coords: Optional[np.ndarray] = None,
    order: tuple = ("x", "y", "z"),
) -> Tuple[np.ndarray, List[str]]:
    """Normalize spatial coordinates given as separate x/y/z values and/or as a
    pre-assembled coordinate array.

    :param x: x-coordinate value(s); flattened via the project's ``flatten``
        helper if given.
    :param y: y-coordinate value(s); same treatment as ``x``.
    :param z: z-coordinate value(s); same treatment as ``x``.
    :param coords: pre-assembled array of shape ``(n_coords, n_points)``. If
        given, it provides the storage; any x/y/z values also given are written
        into its rows in ``order``.
    :param order: the order in which coordinate rows are arranged.
    :return: ``(coords, adjusted_order)`` where ``coords`` has shape
        ``(n_coords, n_points)`` and ``adjusted_order`` names the coordinates
        present. Returns ``(np.array([]), [])`` when nothing was provided.
    :raises TypeError: if ``coords`` is not a numpy array.
    :raises RuntimeError: if the given coordinate vectors differ in length.
    """
    if (x is None) and (y is None) and (z is None) and (coords is None):
        return np.array([]), []

    # Flatten each provided coordinate into a 1D array. The dict replaces the
    # original eval()-based local-variable lookup, which was fragile.
    values = {
        "x": np.array(flatten(x)) if x is not None else None,
        "y": np.array(flatten(y)) if y is not None else None,
        "z": np.array(flatten(z)) if z is not None else None,
    }

    if coords is not None:
        # isinstance (instead of an exact type() check) also accepts ndarray
        # subclasses, which behave identically for our purposes.
        if not isinstance(coords, np.ndarray):
            raise TypeError(
                f"The argument 'coords' must be of type numpy.ndarray. Found "
                f"{type(coords)} however."
            )
        n_coords, n_points = coords.shape
        adjusted_order = list(order[:n_coords])
    else:
        n_points_list = [len(v) for v in values.values() if v is not None]
        if len(set(n_points_list)) != 1:
            raise RuntimeError(
                f"Found inconsistent lengths in given coordinate "
                f"vectors: {n_points_list}!"
            )
        n_coords = len(n_points_list)
        n_points = n_points_list[0]
        coords = np.zeros((n_coords, n_points))
        adjusted_order = []

    # Write individually provided coordinates into successive rows.
    row_idx = 0
    for name in order:
        if values[name] is not None:
            adjusted_order.append(name)
            coords[row_idx, :] = values[name]
            row_idx += 1

    return coords, adjusted_order
Run-length encoding
Run-length encoding is a simple compression scheme in which runs of equal values are represented by the value and a repeat count. For example, a supermarket cashier might process this line of shopping
as
4 bananas
3 apples
2 bananas
1 pineapple
3 apples
Unix packs in its very own run length encoder, uniq -c . It works just fine — so long as the values you want to encode are newline separated byte strings, that is.
Let’s use a sequence of coin tosses as an example stream. $RANDOM generates random numbers. We use the least significant bit of these numbers as an index into an array containing the values heads , tails .
$ HT=(heads tails) $ toss() { echo ${HT[$RANDOM&1]}; } $ toss; toss; toss heads tails tails $ tosses() { while [ 1 ]; do toss; done; } $ tosses | head tails tails tails heads tails heads heads heads tails tails
Passing a fresh sample from this same stream through our run-length encoder we get:
$ tosses | uniq -c | head 2 heads 1 tails 1 heads 1 tails 1 heads 6 tails 3 heads 1 tails 4 heads 1 tails
An awk script can be used as a run-length decoder. (There must be a neater way, using sed maybe?)
$ runlendec() { awk '{ while ($1--) print $2 }'; } $ tosses | head | tee orig.log | uniq -c | runlendec | tee encdec.log heads tails heads tails heads heads tails tails heads heads $ diff orig.log encdec.log
Here, we toss a coin 10 times teeing the original sequence to a file. The next two links in the pipeline compress and decompress the sequence, teeing the results to another file. Finally, as a sanity check, we confirm the round trip results are the same.
Run-length encoding in Python
This Unix run-length codec is fun, but of limited practical use. One good feature, though, is the way it operates on streams of data (including infinite streams), leaving clients free to decide how best to slice and buffer these streams.
Python has a fine library of high-level stream transformation tools from which we can build a generic and flexible run-length codec in just a few lines. Since I want to progress from run-length coding to something more advanced, I’ll leave discussing how to implement this codec for now, but if you’d like to write your own version, here’s a description suitable for doctesting.
Import the run-length codec functions and compress a short string. >>> from runlength import compress, decompress >>> comp = compress('AABBBACC') The returned compressor is a stream (an iterable). >>> next(comp) (2, 'A') Pull the rest of the stream into memory. >>> rest = list(comp) >>> rest [(3, 'B'), (1, 'A'), (2, 'C')] Simple decompress example. >>> concat = ''.join >>> concat(decompress(rest)) 'BBBACC' Compress, decompress also work with infinite streams, like the a2b3 stream, which repeatedly cycles two pairs. >>> from itertools import cycle, islice >>> a2b3 = cycle([(2, 'a'), (3, 'b')]) >>> dec = decompress(a2b3) Pull 8 values from the decompressed stream. >>> concat(islice(dec, 8)) 'aabbbaab' Now compress the decompressed stream, and explore a few items. >>> comp = compress(dec) >>> next(comp) (2, 'b') >>> list(islice(comp, 2)) [(2, 'a'), (3, 'b')]
DEFLATE
The Wikipedia page on run-length encoding identifies monochrome images as good candidates for run-length compression. The white and black pixels typically group into long runs. Indeed, any simple image using a limited palette should reduce well using this compression scheme.
The chessboard above is 256×256 pixels, each square being 32×32 pixels. We could run-length encode this 64K pixel image as 256×8 = 2K runs of 32 pixels, a decent saving. (Actually, we should do slightly better, noting that there are runs of length 64 at the chessboard rank boundaries, but you get the idea.)
(32,W)(32,B)(32,W)(32,B)(32,W)(32,B)(32,W)(32,B), (32,W)(32,B)(32,W)(32,B)(32,W)(32,B)(32,W)(32,B), .... (32,B)(32,W)(32,B)(32,W)(32,B)(32,W)(32,B)(32,W)
Like a paletted image, a block of text — the web page you’re reading now, for example — employs a limited alphabet. Although the characters in this text don’t usually group into long runs there’s plenty of repetition, especially in the raw HTML: all the occurrences of <div> , <span> and class used for CSS styling, for example. The DEFLATE compression algorithm uses a clever twist on run-length encoding to remove this redundancy:
The compressed data consists of a series of elements of two types: literal bytes (of strings that have not been detected as duplicated within the previous 32K input bytes), and pointers to duplicated strings, where a pointer is represented as a pair <length, backward distance>. (RFC-1951)
(In addition, a multiple-level dynamic Huffman encoding scheme reduces the space needed for the strings, distances and lengths themselves.)
There’s more to these pointer elements than first appears: the length can exceed the backward distance. Thus the sequence:
heads heads heads heads heads
can be deflated as the literal type heads
followed by the pointer type <24, 6> .
If you’ve spotted the potential for recursion, good! The inflating stream can reference itself, which can reference itself, which can … Confusing?
Zipping pixels
PNG images use DEFLATE compression (as implemented by zlib) to save on pixel storage space. Here’s a binary view of the raw data in the chessboard graphic shown above, all 137 bytes of it. The 64K pixels themselves compress into a 88 byte IDAT chunk, of which the final 8 bytes are a checksum and (I think?) some padding. Maybe the image could be squeezed harder, but I’m impressed!
8950 4e47 0d0a 1a0a 0000 000d 4948 4452 .PNG........IHDR 0000 0100 0000 0100 0100 0000 0074 0995 .............t.. cb00 0000 5049 4441 5468 81ed ceb1 0d00 ....PIDATh...... 200c 0341 f65f 1a58 803a 2f74 6e52 e424 ..A._.X.:/tnR.$ 7bed 9b75 f3ba cf07 0000 df83 ca0e 0000 {..u............ 7a60 ba1f 0080 2ea8 ec00 00a0 07a6 fb01 z`.............. 00e8 82ca 0e00 007a 60ba 1f00 802e a8ec .......z`....... 0000 2007 0e8a 69f0 e2b9 9471 c700 0000 .. ...i....q.... 0049 454e 44ae 4260 82 .IEND.B`.
Here’s a trace of how zlib inflates the compressed pixels in this IDAT chunk. (Source code available via anonymous SVN at http://wordaligned.org/svn/etc/zlib_trace.)
inflate: allocated inflate: reset inflate: zlib header ok inflate: dynamic codes block (last) inflate: table sizes ok inflate: code lengths ok inflate: codes ok inflate: literal 0x00 inflate: literal 0xff inflate: length 3 inflate: distance 1 inflate: literal 0x00 inflate: length 3 inflate: distance 1 inflate: length 24 inflate: distance 8 inflate: length 25 inflate: distance 25 inflate: length 258 inflate: distance 33 ....
I’ve attempted to show the first few stages of the genesis of the uncompressed stream in the picture below. The way the stream recursively inflates itself is quite beautiful.
put 00 put ff go back 1 (to ff), put 3 put 00 go back 1 (to 00), put 3 go back 8 (to 00 00 00 00 ff ff ff ff) put 24
Two elements later, and the repeat length has grown to 258. In fact, the entire chessboard is generated from just 3 literal and 43 pointer elements.
(Not all graphics have such a regular pattern, of course, so we can’t always achieve such dramatic compression.)
Deflated HTML
Web servers can and do save on band-width by transferring gzip compressed HTML to gzip capable clients. (Gzip is a simple wrapper around DEFLATE.) Any PNG images transferred will also have their pixels DEFLATE compressed.
$ curl http://wordaligned.org --head --compress HTTP/1.1 200 OK Date: Sun, 17 May 2009 17:41:53 GMT Server: lighttpd | Word Aligned Content-Type: text/html; charset=UTF-8 .... Vary: Accept-Encoding Content-Encoding: gzip Content-Length: 20
The Word Aligned front page contains about 75Kb of HTML, which gzips to just 16Kb — a decent saving. Relevant lines from the lighttpd configuration file read:
lighttpd mod_compress server.modules = ( .... "mod_compress" ) compress.cache-dir = basedir + "lighttpd/cache/compress/" compress.filetype = ("text/plain", "text/html", "text/css")
I uphold Gzip (built on zlib, which implements DEFLATE) as a hero of the web. As we’ve seen, it implements a powerful and elegant algorithm, but perhaps the best thing about it is that it’s free to use, a freedom worth fighting for. Check out this battle report from the FAQ. |
Imagine enough water to fill a couple of great lakes, but spread under some of the driest parts of eight western states. That was the High Plains Aquifer 60 years ago, before new pumping and irrigation systems made it easy for farmers to extract billions of gallons from it, and use it to grow lucrative crops on the arid land.
An agricultural gold rush of sorts followed, transforming the regional economy. But now parts of the aquifer are playing out, leaving parts of the high plains high and dry.
Nate Pike has been riding the dry rolling ranch lands south of Dodge City Kansas for most of his 80 years, sometimes just to visit a spring called St. Jacob’s Well.
"As a young boy I’d saddle me a horse, put me a little cane pole on it, come down here and fish," he says.
Pike’s not fishing today. The water level has retreated on the High Plains Aquifer. And it’s a sad sight for Pike.
"That thing, had a lot of water in it, it never went down, never changed," he says with a nervous chuckle. "I can’t believe you can’t see the water from up here."
He says he blames it on irrigation, which is pumping the aquifer dry.
"Right now we’re running right at 1000 gallons a minute, 24/7," says Jesse Garetson, standing by one of his irrigation wells near Copeland, Kan.
This one, and about 39,000 others like it in Kansas pump long and hard in growing season. If you want a visual for that much water, picture Niagara Falls. In 2011, Kansas wells sucked out enough water to keep Niagara Falls thundering full force for three weeks.
That calibrated deluge has sprouted a vibrant economy on the arid land. In drought years, like 2013, farmers can barely grow anything here. Now, with irrigation, Garetson says, they can grow just about everything.
"We’ve raised potatoes, we’ve had peaches, cotton, corn, milo, wheat, soy beans," he says.
All the corn grown out here supplies ethanol plants, and feeds dairy cows, pigs and cattle. The livestock support a big meat packing industry. All thanks to this one resource.
"Water," says Garetson. "More precious than gold."
And, like gold, this water is not renewable. Most of the torrent farmers are pumping out sat trapped under the prairie for millions of years.
"We’re mining it," says Garetson. "Because once you hit a gold vein in mine, and you get all the gold that’s in that vein, you're through with that mine."
Garetson says parts of this water mine are playing out, but not evenly. In spots the water table has plunged more than 100 feet, wells are running dry and tensions are rising. He is suing to protect his water rights.
"It does nothing but create neighbor problems," Garetson says. "We producers perceive as long as there’s water we’re pumping; it’s a free for all."
"I know my neighbor’s pumping my water. I’m pumping his water. Cause they’re hooked in the same reservoir. If he don’t pump it, I will," says Anthony Stevenson, who farms near Ulysses, Kan.
But, he says, he waters more judiciously these days.
His irrigation system is efficient, delivering water to the base of his lush, eight foot tall plants. And this year, Stevenson planted only half this field in corn. His well produces just half the water it once did, and the lingering drought is not helping.
"We can’t out pump a drought," says Stevenson. "We can’t out pump mother nature. Our wells aren’t big enough."
Stevenson is gradually farming more like people did out here before irrigation- growing more wheat, less corn, and letting dry fields sit fallow a full year between plantings to collect moisture. His income is taking a serious hit.
The aquifer’s decline hasn’t gone unnoticed.
Wayne Bossert runs one of the state’s four groundwater management districts. He says Kansas stopped new development on parts of the aquifer 30 years ago.
"So we prevented it from getting any worse a long time ago," says Bossert.
Last year Kansas began enforcing very stiff penalties for over-pumping. That move drew death threats against state water officials. Still, Bossert says most farmers now want to manage the decline of the aquifer. They can’t ignore it.
"When that supply gets interrupted, or they start pumping air, or they start realizing that that, hey, that may not be there for very much longer. That’s the paradigm shift," says Bossert. "That’s what changes their attitudes and their options and their willingness to sit down and talk."
Bossert convened a group of farmers who came up with their own conservation plan. They’ve agreed to cut usage by 20 percent during the next five years, or face stiff sanctions. He expects to see the same approach elsewhere in western Kansas. Because lots of farmers here want to give one more generation a shot at the good life they’ve had, irrigating with water drawn from the High Plains Aquifer.
This is the first part of a two-part series on the High Plains Aquifer from Harvest Public Media, you can read the second part here. |
ATHENS — It always could change, but as they enter the last third of the regular season, the Georgia Bulldogs are as healthy as they’ve been all season.
DaQuan Hawkins-Muckle returned to practice on Monday, and while the junior defensive lineman may not be a significant contributor, he signifies the state of the team.
Every projected starter is healthy, with only one set to miss the game at South Carolina this Saturday: inside linebacker Natrez Patrick, who will serve the final game of a four-game suspension for a marijuana arrest.
Rashad Roundtree, a reserve defensive back, remains out with undisclosed injuries, and he doesn’t look to be back anytime soon. He entered the season competing for a second-team spot.
Georgia has had key players miss time this year, including Jacob Eason, the starting quarterback to start the season. But even after Eason returned, he took a back seat to freshman Jake Fromm, who has run away with the job. Defensive lineman Trenton Thompson missed two games with a knee injury but returned for the win against Florida. Hawkins-Muckle has 2 tackles in three games this year, and missed the past two with an undisclosed injury.
Even the tailbacks have escaped the injury bug so far this season, despite playing a position that has been prone to injury in recent years at Georgia. That, and the depth at tailback, allows the team to use fresh legs well into games as opposing defenses are wearing out.
“I think you could ask a lot of teams in the country that question and they’d say it’s a huge benefit,” coach Kirby Smart said on Monday. “We probably take it for granted to be honest with you. We don’t look at it that way until we look at someone we’re playing and they say, ‘Well, we’ve got this back out, this back bruised up, this guy beat up’, and it’s not because of the last game, it’s because of the cumulative effect of the 15-20 carries. We’ve certainly had a lot of carries, just spread amongst a good group of backs.” |
extern crate es_core;
extern crate es_data;
use self::es_core::optimizer::ScoreLogger;
use self::es_data::dataset::types::{MetaType, Metadata};
use crate::l2r::scorer::parameters::*;
use crate::l2r::scorer::utils::*;
/// Scorer for a basic binary indicator: documents whose metadata matches a
/// configured field/value are scored by how close their average list position
/// is to a target index.
/// Can be used to bury (large `goal_index`) or boost (small `goal_index`)
/// results based on a field.
#[derive(Debug, Clone)]
pub struct BasicBinaryScorer {
    /// Field name containing the value to compare with.
    pub field_name: String,
    /// Value to compare the document's metadata field against.
    pub field_value: MetaType,
    /// Only matters for numerical MetaTypes. Defaults to val > threshold;
    /// when set, the direction of the comparison is flipped.
    pub flip_comparator: bool,
    /// 0-indexed list position to optimize for. Kept as f32 so fractional
    /// targets (e.g. "between slots 1 and 2") are expressible.
    pub goal_index: f32,
}
impl BasicBinaryScorer {
    /// Constructs a scorer from the supplied binary scoring parameters.
    pub fn new(params: &BinaryScoringParameters) -> BasicBinaryScorer {
        BasicBinaryScorer {
            field_name: params.field_name.clone(),
            field_value: params.field_value.clone(),
            flip_comparator: params.flip_comparator,
            goal_index: params.goal_index,
        }
    }

    /// Best (closest to `goal_index`) mean position that `num_valid` matching
    /// documents can occupy in a list of `num_docs` slots.
    fn get_optimal_average(&self, num_valid: usize, num_docs: usize, goal_index: f32) -> f32 {
        let count = num_valid as f32;
        // Mean of the bottom-most `num_valid` slots (everything pushed down).
        let bottom_mean =
            || ((num_docs - num_valid)..num_docs).map(|i| i as f32).sum::<f32>() / count;
        // Mean of the top-most `num_valid` slots (everything pushed up).
        let top_mean = || (0..num_valid).map(|i| i as f32).sum::<f32>() / count;

        // Goal lies beyond the last slot: it can never be reached, so the
        // closest arrangement packs every matching doc at the bottom.
        if goal_index >= num_docs as f32 {
            return bottom_mean();
        }

        let slots_above = goal_index;
        let slots_below = (num_docs as f32) - goal_index - 1.;
        // Put one doc exactly on the goal and balance the rest around it.
        let rest = num_valid - 1;
        let half_rest = (rest / 2) as f32;

        // Both sides have room for half of the remaining docs: pairs centred
        // on the goal cancel out, so the mean sits on (or just off) the goal.
        if half_rest <= slots_above && half_rest <= slots_below {
            return if rest % 2 == 0 {
                goal_index
            } else {
                // Odd doc left over; it can go above or below the band.
                // Placing it below keeps the distance optimal while avoiding
                // negative indices.
                ((rest as f32) * goal_index + (goal_index + half_rest + 1.)) / count
            };
        }

        if half_rest >= slots_above {
            // Not enough room above the goal: occupy the very top slots.
            top_mean()
        } else {
            // Not enough room below the goal: occupy the very bottom slots.
            bottom_mean()
        }
    }
}
impl Scorer for BasicBinaryScorer {
    /// Scores a ranked result list by how close the average position of the
    /// documents matching `field_name`/`field_value` is to `goal_index`.
    ///
    /// Returns a value in [0, 1]: 1 when the matching documents sit as close
    /// to the goal as the list permits (or when no document matches / the
    /// arrangement cannot be improved), approaching 0 as the arrangement
    /// drifts towards the farthest possible one. No score logger is produced.
    fn score(&self, scores: &[(ScoredInstance, &Metadata)]) -> (f32, Option<ScoreLogger>) {
        // Positions (0-indexed) of documents whose metadata matches the
        // configured field/value, gathered in a single `filter_map` pass
        // (replaces the previous filter(is_some)/map(expect) chain).
        let valid_indices = scores
            .iter()
            .enumerate()
            .filter_map(|(idx, score)| {
                if score.1.contains_key(&self.field_name)
                    && validate_metatype(
                        &score.1[&self.field_name],
                        &self.field_value,
                        self.flip_comparator,
                    )
                {
                    Some(idx as f32)
                } else {
                    None
                }
            })
            .collect::<Vec<f32>>();
        // This candidate set does not contain documents that match the expected topic.
        // TODO: should we penalize or ignore these cases?
        if valid_indices.is_empty() {
            return (1., None);
        }
        // Average index of the matching documents (no-op map removed).
        let index_sum: f32 = valid_indices.iter().sum();
        let average = index_sum / (valid_indices.len() as f32);
        // Squared distance between that average and the provided goal index.
        let dist = (self.goal_index - average).powf(2.);
        // Best achievable average for this list size; the goal itself may be
        // unreachable, so scores are rescaled against this optimum below.
        let optimal_average =
            self.get_optimal_average(valid_indices.len(), scores.len(), self.goal_index);
        let optimal_dist = (self.goal_index - optimal_average).powf(2.);
        // Farthest position from the goal: either the last slot or slot 0.
        let farthest_position = if ((scores.len() as f32) - self.goal_index - 1.) > self.goal_index
        {
            (scores.len() as f32) - self.goal_index - 1.
        } else {
            0.
        };
        // Worst-case average: matching docs packed as far from the goal as possible.
        let worst_case_average =
            self.get_optimal_average(valid_indices.len(), scores.len(), farthest_position);
        // Best and worst coincide: the arrangement is forced, so it can't improve.
        if (worst_case_average - optimal_average).abs() <= 1e-6 {
            return (1., None);
        }
        let worst_case_dist = (self.goal_index - worst_case_average).powf(2.);
        // Scale dist and worst_case_dist by optimal_dist as we may never be able to reach the goal.
        // This allows the score to be 1 if we reach the optimal arrangement.
        let dist_scaled = dist - optimal_dist;
        let worst_case_dist_scaled = worst_case_dist - optimal_dist;
        // Normalize by the maximum possible distance from the goal:
        // if avg == goal_index the score is 1; if avg ~ worst case it is ~0.
        (
            (worst_case_dist_scaled - dist_scaled) / worst_case_dist_scaled,
            None,
        )
    }
}
#[cfg(test)]
mod scorer_tests {
    use super::*;

    /// Exercises `get_optimal_average` across its three arrangements:
    /// goal beyond the list, goal perfectly centerable, and goal too close
    /// to either end of the list to center around.
    #[test]
    fn test_binary_optimal_average() {
        // Scorer fields are irrelevant here; only the goal math is tested.
        let param = BinaryScoringParameters {
            field_name: "unused".to_string(),
            field_value: MetaType::Str("unused".to_string()),
            flip_comparator: false,
            goal_index: 0.,
        };
        let scorer = BasicBinaryScorer::new(&param);
        // A single valid doc can sit exactly on any in-range goal.
        assert_eq!(scorer.get_optimal_average(1, 10, 0.), 0.);
        assert_eq!(scorer.get_optimal_average(1, 10, 9.), 9.);
        // Goal past the end of the list clamps to the bottom slot(s).
        assert_eq!(scorer.get_optimal_average(1, 10, 15.), 9.);
        // Even/odd remainders balanced around an in-range goal.
        assert_eq!(scorer.get_optimal_average(2, 13, 0.), 0.5);
        assert_eq!(scorer.get_optimal_average(4, 13, 1.), 1.5);
        // Not enough room above the goal: docs occupy the top slots.
        assert_eq!(scorer.get_optimal_average(5, 13, 1.), 2.);
        // Not enough room below the goal: docs occupy the bottom slots.
        assert_eq!(scorer.get_optimal_average(5, 13, 11.), 10.);
        assert_eq!(scorer.get_optimal_average(2, 13, 1.), 1.5);
        assert_eq!(scorer.get_optimal_average(4, 13, 11.), 11.5);
    }
}
|
// popNonce returns a nonce value previously stored with c.addNonce
// or fetches a fresh one from c.dir.NonceURL.
// If NonceURL is empty, it first tries c.directoryURL() and, failing that,
// the provided url.
func (c *Console) popNonce(ctx context.Context, url string) (string, error) {
	c.noncesMu.Lock()
	defer c.noncesMu.Unlock()
	if len(c.nonces) == 0 {
		// No cached nonce: fetch one over the network.
		// NOTE(review): the fetch runs while noncesMu is held, serializing
		// all nonce consumers behind the network call — confirm intended.
		if c.dir != nil && c.dir.NonceURL != "" {
			return c.fetchNonce(ctx, c.dir.NonceURL)
		}
		dirURL := c.directoryURL()
		v, err := c.fetchNonce(ctx, dirURL)
		if err != nil && url != dirURL {
			// Fall back to the caller-provided URL when the directory fails.
			v, err = c.fetchNonce(ctx, url)
		}
		return v, err
	}
	// Pop an arbitrary cached nonce; map iteration order is random, but any
	// stored nonce is equally valid.
	var nonce string
	for nonce = range c.nonces {
		delete(c.nonces, nonce)
		break
	}
	return nonce, nil
}
/** SecurityConfig captures the security related configuration for FeastClient. */
@AutoValue
public abstract class SecurityConfig {
  /**
   * Call credentials used to authenticate with Feast. When empty, no
   * authentication credentials are attached to outgoing calls.
   */
  public abstract Optional<CallCredentials> getCredentials();

  /** Whether TLS transport security should be used when connecting to Feast. */
  public abstract boolean isTLSEnabled();

  /**
   * If specified and TLS is enabled, provides the path to the TLS certificate
   * used to verify the service identity.
   */
  public abstract Optional<String> getCertificatePath();

  /** Builder for {@link SecurityConfig}. */
  @AutoValue.Builder
  public abstract static class Builder {
    public abstract Builder setCredentials(Optional<CallCredentials> credentials);

    public abstract Builder setTLSEnabled(boolean isTLSEnabled);

    public abstract Builder setCertificatePath(Optional<String> certificatePath);

    public abstract SecurityConfig build();
  }

  /** Returns a builder preconfigured with security disabled (no credentials, no TLS). */
  public static SecurityConfig.Builder newBuilder() {
    return new AutoValue_SecurityConfig.Builder()
        .setCredentials(Optional.empty())
        .setTLSEnabled(false)
        .setCertificatePath(Optional.empty());
  }
}
def draw_fpentamino(on_grid: np.ndarray, at_offset: int) -> np.ndarray:
    """Stamp an F-pentomino onto ``on_grid`` (in place) and return the grid.

    The five live cells are written relative to a fixed anchor near the
    top-left corner, shifted diagonally (down and right) by ``at_offset``::

        .XX
        XX.
        .X.

    Args:
        on_grid: 2D array mutated in place; the five touched cells are set to 1.
        at_offset: Shift added to both the row and column of every cell.

    Returns:
        The same ``on_grid`` array (the original code declared this return
        type but returned ``None``).
    """
    for row, col in ((4, 4), (4, 5), (5, 3), (5, 4), (6, 4)):
        on_grid[row + at_offset, col + at_offset] = 1
    return on_grid
<filename>quic/typings/data/quic.schema.d.ts
declare namespace Quic {
    namespace Data {
        /**
         * A node in a schema tree describing one property/index of observed
         * data, with subscription hooks for value changes at that path.
         */
        interface ISchema {
            /** Child schemas keyed by property name. */
            props: {
                [name: string]: ISchema;
            };
            /**
             * Child schemas keyed by numeric index.
             * NOTE(review): spelled "indexs" in the public API — kept as-is;
             * renaming would break consumers.
             */
            indexs: {
                [name: number]: ISchema;
            };
            /** Returns the child schema for the named property. */
            prop(name: string): ISchema;
            /** Returns the child schema for the given index (or name). */
            index(nameOrIndex: string | number): ISchema;
            /** Registers a value-change listener. */
            subscribe(listener: ISchemaValuechangeListener): ISchema;
            /**
             * Removes a previously registered listener.
             * NOTE(review): spelled "unsubscibe" in the public API — kept as-is.
             */
            unsubscibe(listener: ISchemaValuechangeListener): ISchema;
            /** Reads the value at this schema's path from `data`. */
            get_value(data: any, fillDefault?: boolean): any;
            /** Writes `value` at this schema's path into `data`. */
            set_value(data: any, value: any, evtArgs?: any): ISchema;
            /** Invokes the registered listeners with the given change details. */
            notify(value: any, data: any, trigger: ISchema, evtArgs?: any): any;
            /** Builds sub-schemas from a path expression. */
            define(expr: string): ISchema;
            /** Property name or array index of this node within its parent. */
            name: string | number;
            /** Parent (composite) schema, if any. */
            composite: ISchema;
            /** True when this node describes an array. */
            isArray: boolean;
            /** True when this node describes an object. */
            isObject: boolean;
        }
        /** Callback fired when a value under a schema path changes. */
        interface ISchemaValuechangeListener {
            (value: any, data: any, trigger: ISchema, evtArgs: any): any;
        }
        /** Concrete implementation of {@link ISchema}. */
        class Schema implements ISchema {
            props: {
                [name: string]: ISchema;
            };
            indexs: {
                [name: number]: ISchema;
            };
            /** Registered value-change listeners. */
            __listeners: Array<ISchemaValuechangeListener>;
            /** Internal stack — purpose not visible from this declaration. */
            __stack: Array<ISchema>;
            name: string | number;
            composite: ISchema;
            isArray: boolean;
            isObject: boolean;
            constructor(name?: string, composite?: ISchema);
            prop(name: string): ISchema;
            index(name: string): ISchema;
            get_value(data: any, fillDefault?: boolean): any;
            set_value(data: any, value: any, evtArgs?: any): ISchema;
            notify(value: any, data: any, trigger: ISchema, evtArgs: any): this;
            subscribe(listener: ISchemaValuechangeListener): ISchema;
            unsubscibe(listener: ISchemaValuechangeListener): ISchema;
            define(expr: string): ISchema;
        }
    }
}
|
/**
 * Test of setSilentUpdate method, of class org.netbeans.modules.tasklist.core.Task.
 */
public void testSetSilentUpdate() {
    // TODO: placeholder — no assertions yet; this only announces the test run.
    System.out.println("testSetSilentUpdate");
}
Book Review: Gill Plain and Susan Sellers, eds, A History of Feminist Literary Criticism. Cambridge: Cambridge University Press, 2007. 352 pp. (incl. index). ISBN 9780521852555, £70.00 (hbk)
Woolf lived. Through the focus on Virginia Woolf and those ‘servants’ we see how aggressive and embattled everyday class relations can be; the domestic space of the kitchen is, as all feminists know, a gendered battleground, but here we find it as a place in which women fought each other for their identity. Virginia Woolf, arguably, had to fight to maintain her sense of not being the female ‘angel in the house’ whilst Nellie clearly resented Virginia’s refusal to acknowledge her need for her. Both women wanted separation, both wanted individual recognition, but class relations could not allow that mutual interdependence to be given a public voice. But even if it had, we have to acknowledge that the very modernity for which Virginia Woolf stood included an understanding of the inequalities of class. Within that understanding it remains impossible to write of the individual human condition without an account of class. Thus whilst Virginia Woolf enormously enlarged our understanding of the subjectivity of modernity we are left, after Light’s groundbreaking book, with the question of how that subjectivity – with all its resistance to patriarchy and authoritarianism – remains deeply rooted in an aesthetic of bourgeois order. |
Have you ever imagined what it is like to be a gamer girl? This article takes the perspective from a female gamer herself. Let’s begin.
Lightning has her problems, but thanks to Square Enix, sometimes genders are not discriminated.
Remember when it was just you and your N64 (or whatever console that launched your fantastical trek into gaming) and then the world got involved, bringing much conflict? Let’s go back to the peace and contentment you felt way back then.
For the Love of Video Games
This beautiful gem is available on etsy. Congrats…you have achieved victory over the dungeon’s boss.
Okay, so being a gamer should have nothing to do with gender, age, race, or anything other than our mutual love for the game we are playing. This said, even well into the 21st century we are still dealing with stereotypes, prejudice, and chauvinism. Although all of these are a problem, the only one that seems to be acceptable by other gamers, and even an occasional admin, is the judgment and harassment of women. Use racial slurs or call someone gay in a demeaning way and it’s an instant ban, be an aggressive and offensive jerk and you might get a slight warning. The hold that this discrimination has on us will be our topic for today.
Game Developers Started It
Shout out to the beloved Samus Aran for keeping it real
Did you know that only about 20% of all main characters in video games are female? It’s true. In real life, of course, the population is about 50% women. Plus, when women are in video games, their main and often sole purpose is to be an attraction for the men. Women are usually an object for uncalled for attention, whether it is to be a romantic interest for the main protagonist or a scantily clad woman to boost sales. This automatically puts women lower on the totem pole.
But in actuality about 50% of gamers, give or take, are women. Thus, gamer girls aren’t actually a minority. So, how is it possible that one could name every female in professional eSports? Because the number is astoundingly low! Why? Because being in eSports as a girl instantly makes you a subject. You are known as one of the ‘female gamers’ in the profession. Not just a pro-player. Can you imagine the difference the world’s view would be of Faker (the best LoL player in the world) if he was a girl? His fame would be for different reasons. The focus becomes less on skill, and more on gender.
For instance, let’s say a new player is added to one of the most popular team’s roster. Happens, yeah. Well, this time, instead of another average twenty year old boy with mad skillz, we have a beautiful, or even average twenty year old woman. When a new player is added, there is always a little talk, but it focuses solely on gameplay for unknown faces. But if it were a woman, we all know the talk that would spread. Women would be saying ‘good for her’ or ‘whatever, she isn’t all that’ if she happened to be uncannily pretty. Men would not be so mild. There would be much dissing and admiring for all the wrong reasons.
You may say, ‘Go for it, who cares what people do and think and say?’ But, what about men in ‘female’ professions? Less than 10% of men are nurses. This is why men don’t like to pursue careers as nurses. They are treated differently (and often mistreated) by doctors, patients, and especially other nurses. In general, minorities are either looked down upon or put on a pedestal in fear of offending someone. Sad, but true. So…
What’s the difference?
One of the first female gaming icons, Ms. Pacman.
Why do so many people believe that girls cannot be skilled at video games? Why are you called out simply for being female? Why do guys pay special attention to anyone that could possibly be a woman? Why are we so often treated like aliens, instead of friends…or enemies?
We are all gamers; we play because we like to play. We are fans, otaku, nerds, geeks, or just enjoy a round or two…or fifty of competitive gaming. Sure, a girl can play Wii, or even Pokémon. But when she shows up in a ranked /competitive game, things are bound to go down eventually. Men generally see women differently…
She Must Be a Princess
Thanks Peach for needing to be saved…by a plumber.
The majority of male gamers will treat female gamers differently than their male friends. I have to say that many men will treat women with more respect than they will their guy friends, but this often leads to evasion and alienation of the women in the gaming world. Although their intentions are not bad, the outcome leaves girls feeling different and segregated.
Another type of guy is the one that pays special attention to girls in a way that seems faultless. They will gift you in-game, make it a point to clean up their talk, and praise you no matter how awful you happen to do. This feels awkward, patronizing, and again separates us. Especially when you know how they would treat your male friends in the same position, and this disrespect is cause for anger. Every friend request we have to question whether they add us because they actually thought we were skilled or even friendly, or simply because we are female.
Get Back in the Kitchen, Woman
Cooking Mama…yay…
Unfortunately the most common encounter that disrespects women is the openly offensive guy. I’m sure any of you who have spent long in the gaming world have seen someone who has made comments such as “You’re a girl, support me.” Or “Go make a sandwich.” These are the “nice” comments. I won’t discuss the severely inappropriate messages or ban-worthy conversations they try to have. Sure, it seems harmless and all in good fun, but it really does nothing but continue to broaden the gap between us.
Honorable…or Female?
Honor awards for League of Legends
So, I am very into League of Legends, and there is an honor system on there where after a game, you can honor your teammates or opponents for friendliness, helpfulness, or teamwork. Well, I got a badge for consistently getting enough honors. On more than one occasion I have had people tell me that the only reason I got this badge was because I was a girl. You see, I don’t know what bothers me more: the judgment from those haters, or the fact that they could possibly be right. I could be a complete flamer and rager, and I would still have a better chance at getting honor than my teammates. But on the flipside, I could have a wonderful game, and be reported for something irrelevant simply because someone doesn’t want me in the game.
Disclaimer: Men are Not All the Same
Shh…you can’t fault the master.
Don’t get me wrong, I know that male chauvinism is not the only judgment out there, but that is my focus for today, and honestly most games on League my gender is ignored, and everyone treats me the same as our other teammates and opponents. But some are different. MMOs for example are ridiculous. It is hard to go a day without someone asking me if I am a girl, as if it is relevant. Anyway, I could be a 50 year old man posing as a 20 year old girl for all they know. My word means nothing. Who knows…maybe I am a 50 year old man…you still don’t know…
For Those Needing an Excuse: Shame on Girls
Who is that? I didn’t recognize her almost properly dressed.
Don’t worry; I’m not letting the girls off the hook either. I mean, I know girls are partially responsible for this stereotype and annoyance drawn from guys. Many girls do use their gender to get what they want, which is usually attention. But, I do understand why many of you judge us. Girls very often use words like nerd or gamer girl to attract attention from males. They will post risqué pictures online with controllers or memorabilia and try to join conversations about gaming. There is a reason they feel they need this, and I am not here to judge, just to say this is not your standard girl you meet in a game. Those girls often, though not always, keep in offline, or on social media.
You may think that the point of this post is to try to get attention, that I am one of those “gamer girls.” But that is the exact opposite of my intention. As hard as it is to believe, I am trying to draw the attention away from the gender differences in gaming. I learned long ago that it is better if I rarely bring it up and ignore the comments when I can. Simply focus on the game, teamwork, and winning. The others will usually follow, even the most offensive ones. After all, it is insane to repeat the same process again and again expecting different results (thank you, Mr. Einstein). The majority of us really do just want to play. If men only knew: the way to the heart of a girl who plays video games — not a “gamer girl” — is to just play with her like she is a teammate, a fellow gamer, not something special.
Perhaps you are considering the idea to live stream? How to live stream video games is a great read for those thinking about jumping into the online world of video game live streaming.
An End Note:
So please, if you do nothing else today…just play. Be yourself, and stop acting like a noob. Doesn’t matter if you are a girl, a guy, or prefer not to say. Just get out there and enjoy the game. That is the intent of the creation. |
<filename>impl/cons/console.go<gh_stars>0
package cons
import (
"github.com/chzyer/readline"
"github.com/golangmc/minecraft-server/apis/uuid"
"github.com/golangmc/minecraft-server/lib"
"io"
"os"
"github.com/golangmc/minecraft-server/apis/base"
"github.com/golangmc/minecraft-server/apis/logs"
"github.com/golangmc/minecraft-server/impl/data/system"
)
// Console is the interactive server console: it reads operator input from the
// terminal and tees the server's output to both stdout and a log file.
type Console struct {
	o  io.Writer          // combined sink: readline stdout + "latest.log" file
	rl *readline.Instance // interactive terminal line reader

	logger *logs.Logging // logger writing formatted lines to `o`

	IChannel chan string // lines typed by the operator (inbound commands)
	OChannel chan string // lines to be logged/printed by the console (outbound)

	report chan system.Message // reports STOP/FAIL conditions to the server
}
// NewConsole wires up a console bound to the given system report channel.
// It prepares the interactive readline instance, tees console output to both
// the terminal's stdout and the "latest.log" file, and attaches a logger on
// top of that combined writer.
func NewConsole(report chan system.Message) *Console {
	c := &Console{
		IChannel: make(chan string),
		OChannel: make(chan string),
		report:   report,
	}
	c.rl = lib.ReadLine()
	c.o = io.MultiWriter(c.rl.Stdout(), c.newLogFile("latest.log"))
	c.logger = logs.NewLoggingWith("console", c.o, logs.EveryLevel...)
	return c
}
// Load starts the console's background goroutines:
//   - a reader forwarding terminal lines to IChannel,
//   - a drainer logging every line sent to OChannel,
//   - a watcher that exits on the first STOP/FAIL report.
func (c *Console) Load() {
	// handle i channel: terminal reader -> IChannel
	go func() {
		for {
			line, err := c.rl.Readline()
			// Interrupt with text on the line: discard the line and keep reading.
			if err == readline.ErrInterrupt && len(line) != 0 {
				continue
			}
			// Interrupt on an empty line, or EOF: request a normal stop.
			if err == readline.ErrInterrupt || err == io.EOF {
				c.report <- system.Make(system.STOP, "normal stop")
				return
			}
			// Attempt recovers a panic from sending on a closed IChannel.
			err = base.Attempt(func() {
				c.IChannel <- line
			})
			if err != nil {
				c.report <- system.Make(system.FAIL, err)
			}
		}
	}()
	// handle o channel: log every outgoing line until OChannel is closed.
	go func() {
		for line := range c.OChannel {
			c.logger.Info(line)
		}
	}()
	// Watcher: exits after the first report message.
	// NOTE(review): this receive *consumes* one message from c.report, which
	// any other reader of the channel will then never see — confirm that the
	// report channel is intended to be read here as well.
	go func() {
		select {
		case command := <-c.report:
			switch command.Command {
			// stop selecting when stop is received
			case system.STOP:
				return
			case system.FAIL:
				return
			}
		}
	}()
}
// Kill tears down the console's output pipeline by closing OChannel, which
// stops the logging goroutine started in Load. Safe to call more than once.
func (c *Console) Kill() {
	defer func() {
		_ = recover() // ignore panic with closing closed channel
	}()
	// save the log file as YYYY-MM-DD-{index}.log{.gz optionally compressed}
	// NOTE(review): the archiving described above is not implemented yet.
	close(c.OChannel)
}
// Name returns the constant identifier the console uses when acting as a
// message sender; it also seeds UUID().
func (c *Console) Name() string {
	return "ConsoleSender"
}
// SendMessage renders the given values to a single string and queues it on
// the console's output channel. If the channel has already been closed (for
// example after Kill), the resulting send panic is recovered and surfaced as
// a FAIL report instead of crashing the caller.
func (c *Console) SendMessage(message ...interface{}) {
	defer func() {
		if failure := recover(); failure != nil {
			c.report <- system.Make(system.FAIL, failure)
		}
	}()
	c.OChannel <- base.ConvertToString(message...)
}
// logFileWriter adapts an *os.File so console output can be teed into the
// log file via io.MultiWriter.
type logFileWriter struct {
	file *os.File
}
// newLogFile creates (truncating if present) the named log file and returns a
// writer appending to it. On failure the error is sent on the report channel
// and a no-op writer is returned: the previous code returned a nil io.Writer,
// which would make io.MultiWriter (see NewConsole) panic on the first write.
func (c *Console) newLogFile(name string) io.Writer {
	file, err := os.Create(name)
	if err != nil {
		c.report <- system.Make(system.FAIL, err)
		// Fall back to discarding output rather than handing back nil.
		return io.Discard
	}
	return &logFileWriter{file: file}
}
// Write forwards the raw bytes to the underlying log file.
func (l *logFileWriter) Write(p []byte) (n int, err error) {
	// this is going to be messy, but this should convert to string, strip colors, and then write to file. Don't @ me.
	// NOTE(review): the color stripping described above is not implemented yet;
	// bytes are currently written through unchanged.
	return l.file.Write(p)
}
// UUID derives a stable UUID for the console from its constant name.
func (c *Console) UUID() uuid.UUID {
	return uuid.TextToUUID(c.Name())
}
|
/**
 * Updates the camera based on which C buttons are pressed this frame.
 *
 * C-Up either cancels a pending zoom-out or enters C-Up mode; C-Down zooms
 * out; C-Left/C-Right start a rotation, and pressing the opposite direction
 * while rotating cancels the rotation instead. All state is communicated via
 * the camera movement flag globals.
 */
void handle_c_button_movement(struct Camera *c) {
    s16 cSideYaw;

    // C-Up: if the camera is zoomed out (and not fixed), zoom back in;
    // otherwise switch into C-Up mode and set the zoom amount.
    if (gPlayer1Controller->buttonPressed & U_CBUTTONS) {
        if (c->mode != CAMERA_MODE_FIXED && (gCameraMovementFlags & CAM_MOVE_ZOOMED_OUT)) {
            gCameraMovementFlags &= ~CAM_MOVE_ZOOMED_OUT;
            play_sound_cbutton_up();
        } else {
            set_mode_c_up(c);
            // Choose the zoom sign from the current distances.
            // NOTE(review): semantics of sZeroZoomDist vs gCameraZoomDist are
            // not visible here — confirm against the camera globals.
            if (sZeroZoomDist > gCameraZoomDist) {
                sZoomAmount = -gCameraZoomDist;
            } else {
                sZoomAmount = gCameraZoomDist;
            }
        }
    }

    // The remaining C buttons are ignored while the camera mode is fixed.
    if (c->mode != CAMERA_MODE_FIXED) {
        // C-Down: zoom out, or mark as already zoomed out if pressed again.
        if (gPlayer1Controller->buttonPressed & D_CBUTTONS) {
            if (gCameraMovementFlags & CAM_MOVE_ZOOMED_OUT) {
                gCameraMovementFlags |= CAM_MOVE_ALREADY_ZOOMED_OUT;
                sZoomAmount = gCameraZoomDist + 400.f;
#ifndef VERSION_JP
                // Non-JP builds play a buzz when C-Down is pressed while
                // already zoomed out.
                play_camera_buzz_if_cdown();
#endif
            } else {
                gCameraMovementFlags |= CAM_MOVE_ZOOMED_OUT;
                sZoomAmount = gCameraZoomDist + 400.f;
                play_sound_cbutton_down();
            }
        }

        // C-Right: cancel an in-progress left rotation, otherwise rotate right.
        cSideYaw = 0x1000;
        if (gPlayer1Controller->buttonPressed & R_CBUTTONS) {
            if (gCameraMovementFlags & CAM_MOVE_ROTATE_LEFT) {
                gCameraMovementFlags &= ~CAM_MOVE_ROTATE_LEFT;
            } else {
                gCameraMovementFlags |= CAM_MOVE_ROTATE_RIGHT;
                // Only play the side-button sound when starting a fresh rotation.
                if (sCSideButtonYaw == 0) {
                    play_sound_cbutton_side();
                }
                sCSideButtonYaw = -cSideYaw;
            }
        }
        // C-Left: cancel an in-progress right rotation, otherwise rotate left.
        if (gPlayer1Controller->buttonPressed & L_CBUTTONS) {
            if (gCameraMovementFlags & CAM_MOVE_ROTATE_RIGHT) {
                gCameraMovementFlags &= ~CAM_MOVE_ROTATE_RIGHT;
            } else {
                gCameraMovementFlags |= CAM_MOVE_ROTATE_LEFT;
                if (sCSideButtonYaw == 0) {
                    play_sound_cbutton_side();
                }
                sCSideButtonYaw = cSideYaw;
            }
        }
    }
}
#ifndef STPrimaryVertexEVENTS_HH
#define STPrimaryVertexEVENTS_HH

#include "StRareEventCut.h"
#include <Stiostream.h>

class StEvent;

// Event cut selecting events based on their primary vertex.
// Both Accept() and Report() are implemented outside this header.
class StPrimaryVertexEvents : public StRareEventCut {
public:
    StPrimaryVertexEvents(){};
    ~StPrimaryVertexEvents(){};

    // Decides whether the given event passes the cut.
    // NOTE(review): return-value convention (accept vs reject) is defined in
    // the implementation file — confirm there.
    int Accept(StEvent* event);
    // Reports on this cut's activity — see the implementation for details.
    void Report();

    ClassDef(StPrimaryVertexEvents,1) // ROOT dictionary macro
};
#endif
.
The restructuring of the State Sanitary Inspectorate was determined by the need to adjust the existing system to the new structure- and task-oriented standards introduced by the administrative reforms at the provincial level. This brought about a lot of changes in the way State County Sanitary Inspectors operate. The first few months after the enforcement of the Act of January 23, 2009 amending certain acts regarding reorganization and redistribution of competences of the public administration at the provincial level (Journal of Laws, No. 92, item 753, 2009) brought about a lot of ambiguities and expectations. Such unresolved issues are typical for the initial or transition period after the implementation of a new law. The approach to the operation of the State Sanitary Inspectorate at the county level depicted in the article lays the emphasis on the challenges in the legal and task-oriented domain that arose after the coming into force of the aforementioned amendment. By way of conclusion, the authors of the article briefly summarize the practical outcomes of the transformations undergone by the State Sanitary Inspectorate at the county level.
//! Python extension, exported functions and type conversions.
pub mod conversions;
pub mod utils;
use crate::blending::params::{BlendAlgorithmParams, Options};
use crate::blending::{
blend_images, demultiply_image, get_blending_algorithm, is_algorithm_multiplied, BlendAlgorithm,
};
use crate::constants;
use crate::errors::PConvertError;
use crate::parallelism::{ResultMessage, ThreadPool};
use crate::utils::{read_png_from_file, write_png_parallel, write_png_to_file};
use pyo3::exceptions::PyException;
use pyo3::prelude::*;
use pyo3::types::{IntoPyDict, PyDict, PySequence};
use std::sync::mpsc;
use utils::{
build_algorithm, build_params, get_compression_type, get_filter_type, get_num_threads,
};
static mut THREAD_POOL: Option<ThreadPool> = None;
/// Python module entry point for `pconvert_rust`.
///
/// At import time this starts the global blending thread pool, exports build
/// and runtime metadata as module attributes, and registers the Python-facing
/// functions `blend_images`, `blend_multiple` and `get_thread_pool_status`.
#[pymodule]
fn pconvert_rust(_py: Python, module: &PyModule) -> PyResult<()> {
    // Boot the shared thread pool once, at module import time.
    // NOTE(review): writing the `static mut` assumes imports are serialized
    // (Python's import lock) — confirm.
    unsafe {
        let mut thread_pool = ThreadPool::new(constants::DEFAULT_THREAD_POOL_SIZE).unwrap();
        thread_pool.start();
        THREAD_POOL = Some(thread_pool);
    }

    // Build/runtime metadata constants exposed as module attributes.
    module.add("COMPILATION_DATE", constants::COMPILATION_DATE)?;
    module.add("COMPILATION_TIME", constants::COMPILATION_TIME)?;
    module.add("VERSION", constants::VERSION)?;
    module.add("ALGORITHMS", constants::ALGORITHMS.to_vec())?;
    module.add("COMPILER", constants::COMPILER)?;
    module.add("COMPILER_VERSION", constants::COMPILER_VERSION)?;
    module.add("LIBPNG_VERSION", constants::LIBPNG_VERSION)?;
    module.add("FEATURES", constants::FEATURES.to_vec())?;
    module.add("PLATFORM_CPU_BITS", constants::PLATFORM_CPU_BITS)?;

    // Filter and compression enums are exported via their Debug names.
    let filters: Vec<String> = constants::FILTER_TYPES
        .to_vec()
        .iter()
        .map(|x| format!("{:?}", x))
        .collect();
    module.add("FILTER_TYPES", filters)?;

    let compressions: Vec<String> = constants::COMPRESSION_TYPES
        .to_vec()
        .iter()
        .map(|x| format!("{:?}", x))
        .collect();
    module.add("COMPRESSION_TYPES", compressions)?;

    /// Blends the image at `top_path` onto the image at `bot_path`, writing
    /// the result to `target_path`.
    #[pyfn(module, "blend_images")]
    fn blend_images_py(
        py: Python,
        bot_path: String,
        top_path: String,
        target_path: String,
        algorithm: Option<String>,
        is_inline: Option<bool>,
        options: Option<Options>,
    ) -> PyResult<()> {
        // blends two images using either the single-threaded or the multiple-threaded version
        // taking into consideration the requested number of thread in options
        py.allow_threads(|| -> PyResult<()> {
            let num_threads = get_num_threads(&options);
            if num_threads <= 0 {
                blend_images_single_thread(
                    bot_path,
                    top_path,
                    target_path,
                    algorithm,
                    is_inline,
                    options,
                )
            } else {
                unsafe {
                    blend_images_multi_thread(
                        bot_path,
                        top_path,
                        target_path,
                        algorithm,
                        is_inline,
                        options,
                        num_threads,
                    )
                }
            }
        })
    }

    /// Blends a sequence of images (first path is the base layer) into
    /// `out_path`, applying one algorithm per blending operation.
    #[pyfn(module, "blend_multiple")]
    fn blend_multiple_py(
        py: Python,
        img_paths: &PySequence,
        out_path: String,
        algorithm: Option<String>,
        algorithms: Option<&PySequence>,
        is_inline: Option<bool>,
        options: Option<Options>,
    ) -> PyResult<()> {
        // parses python types to rust owned values so that they are safely shared between threads
        let img_paths: Vec<String> = img_paths.extract()?;
        let num_images = img_paths.len();
        // Precedence: explicit per-step `algorithms`, then a single repeated
        // `algorithm`, then the multiplicative default.
        // NOTE(review): `num_images - 1` underflows (panics) when no paths
        // are given; the arity checks in the blend functions run too late to
        // catch that — confirm/guard upstream.
        let algorithms_to_apply: Vec<(BlendAlgorithm, Option<BlendAlgorithmParams>)> =
            if let Some(algorithms) = algorithms {
                build_params(algorithms)?
            } else if let Some(algorithm) = algorithm {
                let algorithm = build_algorithm(&algorithm)?;
                vec![(algorithm, None); num_images - 1]
            } else {
                vec![(BlendAlgorithm::Multiplicative, None); num_images - 1]
            };
        // blends multiple images using either the single-threaded or the multiple-threaded version
        // taking into consideration the requested number of thread in options
        py.allow_threads(|| -> PyResult<()> {
            let num_threads = get_num_threads(&options);
            if num_threads <= 0 {
                blend_multiple_single_thread(
                    img_paths,
                    out_path,
                    algorithms_to_apply,
                    is_inline,
                    options,
                )
            } else {
                unsafe {
                    blend_multiple_multi_thread(
                        img_paths,
                        out_path,
                        algorithms_to_apply,
                        is_inline,
                        options,
                        num_threads,
                    )
                }
            }
        })
    }

    /// Returns a dict describing the current state of the global thread pool.
    #[pyfn(module, "get_thread_pool_status")]
    fn get_thread_pool_status(py: Python) -> PyResult<&PyDict> {
        unsafe {
            match &mut THREAD_POOL {
                Some(thread_pool) => {
                    let status_dict = thread_pool.get_status().into_py_dict(py);
                    Ok(status_dict)
                }
                // A missing pool means module init never ran (or failed).
                None => Err(PyException::new_err(
                    "Acessing global thread pool".to_string(),
                )),
            }
        }
    }

    Ok(())
}
/// Single-threaded blend: reads both PNGs on the calling thread, blends the
/// top layer onto the bottom one in place, and writes the result out with the
/// sequential PNG encoder. `algorithm` defaults to "multiplicative";
/// `is_inline` is currently accepted but unused.
fn blend_images_single_thread(
    bot_path: String,
    top_path: String,
    target_path: String,
    algorithm: Option<String>,
    is_inline: Option<bool>,
    options: Option<Options>,
) -> PyResult<()> {
    let algorithm_name = algorithm.unwrap_or_else(|| String::from("multiplicative"));
    let parsed_algorithm = build_algorithm(&algorithm_name)?;
    let _is_inline = is_inline.unwrap_or(false);
    let demultiply = is_algorithm_multiplied(&parsed_algorithm);
    let blender = get_blending_algorithm(&parsed_algorithm);
    // The bottom layer doubles as the output buffer: the top layer is
    // blended into it in place.
    let mut composition = read_png_from_file(bot_path, demultiply)?;
    let top_layer = read_png_from_file(top_path, demultiply)?;
    blend_images(&mut composition, &top_layer, &blender, &None);
    write_png_to_file(
        target_path,
        &composition,
        get_compression_type(&options),
        get_filter_type(&options),
    )?;
    Ok(())
}
/// Multi-threaded variant of `blend_images_single_thread`: both PNG reads are
/// dispatched onto the global thread pool and the final write uses the
/// parallel PNG encoder.
///
/// # Safety
/// Reads the global `THREAD_POOL` `static mut`; callers must guarantee no
/// concurrent mutation of that static (it is only written at module init).
unsafe fn blend_images_multi_thread(
    bot_path: String,
    top_path: String,
    target_path: String,
    algorithm: Option<String>,
    is_inline: Option<bool>,
    options: Option<Options>,
    num_threads: i32,
) -> PyResult<()> {
    let algorithm = algorithm.unwrap_or_else(|| String::from("multiplicative"));
    let algorithm = build_algorithm(&algorithm)?;
    let _is_inline = is_inline.unwrap_or(false);
    let demultiply = is_algorithm_multiplied(&algorithm);
    let algorithm_fn = get_blending_algorithm(&algorithm);
    let thread_pool = match &mut THREAD_POOL {
        Some(thread_pool) => thread_pool,
        None => panic!("Unable to access global pconvert thread pool"),
    };
    // expands thread pool to the desired number of threads/parallelism (if necessary and possible)
    thread_pool.expand_to(num_threads as usize);
    // Queue both reads on the pool; each returns a channel for its result.
    let bot_result_channel = thread_pool
        .execute(move || ResultMessage::ImageResult(read_png_from_file(bot_path, demultiply)));
    let top_result_channel = thread_pool
        .execute(move || ResultMessage::ImageResult(read_png_from_file(top_path, demultiply)));
    // Await both reads before blending (bottom first, then top).
    let mut bot = match bot_result_channel.recv().unwrap() {
        ResultMessage::ImageResult(result) => result,
    }?;
    let top = match top_result_channel.recv().unwrap() {
        ResultMessage::ImageResult(result) => result,
    }?;
    blend_images(&mut bot, &top, &algorithm_fn, &None);
    let compression_type = get_compression_type(&options);
    let filter_type = get_filter_type(&options);
    // Parallel (multi-threaded) PNG encoding for the final write.
    write_png_parallel(target_path, &bot, compression_type, filter_type)?;
    Ok(())
}
/// Sequentially blends `img_paths` into a single composition written to
/// `out_path`, applying one `(algorithm, params)` pair per blending
/// operation. `is_inline` is currently accepted but unused.
///
/// # Errors
/// Returns an `ArgumentError` when `img_paths` is empty or when the number
/// of algorithms does not equal `img_paths.len() - 1`.
fn blend_multiple_single_thread(
    img_paths: Vec<String>,
    out_path: String,
    algorithms: Vec<(BlendAlgorithm, Option<BlendAlgorithmParams>)>,
    is_inline: Option<bool>,
    options: Option<Options>,
) -> PyResult<()> {
    let num_images = img_paths.len();
    if num_images < 1 {
        return Err(PyErr::from(PConvertError::ArgumentError(
            "ArgumentError: 'img_paths' must contain at least one path".to_string(),
        )));
    }
    if algorithms.len() != num_images - 1 {
        return Err(PyErr::from(PConvertError::ArgumentError(format!(
            "ArgumentError: 'algorithms' must be of size {} (one per blending operation)",
            num_images - 1
        ))));
    };
    let _is_inline = is_inline.unwrap_or(false);
    // Loops through the algorithms to apply and blends the current
    // composition with the next layer.
    let mut img_paths_iter = img_paths.iter();
    let first_path = img_paths_iter.next().unwrap().to_string();
    // Fix: with a single image `algorithms` is empty, so the previous
    // `algorithms[0]` indexing panicked; no blending happens in that case,
    // hence the lone image needs no demultiply either.
    let first_demultiply = algorithms
        .first()
        .map(|(algorithm, _)| is_algorithm_multiplied(algorithm))
        .unwrap_or(false);
    let mut composition = read_png_from_file(first_path, first_demultiply)?;
    let zip_iter = img_paths_iter.zip(algorithms.iter());
    for (path, (algorithm, algorithm_params)) in zip_iter {
        let demultiply = is_algorithm_multiplied(algorithm);
        let algorithm_fn = get_blending_algorithm(algorithm);
        let current_layer = read_png_from_file(path.to_string(), demultiply)?;
        blend_images(
            &mut composition,
            &current_layer,
            &algorithm_fn,
            algorithm_params,
        );
    }
    let compression_type = get_compression_type(&options);
    let filter_type = get_filter_type(&options);
    write_png_to_file(out_path, &composition, compression_type, filter_type)?;
    Ok(())
}
/// Blends multiple PNG images into a single output file, reading all
/// source images in parallel on the global thread pool.
///
/// The first path in `img_paths` seeds the composition; each remaining
/// image is blended onto it using the corresponding entry in
/// `algorithms` (one algorithm per blending operation).
///
/// # Arguments
///
/// * `img_paths` - Paths of the images to blend (at least one).
/// * `out_path` - Path where the resulting PNG is written.
/// * `algorithms` - One `(algorithm, params)` pair per blend operation,
///   so its length must be `img_paths.len() - 1`.
/// * `is_inline` - Currently unused flag (defaults to `false`).
/// * `options` - Optional options map (compression/filter types).
/// * `num_threads` - Desired parallelism; the global pool is expanded
///   to this size if necessary and possible.
///
/// # Safety
///
/// Dereferences the global mutable `THREAD_POOL` static; the caller
/// must ensure accesses to it are not raced from multiple threads.
unsafe fn blend_multiple_multi_thread(
    img_paths: Vec<String>,
    out_path: String,
    algorithms: Vec<(BlendAlgorithm, Option<BlendAlgorithmParams>)>,
    is_inline: Option<bool>,
    options: Option<Options>,
    num_threads: i32,
) -> PyResult<()> {
    let num_images = img_paths.len();
    if num_images < 1 {
        return Err(PyErr::from(PConvertError::ArgumentError(
            "ArgumentError: 'img_paths' must contain at least one path".to_string(),
        )));
    }
    if algorithms.len() != num_images - 1 {
        return Err(PyErr::from(PConvertError::ArgumentError(format!(
            "ArgumentError: 'algorithms' must be of size {} (one per blending operation)",
            num_images - 1
        ))));
    };
    let _is_inline = is_inline.unwrap_or(false);
    let thread_pool = match &mut THREAD_POOL {
        Some(thread_pool) => thread_pool,
        None => panic!("Unable to access global pconvert thread pool"),
    };
    // expands thread pool to the desired number of threads/parallelism (if necessary and possible)
    thread_pool.expand_to(num_threads as usize);
    // schedules every PNG read on the pool; demultiplication is applied
    // later on the receiving side, once the target algorithm is known
    let mut png_channels: Vec<mpsc::Receiver<ResultMessage>> = Vec::with_capacity(num_images);
    for path in img_paths.into_iter() {
        let result_channel = thread_pool.execute(move || -> ResultMessage {
            ResultMessage::ImageResult(read_png_from_file(path, false))
        });
        png_channels.push(result_channel);
    }
    let first_demultiply = is_algorithm_multiplied(&algorithms[0].0);
    let mut composition = match png_channels[0].recv().unwrap() {
        ResultMessage::ImageResult(result) => result,
    }?;
    if first_demultiply {
        demultiply_image(&mut composition)
    }
    // loops through the algorithms to apply and blends the
    // current composition with the next layer
    // retrieves the images from the result channels
    for i in 1..png_channels.len() {
        let (algorithm, algorithm_params) = &algorithms[i - 1];
        let demultiply = is_algorithm_multiplied(&algorithm);
        let algorithm_fn = get_blending_algorithm(&algorithm);
        let mut current_layer = match png_channels[i].recv().unwrap() {
            ResultMessage::ImageResult(result) => result,
        }?;
        if demultiply {
            demultiply_image(&mut current_layer)
        }
        // fixed: "&current_layer" had been mangled into the HTML
        // entity sequence "¤t_layer" (from "&curren;")
        blend_images(
            &mut composition,
            &current_layer,
            &algorithm_fn,
            algorithm_params,
        );
    }
    let compression_type = get_compression_type(&options);
    let filter_type = get_filter_type(&options);
    write_png_parallel(out_path, &composition, compression_type, filter_type)?;
    Ok(())
}
|
/**
* Find the filter environments.
*
* @param request the HTTP servlet request.
* @param servletName name of the servlet if any. Can be null.
*
* @return the filter environments.
*/
protected List<DefaultFilterEnvironment> findFilterEnvironments(HttpServletRequest request, String servletName) {
List<DefaultFilterEnvironment> result = null;
String path = request.getServletPath() + (request.getPathInfo() == null ? "" : request.getPathInfo());
Collection<String> filterNames = webApplicationRequestMapper.findFilterMappings(path);
if (servletName != null) {
String servletNamePath = "servlet:// " + servletName;
filterNames.addAll(webApplicationRequestMapper.findFilterMappings(servletNamePath));
}
if (!filterNames.isEmpty()) {
result = new ArrayList<>();
for (String filterName : filterNames) {
if (filters.get(filterName) != null) {
result.add(filters.get(filterName));
}
}
}
return result;
} |
def extract_exif(self):
    """Aggregate every supported EXIF property into a single dict.

    Delegates to the individual ``extract_*`` helpers and returns a
    dict with the keys: width, height, orientation, direction, make,
    model, capture_time and gps.
    """
    # Keep the original extraction order in case any helper caches state.
    width, height = self.extract_image_size()
    make = self.extract_make()
    model = self.extract_model()
    orientation = self.extract_orientation()
    geo = self.extract_geo()
    capture = self.extract_capture_time()
    direction = self.extract_direction()
    exif = dict(
        width=width,
        height=height,
        orientation=orientation,
        direction=direction,
        make=make,
        model=model,
        capture_time=capture,
    )
    exif['gps'] = geo
    return exif
def predict(self, RDD_X2):
    """Project the given RDD onto the fitted principal components.

    Parameters
    ----------
    RDD_X2 : RDD of feature vectors to transform.

    Returns
    -------
    RDD of projected vectors, or None when the model has not been
    fitted yet (a message is printed in that case).
    """
    # `is not None` avoids the element-wise comparison (and the
    # "truth value is ambiguous" error) that `!= None` triggers when
    # self._U is a numpy array/matrix.
    if self._U is not None:
        RDD_norm = self._scaler.transform(RDD_X2)
        U = self._U
        return RDD_norm.map(lambda x: x.dot(U.T))
    # Parenthesized form is valid as both a py2 statement and a py3 call.
    print('You have to fit the model first')
Feed Additives with the Inclusion of Co and Mn Change Their Bioavailability and Digestibility of Substances in Bull Calves
In accordance with the scheme of the experiment animals in the control group received the basic diet (BD), Experimental Group I–BD + feed additive, replacing 30% of the concentrated part of the ration with the inclusion of salts of sulfates of cobalt and manganese, Group II – BD + feed additive, replacing 30% of the concentrated part of the ration with the inclusion of nano - and microparticles of cobalt and manganese. In situ studies found that the dry matter digestibility of feed additives containing chemically pure metal particles of cobalt and manganese and inactivated feed yeast was 15.4-20.0% (P<0.05) lower than in additives with wheat bran. However, when simulating rennet digestion (in vitro), it was found that the dry matter digestibility of feed additives containing chemically pure metal particles of cobalt and manganese and inactivated feed yeast increased by 1.5-2.2% in relation to additives containing wheat bran. Comparing the availability of pure manganese, we note that in the mixture with feed yeast, it was higher than with bran by more than 2.6 times. Similar values were observed for its inorganic form.
Introduction
The development and implementation of reliable methods for correcting the physiological status of the animal body in order to optimize its productive functioning are important tasks for modern biological science . Recently, various feed additives and biologically active substances have been widely used for breeding young cattle . Feed is the main source of vitamins and minerals for animals. At the same time, the mineral and vitamin composition of each type of feed is subject to significant fluctuations and depends on the type of soil, climatic conditions, plant species, vegetation phase, agrochemical measures carried out by farms, harvesting technology, storage and preparation of prepared feed for feeding, and other factors . In the forage prepared by the farm, there is often a lack of some elements and an excess of others, which leads to diseases, reduced productivity, violations in reproduction, deterioration of the quality of milk and meat received from cows, and low efficiency of feed use.
It is known that minerals play a role in four types of animal body functions: structural, physiological, catalytic, and regulatory ones . Thus, information concerning the requirements for these substances for maintaining the vital activity of the body is important for beef cattle, for example in order to achieve their maximum production potential . Thus, according to the recommendations of the US National Research Council on cattle (NRC) , it is assumed that at least 17 minerals are required for these animals. In modern conditions of animal husbandry, control by specialists over the supply of animals with minerals and vitamins is extremely important, since diseases associated with their insufficiency, imbalance and toxicity are now widespread .
The aim of the study was to examine the effect of feed additives with the addition of essential chemical elements on their bioavailability and on the digestibility of the main nutrients in the diet of bull calves.
Materials and methods
The objects of the study were the ruminal fluid of young cattle (selection was carried out through a chronic fistula of the scar); young bulls of the red steppe breed at the age of 12 months. The research site is the physiological yard of the Pokrovsky Agricultural College, a branch of the Orenburg State Agrarian University.
Animal care and experimental studies were performed in accordance with the instructions and recommendations of Russian Regulations, 1987(Order No. 755 on 12.08.1977 the USSR Ministry of Health) and "The Guide for Care and Use of Laboratory Animals (National Academy Press Washington, D.C. 1996)". When performing research, efforts were made to minimize animal suffering and reduce the number of samples used.
The basic diet (BD) included Sudan grass hay (2nd cutting), alfalfa hay, concentrates. The animals in the control group received BD, Experimental Group I -BD + feed additive, which replaced 30% of the concentrated part of the diet with the inclusion of salts of cobalt and manganese sulfates, Experimental Group II-BD + feed additive, which replaced 30 % of the concentrated part of the diet with the inclusion of nano -and microparticles of cobalt and manganese.
Studies were performed using nylon pouches: in vitro using the artificial rumen SCAR KPL 01 – 24 and 12 hour exposure; in situ – chronic rumen fistula on young cattle – 24 hour exposure.
Mathematical processing of the obtained data was performed by the program "SPSS Statistics Version 20".
Research results and discussion
According to the results of the in situ studies, it was found ( Fig.1) that the dry matter digestibility of feed additives containing chemically pure metal particles of cobalt and manganese and inactivated feed yeast was lower by 15.4-20.0% (P<0.05) than in additives with wheat bran. Figure 1. Dry matter digestibility of feed additives in situ, % (24 hours. incubation in the rumen) ainactivated feed yeast; bwheat bran; Cfeed yeast + bran.
In our opinion, this is primarily due to the fact that monnanooligosaccharides contained in large quantities in the cell walls of feed yeast have a sorption capacity, for example in relation to biologically active substances. Similar results were observed when using metal salts (sulfates).
At the same time, during further studies (imitation of rennet digestion in vitro), it was found (Fig. 2) that the dry matter digestibility of feed additives containing chemically pure metal particles of cobalt and manganese and inactivated feed yeast increased by 1.5-2.2% in relation to additives containing wheat bran. The dry matter digestibility of feed additives containing trace element salts, depending on the origin of the main component, changed as follows: it decreased in the presence of feed yeast with respect to manganese (more than 6%) and slightly increased with respect to cobalt.
After incubation, the samples were analyzed for the composition of 25 trace elements (Table 1). After incubation in the rumen of the mixture of feed yeast (CD) + manganese (CP), the content of the latter was higher than in the mixture of bran (O) + manganese (CP) by 43.9% (P≤0.05), i.e. the availability of this element for the rumen microflora in the first variant was lower. As for cobalt, after incubation in the rumen of the CD + cobalt (CP) mixture, the content of the latter was higher than in the O + cobalt (CP) mixture by 32.5% (P≤0.05).
Comparing the availability of pure manganese, we note that in the mixture with feed yeast, it was higher than with bran by more than 2.6 times. Similar values were observed for its inorganic form. As for cobalt, the availability of a chemically pure substance did not differ depending on the carriers. When incubating feed yeast with its salts, there was an increase in availability for the body by 77% (P≤0.05).
Further studies evaluated the availability of chemical elements in the samples after in vitro incubation (Table 2). After in vitro studies of the mixture of feed yeast and Co, the content of this element was 1.80% higher than in the bran + cobalt mixture, i.e. its availability in the first variant was lower. As for chemically pure manganese, the amount of it in the O+Mn mixture was higher (18.88%, P≤0.05), compared to the FY+Mn mixture. A limited number of similar studies have been found in the available literature, as it was noted that there is a deficiency of cobalt and zinc (4.2% and 9.7%, respectively) in the diets of cattle, which confirms the relevance of our research.
Calves from cows that received a diet with added sulfate sources of Cu, Co, Mn, and Zn (INR) and with organic sources of the same elements (AAC) had higher (<0.01) concentrations of Co in the liver at birth compared to calves from control cows (CON). The concentration of Cu and Zn in the liver at birth was higher (≤ 0.05) in calves from AAC cows compared to analogues from CON cows. In a growing batch of calves from AAC cows, the incidence of bovine respiratory diseases was lower (<0.01) compared to the CON and INR groups. Taken together, these results suggest that feeding the AAC diet to late-aged nursing cows stimulated genetic effects on the growth and health of postnatal offspring compared to the CON diet . In other studies, it was noted that the sources of chemical elements did not affect the productivity of cattle and immune responses during rearing and fattening .
The fact that metal complexes exhibit higher antibacterial and antifungal efficacy in comparison with their corresponding ligands may also affect the digestibility of substances . It was also noted that the consumption of organic trace elements in combination with amino acids during the transition period improved liver function and reduced inflammation and oxidative stress .
Conclusions
Thus, in the course of research, it was proposed to prepare a feed additive for young cattle. It consists in mixing highly dispersed particles of cobalt (up to 150 nm in size) and manganese (up to 300 nm in size) in a dose 10% higher than the norm for young cattle on fattening. It is mixed with inactivated feed yeast, subjected to granulation at a temperature of 60–70 °С and under pressure up to 1.5 bar. It
/** copy method for event handler plugins (called when SCIP copies plugins) */
static
SCIP_DECL_EVENTCOPY(eventhdlrCopyObj)
{
   SCIP_EVENTHDLRDATA* eventhdlrdata;

   assert(scip != NULL);

   /* fetch the C++ wrapper object stored in the event handler's data */
   eventhdlrdata = SCIPeventhdlrGetData(eventhdlr);
   assert(eventhdlrdata != NULL);
   assert(eventhdlrdata->objeventhdlr != NULL);
   /* the copy callback must be invoked on a different SCIP instance
    * than the one the original handler was registered with */
   assert(eventhdlrdata->objeventhdlr->scip_ != scip);

   if( eventhdlrdata->objeventhdlr->iscloneable() )
   {
      scip::ObjEventhdlr* newobjeventhdlr;
      newobjeventhdlr = dynamic_cast<scip::ObjEventhdlr*> (eventhdlrdata->objeventhdlr->clone(scip));

      /* include the clone in the target SCIP; deleteobject = TRUE hands
       * ownership of the clone to that SCIP instance */
      SCIP_CALL( SCIPincludeObjEventhdlr(scip, newobjeventhdlr, TRUE) );
   }

   return SCIP_OKAY;
}
<gh_stars>10-100
from ray.experimental.workflow.api import step, run, resume
from ray.experimental.workflow.workflow_access import WorkflowExecutionError

# Public API surface of the workflow package.
__all__ = ("step", "run", "resume", "WorkflowExecutionError")
|
Luna "likes most d*cks." It's no wonder why our first encounter was during the B*kkake Social Club! Many ask me why a woman would want to suck so many c*cks in one sitting (or kneeling). Well, this week's Manwhore Podcast has your answer! After growing up Orthodox Jewish, she is now a professional dominatrix who takes pleasure in whipping her fellow Chosen People!
PLUS: dating while homeless, polyamory, sex work, break-ups, BDSM, happy endings!
Missed my Periscope of New York Body Painting Day? Check out the pictures on Twitter!
Support your favorite whore by donating to The Manwhore Podcast! Make your pledge today by visiting my Patreon page!
This week's episode is sponsored by the letter Q. The queerest letter in the alphabet supports you and your lifestyle. Q: question nothing.
Email your comments, questions, and boobies to [email protected].
www.ManwhorePod.com |
# Distribute a total amount of time over a number of days so that each
# day's share lies within its [minimum, maximum] bounds; print "NO" when
# impossible, otherwise "YES" followed by one feasible schedule.
day_count, total_time = [int(x) for x in input().split()]
minimums = []
maximums = []
for _ in range(day_count):
    low, high = [int(x) for x in input().split()]
    minimums.append(low)
    maximums.append(high)

# Infeasible when even the extremes cannot bracket the requested total.
if sum(maximums) < total_time or sum(minimums) > total_time:
    print("NO")
    exit()

# Start each day at its minimum, then greedily pour the remainder into
# the earliest days up to their individual headroom.
remaining = total_time - sum(minimums)
schedule = []
for low, high in zip(minimums, maximums):
    extra = min(remaining, high - low)
    schedule.append(str(low + extra))
    remaining -= extra

print("YES")
print(" ".join(schedule))
<gh_stars>10-100
package code
import (
"archive/zip"
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
"syscall"
"time"
"github.com/shurcooL/go/vfs/godocfs/vfsutil"
"github.com/shurcooL/home/internal/mod"
"github.com/shurcooL/httperror"
"golang.org/x/mod/modfile"
"golang.org/x/mod/module"
"sourcegraph.com/sourcegraph/go-vcs/vcs"
"sourcegraph.com/sourcegraph/go-vcs/vcs/git"
)
// ModuleHandler is a Go module server that implements the
// module proxy protocol, as specified at
// https://golang.org/cmd/go/#hdr-Module_proxy_protocol.
//
// At this time, it has various restrictions compared to the
// general go mod download functionality that extracts module
// versions from a VCS repository:
//
// 	•	It serves only pseudo-versions derived from commits
// 		on master branch. No other versions or module queries
// 		are supported at this time.
//
// 	•	It serves a single module corresponding to the root
// 		of each repository. Multi-module repositories are not
// 		supported at this time.
//
// 	•	It serves only the v0 major version. Major versions
// 		other than v0 are not supported at this time.
//
// This may change over time as my needs evolve.
//
// Requests are served through the ServeModule method.
type ModuleHandler struct {
	// Code is the underlying source of Go code.
	// Each repository root available in it is served as a Go module.
	Code *Service
}
// ServeModule serves a module proxy protocol HTTP request.
//
// The "$GOPROXY/" prefix must be stripped from req.URL.Path, so that
// the given req.URL.Path is like "<module>/@v/<version>.info" (no leading slash).
func (h ModuleHandler) ServeModule(w http.ResponseWriter, req *http.Request) error {
	if req.Method != http.MethodGet {
		return httperror.Method{Allowed: []string{http.MethodGet}}
	}

	// Parse the module path, type, and version from the URL.
	r, ok := parseModuleProxyRequest(req.URL.Path)
	if !ok {
		return os.ErrNotExist
	}
	unesc, ok := r.Unescape() // Unescape module path and version.
	if !ok {
		return httperror.BadRequest{Err: fmt.Errorf("failed to unescape module path=%q and/or version=%q", r.Module, r.Version)}
	}
	modulePath, typ, version := unesc.Module, unesc.Type, unesc.Version

	// Look up code directory by module path.
	// Only repository roots are served as modules (multi-module
	// repositories are not supported; see the type comment).
	d, err := h.Code.GetDirectory(req.Context(), modulePath)
	if err != nil || !d.IsRepoRoot() {
		return os.ErrNotExist
	}
	gitDir := filepath.Join(h.Code.reposDir, filepath.FromSlash(d.RepoRoot))

	// Handle "/@v/list" request.
	if typ == "list" {
		return h.serveList(req.Context(), w, gitDir)
	}

	// Parse the time and revision from the v0.0.0 pseudo-version.
	versionTime, versionRevision, err := mod.ParseV000PseudoVersion(version)
	if err != nil {
		return os.ErrNotExist
	}

	// Open the git repository and get the commit that corresponds to the pseudo-version.
	repo, err := git.Open(gitDir)
	if err != nil {
		return err
	}
	defer func() {
		if err := repo.Close(); err != nil {
			log.Println("ModuleHandler.ServeModule: repo.Close:", err)
		}
	}()
	commitID, err := repo.ResolveRevision(versionRevision)
	if err != nil {
		return os.ErrNotExist
	}
	// The commit must exist, its committer time must match the
	// pseudo-version timestamp exactly, and it must be reachable
	// from master; otherwise the version is reported as not found.
	commit, err := repo.GetCommit(commitID)
	if err != nil || commit.Committer == nil || !versionTime.Equal(time.Unix(commit.Committer.Date.Seconds, 0).UTC()) {
		return os.ErrNotExist
	} else if !isCommitOnMaster(req.Context(), gitDir, commit) {
		return os.ErrNotExist
	}

	// Handle one of "/@v/<version>.<ext>" requests.
	switch typ {
	case "info":
		return h.serveInfo(w, version, versionTime)
	case "mod":
		return h.serveMod(w, modulePath, repo, commitID)
	case "zip":
		return h.serveZip(w, modulePath, version, repo, commitID)
	default:
		// parseModuleProxyRequest only produces the types above.
		panic("unreachable")
	}
}
// serveList handles a "/@v/list" request by writing the known
// pseudo-versions as plain text, one per line, oldest first.
func (ModuleHandler) serveList(ctx context.Context, w http.ResponseWriter, gitDir string) error {
	revs, err := listMasterCommits(ctx, gitDir)
	if err != nil {
		return err
	}
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	// revs comes back newest-first; emit the reverse order.
	for i := range revs {
		fmt.Fprintln(w, revs[len(revs)-1-i].Version)
	}
	return nil
}
// serveInfo handles a "/@v/<version>.info" request by writing the
// version metadata as tab-indented JSON.
func (ModuleHandler) serveInfo(w http.ResponseWriter, version string, time time.Time) error {
	w.Header().Set("Content-Type", "application/json")
	enc := json.NewEncoder(w)
	enc.SetIndent("", "\t")
	return enc.Encode(mod.RevInfo{
		Version: version,
		Time:    time,
	})
}
// serveMod handles a "/@v/<version>.mod" request by writing the go.mod
// file at the given commit, or a synthesized minimal one when the
// commit contains no go.mod file.
func (ModuleHandler) serveMod(w http.ResponseWriter, modulePath string, repo *git.Repository, commitID vcs.CommitID) error {
	fs, err := repo.FileSystem(commitID)
	if err != nil {
		return err
	}
	f, err := fs.Open("/go.mod")
	if os.IsNotExist(err) {
		// go.mod file doesn't exist in this commit.
		f = nil
	} else if err != nil {
		return err
	}
	if f != nil {
		defer f.Close()
	}
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	if f != nil {
		// Copy the existing go.mod file.
		_, err := io.Copy(w, f)
		return err
	} else {
		// Synthesize a go.mod file with just the module path.
		// AutoQuote guards against paths that require quoting.
		_, err := fmt.Fprintf(w, "module %s\n", modfile.AutoQuote(modulePath))
		return err
	}
}
// serveZip handles a "/@v/<version>.zip" request by streaming the
// module zip built from the repository contents at the given commit.
func (ModuleHandler) serveZip(w http.ResponseWriter, modulePath, version string, repo *git.Repository, commitID vcs.CommitID) error {
	w.Header().Set("Content-Type", "application/zip")
	return WriteModuleZip(w, module.Version{Path: modulePath, Version: version}, repo, commitID)
}
// WriteModuleZip builds a zip archive for module version m
// by including all files from repository r at commit id,
// and writes the result to w.
//
// WriteModuleZip does not support multi-module repositories.
// A go.mod file may be in root, but not in any other directory.
//
// Unlike "golang.org/x/mod/zip".Create, it does not verify
// any module zip restrictions. It will produce an invalid
// module zip if given a commit containing invalid files.
// It should be used on commits that are known to have files
// that are all acceptable to include in a module zip.
//
func WriteModuleZip(w io.Writer, m module.Version, r vcs.Repository, id vcs.CommitID) error {
	fs, err := r.FileSystem(id)
	if err != nil {
		return err
	}
	z := zip.NewWriter(w)
	err = vfsutil.Walk(fs, "/", func(name string, fi os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if fi.IsDir() {
			// We need to include only files, not directories.
			return nil
		}
		// Module zips require every entry to live under the
		// "<module>@<version>/" top-level directory prefix.
		dst, err := z.Create(m.Path + "@" + m.Version + name)
		if err != nil {
			return err
		}
		src, err := fs.Open(name)
		if err != nil {
			return err
		}
		_, err = io.Copy(dst, src)
		src.Close()
		return err
	})
	if err != nil {
		return err
	}
	// Close flushes the zip central directory; its error must be
	// checked for the archive to be considered complete.
	err = z.Close()
	return err
}
// isCommitOnMaster reports whether commit c is a part of master branch
// of git repo at gitDir, and no errors occurred while determining that.
func isCommitOnMaster(ctx context.Context, gitDir string, c *vcs.Commit) bool {
	// "git merge-base --is-ancestor" exits 0 iff c is an ancestor of
	// (or equal to) master; any non-zero exit — including git errors —
	// is reported here as "not on master".
	cmd := exec.CommandContext(ctx, "git", "merge-base", "--is-ancestor", "--", string(c.ID), "master")
	cmd.Dir = gitDir
	err := cmd.Run()
	return err == nil
}
// listMasterCommits returns a list of commits in git repo on master branch.
// If master branch doesn't exist, an empty list is returned.
func listMasterCommits(ctx context.Context, gitDir string) ([]mod.RevInfo, error) {
	// Emit NUL-separated "<commit hash>\x00<committer unix time>"
	// records, newest commit first (default git log order).
	cmd := exec.CommandContext(ctx, "git", "log",
		"--format=tformat:%H%x00%ct",
		"-z",
		"master")
	cmd.Dir = gitDir
	var buf bytes.Buffer
	cmd.Stdout = &buf
	err := cmd.Start()
	if err != nil {
		return nil, fmt.Errorf("could not start command: %v", err)
	}
	err = cmd.Wait()
	// git exits with status 128 when the "master" ref cannot be
	// resolved; treat that as empty history rather than an error.
	if ee, _ := err.(*exec.ExitError); ee != nil && ee.Sys().(syscall.WaitStatus).ExitStatus() == 128 {
		return nil, nil // Master branch doesn't exist.
	} else if err != nil {
		return nil, fmt.Errorf("%v: %v", cmd.Args, err)
	}
	var revs []mod.RevInfo
	for b := buf.Bytes(); len(b) != 0; {
		var (
			// Calls to readLine match exactly what is specified in --format.
			commitHash    = readLine(&b)
			committerDate = readLine(&b)
		)
		timestamp, err := strconv.ParseInt(committerDate, 10, 64)
		if err != nil {
			return nil, fmt.Errorf("invalid time from git log: %v", err)
		}
		t := time.Unix(timestamp, 0).UTC()
		// Serve v0.0.0 pseudo-versions built from the commit time and
		// the 12-character short hash.
		revs = append(revs, mod.RevInfo{
			Version: mod.PseudoVersion("", "", t, commitHash[:12]),
			Time:    t,
		})
	}
	return revs, nil
}
// readLine reads a line until zero byte, then updates b to the byte that immediately follows.
// A zero byte must exist in b, otherwise readLine panics.
func readLine(b *[]byte) string {
	nul := bytes.IndexByte(*b, 0)
	line := string((*b)[:nul])
	*b = (*b)[nul+1:]
	return line
}
// moduleProxyRequest represents a module proxy request.
// The Module and Version fields may be escaped or unescaped;
// use the Escape/Unescape methods to convert between the two forms.
type moduleProxyRequest struct {
	Module  string // Module path.
	Type    string // Type of request. One of "list", "info", "mod", or "zip".
	Version string // Module version. Applies only when Type is not "list".
}
// parseModuleProxyRequest parses the module proxy request
// from the given URL. It does not attempt to unescape the
// module path and version, the caller is responsible for that.
func parseModuleProxyRequest(url string) (_ moduleProxyRequest, ok bool) {
	// Split "<module>/@v/<file>" into the module path and file part.
	sep := strings.Index(url, "/@v/")
	if sep < 0 {
		return moduleProxyRequest{}, false
	}
	module := url[:sep]
	file := url[sep+len("/@v/"):]

	// A "list" request carries no version component.
	if file == "list" {
		return moduleProxyRequest{Module: module, Type: "list"}, true
	}

	// Split "<version>.<ext>" on the final dot and validate ext.
	dot := strings.LastIndexByte(file, '.')
	if dot < 0 {
		return moduleProxyRequest{}, false
	}
	switch ext := file[dot+1:]; ext {
	case "info", "mod", "zip":
		return moduleProxyRequest{Module: module, Type: ext, Version: file[:dot]}, true
	default:
		return moduleProxyRequest{}, false
	}
}
// Unescape returns a copy of r with Module and Version fields unescaped.
func (r moduleProxyRequest) Unescape() (_ moduleProxyRequest, ok bool) {
	unescapedPath, err := module.UnescapePath(r.Module)
	if err != nil {
		return moduleProxyRequest{}, false
	}
	r.Module = unescapedPath
	// "list" requests have no version, so nothing more to unescape.
	if r.Type == "list" {
		return r, true
	}
	unescapedVersion, err := module.UnescapeVersion(r.Version)
	if err != nil {
		return moduleProxyRequest{}, false
	}
	r.Version = unescapedVersion
	return r, true
}
// Escape returns a copy of r with Module and Version fields escaped.
func (r moduleProxyRequest) Escape() (_ moduleProxyRequest, ok bool) {
	escapedPath, err := module.EscapePath(r.Module)
	if err != nil {
		return moduleProxyRequest{}, false
	}
	r.Module = escapedPath
	// "list" requests have no version, so nothing more to escape.
	if r.Type == "list" {
		return r, true
	}
	escapedVersion, err := module.EscapeVersion(r.Version)
	if err != nil {
		return moduleProxyRequest{}, false
	}
	r.Version = escapedVersion
	return r, true
}
// URL returns the URL of the module proxy request.
func (r moduleProxyRequest) URL() string {
	// "list" requests have no version/extension suffix.
	if r.Type == "list" {
		return r.Module + "/@v/list"
	}
	return r.Module + "/@v/" + r.Version + "." + r.Type
}
|
Implications of the radio spectral index transition in LS I +61°303 for its INTEGRAL data analysis
The TeV emitting X-ray binary LS I +61{\deg}303 has two radio periodicities that correspond to a large periodic outburst with the same period as the orbit, 26.5 days (phase \Phi), and a second periodicity of 1667 days (phase \Theta), which modulates the orbital phase and amplitude of the large outburst. Analyses of the radio spectral index revealed in LS I +61{\deg}303 the presence of the critical transition typical for microquasars from optically thick emission (related to a steady jet) to an optically thin outburst (related to a transient jet), and found that it occurs at \Phi_{crit}, which is modulated by \Theta: \Phi_{crit}=f(\Theta). We examine the possible implications of averaging high energy data over large \Theta and \Phi intervals in the light of puzzling published INTEGRAL results, which differ for different averaging of the data. In microquasars, a simultaneous transition between two X-ray states occurs at the switch from optically thick radio emission to an optically thin radio outburst, from the low/hard to the steep power-law state. Assuming that the same transition occurs in LS I +61{\deg}303 at \Phi_{crit}, we can show qualitatively the effect of averaging high energy data on \Theta, by analysing the effects of averaging radio spectral index data across the same \Theta interval. We then model the two X-ray states, low/hard and steep power-law state, and show quantitatively how their mixing can affect the results. When folded over too large a \Theta interval, spectral data from INTEGRAL can yield a false picture of the emission behaviour of the source along the orbit because it may be mixing two spectral states. Furthermore, averaging the data along the orbit may result in a dominant low/hard spectral state, which, for insufficiently extended sampling, might appear without a cut-off. |
import pytest
import rlp
from eth.codecs import abi
from hexbytes import HexBytes
import vyper.ir.compile_ir as compile_ir
from vyper.codegen.ir_node import IRnode
from vyper.compiler.settings import OptimizationLevel
from vyper.utils import EIP_170_LIMIT, checksum_encode, keccak256
# initcode used by create_minimal_proxy_to
def eip1167_initcode(_addr):
    """Build the EIP-1167 minimal proxy initcode targeting ``_addr``.

    The target address is right-padded with zero bytes to 20 bytes and
    spliced between the standard EIP-1167 pre/post bytecode fragments.
    """
    addr = HexBytes(_addr)
    prefix = HexBytes("0x602D3D8160093D39F3363d3d373d3d3d363d73")
    suffix = HexBytes("0x5af43d82803e903d91602b57fd5bf3")
    padded_addr = addr + HexBytes(0) * (20 - len(addr))
    return HexBytes(prefix + padded_addr + suffix)
# initcode used by CreateCopyOf
def vyper_initcode(runtime_bytecode):
    """Wrap ``runtime_bytecode`` in a minimal deploy preamble.

    The preamble pushes the runtime code length (3 bytes), copies the
    runtime code into memory and returns it.
    """
    # format(..., "06x") == hex(...)[2:].rjust(6, "0") for non-negative ints.
    length_hex = format(len(runtime_bytecode), "06x")
    return HexBytes("0x62" + length_hex + "3d81600b3d39f3") + runtime_bytecode
def test_create_minimal_proxy_to_create(get_contract):
    # A minimal proxy deployed via CREATE must land at the address
    # predicted by keccak(rlp([deployer_address, nonce])).
    code = """
main: address

@external
def test() -> address:
    self.main = create_minimal_proxy_to(self)
    return self.main
"""

    c = get_contract(code)

    address_bits = int(c.address, 16)
    # First contract created by a fresh account uses nonce 1.
    nonce = 1
    rlp_encoded = rlp.encode([address_bits, nonce])
    # CREATE address = last 20 bytes of the keccak of the RLP encoding.
    expected_create_address = keccak256(rlp_encoded)[12:].rjust(20, b"\x00")
    assert c.test() == checksum_encode("0x" + expected_create_address.hex())
def test_create_minimal_proxy_to_call(get_contract, w3):
    # A minimal proxy of the contract itself must forward calls
    # (hello()) to the implementation and return the same result.
    code = """
interface SubContract:

    def hello() -> Bytes[100]: view


other: public(address)


@external
def test() -> address:
    self.other = create_minimal_proxy_to(self)
    return self.other


@external
def hello() -> Bytes[100]:
    return b"hello world!"


@external
def test2() -> Bytes[100]:
    return SubContract(self.other).hello()
"""

    c = get_contract(code)

    assert c.hello() == b"hello world!"
    # Deploy the proxy, then call through it.
    c.test(transact={})
    assert c.test2() == b"hello world!"
def test_minimal_proxy_exception(w3, get_contract, assert_tx_failed):
    # Reverts inside the implementation must propagate through the
    # minimal proxy, and a revert must not consume all supplied gas.
    code = """
interface SubContract:

    def hello(a: uint256) -> Bytes[100]: view


other: public(address)


@external
def test() -> address:
    self.other = create_minimal_proxy_to(self)
    return self.other


@external
def hello(a: uint256) -> Bytes[100]:
    assert a > 0, "invaliddddd"
    return b"hello world!"


@external
def test2(a: uint256) -> Bytes[100]:
    return SubContract(self.other).hello(a)
"""

    c = get_contract(code)

    assert c.hello(1) == b"hello world!"
    c.test(transact={})
    assert c.test2(1) == b"hello world!"
    # Calling with a == 0 trips the assertion through the proxy.
    assert_tx_failed(lambda: c.test2(0))

    GAS_SENT = 30000
    tx_hash = c.test2(0, transact={"gas": GAS_SENT})

    receipt = w3.eth.get_transaction_receipt(tx_hash)

    # Transaction failed (status 0) but REVERT refunded remaining gas.
    assert receipt["status"] == 0
    assert receipt["gasUsed"] < GAS_SENT
def test_create_minimal_proxy_to_create2(
    get_contract, create2_address_of, keccak, assert_tx_failed
):
    # A salted minimal proxy must land at the CREATE2 address derived
    # from (deployer, salt, keccak(initcode)), and re-using the same
    # salt must revert due to the address collision.
    code = """
main: address

@external
def test(_salt: bytes32) -> address:
    self.main = create_minimal_proxy_to(self, salt=_salt)
    return self.main
"""

    c = get_contract(code)

    salt = keccak(b"vyper")
    assert HexBytes(c.test(salt)) == create2_address_of(
        c.address, salt, eip1167_initcode(c.address)
    )

    c.test(salt, transact={})
    # revert on collision
    assert_tx_failed(lambda: c.test(salt, transact={}))
# test blueprints with various prefixes - 0xfe would block calls to the blueprint
# contract, and 0xfe7100 is ERC5202 magic
# NOTE: the last prefix must be written with hex escapes; the previous
# b"\xfe\71\x00" used the *octal* escape \71 (== 0x39, ASCII "9"), which is
# not the ERC-5202 magic the comment promises.
@pytest.mark.parametrize("blueprint_prefix", [b"", b"\xfe", b"\xfe\x71\x00"])
def test_create_from_blueprint(
    get_contract,
    deploy_blueprint_for,
    w3,
    keccak,
    create2_address_of,
    assert_tx_failed,
    blueprint_prefix,
):
    """Deploy a contract from a blueprint via CREATE and CREATE2, skipping
    ``blueprint_prefix`` bytes of the stored initcode via ``code_offset``."""
    code = """
@external
def foo() -> uint256:
    return 123
    """

    prefix_len = len(blueprint_prefix)
    deployer_code = f"""
created_address: public(address)


@external
def test(target: address):
    self.created_address = create_from_blueprint(target, code_offset={prefix_len})


@external
def test2(target: address, salt: bytes32):
    self.created_address = create_from_blueprint(target, code_offset={prefix_len}, salt=salt)
    """

    # deploy a foo so we can compare its bytecode with factory deployed version
    foo_contract = get_contract(code)
    expected_runtime_code = w3.eth.get_code(foo_contract.address)

    f, FooContract = deploy_blueprint_for(code, initcode_prefix=blueprint_prefix)

    d = get_contract(deployer_code)

    d.test(f.address, transact={})
    test = FooContract(d.created_address())
    assert w3.eth.get_code(test.address) == expected_runtime_code
    assert test.foo() == 123

    # extcodesize check: deploying from an address with no code must fail
    zero_address = "0x" + "00" * 20
    assert_tx_failed(lambda: d.test(zero_address))

    # now same thing but with create2
    salt = keccak(b"vyper")
    d.test2(f.address, salt, transact={})
    test = FooContract(d.created_address())
    assert w3.eth.get_code(test.address) == expected_runtime_code
    assert test.foo() == 123

    # check if the create2 address matches our offchain calculation
    initcode = w3.eth.get_code(f.address)
    initcode = initcode[len(blueprint_prefix) :]  # strip the prefix
    assert HexBytes(test.address) == create2_address_of(d.address, salt, initcode)

    # can't collide addresses
    assert_tx_failed(lambda: d.test2(f.address, salt))
def test_create_from_blueprint_bad_code_offset(
    get_contract, get_contract_from_ir, deploy_blueprint_for, w3, assert_tx_failed
):
    # create_from_blueprint must revert when code_offset points at or past
    # the end of the blueprint's initcode (it would otherwise EXTCODECOPY
    # empty initcode and deploy an empty contract).
    deployer_code = """
BLUEPRINT: immutable(address)


@external
def __init__(blueprint_address: address):
    BLUEPRINT = blueprint_address


@external
def test(code_ofst: uint256) -> address:
    return create_from_blueprint(BLUEPRINT, code_offset=code_ofst)
    """

    initcode_len = 100

    # deploy a blueprint contract whose contained initcode contains only
    # zeroes (so no matter which offset, create_from_blueprint will
    # return empty code) -- STOP is opcode 0x00
    ir = IRnode.from_list(["deploy", 0, ["seq"] + ["stop"] * initcode_len, 0])
    bytecode, _ = compile_ir.assembly_to_evm(
        compile_ir.compile_to_assembly(ir, optimize=OptimizationLevel.NONE)
    )
    # manually deploy the bytecode (no ABI, raw constructor transaction)
    c = w3.eth.contract(abi=[], bytecode=bytecode)
    deploy_transaction = c.constructor()
    tx_info = {"from": w3.eth.accounts[0], "value": 0, "gasPrice": 0}
    tx_hash = deploy_transaction.transact(tx_info)
    blueprint_address = w3.eth.get_transaction_receipt(tx_hash)["contractAddress"]
    blueprint_code = w3.eth.get_code(blueprint_address)
    print("BLUEPRINT CODE:", blueprint_code)

    d = get_contract(deployer_code, blueprint_address)

    # deploy with code_ofst=0 fine
    d.test(0)

    # deploy with code_ofst=len(blueprint) - 1 fine
    d.test(initcode_len - 1)

    # code_offset=len(blueprint) NOT fine! would EXTCODECOPY empty initcode
    assert_tx_failed(lambda: d.test(initcode_len))

    # code_offset=EIP_170_LIMIT definitely not fine!
    assert_tx_failed(lambda: d.test(EIP_170_LIMIT))
# test create_from_blueprint with args
def test_create_from_blueprint_args(
    get_contract, deploy_blueprint_for, w3, keccak, create2_address_of, assert_tx_failed
):
    # Constructor args may be passed either as individual values or as
    # pre-encoded calldata (raw_args=True); both must produce identical
    # runtime code, and CREATE2 addresses must account for the ABI-encoded
    # args appended to the initcode.
    code = """
struct Bar:
    x: String[32]

FOO: immutable(String[128])
BAR: immutable(Bar)


@external
def __init__(foo: String[128], bar: Bar):
    FOO = foo
    BAR = bar


@external
def foo() -> String[128]:
    return FOO


@external
def bar() -> Bar:
    return BAR
    """

    deployer_code = """
struct Bar:
    x: String[32]

created_address: public(address)


@external
def test(target: address, arg1: String[128], arg2: Bar):
    self.created_address = create_from_blueprint(target, arg1, arg2)


@external
def test2(target: address, arg1: String[128], arg2: Bar, salt: bytes32):
    self.created_address = create_from_blueprint(target, arg1, arg2, salt=salt)


@external
def test3(target: address, argdata: Bytes[1024]):
    self.created_address = create_from_blueprint(target, argdata, raw_args=True)


@external
def test4(target: address, argdata: Bytes[1024], salt: bytes32):
    self.created_address = create_from_blueprint(target, argdata, salt=salt, raw_args=True)


@external
def should_fail(target: address, arg1: String[129], arg2: Bar):
    self.created_address = create_from_blueprint(target, arg1, arg2)
    """

    FOO = "hello!"
    BAR = ("world!",)

    # deploy a foo so we can compare its bytecode with factory deployed version
    foo_contract = get_contract(code, FOO, BAR)
    expected_runtime_code = w3.eth.get_code(foo_contract.address)

    f, FooContract = deploy_blueprint_for(code)

    d = get_contract(deployer_code)

    initcode = w3.eth.get_code(f.address)

    d.test(f.address, FOO, BAR, transact={})
    test = FooContract(d.created_address())
    assert w3.eth.get_code(test.address) == expected_runtime_code
    assert test.foo() == FOO
    assert test.bar() == BAR

    # extcodesize check
    assert_tx_failed(lambda: d.test("0x" + "00" * 20, FOO, BAR))

    # now same thing but with create2
    salt = keccak(b"vyper")
    d.test2(f.address, FOO, BAR, salt, transact={})
    test = FooContract(d.created_address())
    assert w3.eth.get_code(test.address) == expected_runtime_code
    assert test.foo() == FOO
    assert test.bar() == BAR

    # the create2 address covers initcode plus the ABI-encoded ctor args
    encoded_args = abi.encode("(string,(string))", (FOO, BAR))
    assert HexBytes(test.address) == create2_address_of(d.address, salt, initcode + encoded_args)

    # raw_args=True with pre-encoded calldata must behave identically
    d.test3(f.address, encoded_args, transact={})
    test = FooContract(d.created_address())
    assert w3.eth.get_code(test.address) == expected_runtime_code
    assert test.foo() == FOO
    assert test.bar() == BAR

    d.test4(f.address, encoded_args, keccak(b"test4"), transact={})
    test = FooContract(d.created_address())
    assert w3.eth.get_code(test.address) == expected_runtime_code
    assert test.foo() == FOO
    assert test.bar() == BAR

    # can't collide addresses
    assert_tx_failed(lambda: d.test2(f.address, FOO, BAR, salt))
    # ditto - with raw_args
    assert_tx_failed(lambda: d.test4(f.address, encoded_args, salt))

    # but creating a contract with different args is ok
    FOO = "bar"
    d.test2(f.address, FOO, BAR, salt, transact={})
    # just for kicks
    assert FooContract(d.created_address()).foo() == FOO
    assert FooContract(d.created_address()).bar() == BAR

    # Foo constructor should fail: arg1 exceeds its String[128] bound, so the
    # raw transaction (built by hand to bypass client-side ABI checks) reverts
    FOO = "01" * 129
    BAR = ("",)
    sig = keccak("should_fail(address,string,(string))".encode()).hex()[:10]
    encoded = abi.encode("(address,string,(string))", (f.address, FOO, BAR)).hex()
    assert_tx_failed(lambda: w3.eth.send_transaction({"to": d.address, "data": f"{sig}{encoded}"}))
def test_create_copy_of(get_contract, w3, keccak, create2_address_of, assert_tx_failed):
    # create_copy_of clones a target's runtime bytecode verbatim; the
    # internal helpers also verify the builtin does not clobber caller memory.
    code = """
created_address: public(address)


@internal
def _create_copy_of(target: address):
    self.created_address = create_copy_of(target)


@internal
def _create_copy_of2(target: address, salt: bytes32):
    self.created_address = create_copy_of(target, salt=salt)


@external
def test(target: address) -> address:
    x: uint256 = 0
    self._create_copy_of(target)
    assert x == 0 # check memory not clobbered
    return self.created_address


@external
def test2(target: address, salt: bytes32) -> address:
    x: uint256 = 0
    self._create_copy_of2(target, salt)
    assert x == 0 # check memory not clobbered
    return self.created_address
    """

    c = get_contract(code)
    bytecode = w3.eth.get_code(c.address)

    # the clone's runtime code equals the factory's own runtime code
    c.test(c.address, transact={})
    test1 = c.created_address()
    assert w3.eth.get_code(test1) == bytecode

    # extcodesize check
    assert_tx_failed(lambda: c.test("0x" + "00" * 20))

    # test1 = c.test(b"\x01")
    # assert w3.eth.get_code(test1) == b"\x01"

    # CREATE2 variant: address must match the offchain calculation
    salt = keccak(b"vyper")
    c.test2(c.address, salt, transact={})
    test2 = c.created_address()
    assert w3.eth.get_code(test2) == bytecode
    assert HexBytes(test2) == create2_address_of(c.address, salt, vyper_initcode(bytecode))

    # can't create2 where contract already exists
    assert_tx_failed(lambda: c.test2(c.address, salt, transact={}))

    # test single byte contract
    # test2 = c.test2(b"\x01", salt)
    # assert HexBytes(test2) == create2_address_of(c.address, salt, vyper_initcode(b"\x01"))
    # assert_tx_failed(lambda: c.test2(bytecode, salt))
|
// RevokeVmAuthKey revokes a VM auth key.
func (c *Panorama) RevokeVmAuthKey(key string) error {
	// Anonymous request struct marshalled into the op command XML:
	// <request><bootstrap><vm-auth-key><revoke><vm-auth-key>...</...>
	type rreq struct {
		XMLName xml.Name `xml:"request"`
		Key     string   `xml:"bootstrap>vm-auth-key>revoke>vm-auth-key"`
	}

	req := rreq{
		Key: key,
	}

	c.LogOp("(op) revoking vm auth code: %s", key)

	// No response payload is needed; only the error matters.
	_, err := c.Op(req, "", nil, nil)
	return err
}
<reponame>ride-austin/ios-driver
//
//  QueueZone.h
//  RideDriver
//
//  Created by <NAME> on 10/17/16.
//  Copyright © 2016 FuelMe LLC. All rights reserved.
//

#import <Mantle/Mantle.h>

/// Model describing a driver queue zone, deserialized from JSON via Mantle.
@interface QueueZone : MTLModel <MTLJSONSerializing>

/// Name identifying the queue area.
@property (nonatomic, readonly) NSString *areaQueueName;

/// URL of the icon displayed for this zone.
@property (nonatomic, readonly) NSURL *iconUrl;

/// Queue lengths keyed by string identifier — presumably car category ->
/// number of drivers queued; TODO confirm against the API response.
@property (nonatomic) NSDictionary<NSString *, NSNumber *> *lengths;

@end
|
<reponame>jorgeluis11/profile
from django.db import models
from django.utils import timezone
from autoslug import AutoSlugField
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill
class ProjectManager(models.Manager):
    """Custom queryset manager for Projects."""

    def top(self):
        # NOTE(review): appears to be a stub — returns a literal string
        # rather than a queryset of top projects; confirm intended behavior.
        return "Manager"
def get_url_large(self, filename):
    """Build the storage path for the full-size image.

    Used as an ImageField ``upload_to`` callable, so ``self`` receives the
    model instance and ``filename`` the uploaded file's name.
    """
    return "img/large/{0}".format(filename)
def get_url_medium(self, filename):
    """Build the storage path for the medium-size image.

    Used as an ImageField ``upload_to`` callable, so ``self`` receives the
    model instance and ``filename`` the uploaded file's name.
    """
    return "img/medium/{0}".format(filename)
class Projects(models.Model):
    """Portfolio project entry with a large source image and a derived
    medium-size JPEG rendition."""

    title = models.TextField(max_length=55)
    large = models.ImageField(upload_to=get_url_large)
    url = models.TextField(max_length=55)
    # medium = models.ImageField(upload_to=get_url_medium,blank=True)
    # medium = models.ImageField(upload_to=get_url_medium,blank=True)
    # Derived rendition generated from `large`; resize processor disabled.
    medium = ImageSpecField(source='large',
                            # processors=[ResizeToFill(400, 400)],
                            format='JPEG',
                            options={'quality': 60})
    description = models.TextField(max_length=2000)
    # Pass the callable itself, NOT its result: `default=timezone.now()`
    # would be evaluated once at import time, freezing that moment as the
    # default for every row created afterwards.
    submit_date = models.DateTimeField(('date/time submitted'),
                                       default=timezone.now)
    visible = models.BooleanField(default=False)
    user_rating = models.IntegerField(default=0)
    visits = models.IntegerField(default=0)
    # URL slug derived automatically from the title.
    slug = AutoSlugField(populate_from='title', unique=True)

    objects = ProjectManager()
|
Oil in times of democracy: debates on extraction and politics in Ghana
The discovery of oil in Ghana in 2007 came at a time when elections had shown the capacity to allow for the alternation of the two main political parties in government. The main objective of this article is to investigate the terms of the debates that have developed regarding the relation between natural resources and democracy in the country. Three main issues are addressed: the capacity of democratic institutions to condition and configure the social and political effects of the oil industry in the country; the potential perverse effects of oil extraction on democratic institutions; and the role of the international arena and actors in the constitution of the political economy of oil in Ghana.
import styled from 'styled-components'
import { Logout } from 'styled-icons/heroicons-outline'
import { Login } from 'styled-icons/material'
// Container for the auth controls; placed in the "LG" area of the parent grid.
export const Auth = styled.div`
  grid-area: LG;
  background-color: var(--tertiary);
`

// Google sign-in/out button. The label <span> is hidden by default and only
// shown (with the wider button) on desktop widths >= 1024px.
export const BtnGoogle = styled.button`
  @media (min-width: 1024px) {
    width: 165px;
    > span {
      display: initial !important;
    }
  }
  margin: 5px;
  width: 65px;
  height: 35px;
  border-radius: 4px;
  background: var(--link);
  color: var(--white);
  border: 0px transparent;
  text-align: center;
  &:hover {
    background: var(--discord);
  }
  > span {
    display: none;
  }
`

// Logout icon sized and tinted to match the button.
export const LogoutIcon = styled(Logout)`
  width: 20px;
  height: 20px;
  color: var(--white);
`

// Login icon sized and tinted to match the button.
export const LoginIcon = styled(Login)`
  width: 20px;
  height: 20px;
  color: var(--white);
`
|
Learn how to change the oil on your 4th generation (2012+) Toyota Avalon with a 3.5L 2GR-FE V6 engine. We've got a step by step how to with all the parts and tools you'll need. Fortunately, the Avalon is extra easy to change oil on, there are no trim panels to remove or get in the way, you'll just need the right tools to get the oil filter cap off.
For the Toyota Avalon, the maintenance schedule calls for an oil change every 5,000 miles if you take short trips, tow, drive on dirty or dusty roads, or leave the car idling for long periods. Under ideal conditions, the recommended interval is every 10,000 miles.
The Toyota 3.5L V6 2GR-FE is in a TON of cars. This guide will likely work for most of them.
Vehicles with this motor: |
import { Video, VideoSrc } from '@/video/state/types';
import { srcToGlobalVideo, srcToHostVideo, srcToIFrameSource, srcToIFrameVideo, videosWithSubtitle } from '@/video/state/state';
import { RemoveVideoInIFrame, useVideoElementMutationObserver, useWindowMessage, VideosInIFrame } from '@/composables';
import { watch } from 'vue';
import { addVttTo, removeVttFrom } from '@/video/state';
import { reset } from '@/app/state';
import { currentSelectedVideoSrc } from '@/navigation/state';
import { removeUrlHash } from '@/util/url';
// A <video> element counts as valid when it is actually rendered
// (non-zero box) and has resolved a source.
const isValidVideo = (el: HTMLVideoElement): boolean => {
  if (el.offsetWidth === 0 || el.offsetHeight === 0) {
    return false;
  }
  return el.currentSrc !== '';
};
// Collect all rendered <video> elements of this document, keyed by their
// hash-stripped currentSrc.
const findVideosInCurrentTab = (): Record<VideoSrc, Video> =>
  Object.fromEntries(
    [...document.querySelectorAll('video')]
      .filter((el) => isValidVideo(el))
      .map((el) => [
        removeUrlHash(el.currentSrc),
        {
          src: removeUrlHash(el.currentSrc),
          in: 'HOST',
          // the 'plussub' class marks videos we already attached a subtitle to
          hasSubtitle: el.classList.contains('plussub'),
          el
        }
      ])
  );
// Re-scan the page and replace the host-video state wholesale.
const resetSrcToHostVideo = () => {
  srcToHostVideo.value = findVideosInCurrentTab();
};
// Wire up video discovery (host page + iframes) and subtitle bookkeeping.
export const init = (): void => {
  resetSrcToHostVideo();

  // handle host videos
  // handles also if the source or the src changes
  [...document.querySelectorAll('video')].forEach((el) => el.addEventListener('loadedmetadata', resetSrcToHostVideo));

  // new videos added to the page
  useVideoElementMutationObserver(({ added, removed }) => {
    resetSrcToHostVideo();
    added.forEach((el) => el.addEventListener('loadedmetadata', resetSrcToHostVideo));
    // if the removed video was the one carrying our subtitle, reset app state
    if (removed.some((el) => srcToHostVideo.value[removeUrlHash(el.currentSrc)]?.hasSubtitle)) {
      reset();
    }
  });

  // videos reported by embedded iframes
  useWindowMessage({
    [VideosInIFrame]: ({ origin, source, data: { videos, frameSrc } }) => {
      // remember each iframe video's owning window, keyed by the video src
      videos.forEach((e) => (srcToIFrameSource[removeUrlHash(e.currentSrc)] = { window: source as Window, frameSrc, origin }));
      Object.assign(
        srcToIFrameVideo.value,
        Object.fromEntries(
          videos.map((e) => [
            removeUrlHash(e.currentSrc),
            {
              hasSubtitle: e.hasSubtitle,
              src: removeUrlHash(e.currentSrc),
              in: 'I_FRAME'
            }
          ])
        )
      );
    }
  });

  useWindowMessage({
    [RemoveVideoInIFrame]: ({ data: { currentSrc, frameSrc } }) => {
      currentSrc = removeUrlHash(currentSrc);
      if (srcToIFrameVideo.value[currentSrc]?.hasSubtitle) {
        reset();
      }
      delete srcToIFrameVideo.value[currentSrc];
      // BUGFIX: srcToIFrameSource is keyed by the video's currentSrc (see the
      // VideosInIFrame handler above), not by the frame's src — deleting by
      // frameSrc never matched and left stale entries behind.
      delete srcToIFrameSource[currentSrc];
    }
  });

  // move the subtitle <track> when the user selects a different video
  watch(
    () => currentSelectedVideoSrc.value,
    (src, prevSrc) => {
      if (prevSrc && srcToGlobalVideo.value[prevSrc]) {
        removeVttFrom({ video: srcToGlobalVideo.value[prevSrc] });
      }
      if (src && srcToGlobalVideo.value[src]) {
        removeVttFrom({ video: srcToGlobalVideo.value[src] });
        addVttTo({ video: srcToGlobalVideo.value[src], subtitle: window.plusSub_subtitle.value.withOffsetParsed });
      }
    }
  );

  // re-render subtitles on all subtitled videos whenever the parsed subtitle changes
  watch(
    () => window.plusSub_subtitle.value.withOffsetParsed,
    (subtitle) =>
      videosWithSubtitle.value.forEach((video) => {
        removeVttFrom({ video });
        addVttTo({ video, subtitle });
      })
  );
};
|
216 Multiplex base editing of NK cell to enhance cancer immunotherapy
Natural killer (NK) cells have many unique features that have gained attention in cancer immunotherapy. NK cells can kill in antigen independent and dependent fashion, can be used as an allogeneic product, and perform antibody-dependent cell-mediated cytotoxicity (ADCC). However, NK cell function is regulated by many activating and inhibitory receptors, which cancer cells take advantage of to avoid being killed by NK cells. NK cells are also known for their technical and biological challenges which result in low editing efficiencies, compared to T cells and other immune cells.Base editing (BE) is a CRISPR-Cas9 based genome editing technology that allows precise single base transitions. Previously, we reported a high efficiency method for multiplex engineering of T cells using BE and thus reasoned that applying similar concepts in NK cells may offer an opportunity to alter many genes simultaneously at higher efficiency through multiplex base editing. We thus selected a panel of genes bearing critical roles in NK cell function for immunotherapy, including inhibitory intracellular regulator AHR and CISH, inhibitory checkpoint receptor KLRG1, TIGIT, KLRC1, and PDCD1, and Fc receptor CD16A. CD16A is responsible for NK cell ADCC and is regulated via cleavage upon NK activation. Non-cleavable CD16A improves ADCC killing and can be achieved through single-base substitution with BE.Using the adenosine BE (ABE8e), we achieved multiplex editing (6 genes) rates up to 99% and 95% editing/knockout at DNA and protein levels, respectively. Notably, we assessed for reduction in editing efficiency when additional genes were targeted and found no significant reduction in editing efficiencies when targeting up to 6 genes simultaneously. 
Moreover, functional evaluation of non-cleavable CD16A NK cells revealed up to 35% increase of cytotoxicity against Raji cells. We were able to achieve high multiplex editing efficiency in primary human NK cells using ABE8e and there was no significant decrease of editing efficiency as the number of genes of interest increases, up to 6 genes in total. Functional assay confirmed increased NK cell cytotoxicity against tumor cells. Our end goal is to achieve high efficiency multiplex editing in CAR-expressing NK cells to further improve NK cell activity and toxicity for cancer immunotherapy. Webber B, Lonetree C, Kluesner M, et al. Highly efficient multiplex human T cell engineering without double-strand breaks using Cas9 base editors. Nat Commun 2019;10:5222.
package org.loose.fis.sre.controllers;
//import javafx.collections.FXCollections;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.control.*;
import javafx.scene.Scene;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.text.Text;
import javafx.stage.Stage;
import java.util.ArrayList;
//import javafx.collections.ObservableList;
//import javafx.scene.layout.VBox;
import org.loose.fis.sre.services.BookService;
import org.loose.fis.sre.model.Book;
//import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Objects;
import java.util.ResourceBundle;
/**
 * Controller for the books-list screen: displays the catalogue in a table,
 * offers live text filtering and navigation to the order and home views.
 */
public class BooksListController implements Initializable {

    @FXML
    private TextField filterField;

    @FXML
    private TableView<Book> table;

    @FXML
    private TableColumn<Book, String> title;

    @FXML
    private TableColumn<Book, String> author;

    @FXML
    private TableColumn<Book, String> price;

    @FXML
    private TableColumn<Book, String> pageNumber;

    @FXML
    private TableColumn<Book, String> genre;

    @FXML
    private Button cancelButton;

    @FXML
    private Button searchButton;

    @FXML
    private Button orderButton;

    @FXML
    private Text errorMessage;

    @FXML
    private TextField searchBar;

    // Full data set the filter runs against.
    // NOTE(review): this list is never populated (the assignment in
    // initialize() is commented out), so updateFilteredData() always yields
    // an empty result — confirm intended wiring before relying on the filter.
    private final ObservableList<Book> masterData = FXCollections.observableArrayList();
    // Subset of masterData matching the current filter text.
    // NOTE(review): filteredData is never attached to the table either.
    private final ObservableList<Book> filteredData = FXCollections.observableArrayList();

    /**
     * Wires the table columns to Book properties, loads the book list and
     * re-filters whenever the filter text changes.
     */
    @Override
    public void initialize(URL url, ResourceBundle resourceBundle){
        title.setCellValueFactory(new PropertyValueFactory<>("title"));
        author.setCellValueFactory(new PropertyValueFactory<>("author"));
        price.setCellValueFactory(new PropertyValueFactory<>("price"));
        pageNumber.setCellValueFactory(new PropertyValueFactory<>("pageNumber"));
        genre.setCellValueFactory(new PropertyValueFactory<>("genre"));
        table.setItems(BookService.getBooks());
        //masterData = table.getItems();
        System.out.println("Initial masterData = " + masterData);
        filterField.textProperty().addListener(new ChangeListener<String>() {
            public void changed(ObservableValue<? extends String> observable,
                    String oldValue, String newValue) {
                updateFilteredData();
            }
        });
        System.out.println("Initial filteredData -> masterData = " + filteredData);
    }

    /** Rebuilds filteredData from masterData using the current filter text. */
    @FXML
    private void updateFilteredData() {
        System.out.println("updateFilteredData() -> filteredData = " + filteredData);
        filteredData.clear();
        System.out.println("updateFilteredData() -> masterData = " + masterData);
        for (Book book : masterData) {
            if (matchesFilter(book)) {
                filteredData.add(book);
            }
        }
        reapplyTableSortOrder();
    }

    /**
     * Case-insensitive match of the filter text against title or author.
     * An empty (or null) filter matches every book.
     */
    @FXML
    private boolean matchesFilter(Book book) {
        String filterString = filterField.getText();
        if (filterString == null || filterString.isEmpty()) {
            return true;
        }
        String lowerCaseFilterString = filterString.toLowerCase();
        if (book.getTitle().toLowerCase().contains(lowerCaseFilterString)) {
            return true;
        } else return book.getAuthor().toLowerCase().contains(lowerCaseFilterString);
    }

    /** Clears and re-applies the saved sort order to force a table re-sort. */
    @FXML
    private void reapplyTableSortOrder() {
        ArrayList<TableColumn<Book, ?>> sortOrder = new ArrayList<>(table.getSortOrder());
        table.getSortOrder().clear();
        table.getSortOrder().addAll(sortOrder);
    }

    /** Switches the current stage to the book-order view. */
    @FXML
    public void navigateToOrderView() {
        Parent root = null;
        try {
            root = FXMLLoader.load(Objects.requireNonNull(getClass().getClassLoader().getResource("bookOrder.fxml")));
            Stage registerStage = (Stage) orderButton.getScene().getWindow();
            registerStage.setTitle("Book Order");
            registerStage.setScene(new Scene(root));
            registerStage.show();
        }catch(IOException e)
        {
            errorMessage.setText("error");
        }
    }

    /** Handler for the cancel button: returns to the home page. */
    @FXML
    public void cancelButtonOnAction()
    {
        backToHomePageForm();
    }

    /** Switches the current stage back to the home-page view. */
    @FXML
    public void backToHomePageForm() {
        Parent root;
        try {
            root = FXMLLoader.load(Objects.requireNonNull(getClass().getClassLoader().getResource("homePage.fxml")));
            Stage registerStage = (Stage) cancelButton.getScene().getWindow();
            registerStage.setTitle("Home Page");
            registerStage.setScene(new Scene(root));
            registerStage.show();
        }catch(IOException e)
        {
            errorMessage.setText("error");
        }
    }
}
|
<filename>src/config/pagination-adapters/index.ts
import { LimitOffset } from "./limit-offset"
import { RelayForward } from "./relay"
import { NoPagination } from "./no-pagination"
export * from "./types"
// All pagination strategies known to the config layer, in the order they
// are probed.
const PaginationAdapters = [NoPagination, LimitOffset, RelayForward]

export { LimitOffset, RelayForward, NoPagination, PaginationAdapters }
|
/**
* Copyright (c) 2013-2021 UChicago Argonne, LLC and The HDF Group.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#ifndef MERCURY_TEST_H
#define MERCURY_TEST_H
#include "na_test.h"
#include "mercury.h"
#include "mercury_bulk.h"
#include "mercury_request.h"
#ifdef HG_TEST_HAS_THREAD_POOL
# include "mercury_thread_mutex.h"
# include "mercury_thread_pool.h"
#endif
#include "mercury_atomic.h"
#include "mercury_mem_pool.h"
#include "test_bulk.h"
#include "test_overflow.h"
#include "test_rpc.h"
#ifdef HG_TEST_HAS_CRAY_DRC
# include <rdmacred.h>
#endif
/*************************************/
/* Public Type and Struct Definition */
/*************************************/
/* Global per-test state shared between client/server setup and teardown. */
struct hg_test_info {
    struct na_test_info na_test_info; /* underlying NA-layer test settings */
#ifdef HG_TEST_HAS_THREAD_POOL
    hg_thread_pool_t *thread_pool;    /* worker pool for RPC handlers */
#endif
    hg_class_t *hg_class;
    hg_context_t *context;            /* primary HG context */
    hg_context_t **secondary_contexts;
    hg_request_class_t *request_class;
    hg_addr_t target_addr;            /* address of the peer under test */
    struct hg_mem_pool *bulk_pool;    /* pre-registered bulk buffer pool */
    hg_size_t buf_size_min;
    hg_size_t buf_size_max;
#ifdef HG_TEST_HAS_CRAY_DRC
    /* Cray DRC credential handling (see rdmacred.h). */
    uint32_t credential;
    uint32_t wlm_id;
    drc_info_handle_t credential_info;
    uint32_t cookie;
#endif
    unsigned int handle_max;
    unsigned int thread_count;
    hg_bool_t auth;
    /* NOTE(review): presumably toggles automatic shared-memory transport for
     * local peers — confirm against HG init flags. */
    hg_bool_t auto_sm;
    hg_bool_t bidirectional;
};

/* Per-context flag signalling that finalization has started. */
struct hg_test_context_info {
    hg_atomic_int32_t finalizing;
};

/* Per-handle bookkeeping attached to each RPC handle. */
struct hg_test_handle_info {
#ifdef HG_TEST_HAS_THREAD_POOL
    struct hg_thread_work work; /* work item when dispatched to the pool */
#endif
    void *data;
};
/*****************/
/* Public Macros */
/*****************/
/* Max */
#ifndef MAX
# define MAX(a, b) (((a) > (b)) ? (a) : (b))
#endif
/* Default error macro */
#include "mercury_log.h"
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
extern HG_PRIVATE HG_LOG_OUTLET_DECL(hg_test);
#define HG_TEST_LOG_ERROR(...) \
HG_LOG_WRITE(hg_test, HG_LOG_LEVEL_ERROR, __VA_ARGS__)
#define HG_TEST_LOG_WARNING(...) \
HG_LOG_WRITE(hg_test, HG_LOG_LEVEL_WARNING, __VA_ARGS__)
#ifdef HG_HAS_DEBUG
# define HG_TEST_LOG_DEBUG(...) \
HG_LOG_WRITE(hg_test, HG_LOG_LEVEL_DEBUG, __VA_ARGS__)
#else
# define HG_TEST_LOG_DEBUG(...) (void) 0
#endif
/* Branch predictor hints */
#ifndef _WIN32
# define likely(x) __builtin_expect(!!(x), 1)
# define unlikely(x) __builtin_expect(!!(x), 0)
#else
# define likely(x) (x)
# define unlikely(x) (x)
#endif
/* Error macros */
#define HG_TEST_GOTO_DONE(label, ret, ret_val) \
do { \
ret = ret_val; \
goto label; \
} while (0)
#define HG_TEST_GOTO_ERROR(label, ret, err_val, ...) \
do { \
HG_LOG_ERROR(__VA_ARGS__); \
ret = err_val; \
goto label; \
} while (0)
/* Check for hg_ret value and goto label */
#define HG_TEST_CHECK_HG_ERROR(label, hg_ret, ...) \
do { \
if (unlikely(hg_ret != HG_SUCCESS)) { \
HG_TEST_LOG_ERROR(__VA_ARGS__); \
goto label; \
} \
} while (0)
/* Check for cond, set ret to err_val and goto label */
#define HG_TEST_CHECK_ERROR(cond, label, ret, err_val, ...) \
do { \
if (unlikely(cond)) { \
HG_TEST_LOG_ERROR(__VA_ARGS__); \
ret = err_val; \
goto label; \
} \
} while (0)
#define HG_TEST_CHECK_ERROR_NORET(cond, label, ...) \
do { \
if (unlikely(cond)) { \
HG_TEST_LOG_ERROR(__VA_ARGS__); \
goto label; \
} \
} while (0)
#define HG_TEST_CHECK_ERROR_DONE(cond, ...) \
do { \
if (unlikely(cond)) { \
HG_TEST_LOG_ERROR(__VA_ARGS__); \
} \
} while (0)
/* Check for cond and print warning */
#define HG_TEST_CHECK_WARNING(cond, ...) \
do { \
if (unlikely(cond)) { \
HG_TEST_LOG_WARNING(__VA_ARGS__); \
} \
} while (0)
/* Print a left-justified "Testing <name>" banner without a trailing newline;
 * the outcome is appended later by HG_PASSED() / HG_FAILED(). */
#define HG_TEST(x)                                                             \
    do {                                                                       \
        printf("Testing %-62s", x);                                            \
        fflush(stdout);                                                        \
    } while (0)

/* Complete an HG_TEST() banner with a success marker. */
#define HG_PASSED()                                                            \
    do {                                                                       \
        puts(" PASSED");                                                       \
        fflush(stdout);                                                        \
    } while (0)

/* Complete an HG_TEST() banner with a failure marker. */
#define HG_FAILED()                                                            \
    do {                                                                       \
        puts("*FAILED*");                                                      \
        fflush(stdout);                                                        \
    } while (0)
/*********************/
/* Public Prototypes */
/*********************/
#ifdef __cplusplus
extern "C" {
#endif
/**
* Initialize client/server
*/
hg_return_t
HG_Test_init(int argc, char *argv[], struct hg_test_info *hg_test_info);
/**
* Finalize client/server
*/
hg_return_t
HG_Test_finalize(struct hg_test_info *hg_test_info);
#ifdef __cplusplus
}
#endif
#endif /* MERCURY_TEST_H */
|
An Improved Reflector with Serrated Resistive Films Frame for Compact Antenna Test Range
In this paper, a novel reflector design with a serrated resistive films (R-films) frame for compact antenna test range (CATR) is proposed. The CATR is an essential system for antenna testing at 5G millimeter-wave frequencies, which can convert spherical incident waves into quasi plane waves by parabolic reflectors in a relatively short distance, thus forming a quiet zone (QZ) where large devices under test (DUT) can be tested. Conventional reflectors with metal edges can lead to serious edge diffraction and usually increase the noise level inside the QZ. In order to mitigate strong edge diffraction at low frequency, this paper proposes a reflector with a resistive films (R-films) frame, which mainly mitigates edge diffraction in the low-frequency bands and achieves better QZ performance without increasing the metal area. The novel serrated R-films frame has low edge diffraction and is supported by a substrate to achieve better stability and connectivity. The proposed reflector with R-films frame can be considered a competitive alternative to the traditional reflector.
// AddStoryComment adds a comment to a story
func (d *Database) AddStoryComment(StoryboardID string, UserID string, StoryID string, Comment string) ([]*StoryboardGoal, error) {
if _, err := d.db.Exec(
`call story_comment_add($1, $2, $3, $4);`,
StoryboardID,
StoryID,
UserID,
Comment,
); err != nil {
log.Println(err)
}
goals := d.GetStoryboardGoals(StoryboardID)
return goals, nil
} |
Ronan Tynan (born 14 May 1960) is an Irish tenor singer and former Paralympic athlete.
He was a member of The Irish Tenors re-joining in 2011 while continuing to pursue his solo career since May 2004. In the United States, audiences know him for his involvement with that vocal group and for his renditions of "God Bless America." He is also known for participating in the 1984 and 1988 Summer Paralympics.
Life and career [ edit ]
Tynan was born in Dublin, Ireland. His family home is in Johnstown, County Kilkenny, Ireland.[1] He was born with phocomelia, causing both of his lower legs to be underdeveloped; his legs were unusually short (he is now 6-foot 4), his feet were splayed outward, and he had three toes on each foot.[2]:16 He was one of a set of twins; his twin brother Edmond died at 11 months old.[2]:18 At age 20, he had his legs amputated below the knee, after a back injury from a car accident; the injury to his back made it impossible for him to continue using prosthetic legs without the amputation.[2]:84 Within weeks of the accident, he was climbing stairs at his college dormitory on artificial legs. Within a year, Tynan was winning in international competitions in track and field athletics. He represented Ireland in the 1984 and 1988 Summer Paralympics, winning four golds, two silvers, and one bronze medal.[3] Between 1981 and 1984, he won 18 gold medals from various competitions and set 14 world records.[4][5]
In the following years, Tynan became the first person with a disability to be admitted to the National College of Physical Education, in Limerick. He worked for about two years in the prosthetics industry, then went to Trinity College, became a physician specialising in Orthopedic Sports Injuries, and graduated in 1993.
Encouraged to also study voice by his father Edmund, Tynan won a series of voice competition awards and joined the Irish Tenors.
A devout Roman Catholic, Tynan has appeared on Eternal Word Television Network (EWTN). At the invitation of New York Archbishop Timothy Dolan, he sang at the Archbishop's installation Mass in St. Patrick's Cathedral on 15 April 2009.
Presidential performances [ edit ]
Tynan has performed in several events President George W. Bush has attended. Some of them include:
Other performances [ edit ]
Tynan has sung "God Bless America" at sporting event venues, such as Yankee Stadium (during New York Yankees home Major League Baseball games, including Opening Day, nationally televised games, the last game at the old Yankee Stadium, and playoff games) and on several occasions prior to games involving the National Hockey League's Buffalo Sabres including a performance before 71,217 fans at the AMP Energy NHL Winter Classic along with Sabres anthem singer Doug Allen, who performed the Canadian national anthem, on 1 January 2008, when the Sabres played the Pittsburgh Penguins. Tynan has not performed for the Sabres since Terrence Pegula purchased the team in 2011. Most recently, he sang Eagles Wings at the 2017 Memorial Day Concert.
In 2004 he sang Theme from New York, New York at the Belmont Stakes where Smarty Jones failed in his attempt to win the Triple Crown.[10] and less than a week later he was at the Washington National Cathedral for former United States President Ronald Reagan's state funeral, where he sang "Amazing Grace" and Franz Schubert's "Ave Maria".
Tynan sang for George H. W. Bush at Bush's Houston home on the day of the president's death on November 30, 2018. The first song was Silent Night, while the second was a Gaelic song. Bush's friend and former aide James Baker said that while Tynan was singing Silent Night, "[b]elieve it or not, the president was mouthing the words."[11]
"As long as they're not Jewish" controversy [ edit ]
While a real estate agent and prospective buyer Dr. Gabrielle Gold-von Simson were looking at an apartment in Tynan's building on Manhattan's East Side, Tynan made what was construed to be an anti-Semitic remark. Shortly after this, the Yankees cancelled Tynan's performance of "God Bless America" for Game 1 of the 2009 American League Championship Series on 16 October 2009 because of the incident.[12]
According to Tynan's version of the event, two Jewish women came to view an apartment in his building. Some time afterwards, another real estate agent showed up with a potential client. The agent joked to Tynan "at least they're not (Boston) Red Sox fans". "As long as they're not Jewish," Tynan replied, referring to the exacting women he had met earlier. The prospective client, Jewish paediatrician Dr. Gabrielle Gold-Von Simson, took umbrage and said: "Why would you say that?" Tynan replied: "That would be scary", and laughed, referring to the previous incident. Tynan subsequently apologised for his remark.[12] The Anti-Defamation League accepted his apology. Tynan performed at an ADL event in Manhattan soon thereafter.[13]
On July 4, 2010, Tynan performed "God Bless America" for the Boston Red Sox at Fenway Park with the support of some in the local Jewish community.[14]
Further reading [ edit ]
Halfway Home: My life 'til Now. Scribner. 2002. ISBN 0-7432-2291-1. |
Child abuse and neglect in the Jaffna district of Sri Lanka – a study on knowledge attitude practices and behavior of health care professionals
Background Victims and perpetrators of child abuse do not typically self-report to child protection services, therefore responsibility of detection and reporting falls on the others. Knowledge on child protection is essential for the first contact person and such information is sparse in research literature originally coming from Sri Lanka. Anecdotally, several cases of child abuse have been missed out at the first contact level. Therefore we undertook this survey to assess the knowledge, attitudes towards child protection and the experiences of medical officers, nursing officers and social workers on child protection. Method This was a descriptive analytical study carried out in hospitals and the community during March–October 2016. An anonymous content validated self-administered questionnaire was used as the study instrument. Knowledge, Attitude, Practices and Behaviour were assessed via multiple choice questions and responses according to Likert score. Three anonymised case records were given as case vignettes to be studied by the participants and their responses were also recorded on the questionnaire. Results Among the 246 responders 156 (63.4%) were doctors. All groups of professionals identified the forms of child abuse correctly and the social indicators of child abuse was correctly identified in 152 (61.7%). Majority failed to identify the features of the perpetrator. Majority of the professionals showed a favourable response in attitude when dealing with child maltreatment. 153 (62%) had suspected child abuse in their career and 64% of them had reported it to the authority. Fifty two (21%) had attended a training workshop on child abuse and 65.8% of the responders were not satisfied with their knowledge. 229(93%) of them indicated that they wanted some form of education on child maltreatment. 
The Knowledge, Attitude and Behaviour towards child abuse were significantly good on people with experience in the field of Paediatrics and Judicial Medical work, when compared to those who did not have the experience in these two fields. (p value< 0.01). Conclusion Although the knowledge among health professionals regarding child abuse and care was satisfactory, further areas need reinforcement. The attitude was more positive, the behavior and practices on child maltreatment needed reinforcement via workshops and continuing medical education.
Background
Child Maltreatment or Abuse has been a worldwide problem and continues to be a major crisis in our current society as well. Child maltreatment is defined by the World Health Organisation (WHO) as abuse and neglect that occurs to children under 18 years of age. It includes all types of physical, sexual abuse, neglect and negligence of the child, emotional ill treatment and exploitation for commercial and non-commercial reasons. This can lead to problems in child's health, survival and dignity of the child especially in responsibility, trust and power . The National Society for the Prevention of Cruelty to Children also describes the types of abuse similarly to the WHO .
The physical signs of abuse may include unexplained bruising, marks or injuries on any part of the body, multiple bruises which are unexplained, cigarette burn marks, broken bones and scalds, with upward splash marks . Changes in behaviour can also indicate physical abuse. The symptoms can be child's fear anticipating the parents being approached for an explanation by the autorities, aggressive behaviour or severe temper tantrums, flinching when touched, depression and withdrawn behaviour .
Visible evidences are seen in physical abuse whereas these are absent in emotional abuse or neglect but can leave deep, long lasting scars in their minds. When abused children get help early, their chances of recovery and healing from it is greater .
Most common forms of child abuse have been recorded in South Asian region and in addition to them, conscription of children during armed conflict, which is a new form of child abuse, has also been recorded especially in Sri Lanka and Nepal . The 2006 UN Study on Violence against Children, estimated that in South Asia, between 41 and 88 million children witness violence at home every year. Evidence also indicates that half of the world's child brides live in South Asia, where 46% of women aged 20-24 are first married or in union before they reach the age of 18 and that around 44 million children are engaged in child labour across the region. Sexual abuse and exploitation, as well as child trafficking and corporal punishment raise additional concerns in the region. No data are provided on sexual abuse and exploitation, despite the importance of these phenomena in the region. Abuse is often not reported and is shrouded in secrecy; hence the regional data is scarce .
The reports on child abuse, neglect and exploitation are increasing in Sri Lanka as well. According to the figures tabled in the Sri Lankan Parliament in April 2013, there are about 15,000 legal trials pending Nationwide and more than 4,000 (27%) involve some form of violence towards a child. Around 1500 cases per year are reported for issues related to children. The same report says there were 1,750 cases of child rape, 5,475 cases of child molestation and 1,194 cases of child abuse in 2012 . The number reported is much less than the actual incidence, because large number of children do not report abuse .
Another major reason for underreporting of child maltreatment in this region is due to the sociocultural impact it makes as most of the abuse victims are alienated and hence do not get reported. Primary caretakers from Asian countries were less likely to report the abuse to authorities when compared to the other caretakers . Most of the primary caretakers from Asia disbelieved child abuse, hence the children do not self-report to the carer. Discussing family problems to anyone outside the family can be considered shameful. The cultural pressures makes the victim to internalise the conflict and they are least supported. They express more suicidal impulses rather than anger and hostility .
Diagnosis and management of child abuse is a challenge and has to be done through a multidisciplinary approach involving medical and legal professionals. Therefore, precise knowledge on the subject of child maltreatment is essential among these professionals .
Events of child abuse and neglect are commonly not detected as first responders in child care fail to identify injuries, conditions, or behaviours. In the absence of timely intervention, an abused child has a 10% risk of having fatal injuries . A study done in Central Gujarat India suggests that medical and dental residents are not prepared in detecting and managing children with issues related to protection . A significant gap was seen in recognising and responding effectively. Mandated training on detecting and management of child abuse and neglect, to all medical and allied professionals will improve reporting of suspected child abuse .
It is known that victims and perpetrators of child abuse do not usually self-report to child protection services. Medical officers, being the first responders in most cases, are in an ideal position to report abuse. Hence, it is very important for medical officers to be familiar with medico-legal aspects of child abuse.
Therefore, the objective of this study was to describe the knowledge, attitudes and experiences of medical officers, nursing officers and social workers regarding child abuse in the Jaffna District of Sri Lanka, and to assess the associations with socio demographic factors, experience in the field of Paediatrics and Judicial Medicine towards child abuse.
Study design
This was a descriptive analytical study which was carried out in hospitals and community in the Jaffna District of Sri Lanka from March to October 2016.
Setting
The Jaffna District, Sri Lanka is situated in the North of Sri Lanka and has one tertiary care centre and three general hospitals. The tertiary care centre has specialists care and receives referrals from all the general hospitals. All these hospitals together, cater for the entire population of 610,640.
and would see approximately 200,000 per annum at the outpatient and emergency departments. Children seen for child abuse at the peripheral units also get referred to the tertiary unit for specialised care.
Participants
Simple random sampling was done among the doctors, nurses in all 4 hospitals and social workers from the community. This included, all the medical and dental officers (includes Consultants, Senior Registrars, Registrars, Senior House Officers (SHO), Resident House Officer (RHO), Intern Medical Officers (IMO)) and Nursing Officers working in different hospitals. Social workers are personals who work mainly in the community and play a role in identifying child abuse and neglect in the field.
Sample size was calculated using the Daniel formula and the p was 13% according to Starling et al. . Level of confidence was 95% with z being 1.96 and the nonresponse rate was 20%. This gave a total sample size of 208, enough to obtain a 95% confidence interval that the results could be generalised to a wider population .
As there was definite sampling frame, a simple random sampling was done. Eligible sample of participants were informed and a written informed consent was obtained. Ethical approval was obtained from Faculty of Medicine, University of Jaffna, Sri Lanka. (J/ERC/16/72/NDR/0143).
Data collection and analysis
An anonymous pre-tested and standardised self-administered questionnaire was used as the study instrument. The questionnaire included questions to assess the socio-demographic factors of medical, nursing and social workers. The 24-item questions were in the native language, comprising multiple-choice or true/false format based on the literature, to assess the knowledge, and a 10-item set of questions to assess the attitude. A field test was conducted with 10 experts in the field of child abuse to measure the content validity. The Content Validity Ratio (CVR) was calculated using Lawshe's formula, $\mathrm{CVR} = (N_e - N/2)/(N/2)$, in which $N_e$ is the number of panellists indicating "essential" and $N$ is the total number of panellists. The CVR for the whole questionnaire was 0.80. The questionnaire was modified as per the expert suggestions, and the modified version was used as the study tool.
Three case vignettes were prepared from anonymised case records. The cases were physical abuse, sexual abuse and neglect based on delay in seeking medical help. These case histories were given in paper format and confidence in the story was assessed on a 5 point scale (1 being not confidant and 5 being very confident) and reporting and taking action on the individual cases was assessed by "yes"/ "no" responses. Cronbach alpha was used to assess the reliability of the scores.
To assess the knowledge, questions regarding types of child abuse, identifying features of abuse and characteristics of the perpetrator were considered. Questions were analysed by responses to each of the question separately and were expressed as percentage. Chi-square test for significance of difference among proportions was calculated.
To assess the attitude of the participants regarding child abuse and neglect, each respondent was asked 10-item questions.. Responses were recorded as "strongly agree" or "disagree", or "somewhat agree" or "disagree", or "don't know"/ "can't say". Depending on whether it was a proper attitude or not, scores from 1 to 4 were allotted. A score of '0' was given for "don't know"/ "can't say". Six items had negative statements and they were allocated the reverse scores. A total 40 was then divided into sub scores which were defined as 0-9 Very Poor, 10-18 considered as there are many issues which need changing, 19-27 as more positive attitudes and 28-40 as having a good overall attitude. Analysis of variance for significance of difference among means was calculated and Cronbach alpha was used to assess the reliability of the scores.
To assess the experiences of participants, five questions in the questionnaire were provided and the responses were expressed as percentage. Each respondent was asked if s/he undertook any particular action in the previous year that would help towards having better practices.
The data was described using frequencies and percentages. P value of < 0.05 was considered as statistically significant. Data was coded and entered in SPSS version 20.
Results
A Total of 273 were selected for the survey and number of responders was 246(90.1%). Among the responders, 156 (63.4%) were medical officers, 59 (24%) were nursing officers and others were social workers. Mean age of the subjects was 34.70 ± 7.924 yrs. Male female ratio was 1:1.29. Most of them (149-60.6%) were married, 108 (43.7%) had experience less than 5 years in their respective fields and 107 (43.5%) had children of their own. The characteristics of the responders along with their experiences in the profession are provided in Table 1. Among those in the medical profession 33(21.2%) were intern medical officers, who were the first contact in most of the cases when a patient is admitted, 19(12.2%) were consultants and 104(66.7%) belonged to the middle grade, whose experience varied from 1.5-15 years.
Knowledge of the responders
All groups of professionals were able to identify the forms of child abuse correctly and there was no significant difference between the groups, except on seeking timely medical advice. Frequency of identifying the types of child abuse is provided in Table 2.
The knowledge of the social indicators of child abuse was correctly identified by 152 (61.7%). The knowledge on the features of the perpetrators was satisfactory in 74%(n = 182). The knowledge of the perpetrator being known to the family was identified in 75%(n = 187), perpetrator being abused as a child in 62%(n = 153) and having a psychiatry background in 61%(n = 152). There was no significant difference between the groups in the identifying features of the perpetrators. (p value > 0.5).
Knowledge of the physical indicators was satisfactory in all groups of health workers. (Table 3).
The three case vignettes were mainly of child physical abuse, sexual abuse and seeking delayed medical help (Neglect). All categories of people identified the type of abuse correctly. Cronbach alpha was 0.81 in confidence in reporting, suggesting good internal consistency. The mean scores for confidence in reporting were 2.7, 3.1 and 1.4 for cases of physical, sexual abuse and neglect respectively. The differences in the mean scores also had a significant difference between the groups. (P value < 0.001) The decision to report to the authority was 90% (n = 222) in the case of physical abuse, 97% (n = 240) in sexual abuse and 65% (n = 160) in the case of neglect. The decision to report was significant between the groups in dealing with the case of neglect (p value < 0.001) but not in physical and sexual abuse. (p values > 0.5) Experience in the field of paediatrics and judicial work did not have a significant association in identification and reporting of the cases given in the case vignettes.
The source of knowledge was mainly through the university education system (54.1%) followed by reading the literature (52.8%) and following Continuing Medical Education (CME) programme on child abuse (41.1%).
Attitude of the responders
Mean attitude scores were 20.16 ± 3.3, 20.25 ± 4.04, and 23.84 ± 5.3 for doctors, nurses and social workers respectively. (F = 12.55 p = 0.000) Even though the majority of the professionals showed a more positive attitude, there are many issues that need changing. (Table 4) Majority (76.4%) were confident in reporting child abuse and 24% said they would defer reporting until firm evidence was present. 60.5% were confident in giving evidence in a court of law and 45% were not familiar with the legal issues. Only 24.3% were satisfied with the local child protection services (Table 5).
Practices and behaviour
Majority of the professionals (62%) suspected child abuse in children and only 64% had reported child abuse to the authorities previously. All the cases suspected were not reported to the authorities and the main reasons provided being: Lack of adequate history and evidence (56,6.1%), uncertainty of the diagnosis (55, 22.3%), possible harmful effects on the child's family (31, 12.6%), lack of knowledge of the referral procedure (25, 10.1%), aggressive and angry parents (15, 6.1%), possible effect on my professional career (13, 5.28%) and fear and anxiety of the court proceedings (11, 4.47%). All the professionals indicated that education on child protection is essential but only 52(21%) had attended training workshops on child abuse. Different practices adopted by the professionals are provided in Table 6.
Analysis of the data showed knowledge regarding child abuse (p 0.001), knowledge of the characteristics of the perpetrator (p 0.04), attitude of more positivity towards Child abuse and neglect (p value 0.01), behaviour of detecting and reporting of child abuse and neglect (p 0.001) and the awareness of the law of child protection had a significant differences with the experience of the person and the speciality of paediatrics and judicial medical work. The inexperienced felt that the doctor was responsible for the stigma that occurred to these children (p 0.001).
The participants have indicated that the preferred methods of updating the knowledge on child abuse and neglect were to undertake continuing education and workshops on child abuse (70.3%) followed by information booklets (48%) and online self-study (28.5%).
Discussion
Our study aimed at identifying the knowledge and practices of professionals, first in contact with the children who have been abused and neglected. Our study indicated satisfactory overall knowledge and it correlated well with the experience and speciality of the responders. Awareness and Basic knowledge on child abuse and neglect are the important prerequisite for reporting suspected cases of child abuse. The ability to detect and diagnose when an abused child presents, is also vital to the care of the child. When compared to studies done in Gujarat and Karnataka in India, our study indicates overall knowledge is satisfactory .
The knowledge regarding indicators of abuse was unsatisfactory as only 68(27.6%) answered all seven responses correctly. The torn oral fraenum was identified as a form of abuse earlier but in current literature it has been disproved . In this study the torn fraenum was identified as a form of abuse by 76% of the responders. This study indicates that there may be a deficiency of updated knowledge about changes to these concepts. Thomas et al. have explained that this lack of The knowledge among the medical doctors and nurses was higher when compared to the social workers about various types of abuse and physical indicators. Even though it was statistically not significant, the community social workers should have adequate knowledge of child abuse for early detection, which would prevent detrimental effects .
The perpetrators of Child Sexual Abuse (CSA) are usually known to the family and may have been also abused as child. This fact should be understood clearly by all healthcare professionals to prevent future perpetrators from initiating abuse . In our study 23% (57) did not know the characteristic features of the perpetrators. Hence further training is needed in this aspect.
The responses to the case vignettes highlight the current knowledge and course of action. Case vignettes on physical and sexual abuse the responders were confident and the course of action of reporting was 90. 2% in physical abuse and 97.5% in sexual abuse. Van Haeringen et al. stated that only 69% of the health professionals reported the highest level of suspected physical abuse where as our study indicated the opposite. Case vignette of a delay in seeking medical help and neglect had the minimum score with minimum number (65%) reporting it to the authorities. The keenness to improve the knowledge has been shown by all the professionals by indicating their interest to improve their knowledge base and interest in attending a continuing medical programme and workshops on Child abuse. The interest to be trained is a good initiative to detect child abuse and possibilities towards future introduction of screening for child abuse and neglect at emergency and outpatient settings .
The attitude towards more positive and good were seen in 69% of the responders and this is similar (65.5%) to a study done in Karnataka by Kirankumar et al. . Attitudes towards reporting child abuse are another aspect that was studied. A suspected case of child abuse and neglect has to be reported to the authorities without delay but this can be traumatic to the parents, carers and the health care professionals. Since the stigma involved in abuse is profound in this part of the world where reporting can be a serious issue. Our study also indicated that the responders had suspected but not reported due to various reasons. This may have been due to lack of adequate history and evidence, uncertainty of the diagnosis, possible harmful effects on the child's family, lack of knowledge of the referral procedure, aggressive and angry behaviour of parents, possible effect on the profession and fear and anxiety toward possible court proceedings. Similar fears were also noted in the study done by Deshapande et al. in Gujarat .
A study done by Jones et al. on child abuse reporting experience, found that even at the highest levels of suspicion, only 73% of injuries were reported to child protective services and many factors hindered the reporting. This includes the clinician's level of closeness with the family, some issues in the history and the expectations of the child protection services. In our study the health professionals indicated they were either not satisfied with the services or were not aware of the availability of the services. Training and continuing medical education along with a support system to overcome the fears can alleviate this problem. In addition, strengthening the child protection services and making health providers aware of the existence of the services, can improve child abuse being detected and reported early.
Behaviour and practices by the health care professionals towards child abuse also plays a major role in identifying cases of abuse. The need to improve the knowledge is clearly stated in our study which indicates the professionals are committed to learn and improve the services rendered. Our study found several gaps especially in the reporting system and the awareness of the existing law. All felt the importance of the issue as 99. 5% indicated that education regarding child abuse was important. Even though they felt the importance, only 21.1% had attended a workshop or CME on child abuse and neglect. This was also identified in the study by Reiniger et al . Modern methods of CME like on-line courses may be more appropriate in this fast moving modern world. Self-awareness of the level of knowledge on child abuse and neglect is important for further improvements in knowledge. Our study states that 65.8% of the responders were not satisfied with their knowledge and 93% of them indicated they wanted some form of CME on abuse. It is reported that physicians in rural regions of Austria possessed basic knowledge on child abuse but were not aware of the referral system . To improve diagnosis, reporting, strengthening the interaction with experts and to reduce fears in handling child abuse victim, better training is needed.
In regards to the practices of child abuse, some responders had misbeliefs, mainly in the aspect of removing the child from home (63%) after the incident irrespective of the situation that it took place. They also indicated to remove the child from the school as the family may face social isolation (35.7%). This practice can be detrimental to the child. Significant and lifelong adverse effects on the child's mental health and development are seen in all forms of abuse. Support is needed not only medically, but also in psychosocial aspect, for the speedy recovery of the child. A child's experience of maltreatment may cause great stress and disruption in the family and making them feel guilty about what has already occurred in the home. There is a chance that other members in the family too may have been affected. Health professionals also feel the stigma in the family and society and thereby inappropriate decisions like moving the child away from home and school has been suggested as a way of management.
Experience in the field of paediatrics and Judicial medical work indicated that the knowledge, attitude and behaviour towards child abuse and neglect were good when compared to the professionals in other specialities. As the experienced person is not the first contact person, it is mandatory to train the first responder on child abuse .
This study has few strengths and limitations. The random sampling technique has minimised the selection bias. The questionnaire was self-administered hence the issues that arise from face to face were overcome. The questionnaire was tested for content validity, but we were not able to perform the Pearson's product moment correlation coefficient as the field experts were contacted only once, and the time tested second administration was not performed due to practical difficulties and unavailability of the experts. Even though this is a descriptive analytical study, a qualitative study or a mixed method study could have assessed the attitude and practices of child abuse better. This study can be generalised to the South Asian region as it involves a large number of responders both from the clinical and community level who belong to the same socio-cultural background.
Conclusion
The results reveal that the knowledge, attitude and behaviour of the different health care professionals are satisfactory with few deficiencies, mainly in the areas of identifying the perpetrator and the decision they will take in the case of neglect. All the groups felt their knowledge was satisfactory and wanted to further their knowledge through various continuing medical programmes. The experience and professionals involved in child care and judicial work had a statistically significant good knowledge, attitude and behaviour regarding child abuse.
There were barriers in reporting despite a legal requirement; hence support of the child protection services and the effectiveness of these services need to be evaluated. The gap between detecting and reporting can be overcome by improving the knowledge base.
Understanding and clinical competencies in detecting child abuse are crucial knowledge and skills that are required to evaluate the effectiveness of curricula and the programmes involved in CME, in preparing future healthcare professionals to identify, manage and prevent child abuse. A regular check on the outcome of the education has to be assessed and improvements must be made according to latest evidences. Professional education programmes must sensitise all health care professionals of the occurrences and instruct them on how and when to report a suspected case of child abuse and neglect. |
async def halldorsson_science_de_novos(result):
    """Collect de novo variants from Halldorsson et al., Science 2019.

    Downloads the supplementary variant table, normalises its columns to the
    fields DeNovo expects, and appends the resulting set of variants to
    ``result``. Coordinates are on the GRCh38 build.
    """
    logging.info('getting Halldorsson et al Science 2019 de novos')
    with tempfile.NamedTemporaryFile() as handle:
        download_file(url, handle.name)
        table = pandas.read_table(handle.name, comment='#')

    # normalise the supplementary table columns to DeNovo's field names
    table['person_id'] = table['Proband_id'].astype(str) + '|halldorsson'
    table['chrom'] = table['Chr'].astype(str)
    table['pos'] = table['Pos']
    table['ref'] = table['Ref']
    table['alt'] = table['Alt']
    table['study'] = '10.1126/science.aau1043'
    table['confidence'] = 'high'
    table['build'] = 'grch38'

    variants = {DeNovo(x.person_id, x.chrom, x.pos, x.ref, x.alt, x.study,
                       x.confidence, x.build) for x in table.itertuples()}
    result.append(variants)
use async_graphql::ID;
use bson::{self, oid::ObjectId};
use serde_derive::{Deserialize, Serialize};
// use syn::Fields;
// use uuid::Uuid;
/// GraphQL-facing representation of a role.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Role {
    /// Identifier, built from the hex string of the MongoDB ObjectId.
    pub id: ID,
    /// Role name.
    pub name: String,
    /// Free-form description of the role.
    pub description: String,
}
/// Database (MongoDB/BSON) representation of a role.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RoleModel {
    /// MongoDB document id (the `_id` field).
    pub _id: ObjectId,
    /// Role name.
    pub name: String,
    /// Free-form description of the role.
    pub description: String,
}
impl Role {
pub fn new() -> Self {
Self {
id: ID::from(""),
name: String::from(""),
description: String::from(""),
}
}
}
impl RoleModel {
pub fn new() -> Self {
let converted_id = bson::oid::ObjectId::new();
Self {
_id: converted_id,
name: String::from(""),
description: String::from(""),
}
}
pub fn to_norm(&self) -> Role {
Role {
id: ID::from(self._id.to_string()),
name: self.name.to_owned(),
description: self.description.to_owned(),
}
}
}
// GraphQL field resolvers for `Role`.
#[async_graphql::Object]
impl Role {
    /// Resolves the `id` field.
    async fn id(&self) -> &str {
        &self.id
    }
    /// Resolves the `name` field.
    async fn name(&self) -> &str {
        &self.name
    }
    /// Resolves the `description` field.
    async fn description(&self) -> &str {
        &self.description
    }
}
|
/**
* Compute the Point Volatility Modulus
*
* @param iXDate The X Date
* @param iYDate The Y Date
*
* @return The Point Volatility Modulus
*
* @throws java.lang.Exception Thrown if the Point Volatility Modulus cannot be computed
*/
public double pointVolatilityModulus (
final int iXDate,
final int iYDate)
throws java.lang.Exception
{
int iNumFactor = _pfsg.numFactor();
double dblPointVolatilityModulus = 0.;
for (int i = 0; i < iNumFactor; ++i) {
double dblWeightedFactorPointVolatility = weightedFactorPointVolatility (i, iXDate, iYDate);
dblPointVolatilityModulus += dblWeightedFactorPointVolatility * dblWeightedFactorPointVolatility;
}
return dblPointVolatilityModulus;
} |
<filename>source/dred/dtk/dtk.h
// Copyright (C) 2018 <NAME>. See included LICENSE file.
#ifndef DTK_H
#define DTK_H
#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable:4201) // nonstandard extension used: nameless struct/union
#endif
// Platform/backend detection.
#ifdef _WIN32
#define DTK_WIN32
#if (!defined(WINAPI_FAMILY) || WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP)
#define DTK_WIN32_DESKTOP
#endif
#else
#define DTK_POSIX
#ifdef __linux__
#define DTK_LINUX
#endif
#include <pthread.h>
#include <semaphore.h>
#endif
#ifdef DTK_LINUX
#define DTK_GTK
//#define DTK_X11
#endif
// The windowing backend a context runs on. Win32 on Windows, GTK on Linux.
typedef enum
{
    dtk_platform_win32,
    dtk_platform_gtk,
    //dtk_platform_x11 // Not yet.
} dtk_platform;
// Sized types.
#if defined(_MSC_VER) && _MSC_VER < 1600
typedef signed char dtk_int8;
typedef unsigned char dtk_uint8;
typedef signed short dtk_int16;
typedef unsigned short dtk_uint16;
typedef signed int dtk_int32;
typedef unsigned int dtk_uint32;
typedef signed __int64 dtk_int64;
typedef unsigned __int64 dtk_uint64;
#else
#include <stdint.h>
typedef int8_t dtk_int8;
typedef uint8_t dtk_uint8;
typedef int16_t dtk_int16;
typedef uint16_t dtk_uint16;
typedef int32_t dtk_int32;
typedef uint32_t dtk_uint32;
typedef int64_t dtk_int64;
typedef uint64_t dtk_uint64;
#endif
typedef dtk_uint8 dtk_bool8;
typedef dtk_uint32 dtk_bool32;
#define DTK_TRUE 1
#define DTK_FALSE 0
typedef void* dtk_handle;
typedef void* dtk_ptr;
typedef void (* dtk_proc)();
// Result codes.
typedef int dtk_result;
#define DTK_SUCCESS 0
#define DTK_NO_EVENT 1 // Not an error. Returned by dtk_next_event() to indicate there are no events currently in the queue.
#define DTK_ERROR -1
#define DTK_INVALID_ARGS -2
#define DTK_INVALID_OPERATION -3
#define DTK_OUT_OF_MEMORY -4
#define DTK_NO_BACKEND -5
#define DTK_FAILED_TO_INIT_BACKEND -6
#define DTK_OUT_OF_RANGE -7
#define DTK_CANCELLED -8
#define DTK_FAILED_TO_OPEN_DIALOG -9
#define DTK_FAILED_TO_OPEN_FILE -10
#define DTK_FAILED_TO_READ_FILE -11
#define DTK_FAILED_TO_WRITE_FILE -12
#define DTK_FILE_TOO_BIG -13
#define DTK_PATH_TOO_LONG -14
#define DTK_NAME_TOO_LONG -15
#define DTK_DOES_NOT_EXIST -16
#define DTK_ALREADY_EXISTS -17
#define DTK_ACCESS_DENIED -18
#define DTK_TOO_MANY_OPEN_FILES -19
#define DTK_END_OF_FILE -20
#define DTK_NO_SPACE -21
#define DTK_NEGATIVE_SEEK -22
#define DTK_TIMEOUT -23
#define DTK_NO_DISPLAY -256
#define DTK_NO_SCREEN -257
#define DTK_QUIT -1024 // Returned by dtk_next_event() when a quit message is received.
// Standard library stuff.
#include <assert.h> // Needed by the default dtk_assert() implementation below.
#include <errno.h>
#include <stdarg.h>
#include <stdio.h>  // For FILE
#include <stdlib.h>
#include <string.h>
#include <time.h>
#ifndef dtk_assert
#define dtk_assert(condition) assert(condition)
#endif
#ifndef dtk_malloc
#define dtk_malloc(sz) malloc(sz)   /* no trailing ';' so the macro can be used inside expressions */
#endif
#ifndef dtk_calloc
#define dtk_calloc(c, sz) calloc((c), (sz))
#endif
#ifndef dtk_realloc
#define dtk_realloc(p, sz) realloc((p), (sz))
#endif
#ifndef dtk_free
#define dtk_free(p) free(p)
#endif
#ifndef dtk_copy_memory
#define dtk_copy_memory(dst, src, sz) memcpy((dst), (src), (sz))
#endif
#ifndef dtk_zero_memory
#define dtk_zero_memory(p, sz) memset((p), 0, (sz))
#endif
#define dtk_zero_object(p) dtk_zero_memory((p), sizeof(*(p)))
#define dtk_count_of(obj) (sizeof(obj) / sizeof(obj[0]))
#define dtk_offset_ptr(p, offset) (((dtk_uint8*)(p)) + (offset))
#define dtk_min(x, y) (((x) < (y)) ? (x) : (y))
#define dtk_max(x, y) (((x) > (y)) ? (x) : (y))
#define dtk_clamp(x, lo, hi) (((x) < (lo)) ? (lo) : (((x) > (hi)) ? (hi) : (x)))
#define dtk_round_up(x, multiple) ((((x) + ((multiple) - 1)) / (multiple)) * (multiple))
#define dtk_round_up_signed(x, multiple) ((((x) + (((x) >= 0)*((multiple) - 1))) / (multiple)) * (multiple))
#if defined(_MSC_VER)
#define DTK_INLINE static __inline
#else
#define DTK_INLINE static inline
#endif
typedef struct dtk_context dtk_context;
typedef struct dtk_event dtk_event;
typedef struct dtk_control dtk_control;
typedef struct dtk_label dtk_label;
typedef struct dtk_button dtk_button;
typedef struct dtk_checkbox dtk_checkbox;
typedef struct dtk_color_button dtk_color_button;
typedef struct dtk_tabbar dtk_tabbar;
typedef struct dtk_tabgroup dtk_tabgroup;
typedef struct dtk_scrollbar dtk_scrollbar;
typedef struct dtk_textbox dtk_textbox;
typedef struct dtk_window dtk_window;
typedef struct dtk_menu dtk_menu;
typedef struct dtk_timer dtk_timer;
// Stock mouse cursors provided by the operating system.
typedef enum
{
    dtk_system_cursor_type_none,
    dtk_system_cursor_type_default,
    dtk_system_cursor_type_arrow = dtk_system_cursor_type_default,  // alias for the default cursor
    dtk_system_cursor_type_hand,
    dtk_system_cursor_type_text,            // I-beam cursor for text editing
    dtk_system_cursor_type_cross,
    dtk_system_cursor_type_double_arrow_h,  // horizontal resize
    dtk_system_cursor_type_double_arrow_v,  // vertical resize
} dtk_system_cursor_type;
// Font categories an application can define; queried via DTK_EVENT_APPLICATION_FONT.
typedef enum
{
    dtk_application_font_type_ui,
    dtk_application_font_type_monospace,
    dtk_application_font_type_default = dtk_application_font_type_ui    // alias for the UI font
} dtk_application_font_type;
// The callback function for handing log messages.
typedef void (* dtk_log_proc)(dtk_context* pTK, const char* message);
// The callback function to call when an event is received and needs handling. The return value controls whether or
// not the event should be propagated. Returning true will propagate the event, false will cancel the event. You will
// almost always want to return true.
typedef dtk_bool32 (* dtk_event_proc)(dtk_event* pEvent);
#include "nanosvg.h"
#include "nanosvgrast.h"
#include "stb_image_resize.h"
#include "dtk_rect.h"
#include "dtk_string.h"
#include "dtk_path.h"
#include "dtk_time.h"
#include "dtk_io.h"
#include "dtk_threading.h"
#include "dtk_ipc.h"
#include "dtk_math.h"
#include "dtk_monitor.h"
#include "dtk_svg.h"
#include "dtk_graphics.h"
#include "dtk_image.h"
#include "dtk_stock_images.h"
#include "dtk_input.h"
#include "dtk_accelerators.h"
#include "dtk_control.h"
#include "dtk_label.h"
#include "dtk_button.h"
#include "dtk_checkbox.h"
#include "dtk_color_button.h"
#include "dtk_tabbar.h"
#include "dtk_tabgroup.h"
#include "dtk_scrollbar.h"
#include "dtk_textbox.h"
#include "dtk_window.h"
#include "dtk_menu.h"
#include "dtk_dialogs.h"
#include "dtk_timer.h"
#include "dtk_clipboard.h"
#include "dtk_paint_queue.h"
#include "dtk_command_line.h"
// Event types.
typedef int dtk_event_type;
#define DTK_EVENT_NONE 0
#define DTK_EVENT_QUIT 1
#define DTK_EVENT_MENU 2
#define DTK_EVENT_ACCELERATOR 3
#define DTK_EVENT_CLOSE 4
#define DTK_EVENT_PAINT 5
#define DTK_EVENT_SIZE 6
#define DTK_EVENT_MOVE 7
#define DTK_EVENT_SHOW 8
#define DTK_EVENT_HIDE 9
#define DTK_EVENT_MOUSE_LEAVE 10
#define DTK_EVENT_MOUSE_ENTER 11
#define DTK_EVENT_MOUSE_MOVE 12
#define DTK_EVENT_MOUSE_BUTTON_DOWN 13
#define DTK_EVENT_MOUSE_BUTTON_UP 14
#define DTK_EVENT_MOUSE_BUTTON_DBLCLICK 15
#define DTK_EVENT_MOUSE_WHEEL 16
#define DTK_EVENT_KEY_DOWN 17
#define DTK_EVENT_KEY_UP 18
#define DTK_EVENT_PRINTABLE_KEY_DOWN 19
#define DTK_EVENT_CAPTURE_KEYBOARD 20
#define DTK_EVENT_RELEASE_KEYBOARD 21
#define DTK_EVENT_CAPTURE_MOUSE 22
#define DTK_EVENT_RELEASE_MOUSE 23
#define DTK_EVENT_DPI_CHANGED 24
#define DTK_EVENT_TOOLTIP 25
#define DTK_EVENT_BUTTON_PRESSED 128
#define DTK_EVENT_CHECKBOX_CHECK_CHANGED 129
#define DTK_EVENT_COLOR_BUTTON_COLOR_CHANGED 130
#define DTK_EVENT_SCROLLBAR_SCROLL 132
#define DTK_EVENT_TABBAR_MOUSE_BUTTON_DOWN_TAB 133
#define DTK_EVENT_TABBAR_MOUSE_BUTTON_UP_TAB 134
#define DTK_EVENT_TABBAR_MOUSE_BUTTON_DBLCLICK_TAB 135
#define DTK_EVENT_TABBAR_CHANGE_TAB 136
#define DTK_EVENT_TABBAR_CLOSE_TAB 137
#define DTK_EVENT_TABBAR_PIN_TAB 138
#define DTK_EVENT_TABBAR_UNPIN_TAB 139
#define DTK_EVENT_TABBAR_REMOVE_TAB 140
#define DTK_EVENT_TABGROUP_MOUSE_BUTTON_DOWN_TAB 141
#define DTK_EVENT_TABGROUP_MOUSE_BUTTON_UP_TAB 142
#define DTK_EVENT_TABGROUP_MOUSE_BUTTON_DBLCLICK_TAB 143
#define DTK_EVENT_TABGROUP_CHANGE_TAB 144
#define DTK_EVENT_TABGROUP_CLOSE_TAB 145
#define DTK_EVENT_TABGROUP_PIN_TAB 146
#define DTK_EVENT_TABGROUP_UNPIN_TAB 147
#define DTK_EVENT_REFRESH_LAYOUT 251 // A special event that's used to indicate to a control that it needs to refresh the layout of it's children and inner elements.
#define DTK_EVENT_APPLICATION_FONT 252 // A special event for retrieving an application-defined font for certain categories.
#define DTK_EVENT_APPLICATION_SCALE 253 // A special event for retrieving an application-defined scaling factor for GUI elements.
#define DTK_EVENT_CHANGE_KEYBOARD_CAPTURE 254 // A special event used internally for handling keyboard capture changes.
#define DTK_EVENT_CHANGE_MOUSE_CAPTURE 255 // ^^^
#define DTK_EVENT_CUSTOM 256
// A single event. `type` selects which member of the union below is valid.
struct dtk_event
{
    dtk_event_type type;        // one of the DTK_EVENT_* codes
    dtk_context* pTK;           // the context the event belongs to
    dtk_control* pControl;      // the control the event is addressed to
    union
    {
        struct
        {
            int result;         // exit code carried by DTK_EVENT_QUIT
        } quit;
        struct
        {
            int unused;
        } close;
        struct
        {
            dtk_menu* pMenu;
            dtk_uint32 itemIndex;
            dtk_uint32 itemID;
        } menu;
        struct
        {
            dtk_key key;
            dtk_uint32 modifiers;
            dtk_uint32 id;
        } accelerator;
        struct
        {
            dtk_rect rect;          // region that needs repainting
            dtk_surface* pSurface;  // surface to draw into
        } paint;
        struct
        {
            dtk_int32 width;
            dtk_int32 height;
        } size;
        struct
        {
            dtk_int32 x;
            dtk_int32 y;
        } move;
        struct
        {
            int unused;
        } show;
        struct
        {
            int unused;
        } hide;
        struct
        {
            int unused;
        } mouseLeave;
        struct
        {
            int unused;
        } mouseEnter;
        struct
        {
            dtk_int32 x;
            dtk_int32 y;
            dtk_uint32 state;
        } mouseMove;
        // shared by the button down, up and double-click events
        struct
        {
            dtk_int32 x;
            dtk_int32 y;
            dtk_mouse_button button;
            dtk_uint32 state;
        } mouseButton;
        struct
        {
            dtk_int32 x;
            dtk_int32 y;
            dtk_int32 delta;
            dtk_uint32 state;
        } mouseWheel;
        struct
        {
            dtk_key key;
            dtk_uint32 state;
        } keyDown;
        struct
        {
            dtk_key key;
            dtk_uint32 state;
        } keyUp;
        struct
        {
            dtk_uint32 utf32;   // the character as a UTF-32 code point
            dtk_uint32 state;
        } printableKeyDown;
        struct
        {
            dtk_control* pOldCapturedControl;
        } captureKeyboard, captureMouse;
        struct
        {
            dtk_control* pNewCapturedControl;
        } releaseKeyboard, releaseMouse;
        struct
        {
            float newDPIScale;
            dtk_int32 suggestedPosX; // <-- Absolute position.
            dtk_int32 suggestedPosY;
            dtk_int32 suggestedWidth;
            dtk_int32 suggestedHeight;
        } dpiChanged;
        struct
        {
            dtk_int32 x;
            dtk_int32 y;
            dtk_int32 absoluteX;
            dtk_int32 absoluteY;
            dtk_tooltip tooltip;
        } tooltip;
        struct
        {
            int unused;
        } refreshInnerLayout;
        struct
        {
            dtk_application_font_type type;
            dtk_font* pFont; // <-- Set by the event handler.
        } applicationFont;
        struct
        {
            float scale; // <-- Set by the event handler.
        } applicationScale;
        struct
        {
            int unused;
        } button;
        struct
        {
            dtk_bool32 checked;
        } checkbox;
        struct
        {
            dtk_color color;
        } colorButton;
        struct
        {
            dtk_uint32 scrollPos;
        } scrollbar;
        struct
        {
            dtk_int32 newTabIndex;
            dtk_int32 oldTabIndex;
            dtk_int32 tabIndex;
            struct
            {
                dtk_int32 x;
                dtk_int32 y;
                dtk_mouse_button button;
                dtk_uint32 state;
            } mouseButton;
        } tabbar;
        struct
        {
            dtk_int32 newTabIndex;
            dtk_int32 oldTabIndex;
            dtk_int32 tabIndex;
            struct
            {
                dtk_int32 x;
                dtk_int32 y;
                dtk_mouse_button button;
                dtk_uint32 state;
            } mouseButton;
        } tabgroup;
        // payload for DTK_EVENT_CUSTOM; see dtk_post_custom_event()
        struct
        {
            dtk_uint32 id;
            const void* pData; // <-- Marked as const to discourage modification since it's only a _copy_ of the original input data.
            size_t dataSize;
        } custom;
    };
};
#ifdef DTK_GTK
// Pairs a bound accelerator with the GTK closure registered for it.
typedef struct
{
    dtk_accelerator accelerator;
    /*GClosure**/ dtk_ptr pClosure;
} dtk_accelerator_gtk;
#endif
// The main toolkit context. Owns the global event/log callbacks, capture
// state, shared fonts and stock images, plus per-backend data in the union.
struct dtk_context
{
    dtk_platform platform;      // which backend this context was initialized with
    dtk_event_proc onEvent;     // global event handler callback
    dtk_log_proc onLog;         // logging callback
    int exitCode;
    dtk_window* pFirstWindow;
    dtk_window* pWindowWithKeyboardCapture; // This is set in the window's DTK_CAPTURE_KEYBOARD / DTK_RELEASE_KEYBOARD event handlers in dtk_window_default_event_handler()
    dtk_window* pWindowWithMouseCapture;    // ^^^
    dtk_control* pControlWantingKeyboardCapture;
    dtk_control* pControlWithKeyboardCapture;
    dtk_control* pControlWithMouseCapture;
    dtk_window* pWindowUnderMouse;
    dtk_control* pControlUnderMouse;    // Used for mouse enter/leave state management.
    dtk_int32 lastMousePosX;
    dtk_int32 lastMousePosY;
    void* pUserData;
    dtk_paint_queue paintQueue;
    dtk_font uiFont;            // validity tracked by isUIFontInitialized below
    dtk_font monospaceFont;     // validity tracked by isMonospaceFontInitialized below
    dtk_image stockImages[DTK_STOCK_IMAGE_COUNT];
    dtk_bool32 isUIFontInitialized : 1;
    dtk_bool32 isMonospaceFontInitialized : 1;
    // Backend-specific data. Only the member matching `platform` is used.
    union
    {
    #ifdef DTK_WIN32
        struct
        {
            /*HMODULE*/ dtk_handle hComctl32DLL;
            dtk_proc InitCommonControlsEx;
            /*HMODULE*/ dtk_handle hOle32DLL;
            dtk_proc OleInitialize;
            dtk_proc OleUninitialize;
            /*HMODULE*/ dtk_handle hMsimg32DLL;
            dtk_proc AlphaBlend;
            /*HCURSOR*/ dtk_handle hCursorArrow;
            /*HCURSOR*/ dtk_handle hCursorHand;
            /*HCURSOR*/ dtk_handle hCursorIBeam;
            /*HCURSOR*/ dtk_handle hCursorCross;
            /*HCURSOR*/ dtk_handle hCursorSizeWE;
            /*HCURSOR*/ dtk_handle hCursorSizeNS;
            /*HACCEL*/ dtk_handle hAccel; // The global accelerator table.
            dtk_accelerator* pAccelerators;
            dtk_uint32 acceleratorCount;
            dtk_uint32 acceleratorCapacity;
            /*HWND*/ dtk_handle hMessagingWindow; // A special hidden window which is only used for pumping messages, usually custom ones.
            /*HDC*/ dtk_handle hGraphicsDC; // A special device context for use by the graphics sub-system (usually for font management).
            /*HWND*/ dtk_handle hTooltipWindow; // The window used for tooltips. The same tooltip window is shared across the entire application.
            /*HWND*/ dtk_handle hLastTooltipOwner; // The window that last owned the tooltip.
            dtk_string tooltipText;
            void* pCharConvBuffer; // For wchar_t <-> char conversions.
            size_t charConvBufferSize;
            dtk_int32* pGlyphCache; // The cache of glyph character positions. Used by the graphics sub-system.
            size_t glyphCacheSize;
        } win32;
    #endif
    #ifdef DTK_GTK
        struct
        {
            /*GdkCursor**/ dtk_ptr pCursorDefault;
            /*GdkCursor**/ dtk_ptr pCursorIBeam;
            /*GdkCursor**/ dtk_ptr pCursorCross;
            /*GdkCursor**/ dtk_ptr pCursorDoubleArrowH;
            /*GdkCursor**/ dtk_ptr pCursorDoubleArrowV;
            /*GtkAccelGroup**/ dtk_ptr pAccelGroup;
            dtk_accelerator_gtk* pAccelerators;
            dtk_uint32 acceleratorCount;
            dtk_uint32 acceleratorCapacity;
        } gtk;
        struct
        {
            int unused;
        } cairo;
    #endif
    };
};
#if defined(_MSC_VER)
#pragma warning(pop)
#endif
// Initializes the toolkit.
//
// This can be called multiple times, but each call must be paired with a matching call to dtk_uninit().
//
// You can set onEvent to NULL, but you'll want to call dtk_set_event_callback() later on to hook up an
// event handler.
//
// Once a context has been initialized it's location in memory cannot be changed. The reason for this is
// that controls and whatnot need to reference the context that owns it, which is done via a pointer.
// Consider using malloc() to allocate memory for the context if this becomes an issue for you.
//
// Thread Safety: UNSAFE
dtk_result dtk_init(dtk_context* pTK, dtk_event_proc onEvent, void* pUserData);
// Uninitializes the toolkit.
//
// Thread Safety: UNSAFE
dtk_result dtk_uninit(dtk_context* pTK);
// Sets the global event handler callback.
//
// Thread Safety: SAFE
// This is implemented as an atomic assignment.
dtk_result dtk_set_event_callback(dtk_context* pTK, dtk_event_proc proc);
// Waits for an event and processes it.
//
// Set <blocking> to DTK_TRUE to block until a message is received. Set to DTK_FALSE to return immediately
// if no event is in the queue, in which case DTK_NO_EVENT will be returned.
//
// <pExitCode> will store the exit code when the loop has been terminated, and can be null.
//
// Thread Safety: UNSAFE
// Do not call this from multiple threads. Have a single thread that does all event handling.
dtk_result dtk_next_event(dtk_context* pTK, dtk_bool32 blocking, int* pExitCode);
// Flushes the event queue by handling any events currently sitting in the event queue.
//
// This will not remove any quit messages from the queue. The rationale for this is to ensure the application's
// main loop is given the opportunity to handle it and terminate the application.
//
// An event queue flush is performed whenever a control is uninitialized to ensure there are no lingering events
// that refer to an uninitialized control.
//
// This is non-blocking, but should be considered inefficient.
void dtk_flush_event_queue(dtk_context* pTK);
// Posts an event to the queue which will later be handled by the event handler of the respective control.
//
// To handle the event immediately, use dtk_handle_control_event().
dtk_result dtk_post_local_event(dtk_event* pEvent);
// Same as dtk_post_local_event(), except handles it immediately instead of posting it to the queue. The return
// value is the value returned by the event handler.
dtk_bool32 dtk_handle_local_event(dtk_event* pEvent);
// Posts a custom event.
//
// This will post an event of type DTK_EVENT_CUSTOM. This will make a copy of the data.
//
// Thread Safety: SAFE
dtk_result dtk_post_custom_event(dtk_context* pTK, dtk_control* pControl, dtk_uint32 eventID, const void* pData, size_t dataSize);
// Same as dtk_post_custom_event(), except handles it immediately rather than posting it to the queue.
//
// This will not make a copy of the data.
dtk_result dtk_handle_custom_event(dtk_context* pTK, dtk_control* pControl, dtk_uint32 eventID, const void* pData, size_t dataSize);
// Posts a paint notification to the event queue to let it know there is a pending paint request for a window.
dtk_result dtk_post_paint_notification_event(dtk_context* pTK, dtk_window* pWindow);
// Handles a paint notification event.
dtk_result dtk_handle_paint_notification_event(dtk_context* pTK, dtk_window* pWindow);
// The default event handler.
//
// Applications should call this from their own global event handler.
dtk_bool32 dtk_default_event_handler(dtk_event* pEvent);
// Posts a quit event to the event queue. This will cause the main loop to terminate and dtk_next_event() to
// return DTK_QUIT.
//
// Thread Safety: SAFE
dtk_result dtk_post_quit_event(dtk_context* pTK, int exitCode);
// Posts DTK_EVENT_TOOLTIP events.
dtk_result dtk_do_tooltip(dtk_context* pTK);
// Sets the logging callback.
dtk_result dtk_set_log_callback(dtk_context* pTK, dtk_log_proc proc);
//// Accelerators ////
// Binds an accelerator.
//
// If you need to bind multiple accelerators, consider binding them in bulk with dtk_bind_accelerators(). Binding
// acceleratos individually on Windows is particularly inefficient.
dtk_result dtk_bind_accelerator(dtk_context* pTK, dtk_accelerator accelerator);
// Binds a group of accelerators.
dtk_result dtk_bind_accelerators(dtk_context* pTK, dtk_accelerator* pAccelerators, dtk_uint32 count);
// Unbinds an accelerator.
dtk_result dtk_unbind_accelerator(dtk_context* pTK, dtk_accelerator accelerator);
//// Screens and Monitors ////
// Retrieves the dimensions of the main screen in pixels. This is _not_ scaled.
dtk_result dtk_get_screen_size(dtk_context* pTK, dtk_uint32* pSizeX, dtk_uint32* pSizeY);
//// DPI Scaling ////
// Retrieves the base DPI scaling factor.
float dtk_get_system_dpi_scale(dtk_context* pTK);
// Retrieves an application-defined scaling factor for GUI elements.
float dtk_get_application_scaling_factor(dtk_context* pTK);
//// Input ////
// Sets the control that should receive keyboard capture. This will fail if the control is not allowed to receive capture.
//
// Setting pControl to NULL is equivalent to dtk_release_keyboard().
dtk_result dtk_capture_keyboard(dtk_context* pTK, dtk_control* pControl);
// Releases the keyboard capture from whatever control currently has the capture.
dtk_result dtk_release_keyboard(dtk_context* pTK);
// Retrieves a pointer to the control with the keyboard capture.
dtk_control* dtk_get_control_with_keyboard_capture(dtk_context* pTK);
// Sets the control that should receive mouse capture.
//
// Setting pControl to NULL is equivalent to dtk_release_mouse().
dtk_result dtk_capture_mouse(dtk_context* pTK, dtk_control* pControl);
// Releases the mouse capture from whatever control currently has the capture.
dtk_result dtk_release_mouse(dtk_context* pTK);
// Retrieves a pointer to the control with the mouse capture.
dtk_control* dtk_get_control_with_mouse_capture(dtk_context* pTK);
//// Graphics ////
// Retrieves the default font for UI elements.
dtk_font* dtk_get_ui_font(dtk_context* pTK);
// Retrieves the default monospace font.
//
// This will first try retrieving the font from the application. If the application does not define a monospace font, it
// will be based on the operating system.
dtk_font* dtk_get_monospace_font(dtk_context* pTK);
// Retrieves a stock image by it's ID.
dtk_image* dtk_get_stock_image(dtk_context* pTK, dtk_uint32 stockImageID);
//// Known Folders ////
// Retrieves the full path of the executable.
//
// The return value is the length of the returned string, including the null terminator. Returns 0 on error.
size_t dtk_get_executable_path(char* pathOut, size_t pathOutSize);
// Retrieves the directory of the executable.
//
// The return value is the length of the returned string, including the null terminator. Returns 0 on error.
//
// The returned string will not include the name of the executable.
size_t dtk_get_executable_directory_path(char* pathOut, size_t pathOutSize);
// Retrieves the path of the user's config directory.
//
// On Windows this will typically be %APPDATA% and on Linux it will usually be ~/.config
size_t dtk_get_config_directory_path(char* pathOut, size_t pathOutSize);
//// User Accounts and Process Management ////
// Retrieves the user name of the user running the application.
size_t dtk_get_username(char* usernameOut, size_t usernameOutSize);
// Retrieves the ID of the current process.
unsigned int dtk_get_process_id();
#endif // DTK_H
|
def _maybe_expand_macro(self,
                        component,
                        component_list_type,
                        template_data=None):
    """Expand ``component`` if it names a macro of ``component_list_type``.

    Args:
        component: either a bare component name, or a single-entry dict
            mapping the component name to its parameter data.
        component_list_type: the component list type used for the macro
            lookup (and for detecting masked inbuilt definitions).
        template_data: optional template variables; when given, the
            component name itself is interpolated with them first.

    Returns:
        The macro's (deep-formatted) component list, or ``None`` when
        ``component`` does not refer to a macro.
    """
    component_copy = copy.deepcopy(component)
    if isinstance(component, dict):
        component_name, component_data = next(iter(component_copy.items()))
    else:
        component_name, component_data = component_copy, None

    if template_data:
        # The component name may itself be parametrized, so resolve it
        # before checking whether it names a macro.
        component_name = deep_format(component_name, template_data, True)

    if not self._is_macro(component_name, component_list_type):
        return None

    if ModuleRegistry.is_module_name(component_name, component_list_type):
        # Warn only once per masking macro name; previously the flag was
        # recorded but never consulted, so the warning repeated endlessly.
        if component_name not in self._mask_warned:
            self._mask_warned[component_name] = True
            logger.warning(
                "You have a macro ('%s') defined for '%s' "
                "component list type that is masking an inbuilt "
                "definition" % (component_name, component_list_type))

    macro_component_list = self._get_macro_components(component_name,
                                                      component_list_type)
    if component_data:
        if template_data:
            # Macro parameters override same-named template variables for
            # the duration of this expansion only (hence the deepcopy).
            template_data = copy.deepcopy(template_data)
            template_data.update(component_data)
            macro_component_list = deep_format(
                macro_component_list, template_data, False)
        else:
            macro_component_list = deep_format(
                macro_component_list, component_data, False)
    return macro_component_list
<reponame>GabrielModog/pokemanos
import { Reducer } from 'redux';
import { PokemonsState, PokemonsType } from './types';
// Default store shape: nothing loaded, no error, grid layout selected.
const INITIAL_STATE: PokemonsState = {
  data: [],
  error: false,
  loading: false,
  layout: 'GRID',
};
/**
 * Pokemons reducer.
 *
 * Handles layout switching plus the request/success/failure cycles for
 * listing, searching by name and searching by type.
 */
const reducer: Reducer<PokemonsState | any> = (
  state = INITIAL_STATE,
  action
) => {
  switch (action.type) {
    case PokemonsType.CHANGE_LAYOUT:
      return {
        ...state,
        layout: action.payload.layout,
      };
    // All request actions simply flag the store as loading.
    case PokemonsType.LOAD_REQUEST:
    case PokemonsType.SEARCH_REQUEST:
    case PokemonsType.SEARCH_TYPE_REQUEST:
      return { ...state, loading: true };
    case PokemonsType.LOAD_SUCCESS:
    case PokemonsType.SEARCH_TYPE_SUCCESS:
      return {
        ...state,
        loading: false,
        error: false,
        data: action.payload.data,
      };
    case PokemonsType.SEARCH_SUCCESS:
      // A name search resolves a single pokémon, so wrap it in an array.
      return {
        ...state,
        loading: false,
        error: false,
        data: [action.payload.data],
      };
    // All failure actions clear the data and raise the error flag.
    // (SEARCH_TYPE_FAILURE previously dropped the rest of the state —
    // losing `layout` — and reported `error: false` on a failure; it now
    // behaves consistently with the other failure cases.)
    case PokemonsType.LOAD_FAILURE:
    case PokemonsType.SEARCH_FAILURE:
    case PokemonsType.SEARCH_TYPE_FAILURE:
      return {
        ...state,
        loading: false,
        error: true,
        data: [],
      };
    default:
      return state;
  }
};
|
import * as React from "react";
import { Gallery } from "./Gallery";
import { GalleryPageMainSection } from "./GalleryPageMainSection";
export class GalleryPage extends React.Component {
render() {
const urls = [
"https://images.unsplash.com/photo-1513836279014-a89f7a76ae86?w=1920&q=80",
"https://images.unsplash.com/photo-1502082553048-f009c37129b9?w=1920&q=80",
"https://images.unsplash.com/photo-1540206395-68808572332f?w=1920&q=80",
"https://images.unsplash.com/photo-1504700610630-ac6aba3536d3?w=1920&q=80",
"https://images.unsplash.com/photo-1433086966358-54859d0ed716?w=1920&q=80",
"https://images.unsplash.com/photo-1540202403-b7abd6747a18?w=1920&q=80",
"https://images.unsplash.com/photo-1536431311719-398b6704d4cc?w=1920&q=80",
"https://images.unsplash.com/photo-1453323403791-7fbd02be3e40?w=1920&q=80",
"https://images.unsplash.com/photo-1504567961542-e24d9439a724?w=1920&q=80",
"https://images.unsplash.com/photo-1523712999610-f77fbcfc3843?w=1920&q=80",
"https://images.unsplash.com/photo-1534406315430-4d7cf92bc690?w=1920&q=80",
"https://images.unsplash.com/photo-1508669232496-137b159c1cdb?w=1920&q=80",
"https://images.unsplash.com/photo-1529419412599-7bb870e11810?w=1920&q=80",
"https://images.unsplash.com/photo-1517811409552-396f829138a2?w=1920&q=80",
"https://images.unsplash.com/photo-1439853949127-fa647821eba0?w=1920&q=80",
"https://images.unsplash.com/photo-1445962125599-30f582ac21f4?w=1920&q=80",
"https://images.unsplash.com/photo-1505245208761-ba872912fac0?w=1920&q=80",
"https://images.unsplash.com/photo-1487622750296-6360190669a1?w=1920&q=80",
"https://images.unsplash.com/photo-1470770903676-69b98201ea1c?w=1920&q=80",
"https://images.unsplash.com/photo-1444465693019-aa0b6392460d?w=1920&q=80"
];
return (
<div>
<GalleryPageMainSection />
<div className="text-center my-5 py-3 mx-5 px-3">
<Gallery urls={urls} numberOfImagePerAxis={3}></Gallery>
</div>
</div>
)
}
}
|
<reponame>kevinYin/ideploy<filename>deployment-web/src/main/java/io/ideploy/deployment/admin/websocket/request/CookieRequestMessage.java
package io.ideploy.deployment.admin.websocket.request;
/**
 * Purpose: request message that sends the client's cookies over the WebSocket.
 * <p>
 * Details:
 *
 * @author jingyesi 17/2/4
 */
public class CookieRequestMessage extends WebSocketRequestMessage{

    /**
     * The cookie string carried by this request.
     */
    private String cookies;

    public String getCookies() {
        return cookies;
    }

    public void setCookies(String cookies) {
        this.cookies = cookies;
    }
}
|
<filename>node_modules/@types/express/lib/application.d.ts
import {Server} from 'http';
import {ListenOptions} from 'net';
import {Router, ParamHandler, HandlerArgument, PathArgument} from './router/index';
declare namespace app {
export interface Application extends Router {
/**
* Contains one or more path patterns on which a sub-app was mounted.
*/
mountpath: string | string[];
/**
* Has properties that are local variables within the application.
* Once set, the value of app.locals properties persist throughout the life of the application,
* in contrast with res.locals properties that are valid only for the lifetime of the request.
* You can access local variables in templates rendered within the application.
* This is useful for providing helper functions to templates, as well as application-level data.
* Local variables are available in middleware via req.app.locals (see req.app)
*/
locals: any;
/**
* Initialize the server.
*
* - setup default configuration
* - setup default middleware
* - setup route reflection methods
*/
init(): void;
/**
* Initialize application configuration.
*/
defaultConfiguration(): void;
/**
* Register the given template engine callback `fn`
* as `ext`.
*
* By default will `require()` the engine based on the
* file extension. For example if you try to render
* a "foo.jade" file Express will invoke the following internally:
*
* app.engine('jade', require('jade').__express);
*
* For engines that do not provide `.__express` out of the box,
* or if you wish to "map" a different extension to the template engine
* you may use this method. For example mapping the EJS template engine to
* ".html" files:
*
* app.engine('html', require('ejs').renderFile);
*
* In this case EJS provides a `.renderFile()` method with
* the same signature that Express expects: `(path, options, callback)`,
* though note that it aliases this method as `ejs.__express` internally
* so if you're using ".ejs" extensions you dont need to do anything.
*
* Some template engines do not follow this convention, the
* [Consolidate.js](https://github.com/visionmedia/consolidate.js)
* library was created to map all of node's popular template
* engines to follow this convention, thus allowing them to
* work seamlessly within Express.
*/
engine(ext: string, fn: Function): Application;
/**
* Assign `setting` to `val`, or return `setting`'s value.
*
* app.set('foo', 'bar');
* app.get('foo');
* // => "bar"
* app.set('foo', ['bar', 'baz']);
* app.get('foo');
* // => ["bar", "baz"]
*
* Mounted servers inherit their parent server's settings.
*/
set(setting: string, val: any): this;
get(name: string): any;
// need to duplicate this here from the Router because of the overload
get(path: PathArgument, ...handlers: HandlerArgument[]): this;
/**
* Add callback triggers to route parameters, where name is the name of the parameter or an array of them,
* and callback is the callback function. The parameters of the callback function are the request object,
* the response object, the next middleware, the value of the parameter and the name of the parameter,
* in that order.
* If name is an array, the callback trigger is registered for each parameter declared in it,
* in the order in which they are declared. Furthermore, for each declared parameter except the last one,
* a call to next inside the callback will call the callback for the next declared parameter.
* For the last parameter, a call to next will call the next middleware in place for the route currently
* being processed, just like it would if name were just a string.
* For example, when :user is present in a route path, you may map user loading logic to automatically
* provide req.user to the route, or perform validations on the parameter input.
*/
param(name: string | string[], handler: ParamHandler): this;
/**
* @deprecated
*/
param(callback: (name: string, matcher: RegExp) => ParamHandler): this;
/**
* Return the app's absolute pathname
* based on the parent(s) that have
* mounted it.
*
* For example if the application was
* mounted as "/admin", which itself
* was mounted as "/blog" then the
* return value would be "/blog/admin".
*/
path(): string;
/**
* Check if `setting` is enabled (truthy).
*
* app.enabled('foo')
* // => false
*
* app.enable('foo')
* app.enabled('foo')
* // => true
*/
enabled(setting: string): boolean;
/**
* Check if `setting` is disabled.
*
* app.disabled('foo')
* // => true
*
* app.enable('foo')
* app.disabled('foo')
* // => false
*/
disabled(setting: string): boolean;
/**
* Enable `setting`.
*/
enable(setting: string): this;
/**
* Disable `setting`.
*/
disable(setting: string): this;
/**
* Render the given view `name` name with `options`
* and a callback accepting an error and the
* rendered template string.
*
* Example:
*
* app.render('email', { name: 'Tobi' }, function(err, html){
* // ...
* })
*/
render(name: string, locals?: { [local: string]: any }, callback?: (err: Error, html: string) => void): void;
render(name: string, callback: (err: Error, html: string) => void): void;
/**
* Listen for connections.
*
* A node `http.Server` is returned, with this
* application (which is a `Function`) as its
* callback. If you wish to create both an HTTP
* and HTTPS server you may do so with the "http"
* and "https" modules as shown here:
*
* var http = require('http')
* , https = require('https')
* , express = require('express')
* , app = express();
*
* http.createServer(app).listen(80);
* https.createServer({ ... }, app).listen(443);
*/
listen(port: number, hostname?: string, backlog?: number, listeningListener?: Function): Server;
listen(port: number, hostname?: string, listeningListener?: Function): Server;
listen(port: number, backlog?: number, listeningListener?: Function): Server;
listen(port: number, listeningListener?: Function): Server;
listen(path: string, backlog?: number, listeningListener?: Function): Server;
listen(path: string, listeningListener?: Function): Server;
listen(handle: any, backlog?: number, listeningListener?: Function): Server;
listen(handle: any, listeningListener?: Function): Server;
listen(options: ListenOptions, listeningListener?: Function): Server;
}
}
// The module's single export: a ready-made Application instance, exposed in
// CommonJS `export =` style so `const app = require(...)` works.
declare const app: app.Application;
export = app;
|
<filename>packages/eolts/inst/include/R_nts.h
// -*- mode: C++; indent-tabs-mode: nil; c-basic-offset: 4; tab-width: 4; -*-
// vim: set shiftwidth=4 softtabstop=4 expandtab:
/*
* 2013,2014, Copyright University Corporation for Atmospheric Research
*
* This file is part of the "eolts" package for the R software environment.
* The license and distribution terms for this file may be found in the
* file LICENSE in this package.
*/
#ifndef R_NTS_H
#define R_NTS_H
#include "R_Matrix.h"
#include "R_utime.h"
namespace eolts {
/**
 * C++ wrapper around an R "nts" (time series) object, giving typed access to
 * its slots.  Construction protects the underlying SEXP; see the destructor
 * note below about R memory.
 */
class R_nts {
public:

    /**
     * for creating an R_nts object from scratch.
     */
    R_nts();

    /**
     * for creating an R_nts object from an nts R object.
     */
    R_nts(SEXP obj);

    /**
     * Does not free up any allocated R memory of the object.
     */
    ~R_nts();

    /** The underlying R object managed by this wrapper. */
    SEXP getRObject() { return _obj; }

    /**
     * Set the .Data slot.
     */
    void setMatrix(SEXP val);

    SEXP getMatrix();

    /**
     * Set the positions slot.
     */
    void setPositions(SEXP val);

    SEXP getPositions();

    /** Set the stations slot from parallel vectors of names and numbers. */
    void setStations(const std::vector<std::string>& names,
            const std::vector<int>& numbers);

    std::vector<std::string> getStationNames() const;

    std::vector<int> getStationNumbers() const;

    /** Set the units slot. */
    void setUnits(const std::vector<std::string>& names);

    std::vector<std::string> getUnits() const;

    /** Set the long-names slot. */
    void setLongNames(const std::vector<std::string>& names);

    std::vector<std::string> getLongNames() const;

    /**
     * Set the weights slot.
     */
    void setWeights(SEXP val);

    SEXP getWeights();

    /** Set the weight-map slot. */
    void setWeightMap(std::vector<int>);

    std::vector<int> getWeightMap() const;

    /** Set the time-format slot. */
    void setTimeFormat(const std::string& val);

    std::string getTimeFormat() const;

    /** Set the time-zone slot. */
    void setTimeZone(const std::string& val);

    std::string getTimeZone() const;

    // Cached S4 class definition and slot-name symbols shared by all instances.
    static SEXP classDef;

    static SEXP dataSlotName;
    static SEXP posSlotName;
    static SEXP unitsSlotName;
    static SEXP weightsSlotName;
    static SEXP weightMapSlotName;
    static SEXP stationsSlotName;
    static SEXP longNamesSlotName;
    static SEXP startposSlotName;
    static SEXP endposSlotName;
    static SEXP timeFormatSlotName;
    static SEXP timeZoneSlotName;

private:

    // declared private to prevent copying and assignment
    R_nts(const R_nts &);
    R_nts &operator=(const R_nts &) const;

    SEXP _obj;              // the wrapped R object

    PROTECT_INDEX _pindx;   // index used with PROTECT_WITH_INDEX/REPROTECT
};
} // namespace eolts
#endif
|
import React from "react";
import styled from "styled-components";
// Grid wrapper for the schedule page; gap/padding double at the "md" breakpoint.
const Wrapper = styled.section`
  display: grid;
  gap: 1rem;
  padding: 1rem;
  ${({ theme }) => theme.breakpoints.up("md")} {
    gap: 2rem;
    padding: 2rem;
  }
`;

// Placeholder schedule screen.
const Schedule: React.FC = () => <Wrapper>Schedule</Wrapper>;

export default Schedule;
|
#include <bits/stdc++.h>
using namespace std;
int main(){
    // Express n as 4*a + 7*b (a, b >= 0) and print a fours followed by b
    // sevens, or -1 if no such representation exists.
    std::ios_base::sync_with_stdio(false);
    std::cin.tie(NULL);
    int n;
    std::cin >> n;
    // mp[r] is the unique a in [0, 6] with 4*a ≡ r (mod 7); picking the
    // smallest such a maximizes the number of 7s used.
    static const int mp[] = {0, 2, 4, 6, 1, 3, 5};
    const int fours = mp[n % 7];
    if (fours * 4 > n) {
        // Even the minimal count of 4s overshoots n: impossible.
        std::cout << -1 << '\n';
    } else {
        for (int i = 0; i < fours; i++) {
            std::cout << 4;
        }
        const int sevens = (n - fours * 4) / 7;
        for (int j = 0; j < sevens; j++) {
            std::cout << 7;
        }
    }
}
// Post writes body to the DataDog api
func (d *DataDogLogger) Post(body []byte) error {
req, err := http.NewRequestWithContext(d.Context, http.MethodPost, d.URL, bytes.NewBuffer(body))
if err != nil {
_, wErr := fmt.Fprintf(os.Stderr, "error writing logs %v", err)
if wErr != nil {
return wErr
}
return err
}
req.Header.Add("Content-Type", "application/json")
req.Header.Add("DD-API-KEY", d.APIKey)
resp, respErr := d.client.Do(req)
if respErr != nil {
_, wErr := fmt.Fprintf(os.Stderr, "error writing logs %v", respErr)
if wErr != nil {
return wErr
}
return respErr
}
defer resp.Body.Close()
switch resp.StatusCode {
case http.StatusOK:
return nil
default:
_, wErr := fmt.Fprintf(os.Stderr, "error writing logs %d status code returned", resp.StatusCode)
if wErr != nil {
return wErr
}
return ErrAPIResponse
}
} |
/*
 * (English reconstruction of a mojibake GBK-encoded comment.)
 * Enumerate every connected subtree by the identity of its minimum vertex:
 * fix each vertex i as the minimum (ties between equal values broken by
 * vertex index), so each valid subtree is counted exactly once.  While
 * growing the subtree from that root, each neighbour is either within the
 * allowed value window and may be included, or it is left out.
 */
#include<bits/stdc++.h>
#define lson l,m,rt<<1
#define rson m+1,r,rt<<1|1
using namespace std;
typedef long long ll;
const ll INF=1e18;
const ll mod=1000000007;
const ll p=137;
const ll MAXN=2e3+5;
ll n,d,a[MAXN],f[MAXN],root,ans;
vector<ll>G[MAXN];
// Modular addition for operands already reduced into [0, mod).
ll add(ll x,ll y)
{
    const ll sum = x + y;
    return sum >= mod ? sum - mod : sum;
}
// Tree DP for the fixed global `root`: after the call, f[u] is the number of
// connected subtrees containing u whose vertices all have values within
// [a[root], a[root]+d].  Equal values are de-duplicated by vertex index so a
// subtree is only counted when its smallest-index minimum vertex is the root.
void dfs(ll u,ll pre)
{
    f[u]=1;  // the subtree consisting of u alone
    for(ll i=0;i<G[u].size();i++)
    {
        ll v=G[u][i];
        if(v==pre)
        {
            continue;  // do not walk back to the parent
        }
        if(a[v]==a[root]&&v<root)
        {
            continue;  // same value, smaller index: counted when v is the root
        }
        if(a[root]<=a[v]&&a[v]<=a[root]+d)
        {
            dfs(v,u);
            // Either take one of f[v] subtrees rooted at v, or skip v entirely.
            f[u]=f[u]*(f[v]+1)%mod;
        }
    }
}
// Reads d, n, the n vertex values and the n-1 tree edges, then sums f[i]
// over every choice of root i; each valid subtree is counted exactly once,
// for the root that is its (tie-broken) minimum.
// NOTE(review): `ans` and the adjacency lists `G` are never reset between
// iterations of the while loop — correct only for a single test case per
// run; confirm the input format before relying on multi-case behavior.
int main()
{
    // freopen("c://duipai//939cdata.txt", "r", stdin);
    // freopen("c://duipai//939cwa.txt", "w", stdout);
    while(~scanf("%lld %lld",&d,&n))
    {
        // string str;
        // cin>>str;
        // cout<<str<<endl;
        for(ll i=1;i<=n;i++)
        {
            scanf("%lld",&a[i]);
        }
        for(ll i=1;i<n;i++)
        {
            ll u,v;
            scanf("%lld %lld",&u,&v);
            G[u].push_back(v);
            G[v].push_back(u);
        }
        for(ll i=1;i<=n;i++)
        {
            root=i;
            memset(f,0,sizeof(f));  // fresh DP table per root
            dfs(i,0);
            ans=add(ans,f[i]);
        }
        printf("%lld\n",ans);
    }
}
/*
3
1 2
2 3
1 -1 1
*/
|
<reponame>florisweb/JLP<gh_stars>0
import { Question, Word } from './types';
import { App } from './app';
// //@ts-ignore
/**
 * Shuffles `arr` in place and returns it.
 * Uses Fisher–Yates: the previous `sort(() => Math.random() - 0.5)` trick
 * produces a biased, engine-dependent shuffle and is O(n log n).
 */
function shuffleArray(arr:Question[]) {
	for (let i = arr.length - 1; i > 0; i--) {
		const j = Math.floor(Math.random() * (i + 1));
		[arr[i], arr[j]] = [arr[j], arr[i]];
	}
	return arr;
}
const Server = new (function() {
const syncTimeout:number = 1000 * 60 * 2; //ms
this.sync = async function() {
await Promise.all([
this.reviews.getQuestions(true),
this.lessons.getWords(true),
this.wordBaskets.getWordBaskets(true),
this.curLevel.getCurLevelData(true),
]);
}
this.sendRequest = async function() {
//@ts-ignore
let response = await REQUEST.send(...arguments);
if (response == "E_noAuth") App.requestSignIn();
return response;
}
this.reviews = new (function() {
let lastSync:Date = new Date(0);
this.list = [];
this.getQuestions = async function(_forceUpdate: Boolean):Promise<Question[] | Boolean> {
if (new Date().getTime() - lastSync.getTime() < syncTimeout && !_forceUpdate) return this.list;
let result = await Server.sendRequest("database/trainer/getReviewSession.php");
if (!result) return false;
let questions: Question[] = [];
for (let word of result)
{
questions.push({
askMeaning: true,
word: word,
});
if (word.type == 0) continue;
questions.push({
askMeaning: false,
word: word,
});
}
shuffleArray(questions);
this.list = questions;
lastSync = new Date();
return questions;
}
this.updateWordTrainStatus = async function(_question:Question, _correct:Boolean) {
return await Server.sendRequest(
"database/trainer/updateWordTrainStatus.php",
"wordId=" + _question.word.id + "&correct=" + (_correct ? "1" : "0") + "&isMeaning=" + (_question.askMeaning ? "1" : "0")
);
}
} as any);
this.lessons = new (function() {
let lastSync:Date = new Date(0);
this.list = [];
this.getWords = async function(_forceUpdate:boolean):Promise<Word[] | Boolean> {
if (new Date().getTime() - lastSync.getTime() < syncTimeout && !_forceUpdate) return this.list;
let result = await Server.sendRequest("database/trainer/getLessonSession.php");
if (!result) return false;
shuffleArray(result);
this.list = result;
lastSync = new Date();
return result;
}
} as any);
this.wordBaskets = new (function() {
let lastSync:Date = new Date(0);
this.list = [];
this.getWordBaskets = async function(_forceUpdate:boolean):Promise<Word[] | Boolean> {
if (new Date().getTime() - lastSync.getTime() < syncTimeout && !_forceUpdate) return this.list;
let result = await Server.sendRequest("database/trainer/getWordBaskets.php");
if (!result) return false;
this.list = result;
lastSync = new Date();
return result;
}
} as any);
this.curLevel = new (function() {
let lastSync:Date = new Date(0);
this.data = {
level: 0,
progress: 0,
}
this.getCurLevelData = async function(_forceUpdate:boolean):Promise<Word[] | Boolean> {
if (new Date().getTime() - lastSync.getTime() < syncTimeout && !_forceUpdate) return this.list;
let result = await Server.sendRequest("database/trainer/getCurLevelData.php");
if (!result) return false;
this.data = result;
lastSync = new Date();
return result;
}
} as any);
} as any);
export default Server; |
// GetChannelClient gets a channel client
func GetChannelClient(ctx context.Context, trillAdminClient trillian.TrillianAdminClient, trillMapClient trillian.TrillianMapClient, channelMapID int64, tracer opentracing.Tracer) (*tclient.MapClient, error) {
channelLogger.Info().Msg("[DBoM:GetChannelClient] Entered")
span, ctx := opentracing.StartSpanFromContextWithTracer(ctx, tracer, "DBoM:GetChannelClient")
rqst := &trillian.GetTreeRequest{
TreeId: channelMapID,
}
channelTree, treeError := trillAdminClient.GetTree(ctx, rqst)
if treeError != nil {
tracing.LogAndTraceErr(channelLogger, span, treeError, responses.InternalError)
return nil, treeError
}
channelLogger.Info().Msg("[DBoM:GetChannelClient] Finished")
span.Finish()
return tclient.NewMapClientFromTree(trillMapClient, channelTree)
} |
// LengthEQ applies the EQ predicate on the "length" field.
// (ent-generated style: wraps a selector mutation in a BinaryItem predicate.)
func LengthEQ(v int) predicate.BinaryItem {
	return predicate.BinaryItem(func(s *sql.Selector) {
		// WHERE "length" = v
		s.Where(sql.EQ(s.C(FieldLength), v))
	})
}
<reponame>golden-dimension/xs2a
/*
* Copyright 2018-2019 adorsys GmbH & Co KG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.adorsys.psd2.aspsp.profile.service;
import de.adorsys.psd2.aspsp.profile.config.BankProfileSetting;
import de.adorsys.psd2.aspsp.profile.config.ProfileConfigurations;
import de.adorsys.psd2.aspsp.profile.domain.AspspSettings;
import de.adorsys.psd2.aspsp.profile.domain.ais.*;
import de.adorsys.psd2.aspsp.profile.domain.common.CommonAspspProfileBankSetting;
import de.adorsys.psd2.aspsp.profile.domain.common.CommonAspspProfileSetting;
import de.adorsys.psd2.aspsp.profile.domain.piis.PiisAspspProfileSetting;
import de.adorsys.psd2.aspsp.profile.domain.piis.PiisRedirectLinkBankSetting;
import de.adorsys.psd2.aspsp.profile.domain.piis.PiisRedirectLinkSetting;
import de.adorsys.psd2.aspsp.profile.domain.pis.PisAspspProfileBankSetting;
import de.adorsys.psd2.aspsp.profile.domain.pis.PisAspspProfileSetting;
import de.adorsys.psd2.aspsp.profile.domain.pis.PisRedirectLinkSetting;
import de.adorsys.psd2.aspsp.profile.domain.sb.SbAspspProfileBankSetting;
import de.adorsys.psd2.aspsp.profile.domain.sb.SbAspspProfileSetting;
import de.adorsys.psd2.xs2a.core.profile.ScaApproach;
import de.adorsys.psd2.xs2a.core.profile.StartAuthorisationMode;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Optional;
@Service
@RequiredArgsConstructor
public class AspspProfileServiceImpl implements AspspProfileService {
    private final ProfileConfigurations profileConfigurations;

    /**
     * Maps the raw {@link BankProfileSetting} for the given instance into the
     * typed {@link AspspSettings} aggregate: AIS, PIS, PIIS, signing-basket
     * and common settings are each copied field by field.
     */
    @Override
    public AspspSettings getAspspSettings(String instanceId) {
        BankProfileSetting setting = profileConfigurations.getSetting(instanceId);

        // --- AIS (account information service) settings ---
        AisAspspProfileBankSetting aisBankSetting = setting.getAis();
        ConsentTypeBankSetting consentTypeSetting = aisBankSetting.getConsentTypes();
        ConsentTypeSetting consentTypes = new ConsentTypeSetting(consentTypeSetting.isBankOfferedConsentSupported(),
                                                                 consentTypeSetting.isGlobalConsentSupported(),
                                                                 consentTypeSetting.isAvailableAccountsConsentSupported(),
                                                                 consentTypeSetting.getAccountAccessFrequencyPerDay(),
                                                                 consentTypeSetting.getNotConfirmedConsentExpirationTimeMs(),
                                                                 consentTypeSetting.getMaxConsentValidityDays(),
                                                                 consentTypeSetting.isAccountOwnerInformationSupported(),
                                                                 consentTypeSetting.isTrustedBeneficiariesSupported());
        AisRedirectLinkSetting aisRedirectLinkToOnlineBanking = new AisRedirectLinkSetting(aisBankSetting.getRedirectLinkToOnlineBanking().getAisRedirectUrlToAspsp());
        AisTransactionSetting transactionParameters = new AisTransactionSetting(aisBankSetting.getTransactionParameters().getAvailableBookingStatuses(),
                                                                                aisBankSetting.getTransactionParameters().isTransactionsWithoutBalancesSupported(),
                                                                                aisBankSetting.getTransactionParameters().getSupportedTransactionApplicationTypes());
        DeltaReportSetting deltaReportSettings = new DeltaReportSetting(aisBankSetting.getDeltaReportSettings().isEntryReferenceFromSupported(),
                                                                       aisBankSetting.getDeltaReportSettings().isDeltaListSupported());
        OneTimeConsentScaBankSetting scaRequirementsForOneTimeConsentsBankSetting = aisBankSetting.getScaRequirementsForOneTimeConsents();
        OneTimeConsentScaSetting scaRequirementsForOneTimeConsents = new OneTimeConsentScaSetting(scaRequirementsForOneTimeConsentsBankSetting.isScaByOneTimeAvailableAccountsConsentRequired(), scaRequirementsForOneTimeConsentsBankSetting.isScaByOneTimeGlobalConsentRequired());
        AisAspspProfileSetting ais = new AisAspspProfileSetting(consentTypes, aisRedirectLinkToOnlineBanking, transactionParameters, deltaReportSettings, scaRequirementsForOneTimeConsents);

        // --- PIS (payment initiation service) settings ---
        PisAspspProfileBankSetting pisBankSetting = setting.getPis();
        PisRedirectLinkSetting pisRedirectLinkToOnlineBanking = new PisRedirectLinkSetting(pisBankSetting.getRedirectLinkToOnlineBanking().getPisRedirectUrlToAspsp(),
                                                                                          pisBankSetting.getRedirectLinkToOnlineBanking().getPisPaymentCancellationRedirectUrlToAspsp(),
                                                                                          pisBankSetting.getRedirectLinkToOnlineBanking().getPaymentCancellationRedirectUrlExpirationTimeMs());
        PisAspspProfileSetting pis = new PisAspspProfileSetting(pisBankSetting.getSupportedPaymentTypeAndProductMatrix(),
                                                                pisBankSetting.getMaxTransactionValidityDays(),
                                                                pisBankSetting.getNotConfirmedPaymentExpirationTimeMs(),
                                                                pisBankSetting.isPaymentCancellationAuthorisationMandated(),
                                                                pisRedirectLinkToOnlineBanking,
                                                                pisBankSetting.getCountryValidationSupported(),
                                                                pisBankSetting.getSupportedTransactionStatusFormats());

        // --- PIIS settings; the redirect link may be absent in the profile ---
        PiisRedirectLinkSetting piisRedirectLinkSetting = new PiisRedirectLinkSetting(Optional.ofNullable(setting.getPiis().getRedirectLinkToOnlineBanking())
                                                                                          .map(PiisRedirectLinkBankSetting::getPiisRedirectUrlToAspsp)
                                                                                          .orElse(null));
        PiisAspspProfileSetting piis = new PiisAspspProfileSetting(setting.getPiis().getPiisConsentSupported(), piisRedirectLinkSetting);

        // --- Signing basket settings ---
        SbAspspProfileBankSetting sbBankSetting = setting.getSb();
        SbAspspProfileSetting sb = new SbAspspProfileSetting(sbBankSetting.isSigningBasketSupported(),
                                                             sbBankSetting.getSigningBasketMaxEntries(),
                                                             sbBankSetting.getNotConfirmedSigningBasketExpirationTimeMs(),
                                                             sbBankSetting.getSbRedirectUrlToAspsp());

        // --- Common settings; a missing start-authorisation mode falls back to AUTO ---
        CommonAspspProfileBankSetting commonBankSetting = setting.getCommon();
        CommonAspspProfileSetting common = new CommonAspspProfileSetting(commonBankSetting.getScaRedirectFlow(),
                                                                         commonBankSetting.getOauthConfigurationUrl(),
                                                                         commonBankSetting.getStartAuthorisationMode() == null
                                                                             ? StartAuthorisationMode.AUTO
                                                                             : StartAuthorisationMode.getByValue(commonBankSetting.getStartAuthorisationMode()),
                                                                         commonBankSetting.isTppSignatureRequired(),
                                                                         commonBankSetting.isPsuInInitialRequestMandated(),
                                                                         commonBankSetting.getRedirectUrlExpirationTimeMs(),
                                                                         commonBankSetting.getAuthorisationExpirationTimeMs(),
                                                                         commonBankSetting.isForceXs2aBaseLinksUrl(),
                                                                         commonBankSetting.getXs2aBaseLinksUrl(),
                                                                         commonBankSetting.getSupportedAccountReferenceFields(),
                                                                         commonBankSetting.getMulticurrencyAccountLevelSupported(),
                                                                         commonBankSetting.isAisPisSessionsSupported(),
                                                                         commonBankSetting.isCheckTppRolesFromCertificateSupported(),
                                                                         commonBankSetting.getAspspNotificationsSupported(),
                                                                         commonBankSetting.isAuthorisationConfirmationRequestMandated(),
                                                                         commonBankSetting.isAuthorisationConfirmationCheckByXs2a(),
                                                                         commonBankSetting.isCheckUriComplianceToDomainSupported(),
                                                                         commonBankSetting.getTppUriComplianceResponse());
        return new AspspSettings(ais, pis, piis, sb, common);
    }

    /** Returns the SCA approaches enabled for the given instance's profile. */
    @Override
    public List<ScaApproach> getScaApproaches(String instanceId) {
        return profileConfigurations.getSetting(instanceId)
                   .getCommon()
                   .getScaApproachesSupported();
    }

    /** Whether the profile configuration serves more than one tenant. */
    @Override
    public boolean isMultitenancyEnabled() {
        return profileConfigurations.isMultitenancyEnabled();
    }
}
|
<reponame>lucasaciole/-DC-UFSCar-ES2-201701--GrupoFoco
package org.jabref.logic.l10n;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Objects;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
/**
* A bundle containing localized strings.
* It wraps an ordinary resource bundle and performs escaping/unescaping of keys and values similar to
* {@link Localization}. Needed to support JavaFX inline binding.
*/
public class LocalizationBundle extends ResourceBundle {

    /** The wrapped bundle whose keys/values are escaped. */
    private final ResourceBundle baseBundle;

    public LocalizationBundle(ResourceBundle baseBundle) {
        this.baseBundle = Objects.requireNonNull(baseBundle);
    }

    /** Looks up {@code key} in the base bundle, unescaping via {@link Localization}. */
    @Override
    protected Object handleGetObject(String key) {
        return Localization.translate(baseBundle, "message", key);
    }

    /** Enumerates the base bundle's keys with escaping undone. */
    @Override
    public Enumeration<String> getKeys() {
        List<String> unescapedKeys = new ArrayList<>();
        for (String key : Collections.list(baseBundle.getKeys())) {
            unescapedKeys.add(new LocalizationKey(key).getTranslationValue());
        }
        return Collections.enumeration(unescapedKeys);
    }
}
|
<gh_stars>1-10
from ..config import get_logger
class Tool(object):
    def _log(self):
        """Return this tool's logger, creating and caching it on first use.

        The logger name is the lowercased class name, so subclasses get
        their own logger automatically.
        """
        cached = getattr(self, "_cached_log", None)
        if cached is None:
            self._cached_log = get_logger(type(self).__name__.lower())
        return self._cached_log
|
/**
* Diff table in embedded class to avoid immediate instantiation.
*
* @author keve
*
*/
private static final class DiffTable {
/**
* Difference lookup table.
*/
private static final int[][] BIT_PAIRS_DIFF_TABLE = generateTable();
private static int[][] generateTable() {
final int[][] result = new int[256][256];
for (int i = 0; i < 256; i++) {
for (int j = 0; j < 256; j++) {
int x = i;
int y = j;
int d;
int diff = 0;
d = Math.abs(x % 4 - y % 4);
diff += d == 3 ? DIFF_SCALE6 : d;
x /= 4;
y /= 4;
d = Math.abs(x % 4 - y % 4);
diff += d == 3 ? DIFF_SCALE6 : d;
x /= 4;
y /= 4;
d = Math.abs(x % 4 - y % 4);
diff += d == 3 ? DIFF_SCALE6 : d;
x /= 4;
y /= 4;
d = Math.abs(x % 4 - y % 4);
diff += d == 3 ? DIFF_SCALE6 : d;
result[i][j] = diff;
}
}
return result;
}
} |
<filename>src/main/go/src/euler/solver026.go
// COPYRIGHT (C) 2017 barreiro. All Rights Reserved.
// GoLang solvers for Project Euler problems
package euler
import (
"euler/algorithm"
)
// A unit fraction contains 1 in the numerator. The decimal representation of the unit fractions with denominators 2 to 10 are given:
// 1/2 = 0.5
// 1/3 = 0.(3)
// 1/4 = 0.25
// 1/5 = 0.2
// 1/6 = 0.1(6)
// 1/7 = 0.(142857)
// 1/8 = 0.125
// 1/9 = 0.(1)
// 1/10 = 0.1
// Where 0.1(6) means 0.166666..., and has a 1-digit recurring cycle. It can be seen that 1/7 has a 6-digit recurring cycle.
// Find the value of d < 1000 for which 1/d contains the longest recurring cycle in its decimal fraction part.
// Solver026 solves the problem for the stated bound d < 1000.
func Solver026() int {
	return solver026(1000)
}
// solver026 returns the d < N whose unit fraction 1/d has the longest
// recurring decimal cycle.  The answer is the largest prime p < N for which
// 10 is a primitive root modulo p (cycle length p-1).
// NOTE(review): correctness relies on algorithm.PrimesLessThan yielding
// primes in decreasing order, so the first full-period prime found is the
// largest — confirm against the algorithm package.
func solver026(N int) int {
	// For primes: if 10 is a primitive root modulo p, the recurring cycle is equal to p − 1; if not is a factor of p − 1
	for generator := algorithm.PrimesLessThan(N); ; {
		if p := generator(); isPrimeRootTen(p) {
			return p
		}
	}
}
// isPrimeRootTen reports whether 10 is a primitive root modulo the prime p.
// It suffices to check 10^((p-1)/f) != 1 (mod p) for every prime factor f of
// p-1; if no such power collapses to 1, the order of 10 is exactly p-1.
func isPrimeRootTen(p int) bool {
	for f := range algorithm.PrimeFactors(p - 1) {
		if algorithm.PowerMod(10, (p-1)/f, p) == 1 {
			return false
		}
	}
	return true
}
|
// ExpiresAt returns a state option that sets a state value to its zero value at
// the given time.
//
// Values persisted to local storage with the Persist option are removed from
// it.
func ExpiresAt(t time.Time) StateOption {
	return func(s *State) {
		// Only the timestamp is recorded here; expiry is enforced elsewhere.
		s.ExpiresAt = t
	}
}
def pull_urls(page):
    """Return the href of every anchor tag in *page* (an HTML string).

    Anchors without an href attribute are skipped (``href=True`` filter).
    """
    soup = BeautifulSoup(page, "html.parser")
    # The original called soup.prettify() and discarded the result; prettify()
    # only returns a formatted string and has no side effects, so it is removed.
    return [anchor['href'] for anchor in soup.findAll('a', href=True)]
<gh_stars>10-100
import {
Scalar,
SchemaElement,
Type,
TypeTag,
Variant,
LookupName,
matchSchemaElement,
createLookupName
} from '../schema';
import { TsFileBlock, TsFileBlock as ts } from '../ts/ast';
import { variantPayloadTypeName } from '../ts/schema2ast';
import {
BincodeLibTypes,
traverseType,
chainName,
RequiredImport,
flatMap,
ReadOrWrite,
collectRequiredImports,
enumerateStructFields,
CodePiece,
TypeSerDe,
schema2tsBlocks,
SerDeCodeGenInput
} from './sharedPieces';
// Maps each scalar type — plus the Opt/Seq/Nullable combinators — to the name
// of the runtime library function that decodes it from a binary sink.
const ReadFuncs: ReadOrWrite = {
  [Scalar.Bool]: 'read_bool',
  [Scalar.Str]: 'read_str',
  [Scalar.F32]: 'read_f32',
  [Scalar.F64]: 'read_f64',
  [Scalar.I32]: 'read_i32',
  [Scalar.U8]: 'read_u8',
  [Scalar.U16]: 'read_u16',
  [Scalar.U32]: 'read_u32',
  [Scalar.USIZE]: 'read_u64',
  Opt: 'opt_reader',
  Seq: 'seq_reader',
  Nullable: 'nullable_reader'
};
// Type string for a deserializer of `typeStr`, e.g. "Deserializer<Foo>".
const deserializerType = (typeStr: string) =>
  `${BincodeLibTypes.Deserializer}<${typeStr}>`;
// Name of the generated enum reverse-lookup array, e.g. "FooReverseMap".
const enumMappingArrayName = (enumName: string) => `${enumName}ReverseMap`;
// Name of the generated deserializer function for a type, e.g. "readFoo".
const deserFuncName = (typeName: string) => `read${typeName}`;
// Composed deserializer name for a chain of nested types.
const deserializerChainName = (types: Type[], lookup: LookupName): string =>
  chainName(types, ReadFuncs, deserFuncName, lookup);
// Deserializer name for a single (possibly nested) type.
const deserializerNameFor = (type: Type, lookup: LookupName): string =>
  deserializerChainName(traverseType(type), lookup);
const { fromLibrary, fromTypesDeclaration } = RequiredImport;
// Builds, for one schema entry, its deserializer "code piece": the imports it
// needs, the generated TS blocks, the combinator ser/des it relies on, and the
// names of other deserializers it depends on (used for ordering).
const entry2DeserBlocks = (
  name: string,
  entry: SchemaElement,
  lookup: LookupName
) =>
  matchSchemaElement(entry, {
    // Enums are encoded as a u32 index; decode via a reverse-lookup array.
    Enum: ({ variants }): CodePiece => ({
      name,
      requiredImports: [
        fromTypesDeclaration(name),
        fromLibrary(ReadFuncs[Scalar.U32])
      ],
      blocks: [
        genEnumIndexMapping(name, variants),
        generateEnumDeserializer(name)
      ],
      serdes: []
    }),
    // default: (): Piece => ({
    //   requiredImports: [],
    //   blocks: [],
    //   typeDeserializers: []
    // }),
    // An alias simply re-exports the aliased type's deserializer as a const.
    Alias: (type): CodePiece => ({
      name,
      requiredImports: [
        fromTypesDeclaration(name),
        ...collectRequiredImports(type, ReadFuncs, lookup)
      ],
      serdes: generateTypesDeserializers(type, lookup),
      blocks: [
        ts.ConstVar({
          name: deserFuncName(name),
          type: deserializerType(name),
          expression: deserializerNameFor(type, lookup)
        })
      ],
      dependsOn: [deserializerNameFor(type, lookup)]
    }),
    // A newtype decodes the inner type and wraps it in its constructor.
    Newtype: (type): CodePiece => ({
      name,
      requiredImports: [
        fromTypesDeclaration(name),
        ...collectRequiredImports(type, ReadFuncs, lookup)
      ],
      serdes: generateTypesDeserializers(type, lookup),
      blocks: [
        ts.ArrowFunc({
          name: deserFuncName(name),
          returnType: name,
          body: `${name}(${deserializerNameFor(type, lookup)}(sink))`,
          params: [{ name: 'sink', type: BincodeLibTypes.Sink }]
        })
      ],
      dependsOn: [deserializerNameFor(type, lookup)]
    }),
    // Tuples decode each component in order and pass them to the constructor.
    Tuple: (types): CodePiece => ({
      name,
      requiredImports: [
        fromTypesDeclaration(name),
        ...flatMap(types, t => collectRequiredImports(t, ReadFuncs, lookup))
      ],
      blocks: [
        generateTupleDeserializer(
          name,
          types,
          args => `${name}(${args})`,
          true,
          lookup
        )
      ],
      serdes: flatMap(types, t => generateTypesDeserializers(t, lookup)),
      dependsOn: types.map(t => deserializerNameFor(t, lookup))
    }),
    // Structs decode each field in declaration order.
    Struct: (members): CodePiece => {
      const fields = enumerateStructFields(members);
      return {
        name,
        requiredImports: [
          fromTypesDeclaration(name),
          ...flatMap(fields, f =>
            collectRequiredImports(f.type, ReadFuncs, lookup)
          )
        ],
        blocks: [generateStructDeserializer(name, fields, true, lookup)],
        serdes: flatMap(fields, f =>
          generateTypesDeserializers(f.type, lookup)
        ),
        dependsOn: fields.map(f => deserializerNameFor(f.type, lookup))
      };
    },
    // Unions decode a u32 tag then the payload; struct-like variants also get
    // a private payload deserializer of their own.
    Union: (variants): CodePiece => ({
      // this can be potentially sharable?
      name: name,
      requiredImports: [
        fromTypesDeclaration(name),
        fromLibrary(ReadFuncs[Scalar.U32]),
        ...flatMap(
          variants,
          Variant.match({
            Unit: () => [] as RequiredImport[],
            NewType: (_, type) =>
              collectRequiredImports(type, ReadFuncs, lookup),
            Struct: (variantName, members) =>
              flatMap(enumerateStructFields(members), m =>
                collectRequiredImports(m.type, ReadFuncs, lookup)
              ).concat(
                fromTypesDeclaration(variantPayloadTypeName(name, variantName))
              ),
            Tuple: (_, types) =>
              flatMap(types, t => collectRequiredImports(t, ReadFuncs, lookup))
          })
        )
      ],
      blocks: [
        ts.ArrowFunc({
          name: deserFuncName(name),
          body: genUnionDeserializers(name, variants, 'sink', lookup),
          returnType: name,
          params: [{ name: 'sink', type: BincodeLibTypes.Sink }]
        }),
        ...flatMap(
          variants,
          Variant.match({
            Struct: (variantName, members) => [
              generateStructDeserializer(
                variantPayloadTypeName(name, variantName),
                enumerateStructFields(members),
                false,
                lookup
              )
            ],
            default: () => [] as TsFileBlock[]
          })
        )
      ],
      serdes: flatMap(
        variants,
        Variant.match({
          Unit: () => [] as TypeSerDe[],
          NewType: (_, type) => generateTypesDeserializers(type, lookup),
          Struct: (_, members) =>
            flatMap(enumerateStructFields(members), m =>
              generateTypesDeserializers(m.type, lookup)
            ),
          Tuple: (_, types) =>
            flatMap(types, t => generateTypesDeserializers(t, lookup))
        })
      ),
      dependsOn: flatMap(
        variants,
        Variant.match({
          Unit: () => [] as string[],
          NewType: (_, type) => [deserializerNameFor(type, lookup)],
          Struct: (_, members) =>
            enumerateStructFields(members).map(m =>
              deserializerNameFor(m.type, lookup)
            ),
          Tuple: (_, types) => types.map(t => deserializerNameFor(t, lookup))
        })
      )
    })
  });
// Entry point: converts a whole schema into the ordered list of TS file
// blocks (imports, shared combinator readers, one deserializer per entry).
export const schema2deserializersAST = ({
  schema,
  typesDeclarationFile,
  pathToBincodeLib
}: SerDeCodeGenInput): TsFileBlock[] => {
  const lookup = createLookupName(schema);
  return schema2tsBlocks({
    pieces: Object.entries(schema).map(([name, element]) =>
      entry2DeserBlocks(name, element, lookup)
    ),
    serdeName: deserFuncName,
    serdeType: deserializerType,
    serdeChainName: types => deserializerChainName(types, lookup),
    lookup,
    libImports: [BincodeLibTypes.Deserializer],
    pathToBincodeLib,
    typesDeclarationFile,
    readOrWrite: ReadFuncs
  });
};
// Emits `const <Enum>ReverseMap: <Enum>[] = [Enum.A, Enum.B, ...]`, used to
// map a decoded u32 variant index back to the enum member.
const genEnumIndexMapping = (enumName: string, variants: string[]) =>
  ts.ConstVar({
    name: enumMappingArrayName(enumName),
    dontExport: true,
    expression: `[${variants.map(v => `${enumName}.${v}`).join(', ')}]`,
    type: `${enumName}[]`
  });
// Emits the body of a union deserializer: read the u32 variant index, then
// dispatch to the matching variant constructor, decoding its payload if any.
const genUnionDeserializers = (
  unionName: string,
  variants: Variant[],
  sinkArg: string,
  lookup: LookupName
) => {
  const unionCtor = (variantName: string) => `${unionName}.${variantName}`;
  return `{
 switch (${ReadFuncs[Scalar.U32]}(${sinkArg})) {
  ${variants
    .map(v => ({
      exp: Variant.match(v, {
        Unit: unionCtor,
        Struct: name =>
          `${unionCtor(name)}(${deserFuncName(
            variantPayloadTypeName(unionName, name)
          )}(${sinkArg}))`,
        NewType: (name, type) =>
          `${unionCtor(name)}(${deserializerNameFor(
            type,
            lookup
          )}(${sinkArg}))`,
        Tuple: (name, types) =>
          `${unionCtor(name)}(${types
            .map(type => `${deserializerNameFor(type, lookup)}(${sinkArg})`)
            .join(', ')})`
      })
    }))
    .map(({ exp }, i) => `case ${i}: return ${exp};`)
    .join('\n')}
 };
 throw new Error("bad variant index for ${unionName}");
}`;
};
// Walks a type from the outside in and collects the combinator deserializers
// (opt_reader/seq_reader/nullable_reader chains) that the type requires.
// Scalars and direct references need no dedicated entry.
const generateTypesDeserializers = (
  type: Type,
  lookup: LookupName,
  typeDeserializers: TypeSerDe[] = []
): TypeSerDe[] => {
  switch (type.tag) {
    case TypeTag.Scalar:
      // skip scalars
      return typeDeserializers;
    case TypeTag.Vec:
    case TypeTag.Nullable:
    case TypeTag.Option:
      // Wrap the inner type's reader chain in the matching combinator, then
      // recurse into the inner type.
      return generateTypesDeserializers(
        type.value,
        lookup,
        typeDeserializers.concat({
          typeChain: traverseType(type),
          toOrFrom: type,
          body: `${
            type.tag === TypeTag.Option
              ? ReadFuncs.Opt
              : type.tag === TypeTag.Nullable
              ? ReadFuncs.Nullable
              : ReadFuncs.Seq
          }(${deserializerChainName(traverseType(type.value), lookup)})`
        })
      );
  }
  // skip direct references
  return typeDeserializers;
};
// Emits `read<Enum>`: decode a u32 and index into the reverse-lookup array.
const generateEnumDeserializer = (enumName: string): TsFileBlock =>
  ts.ArrowFunc({
    name: deserFuncName(enumName),
    returnType: enumName,
    params: [{ name: 'sink', type: BincodeLibTypes.Sink }],
    body: `${enumMappingArrayName(enumName)}[${ReadFuncs[Scalar.U32]}(sink)]`
  });
// Emits `read<Name>`: decode each field into a const in declaration order
// (field order must match the wire order), then assemble the object literal.
const generateStructDeserializer = (
  name: string,
  fields: { name: string; type: Type }[],
  shouldExport: boolean,
  lookup: LookupName
): TsFileBlock =>
  ts.ArrowFunc({
    name: deserFuncName(name),
    wrappedInBraces: true,
    returnType: name,
    dontExport: !shouldExport || undefined,
    params: [{ name: 'sink', type: BincodeLibTypes.Sink }],
    body: `${fields
      .map(
        f => `const ${f.name} = ${deserializerNameFor(f.type, lookup)}(sink);`
      )
      .join('\n')}
 return {${fields.map(f => f.name).join(', ')}};
`
  });
// Emits `read<Tuple>`: decode each component in order and feed the argument
// list to the provided constructor expression.
const generateTupleDeserializer = (
  tupleName: string,
  types: Type[],
  tupleCtorFunc: (argsStr: string) => string,
  shouldExport: boolean,
  lookup: LookupName
): TsFileBlock =>
  ts.ArrowFunc({
    name: deserFuncName(tupleName),
    returnType: tupleName,
    body: tupleCtorFunc(
      `${types
        .map(type => `${deserializerNameFor(type, lookup)}(sink)`)
        .join(', ')}`
    ),
    dontExport: !shouldExport || undefined,
    params: [{ name: 'sink', type: BincodeLibTypes.Sink }]
  });
|
package fr.dofuscraft.dofuscraftstuffs.items;
import fr.dofuscraft.dofuscraftstuffs.DofuscraftStuffs;
import fr.dofuscraft.dofuscraftstuffs.init.ModArmors;
import fr.dofuscraft.dofuscraftstuffs.init.ModItems;
import fr.dofuscraft.dofuscraftstuffs.utils.References;
import net.minecraft.entity.Entity;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
/**
 * Armor item whose worn texture is resolved from a shared base name
 * ({@code armorName}); registers itself with the mod's armor list on creation.
 */
public class DCItemArmor extends ItemArmor
{
    /** Base name used to resolve the two armor texture layer files. */
    private final String armorName;

    public DCItemArmor(String name, ArmorMaterial materialIn, int renderIndexIn, EntityEquipmentSlot equipmentSlotIn, String armorName)
    {
        super(materialIn, renderIndexIn, equipmentSlotIn);
        this.armorName = armorName;
        setRegistryName(name).setUnlocalizedName(name);
        setCreativeTab(DofuscraftStuffs.armors);
        ModArmors.INSTANCE.armors.add(this);
    }

    /** Armor pieces from this mod never lose durability. */
    @Override
    public boolean isDamageable()
    {
        return false;
    }

    /**
     * Vanilla armor texture convention: layer 2 is used for leggings, layer 1
     * for every other equipment slot.
     */
    @Override
    public String getArmorTexture(ItemStack stack, Entity entity, EntityEquipmentSlot slot, String type)
    {
        String layer = (slot == EntityEquipmentSlot.LEGS) ? "_layer_2.png" : "_layer_1.png";
        return References.MODID + ":textures/models/armor/" + this.armorName + layer;
    }
}
|
<gh_stars>1-10
// NOTE(review): IDE inspection test fixture — the <error descr="..."> markers
// encode the expected highlighting, so the code below must stay exactly as
// written; do not "fix" the intentionally ill-typed compound assignments.
class MyTest {
  static <T> T m() {
    return null;
  }

  void n(boolean b) {
    double ex = 0.0;
    ex <error descr="Operator '+' cannot be applied to 'double', 'java.lang.Object'">+=</error> (b ? MyTest.m() : 0);
    ex <error descr="Operator '*' cannot be applied to 'double', 'java.lang.Object'">*=</error> (b ? MyTest.m() : 0);
    boolean s = false;
    s <error descr="Operator '&' cannot be applied to 'boolean', 'java.lang.Object'">&=</error> (b ? MyTest.m() : true);
  }
}
def _spacy_tokenizer_lemmatizer(text):
    """Tokenize ``text`` with the module-level spaCy ``parser`` and return
    the lemma of every token, in document order.

    # NOTE(review): relies on a global ``parser`` being initialized elsewhere
    # in the module — confirm it is loaded before first call.
    """
    document = parser(text)
    return [token.lemma_ for token in document]
Optimal Policy for the Replacement of Industrial Systems Subject to Technological Obsolescence - Using Genetic Algorithm
The technological obsolescence of industrial systems is characterized by the existence of challenger units possessing identical functionalities but with improved performance. This paper aims to define a new approach that makes it possible to obtain the optimal number of obsolete industrial systems which should be replaced by new-type units. This approach presents a new point of view compared with previous works available in the literature. The main idea and the originality of our approach is that we apply a genetic algorithm (GA) by considering the failure frequency, the influence of the environment/safety factors of the old-type systems and the purchase/implementation cost of the new-type units. These parameters are introduced in order to optimize this type of replacement in the context of engineering. |
/*
*
* 36TH ELEMENT LICENSE 1.0
*
* This is a project of 36TH ELEMENT TECHNOLOGIES PVT. LTD.
* This project is a closed source and proprietary software package.
* None of the contents of this software is to be used for uses not intended,
* And no one is to interface with the software in methods not defined or previously decided by 36TH ELEMENT TECHNOLOGIES PVT. LTD.
* No changes should be done to this project without prior authorization by 36TH ELEMENT TECHNOLOGIES PVT. LTD.
* 2018 (C) 36TH ELEMENT TECHNOLOGIES PVT. LTD.
*
*
*/
package cloudDBEndpoints;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import security.Auth;
/**
 * Endpoint that strips the user padding from a device token and returns the
 * short device key, provided the caller presents the expected access token.
 *
 * @author karan
 */
public class ExtractDeviceKey extends HttpServlet {

    /**
     * Handles POST requests.
     *
     * @param request expects "access_token" and "device_token" parameters
     * @param response JSON body carrying either a "success" or an "error" key
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        try {
            if ("<KEY>".equals(request.getParameter("access_token"))) {
                PrintWriter out = response.getWriter();
                String smallDeviceToken = Auth.removeUserPaddingToken(request.getParameter("device_token"));
                out.println(
                        "{"
                        + "\"success\":\""
                        + smallDeviceToken
                        + "\""
                        + "}");
            } else {
                PrintWriter out = response.getWriter();
                out.println(
                        "{"
                        + "\"error\":\"Failed Authentication\""
                        + "}");
            }
        } catch (Exception e) {
            // Fix: this catch block previously swallowed every failure
            // silently, leaving the client with an empty 200 response.
            // Report a server error with a JSON body instead.
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.getWriter().println(
                    "{"
                    + "\"error\":\"Internal Server Error\""
                    + "}");
        }
    }
}
|
    /**
     * Parses text from the beginning of the given string to produce a date in UTC using the given pattern and the
     * default date format symbols for the UTC. The method may not use the entire text of the given string.
     * <p>
     *
     * @param pattern the pattern to parse.
     * @param stringDate A <code>String</code> whose beginning should be parsed.
     * @param isLenient A <code>boolean</code>judge DateFormat parse the date Lenient or not.
     * @return A <code>Date</code> parsed from the string; when the pattern contains a zone designator
     *         ('z' or 'Z' outside quoted text), a timestamp carrying the parsed zone offset is returned instead.
     * @throws ParseException
     * @exception ParseException if the beginning of the specified string cannot be parsed.
     *
     * {talendTypes} Date
     *
     * {Category} TalendDate
     *
     * {param} string("yyyy-MM-dd HH:mm:ss") pattern : the pattern to parse
     *
     * {param} string("") stringDate : A <code>String</code> whose beginning should be parsed
     *
     * {param} boolean(true) isLenient : Judge DateFormat parse the date Lenient or not.
     *
     * {example} parseDateInUTC("dd-MM-yyyy HH:mm:ss", "29-02-1979 23:59:59", false) #
     */
    public synchronized static Date parseDateInUTC(String pattern, String stringDate, boolean isLenient) {
        try {
            // Scan the pattern for a timezone designator ('z' or 'Z'), ignoring
            // anything inside single-quoted literal sections.
            boolean hasZone = false;
            boolean inQuote = false;
            char[] ps = pattern.toCharArray();
            for (char p : ps) {
                if (p == '\'') {
                    inQuote = !inQuote;
                } else if (!inQuote && (p == 'Z' || p == 'z')) {
                    hasZone = true;
                    break;
                }
            }
            // The parser instance is shared per pattern (hence the synchronized
            // method): switch it to UTC for this parse and restore it afterwards.
            DateFormat df = FastDateParser.getInstance(pattern);
            TimeZone originalTZ = df.getTimeZone();
            df.setTimeZone(TimeZone.getTimeZone("UTC"));
            df.setLenient(isLenient);
            Date d = df.parse(stringDate);
            df.setTimeZone(originalTZ);
            if (hasZone) {
                // Rebuild a "GMT+hh:mm" zone id from the parsed zone offset and
                // attach it to the timestamp so the offset survives.
                int offset = df.getCalendar().get(Calendar.ZONE_OFFSET);
                char sign = offset >= 0 ? '+' : '-';
                int hour = Math.abs(offset) / 1000 / 60 / 60;
                int min = Math.abs(offset) / 1000 / 60 % 60;
                String minStr = min < 10 ? "0" + min : min + "";
                TalendTimestampWithTZ tstz = new TalendTimestampWithTZ(new java.sql.Timestamp(d.getTime()),
                        TimeZone.getTimeZone("GMT" + sign + hour + ":" + minStr));
                return tstz;
            } else {
                return d;
            }
        } catch (ParseException e) {
            // Wrap so callers don't need a checked-exception signature.
            throw new RuntimeException(e);
        }
    }
/**
* This class handles all the functions related to an Institute.
*/
@Service
@Transactional
public class InstituteService implements UserDetailsService {
private InstituteRepository instituteRepository;
private InstituteClassRepository instituteClassRepository;
private StudentRepository studentRepository;
@Autowired
public InstituteService(InstituteRepository instituteRepository, InstituteClassRepository instituteClassRepository, StudentRepository studentRepository) {
this.instituteRepository = instituteRepository;
this.instituteClassRepository = instituteClassRepository;
this.studentRepository = studentRepository;
}
@Override
public UserDetails loadUserByUsername(String s) throws UsernameNotFoundException {
if(s.equals("[email protected]")){ // Super user
return new User("[email protected]", "superuser@osbs123", Collections.emptyList());
}
Institute instituteByEmail = instituteRepository.findInstituteByEmail(s);
return new User(instituteByEmail.getEmail(), new String(instituteByEmail.getPassword()), Collections.emptyList());
}
public List<Institute> getAllInstitutes() {
return instituteRepository.findAll();
}
public Institute registerInstitute(String email, char[] password, String address, String contact) {
return instituteRepository.save(new Institute(email, password, address, contact));
}
public Optional<Institute> getInstituteById(long instituteId) {
return instituteRepository.findById(instituteId);
}
public InstituteClass createClass(String subject, String day, String time, String lectureHall, String instructor, BigDecimal monthlyFee, Institute institute){
return instituteClassRepository.save(new InstituteClass(subject, day, time, lectureHall, instructor, monthlyFee, institute));
}
/**
* This method calculates the monthly bill for a student
* @param instituteId - id of the Institute for which monthly bill is to be calculated for all classes undertaken by student at the institute
* @param studentId - id of the Student for which monthly bill is to be calculated for all classes at an institute undertaken by the student
* @return monthly bill
*/
public MonthlyBill calculateMonthlyBillForStudent(long instituteId, long studentId) throws Exception {
Student student = studentRepository.findById(studentId).orElseThrow(() -> new Exception(String.format("Student not found for ID: %s.", studentId)));
Set<InstituteClass> enrolledClassesAtSpecificInstitute = student.getRegisteredClasses().stream().filter(instituteClass -> instituteClass.getInstitute().getInstituteId() == instituteId).collect(Collectors.toSet());
Map<String, BigDecimal> feeBreakdown = new HashMap<>();
BigDecimal grandTotal = BigDecimal.ZERO;
for (InstituteClass enrolledClass : enrolledClassesAtSpecificInstitute) {
feeBreakdown.put(enrolledClass.getSubject(), enrolledClass.getMonthlyFee()); // Add entry to monthly bill with subject of class and monthly fee
grandTotal = grandTotal.add(enrolledClass.getMonthlyFee()); // Add monthly fee of the class to the grand total
}
return new MonthlyBill(studentId, student.getName(), feeBreakdown, grandTotal);
}
public Set<InstituteClass> getAllClassesByInstitute(long instituteId) {
return instituteClassRepository.getAllByInstituteInstituteId(instituteId);
}
public Institute getInstituteByEmail(String email) {
return instituteRepository.findInstituteByEmail(email);
}
} |
#include <cstdio>
#include <algorithm>
#include <set>
#include <utility>
#include <vector>
#include <cmath>
using namespace std;
const int CMax = 100002;

int n;
long long p, A[CMax][2], B[CMax], C[CMax], D[CMax];
double ats = 0;  // accumulated expected value over adjacent (cyclic) pairs

int main(){
    // Fix: %I64d is an MSVC-only length modifier (undefined behavior with
    // other runtimes); %lld is the standard format for long long.
    scanf("%d %lld", &n, &p);
    for (int i = 0; i < n; i++){
        scanf("%lld %lld", &A[i][0], &A[i][1]);
        // B[i] = number of multiples of p in the inclusive range [A[i][0], A[i][1]]
        B[i] = (A[i][1] / p) - (A[i][0] / p);
        if (A[i][0] % p == 0) B[i]++;
        // C[i] = total number of integers in the range
        C[i] = A[i][1] - A[i][0] + 1;
        if (i > 0){
            // D[i-1] = ordered pairs (x from range i-1, y from range i) where
            // at least one of x, y is divisible by p (counting each side once).
            D[i-1] = (B[i-1] * C[i]) + ((C[i-1] - B[i-1]) * B[i]);
            ats += (double)D[i-1] / (double)(C[i-1] * C[i]);
        }
    }
    // Close the cycle: pair the last range with the first.
    D[n-1] = (B[n-1] * C[0]) + ((C[n-1] - B[n-1]) * B[0]);
    ats += (double)D[n-1] / (double)(C[n-1] * C[0]);
    printf("%.8lf\n", ats*(double)2000);
}
|
Blockchain Based E-Voting System: Open Issues and Challenges
Blockchain technology has become very popular and has penetrated many domains, mostly due to the success of cryptocurrencies. Blockchain technology offers decentralized nodes for e-voting and is used to create e-voting systems, mainly because of its end-to-end verification benefits. This technology is an excellent replacement for traditional e-voting solutions, offering distributed performance, reliability and security. The following article provides an overview of e-voting systems based on blockchain technology. The main purpose of this analysis is to examine the current state of blockchain-based voting systems and their open challenges, in order to anticipate future directions.
use ::reqwest::blocking::Client;
use clap::{App, Arg};
use console::Term;
use const_format::formatcp;
use log::*;
use std::path::Path;
mod api;
mod app;
mod branch_protection_rules;
mod config;
mod context;
mod models;
mod options;
mod repository_settings;
mod topic_operation;
const PKG_NAME: &str = env!("CARGO_PKG_NAME");
const PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
const USER_AGENT: &str = formatcp!("{}/{}", PKG_NAME, PKG_VERSION);
/// Entry point: parses CLI arguments, configures logging, builds an
/// authenticated GitHub API client and hands control to `app::run`.
fn main() -> Result<(), anyhow::Error> {
    let terminal = Term::stdout();

    let matches = App::new(clap::crate_name!())
        .version(clap::crate_version!())
        .author(clap::crate_authors!())
        .about(clap::crate_description!())
        .arg(
            Arg::new("config")
                .short('c')
                .long("config")
                .value_name("FILE")
                .help("Path to config file to use")
                .default_value("config.json")
                .takes_value(true),
        )
        .arg(
            Arg::new("v")
                .short('v')
                .multiple_occurrences(true)
                .help("Sets the level of verbosity"),
        )
        .arg(
            Arg::new("fix")
                .long("fix")
                .help("Try to fix the issues found"),
        )
        .arg(
            Arg::new("repo")
                .long("repo")
                .multiple_occurrences(true)
                .help("Target GitHub repository")
                .takes_value(true),
        )
        .arg(
            Arg::new("user")
                .long("user")
                .multiple_occurrences(true)
                .help("Target GitHub user")
                .takes_value(true),
        )
        .arg(
            Arg::new("organization")
                .long("organization")
                .alias("org")
                .multiple_occurrences(true)
                .help("Target GitHub organization")
                .takes_value(true),
        )
        .get_matches();

    // -v raises verbosity: Info (default) -> Debug -> Trace.
    let log_level = match matches.occurrences_of("v") {
        0 => log::LevelFilter::Info,
        1 => log::LevelFilter::Debug,
        _ => log::LevelFilter::Trace,
    };
    env_logger::Builder::new()
        .format_timestamp(None)
        .format_target(false)
        .filter_module(module_path!(), log_level)
        .init();

    let repos: Vec<&str> = matches.values_of("repo").unwrap_or_default().collect();
    let users: Vec<&str> = matches.values_of("user").unwrap_or_default().collect();
    let organizations: Vec<&str> = matches
        .values_of("organization")
        .unwrap_or_default()
        .collect();

    let config_path = Path::new(matches.value_of("config").expect("value MUST be set"));
    let config = config::load_config(config_path)?;

    // Fix: the missing-token branch used to bail out with the placeholder
    // message "xD"; fail early with an actionable message instead.
    let github_api_token: String = std::env::var("GITHUB_API_TOKEN")
        .map_err(|_| anyhow::anyhow!("the GITHUB_API_TOKEN environment variable must be set"))?;

    info!("Github API root: {:?}", config.github_api_root);

    // Every request carries the bearer token and our user agent.
    let client = Client::builder()
        .user_agent(USER_AGENT)
        .default_headers(
            std::iter::once((
                reqwest::header::AUTHORIZATION,
                reqwest::header::HeaderValue::from_str(&format!("Bearer {}", github_api_token))?,
            ))
            .collect(),
        )
        .build()?;
    let api_client = api::new(client, config.github_api_root.as_str())?;

    // Without --fix, both repository settings and branch protection rules
    // run in dry-run mode.
    let options = options::Options {
        dry_run: !matches.is_present("fix"),
        dry_run_bpr: !matches.is_present("fix"),
    };

    let ctx = context::Context {
        config,
        terminal,
        api_client,
        options,
    };
    // info!("Config: {:?}", ctx.config.topics);
    app::run(ctx, repos, users, organizations)?;
    // terminal.write_line("")?;
    Ok(())
}
|
<gh_stars>1-10
/* vi: set sw=4 ts=4: */
/*
* getopt.c - Enhanced implementation of BSD getopt(1)
* Copyright (c) 1997, 1998, 1999, 2000 <NAME> <<EMAIL>>
*
* Licensed under GPLv2 or later, see file LICENSE in this source tree.
*/
/*
* Version 1.0-b4: Tue Sep 23 1997. First public release.
* Version 1.0: Wed Nov 19 1997.
* Bumped up the version number to 1.0
* Fixed minor typo (CSH instead of TCSH)
* Version 1.0.1: Tue Jun 3 1998
* Fixed sizeof instead of strlen bug
* Bumped up the version number to 1.0.1
* Version 1.0.2: Thu Jun 11 1998 (not present)
* Fixed gcc-2.8.1 warnings
* Fixed --version/-V option (not present)
* Version 1.0.5: Tue Jun 22 1999
* Make -u option work (not present)
* Version 1.0.6: Tue Jun 27 2000
* No important changes
* Version 1.1.0: Tue Jun 30 2000
* Added NLS support (partly written by <NAME>
* <<EMAIL>>)
* Ported to Busybox - <NAME> <<EMAIL>>
* Removed --version/-V and --help/-h
* Removed parse_error(), using bb_error_msg() from Busybox instead
* Replaced our_malloc with xmalloc and our_realloc with xrealloc
*
*/
//usage:#define getopt_trivial_usage
//usage: "[OPTIONS] [--] OPTSTRING PARAMS"
//usage:#define getopt_full_usage "\n\n"
//usage: IF_LONG_OPTS(
//usage: " -a,--alternative Allow long options starting with single -"
//usage: "\n -l,--longoptions=LOPT[,...] Long options to be recognized"
//usage: "\n -n,--name=PROGNAME The name under which errors are reported"
//usage: "\n -o,--options=OPTSTRING Short options to be recognized"
//usage: "\n -q,--quiet Disable error reporting by getopt(3)"
//usage: "\n -Q,--quiet-output No normal output"
//usage: "\n -s,--shell=SHELL Set shell quoting conventions"
//usage: "\n -T,--test Test for getopt(1) version"
//usage: "\n -u,--unquoted Don't quote the output"
//usage: )
//usage: IF_NOT_LONG_OPTS(
//usage: " -a Allow long options starting with single -"
//usage: "\n -l LOPT[,...] Long options to be recognized"
//usage: "\n -n PROGNAME The name under which errors are reported"
//usage: "\n -o OPTSTRING Short options to be recognized"
//usage: "\n -q Disable error reporting by getopt(3)"
//usage: "\n -Q No normal output"
//usage: "\n -s SHELL Set shell quoting conventions"
//usage: "\n -T Test for getopt(1) version"
//usage: "\n -u Don't quote the output"
//usage: )
//usage: "\n"
//usage: "\nExample:"
//usage: "\n"
//usage: "\nO=`getopt -l bb: -- ab:c:: \"$@\"` || exit 1"
//usage: "\neval set -- \"$O\""
//usage: "\nwhile true; do"
//usage: "\n case \"$1\" in"
//usage: "\n -a) echo A; shift;;"
//usage: "\n -b|--bb) echo \"B:'$2'\"; shift 2;;"
//usage: "\n -c) case \"$2\" in"
//usage: "\n \"\") echo C; shift 2;;"
//usage: "\n *) echo \"C:'$2'\"; shift 2;;"
//usage: "\n esac;;"
//usage: "\n --) shift; break;;"
//usage: "\n *) echo Error; exit 1;;"
//usage: "\n esac"
//usage: "\ndone"
//usage:
//usage:#define getopt_example_usage
//usage: "$ cat getopt.test\n"
//usage: "#!/bin/sh\n"
//usage: "GETOPT=`getopt -o ab:c:: --long a-long,b-long:,c-long:: \\\n"
//usage: " -n 'example.busybox' -- \"$@\"`\n"
//usage: "if [ $? != 0 ]; then exit 1; fi\n"
//usage: "eval set -- \"$GETOPT\"\n"
//usage: "while true; do\n"
//usage: " case $1 in\n"
//usage: " -a|--a-long) echo \"Option a\"; shift;;\n"
//usage: " -b|--b-long) echo \"Option b, argument '$2'\"; shift 2;;\n"
//usage: " -c|--c-long)\n"
//usage: " case \"$2\" in\n"
//usage: " \"\") echo \"Option c, no argument\"; shift 2;;\n"
//usage: " *) echo \"Option c, argument '$2'\"; shift 2;;\n"
//usage: " esac;;\n"
//usage: " --) shift; break;;\n"
//usage: " *) echo \"Internal error!\"; exit 1;;\n"
//usage: " esac\n"
//usage: "done\n"
#if ENABLE_FEATURE_GETOPT_LONG
# include <getopt.h>
#endif
#include "libbb.h"
/* NON_OPT is the code that is returned when a non-option is found in '+'
mode */
/* Pseudo option codes: values placed in long_options[].val / returned by
 * getopt(3) that are not real short-option characters. */
enum {
	NON_OPT = 1,
#if ENABLE_FEATURE_GETOPT_LONG
/* LONG_OPT is the code that is returned when a long option is found. */
	LONG_OPT = 2
#endif
};

/* For finding activated option flags. Must match getopt32 call! */
enum {
	OPT_o	= 0x1,	// -o
	OPT_n	= 0x2,	// -n
	OPT_q	= 0x4,	// -q
	OPT_Q	= 0x8,	// -Q
	OPT_s	= 0x10,	// -s
	OPT_T	= 0x20,	// -T
	OPT_u	= 0x40,	// -u
#if ENABLE_FEATURE_GETOPT_LONG
	OPT_a	= 0x80,	// -a
	OPT_l	= 0x100, // -l
#endif
	SHELL_IS_TCSH = 0x8000, /* hijack this bit for other purposes */
};

/* Readability shorthands over the global option bitmask. */
/* 0 is getopt_long, 1 is getopt_long_only */
#define alternative  (option_mask32 & OPT_a)

#define quiet_errors (option_mask32 & OPT_q)
#define quiet_output (option_mask32 & OPT_Q)
#define quote        (!(option_mask32 & OPT_u))
#define shell_TCSH   (option_mask32 & SHELL_IS_TCSH)
/*
* This function 'normalizes' a single argument: it puts single quotes around
* it and escapes other special characters. If quote is false, it just
* returns its argument.
* Bash only needs special treatment for single quotes; tcsh also recognizes
* exclamation marks within single quotes, and nukes whitespace.
* This function returns a pointer to a buffer that is overwritten by
* each call.
*/
/* Quote one argument for re-consumption by the target shell (see the
 * function comment above). Returns a pointer into a buffer that is
 * overwritten (and, with FEATURE_CLEAN_UP, freed) on the next call. */
static const char *normalize(const char *arg)
{
	char *bufptr;
#if ENABLE_FEATURE_CLEAN_UP
	/* One static result buffer: release the previous result each call. */
	static char *BUFFER = NULL;
	free(BUFFER);
#else
	char *BUFFER;
#endif
	if (!quote) { /* Just copy arg */
		BUFFER = xstrdup(arg);
		return BUFFER;
	}
	/* Each character in arg may take up to four characters in the result:
	   For a quote we need a closing quote, a backslash, a quote and an
	   opening quote! We need also the global opening and closing quote,
	   and one extra character for '\0'. */
	BUFFER = xmalloc(strlen(arg)*4 + 3);
	bufptr = BUFFER;
	*bufptr ++= '\'';
	while (*arg) {
		if (*arg == '\'') {
			/* Quote: replace it with: '\'' */
			*bufptr ++= '\'';
			*bufptr ++= '\\';
			*bufptr ++= '\'';
			*bufptr ++= '\'';
		} else if (shell_TCSH && *arg == '!') {
			/* Exclamation mark: replace it with: \! */
			*bufptr ++= '\'';
			*bufptr ++= '\\';
			*bufptr ++= '!';
			*bufptr ++= '\'';
		} else if (shell_TCSH && *arg == '\n') {
			/* Newline: replace it with: \n */
			*bufptr ++= '\\';
			*bufptr ++= 'n';
		} else if (shell_TCSH && isspace(*arg)) {
			/* Non-newline whitespace: replace it with \<ws> */
			*bufptr ++= '\'';
			*bufptr ++= '\\';
			*bufptr ++= *arg;
			*bufptr ++= '\'';
		} else
			/* Just copy */
			*bufptr ++= *arg;
		arg++;
	}
	/* Close the surrounding quote and terminate. */
	*bufptr ++= '\'';
	*bufptr ++= '\0';
	return BUFFER;
}
/*
* Generate the output. argv[0] is the program name (used for reporting errors).
* argv[1..] contains the options to be parsed. argc must be the number of
* elements in argv (ie. 1 if there are no options, only the program name),
* optstr must contain the short options, and longopts the long options.
* Other settings are found in global variables.
*/
/* When long-option support is compiled out, drop the unused parameter. */
#if !ENABLE_FEATURE_GETOPT_LONG
#define generate_output(argv,argc,optstr,longopts) \
	generate_output(argv,argc,optstr)
#endif
static int generate_output(char **argv, int argc, const char *optstr, const struct option *longopts)
{
	int exit_code = 0; /* We assume everything will be OK */
	int opt;
#if ENABLE_FEATURE_GETOPT_LONG
	int longindex;
#endif
	const char *charptr;

	if (quiet_errors) /* No error reporting from getopt(3) */
		opterr = 0;

	/* We used it already in main() in getopt32(),
	 * we *must* reset getopt(3): */
#ifdef __GLIBC__
	optind = 0;
#else /* BSD style */
	optind = 1;
	/* optreset = 1; */
#endif

	/* Re-run getopt(3) over the user's arguments and echo each recognized
	 * option (and its argument, shell-quoted) to stdout. */
	while (1) {
		opt =
#if ENABLE_FEATURE_GETOPT_LONG
			alternative ?
			getopt_long_only(argc, argv, optstr, longopts, &longindex) :
			getopt_long(argc, argv, optstr, longopts, &longindex);
#else
			getopt(argc, argv, optstr);
#endif
		if (opt == -1)
			break;
		if (opt == '?' || opt == ':' )
			exit_code = 1;
		else if (!quiet_output) {
#if ENABLE_FEATURE_GETOPT_LONG
			if (opt == LONG_OPT) {
				printf(" --%s", longopts[longindex].name);
				if (longopts[longindex].has_arg)
					printf(" %s",
						normalize(optarg ? optarg : ""));
			} else
#endif
			if (opt == NON_OPT)
				printf(" %s", normalize(optarg));
			else {
				printf(" -%c", opt);
				/* Short option takes an argument iff it is followed
				 * by ':' in optstr. */
				charptr = strchr(optstr, opt);
				if (charptr != NULL && *++charptr == ':')
					printf(" %s",
						normalize(optarg ? optarg : ""));
			}
		}
	}

	/* Emit "--" followed by the remaining non-option parameters. */
	if (!quiet_output) {
		printf(" --");
		while (optind < argc)
			printf(" %s", normalize(argv[optind++]));
		bb_putchar('\n');
	}
	return exit_code;
}
#if ENABLE_FEATURE_GETOPT_LONG
/*
* Register several long options. options is a string of long options,
* separated by commas or whitespace.
* This nukes options!
*/
/* Append entries parsed from 'options' (comma/whitespace-separated names,
 * with trailing ':' = required argument, '::' = optional argument) to the
 * NULL-terminated long_options array. Mutates 'options' via strtok and
 * truncates the ':' suffixes in place; returns the (re)allocated array. */
static struct option *add_long_options(struct option *long_options, char *options)
{
	int long_nr = 0;
	int arg_opt, tlen;
	char *tokptr = strtok(options, ", \t\n");

	/* Find the current end of the array so new entries are appended. */
	if (long_options)
		while (long_options[long_nr].name)
			long_nr++;
	while (tokptr) {
		arg_opt = no_argument;
		tlen = strlen(tokptr);
		if (tlen) {
			tlen--;
			if (tokptr[tlen] == ':') {
				arg_opt = required_argument;
				if (tlen && tokptr[tlen-1] == ':') {
					tlen--;
					arg_opt = optional_argument;
				}
				tokptr[tlen] = '\0';
				if (tlen == 0)
					bb_error_msg_and_die("empty long option specified");
			}
			long_options = xrealloc_vector(long_options, 4, long_nr);
			long_options[long_nr].has_arg = arg_opt;
			/*long_options[long_nr].flag = NULL; - xrealloc_vector did it */
			long_options[long_nr].val = LONG_OPT;
			long_options[long_nr].name = xstrdup(tokptr);
			long_nr++;
			/*memset(&long_options[long_nr], 0, sizeof(long_options[0])); - xrealloc_vector did it */
		}
		tokptr = strtok(NULL, ", \t\n");
	}
	return long_options;
}
#endif
/*
 * Record the quoting convention for the target shell: bash/sh is the
 * default, tcsh/csh sets the SHELL_IS_TCSH bit, and anything else warns
 * and falls back to bash-style quoting.
 */
static void set_shell(const char *new_shell)
{
	if (strcmp(new_shell, "bash") == 0 || strcmp(new_shell, "sh") == 0)
		return;
	if (strcmp(new_shell, "tcsh") == 0 || strcmp(new_shell, "csh") == 0) {
		option_mask32 |= SHELL_IS_TCSH;
		return;
	}
	bb_error_msg("unknown shell '%s', assuming bash", new_shell);
}
/* Exit codes:
* 0) No errors, successful operation.
* 1) getopt(3) returned an error.
* 2) A problem with parameter parsing for getopt(1).
* 3) Internal error, out of memory
* 4) Returned for -T
*/
#if ENABLE_FEATURE_GETOPT_LONG
/* Applet's own long options in busybox packed format:
 * "name\0" <has-arg marker> "<equivalent short option>", terminated by ';'. */
static const char getopt_longopts[] ALIGN1 =
	"options\0"      Required_argument "o"
	"longoptions\0"  Required_argument "l"
	"quiet\0"        No_argument       "q"
	"quiet-output\0" No_argument       "Q"
	"shell\0"        Required_argument "s"
	"test\0"         No_argument       "T"
	"unquoted\0"     No_argument       "u"
	"alternative\0"  No_argument       "a"
	"name\0"         Required_argument "n"
	;
#endif
int getopt_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
int getopt_main(int argc, char **argv)
{
	int n;
	char *optstr = NULL;
	char *name = NULL;
	unsigned opt;
	const char *compatible;
	char *s_arg;
#if ENABLE_FEATURE_GETOPT_LONG
	struct option *long_options = NULL;
	llist_t *l_arg = NULL;
#endif

	compatible = getenv("GETOPT_COMPATIBLE"); /* used as yes/no flag */

	if (!argv[1]) {
		if (compatible) {
			/* For some reason, the original getopt gave no error
			   when there were no arguments. */
			printf(" --\n");
			return 0;
		}
		bb_error_msg_and_die("missing optstring argument");
	}

	/* Legacy calling convention: first argument is the optstring itself
	 * (no applet options); implies unquoted output. */
	if (argv[1][0] != '-' || compatible) {
		char *s = argv[1];

		option_mask32 |= OPT_u; /* quoting off */
		s = xstrdup(s + strspn(s, "-+"));
		argv[1] = argv[0];
		return generate_output(argv+1, argc-1, s, long_options);
	}

	/* Modern convention: parse the applet's own options first. */
#if !ENABLE_FEATURE_GETOPT_LONG
	opt = getopt32(argv, "+o:n:qQs:Tu", &optstr, &name, &s_arg);
#else
	applet_long_options = getopt_longopts;
	opt_complementary = "l::"; /* -l is a list, may be given many times */
	opt = getopt32(argv, "+o:n:qQs:Tual:",
					&optstr, &name, &s_arg, &l_arg);
	/* Effectuate the read options for the applet itself */
	while (l_arg) {
		long_options = add_long_options(long_options, llist_pop(&l_arg));
	}
#endif

	if (opt & OPT_s) {
		set_shell(s_arg);
	}

	if (opt & OPT_T) {
		return 4; /* exit code 4 == "this is enhanced getopt" */
	}

	/* All options controlling the applet have now been parsed */
	n = optind - 1;
	if (!optstr) {
		/* No -o given: the next positional argument is the optstring. */
		optstr = argv[++n];
		if (!optstr)
			bb_error_msg_and_die("missing optstring argument");
	}

	/* Report errors under -n NAME if given, else under our own argv[0]. */
	argv[n] = name ? name : argv[0];
	return generate_output(argv + n, argc - n, optstr, long_options);
}
|
Doctors of Indian origin living overseas who want to practice, research or teach in India can now get work permits within a fortnight.
“Overseas doctors willing to contribute to India’s healthcare can now apply online … the application will be processed within 15 days, including permission to practice from the Medical Council of India (MCI),” health minister Dr Harsh Vardhan, who recently came back from a week-long trip to the US, told HT.
“The idea is to tap existing talent,” said Vardhan, who briefed PM Narendra Modi about the plan over the weekend. “PM Modi is keen to encourage skills transfer so that all parts of India get medical care at par with international levels,” said the minister.
The news was also welcomed by the American Association of Physicians of Indian Origin (AAPI).
AAPI represents around 100,000 physicians, including medical students, working in the US. “There’s a lot of interest in working in India,” said AAPI ex-president Dr Jayesh Shah.
“AAPI has been requesting India to simplify the red tape for four years,” Shah told HT.
“We’re preparing files of every physician member on when, where, how many days and what they would like to work for in India,” Shah said. The work permits could be for a time period of a few weeks to a few years.
Looking forward, Vardhan said: “Now that we have AAPI on board, the next step is to invite Indian doctors working for or retired from the UK’s National Health Service.” In April this year, NHS recruited 50 doctors from India to fill its staffing gaps.
MCI president Jayshree Mehta said that section 14(1) of the MCI act provides for such a step: “This way, we can get medical talent to India and provide international exposure to medical students”.
During his trip to the US, Vardhan also met the director of the Centers of Disease Control to discuss knowledge-sharing on various diseases and diagnostic tests.
First Published: Jul 08, 2014 01:03 IST |
Towards risk knowledge management in unmanned aerial vehicles applications development
UAV (Unmanned Aerial Vehicle) applications development projects are complex due to the high level of detail involved in the development stages and the different components that must be integrated within the same project. This paper discusses the role of knowledge management (KM) in reducing the risks in applications development projects, based on current approaches in similar environments. The paper first addresses the challenges that face UAV applications development. Then, it elaborates on the role of knowledge management in UAV application development and other similar environments, focusing on current approaches. In addition, the paper illustrates a proposed knowledge management framework to reduce the risks in UAV applications development projects.
<reponame>kun-shang/GRACE-Gravity-Inversion<gh_stars>0
/*
------------------------------------------------------------------------
Purpose: some subroutines of matrix and vector operation
Notes:
Programmer: <NAME> @ 4.29.2014
Functions:
void mgrns(double u, double g, double *r,int n,double a[]);
double mgrn1(double u, double g, double *r);
double mean (double * array, double N);
double std_dev (double * array, double N);
*/
#ifndef _NUMRS_H_
#include "numrs.h"
#endif
/* Arithmetic mean of the first N entries of array.
 * N is declared double to match the original interface: it serves both as
 * the (integral) element count and as the divisor. */
double mean (double * array, double N)
{
	double total = 0.0;
	int idx;

	for (idx = 0; idx < N; idx++)
		total += array[idx];
	return total / N;
}
/* Population standard deviation of the first N entries of array,
 * computed as sqrt(E[x^2] - (E[x])^2) in a single pass. */
double std_dev (double * array, double N)
{
	double total = 0.0;     /* running sum of x   */
	double sq_total = 0.0;  /* running sum of x^2 */
	int idx;

	for (idx = 0; idx < N; idx++) {
		total += array[idx];
		sq_total += array[idx] * array[idx];
	}
	return sqrt((sq_total / N) - ((total / N) * (total / N)));
}
/* Fill a[0..n-1] with pseudo-gaussian deviates of mean u and spread g.
 * Each deviate sums 12 uniform draws in [0,1) and subtracts 6 (central
 * limit approximation). *r is the multiplicative-congruential seed,
 * updated in place so successive calls continue the sequence. */
void mgrns(double u, double g, double *r, int n, double a[])
{
	const double modulus = 65536.0;
	const double mult = 2053.0;
	const double incr = 13849.0;
	int k, j, whole;
	double acc;

	for (k = 0; k < n; k++) {
		acc = 0.0;
		for (j = 1; j <= 12; j++) {
			*r = (*r) * mult + incr;
			whole = (int)(*r / modulus);
			*r = *r - whole * modulus;
			acc = acc + (*r) / modulus;
		}
		a[k] = u + g * (acc - 6.0);
	}
	return;
}
/* Return one pseudo-gaussian deviate of mean u and spread g.
 * Same generator as mgrns: sum of 12 uniforms minus 6; *r is the seed,
 * updated in place. */
double mgrn1(double u, double g, double *r)
{
	const double modulus = 65536.0;
	const double mult = 2053.0;
	const double incr = 13849.0;
	int j, whole;
	double acc = 0.0;

	for (j = 1; j <= 12; j++) {
		*r = (*r) * mult + incr;
		whole = (int)(*r / modulus);
		*r = *r - whole * modulus;
		acc = acc + (*r) / modulus;
	}
	return u + g * (acc - 6.0);
}
|
First, I cut down some plastic Suja juice bottles, which I thought had an interesting shape. Since I want water to be able to easily drain out of the planters, I glued a piece of a drinking straw to the bottom of the bottle.
For my inner shell, I used an old film container, but you could also use an empty cosmetics bottle, or whatever you have around. I glued the inner shell to the straw, and some chopsticks to both shells to keep the inner shell centered.
I made one at a larger scale, with the juice bottle as the inner shell. I can't remember where the harder plastic container came from, but it was time to clean the studio and get rid of unnecessary stuff. Plucking things from a junk drawer to make them useful one last time before recycling is one of my favorite ways to declutter (albeit slow).
I had a can of mold release kicking around so I gave them all a quick spray. This step is optional and I'm not sure it helped in the end, but typically you can substitute a vegetable oil cooking spray for mold release if you really want to experiment (though the oil may stain the concrete). |
<filename>go/concurrency/3.fan_in_and_out/2.fan_in_out/main.go<gh_stars>0
package main
import (
"fmt"
"sync"
)
// GetPipeLine returns a channel that yields the integers
// left, left+1, ..., left+right-1 (i.e. "right" consecutive values starting
// at "left") and is closed when the sequence is exhausted.
func GetPipeLine(left, right int) <-chan int {
	out := make(chan int)
	go func() {
		defer close(out)
		for v := left; v < left+right; v++ {
			out <- v
		}
	}()
	return out
}
// filterOdd reports whether a value should be kept: ok is true for even
// inputs and false for odd ones, so Event drops the odd values.
func filterOdd(in int) (int, bool) {
	return in, in%2 == 0
}
// square returns in squared; ok is always true (nothing is filtered).
func square(in int) (int, bool) {
	result := in * in
	return result, true
}
// Event fans work out to num worker goroutines that each apply f to values
// drawn from in (values for which f reports ok=false are dropped), then fans
// the per-worker outputs back into one channel, which is closed once every
// worker has finished. Output order is not deterministic. id is used only
// for log messages.
func Event(id string, num int, f func(int) (int, bool), in <-chan int) <-chan int {
	var fanOutResult []chan int
	fanOutChan := make(chan int)
	// Fan-out: num workers compete for values on the shared input channel.
	for i := 0; i < num; i++ {
		out := make(chan int)
		go func(i int) {
			fmt.Printf("task:[%s], run goroutine num:%v\n", id, i)
			for v := range in {
				r, ok := f(v)
				if ok {
					out <- r
				}
			}
			close(out)
			fmt.Printf("task:[%s],run goroutine num:%v, done\n", id, i)
		}(i)
		fanOutResult = append(fanOutResult, out)
	}
	go func() {
		var wg sync.WaitGroup
		// Fan-in: merge every worker channel into fanOutChan, closing it
		// only after all merge goroutines have drained their source.
		for _, v := range fanOutResult {
			wg.Add(1)
			go func(out <-chan int) {
				defer wg.Done()
				for v := range out {
					fanOutChan <- v
				}
			}(v)
		}
		wg.Wait()
		close(fanOutChan)
	}()
	return fanOutChan
}
// main wires the pipeline: 40000 consecutive ints -> filterOdd (2 workers,
// keeps even values) -> square (3 workers), printing results as they arrive
// (arrival order is not deterministic).
func main() {
	in := GetPipeLine(1, 40000)
	out := Event("task1", 3, square, Event("task1", 2, filterOdd, in))
	// time.Sleep(5 * time.Second)
	for v := range out {
		fmt.Printf("v=%v\n", v)
	}
}
|
/**
* This class exists to provide methods that Javascript in a WebView can make
* callbacks on. Currently this class is attached using
* addJavascriptInterface(one of these, "ParentProxy"); as a result, the
* JavaScript can call the @JavascriptInterface methods of this class as if they
* were methods of a global object called ParentProxy, e.g.,
* ParentProxy.postMessage("some json"). Or in typescript, since I haven't
* figured out a way to declare that this global exists and has these methods,
* we defeat type-checking with (window as any).ParentProxy.postMessage("some
* json").
*/
public class WebAppInterface {
// The reader activity that created this class to serve a particular web view.
private ReaderActivity mContext;
WebAppInterface(ReaderActivity c) {
mContext = c;
}
// This can be helpful in debugging. It's not currently used in production.
@JavascriptInterface
public void showToast(String toast) {
Toast.makeText(mContext, toast, Toast.LENGTH_SHORT).show();
}
// Receives messages 'posted' by the javascript in the webview.
// Unfortunately, can't find a way to configure things so that
// window.postMessage will call this.
// Instead, we set things up as described in the class comment.
@JavascriptInterface
public void receiveMessage(String message) {
mContext.receiveMessage(message);
}
} |
// Converts a printing duplex mode (received as a raw int) to the
// corresponding ARC Mojo duplex mode. Unrecognized values map to NONE.
mojom::PrintDuplexMode ToArcDuplexMode(int duplex_mode) {
  const auto mode = static_cast<printing::mojom::DuplexMode>(duplex_mode);
  if (mode == printing::mojom::DuplexMode::kLongEdge)
    return mojom::PrintDuplexMode::LONG_EDGE;
  if (mode == printing::mojom::DuplexMode::kShortEdge)
    return mojom::PrintDuplexMode::SHORT_EDGE;
  return mojom::PrintDuplexMode::NONE;
}
import {
_getWordRandomc_,
_getWordLowerc_,
_getWordUpperc_,
_getNumber_,
_getSpecial_,
} from "../lib/chart";
import { _shuffle_ } from "../lib/permuter";
// Options accepted by build(): the desired length plus at least one
// character-class flag.
interface options {
  length: number; // total password length
  lowercase?: boolean; // include lowercase letters
  uppercase?: boolean; // include uppercase letters
  number?: boolean; // include digits
  special?: boolean; // include special characters
}
// Allowed bounds (inclusive) for options.length.
enum sizes {
  min = 0,
  max = 2048,
}
/**
 * Build a random password
 * @param {Object} options Customizable options
 * @param {Number} options.length String length
 * @param {Boolean} [options.lowercase] At least one lowercase
 * @param {Boolean} [options.uppercase] At least one uppercase
 * @param {Boolean} [options.number] At least one number
 * @param {Boolean} [options.special] At least one special character
 * @returns {String} Password generated
 * @example Just lowercase password
 * // returns abcde
 * build({length:5, lowercase:true});
 * @example Just number pin
 * // returns 9854
 * build({length:4, number:true});
 * @example Extreme password
 * // returns j9T]w2&kZq8!mR4|pX7^aB1%uC5@dN3~
 * build({length:32, lowercase:true, uppercase:true, number:true, special:true});
 */
const build = (options: options): string =>
  // Validate first; _logic only runs once the options pass.
  _validations(options, () => _logic(options));
/**
 * Validate customizable options before building.
 * @param {Object} _options Customizable options
 * @param {Function} _next Callback invoked when the options are valid
 * @returns {String} Whatever the callback returns
 * @throws {RangeError} When `length` is not a number or is out of range
 */
const _validations = (_options: options, _next: any): string => {
  const _len = _options.length;
  // Same comparisons as before, just named for readability.
  const _badLength =
    typeof _len !== "number" || _len < sizes.min || _len > sizes.max;
  if (_badLength) {
    throw new RangeError(
      `"length" is not a valid number, it must be between ${sizes.min} and ${sizes.max}`
    );
  }
  return _next();
};
/**
 * Logic for building the password.
 *
 * Splits the requested length across the enabled character classes (each
 * enabled class starts with one character; remaining slots are spread
 * evenly, with any residue handed to randomly chosen classes), generates
 * the characters class by class, then shuffles the result.
 * @param {Object} _options Customizable options
 * @returns {String} Password generated (not yet validated)
 */
const _logic = (_options: options | any): string => {
  let _passwordGen: string = "";
  // Enabled option keys, excluding "length" itself.
  const _keysOptionsNL = Object.keys(_options).filter(
    (_key: string, _index: number) => _options[`${_key}`] && _key !== "length"
  );
  const _lengthKeys = _keysOptionsNL.length;
  const _lengthPass = _options.length;
  // One entry per enabled class: { _opt: option name, _qtt: char count }.
  // Start with one char per class, truncated if length < number of classes.
  // (The original built this list twice identically; once is enough.)
  let _quantitiesChart = _keysOptionsNL
    .map((_key) => ({ _opt: _key, _qtt: 1 }))
    .splice(0, _lengthPass);
  if (_lengthKeys < _lengthPass) {
    const _add = Math.floor(_lengthPass / _lengthKeys); // even share per class
    const _res = _lengthPass % _lengthKeys; // leftover after the even split
    // Raise every class to its even share (each already holds 1).
    for (let _qC of _quantitiesChart) {
      _qC._qtt += _add - 1;
    }
    // Hand each leftover char to a uniformly chosen class. The previous
    // index formula drew from [0, _lengthKeys - 2], so the last class
    // could never receive residue characters.
    if (_res) {
      for (let _i = 0; _i < _res; _i++) {
        const _index = Math.floor(Math.random() * _lengthKeys);
        _quantitiesChart[_index]._qtt += 1;
      }
    }
  }
  // Generate the characters for each class.
  // NOTE(review): every branch draws indices starting at 1, so index 0 is
  // never used — assumes the chart helpers are 1-based; confirm in lib/chart.
  for (let _qC of _quantitiesChart) {
    for (let _i = 0; _i < _qC._qtt; _i++) {
      switch (_qC._opt) {
        case "lowercase": {
          let _randomIndex = Math.floor(Math.random() * (25 - 0)) + 1;
          _passwordGen += _getWordLowerc_(_randomIndex);
          break;
        }
        case "uppercase": {
          let _randomIndex = Math.floor(Math.random() * (25 - 0)) + 1;
          _passwordGen += _getWordUpperc_(_randomIndex);
          break;
        }
        case "number": {
          let _randomIndex = Math.floor(Math.random() * (100 - 0)) + 1;
          _randomIndex = Math.round(_randomIndex / 10); // collapse 1..100 to 0..10
          _passwordGen += _getNumber_(_randomIndex);
          break;
        }
        case "special": {
          let _randomIndex = Math.floor(Math.random() * (5 - 0)) + 1;
          _passwordGen += _getSpecial_(_randomIndex);
          break;
        }
      }
    }
  }
  // Shuffle so classes are not grouped in generation order.
  _passwordGen = _shuffle_(_passwordGen);
  return _passwordGen;
};
// Public API: expose only the validated build entry point.
export default { build };
|
Residential Experiences and the Culture of Suburbanization: A Case Study of Portuguese Homebuyers in Mississauga
This paper examines the housing experiences of immigrants to Canada through a survey of first-generation Portuguese homebuyers in Mississauga, a suburb of Toronto. The survey focused on the push/pull factors leading to their decision to live in the suburbs, their housing search strategies, and their use of services in Mississauga and in the initial area of Portuguese immigrant settlement in downtown Toronto. This study uses data from a questionnaire administered to 110 Portuguese homebuyers in 1990, shortly after their first move to suburban Mississauga; a sample of those respondents were re-interviewed in 2003. The evidence indicates that these immigrants were ‘pulled’ into relocating to Mississauga because of their desire to live in a single-family dwelling in a good neighbourhood. Their housing search relied extensively on ethnic sources of information, particularly Portuguese real estate agents. In general, this group of immigrants expressed satisfaction with their move. The Portuguese community in Mississauga is characterized by a form of voluntary segregation, which seems to be partly a result of their reliance on their own ethnic community for information, language barriers to participating in non-Portuguese activities, and a cultural preference for living near people of the same ethnic background. One consequence of this re-segregation process, by which Portuguese people recreate a Portuguese ‘homeland’ in the suburbs, has been the limitation of their social contacts with members of other ethnic communities that have also settled in suburban Mississauga. |
/**
* Created by allenliu on 2017/8/31.
*/
public class AllenHttp {
private static OkHttpClient client;
public static OkHttpClient getHttpClient() {
if (client == null)
client = new OkHttpClient();
return client;
}
private static <T extends Request.Builder> T assembleHeader(T builder, VersionParams versionParams) {
com.allenliu.versionchecklib.core.http.HttpHeaders headers = versionParams.getHttpHeaders();
if (headers != null) {
ALog.e("header:");
for (Map.Entry<String, String> stringStringEntry : headers.entrySet()) {
String key = stringStringEntry.getKey();
String value = stringStringEntry.getValue();
ALog.e(key+"="+value+"\n");
builder.addHeader(key, value);
}
}
return builder;
}
private static String assembleUrl(String url, HttpParams params) {
StringBuffer urlBuilder = new StringBuffer(url);
if (params != null) {
urlBuilder.append("?");
for (Map.Entry<String, Object> stringObjectEntry : params.entrySet()) {
String key = stringObjectEntry.getKey();
String value = stringObjectEntry.getValue() + "";
urlBuilder.append(key).append("=").append(value).append("&");
}
url = urlBuilder.substring(0, urlBuilder.length() - 1);
}
ALog.e("url:"+url);
return url;
}
public static Request.Builder get(VersionParams versionParams) {
Request.Builder builder = new Request.Builder();
builder = assembleHeader(builder, versionParams);
builder.url(assembleUrl(versionParams.getRequestUrl(), versionParams.getRequestParams()));
return builder;
}
public static Request.Builder post(VersionParams versionParams) {
FormBody formBody = getRequestParams(versionParams);
Request.Builder builder = new Request.Builder();
builder = assembleHeader(builder, versionParams);
builder.post(formBody).url(versionParams.getRequestUrl());
return builder;
}
public static Request.Builder postJson(VersionParams versionParams) {
MediaType JSON = MediaType.parse("application/json; charset=utf-8");
String json = getRequestParamsJson(versionParams.getRequestParams());
RequestBody body = RequestBody.create(JSON, json);
Request.Builder builder = new Request.Builder();
builder = assembleHeader(builder, versionParams);
builder.post(body).url(versionParams.getRequestUrl());
return builder;
}
private static FormBody getRequestParams(VersionParams versionParams) {
FormBody.Builder builder = new FormBody.Builder();
HttpParams params = versionParams.getRequestParams();
for (Map.Entry<String, Object> entry : params.entrySet()) {
builder.add(entry.getKey(), entry.getValue() + "");
ALog.e("params key:"+entry.getKey()+"-----value:"+entry.getValue());
}
return builder.build();
}
private static String getRequestParamsJson(HttpParams params) {
String json;
JSONObject jsonObject = new JSONObject();
for (Map.Entry<String, Object> entry : params.entrySet()) {
try {
jsonObject.put(entry.getKey(), entry.getValue());
} catch (JSONException e) {
e.printStackTrace();
}
}
json = jsonObject.toString();
ALog.e("json:"+json);
return json;
}
} |
// moveTypes returns all moves that are valid in this game: all of the Moves
// that have been added via AddMove during initialization. Returns nil until
// game.SetUp() has been called.
func (g *GameManager) moveTypes() []*moveType {
	if !g.initialized {
		return nil
	}
	return g.moves
}
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.