/** Define a test hook for coordinating waiting. */
public class WaitTestHook<T> extends TestHookAdapter<T> {
/** Logger for this class. */
protected final Logger logger =
LoggerUtils.getLoggerFixedPrefix(getClass(), "Test");
/** Whether the hook is waiting. */
private boolean waiting = false;
/** Whether the hook should stop waiting. */
private boolean stopWaiting = false;
/**
* Creates a test hook that will cause {@link #awaitWaiting} to stop
* waiting when it starts waiting, and will itself stop waiting when {@link
* #stopWaiting()} is called.
*/
public WaitTestHook() { }
/**
* Assert that the test hook is called and begins waiting within the
* specified number of milliseconds.
*/
public synchronized void awaitWaiting(final long timeout)
throws InterruptedException {
final long start = System.currentTimeMillis();
while (!waiting && (start + timeout > System.currentTimeMillis())) {
wait(10000);
}
logger.info(this + ": Awaited waiting for " +
(System.currentTimeMillis() - start) + " milliseconds");
assertTrue(this + ": Should be waiting", waiting);
}
/**
* Tell the test hook to stop waiting, asserting that it has started
* waiting.
*/
public synchronized void stopWaiting() {
assertTrue(this + ": Should be waiting", waiting);
stopWaiting = true;
notifyAll();
logger.info(this + ": Stopped waiting");
}
/** Wait until {@link #stopWaiting()} is called. */
@Override
public synchronized void doHook() {
waiting = true;
notifyAll();
logger.info(this + ": Now waiting");
while (!stopWaiting) {
try {
wait(10000);
} catch (InterruptedException e) {
break;
}
}
}
/**
* Wait until {@link #stopWaiting()} is called, regardless of the argument.
*/
@Override
public void doHook(T obj) {
doHook();
}
}
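/*
 * Hypothetical usage sketch (not from the original source). A test installs
 * the hook on the code path under test, waits until that path parks inside
 * doHook(), inspects the intermediate state, then releases it:
 *
 *   WaitTestHook<Void> hook = new WaitTestHook<>();
 *   component.setTestHook(hook);   // assumed installation point
 *   startOperationInBackground();  // assumed asynchronous trigger
 *   hook.awaitWaiting(10_000);     // the operation is now blocked in doHook()
 *   // ... assert on the intermediate state ...
 *   hook.stopWaiting();            // let the operation finish
 */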
#include <cstdio>
#include <cstring>  //for strcmp()
#include <iostream> //for cout
#include <opencv2/opencv.hpp>
#include <raspicam/raspicam_cv.h>
#include "camera.hpp"
#include "motor.hpp"
#include "lane_detector.hpp"
#include "self_driving.hpp"
#include "intrinsic_calibration.hpp"
#include "extrinsic_calibration.hpp"
#include "color_calibration.hpp"
#define SAVE_RAW_IMG 0
using namespace cv;
using namespace std;
cv::Mat camera_matrix, distort_coefficient;
bool color_calib = false;
void load_settings()
{
if(!load_intrinsic_calibration("./intrinsic.yaml", camera_matrix, distort_coefficient)) {
cout << "failed to load intrinsic parameters, please calibrate the "
"camera first.\n";
exit(0);
}
if(!load_extrinsic_calibration("./extrinsic.yaml")) {
cout << "failed to load extrinsic parameters, please calibrate the "
"camera first.\n";
exit(0);
}
if(!load_color_calibration("./color_calibration.yaml") && color_calib == false) {
cout << "failed to load color calibration settings, please calibrate the "
"lane mark color thresholding value first.\n";
exit(0);
}
load_motor_calibration("motor.yaml");
load_pid_param("pid.yaml");
}
void greeting(int argc, char **argv)
{
if(argc == 2) {
if(strcmp(argv[1], "run") == 0) {
cout << "activating self-driving system...\n";
} else {
goto help;
}
} else if(argc == 3 && (strcmp(argv[1], "-c") == 0)) {
if(strcmp(argv[2], "intrinsic") == 0) {
cout << "intrinsic calibration mode.\n";
intrinsic_calibration();
} else if(strcmp(argv[2], "extrinsic") == 0) {
extrinsic_calibration();
cout << "extrinsic calibration mode.\n";
} else if(strcmp(argv[2], "color") == 0) {
cout << "color thresholding calibration mode.\n";
hsv_color_thresholding_calibration();
color_calib = true;
} else {
goto help;
}
} else {
help:
cout << "activate self-driving system: ./puyuma run\n"
"calibrate intrinsic parameters: ./puyuma -c intrinsic\n"
"calibrate extrinsic parameters: ./puyuma -c extrinsic\n"
"calibrate color thresholding values: ./puyuma -c color\n";
exit(0);
}
}
int main(int argc, char **argv)
{
greeting(argc, argv);
load_settings();
motor_init();
raspicam::RaspiCam_Cv camera;
if(camera_setup(camera, IMAGE_WIDTH, IMAGE_HEIGHT) == false) {
cout << "failed to open the camera. - camera_setup()\n";
exit(0);
}
#if SAVE_RAW_IMG == 1
camera_saver_init("driver_record.avi", IMAGE_WIDTH, IMAGE_HEIGHT);
#endif
lane_estimator_init();
cv::Mat raw_image, undistort_image;
float d = 0, phi = 0;
//self-driving system main loop
while(1) {
camera.grab();
camera.retrieve(raw_image);
#if SAVE_RAW_IMG == 1
camera_save(raw_image);
#endif
/* image undistortion and rectifying */
cv::undistort(raw_image, undistort_image, camera_matrix, distort_coefficient);
bool get_pose = lane_estimate(undistort_image, d, phi);
if((get_pose == true) && (color_calib == false)) {
self_driving_control(d, phi);
} else {
halt_motor();
}
waitKey(1);
}
return 0;
}
__attribute__((destructor)) void end()
{
halt_motor();
}
// Peek returns the next n bytes without advancing the reader. The bytes stop
// being valid at the next read call. If Peek returns fewer than n bytes, it
// also returns an error explaining why the read is short. The error is
// ErrBufferFull if n is larger than b's buffer size.
func (b *Reader) Peek(n int) ([]byte, error) {
if n < 0 {
return nil, ErrNegativeCount
}
for b.w-b.r < n && b.w-b.r < len(b.buf) && b.err == nil {
b.fill()
}
if n > len(b.buf) {
return b.buf[b.r:b.w], ErrBufferFull
}
var err error
if avail := b.w - b.r; avail < n {
n = avail
err = b.readErr()
if err == nil {
err = ErrBufferFull
}
}
return b.buf[b.r : b.r+n], err
}
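// Illustrative use (not part of the original file): peeking at the first
// bytes of a stream without consuming them.
//
//	r := bufio.NewReader(strings.NewReader("GET / HTTP/1.1"))
//	hdr, err := r.Peek(3) // hdr == []byte("GET"); the reader has not advanced
//	_, _ = hdr, err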
package graphql
import (
"bytes"
"fmt"
"io"
"os"
"path/filepath"
"reflect"
"strings"
"text/template"
"github.com/pkg/errors"
"golang.org/x/tools/imports"
"github.com/EGT-Ukraine/go2gql/generator/plugins/graphql/lib/importer"
)
type schemaGenerator struct {
tracerEnabled bool
schemaCfg SchemaConfig
goPkg string
parser *schemaParser
imports *importer.Importer
}
func (g schemaGenerator) importFunc(importPath string) func() string {
return func() string {
return g.imports.New(importPath)
}
}
func (g schemaGenerator) bodyTemplateContext() (interface{}, error) {
schemaObjects, err := g.parser.SchemaObjects()
if err != nil {
return nil, errors.Wrap(err, "failed to resolve objects to generate")
}
return SchemaBodyContext{
File: g.schemaCfg,
Importer: g.imports,
SchemaName: g.schemaCfg.Name,
QueryObject: schemaObjects.QueryObject,
MutationObject: schemaObjects.MutationObject,
Objects: schemaObjects.Objects,
Services: schemaObjects.Services,
TracerEnabled: g.tracerEnabled,
}, nil
}
func (g schemaGenerator) goTypeStr(typ GoType) string {
return typ.String(g.imports)
}
func (g schemaGenerator) goTypeForNew(typ GoType) string {
switch typ.Kind {
case reflect.Ptr:
return g.goTypeStr(*typ.ElemType)
case reflect.Struct:
return g.imports.Prefix(typ.Pkg) + typ.Name
}
panic("type " + typ.Kind.String() + " is not supported")
}
func (g schemaGenerator) bodyTemplateFuncs() map[string]interface{} {
return map[string]interface{}{
"ctxPkg": g.importFunc("context"),
"debugPkg": g.importFunc("runtime/debug"),
"fmtPkg": g.importFunc("fmt"),
"errorsPkg": g.importFunc(ErrorsPkgPath),
"gqlPkg": g.importFunc(GraphqlPkgPath),
"scalarsPkg": g.importFunc(ScalarsPkgPath),
"interceptorsPkg": g.importFunc(InterceptorsPkgPath),
"opentracingPkg": g.importFunc(OpentracingPkgPath),
"concat": func(st ...string) string {
return strings.Join(st, "")
},
"isArray": func(typ GoType) bool {
return typ.Kind == reflect.Slice
},
"goType": g.goTypeStr,
"goTypeForNew": g.goTypeForNew,
"serviceConstructor": func(filedType string, service SchemaService, ctx SchemaBodyContext) string {
return ctx.Importer.Prefix(service.Pkg) + "Get" + service.Name + "Service" + filedType + "Methods"
},
}
}
func (g schemaGenerator) headTemplateContext() map[string]interface{} {
return map[string]interface{}{
"imports": g.imports.Imports(),
"package": g.schemaCfg.OutputPackage,
}
}
func (g schemaGenerator) headTemplateFuncs() map[string]interface{} {
return nil
}
func (g schemaGenerator) generateBody() ([]byte, error) {
buf := new(bytes.Buffer)
tmpl, err := templatesSchemas_bodyGohtmlBytes()
if err != nil {
return nil, errors.Wrap(err, "failed to get body template")
}
bodyTpl, err := template.New("body").Funcs(g.bodyTemplateFuncs()).Parse(string(tmpl))
if err != nil {
return nil, errors.Wrap(err, "failed to parse template")
}
bodyCtx, err := g.bodyTemplateContext()
if err != nil {
return nil, errors.Wrap(err, "failed to prepare body context")
}
err = bodyTpl.Execute(buf, bodyCtx)
if err != nil {
return nil, errors.Wrap(err, "failed to execute template")
}
return buf.Bytes(), nil
}
func (g schemaGenerator) generateHead() ([]byte, error) {
buf := new(bytes.Buffer)
tmpl, err := templatesSchemas_headGohtmlBytes()
if err != nil {
return nil, errors.Wrap(err, "failed to get head template")
}
bodyTpl, err := template.New("head").Funcs(g.headTemplateFuncs()).Parse(string(tmpl))
if err != nil {
return nil, errors.Wrap(err, "failed to parse template")
}
err = bodyTpl.Execute(buf, g.headTemplateContext())
if err != nil {
return nil, errors.Wrap(err, "failed to execute template")
}
return buf.Bytes(), nil
}
func (g schemaGenerator) generate(out io.Writer) error {
body, err := g.generateBody()
if err != nil {
return errors.Wrap(err, "failed to generate body")
}
head, err := g.generateHead()
if err != nil {
return errors.Wrap(err, "failed to generate head")
}
r := bytes.Join([][]byte{
head,
body,
}, nil)
res, err := imports.Process("file", r, &imports.Options{
Comments: true,
})
// TODO: fix this
if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
} else {
r = res
}
_, err = out.Write(r)
if err != nil {
return errors.Wrap(err, "failed to write output")
}
return nil
}
func (p *Plugin) generateSchemas() error {
for _, schema := range p.schemaConfigs {
pkg, err := GoPackageByPath(filepath.Dir(schema.OutputPath), p.generateCfg.VendorPath)
if err != nil {
return errors.Wrapf(err, "failed to resolve schema %s output go package", schema.Name)
}
parser := newSchemaParser(schema, p.files)
g := schemaGenerator{
parser: parser,
tracerEnabled: p.generateCfg.GenerateTraces,
schemaCfg: schema,
goPkg: pkg,
imports: &importer.Importer{
CurrentPackage: pkg,
},
}
file, err := os.OpenFile(schema.OutputPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
if err != nil {
return errors.Wrapf(err, "failed to open schema %s output file for write", schema.OutputPath)
}
err = g.generate(file)
if err != nil {
if cerr := file.Close(); cerr != nil {
err = errors.Wrap(err, cerr.Error())
}
return errors.Wrapf(err, "failed to generate types file %s", schema.OutputPath)
}
if err := file.Close(); err != nil {
return errors.Wrapf(err, "failed to close generated schema %s file", schema.OutputPath)
}
}
return nil
}
def add_view(self, view):
    if view.name not in (v.name for v in self.views):
        self.views.append(view)
        for ID, targets in view.map.items():
            for target in targets:
                self.add_target(target)
        return True
    else:
        return False
module React.Flux.Enzyme (module E) where
import React.Flux.Enzyme.ReactWrapper as E
import React.Flux.Enzyme.ShallowWrapper as E
from collections import namedtuple
from kaa.filetype.default import defaultmode
from gappedbuf import re as gre
from kaa.highlight import Tokenizer, Span, Keywords, EndSection
from kaa.theme import Theme, Style
JavaScriptThemes = {
    'default':
        Theme([
            # None defined
        ])
}
def build_tokenizer(stop=None, terminates=None):
    JSTOKENS = namedtuple('jstokens', ['keywords', 'comment1', 'comment2',
                                       'string1', 'string2', 'stop'])
    keywords = Keywords('javascript-keyword', 'keyword',
        ["break", "case", "catch", "continue", "debugger", "default", "delete",
         "do", "else", "finally", "for", "function", "if", "in", "instanceof",
         "new", "return", "switch", "this", "throw", "try", "typeof", "var",
         "void", "while", "with", "class", "enum", "export", "extends", "import",
         "super", "implements", "interface", "let", "package", "private",
         "protected", "public", "static", "yield", ])
    comment1 = Span('javascript-comment1', 'comment', r'/\*', r'\*/', escape='\\')
    comment2 = Span('javascript-comment2', 'comment', r'//', r'$', escape='\\')
    string1 = Span('javascript-string1', 'string', '"', '"', escape='\\')
    string2 = Span('javascript-string2', 'string', "'", "'", escape='\\')
    tokens = JSTOKENS(keywords, comment1, comment2, string1, string2, stop)
    return Tokenizer(tokens, terminates=terminates)
class JavaScriptMode(defaultmode.DefaultMode):
    MODENAME = 'JavaScript'
    def init_themes(self):
        super().init_themes()
        self.themes.append(JavaScriptThemes)
    def init_tokenizers(self):
        self.tokenizers = [build_tokenizer()]
/**
* Convenient wrapper for a button that goes to a particular place when clicked.
*/
public class PlaceButton extends Composite {
private final Button button = new Button();
public PlaceButton( final PlaceManager placeManager, final DefaultPlaceRequest goTo ) {
checkNotNull( "placeManager", placeManager );
checkNotNull( "goTo", goTo );
button.addClickHandler( new ClickHandler() {
@Override
public void onClick( ClickEvent event ) {
placeManager.goTo( goTo );
}
} );
button.setText( goTo.toString() );
initWidget( button );
}
}
/**
* The entry point for clients to access role data
*/
@RestController
@RequestMapping("/roles")
public class RoleController {
/**
* Using the Role service to process role data
*/
@Autowired
RoleService roleService;
/**
* List of all roles
* <br>Example: <a href="http://localhost:2019/roles/roles">http://localhost:2019/roles/roles</a>
*
* @return JSON List of all the roles and their associated users
* @see RoleService#findAll() RoleService.findAll()
*/
@GetMapping(value = "/roles", produces = {"application/json"})
public ResponseEntity<?> getAllRoles() {
List<Role> roles = roleService.findAll();
return new ResponseEntity<>(roles, HttpStatus.OK);
}
/**
* The Role referenced by the given primary key
* <br>Example: <a href="http://localhost:2019/roles/role/2">http://localhost:2019/roles/role/2</a>
*
* @param roleid The primary key (long) of the role you seek
* @return JSON object of the role you seek
* @see RoleService#findRoleById(long) RoleService.findRoleById(long)
*/
@GetMapping(value = "/role/{roleid}", produces = {"application/json"})
public ResponseEntity<?> getRoleById(@PathVariable long roleid) {
Role r = roleService.findRoleById(roleid);
return new ResponseEntity<>(r, HttpStatus.OK);
}
/**
* The Role with the given name
* <br>Example: <a href="http://localhost:2019/roles/role/name/data">http://localhost:2019/roles/role/name/data</a>
*
* @param rolename The name of the role you seek
* @return JSON object of the role you seek
* @see RoleService#findRoleByName(String) RoleService.findRoleByName(String)
*/
@GetMapping(value = "/role/name/{rolename}", produces = {"application/json"})
public ResponseEntity<?> getRoleByName(@PathVariable String rolename) {
Role r = roleService.findRoleByName(rolename);
return new ResponseEntity<>(r, HttpStatus.OK);
}
/**
* Given a complete Role object, create a new Role record
* <br>Example: <a href="http://localhost:2019/roles/role">http://localhost:2019/roles/role</a>
*
* @param newRole A complete new Role object
* @return A location header with the URI to the newly created role and a status of CREATED
* @see RoleService#save(Role) RoleService.save(Role)
*/
@PostMapping(value = "/role", consumes = {"application/json"})
public ResponseEntity<?> addNewRole(@Valid @RequestBody Role newRole) {
newRole.setRoleid(0);
newRole = roleService.save(newRole);
// Set the location header for the newly created resource
HttpHeaders responseHeaders = new HttpHeaders();
URI newRoleURI = ServletUriComponentsBuilder.fromCurrentRequest()
.path("/{roleid}")
.buildAndExpand(newRole.getRoleid())
.toUri();
responseHeaders.setLocation(newRoleURI);
return new ResponseEntity<>(null, responseHeaders, HttpStatus.CREATED);
}
/**
* The process allows you to update a role name only!
* <br>Example: <a href="http://localhost:2019/roles/role/1">http://localhost:2019/roles/role/1</a>
*
* @param roleid The primary key (long) of the role you wish to update
* @param updateRole The new name (String) for the role
* @return Status of OK
* @see RoleService#update(Role, long) RoleService.update(Role, long)
*/
@PutMapping(value = "/role/{roleid}", consumes = {"application/json"})
public ResponseEntity<?> updateRole(@Valid @RequestBody Role updateRole,
@PathVariable long roleid) {
updateRole = roleService.update(updateRole, roleid);
return new ResponseEntity<>(HttpStatus.OK);
}
/**
* Deletes a given role
* <br>Example: <a href="http://localhost:2019/roles/role/3">http://localhost:2019/roles/role/3</a>
*
* @param roleid the primary key of the role you wish to delete
* @return Status of OK
* @see RoleService#delete(long) RoleService.delete(long)
*/
@DeleteMapping(value = "/role/{roleid}", produces = {"application/json"})
public ResponseEntity<?> deleteRoleById(@PathVariable long roleid) {
roleService.delete(roleid);
return new ResponseEntity<>(HttpStatus.OK);
}
}
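// Illustrative request against the POST endpoint above (hypothetical, not from
// the original source; the JSON shape of Role is assumed from the name-based
// lookups and the update-name-only semantics documented above):
//   curl -X POST -H "Content-Type: application/json" \
//        -d '{"name": "auditor"}' http://localhost:2019/roles/role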
On the 1st January 2011, I released the first Open Source version of Labyrinth, both to CPAN and GitHub. In addition, I also released several plugins and a demo site to highlight some of the basic functionality of the system.
Labyrinth has been in the making since December 2002, although its true beginnings date from about mid-2001. The codebase has evolved over the years as I've developed more and more websites and gained a better understanding of exactly what I want from a Website Management System. Labyrinth was intended to be a website in a box, and although it's not quite there yet, hopefully once I've released all the plugin code I can put a proper installation tool in place.
Labyrinth is now the backend to several Open Source websites, with CPAN Testers using it for the Reports, Blog, Wiki and Preferences sites, as well as some personal, commercial and community projects. As a consequence, Labyrinth has become stable enough to look at growing the plugins rather than the core code. I'm sure there is plenty that could be done with the core code, but for the moment providing a good set of plugins and some example sites are my next aims.
As mentioned, I see Labyrinth as a Website Management System. While many similar applications and frameworks provide the scaffolding for a Content Management System, Labyrinth extends that by not only providing the ability to manage your content, but also by providing a degree of structure around the functionality of the site: the management of users and groups, menu options and access, as well as notification mechanisms, enables you to exercise more control dynamically.
When writing the forerunner to Labyrinth, one required aspect was the ability to turn functionality on and off instantly, which meant much of the logic flow was described in the data, not the code. Labyrinth has built on this idea, so that the dispatch tables and general functionality can be controlled by the user via administration screens, and not by uploading new code. When I started looking at this sort of application back in 2001, there was nothing available that could do that. Today there are several frameworks written in Perl that could potentially be tailored to process a website in this way, but all require the developer to design and code the functionality. Labyrinth aims to provide that pre-packaged.
I'm primarily releasing Labyrinth so that I can release all the code that drives the CPAN Testers websites, giving others the ability to suggest improvements and contribute. The system allows me the freedom to build websites quickly and easily, with the hard work being put into the design and CSS layouts. With so many other frameworks available, all of which have bigger development teams and support mechanisms than I can offer, I'm not intending Labyrinth to be a competitor. It might interest some, which is great, but if you prefer to work on other frameworks that's great too. After all, it's still Perl ;)
More news of plugin and site releases is coming soon.
// SendDistribution sends this node's information to the peer
func (peer *Peer) SendDistribution() error {
packet := packets.NewPacket(packets.CMD_DISTRIBUTION, peer.Server.ServerNode.ServerNetworkNode)
peer.SendPacket(packet)
return nil
}
# Authors: JG, DC
# Date: 6/1/2021
# Purpose: fuzzy matches between the H2A applications data and the WHD investigations data
# Filename: A1_fuzzy_matching.py
# imports
import pandas as pd
import numpy as np
import random
import re
import recordlinkage
import time
# -------- USER DEFINED FUNCTIONS --------
# this function will pull out the certification status from a given h2a application
def find_status(one):
    string_version = str(one)  # convert to string
    pattern = r'\-\s(.*)$'  # define regex pattern
    found = re.findall(pattern, string_version)  # search for pattern and return what's found
    return found[0]
# this function will clean the EMPLOYER_NAME in approved_only (h2a apps) and legal_name in violations (WHD data)
def clean_names(one):
    string_version = str(one)  # convert to string
    upper_only = string_version.upper()  # convert to uppercase
    pattern = r"(LLC|CO|INC)\."  # locate the LLC, CO, or INC that are followed by a period
    replacement = r'\1'  # replace the whole pattern with the LLC/CO/INC component
    res = re.sub(pattern, replacement, upper_only)  # compute and return the result
    return res
# Function to do the fuzzy matching
def fuzzy_match(dbase1, dbase2, blockLeft, blockRight, matchVar1, matchVar2, distFunction,
                threshold, colsLeft, colsRight):
    print('*** Starting Fuzzy Matching ***')
    link_jobs_debar = recordlinkage.Index()  # initialize our Index
    link_jobs_debar.block(left_on=blockLeft, right_on=blockRight)  # block on the given block variable
    # form our index with the two given databases
    candidate_links = link_jobs_debar.index(dbase1, dbase2)
    compare = recordlinkage.Compare()  # initialize our compare class
    if len(matchVar1) != len(matchVar2):  # ensure matching num. of matching vars
        print("Need to pass in your matching variables in an array and you need to have "
              "the same number of matching variables. Please try again. ")
        return
    for i in range(len(matchVar1)):  # for each matching pair, add to our comparator
        compare.string(matchVar1[i], matchVar2[i], method=distFunction, threshold=threshold)
    compare_vectors = compare.compute(candidate_links, dbase1, dbase2)  # compute
    # compare_vectors
    # rename columns
    temp_array = []
    for i in range(len(matchVar1)):
        colName = str(matchVar1[i])
        temp_array.append(colName)
    compare_vectors.columns = temp_array
    # Find the correct selection
    conditions = []
    for one in matchVar1:
        condition_string = "({one_input} == 1)".format(one_input=one)
        conditions.append(condition_string)
    if len(conditions) > 1:
        comparison = "&".join(conditions)
    else:
        comparison = conditions[0]
    selected = compare_vectors.query(comparison).copy()
    # Extract index from selection
    n = selected.shape[0]
    index_dbase1_values = []
    index_dbase2_values = []
    for i in range(n):
        index = selected.index[i]
        index_dbase1_values.append(index[0])
        index_dbase2_values.append(index[1])
    selected["index_dbase1"] = index_dbase1_values.copy()
    selected["index_dbase2"] = index_dbase2_values.copy()
    # merge jobs with original columns
    # this will throw an error if jobs is not the left
    dbase1["index_dbase1"] = dbase1.index
    dbase1_columns = colsLeft
    m1 = pd.merge(selected, dbase1[dbase1_columns], on="index_dbase1", how="inner")
    # merge debar with original columns
    dbase2["index_dbase2"] = dbase2.index
    dbase2_columns = colsRight
    m2 = pd.merge(m1, dbase2[dbase2_columns], on="index_dbase2", how="inner", suffixes=["_left", "_right"])
    print('**** DONE WITH FUZZY MATCHING ****')
    return m2
# -------- DRIVER CODE --------
# load in h2a data
h2a = pd.read_excel("../data/h2a_2018.xlsx")
print('*** H2A Loaded ***')
# load in investigations/violations data
# url = "../my_data/whd_whisard.csv"
url = "https://enfxfr.dol.gov/data_catalog/WHD/whd_whisard_20210415.csv.zip"
investigations = pd.read_csv(url, index_col=None, dtype={7:'str'})
print('*** WHD Investigations Loaded ***')
# convert the dates in investigations to datetime objects
investigations['findings_start_date'] = pd.to_datetime(investigations['findings_start_date'], errors='coerce')
investigations['findings_end_date'] = pd.to_datetime(investigations['findings_end_date'], errors="coerce")
print('*** WHD Investigations Dates Converted ***')
# use the find status function and put into a new column
h2a["status"] = [find_status(one) for one in h2a.CASE_STATUS] # put the status in a new column
print('*** Status Generated in H2A applications data ***')
# filter to applications that have received certification or partial certification
approved_only = h2a.loc[((h2a.status == "CERTIFICATION") | (h2a.status == "PARTIAL CERTIFICATION")),:].copy()
print('*** Filtered to certified and partially certified applications***')
# make new "name" columns for the cleaned versions of the names
approved_only["name"] = [clean_names(one) for one in approved_only.EMPLOYER_NAME]
approved_only_pure = approved_only.copy()
investigations["name"] = [clean_names(one) for one in investigations.legal_name]
investigations_cleaned = investigations.loc[investigations.name != "NAN", :].copy() # get rid of NAN names
print('*** Cleaned Names in WHD investigations data ***')
print('*** Converting ld_dt to datetime ***')
investigations_cleaned['ld_dt'] = pd.to_datetime(investigations_cleaned['ld_dt'], errors='coerce')
print('*** Converted ld_dt to datetime ***')
# relevant investigations are those after 2017
print('*** Subsetting to only investigations after 2017-01-01 ***')
relevant_investigations = investigations_cleaned[investigations_cleaned.ld_dt > '2017-01-01'].copy()
# Clean up the city names
print('*** Cleaning up City Names in both Datasets ***')
approved_only["city"] = [str(one).upper() for one in approved_only.EMPLOYER_CITY]
relevant_investigations["city"] = [str(one).upper() for one in relevant_investigations.cty_nm]
# fuzzy match the two datasets
blockLeft = "EMPLOYER_STATE"
blockRight = "st_cd"
matchingVarsLeft = ["name", "city"]
matchingVarsRight = ["name", "city"]
colsLeft = ["status", "JOB_START_DATE", "JOB_END_DATE", "EMPLOYER_STATE", "name", "index_dbase1", "city"]
colsRight = ["st_cd", "name", "h2a_violtn_cnt", "findings_start_date", "findings_end_date",
"index_dbase2", "city", "ld_dt"]
approved_only.to_csv("../output/approvedOnly.csv")
res = fuzzy_match(approved_only, relevant_investigations, blockLeft, blockRight, matchingVarsLeft, matchingVarsRight, "jarowinkler",
0.85, colsLeft, colsRight)
# Update this at some point to provide a unique file name so we don't overwrite files
csv_path = '../output/fuzzyMatchResult.csv'
print('*** SAVING %s ***' % csv_path)
res.to_csv("../output/fuzzyMatchResult.csv")
// FIXME: Fix semantics of Thread::block
class KernelMutex
{
public:
~KernelMutex()
{
VERIFY(m_waiting_threads.size() == 0);
}
void lock()
{
if (Scheduler::is_initialized()) {
Thread *active_thread = Scheduler::the().active_thread_if_avaliable();
if (active_thread != nullptr) {
if (m_holding_thread.is_null()) {
m_holding_thread = *active_thread;
} else {
m_waiting_threads.enqueue(*active_thread);
active_thread->block();
}
}
} else {
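// Presumably early boot: the scheduler is not running yet, so the system is
// still single-threaded and there is nothing to block on.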
}
}
void unlock()
{
if (Scheduler::is_initialized()) {
m_holding_thread.clear();
if (m_waiting_threads.size() > 0) {
RefPtr<Thread> next_thread = m_waiting_threads.dequeue();
FIXME_ASSERT(m_holding_thread.is_null());
m_holding_thread = next_thread;
m_holding_thread->wakeup();
}
} else {
VERIFY(m_holding_thread.is_null());
}
}
private:
RefPtr<Thread> m_holding_thread;
CircularQueue<RefPtr<Thread>, 16> m_waiting_threads;
};
/* ipmi_kontron_nextboot_set - Select the next boot order on CP6012
 *
 * @intf: ipmi interface
 * @id: fru id
 *
 * returns -1 on error
 * returns 0 if successful
 */
static int
ipmi_kontron_nextboot_set(struct ipmi_intf * intf, int argc, char **argv)
{
struct ipmi_rs *rsp;
struct ipmi_rq req;
uint8_t msg_data[8];
int i;
memset(msg_data, 0, sizeof(msg_data));
msg_data[0] = 0xb4;
msg_data[1] = 0x90;
msg_data[2] = 0x91;
msg_data[3] = 0x8b;
msg_data[4] = 0x9d;
msg_data[5] = 0xFF;
msg_data[6] = 0xFF;
for (i = 0; bootdev[i] != 0; i++) {
if (strcmp(argv[0], bootdev[i]) == 0) {
msg_data[5] = i;
break;
}
}
if (msg_data[5] == 0xFF) {
printf("Unknown boot device: %s\n", argv[0]);
return -1;
}
memset(&req, 0, sizeof(req));
req.msg.netfn = 0x3E;
req.msg.cmd = 0x02;
req.msg.data = msg_data;
req.msg.data_len = 7;
req.msg.lun = 0x03;
rsp = intf->sendrecv(intf, &req);
if (rsp == NULL)
{
printf("Device not present (No Response)\n");
return(-1);
}
if (rsp->ccode > 0) {
printf("Device not present (%s)\n",
val2str(rsp->ccode, completion_code_vals));
return(-1);
}
return 0;
}
// If no bundle name is specified, the first app's name is used.
// TODO(omaha): There is no enforcement of required or optional values of
// the extra arguments. Come up with a way to enforce this.
// TODO(omaha): If we prefix extra_args with '/' and replace all '=' with ' '
// and all & with '/' then we should be able to use the CommandLineParser
// class to pull out the values here. We'd need to define scenarios for all
// permutations of ExtraArgs, but this shouldn't be difficult to get the right
// ones.
HRESULT ExtraArgsParser::Parse(const TCHAR* extra_args,
const TCHAR* app_args,
CommandLineExtraArgs* args) {
HRESULT hr = ParseExtraArgs(extra_args, args);
if (FAILED(hr)) {
return hr;
}
return ParseAppArgs(app_args, args);
}
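// Illustrative extra_args string (format inferred from the comment above this
// function; the GUID and values are made up):
//   "appguid={8A69D345-D564-463C-AFF1-A69D9E530F96}&appname=Example&needsadmin=False"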
import * as React from 'react'
import styled from 'styled-components'
import { Indicator } from '../types/DataType'
type ChartTitleProps = {
countries: string[]
indicators: Indicator[]
}
const StyledChartTitle = styled.div`
padding: 1em;
text-align: center;
:first-letter {
text-transform: capitalize;
}
`
const HighlightedText = styled.span<{color: string}>`
background-color: ${props => props.color};
`
const indicatorsColor = '#fff1b5'
const countriesColor = '#e4ffbb'
export default class ChartTitle extends React.Component<ChartTitleProps> {
chainElements (strings: string[], delimiter: string, color: string): React.ReactNode {
return strings.length !== 0
? strings
.map<React.ReactNode>(ind => <HighlightedText key={ind} color={color}>{ind}</HighlightedText>)
.reduce((prev, curr) => [prev, delimiter, curr])
: undefined
}
render() {
const props = this.props
return (
<StyledChartTitle>
{
this.chainElements(props.indicators, ' and ', indicatorsColor)
} in {
this.chainElements(props.countries.slice(0, props.countries.length - 1), ', ', countriesColor)
}
{props.countries.length > 1 ? ' and ' : ''}
<HighlightedText color={countriesColor}>
{props.countries[props.countries.length - 1]}
</HighlightedText>
, 14 day cumulative per 100 000
</StyledChartTitle>
)
}
}
After writing a cover story heralding Barack Obama as the “First Gay President,” conservative blogger Andrew Sullivan went on “The Chris Matthews Show” to talk about what Obama’s personal endorsement of gay marriage meant to him.
A lot, apparently:
“It’s hugely important, and to tell you the truth I didn’t realize how important it would be until it happened. Beforehand, I was kind of steeled. I was like, ‘I don’t care, he’s going to disappoint us again.’ And then I sat down and watched our president tell me that I am his equal, that I’m no longer outside, I’m fully part of this family and to hear the president who is in some ways a father figure speak to that, the tears came down like with many people in our families, to be included. “I never understood the power of a president’s words until that day, really. I thought, all that matters is the states and the Congress and the Defense of Marriage Act and I had all this in my head and suddenly this man saying, ‘I’m with you, I get it, you’re like me, I’m like you, there is nothing between us, we are the same people and we are equal human beings and I want to treat you the way you treat me.’ That—that was overwhelming. That’s all I can say. I was at a loss for words.”
The general reception from Democrats and LGBT activists has been rather cynical, with a chorus of “it’s just a political decision designed to drum up votes and money, which perhaps will fail spectacularly and end up losing us key swing states in November.” So it’s nice to see someone as influential as Andrew Sullivan take the positive approach on this monumental moment, which, removed from the political spin-game, is culturally groundbreaking.
Yes, perhaps Obama is playing politics. He always is, because he’s the President of the Free World. But he’s also becoming part of our family, and in that way the whole United States of America could be considered our first, second (or distant) cousins.
Which level of proximity you and your gay friends want to take all depends on how often you want the breeders over for dinner with their yappy bratty kids.
//! Internal shared convenience utilities.
use crate::error::{Result, Ret, RsmpegError};
use libc::c_int;
use rusty_ffmpeg::ffi;
use std::{ops::Deref, ptr::NonNull};
/// Triage a pointer to Some(non-null) or None
pub trait PointerUpgrade<T>: Sized {
fn upgrade(self) -> Option<NonNull<T>>;
}
impl<T> PointerUpgrade<T> for *const T {
#[inline]
fn upgrade(self) -> Option<NonNull<T>> {
NonNull::new(self as *mut _)
}
}
impl<T> PointerUpgrade<T> for *mut T {
#[inline]
fn upgrade(self) -> Option<NonNull<T>> {
NonNull::new(self)
}
}
/// This is a common pattern in FFmpeg: an API returns null on error.
/// We can attach a specific error code (usually an FFmpeg error code such as
/// ffi::AVERROR(ffi::ENOMEM)).
pub trait RsmpegPointerUpgrade<T>: PointerUpgrade<T> {
/// Triage the pointer. If null, return RsmpegError::AVError(err) here.
fn upgrade_or(self, err: c_int) -> Result<NonNull<T>>;
}
impl<T> RsmpegPointerUpgrade<T> for *const T {
#[inline]
fn upgrade_or(self, err: c_int) -> Result<NonNull<T>> {
self.upgrade().ok_or(RsmpegError::AVError(err))
}
}
impl<T> RsmpegPointerUpgrade<T> for *mut T {
#[inline]
fn upgrade_or(self, err: c_int) -> Result<NonNull<T>> {
self.upgrade().ok_or(RsmpegError::AVError(err))
}
}
/// This is a common pattern in FFmpeg: an API returns a negative number as an
/// error and zero or greater as a success. Here we triage the number returned
/// by an FFmpeg API into `Ok(non_negative)` and `Err(negative)`.
pub trait RetUpgrade {
fn upgrade(self) -> Ret;
}
impl RetUpgrade for c_int {
fn upgrade(self) -> Ret {
if self < 0 {
Ret::Err(self)
} else {
Ret::Ok(self)
}
}
}
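// Illustrative only (not from the original file): triaging a raw FFmpeg return
// code with RetUpgrade. av_read_frame is a real FFmpeg function, but this call
// site is a hypothetical sketch.
//
//     match unsafe { ffi::av_read_frame(fmt_ctx, pkt) }.upgrade() {
//         Ret::Ok(_) => { /* got a packet */ }
//         Ret::Err(AVERROR_EAGAIN) => { /* no packet right now; try again */ }
//         Ret::Err(err) => return Err(RsmpegError::AVError(err)),
//     }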
/// This is a convenient trait we don't find in the Rust std library. Mutably
/// accessing a member of an FFI struct is not always safe (consider directly
/// changing the capacity of a Vec), but for some members such access is
/// needed, so UnsafeDerefMut comes to the rescue. You can use
/// `foo.deref_mut().member = bar` in an unsafe block if the type of `foo`
/// implements this trait.
pub trait UnsafeDerefMut: Deref {
/// Mutably dereferences the value, unsafely.
unsafe fn deref_mut(&mut self) -> &mut Self::Target;
}
/// Since ffi::AVERROR(ffi::EAGAIN) is often used in match arms, but RFC #2920
/// ([tracking issue](https://github.com/rust-lang/rust/issues/76001)) hasn't
/// been implemented yet, we currently create const values here as a workaround.
pub const AVERROR_EAGAIN: i32 = ffi::AVERROR(ffi::EAGAIN);
pub const AVERROR_ENOMEM: i32 = ffi::AVERROR(ffi::ENOMEM);
/*
Copyright 2012, 2015 <NAME>.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/** \file
Define a repeat parser, which parses the same thing a number of times.
It outputs a sequence of sub-parser outputs.
*/
#ifndef PARSE_LL_BASE_REPEAT_HPP_INCLUDED
#define PARSE_LL_BASE_REPEAT_HPP_INCLUDED
#include <cassert>
#include <boost/optional.hpp>
#include "range/core.hpp"
#include "core.hpp"
#include <boost/mpl/if.hpp>
#include <type_traits>
namespace parse_ll {
/**
Parser that repeats its sub-parser a number of times, where the number can be
constrained.
The skip parser is used between consecutive sub-parses, but not before or after
the repeat parser as a whole.
*/
template <class SubParser> struct repeat_parser
: public parser_base <repeat_parser <SubParser> >
{
SubParser sub_parser;
int minimum, maximum;
public:
repeat_parser (SubParser const & sub_parser, int minimum, int maximum)
: sub_parser (sub_parser), minimum (minimum), maximum (maximum) {}
};
struct repeat_parser_tag;
template <class SubParser> struct decayed_parser_tag <repeat_parser <SubParser>>
{ typedef repeat_parser_tag type; };
class repeat_parser_maker_bounds {
int minimum, maximum;
public:
repeat_parser_maker_bounds (int minimum, int maximum)
: minimum (minimum), maximum (maximum) {}
template <class SubParser>
repeat_parser <SubParser>
operator[] (SubParser const & sub_parser) const
{ return repeat_parser <SubParser> (sub_parser, minimum, maximum); }
};
struct repeat_parser_maker {
template <class SubParser>
repeat_parser <SubParser>
operator[] (SubParser const & sub_parser) const
{ return repeat_parser <SubParser> (sub_parser, 0, -1); }
repeat_parser_maker_bounds operator() (int count) const
{ return repeat_parser_maker_bounds (count, count); }
repeat_parser_maker_bounds operator() (int minimum, int maximum) const
{ return repeat_parser_maker_bounds (minimum, maximum); }
repeat_parser_maker_bounds at_least (int minimum) const
{ return repeat_parser_maker_bounds (minimum, -1); }
repeat_parser_maker_bounds at_most (int maximum) const
{ return repeat_parser_maker_bounds (0, maximum); }
};
static const auto repeat = repeat_parser_maker();
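/**
Illustrative uses of the "repeat" maker above, inferred directly from the
operators it defines (not taken from the library's tests):
    repeat [p]               // zero or more p's
    repeat (3) [p]           // exactly three
    repeat (2, 5) [p]        // between two and five
    repeat.at_least (1) [p]  // one or more
    repeat.at_most (4) [p]   // at most four
*/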
/**
Implementations that are optimal in different scenarios.
For now, there is only one implementation.
\todo
One implementation would be to cache the remaining input range.
However, this must be shared between repeat_outcome(s) and repeat_output.
repeat_output should set it once it hits the final parse.
This should probably be done in a shared_ptr <optional <Input>>.
Now that's some extra investment, and it may make sense only for big parsers.
\todo
The reason why it is impossible to ask output() for the rest of the input range
after it's been exhausted, is that actors will be passed the input range and
never give it back.
If this is a common occurrence, then it may be possible to specialise
transform_parse <repeat_outcome <...>> for this case?
*/
enum class repeat_type { lazy };
template <class Policy, class SubParser, class Input,
repeat_type Implementation> struct repeat_outcome;
template <class Policy, class SubParser, class Input,
repeat_type Implementation> struct repeat_output;
struct repeat_output_range_tag {};
namespace operation {
template <> struct parse <repeat_parser_tag> {
template <class Policy, class SubParser, class Input>
repeat_outcome <Policy, SubParser, Input, repeat_type::lazy>
operator() (Policy const & policy,
repeat_parser <SubParser> const & parser, Input const & input) const
{
return repeat_outcome <Policy, SubParser, Input, repeat_type::lazy>
(policy, parser, input);
}
};
template <> struct describe <repeat_parser_tag> {
template <class Parser> const char * operator() (Parser const &) const
{ return "repeat"; }
};
} // namespace operation
/**** Implementation: lazy ****/
/**
With minimum and maximum lengths, it may be best to cache the sub-parses.
This would require the output to be a concatenation of the output of the cached
sub-parses plus the on-the-fly sub-parses.
For now, the constructor checks whether the parse will succeed, i.e., whether
the minimum number of sub-parses succeed.
When actually producing the output, the sub-parses are regenerated.
This may be faster in one case and slower in another.
\todo
It would be great if this would cause a static assertion failure in cases where
the sub-parser always succeeds.
Even I got caught out by this in writing test cases.
*/
template <class Policy, class SubParser, class Input>
struct repeat_outcome <Policy, SubParser, Input, repeat_type::lazy>
{
Policy policy;
repeat_parser <SubParser> const * parser;
Input input;
public:
repeat_outcome (Policy const & policy,
repeat_parser <SubParser> const & parser, Input const & input)
: policy (policy), parser (&parser), input (input) {}
};
namespace operation {
template <class Policy, class SubParser, class Input>
struct success <repeat_outcome <
Policy, SubParser, Input, repeat_type::lazy>>
{
bool operator() (
repeat_outcome <Policy, SubParser, Input, repeat_type::lazy>
const & outcome) const
{
// Check whether the minimum number of parses of the sub-parser
// can be obtained.
Input current = outcome.input;
for (int count = 0; count < outcome.parser->minimum; ++ count) {
if (count != 0)
current = parse_ll::skip_over (
outcome.policy.skip_parser(), current);
auto sub_outcome = parse_ll::parse (
outcome.policy, outcome.parser->sub_parser, current);
if (!::parse_ll::success (sub_outcome))
return false;
current = parse_ll::rest (sub_outcome);
}
return true;
}
};
template <class Policy, class SubParser, class Input>
struct output <repeat_outcome <
Policy, SubParser, Input, repeat_type::lazy>>
{
typedef typename parse_ll::detail::parser_output <
Policy, SubParser, Input>::type sub_output_type;
/**
Select the output type.
If the sub-parser outputs void, then the repeat_parser also outputs
void.
*/
typedef typename boost::mpl::if_ <std::is_same <sub_output_type, void>,
void,
repeat_output <Policy, SubParser, Input, repeat_type::lazy>
>::type output_type;
// If output_type is void, this never gets instantiated.
output_type operator() (
repeat_outcome <Policy, SubParser, Input, repeat_type::lazy>
const & outcome) const
{
assert (::parse_ll::success (outcome));
return repeat_output <Policy, SubParser, Input, repeat_type::lazy>
(outcome.policy,
*outcome.parser, outcome.parser->maximum, outcome.input);
}
};
template <class Policy, class SubParser, class Input>
struct rest <repeat_outcome <
Policy, SubParser, Input, repeat_type::lazy>>
{
Input operator() (
repeat_outcome <Policy, SubParser, Input, repeat_type::lazy>
const & outcome) const
{
assert (::parse_ll::success (outcome));
// Run the sub_parser through the input.
Input current = outcome.input;
for (int count = 0; count != outcome.parser->maximum; ++ count) {
auto sub_outcome = parse_ll::parse (
outcome.policy, outcome.parser->sub_parser,
// Only skip in between elements, not before.
(count == 0) ? current : parse_ll::skip_over (
outcome.policy.skip_parser(), current));
// If the parser has failed, current is still at rest() applied
// to the sub-parser that last succeeded: the skip parser has
// not been applied.
if (! ::parse_ll::success (sub_outcome)) {
assert (count >= outcome.parser->minimum);
return current;
}
current = ::parse_ll::rest (sub_outcome);
}
return current;
}
};
} // namespace operation
/**
Lazy implementation of the output of a repeat parser, as a range.
The implementation essentially wraps the outcome of the sub-parser, in
sub_outcome.
The range is empty if sub_outcome has not succeeded (or if maximum==0).
The first element is the output of sub_outcome.
drop() uses the rest of sub_outcome.
\todo When maximum==0, the sub_outcome does not even have to be instantiated.
*/
template <class Policy, class SubParser, class Input>
struct repeat_output <Policy, SubParser, Input, repeat_type::lazy>
{
Policy policy;
// If this were a reference, the class could not be copy-assigned.
repeat_parser <SubParser> const * parser;
int maximum;
typedef typename detail::parser_outcome <Policy, SubParser, Input>::type
sub_outcome_type;
sub_outcome_type sub_outcome;
public:
repeat_output (Policy const & policy,
repeat_parser <SubParser> const & parser,
int maximum, Input const & input)
: policy (policy), parser (&parser), maximum (maximum),
sub_outcome (parse_ll::parse (policy, parser.sub_parser, input)) {}
private:
friend class range::helper::member_access;
bool empty (direction::front) const {
if (maximum != 0 && ::parse_ll::success (sub_outcome))
return false;
else
return true;
}
auto first (direction::front) const
-> decltype (::parse_ll::output (sub_outcome))
{
assert (!empty (range::front));
return ::parse_ll::output (sub_outcome);
}
repeat_output drop_one (direction::front) const {
assert (!empty (range::front));
auto next_range = parse_ll::skip_over (
policy.skip_parser(), ::parse_ll::rest (sub_outcome));
return repeat_output (policy, *parser, maximum - 1, next_range);
}
};
} // namespace parse_ll
namespace range {
template <class Policy, class SubParser, class Input,
parse_ll::repeat_type Implementation>
struct tag_of_qualified <parse_ll::repeat_output <
Policy, SubParser, Input, Implementation>>
{ typedef parse_ll::repeat_output_range_tag type; };
} // namespace range
#endif // PARSE_LL_BASE_REPEAT_HPP_INCLUDED
Therapeutic enquiries about biological agents as a tool to identify safety aspects and patterns of use
Background: Biotechnological agents (BA) are increasingly being used in clinical practice. We aimed to determine whether enquiries about them to a therapeutic consultation service have also become more frequent, and to describe the information requested in these consultations.
Methods: We retrospectively reviewed 14 104 therapeutic consultations collected in a computerised database between 2000 and 2014. Enquiries about BA (monoclonal antibodies, fusion proteins or cytokine antagonists) were chosen. Information on the type of BA, underlying condition, type of enquiry and affiliation of the enquirer was retrieved and compared with data from consultations about other agents.
Results: During the study period, 365 enquiries about 30 different BA were received. Only 4% of them were received before 2004, while 48.8% were received after 2010. Rituximab, infliximab, adalimumab and etanercept were most frequently enquired about. Agent selection (n=184) and/or adverse effects (n=174) were the most frequent reasons for making an enquiry. Most enquiries about agent selection were made about an off-label use (n=164), mainly for systemic autoimmune diseases (n=61). Over half of the enquiries about adverse effects concerned their teratogenic potential (n=96). Compared with consultations about other agents, enquiries about BA more often requested an opinion (87.7% vs 77.7%), were made by physicians (89.9% vs 76.9%), came from a hospital (81.6% vs 44.5%) and regarded a specific patient (87.4% vs 74.5%).
Conclusions: Therapeutic consultations about BA are increasing. Most of them relate to the uncertainties of health professionals regarding any new medicine: off-label use, actual adverse effects or the teratogenic potential of the agents involved.
from os.path import join
COURSE_DATA = join('..', 'course-data')
details_source = join(COURSE_DATA, 'details')
xml_source = join(COURSE_DATA, 'raw_xml')
course_dest = join(COURSE_DATA, 'courses')
term_dest = join(COURSE_DATA)
mappings_path = join(COURSE_DATA, 'data-lists')
handmade_path = join(COURSE_DATA, 'data-mappings')
term_clbid_mapping_path = join(COURSE_DATA, 'courses', '_index')
def make_clbid_map_path(term):
    return join(term_clbid_mapping_path, str(term) + '.json')
def find_details_subdir(clbid):
    str_clbid = str(clbid).zfill(10)
    n_thousand = int(clbid) // 1000
    thousands_subdir = str(n_thousand * 1000).zfill(5)
    return join(thousands_subdir, str_clbid)
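# e.g. find_details_subdir(1234) -> '01000/0000001234'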
def make_course_path(clbid):
    return join(course_dest, f'{find_details_subdir(clbid)}.json')
def make_detail_path(clbid):
    return join(details_source, f'{find_details_subdir(clbid)}.json')
def make_xml_term_path(term):
    return join(xml_source, f'{term}.xml')
def make_built_term_path(term, kind):
    return f'{term}.{kind}'
export class UpdateSpeciesDto {
readonly id: number;
readonly label: string;
animalId?: number;
}
#include<stdio.h>
#include<string.h>
#define Max 2*100005
int main(){
char s[Max];
int a[Max+1][3],i,j,val;
scanf("%s",s);
int len = strlen(s);
a[len][0] = 0,a[len][1] = 0,a[len][2] = 0;
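/* Assumed reading of the recurrence below: a[i][j] is the maximum number of
   completed pieces obtainable from digit position i to the end, given that the
   digit sum of the piece currently being built is j (mod 3); a piece is closed
   whenever the running sum reaches 0 mod 3. */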
for(i=len-1;i>=0;i--){
for(j=0;j<3;j++){
a[i][j] = 0;
val = (j+s[i]-'0')%3;
if(val==0)
a[i][j]+=1;
a[i][j] += (a[i+1][0]<a[i+1][val])?a[i+1][val]:a[i+1][0];
}
}
printf("%d\n",a[0][0]);
return 0;
}
package controllers;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.inject.Inject;
import Model.UserModel;
import actors.SentimentActor;
import actors.TweetWordsActor;
import actors.HashtagActor;
import actors.HashtagActor.HashTagTweets;
import actors.LocationActor;
import actors.LocationActor.LocationTweets;
import actors.UserActor;
import actors.SocketActor;
import actors.UserActor.UserProfile;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
/*import akka.event.Logging;
import akka.event.LoggingAdapter;*/
import play.mvc.Controller;
import play.libs.F;
import play.mvc.*;
import static akka.pattern.PatternsCS.ask;
import views.html.*;
import java.util.List;
import twitter4j.Status;
import java.util.Map;
import org.slf4j.Logger;
import play.libs.streams.ActorFlow;
import akka.actor.*;
import akka.stream.*;
import javax.inject.Singleton;
import java.util.Optional;
import actors.TweetWordsActor.FindTweetWords;
/**
 * This controller contains an action to handle HTTP requests
 * to the application's home page.
 * It also contains the methods to fetch data from the Twitter API.
 *
 * @author v6
 */
public class NewController extends Controller {
public static ActorRef sentimentActor, tweetWordsActor, hashtagActor, locationActor, userActor;
@Inject
private ActorSystem actorSystem;
@Inject
private Materializer materializer;
@Inject
public NewController(ActorSystem system) {
sentimentActor = system.actorOf(SentimentActor.props());
tweetWordsActor = system.actorOf(TweetWordsActor.props());
hashtagActor = system.actorOf(HashtagActor.props());
locationActor = system.actorOf(LocationActor.props());
userActor = system.actorOf(UserActor.props());
}
/**
* An action that renders an HTML page with a welcome message.
* The configuration in the <code>routes</code> file means that
* this method will be called when the application receives a
* <code>GET</code> request with a path of <code>/</code>.
* <p>
* This method renders the index page and displays the message passed to the render() method.
*
* @return Result
* @author <NAME>
*/
public Result index() {
return ok(index.render("Welcome to TweetMiner"));
}
/**
* @param hashtag the hashtag with which the query is run
* @return a Future of a result to be rendered to the HTML page
* @author shireen
* An action that renders an HTML page with tweets for a hashtag query
*/
public CompletionStage<Result> getHashtags(String hashtag) {
return ask(hashtagActor, new HashTagTweets(hashtag), 5000)
.thenApply(hashtagTweets -> ok(locationTweets.render((List<Status>) hashtagTweets, "Hashtag Tweets")));
}
/**
* @param latitude geolocation attribute of the owner of the tweet
* @param longitude geolocation attribute of the owner of the tweet
* @return a Future of a result to be rendered to the HTML page
* @author <NAME>
* An action that returns an HTML page with tweets from the specific geolocation
*/
public CompletionStage<Result> getLocation(String latitude, String longitude) {
return ask(locationActor, new LocationTweets(latitude, longitude), 5000)
.thenApply(tweets -> ok(locationTweets.render((List<Status>) tweets, "Location Tweets")));
}
/**
* @param username the name of the user whose profile is retrieved
* @return a Future of a result to be rendered to the HTML page
* @author kritika
* An action that returns an HTML page with the profile of the tweet owner
*/
public CompletionStage<Result> getUserProfile(String username) throws Exception {
return ask(userActor, new UserProfile(username), 5000)
.thenApply(tweetUser -> ok(user.render((UserModel) tweetUser)));
}
/**
* @param query search terms for which word level statistics are generated
* @return a future of a result to be rendered to an HTML page
* @throws Exception
* @author nileesha
* An action that renders an HTML page with word-level statistics for an individual query
*/
public CompletionStage<Result> getTweetWords(String query) {
return ask(tweetWordsActor, new FindTweetWords(query), 5000)
.thenApply(tweetWordCount -> ok(tweetWords.render((Map<String, Long>) tweetWordCount, query)));
}
public WebSocket ws() {
System.out.println("testing ws...");
return WebSocket.Text.acceptOrResult(request -> {
if (sameOriginCheck(request)) {
return CompletableFuture.completedFuture(
F.Either.Right(ActorFlow.actorRef(SocketActor::props,
actorSystem, materializer)));
} else {
return CompletableFuture.completedFuture(F.Either.Left(forbidden()));
}
});
}
private boolean sameOriginCheck(Http.RequestHeader rh) {
final Optional<String> origin = rh.header("Origin");
if (!origin.isPresent()) {
//logger.error("originCheck: rejecting request because no Origin header found");
System.out.println("originCheck: rejecting request because no Origin header found");
return false;
} else if (originMatches(origin.get())) {
//logger.debug("originCheck: originValue = " + origin);
System.out.println("originCheck: originValue = " + origin);
return true;
} else {
//logger.error("originCheck: rejecting request because Origin header value " + origin + " is not in the same origin");
System.out.println("originCheck: rejecting request because Origin header value " + origin + " is not in the same origin");
return false;
}
}
private boolean originMatches(String origin) {
return origin.contains("localhost:9000") || origin.contains("localhost:19001");
}
}
def kt_configure():
    maven_install(
        name = "kotlin_rules_maven",
        fetch_sources = True,
        artifacts = [
            "com.google.code.findbugs:jsr305:3.0.2",
            "junit:junit:4.13-beta-3",
            "com.google.protobuf:protobuf-java:3.6.0",
            "com.google.protobuf:protobuf-java-util:3.6.0",
            "com.google.guava:guava:27.1-jre",
            "com.google.truth:truth:0.45",
            "com.google.auto.service:auto-service:1.0-rc5",
            "com.google.auto.service:auto-service-annotations:1.0-rc5",
            "com.google.auto.value:auto-value:1.6.5",
            "com.google.auto.value:auto-value-annotations:1.6.5",
            "com.google.dagger:dagger:2.35.1",
            "com.google.dagger:dagger-compiler:2.35.1",
            "com.google.dagger:dagger-producers:2.35.1",
            "javax.annotation:javax.annotation-api:1.3.2",
            "javax.inject:javax.inject:1",
            "org.pantsbuild:jarjar:1.7.2",
            "org.jetbrains.kotlinx:atomicfu-js:0.15.2",
            "org.jetbrains.kotlinx:kotlinx-coroutines-core:1.5.0",
            "org.jetbrains.kotlinx:kotlinx-coroutines-core-js:1.5.0",
            "org.jetbrains.kotlinx:kotlinx-coroutines-test:1.5.0",
            "org.jetbrains.kotlinx:kotlinx-coroutines-debug:1.5.0",
            "org.jetbrains.kotlinx:kotlinx-serialization-runtime:1.0-M1-1.4.0-rc",
        ],
        repositories = [
            "https://maven-central.storage.googleapis.com/repos/central/data/",
            "https://repo1.maven.org/maven2",
        ],
    )
    rules_proto_dependencies()
    rules_proto_toolchains()
    stardoc_repositories()
    bazel_skylib_workspace()
    android_sdk_repository(
        name = "androidsdk",
        build_tools_version = versions.ANDROID.BUILD_TOOLS,
    )
    android_ndk_repository(name = "androidndk")
    [
        native.local_repository(
            name = version,
            path = "src/main/starlark/%s" % version,
            repo_mapping = {
                "@dev_io_bazel_rules_kotlin": "@",
            },
        )
        for version in versions.CORE
    ]
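# Illustrative WORKSPACE usage (the load label is an assumption, not taken from
# this snippet):
#   load("//kotlin:dependencies.bzl", "kt_configure")
#   kt_configure()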
package presentation
import (
"github.com/the4thamigo-uk/paymentserver/pkg/domain/account"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/amount"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/bank"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/charges"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/date"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/entity"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/fx"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/money"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/payment"
"github.com/the4thamigo-uk/paymentserver/pkg/domain/sponsor"
)
// Payment is an external representation of the associated domain object.
type Payment struct {
Type string `json:"type"`
Entity
OrganisationID string `json:"organisation_id"`
Attributes Attributes `json:"attributes"`
}
// Entity is an external representation of the associated domain object
type Entity struct {
ID string `json:"id"`
Version int `json:"version"`
}
// Attributes is an external representation of the associated payment domain object.
type Attributes struct {
Amount string `json:"amount"`
BeneficiaryParty Account `json:"beneficiary_party"`
ChargesInformation Charges `json:"charges_information"`
Currency string `json:"currency"`
DebtorParty Account `json:"debtor_party"`
EndToEndReference string `json:"end_to_end_reference"`
Fx *Fx `json:"fx,omitempty"`
NumericReference string `json:"numeric_reference"`
PaymentID string `json:"payment_id"`
PaymentPurpose string `json:"payment_purpose"`
PaymentScheme string `json:"payment_scheme"`
PaymentType string `json:"payment_type"`
ProcessingDate string `json:"processing_date"`
Reference string `json:"reference"`
SchemePaymentSubType string `json:"scheme_payment_sub_type"`
SchemePaymentType string `json:"scheme_payment_type"`
SponsorParty Sponsor `json:"sponsor_party"`
}
// Account is an external representation of the associated domain object.
type Account struct {
AccountName string `json:"account_name"`
AccountNumber string `json:"account_number"`
AccountNumberCode string `json:"account_number_code"`
AccountType *int `json:"account_type,omitempty"`
Address string `json:"address"`
BankID string `json:"bank_id"`
BankIDCode string `json:"bank_id_code"`
Name string `json:"name"`
}
// Charges is an external representation of the associated domain object.
type Charges struct {
BearerCode string `json:"bearer_code"`
SenderCharges []Money `json:"sender_charges"`
ReceiverChargesAmount string `json:"receiver_charges_amount"`
ReceiverChargesCurrency string `json:"receiver_charges_currency"`
}
// Fx is an external representation of the associated domain object.
type Fx struct {
ContractReference string `json:"contract_reference"`
ExchangeRate string `json:"exchange_rate"`
OriginalAmount string `json:"original_amount"`
OriginalCurrency string `json:"original_currency"`
}
// Money is an external representation of the associated domain object.
type Money struct {
Amount string `json:"amount"`
Currency string `json:"currency"`
}
// Sponsor is an external representation of the associated domain object.
type Sponsor struct {
AccountNumber string `json:"account_number"`
BankID string `json:"bank_id"`
BankIDCode string `json:"bank_id_code"`
}
// NewEntity creates an entity identifier from the given id and version
func NewEntity(id string, ver int) Entity {
return Entity{
ID: id,
Version: ver,
}
}
// FromDomainPayment creates an external representation of the domain object
func FromDomainPayment(p payment.Payment) (*Payment, error) {
err := p.Validate()
if err != nil {
return nil, err
}
return &Payment{
Type: "Payment",
Entity: Entity{
ID: p.Entity.ID,
Version: p.Entity.Version,
},
OrganisationID: p.OrganisationID,
Attributes: Attributes{
Amount: p.Credit.Amount().String(),
Currency: p.Credit.Currency().String(),
BeneficiaryParty: FromDomainAccount(p.Beneficiary),
DebtorParty: FromDomainAccount(p.Debtor),
ChargesInformation: FromDomainCharges(p.Charges),
EndToEndReference: p.EndToEndRef,
Fx: FromDomainFx(p.Fx),
NumericReference: p.NumericRef,
PaymentID: p.ID,
PaymentPurpose: p.Purpose,
PaymentScheme: p.Scheme,
PaymentType: p.Type,
ProcessingDate: p.ProcessingDate.String(),
Reference: p.Reference,
SchemePaymentSubType: p.SchemeSubType,
SchemePaymentType: p.SchemeType,
SponsorParty: FromDomainSponsor(p.Sponsor),
}}, nil
}
// FromDomainAccount creates an external representation of the domain object
func FromDomainAccount(a account.Account) Account {
return Account{
AccountName: a.ID.Name,
AccountNumber: a.ID.Number,
AccountNumberCode: a.ID.Code.String(),
AccountType: a.ID.Type,
Address: a.Address,
BankID: a.BankID.ID,
BankIDCode: a.BankID.Code.String(),
Name: a.Name,
}
}
// FromDomainFx creates an external representation of the domain object
func FromDomainFx(fx *fx.Contract) *Fx {
if fx == nil {
return nil
}
return &Fx{
ContractReference: fx.Reference,
ExchangeRate: fx.Rate.String(),
OriginalAmount: fx.Domestic.Amount().String(),
OriginalCurrency: fx.Domestic.Currency().String(),
}
}
// FromDomainCharges creates an external representation of the domain object
func FromDomainCharges(c charges.Charges) Charges {
sc := []Money{}
for _, s := range c.Sender {
sc = append(sc, Money{
Amount: s.Amount().String(),
Currency: s.Currency().String(),
})
}
return Charges{
BearerCode: c.BearerCode.String(),
SenderCharges: sc,
ReceiverChargesAmount: c.Receiver.Amount().String(),
ReceiverChargesCurrency: c.Receiver.Currency().String(),
}
}
// FromDomainSponsor creates an external representation of the domain object
func FromDomainSponsor(s sponsor.Sponsor) Sponsor {
return Sponsor{
AccountNumber: s.Number,
BankID: s.BankID.ID,
BankIDCode: s.BankID.Code.String(),
}
}
// ToDomainPayment creates a domain object from the associated external representation
func (p Payment) ToDomainPayment() (*payment.Payment, error) {
a := &p.Attributes
credit, err := money.Parse(a.Amount, a.Currency)
if err != nil {
return nil, err
}
beneficiary, err := a.BeneficiaryParty.ToDomainAccount()
if err != nil {
return nil, err
}
debtor, err := a.DebtorParty.ToDomainAccount()
if err != nil {
return nil, err
}
charges, err := a.ChargesInformation.ToDomainCharges()
if err != nil {
return nil, err
}
fx, err := a.Fx.ToDomainFx()
if err != nil {
return nil, err
}
sponsor, err := a.SponsorParty.ToDomainSponsor()
if err != nil {
return nil, err
}
date, err := date.Parse(a.ProcessingDate)
if err != nil {
return nil, err
}
p2 := &payment.Payment{
Entity: entity.Entity{
ID: p.ID,
Version: p.Version,
},
OrganisationID: p.OrganisationID,
Credit: *credit,
Beneficiary: *beneficiary,
Debtor: *debtor,
ProcessingDate: date,
Charges: *charges,
Fx: fx,
Sponsor: *sponsor,
ID: a.PaymentID,
Type: a.PaymentType,
Purpose: a.PaymentPurpose,
Scheme: a.PaymentScheme,
SchemeType: a.SchemePaymentType,
SchemeSubType: a.SchemePaymentSubType,
NumericRef: a.NumericReference,
EndToEndRef: a.EndToEndReference,
Reference: a.Reference,
}
err = p2.Validate()
return p2, err
}
// ToDomainAccount creates a domain object from the associated external representation
func (a Account) ToDomainAccount() (*account.Account, error) {
acode, err := account.Parse(a.AccountNumberCode)
if err != nil {
return nil, err
}
bcode, err := bank.Parse(a.BankIDCode)
if err != nil {
return nil, err
}
return &account.Account{
ID: account.Identifier{
Name: a.AccountName,
Number: a.AccountNumber,
Code: *acode,
Type: a.AccountType,
},
Name: a.Name,
Address: a.Address,
BankID: bank.Identifier{
ID: a.BankID,
Code: *bcode,
},
}, nil
}
// ToDomainCharges creates a domain object from the associated external representation
func (c *Charges) ToDomainCharges() (*charges.Charges, error) {
sc := []money.Money{}
for _, s := range c.SenderCharges {
m, err := money.Parse(s.Amount, s.Currency)
if err != nil {
return nil, err
}
sc = append(sc, *m)
}
rc, err := money.Parse(c.ReceiverChargesAmount, c.ReceiverChargesCurrency)
if err != nil {
return nil, err
}
bc, err := charges.Parse(c.BearerCode)
if err != nil {
return nil, err
}
return &charges.Charges{
BearerCode: *bc,
Sender: sc,
Receiver: *rc,
}, nil
}
// ToDomainFx creates a domain object from the associated external representation
func (f *Fx) ToDomainFx() (*fx.Contract, error) {
	if f == nil {
		// a nil Fx is valid: the external fx block is optional
		return nil, nil
	}
	r, err := amount.Parse(f.ExchangeRate)
if err != nil {
return nil, err
}
d, err := money.Parse(f.OriginalAmount, f.OriginalCurrency)
if err != nil {
return nil, err
}
return &fx.Contract{
Reference: f.ContractReference,
Rate: r,
Domestic: *d,
}, nil
}
// ToDomainSponsor creates a domain object from the associated external representation
func (s *Sponsor) ToDomainSponsor() (*sponsor.Sponsor, error) {
bc, err := bank.Parse(s.BankIDCode)
if err != nil {
return nil, err
}
return &sponsor.Sponsor{
Number: s.AccountNumber,
BankID: bank.Identifier{
ID: s.BankID,
Code: *bc,
},
}, nil
}
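// RoundTrip is a minimal sketch (not part of the original file) showing the
// intended symmetry of this package: a valid domain payment converted to the
// external representation should convert back to an equivalent domain object.
func RoundTrip(p payment.Payment) (*payment.Payment, error) {
	ext, err := FromDomainPayment(p)
	if err != nil {
		return nil, err
	}
	return ext.ToDomainPayment()
}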
|
def attach(self, device, name=None, channel=None):
self.device = device
self.name = name
self.initialize(channel=channel) |
A dedicated paediatric departmental intranet site – is it worth it?
Aims There is currently no published evidence looking into the use of departmental websites in medicine. This study investigated the usage, acceptance and potential benefits of a paediatric departmental intranet site in a DGH over a three-year period. Methods We introduced a paediatric intranet site in our department five years ago, which includes all of our guidelines on an exclusively paperless basis. Further information relevant to mainly medical but also nursing staff is provided through the site. We looked at the usage of all pages on our paediatric intranet site by analysing monthly hit rates for both the departmental pages and selected guideline documents. We then followed this up by undertaking a survey among junior doctors to determine more detailed user feedback. Results The most frequented pages on our intranet site by far were the guideline pages, with a monthly average of about 140 hits for the paediatric guideline index and approximately 100 for the neonatal guideline index. Most of the other departmental pages received between 5 and 10 hits per month. However, when looking at maintenance, these were pages requiring very little time in upkeep. The departmental hit figures compared well to other hospital intranet pages, a fact reflected in a monthly usage survey showing four of the paediatric pages in the 25 most visited hospital pages. A survey of 40 junior medical staff working in the department confirmed that the intranet guidelines were easy to access, with no patient-safety risks identified and clear benefits for patient safety. Conclusion This is the first study providing quantitative data on the usage of a paediatric departmental intranet site in a medium-sized DGH in the UK. It shows that an intranet site can be a safe and worthwhile medium to disseminate important departmental information including medical guidelines. Most of all, the intranet site provides clear clinical governance benefits for departments, such as the ability to publish even minor guideline updates instantly. Our study is beneficial to any paediatric department considering the establishment of their own intranet site.
package fi.joniaromaa.duelsminigame.pregame;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import org.apache.commons.io.FileUtils;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Player;
import org.bukkit.scoreboard.DisplaySlot;
import org.bukkit.scoreboard.Objective;
import org.bukkit.util.BlockVector;
import fi.joniaromaa.duelsminigame.DuelsPlugin;
import fi.joniaromaa.duelsminigame.config.DuelsMinigameConfig;
import fi.joniaromaa.duelsminigame.config.DuelsMinigameMapConfig;
import fi.joniaromaa.duelsminigame.user.dataset.UserStartLocationDataStorage;
import fi.joniaromaa.duelsminigame.utils.LangUtils;
import fi.joniaromaa.minigameframework.api.game.PreMinigameStatus;
import fi.joniaromaa.minigameframework.builder.MinigameWorldBuilder;
import fi.joniaromaa.minigameframework.config.MinigameConfig;
import fi.joniaromaa.minigameframework.config.MinigameMapConfig;
import fi.joniaromaa.minigameframework.game.AbstractPreMinigame;
import fi.joniaromaa.minigameframework.player.BukkitUser;
import fi.joniaromaa.minigameframework.world.BlockBreakContractTypeType;
import fi.joniaromaa.minigameframework.world.WorldWeatherType;
import fi.joniaromaa.parinacorelibrary.bukkit.data.WordlessLocation;
import fi.joniaromaa.parinacorelibrary.bukkit.scoreboard.ScoreboardDynamicScore;
import fi.joniaromaa.parinacorelibrary.bukkit.scoreboard.ScoreboardManager;
import fi.joniaromaa.parinacorelibrary.bukkit.scoreboard.ScoreboardViewer;
import fi.joniaromaa.parinacorelibrary.bukkit.utils.LocationUtils;
import fi.joniaromaa.parinacorelibrary.bukkit.utils.WorldUtils;
import net.md_5.bungee.api.ChatColor;
public class DuelsPreMinigame extends AbstractPreMinigame
{
private Map<Location, Integer> spawnLocations;
private World world;
public DuelsPreMinigame(int gameId, DuelsMinigameConfig config)
{
this(gameId, config, config.getRandomMapConfig());
}
public DuelsPreMinigame(int gameId, MinigameConfig config, MinigameMapConfig mapConfig)
{
super(gameId, config, mapConfig);
this.scoreboardManager = new ScoreboardManager(DuelsPlugin.getPlugin(), this::setupScoreboard);
}
public void setup() throws Exception
{
FileUtils.copyDirectory(Paths.get(DuelsPlugin.getPlugin().getDataFolder().getPath(), "maps", this.getMapConfig().getId(), "world").toFile(), new File("duels_minigame-" + this.getGameId()));
this.world = MinigameWorldBuilder.builder().worldName("duels_minigame-" + this.getGameId())
.voidOnlyGenerator()
.saveChunks(false)
.setWeatherType(WorldWeatherType.CLEAR)
.doDaylightCycle(false)
.doFireTick(false)
.allowBlockPlace(this.getConfig().getDuelType().canPlaceBlocks())
.blockBreakContractType(this.getConfig().getDuelType().canBreakBlocks() ? BlockBreakContractTypeType.WORLD : BlockBreakContractTypeType.USER_PLACED)
.build(DuelsPlugin.getPlugin());
this.world.setKeepSpawnInMemory(true);
this.world.setPVP(true);
this.world.setSpawnFlags(false, false);
this.world.setTime(6000);
for(Chunk chunk : this.world.getLoadedChunks())
{
if (Boolean.TRUE.equals(this.shouldUnloadChunk(chunk)))
{
chunk.unload(false, false);
}
}
if (this.getMapConfig().hasGameArea())
{
WorldUtils.loadChunksBetween(this.world, (int)this.getMapConfig().getGameAreaBorder().getMin().getX() >> 4, (int)this.getMapConfig().getGameAreaBorder().getMin().getZ() >> 4, (int)this.getMapConfig().getGameAreaBorder().getMax().getX() >> 4, (int)this.getMapConfig().getGameAreaBorder().getMax().getZ() >> 4);
}
this.spawnLocations = new HashMap<>();
for(WordlessLocation location : this.getMapConfig().getSpawnLocations())
{
this.spawnLocations.put(location.toLocation(this.world), 0);
}
super.setup();
}
public Boolean shouldUnloadChunk(Chunk chunk)
{
if (!this.getMapConfig().hasGameArea() || !this.getMapConfig().isGameAreaRestricted())
{
return null;
}
BlockVector gameMinBorder = this.getMapConfig().getGameAreaBorder().getMin();
BlockVector gameMaxBorder = this.getMapConfig().getGameAreaBorder().getMax();
int x = chunk.getX();
int z = chunk.getZ();
if (gameMinBorder != null && gameMaxBorder != null && LocationUtils.outsideChunkCoords(x, z, (int)gameMinBorder.getX() >> 4, (int)gameMinBorder.getZ() >> 4, (int)gameMaxBorder.getX() >> 4, (int)gameMaxBorder.getZ() >> 4))
{
return true;
}
return false;
}
private void setupScoreboard(ScoreboardViewer viewer)
{
Objective sideBar = viewer.getScoreboard().registerNewObjective("sideBar", "dummy");
sideBar.setDisplayName(ChatColor.AQUA + "Duels");
sideBar.setDisplaySlot(DisplaySlot.SIDEBAR);
		// Blank spacer rows on a scoreboard must be unique entries, so each
		// spacer uses a different number of spaces.
		sideBar.getScore(" ").setScore(7);
		viewer.addDynamicScore(new ScoreboardDynamicScore(viewer, sideBar, this::updateScoreboardPlayerCount, 6));
		sideBar.getScore("  ").setScore(5);
		viewer.addDynamicScore(new ScoreboardDynamicScore(viewer, sideBar, this::updateScoreboardTime, 4));
		sideBar.getScore("   ").setScore(3);
		sideBar.getScore(LangUtils.getText(viewer.getPlayer().spigot().getLocale(), "scoreboard.map", this.getMapConfig().getId())).setScore(2);
		sideBar.getScore("    ").setScore(1);
sideBar.getScore(ChatColor.AQUA + "parina" + ChatColor.GREEN + "craft.net").setScore(0);
}
private void updateScoreboardPlayerCount(ScoreboardDynamicScore dynamicScore)
{
dynamicScore.set(LangUtils.getText(dynamicScore.getScoreboardViewer().getPlayer().spigot().getLocale(), "scoreboard.players", this.getPlayersCount(), this.getPlayersLimit()));
}
private void updateScoreboardTime(ScoreboardDynamicScore dynamicScore)
{
if (this.getStatus() == PreMinigameStatus.WAITING_FOR_PLAYERS)
{
dynamicScore.set(LangUtils.getText(dynamicScore.getScoreboardViewer().getPlayer().spigot().getLocale(), "scoreboard.waiting-for-players"));
}
else
{
dynamicScore.set(LangUtils.getText(dynamicScore.getScoreboardViewer().getPlayer().spigot().getLocale(), "scoreboard.countdown-starting", this.getTimeLeftToStartInSecs() + "s"));
}
}
@Override
public Optional<Location> onPlayerSpawn(Player player)
{
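		// Pick the spawn point currently holding the fewest players, then
		// record the assignment so later spawns balance across locations.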
Entry<Location, Integer> entry = this.spawnLocations.entrySet().stream().min((o1, o2) -> Integer.compare(o1.getValue(), o2.getValue())).get();
Location location = entry.getKey();
Integer value = entry.getValue();
this.spawnLocations.put(location, value + 1);
BukkitUser user = this.getPlayer(player);
user.getUser().setDataStorage(new UserStartLocationDataStorage(location));
this.getConfig().getDuelType().preGameSpawn(user);
return Optional.of(entry.getKey());
}
@Override
public void onPlayerJoin(Player player)
{
BukkitUser user = this.getPlayer(player);
this.getConfig().getDuelType().preGameJoin(user);
super.onPlayerJoin(player);
}
@Override
public void onPlayerQuit(Player player)
{
BukkitUser user = this.getPlayer(player);
UserStartLocationDataStorage startLocation = user.getUser().removeDataStorage(UserStartLocationDataStorage.class).orElse(null);
if (startLocation != null)
{
this.spawnLocations.put(startLocation.getLocation(), this.spawnLocations.get(startLocation.getLocation()) - 1);
}
this.getConfig().getDuelType().preGameLeave(user);
super.onPlayerQuit(player);
}
@Override
public void onCriticalException(Throwable e)
{
this.world.getPlayers().forEach((p) -> p.kickPlayer("Critical error"));
DuelsPlugin.getPlugin().getServer().unloadWorld(this.world, false);
try
{
FileUtils.deleteDirectory(this.world.getWorldFolder());
}
catch (IOException e1) //Failed to delete the directory
{
e1.printStackTrace();
}
}
@Override
public DuelsMinigameConfig getConfig()
{
return (DuelsMinigameConfig)super.getConfig();
}
@Override
public DuelsMinigameMapConfig getMapConfig()
{
return (DuelsMinigameMapConfig)super.getMapConfig();
}
@Override
public World getGameWorld()
{
return this.world;
}
}
|
/**
 * Returns the {@link Installation} if the device token exists and the
 * request is authenticated (Basic or Bearer).
*
* @param request {@link HttpServletRequest}
*
*/
public Optional<Installation> loadInstallationWhenAuthorized(HttpServletRequest request) {
String deviceToken = ClientAuthHelper.getDeviceToken(request);
Variant variant = loadVariantWhenAuthorized(deviceToken, null, request);
if (variant != null) {
return Optional.ofNullable(clientInstallationService
.findInstallationForVariantByDeviceToken(variant.getVariantID(), deviceToken));
}
return Optional.empty();
} |
How to Explore with Intent - Exploratory Testing Self-Management
By Maaret Pyhäjärvi
Exploratory testing is the wonderful idea that we can use our freedom of choice while testing, to learn as we go on and let that learning influence the choices we make next. The system we test is our external imagination, and it's our responsibility to give it the chance to whisper out all the information there is.
When we test, everyone is allowed to stretch their assigned boxes with exploration at least a little. Even the most test case oriented organizations will ask you to think, learn, and look around while executing your assigned tests. That's what makes you good at testing in the organization.
For more of a stretch, these organizations will allow for a few hours of freedom from the assigned box, to do some time-boxed exploratory testing for finding gaps your usual test case harness keeps you from spotting.
Others, like myself, work in the exploratory testing mode full time. In this mode, test cases (if any exist) are an output of the process instead of an input, created at a time when we know the most about a feature or product. We've learned a lot by the time we're done testing.
Regardless of whether your mode of exploratory testing is using it as a technique (extending your test cases), as a task (time-boxing exploration) or as an approach (engulfing all your thinking about testing), there's a critical skill of self-management you'll need to develop. You'll want to explore with intent, keep track of what you know and learn, and what more there is to learn. All of this will grow iteratively and incrementally as you do this type of testing.
Intertwining Different Testing Activities
With years of practice in skilled exploration, I find it now possible to do different activities simultaneously. I can strategize on a testing big picture and create tasks out of the ideas. I can execute testing on some of those ideas, configuring the environments, and learn from the different types of thinking. It's not really simultaneous; it's intertwined into these tiny bits of tasks, allowing my mind to wander and categorize things into a frame of reference.
It was not always possible. Actually, it was really hard. In particular, it is really hard to intertwine long-term (looking into future work) and short-term (looking at what is going on now) thinking, which are very different in nature. It's ok, because the ability to intertwine is not a requirement to get started. You would do well acknowledging where your abilities are and developing them further by practicing intertwining, but also allowing yourself time to focus on just one thing. With exploratory testing, the formula includes you: what works for you, as you are today.
A Practical Example
Imagine learning to drive a car. You're taking your first lessons at the driving school and after some bits of theory you know the basic mechanics of driving but have never done any of it.
You've been shown the three pedals, and when you stop to think, you know which one is which. You know the gear shifter and it's clear without telling what the steering wheel does (as long as you drive forward, that is). And finally comes the moment you're actually going to drive.
The driving instructor makes you drive a couple of laps around the parking lot and then tells you to drive out, amongst other cars. With the newness of all of this, your mind blanks and you remember nothing of the following half an hour. And if you remember something, it's the time when your car stopped at an embarrassing location because it was too hard to do the right combination of clutch and gears.
All the pieces are new and doing the right combination of even two of them at the same time is an effort. Think about it, when you looked if you could turn right, didn't you already start turning the wheel? And when you were stopped at the lights to turn, didn't it take significant effort to get moving and turn at the same time?
After years of driving, you're able to do the details without thinking much, and you're free to use your energy on optimizing your route of the day or the discussion you're having with the person next to you. Or choosing a new scenic route without messing up your driving flow.
It's the same with testing. There's a number of things to pay attention to. The details of the application you're operating. The details of the tools you need to use. The uncertainties of information. All your thoughts and knowledge. The information you get from others, and whether you trust it or not. The ideas of what to test and how to test it. The ideas of what would help you test again later. The expectations driving you to care about particular type of information. Combining any two of these at a time seems like a stretch and yet with exploratory testing, you're expected to keep track of all of these in some way. And most essentially from all the details, you're expected to build out and communicate both a long-term and a short-term view of the testing you've done and are about to do.
Learning To Self-manage
I find that a critical skill for an exploratory tester is the skill to self-manage, and to create a structure that helps you keep track of what you're doing. Nowadays, with some years of experience behind me, I just create mind maps. There is a simple tool I found to be brilliant for learning the right kind of thinking, and that tool is what I want to share with you.
When I say tool, I mean more of a thinking tool. The thinking tool here though has a physical structure.
For a relevant timeframe, I was going around testing with a notebook for a very particular purpose. Each page in the notebook represented a day of testing, and provided me a mechanism to keep track of my days. A page was split into four sections, with invisible titles I've illustrated in the picture: Mission (why am I here?), Charter (what I'm doing today?), Details (what am I keeping track of in details?) and Other Charters (what should I be doing before I'm done?).
At the start of a day of testing, I would open a fresh page and review my status after letting earlier learning sink in. Each of the pages would stay there to remind me of how my learning journey developed as the application was built up, one day at a time.
Mission
In the top left corner, I would stick a note about my mission, my purpose or, as I often liked to think of it, the sandbox I was hired to play in. What did the organization expect of me in terms of the information I would provide, having hired me as an exploratory tester? How could I describe that in just a few sentences?
For example, I was hired in an organization with ten teams, each working on a particular area of the product. My team was specializing in installations. That little note reminded me that while I could test anything outside the installations if I so wished, there was a sandbox that I was supposed to cover for relevant findings and it was unlikely that others would feel the urge to dig deep into my area.
They were likely to travel through it, but all the special things in the area, they would probably rather avoid. If I would be digging through someone else's area, nothing would stop me. But I might leave mine unattended. I might feel that I used all this time, and therefore I'm done, even if I was only shallowly covering my own area.
The mission note reminded me of the types of information the organization considered relevant, and the area of responsibility I felt I had accepted. It served as an anchor when the whispers of the product lead me elsewhere to explore.
Charter
In the top right corner was my note about the work of the day: the Charter. Each morning I would imagine what I was trying to achieve today - only to learn most evenings I had done something completely different. A charter is a framing of what I'm testing, and as I learn, it changes over time. It's acceptable to start out with one idea and end up with something completely different when you are finished.
The note of the day was another anchor keeping me honest. With exploration, I'm not required to stick to my own plans. But I'm required to be in control of my plans in the sense that I don't fool myself into believing something is done just because the time is used.
Continuing on my example with the Installations team, I might set up my charter of the day to be 2 installations with a deep dive into what actually gets installed. Or I might set it up to be 20 installations, looking through each shallowly. Or I might decide to focus on a few particular features and their combinations. If I saw something while testing that triggered another thought, I could follow it. But at the end of the day, I could review my idea from the morning: did I do 20 shallow installations like I thought I would? If I didn't, what did I do? What am I learning for myself from how things turned out?
Details
In the bottom right corner, I would pile up notes. At first, these were just lines of text I would write that would often fill the page next to the one I was working on. Later I realized that for me there were three things I wanted to make notes of: the bugs, the questions, and the ideas for test automation or test cases. My notes extended to have a categorization shorthand.
With any of the detailed ideas, I could choose to stop doing the testing I was doing, and attend to the detail right away. I could decide that instead of focusing on exploring to find new information, I could create an automated test case from a scenario I cooked up from exploration. I could decide that instead of completing what I was planning on doing today, I would write the great bug report with proper investigation behind it. I could decide to find a product owner, a support representative, a programmer, or my manager to get an answer for a burning question I had. Or, I could make note of any of these with minimum effort, and stick to my idea of what I would do to test the application before attending to the details.
I learned that people like me can generate so many questions, that if I don't have a personal throttling mechanism, I can block others from focusing on other things. So I realized that collecting the questions and asking them in regular intervals was a good discipline for me. And while looking through my questions, I would notice that I had answers to more questions myself than I first thought.
With each detail, the choice is mine. Shall I act on this detail immediately, or could it wait? Am I losing something relevant if I don't get my answer right away? Is the bug I found something the developer would rather know now, than at the end of my working day? Do I want to stop being in exploratory mode to improve my documentation, or to pair with a developer to implement a piece of test automation, or do I rather time-box that work for another day from the idea I had while testing?
Other Charters
In the bottom left corner, I would make notes of exploratory testing work I realized needed doing while I was testing. I would write down ideas small and large that I would park for future reference, sometimes realizing later that some of those I had already covered and just forgotten. Sometimes I would add them to my backlog of work to do, and sometimes tuning the existing backlog of work to support choosing focus points of upcoming testing days.
Some of my ideas would require creating code for purposes of extending the reach of exploration. Some ideas would require getting intimately familiar with the details of log files and database structures. Each new idea would build on the learning that had happened before, making me reassess my strategy of what information I would invest in to have available first.
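For those of us who keep such notes digitally, the same four-quadrant page can be sketched as a tiny data structure. This is only an illustration: the field names mirror the four sections described above, and the example entries are stand-ins.

from dataclasses import dataclass, field
from typing import List

@dataclass
class SessionPage:
    """One day of exploratory testing: the four-quadrant notebook page."""
    mission: str   # why am I here?
    charter: str   # what am I doing today?
    details: List[str] = field(default_factory=list)          # bugs, questions, automation ideas
    other_charters: List[str] = field(default_factory=list)   # parked work for future days

page = SessionPage(mission="Cover the installations area",
                   charter="20 shallow installations")
page.details.append("BUG: installer hangs when the network drops")
page.other_charters.append("Deep dive into what actually gets installed")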
You're In Control
The tool isn't there to control you, it's there to give you a structure to make your work visible for you. You get to decide what happens when you explore, and in what order. If you need to go through a particular flow 15 times from various angles, you do that. If you find it hard to think about strategy and importance of particular tasks when you're deep in doing testing, you reserve time separately for strategic thinking.
With the days passing, and notes taken, I could go back seeing what types of sessions I would typically have. There would be days where I'd just survey a functionality, to figure out a plan of charters without focus on details. There would be target rich functionalities, where the only detail I could pay attention to was the bugs. Over time, I could pay attention to doing things intentionally with particular focus, and intentionally intertwined. I could stop to think, how different days and different combinations made me feel. I learned to combine things in ways that were useful for my organization, but also maximized the fun I could have while testing in a versatile manner.
While most value was in learning to self-manage my testing work around learning, there was also a side impact. When someone would show up to ask about what I had done and was doing, I could just flip a page and give an account of what had been going on. Seeing the structure created trust in those who were interested in my progress.
As an active learner, you will get better every day you spend on testing. Exploratory testing treats test design, test execution and learning as parallel, mutually supportive activities to find unknown unknowns. Doing things in parallel can be difficult, and testing needs to adjust to the tester's personal skill level and style. Your skill to self-manage your work and your learning - making learning and reflection a habit - is what differentiates skilled exploratory testing from randomly putting testing activities together.
I believe that what keeps us testers from being treated as a commodity is learning. It's the same with programmers: learners outperform those who don't. Exploratory testing has learning at its core.
About Maaret
Maaret Pyhäjärvi is a software professional with testing emphasis. She identifies as an empirical technologist, a tester and a programmer, a catalyst for improvement and a speaker. Her day job is working with a small software product development team as a hands-on testing specialist. On the side, she teaches exploratory testing and makes a point of adding new, relevant feedback for test-automation heavy projects through skilled exploratory testing. In addition to being a tester and a teacher, she is a serial volunteer for different non-profits driving forward the state of software development. She blogs regularly at http://visible-quality.blogspot.fi. |
// The Computer Language Benchmarks Game
// http://benchmarksgame.alioth.debian.org/
//
// contributed by the Rust Project Developers
// contributed by <NAME>
// contributed by TeXitoi
// modified by <NAME>
// contributed by <NAME> (@cristicbz)
// contributed by <NAME>
// contributed by <NAME>
// modified by hanabi1224, use portable_simd on nightly rust
// removed parallelization
#![feature(portable_simd)]
use std::ops::*;
use std::simd::Simd;
const LANES: usize = 4;
type IntType = i32;
const LANES_INT: IntType = LANES as IntType;
type F64Vec = Simd<f64, LANES>;
type IntVec = Simd<IntType, LANES>;
const ONE: IntVec = IntVec::splat(1);
const TWO: IntVec = IntVec::splat(2);
const LANES_VEC: IntVec = IntVec::splat(LANES_INT);
const J_ARRAY: IntVec = IntVec::from_array([0, 1, 2, 3]);
fn main() {
let n = std::env::args()
.nth(1)
.and_then(|n| n.parse().ok())
.unwrap_or(100);
let answer = spectralnorm(n);
println!("{:.9}", answer);
}
#[inline]
fn spectralnorm(n: usize) -> f64 {
// round up to multiple of 4
let n = (n + LANES - 1) / LANES * LANES;
    // This program overflows when n > 23170
    debug_assert!(n <= 23170);
let array_size = n / LANES;
let mut u = vec![F64Vec::splat(1.0); array_size];
let mut v = vec![F64Vec::default(); array_size];
for _ in 0..10 {
mult_at_av(&u, &mut v, array_size);
mult_at_av(&v, &mut u, array_size);
}
(inner_product(&u, &v) / inner_product(&v, &v)).sqrt()
}
#[inline]
fn mult_at_av(v: &[F64Vec], out: &mut [F64Vec], array_size: usize) {
let mut tmp = vec![F64Vec::default(); array_size];
dot_vecs(v, &mut tmp, |i, j| inv_a(i, j));
dot_vecs(&tmp, out, |i, j| inv_a(j, i));
}
#[inline]
fn dot_vecs<F>(v: &[F64Vec], out: &mut [F64Vec], inv_a: F)
where
F: Fn(IntVec, IntVec) -> F64Vec + Sync,
{
out.iter_mut().enumerate().for_each(|(i, slot)| {
*slot = dot(i as IntType, v, &inv_a);
});
}
fn dot<F>(i: IntType, v: &[F64Vec], inv_a: F) -> F64Vec
where
F: Fn(IntVec, IntVec) -> F64Vec,
{
let mut result = F64Vec::default();
for k in 0..LANES_INT {
        // We're computing everything in chunks of four so the indices of output
        // are 4*i, 4*i+1, 4*i+2 and 4*i+3.
let i_vec = IntVec::splat(LANES_INT * i + k);
        // column indices of A (equivalent to indices of v)
let mut j_vec = J_ARRAY;
let mut sum = F64Vec::default();
        // Each slot gets its own sum, which is further computed in
        // four f64 lanes (which are summed at the end).
for j in 0..v.len() {
sum += v[j] / inv_a(i_vec, j_vec);
j_vec += LANES_VEC;
}
// Sum the four lanes for each slot.
result[k as usize] = sum.horizontal_sum();
}
result
}
/// Calculate 1 / A[i, j] for each element of i, j
#[inline]
fn inv_a(i: IntVec, j: IntVec) -> F64Vec {
let sum_i_j = i + j;
let v = sum_i_j * (sum_i_j + ONE) / TWO + i + ONE;
F64Vec::from_array([v[0] as f64, v[1] as f64, v[2] as f64, v[3] as f64])
}
/// Vectorised form of inner product
#[inline]
fn inner_product(v: &[F64Vec], u: &[F64Vec]) -> f64 {
// (1) compute inner product across four lanes.
let r = u
.iter()
.zip(v)
.map(|(&x, &y)| x * y)
.fold(F64Vec::default(), |s, x| s + x);
// (2) sum the four lanes.
r.horizontal_sum()
}
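// Illustrative sanity check (not in the original submission): for this matrix,
// A[0][0] = 1 / ((0 + 0) * (0 + 0 + 1) / 2 + 0 + 1) = 1, so inv_a should put
// 1.0 in every lane when i = j = 0.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn inv_a_first_element() {
        let v = inv_a(IntVec::splat(0), IntVec::splat(0));
        assert_eq!(v[0], 1.0);
    }
}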
|
// Author: <NAME> <<EMAIL>>
// controllers package -
package controllers
import (
. "TheGorgeous/controllers/helpers"
"net/http"
)
// Struct type indexController -
type indexController struct {
LayoutHelper
}
// IndexController function -
func IndexController() *indexController {
return &indexController{}
}
// Index method -
func (this *indexController) Index(w http.ResponseWriter, r *http.Request) {
PageData["PageTitle"] = "Index"
this.Render(w, r,
PageData,
"layout.gohtml", "index/index.gohtml")
}
// About method -
func (this *indexController) About(w http.ResponseWriter, r *http.Request) {
PageData["PageTitle"] = "About"
this.Render(w, r,
PageData,
"layout.gohtml", "index/about.gohtml")
}
// ContactUs method -
func (this *indexController) ContactUs(w http.ResponseWriter, r *http.Request) {
PageData["PageTitle"] = "Contact Us"
this.Render(w, r,
PageData,
"layout.gohtml", "index/contact.gohtml")
}
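// RegisterRoutes is a minimal sketch (not part of the original file) showing
// how these handlers plug into net/http; the route paths are assumptions.
func RegisterRoutes() {
	c := IndexController()
	http.HandleFunc("/", c.Index)
	http.HandleFunc("/about", c.About)
	http.HandleFunc("/contact", c.ContactUs)
}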
|
/**
* Translates the NetworkTables into the Codex with the corresponding enumeration name
*/
public void parseFromNetworkTables() {
for(E e : EnumUtils.getEnums(mEnumClass)) {
String key = e.name().toUpperCase();
Double value = kNetworkTable.getEntry(key).getDouble(Double.NaN);
mCodex.set(e, value);
}
} |
/**
* Load the input, and then run the algorithm under the
* controlled timing setting.
*/
public void run() {
algorithm.loadInput(input);
GenResults gs = new GenResults(algorithm, NUMREPEATS);
gs.run();
this.results = algorithm.getResults();
this.ticks = gs.getTicks();
this.time = gs.getTime();
} |
// src/lib/utils/requestPhotos.tests.ts
import { LegacyNativeModules } from "lib/NativeModules/LegacyNativeModules"
import { Platform } from "react-native"
import { openPicker } from "react-native-image-crop-picker"
import { requestPhotos } from "./requestPhotos"
jest.mock("react-native-image-crop-picker", () => ({
openPicker: jest.fn(),
}))
describe("requestPhotos", () => {
describe("on iOS", () => {
it("calls the native module on iOS 14 and above", () => {
Platform.OS = "ios"
Object.defineProperty(Platform, "Version", {
get: () => 15,
})
const mockRequestPhotos = jest.fn()
LegacyNativeModules.ARPHPhotoPickerModule.requestPhotos = mockRequestPhotos
requestPhotos()
expect(mockRequestPhotos).toHaveBeenCalled()
})
})
describe("on Android", () => {
it("shows the react native photo picker", () => {
Platform.OS = "android"
Object.defineProperty(Platform, "Version", {
get: () => 23,
})
requestPhotos()
expect(openPicker).toHaveBeenCalled()
})
})
})
|
package registrar
import (
"github.com/cosmos/cosmos-sdk/simapp/params"
"github.com/forbole/egldjuno/logging"
"github.com/forbole/egldjuno/types"
"github.com/forbole/egldjuno/modules/messages"
"github.com/forbole/egldjuno/modules/modules"
"github.com/forbole/egldjuno/client"
"github.com/forbole/egldjuno/db"
)
// Registrar represents a modules registrar. This allows to build a list of modules that can later be used by
// specifying their names inside the TOML configuration file.
type Registrar interface {
BuildModules(types.Config, *params.EncodingConfig, db.Database, *client.Proxy) modules.Modules
}
// ------------------------------------------------------------------------------------------------------------------
var _ Registrar = &EmptyRegistrar{}
// EmptyRegistrar represents a Registrar which does not register any custom module
type EmptyRegistrar struct{}
// BuildModules implements Registrar
func (*EmptyRegistrar) BuildModules(
types.Config, *params.EncodingConfig, db.Database, *client.Proxy,
) modules.Modules {
return nil
}
// ------------------------------------------------------------------------------------------------------------------
// DefaultRegistrar represents a registrar that allows to handle the default egldjuno modules
type DefaultRegistrar struct {
parser messages.MessageAddressesParser
}
// NewDefaultRegistrar builds a new DefaultRegistrar
func NewDefaultRegistrar(parser messages.MessageAddressesParser) *DefaultRegistrar {
return &DefaultRegistrar{
parser: parser,
}
}
// BuildModules implements Registrar
func (r *DefaultRegistrar) BuildModules(
cfg types.Config, encodingCfg *params.EncodingConfig, db db.Database, _ *client.Proxy,
) modules.Modules {
return modules.Modules{
messages.NewModule(r.parser, encodingCfg.Marshaler, db),
}
}
// ------------------------------------------------------------------------------------------------------------------
// GetModules returns the list of module implementations based on the given module names.
// For each module name that is specified but not found, an error is logged.
func GetModules(mods modules.Modules, names []string, logger logging.Logger) []modules.Module {
var modulesImpls []modules.Module
for _, name := range names {
module, found := mods.FindByName(name)
if found {
modulesImpls = append(modulesImpls, module)
} else {
logger.Error("Module is required but not registered. Be sure to register it using registrar.RegisterModule", "module", name)
}
}
return modulesImpls
}
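// A hypothetical wiring sketch (not part of the original file); the parser,
// config, database, proxy and logger values are assumptions of the caller:
//
//   reg := NewDefaultRegistrar(parser)
//   mods := reg.BuildModules(cfg, encodingCfg, database, proxy)
//   active := GetModules(mods, []string{"messages"}, logger)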
|
def covertype_train(project_id,
region,
source_table_name,
gcs_root,
dataset_id,
evaluation_metric_name,
evaluation_metric_threshold,
model_id,
version_id,
replace_existing_version,
hypertune_settings=HYPERTUNE_SETTINGS,
dataset_location='US'):
query = generate_sampling_query(
source_table_name=source_table_name, num_lots=10, lots=[1, 2, 3, 4])
training_file_path = '{}/{}'.format(gcs_root, TRAINING_FILE_PATH)
create_training_split = bigquery_query_op(
query=query,
project_id=project_id,
dataset_id=dataset_id,
table_id='',
output_gcs_path=training_file_path,
dataset_location=dataset_location)
query = generate_sampling_query(
source_table_name=source_table_name, num_lots=10, lots=[8])
validation_file_path = '{}/{}'.format(gcs_root, VALIDATION_FILE_PATH)
create_validation_split = bigquery_query_op(
query=query,
project_id=project_id,
dataset_id=dataset_id,
table_id='',
output_gcs_path=validation_file_path,
dataset_location=dataset_location)
query = generate_sampling_query(
source_table_name=source_table_name, num_lots=10, lots=[9])
testing_file_path = '{}/{}'.format(gcs_root, TESTING_FILE_PATH)
create_testing_split = bigquery_query_op(
query=query,
project_id=project_id,
dataset_id=dataset_id,
table_id='',
output_gcs_path=testing_file_path,
dataset_location=dataset_location)
tune_args = [
'--training_dataset_path',
create_training_split.outputs['output_gcs_path'],
'--validation_dataset_path',
create_validation_split.outputs['output_gcs_path'], '--hptune', 'True'
]
job_dir = '{}/{}/{}'.format(gcs_root, 'jobdir/hypertune',
kfp.dsl.RUN_ID_PLACEHOLDER)
hypertune = mlengine_train_op(
project_id=project_id,
region=region,
master_image_uri=TRAINER_IMAGE,
job_dir=job_dir,
args=tune_args,
training_input=hypertune_settings)
get_best_trial = retrieve_best_run_op(project_id, hypertune.outputs['job_id'])
job_dir = '{}/{}/{}'.format(gcs_root, 'jobdir', kfp.dsl.RUN_ID_PLACEHOLDER)
train_args = [
'--training_dataset_path',
create_training_split.outputs['output_gcs_path'],
'--validation_dataset_path',
create_validation_split.outputs['output_gcs_path'], '--alpha',
get_best_trial.outputs['alpha'], '--max_iter',
get_best_trial.outputs['max_iter'], '--hptune', 'False'
]
train_model = mlengine_train_op(
project_id=project_id,
region=region,
master_image_uri=TRAINER_IMAGE,
job_dir=job_dir,
args=train_args)
eval_model = evaluate_model_op(
dataset_path=str(create_testing_split.outputs['output_gcs_path']),
model_path=str(train_model.outputs['job_dir']),
metric_name=evaluation_metric_name)
with kfp.dsl.Condition(
eval_model.outputs['metric_value'] > evaluation_metric_threshold):
deploy_model = mlengine_deploy_op(
model_uri=train_model.outputs['job_dir'],
project_id=project_id,
model_id=model_id,
version_id=version_id,
runtime_version=RUNTIME_VERSION,
python_version=PYTHON_VERSION,
replace_existing_version=replace_existing_version)
if USE_KFP_SA == 'True':
    kfp.dsl.get_pipeline_conf().add_op_transformer(use_gcp_secret('user-gcp-sa'))
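# A hypothetical compilation sketch (not part of the original file): assuming
# covertype_train is decorated as a KFP pipeline earlier in the file, the
# standard KFP compiler turns it into a deployable spec.
#
#   from kfp.compiler import Compiler
#   Compiler().compile(covertype_train, 'covertype_train.yaml')
|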
def currentsellings():
items = Item.query.filter(
(Item.user_id == session["user_id"]) & (Item.sold == 0)).all()
return render_template("currentsellings.html", items=items) |
/// Create a new instance of the `GameBoyRom`.
pub fn new(rom_bytes: &'rom [u8]) -> Self {
Self {
rom_data: rom_bytes,
}
} |
------------------------------------------------------------------------------
module Network.HTTP.Media.MediaType.Tests (tests) where
------------------------------------------------------------------------------
import qualified Data.ByteString.UTF8 as BS
import qualified Data.Map as Map
------------------------------------------------------------------------------
import Control.Monad (join, liftM)
import Data.String (fromString)
import Data.Maybe (isNothing)
import Data.Monoid ((<>), mconcat)
import Distribution.TestSuite.QuickCheck
------------------------------------------------------------------------------
import Network.HTTP.Media.Match
import Network.HTTP.Media.MediaType ((/?), (/.))
import Network.HTTP.Media.MediaType.Internal
import Network.HTTP.Media.MediaType.Gen
------------------------------------------------------------------------------
tests :: [Test]
tests =
[ testEq
, testShow
, testFromString
, testHas
, testGet
, testMatches
, testMoreSpecificThan
, testMostSpecific
, testParse
]
------------------------------------------------------------------------------
-- Equality is derived, but we test it here to get 100% coverage.
testEq :: Test
testEq = testGroup "Eq"
[ testProperty "==" $ do
media <- genMediaType
return $ media == media
, testProperty "/=" $ do
media <- genMediaType
media' <- genDiffMediaType media
return $ media /= media'
]
------------------------------------------------------------------------------
testShow :: Test
testShow = testProperty "show" $ do
media <- genMediaType
return $ parse (BS.fromString $ show media) == Just media
------------------------------------------------------------------------------
testFromString :: Test
testFromString = testProperty "fromString" $ do
media <- genMediaType
return $ media == fromString (show media)
------------------------------------------------------------------------------
testHas :: Test
testHas = testGroup "(/?)"
[ testProperty "True for property it has" $ do
media <- genWithParams
return $ all (media /?) (Map.keys $ parameters media)
, testProperty "False for property it doesn't have" $ do
media <- genWithParams
return $ all (not . (stripParams media /?))
(Map.keys $ parameters media)
]
------------------------------------------------------------------------------
testGet :: Test
testGet = testGroup "(/.)"
[ testProperty "Retrieves property it has" $ do
media <- genWithParams
let is n v = (&& media /. n == Just v)
return $ Map.foldrWithKey is True $ parameters media
, testProperty "Nothing for property it doesn't have" $ do
media <- genWithParams
let is n _ = (&& isNothing (stripParams media /. n))
return $ Map.foldrWithKey is True $ parameters media
]
------------------------------------------------------------------------------
testMatches :: Test
testMatches = testGroup "matches"
[ testProperty "Equal values match" $ do
media <- genMediaType
return $ matches media media
, testProperty "Same sub but different main don't match" $ do
media <- genMaybeSubStar
main <- genDiffByteString $ mainType media
return $ not (matches media media { mainType = main }) &&
not (matches media { mainType = main } media)
, testProperty "Same main but different sub don't match" $ do
media <- genConcreteMediaType
sub <- genDiffByteString $ subType media
return . not $ matches media media { subType = sub } ||
matches media { subType = sub } media
, testProperty "Different parameters don't match" $
liftM (not . dotJoin matches stripParams) genWithParams
, testProperty "Missing parameters match" $ do
media <- genWithParams
let media' = stripParams media
return $ matches media media' && not (matches media' media)
, testGroup "*/*"
[ testProperty "Matches itself" $ matches anything anything
, testProperty "Matches anything on the right" $
liftM (`matches` anything) genMediaType
, testProperty "Doesn't match more specific on the left" $
liftM (not . matches anything) genMaybeSubStar
]
, testGroup "type/*"
[ testProperty "Matches itself" $ liftM (join matches) genSubStar
, testProperty "Matches on the right" $
liftM (dotJoin (flip matches) subStarOf) genConcreteMediaType
, testProperty "Doesn't match on the left" $
liftM (not . dotJoin matches subStarOf) genConcreteMediaType
]
]
------------------------------------------------------------------------------
testMoreSpecificThan :: Test
testMoreSpecificThan = testGroup "isMoreSpecific"
[ testProperty "Against */*" $
liftM (`moreSpecificThan` anything) genMaybeSubStar
, testProperty "With */*" $
liftM (not . moreSpecificThan anything) genMaybeSubStar
, testProperty "Against type/*" $
liftM (dotJoin (flip moreSpecificThan) subStarOf) genConcreteMediaType
, testProperty "With type/*" $
liftM (not . dotJoin moreSpecificThan subStarOf) genConcreteMediaType
, testProperty "With parameters" $
liftM (dotJoin (flip moreSpecificThan) stripParams) genWithParams
, testProperty "Different types" $ do
media <- genWithoutParams
media' <- genDiffMediaTypeWith genWithoutParams media
return . not $
moreSpecificThan media media' || moreSpecificThan media' media
, testProperty "Different parameters" $ do
media <- genWithParams
params <- genDiffParameters $ parameters media
return . not $ moreSpecificThan media media { parameters = params }
]
------------------------------------------------------------------------------
testMostSpecific :: Test
testMostSpecific = testGroup "mostSpecific"
[ testProperty "With */*" $ do
media <- genConcreteMediaType
return $ mostSpecific media anything == media &&
mostSpecific anything media == media
, testProperty "With type/*" $ do
media <- genConcreteMediaType
let m1 = media { parameters = Map.empty }
m2 = m1 { subType = "*" }
return $ mostSpecific m1 m2 == m1 && mostSpecific m2 m1 == m1
, testProperty "With parameters" $ do
media <- genMediaType
params <- genParameters
let media' = media { parameters = params }
media'' = media { parameters = Map.empty }
return $ mostSpecific media' media'' == media' &&
mostSpecific media'' media' == media'
, testProperty "Different types" $ do
media <- genConcreteMediaType
media' <- genDiffMediaTypeWith genConcreteMediaType media
return $ mostSpecific media media' == media
, testProperty "Left biased" $ do
media <- genConcreteMediaType
media' <- genConcreteMediaType
let media'' = media' { parameters = parameters media }
return $ mostSpecific media media'' == media &&
mostSpecific media'' media == media''
]
------------------------------------------------------------------------------
testParse :: Test
testParse = testProperty "parse" $ do
media <- genMediaType
let main = mainType media
sub = subType media
params = parameters media
let (Just parsed) = parse $ main <> "/" <> sub <> mconcat
(map (uncurry ((<>) . (<> "=") . (";" <>))) $ Map.toList params)
return $ parsed == media
------------------------------------------------------------------------------
-- | Like 'join', but applies the given function to the first argument.
dotJoin :: (a -> a -> b) -> (a -> a) -> a -> b
dotJoin f g a = f (g a) a
|
/**
* Provides abstraction to the drive mechanism of the robot.
*
* @author Benjamin Landers
*/
public class DriveTrain extends Subsystem
{
public final String TAG = "DriveTrain";
public Jaguar left, right;
public Gyro gyro;
public DriveTrain(int left, int right, int gyro)
{
this.left = new Jaguar(left);
this.right = new Jaguar(right);
this.gyro = new Gyro(gyro);
Log.v(TAG, "Drive train subsystem instantiated.");
}
public void initDefaultCommand()
{
setDefaultCommand(new DriveCommand());
Log.v(TAG, "Drive train default command set to DriveCommand().");
}
} |
import unittest
import torch
from util.data_handling.string_generator import IndependentGenerator
from edit_distance.task.dataset_generator_synthetic import EditDistanceDatasetGenerator
ALPHABET_SIZE = 6
class TestEDDatasetGenerationSynthetic(unittest.TestCase):
def __init__(self, methodName):
super().__init__(methodName)
self.generator = IndependentGenerator(alphabet_size=ALPHABET_SIZE, seed=0)
self.dataset = EditDistanceDatasetGenerator(
N_batches={"train": 4, "val": 2, "test": 3},
batch_size={"train": 5, "val": 3, "test": 4},
len_sequence={"train": 10, "val": 10, "test": 10},
max_changes={"train": 4, "val": 4, "test": 4},
string_generator=self.generator, seed=0)
def test_shape_sequences(self):
assert self.dataset.sequences['train'].shape == (4, 5, 10), "Sequences train shape is not correct"
assert self.dataset.sequences['val'].shape == (2, 3, 10), "Sequences val shape is not correct"
assert self.dataset.sequences['test'].shape == (3, 4, 10), "Sequences test shape is not correct"
def test_shape_distances(self):
assert self.dataset.distances['train'].shape == (4, 5, 5), "Distances train shape is not correct"
assert self.dataset.distances['val'].shape == (2, 3, 3), "Distances val shape is not correct"
assert self.dataset.distances['test'].shape == (3, 4, 4), "Distances test shape is not correct"
def test_range_elements(self):
        assert torch.all(self.dataset.sequences['train'] < ALPHABET_SIZE), "Sequences train elements out of alphabet range"
        assert torch.all(self.dataset.sequences['val'] < ALPHABET_SIZE), "Sequences val elements out of alphabet range"
        assert torch.all(self.dataset.sequences['test'] < ALPHABET_SIZE), "Sequences test elements out of alphabet range"
|
package logs
import (
"fmt"
"io"
"os"
"strings"
"time"
rotatelog "github.com/lestrrat/go-file-rotatelogs"
"github.com/op/go-logging"
"github.com/opensourceways/app-robot-server/config"
)
const (
logDir = "logs"
logSoftLink = "latest_log"
Module = "app-robot"
)
var Logger = logging.MustGetLogger(Module)
func Init() error {
	var backends []logging.Backend
cLog := config.Application.Log
if cLog.SaveFile {
backend, err := registerFile(cLog)
if err != nil {
return err
}
backends = append(backends, backend)
}
backends = append(backends, registerStdout(cLog))
logging.SetBackend(backends...)
return nil
}
func registerFile(log config.Log) (logging.Backend, error) {
if ok := pathExists(logDir); !ok {
fmt.Println("create log directory")
_ = os.Mkdir(logDir, os.ModePerm)
}
fileWriter, err := rotatelog.New(
logDir+string(os.PathSeparator)+"%Y-%m-%d.log",
rotatelog.WithLinkName(logSoftLink),
rotatelog.WithMaxAge(7*24*time.Hour),
rotatelog.WithRotationTime(24*time.Hour),
)
if err != nil {
return nil, err
}
level, err := logging.LogLevel(log.Level)
if err != nil {
return nil, err
}
return createBackend(fileWriter, log, level), nil
}
func registerStdout(log config.Log) logging.Backend {
level, err := logging.LogLevel(log.Level)
if err != nil {
fmt.Println(err)
}
return createBackend(os.Stdout, log, level)
}
func createBackend(w io.Writer, log config.Log, level logging.Level) logging.Backend {
backend := logging.NewLogBackend(w, log.Prefix, 0)
stoutWriter := false
if w == os.Stdout {
stoutWriter = true
}
format := getLogFormatter(stoutWriter)
backendLeveled := logging.AddModuleLevel(logging.NewBackendFormatter(backend, format))
backendLeveled.SetLevel(level, Module)
return backendLeveled
}
func getLogFormatter(stdoutWriter bool) logging.Formatter {
pattern := `%{time:2006/01/02 - 15:04:05.000} %{shortfile} %{color}▶ [%{level:.6s}] %{color:reset}%{message}`
if !stdoutWriter {
pattern = strings.Replace(pattern, "%{color}", "", -1)
pattern = strings.Replace(pattern, "%{color:reset}", "", -1)
}
return logging.MustStringFormatter(pattern)
}
func pathExists(path string) bool {
	_, err := os.Stat(path)
	return err == nil
}
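// A minimal usage sketch (not part of the original file); it assumes
// config.Application.Log has been populated before Init is called:
//
//   if err := logs.Init(); err != nil {
//       panic(err)
//   }
//   logs.Logger.Info("service started")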
|
/*
* Check if there are messages waiting to be collected
*/
	public boolean hasMessagesAvailable() {
		if( inmsgs!=null ) { return !inmsgs.isEmpty(); }
		return false;
	}
/**************************************************************/
/**
Checks a file segment header within a B-tree root page and updates
the segment header space id.
@return TRUE if valid */
static
bool
btr_root_fseg_adjust_on_import(
fseg_header_t* seg_header,
page_zip_des_t* page_zip,
ulint space,
mtr_t* mtr)
{
ulint offset = mach_read_from_2(seg_header + FSEG_HDR_OFFSET);
if (offset < FIL_PAGE_DATA
|| offset > UNIV_PAGE_SIZE - FIL_PAGE_DATA_END) {
return(FALSE);
} else if (page_zip) {
mach_write_to_4(seg_header + FSEG_HDR_SPACE, space);
page_zip_write_header(page_zip, seg_header + FSEG_HDR_SPACE,
4, mtr);
} else {
mlog_write_ulint(seg_header + FSEG_HDR_SPACE,
space, MLOG_4BYTES, mtr);
}
return(TRUE);
} |
‘Ni kijana!’ is a Swahili phrase that means ‘It’s a boy!’ This exciting exclamation was heard on September 3rd when we celebrated the arrival of a new baby in the gorilla family troop at Disney’s Animal Kingdom! Our avid Disney Parks Blog readers might be thinking, “Didn’t you just announce a new gorilla baby?” Yes, we did! This is the second gorilla birth this year, and the fifth in the park’s history. The first gorilla birth at Disney’s Animal Kingdom occurred in 1997 before the park opened, the second baby arrived in 1999 and the third was born in 2010.
Western lowland gorillas are born with dark brown to black hair, black skin, and brown or reddish hair on their head. Mature males of breeding age develop silver or gray coloring on their backs and are consequently known as “silverbacks.” Juvenile and young-adult male gorillas are called “blackbacks” because they have yet to develop the silver markings. The diet of Western lowland gorillas is very diverse, including over 200 distinct species of plants, mainly leaves, buds, shoots, roots, bark and fruit. In the tropical rainforests of western Africa where they live, termites and ants are also great snack options. Unfortunately, West African rainforests are shrinking due to human encroachment and land clearing connected to agriculture and other pressures. Coltan is a mineral used in the production of cell phones, and mining for this mineral makes habitats unsuitable for gorillas and other wildlife. Recycling old cell phones and other electronics is a great way to reduce the need for coltan mining and, in turn, help conserve these amazing animals and their habitat.
All Western lowland gorilla babies born at Disney’s Animal Kingdom have been a part of the Association of Zoos and Aquariums’ (AZA) Species Survival Plan (SSP), which ensures long-term survival of species by helping AZA-accredited zoos and aquariums manage species’ genetic diversity through detailed records of individual animals. Western lowland gorillas are a critically endangered species that face threats that include disease and illegal bushmeat hunting in the wild. In addition to supporting the gorilla SSP, Disney also contributes to gorilla conservation through the Disney Worldwide Conservation Fund (DWCF), which has provided more than $700,000 in conservation grants to 14 nonprofit organizations focused on research and conservation of Western lowland gorillas, cross-river gorillas, Grauer’s gorillas and mountain gorillas.
While we celebrate and welcome the newest baby gorilla here at Disney’s Animal Kingdom, remember that you can join us in taking action to help gorillas and other wildlife. Recycle old cell phones to protect gorillas’ habitats from mining, and visit Disney.com/conservation to learn more about Disney’s conservation efforts and discover new ways to support conservation near you!
If you missed the last gorilla baby announcement, click here to view the story and catch up on all the excitement! |
/**
* Fills buffer with town's name
* @param buff buffer start
* @param t we want to get name of this town
* @param last end of buffer
* @return pointer to terminating '\0'
*/
char *GetTownName(char *buff, const Town *t, const char *last)
{
TownNameParams par(t);
return GetTownName(buff, &par, t->townnameparts, last);
} |
def encapsulate(*args):
    # Multiple argument lists: encapsulate each non-empty one separately.
    if len(args) > 1:
        return list(Sequential.encapsulate(x) for x in args if len(x) > 0)
    layers = args[0]
    # A single layer needs no wrapper.
    if len(layers) == 1:
        return layers[0]
    # Compose the layers into one callable that applies them in order.
    def f(x):
        for layer in layers:
            x = layer(x)
        return x
    f.__doc__ = '\n'.join([str(y) for y in layers])
    f.layers = layers
    return f
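
# Illustrative usage sketch (hypothetical layers; assumes encapsulate is
# callable as the module-level function shown above). Layers compose in order.
if __name__ == "__main__":
    double = lambda x: x * 2
    increment = lambda x: x + 1
    pipeline = encapsulate([double, increment])
    assert pipeline(3) == 7   # increment(double(3))
    print(pipeline.__doc__)   # the stacked layer descriptions
|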
def add_message(self, msg_id, location, msg):
module, obj, line, col_offset = location[1:]
sigle = self.make_sigle(msg_id)
full_msg = [sigle, module, obj, str(line), msg]
self.msgs += [[sigle, module, obj, str(line)]]
self.gui.msg_queue.put(full_msg) |
/**
* Enable or disable notification messages to clients
* every cache_time/2 seconds
*/
public void enableNotify(boolean enable) {
if (enable && notifyTimer == null) {
notifyTimer = new NotifyTimer();
Timer timer = new Timer();
// Schedule the stored task so that cancel() below stops this same instance.
timer.schedule(notifyTimer, 0, cache_time*1000/2);
} else if (!enable && notifyTimer != null) {
notifyTimer.cancel();
notifyTimer = null;
}
} |
/// Instantiates `Self` from bytes.
///
/// The bytes are not fully verified (i.e., they may not represent a valid BLS point). Only the
/// byte-length is checked.
pub fn deserialize(bytes: &[u8]) -> Result<Self, Error> {
if bytes.len() == SIGNATURE_BYTES_LEN {
let mut pk_bytes = [0; SIGNATURE_BYTES_LEN];
pk_bytes[..].copy_from_slice(bytes);
Ok(Self {
bytes: pk_bytes,
_phantom_signature: PhantomData,
_phantom_public_key: PhantomData,
})
} else {
Err(Error::InvalidByteLength {
got: bytes.len(),
expected: SIGNATURE_BYTES_LEN,
})
}
} |
Photo: Heidi Carpenter
On Saturday, heavy rains moved into Keeneland as lady luck left and two of the heavy favorites of the day fell. But despite the rain and losses, I had a wonderful time; if anything, the rains allowed me to take some interesting photos.
Champion sprinter Groupie Doll was happy to pose for photographs--she knew what cameras meant! She was defending her title in the Thoroughbred Club of America (Gr. II) sprint stakes.
Groupie Doll pauses to touch her nose to her handler.
Groupie Doll
Gypsy Robin was a talented challenger...
...as was Judy the Beauty.
Groupie Doll, Rajiv Maragh up.
The fillies and mares in the Thoroughbred Club of America stakes walk to the starting gate as the sky threatened to release the first of the torrential rains. Groupie Doll ended up finishing 3rd, behind Gypsy Robin and winner Judy the Beauty.
Winding Way, a full sister to Kauai Katie, was entered in the First Lady Stakes (Gr. I). Her blaze suggested the origins of her name.
Daisy Devine is a personal favorite!
Say (IRE), a stunning daughter of Galileo, was perfectly behaved while being saddled.
Then the sky opened up and the first of the day's heavy rains came down. Here, Better Lucky and her handler hurry for cover.
The fillies and mares prepare to leave the saddling paddock in the rain.
Despite her name, Dayatthespa was initially reluctant to leave the shelter of her saddling stall.
The rains were so heavy that I could barely see the start of the First Lady.
Better Lucky wins the First Lady, with Dayatthespa a close 2nd and Daisy Devine 3rd.
It was then announced that the feature race of the day, the Shadwell Turf Mile, was off the turf. It was clear that Keeneland's polytrack was also drenched, as displayed by the field of the Dixiana Breeder's Futurity (Gr. II), which was won by We Miss Artie.
Silver Max was ready to make history in the Shadwell Turf Mile (Gr. I).
Charlie LoPresti-trained Turallure was as handsome as ever in the rain.
Jockey John Velazquez arrives for another LoPresti trainee, Wise Dan.
LoPresti paid careful attention to his star horse; this included stretching out his forelegs as Velazquez looked on.
Riders up for the Shadwell!
Silver Max, Robby Albarado up.
Wise Dan, John Velazquez up.
The start of the Shadwell.
Wise Dan attempts to claim a good position in the race. He ended up being bumped and going wide around both turns.
Wise Dan in the middle of his powerful stretch drive. He came close but was unable to catch the front-running Silver Max.
Wise Dan being walked back to his barn by his disappointed handlers. |
<filename>src/analysis/special_functions.rs
use crate::{
analysis::{functions::Info as FuncInfo, imports::Imports},
codegen::Visibility,
config::GObject,
library::{Type as LibType, TypeId},
version::Version,
};
use std::{collections::BTreeMap, str::FromStr};
#[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)]
pub enum Type {
Compare,
Copy,
Equal,
Free,
Ref,
Display,
Unref,
Hash,
}
impl FromStr for Type {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
use self::Type::*;
match s {
"compare" => Ok(Compare),
"copy" => Ok(Copy),
"equal" => Ok(Equal),
"free" | "destroy" => Ok(Free),
"is_equal" => Ok(Equal),
"ref" | "ref_" => Ok(Ref),
"unref" => Ok(Unref),
"hash" => Ok(Hash),
_ => Err(format!("Unknown type '{}'", s)),
}
}
}
#[derive(Debug, Clone)]
pub struct TraitInfo {
pub glib_name: String,
pub version: Option<Version>,
pub first_parameter_mut: bool,
}
type TraitInfos = BTreeMap<Type, TraitInfo>;
#[derive(Clone, Copy, Eq, Debug, Ord, PartialEq, PartialOrd)]
pub enum FunctionType {
StaticStringify,
}
#[derive(Debug, Clone)]
pub struct FunctionInfo {
pub type_: FunctionType,
pub version: Option<Version>,
}
type FunctionInfos = BTreeMap<String, FunctionInfo>;
#[derive(Debug, Default)]
pub struct Infos {
traits: TraitInfos,
functions: FunctionInfos,
}
impl Infos {
pub fn traits(&self) -> &TraitInfos {
&self.traits
}
pub fn traits_mut(&mut self) -> &mut TraitInfos {
&mut self.traits
}
pub fn has_trait(&self, type_: Type) -> bool {
self.traits.contains_key(&type_)
}
pub fn functions(&self) -> &FunctionInfos {
&self.functions
}
}
/// Returns true on functions that take an instance as single argument and
/// return a string as result.
fn is_stringify(func: &mut FuncInfo, parent_type: &LibType, obj: &GObject) -> bool {
if func.parameters.c_parameters.len() != 1 {
return false;
}
if !func.parameters.c_parameters[0].instance_parameter {
return false;
}
if let Some(ret) = func.ret.parameter.as_mut() {
if ret.lib_par.typ != TypeId::tid_utf8() {
return false;
}
if func.name == "to_string" {
// Rename to to_str to make sure it doesn't clash with ToString::to_string
assert!(func.new_name.is_none(), "A `to_string` function can't be renamed manually. It's automatically renamed to `to_str`");
func.new_name = Some("to_str".to_owned());
// As to not change old code behaviour, assume non-nullability outside
// enums and flags only, and exclusively for to_string. Function inside
// enums and flags have been appropriately marked in Gir.
if !obj.trust_return_value_nullability
&& !matches!(parent_type, LibType::Enumeration(_) | LibType::Bitfield(_))
{
*ret.lib_par.nullable = false;
}
}
// Cannot generate Display implementation for Option<>
!*ret.lib_par.nullable
} else {
false
}
}
fn update_func(func: &mut FuncInfo, type_: Type) -> bool {
if !func.commented {
use self::Type::*;
match type_ {
Copy | Free | Ref | Unref => func.hidden = true,
Hash | Compare | Equal => func.visibility = Visibility::Private,
Display => func.visibility = Visibility::Public,
};
}
true
}
pub fn extract(functions: &mut [FuncInfo], parent_type: &LibType, obj: &GObject) -> Infos {
let mut specials = Infos::default();
let mut has_copy = false;
let mut has_free = false;
let mut destroy = None;
for (pos, func) in functions.iter_mut().enumerate() {
if is_stringify(func, parent_type, obj) {
let return_transfer_none = func.ret.parameter.as_ref().map_or(false, |ret| {
ret.lib_par.transfer == crate::library::Transfer::None
});
// Assume only enumerations and bitfields can return static strings
let returns_static_ref = return_transfer_none
&& matches!(parent_type, LibType::Enumeration(_) | LibType::Bitfield(_))
// We cannot mandate returned lifetime if this is not generated.
// (And this prevents an unused std::ffi::CStr from being emitted below)
&& func.status.need_generate();
if returns_static_ref {
// Override the function with a &'static (non allocating) -returning string
// if the transfer type is none and it matches the above heuristics.
specials.functions.insert(
func.glib_name.clone(),
FunctionInfo {
type_: FunctionType::StaticStringify,
version: func.version,
},
);
}
// Some stringifying functions can serve as Display implementation
if matches!(
func.name.as_str(),
"to_string" | "to_str" | "name" | "get_name"
) {
// FUTURE: Decide which function gets precedence if multiple Display prospects exist.
specials.traits.insert(
Type::Display,
TraitInfo {
glib_name: func.glib_name.clone(),
version: func.version,
first_parameter_mut: false,
},
);
}
} else if let Ok(type_) = func.name.parse() {
if func.name == "destroy" {
destroy = Some((func.glib_name.clone(), pos));
continue;
}
if !update_func(func, type_) {
continue;
}
if func.name == "copy" {
has_copy = true;
} else if func.name == "free" {
has_free = true;
}
let first_parameter_mut = func
.parameters
.c_parameters
.first()
.map_or(false, |p| p.ref_mode == super::ref_mode::RefMode::ByRefMut);
specials.traits.insert(
type_,
TraitInfo {
glib_name: func.glib_name.clone(),
version: func.version,
first_parameter_mut,
},
);
}
}
if has_copy && !has_free {
if let Some((glib_name, pos)) = destroy {
let ty_ = Type::from_str("destroy").unwrap();
let func = &mut functions[pos];
update_func(func, ty_);
specials.traits.insert(
ty_,
TraitInfo {
glib_name,
version: func.version,
first_parameter_mut: true,
},
);
}
}
specials
}
// Some special functions (e.g. `copy` on refcounted types) should be exposed
pub fn unhide(functions: &mut [FuncInfo], specials: &Infos, type_: Type) {
if let Some(func) = specials.traits().get(&type_) {
let func = functions
.iter_mut()
.find(|f| f.glib_name == func.glib_name && !f.commented);
if let Some(func) = func {
func.visibility = Visibility::Public;
func.hidden = false;
}
}
}
pub fn analyze_imports(specials: &Infos, imports: &mut Imports) {
for (type_, info) in specials.traits() {
use self::Type::*;
match type_ {
Copy if info.first_parameter_mut => {
imports.add_with_version("glib::translate::*", info.version)
}
Compare => imports.add_with_version("std::cmp", info.version),
Display => imports.add_with_version("std::fmt", info.version),
Hash => imports.add_with_version("std::hash", info.version),
Equal => imports.add_with_version("glib::translate::*", info.version),
_ => {}
}
}
for info in specials.functions().values() {
match info.type_ {
FunctionType::StaticStringify => {
imports.add_with_version("std::ffi::CStr", info.version)
}
}
}
}
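
// A quick illustrative check of the FromStr aliases above (sketch only; the
// assertions follow directly from the match arms in `from_str`):
#[cfg(test)]
mod from_str_sketch {
    use super::Type;

    #[test]
    fn parses_aliases() {
        assert_eq!("destroy".parse::<Type>(), Ok(Type::Free));
        assert_eq!("ref_".parse::<Type>(), Ok(Type::Ref));
        assert!("nonsense".parse::<Type>().is_err());
    }
}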
|
""" This is a custom JUnit XML reporter for AltWalker implemented using the built-in PrintReporter as basis.
It provides the ability to either generate a JUnit XML report containing a testcase element per each model
or a single testcase that abstracts the whole execution, no matter how many models have been exercised.
"""
from altwalker.reporter import Reporter
from junit_xml import TestSuite, TestCase
import json
import datetime
import time
import pdb
import sys
def _add_timestamp(string):
return "[{}] {}".format(datetime.datetime.now(), string)
def _format_step(step):
if step.get("modelName"):
string = "{}.{}".format(step["modelName"], step["name"])
else:
string = "{}".format(step["name"])
return string
def _format_step_info(step):
string = ""
if step.get("data"):
data = json.dumps(step["data"], sort_keys=True, indent=4)
string += "\nData:\n{}\n".format(data)
if step.get("unvisitedElements"):
unvisited_elements = json.dumps(step["unvisitedElements"], sort_keys=True, indent=4)
string += "\nUnvisited Elements:\n{}\n".format(unvisited_elements)
return string
class _Formater(Reporter):
"""Format the message for reporting."""
def step_start(self, gstep):
"""Report the starting execution of a step.
Args:
gstep (:obj:`dict`): The step that will be executed next.
"""
message = "{} Running".format(_format_step(gstep))
message += _format_step_info(gstep)
self._log(_add_timestamp(message))
if ('modelName' in gstep) and ('name' in gstep):
if gstep['modelName'] not in self.models:
self.models[gstep['modelName']] = {}
self.models[gstep['modelName']]['step_started_at'] = time.time()
def step_end(self, gstep, step_result):
"""Report the result of the step execution.
Args:
gstep (:obj:`dict`): The step just executed.
step_result (:obj:`dict`): The result of the step.
"""
error = step_result.get("error")
status = "FAIL" if error else "PASSED"
message = "{} Status: {}\n".format(_format_step(gstep), status)
error_message = ""
output = step_result.get("output")
result = step_result.get("result")
if output:
message += "Output:\n{}".format(output)
if result:
message += "\nResult: {}\n".format(json.dumps(result, sort_keys=True, indent=4))
if error:
error_message += "\nError: {}\n".format(error["message"])
if error.get("trace"):
error_message += "\n{}\n".format(error["trace"])
self._log(_add_timestamp(message))
self._log(_add_timestamp(error_message))
#self.debugger.set_trace()
#gstep['type']
#gstep['modelName']
#gstep['name']
#gstep['properties']
#gstep['data']
if ('modelName' in gstep) and ('name' in gstep):
if gstep['modelName'] not in self.models:
self.models[gstep['modelName']] = {}
self.models[gstep['modelName']]['status'] = status
total_elapsed_time = 0
if 'total_elapsed_time' in self.models[gstep['modelName']]:
total_elapsed_time = self.models[gstep['modelName']]['total_elapsed_time']
elapsed_time = 0
if 'step_started_at' in self.models[gstep['modelName']]:
elapsed_time = time.time() - self.models[gstep['modelName']]['step_started_at']
self.models[gstep['modelName']]['total_elapsed_time'] = total_elapsed_time + elapsed_time
#if output:
if 'output' not in self.models[gstep['modelName']]:
self.models[gstep['modelName']]['output'] = ''
self.models[gstep['modelName']]['output'] = self.models[gstep['modelName']]['output'] + "\n" + _add_timestamp(message)
if error:
if 'error' not in self.models[gstep['modelName']]:
self.models[gstep['modelName']]['error'] = ''
# self.models[gstep['modelName']]['error'] = self.models[gstep['modelName']]['error'] + "\n" + error.get('message', '') + error.get('trace', '')
self.models[gstep['modelName']]['error'] = self.models[gstep['modelName']]['error'] + "\n" + _add_timestamp(error_message)
def error(self, step, message, trace=None):
"""Report an unexpected error.
Args:
step (:obj:`dict`): The step executed when the error occurred.
message (:obj:`str`): The message of the error.
trace (:obj:`str`): The traceback.
"""
if step:
string = "Unexpected error occurred while running {}.\n".format(_format_step(step))
else:
string = "Unexpected error occurred.\n"
string += "{}\n".format(message)
if trace:
string += "\n{}\n".format(trace)
self._log(_add_timestamp(string))
class CustomJunitReporter(_Formater):
"""This reporter builds a custom JUnit XML report, with one testcase per model or a single testcase for the whole execution."""
def __init__(self):
self.debugger = pdb.Pdb(skip=['altwalker.*'], stdout=sys.stdout)
self.models = {}
self.statistics = None
def _log(self, string):
"""Prints the string using the buildin :func:`print` function."""
print(string)
def set_statistics(self, statistics):
self.statistics = statistics
def _format_statistics(self):
"""Pretty-print statistics."""
s = "== Global Statistics ==\n"
total_models = self.statistics["totalNumberOfModels"]
completed_models = self.statistics["totalCompletedNumberOfModels"]
model_coverage = completed_models * 100 // total_models
s += "Model Coverage: {}%\n".format(model_coverage)
s += "Number of Models: {}\n".format(str(total_models))
s += "Completed Models: {}\n".format(str(completed_models))
s += "Failed Models: {}\n".format(self.statistics["totalFailedNumberOfModels"])
s += "Incomplete Models: {}\n".format(self.statistics["totalIncompleteNumberOfModels"])
s += "Not Executed Models: {}\n".format(self.statistics["totalNotExecutedNumberOfModels"])
s += "====\n"
return s
def to_xml_string(self, testsuite_name="AltWalker", generate_single_testcase=False, single_testcase_name="whole_model"):
test_cases = []
if not generate_single_testcase:
for model in self.models:
output = self.models[model].get('output', None)
error_output = self.models[model].get('error', None)
testcase = TestCase(model, "models", self.models[model]['total_elapsed_time'], output, error_output)
if error_output:
testcase.add_failure_info("failure", error_output)
test_cases.append(testcase)
else:
output = self._format_statistics()
error_output = ""
total_elapsed_time = 0
for model in self.models:
output += self.models[model].get('output', "")
error_output += self.models[model].get('error', "")
total_elapsed_time += self.models[model].get('total_elapsed_time', 0)
testcase = TestCase(single_testcase_name, "models", total_elapsed_time, output, error_output)
if error_output:
testcase.add_failure_info("failure", error_output)
test_cases.append(testcase)
ts = TestSuite(testsuite_name, test_cases)
# pretty printing is on by default but can be disabled using prettyprint=False
return TestSuite.to_xml_string([ts])
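
# Illustrative wiring sketch (names and values here are assumptions, not
# AltWalker API guarantees): feed planner statistics to the reporter, then
# write the JUnit XML to disk. AltWalker itself would call step_start/step_end
# on the reporter while walking the models.
if __name__ == "__main__":
    reporter = CustomJunitReporter()
    reporter.set_statistics({
        "totalNumberOfModels": 1,
        "totalCompletedNumberOfModels": 1,
        "totalFailedNumberOfModels": 0,
        "totalIncompleteNumberOfModels": 0,
        "totalNotExecutedNumberOfModels": 0,
    })
    with open("report.xml", "w") as fh:
        fh.write(reporter.to_xml_string(generate_single_testcase=True))
|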
/**
* The code will cease to work once the enum values are no longer in descending order.
*/
@Test
public void testDescendingOrder() {
long previous = Long.MAX_VALUE;
for (PowersOfTen value : PowersOfTen.values()) {
assertThat(value.getLongValue()).isLessThan(previous);
previous = value.getLongValue();
}
} |
/**
* @author Peter Williams
*/
public class ParentManagedDDBeanTableModel extends InnerTableModel {
// Fields required to interpret parentBean correctly (comments wrt/ JspConfig)
private CommonDDBean parentBean;
private String parentPropertyName; // = JspConfig.PROPERTY;
private List<TableEntry> properties;
private ParentPropertyFactory beanFactory; // JspConfigPropertyFactory
private Class entryPanelClass; // Class to use for new rows, or null if use default.
public ParentManagedDDBeanTableModel(XmlMultiViewDataSynchronizer synchronizer,
CommonDDBean parentBean, String propertyName, List<TableEntry> properties,
Class entryPanelClass, ParentPropertyFactory factory) {
super(synchronizer, computeColumnNames(properties), computeColumnWidths(properties));
this.parentBean = parentBean;
this.parentPropertyName = propertyName;
this.properties = properties;
this.beanFactory = factory;
this.entryPanelClass = entryPanelClass;
}
private static String [] computeColumnNames(List<TableEntry> props) {
String [] names = new String [props.size()];
for(int i = 0; i < props.size(); i++) {
names[i] = props.get(i).getColumnName();
}
return names;
}
private static int [] computeColumnWidths(List<TableEntry> props) {
int [] width = new int [props.size()];
for(int i = 0; i < props.size(); i++) {
width[i] = props.get(i).getColumnWidth();
}
return width;
}
public void setValueAt(Object value, int rowIndex, int columnIndex) {
// !PW TODO implement this if supporting inline editing of table values.
// SecurityRoleRef securityRoleRef = ejb.getSecurityRoleRef(rowIndex);
// switch (columnIndex) {
// case 0:
// securityRoleRef.setRoleName((String) value);
// break;
// case 1:
// securityRoleRef.setRoleLink((String) value);
// break;
// case 2:
// securityRoleRef.setDescription((String) value);
// break;
// }
// modelUpdatedFromUI();
// fireTableCellUpdated(rowIndex, columnIndex);
}
public int getRowCount() {
// CommonDDBean [] children = (CommonDDBean[]) parentBean.getValues(parentPropertyName);
// return children != null ? children.length : 0;
return parentBean != null ? parentBean.size(parentPropertyName) : 0;
}
public Object getValueAt(int rowIndex, int columnIndex) {
Object result = null;
Object row = parentBean.getValue(parentPropertyName, rowIndex);
if(row instanceof CommonDDBean) {
TableEntry columnEntry = properties.get(columnIndex);
result = columnEntry.getEntry((CommonDDBean) row);
}
return result;
}
public int addRow() {
// if (assemblyDesc == null) {
// assemblyDesc = getAssemblyDesc();
// }
//
// final SecurityRolePanel dialogPanel = new SecurityRolePanel();
// final String currentRoleName = null;
//
// EditDialog dialog = new EditDialog(dialogPanel, NbBundle.getMessage(EjbRefsTablePanel.class,"TTL_SecurityRole"), true) {
// protected String validate() {
// String name = dialogPanel.getRoleName().trim();
//
// if (name.length()==0) {
// return NbBundle.getMessage(SecurityRoleTablePanel.class,"TXT_EmptySecurityRoleName");
// } else {
// SecurityRole[] roles = assemblyDesc.getSecurityRole();
// boolean exists=false;
//
// for (int i = 0; i < roles.length; i++) {
// if (name.equals(roles[i].getRoleName())){
// return NbBundle.getMessage(SecurityRoleTablePanel.class,"TXT_SecurityRoleNameExists",name);
// }
// }
// }
//
// return null;
// }
// };
// dialog.setValid(false);
// javax.swing.event.DocumentListener docListener = new EditDialog.DocListener(dialog);
// dialogPanel.getRoleNameTF().getDocument().addDocumentListener(docListener);
// dialogPanel.getDescriptionTA().getDocument().addDocumentListener(docListener);
//
// java.awt.Dialog d = org.openide.DialogDisplayer.getDefault().createDialog(dialog);
// d.setVisible(true);
//
// dialogPanel.getRoleNameTF().getDocument().removeDocumentListener(docListener);
// dialogPanel.getDescriptionTA().getDocument().removeDocumentListener(docListener);
//
// if (dialog.getValue().equals(EditDialog.OK_OPTION)) {
// SecurityRole role = assemblyDesc.newSecurityRole();
// role.setRoleName(dialogPanel.getRoleName());
// role.setDescription(dialogPanel.getDescription());
// assemblyDesc.addSecurityRole(role);
// modelUpdatedFromUI();
// }
CommonDDBean param = beanFactory.newInstance(parentBean);
parentBean.addValue(parentPropertyName, param);
modelUpdatedFromUI();
return getRowCount() - 1;
}
public void removeRow(int row) {
parentBean.removeValue(parentPropertyName, row);
modelUpdatedFromUI();
}
private GenericTableDialogPanelAccessor internalGetDialogPanel() {
GenericTableDialogPanelAccessor subPanel = null;
// try {
// subPanel = (GenericTableDialogPanelAccessor) entryPanelClass.newInstance();
// subPanel.init(getAppServerVersion(),
// GenericTablePanel.this.getWidth()*3/4, properties, extraData);
//
//// TODO accessibility for popup panels. (help context as well?)
//// ((JPanel) subPanel).getAccessibleContext().setAccessibleName(
//// resourceBundle.getString("ACSN_POPUP_" + resourceBase)); // NOI18N
//// ((JPanel) subPanel).getAccessibleContext().setAccessibleDescription(
//// resourceBundle.getString("ACSD_POPUP_" + resourceBase)); // NOI18N
// } catch(Exception ex) {
// // Coding error if we get here.
// ErrorManager.getDefault().notify(ErrorManager.INFORMATIONAL, ex);
// }
return subPanel;
}
/** New interface added for migration to sun-* DD API model. If the backing
* model stores the properties in a parent property, then this is the factory
* for creating instances of the parent to store each row, as added by the
* user.
*/
public interface ParentPropertyFactory {
/* Implement this method to return a new blank instance of the correct
* bean type, e.g. WebserviceEndpoint, etc.
*
* Glorified function pointer really, guess we do need closures :o
*/
// public CommonDDBean newParentProperty(ASDDVersion asVersion);
public CommonDDBean newInstance(CommonDDBean parent);
}
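    // Illustrative sketch of a factory (the bean type and helper are
    // assumptions, not from the original source):
    //
    //   ParentPropertyFactory factory = new ParentPropertyFactory() {
    //       public CommonDDBean newInstance(CommonDDBean parent) {
    //           return createWebserviceEndpoint(parent);   // hypothetical helper
    //       }
    //   };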
} |
<gh_stars>0
import unittest
from app.models import User,Article
from app import db
class BlogTest(unittest.TestCase):
def setUp(self):
'''
Sets up the before all tests
'''
self.user_admin = User(username='admin',password_hash='<PASSWORD>',email='<EMAIL>')
self.new_article = Article(article='article1',category='technology',user_id=self.user_admin)
def tearDown(self):
'''
deletes test data tests after every test
'''
User.query.delete()
Article.query.delete()
def test_check_instance_variables(self):
'''
test the instances
'''
self.assertEquals(self.new_article.article,'article1')
self.assertEquals(self.new_article.category,'technology')
self.assertEquals(self.new_article.user_id,self.user_admin)
def test_save_article(self):
'''
test saving in the db
'''
self.new_article.save_article()
self.assertTrue(len(Article.query.all())>0)
def test_get_article_by_id(self):
'''
tests getting article by id
'''
self.new_article.save_article()
got_article = Article.query.get(1)
self.assertIsNotNone(got_article)
|
/**
* Return the column position within the given columns.
*
* @param columns the list of columns to search the column from.
* @param colId the wanted column id.
* @return the column position within the given columns, or the list size if the column is not found.
*/
private int findColumnPosition(List<ColumnMetadata> columns, String colId) {
for (int i = 0; i < columns.size(); i++) {
if (columns.get(i).getId().equals(colId)) {
return i;
}
}
return columns.size();
} |
<filename>src/main/java/com/rbc/shopping/repository/OrderRepository.java
package com.rbc.shopping.repository;
import com.rbc.shopping.entity.Orders;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
* Repository class for ORDERS table.
*
* @author SARA
*/
@Repository
public interface OrderRepository extends JpaRepository<Orders, Long> {
/**
* Finds and returns orders list based on User id.
*
* @param userId long representing User Id.
* @return List of Orders.
*/
@Query(value = "SELECT * FROM ORDERS WHERE user_id = ?1", nativeQuery = true)
List<Orders> findOrdersByUserId(Long userId);
/**
* Finds and returns orders list for all users except supplied User id.
*
* @param userId long representing user id.
* @return List of Orders.
*/
@Query(value = "SELECT * FROM ORDERS WHERE user_id <> ?1", nativeQuery = true)
List<Orders> findOrdersByOtherUsers(Long userId);
}
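
// Illustrative caller sketch (service name and wiring are assumptions, not
// part of the original repository):
//
//   @Service
//   public class OrderQueryService {
//       private final OrderRepository orders;
//
//       OrderQueryService(OrderRepository orders) { this.orders = orders; }
//
//       List<Orders> ordersForUser(Long userId) {
//           return orders.findOrdersByUserId(userId);
//       }
//   }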
|
/*! Progress Bar just writes the given message and flushes */
class NoProgressBar : public ProgressIndicator {
public:
NoProgressBar(const std::string& message, const unsigned int messageWidth = 40);
void updateProgress(const unsigned long progress, const unsigned long total) override {}
void reset() override {}
}; |
<filename>main.go
package main
import (
"log"
"time"
)
func main() {
start := time.Now()
log.Println("Start")
log.Println("App", pVersion)
doIt()
statsDmp()
memDmp()
elapsed := time.Since(start)
log.Println("Takes about: ", elapsed)
log.Println("Done")
}
|
import { Controller, Get, Post, Res, Body, UseGuards,UseInterceptors, CacheInterceptor, CacheKey,CacheTTL } from '@nestjs/common';
import { Param } from '../../common/decorators/param.decorator';
import { UserService } from './user.service';
import { CreateUserDto } from '../../pojo/dto/user.create.dto';
import { ApiTags, ApiBearerAuth } from '@nestjs/swagger';
import { LocalAuthGuard } from '../../aop/guards/local-auth.guard';
import { JwtAuthGuard } from '../../aop/guards/jwt-auth.guard';
import { databaseProviders } from '../db/database.providers';
@ApiBearerAuth() // Swagger JWT authentication
@ApiTags('user')
@Controller('/user')
@UseInterceptors(CacheInterceptor)
export class UserController {
constructor(
private readonly userService: UserService,
) {}
@Get('/configData')
async configData(){
console.log('endpoint called');
const data = await this.userService.configData();
return data;
}
@Get('/logTest')
async logTest(){
console.log('endpoint called');
const data = await this.userService.logTest();
return data;
}
@UseGuards(LocalAuthGuard)
@Get('/one')
async getOne(){
const data = await this.userService.testTypeOrm();
return data;
}
@Post('/test')
async create(@Param() createUserDto: CreateUserDto){
return createUserDto;
}
@UseGuards(JwtAuthGuard)
@Get('profile')
getProfile() {
return 'xxx';
}
} |
n, x, t = map(int, input().split())
# One batch of x items takes t time units; ceil(n / x) batches cover n items.
batches = -(-n // x)
print(batches * t) |
def tracking_hybrid(self):
if self.tracking_stage == 0:
self.cur_data['pose'] = SE3(self.gt_poses[self.cur_data['id']])
self.tracking_stage = 1
return
elif self.tracking_stage >= 1:
start_time = time()
cur_data, ref_data = self.deep_flow_forward(
self.cur_data,
self.ref_data,
forward_backward=self.cfg.deep_flow.forward_backward)
self.timers.timers['Flow-CNN'].append(time()-start_time)
for ref_id in self.ref_data['id']:
hybrid_pose = SE3()
# Rotation (plus unit-scale translation) from the 2D-2D essential matrix.
E_pose, _ = self.compute_pose_2d2d(
cur_data['kp_best'],
ref_data['kp_best'][ref_id])
hybrid_pose.R = E_pose.R
# Recover the translation scale by checking triangulated points against the CNN depth map.
if np.linalg.norm(E_pose.t) != 0:
scale = self.find_scale_from_depth(
cur_data['kp_best'], ref_data['kp_best'][ref_id],
E_pose.inv_pose, self.cur_data['depth']
)
if scale != -1:
hybrid_pose.t = E_pose.t * scale
# Fall back to PnP when the essential matrix is degenerate or scale recovery failed.
if np.linalg.norm(E_pose.t) == 0 or scale == -1:
pnp_pose, _, _ \
= self.compute_pose_3d2d(
cur_data['kp_best'],
ref_data['kp_best'][ref_id],
ref_data['depth'][ref_id]
)
hybrid_pose = pnp_pose
self.tracking_mode = "PnP"
ref_data['pose'][ref_id] = copy.deepcopy(hybrid_pose)
self.ref_data = copy.deepcopy(ref_data)
self.cur_data = copy.deepcopy(cur_data)
self.ref_data['kp'] = copy.deepcopy(ref_data['kp_best'])
self.cur_data['kp'] = copy.deepcopy(cur_data['kp_best'])
pose = self.ref_data['pose'][self.ref_data['id'][-1]]
self.update_global_pose(pose, 1)
self.tracking_stage += 1
del(ref_data)
del(cur_data) |
def f(x):
    # Cumulative sum of floor(x / 10**k) over k >= 0.
    r = 0
    while x:
        r += x
        x //= 10
    return r

for line in [*open(0)][1:]:
    l, r = map(int, line.split())
    print(f(r) - f(l)) |
<filename>containers/index.ts<gh_stars>0
export { default as ProductCard } from "./ProductCard/ProductCard";
export { default as CartItem } from "./CartItem/CartItem";
export { default as CartSummary } from "./CartSummary/CartSummary";
export { default as CardPayment } from "./CardPayment/CardPayment";
|
/**
* Overridden method responsible for drawing the view
*/
@Override
protected void paintComponent(Graphics g) {
super.paintComponent(g);
for(var view : views)
{
view.draw(g);
}
Toolkit.getDefaultToolkit().sync();
} |
Closed Beta
Your unique download code:
Note: Do not share this key or you may not be able to play.
This Promotion Code is case sensitive and must be entered exactly as displayed.
*Start date and terms subject to change without notice. If full game is not yet released at time of purchase, entitlement will allow for beta access until commercial release.
For assistance, go to www.us.playstation.com/support or contact SCE Consumer Service at 1-800-345-7669.
This voucher ("Voucher") is good for one PSN account to redeem the entitlement to download the beta version of the DUST 514™ game for the PlayStation®3 system via the PlayStation®Store. Voucher expires 7.17.2012.
Voucher can only be redeemed through PSN Account Management. The approximate value of the voucher is $0.00 USD. Voucher cannot be redeemed for cash, cannot be returned for cash or credit, and cannot be used for any other purpose. Voucher will not be replaced if lost, destroyed, or stolen. User responsible for all applicable internet fees.
Use of the PlayStation®Network is subject to the PlayStation Network Terms of Service and User Agreement and applicable privacy policy, see www.us.playstation.com/support/useragreements. Children under 18 must have parental consent to establish a PlayStation Network Account and may not be able to access certain content or services. Broadband service required for download; user responsible for associated fees. PlayStation Network features and offerings may change without notice.
"PlayStation", the "PS" Family logo and "PS3" are registered trademarks and the PlayStation Network logo is a trademark of Sony Computer Entertainment Inc.
©2012 CCP hf. All rights reserved. "CCP", "DUST 514", "EVE" and "EVE Online" are trademarks or registered trademarks of CCP hf in the United States and other jurisdictions. The ESRB ratings icon is a registered trademark of the Entertainment Software Association. |
In linguistics, an eggcorn is an idiosyncratic substitution of a word or phrase for a word or words that sound similar or identical in the speaker's dialect (sometimes called oronyms). The new phrase introduces a meaning that is different from the original but plausible in the same context, such as "old-timers' disease" for "Alzheimer's disease".[1] An eggcorn can be described as an intra-lingual phono-semantic matching, a matching in which the intended word and substitute are from the same language.
Etymology
The term eggcorn was coined by professor of linguistics Geoffrey Pullum in September 2003 in response to an article by Mark Liberman on the website Language Log, a blog for linguists.[2] Liberman discussed the case of a woman who substitutes the phrase egg corn for the word acorn, and argued that the precise phenomenon lacked a name. Pullum suggested using "eggcorn" itself as a label.
Similar phenomena
An eggcorn differs from a malapropism, the latter being a substitution that creates a nonsensical phrase. Classical malapropisms generally derive their comic effect from the fault of the user, while eggcorns are substitutions that exhibit creativity, logic[3] or ignorance. Eggcorns often involve replacing an unfamiliar, archaic, or obscure word with a more common or modern word ("baited breath" for "bated breath").[4]
The phenomenon is similar to the form of wordplay known as the pun except that, by definition, the speaker or writer intends the pun to have some humorous effect on the recipient, whereas one who speaks or writes an eggcorn is unaware.[5]
It is also similar to, but distinct from, a mondegreen or a folk etymology.[6]
|
/**
* Sets the flow tasks in the DB in status 'New' so they will be 'visible' to the producer thread/s and therefore
* will be added to the queue for execution.
* @param flowId to update in DB.
* @throws PersistenceException in case the update failed - may cause some of the tasks of a flow to be in status
* 'New' and others to remain in their old status.
*/
public void startFlow(int flowId) throws PersistenceException {
synchronized(producerLock) {
producerMayWork = false;
}
persistence.startFlow(flowId);
synchronized(producerLock) {
producerMayWork = true;
producerLock.notify();
}
} |
<filename>app/src/main/java/com/projects/marcoscavalcante/deloapp/Utils/BaseFragment.java
package com.projects.marcoscavalcante.deloapp.Utils;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
public abstract class BaseFragment extends Fragment {
protected View mRootView;
protected abstract int getLayout();
private static final int NUM_COLUMNS = 2;
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container,
@Nullable Bundle savedInstanceState) {
mRootView = inflater.inflate( getLayout(), container, false );
return mRootView;
}
protected int getNumberOfColumns() {
DisplayMetrics displayMetrics = new DisplayMetrics();
getActivity().getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
float scaleFactor = getResources().getDisplayMetrics().density * 100;
int number = displayMetrics.widthPixels;
int nColumns = (int) ( (float) number / scaleFactor ) / NUM_COLUMNS;
if (nColumns < NUM_COLUMNS) {
return NUM_COLUMNS; //to keep the grid aspect
}
return nColumns;
}
}
|
#ifndef CAN_BUS_DEFS_H
#define CAN_BUS_DEFS_H
#define REQUEST_STATE_TO_ALL 0x00000001 // Command for all nodes to report their state
#define BCKPC_STATE_PACKET 0x00000060 // State response from the backup power controller.
#define BCKPC_CMD_PACKET 0x00000070 // Command to the backup power controller.
#define ALL_ENTER_TO_WRK_MOD 0x00001000 // Command for all nodes to enter working mode
#endif // CAN_BUS_DEFS_H
|
#ifndef SphereCollider_hpp
#define SphereCollider_hpp
#include "Collider.hpp"
namespace FishEngine
{
class FE_EXPORT SphereCollider : public Collider
{
public:
DefineComponent(SphereCollider);
SphereCollider() = default;
SphereCollider(const Vector3& center,
const float radius);
virtual void OnDrawGizmosSelected() override;
private:
friend class FishEditor::Inspector;
Vector3 m_center{0, 0, 0};
float m_radius = 1;
virtual void CreatePhysicsShape() override;
};
}
#endif // SphereCollider_hpp
|
/*
* Copyright (c) 2016. <NAME>, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taptrack.tcmptappy.application;
import android.app.Application;
import android.content.Context;
import com.taptrack.tcmptappy.dagger.ApplicationScope;
import com.taptrack.tcmptappy.domain.activetappiesstatus.ActiveTappiesStatusModule;
import com.taptrack.tcmptappy.domain.activetappiesstatus.ActiveTappiesStatusService;
import com.taptrack.tcmptappy.domain.contentprovider.ContentProviderModule;
import com.taptrack.tcmptappy.domain.contentprovider.TappyBleDemoProvider;
import com.taptrack.tcmptappy.domain.contentprovider.db.TappyBleDemoDbOpenHelper;
import com.taptrack.tcmptappy.domain.devicesearch.TappyBleDeviceSearch;
import com.taptrack.tcmptappy.domain.devicesearch.TappyBleDeviceSearchModule;
import com.taptrack.tcmptappy.domain.messagepersistence.MessagePersistenceModule;
import com.taptrack.tcmptappy.domain.messagepersistence.TCMPMessagePersistenceService;
import com.taptrack.tcmptappy.domain.preferencepersistence.AppPreferenceModule;
import com.taptrack.tcmptappy.domain.preferencepersistence.AppPreferenceService;
import com.taptrack.tcmptappy.domain.tappycommunication.TappyCommunicationModule;
import com.taptrack.tcmptappy.domain.tappycommunication.TappyStatusService;
import com.taptrack.tcmptappy.domain.tappycommunication.backgroundservices.TappyManagementBgService;
import com.taptrack.tcmptappy.domain.tappypersistence.ActiveTappiesService;
import com.taptrack.tcmptappy.domain.tappypersistence.SavedTappiesService;
import com.taptrack.tcmptappy.domain.tappypersistence.TappyPersistenceModule;
import com.taptrack.tcmptappy.tcmp.common.CommandFamilyMessageResolver;
import com.taptrack.tcmptappy.ui.activities.interacttappy.InteractTappyActivityBollard;
import com.taptrack.tcmptappy.ui.activities.searchfortappies.SearchTappiesActivityBollard;
import com.taptrack.tcmptappy.ui.modules.mainnavigationbar.MainNavigationPresenter;
import com.taptrack.tcmptappy.ui.modules.mainnavigationbar.MainNavigationUiModule;
import com.taptrack.tcmptappy.ui.modules.sendtcmpmessage.SendTcmpMessagePresenter;
import com.taptrack.tcmptappy.ui.modules.sendtcmpmessage.SendTcmpMessageUiModule;
import com.taptrack.tcmptappy.ui.modules.tappyblesearcher.TappyBleDeviceSearchPresenter;
import com.taptrack.tcmptappy.ui.modules.tappyblesearcher.TappyBleSearchUiModule;
import com.taptrack.tcmptappy.ui.modules.tcmpmessagelist.DisplayTcmpMessagePresenter;
import com.taptrack.tcmptappy.ui.modules.tcmpmessagelist.TcmpMessageUiModule;
import javax.inject.Named;
import dagger.Component;
@ApplicationScope
@Component(modules = {
AppModule.class,
TappyBleDeviceSearchModule.class,
TappyPersistenceModule.class,
ContentProviderModule.class,
MainNavigationUiModule.class,
TappyBleSearchUiModule.class,
AppPreferenceModule.class,
TcmpMessageUiModule.class,
MessagePersistenceModule.class,
SendTcmpMessageUiModule.class,
TappyCommunicationModule.class,
ActiveTappiesStatusModule.class
})
public interface AppComponent {
// application
Application provideApplication();
@Named(AppModule.NAME_APP_CONTEXT)
Context provideAppContext();
// domain helpers
TappyBleDemoDbOpenHelper provideDbHelper();
CommandFamilyMessageResolver getCommandFamilyMessageResolver();
TappyManagerBinderProvider provideTappyManagerBinderProvider(); // don't ask
// domain services
TappyBleDeviceSearch getBleDeviceSearch();
SavedTappiesService getSavedTappiesService();
ActiveTappiesService getActiveTappiesService();
AppPreferenceService getTappyDemoPreferenceService();
TCMPMessagePersistenceService getMessagePersistenceService();
ActiveTappiesStatusService getActiveTappyStatusService();
TappyStatusService getTappyStatusService();
// presenters
TappyBleDeviceSearchPresenter getDeviceSearchPresenter();
MainNavigationPresenter getNavigationPresenter();
DisplayTcmpMessagePresenter getMessagePresenter();
SendTcmpMessagePresenter getSendPresenter();
// injection targets
// system-level targets
void inject(TappyBleDemoProvider provider);
void inject(TappyManagementBgService service);
// bollards
void inject(SearchTappiesActivityBollard bollard);
void inject(InteractTappyActivityBollard bollard);
}
|
<reponame>ZborovskiyKyryl/trello_Kyryl
package com.trello.qa.manadger;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
public class BoardHelper extends HelperBase{
public BoardHelper(WebDriver driver) {
super(driver);
}
public void fillBoardCreationForm(BoardData board) {
type(By.cssSelector("[data-test-id='header-create-board-title-input']"), board.getBoardName());
if (isElementPresent(By.cssSelector(".W6rMLOx8U0MrPx"))) {
click(By.cssSelector(".W6rMLOx8U0MrPx"));
click(By.xpath("//nav[@class='SdlcRrTVPA8Y3K']//li[1]"));//no team
}
}
public void changeBoard(String Boardname) {
int beforeCreation = getPersnalBoardsCount();
if(beforeCreation == 0){
createBoard();
}
clickOnFirstPrivateBoard();
click(By.xpath("//*[@class='js-board-editing-target board-header-btn-text']"));
Onlytype(By.xpath("//input[@class='board-name-input js-board-name-input']"), Boardname);
returnToHomePage();
}
public String getBoardNameFromBoardPage() {
return driver.findElement(By.xpath("//*[@class='js-board-editing-target board-header-btn-text']")).getText();
}
public void selectCreateBoardFromDropDown() {
click(By.cssSelector("[data-test-id='header-create-board-button']"));
}
public void confirmBoardCreation() {
waitForElementAndClick(By.cssSelector("[data-test-id='header-create-board-submit-button']"), 20);
}
public int getPersnalBoardsCount() {
return driver.findElements(By.xpath("//*[@class='icon-lg icon-member']/../../..//li")).size() - 1;
}
public void createBoard() {
clickOnPlusButtonOnHeader();
selectCreateBoardFromDropDown();
//fillBoardCreationForm("qa21", "descr qa 21");
confirmBoardCreation();
returnToHomePage();
}
public void clickOnMoreButtonInBoardMenu() {
WebElement menuButton = driver.findElement(By.cssSelector(".board-header-btn.mod-show-menu"));
System.out.println(menuButton.getCssValue("visibility"));
if (menuButton.getCssValue("visibility").equals("visible")) {
click(By.cssSelector(".mod-show-menu"));
click(By.cssSelector(".js-open-more"));
} else {
click(By.cssSelector(".js-open-more"));
}
}
public void clickOnFirstPrivateBoard() {
click(By.xpath("//*[@class='icon-lg icon-member']/../../..//li"));
}
public void initBoardDeletion() {
clickCloseBoardButton();
confirmCloseButton();
}
public void confirmCloseButton() {
click(By.cssSelector(".js-confirm.full.negate"));
}
private void clickCloseBoardButton() {
click(By.cssSelector(".board-menu-navigation-item-link.js-close-board"));
}
public void confirmBoardDeletion() {
new WebDriverWait(driver, 10)
.until(ExpectedConditions.elementToBeClickable(By.cssSelector(".js-delete")));
click(By.cssSelector(".js-delete"));
}
public void confirmFinishBoardDeletion() {
click(By.cssSelector(".js-confirm.full"));
}
}
|
//-- HelloRenderer.cpp ----------------------------------------------------------
//
// Copyright (C) 2015
// University Corporation for Atmospheric Research
// All Rights Reserved
//
//----------------------------------------------------------------------------
//
// File: HelloRenderer.cpp
//
// Author: <NAME>
//
// Description: Implementation of HelloRenderer class
//
//----------------------------------------------------------------------------
#include <vapor/glutil.h> // Must be included first!!!
#include <cstdlib>
#include <cstdio>
#include <cstring>
#include <cfloat>
#ifndef WIN32
#include <unistd.h>
#endif
#include <vapor/ParamsMgr.h>
#include <vapor/DataMgrUtils.h>
#include <vapor/HelloRenderer.h>
#define INCLUDE_DEPRECATED_LEGACY_VECTOR_MATH
#include <vapor/LegacyVectorMath.h>
using namespace VAPoR;
using namespace Wasp;
//
// Register class with object factory!!!
//
static RendererRegistrar<HelloRenderer> registrar(HelloRenderer::GetClassType(), HelloParams::GetClassType());
//----------------------------------------------------------------------------
//
//----------------------------------------------------------------------------
HelloRenderer::HelloRenderer(const ParamsMgr *pm, string winName, string dataSetName, string instName, DataMgr *dataMgr)
: Renderer(pm, winName, dataSetName, HelloParams::GetClassType(), HelloRenderer::GetClassType(), instName, dataMgr)
{
}
//----------------------------------------------------------------------------
//
//----------------------------------------------------------------------------
HelloRenderer::~HelloRenderer() {}
int HelloRenderer::_initializeGL() { return (0); }
int HelloRenderer::_paintGL(bool)
{
HelloParams *rParams = (HelloParams *)GetActiveParams();
// Next we need to get a Grid for the data we are rendering.
Grid *helloGrid;
// To obtain the Grid, we need the refinement level, variable, LOD, and extents:
int actualRefLevel = rParams->GetRefinementLevel();
int lod = rParams->GetCompressionLevel();
// Get the variable name
string varname = rParams->GetVariableName();
// Determine the full vdc extents, in order to render
// in local user coordinates.
// Determine the data extents.
// The extents of data needed are determined by the end points.
// Get the end points from the Params:
vector<double> point1 = rParams->GetPoint1();
vector<double> point2 = rParams->GetPoint2();
VAssert(point1.size() == point2.size());
VAssert(point1.size() >= 2 && point1.size() <= 3);
cout << "helloParams point: " << endl;
cout << "point1: " << point1[0] << " " << point1[1] << " " << point1[2] << endl;
cout << "point2: " << point2[0] << " " << point2[1] << " " << point2[2] << endl;
// Finally, obtain the Grid of the data for the specified region, at requested refinement and lod,
// using Renderer::getGrids()
size_t timestep = rParams->GetCurrentTimestep();
int rc = DataMgrUtils::GetGrids(_dataMgr, timestep, varname, point1, point2, true, &actualRefLevel, &lod, &helloGrid);
if (rc < 0) { return rc; }
// Set the grid to use nearest-point interpolation, to calculate actual (uninterpolated) data max and min
helloGrid->SetInterpolationOrder(0);
// In order to sample the data at the user-specified refinement level, need to determine the number of voxels
// that the line crosses, which requires knowing the underlying grid.
//
size_t nsamples = 100;
// nsamples is the number of samples along the line.
// Divide the line into maxvox equal sections, sample the variable at each point along the line, to find
// coordinates of min and max value
double maxval = -DBL_MAX;
double minval = DBL_MAX;
vector<double> minPoint, maxPoint;
for (int i = 0; i < nsamples; i++) {
vector<double> coord;
for (int j = 0; j < point1.size(); j++) { coord.push_back(point1[j] + i * (point2[j] - point1[j]) / (double)(nsamples - 1)); }
double sampledVal = helloGrid->GetValue(coord);
if (sampledVal == helloGrid->GetMissingValue()) continue;
if (minval > sampledVal) {
minval = sampledVal;
minPoint = coord;
}
if (maxval < sampledVal) {
maxval = sampledVal;
maxPoint = coord;
}
}
// Obtain the line width
float width = (float)rParams->GetLineThickness();
// Set up lighting and color. We will use the lighting settings from the viewpoint params for rendering the lines,
// but lighting will be disabled for rendering the max and min points.
ViewpointParams *vpParams = _paramsMgr->GetViewpointParams(_winName);
int nLights = vpParams->getNumLights();
float fcolor[3];
rParams->GetConstantColor(fcolor);
if (nLights == 0) {
glDisable(GL_LIGHTING);
} else {
glShadeModel(GL_SMOOTH);
glMaterialfv(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE, fcolor);
glMaterialf(GL_FRONT_AND_BACK, GL_SHININESS, vpParams->getExponent());
// The line geometry will get a white specular color:
float specColor[4];
specColor[0] = specColor[1] = specColor[2] = 0.8f;
specColor[3] = 1.f;
glMaterialfv(GL_FRONT_AND_BACK, GL_SPECULAR, specColor);
glEnable(GL_LIGHTING);
glEnable(GL_COLOR_MATERIAL);
}
glColor3fv(fcolor);
// glLineWidth(width);
if (point1.size() != 3) {
point1.push_back(0.0);
point2.push_back(0.0);
minPoint.push_back(0.0);
maxPoint.push_back(0.0);
}
// Calculate the normal vector as orthogonal to the line and projected to the viewer direction
// To do this, take the cross product of the line direction with the viewer direction,
// And then cross the result with the line direction.
// Find the direction vector along the line and the camera direction
double m[16];
vpParams->GetModelViewMatrix(m);
double posvec[3], upvec[3], dirvec[3];
bool status = vpParams->ReconstructCamera(m, posvec, upvec, dirvec);
if (!status) {
SetErrMsg("Failed to get camera parameters");
return (-1);
}
double lineDir[3], vdir[3], cross[3], normvec[3];
for (int i = 0; i < 3; i++) {
lineDir[i] = point2[i] - point1[i];
vdir[i] = dirvec[i];
}
float len = vlength(lineDir);
if (len == 0.f) len = 1.;
vscale(lineDir, 1. / len);
vcross(vdir, lineDir, cross);
len = vlength(cross);
if (len == 0.f) len = 1.;
vscale(cross, 1. / len);
vcross(cross, lineDir, normvec);
len = vlength(normvec);
if (len == 0.f) len = 1.;
vscale(normvec, 1. / len);
// Now render the line
// translate to as to render in local user coordinates
//
glBegin(GL_LINES);
glNormal3dv(normvec);
glVertex3d(point1[0], point1[1], point1[2]);
glNormal3dv(normvec);
glVertex3d(point2[0], point2[1], point2[2]);
glEnd();
// Then render the Max and Min points:
glDisable(GL_LIGHTING);
glPointSize(4. * width);
// Max will be white
glColor3f(1.f, 1.f, 1.f);
glBegin(GL_POINTS);
glVertex3d(maxPoint[0], maxPoint[1], maxPoint[2]);
glEnd();
// Set min point to be yellow
glColor3f(1.f, 1.f, 0.f);
glBegin(GL_POINTS);
glVertex3d(minPoint[0], minPoint[1], minPoint[2]);
glEnd();
return 0;
}
|
a,b = map(list, raw_input().split('|'))
s = list(raw_input())
while s:
e = s.pop()
if len(a) >= len(b):
b.append(e)
else:
a.append(e)
if len(a) == len(b):
print '%s|%s' % (''.join(a), ''.join(b))
else:
print 'Impossible'
|
// Returns the current position in the given file, or -1 on error. errno is set
// to indicate the error.
//
// See https://liballeg.org/a5docs/5.2.6/file.html#al_ftell
func (f *File) Tell() (int64, error) {
pos := int64(C.al_ftell((*C.ALLEGRO_FILE)(f)))
if pos == -1 {
return 0, LastError()
}
return pos, nil
} |
/**
* The attributes of a vertex are
* its height and the array of faces surrounding that vertex.
* <p>
* There is a crude parallelism between the Vertex and Face classes, corresponding
* to the duality of planar maps that exchanges vertices and faces.
*/
public class Vertex {
private final int height;
private final Face[] incidentFace;
private Vertex(int height, int size) {
this.height = height;
incidentFace = new Face[size];
}
/**
* This is the only public constructor. When constructed, a vertex bounds
* only two faces. Vertices with higher valence can only be created through
* faceDivision.
* <p>
* The height of the constructed vertex is 0.
* <p>
* @param F1 one bounding face around the vertex
* @param F2 the other bounding face around the vertex.
*/
Vertex(Face F1, Face F2, int height) {
this.height = height;
incidentFace = new Face[] {
F1, F2
};
}
/**
* Make a deep clone of a vertex from another graph.
* This method is involved.
* Starting with a graph create a hashMap whose keys are the faces (faceDict),
* and a hashMap whose keys are the vertices (vertexDict). Initialize so
* that the keys point to null. As vertices and faces of the clone are created
* the HashMap objects are updated with the new clone associations.
* <p>
* Vertices, Faces, and Graphs are immutable, so once a clone of one vertex
* is started, the clone spreads through the entire Graph.
* <p>
* This method is deeply recursive and is tightly intertwined with Face.deepClone().
* <p>
* If reverse=true, the clone is a mirror image of the original, otherwise exact clone.
* @param faceDict HashMap containing bindings of old faces to cloned faces
* @param vertexDict HashMap containing bindings of old vertices to cloned vertices
* @param reverse boolean specifying whether the clone should be a mirror image
* <p>
*/
Vertex deepClone(HashMap faceDict, HashMap vertexDict, boolean reverse) {
if(vertexDict.get(this) != null)
util.Eiffel.error("each vertex should be cloned only once");
Vertex V = new Vertex(this.getHeight(), this.size());
vertexDict.put(this, V);
Face F = this.getAny();
Face newF = null;
for(int i = 0;i < this.size();i++) {
F = incidentFace[i];
newF = (Face)faceDict.get(F);
if(newF == null)
newF = F.deepClone(faceDict, vertexDict, reverse); // recursive;
if(reverse)
V.incidentFace[this.size() - i - 1] = newF;
else
V.incidentFace[i] = newF;
}
return V;
}
/**
* Clone a vertex as a descendent vertex in a face division.
* <p>
* One face is chosen that is split into two faces.
* The process of dividing a face is called faceDivision
* <p>
* See mitosis.gif
* <p>
*
* @param ram1 The first ramification vertex
* @param ram2 The second ramification vertex
* @param oldF The face undergoing division.
* @param face12 The face resulting from division from ram1 to ram2 clockwise
* @param face21 The face resulting from division from ram2 to ram1 clockwise around oldF
* @param faceDict HashMap associating old faces with their descendants
* @param vertexDict HashMap associating old vertices with their descendants.
* contract precondition: ram1 and ram2 are Vertices of oldF.
* contract precondition: all arguments are non-null.
*/
Vertex faceDivision(Vertex ram1, Vertex ram2, Face oldF, Face face12, Face face21, HashMap faceDict, HashMap vertexDict) {
// Early exit if vertex has been processed already.
Vertex V = (Vertex)vertexDict.get(this);
if(V != null)
return V;
int distance = oldF.directedLength(ram1, ram2);
if(distance < 0)
util.Eiffel.error("ram1 and ram2 should lie on oldF");
if(face12.isFinal())
util.Eiffel.error("face12 should not be final");
int distanceThis = oldF.directedLength(ram1, this);
/* place new vertex in dictionary, an extra face occurs at ram1, ram2 */{
int extra = 0;
if(this.equals(ram1) || this.equals(ram2))
extra++;
V = new Vertex(this.getHeight(), this.size() + extra);
vertexDict.put(this, V);
}
/* Loop through incidentFace and add corresponding Face to V
Special cases occur if the Face is oldF */
Face F;
Face newF;
int offset = 0;
for(int i = 0;i < this.size();i++) {
F = incidentFace[i];
// CASE O: ram1
if(F.equals(oldF) && distanceThis == 0) { //counterclockwise here for vertices.
V.incidentFace[i + offset] = face21;
offset++;
V.incidentFace[i + offset] = face12;
}
// CASE 1: face12
else
if(F.equals(oldF) && distanceThis < distance)
V.incidentFace[i + offset] = face12;
// CASE 2: ram2
else
if(F.equals(oldF) && distanceThis == distance) { //counterclockwise
V.incidentFace[i + offset] = face12;
offset++;
V.incidentFace[i + offset] = face21;
}
// CASE 3: face21
else
if(F.equals(oldF))
V.incidentFace[i + offset] = face21;
// CASE 4: default: F!= oldF
else {
newF = (Face)faceDict.get(F);
if(newF == null)
newF = F.faceDivision(ram1, ram2, oldF, face12, face21, faceDict, vertexDict); // recursive;
V.incidentFace[i + offset] = newF;
}
// ESAC
}
return V;
}
/**
     * Count the number of faces at the vertex that have between
     * minGon and maxGon vertices (inclusive). NonFinal faces are
     * excluded.
* @param minGon int giving the minimum number of vertices for a face to be included in
* the count
* @param maxGon int giving the maximum number of vertices for a face to be included in
* the count
* <p>
* Example:
     * faceCount(3,3) the number of final triangles around a vertex.
     * faceCount(4,4) the number of final quadrilaterals around a vertex.
     * faceCount(5,Integer.MAX_VALUE) the number of final exceptional faces around a vertex.
*/
public int faceCount(int minGon, int maxGon) {
int count = 0;
Face f;
for(int i = 0;i < incidentFace.length;i++) {
f = incidentFace[i];
if((f.isFinal()) && (f.size() >= minGon) && (f.size() <= maxGon))
count++;
}
return count;
}
/**
* nonFinalCount is the number of Faces around the vertex that are not final.
*/
public int nonFinalCount() {
int count = 0;
Face f;
for(int i = 0;i < incidentFace.length;i++) {
f = incidentFace[i];
if(!f.isFinal())
count++;
}
return count;
}
/**
* The height is a field that has only minor significance. It is
* used heuristically in the construction of new graphs from a
* given one. The height is non-negative integer that roughly measures how
* late in the construction that vertex was added. Higher heights occur
* in vertices that were constructed later in the game.
* <p>
* The height has no significance for final graphs.
*/
public int getHeight() {
return height;
}
/**
     * The size of a vertex is the number of faces containing that vertex.
*/
public int size() {
return incidentFace.length;
}
/**
* Find the count'th successor to Face f in the cyclic order around the
* vertex. If the Face f does not appear on the vertex, returns null.
*
* <p>
* Examples:
* next(f,0) = f; next(f,size())=f; next(f,1) = successor, ....
*
* <p>
* Face.next moves clockwise around the face, but Vertex.next moves counterclockwise.
* This means that if faces R and S share an edge E with terminal vertices x and y,
* and if y occurs clockwise around R from x,
* then S=x.next(R,1); R=y.next(S,1); x=S.next(y,1); y=R.next(x,1);
* See graphDoc.gif
*
* <p>
* @param f the Face used as a base point around the Vertex.
*
     * @param count the number of faces past f to locate the returned Face.
     * The return value depends only on count mod this.size().
*/
public Face next(Face f, int count) {
int index = -1;
// a bit of optimized code, using the previously found index, if possible
if(cacheNextIndex >= 0 && cacheNextIndex < incidentFace.length && incidentFace[cacheNextIndex].equals(f))
index = cacheNextIndex;
else
for(int i = 0;i < incidentFace.length;i++) {
if(f.equals(incidentFace[i]))
index = i;
}
        cacheNextIndex = index; // index is already in range; avoids % 0 when the vertex has no faces
if(index < 0)
return null;
index = (index + count) % incidentFace.length;
if(index < 0)
index += incidentFace.length;
return incidentFace[index];
}
private int cacheNextIndex; // used for optimization in method "next"
/**
* getAny returns any face of the vertex. This, together with repeated
* calls to "next" will give an enumeration of all faces.
* <p>
     * In the unlikely event that the vertex has no faces, null is returned.
*/
public Face getAny() {
if(incidentFace.length == 0)
return null;
return incidentFace[0];
}
/**
* Used in constructor of graph from a Formatter object.
* @param faceIndex index of face in Formatter.vertexAtFace order
* @param vList list of vertices being built in new graph. Initialized to null.
* @param fList list of faces built in new graph.
* precondition: vList[vertexIndex]=null;
* postcondition: vList[vertexIndex]=this;
* postcondition: height = 0;
*/
Vertex(Formatter f, int vertexIndex, Vertex[] vList, Face[] fList) {
vList[vertexIndex] = this;
height = 0;
final int[] faceIndices = f.faceAtVertex(vertexIndex);
incidentFace = new Face[faceIndices.length];
for(int i = 0;i < faceIndices.length;i++) {
int j = faceIndices[i];
if(fList[j] == null)
new Face(f, j, vList, fList); // warning: side-effect makes fList[j]!=null.
incidentFace[i] = fList[j];
}
}
public static class Test extends util.UnitTest {
public void testVertex() {
}
}
} |
from binascii import a2b_base64

# Assumed PEM-style armor footers: in the original module these constants are
# defined elsewhere; the values below are the standard SEC1/SPKI delimiters.
EC_PRIVATE_FOOTER = '-----END EC PRIVATE KEY-----'
EC_PUBLIC_FOOTER = '-----END PUBLIC KEY-----'


def _parse_ascii_armored_base64(data: str) -> bytes:
    """Strip the armor header/footer and decode the base64 payload."""
    data = data.strip()
    lines = (line for line in data.split('\n'))
    header = next(lines).rstrip()  # consume (and discard) the BEGIN line
    base64_data = ''
    line = next(lines).rstrip()
    while line and (line != EC_PRIVATE_FOOTER) and (line != EC_PUBLIC_FOOTER):
        base64_data += line
        line = next(lines).rstrip()
    return a2b_base64(base64_data)
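

# A minimal usage sketch under the footer assumption above; the base64 body
# is a made-up placeholder, not a real key.
if __name__ == '__main__':
    sample = (
        '-----BEGIN EC PRIVATE KEY-----\n'
        'AAAA\n'
        '-----END EC PRIVATE KEY-----\n'
    )
    print(_parse_ascii_armored_base64(sample))  # b'\x00\x00\x00'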
/*
* The MIT License
*
* Copyright (c) 2020 Nefele <https://github.com/nefele-org>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.nefele.ui.wizard;
import com.jfoenix.controls.JFXButton;
import javafx.application.Platform;
import javafx.fxml.FXML;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.image.Image;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import org.nefele.Application;
import org.nefele.cloud.DriveProvider;
import org.nefele.cloud.DriveProviders;
import org.nefele.cloud.providers.DropboxDriveProvider;
import org.nefele.cloud.providers.GoogleDriveProvider;
import org.nefele.cloud.providers.OfflineDriveProvider;
import org.nefele.core.Resources;
import org.nefele.ui.base.NefelePane;
import org.nefele.ui.scenes.cloudhelper.CloudHelper;
import org.nefele.ui.scenes.cloudhelper.CloudHelperItem;
import java.net.URL;
import java.util.ResourceBundle;
public class WizardPage3 extends WizardPage {
@FXML private JFXButton buttonAddCloud;
public WizardPage3(Parent wizardRoot) {
super(wizardRoot);
Resources.getFXML(this, "/fxml/wizard/WizardPage3.fxml");
}
@Override
public void initialize(URL location, ResourceBundle resources) {
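        // Open the cloud-provider helper as a modal dialog; once at least one
        // provider reports STATUS_READY, mark this page done and advance.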
buttonAddCloud.setOnMouseClicked(e ->
Platform.runLater(() -> {
NefelePane nefelePane = new NefelePane(new CloudHelper() {{
getHelperButtons().add(new CloudHelperItem(
OfflineDriveProvider.SERVICE_ID,
OfflineDriveProvider.SERVICE_DEFAULT_DESCRIPTION, "LAYERS", "DRIVE_OFFLINE_HINT"));
getHelperButtons().add(new CloudHelperItem(
GoogleDriveProvider.SERVICE_ID,
GoogleDriveProvider.SERVICE_DEFAULT_DESCRIPTION, "GOOGLE_DRIVE", "DRIVE_GOOGLE_DRIVE_HINT"));
getHelperButtons().add(new CloudHelperItem(
DropboxDriveProvider.SERVICE_ID,
DropboxDriveProvider.SERVICE_DEFAULT_DESCRIPTION, "DROPBOX", "DRIVE_DROPBOX_HINT"));
}});
nefelePane.setPrefWidth(600);
nefelePane.setModal(NefelePane.MODAL_DIALOG);
nefelePane.setShowDarkMode(false);
nefelePane.setShowLogo(true);
nefelePane.setShowStatusBar(false);
nefelePane.setResizable(false);
Stage stage = new Stage();
Scene scene = new Scene(nefelePane);
stage.setScene(scene);
stage.setTitle("Nefele");
stage.getIcons().add(new Image(Resources.getURL(this, "/images/trayicon.png").toExternalForm()));
stage.initModality(Modality.APPLICATION_MODAL);
stage.initStyle(StageStyle.UNDECORATED);
stage.setWidth(600);
stage.setHeight(400);
stage.showAndWait();
if(DriveProviders.getInstance().getDriveProviders().stream().anyMatch(i -> i.getStatus() == DriveProvider.STATUS_READY)) {
checkedProperty().setValue(true);
((Wizard) super.getWizardRoot()).getButtonForward().fire();
}
})
);
Application.getInstance().getViews().add(this);
}
}
|
/**
* @deprecated Define a {@link IWicketContextExecutor} bean in your application and use this bean instead of
* extending AbstractBackgroundWicketThreadContextBuilder.
*/
public abstract class AbstractBackgroundWicketThreadContextBuilder implements IContextualService {
@Autowired
private IWicketContextExecutor wicketContextExecutor;
protected abstract String getApplicationName();
@Override
public <T> T runWithContext(Callable<T> callable) throws Exception {
return wicketContextExecutor.runWithContext(callable);
}
protected <T> T runWithContext(Callable<T> callable, Locale locale) throws Exception {
return wicketContextExecutor.runWithContext(callable, locale);
}
} |
// services/ui-src/src/measures/2022/Qualifiers/validationFunctions.ts
import { ACS, CCS, CCSC, CCSM, HHCS } from "./validations";
export const validationFunctions = {
ACS,
CCS,
CCSC,
CCSM,
HHCS,
};
|
/* The set_voice_position method should set the voice's playback position,
given the value in samples. This should never be called on a streaming
voice. */
static int _dsound_set_voice_position(ALLEGRO_VOICE *voice, unsigned int val)
{
ALLEGRO_DS_DATA *ex_data = (ALLEGRO_DS_DATA *)voice->extra;
HRESULT hr;
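   /* Convert the sample position into a byte offset within the buffer. */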
val *= ex_data->channels * (ex_data->bits_per_sample/8);
hr = ex_data->ds8_buffer->SetCurrentPosition(val);
if (FAILED(hr)) {
ALLEGRO_ERROR("SetCurrentPosition failed: %s\n", ds_get_error(hr));
return 1;
}
return 0;
} |
Some 5,000 activists in Berlin rallied in front of the Reichstag and dug at least one hundred graves to express their solidarity with asylum seekers who have died trying to reach Europe after fleeing war and persecution in their home countries.
‘Refugees are welcome here’: Thousands march in Berlin to support migrants
The rally was organized by the Center for Political Beauty, a group which says it wants to “re-transform Europe into a continent of immigration.” The organization used the controversial slogan “The Dead Are Coming,” and it has also produced artworks to spread its message.
Thousands in front of the German Bundestag dig symbolic graves to bring the migrant crisis home to EU policy makers pic.twitter.com/ESBtft8Q9V — Belal Awad|بلال عواد (@Baloo1987) June 21, 2015
The protest, which took place a day after UN World Refugee Day, gathered at least 5,000 people, according to police estimates. An officer told AFP that a "small number" of people were arrested for minor offences.
READ MORE: ‘Worst crisis since WWII’: Amnesty lashes out at world leaders over 50mn refugees
Some scuffles with police protesters now sitting in front of riot police on German Parliament lawn A photo posted by Lizzie Phelan (@lizzie_phelan) on Jun 21, 2015 at 7:57am PDT
The activists, many of them dressed in black, carried improvised coffins, apparently symbolizing the caskets of asylum seekers who had died while trying to make their way to Europe.
“Everyone can see that we are peaceful and that we want to change something - to stop the dying [of migrants] in the Mediterranean Sea,” one of the protesters told the Der Tagesspiegel newspaper.
“5,500 determined people are spontaneously digging graves in front of the Bundestag. The dead are coming!” Posted by Zentrum für Politische Schönheit on Sunday, June 21, 2015
They also managed to dig about a hundred small ‘graves’ with wooden crosses inscribed with messages such as "Borders Kill," “Stop Deportations,” "Fortresses Fall" and “The EU Kills.”
READ MORE: ‘They are good guys’: Italian politician loses her job for sheltering African migrants
“They [the German authorities] must understand that the people dying in the Mediterranean Sea are just a part of our lives,” one of the demonstrators told RT.
Ruptly producer Belal Awad was pepper-sprayed by police officers during the rally, RT’s video agency said on Twitter.
“I just popped my pepper spray cherry, I guess I was in their way,” Awad wrote on Facebook after the attack.
The European Union is struggling with the growing number of migrants arriving from war-stricken countries. The European Commission (EC) is planning to resettle about 40,000 people, including 24,000 from Italy and 16,000 from Greece, across 23 EU member states over the next two years.
“A square strewn with graves, directly in front of the Reichstag, is a powerful image indeed.” #dietotenkommen pic.twitter.com/BFOpt34ssV — oleschri (@oleschri) June 21, 2015
Earlier in June, Amnesty International said that governments around the world had effectively let thousands of people fleeing wars in Africa and the Middle East die by failing to provide them with basic human protection.
#DieTotenKommen successful solidarity rally in Berlin, acc to organizers more than 100 graves dug today #neverforgetpic.twitter.com/YyPHqYN04i — denise reese (@denice_ruptly) June 21, 2015
The total number of forcibly displaced people around the globe is now thought to be above 50 million.
March in Berlin for the #dietotenkommen. Against EU absence from the tragedies that affect the #Mediterranean daily. pic.twitter.com/w8e47liuWU — Ludo Orlando (@LudoOrlando) June 21, 2015
The human rights group called on the whole of Europe to share the burden of dealing with the refugee crisis, saying that Brussels has “pushed them back into the sea rather than resettle them.”
I was moved. What happened today speaks for itself. Thank you so much for this. #dietotenkommen pic.twitter.com/pXA6hs95bJ — Waagnat (@HouseOfRoughArt) June 21, 2015
The Wasted Fractions of Pequi Fruit are Rich Sources of Dietary Fibers and Phenolic Compounds
Considering the scarcity of studies on the nutrients and phenolic compounds in the wasted fractions from pequi (Caryocar brasiliense Camb.) fruit processing, this study investigated the proximate composition, identified the phenolic compounds, and quantified the gallic and ellagic acids in the shell (peel and external mesocarp) and in the external mesocarp of pequi. The shell and the external mesocarp of the pequi fruit presented high concentrations of total dietary fiber, soluble fiber, and phenolic compounds, especially the freeze-dried pequi shell, which showed approximately 50% total dietary fiber, 20% soluble dietary fiber, and 10% polyphenols, with remarkable antioxidant capacity. The phenolics identified in the pequi shell and external mesocarp were gallic acid, ellagic acid, and quercetin. In addition, protocatechuic acid, catechin, p-coumaric acid, and luteolin were identified for the first time in the pequi by-products. The freeze-dried pequi shell showed twice the gallic and ellagic acid concentrations of the external mesocarp. The wasted by-products of pequi, especially the pequi shell, are rich in healthy phytochemicals with the potential to be used by the food and pharmaceutical industries as ingredients in functional plant-based products or nutraceuticals.
// AOJ 3065 How old are you
// 2019.9.30 bal4u
#include <stdio.h>
typedef long long ll;
int main()
{
int N, q, x;
ll A, B;
A = 0, B = 1;
scanf("%d", &N);
while (N--) {
scanf("%d%d", &q, &x);
if (q == 1) A *= x, B *= x;
else if (q == 2) A += x;
else A -= x;
}
printf("%lld %lld\n", -A, B);
return 0;
}
|
// Reselect the video / audio tracks.
private void reselectTracks() {
if (counter.add(System.currentTimeMillis())) {
            Log.i(WebUtil.DEBUG, "reselectTracks ignored: more than 9 calls within one minute.");
return;
}
Log.i(WebUtil.DEBUG, "reselectTracks for group constraint");
callback.invalidate();
} |
//#pragma GCC optimize(2)
//#pragma GCC optimize(3)
#include <bits/stdc++.h>
#define int long long
#define mod 1000000007
//#define getchar() (p1==p2&&(p2=(p1=buf)+fread(buf,1,1<<22,stdin),p1 == p2)?EOF:*p1++)
using namespace std ;
//char buf[(1 << 22)] , *p1 = buf , *p2 = buf ;
inline int read ()
{
char c = getchar () ; int x = 0 , f = 1 ;
while (c < '0' || c > '9') { if (c == '-') f = -1 ; c = getchar () ; }
while (c >= '0' && c <= '9'){ x = x * 10 + c - '0' ; c = getchar () ; }
return x * f ;
}
int f[20][2][2] , a[20] , n , len , ans ;
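// f[i][j][k]: digit-DP state after fixing the first i low digit positions,
// where k is the carry (0/1) from the low side (A = l + a[i+1] + k) and j is
// the carry (0/1) still owed at the mirrored high position, enforced via
// J = j*10 + l - num - a[x-i], which must come out 0 or 1.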
int GetAns (int x)
{
// cout << x << "\n" ;
memset (f , 0 , sizeof (f)) ;
f[0][0][0] = 1 ;
int lim = x / 2 , ret = 0 ;
for (int i = 0 ; i < lim ; i ++)
for (int j = 0 ; j <= 1 ; j ++)
for (int k = 0 ; k <= 1 ; k ++)
if (f[i][j][k])
for (int l = 0 ; l <= 9 ; l ++)
{
int A = l + a[i + 1] + k , num = A % 10 ;
int K = A / 10 , J = j * 10 + l - num - a[x - i] ;
if (J < 0 || J > 1) continue ;
if (! i && (! l || ! num)) continue ;
f[i + 1][J][K] += f[i][j][k] ;
}
if (x & 1)
for (int i = 0 ; i <= 1 ; i ++)
for (int j = 0 ; j <= 1 ; j ++)
{
if (f[lim][i][j])
for (int k = 0 ; k <= 9 ; k ++)
{
int num = k + a[(x + 1) / 2] + j ;
if (num % 10 == k && num / 10 == i) ret += f[lim][i][j] ;
}
}
else for (int i = 0 ; i <= 1 ; i ++) ret += f[lim][i][i] ;
return ret ;
}
signed main ()
{
// freopen ("mirrored.in" , "r" , stdin) ; freopen ("mirrored.out" , "w" , stdout) ;
cin >> n ;
int tmp = n ;
while (tmp)
{
a[++ len] = tmp % 10 ;
tmp /= 10 ;
}
for (int i = len ; i <= len * 2 ; i ++)
ans += GetAns (i) ;
cout << ans ;
}
|
/** Computes the concrete types that can result from the given expression. */
ConcreteType inferConcreteType(ConcreteScope scope, Node expr) {
Preconditions.checkNotNull(scope);
Preconditions.checkNotNull(expr);
ConcreteType ret;
switch (expr.getType()) {
case Token.NAME:
StaticSlot<ConcreteType> slot = scope.getSlot(expr.getString());
if (slot != null) {
ret = slot.getType();
} else {
ret = ConcreteType.ALL;
}
break;
case Token.THIS:
ret = scope.getTypeOfThis();
break;
      case Token.ASSIGN:
      case Token.COMMA:
      case Token.AND:
        // These all evaluate to the type of their last operand.
        ret = inferConcreteType(scope, expr.getLastChild());
        break;
case Token.OR:
ret = inferConcreteType(scope, expr.getFirstChild()).unionWith(
inferConcreteType(scope, expr.getLastChild()));
break;
case Token.HOOK:
ret = inferConcreteType(scope,
expr.getFirstChild().getNext()).unionWith(
inferConcreteType(scope, expr.getLastChild()));
break;
case Token.GETPROP:
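        // Property access: start from the receiver's declared property type,
        // then special-case .prototype, superclass references, and .call.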
ConcreteType recvType = inferConcreteType(scope, expr.getFirstChild());
if (recvType.isAll()) {
ret = recvType;
break;
}
Node prop = expr.getLastChild();
String propName = prop.getString();
ConcreteType type = recvType.getPropertyType(propName);
if ("prototype".equals(propName)) {
for (ConcreteFunctionType funType : recvType.getFunctions()) {
type = type.unionWith(funType.getPrototypeType());
}
} else if (compiler.getCodingConvention()
.isSuperClassReference(propName)) {
for (ConcreteFunctionType superType : recvType.getSuperclassTypes()) {
type = type.unionWith(superType.getPrototypeType());
}
} else if ("call".equals(propName)) {
type = recvType;
}
ret = type;
break;
case Token.GETELEM:
ret = ConcreteType.ALL;
break;
case Token.CALL:
ConcreteType targetType =
inferConcreteType(scope, expr.getFirstChild());
if (targetType.isAll()) {
ret = targetType;
break;
}
ret = ConcreteType.NONE;
for (ConcreteFunctionType funType : targetType.getFunctions()) {
ret = ret.unionWith(funType.getReturnSlot().getType());
}
break;
case Token.NEW:
ConcreteType constructorType =
inferConcreteType(scope, expr.getFirstChild());
if (constructorType.isAll()) {
throw new AssertionError("Attempted new call on all type!");
}
ret = ConcreteType.NONE;
for (ConcreteInstanceType instType
: constructorType.getFunctionInstanceTypes()) {
ret = ret.unionWith(instType);
}
allInstantiatedTypes.add(ret);
break;
case Token.FUNCTION:
ret = createConcreteFunction(expr, scope);
break;
case Token.OBJECTLIT:
if ((expr.getJSType() != null) && !expr.getJSType().isUnknownType()) {
JSType exprType = expr.getJSType().restrictByNotNullOrUndefined();
ConcreteType inst = createConcreteInstance(exprType.toObjectType());
allInstantiatedTypes.add(inst);
ret = inst;
} else {
ret = ConcreteType.ALL;
}
break;
case Token.ARRAYLIT:
ObjectType arrayType = (ObjectType) getTypeRegistry()
.getNativeType(JSTypeNative.ARRAY_TYPE);
ConcreteInstanceType inst = createConcreteInstance(arrayType);
allInstantiatedTypes.add(inst);
ret = inst;
break;
default:
ret = ConcreteType.NONE;
}
return createTypeIntersection(ret, expr.getJSType());
} |
from openpyxl import Workbook
wb = Workbook()
ws = wb.active
data = [
["Fruit", "Quantity"],
["Kiwi", 3],
["Grape", 15],
["Apple", 3],
["Peach", 3],
["Pomegranate", 3],
["Pear", 3],
["Tangerine", 3],
["Blueberry", 3],
["Mango", 3],
["Watermelon", 3],
["Blackberry", 3],
["Orange", 3],
["Raspberry", 3],
["Banana", 3]
]
for r in data:
ws.append(r)
ws.auto_filter.ref = "A1:B15"
ws.auto_filter.add_filter_column(0, ["Kiwi", "Apple", "Mango"])
ws.auto_filter.add_sort_condition("B2:B15")
wb.save("filtered.xlsx")
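
# A quick optional sanity check: reload the saved workbook and confirm the
# auto-filter range survived the round trip.
from openpyxl import load_workbook

wb2 = load_workbook("filtered.xlsx")
print(wb2.active.auto_filter.ref)  # expected: A1:B15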
|
Development of adult sensilla on the wing and notum of Drosophila melanogaster.
We have investigated the temporal pattern of appearance, cell lineage, and cytodifferentiation of selected sensory organs (sensilla) of adult Drosophila. This analysis was facilitated by the discovery that the monoclonal antibody 22C10 labels not only the neuron of the developing sensillum organ, but the accessory cells as well. The precursors of the macrochaetes and the recurved (chemosensory) bristles of the wing margin divide around and shortly after puparium formation, while those of the microchaetes and the stout and slender (mechanosensory) bristles of the wing margin divide between 9 h and 18 h after puparium formation (apf). The onset of sensillum differentiation follows the terminal precursor division within a few hours. Four of the cells in an individual microchaete organ are clonally related: A single first-order precursor cell divides to produce two second-order precursors; one of these divides into the neuron and thecogen cell, the other into the trichogen cell and tormogen cell. Along the anterior wing margin, two rounds of division generate the cells of the mechanosensory sensilla; here, no strict clonal relationship seems to exist between the cells of an individual sensillum. At the time of sensillum precursor division, many other, non-sensillum-producing cells within the notum and wing proliferate as well. This mitotic activity follows a spatially non-random pattern. |
import numpy as np


def load_word2vec(filepath, vocabulary, embedding_dim):
    """Load binary word2vec vectors for the words in `vocabulary`; words not
    found in the file keep their random initialization."""
    embeddings = np.random.uniform(-0.25, 0.25, (len(vocabulary), embedding_dim))
    words_found = 0
    with open(filepath, "rb") as f:
        header = f.readline()
        word2vec_vocab_size, embedding_size = map(int, header.split())
        binary_len = np.dtype("float32").itemsize * embedding_size
        for line in range(word2vec_vocab_size):
            # Read one word, byte by byte, up to the separating space.
            word = []
            while True:
                ch = f.read(1).decode("latin-1")
                if ch == " ":
                    word = "".join(word)
                    break
                if ch != "\n":
                    word.append(ch)
            idx = vocabulary.get(word, None)
            if idx is not None:
                embeddings[idx] = np.frombuffer(f.read(binary_len), dtype="float32")
                words_found += 1
            else:
                f.read(binary_len)  # skip vectors for out-of-vocabulary words
    print("Word Embeddings Extracted: {}".format(words_found))
    print("Word Embeddings Randomly Initialized: {}".format(len(vocabulary) - words_found))
    return embeddings
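

# A minimal usage sketch; the path below is hypothetical and must point to a
# binary word2vec file (e.g. the GoogleNews vectors) for this to run.
if __name__ == "__main__":
    vocab = {"the": 0, "cat": 1, "sat": 2}
    emb = load_word2vec("GoogleNews-vectors-negative300.bin", vocab, 300)
    print(emb.shape)  # (3, 300)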
N = int(input())
a_list = list(map(int, input().split()))
a_list.sort()
from collections import deque
deq = deque(a_list)
res = []
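# Strategy: with the values sorted into a deque, repeatedly apply operations
# x - y that push one running value as low as possible and another as high as
# possible; `res` records each (x, y) operation, and the answer is the
# difference of the two values that remain.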
while len(deq) > 2:
min_num = deq.popleft()
max_num = deq.pop()
if min_num < 0 and max_num > 0:
ne = deq.pop()
if ne >= 0:
res.append((min_num, ne))
min_num -= ne
deq.appendleft(min_num)
deq.append(max_num)
else:
res.append((max_num, ne))
max_num -= ne
deq.append(max_num)
deq.appendleft(min_num)
else:
if max_num <= 0:
res.append((max_num, min_num))
max_num = max_num - min_num
deq.append(max_num)
else:
res.append((min_num, max_num))
min_num = min_num - max_num
deq.appendleft(min_num)
tmp1 = deq[0]
tmp2 = deq[1]
res.append((max(tmp1, tmp2), min(tmp2, tmp1)))
ans = max(tmp1, tmp2) - min(tmp2, tmp1)
print(ans)
for x, y in res:
print(x, y)
|
import { ChronoAtom, MinimalChronoAtom } from "../../src/chrono/Atom.js"
import { ChronoGraph, MinimalChronoGraph } from "../../src/chrono/Graph.js"
declare const StartTest : any
StartTest(t => {
t.it('Behavior depending from data', async t => {
const graph : ChronoGraph = MinimalChronoGraph.new()
const box0 : ChronoAtom = graph.addNode(MinimalChronoAtom.new())
const box1 : ChronoAtom = graph.addNode(MinimalChronoAtom.new())
const box2 : ChronoAtom = graph.addNode(MinimalChronoAtom.new())
const box3 : ChronoAtom = graph.addNode(MinimalChronoAtom.new({
calculation : function * () {
if ((yield box0) === 'sum') {
return (yield box1) + (yield box2)
} else {
return (yield box1) * (yield box2)
}
}
}))
box0.put('sum')
box1.put(0)
box2.put(1)
await graph.propagate()
t.is(box3.get(), 1, "Correct result calculated")
await box1.set(1)
t.is(box3.get(), 2, "Correct result calculated")
await box0.set('mul')
t.is(box3.get(), 1, "Correct result calculated after behavior change")
box1.put(2)
box2.put(2)
await graph.propagate()
t.is(box3.get(), 4, "Correct result calculated after behavior change")
})
})
|
const express = require('express')
const bodyParser = require('body-parser')
const cookieParser = require('cookie-parser')
const cors = require('cors')
const path = require('path')
const app = express()
const log = console.log
var port = process.env.PORT || 4000
// Body parser: https://github.com/expressjs/body-parser
app.use(bodyParser.urlencoded({ extended: false }))
app.use(bodyParser.json())
// CORS on ExpressJS: https://github.com/expressjs/cors
app.use(cors())
// Cookie parser: https://github.com/expressjs/cookie-parser
app.use(cookieParser())
// For fontend route
var frontendDir = path.join(path.dirname(path.dirname(__dirname)), 'frontend')
app.use('/home', express.static(path.join(frontendDir, 'build')))
app.get('/home', function(req, res) {
res.sendFile(path.join(frontendDir, 'build', 'index.html'))
})
app.get('/', function(req, res) {
res.redirect('/home')
})
app.listen(port, function() {
log('Server listening at port %d', port)
})
import { IService, DemoService } from './App'
let service: IService = new DemoService()
/**
* Test
*/
// let memberId = '001'
// let memberId2 = '002'
// let taskPoint = '001'
// let taskPoint2 = '002'
// console.log(service.memberLogin(memberId))
// console.log(service.memberGetNext(memberId))
// console.log(service.memberCheckin(memberId, taskPoint))
// console.log(service.memberLogin(memberId2))
// console.log(service.memberGetNext(memberId))
// console.log(service.memberCheckin(memberId2, taskPoint))
// console.log(service.memberGetNext(memberId))
// console.log(service.memberGetNext(memberId2))
// console.log(service.memberCheckin(memberId, taskPoint2))
// console.log(service.memberCheckin(memberId2, taskPoint2))
// console.log(service.memberGetNext(memberId))
// console.log(service.memberGetNext(memberId2))
app.post('/login', function(req, res) {
  console.log('login', req.body)
  res.set('Connection', 'close')
  let memberId = req.body.memberId
  res.json(service.memberLogin(memberId))
})

app.post('/getNext', function(req, res) {
  console.log('getNext', req.body)
  res.set('Connection', 'close')
  let memberId = req.body.memberId
  res.json(service.memberGetNext(memberId))
})

app.post('/checkIn', function(req, res) {
  console.log('checkIn', req.body)
  res.set('Connection', 'close')
  let memberId = req.body.memberId
  let taskPoint = req.body.taskPoint
  res.json(service.memberCheckin(memberId, taskPoint))
})

// Error-handling middleware must be registered after the routes it guards.
app.use(function(err, req, res, next) {
  console.error(err.stack)
  res.status(500).send('Something broke!')
})

/**
* A label that displays a help icon and can open a help page when clicked. For
* a help page in the SE3 docs, use {@link #setWikiPage} to set the location.
* Otherwise, use {@link #setHelpPage} and provide the full URL.
*
* @author Chris Jennings <https://cgjennings.ca/contact>
* @since 3.0
*/
@SuppressWarnings("serial")
public class JHelpButton extends JLabel {
private String helpPage;
private static final Icon icon = ResourceKit.getIcon("application/help.png");
private static final Icon moicon = ResourceKit.getIcon("application/help-hi.png");
private boolean fontSet = false;
public JHelpButton() {
helpPage = "index.html";
setIcon(icon);
super.setText("");
setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
final Color fg = UIManager.getColor(Theme.LINK_LABEL_FOREGROUND);
setForeground(fg == null ? Color.BLUE : fg);
addMouseListener(new MouseAdapter() {
@Override
public void mousePressed(MouseEvent e) {
if (e.getButton() == MouseEvent.BUTTON1) {
openHelpPage();
}
}
@Override
public void mouseEntered(MouseEvent e) {
setIcon(moicon);
}
@Override
public void mouseExited(MouseEvent e) {
setIcon(icon);
}
});
KeyStroke helpKey = AcceleratorTable.getApplicationTable().get("app-help-item");
if (helpKey != null) {
getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(helpKey, "HELP");
getActionMap().put("HELP", new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
Commands.HELP.actionPerformed(e);
}
});
}
}
@Override
public void setText(String text) {
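        // Lazily derive an underlined font the first time text is set, so the
        // label renders like a hyperlink.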
if (!fontSet && getFont() != null) {
Font f = getFont();
Map<TextAttribute, Object> m = new HashMap<>();
m.put(TextAttribute.UNDERLINE, TextAttribute.UNDERLINE_ON);
f = f.deriveFont(m);
setFont(f);
fontSet = true;
}
super.setText(text);
}
public String getHelpPage() {
return helpPage;
}
/**
* Sets the page that this button links to. The value can either be a
* complete http[s] URL or the base name of a Strange Eons doc page.
*
* @param helpPage a non-null, non-empty help page
*/
public void setHelpPage(String helpPage) {
this.helpPage = helpPage;
}
    /**
     * @deprecated use {@link #setHelpPage(java.lang.String)} instead.
     */
    @Deprecated
    public void setWikiPage(String pageTitle) {
setHelpPage(pageTitle);
}
/**
* Open the help page that has been set for this component. Subclasses may
* override this to implement new help mediums.
*/
public void openHelpPage() {
Commands.HELP.actionPerformed(new ActionEvent(this, 0, helpPage));
}
@Override
public Dimension getPreferredSize() {
// improves selectability on touch devices
Dimension d = super.getPreferredSize();
if (d.width < 24) {
d.width = 24;
}
return d;
}
} |
// ArduinoJson.h and FS.h should be included here, but ArduinoJson.h could not
// be included from this file ("No such file or directory"), so both headers
// are included together in the main jRemocon.ino instead.
typedef struct {
char ssid[0xFF];
char pass[0xFF];
IPAddress *ip;
IPAddress *subnet;
IPAddress *gateway;
} wifi_config;
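// Parse a dotted-quad string like "192.168.0.1" into a heap-allocated
// IPAddress; returns NULL on malformed input (the caller owns the pointer).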
IPAddress* toIPAddress(const char *str) {
char buf[0xFF];
char *tp, *err = NULL;
int num[4];
strcpy(buf, str);
tp = strtok(buf, ".");
for(int i=0; i<4; i++) {
if (tp == NULL) return NULL;
num[i] = strtol(tp, &err, 10);
if (*err != '\0') return NULL;
tp = strtok(NULL, ".");
}
return new IPAddress(num[0], num[1], num[2], num[3]);
}
// -1: file error
// -2: json error
// -3: param load error
int loadConfig(wifi_config &config) {
File configFile = SPIFFS.open("/config.json", "r");
if (!configFile) {
Serial.println("Failed to open config file");
return -1;
}
size_t size = configFile.size();
if (size > 1024) {
Serial.println("Config file size is too large");
return -1;
}
std::unique_ptr<char[]> buf(new char[size]);
configFile.readBytes(buf.get(), size);
StaticJsonBuffer<300> jsonBuffer;
JsonObject& json = jsonBuffer.parseObject(buf.get());
if (!json.success()) {
Serial.println("Failed to parse config file");
return -2;
}
if (json.containsKey("ssid") == false) {
Serial.println("Failed to load parameter: ssid");
return -3;
}
if (json.containsKey("pass") == false) {
Serial.println("Failed to load parameter: pass");
return -3;
}
if (json.containsKey("ip") == false) {
Serial.println("Failed to load parameter: ip");
return -3;
}
if (json.containsKey("subnet") == false) {
Serial.println("Failed to load parameter: subnet");
return -3;
}
if (json.containsKey("gateway") == false) {
Serial.println("Failed to load parameter: gateway");
return -3;
}
strcpy(config.ssid, json["ssid"]);
strcpy(config.pass, json["pass"]);
config.ip = toIPAddress(json["ip"]);
config.subnet = toIPAddress(json["subnet"]);
config.gateway = toIPAddress(json["gateway"]);
return 0;
}
int saveConfig(const char *ssid, const char *pass, const char *ip,
const char *subnet, const char *gateway) {
StaticJsonBuffer<300> jsonBuffer;
JsonObject& json = jsonBuffer.createObject();
json["ssid"] = ssid;
json["pass"] = pass;
json["ip"] = ip;
json["subnet"] = subnet;
json["gateway"] = gateway;
File configFile = SPIFFS.open("/config.json", "w");
if (!configFile) {
Serial.println("Failed to open config file for writing");
    return -1;  // match the int error convention used by loadConfig
  }
  json.printTo(configFile);
  return 0;
}
from pepper.framework import AbstractImage, Bounds, Object
from PIL import Image
import numpy as np
import json
import os
def read(root):
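    # Scan `root` for frames saved as <hash>_obj.json / _rgb.png / _depth.npy /
    # _meta.json, rebuild each frame, and yield (AbstractImage, [Object]) pairs.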
OBJ_HANDLE = "_obj.json"
RGB_HANDLE = "_rgb.png"
DEPTH_HANDLE = "_depth.npy"
META_HANDLE = "_meta.json"
obj_files = sorted([item for item in os.listdir(root) if item.endswith(OBJ_HANDLE)])
for obj_file in obj_files:
hash = obj_file.replace(OBJ_HANDLE, "")
with open(os.path.join(root, hash + OBJ_HANDLE)) as obj_file:
objs = json.load(obj_file)
with open(os.path.join(root, hash + META_HANDLE)) as meta_file:
meta = json.load(meta_file)
rgb = np.array(Image.open(os.path.join(root, hash + RGB_HANDLE)))
depth = np.load(os.path.join(root, hash + DEPTH_HANDLE))
img = AbstractImage(rgb, Bounds.from_json(meta["bounds"]), depth, meta["time"])
# TODO: Is this always the correct image for the objects?
objects = [Object.from_json(obj, img) for obj in objs]
yield img, objects
if __name__ == '__main__':
for image, objects in read(r"C:\Users\Pepper\Documents\Pepper\pepper\tmp\data\20190930_125844"):
print(image.time, image, objects)
for obj in objects:
print(obj.image_bounds, image.get_image(obj.image_bounds).shape)
|