content
stringlengths 10
4.9M
|
---|
<gh_stars>0
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package p11kit
import (
"encoding/binary"
"encoding/hex"
"fmt"
"io"
"strconv"
"strings"
"time"
)
// call identifies a p11-kit RPC function.
type call uint32

// String returns the PKCS #11 function name for c, or a numeric placeholder
// for values missing from callStrings.
func (c call) String() string {
	s, ok := callStrings[c]
	if !ok {
		return fmt.Sprintf("unknown call(%d)", uint32(c))
	}
	return s
}
// callStrings maps each RPC call ID to its PKCS #11 function name, used only
// for diagnostics via call.String.
var callStrings = map[call]string{
	callInitialize:          "C_Initialize",
	callFinalize:            "C_Finalize",
	callGetInfo:             "C_GetInfo",
	callGetSlotList:         "C_GetSlotList",
	callGetSlotInfo:         "C_GetSlotInfo",
	callGetTokenInfo:        "C_GetTokenInfo",
	callGetMechanismList:    "C_GetMechanismList",
	callGetMechanismInfo:    "C_GetMechanismInfo",
	callInitToken:           "C_InitToken",
	callOpenSession:         "C_OpenSession",
	callCloseSession:        "C_CloseSession",
	callCloseAllSessions:    "C_CloseAllSessions",
	callGetSessionInfo:      "C_GetSessionInfo",
	callInitPIN:             "C_InitPIN",
	callSetPIN:              "C_SetPIN",
	callGetOperationState:   "C_GetOperationState",
	callSetOperationState:   "C_SetOperationState",
	callLogin:               "C_Login",
	callLogout:              "C_Logout",
	callCreateObject:        "C_CreateObject",
	callCopyObject:          "C_CopyObject",
	callDestroyObject:       "C_DestroyObject",
	callGetObjectSize:       "C_GetObjectSize",
	callGetAttributeValue:   "C_GetAttributeValue",
	callSetAttributeValue:   "C_SetAttributeValue",
	callFindObjectsInit:     "C_FindObjectsInit",
	callFindObjects:         "C_FindObjects",
	callFindObjectsFinal:    "C_FindObjectsFinal",
	callEncryptInit:         "C_EncryptInit",
	callEncrypt:             "C_Encrypt",
	callEncryptUpdate:       "C_EncryptUpdate",
	callEncryptFinal:        "C_EncryptFinal",
	callDecryptInit:         "C_DecryptInit",
	callDecrypt:             "C_Decrypt",
	callDecryptUpdate:       "C_DecryptUpdate",
	callDecryptFinal:        "C_DecryptFinal",
	callDigestInit:          "C_DigestInit",
	callDigest:              "C_Digest",
	callDigestUpdate:        "C_DigestUpdate",
	callDigestKey:           "C_DigestKey",
	callDigestFinal:         "C_DigestFinal",
	callSignInit:            "C_SignInit",
	callSign:                "C_Sign",
	callSignUpdate:          "C_SignUpdate",
	callSignFinal:           "C_SignFinal",
	callSignRecoverInit:     "C_SignRecoverInit",
	callSignRecover:         "C_SignRecover",
	callVerifyInit:          "C_VerifyInit",
	callVerify:              "C_Verify",
	callVerifyUpdate:        "C_VerifyUpdate",
	callVerifyFinal:         "C_VerifyFinal",
	callVerifyRecoverInit:   "C_VerifyRecoverInit",
	callVerifyRecover:       "C_VerifyRecover",
	callDigestEncryptUpdate: "C_DigestEncryptUpdate",
	callDecryptDigestUpdate: "C_DecryptDigestUpdate",
	callSignEncryptUpdate:   "C_SignEncryptUpdate",
	callDecryptVerifyUpdate: "C_DecryptVerifyUpdate",
	callGenerateKey:         "C_GenerateKey",
	callGenerateKeyPair:     "C_GenerateKeyPair",
	callWrapKey:             "C_WrapKey",
	callUnwrapKey:           "C_UnwrapKey",
	callDeriveKey:           "C_DeriveKey",
	callSeedRandom:          "C_SeedRandom",
	callGenerateRandom:      "C_GenerateRandom",
	callWaitForSlotEvent:    "C_WaitForSlotEvent",
}
// call values supported by this package.
//
// Note that only callError carries an explicit `call` type; the remaining
// constants are untyped integer constants, which still convert implicitly
// wherever a call is expected (e.g. as callStrings keys).
//
// See https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.h#L46
const (
	callError               call = 0
	callInitialize               = 1
	callFinalize                 = 2
	callGetInfo                  = 3
	callGetSlotList              = 4
	callGetSlotInfo              = 5
	callGetTokenInfo             = 6
	callGetMechanismList         = 7
	callGetMechanismInfo         = 8
	callInitToken                = 9
	callOpenSession              = 10
	callCloseSession             = 11
	callCloseAllSessions         = 12
	callGetSessionInfo           = 13
	callInitPIN                  = 14
	callSetPIN                   = 15
	callGetOperationState        = 16
	callSetOperationState        = 17
	callLogin                    = 18
	callLogout                   = 19
	callCreateObject             = 20
	callCopyObject               = 21
	callDestroyObject            = 22
	callGetObjectSize            = 23
	callGetAttributeValue        = 24
	callSetAttributeValue        = 25
	callFindObjectsInit          = 26
	callFindObjects              = 27
	callFindObjectsFinal         = 28
	callEncryptInit              = 29
	callEncrypt                  = 30
	callEncryptUpdate            = 31
	callEncryptFinal             = 32
	callDecryptInit              = 33
	callDecrypt                  = 34
	callDecryptUpdate            = 35
	callDecryptFinal             = 36
	callDigestInit               = 37
	callDigest                   = 38
	callDigestUpdate             = 39
	callDigestKey                = 40
	callDigestFinal              = 41
	callSignInit                 = 42
	callSign                     = 43
	callSignUpdate               = 44
	callSignFinal                = 45
	callSignRecoverInit          = 46
	callSignRecover              = 47
	callVerifyInit               = 48
	callVerify                   = 49
	callVerifyUpdate             = 50
	callVerifyFinal              = 51
	callVerifyRecoverInit        = 52
	callVerifyRecover            = 53
	callDigestEncryptUpdate      = 54
	callDecryptDigestUpdate      = 55
	callSignEncryptUpdate        = 56
	callDecryptVerifyUpdate      = 57
	callGenerateKey              = 58
	callGenerateKeyPair          = 59
	callWrapKey                  = 60
	callUnwrapKey                = 61
	callDeriveKey                = 62
	callSeedRandom               = 63
	callGenerateRandom           = 64
	callWaitForSlotEvent         = 65
)
// binaryEncoding is the byte order used for all values on the p11-kit wire
// protocol.
var binaryEncoding = binary.BigEndian
// buffer is an append-only/consume-from-front byte buffer used to encode and
// decode RPC message fields.
type buffer struct {
	b []byte
}

// Write implements io.Writer by appending buff; it never fails.
func (b *buffer) Write(buff []byte) (int, error) {
	b.b = append(b.b, buff...)
	return len(buff), nil
}

// len reports the number of unconsumed bytes.
func (b *buffer) len() int {
	return len(b.b)
}

// bytes returns the underlying byte slice (not a copy).
func (b *buffer) bytes() []byte {
	return b.b
}

// newBuffer wraps b for decoding; the buffer takes ownership of the slice.
func newBuffer(b []byte) buffer {
	return buffer{b: b}
}
// addAttribute encodes a single attribute: type, a validity byte (0 for an
// absent value), then length-prefixed value bytes.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1039
func (b *buffer) addAttribute(a attribute) {
	b.addUint32(uint32(a.typ))
	val := a.value()
	if val == nil {
		// Absent value: validity byte 0, no length or payload.
		b.addByte(0)
		return
	}
	b.addByte(1)
	b.addUint32(uint32(len(val)))
	b.b = append(b.b, val...)
}
// addByteArray appends a length-prefixed byte array.
func (b *buffer) addByteArray(a []byte) {
	// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L730
	b.addUint32(uint32(len(a)))
	b.b = append(b.b, a...)
}

// addUint32 appends n in the protocol byte order (big endian).
func (b *buffer) addUint32(n uint32) {
	var buff [4]byte
	binaryEncoding.PutUint32(buff[:], n)
	b.b = append(b.b, buff[:]...)
}

// addUint64 appends n in the protocol byte order (big endian).
func (b *buffer) addUint64(n uint64) {
	var buff [8]byte
	binaryEncoding.PutUint64(buff[:], n)
	b.b = append(b.b, buff[:]...)
}

// addByte appends a single raw byte.
func (b *buffer) addByte(by byte) {
	b.b = append(b.b, by)
}
// addDate appends t as a PKCS #11 CK_DATE: eight ASCII digits, "YYYYMMDD".
//
// Years outside [1000, 9999] cannot be represented in four digits and are
// encoded as "0000", matching the original behavior. time.Time guarantees
// Month() is in [1, 12] and Day() in [1, 31], so two zero-padded digits
// always suffice for each; the original code's explicit range checks on
// month and day included always-true unsigned comparisons (`0 <= month`,
// flagged by go vet/staticcheck) and dead >=100 fallbacks, so they are
// replaced with straightforward zero-padded formatting.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L998
func (b *buffer) addDate(t time.Time) {
	if year := t.Year(); 1000 <= year && year <= 9999 {
		b.b = strconv.AppendInt(b.b, int64(year), 10)
	} else {
		b.b = append(b.b, "0000"...)
	}
	b.b = append(b.b, fmt.Sprintf("%02d%02d", int(t.Month()), t.Day())...)
}
// attribute decodes one attribute into a, returning false on malformed or
// truncated input. The wire layout is: type (uint32), validity byte, and —
// when the validity byte is non-zero — a length followed by a value whose
// encoding depends on the attribute type.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1232
func (b *buffer) attribute(a *attribute) bool {
	var (
		typ      uint32
		validity byte
	)
	if !b.uint32(&typ) || !b.byte(&validity) {
		return false
	}
	if validity == 0 {
		// Attribute is present but carries no value.
		*a = attribute{typ: attributeType(typ)}
		return true
	}
	// NOTE(review): length is read but not cross-checked against the size of
	// the typed value decoded below — confirm upstream does the same.
	var length uint32
	if !b.uint32(&length) {
		return false
	}
	attr := attribute{typ: attributeType(typ)}
	switch attr.typ.valueType() {
	case attributeTypeByte:
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L890
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1075
		var n byte
		if !b.byte(&n) {
			return false
		}
		attr.byte = &n
	case attributeTypeUlong:
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L891
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1097
		var n uint64
		if !b.uint64(&n) {
			return false
		}
		attr.ulong = &n
	case attributeTypeMechanismArray:
		// TODO(ericchiang): implement
		return false
	case attributeTypeDate:
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L894
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1182
		var t time.Time
		if !b.date(&t) {
			return false
		}
		attr.date = &t
	case attributeTypeByteArray:
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L895
		// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1211
		var arr []byte
		if !b.byteArray(&arr) {
			return false
		}
		attr.bytes = arr
	default:
		// Unknown value encoding for this attribute type.
		return false
	}
	*a = attr
	return true
}
// byte consumes a single byte from the front of the buffer, reporting false
// when the buffer is empty.
func (b *buffer) byte(by *byte) bool {
	if len(b.b) < 1 {
		return false
	}
	*by, b.b = b.b[0], b.b[1:]
	return true
}
// uint32 consumes a big-endian uint32 from the front of the buffer, reporting
// false when fewer than four bytes remain.
func (b *buffer) uint32(n *uint32) bool {
	const size = 4
	if len(b.b) < size {
		return false
	}
	*n = binaryEncoding.Uint32(b.b[:size])
	b.b = b.b[size:]
	return true
}
// uint64 consumes a big-endian uint64 from the front of the buffer, reporting
// false when fewer than eight bytes remain.
func (b *buffer) uint64(n *uint64) bool {
	const size = 8
	if len(b.b) < size {
		return false
	}
	*n = binaryEncoding.Uint64(b.b[:size])
	b.b = b.b[size:]
	return true
}
// byteArray consumes a length-prefixed byte array. A length of 0xffffffff is
// the protocol's "no array" sentinel: it succeeds without modifying *a. On
// success *a aliases the buffer's backing array rather than copying.
func (b *buffer) byteArray(a *[]byte) bool {
	var n uint32
	if !b.uint32(&n) {
		return false
	}
	if n == 0xffffffff {
		// https://github.com/p11-glue/p11-kit/blob/0.24.1/p11-kit/rpc-message.c#L730
		return true
	}
	if len(b.b) < int(n) {
		return false
	}
	*a = b.b[:n]
	b.b = b.b[n:]
	return true
}
// date consumes an eight-byte "YYYYMMDD" CK_DATE and stores it as a UTC
// midnight time.Time. On a parse failure the eight bytes are NOT consumed.
func (b *buffer) date(t *time.Time) bool {
	if len(b.b) < 8 {
		return false
	}
	year, err := strconv.ParseUint(string(b.b[:4]), 10, 64)
	if err != nil {
		return false
	}
	month, err := strconv.ParseUint(string(b.b[4:6]), 10, 64)
	if err != nil {
		return false
	}
	day, err := strconv.ParseUint(string(b.b[6:8]), 10, 64)
	if err != nil {
		return false
	}
	*t = time.Date(int(year), time.Month(month), int(day), 0, 0, 0, 0, time.UTC)
	b.b = b.b[8:]
	return true
}
// body is the payload of an RPC message: the call ID, the not-yet-consumed
// portion of the signature string, the raw field data, and the first error
// encountered while encoding or decoding (checked via err()).
type body struct {
	call      call
	signature string
	buffer    buffer
	error     error
}
// newResponse returns an empty reply body that echoes the request's call ID.
func newResponse(req *body) *body {
	return &body{call: req.call}
}
// Signature characters describing the wire type of each field in a message
// body. "a"-prefixed entries are arrays, "f"-prefixed entries are buffers
// (size requests rather than data).
const (
	sigAttributeArray  = "aA"
	sigAttributeBuffer = "fA"
	sigByte            = "y"
	sigByteArray       = "ay"
	sigByteBuffer      = "fy"
	sigMechanism       = "M"
	sigString          = "s"
	sigUlong           = "u"
	sigUlongArray      = "au"
	sigUlongBuffer     = "fu"
	sigVersion         = "v"
)
// err reports the first error recorded while processing the body. It also
// fails if the signature or data was not fully consumed, which would indicate
// a decoder/encoder mismatch with the peer.
func (b *body) err() error {
	if b.error != nil {
		return b.error
	}
	if len(b.signature) != 0 {
		return fmt.Errorf("trailing elements: %s", b.signature)
	}
	if b.buffer.len() != 0 {
		return fmt.Errorf("trailing data: %d bytes:\n%s", b.buffer.len(), hex.Dump(b.buffer.bytes()))
	}
	return nil
}
// writeSig appends the signature characters for a field being encoded.
func (b *body) writeSig(next string) {
	b.signature += next
}
// sig consumes the signature characters in want from the front of the body's
// signature. On mismatch it records an error on b and reports false; once an
// error is recorded all further parsing is skipped.
func (b *body) sig(want string) bool {
	if b.error != nil {
		return false
	}
	if strings.HasPrefix(b.signature, want) {
		b.signature = b.signature[len(want):]
		return true
	}
	b.error = fmt.Errorf("invalid signature attempting to parse '%s' from '%s'", want, b.signature)
	return false
}
// decode consumes sig from the body's signature, then runs fn to parse the
// corresponding field data. A short read is recorded as io.ErrUnexpectedEOF;
// once any error is recorded, subsequent decode calls are no-ops.
//
// The original version ended with a redundant bare `return` (staticcheck
// S1023); the logic is otherwise unchanged.
func (b *body) decode(sig string, fn func() bool) {
	if b.error != nil {
		return
	}
	if !b.sig(sig) {
		return
	}
	if !fn() {
		b.error = io.ErrUnexpectedEOF
	}
}
// writeAttributeArray encodes a count-prefixed array of attributes.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L278
func (b *body) writeAttributeArray(a []attribute) {
	b.writeSig(sigAttributeArray)
	b.buffer.addUint32(uint32(len(a)))
	for _, attr := range a {
		b.buffer.addAttribute(attr)
	}
}
// writeByte encodes a single byte field.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L313
func (b *body) writeByte(c byte) {
	b.writeSig(sigByte)
	b.buffer.addByte(c)
}
// writeByteArray encodes a byte-array field. A nil slice is serialized as
// "absent" along with the length hint n; a non-nil slice is serialized with
// its contents.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L371
func (b *body) writeByteArray(a []byte, n uint32) {
	b.writeSig(sigByteArray)
	if a == nil {
		b.buffer.addByte(0)
		b.buffer.addUint32(n)
		return
	}
	b.buffer.addByte(1)
	b.buffer.addByteArray(a)
}
// writeUlong encodes a CK_ULONG field (64-bit on the wire).
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L345
func (b *body) writeUlong(n uint64) {
	b.writeSig(sigUlong)
	b.buffer.addUint64(n)
}
// writeUlongArray encodes a ulong-array field. A nil slice is serialized as
// "absent" along with the length hint n; otherwise the slice is serialized
// with a count prefix followed by its elements.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L407
func (b *body) writeUlongArray(a []uint64, n uint32) {
	b.writeSig(sigUlongArray)
	if a == nil {
		b.buffer.addByte(0)
		b.buffer.addUint32(n)
		return
	}
	b.buffer.addByte(1)
	b.buffer.addUint32(uint32(len(a)))
	for _, ele := range a {
		b.buffer.addUint64(ele)
	}
}
// writeString encodes s as a fixed-width field of exactly n bytes: s is
// truncated if longer and padded with ASCII spaces if shorter, matching the
// PKCS #11 convention for space-padded strings.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L493
func (b *body) writeString(s string, n uint32) {
	b.writeSig(sigString)
	b.buffer.addUint32(n)
	for i := uint32(0); i < n; i++ {
		c := byte(' ')
		if int(i) < len(s) {
			c = s[i]
		}
		b.buffer.addByte(c)
	}
}
// writeVersion encodes a CK_VERSION as two bytes: major then minor.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L448
func (b *body) writeVersion(v Version) {
	b.writeSig(sigVersion)
	b.buffer.addByte(v.Major)
	b.buffer.addByte(v.Minor)
}
// readByte decodes a single byte field into c.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L345
func (b *body) readByte(c *byte) {
	b.decode(sigByte, func() bool {
		return b.buffer.byte(c)
	})
}
// readAttributeArray decodes a count-prefixed attribute array into a.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-client.c#L203
func (b *body) readAttributeArray(a *[]attribute) {
	b.decode(sigAttributeArray, func() bool {
		var length uint32
		if !b.buffer.uint32(&length) {
			return false
		}
		var (
			arr []attribute
			i   uint32
		)
		for i = 0; i < length; i++ {
			var attr attribute
			if !b.buffer.attribute(&attr) {
				return false
			}
			arr = append(arr, attr)
		}
		*a = arr
		return true
	})
}
// readByteArray decodes a byte-array field. When the peer sent actual data,
// *a receives it; when the peer sent only a length request, *n receives the
// requested length. Either output pointer may be nil to discard that result.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L371
func (b *body) readByteArray(a *[]byte, n *uint32) {
	var (
		arr    []byte
		arrLen uint32
	)
	b.decode(sigByteArray, func() bool {
		var hasContent byte
		if !b.buffer.byte(&hasContent) {
			return false
		}
		if hasContent != 0 {
			return b.buffer.byteArray(&arr)
		}
		return b.buffer.uint32(&arrLen)
	})
	if b.error != nil {
		return
	}
	if a != nil {
		*a = arr
	}
	if n != nil {
		*n = arrLen
	}
}
// readUlong decodes a CK_ULONG field into n.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L345
func (b *body) readUlong(n *uint64) {
	b.decode(sigUlong, func() bool {
		return b.buffer.uint64(n)
	})
}

// readUlongBuffer decodes a ulong-buffer field: the client is asking for up
// to count ulongs back.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L371
func (b *body) readUlongBuffer(count *uint32) {
	b.decode(sigUlongBuffer, func() bool {
		return b.buffer.uint32(count)
	})
}

// readByteBuffer decodes a byte-buffer field: the client is asking for up to
// count bytes back.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L358
func (b *body) readByteBuffer(count *uint32) {
	b.decode(sigByteBuffer, func() bool {
		return b.buffer.uint32(count)
	})
}
// readMechanism decodes a mechanism field: a mechanism type followed by
// type-specific parameters. RSA-PSS parameters get a structured decoding;
// all other mechanisms fall back to an opaque byte-array parameter.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1559
func (b *body) readMechanism(m *mechanism) {
	b.decode(sigMechanism, func() bool {
		if !b.buffer.uint32(&m.typ) {
			return false
		}
		switch m.typ {
		case ckmRSAPKCSPSS:
			// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L1315
			var p rsaPKCSPSSParams
			if !b.buffer.uint64(&p.hashAlg) ||
				!b.buffer.uint64(&p.mgf) ||
				!b.buffer.uint64(&p.saltLen) {
				return false
			}
			m.params = p
		default:
			var p []byte
			if !b.buffer.byteArray(&p) {
				return false
			}
			m.params = p
		}
		return true
	})
}
// readAttributeBuffer decodes an attribute-buffer field: a count followed by
// (type, value-length) pairs describing which attributes — and how many
// value bytes of each — the client wants back.
//
// https://github.com/p11-glue/p11-kit/blob/0.24.0/p11-kit/rpc-message.c#L247
func (b *body) readAttributeBuffer(attrs *[]attributeTemplate) {
	b.decode(sigAttributeBuffer, func() bool {
		var count uint32
		if !b.buffer.uint32(&count) {
			return false
		}
		var arr []attributeTemplate
		for i := uint32(0); i < count; i++ {
			var typ, valLen uint32
			if !b.buffer.uint32(&typ) || !b.buffer.uint32(&valLen) {
				return false
			}
			arr = append(arr, attributeTemplate{attributeType(typ), valLen})
		}
		*attrs = arr
		return true
	})
}
// A message has the form:
//
// message_id (uint32)
// options_length (uint32)
// body_length (uint32)
// options (array of bytes)
// body (array of bytes)
//
// A body has the form:
//
// call_id (uint32)
// signature_length (uint32)
// signature (array of bytes)
// fields (rest of the data)
//
// header is the fixed-size message preamble read with binary.Read; field
// order matches the wire layout documented above.
type header struct {
	ID         uint32
	OptionsLen uint32
	BufferLen  uint32
}
// readRequest reads one message (header, options, body) from r and returns
// the message ID and the decoded body. The body's fields remain unparsed;
// callers consume them via the read* helpers.
//
// NOTE(review): the options/body lengths come from the peer and are used to
// size an allocation unchecked — a hostile client can request a very large
// buffer. Confirm whether an upper bound should be enforced here.
func readRequest(r io.Reader) (uint32, *body, error) {
	var h header
	if err := binary.Read(r, binaryEncoding, &h); err != nil {
		return 0, nil, fmt.Errorf("reading request header: %v", err)
	}
	optsLen := int(h.OptionsLen)
	buffLen := int(h.BufferLen)
	n := optsLen + buffLen
	b := make([]byte, n)
	if _, err := io.ReadFull(r, b); err != nil {
		return 0, nil, fmt.Errorf("reading request body: %v", err)
	}
	// We ignore the "options" field since that's what the upstream server does.
	buff := newBuffer(b[optsLen:])
	var (
		callID   uint32
		sigBytes []byte
	)
	if !buff.uint32(&callID) || !buff.byteArray(&sigBytes) {
		return 0, nil, fmt.Errorf("malformed request body")
	}
	return h.ID, &body{call: call(callID), signature: string(sigBytes), buffer: buff}, nil
}
// writeResponse serializes body as a reply message on w: header (message ID,
// zero options length, body length) followed by the call ID, the
// length-prefixed signature, and the encoded field data.
func writeResponse(w io.Writer, messageID uint32, body *body) error {
	// Body length = call ID (4) + signature length prefix (4) + signature +
	// field data.
	bodyLen := 4 + 4 + len(body.signature) + body.buffer.len()
	var b buffer
	b.addUint32(messageID)
	b.addUint32(0) // options length is zero.
	b.addUint32(uint32(bodyLen))
	b.addUint32(uint32(body.call))
	b.addByteArray([]byte(body.signature))
	b.Write(body.buffer.bytes())
	_, err := w.Write(b.bytes())
	return err
}
|
import React, { FC, useState, useEffect } from 'react';
import './App.css';
import {getAxiosCall} from './utils/apiCalls';
import * as cheerio from 'cheerio';
import CountyCard from './components/countyCard';
import RegionCard from './components/regionCard';
import {REGIONS} from './constants/regions';
import Airtable from "airtable";
// SECURITY NOTE(review): this Airtable API key is hard-coded and committed to
// source control, where anyone with repo access can use it. It should be
// revoked and loaded from an environment variable instead.
const base = new Airtable({ apiKey: 'key2NRWl08o5DUlCy' }).base('appH1XlYPbZSY2Vtg');
// Props for the App component; intentionally empty for now.
interface IProps {
}
// Root component: choose a California wine region, drill into a county,
// scrape that county's winery list from the fetched HTML, and optionally
// push the results into an Airtable table named by the user.
const App: FC<IProps> = (props: IProps)=> {
  // regionData: selected region path; wineries: scraped {name, link} items;
  // county: selected county display name; tableName: Airtable table target.
  const [regionData, setRegionData] = useState('');
  const [wineries, setWineries] = useState<any>([]);
  const [county, setCounty] = useState('');
  const [tableName, setTableName] = useState('');

  useEffect(()=>{
    // countyBase("Table 1")
    // .select({ view: "Grid view" })
    // .eachPage((records, fetchNextPage) => {
    // for (var i = 0; i < records.length; i++) {
    // console.log(records[i])
    // }
    // fetchNextPage();
    // });
    // console.log(countyBase.table.name)
    // getRecords();
    // NOTE(review): leftover debug logging from testing the chunking math.
    console.log(138/10)
  },[])

  // Logs every "Winery Name" in the given Airtable table (debug helper;
  // currently unused — see the commented-out call above).
  const getRecords = (tableName:string) =>{
    base(tableName || 'Dummy').select({
      view: "Grid view"
    }).eachPage(function page(records, fetchNextPage) {
      // This function (`page`) will get called for each page of records.
      records.forEach(function(record) {
        console.log('Retrieved', record.get('Winery Name'));
      });
      // To fetch the next page of records, call `fetchNextPage`.
      // If there are more records, `page` will get called again.
      // If there are no more records, `done` will get called.
      fetchNextPage();
    }, function done(err) {
      if (err) { console.error(err); return; }
    });
  }

  // Creates the given records in the named Airtable table, alerting the user
  // on success or failure.
  const createRecords = (data:any, tableName:string) =>{
    if(!tableName.length){
      alert('Please enter the table name!')
      return;
    }
    base(tableName).create(
      data
      , function(err:any, records:any) {
      if (err) {
        console.error(err);
        alert(err)
        return;
      }
      // NOTE(review): `records` is optional-chained here but dereferenced
      // unconditionally below for `records.length` — confirm it is always
      // defined when err is null.
      records?.forEach(function (record:any) {
        console.log(record.getId());
      });
      alert(`Successfully created ${records.length} records in ${tableName}!`)
    });
  }

  // Fetches the county page for the selected region, scrapes the winery list
  // out of the returned HTML, and stores it in state.
  const handleCountyClick = async(path:string, name: string)=>{
    const responseData = await getAxiosCall('/getWinery/'+regionData + '/' + path);//fetching data
    // console.log(responseData)
    setCounty(name);
    let $ = cheerio.load(responseData);
    let resultArray:any[] = [];
    $("#posts_countable_list li").each(function(i, element) {
      let result:any = {name: '', link: ''};
      result.name = $(this).children("a").text();
      result.link = $(this).children("a").attr("href")
      resultArray.push(result);
    });
    setWineries(resultArray)
    // console.log(resultArray, wineries)
  }

  // Renders a CountyCard for each county in the currently selected region.
  const getRegionCounties = () =>{
    let counties:any = [];
    REGIONS.forEach((item)=>{
      if(item.path === regionData){
        counties = item.counties;
      }
    })
    return counties.map((item:any, index:number)=>{
      return (<CountyCard name={item.name} path={item.path} handleClick={()=>handleCountyClick(item.path, item.name)} key={index}/>)
    })
  }

  // Fetches and logs a single winery's detail page.
  const getWineryInfo = async(link:string) =>{
    let seperated = link.split('/');
    let winery = seperated[4];//getting the winery path from the full link
    const responseData = await getAxiosCall('/winery/'+winery);
    console.log(responseData)
  }

  // Splits arr into consecutive chunks of at most `size` elements.
  function chunkArrayInGroups(arr:any, size:number) {
    let result = [];
    let pos = 0;
    while (pos < arr.length) {
      result.push(arr.slice(pos, pos + size));
      pos += size;
    }
    return result;
  }

  // Pushes every scraped winery into Airtable, batching the create calls in
  // groups of 10 (Airtable's per-request record limit).
  const createCountyRecords = ()=>{
    if(!tableName.length){
      alert('Please enter the table name!')
      return;
    }
    let data = [];
    for(let i=0; i< wineries.length; i++){
      let record = {
        "fields": {
          "Winery Name" : wineries[i].name
        }
      }
      data.push(record);
    }
    if(data.length <=10){
      createRecords(data, tableName);
    }else{
      // for(let i=0; i< Math.floor(data.length/10);i++){
      // createRecords(data.slice(i*10, i+10+9), tableName);
      // }
      console.log(data)
      console.log(chunkArrayInGroups(data, 10))
      let chunkedData = chunkArrayInGroups(data, 10);
      for(let i=0; i<chunkedData.length; i++){
        createRecords(chunkedData[i], tableName)
      }
    }
  }

  return (
    <div>
      <header>
        <p>Search CA Wineries</p>
        <div className='header-right'>
          <input className='table-name-input' placeholder='Table Name' onChange={(e)=>setTableName(e.target.value)}/>
          <a href='https://airtable.com/shrsIp9DqeZK7ih6E' target='_blank'>Link to Airtable</a>
        </div>
      </header>
      <div className='regions-container'>
        {REGIONS.map((item:any, index: number)=>{
          return (<RegionCard name={item.name} path={item.path} handleClick={()=>setRegionData(item.path)} selected={regionData===item.path} key={index}/>)
        })}
      </div>
      <div className="container">
        <div className='counties-container'>
          {getRegionCounties()}
        </div>
        <div className='wineries-container'>
          {county && <div className='county-title-container'>
            <h3>{`${county} wineries`}</h3>
            <button className='airtable-button' onClick={()=>createCountyRecords()}>Add all Wineries to Airtable</button>
          </div>}
          <ol>
            {wineries && wineries.map((item:any, index:number)=>{
              return <li key={index} onClick={()=>getWineryInfo(item.link)}>
                <a href={item.link} target='_blank'>{item.name}</a>
                <button className='airtable-button' onClick={()=>createRecords(
                  [{
                    "fields": {
                      "Winery Name" : item.name
                    }
                  }], tableName)}>Add to Airtable</button>
              </li>
            })}
          </ol>
        </div>
      </div>
    </div>
  );
}
export default App;
|
<gh_stars>1-10
-- | This module exports the types used to create submits.
module Data.Factual.Write.Submit
(
-- * Submit type
Submit(..)
-- * Required modules
, module Data.Factual.Shared.Table
) where
import Data.Factual.Write
import Data.Factual.Shared.Table
import Data.Maybe (fromJust)
import qualified Data.Map as M
import Data.List.Utils (join)
-- | The Submit type represents a Write to the API which performs an upsert
-- (a row can be updated or a new row can be written). The table and user
-- must be specified, while the factual ID, reference, and comment are
-- optional (omitted for new rows). Finally the values are specified in a
-- String to String Map.
data Submit = Submit { table     :: Table                -- ^ Target table.
                     , user      :: String               -- ^ Submitting user.
                     , factualId :: Maybe String         -- ^ Existing row ID; Nothing for new rows.
                     , values    :: M.Map String String  -- ^ Column name to value.
                     , reference :: Maybe String         -- ^ Optional source reference.
                     , comment   :: Maybe String         -- ^ Optional free-form comment.
                     } deriving (Eq, Show)
-- The Submit type is a member of the Write typeclass so that it can be
-- sent as a post request to the API.
instance Write Submit where
  path submit = pathString submit
  params _    = M.empty
  body submit = M.fromList [ ("user", user submit)
                           , ("values", valuesString (values submit))
                           , ("reference", maybeString (reference submit))
                           -- Bug fix: the "comment" field previously sent the
                           -- reference value instead of the comment value.
                           , ("comment", maybeString (comment submit)) ]
-- The following functions are helpers for the Write typeclass functions.

-- pathString builds the request path: "<table>/submit" for new rows, or
-- "<table>/<factual-id>/submit" when updating an existing row.
pathString :: Submit -> String
pathString submit =
  case factualId submit of
    Nothing  -> show (table submit) ++ "/submit"
    Just fid -> show (table submit) ++ "/" ++ fid ++ "/submit"
-- valuesString renders the values Map as a JSON-style object literal.
-- Iterating M.toList (ascending key order, identical to M.keys) removes the
-- partial fromJust and a redundant per-key lookup from the original.
-- NOTE(review): keys and values are not escaped, so embedded double quotes
-- would produce malformed output — confirm inputs are pre-sanitized.
valuesString :: M.Map String String -> String
valuesString values = "{" ++ join "," (map pair $ M.toList values) ++ "}"
  where pair (k, v) = "\"" ++ k ++ "\":\"" ++ v ++ "\""
-- maybeString unwraps an optional String, defaulting to the empty string.
maybeString :: Maybe String -> String
maybeString = maybe "" id
|
/* Return true iff ARR contains CORE, in either of the two elements.
   ARR holds up to two cores; an INVALID_CORE entry terminates the list,
   so nothing after the first INVALID_CORE is examined.  */
static bool
contains_core_p (unsigned *arr, unsigned core)
{
  for (int i = 0; i < 2; i++)
    {
      if (arr[i] == INVALID_CORE)
	return false;
      if (arr[i] == core)
	return true;
    }
  return false;
}
/* eslint-disable max-len,quotes,quote-props */
// Validation message templates keyed by message ID. Placeholders in braces
// are substituted by m() below.
const intlStrings = {
    'inputSchema.validation.generic':
        'Field {rootName}.{fieldKey} {message}',
    'inputSchema.validation.required':
        'Field {rootName}.{fieldKey} is required',
    'inputSchema.validation.proxyRequired':
        'Field {rootName}.{fieldKey} is required. Please provide custom proxy URLs or use Apify Proxy.',
    'inputSchema.validation.requestListSourcesInvalid':
        'Items in {rootName}.{fieldKey} at positions [{invalidIndexes}] do not contain valid URLs',
    'inputSchema.validation.arrayKeysInvalid':
        'Keys in {rootName}.{fieldKey} at positions [{invalidIndexes}] must match regular expression "{pattern}"',
    'inputSchema.validation.arrayValuesInvalid':
        'Values in {rootName}.{fieldKey} at positions [{invalidIndexes}] must match regular expression "{pattern}"',
    // Bug fix: this message was missing the closing quote after {pattern},
    // unlike its sibling messages above and below.
    'inputSchema.validation.objectKeysInvalid':
        'Keys [{invalidKeys}] in {rootName}.{fieldKey} must match regular expression "{pattern}"',
    'inputSchema.validation.objectValuesInvalid':
        'Keys [{invalidKeys}] in {rootName}.{fieldKey} must have string value which matches regular expression "{pattern}"',
    'inputSchema.validation.additionalProperty':
        'Property {rootName}.{fieldKey} is not allowed.',
    'inputSchema.validation.proxyGroupsNotAvailable':
        'You currently do not have access to proxy groups: {groups}',
    'inputSchema.validation.customProxyInvalid':
        'Proxy URL "{invalidUrl}" has invalid format, it must be http://[username[:password]]@hostname:port.',
    'inputSchema.validation.apifyProxyCountryInvalid':
        'Country code "{invalidCountry}" is invalid. Only ISO 3166-1 alpha-2 country codes are supported.',
    'inputSchema.validation.apifyProxyCountryWithoutApifyProxyForbidden':
        'The country for Apify Proxy can be specified only when using Apify Proxy.',
    'inputSchema.validation.noAvailableAutoProxy':
        'Currently you do not have access to any proxy group usable in automatic mode.',
    'inputSchema.validation.noMatchingDefinition':
        'Field schema.properties.{fieldKey} is not matching any input schema type definition. Please make sure that it\'s type is valid.',
};
/* eslint-enable max-len,quotes,quote-props */
/**
 * Helper function to simulate intl formatMessage function.
 * Looks up the template for stringId and replaces every {variableName}
 * placeholder with the matching value. Returns the ID itself when no
 * template is registered for it.
 */
export function m(stringId: string, variables?: Record<string, any>) {
    const template = intlStrings[stringId];
    if (!template) return stringId;
    let text = template;
    if (variables) {
        for (const [name, value] of Object.entries(variables)) {
            // split/join replaces ALL occurrences of the placeholder.
            text = text.split(`{${name}}`).join(value);
        }
    }
    return text;
}
|
/**
* See description on SeleniumDslMatchers
* @author Lucas Cavalcanti
*/
public class DivExistsMatcher<T extends ContentTag> extends TypeSafeMatcher<T> {
@Override
public boolean matchesSafely(ContentTag item) {
return item.exists();
}
public void describeTo(Description description) {
description.appendText("a div that exists");
}
@Factory
public static <T extends ContentTag> Matcher<T> divExists() {
return new DivExistsMatcher<T>();
}
} |
export * from "./api.js";
export * from "./point.js";
export * from "./circle-circle.js";
export * from "./line-line.js";
export * from "./line-poly.js";
export * from "./plane-plane.js";
export * from "./ray-circle.js";
export * from "./ray-line.js";
export * from "./ray-plane.js";
export * from "./ray-poly.js";
export * from "./ray-rect.js";
export * from "./rect-circle.js";
export * from "./rect-rect.js";
|
# Computes the sum of the first N terms of the sequence a_0 = X,
# a_{k+1} = a_k^2 mod M, using cycle detection: since each term determines the
# next and there are at most M distinct values, the sequence must enter a
# cycle within M steps.
N,X,M=map(int, input().split())
m=[0]*M          # m[i] = prefix sum of the first i+1 terms
t={}             # value -> index of its first occurrence
def f(x,y):
    # Next term of the sequence.
    return x*x % y
a = X
s=0
for i in range(N):
    if a in t:
        # Value repeats: terms x..i-1 form a cycle of length i-x.
        # Total = prefix before the cycle + q full cycles + r leftover terms.
        # NOTE(review): when x == 0, m[x-1] is m[-1] (the last, normally
        # still-zero slot) standing in for "empty prefix sum" — confirm this
        # also holds when the cycle length equals M exactly.
        x = t[a]
        nn = N - x
        q,r = divmod(nn,i-x)
        s = m[x-1] + (m[i-1] - m[x-1])*q + m[x-1+r] - m[x-1]
        break
    t[a]=i
    s += a
    m[i] = s
    a = f(a,M)
print(s)
/**
* A helper class for running given SQL script.
*/
public class SqlScriptRunner {
private final Connection connection;
private final Dialect dialect;
private static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
/**
* Constructs this runner.
*
* @param connection the established connection to the database.
*/
public SqlScriptRunner(Connection connection) {
this.connection = connection;
this.dialect = guessDialect(connection);
}
/**
* Runs a script read from the specified resource file.
*
* @param baseName the base name of the resource file.
*/
public void runScript(String baseName) {
String resourceName = getSqlResourceName(baseName, this.dialect);
try (InputStream in = getClass().getResourceAsStream(resourceName)) {
try (Reader reader = new InputStreamReader(in, DEFAULT_CHARSET)) {
runScript(reader);
}
} catch (IOException e) {
throw new WorkflowException(RESOURCE_CANNOT_BE_READ.with(resourceName), e);
} catch (SQLException e) {
throw new WorkflowException(SQL_SCRIPT_FAILED.with(resourceName), e);
}
}
public void runScript(Reader reader) throws IOException, SQLException {
try (BufferedReader bufferedReader = new BufferedReader(reader)) {
List<String> lines = bufferedReader.lines().collect(Collectors.toList());
runScript(lines);
}
}
public void runScript(List<String> lines) throws SQLException {
String[] commands = parseScript(lines);
execute(commands);
}
private void execute(String[] commands) throws SQLException {
try (Statement s = this.connection.createStatement()) {
for (String command : commands) {
s.execute(command);
}
}
this.connection.commit();
}
private String[] parseScript(List<String> lines) {
String whole = lines.stream().map(String::trim).map(line -> line.replaceAll("--.*", ""))
.filter(line -> !line.isEmpty()).collect(Collectors.joining(" "));
return whole.replaceAll("/\\*.*?\\*/", "").split(";");
}
private static Dialect guessDialect(Connection connection) {
String productName = null;
try {
DatabaseMetaData metadata = connection.getMetaData();
productName = metadata.getDatabaseProductName();
return Dialect.ofProduct(productName);
} catch (SQLException e) {
throw new WorkflowException(REPOSITORY_ACCESS_ERROR_OCCURRED.toString(), e);
} catch (NoSuchElementException e) {
throw new WorkflowException(REPOSITORY_PRODUCT_UNSUPPORTED.with(productName));
}
}
private String getSqlResourceName(String baseName, Dialect dialect) {
StringBuilder b = new StringBuilder(baseName);
b.append("-").append(dialect.getSpecifier()).append(".sql");
return b.toString();
}
} |
// determineHttpMethod determines which method (GET, POST or PUT) is going to
// be used for the HTTP request. Unrecognized actions yield the empty string.
func determineHttpMethod(action string) (method string) {
	switch action {
	case "songinfo", "queueinfo", "playlists":
		return "GET"
	case "next", "previous", "pause", "resume", "add":
		return "POST"
	case "play", "save", "stop":
		return "PUT"
	default:
		return ""
	}
}
<reponame>secure-foundations/verus
use crate::attributes::get_verifier_attrs;
use crate::context::BodyCtxt;
use crate::util::{err_span_str, unsupported_err_span};
use crate::{unsupported, unsupported_err, unsupported_err_unless};
use rustc_ast::{IntTy, Mutability, UintTy};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPath;
use rustc_hir::{
GenericBound, GenericParam, GenericParamKind, Generics, HirId, ItemKind, LifetimeParamKind,
ParamName, PathSegment, PolyTraitRef, PrimTy, QPath, TraitBoundModifier, TraitFn,
TraitItemKind, Ty, Visibility, VisibilityKind,
};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::ty::{AdtDef, TyCtxt, TyKind};
use rustc_span::def_id::{DefId, LOCAL_CRATE};
use rustc_span::symbol::{kw, Ident};
use rustc_span::Span;
use rustc_trait_selection::infer::InferCtxtExt;
use std::sync::Arc;
use vir::ast::{GenericBoundX, IntRange, Path, PathX, Typ, TypBounds, TypX, Typs, VirErr};
use vir::ast_util::types_equal;
/// Converts a rustc `DefPath` into a VIR `Path`. The crate name is recorded
/// only for foreign crates; paths in the local crate get `krate: None`.
pub(crate) fn def_path_to_vir_path<'tcx>(tcx: TyCtxt<'tcx>, def_path: DefPath) -> Path {
    let krate = if def_path.krate == LOCAL_CRATE {
        None
    } else {
        Some(Arc::new(tcx.crate_name(def_path.krate).to_string()))
    };
    // Each DefPath segment is rendered via its Display impl.
    let segments =
        Arc::new(def_path.data.iter().map(|d| Arc::new(format!("{}", d))).collect::<Vec<_>>());
    Arc::new(PathX { krate, segments })
}
/// Returns the signature and body of the HIR node at `hir_id` if, and only
/// if, it is a function defined inside an `impl` block; `None` otherwise.
fn get_function_def_impl_item_node<'tcx>(
    tcx: TyCtxt<'tcx>,
    hir_id: rustc_hir::HirId,
) -> Option<(&'tcx rustc_hir::FnSig<'tcx>, &'tcx rustc_hir::BodyId)> {
    let node = tcx.hir().get(hir_id);
    match node {
        rustc_hir::Node::ImplItem(rustc_hir::ImplItem {
            kind: rustc_hir::ImplItemKind::Fn(fn_sig, body_id),
            ..
        }) => Some((fn_sig, body_id)),
        _ => None,
    }
}
/// Returns the signature and body for the function at `hir_id`, whether it is
/// an impl method, a free function, or a provided trait method. Panics (via
/// `unsupported!`/`expect`) for anything without a body, such as extern or
/// required trait functions.
pub(crate) fn get_function_def<'tcx>(
    tcx: TyCtxt<'tcx>,
    hir_id: rustc_hir::HirId,
) -> (&'tcx rustc_hir::FnSig<'tcx>, &'tcx rustc_hir::BodyId) {
    get_function_def_impl_item_node(tcx, hir_id)
        .or_else(|| match tcx.hir().get(hir_id) {
            rustc_hir::Node::Item(item) => match &item.kind {
                ItemKind::Fn(fn_sig, _, body_id) => Some((fn_sig, body_id)),
                _ => None,
            },
            rustc_hir::Node::TraitItem(item) => match &item.kind {
                TraitItemKind::Fn(fn_sig, TraitFn::Provided(body_id)) => Some((fn_sig, body_id)),
                _ => None,
            },
            node => unsupported!("extern functions, or other function Node", node),
        })
        .expect("function expected")
}
/// Returns a copy of `path` with `ident` appended as a final segment.
/// NOTE(review): the `'tcx` lifetime parameter appears unused here — confirm
/// whether it can be dropped without breaking turbofish call sites.
pub(crate) fn typ_path_and_ident_to_vir_path<'tcx>(path: &Path, ident: vir::ast::Ident) -> Path {
    let mut path = (**path).clone();
    Arc::make_mut(&mut path.segments).push(ident);
    Arc::new(path)
}
/// If the parent of `hir_id` is an `impl` block, return the `DefId` of
/// the impl's self type. Returns `None` when the parent is not an impl;
/// panics when the impl's self type is not written as a plain resolved path.
pub(crate) fn fn_item_hir_id_to_self_def_id<'tcx>(
    tcx: TyCtxt<'tcx>,
    hir_id: HirId,
) -> Option<DefId> {
    let parent_id = tcx.hir().get_parent_node(hir_id);
    let parent_node = tcx.hir().get(parent_id);
    match parent_node {
        rustc_hir::Node::Item(rustc_hir::Item {
            kind: rustc_hir::ItemKind::Impl(impll), ..
        }) => match &impll.self_ty.kind {
            rustc_hir::TyKind::Path(QPath::Resolved(
                None,
                rustc_hir::Path { res: rustc_hir::def::Res::Def(_, self_def_id), .. },
            )) => Some(*self_def_id),
            _ => {
                panic!("impl type is not given by a path");
            }
        },
        _ => None,
    }
}
/// Translate a rustc `DefId` into a VIR `Path`, preferring a path rooted
/// at the impl's self *type* for functions defined inside impl blocks.
pub(crate) fn def_id_to_vir_path<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Path {
    // The path that rustc gives a DefId might be given in terms of an 'impl' path
    // However, it makes for a better path name to use the path to the *type*.
    // So first, we check if the given DefId is the definition of a fn inside an impl.
    // If so, we construct a VIR path based on the VIR path for the type.
    if let Some(local_id) = def_id.as_local() {
        let hir = tcx.hir().local_def_id_to_hir_id(local_id);
        if get_function_def_impl_item_node(tcx, hir).is_some() {
            if let Some(self_def_id) = fn_item_hir_id_to_self_def_id(tcx, hir) {
                let ty_path = def_path_to_vir_path(tcx, tcx.def_path(self_def_id));
                return typ_path_and_ident_to_vir_path(&ty_path, def_to_path_ident(tcx, def_id));
            }
        }
    }
    // Otherwise build a path based on the segments rustc gives us
    // without doing anything fancy.
    def_path_to_vir_path(tcx, tcx.def_path(def_id))
}
/// Return the final component of `def_id`'s path as a VIR identifier.
/// Panics when the last path element is not in the value namespace
/// (i.e. not a fn/const-like name).
pub(crate) fn def_to_path_ident<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> vir::ast::Ident {
    let def_path = tcx.def_path(def_id);
    match def_path.data.last().expect("unexpected empty impl path").data {
        rustc_hir::definitions::DefPathData::ValueNs(name) => Arc::new(name.to_string()),
        _ => panic!("unexpected name of impl"),
    }
}
/// Build a `TypX::Datatype` for `def_id` with the given, already
/// translated, type arguments.
pub(crate) fn def_id_to_datatype<'tcx, 'hir>(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
    typ_args: Typs,
) -> TypX {
    let path = def_id_to_vir_path(tcx, def_id);
    TypX::Datatype(path, typ_args)
}
/// Translate the type arguments written on the last path segment and
/// pair them with the VIR path for `def_id`. Lifetime arguments are
/// dropped; const arguments panic.
pub(crate) fn def_id_to_datatype_typ_args<'tcx, 'hir>(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
    segments: &'hir [PathSegment<'hir>],
) -> (Path, Typs) {
    let mut typ_args: Vec<Typ> = Vec::new();
    if let Some(args) = &segments.last().expect("type must have a segment").args {
        for a in args.args.iter() {
            match a {
                rustc_hir::GenericArg::Type(t) => typ_args.push(ty_to_vir(tcx, &t)),
                rustc_hir::GenericArg::Lifetime(_) => {}
                _ => panic!("unexpected type arguments"),
            }
        }
    }
    (def_id_to_vir_path(tcx, def_id), Arc::new(typ_args))
}
/// Convenience wrapper: translate `def_id` plus its written type
/// arguments straight into a `TypX::Datatype`.
pub(crate) fn def_id_to_datatype_typx<'tcx, 'hir>(
    tcx: TyCtxt<'tcx>,
    def_id: DefId,
    segments: &'hir [PathSegment<'hir>],
) -> TypX {
    let (path, typ_args) = def_id_to_datatype_typ_args(tcx, def_id, segments);
    TypX::Datatype(path, typ_args)
}
// TODO: proper handling of def_ids
// use https://doc.rust-lang.org/stable/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html#method.lang_items ?
/// Return the last `:`-separated component of `def_id`'s debug path
/// string. Unlike the previous `rfind(':').unwrap()` version, this does
/// not panic when the debug string contains no colon — it then returns
/// the whole string.
pub(crate) fn hack_get_def_name<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> String {
    let debug_name = tcx.def_path_debug_str(def_id);
    // `rsplit` always yields at least one piece, so `next()` cannot fail.
    debug_name.rsplit(':').next().expect("rsplit yields at least one element").to_string()
}
/// Render a rustc `Ident` as a plain `String` (its `Display` form).
pub(crate) fn ident_to_var<'tcx>(ident: &Ident) -> String {
    format!("{}", ident)
}
/// Decide whether a HIR visibility is private for VIR purposes.
/// `Inherited` defers to the caller-supplied default; `pub` and
/// `pub(crate)` are public; `pub(in ...)` is unsupported.
pub(crate) fn is_visibility_private(vis_kind: &VisibilityKind, inherited_is_private: bool) -> bool {
    match vis_kind {
        VisibilityKind::Public | VisibilityKind::Crate(_) => false,
        VisibilityKind::Inherited => inherited_is_private,
        VisibilityKind::Restricted { .. } => unsupported!("restricted visibility"),
    }
}
/// Build a VIR visibility from a HIR visibility plus the owning module.
pub(crate) fn mk_visibility<'tcx>(
    owning_module: &Option<Path>,
    vis: &Visibility<'tcx>,
    inherited_is_private: bool,
) -> vir::ast::Visibility {
    let is_private = is_visibility_private(&vis.node, inherited_is_private);
    vir::ast::Visibility { owning_module: owning_module.clone(), is_private }
}
/// Extract the `IntRange` from an integer VIR type; panics otherwise.
pub(crate) fn get_range(typ: &Typ) -> IntRange {
    if let TypX::Int(range) = &**typ { *range } else { panic!("get_range {:?}", typ) }
}
/// Map a rustc middle integer type (or the builtin `int`/`nat` marker
/// structs, recognized by their printed name) to a VIR `IntRange`.
/// Panics on non-integer types.
pub(crate) fn mk_range<'tcx>(ty: rustc_middle::ty::Ty<'tcx>) -> IntRange {
    match ty.kind() {
        TyKind::Adt(_, _) if ty.to_string() == crate::typecheck::BUILTIN_INT => IntRange::Int,
        TyKind::Adt(_, _) if ty.to_string() == crate::typecheck::BUILTIN_NAT => IntRange::Nat,
        TyKind::Uint(rustc_middle::ty::UintTy::U8) => IntRange::U(8),
        TyKind::Uint(rustc_middle::ty::UintTy::U16) => IntRange::U(16),
        TyKind::Uint(rustc_middle::ty::UintTy::U32) => IntRange::U(32),
        TyKind::Uint(rustc_middle::ty::UintTy::U64) => IntRange::U(64),
        TyKind::Uint(rustc_middle::ty::UintTy::U128) => IntRange::U(128),
        TyKind::Uint(rustc_middle::ty::UintTy::Usize) => IntRange::USize,
        TyKind::Int(rustc_middle::ty::IntTy::I8) => IntRange::I(8),
        TyKind::Int(rustc_middle::ty::IntTy::I16) => IntRange::I(16),
        TyKind::Int(rustc_middle::ty::IntTy::I32) => IntRange::I(32),
        TyKind::Int(rustc_middle::ty::IntTy::I64) => IntRange::I(64),
        TyKind::Int(rustc_middle::ty::IntTy::I128) => IntRange::I(128),
        TyKind::Int(rustc_middle::ty::IntTy::Isize) => IntRange::ISize,
        _ => panic!("mk_range {:?}", ty),
    }
}
/// Peel off layers that are transparent for verification: `&T` always,
/// `&mut T` only when `allow_mut_ref`, and `Box<T>` (matched by the
/// `owned_box` lang item with two generic args — presumably payload +
/// allocator; TODO confirm). Returns the underlying type unchanged
/// for everything else.
pub(crate) fn mid_ty_simplify<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: rustc_middle::ty::Ty<'tcx>,
    allow_mut_ref: bool,
) -> rustc_middle::ty::Ty<'tcx> {
    match ty.kind() {
        TyKind::Ref(_, t, Mutability::Not) => mid_ty_simplify(tcx, t, allow_mut_ref),
        TyKind::Ref(_, t, Mutability::Mut) if allow_mut_ref => {
            mid_ty_simplify(tcx, t, allow_mut_ref)
        }
        TyKind::Adt(AdtDef { did, .. }, args) => {
            if Some(*did) == tcx.lang_items().owned_box() && args.len() == 2 {
                // Note: inner Box contents are simplified with allow_mut_ref = false.
                if let rustc_middle::ty::subst::GenericArgKind::Type(t) = args[0].unpack() {
                    mid_ty_simplify(tcx, t, false)
                } else {
                    panic!("unexpected type argument")
                }
            } else {
                ty
            }
        }
        _ => ty,
    }
}
// TODO review and cosolidate type translation, e.g. with `ty_to_vir`, if possible
/// Translate a rustc middle (`ty::Ty`) type into a VIR `Typ`.
///
/// Shared references are peeled (mutable ones only when `allow_mut_ref`);
/// `Box`/`Rc`/`Arc` are erased to their payload type; the builtin
/// `int`/`nat` marker structs become `IntRange` types; closures become
/// `TypX::Lambda`; anything unrecognized hits `unsupported!`.
pub(crate) fn mid_ty_to_vir<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: rustc_middle::ty::Ty<'tcx>,
    allow_mut_ref: bool,
) -> Typ {
    match ty.kind() {
        TyKind::Bool => Arc::new(TypX::Bool),
        TyKind::Uint(_) | TyKind::Int(_) => Arc::new(TypX::Int(mk_range(ty))),
        TyKind::Ref(_, tys, rustc_ast::Mutability::Not) => mid_ty_to_vir(tcx, tys, allow_mut_ref),
        TyKind::Ref(_, tys, rustc_ast::Mutability::Mut) if allow_mut_ref => {
            mid_ty_to_vir(tcx, tys, allow_mut_ref)
        }
        // `Self` in a trait context maps to the dedicated VIR self type param.
        TyKind::Param(param) if param.name == kw::SelfUpper => {
            Arc::new(TypX::TypParam(vir::def::trait_self_type_param()))
        }
        TyKind::Param(param) => Arc::new(TypX::TypParam(Arc::new(param.name.to_string()))),
        TyKind::Never => {
            // All types are inhabited in SMT; we pick an arbitrary inhabited type for Never
            Arc::new(TypX::Tuple(Arc::new(vec![])))
        }
        TyKind::Tuple(_) => {
            let typs: Vec<Typ> =
                ty.tuple_fields().map(|t| mid_ty_to_vir(tcx, t, allow_mut_ref)).collect();
            Arc::new(TypX::Tuple(Arc::new(typs)))
        }
        TyKind::Adt(AdtDef { did, .. }, args) => Arc::new({
            let s = ty.to_string();
            // TODO use lang items instead of string comparisons
            if s == crate::typecheck::BUILTIN_INT {
                TypX::Int(IntRange::Int)
            } else if s == crate::typecheck::BUILTIN_NAT {
                TypX::Int(IntRange::Nat)
            } else {
                let typ_args: Vec<Typ> = args
                    .iter()
                    .filter_map(|arg| match arg.unpack() {
                        rustc_middle::ty::subst::GenericArgKind::Type(t) => {
                            Some(mid_ty_to_vir(tcx, t, allow_mut_ref))
                        }
                        rustc_middle::ty::subst::GenericArgKind::Lifetime(_) => None,
                        _ => panic!("unexpected type argument"),
                    })
                    .collect();
                // Box<T, A> is erased to T.
                if Some(*did) == tcx.lang_items().owned_box() && typ_args.len() == 2 {
                    return typ_args[0].clone();
                }
                let def_name = vir::ast_util::path_as_rust_name(&def_id_to_vir_path(tcx, *did));
                // Rc<T>/Arc<T> are likewise erased to T.
                if (def_name == "alloc::rc::Rc" || def_name == "alloc::sync::Arc")
                    && typ_args.len() == 1
                {
                    return typ_args[0].clone();
                }
                def_id_to_datatype(tcx, *did, Arc::new(typ_args))
            }
        }),
        TyKind::Closure(_def, substs) => {
            let sig = substs.as_closure().sig();
            let args: Vec<Typ> = sig
                .inputs()
                .skip_binder()
                .iter()
                .map(|t| mid_ty_to_vir(tcx, t, allow_mut_ref))
                .collect();
            let ret = mid_ty_to_vir(tcx, sig.output().skip_binder(), allow_mut_ref);
            Arc::new(TypX::Lambda(Arc::new(args), ret))
        }
        _ => {
            unsupported!(format!("type {:?}", ty))
        }
    }
}
/// Return true when `ty` is `std::rc::Rc` or `std::sync::Arc`
/// (identified by their fully qualified path names).
pub(crate) fn is_type_std_rc_or_arc<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: rustc_middle::ty::Ty<'tcx>,
) -> bool {
    if let TyKind::Adt(AdtDef { did, .. }, _args) = ty.kind() {
        let def_name = vir::ast_util::path_as_rust_name(&def_id_to_vir_path(tcx, *did));
        def_name == "alloc::rc::Rc" || def_name == "alloc::sync::Arc"
    } else {
        false
    }
}
// TODO remove if unused
/// Debug helper: render a resolved HIR type path as `seg::seg::...`.
/// Panics when `ty` is not a plain resolved path.
pub(crate) fn _ty_resolved_path_to_debug_path(_tcx: TyCtxt<'_>, ty: &Ty) -> String {
    match &ty.kind {
        rustc_hir::TyKind::Path(QPath::Resolved(None, path)) => {
            let names: Vec<String> =
                path.segments.iter().map(|s| s.ident.name.to_ident_string()).collect();
            names.join("::")
        }
        _ => panic!("{:?} does not have a resolved path", ty),
    }
}
/// Given the `DefId` of an `impl` block (must belong to the local
/// crate), translate the impl's self type into a VIR `Typ`. Panics when
/// the id is non-local or does not name an impl item.
pub(crate) fn impl_def_id_to_self_ty<'tcx>(tcx: TyCtxt<'tcx>, impl_def_id: DefId) -> Typ {
    let local_id = impl_def_id.as_local().expect("impl_def_id_to_self_ty expects local id");
    let hir_id = tcx.hir().local_def_id_to_hir_id(local_id);
    let node = tcx.hir().get(hir_id);
    match node {
        rustc_hir::Node::Item(rustc_hir::Item {
            kind: rustc_hir::ItemKind::Impl(impll), ..
        }) => ty_to_vir(tcx, &impll.self_ty),
        _ => {
            panic!("impl_def_id_to_self_ty expected an Impl node");
        }
    }
}
/// Translate a HIR (surface-syntax) type into a VIR `Typ`: tuples,
/// shared references (peeled), primitive bool/int types, type
/// parameters (with `Self` in traits mapped to the VIR self param),
/// `Box`/`Rc`/`Arc` erased to their argument, structs/enums as
/// datatypes, and `Self` inside an impl resolved through
/// `impl_def_id_to_self_ty`. Everything else hits `unsupported!`.
pub(crate) fn ty_to_vir<'tcx>(tcx: TyCtxt<'tcx>, ty: &Ty) -> Typ {
    let Ty { hir_id: _, kind, span } = ty;
    match kind {
        rustc_hir::TyKind::Tup(tys) => {
            Arc::new(TypX::Tuple(Arc::new(tys.iter().map(|t| ty_to_vir(tcx, t)).collect())))
        }
        rustc_hir::TyKind::Rptr(
            _,
            rustc_hir::MutTy { ty: tys, mutbl: rustc_ast::Mutability::Not },
        ) => ty_to_vir(tcx, tys),
        rustc_hir::TyKind::Path(QPath::Resolved(None, path)) => Arc::new(match path.res {
            Res::PrimTy(PrimTy::Bool) => TypX::Bool,
            Res::PrimTy(PrimTy::Uint(UintTy::U8)) => TypX::Int(IntRange::U(8)),
            Res::PrimTy(PrimTy::Uint(UintTy::U16)) => TypX::Int(IntRange::U(16)),
            Res::PrimTy(PrimTy::Uint(UintTy::U32)) => TypX::Int(IntRange::U(32)),
            Res::PrimTy(PrimTy::Uint(UintTy::U64)) => TypX::Int(IntRange::U(64)),
            Res::PrimTy(PrimTy::Uint(UintTy::U128)) => TypX::Int(IntRange::U(128)),
            Res::PrimTy(PrimTy::Uint(UintTy::Usize)) => TypX::Int(IntRange::USize),
            Res::PrimTy(PrimTy::Int(IntTy::I8)) => TypX::Int(IntRange::I(8)),
            Res::PrimTy(PrimTy::Int(IntTy::I16)) => TypX::Int(IntRange::I(16)),
            Res::PrimTy(PrimTy::Int(IntTy::I32)) => TypX::Int(IntRange::I(32)),
            Res::PrimTy(PrimTy::Int(IntTy::I64)) => TypX::Int(IntRange::I(64)),
            Res::PrimTy(PrimTy::Int(IntTy::I128)) => TypX::Int(IntRange::I(128)),
            Res::PrimTy(PrimTy::Int(IntTy::Isize)) => TypX::Int(IntRange::ISize),
            Res::Def(DefKind::TyParam, def_id) => {
                let path = def_id_to_vir_path(tcx, def_id);
                let name = path.segments.last().unwrap();
                // `Self` as a type parameter maps to the dedicated VIR param.
                if **name == kw::SelfUpper.to_string() {
                    TypX::TypParam(vir::def::trait_self_type_param())
                } else {
                    TypX::TypParam(name.clone())
                }
            }
            Res::Def(DefKind::Struct, def_id) => {
                // TODO: consider using #[rust_diagnostic_item] and https://doc.rust-lang.org/stable/nightly-rustc/rustc_middle/ty/query/query_stored/type.diagnostic_items.html for the builtin lib
                let def_name = vir::ast_util::path_as_rust_name(&def_id_to_vir_path(tcx, def_id));
                if def_name == "builtin::int" {
                    TypX::Int(IntRange::Int)
                } else if def_name == "builtin::nat" {
                    TypX::Int(IntRange::Nat)
                } else if Some(def_id) == tcx.lang_items().owned_box()
                    || def_name == "alloc::rc::Rc"
                    || def_name == "alloc::sync::Arc"
                {
                    // Box/Rc/Arc are erased: translate their single type argument.
                    match &path.segments[0].args.expect("Box/Rc/Arc arg").args[0] {
                        rustc_hir::GenericArg::Type(t) => return ty_to_vir(tcx, t),
                        _ => panic!("unexpected arg to Box/Rc/Arc"),
                    }
                } else {
                    def_id_to_datatype_typx(tcx, def_id, &path.segments)
                }
            }
            Res::Def(DefKind::Enum, def_id) => def_id_to_datatype_typx(tcx, def_id, &path.segments),
            Res::SelfTy(None, Some((impl_def_id, false))) => {
                return impl_def_id_to_self_ty(tcx, impl_def_id);
            }
            _ => {
                unsupported!(format!("type {:#?} {:?} {:?}", kind, path.res, span))
            }
        }),
        _ => {
            unsupported!(format!("type {:#?} {:?}", kind, span))
        }
    }
}
/// VIR type of the HIR node `id`, taken from the body's typeck results.
pub(crate) fn typ_of_node<'tcx>(bctx: &BodyCtxt<'tcx>, id: &HirId, allow_mut_ref: bool) -> Typ {
    let ty = bctx.types.node_type(*id);
    mid_ty_to_vir(bctx.ctxt.tcx, ty, allow_mut_ref)
}
/// Like `typ_of_node`, but requires the node's type to be `&mut T`;
/// reports a VIR error at `span` otherwise.
pub(crate) fn typ_of_node_expect_mut_ref<'tcx>(
    bctx: &BodyCtxt<'tcx>,
    id: &HirId,
    span: Span,
) -> Result<Typ, VirErr> {
    let ty = bctx.types.node_type(*id);
    match ty.kind() {
        TyKind::Ref(_, _tys, rustc_ast::Mutability::Mut) => {
            Ok(mid_ty_to_vir(bctx.ctxt.tcx, ty, true))
        }
        _ => err_span_str(span, "a mutable reference is expected here"),
    }
}
/// Ask the trait solver whether `ty` implements the builtin
/// `Structural` trait (looked up as the diagnostic item
/// `builtin::Structural`), with an empty substitution and param env.
pub(crate) fn implements_structural<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: &'tcx rustc_middle::ty::TyS<'tcx>,
) -> bool {
    let structural_def_id = tcx
        .get_diagnostic_item(rustc_span::Symbol::intern("builtin::Structural"))
        .expect("structural trait is not defined");
    let substs_ref = tcx.mk_substs([].iter());
    let ty_impls_structural = tcx.infer_ctxt().enter(|infcx| {
        infcx
            .type_implements_trait(
                structural_def_id,
                ty,
                substs_ref,
                rustc_middle::ty::ParamEnv::empty(),
            )
            .must_apply_modulo_regions()
    });
    ty_impls_structural
}
// Do equality operations on these operands translate into the SMT solver's == operation?
/// True for bool/bool and int/int; for two equal datatype types, only
/// when the type implements the builtin `Structural` trait.
pub(crate) fn is_smt_equality<'tcx>(
    bctx: &BodyCtxt<'tcx>,
    _span: Span,
    id1: &HirId,
    id2: &HirId,
) -> bool {
    let (t1, t2) = (typ_of_node(bctx, id1, false), typ_of_node(bctx, id2, false));
    match (&*t1, &*t2) {
        (TypX::Bool, TypX::Bool) => true,
        (TypX::Int(_), TypX::Int(_)) => true,
        (TypX::Datatype(..), TypX::Datatype(..)) if types_equal(&t1, &t2) => {
            let ty = bctx.types.node_type(*id1);
            implements_structural(bctx.ctxt.tcx, &ty)
        }
        _ => false,
    }
}
// Do arithmetic operations on these operands translate into the SMT solver's <=, +, =>, etc.?
// (possibly with clipping/wrapping for finite-size integers?)
pub(crate) fn is_smt_arith<'tcx>(bctx: &BodyCtxt<'tcx>, id1: &HirId, id2: &HirId) -> bool {
    let t1 = typ_of_node(bctx, id1, false);
    let t2 = typ_of_node(bctx, id2, false);
    match (&*t1, &*t2) {
        (TypX::Bool, TypX::Bool) | (TypX::Int(_), TypX::Int(_)) => true,
        _ => false,
    }
}
/// Translate one HIR generic bound into a VIR `GenericBound`:
/// `Fn(Args) -> Ret` becomes `GenericBoundX::FnSpec`; the Rust marker
/// traits `Sized`/`Copy`/`Unpin`/`Sync`/`Send` are dropped (empty trait
/// list); any other trait becomes a single-element trait list.
/// Higher-ranked bounds, modifiers, and non-trait bounds are rejected
/// with `unsupported_err!`.
pub(crate) fn check_generic_bound<'tcx>(
    tcx: TyCtxt<'tcx>,
    span: Span,
    bound: &'tcx GenericBound<'tcx>,
) -> Result<vir::ast::GenericBound, VirErr> {
    match bound {
        GenericBound::Trait(
            PolyTraitRef { bound_generic_params: [], trait_ref, span: _ },
            TraitBoundModifier::None,
        ) => {
            let path = &trait_ref.path;
            let def_id = match path.res {
                rustc_hir::def::Res::Def(_, def_id) => def_id,
                _ => return unsupported_err!(span, "generic bounds"),
            };
            if Some(def_id) == tcx.lang_items().fn_trait() {
                // Fn bound: exactly one argument tuple plus one Output binding.
                let args = &path.segments.last().expect("last segment").args.expect("GenericArgs");
                unsupported_err_unless!(args.args.len() == 1, span, "generic bounds");
                unsupported_err_unless!(args.bindings.len() == 1, span, "generic bounds");
                unsupported_err_unless!(
                    args.bindings[0].gen_args.args.len() == 0,
                    span,
                    "generic bounds"
                );
                let t_args = match &args.args[0] {
                    rustc_hir::GenericArg::Type(t) => ty_to_vir(tcx, &t),
                    _ => panic!("unexpected arg to Fn"),
                };
                let t_ret = match &args.bindings[0].kind {
                    rustc_hir::TypeBindingKind::Equality { ty } => ty_to_vir(tcx, ty),
                    _ => panic!("unexpected arg to Fn"),
                };
                let args = match &*t_args {
                    TypX::Tuple(args) => args.clone(),
                    _ => panic!("unexpected arg to Fn"),
                };
                Ok(Arc::new(GenericBoundX::FnSpec(args, t_ret)))
            } else if Some(def_id) == tcx.lang_items().sized_trait()
                || Some(def_id) == tcx.lang_items().copy_trait()
                || Some(def_id) == tcx.lang_items().unpin_trait()
                || Some(def_id) == tcx.lang_items().sync_trait()
                || Some(def_id) == tcx.get_diagnostic_item(rustc_span::sym::Send)
            {
                // Rust language marker traits are ignored in VIR
                Ok(Arc::new(GenericBoundX::Traits(vec![])))
            } else {
                let typx = def_id_to_datatype_typx(tcx, def_id, &path.segments);
                if let TypX::Datatype(trait_name, _args) = typx {
                    Ok(Arc::new(GenericBoundX::Traits(vec![trait_name])))
                } else {
                    panic!("unexpected trait bound");
                }
            }
        }
        _ => {
            unsupported_err!(span, "generic bounds")
        }
    }
}
/// Translate a HIR generics list into VIR triples of
/// `(type parameter name, bound, strictly_positive)`.
///
/// Lifetime parameters and lifetime bounds are skipped. Per parameter,
/// the verifier attributes `maybe_negative` / `strictly_positive` are
/// checked (mutually exclusive; for external_body datatypes one of them
/// is required when the flag argument is set). At most one `FnSpec`
/// bound is allowed and it cannot be combined with trait bounds.
/// Where clauses are unsupported.
pub(crate) fn check_generics_bounds<'tcx>(
    tcx: TyCtxt<'tcx>,
    generics: &'tcx Generics<'tcx>,
    check_that_external_body_datatype_declares_positivity: bool,
) -> Result<Vec<(vir::ast::Ident, vir::ast::GenericBound, bool)>, VirErr> {
    let Generics { params, where_clause, span: _ } = generics;
    let mut typ_params: Vec<(vir::ast::Ident, vir::ast::GenericBound, bool)> = Vec::new();
    for param in params.iter() {
        let vattrs = get_verifier_attrs(tcx.hir().attrs(param.hir_id))?;
        let neg = vattrs.maybe_negative;
        let pos = vattrs.strictly_positive;
        if neg && pos {
            return err_span_str(
                param.span,
                "type parameter cannot be both maybe_negative and strictly_positive",
            );
        }
        if check_that_external_body_datatype_declares_positivity && !neg && !pos {
            return err_span_str(
                param.span,
                "in external_body datatype, each type parameter must be either #[verifier(maybe_negative)] or #[verifier(strictly_positive)] (maybe_negative is always safe to use)",
            );
        }
        let strictly_positive = !neg; // strictly_positive is the default
        let GenericParam { hir_id: _, name, bounds, span, pure_wrt_drop, kind } = param;
        unsupported_err_unless!(!pure_wrt_drop, *span, "generic pure_wrt_drop");
        match (name, kind) {
            (ParamName::Plain(id), GenericParamKind::Type { default: None, synthetic: false }) => {
                // lifetime bounds can be ignored for VIR, since they are only relevant
                // for rustc's borrow-checking pass
                let bounds: Vec<&GenericBound> =
                    bounds.iter().filter(|bound| !is_lifetime_bound(bound)).collect();
                // trait/function bounds
                let ident = Arc::new(id.name.as_str().to_string());
                let mut trait_bounds: Vec<Path> = Vec::new();
                let mut fn_bounds: Vec<vir::ast::GenericBound> = Vec::new();
                for bound in bounds {
                    let vir_bound = check_generic_bound(tcx, *span, bound)?;
                    match &*vir_bound {
                        GenericBoundX::Traits(ts) => {
                            trait_bounds.extend(ts.clone());
                        }
                        GenericBoundX::FnSpec(..) => fn_bounds.push(vir_bound),
                    }
                }
                unsupported_err_unless!(fn_bounds.len() <= 1, *span, "multiple function bounds");
                unsupported_err_unless!(
                    fn_bounds.len() == 0 || trait_bounds.len() == 0,
                    *span,
                    "combined trait/function bounds"
                );
                let bound = if fn_bounds.len() == 1 {
                    fn_bounds[0].clone()
                } else {
                    Arc::new(GenericBoundX::Traits(trait_bounds))
                };
                typ_params.push((ident, bound, strictly_positive));
            }
            (
                ParamName::Plain(_id),
                GenericParamKind::Lifetime { kind: LifetimeParamKind::Explicit },
            ) => {}
            (
                ParamName::Fresh(_),
                GenericParamKind::Lifetime { kind: LifetimeParamKind::Elided },
            ) => {}
            _ => unsupported_err!(*span, "complex generics", generics),
        }
    }
    unsupported_err_unless!(where_clause.predicates.len() == 0, generics.span, "where clause");
    Ok(typ_params)
}
/// Function-flavored wrapper around `check_generics_bounds`: positivity
/// is irrelevant for functions, so the third component is dropped.
pub(crate) fn check_generics_bounds_fun<'tcx>(
    tcx: TyCtxt<'tcx>,
    generics: &'tcx Generics<'tcx>,
) -> Result<TypBounds, VirErr> {
    let triples = check_generics_bounds(tcx, generics, false)?;
    let pairs: Vec<_> = triples.into_iter().map(|(ident, bound, _pos)| (ident, bound)).collect();
    Ok(Arc::new(pairs))
}
/// True exactly for outlives (lifetime) bounds; trait and lang-item
/// trait bounds are not lifetime bounds.
pub(crate) fn is_lifetime_bound(bound: &GenericBound) -> bool {
    matches!(bound, GenericBound::Outlives(..))
}
|
/**
* Created by sunny on 6/9/16.
*/
public class Weather implements Serializable {
private String temperature;
private String humidity;
private String pressure;
private ArrayList<String> descriptions;
private ArrayList<String> icons;
public void setTemperature(String t) {
temperature = t;
}
public void setHumidity(String h) {
humidity = h;
}
public void setPressure(String p) {
pressure = p;
}
public void setDescriptions(ArrayList<String> d) {
descriptions = d;
}
public void setIcons(ArrayList<String> i) {
icons = i;
}
public String getTemperature() {
return temperature;
}
public String getHumidity() {
return humidity;
}
public String getPressure() {
return pressure;
}
public ArrayList<String> getDescriptions() {
return descriptions;
}
public ArrayList<String> getIcons() {
return icons;
}
} |
def nexus_users_list_items_by_username(self, page, limit):
    """Fetch one page of the Nexus ``users/list/items`` result for
    ``self.username``.

    Requires a prior login (``self.genesis_id`` must be set); otherwise
    the error value from ``self.__error`` is returned instead of JSON.

    :param page: page number to request
    :param limit: maximum number of items per page
    :return: decoded JSON response, or the not-logged-in error value
    """
    # Use identity comparison with None (PEP 8) instead of `== None`.
    if self.genesis_id is None:
        return self.__error("Not logged in")
    parms = "?username={}&page={}&limit={}".format(self.username, page, limit)
    url = users_url.format(sdk_url, "list/items") + parms
    return self.__get(url)
class StubBWProject:
    """Stub equivalent of BWProject, which can return enough canned responses
    to create an instance of BWQueries.

    Also contains a canned response to allow BWQueries' get() method to be
    called and get info about a specific query.
    """

    def __init__(
        self, project="MyProject", username="[email protected]", password="mypassword"
    ):
        # Credentials are stored but never used for real authentication.
        self.project = project
        self.username = username
        self.password = password
        # Canned API payloads keyed by endpoint name.
        self.examples = {
            "queries": {
                "resultsTotal": 1,
                "resultsPage": -1,
                "resultsPageSize": -1,
                "results": [
                    {
                        "id": query_id,
                        "name": "My Query",
                        "description": None,
                        "creationDate": "2019-01-01T00:00:00.000+0000",
                        "lastModificationDate": "2019-01-02T00:00:00.000+0000",
                        "industry": "general-(recommended)",
                        "includedTerms": ["My Query String"],
                        "languages": ["en"],
                        "twitterLimit": 1500,
                        "dailyLimit": 10000,
                        "type": "search string",
                        "twitterScreenName": None,
                        "highlightTerms": ["my", "query", "string"],
                        "samplePercent": 100,
                        "lastModifiedUsername": "[email protected]",
                        "languageAgnostic": False,
                        "lockedQuery": False,
                        "lockedByUsername": None,
                        "lockedTime": None,
                        "createdByWizard": False,
                        "unlimitedHistoricalData": {
                            "backfillMinDate": "2019-01-01T00:00:00.000+0000",
                            "unlimitedHistoricalDataEnabled": False,
                        },
                    }
                ],
            },
            "tags": {
                "resultsTotal": -1,
                "resultsPage": -1,
                "resultsPageSize": -1,
                "results": [],
            },
            "categories": {
                "resultsTotal": -1,
                "resultsPage": -1,
                "resultsPageSize": -1,
            "results": [],
            },
        }
        # Alias for the single canned query, used by queries/<id> lookups.
        self.examples["specific_query"] = self.examples["queries"]["results"][0]
        self.apiurl = "https://api.brandwatch.com/"
        self.token = 2222222222

    def get(self, endpoint, params=None):
        """get without the need for responses library to be used

        ``params`` is accepted for signature compatibility with the real
        BWProject.get but is unused here. Its default is ``None`` rather
        than ``{}`` to avoid the shared-mutable-default-argument pitfall.
        """
        if endpoint in ["queries", "tags", "categories"]:
            return self.examples[endpoint]
        elif endpoint.startswith("queries/"):  # e.g. the call is for queries/query_id
            return self.examples["specific_query"]
        else:
            print(endpoint)
            raise NotImplementedError
def f(n):
    # Python 2 script. l[i] is the pair of colors read for team i
    # (`map` returns a list under Python 2, so tuple-unpacking below works).
    l = [map(int, raw_input().split()) for _ in xrange(n)]
    # s[c] = number of teams whose first (home) color is c.
    s = [0] * 100005
    for i, j in l:
        s[i] += 1
    for i in xrange(n):
        # home games (n-1) plus away games against hosts whose first color
        # equals this team's second color; away is the remaining games of
        # the 2*(n-1) total each team plays.
        home = n - 1 + s[l[i][1]]
        away = 2*(n - 1) - home
        print home, away
n = int(raw_input())
f(n)
/* Returns 1 when the inclusive rectangle [x_start..x_end] x [y_start..y_end]
 * extends outside the map, or when any tile inside it is occupied by
 * something other than empty space, a wall, or a door (i.e. it would
 * overlap an existing room/hallway); returns 0 when the area is free. */
int is_overlapping(int x_start, int y_start, int x_end, int y_end) {
    int out_of_bounds =
        x_start < 0 || y_start < 0 || x_end >= MAP_SIZE || y_end >= MAP_SIZE;
    if (out_of_bounds) {
        return 1;
    }
    for (int x = x_start; x <= x_end; x++) {
        for (int y = y_start; y <= y_end; y++) {
            int tile = map[x][y];
            if (tile != TILE_EMPTY && tile != TILE_WALL && tile != TILE_DOOR) {
                return 1;
            }
        }
    }
    return 0;
}
#include<bits/stdc++.h>
using namespace std;
const int N = 3e5 + 9, LG = 19; // max node count; log2 bound for binary lifting
// Fenwick tree (binary indexed tree) over 1-indexed positions.
// Supports point add + prefix/range sum, and range add via the
// difference trick (upd(l, r, v)).
template <class T>
struct BIT { //1-indexed
    int n; vector<T> t;
    BIT() {}
    BIT(int _n) {
        n = _n; t.assign(n + 1, 0);
    }
    // Sum of t[1..i].
    T query(int i) {
        T total = 0;
        while (i >= 1) {
            total += t[i];
            i -= i & -i;
        }
        return total;
    }
    // Add val at position i (no-op for i <= 0).
    void upd(int i, T val) {
        if (i <= 0) return;
        while (i <= n) {
            t[i] += val;
            i += i & -i;
        }
    }
    // Range add over [l, r] via difference updates.
    void upd(int l, int r, T val) {
        upd(l, val);
        upd(r + 1, -val);
    }
    // Sum over [l, r].
    T query(int l, int r) {
        return query(r) - query(l - 1);
    }
};
BIT<int> t; // +1 mark at st[u] for every node on the current g-path
vector<int> g[N], h[N]; // children lists of the two input trees
int st[N], en[N], T; // Euler-tour interval over tree h; T is the tour clock
int par[N][LG + 1], dep[N]; // binary-lifting ancestors and depth in tree h
// DFS over tree h rooted at u: fills the binary-lifting table par[u][*],
// depth dep[u], and the Euler interval [st[u], en[u]] such that v lies in
// u's h-subtree iff st[u] <= st[v] <= en[u].
void dfs2(int u, int p = 0) {
    par[u][0] = p;
    dep[u] = dep[p] + 1;
    st[u] = ++T;
    for (int i = 1; i <= LG; i++) par[u][i] = par[par[u][i - 1]][i - 1];
    for (auto v: h[u]) if (v != p) {
        dfs2(v, u);
    }
    en[u] = T;
}
// k-th ancestor of u in tree h (k = 0 returns u), via binary lifting.
int kth(int u, int k) {
    for (int i = 0; i <= LG; i++) {
        if ((k >> i) & 1) u = par[u][i];
    }
    return u;
}
int ans, cur, add[N]; // ans = max of cur seen; add[u] flags u as a reusable endpoint
vector<array<int, 5>> vec; // undo log per dfs frame: {z, old add[z], u, old add[u], old cur}
// DFS over tree g, keeping a +1 mark in Fenwick tree `t` at st[u] for every
// node on the current root-to-u path. Uses the Euler intervals of tree h to
// test whether the path already intersects u's h-subtree, maintains the
// counter `cur` and its maximum `ans`, and records state in `vec` so each
// frame can be undone on the way back up.
void dfs(int u) {
    if (t.query(st[u], en[u]) == 0) {
        // No path node inside u's h-subtree: binary-search the nearest
        // h-ancestor z of u whose h-subtree does contain a path node
        // (z stays 0 if none exists).
        int l = 1, r = dep[u] - 1, z = 0;
        while (l <= r) {
            int mid = l + r >> 1;
            int v = kth(u, mid);
            if (t.query(st[v], en[v])) {
                z = v; r = mid - 1;
            }
            else {
                l = mid + 1;
            }
        }
        // Save previous values so the frame can be rolled back below.
        vec.push_back({z, add[z], u, add[u], cur});
        if (z and add[z]) {
            // Reuse z's endpoint flag: move it down to u without
            // increasing the count.
            add[z] = 0; add[u] = 1;
        }
        else {
            // Otherwise start a new chain ending at u.
            cur++; add[u] = 1;
        }
    }
    else vec.push_back({0, 0, 0, 0, cur});
    t.upd(st[u], 1);
    ans = max(ans, cur);
    for (auto v: g[u]) {
        dfs(v);
    }
    // Roll back this frame's changes (C++17 structured bindings).
    auto [w, x, q, y, z] = vec.back();
    vec.pop_back();
    if (w) add[w] = x;
    if (q) add[q] = y;
    cur = z;
    t.upd(st[u], -1);
}
// Per test case: reads n and the parent arrays of trees g and h (nodes
// 2..n), preprocesses h (Euler tour + binary lifting), runs the g-DFS,
// prints the maximum `cur` reached, then clears all global state.
int32_t main() {
    ios_base::sync_with_stdio(0);
    cin.tie(0);
    int tc; cin >> tc;
    while (tc--) {
        int n; cin >> n;
        for (int i = 2; i <= n; i++) {
            int k; cin >> k;
            g[k].push_back(i);
        }
        for (int i = 2; i <= n; i++) {
            int k; cin >> k;
            h[k].push_back(i);
        }
        T = 0;
        dfs2(1);
        t = BIT<int> (n + 1);
        ans = 0; cur = 0;
        dfs(1);
        cout << ans << '\n';
        // Reset globals for the next test case.
        for (int i = 1; i <= n; i++) {
            g[i].clear();
            h[i].clear();
            add[i] = 0;
            for (int j = 0; j <= LG; j++) {
                par[i][j] = 0;
            }
        }
    }
    return 0;
}
A Tailored SMS Text Message–Based Intervention to Facilitate Patient Access to Referred Community-Based Social Needs Resources: Protocol for a Pilot Feasibility and Acceptability Study
Background Health care providers are increasingly screening patients for unmet social needs (eg, food, housing, transportation, and social isolation) and referring patients to relevant community-based resources and social services. Patients’ connection to referred services is often low, however, suggesting the need for additional support to facilitate engagement with resources. SMS text messaging presents an opportunity to address barriers related to contacting resources in an accessible, scalable, and low-cost manner. Objective In this multi-methods pilot study, we aim to develop an automated SMS text message–based intervention to promote patient connection to referred social needs resources within 2 weeks of the initial referral and to evaluate its feasibility and patient acceptability. This protocol describes the intervention, conceptual underpinnings, study design, and evaluation plan to provide a detailed illustration of how SMS technology can complement current social needs screening and referral practice patterns without disrupting care. Methods For this pilot prospective cohort study, this SMS text message–based intervention augments an existing social needs screening, referral, and navigation program at a federally qualified health center. Patients who received at least one referral for any identified unmet social need are sent 2 rounds of SMS messages over 2 weeks. The first round consists of 5-10 messages that deliver descriptions of and contact information for the referred resources. The second round consists of 2 messages that offer a brief reminder to contact the resources. Participants will evaluate the intervention via a survey and a semistructured interview, informed by an adapted technology acceptance model. Rapid qualitative and thematic analysis will be used to extract themes from the responses. Primary outcomes are implementation feasibility and patient acceptability. 
Secondary outcomes relate to intervention effectiveness: self-reported attempt to connect and successful connection to referred resources 2 weeks after the initial referral encounter. Results The study received regulatory approval in May 2021, and we anticipate enrolling 15-20 participants for this initial pilot. Conclusions This protocol presents detailed implementation methods about a novel automated SMS intervention for social care integration within primary care. By sharing the study protocol early, we intend to facilitate the development and adoption of similar tools across different clinical settings, as more health care providers seek to address the unmet social needs of patients. Study findings will provide practical insights into the design and implementation of SMS text message–based interventions to improve social and medical care coordination. International Registered Report Identifier (IRRID) DERR1-10.2196/37316 |
// repo: martinusso/money (gh_stars: 0)
package money
import (
"testing"
)
// TestAbsolute drives Absolute() with a map from input value to expected
// result: negatives flip sign and results appear rounded to two decimal
// places. Map iteration order is irrelevant since each case is independent.
func TestAbsolute(t *testing.T) {
	values := map[float64]float64{
		1.99: 1.99,
		42.987: 42.99,
		-12345.9: 12345.90,
		-1234567890.934: 1234567890.93,
		-1: 1.00,
	}
	for k, v := range values {
		got := New(k, USD).Absolute()
		if got != v {
			t.Errorf("Expected '%f', got '%f'", v, got)
		}
	}
}
// TestCurrency verifies Currency() renders the amount with the currency
// symbol and the locale-style separators for USD and BRL.
func TestCurrency(t *testing.T) {
	check := func(got, want string) {
		if got != want {
			t.Errorf("Expected '%s', got '%s'", want, got)
		}
	}
	check(New(0123456.789, USD).Currency(), "$ 123,456.79")
	check(New(0123456.742, BRL).Currency(), "R$ 123.456,74")
	check(New(01.994, BRL).Currency(), "R$ 1,99")
}
// TestFormatted checks Formatted() output (no currency symbol) for the
// USD and BRL separator conventions.
func TestFormatted(t *testing.T) {
	cases := []struct {
		got  string
		want string
	}{
		{New(0123456.789, USD).Formatted(), "123,456.79"},
		{New(0123456.742, BRL).Formatted(), "123.456,74"},
	}
	for _, c := range cases {
		if c.got != c.want {
			t.Errorf("Expected '%s', got '%s'", c.want, c.got)
		}
	}
}
// TestFormat checks Format with explicit thousands/decimal separators.
func TestFormat(t *testing.T) {
	const want = "123.456,79"
	if got := New(0123456.789, BRL).Format('.', ','); got != want {
		t.Errorf("Expected '%s', got '%s'", want, got)
	}
}
// TestCalculator exercises Subtract then Sum on the same Money value;
// the second expectation shows Subtract mutates the receiver.
func TestCalculator(t *testing.T) {
	m := New(12345.9, USD)
	if got, want := m.Subtract(42), 12303.9; got != want {
		t.Errorf("Expected '%f', got '%f'", want, got)
	}
	if got, want := m.Sum(57.954), 12361.85; got != want {
		t.Errorf("Expected '%f', got '%f'", want, got)
	}
}
// TestCompare maps each expected Compare result (-1, 0, 1) to pairs of
// (receiver value, argument value); values appear to be rounded to two
// decimal places before comparison (e.g. 2.0 vs 1.999 compares equal).
func TestCompare(t *testing.T) {
	values := map[int]map[float64]float64{
		-1: {
			2.00: 2.005,
		},
		0: {
			1.99: 1.989,
			2.0: 1.999,
			3.0: 3.004,
		},
		1: {
			2.0: 1.99,
			3.005: 3.0,
		},
	}
	for c, v := range values {
		for v1, v2 := range v {
			got := New(v1, USD).Compare(v2)
			if got != c {
				t.Errorf("Expected '%d', got '%d' ('%f', '%f')", c, got, v1, v2)
			}
		}
	}
}
// TestEquals: values within rounding distance compare as equal.
func TestEquals(t *testing.T) {
	a, b := 1.99, 1.9888
	if !New(a, USD).Equals(b) {
		t.Errorf("Should be equals ('%f', '%f')", a, b)
	}
}
// TestGreaterThan: strictly greater after rounding.
func TestGreaterThan(t *testing.T) {
	a, b := 1.998, 1.99
	if !New(a, USD).GreaterThan(b) {
		t.Errorf("Should be greater than ('%f', '%f')", a, b)
	}
}
// TestGreaterThanOrEqual covers both the strictly-greater case and the
// equal-after-rounding case.
func TestGreaterThanOrEqual(t *testing.T) {
	pairs := [][2]float64{
		{2.00, 1.99},
		{1.9898, 1.99},
	}
	for _, p := range pairs {
		if !New(p[0], USD).GreaterThanOrEqual(p[1]) {
			t.Errorf("Should be greater than or equal ('%f', '%f')", p[0], p[1])
		}
	}
}
// TestLessThan: strictly smaller value compares as less-than.
func TestLessThan(t *testing.T) {
	a, b := 1.98, 1.99
	if !New(a, USD).LessThan(b) {
		t.Errorf("Should be less than ('%f', '%f')", a, b)
	}
}
// TestLessThanOrEqual covers both the strictly-smaller case and the
// equal-after-rounding case.
func TestLessThanOrEqual(t *testing.T) {
	pairs := [][2]float64{
		{123.45, 123.456},
		{3.98, 3.978},
	}
	for _, p := range pairs {
		if !New(p[0], USD).LessThanOrEqual(p[1]) {
			t.Errorf("Should be less than or equal ('%f', '%f')", p[0], p[1])
		}
	}
}
|
package com.spryrocks.android.modules.ui.routing.endpoints;
import com.spryrocks.android.modules.ui.routing.context.IDialogTarget;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.DialogFragment;
/**
 * Routing endpoint that shows a {@link DialogFragment} on an
 * {@link IDialogTarget}. The fragment instance is created reflectively
 * from its class via the no-arg constructor when the endpoint's wrapper
 * is built.
 *
 * @param <TDialogFragment> concrete dialog fragment type handled by this endpoint
 */
class DialogFragmentEndpointBase<TDialogFragment extends DialogFragment> extends EndpointBase<IDialogTarget, TDialogFragment, TDialogFragment, DialogFragmentEndpointBase.Wrapper<TDialogFragment>, DialogEndpointSettings> {
    DialogFragmentEndpointBase(IDialogTarget target, Class<TDialogFragment> tFragmentClass) {
        super(target, tFragmentClass);
    }

    /** Builds the wrapper that instantiates and later shows the dialog fragment. */
    @NonNull
    @Override
    protected Wrapper<TDialogFragment> createWrapper(IDialogTarget target, Class<TDialogFragment> tFragmentClass) {
        return new Wrapper<>(target, tFragmentClass);
    }

    /** Default (empty) settings object for dialog endpoints. */
    @NonNull
    @Override
    protected DialogEndpointSettings createSettings() {
        return new DialogEndpointSettings();
    }

    /**
     * Wrapper pairing the target with a freshly instantiated dialog
     * fragment; {@link #start()} hands the fragment to the target.
     */
    static class Wrapper<TDialogFragment extends DialogFragment> extends EndpointBase.Wrapper<IDialogTarget, TDialogFragment, DialogEndpointSettings> {
        Wrapper(IDialogTarget target, Class<TDialogFragment> tFragmentClass) {
            super(target, createFragment(tFragmentClass));
        }

        @Override
        void setup(@Nullable DialogEndpointSettings settings) {
            if (settings == null) {
                //noinspection UnnecessaryReturnStatement
                return;
            }
            // use settings
        }

        @Override
        void start() {
            target.showDialog(wrapped);
        }

        /**
         * Instantiates the fragment via its public no-arg constructor,
         * rethrowing reflection failures as unchecked exceptions.
         */
        private static <T extends DialogFragment> T createFragment(Class<T> tDialogFragmentClass) {
            try {
                return tDialogFragmentClass.newInstance();
            } catch (InstantiationException e) {
                throw new RuntimeException(e);
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
use error::Err;
use std::f64::NAN;
/// Relative Strenght Index
pub fn rsi(data: &[f64], period: usize) -> Result<Vec<f64>, Err> {
if period > data.len() {
return Err(Err::NotEnoughtData);
}
let mut changes = Vec::new();
for i in 0..data.len() - 1 {
let change = data[i + 1] - data[i];
changes.push(change);
}
let rsi_range = data.len() - period;
let mut rsis = vec![NAN ; rsi_range];
// gains & losses
let mut gains = Vec::new();
let mut losses = Vec::new();
for i in 0..changes.len() {
if changes[i] > 0.0 {
gains.push(changes[i]);
losses.push(0.0);
} else if changes[i] < 0.0 {
losses.push(changes[i] * -1.0);
gains.push(0.0);
} else {
gains.push(0.0);
losses.push(0.0);
}
}
let mut avg_gain: f64 = gains[..period].iter().sum::<f64>() / gains[..period].len() as f64;
let mut avg_loss: f64 = losses[..period].iter().sum::<f64>() / losses[..period].len() as f64;
if avg_loss == 0.0 {
rsis[0] = 100.0;
} else {
let rs = avg_gain / avg_loss;
rsis[0] = 100.0 - (100.0 / (1.0 + rs));
}
for i in 1..rsi_range {
avg_gain = (avg_gain * (period - 1) as f64 + gains[i + (period - 1)]) / period as f64;
avg_loss = (avg_loss * (period - 1) as f64 + losses[i + (period - 1)]) / period as f64;
if avg_loss == 0.0 {
rsis[i] = 100.0;
} else {
let rs = avg_gain / avg_loss;
rsis[i] = 100.0 - (100.0 / (1.0 + rs));
}
}
Ok(rsis)
}
|
/*************************************************************************
* Runs a dialog for an exception.
*
* @param message The error message text.
* @param what A string to be appended to the message text.
* @param ex The exception to be described by the dialog.
* @param fatal Whether execution should be aborted after this dialog.
*
************************************************************************/
private static void exceptionMessage(String message, String what, Exception e, boolean fatal)
{
e.printStackTrace();
String errorKind = e.getClass() + "";
String errorWhat = e.getMessage();
String[] lines = new String[] { message, what, " ", errorKind, errorWhat };
Dialogs.message(lines, true, fatal);
} |
#ifndef CONSTANT_H
#define CONSTANT_H
/* Main window dimensions (pixels) and title. */
#define WINDOWLEN 1280
#define WINDOWHEIGHT 720
#define WINDOWTITLE "QLink"
/* HUD label prefixes for the score and elapsed-time displays. */
#define SCOREMSG "Your Score: "
#define TIMEMSG "Your Time: "
/* Player panel captions. */
#define PLAYER1MSG "Player One"
#define PLAYER2MSG "Player Two"
/* Info-area caption and its initial text. */
#define INFOMSG "INFO AREA"
#define INITINFOMSG "Enjoy your game!"
/* Feedback shown after a link/elimination attempt. Note: the identifier
 * ELIMATE_SUCCESS is a historic typo of "eliminate"; it is kept unchanged
 * because other translation units reference it by this name. */
#define ELIMATE_SUCCESS "Success"
#define NOT_SAME_GRP "The two block \naren't in the \nsame block\ngroup."
#define OVER_BROKEN_LINE "You can't link \nthese two\nblocks within \n2 broken lines"
#define SOL_NUM_MSG "Sol Number: "
/* Hint, pause, and game-state notifications. */
#define HINT_END "Hint Time end"
#define PAUSE "Paused\nPress end to\nsave"
#define START "Start Now!"
#define HINT_ADDED "Hint Time Added!"
#define HINT_BEGIN "Hint Begins"
#define GAME_OVER "GAME OVER"
#endif // CONSTANT_H
|
import {EditorState} from "@codemirror/next/state"
import {CompletionContext, CompletionResult, CompletionSource} from "@codemirror/next/autocomplete"
import {html} from "@codemirror/next/lang-html"
import ist from "ist"
// Builds an editor state from `doc` (with "|" marking the caret position)
// and runs the HTML completion source at that position.
function get(doc: string, conf: {explicit?: boolean} = {}) {
  const caret = doc.indexOf("|")
  const text = doc.slice(0, caret) + doc.slice(caret + 1)
  const state = EditorState.create({
    doc: text,
    selection: {anchor: caret},
    extensions: [html()]
  })
  const source = state.languageDataAt<CompletionSource>("autocomplete", caret)[0]
  return source(new CompletionContext(state, caret, !!conf.explicit)) as CompletionResult | null
}
// End-to-end checks for the HTML completion source, driven through get():
// tag names, attribute names/values, quoting behavior, and close tags.
describe("HTML completion", () => {
  it("completes tag names", () => {
    let c = get("<|")!.options
    ist(c.length, 100, ">")
    ist(!c.some(o => /\W/.test(o.label)))
  })
  it("doesn't complete from nothing unless explicit", () => {
    ist(!get("|"))
  })
  it("completes at top level", () => {
    let c = get("|", {explicit: true})!.options
    ist(c.length, 100, ">")
    ist(c.every(o => /^<\w+$/.test(o.label) && o.type == "type"))
  })
  it("completes inside an element", () => {
    let c = get("<body>|", {explicit: true})!.options
    ist(c.length, 100, ">")
    ist(c.some(o => o.label == "</body>"))
    ist(c.every(o => /^<(\/body>|\w+)$/.test(o.label)))
  })
  it("completes attribute names", () => {
    let c = get("<body f|")!.options
    ist(c.length)
    ist(c.every(o => o.type == "property"))
  })
  it("completes attribute names explicitly", () => {
    let c = get("<body |", {explicit: true})!.options
    ist(c.length)
    ist(c.every(o => o.type == "property"))
  })
  it("completes attribute values", () => {
    let c = get("<form method=|")!.options
    ist(c.map(o => o.label).sort().join(","), "delete,get,post,put")
  })
  it("completes the 2nd attribute's values", () => {
    let c = get("<form lang=en method=|")!.options
    ist(c.map(o => o.label).sort().join(","), "delete,get,post,put")
  })
  // An open quote before the caret: the applied text must supply the
  // closing quote.
  it("keeps quotes for attribute values", () => {
    let c = get('<form method="|')!.options
    ist(c.map(o => o.apply).sort().join(","), 'delete",get",post",put"')
  })
  it("omits already closed quotes", () => {
    let c = get('<form method="|"')!
    ist(c.to, 14)
    ist(c.options.map(o => o.apply).sort().join(","), "delete,get,post,put")
  })
  it("can handle single quotes", () => {
    let c = get("<form method='|'")!
    ist(c.to, 14)
    ist(c.options.map(o => o.apply).sort().join(","), "delete,get,post,put")
  })
  it("completes close tags", () => {
    let c = get("<body></|")!.options
    ist(c.length, 1)
    ist(c[0].apply, "body>")
  })
  it("completes partial close tags", () => {
    let c = get("<body></b|")!.options
    ist(c.length, 1)
    ist(c[0].apply, "body>")
  })
  // Innermost-first: only p and div are still open at the caret.
  it("only completes close tags that haven't already been closed", () => {
    let c = get("<html><body><div><p></|</body></html>")!.options
    ist(c.length, 2)
    ist(c.map(o => o.apply).join(","), "p>,div>")
  })
  it("includes close tag in completion after less-than", () => {
    let c = get("<html><|")!.options
    ist(c.some(o => o.apply == "/html>"))
  })
})
|
package schema
import "github.com/vaniila/hyper/gql"
// schema is the default gql.Schema implementation. It holds the three root
// objects and lazily builds its gql.SchemaConfig on first use.
type schema struct {
	query, mut, sub gql.Object
	conf gql.SchemaConfig
}
// Query sets the root query object and returns the schema for chaining.
func (v *schema) Query(o gql.Object) gql.Schema {
	v.query = o
	return v
}
// Mutation sets the root mutation object and returns the schema for chaining.
func (v *schema) Mutation(o gql.Object) gql.Schema {
	v.mut = o
	return v
}
// Subscription sets the root subscription object and returns the schema for
// chaining.
func (v *schema) Subscription(o gql.Object) gql.Schema {
	v.sub = o
	return v
}
// Config lazily builds and caches the schema configuration; repeated calls
// return the same gql.SchemaConfig instance.
func (v *schema) Config() gql.SchemaConfig {
	if v.conf == nil {
		v.conf = &schemaconfig{
			schema: v,
			compiled: new(compiled),
		}
	}
	return v.conf
}
// New creates a new schema
func New(opt ...Option) gql.Schema {
opts := newOptions(opt...)
return &schema{
query: opts.Query,
mut: opts.Mutation,
sub: opts.Subscription,
}
}
|
The Delay of the Parousia and the Changed Function of Eschatological Language
Abstract Although the New Testament texts show an awareness of the problems involved with the delay of the parousia, they still defend the legitimacy of the belief in its imminence. A similar pattern can also be found in other early Christian texts. The strategies for coping with and explaining the delay of the parousia change over time, and ultimately the understanding of “imminence” itself is developed. Although belief in the parousia appears to have been fundamental to early Christianity, the significance of this event is transformed from a hope of deliverance to a reason for moral exhortation. |
/**
 * Created by eugene on 16/5/28.
 */
public class Main2 {
    public static void main(String[] args){
        Scanner sc = new Scanner(System.in);
        // String s = sc.next();
        // String m = sc.next();
        // numbers(String.valueOf(36452411l), BigInteger.valueOf(10l));
        numbers(String.valueOf(4243), BigInteger.valueOf(100));
        // numbers(s, BigInteger.valueOf(Long.valueOf(m)));
    }
    /**
     * Simulates up to M steps of the rule "move the leading digit c to
     * position c-1", recording each distinct intermediate string until the
     * sequence repeats or the digit '1' reaches the front (a fixed point).
     * Prints the string at step (M mod cycle-length) and the final buffer.
     *
     * NOTE(review): map is keyed 0..count-1 but looked up with
     * remainder+1 below — looks like an off-by-one; confirm against the
     * intended problem statement.
     */
    private static void numbers(String S, BigInteger M){
        HashMap<Integer, String> map = new HashMap<>();
        HashSet<String> set = new HashSet<>();
        String temp = "";
        int count = 0;
        StringBuilder sb = new StringBuilder(S);
        BigInteger i = BigInteger.ZERO;
        for (; i.compareTo(M)<0; i=i.add(BigInteger.ONE)){
            char c = sb.charAt(0);
            sb.deleteCharAt(0);
            sb.insert(c-'0'-1, c);
            // '1' moved to position 0 leaves the string unchanged forever.
            if (c=='1') break;
            if (!set.contains(sb.toString())){
                set.add(sb.toString());
                map.put(count, sb.toString());
                count++;
            } else {
                // This permutation was seen before: the sequence has cycled.
                break;
            }
        }
        BigInteger remainder = M.mod(BigInteger.valueOf(count));
        System.out.println(map.get(Integer.valueOf(remainder.toString())+1));
        System.out.println(sb.toString());
    }
}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Functor.Cx
-- Copyright : (C) 2017 <NAME>
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : <NAME> <<EMAIL>>
-- Stability : experimental
-- Portability : portable
--
----------------------------------------------------------------------------
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeInType #-}
module Data.Functor.Cx
( CxFunctor(..)
, CxPointed(..)
, CxApplicative(..)
) where
import Data.Singletons.Prelude
infixl 4 <|$
infixl 4 <|*|>
-- | Functor indexed by an input context @j@ and an output context @k@;
-- mapping over the payload leaves both indices unchanged.
class CxFunctor f where
  imap :: (a -> b) -> f j k a -> f j k b
  -- | Replace every element with a constant, keeping the indices.
  (<|$) :: b -> f j k a -> f j k b
  (<|$) = imap . const
  {-# MINIMAL (imap) #-}
-- | Indexed applicative: application composes contexts, @i -> j -> k@.
-- (Its superclass 'CxPointed' is declared below in this module, which is
-- legal in Haskell.)
class CxPointed f => CxApplicative f where
  -- | Lift a value without changing the context; defaults to 'ipoint'.
  ipure :: a -> f i i a
  ipure = ipoint
  (<|*|>) :: f i j (a -> b) -> f j k a -> f i k b
  {-# MINIMAL (<|*|>) #-}
-- | Indexed pointed functor: inject a value at an unchanged context.
class CxFunctor f => CxPointed f where
  ipoint :: a -> f i i a
|
/**
 * Create the diff annotation vectors.
 *
 * For every item, record which terms switch on and off relative to the
 * previous item (diffOnTerms/diffOffTerms). Additionally, for every item,
 * record one on/off diff plus a log-probability factor for each on/off
 * combination of the item's terms that carry an explicit (non-1.0)
 * frequency (diffOnTermsFreqs/diffOffTermsFreqs/factors).
 */
private void createDiffVectors()
{
    int i;
    long sum = 0;
    /* Fill diff matrix */
    this.diffOnTerms = new int[this.allItemList.size()][];
    this.diffOffTerms = new int[this.allItemList.size()][];
    this.diffOnTerms[0] = this.items2Terms[0]; /* For the first step, all terms must be activated */
    this.diffOffTerms[0] = new int[0];
    for (i = 1; i < this.allItemList.size(); i++) {
        int prevOnTerms[] = this.items2Terms[i - 1];
        int newOnTerms[] = this.items2Terms[i];
        /* Terms to switch on are in the new set but not the previous one,
         * and vice versa for terms to switch off. */
        this.diffOnTerms[i] = setDiff(newOnTerms, prevOnTerms);
        this.diffOffTerms[i] = setDiff(prevOnTerms, newOnTerms);
        sum += this.diffOnTerms[i].length + this.diffOffTerms[i].length;
    }
    logger.info(sum + " differences detected (" + (double) sum / this.allItemList.size() + " per item)");
    this.diffOnTermsFreqs = new int[this.allItemList.size()][][];
    this.diffOffTermsFreqs = new int[this.allItemList.size()][][];
    this.factors = new double[this.allItemList.size()][];
    for (int item = 0; item < this.allItemList.size(); item++) {
        int numTerms = this.items2TermFrequencies[item].length;
        int numTermsWithExplicitFrequencies = 0;
        int numConfigs = 0;
        /*
         * Determine the number of terms that have non-1.0 frequency. We restrict them to the top 6 (the less
         * probable) due to complexity issues and hope that this a good enough approximation.
         */
        for (i = 0; i < numTerms && i < this.maxFrequencyTerms; i++) {
            if (this.items2TermFrequencies[item][this.item2TermFrequenciesOrder[item][i]] >= 1.0) {
                break;
            }
            numTermsWithExplicitFrequencies++;
        }
        /* We try each possible activity/inactivity combination of terms with explicit frequencies */
        SubsetGenerator sg = new SubsetGenerator(numTermsWithExplicitFrequencies, numTermsWithExplicitFrequencies);
        SubsetGenerator.Subset s;
        /* First, determine the number of configs (could calculate binomial coefficient of course) */
        while ((s = sg.next()) != null) {
            numConfigs++;
        }
        this.diffOnTermsFreqs[item] = new int[numConfigs][];
        this.diffOffTermsFreqs[item] = new int[numConfigs][];
        this.factors[item] = new double[numConfigs];
        /* Contains the settings of the previous run */
        IntArray prevArray = new IntArray(this.slimGraph.getNumberOfVertices());
        int config = 0;
        /* NOTE(review): this second enumeration assumes sg restarts after
         * having returned null above — confirm SubsetGenerator semantics. */
        while ((s = sg.next()) != null) {
            boolean[] hidden = new boolean[this.slimGraph.getNumberOfVertices()];
            boolean[] taken = new boolean[numTermsWithExplicitFrequencies];
            double factor = 0.0;
            /* First, activate variable terms according to the current selection */
            for (i = 0; i < s.r; i++) {
                int ti = this.item2TermFrequenciesOrder[item][s.j[i]]; /*
                                                                       * index of term within the all directly
                                                                       * associated indices
                                                                       */
                int h = this.items2DirectTerms[item][ti]; /* global index of term */
                hidden[h] = true;
                activateAncestors(h, hidden);
                factor += Math.log(this.items2TermFrequencies[item][ti]);
                taken[s.j[i]] = true;
            }
            /* Needs also respect the inactive terms in the factor */
            for (i = 0; i < numTermsWithExplicitFrequencies; i++) {
                if (!taken[i]) {
                    factor +=
                        Math.log(1 - this.items2TermFrequencies[item][this.item2TermFrequenciesOrder[item][i]]);
                }
            }
            /* Second, activate mandatory terms */
            for (i = numTermsWithExplicitFrequencies; i < numTerms; i++) {
                int ti = this.item2TermFrequenciesOrder[item][i];
                int h = this.items2DirectTerms[item][ti]; /* global index of term */
                hidden[h] = true;
                activateAncestors(h, hidden);
                /* Factor is always 0 */
            }
            /* Now make a sparse representation */
            IntArray newArray = new IntArray(hidden);
            /* And record the difference */
            this.diffOnTermsFreqs[item][config] = setDiff(newArray.get(), prevArray.get());
            this.diffOffTermsFreqs[item][config] = setDiff(prevArray.get(), newArray.get());
            this.factors[item][config] = factor;
            prevArray = newArray;
            config++;
        }
    }
}
use std::sync::Arc;
use twilight_http::Client as HttpClient;
use twilight_model::gateway::payload::incoming::MessageCreate;
/// The Ping command of our Rust Discord bot.
///
/// Replies "Pong!" in the channel the triggering message came from.
///
/// Use:
/// ```rs
/// use commands::ping;
///
/// async fn handle(http: Arc<HttpClient>, msg: Box<MessageCreate>) {
///     ping::ping(http, msg).await;
/// }
/// ```
pub async fn ping(http: Arc<HttpClient>, msg: Box<MessageCreate>) {
    // Panics (unwrap) if the content is rejected by validation or the HTTP
    // request fails — presumably acceptable for this bot; confirm.
    http.create_message(msg.channel_id)
        .content("Pong!")
        .unwrap()
        .exec()
        .await
        .unwrap();
}
//When the function is called, sends user to "ItemDrop3" screen.
public void sendBack4(View view)
{
Intent intent = new Intent (this, ItemDrop3.class);
startActivity(intent);
} |
By 2019, there could be two Donald Trumps in public office.
That, according to the New York Post, could be a true statement after the president's son, Donald Trump Jr., told a posh Long Island gun club he is thinking about challenging Gov. Andrew Cuomo (D-N.Y.).
Trump Jr. did not say he would definitely seek to unseat the two-term governor and former HUD secretary, but that if he chose to run for an office in the near future, it would likely be that one.
O'Reilly: It's Been a Very Good Week for Trump & His Haters Are Frustrated
Hannity on Trump's Syria Strike: 'Instead of Weakness, We Now Have Strength'
Krauthammer: Trump Told Our Adversaries 'America Is Back'
He said he had no interest in running for mayor, or being "one of 100" senators or congressmen.
Trump Jr. said that in the time since his father's campaign ended, Trump Organization business deals seem a little more "boring."
"The politics bug bit me," the 39-year-old said at the F6 Labs gun club in Hicksville, N.Y.
"Do I want to be behind the scenes and be a mouthpiece and fight back against crazy liberal media? Maybe." Trump Jr. said to the group that reportedly included the local police commissioner and several regional businessmen.
The Post noted that Trump Jr. is an avid outdoorsman, gun rights advocate and hunter — all factors that could help him in the largely mountainous and more conservative 'Upstate'.
Cuomo, 59, all but kicked off his campaign for a third term at a January event in his hometown of Queens, N.Y., and is seen as a possible challenger to President Trump in 2020.
GOP Lawmaker: 'Steve Bannon's White House Days Are Numbered'
Kennedy Blasts Kaine Over Syria Criticism: You Ran as VP With a 'Total Warmonger'
Judge Jeanine on Syria: The So-Called Trump-Putin 'Bromance' Is Over |
/**
 * ConfigurationLoader that reads a WavPlayerConfig from an Avro-encoded file.
 *
 * @author Matthew Stevenson <www.mechio.org>
 */
public class WavPlayerConfigLoader implements ConfigurationLoader<WavPlayerConfig, File> {
    /**
     * Config format version name.
     */
    public final static String VERSION_NAME = "AvroWavPlayerConfig";
    /**
     * Config format version number.
     */
    public final static String VERSION_NUMBER = "1.0";
    /**
     * Config format VersionProperty.
     */
    public final static VersionProperty VERSION = new VersionProperty(VERSION_NAME, VERSION_NUMBER);
    /** Returns the version property identifying this config format. */
    @Override
    public VersionProperty getConfigurationFormat() {
        return VERSION;
    }
    /**
     * Reads a WavPlayerConfig record from the given Avro file.
     *
     * @param param the file to read
     * @return the decoded configuration
     * @throws Exception if the file cannot be read or decoded
     */
    @Override
    public WavPlayerConfig loadConfiguration(File param) throws Exception {
        return AvroUtils.readFromFile(
                WavPlayerConfigRecord.class, null,
                WavPlayerConfigRecord.SCHEMA$, param, true);
    }
    /** Returns the configuration type produced by this loader. */
    @Override
    public Class<WavPlayerConfig> getConfigurationClass() {
        return WavPlayerConfig.class;
    }
    /** Returns the parameter type accepted by this loader (a File). */
    @Override
    public Class<File> getParameterClass() {
        return File.class;
    }
}
/**
 * vos_mem_print_stack_trace() - Print saved stack trace
 * @mem_struct: Pointer to the memory structure which has the saved stack trace
 * to be printed
 *
 * Logs, at FATAL level, the call stack that was recorded when the leaked
 * allocation was made.
 *
 * Return: None
 */
static inline void vos_mem_print_stack_trace(struct s_vos_mem_struct* mem_struct)
{
	VOS_TRACE(VOS_MODULE_ID_VOSS, VOS_TRACE_LEVEL_FATAL,
		  "Call stack for the source of leaked memory:");
	/* second argument is presumably the indent level passed through to
	 * print_stack_trace() — confirm against its prototype */
	print_stack_trace(&mem_struct->trace, 1);
}
<filename>shop-seller/src/main/java/quick/pager/shop/seller/response/package-info.java
package quick.pager.shop.seller.response; |
/**
 * Test for detaching an existing Modificaciones from an existing CasoDeUso:
 * after removal, looking the association up again must yield null.
 *
 * @throws BusinessLogicException
 */
@Test
public void removeModificacionesTest() throws BusinessLogicException {
    modificacionCasoDeUsoLogic.removeCasoDeUso(modificacionesData.get(0).getId());
    Assert.assertNull(modificacionCasoDeUsoLogic.getCasoDeUso(modificacionesData.get(0).getId()));
}
#pragma once
#include "../common/protocol.h"
// Receives one file streamed over the patch protocol and tracks its
// transfer progress.
class CFileLoader
{
protected:
    PATCH_START m_Info; // transfer parameters from the PATCH_START message
    DWORD m_dwReceivedSize; // bytes accumulated so far
    BYTE *m_pData; // receive buffer (presumably sized from m_Info — confirm)
    bool m_bPatched; // presumably set once the file has been applied — confirm
public:
    CFileLoader( );
    ~CFileLoader( );
    // Begins the download described by info on socket s.
    bool StartDownload( SOCKET s, PATCH_START *info );
    // Consumes one PATCH_DATA_ANSWER chunk received on socket s.
    void ProcessData( SOCKET s, PATCH_DATA_ANSWER *info );
    DWORD GetReceivedBytes( );
    // NOTE: "Expeceted" is a historic typo in the method name; kept for callers.
    DWORD GetExpecetedBytes( );
    bool IsCompleted( );
};
|
Palmer made it into Q3 for only the second time this season on Saturday, but stopped on track before he even started a flying lap with a reported loss of gearbox oil pressure.
He had set two times in the second part of qualifying that would have been good enough for seventh on the grid ahead of teammate Nico Hulkenberg, and said his qualifying was "perfect" until the problem.
The five-place grid penalty drops Palmer behind Fernando Alonso, Romain Grosjean, Kevin Magnussen, Carlos Sainz and Lance Stroll.
Speaking after the session the British driver said he knew there was a "reasonable chance" of a penalty, but said he could "shake off" any lingering doubts about reliability for the race.
"It would have been a lot harder if the reliability had come at the start of qualifying and we couldn't show what we were capable of," he said.
"We're starting a bit out of place even if we're 10th, and if we're further back we've got even more cars that we're quicker than ahead.
"It changes the race, but in terms of the pace and mentality and confidence, it's still there."
Palmer described his weekend so far as his most emphatic performance in F1 to date, as he regularly outpaced teammate Hulkenberg for the first time in 2017.
"I had a really good feeling in the car from FP1," said Palmer. "We haven't had to make any big changes, just a weekend of small tweaks and fine-tuning.
"I think today [Saturday] every single lap has been quick – it's not just trying to pull one lap out, it's every single one, and no mistakes."
While Palmer was confident, prior to his penalty being confirmed, of having the pace to score points, teammate Hulkenberg predicted a "tough" race.
The German has struggled this weekend and said he was "very happy" to salvage seventh on the grid in the circumstances.
"It was a bit of a struggle all weekend long, especially on my side of the garage we've done the set-up upside down, left, right, upside down again and again," he explained.
"In qualy I didn't feel that good in the car to be honest I'm a bit suspicious there's an issue going on with the aero.
"I couldn't really find the sweet spot, the set-up that I felt happy and the harmony I felt in Budapest. Yesterday was more the front end, today is more the rear end. It's been inconsistent so it's difficult to always target the right things.
"With how I felt in the car it's not the best situation to go into a race." |
/**
 * Adds the model to the cache. Null models are ignored; if an entry with
 * the same id already exists it is kept (putIfAbsent semantics).
 * @param model the model for adding.
 */
public void add(Model model) {
    if(model != null) {
        map.putIfAbsent(model.getId(), model);
    }
}
<reponame>amit2014/libphenom
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "phenom/job.h"
#include "phenom/log.h"
#include "phenom/sysutil.h"
#include "phenom/configuration.h"
#include "corelib/job.h"
#ifdef HAVE_KQUEUE
/* Reset a kq set to its inline (stack-embedded) storage with no
 * pending events. */
static inline void init_kq_set(struct ph_nbio_kq_set *set)
{
  set->events = set->base;
  set->size = sizeof(set->base) / sizeof(set->base[0]);
  set->used = 0;
}
/* Double the capacity of a kq set. The first growth moves the events off
 * the embedded base array (which cannot be realloc'd) onto the heap;
 * subsequent growths realloc the heap block in place. Panics on OOM. */
static void grow_kq_set(struct ph_nbio_kq_set *set)
{
  struct kevent *k;
  if (set->events == set->base) {
    /* Still using inline storage: allocate and copy the used entries. */
    k = malloc(set->size * 2 * sizeof(*k));
    if (!k) {
      ph_panic("OOM");
    }
    memcpy(k, set->events, set->used * sizeof(*k));
    set->events = k;
  } else {
    k = realloc(set->events, set->size * 2 * sizeof(*k));
    if (!k) {
      ph_panic("OOM");
    }
    set->events = k;
  }
  set->size *= 2;
}
/* Append one kevent change record to the set, growing the storage when the
 * set is (nearly) full. Arguments mirror EV_SET's parameters. */
static inline void add_kq_set(struct ph_nbio_kq_set *set,
    uintptr_t ident,
    int16_t filter,
    uint16_t flags,
    uint32_t fflags,
    intptr_t data,
    void *udata)
{
  int n;
  if (set->used + 1 >= set->size) {
    grow_kq_set(set);
  }
  n = set->used++;
  EV_SET(&set->events[n], ident, filter, flags, fflags, data, udata);
}
/* Release heap-allocated event storage; the embedded inline storage
 * needs no cleanup. */
static inline void dispose_kq_set(struct ph_nbio_kq_set *set)
{
  if (set->events != set->base) {
    free(set->events);
  }
}
/* Create the emitter's kqueue, initialize its change set, and register a
 * periodic EVFILT_TIMER that drives the timer wheel every
 * WHEEL_INTERVAL_MS. Panics if the kqueue or timer cannot be set up. */
void ph_nbio_emitter_init(struct ph_nbio_emitter *emitter)
{
  struct kevent tev;
  emitter->io_fd = kqueue();
  if (emitter->io_fd == -1) {
    ph_panic("kqueue(): `Pe%d", errno);
  }
  init_kq_set(&emitter->kqset);
  // Configure timer
  EV_SET(&tev, 0, EVFILT_TIMER, EV_ADD, 0, WHEEL_INTERVAL_MS, emitter);
  if (kevent(emitter->io_fd, &tev, 1, NULL, 0, NULL)) {
    ph_panic("setting up timer: kevent: `Pe%d", errno);
  }
}
/* Translate one received kevent into the corresponding phenom action:
 * timer events tick the wheel; read/write events re-dispatch their job.
 * Read/write events are armed EV_ONESHOT, so kmask is cleared before the
 * job is dispatched. */
static inline void dispatch_kevent(struct ph_nbio_emitter *emitter,
    ph_thread_t *thread, struct kevent *event)
{
  ph_iomask_t mask;
  ph_job_t *job;
  if (event->filter != EVFILT_TIMER && (event->flags & EV_ERROR) != 0) {
    // We're pretty strict about errors at this stage to try to
    // ensure that we're doing the right thing. There may be
    // cases that we should ignore
    ph_panic("kqueue error on fd:%d `Pe%d",
        (int)event->ident, (int)event->data);
  }
  switch (event->filter) {
    case EVFILT_TIMER:
      gettimeofday(&thread->now, NULL);
      thread->refresh_time = false;
      ph_nbio_emitter_timer_tick(emitter);
      break;
    case EVFILT_READ:
      mask = PH_IOMASK_READ;
      // You'd think that we'd want to do this here, but EV_EOF can
      // be set when we notice that read has been shutdown, but while
      // we still have data in the buffer that we want to read.
      // On this platform we detect EOF as part of attempting to read
      /*
      if (event->flags & EV_EOF) {
        mask |= PH_IOMASK_ERR;
      }
      */
      thread->refresh_time = true;
      job = event->udata;
      job->kmask = 0;
      ph_nbio_emitter_dispatch_immediate(emitter, job, mask);
      break;
    case EVFILT_WRITE:
      thread->refresh_time = true;
      job = event->udata;
      job->kmask = 0;
      ph_nbio_emitter_dispatch_immediate(emitter, job, PH_IOMASK_WRITE);
      break;
  }
}
/* Event loop for one emitter thread: submit pending changes and collect
 * ready events in a single kevent() call, dispatch them, then run the
 * deferred-job queue, GC collector and epoch poll. Runs until the global
 * _ph_run_loop flag is cleared. */
void ph_nbio_emitter_run(struct ph_nbio_emitter *emitter, ph_thread_t *thread)
{
  int n, i;
  int max_chunk, max_sleep;
  struct timespec ts;
  max_chunk = ph_config_query_int("$.nbio.max_per_wakeup", 1024);
  max_sleep = ph_config_query_int("$.nbio.max_sleep", 5000);
  ts.tv_sec = max_sleep / 1000;
  ts.tv_nsec = (max_sleep - (ts.tv_sec * 1000)) * 1000000;
  while (ck_pr_load_int(&_ph_run_loop)) {
    /* kqset.events doubles as the changelist (first kqset.used entries)
     * and the eventlist that kevent() fills in. */
    n = kevent(emitter->io_fd, emitter->kqset.events, emitter->kqset.used,
        emitter->kqset.events, MIN(emitter->kqset.size, max_chunk), &ts);
    if (n < 0 && errno != EINTR) {
      ph_panic("kevent: `Pe%d", errno);
    }
    if (n <= 0) {
      /* Timed out (or EINTR): still give the collector/epoch a chance. */
      ph_job_collector_emitter_call(emitter);
      ph_thread_epoch_poll();
      continue;
    }
    ph_thread_epoch_begin();
    for (i = 0; i < n; i++) {
      dispatch_kevent(emitter, thread, &emitter->kqset.events[i]);
    }
    /* If we filled the buffer, enlarge it so the next wakeup can both
     * carry new changes and receive at least as many events. */
    if (n + 1 >= emitter->kqset.size) {
      grow_kq_set(&emitter->kqset);
    }
    emitter->kqset.used = 0;
    if (ph_job_have_deferred_items(thread)) {
      ph_job_pool_apply_deferred_items(thread);
    }
    ph_thread_epoch_end();
    ph_job_collector_emitter_call(emitter);
    ph_thread_epoch_poll();
  }
  dispose_kq_set(&emitter->kqset);
}
/* Arm or disarm kqueue interest for a job according to mask. When called
 * from the emitter's own thread the changes are queued on its batch set
 * (flushed by the next kevent() in the run loop); otherwise they are
 * applied immediately via a private set. Read/write filters are armed
 * EV_ONESHOT. No-op if the job has no fd or the mask is unchanged. */
ph_result_t ph_nbio_emitter_apply_io_mask(struct ph_nbio_emitter *emitter,
    ph_job_t *job, ph_iomask_t mask)
{
  struct ph_nbio_kq_set *set, local_set;
  int res;
  if (job->fd == -1) {
    return PH_OK;
  }
  if (mask == job->kmask) {
    return PH_OK;
  }
  if (emitter == ph_thread_self()->is_emitter) {
    set = &emitter->kqset;
  } else {
    init_kq_set(&local_set);
    set = &local_set;
  }
  if (mask & PH_IOMASK_READ) {
    add_kq_set(set, job->fd, EVFILT_READ, EV_ADD|EV_ONESHOT, 0, 0, job);
  }
  if (mask & PH_IOMASK_WRITE) {
    add_kq_set(set, job->fd, EVFILT_WRITE, EV_ADD|EV_ONESHOT, 0, 0, job);
  }
  if ((mask & (PH_IOMASK_READ|PH_IOMASK_WRITE)) == 0) {
    // Neither read nor write -> delete
    add_kq_set(set, job->fd, EVFILT_READ, EV_DELETE, 0, 0, job);
    add_kq_set(set, job->fd, EVFILT_WRITE, EV_DELETE, 0, 0, job);
  }
  job->kmask = mask;
  job->mask = mask;
  if (set == &local_set) {
    // Apply it immediately
    res = kevent(emitter->io_fd, set->events, set->used, NULL, 0, NULL);
    if (res != 0 && mask == 0 && errno == ENOENT) {
      // It's "OK" if we decided to delete it and it wasn't there
      res = 0;
    }
    if (res != 0) {
      ph_panic("kevent: setting mask to %02x on fd %d with %d slots -> `Pe%d",
          mask, job->fd, set->used, errno);
      return PH_ERR;
    }
  }
  return PH_OK;
}
/* Return the kernel-side (kqueue) I/O mask last applied for this job. */
ph_iomask_t ph_job_get_kmask(ph_job_t *job)
{
  return job->kmask;
}
#endif
/* vim:ts=2:sw=2:et:
*/
|
/*
    Change the item in the list at index. Return the old item.
    If index is past the current end, the list is grown so that index is
    addressable and the length becomes index + 1. Returns 0 (and leaves the
    list unchanged) when growing fails. The update runs under the list lock.
 */
void *mprSetItem(MprList *lp, int index, cvoid *item)
{
    void *old;
    int length;
    mprAssert(lp);
    mprAssert(lp->size >= 0);
    mprAssert(lp->length >= 0);
    mprAssert(index >= 0);
    length = lp->length;
    if (index >= length) {
        length = index + 1;
    }
    lock(lp);
    if (length > lp->size) {
        if (growList(lp, length - lp->size) < 0) {
            unlock(lp);
            return 0;
        }
    }
    /* NOTE(review): when index was past the old end, `old` reads a slot
       that was never assigned — presumably zeroed by growList; confirm. */
    old = lp->items[index];
    lp->items[index] = (void*) item;
    lp->length = length;
    unlock(lp);
    return old;
}
<reponame>andieritter/groSUREy
import * as React from 'react';
import { useState } from 'react';
import { TextField } from '@material-ui/core';
const NewItem: React.FC = (): JSX.Element => {
return (
<div>
<form>
<h2>New Item</h2>
<TextField id="itemInput" label="item" variant="outlined"></TextField>
<TextField id="itemBrand" label="brand" variant="outlined"></TextField>
<TextField id="itemMaxPrice" label="maximum price" variant="outlined"></TextField>
<TextField id="itemNotes" label="notes" variant="outlined"></TextField>
</form>
</div>
)
}
export default NewItem; |
// InRange returns true if the given value is within the range.
func (e *AnalysisExpected) InRange(value float64) bool {
if e.Min != nil && *e.Min > value {
return false
}
if e.Max != nil && *e.Max < value {
return false
}
return true
} |
// ParseServiceNode is the inverse of service node function
func ParseServiceNode(s string) (Node, error) {
parts := strings.Split(s, serviceNodeSeparator)
out := Node{}
if len(parts) != 4 {
return out, errors.New("missing parts in the service node")
}
out.Type = NodeType(parts[0])
out.IPAddress = parts[1]
out.ID = parts[2]
out.Domain = parts[3]
return out, nil
} |
def load_data_from_finegan_dollak_csv_file(self, in_csv):
    """Populate the schema graph from a Finegan-Dollak-style schema CSV.

    Each row describes one column. Rows are assumed to be grouped by table:
    a new Table node is opened whenever 'Table Name' changes. Rows whose
    table name is '-' are skipped. Names are upper-cased; key flags and the
    data type are normalized from their free-text cells. Finally the
    adjacency matrix over all indexed nodes is (re)built.
    """
    with open(in_csv, encoding='utf-8-sig') as f:
        reader = csv.DictReader(f)
        current_table = None
        for row in reader:
            # '-' marks filler rows carrying no table/column information.
            if row['Table Name'] == '-':
                continue
            # NOTE: the leading space in ' Field Name' etc. matches the
            # header spelling used by the source CSV files.
            table_name = row['Table Name'].strip().upper()
            field_name = row[' Field Name'].strip().upper()
            is_primary_key = row[' Is Primary Key'].strip().lower() in ['y', 'yes', 'pri']
            is_foreign_key = row[' Is Foreign Key'].strip().lower() in ['y', 'yes']
            data_type = row[' Type'].strip().lower()
            # Open a new table node when the table name changes between rows.
            if current_table is None or table_name != current_table.name:
                table_node = Table(table_name, caseless=self.caseless)
                self.index_table(table_node)
                current_table = table_node
            field_node = Field(current_table, field_name,
                               caseless=self.caseless,
                               data_type=data_type,
                               is_primary_key=is_primary_key,
                               is_foreign_key=is_foreign_key)
            self.index_field(field_node)
    self.create_adjacency_matrix()
/**
 * Task 04: Graph querying.
 *
 * Loads an RDF model of library events and answers SPARQL queries over it
 * by date, event type, or library (see the CONSULTA_* constants).
 *
 * @author elozano
 *
 */
public class Consultas
{
    private static String ns = "http://exmple.com/biblioteca/";
    private static String filename;
    private static Model model;
    private static InputStream in;
    // Query-type selectors accepted by consulta(campo, tipo).
    private static final int CONSULTA_FECHA = 2;
    private static final int CONSULTA_TIPO = 0;
    private static final int CONSULTA_BIBLIOTECA = 1;
    // public static String foafNS = "http://xmlns.com/foaf/0.1#";
    // public static String foafEmailURI = foafNS+"email";
    // public static String foafKnowsURI = foafNS+"knows";
    //
    /**
     * Loads the RDF/XML model from the given file.
     * @param file path of the RDF file to read.
     */
    public Consultas(String file){
        filename =file;
        // Create an empty model
        model = ModelFactory.createDefaultModel();
        // Use the FileManager to find the input file
        in = FileManager.get().open(filename);
        if (in == null)
            throw new IllegalArgumentException("File: "+filename+" not found");
        // Read the RDF/XML file
        // model.read(new FileInputStream(filename),null,"TTL");
        model.read(in, null);
    }
    public static void main(String args[]) throws FileNotFoundException //test main
    {
        filename = "/Users/Sanchez/Documents/workspace/consultas/src/eventosBiblioteca1.rdf";
        // Create an empty model
        model = ModelFactory.createDefaultModel();
        // Use the FileManager to find the input file
        in = FileManager.get().open(filename);
        if (in == null)
            throw new IllegalArgumentException("File: "+filename+" not found");
        // Read the RDF/XML file
        // model.read(new FileInputStream(filename),null,"TTL");
        model.read(in, null);
        List<String> resultados= consulta("/contenido/actividades/CursosTalleres", 0);
        System.out.println("Resultados Array devuelto : ");
        for(int i=0; i<resultados.size(); i++){
            System.out.println(resultados.get(i));
        }
    }
    /**
     * Returns a formatted description of every event in the model.
     * @return one formatted string per event (title, date, category, library).
     */
    public static List<String> mostrarTodo(){
        String queryString =
                "PREFIX vcard: <" + VCARD.getURI() + "> " +
                "SELECT ?Subject ?title "+
                "WHERE { ?Subject vcard:TITLE ?title. } ";
        Query query = QueryFactory.create(queryString);
        QueryExecution qexec = QueryExecutionFactory.create(query, model) ;
        ResultSet results = qexec.execSelect() ;
        ArrayList<String> resultadoConsulta = new ArrayList<String>();
        while (results.hasNext())
        {
            QuerySolution binding = results.nextSolution();
            Resource subj = (Resource) binding.get("Subject");
            // Category values longer than 22 chars carry a URI-style prefix
            // that is stripped here; shorter values fall back to "actividad".
            String categoria=subj.getProperty(VCARD.CATEGORIES).getLiteral().toString();
            if(categoria.length()>22)
                categoria=categoria.substring(23);
            else
                categoria="actividad";
            resultadoConsulta.add("Evento: "+subj.getProperty(VCARD.TITLE).getLiteral()+"\n Fecha: "+subj.getProperty(VCARD.Other).getLiteral()+" Categoría: "+categoria+"\n Biblioteca: "+subj.getProperty(VCARD.Locality).getLiteral());
        }
        return resultadoConsulta;
    }
    /**
     * Runs one of the three supported queries against the model.
     * @param campo value to match (date, library name, or event type).
     * @param tipo one of CONSULTA_FECHA, CONSULTA_BIBLIOTECA, CONSULTA_TIPO.
     * @return formatted descriptions of the matching events.
     */
    public static List<String> consulta(String campo, int tipo){
        ArrayList<String> resultadoConsulta = new ArrayList<String>();
        String queryString="";
        // Each query type matches `campo` against a different vCard property.
        switch(tipo){
        case CONSULTA_FECHA:
            queryString="PREFIX vcard: <" + VCARD.getURI() + "> " +
                    "SELECT ?Subject "+
                    "WHERE { ?Subject vcard:Other '"+campo +"'. } ";
            break;
        case CONSULTA_BIBLIOTECA:
            queryString="PREFIX vcard: <" + VCARD.getURI() + "> " +
                    "SELECT ?Subject ?title "+
                    "WHERE { ?Subject vcard:Locality '" +campo+ "'. } ";
            break;
        case CONSULTA_TIPO:
            queryString="PREFIX vcard: <" + VCARD.getURI() + "> " +
                    "SELECT ?Subject ?title "+
                    "WHERE { ?Subject vcard:CATEGORIES '"+campo+"'. } ";
            break;
        }
        Query query = QueryFactory.create(queryString);
        QueryExecution qexec = QueryExecutionFactory.create(query, model) ;
        ResultSet results = qexec.execSelect() ;
        while (results.hasNext())
        {
            QuerySolution binding = results.nextSolution();
            Resource subj = (Resource) binding.get("Subject");
            String categoria=subj.getProperty(VCARD.CATEGORIES).getLiteral().toString();
            if(categoria.length()>22)
                categoria=categoria.substring(23);
            else
                categoria="actividad";
            resultadoConsulta.add("Evento: "+subj.getProperty(VCARD.TITLE).getLiteral()+"\n Fecha: "+subj.getProperty(VCARD.Other).getLiteral()+" Categoría: "+categoria+"\n Biblioteca: "+subj.getProperty(VCARD.Locality).getLiteral());
        }
        return resultadoConsulta;
    }
}
<gh_stars>0
package cn.ting.er.datamapping.utils;
/**
 * Contract for types that decorate some original object of type {@code T}
 * and expose the unmodified instance via {@link #getOriginal()}.
 *
 * @author wenting.Li
 * @version 0.0.1
 * @since JDK 8
 */
public interface Wrapper<T> {
    /** Returns the original (unwrapped) instance backing this wrapper. */
    T getOriginal();
}
|
def lambda_handler(event, context):
    """Sign an upload request: header-based if 'headers' is present, otherwise
    treat ``event`` as an S3 POST policy and sign the whole document.

    Assumes the policy's ``conditions`` list contains at least one entry
    mentioning ``x-amz-credential`` (IndexError otherwise, as before).
    """
    logger.info('event: %s' % event)

    if event.get('headers'):
        # Header-based request: sign the supplied headers directly.
        return sign_headers(event['headers'])

    # Policy-based request: extract the credential from the first condition
    # entry that mentions x-amz-credential, then sign the serialized policy.
    credential_entries = [c for c in event['conditions'] if 'x-amz-credential' in c]
    credential = list(credential_entries[0].values())[0]
    logger.info('credential=%s, data_type=%s' % (credential, type(credential)))
    return sign_policy(json.dumps(event).encode('utf-8'), str(credential))
/**
* Enforces that no alarm is currently ringing,<br/>
* If one is, AlarmActivity is immediately started<br/>
* and the passed activity is finished.
*
* @param activity the activity to finish if needed.
*/
public static void startIfRinging( Activity activity ) {
SFApplication app = SFApplication.get();
Alarm ringingAlarm = app.getRingingAlarm();
if ( ringingAlarm != null ) {
Intent i = new Intent( app, AlarmActivity.class );
activity.startActivity( new AlarmIntentHelper( i ).setAlarmId( ringingAlarm ).intent() );
activity.finish();
}
} |
    // TODO: Update when redbeams service is ready
    // NOTE(review): disabled until the redbeams mock endpoints exist — re-enable then.
    @Test(dataProvider = TEST_CONTEXT_WITH_MOCK, enabled = false)
    @Description(
            given = "there is a prepared database",
            when = "when a database create request is sent with the same database name",
            then = "the create should return a BadRequestException")
    public void createAndCreateWithSameNameThenShouldThrowBadRequestException(MockedTestContext testContext) {
        String databaseName = resourcePropertyProvider().getName();
        testContext
                // create the database once, then verify it is listed exactly once
                .given(RedbeamsDatabaseTestDto.class)
                .withName(databaseName)
                .when(databaseTestClient.createV4(), RunningParameter.key(databaseName))
                .when(databaseTestClient.listV4(), RunningParameter.key(databaseName))
                .then(RedbeamsDatabaseTestAssertion.containsDatabaseName(databaseName, 1), RunningParameter.key(databaseName))
                // a second create with the same name must be rejected
                .whenException(databaseTestClient.createV4(), BadRequestException.class)
                .validate();
    }
//CHECKSTYLE:Indentation:OFF
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// $ANTLR 3.4 C:\\workspace-sts\\Hop trunk -
// restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
// FastSimpleGenericEdifactDirectXML.g 2012-12-06 11:16:38
package org.apache.hop.pipeline.transforms.edi2xml.grammar;
import org.antlr.runtime.BitSet;
import org.antlr.runtime.IntStream;
import org.antlr.runtime.MismatchedSetException;
import org.antlr.runtime.MismatchedTokenException;
import org.antlr.runtime.NoViableAltException;
import org.antlr.runtime.Parser;
import org.antlr.runtime.ParserRuleReturnScope;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.RecognizerSharedState;
import org.antlr.runtime.RuleReturnScope;
import org.antlr.runtime.TokenStream;
import org.antlr.stringtemplate.StringTemplate;
import org.antlr.stringtemplate.StringTemplateGroup;
import org.antlr.stringtemplate.language.AngleBracketTemplateLexer;
import org.apache.commons.lang.StringEscapeUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
@SuppressWarnings( { "all", "warnings", "unchecked" } )
public class FastSimpleGenericEdifactDirectXMLParser extends Parser {
  // Token display names, indexed by token type (generated by ANTLR).
  public static final String[] tokenNames = new String[] {
    "<invalid>", "<EOR>", "<DOWN>", "<UP>", "COMPLEX_ELEMENT_ITEM_SEPARATOR", "ELEMENT_SEPARATOR",
    "RELEASE_CHARACTER", "SEGMENT_TERMINATOR", "TEXT_DATA", "' '", "'UNA:+,? \\''", "'UNA:+.? \\''", "'\\n'",
    "'\\r'", "'\\t'" };
  // Token type constants; T__9..T__14 correspond to the anonymous literals above.
  public static final int EOF = -1;
  public static final int T__9 = 9;
  public static final int T__10 = 10;
  public static final int T__11 = 11;
  public static final int T__12 = 12;
  public static final int T__13 = 13;
  public static final int T__14 = 14;
  public static final int COMPLEX_ELEMENT_ITEM_SEPARATOR = 4;
  public static final int ELEMENT_SEPARATOR = 5;
  public static final int RELEASE_CHARACTER = 6;
  public static final int SEGMENT_TERMINATOR = 7;
  public static final int TEXT_DATA = 8;
  // delegates
  /** This parser delegates to no sub-parsers (generated boilerplate). */
  public Parser[] getDelegates() {
    return new Parser[] {};
  }
  // delegators
  /** Creates a parser over {@code input} with a fresh shared recognizer state. */
  public FastSimpleGenericEdifactDirectXMLParser( TokenStream input ) {
    this( input, new RecognizerSharedState() );
  }
  /** Creates a parser over {@code input} reusing the given recognizer state. */
  public FastSimpleGenericEdifactDirectXMLParser( TokenStream input, RecognizerSharedState state ) {
    super( input, state );
  }
  // StringTemplate group used by the generated output rules (ANTLR template option).
  protected StringTemplateGroup templateLib = new StringTemplateGroup(
    "FastSimpleGenericEdifactDirectXMLParserTemplates", AngleBracketTemplateLexer.class );
  /** Replaces the template group used for output generation. */
  public void setTemplateLib( StringTemplateGroup templateLib ) {
    this.templateLib = templateLib;
  }
  /** Returns the template group currently used for output generation. */
  public StringTemplateGroup getTemplateLib() {
    return templateLib;
  }
/**
* allows convenient multi-value initialization: "new STAttrMap().put(...).put(...)"
*/
public static class STAttrMap extends HashMap<String, Object> {
public STAttrMap put( String attrName, Object value ) {
super.put( attrName, value );
return this;
}
public STAttrMap put( String attrName, int value ) {
super.put( attrName, new Integer( value ) );
return this;
}
}
  /** Returns the generated token display names for diagnostics. */
  public String[] getTokenNames() {
    return FastSimpleGenericEdifactDirectXMLParser.tokenNames;
  }
  /** Returns the path of the grammar this parser was generated from. */
  public String getGrammarFileName() {
    return "C:\\workspace-sts\\Hop trunk - "
      + "restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\"
      + "FastSimpleGenericEdifactDirectXML.g";
  }
  // XML fragments emitted while translating the EDIFACT stream into XML.
  public static final String XML_HEAD = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
  public static final String TAG_EDIFACT = "<edifact>\n";
  public static final String TAG_EDIFACT_END = "</edifact>";
  public static final String TAG_ELEMENT = "\t\t<element>\n";
  public static final String TAG_ELEMENT_END = "\t\t</element>\n";
  public static final String TAG_VALUE = "\t\t\t<value>";
  public static final String TAG_VALUE_END = "</value>\n";
  // Index values collected for the current tag; cleared per tag (see tag()).
  public LinkedList<Object> tagIndexes = new LinkedList<Object>();
// helper functions to sanitize incoming input
public String sanitizeText( String txt ) {
// resolve all RELEASE characters
if ( txt.indexOf( "?" ) >= 0 ) {
txt = txt.replace( "?+", "+" );
txt = txt.replace( "?:", ":" );
txt = txt.replace( "?'", "'" );
txt = txt.replace( "??", "?" );
}
// enocde XML entities
return StringEscapeUtils.escapeXml( txt );
}
// assume about 8k for an edifact message
public StringBuilder buf = new StringBuilder( 8192 );
// helper method for writing tag indexes to the stream
public void appendIndexes() {
if ( tagIndexes.size() == 0 ) {
return;
}
for ( Object i : tagIndexes ) {
String s = (String) i;
buf.append( "\t\t<index>" + s + "</index>\n" );
}
}
  // error handling overrides -> just exit
  /** Overridden to fail fast instead of attempting token insertion/deletion recovery. */
  protected void mismatch( IntStream input, int ttype, BitSet follow ) throws RecognitionException {
    throw new MismatchedTokenException( ttype, input );
  }
  /** Overridden to propagate the error instead of recovering from a mismatched set. */
  public Object recoverFromMismatchedSet( IntStream input, RecognitionException e, BitSet follow ) throws RecognitionException {
    throw e;
  }
  /** Return scope for the {@code edifact} rule (generated). */
  public static class edifact_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "edifact"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:77:1:
  // edifact : ( una )? ( segment )* ;
  /**
   * Entry rule: an optional UNA service-string advice followed by any number
   * of segments. Resets {@link #buf} and wraps the output in the edifact tags.
   */
  public final edifact_return edifact() throws RecognitionException {
    edifact_return retval =
      new edifact_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:81:4:
      // ( ( una )? ( segment )* )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:81:6:
      // ( una )? ( segment )*
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:81:6:
      // ( una )?
      int alt1 = 2;
      int LA1_0 = input.LA( 1 );
      // tokens 10 and 11 are the two recognized UNA literal variants
      if ( ( ( LA1_0 >= 10 && LA1_0 <= 11 ) ) ) {
        alt1 = 1;
      }
      switch ( alt1 ) {
        case 1:
          // C:\\workspace-sts\\Hop trunk -
          // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
          // FastSimpleGenericEdifactDirectXML.g:81:6:
          // una
          pushFollow( FOLLOW_una_in_edifact64 );
          una();
          state._fsp--;
          break;
      }
      // start a fresh XML document for this message
      buf = new StringBuilder( 8192 );
      buf.append( XML_HEAD );
      buf.append( TAG_EDIFACT );
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:83:4:
      // ( segment )*
      loop2:
      do {
        int alt2 = 2;
        int LA2_0 = input.LA( 1 );
        if ( ( LA2_0 == TEXT_DATA ) ) {
          alt2 = 1;
        }
        switch ( alt2 ) {
          case 1:
            // C:\\workspace-sts\\Hop trunk -
            // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
            // FastSimpleGenericEdifactDirectXML.g:83:4:
            // segment
            pushFollow( FOLLOW_segment_in_edifact76 );
            segment();
            state._fsp--;
            break;
          default:
            break loop2;
        }
      } while ( true );
      buf.append( TAG_EDIFACT_END );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "edifact"
  /** Return scope for the {@code una} rule (generated). */
  public static class una_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "una"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:89:1:
  // una : ( 'UNA:+.? \\'' | 'UNA:+,? \\'' );
  /** Matches either UNA service-string-advice literal (token types 10-11); emits nothing. */
  public final una_return una() throws RecognitionException {
    una_return retval =
      new una_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:89:7:
      // ( 'UNA:+.? \\'' | 'UNA:+,? \\'' )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:
      if ( ( input.LA( 1 ) >= 10 && input.LA( 1 ) <= 11 ) ) {
        input.consume();
        state.errorRecovery = false;
      } else {
        MismatchedSetException mse = new MismatchedSetException( null, input );
        throw mse;
      }
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "una"
  /** Return scope for the {@code segment} rule (generated). */
  public static class segment_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "segment"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:91:1:
  // segment : tag ( data_element )* SEGMENT_TERMINATOR ( ' ' | '\\n' | '\\r' | '\\t' )* ;
  /**
   * Matches one EDIFACT segment: a tag, its data elements, the segment
   * terminator and any trailing whitespace; emits the segment as an XML
   * element named after the tag.
   */
  public final segment_return segment() throws RecognitionException {
    segment_return retval =
      new segment_return();
    retval.start = input.LT( 1 );
    tag_return tag1 = null;
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:91:11:
      // ( tag ( data_element )* SEGMENT_TERMINATOR ( ' ' | '\\n' | '\\r' | '\\t' )* )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:91:13:
      // tag ( data_element )* SEGMENT_TERMINATOR ( ' ' | '\\n' | '\\r' | '\\t' )*
      pushFollow( FOLLOW_tag_in_segment107 );
      tag1 = tag();
      state._fsp--;
      // open the XML element for this segment and write any tag indexes
      buf.append( "\t<" + ( tag1 != null ? tag1.name : null ) + ">\n" );
      appendIndexes();
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:92:4:
      // ( data_element )*
      loop3:
      do {
        int alt3 = 2;
        int LA3_0 = input.LA( 1 );
        if ( ( LA3_0 == ELEMENT_SEPARATOR ) ) {
          alt3 = 1;
        }
        switch ( alt3 ) {
          case 1:
            // C:\\workspace-sts\\Hop trunk -
            // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
            // FastSimpleGenericEdifactDirectXML.g:92:4:
            // data_element
            pushFollow( FOLLOW_data_element_in_segment114 );
            data_element();
            state._fsp--;
            break;
          default:
            break loop3;
        }
      } while ( true );
      match( input, SEGMENT_TERMINATOR, FOLLOW_SEGMENT_TERMINATOR_in_segment117 );
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:92:37:
      // ( ' ' | '\\n' | '\\r' | '\\t' )*
      loop4:
      do {
        int alt4 = 2;
        int LA4_0 = input.LA( 1 );
        // token 9 is ' '; tokens 12-14 are '\n', '\r', '\t'
        if ( ( LA4_0 == 9 || ( LA4_0 >= 12 && LA4_0 <= 14 ) ) ) {
          alt4 = 1;
        }
        switch ( alt4 ) {
          case 1:
            // C:\\workspace-sts\\Hop trunk -
            // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
            // FastSimpleGenericEdifactDirectXML.g:
            if ( input.LA( 1 ) == 9 || ( input.LA( 1 ) >= 12 && input.LA( 1 ) <= 14 ) ) {
              input.consume();
              state.errorRecovery = false;
            } else {
              MismatchedSetException mse = new MismatchedSetException( null, input );
              throw mse;
            }
            break;
          default:
            break loop4;
        }
      } while ( true );
      buf.append( "\t</" + ( tag1 != null ? tag1.name : null ) + ">\n" );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "segment"
  /** Return scope for the {@code data_element} rule (generated). */
  public static class data_element_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "data_element"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:96:1:
  // data_element : ss data_element_payload ;
  /** Matches one data element: an element separator followed by its payload. */
  public final data_element_return data_element() throws RecognitionException {
    data_element_return retval =
      new data_element_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:96:15:
      // ( ss data_element_payload )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:96:17:
      // ss data_element_payload
      pushFollow( FOLLOW_ss_in_data_element143 );
      ss();
      state._fsp--;
      pushFollow( FOLLOW_data_element_payload_in_data_element145 );
      data_element_payload();
      state._fsp--;
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "data_element"
  /** Return scope for the {@code data_element_payload} rule (generated). */
  public static class data_element_payload_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "data_element_payload"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:98:1:
  // data_element_payload : ( composite_data_item ds )* composite_data_item ;
  /**
   * Matches the payload of a data element: composite items separated by the
   * component separator; wraps the emitted values in element tags.
   */
  public final data_element_payload_return data_element_payload() throws RecognitionException {
    data_element_payload_return retval =
      new data_element_payload_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:98:22:
      // ( ( composite_data_item ds )* composite_data_item )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:98:24:
      // ( composite_data_item ds )* composite_data_item
      buf.append( TAG_ELEMENT );
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:99:4:
      // ( composite_data_item ds )*
      loop5:
      do {
        int alt5 = 2;
        int LA5_0 = input.LA( 1 );
        if ( ( LA5_0 == TEXT_DATA ) ) {
          // lookahead 2 decides whether another component separator follows
          int LA5_1 = input.LA( 2 );
          if ( ( LA5_1 == COMPLEX_ELEMENT_ITEM_SEPARATOR ) ) {
            alt5 = 1;
          }
        } else if ( ( LA5_0 == COMPLEX_ELEMENT_ITEM_SEPARATOR ) ) {
          alt5 = 1;
        }
        switch ( alt5 ) {
          case 1:
            // C:\\workspace-sts\\Hop trunk -
            // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
            // FastSimpleGenericEdifactDirectXML.g:99:5:
            // composite_data_item ds
            pushFollow( FOLLOWComposite_data_item_in_data_element_payload160 );
            composite_data_item();
            state._fsp--;
            pushFollow( FOLLOW_ds_in_data_element_payload162 );
            ds();
            state._fsp--;
            break;
          default:
            break loop5;
        }
      } while ( true );
      pushFollow( FOLLOWComposite_data_item_in_data_element_payload166 );
      composite_data_item();
      state._fsp--;
      buf.append( TAG_ELEMENT_END );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "data_element_payload"
  /** Return scope for the {@code composite_data_item} rule (generated). */
  public static class composite_data_item_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "composite_data_item"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:102:1:
  // composite_data_item : composite_data_item_val ;
  /** Matches one composite item and emits its sanitized text as a value tag. */
  public final composite_data_item_return composite_data_item() throws RecognitionException {
    composite_data_item_return retval =
      new composite_data_item_return();
    retval.start = input.LT( 1 );
    composite_data_item_val_return composite_data_item_val2 = null;
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:102:21:
      // ( composite_data_item_val )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:102:23:
      // composite_data_item_val
      pushFollow( FOLLOWComposite_data_item_val_inComposite_data_item180 );
      composite_data_item_val2 = composite_data_item_val();
      state._fsp--;
      buf.append( TAG_VALUE );
      // the matched token range is sanitized (release chars + XML escaping)
      buf.append( sanitizeText( ( composite_data_item_val2 != null ? input.toString(
        composite_data_item_val2.start, composite_data_item_val2.stop ) : null ) ) );
      buf.append( TAG_VALUE_END );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "composite_data_item"
  /** Return scope for the {@code composite_data_item_val} rule (generated). */
  public static class composite_data_item_val_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "composite_data_item_val"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:105:1:
  // composite_data_item_val : ( txt |);
  /** Matches the (possibly empty) text of a composite item. */
  public final composite_data_item_val_return composite_data_item_val() throws RecognitionException {
    composite_data_item_val_return retval =
      new composite_data_item_val_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:105:25:
      // ( txt |)
      int alt6 = 2;
      int LA6_0 = input.LA( 1 );
      if ( ( LA6_0 == TEXT_DATA ) ) {
        alt6 = 1;
      } else if ( ( ( LA6_0 >= COMPLEX_ELEMENT_ITEM_SEPARATOR && LA6_0 <= ELEMENT_SEPARATOR )
        || LA6_0 == SEGMENT_TERMINATOR ) ) {
        // a separator/terminator follows immediately -> the value is empty
        alt6 = 2;
      } else {
        NoViableAltException nvae = new NoViableAltException( "", 6, 0, input );
        throw nvae;
      }
      switch ( alt6 ) {
        case 1:
          // C:\\workspace-sts\\Hop trunk -
          // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
          // FastSimpleGenericEdifactDirectXML.g:105:27:
          // txt
          pushFollow( FOLLOW_txt_inComposite_data_item_val193 );
          txt();
          state._fsp--;
          break;
        case 2:
          // C:\\workspace-sts\\Hop trunk -
          // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
          // FastSimpleGenericEdifactDirectXML.g:105:31:
          break;
      }
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "composite_data_item_val"
  /** Return scope for the {@code tag} rule; carries the tag name and its indexes. */
  public static class tag_return extends ParserRuleReturnScope {
    public String name;
    public List<Object> indexes;
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "tag"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:108:1:
  // tag returns [String name, List indexes] : tag_name ( ds i+= tag_index_id )* ;
  /**
   * Matches a segment tag: the tag name plus any component-separated index
   * ids. Clears and refills {@link #tagIndexes} as a side effect.
   */
  public final tag_return tag() throws RecognitionException {
    tag_return retval =
      new tag_return();
    retval.start = input.LT( 1 );
    List<Object> list_i = null;
    tagName_return tagName3 = null;
    RuleReturnScope i = null;
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:108:41:
      // ( tag_name ( ds i+= tag_index_id )* )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:108:43:
      // tag_name ( ds i+= tag_index_id )*
      pushFollow( FOLLOW_tagName_in_tag208 );
      tagName3 = tagName();
      state._fsp--;
      // start collecting indexes for this tag from scratch
      tagIndexes.clear();
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:108:74:
      // ( ds i+= tag_index_id )*
      loop7:
      do {
        int alt7 = 2;
        int LA7_0 = input.LA( 1 );
        if ( ( LA7_0 == COMPLEX_ELEMENT_ITEM_SEPARATOR ) ) {
          alt7 = 1;
        }
        switch ( alt7 ) {
          case 1:
            // C:\\workspace-sts\\Hop trunk -
            // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
            // FastSimpleGenericEdifactDirectXML.g:108:75:
            // ds i+= tag_index_id
            pushFollow( FOLLOW_ds_in_tag213 );
            ds();
            state._fsp--;
            pushFollow( FOLLOW_tagIndex_id_in_tag217 );
            i = tagIndex_id();
            state._fsp--;
            if ( list_i == null ) {
              list_i = new ArrayList<Object>();
            }
            list_i.add( i.getTemplate() );
            break;
          default:
            break loop7;
        }
      } while ( true );
      retval.name = ( tagName3 != null ? input.toString( tagName3.start, tagName3.stop ) : null ).trim();
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "tag"
  /** Return scope for the {@code tag_name} rule (generated). */
  public static class tagName_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "tag_name"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:112:1:
  // tag_name : txt ;
  /** Matches the tag name, which is plain text data. */
  public final tagName_return tagName() throws RecognitionException {
    tagName_return retval =
      new tagName_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:112:11:
      // ( txt )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:112:13:
      // txt
      pushFollow( FOLLOW_txt_in_tagName239 );
      txt();
      state._fsp--;
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "tag_name"
  /** Return scope for the {@code tag_index_id} rule (generated). */
  public static class tagIndex_id_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "tag_index_id"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:115:1:
  // tag_index_id : tag_index_id_val ;
  /** Matches one tag index id and records its text in {@link #tagIndexes}. */
  public final tagIndex_id_return tagIndex_id() throws RecognitionException {
    tagIndex_id_return retval =
      new tagIndex_id_return();
    retval.start = input.LT( 1 );
    tagIndex_id_val_return tagIndex_id_val4 = null;
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:115:15:
      // ( tag_index_id_val )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:115:17:
      // tag_index_id_val
      pushFollow( FOLLOW_tagIndex_id_val_in_tagIndex_id249 );
      tagIndex_id_val4 = tagIndex_id_val();
      state._fsp--;
      tagIndexes.add( ( tagIndex_id_val4 != null ? input.toString(
        tagIndex_id_val4.start, tagIndex_id_val4.stop ) : null ) );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "tag_index_id"
  /** Return scope for the {@code tag_index_id_val} rule (generated). */
  public static class tagIndex_id_val_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "tag_index_id_val"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:116:1:
  // tag_index_id_val : ( txt |);
  /** Matches the (possibly empty) text of a tag index id. */
  public final tagIndex_id_val_return tagIndex_id_val() throws RecognitionException {
    tagIndex_id_val_return retval =
      new tagIndex_id_val_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:116:18:
      // ( txt |)
      int alt8 = 2;
      int LA8_0 = input.LA( 1 );
      if ( ( LA8_0 == TEXT_DATA ) ) {
        alt8 = 1;
      } else if ( ( ( LA8_0 >= COMPLEX_ELEMENT_ITEM_SEPARATOR && LA8_0 <= ELEMENT_SEPARATOR )
        || LA8_0 == SEGMENT_TERMINATOR ) ) {
        // a separator/terminator follows immediately -> the id is empty
        alt8 = 2;
      } else {
        NoViableAltException nvae = new NoViableAltException( "", 8, 0, input );
        throw nvae;
      }
      switch ( alt8 ) {
        case 1:
          // C:\\workspace-sts\\Hop trunk -
          // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
          // FastSimpleGenericEdifactDirectXML.g:116:20:
          // txt
          pushFollow( FOLLOW_txt_in_tagIndex_id_val258 );
          txt();
          state._fsp--;
          break;
        case 2:
          // C:\\workspace-sts\\Hop trunk -
          // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
          // FastSimpleGenericEdifactDirectXML.g:116:24:
          break;
      }
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "tag_index_id_val"
  /** Return scope for the {@code ds} rule (generated). */
  public static class ds_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "ds"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:119:1:
  // ds : COMPLEX_ELEMENT_ITEM_SEPARATOR ;
  /** Matches the component (complex element item) separator token. */
  public final ds_return ds() throws RecognitionException {
    ds_return retval =
      new ds_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:119:6:
      // ( COMPLEX_ELEMENT_ITEM_SEPARATOR )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:119:8:
      // COMPLEX_ELEMENT_ITEM_SEPARATOR
      match( input, COMPLEX_ELEMENT_ITEM_SEPARATOR, FOLLOW_COMPLEX_ELEMENT_ITEM_SEPARATOR_in_ds271 );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "ds"
  /** Return scope for the {@code ss} rule (generated). */
  public static class ss_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "ss"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:120:1:
  // ss : ELEMENT_SEPARATOR ;
  /** Matches the element separator token. */
  public final ss_return ss() throws RecognitionException {
    ss_return retval =
      new ss_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:120:6:
      // ( ELEMENT_SEPARATOR )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:120:8:
      // ELEMENT_SEPARATOR
      match( input, ELEMENT_SEPARATOR, FOLLOW_ELEMENT_SEPARATOR_in_ss280 );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "ss"
  /** Return scope for the {@code txt} rule (generated). */
  public static class txt_return extends ParserRuleReturnScope {
    public StringTemplate st;
    public Object getTemplate() {
      return st;
    }
    public String toString() {
      return st == null ? null : st.toString();
    }
  }
  ;
  // $ANTLR start "txt"
  // C:\\workspace-sts\\Hop trunk -
  // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\FastSimpleGenericEdifactDirectXML.g:121:1:
  // txt : TEXT_DATA ;
  /** Matches one TEXT_DATA token. */
  public final txt_return txt() throws RecognitionException {
    txt_return retval =
      new txt_return();
    retval.start = input.LT( 1 );
    try {
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:121:7:
      // ( TEXT_DATA )
      // C:\\workspace-sts\\Hop trunk -
      // restruct\\engine\\src\\org\\project-hop\\di\\pipeline\\transforms\\edi2xml\\grammar\\
      // FastSimpleGenericEdifactDirectXML.g:121:9:
      // TEXT_DATA
      match( input, TEXT_DATA, FOLLOW_TEXT_DATA_in_txt289 );
      retval.stop = input.LT( -1 );
    } catch ( RecognitionException e ) {
      // do not try to recover from parse errors, propagate the error instead
      throw e;
    }
    return retval;
  }
  // $ANTLR end "txt"
  // Delegated rules
  // ANTLR-generated FOLLOW sets: each BitSet encodes, bit-per-token-type, the
  // tokens that may follow a rule reference at one specific point in the
  // grammar. NOTE(review): several identifiers read "FOLLOWComposite..."
  // where the generator normally emits "FOLLOW_<rule>_..." with an
  // underscore; the underscore appears lost in this copy. Do not rename
  // without checking the rule methods that reference these fields by name,
  // and prefer regenerating from the .g grammar.
  public static final BitSet FOLLOW_una_in_edifact64 = new BitSet( new long[] { 0x0000000000000102L } );
  public static final BitSet FOLLOW_segment_in_edifact76 = new BitSet( new long[] { 0x0000000000000102L } );
  public static final BitSet FOLLOW_tag_in_segment107 = new BitSet( new long[] { 0x00000000000000A0L } );
  public static final BitSet FOLLOW_data_element_in_segment114 = new BitSet( new long[] { 0x00000000000000A0L } );
  public static final BitSet FOLLOW_SEGMENT_TERMINATOR_in_segment117 = new BitSet(
    new long[] { 0x0000000000007202L } );
  public static final BitSet FOLLOW_ss_in_data_element143 = new BitSet( new long[] { 0x0000000000000100L } );
  public static final BitSet FOLLOW_data_element_payload_in_data_element145 = new BitSet(
    new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOWComposite_data_item_in_data_element_payload160 = new BitSet(
    new long[] { 0x0000000000000010L } );
  public static final BitSet FOLLOW_ds_in_data_element_payload162 =
    new BitSet( new long[] { 0x0000000000000100L } );
  public static final BitSet FOLLOWComposite_data_item_in_data_element_payload166 = new BitSet(
    new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOWComposite_data_item_val_inComposite_data_item180 = new BitSet(
    new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_txt_inComposite_data_item_val193 = new BitSet(
    new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_tagName_in_tag208 = new BitSet( new long[] { 0x0000000000000012L } );
  public static final BitSet FOLLOW_ds_in_tag213 = new BitSet( new long[] { 0x0000000000000100L } );
  public static final BitSet FOLLOW_tagIndex_id_in_tag217 = new BitSet( new long[] { 0x0000000000000012L } );
  public static final BitSet FOLLOW_txt_in_tagName239 = new BitSet( new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_tagIndex_id_val_in_tagIndex_id249 = new BitSet(
    new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_txt_in_tagIndex_id_val258 = new BitSet( new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_COMPLEX_ELEMENT_ITEM_SEPARATOR_in_ds271 = new BitSet(
    new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_ELEMENT_SEPARATOR_in_ss280 = new BitSet( new long[] { 0x0000000000000002L } );
  public static final BitSet FOLLOW_TEXT_DATA_in_txt289 = new BitSet( new long[] { 0x0000000000000002L } );
}
|
Every share makes Black Voice louder! Share To Share To
"I wish I could have saved more, to be honest," said a brave ex-marine who had saved 70 people during Orlando nightclub shooting.
Many witnesses of Orlando shooting said that they mistook the gunfire for the part of the show. Luckily, Imran Yousuf, a 24-year-old Hindu who left the Marine Corps just two months prior to the shooting, was working as a bouncer at the Pulse nightclub on Saturday night on June 11. The brave young man recognized the gunfire right away.
“The initial one was three or four (shots),” he said. “That was a shock. Three of four shots go off and you could tell it was a high caliber. Everyone froze. I’m here in the back and I saw people start pouring into the back hallway, and they just sardine pack everyone.”
Yousuf was at the back of the club when the gunman Omar Mateen walked in and started shooting at people. About 70 people panicked and got trapped in the corridor where Yousuf was. They all were so shocked that they could do nothing but wait for the deadly shooter to come to them. No one thought to try a locked door leading out the back of the club onto the street.
“There was only one choice. Either we all stay there and we all die, or I could take the chance, and I jumped over to open that latch and we got everyone that we can out of there,” the brave young man said. “As soon as people found that door was open they kept pouring out and after that, we just ran.”
Yousuf stayed after he saved all those people to help carry the wounded to ambulances. Thanks to his training in the Marine Corps, Imran Yousuf was prepared and used strategies from that to pull off this feat and save dozens of lives. Yet, American mainstream media has not covered this act of courage at all. The person of color interests no one.
It is sad that like every other person of color Mr. Yousuf never got the attention he deserved. I am sure, if Imran was white he would be praised and glorified. This country is so white and only Whites have the attention of public here.
Share this article and help to spread the word about the feat of a person of color. |
/*
* Copyright (C) 2019 Toshiba Corporation
* SPDX-License-Identifier: Apache-2.0
*/
import { Rectangle, Transform } from "pixi.js";
import { DBasePoint } from "./d-base-point";
import { DViewTarget } from "./d-view-to-target";
import { EShapeContainer } from "./shape/e-shape-container";
export class DChartPlotAreaContainer extends EShapeContainer implements DViewTarget {
	protected _position: DBasePoint;
	protected _scale: DBasePoint;
	protected _workRect: Rectangle;

	constructor(onChange: () => void) {
		super();
		const transform: Transform = this.transform;
		this._position = new DBasePoint(transform.position, onChange);
		this._scale = new DBasePoint(transform.scale, onChange);
		this._workRect = new Rectangle();
	}

	// Wraps the transform position so mutations trigger the onChange callback.
	// @ts-ignore
	get position(): DBasePoint {
		return this._position;
	}

	// Wraps the transform scale so mutations trigger the onChange callback.
	// @ts-ignore
	get scale(): DBasePoint {
		return this._scale;
	}

	getBounds(skipUpdate?: boolean, rect?: Rectangle): Rectangle {
		// Invalidate the cached bounds id so PixiJS recomputes them.
		(this as any)._lastBoundsID = -1;
		return super.getBounds(skipUpdate, rect);
	}

	calculateBounds(): void {
		const bounds = this._bounds;
		const point = this._work;
		const childBounds = this._workRect;
		const toWorld = this.transform.worldTransform;

		// Union of the local bounds of every visible child; stays at the
		// origin-sized zero rectangle when there is nothing visible.
		let left = 0;
		let top = 0;
		let right = 0;
		let bottom = 0;
		let hasAny = false;
		for (const child of this.children) {
			if (!child.visible) {
				continue;
			}
			child.getBoundsInternal(point, false, childBounds);
			const l = childBounds.x;
			const t = childBounds.y;
			const r = l + childBounds.width;
			const b = t + childBounds.height;
			if (hasAny) {
				left = Math.min(left, l);
				top = Math.min(top, t);
				right = Math.max(right, r);
				bottom = Math.max(bottom, b);
			} else {
				hasAny = true;
				left = l;
				top = t;
				right = r;
				bottom = b;
			}
		}

		// Project the two opposite corners into world space and accumulate.
		bounds.clear();
		point.set(left, top);
		toWorld.apply(point, point);
		bounds.addPoint(point);
		point.set(right, bottom);
		toWorld.apply(point, point);
		bounds.addPoint(point);
	}
}
|
<filename>CCUE4/Source/CCUE4/core/ChessConstants.cpp
#include "ChessConstants.h"
namespace cc {
// 10x9 lookup table mapping a linear board index (row * 10 + column) to board
// coordinates: columns use even x in [-8, 8], rows use odd y in [-9, 9].
// Every 10th entry ({0, 0}) is padding so each row occupies exactly ten
// slots; index 99 doubles as the "invalid" sentinel position.
static FIntPoint IndexToPositionMap[100] = {
  {-8, -9}, {-6, -9}, {-4, -9}, {-2, -9}, { 0, -9}, { 2, -9}, { 4, -9}, { 6, -9}, { 8, -9}, { 0, 0},
  {-8, -7}, {-6, -7}, {-4, -7}, {-2, -7}, { 0, -7}, { 2, -7}, { 4, -7}, { 6, -7}, { 8, -7}, { 0, 0},
  {-8, -5}, {-6, -5}, {-4, -5}, {-2, -5}, { 0, -5}, { 2, -5}, { 4, -5}, { 6, -5}, { 8, -5}, { 0, 0},
  {-8, -3}, {-6, -3}, {-4, -3}, {-2, -3}, { 0, -3}, { 2, -3}, { 4, -3}, { 6, -3}, { 8, -3}, { 0, 0},
  {-8, -1}, {-6, -1}, {-4, -1}, {-2, -1}, { 0, -1}, { 2, -1}, { 4, -1}, { 6, -1}, { 8, -1}, { 0, 0},
  {-8, 1}, {-6, 1}, {-4, 1}, {-2, 1}, { 0, 1}, { 2, 1}, { 4, 1}, { 6, 1}, { 8, 1}, { 0, 0},
  {-8, 3}, {-6, 3}, {-4, 3}, {-2, 3}, { 0, 3}, { 2, 3}, { 4, 3}, { 6, 3}, { 8, 3}, { 0, 0},
  {-8, 5}, {-6, 5}, {-4, 5}, {-2, 5}, { 0, 5}, { 2, 5}, { 4, 5}, { 6, 5}, { 8, 5}, { 0, 0},
  {-8, 7}, {-6, 7}, {-4, 7}, {-2, 7}, { 0, 7}, { 2, 7}, { 4, 7}, { 6, 7}, { 8, 7}, { 0, 0},
  {-8, 9}, {-6, 9}, {-4, 9}, {-2, 9}, { 0, 9}, { 2, 9}, { 4, 9}, { 6, 9}, { 8, 9}, { 0, 0},
};
// Sentinel index returned for off-board queries; maps to {0, 0} above.
static const int32 InvalidIdx = 99;
// World units per board-coordinate unit. NOTE(review): assumed from the
// division in IndexFromVectorPosition — confirm against the callers.
const float ChessConstants::PosScale = 24.0f;
// Returns the board coordinates for a linear index; out-of-range indices
// fold onto the sentinel slot, which holds {0, 0}.
FIntPoint ChessConstants::PieceIndexToPosition(int32 index) {
  const bool inRange = (0 <= index) && (index < 100);
  return IndexToPositionMap[inRange ? index : InvalidIdx];
}
// Converts board coordinates back to a linear index, or InvalidIdx when the
// point is outside the board or not on a valid intersection.
int32 ChessConstants::PiecePositionToIndex(const int32 x, const int32 y) {
  // Reject coordinates outside the board's bounding box.
  if (x < -9 || 9 < x || y < -10 || 10 < y) {
    return InvalidIdx;
  }
  // Valid intersections have even x and odd y (see IndexToPositionMap);
  // the offsets keep the operands non-negative before the parity test.
  const bool xOffGrid = ((x + 10) % 2) == 1;
  const bool yOffGrid = ((y + 20) % 2) == 0;
  if (xOffGrid || yOffGrid) {
    return InvalidIdx;
  }
  // Row-major: rows step by 2 in y starting at -9, columns by 2 in x at -8.
  const int32 row = (y + 9) / 2;
  const int32 col = (x + 8) / 2;
  return row * 10 + col;
}
// Returns the initial board positions of all 32 pieces, in piece-id order.
// The array is built lazily on first call and cached for the process lifetime.
const TArray<FIntPoint>& ChessConstants::PieceInitPositions() {
  static TArray<FIntPoint> ret;
  if (ret.Num() > 0) {
    return ret;
  }
  // Board indices in piece-id order:
  //  0:bj  1:bs1  2:bs2  3:bx1  4:bx2  5:bm1  6:bm2  7:bc1  8:bc2
  //  9:bp1 10:bp2 11..15:bb1..bb5
  // 16:rj 17:rs1 18:rs2 19:rx1 20:rx2 21:rm1 22:rm2 23:rc1 24:rc2
  // 25:rp1 26:rp2 27..31:rb1..rb5
  static const int32 InitIndices[] = {
    4, 3, 5, 2, 6, 1, 7, 0, 8, 21, 27, 30, 32, 34, 36, 38,
    94, 93, 95, 92, 96, 91, 97, 90, 98, 71, 77, 60, 62, 64, 66, 68,
  };
  for (const int32 idx : InitIndices) {
    ret.Add(IndexToPositionMap[idx]);
  }
  return ret;
}
// Euclidean length of the XY projection of 'vec'; Z is intentionally ignored.
float ChessConstants::GetVectorLength2D(FVector vec) {
  const float dx = vec.X;
  const float dy = vec.Y;
  return sqrt(dx * dx + dy * dy);
}
// Maps a world-space position onto the nearest valid board intersection and
// returns its linear index, or InvalidIdx when the point is off the board.
// 'scale' converts world units to board-coordinate units (world / scale).
int32 ChessConstants::IndexFromVectorPosition(const FVector target, const float scale) {
  const float fx = target.X / scale;
  const float fy = target.Y / scale;
  int ix = roundf(fx);
  int iy = roundf(fy);
  // UE_LOG(LogTemp, Log, TEXT("OnBoardOver %d %d"), ix, iy);
  if (ix < -9 || ix > 9 || iy < -10 || iy > 10) {
    return InvalidIdx;
  }
  // Snap x onto the even-valued column grid: the edges (+/-9) fold to the
  // outermost columns, and odd interior values move toward the cursor side.
  if (ix == -9) {
    ix = -8;
  } else if (ix == 9) {
    ix = 8;
  } else if ((ix + 20) % 2 == 1) {
    ix = (fx > ix) ? (ix + 1) : (ix - 1);
  }
  // Snap y onto the odd-valued row grid in the same fashion.
  if (iy == -10) {
    iy = -9;
  } else if (iy == 10) {
    iy = 9;
  } else if ((iy + 20) % 2 == 0) {
    iy = (fy > iy) ? (iy + 1) : (iy - 1);
  }
  return PiecePositionToIndex(ix, iy);
}
// Absolute value for int32.
int32 ChessConstants::Absi32(int32 val) {
  return (val < 0) ? -val : val;
}
}
|
def image_to_plasma_png(fname: str, img: np.ndarray) -> None:
    """Save ``img`` to ``<fname>.png`` using matplotlib's 'plasma' colormap.

    Args:
        fname: Output path without the ``.png`` extension.
        img: Array of scalar values to colormap (presumably 2-D; imsave
            rejects other shapes unless RGB(A) — TODO confirm at callers).
    """
    plt.imsave(fname + '.png', img, cmap='plasma')
    def testFrozenParameters(self):
        """A differentiable inner-loop step must leave frozen parameters untouched.

        fc1's parameters have requires_grad=False and are excluded from the
        optimizer, so after one step the functional copy's fc1 tensors equal
        the originals while fc2's differ.
        """
        class Net(nn.Module):
            def __init__(self):
                super(Net, self).__init__()
                self.fc1 = nn.Linear(30, 50)
                self.fc2 = nn.Linear(50, 1)
                # Freeze the first layer: it must not be updated.
                for param in self.fc1.parameters():
                    param.requires_grad = False

            def forward(self, x):
                hidden = self.fc1(x)
                logits = self.fc2(hidden).squeeze(1)
                return logits

        inputs = torch.randn(16, 30)
        ones = torch.ones(8)
        zeros = torch.zeros(8)
        labels = torch.cat([ones, zeros], dim=0)
        net = Net()
        # Hand only the trainable parameters to the inner optimizer.
        param = filter(lambda x: x.requires_grad, net.parameters())
        inner_opt = torch.optim.SGD(param, lr=1e-1)
        loss_func = nn.BCEWithLogitsLoss()
        with higher.innerloop_ctx(net, inner_opt) as (fnet, diffopt):
            logits = fnet(inputs)
            loss = loss_func(logits, labels)
            diffopt.step(loss)
            # Parameter order: fc1.weight, fc1.bias, fc2.weight, fc2.bias.
            zipped = list(zip(net.parameters(), fnet.parameters()))
            # Frozen fc1 parameters are unchanged by the step...
            self.assertTrue(torch.equal(*zipped[0]))
            self.assertTrue(torch.equal(*zipped[1]))
            # ...while trainable fc2 parameters have been updated.
            self.assertFalse(torch.equal(*zipped[2]))
            self.assertFalse(torch.equal(*zipped[3]))
In one of the most "interesting" moves I've seen in the mobile market, MSI has equipped their GX60 gaming notebook with an HD 7970M...paired with an AMD A10-4600M APU. Curious to see how the combination would stack up against the Intel i7-3720QM + HD 7970M combination used in AVADirect's Clevo P170EM, I ran some quick benchmarks and put together a video of Batman: Arkham City showing the systems running side by side. First, here's the video link:
Not surprisingly, the Ivy Bridge solution walks away from the Trinity laptop when we turn down the details a bit, but at maximum quality the two solutions appear relatively close. The issue is that while average frame rates may be close in some cases, minimum frame rates often tell the real story. There are points in the above video where Trinity falls to sub-30 FPS for a second or two at a time, which can be very noticeable in gameplay.
Anyway, I'm curious: are you interested in more videos like this? It takes a lot more time than a simple reporting of a benchmark number, but if there's enough demand for it I'll be happy to oblige. I should also note that there are some titles where the Trinity and Ivy Bridge notebooks are fairly close in performance (at maximum detail at least), while other titles are even more in favor of a faster CPU (e.g. Skyrim). Regardless, the full review of the MSI GX60 will be forthcoming.
Pricing for the GX60 is the one area where MSI looks to beat Intel-based HD 7970M offerings. The base model comes with a 750GB hard drive, 8GB RAM, A10-4600M, and of course the HD 7970M. Right now (if it's in stock), you can get that configuration for around $1200. Our particular unit takes yet another odd approach by including a 128GB RAID 0 SSD set for the OS and apps, which might sound appealing until you realize they're using SanDisk U100 drives (not exactly the fastest SSDs around); we're not sure what pricing is for this particular configuration. AVADirect's P170EM by contrast is priced at $1519, with a $100 off coupon available at the time of writing. That will get you an i7-3630QM and the 7970M, so for about $150 to $200 extra, for gaming purposes we recommend most users go that route.
// CreateInterconnectRequest returns a request value for making API operation for
// AWS Direct Connect.
//
// Creates an interconnect between an AWS Direct Connect Partner's network and
// a specific AWS Direct Connect location.
//
// An interconnect is a connection that is capable of hosting other connections.
// The AWS Direct Connect partner can use an interconnect to provide AWS Direct
// Connect hosted connections to customers through their own network services.
// Like a standard connection, an interconnect links the partner's network to
// an AWS Direct Connect location over a standard Ethernet fiber-optic cable.
// One end is connected to the partner's router, the other to an AWS Direct
// Connect router.
//
// You can automatically add the new interconnect to a link aggregation group
// (LAG) by specifying a LAG ID in the request. This ensures that the new interconnect
// is allocated on the same AWS Direct Connect endpoint that hosts the specified
// LAG. If there are no available ports on the endpoint, the request fails and
// no interconnect is created.
//
// For each end customer, the AWS Direct Connect Partner provisions a connection
// on their interconnect by calling AllocateHostedConnection. The end customer
// can then connect to AWS resources by creating a virtual interface on their
// connection, using the VLAN assigned to them by the AWS Direct Connect Partner.
//
// Intended for use by AWS Direct Connect Partners only.
//
// // Example sending a request using CreateInterconnectRequest.
// req := client.CreateInterconnectRequest(params)
// resp, err := req.Send(context.TODO())
// if err == nil {
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/directconnect-2012-10-25/CreateInterconnect
func (c *Client) CreateInterconnectRequest(input *CreateInterconnectInput) CreateInterconnectRequest {
op := &aws.Operation{
Name: opCreateInterconnect,
HTTPMethod: "POST",
HTTPPath: "/",
}
if input == nil {
input = &CreateInterconnectInput{}
}
req := c.newRequest(op, input, &CreateInterconnectOutput{})
return CreateInterconnectRequest{Request: req, Input: input, Copy: c.CreateInterconnectRequest}
} |
<filename>android-31/src/com/android/server/hdmi/HdmiCecConfig.java
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server.hdmi;
import static android.hardware.hdmi.HdmiControlManager.CecSettingName;
import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.StringDef;
import android.content.ContentResolver;
import android.content.Context;
import android.content.SharedPreferences;
import android.database.ContentObserver;
import android.hardware.hdmi.HdmiControlManager;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemProperties;
import android.os.UserHandle;
import android.provider.Settings.Global;
import android.util.ArrayMap;
import com.android.internal.R;
import com.android.internal.annotations.GuardedBy;
import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.util.ConcurrentUtils;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.concurrent.Executor;
/**
* The {@link HdmiCecConfig} class is used for getting information about
* available HDMI CEC settings.
*/
public class HdmiCecConfig {
private static final String TAG = "HdmiCecConfig";
private static final String ETC_DIR = "etc";
private static final String CONFIG_FILE = "cec_config.xml";
private static final String SHARED_PREFS_DIR = "shared_prefs";
private static final String SHARED_PREFS_NAME = "cec_config.xml";
private static final int STORAGE_SYSPROPS = 0;
private static final int STORAGE_GLOBAL_SETTINGS = 1;
private static final int STORAGE_SHARED_PREFS = 2;
@IntDef({
STORAGE_SYSPROPS,
STORAGE_GLOBAL_SETTINGS,
STORAGE_SHARED_PREFS,
})
private @interface Storage {}
private static final String VALUE_TYPE_STRING = "string";
private static final String VALUE_TYPE_INT = "int";
@StringDef({
VALUE_TYPE_STRING,
VALUE_TYPE_INT,
})
private @interface ValueType {}
@NonNull private final Context mContext;
@NonNull private final StorageAdapter mStorageAdapter;
private final Object mLock = new Object();
@GuardedBy("mLock")
private final ArrayMap<Setting, ArrayMap<SettingChangeListener, Executor>>
mSettingChangeListeners = new ArrayMap<>();
private SettingsObserver mSettingsObserver;
private LinkedHashMap<String, Setting> mSettings = new LinkedHashMap<>();
    /**
     * Exception thrown when the CEC configuration setup verification fails.
     * This usually means a setting lacks a default value or a storage/storage key.
     */
    public static class VerificationException extends RuntimeException {
        public VerificationException(String message) {
            super(message);
        }
    }
    /**
     * Listener used to get notifications when the value of a setting changes.
     */
    public interface SettingChangeListener {
        /**
         * Called when the value of a setting changes.
         *
         * @param setting name of the CEC setting that changed
         */
        void onChange(@NonNull @CecSettingName String setting);
    }
    /**
     * Setting storage input/output helper class.
     *
     * Wraps the three backing stores used for CEC settings: system properties,
     * Settings.Global, and a device-protected shared-preferences file.
     */
    public static class StorageAdapter {
        @NonNull private final Context mContext;
        @NonNull private final SharedPreferences mSharedPrefs;
        StorageAdapter(@NonNull Context context) {
            mContext = context;
            // The package info in the context isn't initialized in the way it is for normal apps,
            // so the standard, name-based context.getSharedPreferences doesn't work. Instead, we
            // build the path manually below using the same policy that appears in ContextImpl.
            final Context deviceContext = mContext.createDeviceProtectedStorageContext();
            final File prefsFile = new File(new File(Environment.getDataSystemDirectory(),
                    SHARED_PREFS_DIR), SHARED_PREFS_NAME);
            mSharedPrefs = deviceContext.getSharedPreferences(prefsFile, Context.MODE_PRIVATE);
        }
        /**
         * Read the value from a system property.
         * Returns the given default value if the system property is not set.
         */
        public String retrieveSystemProperty(@NonNull String storageKey,
                @NonNull String defaultValue) {
            return SystemProperties.get(storageKey, defaultValue);
        }
        /**
         * Write the value to a system property.
         */
        public void storeSystemProperty(@NonNull String storageKey,
                @NonNull String value) {
            SystemProperties.set(storageKey, value);
        }
        /**
         * Read the value from a global setting.
         * Returns the given default value if the global setting is not set.
         */
        public String retrieveGlobalSetting(@NonNull String storageKey,
                @NonNull String defaultValue) {
            String value = Global.getString(mContext.getContentResolver(), storageKey);
            return value != null ? value : defaultValue;
        }
        /**
         * Write the value to a global setting.
         */
        public void storeGlobalSetting(@NonNull String storageKey,
                @NonNull String value) {
            Global.putString(mContext.getContentResolver(), storageKey, value);
        }
        /**
         * Read the value from a shared preference.
         * Returns the given default value if the preference is not set.
         */
        public String retrieveSharedPref(@NonNull String storageKey,
                @NonNull String defaultValue) {
            return mSharedPrefs.getString(storageKey, defaultValue);
        }
        /**
         * Write the value to a shared preference.
         */
        public void storeSharedPref(@NonNull String storageKey,
                @NonNull String value) {
            mSharedPrefs.edit().putString(storageKey, value).apply();
        }
    }
    /**
     * Forwards Settings.Global change notifications to the registered CEC
     * setting listeners. NOTE(review): assumes observers are registered on
     * Global setting URIs whose last path segment is the setting name —
     * registration code is not visible here; confirm before relying on it.
     */
    private class SettingsObserver extends ContentObserver {
        SettingsObserver(Handler handler) {
            super(handler);
        }
        @Override
        public void onChange(boolean selfChange, Uri uri) {
            String setting = uri.getLastPathSegment();
            HdmiCecConfig.this.notifyGlobalSettingChanged(setting);
        }
    }
    /**
     * Tagged holder for a setting value: exactly one of the string or int
     * representation is non-null, depending on which constructor was used.
     */
    private class Value {
        private final String mStringValue;
        private final Integer mIntValue;
        Value(@NonNull String value) {
            mStringValue = value;
            mIntValue = null;
        }
        Value(@NonNull Integer value) {
            mStringValue = null;
            mIntValue = value;
        }
        /** String representation, or null when this holds an int value. */
        String getStringValue() {
            return mStringValue;
        }
        /** Integer representation, or null when this holds a string value. */
        Integer getIntValue() {
            return mIntValue;
        }
    }
    /**
     * A single CEC setting: its name, user-configurability, the values allowed
     * on this device, and the device default. Allowed and default values are
     * driven by config boolean resources (see {@code registerValue}).
     */
    protected class Setting {
        @NonNull private final Context mContext;
        @NonNull private final @CecSettingName String mName;
        private final boolean mUserConfigurable;
        // Set by registerValue() when a value's "default" resource is true;
        // stays null until then (getDefaultValue() throws in that case).
        private Value mDefaultValue = null;
        private List<Value> mAllowedValues = new ArrayList<>();
        Setting(@NonNull Context context,
                @NonNull @CecSettingName String name,
                int userConfResId) {
            mContext = context;
            mName = name;
            mUserConfigurable = mContext.getResources().getBoolean(userConfResId);
        }
        public @CecSettingName String getName() {
            return mName;
        }
        /** Value type is inferred from the default value's representation. */
        public @ValueType String getValueType() {
            return getDefaultValue().getStringValue() != null
                    ? VALUE_TYPE_STRING
                    : VALUE_TYPE_INT;
        }
        /**
         * Returns the device default for this setting.
         *
         * @throws VerificationException if no registered value was flagged as
         *         the default in the device config
         */
        public Value getDefaultValue() {
            if (mDefaultValue == null) {
                throw new VerificationException("Invalid CEC setup for '"
                    + this.getName() + "' setting. "
                    + "Setting has no default value.");
            }
            return mDefaultValue;
        }
        public boolean getUserConfigurable() {
            return mUserConfigurable;
        }
        /**
         * Adds the value to the allowed list when its "allowed" resource is
         * true, and marks it as the default when its "default" resource is
         * also true.
         *
         * @throws VerificationException if two values are both flagged default
         */
        private void registerValue(@NonNull Value value,
                int allowedResId, int defaultResId) {
            if (mContext.getResources().getBoolean(allowedResId)) {
                mAllowedValues.add(value);
                if (mContext.getResources().getBoolean(defaultResId)) {
                    if (mDefaultValue != null) {
                        throw new VerificationException("Invalid CEC setup for '"
                            + this.getName() + "' setting. "
                            + "Setting already has a default value.");
                    }
                    mDefaultValue = value;
                }
            }
        }
        public void registerValue(@NonNull String value, int allowedResId,
                int defaultResId) {
            registerValue(new Value(value), allowedResId, defaultResId);
        }
        public void registerValue(int value, int allowedResId,
                int defaultResId) {
            registerValue(new Value(value), allowedResId, defaultResId);
        }
        public List<Value> getAllowedValues() {
            return mAllowedValues;
        }
    }
    /**
     * Builds the full registry of CEC settings with their allowed and default
     * values from the device's config boolean resources, then verifies that
     * every setting has a default value and a storage mapping.
     *
     * @param context        context used to resolve config resources
     * @param storageAdapter abstraction over sysprops / global settings /
     *                       shared prefs used to persist setting values
     * @throws VerificationException if any setting lacks a default value
     *         (see {@code verifySettings()})
     */
    @VisibleForTesting
    HdmiCecConfig(@NonNull Context context,
            @NonNull StorageAdapter storageAdapter) {
        mContext = context;
        mStorageAdapter = storageAdapter;
        Setting hdmiCecEnabled = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_ENABLED,
                R.bool.config_cecHdmiCecEnabled_userConfigurable);
        hdmiCecEnabled.registerValue(HdmiControlManager.HDMI_CEC_CONTROL_ENABLED,
                R.bool.config_cecHdmiCecControlEnabled_allowed,
                R.bool.config_cecHdmiCecControlEnabled_default);
        hdmiCecEnabled.registerValue(HdmiControlManager.HDMI_CEC_CONTROL_DISABLED,
                R.bool.config_cecHdmiCecControlDisabled_allowed,
                R.bool.config_cecHdmiCecControlDisabled_default);
        Setting hdmiCecVersion = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_VERSION,
                R.bool.config_cecHdmiCecVersion_userConfigurable);
        hdmiCecVersion.registerValue(HdmiControlManager.HDMI_CEC_VERSION_1_4_B,
                R.bool.config_cecHdmiCecVersion14b_allowed,
                R.bool.config_cecHdmiCecVersion14b_default);
        hdmiCecVersion.registerValue(HdmiControlManager.HDMI_CEC_VERSION_2_0,
                R.bool.config_cecHdmiCecVersion20_allowed,
                R.bool.config_cecHdmiCecVersion20_default);
        Setting powerControlMode = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_POWER_CONTROL_MODE,
                R.bool.config_cecPowerControlMode_userConfigurable);
        powerControlMode.registerValue(HdmiControlManager.POWER_CONTROL_MODE_TV,
                R.bool.config_cecPowerControlModeTv_allowed,
                R.bool.config_cecPowerControlModeTv_default);
        powerControlMode.registerValue(HdmiControlManager.POWER_CONTROL_MODE_BROADCAST,
                R.bool.config_cecPowerControlModeBroadcast_allowed,
                R.bool.config_cecPowerControlModeBroadcast_default);
        powerControlMode.registerValue(HdmiControlManager.POWER_CONTROL_MODE_NONE,
                R.bool.config_cecPowerControlModeNone_allowed,
                R.bool.config_cecPowerControlModeNone_default);
        Setting powerStateChangeOnActiveSourceLost = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_POWER_STATE_CHANGE_ON_ACTIVE_SOURCE_LOST,
                R.bool.config_cecPowerStateChangeOnActiveSourceLost_userConfigurable);
        powerStateChangeOnActiveSourceLost.registerValue(
                HdmiControlManager.POWER_STATE_CHANGE_ON_ACTIVE_SOURCE_LOST_NONE,
                R.bool.config_cecPowerStateChangeOnActiveSourceLostNone_allowed,
                R.bool.config_cecPowerStateChangeOnActiveSourceLostNone_default);
        powerStateChangeOnActiveSourceLost.registerValue(
                HdmiControlManager.POWER_STATE_CHANGE_ON_ACTIVE_SOURCE_LOST_STANDBY_NOW,
                R.bool.config_cecPowerStateChangeOnActiveSourceLostStandbyNow_allowed,
                R.bool.config_cecPowerStateChangeOnActiveSourceLostStandbyNow_default);
        Setting systemAudioModeMuting = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_SYSTEM_AUDIO_MODE_MUTING,
                R.bool.config_cecSystemAudioModeMuting_userConfigurable);
        systemAudioModeMuting.registerValue(HdmiControlManager.SYSTEM_AUDIO_MODE_MUTING_ENABLED,
                R.bool.config_cecSystemAudioModeMutingEnabled_allowed,
                R.bool.config_cecSystemAudioModeMutingEnabled_default);
        systemAudioModeMuting.registerValue(HdmiControlManager.SYSTEM_AUDIO_MODE_MUTING_DISABLED,
                R.bool.config_cecSystemAudioModeMutingDisabled_allowed,
                R.bool.config_cecSystemAudioModeMutingDisabled_default);
        Setting volumeControlMode = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_VOLUME_CONTROL_MODE,
                R.bool.config_cecVolumeControlMode_userConfigurable);
        volumeControlMode.registerValue(HdmiControlManager.VOLUME_CONTROL_ENABLED,
                R.bool.config_cecVolumeControlModeEnabled_allowed,
                R.bool.config_cecVolumeControlModeEnabled_default);
        volumeControlMode.registerValue(HdmiControlManager.VOLUME_CONTROL_DISABLED,
                R.bool.config_cecVolumeControlModeDisabled_allowed,
                R.bool.config_cecVolumeControlModeDisabled_default);
        Setting tvWakeOnOneTouchPlay = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_TV_WAKE_ON_ONE_TOUCH_PLAY,
                R.bool.config_cecTvWakeOnOneTouchPlay_userConfigurable);
        tvWakeOnOneTouchPlay.registerValue(HdmiControlManager.TV_WAKE_ON_ONE_TOUCH_PLAY_ENABLED,
                R.bool.config_cecTvWakeOnOneTouchPlayEnabled_allowed,
                R.bool.config_cecTvWakeOnOneTouchPlayEnabled_default);
        tvWakeOnOneTouchPlay.registerValue(HdmiControlManager.TV_WAKE_ON_ONE_TOUCH_PLAY_DISABLED,
                R.bool.config_cecTvWakeOnOneTouchPlayDisabled_allowed,
                R.bool.config_cecTvWakeOnOneTouchPlayDisabled_default);
        Setting tvSendStandbyOnSleep = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_TV_SEND_STANDBY_ON_SLEEP,
                R.bool.config_cecTvSendStandbyOnSleep_userConfigurable);
        tvSendStandbyOnSleep.registerValue(HdmiControlManager.TV_SEND_STANDBY_ON_SLEEP_ENABLED,
                R.bool.config_cecTvSendStandbyOnSleepEnabled_allowed,
                R.bool.config_cecTvSendStandbyOnSleepEnabled_default);
        tvSendStandbyOnSleep.registerValue(HdmiControlManager.TV_SEND_STANDBY_ON_SLEEP_DISABLED,
                R.bool.config_cecTvSendStandbyOnSleepDisabled_allowed,
                R.bool.config_cecTvSendStandbyOnSleepDisabled_default);
        Setting rcProfileTv = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_TV,
                R.bool.config_cecRcProfileTv_userConfigurable);
        rcProfileTv.registerValue(HdmiControlManager.RC_PROFILE_TV_NONE,
                R.bool.config_cecRcProfileTvNone_allowed,
                R.bool.config_cecRcProfileTvNone_default);
        rcProfileTv.registerValue(HdmiControlManager.RC_PROFILE_TV_ONE,
                R.bool.config_cecRcProfileTvOne_allowed,
                R.bool.config_cecRcProfileTvOne_default);
        rcProfileTv.registerValue(HdmiControlManager.RC_PROFILE_TV_TWO,
                R.bool.config_cecRcProfileTvTwo_allowed,
                R.bool.config_cecRcProfileTvTwo_default);
        rcProfileTv.registerValue(HdmiControlManager.RC_PROFILE_TV_THREE,
                R.bool.config_cecRcProfileTvThree_allowed,
                R.bool.config_cecRcProfileTvThree_default);
        rcProfileTv.registerValue(HdmiControlManager.RC_PROFILE_TV_FOUR,
                R.bool.config_cecRcProfileTvFour_allowed,
                R.bool.config_cecRcProfileTvFour_default);
        Setting rcProfileSourceRootMenu = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_ROOT_MENU,
                R.bool.config_cecRcProfileSourceRootMenu_userConfigurable);
        rcProfileSourceRootMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_ROOT_MENU_HANDLED,
                R.bool.config_cecRcProfileSourceRootMenuHandled_allowed,
                R.bool.config_cecRcProfileSourceRootMenuHandled_default);
        rcProfileSourceRootMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_ROOT_MENU_NOT_HANDLED,
                R.bool.config_cecRcProfileSourceRootMenuNotHandled_allowed,
                R.bool.config_cecRcProfileSourceRootMenuNotHandled_default);
        Setting rcProfileSourceSetupMenu = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_SETUP_MENU,
                R.bool.config_cecRcProfileSourceSetupMenu_userConfigurable);
        rcProfileSourceSetupMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_SETUP_MENU_HANDLED,
                R.bool.config_cecRcProfileSourceSetupMenuHandled_allowed,
                R.bool.config_cecRcProfileSourceSetupMenuHandled_default);
        rcProfileSourceSetupMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_SETUP_MENU_NOT_HANDLED,
                R.bool.config_cecRcProfileSourceSetupMenuNotHandled_allowed,
                R.bool.config_cecRcProfileSourceSetupMenuNotHandled_default);
        Setting rcProfileSourceContentsMenu = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_CONTENTS_MENU,
                R.bool.config_cecRcProfileSourceContentsMenu_userConfigurable);
        rcProfileSourceContentsMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_CONTENTS_MENU_HANDLED,
                R.bool.config_cecRcProfileSourceContentsMenuHandled_allowed,
                R.bool.config_cecRcProfileSourceContentsMenuHandled_default);
        rcProfileSourceContentsMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_CONTENTS_MENU_NOT_HANDLED,
                R.bool.config_cecRcProfileSourceContentsMenuNotHandled_allowed,
                R.bool.config_cecRcProfileSourceContentsMenuNotHandled_default);
        Setting rcProfileSourceTopMenu = registerSetting(
                HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_TOP_MENU,
                R.bool.config_cecRcProfileSourceTopMenu_userConfigurable);
        rcProfileSourceTopMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_TOP_MENU_HANDLED,
                R.bool.config_cecRcProfileSourceTopMenuHandled_allowed,
                R.bool.config_cecRcProfileSourceTopMenuHandled_default);
        rcProfileSourceTopMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_TOP_MENU_NOT_HANDLED,
                R.bool.config_cecRcProfileSourceTopMenuNotHandled_allowed,
                R.bool.config_cecRcProfileSourceTopMenuNotHandled_default);
        Setting rcProfileSourceMediaContextSensitiveMenu = registerSetting(
                HdmiControlManager
                        .CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_MEDIA_CONTEXT_SENSITIVE_MENU,
                R.bool.config_cecRcProfileSourceMediaContextSensitiveMenu_userConfigurable);
        rcProfileSourceMediaContextSensitiveMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_MEDIA_CONTEXT_SENSITIVE_MENU_HANDLED,
                R.bool.config_cecRcProfileSourceMediaContextSensitiveMenuHandled_allowed,
                R.bool.config_cecRcProfileSourceMediaContextSensitiveMenuHandled_default);
        rcProfileSourceMediaContextSensitiveMenu.registerValue(
                HdmiControlManager.RC_PROFILE_SOURCE_MEDIA_CONTEXT_SENSITIVE_MENU_NOT_HANDLED,
                R.bool.config_cecRcProfileSourceMediaContextSensitiveMenuNotHandled_allowed,
                R.bool.config_cecRcProfileSourceMediaContextSensitiveMenuNotHandled_default);
        verifySettings();
    }
    /** Production constructor: uses a real {@link StorageAdapter} backed by the given context. */
    HdmiCecConfig(@NonNull Context context) {
        this(context, new StorageAdapter(context));
    }
/**
 * Creates a {@link Setting} for the given name and records it in the
 * settings map, keyed by name.
 *
 * @param name CEC setting name to register.
 * @param userConfResId boolean resource id controlling user configurability.
 * @return the newly registered setting.
 */
private Setting registerSetting(@NonNull @CecSettingName String name,
        int userConfResId) {
    Setting newSetting = new Setting(mContext, name, userConfResId);
    mSettings.put(name, newSetting);
    return newSetting;
}
/**
 * Sanity-checks every registered setting at construction time. Each call
 * below throws when the corresponding piece of metadata is missing, which
 * aborts construction early instead of failing at first use.
 */
private void verifySettings() {
    for (Setting registered : mSettings.values()) {
        registered.getDefaultValue(); // throws when no default value is assigned
        getStorage(registered);       // throws when no storage backend is mapped
        getStorageKey(registered);    // throws when no storage key is mapped
    }
}
/**
 * Looks up a registered setting by its CEC setting name.
 *
 * @param name CEC setting name to look up.
 * @return the {@link Setting}, or {@code null} if no such setting is registered.
 */
@Nullable
private Setting getSetting(@NonNull String name) {
    // Map#get already returns null for missing keys, and registerSetting never
    // stores null values, so the previous containsKey pre-check was a
    // redundant second map lookup.
    return mSettings.get(name);
}
/**
 * Maps a CEC setting to the storage backend holding its value.
 *
 * <p>Settings mirrored in {@code Settings.Global} (legacy, externally visible
 * keys) return {@code STORAGE_GLOBAL_SETTINGS}; all others are kept in shared
 * preferences under {@code STORAGE_SHARED_PREFS}.
 *
 * @param setting setting to resolve.
 * @return one of the {@code STORAGE_*} constants.
 * @throws VerificationException if the setting has no storage mapping; this is
 *         exercised by {@link #verifySettings()} during construction.
 */
@Storage
private int getStorage(@NonNull Setting setting) {
    switch (setting.getName()) {
        case HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_ENABLED:
            return STORAGE_GLOBAL_SETTINGS;
        case HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_VERSION:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_POWER_CONTROL_MODE:
            return STORAGE_GLOBAL_SETTINGS;
        case HdmiControlManager.CEC_SETTING_NAME_VOLUME_CONTROL_MODE:
            return STORAGE_GLOBAL_SETTINGS;
        case HdmiControlManager.CEC_SETTING_NAME_POWER_STATE_CHANGE_ON_ACTIVE_SOURCE_LOST:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_SYSTEM_AUDIO_MODE_MUTING:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_TV_WAKE_ON_ONE_TOUCH_PLAY:
            return STORAGE_GLOBAL_SETTINGS;
        case HdmiControlManager.CEC_SETTING_NAME_TV_SEND_STANDBY_ON_SLEEP:
            return STORAGE_GLOBAL_SETTINGS;
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_TV:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_ROOT_MENU:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_SETUP_MENU:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_CONTENTS_MENU:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_TOP_MENU:
            return STORAGE_SHARED_PREFS;
        case HdmiControlManager
                .CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_MEDIA_CONTEXT_SENSITIVE_MENU:
            return STORAGE_SHARED_PREFS;
        default:
            throw new VerificationException("Invalid CEC setting '" + setting.getName()
                    + "' storage.");
    }
}
/**
 * Maps a CEC setting to the key under which its value is stored.
 *
 * <p>Global-settings-backed settings reuse the pre-existing
 * {@code Settings.Global} keys for backward compatibility; shared-preference
 * settings simply use their own CEC setting name as the key.
 *
 * @param setting setting to resolve.
 * @return the storage key for the setting's backend.
 * @throws VerificationException if the setting has no key mapping; this is
 *         exercised by {@link #verifySettings()} during construction.
 */
private String getStorageKey(@NonNull Setting setting) {
    switch (setting.getName()) {
        case HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_ENABLED:
            return Global.HDMI_CONTROL_ENABLED;
        case HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_VERSION:
            return setting.getName();
        // Legacy key: power control mode historically lived under
        // "send standby on sleep".
        case HdmiControlManager.CEC_SETTING_NAME_POWER_CONTROL_MODE:
            return Global.HDMI_CONTROL_SEND_STANDBY_ON_SLEEP;
        case HdmiControlManager.CEC_SETTING_NAME_VOLUME_CONTROL_MODE:
            return Global.HDMI_CONTROL_VOLUME_CONTROL_ENABLED;
        case HdmiControlManager.CEC_SETTING_NAME_POWER_STATE_CHANGE_ON_ACTIVE_SOURCE_LOST:
            return setting.getName();
        case HdmiControlManager.CEC_SETTING_NAME_SYSTEM_AUDIO_MODE_MUTING:
            return setting.getName();
        case HdmiControlManager.CEC_SETTING_NAME_TV_WAKE_ON_ONE_TOUCH_PLAY:
            return Global.HDMI_CONTROL_AUTO_WAKEUP_ENABLED;
        case HdmiControlManager.CEC_SETTING_NAME_TV_SEND_STANDBY_ON_SLEEP:
            return Global.HDMI_CONTROL_AUTO_DEVICE_OFF_ENABLED;
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_TV:
            return setting.getName();
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_ROOT_MENU:
            return setting.getName();
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_SETUP_MENU:
            return setting.getName();
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_CONTENTS_MENU:
            return setting.getName();
        case HdmiControlManager.CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_TOP_MENU:
            return setting.getName();
        case HdmiControlManager
                .CEC_SETTING_NAME_RC_PROFILE_SOURCE_HANDLES_MEDIA_CONTEXT_SENSITIVE_MENU:
            return setting.getName();
        default:
            throw new VerificationException("Invalid CEC setting '" + setting.getName()
                    + "' storage key.");
    }
}
/**
 * Reads the current raw value of a setting from its backing storage.
 *
 * @param setting setting to read.
 * @param defaultValue value to report when storage has no entry.
 * @return the stored value, or {@code null} for an unknown storage type.
 */
protected String retrieveValue(@NonNull Setting setting, @NonNull String defaultValue) {
    @Storage int storage = getStorage(setting);
    String storageKey = getStorageKey(setting);
    switch (storage) {
        case STORAGE_SYSPROPS:
            HdmiLogger.debug("Reading '" + storageKey + "' sysprop.");
            return mStorageAdapter.retrieveSystemProperty(storageKey, defaultValue);
        case STORAGE_GLOBAL_SETTINGS:
            HdmiLogger.debug("Reading '" + storageKey + "' global setting.");
            return mStorageAdapter.retrieveGlobalSetting(storageKey, defaultValue);
        case STORAGE_SHARED_PREFS:
            HdmiLogger.debug("Reading '" + storageKey + "' shared preference.");
            return mStorageAdapter.retrieveSharedPref(storageKey, defaultValue);
        default:
            return null;
    }
}
/**
 * Writes a setting's raw value into its backing storage.
 *
 * @param setting setting to write.
 * @param value raw value to store.
 */
protected void storeValue(@NonNull Setting setting, @NonNull String value) {
    @Storage int storage = getStorage(setting);
    String storageKey = getStorageKey(setting);
    switch (storage) {
        case STORAGE_SYSPROPS:
            HdmiLogger.debug("Setting '" + storageKey + "' sysprop.");
            mStorageAdapter.storeSystemProperty(storageKey, value);
            break;
        case STORAGE_GLOBAL_SETTINGS:
            HdmiLogger.debug("Setting '" + storageKey + "' global setting.");
            mStorageAdapter.storeGlobalSetting(storageKey, value);
            break;
        case STORAGE_SHARED_PREFS:
            HdmiLogger.debug("Setting '" + storageKey + "' shared pref.");
            mStorageAdapter.storeSharedPref(storageKey, value);
            // Global settings are observed via the SettingsObserver; shared
            // prefs have no observer mechanism, so notify listeners directly.
            notifySettingChanged(setting);
            break;
    }
}
/**
 * Translates a {@code Settings.Global} key change into the corresponding CEC
 * setting change notification. Keys that do not map to a CEC setting are
 * silently ignored.
 *
 * @param setting the {@code Settings.Global} key that changed.
 */
private void notifyGlobalSettingChanged(String setting) {
    switch (setting) {
        case Global.HDMI_CONTROL_ENABLED:
            notifySettingChanged(HdmiControlManager.CEC_SETTING_NAME_HDMI_CEC_ENABLED);
            break;
        case Global.HDMI_CONTROL_SEND_STANDBY_ON_SLEEP:
            notifySettingChanged(HdmiControlManager.CEC_SETTING_NAME_POWER_CONTROL_MODE);
            break;
        case Global.HDMI_CONTROL_VOLUME_CONTROL_ENABLED:
            notifySettingChanged(HdmiControlManager.CEC_SETTING_NAME_VOLUME_CONTROL_MODE);
            break;
        case Global.HDMI_CONTROL_AUTO_WAKEUP_ENABLED:
            notifySettingChanged(HdmiControlManager.CEC_SETTING_NAME_TV_WAKE_ON_ONE_TOUCH_PLAY);
            break;
        case Global.HDMI_CONTROL_AUTO_DEVICE_OFF_ENABLED:
            notifySettingChanged(HdmiControlManager.CEC_SETTING_NAME_TV_SEND_STANDBY_ON_SLEEP);
            break;
    }
}
/**
 * Resolves a setting by name and notifies its registered change listeners.
 *
 * @param name CEC setting name that changed.
 * @throws IllegalArgumentException if no setting with that name exists.
 */
private void notifySettingChanged(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    notifySettingChanged(setting);
}
/**
 * Notifies every listener registered for the given setting, each on the
 * executor it was registered with. No-op when no listeners are registered.
 *
 * <p>NOTE(review): callbacks are dispatched while {@code mLock} is held, so a
 * listener registered with a direct executor runs inside the lock — listeners
 * must not call back into methods that take {@code mLock}.
 *
 * @param setting the setting whose value changed.
 */
protected void notifySettingChanged(@NonNull Setting setting) {
    synchronized (mLock) {
        ArrayMap<SettingChangeListener, Executor> listeners =
                mSettingChangeListeners.get(setting);
        if (listeners == null) {
            return; // No listeners registered, do nothing.
        }
        for (Entry<SettingChangeListener, Executor> entry : listeners.entrySet()) {
            SettingChangeListener listener = entry.getKey();
            Executor executor = entry.getValue();
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    listener.onChange(setting.getName());
                }
            });
        }
    }
}
/**
 * Registers a content observer for the Global Settings keys that back CEC
 * settings, so external changes are forwarded to change listeners.
 * Needs to be called once after initialization of HdmiCecConfig.
 *
 * @param looper looper on which observer callbacks are delivered.
 */
public void registerGlobalSettingsObserver(Looper looper) {
    mSettingsObserver = new SettingsObserver(new Handler(looper));
    ContentResolver resolver = mContext.getContentResolver();
    final String[] observedSettings = {
            Global.HDMI_CONTROL_ENABLED,
            Global.HDMI_CONTROL_SEND_STANDBY_ON_SLEEP,
            Global.HDMI_CONTROL_VOLUME_CONTROL_ENABLED,
            Global.HDMI_CONTROL_AUTO_WAKEUP_ENABLED,
            Global.HDMI_CONTROL_AUTO_DEVICE_OFF_ENABLED,
    };
    for (String globalSetting : observedSettings) {
        resolver.registerContentObserver(Global.getUriFor(globalSetting), false,
                mSettingsObserver, UserHandle.USER_ALL);
    }
}
/**
 * Unregisters the Global Settings change observer installed by
 * {@link #registerGlobalSettingsObserver(Looper)}.
 */
public void unregisterGlobalSettingsObserver() {
    mContext.getContentResolver().unregisterContentObserver(mSettingsObserver);
}
/**
 * Register change listener for a given setting name using DirectExecutor.
 *
 * <p>The listener is invoked synchronously on whichever thread reports the
 * change.
 *
 * @param name CEC setting name to observe.
 * @param listener callback invoked when the setting's value changes.
 */
public void registerChangeListener(@NonNull @CecSettingName String name,
        SettingChangeListener listener) {
    registerChangeListener(name, listener, ConcurrentUtils.DIRECT_EXECUTOR);
}
/**
 * Register change listener for a given setting name and executor.
 *
 * <p>Only settings backed by Global Settings or shared preferences support
 * change listeners.
 *
 * @param name CEC setting name to observe.
 * @param listener callback invoked when the setting's value changes.
 * @param executor executor on which the callback is dispatched.
 * @throws IllegalArgumentException if the setting does not exist or does not
 *         support change listeners.
 */
public void registerChangeListener(@NonNull @CecSettingName String name,
        SettingChangeListener listener,
        Executor executor) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    @Storage int storage = getStorage(setting);
    if (storage != STORAGE_GLOBAL_SETTINGS && storage != STORAGE_SHARED_PREFS) {
        throw new IllegalArgumentException("Change listeners for setting '" + name
                + "' not supported.");
    }
    synchronized (mLock) {
        // Get-or-create the per-setting listener map.
        ArrayMap<SettingChangeListener, Executor> listeners =
                mSettingChangeListeners.get(setting);
        if (listeners == null) {
            listeners = new ArrayMap<>();
            mSettingChangeListeners.put(setting, listeners);
        }
        listeners.put(listener, executor);
    }
}
/**
 * Remove change listener for a given setting name.
 *
 * @param name CEC setting name.
 * @param listener listener to remove; listeners that were never registered
 *        are ignored.
 * @throws IllegalArgumentException if the setting does not exist.
 */
public void removeChangeListener(@NonNull @CecSettingName String name,
        SettingChangeListener listener) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    synchronized (mLock) {
        ArrayMap<SettingChangeListener, Executor> listeners =
                mSettingChangeListeners.get(setting);
        if (listeners == null) {
            return;
        }
        listeners.remove(listener);
        // Drop the per-setting map once its last listener is gone.
        if (listeners.isEmpty()) {
            mSettingChangeListeners.remove(setting);
        }
    }
}
/**
 * Returns a list of all settings based on the XML metadata.
 *
 * @return names of every registered CEC setting, user-configurable or not.
 */
public @CecSettingName List<String> getAllSettings() {
    return new ArrayList<>(mSettings.keySet());
}
/**
 * Returns a list of user-modifiable settings based on the XML metadata.
 *
 * @return names of every setting flagged as user-configurable.
 */
public @CecSettingName List<String> getUserSettings() {
    List<String> userSettings = new ArrayList<>();
    for (Setting candidate : mSettings.values()) {
        if (!candidate.getUserConfigurable()) {
            continue;
        }
        userSettings.add(candidate.getName());
    }
    return userSettings;
}
/**
 * For a given setting name returns true if and only if the value type of that
 * setting is a string.
 *
 * @param name CEC setting name.
 * @throws IllegalArgumentException if the setting does not exist.
 */
public boolean isStringValueType(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    // Use the setting already fetched above instead of a second map lookup.
    return setting.getValueType().equals(VALUE_TYPE_STRING);
}
/**
 * For a given setting name returns true if and only if the value type of that
 * setting is an int.
 *
 * @param name CEC setting name.
 * @throws IllegalArgumentException if the setting does not exist.
 */
public boolean isIntValueType(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    // Use the setting already fetched above instead of a second map lookup.
    return setting.getValueType().equals(VALUE_TYPE_INT);
}
/**
 * For a given setting name returns values that are allowed for that setting (string).
 *
 * @param name CEC setting name.
 * @return the allowed string values.
 * @throws IllegalArgumentException if the setting does not exist or is not
 *         string-typed.
 */
public List<String> getAllowedStringValues(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_STRING)) {
        throw new IllegalArgumentException("Setting '" + name
                + "' is not a string-type setting.");
    }
    List<String> result = new ArrayList<>();
    for (Value candidate : setting.getAllowedValues()) {
        result.add(candidate.getStringValue());
    }
    return result;
}
/**
 * For a given setting name returns values that are allowed for that setting (int).
 *
 * @param name CEC setting name.
 * @return the allowed int values.
 * @throws IllegalArgumentException if the setting does not exist or is not
 *         int-typed.
 */
public List<Integer> getAllowedIntValues(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_INT)) {
        // Fixed copy/paste error: this message previously said "string-type".
        throw new IllegalArgumentException("Setting '" + name
                + "' is not an int-type setting.");
    }
    List<Integer> allowedValues = new ArrayList<>();
    for (Value allowedValue : setting.getAllowedValues()) {
        allowedValues.add(allowedValue.getIntValue());
    }
    return allowedValues;
}
/**
 * For a given setting name returns the default value for that setting (string).
 *
 * @param name CEC setting name.
 * @return the default string value.
 * @throws IllegalArgumentException if the setting does not exist or is not
 *         string-typed.
 */
public String getDefaultStringValue(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_STRING)) {
        throw new IllegalArgumentException("Setting '" + name
                + "' is not a string-type setting.");
    }
    // Use the setting already fetched above instead of a second map lookup.
    return setting.getDefaultValue().getStringValue();
}
/**
 * For a given setting name returns the default value for that setting (int).
 *
 * @param name CEC setting name.
 * @return the default int value.
 * @throws IllegalArgumentException if the setting does not exist or is not
 *         int-typed.
 */
public int getDefaultIntValue(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_INT)) {
        // Fixed copy/paste error: this message previously said "string-type".
        throw new IllegalArgumentException("Setting '" + name
                + "' is not an int-type setting.");
    }
    // Use the setting already fetched above instead of a second map lookup.
    return setting.getDefaultValue().getIntValue();
}
/**
 * For a given setting name returns the current value of that setting (string).
 *
 * @param name CEC setting name.
 * @return the stored value, or the setting's default when storage has no entry.
 * @throws IllegalArgumentException if the setting does not exist or is not
 *         string-typed.
 */
public String getStringValue(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_STRING)) {
        throw new IllegalArgumentException("Setting '" + name
                + "' is not a string-type setting.");
    }
    HdmiLogger.debug("Getting CEC setting value '" + name + "'.");
    String fallback = setting.getDefaultValue().getStringValue();
    return retrieveValue(setting, fallback);
}
/**
 * For a given setting name returns the current value of that setting (int).
 *
 * @param name CEC setting name.
 * @return the stored value, or the setting's default when storage has no entry.
 * @throws IllegalArgumentException if the setting does not exist or is not
 *         int-typed.
 */
public int getIntValue(@NonNull @CecSettingName String name) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_INT)) {
        // Fixed grammar in the user-facing message ("a int-type" -> "an int-type").
        throw new IllegalArgumentException("Setting '" + name
                + "' is not an int-type setting.");
    }
    HdmiLogger.debug("Getting CEC setting value '" + name + "'.");
    String defaultValue = Integer.toString(setting.getDefaultValue().getIntValue());
    String value = retrieveValue(setting, defaultValue);
    // NOTE(review): assumes the stored value was written via setIntValue; a
    // corrupted entry would surface here as a NumberFormatException.
    return Integer.parseInt(value);
}
/**
 * For a given setting name and value sets the current value of that setting (string).
 *
 * @param name CEC setting name.
 * @param value new value; must be one of the setting's allowed values.
 * @throws IllegalArgumentException if the setting does not exist, is not
 *         user-configurable, is not string-typed, or the value is not allowed.
 */
public void setStringValue(@NonNull @CecSettingName String name, @NonNull String value) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getUserConfigurable()) {
        throw new IllegalArgumentException("Updating CEC setting '" + name + "' prohibited.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_STRING)) {
        throw new IllegalArgumentException("Setting '" + name
                + "' is not a string-type setting.");
    }
    boolean allowed = getAllowedStringValues(name).contains(value);
    if (!allowed) {
        throw new IllegalArgumentException("Invalid CEC setting '" + name
                + "' value: '" + value + "'.");
    }
    HdmiLogger.debug("Updating CEC setting '" + name + "' to '" + value + "'.");
    storeValue(setting, value);
}
/**
 * For a given setting name and value sets the current value of that setting (int).
 *
 * @param name CEC setting name.
 * @param value new value; must be one of the setting's allowed values.
 * @throws IllegalArgumentException if the setting does not exist, is not
 *         user-configurable, is not int-typed, or the value is not allowed.
 */
public void setIntValue(@NonNull @CecSettingName String name, int value) {
    Setting setting = getSetting(name);
    if (setting == null) {
        throw new IllegalArgumentException("Setting '" + name + "' does not exist.");
    }
    if (!setting.getUserConfigurable()) {
        throw new IllegalArgumentException("Updating CEC setting '" + name + "' prohibited.");
    }
    if (!setting.getValueType().equals(VALUE_TYPE_INT)) {
        // Fixed grammar in the user-facing message ("a int-type" -> "an int-type").
        throw new IllegalArgumentException("Setting '" + name
                + "' is not an int-type setting.");
    }
    if (!getAllowedIntValues(name).contains(value)) {
        throw new IllegalArgumentException("Invalid CEC setting '" + name
                + "' value: '" + value + "'.");
    }
    HdmiLogger.debug("Updating CEC setting '" + name + "' to '" + value + "'.");
    storeValue(setting, Integer.toString(value));
}
}
|
//add gate if none or last gate has Y>=220
public static void addRandomGate()
{
if (totalGates == 0 || gates.get(lastGate).gateY >= 220)
{
GateSpawner gs = new GateSpawner();
gs.gateX = r.nextInt(randomX);
gates.add(gs);
totalGates++;
}
} |
// hack/garden-feat/garden-feat.go
package main
import (
"fmt"
"github.com/geofffranks/simpleyaml"
"github.com/geofffranks/spruce"
flag "github.com/spf13/pflag"
"gopkg.in/alediaferia/stackgo.v1"
"gopkg.in/yaml.v2"
"io/ioutil"
"os"
"path/filepath"
"sort"
"strconv"
)
// usage prints a one-line synopsis and the list of commands to stderr,
// followed by the registered flag descriptions.
func usage() {
	prog := filepath.Base(os.Args[0])
	_, _ = fmt.Fprintf(os.Stderr, "Usage: %s <command> [--option]... arg...\n", prog)
	_, _ = fmt.Fprintf(os.Stderr, "Commands: cname, elements, features, flags, ignore, params, platform\n")
	_, _ = fmt.Fprintf(os.Stderr, "Options:\n")
	flag.PrintDefaults()
}
// parseCmdLine parses argv into the selected command, the feature directory,
// the feature/ignore lists, and any remaining positional arguments. It exits
// with status 2 on a parse error, on --help, or on an unknown command.
//
// Fix: the help text for --ignore and --features misspelled "features" as
// "feaures".
func parseCmdLine(argv []string) (progName string, cmd string, featDir string, features []string, ignore []string, args []string) {
	progName = filepath.Base(argv[0])
	flag.Usage = usage
	flag.ErrHelp = nil
	// Use a fresh flag set so parse errors are reported (ContinueOnError)
	// and handled here rather than by the package default.
	flag.CommandLine = flag.NewFlagSet(argv[0], flag.ContinueOnError)
	flag.StringVar(&featDir, "feat-dir", "../features", "Directory of GardenLinux features")
	flag.StringSliceVarP(&ignore, "ignore", "i", nil, "List of features to ignore (comma-separated)")
	flag.StringSliceVarP(&features, "features", "f", nil, "List of features (comma-separated)")
	var help bool
	flag.BoolVarP(&help, "help", "h", false, "Show this help message")
	err := flag.CommandLine.Parse(argv[1:])
	if err != nil {
		_, _ = fmt.Fprintf(os.Stderr, "%s: %s\n", progName, err)
		flag.Usage()
		os.Exit(2)
	}
	if help {
		flag.Usage()
		os.Exit(2)
	}
	// The first positional argument selects the command; anything not in the
	// known list is a usage error.
	cmd = flag.Arg(0)
	switch cmd {
	case "cname":
	case "elements":
	case "features":
	case "flags":
	case "ignore":
	case "params":
	case "platform":
	default:
		flag.Usage()
		os.Exit(2)
	}
	args = flag.Args()[1:]
	return
}
// main parses the command line, loads every feature definition from the
// feature directory, and dispatches to the handler for the selected command.
// Errors are printed to stderr and exit with status 1.
func main() {
	progName, cmd, featDir, features, ignore, args := parseCmdLine(os.Args)
	// Only these three feature types are valid; readFeatures rejects others.
	allFeatures, err := readFeatures(featDir, "platform", "element", "flag")
	if err != nil {
		_, _ = fmt.Fprintf(os.Stderr, "%s: %s\n", progName, err)
		os.Exit(1)
	}
	// cmd was already validated by parseCmdLine, so no default case is needed.
	switch cmd {
	case "cname":
		err = cnameCmd(allFeatures, features, ignore, args)
	case "elements":
		err = elementsCmd(allFeatures, features, ignore, args)
	case "features":
		err = featuresCmd(allFeatures, features, ignore, args)
	case "flags":
		err = flagsCmd(allFeatures, features, ignore, args)
	case "ignore":
		err = ignoreCmd(allFeatures, features, ignore, args)
	case "params":
		err = paramsCmd(allFeatures, features, ignore, args)
	case "platform":
		err = platformCmd(allFeatures, features, ignore, args)
	}
	if err != nil {
		_, _ = fmt.Fprintf(os.Stderr, "%s: %s\n", progName, err)
		os.Exit(1)
	}
}
// cnameCmd prints the canonical name of the selected feature set: the
// minimal generating subset of features, strictly sorted (platform first,
// then elements, then flags), rendered by printCname.
func cnameCmd(allFeatures featureSet, features []string, ignore []string, _ []string) error {
	ignored := makeSet(ignore)
	// Expand includes transitively so the platform validation below sees the
	// complete feature set.
	expanded, _, err := expandFeatures(allFeatures, features, ignored)
	if err != nil {
		return fmt.Errorf("cname: %w", err)
	}
	// Validation pass only (result discarded): ensures exactly one platform.
	_, err = sortFeatures(allFeatures, expanded, false, true)
	if err != nil {
		return fmt.Errorf("cname: %w", err)
	}
	// Reduce the original selection to a minimal set that still expands to
	// the same closure.
	features, err = reduceFeatures(allFeatures, features, ignored)
	if err != nil {
		return fmt.Errorf("cname: %w", err)
	}
	// Strict (alphabetical within groups) sort makes the cname deterministic.
	features, err = sortFeatures(allFeatures, features, true, false)
	if err != nil {
		return fmt.Errorf("cname: %w", err)
	}
	err = printCname(allFeatures, features)
	if err != nil {
		return fmt.Errorf("cname: %w", err)
	}
	return nil
}
// elementsCmd prints every expanded non-flag feature (platforms and
// elements) of the selection, space-separated on one line.
func elementsCmd(allFeatures featureSet, features []string, ignore []string, _ []string) error {
	wrap := func(err error) error { return fmt.Errorf("elements: %w", err) }
	sorted, err := sortFeatures(allFeatures, features, false, false)
	if err != nil {
		return wrap(err)
	}
	expanded, _, err := expandFeatures(allFeatures, sorted, makeSet(ignore))
	if err != nil {
		return wrap(err)
	}
	// Validation pass only: ensures exactly one platform is present.
	if _, err = sortFeatures(allFeatures, expanded, false, true); err != nil {
		return wrap(err)
	}
	elements, err := filterByType(allFeatures, expanded, "platform", "element")
	if err != nil {
		return wrap(err)
	}
	if err = printStrings(elements...); err != nil {
		return wrap(err)
	}
	return nil
}
// featuresCmd prints the full expanded feature list of the selection (in
// dependency order), space-separated on one line.
func featuresCmd(allFeatures featureSet, features []string, ignore []string, _ []string) error {
	wrap := func(err error) error { return fmt.Errorf("features: %w", err) }
	sorted, err := sortFeatures(allFeatures, features, false, false)
	if err != nil {
		return wrap(err)
	}
	expanded, _, err := expandFeatures(allFeatures, sorted, makeSet(ignore))
	if err != nil {
		return wrap(err)
	}
	// Validation pass only: ensures exactly one platform is present.
	if _, err = sortFeatures(allFeatures, expanded, false, true); err != nil {
		return wrap(err)
	}
	if err = printStrings(expanded...); err != nil {
		return wrap(err)
	}
	return nil
}
// flagsCmd prints the flag-type features of the expanded selection,
// space-separated on one line.
//
// Fix: the final error was wrapped with the "elements:" prefix (copy/paste
// from elementsCmd); it now correctly reports "flags:".
func flagsCmd(allFeatures featureSet, features []string, ignore []string, _ []string) error {
	features, err := sortFeatures(allFeatures, features, false, false)
	if err != nil {
		return fmt.Errorf("flags: %w", err)
	}
	features, _, err = expandFeatures(allFeatures, features, makeSet(ignore))
	if err != nil {
		return fmt.Errorf("flags: %w", err)
	}
	// Validation pass only: ensures exactly one platform is present.
	_, err = sortFeatures(allFeatures, features, false, true)
	if err != nil {
		return fmt.Errorf("flags: %w", err)
	}
	flags, err := filterByType(allFeatures, features, "flag")
	if err != nil {
		return fmt.Errorf("flags: %w", err)
	}
	err = printStrings(flags...)
	if err != nil {
		return fmt.Errorf("flags: %w", err)
	}
	return nil
}
// ignoreCmd prints, in alphabetical order, the ignored features that were
// actually encountered while expanding the selection.
func ignoreCmd(allFeatures featureSet, features []string, ignore []string, _ []string) error {
	wrap := func(err error) error { return fmt.Errorf("ignore: %w", err) }
	expanded, ignoredSet, err := expandFeatures(allFeatures, features, makeSet(ignore))
	if err != nil {
		return wrap(err)
	}
	// Validation pass only: ensures exactly one platform is present.
	if _, err = sortFeatures(allFeatures, expanded, false, true); err != nil {
		return wrap(err)
	}
	ignored := make([]string, 0, len(ignoredSet))
	for name := range ignoredSet {
		ignored = append(ignored, name)
	}
	sort.Strings(ignored)
	if err = printStrings(ignored...); err != nil {
		return wrap(err)
	}
	return nil
}
// paramsCmd merges the raw YAML documents of every expanded feature (in
// dependency order, later documents overriding earlier ones via spruce),
// evaluates spruce operators while pruning the bookkeeping keys
// description/type/features, and prints the result as shell variable
// assignments.
func paramsCmd(allFeatures featureSet, features []string, ignore []string, args []string) error {
	features, err := sortFeatures(allFeatures, features, false, false)
	if err != nil {
		return fmt.Errorf("params: %w", err)
	}
	features, _, err = expandFeatures(allFeatures, features, makeSet(ignore))
	if err != nil {
		return fmt.Errorf("params: %w", err)
	}
	// Validation pass only: ensures exactly one platform is present.
	_, err = sortFeatures(allFeatures, features, false, true)
	if err != nil {
		return fmt.Errorf("params: %w", err)
	}
	// Merge the retained raw documents; dependency order means dependents
	// override their dependencies.
	yamls := make([]map[interface{}]interface{}, 0, len(features))
	for _, f := range features {
		yamls = append(yamls, allFeatures[f].yaml)
	}
	mergedYAML, err := spruce.Merge(yamls...)
	if err != nil {
		return fmt.Errorf("params: %w", err)
	}
	// Evaluate spruce operators; the first argument lists keys to prune,
	// args selects a cherry-pick subset (may be empty).
	e := &spruce.Evaluator{Tree: mergedYAML}
	err = e.Run([]string{"description", "type", "features"}, args)
	if err != nil {
		return fmt.Errorf("params: %w", err)
	}
	mergedYAML = e.Tree
	err = printShellVars(mergedYAML)
	if err != nil {
		return fmt.Errorf("params: %w", err)
	}
	return nil
}
// platformCmd prints the platform feature of the expanded selection.
func platformCmd(allFeatures featureSet, features []string, ignore []string, _ []string) error {
	wrap := func(err error) error { return fmt.Errorf("platform: %w", err) }
	expanded, _, err := expandFeatures(allFeatures, features, makeSet(ignore))
	if err != nil {
		return wrap(err)
	}
	// Sorting with platform validation guarantees a platform exists and is
	// placed first, so indexing position 0 below is safe.
	sorted, err := sortFeatures(allFeatures, expanded, false, true)
	if err != nil {
		return wrap(err)
	}
	if err = printStrings(sorted[0]); err != nil {
		return wrap(err)
	}
	return nil
}
// feature is one parsed feature definition (a directory's info.yaml or a
// top-level <name>.yaml). Only the fields needed for dependency resolution
// are unmarshalled strongly; the full document is retained in yaml for
// spruce merging by paramsCmd.
type feature struct {
	Description string `yaml:"description,omitempty"`
	Type        string `yaml:"type,omitempty"`
	Features    struct {
		Include []string `yaml:"include,omitempty"`
		Exclude []string `yaml:"exclude,omitempty"`
	} `yaml:"features,omitempty"`
	// yaml holds the raw document tree for parameter merging.
	yaml map[interface{}]interface{}
}

// featureSet maps a feature name to its parsed definition.
type featureSet map[string]feature

// graph maps a vertex (feature name) to its adjacent vertices (direct includes).
type graph map[string][]string

// set is a string set implemented with zero-width values.
type set map[string]struct{}
// buildInclusionGraph derives the feature-inclusion graph: each feature name
// maps to the list of features it directly includes.
func buildInclusionGraph(allFeatures featureSet) graph {
	g := make(graph, len(allFeatures))
	for name, feat := range allFeatures {
		g[name] = feat.Features.Include
	}
	return g
}
// makeSet builds a set from a slice of strings; duplicates collapse.
func makeSet(items []string) set {
	result := make(set, len(items))
	for _, item := range items {
		result[item] = struct{}{}
	}
	return result
}
// readFeatures loads every feature definition below featDir. A feature is
// either a subdirectory containing an info.yaml, or a top-level
// "<name>.yaml" file; anything else is skipped. Every loaded feature must
// have one of the given types, otherwise an error is returned.
//
// Fix: top-level "<name>.yaml" files were previously read relative to the
// current working directory instead of featDir (the directory branch already
// used filepath.Join); the path is now joined with featDir.
func readFeatures(featDir string, types ...string) (featureSet, error) {
	entries, err := ioutil.ReadDir(featDir)
	if err != nil {
		return nil, err
	}
	allFeatures := make(featureSet)
	for _, e := range entries {
		var featFile, featName string
		if e.IsDir() {
			featName = e.Name()
			featFile = filepath.Join(featDir, featName, "info.yaml")
			// Directories without an info.yaml are not features; skip them.
			if _, err = os.Stat(featFile); os.IsNotExist(err) {
				continue
			}
		} else if filepath.Ext(e.Name()) == ".yaml" {
			featName = e.Name()[:len(e.Name())-5] // strip ".yaml"
			featFile = filepath.Join(featDir, e.Name())
		} else {
			continue
		}
		featData, err := ioutil.ReadFile(featFile)
		if err != nil {
			return nil, err
		}
		var f feature
		err = yaml.Unmarshal(featData, &f)
		if err != nil {
			return nil, err
		}
		// Every feature must declare one of the accepted types.
		accept := false
		for _, t := range types {
			if f.Type == t {
				accept = true
			}
		}
		if !accept {
			return nil, fmt.Errorf("feature %s has unsupported type %s", featName, f.Type)
		}
		// Keep the raw document tree as well, for later spruce merging.
		y, err := simpleyaml.NewYaml(featData)
		if err != nil {
			return nil, err
		}
		f.yaml, err = y.Map()
		if err != nil {
			return nil, err
		}
		allFeatures[featName] = f
	}
	return allFeatures, nil
}
// sortFeatures partitions features into platforms, other elements, and flags,
// and returns them concatenated in that order. With strict set, each group is
// additionally sorted alphabetically. With validatePlatform set, exactly one
// platform must be present, otherwise an error is returned. Unknown feature
// names are always an error.
func sortFeatures(allFeatures featureSet, unsorted []string, strict, validatePlatform bool) ([]string, error) {
	var platforms, others, flags []string
	for _, f := range unsorted {
		feat, ok := allFeatures[f]
		if !ok {
			return nil, fmt.Errorf("feature %s does not exist", f)
		}
		if feat.Type == "platform" {
			if validatePlatform && len(platforms) > 0 {
				return nil, fmt.Errorf("cannot have multiple platforms: %s and %s", platforms[0], f)
			}
			platforms = append(platforms, f)
		} else if feat.Type == "flag" {
			flags = append(flags, f)
		} else {
			others = append(others, f)
		}
	}
	if validatePlatform && len(platforms) == 0 {
		return nil, fmt.Errorf("must have a platform")
	}
	if strict {
		sort.Strings(platforms)
		sort.Strings(others)
		sort.Strings(flags)
	}
	// Concatenate: platform(s) first, then other elements, then flags.
	sorted := make([]string, len(unsorted))
	n := copy(sorted, platforms)
	n += copy(sorted[n:], others)
	copy(sorted[n:], flags)
	return sorted, nil
}
// filterByType returns the subset of features whose type matches any of the
// given types, preserving the input order. Unknown feature names are an
// error.
func filterByType(allFeatures featureSet, features []string, types ...string) ([]string, error) {
	var matched []string
	for _, name := range features {
		feat, ok := allFeatures[name]
		if !ok {
			return nil, fmt.Errorf("feature %s does not exist", name)
		}
		for _, typ := range types {
			if feat.Type == typ {
				matched = append(matched, name)
			}
		}
	}
	return matched, nil
}
// printStrings writes the given strings to stdout, space-separated and
// terminated by a newline.
func printStrings(strs ...string) error {
	sep := ""
	for _, s := range strs {
		// fmt.Print inserts no space between adjacent string operands, so
		// this emits exactly sep followed by s.
		if _, err := fmt.Print(sep, s); err != nil {
			return err
		}
		sep = " "
	}
	_, err := fmt.Println()
	return err
}
// printCname writes the canonical name to stdout: features joined with "-",
// except that flag-type features are appended with no separator. A trailing
// newline is emitted. Unknown feature names are an error.
func printCname(allFeatures featureSet, features []string) error {
	for i, name := range features {
		feat, ok := allFeatures[name]
		if !ok {
			return fmt.Errorf("feature %s does not exist", name)
		}
		sep := ""
		// Flags fuse onto the previous component; everything else after the
		// first component gets a dash separator.
		if i > 0 && feat.Type != "flag" {
			sep = "-"
		}
		if _, err := fmt.Printf("%s%s", sep, name); err != nil {
			return err
		}
	}
	_, err := fmt.Println()
	return err
}
// printShellVars renders a YAML tree as shell variable assignments:
//   - scalars become NAME='value'
//   - arrays of scalars become NAME=( 'v1' 'v2' ... )
//   - arrays with complex members and maps become NAME=( 'child1' ... )
//     name lists, with each child then emitted under the prefix NAME_child
//     (array children are named by index, or by their "name" key if present)
//   - the root map is emitted under the variable "_"
//
// Traversal is iterative with an explicit stack; children are pushed in
// reverse so output preserves the tree's order.
func printShellVars(root map[interface{}]interface{}) error {
	// prefixNode pairs a subtree with the shell variable prefix it is
	// emitted under.
	type prefixNode struct {
		prefix string
		node   interface{}
	}
	stack := stackgo.NewStack()
	stack.Push(prefixNode{"", root})
	for stack.Size() > 0 {
		pn := stack.Pop().(prefixNode)
		switch pn.node.(type) {
		case bool, int, string:
			// Scalar leaf: a plain assignment.
			_, err := fmt.Printf("%s='%v'\n", pn.prefix, pn.node)
			if err != nil {
				return err
			}
		case []interface{}:
			a := pn.node.([]interface{})
			// Arrays containing only scalars are emitted inline.
			onlyScalars := true
			for _, val := range a {
				switch val.(type) {
				case bool, int, string:
				default:
					onlyScalars = false
				}
			}
			if onlyScalars {
				_, err := fmt.Printf("%s=(", pn.prefix)
				if err != nil {
					return err
				}
				for _, val := range a {
					_, err = fmt.Printf(" '%v'", val)
					if err != nil {
						return err
					}
				}
				_, err = fmt.Println(" )")
				if err != nil {
					return err
				}
				continue
			}
			// Mixed/complex array: emit the member names, then recurse into
			// each member under its derived prefix.
			_, err := fmt.Printf("%s=(", pn.prefix)
			if err != nil {
				return err
			}
			prefixNodes := make([]prefixNode, 0, len(a))
			for i, val := range a {
				// Default child name is the index; a scalar "name" key in a
				// map member overrides it.
				prefix := strconv.Itoa(i)
				if m, ok := val.(map[interface{}]interface{}); ok {
					if nameVal, k := m["name"]; k {
						switch nameVal.(type) {
						case bool, int, string:
							prefix = fmt.Sprintf("%v", nameVal)
						}
					}
				}
				if pn.prefix != "" {
					prefix = pn.prefix + "_" + prefix
				}
				_, err = fmt.Printf(" '%s'", prefix)
				if err != nil {
					return err
				}
				prefixNodes = append(prefixNodes, prefixNode{prefix, val})
			}
			_, err = fmt.Println(" )")
			if err != nil {
				return err
			}
			// Push in reverse so members are emitted in array order.
			for i := len(prefixNodes) - 1; i >= 0; i-- {
				stack.Push(prefixNodes[i])
			}
		case map[interface{}]interface{}:
			m := pn.node.(map[interface{}]interface{})
			// The root map has no prefix; emit its key list under "_".
			headerPrefix := pn.prefix
			if pn.prefix == "" {
				headerPrefix = "_"
			}
			_, err := fmt.Printf("%s=(", headerPrefix)
			if err != nil {
				return err
			}
			prefixNodes := make([]prefixNode, 0, len(m))
			for key, val := range m {
				prefix := key.(string)
				if pn.prefix != "" {
					prefix = pn.prefix + "_" + prefix
				}
				_, err = fmt.Printf(" '%s'", prefix)
				if err != nil {
					return err
				}
				prefixNodes = append(prefixNodes, prefixNode{prefix, val})
			}
			_, err = fmt.Println(" )")
			if err != nil {
				return err
			}
			// NOTE(review): map iteration order is random in Go, so the
			// relative order of map children is not deterministic here.
			for i := len(prefixNodes) - 1; i >= 0; i-- {
				stack.Push(prefixNodes[i])
			}
		}
	}
	return nil
}
// postorderDFS performs an iterative depth-first traversal of g starting at
// origin, invoking processVertex on each vertex in postorder (after all of
// its descendants). allowVertex, when non-nil, prunes a vertex and its
// subtree by returning false. seen may be shared across calls so repeated
// traversals skip already-processed vertices; pass nil for a fresh
// traversal. A cycle reachable from origin is reported as an error.
func postorderDFS(g graph, seen set, origin string, allowVertex func(string) bool, processVertex func(string)) error {
	if _, ok := g[origin]; !ok {
		return fmt.Errorf("%s is not part of the graph", origin)
	}
	n := len(g)
	if seen == nil {
		seen = make(set, n)
	}
	// Already fully processed in an earlier call sharing this seen set.
	if _, ok := seen[origin]; ok {
		return nil
	}
	// hot holds the vertices on the current DFS path; reaching one again
	// through an edge means the graph has a cycle.
	hot := make(set, n)
	stack := stackgo.NewStack()
	seen[origin] = struct{}{}
	stack.Push(origin)
	for stack.Size() > 0 {
		// Peek, don't pop: a vertex stays on the stack until all its
		// children have been processed.
		v := stack.Top().(string)
		if allowVertex != nil && !allowVertex(v) {
			stack.Pop()
			continue
		}
		hot[v] = struct{}{}
		done := true
		edges := g[v]
		// Push children in reverse so they are visited in edge order.
		for i := len(edges) - 1; i >= 0; i-- {
			if _, ok := hot[edges[i]]; ok {
				return fmt.Errorf("%s is part of a loop", edges[i])
			}
			if _, ok := seen[edges[i]]; !ok {
				done = false
				seen[edges[i]] = struct{}{}
				stack.Push(edges[i])
			}
		}
		if done {
			// All children processed: emit v in postorder and remove it
			// from the current path.
			stack.Pop()
			delete(hot, v)
			if processVertex != nil {
				processVertex(v)
			}
		}
	}
	return nil
}
// expandFeatures resolves the transitive include closure of features,
// skipping any feature present in ignored. It returns the expansion in
// postorder (dependencies before dependents), the subset of ignored that was
// actually encountered, and an error for unknown features, include cycles,
// or when an expanded feature appears in another feature's exclude list.
func expandFeatures(allFeatures featureSet, features []string, ignored set) ([]string, set, error) {
	gInc := buildInclusionGraph(allFeatures)
	collectedIgn := make(set)
	collectedExcl := make(set)
	var expanded []string
	// seen is shared across the per-feature traversals so each feature is
	// emitted at most once overall.
	seen := make(set, len(gInc))
	for _, f := range features {
		err := postorderDFS(gInc, seen, f, func(v string) bool {
			// Prune ignored features (and their includes), remembering and
			// warning about each one encountered.
			_, ok := ignored[v]
			if ok {
				collectedIgn[v] = struct{}{}
				_, _ = fmt.Fprintf(os.Stderr, "WARNING: %s is being ignored\n", v)
			}
			return !ok
		}, func(v string) {
			expanded = append(expanded, v)
			// Gather exclusions declared by every expanded feature.
			for _, e := range allFeatures[v].Features.Exclude {
				collectedExcl[e] = struct{}{}
			}
		})
		if err != nil {
			return nil, nil, err
		}
	}
	// A feature may not be both part of the expansion and excluded by a
	// member of the expansion.
	for _, f := range expanded {
		if _, ok := collectedExcl[f]; ok {
			return nil, nil, fmt.Errorf("%s has been excluded by another feature", f)
		}
	}
	return expanded, collectedIgn, nil
}
// reduceFeatures computes a minimal subset of features that still generates
// the same include closure: any feature reachable via includes from another
// selected feature is dropped. Exclusions declared by visited features are
// checked against the visited set.
//
// NOTE(review): the input slice is modified in place — subsumed entries are
// blanked to "" — so callers should not reuse features afterwards.
func reduceFeatures(allFeatures featureSet, features []string, ignored set) ([]string, error) {
	gInc := buildInclusionGraph(allFeatures)
	collectedExcl := make(set)
	visited := make(set)
	minimal := make(set, len(features))
	i := 0
	for i < len(features) {
		// Entries blanked by an earlier iteration are already subsumed.
		if features[i] == "" {
			i++
			continue
		}
		f := features[i]
		i++
		_, ok := ignored[f]
		if ok {
			continue
		}
		// Tentatively keep f; a later traversal may remove it again if it
		// turns out to be included by another selected feature.
		minimal[f] = struct{}{}
		// Fresh traversal per feature (seen == nil) so shared includes are
		// re-walked and can subsume later selections.
		err := postorderDFS(gInc, nil, f, func(v string) bool {
			_, ok := ignored[v]
			return !ok
		}, func(v string) {
			if v == f {
				return
			}
			// v is included by f: blank any later occurrence in the input
			// and drop it from the minimal set.
			for j, h := range features[i:] {
				if h == v {
					features[i+j] = ""
				}
			}
			delete(minimal, v)
			visited[v] = struct{}{}
			for _, e := range allFeatures[v].Features.Exclude {
				collectedExcl[e] = struct{}{}
			}
		})
		if err != nil {
			return nil, err
		}
	}
	// No visited feature may also be excluded by a visited feature.
	for f := range visited {
		if _, ok := collectedExcl[f]; ok {
			return nil, fmt.Errorf("%s has been excluded by another feature", f)
		}
	}
	reduced := make([]string, 0, len(minimal))
	for f := range minimal {
		reduced = append(reduced, f)
	}
	return reduced, nil
}
|
/**
* Process the specified HTTP request, and create the corresponding HTTP
* response (or forward to another web component that will create it).
*
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
*
* @exception IOException if an input/output error occurs
* @exception ServletException if a servlet exception occurs
*/
public void execute(HttpServletRequest request, HttpServletResponse response)
throws IOException, ServletException {
boolean retval = HTTPUtils.validateRequestParameters(request);
if (!retval) {
try {
String nextJSP = "/pages/appscan_response.jsf";
RequestDispatcher dispatcher = getServletContext().getRequestDispatcher(nextJSP);
dispatcher.forward(request,response);
return;
} catch (Exception ex) {
ex.printStackTrace();
}
}
String action = HTTPUtils.cleanXSS((String)request.getParameter("action"));
if (action == null) action = "concept";
String node_id = HTTPUtils.cleanXSS((String)request.getParameter("ontology_node_id"));
String ontology_display_name =
HTTPUtils.cleanXSS((String)request.getParameter("ontology_display_name"));
String ontology_source = HTTPUtils.cleanXSS((String)request.getParameter("ontology_source"));
long ms = System.currentTimeMillis();
if (action.equals("build_tree")) {
build_hierarchy(request, response);
} else if (action.equals("expand_tree")) {
expand_hierarchy(request, response);
} else if (action.equals("search_tree")) {
search_hierarchy(request, response);
} else if (action.equals("expand_tree0")) {
String node_id_0 = node_id;
if (node_id != null && ontology_display_name != null) {
int pos = node_id.indexOf("|");
if (pos != -1) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
nodesArray = CacheController.getInstance().getRemainingSubconcepts(node_id_0);
if (nodesArray != null) {
json.put("nodes", nodesArray);
}
} catch (Exception e) {
e.printStackTrace();
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
}
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
if (ontology_source == null) {
nodesArray =
CacheController.getInstance().getSubconcepts(
ontology_display_name, null, node_id);
} else {
/*
nodesArray =
CacheController.getInstance()
.getSubconceptsBySource(ontology_display_name,
null, node_id, ontology_source);
*/
nodesArray = CacheController.getInstance().getRemainingSubconcepts(node_id_0 + "|" + ontology_source + "|0");
}
if (nodesArray != null) {
json.put("nodes", nodesArray);
}
} catch (Exception e) {
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
}
}
else if (action.equals("search_tree0")) {
if (node_id != null && ontology_display_name != null) {
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
/*
* JSONObject json = new JSONObject();
*
* try { String max_tree_level_str = null; int maxLevel = -1;
* try { max_tree_level_str =
* NCImBrowserProperties.getInstance()
* .getProperty(NCImBrowserProperties.MAXIMUM_TREE_LEVEL);
* maxLevel = Integer.parseInt(max_tree_level_str); } catch
* (Exception ex) { }
*
* JSONArray rootsArray = null; rootsArray =
* CacheController.getInstance
* ().getPathsToRoots(ontology_display_name, null, node_id,
* ontology_source, true, maxLevel); if (rootsArray.length() ==
* 0) {//_logger.debug(
* "AjaxServlet getPathsToRoots finds no path -- calling getRootConceptsBySource..."
* ); //rootsArray =
* CacheController.getInstance().getRootConceptsBySource
* (ontology_display_name, null, ontology_source); }
*
* json.put("root_nodes", rootsArray); } catch (Exception e) {
* e.printStackTrace(); }
*
* response.getWriter().write(json.toString());
* _logger.debug("search_tree: " + json.toString());
*/
String t =
CacheController.getInstance().getPathsToRootsExt(
ontology_display_name, null, node_id, ontology_source,
false);
response.getWriter().write(t);
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
}
}
else if (action.equals("build_tree1")) {
if (ontology_display_name == null)
ontology_display_name = "NCI Thesaurus";
response.setContentType("text/html");
response.setHeader("Cache-Control", "no-cache");
JSONObject json = new JSONObject();
JSONArray nodesArray = null;
try {
if (ontology_source == null
|| ontology_source.compareTo("null") == 0) {
nodesArray =
CacheController.getInstance().getRootConcepts(
ontology_display_name, null);
} else {
nodesArray =
CacheController.getInstance().getSourceRoots(
ontology_display_name, null, ontology_source, true);
}
if (nodesArray != null) {
json.put("root_nodes", nodesArray);
}
} catch (Exception e) {
e.printStackTrace();
}
response.getWriter().write(json.toString());
_logger.debug("Run time (milliseconds): "
+ (System.currentTimeMillis() - ms));
return;
}
else if (action.equals("concept")) {
String concept_detail_scheme = HTTPUtils.cleanXSS((String)request.getParameter("dictionary"));
String concept_detail_code = HTTPUtils.cleanXSS((String)request.getParameter("code"));
String concept_detail_type = HTTPUtils.cleanXSS((String)request.getParameter("type"));
Entity c = DataUtils.getConceptByCode(
Constants.CODING_SCHEME_NAME, null, null, concept_detail_code);
request.getSession().setAttribute("code", concept_detail_code);
request.getSession().setAttribute("concept", c);
request.getSession().setAttribute("type", "properties");
request.getSession().setAttribute("new_search", Boolean.TRUE);
response.setContentType("text/html;charset=utf-8");
String response_page_url = request.getContextPath()
+ "/pages/concept_details.jsf?dictionary="
+ concept_detail_scheme
+ "&code="
+ concept_detail_code
+ "&type="
+ concept_detail_type;
response.sendRedirect(response_page_url);
} else if (action.equals("cart")) {
processCartActions(request, response);
} else if (action.equals("addtocart")) {
addToCart(request, response);
}
} |
<gh_stars>1-10
import mongoose from 'mongoose';

/**
 * Shape of a log document persisted in MongoDB via mongoose.
 */
export default interface ILog extends mongoose.Document {
    /** Mongo object id of the document. */
    _id: mongoose.Schema.Types.ObjectId;
    /** Log message text. */
    message: string;
    /** Creation timestamp, stored as a string. */
    created_at: string;
}
|
// from 3GPP TS 27.005 3.3.4 Select Cell Broadcast Message Types
//
// Removes the message-id interval [fromMsgId, toMsgId] from the configured
// cell-broadcast ranges, splitting or trimming existing ranges as needed.
// Returns true if any stored range was changed. rangeList_ is kept sorted.
bool SmsMiscManager::CloseCBRange(uint32_t fromMsgId, uint32_t toMsgId)
{
    bool isChange = false;
    auto iter = rangeList_.begin();
    while (iter != rangeList_.end()) {
        // Advance first so erasing oldIter does not invalidate the loop.
        auto oldIter = iter++;
        auto &info = *oldIter;
        if (fromMsgId >= info.fromMsgId && toMsgId <= info.toMsgId) {
            // Closed interval lies entirely inside this range.
            isChange = true;
            if (info.fromMsgId == fromMsgId && info.toMsgId == toMsgId) {
                // Exact match: drop the whole range.
                rangeList_.erase(oldIter);
                break;
            } else if (info.fromMsgId == fromMsgId && info.toMsgId != toMsgId) {
                // Trim from the front; keep the tail.
                rangeList_.emplace_back(toMsgId + 1, info.toMsgId);
                rangeList_.erase(oldIter);
                break;
            } else if (info.fromMsgId != fromMsgId && info.toMsgId == toMsgId) {
                // Trim from the back; keep the head.
                rangeList_.emplace_back(info.fromMsgId, fromMsgId - 1);
                rangeList_.erase(oldIter);
                break;
            } else if (fromMsgId > info.fromMsgId && toMsgId < info.toMsgId) {
                // Strictly inside: split into head and tail pieces.
                rangeList_.emplace_back(info.fromMsgId, fromMsgId - 1);
                rangeList_.emplace_back(toMsgId + 1, info.toMsgId);
                rangeList_.erase(oldIter);
                break;
            }
        } else if (fromMsgId <= info.fromMsgId && toMsgId >= info.toMsgId && !isChange) {
            // Closed interval fully covers this range: drop it and keep
            // scanning for further overlapped ranges.
            isChange = true;
            rangeList_.erase(oldIter);
        } else if (isChange && toMsgId >= info.toMsgId) {
            // Continuation of a multi-range close: this range is covered too.
            rangeList_.erase(oldIter);
        } else if (isChange && toMsgId < info.toMsgId && toMsgId >= info.fromMsgId) {
            // Continuation that ends inside this range: keep its tail.
            rangeList_.emplace_back(toMsgId + 1, info.toMsgId);
            rangeList_.erase(oldIter);
            rangeList_.sort();
            break;
        } else if ((fromMsgId > info.fromMsgId && fromMsgId < info.toMsgId) && toMsgId >= info.toMsgId &&
            !isChange) {
            // Close starts inside this range and extends past it: keep head.
            isChange = true;
            rangeList_.emplace_back(info.fromMsgId, fromMsgId - 1);
            rangeList_.erase(oldIter);
            rangeList_.sort();
        } else if (fromMsgId == info.toMsgId && toMsgId >= info.toMsgId && !isChange) {
            // Close starts exactly at this range's upper bound: keep head.
            isChange = true;
            rangeList_.emplace_back(info.fromMsgId, fromMsgId - 1);
            rangeList_.erase(oldIter);
            rangeList_.sort();
        }
    }
    if (isChange) {
        rangeList_.sort();
        return true;
    } else {
        return false;
    }
}
// InitializeDatabase opens database connection and initializes schema if it does not exist
func InitializeDatabase(connectionURL string) (*dbr.Connection, error) {
connection, err := WaitForDatabaseAccess(connectionURL, connectionRetries)
if err != nil {
return nil, err
}
initialized, err := checkIfDatabaseInitialized(connection)
if err != nil {
closeDBConnection(connection)
return nil, errors.Wrap(err, "Failed to check if database is initialized")
}
if initialized {
log.Info("Database already initialized")
return connection, nil
}
return connection, nil
} |
import { Navbar } from '../../components';
import { LayoutContainer } from './styles';
type Props = {
children: unknown;
};
const MainLayout = ({ children }: Props): JSX.Element => (
<>
<Navbar />
<LayoutContainer>{children}</LayoutContainer>
</>
);
export default MainLayout;
|
// NewCASFileStoreWithLRUMap initializes and returns a new Content-Addressable
// FileStore. It uses the first few bytes of file digest (which is also used as
// file name) as shard ID.
// For every byte, one more level of directories will be created. It also stores
// objects in a LRU FileStore.
// When size exceeds limit, the least recently accessed entry will be removed.
func NewCASFileStoreWithLRUMap(size int, clk clock.Clock) FileStore {
m := NewLRUFileMap(size, clk)
return &localFileStore{
fileEntryFactory: NewCASFileEntryFactory(),
fileMap: m,
}
} |
def backward_elimaination_approach(self):
logger.add_info_log(
"Enter class LinearRegressionWithFeatureSelection : backward_elimaination_approach function")
try:
cols = list(self.x_train.columns)
pmax = 1
while len(cols) > 0:
p = []
x_1 = self.x_train[cols]
x_1 = sm.add_constant(
x_1)
lr = sm.OLS(self.y_train,
x_1).fit()
p = pd.Series(lr.pvalues.values[1:], index=cols)
pmax = max(p)
feature_with_p_max = p.idxmax()
if pmax > 0.05:
cols.remove(feature_with_p_max)
else:
break
selected_features_be = cols
print()
print("Features selected by the Backward elimination method in Linear regression are ",
selected_features_be)
print()
x_train_be = self.x_train[selected_features_be]
x_test_be = self.x_test[selected_features_be]
lr = LinearRegression()
lr.fit(x_train_be, self.y_train)
y_pred_train_be = lr.predict(x_train_be)
y_pred_test_be = lr.predict(x_test_be)
logger.add_info_log("class LinearRegressionWithFeatureSelection : backward_elimaination_approach. Model "
"Build successfully")
return lr, x_train_be, y_pred_train_be, x_test_be, y_pred_test_be, selected_features_be
except Exception as e:
logger.add_exception_log(f'class LinearRegressionWithFeatureSelection : backward_elimaination_approach. Model '
f'Build failed. Exception {str(e)}') |
// buildCommand builds the `gcloud run deploy` command. We can pass all fields in here, the command
// builder will only set flags for valid inputs and return an error or ignore
// on invalid inputs.
//
// All deployment settings come from dd; only the project ID is taken from
// credentials. The builder itself validates each value, so this method does
// no input checking of its own.
func (cc *Controller) buildCommand(dd cloudrunner.DeploymentDescription,
	credentials cloudrunner.Credentials) (gcloud.CloudRunCommand, error) {
	return cc.Builder.
		AllowUnauthenticated(dd.AllowUnauthenticated).
		Image(dd.Image).
		MaxInstances(dd.MaxInstances).
		Memory(dd.Memory).
		ProjectID(credentials.ProjectID).
		Region(dd.Region).
		Service(dd.Service).
		VPCConnector(dd.VPCConnector).
		Build()
}
/**
* Adds every entry necessary in the closure table where the given space is the root
*
* @param jobSpaceId
* @return
*/
private static boolean updateJobSpaceClosureTable(int jobSpaceId, Connection con) {
try {
Timestamp time = new Timestamp(System.currentTimeMillis());
JobSpace root = new JobSpace();
root.setId(jobSpaceId);
List<JobSpace> spaces = Spaces.getSubSpacesForJob(jobSpaceId, true);
spaces.add(root);
for (JobSpace s : spaces) {
boolean success = addToJobSpaceClosure(root.getId(), s.getId(), time, con);
if (!success) {
return false;
}
}
return true;
} catch (Exception e) {
log.error("updateJobSpaceClosureTable", e);
}
return false;
} |
<reponame>uninth/UNItools
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim:expandtab:autoindent:tabstop=4:shiftwidth=4:filetype=c:cindent:textwidth=0:
*
* Copyright (C) 2005 Dell Inc.
* by <NAME> <<EMAIL>>
* Licensed under the Open Software License version 2.1
*
* Alternatively, you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation; either version 2 of the License,
* or (at your option) any later version.
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*/
#define LIBSMBIOS_C_SOURCE
// Include compat.h first, then system headers, then public, then private
#include "smbios_c/compat.h"
// system
#include <stdarg.h>
#include <stdlib.h>
#include <string.h>
// public
#include "smbios_c/obj/smi.h"
#include "smbios_c/smbios.h"
#include "smbios_c/smi.h"
#include "libsmbios_c_intlize.h"
#include "internal_strl.h"
// private
#include "smi_impl.h"
// forward declarations
void _smi_free(struct dell_smi_obj *m);
// static vars
static struct dell_smi_obj singleton; // auto-init to 0
typedef int (*init_fn)(struct dell_smi_obj *);
static char *module_error_buf; // auto-init to 0
/* Library destructor: release the module-level error buffer on unload. */
__attribute__((destructor)) static void return_mem(void)
{
    fnprintf("\n");
    free(module_error_buf);
    module_error_buf = 0;
}
/* Lazily allocate and return the module-level error buffer.
 * Returns NULL if the allocation fails. */
static char *smi_get_module_error_buf()
{
    fnprintf("\n");
    if (!module_error_buf)
        module_error_buf = calloc(1, ERROR_BUFSIZE);
    return module_error_buf;
}
/* Zero both the per-object error string (if any) and the module-level
 * error buffer. Safe to call with a NULL object. */
static void clear_err(const struct dell_smi_obj *this)
{
    fnprintf("\n");
    if (this && this->errstring)
        memset(this->errstring, 0, ERROR_BUFSIZE);
    if(module_error_buf)
        memset(module_error_buf, 0, ERROR_BUFSIZE);
}
/* dell_smi_factory: obtain a dell_smi_obj.
 * flags select the shared singleton vs a fresh heap allocation, and
 * DELL_SMI_UNIT_TEST_MODE substitutes a caller-provided init function
 * (passed as the first vararg). Returns NULL on allocation or
 * initialization failure. */
struct dell_smi_obj *dell_smi_factory(int flags, ...)
{
    va_list ap;
    struct dell_smi_obj *toReturn = 0;
    int ret;
    fnprintf("\n");

    if (flags==DELL_SMI_DEFAULTS)
        flags = DELL_SMI_GET_SINGLETON;

    if (flags & DELL_SMI_GET_SINGLETON)
        toReturn = &singleton;
    else
        toReturn = (struct dell_smi_obj *)calloc(1, sizeof(struct dell_smi_obj));

    /* Fix: calloc can fail; the original dereferenced toReturn without
     * checking, crashing on out-of-memory. */
    if (!toReturn)
        goto out;

    if (toReturn->initialized)
        goto out;

    if (flags & DELL_SMI_UNIT_TEST_MODE)
    {
        va_start(ap, flags);
        init_fn fn = va_arg(ap, init_fn);
        fnprintf("call fn pointer: %p\n", fn);
        ret = fn(toReturn);
        va_end(ap);
    } else
    {
        fnprintf("default init\n");
        ret = init_dell_smi_obj(toReturn);
    }

    if (ret == 0)
        goto out;

    // failed
    fnprintf("failed\n");
    toReturn->initialized = 0;
    toReturn = 0;

out:
    if (toReturn && ! (flags & DELL_SMI_NO_ERR_CLEAR))
        clear_err(toReturn);
    return toReturn;
}
/* Free a factory-created SMI object; the static singleton is never freed. */
void dell_smi_obj_free(struct dell_smi_obj *m)
{
    fnprintf("\n");
    if (m && m != &singleton)
        _smi_free(m);
}
/* Return the most recent error string: the object's own buffer when an
 * object is supplied, otherwise the module-level buffer. */
const char *dell_smi_obj_strerror(struct dell_smi_obj *s)
{
    fnprintf("\n");
    const char *retval = s ? s->errstring : module_error_buf;
    fnprintf("error string: %s\n", retval);
    return retval;
}
/* Record the SMI class code for the next call; no-op on a NULL object. */
void dell_smi_obj_set_class(struct dell_smi_obj *this, u16 smi_class)
{
    fnprintf(" %d\n", smi_class);
    clear_err(this);
    if(this)
        this->smi_buf.smi_class = smi_class;
}
/* Record the SMI select code for the next call; no-op on a NULL object. */
void dell_smi_obj_set_select(struct dell_smi_obj *this, u16 smi_select)
{
    fnprintf(" %d\n", smi_select);
    clear_err(this);
    if(this)
        this->smi_buf.smi_select = smi_select;
}
/* Set SMI argument slot argno to an immediate value, releasing any buffer
 * previously attached to that slot. */
void dell_smi_obj_set_arg(struct dell_smi_obj *this, u8 argno, u32 value)
{
    fnprintf(" %d -> 0x%x\n", argno, value);
    clear_err(this);
    /* Fix: bound argno the same way dell_smi_obj_make_buffer_X does
     * (argno > 3 is rejected there); the original indexed the argument
     * arrays with an unchecked u8. */
    if(!this || argno > 3) goto out;
    free(this->physical_buffer[argno]);
    this->physical_buffer[argno] = 0;
    this->physical_buffer_size[argno] = 0;
    this->smi_buf.arg[argno] = value;
out:
    return;
}
/* Return SMI result slot argno, or 0 for a NULL object or out-of-range
 * slot. */
u32 dell_smi_obj_get_res(struct dell_smi_obj *this, u8 argno)
{
    clear_err(this);
    u32 retval = 0;
    /* Fix: bound argno (dell_smi_obj_make_buffer_X rejects argno > 3);
     * the original indexed res[] with an unchecked u8. */
    if (this && argno <= 3)
        retval = this->smi_buf.res[argno];
    fnprintf(" %d = 0x%x\n", argno, retval);
    return retval;
}
/* Allocate a zeroed physical buffer of the given size for argument slot
 * argno, replacing any previous buffer. Returns the buffer, or NULL on bad
 * arguments or allocation failure. */
static u8 * dell_smi_obj_make_buffer_X(struct dell_smi_obj *this, u8 argno, size_t size)
{
    u8 *retval = 0;
    fnprintf("\n");
    clear_err(this);
    if (argno>3 || !this)
        goto out;

    this->smi_buf.arg[argno] = 0;
    free(this->physical_buffer[argno]);
    this->physical_buffer[argno] = calloc(1, size);
    /* Fix: only record the size when the allocation succeeded; the
     * original left a nonzero size paired with a NULL buffer on calloc
     * failure. */
    if (this->physical_buffer[argno])
        this->physical_buffer_size[argno] = size;
    else
        this->physical_buffer_size[argno] = 0;
    retval = this->physical_buffer[argno];
out:
    return retval;
}
const char *bufpat = "DSCI";

/* Allocate a BIOS-output buffer preceded by a u32 length header (used for
 * Dell SMBIOS implementation version >= 2). The whole allocation is filled
 * with the "DSCI" pattern, then the first four bytes are overwritten with
 * the payload size. Returns a pointer just past the header, or NULL on
 * failure. */
u8 * dell_smi_obj_make_buffer_frombios_withheader(struct dell_smi_obj *this, u8 argno, size_t size)
{
    // allocate 4 extra bytes to hold size marker at the beginning
    u8 *buf = dell_smi_obj_make_buffer_X(this, argno, size + sizeof(u32));
    fnprintf("\n");
    if(buf)
    {
        // write buffer pattern
        /* Fix: use size_t for the index to avoid a signed/unsigned
         * comparison against the size_t bound (int overflows first on
         * very large sizes). */
        for (size_t i=0; i<size+sizeof(u32); i++)
            buf[i] = bufpat[i%4];

        // write size of remaining bytes
        memcpy(buf, &size, sizeof(u32));
        buf += sizeof(u32);
    }
    return buf;
}
/* Allocate a BIOS-output buffer with no length header (used for Dell
 * SMBIOS implementation version < 2). */
u8 * dell_smi_obj_make_buffer_frombios_withoutheader(struct dell_smi_obj *this, u8 argno, size_t size)
{
    fnprintf("\n");
    return dell_smi_obj_make_buffer_X(this, argno, size);
}
/* Allocate a BIOS-output buffer, choosing the headered layout when the
 * Dell SMBIOS implementation version (SMBIOS struct 0xD0, offset 4) is
 * >= 2, otherwise the header-less layout. */
u8 * dell_smi_obj_make_buffer_frombios_auto(struct dell_smi_obj *this, u8 argno, size_t size)
{
    clear_err(this);
    u8 smbios_ver = 1;
    u8 *retval = 0;

    // look in smbios struct 0xD0 (Revisions and IDs) to find the Dell SMBIOS implementation version
    // offset 4 of the struct == dell major version
    struct smbios_struct *s = smbios_get_next_struct_by_type(0, 0xd0);
    /* Fix: guard against a missing 0xD0 structure; fall back to the
     * default version 1 (header-less buffer) instead of passing NULL to
     * smbios_struct_get_data. */
    if (s)
        smbios_struct_get_data(s, &(smbios_ver), 0x04, sizeof(u8));
    fnprintf("dell smbios ver: %d\n", smbios_ver);

    if (smbios_ver >= 2)
        retval = dell_smi_obj_make_buffer_frombios_withheader(this, argno, size);
    else
        retval = dell_smi_obj_make_buffer_frombios_withoutheader(this, argno, size);

    return retval;
}
/* Allocate a buffer whose contents will be passed to the BIOS. */
u8 * dell_smi_obj_make_buffer_tobios(struct dell_smi_obj *this, u8 argno, size_t size)
{
    return dell_smi_obj_make_buffer_X(this, argno, size);
}
/* Execute the prepared SMI through the object's execute hook.
 * res[0] is preset to -3 ("not handled"). Returns -1 for a NULL object or
 * missing hook, otherwise the hook's return value. */
int dell_smi_obj_execute(struct dell_smi_obj *this)
{
    fnprintf("\n");
    clear_err(this);
    int retval = -1;
    if(!this)
        goto out;
    this->smi_buf.res[0] = -3; //default to 'not handled'
    if (this->execute)
        retval = this->execute(this);
out:
    return retval;
}
/**************************************************
 *
 * Internal functions
 *
 **************************************************/

/* Tear down an SMI object: free all argument buffers and the error
 * string, then the object itself. Callers must not pass the singleton. */
void __internal _smi_free(struct dell_smi_obj *this)
{
    fnprintf("\n");
    this->initialized=0;
    for (int i=0;i<4;++i)
    {
        free(this->physical_buffer[i]);
        this->physical_buffer[i]=0;
        this->physical_buffer_size[i] = 0;
    }

    free(this->errstring);
    this->errstring = 0;
    free(this);
}
/* Standard initializer: read the SMI command address and command code from
 * the SMBIOS type 0xDA structure and allocate the object's error string.
 * Returns 0 on success, -1 on failure (module error buffer filled in). */
int __internal init_dell_smi_obj_std(struct dell_smi_obj *this)
{
    int retval = 0;
    char *errbuf = 0;
    fnprintf("\n");

    /* Fix: the lookup below is for structure type 0xDA, but the message
     * previously claimed 0xD4. */
    const char *error = _("Failed to find appropriate SMBIOS 0xDA structure.\n");
    struct smbios_struct *s = smbios_get_next_struct_by_type(0, 0xda);
    if (s) {
        smbios_struct_get_data(s, &(this->command_address), 4, sizeof(u16));
        smbios_struct_get_data(s, &(this->command_code), 6, sizeof(u8));
    }
    else
        goto out_fail;

    error = _("Failed to allocate memory for error string.\n");
    this->errstring = calloc(1, ERROR_BUFSIZE);
    if (!this->errstring)
        goto out_fail;

    this->initialized = 1;
    goto out;

out_fail:
    fnprintf(" out_fail \n");
    retval = -1;
    errbuf = smi_get_module_error_buf();
    if (errbuf){
        fnprintf("error: %s\n", error);
        strlcpy(errbuf, error, ERROR_BUFSIZE);
        fnprintf("smbios_strerror: %s\n", smbios_strerror());
        strlcat(errbuf, smbios_strerror(), ERROR_BUFSIZE);
    }

out:
    return retval;
}
|
//Process input and update the position
// Reads keyboard/controller/mouse input while the window has focus,
// integrates move/rotation speeds with damping, clamps pitch, and refreshes
// the camera's derived data.
void FlyCamera::Update(platform::Game* game, float ellapsed_time)
{
	if (game->IsFocus())
	{
		// Translation input: thumbstick plus WASD/arrow keys, scaled by frame time.
		float forward_input = game->GetInputSlotValue(platform::InputSlotValue::ControllerThumbLeftY);
		if (game->GetInputSlotState(platform::InputSlotState::Up) || game->GetInputSlotState(platform::InputSlotState::Key_W)) forward_input += 1.f * ellapsed_time;
		if (game->GetInputSlotState(platform::InputSlotState::Down) || game->GetInputSlotState(platform::InputSlotState::Key_S)) forward_input -= 1.f * ellapsed_time;
		float side_input = game->GetInputSlotValue(platform::InputSlotValue::ControllerThumbLeftX);
		if (game->GetInputSlotState(platform::InputSlotState::Right) || game->GetInputSlotState(platform::InputSlotState::Key_D)) side_input += 1.f * ellapsed_time;
		if (game->GetInputSlotState(platform::InputSlotState::Left) || game->GetInputSlotState(platform::InputSlotState::Key_A)) side_input -= 1.f * ellapsed_time;
		float up_input = (game->GetInputSlotValue(platform::InputSlotValue::ControllerRightTrigger) - game->GetInputSlotValue(platform::InputSlotValue::ControllerLeftTrigger));
		if (game->GetInputSlotState(platform::InputSlotState::PageUp) || game->GetInputSlotState(platform::InputSlotState::Key_Z)) up_input += 1.f * ellapsed_time;
		if (game->GetInputSlotState(platform::InputSlotState::PageDown) || game->GetInputSlotState(platform::InputSlotState::Key_X)) up_input -= 1.f * ellapsed_time;

		// Right mouse drag pans the camera.
		if (game->GetInputSlotState(platform::InputSlotState::RightMouseButton))
		{
			side_input += game->GetInputSlotValue(platform::InputSlotValue::MouseRelativePositionX) * m_mouse_move_factor;
			forward_input += game->GetInputSlotValue(platform::InputSlotValue::MouseRelativePositionY) * m_mouse_move_factor;
		}

		// Rotation input: right thumbstick, plus left mouse drag.
		glm::vec2 rotation_input;
		rotation_input.x = game->GetInputSlotValue(platform::InputSlotValue::ControllerThumbRightX) * ellapsed_time;
		rotation_input.y = game->GetInputSlotValue(platform::InputSlotValue::ControllerThumbRightY) * ellapsed_time;
		if (game->GetInputSlotState(platform::InputSlotState::LeftMouseButton))
		{
			rotation_input.x += game->GetInputSlotValue(platform::InputSlotValue::MouseRelativePositionX) * m_mouse_rotate_factor;
			rotation_input.y -= game->GetInputSlotValue(platform::InputSlotValue::MouseRelativePositionY) * m_mouse_rotate_factor;
		}

		// Mouse wheel adjusts the movement speed multiplier.
		for (auto& input_event : game->GetInputEvents())
		{
			if (input_event.type == platform::EventType::MouseWheel)
			{
				m_move_speed_factor += m_wheel_factor * input_event.value;
			}
		}
		m_move_speed_factor = glm::clamp(m_move_speed_factor, 0.2f, 5.f);

		// Convert inputs into world-space speed deltas using the camera basis.
		glm::vec3 move_speed;
		glm::mat3x3 rot = glm::rotate(m_rotation.y, glm::vec3(1.f, 0.f, 0.f)) * glm::rotate(m_rotation.x, glm::vec3(0.f, 0.f, 1.f));
		glm::vec3 forward = glm::vec3(0.f, 1.f, 0.f) * rot;
		glm::vec3 side = glm::vec3(1.f, 0.f, 0.f) * rot;
		move_speed = -side_input * m_move_factor * side * m_move_speed_factor * m_move_speed_factor;
		move_speed += forward_input * m_move_factor * forward * m_move_speed_factor * m_move_speed_factor;
		move_speed.z += up_input * m_move_factor;
		m_move_speed += move_speed;

		glm::vec2 angles;
		angles.x = -rotation_input.x * m_rotation_factor;
		angles.y = -rotation_input.y * m_rotation_factor;
		m_rotation_speed += angles;
	}

	// Integrate and clamp pitch to avoid flipping over the poles.
	m_position += m_move_speed * ellapsed_time;
	m_rotation += m_rotation_speed * ellapsed_time;
	if (m_rotation.y > glm::radians(85.f)) m_rotation.y = glm::radians(85.f);
	if (m_rotation.y < glm::radians(-85.f)) m_rotation.y = glm::radians(-85.f);

	// damp  (fix: this word was a bare token — a syntax error — in the original)
	m_move_speed -= m_move_speed * glm::clamp((m_damp_factor * ellapsed_time), 0.f, 1.f);
	m_rotation_speed -= m_rotation_speed * glm::clamp((m_damp_factor * ellapsed_time), 0.f, 1.f);

	UpdateInternalData();
}
#include<bits/stdc++.h>
using namespace std;
// long long int GCD(long long int a,long long int b)
// {
// if(b==0)
// return a;
// return GCD(b,a%b);
// }
// Return the decimal digit sum of n (0 for n == 0).
// Fix: removed the unused local `t` from the original.
long long int number(long long int n)
{
    long long int sum = 0;
    while (n != 0)
    {
        sum = sum + (n % 10);
        n /= 10;
    }
    return sum;
}
int main()
{long long int t;
cin>>t;
while(t--)
{long long int n;
cin>>n;
long long int m=n;
long long int sum=0;
while(1)
{
int x=__gcd(m,number(m));
if(x>1)
{
cout<<m<<endl;
break;
}
++m;
}
// long long int x=number(m);
// while(x==1)
// {
// m++;
// x=number(m);
// }
// if(t==0)
// cout<<m;
// else
// cout<<m<<endl;
}
}
|
package com.dburyak.sandbox.sandboxspringboot;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.AfterEach;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.data.mongo.AutoConfigureDataMongo;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
import org.testcontainers.containers.MongoDBContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import reactor.test.StepVerifier;
@SpringBootTest
@AutoConfigureDataMongo
@ActiveProfiles("integration-test")
@Testcontainers
@Slf4j
@DirtiesContext
public class MongoIntegrationTest {
    // Mongo image tag used for the testcontainer below.
    public static final String MONGO_VERSION;
    // NOTE(review): REDIS_VERSION is not referenced anywhere in this class —
    // presumably used by subclasses; confirm before removing.
    public static final String REDIS_VERSION = "6.0.1";

    static {
        // TODO: read version here from application-integration-test.yml config
        MONGO_VERSION = "4.4.4";
    }

    @Autowired
    protected ReactiveMongoOperations mongo;

    // One shared Mongo container for the whole test class.
    @Container
    protected static final MongoDBContainer MONGO_CONTAINER = new MongoDBContainer("mongo:" + MONGO_VERSION);

    /**
     * Points spring-data-mongodb at the container's replica-set URL, which is
     * only known after the container has started.
     */
    @DynamicPropertySource
    protected static void mongoProperties(DynamicPropertyRegistry reg) {
        reg.add("spring.data.mongodb.uri", () -> {
            log.debug("using mongo config: uri={}", MONGO_CONTAINER.getReplicaSetUrl());
            return MONGO_CONTAINER.getReplicaSetUrl();
        });
    }

    /**
     * Empties every collection after each test so tests stay independent.
     */
    @AfterEach
    protected void cleanupAllDataInDb() {
        log.debug("removing all mongo data");
        StepVerifier
                .create(mongo.getCollectionNames()
                        .flatMap(col -> mongo.remove(new Query(), col))
                        .collectList()
                )
                .expectNextCount(1L)
                .verifyComplete();
    }
}
|
    /**
     * readObject is called to restore the state of the URL from the
     * stream. It reads the components of the URL and finds the local
     * stream handler. It also rebuilds the derived fields (authority,
     * userInfo, path, query, hashCode) from the serialized host/port/file
     * components.
     */
    private synchronized void readObject(java.io.ObjectInputStream s)
        throws IOException, ClassNotFoundException
    {
        s.defaultReadObject();
        // A handler for the protocol must exist in this JVM.
        if ((handler = getURLStreamHandler(protocol)) == null) {
            throw new IOException("unknown protocol: " + protocol);
        }

        // Reconstruct authority (and userInfo) from host/port when the
        // serialized form predates the authority field.
        if (authority == null &&
            ((host != null && host.length() > 0) || port != -1)) {
            if (host == null)
                host = "";
            authority = (port == -1) ? host : host + ":" + port;

            // Split "user@host" into userInfo and bare host.
            int at = host.lastIndexOf('@');
            if (at != -1) {
                userInfo = host.substring(0, at);
                host = host.substring(at+1);
            }
        } else if (authority != null) {
            // Extract userInfo from an existing authority component.
            int ind = authority.indexOf('@');
            if (ind != -1)
                userInfo = authority.substring(0, ind);
        }

        // Split file into path and query at the last '?'.
        path = null;
        query = null;
        if (file != null) {
            int q = file.lastIndexOf('?');
            if (q != -1) {
                query = file.substring(q+1);
                path = file.substring(0, q);
            } else
                path = file;
        }

        // Force lazy recomputation of the cached hash code.
        hashCode = -1;
    }
package stats
import (
"fmt"
"net/http"
"time"
"github.com/mholt/caddy/middleware"
"github.com/rcrowley/go-metrics"
)
// ServeHTTP records request metrics around the next handler in the chain:
// a request counter tagged with the response status, an error counter, and
// a response-time timer. Requests to the configured stats UI path are
// served by statsHandler and tagged with the path name "getStats".
func (m *metricsModule) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error) {
	next := m.next
	path := ""
	if m.uiPath != "" && middleware.Path(r.URL.Path).Matches(m.uiPath) {
		next = statsHandler{}
		path = "getStats"
	}
	if path == "" {
		path = m.pathName(r.URL.Path, r.Method)
	}
	// every datapoint gets tagged with server and path. A few get some extra.
	// Fix: the tag map was named "m", shadowing the method receiver; renamed
	// for clarity.
	tags := func(extra ...string) map[string]string {
		t := map[string]string{"path": path, "server": m.serverName}
		if len(extra)%2 == 0 {
			for i := 1; i < len(extra); i += 2 {
				t[extra[i-1]] = extra[i]
			}
		}
		return t
	}
	start := time.Now()
	code, err := next.ServeHTTP(w, r)
	// Fix: idiomatic time.Since instead of time.Now().Sub(start).
	duration := time.Since(start)

	mname := MetricName("caddy.requests", tags("status", fmt.Sprint(code)))
	counter := metrics.GetOrRegisterCounter(mname, metrics.DefaultRegistry)
	counter.Inc(1)

	mname = MetricName("caddy.errors", tags())
	counter = metrics.GetOrRegisterCounter(mname, metrics.DefaultRegistry)
	if err != nil {
		counter.Inc(1)
	}

	mname = MetricName("caddy.response_time", tags())
	timer := metrics.GetOrRegisterTimer(mname, metrics.DefaultRegistry)
	timer.Update(duration)

	return code, err
}
// statsHandler serves the most recent stats snapshot as JSON.
type statsHandler struct{}

func (s statsHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error) {
	snapshotLock.RLock()
	defer snapshotLock.RUnlock()
	// Fix: headers must be set before the first Write — the original set
	// Content-Type after writing the body, so the header was never sent.
	w.Header().Set("Content-Type", "application/json")
	w.Write(currentJSON)
	return http.StatusOK, nil
}
// pathName maps a request URL and HTTP method to the configured metric
// path name. A path entry with a nil method list matches every method.
// Returns "/" when no configured path matches.
func (m *metricsModule) pathName(url string, method string) string {
	for _, p := range m.paths {
		if !middleware.Path(url).Matches(p.path) {
			continue
		}
		if p.methods == nil {
			return p.name
		}
		for _, allowed := range p.methods {
			if allowed == method {
				return p.name
			}
		}
	}
	return "/"
}
|
/**
* High-performance stream JSON parser. Provides the decoding algorithm to interpret the Errai Wire Protcol,
* including serializable types. This parser always assumes the outer payload is a Map. So it probably shouldn't
* be used as a general parser.
*
* @author Mike Brock
* @since 1.1
*/
public class JSONStreamDecoder {
private final CharBuffer buffer;
private final BufferedReader reader;
private char carry;
private int read;
private boolean initial = true;
  /**
   * Decodes the JSON payload by reading from the given stream of UTF-8 encoded
   * characters. Reads to the end of the input stream unless there are errors,
   * in which case the current position in the stream will not be at EOF, but
   * may possibly be beyond the character that caused the error.
   *
   * @param inStream
   *          The input stream to read from. It must contain character data
   *          encoded as UTF-8, and it must be positioned to read from the start
   *          of the JSON message to be parsed.
   */
  public JSONStreamDecoder(final InputStream inStream) {
    // Small fixed-size window; read() refills it on demand.
    this.buffer = CharBuffer.allocate(25);
    try {
      this.reader = new BufferedReader(
          new InputStreamReader(inStream, "UTF-8")
      );
    }
    catch (UnsupportedEncodingException e) {
      // UTF-8 support is mandated by the JVM spec, so this is unreachable.
      throw new Error("UTF-8 is not supported by this JVM?", e);
    }
  }
  /**
   * Convenience entry point: parses the entire stream in one call.
   *
   * @param instream UTF-8 encoded JSON input, positioned at the start of the message
   * @return the decoded payload value
   */
  public static EJValue decode(final InputStream instream) throws IOException {
    return new JSONStreamDecoder(instream).parse();
  }
  /**
   * Returns the next character of input, honoring a one-character pushback
   * ({@code carry}) and refilling the internal buffer from the reader as
   * needed. Returns the NUL character (0) at end of stream.
   */
  public char read() throws IOException {
    // A previously pushed-back character takes priority.
    if (carry != 0) {
      final char oldCarry = carry;
      carry = 0;
      return oldCarry;
    }
    // Buffer exhausted: refill from the underlying reader.
    if (read <= 0) {
      if (!initial) buffer.rewind();
      initial = false;
      if ((read = reader.read(buffer)) <= 0) {
        return 0;
      }
      buffer.rewind();
    }
    read--;
    return buffer.get();
  }
  /**
   * Parses the whole input and wraps the result as an {@link EJValue}.
   * Any checked exception from the underlying parse is rethrown as a
   * {@link RuntimeException}.
   */
  public EJValue parse() {
    try {
      return new ErraiJSONValue(_parse(new OuterContext()));
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  /**
   * Core recursive-descent loop. Reads characters until end of input or the
   * end of the current container, accumulating values into the given
   * context. Arrays and objects recurse with a fresh context; the method
   * returns the context's recorded value.
   */
  private Object _parse(Context ctx) throws IOException {
    char c;
    StringBuilder appender;
    while ((c = read()) != 0) {
      switch (c) {
        case '[':
          // Nested array: recurse with an ArrayContext.
          ctx.addValue(_parse(new ArrayContext(new ArrayList<Object>())));
          break;

        case '{':
          // Nested object: recurse with an ObjectContext.
          ctx.addValue(_parse(new ObjectContext(new LinkedHashMap<Object, Object>())));
          break;

        case ']':
        case '}':
          // End of the current container.
          return ctx.record();

        case ',':
          ctx.record();
          break;

        case '"':
        case '\'':
          // String literal; both quote styles accepted, terminator must match.
          char term = c;
          appender = new StringBuilder(100);
          StrCapture:
          while ((c = read()) != 0) {
            switch (c) {
              case '\\':
                appender.append(handleEscapeSequence());
                break;
              case '"':
              case '\'':
                if (c == term) {
                  ctx.addValue(appender.toString());
                  term = 0;
                  break StrCapture;
                }
              default:
                appender.append(c);
            }
          }

          if (term != 0) {
            throw new RuntimeException("unterminated string literal");
          }

          break;

        case ':':
          // Key/value separator; keys were already recorded by the context.
          continue;

        default:
          if (isNumberStart(c)) {
            // Push the first digit/sign back and parse the full number.
            carry = c;
            ctx.addValue(parseDouble());
            break;
          }
          else if (Character.isJavaIdentifierPart(c)) {
            // Bare word: null/true/false, or treated as a plain string.
            appender = new StringBuilder(100).append(c);

            while (((c = read()) != 0) && Character.isJavaIdentifierPart(c)) {
              appender.append(c);
            }

            String s = appender.toString();

            if (s.length() > 5) ctx.addValue(s);
            else if ("null".equals(s)) {
              ctx.addValue(null);
            }
            else if ("true".equals(s)) {
              ctx.addValue(Boolean.TRUE);
            }
            else if ("false".equals(s)) {
              ctx.addValue(Boolean.FALSE);
            }
            else {
              ctx.addValue(s);
            }

            // Push back the character that ended the word.
            if (c != 0) carry = c;
          }
      }
    }

    return ctx.record();
  }
  /**
   * Consumes the character(s) following a backslash inside a string literal
   * and returns the character they denote. Supports the standard JSON
   * escapes plus {@code \'}, and 4-hex-digit {@code \\uXXXX} unicode
   * escapes. Throws {@link RuntimeException} on malformed sequences.
   */
  private char handleEscapeSequence() throws IOException {
    char c;
    switch (c = read()) {
      case '\\':
        return '\\';
      case '/':
        return '/';
      case 'b':
        return '\b';
      case 'f':
        return '\f';
      case 't':
        return '\t';
      case 'r':
        return '\r';
      case 'n':
        return '\n';
      case '\'':
        return '\'';
      case '"':
        return '\"';
      case 'u':
        //handle unicode
        char[] unicodeSeq = new char[4];
        int i = 0;
        // Read exactly four hex digits; stop early on a non-hex character.
        for (; i < 4 && isValidHexPart(c = read()); i++) {
          unicodeSeq[i] = c;
        }
        if (i != 4) {
          throw new RuntimeException("illegal unicode escape sequence: expected 4 hex characters after \\u");
        }

        return (char) Integer.decode("0x" + new String(unicodeSeq)).intValue();

      default:
        throw new RuntimeException("illegal escape sequence: " + c);
    }
  }
  /** The states the double recognizer can go through while attempting to parse a JSON numeric value:
   *  sign, integer part, fraction, exponent sign, exponent digits. */
  private static enum State { READ_SIGN, READ_INT, READ_FRAC, READ_EXP_SIGN, READ_EXP };
/**
* Parses a JSON numeric literal <b>with the side effect of consuming
* characters from the input</b> up until a character is encountered that
* cannot be used to form a JSON number. JSON numbers have the following
* grammar:
*
* <dl>
* <dt><i>number</i>
* <dd><i>int</i>
* <dd><i>int frac</i>
* <dd><i>int exp</i>
* <dd><i>int frac exp</i>
*
* <dt><i>int</i>
* <dd><i>digit</i>
* <dd><i>digit1-9</i> <i>digits</i>
* <dd><b>'-'</b> <i>digit</i>
* <dd><b>'-'</b> <i>digit1-9</i> <i>digits</i>
*
* <dt><i>frac</i>
* <dd><b>'.'</b> <i>digits</i>
*
* <dt><i>exp</i>
* <dd><i>e digits</i>
*
* <dt><i>digits</i>
* <dd><i>digit</i>
* <dd><i>digit digits</i>
*
* <dt><i>digit1-9</i>
* <dd><b>'1'</b> | <b>'2'</b> | <b>'3'</b> | <b>'4'</b> | <b>'5'</b> |
* <b>'6'</b> | <b>'7'</b> | <b>'8'</b> | <b>'9'</b>
*
* <dt><i>digit</i>
* <dd><b>'0'</b> | <b>'1'</b> | <b>'2'</b> | <b>'3'</b> | <b>'4'</b> |
* <b>'5'</b> | <b>'6'</b> | <b>'7'</b> | <b>'8'</b> | <b>'9'</b>
*
* <dt><i>e</i>
* <dd><b>'e'</b> | <b>'e+'</b> | <b>'e-'</b> | <b>'E'</b> | <b>'E+'</b> |
* <b>'E-'</b>
* </dl>
*
* @return The number that was parsed from the input stream.
 * <p><i>Note on side effects:</i> after this method returns, the next
 * character to be consumed is the one that terminated the literal, since
 * it is stashed in {@code carry}.
* @throws IOException
*/
    private double parseDouble() throws IOException {
        // Accumulates the literal's characters; Double.parseDouble performs
        // the final conversion (and rejects anything the recognizer let
        // through, e.g. a bare "-").
        final StringBuilder sb = new StringBuilder(25);
        State state = State.READ_SIGN;
        char c;
        recognize:
        while ((c = read()) != 0) {
            switch (state) {
                case READ_SIGN:
                    // NOTE(review): any digit 0-9 is accepted here even though
                    // the error message says 1-9; leading-zero validity is
                    // effectively deferred to Double.parseDouble.
                    if (c == '-' || ('0' <= c && c <= '9')) {
                        sb.append(c);
                        state = State.READ_INT;
                    }
                    else {
                        throw new NumberFormatException("Found '" + c + "' but expected '-' or a digit 1-9");
                    }
                    break;
                case READ_INT:
                    if ('0' <= c && c <= '9') {
                        sb.append(c);
                    }
                    else if (c == '.') {
                        sb.append(c);
                        state = State.READ_FRAC;
                    }
                    else if (c == 'E' || c == 'e') {
                        sb.append(c);
                        state = State.READ_EXP_SIGN;
                    }
                    else {
                        // found the end of the numeric literal; push the
                        // terminating character back for the caller's loop
                        carry = c;
                        break recognize;
                    }
                    break;
                case READ_FRAC:
                    if ('0' <= c && c <= '9') {
                        sb.append(c);
                    }
                    else if (c == 'E' || c == 'e') {
                        sb.append(c);
                        state = State.READ_EXP_SIGN;
                    }
                    else {
                        // found the end of the numeric literal
                        carry = c;
                        break recognize;
                    }
                    break;
                case READ_EXP_SIGN:
                    // One optional sign (or first digit) must follow e/E.
                    if (c == '-' || c == '+' || ('0' <= c && c <= '9')) {
                        sb.append(c);
                        state = State.READ_EXP;
                    }
                    else {
                        throw new NumberFormatException("The numeric literal \"" + sb + "\" is malformed (can't end with e or E)");
                    }
                    break;
                case READ_EXP:
                    if ('0' <= c && c <= '9') {
                        sb.append(c);
                    }
                    else {
                        // found the end of the numeric literal
                        carry = c;
                        break recognize;
                    }
                    break;
            }
        }
        return Double.parseDouble(sb.toString());
    }
/**
* Returns true if c could be the start of a JSON number. Note that a return
* value of true does not indicate that the value will be a valid number. JSON
* numbers are not permitted to begin with a '0' or a '.', so in those cases
* {@link #parseDouble()} will throw an error even though this method returned
* true. This is an acceptable outcome, though, because there is nothing else
* the errant character could represent in the JSON stream.
*
* @param c
* the character to test
* @return true if c is a numeric digit, '-', or '.'.
*/
private static boolean isNumberStart(char c) {
switch (c) {
case '.':
case '-':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
return true;
default:
return false;
}
}
private static boolean isValidHexPart(char c) {
switch (c) {
case '.':
case '-':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
return true;
default:
return false;
}
}
    /**
     * A parsing context that accumulates values for one JSON container (or
     * bare scalar) and produces the finished result via {@link #record()}.
     *
     * @param <T> the type of the completed value this context yields
     */
    private static abstract class Context<T> {
        // Commit any pending state and return the value built so far.
        abstract T record();
        // Receive the next parsed value (array element, object key, or
        // object value, depending on the concrete context).
        abstract void addValue(Object val);
    }
private static class OuterContext extends Context<Object> {
private Context _wrapped;
private Object col;
@Override
Object record() {
return col;
}
@SuppressWarnings("unchecked")
@Override
void addValue(Object val) {
if (_wrapped == null) {
if (val instanceof List) {
_wrapped = new ArrayContext((List<Object>) (col = val));
}
else if (val instanceof Map) {
_wrapped = new ObjectContext((Map<Object, Object>) (col = val));
}
else if (val instanceof String) {
_wrapped = new StringContext((String) (col = val));
}
else {
throw new RuntimeException("expected list or map but found: " + (val == null ? null : val.getClass().getName()));
}
}
else {
_wrapped.addValue(val);
}
}
}
private static class ArrayContext extends Context<List> {
List<Object> collection;
private ArrayContext(List<Object> collection) {
this.collection = collection;
}
@Override
void addValue(Object val) {
collection.add(val);
}
@Override
public List record() {
return collection;
}
}
private static class StringContext extends Context<String> {
String value;
private StringContext(String value) {
this.value = value;
}
@Override
void addValue(Object val) {}
@Override
public String record() {
return value;
}
}
private static class ObjectContext extends Context<Map> {
protected Object lhs;
protected Object rhs;
Map<Object, Object> collection;
private ObjectContext(Map<Object, Object> collection) {
this.collection = collection;
}
@Override
void addValue(Object val) {
if (lhs == null) {
lhs = val;
}
else {
rhs = val;
}
}
@Override
Map record() {
if (lhs != null) {
collection.put(lhs, rhs);
}
lhs = rhs = null;
return collection;
}
}
} |
def partial_velocity(vel_list, u_list, frame):
    """Return the partial velocities of ``vel_list`` with respect to the
    generalized speeds in ``u_list``, expressed in ``frame``.

    The result is a nested list with one row per velocity in ``vel_list``;
    each row holds ``vel.diff(u, frame)`` for every speed ``u`` in
    ``u_list``, in order.

    Raises
    ------
    TypeError
        If ``vel_list`` or ``u_list`` is not iterable.
    """
    if not hasattr(vel_list, '__iter__'):
        raise TypeError('Provide velocities in an iterable')
    if not hasattr(u_list, '__iter__'):
        raise TypeError('Provide speeds in an iterable')
    return [[vel.diff(speed, frame) for speed in u_list] for vel in vel_list]
// src/shop/order/order.controller.ts
import {
Controller,
Body,
Post,
Query,
Get,
NotFoundException,
Param,
Delete,
UseGuards,
Patch,
HttpException,
} from '@nestjs/common';
import {InjectModel} from '@nestjs/mongoose';
import {Product} from '@app/schema/product.schema';
import {Model, Types} from 'mongoose';
import {Order} from '@app/schema/order.schema';
import {AuthGuard} from '@nestjs/passport';
import {
ChangeStatusDto,
createOrderDto,
CartItem,
editOrderDto,
getAllDto,
bulkDeleteDto,
OrderModel,
OrderStatus,
} from './order.types';
import {ApiService} from '@app/service/api/api.service';
/**
 * REST controller for shop orders.
 *
 * Order creation is public but captcha-gated; all administrative endpoints
 * (status changes, listing, editing, deleting) require a valid JWT.
 */
@Controller('order')
export class OrderController {
  constructor(
    @InjectModel(Product.name) private productModel: Model<Product>,
    @InjectModel(Order.name) private orderModel: OrderModel,
    private api: ApiService
  ) {}

  /**
   * Applies `req.status` to every order listed in `req.orders`.
   *
   * @returns the applied status, JSON-encoded.
   */
  @Patch('change-order-status')
  @UseGuards(AuthGuard('jwt'))
  async changeStatus(@Body() req: ChangeStatusDto): Promise<string> {
    for (const orderId of req.orders) {
      await this.orderModel.findByIdAndUpdate(orderId, {
        status: req.status,
      });
    }
    return JSON.stringify(req.status);
  }

  /** Deletes every order whose id appears in `items.itemIds`. */
  @Patch('bulk-delete')
  @UseGuards(AuthGuard('jwt'))
  async bulkDelete(@Body() items: bulkDeleteDto): Promise<void> {
    // Fix: this query was previously fired without `await`, so the HTTP
    // response was sent before the deletion completed and any database
    // error was silently swallowed.
    await this.orderModel.deleteMany({_id: {$in: items.itemIds}}).exec();
  }

  /**
   * Creates an order from the submitted cart after captcha verification.
   *
   * Prices are snapshotted from the product catalogue at creation time
   * (sale price when the product is on sale). Cart entries whose product id
   * is no longer in the catalogue are skipped silently.
   *
   * @returns the id of the newly created order.
   * @throws HttpException 400 with the captcha error codes when
   *         verification fails.
   */
  @Post()
  async create(@Body() req: createOrderDto): Promise<Types.ObjectId> {
    const captchaResult = await this.api.verifyCaptcha(req.captcha);
    if (!captchaResult.data.success) {
      throw new HttpException(captchaResult.data['error-codes'], 400);
    }
    const newOrder = new this.orderModel();
    newOrder.shippingAddress = req.shippingAddress;
    newOrder.billingAddress = req.billingAddress;
    newOrder.notes = req.notes;
    newOrder.total = 0;
    // NOTE(review): assumes the Order schema defaults `cart` to an empty
    // array; otherwise the push below would fail — confirm in the schema.
    for (const [_id, quantity] of Object.entries(req.cart)) {
      const product = await this.productModel.findById(_id);
      if (!product) {
        continue; // stale cart entry; skip
      }
      const price = product.onSale ? product.salePrice : product.price;
      newOrder.total += price * quantity;
      const newCartItem: CartItem = {} as CartItem;
      newCartItem.product = {
        name: product.name,
        description: product.description,
        onSale: product.onSale,
        image: product.image,
        price: price,
        originalId: product._id,
      };
      newCartItem.quantity = quantity;
      newOrder.cart.push(newCartItem);
    }
    const created = await newOrder.save();
    return created._id;
  }

  /**
   * Partially updates an order; fields missing from the request keep their
   * current values. When `cartUpdateRQ` is present and non-empty, the cart
   * and total are rebuilt from the catalogue the same way `create` builds
   * them.
   *
   * @returns the updated order document.
   * @throws NotFoundException when no order has the given id.
   */
  @Patch(':id')
  @UseGuards(AuthGuard('jwt'))
  async edit(@Body() req: editOrderDto, @Param('id') id): Promise<Order> {
    const order = await this.orderModel.findById(id).exec();
    if (!order) {
      throw new NotFoundException();
    }
    const newOrderData: Partial<Order> = {
      shippingAddress: req.shippingAddress ? req.shippingAddress : order.shippingAddress,
      billingAddress: req.billingAddress ? req.billingAddress : order.billingAddress,
      cart: order.cart,
      total: order.total,
      status: req.status ? req.status : order.status,
      notes: req.notes ? req.notes : order.notes,
    };
    if (req.cartUpdateRQ && Object.keys(req.cartUpdateRQ).length) {
      newOrderData.cart = [];
      newOrderData.total = 0;
      for (const [_id, quantity] of Object.entries(req.cartUpdateRQ)) {
        const product = await this.productModel.findById(_id);
        if (!product) {
          continue;
        }
        const price = product.onSale ? product.salePrice : product.price;
        newOrderData.total += price * quantity;
        const newCartItem: CartItem = {} as CartItem;
        newCartItem.product = {
          name: product.name,
          description: product.description,
          onSale: product.onSale,
          image: product.image,
          price: price,
          originalId: product._id,
        };
        newCartItem.quantity = quantity;
        newOrderData.cart.push(newCartItem);
      }
    }
    const updated = await this.orderModel.findByIdAndUpdate(id, newOrderData, {new: true});
    return updated;
  }

  /**
   * Returns a paginated, optionally filtered and sorted order listing.
   *
   * `search` matches (case-insensitively) against shipping/billing full
   * names and email addresses. Defaults: page 1, 10 items per page, sorted
   * by `createdAt` descending.
   */
  @Get('')
  @UseGuards(AuthGuard('jwt'))
  async getAll(@Query() query: getAllDto): Promise<any> {
    let paginationConfig = {};
    if (!query.page) {
      paginationConfig = {
        limit: 10,
        page: 1,
      };
    } else {
      // NOTE(review): when `page` is set but `limit` is omitted, `limit`
      // is undefined here and the paginate plugin's default applies —
      // confirm that is the intended behavior.
      paginationConfig = {
        limit: query.limit,
        page: query.page,
      };
    }
    let filter: any = {};
    let searchStr;
    if (query.search) {
      searchStr = `${query.search}`;
      filter = {
        $or: [
          {
            shippingFullName: new RegExp(searchStr, 'i'),
          },
          {
            billingFullName: new RegExp(searchStr, 'i'),
          },
          {
            'shippingAddress.email': new RegExp(searchStr, 'i'),
          },
          {
            'billingAddress.email': new RegExp(searchStr, 'i'),
          },
        ],
      };
    }
    const sortType = query.sortType ? query.sortType : 'createdAt';
    const sortOrder = query.sortOrder ? query.sortOrder : 'desc';
    // Full names only exist as concatenations, so they are synthesized for
    // matching/sorting and projected away again before returning.
    const aggregation: any = [
      {
        $addFields: {
          shippingFullName: {$concat: ['$shippingAddress.first_name', ' ', '$shippingAddress.last_name']},
          billingFullName: {$concat: ['$billingAddress.first_name', ' ', '$billingAddress.last_name']},
        },
      },
      {$match: filter},
      {$sort: {[sortType]: sortOrder === 'asc' ? 1 : -1}},
      {
        $project: {
          shippingFullName: false,
          billingFullName: false,
        },
      },
    ];
    const agg = this.orderModel.aggregate(aggregation);
    const paginateResult = await this.orderModel.aggregatePaginate(agg, paginationConfig);
    const {totalDocs, limit, page, totalPages} = paginateResult;
    return {
      items: paginateResult.docs,
      total: totalDocs,
      limit,
      page,
      pages: totalPages,
    };
  }

  /**
   * Returns a single order by id (without cart item ids or version key).
   *
   * @throws NotFoundException when no order has the given id.
   */
  @Get(':id')
  @UseGuards(AuthGuard('jwt'))
  async getOne(@Param('id') id: string): Promise<Order> {
    const order = await this.orderModel
      .findById(id)
      .select('-cart._id -__v')
      .exec();
    if (!order) {
      throw new NotFoundException();
    }
    return order.toObject();
  }

  /**
   * Deletes a single order by id.
   *
   * @throws NotFoundException when no order has the given id.
   */
  @Delete(':id')
  @UseGuards(AuthGuard('jwt'))
  async delete(@Param('id') id: string): Promise<void> {
    const result = await this.orderModel.findByIdAndDelete(id).exec();
    if (!result) {
      throw new NotFoundException();
    }
  }
}
|
The Saudi TV channel Al Arabiya has come under fire for publishing a fake photo online. On Monday, it posted the following photo to its official Twitter account. The description, in Arabic, reads: “Al Arabiya exclusive: Saudi Arabia’s Operation Decisive Storm destroys a Houthi military convoy in Saada [Yemen].” There’s just one problem: that photo is an iconic photo from the first Gulf War. According to information provided by the US Department of Defense, which put the image in the public domain, it was taken by a military photographer on March 4, 1991 during Operation Desert Storm. The photo shows Iraqi military vehicles that were destroyed by coalition forces. Readers quickly recognized the photo and started deriding the channel on Twitter. The post was deleted on Monday, but it remained online in cached form. This is not the first time Al Arabiya has illustrated news items with bogus photos. Last December, it illustrated an article about Iraqi women who were allegedly murdered for refusing to have sex with jihadists with a photo of a mass grave in Libya taken a year earlier.
// Measure performance impact of enabling accounts.
static void bufferAccountUse(benchmark::State& state) {
  const std::string payload(state.range(0), 'a');
  const uint64_t adds_per_iteration = state.range(1);
  const bool use_account = state.range(2) != 0;

  // Factory configured to track accounts from the smallest bucket upwards.
  envoy::config::overload::v3::BufferFactoryConfig config;
  config.set_minimum_account_to_track_power_of_two(2);
  Buffer::WatermarkBufferFactory buffer_factory(config);

  FakeStreamResetHandler reset_handler;
  auto account = buffer_factory.createAccount(reset_handler);
  RELEASE_ASSERT(account != nullptr, "");

  for (auto _ : state) {
    UNREFERENCED_PARAMETER(_);
    Buffer::OwnedImpl buffer;
    if (use_account) {
      buffer.bindAccount(account);
    }
    for (uint64_t i = 0; i < adds_per_iteration; ++i) {
      buffer.add(payload);
    }
  }
  account->clearDownstream();
}
/** \brief DatasetFactory provides a way to inspect/discover a Dataset's
 * expected schema before materializing the Dataset and underlying Sources. */
@Namespace("arrow::dataset") @NoOffset @Properties(inherit = org.bytedeco.arrow.presets.arrow_dataset.class)
public class DatasetFactory extends Pointer {
    static { Loader.load(); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public DatasetFactory(Pointer p) { super(p); }

    // NOTE(review): the @Namespace/@Properties annotations indicate this is a
    // JavaCPP binding onto arrow::dataset::DatasetFactory; the native method
    // signatures must mirror the C++ API exactly, so do not hand-edit them.

    /** \brief Construct a DatasetFactory from a vector of SourceFactory. */
    public static native @ByVal DatasetFactoryResult Make(
                    @ByVal SourceFactoryVector factories);
    /** \brief Return the list of SourceFactory */
    public native @Const @ByRef SourceFactoryVector factories();
    /** \brief Get the schemas of the Sources. */
    public native @ByVal SchemaVectorResult InspectSchemas();
    /** \brief Get unified schema for the resulting Dataset. */
    public native @ByVal SchemaResult Inspect();
    /** \brief Create a Dataset with the given schema. */
    public native @ByVal DatasetResult Finish(@Const @SharedPtr @ByRef Schema schema);
    /** \brief Create a Dataset using the inspected schema. */
    public native @ByVal DatasetResult Finish();
}
/**
 * Implementation for the "objc_proto_library" rule.
 */
public class ObjcProtoLibrary implements RuleConfiguredTargetFactory {
  @Override
  public ConfiguredTarget create(final RuleContext ruleContext)
      throws InterruptedException, RuleErrorException, ActionConflictException {
    // Fail fast on malformed proto attributes before registering any actions.
    new ProtoAttributes(ruleContext).validate();
    return createProtobufTarget(ruleContext);
  }

  /**
   * Builds the configured target: collects proto providers from "deps",
   * registers the protobuf generation actions, and exposes the resulting
   * ObjcProvider plus generated files.
   */
  private ConfiguredTarget createProtobufTarget(RuleContext ruleContext)
      throws InterruptedException, RuleErrorException, ActionConflictException {
    NestedSetBuilder<Artifact> filesToBuild = NestedSetBuilder.stableOrder();
    Iterable<ProtoInfo> protoInfos =
        ruleContext.getPrerequisites("deps", Mode.TARGET, ProtoInfo.PROVIDER);
    Iterable<ObjcProtoProvider> objcProtoProviders =
        ruleContext.getPrerequisites("deps", Mode.TARGET, ObjcProtoProvider.SKYLARK_CONSTRUCTOR);
    ProtobufSupport protoSupport =
        new ProtobufSupport(
                ruleContext,
                ruleContext.getConfiguration(),
                protoInfos,
                objcProtoProviders,
                getPortableProtoFilters(ruleContext, objcProtoProviders, protoInfos))
            .registerGenerationActions()
            .addFilesToBuild(filesToBuild);
    return ObjcRuleClasses.ruleConfiguredTarget(ruleContext, filesToBuild.build())
        .addNativeDeclaredProvider(protoSupport.getObjcProvider().get())
        .build();
  }

  /**
   * Collects the portable proto filter files: always the transitive filters
   * from dependency providers, plus either this target's own filters or, if
   * none are declared, a filter generated for the direct proto_library deps.
   */
  private static NestedSet<Artifact> getPortableProtoFilters(
      RuleContext ruleContext,
      Iterable<ObjcProtoProvider> objcProtoProviders,
      Iterable<ProtoInfo> protoInfos) {
    ProtoAttributes attributes = new ProtoAttributes(ruleContext);
    NestedSetBuilder<Artifact> portableProtoFilters = NestedSetBuilder.stableOrder();
    portableProtoFilters.addTransitive(
        ProtobufSupport.getTransitivePortableProtoFilters(objcProtoProviders));
    // If this target specifies filters, use those. If not, generate a filter only if there are
    // direct proto_library targets, and generate a filter only for those files.
    if (attributes.hasPortableProtoFilters()) {
      portableProtoFilters.addAll(attributes.getPortableProtoFilters());
    } else if (!Iterables.isEmpty(protoInfos)) {
      Artifact generatedFilter = ProtobufSupport.getGeneratedPortableFilter(ruleContext,
          ruleContext.getConfiguration());
      ProtobufSupport.registerPortableFilterGenerationAction(
          ruleContext, generatedFilter, protoInfos);
      portableProtoFilters.add(generatedFilter);
    }
    return portableProtoFilters.build();
  }
}
#include "mandel.hpp"

// Declared with C linkage so the symbol is callable from non-C++ code
// (e.g. FFI bindings); the definition lives elsewhere.
extern "C" {
// Evaluates the Mandelbrot computation for the point (r, i).
// NOTE(review): exact semantics of the returned int (iteration count vs.
// membership flag) are defined by the out-of-view implementation — confirm
// before relying on them.
int mandel(float r, float i);
}
|
package cieloecommerce.sdk.ecommerce.request;
import java.io.IOException;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import com.google.gson.GsonBuilder;
import cieloecommerce.sdk.Environment;
import cieloecommerce.sdk.Merchant;
import cieloecommerce.sdk.ecommerce.CardToken;
public class CreateCartTokenRequest extends AbstractSaleRequest<CardToken, CardToken> {
public CreateCartTokenRequest(Merchant merchant, Environment environment) {
super(merchant, environment);
}
@Override
public CardToken execute(CardToken param) throws IOException, CieloRequestException {
String url = environment.getApiUrl() + "1/card/";
HttpPost request = new HttpPost(url);
request.setEntity(new StringEntity(new GsonBuilder().create().toJson(param)));
HttpResponse response = sendRequest(request);
return readResponse(response, CardToken.class);
}
} |
from helpers.time_utils import days
import json
import brownie
import pytest
from brownie import *
from helpers.constants import *
from helpers.registry import registry
from helpers.registry.artifacts import artifacts
from collections import namedtuple
from config.badger_config import badger_config, digg_config, sett_config
from scripts.systems.badger_system import connect_badger
from scripts.systems.constants import SettType
from helpers.token_utils import distribute_test_ether, distribute_from_whales
from rich.console import Console
from helpers.proxy_utils import deploy_proxy
from helpers.utils import approx
from decimal import Decimal
console = Console()
def main():
    """Fork-test harness for migrating a Sett to StrategyConvexStakingOptimizer.

    Connects to the deployed Badger system from the production JSON, deploys
    a new StrategyConvexStakingOptimizer behind a proxy for the selected
    Sett, and returns the wired-up components for use in tests.

    Every step performs chain side effects (account impersonation, whale
    transfers, contract deployment), so statement order matters.
    """
    # Assign accounts
    with open(digg_config.prod_json) as f:
        badger_deploy = json.load(f)
    # Impersonate the production roles (requires a forked network).
    deployer = accounts.at(badger_deploy["deployer"], force=True)
    guardian = accounts.at(badger_deploy["guardian"], force=True)
    keeper = accounts.at(badger_deploy["keeper"], force=True)
    governance = accounts.at(badger_deploy["timelock"], force=True)
    strategist = accounts[3]
    user1 = accounts[4]
    user2 = accounts[5]
    user3 = accounts[6]
    namedAccounts = {
        "deployer": deployer,
        "guardian": guardian,
        "keeper": keeper,
        "governance": governance,
        "strategist": strategist,
        "user1": user1,
        "user2": user2,
        "user3": user3,
    }
    # Setup Badger system
    badger = connect_badger(badger_config.prod_json)
    distribute_test_ether(deployer, Wei("20 ether"))
    # Key of Sett to migrate (ONLY UNCOMMENT THE ONE TO TEST):
    settKey = "native.renCrv"
    # settKey = "native.sbtcCrv"
    # settKey = "native.tbtcCrv"
    # Connect to prod controller and vault
    vault = badger.sett_system.vaults[settKey]
    print("Vault for " + settKey + " fetched with address " + vault.address)
    controller = interface.IController(vault.controller())
    print("Controller for " + settKey + " fetched with address " + controller.address)
    # Deploy and initialize the strategy
    # NOTE(review): independent `if`s rather than elif — only one branch can
    # match since settKey is hard-coded above.
    if settKey == "native.renCrv":
        params = sett_config.native.convexRenCrv.params
        want = sett_config.native.convexRenCrv.params.want
        # Transfer assets to users
        distribute_from_whales(user1, 1, "renCrv")
    if settKey == "native.sbtcCrv":
        params = sett_config.native.convexSbtcCrv.params
        want = sett_config.native.convexSbtcCrv.params.want
        # Transfer assets to users
        distribute_from_whales(user1, 1, "sbtcCrv")
    if settKey == "native.tbtcCrv":
        params = sett_config.native.convexTbtcCrv.params
        want = sett_config.native.convexTbtcCrv.params.want
        # Transfer assets to users
        distribute_from_whales(user1, 1, "tbtcCrv")
    # Deploy the implementation, then wrap it in a proxy initialized with the
    # production roles and the Sett-specific parameters.
    contract = StrategyConvexStakingOptimizer.deploy({"from": deployer})
    strategy = deploy_proxy(
        "StrategyConvexStakingOptimizer",
        StrategyConvexStakingOptimizer.abi,
        contract.address,
        web3.toChecksumAddress(badger.devProxyAdmin.address),
        contract.initialize.encode_input(
            governance.address,
            strategist.address,
            controller.address,
            keeper.address,
            guardian.address,
            [
                params.want,
                badger.badgerTree.address,
            ],
            params.pid,
            [
                params.performanceFeeGovernance,
                params.performanceFeeStrategist,
                params.withdrawalFee,
            ],
        ),
        deployer,
    )
    # Finish setup
    return namedtuple("setup", "badger controller vault strategy namedAccounts")(
        badger, controller, vault, strategy, namedAccounts
    )
Some observations on reality testing as a clinical concept.
This paper asserts that reality testing is a complex ego activity which cannot be characterized globally as either intact or defective. In normals, neurotics, and "borderlines" it is actually a highly variable function. Some problems of nomenclature are addressed. Among many analysts there is an implicit tendency to concretize the means by which reality testing is performed. This may lead to certain conceptual problems and clinical inaccuracies. The relationship of reality testing to unconscious conflicts from all phases of development is emphasized and illustrated. Issues in the technical handling in analysis of manifestations of disturbed reality testing are discussed. |
"""Module grouping tests for the pydov.util.owsutil module."""
import copy
import re
import pytest
from numpy.compat import unicode
from owslib.etree import etree
from owslib.fes import (
PropertyIsEqualTo,
FilterRequest,
)
from owslib.iso import MD_Metadata
from owslib.util import nspath_eval
from pydov.util import owsutil
from pydov.util.errors import (
MetadataNotFoundError,
FeatureCatalogueNotFoundError,
)
from pydov.util.location import (
Within,
Box,
)
from tests.test_search_boring import (
md_metadata,
mp_remote_md,
mp_remote_describefeaturetype,
mp_remote_fc,
location_md_metadata,
location_fc_featurecatalogue,
location_wfs_describefeaturetype,
)
from tests.test_search import (
wfs,
mp_wfs,
mp_remote_fc_notfound
)
def clean_xml(xml):
    """Normalize an XML string for comparison: strip namespace definitions
    and prefixes and collapse cosmetic whitespace in and between tags.

    Parameters
    ----------
    xml : str
        String representation of XML document.

    Returns
    -------
    str
        String representation of cleaned XML document.
    """
    substitutions = (
        # drop xmlns namespace definitions
        (r'[ ]+xmlns:[^=]+="[^"]+"', ''),
        # drop namespace prefixes in opening and closing tags
        (r'<(/?)[^:]+:([^ >]+)([ >])', r'<\1\2\3'),
        # drop extra spaces before self-closing tag ends
        (r'[ ]+/>', '/>'),
        # drop extra spaces between adjacent tags
        (r'>[ ]+<', '><'),
    )
    result = xml
    for pattern, replacement in substitutions:
        result = re.sub(pattern, replacement, result)
    return result
class TestOwsutil(object):
"""Class grouping tests for the pydov.util.owsutil module."""
    def test_get_csw_base_url(self, wfs):
        """Test the owsutil.get_csw_base_url method.
        Test whether the CSW base URL of the dov-pub:Boringen layer is correct.
        Parameters
        ----------
        wfs : pytest.fixture returning owslib.wfs.WebFeatureService
            WebFeatureService based on the local GetCapabilities.
        """
        contentmetadata = wfs.contents['dov-pub:Boringen']
        assert owsutil.get_csw_base_url(contentmetadata) == \
            'https://www.dov.vlaanderen.be/geonetwork/srv/nl/csw'
    def test_get_csw_base_url_nometadataurls(self, wfs):
        """Test the owsutil.get_csw_base_url method for a layer without
        metadata urls.
        Test whether a MetadataNotFoundError is raised.
        Parameters
        ----------
        wfs : pytest.fixture returning owslib.wfs.WebFeatureService
            WebFeatureService based on the local GetCapabilities.
        """
        # Deep-copy so the shared WFS fixture is not mutated for later tests.
        contents = copy.deepcopy(wfs.contents)
        contentmetadata = contents['dov-pub:Boringen']
        contentmetadata.metadataUrls = []
        with pytest.raises(MetadataNotFoundError):
            owsutil.get_csw_base_url(contentmetadata)
    def test_get_featurecatalogue_uuid(self, md_metadata):
        """Test the owsutil.get_featurecatalogue_uuid method.
        Test whether the featurecatalogue uuid of the dov-pub:Boringen layer
        is correct.
        Parameters
        ----------
        md_metadata : pytest.fixture providing owslib.iso.MD_Metadata
            Parsed metadata describing the Boringen WFS layer in more detail,
            in the ISO 19115/19139 format.
        """
        assert owsutil.get_featurecatalogue_uuid(md_metadata) == \
            'c0cbd397-520f-4ee1-aca7-d70e271eeed6'
    def test_get_featurecatalogue_uuid_nocontentinfo(self, md_metadata):
        """Test the owsutil.get_featurecatalogue_uuid method when the
        metadata is missing the gmd:contentInfo element.
        Test whether a FeatureCatalogueNotFoundError is raised.
        Parameters
        ----------
        md_metadata : pytest.fixture providing owslib.iso.MD_Metadata
            Parsed metadata describing the Boringen WFS layer in more detail,
            in the ISO 19115/19139 format.
        """
        tree = etree.fromstring(md_metadata.xml)
        # NOTE(review): if MD_Metadata is itself the root element of the
        # parsed document, this find() returns None and root.remove() below
        # would raise AttributeError — verify against the fixture's XML.
        root = tree.find('{http://www.isotc211.org/2005/gmd}MD_Metadata')
        for ci in tree.findall(
                './/{http://www.isotc211.org/2005/gmd}contentInfo'):
            root.remove(ci)
        md_metadata.xml = etree.tostring(tree)
        with pytest.raises(FeatureCatalogueNotFoundError):
            owsutil.get_featurecatalogue_uuid(md_metadata)
    def test_get_featurecatalogue_uuid_nouuidref(self, md_metadata):
        """Test the owsutil.get_featurecatalogue_uuid method when the
        gmd:contentInfo element is missing a 'uuidref' attribute.
        Test whether a FeatureCatalogueNotFoundError is raised.
        Parameters
        ----------
        md_metadata : pytest.fixture providing owslib.iso.MD_Metadata
            Parsed metadata describing the Boringen WFS layer in more detail,
            in the ISO 19115/19139 format.
        """
        tree = etree.fromstring(md_metadata.xml)
        # Strip the uuidref attribute from every featureCatalogueCitation.
        for ci in tree.findall(nspath_eval(
                'gmd:MD_Metadata/gmd:contentInfo/'
                'gmd:MD_FeatureCatalogueDescription/'
                'gmd:featureCatalogueCitation',
                {'gmd': 'http://www.isotc211.org/2005/gmd'})):
            ci.attrib.pop('uuidref')
        md_metadata.xml = etree.tostring(tree)
        with pytest.raises(FeatureCatalogueNotFoundError):
            owsutil.get_featurecatalogue_uuid(md_metadata)
    def test_get_namespace(self, wfs, mp_remote_describefeaturetype):
        """Test the owsutil.get_namespace method.
        Test whether the namespace of the dov-pub:Boringen layer is correct.
        Parameters
        ----------
        wfs : pytest.fixture returning owslib.wfs.WebFeatureService
            WebFeatureService based on the local GetCapabilities.
        mp_remote_describefeaturetype : pytest.fixture
            Monkeypatch the call to a remote DescribeFeatureType of the
            dov-pub:Boringen layer.
        """
        assert owsutil.get_namespace(wfs, 'dov-pub:Boringen') == \
            'http://dov.vlaanderen.be/ocdov/dov-pub'
    def test_get_remote_featurecatalogue(self, mp_remote_fc):
        """Test the owsutil.get_remote_featurecatalogue method.
        Test whether the feature catalogue of the dov-pub:Boringen layer
        matches the format described in the docs.
        Parameters
        ----------
        mp_remote_fc : pytest.fixture
            Monkeypatch the call to get the remote feature catalogue of the
            dov-pub:Boringen layer.
        """
        fc = owsutil.get_remote_featurecatalogue(
            'https://www.dov.vlaanderen.be/geonetwork/srv/nl/csw',
            'c0cbd397-520f-4ee1-aca7-d70e271eeed6')
        # Schema check: top level is a dict with a textual definition and a
        # per-attribute dict of definition/values/multiplicity entries.
        assert type(fc) is dict
        assert 'definition' in fc
        assert type(fc['definition']) in (str, unicode)
        assert 'attributes' in fc
        assert type(fc['attributes']) is dict
        attrs = fc['attributes']
        if len(attrs) > 0:
            for attr in attrs.values():
                assert type(attr) is dict
                assert 'definition' in attr
                assert type(attr['definition']) in (str, unicode)
                assert 'values' in attr
                assert type(attr['values']) is list
                if len(attr['values']) > 0:
                    for v in attr['values']:
                        assert type(v) in (str, unicode)
                    # listed values must be unique
                    assert len(attr['values']) == len(set(attr['values']))
                assert 'multiplicity' in attr
                mp = attr['multiplicity']
                # multiplicity is (lower, upper) with upper an int or 'Inf'
                assert type(mp) is tuple
                assert len(mp) == 2
                assert mp[0] in (0, 1)
                assert (type(mp[1]) is int and mp[1] > 0) or mp[1] == 'Inf'
    # (sic: method name says 'featurecataloge' — kept to avoid renaming tests)
    def test_get_remote_featurecataloge_baduuid(self, mp_remote_fc_notfound):
        """Test the owsutil.get_remote_featurecatalogue method with an
        inexistent feature catalogue uuid.
        Test whether a FeatureCatalogueNotFoundError is raised.
        Parameters
        ----------
        mp_remote_fc_notfound : pytest.fixture
            Monkeypatch the call to get an inexistent remote featurecatalogue.
        """
        with pytest.raises(FeatureCatalogueNotFoundError):
            owsutil.get_remote_featurecatalogue(
                'https://www.dov.vlaanderen.be/geonetwork/srv/nl/csw',
                'badfc000-0000-0000-0000-badfc00badfc')
    def test_get_remote_metadata(self, md_metadata):
        """Test the owsutil.get_remote_metadata method.
        Test whether the resulting MD_Metadata is correct.
        Parameters
        ----------
        md_metadata : pytest.fixture returning owslib.iso.MD_Metadata
            Parsed metadata describing the Boringen WFS layer in more detail,
            in the ISO 19115/19139 format.
        """
        assert type(md_metadata) is MD_Metadata
    def test_get_remote_metadata_nometadataurls(self, wfs):
        """Test the owsutil.get_remote_metadata method when the WFS layer
        missed metadata URLs.
        Test whether a MetadataNotFoundError is raised.
        Parameters
        ----------
        wfs : pytest.fixture returning owslib.wfs.WebFeatureService
            WebFeatureService based on the local GetCapabilities.
        """
        # Deep-copy so the shared WFS fixture is not mutated for later tests.
        contents = copy.deepcopy(wfs.contents)
        contentmetadata = contents['dov-pub:Boringen']
        contentmetadata.metadataUrls = []
        with pytest.raises(MetadataNotFoundError):
            owsutil.get_remote_metadata(contentmetadata)
    def test_wfs_build_getfeature_request_onlytypename(self):
        """Test the owsutil.wfs_build_getfeature_request method with only a
        typename specified.
        Test whether the XML of the WFS GetFeature call is generated correctly.
        """
        xml = owsutil.wfs_build_getfeature_request('dov-pub:Boringen')
        # Compare whitespace/namespace-normalized serializations (clean_xml).
        assert clean_xml(etree.tostring(xml).decode('utf8')) == clean_xml(
            '<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" '
            'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
            'service="WFS" version="1.1.0" '
            'xsi:schemaLocation="http://www.opengis.net/wfs '
            'http://schemas.opengis.net/wfs/1.1.0/wfs.xsd"><wfs:Query '
            'typeName="dov-pub:Boringen"><ogc:Filter '
            'xmlns:ogc="http://www.opengis.net/ogc"/></wfs:Query></wfs'
            ':GetFeature>')
    def test_wfs_build_getfeature_request_bbox_nogeometrycolumn(self):
        """Test the owsutil.wfs_build_getfeature_request method with a location
        argument but without the geometry_column argument.
        Test whether an AttributeError is raised.
        """
        with pytest.raises(AttributeError):
            xml = owsutil.wfs_build_getfeature_request(
                'dov-pub:Boringen',
                location=Within(Box(151650, 214675, 151750, 214775)))
    def test_wfs_build_getfeature_request_bbox(self):
        """Test the owsutil.wfs_build_getfeature_request method with a
        typename, box and geometry_column.
        Test whether the XML of the WFS GetFeature call is generated correctly.
        """
        xml = owsutil.wfs_build_getfeature_request(
            'dov-pub:Boringen',
            location=Within(Box(151650, 214675, 151750, 214775)),
            geometry_column='geom')
        assert clean_xml(etree.tostring(xml).decode('utf8')) == clean_xml(
            '<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" '
            'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
            'service="WFS" version="1.1.0" '
            'xsi:schemaLocation="http://www.opengis.net/wfs '
            'http://schemas.opengis.net/wfs/1.1.0/wfs.xsd"><wfs:Query '
            'typeName="dov-pub:Boringen"><ogc:Filter '
            'xmlns:ogc="http://www.opengis.net/ogc"><ogc:Within> '
            '<ogc:PropertyName>geom</ogc:PropertyName><gml:Envelope '
            'xmlns:gml="http://www.opengis.net/gml" srsDimension="2" '
            'srsName="http://www.opengis.net/gml/srs/epsg.xml#31370"><gml'
            ':lowerCorner>151650.000 '
            '214675.000</gml:lowerCorner><gml:upperCorner>151750.000 '
            '214775.000</gml:upperCorner></gml:Envelope></ogc:Within></ogc'
            ':Filter></wfs:Query></wfs:GetFeature>')
    def test_wfs_build_getfeature_request_propertyname(self):
        """Test the owsutil.wfs_build_getfeature_request method with a list
        of propertynames.
        Test whether the XML of the WFS GetFeature call is generated correctly.
        """
        xml = owsutil.wfs_build_getfeature_request(
            'dov-pub:Boringen', propertyname=['fiche', 'diepte_tot_m'])
        assert clean_xml(etree.tostring(xml).decode('utf8')) == clean_xml(
            '<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" '
            'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
            'service="WFS" version="1.1.0" '
            'xsi:schemaLocation="http://www.opengis.net/wfs '
            'http://schemas.opengis.net/wfs/1.1.0/wfs.xsd"> <wfs:Query '
            'typeName="dov-pub:Boringen"> '
            '<wfs:PropertyName>fiche</wfs:PropertyName> '
            '<wfs:PropertyName>diepte_tot_m</wfs:PropertyName> <ogc:Filter/> '
            '</wfs:Query> </wfs:GetFeature>')
def test_wfs_build_getfeature_request_filter(self):
    """Test the owsutil.wfs_build_getfeature_request method with an
    attribute filter.

    Test whether the XML of the WFS GetFeature call is generated correctly.
    """
    # Build an OGC attribute filter selecting features where the
    # 'gemeente' property equals 'Herstappe'.
    query = PropertyIsEqualTo(propertyname='gemeente',
                              literal='Herstappe')
    filter_request = FilterRequest()
    filter_request = filter_request.setConstraint(query)
    try:
        # lxml can serialise directly to a text (unicode) string.
        filter_request = etree.tostring(filter_request,
                                        encoding='unicode')
    except LookupError:
        # Python2.7 without lxml uses 'utf-8' instead.
        filter_request = etree.tostring(filter_request,
                                        encoding='utf-8')
    # The serialised filter is passed verbatim into the GetFeature body.
    xml = owsutil.wfs_build_getfeature_request(
        'dov-pub:Boringen', filter=filter_request)
    assert clean_xml(etree.tostring(xml).decode('utf8')) == clean_xml(
        '<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" '
        'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
        'service="WFS" version="1.1.0" '
        'xsi:schemaLocation="http://www.opengis.net/wfs '
        'http://schemas.opengis.net/wfs/1.1.0/wfs.xsd"> <wfs:Query '
        'typeName="dov-pub:Boringen"> <ogc:Filter> '
        '<ogc:PropertyIsEqualTo> '
        '<ogc:PropertyName>gemeente</ogc:PropertyName> '
        '<ogc:Literal>Herstappe</ogc:Literal> </ogc:PropertyIsEqualTo> '
        '</ogc:Filter> </wfs:Query> </wfs:GetFeature>')
def test_wfs_build_getfeature_request_bbox_filter(self):
    """Test the owsutil.wfs_build_getfeature_request method with an
    attribute filter, a box and a geometry_column.

    Test whether the XML of the WFS GetFeature call is generated correctly.
    """
    # Attribute constraint: 'gemeente' equals 'Herstappe'.
    query = PropertyIsEqualTo(propertyname='gemeente',
                              literal='Herstappe')
    filter_request = FilterRequest()
    filter_request = filter_request.setConstraint(query)
    try:
        # lxml can serialise directly to a text (unicode) string.
        filter_request = etree.tostring(filter_request,
                                        encoding='unicode')
    except LookupError:
        # Python2.7 without lxml uses 'utf-8' instead.
        filter_request = etree.tostring(filter_request,
                                        encoding='utf-8')
    # Combining an attribute filter with a spatial constraint should
    # produce both wrapped in a single ogc:And element.
    xml = owsutil.wfs_build_getfeature_request(
        'dov-pub:Boringen', filter=filter_request,
        location=Within(Box(151650, 214675, 151750, 214775)),
        geometry_column='geom')
    assert clean_xml(etree.tostring(xml).decode('utf8')) == clean_xml(
        '<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" '
        'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
        'service="WFS" version="1.1.0" '
        'xsi:schemaLocation="http://www.opengis.net/wfs '
        'http://schemas.opengis.net/wfs/1.1.0/wfs.xsd"> <wfs:Query '
        'typeName="dov-pub:Boringen"> <ogc:Filter> <ogc:And> '
        '<ogc:PropertyIsEqualTo> '
        '<ogc:PropertyName>gemeente</ogc:PropertyName> '
        '<ogc:Literal>Herstappe</ogc:Literal> </ogc:PropertyIsEqualTo> '
        '<ogc:Within> <ogc:PropertyName>geom</ogc:PropertyName> '
        '<gml:Envelope xmlns:gml="http://www.opengis.net/gml" '
        'srsDimension="2" '
        'srsName="http://www.opengis.net/gml/srs/epsg.xml#31370"> '
        '<gml:lowerCorner>151650.000 214675.000</gml:lowerCorner> '
        '<gml:upperCorner>151750.000 214775.000</gml:upperCorner> '
        '</gml:Envelope> </ogc:Within> </ogc:And> </ogc:Filter> '
        '</wfs:Query> </wfs:GetFeature>')
def test_wfs_build_getfeature_request_bbox_filter_propertyname(self):
    """Test the owsutil.wfs_build_getfeature_request method with an
    attribute filter, a box, a geometry_column and a list of
    propertynames.

    Test whether the XML of the WFS GetFeature call is generated correctly.
    """
    # Attribute constraint: 'gemeente' equals 'Herstappe'.
    query = PropertyIsEqualTo(propertyname='gemeente',
                              literal='Herstappe')
    filter_request = FilterRequest()
    filter_request = filter_request.setConstraint(query)
    try:
        # lxml can serialise directly to a text (unicode) string.
        filter_request = etree.tostring(filter_request,
                                        encoding='unicode')
    except LookupError:
        # Python2.7 without lxml uses 'utf-8' instead.
        filter_request = etree.tostring(filter_request,
                                        encoding='utf-8')
    # Exercise all options at once: attribute filter, spatial filter
    # and a property-name selection; the filters should be combined in
    # ogc:And and the property names emitted as wfs:PropertyName.
    xml = owsutil.wfs_build_getfeature_request(
        'dov-pub:Boringen', filter=filter_request,
        location=Within(Box(151650, 214675, 151750, 214775)),
        geometry_column='geom', propertyname=['fiche', 'diepte_tot_m'])
    assert clean_xml(etree.tostring(xml).decode('utf8')) == clean_xml(
        '<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" '
        'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" '
        'service="WFS" version="1.1.0" '
        'xsi:schemaLocation="http://www.opengis.net/wfs '
        'http://schemas.opengis.net/wfs/1.1.0/wfs.xsd"> <wfs:Query '
        'typeName="dov-pub:Boringen"> '
        '<wfs:PropertyName>fiche</wfs:PropertyName> '
        '<wfs:PropertyName>diepte_tot_m</wfs:PropertyName> <ogc:Filter> '
        '<ogc:And> <ogc:PropertyIsEqualTo> '
        '<ogc:PropertyName>gemeente</ogc:PropertyName> '
        '<ogc:Literal>Herstappe</ogc:Literal> </ogc:PropertyIsEqualTo> '
        '<ogc:Within> <ogc:PropertyName>geom</ogc:PropertyName> '
        '<gml:Envelope xmlns:gml="http://www.opengis.net/gml" '
        'srsDimension="2" '
        'srsName="http://www.opengis.net/gml/srs/epsg.xml#31370"> '
        '<gml:lowerCorner>151650.000 214675.000</gml:lowerCorner> '
        '<gml:upperCorner>151750.000 214775.000</gml:upperCorner> '
        '</gml:Envelope> </ogc:Within> </ogc:And> </ogc:Filter> '
        '</wfs:Query> </wfs:GetFeature>')
|
/** @format */
import { GlobalConfig } from "../../config";
import { buildApiUrl, IQueryParams, NetworkClient } from "../../network";
import { IPagedEntity } from "../../models";
import { IAlert, IAlertSummary, ICreateAlertPayload, IUpdateAlertPayload } from "./models";
import { wrapWithErrorHandler } from "../../network/utils";
export * from "./enums";
/**
 * API client for plan alerts: CRUD operations plus subscribe/unsubscribe.
 *
 * All requests are routed through {@link wrapWithErrorHandler}, so failing
 * calls resolve to undefined instead of throwing.
 */
export class Alerts {
    /** Base path segment shared by all alert endpoints. */
    baseUrl: string = "alerts";
    /** Client used to perform the underlying HTTP requests. */
    networkClient: NetworkClient;

    constructor(networkClient: NetworkClient) {
        this.networkClient = networkClient;
    }

    /**
     * Retrieve an alert matching the given identifier.
     * @param planIdentifier - Identifier of the target plan
     * @param identifier - Id related to a specific alert
     * @returns A single alert, or undefined if not found
     */
    public async get(planIdentifier: string, identifier: string): Promise<IAlert | undefined> {
        // NOTE(review): unlike getAll(), this path has no leading "plans"
        // segment -- confirm this asymmetry against the API specification.
        const urlSegments = [planIdentifier, this.baseUrl, identifier];
        return await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            const res = await this.networkClient.get<IAlert>(url);
            return res;
        });
    }

    /**
     * Retrieve a list of alerts for a given plan.
     * @param planIdentifier - Identifier of the target plan
     * @param subscribedOnly - If true, only subscribed alerts will be returned
     * @param page - The page number to return (1-based)
     * @param pageSize - The number of items per page. Max is 100
     * @returns A paged collection of alert summaries, or undefined on failure
     */
    async getAll(planIdentifier: string, subscribedOnly: boolean = false, page: number = 1, pageSize = 20): Promise<IPagedEntity<IAlertSummary> | undefined> {
        const urlSegments = ["plans", planIdentifier, this.baseUrl];
        const queryParams: IQueryParams = {
            subscribedOnly,
            page,
            pageSize,
        };
        return await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            return await this.networkClient.get<IPagedEntity<IAlertSummary>>(url, queryParams);
        });
    }

    /**
     * Create a new alert for the given plan.
     * @param planIdentifier - Identifier of the target plan
     * @param alert - The alert to create
     * @returns The created alert, or undefined on failure
     */
    async create(planIdentifier: string, alert: ICreateAlertPayload): Promise<IAlert | undefined> {
        const urlSegments = [planIdentifier, this.baseUrl];
        const queryParams: IQueryParams = {
            userIdentifier: GlobalConfig.userIdentifier,
        };
        return await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            const res = await this.networkClient.post<IAlert>(url, alert, queryParams);
            return res;
        });
    }

    /**
     * Update an existing alert for the given plan.
     * @param planIdentifier - Identifier of the target plan
     * @param alertIdentifier - Identifier of the alert to update
     * @param alert - The new version of the alert
     * @returns The updated alert, or undefined on failure
     */
    async update(planIdentifier: string, alertIdentifier: string, alert: IUpdateAlertPayload): Promise<IAlert | undefined> {
        const urlSegments = [planIdentifier, this.baseUrl, alertIdentifier];
        const queryParams: IQueryParams = {
            userIdentifier: GlobalConfig.userIdentifier,
        };
        return await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            const res = await this.networkClient.put<IAlert>(url, alert, queryParams);
            return res;
        });
    }

    /**
     * Subscribe the current user to a given alert.
     * @param planIdentifier - Identifier of the target plan
     * @param identifier - Id related to a specific alert
     * @returns A promise that resolves once the subscribe request completes
     */
    async subscribe(planIdentifier: string, identifier: string): Promise<void> {
        const urlSegments = [planIdentifier, this.baseUrl, identifier, "subscribe"];
        const queryParams: IQueryParams = {
            userIdentifier: GlobalConfig.userIdentifier,
        };
        await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            await this.networkClient.put<boolean>(url, undefined, queryParams);
        });
    }

    /**
     * Unsubscribe the current user from a given alert.
     * @param planIdentifier - Identifier of the target plan
     * @param identifier - Id related to a specific alert
     * @returns A promise that resolves once the unsubscribe request completes
     */
    async unsubscribe(planIdentifier: string, identifier: string): Promise<void> {
        const urlSegments = [planIdentifier, this.baseUrl, identifier, "unsubscribe"];
        const queryParams: IQueryParams = {
            userIdentifier: GlobalConfig.userIdentifier,
        };
        await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            await this.networkClient.put(url, undefined, queryParams);
        });
    }

    /**
     * Delete an alert matching the given identifier for the given plan.
     * @param planIdentifier - Identifier of the target plan
     * @param identifier - Id related to a specific alert
     */
    async delete(planIdentifier: string, identifier: string): Promise<void> {
        const urlSegments = [planIdentifier, this.baseUrl, identifier];
        const queryParams: IQueryParams = {
            userIdentifier: GlobalConfig.userIdentifier,
        };
        await wrapWithErrorHandler(async () => {
            const url = buildApiUrl(urlSegments);
            await this.networkClient.deleteFromApi(url, undefined, queryParams);
        });
    }
}
|
<filename>project/src/main/java/project/parsing/knx/steps/DeleteTempFolderParsingStep.java
package project.parsing.knx.steps;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import project.parsing.knx.KNXProjectParsingContext;
import project.parsing.steps.ParsingStep;
/**
 * Parsing step that removes the temporary extraction folder created by
 * earlier steps of the KNX project parsing pipeline.
 */
public class DeleteTempFolderParsingStep implements ParsingStep<KNXProjectParsingContext> {

    private static final Logger LOG = LogManager.getLogger(DeleteTempFolderParsingStep.class);

    /**
     * Recursively deletes the temporary directory recorded in the parsing
     * context. A context without a temp directory is a no-op.
     *
     * @param context the parsing context holding the temp directory path
     * @throws IOException if the directory cannot be deleted
     */
    @Override
    public void process(final KNXProjectParsingContext context) throws IOException {
        if (context.getTempDirectory() == null) {
            return;
        }
        // Parameterized logging avoids building the message string when the
        // INFO level is disabled.
        LOG.info("Deleting path = {}", context.getTempDirectory());
        FileUtils.deleteDirectory(context.getTempDirectory().toFile());
    }
}
|
Multi-functional metasurface architecture for amplitude, polarization and wavefront control
Metasurfaces (MSs) have been utilized to manipulate different properties of electromagnetic waves. By combining local control over the wave amplitude, phase, and polarization into a single tunable structure, a multi-functional and reconfigurable metasurface can be realized, capable of full control over incident radiation. Here, we experimentally validate a multi-functional metasurface architecture for the microwave regime, where in principle variable loads are connected behind the backplane to reconfigurably shape the complex surface impedance. As a proof-of-concept step, we fabricate several metasurface instances with static loads in different configurations (surface mount capacitors and resistors of different values in different connection topologies) to validate the approach and showcase the different achievable functionalities. Specifically, we show perfect absorption for oblique incidence (both polarizations), broadband linear polarization conversion, and beam splitting, demonstrating control over the amplitude, polarization state, and wavefront, respectively. Measurements are performed in the 4-18 GHz range inside an anechoic chamber and show good agreement with theoretically-anticipated results. Our results clearly demonstrate the practical potential of the proposed architecture for reconfigurable electromagnetic wave manipulation.
Metasurfaces (MSs) have been utilized to manipulate different properties of electromagnetic waves. By combining local control over the wave amplitude, phase, and polarization into a single tunable structure, a multifunctional and reconfigurable metasurface can be realized, capable of full control over incident radiation. Here, we experimentally validate a multi-functional metasurface architecture for the microwave regime, where in principle variable loads are connected behind the backplane to reconfigurably shape the complex surface impedance. As a proof-of-concept step, we fabricate several metasurface instances with static loads in different configurations (surface mount capacitors and resistors of different values in different connection topologies) to validate the approach and showcase the different achievable functionalities. Specifically, we show perfect absorption for oblique incidence (both polarizations), broadband linear polarization conversion, and beam splitting, demonstrating control over the amplitude, polarization state, and wavefront, respectively. Measurements are performed in the 4-18 GHz range inside an anechoic chamber and show good agreement with theoretically-anticipated results. Our results clearly demonstrate the practical potential of the proposed architecture for reconfigurable electromagnetic wave manipulation.
I. INTRODUCTION
Metasurfaces, the single-layer version of metamaterials, have attracted considerable interest in recent years . They are capable of manipulating the amplitude, phase, and polarization of the incident electromagnetic wave by appropriately engineering the subwavelength, resonant building blocks (termed meta-atoms). Maximum functionality can be obtained by combining into a single metasurface the ability to locally control all three aforementioned characteristics . In physical terms, this amounts to locally manipulating the anisotropic, complex surface impedance (real and imaginary parts of the tensor elements) . This approach can lead to multi-functional and reconfigurable metasurfaces, which allow for maximum operation versatility . Currently, both static and reconfigurable versions of metasurfaces are being actively researched for a broad range of applications in the microwave and mmWave (5G) frequencies, including absorbers , isolators , filters , switchable screens , enhanced antennas , and wavefront shaping devices, which have been both theoretically studied and experimentally demonstrated .
In Ref. 4 we conceptualized a metasurface unit cell with locally-and continuously-tunable complex surface impedance, for multiple reconfigurable functions. Further developing this concept, in Ref. 15 we proposed a practical and scalable multi-functional metasurface architecture for the mi-crowave regime, where electronic integrated circuits are assembled behind the metasurface backplane in order to dynamically engineer the metasurface properties. The integrated circuits supply tunable resistance and capacitance loads to each meta-atom , allowing to locally shape the complex surface impedance. By co-simulating electronic chip and electromagnetic responses, we have theoretically showcased the potential of the proposed structure.
In this work, we report the essential step of experimentally validating the multi-functional metasurface architecture. To this end, we fabricate several instances of the metasurface with static loads mounted behind the backplane in place of the integrated circuits. The fixed surface-mounted devices (SMD), capacitors and resistors, are placed in different connection topologies for each metasurface instance. This allows to showcase different functionalities of (i) independent perfect absorption for the two linear polarizations under oblique incidence, (ii) polarization control illustrated by broadband linear polarization conversion, and (iii) wavefront manipulation illustrated by beam splitting. The replacement of the tunable integrated chips with fixed loads serves to experimentally validate the metasurface architecture prior to the costly assembling of the actual chips. Note that the chosen values of resistance and capacitance are within the capabilities of the custom chip implementation , so that the demonstrated functionalities are readily achievable with the reconfigurable version of the proposed multi-functional metasurface.
The remainder of the paper is organized as follows. In Sect. II we present the metasurface architecture and discuss fabrication and measurement details. Simulation and experimental results are presented in Sect. III for all three showcased functionalities. Finally, the conclusions appear in Sect. IV.
II. MULTI-FUNCTIONAL METASURFACE ARCHITECTURE
The multi-functional metasurface under study is a threemetallization-layer printed circuit board (PCB) structure based on a high-frequency Panasonic Megtron7N dielectric substrate (ε r = 3.35, tan δ = 0.002); the geometry is described in Fig. 1. The top metallization layer contains a 2 × 2 array of square copper patches (w × w = 3.95 mm × 3.95 mm) in a symmetric configuration ; the periodicity is square with lattice constant a = 9 mm (∼ λ 0 /7 at the frequency of 5 GHz). The second (middle) metallization layer is the metasurface backplane so that the structure operates in reflection (negligible transmission). Finally, the third (bottom) metallization layer accommodates the SMD components . This way, the loads are "hidden" beneath the backplane and do not interfere with the incident electromagnetic waves nor obstruct the aperture; connection between patches and loads is accomplished by means of through vias (TVs). The fabricated The four-patch unit cell geometry along with the possi-bilities for horizontal, vertical, and diagonal connections behind the backplane has been judiciously chosen so as to allow for electrically bestowing anisotropic properties in the surface impedance. Specifically, it provides the ability to address the x-and y-linear polarization independently (e.g. absorb at different frequencies or incident angles), as well as allow for linear polarization conversion. The current patterning of the bottom copper layer can accommodate two components in parallel for each of the x-and y-directions, and one series connection in the diagonal branch , we can couple the two orthogonal linear polarizations and achieve polarization conversion; by an appropriate capacitance C tot = C d /2 we can tune the supported resonances and achieve large aggregate spectral bandwidths. Finally, we can form supercells by assembling different components in the constituent unit cells. This is exemplified in Fig. 
1(f) where a supercell comprised of 8 9-mm-wide unit cells is formed from two blocks (4 unit cells each) with different capacitance value in the vertical connections (C 1 and C 2 , respectively). The supercell dimension exceeds the free space wavelength allowing to split the output beam, equivalent with a binary grating structure. For more details regarding our generic strategy towards multiple functionalities see Supplemental Material, Fig. 2(a). A corresponding X-ray view is depicted in Fig. 2(b); this inspection was used to verify the electrical continuity between patches, TVs and SMD pads. The metasurface mounted for measurement inside the anechoic chamber is depicted in Fig. 2(d). The assembled boards are mounted on the head of a motorized positioner, allowing for rotation of its mast and head. Standard-gain pyramidal horns are used as transmitting (Tx) and receiving (Rx) antennas; they are mounted either on tripods or on an arm attached on the rotating positioner mast or head , for obtaining 2D or 3D scattering pattern measurements. A vector network analyzer (Anritsu 37397D) feeds the horn antennas to perform S-parameter farfield measurements of the metasurface and of a reference reflective plate (of equal dimensions), used for normalization. A photograph of the whole bistatic measurement setup inside the anechoic chamber can be found in the Supplemental Material . The first metasurface functionality studied is perfect absorption, where the uniform RC loadings determine the resonance frequency and resonance "depth", primarily governed by the capacitance (C) and resistance (R), respectively. Appropriate RC pairs (SMD components) for two distinct resonances near 5 GHz were assembled on the two parallel x-and y-slots of each board, as illustrated in Fig. 1(d) and Fig. 2(c) . 
We measure the co-polarized reflection spectrum r = S 21 , where a minimum in reflection corresponds to an absorption maximum for our uniform metal-backed metasurface (no transmission and no diffraction orders allowed); note that cross-polarization coupling is negligible for this configuration, confirmed by both simulation and experiment. Figure 3(a) depicts results for normal incidence; simulation spectra were obtained through single-cell simulations (periodic boundary conditions) performed in CST Studio; specific details can be found in the Supplemental Material, Section S3. When the electric field is y-polarized(x-polarized), it is the RC 1 (RC 2 ) pairs in the vertical(horizontal) slots that govern the resonance. The measurement verifies that the resonance fre-quencies are to the left and right of 5 GHz in the two cases, as designed; small blueshifts in resonance frequency are attributed to inductive reactance from soldering of the SMD elements, more pronounced in the case of the 2.7 pF capacitor (RC 1 ). Moreover, the measured reflection dips are deeper than those predicted in the simulations, which is attributed to extra losses stemming from parasitic resistance of the SMD capacitors, soldering, and PCB materials. In anticipation of such additional resistive contributions, the nominal resistance values were chosen such that the metasurface is in the "undercoupled" regime, meaning that any extra resistance would push the operation point towards critical coupling and deepen the reflection minimum .
Results for oblique incidence inside the xz plane are depicted in Fig. 3(b). In the case denoted by RC 1 -TE(RC 2 -TM), the electric field is polarized along the y axis(x axis) leading to TE(TM) polarization. In all cases, the receiver horn is aligned at the specular reflection direction, since the metasurface is uniform and no higher diffraction orders are propagating. In the simulations, we observe that for the TE oblique incidence, as the angle increases the resonance depth diminishes without a visible frequency shift; the opposite holds for the TM-polarized case: the resonance frequency is visibly shifted (increased) without a change in the resonance depth. These trends are reproduced in the experiment corroborating the reasonably good agreement between simulation and measurement. A small discrepancy is observed for the TM polarization where the measured reflection minima become shallower as the incidence angle increases. However, this apparent change concerns small values: from −30 dB to −21 dB. Note that some discrepancy between simulation and measurement is expected since simulations refer to the infinitely-periodic metasurface, whereas measurements are conducted with a finite-size metasurface: 18 × 26 cells (162 mm × 234 mm). This discrepancy will be more pronounced for large incidence angles, since a smaller effective aperture is captured in this case.
III.2. Broadband Polarization Converter
The second functionality studied is broadband linearpolarization conversion in reflection by electrically rotating the principal axis of the surface impedance of the metasurface. The metasurface is again loaded uniformly, but the loading is placed diagonally inside the square unit cell, as in Fig. 1(e) and the top right panel of Fig. 2(c), so as to emulate a 45 •cut wire that couples orthogonal (x and y) linear polarizations. We have also assessed the combination of populating one horizontal and one vertical connection in the back of the cell, emulating an "L" shape geometry which has been also successfully used for polarization conversion , but found inferior performance. This time we only use capacitor loading, i.e., no resistors, to minimize absorption. Following a parametric simulation study, we found that the capacitance value required for broadband and high-amplitude crosspolarized reflection was around 0.5 pF. Since the minimum C-value of available commercial SMD components was in the The resonance frequencies are nicely reproduced in the experiment; the deeper reflection minima in the measurements are due to additional loss compared to the simulation (see text). (b) Oblique incidence inside the xz plane: TE polarization when E y involving the RC 1 loads and TM polarization when E x involving the RC 2 loads. As the incidence angle increases, the TE resonance depth decreases and the TM resonance frequency is blue shifted. order of 1 pF, we combine two SMD capacitors in series to attain the required value. Figure 4 presents the simulated and measured crosspolarized reflection (XPR) spectra, under normal incidence. The broadband XPR covering the entire X-band (8)(9)(10)(11)(12) arises as a combination of distinct features merging in a continuous aggregate band . In the Supplemental Material (Fig. S3) we investigate the field profiles of the individual resonances adopting a simplified metasurface model. 
The width of the main cross-polarized reflection band is found in good agreement between measurement and simulations and approaches ∼5 GHz. However, the spectral features within the XPR band are not completely matching. We attribute this discrepancy to the utilization of several sets of horn antennas (four in total) covering contiguous frequency ranges between 4 and 18 GHz (4-6 GHz, 6-8 GHz, and two higher-frequency bands), in order to perform this wideband measurement. The four measurements were subsequently stitched together to end up with the result depicted in Fig. 4. In particular, peaks near the edges of each measurement may be somewhat distorted. We think that this is the case with the first and second peaks, which are close to the stitching (horn swapping) at 8.2 GHz. This hypothesis is corroborated by the fact that the outlying peaks near 5.5 and 15 GHz show excellent agreement between simulation and measurement. Finally, we note that for oblique incidence the [FIG. 4 caption: Simulated and measured cross-polarized reflection spectra under normal incidence for the metasurface with diagonal loading consisting of two 0.8 pF capacitors in series. Note the broad high-amplitude cross-polarized spectrum covering the entire X-band (8-12 GHz) and the two isolated peaks near 5.5 and 15 GHz, which are also reproduced in the experiment.] bandwidth of XPR deteriorates; for angles above 15°, two reflection dips slice the XPR spectrum in three bands, as can be seen in the Supplemental Material (Fig. S5).
III.3. Wavefront Shaping
The last functionality studied is wavefront manipulation, exemplified through the case of beam splitting: A normally incident beam (plane wave) is divided in two approximately equal beams in symmetric oblique directions, i.e., ±θ s . This is accomplished by applying a (non-uniform) binary encoding across the aperture, as shown in Fig. 1(f) and bottom panel of Fig. 2(c). Specifically, two different capacitor loads are identified which lead to reflection coefficients that exhibit a near-unity amplitude and a 180 • phase difference at the operating frequency; for details, see Supplemental Material, Section S6. For our unit cell design and target band of 4-6 GHz, the two required capacitance values were in the vicinity of 1 and 3 pF, respectively. Subsequently, we assemble these loads so as to form supercells whose extent is larger than the wavelength (λ < p < 2λ , p is the supercell period) and implement a flat binary grating on the metasurface (a "stripes" pattern), which leads to first-order diffraction modes (scattered beams) in directions θ s = ±sin −1 {λ /p}, assuming normal incidence and infinite aperture. In our case, we opted for a period of p = 8a = 72 mm made from eight 9-mm-wide unit cells arranged in four columns of cells with C 1 = 0.8 pF loading followed by four columns of cells with C 2 = 2.7 pF loading; each cell contained two identical SMD capacitors placed in the vertical slots as shown in Fig. 1(f) and the bottom panel of Fig. 2(c). When the illuminating field is polarized parallel to these loads, the infinite grating produces diffraction lobes approximately in the θ s = ±51 • directions; longer periods lead to diffraction closer to specular reflection (normal, in our case), but could not be well-accommodated inside our finite metasurface. Semi-analytically predicted and measured co-polarized scattering gain pattern at 5.3 GHz for a normally illuminated metasurface configured as a binary grating. 
The grating is inscribed using two reactive loadings (0.8 and 2.7 pF) that exhibit a π phase difference at the specified frequency. The supercell period is p = 8a = 72 mm, which results in a two-beam splitting at approximately θ_s = ±51° (±1 diffraction orders).
The measured and semi-analytically extracted 2D scattering patterns, depicted as the metasurface gain normalized to the reference reflector (of same aperture), are illustrated in Fig. 5, showing overall good agreement, despite some degradation near the splitting maxima. In this case, the theoretical results are not from full-wave simulations but are based on the Huygens-Fresnel principle that estimates the scattered farfield pattern from the unit cell reflection coefficients using Fraunhofer diffraction superposition; implementation details for modeling the metasurface and the absorbing-foam frame seen in Fig. 2(d) can be found in the Supplemental Material (Section S7). The asymmetry in the scattering pattern with respect to the "right" and "left" lobe is due to the incommensurate number of cells between the metasurface aperture (26) and the grating period (8) in combination with the partially reflecting material that frames the effective aperture ; more details can be found in the Supplemental Material. This is also the cause of the split-lobe maxima appearing at slightly different angles than the prescribed (θ s = ±51 • ) in simulation and measurement.
Finally, we note that the very same structure can be used for beam steering, or anomalous reflection, as outlined in the Supplemental Material (Fig. S9).
IV. CONCLUSIONS
In conclusion, with this family of static-load metasurfaces we experimentally verify our approach towards a microwave multi-functional and reconfigurable metasurface. The simulation and experimental results have demonstrated successful amplitude, wavefront and polarization control. More generally, this study serves as a proof-of-concept for the broader, software-controlled intelligent metasurface vision, i.e., when the resistive and capacitive loads are supplied by chips embedded in the unit cells, forming an inter-connected controller network which is computer controlled . The measured performance using commercial-off-the-shelf (COTS) SMD loads exceeded our expectations, particularly for the demanding absorber functionality; consequently, we anticipate similar or improved in-band performance when using custom-designed chips instead of COTS components. Note that embedding such chips in the back-side of the unit-cells will not perturb the EM design and performance of the metasurface, owing to the decoupling offered by the backplane, nor obstruct its aperture. |
/**
* Created by jvettraino on 5/15/2015.
*/
public class AppUtils {
// Shared formatters: auditFormatter produces ISO-8601-style timestamps
// ("yyyy-MM-dd'T'HH:mm:ss'Z'") and dateOnlyFormatter date-only values.
// NOTE(review): SimpleDateFormat is not thread-safe; these shared static
// instances must not be used from multiple threads without synchronizing
// on the formatter instance.
private static SimpleDateFormat auditFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
private static SimpleDateFormat dateOnlyFormatter = new SimpleDateFormat("yyyy-MM-dd");
static{
    // All output is normalised to GMT regardless of the device time zone.
    auditFormatter.setTimeZone(TimeZone.getTimeZone("GMT"));
    dateOnlyFormatter.setTimeZone(TimeZone.getTimeZone("GMT"));
}
public static String getFileAssetAsString(AssetManager manager, String fileName) {
StringBuilder out = new StringBuilder();
InputStream inputStream = null;
try {
inputStream = manager.open(fileName);
BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null) {
out.append(line);
}
}catch(Exception e){
Log.e("CondorQueryLog", "Could not read file template" );
}finally{
if ( inputStream != null ){ try{ inputStream.close(); }catch(IOException e ){} }
}
return out.toString();
}
public static String formatAuditDate( Date date ){
return auditFormatter.format(date);
}
public static String formatDate( Date date ){
return dateOnlyFormatter.format(date);
}
public static String getUrl( Server server, String urlPath ) {
return server.getProtocol() + "://" + server.getAddress() + ":" + server.getPort() + urlPath;
}
public static String getDeviceId( Context context ) {
String id = Settings.Secure.getString(context.getContentResolver(), Settings.Secure.ANDROID_ID);
return id == null ? "Unknown" : id;
}
public static ArrayList<HeaderItem> getHeaderItems(Map<String, List<String>> headers) {
ArrayList<HeaderItem> headerList = new ArrayList<HeaderItem>();
if ( headers != null && !headers.isEmpty() ){
for( Map.Entry<String,List<String>> entry : headers.entrySet() ) {
String headerName = entry.getKey();
if ( entry.getKey() != null && !headerName.isEmpty() ) {
headerList.add(new HeaderItem(headerName, entry.getValue().get(0)));
}
}
}
return headerList;
}
/**
* Returns MAC address of the given interface name.
* @param interfaceName eth0, wlan0 or NULL=use first interface
* @return mac address or empty string
*/
public static String getMACAddress(String interfaceName) {
try {
List<NetworkInterface> interfaces = list(NetworkInterface.getNetworkInterfaces());
for (NetworkInterface intf : interfaces) {
if (interfaceName != null) {
if (!intf.getName().equalsIgnoreCase(interfaceName)) continue;
}
byte[] mac = intf.getHardwareAddress();
if (mac==null) return "";
StringBuilder buf = new StringBuilder();
for (int idx=0; idx<mac.length; idx++)
buf.append(String.format("%02X:", mac[idx]));
if (buf.length()>0) buf.deleteCharAt(buf.length()-1);
return buf.toString();
}
} catch (Exception ex) { } // for now eat exceptions
return "";
}
public static String getFirstIPAddress(){
String ip = getIPAddress(true);
return ip == null || ip.isEmpty() ? getIPAddress(false) : ip;
}
/**
* Get IP address from first non-localhost interface
* @param ipv4 true=return ipv4, false=return ipv6
* @return address or empty string
*/
public static String getIPAddress(boolean useIPv4) {
try {
List<NetworkInterface> interfaces = list(NetworkInterface.getNetworkInterfaces());
for (NetworkInterface intf : interfaces) {
List<InetAddress> addrs = list(intf.getInetAddresses());
for (InetAddress addr : addrs) {
if (!addr.isLoopbackAddress()) {
String sAddr = addr.getHostAddress().toUpperCase();
boolean isIPv4 = InetAddressUtils.isIPv4Address(sAddr);
if (useIPv4) {
if (isIPv4)
return sAddr;
} else {
if (!isIPv4) {
int delim = sAddr.indexOf('%'); // drop ip6 port suffix
return delim<0 ? sAddr : sAddr.substring(0, delim);
}
}
}
}
}
} catch (Exception ex) { } // for now eat exceptions
return "";
}
public static String replaceEach(
String text, String search, String replacement ) {
return replaceEach(text, new String[]{search}, new String[]{replacement});
}
//MEthod copied from Apache Commons StringUtils
public static String replaceEach(
String text, String[] searchList, String[] replacementList ) {
if (text == null || text.length() == 0 || searchList == null ||
searchList.length == 0 || replacementList == null || replacementList.length == 0) {
return text;
}
int searchLength = searchList.length;
int replacementLength = replacementList.length;
// make sure lengths are ok, these need to be equal
if (searchLength != replacementLength) {
throw new IllegalArgumentException("Search and Replace array lengths don't match: "
+ searchLength
+ " vs "
+ replacementLength);
}
// keep track of which still have matches
boolean[] noMoreMatchesForReplIndex = new boolean[searchLength];
// index on index that the match was found
int textIndex = -1;
int replaceIndex = -1;
int tempIndex = -1;
// index of replace array that will replace the search string found
// NOTE: logic duplicated below START
for (int i = 0; i < searchLength; i++) {
if (noMoreMatchesForReplIndex[i] || searchList[i] == null ||
searchList[i].length() == 0 || replacementList[i] == null) {
continue;
}
tempIndex = text.indexOf(searchList[i]);
// see if we need to keep searching for this
if (tempIndex == -1) {
noMoreMatchesForReplIndex[i] = true;
} else {
if (textIndex == -1 || tempIndex < textIndex) {
textIndex = tempIndex;
replaceIndex = i;
}
}
}
// NOTE: logic mostly below END
// no search strings found, we are done
if (textIndex == -1) {
return text;
}
int start = 0;
// get a good guess on the size of the result buffer so it doesn't have to double if it goes over a bit
int increase = 0;
// count the replacement text elements that are larger than their corresponding text being replaced
for (int i = 0; i < searchList.length; i++) {
if (searchList[i] == null || replacementList[i] == null) {
continue;
}
int greater = replacementList[i].length() - searchList[i].length();
if (greater > 0) {
increase += 3 * greater; // assume 3 matches
}
}
// have upper-bound at 20% increase, then let Java take over
increase = Math.min(increase, text.length() / 5);
StringBuilder buf = new StringBuilder(text.length() + increase);
while (textIndex != -1) {
for (int i = start; i < textIndex; i++) {
buf.append(text.charAt(i));
}
buf.append(replacementList[replaceIndex]);
start = textIndex + searchList[replaceIndex].length();
textIndex = -1;
replaceIndex = -1;
tempIndex = -1;
// find the next earliest match
// NOTE: logic mostly duplicated above START
for (int i = 0; i < searchLength; i++) {
if (noMoreMatchesForReplIndex[i] || searchList[i] == null ||
searchList[i].length() == 0 || replacementList[i] == null) {
continue;
}
tempIndex = text.indexOf(searchList[i], start);
// see if we need to keep searching for this
if (tempIndex == -1) {
noMoreMatchesForReplIndex[i] = true;
} else {
if (textIndex == -1 || tempIndex < textIndex) {
textIndex = tempIndex;
replaceIndex = i;
}
}
}
// NOTE: logic duplicated above END
}
int textLength = text.length();
for (int i = start; i < textLength; i++) {
buf.append(text.charAt(i));
}
String result = buf.toString();
return result;
}
} |
// GetAnchor is a free data retrieval call binding the contract method 0x4c7df18f.
//
// Solidity: function getAnchor(blockNumber uint256) constant returns(uint256)
func (_FactomAnchor *FactomAnchorCaller) GetAnchor(opts *bind.CallOpts, blockNumber *big.Int) (*big.Int, error) {
var (
ret0 = new(*big.Int)
)
out := ret0
err := _FactomAnchor.contract.Call(opts, out, "getAnchor", blockNumber)
return *ret0, err
} |
/**
* Strictly for testing.
* have to be in org.apache.bookkeeper.bookie to not introduce changes to InterleavedLedgerStorage
*/
public class SlowInterleavedLedgerStorage extends InterleavedLedgerStorage {
public static final String PROP_SLOW_STORAGE_FLUSH_DELAY = "test.slowStorage.flushDelay";
public static final String PROP_SLOW_STORAGE_ADD_DELAY = "test.slowStorage.addDelay";
public static final String PROP_SLOW_STORAGE_GET_DELAY = "test.slowStorage.getDelay";
/**
* Strictly for testing.
*/
public static class SlowEntryLogger extends EntryLogger {
public volatile long getDelay = 0;
public volatile long addDelay = 0;
public volatile long flushDelay = 0;
public SlowEntryLogger(ServerConfiguration conf, LedgerDirsManager ledgerDirsManager, EntryLogListener listener,
StatsLogger statsLogger) throws IOException {
super(conf, ledgerDirsManager, listener, statsLogger);
}
public SlowEntryLogger setAddDelay(long delay) {
addDelay = delay;
return this;
}
public SlowEntryLogger setGetDelay(long delay) {
getDelay = delay;
return this;
}
public SlowEntryLogger setFlushDelay(long delay) {
flushDelay = delay;
return this;
}
@Override
public void flush() throws IOException {
delayMs(flushDelay);
super.flush();
}
@Override
public long addEntry(long ledger, ByteBuf entry, boolean rollLog) throws IOException {
delayMs(addDelay);
return super.addEntry(ledger, entry, rollLog);
}
@Override
public ByteBuf readEntry(long ledgerId, long entryId, long location)
throws IOException, Bookie.NoEntryException {
delayMs(getDelay);
return super.readEntry(ledgerId, entryId, location);
}
private static void delayMs(long delay) {
if (delay < 1) {
return;
}
try {
Thread.sleep(delay);
} catch (InterruptedException e) {
//noop
}
}
}
public SlowInterleavedLedgerStorage() {
super();
}
@Override
public void initialize(ServerConfiguration conf,
LedgerManager ledgerManager,
LedgerDirsManager ledgerDirsManager,
LedgerDirsManager indexDirsManager,
StateManager stateManager,
CheckpointSource checkpointSource,
Checkpointer checkpointer,
StatsLogger statsLogger)
throws IOException {
super.initialize(conf, ledgerManager, ledgerDirsManager, indexDirsManager,
stateManager, checkpointSource, checkpointer, statsLogger);
// do not want to add these to config class, reading throw "raw" interface
long getDelay = conf.getLong(PROP_SLOW_STORAGE_GET_DELAY, 0);
long addDelay = conf.getLong(PROP_SLOW_STORAGE_ADD_DELAY, 0);
long flushDelay = conf.getLong(PROP_SLOW_STORAGE_FLUSH_DELAY, 0);
entryLogger = new SlowEntryLogger(conf, ledgerDirsManager, this, statsLogger)
.setAddDelay(addDelay)
.setGetDelay(getDelay)
.setFlushDelay(flushDelay);
}
public void setAddDelay(long delay) {
((SlowEntryLogger) entryLogger).setAddDelay(delay);
}
public void setGetDelay(long delay) {
((SlowEntryLogger) entryLogger).setGetDelay(delay);
}
public void setFlushDelay(long delay) {
((SlowEntryLogger) entryLogger).setFlushDelay(delay);
}
} |
<reponame>Andrei94/WritingAwesomeJavaCodeWorkshop
package de.stevenschwenke.java.writingawesomejavacodeworkshop.part1JavaLanguageAndMethods.c09_lombok.builder;
import lombok.Builder;
import lombok.NonNull;
import lombok.Singular;
import java.util.Set;
@Builder
public class BuilderExample {
@NonNull private String name;
@NonNull private int age;
@Singular private Set<String> occupations;
} |
/* Check if chr is contained in the string.
   Returns chr itself when found, 0 (the terminating NUL) otherwise. */
static int chk_chr (const char* str, int chr)
{
    for (; *str; str++) {
        if (*str == chr) break;
    }
    return *str;
}
/**
* @param rawMessage the raw message entered by the user
* @throws CommandParseException thrown if no command was found
* @throws CommandArgumentException thrown if the command was run and its onCommand method returned false
*/
public void parseCommand(@NotNull String rawMessage, @NotNull User user, @NotNull TextChannel textChannel) throws CommandParseException, CommandArgumentException {
if (!rawMessage.startsWith(prefix)) {
throw new CommandParseException("Message does not start with prefix.");
}
boolean safe = rawMessage.split(prefix, 2).length > 1;
if (safe) {
String beheadedRawMessage = rawMessage.substring(1);
String[] args = beheadedRawMessage.split(" ");
String commandName = args[0];
if (args.length == 1) {
args = new String[0];
} else {
args = Arrays.copyOfRange(args, 1, args.length);
}
Command command = getCommand(commandName);
if (command == null) {
throw new CommandParseException("Command not found.");
}
if (!command.onCommand(user, textChannel, args)) {
throw new CommandArgumentException(command.usage());
}
} else {
throw new CommandParseException("Command not found.");
}
} |
<gh_stars>1-10
package net.tarilabs.joind_ex042015;
import static java.util.stream.Collectors.*;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
import java.util.stream.StreamSupport;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Fetches talks and their comments for a joind.in event, computes per-talk
 * rating statistics (split by anonymous vs. named commenters), and appends
 * them to a CSV file under a timestamped data directory.
 */
public class FetchData {
    static final Logger LOG = LoggerFactory.getLogger(FetchData.class);
    // Set once in main(): ./data/<timestamp>/<eventId>/ — all output goes here.
    private static String dataPathPrefix ;
    static Client client = ClientBuilder.newBuilder().build();
    /**
     * Entry point. args[0] is the joind.in event id; defaults to "3347".
     */
    public static void main( String[] args ) throws JsonProcessingException, IOException {
        String eventId = null;
        if (args.length == 0) {
            eventId = "3347";
        } else {
            eventId = args[0];
        }
        dataPathPrefix = "."+File.separator+"data"+File.separator+LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyyMMddHHmmss"))+File.separator+eventId+File.separator;
        String talksContent = getTalksJSON(eventId);
        ObjectMapper mapper = new ObjectMapper();
        JsonNode rootNode = mapper.readTree(talksContent);
        List<TalkRecord> talks = StreamSupport.stream(rootNode.path("talks").spliterator(), false)
                .map(TalkRecord::fromJsonNode)
                .collect(toList());
        for (TalkRecord t : talks) { // Using external iteration is not FP-style programming, but in this point I preferred to start this way and possibly refactor later.
            LOG.info("Fetching for talk {} {} ", t.getId(), t.getTalk_title());
            JsonNode commentNode = mapper.readTree(getCommentsJSON(eventId, t.getId()));
            List<Comment> commentsForTalk = StreamSupport.stream(commentNode.path("comments").spliterator(), false)
                    .map(Comment::fromJsonNode)
                    .collect(new AvoidAnonDupCommentsCollector());
            // Count of comments, partitioned by anonymous vs. named.
            Map<Boolean, Long> anonOrNot = commentsForTalk.stream()
                    .collect(partitioningBy(Comment::isAnon,
                            counting()));
            // Average rating for each partition.
            Map<Boolean, Double> anonPartAvg = commentsForTalk.stream()
                    .collect(partitioningBy(Comment::isAnon,
                            averagingInt(Comment::getRating)));
            Double totalAvg = commentsForTalk.stream()
                    .collect(averagingInt(Comment::getRating));
            TalkRecordStats talkRecordStats = new TalkRecordStats(t, anonOrNot, anonPartAvg, totalAvg);
            LOG.info("Fetching for talk {} stats are {}", t.getId(), talkRecordStats);
            appendToCsv(eventId, talkRecordStats);
        }
    }
    /** Appends one CSV line for the talk's stats; errors are logged, not thrown. */
    private static void appendToCsv(String eventId, TalkRecordStats talkRecordStats) {
        try {
            Path path = Paths.get(dataPathPrefix+"stats.csv");
            Files.write(path, talkRecordStats.toCvsLine().getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
            Files.write(path, "\n".getBytes(), StandardOpenOption.APPEND);
        } catch (IOException e) {
            LOG.error("Unable to append stats {} to file.", talkRecordStats, e);
        }
    }
    /**
     * GETs a joind.in API resource and mirrors the raw JSON to a file under
     * dataPathPrefix (best effort; save failures are only logged).
     * Shared by getTalksJSON and getCommentsJSON, which previously duplicated
     * this logic.
     */
    private static String fetchJSON(String uri, String fileName) {
        WebTarget target = client.target(uri);
        LOG.info("About to get {}", target.getUri());
        Response response = target.request().get();
        String talksContent = response.readEntity(String.class);
        response.close();
        if (LOG.isDebugEnabled()) LOG.debug("response: {} ", talksContent);
        try {
            Path path = Paths.get(dataPathPrefix+fileName);
            Files.createDirectories(path.getParent());
            Files.write(path, talksContent.getBytes());
        } catch (Exception e) {
            LOG.warn("Unable to save target {} to file.", target.getUri(), e);
        }
        return talksContent;
    }
    /** Fetches the full talk list for the event (and saves talks.json). */
    private static String getTalksJSON(String eventId) {
        return fetchJSON("http://api.joind.in/v2.1/events/"+eventId+"/talks?resultsperpage=0", "talks.json");
    }
    /** Fetches all comments for a talk (and saves talk-&lt;id&gt;-comments.json). */
    private static String getCommentsJSON(String eventId, String talkId) {
        return fetchJSON("http://api.joind.in/v2.1/talks/"+talkId+"/comments?resultsperpage=0", "talk-"+talkId+"-comments.json");
    }
}
|
def post_save(cls, sender, document, **kwargs):
    # Mirror the saved element into the AO reference store, but only when no
    # reference row for this element exists yet (avoids duplicates).
    existing = ElementReferenceDB.objects.filter(element=document).first()
    if not existing:
        ElementReferenceAO(element=document.to_obj()).save()
/**
 * Non-metric Space Library
 *
 * Main developers: Bilegsaikhan Naidan, Leonid Boytsov, Yury Malkov, Ben Frederickson, David Novak
 *
 * For the complete list of contributors and further details see:
 * https://github.com/nmslib/nmslib
 *
 * Copyright (c) 2013-2018
 *
 * This code is released under the
 * Apache License Version 2.0 http://www.apache.org/licenses/.
 *
 */
#ifndef _INIT_H_
#define _INIT_H_
/*
 * Library-wide one-time initialization.
 *
 * This function should be called only *ONCE*,
 * but before actually using library functionality.
 *
 * Parameters (see logging.h for LogChoice values):
 *   seed     - presumably seeds the library's random number generator;
 *              confirm against the implementation.
 *   choice   - logging destination selector; LIB_LOGNONE appears to disable
 *              logging (default).
 *   pLogFile - log file path; presumably only consulted when `choice`
 *              selects file logging, NULL otherwise.
 */
#include "logging.h"
namespace similarity {
void initLibrary(int seed = 0, LogChoice choice = LIB_LOGNONE, const char*pLogFile = NULL);
}
#endif
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.