text (string, lengths 2 to 100k) | meta (dict) |
---|---|
var convert = require('./convert'),
func = convert('get', require('../get'));
func.placeholder = require('./placeholder');
module.exports = func;
| {
"pile_set_name": "Github"
} |
/*
* Tigase XMPP Server - The instant messaging server
* Copyright (C) 2004 Tigase, Inc. ([email protected])
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. Look for COPYING file in the top folder.
* If not, see http://www.gnu.org/licenses/.
*/
package tigase.kernel.beans;
/**
 * If a bean implements this interface, then once the bean has been created and configured, the Kernel calls
 * {@link Initializable#initialize()}.
*/
public interface Initializable {
/**
 * Called when the bean has been created, configured, and is ready to use.
*/
void initialize();
}
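/*
 * Illustrative sketch only, not part of the Tigase code base: a minimal bean
 * using this contract. The class name and its behaviour are assumptions made
 * for the example; registration of the bean with the Kernel is omitted.
 */
class ExampleBean implements Initializable {

	@Override
	public void initialize() {
		// Invoked by the Kernel once the bean has been created and configured.
		System.out.println("ExampleBean is ready");
	}
}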
| {
"pile_set_name": "Github"
} |
#!/bin/bash
# author: Mike Snitzer <[email protected]>
# desc: used to make lm_sensors metrics available to ganglia
# /etc/sysconfig/ganglia is used to specify MCAST_IF (the multicast interface)
CONFIG=/etc/sysconfig/ganglia
[ -f $CONFIG ] && . $CONFIG
#default to eth0
if [ -z "$MCAST_IF" ]; then
MCAST_IF=eth0
fi
GMETRIC_BIN=/usr/bin/gmetric
# establish a base commandline
GMETRIC="$GMETRIC_BIN -i $MCAST_IF"
SENSORS=/usr/bin/sensors
# load the lm_sensors modules
module=`/sbin/lsmod | awk '{print $1}' | grep i2c-piix4`
if [ -z "$module" ]; then
/sbin/modprobe i2c-piix4
# lm87 is for supermicro P3TDLE, replace when appropriate
/sbin/modprobe lm87
fi
# send metrics only if gmond is running
if /sbin/service gmond status > /dev/null; then
# send cpu temperatures
let count=0
for temp in `${SENSORS} | grep emp | cut -b 13-16`; do
$GMETRIC -t float -n "cpu${count}_temp" -u "C" -v $temp
let count+=1
done
# send cpu fan speed
let count=0
for fan in `${SENSORS} | grep fan | cut -b 9-14`; do
$GMETRIC -t uint32 -n "cpu${count}_fan" -u "RPM" -v $fan
let count+=1
done
fi
| {
"pile_set_name": "Github"
} |
/**
* Database schema required by \yii\i18n\DbMessageSource.
*
* @author Dmitry Naumenko <[email protected]>
* @link http://www.yiiframework.com/
* @copyright 2008 Yii Software LLC
* @license http://www.yiiframework.com/license/
* @since 2.0.7
*/
if object_id('[source_message]', 'U') is not null
drop table [source_message];
if object_id('[message]', 'U') is not null
drop table [message];
CREATE TABLE [source_message]
(
[id] integer IDENTITY PRIMARY KEY,
[category] varchar(255),
[message] text
);
CREATE TABLE [message]
(
[id] integer NOT NULL,
[language] varchar(16) NOT NULL,
[translation] text
);
ALTER TABLE [message] ADD CONSTRAINT [pk_message_id_language] PRIMARY KEY ([id], [language]);
ALTER TABLE [message] ADD CONSTRAINT [fk_message_source_message] FOREIGN KEY ([id]) REFERENCES [source_message] ([id]) ON UPDATE CASCADE ON DELETE NO ACTION;
CREATE INDEX [idx_message_language] on [message] ([language]);
CREATE INDEX [idx_source_message_category] on [source_message] ([category]);
| {
"pile_set_name": "Github"
} |
const path = require('path')
const createTestProject = require('@vue/cli-test-utils/createTestProject')
const Service = require('@vue/cli-service')
async function create (name, presets) {
const project = await createTestProject(
name, presets, path.join(process.cwd(), './tests/e2e/projects')
)
// point the project's vue-cli-plugin-p11n devDependency at the local repository root
const pkg = JSON.parse(await project.read('package.json'))
pkg.devDependencies['vue-cli-plugin-p11n'] = '../../../..'
await project.write('package.json', JSON.stringify(pkg, null, 2))
/*
jest.mock('@vue/cli-shared-utils')
const utils = require('@vue/cli-shared-utils')
utils.loadModule.mockReturnValueOnce(require('./generator'))
*/
const invoke = require('@vue/cli/lib/invoke')
await invoke('p11n', {}, project.dir)
return Promise.resolve(project)
}
function createMockService (plugins = [], context = '/', init = true, mode) {
const service = new Service(context, {
plugins,
useBuiltIn: false
})
if (init) {
service.init(mode)
}
return service
}
module.exports = {
create,
createMockService
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state;
/**
* The {@link AlertFirmness} enum is used to represent whether an alert should
* be considered as a real alert or whether it could still potentially be a
* false positive. Alerts which are {@link #SOFT} must have more occurrences in
* order to rule out the possibility of a false positive.
*/
public enum AlertFirmness {
/**
* The alert is a potential false positive and needs more instances to be
* confirmed.
*/
SOFT,
/**
* The alert is not a potential false-positive.
*/
HARD;
}
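/*
 * Illustrative sketch, not Ambari's actual promotion logic: a hypothetical
 * helper that keeps an alert SOFT until it has fired a required number of
 * consecutive times, after which it can be treated as HARD.
 */
class AlertFirmnessExample {

  static AlertFirmness firmnessFor(int consecutiveOccurrences, int requiredOccurrences) {
    return consecutiveOccurrences >= requiredOccurrences ? AlertFirmness.HARD : AlertFirmness.SOFT;
  }
}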
| {
"pile_set_name": "Github"
} |
---
- id: a81ea4ad-bc9f-49a7-82d4-4466df641487
name: 7.A.1 - Screen Capturing
description: Load custom PowerShell module and take screenshots.
tactic: collection
technique:
attack_id: T1113
name: Screen Capture
platforms:
windows:
psh,pwsh:
command: |
if (! $(test-path -path "C:\Program Files\SysinternalsSuite\psversion.ps1")) {
write-host "[!] The path C:\Program Files\SysinternalsSuite\psversion.ps1 does not exist. Execution has stopped.";
exit 1;
}
Set-Location -path "C:\Program Files\SysinternalsSuite";
. .\psversion.ps1;
Invoke-ScreenCapture; Start-Sleep -Seconds 3; View-Job -JobName "Screenshot";
| {
"pile_set_name": "Github"
} |
/**********************************************************************/
/* ____ ____ */
/* / /\/ / */
/* /___/ \ / */
/* \ \ \/ */
/* \ \ Copyright (c) 2003-2009 Xilinx, Inc. */
/* / / All Right Reserved. */
/* /---/ /\ */
/* \ \ / \ */
/* \___\/\___\ */
/***********************************************************************/
/* This file is designed for use with ISim build 0x16fbe694 */
#define XSI_HIDE_SYMBOL_SPEC true
#include "xsi.h"
#include <memory.h>
#ifdef __GNUC__
#include <stdlib.h>
#else
#include <malloc.h>
#define alloca _alloca
#endif
static const char *ng0 = "D:/ise/ISE_DS/ISE/verilog/src/glbl.v";
static unsigned int ng1[] = {1U, 0U};
static unsigned int ng2[] = {0U, 0U};
static void NetDecl_15_0(char *t0)
{
char *t1;
char *t2;
char *t3;
char *t4;
char *t5;
char *t6;
char *t7;
unsigned int t8;
unsigned int t9;
char *t10;
unsigned int t11;
unsigned int t12;
char *t13;
unsigned int t14;
unsigned int t15;
char *t16;
LAB0: t1 = (t0 + 6640U);
t2 = *((char **)t1);
if (t2 == 0)
goto LAB2;
LAB3: goto *t2;
LAB2: xsi_set_current_line(15, ng0);
t2 = (t0 + 1960U);
t3 = *((char **)t2);
t2 = (t0 + 8328);
t4 = (t2 + 56U);
t5 = *((char **)t4);
t6 = (t5 + 56U);
t7 = *((char **)t6);
memset(t7, 0, 8);
t8 = 1U;
t9 = t8;
t10 = (t3 + 4);
t11 = *((unsigned int *)t3);
t8 = (t8 & t11);
t12 = *((unsigned int *)t10);
t9 = (t9 & t12);
t13 = (t7 + 4);
t14 = *((unsigned int *)t7);
*((unsigned int *)t7) = (t14 | t8);
t15 = *((unsigned int *)t13);
*((unsigned int *)t13) = (t15 | t9);
xsi_driver_vfirst_trans(t2, 0, 0U);
t16 = (t0 + 8200);
*((int *)t16) = 1;
LAB1: return;
}
static void Cont_44_1(char *t0)
{
char *t1;
char *t2;
char *t3;
char *t4;
char *t5;
char *t6;
char *t7;
char *t8;
char *t9;
unsigned int t10;
unsigned int t11;
char *t12;
unsigned int t13;
unsigned int t14;
char *t15;
unsigned int t16;
unsigned int t17;
char *t18;
LAB0: t1 = (t0 + 6888U);
t2 = *((char **)t1);
if (t2 == 0)
goto LAB2;
LAB3: goto *t2;
LAB2: xsi_set_current_line(44, ng0);
t2 = (t0 + 3320);
t3 = (t2 + 56U);
t4 = *((char **)t3);
t5 = (t0 + 8392);
t6 = (t5 + 56U);
t7 = *((char **)t6);
t8 = (t7 + 56U);
t9 = *((char **)t8);
memset(t9, 0, 8);
t10 = 1U;
t11 = t10;
t12 = (t4 + 4);
t13 = *((unsigned int *)t4);
t10 = (t10 & t13);
t14 = *((unsigned int *)t12);
t11 = (t11 & t14);
t15 = (t9 + 4);
t16 = *((unsigned int *)t9);
*((unsigned int *)t9) = (t16 | t10);
t17 = *((unsigned int *)t15);
*((unsigned int *)t15) = (t17 | t11);
xsi_driver_vfirst_trans(t5, 0, 0);
t18 = (t0 + 8216);
*((int *)t18) = 1;
LAB1: return;
}
static void Cont_45_2(char *t0)
{
char *t1;
char *t2;
char *t3;
char *t4;
char *t5;
char *t6;
char *t7;
char *t8;
char *t9;
unsigned int t10;
unsigned int t11;
char *t12;
unsigned int t13;
unsigned int t14;
char *t15;
unsigned int t16;
unsigned int t17;
char *t18;
LAB0: t1 = (t0 + 7136U);
t2 = *((char **)t1);
if (t2 == 0)
goto LAB2;
LAB3: goto *t2;
LAB2: xsi_set_current_line(45, ng0);
t2 = (t0 + 3480);
t3 = (t2 + 56U);
t4 = *((char **)t3);
t5 = (t0 + 8456);
t6 = (t5 + 56U);
t7 = *((char **)t6);
t8 = (t7 + 56U);
t9 = *((char **)t8);
memset(t9, 0, 8);
t10 = 1U;
t11 = t10;
t12 = (t4 + 4);
t13 = *((unsigned int *)t4);
t10 = (t10 & t13);
t14 = *((unsigned int *)t12);
t11 = (t11 & t14);
t15 = (t9 + 4);
t16 = *((unsigned int *)t9);
*((unsigned int *)t9) = (t16 | t10);
t17 = *((unsigned int *)t15);
*((unsigned int *)t15) = (t17 | t11);
xsi_driver_vfirst_trans(t5, 0, 0);
t18 = (t0 + 8232);
*((int *)t18) = 1;
LAB1: return;
}
static void Cont_46_3(char *t0)
{
char *t1;
char *t2;
char *t3;
char *t4;
char *t5;
char *t6;
char *t7;
char *t8;
char *t9;
unsigned int t10;
unsigned int t11;
char *t12;
unsigned int t13;
unsigned int t14;
char *t15;
unsigned int t16;
unsigned int t17;
char *t18;
LAB0: t1 = (t0 + 7384U);
t2 = *((char **)t1);
if (t2 == 0)
goto LAB2;
LAB3: goto *t2;
LAB2: xsi_set_current_line(46, ng0);
t2 = (t0 + 3640);
t3 = (t2 + 56U);
t4 = *((char **)t3);
t5 = (t0 + 8520);
t6 = (t5 + 56U);
t7 = *((char **)t6);
t8 = (t7 + 56U);
t9 = *((char **)t8);
memset(t9, 0, 8);
t10 = 1U;
t11 = t10;
t12 = (t4 + 4);
t13 = *((unsigned int *)t4);
t10 = (t10 & t13);
t14 = *((unsigned int *)t12);
t11 = (t11 & t14);
t15 = (t9 + 4);
t16 = *((unsigned int *)t9);
*((unsigned int *)t9) = (t16 | t10);
t17 = *((unsigned int *)t15);
*((unsigned int *)t15) = (t17 | t11);
xsi_driver_vfirst_trans(t5, 0, 0);
t18 = (t0 + 8248);
*((int *)t18) = 1;
LAB1: return;
}
static void Initial_48_4(char *t0)
{
char *t1;
char *t2;
char *t3;
char *t4;
LAB0: t1 = (t0 + 7632U);
t2 = *((char **)t1);
if (t2 == 0)
goto LAB2;
LAB3: goto *t2;
LAB2: xsi_set_current_line(48, ng0);
LAB4: xsi_set_current_line(49, ng0);
t2 = ((char*)((ng1)));
t3 = (t0 + 3320);
xsi_vlogvar_assign_value(t3, t2, 0, 0, 1);
xsi_set_current_line(50, ng0);
t2 = ((char*)((ng1)));
t3 = (t0 + 3640);
xsi_vlogvar_assign_value(t3, t2, 0, 0, 1);
xsi_set_current_line(51, ng0);
t2 = (t0 + 7440);
xsi_process_wait(t2, 100000LL);
*((char **)t1) = &&LAB5;
LAB1: return;
LAB5: xsi_set_current_line(52, ng0);
t3 = ((char*)((ng2)));
t4 = (t0 + 3320);
xsi_vlogvar_assign_value(t4, t3, 0, 0, 1);
xsi_set_current_line(53, ng0);
t2 = ((char*)((ng2)));
t3 = (t0 + 3640);
xsi_vlogvar_assign_value(t3, t2, 0, 0, 1);
goto LAB1;
}
static void Initial_56_5(char *t0)
{
char *t1;
char *t2;
char *t3;
char *t4;
LAB0: t1 = (t0 + 7880U);
t2 = *((char **)t1);
if (t2 == 0)
goto LAB2;
LAB3: goto *t2;
LAB2: xsi_set_current_line(56, ng0);
LAB4: xsi_set_current_line(57, ng0);
t2 = ((char*)((ng1)));
t3 = (t0 + 3480);
xsi_vlogvar_assign_value(t3, t2, 0, 0, 1);
xsi_set_current_line(58, ng0);
t2 = (t0 + 7688);
xsi_process_wait(t2, 0LL);
*((char **)t1) = &&LAB5;
LAB1: return;
LAB5: xsi_set_current_line(59, ng0);
t3 = ((char*)((ng2)));
t4 = (t0 + 3480);
xsi_vlogvar_assign_value(t4, t3, 0, 0, 1);
goto LAB1;
}
extern void work_m_00000000002013452923_2073120511_init()
{
static char *pe[] = {(void *)NetDecl_15_0,(void *)Cont_44_1,(void *)Cont_45_2,(void *)Cont_46_3,(void *)Initial_48_4,(void *)Initial_56_5};
xsi_register_didat("work_m_00000000002013452923_2073120511", "isim/test_isim_beh.exe.sim/work/m_00000000002013452923_2073120511.didat");
xsi_register_executes(pe);
}
| {
"pile_set_name": "Github"
} |
package buildutil
import (
"fmt"
"go/build"
"io"
"io/ioutil"
"os"
"path"
"path/filepath"
"sort"
"strings"
"time"
)
// FakeContext returns a build.Context for the fake file tree specified
// by pkgs, which maps package import paths to a mapping from file base
// names to contents.
//
// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
// the necessary file access methods to read from memory instead of the
// real file system.
//
// Unlike a real file tree, the fake one has only two levels---packages
// and files---so ReadDir("/go/src/") returns all packages under
// /go/src/ including, for instance, "math" and "math/big".
// ReadDir("/go/src/math/big") would return all the files in the
// "math/big" package.
//
func FakeContext(pkgs map[string]map[string]string) *build.Context {
clean := func(filename string) string {
f := path.Clean(filepath.ToSlash(filename))
// Removing "/go/src" while respecting segment
// boundaries has this unfortunate corner case:
if f == "/go/src" {
return ""
}
return strings.TrimPrefix(f, "/go/src/")
}
ctxt := build.Default // copy
ctxt.GOROOT = "/go"
ctxt.GOPATH = ""
ctxt.IsDir = func(dir string) bool {
dir = clean(dir)
if dir == "" {
return true // needed by (*build.Context).SrcDirs
}
return pkgs[dir] != nil
}
ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
dir = clean(dir)
var fis []os.FileInfo
if dir == "" {
// enumerate packages
for importPath := range pkgs {
fis = append(fis, fakeDirInfo(importPath))
}
} else {
// enumerate files of package
for basename := range pkgs[dir] {
fis = append(fis, fakeFileInfo(basename))
}
}
sort.Sort(byName(fis))
return fis, nil
}
ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
filename = clean(filename)
dir, base := path.Split(filename)
content, ok := pkgs[path.Clean(dir)][base]
if !ok {
return nil, fmt.Errorf("file not found: %s", filename)
}
return ioutil.NopCloser(strings.NewReader(content)), nil
}
ctxt.IsAbsPath = func(path string) bool {
path = filepath.ToSlash(path)
// Don't rely on the default (filepath.IsAbs) since on
// Windows, it reports virtual paths as non-absolute.
return strings.HasPrefix(path, "/")
}
return &ctxt
}
type byName []os.FileInfo
func (s byName) Len() int { return len(s) }
func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
type fakeFileInfo string
func (fi fakeFileInfo) Name() string { return string(fi) }
func (fakeFileInfo) Sys() interface{} { return nil }
func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
func (fakeFileInfo) IsDir() bool { return false }
func (fakeFileInfo) Size() int64 { return 0 }
func (fakeFileInfo) Mode() os.FileMode { return 0644 }
type fakeDirInfo string
func (fd fakeDirInfo) Name() string { return string(fd) }
func (fakeDirInfo) Sys() interface{} { return nil }
func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
func (fakeDirInfo) IsDir() bool { return true }
func (fakeDirInfo) Size() int64 { return 0 }
func (fakeDirInfo) Mode() os.FileMode { return 0755 }
| {
"pile_set_name": "Github"
} |
{ 'struct': 'foo',
'data': false }
| {
"pile_set_name": "Github"
} |
module Choco
class PluginGenerator < Thor::Group
include Thor::Actions
def self.source_root
File.dirname(__FILE__)
end
desc 'Generates a Choco plugin (Sammy or js-model)'
argument :name
class_option :for_lib
def create_model_file
@plugin_name = name.camelcase.singularize
@name = name.underscore.singularize
template('templates/lib/plugin.js', "lib/plugin_#{@name}.js")
end
end
end
| {
"pile_set_name": "Github"
} |
accessing class hierarchy
objectMemoryClass
^Spur32BitCoMemoryManager
| {
"pile_set_name": "Github"
} |
define(
//begin v1.x content
{
"dateFormatItem-yyyyMMMEd": "G y. MMM d., E",
"field-dayperiod": "napszak",
"field-minute": "perc",
"dateFormatItem-MMMEd": "MMM d., E",
"field-day-relative+-1": "tegnap",
"dateFormatItem-hms": "a h:mm:ss",
"field-day-relative+-2": "tegnapelőtt",
"field-weekday": "hét napja",
"dateFormatItem-MMM": "LLL",
"dateFormatItem-Gy": "G y",
"field-era": "éra",
"field-hour": "óra",
"dateFormatItem-y": "G y",
"dateFormatItem-yyyy": "G y",
"dateFormatItem-Ed": "d., E",
"field-day-relative+0": "ma",
"field-day-relative+1": "holnap",
"field-day-relative+2": "holnapután",
"dateFormatItem-yyyyMMMM": "G y. MMMM",
"dateFormatItem-GyMMMd": "G y. MMM d.",
"dateFormat-long": "G y. MMMM d.",
"field-zone": "időzóna",
"dateFormatItem-Hm": "H:mm",
"field-week-relative+-1": "Előző hét",
"dateFormat-medium": "G y.MM.dd.",
"dateFormatItem-Hms": "H:mm:ss",
"field-year-relative+0": "Ez az év",
"field-year-relative+1": "Következő év",
"field-year-relative+-1": "Előző év",
"dateFormatItem-ms": "mm:ss",
"dateFormatItem-yyyyQQQQ": "G y. QQQQ",
"field-year": "év",
"field-week": "hét",
"dateFormatItem-yyyyMd": "G y.MM.dd.",
"dateFormatItem-yyyyMMMd": "G y. MMM d.",
"dateFormatItem-yyyyMEd": "G y.MM.dd., E",
"dateFormatItem-MMMd": "MMM d.",
"field-week-relative+0": "Ez a hét",
"field-week-relative+1": "Következő hét",
"field-month-relative+0": "Ez a hónap",
"dateFormatItem-H": "H",
"field-month": "hónap",
"field-month-relative+1": "Következő hónap",
"dateFormatItem-MMMMd": "MMMM d.",
"dateFormatItem-M": "L",
"field-second": "másodperc",
"dateFormatItem-GyMMMEd": "G y. MMM d., E",
"dateFormatItem-GyMMM": "G y. MMM",
"field-day": "nap",
"dateFormatItem-yyyyQQQ": "G y. QQQ",
"dateFormatItem-MEd": "M. d., E",
"dateFormatItem-hm": "a h:mm",
"dateFormat-short": "GGGGG y.MM.dd.",
"dateFormatItem-yyyyM": "G y.M.",
"dateFormat-full": "G y. MMMM d., EEEE",
"dateFormatItem-Md": "M. d.",
"dateFormatItem-yyyyMMM": "G y. MMM",
"dateFormatItem-d": "d",
"field-month-relative+-1": "Előző hónap",
"dateFormatItem-h": "a h"
}
//end v1.x content
);
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2012 The VCT Project
This file is part of VoxelConeTracing and is an implementation of
"Interactive Indirect Illumination Using Voxel Cone Tracing" by Crassin et al
VoxelConeTracing is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
VoxelConeTracing is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with VoxelConeTracing. If not, see <http://www.gnu.org/licenses/>.
*/
/*!
* \author Dominik Lazarek ([email protected])
* \author Andreas Weinmann ([email protected])
*/
#version 430 core
#define NODE_NOT_FOUND 0xFFFFFFFF
layout(r32ui) uniform uimage2D nodeMap;
//layout(rgba8) uniform image2D nodeMap;
void main() {
ivec2 nodeMapSize = imageSize(nodeMap);
ivec2 uv = ivec2(0);
uv.x = (gl_VertexID % nodeMapSize.x);
uv.y = (gl_VertexID / nodeMapSize.x);
imageStore(nodeMap, uv, uvec4(NODE_NOT_FOUND));
//imageStore(nodeMap, uv, vec4(0));
}
| {
"pile_set_name": "Github"
} |
The MongoDB driver for Go
-------------------------
Please go to [http://labix.org/mgo](http://labix.org/mgo) for all project details.
| {
"pile_set_name": "Github"
} |
---
title: "Source file <filename> specified multiple times"
ms.date: 07/20/2015
f1_keywords:
- "bc2002"
- "vbc2002"
helpviewer_keywords:
- "BC2002"
ms.assetid: 0e3ab493-bf7e-42bd-870a-d10846ce79ad
---
# Source file \<filename> specified multiple times
A file has been specified multiple times.
**Error ID:** BC2002
## To correct this error
- Remove redundant file specifiers.
## See also
- [My.Computer.FileSystem](xref:Microsoft.VisualBasic.FileIO.FileSystem)
| {
"pile_set_name": "Github"
} |
require('../../../modules/es6.string.blink');
module.exports = require('../../../modules/_entry-virtual')('String').blink;
| {
"pile_set_name": "Github"
} |
## @file
# Provides the capability to configure Secure Boot in a setup browser.
# Through this module, the user may change the content of DB, DBX, PK and KEK.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
[Defines]
INF_VERSION = 0x00010005
BASE_NAME = SecureBootConfigDxe
MODULE_UNI_FILE = SecureBootConfigDxe.uni
FILE_GUID = F0E6A44F-7195-41c3-AC64-54F202CD0A21
MODULE_TYPE = DXE_DRIVER
VERSION_STRING = 1.0
ENTRY_POINT = SecureBootConfigDriverEntryPoint
UNLOAD_IMAGE = SecureBootConfigDriverUnload
#
# VALID_ARCHITECTURES = IA32 X64 EBC
#
[Sources]
SecureBootConfigDriver.c
SecureBootConfigImpl.c
SecureBootConfigFileExplorer.c
SecureBootConfigDevicePath.c
SecureBootConfigMisc.c
SecureBootConfigImpl.h
SecureBootConfig.vfr
SecureBootConfigStrings.uni
SecureBootConfigNvData.h
[Packages]
MdePkg/MdePkg.dec
MdeModulePkg/MdeModulePkg.dec
SecurityPkg/SecurityPkg.dec
CryptoPkg/CryptoPkg.dec
[LibraryClasses]
BaseLib
BaseMemoryLib
BaseCryptLib
MemoryAllocationLib
UefiLib
UefiBootServicesTableLib
UefiRuntimeServicesTableLib
UefiDriverEntryPoint
UefiHiiServicesLib
DebugLib
HiiLib
PlatformSecureLib
DevicePathLib
FileExplorerLib
PeCoffLib
[Guids]
## SOMETIMES_CONSUMES ## Variable:L"CustomMode"
## SOMETIMES_PRODUCES ## Variable:L"CustomMode"
gEfiCustomModeEnableGuid
## SOMETIMES_CONSUMES ## Variable:L"SecureBootEnable"
## SOMETIMES_PRODUCES ## Variable:L"SecureBootEnable"
gEfiSecureBootEnableDisableGuid
## SOMETIMES_CONSUMES ## GUID # Unique ID for the type of the signature.
## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the signature.
gEfiCertRsa2048Guid
## SOMETIMES_CONSUMES ## GUID # Unique ID for the type of the signature.
## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the signature.
gEfiCertX509Guid
## SOMETIMES_CONSUMES ## GUID # Unique ID for the type of the signature.
## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the signature.
gEfiCertSha1Guid
## SOMETIMES_CONSUMES ## GUID # Unique ID for the type of the signature.
## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the signature.
gEfiCertSha256Guid
## SOMETIMES_CONSUMES ## Variable:L"db"
## SOMETIMES_PRODUCES ## Variable:L"db"
## SOMETIMES_CONSUMES ## Variable:L"dbx"
## SOMETIMES_PRODUCES ## Variable:L"dbx"
gEfiImageSecurityDatabaseGuid
## SOMETIMES_CONSUMES ## Variable:L"SetupMode"
## SOMETIMES_PRODUCES ## Variable:L"PK"
## SOMETIMES_CONSUMES ## Variable:L"KEK"
## SOMETIMES_PRODUCES ## Variable:L"KEK"
## SOMETIMES_CONSUMES ## Variable:L"SecureBoot"
gEfiGlobalVariableGuid
gEfiIfrTianoGuid ## PRODUCES ## GUID # HII opcode
## PRODUCES ## HII
## CONSUMES ## HII
gSecureBootConfigFormSetGuid
gEfiCertPkcs7Guid ## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the certificate.
gEfiCertTypeRsa2048Sha256Guid ## SOMETIMES_CONSUMES ## GUID # Unique ID for the type of the certificate.
gEfiFileSystemVolumeLabelInfoIdGuid ## SOMETIMES_CONSUMES ## GUID # Indicate the information type
gEfiCertX509Sha256Guid ## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the certificate.
gEfiCertX509Sha384Guid ## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the certificate.
gEfiCertX509Sha512Guid ## SOMETIMES_PRODUCES ## GUID # Unique ID for the type of the certificate.
[Protocols]
gEfiHiiConfigAccessProtocolGuid ## PRODUCES
gEfiDevicePathProtocolGuid ## PRODUCES
[Depex]
gEfiHiiConfigRoutingProtocolGuid AND
gEfiHiiDatabaseProtocolGuid AND
gEfiVariableArchProtocolGuid AND
gEfiVariableWriteArchProtocolGuid
[UserExtensions.TianoCore."ExtraFiles"]
SecureBootConfigDxeExtra.uni
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env sh
# This cqlsh is a duplicate of the existing cqlsh==5.0.3 with the default protocol version hard-coded to 3;
# the original cqlsh has a hard-coded protocol version of 4.
# The current [2016-10-01] server spec is [Cassandra 2.1.15 | CQL spec 3.2.1 | Native protocol v3].
echo "cqlsh " \
"--cqlversion=\"${CQLSH_VERSION:-3.2.1}\" "\
"-e \"CREATE KEYSPACE IF NOT EXISTS ${CASSANDRA_KEYSPACE_PREFIX:-wasabi}_experiments WITH replication = {'class' : 'SimpleStrategy', 'replication_factor' : ${CASSANDRA_REPLICATION:-1}};\"" \
"--username=${CQLSH_USERNAME}" \
"--password=\"${CQLSH_PASSWORD}\"" \
"${CQLSH_HOST:-localhost}" \
"${CASSANDRA_PORT:-9042}"
while ! nc -w 1 -z ${CQLSH_HOST:-localhost} ${CASSANDRA_PORT:-9042}; do sleep 0.1; done
cqlsh --cqlversion="${CQLSH_VERSION:-3.2.1}" \
-e "CREATE KEYSPACE IF NOT EXISTS ${CASSANDRA_KEYSPACE_PREFIX:-wasabi}_experiments WITH replication = {'class' : 'SimpleStrategy', 'replication_factor' : ${CASSANDRA_REPLICATION:-1}};" \
--username=${CQLSH_USERNAME} \
--password="${CQLSH_PASSWORD}" \
${CQLSH_HOST:-localhost} \
${CASSANDRA_PORT:-9042}
if [ $? -ne 0 ]; then
echo "failed to execute the create keyspace command. Please contact administrator."
exit 1;
fi
| {
"pile_set_name": "Github"
} |
## MOBY DEBUG OPTIONS ##
CONFIG_LOCKDEP=y
CONFIG_FRAME_POINTER=y
CONFIG_LOCKUP_DETECTOR=y
CONFIG_DETECT_HUNG_TASK=y
CONFIG_DEBUG_TIMEKEEPING=y
CONFIG_DEBUG_RT_MUTEXES=y
CONFIG_DEBUG_SPINLOCK=y
CONFIG_DEBUG_MUTEXES=y
CONFIG_DEBUG_WW_MUTEX_SLOWPATH=y
CONFIG_DEBUG_LOCK_ALLOC=y
CONFIG_PROVE_LOCKING=y
CONFIG_LOCK_STAT=y
CONFIG_DEBUG_ATOMIC_SLEEP=y
CONFIG_DEBUG_LIST=y
CONFIG_DEBUG_NOTIFIERS=y
CONFIG_PROVE_RCU=y
CONFIG_RCU_TRACE=y
CONFIG_KGDB=y
CONFIG_KGDB_SERIAL_CONSOLE=y
CONFIG_KGDBOC=y
CONFIG_DEBUG_RODATA_TEST=y
CONFIG_DEBUG_WX=y
| {
"pile_set_name": "Github"
} |
/******************************************************************************
* SOFA, Simulation Open-Framework Architecture *
* (c) 2006 INRIA, USTL, UJF, CNRS, MGH *
* *
* This program is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as published by *
* the Free Software Foundation; either version 2.1 of the License, or (at *
* your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT *
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or *
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License *
* for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
*******************************************************************************
* Authors: The SOFA Team and external contributors (see Authors.txt) *
* *
* Contact information: [email protected] *
******************************************************************************/
#define SOFA_HELPER_ADVANCEDTIMER_CPP
#include <sofa/helper/AdvancedTimer.h>
#include <sofa/helper/system/thread/CTime.h>
#include <sofa/helper/vector.h>
#include <sofa/helper/map.h>
#include "../../extlibs/json/json.h"
#include <cmath>
#include <cstdlib>
#include <stack>
#include <algorithm>
#include <cctype>
#define DEFAULT_INTERVAL 100
using namespace sofa::core::objectmodel;
using json = sofa::helper::json;
namespace sofa
{
namespace helper
{
typedef sofa::helper::system::thread::ctime_t ctime_t;
typedef sofa::helper::system::thread::CTime CTime;
template class SOFA_HELPER_API AdvancedTimer::Id<AdvancedTimer::Timer>;
template class SOFA_HELPER_API AdvancedTimer::Id<AdvancedTimer::Step>;
template class SOFA_HELPER_API AdvancedTimer::Id<AdvancedTimer::Obj>;
template class SOFA_HELPER_API AdvancedTimer::Id<AdvancedTimer::Val>;
class TimerData
{
public:
AdvancedTimer::IdTimer id;
helper::vector<Record> records;
int nbIter;
int interval;
int defaultInterval;
AdvancedTimer::outputType timerOutputType;
std::map<AdvancedTimer::IdStep, StepData> stepData;
helper::vector<AdvancedTimer::IdStep> steps;
class ValData
{
public:
int num, numIt;
double vmin;
double vmax;
double vtotal;
double vtotal2;
double vtotalIt;
int lastIt;
ValData() : num(0), numIt(0), vmin(0), vmax(0), vtotal(0), vtotal2(0), vtotalIt(0), lastIt(-1) {}
};
std::map<AdvancedTimer::IdVal, ValData> valData;
helper::vector<AdvancedTimer::IdVal> vals;
TimerData()
: nbIter(0), interval(0), defaultInterval(DEFAULT_INTERVAL), timerOutputType(AdvancedTimer::STDOUT)
{
}
void init(AdvancedTimer::IdTimer id)
{
this->id = id;
std::string envvar = std::string("SOFA_TIMER_") + (std::string)id;
const char* val = getenv(envvar.c_str());
if (!val || !*val)
val = getenv("SOFA_TIMER_ALL");
if (val && *val)
interval = atoi(val);
else
interval = 0;
defaultInterval = (interval != 0) ? interval : DEFAULT_INTERVAL;
this->timerOutputType = AdvancedTimer::outputType::STDOUT;
}
void clear();
void process();
void print();
void print(std::ostream& result);
json getJson(std::string stepNumber);
json getLightJson(std::string stepNumber);
json createJSONArray(int s, json jsonObject, StepData& data);
};
std::map< AdvancedTimer::IdTimer, TimerData > timers;
std::atomic<int> activeTimers;
SOFA_THREAD_SPECIFIC_PTR(std::stack<AdvancedTimer::IdTimer>, curTimerThread);
SOFA_THREAD_SPECIFIC_PTR(helper::vector<Record>, curRecordsThread);
std::stack<AdvancedTimer::IdTimer>& getCurTimer()
{
std::stack<AdvancedTimer::IdTimer>* ptr = curTimerThread;
if (!ptr)
{
ptr = new std::stack<AdvancedTimer::IdTimer>;
curTimerThread = ptr;
}
return *ptr;
}
helper::vector<Record>* getCurRecords()
{
if (!activeTimers) return nullptr;
return curRecordsThread;
}
void setCurRecords(helper::vector<Record>* ptr)
{
helper::vector<Record>* prev = curRecordsThread;
curRecordsThread = ptr;
if (ptr && !prev) ++activeTimers;
else if (!ptr && prev) --activeTimers;
}
AdvancedTimer::SyncCallBack syncCallBack = nullptr;
void* syncCallBackData = nullptr;
std::pair<AdvancedTimer::SyncCallBack,void*> AdvancedTimer::setSyncCallBack(SyncCallBack cb, void* userData)
{
std::pair<AdvancedTimer::SyncCallBack,void*> old;
old.first = syncCallBack;
old.second = syncCallBackData;
syncCallBack = cb;
syncCallBackData = userData;
return old;
}
void AdvancedTimer::clear()
{
setCurRecords(nullptr);
std::stack<AdvancedTimer::IdTimer>* ptr = curTimerThread;
if (ptr)
while (!ptr->empty())
ptr->pop();
if (activeTimers == 0)
timers.clear();
}
bool AdvancedTimer::isEnabled(IdTimer id)
{
TimerData& data = timers[id];
if (!data.id)
{
data.init(id);
}
return (data.interval != 0);
}
void AdvancedTimer::setEnabled(IdTimer id, bool val)
{
TimerData& data = timers[id];
if (!data.id)
{
data.init(id);
}
if (val && data.interval == 0)
data.interval = data.defaultInterval;
else if (!val && data.interval != 0)
data.interval = 0;
}
int AdvancedTimer::getInterval(IdTimer id)
{
TimerData& data = timers[id];
if (!data.id)
{
data.init(id);
}
return (data.interval ? data.interval : data.defaultInterval);
}
void AdvancedTimer::setInterval(IdTimer id, int val)
{
TimerData& data = timers[id];
if (!data.id)
{
data.init(id);
}
data.defaultInterval = val;
if (data.interval) data.interval = val;
}
void AdvancedTimer::begin(IdTimer id)
{
std::stack<AdvancedTimer::IdTimer>& curTimer = getCurTimer();
curTimer.push(id);
TimerData& data = timers[curTimer.top()];
if (!data.id)
{
data.init(id);
}
if (data.interval == 0)
{
setCurRecords(nullptr);
return;
}
helper::vector<Record>* curRecords = &(data.records);
setCurRecords(curRecords);
curRecords->clear();
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::RBEGIN;
r.id = id;
curRecords->push_back(r);
}
void AdvancedTimer::end(IdTimer id, std::ostream& result)
{
std::stack<AdvancedTimer::IdTimer>& curTimer = getCurTimer();
if (curTimer.empty())
{
msg_error("AdvancedTimer::end") << "timer[" << id << "] called while begin was not" ;
return;
}
if (id != curTimer.top())
{
msg_error("AdvancedTimer::end") << "timer[" << id << "] does not correspond to last call to begin(" << curTimer.top() << ")" ;
return;
}
helper::vector<Record>* curRecords = getCurRecords();
if (curRecords)
{
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::REND;
r.id = id;
curRecords->push_back(r);
TimerData& data = timers[curTimer.top()];
data.process();
if (data.nbIter == data.interval)
{
data.print(result);
data.clear();
}
}
curTimer.pop();
if (curTimer.empty())
{
setCurRecords(nullptr);
}
else
{
TimerData& data = timers[curTimer.top()];
setCurRecords((data.interval == 0) ? nullptr : &(data.records));
}
}
void AdvancedTimer::end(IdTimer id)
{
std::stack<AdvancedTimer::IdTimer>& curTimer = getCurTimer();
if (curTimer.empty())
{
msg_error("AdvancedTimer::end") << "timer[" << id << "] called while begin was not" ;
return;
}
if (id != curTimer.top())
{
msg_error("AdvancedTimer::end") << "timer[" << id << "] does not correspond to last call to begin(" << curTimer.top() << ")" ;
return;
}
TimerData& dataT = timers[id];
if (dataT.timerOutputType == GUI || dataT.timerOutputType == LJSON || dataT.timerOutputType == JSON)
{
dataT.clear();
return;
}
helper::vector<Record>* curRecords = getCurRecords();
if (curRecords)
{
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::REND;
r.id = id;
curRecords->push_back(r);
TimerData& data = timers[curTimer.top()];
data.process();
if (data.nbIter == data.interval)
{
data.print();
data.clear();
}
}
curTimer.pop();
if (curTimer.empty())
{
setCurRecords(nullptr);
}
else
{
TimerData& data = timers[curTimer.top()];
setCurRecords((data.interval == 0) ? nullptr : &(data.records));
}
}
std::string AdvancedTimer::end(IdTimer id, simulation::Node* node)
{
TimerData& data = timers[id];
if(!data.id)
{
return std::string("");
}
switch(data.timerOutputType)
{
case JSON : return getTimeAnalysis(id, node);
case LJSON : return getTimeAnalysis(id, node);
case GUI : return std::string("");
case STDOUT : end(id);
return std::string("");
default : end(id);
return std::string("");
}
}
bool AdvancedTimer::isActive()
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return false;
return true;
}
void AdvancedTimer::stepBegin(IdStep id)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
Record r;
r.time = CTime::getTime();
r.type = Record::RSTEP_BEGIN;
r.id = id;
curRecords->push_back(r);
}
void AdvancedTimer::stepBegin(IdStep id, IdObj obj)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
Record r;
r.time = CTime::getTime();
r.type = Record::RSTEP_BEGIN;
r.id = id;
r.obj = obj;
curRecords->push_back(r);
}
void AdvancedTimer::stepEnd (IdStep id)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::RSTEP_END;
r.id = id;
curRecords->push_back(r);
}
void AdvancedTimer::stepEnd (IdStep id, IdObj obj)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
Record r;
r.time = CTime::getTime();
r.type = Record::RSTEP_END;
r.id = id;
r.obj = obj;
curRecords->push_back(r);
}
void AdvancedTimer::stepNext (IdStep prevId, IdStep nextId)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
Record r;
if (syncCallBack) (*syncCallBack)(syncCallBackData);
r.time = CTime::getTime();
r.type = Record::RSTEP_END;
r.id = prevId;
curRecords->push_back(r);
r.type = Record::RSTEP_BEGIN;
r.id = nextId;
curRecords->push_back(r);
}
void AdvancedTimer::step (IdStep id)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::RSTEP;
r.id = id;
curRecords->push_back(r);
}
void AdvancedTimer::step (IdStep id, IdObj obj)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::RSTEP;
r.id = id;
r.obj = obj;
curRecords->push_back(r);
}
void AdvancedTimer::valSet(IdVal id, double val)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
Record r;
r.time = CTime::getTime();
r.type = Record::RVAL_SET;
r.id = id;
r.val = val;
curRecords->push_back(r);
}
void AdvancedTimer::valAdd(IdVal id, double val)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
Record r;
r.time = CTime::getTime();
r.type = Record::RVAL_ADD;
r.id = id;
r.val = val;
curRecords->push_back(r);
}
// API using strings instead of Id, to remove the need for Id creation when no timing is recorded
void AdvancedTimer::begin(const char* idStr)
{
begin(IdTimer(idStr));
}
void AdvancedTimer::end(const char* idStr)
{
end(IdTimer(idStr));
}
void AdvancedTimer::stepBegin(const char* idStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepBegin(IdStep(idStr));
}
void AdvancedTimer::stepBegin(const char* idStr, const char* objStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepBegin(IdStep(idStr), IdObj(objStr));
}
void AdvancedTimer::stepBegin(const char* idStr, const std::string& objStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepBegin(IdStep(idStr), IdObj(objStr));
}
void AdvancedTimer::stepEnd (const char* idStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepEnd (IdStep(idStr));
}
void AdvancedTimer::stepEnd (const char* idStr, const char* objStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepEnd (IdStep(idStr), IdObj(objStr));
}
void AdvancedTimer::stepEnd (const char* idStr, const std::string& objStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepEnd (IdStep(idStr), IdObj(objStr));
}
void AdvancedTimer::stepNext (const char* prevIdStr, const char* nextIdStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
stepNext (IdStep(prevIdStr), IdStep(nextIdStr));
}
void AdvancedTimer::step (const char* idStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
step (IdStep(idStr));
}
void AdvancedTimer::step (const char* idStr, const char* objStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
step (IdStep(idStr), IdObj(objStr));
}
void AdvancedTimer::step (const char* idStr, const std::string& objStr)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
step (IdStep(idStr), IdObj(objStr));
}
void AdvancedTimer::valSet(const char* idStr, double val)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
valSet(IdVal(idStr),val);
}
void AdvancedTimer::valAdd(const char* idStr, double val)
{
helper::vector<Record>* curRecords = getCurRecords();
if (!curRecords) return;
valAdd(IdVal(idStr),val);
}
void TimerData::clear()
{
nbIter = 0;
steps.clear();
stepData.clear();
vals.clear();
valData.clear();
}
void TimerData::process()
{
if (records.empty()) return;
++nbIter;
if (nbIter == 0) return; // do not keep stats on very first iteration
ctime_t t0 = records[0].time;
//ctime_t last_t = 0;
int level = 0;
for (unsigned int ri = 0; ri < records.size(); ++ri)
{
const Record& r = records[ri];
ctime_t t = r.time - t0;
//last_t = r.time;
if (r.type == Record::REND || r.type == Record::RSTEP_END) --level;
switch (r.type)
{
case Record::RNONE:
break;
case Record::RBEGIN:
case Record::RSTEP_BEGIN:
case Record::RSTEP:
{
AdvancedTimer::IdStep id;
if (r.type != Record::RBEGIN) id = AdvancedTimer::IdStep(r.id);
if (stepData.find(id) == stepData.end())
steps.push_back(id);
StepData& data = stepData[id];
data.level = level;
if (data.lastIt != nbIter)
{
data.lastIt = nbIter;
data.tstart += t;
++data.numIt;
}
data.lastTime = t;
data.label = std::string(id);
++data.num;
break;
}
case Record::REND:
case Record::RSTEP_END:
{
AdvancedTimer::IdStep id;
if (r.type != Record::REND) id = AdvancedTimer::IdStep(r.id);
StepData& data = stepData[id];
if (data.lastIt == nbIter)
{
ctime_t dur = t - data.lastTime;
data.ttotal += dur;
data.ttotal2 += dur*dur;
data.label = std::string(id);
if (data.num == 1 || dur > data.tmax) data.tmax = dur;
if (data.num == 1 || dur < data.tmin) data.tmin = dur;
}
break;
}
case Record::RVAL_SET:
case Record::RVAL_ADD:
{
AdvancedTimer::IdVal id = AdvancedTimer::IdVal(r.id);
if (valData.find(id) == valData.end())
vals.push_back(id);
ValData& data = valData[id];
if (r.type == Record::RVAL_SET || (data.lastIt != nbIter))
{
// update vmin and vmax
if (data.num == 1 || data.vtotalIt < data.vmin) data.vmin = data.vtotalIt;
if (data.num == 1 || data.vtotalIt > data.vmax) data.vmax = data.vtotalIt;
}
if (data.lastIt != nbIter)
{
data.lastIt = nbIter;
data.vtotalIt = r.val;
data.vtotal += r.val;
data.vtotal2 += r.val*r.val;
++data.numIt;
++data.num;
}
else if (r.type == Record::RVAL_SET)
{
data.vtotalIt = r.val;
data.vtotal += r.val;
data.vtotal2 += r.val*r.val;
++data.num;
}
else
{
data.vtotalIt += r.val;
data.vtotal += r.val;
data.vtotal2 += r.val*r.val;
}
break;
}
}
if (r.type == Record::RBEGIN || r.type == Record::RSTEP_BEGIN) ++level;
}
for (unsigned int vi=0; vi < vals.size(); ++vi)
{
AdvancedTimer::IdVal id = vals[vi];
ValData& data = valData[id];
if (data.num > 0)
{
// update vmin and vmax
if (data.num == 1 || data.vtotalIt < data.vmin) data.vmin = data.vtotalIt;
if (data.num == 1 || data.vtotalIt > data.vmax) data.vmax = data.vtotalIt;
}
}
}
void printVal(std::ostream& out, double v)
{
if (v < 0)
{
v = -v;
v += 0.005;
long long i = (long long)floor(v);
if (i >= 10000)
{
v += 0.495;
i = (long long)floor(v);
out << "-" << i;
if (i < 100000)
out << ' ';
}
else if (i >= 1000)
{
v += 0.045;
i = (long long)floor(v);
int dec = (int)floor((v-i)*10);
out << '-' << i;
if (dec == 0)
out << " ";
else
out << '.' << dec;
}
else
{
int dec = (int)floor((v-i)*100);
long long m = 100;
while (i < m && m > 1)
{
out << ' ';
m /= 10;
}
out << '-' << i;
if (dec == 0)
out << " ";
else if (dec < 10)
out << ".0" << dec;
else
out << '.' << dec;
}
}
else
{
v += 0.005;
long long i = (long long)floor(v);
if (i >= 100000)
{
v += 0.495;
i = (long long)floor(v);
out << i;
if (i < 1000000)
out << ' ';
}
else if (i >= 10000)
{
v += 0.045;
i = (long long)floor(v);
int dec = (int)floor((v-i)*10);
out << i;
if (dec == 0)
out << " ";
else
out << '.' << dec;
}
else
{
int dec = (int)floor((v-i)*100);
long long m = 1000;
while (i < m && m > 1)
{
out << ' ';
m /= 10;
}
out << i;
if (dec == 0)
out << " ";
else if (dec < 10)
out << ".0" << dec;
else
out << '.' << dec;
}
}
};
void printNoVal(std::ostream& out)
{
out << " ";
};
void printVal(std::ostream& out, double v, int niter)
{
if (niter == 0)
printNoVal(out);
else
printVal(out, v/niter);
}
void printTime(std::ostream& out, ctime_t t, int niter=1)
{
static ctime_t timer_freq = CTime::getTicksPerSec();
printVal(out, 1000.0 * (double)t / (double)(niter*timer_freq));
}
void TimerData::print()
{
static ctime_t tmargin = CTime::getTicksPerSec() / 100000;
std::ostream& out = std::cout;
out << "==== " << id << " ====\n\n";
if (!records.empty())
{
out << "Trace of last iteration :\n";
ctime_t t0 = records[0].time;
ctime_t last_t = 0;
int level = 0;
for (unsigned int ri = 1; ri < records.size(); ++ri)
{
const Record& r = records[ri];
out << " * ";
if (ri > 0 && ri < records.size()-1 && r.time <= last_t + tmargin)
{
printNoVal(out);
out << " ";
}
else
{
printTime(out, r.time - t0);
out << " ms";
last_t = r.time;
}
out << " ";
if (r.type == Record::REND || r.type == Record::RSTEP_END) --level;
for (int l=0; l<level; ++l)
out << " ";
switch(r.type)
{
case Record::RNONE:
out << "NONE";
break;
case Record::RSTEP_BEGIN:
out << "> begin " << AdvancedTimer::IdStep(r.id);
if (r.obj)
out << " on " << AdvancedTimer::IdObj(r.obj);
break;
case Record::RSTEP_END:
out << "< end " << AdvancedTimer::IdStep(r.id);
if (r.obj)
out << " on " << AdvancedTimer::IdObj(r.obj);
break;
case Record::RSTEP:
out << "- step " << AdvancedTimer::IdStep(r.id);
if (r.obj)
out << " on " << AdvancedTimer::IdObj(r.obj);
break;
case Record::RVAL_SET:
out << ": var " << AdvancedTimer::IdVal(r.id);
out << " = " << r.val;
break;
case Record::RVAL_ADD:
out << ": var " << AdvancedTimer::IdVal(r.id);
out << " += " << r.val;
break;
case Record::REND:
out << "END";
break;
default:
out << "UNKNOWN RECORD TYPE" << (int)r.type;
}
out << std::endl;
if (r.type == Record::RBEGIN || r.type == Record::RSTEP_BEGIN) ++level;
}
}
if (!steps.empty())
{
out << "\nSteps Duration Statistics (in ms) :\n";
out << " LEVEL\t START\t NUM\t MIN\t MAX\t MEAN\t DEV\t TOTAL\tPERCENT\tID\n";
ctime_t ttotal = stepData[AdvancedTimer::IdStep()].ttotal;
for (unsigned int s=0; s<steps.size(); ++s)
{
StepData& data = stepData[steps[s]];
printVal(out, data.level);
out << '\t';
printTime(out, data.tstart, data.numIt);
out << '\t';
printVal(out, data.num, (s == 0) ? 1 : nbIter);
out << '\t';
printTime(out, data.tmin);
out << '\t';
printTime(out, data.tmax);
out << '\t';
double mean = (double)data.ttotal / data.num;
printTime(out, (ctime_t)mean);
out << '\t';
printTime(out, (ctime_t)(sqrt((double)data.ttotal2/data.num - mean*mean)));
out << '\t';
printTime(out, data.ttotal, (s == 0) ? 1 : nbIter);
out << '\t';
printVal(out, 100.0*data.ttotal / (double) ttotal);
out << '\t';
if (s == 0)
out << "TOTAL";
else
{
for(int ii=0; ii<data.level; ii++) out<<"."; // indentation to show the hierarchy level
out << steps[s];
}
out << std::endl;
}
}
if (!vals.empty())
{
out << "\nValues Statistics :\n";
out << " NUM\t MIN\t MAX\t MEAN\t DEV\t TOTAL\tID\n";
for (unsigned int s=0; s<vals.size(); ++s)
{
ValData& data = valData[vals[s]];
printVal(out, data.num, nbIter);
out << '\t';
printVal(out, data.vmin);
out << '\t';
printVal(out, data.vmax);
out << '\t';
double mean = data.vtotal / data.num;
printVal(out, mean);
out << '\t';
printVal(out, sqrt(data.vtotal2/data.num - mean*mean) );
out << '\t';
printVal(out, data.vtotal, nbIter);
out << '\t';
out << vals[s];
out << std::endl;
}
}
out << "\n iteration : " << getCurRecords()->size();
out << "\n==== END ====\n";
out << std::endl;
}
AdvancedTimer::outputType AdvancedTimer::convertOutputType(std::string type)
{
std::for_each(type.begin(), type.end(), [](char& c) {
c = std::tolower(static_cast<unsigned char>(c)); } );
if(type.compare("json") == 0)
return JSON;
else if(type.compare("ljson") == 0)
return LJSON;
else if(type.compare("stdout") == 0)
return STDOUT;
else if(type.compare("gui") == 0)
return GUI;
else // Add your own outputTypes before the else
{
msg_warning("AdvancedTimer") << "Unable to set output type to " << type << ". Switching to the default 'stdout' output. Valid types are [stdout, json, ljson].";
return STDOUT;
}
}
void AdvancedTimer::setOutputType(IdTimer id, const std::string& type)
{
// Seek for the timer
TimerData& data = timers[id];
if (!data.id)
{
data.init(id);
}
data.timerOutputType = convertOutputType(type);
}
AdvancedTimer::outputType AdvancedTimer::getOutputType(IdTimer id)
{
TimerData& data = timers[id];
return data.timerOutputType;
}
// -------------------------------
// Methods used for JSON output
std::string getVal(double v)
{
std::stringstream outputStringStream;
if (v < 0)
{
v = -v;
v += 0.005;
long long i = (long long)floor(v);
if (i >= 10000)
{
v += 0.495;
i = (long long)floor(v);
outputStringStream << "-" << i;
if (i < 100000)
outputStringStream << ' ';
}
else if (i >= 1000)
{
v += 0.045;
i = (long long)floor(v);
int dec = (int)floor((v-i)*10);
outputStringStream << '-' << i;
if (dec == 0)
outputStringStream << " ";
else
outputStringStream << '.' << dec;
} else
{
int dec = (int)floor((v-i)*100);
long long m = 100;
while (i < m && m > 1)
{
outputStringStream << ' ';
m /= 10;
}
outputStringStream << '-' << i;
if (dec == 0)
outputStringStream << " ";
else if (dec < 10)
outputStringStream << ".0" << dec;
else
outputStringStream << '.' << dec;
}
}
else
{
v += 0.005;
long long i = (long long)floor(v);
if (i >= 100000)
{
v += 0.495;
i = (long long)floor(v);
outputStringStream << i;
if (i < 1000000)
outputStringStream << ' ';
}
else if (i >= 10000)
{
v += 0.045;
i = (long long)floor(v);
int dec = (int)floor((v-i)*10);
outputStringStream << i;
if (dec == 0)
outputStringStream << " ";
else
outputStringStream << '.' << dec;
}
else
{
int dec = (int)floor((v-i)*100);
long long m = 1000;
while (i < m && m > 1)
{
outputStringStream << ' ';
m /= 10;
}
outputStringStream << i;
if (dec == 0)
outputStringStream << " ";
else if (dec < 10)
outputStringStream << ".0" << dec;
else
outputStringStream << '.' << dec;
}
}
return outputStringStream.str();
}
std::string getNoVal()
{
return " ";
}
std::string getVal(double v, int niter)
{
if (niter == 0)
return getNoVal();
else
return getVal(v/niter);
}
std::string getTime(ctime_t t, int niter=1)
{
static ctime_t timer_freq = CTime::getTicksPerSec();
return getVal(1000.0 * (double)t / (double)(niter*timer_freq));
}
double strToDouble(std::string const &stringToConvert, std::size_t const precision)
{
std::stringstream convertingStream;
convertingStream << std::setprecision(precision) << std::fixed << stringToConvert << std::endl;
double answer;
convertingStream >> answer;
return answer;
}
void TimerData::print(std::ostream& result)
{
//static ctime_t tmargin = CTime::getTicksPerSec() / 100000;
std::ostream& out = result;
out << "Timer: " << id << "\n";
if (!steps.empty())
{
//out << "\nSteps Duration Statistics (in ms) :\n";
out << " LEVEL START NUM MIN MAX MEAN DEV TOTAL PERCENT ID\n";
ctime_t ttotal = stepData[AdvancedTimer::IdStep()].ttotal;
for (unsigned int s=0; s<steps.size(); ++s)
{
StepData& data = stepData[steps[s]];
printVal(out, data.level);
out << " ";
printTime(out, data.tstart, data.numIt);
out << " ";
printVal(out, data.num, (s == 0) ? 1 : nbIter);
out << " ";
printTime(out, data.tmin);
out << " ";
printTime(out, data.tmax);
out << " ";
double mean = (double)data.ttotal / data.num;
printTime(out, (ctime_t)mean);
out << " ";
printTime(out, (ctime_t)(sqrt((double)data.ttotal2/data.num - mean*mean)));
out << " ";
printTime(out, data.ttotal, (s == 0) ? 1 : nbIter);
out << " ";
printVal(out, 100.0*data.ttotal / (double) ttotal);
out << " ";
if (s == 0)
out << "TOTAL";
else
{
for(int ii=0; ii<data.level; ii++) out<<"."; // indentation to show the hierarchy level
out << steps[s];
}
out << std::endl;
}
}
if (!vals.empty())
{
out << "\nValues Statistics :\n";
out << " NUM\t MIN\t MAX\t MEAN\t DEV\t TOTAL\tID\n";
for (unsigned int s=0; s<vals.size(); ++s)
{
ValData& data = valData[vals[s]];
printVal(out, data.num, nbIter);
out << '\t';
printVal(out, data.vmin);
out << '\t';
printVal(out, data.vmax);
out << '\t';
double mean = data.vtotal / data.num;
printVal(out, mean);
out << '\t';
printVal(out, sqrt(data.vtotal2/data.num - mean*mean) );
out << '\t';
printVal(out, data.vtotal, nbIter);
out << '\t';
out << vals[s];
out << std::endl;
}
}
//out << "\n==== END ====\n";
out << std::endl;
}
json TimerData::createJSONArray(int s,json jsonObject, StepData& data)
{
double value = 0;
ctime_t ttotal = stepData[AdvancedTimer::IdStep()].ttotal;
// Level :
value = strToDouble(getVal(data.level), 4);
jsonObject["Level"] = value;
// Start
value = strToDouble(getTime(data.tstart, data.numIt), 4);
jsonObject["Start"] = value;
// Num
value = strToDouble(getVal(data.num, (s == 0) ? 1 : nbIter), 4);
jsonObject["Num"] = value;
// TMin
value = strToDouble(getTime(data.tmin), 4);
jsonObject["Min"] = value;
// TMax
value = strToDouble(getTime(data.tmax), 4);
jsonObject["Max"] = value;
// Mean
double mean = (double)data.ttotal / data.num;
value = strToDouble(getTime((ctime_t)mean), 4);
jsonObject["Mean"] = value;
// Dev
value = strToDouble(getTime((ctime_t)(sqrt((double)data.ttotal2/data.num - mean*mean))), 4);
jsonObject["Dev"] = value;
// Total
value = strToDouble(getTime(data.ttotal, (s == 0) ? 1 : nbIter), 4);
jsonObject["Total"] = value;
// Percent
value = strToDouble(getVal(100.0*data.ttotal / (double) ttotal), 4);
jsonObject["Percent"] = value;
return jsonObject;
}
json TimerData::getJson(std::string stepNumber)
{
json jsonOutput;
json temp;
json *jsonPointer;
std::vector<std::string> deepthTree;
std::string jsonObjectName = stepNumber;
int componantLevel = 0;
int subComponantLevel = 0;
std::stringstream ComposantId;
if (!steps.empty())
{
// Clean the streamString
ComposantId.str("");
componantLevel = 0;
subComponantLevel = 0;
// Create the JSON container
jsonPointer = &jsonOutput[jsonObjectName];
temp = *jsonPointer;
for (unsigned int s=0; s<steps.size(); s++)
{
// Clean the streamString
ComposantId.str("");
StepData& data = stepData[steps[s]];
if (s == 0)
{
ComposantId << "TOTAL";
deepthTree.push_back(ComposantId.str());
subComponantLevel = 0;
temp[ComposantId.str()]["Values"] = createJSONArray(s, temp[ComposantId.str()]["Values"], data);
*jsonPointer = temp;
jsonPointer = &jsonPointer->at(ComposantId.str());
}
else
{
for(int ii=0; ii<data.level; ii++) ++subComponantLevel; // indentation to show the hierarchy level
// If the level increment
if(componantLevel < subComponantLevel)
{
temp = *jsonPointer;
ComposantId << steps[s];
deepthTree.push_back(ComposantId.str());
temp[ComposantId.str()]["Values"] = createJSONArray(s, temp[ComposantId.str()]["Values"], data);
*jsonPointer = temp;
jsonPointer = &jsonPointer->at(ComposantId.str());
}
// If the level decrement
else if(componantLevel > subComponantLevel)
{
deepthTree.pop_back();
jsonPointer = &jsonOutput[jsonObjectName];
temp = *jsonPointer;
for(unsigned int i = 0; i < deepthTree.size(); i++)
{
temp = temp.at(deepthTree.at(i));
jsonPointer = &jsonPointer->at(deepthTree.at(i));
}
ComposantId << steps[s];
temp[ComposantId.str()]["Values"] = createJSONArray(s, temp[ComposantId.str()]["Values"], data);
}
// If the level stay the same
else if (componantLevel == subComponantLevel)
{
ComposantId << steps[s];
temp = *jsonPointer;
temp[ComposantId.str()]["Values"] = createJSONArray(s, temp[ComposantId.str()]["Values"], data);
*jsonPointer = temp;
}
}
componantLevel = subComponantLevel;
subComponantLevel = 0;
}
}
return jsonOutput;
}
json TimerData::getLightJson(std::string stepNumber)
{
json jsonOutput;
std::vector<std::string> deepthTree;
std::string jsonObjectName = stepNumber;
std::string father;
int componantLevel = 0;
int subComponantLevel = 0;
std::stringstream ComposantId;
if (!steps.empty())
{
// Clean the streamString
ComposantId.str("");
componantLevel = 0;
subComponantLevel = 0;
// Create the JSON container
jsonOutput[jsonObjectName];
for (unsigned int s=0; s<steps.size(); s++)
{
// Clean the streamString
ComposantId.str("");
StepData& data = stepData[steps[s]];
if (s == 0)
{
ComposantId << "TOTAL";
deepthTree.push_back(ComposantId.str());
subComponantLevel = 0;
jsonOutput[jsonObjectName][ComposantId.str()]["Father"] = "None";
jsonOutput[jsonObjectName][ComposantId.str()]["Values"] = createJSONArray(s, jsonOutput[jsonObjectName][ComposantId.str()]["Values"], data);
}
else
{
for(int ii=0; ii<data.level; ii++) ++subComponantLevel; // indentation to show the hierarchy level
// If the level increment
if(componantLevel < subComponantLevel)
{
father = deepthTree.at(deepthTree.size()-1);
ComposantId << steps[s];
deepthTree.push_back(ComposantId.str());
jsonOutput[jsonObjectName][ComposantId.str()]["Father"] = father;
jsonOutput[jsonObjectName][ComposantId.str()]["Values"] = createJSONArray(s, jsonOutput[jsonObjectName][ComposantId.str()]["Values"], data);
}
// If the level decrement
else if(componantLevel > subComponantLevel)
{
deepthTree.pop_back();
father = deepthTree.at(deepthTree.size()-1);
ComposantId << steps[s];
jsonOutput[jsonObjectName][ComposantId.str()]["Father"] = father;
jsonOutput[jsonObjectName][ComposantId.str()]["Values"] = createJSONArray(s, jsonOutput[jsonObjectName][ComposantId.str()]["Values"], data);
}
// If the level stay the same
else if (componantLevel == subComponantLevel)
{
ComposantId << steps[s];
jsonOutput[jsonObjectName][ComposantId.str()]["Father"] = father;
jsonOutput[jsonObjectName][ComposantId.str()]["Values"] = createJSONArray(s, jsonOutput[jsonObjectName][ComposantId.str()]["Values"], data);
}
}
componantLevel = subComponantLevel;
subComponantLevel = 0;
}
}
return jsonOutput;
}
helper::vector<AdvancedTimer::IdStep> AdvancedTimer::getSteps(IdTimer id, bool processData)
{
TimerData& data = timers[id];
if (processData)
data.process();
return data.steps;
}
std::map<AdvancedTimer::IdStep, StepData> AdvancedTimer::getStepData(IdTimer id, bool processData)
{
TimerData& data = timers[id];
if (processData)
data.process();
return data.stepData;
}
helper::vector<Record> AdvancedTimer::getRecords(IdTimer id)
{
TimerData& data = timers[id];
for (Record & r : data.records) {
switch (r.type) {
case Record::RBEGIN: // Timer begins
case Record::REND: // Timer ends
r.label = IdTimer::IdFactory::getName(r.id);
if (r.obj != 0 || (!IdObj::IdFactory::getName(r.obj).empty() && IdObj::IdFactory::getName(r.obj) != "0")) {
r.label += " (" + IdObj::IdFactory::getName(r.obj) + ")";
}
break;
case Record::RSTEP_BEGIN: // Step begins
case Record::RSTEP_END: // Step ends
case Record::RSTEP: // Step
r.label = IdStep::IdFactory::getName(r.id);
if (r.obj != 0 || (!IdObj::IdFactory::getName(r.obj).empty() && IdObj::IdFactory::getName(r.obj) != "0")) {
r.label += " (" + IdObj::IdFactory::getName(r.obj) + ")";
}
break;
case Record::RVAL_SET: // Sets a value
case Record::RVAL_ADD: // Adds a value
r.label = IdVal::IdFactory::getName(r.id);
break;
default:
r.label = "Unknown";
break;
}
}
return data.records;
}
void AdvancedTimer::clearData(IdTimer id)
{
TimerData& data = timers[id];
data.clear();
}
std::string AdvancedTimer::getTimeAnalysis(IdTimer id, simulation::Node* node)
{
// Get simulation context and find the actual simulation step
double time = node->getContext()->getTime();
double deltaTime = node->getContext()->getDt();
std::stringstream tempStepNumber;
std::string stepNumber;
json outputJson;
std::string outputStr;
// We need to convert this way to keep stepNumber as accurate as possible
tempStepNumber << time/deltaTime;
stepNumber = tempStepNumber.str();
// Get the timer result and create the JSON
std::stack<AdvancedTimer::IdTimer>& curTimer = getCurTimer();
if (curTimer.empty())
{
msg_error("AdvancedTimer::end") << "timer[" << id << "] called while begin was not" ;
        return ""; // avoid constructing std::string from nullptr
}
if (id != curTimer.top())
{
msg_error("AdvancedTimer::end") << "timer[" << id << "] does not correspond to last call to begin(" << curTimer.top() << ")" ;
        return ""; // avoid constructing std::string from nullptr
}
helper::vector<Record>* curRecords = getCurRecords();
if (curRecords)
{
if (syncCallBack) (*syncCallBack)(syncCallBackData);
Record r;
r.time = CTime::getTime();
r.type = Record::REND;
r.id = id;
curRecords->push_back(r);
TimerData& data = timers[curTimer.top()];
data.process();
if (data.nbIter == data.interval)
{
// Get values and create the JSON output
switch(data.timerOutputType)
{
case JSON : outputJson = data.getJson(stepNumber);
break;
case LJSON : outputJson = data.getLightJson(stepNumber);
break;
default : outputJson = data.getJson(stepNumber);
}
data.clear();
}
}
curTimer.pop();
if (curTimer.empty())
{
setCurRecords(nullptr);
}
else
{
TimerData& data = timers[curTimer.top()];
setCurRecords((data.interval == 0) ? nullptr : &(data.records));
}
outputStr = outputJson.dump(4);
if(outputStr.compare("null") != 0)
{
outputStr.erase(0,1);
outputStr.erase(outputStr.end()-2, outputStr.end());
}
return outputStr;
}
}
}
| {
"pile_set_name": "Github"
} |
msgid ""
msgstr ""
"Project-Id-Version: red-discordbot\n"
"POT-Creation-Date: 2020-08-06 12:19+0000\n"
"Last-Translator: \n"
"Language-Team: Vietnamese\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: redgettext 3.1\n"
"Plural-Forms: nplurals=1; plural=0;\n"
"X-Crowdin-Project: red-discordbot\n"
"X-Crowdin-Project-ID: 289505\n"
"X-Crowdin-Language: vi\n"
"X-Crowdin-File-ID: 45\n"
"Language: vi_VN\n"
#: redbot/cogs/trivia/checks.py:13 redbot/cogs/trivia/trivia.py:344
msgid "There is no ongoing trivia session in this channel."
msgstr ""
#: redbot/cogs/trivia/converters.py:15
msgid "`{arg}` is not a number."
msgstr ""
#: redbot/cogs/trivia/converters.py:17
msgid "`{arg}` is not a finite number."
msgstr ""
#: redbot/cogs/trivia/session.py:20
msgid "I know this one! {answer}!"
msgstr ""
#: redbot/cogs/trivia/session.py:21
msgid "Easy: {answer}."
msgstr ""
#: redbot/cogs/trivia/session.py:22
msgid "Oh really? It's {answer} of course."
msgstr ""
#: redbot/cogs/trivia/session.py:25
msgid "To the next one I guess..."
msgstr ""
#: redbot/cogs/trivia/session.py:26
msgid "Moving on..."
msgstr ""
#: redbot/cogs/trivia/session.py:27
msgid "I'm sure you'll know the answer of the next one."
msgstr ""
#: redbot/cogs/trivia/session.py:28
msgid "😔 Next one."
msgstr ""
#: redbot/cogs/trivia/session.py:114
msgid "An unexpected error occurred in the trivia session.\\nCheck your console or logs for details."
msgstr ""
#: redbot/cogs/trivia/session.py:135
msgid "Question number {num}!"
msgstr ""
#: redbot/cogs/trivia/session.py:144
msgid "There are no more questions!"
msgstr ""
#: redbot/cogs/trivia/session.py:152
msgid "{trivia_list} (by {author})"
msgstr ""
#: redbot/cogs/trivia/session.py:157
msgid "Starting Trivia: {list_names}"
msgstr ""
#: redbot/cogs/trivia/session.py:203
msgid "Guys...? Well, I guess I'll stop then."
msgstr ""
#: redbot/cogs/trivia/session.py:211
msgid " **+1** for me!"
msgstr ""
#: redbot/cogs/trivia/session.py:216
msgid "You got it {user}! **+1** to you!"
msgstr ""
#: redbot/cogs/trivia/session.py:311
msgid "Congratulations, {user}, you have received {num} {currency} for coming first."
msgstr ""
#: redbot/cogs/trivia/trivia.py:41
#, docstring
msgid "Play trivia with friends!"
msgstr ""
#: redbot/cogs/trivia/trivia.py:79
#, docstring
msgid "Manage Trivia settings."
msgstr ""
#: redbot/cogs/trivia/trivia.py:83
#, docstring
msgid "Show the current trivia settings."
msgstr ""
#: redbot/cogs/trivia/trivia.py:87
msgid "Current settings\\nBot gains points: {bot_plays}\\nAnswer time limit: {delay} seconds\\nLack of response timeout: {timeout} seconds\\nPoints to win: {max_score}\\nReveal answer on timeout: {reveal_answer}\\nPayout multiplier: {payout_multiplier}\\nAllow lists to override settings: {allow_override}"
msgstr ""
#: redbot/cogs/trivia/trivia.py:103
#, docstring
msgid "Set the total points required to win."
msgstr ""
#: redbot/cogs/trivia/trivia.py:105
msgid "Score must be greater than 0."
msgstr ""
#: redbot/cogs/trivia/trivia.py:109
msgid "Done. Points required to win set to {num}."
msgstr ""
#: redbot/cogs/trivia/trivia.py:113
#, docstring
msgid "Set the maximum seconds permitted to answer a question."
msgstr ""
#: redbot/cogs/trivia/trivia.py:115
msgid "Must be at least 4 seconds."
msgstr ""
#: redbot/cogs/trivia/trivia.py:119
msgid "Done. Maximum seconds to answer set to {num}."
msgstr ""
#: redbot/cogs/trivia/trivia.py:123
#, docstring
msgid "Set how long until trivia stops due to no response."
msgstr ""
#: redbot/cogs/trivia/trivia.py:126
msgid "Must be larger than the answer time limit."
msgstr ""
#: redbot/cogs/trivia/trivia.py:130
msgid "Done. Trivia sessions will now time out after {num} seconds of no responses."
msgstr ""
#: redbot/cogs/trivia/trivia.py:137
#, docstring
msgid "Allow/disallow trivia lists to override settings."
msgstr ""
#: redbot/cogs/trivia/trivia.py:142
msgid "Done. Trivia lists can now override the trivia settings for this server."
msgstr ""
#: redbot/cogs/trivia/trivia.py:146
msgid "Done. Trivia lists can no longer override the trivia settings for this server."
msgstr ""
#: redbot/cogs/trivia/trivia.py:154
#, docstring
msgid "Set whether or not the bot gains points.\\n\\n If enabled, the bot will gain a point if no one guesses correctly.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:161
msgid "Done. I'll now gain a point if users don't answer in time."
msgstr ""
#: redbot/cogs/trivia/trivia.py:163
msgid "Alright, I won't embarass you at trivia anymore."
msgstr ""
#: redbot/cogs/trivia/trivia.py:167
#, docstring
msgid "Set whether or not the answer is revealed.\\n\\n If enabled, the bot will reveal the answer if no one guesses correctly\\n in time.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:175
msgid "Done. I'll reveal the answer if no one knows it."
msgstr ""
#: redbot/cogs/trivia/trivia.py:177
msgid "Alright, I won't reveal the answer to the questions anymore."
msgstr ""
#: redbot/cogs/trivia/trivia.py:183
#, docstring
msgid "Set the payout multiplier.\\n\\n This can be any positive decimal number. If a user wins trivia when at\\n least 3 members are playing, they will receive credits. Set to 0 to\\n disable.\\n\\n The number of credits is determined by multiplying their total score by\\n this multiplier.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:194
msgid "Multiplier must be at least 0."
msgstr ""
#: redbot/cogs/trivia/trivia.py:198
msgid "Done. Payout multiplier set to {num}."
msgstr ""
#: redbot/cogs/trivia/trivia.py:200
msgid "Done. I will no longer reward the winner with a payout."
msgstr ""
#: redbot/cogs/trivia/trivia.py:205
#, docstring
msgid "Manage Custom Trivia lists."
msgstr ""
#: redbot/cogs/trivia/trivia.py:210
#, docstring
msgid "List uploaded custom trivia."
msgstr ""
#: redbot/cogs/trivia/trivia.py:212
msgid "No custom Trivia lists uploaded."
msgstr ""
#: redbot/cogs/trivia/trivia.py:228 redbot/cogs/trivia/trivia.py:235
msgid "Uploaded trivia lists"
msgstr ""
#: redbot/cogs/trivia/trivia.py:245
#, docstring
msgid "Upload a trivia file."
msgstr ""
#: redbot/cogs/trivia/trivia.py:247
msgid "Supply a file with next message or type anything to cancel."
msgstr ""
#: redbot/cogs/trivia/trivia.py:253
msgid "You took too long to upload a list."
msgstr ""
#: redbot/cogs/trivia/trivia.py:256
msgid "You have cancelled the upload process."
msgstr ""
#: redbot/cogs/trivia/trivia.py:264
msgid "Invalid syntax: "
msgstr ""
#: redbot/cogs/trivia/trivia.py:267
msgid "There was an error parsing the trivia list. See logs for more info."
msgstr ""
#: redbot/cogs/trivia/trivia.py:274
#, docstring
msgid "Delete a trivia file."
msgstr ""
#: redbot/cogs/trivia/trivia.py:278
msgid "Trivia {filename} was deleted."
msgstr ""
#: redbot/cogs/trivia/trivia.py:280
msgid "Trivia file was not found."
msgstr ""
#: redbot/cogs/trivia/trivia.py:285
#, docstring
msgid "Start trivia session on the specified category.\\n\\n You may list multiple categories, in which case the trivia will involve\\n questions from all of them.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:296
msgid "There is already an ongoing trivia session in this channel."
msgstr ""
#: redbot/cogs/trivia/trivia.py:307
msgid "Invalid category `{name}`. See `{prefix}trivia list` for a list of trivia categories."
msgstr ""
#: redbot/cogs/trivia/trivia.py:314
msgid "There was an error parsing the trivia list for the `{name}` category. It may be formatted incorrectly."
msgstr ""
#: redbot/cogs/trivia/trivia.py:326
msgid "The trivia list was parsed successfully, however it appears to be empty!"
msgstr ""
#: redbot/cogs/trivia/trivia.py:341
#, docstring
msgid "Stop an ongoing trivia session."
msgstr ""
#: redbot/cogs/trivia/trivia.py:348
msgid "Trivia stopped."
msgstr ""
#: redbot/cogs/trivia/trivia.py:352
#, docstring
msgid "List available trivia categories."
msgstr ""
#: redbot/cogs/trivia/trivia.py:357 redbot/cogs/trivia/trivia.py:363
msgid "Available trivia lists"
msgstr ""
#: redbot/cogs/trivia/trivia.py:373
#, docstring
msgid "Leaderboard for trivia.\\n\\n Defaults to the top 10 of this server, sorted by total wins. Use\\n subcommands for a more customised leaderboard.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:388
#, docstring
msgid "Leaderboard for this server.\\n\\n `<sort_by>` can be any of the following fields:\\n - `wins` : total wins\\n - `avg` : average score\\n - `total` : total correct answers\\n - `games` : total games played\\n\\n `<top>` is the number of ranks to show on the leaderboard.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:401 redbot/cogs/trivia/trivia.py:430
msgid "Unknown field `{field_name}`, see `{prefix}help trivia leaderboard server` for valid fields to sort by."
msgstr ""
#: redbot/cogs/trivia/trivia.py:417
#, docstring
msgid "Global trivia leaderboard.\\n\\n `<sort_by>` can be any of the following fields:\\n - `wins` : total wins\\n - `avg` : average score\\n - `total` : total correct answers from all sessions\\n - `games` : total games played\\n\\n `<top>` is the number of ranks to show on the leaderboard.\\n "
msgstr ""
#: redbot/cogs/trivia/trivia.py:485
msgid "There are no scores on record!"
msgstr ""
#: redbot/cogs/trivia/trivia.py:515
msgid "Rank"
msgstr ""
#: redbot/cogs/trivia/trivia.py:516
msgid "Member"
msgstr ""
#: redbot/cogs/trivia/trivia.py:517
msgid "Wins"
msgstr ""
#: redbot/cogs/trivia/trivia.py:518
msgid "Games Played"
msgstr ""
#: redbot/cogs/trivia/trivia.py:519
msgid "Total Score"
msgstr ""
#: redbot/cogs/trivia/trivia.py:520
msgid "Average Score"
msgstr ""
#: redbot/cogs/trivia/trivia.py:635
msgid "{filename} is a reserved trivia name and cannot be replaced.\\nChoose another name."
msgstr ""
#: redbot/cogs/trivia/trivia.py:644
msgid "{filename} already exists. Do you wish to overwrite?"
msgstr ""
#: redbot/cogs/trivia/trivia.py:666
msgid "You took too long answering."
msgstr ""
#: redbot/cogs/trivia/trivia.py:670
msgid "I am not replacing the existing file."
msgstr ""
#: redbot/cogs/trivia/trivia.py:679
msgid "Saved Trivia list as {filename}."
msgstr ""
| {
"pile_set_name": "Github"
} |
<?php
// Type definition
$server->wsdl->addComplexType(
'ArrayOfstring',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'xsd:string[]']],
'xsd:string'
);
$server->wsdl->addComplexType(
'ArrayOfInteger',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'xsd:integer[]']],
'xsd:integer'
);
$server->wsdl->addComplexType(
'ArrayOflong',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'xsd:long[]']],
'xsd:long'
);
$server->wsdl->addComplexType(
'ArrayOfint',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'xsd:int[]']],
'xsd:int'
);
$server->wsdl->addComplexType(
'Revision',
'complexType',
'struct',
'sequence',
'',
[
'revision' => ['name' => 'revision', 'type' => 'xsd:string'],
'author' => ['name' => 'author', 'type' => 'xsd:string'],
'date' => ['name' => 'date', 'type' => 'xsd:string'],
'message' => ['name' => 'message', 'type' => 'xsd:string'],
]
);
$server->wsdl->addComplexType(
'ArrayOfRevision',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:Revision[]']],
'tns:Revision'
);
$server->wsdl->addComplexType(
'Commiter',
'complexType',
'struct',
'sequence',
'',
[
'user_id' => ['name' => 'user_id', 'type' => 'xsd:int'],
'commit_count' => ['name' => 'commit_count', 'type' => 'xsd:int'],
]
);
$server->wsdl->addComplexType(
'ArrayOfCommiter',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:Commiter[]']],
'tns:Commiter'
);
$server->wsdl->addComplexType(
'SvnPathInfo',
'complexType',
'struct',
'sequence',
'',
[
'path' => ['name' => 'path', 'type' => 'xsd:string'],
'commit_count' => ['name' => 'commit_count', 'type' => 'xsd:int'],
]
);
$server->wsdl->addComplexType(
'ArrayOfSvnPathInfo',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:SvnPathInfo[]']],
'tns:SvnPathInfo'
);
$server->wsdl->addComplexType(
'SvnPathDetails',
'complexType',
'struct',
'sequence',
'',
[
'path' => ['name' => 'path', 'type' => 'xsd:string'],
'author' => ['name' => 'author', 'type' => 'xsd:int'],
'message' => ['name' => 'message', 'type' => 'xsd:string'],
'timestamp' => ['name' => 'timestamp', 'type' => 'xsd:int'],
]
);
$server->wsdl->addComplexType(
'ArrayOfSvnPathDetails',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:SvnPathDetails[]']],
'tns:SvnPathDetails'
);
$server->wsdl->addComplexType(
'UserInfo',
'complexType',
'struct',
'sequence',
'',
[
'identifier' => ['name' => 'identifier', 'type' => 'xsd:string'],
'username' => ['name' => 'username', 'type' => 'xsd:string'],
'id' => ['name' => 'id', 'type' => 'xsd:string'],
'real_name' => ['name' => 'real_name', 'type' => 'xsd:string'],
'email' => ['name' => 'email', 'type' => 'xsd:string'],
'ldap_id' => ['name' => 'ldap_id', 'type' => 'xsd:string'],
]
);
$server->wsdl->addComplexType(
'ArrayOfUserInfo',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:UserInfo[]']],
'tns:UserInfo'
);
$server->wsdl->addComplexType(
'DescField',
'complexType',
'struct',
'sequence',
'',
[
'id' => ['name' => 'id', 'type' => 'xsd:int'],
'name' => ['name' => 'name', 'type' => 'xsd:string'],
'is_mandatory' => ['name' => 'is_mandatory', 'type' => 'xsd:int'],
]
);
$server->wsdl->addComplexType(
'ArrayOfDescFields',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:DescField[]']],
'tns:DescField'
);
$server->wsdl->addComplexType(
'DescFieldValue',
'complexType',
'struct',
'sequence',
'',
[
'id' => ['name' => 'id', 'type' => 'xsd:int'],
'value' => ['name' => 'value', 'type' => 'xsd:string'],
]
);
$server->wsdl->addComplexType(
'ArrayOfDescFieldsValues',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:DescFieldValue[]']],
'tns:DescFieldValue'
);
$server->wsdl->addComplexType(
'ServiceValue',
'complexType',
'struct',
'sequence',
'',
[
'id' => ['name' => 'id', 'type' => 'xsd:int'],
'short_name' => ['name' => 'short_name', 'type' => 'xsd:string'],
'is_used' => ['name' => 'is_used', 'type' => 'xsd:int'],
]
);
$server->wsdl->addComplexType(
'ArrayOfServicesValues',
'complexType',
'array',
'',
'SOAP-ENC:Array',
[],
[['ref' => 'SOAP-ENC:arrayType', 'wsdl:arrayType' => 'tns:ServiceValue[]']],
'tns:ServiceValue'
);
| {
"pile_set_name": "Github"
} |
package android.accessibilityservice;
import android.graphics.Path;
import android.graphics.PathMeasure;
import android.graphics.RectF;
import android.os.Parcel;
import android.os.Parcelable;
import com.android.internal.util.Preconditions;
import java.util.ArrayList;
import java.util.List;
public final class GestureDescription {
private static final long MAX_GESTURE_DURATION_MS = 60000;
private static final int MAX_STROKE_COUNT = 10;
private final List<StrokeDescription> mStrokes;
private final float[] mTempPos;
public static int getMaxStrokeCount() {
return 10;
}
public static long getMaxGestureDuration() {
return 60000;
}
private GestureDescription() {
this.mStrokes = new ArrayList();
this.mTempPos = new float[2];
}
private GestureDescription(List<StrokeDescription> strokes) {
this.mStrokes = new ArrayList();
this.mTempPos = new float[2];
this.mStrokes.addAll(strokes);
}
public int getStrokeCount() {
return this.mStrokes.size();
}
public StrokeDescription getStroke(int index) {
return this.mStrokes.get(index);
}
/* access modifiers changed from: private */
/* access modifiers changed from: public */
private long getNextKeyPointAtLeast(long offset) {
long nextKeyPoint = Long.MAX_VALUE;
for (int i = 0; i < this.mStrokes.size(); i++) {
long thisStartTime = this.mStrokes.get(i).mStartTime;
if (thisStartTime < nextKeyPoint && thisStartTime >= offset) {
nextKeyPoint = thisStartTime;
}
long thisEndTime = this.mStrokes.get(i).mEndTime;
if (thisEndTime < nextKeyPoint && thisEndTime >= offset) {
nextKeyPoint = thisEndTime;
}
}
if (nextKeyPoint == Long.MAX_VALUE) {
return -1;
}
return nextKeyPoint;
}
/* access modifiers changed from: private */
/* access modifiers changed from: public */
private int getPointsForTime(long time, TouchPoint[] touchPoints) {
int numPointsFound = 0;
for (int i = 0; i < this.mStrokes.size(); i++) {
StrokeDescription strokeDescription = this.mStrokes.get(i);
if (strokeDescription.hasPointForTime(time)) {
touchPoints[numPointsFound].mStrokeId = strokeDescription.getId();
touchPoints[numPointsFound].mContinuedStrokeId = strokeDescription.getContinuedStrokeId();
touchPoints[numPointsFound].mIsStartOfPath = strokeDescription.getContinuedStrokeId() < 0 && time == strokeDescription.mStartTime;
touchPoints[numPointsFound].mIsEndOfPath = !strokeDescription.willContinue() && time == strokeDescription.mEndTime;
strokeDescription.getPosForTime(time, this.mTempPos);
touchPoints[numPointsFound].mX = (float) Math.round(this.mTempPos[0]);
touchPoints[numPointsFound].mY = (float) Math.round(this.mTempPos[1]);
numPointsFound++;
}
}
return numPointsFound;
}
/* access modifiers changed from: private */
public static long getTotalDuration(List<StrokeDescription> paths) {
long latestEnd = Long.MIN_VALUE;
for (int i = 0; i < paths.size(); i++) {
latestEnd = Math.max(latestEnd, paths.get(i).mEndTime);
}
return Math.max(latestEnd, 0L);
}
public static class Builder {
private final List<StrokeDescription> mStrokes = new ArrayList();
public Builder addStroke(StrokeDescription strokeDescription) {
if (this.mStrokes.size() < 10) {
this.mStrokes.add(strokeDescription);
if (GestureDescription.getTotalDuration(this.mStrokes) <= 60000) {
return this;
}
this.mStrokes.remove(strokeDescription);
throw new IllegalStateException("Gesture would exceed maximum duration with new stroke");
}
throw new IllegalStateException("Attempting to add too many strokes to a gesture");
}
public GestureDescription build() {
if (this.mStrokes.size() != 0) {
return new GestureDescription(this.mStrokes);
}
throw new IllegalStateException("Gestures must have at least one stroke");
}
}
public static class StrokeDescription {
private static final int INVALID_STROKE_ID = -1;
static int sIdCounter;
boolean mContinued;
int mContinuedStrokeId;
long mEndTime;
int mId;
Path mPath;
private PathMeasure mPathMeasure;
long mStartTime;
float[] mTapLocation;
private float mTimeToLengthConversion;
public StrokeDescription(Path path, long startTime, long duration) {
this(path, startTime, duration, false);
}
public StrokeDescription(Path path, long startTime, long duration, boolean willContinue) {
this.mContinuedStrokeId = -1;
this.mContinued = willContinue;
boolean z = true;
Preconditions.checkArgument(duration > 0, "Duration must be positive");
Preconditions.checkArgument(startTime >= 0, "Start time must not be negative");
Preconditions.checkArgument(!path.isEmpty(), "Path is empty");
RectF bounds = new RectF();
path.computeBounds(bounds, false);
Preconditions.checkArgument((bounds.bottom < 0.0f || bounds.top < 0.0f || bounds.right < 0.0f || bounds.left < 0.0f) ? false : z, "Path bounds must not be negative");
this.mPath = new Path(path);
this.mPathMeasure = new PathMeasure(path, false);
if (this.mPathMeasure.getLength() == 0.0f) {
Path tempPath = new Path(path);
tempPath.lineTo(-1.0f, -1.0f);
this.mTapLocation = new float[2];
new PathMeasure(tempPath, false).getPosTan(0.0f, this.mTapLocation, null);
}
if (!this.mPathMeasure.nextContour()) {
this.mPathMeasure.setPath(this.mPath, false);
this.mStartTime = startTime;
this.mEndTime = startTime + duration;
this.mTimeToLengthConversion = getLength() / ((float) duration);
int i = sIdCounter;
sIdCounter = i + 1;
this.mId = i;
return;
}
throw new IllegalArgumentException("Path has more than one contour");
}
public Path getPath() {
return new Path(this.mPath);
}
public long getStartTime() {
return this.mStartTime;
}
public long getDuration() {
return this.mEndTime - this.mStartTime;
}
public int getId() {
return this.mId;
}
public StrokeDescription continueStroke(Path path, long startTime, long duration, boolean willContinue) {
if (this.mContinued) {
StrokeDescription strokeDescription = new StrokeDescription(path, startTime, duration, willContinue);
strokeDescription.mContinuedStrokeId = this.mId;
return strokeDescription;
}
throw new IllegalStateException("Only strokes marked willContinue can be continued");
}
public boolean willContinue() {
return this.mContinued;
}
public int getContinuedStrokeId() {
return this.mContinuedStrokeId;
}
/* access modifiers changed from: package-private */
public float getLength() {
return this.mPathMeasure.getLength();
}
/* access modifiers changed from: package-private */
public boolean getPosForTime(long time, float[] pos) {
float[] fArr = this.mTapLocation;
if (fArr != null) {
pos[0] = fArr[0];
pos[1] = fArr[1];
return true;
} else if (time == this.mEndTime) {
return this.mPathMeasure.getPosTan(getLength(), pos, null);
} else {
return this.mPathMeasure.getPosTan(this.mTimeToLengthConversion * ((float) (time - this.mStartTime)), pos, null);
}
}
/* access modifiers changed from: package-private */
public boolean hasPointForTime(long time) {
return time >= this.mStartTime && time <= this.mEndTime;
}
}
public static class TouchPoint implements Parcelable {
public static final Parcelable.Creator<TouchPoint> CREATOR = new Parcelable.Creator<TouchPoint>() {
/* class android.accessibilityservice.GestureDescription.TouchPoint.AnonymousClass1 */
@Override // android.os.Parcelable.Creator
public TouchPoint createFromParcel(Parcel in) {
return new TouchPoint(in);
}
@Override // android.os.Parcelable.Creator
public TouchPoint[] newArray(int size) {
return new TouchPoint[size];
}
};
private static final int FLAG_IS_END_OF_PATH = 2;
private static final int FLAG_IS_START_OF_PATH = 1;
public int mContinuedStrokeId;
public boolean mIsEndOfPath;
public boolean mIsStartOfPath;
public int mStrokeId;
public float mX;
public float mY;
public TouchPoint() {
}
public TouchPoint(TouchPoint pointToCopy) {
copyFrom(pointToCopy);
}
public TouchPoint(Parcel parcel) {
this.mStrokeId = parcel.readInt();
this.mContinuedStrokeId = parcel.readInt();
int startEnd = parcel.readInt();
boolean z = false;
this.mIsStartOfPath = (startEnd & 1) != 0;
this.mIsEndOfPath = (startEnd & 2) != 0 ? true : z;
this.mX = parcel.readFloat();
this.mY = parcel.readFloat();
}
public void copyFrom(TouchPoint other) {
this.mStrokeId = other.mStrokeId;
this.mContinuedStrokeId = other.mContinuedStrokeId;
this.mIsStartOfPath = other.mIsStartOfPath;
this.mIsEndOfPath = other.mIsEndOfPath;
this.mX = other.mX;
this.mY = other.mY;
}
public String toString() {
return "TouchPoint{mStrokeId=" + this.mStrokeId + ", mContinuedStrokeId=" + this.mContinuedStrokeId + ", mIsStartOfPath=" + this.mIsStartOfPath + ", mIsEndOfPath=" + this.mIsEndOfPath + ", mX=" + this.mX + ", mY=" + this.mY + '}';
}
@Override // android.os.Parcelable
public int describeContents() {
return 0;
}
@Override // android.os.Parcelable
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(this.mStrokeId);
dest.writeInt(this.mContinuedStrokeId);
dest.writeInt((this.mIsStartOfPath ? 1 : 0) | (this.mIsEndOfPath ? 2 : 0));
dest.writeFloat(this.mX);
dest.writeFloat(this.mY);
}
}
public static class GestureStep implements Parcelable {
public static final Parcelable.Creator<GestureStep> CREATOR = new Parcelable.Creator<GestureStep>() {
/* class android.accessibilityservice.GestureDescription.GestureStep.AnonymousClass1 */
@Override // android.os.Parcelable.Creator
public GestureStep createFromParcel(Parcel in) {
return new GestureStep(in);
}
@Override // android.os.Parcelable.Creator
public GestureStep[] newArray(int size) {
return new GestureStep[size];
}
};
public int numTouchPoints;
public long timeSinceGestureStart;
public TouchPoint[] touchPoints;
public GestureStep(long timeSinceGestureStart2, int numTouchPoints2, TouchPoint[] touchPointsToCopy) {
this.timeSinceGestureStart = timeSinceGestureStart2;
this.numTouchPoints = numTouchPoints2;
this.touchPoints = new TouchPoint[numTouchPoints2];
for (int i = 0; i < numTouchPoints2; i++) {
this.touchPoints[i] = new TouchPoint(touchPointsToCopy[i]);
}
}
public GestureStep(Parcel parcel) {
this.timeSinceGestureStart = parcel.readLong();
Parcelable[] parcelables = parcel.readParcelableArray(TouchPoint.class.getClassLoader());
this.numTouchPoints = parcelables == null ? 0 : parcelables.length;
this.touchPoints = new TouchPoint[this.numTouchPoints];
for (int i = 0; i < this.numTouchPoints; i++) {
this.touchPoints[i] = (TouchPoint) parcelables[i];
}
}
@Override // android.os.Parcelable
public int describeContents() {
return 0;
}
@Override // android.os.Parcelable
public void writeToParcel(Parcel dest, int flags) {
dest.writeLong(this.timeSinceGestureStart);
dest.writeParcelableArray(this.touchPoints, flags);
}
}
public static class MotionEventGenerator {
private static TouchPoint[] sCurrentTouchPoints;
public static List<GestureStep> getGestureStepsFromGestureDescription(GestureDescription description, int sampleTimeMs) {
long j;
List<GestureStep> gestureSteps = new ArrayList<>();
TouchPoint[] currentTouchPoints = getCurrentTouchPoints(description.getStrokeCount());
int currentTouchPointSize = 0;
long timeSinceGestureStart = 0;
long nextKeyPointTime = description.getNextKeyPointAtLeast(0);
while (nextKeyPointTime >= 0) {
if (currentTouchPointSize == 0) {
j = nextKeyPointTime;
} else {
j = Math.min(nextKeyPointTime, ((long) sampleTimeMs) + timeSinceGestureStart);
}
timeSinceGestureStart = j;
currentTouchPointSize = description.getPointsForTime(timeSinceGestureStart, currentTouchPoints);
gestureSteps.add(new GestureStep(timeSinceGestureStart, currentTouchPointSize, currentTouchPoints));
nextKeyPointTime = description.getNextKeyPointAtLeast(1 + timeSinceGestureStart);
}
return gestureSteps;
}
private static TouchPoint[] getCurrentTouchPoints(int requiredCapacity) {
TouchPoint[] touchPointArr = sCurrentTouchPoints;
if (touchPointArr == null || touchPointArr.length < requiredCapacity) {
sCurrentTouchPoints = new TouchPoint[requiredCapacity];
for (int i = 0; i < requiredCapacity; i++) {
sCurrentTouchPoints[i] = new TouchPoint();
}
}
return sCurrentTouchPoints;
}
}
}
| {
"pile_set_name": "Github"
} |
#ifndef _TUTTLE_HOST_NODE_HPP_
#define _TUTTLE_HOST_NODE_HPP_
#include "INode.hpp"
#include "ComputeOptions.hpp"
#include "memory/MemoryCache.hpp"
#include <boost/assign/list_of.hpp>
#include <memory>
namespace tuttle
{
namespace host
{
class NodeInit;
using boost::assign::list_of;
INode* createNode(const std::string& pluginName);
bool compute(const std::vector<NodeInit>& nodes, const ComputeOptions& options = ComputeOptions());
bool compute(memory::IMemoryCache& memoryCache, const std::vector<NodeInit>& nodes,
const ComputeOptions& options = ComputeOptions());
bool compute(memory::IMemoryCache& memoryCache, const std::vector<NodeInit>& nodes, const ComputeOptions& options,
memory::IMemoryCache& internMemoryCache);
/**
* @brief Node initializer class.
*/
class NodeInit
{
public:
NodeInit() {}
NodeInit(const std::string& pluginName);
NodeInit(INode& node);
/**
     * @brief Non-standard copy constructor that steals the data.
*/
NodeInit(const NodeInit& other) { setNode(other.release()); }
NodeInit& operator=(const NodeInit& other)
{
setNode(other.release());
return *this;
}
INode& operator->() { return *_node.get(); }
const INode& operator->() const { return *_node.get(); }
/**
     * @brief Set parameter values. If it's a multi-dimensional parameter,
     * you should provide values for all dimensions.
     * @example setParam("redColor", 1.0, 0.0, 0.0, 1.0)
*/
NodeInit& setParam(const char* paramName, ...);
/**
* @brief Set parameter value from a string expression.
*/
NodeInit& setParamExp(const std::string& paramName, const std::string& paramValue);
const INode& get() const { return *_node; }
INode& get() { return *_node; }
void setNode(INode& node) { _node.reset(&node); }
INode& release() const { return *_node.release(); }
void setBeforeRenderCallback(Callback* cb);
private:
mutable std::auto_ptr<INode> _node;
};
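// Minimal usage sketch of the helpers declared above (illustrative only:
// the plugin identifier and parameter names below are assumptions, not
// taken from this header):
//
//   std::vector<NodeInit> nodes;
//   nodes.push_back( NodeInit( "tuttle.pngreader" ).setParamExp( "filename", "input.png" ) );
//   nodes.push_back( NodeInit( "tuttle.pngwriter" ).setParamExp( "filename", "output.png" ) );
//   compute( nodes ); // runs with the default ComputeOptions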
}
}
#endif
| {
"pile_set_name": "Github"
} |
package bind;
import a.b.c.d.SomeClass;
import x.y.z.A1;
import x.y.z.B2;
public class BindingFail1 extends SomeClass implements A1, B2, Int1 {
public void a() {
}
public void x() {
}
} | {
"pile_set_name": "Github"
} |
#
# Linkerd lifecycle test configuration
#
# slow_cooker ->(HTTP/1.1)->
# bb-p2p ->(gRPC)->
# bb-broadcast ->(gRPC)->
# bb-terminus1
# bb-terminus2
# ...
# bb-terminus10
#
#
# slow_cooker
#
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
labels:
app: slow-cooker
name: slow-cooker
spec:
replicas: 1
template:
metadata:
labels:
app: slow-cooker
spec:
containers:
- image: buoyantio/slow_cooker:1.2.0
name: slow-cooker
command:
- "/bin/sh"
args:
- "-c"
- |
sleep 30 # wait for pods to start
slow_cooker \
-qps 10 \
-concurrency 10 \
-interval 30s \
-metric-addr 0.0.0.0:9990 \
http://bb-p2p:7070
ports:
- name: slow-cooker
containerPort: 9990
---
#
# bb point-to-point
#
kind: Service
apiVersion: v1
metadata:
name: bb-p2p
spec:
selector:
app: bb-p2p
ports:
- name: bb-p2p-http1
port: 7070
targetPort: 7070
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
labels:
app: bb-p2p
name: bb-p2p
spec:
replicas: 1
template:
metadata:
labels:
app: bb-p2p
spec:
containers:
- image: buoyantio/bb:v0.0.5
name: bb-p2p
command:
- "/bin/bash"
args:
- "-c"
- |
exec \
/out/bb point-to-point-channel \
--grpc-downstream-server=bb-broadcast:8080 \
--h1-server-port=7070
ports:
- containerPort: 7070
name: bb-p2p-http1
---
#
# bb broadcast
#
kind: Service
apiVersion: v1
metadata:
name: bb-broadcast
spec:
selector:
app: bb-broadcast
ports:
- name: bb-bcast-grpc
port: 8080
targetPort: 8080
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
labels:
app: bb-broadcast
name: bb-broadcast
spec:
replicas: 1
template:
metadata:
labels:
app: bb-broadcast
spec:
containers:
- image: buoyantio/bb:v0.0.5
name: bb-broadcast
command:
- "/bin/bash"
args:
- "-c"
- |
exec \
/out/bb broadcast-channel \
--grpc-downstream-server=bb-terminus1:9090 \
--grpc-downstream-server=bb-terminus2:9090 \
--grpc-downstream-server=bb-terminus3:9090 \
--grpc-downstream-server=bb-terminus4:9090 \
--grpc-downstream-server=bb-terminus5:9090 \
--grpc-downstream-server=bb-terminus6:9090 \
--grpc-downstream-server=bb-terminus7:9090 \
--grpc-downstream-server=bb-terminus8:9090 \
--grpc-downstream-server=bb-terminus9:9090 \
--grpc-downstream-server=bb-terminus10:9090 \
--grpc-server-port=8080
ports:
- containerPort: 8080
name: bb-bcast-grpc
---
#
# bb terminus
#
kind: Service
apiVersion: v1
metadata:
name: bb-terminus1
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus2
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus3
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus4
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus5
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus6
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus7
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus8
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus9
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
kind: Service
apiVersion: v1
metadata:
name: bb-terminus10
spec:
selector:
app: bb-terminus
ports:
- name: bb-term-grpc
port: 9090
targetPort: 9090
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
labels:
app: bb-terminus
name: bb-terminus
spec:
replicas: 1
template:
metadata:
labels:
app: bb-terminus
spec:
containers:
- image: buoyantio/bb:v0.0.5
name: bb-terminus
command:
- "/bin/bash"
args:
- "-c"
- |
exec \
/out/bb terminus \
--grpc-server-port=9090 \
--response-text=BANANA0
ports:
- containerPort: 9090
name: bb-term-grpc
---
#
# Redeploy via kubectl
#
kind: ServiceAccount
apiVersion: v1
metadata:
name: redeployer
---
kind: Role
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
name: lifecycle:redeployer
rules:
- apiGroups: ["extensions"]
resources: ["deployments"]
verbs: ["get", "patch"]
- apiGroups: [""]
resources: ["pods", "services"]
verbs: ["create", "delete", "get", "list"]
---
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
name: lifecycle:redeployer
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: lifecycle:redeployer
subjects:
- kind: ServiceAccount
name: redeployer
---
apiVersion: v1
kind: ConfigMap
metadata:
name: redeployer
data:
redeployer: |-
#!/bin/sh
# give deployment time to fully roll out
sleep 60
LAST=$(date +%s)
while true; do
PODS=$(kubectl -n $LIFECYCLE_NS get po --field-selector=status.phase=Running --selector=app=bb-terminus -o jsonpath='{.items[*].metadata.name}')
SPACES=$(echo "${PODS}" | awk -F" " '{print NF-1}')
POD_COUNT=$(($SPACES+1))
echo "found ${POD_COUNT} running pods"
# restart each pod every minute
SLEEP_TIME=$(( 60 / $POD_COUNT ))
if [ $SLEEP_TIME = 0 ]; then
SLEEP_TIME=1
fi
for POD in ${PODS}; do
kubectl -n $LIFECYCLE_NS delete po $POD
# bounce services every 30 seconds
NOW=$(date +%s)
if [ $(( NOW - LAST )) -gt 30 ]; then
echo "bouncing services..."
for i in `seq 1 10`; do
SVC=$(kubectl -n $LIFECYCLE_NS get svc/bb-terminus$i -o json)
kubectl -n $LIFECYCLE_NS delete svc/bb-terminus$i
echo $SVC | kubectl -n $LIFECYCLE_NS apply -f -
done
            LAST=$NOW
fi
echo "sleeping for ${SLEEP_TIME} seconds..."
sleep $SLEEP_TIME
done
# redeploy the whole bb-terminus deployment
echo "redeploying bb-terminus deployment"
DEPLOY=$(kubectl -n $LIFECYCLE_NS get deploy/bb-terminus -o json)
echo $DEPLOY | sed -E 's/BANANA[0-9]+/BANANA'$(date +'%s')'/g' | kubectl -n $LIFECYCLE_NS apply -f -
# give the new deployment time to roll out, before we start deleting pods again
echo "sleeping for 60 seconds..."
sleep 60
done
---
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
labels:
app: redeployer
name: redeployer
spec:
replicas: 1
template:
metadata:
labels:
app: redeployer
spec:
serviceAccount: redeployer
containers:
- image: lachlanevenson/k8s-kubectl:v1.14.0
name: redeployer
command:
- "/data/redeployer"
env:
- name: LIFECYCLE_NS
valueFrom:
fieldRef:
fieldPath: metadata.namespace
volumeMounts:
- name: redeployer
mountPath: /data
volumes:
- name: redeployer
configMap:
name: redeployer
defaultMode: 0744
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<appSettings>
<add key="port" value="5555"/>
    <!-- Maximum number of connections -->
<add key="numConnections" value="20000"/>
    <!-- Receive buffer size -->
<add key="receiveBufferSize" value="1024"/>
    <!-- Timeout -->
<add key="overtime" value="20"/>
</appSettings>
<startup>
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.6.1"/>
</startup>
</configuration>
| {
"pile_set_name": "Github"
} |
import io
from dataclasses import dataclass
from typing import Any, List
from src.types.sized_bytes import bytes32
from src.util.clvm import int_to_bytes, int_from_bytes
from src.util.hash import std_hash
from src.util.ints import uint64
from src.util.streamable import streamable, Streamable
@dataclass(frozen=True)
@streamable
class Coin(Streamable):
"""
This structure is used in the body for the reward and fees genesis coins.
"""
parent_coin_info: bytes32
puzzle_hash: bytes32
amount: uint64
def name(self) -> bytes32:
return self.get_hash()
def as_list(self) -> List[Any]:
return [self.parent_coin_info, self.puzzle_hash, self.amount]
@property
def name_str(self) -> str:
return self.name().hex()
@classmethod
def from_bytes(cls, blob):
parent_coin_info = blob[:32]
puzzle_hash = blob[32:64]
amount = int_from_bytes(blob[64:])
return Coin(parent_coin_info, puzzle_hash, uint64(amount))
def __bytes__(self):
f = io.BytesIO()
f.write(self.parent_coin_info)
f.write(self.puzzle_hash)
f.write(int_to_bytes(self.amount))
return f.getvalue()
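    # Illustrative round-trip sketch (an assumption for clarity, not part of the
    # original module): from_bytes() reads back the same fixed 32 + 32 byte
    # layout that __bytes__() writes, so serializing and re-parsing a coin
    # should give an equal value (the literal byte values below are arbitrary):
    #
    #   c = Coin(bytes32(b"\x00" * 32), bytes32(b"\x11" * 32), uint64(1000))
    #   assert Coin.from_bytes(bytes(c)) == c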
def hash_coin_list(coin_list: List[Coin]) -> bytes32:
coin_list.sort(key=lambda x: x.name_str, reverse=True)
buffer = bytearray()
for coin in coin_list:
buffer.extend(coin.name())
return std_hash(buffer)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<shape
xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle">
    <!-- Fill color -->
<solid android:color="#555555" />
    <!-- Round the button's four corners -->
    <!-- android:radius is the radius of the rounded corners -->
<corners android:radius="10dip" />
    <!-- padding: spacing between the text inside the Button and the Button's border -->
<padding
android:left="0dp"
android:top="0dp"
android:right="0dp"
android:bottom="0dp"
/>
</shape> | {
"pile_set_name": "Github"
} |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build windows
// Package windows contains an interface to the low-level operating system
// primitives. OS details vary depending on the underlying system, and
// by default, godoc will display the OS-specific documentation for the current
// system. If you want godoc to display syscall documentation for another
// system, set $GOOS and $GOARCH to the desired system. For example, if
// you want to view documentation for freebsd/arm on linux/amd64, set $GOOS
// to freebsd and $GOARCH to arm.
//
// The primary use of this package is inside other packages that provide a more
// portable interface to the system, such as "os", "time" and "net". Use
// those packages rather than this one if you can.
//
// For details of the functions and data types in this package consult
// the manuals for the appropriate operating system.
//
// These calls return err == nil to indicate success; otherwise
// err represents an operating system error describing the failure and
// holds a value of type syscall.Errno.
package windows // import "golang.org/x/sys/windows"
import (
"syscall"
)
// ByteSliceFromString returns a NUL-terminated slice of bytes
// containing the text of s. If s contains a NUL byte at any
// location, it returns (nil, syscall.EINVAL).
func ByteSliceFromString(s string) ([]byte, error) {
for i := 0; i < len(s); i++ {
if s[i] == 0 {
return nil, syscall.EINVAL
}
}
a := make([]byte, len(s)+1)
copy(a, s)
return a, nil
}
// BytePtrFromString returns a pointer to a NUL-terminated array of
// bytes containing the text of s. If s contains a NUL byte at any
// location, it returns (nil, syscall.EINVAL).
func BytePtrFromString(s string) (*byte, error) {
a, err := ByteSliceFromString(s)
if err != nil {
return nil, err
}
return &a[0], nil
}
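// Illustrative usage sketch (not part of the original file; the path literal is
// an arbitrary example): BytePtrFromString is the usual way to prepare a Go
// string for a Windows call that expects a NUL-terminated *byte argument.
//
//	p, err := BytePtrFromString("C:\\Temp")
//	if err != nil {
//		// the string contained an interior NUL byte
//	}
//	_ = p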
// Single-word zero for use when we need a valid pointer to 0 bytes.
// See mksyscall.pl.
var _zero uintptr
func (ts *Timespec) Unix() (sec int64, nsec int64) {
return int64(ts.Sec), int64(ts.Nsec)
}
func (tv *Timeval) Unix() (sec int64, nsec int64) {
return int64(tv.Sec), int64(tv.Usec) * 1000
}
func (ts *Timespec) Nano() int64 {
return int64(ts.Sec)*1e9 + int64(ts.Nsec)
}
func (tv *Timeval) Nano() int64 {
return int64(tv.Sec)*1e9 + int64(tv.Usec)*1000
}
| {
"pile_set_name": "Github"
} |
"""Test program for the fcntl C module.
OS/2+EMX doesn't support the file locking operations.
"""
import os
import struct
import sys
import _testcapi
import unittest
from test.test_support import (verbose, TESTFN, unlink, run_unittest,
import_module)
# Skip test if no fnctl module.
fcntl = import_module('fcntl')
# TODO - Write tests for flock() and lockf().
def get_lockdata():
if sys.platform.startswith('atheos'):
start_len = "qq"
else:
try:
os.O_LARGEFILE
except AttributeError:
start_len = "ll"
else:
start_len = "qq"
if (sys.platform.startswith(('netbsd', 'freebsd', 'openbsd', 'bsdos'))
or sys.platform == 'darwin'):
if struct.calcsize('l') == 8:
off_t = 'l'
pid_t = 'i'
else:
off_t = 'lxxxx'
pid_t = 'l'
lockdata = struct.pack(off_t + off_t + pid_t + 'hh', 0, 0, 0,
fcntl.F_WRLCK, 0)
elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']:
lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
elif sys.platform in ['os2emx']:
lockdata = None
else:
lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0)
if lockdata:
if verbose:
print 'struct.pack: ', repr(lockdata)
return lockdata
lockdata = get_lockdata()
class TestFcntl(unittest.TestCase):
def setUp(self):
self.f = None
def tearDown(self):
if self.f and not self.f.closed:
self.f.close()
unlink(TESTFN)
def test_fcntl_fileno(self):
# the example from the library docs
self.f = open(TESTFN, 'w')
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETFL, os.O_NONBLOCK)
if verbose:
print 'Status from fcntl with O_NONBLOCK: ', rv
if sys.platform not in ['os2emx']:
rv = fcntl.fcntl(self.f.fileno(), fcntl.F_SETLKW, lockdata)
if verbose:
print 'String from fcntl with F_SETLKW: ', repr(rv)
self.f.close()
def test_fcntl_file_descriptor(self):
# again, but pass the file rather than numeric descriptor
self.f = open(TESTFN, 'w')
rv = fcntl.fcntl(self.f, fcntl.F_SETFL, os.O_NONBLOCK)
if sys.platform not in ['os2emx']:
rv = fcntl.fcntl(self.f, fcntl.F_SETLKW, lockdata)
self.f.close()
def test_fcntl_bad_file(self):
class F:
def __init__(self, fn):
self.fn = fn
def fileno(self):
return self.fn
self.assertRaises(ValueError, fcntl.fcntl, -1, fcntl.F_SETFL, os.O_NONBLOCK)
self.assertRaises(ValueError, fcntl.fcntl, F(-1), fcntl.F_SETFL, os.O_NONBLOCK)
self.assertRaises(TypeError, fcntl.fcntl, 'spam', fcntl.F_SETFL, os.O_NONBLOCK)
self.assertRaises(TypeError, fcntl.fcntl, F('spam'), fcntl.F_SETFL, os.O_NONBLOCK)
# Issue 15989
self.assertRaises(ValueError, fcntl.fcntl, _testcapi.INT_MAX + 1,
fcntl.F_SETFL, os.O_NONBLOCK)
self.assertRaises(ValueError, fcntl.fcntl, F(_testcapi.INT_MAX + 1),
fcntl.F_SETFL, os.O_NONBLOCK)
self.assertRaises(ValueError, fcntl.fcntl, _testcapi.INT_MIN - 1,
fcntl.F_SETFL, os.O_NONBLOCK)
self.assertRaises(ValueError, fcntl.fcntl, F(_testcapi.INT_MIN - 1),
fcntl.F_SETFL, os.O_NONBLOCK)
def test_fcntl_64_bit(self):
# Issue #1309352: fcntl shouldn't fail when the third arg fits in a
# C 'long' but not in a C 'int'.
try:
cmd = fcntl.F_NOTIFY
# This flag is larger than 2**31 in 64-bit builds
flags = fcntl.DN_MULTISHOT
except AttributeError:
self.skipTest("F_NOTIFY or DN_MULTISHOT unavailable")
fd = os.open(os.path.dirname(os.path.abspath(TESTFN)), os.O_RDONLY)
try:
fcntl.fcntl(fd, cmd, flags)
finally:
os.close(fd)
def test_main():
run_unittest(TestFcntl)
if __name__ == '__main__':
test_main()
| {
"pile_set_name": "Github"
} |
var convert = require('./convert'),
func = convert('floor', require('../floor'));
func.placeholder = require('./placeholder');
module.exports = func;
| {
"pile_set_name": "Github"
} |
<?php
final class PhabricatorRepositoryStatusMessage
extends PhabricatorRepositoryDAO {
const TYPE_INIT = 'init';
const TYPE_FETCH = 'fetch';
const TYPE_NEEDS_UPDATE = 'needs-update';
const CODE_ERROR = 'error';
const CODE_OKAY = 'okay';
protected $repositoryID;
protected $statusType;
protected $statusCode;
protected $parameters = array();
protected $epoch;
protected $messageCount;
protected function getConfiguration() {
return array(
self::CONFIG_TIMESTAMPS => false,
self::CONFIG_SERIALIZATION => array(
'parameters' => self::SERIALIZATION_JSON,
),
self::CONFIG_COLUMN_SCHEMA => array(
'statusType' => 'text32',
'statusCode' => 'text32',
'messageCount' => 'uint32',
),
self::CONFIG_KEY_SCHEMA => array(
'repositoryID' => array(
'columns' => array('repositoryID', 'statusType'),
'unique' => true,
),
),
) + parent::getConfiguration();
}
public function getParameter($key, $default = null) {
return idx($this->parameters, $key, $default);
}
public function getStatusTypeName() {
$names = array(
self::TYPE_INIT => pht('Error While Initializing Repository'),
self::TYPE_FETCH => pht('Error While Fetching Changes'),
self::TYPE_NEEDS_UPDATE => pht('Repository Needs Update'),
);
$type = $this->getStatusType();
return idx($names, $type, $type);
}
}
| {
"pile_set_name": "Github"
} |
use crate::extn::prelude::*;
pub fn init(interp: &mut Artichoke) -> InitializeResult<()> {
let spec = crate::module::Spec::new(interp, "Benchmark", None)?;
interp.0.borrow_mut().def_module::<Benchmark>(spec);
interp.def_rb_source_file(
b"benchmark.rb",
&include_bytes!(concat!(env!("OUT_DIR"), "/src/generated/benchmark.rb"))[..]
)?;
Ok(())
}
#[derive(Debug)]
pub struct Benchmark;
| {
"pile_set_name": "Github"
} |
void FutureApi::OnFrontConnected()
{
gil_scoped_acquire acquire;
this->onFrontConnected();
};
void FutureApi::OnFrontDisconnected(int iReason)
{
gil_scoped_acquire acquire;
this->onFrontDisconnected(iReason);
};
void FutureApi::OnHeartBeatWarning(int iTimeLapse)
{
gil_scoped_acquire acquire;
this->onHeartBeatWarning(iTimeLapse);
};
void FutureApi::OnRspNeedVerify(bool bFirstLogin, bool bHasSetQA)
{
gil_scoped_acquire acquire;
this->onRspNeedVerify(bFirstLogin, bHasSetQA);
};
void FutureApi::OnRspUserLogin(CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspUserLogin(error, iRequestID, bIsLast);
};
void FutureApi::OnRspUserLogout(CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspUserLogout(error, iRequestID, bIsLast);
};
void FutureApi::OnRspVerifyCode(CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspVerifyCode(error, iRequestID, bIsLast);
};
void FutureApi::OnRspSafeVerify(CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspSafeVerify(error, iRequestID, bIsLast);
};
void FutureApi::OnRspSetVerifyQA(CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspSetVerifyQA(error, iRequestID, bIsLast);
};
void FutureApi::OnRspAccount(CFutureRspAccountField *pRspAccount, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspAccount)
{
data["UserId"] = toUtf(pRspAccount->UserId);
data["UserName"] = toUtf(pRspAccount->UserName);
data["UserType"] = toUtf(pRspAccount->UserType);
data["LoginPwd"] = toUtf(pRspAccount->LoginPwd);
data["AccountNo"] = toUtf(pRspAccount->AccountNo);
data["TradePwd"] = toUtf(pRspAccount->TradePwd);
data["IsSimulation"] = toUtf(pRspAccount->IsSimulation);
data["FrontendIp"] = toUtf(pRspAccount->FrontendIp);
data["FrontendPort"] = toUtf(pRspAccount->FrontendPort);
data["CurrencyNo"] = toUtf(pRspAccount->CurrencyNo);
data["UserState"] = toUtf(pRspAccount->UserState);
data["SelAll"] = toUtf(pRspAccount->SelAll);
data["Strategy"] = toUtf(pRspAccount->Strategy);
data["Inner"] = toUtf(pRspAccount->Inner);
data["YingSun"] = toUtf(pRspAccount->YingSun);
data["ChaoDan"] = toUtf(pRspAccount->ChaoDan);
data["Option"] = toUtf(pRspAccount->Option);
data["CmeMarket"] = toUtf(pRspAccount->CmeMarket);
data["CmeCOMEXMarket"] = toUtf(pRspAccount->CmeCOMEXMarket);
data["CmeNYMEXMarket"] = toUtf(pRspAccount->CmeNYMEXMarket);
data["CmeCBTMarket"] = toUtf(pRspAccount->CmeCBTMarket);
data["IceUSMarket"] = toUtf(pRspAccount->IceUSMarket);
data["IceECMarket"] = toUtf(pRspAccount->IceECMarket);
data["IceEFMarket"] = toUtf(pRspAccount->IceEFMarket);
data["CanTradeStockHK"] = toUtf(pRspAccount->CanTradeStockHK);
data["CanTradeStockAM"] = toUtf(pRspAccount->CanTradeStockAM);
data["MultiLogin"] = toUtf(pRspAccount->MultiLogin);
data["SellStockHK"] = toUtf(pRspAccount->SellStockHK);
data["SellStockAM"] = toUtf(pRspAccount->SellStockAM);
data["CanTradeStockKRX"] = toUtf(pRspAccount->CanTradeStockKRX);
data["HkexMarket"] = toUtf(pRspAccount->HkexMarket);
data["IdNumber"] = toUtf(pRspAccount->IdNumber);
data["HkexMarketFee"] = toUtf(pRspAccount->HkexMarketFee);
data["IsProfessional"] = toUtf(pRspAccount->IsProfessional);
data["IsOverSea"] = toUtf(pRspAccount->IsOverSea);
data["IsFirstLogin"] = toUtf(pRspAccount->IsFirstLogin);
data["UserMobile"] = toUtf(pRspAccount->UserMobile);
data["HasSetQA"] = toUtf(pRspAccount->HasSetQA);
data["CanTradeStockSGXQ"] = toUtf(pRspAccount->CanTradeStockSGXQ);
data["ExistMac"] = toUtf(pRspAccount->ExistMac);
data["RatioINE"] = toUtf(pRspAccount->RatioINE);
data["EurexMarket"] = toUtf(pRspAccount->EurexMarket);
data["HkexIsOverMaxTerminal"] = toUtf(pRspAccount->HkexIsOverMaxTerminal);
data["HkexOverMoney"] = toUtf(pRspAccount->HkexOverMoney);
data["CanTradeStockAU"] = toUtf(pRspAccount->CanTradeStockAU);
data["NyFlag"] = toUtf(pRspAccount->NyFlag);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspAccount(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQuestion(CFutureRspQuestionField *pRspVersion, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspVersion)
{
data["QuestionType"] = toUtf(pRspVersion->QuestionType);
data["QuestionId"] = toUtf(pRspVersion->QuestionId);
data["QuestionCN"] = toUtf(pRspVersion->QuestionCN);
data["QuestionEN"] = toUtf(pRspVersion->QuestionEN);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQuestion(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspOrderInsert(CFutureRspOrderInsertField *pRspOrderInsert, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspOrderInsert)
{
data["UserId"] = toUtf(pRspOrderInsert->UserId);
data["AccountNo"] = toUtf(pRspOrderInsert->AccountNo);
data["SystemNo"] = toUtf(pRspOrderInsert->SystemNo);
data["LocalNo"] = toUtf(pRspOrderInsert->LocalNo);
data["OrderNo"] = toUtf(pRspOrderInsert->OrderNo);
data["OrigOrderNo"] = toUtf(pRspOrderInsert->OrigOrderNo);
data["OrderMethod"] = toUtf(pRspOrderInsert->OrderMethod);
data["AcceptType"] = toUtf(pRspOrderInsert->AcceptType);
data["ExchangeCode"] = toUtf(pRspOrderInsert->ExchangeCode);
data["TreatyCode"] = toUtf(pRspOrderInsert->TreatyCode);
data["BuySale"] = toUtf(pRspOrderInsert->BuySale);
data["OrderNumber"] = toUtf(pRspOrderInsert->OrderNumber);
data["OrderPrice"] = toUtf(pRspOrderInsert->OrderPrice);
data["FilledNumber"] = toUtf(pRspOrderInsert->FilledNumber);
data["FilledPrice"] = toUtf(pRspOrderInsert->FilledPrice);
data["TradeType"] = toUtf(pRspOrderInsert->TradeType);
data["PriceType"] = toUtf(pRspOrderInsert->PriceType);
data["HtsType"] = toUtf(pRspOrderInsert->HtsType);
data["OrderDate"] = toUtf(pRspOrderInsert->OrderDate);
data["OrderTime"] = toUtf(pRspOrderInsert->OrderTime);
data["ErrorCode"] = toUtf(pRspOrderInsert->ErrorCode);
data["OrderState"] = toUtf(pRspOrderInsert->OrderState);
data["IsRiskOrder"] = toUtf(pRspOrderInsert->IsRiskOrder);
data["CancelUserId"] = toUtf(pRspOrderInsert->CancelUserId);
data["TriggerPrice"] = toUtf(pRspOrderInsert->TriggerPrice);
data["ValidDate"] = toUtf(pRspOrderInsert->ValidDate);
data["AddReduce"] = toUtf(pRspOrderInsert->AddReduce);
data["StrategyId"] = toUtf(pRspOrderInsert->StrategyId);
data["MaxShow"] = toUtf(pRspOrderInsert->MaxShow);
data["MinQty"] = toUtf(pRspOrderInsert->MinQty);
data["ExchangeTime"] = toUtf(pRspOrderInsert->ExchangeTime);
data["CancelTime"] = toUtf(pRspOrderInsert->CancelTime);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspOrderInsert(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspOrderModify(CFutureRspOrderModifyField *pRspOrderModify, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspOrderModify)
{
data["UserId"] = toUtf(pRspOrderModify->UserId);
data["AccountNo"] = toUtf(pRspOrderModify->AccountNo);
data["SystemNo"] = toUtf(pRspOrderModify->SystemNo);
data["LocalNo"] = toUtf(pRspOrderModify->LocalNo);
data["OrderNo"] = toUtf(pRspOrderModify->OrderNo);
data["OrigOrderNo"] = toUtf(pRspOrderModify->OrigOrderNo);
data["OrderMethod"] = toUtf(pRspOrderModify->OrderMethod);
data["AcceptType"] = toUtf(pRspOrderModify->AcceptType);
data["ExchangeCode"] = toUtf(pRspOrderModify->ExchangeCode);
data["TreatyCode"] = toUtf(pRspOrderModify->TreatyCode);
data["BuySale"] = toUtf(pRspOrderModify->BuySale);
data["OrderNumber"] = toUtf(pRspOrderModify->OrderNumber);
data["OrderPrice"] = toUtf(pRspOrderModify->OrderPrice);
data["FilledNumber"] = toUtf(pRspOrderModify->FilledNumber);
data["FilledPrice"] = toUtf(pRspOrderModify->FilledPrice);
data["TradeType"] = toUtf(pRspOrderModify->TradeType);
data["PriceType"] = toUtf(pRspOrderModify->PriceType);
data["HtsType"] = toUtf(pRspOrderModify->HtsType);
data["OrderDate"] = toUtf(pRspOrderModify->OrderDate);
data["OrderTime"] = toUtf(pRspOrderModify->OrderTime);
data["ErrorCode"] = toUtf(pRspOrderModify->ErrorCode);
data["OrderState"] = toUtf(pRspOrderModify->OrderState);
data["IsRiskOrder"] = toUtf(pRspOrderModify->IsRiskOrder);
data["CancelUserId"] = toUtf(pRspOrderModify->CancelUserId);
data["TriggerPrice"] = toUtf(pRspOrderModify->TriggerPrice);
data["ValidDate"] = toUtf(pRspOrderModify->ValidDate);
data["AddReduce"] = toUtf(pRspOrderModify->AddReduce);
data["StrategyId"] = toUtf(pRspOrderModify->StrategyId);
data["MaxShow"] = toUtf(pRspOrderModify->MaxShow);
data["MinQty"] = toUtf(pRspOrderModify->MinQty);
data["ExchangeTime"] = toUtf(pRspOrderModify->ExchangeTime);
data["CancelTime"] = toUtf(pRspOrderModify->CancelTime);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspOrderModify(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspOrderCancel(CFutureRspOrderCancelField *pRspOrderCancel, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspOrderCancel)
{
data["UserId"] = toUtf(pRspOrderCancel->UserId);
data["AccountNo"] = toUtf(pRspOrderCancel->AccountNo);
data["SystemNo"] = toUtf(pRspOrderCancel->SystemNo);
data["LocalNo"] = toUtf(pRspOrderCancel->LocalNo);
data["OrderNo"] = toUtf(pRspOrderCancel->OrderNo);
data["CancelNo"] = toUtf(pRspOrderCancel->CancelNo);
data["ExchangeCode"] = toUtf(pRspOrderCancel->ExchangeCode);
data["TreatyCode"] = toUtf(pRspOrderCancel->TreatyCode);
data["BuySale"] = toUtf(pRspOrderCancel->BuySale);
data["OrderNumber"] = toUtf(pRspOrderCancel->OrderNumber);
data["OrderPrice"] = toUtf(pRspOrderCancel->OrderPrice);
data["FilledNumber"] = toUtf(pRspOrderCancel->FilledNumber);
data["CancelNumber"] = toUtf(pRspOrderCancel->CancelNumber);
data["TradeType"] = toUtf(pRspOrderCancel->TradeType);
data["PriceType"] = toUtf(pRspOrderCancel->PriceType);
data["HtsType"] = toUtf(pRspOrderCancel->HtsType);
data["CancelDate"] = toUtf(pRspOrderCancel->CancelDate);
data["CancelTime"] = toUtf(pRspOrderCancel->CancelTime);
data["ErrorCode"] = toUtf(pRspOrderCancel->ErrorCode);
data["IsRiskOrder"] = toUtf(pRspOrderCancel->IsRiskOrder);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspOrderCancel(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspPasswordUpdate(CFutureRspPasswordUpdateField *pRspPasswordUpdate, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspPasswordUpdate)
{
data["UserId"] = toUtf(pRspPasswordUpdate->UserId);
data["OldPassword"] = toUtf(pRspPasswordUpdate->OldPassword);
data["NewPassword"] = toUtf(pRspPasswordUpdate->NewPassword);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspPasswordUpdate(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRtnTrade(CFutureRtnTradeField *pRtnTrade, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRtnTrade)
{
data["UserId"] = toUtf(pRtnTrade->UserId);
data["AccountNo"] = toUtf(pRtnTrade->AccountNo);
data["FilledNo"] = toUtf(pRtnTrade->FilledNo);
data["OrderNo"] = toUtf(pRtnTrade->OrderNo);
data["SystemNo"] = toUtf(pRtnTrade->SystemNo);
data["LocalNo"] = toUtf(pRtnTrade->LocalNo);
data["ExchangeCode"] = toUtf(pRtnTrade->ExchangeCode);
data["TreatyCode"] = toUtf(pRtnTrade->TreatyCode);
data["BuySale"] = toUtf(pRtnTrade->BuySale);
data["FilledNumber"] = toUtf(pRtnTrade->FilledNumber);
data["FilledPrice"] = toUtf(pRtnTrade->FilledPrice);
data["FilledDate"] = toUtf(pRtnTrade->FilledDate);
data["FilledTime"] = toUtf(pRtnTrade->FilledTime);
data["Commsion"] = toUtf(pRtnTrade->Commsion);
data["OrderNumber"] = toUtf(pRtnTrade->OrderNumber);
data["OrderPrice"] = toUtf(pRtnTrade->OrderPrice);
data["DeliveryDate"] = toUtf(pRtnTrade->DeliveryDate);
data["FilledType"] = toUtf(pRtnTrade->FilledType);
data["OrderType"] = toUtf(pRtnTrade->OrderType);
data["ValidDate"] = toUtf(pRtnTrade->ValidDate);
data["AddReduce"] = toUtf(pRtnTrade->AddReduce);
data["ErrorDescription"] = toUtf(pRtnTrade->ErrorDescription);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRtnTrade(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRtnOrder(CFutureRtnOrderField *pRtnOrder, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRtnOrder)
{
data["LocalOrderNo"] = toUtf(pRtnOrder->LocalOrderNo);
data["ExchangeNo"] = toUtf(pRtnOrder->ExchangeNo);
data["TreatyCode"] = toUtf(pRtnOrder->TreatyCode);
data["OrderNo"] = toUtf(pRtnOrder->OrderNo);
data["OrderNumber"] = pRtnOrder->OrderNumber;
data["FilledNumber"] = pRtnOrder->FilledNumber;
data["FilledAdvPrice"] = pRtnOrder->FilledAdvPrice;
data["BuyHoldNumber"] = pRtnOrder->BuyHoldNumber;
data["BuyHoldOpenPrice"] = pRtnOrder->BuyHoldOpenPrice;
data["BuyHoldPrice"] = pRtnOrder->BuyHoldPrice;
data["SaleHoldNumber"] = pRtnOrder->SaleHoldNumber;
data["SaleHoldOpenPrice"] = pRtnOrder->SaleHoldOpenPrice;
data["SaleHoldPrice"] = pRtnOrder->SaleHoldPrice;
data["IsCanceled"] = toUtf(pRtnOrder->IsCanceled);
data["FilledTotalFee"] = pRtnOrder->FilledTotalFee;
data["Status"] = pRtnOrder->Status;
data["AccountNo"] = toUtf(pRtnOrder->AccountNo);
data["HoldType"] = toUtf(pRtnOrder->HoldType);
data["HoldMarginBuy"] = pRtnOrder->HoldMarginBuy;
data["HoldMarginSale"] = pRtnOrder->HoldMarginSale;
data["CurrPrice"] = pRtnOrder->CurrPrice;
data["FloatProfit"] = pRtnOrder->FloatProfit;
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRtnOrder(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRtnCapital(CFutureRtnCapitalField *pRtnCapital, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRtnCapital)
{
data["ClientNo"] = toUtf(pRtnCapital->ClientNo);
data["AccountNo"] = toUtf(pRtnCapital->AccountNo);
data["CurrencyNo"] = toUtf(pRtnCapital->CurrencyNo);
data["Available"] = pRtnCapital->Available;
data["YAvailable"] = pRtnCapital->YAvailable;
data["CanCashOut"] = pRtnCapital->CanCashOut;
data["Money"] = pRtnCapital->Money;
data["ExpiredProfit"] = pRtnCapital->ExpiredProfit;
data["FrozenDeposit"] = pRtnCapital->FrozenDeposit;
data["Fee"] = pRtnCapital->Fee;
data["Deposit"] = pRtnCapital->Deposit;
data["KeepDeposit"] = pRtnCapital->KeepDeposit;
data["Status"] = pRtnCapital->Status;
data["InMoney"] = pRtnCapital->InMoney;
data["OutMoney"] = pRtnCapital->OutMoney;
data["UnexpiredProfit"] = pRtnCapital->UnexpiredProfit;
data["TodayTotal"] = pRtnCapital->TodayTotal;
data["UnaccountProfit"] = pRtnCapital->UnaccountProfit;
data["Royalty"] = pRtnCapital->Royalty;
data["ExchangeNo"] = toUtf(pRtnCapital->ExchangeNo);
data["TreatyCode"] = toUtf(pRtnCapital->TreatyCode);
data["OrderNo"] = toUtf(pRtnCapital->OrderNo);
data["OrderNumber"] = pRtnCapital->OrderNumber;
data["FilledNumber"] = pRtnCapital->FilledNumber;
data["FilledAdvPrice"] = pRtnCapital->FilledAdvPrice;
data["BuyHoldNumber"] = pRtnCapital->BuyHoldNumber;
data["BuyHoldOpenPrice"] = pRtnCapital->BuyHoldOpenPrice;
data["BuyHoldPrice"] = pRtnCapital->BuyHoldPrice;
data["SaleHoldNumber"] = pRtnCapital->SaleHoldNumber;
data["SaleHoldOpenPrice"] = pRtnCapital->SaleHoldOpenPrice;
data["SaleHoldPrice"] = pRtnCapital->SaleHoldPrice;
data["IsCanceled"] = toUtf(pRtnCapital->IsCanceled);
data["FilledTotalFee"] = pRtnCapital->FilledTotalFee;
data["Credit"] = pRtnCapital->Credit;
data["MarginLimit"] = pRtnCapital->MarginLimit;
data["BorrowValue"] = pRtnCapital->BorrowValue;
data["MortgageMoney"] = pRtnCapital->MortgageMoney;
data["T1"] = pRtnCapital->T1;
data["T2"] = pRtnCapital->T2;
data["T3"] = pRtnCapital->T3;
data["TN"] = pRtnCapital->TN;
data["TradeLimit"] = pRtnCapital->TradeLimit;
data["FCrossMax"] = pRtnCapital->FCrossMax;
data["SellFreezenMoney"] = pRtnCapital->SellFreezenMoney;
data["SellInterest"] = pRtnCapital->SellInterest;
data["SellNeedAddMargin"] = pRtnCapital->SellNeedAddMargin;
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRtnCapital(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRtnPosition(CFutureRtnPositionField *pRtnPosition, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRtnPosition)
{
data["LocalOrderNo"] = toUtf(pRtnPosition->LocalOrderNo);
data["ExchangeNo"] = toUtf(pRtnPosition->ExchangeNo);
data["TreatyCode"] = toUtf(pRtnPosition->TreatyCode);
data["OrderNo"] = toUtf(pRtnPosition->OrderNo);
data["OrderNumber"] = pRtnPosition->OrderNumber;
data["FilledNumber"] = pRtnPosition->FilledNumber;
data["FilledAdvPrice"] = pRtnPosition->FilledAdvPrice;
data["BuyHoldNumber"] = pRtnPosition->BuyHoldNumber;
data["BuyHoldOpenPrice"] = pRtnPosition->BuyHoldOpenPrice;
data["BuyHoldPrice"] = pRtnPosition->BuyHoldPrice;
data["SaleHoldNumber"] = pRtnPosition->SaleHoldNumber;
data["SaleHoldOpenPrice"] = pRtnPosition->SaleHoldOpenPrice;
data["SaleHoldPrice"] = pRtnPosition->SaleHoldPrice;
data["IsCanceled"] = toUtf(pRtnPosition->IsCanceled);
data["FilledTotalFee"] = pRtnPosition->FilledTotalFee;
data["Status"] = pRtnPosition->Status;
data["AccountNo"] = toUtf(pRtnPosition->AccountNo);
data["HoldType"] = toUtf(pRtnPosition->HoldType);
data["HoldMarginBuy"] = pRtnPosition->HoldMarginBuy;
data["HoldMarginSale"] = pRtnPosition->HoldMarginSale;
data["CurrPrice"] = pRtnPosition->CurrPrice;
data["FloatProfit"] = pRtnPosition->FloatProfit;
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRtnPosition(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryOrder(CFutureRspOrderField *pRspOrder, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspOrder)
{
data["UserId"] = toUtf(pRspOrder->UserId);
data["AccountNo"] = toUtf(pRspOrder->AccountNo);
data["SystemNo"] = toUtf(pRspOrder->SystemNo);
data["LocalNo"] = toUtf(pRspOrder->LocalNo);
data["OrderNo"] = toUtf(pRspOrder->OrderNo);
data["OrigOrderNo"] = toUtf(pRspOrder->OrigOrderNo);
data["OrderMethod"] = toUtf(pRspOrder->OrderMethod);
data["AcceptType"] = toUtf(pRspOrder->AcceptType);
data["ExchangeCode"] = toUtf(pRspOrder->ExchangeCode);
data["TreatyCode"] = toUtf(pRspOrder->TreatyCode);
data["BuySale"] = toUtf(pRspOrder->BuySale);
data["OrderNumber"] = toUtf(pRspOrder->OrderNumber);
data["OrderPrice"] = toUtf(pRspOrder->OrderPrice);
data["FilledNumber"] = toUtf(pRspOrder->FilledNumber);
data["FilledPrice"] = toUtf(pRspOrder->FilledPrice);
data["TradeType"] = toUtf(pRspOrder->TradeType);
data["PriceType"] = toUtf(pRspOrder->PriceType);
data["HtsType"] = toUtf(pRspOrder->HtsType);
data["OrderDate"] = toUtf(pRspOrder->OrderDate);
data["OrderTime"] = toUtf(pRspOrder->OrderTime);
data["ErrorCode"] = toUtf(pRspOrder->ErrorCode);
data["OrderState"] = toUtf(pRspOrder->OrderState);
data["IsRiskOrder"] = toUtf(pRspOrder->IsRiskOrder);
data["CancelUserId"] = toUtf(pRspOrder->CancelUserId);
data["TriggerPrice"] = toUtf(pRspOrder->TriggerPrice);
data["ValidDate"] = toUtf(pRspOrder->ValidDate);
data["AddReduce"] = toUtf(pRspOrder->AddReduce);
data["StrategyId"] = toUtf(pRspOrder->StrategyId);
data["MaxShow"] = toUtf(pRspOrder->MaxShow);
data["MinQty"] = toUtf(pRspOrder->MinQty);
data["ExchangeTime"] = toUtf(pRspOrder->ExchangeTime);
data["CancelTime"] = toUtf(pRspOrder->CancelTime);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryOrder(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryTrade(CFutureRspTradeField *pRspTrade, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspTrade)
{
data["UserId"] = toUtf(pRspTrade->UserId);
data["AccountNo"] = toUtf(pRspTrade->AccountNo);
data["FilledNo"] = toUtf(pRspTrade->FilledNo);
data["OrderNo"] = toUtf(pRspTrade->OrderNo);
data["SystemNo"] = toUtf(pRspTrade->SystemNo);
data["LocalNo"] = toUtf(pRspTrade->LocalNo);
data["ExchangeCode"] = toUtf(pRspTrade->ExchangeCode);
data["TreatyCode"] = toUtf(pRspTrade->TreatyCode);
data["BuySale"] = toUtf(pRspTrade->BuySale);
data["FilledNumber"] = toUtf(pRspTrade->FilledNumber);
data["FilledPrice"] = toUtf(pRspTrade->FilledPrice);
data["FilledDate"] = toUtf(pRspTrade->FilledDate);
data["FilledTime"] = toUtf(pRspTrade->FilledTime);
data["Commsion"] = toUtf(pRspTrade->Commsion);
data["OrderNumber"] = toUtf(pRspTrade->OrderNumber);
data["OrderPrice"] = toUtf(pRspTrade->OrderPrice);
data["DeliveryDate"] = toUtf(pRspTrade->DeliveryDate);
data["FilledType"] = toUtf(pRspTrade->FilledType);
data["OrderType"] = toUtf(pRspTrade->OrderType);
data["ValidDate"] = toUtf(pRspTrade->ValidDate);
data["AddReduce"] = toUtf(pRspTrade->AddReduce);
data["ErrorDescription"] = toUtf(pRspTrade->ErrorDescription);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryTrade(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryCapital(CFutureRspCapitalField *pRspCapital, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspCapital)
{
data["UserId"] = toUtf(pRspCapital->UserId);
data["InMoney"] = toUtf(pRspCapital->InMoney);
data["OutMoney"] = toUtf(pRspCapital->OutMoney);
data["TodayCanUse"] = toUtf(pRspCapital->TodayCanUse);
data["TodayAmount"] = toUtf(pRspCapital->TodayAmount);
data["TodayBalance"] = toUtf(pRspCapital->TodayBalance);
data["FreezenMoney"] = toUtf(pRspCapital->FreezenMoney);
data["Commission"] = toUtf(pRspCapital->Commission);
data["Margin"] = toUtf(pRspCapital->Margin);
data["OldCanUse"] = toUtf(pRspCapital->OldCanUse);
data["OldAmount"] = toUtf(pRspCapital->OldAmount);
data["OldBalance"] = toUtf(pRspCapital->OldBalance);
data["FloatingProfit"] = toUtf(pRspCapital->FloatingProfit);
data["CurrencyNo"] = toUtf(pRspCapital->CurrencyNo);
data["CurrencyRate"] = pRspCapital->CurrencyRate;
data["UnexpiredProfit"] = pRspCapital->UnexpiredProfit;
data["UnaccountProfit"] = pRspCapital->UnaccountProfit;
data["KeepDeposit"] = pRspCapital->KeepDeposit;
data["Royalty"] = pRspCapital->Royalty;
data["Credit"] = pRspCapital->Credit;
data["AddCapital"] = pRspCapital->AddCapital;
data["IniEquity"] = pRspCapital->IniEquity;
data["AccountNo"] = toUtf(pRspCapital->AccountNo);
data["MortgageMoney"] = pRspCapital->MortgageMoney;
data["MarginLimit"] = pRspCapital->MarginLimit;
data["BorrowValue"] = pRspCapital->BorrowValue;
data["T1"] = pRspCapital->T1;
data["T2"] = pRspCapital->T2;
data["T3"] = pRspCapital->T3;
data["TN"] = pRspCapital->TN;
data["TradeLimit"] = pRspCapital->TradeLimit;
data["CanCashOut"] = pRspCapital->CanCashOut;
data["AccruedCrInt"] = pRspCapital->AccruedCrInt;
data["AccruedDrInt"] = pRspCapital->AccruedDrInt;
data["CrossMax"] = pRspCapital->CrossMax;
data["SellFreezenMoney"] = pRspCapital->SellFreezenMoney;
data["SellInterest"] = pRspCapital->SellInterest;
data["SellNeedAddMargin"] = pRspCapital->SellNeedAddMargin;
data["NetProfit"] = toUtf(pRspCapital->NetProfit);
data["ProfitRate"] = toUtf(pRspCapital->ProfitRate);
data["RiskRate"] = toUtf(pRspCapital->RiskRate);
data["ErrorDescription"] = toUtf(pRspCapital->ErrorDescription);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryCapital(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryVersion(CFutureRspVersionField *pRspVersion, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspVersion)
{
data["Version"] = toUtf(pRspVersion->Version);
data["MustUpdate"] = toUtf(pRspVersion->MustUpdate);
data["MustVersion"] = toUtf(pRspVersion->MustVersion);
data["VersionContent_CN"] = toUtf(pRspVersion->VersionContent_CN);
data["VersionContent_US"] = toUtf(pRspVersion->VersionContent_US);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryVersion(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryPosition(CFutureRspPositionField *pRspPosition, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspPosition)
{
data["MatchDate"] = toUtf(pRspPosition->MatchDate);
data["MatchNo"] = toUtf(pRspPosition->MatchNo);
data["ClientNo"] = toUtf(pRspPosition->ClientNo);
data["ExchangeNo"] = toUtf(pRspPosition->ExchangeNo);
data["CommodityNo"] = toUtf(pRspPosition->CommodityNo);
data["ContractNo"] = toUtf(pRspPosition->ContractNo);
data["Direct"] = toUtf(pRspPosition->Direct);
data["HoldVol"] = pRspPosition->HoldVol;
data["HoldPrice"] = pRspPosition->HoldPrice;
data["CurrencyNo"] = toUtf(pRspPosition->CurrencyNo);
data["ForciblyPrice"] = pRspPosition->ForciblyPrice;
data["Account"] = toUtf(pRspPosition->Account);
data["HoldType"] = toUtf(pRspPosition->HoldType);
data["DeliveryDate"] = toUtf(pRspPosition->DeliveryDate);
data["ExchangeName"] = toUtf(pRspPosition->ExchangeName);
data["CurrencyName"] = toUtf(pRspPosition->CurrencyName);
data["ContCode"] = toUtf(pRspPosition->ContCode);
data["HoldMargin"] = pRspPosition->HoldMargin;
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryPosition(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryCurrency(CFutureRspCurrencyField *pRspCurrency, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspCurrency)
{
data["CurrencyNo"] = toUtf(pRspCurrency->CurrencyNo);
data["IsBase"] = pRspCurrency->IsBase;
data["ChangeRate"] = pRspCurrency->ChangeRate;
data["CurrencyName"] = toUtf(pRspCurrency->CurrencyName);
data["CurrencyNameEN"] = toUtf(pRspCurrency->CurrencyNameEN);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryCurrency(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryExchange(CFutureRspExchangeField *pRspExchange, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspExchange)
{
data["ExchangeNo"] = toUtf(pRspExchange->ExchangeNo);
data["ExchangeName"] = toUtf(pRspExchange->ExchangeName);
data["SettleType"] = toUtf(pRspExchange->SettleType);
data["NameEN"] = toUtf(pRspExchange->NameEN);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryExchange(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryStrategy(CFutureRspStrategyField *pRspStrategy, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspStrategy)
{
data["UserId"] = toUtf(pRspStrategy->UserId);
data["KeyId"] = toUtf(pRspStrategy->KeyId);
data["Name"] = toUtf(pRspStrategy->Name);
data["Code"] = toUtf(pRspStrategy->Code);
data["PriceType"] = pRspStrategy->PriceType;
data["PriceTypeDetailType"] = pRspStrategy->PriceTypeDetailType;
data["PriceFormula"] = toUtf(pRspStrategy->PriceFormula);
data["TriggerMethod"] = pRspStrategy->TriggerMethod;
data["InnerProtect"] = pRspStrategy->InnerProtect;
data["PassiveFailCloseMainLeg"] = pRspStrategy->PassiveFailCloseMainLeg;
data["SlipPoint"] = pRspStrategy->SlipPoint;
data["RecoverPriceMethod"] = pRspStrategy->RecoverPriceMethod;
data["RecoverPriceSeconds"] = pRspStrategy->RecoverPriceSeconds;
data["SetType"] = toUtf(pRspStrategy->SetType);
data["MinChangePrice"] = pRspStrategy->MinChangePrice;
data["MaxNum"] = pRspStrategy->MaxNum;
data["SuportQuantity"] = pRspStrategy->SuportQuantity;
data["SafeDeep"] = pRspStrategy->SafeDeep;
data["MainRange"] = pRspStrategy->MainRange;
data["ManualZhuiDanBeyondNum"] = toUtf(pRspStrategy->ManualZhuiDanBeyondNum);
data["ManualPingZhuDongBeyondNum"] = toUtf(pRspStrategy->ManualPingZhuDongBeyondNum);
data["AutoGuaDanSeconds"] = pRspStrategy->AutoGuaDanSeconds;
data["AutoZhuiBeiDongDots"] = pRspStrategy->AutoZhuiBeiDongDots;
data["AutoPingZhuDongDots"] = pRspStrategy->AutoPingZhuDongDots;
data["AutoZhiSunDot"] = pRspStrategy->AutoZhiSunDot;
data["AutoZhiSunZhuiBeiDongDots"] = pRspStrategy->AutoZhiSunZhuiBeiDongDots;
data["AutoZhiSunPingZhuDongDots"] = pRspStrategy->AutoZhiSunPingZhuDongDots;
data["DotLen"] = pRspStrategy->DotLen;
data["TradeTime"] = toUtf(pRspStrategy->TradeTime);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryStrategy(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryCommodity(CFutureRspCommodityField *pRspCommodity, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspCommodity)
{
data["CommodityNo"] = toUtf(pRspCommodity->CommodityNo);
data["ExchangeNo"] = toUtf(pRspCommodity->ExchangeNo);
data["CommodityType"] = toUtf(pRspCommodity->CommodityType);
data["Name"] = toUtf(pRspCommodity->Name);
data["Enabled"] = toUtf(pRspCommodity->Enabled);
data["RegDate"] = toUtf(pRspCommodity->RegDate);
data["CurrencyNo"] = toUtf(pRspCommodity->CurrencyNo);
data["ProductDot"] = pRspCommodity->ProductDot;
data["LowerTick"] = pRspCommodity->LowerTick;
data["UpperTick"] = pRspCommodity->UpperTick;
data["DotNum"] = pRspCommodity->DotNum;
data["StrikeCommodityId"] = pRspCommodity->StrikeCommodityId;
data["OptionStyle"] = toUtf(pRspCommodity->OptionStyle);
data["ExchangeNo2"] = toUtf(pRspCommodity->ExchangeNo2);
data["IsSFuture"] = toUtf(pRspCommodity->IsSFuture);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryCommodity(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryInstrument(CFutureRspInstrumentField *pRspInstrument, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspInstrument)
{
data["CommodityCode"] = toUtf(pRspInstrument->CommodityCode);
data["ExchangeNo"] = toUtf(pRspInstrument->ExchangeNo);
data["ContractNo"] = toUtf(pRspInstrument->ContractNo);
data["ContractFName"] = toUtf(pRspInstrument->ContractFName);
data["CommodityNo"] = toUtf(pRspInstrument->CommodityNo);
data["CommodityFName"] = toUtf(pRspInstrument->CommodityFName);
data["CommodityType"] = toUtf(pRspInstrument->CommodityType);
data["CommodityFCurrencyNo"] = toUtf(pRspInstrument->CommodityFCurrencyNo);
data["CurrencyFName"] = toUtf(pRspInstrument->CurrencyFName);
data["ProductDot"] = pRspInstrument->ProductDot;
data["UpperTick"] = pRspInstrument->UpperTick;
data["ExchangeName"] = toUtf(pRspInstrument->ExchangeName);
data["LastSettlePrice"] = pRspInstrument->LastSettlePrice;
data["TradeMonth"] = toUtf(pRspInstrument->TradeMonth);
data["DotNum"] = pRspInstrument->DotNum;
data["LowerTick"] = pRspInstrument->LowerTick;
data["DotNumCarry"] = pRspInstrument->DotNumCarry;
data["UpperTickCarry"] = pRspInstrument->UpperTickCarry;
data["FirstNoticeDay"] = toUtf(pRspInstrument->FirstNoticeDay);
data["FreezenPercent"] = pRspInstrument->FreezenPercent;
data["FreezenMoney"] = pRspInstrument->FreezenMoney;
data["FeeMoney"] = pRspInstrument->FeeMoney;
data["FeePercent"] = pRspInstrument->FeePercent;
data["PriceStrike"] = pRspInstrument->PriceStrike;
data["ProductDotStrike"] = pRspInstrument->ProductDotStrike;
data["UpperTickStrike"] = pRspInstrument->UpperTickStrike;
data["LastTradeDay"] = toUtf(pRspInstrument->LastTradeDay);
data["LastUpdateDay"] = toUtf(pRspInstrument->LastUpdateDay);
data["CriticalPrice"] = pRspInstrument->CriticalPrice;
data["CriticalMinChangedPrice"] = pRspInstrument->CriticalMinChangedPrice;
data["ExchangeSub"] = toUtf(pRspInstrument->ExchangeSub);
data["OptionType"] = toUtf(pRspInstrument->OptionType);
data["OptionMonth"] = toUtf(pRspInstrument->OptionMonth);
data["OptionStrikePrice"] = toUtf(pRspInstrument->OptionStrikePrice);
data["OptionCommodityNo"] = toUtf(pRspInstrument->OptionCommodityNo);
data["OptionContractNo"] = toUtf(pRspInstrument->OptionContractNo);
data["ContractFNameEN"] = toUtf(pRspInstrument->ContractFNameEN);
data["CommodityFNameEN"] = toUtf(pRspInstrument->CommodityFNameEN);
data["OptionStyle"] = toUtf(pRspInstrument->OptionStyle);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryInstrument(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryExchangeTime(CFutureRspExchangeTimeField *pRspExchangeTime, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspExchangeTime)
{
data["Year"] = toUtf(pRspExchangeTime->Year);
data["SummerBegin"] = toUtf(pRspExchangeTime->SummerBegin);
data["WinterBegin"] = toUtf(pRspExchangeTime->WinterBegin);
data["ExchangeNo"] = toUtf(pRspExchangeTime->ExchangeNo);
data["Name"] = toUtf(pRspExchangeTime->Name);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryExchangeTime(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryCommodityTime(CFutureRspCommodityTimeField *pRspCommodityTime, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspCommodityTime)
{
data["CrossTrade"] = toUtf(pRspCommodityTime->CrossTrade);
data["Stage"] = toUtf(pRspCommodityTime->Stage);
data["Summer"] = toUtf(pRspCommodityTime->Summer);
data["Opendate"] = toUtf(pRspCommodityTime->Opendate);
data["Closingdate"] = toUtf(pRspCommodityTime->Closingdate);
data["CommodityNo"] = toUtf(pRspCommodityTime->CommodityNo);
data["ComName"] = toUtf(pRspCommodityTime->ComName);
data["ExchangeNo"] = toUtf(pRspCommodityTime->ExchangeNo);
data["ExName"] = toUtf(pRspCommodityTime->ExName);
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryCommodityTime(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryTotalPosition(CFutureRspTotalPositionField *pRspTotalPosition, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspTotalPosition)
{
data["LocalOrderNo"] = toUtf(pRspTotalPosition->LocalOrderNo);
data["ExchangeNo"] = toUtf(pRspTotalPosition->ExchangeNo);
data["TreatyCode"] = toUtf(pRspTotalPosition->TreatyCode);
data["OrderNo"] = toUtf(pRspTotalPosition->OrderNo);
data["OrderNumber"] = pRspTotalPosition->OrderNumber;
data["FilledNumber"] = pRspTotalPosition->FilledNumber;
data["FilledAdvPrice"] = pRspTotalPosition->FilledAdvPrice;
data["BuyHoldNumber"] = pRspTotalPosition->BuyHoldNumber;
data["BuyHoldOpenPrice"] = pRspTotalPosition->BuyHoldOpenPrice;
data["BuyHoldPrice"] = pRspTotalPosition->BuyHoldPrice;
data["SaleHoldNumber"] = pRspTotalPosition->SaleHoldNumber;
data["SaleHoldOpenPrice"] = pRspTotalPosition->SaleHoldOpenPrice;
data["SaleHoldPrice"] = pRspTotalPosition->SaleHoldPrice;
data["IsCanceled"] = toUtf(pRspTotalPosition->IsCanceled);
data["FilledTotalFee"] = pRspTotalPosition->FilledTotalFee;
data["Status"] = pRspTotalPosition->Status;
data["AccountNo"] = toUtf(pRspTotalPosition->AccountNo);
data["HoldType"] = toUtf(pRspTotalPosition->HoldType);
data["HoldMarginBuy"] = pRspTotalPosition->HoldMarginBuy;
data["HoldMarginSale"] = pRspTotalPosition->HoldMarginSale;
data["CurrPrice"] = pRspTotalPosition->CurrPrice;
data["FloatProfit"] = pRspTotalPosition->FloatProfit;
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryTotalPosition(data, error, iRequestID, bIsLast);
};
void FutureApi::OnRspQryStrategyDetail(CFutureRspStrategyDetailField *pRspStrategyDetail, CFutureRspInfoField *pRspInfo, int iRequestID, bool bIsLast)
{
gil_scoped_acquire acquire;
dict data;
if (pRspStrategyDetail)
{
data["StartegyCommodityNo"] = toUtf(pRspStrategyDetail->StartegyCommodityNo);
data["StartegyContractNo"] = toUtf(pRspStrategyDetail->StartegyContractNo);
data["Price"] = toUtf(pRspStrategyDetail->Price);
data["LegNum"] = pRspStrategyDetail->LegNum;
data["LastTradeDate"] = toUtf(pRspStrategyDetail->LastTradeDate);
data["SeqId"] = pRspStrategyDetail->SeqId;
data["CommodityNo"] = toUtf(pRspStrategyDetail->CommodityNo);
data["ContractNo"] = toUtf(pRspStrategyDetail->ContractNo);
data["Direct"] = toUtf(pRspStrategyDetail->Direct);
data["Quantity"] = pRspStrategyDetail->Quantity;
data["Model"] = toUtf(pRspStrategyDetail->Model);
data["ModifyDate"] = toUtf(pRspStrategyDetail->ModifyDate);
data["OrderMethod"] = toUtf(pRspStrategyDetail->OrderMethod);
data["PriceTolerance"] = pRspStrategyDetail->PriceTolerance;
data["PriceParam"] = pRspStrategyDetail->PriceParam;
data["SuportQuantity"] = pRspStrategyDetail->SuportQuantity;
data["MinMove"] = pRspStrategyDetail->MinMove;
data["TimerOrder"] = pRspStrategyDetail->TimerOrder;
data["TimerOrderBeyondNum"] = toUtf(pRspStrategyDetail->TimerOrderBeyondNum);
data["TimerOrderPrice"] = pRspStrategyDetail->TimerOrderPrice;
data["TimerOrderPriceBeyondNum"] = toUtf(pRspStrategyDetail->TimerOrderPriceBeyondNum);
data["TimerOrderPriceTriggerNum"] = toUtf(pRspStrategyDetail->TimerOrderPriceTriggerNum);
data["ZhiSunOrder"] = pRspStrategyDetail->ZhiSunOrder;
data["ZhiSunOrderBeyondNum"] = toUtf(pRspStrategyDetail->ZhiSunOrderBeyondNum);
data["TimerPingCang"] = pRspStrategyDetail->TimerPingCang;
data["TimerPingCangBeyondNum"] = toUtf(pRspStrategyDetail->TimerPingCangBeyondNum);
data["SafeDeep"] = pRspStrategyDetail->SafeDeep;
data["MainRange"] = pRspStrategyDetail->MainRange;
}
dict error;
if (pRspInfo)
{
error["ErrorID"] = pRspInfo->ErrorID;
error["ErrorMsg"] = toUtf(pRspInfo->ErrorMsg);
}
this->onRspQryStrategyDetail(data, error, iRequestID, bIsLast);
};
| {
"pile_set_name": "Github"
} |
# -*- coding:utf-8 -*-
# Used to prune (sparsify) a trained network model
import numpy as np
import matplotlib.pyplot as plt
import os
os.environ['GLOG_minloglevel'] = '2'
import caffe
# Convert a dense weight vector into CSC-style sparse storage (non-zero values + run-length indices)
def dense_to_sparse_csc(W_flatten, num_level):
    # W_flatten: flattened weight matrix
    # num_level: number of bits for each run-length index (runs longer than 2**num_level are padded with a stored zero)
    csc_W = [] # stored non-zero values
csc_indx = []
indx = 0
for n in range(len(W_flatten)):
if W_flatten[n]!=0 or indx == 2**num_level:
csc_W.append(W_flatten[n])
csc_indx.append(indx)
indx = 0
else:
indx += 1
if indx!=0:
csc_W.append(0.0)
csc_indx.append(indx-1)
return np.array(csc_W, dtype=np.float32),np.array(csc_indx, dtype=np.int8)
# Convert CSC-style sparse storage back into a dense weight vector
def sparse_to_dense_csc(csc_W, csc_W_indx):
    # csc_W: stored non-zero values
    # csc_W_indx: run-length (relative) indices
    W_flatten = [] # reconstructed dense vector
indx = 0
for n in range(len(csc_W)):
if csc_W_indx[n]!=0:
W_flatten.extend([0]*(csc_W_indx[n]))
W_flatten.append(csc_W[n])
return np.array(W_flatten, dtype=np.float32)
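# Illustrative sketch (not part of the original script): a quick round-trip check of the two
# helpers above on a toy vector, using 4 index bits. The helper name below is hypothetical;
# any flattened weight array from net.params would work the same way.
def _csc_roundtrip_example():
    w = np.array([0.0, 0.0, 0.5, 0.0, -1.2, 0.0, 0.0, 0.0, 0.3], dtype=np.float32)
    csc_w, csc_idx = dense_to_sparse_csc(w, 4)    # non-zero values + run-length indices
    w_back = sparse_to_dense_csc(csc_w, csc_idx)  # rebuild the dense vector
    print 'CSC round-trip ok:', np.allclose(w, w_back)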
def read_sparse_net(filename, net, layers):
pass
def write_sparse_net(filename, net):
pass
# Plot a histogram of each layer's weights
def draw_hist_weight(net, layers):
    plt.figure() # create the figure
layer_num = len(layers)
for i, layer in enumerate(layers):
i += 1
W = net.params[layer][0].data
plt.subplot(layer_num/2, 2, i)
        numBins = 2 ** 5
plt.hist(W.flatten(), numBins, color='blue', alpha=0.8)
plt.title(layer)
plt.show()
# Summarize the parameters of the network model (non-zero counts and overall sparsity)
def analyze_param(net, layers):
print '\n=============analyze_param start==============='
total_nonzero = 0
total_allparam = 0
percentage_list = []
for i, layer in enumerate(layers):
i += 1
W = net.params[layer][0].data
b = net.params[layer][1].data
print 'W(%s) range = [%f, %f]' % (layer, min(W.flatten()), max(W.flatten()))
print 'W(%s) mean = %f, std = %f' % (layer, np.mean(W.flatten()), np.std(W.flatten()))
        non_zero = (np.count_nonzero(W.flatten()) + np.count_nonzero(b.flatten())) # number of non-zero parameters
        all_param = (np.prod(W.shape) + np.prod(b.shape)) # total number of parameters
        this_layer_percentage = non_zero / float(all_param) # fraction of parameters that are non-zero
total_nonzero += non_zero
total_allparam += all_param
print 'non-zero W and b cnt = %d' % non_zero
print 'total W and b cnt = %d' % all_param
print 'percentage = %f\n' % (this_layer_percentage)
percentage_list.append(this_layer_percentage)
print '=====> summary:'
print 'non-zero W and b cnt = %d' % total_nonzero
print 'total W and b cnt = %d' % total_allparam
print 'percentage = %f' % (total_nonzero / float(total_allparam))
print '=============analyze_param ends ==============='
return (total_nonzero / float(total_allparam), percentage_list)
def prune(threshold, test_net, layers):
sqarse_net = {}
for i, layer in enumerate(layers):
print '\n============ Pruning %s : threshold=%0.2f ============' % (layer,threshold[i])
W = test_net.params[layer][0].data
b = test_net.params[layer][1].data
hi = np.max(np.abs(W.flatten()))
hi = np.sort(-np.abs(W.flatten()))[int((len(W.flatten())-1)* threshold[i])]
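        # note: the max() on the previous line is immediately overwritten; 'hi' ends up as the
        # (negated) weight value at the threshold[i] percentile, so abs(hi) is the magnitude cutoff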
# abs(val) = 0 ==> 0
# abs(val) >= threshold ==> 1
interpolated = np.interp(np.abs(W), [0, hi * threshold[i], 999999999.0], [0.0, 1.0, 1.0])
        # in the stochastic variant, weights below the threshold are pruned with probability given by 'interpolated'
random_samps = np.random.rand(len(W.flatten()))
random_samps.shape = W.shape
        # pruning mask (the stochastic mask below is commented out; a hard magnitude threshold is used instead)
# mask = (random_samps < interpolated)
mask = (np.abs(W) > (np.abs(hi)))
mask = np.bool_(mask)
W = W * mask
print 'non-zero W percentage = %0.5f ' % (np.count_nonzero(W.flatten()) / float(np.prod(W.shape)))
        # write the pruned weights back into the network
test_net.params[layer][0].data[...] = W
# net.params[layer][0].mask[...] = mask
csc_W, csc_W_indx = dense_to_sparse_csc(W.flatten(), 8)
dense_W = sparse_to_dense_csc(csc_W, csc_W_indx)
sqarse_net[layer + '_W'] = csc_W
sqarse_net[layer + '_W_indx'] = csc_W_indx
        # compute the sparsity of the pruned weights
        # np.savez(model_dir + model_name +"_crc.npz",sqarse_net) # save the sparse network in CSC format
(total_percentage, percentage_list) = analyze_param(test_net, layers)
test_loss, accuracy = test_net_accuracy(test_net)
return (threshold, total_percentage, percentage_list, test_loss, accuracy)
def test_net_accuracy(test_net):
test_iter = 100
test_loss = 0
accuracy = 0
for test_it in range(test_iter):
        # run one forward pass of the test net
test_net.forward()
        # accumulate the test loss
test_loss += test_net.blobs['loss'].data
        # accumulate the test accuracy
accuracy += test_net.blobs['accuracy'].data
return (test_loss / test_iter), (accuracy / test_iter)
def eval_prune_threshold(threshold_list, test_prototxt, caffemodel, prune_layers):
def net_prune(threshold, test_prototx, caffemodel, prune_layers):
test_net = caffe.Net(test_prototx, caffemodel, caffe.TEST)
return prune(threshold, test_net, prune_layers)
accuracy = []
for threshold in threshold_list:
results = net_prune(threshold, test_prototxt, caffemodel, prune_layers)
print 'threshold: ', results[0]
print '\ntotal_percentage: ', results[1]
print '\npercentage_list: ', results[2]
print '\ntest_loss: ', results[3]
print '\naccuracy: ', results[4]
accuracy.append(results[4])
plt.plot(accuracy,'r.')
plt.show()
# Retrain the pruned network, re-pruning after each round of fine-tuning
def retrain_pruned(solver, pruned_caffemodel, threshold, prune_layers):
#solver = caffe.SGDSolver(solver_proto)
retrain_iter = 20
accuracys = []
for i in range(retrain_iter):
solver.net.copy_from(pruned_caffemodel)
# solver.solve()
solver.step(500)
_,_,_,_,accuracy=prune(threshold, solver.test_nets[0], prune_layers)
solver.test_nets[0].save(pruned_caffemodel)
accuracys.append(accuracy)
plt.plot(accuracys, 'r.-')
plt.show()
# Select CPU or GPU mode
#caffe.set_mode_cpu()
caffe.set_device(0)
caffe.set_mode_gpu()
caffe_root = '../../'
#model_dir = caffe_root + 'models/SSD_300x300/'
#deploy = model_dir + 'deploy.prototxt'
#model_name = 'VGG_VOC0712_SSD_300x300_iter_60000'
#caffemodel = model_dir + model_name + '.caffemodel'
model_dir = caffe_root + 'models/mnist/'
deploy = model_dir + 'deploy.prototxt'
model_name = 'LeNet5_Mnist_shapshot_iter_10000'
caffemodel = model_dir + model_name + '.caffemodel'
test_prototxt = model_dir + 'test.prototxt'
solver_proto = model_dir + 'solver.prototxt'
solver = caffe.SGDSolver(solver_proto)
# Layers to be pruned
prune_layers = ['conv1','conv2','ip1','ip2']
# Candidate pruning ratios for a per-layer threshold sweep (a value of 1 leaves that layer essentially unpruned)
test_threshold_list = [[0.3, 1 ,1 ,1], [0.4, 1 ,1 ,1], [0.5, 1 ,1 ,1], [0.6, 1 ,1 ,1], [0.7, 1 ,1 ,1],
[1, 0.05, 1, 1], [1, 0.1, 1, 1], [1, 0.15, 1, 1], [1, 0.2, 1, 1], [1, 0.3, 1, 1],
[1, 1, 0.05, 1], [1, 1, 0.1, 1], [1, 1, 0.15, 1], [1, 1, 0.2, 1], [1, 1, 0.3, 1],
[1, 1, 1, 0.05], [1, 1, 1, 0.1], [1, 1, 1, 0.15], [1, 1, 1, 0.2], [1, 1, 1, 0.3]]
# Evaluate the candidate pruning ratios
#eval_prune_threshold(test_threshold_list, test_prototxt, caffemodel, prune_layers)
threshold = [0.3, 0.1, 0.01, 0.2]
prune(threshold, solver.test_nets[0], prune_layers)
pruned_model = model_dir + model_name +'_pruned' + '.caffemodel'
solver.test_nets[0].save(pruned_model)
retrain_pruned(solver, pruned_model, threshold, prune_layers)
"""
# pruning ratio for each layer
threshold = [0.3, 0.1, 0.01, 0.2]
net = caffe.Net(deploy, caffemodel, caffe.TEST)
# prune the network
prune(threshold, net, prune_layers, test_prototxt)
# save the pruned sparse network model
output_model = model_name +'_pruned' + '.caffemodel'
net.save(output_model)
""" | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
Copyright (c) 2010-2015 Oracle and/or its affiliates. All rights reserved.
The contents of this file are subject to the terms of either the GNU
General Public License Version 2 only ("GPL") or the Common Development
and Distribution License("CDDL") (collectively, the "License"). You
may not use this file except in compliance with the License. You can
obtain a copy of the License at
https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
or packager/legal/LICENSE.txt. See the License for the specific
language governing permissions and limitations under the License.
When distributing the software, include this License Header Notice in each
file and include the License file at packager/legal/LICENSE.txt.
GPL Classpath Exception:
Oracle designates this particular file as subject to the "Classpath"
exception as provided by Oracle in the GPL Version 2 section of the License
file that accompanied this code.
Modifications:
If applicable, add the following below the License Header, with the fields
enclosed by brackets [] replaced by your own identifying information:
"Portions Copyright [year] [name of copyright owner]"
Contributor(s):
If you wish your version of this file to be governed by only the CDDL or
only the GPL Version 2, indicate your decision by adding "[Contributor]
elects to include this software in this distribution under the [CDDL or GPL
Version 2] license." If you don't indicate a single choice of license, a
recipient has the option to distribute your version of this file under
either the CDDL, the GPL Version 2 or to extend the choice of license to
its licensees as provided above. However, if you add GPL Version 2 code
and therefore, elected the GPL Version 2 license, then the option applies
only if the new code is made subject to such option by the copyright
holder.
-->
<!-- Portions Copyright [2016-2019] [Payara Foundation] -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>fish.payara.server.internal.packager</groupId>
<artifactId>packages</artifactId>
<version>5.2020.5-SNAPSHOT</version>
</parent>
<artifactId>glassfish-ha</artifactId>
<name>Glassfish HA Package</name>
<packaging>distribution-fragment</packaging>
<description>This pom describes how to assemble the GlassFish HA package</description>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>process-step1</id>
</execution>
<execution>
<id>process-step2</id>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<executions>
<execution>
<id>process-step3</id>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>ips</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<id>process-step4</id>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.glassfish.build</groupId>
<artifactId>glassfishbuild-maven-plugin</artifactId>
<executions>
<execution>
<id>process-step5</id>
<goals>
<goal>exec</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>copy-resources</id>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>fish.payara.server.internal.ha</groupId>
<artifactId>ha-hazelcast-store</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>
| {
"pile_set_name": "Github"
} |
import {Component, ViewEncapsulation} from '@angular/core';
import {BubbleMapsService} from './bubbleMaps.service';
@Component({
selector: 'bubble-maps',
encapsulation: ViewEncapsulation.None,
styles: [require('./bubbleMaps.scss')],
template: require('./bubbleMaps.html'),
})
export class BubbleMaps {
chartData:Object;
constructor(private _bubbleMapsService:BubbleMapsService) {
}
ngOnInit() {
this.chartData = this._bubbleMapsService.getData();
}
}
| {
"pile_set_name": "Github"
} |
package main
import "C"
//export GoFunc
func GoFunc() {}
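// The "//export GoFunc" directive above tells cgo to expose GoFunc to C; building the package
// with -buildmode=c-archive or -buildmode=c-shared generates a C header that declares it.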
func main() {}
| {
"pile_set_name": "Github"
} |
////////////////////////////////////////////////////////////////////////////
// Module : xrServer_Objects.h
// Created : 19.09.2002
// Modified : 04.06.2003
// Author : Oles Shyshkovtsov, Alexander Maksimchuk, Victor Reutskiy and Dmitriy Iassenev
// Description : Server objects space
////////////////////////////////////////////////////////////////////////////
#pragma once
#include "script_export_space.h"
#define SERVER_ENTITY_DECLARE_BEGIN0(__A) class __A { public: DECLARE_SCRIPT_REGISTER_FUNCTION
#define SERVER_ENTITY_DECLARE_BEGIN(__A,__B) class __A : public __B { typedef __B inherited; public: DECLARE_SCRIPT_REGISTER_FUNCTION
#define SERVER_ENTITY_DECLARE_BEGIN2(__A,__B,__C) class __A : public __B, public __C { typedef __B inherited1; typedef __C inherited2; public: DECLARE_SCRIPT_REGISTER_FUNCTION
#define SERVER_ENTITY_DECLARE_BEGIN3(__A,__B,__C,__D) class __A : public __B, public __C, public __D { typedef __B inherited1; typedef __C inherited2; typedef __D inherited3; public: DECLARE_SCRIPT_REGISTER_FUNCTION
#define SERVER_ENTITY_DECLARE_END \
public:\
virtual void UPDATE_Read (NET_Packet& P); \
virtual void UPDATE_Write (NET_Packet& P); \
virtual void STATE_Read (NET_Packet& P, u16 size); \
virtual void STATE_Write (NET_Packet& P); \
};
struct SRotation
{
float yaw, pitch, roll;
SRotation() { yaw=pitch=roll=0; }
SRotation(float y, float p, float r) { yaw=y;pitch=p;roll=r; }
};
enum EPOType {
epotBox,
epotFixedChain,
epotFreeChain,
epotSkeleton
};
DEFINE_VECTOR (u32, DWORD_VECTOR, DWORD_IT);
DEFINE_VECTOR (bool, BOOL_VECTOR, BOOL_IT);
DEFINE_VECTOR (float, FLOAT_VECTOR, FLOAT_IT);
DEFINE_VECTOR (LPSTR, LPSTR_VECTOR, LPSTR_IT);
DEFINE_VECTOR (Fvector, FVECTOR_VECTOR, FVECTOR_IT);
#ifdef XRGAME_EXPORTS
# define DECLARE_ENTITY_DESTROY
#endif
#ifdef DECLARE_ENTITY_DESTROY
template <class T> void F_entity_Destroy (T *&P)
{
xr_delete (P);
};
#endif
| {
"pile_set_name": "Github"
} |
/**
* @alternateClassName Ext.DomHelper
* @singleton
*
* The DomHelper class provides a layer of abstraction from DOM and transparently supports creating elements via DOM or
* using HTML fragments. It also has the ability to create HTML fragment templates from your DOM building code.
*
* ## DomHelper element specification object
*
* A specification object is used when creating elements. Attributes of this object are assumed to be element
* attributes, except for 4 special attributes:
*
* * **tag**: The tag name of the element
* * **children (or cn)**: An array of the same kind of element definition objects to be created and appended. These
* can be nested as deep as you want.
* * **cls**: The class attribute of the element. This will end up being either the "class" attribute on a HTML
* fragment or className for a DOM node, depending on whether DomHelper is using fragments or DOM.
* * **html**: The innerHTML for the element
*
* ## Insertion methods
*
* Commonly used insertion methods:
*
* * {@link #append}
* * {@link #insertBefore}
* * {@link #insertAfter}
* * {@link #overwrite}
* * {@link #insertHtml}
*
* ## Example
*
* This is an example, where an unordered list with 3 children items is appended to an existing element with id
* 'my-div':
*
* var dh = Ext.DomHelper; // create shorthand alias
* // specification object
* var spec = {
* id: 'my-ul',
* tag: 'ul',
* cls: 'my-list',
* // append children after creating
* children: [ // may also specify 'cn' instead of 'children'
* {tag: 'li', id: 'item0', html: 'List Item 0'},
* {tag: 'li', id: 'item1', html: 'List Item 1'},
* {tag: 'li', id: 'item2', html: 'List Item 2'}
* ]
* };
* var list = dh.append(
* 'my-div', // the context element 'my-div' can either be the id or the actual node
* spec // the specification object
* );
*
* Element creation specification parameters in this class may also be passed as an Array of specification objects.
* This can be used to insert multiple sibling nodes into an existing container very efficiently. For example, to add
* more list items to the example above:
*
* dh.append('my-ul', [
* {tag: 'li', id: 'item3', html: 'List Item 3'},
* {tag: 'li', id: 'item4', html: 'List Item 4'}
* ]);
*
* ## Templating
*
* The real power is in the built-in templating. Instead of creating or appending any elements, createTemplate returns
* a Template object which can be used over and over to insert new elements. Revisiting the example above, we could
* utilize templating this time:
*
* // create the node
* var list = dh.append('my-div', {tag: 'ul', cls: 'my-list'});
* // get template
* var tpl = dh.createTemplate({tag: 'li', id: 'item{0}', html: 'List Item {0}'});
*
* for(var i = 0; i < 5; i++){
* tpl.append(list, i); // use template to append to the actual node
* }
*
* An example using a template:
*
* var html = '"{0}" href="{1}" class="nav">{2}';
*
* var tpl = new Ext.DomHelper.createTemplate(html);
* tpl.append('blog-roll', ['link1', 'http://www.foxmulder.com/', "Fox's Site"]);
* tpl.append('blog-roll', ['link2', 'http://www.danascully.org/', "Scully's Site"]);
*
* The same example using named parameters:
*
* var html = '"{id}" href="{url}" class="nav">{text}';
*
* var tpl = new Ext.DomHelper.createTemplate(html);
* tpl.append('blog-roll', {
* id: 'link1',
* url: 'http://www.danascully.org/',
* text: "Scully's Site"
* });
* tpl.append('blog-roll', {
* id: 'link2',
* url: 'http://www.foxmulder.com/',
* text: "Fox's Site"
* });
*
* ## Compiling Templates
*
* Templates are applied using regular expressions. The performance is great, but if you are adding a bunch of DOM
* elements using the same template, you can increase performance even further by "compiling" the template. The way
* "compile()" works is the template is parsed and broken up at the different variable points and a dynamic function is
* created and eval'ed. The generated function performs string concatenation of these parts and the passed variables
* instead of using regular expressions.
*
* var html = '"{id}" href="{url}" class="nav">{text}';
*
* var tpl = new Ext.DomHelper.createTemplate(html);
* tpl.compile();
*
* // ... use template like normal
*
* ## Performance Boost
*
* DomHelper will transparently create HTML fragments when it can. Using HTML fragments instead of DOM can
* significantly boost performance.
*
* Element creation specification parameters may also be strings which are used as innerHTML.
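 *
 * For example, a raw HTML string can be appended directly (here 'my-div' is the same
 * placeholder container id used above):
 *
 *     Ext.DomHelper.append('my-div', '<p>Some raw <b>HTML</b></p>');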
*/
Ext.define('Ext.dom.Helper', function() {
var afterbegin = 'afterbegin',
afterend = 'afterend',
beforebegin = 'beforebegin',
beforeend = 'beforeend',
bbValues = ['BeforeBegin', 'previousSibling'],
aeValues = ['AfterEnd', 'nextSibling'],
bb_ae_PositionHash = {
beforebegin: bbValues,
afterend: aeValues
},
fullPositionHash = {
beforebegin: bbValues,
afterend: aeValues,
afterbegin: ['AfterBegin', 'firstChild'],
beforeend: ['BeforeEnd', 'lastChild']
};
return {
singleton: true,
alternateClassName: [
'Ext.DomHelper',
'Ext.core.DomHelper'
],
emptyTags: /^(?:br|frame|hr|img|input|link|meta|range|spacer|wbr|area|param|col)$/i,
confRe: /^(?:tag|children|cn|html|tpl|tplData)$/i,
endRe: /end/i,
// Since cls & for are reserved words, we need to transform them
attributeTransform: { cls : 'class', htmlFor : 'for' },
closeTags: {},
detachedDiv: document.createElement('div'),
decamelizeName: function () {
var camelCaseRe = /([a-z])([A-Z])/g,
cache = {};
function decamel (match, p1, p2) {
return p1 + '-' + p2.toLowerCase();
}
return function (s) {
return cache[s] || (cache[s] = s.replace(camelCaseRe, decamel));
};
}(),
generateMarkup: function(spec, buffer) {
var me = this,
specType = typeof spec,
attr, val, tag, i, closeTags;
if (specType === "string" || specType === "number") {
buffer.push(spec);
} else if (Ext.isArray(spec)) {
for (i = 0; i < spec.length; i++) {
if (spec[i]) {
me.generateMarkup(spec[i], buffer);
}
}
} else {
tag = spec.tag || 'div';
buffer.push('<', tag);
for (attr in spec) {
if (spec.hasOwnProperty(attr)) {
val = spec[attr];
if (val !== undefined && !me.confRe.test(attr)) {
if (val && val.join) {
val = val.join(' ');
}
if (typeof val === "object") {
buffer.push(' ', attr, '="');
me.generateStyles(val, buffer, true).push('"');
} else {
buffer.push(' ', me.attributeTransform[attr] || attr, '="', val, '"');
}
}
}
}
// Now either just close the tag or try to add children and close the tag.
if (me.emptyTags.test(tag)) {
buffer.push('/>');
} else {
buffer.push('>');
// Apply the tpl html, and cn specifications
if ((val = spec.tpl)) {
val.applyOut(spec.tplData, buffer);
}
if ((val = spec.html)) {
buffer.push(val);
}
if ((val = spec.cn || spec.children)) {
me.generateMarkup(val, buffer);
}
// we generate a lot of close tags, so cache them rather than push 3 parts
closeTags = me.closeTags;
buffer.push(closeTags[tag] || (closeTags[tag] = '</' + tag + '>'));
}
}
return buffer;
},
/**
* Converts the styles from the given object to text. The styles are CSS style names
* with their associated value.
*
* The basic form of this method returns a string:
*
* var s = Ext.DomHelper.generateStyles({
* backgroundColor: 'red'
* });
*
* // s = 'background-color:red;'
*
* Alternatively, this method can append to an output array.
*
* var buf = [];
*
* ...
*
* Ext.DomHelper.generateStyles({
* backgroundColor: 'red'
* }, buf);
*
* In this case, the style text is pushed on to the array and the array is returned.
*
* @param {Object} styles The object describing the styles.
* @param {String[]} [buffer] The output buffer.
* @param {Boolean} [encode] `true` to {@link Ext.String#htmlEncode} property values if they
* are going to be inserted as HTML attributes.
* @return {String/String[]} If buffer is passed, it is returned. Otherwise the style
* string is returned.
*/
generateStyles: function (styles, buffer, encode) {
var a = buffer || [],
name, val;
for (name in styles) {
if (styles.hasOwnProperty(name)) {
val = styles[name];
// Since a majority of attributes won't have html characters (basically
// restricted to fonts), we'll check first before we try and encode it
// because it's less expensive and this method gets called a lot.
name = this.decamelizeName(name);
if (encode && Ext.String.hasHtmlCharacters(val)) {
val = Ext.String.htmlEncode(val);
}
a.push(name, ':', val, ';');
}
}
return buffer || a.join('');
},
/**
* Returns the markup for the passed Element(s) config.
* @param {Object} spec The DOM object spec (and children).
* @return {String}
*/
markup: function(spec) {
if (typeof spec === "string") {
return spec;
}
var buf = this.generateMarkup(spec, []);
return buf.join('');
},
/**
* Applies a style specification to an element.
*
* Styles in object form should be a valid DOM element style property.
* [Valid style property names](http://www.w3schools.com/jsref/dom_obj_style.asp)
* (_along with the supported CSS version for each_)
*
* // <div id="my-el">Phineas Flynn</div>
*
* var el = Ext.get('my-el'),
* dh = Ext.dom.Helper;
*
* dh.applyStyles(el, 'color: white;');
*
* dh.applyStyles(el, {
* fontWeight: 'bold',
* backgroundColor: 'gray',
* padding: '10px'
* });
*
* dh.applyStyles(el, function () {
* if (name.initialConfig.html === 'Phineas Flynn') {
* return 'font-style: italic;';
* // OR return { fontStyle: 'italic' };
* }
* });
*
* @param {String/HTMLElement/Ext.dom.Element} el The element to apply styles to
* @param {String/Object/Function} styles A style specification string e.g. 'width:100px', or object in the form {width:'100px'}, or
* a function which returns such a specification.
*/
applyStyles: function(el, styles) {
Ext.fly(el).applyStyles(styles);
},
/**
* @private
* Fix for browsers which do not support createContextualFragment
*/
createContextualFragment: function(html){
var div = this.detachedDiv,
fragment = document.createDocumentFragment(),
length, childNodes;
div.innerHTML = html;
childNodes = div.childNodes;
length = childNodes.length;
// Move nodes into fragment, don't clone: http://jsperf.com/create-fragment
while (length--) {
fragment.appendChild(childNodes[0]);
}
return fragment;
},
/**
* Creates new DOM element(s) without inserting them to the document.
* @param {Object/String} o The DOM object spec (and children) or raw HTML blob
* @return {HTMLElement} The new uninserted node
*/
createDom: function(o, parentNode){
var me = this,
markup = me.markup(o),
div = me.detachedDiv,
child;
div.innerHTML = markup;
child = div.firstChild;
// Important to clone the node here, IE8 & 9 have an issue where the markup
// in the first element will be lost.
// var ct = document.createElement('div'),
// a, b;
// ct.innerHTML = '<div>markup1</div>';
// a = ct.firstChild;
// ct.innerHTML = '<div>markup2</div>';
// b = ct.firstChild;
// console.log(a.innerHTML, b.innerHTML);
return Ext.supports.ChildContentClearedWhenSettingInnerHTML ? child.cloneNode(true) : child;
},
/**
* Inserts an HTML fragment into the DOM.
* @param {String} where Where to insert the html in relation to el - beforeBegin, afterBegin, beforeEnd, afterEnd.
*
* For example take the following HTML: `<div>Contents</div>`
*
* Using different `where` values inserts element to the following places:
*
* - beforeBegin: `<HERE><div>Contents</div>`
* - afterBegin: `<div><HERE>Contents</div>`
* - beforeEnd: `<div>Contents<HERE></div>`
* - afterEnd: `<div>Contents</div><HERE>`
*
* @param {HTMLElement/TextNode} el The context element
* @param {String} html The HTML fragment
* @return {HTMLElement} The new node
*/
insertHtml: function(where, el, html) {
var me = this,
hashVal,
range,
rangeEl,
setStart,
frag;
where = where.toLowerCase();
// Has fast HTML insertion into existing DOM: http://www.w3.org/TR/html5/apis-in-html-documents.html#insertadjacenthtml
if (el.insertAdjacentHTML) {
if (me.ieInsertHtml) {
// hook for IE table hack - impl in ext package override
frag = me.ieInsertHtml(where, el, html);
if (frag) {
return frag;
}
}
hashVal = fullPositionHash[where];
if (hashVal) {
el.insertAdjacentHTML(hashVal[0], html);
return el[hashVal[1]];
}
// if (not IE and context element is an HTMLElement) or TextNode
} else {
// we cannot insert anything inside a textnode so...
if (el.nodeType === 3) {
where = where === afterbegin ? beforebegin : where;
where = where === beforeend ? afterend : where;
}
range = Ext.supports.CreateContextualFragment ? el.ownerDocument.createRange() : undefined;
setStart = 'setStart' + (this.endRe.test(where) ? 'After' : 'Before');
if (bb_ae_PositionHash[where]) {
if (range) {
range[setStart](el);
frag = range.createContextualFragment(html);
} else {
frag = this.createContextualFragment(html);
}
el.parentNode.insertBefore(frag, where === beforebegin ? el : el.nextSibling);
return el[(where === beforebegin ? 'previous' : 'next') + 'Sibling'];
} else {
rangeEl = (where === afterbegin ? 'first' : 'last') + 'Child';
if (el.firstChild) {
if (range) {
// Creating ranges on a hidden element throws an error, checking for
// visibility is expensive, so we'll catch the error and fall back to
// using the full fragment
try {
range[setStart](el[rangeEl]);
frag = range.createContextualFragment(html);
}
catch(e) {
frag = this.createContextualFragment(html);
}
} else {
frag = this.createContextualFragment(html);
}
if (where === afterbegin) {
el.insertBefore(frag, el.firstChild);
} else {
el.appendChild(frag);
}
} else {
el.innerHTML = html;
}
return el[rangeEl];
}
}
//<debug>
Ext.raise({
sourceClass: 'Ext.DomHelper',
sourceMethod: 'insertHtml',
htmlToInsert: html,
targetElement: el,
msg: 'Illegal insertion point reached: "' + where + '"'
});
//</debug>
},
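/*
 * Illustrative usage (the element id below is hypothetical):
 *
 *     var el = Ext.getDom('log');
 *     Ext.dom.Helper.insertHtml('beforeEnd', el, '<div>new entry</div>');
 *
 * 'beforeBegin'/'afterEnd' insert as siblings of el, while
 * 'afterBegin'/'beforeEnd' insert as its first/last child.
 */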
/**
* Creates new DOM element(s) and inserts them before el.
* @param {String/HTMLElement/Ext.dom.Element} el The context element
* @param {Object/String} o The DOM object spec (and children) or raw HTML blob
* @param {Boolean} [returnElement] true to return a Ext.Element
* @return {HTMLElement/Ext.dom.Element} The new node
*/
insertBefore: function(el, o, returnElement) {
return this.doInsert(el, o, returnElement, beforebegin);
},
/**
* Creates new DOM element(s) and inserts them after el.
* @param {String/HTMLElement/Ext.dom.Element} el The context element
* @param {Object} o The DOM object spec (and children)
* @param {Boolean} [returnElement] true to return a Ext.Element
* @return {HTMLElement/Ext.dom.Element} The new node
*/
insertAfter: function(el, o, returnElement) {
return this.doInsert(el, o, returnElement, afterend);
},
/**
* Creates new DOM element(s) and inserts them as the first child of el.
* @param {String/HTMLElement/Ext.dom.Element} el The context element
* @param {Object/String} o The DOM object spec (and children) or raw HTML blob
* @param {Boolean} [returnElement] true to return a Ext.Element
* @return {HTMLElement/Ext.dom.Element} The new node
*/
insertFirst: function(el, o, returnElement) {
return this.doInsert(el, o, returnElement, afterbegin);
},
/**
* Creates new DOM element(s) and appends them to el.
* @param {String/HTMLElement/Ext.dom.Element} el The context element
* @param {Object/String} o The DOM object spec (and children) or raw HTML blob
* @param {Boolean} [returnElement] true to return a Ext.Element
* @return {HTMLElement/Ext.dom.Element} The new node
*/
append: function(el, o, returnElement) {
return this.doInsert(el, o, returnElement, beforeend);
},
/**
* Creates new DOM element(s) and overwrites the contents of el with them.
* @param {String/HTMLElement/Ext.dom.Element} el The context element
* @param {Object/String} html The DOM object spec (and children) or raw HTML blob
* @param {Boolean} [returnElement=false] true to return an Ext.Element
* @return {HTMLElement/Ext.dom.Element} The new node
*/
overwrite: function(el, html, returnElement) {
var me = this,
newNode;
el = Ext.getDom(el);
html = me.markup(html);
if (me.ieOverwrite) {
// hook for IE table hack - impl in ext package override
newNode = me.ieOverwrite(el, html);
}
if (!newNode) {
el.innerHTML = html;
newNode = el.firstChild;
}
return returnElement ? Ext.get(newNode) : newNode;
},
doInsert: function(el, o, returnElement, where) {
var me = this,
newNode;
el = el.dom || Ext.getDom(el);
if ('innerHTML' in el) {
// regular dom node
// For standard HTMLElements, we insert as innerHTML instead of
// createElement/appendChild because it is much faster in all versions of
// IE: https://fiddle.sencha.com/#fiddle/tj
newNode = me.insertHtml(where, el, me.markup(o));
} else {
// document fragment does not support innerHTML
newNode = me.createDom(o, null);
// we cannot insert anything inside a textnode so...
if (el.nodeType === 3) {
where = where === afterbegin ? beforebegin : where;
where = where === beforeend ? afterend : where;
}
if (bb_ae_PositionHash[where]) {
el.parentNode.insertBefore(newNode, where === beforebegin ? el : el.nextSibling);
} else if (el.firstChild && where === afterbegin) {
el.insertBefore(newNode, el.firstChild);
} else {
el.appendChild(newNode);
}
}
return returnElement ? Ext.get(newNode) : newNode;
},
/**
* Creates a new Ext.Template from the DOM object spec.
* @param {Object} o The DOM object spec (and children)
* @return {Ext.Template} The new template
*/
createTemplate: function(o) {
var html = this.markup(o);
return new Ext.Template(html);
},
/**
* @method createHtml
* Alias for {@link #markup}.
* @deprecated 5.0.0
*/
createHtml: function(spec) {
return this.markup(spec);
}
};
});
| {
"pile_set_name": "Github"
} |
/*
* Identifier access and function closure handling.
*
* Provides the primitives for slow path identifier accesses: GETVAR,
* PUTVAR, DELVAR, etc. The fast path, direct register accesses, should
* be used for most identifier accesses. Consequently, these slow path
* primitives should be optimized for maximum compactness.
*
* ECMAScript environment records (declarative and object) are represented
* as internal objects with control keys. Environment records have a
* parent record ("outer environment reference") which is represented by
* the implicit prototype for technical reasons (in other words, it is a
* convenient field). The prototype chain is not followed in the ordinary
* sense for variable lookups.
*
* See identifier-handling.rst for more details on the identifier algorithms
* and the internal representation. See function-objects.rst for details on
* what function templates and instances are expected to look like.
*
* Care must be taken to avoid duk_tval pointer invalidation caused by
* e.g. value stack or object resizing.
*
* TODO: properties for function instances could be initialized much more
* efficiently by creating a property allocation for a certain size and
* filling in keys and values directly (and INCREFing both with "bulk incref"
* primitives).
*
* XXX: duk_hobject_getprop() and duk_hobject_putprop() calls are a bit
* awkward (especially because they follow the prototype chain); rework
* if "raw" own property helpers are added.
*/
#include "duk_internal.h"
/*
* Local result type for duk__get_identifier_reference() lookup.
*/
typedef struct {
duk_hobject *env;
duk_hobject *holder; /* for object-bound identifiers */
duk_tval *value; /* for register-bound and declarative env identifiers */
duk_uint_t attrs; /* property attributes for identifier (relevant if value != NULL) */
duk_bool_t has_this; /* for object-bound identifiers: provide 'this' binding */
} duk__id_lookup_result;
/*
* Create a new function object based on a "template function" which contains
* compiled bytecode, constants, etc, but lacks a lexical environment.
*
* ECMAScript requires that each created closure is a separate object, with
* its own set of editable properties. However, structured property values
* (such as the formal arguments list and the variable map) are shared.
* Also the bytecode, constants, and inner functions are shared.
*
* See E5 Section 13.2 for detailed requirements on the function objects;
* there are no similar requirements for function "templates" which are an
* implementation dependent internal feature. Also see function-objects.rst
* for a discussion on the function instance properties provided by this
* implementation.
*
* Notes:
*
* * Order of internal properties should match frequency of use, since the
* properties will be linearly scanned on lookup (functions usually don't
* have enough properties to warrant a hash part).
*
* * The created closure is independent of its template; they do share the
* same 'data' buffer object, but the template object itself can be freed
* even if the closure object remains reachable.
*/
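/*
 * Illustrative ECMAScript behavior: each evaluation of a function expression
 * yields a distinct closure object, while the bytecode/constants ('data'
 * buffer) come from a shared template:
 *
 *     function make() { return function inner() {}; }
 *     make() !== make();   // true: two closures, one shared template
 */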
DUK_LOCAL void duk__inc_data_inner_refcounts(duk_hthread *thr, duk_hcompfunc *f) {
duk_tval *tv, *tv_end;
duk_hobject **funcs, **funcs_end;
DUK_UNREF(thr);
/* If function creation fails due to out-of-memory, the data buffer
* pointer may be NULL in some cases. That's actually possible for
* GC code, but shouldn't be possible here because the incomplete
* function will be unwound from the value stack and never instantiated.
*/
DUK_ASSERT(DUK_HCOMPFUNC_GET_DATA(thr->heap, f) != NULL);
tv = DUK_HCOMPFUNC_GET_CONSTS_BASE(thr->heap, f);
tv_end = DUK_HCOMPFUNC_GET_CONSTS_END(thr->heap, f);
while (tv < tv_end) {
DUK_TVAL_INCREF(thr, tv);
tv++;
}
funcs = DUK_HCOMPFUNC_GET_FUNCS_BASE(thr->heap, f);
funcs_end = DUK_HCOMPFUNC_GET_FUNCS_END(thr->heap, f);
while (funcs < funcs_end) {
DUK_HEAPHDR_INCREF(thr, (duk_heaphdr *) *funcs);
funcs++;
}
}
/* Push a new closure on the stack.
*
* Note: if fun_temp has NEWENV, i.e. a new lexical and variable declaration
* is created when the function is called, only outer_lex_env matters
* (outer_var_env is ignored and may or may not be same as outer_lex_env).
*/
DUK_LOCAL const duk_uint16_t duk__closure_copy_proplist[] = {
/* order: most frequent to least frequent */
DUK_STRIDX_INT_VARMAP,
DUK_STRIDX_INT_FORMALS,
#if defined(DUK_USE_PC2LINE)
DUK_STRIDX_INT_PC2LINE,
#endif
#if defined(DUK_USE_FUNC_FILENAME_PROPERTY)
DUK_STRIDX_FILE_NAME,
#endif
#if defined(DUK_USE_NONSTD_FUNC_SOURCE_PROPERTY)
DUK_STRIDX_INT_SOURCE
#endif
};
DUK_INTERNAL
void duk_js_push_closure(duk_hthread *thr,
duk_hcompfunc *fun_temp,
duk_hobject *outer_var_env,
duk_hobject *outer_lex_env,
duk_bool_t add_auto_proto) {
duk_hcompfunc *fun_clos;
duk_harray *formals;
duk_small_uint_t i;
duk_uint_t len_value;
DUK_ASSERT(fun_temp != NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_DATA(thr->heap, fun_temp) != NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_FUNCS(thr->heap, fun_temp) != NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_BYTECODE(thr->heap, fun_temp) != NULL);
DUK_ASSERT(outer_var_env != NULL);
DUK_ASSERT(outer_lex_env != NULL);
DUK_UNREF(len_value);
DUK_STATS_INC(thr->heap, stats_envrec_pushclosure);
fun_clos = duk_push_hcompfunc(thr);
DUK_ASSERT(fun_clos != NULL);
DUK_ASSERT(DUK_HOBJECT_GET_PROTOTYPE(thr->heap, (duk_hobject *) fun_clos) == thr->builtins[DUK_BIDX_FUNCTION_PROTOTYPE]);
duk_push_hobject(thr, &fun_temp->obj); /* -> [ ... closure template ] */
DUK_ASSERT(DUK_HOBJECT_IS_COMPFUNC((duk_hobject *) fun_clos));
DUK_ASSERT(DUK_HCOMPFUNC_GET_DATA(thr->heap, fun_clos) == NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_FUNCS(thr->heap, fun_clos) == NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_BYTECODE(thr->heap, fun_clos) == NULL);
DUK_HCOMPFUNC_SET_DATA(thr->heap, fun_clos, DUK_HCOMPFUNC_GET_DATA(thr->heap, fun_temp));
DUK_HCOMPFUNC_SET_FUNCS(thr->heap, fun_clos, DUK_HCOMPFUNC_GET_FUNCS(thr->heap, fun_temp));
DUK_HCOMPFUNC_SET_BYTECODE(thr->heap, fun_clos, DUK_HCOMPFUNC_GET_BYTECODE(thr->heap, fun_temp));
/* Note: all references inside 'data' need to get their refcounts
* upped too. This is the case because refcounts are decreased
* through every function referencing 'data' independently.
*/
DUK_HBUFFER_INCREF(thr, DUK_HCOMPFUNC_GET_DATA(thr->heap, fun_clos));
duk__inc_data_inner_refcounts(thr, fun_temp);
fun_clos->nregs = fun_temp->nregs;
fun_clos->nargs = fun_temp->nargs;
#if defined(DUK_USE_DEBUGGER_SUPPORT)
fun_clos->start_line = fun_temp->start_line;
fun_clos->end_line = fun_temp->end_line;
#endif
DUK_ASSERT(DUK_HCOMPFUNC_GET_DATA(thr->heap, fun_clos) != NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_FUNCS(thr->heap, fun_clos) != NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_BYTECODE(thr->heap, fun_clos) != NULL);
/* XXX: Could also copy from template, but there's no way to have any
* other value here now (used code has no access to the template).
* Prototype is set by duk_push_hcompfunc().
*/
DUK_ASSERT(DUK_HOBJECT_GET_PROTOTYPE(thr->heap, &fun_clos->obj) == thr->builtins[DUK_BIDX_FUNCTION_PROTOTYPE]);
#if 0
DUK_HOBJECT_SET_PROTOTYPE_UPDREF(thr, &fun_clos->obj, thr->builtins[DUK_BIDX_FUNCTION_PROTOTYPE]);
#endif
/* Copy duk_hobject flags as is from the template using a mask.
* Leave out duk_heaphdr owned flags just in case (e.g. if there's
* some GC flag or similar). Some flags can then be adjusted
* separately if necessary.
*/
/* DUK_HEAPHDR_SET_FLAGS() masks changes to non-duk_heaphdr flags only. */
DUK_HEAPHDR_SET_FLAGS((duk_heaphdr *) fun_clos, DUK_HEAPHDR_GET_FLAGS_RAW((duk_heaphdr *) fun_temp));
DUK_DD(DUK_DDPRINT("fun_temp heaphdr flags: 0x%08lx, fun_clos heaphdr flags: 0x%08lx",
(unsigned long) DUK_HEAPHDR_GET_FLAGS_RAW((duk_heaphdr *) fun_temp),
(unsigned long) DUK_HEAPHDR_GET_FLAGS_RAW((duk_heaphdr *) fun_clos)));
DUK_ASSERT(DUK_HOBJECT_HAS_EXTENSIBLE(&fun_clos->obj));
DUK_ASSERT(!DUK_HOBJECT_HAS_BOUNDFUNC(&fun_clos->obj));
DUK_ASSERT(DUK_HOBJECT_HAS_COMPFUNC(&fun_clos->obj));
DUK_ASSERT(!DUK_HOBJECT_HAS_NATFUNC(&fun_clos->obj));
DUK_ASSERT(!DUK_HOBJECT_IS_THREAD(&fun_clos->obj));
/* DUK_HOBJECT_FLAG_ARRAY_PART: don't care */
/* DUK_HOBJECT_FLAG_NEWENV: handled below */
DUK_ASSERT(!DUK_HOBJECT_HAS_EXOTIC_ARRAY(&fun_clos->obj));
DUK_ASSERT(!DUK_HOBJECT_HAS_EXOTIC_STRINGOBJ(&fun_clos->obj));
DUK_ASSERT(!DUK_HOBJECT_HAS_EXOTIC_ARGUMENTS(&fun_clos->obj));
if (!DUK_HOBJECT_HAS_CONSTRUCTABLE(&fun_clos->obj)) {
/* If the template is not constructable don't add an automatic
* .prototype property. This is the case for e.g. ES2015 object
* literal getters/setters and method definitions.
*/
add_auto_proto = 0;
}
/*
* Setup environment record properties based on the template and
* its flags.
*
* If DUK_HOBJECT_HAS_NEWENV(fun_temp) is true, the environment
* records represent identifiers "outside" the function; the
* "inner" environment records are created on demand. Otherwise,
* the environment records are those that will be directly used
* (e.g. for declarations).
*
* _Lexenv is always set; _Varenv defaults to _Lexenv if missing,
* so _Varenv is only set if _Lexenv != _Varenv.
*
* This is relatively complex, see doc/identifier-handling.rst.
*/
if (DUK_HOBJECT_HAS_NEWENV(&fun_clos->obj)) {
#if defined(DUK_USE_FUNC_NAME_PROPERTY)
if (DUK_HOBJECT_HAS_NAMEBINDING(&fun_clos->obj)) {
duk_hobject *proto;
duk_hdecenv *new_env;
/*
* Named function expression, name needs to be bound
* in an intermediate environment record. The "outer"
* lexical/variable environment will thus be:
*
* a) { funcname: <func>, __prototype: outer_lex_env }
* b) { funcname: <func>, __prototype: <globalenv> } (if outer_lex_env missing)
*/
if (outer_lex_env) {
proto = outer_lex_env;
} else {
proto = thr->builtins[DUK_BIDX_GLOBAL_ENV];
}
/* -> [ ... closure template env ] */
new_env = duk_hdecenv_alloc(thr,
DUK_HOBJECT_FLAG_EXTENSIBLE |
DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_DECENV));
DUK_ASSERT(new_env != NULL);
duk_push_hobject(thr, (duk_hobject *) new_env);
DUK_ASSERT(DUK_HOBJECT_GET_PROTOTYPE(thr->heap, (duk_hobject *) new_env) == NULL);
DUK_HOBJECT_SET_PROTOTYPE(thr->heap, (duk_hobject *) new_env, proto);
DUK_HOBJECT_INCREF_ALLOWNULL(thr, proto);
DUK_ASSERT(new_env->thread == NULL); /* Closed. */
DUK_ASSERT(new_env->varmap == NULL);
/* It's important that duk_xdef_prop() is a 'raw define' so that any
* properties in an ancestor are never an issue (they should never be
* e.g. non-writable, but just in case).
*
* Because template objects are not visible to user code, the case
* where .name is missing shouldn't happen in practice. If it does,
* the name 'undefined' gets bound and maps to the closure (which is
* a bit odd, but safe).
*/
(void) duk_get_prop_stridx_short(thr, -2, DUK_STRIDX_NAME);
/* -> [ ... closure template env funcname ] */
duk_dup_m4(thr); /* -> [ ... closure template env funcname closure ] */
duk_xdef_prop(thr, -3, DUK_PROPDESC_FLAGS_NONE); /* -> [ ... closure template env ] */
/* env[funcname] = closure */
/* [ ... closure template env ] */
DUK_HCOMPFUNC_SET_LEXENV(thr->heap, fun_clos, (duk_hobject *) new_env);
DUK_HCOMPFUNC_SET_VARENV(thr->heap, fun_clos, (duk_hobject *) new_env);
DUK_HOBJECT_INCREF(thr, (duk_hobject *) new_env);
DUK_HOBJECT_INCREF(thr, (duk_hobject *) new_env);
duk_pop_unsafe(thr);
/* [ ... closure template ] */
}
else
#endif /* DUK_USE_FUNC_NAME_PROPERTY */
{
/*
* Other cases (function declaration, anonymous function expression,
* strict direct eval code). The "outer" environment will be whatever
* the caller gave us.
*/
DUK_HCOMPFUNC_SET_LEXENV(thr->heap, fun_clos, outer_lex_env);
DUK_HCOMPFUNC_SET_VARENV(thr->heap, fun_clos, outer_lex_env);
DUK_HOBJECT_INCREF(thr, outer_lex_env);
DUK_HOBJECT_INCREF(thr, outer_lex_env);
/* [ ... closure template ] */
}
} else {
/*
* Function gets no new environment when called. This is the
* case for global code, indirect eval code, and non-strict
* direct eval code. There is no direct correspondence to the
* E5 specification, as global/eval code is not exposed as a
* function.
*/
DUK_ASSERT(!DUK_HOBJECT_HAS_NAMEBINDING(&fun_temp->obj));
DUK_HCOMPFUNC_SET_LEXENV(thr->heap, fun_clos, outer_lex_env);
DUK_HCOMPFUNC_SET_VARENV(thr->heap, fun_clos, outer_var_env);
DUK_HOBJECT_INCREF(thr, outer_lex_env); /* NULLs not allowed; asserted on entry */
DUK_HOBJECT_INCREF(thr, outer_var_env);
}
DUK_DDD(DUK_DDDPRINT("closure varenv -> %!ipO, lexenv -> %!ipO",
(duk_heaphdr *) fun_clos->var_env,
(duk_heaphdr *) fun_clos->lex_env));
/* Call handling assumes this for all callable closures. */
DUK_ASSERT(DUK_HCOMPFUNC_GET_LEXENV(thr->heap, fun_clos) != NULL);
DUK_ASSERT(DUK_HCOMPFUNC_GET_VARENV(thr->heap, fun_clos) != NULL);
/*
* Copy some internal properties directly
*
* The properties will be non-writable and non-enumerable, but
* configurable.
*
* Function templates are bare objects, so inheritance of internal
* Symbols is not an issue here even when using ordinary property
* reads. The function instance created is not bare, so internal
* Symbols must be defined without inheritance checks.
*/
/* [ ... closure template ] */
DUK_DDD(DUK_DDDPRINT("copying properties: closure=%!iT, template=%!iT",
(duk_tval *) duk_get_tval(thr, -2),
(duk_tval *) duk_get_tval(thr, -1)));
for (i = 0; i < (duk_small_uint_t) (sizeof(duk__closure_copy_proplist) / sizeof(duk_uint16_t)); i++) {
duk_small_int_t stridx = (duk_small_int_t) duk__closure_copy_proplist[i];
if (duk_xget_owndataprop_stridx_short(thr, -1, stridx)) {
/* [ ... closure template val ] */
DUK_DDD(DUK_DDDPRINT("copying property, stridx=%ld -> found", (long) stridx));
duk_xdef_prop_stridx_short(thr, -3, stridx, DUK_PROPDESC_FLAGS_C);
} else {
DUK_DDD(DUK_DDDPRINT("copying property, stridx=%ld -> not found", (long) stridx));
duk_pop_unsafe(thr);
}
}
/*
* "length" maps to number of formals (E5 Section 13.2) for function
* declarations/expressions (non-bound functions). Note that 'nargs'
* is NOT necessarily equal to the number of arguments. Use length
* of _Formals; if missing, assume nargs matches .length.
*/
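/*
 * Illustrative ECMAScript behavior:
 *
 *     (function (a, b) {}).length === 2;   // .length reflects the formals
 */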
/* [ ... closure template ] */
formals = duk_hobject_get_formals(thr, (duk_hobject *) fun_temp);
if (formals) {
len_value = (duk_uint_t) formals->length;
DUK_DD(DUK_DDPRINT("closure length from _Formals -> %ld", (long) len_value));
} else {
len_value = fun_temp->nargs;
DUK_DD(DUK_DDPRINT("closure length defaulted from nargs -> %ld", (long) len_value));
}
duk_push_uint(thr, len_value); /* [ ... closure template len_value ] */
duk_xdef_prop_stridx_short(thr, -3, DUK_STRIDX_LENGTH, DUK_PROPDESC_FLAGS_C);
/*
* "prototype" is, by default, a fresh object with the "constructor"
* property.
*
* Note that this creates a circular reference for every function
* instance (closure) which prevents refcount-based collection of
* function instances.
*
* XXX: Try to avoid creating the default prototype object, because
* many functions are not used as constructors and the default
* prototype is unnecessary. Perhaps it could be created on-demand
* when it is first accessed?
*/
/* [ ... closure template ] */
if (add_auto_proto) {
duk_push_object(thr); /* -> [ ... closure template newobj ] */
duk_dup_m3(thr); /* -> [ ... closure template newobj closure ] */
duk_xdef_prop_stridx_short(thr, -2, DUK_STRIDX_CONSTRUCTOR, DUK_PROPDESC_FLAGS_WC); /* -> [ ... closure template newobj ] */
duk_compact(thr, -1); /* compact the prototype */
duk_xdef_prop_stridx_short(thr, -3, DUK_STRIDX_PROTOTYPE, DUK_PROPDESC_FLAGS_W); /* -> [ ... closure template ] */
}
/*
* "arguments" and "caller" must be mapped to throwers for strict
* mode and bound functions (E5 Section 15.3.5).
*
* XXX: This is expensive to have for every strict function instance.
* Try to implement as virtual properties or on-demand created properties.
*/
/* [ ... closure template ] */
if (DUK_HOBJECT_HAS_STRICT(&fun_clos->obj)) {
duk_xdef_prop_stridx_thrower(thr, -2, DUK_STRIDX_CALLER);
duk_xdef_prop_stridx_thrower(thr, -2, DUK_STRIDX_LC_ARGUMENTS);
} else {
#if defined(DUK_USE_NONSTD_FUNC_CALLER_PROPERTY)
DUK_DDD(DUK_DDDPRINT("function is non-strict and non-standard 'caller' property in use, add initial 'null' value"));
duk_push_null(thr);
duk_xdef_prop_stridx_short(thr, -3, DUK_STRIDX_CALLER, DUK_PROPDESC_FLAGS_NONE);
#else
DUK_DDD(DUK_DDDPRINT("function is non-strict and non-standard 'caller' property not used"));
#endif
}
/*
* "name" used to be non-standard but is now defined by ES2015.
* In ES2015/ES2016 the .name property is configurable.
*/
/* [ ... closure template ] */
#if defined(DUK_USE_FUNC_NAME_PROPERTY)
/* XXX: Look for own property only; doesn't matter much because
* templates are bare objects.
*/
if (duk_get_prop_stridx_short(thr, -1, DUK_STRIDX_NAME)) {
/* [ ... closure template name ] */
DUK_ASSERT(duk_is_string(thr, -1));
DUK_DD(DUK_DDPRINT("setting function instance name to %!T", duk_get_tval(thr, -1)));
duk_xdef_prop_stridx_short(thr, -3, DUK_STRIDX_NAME, DUK_PROPDESC_FLAGS_C); /* -> [ ... closure template ] */
} else {
/* Anonymous functions don't have a .name in ES2015, so don't set
* it on the instance either. The instance will then inherit
* it from Function.prototype.name.
*/
DUK_DD(DUK_DDPRINT("not setting function instance .name"));
duk_pop_unsafe(thr);
}
#endif
/*
* Compact the closure, in most cases no properties will be added later.
* Also, without this the closures end up having unused property slots
* (e.g. in Duktape 0.9.0, 8 slots would be allocated and only 7 used).
* A better future solution would be to allocate the closure directly
* to correct size (and setup the properties directly without going
* through the API).
*/
duk_compact(thr, -2);
/*
* Some assertions (E5 Section 13.2).
*/
DUK_ASSERT(DUK_HOBJECT_GET_CLASS_NUMBER(&fun_clos->obj) == DUK_HOBJECT_CLASS_FUNCTION);
DUK_ASSERT(DUK_HOBJECT_GET_PROTOTYPE(thr->heap, &fun_clos->obj) == thr->builtins[DUK_BIDX_FUNCTION_PROTOTYPE]);
DUK_ASSERT(DUK_HOBJECT_HAS_EXTENSIBLE(&fun_clos->obj));
DUK_ASSERT(duk_has_prop_stridx(thr, -2, DUK_STRIDX_LENGTH) != 0);
DUK_ASSERT(add_auto_proto == 0 || duk_has_prop_stridx(thr, -2, DUK_STRIDX_PROTOTYPE) != 0);
/* May be missing .name */
DUK_ASSERT(!DUK_HOBJECT_HAS_STRICT(&fun_clos->obj) ||
duk_has_prop_stridx(thr, -2, DUK_STRIDX_CALLER) != 0);
DUK_ASSERT(!DUK_HOBJECT_HAS_STRICT(&fun_clos->obj) ||
duk_has_prop_stridx(thr, -2, DUK_STRIDX_LC_ARGUMENTS) != 0);
/*
* Finish
*/
/* [ ... closure template ] */
DUK_DDD(DUK_DDDPRINT("created function instance: template=%!iT -> closure=%!iT",
(duk_tval *) duk_get_tval(thr, -1),
(duk_tval *) duk_get_tval(thr, -2)));
duk_pop_unsafe(thr);
/* [ ... closure ] */
}
/*
* Delayed activation environment record initialization (for functions
* with NEWENV).
*
* The non-delayed initialization is handled by duk_handle_call().
*/
DUK_LOCAL void duk__preallocate_env_entries(duk_hthread *thr, duk_hobject *varmap, duk_hobject *env) {
duk_uint_fast32_t i;
for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ENEXT(varmap); i++) {
duk_hstring *key;
key = DUK_HOBJECT_E_GET_KEY(thr->heap, varmap, i);
DUK_ASSERT(key != NULL); /* assume keys are compact in _Varmap */
DUK_ASSERT(!DUK_HOBJECT_E_SLOT_IS_ACCESSOR(thr->heap, varmap, i)); /* assume plain values */
/* Predefine as 'undefined' to reserve a property slot.
* This makes the unwind process (where register values
* are copied to the env object) safe against throwing.
*
* XXX: This could be made much faster by creating the
* property table directly.
*/
duk_push_undefined(thr);
DUK_DDD(DUK_DDDPRINT("preallocate env entry for key %!O", key));
duk_hobject_define_property_internal(thr, env, key, DUK_PROPDESC_FLAGS_WE);
}
}
/* shared helper */
DUK_INTERNAL
duk_hobject *duk_create_activation_environment_record(duk_hthread *thr,
duk_hobject *func,
duk_size_t bottom_byteoff) {
duk_hdecenv *env;
duk_hobject *parent;
duk_hcompfunc *f;
DUK_ASSERT(thr != NULL);
DUK_ASSERT(func != NULL);
DUK_STATS_INC(thr->heap, stats_envrec_create);
f = (duk_hcompfunc *) func;
parent = DUK_HCOMPFUNC_GET_LEXENV(thr->heap, f);
if (!parent) {
parent = thr->builtins[DUK_BIDX_GLOBAL_ENV];
}
env = duk_hdecenv_alloc(thr,
DUK_HOBJECT_FLAG_EXTENSIBLE |
DUK_HOBJECT_CLASS_AS_FLAGS(DUK_HOBJECT_CLASS_DECENV));
DUK_ASSERT(env != NULL);
duk_push_hobject(thr, (duk_hobject *) env);
DUK_ASSERT(DUK_HOBJECT_GET_PROTOTYPE(thr->heap, (duk_hobject *) env) == NULL);
DUK_HOBJECT_SET_PROTOTYPE(thr->heap, (duk_hobject *) env, parent);
DUK_HOBJECT_INCREF_ALLOWNULL(thr, parent); /* parent env is the prototype */
/* open scope information, for compiled functions only */
DUK_ASSERT(env->thread == NULL);
DUK_ASSERT(env->varmap == NULL);
DUK_ASSERT(env->regbase_byteoff == 0);
if (DUK_HOBJECT_IS_COMPFUNC(func)) {
duk_hobject *varmap;
varmap = duk_hobject_get_varmap(thr, func);
if (varmap != NULL) {
env->varmap = varmap;
DUK_HOBJECT_INCREF(thr, varmap);
env->thread = thr;
DUK_HTHREAD_INCREF(thr, thr);
env->regbase_byteoff = bottom_byteoff;
/* Preallocate env property table to avoid potential
* for out-of-memory on unwind when the env is closed.
*/
duk__preallocate_env_entries(thr, varmap, (duk_hobject *) env);
} else {
/* If function has no _Varmap, leave the environment closed. */
DUK_ASSERT(env->thread == NULL);
DUK_ASSERT(env->varmap == NULL);
DUK_ASSERT(env->regbase_byteoff == 0);
}
}
return (duk_hobject *) env;
}
DUK_INTERNAL
void duk_js_init_activation_environment_records_delayed(duk_hthread *thr,
duk_activation *act) {
duk_hobject *func;
duk_hobject *env;
DUK_ASSERT(thr != NULL);
func = DUK_ACT_GET_FUNC(act);
DUK_ASSERT(func != NULL);
DUK_ASSERT(!DUK_HOBJECT_HAS_BOUNDFUNC(func)); /* bound functions are never in act 'func' */
/*
* Delayed initialization only occurs for 'NEWENV' functions.
*/
DUK_ASSERT(DUK_HOBJECT_HAS_NEWENV(func));
DUK_ASSERT(act->lex_env == NULL);
DUK_ASSERT(act->var_env == NULL);
DUK_STATS_INC(thr->heap, stats_envrec_delayedcreate);
env = duk_create_activation_environment_record(thr, func, act->bottom_byteoff);
DUK_ASSERT(env != NULL);
/* 'act' is a stable pointer, so still OK. */
DUK_DDD(DUK_DDDPRINT("created delayed fresh env: %!ipO", (duk_heaphdr *) env));
#if defined(DUK_USE_DEBUG_LEVEL) && (DUK_USE_DEBUG_LEVEL >= 2)
{
duk_hobject *p = env;
while (p) {
DUK_DDD(DUK_DDDPRINT(" -> %!ipO", (duk_heaphdr *) p));
p = DUK_HOBJECT_GET_PROTOTYPE(thr->heap, p);
}
}
#endif
act->lex_env = env;
act->var_env = env;
DUK_HOBJECT_INCREF(thr, env); /* XXX: incref by count (here 2 times) */
DUK_HOBJECT_INCREF(thr, env);
duk_pop_unsafe(thr);
}
/*
* Closing environment records.
*
* The environment record MUST be closed with the thread where its activation
* is; i.e. if 'env' is open, 'thr' must match env->thread, and the regbase
* and varmap must still be valid. On entry, 'env' must be reachable.
*/
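/*
 * Illustrative ECMAScript case where closing matters: the returned inner
 * function keeps 'n' alive after counter() unwinds, so the register value
 * of 'n' must be copied into the environment record at that point:
 *
 *     function counter() {
 *         var n = 0;
 *         return function () { return ++n; };
 *     }
 */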
DUK_INTERNAL void duk_js_close_environment_record(duk_hthread *thr, duk_hobject *env) {
duk_uint_fast32_t i;
duk_hobject *varmap;
duk_hstring *key;
duk_tval *tv;
duk_uint_t regnum;
DUK_ASSERT(thr != NULL);
DUK_ASSERT(env != NULL);
if (DUK_UNLIKELY(!DUK_HOBJECT_IS_DECENV(env))) {
DUK_DDD(DUK_DDDPRINT("env not a declarative record: %!iO", (duk_heaphdr *) env));
return;
}
varmap = ((duk_hdecenv *) env)->varmap;
if (varmap == NULL) {
DUK_DDD(DUK_DDDPRINT("env already closed: %!iO", (duk_heaphdr *) env));
return;
}
DUK_ASSERT(((duk_hdecenv *) env)->thread != NULL);
DUK_HDECENV_ASSERT_VALID((duk_hdecenv *) env);
DUK_DDD(DUK_DDDPRINT("closing env: %!iO", (duk_heaphdr *) env));
DUK_DDD(DUK_DDDPRINT("varmap: %!O", (duk_heaphdr *) varmap));
/* Env must be closed in the same thread as where it runs. */
DUK_ASSERT(((duk_hdecenv *) env)->thread == thr);
/* XXX: additional conditions when to close variables? we don't want to do it
* unless the environment may have "escaped" (referenced in a function closure).
* With delayed environments, the environment's mere existence is probably a good enough check.
*/
/* Note: we rely on the _Varmap having a bunch of nice properties, like:
* - being compacted and unmodified during this process
* - not containing an array part
* - having correct value types
*/
DUK_DDD(DUK_DDDPRINT("copying bound register values, %ld bound regs", (long) DUK_HOBJECT_GET_ENEXT(varmap)));
/* Copy over current variable values from value stack to the
* environment record. The scope object is empty but may
* inherit from another scope which has conflicting names.
*/
/* XXX: Do this using a once allocated entry area, no side effects.
* Hash part would need special treatment however (maybe copy, and
* then realloc with hash part if large enough).
*/
for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ENEXT(varmap); i++) {
duk_size_t regbase_byteoff;
key = DUK_HOBJECT_E_GET_KEY(thr->heap, varmap, i);
DUK_ASSERT(key != NULL); /* assume keys are compact in _Varmap */
DUK_ASSERT(!DUK_HOBJECT_E_SLOT_IS_ACCESSOR(thr->heap, varmap, i)); /* assume plain values */
tv = DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr->heap, varmap, i);
DUK_ASSERT(DUK_TVAL_IS_NUMBER(tv));
DUK_ASSERT(DUK_TVAL_GET_NUMBER(tv) <= (duk_double_t) DUK_UINT32_MAX); /* limits */
#if defined(DUK_USE_FASTINT)
DUK_ASSERT(DUK_TVAL_IS_FASTINT(tv));
regnum = (duk_uint_t) DUK_TVAL_GET_FASTINT_U32(tv);
#else
regnum = (duk_uint_t) DUK_TVAL_GET_NUMBER(tv);
#endif
regbase_byteoff = ((duk_hdecenv *) env)->regbase_byteoff;
DUK_ASSERT((duk_uint8_t *) thr->valstack + regbase_byteoff + sizeof(duk_tval) * regnum >= (duk_uint8_t *) thr->valstack);
DUK_ASSERT((duk_uint8_t *) thr->valstack + regbase_byteoff + sizeof(duk_tval) * regnum < (duk_uint8_t *) thr->valstack_top);
/* Write register value into env as named properties.
* If property already exists, overwrites silently.
* Property is writable, but not deletable (not configurable
* in terms of property attributes).
*
* This property write must not throw because we're unwinding
* and unwind code is not allowed to throw at present. The
* call itself has no such guarantees, but we've preallocated
* entries for each property when the env was created, so no
* out-of-memory error should be possible. If this guarantee
* is not provided, problems like GH-476 may happen.
*/
duk_push_tval(thr, (duk_tval *) (void *) ((duk_uint8_t *) thr->valstack + regbase_byteoff + sizeof(duk_tval) * regnum));
DUK_DDD(DUK_DDDPRINT("closing identifier %!O -> reg %ld, value %!T",
(duk_heaphdr *) key,
(long) regnum,
(duk_tval *) duk_get_tval(thr, -1)));
duk_hobject_define_property_internal(thr, env, key, DUK_PROPDESC_FLAGS_WE);
}
/* NULL atomically to avoid inconsistent state + side effects. */
DUK_HOBJECT_DECREF_NORZ(thr, ((duk_hdecenv *) env)->thread);
DUK_HOBJECT_DECREF_NORZ(thr, ((duk_hdecenv *) env)->varmap);
((duk_hdecenv *) env)->thread = NULL;
((duk_hdecenv *) env)->varmap = NULL;
DUK_DDD(DUK_DDDPRINT("env after closing: %!O", (duk_heaphdr *) env));
}
/*
* GETIDREF: a GetIdentifierReference-like helper.
*
* Provides a parent traversing lookup and a single level lookup
* (for HasBinding).
*
* Instead of returning the value, returns a bunch of values allowing
* the caller to read, write, or delete the binding. Value pointers
* are duk_tval pointers which can be mutated directly as long as
* refcounts are properly updated. Note that any operation which may
* reallocate valstacks or compact objects may invalidate the returned
* duk_tval (but not object) pointers, so caller must be very careful.
*
* If starting environment record 'env' is given, 'act' is ignored.
* However, if 'env' is NULL, the caller may identify, in 'act', an
* activation which hasn't had its declarative environment initialized
* yet. The activation registers are then looked up, and its parent
* traversed normally.
*
* The 'out' structure values are only valid if the function returns
* success (non-zero).
*/
/* lookup name from an open declarative record's registers */
DUK_LOCAL
duk_bool_t duk__getid_open_decl_env_regs(duk_hthread *thr,
duk_hstring *name,
duk_hdecenv *env,
duk__id_lookup_result *out) {
duk_tval *tv;
duk_size_t reg_rel;
DUK_ASSERT(thr != NULL);
DUK_ASSERT(name != NULL);
DUK_ASSERT(env != NULL);
DUK_ASSERT(out != NULL);
DUK_ASSERT(DUK_HOBJECT_IS_DECENV((duk_hobject *) env));
DUK_HDECENV_ASSERT_VALID(env);
if (env->thread == NULL) {
/* already closed */
return 0;
}
DUK_ASSERT(env->varmap != NULL);
tv = duk_hobject_find_entry_tval_ptr(thr->heap, env->varmap, name);
if (DUK_UNLIKELY(tv == NULL)) {
return 0;
}
DUK_ASSERT(DUK_TVAL_IS_NUMBER(tv));
DUK_ASSERT(DUK_TVAL_GET_NUMBER(tv) <= (duk_double_t) DUK_UINT32_MAX); /* limits */
#if defined(DUK_USE_FASTINT)
DUK_ASSERT(DUK_TVAL_IS_FASTINT(tv));
reg_rel = (duk_size_t) DUK_TVAL_GET_FASTINT_U32(tv);
#else
reg_rel = (duk_size_t) DUK_TVAL_GET_NUMBER(tv);
#endif
DUK_ASSERT_DISABLE(reg_rel >= 0); /* unsigned */
tv = (duk_tval *) (void *) ((duk_uint8_t *) env->thread->valstack + env->regbase_byteoff + sizeof(duk_tval) * reg_rel);
DUK_ASSERT(tv >= env->thread->valstack && tv < env->thread->valstack_end); /* XXX: more accurate? */
out->value = tv;
out->attrs = DUK_PROPDESC_FLAGS_W; /* registers are mutable, non-deletable */
out->env = (duk_hobject *) env;
out->holder = NULL;
out->has_this = 0;
return 1;
}
/* lookup name from current activation record's functions' registers */
DUK_LOCAL
duk_bool_t duk__getid_activation_regs(duk_hthread *thr,
duk_hstring *name,
duk_activation *act,
duk__id_lookup_result *out) {
duk_tval *tv;
duk_hobject *func;
duk_hobject *varmap;
duk_size_t reg_rel;
DUK_ASSERT(thr != NULL);
DUK_ASSERT(name != NULL);
DUK_ASSERT(act != NULL);
DUK_ASSERT(out != NULL);
func = DUK_ACT_GET_FUNC(act);
DUK_ASSERT(func != NULL);
DUK_ASSERT(DUK_HOBJECT_HAS_NEWENV(func));
if (!DUK_HOBJECT_IS_COMPFUNC(func)) {
return 0;
}
/* XXX: move varmap to duk_hcompfunc struct field? */
varmap = duk_hobject_get_varmap(thr, func);
if (!varmap) {
return 0;
}
tv = duk_hobject_find_entry_tval_ptr(thr->heap, varmap, name);
if (!tv) {
return 0;
}
DUK_ASSERT(DUK_TVAL_IS_NUMBER(tv));
reg_rel = (duk_size_t) DUK_TVAL_GET_NUMBER(tv);
DUK_ASSERT_DISABLE(reg_rel >= 0);
DUK_ASSERT(reg_rel < ((duk_hcompfunc *) func)->nregs);
tv = (duk_tval *) (void *) ((duk_uint8_t *) thr->valstack + act->bottom_byteoff);
tv += reg_rel;
out->value = tv;
out->attrs = DUK_PROPDESC_FLAGS_W; /* registers are mutable, non-deletable */
out->env = NULL;
out->holder = NULL;
out->has_this = 0;
return 1;
}
DUK_LOCAL
duk_bool_t duk__get_identifier_reference(duk_hthread *thr,
duk_hobject *env,
duk_hstring *name,
duk_activation *act,
duk_bool_t parents,
duk__id_lookup_result *out) {
duk_tval *tv;
duk_uint_t sanity;
DUK_ASSERT(thr != NULL);
DUK_ASSERT(env != NULL || act != NULL);
DUK_ASSERT(name != NULL);
DUK_ASSERT(out != NULL);
DUK_ASSERT(!env || DUK_HOBJECT_IS_ENV(env));
DUK_ASSERT(!env || !DUK_HOBJECT_HAS_ARRAY_PART(env));
/*
* Conceptually, we look for the identifier binding by starting from
* 'env' and following to chain of environment records (represented
* by the prototype chain).
*
* If 'env' is NULL, the current activation does not yet have an
* allocated declarative environment record; this should be treated
* exactly as if the environment record existed but had no bindings
* other than register bindings.
*
* Note: we assume that with the DUK_HOBJECT_FLAG_NEWENV cleared
* the environment will always be initialized immediately; hence
* a NULL 'env' should only happen with the flag set. This is the
* case for: (1) function calls, and (2) strict, direct eval calls.
*/
if (env == NULL && act != NULL) {
duk_hobject *func;
duk_hcompfunc *f;
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference: env is NULL, activation is non-NULL -> "
"delayed env case, look up activation regs first"));
/*
* Try registers
*/
if (duk__getid_activation_regs(thr, name, act, out)) {
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference successful: "
"name=%!O -> value=%!T, attrs=%ld, has_this=%ld, env=%!O, holder=%!O "
"(found from register bindings when env=NULL)",
(duk_heaphdr *) name, (duk_tval *) out->value,
(long) out->attrs, (long) out->has_this,
(duk_heaphdr *) out->env, (duk_heaphdr *) out->holder));
return 1;
}
DUK_DDD(DUK_DDDPRINT("not found in current activation regs"));
/*
* Not found in registers, proceed to the parent record.
* Here we need to determine what the parent would be,
* if 'env' was not NULL (i.e. same logic as when initializing
* the record).
*
* Note that environment initialization is only deferred when
* DUK_HOBJECT_HAS_NEWENV is set, and this only happens for:
* - Function code
* - Strict eval code
*
* We only need to check _Lexenv here; _Varenv exists only if it
* differs from _Lexenv (and thus _Lexenv will also be present).
*/
if (!parents) {
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference failed, no parent traversal "
"(not found from register bindings when env=NULL)"));
goto fail_not_found;
}
func = DUK_ACT_GET_FUNC(act);
DUK_ASSERT(func != NULL);
DUK_ASSERT(DUK_HOBJECT_HAS_NEWENV(func));
f = (duk_hcompfunc *) func;
env = DUK_HCOMPFUNC_GET_LEXENV(thr->heap, f);
if (!env) {
env = thr->builtins[DUK_BIDX_GLOBAL_ENV];
}
DUK_DDD(DUK_DDDPRINT("continue lookup from env: %!iO",
(duk_heaphdr *) env));
}
/*
* Prototype walking starting from 'env'.
*
* ('act' is not needed anywhere here.)
*/
sanity = DUK_HOBJECT_PROTOTYPE_CHAIN_SANITY;
while (env != NULL) {
duk_small_uint_t cl;
duk_uint_t attrs;
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference, name=%!O, considering env=%p -> %!iO",
(duk_heaphdr *) name,
(void *) env,
(duk_heaphdr *) env));
DUK_ASSERT(env != NULL);
DUK_ASSERT(DUK_HOBJECT_IS_ENV(env));
DUK_ASSERT(!DUK_HOBJECT_HAS_ARRAY_PART(env));
cl = DUK_HOBJECT_GET_CLASS_NUMBER(env);
DUK_ASSERT(cl == DUK_HOBJECT_CLASS_OBJENV || cl == DUK_HOBJECT_CLASS_DECENV);
if (cl == DUK_HOBJECT_CLASS_DECENV) {
/*
* Declarative environment record.
*
* Identifiers can never be stored in ancestors and are
* always plain values, so we can use an internal helper
* and access the value directly with an duk_tval ptr.
*
* A closed environment is only indicated by it missing
* the "book-keeping" properties required for accessing
* register-bound variables.
*/
DUK_HDECENV_ASSERT_VALID((duk_hdecenv *) env);
if (duk__getid_open_decl_env_regs(thr, name, (duk_hdecenv *) env, out)) {
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference successful: "
"name=%!O -> value=%!T, attrs=%ld, has_this=%ld, env=%!O, holder=%!O "
"(declarative environment record, scope open, found in regs)",
(duk_heaphdr *) name, (duk_tval *) out->value,
(long) out->attrs, (long) out->has_this,
(duk_heaphdr *) out->env, (duk_heaphdr *) out->holder));
return 1;
}
tv = duk_hobject_find_entry_tval_ptr_and_attrs(thr->heap, env, name, &attrs);
if (tv) {
out->value = tv;
out->attrs = attrs;
out->env = env;
out->holder = env;
out->has_this = 0;
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference successful: "
"name=%!O -> value=%!T, attrs=%ld, has_this=%ld, env=%!O, holder=%!O "
"(declarative environment record, found in properties)",
(duk_heaphdr *) name, (duk_tval *) out->value,
(long) out->attrs, (long) out->has_this,
(duk_heaphdr *) out->env, (duk_heaphdr *) out->holder));
return 1;
}
} else {
/*
* Object environment record.
*
* Binding (target) object is an external, uncontrolled object.
* Identifier may be bound in an ancestor property, and may be
* an accessor. Target can also be a Proxy which we must support
* here.
*/
/* XXX: we could save space by using _Target OR _This. If _Target, assume
* this binding is undefined. If _This, assume this binding is _This, and
* target is also _This. One property would then be enough.
*/
duk_hobject *target;
duk_bool_t found;
DUK_ASSERT(cl == DUK_HOBJECT_CLASS_OBJENV);
DUK_HOBJENV_ASSERT_VALID((duk_hobjenv *) env);
target = ((duk_hobjenv *) env)->target;
DUK_ASSERT(target != NULL);
/* Target may be a Proxy or property may be an accessor, so we must
* use an actual, Proxy-aware hasprop check here.
*
* out->holder is NOT set to the actual duk_hobject where the
* property is found, but rather the object binding target object.
*/
#if defined(DUK_USE_ES6_PROXY)
if (DUK_UNLIKELY(DUK_HOBJECT_IS_PROXY(target))) {
duk_tval tv_name;
duk_tval tv_target_tmp;
DUK_ASSERT(name != NULL);
DUK_TVAL_SET_STRING(&tv_name, name);
DUK_TVAL_SET_OBJECT(&tv_target_tmp, target);
found = duk_hobject_hasprop(thr, &tv_target_tmp, &tv_name);
} else
#endif /* DUK_USE_ES6_PROXY */
{
/* XXX: duk_hobject_hasprop() would be correct for
* non-Proxy objects too, but it is about ~20-25%
* slower at present so separate code paths for
* Proxy and non-Proxy now.
*/
found = duk_hobject_hasprop_raw(thr, target, name);
}
if (found) {
out->value = NULL; /* can't get value, may be accessor */
out->attrs = 0; /* irrelevant when out->value == NULL */
out->env = env;
out->holder = target;
out->has_this = ((duk_hobjenv *) env)->has_this;
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference successful: "
"name=%!O -> value=%!T, attrs=%ld, has_this=%ld, env=%!O, holder=%!O "
"(object environment record)",
(duk_heaphdr *) name, (duk_tval *) out->value,
(long) out->attrs, (long) out->has_this,
(duk_heaphdr *) out->env, (duk_heaphdr *) out->holder));
return 1;
}
}
if (!parents) {
DUK_DDD(DUK_DDDPRINT("duk__get_identifier_reference failed, no parent traversal "
"(not found from first traversed env)"));
goto fail_not_found;
}
if (DUK_UNLIKELY(sanity-- == 0)) {
DUK_ERROR_RANGE(thr, DUK_STR_PROTOTYPE_CHAIN_LIMIT);
DUK_WO_NORETURN(return 0;);
}
env = DUK_HOBJECT_GET_PROTOTYPE(thr->heap, env);
}
/*
* Not found (even in global object)
*/
fail_not_found:
return 0;
}
/*
* HASVAR: check identifier binding from a given environment record
* without traversing its parents.
*
* This primitive is not exposed to user code as such, but is used
* internally for e.g. declaration binding instantiation.
*
* See E5 Sections:
* 10.2.1.1.1 HasBinding(N)
* 10.2.1.2.1 HasBinding(N)
*
* Note: strictness has no bearing on this check. Hence we don't take
* a 'strict' parameter.
*/
#if 0 /*unused*/
DUK_INTERNAL
duk_bool_t duk_js_hasvar_envrec(duk_hthread *thr,
duk_hobject *env,
duk_hstring *name) {
duk__id_lookup_result ref;
duk_bool_t parents;
DUK_DDD(DUK_DDDPRINT("hasvar: thr=%p, env=%p, name=%!O "
"(env -> %!dO)",
(void *) thr, (void *) env, (duk_heaphdr *) name,
(duk_heaphdr *) env));
DUK_ASSERT(thr != NULL);
DUK_ASSERT(env != NULL);
DUK_ASSERT(name != NULL);
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(env);
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(name);
DUK_ASSERT(DUK_HOBJECT_IS_ENV(env));
DUK_ASSERT(!DUK_HOBJECT_HAS_ARRAY_PART(env));
/* lookup result is ignored */
parents = 0;
return duk__get_identifier_reference(thr, env, name, NULL, parents, &ref);
}
#endif
/*
* GETVAR
*
* See E5 Sections:
* 11.1.2 Identifier Reference
* 10.3.1 Identifier Resolution
* 11.13.1 Simple Assignment [example of where the Reference is GetValue'd]
* 8.7.1 GetValue (V)
* 8.12.1 [[GetOwnProperty]] (P)
* 8.12.2 [[GetProperty]] (P)
* 8.12.3 [[Get]] (P)
*
* If 'throw' is true, always leaves two values on top of stack: [val this].
*
* If 'throw' is false, returns 0 if identifier cannot be resolved, and the
* stack will be unaffected in this case. If identifier is resolved, returns
* 1 and leaves [val this] on top of stack.
*
* Note: the 'strict' flag of a reference returned by GetIdentifierReference
* is ignored by GetValue. Hence we don't take a 'strict' parameter.
*
* The 'throw' flag is needed for implementing 'typeof' for an unreferenced
* identifier. An unreferenced identifier in other contexts generates a
* ReferenceError.
*/
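/*
 * Illustrative ECMAScript behavior of the 'throw' flag:
 *
 *     typeof missing;   // "undefined" (lookup with throw_flag == 0)
 *     missing;          // ReferenceError (lookup with throw_flag == 1)
 */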
DUK_LOCAL
duk_bool_t duk__getvar_helper(duk_hthread *thr,
duk_hobject *env,
duk_activation *act,
duk_hstring *name,
duk_bool_t throw_flag) {
duk__id_lookup_result ref;
duk_tval tv_tmp_obj;
duk_tval tv_tmp_key;
duk_bool_t parents;
DUK_DDD(DUK_DDDPRINT("getvar: thr=%p, env=%p, act=%p, name=%!O "
"(env -> %!dO)",
(void *) thr, (void *) env, (void *) act,
(duk_heaphdr *) name, (duk_heaphdr *) env));
DUK_ASSERT(thr != NULL);
DUK_ASSERT(name != NULL);
/* env and act may be NULL */
DUK_STATS_INC(thr->heap, stats_getvar_all);
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(env);
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(name);
parents = 1; /* follow parent chain */
if (duk__get_identifier_reference(thr, env, name, act, parents, &ref)) {
if (ref.value) {
duk_push_tval(thr, ref.value);
duk_push_undefined(thr);
} else {
DUK_ASSERT(ref.holder != NULL);
/* ref.holder is safe across the getprop call (even
* with side effects) because 'env' is reachable and
* ref.holder is a direct heap pointer.
*/
DUK_TVAL_SET_OBJECT(&tv_tmp_obj, ref.holder);
DUK_TVAL_SET_STRING(&tv_tmp_key, name);
(void) duk_hobject_getprop(thr, &tv_tmp_obj, &tv_tmp_key); /* [value] */
if (ref.has_this) {
duk_push_hobject(thr, ref.holder);
} else {
duk_push_undefined(thr);
}
/* [value this] */
}
return 1;
} else {
if (throw_flag) {
DUK_ERROR_FMT1(thr, DUK_ERR_REFERENCE_ERROR,
"identifier '%s' undefined",
(const char *) DUK_HSTRING_GET_DATA(name));
DUK_WO_NORETURN(return 0;);
}
return 0;
}
}
DUK_INTERNAL
duk_bool_t duk_js_getvar_envrec(duk_hthread *thr,
duk_hobject *env,
duk_hstring *name,
duk_bool_t throw_flag) {
return duk__getvar_helper(thr, env, NULL, name, throw_flag);
}
DUK_INTERNAL
duk_bool_t duk_js_getvar_activation(duk_hthread *thr,
duk_activation *act,
duk_hstring *name,
duk_bool_t throw_flag) {
DUK_ASSERT(act != NULL);
return duk__getvar_helper(thr, act->lex_env, act, name, throw_flag);
}
/*
* PUTVAR
*
* See E5 Sections:
* 11.1.2 Identifier Reference
* 10.3.1 Identifier Resolution
* 11.13.1 Simple Assignment [example of where the Reference is PutValue'd]
* 8.7.2 PutValue (V,W) [see especially step 3.b, undefined -> automatic global in non-strict mode]
* 8.12.4 [[CanPut]] (P)
* 8.12.5 [[Put]] (P)
*
* Note: may invalidate any valstack (or object) duk_tval pointers because
* putting a value may reallocate any object or any valstack. Caller beware.
*/
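/*
 * Illustrative ECMAScript behavior:
 *
 *     (function () { sloppyGlobal = 1; })();         // creates a global property
 *     (function () { 'use strict'; nope = 1; })();   // throws ReferenceError
 */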
DUK_LOCAL
void duk__putvar_helper(duk_hthread *thr,
duk_hobject *env,
duk_activation *act,
duk_hstring *name,
duk_tval *val,
duk_bool_t strict) {
duk__id_lookup_result ref;
duk_tval tv_tmp_obj;
duk_tval tv_tmp_key;
duk_bool_t parents;
DUK_STATS_INC(thr->heap, stats_putvar_all);
DUK_DDD(DUK_DDDPRINT("putvar: thr=%p, env=%p, act=%p, name=%!O, val=%p, strict=%ld "
"(env -> %!dO, val -> %!T)",
(void *) thr, (void *) env, (void *) act,
(duk_heaphdr *) name, (void *) val, (long) strict,
(duk_heaphdr *) env, (duk_tval *) val));
DUK_ASSERT(thr != NULL);
DUK_ASSERT(name != NULL);
DUK_ASSERT(val != NULL);
/* env and act may be NULL */
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(env);
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(name);
DUK_ASSERT_REFCOUNT_NONZERO_TVAL(val);
/*
* In strict mode E5 protects 'eval' and 'arguments' from being
* assigned to (or even declared anywhere). Attempt to do so
* should result in a compile time SyntaxError. See the internal
* design documentation for details.
*
* Thus, we should never come here, run-time, for strict code,
* and name 'eval' or 'arguments'.
*/
DUK_ASSERT(!strict ||
(name != DUK_HTHREAD_STRING_EVAL(thr) &&
name != DUK_HTHREAD_STRING_LC_ARGUMENTS(thr)));
/*
* Lookup variable and update in-place if found.
*/
parents = 1; /* follow parent chain */
if (duk__get_identifier_reference(thr, env, name, act, parents, &ref)) {
if (ref.value && (ref.attrs & DUK_PROPDESC_FLAG_WRITABLE)) {
/* Update duk_tval in-place if pointer provided and the
* property is writable. If the property is not writable
* (immutable binding), use duk_hobject_putprop() which
* will respect mutability.
*/
duk_tval *tv_val;
tv_val = ref.value;
DUK_ASSERT(tv_val != NULL);
DUK_TVAL_SET_TVAL_UPDREF(thr, tv_val, val); /* side effects */
/* ref.value invalidated here */
} else {
DUK_ASSERT(ref.holder != NULL);
DUK_TVAL_SET_OBJECT(&tv_tmp_obj, ref.holder);
DUK_TVAL_SET_STRING(&tv_tmp_key, name);
(void) duk_hobject_putprop(thr, &tv_tmp_obj, &tv_tmp_key, val, strict);
/* ref.value invalidated here */
}
return;
}
/*
* Not found: write to global object (non-strict) or ReferenceError
* (strict); see E5 Section 8.7.2, step 3.
*/
if (strict) {
DUK_DDD(DUK_DDDPRINT("identifier binding not found, strict => reference error"));
DUK_ERROR_FMT1(thr, DUK_ERR_REFERENCE_ERROR,
"identifier '%s' undefined",
(const char *) DUK_HSTRING_GET_DATA(name));
DUK_WO_NORETURN(return;);
}
DUK_DDD(DUK_DDDPRINT("identifier binding not found, not strict => set to global"));
DUK_TVAL_SET_OBJECT(&tv_tmp_obj, thr->builtins[DUK_BIDX_GLOBAL]);
DUK_TVAL_SET_STRING(&tv_tmp_key, name);
(void) duk_hobject_putprop(thr, &tv_tmp_obj, &tv_tmp_key, val, 0); /* 0 = no throw */
/* NB: 'val' may be invalidated here because put_value may realloc valstack,
* caller beware.
*/
}
DUK_INTERNAL
void duk_js_putvar_envrec(duk_hthread *thr,
duk_hobject *env,
duk_hstring *name,
duk_tval *val,
duk_bool_t strict) {
duk__putvar_helper(thr, env, NULL, name, val, strict);
}
DUK_INTERNAL
void duk_js_putvar_activation(duk_hthread *thr,
duk_activation *act,
duk_hstring *name,
duk_tval *val,
duk_bool_t strict) {
DUK_ASSERT(act != NULL);
duk__putvar_helper(thr, act->lex_env, act, name, val, strict);
}
/*
* DELVAR
*
* See E5 Sections:
* 11.4.1 The delete operator
* 10.2.1.1.5 DeleteBinding (N) [declarative environment record]
* 10.2.1.2.5 DeleteBinding (N) [object environment record]
*
* Variable bindings established inside eval() are deletable (configurable),
* other bindings are not, including variables declared in global level.
* Registers are always non-deletable, and the deletion of other bindings
* is controlled by the configurable flag.
*
* For strict mode code, the 'delete' operator should fail with a compile
* time SyntaxError if applied to identifiers. Hence, no strict mode
* run-time deletion of identifiers should ever happen. This function
* should never be called from strict mode code!
*/
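/*
 * Illustrative ECMAScript behavior (non-strict code):
 *
 *     var a = 1;
 *     delete a;            // false: program-level binding is non-deletable
 *     eval('var b = 1;');
 *     delete b;            // true: eval-introduced binding is deletable
 */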
DUK_LOCAL
duk_bool_t duk__delvar_helper(duk_hthread *thr,
duk_hobject *env,
duk_activation *act,
duk_hstring *name) {
duk__id_lookup_result ref;
duk_bool_t parents;
DUK_DDD(DUK_DDDPRINT("delvar: thr=%p, env=%p, act=%p, name=%!O "
"(env -> %!dO)",
(void *) thr, (void *) env, (void *) act,
(duk_heaphdr *) name, (duk_heaphdr *) env));
DUK_ASSERT(thr != NULL);
DUK_ASSERT(name != NULL);
/* env and act may be NULL */
DUK_ASSERT_REFCOUNT_NONZERO_HEAPHDR(name);
parents = 1; /* follow parent chain */
if (duk__get_identifier_reference(thr, env, name, act, parents, &ref)) {
if (ref.value && !(ref.attrs & DUK_PROPDESC_FLAG_CONFIGURABLE)) {
/* Identifier found in registers (always non-deletable)
* or declarative environment record and non-configurable.
*/
return 0;
}
DUK_ASSERT(ref.holder != NULL);
return duk_hobject_delprop_raw(thr, ref.holder, name, 0);
}
/*
* Not found (even in global object).
*
* In non-strict mode this is a silent SUCCESS (!), see E5 Section 11.4.1,
* step 3.b. In strict mode this case is a compile time SyntaxError so
* we should not come here.
*/
DUK_DDD(DUK_DDDPRINT("identifier to be deleted not found: name=%!O "
"(treated as silent success)",
(duk_heaphdr *) name));
return 1;
}
#if 0 /*unused*/
DUK_INTERNAL
duk_bool_t duk_js_delvar_envrec(duk_hthread *thr,
duk_hobject *env,
duk_hstring *name) {
return duk__delvar_helper(thr, env, NULL, name);
}
#endif
DUK_INTERNAL
duk_bool_t duk_js_delvar_activation(duk_hthread *thr,
duk_activation *act,
duk_hstring *name) {
DUK_ASSERT(act != NULL);
return duk__delvar_helper(thr, act->lex_env, act, name);
}
/*
* DECLVAR
*
* See E5 Sections:
* 10.4.3 Entering Function Code
* 10.5 Declaration Binding Instantiation
* 12.2 Variable Statement
* 11.1.2 Identifier Reference
* 10.3.1 Identifier Resolution
*
* Variable declaration behavior is mainly discussed in Section 10.5,
* and is not discussed in the execution semantics (Sections 11-13).
*
* Conceptually declarations happen when code (global, eval, function)
* is entered, before any user code is executed. In practice, register-
* bound identifiers are 'declared' automatically (by virtue of being
* allocated to registers with the initial value 'undefined'). Other
* identifiers are declared in the function prologue with this primitive.
*
* Since non-register bindings are eventually backed by an internal object's
* properties, the 'prop_flags' argument is used to specify binding
* type:
*
* - Immutable binding: set DUK_PROPDESC_FLAG_WRITABLE to false
* - Non-deletable binding: set DUK_PROPDESC_FLAG_CONFIGURABLE to false
* - The flag DUK_PROPDESC_FLAG_ENUMERABLE should be set, although it
* doesn't really matter for internal objects
*
* All bindings are non-deletable mutable bindings except:
*
* - Declarations in eval code (mutable, deletable)
* - 'arguments' binding in strict function code (immutable)
* - Function name binding of a function expression (immutable)
*
* Declarations may go to declarative environment records (always
* so for functions), but may also go to object environment records
* (e.g. global code). The global object environment has special
* behavior when re-declaring a function (but not a variable); see
* E5.1 specification, Section 10.5, step 5.e.
*
* Declarations always go to the 'top-most' environment record, i.e.
* we never check the record chain. It's not an error even if a
* property (even an immutable or non-deletable one) of the same name
* already exists.
*
* If a declared variable already exists, its value needs to be updated
* (if possible). Returns 1 if a PUTVAR needs to be done by the caller;
* otherwise returns 0.
*/
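/*
 * Illustrative case for the E5.1 Section 10.5 step 5.e behavior: a global
 * function declaration may silently replace an existing non-configurable
 * data property as long as that property is writable and enumerable;
 * redeclaring over a non-configurable accessor (or a non-writable or
 * non-enumerable plain property) is instead a TypeError.
 */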
DUK_LOCAL
duk_bool_t duk__declvar_helper(duk_hthread *thr,
duk_hobject *env,
duk_hstring *name,
duk_tval *val,
duk_small_uint_t prop_flags,
duk_bool_t is_func_decl) {
duk_hobject *holder;
duk_bool_t parents;
duk__id_lookup_result ref;
duk_tval *tv;
DUK_DDD(DUK_DDDPRINT("declvar: thr=%p, env=%p, name=%!O, val=%!T, prop_flags=0x%08lx, is_func_decl=%ld "
"(env -> %!iO)",
(void *) thr, (void *) env, (duk_heaphdr *) name,
(duk_tval *) val, (unsigned long) prop_flags,
(unsigned int) is_func_decl, (duk_heaphdr *) env));
DUK_ASSERT(thr != NULL);
DUK_ASSERT(env != NULL);
DUK_ASSERT(name != NULL);
DUK_ASSERT(val != NULL);
/* Note: in strict mode the compiler should reject explicit
* declaration of 'eval' or 'arguments'. However, internal
* bytecode may declare 'arguments' in the function prologue.
* We don't bother checking (or asserting) for these now.
*/
/* Note: val is a stable duk_tval pointer. The caller makes
* a value copy into its stack frame, so 'tv_val' is not subject
* to side effects here.
*/
/*
* Check whether already declared.
*
* We need to check whether the binding exists in the environment
* without walking its parents. However, we still need to check
* register-bound identifiers and the prototype chain of an object
* environment target object.
*/
parents = 0; /* just check 'env' */
if (duk__get_identifier_reference(thr, env, name, NULL, parents, &ref)) {
duk_int_t e_idx;
duk_int_t h_idx;
duk_small_uint_t flags;
/*
* Variable already declared, ignore re-declaration.
* The only exception is the updated behavior of E5.1 for
* global function declarations, E5.1 Section 10.5, step 5.e.
* This behavior does not apply to global variable declarations.
*/
if (!(is_func_decl && env == thr->builtins[DUK_BIDX_GLOBAL_ENV])) {
DUK_DDD(DUK_DDDPRINT("re-declare a binding, ignoring"));
return 1; /* 1 -> needs a PUTVAR */
}
/*
* Special behavior in E5.1.
*
* Note that even though parents == 0, the conflicting property
* may be an inherited property (currently our global object's
* prototype is Object.prototype). Step 5.e first operates on
* the existing property (which is potentially in an ancestor)
* and then defines a new property in the global object (and
* never modifies the ancestor).
*
* Also note that this logic would become even more complicated
* if the conflicting property might be a virtual one. Object
* prototype has no virtual properties, though.
*
* XXX: this is now very awkward, rework.
*/
DUK_DDD(DUK_DDDPRINT("re-declare a function binding in global object, "
"updated E5.1 processing"));
DUK_ASSERT(ref.holder != NULL);
holder = ref.holder;
/* holder will be set to the target object, not the actual object
* where the property was found (see duk__get_identifier_reference()).
*/
DUK_ASSERT(DUK_HOBJECT_GET_CLASS_NUMBER(holder) == DUK_HOBJECT_CLASS_GLOBAL);
DUK_ASSERT(!DUK_HOBJECT_HAS_EXOTIC_ARRAY(holder)); /* global object doesn't have array part */
/* XXX: use a helper for prototype traversal; no loop check here */
/* must be found: was found earlier, and cannot be inherited */
for (;;) {
DUK_ASSERT(holder != NULL);
if (duk_hobject_find_entry(thr->heap, holder, name, &e_idx, &h_idx)) {
DUK_ASSERT(e_idx >= 0);
break;
}
/* SCANBUILD: NULL pointer dereference, doesn't actually trigger,
* asserted above.
*/
holder = DUK_HOBJECT_GET_PROTOTYPE(thr->heap, holder);
}
DUK_ASSERT(holder != NULL);
DUK_ASSERT(e_idx >= 0);
/* SCANBUILD: scan-build produces a NULL pointer dereference warning
* below; it never actually triggers because holder is actually never
* NULL.
*/
/* ref.holder is global object, holder is the object with the
* conflicting property.
*/
flags = DUK_HOBJECT_E_GET_FLAGS(thr->heap, holder, e_idx);
if (!(flags & DUK_PROPDESC_FLAG_CONFIGURABLE)) {
if (flags & DUK_PROPDESC_FLAG_ACCESSOR) {
DUK_DDD(DUK_DDDPRINT("existing property is a non-configurable "
"accessor -> reject"));
goto fail_existing_attributes;
}
if (!((flags & DUK_PROPDESC_FLAG_WRITABLE) &&
(flags & DUK_PROPDESC_FLAG_ENUMERABLE))) {
DUK_DDD(DUK_DDDPRINT("existing property is a non-configurable "
"plain property which is not writable and "
"enumerable -> reject"));
goto fail_existing_attributes;
}
DUK_DDD(DUK_DDDPRINT("existing property is not configurable but "
"is plain, enumerable, and writable -> "
"allow redeclaration"));
}
if (holder == ref.holder) {
/* XXX: if duk_hobject_define_property_internal() was updated
* to handle a pre-existing accessor property, this would be
* a simple call (like for the ancestor case).
*/
DUK_DDD(DUK_DDDPRINT("redefine, offending property in global object itself"));
if (flags & DUK_PROPDESC_FLAG_ACCESSOR) {
duk_hobject *tmp;
tmp = DUK_HOBJECT_E_GET_VALUE_GETTER(thr->heap, holder, e_idx);
DUK_HOBJECT_E_SET_VALUE_GETTER(thr->heap, holder, e_idx, NULL);
DUK_HOBJECT_DECREF_ALLOWNULL(thr, tmp);
DUK_UNREF(tmp);
tmp = DUK_HOBJECT_E_GET_VALUE_SETTER(thr->heap, holder, e_idx);
DUK_HOBJECT_E_SET_VALUE_SETTER(thr->heap, holder, e_idx, NULL);
DUK_HOBJECT_DECREF_ALLOWNULL(thr, tmp);
DUK_UNREF(tmp);
} else {
tv = DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr->heap, holder, e_idx);
DUK_TVAL_SET_UNDEFINED_UPDREF(thr, tv);
}
/* Here val would be potentially invalid if we didn't make
* a value copy at the caller.
*/
tv = DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr->heap, holder, e_idx);
DUK_TVAL_SET_TVAL(tv, val);
DUK_TVAL_INCREF(thr, tv);
DUK_HOBJECT_E_SET_FLAGS(thr->heap, holder, e_idx, prop_flags);
DUK_DDD(DUK_DDDPRINT("updated global binding, final result: "
"value -> %!T, prop_flags=0x%08lx",
(duk_tval *) DUK_HOBJECT_E_GET_VALUE_TVAL_PTR(thr->heap, holder, e_idx),
(unsigned long) prop_flags));
} else {
DUK_DDD(DUK_DDDPRINT("redefine, offending property in ancestor"));
DUK_ASSERT(ref.holder == thr->builtins[DUK_BIDX_GLOBAL]);
duk_push_tval(thr, val);
duk_hobject_define_property_internal(thr, ref.holder, name, prop_flags);
}
return 0;
}
/*
* Not found (in registers or record objects). Declare
* to current variable environment.
*/
/*
* Get holder object
*/
if (DUK_HOBJECT_IS_DECENV(env)) {
DUK_HDECENV_ASSERT_VALID((duk_hdecenv *) env);
holder = env;
} else {
DUK_HOBJENV_ASSERT_VALID((duk_hobjenv *) env);
holder = ((duk_hobjenv *) env)->target;
DUK_ASSERT(holder != NULL);
}
/*
* Define new property
*
* Note: this may fail if the holder is not extensible.
*/
/* XXX: this is awkward as we use an internal method which doesn't handle
* extensibility etc correctly. Basically we'd want to do a [[DefineOwnProperty]]
* or Object.defineProperty() here.
*/
if (!DUK_HOBJECT_HAS_EXTENSIBLE(holder)) {
goto fail_not_extensible;
}
duk_push_hobject(thr, holder);
duk_push_hstring(thr, name);
duk_push_tval(thr, val);
duk_xdef_prop(thr, -3, prop_flags); /* [holder name val] -> [holder] */
duk_pop_unsafe(thr);
return 0;
fail_existing_attributes:
fail_not_extensible:
DUK_ERROR_TYPE(thr, "declaration failed");
DUK_WO_NORETURN(return 0;);
}
DUK_INTERNAL
duk_bool_t duk_js_declvar_activation(duk_hthread *thr,
duk_activation *act,
duk_hstring *name,
duk_tval *val,
duk_small_uint_t prop_flags,
duk_bool_t is_func_decl) {
duk_hobject *env;
duk_tval tv_val_copy;
DUK_ASSERT(act != NULL);
/*
* Make a value copy of the input val. This ensures that
* side effects cannot invalidate the pointer.
*/
DUK_TVAL_SET_TVAL(&tv_val_copy, val);
val = &tv_val_copy;
/*
* Delayed env creation check
*/
if (!act->var_env) {
DUK_ASSERT(act->lex_env == NULL);
duk_js_init_activation_environment_records_delayed(thr, act);
/* 'act' is a stable pointer, so still OK. */
}
DUK_ASSERT(act->lex_env != NULL);
DUK_ASSERT(act->var_env != NULL);
env = act->var_env;
DUK_ASSERT(env != NULL);
DUK_ASSERT(DUK_HOBJECT_IS_ENV(env));
return duk__declvar_helper(thr, env, name, val, prop_flags, is_func_decl);
}
| {
"pile_set_name": "Github"
} |
###########################################################################
#
# Copyright 2016 Samsung Electronics All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
###########################################################################
############################################################################
# external/ftpd/Makefile
#
# Copyright (C) 2012 Gregory Nutt. All rights reserved.
# Author: Gregory Nutt <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name NuttX nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
############################################################################
-include $(TOPDIR)/.config
-include $(TOPDIR)/Make.defs
ASRCS =
CSRCS = ftpd.c
AOBJS = $(ASRCS:.S=$(OBJEXT))
COBJS = $(CSRCS:.c=$(OBJEXT))
SRCS = $(ASRCS) $(CSRCS)
OBJS = $(AOBJS) $(COBJS)
ifeq ($(CONFIG_WINDOWS_NATIVE),y)
BIN = ..\libexternal$(LIBEXT)
else
ifeq ($(WINTOOL),y)
BIN = ..\\libexternal$(LIBEXT)
else
BIN = ../libexternal$(LIBEXT)
endif
endif
DEPPATH = --dep-path .
# Common build
VPATH =
all: .built
.PHONY: depend clean distclean
$(AOBJS): %$(OBJEXT): %.S
$(call ASSEMBLE, $<, $@)
$(COBJS): %$(OBJEXT): %.c
$(call COMPILE, $<, $@)
.built: $(OBJS)
$(call ARCHIVE, $(BIN), $(OBJS))
$(Q) touch .built
.depend: Makefile $(SRCS)
$(Q) $(MKDEP) $(DEPPATH) "$(CC)" -- $(CFLAGS) -- $(SRCS) >Make.dep
$(Q) touch $@
depend: .depend
clean:
$(call DELFILE, .built)
$(call CLEAN)
distclean: clean
$(call DELFILE, Make.dep)
$(call DELFILE, .depend)
-include Make.dep
| {
"pile_set_name": "Github"
} |
## Check llvm-readobj and llvm-readelf can dump files of the different ELF types.
# RUN: yaml2obj %s --docnum=1 -o %t1
# RUN: llvm-readobj -h %t1 | FileCheck %s --match-full-lines --check-prefix LLVM-NONE
# RUN: llvm-readelf -h %t1 | FileCheck %s --match-full-lines --check-prefix GNU-NONE
# LLVM-NONE: ElfHeader {
# LLVM-NONE: Type: None (0x0)
# GNU-NONE: ELF Header:
# GNU-NONE: Type: NONE (none)
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: ET_NONE
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=2 -o %t2
# RUN: llvm-readobj -h %t2 | FileCheck %s --match-full-lines --check-prefix LLVM-REL
# RUN: llvm-readelf -h %t2 | FileCheck %s --match-full-lines --check-prefix GNU-REL
# LLVM-REL: ElfHeader {
# LLVM-REL: Type: Relocatable (0x1)
# GNU-REL: ELF Header:
# GNU-REL: Type: REL (Relocatable file)
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: ET_REL
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=3 -o %t3
# RUN: llvm-readobj -h %t3 | FileCheck %s --match-full-lines --check-prefix LLVM-EXEC
# RUN: llvm-readelf -h %t3 | FileCheck %s --match-full-lines --check-prefix GNU-EXEC
# LLVM-EXEC: ElfHeader {
# LLVM-EXEC: Type: Executable (0x2)
# GNU-EXEC: ELF Header:
# GNU-EXEC: Type: EXEC (Executable file)
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: ET_EXEC
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=4 -o %t4
# RUN: llvm-readobj -h %t4 | FileCheck %s --match-full-lines --check-prefix LLVM-DYN
# RUN: llvm-readelf -h %t4 | FileCheck %s --match-full-lines --check-prefix GNU-DYN
# LLVM-DYN: ElfHeader {
# LLVM-DYN: Type: SharedObject (0x3)
# GNU-DYN: ELF Header:
# GNU-DYN: Type: DYN (Shared object file)
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: ET_DYN
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=5 -o %t5
# RUN: llvm-readobj -h %t5 | FileCheck %s --match-full-lines --check-prefix LLVM-CORE
# RUN: llvm-readelf -h %t5 | FileCheck %s --match-full-lines --check-prefix GNU-CORE
# LLVM-CORE: ElfHeader {
# LLVM-CORE: Type: Core (0x4)
# GNU-CORE: ELF Header:
# GNU-CORE: Type: CORE (Core file)
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: ET_CORE
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=6 -o %t6
# RUN: llvm-readobj -h %t6 | FileCheck %s --match-full-lines --check-prefix LLVM-LOOS
# RUN: llvm-readelf -h %t6 | FileCheck %s --match-full-lines --check-prefix GNU-LOOS
# LLVM-LOOS: ElfHeader {
# LLVM-LOOS: Type: 0xFE00
# GNU-LOOS: ELF Header:
# GNU-LOOS: Type: fe00
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: 0xfe00
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=7 -o %t7
# RUN: llvm-readobj -h %t7 | FileCheck %s --match-full-lines --check-prefix LLVM-HIOS
# RUN: llvm-readelf -h %t7 | FileCheck %s --match-full-lines --check-prefix GNU-HIOS
# LLVM-HIOS: ElfHeader {
# LLVM-HIOS: Type: 0xFEFF
# GNU-HIOS: ELF Header:
# GNU-HIOS: Type: feff
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: 0xfeff
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=8 -o %t8
# RUN: llvm-readobj -h %t8 | FileCheck %s --match-full-lines --check-prefix LLVM-LOPROC
# RUN: llvm-readelf -h %t8 | FileCheck %s --match-full-lines --check-prefix GNU-LOPROC
# LLVM-LOPROC: ElfHeader {
# LLVM-LOPROC: Type: 0xFF00
# GNU-LOPROC: ELF Header:
# GNU-LOPROC: Type: ff00
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: 0xff00
Machine: EM_X86_64
# RUN: yaml2obj %s --docnum=9 -o %t9
# RUN: llvm-readobj -h %t9 | FileCheck %s --match-full-lines --check-prefix LLVM-HIPROC
# RUN: llvm-readelf -h %t9 | FileCheck %s --match-full-lines --check-prefix GNU-HIPROC
# LLVM-HIPROC: ElfHeader {
# LLVM-HIPROC: Type: 0xFFFF
# GNU-HIPROC: ELF Header:
# GNU-HIPROC: Type: ffff
--- !ELF
FileHeader:
Class: ELFCLASS64
Data: ELFDATA2LSB
Type: 0xffff
Machine: EM_X86_64
| {
"pile_set_name": "Github"
} |
version https://git-lfs.github.com/spec/v1
oid sha256:aadbaefc877cba9f30e337e2dc89e9a54eb22594132373a7da7d4b0a94d974a3
size 18927
| {
"pile_set_name": "Github"
} |
using System.Collections.Generic;
using System.IO;
using System.Net;
using ZKWebStandard.Collections;
using ZKWebStandard.Web;
using System.Web;
namespace ZKWeb.Hosting.AspNet
{
/// <summary>
/// Http request wrapper for Asp.net<br/>
/// Asp.Net Http请求的包装类<br/>
/// </summary>
internal class AspNetHttpRequestWrapper : IHttpRequest
{
/// <summary>
/// Parent http context<br/>
/// The HTTP context this request belongs to<br/>
/// </summary>
protected AspNetHttpContextWrapper ParentContext { get; set; }
/// <summary>
/// Original http request<br/>
/// 原始的Http请求<br/>
/// </summary>
protected HttpRequest OriginalRequest { get; set; }
public Stream Body
{
get { return OriginalRequest.InputStream; }
}
public long? ContentLength
{
get { return OriginalRequest.ContentLength; }
}
public string ContentType
{
get { return OriginalRequest.ContentType; }
}
public string Host
{
get { return OriginalRequest.Url.Authority; }
}
public IHttpContext HttpContext
{
get { return ParentContext; }
}
public bool IsHttps
{
get { return OriginalRequest.IsSecureConnection; }
}
public string Method
{
get { return OriginalRequest.HttpMethod; }
}
public string Protocol
{
get { return OriginalRequest.ServerVariables["SERVER_PROTOCOL"]; }
}
public string Path
{
get { return OriginalRequest.Path; }
}
public string QueryString
{
get { return OriginalRequest.Url.Query; }
}
public string Scheme
{
get { return OriginalRequest.Url.Scheme; }
}
public IPAddress RemoteIpAddress
{
get { return IPAddress.Parse(OriginalRequest.ServerVariables["REMOTE_ADDR"]); }
}
public int RemotePort
{
get { return int.Parse(OriginalRequest.ServerVariables["REMOTE_PORT"]); }
}
public IDictionary<string, object> CustomParameters { get; }
public string GetCookie(string key)
{
return OriginalRequest.Cookies[key]?.Value;
}
public IEnumerable<Pair<string, string>> GetCookies()
{
foreach (Cookie cookie in OriginalRequest.Cookies)
{
yield return Pair.Create(cookie.Name, cookie.Value);
}
}
public IList<string> GetQueryValue(string key)
{
return OriginalRequest.QueryString.GetValues(key);
}
public IEnumerable<Pair<string, IList<string>>> GetQueryValues()
{
var query = OriginalRequest.QueryString;
foreach (var key in query.AllKeys)
{
yield return Pair.Create<string, IList<string>>(key, query.GetValues(key));
}
}
public IList<string> GetFormValue(string key)
{
return OriginalRequest.Form.GetValues(key);
}
public IEnumerable<Pair<string, IList<string>>> GetFormValues()
{
var form = OriginalRequest.Form;
foreach (var key in form.AllKeys)
{
yield return Pair.Create<string, IList<string>>(key, form.GetValues(key));
}
}
public string GetHeader(string key)
{
// http://stackoverflow.com/questions/4371328/are-duplicate-http-response-headers-acceptable
IList<string> values = OriginalRequest.Headers.GetValues(key);
if (values == null)
{
return null;
}
return string.Join(",", values);
}
public IEnumerable<Pair<string, string>> GetHeaders()
{
var headers = OriginalRequest.Headers;
foreach (var key in headers.AllKeys)
{
yield return Pair.Create(key, string.Join(",", headers.GetValues(key)));
}
}
public IHttpPostedFile GetPostedFile(string key)
{
var file = OriginalRequest.Files[key];
if (file == null)
{
return null;
}
return new AspNetHttpPostedFileWrapper(file);
}
public IEnumerable<Pair<string, IHttpPostedFile>> GetPostedFiles()
{
var files = OriginalRequest.Files;
foreach (var key in files.AllKeys)
{
yield return Pair.Create<string, IHttpPostedFile>(
key, new AspNetHttpPostedFileWrapper(files[key]));
}
}
/// <summary>
/// Initialize<br/>
/// Initialization<br/>
/// </summary>
/// <param name="parentContext">Parent http context</param>
/// <param name="originalRequest">Original http request</param>
public AspNetHttpRequestWrapper(
AspNetHttpContextWrapper parentContext, HttpRequest originalRequest)
{
ParentContext = parentContext;
OriginalRequest = originalRequest;
CustomParameters = new Dictionary<string, object>();
}
}
}
| {
"pile_set_name": "Github"
} |
<#--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<#assign messageTitle = uiLabelMap.CommonForgotYourPassword>
<div id="loginBar"><span>${messageTitle}</span><div id="company-logo"></div></div>
<center>
<div class="screenlet login-screenlet">
<div class="screenlet-title-bar">
<h3>${messageTitle}</h3>
</div>
<div class="screenlet-body">
<form method="post" action="<@ofbizUrl>forgotPassword_step3${previousParams?if_exists}</@ofbizUrl>" name="forgotpassword">
<table class="basic-table" cellspacing="0">
<#if userLoginId?has_content>
<tr>
<td class="label">${uiLabelMap.CommonUsername}</td>
<td><input type="text" size="20" name="USERNAME" value="<#if requestParameters.USERNAME?has_content>${requestParameters.USERNAME}<#elseif autoUserLogin?has_content>${autoUserLogin.userLoginId}</#if>" /></td>
</tr>
<tr>
<td colspan="2" align="center">
<input type="submit" name="GET_PASSWORD_HINT" class="smallSubmit" value="${uiLabelMap.CommonGetPasswordHint}" />
<input type="submit" name="EMAIL_PASSWORD" class="smallSubmit" value="${uiLabelMap.CommonEmailPassword}" />
</td>
</tr>
<#else>
<tr>
<td colspan="2" align="center">
${uiLabelMap.PartyUserLoginMissingError}
</td>
</tr>
</#if>
<tr>
<td colspan="2" align="center">
<a href='#' class="buttontext" onclick="window.history.back();">${uiLabelMap.CommonGoBack}</a>
</td>
</tr>
</table>
<input type="hidden" name="JavaScriptEnabled" value="N" />
</form>
</div>
</div>
</center> | {
"pile_set_name": "Github"
} |
#ifndef SYSTEM_INFO_H
#define SYSTEM_INFO_H
typedef struct {
char robot_name[24];
char robot_description[64];
}__attribute__((packed)) RobotInfo;
typedef struct {
unsigned char battery_series;
unsigned char power_remain_alarm;
float battery_voltage_alarm;
}__attribute__((packed)) SystemParameters;
typedef struct{
unsigned char valid;
unsigned short int year;
unsigned char month;
unsigned char date;
unsigned char week;
unsigned char hour;
unsigned char min;
unsigned char sec;
}__attribute__((packed)) LocalTime;
typedef struct{
unsigned char valid;
unsigned char hour;
unsigned char min;
unsigned char sec;
}__attribute__((packed)) WorkTime;
//1HZ
typedef struct {
float system_time;
float cpu_temperature;
float cpu_usage;
float battery_voltage;
float power_remain; // 0 ~ 100 (%)
LocalTime local_time;
WorkTime work_time1;
WorkTime work_time2;
}__attribute__((packed)) SystemInfo;
#endif // SYSTEM_INFO_H
| {
"pile_set_name": "Github"
} |
package table
import (
"errors"
"strings"
"github.com/33cn/chain33/common/db"
"github.com/33cn/chain33/common/log/log15"
"github.com/33cn/chain33/types"
"github.com/33cn/chain33/util"
)
var tablelog = log15.New("module", "db.table")
/*
Table join design:
We cannot offer anything as powerful as a real database, but join queries are a feature that is almost impossible to work around.
table1:
[gameId, status]
table2:
[txhash, gameId, addr]
Both tables are constructed and updated independently.
If a join is configured between the two tables, e.g. addr & status should act as one query key, then we need to maintain:
join_table2_table1:
//table2 primary key
//table1 primary key
//a combined index on addr_status
[txhash, gameId, addr_status]
Precondition for being able to join:
table2 contains table1's primary key
Data updates:
an update of table1 automatically triggers: join_table2_table1 updates addr & status
an update of table2 also automatically triggers: join_table2_table1 updates addr & status
Examples:
table1 updates the status of a gameId -> triggers join_table2_table1 to update addr & status for every row with that gameId
table2 updates the addr of a txhash -> triggers join_table2_table1 to update addr & status for that txhash
Note that join_table2_table1 is maintained automatically.
The addr & status index can then be queried directly from table2.
*/
//JoinTable is a table composed of two tables; it automatically maintains the joined structure.
//The primary table is: LeftTable
//The joined table is: RightTable
type JoinTable struct {
left *Table
right *Table
*Table
Fk string
leftIndex []string
rightIndex []string
}
//NewJoinTable 新建一个JoinTable
func NewJoinTable(left *Table, right *Table, indexes []string) (*JoinTable, error) {
if left.kvdb != right.kvdb {
return nil, errors.New("jointable: kvdb must same")
}
if _, ok := left.kvdb.(db.KVDB); !ok {
return nil, errors.New("jointable: kvdb must be db.KVDB")
}
if left.opt.Prefix != right.opt.Prefix {
return nil, errors.New("jointable: left and right table prefix must same")
}
fk := right.opt.Primary
if !left.canGet(fk) {
return nil, errors.New("jointable: left must has right primary index")
}
join := &JoinTable{left: left, right: right, Fk: fk}
for _, index := range indexes {
joinindex := strings.Split(index, joinsep)
if len(joinindex) != 2 {
return nil, errors.New("jointable: index config error")
}
if joinindex[0] != "" && !left.canGet(joinindex[0]) {
return nil, errors.New("jointable: left table can not get: " + joinindex[0])
}
if joinindex[0] != "" {
join.leftIndex = append(join.leftIndex, joinindex[0])
}
if joinindex[1] == "" || !right.canGet(joinindex[1]) {
return nil, errors.New("jointable: right table can not get: " + joinindex[1])
}
if joinindex[1] != "" {
join.rightIndex = append(join.rightIndex, joinindex[1])
}
}
opt := &Option{
Join: true,
Prefix: left.opt.Prefix,
Name: left.opt.Name + joinsep + right.opt.Name,
Primary: left.opt.Primary,
Index: indexes,
}
mytable, err := NewTable(&JoinMeta{
left: left.meta,
right: right.meta}, left.kvdb, opt)
if err != nil {
return nil, err
}
join.Table = mytable
return join, nil
}
//GetLeft get left table
func (join *JoinTable) GetLeft() *Table {
return join.left
}
//GetRight get right table
func (join *JoinTable) GetRight() *Table {
return join.right
}
//GetTable get table by name
func (join *JoinTable) GetTable(name string) (*Table, error) {
if join.left.opt.Name == name {
return join.left, nil
}
if join.right.opt.Name == name {
return join.right, nil
}
return nil, types.ErrNotFound
}
//MustGetTable if name not exist, panic
func (join *JoinTable) MustGetTable(name string) *Table {
table, err := join.GetTable(name)
if err != nil {
panic(err)
}
return table
}
//GetData rewrite get data of jointable
func (join *JoinTable) GetData(primaryKey []byte) (*Row, error) {
leftrow, err := join.left.GetData(primaryKey)
if err != nil {
return nil, err
}
rightprimary, err := join.left.index(leftrow, join.Fk)
if err != nil {
return nil, err
}
rightrow, err := join.right.GetData(rightprimary)
if err != nil {
return nil, err
}
rowjoin := join.meta.CreateRow()
rowjoin.Ty = None
rowjoin.Primary = leftrow.Primary
rowjoin.Data.(*JoinData).Left = leftrow.Data
rowjoin.Data.(*JoinData).Right = rightrow.Data
return rowjoin, nil
}
//ListIndex queries data from the jointable
func (join *JoinTable) ListIndex(indexName string, prefix []byte, primaryKey []byte, count, direction int32) (rows []*Row, err error) {
if !strings.Contains(indexName, joinsep) || !join.canGet(indexName) {
return nil, errors.New("joinable query: indexName must be join index")
}
query := &Query{table: join, kvdb: join.left.kvdb.(db.KVDB)}
return query.ListIndex(indexName, prefix, primaryKey, count, direction)
}
//Save overrides the default save function: it saves not only the left and right tables
//but also the jointable itself.
//There is no dedicated update path, only del and add; for performance an update path could be added later.
//Currently an update is carried out as del + add.
//left modify: del index, add new index (query right by primary) (check in cache)
//right modify: query all primary in left, include in cache, del index, add new index
//TODO: data that has not been modified does not need to be rewritten
func (join *JoinTable) Save() (kvs []*types.KeyValue, err error) {
for _, row := range join.left.rows {
if row.Ty == None {
continue
}
err := join.saveLeft(row)
if err != nil {
return nil, err
}
}
for _, row := range join.right.rows {
if row.Ty == None {
continue
}
err := join.saveRight(row)
if err != nil {
return nil, err
}
}
joinkvs, err := join.Table.Save()
if err != nil {
return nil, err
}
kvs = append(kvs, joinkvs...)
leftkvs, err := join.left.Save()
if err != nil {
return nil, err
}
kvs = append(kvs, leftkvs...)
rightkvs, err := join.right.Save()
if err != nil {
return nil, err
}
kvs = append(kvs, rightkvs...)
return util.DelDupKey(kvs), nil
}
func (join *JoinTable) isLeftModify(row *Row) bool {
oldrow := &Row{Data: row.old}
for _, index := range join.leftIndex {
_, _, ismodify, err := join.left.getModify(row, oldrow, index)
if ismodify {
return true
}
if err != nil {
tablelog.Error("isLeftModify", "err", err)
}
}
return false
}
func (join *JoinTable) isRightModify(row *Row) bool {
oldrow := &Row{Data: row.old}
for _, index := range join.rightIndex {
_, _, ismodify, err := join.right.getModify(row, oldrow, index)
if ismodify {
return true
}
if err != nil {
tablelog.Error("isLeftModify", "err", err)
}
}
return false
}
func (join *JoinTable) saveLeft(row *Row) error {
if row.Ty == Update && !join.isLeftModify(row) {
return nil
}
olddata := &JoinData{}
rowjoin := join.meta.CreateRow()
rowjoin.Ty = row.Ty
rowjoin.Primary = row.Primary
rowjoin.Data.(*JoinData).Left = row.Data
olddata.Left = row.old
rightprimary, err := join.left.index(row, join.Fk)
if err != nil {
return err
}
rightrow, incache, err := join.right.findRow(rightprimary)
if err != nil {
return err
}
if incache && rightrow.Ty == Update {
olddata.Right = rightrow.old
} else {
olddata.Right = rightrow.Data
}
//Only the case where left changed is handled here (if right changed as well, that is handled on the right side).
if row.Ty == Update {
rowjoin.old = olddata
}
rowjoin.Data.(*JoinData).Right = rightrow.Data
join.addRowCache(rowjoin)
return nil
}
func (join *JoinTable) saveRight(row *Row) error {
if row.Ty == Update && !join.isRightModify(row) {
return nil
}
indexName := join.right.opt.Primary
indexValue := row.Primary
q := join.left.GetQuery(join.left.kvdb.(db.KVDB))
rows, err := q.ListIndex(indexName, indexValue, nil, 0, db.ListDESC)
if err != nil && err != types.ErrNotFound {
return err
}
rows, err = join.left.mergeCache(rows, indexName, indexValue)
if err != nil {
return err
}
for _, onerow := range rows {
olddata := &JoinData{Right: row.old, Left: onerow.Data}
if onerow.Ty == Update {
olddata.Left = onerow.old
}
rowjoin := join.meta.CreateRow()
rowjoin.Ty = row.Ty
rowjoin.Primary = onerow.Primary
if row.Ty == Update {
rowjoin.old = olddata
}
rowjoin.Data.(*JoinData).Right = row.Data
rowjoin.Data.(*JoinData).Left = onerow.Data
join.addRowCache(rowjoin)
}
return nil
}
//JoinData is composed of the left and right data
type JoinData struct {
Left types.Message
Right types.Message
}
//Reset data
func (msg *JoinData) Reset() {
msg.Left.Reset()
msg.Right.Reset()
}
//ProtoMessage data
func (msg *JoinData) ProtoMessage() {
msg.Left.ProtoMessage()
msg.Right.ProtoMessage()
}
//String string
func (msg *JoinData) String() string {
return msg.Left.String() + msg.Right.String()
}
//JoinMeta is a meta structure composed of left and right
type JoinMeta struct {
left RowMeta
right RowMeta
data *JoinData
}
//CreateRow create a meta struct
func (tx *JoinMeta) CreateRow() *Row {
return &Row{Data: &JoinData{}}
}
//SetPayload sets the data
func (tx *JoinMeta) SetPayload(data types.Message) error {
if txdata, ok := data.(*JoinData); ok {
tx.data = txdata
if tx.data.Left != nil && tx.data.Right != nil {
err := tx.left.SetPayload(tx.data.Left)
if err != nil {
return err
}
err = tx.right.SetPayload(tx.data.Right)
if err != nil {
return err
}
}
return nil
}
return types.ErrTypeAsset
}
//Get queries the indexValue by indexName
func (tx *JoinMeta) Get(key string) ([]byte, error) {
indexs := strings.Split(key, joinsep)
//get the primary key
if len(indexs) <= 1 {
return tx.left.Get(key)
}
var leftvalue []byte
var err error
if indexs[0] != "" {
leftvalue, err = tx.left.Get(indexs[0])
if err != nil {
return nil, err
}
}
rightvalue, err := tx.right.Get(indexs[1])
if err != nil {
return nil, err
}
return JoinKey(leftvalue, rightvalue), nil
}
//JoinKey merges the left and right keys into one key
func JoinKey(leftvalue, rightvalue []byte) []byte {
return types.Encode(&types.KeyValue{Key: leftvalue, Value: rightvalue})
}
| {
"pile_set_name": "Github"
} |
# By default, a read preference sets no maximum on staleness.
---
topology_description:
type: ReplicaSetNoPrimary
servers:
- &1
address: a:27017
type: RSSecondary
avg_rtt_ms: 50 # Too far.
lastUpdateTime: 0
maxWireVersion: 5
lastWrite: {lastWriteDate: {$numberLong: "1000001"}}
- &2
address: b:27017
type: RSSecondary
avg_rtt_ms: 5
lastUpdateTime: 0
maxWireVersion: 5
lastWrite: {lastWriteDate: {$numberLong: "1"}} # Very stale.
read_preference:
mode: Nearest
suitable_servers: # Very stale server is fine.
- *1
- *2
in_latency_window:
- *2
| {
"pile_set_name": "Github"
} |
import Vue from 'vue';
import Router from 'vue-router';
import Translate from 'views/Translate';
import Language from 'views/Language';
import Settings from 'views/Settings';
Vue.use(Router);
export default new Router({
routes: [
{
path: '/language',
name: 'language',
component: Language,
},
{
path: '/settings',
name: 'settings',
component: Settings,
},
{
path: '*',
name: 'translate',
component: Translate,
},
],
});
| {
"pile_set_name": "Github"
} |
import { PanGesture } from './pan-gesture';
import { assert, clamp } from '../util/util';
import { Platform } from '../platform/platform';
import { pointerCoord } from '../util/dom';
/**
* @hidden
*/
export class SlideGesture extends PanGesture {
public slide: SlideData = null;
constructor(plt: Platform, element: HTMLElement, opts = {}) {
super(plt, element, opts);
}
/*
* Get the min and max for the slide. pageX/pageY.
* Only called on dragstart.
*/
getSlideBoundaries(_slide: SlideData, _ev: any) {
return {
min: 0,
max: this.getNativeElement().offsetWidth
};
}
/*
* Get the element's pos when the drag starts.
* For example, an open side menu starts at 100% and a closed
* sidemenu starts at 0%.
*/
getElementStartPos(_slide: SlideData, _ev: any) {
return 0;
}
onDragStart(ev: any) {
this.onSlideBeforeStart(ev);
let coord = <any>pointerCoord(ev);
let pos = coord[this.direction];
this.slide = {
min: 0,
max: 0,
pointerStartPos: pos,
pos: pos,
timestamp: Date.now(),
elementStartPos: 0,
started: true,
delta: 0,
distance: 0,
velocity: 0,
};
// TODO: we should run this in the next frame
let {min, max} = this.getSlideBoundaries(this.slide, ev);
this.slide.min = min;
this.slide.max = max;
this.slide.elementStartPos = this.getElementStartPos(this.slide, ev);
this.onSlideStart(this.slide, ev);
}
onDragMove(ev: any) {
let slide: SlideData = this.slide;
assert(slide.min !== slide.max, 'slide data must be properly initialized');
let coord = <any>pointerCoord(ev);
let newPos = coord[this.direction];
let newTimestamp = Date.now();
let velocity = (this.plt.isRTL ? (slide.pos - newPos) : (newPos - slide.pos)) / (newTimestamp - slide.timestamp);
slide.pos = newPos;
slide.timestamp = newTimestamp;
slide.distance = clamp(
slide.min,
(this.plt.isRTL ? slide.pointerStartPos - newPos : newPos - slide.pointerStartPos) + slide.elementStartPos,
slide.max
);
slide.velocity = velocity;
slide.delta = (this.plt.isRTL ? slide.pointerStartPos - newPos : newPos - slide.pointerStartPos);
this.onSlide(slide, ev);
}
onDragEnd(ev: any) {
this.onSlideEnd(this.slide, ev);
this.slide = null;
}
onSlideBeforeStart(_ev?: any): void {}
onSlideStart(_slide?: SlideData, _ev?: any): void {}
onSlide(_slide?: SlideData, _ev?: any): void {}
onSlideEnd(_slide?: SlideData, _ev?: any): void {}
}
/**
* @hidden
*/
export interface SlideData {
min: number;
max: number;
distance: number;
delta: number;
started: boolean;
pos: any;
timestamp: number;
pointerStartPos: number;
elementStartPos: number;
velocity: number;
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2016 Linaro Ltd.
* Copyright 2016 ZTE Corporation.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
*/
#include <linux/clk.h>
#include <linux/component.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of_graph.h>
#include <linux/of_platform.h>
#include <linux/spinlock.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_of.h>
#include <drm/drmP.h>
#include "zx_drm_drv.h"
#include "zx_vou.h"
struct zx_drm_private {
struct drm_fbdev_cma *fbdev;
};
static void zx_drm_fb_output_poll_changed(struct drm_device *drm)
{
struct zx_drm_private *priv = drm->dev_private;
drm_fbdev_cma_hotplug_event(priv->fbdev);
}
static const struct drm_mode_config_funcs zx_drm_mode_config_funcs = {
.fb_create = drm_fb_cma_create,
.output_poll_changed = zx_drm_fb_output_poll_changed,
.atomic_check = drm_atomic_helper_check,
.atomic_commit = drm_atomic_helper_commit,
};
static void zx_drm_lastclose(struct drm_device *drm)
{
struct zx_drm_private *priv = drm->dev_private;
drm_fbdev_cma_restore_mode(priv->fbdev);
}
DEFINE_DRM_GEM_CMA_FOPS(zx_drm_fops);
static struct drm_driver zx_drm_driver = {
.driver_features = DRIVER_GEM | DRIVER_MODESET | DRIVER_PRIME |
DRIVER_ATOMIC,
.lastclose = zx_drm_lastclose,
.gem_free_object_unlocked = drm_gem_cma_free_object,
.gem_vm_ops = &drm_gem_cma_vm_ops,
.dumb_create = drm_gem_cma_dumb_create,
.prime_handle_to_fd = drm_gem_prime_handle_to_fd,
.prime_fd_to_handle = drm_gem_prime_fd_to_handle,
.gem_prime_export = drm_gem_prime_export,
.gem_prime_import = drm_gem_prime_import,
.gem_prime_get_sg_table = drm_gem_cma_prime_get_sg_table,
.gem_prime_import_sg_table = drm_gem_cma_prime_import_sg_table,
.gem_prime_vmap = drm_gem_cma_prime_vmap,
.gem_prime_vunmap = drm_gem_cma_prime_vunmap,
.gem_prime_mmap = drm_gem_cma_prime_mmap,
.fops = &zx_drm_fops,
.name = "zx-vou",
.desc = "ZTE VOU Controller DRM",
.date = "20160811",
.major = 1,
.minor = 0,
};
static int zx_drm_bind(struct device *dev)
{
struct drm_device *drm;
struct zx_drm_private *priv;
int ret;
priv = devm_kzalloc(dev, sizeof(*priv), GFP_KERNEL);
if (!priv)
return -ENOMEM;
drm = drm_dev_alloc(&zx_drm_driver, dev);
if (IS_ERR(drm))
return PTR_ERR(drm);
drm->dev_private = priv;
dev_set_drvdata(dev, drm);
drm_mode_config_init(drm);
drm->mode_config.min_width = 16;
drm->mode_config.min_height = 16;
drm->mode_config.max_width = 4096;
drm->mode_config.max_height = 4096;
drm->mode_config.funcs = &zx_drm_mode_config_funcs;
ret = component_bind_all(dev, drm);
if (ret) {
DRM_DEV_ERROR(dev, "failed to bind all components: %d\n", ret);
goto out_unregister;
}
ret = drm_vblank_init(drm, drm->mode_config.num_crtc);
if (ret < 0) {
DRM_DEV_ERROR(dev, "failed to init vblank: %d\n", ret);
goto out_unbind;
}
/*
* We will manage irq handler on our own. In this case, irq_enabled
* need to be true for using vblank core support.
*/
drm->irq_enabled = true;
drm_mode_config_reset(drm);
drm_kms_helper_poll_init(drm);
priv->fbdev = drm_fbdev_cma_init(drm, 32,
drm->mode_config.num_connector);
if (IS_ERR(priv->fbdev)) {
ret = PTR_ERR(priv->fbdev);
DRM_DEV_ERROR(dev, "failed to init cma fbdev: %d\n", ret);
priv->fbdev = NULL;
goto out_poll_fini;
}
ret = drm_dev_register(drm, 0);
if (ret)
goto out_fbdev_fini;
return 0;
out_fbdev_fini:
if (priv->fbdev) {
drm_fbdev_cma_fini(priv->fbdev);
priv->fbdev = NULL;
}
out_poll_fini:
drm_kms_helper_poll_fini(drm);
drm_mode_config_cleanup(drm);
out_unbind:
component_unbind_all(dev, drm);
out_unregister:
dev_set_drvdata(dev, NULL);
drm->dev_private = NULL;
drm_dev_unref(drm);
return ret;
}
static void zx_drm_unbind(struct device *dev)
{
struct drm_device *drm = dev_get_drvdata(dev);
struct zx_drm_private *priv = drm->dev_private;
drm_dev_unregister(drm);
if (priv->fbdev) {
drm_fbdev_cma_fini(priv->fbdev);
priv->fbdev = NULL;
}
drm_kms_helper_poll_fini(drm);
drm_mode_config_cleanup(drm);
component_unbind_all(dev, drm);
dev_set_drvdata(dev, NULL);
drm->dev_private = NULL;
drm_dev_unref(drm);
}
static const struct component_master_ops zx_drm_master_ops = {
.bind = zx_drm_bind,
.unbind = zx_drm_unbind,
};
static int compare_of(struct device *dev, void *data)
{
return dev->of_node == data;
}
static int zx_drm_probe(struct platform_device *pdev)
{
struct device *dev = &pdev->dev;
struct device_node *parent = dev->of_node;
struct device_node *child;
struct component_match *match = NULL;
int ret;
ret = devm_of_platform_populate(dev);
if (ret)
return ret;
for_each_available_child_of_node(parent, child) {
component_match_add(dev, &match, compare_of, child);
of_node_put(child);
}
return component_master_add_with_match(dev, &zx_drm_master_ops, match);
}
static int zx_drm_remove(struct platform_device *pdev)
{
component_master_del(&pdev->dev, &zx_drm_master_ops);
return 0;
}
static const struct of_device_id zx_drm_of_match[] = {
{ .compatible = "zte,zx296718-vou", },
{ /* end */ },
};
MODULE_DEVICE_TABLE(of, zx_drm_of_match);
static struct platform_driver zx_drm_platform_driver = {
.probe = zx_drm_probe,
.remove = zx_drm_remove,
.driver = {
.name = "zx-drm",
.of_match_table = zx_drm_of_match,
},
};
static struct platform_driver *drivers[] = {
&zx_crtc_driver,
&zx_hdmi_driver,
&zx_tvenc_driver,
&zx_vga_driver,
&zx_drm_platform_driver,
};
static int zx_drm_init(void)
{
return platform_register_drivers(drivers, ARRAY_SIZE(drivers));
}
module_init(zx_drm_init);
static void zx_drm_exit(void)
{
platform_unregister_drivers(drivers, ARRAY_SIZE(drivers));
}
module_exit(zx_drm_exit);
MODULE_AUTHOR("Shawn Guo <[email protected]>");
MODULE_DESCRIPTION("ZTE ZX VOU DRM driver");
MODULE_LICENSE("GPL v2");
| {
"pile_set_name": "Github"
} |
using System;
namespace Heavy.Web.Models
{
public class ErrorViewModel
{
public string RequestId { get; set; }
public bool ShowRequestId => !string.IsNullOrEmpty(RequestId);
}
} | {
"pile_set_name": "Github"
} |
---
title: Share an object via a temporary URL
excerpt: Share an object via a temporary URL
slug: share_an_object_via_a_temporary_url
legacy_guide_number: g2007
---
##
OpenStack Swift allows you to store a large number of files.
To manage them, you need to be authenticated with a token, for each API request. This means your Swift permissions (read, write, etc ...) can be verified.
The token is generated by the authentication system using your credentials.
However, if you want to share a file with a friend or co-worker without having to provide personal information related to authentication, you can create a temporary URL (tempurl) instead.
The Tempurl is a feature that allows you to choose the files you want to share and to make them available for the time you want, by setting an expiration time for the link.
## How does it work?
The Tempurl function generates a temporary address which contains:
- The endpoint URL, for example:
"https://storage.sbg1.cloud.ovh.net/"
- The full path for the object that contains your project, the container and the object name. Eg: "v1/AUTH_tenant/default/file"
- A first additional parameter tempurlsign, which is the signature that is generated with your secret key, the HTTP method, file path and expiration date.
- A second parameter url_expires, which is the link expiration date.
## Prerequisites
- [Prepare the environment to use the OpenStack API]({legacy}1851)
- Set OpenStack environment variables
- Python must be installed on your computer
- The Python script: [swift-temp-url](https://raw.githubusercontent.com/openstack/swift/master/bin/swift-temp-url)
## Generate the key
First, you have to generate a key. This key can be used for all files in your account, it is generated once for all future temp urls, so make sure you choose a secure and long key. However, please note that you can regenerate a new key whenever you want.
To generate your key, please use a string which contains at least 20 characters.
You can use various tools including:
- [http://www.random.org/strings/](http://www.random.org/strings/)
- "/dev/urandom" on Linux
- Or just a simple: "date +%s | md5sum"
Once you have generated your key, you can configure it on your project using Swiftclient (replace the "12345" string with your key):
```
swift post -m "Temp-URL-Key: 12345"
```
Or use curl:
```
curl -i -X POST \
  -H "X-Account-Meta-Temp-URL-Key: 12345" \
  -H "X-Auth-Token: abcdef12345" \
  https://storage.sbg1.cloud.ovh.net/v1/AUTH_ProjectID
```
## Please note
The full header is X-Account-Meta-Temp-Url-Key but Swiftclient uses Temp-URL-Key and adds the X-Account prefix automatically.
Now that the key is configured on your account, you can check that the header has been correctly applied using swiftclient:
```
swift stat
```
Or curl:
```
curl -i -X HEAD \
  -H "X-Auth-Token: abcdef12345" \
  https://storage.sbg1.cloud.ovh.net/v1/AUTH_ProjectID
```
## Generate the URL
The following tasks can be done offline.
Generate a temporary URL using the swift-temp-url script:
```
python swift-temp-url GET 60 /v1/AUTH_tenant/default/file 12345
```
- GET: HTTP method.
- 60: This link is available for 60 seconds, you can set your own limit.
- 12345: To be replaced with your key
- /v1/AUTH_tenant/default/file: The path to your file.
You do not need the endpoint at this stage in the process.
This will give you the temporary URL, for example:
```
v1/AUTH_tenant/default/file?temp_url_sig=8016dsdf3122d526afds60911cde59fds3&temp_url_expires=1401548543
```
You will then be able to see your file path, the signature, and the expiration date, as explained above.
To get your URL working, you just need to add the endpoint in front of your tempurl:
```
https://storage.sbg1.cloud.ovh.net/v1/AUTH_tenant/default/file?temp_url_sig=8016dsdf3122d526afds60911cde59fds3&temp_url_expires=1401548543
```
In our example, this URL lets anyone download the "file" file in the "default" container, for up to 60 seconds, without having to provide authentication.
After 60 seconds, the URL will no longer work.
For more advanced users who want to generate tempurls with the [swift-temp-url](https://raw.githubusercontent.com/openstack/swift/master/bin/swift-temp-url) script, there is more information available in the [OpenStack documentation](http://docs.openstack.org/liberty/config-reference/content/object-storage-tempurl.html).
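If you would rather compute the signature yourself instead of running the swift-temp-url script, the calculation is a single HMAC-SHA1 over the HTTP method, the expiry timestamp and the object path. The following is a minimal, unofficial Python sketch that reuses the sample key "12345", endpoint and object path from this guide; adapt them to your own project:
```
import hmac
import time
from hashlib import sha1

key = b"12345"                          # your X-Account-Meta-Temp-URL-Key
method = "GET"
expires = int(time.time()) + 60         # link valid for 60 seconds
path = "/v1/AUTH_tenant/default/file"   # object path, without the endpoint

# The signature is an HMAC-SHA1 of "METHOD\nEXPIRES\nPATH" keyed with your secret key
hmac_body = "{}\n{}\n{}".format(method, expires, path)
signature = hmac.new(key, hmac_body.encode("utf-8"), sha1).hexdigest()

print("https://storage.sbg1.cloud.ovh.net{}?temp_url_sig={}&temp_url_expires={}"
      .format(path, signature, expires))
```
The resulting URL has the same form as the one produced by the swift-temp-url script above.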
##
| {
"pile_set_name": "Github"
} |
# Don't touch this folder
uuuuuuuuuuuuuuuuuuuu
u" uuuuuuuuuuuuuuuuuu "u
u" u$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
u" u$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$u "u
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
$ $$$" ... "$... ...$" ... "$$$ ... "$$$ $
$ $$$u `"$$$$$$$ $$$ $$$$$ $$ $$$ $$$ $
$ $$$$$$uu "$$$$ $$$ $$$$$ $$ """ u$$$ $
$ $$$""$$$ $$$$ $$$u "$$$" u$$ $$$$$$$$ $
$ $$$$....,$$$$$..$$$$$....,$$$$..$$$$$$$$ $
$ $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ $
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$$$$$" u"
"u "$$$$$$$$$$$$$$$$$$$$" u"
"u """""""""""""""""" u"
""""""""""""""""""""
!!! Changes made in this directory are subject to being overwritten by automation !!!
The files in this directory are shared by all Arcade repos and managed by automation. If you need to make changes to these files, open an issue or submit a pull request to https://github.com/dotnet/arcade first.
| {
"pile_set_name": "Github"
} |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef REMOTING_CLIENT_CLIENT_STATUS_LOGGER_H_
#define REMOTING_CLIENT_CLIENT_STATUS_LOGGER_H_
#include "base/macros.h"
#include "base/threading/non_thread_safe.h"
#include "base/time/time.h"
#include "remoting/protocol/connection_to_host.h"
#include "remoting/protocol/errors.h"
#include "remoting/signaling/log_to_server.h"
namespace remoting {
namespace protocol {
class PerformanceTracker;
} // namespace protocol
// ClientStatusLogger sends client log entries to a server.
// The contents of the log entries are described in server_log_entry_client.cc.
// They do not contain any personally identifiable information.
class ClientStatusLogger : public base::NonThreadSafe {
public:
ClientStatusLogger(ServerLogEntry::Mode mode,
SignalStrategy* signal_strategy,
const std::string& directory_bot_jid);
~ClientStatusLogger();
void LogSessionStateChange(protocol::ConnectionToHost::State state,
protocol::ErrorCode error);
void LogStatistics(protocol::PerformanceTracker* perf_tracker);
// Allows test code to fake SignalStrategy state change events.
void SetSignalingStateForTest(SignalStrategy::State state);
private:
LogToServer log_to_server_;
// Generates a new random session ID.
void GenerateSessionId();
// Expire the session ID if the maximum duration has been exceeded.
void MaybeExpireSessionId();
// A randomly generated session ID to be attached to log messages. This
// is regenerated at the start of a new session.
std::string session_id_;
// Start time of the session.
base::TimeTicks session_start_time_;
// Time when the session ID was generated.
base::TimeTicks session_id_generation_time_;
DISALLOW_COPY_AND_ASSIGN(ClientStatusLogger);
};
} // namespace remoting
#endif // REMOTING_CLIENT_CLIENT_STATUS_LOGGER_H_
| {
"pile_set_name": "Github"
} |
let context;
module.exports = {
setContext(app) {
context = app;
},
get context() {
return context;
}
};
| {
"pile_set_name": "Github"
} |
<%--
APDPlat - Application Product Development Platform
Copyright (c) 2013, 杨尚川, [email protected]
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
--%>
<%@page contentType="text/html" pageEncoding="UTF-8"%>
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>系统响应性能日志</title>
<%@include file="../include/common.jsp" %>
<script type="text/javascript" src="../js/chart.js"></script>
<script type="text/javascript" src="js/processTime.js"></script>
</head>
<body>
<div id='grid-div' style="width:100%; height:100%;">
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
// Copyright 1998-2019 Epic Games, Inc. All Rights Reserved.
#include "ILoadingScreenModule.h"
#include "LoadingScreenSettings.h"
#include "SSimpleLoadingScreen.h"
#include "Framework/Application/SlateApplication.h"
#define LOCTEXT_NAMESPACE "LoadingScreen"
class FLoadingScreenModule : public ILoadingScreenModule
{
public:
FLoadingScreenModule();
/** IModuleInterface implementation */
virtual void StartupModule() override;
virtual void ShutdownModule() override;
virtual bool IsGameModule() const override
{
return true;
}
private:
void HandlePrepareLoadingScreen();
void BeginLoadingScreen(const FLoadingScreenDescription& ScreenDescription);
};
IMPLEMENT_MODULE(FLoadingScreenModule, LoadingScreen)
FLoadingScreenModule::FLoadingScreenModule()
{
}
void FLoadingScreenModule::StartupModule()
{
if ( !IsRunningDedicatedServer() && FSlateApplication::IsInitialized() )
{
// Load for cooker reference
const ULoadingScreenSettings* Settings = GetDefault<ULoadingScreenSettings>();
for ( const FStringAssetReference& Ref : Settings->StartupScreen.Images )
{
Ref.TryLoad();
}
for ( const FStringAssetReference& Ref : Settings->DefaultScreen.Images )
{
Ref.TryLoad();
}
if ( IsMoviePlayerEnabled() )
{
GetMoviePlayer()->OnPrepareLoadingScreen().AddRaw(this, &FLoadingScreenModule::HandlePrepareLoadingScreen);
}
// Prepare the startup screen, the PrepareLoadingScreen callback won't be called
	// if we've already explicitly set up the loading screen.
BeginLoadingScreen(Settings->StartupScreen);
}
}
void FLoadingScreenModule::ShutdownModule()
{
if ( !IsRunningDedicatedServer() )
{
GetMoviePlayer()->OnPrepareLoadingScreen().RemoveAll(this);
}
}
void FLoadingScreenModule::HandlePrepareLoadingScreen()
{
const ULoadingScreenSettings* Settings = GetDefault<ULoadingScreenSettings>();
BeginLoadingScreen(Settings->DefaultScreen);
}
void FLoadingScreenModule::BeginLoadingScreen(const FLoadingScreenDescription& ScreenDescription)
{
FLoadingScreenAttributes LoadingScreen;
LoadingScreen.MinimumLoadingScreenDisplayTime = ScreenDescription.MinimumLoadingScreenDisplayTime;
LoadingScreen.bAutoCompleteWhenLoadingCompletes = ScreenDescription.bAutoCompleteWhenLoadingCompletes;
LoadingScreen.bMoviesAreSkippable = ScreenDescription.bMoviesAreSkippable;
LoadingScreen.bWaitForManualStop = ScreenDescription.bWaitForManualStop;
LoadingScreen.MoviePaths = ScreenDescription.MoviePaths;
LoadingScreen.PlaybackType = ScreenDescription.PlaybackType;
if ( ScreenDescription.bShowUIOverlay )
{
LoadingScreen.WidgetLoadingScreen = SNew(SSimpleLoadingScreen, ScreenDescription);
}
GetMoviePlayer()->SetupLoadingScreen(LoadingScreen);
}
#undef LOCTEXT_NAMESPACE
| {
"pile_set_name": "Github"
} |
#pragma once
#include "compiler.h"
#include <stdint.h>
#include <sys/types.h>
BEGIN_DECLS
void die(const char* errstr, ...)
__attribute__((noreturn, __format__(__printf__, 1, 2)));
void edie(const char* errstr, ...)
__attribute__((noreturn, __format__(__printf__, 1, 2)));
size_t xread(int fd, void *buf, size_t n);
void xwrite(int fd, const void *buf, size_t n);
uint64_t now_usec(void);
int setaffinity(int c);
END_DECLS
| {
"pile_set_name": "Github"
} |
## The K Smallest Numbers
**Problem:**
>Given n integers, find the K smallest among them. For example, given the 8 numbers 4, 5, 1, 6, 2, 7, 3, 8, the 4 smallest numbers are 1, 2, 3 and 4.
---
### My Solution
```java
import java.util.*;
public class Solution {
public ArrayList<Integer> GetLeastNumbers_Solution(int [] input, int k) {
ArrayList<Integer> l=new ArrayList<Integer>();
if(k>input.length)
return l;
for(int i=0;i<input.length;i++)
l.add(input[i]);
Collections.sort(l);
for(int j=k;j<input.length;j++)
l.remove(k);
return l;
}
}
```
### Approach
- **An O(nlogk) algorithm, especially well suited to processing massive data sets**
<div align="center"> <img src="https://raw.githubusercontent.com/LyricYang/Internet-Recruiting-Algorithm-Problems/master/JianZhiOffer/Code/pic/Q1028P1.png"/> </div><br>
```java
/*
*Based on heap sort: build a max-heap of size k. Time complexity is O(nlogk).
*With quick sort the time complexity would be O(nlogn);
*with bubble sort it would be O(n*k).
*/
import java.util.ArrayList;
public class Solution {
public ArrayList<Integer> GetLeastNumbers_Solution(int [] input, int k) {
ArrayList<Integer> list=new ArrayList<Integer>();
//check special cases of the input
if(input==null || input.length<=0 || input.length<k){
return list;
}
//build the max-heap
for(int len=k/2-1; len>=0; len--){
adjustMaxHeapSort(input,len,k-1);
}
//Starting from the k-th element, compare each element with the maximum of the heap;
//if it is smaller, replace the maximum and re-adjust the heap.
//What remains in the heap at the end is the K smallest numbers.
int tmp;
for(int i=k; i<input.length; i++){
if(input[i]<input[0]){
tmp=input[0];
input[0]=input[i];
input[i]=tmp;
adjustMaxHeapSort(input,0,k-1);
}
}
for(int j=0; j<k; j++){
list.add(input[j]);
}
return list;
}
public void adjustMaxHeapSort(int[] input, int pos, int length){
int temp;
int child;
for(temp=input[pos]; 2*pos+1<=length; pos=child){
child=2*pos+1;
if(child<length && input[child]<input[child+1]){
child++;
}
if(input[child]>temp){
input[pos]=input[child];
}else{
break;
}
}
input[pos]=temp;
}
}
```
| {
"pile_set_name": "Github"
} |
AMD
AMDEA
AMT
Acid
Alcohol
Alprazolam
Amines
Aminoketones
Amphetamines
Amphetamines
Antipsychotics
Atomoxetine
Atropine
Atypical antipsychotics
Barbiturates
Benzodiazepines
Betel nut
Bupropion
CBD
Caffeine
Cannabis
Cathinone
Chloral hydrate
Chloroform
Chlorpromazine
Cholinergics
Clozapine
Cocaine
Codeine
DMT
DOM
DXM
Deliriants
Depressants
Diazepam
Diethylpropion
Dimenhydrinate
Diphenhydramine
Dissociative anesthetics
Dissociatives
Ecstasy
Ephedrine
Ether
Fentanyl
Flunitrazepam
Fluoxetine
Fluphenazine
Fluvoxamine
GHB
Hallucinogens
Haloperidol
Heroin
Hydrocodone
Ibogaine
Ibotenic acid
Ketamine
Ketamine
Khat
LSD
Legend
Lorazepam
MAOIs
MDM
Maprotiline
Marijuana
Mescaline
Methadone
Methaqualone
Methylphenidate
Methylxanthines
Mirtazapine
Morphine
Muscarine
Muscimol
Narcotic Analgesics
Nicotine
Nitrous oxide
Olanzapine
Opium
Oxycodone
PCP
Paroxetine
Perphenazine
Pimozide
Pseudoephedrine
Psilocybin
Psychedelics
Psychedelics
Psychomotor
Quetiapine
Risperidone
Rohypnol
SSRIs
Salvinorin A
Scopolamine
Sedative Hypnotics
Sertraline
Stimulants
Stimulants
Sulpiride
Sympathomimetic
TCAs
THC
TeCAs
Temazepam
Theobromine
Theophylline
Thioridazine
Tiletamine
Trazodone
Typical antipsychotics
Ziprasidone
| {
"pile_set_name": "Github"
} |
$pagination-prefix: ".#{$css-prefix}pagination";
$pagination-item-split: $s1 !default;
$pagination-current-font-size: $font-size-base-body1 !default;
$pagination-total-font-size: $font-size-base-body1 !default;
$pagination-jump-font-size: $font-size-base-body1 !default;
$pagination-ellipsis-font-size: $font-size-base-body1 !default;
$pagination-input-width: $s9 !default;
$pagination-input-margin: $s1 !default;
$pagination-size-selector-title-margin-right: $s1 !default;
$pagination-size-selector-number-padding: $s3 !default;
$pagination-size-selector-filter-height: $s7 !default;
$pagination-large-current-font-size: $font-size-base-subhead !default;
$pagination-large-total-font-size: $font-size-base-subhead !default;
$pagination-large-jump-font-size: $font-size-base-subhead !default;
$pagination-large-ellipsis-font-size: $font-size-base-subhead !default;
$pagination-large-size-selector-number-padding: $s4 !default;
$pagination-large-size-selector-filter-height: $s10 !default;
$pagination-small-current-font-size: $font-size-base-caption !default;
$pagination-small-total-font-size: $font-size-base-caption !default;
$pagination-small-jump-font-size: $font-size-base-caption !default;
$pagination-small-ellipsis-font-size: $font-size-base-caption !default;
$pagination-small-size-selector-number-padding: $s2 !default;
$pagination-small-size-selector-filter-height: $s5 !default;
$pagination-current-color: $color-b1-6 !default;
$pagination-total-color: $color-n2-4 !default;
$pagination-jump-color: $color-n2-2 !default;
$pagination-ellipsis-color: $color-n2-4 !default;
$pagination-size-selector-title-color: $color-n2-2 !default;
$pagination-item-current-color: $color-b1-6 !default;
$pagination-item-current-border-color: $color-b1-6 !default;
$pagination-item-current-bg: $color-n1-2 !default;
| {
"pile_set_name": "Github"
} |
Starting build on Tue Jul 30 22:30:30 UTC 2019
Using kernel:
Linux version 4.18.0-1024-azure (buildd@lcy01-amd64-006) (gcc version 7.4.0 (Ubuntu 7.4.0-1ubuntu1~18.04.1)) #25~18.04.1-Ubuntu SMP Fri Jun 28 23:27:46 UTC 2019
Components downloaded in this VHD build (some of the below components might get deleted during cluster provisioning if they are not needed):
- apt-transport-https
- auditd
- blobfuse
- ca-certificates
- ceph-common
- cgroup-lite
- cifs-utils
- conntrack
- cracklib-runtime
- ebtables
- ethtool
- fuse
- git
- glusterfs-client
- init-system-helpers
- iproute2
- ipset
- iptables
- jq
- libpam-pwquality
- libpwquality-tools
- mount
- nfs-common
- pigz socat
- util-linux
- xz-utils
- zip
- etcd v3.2.26
- moby v3.0.6
- nvidia-docker2 nvidia-container-runtime
- Azure CNI version 1.0.24
- Azure CNI version 1.0.22
- CNI plugin version 0.7.5
- CNI plugin version 0.7.1
- containerd version 1.2.4
- containerd version 1.1.6
- containerd version 1.1.5
- img
Docker images pre-pulled:
- k8s.gcr.io/kubernetes-dashboard-amd64:v1.10.1
- k8s.gcr.io/exechealthz-amd64:1.2
- k8s.gcr.io/addon-resizer:1.8.5
- k8s.gcr.io/addon-resizer:1.8.4
- k8s.gcr.io/addon-resizer:1.8.1
- k8s.gcr.io/addon-resizer:1.7
- k8s.gcr.io/heapster-amd64:v1.5.4
- k8s.gcr.io/heapster-amd64:v1.5.3
- k8s.gcr.io/heapster-amd64:v1.5.1
- k8s.gcr.io/metrics-server-amd64:v0.2.1
- k8s.gcr.io/k8s-dns-kube-dns-amd64:1.15.4
- k8s.gcr.io/k8s-dns-kube-dns-amd64:1.15.0
- k8s.gcr.io/k8s-dns-kube-dns-amd64:1.14.13
- k8s.gcr.io/k8s-dns-kube-dns-amd64:1.14.5
- k8s.gcr.io/kube-addon-manager-amd64:v9.0.1
- k8s.gcr.io/kube-addon-manager-amd64:v9.0
- k8s.gcr.io/kube-addon-manager-amd64:v8.9.1
- k8s.gcr.io/kube-addon-manager-amd64:v8.9
- k8s.gcr.io/kube-addon-manager-amd64:v8.8
- k8s.gcr.io/kube-addon-manager-amd64:v8.7
- k8s.gcr.io/kube-addon-manager-amd64:v8.6
- k8s.gcr.io/k8s-dns-dnsmasq-nanny-amd64:1.15.4
- k8s.gcr.io/k8s-dns-dnsmasq-nanny-amd64:1.15.0
- k8s.gcr.io/k8s-dns-dnsmasq-nanny-amd64:1.14.10
- k8s.gcr.io/k8s-dns-dnsmasq-nanny-amd64:1.14.8
- k8s.gcr.io/k8s-dns-dnsmasq-nanny-amd64:1.14.5
- k8s.gcr.io/pause-amd64:3.1
- mcr.microsoft.com/k8s/azurestack/core/pause-amd64:3.1
- gcr.io/kubernetes-helm/tiller:v2.11.0
- gcr.io/kubernetes-helm/tiller:v2.8.1
- k8s.gcr.io/cluster-autoscaler:v1.15.1
- k8s.gcr.io/cluster-autoscaler:v1.15.0
- k8s.gcr.io/cluster-autoscaler:v1.14.4
- k8s.gcr.io/cluster-autoscaler:v1.14.2
- k8s.gcr.io/cluster-autoscaler:v1.14.0
- k8s.gcr.io/cluster-autoscaler:v1.13.6
- k8s.gcr.io/cluster-autoscaler:v1.13.4
- k8s.gcr.io/cluster-autoscaler:v1.13.2
- k8s.gcr.io/cluster-autoscaler:v1.13.1
- k8s.gcr.io/cluster-autoscaler:v1.12.7
- k8s.gcr.io/cluster-autoscaler:v1.12.5
- k8s.gcr.io/cluster-autoscaler:v1.12.3
- k8s.gcr.io/cluster-autoscaler:v1.12.2
- k8s.gcr.io/cluster-autoscaler:v1.3.9
- k8s.gcr.io/cluster-autoscaler:v1.3.8
- k8s.gcr.io/cluster-autoscaler:v1.3.7
- k8s.gcr.io/cluster-autoscaler:v1.3.4
- k8s.gcr.io/cluster-autoscaler:v1.3.3
- k8s.gcr.io/cluster-autoscaler:v1.2.5
- k8s.gcr.io/cluster-autoscaler:v1.2.2
- k8s.gcr.io/k8s-dns-sidecar-amd64:1.14.10
- k8s.gcr.io/k8s-dns-sidecar-amd64:1.14.8
- k8s.gcr.io/coredns:1.5.0
- k8s.gcr.io/coredns:1.3.1
- k8s.gcr.io/coredns:1.2.6
- k8s.gcr.io/coredns:1.2.2
- k8s.gcr.io/rescheduler:v0.4.0
- k8s.gcr.io/rescheduler:v0.3.1
- microsoft/virtual-kubelet:latest
- mcr.microsoft.com/containernetworking/networkmonitor:v0.0.6
- mcr.microsoft.com/containernetworking/networkmonitor:v0.0.5
- mcr.microsoft.com/containernetworking/azure-npm:v1.0.18
- nvidia/k8s-device-plugin:1.11
- nvidia/k8s-device-plugin:1.10
- docker.io/deis/hcp-tunnel-front:v1.9.2-v4.0.4
- docker.io/deis/kube-svc-redirect:v1.0.2
- mcr.microsoft.com/k8s/flexvolume/keyvault-flexvolume:v0.0.7
- mcr.microsoft.com/k8s/flexvolume/blobfuse-flexvolume:1.0.8
- gcr.io/google-containers/ip-masq-agent-amd64:v2.3.0
- k8s.gcr.io/ip-masq-agent-amd64:v2.3.0
- gcr.io/google-containers/ip-masq-agent-amd64:v2.0.0
- k8s.gcr.io/ip-masq-agent-amd64:v2.0.0
- nginx:1.13.12-alpine
- mcr.microsoft.com/k8s/kms/keyvault:v0.0.9
- quay.io/coreos/flannel:v0.10.0-amd64
- quay.io/coreos/flannel:v0.8.0-amd64
- busybox
- k8s.gcr.io/cloud-controller-manager-amd64:v1.15.1
- k8s.gcr.io/hyperkube-amd64:v1.15.1
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.15.1-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.15.0
- k8s.gcr.io/hyperkube-amd64:v1.15.0
- k8s.gcr.io/cloud-controller-manager-amd64:v1.14.4
- k8s.gcr.io/hyperkube-amd64:v1.14.4
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.14.4-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.14.3
- k8s.gcr.io/hyperkube-amd64:v1.14.3
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.14.3-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.13.8
- k8s.gcr.io/hyperkube-amd64:v1.13.8
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.13.8-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.13.7
- k8s.gcr.io/hyperkube-amd64:v1.13.7
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.13.7-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.12.8
- k8s.gcr.io/hyperkube-amd64:v1.12.8
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.12.8-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.12.7
- k8s.gcr.io/hyperkube-amd64:v1.12.7
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.12.7-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.11.10
- k8s.gcr.io/hyperkube-amd64:v1.11.10
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.11.10-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.11.9
- k8s.gcr.io/hyperkube-amd64:v1.11.9
- mcr.microsoft.com/k8s/azurestack/core/hyperkube-amd64:v1.11.9-azs
- k8s.gcr.io/cloud-controller-manager-amd64:v1.10.13
- k8s.gcr.io/hyperkube-amd64:v1.10.13
- k8s.gcr.io/cloud-controller-manager-amd64:v1.10.12
- k8s.gcr.io/hyperkube-amd64:v1.10.12
- registry:2.7.1
WARNING: 75% of /dev/sda1 is used
Install completed successfully on Tue Jul 30 22:55:19 UTC 2019
VSTS Build NUMBER: 20190730.3
VSTS Build ID: 23882323
Commit: 3c5cc0f1d609315b37eeae15aba93d3a9f35cdab
Feature flags:
| {
"pile_set_name": "Github"
} |
{
"scriptFile": "main.py",
"bindings": [
{
"type": "httpTrigger",
"direction": "in",
"name": "req"
},
{
"direction": "out",
"name": "msg",
"queueName": "testqueue",
"connection": "AzureWebJobsStorage",
"type": "queue"
},
{
"direction": "out",
"name": "$return",
"type": "http"
}
]
}
| {
"pile_set_name": "Github"
} |
/* Generated by RuntimeBrowser.
*/
@protocol AMSMetricsBagContract <AMSURLBagContract>
@optional
- (AMSBagValue *)metricsDictionary;
- (AMSBagValue *)metricsURL;
- (AMSBagValue *)metricsUrl;
@end
| {
"pile_set_name": "Github"
} |
/*=============================================================================
Copyright (c) 2001-2011 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_SEQUENCE_GENERATION_10022005_0615)
#define FUSION_SEQUENCE_GENERATION_10022005_0615
#include <boost/fusion/support/config.hpp>
#include <boost/fusion/container/generation/make_deque.hpp>
#include <boost/fusion/container/generation/deque_tie.hpp>
#include <boost/fusion/container/generation/cons_tie.hpp>
#include <boost/fusion/container/generation/ignore.hpp>
#include <boost/fusion/container/generation/list_tie.hpp>
#include <boost/fusion/container/generation/make_cons.hpp>
#include <boost/fusion/container/generation/make_list.hpp>
#include <boost/fusion/container/generation/make_map.hpp>
#include <boost/fusion/container/generation/map_tie.hpp>
#include <boost/fusion/container/generation/make_vector.hpp>
#include <boost/fusion/container/generation/vector_tie.hpp>
#include <boost/fusion/container/generation/make_set.hpp>
#endif
| {
"pile_set_name": "Github"
} |
package Archive::Zip::ZipFileMember;
use strict;
use vars qw( $VERSION @ISA );
BEGIN {
$VERSION = '1.29';
@ISA = qw ( Archive::Zip::FileMember );
}
use Archive::Zip qw(
:CONSTANTS
:ERROR_CODES
:PKZIP_CONSTANTS
:UTILITY_METHODS
);
# Create a new Archive::Zip::ZipFileMember
# given a filename and optional open file handle
#
sub _newFromZipFile {
my $class = shift;
my $fh = shift;
my $externalFileName = shift;
my $possibleEocdOffset = shift; # normally 0
my $self = $class->new(
'crc32' => 0,
'diskNumberStart' => 0,
'localHeaderRelativeOffset' => 0,
'dataOffset' => 0, # localHeaderRelativeOffset + header length
@_
);
$self->{'externalFileName'} = $externalFileName;
$self->{'fh'} = $fh;
$self->{'possibleEocdOffset'} = $possibleEocdOffset;
return $self;
}
sub isDirectory {
my $self = shift;
return (
substr( $self->fileName, -1, 1 ) eq '/'
and
$self->uncompressedSize == 0
);
}
# Seek to the beginning of the local header, just past the signature.
# Verify that the local header signature is in fact correct.
# Update the localHeaderRelativeOffset if necessary by adding the possibleEocdOffset.
# Returns status.
sub _seekToLocalHeader {
my $self = shift;
my $where = shift; # optional
my $previousWhere = shift; # optional
$where = $self->localHeaderRelativeOffset() unless defined($where);
# avoid loop on certain corrupt files (from Julian Field)
return _formatError("corrupt zip file")
if defined($previousWhere) && $where == $previousWhere;
my $status;
my $signature;
$status = $self->fh()->seek( $where, IO::Seekable::SEEK_SET );
return _ioError("seeking to local header") unless $status;
( $status, $signature ) =
_readSignature( $self->fh(), $self->externalFileName(),
LOCAL_FILE_HEADER_SIGNATURE );
return $status if $status == AZ_IO_ERROR;
# retry with EOCD offset if any was given.
if ( $status == AZ_FORMAT_ERROR && $self->{'possibleEocdOffset'} ) {
$status = $self->_seekToLocalHeader(
$self->localHeaderRelativeOffset() + $self->{'possibleEocdOffset'},
$where
);
if ( $status == AZ_OK ) {
$self->{'localHeaderRelativeOffset'} +=
$self->{'possibleEocdOffset'};
$self->{'possibleEocdOffset'} = 0;
}
}
return $status;
}
# Because I'm going to delete the file handle, read the local file
# header if the file handle is seekable. If it isn't, I assume that
# I've already read the local header.
# Return ( $status, $self )
sub _become {
my $self = shift;
my $newClass = shift;
return $self if ref($self) eq $newClass;
my $status = AZ_OK;
if ( _isSeekable( $self->fh() ) ) {
my $here = $self->fh()->tell();
$status = $self->_seekToLocalHeader();
$status = $self->_readLocalFileHeader() if $status == AZ_OK;
$self->fh()->seek( $here, IO::Seekable::SEEK_SET );
return $status unless $status == AZ_OK;
}
delete( $self->{'eocdCrc32'} );
delete( $self->{'diskNumberStart'} );
delete( $self->{'localHeaderRelativeOffset'} );
delete( $self->{'dataOffset'} );
return $self->SUPER::_become($newClass);
}
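# Simple read-only accessors for the bookkeeping fields filled in from the
# central directory record and the local file header.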
sub diskNumberStart {
shift->{'diskNumberStart'};
}
sub localHeaderRelativeOffset {
shift->{'localHeaderRelativeOffset'};
}
sub dataOffset {
shift->{'dataOffset'};
}
# Skip local file header, updating only extra field stuff.
# Assumes that fh is positioned before signature.
sub _skipLocalFileHeader {
my $self = shift;
my $header;
my $bytesRead = $self->fh()->read( $header, LOCAL_FILE_HEADER_LENGTH );
if ( $bytesRead != LOCAL_FILE_HEADER_LENGTH ) {
return _ioError("reading local file header");
}
my $fileNameLength;
my $extraFieldLength;
my $bitFlag;
(
undef, # $self->{'versionNeededToExtract'},
$bitFlag,
undef, # $self->{'compressionMethod'},
undef, # $self->{'lastModFileDateTime'},
undef, # $crc32,
undef, # $compressedSize,
undef, # $uncompressedSize,
$fileNameLength,
$extraFieldLength
) = unpack( LOCAL_FILE_HEADER_FORMAT, $header );
if ($fileNameLength) {
$self->fh()->seek( $fileNameLength, IO::Seekable::SEEK_CUR )
or return _ioError("skipping local file name");
}
if ($extraFieldLength) {
$bytesRead =
$self->fh()->read( $self->{'localExtraField'}, $extraFieldLength );
if ( $bytesRead != $extraFieldLength ) {
return _ioError("reading local extra field");
}
}
$self->{'dataOffset'} = $self->fh()->tell();
if ( $bitFlag & GPBF_HAS_DATA_DESCRIPTOR_MASK ) {
# Read the crc32, compressedSize, and uncompressedSize from the
# extended data descriptor, which directly follows the compressed data.
#
# Skip over the compressed file data (assumes that EOCD compressedSize
# was correct)
$self->fh()->seek( $self->{'compressedSize'}, IO::Seekable::SEEK_CUR )
or return _ioError("seeking to extended local header");
# these values should be set correctly from before.
my $oldCrc32 = $self->{'eocdCrc32'};
my $oldCompressedSize = $self->{'compressedSize'};
my $oldUncompressedSize = $self->{'uncompressedSize'};
my $status = $self->_readDataDescriptor();
return $status unless $status == AZ_OK;
return _formatError(
"CRC or size mismatch while skipping data descriptor")
if ( $oldCrc32 != $self->{'crc32'}
|| $oldUncompressedSize != $self->{'uncompressedSize'} );
}
return AZ_OK;
}
# Read from a local file header into myself. Returns AZ_OK if successful.
# Assumes that fh is positioned after signature.
# Note that crc32, compressedSize, and uncompressedSize will be 0 if
# GPBF_HAS_DATA_DESCRIPTOR_MASK is set in the bitFlag.
sub _readLocalFileHeader {
my $self = shift;
my $header;
my $bytesRead = $self->fh()->read( $header, LOCAL_FILE_HEADER_LENGTH );
if ( $bytesRead != LOCAL_FILE_HEADER_LENGTH ) {
return _ioError("reading local file header");
}
my $fileNameLength;
my $crc32;
my $compressedSize;
my $uncompressedSize;
my $extraFieldLength;
(
$self->{'versionNeededToExtract'}, $self->{'bitFlag'},
$self->{'compressionMethod'}, $self->{'lastModFileDateTime'},
$crc32, $compressedSize,
$uncompressedSize, $fileNameLength,
$extraFieldLength
) = unpack( LOCAL_FILE_HEADER_FORMAT, $header );
if ($fileNameLength) {
my $fileName;
$bytesRead = $self->fh()->read( $fileName, $fileNameLength );
if ( $bytesRead != $fileNameLength ) {
return _ioError("reading local file name");
}
$self->fileName($fileName);
}
if ($extraFieldLength) {
$bytesRead =
$self->fh()->read( $self->{'localExtraField'}, $extraFieldLength );
if ( $bytesRead != $extraFieldLength ) {
return _ioError("reading local extra field");
}
}
$self->{'dataOffset'} = $self->fh()->tell();
if ( $self->hasDataDescriptor() ) {
# Read the crc32, compressedSize, and uncompressedSize from the
# extended data descriptor.
# Skip over the compressed file data (assumes that EOCD compressedSize
# was correct)
$self->fh()->seek( $self->{'compressedSize'}, IO::Seekable::SEEK_CUR )
or return _ioError("seeking to extended local header");
my $status = $self->_readDataDescriptor();
return $status unless $status == AZ_OK;
}
else {
return _formatError(
"CRC or size mismatch after reading data descriptor")
if ( $self->{'crc32'} != $crc32
|| $self->{'uncompressedSize'} != $uncompressedSize );
}
return AZ_OK;
}
# This will read the data descriptor, which is after the end of compressed file
# data in members that have GPBF_HAS_DATA_DESCRIPTOR_MASK set in their
# bitFlag.
# The only reliable way to find these is to rely on the EOCD compressedSize.
# Assumes that file is positioned immediately after the compressed data.
# Returns status; sets crc32, compressedSize, and uncompressedSize.
sub _readDataDescriptor {
my $self = shift;
my $signatureData;
my $header;
my $crc32;
my $compressedSize;
my $uncompressedSize;
my $bytesRead = $self->fh()->read( $signatureData, SIGNATURE_LENGTH );
return _ioError("reading header signature")
if $bytesRead != SIGNATURE_LENGTH;
my $signature = unpack( SIGNATURE_FORMAT, $signatureData );
# unfortunately, the signature appears to be optional.
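    # If these four bytes match the descriptor signature (and do not merely
    # equal the expected CRC), a signature word is present and the full
    # descriptor follows; otherwise the bytes already hold the CRC itself.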
if ( $signature == DATA_DESCRIPTOR_SIGNATURE
&& ( $signature != $self->{'crc32'} ) )
{
$bytesRead = $self->fh()->read( $header, DATA_DESCRIPTOR_LENGTH );
return _ioError("reading data descriptor")
if $bytesRead != DATA_DESCRIPTOR_LENGTH;
( $crc32, $compressedSize, $uncompressedSize ) =
unpack( DATA_DESCRIPTOR_FORMAT, $header );
}
else {
$bytesRead =
$self->fh()->read( $header, DATA_DESCRIPTOR_LENGTH_NO_SIG );
return _ioError("reading data descriptor")
if $bytesRead != DATA_DESCRIPTOR_LENGTH_NO_SIG;
$crc32 = $signature;
( $compressedSize, $uncompressedSize ) =
unpack( DATA_DESCRIPTOR_FORMAT_NO_SIG, $header );
}
$self->{'eocdCrc32'} = $self->{'crc32'}
unless defined( $self->{'eocdCrc32'} );
$self->{'crc32'} = $crc32;
$self->{'compressedSize'} = $compressedSize;
$self->{'uncompressedSize'} = $uncompressedSize;
return AZ_OK;
}
# Read a Central Directory header. Return AZ_OK on success.
# Assumes that fh is positioned right after the signature.
sub _readCentralDirectoryFileHeader {
my $self = shift;
my $fh = $self->fh();
my $header = '';
my $bytesRead = $fh->read( $header, CENTRAL_DIRECTORY_FILE_HEADER_LENGTH );
if ( $bytesRead != CENTRAL_DIRECTORY_FILE_HEADER_LENGTH ) {
return _ioError("reading central dir header");
}
my ( $fileNameLength, $extraFieldLength, $fileCommentLength );
(
$self->{'versionMadeBy'},
$self->{'fileAttributeFormat'},
$self->{'versionNeededToExtract'},
$self->{'bitFlag'},
$self->{'compressionMethod'},
$self->{'lastModFileDateTime'},
$self->{'crc32'},
$self->{'compressedSize'},
$self->{'uncompressedSize'},
$fileNameLength,
$extraFieldLength,
$fileCommentLength,
$self->{'diskNumberStart'},
$self->{'internalFileAttributes'},
$self->{'externalFileAttributes'},
$self->{'localHeaderRelativeOffset'}
) = unpack( CENTRAL_DIRECTORY_FILE_HEADER_FORMAT, $header );
$self->{'eocdCrc32'} = $self->{'crc32'};
if ($fileNameLength) {
$bytesRead = $fh->read( $self->{'fileName'}, $fileNameLength );
if ( $bytesRead != $fileNameLength ) {
_ioError("reading central dir filename");
}
}
if ($extraFieldLength) {
$bytesRead = $fh->read( $self->{'cdExtraField'}, $extraFieldLength );
if ( $bytesRead != $extraFieldLength ) {
return _ioError("reading central dir extra field");
}
}
if ($fileCommentLength) {
$bytesRead = $fh->read( $self->{'fileComment'}, $fileCommentLength );
if ( $bytesRead != $fileCommentLength ) {
return _ioError("reading central dir file comment");
}
}
# NK 10/21/04: added to avoid problems with manipulated headers
if ( $self->{'uncompressedSize'} != $self->{'compressedSize'}
and $self->{'compressionMethod'} == COMPRESSION_STORED )
{
$self->{'uncompressedSize'} = $self->{'compressedSize'};
}
$self->desiredCompressionMethod( $self->compressionMethod() );
return AZ_OK;
}
sub rewindData {
my $self = shift;
my $status = $self->SUPER::rewindData(@_);
return $status unless $status == AZ_OK;
return AZ_IO_ERROR unless $self->fh();
$self->fh()->clearerr();
# Seek to local file header.
    # The only reason that I'm doing it this way is that the extraField
# length seems to be different between the CD header and the LF header.
$status = $self->_seekToLocalHeader();
return $status unless $status == AZ_OK;
# skip local file header
$status = $self->_skipLocalFileHeader();
return $status unless $status == AZ_OK;
# Seek to beginning of file data
$self->fh()->seek( $self->dataOffset(), IO::Seekable::SEEK_SET )
or return _ioError("seeking to beginning of file data");
return AZ_OK;
}
# Return bytes read. Note that first parameter is a ref to a buffer.
# my $data;
# my ( $bytesRead, $status) = $self->readRawChunk( \$data, $chunkSize );
sub _readRawChunk {
my ( $self, $dataRef, $chunkSize ) = @_;
return ( 0, AZ_OK ) unless $chunkSize;
my $bytesRead = $self->fh()->read( $$dataRef, $chunkSize )
or return ( 0, _ioError("reading data") );
return ( $bytesRead, AZ_OK );
}
1;
| {
"pile_set_name": "Github"
} |
string
byte array
| {
"pile_set_name": "Github"
} |
import { _n } from '@wordpress/i18n';
/**
* Get post type label.
*
* @param {{key: string, count: number}} typeData Type data.
*
* @return {string} Translated label.
*/
const getPostTypeLabel = ( { key, count } ) => {
return {
course: _n( 'course', 'courses', count, 'sensei-lms' ),
lesson: _n( 'lesson', 'lessons', count, 'sensei-lms' ),
question: _n( 'question', 'questions', count, 'sensei-lms' ),
}[ key ];
};
/**
* ImportSuccessResults component.
*
* @param {Object} input ImportSuccessResults input.
* @param {Array} input.successResults An array of counts of successfully imported items.
*/
const ImportSuccessResults = ( { successResults } ) => (
<ul className="sensei-import-bullet-list">
{ successResults.map( ( { key, count } ) => (
<li key={ key }>
{ count }{ ' ' }
{ getPostTypeLabel( {
key,
count,
} ) }
</li>
) ) }
</ul>
);
export default ImportSuccessResults;
| {
"pile_set_name": "Github"
} |
context("Error message")
# This test fails with the CRAN custom clang toolchains.
# The test is automatically removed by the autobrew script
test_that("SyntaxError from V8", {
ctx <- V8::v8()
if(getRversion() < 4)
skip_on_os("mac")
expect_error(ctx$eval('var foo = }bla}'), 'SyntaxError', class = "std::invalid_argument")
})
| {
"pile_set_name": "Github"
} |
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SwarmCommonUtils")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("SwarmCommonUtils")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("67079ec4-8039-4160-ac89-e5da5582dc8c")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"pile_set_name": "Github"
} |
package com.fasterxml.jackson.databind.deser.creators;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.databind.*;
/**
* Unit tests for verifying that it is possible to annotate
* various kinds of things with {@link JsonCreator} annotation.
*/
public class TestPolymorphicCreators
extends BaseMapTest
{
static class Animal
{
// All animals have names, for our demo purposes...
public String name;
protected Animal() { }
/**
* Creator method that can instantiate instances of
         * appropriate polymorphic type
*/
@JsonCreator
public static Animal create(@JsonProperty("type") String type)
{
if ("dog".equals(type)) {
return new Dog();
}
if ("cat".equals(type)) {
return new Cat();
}
throw new IllegalArgumentException("No such animal type ('"+type+"')");
}
}
static class Dog extends Animal
{
double barkVolume; // in decibels
public Dog() { }
public void setBarkVolume(double v) { barkVolume = v; }
}
static class Cat extends Animal
{
boolean likesCream;
public int lives;
public Cat() { }
public void setLikesCream(boolean likesCreamSurely) { likesCream = likesCreamSurely; }
}
abstract static class AbstractRoot
{
protected final String opt;
protected AbstractRoot(String opt) {
this.opt = opt;
}
@JsonCreator
public static final AbstractRoot make(@JsonProperty("which") int which,
@JsonProperty("opt") String opt) {
if (1 == which) {
return new One(opt);
}
throw new RuntimeException("cannot instantiate " + which);
}
abstract public int getWhich();
public final String getOpt() {
return opt;
}
}
static final class One extends AbstractRoot {
protected One(String opt) {
super(opt);
}
@Override public int getWhich() {
return 1;
}
}
/*
/**********************************************************
/* Actual tests
/**********************************************************
*/
private final ObjectMapper MAPPER = new ObjectMapper();
/**
* Simple test to verify that it is possible to implement polymorphic
* deserialization manually.
*/
public void testManualPolymorphicDog() throws Exception
{
// first, a dog, start with type
Animal animal = MAPPER.readValue("{ \"type\":\"dog\", \"name\":\"Fido\", \"barkVolume\" : 95.0 }", Animal.class);
assertEquals(Dog.class, animal.getClass());
assertEquals("Fido", animal.name);
assertEquals(95.0, ((Dog) animal).barkVolume);
}
public void testManualPolymorphicCatBasic() throws Exception
{
// and finally, lactose-intolerant, but otherwise robust super-cat:
Animal animal = MAPPER.readValue("{ \"name\" : \"Macavity\", \"type\":\"cat\", \"lives\":18, \"likesCream\":false }", Animal.class);
assertEquals(Cat.class, animal.getClass());
assertEquals("Macavity", animal.name); // ... there's no one like Macavity!
Cat cat = (Cat) animal;
assertEquals(18, cat.lives);
// ok, he can't drink dairy products. Let's verify:
assertEquals(false, cat.likesCream);
}
public void testManualPolymorphicCatWithReorder() throws Exception
{
// Then cat; shuffle order to mandate buffering
Animal animal = MAPPER.readValue("{ \"likesCream\":true, \"name\" : \"Venla\", \"type\":\"cat\" }", Animal.class);
assertEquals(Cat.class, animal.getClass());
assertEquals("Venla", animal.name);
// bah, of course cats like cream. But let's ensure Jackson won't mess with laws of nature!
assertTrue(((Cat) animal).likesCream);
}
public void testManualPolymorphicWithNumbered() throws Exception
{
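        // Round-trip: serialize with the base type, then rebuild the concrete
        // subtype through the numbered @JsonCreator factory method.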
final ObjectWriter w = MAPPER.writerFor(AbstractRoot.class);
final ObjectReader r = MAPPER.readerFor(AbstractRoot.class);
AbstractRoot input = AbstractRoot.make(1, "oh hai!");
String json = w.writeValueAsString(input);
AbstractRoot result = r.readValue(json);
assertNotNull(result);
assertEquals("oh hai!", result.getOpt());
}
}
| {
"pile_set_name": "Github"
} |
<?php defined('SYSPATH') OR die('No direct access allowed.');
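// Transparent extension: application-specific OAuth2 overrides belong in this class.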
class OAuth2 extends Kohana_OAuth2 {} | {
"pile_set_name": "Github"
} |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.cmmn.model;
import java.io.Serializable;
/**
*
* @author Deivarayan Azhagappan
*
*/
public class CmmnVariableOnPartDeclaration implements Serializable {
private static final long serialVersionUID = 1L;
protected String variableEvent;
protected String variableName;
public String getVariableEvent() {
return variableEvent;
}
public void setVariableEvent(String variableEvent) {
this.variableEvent = variableEvent;
}
public String getVariableName() {
return variableName;
}
public void setVariableName(String variableName) {
this.variableName = variableName;
}
}
| {
"pile_set_name": "Github"
} |
<?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK IT ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2012 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: liu21st <[email protected]>
// +----------------------------------------------------------------------
namespace Behavior;
/**
 * Detects the language and automatically loads the corresponding language pack
*/
class CheckLangBehavior {
    // The execution entry point of a behavior extension must be run()
public function run(&$params){
        // Detect the language
$this->checkLanguage();
}
/**
     * Language check
     * Checks which languages the browser supports and automatically loads the language pack
* @access private
* @return void
*/
private function checkLanguage() {
        // If the language pack feature is disabled, only the framework language file is loaded; return immediately
if (!C('LANG_SWITCH_ON',null,false)){
return;
}
$langSet = C('DEFAULT_LANG');
$varLang = C('VAR_LANGUAGE',null,'l');
$langList = C('LANG_LIST',null,'zh-cn');
        // The language pack feature is enabled
        // Determine the language selection depending on whether auto-detection is enabled
if (C('LANG_AUTO_DETECT',null,true)){
if(isset($_GET[$varLang])){
                $langSet = $_GET[$varLang];// the language variable was set in the URL
cookie('think_language',$langSet,3600);
            }elseif(cookie('think_language')){// use the user's previous choice
$langSet = cookie('think_language');
            }elseif(isset($_SERVER['HTTP_ACCEPT_LANGUAGE'])){// auto-detect the browser language
preg_match('/^([a-z\d\-]+)/i', $_SERVER['HTTP_ACCEPT_LANGUAGE'], $matches);
$langSet = $matches[1];
cookie('think_language',$langSet,3600);
}
            if(false === stripos($langList,$langSet)) { // invalid language parameter
$langSet = C('DEFAULT_LANG');
}
}
        // Define the current language
define('LANG_SET',strtolower($langSet));
        // Load the framework language pack
$file = THINK_PATH.'Lang/'.LANG_SET.'.php';
if(LANG_SET != C('DEFAULT_LANG') && is_file($file))
L(include $file);
        // Load the application-wide language pack
$file = LANG_PATH.LANG_SET.'.php';
if(is_file($file))
L(include $file);
        // Load the module language pack
$file = MODULE_PATH.'Lang/'.LANG_SET.'.php';
if(is_file($file))
L(include $file);
        // Load the language pack of the current controller
$file = MODULE_PATH.'Lang/'.LANG_SET.'/'.strtolower(CONTROLLER_NAME).'.php';
if (is_file($file))
L(include $file);
}
}
| {
"pile_set_name": "Github"
} |
import * as React from 'react';
import { mount } from 'enzyme';
import * as sinon from 'sinon';
import { Toggle } from './Toggle';
import { IToggleProps } from './Toggle.types';
describe('ToggleState', () => {
it('can call the callback on a change of toggle', () => {
let isToggledValue;
const callback = (ev: React.MouseEvent<HTMLElement>, isToggled: boolean) => {
isToggledValue = isToggled;
};
const component = mount<React.ReactInstance>(<Toggle label="Label" onChange={callback} />);
expect(
component
.find('button')
.first()
.getDOMNode()
.getAttribute('aria-checked'),
).toEqual('false');
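    // Clicking the toggle should invoke the callback with the new checked state.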
component
.find('button')
.first()
.simulate('click');
expect(isToggledValue).toEqual(true);
expect(
component
.find('button')
.first()
.getDOMNode()
.getAttribute('aria-checked'),
).toEqual('true');
});
it(`doesn't update the state if the user provides checked`, () => {
const component = mount(<Toggle label="Label" checked={false} />);
expect(
component
.find('button')
.first()
.getDOMNode()
.getAttribute('aria-checked'),
).toEqual('false');
component
.find('button')
.first()
.simulate('click');
expect(
component
.update()
.find('button')
.first()
.getDOMNode()
.getAttribute('aria-checked'),
).toEqual('false');
});
it(`doesn't render a label element if none is provided`, () => {
const component = mount(<Toggle checked={false} />);
expect(component.find('label').length).toEqual(0);
});
it(`doesn't trigger onSubmit when placed inside a form`, () => {
let checked: boolean | undefined;
const onSubmit = sinon.spy();
const onChange: IToggleProps['onChange'] = (ev, toggled) => {
checked = toggled;
};
const wrapper = mount(
<form
action="#"
onSubmit={e => {
onSubmit();
e.preventDefault();
}}
>
<Toggle label="Label" onChange={onChange} />
</form>,
);
const button: any = wrapper.find('button');
// simulate to change toggle state
button.simulate('click');
expect(checked).toEqual(true);
expect(onSubmit.called).toEqual(false);
});
});
| {
"pile_set_name": "Github"
} |
/*
* SPDX-License-Identifier: MIT
*
* Copyright © 2018 Intel Corporation
*
* Autogenerated file by GPU Top : https://github.com/rib/gputop
* DO NOT EDIT manually!
*/
#include <linux/sysfs.h>
#include "i915_drv.h"
#include "i915_oa_glk.h"
static const struct i915_oa_reg b_counter_config_test_oa[] = {
{ _MMIO(0x2740), 0x00000000 },
{ _MMIO(0x2744), 0x00800000 },
{ _MMIO(0x2714), 0xf0800000 },
{ _MMIO(0x2710), 0x00000000 },
{ _MMIO(0x2724), 0xf0800000 },
{ _MMIO(0x2720), 0x00000000 },
{ _MMIO(0x2770), 0x00000004 },
{ _MMIO(0x2774), 0x00000000 },
{ _MMIO(0x2778), 0x00000003 },
{ _MMIO(0x277c), 0x00000000 },
{ _MMIO(0x2780), 0x00000007 },
{ _MMIO(0x2784), 0x00000000 },
{ _MMIO(0x2788), 0x00100002 },
{ _MMIO(0x278c), 0x0000fff7 },
{ _MMIO(0x2790), 0x00100002 },
{ _MMIO(0x2794), 0x0000ffcf },
{ _MMIO(0x2798), 0x00100082 },
{ _MMIO(0x279c), 0x0000ffef },
{ _MMIO(0x27a0), 0x001000c2 },
{ _MMIO(0x27a4), 0x0000ffe7 },
{ _MMIO(0x27a8), 0x00100001 },
{ _MMIO(0x27ac), 0x0000ffe7 },
};
static const struct i915_oa_reg flex_eu_config_test_oa[] = {
};
static const struct i915_oa_reg mux_config_test_oa[] = {
{ _MMIO(0x9840), 0x00000080 },
{ _MMIO(0x9888), 0x19800000 },
{ _MMIO(0x9888), 0x07800063 },
{ _MMIO(0x9888), 0x11800000 },
{ _MMIO(0x9888), 0x23810008 },
{ _MMIO(0x9888), 0x1d950400 },
{ _MMIO(0x9888), 0x0f922000 },
{ _MMIO(0x9888), 0x1f908000 },
{ _MMIO(0x9888), 0x37900000 },
{ _MMIO(0x9888), 0x55900000 },
{ _MMIO(0x9888), 0x47900000 },
{ _MMIO(0x9888), 0x33900000 },
};
static ssize_t
show_test_oa_id(struct device *kdev, struct device_attribute *attr, char *buf)
{
return sprintf(buf, "1\n");
}
void
i915_perf_load_test_config_glk(struct drm_i915_private *dev_priv)
{
strlcpy(dev_priv->perf.oa.test_config.uuid,
"dd3fd789-e783-4204-8cd0-b671bbccb0cf",
sizeof(dev_priv->perf.oa.test_config.uuid));
dev_priv->perf.oa.test_config.id = 1;
dev_priv->perf.oa.test_config.mux_regs = mux_config_test_oa;
dev_priv->perf.oa.test_config.mux_regs_len = ARRAY_SIZE(mux_config_test_oa);
dev_priv->perf.oa.test_config.b_counter_regs = b_counter_config_test_oa;
dev_priv->perf.oa.test_config.b_counter_regs_len = ARRAY_SIZE(b_counter_config_test_oa);
dev_priv->perf.oa.test_config.flex_regs = flex_eu_config_test_oa;
dev_priv->perf.oa.test_config.flex_regs_len = ARRAY_SIZE(flex_eu_config_test_oa);
dev_priv->perf.oa.test_config.sysfs_metric.name = "dd3fd789-e783-4204-8cd0-b671bbccb0cf";
dev_priv->perf.oa.test_config.sysfs_metric.attrs = dev_priv->perf.oa.test_config.attrs;
dev_priv->perf.oa.test_config.attrs[0] = &dev_priv->perf.oa.test_config.sysfs_metric_id.attr;
dev_priv->perf.oa.test_config.sysfs_metric_id.attr.name = "id";
dev_priv->perf.oa.test_config.sysfs_metric_id.attr.mode = 0444;
dev_priv->perf.oa.test_config.sysfs_metric_id.show = show_test_oa_id;
}
| {
"pile_set_name": "Github"
} |
# These are supported funding model platforms
github: robiso
| {
"pile_set_name": "Github"
} |
ol#qunit-tests {
font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial;
margin:0;
padding:0;
list-style-position:inside;
font-size: smaller;
}
ol#qunit-tests li{
padding:0.4em 0.5em 0.4em 2.5em;
border-bottom:1px solid #fff;
font-size:small;
list-style-position:inside;
}
ol#qunit-tests li ol{
box-shadow: inset 0px 2px 13px #999;
-moz-box-shadow: inset 0px 2px 13px #999;
-webkit-box-shadow: inset 0px 2px 13px #999;
margin-top:0.5em;
margin-left:0;
padding:0.5em;
background-color:#fff;
border-radius:15px;
-moz-border-radius: 15px;
-webkit-border-radius: 15px;
}
ol#qunit-tests li li{
border-bottom:none;
margin:0.5em;
background-color:#fff;
list-style-position: inside;
padding:0.4em 0.5em 0.4em 0.5em;
}
ol#qunit-tests li li.pass{
border-left:26px solid #C6E746;
background-color:#fff;
color:#5E740B;
}
ol#qunit-tests li li.fail{
border-left:26px solid #EE5757;
background-color:#fff;
color:#710909;
}
ol#qunit-tests li.pass{
background-color:#D2E0E6;
color:#528CE0;
}
ol#qunit-tests li.fail{
background-color:#EE5757;
color:#000;
}
ol#qunit-tests li strong {
cursor:pointer;
}
h1#qunit-header{
background-color:#0d3349;
margin:0;
padding:0.5em 0 0.5em 1em;
color:#fff;
font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial;
border-top-right-radius:15px;
border-top-left-radius:15px;
-moz-border-radius-topright:15px;
-moz-border-radius-topleft:15px;
-webkit-border-top-right-radius:15px;
-webkit-border-top-left-radius:15px;
text-shadow: rgba(0, 0, 0, 0.5) 4px 4px 1px;
}
h2#qunit-banner{
font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial;
height:5px;
margin:0;
padding:0;
}
h2#qunit-banner.qunit-pass{
background-color:#C6E746;
}
h2#qunit-banner.qunit-fail, #qunit-testrunner-toolbar {
background-color:#EE5757;
}
#qunit-testrunner-toolbar {
font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial;
padding:0;
/*width:80%;*/
padding:0em 0 0.5em 2em;
font-size: small;
}
h2#qunit-userAgent {
font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial;
background-color:#2b81af;
margin:0;
padding:0;
color:#fff;
font-size: small;
padding:0.5em 0 0.5em 2.5em;
text-shadow: rgba(0, 0, 0, 0.5) 2px 2px 1px;
}
p#qunit-testresult{
font-family:"Helvetica Neue Light", "HelveticaNeue-Light", "Helvetica Neue", Calibri, Helvetica, Arial;
margin:0;
font-size: small;
color:#2b81af;
border-bottom-right-radius:15px;
border-bottom-left-radius:15px;
-moz-border-radius-bottomright:15px;
-moz-border-radius-bottomleft:15px;
-webkit-border-bottom-right-radius:15px;
-webkit-border-bottom-left-radius:15px;
background-color:#D2E0E6;
padding:0.5em 0.5em 0.5em 2.5em;
}
strong b.fail{
color:#710909;
}
strong b.pass{
color:#5E740B;
}
| {
"pile_set_name": "Github"
} |
<?php
/**
* Exception for 501 Not Implemented responses
*
* @package Requests
*/
/**
* Exception for 501 Not Implemented responses
*
* @package Requests
*/
class Requests_Exception_HTTP_501 extends Requests_Exception_HTTP {
/**
* HTTP status code
*
* @var integer
*/
protected $code = 501;
/**
* Reason phrase
*
* @var string
*/
protected $reason = 'Not Implemented';
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_162) on Fri Sep 13 20:43:34 PDT 2019 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Interface com.ctc.wstx.cfg.InputConfigFlags (Woodstox 6.0.0 API)</title>
<meta name="date" content="2019-09-13">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface com.ctc.wstx.cfg.InputConfigFlags (Woodstox 6.0.0 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/ctc/wstx/cfg/class-use/InputConfigFlags.html" target="_top">Frames</a></li>
<li><a href="InputConfigFlags.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Interface com.ctc.wstx.cfg.InputConfigFlags" class="title">Uses of Interface<br>com.ctc.wstx.cfg.InputConfigFlags</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#com.ctc.wstx.api">com.ctc.wstx.api</a></td>
<td class="colLast">
<div class="block">Package that contains subset of Woodstox classes that are considered to be
its public API (in addition to regular Stax 1.0 -- javax.xml.stream.* -- and
Stax2 -- org.codehaus.stax2.*).</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#com.ctc.wstx.dtd">com.ctc.wstx.dtd</a></td>
<td class="colLast">
<div class="block">Package that contains Woodstox classes that implement DTD handling.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="#com.ctc.wstx.sr">com.ctc.wstx.sr</a></td>
<td class="colLast">
<div class="block">This package contains supporting code for handling namespace information;
element stacks that keep track of elements parsed and such.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#com.ctc.wstx.stax">com.ctc.wstx.stax</a></td>
<td class="colLast">
<div class="block">This package contains miscellaneous classes that implement Woodstox.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="com.ctc.wstx.api">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a> in <a href="../../../../../com/ctc/wstx/api/package-summary.html">com.ctc.wstx.api</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../com/ctc/wstx/api/package-summary.html">com.ctc.wstx.api</a> that implement <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/api/ReaderConfig.html" title="class in com.ctc.wstx.api">ReaderConfig</a></span></code>
<div class="block">Simple configuration container class; passed by reader factory to reader
instance created.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="com.ctc.wstx.dtd">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a> in <a href="../../../../../com/ctc/wstx/dtd/package-summary.html">com.ctc.wstx.dtd</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../com/ctc/wstx/dtd/package-summary.html">com.ctc.wstx.dtd</a> that implement <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/dtd/FullDTDReader.html" title="class in com.ctc.wstx.dtd">FullDTDReader</a></span></code>
<div class="block">Reader that reads in DTD information from internal or external subset.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/dtd/MinimalDTDReader.html" title="class in com.ctc.wstx.dtd">MinimalDTDReader</a></span></code>
<div class="block">Minimal DTD reader implementation that only knows how to skip
internal DTD subsets.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="com.ctc.wstx.sr">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a> in <a href="../../../../../com/ctc/wstx/sr/package-summary.html">com.ctc.wstx.sr</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../com/ctc/wstx/sr/package-summary.html">com.ctc.wstx.sr</a> that implement <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/sr/BasicStreamReader.html" title="class in com.ctc.wstx.sr">BasicStreamReader</a></span></code>
<div class="block">Partial implementation of <code>XMLStreamReader2</code> consisting of
all functionality other than DTD-validation-specific parts, and
Typed Access API (Stax2 v3.0), which are implemented at
sub-classes.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/sr/StreamScanner.html" title="class in com.ctc.wstx.sr">StreamScanner</a></span></code>
<div class="block">Abstract base class that defines some basic functionality that all
Woodstox reader classes (main XML reader, DTD reader) extend from.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/sr/TypedStreamReader.html" title="class in com.ctc.wstx.sr">TypedStreamReader</a></span></code>
<div class="block">Complete implementation of <code>XMLStreamReader2</code>,
including Typed Access API (Stax2 v3.0) implementation.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/sr/ValidatingStreamReader.html" title="class in com.ctc.wstx.sr">ValidatingStreamReader</a></span></code>
<div class="block">Implementation of <code>XMLStreamReader2</code>
that builds on <a href="../../../../../com/ctc/wstx/sr/TypedStreamReader.html" title="class in com.ctc.wstx.sr"><code>TypedStreamReader</code></a> and adds full DTD-handling
including DTD validation</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="com.ctc.wstx.stax">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a> in <a href="../../../../../com/ctc/wstx/stax/package-summary.html">com.ctc.wstx.stax</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../com/ctc/wstx/stax/package-summary.html">com.ctc.wstx.stax</a> that implement <a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">InputConfigFlags</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../com/ctc/wstx/stax/WstxInputFactory.html" title="class in com.ctc.wstx.stax">WstxInputFactory</a></span></code>
<div class="block">Factory for creating various Stax objects (stream/event reader,
writer).</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../com/ctc/wstx/cfg/InputConfigFlags.html" title="interface in com.ctc.wstx.cfg">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/ctc/wstx/cfg/class-use/InputConfigFlags.html" target="_top">Frames</a></li>
<li><a href="InputConfigFlags.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2019 <a href="http://fasterxml.com">FasterXML</a>. All rights reserved.</small></p>
</body>
</html>
| {
"pile_set_name": "Github"
} |
{
"variants": {
"": {
"model": "appliedenergistics2:block/spatial_io_port"
}
}
}
| {
"pile_set_name": "Github"
} |
module UtilitiesTests
using Test
import Base64: stringmime
import Documenter
import Markdown
module UnitTests
module SubModule end
# Does `submodules` collect *all* the submodules?
module A
module B
module C
module D end
end
end
end
mutable struct T end
mutable struct S{T} end
"Documenter unit tests."
Base.length(::T) = 1
f(x) = x
const pi = 3.0
end
module OuterModule
module InnerModule
import ..OuterModule
export OuterModule
end
end
module ExternalModule end
module ModuleWithAliases
using ..ExternalModule
Y = ExternalModule
module A
module B
const X = Main
end
end
end
# hasfield was added in Julia 1.2. This definition borrowed from Compat.jl (MIT)
# Note: this can not be inside the testset
(VERSION < v"1.2.0-DEV.272") && (hasfield(::Type{T}, name::Symbol) where T = Base.fieldindex(T, name, false) > 0)
@testset "Utilities" begin
let doc = @doc(length)
a = Documenter.Utilities.filterdocs(doc, Set{Module}())
b = Documenter.Utilities.filterdocs(doc, Set{Module}([UnitTests]))
c = Documenter.Utilities.filterdocs(doc, Set{Module}([Base]))
d = Documenter.Utilities.filterdocs(doc, Set{Module}([UtilitiesTests]))
@test a !== nothing
@test a === doc
@test b !== nothing
@test occursin("Documenter unit tests.", stringmime("text/plain", b))
@test c !== nothing
@test !occursin("Documenter unit tests.", stringmime("text/plain", c))
@test d === nothing
end
# Documenter.Utilities.issubmodule
@test Documenter.Utilities.issubmodule(Main, Main) === true
@test Documenter.Utilities.issubmodule(UnitTests, UnitTests) === true
@test Documenter.Utilities.issubmodule(UnitTests.SubModule, Main) === true
@test Documenter.Utilities.issubmodule(UnitTests.SubModule, UnitTests) === true
@test Documenter.Utilities.issubmodule(UnitTests.SubModule, Base) === false
@test Documenter.Utilities.issubmodule(UnitTests, UnitTests.SubModule) === false
@test UnitTests.A in Documenter.Utilities.submodules(UnitTests.A)
@test UnitTests.A.B in Documenter.Utilities.submodules(UnitTests.A)
@test UnitTests.A.B.C in Documenter.Utilities.submodules(UnitTests.A)
@test UnitTests.A.B.C.D in Documenter.Utilities.submodules(UnitTests.A)
@test OuterModule in Documenter.Utilities.submodules(OuterModule)
@test OuterModule.InnerModule in Documenter.Utilities.submodules(OuterModule)
@test length(Documenter.Utilities.submodules(OuterModule)) == 2
@test Documenter.Utilities.submodules(ModuleWithAliases) == Set([ModuleWithAliases, ModuleWithAliases.A, ModuleWithAliases.A.B])
@test Documenter.Utilities.isabsurl("file.md") === false
@test Documenter.Utilities.isabsurl("../file.md") === false
@test Documenter.Utilities.isabsurl(".") === false
@test Documenter.Utilities.isabsurl("https://example.org/file.md") === true
@test Documenter.Utilities.isabsurl("http://example.org") === true
@test Documenter.Utilities.isabsurl("ftp://user:[email protected]") === true
@test Documenter.Utilities.isabsurl("/fs/absolute/path") === false
@test Documenter.Utilities.doccat(UnitTests) == "Module"
@test Documenter.Utilities.doccat(UnitTests.T) == "Type"
@test Documenter.Utilities.doccat(UnitTests.S) == "Type"
@test Documenter.Utilities.doccat(UnitTests.f) == "Function"
@test Documenter.Utilities.doccat(UnitTests.pi) == "Constant"
# repo type
@test Documenter.Utilities.repo_host_from_url("https://bitbucket.org/somerepo") == Documenter.Utilities.RepoBitbucket
@test Documenter.Utilities.repo_host_from_url("https://www.bitbucket.org/somerepo") == Documenter.Utilities.RepoBitbucket
@test Documenter.Utilities.repo_host_from_url("http://bitbucket.org/somethingelse") == Documenter.Utilities.RepoBitbucket
@test Documenter.Utilities.repo_host_from_url("http://github.com/Whatever") == Documenter.Utilities.RepoGithub
@test Documenter.Utilities.repo_host_from_url("https://github.com/Whatever") == Documenter.Utilities.RepoGithub
@test Documenter.Utilities.repo_host_from_url("https://www.github.com/Whatever") == Documenter.Utilities.RepoGithub
@test Documenter.Utilities.repo_host_from_url("https://gitlab.com/Whatever") == Documenter.Utilities.RepoGitlab
# line range
let formatting = Documenter.Utilities.LineRangeFormatting(Documenter.Utilities.RepoGithub)
@test Documenter.Utilities.format_line(1:1, formatting) == "L1"
@test Documenter.Utilities.format_line(123:123, formatting) == "L123"
@test Documenter.Utilities.format_line(2:5, formatting) == "L2-L5"
@test Documenter.Utilities.format_line(100:9999, formatting) == "L100-L9999"
end
let formatting = Documenter.Utilities.LineRangeFormatting(Documenter.Utilities.RepoGitlab)
@test Documenter.Utilities.format_line(1:1, formatting) == "L1"
@test Documenter.Utilities.format_line(123:123, formatting) == "L123"
@test Documenter.Utilities.format_line(2:5, formatting) == "L2-5"
@test Documenter.Utilities.format_line(100:9999, formatting) == "L100-9999"
end
let formatting = Documenter.Utilities.LineRangeFormatting(Documenter.Utilities.RepoBitbucket)
@test Documenter.Utilities.format_line(1:1, formatting) == "1"
@test Documenter.Utilities.format_line(123:123, formatting) == "123"
@test Documenter.Utilities.format_line(2:5, formatting) == "2:5"
@test Documenter.Utilities.format_line(100:9999, formatting) == "100:9999"
end
@test Documenter.Utilities.linerange(Core.svec(), 0) === 0:0
# URL building
filepath = string(first(methods(Documenter.Utilities.url)).file)
Sys.iswindows() && (filepath = replace(filepath, "/" => "\\")) # work around JuliaLang/julia#26424
let expected_filepath = "/src/Utilities/Utilities.jl"
Sys.iswindows() && (expected_filepath = replace(expected_filepath, "/" => "\\"))
@test endswith(filepath, expected_filepath)
end
mktempdir() do path
path_repo = joinpath(path, "repository")
mkpath(path_repo)
cd(path_repo) do
# Create a simple mock repo in a temporary directory with a single file.
@test success(`git init`)
@test success(`git config user.email "[email protected]"`)
@test success(`git config user.name "Test Committer"`)
@test success(`git remote add origin [email protected]:JuliaDocs/Documenter.jl.git`)
mkpath("src")
filepath = abspath(joinpath("src", "SourceFile.jl"))
write(filepath, "X")
@test success(`git add -A`)
@test success(`git commit -m"Initial commit."`)
# Run tests
commit = Documenter.Utilities.repo_commit(filepath)
@test Documenter.Utilities.url("//blob/{commit}{path}#{line}", filepath) == "//blob/$(commit)/src/SourceFile.jl#"
@test Documenter.Utilities.url(nothing, "//blob/{commit}{path}#{line}", Documenter.Utilities, filepath, 10:20) == "//blob/$(commit)/src/SourceFile.jl#L10-L20"
# repo_root & relpath_from_repo_root
@test Documenter.Utilities.repo_root(filepath) == dirname(abspath(joinpath(dirname(filepath), ".."))) # abspath() keeps trailing /, hence dirname()
@test Documenter.Utilities.repo_root(filepath; dbdir=".svn") == nothing
@test Documenter.Utilities.relpath_from_repo_root(filepath) == joinpath("src", "SourceFile.jl")
# We assume that a temporary file is not in a repo
@test Documenter.Utilities.repo_root(tempname()) == nothing
@test Documenter.Utilities.relpath_from_repo_root(tempname()) == nothing
end
# Test worktree
path_worktree = joinpath(path, "worktree")
cd("$(path_repo)") do
@test success(`git worktree add $(path_worktree)`)
end
cd("$(path_worktree)") do
filepath = abspath(joinpath("src", "SourceFile.jl"))
# Run tests
commit = Documenter.Utilities.repo_commit(filepath)
@test Documenter.Utilities.url("//blob/{commit}{path}#{line}", filepath) == "//blob/$(commit)/src/SourceFile.jl#"
@test Documenter.Utilities.url(nothing, "//blob/{commit}{path}#{line}", Documenter.Utilities, filepath, 10:20) == "//blob/$(commit)/src/SourceFile.jl#L10-L20"
# repo_root & relpath_from_repo_root
@test Documenter.Utilities.repo_root(filepath) == dirname(abspath(joinpath(dirname(filepath), ".."))) # abspath() keeps trailing /, hence dirname()
@test Documenter.Utilities.repo_root(filepath; dbdir=".svn") == nothing
@test Documenter.Utilities.relpath_from_repo_root(filepath) == joinpath("src", "SourceFile.jl")
# We assume that a temporary file is not in a repo
@test Documenter.Utilities.repo_root(tempname()) == nothing
@test Documenter.Utilities.relpath_from_repo_root(tempname()) == nothing
end
# Test submodule
path_submodule = joinpath(path, "submodule")
mkpath(path_submodule)
cd(path_submodule) do
@test success(`git init`)
@test success(`git config user.email "[email protected]"`)
@test success(`git config user.name "Test Committer"`)
# NOTE: the target path in the `git submodule add` command is necessary for
# Windows builds, since otherwise Git claims that the path is in a .gitignore
# file.
@test success(`git submodule add $(path_repo) repository`)
@test success(`git add -A`)
@test success(`git commit -m"Initial commit."`)
end
path_submodule_repo = joinpath(path, "submodule", "repository")
@test isdir(path_submodule_repo)
cd(path_submodule_repo) do
filepath = abspath(joinpath("src", "SourceFile.jl"))
# Run tests
commit = Documenter.Utilities.repo_commit(filepath)
@test isfile(filepath)
@test Documenter.Utilities.url("//blob/{commit}{path}#{line}", filepath) == "//blob/$(commit)/src/SourceFile.jl#"
@test Documenter.Utilities.url(nothing, "//blob/{commit}{path}#{line}", Documenter.Utilities, filepath, 10:20) == "//blob/$(commit)/src/SourceFile.jl#L10-L20"
# repo_root & relpath_from_repo_root
@test Documenter.Utilities.repo_root(filepath) == dirname(abspath(joinpath(dirname(filepath), ".."))) # abspath() keeps trailing /, hence dirname()
@test Documenter.Utilities.repo_root(filepath; dbdir=".svn") == nothing
@test Documenter.Utilities.relpath_from_repo_root(filepath) == joinpath("src", "SourceFile.jl")
# We assume that a temporary file is not in a repo
@test Documenter.Utilities.repo_root(tempname()) == nothing
@test Documenter.Utilities.relpath_from_repo_root(tempname()) == nothing
end
end
import Documenter.Documents: Document, Page, Globals
import Documenter.Utilities: Markdown2
let page = Page("source", "build", :build, [], IdDict{Any,Any}(), Globals(), Markdown2.MD()), doc = Document()
code = """
x += 3
γγγ_γγγ
γγγ
"""
exprs = Documenter.Utilities.parseblock(code, doc, page)
@test isa(exprs, Vector)
@test length(exprs) === 3
@test isa(exprs[1][1], Expr)
@test exprs[1][1].head === :+=
@test exprs[1][2] == "x += 3\n"
@test exprs[2][2] == "γγγ_γγγ\n"
@test exprs[3][1] === :γγγ
@test exprs[3][2] == "γγγ\n"
end
@testset "TextDiff" begin
import Documenter.Utilities.TextDiff: splitby
@test splitby(r"\s+", "X Y Z") == ["X ", "Y ", "Z"]
@test splitby(r"[~]", "X~Y~Z") == ["X~", "Y~", "Z"]
@test splitby(r"[▶]", "X▶Y▶Z") == ["X▶", "Y▶", "Z"]
@test splitby(r"[▶]+", "X▶▶Y▶Z▶") == ["X▶▶", "Y▶", "Z▶"]
@test splitby(r"[▶]+", "▶▶Y▶Z▶") == ["▶▶", "Y▶", "Z▶"]
@test splitby(r"[▶]+", "Ω▶▶Y▶Z▶") == ["Ω▶▶", "Y▶", "Z▶"]
@test splitby(r"[▶]+", "Ω▶▶Y▶Z▶κ") == ["Ω▶▶", "Y▶", "Z▶", "κ"]
end
# This test checks that deprecation warnings are captured correctly
@static if isdefined(Base, :with_logger)
@testset "withoutput" begin
_, _, _, output = Documenter.Utilities.withoutput() do
println("println")
@info "@info"
f() = (Base.depwarn("depwarn", :f); nothing)
f()
end
# The output is dependent on whether the user is running tests with deprecation
# warnings enabled or not. To figure out whether that is the case or not, we can
# look at the .depwarn field of the undocumented Base.JLOptions object.
@test isdefined(Base, :JLOptions)
@test hasfield(Base.JLOptions, :depwarn)
if Base.JLOptions().depwarn == 0 # --depwarn=no, default on Julia >= 1.5
@test output == "println\n[ Info: @info\n"
else # --depwarn=yes
@test startswith(output, "println\n[ Info: @info\n┌ Warning: depwarn\n")
end
end
end
@testset "issues #749, #790, #823" begin
let parse(x) = Documenter.Utilities.parseblock(x, nothing, nothing)
for LE in ("\r\n", "\n")
l1, l2 = parse("x = Int[]$(LE)$(LE)push!(x, 1)$(LE)")
@test l1[1] == :(x = Int[])
@test l1[2] == "x = Int[]$(LE)"
@test l2[1] == :(push!(x, 1))
@test l2[2] == "push!(x, 1)$(LE)"
end
end
end
@testset "mdparse" begin
mdparse = Documenter.Utilities.mdparse
@test_throws ArgumentError mdparse("", mode=:foo)
mdparse("") isa Markdown.Paragraph
@test mdparse("foo bar") isa Markdown.Paragraph
let md = mdparse("", mode=:span)
@test md isa Vector{Any}
@test length(md) == 1
end
let md = mdparse("", mode=:blocks)
@test md isa Vector{Any}
@test length(md) == 0
end
@test mdparse("!!! adm"; mode=:single) isa Markdown.Admonition
let md = mdparse("!!! adm", mode=:blocks)
@test md isa Vector{Any}
@test length(md) == 1
end
let md = mdparse("x\n\ny", mode=:blocks)
@test md isa Vector{Any}
@test length(md) == 2
end
@info "Expected error output:"
@test_throws ArgumentError mdparse("!!! adm", mode=:span)
@test_throws ArgumentError mdparse("x\n\ny")
@test_throws ArgumentError mdparse("x\n\ny", mode=:span)
@info ".. end of expected error output."
end
@testset "JSDependencies" begin
using Documenter.Utilities.JSDependencies:
RemoteLibrary, Snippet, RequireJS, verify, writejs, parse_snippet
libraries = [
RemoteLibrary("foo", "example.com/foo"),
RemoteLibrary("bar", "example.com/bar"; deps = ["foo"]),
]
snippet = Snippet(["foo", "bar"], ["Foo"], "f(x)")
let r = RequireJS(libraries)
push!(r, snippet)
@test verify(r)
output = let io = IOBuffer()
writejs(io, r)
String(take!(io))
end
# The expected output should look something like this:
#
# // Generated by Documenter.jl
# requirejs.config({
# paths: {
# 'bar': 'example.com/bar',
# 'foo': 'example.com/foo',
# },
# shim: {
# "bar": {
# "deps": [
# "foo"
# ]
# }
# }
# });
# ////////////////////////////////////////////////////////////////////////////////
# require(['foo', 'bar'], function(Foo) {
# f(x)
# })
#
# But the output is not entirely deterministic, so we can't do just a string
# comparison. Hence, we'll just do a few simple `occursin` tests, to make sure
# that the most important things are at least present.
@test occursin("'foo'", output)
@test occursin("'bar'", output)
@test occursin("example.com/foo", output)
@test occursin("example.com/bar", output)
@test occursin("f(x)", output)
@test occursin(r"requirejs\.config\({[\S\s]+}\)", output)
@test occursin(r"require\([\S\s]+\)", output)
end
# Error conditions: missing dependency
let r = RequireJS([
RemoteLibrary("foo", "example.com/foo"),
RemoteLibrary("bar", "example.com/bar"; deps = ["foo", "baz"]),
])
@test !verify(r)
push!(r, RemoteLibrary("baz", "example.com/baz"))
@test verify(r)
push!(r, Snippet(["foo", "qux"], ["Foo"], "f(x)"))
@test !verify(r)
push!(r, RemoteLibrary("qux", "example.com/qux"))
@test verify(r)
end
let io = IOBuffer(raw"""
// libraries: foo, bar
// arguments: $
script
""")
snippet = parse_snippet(io)
@test snippet.deps == ["foo", "bar"]
@test snippet.args == ["\$"]
@test snippet.js == "script\n"
end
# jsescape
@testset "jsescape" begin
using Documenter.Utilities.JSDependencies: jsescape
@test jsescape("abc123") == "abc123"
@test jsescape("▶αβγ") == "▶αβγ"
@test jsescape("") == ""
@test jsescape("a\nb") == "a\\nb"
@test jsescape("\r\n") == "\\r\\n"
@test jsescape("\\") == "\\\\"
@test jsescape("\"'") == "\\\"\\'"
# Ref: #639
@test jsescape("\u2028") == "\\u2028"
@test jsescape("\u2029") == "\\u2029"
@test jsescape("policy to
delete.") == "policy to\\u2028 delete."
end
@testset "json_jsescape" begin
using Documenter.Utilities.JSDependencies: json_jsescape
@test json_jsescape(["abc"]) == raw"[\"abc\"]"
@test json_jsescape(["\\"]) == raw"[\"\\\\\"]"
@test json_jsescape(["x\u2028y"]) == raw"[\"x\u2028y\"]"
end
# Proper escaping of generated JS
let r = RequireJS([
RemoteLibrary("fo\'o", "example.com\n/foo"),
])
@test verify(r)
push!(r, Snippet(["fo\'o"], ["Foo"], "f(x)"))
output = let io = IOBuffer()
writejs(io, r)
String(take!(io))
end
@test occursin("'fo\\'o'", output)
@test occursin("example.com\\n/foo", output)
@test !occursin("'fo'o'", output)
@test !occursin("example.com\n/foo", output)
end
end
@testset "codelang" begin
@test Documenter.Utilities.codelang("") == ""
@test Documenter.Utilities.codelang(" ") == ""
@test Documenter.Utilities.codelang(" ") == ""
@test Documenter.Utilities.codelang("\t ") == ""
@test Documenter.Utilities.codelang("julia") == "julia"
@test Documenter.Utilities.codelang("julia-repl") == "julia-repl"
@test Documenter.Utilities.codelang("julia-repl x=y") == "julia-repl"
@test Documenter.Utilities.codelang("julia-repl\tx=y") == "julia-repl"
@test Documenter.Utilities.codelang(" julia-repl\tx=y") == "julia-repl"
@test Documenter.Utilities.codelang("\t julia \tx=y ") == "julia"
@test Documenter.Utilities.codelang("\t julia \tx=y ") == "julia"
@test Documenter.Utilities.codelang("&%^ ***") == "&%^"
end
end
end
| {
"pile_set_name": "Github"
} |
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// *Preprocessed* version of the main "vector.hpp" header
// -- DO NOT modify by hand!
namespace boost { namespace mpl {
template<
typename T0 = na, typename T1 = na, typename T2 = na, typename T3 = na
, typename T4 = na, typename T5 = na, typename T6 = na, typename T7 = na
, typename T8 = na, typename T9 = na, typename T10 = na, typename T11 = na
, typename T12 = na, typename T13 = na, typename T14 = na
, typename T15 = na, typename T16 = na, typename T17 = na
, typename T18 = na, typename T19 = na
>
struct vector;
template<
>
struct vector<
na, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector0< >
{
typedef vector0< >::type type;
};
template<
typename T0
>
struct vector<
T0, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector1<T0>
{
typedef typename vector1<T0>::type type;
};
template<
typename T0, typename T1
>
struct vector<
T0, T1, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector2< T0,T1 >
{
typedef typename vector2< T0,T1 >::type type;
};
template<
typename T0, typename T1, typename T2
>
struct vector<
T0, T1, T2, na, na, na, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector3< T0,T1,T2 >
{
typedef typename vector3< T0,T1,T2 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3
>
struct vector<
T0, T1, T2, T3, na, na, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector4< T0,T1,T2,T3 >
{
typedef typename vector4< T0,T1,T2,T3 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
>
struct vector<
T0, T1, T2, T3, T4, na, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector5< T0,T1,T2,T3,T4 >
{
typedef typename vector5< T0,T1,T2,T3,T4 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5
>
struct vector<
T0, T1, T2, T3, T4, T5, na, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector6< T0,T1,T2,T3,T4,T5 >
{
typedef typename vector6< T0,T1,T2,T3,T4,T5 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, na, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector7< T0,T1,T2,T3,T4,T5,T6 >
{
typedef typename vector7< T0,T1,T2,T3,T4,T5,T6 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, na, na, na, na, na, na, na, na, na
, na, na, na
>
: vector8< T0,T1,T2,T3,T4,T5,T6,T7 >
{
typedef typename vector8< T0,T1,T2,T3,T4,T5,T6,T7 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, na, na, na, na, na, na, na, na
, na, na, na
>
: vector9< T0,T1,T2,T3,T4,T5,T6,T7,T8 >
{
typedef typename vector9< T0,T1,T2,T3,T4,T5,T6,T7,T8 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, na, na, na, na, na, na, na
, na, na, na
>
: vector10< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9 >
{
typedef typename vector10< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, na, na, na, na, na, na
, na, na, na
>
: vector11< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10 >
{
typedef typename vector11< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, na, na, na, na
, na, na, na, na
>
: vector12< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11 >
{
typedef typename vector12< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, na, na, na
, na, na, na, na
>
: vector13< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12 >
{
typedef typename vector13< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, na, na
, na, na, na, na
>
: vector14< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13 >
{
typedef typename vector14< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13, typename T14
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, na
, na, na, na, na
>
: vector15<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
>
{
typedef typename vector15< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13, typename T14
, typename T15
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, na, na, na, na
>
: vector16<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15
>
{
typedef typename vector16< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13, typename T14
, typename T15, typename T16
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16, na, na, na
>
: vector17<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16
>
{
typedef typename vector17< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13, typename T14
, typename T15, typename T16, typename T17
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16, T17, na, na
>
: vector18<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16, T17
>
{
typedef typename vector18< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17 >::type type;
};
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13, typename T14
, typename T15, typename T16, typename T17, typename T18
>
struct vector<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16, T17, T18, na
>
: vector19<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16, T17, T18
>
{
typedef typename vector19< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18 >::type type;
};
/// primary template (not a specialization!)
template<
typename T0, typename T1, typename T2, typename T3, typename T4
, typename T5, typename T6, typename T7, typename T8, typename T9
, typename T10, typename T11, typename T12, typename T13, typename T14
, typename T15, typename T16, typename T17, typename T18, typename T19
>
struct vector
: vector20<
T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
, T15, T16, T17, T18, T19
>
{
typedef typename vector20< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19 >::type type;
};
}}
| {
"pile_set_name": "Github"
} |
USE_MULTI_THREAD=1
LDFLAGS= `pkg-config --libs opencv` -lstdc++ -lopentracker
CXXFLAGS = -Wall -std=c++0x
ifeq ($(USE_MULTI_THREAD), 1)
CXXFLAGS+= -DUSE_MULTI_THREAD
LDFLAGS+= -pthread
endif
all: run_opentracker_example.bin
run_opentracker_example.bin: run_opentracker_example.o
$(CC) -o $@ $^ $(LDFLAGS)
%.o: %.c
$(CC) -c -o $@ $< $(CFLAGS)
%.o: %.cc
$(CXX) -c -o $@ $< $(CXXFLAGS)
%.o: %.cpp
$(CXX) -c -o $@ $< $(CXXFLAGS)
.PHONY: clean
clean:
rm -rf ./.d *.o *.bin *.so *.a */*.o */*.bin
| {
"pile_set_name": "Github"
} |
library(spData)
library(tmap)
legend_title = expression("Area (km"^2*")")
map_nza = tm_shape(nz) +
tm_fill(col = "Land_area", title = legend_title) + tm_borders()
s1 = map_nza + tm_style("bw") + tm_layout(title = "style: bw")
s2 = map_nza + tm_style("classic") + tm_layout(title = "style: classic")
s3 = map_nza + tm_style("cobalt") + tm_layout(title = "style: cobalt")
s4 = map_nza + tm_style("col_blind") + tm_layout(title = "style: col_blind")
tmap_arrange(s1, s2, s3, s4, nrow = 1) | {
"pile_set_name": "Github"
} |
require 'bloom-filter'
module Jetpants
class Shard
# Runs queries against a slave in the pool to verify sharding key values
def validate_shard_data
tables = Table.from_config('sharded_tables', shard_pool.name)
table_statuses = {}
tables.limited_concurrent_map(8) { |table|
table.sharding_keys.each do |col|
range_sql = table.sql_range_check col, @min_id, @max_id
# use a standby slave, since this query will be very heavy and these shards are live
db = standby_slaves.last
result = db.query_return_array range_sql
if result.first.values.first > 0
table_statuses[table] = :invalid
else
table_statuses[table] = :valid
end
end
}
table_statuses
end
# Uses a bloom filter to check for duplicate unique keys on two shards
# @shards - an array of two shards
# @table - the table object to examine
# @key - the (symbol) of the key for which to verify uniqueness
# @min_key_val - the minimum value of the key to consider
# @max_key_val - the maximum value of the key to consider
# @chunk_size - the number of values to retrieve in one query
def self.check_duplicate_keys(shards, table, key, min_key_val = nil, max_key_val = nil, chunk_size = 5000)
dbs = []
shards.each do |shard|
raise "Invalid shard #{shard}!" unless shard.is_a? Shard
raise "Attempting to validate table not con" unless shard.has_table? table.name
end
raise "Currently only possible to compare 2 shards!" unless shards.count == 2
raise "Invalid index '#{key}' for table '#{table}'!" if table.indexes[key].nil?
raise "Only currently implemented for single-column indexes" unless table.indexes[key][:columns].count == 1
source_shard = shards.first
source_db = source_shard.standby_slaves.last
comparison_shard = shards.last
comparison_db = comparison_shard.standby_slaves.last
column = table.indexes[key][:columns].first
dbs = [ source_db, comparison_db ]
dbs.concurrent_each do |db|
db.pause_replication
db.stop_query_killer
db.disable_monitoring
end
min_val = min_key_val || source_db.query_return_first_value("SELECT min(#{column}) FROM #{table}")
max_val = max_key_val || source_db.query_return_first_value("SELECT max(#{column}) FROM #{table}")
# maximum possible entries and desired error rate
max_size = (max_val.to_i - min_val.to_i) / 3
filter = BloomFilter.new size: max_size
curr_val = min_val
source_db.output "Generating filter from #{source_shard} from values #{min_val}-#{max_val}"
while curr_val < max_val do
vals = source_db.query_return_array("SELECT #{column} FROM #{table} WHERE #{column} > #{curr_val} LIMIT #{chunk_size}").map{ |row| row.values.first }
vals.each{ |val| filter.insert val }
curr_val = vals.last
end
min_val = min_key_val || comparison_db.query_return_first_value("SELECT min(#{column}) FROM #{table}")
max_val = max_key_val || comparison_db.query_return_first_value("SELECT max(#{column}) FROM #{table}")
possible_dupes = []
curr_val = min_val
comparison_db.output "Searching for duplicates in #{comparison_shard} from values #{min_val}-#{max_val}"
while curr_val < max_val do
vals = comparison_db.query_return_array("SELECT #{column} FROM #{table} WHERE #{column} > #{curr_val} LIMIT #{chunk_size}").map{ |row| row.values.first }
vals.each{ |val| possible_dupes << val if filter.include? val }
curr_val = vals.last
end
if possible_dupes.empty?
source_db.output "There were no duplicates"
else
source_db.output "There are #{possible_dupes.count} potential duplicates"
end
possible_dupes
ensure
unless dbs.empty?
dbs.concurrent_each do |db|
db.start_replication
db.catch_up_to_master
db.start_query_killer
db.enable_monitoring
end
end
end
# Finds duplicate unique keys on two distinct shards
#
# @shards - an array of two shards
# @table - the table object to examine
# @key - the (symbol) of the key for which to verify uniqueness
# @min_key_val - the minimum value of the key to consider
# @max_key_val - the maximum value of the key to consider
def self.find_duplicate_keys(shards, table, key, min_key_val = nil, max_key_val = nil)
# check_duplicate_keys method will do all the validation of the parameters
keys = Shard.check_duplicate_keys(shards, table, key, min_key_val, max_key_val)
column = table.indexes[key][:columns].first
keys.map do |k|
count = shards.concurrent_map do |s|
query = "select count(*) from #{table} where #{column} = #{k}"
s.standby_slaves.last.query_return_first(query).values.first.to_i
end.reduce(&:+)
[k, count]
end.select{ |f| f[1] > 1 }
end
# Generate a list of filenames for exported data
def table_export_filenames(full_path = true, tables = false)
export_filenames = []
tables = Table.from_config('sharded_tables', shard_pool.name) unless tables
export_filenames = tables.map { |table| table.export_filenames(@min_id, @max_id) }.flatten
export_filenames.map!{ |filename| File.basename filename } unless full_path
export_filenames
end
# Sets up an aggregate node and new shard master with data from two shards, returned with replication stopped
# This will take two standby slaves, pause replication, export their data, ship it to the aggregate
# node and new master, import the data, and set up multi-source replication to the shards being merged
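# Illustrative call sketch (added for clarity, not part of the original source;
# the shard and node objects below are hypothetical):
#   Shard.set_up_aggregate_node([shard_1, shard_2], aggregator_db, new_master_db)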
def self.set_up_aggregate_node(shards_to_merge, aggregate_node, new_shard_master)
# validation
shards_to_merge.each do |shard|
raise "Attempting to create an aggregate node with a non-shard!" unless shard.is_a? Shard
end
raise "Attempting to set up aggregation on a non-aggregate node!" unless aggregate_node.aggregator?
raise "Attempting to set up aggregation on a node that is already aggregating!" unless aggregate_node.aggregating_nodes.empty?
raise "Invalid new master node!" unless new_shard_master.is_a? DB
raise "New shard master already has a pool!" unless new_shard_master.pool.nil?
data_nodes = [ new_shard_master, aggregate_node ]
# create and ship schema. Mysql is stopped so that we can use buffer pool memory during network copy on destinations
slave = shards_to_merge.last.standby_slaves.reject{|s| s.in_remote_datacenter? }.last
data_nodes.each do |db|
db.stop_mysql
slave.ship_schema_to db
end
# grab slave list to export data
slaves_to_replicate = shards_to_merge.map { |shard| shard.standby_slaves.reject{|s| s.in_remote_datacenter? }.last }
# sharded table list to ship
tables = Plugin::MergeHelper.tables_to_merge(shards_to_merge.first.shard_pool.name)
# data export counts for validation later
export_counts = {}
slave_coords = {}
# concurrency controls for export/transfer
transfer_lock = Mutex.new
# asynchronously export data on all slaves
slaves_to_replicate.concurrent_map { |slave|
# these get cleaned up further down after replication is set up
slave.disable_monitoring
slave.set_downtime 12
slave.stop_query_killer
slave.pause_replication
slave.export_data tables, slave.pool.min_id, slave.pool.max_id
# record export counts for validation
export_counts[slave] = slave.import_export_counts
# retain coords to set up replication hierarchy
file, pos = slave.binlog_coordinates
slave_coords[slave] = { :log_file => file, :log_pos => pos }
transfer_lock.synchronize do
slave.fast_copy_chain(
Jetpants.export_location,
data_nodes,
port: 3307,
files: slave.pool.table_export_filenames(full_path = false, tables),
overwrite: true
)
end
# clean up files on origin slave
slave.output "Cleaning up export files..."
slave.pool.table_export_filenames(full_path = true, tables).map { |file|
slave.ssh_cmd("rm -f #{file}")
}
# restart origin slave replication
slave.resume_replication
slave.catch_up_to_master
slave.enable_monitoring
slave.start_query_killer
slave.cancel_downtime rescue nil
}
# settings to improve import speed and prevent GTID problems
data_nodes.each do |db|
opts = ['--skip-log-bin', '--skip-log-slave-updates', '--innodb-autoinc-lock-mode=2', '--skip-slave-start', '--innodb_flush_log_at_trx_commit=2', '--innodb-doublewrite=0']
opts << '--loose-gtid-mode=OFF' if db == new_shard_master
db.start_mysql opts
db.import_schemata!
end
# import data in a separate loop, as we want to leave the origin slaves
# in a non-replicating state for as little time as possible
data_nodes.concurrent_map { |db|
# load data and inject export counts from earlier for validation
slaves_to_replicate.map { |slave|
db.inject_counts export_counts[slave]
db.import_data tables, slave.pool.min_id, slave.pool.max_id
}
}
# clear out earlier import options
data_nodes.concurrent_each do |db|
db.restart_mysql "--skip-slave-start"
end
# set up replication hierarchy
slaves_to_replicate.each do |slave|
aggregate_node.add_node_to_aggregate slave, slave_coords[slave]
end
# Set up replication from aggregator to new_master.
# We intentionally pass no options to change_master_to, since it's smart enough
# to do the right thing (in this case: use aggregator's current coordinates)
new_shard_master.change_master_to aggregate_node
end
def combined_shard
Jetpants.shards(shard_pool.name).select { |shard| (
shard.min_id.to_i <= @min_id.to_i \
&& shard.max_id.to_i >= @max_id.to_i \
&& shard.max_id != 'INFINITY' \
&& @max_id != 'INFINITY' \
&& (shard.state == :initialized || shard.state == :ready) \
&& shard != self
)}.first
end
def prepare_for_merged_reads
@state = :merging
sync_configuration
end
def prepare_for_merged_writes
@state = :deprecated
sync_configuration
end
def decomission!
# trigger the logic in the jetpants collins helper to eject the boxes in the cluster
@state = :recycle
sync_configuration
end
def in_config?
[:merging, :ready, :child, :needs_cleanup, :read_only, :offline].include? @state
end
end
end
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2013 Hannes Janetzek
*
* This file is part of the OpenScienceMap project (http://www.opensciencemap.org).
*
* This program is free software: you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.oscim.layers.tile;
import org.oscim.core.MapPosition;
import org.oscim.event.Event;
import org.oscim.layers.Layer;
import org.oscim.map.Map;
import org.oscim.map.Map.UpdateListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO - add a TileLayer.Builder
*/
public abstract class TileLayer extends Layer implements UpdateListener {
static final Logger log = LoggerFactory.getLogger(TileLayer.class);
private static final int NUM_LOADERS = 4;
/**
* TileManager responsible for adding visible tiles
* to load queue and managing in-memory tile cache.
*/
protected final TileManager mTileManager;
protected TileLoader[] mTileLoader;
public TileLayer(Map map, TileManager tileManager, TileRenderer renderer) {
super(map);
renderer.setTileManager(tileManager);
mTileManager = tileManager;
mRenderer = renderer;
}
public TileLayer(Map map, TileManager tileManager) {
super(map);
mTileManager = tileManager;
}
protected void setRenderer(TileRenderer renderer) {
renderer.setTileManager(mTileManager);
mRenderer = renderer;
}
abstract protected TileLoader createLoader();
public TileRenderer tileRenderer() {
return (TileRenderer) mRenderer;
}
protected void initLoader(int numLoaders) {
mTileLoader = new TileLoader[numLoaders];
for (int i = 0; i < numLoaders; i++) {
mTileLoader[i] = createLoader();
mTileLoader[i].start();
}
}
/**
* Override to set number of loader threads. Default is 4.
*/
protected int getNumLoaders() {
return NUM_LOADERS;
}
@Override
public void onMapEvent(Event event, MapPosition mapPosition) {
if (event == Map.CLEAR_EVENT) {
/* sync with TileRenderer */
synchronized (mRenderer) {
tileRenderer().clearTiles();
mTileManager.init();
}
if (mTileManager.update(mapPosition))
notifyLoaders();
} else if (event == Map.POSITION_EVENT) {
if (mTileManager.update(mapPosition))
notifyLoaders();
}
}
@Override
public void onDetach() {
for (TileLoader loader : mTileLoader) {
loader.pause();
loader.finish();
loader.dispose();
}
}
void notifyLoaders() {
for (TileLoader loader : mTileLoader)
loader.go();
}
protected void pauseLoaders(boolean wait) {
for (TileLoader loader : mTileLoader) {
loader.cancel();
if (!loader.isPausing())
loader.pause();
}
if (!wait)
return;
for (TileLoader loader : mTileLoader) {
if (!loader.isPausing())
loader.awaitPausing();
}
}
protected void resumeLoaders() {
for (TileLoader loader : mTileLoader)
loader.proceed();
}
public TileManager getManager() {
return mTileManager;
}
}
| {
"pile_set_name": "Github"
} |
#include "processor.hpp"
const std::array<ProcessorInfo::Item, 3> ProcessorInfo::info{{
{Processor::None, u"None"_q, u""_q, (int)0},
{Processor::CPU, u"CPU"_q, u""_q, (int)1},
{Processor::GPU, u"GPU"_q, u""_q, (int)2}
}};
| {
"pile_set_name": "Github"
} |
/*
* node_list.c
*
* Created on: Mar 8, 2011
* Author: posixninja
*
* Copyright (c) 2011 Joshua Hill. All Rights Reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "list.h"
#include "node.h"
#include "node_list.h"
void node_list_destroy(node_list_t* list) {
if(list != NULL) {
list_destroy((list_t*) list);
}
}
node_list_t* node_list_create(node_t* node) {
node_list_t* list = (node_list_t*) malloc(sizeof(node_list_t));
if(list == NULL) {
return NULL;
}
memset(list, '\0', sizeof(node_list_t));
// Initialize structure
list_init((list_t*) list);
list->count = 0;
return list;
}
int node_list_add(node_list_t* list, node_t* node) {
if (!list || !node) return -1;
// Find the last element in the list
node_t* last = list->end;
// Setup our new node as the new last element
node->next = NULL;
node->prev = last;
// Set the next element of our old "last" element
if (last) {
// but only if the node list is not empty
last->next = node;
}
// Set the lists prev to the new last element
list->end = node;
// Increment our node count for this list
list->count++;
return 0;
}
int node_list_insert(node_list_t* list, unsigned int index, node_t* node) {
if (!list || !node) return -1;
if (index >= list->count) {
return node_list_add(list, node);
}
// Get the first element in the list
node_t* cur = list->begin;
unsigned int pos = 0;
node_t* prev = NULL;
if (index > 0) {
while (pos < index) {
prev = cur;
cur = cur->next;
pos++;
}
}
if (prev) {
// Set previous node
node->prev = prev;
// Set next node of our new node to next node of the previous node
node->next = prev->next;
// Set next node of previous node to our new node
prev->next = node;
} else {
node->prev = NULL;
// get old first element in list
node->next = list->begin;
// set new node as first element in list
list->begin = node;
}
if (node->next == NULL) {
// Set the lists prev to the new last element
list->end = node;
} else {
// set prev of the new next element to our node
node->next->prev = node;
}
// Increment our node count for this list
list->count++;
return 0;
}
int node_list_remove(node_list_t* list, node_t* node) {
if (!list || !node) return -1;
if (list->count == 0) return -1;
node_t* n;
for (n = list->begin; n; n = n->next) {
if (node == n) {
node_t* newnode = node->next;
if (node->prev) {
node->prev->next = newnode;
if (newnode) {
newnode->prev = node->prev;
} else {
// last element in the list
list->end = node->prev;
}
} else {
// we just removed the first element
if (newnode) {
newnode->prev = NULL;
}
list->begin = newnode;
}
list->count--;
return 0;
}
}
return -1;
}
| {
"pile_set_name": "Github"
} |
package com.github.kokorin.jaffree.ffprobe.data;
import org.junit.Assert;
import org.junit.Test;
import java.io.InputStream;
import java.util.List;
public class DefaultFormatParserTest {
@Test
public void parse() throws Exception {
Data data = parseResource("ffprobe_streams.out");
Assert.assertNotNull(data);
List<DSection> streams = data.getSections("STREAM");
Assert.assertEquals(2, streams.size());
}
@Test
public void parseHttpsPresigned() throws Exception {
Data data = parseResource("ffprobe_streams_presigned_https.out");
Assert.assertNotNull(data);
List<DSection> streams = data.getSections("STREAM");
Assert.assertEquals(2, streams.size());
DSection format = data.getSection("FORMAT");
Assert.assertNotNull(format);
String filename = format.getString("filename");
String expectedFilename = "https://somebucket.s3.region.amazonaws.com/file.mp4?versionId=v1&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Date=somedate&X-Amz-SignedHeaders=host&X-Amz-Expires=someamount&X-Amz-Credential=cred&X-Amz-Signature=sig";
Assert.assertEquals(expectedFilename, filename);
}
public static Data parseResource(String name) throws Exception {
try (InputStream input = DefaultFormatParserTest.class.getResourceAsStream(name)){
return new DefaultFormatParser().parse(input);
}
}
} | {
"pile_set_name": "Github"
} |
# Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
service.attlist &= attribute hostalias { xsd:NCName }
service.attlist &= attribute baseport { xsd:unsignedShort }?
service.attlist &= attribute jvmargs { text }?
service.attlist &= attribute jvm-options { text }?
service.attlist &= attribute jvm-gc-options { text }?
# preload is for internal use only
service.attlist &= attribute preload { text }?
anyElement = element * {
(attribute * { text }
| text
| anyElement)*
}
# Valid qualified java class name. See http://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.8
JavaId = xsd:string { pattern = "([a-zA-Z_$][a-zA-Z\d_$]*\.)*[a-zA-Z_$][a-zA-Z\d_$]*" }
Nodes = element nodes {
attribute count { xsd:positiveInteger | xsd:string } &
attribute flavor { xsd:string }? &
attribute docker-image { xsd:string }? &
Resources?
}
Resources = element resources {
attribute vcpu { xsd:double { minExclusive = "0.0" } | xsd:string } &
attribute memory { xsd:string } &
attribute disk { xsd:string } &
attribute disk-speed { xsd:string }? &
attribute storage-type { xsd:string }?
}
OptionalDedicatedNodes = element nodes {
attribute count { xsd:positiveInteger | xsd:string } &
attribute flavor { xsd:string }? &
attribute required { xsd:boolean }? &
attribute docker-image { xsd:string }? &
attribute dedicated { xsd:boolean }? &
attribute exclusive { xsd:boolean }? &
Resources?
}
GenericConfig = element config {
attribute name { text },
attribute namespace { text }?, # TODO: Remove in Vespa 8
attribute version { text }?,
anyElement +
}
ComponentSpec =
( attribute id { xsd:Name | JavaId } | attribute idref { xsd:Name } | attribute ident { xsd:Name } )
ComponentId =
ComponentSpec
BundleSpec =
attribute class { xsd:Name | JavaId }? &
attribute bundle { xsd:Name }?
Component = element component {
ComponentDefinition
}
ComponentDefinition =
ComponentId &
BundleSpec &
GenericConfig* &
Component*
| {
"pile_set_name": "Github"
} |
.\" **************************************************************************
.\" * _ _ ____ _
.\" * Project ___| | | | _ \| |
.\" * / __| | | | |_) | |
.\" * | (__| |_| | _ <| |___
.\" * \___|\___/|_| \_\_____|
.\" *
.\" * Copyright (C) 1998 - 2014, Daniel Stenberg, <[email protected]>, et al.
.\" *
.\" * This software is licensed as described in the file COPYING, which
.\" * you should have received as part of this distribution. The terms
.\" * are also available at http://curl.haxx.se/docs/copyright.html.
.\" *
.\" * You may opt to use, copy, modify, merge, publish, distribute and/or sell
.\" * copies of the Software, and permit persons to whom the Software is
.\" * furnished to do so, under the terms of the COPYING file.
.\" *
.\" * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
.\" * KIND, either express or implied.
.\" *
.\" **************************************************************************
.\"
.TH CURLOPT_MAIL_AUTH 3 "19 Jun 2014" "libcurl 7.37.0" "curl_easy_setopt options"
.SH NAME
CURLOPT_MAIL_AUTH \- specify SMTP authentication address
.SH SYNOPSIS
#include <curl/curl.h>
CURLcode curl_easy_setopt(CURL *handle, CURLOPT_MAIL_AUTH, char *auth);
.SH DESCRIPTION
Pass a pointer to a zero terminated string as parameter. This will be used to
specify the authentication address (identity) of a submitted message that is
being relayed to another server.
This optional parameter allows co-operating agents in a trusted environment to
communicate the authentication of individual messages and should only be used
by the application program, using libcurl, if the application is itself a mail
server acting in such an environment. If the application is operating as such
and the AUTH address is not known or is invalid, then an empty string should
be used for this parameter.
Unlike \fICURLOPT_MAIL_FROM(3)\fP and \fICURLOPT_MAIL_RCPT(3)\fP, the address
should not be specified within a pair of angled brackets (<>). However, if an
empty string is used then a pair of brackets will be sent by libcurl as
required by RFC2554.
.SH DEFAULT
NULL
.SH PROTOCOLS
SMTP
.SH EXAMPLE
TODO
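In the meantime, the following is a minimal illustrative sketch (not part of the
upstream manual), assuming the application is itself relaying mail as described
above; the server URL, addresses and AUTH identity are placeholders:
.nf
CURL *curl = curl_easy_init();
if(curl) {
  struct curl_slist *recipients = NULL;
  curl_easy_setopt(curl, CURLOPT_URL, "smtp://mail.example.com");
  curl_easy_setopt(curl, CURLOPT_MAIL_FROM, "<[email protected]>");
  recipients = curl_slist_append(recipients, "<[email protected]>");
  curl_easy_setopt(curl, CURLOPT_MAIL_RCPT, recipients);

  /* identity of the originally authenticated submitter being relayed;
     note: no angle brackets for CURLOPT_MAIL_AUTH */
  curl_easy_setopt(curl, CURLOPT_MAIL_AUTH, "[email protected]");

  curl_easy_perform(curl);
  curl_slist_free_all(recipients);
  curl_easy_cleanup(curl);
}
.fi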
.SH AVAILABILITY
Added in 7.25.0
.SH RETURN VALUE
Returns CURLE_OK if the option is supported, CURLE_UNKNOWN_OPTION if not, or
CURLE_OUT_OF_MEMORY if there was insufficient heap space.
.SH "SEE ALSO"
.BR CURLOPT_MAIL_FROM "(3), " CURLOPT_MAIL_RCPT "(3), "
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2016, Seraphim Sense Ltd.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#import "ANAsapButton.h"
@implementation ANAsapButton
- (id)initWithCoder:(NSCoder *)aDecoder {
self = [super initWithCoder:aDecoder];
if (self) {
[self commonInit];
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
self = [super initWithFrame:frame];
if (self) {
[self commonInit];
}
return self;
}
- (void)commonInit {
[self.titleLabel setFont:[UIFont fontWithName:@"Asap-Regular" size:self.titleLabel.font.pointSize]];
}
@end
| {
"pile_set_name": "Github"
} |
VPATH += $$PWD
INCLUDEPATH *= $$PWD $$PWD/..
DEPENDPATH *= $$PWD $$PWD/..
# Input
HEADERS += \
$$PWD/bookmarkmanager.h \
$$PWD/contentwindow.h \
$$PWD/filternamedialog.h \
$$PWD/indexwindow.h \
$$PWD/topicchooser.h
SOURCES += \
$$PWD/bookmarkmanager.cpp \
$$PWD/contentwindow.cpp \
$$PWD/filternamedialog.cpp \
$$PWD/indexwindow.cpp \
$$PWD/topicchooser.cpp
FORMS += \
$$PWD/bookmarkdialog.ui \
$$PWD/filternamedialog.ui \
$$PWD/topicchooser.ui
| {
"pile_set_name": "Github"
} |
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// 'AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// A test for keyed call ICs with a mix of smi and string keys.
function testOne(receiver, key, result) {
for(var i = 0; i != 10; i++ ) {
assertEquals(result, receiver[key]());
}
}
function testMany(receiver, keys, results) {
for (var i = 0; i != 10; i++) {
for (var k = 0; k != keys.length; k++) {
assertEquals(results[k], receiver[keys[k]]());
}
}
}
var toStringNonSymbol = 'to';
toStringNonSymbol += 'String';
function TypeOfThis() { return typeof this; }
Number.prototype.square = function() { return this * this; }
Number.prototype.power4 = function() { return this.square().square(); }
Number.prototype.type = TypeOfThis;
String.prototype.type = TypeOfThis;
Boolean.prototype.type = TypeOfThis;
// Use a non-symbol key to force inline cache to generic case.
testOne(0, toStringNonSymbol, '0');
testOne(1, 'toString', '1');
testOne('1', 'toString', '1');
testOne(1.0, 'toString', '1');
testOne(1, 'type', 'object');
testOne(2.3, 'type', 'object');
testOne('x', 'type', 'object');
testOne(true, 'type', 'object');
testOne(false, 'type', 'object');
testOne(2, 'square', 4);
testOne(2, 'power4', 16);
function zero () { return 0; }
function one () { return 1; }
function two () { return 2; }
var fixed_array = [zero, one, two];
var dict_array = [ zero, one, two ];
dict_array[100000] = 1;
var fast_prop = { zero: zero, one: one, two: two };
var normal_prop = { zero: zero, one: one, two: two };
normal_prop.x = 0;
delete normal_prop.x;
var first3num = [0, 1, 2];
var first3str = ['zero', 'one', 'two'];
// Use a non-symbol key to force inline cache to generic case.
testMany('123', [toStringNonSymbol, 'charAt', 'charCodeAt'], ['123', '1', 49]);
testMany(fixed_array, first3num, first3num);
testMany(dict_array, first3num, first3num);
testMany(fast_prop, first3str, first3num);
testMany(normal_prop, first3str, first3num);
function testException(receiver, keys, exceptions) {
for (var i = 0; i != 10; i++) {
for (var k = 0; k != keys.length; k++) {
var thrown = false;
try {
var result = receiver[keys[k]]();
} catch (e) {
thrown = true;
}
assertEquals(exceptions[k], thrown);
}
}
}
testException([zero, one, /* hole */ ], [0, 1, 2], [false, false, true]);
| {
"pile_set_name": "Github"
} |
<?php
/*
* Copyright 2007-2017 Abstrium <contact (at) pydio.com>
* This file is part of Pydio.
*
* Pydio is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Pydio is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Pydio. If not, see <http://www.gnu.org/licenses/>.
*
* The latest code can be found at <https://pydio.com>.
*/
namespace Pydio\Core\Exception;
defined('AJXP_EXEC') or die('Access not allowed');
/**
* Class ActionNotFoundException
* Exception thrown when a requests is calling an unknown (or disabled) action.
* @package Pydio\Core\Exception
*/
class ActionNotFoundException extends PydioException
{
/**
* ActionNotFoundException constructor.
* @param string $action
*/
public function __construct($action)
{
parent::__construct("Could not find action ".$action, null);
}
} | {
"pile_set_name": "Github"
} |