.header-border {
border-bottom: solid 1px #c6c5c5;
}
.h4-mar{
margin-top: 30px;
}
.h4-mar-2 {
margin-top: 30px;
}
.address>.radio {
padding-left: 50px;
}
.radio>label {
margin: 5px 0 5px 0;
}
.address-check input[type='checkbox']:checked {
background: #FFF0E8;
border: solid 1px #f40;
}
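/* Note: background/border on input[type='checkbox']:checked only render once
the native widget appearance is disabled (e.g. appearance: none), which this
stylesheet appears to assume. */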
.manage-a {
font-size: 15px;
color: #2aabd2;
} | {
"pile_set_name": "Github"
} |
require 'tzinfo/timezone_definition'
module TZInfo
module Definitions
module America
module Chicago
include TimezoneDefinition
timezone 'America/Chicago' do |tz|
tz.offset :o0, -21036, 0, :LMT
tz.offset :o1, -21600, 0, :CST
tz.offset :o2, -21600, 3600, :CDT
tz.offset :o3, -18000, 0, :EST
tz.offset :o4, -21600, 3600, :CWT
tz.offset :o5, -21600, 3600, :CPT
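# Each tz.offset above: identifier, base UTC offset (seconds), DST adjustment
# (seconds), abbreviation. Transition times outside the 32-bit Unix timestamp
# range are written as Rational Julian dates (numerator, denominator); the
# 1970-2037 transitions below are plain Unix timestamps.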
tz.transition 1883, 11, :o1, 9636533, 4
tz.transition 1918, 3, :o2, 14530103, 6
tz.transition 1918, 10, :o1, 58125451, 24
tz.transition 1919, 3, :o2, 14532287, 6
tz.transition 1919, 10, :o1, 58134187, 24
tz.transition 1920, 6, :o2, 14534933, 6
tz.transition 1920, 10, :o1, 58143091, 24
tz.transition 1921, 3, :o2, 14536655, 6
tz.transition 1921, 10, :o1, 58151827, 24
tz.transition 1922, 4, :o2, 14539049, 6
tz.transition 1922, 9, :o1, 58159723, 24
tz.transition 1923, 4, :o2, 14541233, 6
tz.transition 1923, 9, :o1, 58168627, 24
tz.transition 1924, 4, :o2, 14543417, 6
tz.transition 1924, 9, :o1, 58177363, 24
tz.transition 1925, 4, :o2, 14545601, 6
tz.transition 1925, 9, :o1, 58186099, 24
tz.transition 1926, 4, :o2, 14547785, 6
tz.transition 1926, 9, :o1, 58194835, 24
tz.transition 1927, 4, :o2, 14549969, 6
tz.transition 1927, 9, :o1, 58203571, 24
tz.transition 1928, 4, :o2, 14552195, 6
tz.transition 1928, 9, :o1, 58212475, 24
tz.transition 1929, 4, :o2, 14554379, 6
tz.transition 1929, 9, :o1, 58221211, 24
tz.transition 1930, 4, :o2, 14556563, 6
tz.transition 1930, 9, :o1, 58229947, 24
tz.transition 1931, 4, :o2, 14558747, 6
tz.transition 1931, 9, :o1, 58238683, 24
tz.transition 1932, 4, :o2, 14560931, 6
tz.transition 1932, 9, :o1, 58247419, 24
tz.transition 1933, 4, :o2, 14563157, 6
tz.transition 1933, 9, :o1, 58256155, 24
tz.transition 1934, 4, :o2, 14565341, 6
tz.transition 1934, 9, :o1, 58265059, 24
tz.transition 1935, 4, :o2, 14567525, 6
tz.transition 1935, 9, :o1, 58273795, 24
tz.transition 1936, 3, :o3, 14569373, 6
tz.transition 1936, 11, :o1, 58283707, 24
tz.transition 1937, 4, :o2, 14571893, 6
tz.transition 1937, 9, :o1, 58291267, 24
tz.transition 1938, 4, :o2, 14574077, 6
tz.transition 1938, 9, :o1, 58300003, 24
tz.transition 1939, 4, :o2, 14576303, 6
tz.transition 1939, 9, :o1, 58308739, 24
tz.transition 1940, 4, :o2, 14578487, 6
tz.transition 1940, 9, :o1, 58317643, 24
tz.transition 1941, 4, :o2, 14580671, 6
tz.transition 1941, 9, :o1, 58326379, 24
tz.transition 1942, 2, :o4, 14582399, 6
tz.transition 1945, 8, :o5, 58360379, 24
tz.transition 1945, 9, :o1, 58361491, 24
tz.transition 1946, 4, :o2, 14591633, 6
tz.transition 1946, 9, :o1, 58370227, 24
tz.transition 1947, 4, :o2, 14593817, 6
tz.transition 1947, 9, :o1, 58378963, 24
tz.transition 1948, 4, :o2, 14596001, 6
tz.transition 1948, 9, :o1, 58387699, 24
tz.transition 1949, 4, :o2, 14598185, 6
tz.transition 1949, 9, :o1, 58396435, 24
tz.transition 1950, 4, :o2, 14600411, 6
tz.transition 1950, 9, :o1, 58405171, 24
tz.transition 1951, 4, :o2, 14602595, 6
tz.transition 1951, 9, :o1, 58414075, 24
tz.transition 1952, 4, :o2, 14604779, 6
tz.transition 1952, 9, :o1, 58422811, 24
tz.transition 1953, 4, :o2, 14606963, 6
tz.transition 1953, 9, :o1, 58431547, 24
tz.transition 1954, 4, :o2, 14609147, 6
tz.transition 1954, 9, :o1, 58440283, 24
tz.transition 1955, 4, :o2, 14611331, 6
tz.transition 1955, 10, :o1, 58449859, 24
tz.transition 1956, 4, :o2, 14613557, 6
tz.transition 1956, 10, :o1, 58458595, 24
tz.transition 1957, 4, :o2, 14615741, 6
tz.transition 1957, 10, :o1, 58467331, 24
tz.transition 1958, 4, :o2, 14617925, 6
tz.transition 1958, 10, :o1, 58476067, 24
tz.transition 1959, 4, :o2, 14620109, 6
tz.transition 1959, 10, :o1, 58484803, 24
tz.transition 1960, 4, :o2, 14622293, 6
tz.transition 1960, 10, :o1, 58493707, 24
tz.transition 1961, 4, :o2, 14624519, 6
tz.transition 1961, 10, :o1, 58502443, 24
tz.transition 1962, 4, :o2, 14626703, 6
tz.transition 1962, 10, :o1, 58511179, 24
tz.transition 1963, 4, :o2, 14628887, 6
tz.transition 1963, 10, :o1, 58519915, 24
tz.transition 1964, 4, :o2, 14631071, 6
tz.transition 1964, 10, :o1, 58528651, 24
tz.transition 1965, 4, :o2, 14633255, 6
tz.transition 1965, 10, :o1, 58537555, 24
tz.transition 1966, 4, :o2, 14635439, 6
tz.transition 1966, 10, :o1, 58546291, 24
tz.transition 1967, 4, :o2, 14637665, 6
tz.transition 1967, 10, :o1, 58555027, 24
tz.transition 1968, 4, :o2, 14639849, 6
tz.transition 1968, 10, :o1, 58563763, 24
tz.transition 1969, 4, :o2, 14642033, 6
tz.transition 1969, 10, :o1, 58572499, 24
tz.transition 1970, 4, :o2, 9964800
tz.transition 1970, 10, :o1, 25686000
tz.transition 1971, 4, :o2, 41414400
tz.transition 1971, 10, :o1, 57740400
tz.transition 1972, 4, :o2, 73468800
tz.transition 1972, 10, :o1, 89190000
tz.transition 1973, 4, :o2, 104918400
tz.transition 1973, 10, :o1, 120639600
tz.transition 1974, 1, :o2, 126691200
tz.transition 1974, 10, :o1, 152089200
tz.transition 1975, 2, :o2, 162374400
tz.transition 1975, 10, :o1, 183538800
tz.transition 1976, 4, :o2, 199267200
tz.transition 1976, 10, :o1, 215593200
tz.transition 1977, 4, :o2, 230716800
tz.transition 1977, 10, :o1, 247042800
tz.transition 1978, 4, :o2, 262771200
tz.transition 1978, 10, :o1, 278492400
tz.transition 1979, 4, :o2, 294220800
tz.transition 1979, 10, :o1, 309942000
tz.transition 1980, 4, :o2, 325670400
tz.transition 1980, 10, :o1, 341391600
tz.transition 1981, 4, :o2, 357120000
tz.transition 1981, 10, :o1, 372841200
tz.transition 1982, 4, :o2, 388569600
tz.transition 1982, 10, :o1, 404895600
tz.transition 1983, 4, :o2, 420019200
tz.transition 1983, 10, :o1, 436345200
tz.transition 1984, 4, :o2, 452073600
tz.transition 1984, 10, :o1, 467794800
tz.transition 1985, 4, :o2, 483523200
tz.transition 1985, 10, :o1, 499244400
tz.transition 1986, 4, :o2, 514972800
tz.transition 1986, 10, :o1, 530694000
tz.transition 1987, 4, :o2, 544608000
tz.transition 1987, 10, :o1, 562143600
tz.transition 1988, 4, :o2, 576057600
tz.transition 1988, 10, :o1, 594198000
tz.transition 1989, 4, :o2, 607507200
tz.transition 1989, 10, :o1, 625647600
tz.transition 1990, 4, :o2, 638956800
tz.transition 1990, 10, :o1, 657097200
tz.transition 1991, 4, :o2, 671011200
tz.transition 1991, 10, :o1, 688546800
tz.transition 1992, 4, :o2, 702460800
tz.transition 1992, 10, :o1, 719996400
tz.transition 1993, 4, :o2, 733910400
tz.transition 1993, 10, :o1, 752050800
tz.transition 1994, 4, :o2, 765360000
tz.transition 1994, 10, :o1, 783500400
tz.transition 1995, 4, :o2, 796809600
tz.transition 1995, 10, :o1, 814950000
tz.transition 1996, 4, :o2, 828864000
tz.transition 1996, 10, :o1, 846399600
tz.transition 1997, 4, :o2, 860313600
tz.transition 1997, 10, :o1, 877849200
tz.transition 1998, 4, :o2, 891763200
tz.transition 1998, 10, :o1, 909298800
tz.transition 1999, 4, :o2, 923212800
tz.transition 1999, 10, :o1, 941353200
tz.transition 2000, 4, :o2, 954662400
tz.transition 2000, 10, :o1, 972802800
tz.transition 2001, 4, :o2, 986112000
tz.transition 2001, 10, :o1, 1004252400
tz.transition 2002, 4, :o2, 1018166400
tz.transition 2002, 10, :o1, 1035702000
tz.transition 2003, 4, :o2, 1049616000
tz.transition 2003, 10, :o1, 1067151600
tz.transition 2004, 4, :o2, 1081065600
tz.transition 2004, 10, :o1, 1099206000
tz.transition 2005, 4, :o2, 1112515200
tz.transition 2005, 10, :o1, 1130655600
tz.transition 2006, 4, :o2, 1143964800
tz.transition 2006, 10, :o1, 1162105200
tz.transition 2007, 3, :o2, 1173600000
tz.transition 2007, 11, :o1, 1194159600
tz.transition 2008, 3, :o2, 1205049600
tz.transition 2008, 11, :o1, 1225609200
tz.transition 2009, 3, :o2, 1236499200
tz.transition 2009, 11, :o1, 1257058800
tz.transition 2010, 3, :o2, 1268553600
tz.transition 2010, 11, :o1, 1289113200
tz.transition 2011, 3, :o2, 1300003200
tz.transition 2011, 11, :o1, 1320562800
tz.transition 2012, 3, :o2, 1331452800
tz.transition 2012, 11, :o1, 1352012400
tz.transition 2013, 3, :o2, 1362902400
tz.transition 2013, 11, :o1, 1383462000
tz.transition 2014, 3, :o2, 1394352000
tz.transition 2014, 11, :o1, 1414911600
tz.transition 2015, 3, :o2, 1425801600
tz.transition 2015, 11, :o1, 1446361200
tz.transition 2016, 3, :o2, 1457856000
tz.transition 2016, 11, :o1, 1478415600
tz.transition 2017, 3, :o2, 1489305600
tz.transition 2017, 11, :o1, 1509865200
tz.transition 2018, 3, :o2, 1520755200
tz.transition 2018, 11, :o1, 1541314800
tz.transition 2019, 3, :o2, 1552204800
tz.transition 2019, 11, :o1, 1572764400
tz.transition 2020, 3, :o2, 1583654400
tz.transition 2020, 11, :o1, 1604214000
tz.transition 2021, 3, :o2, 1615708800
tz.transition 2021, 11, :o1, 1636268400
tz.transition 2022, 3, :o2, 1647158400
tz.transition 2022, 11, :o1, 1667718000
tz.transition 2023, 3, :o2, 1678608000
tz.transition 2023, 11, :o1, 1699167600
tz.transition 2024, 3, :o2, 1710057600
tz.transition 2024, 11, :o1, 1730617200
tz.transition 2025, 3, :o2, 1741507200
tz.transition 2025, 11, :o1, 1762066800
tz.transition 2026, 3, :o2, 1772956800
tz.transition 2026, 11, :o1, 1793516400
tz.transition 2027, 3, :o2, 1805011200
tz.transition 2027, 11, :o1, 1825570800
tz.transition 2028, 3, :o2, 1836460800
tz.transition 2028, 11, :o1, 1857020400
tz.transition 2029, 3, :o2, 1867910400
tz.transition 2029, 11, :o1, 1888470000
tz.transition 2030, 3, :o2, 1899360000
tz.transition 2030, 11, :o1, 1919919600
tz.transition 2031, 3, :o2, 1930809600
tz.transition 2031, 11, :o1, 1951369200
tz.transition 2032, 3, :o2, 1962864000
tz.transition 2032, 11, :o1, 1983423600
tz.transition 2033, 3, :o2, 1994313600
tz.transition 2033, 11, :o1, 2014873200
tz.transition 2034, 3, :o2, 2025763200
tz.transition 2034, 11, :o1, 2046322800
tz.transition 2035, 3, :o2, 2057212800
tz.transition 2035, 11, :o1, 2077772400
tz.transition 2036, 3, :o2, 2088662400
tz.transition 2036, 11, :o1, 2109222000
tz.transition 2037, 3, :o2, 2120112000
tz.transition 2037, 11, :o1, 2140671600
tz.transition 2038, 3, :o2, 14792981, 6
tz.transition 2038, 11, :o1, 59177635, 24
tz.transition 2039, 3, :o2, 14795165, 6
tz.transition 2039, 11, :o1, 59186371, 24
tz.transition 2040, 3, :o2, 14797349, 6
tz.transition 2040, 11, :o1, 59195107, 24
tz.transition 2041, 3, :o2, 14799533, 6
tz.transition 2041, 11, :o1, 59203843, 24
tz.transition 2042, 3, :o2, 14801717, 6
tz.transition 2042, 11, :o1, 59212579, 24
tz.transition 2043, 3, :o2, 14803901, 6
tz.transition 2043, 11, :o1, 59221315, 24
tz.transition 2044, 3, :o2, 14806127, 6
tz.transition 2044, 11, :o1, 59230219, 24
tz.transition 2045, 3, :o2, 14808311, 6
tz.transition 2045, 11, :o1, 59238955, 24
tz.transition 2046, 3, :o2, 14810495, 6
tz.transition 2046, 11, :o1, 59247691, 24
tz.transition 2047, 3, :o2, 14812679, 6
tz.transition 2047, 11, :o1, 59256427, 24
tz.transition 2048, 3, :o2, 14814863, 6
tz.transition 2048, 11, :o1, 59265163, 24
tz.transition 2049, 3, :o2, 14817089, 6
tz.transition 2049, 11, :o1, 59274067, 24
tz.transition 2050, 3, :o2, 14819273, 6
tz.transition 2050, 11, :o1, 59282803, 24
end
end
end
end
end
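# A minimal usage sketch (assuming the classic TZInfo API that these generated
# definitions target):
#
#   require 'tzinfo'
#   tz = TZInfo::Timezone.get('America/Chicago')
#   tz.utc_to_local(Time.utc(2020, 7, 1, 12)) # => 07:00 local (CDT, UTC-5)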
| {
"pile_set_name": "Github"
} |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package intstr
import (
"encoding/json"
"reflect"
"testing"
"github.com/ghodss/yaml"
)
func TestFromInt(t *testing.T) {
i := FromInt(93)
if i.Type != Int || i.IntVal != 93 {
t.Errorf("Expected IntVal=93, got %+v", i)
}
}
func TestFromString(t *testing.T) {
i := FromString("76")
if i.Type != String || i.StrVal != "76" {
t.Errorf("Expected StrVal=\"76\", got %+v", i)
}
}
type IntOrStringHolder struct {
IOrS IntOrString `json:"val"`
}
func TestIntOrStringUnmarshalJSON(t *testing.T) {
cases := []struct {
input string
result IntOrString
}{
{"{\"val\": 123}", FromInt(123)},
{"{\"val\": \"123\"}", FromString("123")},
}
for _, c := range cases {
var result IntOrStringHolder
if err := json.Unmarshal([]byte(c.input), &result); err != nil {
t.Errorf("Failed to unmarshal input '%v': %v", c.input, err)
}
if result.IOrS != c.result {
t.Errorf("Failed to unmarshal input '%v': expected %+v, got %+v", c.input, c.result, result)
}
}
}
func TestIntOrStringMarshalJSON(t *testing.T) {
cases := []struct {
input IntOrString
result string
}{
{FromInt(123), "{\"val\":123}"},
{FromString("123"), "{\"val\":\"123\"}"},
}
for _, c := range cases {
input := IntOrStringHolder{c.input}
result, err := json.Marshal(&input)
if err != nil {
t.Errorf("Failed to marshal input '%v': %v", input, err)
}
if string(result) != c.result {
t.Errorf("Failed to marshal input '%v': expected: %+v, got %q", input, c.result, string(result))
}
}
}
func TestIntOrStringMarshalJSONUnmarshalYAML(t *testing.T) {
cases := []struct {
input IntOrString
}{
{FromInt(123)},
{FromString("123")},
}
for _, c := range cases {
input := IntOrStringHolder{c.input}
jsonMarshalled, err := json.Marshal(&input)
if err != nil {
t.Errorf("1: Failed to marshal input: '%v': %v", input, err)
}
var result IntOrStringHolder
err = yaml.Unmarshal(jsonMarshalled, &result)
if err != nil {
t.Errorf("2: Failed to unmarshal '%+v': %v", string(jsonMarshalled), err)
}
if !reflect.DeepEqual(input, result) {
t.Errorf("3: Failed to marshal input '%+v': got %+v", input, result)
}
}
}
func TestGetValueFromIntOrPercent(t *testing.T) {
tests := []struct {
input IntOrString
total int
roundUp bool
expectErr bool
expectVal int
}{
{
input: FromInt(123),
expectErr: false,
expectVal: 123,
},
{
input: FromString("90%"),
total: 100,
roundUp: true,
expectErr: false,
expectVal: 90,
},
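// 90% of a total of 95 is 85.5: the roundUp case expects 86 (ceil),
// the roundDown case expects 85 (floor).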
{
input: FromString("90%"),
total: 95,
roundUp: true,
expectErr: false,
expectVal: 86,
},
{
input: FromString("90%"),
total: 95,
roundUp: false,
expectErr: false,
expectVal: 85,
},
{
input: FromString("%"),
expectErr: true,
},
{
input: FromString("90#"),
expectErr: true,
},
{
input: FromString("#%"),
expectErr: true,
},
}
for i, test := range tests {
t.Logf("test case %d", i)
value, err := GetValueFromIntOrPercent(&test.input, test.total, test.roundUp)
if test.expectErr && err == nil {
t.Errorf("expected error, but got none")
continue
}
if !test.expectErr && err != nil {
t.Errorf("unexpected err: %v", err)
continue
}
if test.expectVal != value {
t.Errorf("expected %v, but got %v", test.expectVal, value)
}
}
}
| {
"pile_set_name": "Github"
} |
# Sikuli (Jython) script: change the machine's IP settings through the macOS
# System Preferences GUI. Each .png argument is a screenshot of the UI element
# that Sikuli locates on screen.
ip = input("please enter the new IP address:")
syspref = App.open("System Preferences.app")
with Region(syspref.window()):
    click("Network.png")
    click("1254367484704.png")
    click("1256519960853.png")
    click("1254367285543.png")
    wait("1256520016190.png")
    type(ip + "\t")            # IP address, tab to the next field
    type("255.255.255.0\t")    # subnet mask
    type("192.168.0.254\t")    # router/gateway
    click("1254367352295.png") # final confirmation click (presumably Apply)
| {
"pile_set_name": "Github"
} |
// Don't crash when a scripted proxy handler throws Error.prototype.
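// (Proxy.create and its "delete" trap are the legacy SpiderMonkey scripted-proxy
// API that this shell test targets; modern code would use `new Proxy`.)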
var g = newGlobal();
var dbg = Debugger(g);
dbg.onDebuggerStatement = function (frame) {
try {
frame.arguments[0].deleteProperty("x");
} catch (exc) {
return;
}
throw new Error("deleteProperty should throw");
};
g.eval("function h(obj) { debugger; }");
g.eval("h(Proxy.create({delete: function () { throw Error.prototype; }}));");
| {
"pile_set_name": "Github"
} |
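# Czech (cs) locale bundle ("API pro vizualni vzhled prvku" = "API for the
# visual appearance of elements"; Stupen = Degree, Dovnitr = In, Ven = Out,
# Vaha = Weight, Typ = Type).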
OpenIDE-Module-Long-Description = API pro vizuální vzhled prvk\u016f
OpenIDE-Module-Short-Description = API pro vizuální vzhled prvk\u016f
NodeGraphFunction.Degree.name = Stupe\u0148
NodeGraphFunction.InDegree.name = Stupe\u0148 Dovnit\u0159
NodeGraphFunction.OutDegree.name = Stupe\u0148 Ven
EdgeGraphFunction.Weight.name = Váha
EdgeGraphFunction.Type.name = Typ
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="zh-CN" xmlns:th="http://www.thymeleaf.org">
<head>
<meta charset="UTF-8">
<title>菜单模板</title>
</head>
<body>
<div class="perfree-menu" th:fragment="menu">
<!-- menu start -->
<div class="layui-side layui-bg-black left-navbar-menu">
<div class="layui-side-scroll">
<div class="layui-logo">
<a href="#" class="logo-text">
<i class="fa fa-pagelines" style="font-size: 20px;"></i>
<span class='logo-text-value'>Go-Fastdfs 管理</span>
</a>
</div>
<ul class="layui-nav layui-nav-tree left-menu" lay-filter="menu" lay-shrink="all">
<!-- menu list -->
<li class="layui-nav-item" lay-unselect>
<a href="javascript:;" class="nav-menu-a" onclick="openTab('book','控制台','/main','1');">
<i class="fa fa-home" style="font-size: 16px;"></i> <span class="menu-text">控制台</span>
</a>
</li>
<li class="layui-nav-item" lay-unselect>
<a href="javascript:;" class="nav-menu-a">
<i class="fa fa-file-archive-o" style="font-size: 14px;"></i> <span class="menu-text">文件管理</span>
</a>
<dl class="layui-nav-child">
<dd class="child-menu" lay-unselect>
<a href="javascript:;" onclick="openTab('cloud-upload','文件上传','/file/upload','2');" class="nav-menu-a-child" id="fileUpload">文件上传</a>
</dd>
<dd class="child-menu" lay-unselect>
<a href="javascript:;" onclick="openTab('folder-o','文件列表','/file','3');" class="nav-menu-a-child" id="file">文件列表</a>
</dd>
</dl>
</li>
<li class="layui-nav-item" lay-unselect>
<a href="javascript:;" class="nav-menu-a" >
<i class="fa fa-cubes" style="font-size: 14px;"></i> <span class="menu-text">集群管理</span>
</a>
<dl class="layui-nav-child">
<dd class="child-menu" lay-unselect>
<a href="javascript:;" onclick="openTab('cube','集群列表','/peers','4');" class="nav-menu-a-child" >集群列表</a>
</dd>
</dl>
</li>
<li class="layui-nav-item" lay-unselect>
<a href="javascript:;" class="nav-menu-a" >
<i class="fa fa-cogs" style="font-size: 14px;"></i> <span class="menu-text">系统设置</span>
</a>
<dl class="layui-nav-child">
<dd class="child-menu" lay-unselect>
<a href="javascript:;" onclick="openTab('user-o','个人资料','/settings/user','6');" class="nav-menu-a-child" id="settingsUser">个人资料</a>
</dd>
</dl>
</li>
</ul>
</div>
</div>
<!-- menu end -->
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: http://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* http://plantuml.com/patreon (only 1$ per month!)
* http://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* PlantUML is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* PlantUML distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
* License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
*
* Original Author: Arnaud Roques
*
*
*/
package net.sourceforge.plantuml.ugraphic;
import java.awt.image.AffineTransformOp;
public enum AffineTransformType {
TYPE_NEAREST_NEIGHBOR, TYPE_BILINEAR;
public int toLegacyInt() {
switch (this) {
case TYPE_BILINEAR:
return AffineTransformOp.TYPE_BILINEAR;
case TYPE_NEAREST_NEIGHBOR:
return AffineTransformOp.TYPE_NEAREST_NEIGHBOR;
}
throw new AssertionError();
}
}
| {
"pile_set_name": "Github"
} |
apiVersion: v1
kind: ReplicationController
metadata:
  name: redis-master
  # these labels can be applied automatically
  # from the labels in the pod template if not set
  labels:
    app: redis
    role: master
    tier: backend
spec:
  # this replicas value is default
  # modify it according to your case
  replicas: 1
  # selector can be applied automatically
  # from the labels in the pod template if not set
  # selector:
  #   app: redis
  #   role: master
  #   tier: backend
  template:
    metadata:
      labels:
        app: redis
        role: master
        tier: backend
    spec:
      containers:
      - name: master
        image: redis
        resources:
          requests:
            cpu: 100m
            memory: 256Mi
        ports:
        - containerPort: 6379
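# A minimal way to try this manifest (assuming kubectl is configured against a
# cluster and the file is saved as redis-master-controller.yaml):
#   kubectl apply -f redis-master-controller.yaml
#   kubectl get pods -l app=redis,role=master,tier=backend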
| {
"pile_set_name": "Github"
} |
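# Kbuild fragment for the Aureal Vortex (au88x0) ALSA drivers: each
# CONFIG_SND_AU88x0 option builds its module from the matching object list.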
snd-au8810-objs := au8810.o
snd-au8820-objs := au8820.o
snd-au8830-objs := au8830.o
obj-$(CONFIG_SND_AU8810) += snd-au8810.o
obj-$(CONFIG_SND_AU8820) += snd-au8820.o
obj-$(CONFIG_SND_AU8830) += snd-au8830.o
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
/*
* Implementation of per-board codec beeping
* Copyright (c) 2011 The Chromium OS Authors.
* Copyright 2018 Google LLC
*/
#define LOG_CATEGORY UCLASS_SOUND
#include <common.h>
#include <dm.h>
#include <hda_codec.h>
#include <log.h>
#include <pci.h>
#include <sound.h>
#include <asm/io.h>
#include <dt-bindings/sound/azalia.h>
#include <linux/bitops.h>
#include <linux/delay.h>
/**
* struct hda_regs - HDA registers
*
* https://wiki.osdev.org/Intel_High_Definition_Audio
* https://www.intel.com/content/www/us/en/standards/high-definition-audio-specification.html
*/
struct hda_regs {
u16 gcap;
u8 vmin;
u8 vmaj;
u16 outpay;
u16 inpay;
u32 gctl;
u16 wakeen;
u16 statests;
u8 reserved[0x50];
u32 cmd; /* 0x60 */
u32 resp;
u32 icii;
};
enum {
HDA_ICII_BUSY = BIT(0),
HDA_ICII_VALID = BIT(1),
/* Common node IDs */
HDA_ROOT_NODE = 0x00,
/* HDA verbs fields */
HDA_VERB_NID_S = 20,
HDA_VERB_VERB_S = 8,
HDA_VERB_PARAM_S = 0,
HDA_VERB_GET_PARAMS = 0xf00,
HDA_VERB_SET_BEEP = 0x70a,
/* GET_PARAMS parameter IDs */
GET_PARAMS_NODE_COUNT = 0x04,
GET_PARAMS_AUDIO_GROUP_CAPS = 0x08,
GET_PARAMS_AUDIO_WIDGET_CAPS = 0x09,
/* Sub-node fields */
NUM_SUB_NODES_S = 0,
NUM_SUB_NODES_M = 0xff << NUM_SUB_NODES_S,
FIRST_SUB_NODE_S = 16,
FIRST_SUB_NODE_M = 0xff << FIRST_SUB_NODE_S,
/* Get Audio Function Group Capabilities fields */
AUDIO_GROUP_CAPS_BEEP_GEN = 0x10000,
/* Get Audio Widget Capabilities fields */
AUDIO_WIDGET_TYPE_BEEP = 0x7,
AUDIO_WIDGET_TYPE_S = 20,
AUDIO_WIDGET_TYPE_M = 0xf << AUDIO_WIDGET_TYPE_S,
BEEP_FREQ_BASE = 12000,
};
static inline uint hda_verb(uint nid, uint verb, uint param)
{
return nid << HDA_VERB_NID_S | verb << HDA_VERB_VERB_S |
param << HDA_VERB_PARAM_S;
}
int hda_wait_for_ready(struct hda_regs *regs)
{
int timeout = 1000; /* Use a 1msec timeout */
while (timeout--) {
u32 reg32 = readl(®s->icii);
if (!(reg32 & HDA_ICII_BUSY))
return 0;
udelay(1);
}
return -ETIMEDOUT;
}
static int wait_for_response(struct hda_regs *regs, uint *response)
{
int timeout = 1000;
u32 reg32;
/* Send the verb to the codec */
setbits_le32(®s->icii, HDA_ICII_BUSY | HDA_ICII_VALID);
/* Use a 1msec timeout */
while (timeout--) {
reg32 = readl(®s->icii);
if ((reg32 & (HDA_ICII_VALID | HDA_ICII_BUSY)) ==
HDA_ICII_VALID) {
if (response)
*response = readl(®s->resp);
return 0;
}
udelay(1);
}
return -ETIMEDOUT;
}
int hda_wait_for_valid(struct hda_regs *regs)
{
return wait_for_response(regs, NULL);
}
static int set_bits(void *port, u32 mask, u32 val)
{
u32 reg32;
int count;
/* Write (val & mask) to port */
clrsetbits_le32(port, mask, val);
/* Wait for readback of register to match what was just written to it */
count = 50;
do {
/* Wait 1ms based on BKDG wait time */
mdelay(1);
reg32 = readl(port) & mask;
} while (reg32 != val && --count);
/* Timeout occurred */
if (!count)
return -ETIMEDOUT;
return 0;
}
int hda_codec_detect(struct hda_regs *regs)
{
uint reg8;
/* Set Bit 0 to 1 to exit reset state (BAR + 0x8)[0] */
if (set_bits(®s->gctl, 1, 1))
goto no_codec;
/* Write back the value once reset bit is set */
writew(readw(®s->gcap), ®s->gcap);
/* Read in Codec location */
reg8 = readb(®s->statests) & 0xf;
if (!reg8)
goto no_codec;
return reg8;
no_codec:
/* Codec Not found - put HDA back in reset */
set_bits(®s->gctl, 1, 0);
log_debug("No codec\n");
return 0;
}
static int find_verb_data(struct udevice *dev, uint id, ofnode *nodep)
{
ofnode parent = dev_read_subnode(dev, "codecs");
ofnode node;
u32 vendor_id, device_id;
ofnode_for_each_subnode(node, parent) {
if (ofnode_read_u32(node, "vendor-id", &vendor_id) ||
ofnode_read_u32(node, "device-id", &device_id)) {
log_debug("Cannot get IDs for '%s'\n",
ofnode_get_name(node));
return -EINVAL;
}
if (id != (vendor_id << 16 | device_id)) {
log_debug("Skip codec node '%s' for %08x\n",
ofnode_get_name(node), id);
continue;
}
log_debug("Found codec node '%s' for %08x\n",
ofnode_get_name(node), id);
*nodep = node;
return 0;
}
return -ENOENT;
}
static int send_verbs(ofnode node, const char *prop_name, struct hda_regs *regs)
{
int ret, verb_size, i;
const u32 *verb;
verb = ofnode_get_property(node, prop_name, &verb_size);
if (verb_size < 0) {
log_debug("No verb data\n");
return -EINVAL;
}
log_debug("verb_size: %d\n", verb_size);
for (i = 0; i < verb_size / sizeof(*verb); i++) {
ret = hda_wait_for_ready(regs);
if (ret) {
log_debug(" codec ready timeout\n");
return ret;
}
writel(fdt32_to_cpu(verb[i]), ®s->cmd);
ret = hda_wait_for_valid(regs);
if (ret) {
log_debug(" codec valid timeout\n");
return ret;
}
}
return 0;
}
static int codec_init(struct udevice *dev, struct hda_regs *regs, uint addr)
{
ofnode node;
uint id;
int ret;
log_debug("Initializing codec #%d\n", addr);
ret = hda_wait_for_ready(regs);
if (ret) {
log_debug(" codec not ready\n");
return ret;
}
/* Read the codec's vendor ID */
writel(addr << AZALIA_CODEC_SHIFT |
AZALIA_OPCODE_READ_PARAM << AZALIA_VERB_SHIFT |
AZALIA_PARAM_VENDOR_ID, ®s->cmd);
ret = hda_wait_for_valid(regs);
if (ret) {
log_debug(" codec not valid\n");
return ret;
}
id = readl(®s->resp);
log_debug("codec vid/did: %08x\n", id);
ret = find_verb_data(dev, id, &node);
if (ret) {
log_debug("No verb (err=%d)\n", ret);
return ret;
}
ret = send_verbs(node, "verbs", regs);
if (ret) {
log_debug("failed to send verbs (err=%d)\n", ret);
return ret;
}
log_debug("verb loaded\n");
return 0;
}
int hda_codecs_init(struct udevice *dev, struct hda_regs *regs, u32 codec_mask)
{
int ret;
int i;
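	/* codec_mask carries one presence bit per codec address (from STATESTS) */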
for (i = 3; i >= 0; i--) {
if (codec_mask & (1 << i)) {
ret = codec_init(dev, regs, i);
if (ret)
return ret;
}
}
ret = send_verbs(dev_ofnode(dev), "beep-verbs", regs);
if (ret) {
log_debug("failed to send beep verbs (err=%d)\n", ret);
return ret;
}
log_debug("beep verbs loaded\n");
return 0;
}
/**
* exec_verb() - Write a verb to the codec
*
* @regs: HDA registers
* @val: Command to write
* @response: Set to response from codec
* @return 0 if OK, -ve on error
*/
static int exec_verb(struct hda_regs *regs, uint val, uint *response)
{
int ret;
ret = hda_wait_for_ready(regs);
if (ret)
return ret;
writel(val, ®s->cmd);
return wait_for_response(regs, response);
}
/**
* get_subnode_info() - Get subnode information
*
* @regs: HDA registers
* @nid: Parent node ID to check
* @num_sub_nodesp: Returns number of subnodes
* @start_sub_node_nidp: Returns start subnode number
* @return 0 if OK, -ve on error
*/
static int get_subnode_info(struct hda_regs *regs, uint nid,
uint *num_sub_nodesp, uint *start_sub_node_nidp)
{
uint response;
int ret;
ret = exec_verb(regs, hda_verb(nid, HDA_VERB_GET_PARAMS,
GET_PARAMS_NODE_COUNT),
&response);
if (ret < 0) {
printf("Audio: Error reading sub-node info %d\n", nid);
return ret;
}
*num_sub_nodesp = (response & NUM_SUB_NODES_M) >> NUM_SUB_NODES_S;
*start_sub_node_nidp = (response & FIRST_SUB_NODE_M) >>
FIRST_SUB_NODE_S;
return 0;
}
/**
* find_beep_node_in_group() - Finds the beeping node
*
* Searches the audio group for a node that supports beeping
*
* @regs: HDA registers
* @group_nid: Group node ID to check
* @return 0 if OK, -ve on error
*/
static uint find_beep_node_in_group(struct hda_regs *regs, uint group_nid)
{
uint node_count = 0;
uint current_nid = 0;
uint response;
uint end_nid;
int ret;
ret = get_subnode_info(regs, group_nid, &node_count, ¤t_nid);
if (ret < 0)
return 0;
end_nid = current_nid + node_count;
while (current_nid < end_nid) {
ret = exec_verb(regs,
hda_verb(current_nid, HDA_VERB_GET_PARAMS,
GET_PARAMS_AUDIO_WIDGET_CAPS),
&response);
if (ret < 0) {
printf("Audio: Error reading widget caps\n");
return 0;
}
if ((response & AUDIO_WIDGET_TYPE_M) >> AUDIO_WIDGET_TYPE_S ==
AUDIO_WIDGET_TYPE_BEEP)
return current_nid;
current_nid++;
}
return 0; /* no beep node found */
}
/**
* audio_group_has_beep_node() - Check if group has a beep node
*
* Checks if the given audio group contains a beep generator
* @regs: HDA registers
* @nid: Node ID to check
* @return 0 if OK, -ve on error
*/
static int audio_group_has_beep_node(struct hda_regs *regs, uint nid)
{
uint response;
int ret;
ret = exec_verb(regs, hda_verb(nid, HDA_VERB_GET_PARAMS,
GET_PARAMS_AUDIO_GROUP_CAPS),
&response);
if (ret < 0) {
printf("Audio: Error reading audio group caps %d\n", nid);
return 0;
}
return !!(response & AUDIO_GROUP_CAPS_BEEP_GEN);
}
/**
* get_hda_beep_nid() - Finds the node ID of the beep node
*
* Finds the nid of the beep node if it exists. Starts at the root node, for
* each sub-node checks if the group contains a beep node. If the group
* contains a beep node, polls each node in the group until it is found.
*
* If the device has a intel,beep-nid property, the value of that is used
* instead.
*
* @dev: Sound device
* @return Node ID >0 if found, -ve error code otherwise
*/
static int get_hda_beep_nid(struct udevice *dev)
{
struct hda_codec_priv *priv = dev_get_priv(dev);
uint current_nid = 0;
uint node_count = 0;
uint end_nid;
int ret;
/* If the field exists, use the beep nid set in the fdt */
ret = dev_read_u32(dev, "intel,beep-nid", ¤t_nid);
if (!ret)
return current_nid;
ret = get_subnode_info(priv->regs, HDA_ROOT_NODE, &node_count,
¤t_nid);
if (ret < 0)
return ret;
end_nid = current_nid + node_count;
while (current_nid < end_nid) {
if (audio_group_has_beep_node(priv->regs, current_nid))
return find_beep_node_in_group(priv->regs,
current_nid);
current_nid++;
}
/* no beep node found */
return -ENOENT;
}
/**
* set_beep_divisor() - Sets the beep divisor to set the pitch
*
* @priv: Device's private data
* @divider: Divider value (0 to disable the beep)
* @return 0 if OK, -ve on error
*/
static int set_beep_divisor(struct hda_codec_priv *priv, uint divider)
{
return exec_verb(priv->regs,
hda_verb(priv->beep_nid, HDA_VERB_SET_BEEP, divider),
NULL);
}
int hda_codec_init(struct udevice *dev)
{
struct hda_codec_priv *priv = dev_get_priv(dev);
ulong base_addr;
base_addr = dm_pci_read_bar32(dev, 0);
log_debug("base = %08lx\n", base_addr);
if (!base_addr)
return -EINVAL;
priv->regs = (struct hda_regs *)base_addr;
return 0;
}
int hda_codec_finish_init(struct udevice *dev)
{
struct hda_codec_priv *priv = dev_get_priv(dev);
int ret;
ret = get_hda_beep_nid(dev);
if (ret <= 0) {
log_warning("Could not find beep NID (err=%d)\n", ret);
return ret ? ret : -ENOENT;
}
priv->beep_nid = ret;
return 0;
}
int hda_codec_start_beep(struct udevice *dev, int frequency_hz)
{
struct hda_codec_priv *priv = dev_get_priv(dev);
uint divider_val;
if (!priv->beep_nid) {
log_err("Failed to find a beep-capable node\n");
return -ENOENT;
}
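	/*
	 * The beep generator divides the 12 kHz base frequency, so e.g. a
	 * 1000 Hz beep needs divider 12; clamp to the 8-bit divider range.
	 */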
if (!frequency_hz)
divider_val = 0; /* off */
else if (frequency_hz > BEEP_FREQ_BASE)
divider_val = 1;
else if (frequency_hz < BEEP_FREQ_BASE / 0xff)
divider_val = 0xff;
else
divider_val = 0xff & (BEEP_FREQ_BASE / frequency_hz);
return set_beep_divisor(priv, divider_val);
}
int hda_codec_stop_beep(struct udevice *dev)
{
struct hda_codec_priv *priv = dev_get_priv(dev);
return set_beep_divisor(priv, 0);
}
static const struct sound_ops hda_codec_ops = {
.setup = hda_codec_finish_init,
.start_beep = hda_codec_start_beep,
.stop_beep = hda_codec_stop_beep,
};
U_BOOT_DRIVER(hda_codec) = {
.name = "hda_codec",
.id = UCLASS_SOUND,
.ops = &hda_codec_ops,
.priv_auto_alloc_size = sizeof(struct hda_codec_priv),
.probe = hda_codec_init,
};
static struct pci_device_id hda_supported[] = {
{ PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_COUGARPOINT_HDA},
{ PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_PANTHERPOINT_HDA},
{ PCI_DEVICE(PCI_VENDOR_ID_INTEL,
PCI_DEVICE_ID_INTEL_WILDCATPOINT_HDA) },
/*
* Note this driver is not necessarily generic, but it attempts to
* support any codec in the hd-audio class
*/
{ PCI_DEVICE_CLASS(PCI_CLASS_MULTIMEDIA_HD_AUDIO, 0xffffff) },
};
U_BOOT_PCI_DEVICE(hda_codec, hda_supported);
| {
"pile_set_name": "Github"
} |
# Changelog
## [1.0.6] - 2019-05-13
**Fix**
- fix #29 tax-invoice header disappearing when more than two cancellation documents are created
## [1.0.5] - 2018-10-05
**Update**
- update
1. Reworked the database read/write process so that customer data survives an uninstall and reinstall
*Note: if you are on installer version 1.0.4 or earlier, you must run the upgrade program to migrate the database to the new version
2. Added support for receipt/tax-invoice documents
3. Increased the displayed length of customer names
4. Added support for entering the characters & < > ' "
5. Adjusted the length limit of the house-number field
6. Added support for foreign buyers, identified by passport number
7. Added support for Thai buyers, identified by national ID card number
## [1.0.4] - 2018-09-17
**Update**
- update: new Bangkok sub-districts (khwaeng), namely
1. Saphan Sung district - Rat Phatthana and Thap Chang sub-districts
2. Phaya Thai district - Phaya Thai sub-district
3. Din Daeng district - Ratchadaphisek sub-district
4. Phra Khanong district - Phra Khanong Tai sub-district
5. Suan Luang district - On Nut and Phatthanakan sub-districts
6. Bang Na district - Bang Na Nuea and Bang Na Tai sub-districts
7. Bang Bon district - Bang Bon Nuea, Bang Bon Tai, Khlong Bang Phran, and Khlong Bang Bon sub-districts
## [1.0.3] - 2018-06-04
**Add**
- fix #24 add SQLite x64, x86 folders
## [1.0.2] - 2018-05-24
**Update**
- fix #22 incorrect address shown when creating a cancellation document
## [1.0.1] - 2018-03-27
**Update**
- Update XMP schema for validation
## [1.0.0] - 2017-06-30
**Added**
- e-Tax Invoice by TeDA - PDF Generator on PC baseline version
| {
"pile_set_name": "Github"
} |
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/compiler/memory-optimizer.h"
#include "src/codegen/interface-descriptors.h"
#include "src/codegen/tick-counter.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/linkage.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/node.h"
#include "src/roots/roots-inl.h"
namespace v8 {
namespace internal {
namespace compiler {
namespace {
bool CanAllocate(const Node* node) {
switch (node->opcode()) {
case IrOpcode::kAbortCSAAssert:
case IrOpcode::kBitcastTaggedToWord:
case IrOpcode::kBitcastWordToTagged:
case IrOpcode::kComment:
case IrOpcode::kDebugBreak:
case IrOpcode::kDeoptimizeIf:
case IrOpcode::kDeoptimizeUnless:
case IrOpcode::kEffectPhi:
case IrOpcode::kIfException:
case IrOpcode::kLoad:
case IrOpcode::kLoadElement:
case IrOpcode::kLoadField:
case IrOpcode::kLoadFromObject:
case IrOpcode::kPoisonedLoad:
case IrOpcode::kProtectedLoad:
case IrOpcode::kProtectedStore:
case IrOpcode::kRetain:
case IrOpcode::kStackPointerGreaterThan:
case IrOpcode::kStaticAssert:
// TODO(tebbi): Store nodes might do a bump-pointer allocation.
// We should introduce a special bump-pointer store node to
// differentiate that.
case IrOpcode::kStore:
case IrOpcode::kStoreElement:
case IrOpcode::kStoreField:
case IrOpcode::kStoreToObject:
case IrOpcode::kTaggedPoisonOnSpeculation:
case IrOpcode::kUnalignedLoad:
case IrOpcode::kUnalignedStore:
case IrOpcode::kUnreachable:
case IrOpcode::kUnsafePointerAdd:
case IrOpcode::kWord32AtomicAdd:
case IrOpcode::kWord32AtomicAnd:
case IrOpcode::kWord32AtomicCompareExchange:
case IrOpcode::kWord32AtomicExchange:
case IrOpcode::kWord32AtomicLoad:
case IrOpcode::kWord32AtomicOr:
case IrOpcode::kWord32AtomicPairAdd:
case IrOpcode::kWord32AtomicPairAnd:
case IrOpcode::kWord32AtomicPairCompareExchange:
case IrOpcode::kWord32AtomicPairExchange:
case IrOpcode::kWord32AtomicPairLoad:
case IrOpcode::kWord32AtomicPairOr:
case IrOpcode::kWord32AtomicPairStore:
case IrOpcode::kWord32AtomicPairSub:
case IrOpcode::kWord32AtomicPairXor:
case IrOpcode::kWord32AtomicStore:
case IrOpcode::kWord32AtomicSub:
case IrOpcode::kWord32AtomicXor:
case IrOpcode::kWord32PoisonOnSpeculation:
case IrOpcode::kWord64AtomicAdd:
case IrOpcode::kWord64AtomicAnd:
case IrOpcode::kWord64AtomicCompareExchange:
case IrOpcode::kWord64AtomicExchange:
case IrOpcode::kWord64AtomicLoad:
case IrOpcode::kWord64AtomicOr:
case IrOpcode::kWord64AtomicStore:
case IrOpcode::kWord64AtomicSub:
case IrOpcode::kWord64AtomicXor:
case IrOpcode::kWord64PoisonOnSpeculation:
return false;
case IrOpcode::kCall:
return !(CallDescriptorOf(node->op())->flags() &
CallDescriptor::kNoAllocate);
default:
break;
}
return true;
}
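// Walks the effect chain backwards from {start} (stopping at {limit}) and
// returns the first node encountered that may allocate, or nullptr if none.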
Node* SearchAllocatingNode(Node* start, Node* limit, Zone* temp_zone) {
ZoneQueue<Node*> queue(temp_zone);
ZoneSet<Node*> visited(temp_zone);
visited.insert(limit);
queue.push(start);
while (!queue.empty()) {
Node* const current = queue.front();
queue.pop();
if (visited.find(current) == visited.end()) {
visited.insert(current);
if (CanAllocate(current)) {
return current;
}
for (int i = 0; i < current->op()->EffectInputCount(); ++i) {
queue.push(NodeProperties::GetEffectInput(current, i));
}
}
}
return nullptr;
}
bool CanLoopAllocate(Node* loop_effect_phi, Zone* temp_zone) {
Node* const control = NodeProperties::GetControlInput(loop_effect_phi);
// Start the effect chain walk from the loop back edges.
for (int i = 1; i < control->InputCount(); ++i) {
if (SearchAllocatingNode(loop_effect_phi->InputAt(i), loop_effect_phi,
temp_zone) != nullptr) {
return true;
}
}
return false;
}
Node* EffectPhiForPhi(Node* phi) {
Node* control = NodeProperties::GetControlInput(phi);
for (Node* use : control->uses()) {
if (use->opcode() == IrOpcode::kEffectPhi) {
return use;
}
}
return nullptr;
}
void WriteBarrierAssertFailed(Node* node, Node* object, const char* name,
Zone* temp_zone) {
std::stringstream str;
str << "MemoryOptimizer could not remove write barrier for node #"
<< node->id() << "\n";
str << " Run mksnapshot with --csa-trap-on-node=" << name << ","
<< node->id() << " to break in CSA code.\n";
Node* object_position = object;
if (object_position->opcode() == IrOpcode::kPhi) {
object_position = EffectPhiForPhi(object_position);
}
Node* allocating_node = nullptr;
if (object_position && object_position->op()->EffectOutputCount() > 0) {
allocating_node = SearchAllocatingNode(node, object_position, temp_zone);
}
if (allocating_node) {
str << "\n There is a potentially allocating node in between:\n";
str << " " << *allocating_node << "\n";
str << " Run mksnapshot with --csa-trap-on-node=" << name << ","
<< allocating_node->id() << " to break there.\n";
if (allocating_node->opcode() == IrOpcode::kCall) {
str << " If this is a never-allocating runtime call, you can add an "
"exception to Runtime::MayAllocate.\n";
}
} else {
str << "\n It seems the store happened to something different than a "
"direct "
"allocation:\n";
str << " " << *object << "\n";
str << " Run mksnapshot with --csa-trap-on-node=" << name << ","
<< object->id() << " to break there.\n";
}
FATAL("%s", str.str().c_str());
}
} // namespace
MemoryOptimizer::MemoryOptimizer(
JSGraph* jsgraph, Zone* zone, PoisoningMitigationLevel poisoning_level,
MemoryLowering::AllocationFolding allocation_folding,
const char* function_debug_name, TickCounter* tick_counter)
: graph_assembler_(jsgraph, zone),
memory_lowering_(jsgraph, zone, &graph_assembler_, poisoning_level,
allocation_folding, WriteBarrierAssertFailed,
function_debug_name),
jsgraph_(jsgraph),
empty_state_(AllocationState::Empty(zone)),
pending_(zone),
tokens_(zone),
zone_(zone),
tick_counter_(tick_counter) {}
void MemoryOptimizer::Optimize() {
EnqueueUses(graph()->start(), empty_state());
while (!tokens_.empty()) {
Token const token = tokens_.front();
tokens_.pop();
VisitNode(token.node, token.state);
}
DCHECK(pending_.empty());
DCHECK(tokens_.empty());
}
void MemoryOptimizer::VisitNode(Node* node, AllocationState const* state) {
tick_counter_->DoTick();
DCHECK(!node->IsDead());
DCHECK_LT(0, node->op()->EffectInputCount());
switch (node->opcode()) {
case IrOpcode::kAllocate:
// Allocate nodes were purged from the graph in effect-control
// linearization.
UNREACHABLE();
case IrOpcode::kAllocateRaw:
return VisitAllocateRaw(node, state);
case IrOpcode::kCall:
return VisitCall(node, state);
case IrOpcode::kLoadFromObject:
return VisitLoadFromObject(node, state);
case IrOpcode::kLoadElement:
return VisitLoadElement(node, state);
case IrOpcode::kLoadField:
return VisitLoadField(node, state);
case IrOpcode::kStoreToObject:
return VisitStoreToObject(node, state);
case IrOpcode::kStoreElement:
return VisitStoreElement(node, state);
case IrOpcode::kStoreField:
return VisitStoreField(node, state);
case IrOpcode::kStore:
return VisitStore(node, state);
default:
if (!CanAllocate(node)) {
// These operations cannot trigger GC.
return VisitOtherEffect(node, state);
}
}
DCHECK_EQ(0, node->op()->EffectOutputCount());
}
bool MemoryOptimizer::AllocationTypeNeedsUpdateToOld(Node* const node,
const Edge edge) {
// Test to see if we need to update the AllocationType.
if (node->opcode() == IrOpcode::kStoreField && edge.index() == 1) {
Node* parent = node->InputAt(0);
if (parent->opcode() == IrOpcode::kAllocateRaw &&
AllocationTypeOf(parent->op()) == AllocationType::kOld) {
return true;
}
}
return false;
}
void MemoryOptimizer::VisitAllocateRaw(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
const AllocateParameters& allocation = AllocateParametersOf(node->op());
AllocationType allocation_type = allocation.allocation_type();
// Propagate tenuring from outer allocations to inner allocations, i.e.
// when we allocate an object in old space and store a newly allocated
// child object into the pretenured object, then the newly allocated
// child object also should get pretenured to old space.
if (allocation_type == AllocationType::kOld) {
for (Edge const edge : node->use_edges()) {
Node* const user = edge.from();
if (user->opcode() == IrOpcode::kStoreField && edge.index() == 0) {
Node* child = user->InputAt(1);
if (child->opcode() == IrOpcode::kAllocateRaw &&
AllocationTypeOf(child->op()) == AllocationType::kYoung) {
NodeProperties::ChangeOp(child, node->op());
break;
}
}
}
} else {
DCHECK_EQ(AllocationType::kYoung, allocation_type);
for (Edge const edge : node->use_edges()) {
Node* const user = edge.from();
if (AllocationTypeNeedsUpdateToOld(user, edge)) {
allocation_type = AllocationType::kOld;
break;
}
}
}
Reduction reduction = memory_lowering()->ReduceAllocateRaw(
node, allocation_type, allocation.allow_large_objects(), &state);
CHECK(reduction.Changed() && reduction.replacement() != node);
// Replace all uses of node and kill the node to make sure we don't leave
// dangling dead uses.
NodeProperties::ReplaceUses(node, reduction.replacement(),
graph_assembler_.effect(),
graph_assembler_.control());
node->Kill();
EnqueueUses(state->effect(), state);
}
void MemoryOptimizer::VisitLoadFromObject(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kLoadFromObject, node->opcode());
memory_lowering()->ReduceLoadFromObject(node);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitStoreToObject(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kStoreToObject, node->opcode());
memory_lowering()->ReduceStoreToObject(node, state);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitLoadElement(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kLoadElement, node->opcode());
memory_lowering()->ReduceLoadElement(node);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitLoadField(Node* node, AllocationState const* state) {
DCHECK_EQ(IrOpcode::kLoadField, node->opcode());
memory_lowering()->ReduceLoadField(node);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitStoreElement(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kStoreElement, node->opcode());
memory_lowering()->ReduceStoreElement(node, state);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitStoreField(Node* node,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kStoreField, node->opcode());
memory_lowering()->ReduceStoreField(node, state);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitStore(Node* node, AllocationState const* state) {
DCHECK_EQ(IrOpcode::kStore, node->opcode());
memory_lowering()->ReduceStore(node, state);
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitCall(Node* node, AllocationState const* state) {
DCHECK_EQ(IrOpcode::kCall, node->opcode());
// If the call can allocate, we start with a fresh state.
if (!(CallDescriptorOf(node->op())->flags() & CallDescriptor::kNoAllocate)) {
state = empty_state();
}
EnqueueUses(node, state);
}
void MemoryOptimizer::VisitOtherEffect(Node* node,
AllocationState const* state) {
EnqueueUses(node, state);
}
MemoryOptimizer::AllocationState const* MemoryOptimizer::MergeStates(
AllocationStates const& states) {
// Check if all states are the same; or at least if all allocation
// states belong to the same allocation group.
AllocationState const* state = states.front();
MemoryLowering::AllocationGroup* group = state->group();
for (size_t i = 1; i < states.size(); ++i) {
if (states[i] != state) state = nullptr;
if (states[i]->group() != group) group = nullptr;
}
if (state == nullptr) {
if (group != nullptr) {
// We cannot fold any more allocations into this group, but we can still
// eliminate write barriers on stores to this group.
// TODO(bmeurer): We could potentially just create a Phi here to merge
// the various tops; but we need to pay special attention not to create
// an unschedulable graph.
state = AllocationState::Closed(group, nullptr, zone());
} else {
// The states are from different allocation groups.
state = empty_state();
}
}
return state;
}
void MemoryOptimizer::EnqueueMerge(Node* node, int index,
AllocationState const* state) {
DCHECK_EQ(IrOpcode::kEffectPhi, node->opcode());
int const input_count = node->InputCount() - 1;
DCHECK_LT(0, input_count);
Node* const control = node->InputAt(input_count);
if (control->opcode() == IrOpcode::kLoop) {
if (index == 0) {
if (CanLoopAllocate(node, zone())) {
// If the loop can allocate, we start with an empty state at the
// beginning.
EnqueueUses(node, empty_state());
} else {
// If the loop cannot allocate, we can just propagate the state from
// before the loop.
EnqueueUses(node, state);
}
} else {
// Do not revisit backedges.
}
} else {
DCHECK_EQ(IrOpcode::kMerge, control->opcode());
// Check if we already know about this pending merge.
NodeId const id = node->id();
auto it = pending_.find(id);
if (it == pending_.end()) {
// Insert a new pending merge.
it = pending_.insert(std::make_pair(id, AllocationStates(zone()))).first;
}
// Add the next input state.
it->second.push_back(state);
// Check if states for all inputs are available by now.
if (it->second.size() == static_cast<size_t>(input_count)) {
// All inputs to this effect merge are done, merge the states given all
// input constraints, drop the pending merge and enqueue uses of the
// EffectPhi {node}.
state = MergeStates(it->second);
EnqueueUses(node, state);
pending_.erase(it);
}
}
}
void MemoryOptimizer::EnqueueUses(Node* node, AllocationState const* state) {
for (Edge const edge : node->use_edges()) {
if (NodeProperties::IsEffectEdge(edge)) {
EnqueueUse(edge.from(), edge.index(), state);
}
}
}
void MemoryOptimizer::EnqueueUse(Node* node, int index,
AllocationState const* state) {
if (node->opcode() == IrOpcode::kEffectPhi) {
// An EffectPhi represents a merge of different effect chains, which
// needs special handling depending on whether the merge is part of a
// loop or just a normal control join.
EnqueueMerge(node, index, state);
} else {
Token token = {node, state};
tokens_.push(token);
}
}
Graph* MemoryOptimizer::graph() const { return jsgraph()->graph(); }
} // namespace compiler
} // namespace internal
} // namespace v8
| {
"pile_set_name": "Github"
} |
'use strict'
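// Widens the authorship columns on Notes and Revisions to TEXT('long')
// (LONGTEXT on MySQL) so large authorship arrays fit; down() reverts to TEXT.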
module.exports = {
up: async function (queryInterface, Sequelize) {
await queryInterface.changeColumn('Notes', 'authorship', { type: Sequelize.TEXT('long') })
await queryInterface.changeColumn('Revisions', 'authorship', { type: Sequelize.TEXT('long') })
},
down: async function (queryInterface, Sequelize) {
await queryInterface.changeColumn('Notes', 'authorship', { type: Sequelize.TEXT })
await queryInterface.changeColumn('Revisions', 'authorship', { type: Sequelize.TEXT })
}
}
| {
"pile_set_name": "Github"
} |
---
title: Implementing a simple HTML AST parsing algorithm
date: 2017-05-23
---
<!-- To make archiving easier, please complete the metadata above and paste the body below -->
<!--
Notes:
0. Use HTTPS for resources (mainly images) referenced in the article
1. You may add a signature at the end of the article, e.g.: by [Kaola](http://www.kaola.com)
2. Better not to use the NOS image host; hotlink protection feels inevitable sooner or later
-->
## Overview
An HTML AST parsing algorithm takes an HTML string and parses it into a JavaScript object.
<!-- more -->
## Sample string
```html
<div>
  <input r-value="{name}" />
  <p>
    <span></span>
    <span style="display:block;">
      描述信息2
      <span>{name}</span>
    </span>
  </p>
</div>
```
## Approach
Overall implementation:
1. Match each token with a regular expression, build a node from it, attach the node to the AST, and cut the matched text off the remaining input
2. The main tokens: open tags such as `<div>` and `<p>`, self-closing tags such as `<input />`, and closing tags such as `</div>` and `</p>`
3. The scope can grow step by step: start with tag names only, then add attributes, then text nodes (textNode)
Code outline:
1. Use an array to hold the tree structure; `children` represents the branches
2. First match a `div` open tag and push it onto the array
3. Match the self-closing `input` tag and put it into `div.children`
4. Match the `p` tag and put it into `div.children`
5. Match `span` and put it into `p.children`
6. Match the closing tag `</span>`, which means later tags belong in `p.children` rather than `span.children`
7. Repeat until no open tags remain (a minimal sketch of this follows below)
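Here is a minimal sketch of this stack-based idea (tag names only, no attributes or text nodes, per step 3 above; the linked demo implements the complete version):
```js
// Parse an HTML string into a nested { tag, children } object.
function parseHTML(html) {
  const root = { tag: 'root', children: [] };
  const stack = [root]; // top of the stack = node whose children we are filling
  // One token: open tag, self-closing tag, or closing tag, e.g. <div>, <input />, </p>
  const tokenRe = /<(\/?)([a-zA-Z][a-zA-Z0-9]*)[^>]*?(\/?)>/;
  let rest = html;
  let m;
  while ((m = tokenRe.exec(rest)) !== null) {
    const [token, close, tag, selfClose] = m;
    rest = rest.slice(m.index + token.length); // cut the matched token off the input
    if (close) {
      stack.pop(); // </tag>: later nodes belong to the parent again
    } else {
      const node = { tag, children: [] };
      stack[stack.length - 1].children.push(node); // attach to the current parent
      if (!selfClose) stack.push(node); // open tag: descend into it
    }
  }
  return root.children[0];
}

// parseHTML('<div><input /><p><span></span></p></div>')
// => { tag: 'div', children: [ { tag: 'input', ... }, { tag: 'p', ... } ] }
```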
## Implementation
A live demo that parses step by step, with a detailed explanation of every step of the code:
[online demo](https://jerryni.github.io/algorithom/demos/html.ast.parser.html)
[github source](https://github.com/jerryni/algorithom/blob/master/demos/html.ast.parser.html)
by [Kaola nrz](https://github.com/jerryni) | {
"pile_set_name": "Github"
} |
/*
* Based on code written by J.T. Conklin <[email protected]>.
* Public domain.
*/
#include <i387/bsd_asm.h>
//__FBSDID("$FreeBSD: src/lib/msun/i387/s_truncf.S,v 1.4 2011/01/07 16:13:12 kib Exp $")
ENTRY(truncf)
pushl %ebp
movl %esp,%ebp
subl $8,%esp
fstcw -4(%ebp) /* store fpu control word */
movw -4(%ebp),%dx
orw $0x0c00,%dx /* round towards zero (truncate) */
movw %dx,-8(%ebp)
fldcw -8(%ebp) /* load modified control word */
flds 8(%ebp) /* round */
frndint
fldcw -4(%ebp) /* restore original control word */
leave
ret
END(truncf)
/* Enable stack protection */
#if defined(__ELF__)
.section .note.GNU-stack,"",%progbits
#endif
| {
"pile_set_name": "Github"
} |
/**
* DataCleaner (community edition)
* Copyright (C) 2014 Free Software Foundation, Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.datacleaner.customcolumn;
import java.util.List;
import org.datacleaner.api.InputRow;
import org.datacleaner.configuration.DataCleanerConfiguration;
import org.datacleaner.configuration.DataCleanerConfigurationImpl;
import org.datacleaner.connection.CsvDatastore;
import org.datacleaner.data.MutableInputColumn;
import org.datacleaner.job.AnalysisJob;
import org.datacleaner.job.builder.AnalysisJobBuilder;
import org.datacleaner.job.builder.TransformerComponentBuilder;
import org.datacleaner.job.runner.AnalysisResultFuture;
import org.datacleaner.job.runner.AnalysisRunner;
import org.datacleaner.job.runner.AnalysisRunnerImpl;
import org.datacleaner.result.ListResult;
import junit.framework.TestCase;
public class CustomColumnTypeTest extends TestCase {
@SuppressWarnings("unchecked")
public void testCustomColumnOutputInJob() throws Throwable {
final DataCleanerConfiguration configuration = new DataCleanerConfigurationImpl();
final AnalysisJob job;
final MutableInputColumn<?> monthObjectColumn;
// build example job
try (AnalysisJobBuilder builder = new AnalysisJobBuilder(configuration)) {
builder.setDatastore(new CsvDatastore("Names", "src/test/resources/month-strings.csv"));
builder.addSourceColumns("month");
final TransformerComponentBuilder<MockConvertToMonthObjectTransformer> convertTransformer =
builder.addTransformer(MockConvertToMonthObjectTransformer.class)
.addInputColumn(builder.getSourceColumnByName("month"));
monthObjectColumn = convertTransformer.getOutputColumns().get(0);
builder.addAnalyzer(MockMonthConsumingAnalyzer.class).addInputColumns(monthObjectColumn);
job = builder.toAnalysisJob();
}
final ListResult<InputRow> result;
// run job
{
final AnalysisRunner runner = new AnalysisRunnerImpl(configuration);
final AnalysisResultFuture resultFuture = runner.run(job);
if (resultFuture.isErrornous()) {
throw resultFuture.getErrors().get(0);
}
result = (ListResult<InputRow>) resultFuture.getResults().get(0);
}
final List<InputRow> list = result.getValues();
assertEquals(9, list.size());
Month value = (Month) list.get(0).getValue(monthObjectColumn);
assertEquals("Month [monthNameFull=January, monthShortCut=JAN, monthAsNumber=1]", value.toString());
assertEquals(1, value.getMonthAsNumber());
value = (Month) list.get(5).getValue(monthObjectColumn);
assertEquals("Month [monthNameFull=December, monthShortCut=DEC, monthAsNumber=12]", value.toString());
assertEquals(12, value.getMonthAsNumber());
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<!--
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
-->
<config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:module:Magento_Config:etc/system_file.xsd">
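<!-- Admin configuration for Instant Purchase under the Sales section: an enable flag and the buy-button label. -->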
<system>
<section id="sales">
<group id="instant_purchase" translate="label" type="text" sortOrder="200" showInDefault="1" showInWebsite="1" showInStore="1">
<label>Instant Purchase</label>
<field id="active" translate="label comment" type="select" sortOrder="1" showInDefault="1" showInWebsite="1" showInStore="1" canRestore="1">
<label>Enabled</label>
<source_model>Magento\Config\Model\Config\Source\Yesno</source_model>
<comment>Payment method with vault and instant purchase support should be enabled.</comment>
</field>
<field id="button_text" translate="label" type="text" sortOrder="1" showInDefault="1" showInWebsite="1" showInStore="1" canRestore="1">
<label>Button Text</label>
</field>
</group>
</section>
</system>
</config>
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "sync/internal_api/public/test/test_internal_components_factory.h"
#include "sync/sessions/sync_session_context.h"
#include "sync/syncable/in_memory_directory_backing_store.h"
#include "sync/syncable/on_disk_directory_backing_store.h"
#include "sync/syncable/invalid_directory_backing_store.h"
#include "sync/test/engine/fake_sync_scheduler.h"
namespace syncer {
TestInternalComponentsFactory::TestInternalComponentsFactory(
const Switches& switches,
StorageOption option,
StorageOption* storage_used)
: switches_(switches),
storage_override_(option),
storage_used_(storage_used) {
}
TestInternalComponentsFactory::~TestInternalComponentsFactory() { }
std::unique_ptr<SyncScheduler> TestInternalComponentsFactory::BuildScheduler(
const std::string& name,
sessions::SyncSessionContext* context,
syncer::CancelationSignal* cancelation_signal) {
return std::unique_ptr<SyncScheduler>(new FakeSyncScheduler());
}
std::unique_ptr<sessions::SyncSessionContext>
TestInternalComponentsFactory::BuildContext(
ServerConnectionManager* connection_manager,
syncable::Directory* directory,
ExtensionsActivity* monitor,
const std::vector<SyncEngineEventListener*>& listeners,
sessions::DebugInfoGetter* debug_info_getter,
ModelTypeRegistry* model_type_registry,
const std::string& invalidator_client_id) {
// Tests don't wire up listeners.
std::vector<SyncEngineEventListener*> empty_listeners;
return std::unique_ptr<sessions::SyncSessionContext>(
new sessions::SyncSessionContext(
connection_manager, directory, monitor, empty_listeners,
debug_info_getter, model_type_registry,
switches_.encryption_method == ENCRYPTION_KEYSTORE,
switches_.pre_commit_updates_policy ==
FORCE_ENABLE_PRE_COMMIT_UPDATE_AVOIDANCE,
invalidator_client_id));
}
std::unique_ptr<syncable::DirectoryBackingStore>
TestInternalComponentsFactory::BuildDirectoryBackingStore(
StorageOption storage,
const std::string& dir_name,
const base::FilePath& backing_filepath) {
if (storage_used_)
*storage_used_ = storage;
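  // Note: |storage| above only records what the caller requested; the store
  // actually built is chosen by the override this factory was constructed with.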
switch (storage_override_) {
case STORAGE_IN_MEMORY:
return std::unique_ptr<syncable::DirectoryBackingStore>(
new syncable::InMemoryDirectoryBackingStore(dir_name));
case STORAGE_ON_DISK:
return std::unique_ptr<syncable::DirectoryBackingStore>(
new syncable::OnDiskDirectoryBackingStore(dir_name,
backing_filepath));
case STORAGE_INVALID:
return std::unique_ptr<syncable::DirectoryBackingStore>(
new syncable::InvalidDirectoryBackingStore());
}
NOTREACHED();
return std::unique_ptr<syncable::DirectoryBackingStore>();
}
InternalComponentsFactory::Switches
TestInternalComponentsFactory::GetSwitches() const {
return switches_;
}
} // namespace syncer
| {
"pile_set_name": "Github"
} |
exports.valid = {
fullName : "John Doe",
age : 47,
state : "Massachusetts",
city : "Boston",
zip : 16417,
married : false,
dozen : 12,
dozenOrBakersDozen : 13,
favoriteEvenNumber : 14,
topThreeFavoriteColors : [ "red", "blue", "green" ],
favoriteSingleDigitWholeNumbers : [ 7 ],
favoriteFiveLetterWord : "coder",
emailAddresses :
[
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ@letters-in-local.org",
"[email protected]",
"&'*+-./=?^_{}[email protected]",
"mixed-1234-in-{+^}[email protected]",
"[email protected]",
"\"quoted\"@sld.com",
"\"\\e\\s\\c\\a\\p\\e\\d\"@sld.com",
"\"[email protected]\"@sld.com",
"\"escaped\\\"quote\"@sld.com",
"\"back\\slash\"@sld.com",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"the-total-length@of-an-entire-address.cannot-be-longer-than-two-hundred-and-fifty-four-characters.and-this-address-is-254-characters-exactly.so-it-should-be-valid.and-im-going-to-add-some-more-words-here.to-increase-the-lenght-blah-blah-blah-blah-bla.org",
"the-character-limit@for-each-part.of-the-domain.is-sixty-three-characters.this-is-exactly-sixty-three-characters-so-it-is-valid-blah-blah.com",
"[email protected]"
],
ipAddresses : [ "127.0.0.1", "24.48.64.2", "192.168.1.1", "209.68.44.3", "2.2.2.2" ]
}
exports.invalid = {
fullName : null,
age : -1,
state : 47,
city : false,
zip : [null],
married : "yes",
dozen : 50,
dozenOrBakersDozen : "over 9000",
favoriteEvenNumber : 15,
topThreeFavoriteColors : [ "red", 5 ],
favoriteSingleDigitWholeNumbers : [ 78, 2, 999 ],
favoriteFiveLetterWord : "codernaut",
emailAddresses : [],
ipAddresses : [ "999.0.099.1", "294.48.64.2346", false, "2221409.64214128.42414.235233", "124124.12412412" ]
}
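// A minimal usage sketch (assuming a JSON Schema draft-4 validator such as tv4;
// the require below is illustrative):
//   var tv4 = require('tv4');
//   tv4.validate(exports.valid, exports.schema);   // => true
//   tv4.validate(exports.invalid, exports.schema); // => false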
exports.schema = { // from cosmic thingy
name : "test",
type : "object",
additionalProperties : false,
required : ["fullName", "age", "zip", "married", "dozen", "dozenOrBakersDozen", "favoriteEvenNumber", "topThreeFavoriteColors", "favoriteSingleDigitWholeNumbers", "favoriteFiveLetterWord", "emailAddresses", "ipAddresses"],
properties :
{
fullName : { type : "string" },
age : { type : "integer", minimum : 0 },
optionalItem : { type : "string" },
state : { type : "string" },
city : { type : "string" },
zip : { type : "integer", minimum : 0, maximum : 99999 },
married : { type : "boolean" },
dozen : { type : "integer", minimum : 12, maximum : 12 },
dozenOrBakersDozen : { type : "integer", minimum : 12, maximum : 13 },
favoriteEvenNumber : { type : "integer", multipleOf : 2 },
topThreeFavoriteColors : { type : "array", minItems : 3, maxItems : 3, uniqueItems : true, items : { type : "string" }},
favoriteSingleDigitWholeNumbers : { type : "array", minItems : 1, maxItems : 10, uniqueItems : true, items : { type : "integer", minimum : 0, maximum : 9 }},
favoriteFiveLetterWord : { type : "string", minLength : 5, maxLength : 5 },
emailAddresses : { type : "array", minItems : 1, uniqueItems : true, items : { type : "string", format : "email" }},
ipAddresses : { type : "array", uniqueItems : true, items : { type : "string", format : "ipv4" }},
}
} | {
"pile_set_name": "Github"
} |
// Copyright Louis Dionne 2013-2017
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
#include "_specs.hpp"
#include <auto/cartesian_product.hpp>
int main() { }
| {
"pile_set_name": "Github"
} |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: [email protected] (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package google.protobuf;
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
// descriptor.proto must be optimized for speed because reflection-based
// algorithms don't work during bootstrapping.
option optimize_for = SPEED;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
repeated FileDescriptorProto file = 1;
}
// Describes a complete .proto file.
message FileDescriptorProto {
optional string name = 1; // file name, relative to root of source tree
optional string package = 2; // e.g. "foo", "foo.bar", etc.
// Names of files imported by this file.
repeated string dependency = 3;
// Indexes of the public imported files in the dependency list above.
repeated int32 public_dependency = 10;
// Indexes of the weak imported files in the dependency list.
// For Google-internal migration only. Do not use.
repeated int32 weak_dependency = 11;
// All top-level definitions in this file.
repeated DescriptorProto message_type = 4;
repeated EnumDescriptorProto enum_type = 5;
repeated ServiceDescriptorProto service = 6;
repeated FieldDescriptorProto extension = 7;
optional FileOptions options = 8;
// This field contains optional information about the original source code.
// You may safely remove this entire field without harming runtime
// functionality of the descriptors -- the information is needed only by
// development tools.
optional SourceCodeInfo source_code_info = 9;
// The syntax of the proto file.
// The supported values are "proto2" and "proto3".
optional string syntax = 12;
}
// Describes a message type.
message DescriptorProto {
optional string name = 1;
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1;
optional int32 end = 2;
}
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
}
// Describes a field within a message.
message FieldDescriptorProto {
enum Type {
// 0 is reserved for errors.
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
TYPE_FIXED64 = 6;
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
TYPE_GROUP = 10; // Tag-delimited aggregate.
TYPE_MESSAGE = 11; // Length-delimited aggregate.
// New in version 2.
TYPE_BYTES = 12;
TYPE_UINT32 = 13;
TYPE_ENUM = 14;
TYPE_SFIXED32 = 15;
TYPE_SFIXED64 = 16;
TYPE_SINT32 = 17; // Uses ZigZag encoding.
TYPE_SINT64 = 18; // Uses ZigZag encoding.
};
enum Label {
// 0 is reserved for errors
LABEL_OPTIONAL = 1;
LABEL_REQUIRED = 2;
LABEL_REPEATED = 3;
// TODO(sanjay): Should we add LABEL_MAP?
};
optional string name = 1;
optional int32 number = 3;
optional Label label = 4;
// If type_name is set, this need not be set. If both this and type_name
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
optional Type type = 5;
// For message and enum types, this is the name of the type. If the name
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
// rules are used to find the type (i.e. first the nested types within this
// message are searched, then within the parent, on up to the root
// namespace).
optional string type_name = 6;
// For extensions, this is the name of the type being extended. It is
// resolved in the same manner as type_name.
optional string extendee = 2;
// For numeric types, contains the original text representation of the value.
// For booleans, "true" or "false".
// For strings, contains the default text contents (not escaped in any way).
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
// TODO(kenton): Base-64 encode?
optional string default_value = 7;
// If set, gives the index of a oneof in the containing type's oneof_decl
// list. This field is a member of that oneof. Extensions of a oneof should
// not set this since the oneof to which they belong will be inferred based
// on the extension range containing the extension's field number.
optional int32 oneof_index = 9;
optional FieldOptions options = 8;
}
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1;
}
// Describes an enum type.
message EnumDescriptorProto {
optional string name = 1;
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
}
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1;
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1;
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
// Describes a method of a service.
message MethodDescriptorProto {
optional string name = 1;
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2;
optional string output_type = 3;
optional MethodOptions options = 4;
// Identifies if client streams multiple client messages
optional bool client_streaming = 5 [default=false];
// Identifies if server streams multiple server messages
optional bool server_streaming = 6 [default=false];
}
// ===================================================================
// Options
// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
// organization, or for experimental options, use field numbers 50000
// through 99999. It is up to you to ensure that you do not use the
// same number for multiple options.
// * For options which will be published and used publicly by multiple
// independent entities, e-mail [email protected]
// to reserve extension numbers. Simply provide your project name (e.g.
// Objective-C plugin) and your project website (if available) -- there's no need
// to explain how you intend to use them. Usually you only need one extension
// number. You can declare multiple options with only one extension number by
// putting them in a sub-message. See the Custom Options section of the docs
// for examples:
// https://developers.google.com/protocol-buffers/docs/proto#options
// If this turns out to be popular, a web service will be set up
// to automatically assign option numbers.
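// For illustration, a custom file option could be declared like this
// (hypothetical names; shown as a comment so this file stays self-contained):
//   import "google/protobuf/descriptor.proto";
//   extend google.protobuf.FileOptions {
//     optional string my_file_option = 50000;
//   }
//   option (my_file_option) = "Hello world!";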
message FileOptions {
// Sets the Java package where classes generated from this .proto will be
// placed. By default, the proto package is used, but this is often
// inappropriate because proto packages do not normally start with backwards
// domain names.
optional string java_package = 1;
// If set, all the classes from the .proto file are wrapped in a single
// outer class with the given name. This applies to both Proto1
// (equivalent to the old "--one_java_file" option) and Proto2 (where
// a .proto always translates to a single class, but you may want to
// explicitly choose the class name).
optional string java_outer_classname = 8;
// If set true, then the Java code generator will generate a separate .java
// file for each top-level message, enum, and service defined in the .proto
// file. Thus, these types will *not* be nested inside the outer class
// named by java_outer_classname. However, the outer class will still be
// generated to contain the file's getDescriptor() method as well as any
// top-level extensions defined in the file.
optional bool java_multiple_files = 10 [default=false];
// If set true, then the Java code generator will generate equals() and
// hashCode() methods for all messages defined in the .proto file.
// - In the full runtime, this is purely a speed optimization, as the
// AbstractMessage base class includes reflection-based implementations of
// these methods.
// - In the lite runtime, setting this option changes the semantics of
// equals() and hashCode() to more closely match those of the full runtime;
// the generated methods compute their results based on field values rather
// than object identity. (Implementations should not assume that hashcodes
// will be consistent across runtimes or versions of the protocol compiler.)
optional bool java_generate_equals_and_hash = 20 [default=false];
// If set true, then the Java2 code generator will generate code that
// throws an exception whenever an attempt is made to assign a non-UTF-8
// byte sequence to a string field.
// Message reflection will do the same.
// However, an extension field still accepts non-UTF-8 byte sequences.
  // This option has no effect when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default=false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
// etc.
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default=SPEED];
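  // For example, a size-sensitive .proto file would set:
  //   option optimize_for = CODE_SIZE;
  // (this file itself sets SPEED near the top).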
// Sets the Go package where structs generated from this .proto will be
// placed. If omitted, the Go package will be derived from the following:
// - The basename of the package import path, if provided.
// - Otherwise, the package statement in the .proto file, if present.
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
// Generic services were the only kind of service generation supported by
// early versions of google.protobuf.
//
// Generic services are now considered deprecated in favor of using plugins
// that generate code specific to your particular RPC system. Therefore,
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default=false];
optional bool java_generic_services = 17 [default=false];
optional bool py_generic_services = 18 [default=false];
// Is this file deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for everything in the file, or it will be completely ignored; in the very
// least, this is a formalization for deprecating files.
optional bool deprecated = 23 [default=false];
// Enables the use of arenas for the proto messages in this file. This applies
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MessageOptions {
// Set true to use the old proto1 MessageSet wire format for extensions.
// This is provided for backwards-compatibility with the MessageSet wire
// format. You should not use this for any other reason: It's less
// efficient, has fewer features, and is more complicated.
//
// The message must be defined exactly as follows:
// message Foo {
// option message_set_wire_format = true;
// extensions 4 to max;
// }
// Note that the message cannot have any defined fields; MessageSets only
// have extensions.
//
// All extensions of your type must be singular messages; e.g. they cannot
// be int32s, enums, or repeated messages.
//
// Because this is an option, the above two restrictions are not enforced by
// the protocol compiler.
optional bool message_set_wire_format = 1 [default=false];
// Disables the generation of the standard "descriptor()" accessor, which can
// conflict with a field of the same name. This is meant to make migration
// from proto1 easier; new code should avoid fields named "descriptor".
optional bool no_standard_descriptor_accessor = 2 [default=false];
// Is this message deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the message, or it will be completely ignored; in the very least,
// this is a formalization for deprecating messages.
optional bool deprecated = 3 [default=false];
// Whether the message is an automatically generated map entry type for the
// maps field.
//
// For maps fields:
// map<KeyType, ValueType> map_field = 1;
// The parsed descriptor looks like:
// message MapFieldEntry {
// option map_entry = true;
// optional KeyType key = 1;
// optional ValueType value = 2;
// }
// repeated MapFieldEntry map_field = 1;
//
// Implementations may choose not to generate the map_entry=true message, but
// use a native map in the target language to hold the keys and values.
  // The reflection APIs in such implementations still need to work as
// if the field is a repeated message field.
//
// NOTE: Do not set the option in .proto files. Always use the maps syntax
// instead. The option should only be implicitly set by the proto compiler
// parser.
optional bool map_entry = 7;
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message FieldOptions {
// The ctype option instructs the C++ code generator to use a different
// representation of the field than it normally would. See the specific
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
// a single length-delimited blob.
optional bool packed = 2;
// Should this field be parsed lazily? Lazy applies only to message-type
// fields. It means that when the outer message is initially parsed, the
// inner message's contents will not be parsed but instead stored in encoded
// form. The inner message will actually be parsed when it is first accessed.
//
// This is only a hint. Implementations are free to choose whether to use
// eager or lazy parsing regardless of the value of this option. However,
// setting this option true suggests that the protocol author believes that
// using lazy parsing on this field is worth the additional bookkeeping
// overhead typically needed to implement it.
//
// This option does not affect the public interface of any generated code;
// all method signatures remain the same. Furthermore, thread-safety of the
// interface is not affected by this option; const methods remain safe to
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
  // a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
// This is necessary because otherwise the inner message would have to be
// parsed in order to perform the check, defeating the purpose of lazy
// parsing. An implementation which chooses not to check required fields
// must be consistent about it. That is, for any particular sub-message, the
// implementation must either *always* check its required fields, or *never*
// check its required fields, regardless of whether or not the message has
// been parsed.
optional bool lazy = 5 [default=false];
// Is this field deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for accessors, or it will be completely ignored; in the very least, this
// is a formalization for deprecating fields.
optional bool deprecated = 3 [default=false];
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumOptions {
// Set this option to true to allow mapping different tag names to the same
// value.
optional bool allow_alias = 2;
// Is this enum deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum, or it will be completely ignored; in the very least, this
// is a formalization for deprecating enums.
optional bool deprecated = 3 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumValueOptions {
// Is this enum value deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum value, or it will be completely ignored; in the very least,
// this is a formalization for deprecating enum values.
optional bool deprecated = 1 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message ServiceOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this service deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the service, or it will be completely ignored; in the very least,
// this is a formalization for deprecating services.
optional bool deprecated = 33 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MethodOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this method deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the method, or it will be completely ignored; in the very least,
// this is a formalization for deprecating methods.
optional bool deprecated = 33 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// A message representing a option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
// The name of the uninterpreted option. Each string represents a segment in
// a dot-separated name. is_extension is true iff a segment represents an
// extension (denoted with parentheses in options specs in .proto files).
// E.g., { ["foo", false], ["bar.baz", true], ["qux", false] } represents
// "foo.(bar.baz).qux".
message NamePart {
required string name_part = 1;
required bool is_extension = 2;
}
repeated NamePart name = 2;
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3;
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
optional bytes string_value = 7;
optional string aggregate_value = 8;
}
// ===================================================================
// Optional source code info
// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
// A Location identifies a piece of source code in a .proto file which
// corresponds to a particular definition. This information is intended
// to be useful to IDEs, code indexers, documentation generators, and similar
// tools.
//
// For example, say we have a file like:
// message Foo {
// optional string foo = 1;
// }
// Let's look at just the field definition:
// optional string foo = 1;
// ^ ^^ ^^ ^ ^^^
// a bc de f ghi
// We have the following locations:
// span path represents
// [a,i) [ 4, 0, 2, 0 ] The whole field definition.
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
//
// Notes:
// - A location may refer to a repeated field itself (i.e. not to any
// particular index within it). This is used whenever a set of elements are
// logically enclosed in a single code segment. For example, an entire
// extend block (possibly containing multiple extension definitions) will
// have an outer location whose path refers to the "extensions" repeated
// field without an index.
// - Multiple locations may have the same path. This happens when a single
// logical declaration is spread out across multiple places. The most
// obvious example is the "extend" block again -- there may be multiple
// extend blocks in the same scope, each of which will have the same path.
// - A location's span is not always a subset of its parent's span. For
// example, the "extendee" of an extension declaration appears at the
// beginning of the "extend" block and is shared by all extensions within
// the block.
// - Just because a location's span is a subset of some other location's span
  // does not mean that it is a descendant. For example, a "group" defines
// both a type and a field in a single declaration. Thus, the locations
// corresponding to the type and field and their components will overlap.
// - Code which tries to interpret locations should probably be designed to
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
//
// Each element is a field number or an index. They form a path from
    // the root FileDescriptorProto to the place where the definition appears. For
// example, this path:
// [ 4, 3, 2, 7, 1 ]
// refers to:
// file.message_type(3) // 4, 3
// .field(7) // 2, 7
// .name() // 1
// This is because FileDescriptorProto.message_type has field number 4:
// repeated DescriptorProto message_type = 4;
// and DescriptorProto.field has field number 2:
// repeated FieldDescriptorProto field = 2;
// and FieldDescriptorProto.name has field number 1:
// optional string name = 1;
//
// Thus, the above path gives the location of a field name. If we removed
// the last element:
// [ 4, 3, 2, 7 ]
// this path refers to the whole field declaration (from the beginning
// of the label to the terminating semicolon).
repeated int32 path = 1 [packed=true];
// Always has exactly three or four elements: start line, start column,
// end line (optional, otherwise assumed same as start line), end column.
// These are packed into a single field for efficiency. Note that line
// and column numbers are zero-based -- typically you will want to add
// 1 to each before displaying to a user.
repeated int32 span = 2 [packed=true];
// If this SourceCodeInfo represents a complete declaration, these are any
// comments appearing before and after the declaration which appear to be
// attached to the declaration.
//
// A series of line comments appearing on consecutive lines, with no other
// tokens appearing on those lines, will be treated as a single comment.
//
// Only the comment content is provided; comment markers (e.g. //) are
// stripped out. For block comments, leading whitespace and an asterisk
// will be stripped from the beginning of each line other than the first.
// Newlines are included in the output.
//
// Examples:
//
// optional int32 foo = 1; // Comment attached to foo.
// // Comment attached to bar.
// optional int32 bar = 2;
//
// optional string baz = 3;
// // Comment attached to baz.
// // Another line attached to baz.
//
// // Comment attached to qux.
// //
// // Another line attached to qux.
// optional double qux = 4;
//
// optional string corge = 5;
// /* Block comment attached
// * to corge. Leading asterisks
// * will be removed. */
// /* Block comment attached to
// * grault. */
// optional int32 grault = 6;
optional string leading_comments = 3;
optional string trailing_comments = 4;
}
}
| {
"pile_set_name": "Github"
} |
#!/bin/sh
LC_ALL=C
export LC_ALL
stat=0
so="$1"
def="$2"
PATTERN="_ANCHOR_"
TMPFILE="$(mktemp .nm-check-exports.XXXXXX)"
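# The helpers below extract exported symbols from the shared object and from
# the linker version script, then wrap each symbol in the PATTERN sentinel so
# that grep -F matches whole symbols only, never accidental substrings.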
get_syms() {
${NM:-nm} "$1" |
sed -n 's/^[[:xdigit:]]\+ [DT] //p' |
sort
}
get_syms_from_def() {
sed -n 's/^\t\(\([_a-zA-Z0-9]\+\)\|#\s*\([_a-zA-Z0-9]\+@@\?[_a-zA-Z0-9]\+\)\);$/\2\3/p' "$1" |
sort
}
anchor() {
sed "s/.*/$PATTERN\0$PATTERN/"
}
unanchor() {
sed "s/^$PATTERN\(.*\)$PATTERN\$/\1/"
}
get_syms "$so" | anchor > "$TMPFILE"
WRONG="$(get_syms_from_def "$def" | anchor | grep -F -f - "$TMPFILE" -v)"
RESULT=$?
if [ $RESULT -eq 0 ]; then
stat=1
echo ">>library \"$so\" exports symbols that are not in linker script \"$def\":"
echo "$WRONG" | unanchor | nl
fi
get_syms_from_def "$def" | anchor > "$TMPFILE"
WRONG="$(get_syms "$so" | anchor | grep -F -f - "$TMPFILE" -v)"
RESULT=$?
if [ $RESULT -eq 0 ]; then
stat=1
echo ">>linker script \"$def\" contains symbols that are not exported by library \"$so\":"
echo "$WRONG" | unanchor | nl
fi
rm -f "$TMPFILE"
exit $stat
| {
"pile_set_name": "Github"
} |
/*****************************************************************************
*
* ReoGrid - .NET Spreadsheet Control
*
* http://reogrid.net/
*
* THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
* KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
* PURPOSE.
*
* Author: Jing <lujing at unvell.com>
*
* Copyright (c) 2012-2016 Jing <lujing at unvell.com>
* Copyright (c) 2012-2016 unvell.com, all rights reserved.
*
****************************************************************************/
using System.Text;
using System.IO;
using unvell.Common;
using unvell.ReoGrid.XML;
using unvell.ReoGrid.Core;
using unvell.ReoGrid.Graphics;
namespace unvell.ReoGrid.IO
{
internal class RGHTMLExporter
{
/// <summary>
/// Export grid as html5 into specified stream
/// </summary>
/// <param name="s">Stream contains the exported HTML5 content</param>
/// <param name="sheet">Instance of worksheet</param>
/// <param name="pageTitle">Custom page title of HTML page</param>
/// <param name="htmlHeader">True to export default HTML header tag; false to export table content only</param>
public static void Export(Stream s, Worksheet sheet, string pageTitle, bool htmlHeader = true)
{
using (StreamWriter sw = new StreamWriter(s))
{
StringBuilder sb = new StringBuilder();
Cell cell;
if (htmlHeader)
{
sw.WriteLine("<!DOCTYPE html>");
sw.WriteLine("<html>");
sw.WriteLine("<head>");
sw.WriteLine(" <title>{0}</title>", pageTitle);
sw.WriteLine(" <meta content=\"text/html; charset=UTF-8\">");
sw.WriteLine("</head>");
sw.WriteLine("<body>");
}
sw.WriteLine(" <table style='border-collapse:collapse;border:none;'>");
int maxRow = sheet.MaxContentRow;
int maxCol = sheet.MaxContentCol;
for (int r = 0; r <= maxRow; r++)
{
var row = sheet.RetrieveRowHeader(r);
sw.WriteLine(string.Format(" <tr style='height:{0}px;'>", row.InnerHeight));
for (int c = 0; c <= maxCol; )
{
var col = sheet.RetrieveColumnHeader(c);
cell = sheet.GetCell(r, c);
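 // A cell with a non-positive span is covered by another merged
 // cell, so it produces no <td> of its own.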
 if (cell != null && (cell.Colspan <= 0 || cell.Rowspan <= 0))
{
c++;
continue;
}
sb.Length = 0;
sb.Append(" <td");
if (cell != null && cell.Rowspan > 1)
{
sb.Append(" rowspan='" + cell.Rowspan + "'");
}
if (cell != null && cell.Colspan > 1)
{
sb.Append(" colspan='" + cell.Colspan + "'");
}
sb.AppendFormat(" style='width:{0}px;", cell == null ? col.Width : cell.Width);
bool halignOutputted = false;
if (cell != null)
{
// render horizontal align
if (cell.RenderHorAlign == ReoGridRenderHorAlign.Right)
{
WriteHtmlStyle(sb, "text-align", "right");
halignOutputted = true;
}
else if (cell.RenderHorAlign == ReoGridRenderHorAlign.Center)
{
WriteHtmlStyle(sb, "text-align", "center");
halignOutputted = true;
}
}
WorksheetRangeStyle style = sheet.GetCellStyles(r, c);
if (style != null)
{
// back color
if (style.HasStyle(PlainStyleFlag.BackColor) && style.BackColor != SolidColor.White)
{
WriteHtmlStyle(sb, "background-color", TextFormatHelper.EncodeColor(style.BackColor));
}
// text color
if (style.HasStyle(PlainStyleFlag.TextColor) && style.TextColor != SolidColor.Black)
{
WriteHtmlStyle(sb, "color", TextFormatHelper.EncodeColor(style.TextColor));
}
// font size
if (style.HasStyle(PlainStyleFlag.FontSize))
{
WriteHtmlStyle(sb, "font-size", style.FontSize.ToString() +"pt");
}
// horizontal align
if (!halignOutputted && style.HasStyle(PlainStyleFlag.HorizontalAlign))
{
WriteHtmlStyle(sb, "text-align", XmlFileFormatHelper.EncodeHorizontalAlign(style.HAlign));
}
// vertical align
if (style.HasStyle(PlainStyleFlag.VerticalAlign))
{
WriteHtmlStyle(sb, "vertical-align", XmlFileFormatHelper.EncodeVerticalAlign(style.VAlign));
}
}
RangeBorderInfoSet rbi = sheet.GetRangeBorders(cell == null ? new RangePosition(r, c, 1, 1)
: new RangePosition(cell.InternalRow, cell.InternalCol, cell.Rowspan, cell.Colspan));
if (!rbi.Top.IsEmpty) WriteCellBorder(sb, "border-top", rbi.Top);
if (!rbi.Left.IsEmpty) WriteCellBorder(sb, "border-left", rbi.Left);
if (!rbi.Right.IsEmpty) WriteCellBorder(sb, "border-right", rbi.Right);
if (!rbi.Bottom.IsEmpty) WriteCellBorder(sb, "border-bottom", rbi.Bottom);
sb.Append("'>");
sw.WriteLine(sb.ToString());
//cell = Grid.GetCell(r, c);
string text = null;
if (cell != null)
{
 text = string.IsNullOrEmpty(cell.DisplayText) ? "&nbsp;" :
#if !CLIENT_PROFILE
HtmlEncode(cell.DisplayText)
#else
cell.DisplayText
#endif // CLIENT_PROFILE
;
}
else
text = " ";
sw.WriteLine(text);
sw.WriteLine(" </td>");
c += cell == null ? 1 : cell.Colspan;
}
sw.WriteLine(" </tr>");
}
sw.WriteLine(" </table>");
if (htmlHeader)
{
sw.WriteLine("</body>");
sw.WriteLine("</html>");
}
}
}
/// <summary>
/// HTML-encodes a string and returns the encoded string.
/// </summary>
/// <remarks>
/// http://weblog.west-wind.com/posts/2009/Feb/05/Html-and-Uri-String-Encoding-without-SystemWeb
/// </remarks>
/// <param name="text">The text string to encode.</param>
/// <returns>The HTML-encoded text.</returns>
public static string HtmlEncode(string text)
{
if (text == null)
return null;
StringBuilder sb = new StringBuilder(text.Length + (int)System.Math.Ceiling(text.Length * 0.3f));
int len = text.Length;
for (int i = 0; i < len; i++)
{
switch (text[i])
{
 case '<':
 sb.Append("&lt;");
 break;
 case '>':
 sb.Append("&gt;");
 break;
 case '"':
 sb.Append("&quot;");
 break;
 case '\'':
 sb.Append("&#39;");
 break;
 case '&':
 sb.Append("&amp;");
 break;
default:
//if (text[i] > 159)
//{
// // decimal numeric entity
// sb.Append("&#");
// sb.Append(((int)text[i]).ToString(System.Globalization.CultureInfo.InvariantCulture));
// sb.Append(";");
//}
//else
sb.Append(text[i]);
break;
}
}
return sb.ToString();
}
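 // Usage sketch: HtmlEncode("a < b & \"c\"") returns "a &lt; b &amp; &quot;c&quot;".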
private static void WriteHtmlStyle(StringBuilder sb, string name, string value)
{
sb.AppendFormat("{0}:{1};", name, value);
}
private static void WriteCellBorder(StringBuilder sb, string name, RangeBorderStyle borderStyle)
{
WriteHtmlStyle(sb, name, string.Format("{0} {1}",
ToHTMLBorderLineStyle(borderStyle.Style), TextFormatHelper.EncodeColor(borderStyle.Color)));
}
private static string ToHTMLBorderLineStyle(BorderLineStyle borderLineStyle)
{
switch (borderLineStyle)
{
default:
case BorderLineStyle.Solid:
return "solid 1px";
case BorderLineStyle.Dashed:
case BorderLineStyle.Dashed2:
case BorderLineStyle.DashDotDot:
case BorderLineStyle.DashDot:
return "dashed 1px";
case BorderLineStyle.Dotted:
return "dotted 1px";
case BorderLineStyle.BoldSolid:
return "solid 2px";
case BorderLineStyle.BoldDashed:
case BorderLineStyle.BoldDashDot:
case BorderLineStyle.BoldDashDotDot:
return "dashed 2px";
case BorderLineStyle.BoldDotted:
return "dotted 2px";
case BorderLineStyle.BoldSolidStrong:
return "solid 3px";
}
}
}
}
| {
"pile_set_name": "Github"
} |
//! An application to run property tests for `bindgen` with _fuzzed_ C headers
//! using `quickcheck`
//!
//! ## Usage
//!
//! Print help
//! ```bash
//! $ cargo run --bin=quickchecking -- -h
//! ```
//!
//! Run with default values
//! ```bash
//! $ cargo run --bin=quickchecking
//! ```
//!
#![deny(missing_docs)]
extern crate clap;
extern crate quickchecking;
use clap::{App, Arg};
use std::path::Path;
// Validate CLI argument input for generation range.
fn validate_generate_range(v: String) -> Result<(), String> {
match v.parse::<usize>() {
Ok(_) => Ok(()),
Err(_) => Err(String::from(
"Generate range could not be converted to a usize.",
)),
}
}
// Validate CLI argument input for tests count.
fn validate_tests_count(v: String) -> Result<(), String> {
match v.parse::<usize>() {
Ok(_) => Ok(()),
Err(_) => Err(String::from(
"Tests count could not be converted to a usize.",
)),
}
}
// Validate CLI argument input for fuzzed headers output path.
fn validate_path(v: String) -> Result<(), String> {
    if Path::new(&v).is_dir() {
        Ok(())
    } else {
        Err(String::from("Provided directory path does not exist."))
    }
}
fn main() {
let matches = App::new("quickchecking")
.version("0.2.0")
.about(
"Bindgen property tests with quickcheck. \
Generate random valid C code and pass it to the \
csmith/predicate.py script",
)
.arg(
Arg::with_name("path")
.short("p")
.long("path")
.value_name("PATH")
.help(
"Optional. Preserve generated headers for inspection, \
provide directory path for header output. [default: None] ",
)
.takes_value(true)
.validator(validate_path),
)
.arg(
Arg::with_name("range")
.short("r")
.long("range")
.value_name("RANGE")
.help(
"Sets the range quickcheck uses during generation. \
Corresponds to things like arbitrary usize and \
arbitrary vector length. This number doesn't have \
to grow much for execution time to increase \
significantly.",
)
.takes_value(true)
.default_value("32")
.validator(validate_generate_range),
)
.arg(
Arg::with_name("count")
.short("c")
.long("count")
.value_name("COUNT")
.help(
"Count / number of tests to run. Running a fuzzed \
header through the predicate.py script can take a \
long time, especially if the generation range is \
large. Increase this number if you're willing to \
wait a while.",
)
.takes_value(true)
.default_value("2")
.validator(validate_tests_count),
)
.get_matches();
let output_path: Option<&str> = matches.value_of("path");
let generate_range: usize = matches.value_of("range").unwrap().parse::<usize>().unwrap();
let tests: usize = matches.value_of("count").unwrap().parse::<usize>().unwrap();
quickchecking::test_bindgen(generate_range, tests, output_path)
}
| {
"pile_set_name": "Github"
} |
// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "internal_macros.h"
#ifdef BENCHMARK_OS_WINDOWS
#include <shlwapi.h>
#undef StrCat // Don't let StrCat in string_util.h be renamed to lstrcatA
#include <versionhelpers.h>
#include <windows.h>
#else
#include <fcntl.h>
#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
#endif
#include <sys/time.h>
#include <sys/types.h> // this header must be included before 'sys/sysctl.h' to avoid compilation error on FreeBSD
#include <unistd.h>
#if defined BENCHMARK_OS_FREEBSD || defined BENCHMARK_OS_MACOSX || \
defined BENCHMARK_OS_NETBSD || defined BENCHMARK_OS_OPENBSD
#define BENCHMARK_HAS_SYSCTL
#include <sys/sysctl.h>
#endif
#endif
#if defined(BENCHMARK_OS_SOLARIS)
#include <kstat.h>
#endif
#include <algorithm>
#include <array>
#include <bitset>
#include <cerrno>
#include <climits>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <iostream>
#include <iterator>
#include <limits>
#include <memory>
#include <sstream>
#include "check.h"
#include "cycleclock.h"
#include "internal_macros.h"
#include "log.h"
#include "sleep.h"
#include "string_util.h"
namespace benchmark {
namespace {
void PrintImp(std::ostream& out) { out << std::endl; }
template <class First, class... Rest>
void PrintImp(std::ostream& out, First&& f, Rest&&... rest) {
out << std::forward<First>(f);
PrintImp(out, std::forward<Rest>(rest)...);
}
template <class... Args>
BENCHMARK_NORETURN void PrintErrorAndDie(Args&&... args) {
PrintImp(std::cerr, std::forward<Args>(args)...);
std::exit(EXIT_FAILURE);
}
#ifdef BENCHMARK_HAS_SYSCTL
/// ValueUnion - A type used to correctly alias the byte-for-byte output of
/// `sysctl` with the result type it's to be interpreted as.
struct ValueUnion {
union DataT {
uint32_t uint32_value;
uint64_t uint64_value;
// For correct aliasing of union members from bytes.
char bytes[8];
};
using DataPtr = std::unique_ptr<DataT, decltype(&std::free)>;
// The size of the data union member + its trailing array size.
size_t Size;
DataPtr Buff;
public:
ValueUnion() : Size(0), Buff(nullptr, &std::free) {}
explicit ValueUnion(size_t BuffSize)
: Size(sizeof(DataT) + BuffSize),
Buff(::new (std::malloc(Size)) DataT(), &std::free) {}
ValueUnion(ValueUnion&& other) = default;
explicit operator bool() const { return bool(Buff); }
char* data() const { return Buff->bytes; }
std::string GetAsString() const { return std::string(data()); }
int64_t GetAsInteger() const {
if (Size == sizeof(Buff->uint32_value))
return static_cast<int32_t>(Buff->uint32_value);
else if (Size == sizeof(Buff->uint64_value))
return static_cast<int64_t>(Buff->uint64_value);
BENCHMARK_UNREACHABLE();
}
uint64_t GetAsUnsigned() const {
if (Size == sizeof(Buff->uint32_value))
return Buff->uint32_value;
else if (Size == sizeof(Buff->uint64_value))
return Buff->uint64_value;
BENCHMARK_UNREACHABLE();
}
template <class T, int N>
std::array<T, N> GetAsArray() {
const int ArrSize = sizeof(T) * N;
CHECK_LE(ArrSize, Size);
std::array<T, N> Arr;
std::memcpy(Arr.data(), data(), ArrSize);
return Arr;
}
};
ValueUnion GetSysctlImp(std::string const& Name) {
#if defined BENCHMARK_OS_OPENBSD
int mib[2];
mib[0] = CTL_HW;
if ((Name == "hw.ncpu") || (Name == "hw.cpuspeed")){
ValueUnion buff(sizeof(int));
if (Name == "hw.ncpu") {
mib[1] = HW_NCPU;
} else {
mib[1] = HW_CPUSPEED;
}
if (sysctl(mib, 2, buff.data(), &buff.Size, nullptr, 0) == -1) {
return ValueUnion();
}
return buff;
}
return ValueUnion();
#else
size_t CurBuffSize = 0;
if (sysctlbyname(Name.c_str(), nullptr, &CurBuffSize, nullptr, 0) == -1)
return ValueUnion();
ValueUnion buff(CurBuffSize);
if (sysctlbyname(Name.c_str(), buff.data(), &buff.Size, nullptr, 0) == 0)
return buff;
return ValueUnion();
#endif
}
BENCHMARK_MAYBE_UNUSED
bool GetSysctl(std::string const& Name, std::string* Out) {
Out->clear();
auto Buff = GetSysctlImp(Name);
if (!Buff) return false;
Out->assign(Buff.data());
return true;
}
template <class Tp,
class = typename std::enable_if<std::is_integral<Tp>::value>::type>
bool GetSysctl(std::string const& Name, Tp* Out) {
*Out = 0;
auto Buff = GetSysctlImp(Name);
if (!Buff) return false;
*Out = static_cast<Tp>(Buff.GetAsUnsigned());
return true;
}
template <class Tp, size_t N>
bool GetSysctl(std::string const& Name, std::array<Tp, N>* Out) {
auto Buff = GetSysctlImp(Name);
if (!Buff) return false;
*Out = Buff.GetAsArray<Tp, N>();
return true;
}
#endif
template <class ArgT>
bool ReadFromFile(std::string const& fname, ArgT* arg) {
*arg = ArgT();
std::ifstream f(fname.c_str());
if (!f.is_open()) return false;
f >> *arg;
return f.good();
}
bool CpuScalingEnabled(int num_cpus) {
// We don't have a valid CPU count, so don't even bother.
if (num_cpus <= 0) return false;
#ifndef BENCHMARK_OS_WINDOWS
// On Linux, the CPUfreq subsystem exposes CPU information as files on the
// local file system. If reading the exported files fails, then we may not be
// running on Linux, so we silently ignore all the read errors.
std::string res;
for (int cpu = 0; cpu < num_cpus; ++cpu) {
std::string governor_file =
StrCat("/sys/devices/system/cpu/cpu", cpu, "/cpufreq/scaling_governor");
if (ReadFromFile(governor_file, &res) && res != "performance") return true;
}
#endif
return false;
}
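// Counts set bits in a kernel cpumask string such as "ff,00000003" (comma-
// separated hex words, as exposed in sysfs shared_cpu_map files); that example
// mask has 10 bits set.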
int CountSetBitsInCPUMap(std::string Val) {
auto CountBits = [](std::string Part) {
using CPUMask = std::bitset<sizeof(std::uintptr_t) * CHAR_BIT>;
Part = "0x" + Part;
CPUMask Mask(std::stoul(Part, nullptr, 16));
return static_cast<int>(Mask.count());
};
size_t Pos;
int total = 0;
while ((Pos = Val.find(',')) != std::string::npos) {
total += CountBits(Val.substr(0, Pos));
Val = Val.substr(Pos + 1);
}
if (!Val.empty()) {
total += CountBits(Val);
}
return total;
}
BENCHMARK_MAYBE_UNUSED
std::vector<CPUInfo::CacheInfo> GetCacheSizesFromKVFS() {
std::vector<CPUInfo::CacheInfo> res;
std::string dir = "/sys/devices/system/cpu/cpu0/cache/";
int Idx = 0;
while (true) {
CPUInfo::CacheInfo info;
std::string FPath = StrCat(dir, "index", Idx++, "/");
std::ifstream f(StrCat(FPath, "size").c_str());
if (!f.is_open()) break;
std::string suffix;
f >> info.size;
if (f.fail())
PrintErrorAndDie("Failed while reading file '", FPath, "size'");
if (f.good()) {
f >> suffix;
if (f.bad())
PrintErrorAndDie(
"Invalid cache size format: failed to read size suffix");
else if (f && suffix != "K")
PrintErrorAndDie("Invalid cache size format: Expected bytes ", suffix);
else if (suffix == "K")
info.size *= 1000;
}
if (!ReadFromFile(StrCat(FPath, "type"), &info.type))
PrintErrorAndDie("Failed to read from file ", FPath, "type");
if (!ReadFromFile(StrCat(FPath, "level"), &info.level))
PrintErrorAndDie("Failed to read from file ", FPath, "level");
std::string map_str;
if (!ReadFromFile(StrCat(FPath, "shared_cpu_map"), &map_str))
PrintErrorAndDie("Failed to read from file ", FPath, "shared_cpu_map");
info.num_sharing = CountSetBitsInCPUMap(map_str);
res.push_back(info);
}
return res;
}
#ifdef BENCHMARK_OS_MACOSX
std::vector<CPUInfo::CacheInfo> GetCacheSizesMacOSX() {
std::vector<CPUInfo::CacheInfo> res;
std::array<uint64_t, 4> CacheCounts{{0, 0, 0, 0}};
GetSysctl("hw.cacheconfig", &CacheCounts);
struct {
std::string name;
std::string type;
int level;
uint64_t num_sharing;
} Cases[] = {{"hw.l1dcachesize", "Data", 1, CacheCounts[1]},
{"hw.l1icachesize", "Instruction", 1, CacheCounts[1]},
{"hw.l2cachesize", "Unified", 2, CacheCounts[2]},
{"hw.l3cachesize", "Unified", 3, CacheCounts[3]}};
for (auto& C : Cases) {
int val;
if (!GetSysctl(C.name, &val)) continue;
CPUInfo::CacheInfo info;
info.type = C.type;
info.level = C.level;
info.size = val;
info.num_sharing = static_cast<int>(C.num_sharing);
res.push_back(std::move(info));
}
return res;
}
#elif defined(BENCHMARK_OS_WINDOWS)
std::vector<CPUInfo::CacheInfo> GetCacheSizesWindows() {
std::vector<CPUInfo::CacheInfo> res;
DWORD buffer_size = 0;
using PInfo = SYSTEM_LOGICAL_PROCESSOR_INFORMATION;
using CInfo = CACHE_DESCRIPTOR;
using UPtr = std::unique_ptr<PInfo, decltype(&std::free)>;
GetLogicalProcessorInformation(nullptr, &buffer_size);
UPtr buff((PInfo*)malloc(buffer_size), &std::free);
if (!GetLogicalProcessorInformation(buff.get(), &buffer_size))
PrintErrorAndDie("Failed during call to GetLogicalProcessorInformation: ",
GetLastError());
PInfo* it = buff.get();
PInfo* end = buff.get() + (buffer_size / sizeof(PInfo));
for (; it != end; ++it) {
if (it->Relationship != RelationCache) continue;
using BitSet = std::bitset<sizeof(ULONG_PTR) * CHAR_BIT>;
BitSet B(it->ProcessorMask);
// To prevent duplicates, only consider caches where CPU 0 is specified
if (!B.test(0)) continue;
CInfo* Cache = &it->Cache;
CPUInfo::CacheInfo C;
C.num_sharing = static_cast<int>(B.count());
C.level = Cache->Level;
C.size = Cache->Size;
C.type = "Unknown";
switch (Cache->Type) {
case CacheUnified:
C.type = "Unified";
break;
case CacheInstruction:
C.type = "Instruction";
break;
case CacheData:
C.type = "Data";
break;
case CacheTrace:
C.type = "Trace";
break;
}
res.push_back(C);
}
return res;
}
#endif
std::vector<CPUInfo::CacheInfo> GetCacheSizes() {
#ifdef BENCHMARK_OS_MACOSX
return GetCacheSizesMacOSX();
#elif defined(BENCHMARK_OS_WINDOWS)
return GetCacheSizesWindows();
#else
return GetCacheSizesFromKVFS();
#endif
}
int GetNumCPUs() {
#ifdef BENCHMARK_HAS_SYSCTL
int NumCPU = -1;
if (GetSysctl("hw.ncpu", &NumCPU)) return NumCPU;
fprintf(stderr, "Err: %s\n", strerror(errno));
std::exit(EXIT_FAILURE);
#elif defined(BENCHMARK_OS_WINDOWS)
SYSTEM_INFO sysinfo;
// Use memset as opposed to = {} to avoid GCC missing initializer false
// positives.
std::memset(&sysinfo, 0, sizeof(SYSTEM_INFO));
GetSystemInfo(&sysinfo);
return sysinfo.dwNumberOfProcessors; // number of logical
// processors in the current
// group
#elif defined(BENCHMARK_OS_SOLARIS)
// Returns -1 in case of a failure.
int NumCPU = sysconf(_SC_NPROCESSORS_ONLN);
if (NumCPU < 0) {
fprintf(stderr,
"sysconf(_SC_NPROCESSORS_ONLN) failed with error: %s\n",
strerror(errno));
}
return NumCPU;
#else
int NumCPUs = 0;
int MaxID = -1;
std::ifstream f("/proc/cpuinfo");
if (!f.is_open()) {
std::cerr << "failed to open /proc/cpuinfo\n";
return -1;
}
const std::string Key = "processor";
std::string ln;
while (std::getline(f, ln)) {
if (ln.empty()) continue;
size_t SplitIdx = ln.find(':');
std::string value;
if (SplitIdx != std::string::npos) value = ln.substr(SplitIdx + 1);
if (ln.size() >= Key.size() && ln.compare(0, Key.size(), Key) == 0) {
NumCPUs++;
if (!value.empty()) {
int CurID = std::stoi(value);
MaxID = std::max(CurID, MaxID);
}
}
}
if (f.bad()) {
std::cerr << "Failure reading /proc/cpuinfo\n";
return -1;
}
if (!f.eof()) {
std::cerr << "Failed to read to end of /proc/cpuinfo\n";
return -1;
}
f.close();
if ((MaxID + 1) != NumCPUs) {
fprintf(stderr,
"CPU ID assignments in /proc/cpuinfo seem messed up."
" This is usually caused by a bad BIOS.\n");
}
return NumCPUs;
#endif
BENCHMARK_UNREACHABLE();
}
double GetCPUCyclesPerSecond() {
#if defined BENCHMARK_OS_LINUX || defined BENCHMARK_OS_CYGWIN
long freq;
// If the kernel is exporting the tsc frequency use that. There are issues
// where cpuinfo_max_freq cannot be relied on because the BIOS may be
  // exporting an invalid p-state (on x86) or p-states may be used to put the
// processor in a new mode (turbo mode). Essentially, those frequencies
// cannot always be relied upon. The same reasons apply to /proc/cpuinfo as
// well.
if (ReadFromFile("/sys/devices/system/cpu/cpu0/tsc_freq_khz", &freq)
// If CPU scaling is in effect, we want to use the *maximum* frequency,
// not whatever CPU speed some random processor happens to be using now.
|| ReadFromFile("/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq",
&freq)) {
// The value is in kHz (as the file name suggests). For example, on a
// 2GHz warpstation, the file contains the value "2000000".
return freq * 1000.0;
}
const double error_value = -1;
double bogo_clock = error_value;
std::ifstream f("/proc/cpuinfo");
if (!f.is_open()) {
std::cerr << "failed to open /proc/cpuinfo\n";
return error_value;
}
auto startsWithKey = [](std::string const& Value, std::string const& Key) {
if (Key.size() > Value.size()) return false;
auto Cmp = [&](char X, char Y) {
return std::tolower(X) == std::tolower(Y);
};
return std::equal(Key.begin(), Key.end(), Value.begin(), Cmp);
};
std::string ln;
while (std::getline(f, ln)) {
if (ln.empty()) continue;
size_t SplitIdx = ln.find(':');
std::string value;
if (SplitIdx != std::string::npos) value = ln.substr(SplitIdx + 1);
// When parsing the "cpu MHz" and "bogomips" (fallback) entries, we only
// accept positive values. Some environments (virtual machines) report zero,
// which would cause infinite looping in WallTime_Init.
if (startsWithKey(ln, "cpu MHz")) {
if (!value.empty()) {
double cycles_per_second = std::stod(value) * 1000000.0;
if (cycles_per_second > 0) return cycles_per_second;
}
} else if (startsWithKey(ln, "bogomips")) {
if (!value.empty()) {
bogo_clock = std::stod(value) * 1000000.0;
if (bogo_clock < 0.0) bogo_clock = error_value;
}
}
}
if (f.bad()) {
std::cerr << "Failure reading /proc/cpuinfo\n";
return error_value;
}
if (!f.eof()) {
std::cerr << "Failed to read to end of /proc/cpuinfo\n";
return error_value;
}
f.close();
// If we found the bogomips clock, but nothing better, we'll use it (but
// we're not happy about it); otherwise, fallback to the rough estimation
// below.
if (bogo_clock >= 0.0) return bogo_clock;
#elif defined BENCHMARK_HAS_SYSCTL
constexpr auto* FreqStr =
#if defined(BENCHMARK_OS_FREEBSD) || defined(BENCHMARK_OS_NETBSD)
"machdep.tsc_freq";
#elif defined BENCHMARK_OS_OPENBSD
"hw.cpuspeed";
#else
"hw.cpufrequency";
#endif
unsigned long long hz = 0;
#if defined BENCHMARK_OS_OPENBSD
if (GetSysctl(FreqStr, &hz)) return hz * 1000000;
#else
if (GetSysctl(FreqStr, &hz)) return hz;
#endif
fprintf(stderr, "Unable to determine clock rate from sysctl: %s: %s\n",
FreqStr, strerror(errno));
#elif defined BENCHMARK_OS_WINDOWS
// In NT, read MHz from the registry. If we fail to do so or we're in win9x
// then make a crude estimate.
DWORD data, data_size = sizeof(data);
if (IsWindowsXPOrGreater() &&
SUCCEEDED(
SHGetValueA(HKEY_LOCAL_MACHINE,
"HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0",
"~MHz", nullptr, &data, &data_size)))
return static_cast<double>((int64_t)data *
(int64_t)(1000 * 1000)); // was mhz
#elif defined (BENCHMARK_OS_SOLARIS)
kstat_ctl_t *kc = kstat_open();
if (!kc) {
std::cerr << "failed to open /dev/kstat\n";
return -1;
}
kstat_t *ksp = kstat_lookup(kc, (char*)"cpu_info", -1, (char*)"cpu_info0");
if (!ksp) {
std::cerr << "failed to lookup in /dev/kstat\n";
return -1;
}
if (kstat_read(kc, ksp, NULL) < 0) {
std::cerr << "failed to read from /dev/kstat\n";
return -1;
}
kstat_named_t *knp =
(kstat_named_t*)kstat_data_lookup(ksp, (char*)"current_clock_Hz");
if (!knp) {
std::cerr << "failed to lookup data in /dev/kstat\n";
return -1;
}
if (knp->data_type != KSTAT_DATA_UINT64) {
std::cerr << "current_clock_Hz is of unexpected data type: "
<< knp->data_type << "\n";
return -1;
}
double clock_hz = knp->value.ui64;
kstat_close(kc);
return clock_hz;
#endif
// If we've fallen through, attempt to roughly estimate the CPU clock rate.
const int estimate_time_ms = 1000;
const auto start_ticks = cycleclock::Now();
SleepForMilliseconds(estimate_time_ms);
return static_cast<double>(cycleclock::Now() - start_ticks);
}
} // end namespace
const CPUInfo& CPUInfo::Get() {
static const CPUInfo* info = new CPUInfo();
return *info;
}
CPUInfo::CPUInfo()
: num_cpus(GetNumCPUs()),
cycles_per_second(GetCPUCyclesPerSecond()),
caches(GetCacheSizes()),
scaling_enabled(CpuScalingEnabled(num_cpus)) {}
} // end namespace benchmark
| {
"pile_set_name": "Github"
} |
path: "tensorflow.initializers.uniform_unit_scaling"
tf_class {
is_instance: "<class \'tensorflow.python.ops.init_ops.UniformUnitScaling\'>"
is_instance: "<class \'tensorflow.python.ops.init_ops.Initializer\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'factor\', \'seed\', \'dtype\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\', \"<dtype: \'float32\'>\"], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2018 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build windows
package embed
import (
"os"
"go.uber.org/zap/zapcore"
)
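// getJournalWriteSyncer returns stderr as the zap write syncer on Windows,
// where the systemd journal is not available.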
func getJournalWriteSyncer() (zapcore.WriteSyncer, error) {
return zapcore.AddSync(os.Stderr), nil
}
| {
"pile_set_name": "Github"
} |
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
---
mqtt:
topic-name: plc4x/test-topic
server-host: test.mosquitto.org
server-port: 1883
plc:
connection: test:plc4x-example-mqtt
addresses:
- name: intParam
address: RANDOM/foo:INTEGER
size: 1
type: java.lang.Integer
- name: intParam2
address: RANDOM/bar:INTEGER
size: 1
type: java.lang.Integer
polling-interval: 2000
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) AXA Group Operations Spain S.A.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
const fs = require('fs');
const path = require('path');
const {
ArrToObj,
Container,
Normalizer,
ObjToArr,
Stemmer,
Stopwords,
Tokenizer,
Timer,
logger,
MemoryStorage,
} = require('@nlpjs/core');
const { fs: requestfs, request } = require('@nlpjs/request');
const pluginInformation = require('./plugin-information.json');
const {
listFilesAbsolute,
getAbsolutePath,
loadEnv,
loadEnvFromJson,
} = require('./helper');
const defaultPathConfiguration = './conf.json';
const defaultPathPipeline = './pipelines.md';
const defaultPathPlugins = './plugins';
function loadPipelinesStr(instance, pipelines) {
instance.loadPipelinesFromString(pipelines);
}
function loadPipelinesFromFile(instance, fileName) {
const str = fs.readFileSync(fileName, 'utf8');
instance.loadPipelinesFromString(str);
}
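// Loads pipeline definitions from a markdown file, a directory of .md files,
// or an array of such paths, recursing as needed.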
function loadPipelines(instance, fileName) {
if (Array.isArray(fileName)) {
for (let i = 0; i < fileName.length; i += 1) {
loadPipelines(instance, fileName[i]);
}
} else if (fs.existsSync(fileName)) {
if (fs.lstatSync(fileName).isDirectory()) {
const files = listFilesAbsolute(fileName).filter((x) =>
x.endsWith('.md')
);
for (let i = 0; i < files.length; i += 1) {
loadPipelines(instance, files[i]);
}
} else {
loadPipelinesFromFile(instance, fileName);
}
}
}
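// Loads plugin modules from a .js file, a directory of .js files, or an
// array of such paths, registering each module on the container.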
function loadPlugins(instance, fileName) {
if (Array.isArray(fileName)) {
for (let i = 0; i < fileName.length; i += 1) {
loadPlugins(instance, fileName[i]);
}
} else if (fs.existsSync(fileName)) {
if (fs.lstatSync(fileName).isDirectory()) {
const files = listFilesAbsolute(fileName).filter((x) =>
x.endsWith('.js')
);
for (let i = 0; i < files.length; i += 1) {
loadPlugins(instance, files[i]);
}
} else {
/* eslint-disable-next-line */
const plugin = require(fileName);
instance.use(plugin);
}
}
}
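// Recursively resolves "$NAME" placeholders in configuration values from
// environment variables, preferring the prefixed variant when available.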
function traverse(obj, preffix) {
if (typeof obj === 'string') {
if (obj.startsWith('$')) {
return (
process.env[`${preffix}${obj.slice(1)}`] || process.env[obj.slice(1)]
);
}
return obj;
}
if (Array.isArray(obj)) {
return obj.map((x) => traverse(x, preffix));
}
  if (obj !== null && typeof obj === 'object') {
const keys = Object.keys(obj);
const result = {};
for (let i = 0; i < keys.length; i += 1) {
result[keys[i]] = traverse(obj[keys[i]], preffix);
}
return result;
}
return obj;
}
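// Bootstraps a dependency-injection container: registers the core plugins,
// resolves configuration from file, settings and environment variables, then
// loads the configured pipelines and plugins.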
function containerBootstrap(
inputSettings,
srcMustLoadEnv,
container,
preffix,
pipelines,
parent
) {
const mustLoadEnv = srcMustLoadEnv === undefined ? true : srcMustLoadEnv;
const instance = container || new Container(preffix);
instance.parent = parent;
if (!preffix) {
instance.register('fs', requestfs);
instance.register('request', { get: request });
instance.use(ArrToObj);
instance.use(Normalizer);
instance.use(ObjToArr);
instance.use(Stemmer);
instance.use(Stopwords);
instance.use(Tokenizer);
instance.use(Timer);
instance.use(logger);
instance.use(MemoryStorage);
}
const srcSettings = inputSettings || {};
let settings = srcSettings;
if (typeof settings === 'string') {
settings = {
pathConfiguration: srcSettings,
pathPipeline: defaultPathPipeline,
pathPlugins: defaultPathPlugins,
};
} else {
if (!settings.pathConfiguration) {
settings.pathConfiguration = defaultPathConfiguration;
}
if (!settings.pathPipeline) {
settings.pathPipeline = defaultPathPipeline;
}
if (!settings.pathPlugins) {
settings.pathPlugins = defaultPathPlugins;
}
}
if (
srcSettings.loadEnv ||
(srcSettings.loadEnv === undefined && mustLoadEnv)
) {
loadEnv();
}
settings.pathConfiguration = getAbsolutePath(settings.pathConfiguration);
if (srcSettings.envFileName) {
loadEnv(srcSettings.envFileName);
}
if (srcSettings.env) {
loadEnvFromJson(preffix, srcSettings.env);
}
let configuration;
if (settings.isChild || !fs.existsSync(settings.pathConfiguration)) {
configuration = settings;
} else {
configuration = JSON.parse(
fs.readFileSync(settings.pathConfiguration, 'utf8')
);
}
configuration = traverse(configuration, preffix ? `${preffix}_` : '');
if (configuration.pathPipeline) {
settings.pathPipeline = configuration.pathPipeline;
}
if (configuration.pathPlugins) {
settings.pathPlugins = configuration.pathPlugins;
}
if (configuration.settings) {
const keys = Object.keys(configuration.settings);
for (let i = 0; i < keys.length; i += 1) {
instance.registerConfiguration(
keys[i],
configuration.settings[keys[i]],
true
);
}
}
if (configuration.use) {
for (let i = 0; i < configuration.use.length; i += 1) {
const current = configuration.use[i];
if (typeof current === 'string') {
let infoArr = pluginInformation[current];
if (!infoArr) {
throw new Error(
`Plugin information not found for plugin "${current}"`
);
}
if (!Array.isArray(infoArr)) {
infoArr = [infoArr];
}
for (let j = 0; j < infoArr.length; j += 1) {
const info = infoArr[j];
let lib;
try {
/* eslint-disable-next-line */
lib = require(info.path);
} catch (err) {
try {
/* eslint-disable-next-line */
lib = require(getAbsolutePath(
path.join('./node_modules', info.path)
));
} catch (err2) {
throw new Error(
`You have to install library "${info.path}" to use plugin "${current}"`
);
}
}
instance.use(lib[info.className], info.name, info.isSingleton);
}
} else {
let lib;
try {
/* eslint-disable-next-line */
lib = require(current.path);
} catch (err) {
/* eslint-disable-next-line */
lib = require(getAbsolutePath(current.path));
}
instance.use(lib[current.className], current.name, current.isSingleton);
}
}
}
if (configuration.terraform) {
for (let i = 0; i < configuration.terraform.length; i += 1) {
const current = configuration.terraform[i];
const terra = instance.get(current.className);
instance.register(current.name, terra, true);
}
}
if (configuration.childs) {
instance.childs = configuration.childs;
}
if (pipelines) {
for (let i = 0; i < pipelines.length; i += 1) {
const pipeline = pipelines[i];
instance.registerPipeline(
pipeline.tag,
pipeline.pipeline,
pipeline.overwrite
);
}
}
loadPipelines(instance, settings.pathPipeline || './pipelines.md');
if (configuration.pipelines) {
loadPipelinesStr(instance, configuration.pipelines);
}
loadPlugins(instance, settings.pathPlugins || './plugins');
return instance;
}
module.exports = containerBootstrap;
| {
"pile_set_name": "Github"
} |
/*
* FindBugs - Find Bugs in Java programs
* Copyright (C) 2006, University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.classfile.impl;
import edu.umd.cs.findbugs.classfile.ICodeBaseEntry;
/**
* @author David Hovemeyer
*/
public abstract class AbstractScannableCodeBaseEntry implements ICodeBaseEntry {
@Override
public abstract AbstractScannableCodeBase getCodeBase();
public abstract String getRealResourceName();
/*
* (non-Javadoc)
*
* @see
* edu.umd.cs.findbugs.classfile.ICodeBaseEntry#overrideResourceName(java
* .lang.String)
*/
@Override
public void overrideResourceName(String resourceName) {
getCodeBase().addResourceNameTranslation(getRealResourceName(), resourceName);
}
/*
* (non-Javadoc)
*
* @see edu.umd.cs.findbugs.classfile.ICodeBaseEntry#getResourceName()
*/
@Override
public String getResourceName() {
return getCodeBase().translateResourceName(getRealResourceName());
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Class: Selenium::Rake::RemoteControlStopTask</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<meta http-equiv="Content-Script-Type" content="text/javascript" />
<link rel="stylesheet" href="../../.././rdoc-style.css" type="text/css" media="screen" />
<script type="text/javascript">
// <![CDATA[
function popupCode( url ) {
window.open(url, "Code", "resizable=yes,scrollbars=yes,toolbar=no,status=no,height=150,width=400")
}
function toggleCode( id ) {
if ( document.getElementById )
elem = document.getElementById( id );
else if ( document.all )
elem = eval( "document.all." + id );
else
return false;
elemStyle = elem.style;
if ( elemStyle.display != "block" ) {
elemStyle.display = "block"
} else {
elemStyle.display = "none"
}
return true;
}
// Make codeblocks hidden by default
document.writeln( "<style type=\"text/css\">div.method-source-code { display: none }</style>" )
// ]]>
</script>
</head>
<body>
<div id="classHeader">
<table class="header-table">
<tr class="top-aligned-row">
<td><strong>Class</strong></td>
<td class="class-name-in-header">Selenium::Rake::RemoteControlStopTask</td>
</tr>
<tr class="top-aligned-row">
<td><strong>In:</strong></td>
<td>
<a href="../../../files/lib/selenium/rake/remote_control_stop_task_rb.html">
lib/selenium/rake/remote_control_stop_task.rb
</a>
<br />
</td>
</tr>
<tr class="top-aligned-row">
<td><strong>Parent:</strong></td>
<td>
Object
</td>
</tr>
</table>
</div>
<!-- banner header -->
<div id="bodyContent">
<div id="contextContent">
</div>
<div id="method-list">
<h3 class="section-bar">Methods</h3>
<div class="name-list">
<a href="#M000248">define</a>
<a href="#M000247">new</a>
</div>
</div>
</div>
<!-- if includes -->
<div id="section">
<div id="attribute-list">
<h3 class="section-bar">Attributes</h3>
<div class="name-list">
<table>
<tr class="top-aligned-row context-row">
<td class="context-item-name">host</td>
<td class="context-item-value"> [RW] </td>
<td class="context-item-desc"></td>
</tr>
<tr class="top-aligned-row context-row">
<td class="context-item-name">port</td>
<td class="context-item-value"> [RW] </td>
<td class="context-item-desc"></td>
</tr>
<tr class="top-aligned-row context-row">
<td class="context-item-name">timeout_in_seconds</td>
<td class="context-item-value"> [RW] </td>
<td class="context-item-desc"></td>
</tr>
</table>
</div>
</div>
<!-- if method_list -->
<div id="methods">
<h3 class="section-bar">Public Class methods</h3>
<div id="method-M000247" class="method-detail">
<a name="M000247"></a>
<div class="method-heading">
<a href="#M000247" class="method-signature">
<span class="method-name">new</span><span class="method-args">(name = :'selenium:rc:stop') {|self if block_given?| ...}</span>
</a>
</div>
<div class="method-description">
<p><a class="source-toggle" href="#"
onclick="toggleCode('M000247-source');return false;">[Source]</a></p>
<div class="method-source-code" id="M000247-source">
<pre>
<span class="ruby-comment cmt"># File lib/selenium/rake/remote_control_stop_task.rb, line 7</span>
 7: <span class="ruby-keyword kw">def</span> <span class="ruby-identifier">initialize</span>(<span class="ruby-identifier">name</span> = <span class="ruby-value str">:'selenium:rc:stop'</span>)
8: <span class="ruby-ivar">@host</span> = <span class="ruby-value str">"localhost"</span>
9: <span class="ruby-ivar">@name</span> = <span class="ruby-identifier">name</span>
10: <span class="ruby-ivar">@port</span> = <span class="ruby-value">4444</span>
11: <span class="ruby-ivar">@timeout_in_seconds</span> = <span class="ruby-value">5</span>
12: <span class="ruby-keyword kw">yield</span> <span class="ruby-keyword kw">self</span> <span class="ruby-keyword kw">if</span> <span class="ruby-identifier">block_given?</span>
13: <span class="ruby-identifier">define</span>
14: <span class="ruby-keyword kw">end</span>
</pre>
</div>
</div>
</div>
<h3 class="section-bar">Public Instance methods</h3>
<div id="method-M000248" class="method-detail">
<a name="M000248"></a>
<div class="method-heading">
<a href="#M000248" class="method-signature">
<span class="method-name">define</span><span class="method-args">()</span>
</a>
</div>
<div class="method-description">
<p><a class="source-toggle" href="#"
onclick="toggleCode('M000248-source');return false;">[Source]</a></p>
<div class="method-source-code" id="M000248-source">
<pre>
<span class="ruby-comment cmt"># File lib/selenium/rake/remote_control_stop_task.rb, line 16</span>
16: <span class="ruby-keyword kw">def</span> <span class="ruby-identifier">define</span>
17: <span class="ruby-identifier">desc</span> <span class="ruby-value str">"Stop Selenium Remote Control running"</span>
18: <span class="ruby-identifier">task</span> <span class="ruby-ivar">@name</span> <span class="ruby-keyword kw">do</span>
19: <span class="ruby-identifier">puts</span> <span class="ruby-node">"Stopping Selenium Remote Control running at #{@host}:#{@port}..."</span>
20: <span class="ruby-identifier">remote_control</span> = <span class="ruby-constant">Selenium</span><span class="ruby-operator">::</span><span class="ruby-constant">RemoteControl</span><span class="ruby-operator">::</span><span class="ruby-constant">RemoteControl</span>.<span class="ruby-identifier">new</span>(<span class="ruby-ivar">@host</span>, <span class="ruby-ivar">@port</span>, <span class="ruby-ivar">@timeout_in_seconds</span>)
21: <span class="ruby-identifier">remote_control</span>.<span class="ruby-identifier">stop</span>
22: <span class="ruby-identifier">puts</span> <span class="ruby-node">"Stopped Selenium Remote Control running at #{@host}:#{@port}"</span>
23: <span class="ruby-keyword kw">end</span>
24: <span class="ruby-keyword kw">end</span>
</pre>
</div>
</div>
</div>
</div>
</div>
<div id="validator-badges">
<p><small><a href="http://validator.w3.org/check/referer">[Validate]</a></small></p>
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
/* eslint-disable */
'use strict';
const webpack = require('webpack');
const EngineAddon = require('ember-engines/lib/engine-addon');
module.exports = EngineAddon.extend({
name: 'login',
lazyLoading: { enabled: true },
isDevelopingAddon() {
return true;
},
options: {
babel: { plugins: [require('ember-auto-import/babel-plugin')] }
}
});
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.6"/>
<title>OpenNI 2.0: openni::CoordinateConverter Class Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectlogo"><img alt="Logo" src="OpenNILogo.bmp"/></td>
<td style="padding-left: 0.5em;">
<div id="projectname">OpenNI 2.0
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.6 -->
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="pages.html"><span>Related Pages</span></a></li>
<li><a href="namespaces.html"><span>Namespaces</span></a></li>
<li class="current"><a href="annotated.html"><span>Classes</span></a></li>
<li><a href="files.html"><span>Files</span></a></li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.html"><span>Class List</span></a></li>
<li><a href="classes.html"><span>Class Index</span></a></li>
<li><a href="inherits.html"><span>Class Hierarchy</span></a></li>
<li><a href="functions.html"><span>Class Members</span></a></li>
</ul>
</div>
<div id="nav-path" class="navpath">
<ul>
<li class="navelem"><a class="el" href="namespaceopenni.html">openni</a></li><li class="navelem"><a class="el" href="classopenni_1_1_coordinate_converter.html">CoordinateConverter</a></li> </ul>
</div>
</div><!-- top -->
<div class="header">
<div class="summary">
<a href="#pub-static-methods">Static Public Member Functions</a> |
<a href="classopenni_1_1_coordinate_converter-members.html">List of all members</a> </div>
<div class="headertitle">
<div class="title">openni::CoordinateConverter Class Reference</div> </div>
</div><!--header-->
<div class="contents">
<p><code>#include <<a class="el" href="_open_n_i_8h_source.html">OpenNI.h</a>></code></p>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-static-methods"></a>
Static Public Member Functions</h2></td></tr>
<tr class="memitem:a4faabf558f2eb6e27948e50bffa8f581"><td class="memItemLeft" align="right" valign="top">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="classopenni_1_1_coordinate_converter.html#a4faabf558f2eb6e27948e50bffa8f581">convertDepthToColor</a> (const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> &depthStream, const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> &colorStream, int depthX, int depthY, <a class="el" href="namespaceopenni.html#ab2403242071ff06fc6fe18c0c6111d0f">DepthPixel</a> depthZ, int *pColorX, int *pColorY)</td></tr>
<tr class="separator:a4faabf558f2eb6e27948e50bffa8f581"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a3129cdc99f37c8084ddb27849702b1dc"><td class="memItemLeft" align="right" valign="top">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="classopenni_1_1_coordinate_converter.html#a3129cdc99f37c8084ddb27849702b1dc">convertDepthToWorld</a> (const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> &depthStream, int depthX, int depthY, <a class="el" href="namespaceopenni.html#ab2403242071ff06fc6fe18c0c6111d0f">DepthPixel</a> depthZ, float *pWorldX, float *pWorldY, float *pWorldZ)</td></tr>
<tr class="separator:a3129cdc99f37c8084ddb27849702b1dc"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a5d934a92602a069c703c0640cd776aeb"><td class="memItemLeft" align="right" valign="top">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="classopenni_1_1_coordinate_converter.html#a5d934a92602a069c703c0640cd776aeb">convertDepthToWorld</a> (const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> &depthStream, float depthX, float depthY, float depthZ, float *pWorldX, float *pWorldY, float *pWorldZ)</td></tr>
<tr class="separator:a5d934a92602a069c703c0640cd776aeb"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a38b9a3f31f62ebd8664f4f8a1fd26704"><td class="memItemLeft" align="right" valign="top">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="classopenni_1_1_coordinate_converter.html#a38b9a3f31f62ebd8664f4f8a1fd26704">convertWorldToDepth</a> (const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> &depthStream, float worldX, float worldY, float worldZ, int *pDepthX, int *pDepthY, <a class="el" href="namespaceopenni.html#ab2403242071ff06fc6fe18c0c6111d0f">DepthPixel</a> *pDepthZ)</td></tr>
<tr class="separator:a38b9a3f31f62ebd8664f4f8a1fd26704"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a8a68229388b168a836c3fe709f2ef953"><td class="memItemLeft" align="right" valign="top">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> </td><td class="memItemRight" valign="bottom"><a class="el" href="classopenni_1_1_coordinate_converter.html#a8a68229388b168a836c3fe709f2ef953">convertWorldToDepth</a> (const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> &depthStream, float worldX, float worldY, float worldZ, float *pDepthX, float *pDepthY, float *pDepthZ)</td></tr>
<tr class="separator:a8a68229388b168a836c3fe709f2ef953"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>The <a class="el" href="classopenni_1_1_coordinate_converter.html">CoordinateConverter</a> class converts points between the different coordinate systems.</p>
<p><b>Depth and World coordinate systems</b></p>
<p><a class="el" href="classopenni_1_1_open_n_i.html">OpenNI</a> applications commonly use two different coordinate systems to represent depth. These two systems are referred to as Depth and World representation.</p>
<p>Depth coordinates are the native data representation. In this system, the frame is a map (two dimensional array), and each pixel is assigned a depth value. This depth value represents the distance between the camera plane and whatever object is in the given pixel. The X and Y coordinates are simply the location in the map, where the origin is the top-left corner of the field of view.</p>
<p>World coordinates superimpose a more familiar 3D Cartesian coordinate system on the world, with the camera lens at the origin. In this system, every point is specified by three coordinates: x, y and z. The x axis of this system is along a line that passes through the infrared projector and CMOS imager of the camera. The y axis is parallel to the front face of the camera, and perpendicular to the x axis (it will also be perpendicular to the ground if the camera is upright and level). The z axis runs into the scene, perpendicular to both the x and y axis. From the perspective of the camera, an object moving from left to right is moving along the increasing x axis. An object moving up is moving along the increasing y axis, and an object moving away from the camera is moving along the increasing z axis.</p>
<p>Mathematically, the Depth coordinate system is the projection of the scene on the CMOS. If the sensor's angular field of view and resolution are known, then an angular size can be calculated for each pixel. This is how the conversion algorithms work. The dependence of this calculation on FoV and resolution is the reason that a <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> pointer must be provided to these functions. The <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> pointer is used to determine parameters for the specific points to be converted.</p>
<p>Since the Depth coordinate system is projective, the apparent size of objects in depth coordinates (measured in pixels) will increase as an object moves closer to the sensor. The size of objects in the World coordinate system is independent of distance from the sensor.</p>
<p>Note that converting from Depth to World coordinates is relatively expensive computationally. It is generally not practical to convert the entire raw depth map to World coordinates. A better approach is to have your computer vision algorithm work in Depth coordinates for as long as possible, and only convert a few specific points to World coordinates right before output.</p>
<p>Note that when converting from Depth to World or vice versa, the Z value remains the same. </p>
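<p>A minimal, illustrative usage sketch (not part of the original documentation): it assumes <code>depthStream</code> is an already-created and started depth <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a>, and that <code>x</code>, <code>y</code> and <code>z</code> hold a valid depth pixel location and its value:</p>
<div class="fragment"><pre class="fragment">float wx, wy, wz;
openni::Status rc = openni::CoordinateConverter::convertDepthToWorld(
    depthStream, x, y, z, &amp;wx, &amp;wy, &amp;wz);
if (rc == openni::STATUS_OK)
{
    // wx, wy, wz now hold the same point in millimeters, in World coordinates
}</pre></div>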
</div><h2 class="groupheader">Member Function Documentation</h2>
<a class="anchor" id="a4faabf558f2eb6e27948e50bffa8f581"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> openni::CoordinateConverter::convertDepthToColor </td>
<td>(</td>
<td class="paramtype">const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> & </td>
<td class="paramname"><em>depthStream</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> & </td>
<td class="paramname"><em>colorStream</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int </td>
<td class="paramname"><em>depthX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int </td>
<td class="paramname"><em>depthY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype"><a class="el" href="namespaceopenni.html#ab2403242071ff06fc6fe18c0c6111d0f">DepthPixel</a> </td>
<td class="paramname"><em>depthZ</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int * </td>
<td class="paramname"><em>pColorX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int * </td>
<td class="paramname"><em>pColorY</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>For a given depth point, provides the coordinates of the corresponding color value. Useful for superimposing the depth and color images. This operation is the same as turning on registration, but is performed on a single pixel rather than the whole image. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramdir">[in]</td><td class="paramname">depthStream</td><td>Reference to a <a class="el" href="classopenni_1_1_video_stream.html">openni::VideoStream</a> that produced the depth value </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">colorStream</td><td>Reference to a <a class="el" href="classopenni_1_1_video_stream.html">openni::VideoStream</a> that we want to find the appropriate color pixel in </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthX</td><td>X value of the depth point, given in Depth coordinates and measured in pixels </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthY</td><td>Y value of the depth point, given in Depth coordinates and measured in pixels </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthZ</td><td>Z(depth) value of the depth point, given in the <a class="el" href="namespaceopenni.html#a6d1ecf2502394d600cb8d3709092d5a5">PixelFormat</a> of depthStream </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pColorX</td><td>The X coordinate of the color pixel that overlaps the given depth pixel, measured in pixels </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pColorY</td><td>The Y coordinate of the color pixel that overlaps the given depth pixel, measured in pixels </td></tr>
</table>
</dd>
</dl>
</div>
</div>
<a class="anchor" id="a3129cdc99f37c8084ddb27849702b1dc"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> openni::CoordinateConverter::convertDepthToWorld </td>
<td>(</td>
<td class="paramtype">const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> & </td>
<td class="paramname"><em>depthStream</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int </td>
<td class="paramname"><em>depthX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int </td>
<td class="paramname"><em>depthY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype"><a class="el" href="namespaceopenni.html#ab2403242071ff06fc6fe18c0c6111d0f">DepthPixel</a> </td>
<td class="paramname"><em>depthZ</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pWorldX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pWorldY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pWorldZ</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Converts a single point from the Depth coordinate system to the World coordinate system. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramdir">[in]</td><td class="paramname">depthStream</td><td>Reference to an openi::VideoStream that will be used to determine the format of the Depth coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthX</td><td>The X coordinate of the point to be converted, measured in pixels with 0 at the far left of the image </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthY</td><td>The Y coordinate of the point to be converted, measured in pixels with 0 at the top of the image </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthZ</td><td>the Z(depth) coordinate of the point to be converted, measured in the <a class="el" href="namespaceopenni.html#a6d1ecf2502394d600cb8d3709092d5a5">PixelFormat</a> of depthStream </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pWorldX</td><td>Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pWorldY</td><td>Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pWorldZ</td><td>Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates </td></tr>
</table>
</dd>
</dl>
</div>
</div>
<a class="anchor" id="a5d934a92602a069c703c0640cd776aeb"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> openni::CoordinateConverter::convertDepthToWorld </td>
<td>(</td>
<td class="paramtype">const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> & </td>
<td class="paramname"><em>depthStream</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>depthX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>depthY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>depthZ</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pWorldX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pWorldY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pWorldZ</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Converts a single point from a floating point representation of the Depth coordinate system to the World coordinate system. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramdir">[in]</td><td class="paramname">depthStream</td><td>Reference to an openi::VideoStream that will be used to determine the format of the Depth coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthX</td><td>The X coordinate of the point to be converted, measured in pixels with 0.0 at the far left of the image </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthY</td><td>The Y coordinate of the point to be converted, measured in pixels with 0.0 at the top of the image </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">depthZ</td><td>Z(depth) coordinate of the point to be converted, measured in the <a class="el" href="namespaceopenni.html#a6d1ecf2502394d600cb8d3709092d5a5">PixelFormat</a> of depthStream </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pWorldX</td><td>Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pWorldY</td><td>Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pWorldZ</td><td>Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates </td></tr>
</table>
</dd>
</dl>
</div>
</div>
<a class="anchor" id="a38b9a3f31f62ebd8664f4f8a1fd26704"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> openni::CoordinateConverter::convertWorldToDepth </td>
<td>(</td>
<td class="paramtype">const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> & </td>
<td class="paramname"><em>depthStream</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>worldX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>worldY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>worldZ</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int * </td>
<td class="paramname"><em>pDepthX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">int * </td>
<td class="paramname"><em>pDepthY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype"><a class="el" href="namespaceopenni.html#ab2403242071ff06fc6fe18c0c6111d0f">DepthPixel</a> * </td>
<td class="paramname"><em>pDepthZ</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Converts a single point from the World coordinate system to the Depth coordinate system. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramdir">[in]</td><td class="paramname">depthStream</td><td>Reference to an <a class="el" href="classopenni_1_1_video_stream.html">openni::VideoStream</a> that will be used to determine the format of the Depth coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">worldX</td><td>The X coordinate of the point to be converted, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">worldY</td><td>The Y coordinate of the point to be converted, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">worldZ</td><td>The Z coordinate of the point to be converted, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pDepthX</td><td>Pointer to a place to store the X coordinate of the output value, measured in pixels with 0 at far left of image </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pDepthY</td><td>Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0 at top of image </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pDepthZ</td><td>Pointer to a place to store the Z(depth) coordinate of the output value, measured in the <a class="el" href="namespaceopenni.html#a6d1ecf2502394d600cb8d3709092d5a5">PixelFormat</a> of depthStream </td></tr>
</table>
</dd>
</dl>
</div>
</div>
<a class="anchor" id="a8a68229388b168a836c3fe709f2ef953"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static <a class="el" href="namespaceopenni.html#abf03edf56fbb987feebce8e98b3e0333">Status</a> openni::CoordinateConverter::convertWorldToDepth </td>
<td>(</td>
<td class="paramtype">const <a class="el" href="classopenni_1_1_video_stream.html">VideoStream</a> & </td>
<td class="paramname"><em>depthStream</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>worldX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>worldY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float </td>
<td class="paramname"><em>worldZ</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pDepthX</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pDepthY</em>, </td>
</tr>
<tr>
<td class="paramkey"></td>
<td></td>
<td class="paramtype">float * </td>
<td class="paramname"><em>pDepthZ</em> </td>
</tr>
<tr>
<td></td>
<td>)</td>
<td></td><td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">inline</span><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Converts a single point from the World coordinate system to a floating point representation of the Depth coordinate system. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramdir">[in]</td><td class="paramname">depthStream</td><td>Reference to an <a class="el" href="classopenni_1_1_video_stream.html">openni::VideoStream</a> that will be used to determine the format of the Depth coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">worldX</td><td>The X coordinate of the point to be converted, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">worldY</td><td>The Y coordinate of the point to be converted, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[in]</td><td class="paramname">worldZ</td><td>The Z coordinate of the point to be converted, measured in millimeters in World coordinates </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pDepthX</td><td>Pointer to a place to store the X coordinate of the output value, measured in pixels with 0.0 at far left of the image </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pDepthY</td><td>Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0.0 at the top of the image </td></tr>
<tr><td class="paramdir">[out]</td><td class="paramname">pDepthZ</td><td>Pointer to a place to store the Z(depth) coordinate of the output value, measured in millimeters with 0.0 at the camera lens </td></tr>
</table>
</dd>
</dl>
</div>
</div>
<hr/>The documentation for this class was generated from the following file:<ul>
<li><a class="el" href="_open_n_i_8h_source.html">OpenNI.h</a></li>
</ul>
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated on Fri Nov 25 2016 14:34:35 for OpenNI 2.0 by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.6
</small></address>
</body>
</html>
| {
"pile_set_name": "Github"
} |
if (!document.getElementById('mathjaxscript_pelican_#%@#$@#')) {{
var align = "{align}",
indent = "{indent}",
linebreak = "{linebreak_automatic}";
if ({responsive}) {{
align = (screen.width < {responsive_break}) ? "left" : align;
indent = (screen.width < {responsive_break}) ? "0em" : indent;
linebreak = (screen.width < {responsive_break}) ? 'true' : linebreak;
}}
var mathjaxscript = document.createElement('script');
mathjaxscript.id = 'mathjaxscript_pelican_#%@#$@#';
mathjaxscript.type = 'text/javascript';
mathjaxscript.src = {source};
var configscript = document.createElement('script');
configscript.type = 'text/x-mathjax-config';
configscript[(window.opera ? "innerHTML" : "text")] =
"MathJax.Hub.Config({{" +
" config: ['MMLorHTML.js']," +
" TeX: {{ extensions: ['AMSmath.js','AMSsymbols.js','noErrors.js','noUndefined.js'{tex_extensions}], equationNumbers: {{ autoNumber: '{equation_numbering}' }} }}," +
" jax: ['input/TeX','input/MathML','output/HTML-CSS']," +
" extensions: ['tex2jax.js','mml2jax.js','MathMenu.js','MathZoom.js']," +
" displayAlign: '"+ align +"'," +
" displayIndent: '"+ indent +"'," +
" showMathMenu: {show_menu}," +
" messageStyle: '{message_style}'," +
" tex2jax: {{ " +
" inlineMath: [ ['\\\\(','\\\\)'] ], " +
" displayMath: [ ['$$','$$'] ]," +
" processEscapes: {process_escapes}," +
" preview: '{latex_preview}'," +
" }}, " +
" 'HTML-CSS': {{ " +
" availableFonts: {font_list}," +
" preferredFont: 'STIX'," +
" styles: {{ '.MathJax_Display, .MathJax .mo, .MathJax .mi, .MathJax .mn': {{color: '{color} ! important'}} }}," +
" linebreaks: {{ automatic: "+ linebreak +", width: '90% container' }}," +
" }}, " +
"}}); " +
"if ('{mathjax_font}' !== 'default') {{" +
"MathJax.Hub.Register.StartupHook('HTML-CSS Jax Ready',function () {{" +
"var VARIANT = MathJax.OutputJax['HTML-CSS'].FONTDATA.VARIANT;" +
"VARIANT['normal'].fonts.unshift('MathJax_{mathjax_font}');" +
"VARIANT['bold'].fonts.unshift('MathJax_{mathjax_font}-bold');" +
"VARIANT['italic'].fonts.unshift('MathJax_{mathjax_font}-italic');" +
"VARIANT['-tex-mathit'].fonts.unshift('MathJax_{mathjax_font}-italic');" +
"}});" +
"MathJax.Hub.Register.StartupHook('SVG Jax Ready',function () {{" +
"var VARIANT = MathJax.OutputJax.SVG.FONTDATA.VARIANT;" +
"VARIANT['normal'].fonts.unshift('MathJax_{mathjax_font}');" +
"VARIANT['bold'].fonts.unshift('MathJax_{mathjax_font}-bold');" +
"VARIANT['italic'].fonts.unshift('MathJax_{mathjax_font}-italic');" +
"VARIANT['-tex-mathit'].fonts.unshift('MathJax_{mathjax_font}-italic');" +
"}});" +
"}}";
(document.body || document.getElementsByTagName('head')[0]).appendChild(configscript);
(document.body || document.getElementsByTagName('head')[0]).appendChild(mathjaxscript);
}}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>BuildMachineOSBuild</key>
<string>17E199</string>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleExecutable</key>
<string>FakeSMC</string>
<key>CFBundleIdentifier</key>
<string>org.netkas.driver.FakeSMC</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>FakeSMC</string>
<key>CFBundlePackageType</key>
<string>KEXT</string>
<key>CFBundleShortVersionString</key>
<string>6.26-344-g1cf53906.1787</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleSupportedPlatforms</key>
<array>
<string>MacOSX</string>
</array>
<key>CFBundleVersion</key>
<string>1787</string>
<key>DTCompiler</key>
<string>com.apple.compilers.llvm.clang.1_0</string>
<key>DTPlatformBuild</key>
<string>8E3004b</string>
<key>DTPlatformVersion</key>
<string>GM</string>
<key>DTSDKBuild</key>
<string>12D75</string>
<key>DTSDKName</key>
<string>macosx10.8</string>
<key>DTXcode</key>
<string>0833</string>
<key>DTXcodeBuild</key>
<string>8E3004b</string>
<key>IOKitPersonalities</key>
<dict>
<key>FakeSMC</key>
<dict>
<key>CFBundleIdentifier</key>
<string>org.netkas.driver.FakeSMC</string>
<key>Configuration</key>
<dict>
<key>Clover</key>
<dict>
<key>BEMB</key>
<array>
<string>BEMB</string>
<string>flag</string>
</array>
<key>EPCI</key>
<array>
<string>EPCI</string>
<string>ui32</string>
</array>
<key>RBr</key>
<array>
<string>RBr</string>
<string>ch8*</string>
</array>
<key>REV</key>
<array>
<string>REV</string>
<string>{rev</string>
</array>
<key>RPlt</key>
<array>
<string>RPlt</string>
<string>ch8*</string>
</array>
</dict>
<key>ExceptionKeys</key>
<dict>
<key>CLKH</key>
<integer>1</integer>
<key>CLKT</key>
<integer>1</integer>
<key>MSDW</key>
<integer>1</integer>
<key>NATJ</key>
<integer>1</integer>
<key>NATi</key>
<integer>1</integer>
<key>NTOK</key>
<integer>1</integer>
<key>WKTP</key>
<integer>0</integer>
</dict>
<key>Keys</key>
<dict>
<key>$Adr</key>
<array>
<string>ui32</string>
<data>
AAADAA==
</data>
</array>
<key>$Num</key>
<array>
<string>ui8</string>
<data>
AQ==
</data>
</array>
<key>ACID</key>
<array>
<string>ch8*</string>
<data>
hfwETdgSevQ=
</data>
</array>
<key>ALI0</key>
<array>
<string>{ali</string>
<data>
BwECAA==
</data>
</array>
<key>ALRV</key>
<array>
<string>ui16</string>
<data>
AAE=
</data>
</array>
<key>ALV0</key>
<array>
<string>{alv</string>
<data>
ARAGdQEgALYO4A==
</data>
</array>
<key>FNum</key>
<array>
<string>ui8</string>
<data>
AA==
</data>
</array>
<key>LsNM</key>
<array>
<string>ui8</string>
<data>
AQ==
</data>
</array>
<key>LsbV</key>
<array>
<string>{rev</string>
<data>
AQQKAAY=
</data>
</array>
<key>MSLD</key>
<array>
<string>ui8 </string>
<data>
AA==
</data>
</array>
<key>MSSD</key>
<array>
<string>si8</string>
<data>
BQ==
</data>
</array>
<key>MSSP</key>
<array>
<string>si8</string>
<data>
BQ==
</data>
</array>
<key>NATJ</key>
<array>
<string>ui8</string>
<data>
AA==
</data>
</array>
<key>OSK0</key>
<array>
<string>ch8*</string>
<data>
b3VyaGFyZHdvcmtieXRoZXNld29y
ZHNndWFyZGVkcGw=
</data>
</array>
<key>OSK1</key>
<array>
<string>ch8*</string>
<data>
ZWFzZWRvbnRzdGVhbChjKUFwcGxl
Q29tcHV0ZXJJbmM=
</data>
</array>
<key>REV </key>
<array>
<string>{rev</string>
<data>
ATAPAAAD
</data>
</array>
<key>RMde</key>
<array>
<string>char</string>
<data>
QQ==
</data>
</array>
<key>RVBF</key>
<array>
<string>{rev</string>
<data>
ATAPAAAD
</data>
</array>
<key>RVUF</key>
<array>
<string>{rev</string>
<data>
ATAPAAAD
</data>
</array>
</dict>
<key>Types</key>
<dict>
<key>BEMB</key>
<string>flag</string>
<key>CLKC</key>
<string>{clc</string>
<key>CLKH</key>
<string>{clh</string>
<key>CLKT</key>
<string>ui32</string>
<key>CLWK</key>
<string>ui32</string>
<key>EPCI</key>
<string>flag</string>
<key>LSSS</key>
<string>{lso</string>
<key>MSDS</key>
<string>ui8</string>
<key>MSDW</key>
<string>flag</string>
<key>MSPS</key>
<string>{msp</string>
<key>RPlt</key>
<string>ch8*</string>
</dict>
<key>debug</key>
<false/>
<key>smc-compatible</key>
<string>smc-napa</string>
<key>trace</key>
<false/>
</dict>
<key>IOClass</key>
<string>FakeSMC</string>
<key>IOMatchCategory</key>
<string>FakeSMC</string>
<key>IOProviderClass</key>
<string>AppleACPIPlatformExpert</string>
<key>IOResourceMatch</key>
<string>FakeSMCKeyStore</string>
<key>RM,Build</key>
<string>Release-rehabman</string>
<key>RM,Version</key>
<string>FakeSMC 1787</string>
</dict>
<key>FakeSMCKeyStore</key>
<dict>
<key>CFBundleIdentifier</key>
<string>org.netkas.driver.FakeSMC</string>
<key>IOClass</key>
<string>FakeSMCKeyStore</string>
<key>IOMatchCategory</key>
<string>FakeSMCKeyStore</string>
<key>IOProviderClass</key>
<string>IOResources</string>
<key>IOResourceMatch</key>
<string>IOKit</string>
<key>IOUserClientClass</key>
<string>FakeSMCKeyStoreUserClient</string>
</dict>
</dict>
<key>NSHumanReadableCopyright</key>
<string>Copyright © 2017 netkas. All rights reserved.</string>
<key>OSBundleCompatibleVersion</key>
<string>1429</string>
<key>OSBundleLibraries</key>
<dict>
<key>com.apple.iokit.IOACPIFamily</key>
<string>1.0.0d1</string>
<key>com.apple.kpi.bsd</key>
<string>10.6</string>
<key>com.apple.kpi.iokit</key>
<string>10.6</string>
<key>com.apple.kpi.libkern</key>
<string>10.6</string>
<key>com.apple.kpi.mach</key>
<string>10.6</string>
<key>com.apple.kpi.unsupported</key>
<string>10.6</string>
</dict>
<key>OSBundleRequired</key>
<string>Root</string>
<key>Source Code</key>
<string>"https://github.com/RehabMan/OS-X-FakeSMC-kozlek.git"</string>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
<hkobject name="#zcbe$34" class="BSDirectAtModifier" signature="0x19a005c0">
<hkparam name="variableBindingSet">#zcbe$35</hkparam>
<hkparam name="userData">0</hkparam>
<hkparam name="name">Z_AimAttackNPCDAM</hkparam>
<hkparam name="enable">true</hkparam>
<hkparam name="directAtTarget">true</hkparam>
<hkparam name="sourceBoneIndex">-1</hkparam>
<hkparam name="startBoneIndex">25</hkparam>
<hkparam name="endBoneIndex">26</hkparam>
<hkparam name="limitHeadingDegrees">45.000000</hkparam>
<hkparam name="limitPitchDegrees">45.000000</hkparam>
<hkparam name="offsetHeadingDegrees">0.000000</hkparam>
<hkparam name="offsetPitchDegrees">0.000000</hkparam>
<hkparam name="onGain">0.550000</hkparam>
<hkparam name="offGain">0.550000</hkparam>
<hkparam name="targetLocation">
0.000000
0.000000
0.000000
0.000000
</hkparam>
<hkparam name="userInfo">2</hkparam>
<hkparam name="directAtCamera">false</hkparam>
<hkparam name="directAtCameraX">0.000000</hkparam>
<hkparam name="directAtCameraY">0.000000</hkparam>
<hkparam name="directAtCameraZ">0.000000</hkparam>
<hkparam name="active">false</hkparam>
<hkparam name="currentHeadingOffset">0.000000</hkparam>
<hkparam name="currentPitchOffset">0.000000</hkparam>
</hkobject>
| {
"pile_set_name": "Github"
} |
prefix=/usr
exec_prefix=${prefix}
libdir=${exec_prefix}/lib
includedir=${prefix}/include
Name: OpenSSL-libcrypto
Description: OpenSSL cryptography library
Version: 0.9.7l
Requires:
Libs: -L${libdir} -lcrypto -lz
Cflags: -I${includedir}
| {
"pile_set_name": "Github"
} |
@echo off
call python.bat translator.py --root-dir ..
pause | {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
#if defined(__i386__) || defined(__x86_64__)
#include <unistd.h>
#include <errno.h>
#include <stdio.h>
#include <stdint.h>
#include <pci/pci.h>
#include "helpers/helpers.h"
#define MSR_AMD_PSTATE_STATUS 0xc0010063
#define MSR_AMD_PSTATE 0xc0010064
#define MSR_AMD_PSTATE_LIMIT 0xc0010061
union msr_pstate {
struct {
unsigned fid:6;
unsigned did:3;
unsigned vid:7;
unsigned res1:6;
unsigned nbdid:1;
unsigned res2:2;
unsigned nbvid:7;
unsigned iddval:8;
unsigned idddiv:2;
unsigned res3:21;
unsigned en:1;
} bits;
struct {
unsigned fid:8;
unsigned did:6;
unsigned vid:8;
unsigned iddval:8;
unsigned idddiv:2;
unsigned res1:31;
unsigned en:1;
} fam17h_bits;
unsigned long long val;
};
static int get_did(int family, union msr_pstate pstate)
{
int t;
if (family == 0x12)
t = pstate.val & 0xf;
else if (family == 0x17 || family == 0x18)
t = pstate.fam17h_bits.did;
else
t = pstate.bits.did;
return t;
}
static int get_cof(int family, union msr_pstate pstate)
{
int t;
int fid, did, cof;
did = get_did(family, pstate);
if (family == 0x17 || family == 0x18) {
fid = pstate.fam17h_bits.fid;
cof = 200 * fid / did;
} else {
t = 0x10;
fid = pstate.bits.fid;
if (family == 0x11)
t = 0x8;
cof = (100 * (fid + t)) >> did;
}
return cof;
}
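/*
 * Worked example of the COF arithmetic above (operand values are made up
 * for illustration, not read from any real part):
 *
 *   pre-fam17h, family 0x10: fid = 10, did = 0, t = 0x10
 *     cof = (100 * (10 + 16)) >> 0 = 2600 MHz
 *   fam17h/fam18h: fid = 120, did = 8 (divisor in eighths)
 *     cof = 200 * 120 / 8 = 3000 MHz
 */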
/* Needs:
* cpu -> the cpu that gets evaluated
* cpu_family -> The cpu's family (0x10, 0x12,...)
 * boost_states -> how many boost states the machine supports
*
* Fills up:
* pstates -> a pointer to an array of size MAX_HW_PSTATES
* must be initialized with zeros.
* All available HW pstates (including boost states)
 * no -> number of pstates the above array was filled with
*
* returns zero on success, -1 on failure
*/
int decode_pstates(unsigned int cpu, unsigned int cpu_family,
int boost_states, unsigned long *pstates, int *no)
{
int i, psmax, pscur;
union msr_pstate pstate;
unsigned long long val;
/* Only read out frequencies from HW when CPU might be boostable
to keep the code as short and clean as possible.
Otherwise frequencies are exported via ACPI tables.
*/
if (cpu_family < 0x10 || cpu_family == 0x14)
return -1;
if (read_msr(cpu, MSR_AMD_PSTATE_LIMIT, &val))
return -1;
psmax = (val >> 4) & 0x7;
if (read_msr(cpu, MSR_AMD_PSTATE_STATUS, &val))
return -1;
pscur = val & 0x7;
pscur += boost_states;
psmax += boost_states;
for (i = 0; i <= psmax; i++) {
if (i >= MAX_HW_PSTATES) {
fprintf(stderr, "HW pstates [%d] exceeding max [%d]\n",
psmax, MAX_HW_PSTATES);
return -1;
}
if (read_msr(cpu, MSR_AMD_PSTATE + i, &pstate.val))
return -1;
if ((cpu_family == 0x17) && (!pstate.fam17h_bits.en))
continue;
else if (!pstate.bits.en)
continue;
pstates[i] = get_cof(cpu_family, pstate);
}
*no = i;
return 0;
}
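/*
 * Hypothetical caller sketch (not part of this file; cpu 0 and family
 * 0x17 are assumptions made purely for illustration):
 *
 *   unsigned long pstates[MAX_HW_PSTATES] = { 0 };
 *   int i, no, active, states;
 *
 *   if (!amd_pci_get_num_boost_states(&active, &states) &&
 *       !decode_pstates(0, 0x17, states, pstates, &no))
 *       for (i = 0; i < no; i++)
 *           printf("P%d: %lu MHz\n", i, pstates[i]);
 */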
int amd_pci_get_num_boost_states(int *active, int *states)
{
struct pci_access *pci_acc;
struct pci_dev *device;
uint8_t val = 0;
*active = *states = 0;
device = pci_slot_func_init(&pci_acc, 0x18, 4);
if (device == NULL)
return -ENODEV;
val = pci_read_byte(device, 0x15c);
if (val & 3)
*active = 1;
else
*active = 0;
*states = (val >> 2) & 7;
pci_cleanup(pci_acc);
return 0;
}
#endif /* defined(__i386__) || defined(__x86_64__) */
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0-only
/*
* Copyright (C) Sistina Software, Inc. 1997-2003 All rights reserved.
* Copyright (C) 2004-2006 Red Hat, Inc. All rights reserved.
*/
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/completion.h>
#include <linux/buffer_head.h>
#include <linux/mempool.h>
#include <linux/gfs2_ondisk.h>
#include <linux/bio.h>
#include <linux/fs.h>
#include <linux/list_sort.h>
#include <linux/blkdev.h>
#include "bmap.h"
#include "dir.h"
#include "gfs2.h"
#include "incore.h"
#include "inode.h"
#include "glock.h"
#include "log.h"
#include "lops.h"
#include "meta_io.h"
#include "recovery.h"
#include "rgrp.h"
#include "trans.h"
#include "util.h"
#include "trace_gfs2.h"
/**
* gfs2_pin - Pin a buffer in memory
* @sdp: The superblock
* @bh: The buffer to be pinned
*
* The log lock must be held when calling this function
*/
void gfs2_pin(struct gfs2_sbd *sdp, struct buffer_head *bh)
{
struct gfs2_bufdata *bd;
BUG_ON(!current->journal_info);
clear_buffer_dirty(bh);
if (test_set_buffer_pinned(bh))
gfs2_assert_withdraw(sdp, 0);
if (!buffer_uptodate(bh))
gfs2_io_error_bh_wd(sdp, bh);
bd = bh->b_private;
/* If this buffer is in the AIL and it has already been written
 * back to its in-place disk block, remove it from the AIL.
*/
spin_lock(&sdp->sd_ail_lock);
if (bd->bd_tr)
list_move(&bd->bd_ail_st_list, &bd->bd_tr->tr_ail2_list);
spin_unlock(&sdp->sd_ail_lock);
get_bh(bh);
atomic_inc(&sdp->sd_log_pinned);
trace_gfs2_pin(bd, 1);
}
static bool buffer_is_rgrp(const struct gfs2_bufdata *bd)
{
return bd->bd_gl->gl_name.ln_type == LM_TYPE_RGRP;
}
static void maybe_release_space(struct gfs2_bufdata *bd)
{
struct gfs2_glock *gl = bd->bd_gl;
struct gfs2_sbd *sdp = gl->gl_name.ln_sbd;
struct gfs2_rgrpd *rgd = gfs2_glock2rgrp(gl);
unsigned int index = bd->bd_bh->b_blocknr - gl->gl_name.ln_number;
struct gfs2_bitmap *bi = rgd->rd_bits + index;
if (bi->bi_clone == NULL)
return;
if (sdp->sd_args.ar_discard)
gfs2_rgrp_send_discards(sdp, rgd->rd_data0, bd->bd_bh, bi, 1, NULL);
memcpy(bi->bi_clone + bi->bi_offset,
bd->bd_bh->b_data + bi->bi_offset, bi->bi_bytes);
clear_bit(GBF_FULL, &bi->bi_flags);
rgd->rd_free_clone = rgd->rd_free;
rgd->rd_extfail_pt = rgd->rd_free;
}
/**
* gfs2_unpin - Unpin a buffer
* @sdp: the filesystem the buffer belongs to
* @bh: The buffer to unpin
 * @tr: The system transaction being flushed
*
*/
static void gfs2_unpin(struct gfs2_sbd *sdp, struct buffer_head *bh,
struct gfs2_trans *tr)
{
struct gfs2_bufdata *bd = bh->b_private;
BUG_ON(!buffer_uptodate(bh));
BUG_ON(!buffer_pinned(bh));
lock_buffer(bh);
mark_buffer_dirty(bh);
clear_buffer_pinned(bh);
if (buffer_is_rgrp(bd))
maybe_release_space(bd);
spin_lock(&sdp->sd_ail_lock);
if (bd->bd_tr) {
list_del(&bd->bd_ail_st_list);
brelse(bh);
} else {
struct gfs2_glock *gl = bd->bd_gl;
list_add(&bd->bd_ail_gl_list, &gl->gl_ail_list);
atomic_inc(&gl->gl_ail_count);
}
bd->bd_tr = tr;
list_add(&bd->bd_ail_st_list, &tr->tr_ail1_list);
spin_unlock(&sdp->sd_ail_lock);
clear_bit(GLF_LFLUSH, &bd->bd_gl->gl_flags);
trace_gfs2_pin(bd, 0);
unlock_buffer(bh);
atomic_dec(&sdp->sd_log_pinned);
}
void gfs2_log_incr_head(struct gfs2_sbd *sdp)
{
BUG_ON((sdp->sd_log_flush_head == sdp->sd_log_tail) &&
(sdp->sd_log_flush_head != sdp->sd_log_head));
if (++sdp->sd_log_flush_head == sdp->sd_jdesc->jd_blocks)
sdp->sd_log_flush_head = 0;
}
u64 gfs2_log_bmap(struct gfs2_jdesc *jd, unsigned int lblock)
{
struct gfs2_journal_extent *je;
list_for_each_entry(je, &jd->extent_list, list) {
if (lblock >= je->lblock && lblock < je->lblock + je->blocks)
return je->dblock + lblock - je->lblock;
}
return -1;
}
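/*
 * Example of the extent lookup above (numbers are made up): given an
 * extent with lblock = 0, blocks = 16, dblock = 1000, journal block
 * lblock = 5 lies inside the extent and maps to device block
 * 1000 + 5 - 0 = 1005.
 */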
/**
* gfs2_end_log_write_bh - end log write of pagecache data with buffers
* @sdp: The superblock
* @bvec: The bio_vec
* @error: The i/o status
*
* This finds the relevant buffers and unlocks them and sets the
* error flag according to the status of the i/o request. This is
* used when the log is writing data which has an in-place version
* that is pinned in the pagecache.
*/
static void gfs2_end_log_write_bh(struct gfs2_sbd *sdp,
struct bio_vec *bvec,
blk_status_t error)
{
struct buffer_head *bh, *next;
struct page *page = bvec->bv_page;
unsigned size;
bh = page_buffers(page);
size = bvec->bv_len;
while (bh_offset(bh) < bvec->bv_offset)
bh = bh->b_this_page;
do {
if (error)
mark_buffer_write_io_error(bh);
unlock_buffer(bh);
next = bh->b_this_page;
size -= bh->b_size;
brelse(bh);
bh = next;
} while(bh && size);
}
/**
* gfs2_end_log_write - end of i/o to the log
* @bio: The bio
*
* Each bio_vec contains either data from the pagecache or data
* relating to the log itself. Here we iterate over the bio_vec
* array, processing both kinds of data.
*
*/
static void gfs2_end_log_write(struct bio *bio)
{
struct gfs2_sbd *sdp = bio->bi_private;
struct bio_vec *bvec;
struct page *page;
struct bvec_iter_all iter_all;
if (bio->bi_status) {
if (!cmpxchg(&sdp->sd_log_error, 0, (int)bio->bi_status))
fs_err(sdp, "Error %d writing to journal, jid=%u\n",
bio->bi_status, sdp->sd_jdesc->jd_jid);
gfs2_withdraw_delayed(sdp);
/* prevent more writes to the journal */
clear_bit(SDF_JOURNAL_LIVE, &sdp->sd_flags);
wake_up(&sdp->sd_logd_waitq);
}
bio_for_each_segment_all(bvec, bio, iter_all) {
page = bvec->bv_page;
if (page_has_buffers(page))
gfs2_end_log_write_bh(sdp, bvec, bio->bi_status);
else
mempool_free(page, gfs2_page_pool);
}
bio_put(bio);
if (atomic_dec_and_test(&sdp->sd_log_in_flight))
wake_up(&sdp->sd_log_flush_wait);
}
/**
* gfs2_log_submit_bio - Submit any pending log bio
* @biop: Address of the bio pointer
* @opf: REQ_OP | op_flags
*
* Submit any pending part-built or full bio to the block device. If
* there is no pending bio, then this is a no-op.
*/
void gfs2_log_submit_bio(struct bio **biop, int opf)
{
struct bio *bio = *biop;
if (bio) {
struct gfs2_sbd *sdp = bio->bi_private;
atomic_inc(&sdp->sd_log_in_flight);
bio->bi_opf = opf;
submit_bio(bio);
*biop = NULL;
}
}
/**
* gfs2_log_alloc_bio - Allocate a bio
* @sdp: The super block
* @blkno: The device block number we want to write to
* @end_io: The bi_end_io callback
*
* Allocate a new bio, initialize it with the given parameters and return it.
*
* Returns: The newly allocated bio
*/
static struct bio *gfs2_log_alloc_bio(struct gfs2_sbd *sdp, u64 blkno,
bio_end_io_t *end_io)
{
struct super_block *sb = sdp->sd_vfs;
struct bio *bio = bio_alloc(GFP_NOIO, BIO_MAX_PAGES);
bio->bi_iter.bi_sector = blkno << sdp->sd_fsb2bb_shift;
bio_set_dev(bio, sb->s_bdev);
bio->bi_end_io = end_io;
bio->bi_private = sdp;
return bio;
}
/**
* gfs2_log_get_bio - Get cached log bio, or allocate a new one
* @sdp: The super block
* @blkno: The device block number we want to write to
 * @biop: Address of the bio pointer to get or allocate
* @op: REQ_OP
* @end_io: The bi_end_io callback
* @flush: Always flush the current bio and allocate a new one?
*
* If there is a cached bio, then if the next block number is sequential
* with the previous one, return it, otherwise flush the bio to the
* device. If there is no cached bio, or we just flushed it, then
* allocate a new one.
*
* Returns: The bio to use for log writes
*/
static struct bio *gfs2_log_get_bio(struct gfs2_sbd *sdp, u64 blkno,
struct bio **biop, int op,
bio_end_io_t *end_io, bool flush)
{
struct bio *bio = *biop;
if (bio) {
u64 nblk;
nblk = bio_end_sector(bio);
nblk >>= sdp->sd_fsb2bb_shift;
if (blkno == nblk && !flush)
return bio;
gfs2_log_submit_bio(biop, op);
}
*biop = gfs2_log_alloc_bio(sdp, blkno, end_io);
return *biop;
}
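/*
 * Note on the sequentiality check above: bio_end_sector() counts 512-byte
 * sectors, so it is shifted down by sd_fsb2bb_shift to get a filesystem
 * block number. E.g. with 4KiB blocks (sd_fsb2bb_shift == 3), a cached bio
 * with bio_end_sector() == 80 ends just before block 10, so a write for
 * blkno == 10 is sequential and can reuse the bio instead of submitting it.
 */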
/**
* gfs2_log_write - write to log
* @sdp: the filesystem
* @page: the page to write
* @size: the size of the data to write
* @offset: the offset within the page
* @blkno: block number of the log entry
*
* Try and add the page segment to the current bio. If that fails,
* submit the current bio to the device and create a new one, and
* then add the page segment to that.
*/
void gfs2_log_write(struct gfs2_sbd *sdp, struct page *page,
unsigned size, unsigned offset, u64 blkno)
{
struct bio *bio;
int ret;
bio = gfs2_log_get_bio(sdp, blkno, &sdp->sd_log_bio, REQ_OP_WRITE,
gfs2_end_log_write, false);
ret = bio_add_page(bio, page, size, offset);
if (ret == 0) {
bio = gfs2_log_get_bio(sdp, blkno, &sdp->sd_log_bio,
REQ_OP_WRITE, gfs2_end_log_write, true);
ret = bio_add_page(bio, page, size, offset);
WARN_ON(ret == 0);
}
}
/**
* gfs2_log_write_bh - write a buffer's content to the log
* @sdp: The super block
* @bh: The buffer pointing to the in-place location
*
* This writes the content of the buffer to the next available location
* in the log. The buffer will be unlocked once the i/o to the log has
* completed.
*/
static void gfs2_log_write_bh(struct gfs2_sbd *sdp, struct buffer_head *bh)
{
u64 dblock;
dblock = gfs2_log_bmap(sdp->sd_jdesc, sdp->sd_log_flush_head);
gfs2_log_incr_head(sdp);
gfs2_log_write(sdp, bh->b_page, bh->b_size, bh_offset(bh), dblock);
}
/**
* gfs2_log_write_page - write one block stored in a page, into the log
* @sdp: The superblock
* @page: The struct page
*
* This writes the first block-sized part of the page into the log. Note
* that the page must have been allocated from the gfs2_page_pool mempool
* and that after this has been called, ownership has been transferred and
* the page may be freed at any time.
*/
void gfs2_log_write_page(struct gfs2_sbd *sdp, struct page *page)
{
struct super_block *sb = sdp->sd_vfs;
u64 dblock;
dblock = gfs2_log_bmap(sdp->sd_jdesc, sdp->sd_log_flush_head);
gfs2_log_incr_head(sdp);
gfs2_log_write(sdp, page, sb->s_blocksize, 0, dblock);
}
/**
* gfs2_end_log_read - end I/O callback for reads from the log
* @bio: The bio
*
* Simply unlock the pages in the bio. The main thread will wait on them and
* process them in order as necessary.
*/
static void gfs2_end_log_read(struct bio *bio)
{
struct page *page;
struct bio_vec *bvec;
struct bvec_iter_all iter_all;
bio_for_each_segment_all(bvec, bio, iter_all) {
page = bvec->bv_page;
if (bio->bi_status) {
int err = blk_status_to_errno(bio->bi_status);
SetPageError(page);
mapping_set_error(page->mapping, err);
}
unlock_page(page);
}
bio_put(bio);
}
/**
* gfs2_jhead_pg_srch - Look for the journal head in a given page.
 * @jd: The journal descriptor
 * @head: The journal head to fill in
 * @page: The page to look in
 *
 * Returns: true if found, false otherwise.
*/
static bool gfs2_jhead_pg_srch(struct gfs2_jdesc *jd,
struct gfs2_log_header_host *head,
struct page *page)
{
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
struct gfs2_log_header_host uninitialized_var(lh);
void *kaddr = kmap_atomic(page);
unsigned int offset;
bool ret = false;
for (offset = 0; offset < PAGE_SIZE; offset += sdp->sd_sb.sb_bsize) {
if (!__get_log_header(sdp, kaddr + offset, 0, &lh)) {
if (lh.lh_sequence >= head->lh_sequence)
*head = lh;
else {
ret = true;
break;
}
}
}
kunmap_atomic(kaddr);
return ret;
}
/**
* gfs2_jhead_process_page - Search/cleanup a page
* @jd: The journal descriptor
 * @index: Index of the page to look into
 * @head: The journal head we are looking for
 * @done: If set, perform only cleanup, else search and set if found.
 *
 * Find the page with 'index' in the journal's mapping. Search the page for
 * the journal head if requested (*done == false). Release refs on the
* page so the page cache can reclaim it (put_page() twice). We grabbed a
* reference on this page two times, first when we did a find_or_create_page()
* to obtain the page to add it to the bio and second when we do a
* find_get_page() here to get the page to wait on while I/O on it is being
* completed.
* This function is also used to free up a page we might've grabbed but not
* used. Maybe we added it to a bio, but not submitted it for I/O. Or we
* submitted the I/O, but we already found the jhead so we only need to drop
* our references to the page.
*/
static void gfs2_jhead_process_page(struct gfs2_jdesc *jd, unsigned long index,
struct gfs2_log_header_host *head,
bool *done)
{
struct page *page;
page = find_get_page(jd->jd_inode->i_mapping, index);
wait_on_page_locked(page);
if (PageError(page))
*done = true;
if (!*done)
*done = gfs2_jhead_pg_srch(jd, head, page);
put_page(page); /* Once for find_get_page */
put_page(page); /* Once more for find_or_create_page */
}
static struct bio *gfs2_chain_bio(struct bio *prev, unsigned int nr_iovecs)
{
struct bio *new;
new = bio_alloc(GFP_NOIO, nr_iovecs);
bio_copy_dev(new, prev);
new->bi_iter.bi_sector = bio_end_sector(prev);
new->bi_opf = prev->bi_opf;
new->bi_write_hint = prev->bi_write_hint;
bio_chain(new, prev);
submit_bio(prev);
return new;
}
/**
* gfs2_find_jhead - find the head of a log
* @jd: The journal descriptor
* @head: The log descriptor for the head of the log is returned here
*
* Do a search of a journal by reading it in large chunks using bios and find
 * the valid log entry with the highest sequence number (i.e. the log head).
*
* Returns: 0 on success, errno otherwise
*/
int gfs2_find_jhead(struct gfs2_jdesc *jd, struct gfs2_log_header_host *head,
bool keep_cache)
{
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
struct address_space *mapping = jd->jd_inode->i_mapping;
unsigned int block = 0, blocks_submitted = 0, blocks_read = 0;
unsigned int bsize = sdp->sd_sb.sb_bsize, off;
unsigned int bsize_shift = sdp->sd_sb.sb_bsize_shift;
unsigned int shift = PAGE_SHIFT - bsize_shift;
unsigned int max_blocks = 2 * 1024 * 1024 >> bsize_shift;
struct gfs2_journal_extent *je;
int sz, ret = 0;
struct bio *bio = NULL;
struct page *page = NULL;
bool done = false;
errseq_t since;
memset(head, 0, sizeof(*head));
if (list_empty(&jd->extent_list))
gfs2_map_journal_extents(sdp, jd);
since = filemap_sample_wb_err(mapping);
list_for_each_entry(je, &jd->extent_list, list) {
u64 dblock = je->dblock;
for (; block < je->lblock + je->blocks; block++, dblock++) {
if (!page) {
page = find_or_create_page(mapping,
block >> shift, GFP_NOFS);
if (!page) {
ret = -ENOMEM;
done = true;
goto out;
}
off = 0;
}
if (bio && (off || block < blocks_submitted + max_blocks)) {
sector_t sector = dblock << sdp->sd_fsb2bb_shift;
if (bio_end_sector(bio) == sector) {
sz = bio_add_page(bio, page, bsize, off);
if (sz == bsize)
goto block_added;
}
if (off) {
unsigned int blocks =
(PAGE_SIZE - off) >> bsize_shift;
bio = gfs2_chain_bio(bio, blocks);
goto add_block_to_new_bio;
}
}
if (bio) {
blocks_submitted = block;
submit_bio(bio);
}
bio = gfs2_log_alloc_bio(sdp, dblock, gfs2_end_log_read);
bio->bi_opf = REQ_OP_READ;
add_block_to_new_bio:
sz = bio_add_page(bio, page, bsize, off);
BUG_ON(sz != bsize);
block_added:
off += bsize;
if (off == PAGE_SIZE)
page = NULL;
if (blocks_submitted <= blocks_read + max_blocks) {
/* Keep at least one bio in flight */
continue;
}
gfs2_jhead_process_page(jd, blocks_read >> shift, head, &done);
blocks_read += PAGE_SIZE >> bsize_shift;
if (done)
goto out; /* found */
}
}
out:
if (bio)
submit_bio(bio);
while (blocks_read < block) {
gfs2_jhead_process_page(jd, blocks_read >> shift, head, &done);
blocks_read += PAGE_SIZE >> bsize_shift;
}
if (!ret)
ret = filemap_check_wb_err(mapping, since);
if (!keep_cache)
truncate_inode_pages(mapping, 0);
return ret;
}
static struct page *gfs2_get_log_desc(struct gfs2_sbd *sdp, u32 ld_type,
u32 ld_length, u32 ld_data1)
{
struct page *page = mempool_alloc(gfs2_page_pool, GFP_NOIO);
struct gfs2_log_descriptor *ld = page_address(page);
clear_page(ld);
ld->ld_header.mh_magic = cpu_to_be32(GFS2_MAGIC);
ld->ld_header.mh_type = cpu_to_be32(GFS2_METATYPE_LD);
ld->ld_header.mh_format = cpu_to_be32(GFS2_FORMAT_LD);
ld->ld_type = cpu_to_be32(ld_type);
ld->ld_length = cpu_to_be32(ld_length);
ld->ld_data1 = cpu_to_be32(ld_data1);
ld->ld_data2 = 0;
return page;
}
static void gfs2_check_magic(struct buffer_head *bh)
{
void *kaddr;
__be32 *ptr;
clear_buffer_escaped(bh);
kaddr = kmap_atomic(bh->b_page);
ptr = kaddr + bh_offset(bh);
if (*ptr == cpu_to_be32(GFS2_MAGIC))
set_buffer_escaped(bh);
kunmap_atomic(kaddr);
}
static int blocknr_cmp(void *priv, struct list_head *a, struct list_head *b)
{
struct gfs2_bufdata *bda, *bdb;
bda = list_entry(a, struct gfs2_bufdata, bd_list);
bdb = list_entry(b, struct gfs2_bufdata, bd_list);
if (bda->bd_bh->b_blocknr < bdb->bd_bh->b_blocknr)
return -1;
if (bda->bd_bh->b_blocknr > bdb->bd_bh->b_blocknr)
return 1;
return 0;
}
static void gfs2_before_commit(struct gfs2_sbd *sdp, unsigned int limit,
unsigned int total, struct list_head *blist,
bool is_databuf)
{
struct gfs2_log_descriptor *ld;
struct gfs2_bufdata *bd1 = NULL, *bd2;
struct page *page;
unsigned int num;
unsigned n;
__be64 *ptr;
gfs2_log_lock(sdp);
list_sort(NULL, blist, blocknr_cmp);
bd1 = bd2 = list_prepare_entry(bd1, blist, bd_list);
while(total) {
num = total;
if (total > limit)
num = limit;
gfs2_log_unlock(sdp);
page = gfs2_get_log_desc(sdp,
is_databuf ? GFS2_LOG_DESC_JDATA :
GFS2_LOG_DESC_METADATA, num + 1, num);
ld = page_address(page);
gfs2_log_lock(sdp);
ptr = (__be64 *)(ld + 1);
n = 0;
list_for_each_entry_continue(bd1, blist, bd_list) {
*ptr++ = cpu_to_be64(bd1->bd_bh->b_blocknr);
if (is_databuf) {
gfs2_check_magic(bd1->bd_bh);
*ptr++ = cpu_to_be64(buffer_escaped(bd1->bd_bh) ? 1 : 0);
}
if (++n >= num)
break;
}
gfs2_log_unlock(sdp);
gfs2_log_write_page(sdp, page);
gfs2_log_lock(sdp);
n = 0;
list_for_each_entry_continue(bd2, blist, bd_list) {
get_bh(bd2->bd_bh);
gfs2_log_unlock(sdp);
lock_buffer(bd2->bd_bh);
if (buffer_escaped(bd2->bd_bh)) {
void *kaddr;
page = mempool_alloc(gfs2_page_pool, GFP_NOIO);
ptr = page_address(page);
kaddr = kmap_atomic(bd2->bd_bh->b_page);
memcpy(ptr, kaddr + bh_offset(bd2->bd_bh),
bd2->bd_bh->b_size);
kunmap_atomic(kaddr);
*(__be32 *)ptr = 0;
clear_buffer_escaped(bd2->bd_bh);
unlock_buffer(bd2->bd_bh);
brelse(bd2->bd_bh);
gfs2_log_write_page(sdp, page);
} else {
gfs2_log_write_bh(sdp, bd2->bd_bh);
}
gfs2_log_lock(sdp);
if (++n >= num)
break;
}
BUG_ON(total < num);
total -= num;
}
gfs2_log_unlock(sdp);
}
static void buf_lo_before_commit(struct gfs2_sbd *sdp, struct gfs2_trans *tr)
{
unsigned int limit = buf_limit(sdp); /* 503 for 4k blocks */
unsigned int nbuf;
if (tr == NULL)
return;
nbuf = tr->tr_num_buf_new - tr->tr_num_buf_rm;
gfs2_before_commit(sdp, limit, nbuf, &tr->tr_buf, 0);
}
static void buf_lo_after_commit(struct gfs2_sbd *sdp, struct gfs2_trans *tr)
{
struct list_head *head;
struct gfs2_bufdata *bd;
if (tr == NULL)
return;
head = &tr->tr_buf;
while (!list_empty(head)) {
bd = list_first_entry(head, struct gfs2_bufdata, bd_list);
list_del_init(&bd->bd_list);
gfs2_unpin(sdp, bd->bd_bh, tr);
}
}
static void buf_lo_before_scan(struct gfs2_jdesc *jd,
struct gfs2_log_header_host *head, int pass)
{
if (pass != 0)
return;
jd->jd_found_blocks = 0;
jd->jd_replayed_blocks = 0;
}
static int buf_lo_scan_elements(struct gfs2_jdesc *jd, u32 start,
struct gfs2_log_descriptor *ld, __be64 *ptr,
int pass)
{
struct gfs2_inode *ip = GFS2_I(jd->jd_inode);
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
struct gfs2_glock *gl = ip->i_gl;
unsigned int blks = be32_to_cpu(ld->ld_data1);
struct buffer_head *bh_log, *bh_ip;
u64 blkno;
int error = 0;
if (pass != 1 || be32_to_cpu(ld->ld_type) != GFS2_LOG_DESC_METADATA)
return 0;
gfs2_replay_incr_blk(jd, &start);
for (; blks; gfs2_replay_incr_blk(jd, &start), blks--) {
blkno = be64_to_cpu(*ptr++);
jd->jd_found_blocks++;
if (gfs2_revoke_check(jd, blkno, start))
continue;
error = gfs2_replay_read_block(jd, start, &bh_log);
if (error)
return error;
bh_ip = gfs2_meta_new(gl, blkno);
memcpy(bh_ip->b_data, bh_log->b_data, bh_log->b_size);
if (gfs2_meta_check(sdp, bh_ip))
error = -EIO;
else {
struct gfs2_meta_header *mh =
(struct gfs2_meta_header *)bh_ip->b_data;
if (mh->mh_type == cpu_to_be32(GFS2_METATYPE_RG)) {
struct gfs2_rgrpd *rgd;
rgd = gfs2_blk2rgrpd(sdp, blkno, false);
if (rgd && rgd->rd_addr == blkno &&
rgd->rd_bits && rgd->rd_bits->bi_bh) {
fs_info(sdp, "Replaying 0x%llx but we "
"already have a bh!\n",
(unsigned long long)blkno);
fs_info(sdp, "busy:%d, pinned:%d\n",
buffer_busy(rgd->rd_bits->bi_bh) ? 1 : 0,
buffer_pinned(rgd->rd_bits->bi_bh));
gfs2_dump_glock(NULL, rgd->rd_gl, true);
}
}
mark_buffer_dirty(bh_ip);
}
brelse(bh_log);
brelse(bh_ip);
if (error)
break;
jd->jd_replayed_blocks++;
}
return error;
}
/**
* gfs2_meta_sync - Sync all buffers associated with a glock
* @gl: The glock
*
*/
static void gfs2_meta_sync(struct gfs2_glock *gl)
{
struct address_space *mapping = gfs2_glock2aspace(gl);
struct gfs2_sbd *sdp = gl->gl_name.ln_sbd;
int error;
if (mapping == NULL)
mapping = &sdp->sd_aspace;
filemap_fdatawrite(mapping);
error = filemap_fdatawait(mapping);
if (error)
gfs2_io_error(gl->gl_name.ln_sbd);
}
static void buf_lo_after_scan(struct gfs2_jdesc *jd, int error, int pass)
{
struct gfs2_inode *ip = GFS2_I(jd->jd_inode);
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
if (error) {
gfs2_meta_sync(ip->i_gl);
return;
}
if (pass != 1)
return;
gfs2_meta_sync(ip->i_gl);
fs_info(sdp, "jid=%u: Replayed %u of %u blocks\n",
jd->jd_jid, jd->jd_replayed_blocks, jd->jd_found_blocks);
}
static void revoke_lo_before_commit(struct gfs2_sbd *sdp, struct gfs2_trans *tr)
{
struct gfs2_meta_header *mh;
unsigned int offset;
struct list_head *head = &sdp->sd_log_revokes;
struct gfs2_bufdata *bd;
struct page *page;
unsigned int length;
gfs2_write_revokes(sdp);
if (!sdp->sd_log_num_revoke)
return;
length = gfs2_struct2blk(sdp, sdp->sd_log_num_revoke);
page = gfs2_get_log_desc(sdp, GFS2_LOG_DESC_REVOKE, length, sdp->sd_log_num_revoke);
offset = sizeof(struct gfs2_log_descriptor);
list_for_each_entry(bd, head, bd_list) {
sdp->sd_log_num_revoke--;
if (offset + sizeof(u64) > sdp->sd_sb.sb_bsize) {
gfs2_log_write_page(sdp, page);
page = mempool_alloc(gfs2_page_pool, GFP_NOIO);
mh = page_address(page);
clear_page(mh);
mh->mh_magic = cpu_to_be32(GFS2_MAGIC);
mh->mh_type = cpu_to_be32(GFS2_METATYPE_LB);
mh->mh_format = cpu_to_be32(GFS2_FORMAT_LB);
offset = sizeof(struct gfs2_meta_header);
}
*(__be64 *)(page_address(page) + offset) = cpu_to_be64(bd->bd_blkno);
offset += sizeof(u64);
}
gfs2_assert_withdraw(sdp, !sdp->sd_log_num_revoke);
gfs2_log_write_page(sdp, page);
}
static void revoke_lo_after_commit(struct gfs2_sbd *sdp, struct gfs2_trans *tr)
{
struct list_head *head = &sdp->sd_log_revokes;
struct gfs2_bufdata *bd;
struct gfs2_glock *gl;
while (!list_empty(head)) {
bd = list_first_entry(head, struct gfs2_bufdata, bd_list);
list_del_init(&bd->bd_list);
gl = bd->bd_gl;
gfs2_glock_remove_revoke(gl);
kmem_cache_free(gfs2_bufdata_cachep, bd);
}
}
static void revoke_lo_before_scan(struct gfs2_jdesc *jd,
struct gfs2_log_header_host *head, int pass)
{
if (pass != 0)
return;
jd->jd_found_revokes = 0;
jd->jd_replay_tail = head->lh_tail;
}
static int revoke_lo_scan_elements(struct gfs2_jdesc *jd, u32 start,
struct gfs2_log_descriptor *ld, __be64 *ptr,
int pass)
{
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
unsigned int blks = be32_to_cpu(ld->ld_length);
unsigned int revokes = be32_to_cpu(ld->ld_data1);
struct buffer_head *bh;
unsigned int offset;
u64 blkno;
int first = 1;
int error;
if (pass != 0 || be32_to_cpu(ld->ld_type) != GFS2_LOG_DESC_REVOKE)
return 0;
offset = sizeof(struct gfs2_log_descriptor);
for (; blks; gfs2_replay_incr_blk(jd, &start), blks--) {
error = gfs2_replay_read_block(jd, start, &bh);
if (error)
return error;
if (!first)
gfs2_metatype_check(sdp, bh, GFS2_METATYPE_LB);
while (offset + sizeof(u64) <= sdp->sd_sb.sb_bsize) {
blkno = be64_to_cpu(*(__be64 *)(bh->b_data + offset));
error = gfs2_revoke_add(jd, blkno, start);
if (error < 0) {
brelse(bh);
return error;
}
else if (error)
jd->jd_found_revokes++;
if (!--revokes)
break;
offset += sizeof(u64);
}
brelse(bh);
offset = sizeof(struct gfs2_meta_header);
first = 0;
}
return 0;
}
static void revoke_lo_after_scan(struct gfs2_jdesc *jd, int error, int pass)
{
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
if (error) {
gfs2_revoke_clean(jd);
return;
}
if (pass != 1)
return;
fs_info(sdp, "jid=%u: Found %u revoke tags\n",
jd->jd_jid, jd->jd_found_revokes);
gfs2_revoke_clean(jd);
}
/**
 * databuf_lo_before_commit - Scan the data buffers, writing as we go
 * @sdp: The filesystem
 * @tr: The transaction being committed
 *
 */
static void databuf_lo_before_commit(struct gfs2_sbd *sdp, struct gfs2_trans *tr)
{
unsigned int limit = databuf_limit(sdp);
unsigned int nbuf;
if (tr == NULL)
return;
nbuf = tr->tr_num_databuf_new - tr->tr_num_databuf_rm;
gfs2_before_commit(sdp, limit, nbuf, &tr->tr_databuf, 1);
}
static int databuf_lo_scan_elements(struct gfs2_jdesc *jd, u32 start,
struct gfs2_log_descriptor *ld,
__be64 *ptr, int pass)
{
struct gfs2_inode *ip = GFS2_I(jd->jd_inode);
struct gfs2_glock *gl = ip->i_gl;
unsigned int blks = be32_to_cpu(ld->ld_data1);
struct buffer_head *bh_log, *bh_ip;
u64 blkno;
u64 esc;
int error = 0;
if (pass != 1 || be32_to_cpu(ld->ld_type) != GFS2_LOG_DESC_JDATA)
return 0;
gfs2_replay_incr_blk(jd, &start);
for (; blks; gfs2_replay_incr_blk(jd, &start), blks--) {
blkno = be64_to_cpu(*ptr++);
esc = be64_to_cpu(*ptr++);
jd->jd_found_blocks++;
if (gfs2_revoke_check(jd, blkno, start))
continue;
error = gfs2_replay_read_block(jd, start, &bh_log);
if (error)
return error;
bh_ip = gfs2_meta_new(gl, blkno);
memcpy(bh_ip->b_data, bh_log->b_data, bh_log->b_size);
/* Unescape */
if (esc) {
__be32 *eptr = (__be32 *)bh_ip->b_data;
*eptr = cpu_to_be32(GFS2_MAGIC);
}
mark_buffer_dirty(bh_ip);
brelse(bh_log);
brelse(bh_ip);
jd->jd_replayed_blocks++;
}
return error;
}
/* FIXME: sort out accounting for log blocks etc. */
static void databuf_lo_after_scan(struct gfs2_jdesc *jd, int error, int pass)
{
struct gfs2_inode *ip = GFS2_I(jd->jd_inode);
struct gfs2_sbd *sdp = GFS2_SB(jd->jd_inode);
if (error) {
gfs2_meta_sync(ip->i_gl);
return;
}
if (pass != 1)
return;
/* data sync? */
gfs2_meta_sync(ip->i_gl);
fs_info(sdp, "jid=%u: Replayed %u of %u data blocks\n",
jd->jd_jid, jd->jd_replayed_blocks, jd->jd_found_blocks);
}
static void databuf_lo_after_commit(struct gfs2_sbd *sdp, struct gfs2_trans *tr)
{
struct list_head *head;
struct gfs2_bufdata *bd;
if (tr == NULL)
return;
head = &tr->tr_databuf;
while (!list_empty(head)) {
bd = list_first_entry(head, struct gfs2_bufdata, bd_list);
list_del_init(&bd->bd_list);
gfs2_unpin(sdp, bd->bd_bh, tr);
}
}
static const struct gfs2_log_operations gfs2_buf_lops = {
.lo_before_commit = buf_lo_before_commit,
.lo_after_commit = buf_lo_after_commit,
.lo_before_scan = buf_lo_before_scan,
.lo_scan_elements = buf_lo_scan_elements,
.lo_after_scan = buf_lo_after_scan,
.lo_name = "buf",
};
static const struct gfs2_log_operations gfs2_revoke_lops = {
.lo_before_commit = revoke_lo_before_commit,
.lo_after_commit = revoke_lo_after_commit,
.lo_before_scan = revoke_lo_before_scan,
.lo_scan_elements = revoke_lo_scan_elements,
.lo_after_scan = revoke_lo_after_scan,
.lo_name = "revoke",
};
static const struct gfs2_log_operations gfs2_databuf_lops = {
.lo_before_commit = databuf_lo_before_commit,
.lo_after_commit = databuf_lo_after_commit,
.lo_scan_elements = databuf_lo_scan_elements,
.lo_after_scan = databuf_lo_after_scan,
.lo_name = "databuf",
};
const struct gfs2_log_operations *gfs2_log_ops[] = {
&gfs2_databuf_lops,
&gfs2_buf_lops,
&gfs2_revoke_lops,
NULL,
};
| {
"pile_set_name": "Github"
} |
{
"_meta": {
"type": {
"text": "markup"
}
},
"ru-RU": {
"text": "<b:include src=\"./ru.tmpl\"/>"
},
"en-US": {
"text": "<b:include src=\"./en.tmpl\"/>"
}
}
| {
"pile_set_name": "Github"
} |
import unittest
import pytest
from geopyspark.geotrellis import SpatialPartitionStrategy, LocalLayout
from geopyspark.geotrellis.constants import LayerType, Operation
from geopyspark.geotrellis.geotiff import get
from geopyspark.geotrellis.neighborhood import Square
from geopyspark.tests.base_test_class import BaseTestClass
from geopyspark.tests.python_test_utils import file_path
class PartitionPreservationTest(BaseTestClass):
rdd = get(LayerType.SPATIAL, file_path("srtm_52_11.tif"), max_tile_size=6001)
@pytest.fixture(autouse=True)
def tearDown(self):
yield
BaseTestClass.pysc._gateway.close()
def test_partition_preservation(self):
partition_states = []
strategy = SpatialPartitionStrategy(16)
tiled = self.rdd.tile_to_layout()
tiled2 = self.rdd.tile_to_layout(partition_strategy=strategy)
partition_states.append(tiled2.get_partition_strategy())
added_layer = (tiled + tiled2) * 0.75
partition_states.append(added_layer.get_partition_strategy())
local_max_layer = added_layer.local_max(tiled)
partition_states.append(local_max_layer.get_partition_strategy())
focal_layer = local_max_layer.focal(Operation.MAX, Square(1))
partition_states.append(focal_layer.get_partition_strategy())
reprojected_layer = focal_layer.tile_to_layout(
layout=LocalLayout(),
target_crs=3857,
partition_strategy=strategy)
partition_states.append(reprojected_layer.get_partition_strategy())
pyramided = reprojected_layer.pyramid()
partition_states.append(pyramided.levels[pyramided.max_zoom].get_partition_strategy())
self.assertTrue(all(x == partition_states[0] for x in partition_states))
if __name__ == "__main__":
unittest.main()
| {
"pile_set_name": "Github"
} |
module PlanService::API
class NoPlans
Plan = PlanService::DataTypes::Plan
def active?
false
end
def authorize(*)
not_in_use
end
def create(*)
not_in_use
end
def create_initial_trial(*)
not_in_use
end
def get_current(community_id:)
Result::Success.new(
Plan.call(
community_id: community_id,
status: :active,
features: { deletable: true, admin_email: true, whitelabel: true },
expires_at: nil,
created_at: Time.now,
updated_at: Time.now).merge(expired: false, closed: false)
)
end
def get_trials(*)
not_in_use
end
def get_external_service_link(marketplace_data)
Result::Error.new("Plan service is not in use.")
end
# private
def not_in_use
Result::Error.new("Plan service is not in use.")
end
end
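  # Illustrative usage (the community id is made up): every marketplace is
  # reported as having an active, fully featured, never-expiring plan.
  #
  #   plans = PlanService::API::NoPlans.new
  #   plans.active?                      # => false
  #   plans.get_current(community_id: 1) # => Result::Success wrapping a Plan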
end
| {
"pile_set_name": "Github"
} |
/**
* Copyright (C) 2016 Turi
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
#ifndef RCPP_SERIALIZE_HPP
#define RCPP_SERIALIZE_HPP
#undef HAVE_VISIBILITY_ATTRIBUTE
#include <stdlib.h>
#include <string.h>
// most of functions used here are included from Rinternals.h
//#include <Rcpp.h>
#include <RApiSerializeAPI.h>
#include <boost/regex.hpp>
#include <boost/algorithm/string.hpp>
// the serialization function from SEXP to std::string
inline std::string serializeToStr(SEXP object) {
// using R's C API, all SEXP objects will be serialized into a raw vector
Rcpp::RawVector val = serializeToRaw(object);
// convert R raw vector into a std::string
std::string res;
for (size_t i = 0; i < val.size(); i++) {
res = res + std::to_string(int(val[i])) + "\t";
}
return res;
}
// unserialize from the std::string
inline SEXP unserializeFromStr(std::string s) {
// parse the std::string into a raw vector
std::vector<std::string> strs;
boost::regex e("^\\d.+");
if (boost::regex_match(s, e)) {
boost::split(strs,s,boost::is_any_of("\t"));
}
  // strs carries one trailing empty field (serializeToStr appends "\t" after
  // every byte); guard against an empty split so size() - 1 cannot wrap
  const size_t n = strs.empty() ? 0 : strs.size() - 1;
  Rcpp::RawVector object(n);
  for (size_t i = 0; i < n; i++) {
    object[i] = static_cast<unsigned char>(std::stoi(strs[i]));
  }
return unserializeFromRaw(object);
}
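// Round-trip sketch (hypothetical; `obj` stands for any serializable R
// object passed in from the R side):
//
//   std::string s = serializeToStr(obj);  // decimal byte values, tab-separated
//   SEXP back = unserializeFromStr(s);    // should be identical() to obj in R
//
// The textual encoding is bulky (each byte becomes its decimal digits plus a
// tab) but keeps the payload printable, e.g. for storage in text columns.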
#endif
| {
"pile_set_name": "Github"
} |
var mkdirp = require('../');
var path = require('path');
var test = require('tap').test;
var mockfs = require('mock-fs');
var _0777 = parseInt('0777', 8);
var _0755 = parseInt('0755', 8);
test('opts.fs', function (t) {
t.plan(5);
var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
var file = '/beep/boop/' + [x,y,z].join('/');
var xfs = mockfs.fs();
mkdirp(file, { fs: xfs, mode: _0755 }, function (err) {
t.ifError(err);
xfs.exists(file, function (ex) {
t.ok(ex, 'created file');
xfs.stat(file, function (err, stat) {
t.ifError(err);
t.equal(stat.mode & _0777, _0755);
t.ok(stat.isDirectory(), 'target not a directory');
});
});
});
});
| {
"pile_set_name": "Github"
} |
/*=============================================================================
Copyright (c) 2001-2011 Joel de Guzman
Copyright (c) 2005-2006 Dan Marsden
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(BOOST_FUSION_ADAPTED_30122005_1420)
#define BOOST_FUSION_ADAPTED_30122005_1420
#include <boost/fusion/support/config.hpp>
#include <boost/fusion/adapted/adt.hpp>
#include <boost/fusion/adapted/array.hpp>
#include <boost/fusion/adapted/boost_array.hpp>
#include <boost/fusion/adapted/boost_tuple.hpp>
#include <boost/fusion/adapted/mpl.hpp>
#include <boost/fusion/adapted/std_pair.hpp>
#include <boost/fusion/adapted/struct.hpp>
// The std_tuple_iterator adaptor only supports implementations
// using variadic templates
#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)
#include <boost/fusion/adapted/std_tuple.hpp>
#endif
#endif
| {
"pile_set_name": "Github"
} |
/*
* Copyright Andrey Semashev 2007 - 2015.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*/
/*!
* \file parser_utils.hpp
* \author Andrey Semashev
* \date 31.03.2008
*
* \brief This header is the Boost.Log library implementation, see the library documentation
* at http://www.boost.org/doc/libs/release/libs/log/doc/html/index.html.
*/
#ifndef BOOST_LOG_PARSER_UTILS_HPP_INCLUDED_
#define BOOST_LOG_PARSER_UTILS_HPP_INCLUDED_
#include <boost/log/detail/config.hpp>
#include <string>
#include <iostream>
#include <cctype>
#include <boost/log/utility/string_literal.hpp>
#include <boost/log/detail/header.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
namespace boost {
BOOST_LOG_OPEN_NAMESPACE
namespace aux {
//! Some constants and algorithms needed for parsing
template< typename > struct char_constants;
#ifdef BOOST_LOG_USE_CHAR
template< >
struct char_constants< char >
{
typedef char char_type;
typedef std::basic_string< char_type > string_type;
typedef boost::log::basic_string_literal< char_type > literal_type;
static const char_type char_comment = '#';
static const char_type char_comma = ',';
static const char_type char_dot = '.';
static const char_type char_quote = '"';
static const char_type char_percent = '%';
static const char_type char_exclamation = '!';
static const char_type char_and = '&';
static const char_type char_or = '|';
static const char_type char_equal = '=';
static const char_type char_greater = '>';
static const char_type char_less = '<';
static const char_type char_underline = '_';
static const char_type char_backslash = '\\';
static const char_type char_section_bracket_left = '[';
static const char_type char_section_bracket_right = ']';
static const char_type char_paren_bracket_left = '(';
static const char_type char_paren_bracket_right = ')';
static const char_type* not_keyword() { return "not"; }
static const char_type* and_keyword() { return "and"; }
static const char_type* or_keyword() { return "or"; }
static const char_type* equal_keyword() { return "="; }
static const char_type* greater_keyword() { return ">"; }
static const char_type* less_keyword() { return "<"; }
static const char_type* not_equal_keyword() { return "!="; }
static const char_type* greater_or_equal_keyword() { return ">="; }
static const char_type* less_or_equal_keyword() { return "<="; }
static const char_type* begins_with_keyword() { return "begins_with"; }
static const char_type* ends_with_keyword() { return "ends_with"; }
static const char_type* contains_keyword() { return "contains"; }
static const char_type* matches_keyword() { return "matches"; }
static const char_type* message_text_keyword() { return "_"; }
static literal_type true_keyword() { return literal_type("true"); }
static literal_type false_keyword() { return literal_type("false"); }
static const char_type* default_level_attribute_name() { return "Severity"; }
static const char_type* core_section_name() { return "Core"; }
static const char_type* sink_section_name_prefix() { return "Sink:"; }
static const char_type* core_disable_logging_param_name() { return "DisableLogging"; }
static const char_type* filter_param_name() { return "Filter"; }
static const char_type* sink_destination_param_name() { return "Destination"; }
static const char_type* file_name_param_name() { return "FileName"; }
static const char_type* rotation_size_param_name() { return "RotationSize"; }
static const char_type* rotation_interval_param_name() { return "RotationInterval"; }
static const char_type* rotation_time_point_param_name() { return "RotationTimePoint"; }
static const char_type* append_param_name() { return "Append"; }
static const char_type* enable_final_rotation_param_name() { return "EnableFinalRotation"; }
static const char_type* auto_flush_param_name() { return "AutoFlush"; }
static const char_type* auto_newline_mode_param_name() { return "AutoNewline"; }
static const char_type* asynchronous_param_name() { return "Asynchronous"; }
static const char_type* format_param_name() { return "Format"; }
static const char_type* provider_id_param_name() { return "ProviderID"; }
static const char_type* log_name_param_name() { return "LogName"; }
static const char_type* source_name_param_name() { return "LogSource"; }
static const char_type* registration_param_name() { return "Registration"; }
static const char_type* local_address_param_name() { return "LocalAddress"; }
static const char_type* target_address_param_name() { return "TargetAddress"; }
static const char_type* target_param_name() { return "Target"; }
static const char_type* max_size_param_name() { return "MaxSize"; }
static const char_type* max_files_param_name() { return "MaxFiles"; }
static const char_type* min_free_space_param_name() { return "MinFreeSpace"; }
static const char_type* scan_for_files_param_name() { return "ScanForFiles"; }
static const char_type* scan_method_all() { return "All"; }
static const char_type* scan_method_matching() { return "Matching"; }
static const char_type* auto_newline_mode_disabled() { return "Disabled"; }
static const char_type* auto_newline_mode_always_insert() { return "AlwaysInsert"; }
static const char_type* auto_newline_mode_insert_if_missing() { return "InsertIfMissing"; }
static const char_type* registration_never() { return "Never"; }
static const char_type* registration_on_demand() { return "OnDemand"; }
static const char_type* registration_forced() { return "Forced"; }
static const char_type* text_file_destination() { return "TextFile"; }
static const char_type* console_destination() { return "Console"; }
static const char_type* syslog_destination() { return "Syslog"; }
static const char_type* simple_event_log_destination() { return "SimpleEventLog"; }
static const char_type* debugger_destination() { return "Debugger"; }
static literal_type monday_keyword() { return literal_type("Monday"); }
static literal_type short_monday_keyword() { return literal_type("Mon"); }
static literal_type tuesday_keyword() { return literal_type("Tuesday"); }
static literal_type short_tuesday_keyword() { return literal_type("Tue"); }
static literal_type wednesday_keyword() { return literal_type("Wednesday"); }
static literal_type short_wednesday_keyword() { return literal_type("Wed"); }
static literal_type thursday_keyword() { return literal_type("Thursday"); }
static literal_type short_thursday_keyword() { return literal_type("Thu"); }
static literal_type friday_keyword() { return literal_type("Friday"); }
static literal_type short_friday_keyword() { return literal_type("Fri"); }
static literal_type saturday_keyword() { return literal_type("Saturday"); }
static literal_type short_saturday_keyword() { return literal_type("Sat"); }
static literal_type sunday_keyword() { return literal_type("Sunday"); }
static literal_type short_sunday_keyword() { return literal_type("Sun"); }
static std::ostream& get_console_log_stream() { return std::clog; }
static int to_number(char_type c)
{
using namespace std; // to make sure we can use C functions unqualified
int n = 0;
if (isdigit(c))
n = c - '0';
else if (c >= 'a' && c <= 'f')
n = c - 'a' + 10;
else if (c >= 'A' && c <= 'F')
n = c - 'A' + 10;
return n;
}
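    // e.g. to_number('7') == 7 and to_number('a') == to_number('A') == 10;
    // presumably used when decoding hexadecimal escapes such as "\x41"
    // in translate_escape_sequences() below.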
//! Skips spaces in the beginning of the input
static const char_type* trim_spaces_left(const char_type* begin, const char_type* end);
//! Skips spaces in the end of the input
static const char_type* trim_spaces_right(const char_type* begin, const char_type* end);
//! Scans for the attribute name placeholder in the input
static const char_type* scan_attr_placeholder(const char_type* begin, const char_type* end);
//! Parses an operand string (possibly quoted) from the input
static const char_type* parse_operand(const char_type* begin, const char_type* end, string_type& operand);
//! Converts escape sequences to the corresponding characters
static void translate_escape_sequences(string_type& str);
};
#endif
#ifdef BOOST_LOG_USE_WCHAR_T
template< >
struct char_constants< wchar_t >
{
typedef wchar_t char_type;
typedef std::basic_string< char_type > string_type;
typedef boost::log::basic_string_literal< char_type > literal_type;
static const char_type char_comment = L'#';
static const char_type char_comma = L',';
static const char_type char_dot = L'.';
static const char_type char_quote = L'"';
static const char_type char_percent = L'%';
static const char_type char_exclamation = L'!';
static const char_type char_and = L'&';
static const char_type char_or = L'|';
static const char_type char_equal = L'=';
static const char_type char_greater = L'>';
static const char_type char_less = L'<';
static const char_type char_underline = L'_';
static const char_type char_backslash = L'\\';
static const char_type char_section_bracket_left = L'[';
static const char_type char_section_bracket_right = L']';
static const char_type char_paren_bracket_left = L'(';
static const char_type char_paren_bracket_right = L')';
static const char_type* not_keyword() { return L"not"; }
static const char_type* and_keyword() { return L"and"; }
static const char_type* or_keyword() { return L"or"; }
static const char_type* equal_keyword() { return L"="; }
static const char_type* greater_keyword() { return L">"; }
static const char_type* less_keyword() { return L"<"; }
static const char_type* not_equal_keyword() { return L"!="; }
static const char_type* greater_or_equal_keyword() { return L">="; }
static const char_type* less_or_equal_keyword() { return L"<="; }
static const char_type* begins_with_keyword() { return L"begins_with"; }
static const char_type* ends_with_keyword() { return L"ends_with"; }
static const char_type* contains_keyword() { return L"contains"; }
static const char_type* matches_keyword() { return L"matches"; }
static const char_type* message_text_keyword() { return L"_"; }
static literal_type true_keyword() { return literal_type(L"true"); }
static literal_type false_keyword() { return literal_type(L"false"); }
static const char_type* default_level_attribute_name() { return L"Severity"; }
static const char_type* core_section_name() { return L"Core"; }
static const char_type* sink_section_name_prefix() { return L"Sink:"; }
static const char_type* core_disable_logging_param_name() { return L"DisableLogging"; }
static const char_type* filter_param_name() { return L"Filter"; }
static const char_type* sink_destination_param_name() { return L"Destination"; }
static const char_type* file_name_param_name() { return L"FileName"; }
static const char_type* rotation_size_param_name() { return L"RotationSize"; }
static const char_type* rotation_interval_param_name() { return L"RotationInterval"; }
static const char_type* rotation_time_point_param_name() { return L"RotationTimePoint"; }
static const char_type* append_param_name() { return L"Append"; }
static const char_type* enable_final_rotation_param_name() { return L"EnableFinalRotation"; }
static const char_type* auto_flush_param_name() { return L"AutoFlush"; }
static const char_type* auto_newline_mode_param_name() { return L"AutoNewline"; }
static const char_type* asynchronous_param_name() { return L"Asynchronous"; }
static const char_type* format_param_name() { return L"Format"; }
static const char_type* provider_id_param_name() { return L"ProviderID"; }
static const char_type* log_name_param_name() { return L"LogName"; }
static const char_type* source_name_param_name() { return L"LogSource"; }
static const char_type* registration_param_name() { return L"Registration"; }
static const char_type* local_address_param_name() { return L"LocalAddress"; }
static const char_type* target_address_param_name() { return L"TargetAddress"; }
static const char_type* target_param_name() { return L"Target"; }
static const char_type* max_size_param_name() { return L"MaxSize"; }
static const char_type* max_files_param_name() { return L"MaxFiles"; }
static const char_type* min_free_space_param_name() { return L"MinFreeSpace"; }
static const char_type* scan_for_files_param_name() { return L"ScanForFiles"; }
static const char_type* scan_method_all() { return L"All"; }
static const char_type* scan_method_matching() { return L"Matching"; }
static const char_type* auto_newline_mode_disabled() { return L"Disabled"; }
static const char_type* auto_newline_mode_always_insert() { return L"AlwaysInsert"; }
static const char_type* auto_newline_mode_insert_if_missing() { return L"InsertIfMissing"; }
static const char_type* registration_never() { return L"Never"; }
static const char_type* registration_on_demand() { return L"OnDemand"; }
static const char_type* registration_forced() { return L"Forced"; }
static const char_type* text_file_destination() { return L"TextFile"; }
static const char_type* console_destination() { return L"Console"; }
static const char_type* syslog_destination() { return L"Syslog"; }
static const char_type* simple_event_log_destination() { return L"SimpleEventLog"; }
static const char_type* debugger_destination() { return L"Debugger"; }
static literal_type monday_keyword() { return literal_type(L"Monday"); }
static literal_type short_monday_keyword() { return literal_type(L"Mon"); }
static literal_type tuesday_keyword() { return literal_type(L"Tuesday"); }
static literal_type short_tuesday_keyword() { return literal_type(L"Tue"); }
static literal_type wednesday_keyword() { return literal_type(L"Wednesday"); }
static literal_type short_wednesday_keyword() { return literal_type(L"Wed"); }
static literal_type thursday_keyword() { return literal_type(L"Thursday"); }
static literal_type short_thursday_keyword() { return literal_type(L"Thu"); }
static literal_type friday_keyword() { return literal_type(L"Friday"); }
static literal_type short_friday_keyword() { return literal_type(L"Fri"); }
static literal_type saturday_keyword() { return literal_type(L"Saturday"); }
static literal_type short_saturday_keyword() { return literal_type(L"Sat"); }
static literal_type sunday_keyword() { return literal_type(L"Sunday"); }
static literal_type short_sunday_keyword() { return literal_type(L"Sun"); }
static std::wostream& get_console_log_stream() { return std::wclog; }
static int to_number(char_type c)
{
int n = 0;
if (c >= L'0' && c <= L'9')
n = c - L'0';
else if (c >= L'a' && c <= L'f')
n = c - L'a' + 10;
else if (c >= L'A' && c <= L'F')
n = c - L'A' + 10;
return n;
}
static bool iswxdigit(char_type c)
{
return (c >= L'0' && c <= L'9') || (c >= L'a' && c <= L'f') || (c >= L'A' && c <= L'F');
}
//! Skips spaces in the beginning of the input
static const char_type* trim_spaces_left(const char_type* begin, const char_type* end);
//! Skips spaces in the end of the input
static const char_type* trim_spaces_right(const char_type* begin, const char_type* end);
//! Scans for the attribute name placeholder in the input
static const char_type* scan_attr_placeholder(const char_type* begin, const char_type* end);
//! Parses an operand string (possibly quoted) from the input
static const char_type* parse_operand(const char_type* begin, const char_type* end, string_type& operand);
//! Converts escape sequences to the corresponding characters
static void translate_escape_sequences(string_type& str);
};
#endif
} // namespace aux
BOOST_LOG_CLOSE_NAMESPACE // namespace log
} // namespace boost
#include <boost/log/detail/footer.hpp>
#endif // BOOST_LOG_PARSER_UTILS_HPP_INCLUDED_
| {
"pile_set_name": "Github"
} |
"""
Use this script to create JSON-Line description files that can be used to
train deep-speech models through this library.
This works with data directories that are organized like LibriSpeech:
data_directory/group/speaker/[file_id1.wav, file_id2.wav, ...,
speaker.trans.txt]
Where speaker.trans.txt has in each line, file_id transcription
"""
from __future__ import absolute_import, division, print_function
import argparse
import json
import os
import wave
def main(data_directory, output_file):
labels = []
durations = []
keys = []
for group in os.listdir(data_directory):
if group.startswith('.'):
continue
speaker_path = os.path.join(data_directory, group)
for speaker in os.listdir(speaker_path):
if speaker.startswith('.'):
continue
labels_file = os.path.join(speaker_path, speaker,
'{}-{}.trans.txt'
.format(group, speaker))
for line in open(labels_file):
split = line.strip().split()
file_id = split[0]
label = ' '.join(split[1:]).lower()
audio_file = os.path.join(speaker_path, speaker,
file_id) + '.wav'
audio = wave.open(audio_file)
duration = float(audio.getnframes()) / audio.getframerate()
audio.close()
keys.append(audio_file)
durations.append(duration)
labels.append(label)
with open(output_file, 'w') as out_file:
for i in range(len(keys)):
line = json.dumps({'key': keys[i], 'duration': durations[i],
'text': labels[i]})
out_file.write(line + '\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('data_directory', type=str,
help='Path to data directory')
parser.add_argument('output_file', type=str,
help='Path to output file')
args = parser.parse_args()
main(args.data_directory, args.output_file)
| {
"pile_set_name": "Github"
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.slides.v1.model;
/**
* Creates a new table.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Slides API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class CreateTableRequest extends com.google.api.client.json.GenericJson {
/**
* Number of columns in the table.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer columns;
/**
* The element properties for the table.
*
* The table will be created at the provided size, subject to a minimum size. If no size is
* provided, the table will be automatically sized.
*
* Table transforms must have a scale of 1 and no shear components. If no transform is provided,
* the table will be centered on the page.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private PageElementProperties elementProperties;
/**
* A user-supplied object ID.
*
* If you specify an ID, it must be unique among all pages and page elements in the presentation.
* The ID must start with an alphanumeric character or an underscore (matches regex
* `[a-zA-Z0-9_]`); remaining characters may include those as well as a hyphen or colon (matches
* regex `[a-zA-Z0-9_-:]`). The length of the ID must not be less than 5 or greater than 50.
*
* If you don't specify an ID, a unique one is generated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String objectId;
/**
* Number of rows in the table.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer rows;
/**
* Number of columns in the table.
* @return value or {@code null} for none
*/
public java.lang.Integer getColumns() {
return columns;
}
/**
* Number of columns in the table.
* @param columns columns or {@code null} for none
*/
public CreateTableRequest setColumns(java.lang.Integer columns) {
this.columns = columns;
return this;
}
/**
* The element properties for the table.
*
* The table will be created at the provided size, subject to a minimum size. If no size is
* provided, the table will be automatically sized.
*
* Table transforms must have a scale of 1 and no shear components. If no transform is provided,
* the table will be centered on the page.
* @return value or {@code null} for none
*/
public PageElementProperties getElementProperties() {
return elementProperties;
}
/**
* The element properties for the table.
*
* The table will be created at the provided size, subject to a minimum size. If no size is
* provided, the table will be automatically sized.
*
* Table transforms must have a scale of 1 and no shear components. If no transform is provided,
* the table will be centered on the page.
* @param elementProperties elementProperties or {@code null} for none
*/
public CreateTableRequest setElementProperties(PageElementProperties elementProperties) {
this.elementProperties = elementProperties;
return this;
}
/**
* A user-supplied object ID.
*
* If you specify an ID, it must be unique among all pages and page elements in the presentation.
* The ID must start with an alphanumeric character or an underscore (matches regex
* `[a-zA-Z0-9_]`); remaining characters may include those as well as a hyphen or colon (matches
* regex `[a-zA-Z0-9_-:]`). The length of the ID must not be less than 5 or greater than 50.
*
* If you don't specify an ID, a unique one is generated.
* @return value or {@code null} for none
*/
public java.lang.String getObjectId() {
return objectId;
}
/**
* A user-supplied object ID.
*
* If you specify an ID, it must be unique among all pages and page elements in the presentation.
* The ID must start with an alphanumeric character or an underscore (matches regex
* `[a-zA-Z0-9_]`); remaining characters may include those as well as a hyphen or colon (matches
* regex `[a-zA-Z0-9_-:]`). The length of the ID must not be less than 5 or greater than 50.
*
* If you don't specify an ID, a unique one is generated.
* @param objectId objectId or {@code null} for none
*/
public CreateTableRequest setObjectId(java.lang.String objectId) {
this.objectId = objectId;
return this;
}
/**
* Number of rows in the table.
* @return value or {@code null} for none
*/
public java.lang.Integer getRows() {
return rows;
}
/**
* Number of rows in the table.
* @param rows rows or {@code null} for none
*/
public CreateTableRequest setRows(java.lang.Integer rows) {
this.rows = rows;
return this;
}
@Override
public CreateTableRequest set(String fieldName, Object value) {
return (CreateTableRequest) super.set(fieldName, value);
}
@Override
public CreateTableRequest clone() {
return (CreateTableRequest) super.clone();
}
}
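/*
 * Usage sketch (not part of the generated source): building a request by hand.
 * The Requests/batchUpdate plumbing that would send it to the Slides API is
 * assumed and omitted; only the setters defined above are exercised.
 */
class CreateTableRequestExample {
  static CreateTableRequest threeByFourTable() {
    // Rows and columns are the only required inputs here: omitting objectId
    // lets the API generate a unique one, and omitting elementProperties
    // yields an auto-sized table centered on the page.
    return new CreateTableRequest()
        .setRows(3)
        .setColumns(4);
  }
}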
| {
"pile_set_name": "Github"
} |
https://github.com/codehaus/jaxen/blob/master/jaxen/LICENSE.txt
/*
$Id$
Copyright 2003-2006 The Werken Company. All Rights Reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Jaxen Project nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
| {
"pile_set_name": "Github"
} |
/* GitHub stylesheet for MarkdownPad (http://markdownpad.com) */
/* Author: Nicolas Hery - http://nicolashery.com */
/* Version: b13fe65ca28d2e568c6ed5d7f06581183df8f2ff */
/* Source: https://github.com/nicolahery/markdownpad-github */
/* RESET
=============================================================================*/
html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, pre, a, abbr, acronym, address, big, cite, code, del, dfn, em, img, ins, kbd, q, s, samp, small, strike, strong, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td, article, aside, canvas, details, embed, figure, figcaption, footer, header, hgroup, menu, nav, output, ruby, section, summary, time, mark, audio, video {
margin: 0;
padding: 0;
border: 0;
}
/* BODY
=============================================================================*/
body {
font-family: Helvetica, arial, freesans, clean, sans-serif;
font-size: 14px;
line-height: 1.6;
color: #333;
background-color: #fff;
padding: 20px;
max-width: 960px;
margin: 0 auto;
}
body>*:first-child {
margin-top: 0 !important;
}
body>*:last-child {
margin-bottom: 0 !important;
}
/* BLOCKS
=============================================================================*/
p, blockquote, ul, ol, dl, table, pre {
margin: 15px 0;
}
/* HEADERS
=============================================================================*/
h1, h2, h3, h4, h5, h6 {
margin: 20px 0 10px;
padding: 0;
font-weight: bold;
-webkit-font-smoothing: antialiased;
}
h1 tt, h1 code, h2 tt, h2 code, h3 tt, h3 code, h4 tt, h4 code, h5 tt, h5 code, h6 tt, h6 code {
font-size: inherit;
}
h1 {
font-size: 24px;
border-bottom: 1px solid #ccc;
color: #000;
}
h2 {
font-size: 18px;
color: #000;
}
h3 {
font-size: 14px;
}
h4 {
font-size: 14px;
}
h5 {
font-size: 14px;
}
h6 {
color: #777;
font-size: 14px;
}
body>h2:first-child, body>h1:first-child, body>h1:first-child+h2, body>h3:first-child, body>h4:first-child, body>h5:first-child, body>h6:first-child {
margin-top: 0;
padding-top: 0;
}
a:first-child h1, a:first-child h2, a:first-child h3, a:first-child h4, a:first-child h5, a:first-child h6 {
margin-top: 0;
padding-top: 0;
}
h1+p, h2+p, h3+p, h4+p, h5+p, h6+p {
margin-top: 10px;
}
/* LINKS
=============================================================================*/
a {
color: #4183C4;
text-decoration: none;
}
a:hover {
text-decoration: underline;
}
/* LISTS
=============================================================================*/
ul, ol {
padding-left: 30px;
}
ul li > :first-child,
ol li > :first-child,
ul li ul:first-of-type,
ol li ol:first-of-type,
ul li ol:first-of-type,
ol li ul:first-of-type {
margin-top: 0px;
}
ul ul, ul ol, ol ol, ol ul {
margin-bottom: 0;
}
dl {
padding: 0;
}
dl dt {
font-size: 14px;
font-weight: bold;
font-style: italic;
padding: 0;
margin: 15px 0 5px;
}
dl dt:first-child {
padding: 0;
}
dl dt>:first-child {
margin-top: 0px;
}
dl dt>:last-child {
margin-bottom: 0px;
}
dl dd {
margin: 0 0 15px;
padding: 0 15px;
}
dl dd>:first-child {
margin-top: 0px;
}
dl dd>:last-child {
margin-bottom: 0px;
}
/* CODE
=============================================================================*/
pre, code, tt {
font-size: 12px;
font-family: Consolas, "Liberation Mono", Courier, monospace;
}
code, tt {
margin: 0 0px;
padding: 0px 0px;
white-space: nowrap;
border: 1px solid #eaeaea;
background-color: #f8f8f8;
border-radius: 3px;
}
pre>code {
margin: 0;
padding: 0;
white-space: pre;
border: none;
background: transparent;
}
pre {
background-color: #f8f8f8;
border: 1px solid #ccc;
font-size: 13px;
line-height: 19px;
overflow: auto;
padding: 6px 10px;
border-radius: 3px;
}
pre code, pre tt {
background-color: transparent;
border: none;
}
kbd {
-moz-border-bottom-colors: none;
-moz-border-left-colors: none;
-moz-border-right-colors: none;
-moz-border-top-colors: none;
background-color: #DDDDDD;
background-image: linear-gradient(#F1F1F1, #DDDDDD);
background-repeat: repeat-x;
border-color: #DDDDDD #CCCCCC #CCCCCC #DDDDDD;
border-image: none;
border-radius: 2px 2px 2px 2px;
border-style: solid;
border-width: 1px;
font-family: "Helvetica Neue",Helvetica,Arial,sans-serif;
line-height: 10px;
padding: 1px 4px;
}
/* QUOTES
=============================================================================*/
blockquote {
border-left: 4px solid #DDD;
padding: 0 15px;
color: #777;
}
blockquote>:first-child {
margin-top: 0px;
}
blockquote>:last-child {
margin-bottom: 0px;
}
/* HORIZONTAL RULES
=============================================================================*/
hr {
clear: both;
margin: 15px 0;
height: 0px;
overflow: hidden;
border: none;
background: transparent;
border-bottom: 4px solid #ddd;
padding: 0;
}
/* TABLES
=============================================================================*/
table th {
font-weight: bold;
}
table th, table td {
border: 1px solid #ccc;
padding: 6px 13px;
}
table tr {
border-top: 1px solid #ccc;
background-color: #fff;
}
table tr:nth-child(2n) {
background-color: #f8f8f8;
}
/* IMAGES
=============================================================================*/
img {
max-width: 100%;
}
| {
"pile_set_name": "Github"
} |
USE zabbix;
DROP PROCEDURE IF EXISTS partition_maintenance;
DELIMITER $$
CREATE PROCEDURE `partition_maintenance`(SCHEMA_NAME VARCHAR(32), TABLE_NAME VARCHAR(32), INTERVAL_TYPE VARCHAR(10),
INTERVAL_VALUE INT, CREATE_NEXT_INTERVALS INT, KEEP_DATA_DAYS INT)
BEGIN
DECLARE _KEEP_DATA_AFTER_TS INT;
CALL partition_verify(SCHEMA_NAME, TABLE_NAME, INTERVAL_TYPE, INTERVAL_VALUE);
CALL partition_create(SCHEMA_NAME, TABLE_NAME, INTERVAL_TYPE, INTERVAL_VALUE, CREATE_NEXT_INTERVALS);
SET _KEEP_DATA_AFTER_TS = UNIX_TIMESTAMP(DATE_SUB(NOW(), INTERVAL KEEP_DATA_DAYS DAY));
CALL partition_delete(SCHEMA_NAME, TABLE_NAME, _KEEP_DATA_AFTER_TS);
END$$
DELIMITER ;
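-- Usage sketch with illustrative values (assumes the companion procedures
-- partition_verify, partition_create and partition_delete are already
-- installed): maintain daily partitions on zabbix.history, pre-create the
-- next 14 partitions, and drop data older than 28 days.
CALL partition_maintenance('zabbix', 'history', 'day', 24, 14, 28);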
| {
"pile_set_name": "Github"
} |
// RUN: %clang_cc1 %s -verify -fsyntax-only
- (void)compilerTestAgainst; // expected-error {{missing context for method declaration}}
void xx(); // expected-error {{expected method body}}
| {
"pile_set_name": "Github"
} |
package com.suixingpay.config.server;
import com.suixingpay.config.server.condition.ApplicationConfigCondition;
import com.suixingpay.config.server.entity.ApplicationConfigDO;
import com.suixingpay.config.server.entity.ApplicationDO;
import com.suixingpay.config.server.entity.ProfileDO;
import com.suixingpay.config.server.entity.UserDO;
import com.suixingpay.config.server.enums.SourceType;
import com.suixingpay.config.server.enums.YesNo;
import com.suixingpay.config.server.service.ApplicationConfigService;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.test.annotation.Rollback;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**
* @author: qiujiayu[[email protected]]
 * @date: 2017-09-05 15:53:38
* @version: V1.0
 * @review: qiujiayu[[email protected]]/2017-09-05 15:53:38
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class ApplicationConfigServiceTest extends BaseServiceTest {
@Autowired
private ApplicationConfigService applicationConfigService;
@Test
@Transactional
@Rollback(true)
public void test() {
ApplicationConfigDO applicationConfigDO = new ApplicationConfigDO();
ProfileDO profileDO = new ProfileDO();
profileDO.setProfile("dev");
ApplicationDO applicationDO = new ApplicationDO();
applicationDO.setName("config-demo");
UserDO userDO = UserDO.builder().id(2).administrator(YesNo.NO).build();
applicationConfigDO.setProfile(profileDO).setApplication(applicationDO)
.setPropertySource("suixingpay.test:1234").setSourceType(SourceType.PROPERTIES).setMemo("test")
.setVersion(0) // add
.setUser(userDO);
applicationConfigService.saveApplicationConfig(applicationConfigDO);
ApplicationConfigCondition condition = new ApplicationConfigCondition();
Pageable pageable = new PageRequest(1, 20);
condition.setPageable(pageable);
List<ApplicationConfigDO> list = applicationConfigService.pageByCondition(condition).getContent();
if (null != list) {
for (ApplicationConfigDO configDO : list) {
System.out.println(configDO);
applicationConfigService.saveApplicationConfig(configDO);
}
}
System.out.println(applicationConfigService.getByApplicationNameAnddProfile("config-demo", "dev"));
}
}
| {
"pile_set_name": "Github"
} |
<div class="rbox" name="tip">
<div class="rbox-title" name="tip">{tr}Tip{/tr}</div>
<div class="rbox-data" name="tip">{tr}To learn more about <a class="rbox-link" target="tikihelp" href="http://mods.tikiwiki.org">mods</a>.{/tr}
</div>
</div>
<br />
<style>
{literal}
.focus { background-color : #eeee77; }
{/literal}
</style>
<h1><a href="tiki-mods.php" class="pagetitle">{tr}TikiWiki Mods{/tr}</a></h1>
<span class="button2"><a href="tiki-mods_admin.php" class="linkbut">{tr}Mods Configuration{/tr}</a></span>
<span class="button2"><a href="tiki-mods.php?reload=1{$findarg}{$typearg}" class="linkbut">{tr}Update remote index{/tr}</a></span>
<span class="button2"><a href="tiki-mods.php?rebuild=1{$findarg}{$typearg}" class="linkbut">{tr}Rebuild local list{/tr}</a></span>
{if $feature_mods_provider eq 'y'}
<span class="button3">
<span class="button2"><a href="tiki-mods.php?republishall=1{$findarg}{$typearg}" class="linkbut">{tr}Republish all{/tr}</a></span>
<span class="button2"><a href="tiki-mods.php?publishall=1{$findarg}{$typearg}" class="linkbut">{tr}Publish all{/tr}</a></span>
<span class="button2"><a href="tiki-mods.php?unpublishall=1{$findarg}{$typearg}" class="linkbut">{tr}Unpublish all{/tr}</a></span>
</span>
{/if}
<br /><br />
{if $iswritable}
<div class="simplebox" style="color:#009900;"><b>{tr}Attention{/tr}</b><br />{tr}Apache has the right to write in your file tree, which enables the installation, removal or
upgrade of packages. When you are done with those operations, think to fix those permissions back to a safe state (by using
"./fixperms fix" for example).{/tr}</div>
{else}
<div class="simplebox" style="color:#990000;"><b>{tr}Attention{/tr}</b><br />{tr}To install, remove or upgrade packages you need to give the apache user the right
to write files in your web tree (you can use "./fixperms.sh open" to set it up). After installation you need to remove that
permission (using "./fixperms fix").{/tr}</div>
{/if}
<br />
{if $tikifeedback}
<br />
{section name=n loop=$tikifeedback}
<div class="simplebox {if $tikifeedback[n].num > 0} highlight{/if}">{$tikifeedback[n].mes}</div>
{/section}{/if}
<form method="get" action="tiki-mods.php">
<select name="type" onchange="this.form.submit();">
<option value="">{tr}all types{/tr}</option>
{foreach key=it item=i from=$types}
<option value="{$it|escape}"{if $it eq $type} selected="selected"{/if}>{$it}</option>
{/foreach}
</select>
<input type="text" name="find" value="{$find|escape}" />
<input type="submit" name="f" value="{tr}find{/tr}" />
</form>
<table cellspacing="0" cellpadding="2" border="0" class="normal">
{foreach key=type item=i from=$display}
<tr><td colspan="{if $feature_mods_provider eq 'y'}3{else}2{/if}">
<span class="button2"><a href="tiki-mods.php?type={$type|escape:"url"}{$findarg}" class="linkbut" title="{tr}Display only this type{/tr}">{$type}</a></span>
</td><td colspan="7"> </td>
</tr>
{cycle values="odd,even" print=false}
{foreach key=item item=it from=$display.$type}
<tr class="{if $focus and $focus eq $display.$type.$item.name}focus{else}{cycle}{/if}">
{if $feature_mods_provider eq 'y'}
{assign var=mod value=$public.$type.$item.modname}
{if $public.$type.$item}
{if $dist.$mod.rev_major lt $local.$type.$item.rev_major or
($dist.$mod.rev_major eq $local.$type.$item.rev_major and
$dist.$mod.rev_minor lt $local.$type.$item.rev_minor) or
($dist.$mod.rev_major eq $local.$type.$item.rev_major and
$dist.$mod.rev_minor eq $local.$type.$item.rev_minor and
$dist.$mod.rev_subminor lt $local.$type.$item.rev_subminor)}
<td style="background:#fcfeac;"><a href="tiki-mods.php?unpublish={$public.$type.$item.modname|escape:"url"}{$findarg}{$typearg}" title="{tr}Unpublish{/tr}">[x]</a>{if $dist.$mod}<a
href="tiki-mods.php?republish={$public.$type.$item.modname|escape:"url"}{$findarg}{$typearg}" title="{tr}Republish{/tr}">{$dist.$mod.revision}>{$local.$type.$item.revision}</a>{/if}
{else}
<td style="background:#fcfeac;"><a href="tiki-mods.php?unpublish={$public.$type.$item.modname|escape:"url"}{$findarg}{$typearg}" title="{tr}Unpublish{/tr}">[x]</a>{if $dist.$mod}{$dist.$mod.revision}{/if}
{/if}
</td>
{elseif $local.$type.$item}
<td style="background:#ededed;"><a href="tiki-mods.php?publish={$local.$type.$item.modname|escape:"url"}{$findarg}{$typearg}" title="{tr}Publish{/tr}">[+]</a></td>
{else}
<td style="background:#ededed;"></td>
{/if}
{/if}
{if $remote.$type.$item}
{if $remote.$type.$item.rev_major gt $local.$type.$item.rev_major or
($remote.$type.$item.rev_major eq $local.$type.$item.rev_major and
$remote.$type.$item.rev_minor gt $local.$type.$item.rev_minor) or
($remote.$type.$item.rev_major eq $local.$type.$item.rev_major and
$remote.$type.$item.rev_minor eq $local.$type.$item.rev_minor and
$remote.$type.$item.rev_subminor gt $local.$type.$item.rev_subminor)}
<td style="background:#fcfeac;"><a href="tiki-mods.php?dl={$remote.$type.$item.modname|escape:"url"}-{$remote.$type.$item.revision}{$findarg}{$typearg}" title="{tr}Download{/tr}">{$remote.$type.$item.revision}</a></td>
{else}
<td style="background:#acfeac;"><a href="tiki-mods.php?dl={$remote.$type.$item.modname|escape:"url"}-{$remote.$type.$item.revision}{$findarg}{$typearg}" title="{tr}Download{/tr}">{$remote.$type.$item.revision}</a></td>
{/if}
{else}
<td style="background:#dcdcdc;"></td>
{/if}
{if $local.$type.$item.name}
<td><b><a href="tiki-mods.php?focus={$local.$type.$item.modname|escape:"url"}{$findarg}{$typearg}">{$local.$type.$item.name}</a></b></td>
<td>{$local.$type.$item.revision}</td>
<td>{$local.$type.$item.licence}</td>
<td>{$local.$type.$item.description}</td>
{if $installed.$type.$item}
{if $local.$type.$item.rev_major gt $installed.$type.$item.rev_major or
($local.$type.$item.rev_major eq $installed.$type.$item.rev_major and
$local.$type.$item.rev_minor gt $installed.$type.$item.rev_minor) or
($local.$type.$item.rev_major eq $installed.$type.$item.rev_major and
$local.$type.$item.rev_minor eq $installed.$type.$item.rev_minor and
$local.$type.$item.rev_subminor gt $installed.$type.$item.rev_subminor)}
<td style="background:#dcdeac;">{$installed.$type.$item.revision}{if $iswritable}<a href="tiki-mods.php?action=upgrade&package={$local.$type.$item.modname|escape:"url"}{$findarg}{$typearg}">>{$local.$type.$item.revision}</a>{/if}</td>
{else}
<td style="background:#acfeac;">{$installed.$type.$item.revision}</td>
{/if}
<td style="background:#fcaeac;">{if $iswritable}<a href="tiki-mods.php?action=remove&package={$local.$type.$item.modname|escape:"url"}{$findarg}{$typearg}">{tr}remove{/tr}</a>{/if}</td>
{else}
<td colspan="3">{if $iswritable}<a href="tiki-mods.php?action=install&package={$local.$type.$item.modname|escape:"url"}{$findarg}{$typearg}">{tr}install{/tr}</a>{else}<b><s>{tr}Install{/tr}</s></b>{/if}</td>
{/if}
{else}
<td>{$remote.$type.$item.name}</td>
<td>{$remote.$type.$item.revision}</td>
<td>{$remote.$type.$item.licence}</td>
<td>{$remote.$type.$item.description}</td>
{/if}
</tr>
{if $focus and $focus eq $local.$type.$item.modname}
<tr class="{cycle}"><td colspan="{if $feature_mods_provider eq 'y'}9{else}8{/if}">
<table><tr><td>
<div class="simplebox">
{if $more.docurl}Documentation:<br />{foreach key=ku item=iu from=$more.docurl}<a href="{$iu}">{$iu}</a><br />{/foreach}{/if}
{if $more.devurl}Development:<br />{foreach key=ku item=iu from=$more.devurl}<a href="{$iu}">{$iu}</a><br />{/foreach}{/if}
{if $more.help}{$more.help}<br />{/if}
{if $more.author}{tr}author{/tr}: {$more.author[0]}<br />{/if}
{tr}last modification{/tr}: {$more.lastmodif[0]}<br />
{tr}by{/tr}: {$more.contributor[0]}<br />
</div>
</td><td>
{foreach key=kk item=ii from=$more.files}
{$ii[0]} -> <b>{$ii[1]}</b><br />
{/foreach}
</td></tr></table>
</td></tr>
{/if}
{/foreach}
{/foreach}
</table>
| {
"pile_set_name": "Github"
} |
import React, { createContext, useState } from 'react'
import t from 'prop-types'
import uuidv4 from 'uuid/v4'
import firebase, { db } from 'services/firebase'
import { useAuth } from 'hooks'
const OrderContext = createContext()
function OrderProvider ({ children }) {
const [pizzas, addPizza] = useState([])
const [orderInProgress, setOrderInProgress] = useState(false)
const [phone, addPhone] = useState('')
const [address, addAddress] = useState({})
const { userInfo } = useAuth()
function addPizzaToOrder (pizza) {
if (orderInProgress) {
return addPizza((pizzas) => pizzas.concat(newPizza(pizza)))
}
setOrderInProgress(true)
addPizza([newPizza(pizza)])
}
function newPizza (pizza) {
return {
id: uuidv4(),
...pizza
}
}
function removePizzaFromOrder (id) {
console.log('removePizzaFromOrder:', id)
addPizza((pizzas) => pizzas.filter(p => p.id !== id))
}
async function sendOrder () {
console.log('send order')
try {
await db.collection('orders').add({
userId: userInfo.user.uid,
createdAt: firebase.firestore.FieldValue.serverTimestamp(),
address,
phone,
pizzas: pizzas.map(pizza => ({
size: pizza.pizzaSize,
flavours: pizza.pizzaFlavours,
quantity: pizza.quantity
}))
})
} catch (e) {
console.log('error saving order:', e)
}
setOrderInProgress(false)
}
return (
<OrderContext.Provider value={{
order: {
pizzas,
address,
phone
},
addPizzaToOrder,
removePizzaFromOrder,
addAddress,
addPhone,
sendOrder
}}>
{children}
</OrderContext.Provider>
)
}
OrderProvider.propTypes = {
children: t.node.isRequired
}
export { OrderProvider, OrderContext }
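// Usage sketch (illustrative, not exported): a consumer reading the context
// directly. In this codebase a consumer would more likely live in its own
// file and import OrderContext from here.
function AddPizzaButton ({ pizza }) {
  const { addPizzaToOrder } = React.useContext(OrderContext)
  return (
    <button onClick={() => addPizzaToOrder(pizza)}>
      Add pizza to order
    </button>
  )
}
AddPizzaButton.propTypes = {
  pizza: t.object.isRequired
}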
| {
"pile_set_name": "Github"
} |
/* SPDX-License-Identifier: GPL-2.0+ */
/*
* Copyright 2017 Icenowy Zheng <[email protected]>
*
*/
#ifndef _CCU_SUNIV_F1C100S_H_
#define _CCU_SUNIV_F1C100S_H_
#include <dt-bindings/clock/suniv-ccu-f1c100s.h>
#include <dt-bindings/reset/suniv-ccu-f1c100s.h>
#define CLK_PLL_CPU 0
#define CLK_PLL_AUDIO_BASE 1
#define CLK_PLL_AUDIO 2
#define CLK_PLL_AUDIO_2X 3
#define CLK_PLL_AUDIO_4X 4
#define CLK_PLL_AUDIO_8X 5
#define CLK_PLL_VIDEO 6
#define CLK_PLL_VIDEO_2X 7
#define CLK_PLL_VE 8
#define CLK_PLL_DDR0 9
#define CLK_PLL_PERIPH 10
/* CPU clock is exported */
#define CLK_AHB 12
#define CLK_APB 13
/* All bus gates, DRAM gates and mod clocks are exported */
#define CLK_NUMBER (CLK_AVS + 1)
#endif /* _CCU_SUNIV_F1C100S_H_ */
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0) on Wed Apr 05 01:58:42 EDT 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.owasp.appsensor.rpc.thrift.generated.AppSensorApi.AsyncClient.addEvent_call (appsensor-parent 2.3.2 API)</title>
<meta name="date" content="2017-04-05">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.owasp.appsensor.rpc.thrift.generated.AppSensorApi.AsyncClient.addEvent_call (appsensor-parent 2.3.2 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="header">
<h2 title="Uses of Class org.owasp.appsensor.rpc.thrift.generated.AppSensorApi.AsyncClient.addEvent_call" class="title">Uses of Class<br>org.owasp.appsensor.rpc.thrift.generated.AppSensorApi.AsyncClient.addEvent_call</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.owasp.appsensor.rpc.thrift.generated">org.owasp.appsensor.rpc.thrift.generated</a></td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.owasp.appsensor.rpc.thrift.generated">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a> in <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/package-summary.html">org.owasp.appsensor.rpc.thrift.generated</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Method parameters in <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/package-summary.html">org.owasp.appsensor.rpc.thrift.generated</a> with type arguments of type <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="typeNameLabel">AppSensorApi.AsyncIface.</span><code><span class="memberNameLink"><a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncIface.html#addEvent-org.owasp.appsensor.rpc.thrift.generated.Event-java.lang.String-org.apache.thrift.async.AsyncMethodCallback-">addEvent</a></span>(<a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/Event.html" title="class in org.owasp.appsensor.rpc.thrift.generated">Event</a> event,
<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> clientApplicationName,
org.apache.thrift.async.AsyncMethodCallback<<a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a>> resultHandler)</code></td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="typeNameLabel">AppSensorApi.AsyncClient.</span><code><span class="memberNameLink"><a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.html#addEvent-org.owasp.appsensor.rpc.thrift.generated.Event-java.lang.String-org.apache.thrift.async.AsyncMethodCallback-">addEvent</a></span>(<a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/Event.html" title="class in org.owasp.appsensor.rpc.thrift.generated">Event</a> event,
<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> clientApplicationName,
org.apache.thrift.async.AsyncMethodCallback<<a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a>> resultHandler)</code></td>
</tr>
</tbody>
</table>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">
<caption><span>Constructor parameters in <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/package-summary.html">org.owasp.appsensor.rpc.thrift.generated</a> with type arguments of type <a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html#addEvent_call-org.owasp.appsensor.rpc.thrift.generated.Event-java.lang.String-org.apache.thrift.async.AsyncMethodCallback-org.apache.thrift.async.TAsyncClient-org.apache.thrift.protocol.TProtocolFactory-org.apache.thrift.transport.TNonblockingTransport-">addEvent_call</a></span>(<a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/Event.html" title="class in org.owasp.appsensor.rpc.thrift.generated">Event</a> event,
<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> clientApplicationName,
org.apache.thrift.async.AsyncMethodCallback<<a href="../../../../../../../org/owasp/appsensor/rpc/thrift/generated/AppSensorApi.AsyncClient.addEvent_call.html" title="class in org.owasp.appsensor.rpc.thrift.generated">AppSensorApi.AsyncClient.addEvent_call</a>> resultHandler,
org.apache.thrift.async.TAsyncClient client,
org.apache.thrift.protocol.TProtocolFactory protocolFactory,
org.apache.thrift.transport.TNonblockingTransport transport)</code></td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<p class="legalCopy"><small>Copyright © 2017 <a href="http://www.owasp.org">The Open Web Application Security Project (OWASP)</a>. All rights reserved.</small></p>
</body>
</html>
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation.
// All rights reserved.
//
// This code is licensed under the MIT License.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//------------------------------------------------------------------------------
namespace Microsoft.IdentityModel.JsonWebTokens
{
/// <summary>
/// Constants for Json Web tokens.
/// </summary>
public static class JsonClaimValueTypes
{
/// <summary>
/// A URI that represents the JSON XML data type.
/// </summary>
/// <remarks>When mapping JSON to .NET Claim(s), if the value was not a string (or an enumeration of strings), the ClaimValue will be serialized using the current JSON serializer, a property will be added with the .NET type, and the ClaimValueType will be set to this constant.</remarks>
public const string Json = "JSON";
/// <summary>
/// A URI that represents the JSON array XML data type.
/// </summary>
/// <remarks>When mapping JSON to .NET Claim(s), if the value was not a string (or an enumeration of strings), the ClaimValue will be serialized using the current JSON serializer, a property will be added with the .NET type, and the ClaimValueType will be set to this constant.</remarks>
public const string JsonArray = "JSON_ARRAY";
/// <summary>
/// A URI that represents the JSON null data type
/// </summary>
/// <remarks>When mapping JSON to .NET Claim(s), we use an empty string to represent the claim value and set the ClaimValueType to JsonNull.</remarks>
public const string JsonNull = "JSON_NULL";
}
}
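// Usage sketch (illustrative, not part of the library): a mapped claim's
// ValueType tells the consumer whether the value carries serialized JSON.
internal static class JsonClaimValueTypesUsageSketch
{
    internal static bool HoldsJson(System.Security.Claims.Claim claim)
    {
        // True when the claim value is serialized JSON (object or array)
        // rather than a plain string.
        return claim.ValueType == Microsoft.IdentityModel.JsonWebTokens.JsonClaimValueTypes.Json
            || claim.ValueType == Microsoft.IdentityModel.JsonWebTokens.JsonClaimValueTypes.JsonArray;
    }
}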
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" standalone="no" ?>
<!DOCTYPE pov SYSTEM "/usr/share/cgc-docs/replay.dtd">
<pov>
<cbid>CROMU_00033</cbid>
<replay>
<write echo="ascii"><data>NDIR\x07/GzWnaO</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x0c/GzWnaO/jpSs</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x0a/ut1yb9PKW\x4d\x00ffpegYbXJaCnDg0IxjirTlx0qzuraQPq5wB6VqYlWW3cfmyg1cBGICdeBLX84wTRPvVqbgwY885CA</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /ut1yb9PKW\n</data></match></read>
<write echo="ascii"><data>SEND\x0e/GzWnaO/cCHrey\x1d\x00gSnca8eDD6cSA0HPNvQQJvz2KkoLi</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/cCHrey\n</data></match></read>
<write echo="ascii"><data>SEND\x05/GLBv\x42\x00qbQwZvGYr29CFKOTdY5QLGhJxu0BEa4gqiKpSfhGWMvRpTHwCjt3wnw2Ir7LS0gRrb</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GLBv\n</data></match></read>
<write echo="ascii"><data>NDIR\x11/GzWnaO/MmYcrYKWm</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x13/GzWnaO/jpSs/G1IQN0</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x1a/GzWnaO/MmYcrYKWm/7iNjbi86</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x06/zEIY4\x17\x00va1kbfnEfEQIIs4IRTNsSkb</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /zEIY4\n</data></match></read>
<write echo="ascii"><data>RECV\x0a/ut1yb9PKW</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] ffpegYbXJaCnDg0IxjirTlx0qzuraQPq5wB6VqYlWW3cfmyg1cBGICdeBLX84wTRPvVqbgwY885CA\n</data></match></read>
<write echo="ascii"><data>SEND\x1b/GzWnaO/jpSs/G1IQN0/WWytqs0\x3f\x00qIG2XIZx6wKc6cajIzpAD5L8W4rl0ubL1IRTTOMceef4v0rDUbnPbwIciA9y7ha</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/WWytqs0\n</data></match></read>
<write echo="ascii"><data>RECV\x05/GLBv</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] qbQwZvGYr29CFKOTdY5QLGhJxu0BEa4gqiKpSfhGWMvRpTHwCjt3wnw2Ir7LS0gRrb\n</data></match></read>
<write echo="ascii"><data>SEND\x17/GzWnaO/jpSs/lgF6KH1nIa\x4b\x00FsptzzvspZmqmxX35XHQ7kZ9MLyoU8y4OJPc777pRY4g8KyXB1b7zWzrXdhe0odqN6EflEaNUJO</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/lgF6KH1nIa\n</data></match></read>
<write echo="ascii"><data>SEND\x06/88mJ9\x3a\x00XhLMrrnnBXCF7HMrFTDaLL9RyXqG4VXZGjeID5w1n0wyN5M3ioOapAcrpO</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /88mJ9\n</data></match></read>
<write echo="ascii"><data>RECV\x06/zEIY4</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] va1kbfnEfEQIIs4IRTNsSkb\n</data></match></read>
<write echo="ascii"><data>SEND\x1b/GzWnaO/jpSs/G1IQN0/97HIRga\x28\x00LCiiMg2C3Tm8IpD11bMp2fb9dfiwpwp5DZsy6e0r</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/97HIRga\n</data></match></read>
<write echo="ascii"><data>SEND\x18/GzWnaO/jpSs/G1IQN0/ClQ7\x2b\x00lRjRHuoPmlxd6SHfyTS5OyLvr3NXYP91nSauwWWKFGl</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/ClQ7\n</data></match></read>
<write echo="ascii"><data>NDIR\x19/GzWnaO/MmYcrYKWm/bQpVKkc</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>RECV\x1b/GzWnaO/jpSs/G1IQN0/WWytqs0</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] qIG2XIZx6wKc6cajIzpAD5L8W4rl0ubL1IRTTOMceef4v0rDUbnPbwIciA9y7ha\n</data></match></read>
<write echo="ascii"><data>NDIR\x1d/GzWnaO/jpSs/G1IQN0/uFeGAmqjg</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>RECV\x05/GLBv</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] qbQwZvGYr29CFKOTdY5QLGhJxu0BEa4gqiKpSfhGWMvRpTHwCjt3wnw2Ir7LS0gRrb\n</data></match></read>
<write echo="ascii"><data>SEND\x06/ZZFpj\x4e\x00gcwARlvJC1rkXD41Arj4K9uCyngFGXIdz05jl6ySjGg2bbzrp11DO1B6e8H8bABEMU7qFtg3RUUMwX</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /ZZFpj\n</data></match></read>
<write echo="ascii"><data>NDIR\x13/GzWnaO/jpSs/HXeXzJ</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x18/GzWnaO/jpSs/G1IQN0/MPHq\x32\x00UKVTjH6IWOsAB7ajBkJe2Lp6lu52jBQx0ZkVOiF2PVAbswoqg3</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/MPHq\n</data></match></read>
<write echo="ascii"><data>NDIR\x15/GzWnaO/jpSs/H3fGbn0D</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x12/GzWnaO/ZtpzesLfLx</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x17/GzWnaO/ZtpzesLfLx/N8iV\x45\x00Sh63B42lcqOSoYea2er0NIPvEQDrUX9HBwdNWJE1WncQbwqy89oU6ocdo2cY95P5d0r5l</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/ZtpzesLfLx/N8iV\n</data></match></read>
<write echo="ascii"><data>SEND\x25/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/JwS577x\x57\x00ze5LHoAjgeUdScEwLMzQTsWtJ21KxB7BxkvERzW5hnmSfq37BeAiWffMnIFtJlt0JrbRYl7lr2G21NGng5z5h0r</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/uFeGAmqjg/JwS577x\n</data></match></read>
<write echo="ascii"><data>SEND\x0c/GzWnaO/LgE3\x30\x00RBYd3WbCHvWQHfTXkogRRtbd1wD1LmuANDtg6hUr5pPTg3ek</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/LgE3\n</data></match></read>
<write echo="ascii"><data>NDIR\x1f/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x07/FkBC6P\x49\x00jlProNnUiHtLMDn376VnV24u0xutnaSBc9Qz612QlmJCsjrv6KUD82sUPzypEKs4XP3JnHR8P</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /FkBC6P\n</data></match></read>
<write echo="ascii"><data>NDIR\x21/GzWnaO/MmYcrYKWm/7iNjbi86/bU162t</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x25/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x23/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x28/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x1a/GzWnaO/jpSs/H3fGbn0D/oEkN\x2a\x00R9HHEmE8mwEr6Nj1dRV8QNNaPILp8rUgU3xDZ7MDne</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/oEkN\n</data></match></read>
<write echo="ascii"><data>NDIR\x19/GzWnaO/jpSs/HXeXzJ/zrWm0</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>RECV\x05/GLBv</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] qbQwZvGYr29CFKOTdY5QLGhJxu0BEa4gqiKpSfhGWMvRpTHwCjt3wnw2Ir7LS0gRrb\n</data></match></read>
<write echo="ascii"><data>SEND\x22/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/Ay7r\x19\x00wtyMQweE8GjCOmeFAZXwZQRxB</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/uFeGAmqjg/Ay7r\n</data></match></read>
<write echo="ascii"><data>SEND\x24/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/jxL2\x57\x00sQqwQ4Ye8uFhZJl6PL37ZDbSQBqu6e5f8ulKy4K1vv81H3flBBNpPHG5X4dnBU3uWeN13Gf0duqXbub5Sgf3i61</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/jxL2\n</data></match></read>
<write echo="ascii"><data>SEND\x1a/GzWnaO/jpSs/H3fGbn0D/x6pV\x15\x002O8oGlxxzSw5b6EJjp0ru</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/x6pV\n</data></match></read>
<write echo="ascii"><data>SEND\x33/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/yuxisXkN94\x21\x00XMb5rNqgSsCRrtRanocMi26klcI8hAUCl</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/yuxisXkN94\n</data></match></read>
<write echo="ascii"><data>SEND\x19/GzWnaO/MmYcrYKWm/pbCydr8\x53\x00v8tlzoNI4JC7sFz5Ftzn0XV5sWOiWqPTtoC1XJ08CMgpK48cyj6gaz8QCb0F4yYk6Kd3xkPC43w8AFJp9F6</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/pbCydr8\n</data></match></read>
<write echo="ascii"><data>SEND\x27/GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/GHrEo\x2b\x00ODFTLHgvnU5IWAixXg6QpwNMeklbCyEfGqsVLTsJYba</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/GHrEo\n</data></match></read>
<write echo="ascii"><data>RECV\x05/GLBv</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] qbQwZvGYr29CFKOTdY5QLGhJxu0BEa4gqiKpSfhGWMvRpTHwCjt3wnw2Ir7LS0gRrb\n</data></match></read>
<write echo="ascii"><data>SEND\x2e/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/dEknRWPp\x5b\x00znmVymO2QGYS3I3nkD8YWQGnX765NCdl4dXTATRsegPRZhmEvpmezpyOblU1eTFc7rC1ASDfOcWL103SfNdBchtaLX7</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/dEknRWPp\n</data></match></read>
<write echo="ascii"><data>SEND\x26/GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/r8Pz\x30\x000uHXArx2kGm2JmpjfEx8e2IyzJnYhFfLOaLkU0BUzcnNFr7p</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/r8Pz\n</data></match></read>
<write echo="ascii"><data>NDIR\x0a/BgkplRqMX</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x20/GzWnaO/MmYcrYKWm/7iNjbi86/4c0Ol</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>RECV\x05/GLBv</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] qbQwZvGYr29CFKOTdY5QLGhJxu0BEa4gqiKpSfhGWMvRpTHwCjt3wnw2Ir7LS0gRrb\n</data></match></read>
<write echo="ascii"><data>SEND\x1e/GzWnaO/jpSs/HXeXzJ/zrWm0/F214\x5e\x00bqXNC5vVZSuPPA7zaC4PQV4JY9CN2ZaHbL0lEfWSOfQwjW5UXuDsoXy4eiabeUc96Xi7IHwCtYS3u1boL98GhjxZCF93hE</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/HXeXzJ/zrWm0/F214\n</data></match></read>
<write echo="ascii"><data>SEND\x2b/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/sQxhe2D\x56\x00g33PMHweYZ2zcmT3IGiV4gw6pRgz94nbXTgsGkTTELPlvHnkfiYm07huzlSVHjs4G8lStPBzQKPOU7bV7R2EkT</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/sQxhe2D\n</data></match></read>
<write echo="ascii"><data>SEND\x1f/GzWnaO/MmYcrYKWm/bQpVKkc/3xMtJ\x19\x00PJGQLjxJBCZuBxQsTEuFV0gpc</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/3xMtJ\n</data></match></read>
<write echo="ascii"><data>SEND\x23/GzWnaO/MmYcrYKWm/bQpVKkc/laI0XCHuL\x50\x00oaUfslt9myEIvfIFeaDBIUba6AXC5jzPkzTbFGG6ytoFSuDSaFTbunuMW45qcoFpVUOIikbuYMtxaIu2</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/laI0XCHuL\n</data></match></read>
<write echo="ascii"><data>RECV\x06/ZZFpj</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] gcwARlvJC1rkXD41Arj4K9uCyngFGXIdz05jl6ySjGg2bbzrp11DO1B6e8H8bABEMU7qFtg3RUUMwX\n</data></match></read>
<write echo="ascii"><data>NDIR\x2a/GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/kP0VvqVL</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x1c/GzWnaO/jpSs/H3fGbn0D/HXTYU9</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x21/GzWnaO/MmYcrYKWm/7iNjbi86/FqSYlI\x46\x00RfSUZQYs1zkxB9YmVMixvH4QEp4oE1A2qJW6xuxixM5exhCU07fNvw0wyFowryamxvWomp</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/7iNjbi86/FqSYlI\n</data></match></read>
<write echo="ascii"><data>NDIR\x1a/GzWnaO/jpSs/H3fGbn0D/nrh6</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x25/GzWnaO/jpSs/H3fGbn0D/nrh6/DFBCQNOnfI\x53\x00IB1GCd2T6mV8OmS7r4ihhMSH30CHYeaqUlN0DeNIcRseZRQYIz17vschAUQqZhaPhY19Pn8tTW16060z67j</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/nrh6/DFBCQNOnfI\n</data></match></read>
<write echo="ascii"><data>RECV\x1a/GzWnaO/jpSs/H3fGbn0D/x6pV</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] 2O8oGlxxzSw5b6EJjp0ru\n</data></match></read>
<write echo="ascii"><data>RECV\x25/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/JwS577x</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] ze5LHoAjgeUdScEwLMzQTsWtJ21KxB7BxkvERzW5hnmSfq37BeAiWffMnIFtJlt0JrbRYl7lr2G21NGng5z5h0r\n</data></match></read>
<write echo="ascii"><data>RECV\x22/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/Ay7r</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] wtyMQweE8GjCOmeFAZXwZQRxB\n</data></match></read>
<write echo="ascii"><data>NDIR\x2a/GzWnaO/MmYcrYKWm/7iNjbi86/4c0Ol/6NGmOmfau</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x21/GzWnaO/MmYcrYKWm/bQpVKkc/nvSMWYj</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x22/GzWnaO/jpSs/H3fGbn0D/HXTYU9/3mZ3d\x58\x008SoF51isGL5ABrvMnnJJ981cUYt9gtrJUZ73o468Ql0tXpDZK0OkBpeSlEmSIRUxmG2M4tB0kRjPiGBAhIoDZgzS</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/HXTYU9/3mZ3d\n</data></match></read>
<write echo="ascii"><data>SEND\x24/GzWnaO/jpSs/H3fGbn0D/nrh6/TdbdSMqcV\x4e\x00Xzon9MXmpqZloL8Lk4CrLDZTEEmyHoP5ELOCIyqqtJEbi4KiwwQWzTDPpYQaYFg6gELlq1ZAPu1eWU</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/nrh6/TdbdSMqcV\n</data></match></read>
<write echo="ascii"><data>NDIR\x32/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x28/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/SoV2DGjb</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x16/GzWnaO/jpSs/20pY6qgFU</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x1a/GzWnaO/MmYcrYKWm/nJmF9FOq\x19\x004NfAhDMlHLKfsPzE44l9qzsvn</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/nJmF9FOq\n</data></match></read>
<write echo="ascii"><data>NDIR\x1b/GzWnaO/jpSs/G1IQN0/sIR8cvp</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x29/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/sfeUCsd4y</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x27/GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x32/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/oOKwsdl2D\x2b\x00N2yYXzno7POmBMCsH1O56TBWvrbnYv85xl5QEpAVW6Y</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/oOKwsdl2D\n</data></match></read>
<write echo="ascii"><data>SEND\x1d/GzWnaO/jpSs/G1IQN0/myQ3kA501\x63\x00QSvgxgEiXpJLsotgjPNC4lAPExItaM78fcAFk8ikE7hpg1IHtZbYSXniWuGXmSfN1YIQ6PY7oAqJnVQzhlx4NsDXokCuGBHUu72</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/myQ3kA501\n</data></match></read>
<write echo="ascii"><data>RECV\x25/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/JwS577x</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] ze5LHoAjgeUdScEwLMzQTsWtJ21KxB7BxkvERzW5hnmSfq37BeAiWffMnIFtJlt0JrbRYl7lr2G21NGng5z5h0r\n</data></match></read>
<write echo="ascii"><data>NDIR\x22/GzWnaO/MmYcrYKWm/7iNjbi86/Xvvl4bw</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x38/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>REPO\x21/GzWnaO/MmYcrYKWm/7iNjbi86/FqSYlI</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] FqSYlI removed\n</data></match></read>
<write echo="ascii"><data>SEND\x43/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/gaykFF4AGP\x50\x00Ww8VAunIOlwI6XVhobTDuAXcef5lt6Ppi4pLGCqvtOceFEY5ryrmaUMABHmCYi0N7vlhGAVkMFVWmxwx</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/gaykFF4AGP\n</data></match></read>
<write echo="ascii"><data>NDIR\x2c/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/04Abdf</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x43/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x4d/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/e2BwozxUk</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>REPO\x17/GzWnaO/jpSs/lgF6KH1nIa</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] lgF6KH1nIa removed\n</data></match></read>
<write echo="ascii"><data>SEND\x3c/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/ziKSN5J2l\x59\x00OZTV3zafUAOdYgxIJW7jHWxkzqjzFvvUEBtGOFnMY0aV9nZZbdUfVzBl6Q1TX6p4JxsZM3zuBuHZGyUyLB6n61pTv</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/ziKSN5J2l\n</data></match></read>
<write echo="ascii"><data>SEND\x21/GzWnaO/jpSs/H3fGbn0D/HXTYU9/e8fP\x61\x00p30um8bdT1QSQAQGOhQE6zVc8tCMxRcVXvWVPmwp3zb0aZUHGX0yxzdjXk025BawX1fi8yZYYAOMFXaLg8suCu9hrSMQs0Eai</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/HXTYU9/e8fP\n</data></match></read>
<write echo="ascii"><data>RECV\x1a/GzWnaO/jpSs/H3fGbn0D/x6pV</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] 2O8oGlxxzSw5b6EJjp0ru\n</data></match></read>
<write echo="ascii"><data>SEND\x1e/GzWnaO/jpSs/20pY6qgFU/65PcjOr\x4e\x00qWJTfi032JZIan0UFWOM8oRzOBYAjjyvi1Lv8Mp5HbVfmmvyHspbC5ChXdtcXFuttHyBshRXuKEiee</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/20pY6qgFU/65PcjOr\n</data></match></read>
<write echo="ascii"><data>SEND\x2f/GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/CfvV699\x4a\x00WkrSX3o2JpW1jSqpc6Hr32nyBD4zVLn1OnvgwmXCvt0o3UbkB9MNmYYVOp3vZIaNAGjYuoVWuz</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/CfvV699\n</data></match></read>
<write echo="ascii"><data>REPO\x1a/GzWnaO/jpSs/H3fGbn0D/x6pV</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] x6pV removed\n</data></match></read>
<write echo="ascii"><data>REPO\x2b/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/sQxhe2D</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] sQxhe2D removed\n</data></match></read>
<write echo="ascii"><data>REPO\x0c/GzWnaO/LgE3</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] LgE3 removed\n</data></match></read>
<write echo="ascii"><data>SEND\x22/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/CncY\x1c\x00bkagv7TbBtwROkf0wK4tqL0ElAsW</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/uFeGAmqjg/CncY\n</data></match></read>
<write echo="ascii"><data>REPO\x1a/GzWnaO/jpSs/H3fGbn0D/oEkN</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] oEkN removed\n</data></match></read>
<write echo="ascii"><data>SEND\x30/GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/7OlHXlZu\x23\x009tmx0VdAPgzFiv5EyvGHJGmtdIX0I1sFlPL</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/7OlHXlZu\n</data></match></read>
<write echo="ascii"><data>REPO\x0e/GzWnaO/cCHrey</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] cCHrey removed\n</data></match></read>
<write echo="ascii"><data>SEND\x16/GzWnaO/MmYcrYKWm/MIN6\x1b\x00CfzP2vf9ydK1qU675GG4Eat26Mz</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/MIN6\n</data></match></read>
<write echo="ascii"><data>SEND\x37/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/FLxT\x46\x00kLFkvcP6j8j7y3INIvNy1EdWzShdGoCxlDJQsREhBQ4RiswzOtWGcQQmLTZ4DmQj1PIN9v</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/FLxT\n</data></match></read>
<write echo="ascii"><data>SEND\x54/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/e2BwozxUk/OMuBWf\x1e\x00uQIruqnwH5MdqKvaYUFkrbAAVCqL9o</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/e2BwozxUk/OMuBWf\n</data></match></read>
<write echo="ascii"><data>NDIR\x18/GzWnaO/MmYcrYKWm/EiPxBb</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x1f/GzWnaO/MmYcrYKWm/7iNjbi86/0b5z</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x15/BgkplRqMX/Hq2X9XLqiL\x41\x00Tazj8v97FGj2YYCb5gLmbAz80FI2M6FGvSj4oNd4VVIsz6IJoU6EW4Tbi2asLsENO</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /BgkplRqMX/Hq2X9XLqiL\n</data></match></read>
<write echo="ascii"><data>NDIR\x19/GzWnaO/jpSs/G1IQN0/UT8EP</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x22/GzWnaO/jpSs/G1IQN0/sIR8cvp/bVbJa9\x51\x00FiEK44tFr2tFiUJgyud3lFYeI6uJsJAEZtbZrfhayXTyIwSn5RgHV4updoevm1vpQTG7JNPv3I0wr9rLd</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/sIR8cvp/bVbJa9\n</data></match></read>
<write echo="ascii"><data>SEND\x39/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/aEt08Y\x62\x00o1RqcSmL5Vxc0MN8pfTxwaadvenmPkUwpszhwdWRBVUyHmrW6FmaKkq0cW2IqShmdveh8BhTDaOGQEzjlHq1z7MZM2aoNJu6rb</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/aEt08Y\n</data></match></read>
<write echo="ascii"><data>REPO\x33/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/yuxisXkN94</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] yuxisXkN94 removed\n</data></match></read>
<write echo="ascii"><data>REPO\x22/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/Ay7r</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Ay7r removed\n</data></match></read>
<write echo="ascii"><data>SEND\x0e/GzWnaO/nLdL8x\x59\x003YOSKQr4rgGdJDwaVptVK0M9pWpLrpCUaSQIGjQmjanCEafdsMvIZQfBuHo4cigcxs2JxunMKloOrqfQKcDWQ4nQo</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/nLdL8x\n</data></match></read>
<write echo="ascii"><data>NDIR\x28/GzWnaO/MmYcrYKWm/7iNjbi86/Xvvl4bw/3ESLs</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>REPO\x32/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/oOKwsdl2D</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] oOKwsdl2D removed\n</data></match></read>
<write echo="ascii"><data>SEND\x17/GzWnaO/jpSs/bQW4qsv963\x4a\x004UZiV926V1XOI9HsvToRjb87FGa1QEXxpzcoxWBUa1AqYBq2nTSEU0nkEyi8pL4UL5R2lHzMZJ</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/bQW4qsv963\n</data></match></read>
<write echo="ascii"><data>SEND\x2c/GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/G7Qz\x5a\x00CJhcF9HszyNGgspo9IKAlVKOVu979Y5AmT7RAcQ9HjSKodXbftRaY4orc21I0sjzKEi66Yx3GboqbbBecsNjsS7aqT</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/G7Qz\n</data></match></read>
<write echo="ascii"><data>NDIR\x1c/GzWnaO/ZtpzesLfLx/ZKecY8qI7</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x1f/GzWnaO/MmYcrYKWm/EiPxBb/Tzka8W</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>REPO\x1e/GzWnaO/jpSs/20pY6qgFU/65PcjOr</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] 65PcjOr removed\n</data></match></read>
<write echo="ascii"><data>NDIR\x27/GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/wtAcA</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x35/GzWnaO/MmYcrYKWm/7iNjbi86/bU162t/kP0VvqVL/K4IR6oQwKQ</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x17/GzWnaO/jpSs/5yJSJyhth1</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x22/GzWnaO/jpSs/G1IQN0/UT8EP/EytmSZ5F\x15\x00SvJ11TY7RpXHgBjNgRsF0</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/UT8EP/EytmSZ5F\n</data></match></read>
<write echo="ascii"><data>SEND\x37/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/04Abdf/3aSV7km3Ow\x3a\x00HoDZNsAS5QYzdeZT0sNq5qUkoe8Fj9JomnAmOccaAWp8mES95GJfLA9J96</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/04Abdf/3aSV7km3Ow\n</data></match></read>
<write echo="ascii"><data>SEND\x16/GzWnaO/jpSs/shIoO0E9B\x5d\x00iR5vIRSKUaMjgMqnp25EOQOQdW7qXyaYCeKYusSy513U3z8UwJ6ywHlHHhRs4MGrBPy3Cy5zOe9mtBl185ZXTtqzUgI7I</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/shIoO0E9B\n</data></match></read>
<write echo="ascii"><data>SEND\x30/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/SoV2DGjb/BFXxyG9\x39\x00oqTblzfVVCcR5kOxI4xG2m2Vxw2bvb2AKnw2yyL2yNvufUjUqm4gdY33R</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/SoV2DGjb/BFXxyG9\n</data></match></read>
<write echo="ascii"><data>REPO\x54/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/e2BwozxUk/OMuBWf</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] OMuBWf removed\n</data></match></read>
<write echo="ascii"><data>REPO\x0a/ut1yb9PKW</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] ut1yb9PKW removed\n</data></match></read>
<write echo="ascii"><data>NDIR\x28/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/xY4d</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x21/GzWnaO/jpSs/5yJSJyhth1/cIz0sOGis\x29\x00Az87MEV5CWWVxDmfa5zi9XU1H34CF2rCZAXl7e7E3</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/5yJSJyhth1/cIz0sOGis\n</data></match></read>
<write echo="ascii"><data>NDIR\x1b/GzWnaO/jpSs/G1IQN0/NDlYCvE</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>RECV\x22/GzWnaO/jpSs/G1IQN0/uFeGAmqjg/CncY</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] bkagv7TbBtwROkf0wK4tqL0ElAsW\n</data></match></read>
<write echo="ascii"><data>REPO\x30/GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/7OlHXlZu</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] 7OlHXlZu removed\n</data></match></read>
<write echo="ascii"><data>REPO\x19/GzWnaO/MmYcrYKWm/pbCydr8</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] pbCydr8 removed\n</data></match></read>
<write echo="ascii"><data>REPO\x37/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/04Abdf/3aSV7km3Ow</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] 3aSV7km3Ow removed\n</data></match></read>
<write echo="ascii"><data>SEND\x22/GzWnaO/jpSs/G1IQN0/UT8EP/EJZFv8pw\x55\x00Jh1pUTMPmXoVLVNYNWOqlvbu91pP2Psk2ypPiqtPfaKTBbycnWCUbQVyUGjNQGmdbW8VFnWHhHd90Fz4WMLPY</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/jpSs/G1IQN0/UT8EP/EJZFv8pw\n</data></match></read>
<write echo="ascii"><data>NDIR\x1f/GzWnaO/jpSs/G1IQN0/UT8EP/MmKUw</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>REPO\x43/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/gaykFF4AGP</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] gaykFF4AGP removed\n</data></match></read>
<write echo="ascii"><data>SEND\x4a/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/80LB7k\x47\x00b6XoJEx8tMyym6uKNXVtjWGgwXXZoduwlv4JAn8qxFqv0bRZD61Oj98JyOwAowbP4Vv733S</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/80LB7k\n</data></match></read>
<write echo="ascii"><data>REPO\x24/GzWnaO/jpSs/H3fGbn0D/nrh6/TdbdSMqcV</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] TdbdSMqcV removed\n</data></match></read>
<write echo="ascii"><data>RECV\x2c/GzWnaO/jpSs/H3fGbn0D/HXTYU9/ZeXB7lluUN/G7Qz</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] CJhcF9HszyNGgspo9IKAlVKOVu979Y5AmT7RAcQ9HjSKodXbftRaY4orc21I0sjzKEi66Yx3GboqbbBecsNjsS7aqT\n</data></match></read>
<write echo="ascii"><data>NDIR\x18/GzWnaO/MmYcrYKWm/bpxJjz</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>NDIR\x49/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/sBiOU/86NOKEbYWB/ylBX9</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Added new directory\n</data></match></read>
<write echo="ascii"><data>SEND\x37/GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/04Abdf/oMItdyEbST\x1e\x00a7T8MC7DJAzK5kzZEljYBY6eQ593yM</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/bQpVKkc/qb6S9/IOgK5/04Abdf/oMItdyEbST\n</data></match></read>
<write echo="ascii"><data>REPO\x1f/GzWnaO/MmYcrYKWm/bQpVKkc/3xMtJ</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] 3xMtJ removed\n</data></match></read>
<write echo="ascii"><data>SEND\x25/GzWnaO/MmYcrYKWm/7iNjbi86/0b5z/Aoumo\x2e\x00dYMNHTcysqzcBU9OsOXlowr7km0aEJwDHITOT74KvR05xf</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] File received: /GzWnaO/MmYcrYKWm/7iNjbi86/0b5z/Aoumo\n</data></match></read>
<write echo="ascii"><data>RECV\x39/GzWnaO/MmYcrYKWm/bQpVKkc/Qs7knaT0B/R4JT/XLPGNmi5z/aEt08Y</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[DATA] o1RqcSmL5Vxc0MN8pfTxwaadvenmPkUwpszhwdWRBVUyHmrW6FmaKkq0cW2IqShmdveh8BhTDaOGQEzjlHq1z7MZM2aoNJu6rb\n</data></match></read>
<write echo="ascii"><data>STOP</data></write>
<read echo="ascii"><delim>\n</delim><match><data>[INFO] Terminating\n</data></match></read>
</replay>
</pov>
| {
"pile_set_name": "Github"
} |
require('../modules/web.timers');
require('../modules/web.immediate');
require('../modules/web.dom.iterable');
module.exports = require('../modules/$.core'); | {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<link rel="shortcut icon" type="image/ico" href="http://www.datatables.net/media/images/favicon.ico" />
<title>DataTables example</title>
<style type="text/css" title="currentStyle">
@import "../../media/css/demo_page.css";
@import "../../media/css/demo_table.css";
</style>
<script type="text/javascript" language="javascript" src="../../media/js/jquery.js"></script>
<script type="text/javascript" language="javascript" src="../../media/js/jquery.dataTables.js"></script>
<script type="text/javascript" charset="utf-8">
$(document).ready(function() {
$('#example').dataTable( {
"sPaginationType": "full_numbers"
} );
} );
</script>
</head>
<body id="dt_example" class="example_alt_pagination">
<div id="container">
<div class="full_width big">
DataTables alternative pagination example
</div>
<h1>Preamble</h1>
<p>The page controls which are used by default in DataTables (forward and backward buttons only) are great for most situations, but there are cases where you may wish to customise the controls presented to the end user. This is made simple by DataTables through its extensible pagination mechanism. There are two types of pagination controls built into DataTables: <b>two_button</b> (default) and <b>full_numbers</b>. To switch between these two types, use the <b>sPaginationType</b> initialisation parameter. You can add additional types of pagination control by extending the <b>$.fn.dataTableExt.oPagination</b> object.</p>
<p>Note also that the number of pages which are shown with direct links (the 1, 2, 3...) can be changed by setting the variable <b>jQuery.fn.dataTableExt.oPagination.iFullNumbersShowPages</b> (default 5). Odd numbers are best as they keep the display symmetrical around the current page.</p>
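<p>As an illustrative sketch (the value 7 here is arbitrary), the assignment below would show seven direct page links rather than the default five. Set it before the table is initialised:</p>
<pre class="brush: js;">jQuery.fn.dataTableExt.oPagination.iFullNumbersShowPages = 7;

$(document).ready(function() {
	$('#example').dataTable( {
		"sPaginationType": "full_numbers"
	} );
} );</pre>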
<p>The example below shows the <b>full_numbers</b> type of pagination, where 'first', 'previous', 'next' and 'last' buttons are presented, as well as the five pages around the current page.</p>
<h1>Live example</h1>
<div id="demo">
<table cellpadding="0" cellspacing="0" border="0" class="display" id="example">
<thead>
<tr>
<th>Rendering engine</th>
<th>Browser</th>
<th>Platform(s)</th>
<th>Engine version</th>
<th>CSS grade</th>
</tr>
</thead>
<tbody>
<tr class="gradeX">
<td>Trident</td>
<td>
Internet
Explorer
4.0
</td>
<td>Win 95+</td>
<td class="center">4</td>
<td class="center">X</td>
</tr>
<tr class="gradeC">
<td>Trident</td>
<td>Internet
Explorer 5.0</td>
<td>Win 95+</td>
<td class="center">5</td>
<td class="center">C</td>
</tr>
<tr class="gradeA">
<td>Trident</td>
<td>Internet
Explorer 5.5</td>
<td>Win 95+</td>
<td class="center">5.5</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Trident</td>
<td>Internet
Explorer 6</td>
<td>Win 98+</td>
<td class="center">6</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Trident</td>
<td>Internet Explorer 7</td>
<td>Win XP SP2+</td>
<td class="center">7</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Trident</td>
<td>AOL browser (AOL desktop)</td>
<td>Win XP</td>
<td class="center">6</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Firefox 1.0</td>
<td>Win 98+ / OSX.2+</td>
<td class="center">1.7</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Firefox 1.5</td>
<td>Win 98+ / OSX.2+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Firefox 2.0</td>
<td>Win 98+ / OSX.2+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Firefox 3.0</td>
<td>Win 2k+ / OSX.3+</td>
<td class="center">1.9</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Camino 1.0</td>
<td>OSX.2+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Camino 1.5</td>
<td>OSX.3+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Netscape 7.2</td>
<td>Win 95+ / Mac OS 8.6-9.2</td>
<td class="center">1.7</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Netscape Browser 8</td>
<td>Win 98SE+</td>
<td class="center">1.7</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Netscape Navigator 9</td>
<td>Win 98+ / OSX.2+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.0</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.1</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1.1</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.2</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1.2</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.3</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1.3</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.4</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1.4</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.5</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1.5</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.6</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">1.6</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.7</td>
<td>Win 98+ / OSX.1+</td>
<td class="center">1.7</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Mozilla 1.8</td>
<td>Win 98+ / OSX.1+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Seamonkey 1.1</td>
<td>Win 98+ / OSX.2+</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Gecko</td>
<td>Epiphany 2.20</td>
<td>Gnome</td>
<td class="center">1.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>Safari 1.2</td>
<td>OSX.3</td>
<td class="center">125.5</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>Safari 1.3</td>
<td>OSX.3</td>
<td class="center">312.8</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>Safari 2.0</td>
<td>OSX.4+</td>
<td class="center">419.3</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>Safari 3.0</td>
<td>OSX.4+</td>
<td class="center">522.1</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>OmniWeb 5.5</td>
<td>OSX.4+</td>
<td class="center">420</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>iPod Touch / iPhone</td>
<td>iPod</td>
<td class="center">420.1</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Webkit</td>
<td>S60</td>
<td>S60</td>
<td class="center">413</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 7.0</td>
<td>Win 95+ / OSX.1+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 7.5</td>
<td>Win 95+ / OSX.2+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 8.0</td>
<td>Win 95+ / OSX.2+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 8.5</td>
<td>Win 95+ / OSX.2+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 9.0</td>
<td>Win 95+ / OSX.3+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 9.2</td>
<td>Win 95+ / OSX.3+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera 9.5</td>
<td>Win 95+ / OSX.3+</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Opera for Wii</td>
<td>Wii</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Nokia N800</td>
<td>N800</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>Presto</td>
<td>Nintendo DS browser</td>
<td>Nintendo DS</td>
<td class="center">8.5</td>
<td class="center">C/A<sup>1</sup></td>
</tr>
<tr class="gradeC">
<td>KHTML</td>
<td>Konqueror 3.1</td>
<td>KDE 3.1</td>
<td class="center">3.1</td>
<td class="center">C</td>
</tr>
<tr class="gradeA">
<td>KHTML</td>
<td>Konqueror 3.3</td>
<td>KDE 3.3</td>
<td class="center">3.3</td>
<td class="center">A</td>
</tr>
<tr class="gradeA">
<td>KHTML</td>
<td>Konqueror 3.5</td>
<td>KDE 3.5</td>
<td class="center">3.5</td>
<td class="center">A</td>
</tr>
<tr class="gradeX">
<td>Tasman</td>
<td>Internet Explorer 4.5</td>
<td>Mac OS 8-9</td>
<td class="center">-</td>
<td class="center">X</td>
</tr>
<tr class="gradeC">
<td>Tasman</td>
<td>Internet Explorer 5.1</td>
<td>Mac OS 7.6-9</td>
<td class="center">1</td>
<td class="center">C</td>
</tr>
<tr class="gradeC">
<td>Tasman</td>
<td>Internet Explorer 5.2</td>
<td>Mac OS 8-X</td>
<td class="center">1</td>
<td class="center">C</td>
</tr>
<tr class="gradeA">
<td>Misc</td>
<td>NetFront 3.1</td>
<td>Embedded devices</td>
<td class="center">-</td>
<td class="center">C</td>
</tr>
<tr class="gradeA">
<td>Misc</td>
<td>NetFront 3.4</td>
<td>Embedded devices</td>
<td class="center">-</td>
<td class="center">A</td>
</tr>
<tr class="gradeX">
<td>Misc</td>
<td>Dillo 0.8</td>
<td>Embedded devices</td>
<td class="center">-</td>
<td class="center">X</td>
</tr>
<tr class="gradeX">
<td>Misc</td>
<td>Links</td>
<td>Text only</td>
<td class="center">-</td>
<td class="center">X</td>
</tr>
<tr class="gradeX">
<td>Misc</td>
<td>Lynx</td>
<td>Text only</td>
<td class="center">-</td>
<td class="center">X</td>
</tr>
<tr class="gradeC">
<td>Misc</td>
<td>IE Mobile</td>
<td>Windows Mobile 6</td>
<td class="center">-</td>
<td class="center">C</td>
</tr>
<tr class="gradeC">
<td>Misc</td>
<td>PSP browser</td>
<td>PSP</td>
<td class="center">-</td>
<td class="center">C</td>
</tr>
<tr class="gradeU">
<td>Other browsers</td>
<td>All others</td>
<td>-</td>
<td class="center">-</td>
<td class="center">U</td>
</tr>
</tbody>
<tfoot>
<tr>
<th>Rendering engine</th>
<th>Browser</th>
<th>Platform(s)</th>
<th>Engine version</th>
<th>CSS grade</th>
</tr>
</tfoot>
</table>
</div>
<div class="spacer"></div>
<h1>Initialisation code</h1>
<pre class="brush: js;">$(document).ready(function() {
$('#example').dataTable( {
"sPaginationType": "full_numbers"
} );
} );</pre>
<style type="text/css">
@import "../examples_support/syntax/css/shCore.css";
</style>
<script type="text/javascript" language="javascript" src="../examples_support/syntax/js/shCore.js"></script>
<h1>Other examples</h1>
<div class="demo_links">
<h2>Basic initialisation</h2>
<ul>
<li><a href="../basic_init/zero_config.html">Zero configuration</a></li>
<li><a href="../basic_init/filter_only.html">Feature enablement</a></li>
<li><a href="../basic_init/table_sorting.html">Sorting data</a></li>
<li><a href="../basic_init/multi_col_sort.html">Multi-column sorting</a></li>
<li><a href="../basic_init/multiple_tables.html">Multiple tables</a></li>
<li><a href="../basic_init/hidden_columns.html">Hidden columns</a></li>
<li><a href="../basic_init/complex_header.html">Complex headers - grouping with colspan</a></li>
<li><a href="../basic_init/dom.html">DOM positioning</a></li>
<li><a href="../basic_init/flexible_width.html">Flexible table width</a></li>
<li><a href="../basic_init/state_save.html">State saving</a></li>
<li><a href="../basic_init/alt_pagination.html">Alternative pagination styles</a></li>
<li>Scrolling: <br>
<a href="../basic_init/scroll_x.html">Horizontal</a> /
<a href="../basic_init/scroll_y.html">Vertical</a> /
<a href="../basic_init/scroll_xy.html">Both</a> /
<a href="../basic_init/scroll_y_theme.html">Themed</a> /
<a href="../basic_init/scroll_y_infinite.html">Infinite</a>
</li>
<li><a href="../basic_init/language.html">Change language information (internationalisation)</a></li>
<li><a href="../basic_init/themes.html">ThemeRoller themes (Smoothness)</a></li>
</ul>
<h2>Advanced initialisation</h2>
<ul>
<li>Events: <br>
<a href="../advanced_init/events_live.html">Live events</a> /
<a href="../advanced_init/events_pre_init.html">Pre-init</a> /
<a href="../advanced_init/events_post_init.html">Post-init</a>
</li>
<li><a href="../advanced_init/column_render.html">Column rendering</a></li>
<li><a href="../advanced_init/html_sort.html">Sorting without HTML tags</a></li>
<li><a href="../advanced_init/dom_multiple_elements.html">Multiple table controls (sDom)</a></li>
<li><a href="../advanced_init/length_menu.html">Defining length menu options</a></li>
<li><a href="../advanced_init/complex_header.html">Complex headers and hidden columns</a></li>
<li><a href="../advanced_init/dom_toolbar.html">Custom toolbar (element) around table</a></li>
<li><a href="../advanced_init/highlight.html">Row highlighting with CSS</a></li>
<li><a href="../advanced_init/row_grouping.html">Row grouping</a></li>
<li><a href="../advanced_init/row_callback.html">Row callback</a></li>
<li><a href="../advanced_init/footer_callback.html">Footer callback</a></li>
<li><a href="../advanced_init/sorting_control.html">Control sorting direction of columns</a></li>
<li><a href="../advanced_init/language_file.html">Change language information from a file (internationalisation)</a></li>
<li><a href="../advanced_init/defaults.html">Setting defaults</a></li>
<li><a href="../advanced_init/localstorage.html">State saving with localStorage</a></li>
<li><a href="../advanced_init/dt_events.html">Custom events</a></li>
</ul>
<h2>API</h2>
<ul>
<li><a href="../api/add_row.html">Dynamically add a new row</a></li>
<li><a href="../api/multi_filter.html">Individual column filtering (using "input" elements)</a></li>
<li><a href="../api/multi_filter_select.html">Individual column filtering (using "select" elements)</a></li>
<li><a href="../api/highlight.html">Highlight rows and columns</a></li>
<li><a href="../api/row_details.html">Show and hide details about a particular record</a></li>
<li><a href="../api/select_row.html">User selectable rows (multiple rows)</a></li>
<li><a href="../api/select_single_row.html">User selectable rows (single row) and delete rows</a></li>
<li><a href="../api/editable.html">Editable rows (with jEditable)</a></li>
<li><a href="../api/form.html">Submit form with elements in table</a></li>
<li><a href="../api/counter_column.html">Index column (static number column)</a></li>
<li><a href="../api/show_hide.html">Show and hide columns dynamically</a></li>
<li><a href="../api/api_in_init.html">API function use in initialisation object (callback)</a></li>
<li><a href="../api/tabs_and_scrolling.html">DataTables scrolling and tabs</a></li>
<li><a href="../api/regex.html">Regular expression filtering</a></li>
</ul>
</div>
<div class="demo_links">
<h2>Data sources</h2>
<ul>
<li><a href="../data_sources/dom.html">DOM</a></li>
<li><a href="../data_sources/js_array.html">Javascript array</a></li>
<li><a href="../data_sources/ajax.html">Ajax source</a></li>
<li><a href="../data_sources/server_side.html">Server side processing</a></li>
</ul>
<h2>Server-side processing</h2>
<ul>
<li><a href="../server_side/server_side.html">Obtain server-side data</a></li>
<li><a href="../server_side/custom_vars.html">Add extra HTTP variables</a></li>
<li><a href="../server_side/post.html">Use HTTP POST</a></li>
<li><a href="../server_side/ids.html">Automatic addition of IDs and classes to rows</a></li>
<li><a href="../server_side/object_data.html">Reading table data from objects</a></li>
<li><a href="../server_side/row_details.html">Show and hide details about a particular record</a></li>
<li><a href="../server_side/select_rows.html">User selectable rows (multiple rows)</a></li>
<li><a href="../server_side/jsonp.html">JSONP for a cross domain data source</a></li>
<li><a href="../server_side/editable.html">jEditable integration with DataTables</a></li>
<li><a href="../server_side/defer_loading.html">Deferred loading of Ajax data</a></li>
<li><a href="../server_side/pipeline.html">Pipelining data (reduce Ajax calls for paging)</a></li>
</ul>
<h2>Ajax data source</h2>
<ul>
<li><a href="../ajax/ajax.html">Ajax sourced data (array of arrays)</a></li>
<li><a href="../ajax/objects.html">Ajax sourced data (array of objects)</a></li>
<li><a href="../ajax/defer_render.html">Deferred DOM creation for extra speed</a></li>
<li><a href="../ajax/null_data_source.html">Empty data source columns</a></li>
<li><a href="../ajax/custom_data_property.html">Use a data source other than aaData (the default)</a></li>
<li><a href="../ajax/objects_subarrays.html">Read column data from sub-arrays</a></li>
<li><a href="../ajax/deep.html">Read column data from deeply nested properties</a></li>
</ul>
<h2>Plug-ins</h2>
<ul>
<li><a href="../plug-ins/plugin_api.html">Add custom API functions</a></li>
<li><a href="../plug-ins/sorting_plugin.html">Sorting and automatic type detection</a></li>
<li><a href="../plug-ins/sorting_sType.html">Sorting without automatic type detection</a></li>
<li><a href="../plug-ins/paging_plugin.html">Custom pagination controls</a></li>
<li><a href="../plug-ins/range_filtering.html">Range filtering / custom filtering</a></li>
<li><a href="../plug-ins/dom_sort.html">Live DOM sorting</a></li>
<li><a href="../plug-ins/html_sort.html">Automatic HTML type detection</a></li>
</ul>
</div>
<div id="footer" class="clear" style="text-align:center;">
<p>
Please refer to the <a href="http://www.datatables.net/usage">DataTables documentation</a> for full information about its API properties and methods.<br>
Additionally, there are a wide range of <a href="http://www.datatables.net/extras">extras</a> and <a href="http://www.datatables.net/plug-ins">plug-ins</a> which extend the capabilities of DataTables.
</p>
<span style="font-size:10px;">
DataTables designed and created by <a href="http://www.sprymedia.co.uk">Allan Jardine</a> © 2007-2011<br>
DataTables is dual licensed under the <a href="http://www.datatables.net/license_gpl2">GPL v2 license</a> or a <a href="http://www.datatables.net/license_bsd">BSD (3-point) license</a>.
</span>
</div>
</div>
</body>
</html> | {
"pile_set_name": "Github"
} |
<?php
/**
* @link http://www.yiiframework.com/
* @copyright Copyright (c) 2008 Yii Software LLC
* @license http://www.yiiframework.com/license/
*/
namespace yii\helpers;
use Yii;
use yii\base\ErrorException;
use yii\base\InvalidConfigException;
use yii\base\InvalidParamException;
/**
* BaseFileHelper provides concrete implementation for [[FileHelper]].
*
* Do not use BaseFileHelper. Use [[FileHelper]] instead.
*
* @author Qiang Xue <[email protected]>
* @author Alex Makarov <[email protected]>
* @since 2.0
*/
class BaseFileHelper
{
const PATTERN_NODIR = 1;
const PATTERN_ENDSWITH = 4;
const PATTERN_MUSTBEDIR = 8;
const PATTERN_NEGATIVE = 16;
const PATTERN_CASE_INSENSITIVE = 32;
/**
* @var string the path (or alias) of a PHP file containing MIME type information.
*/
public static $mimeMagicFile = '@yii/helpers/mimeTypes.php';
/**
* Normalizes a file/directory path.
* The normalization does the following work:
*
* - Convert all directory separators into `DIRECTORY_SEPARATOR` (e.g. "\a/b\c" becomes "/a/b/c")
* - Remove trailing directory separators (e.g. "/a/b/c/" becomes "/a/b/c")
* - Turn multiple consecutive slashes into a single one (e.g. "/a///b/c" becomes "/a/b/c")
* - Remove ".." and "." based on their meanings (e.g. "/a/./b/../c" becomes "/a/c")
*
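* A short illustrative example (assuming `DIRECTORY_SEPARATOR` is '/'):
*
* ```php
* FileHelper::normalizePath('/a/./b/../c'); // returns '/a/c'
* ```
*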
* @param string $path the file/directory path to be normalized
* @param string $ds the directory separator to be used in the normalized result. Defaults to `DIRECTORY_SEPARATOR`.
* @return string the normalized file/directory path
*/
public static function normalizePath($path, $ds = DIRECTORY_SEPARATOR)
{
$path = rtrim(strtr($path, '/\\', $ds . $ds), $ds);
if (strpos($ds . $path, "{$ds}.") === false && strpos($path, "{$ds}{$ds}") === false) {
return $path;
}
// the path may contain ".", ".." or double slashes, need to clean them up
$parts = [];
foreach (explode($ds, $path) as $part) {
if ($part === '..' && !empty($parts) && end($parts) !== '..') {
array_pop($parts);
} elseif ($part === '.' || $part === '' && !empty($parts)) {
continue;
} else {
$parts[] = $part;
}
}
$path = implode($ds, $parts);
return $path === '' ? '.' : $path;
}
/**
* Returns the localized version of a specified file.
*
* The searching is based on the specified language code. In particular,
* a file with the same name will be looked for under the subdirectory
* whose name is the same as the language code. For example, given the file "path/to/view.php"
* and language code "zh-CN", the localized file will be looked for as
* "path/to/zh-CN/view.php". If the file is not found, it will try a fallback with just a language code that is
* "zh" i.e. "path/to/zh/view.php". If it is not found as well the original file will be returned.
*
* If the target and the source language codes are the same,
* the original file will be returned.
*
* @param string $file the original file
* @param string $language the target language that the file should be localized to.
* If not set, the value of [[\yii\base\Application::language]] will be used.
* @param string $sourceLanguage the language that the original file is in.
* If not set, the value of [[\yii\base\Application::sourceLanguage]] will be used.
* @return string the matching localized file, or the original file if the localized version is not found.
* If the target and the source language codes are the same, the original file will be returned.
*/
public static function localize($file, $language = null, $sourceLanguage = null)
{
if ($language === null) {
$language = Yii::$app->language;
}
if ($sourceLanguage === null) {
$sourceLanguage = Yii::$app->sourceLanguage;
}
if ($language === $sourceLanguage) {
return $file;
}
$desiredFile = dirname($file) . DIRECTORY_SEPARATOR . $language . DIRECTORY_SEPARATOR . basename($file);
if (is_file($desiredFile)) {
return $desiredFile;
} else {
$language = substr($language, 0, 2);
if ($language === $sourceLanguage) {
return $file;
}
$desiredFile = dirname($file) . DIRECTORY_SEPARATOR . $language . DIRECTORY_SEPARATOR . basename($file);
return is_file($desiredFile) ? $desiredFile : $file;
}
}
/**
* Determines the MIME type of the specified file.
* This method will first try to determine the MIME type based on
* [finfo_open](http://php.net/manual/en/function.finfo-open.php). If the `fileinfo` extension is not installed,
* it will fall back to [[getMimeTypeByExtension()]] when `$checkExtension` is true.
* @param string $file the file name.
* @param string $magicFile name of the optional magic database file (or alias), usually something like `/path/to/magic.mime`.
* This will be passed as the second parameter to [finfo_open()](http://php.net/manual/en/function.finfo-open.php)
* when the `fileinfo` extension is installed. If the MIME type is being determined via [[getMimeTypeByExtension()]]
* and this is null, it will use the file specified by [[mimeMagicFile]].
* @param boolean $checkExtension whether to use the file extension to determine the MIME type in case
* `finfo_open()` cannot determine it.
* @return string the MIME type (e.g. `text/plain`). Null is returned if the MIME type cannot be determined.
* @throws InvalidConfigException when the `fileinfo` PHP extension is not installed and `$checkExtension` is `false`.
*/
public static function getMimeType($file, $magicFile = null, $checkExtension = true)
{
if ($magicFile !== null) {
$magicFile = Yii::getAlias($magicFile);
}
if (!extension_loaded('fileinfo')) {
if ($checkExtension) {
return static::getMimeTypeByExtension($file, $magicFile);
} else {
throw new InvalidConfigException('The fileinfo PHP extension is not installed.');
}
}
$info = finfo_open(FILEINFO_MIME_TYPE, $magicFile);
if ($info) {
$result = finfo_file($info, $file);
finfo_close($info);
if ($result !== false) {
return $result;
}
}
return $checkExtension ? static::getMimeTypeByExtension($file, $magicFile) : null;
}
/**
* Determines the MIME type based on the extension name of the specified file.
* This method will use a local map between extension names and MIME types.
* @param string $file the file name.
* @param string $magicFile the path (or alias) of the file that contains all available MIME type information.
* If this is not set, the file specified by [[mimeMagicFile]] will be used.
* @return string the MIME type. Null is returned if the MIME type cannot be determined.
*/
public static function getMimeTypeByExtension($file, $magicFile = null)
{
$mimeTypes = static::loadMimeTypes($magicFile);
if (($ext = pathinfo($file, PATHINFO_EXTENSION)) !== '') {
$ext = strtolower($ext);
if (isset($mimeTypes[$ext])) {
return $mimeTypes[$ext];
}
}
return null;
}
/**
* Determines the extensions by given MIME type.
* This method will use a local map between extension names and MIME types.
* @param string $mimeType file MIME type.
* @param string $magicFile the path (or alias) of the file that contains all available MIME type information.
* If this is not set, the file specified by [[mimeMagicFile]] will be used.
* @return array the extensions corresponding to the specified MIME type
*/
public static function getExtensionsByMimeType($mimeType, $magicFile = null)
{
$mimeTypes = static::loadMimeTypes($magicFile);
return array_keys($mimeTypes, mb_strtolower($mimeType, 'UTF-8'), true);
}
private static $_mimeTypes = [];
/**
* Loads MIME types from the specified file.
* @param string $magicFile the path (or alias) of the file that contains all available MIME type information.
* If this is not set, the file specified by [[mimeMagicFile]] will be used.
* @return array the mapping from file extensions to MIME types
*/
protected static function loadMimeTypes($magicFile)
{
if ($magicFile === null) {
$magicFile = static::$mimeMagicFile;
}
$magicFile = Yii::getAlias($magicFile);
if (!isset(self::$_mimeTypes[$magicFile])) {
self::$_mimeTypes[$magicFile] = require($magicFile);
}
return self::$_mimeTypes[$magicFile];
}
/**
* Copies a whole directory as another one.
* The files and sub-directories will also be copied over.
* @param string $src the source directory
* @param string $dst the destination directory
* @param array $options options for directory copy. Valid options are:
*
* - dirMode: integer, the permission to be set for newly copied directories. Defaults to 0775.
* - fileMode: integer, the permission to be set for newly copied files. Defaults to the current environment setting.
* - filter: callback, a PHP callback that is called for each directory or file.
* The signature of the callback should be: `function ($path)`, where `$path` refers to the full path to be filtered.
* The callback can return one of the following values:
*
* * true: the directory or file will be copied (the "only" and "except" options will be ignored)
* * false: the directory or file will NOT be copied (the "only" and "except" options will be ignored)
* * null: the "only" and "except" options will determine whether the directory or file should be copied
*
* - only: array, list of patterns that the file paths should match if they want to be copied.
* A path matches a pattern if it contains the pattern string at its end.
* For example, '.php' matches all file paths ending with '.php'.
* Note that the '/' character in a pattern matches both '/' and '\' in the paths.
* If a file path matches a pattern in both "only" and "except", it will NOT be copied.
* - except: array, list of patterns that the files or directories should match if they want to be excluded from being copied.
* A path matches a pattern if it contains the pattern string at its end.
* Patterns ending with '/' apply to directory paths only, and patterns not ending with '/'
* apply to file paths only. For example, '/a/b' matches all file paths ending with '/a/b';
* and '.svn/' matches directory paths ending with '.svn'. Note that the '/' character in a pattern
* matches both '/' and '\' in the paths.
* - caseSensitive: boolean, whether patterns specified at "only" or "except" should be case sensitive. Defaults to true.
* - recursive: boolean, whether the files under the subdirectories should also be copied. Defaults to true.
* - beforeCopy: callback, a PHP callback that is called before copying each sub-directory or file.
* If the callback returns false, the copy operation for the sub-directory or file will be cancelled.
* The signature of the callback should be: `function ($from, $to)`, where `$from` is the sub-directory or
* file to be copied from, while `$to` is the copy target.
* - afterCopy: callback, a PHP callback that is called after each sub-directory or file is successfully copied.
* The signature of the callback should be: `function ($from, $to)`, where `$from` is the sub-directory or
* file copied from, while `$to` is the copy target.
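*
* A minimal illustrative call (the paths and patterns here are hypothetical):
*
* ```php
* FileHelper::copyDirectory('/path/to/src', '/path/to/dst', [
*     'only' => ['*.php'],
*     'except' => ['.svn/'],
* ]);
* ```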
* @throws \yii\base\InvalidParamException if unable to open directory
*/
public static function copyDirectory($src, $dst, $options = [])
{
$src = static::normalizePath($src);
$dst = static::normalizePath($dst);
if ($src === $dst || strpos($dst, $src . DIRECTORY_SEPARATOR) === 0) {
throw new InvalidParamException('Trying to copy a directory to itself or a subdirectory.');
}
if (!is_dir($dst)) {
static::createDirectory($dst, isset($options['dirMode']) ? $options['dirMode'] : 0775, true);
}
$handle = opendir($src);
if ($handle === false) {
throw new InvalidParamException("Unable to open directory: $src");
}
if (!isset($options['basePath'])) {
// this should be done only once
$options['basePath'] = realpath($src);
$options = self::normalizeOptions($options);
}
while (($file = readdir($handle)) !== false) {
if ($file === '.' || $file === '..') {
continue;
}
$from = $src . DIRECTORY_SEPARATOR . $file;
$to = $dst . DIRECTORY_SEPARATOR . $file;
if (static::filterPath($from, $options)) {
if (isset($options['beforeCopy']) && !call_user_func($options['beforeCopy'], $from, $to)) {
continue;
}
if (is_file($from)) {
copy($from, $to);
if (isset($options['fileMode'])) {
@chmod($to, $options['fileMode']);
}
} else {
// recursive copy, defaults to true
if (!isset($options['recursive']) || $options['recursive']) {
static::copyDirectory($from, $to, $options);
}
}
if (isset($options['afterCopy'])) {
call_user_func($options['afterCopy'], $from, $to);
}
}
}
closedir($handle);
}
/**
* Removes a directory (and all its content) recursively.
*
* @param string $dir the directory to be deleted recursively.
* @param array $options options for directory remove. Valid options are:
*
* - traverseSymlinks: boolean, whether symlinks to the directories should be traversed too.
* Defaults to `false`, meaning the content of the symlinked directory would not be deleted.
* Only symlink would be removed in that default case.
*
* @throws ErrorException in case of failure
*/
public static function removeDirectory($dir, $options = [])
{
if (!is_dir($dir)) {
return;
}
if (isset($options['traverseSymlinks']) && $options['traverseSymlinks'] || !is_link($dir)) {
if (!($handle = opendir($dir))) {
return;
}
while (($file = readdir($handle)) !== false) {
if ($file === '.' || $file === '..') {
continue;
}
$path = $dir . DIRECTORY_SEPARATOR . $file;
if (is_dir($path)) {
static::removeDirectory($path, $options);
} else {
try {
unlink($path);
} catch (ErrorException $e) {
if (DIRECTORY_SEPARATOR === '\\') {
// last resort measure for Windows
$lines = [];
exec("DEL /F/Q \"$path\"", $lines, $deleteError);
} else {
throw $e;
}
}
}
}
closedir($handle);
}
if (is_link($dir)) {
unlink($dir);
} else {
rmdir($dir);
}
}
/**
* Returns the files found under the specified directory and subdirectories.
* @param string $dir the directory under which the files will be looked for.
* @param array $options options for file searching. Valid options are:
*
* - `filter`: callback, a PHP callback that is called for each directory or file.
* The signature of the callback should be: `function ($path)`, where `$path` refers to the full path to be filtered.
* The callback can return one of the following values:
*
* * `true`: the directory or file will be returned (the `only` and `except` options will be ignored)
* * `false`: the directory or file will NOT be returned (the `only` and `except` options will be ignored)
* * `null`: the `only` and `except` options will determine whether the directory or file should be returned
*
* - `except`: array, list of patterns that exclude matching file or directory paths from the results.
* Patterns ending with slash ('/') apply to directory paths only, and patterns not ending with '/'
* apply to file paths only. For example, '/a/b' matches all file paths ending with '/a/b';
* and `.svn/` matches directory paths ending with `.svn`.
* If the pattern does not contain a slash (`/`), it is treated as a shell glob pattern
* and checked for a match against the pathname relative to `$dir`.
* Otherwise, the pattern is treated as a shell glob suitable for consumption by `fnmatch(3)`
* with the `FNM_PATHNAME` flag: wildcards in the pattern will not match a `/` in the pathname.
* For example, `views/*.php` matches `views/index.php` but not `views/controller/index.php`.
* A leading slash matches the beginning of the pathname. For example, `/*.php` matches `index.php` but not `views/start/index.php`.
* An optional prefix `!` negates the pattern; any matching file excluded by a previous pattern will become included again.
* If a negated pattern matches, this will override lower precedence pattern sources. Put a backslash (`\`) in front of the first `!`
* for patterns that begin with a literal `!`, for example, `\!important!.txt`.
* Note that the '/' character in a pattern matches both '/' and '\' in the paths.
* - `only`: array, list of patterns that the file paths should match if they are to be returned. Directory paths
* are not checked against them. Same pattern matching rules as in the `except` option are used.
* If a file path matches a pattern in both `only` and `except`, it will NOT be returned.
* - `caseSensitive`: boolean, whether patterns specified at `only` or `except` should be case sensitive. Defaults to `true`.
* - `recursive`: boolean, whether the files under the subdirectories should also be looked for. Defaults to `true`.
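*
* A minimal illustrative call (the directory and patterns are hypothetical):
*
* ```php
* $files = FileHelper::findFiles('/path/to/dir', [
*     'only' => ['*.php'],
*     'except' => ['.svn/'],
* ]);
* ```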
* @return array files found under the directory, in no particular order. Ordering depends on the files system used.
* @throws InvalidParamException if the dir is invalid.
*/
public static function findFiles($dir, $options = [])
{
if (!is_dir($dir)) {
throw new InvalidParamException("The dir argument must be a directory: $dir");
}
$dir = rtrim($dir, DIRECTORY_SEPARATOR);
if (!isset($options['basePath'])) {
// this should be done only once
$options['basePath'] = realpath($dir);
$options = self::normalizeOptions($options);
}
$list = [];
$handle = opendir($dir);
if ($handle === false) {
throw new InvalidParamException("Unable to open directory: $dir");
}
while (($file = readdir($handle)) !== false) {
if ($file === '.' || $file === '..') {
continue;
}
$path = $dir . DIRECTORY_SEPARATOR . $file;
if (static::filterPath($path, $options)) {
if (is_file($path)) {
$list[] = $path;
} elseif (!isset($options['recursive']) || $options['recursive']) {
$list = array_merge($list, static::findFiles($path, $options));
}
}
}
closedir($handle);
return $list;
}
/**
* Checks if the given file path satisfies the filtering options.
* @param string $path the path of the file or directory to be checked
* @param array $options the filtering options. See [[findFiles()]] for explanations of
* the supported options.
* @return boolean whether the file or directory satisfies the filtering options.
*/
public static function filterPath($path, $options)
{
if (isset($options['filter'])) {
$result = call_user_func($options['filter'], $path);
if (is_bool($result)) {
return $result;
}
}
if (empty($options['except']) && empty($options['only'])) {
return true;
}
$path = str_replace('\\', '/', $path);
if (!empty($options['except'])) {
if (($except = self::lastExcludeMatchingFromList($options['basePath'], $path, $options['except'])) !== null) {
return $except['flags'] & self::PATTERN_NEGATIVE;
}
}
if (!empty($options['only']) && !is_dir($path)) {
if (($except = self::lastExcludeMatchingFromList($options['basePath'], $path, $options['only'])) !== null) {
// don't check PATTERN_NEGATIVE since those entries are not prefixed with !
return true;
}
return false;
}
return true;
}
/**
* Creates a new directory.
*
* This method is similar to the PHP `mkdir()` function except that
* it uses `chmod()` to set the permission of the created directory
* in order to avoid the impact of the `umask` setting.
*
* @param string $path path of the directory to be created.
* @param integer $mode the permission to be set for the created directory.
* @param boolean $recursive whether to create parent directories if they do not exist.
* @return boolean whether the directory is created successfully
* @throws \yii\base\Exception if the directory could not be created (i.e. php error due to parallel changes)
*/
public static function createDirectory($path, $mode = 0775, $recursive = true)
{
if (is_dir($path)) {
return true;
}
$parentDir = dirname($path);
// recurse if parent dir does not exist and we are not at the root of the file system.
if ($recursive && !is_dir($parentDir) && $parentDir !== $path) {
static::createDirectory($parentDir, $mode, true);
}
try {
if (!mkdir($path, $mode)) {
return false;
}
} catch (\Exception $e) {
if (!is_dir($path)) {// https://github.com/yiisoft/yii2/issues/9288
throw new \yii\base\Exception("Failed to create directory \"$path\": " . $e->getMessage(), $e->getCode(), $e);
}
}
try {
return chmod($path, $mode);
} catch (\Exception $e) {
throw new \yii\base\Exception("Failed to change permissions for directory \"$path\": " . $e->getMessage(), $e->getCode(), $e);
}
}
/**
* Performs a simple comparison of file or directory names.
*
* Based on match_basename() from dir.c of git 1.8.5.3 sources.
*
* @param string $baseName file or directory name to compare with the pattern
* @param string $pattern the pattern that $baseName will be compared against
* @param integer|boolean $firstWildcard location of first wildcard character in the $pattern
* @param integer $flags pattern flags
* @return boolean whether the name matches against pattern
*/
private static function matchBasename($baseName, $pattern, $firstWildcard, $flags)
{
if ($firstWildcard === false) {
if ($pattern === $baseName) {
return true;
}
} elseif ($flags & self::PATTERN_ENDSWITH) {
/* "*literal" matching against "fooliteral" */
$n = StringHelper::byteLength($pattern);
// compare the suffix of length $n - 1, i.e. the pattern without its leading '*'
if (StringHelper::byteSubstr($pattern, 1, $n) === StringHelper::byteSubstr($baseName, -$n + 1, $n)) {
return true;
}
}
$fnmatchFlags = 0;
if ($flags & self::PATTERN_CASE_INSENSITIVE) {
$fnmatchFlags |= FNM_CASEFOLD;
}
return fnmatch($pattern, $baseName, $fnmatchFlags);
}
/**
* Compares a path part against a pattern with optional wildcards.
*
* Based on match_pathname() from dir.c of git 1.8.5.3 sources.
*
* @param string $path full path to compare
* @param string $basePath base of path that will not be compared
* @param string $pattern the pattern that path part will be compared against
* @param integer|boolean $firstWildcard location of first wildcard character in the $pattern
* @param integer $flags pattern flags
* @return boolean whether the path part matches against pattern
*/
private static function matchPathname($path, $basePath, $pattern, $firstWildcard, $flags)
{
// match with FNM_PATHNAME; the pattern has base implicitly in front of it.
if (isset($pattern[0]) && $pattern[0] === '/') {
$pattern = StringHelper::byteSubstr($pattern, 1, StringHelper::byteLength($pattern));
if ($firstWildcard !== false && $firstWildcard !== 0) {
$firstWildcard--;
}
}
$namelen = StringHelper::byteLength($path) - (empty($basePath) ? 0 : StringHelper::byteLength($basePath) + 1);
$name = StringHelper::byteSubstr($path, -$namelen, $namelen);
if ($firstWildcard !== 0) {
if ($firstWildcard === false) {
$firstWildcard = StringHelper::byteLength($pattern);
}
// if the non-wildcard part is longer than the remaining pathname, surely it cannot match.
if ($firstWildcard > $namelen) {
return false;
}
if (strncmp($pattern, $name, $firstWildcard)) {
return false;
}
$pattern = StringHelper::byteSubstr($pattern, $firstWildcard, StringHelper::byteLength($pattern));
$name = StringHelper::byteSubstr($name, $firstWildcard, $namelen);
// If the whole pattern did not have a wildcard, then our prefix match is all we need; we do not need to call fnmatch at all.
if (empty($pattern) && empty($name)) {
return true;
}
}
$fnmatchFlags = FNM_PATHNAME;
if ($flags & self::PATTERN_CASE_INSENSITIVE) {
$fnmatchFlags |= FNM_CASEFOLD;
}
return fnmatch($pattern, $name, $fnmatchFlags);
}
/**
* Scan the given exclude list in reverse to see whether pathname
* should be ignored. The first match (i.e. the last on the list), if
* any, determines the fate. Returns the element which
* matched, or null for undecided.
*
* Based on last_exclude_matching_from_list() from dir.c of git 1.8.5.3 sources.
*
* @param string $basePath
* @param string $path
* @param array $excludes list of patterns to match $path against
* @return array|null null or one of the $excludes items as an array with keys: 'pattern', 'flags'
* @throws InvalidParamException if any of the exclude patterns is not a string or an array with keys: pattern, flags, firstWildcard.
*/
private static function lastExcludeMatchingFromList($basePath, $path, $excludes)
{
foreach (array_reverse($excludes) as $exclude) {
if (is_string($exclude)) {
$exclude = self::parseExcludePattern($exclude, false);
}
if (!isset($exclude['pattern']) || !isset($exclude['flags']) || !isset($exclude['firstWildcard'])) {
throw new InvalidParamException('If exclude/include pattern is an array it must contain the pattern, flags and firstWildcard keys.');
}
if ($exclude['flags'] & self::PATTERN_MUSTBEDIR && !is_dir($path)) {
continue;
}
if ($exclude['flags'] & self::PATTERN_NODIR) {
if (self::matchBasename(basename($path), $exclude['pattern'], $exclude['firstWildcard'], $exclude['flags'])) {
return $exclude;
}
continue;
}
if (self::matchPathname($path, $basePath, $exclude['pattern'], $exclude['firstWildcard'], $exclude['flags'])) {
return $exclude;
}
}
return null;
}
/**
* Processes the pattern, stripping special characters like / and ! from the beginning and settings flags instead.
* @param string $pattern
* @param boolean $caseSensitive
* @throws \yii\base\InvalidParamException
* @return array with keys: (string) pattern, (int) flags, (int|boolean) firstWildcard
*/
private static function parseExcludePattern($pattern, $caseSensitive)
{
if (!is_string($pattern)) {
throw new InvalidParamException('Exclude/include pattern must be a string.');
}
$result = [
'pattern' => $pattern,
'flags' => 0,
'firstWildcard' => false,
];
if (!$caseSensitive) {
$result['flags'] |= self::PATTERN_CASE_INSENSITIVE;
}
if (!isset($pattern[0])) {
return $result;
}
if ($pattern[0] === '!') {
$result['flags'] |= self::PATTERN_NEGATIVE;
$pattern = StringHelper::byteSubstr($pattern, 1, StringHelper::byteLength($pattern));
}
if (StringHelper::byteLength($pattern) && StringHelper::byteSubstr($pattern, -1, 1) === '/') {
$pattern = StringHelper::byteSubstr($pattern, 0, -1);
$result['flags'] |= self::PATTERN_MUSTBEDIR;
}
if (strpos($pattern, '/') === false) {
$result['flags'] |= self::PATTERN_NODIR;
}
$result['firstWildcard'] = self::firstWildcardInPattern($pattern);
if ($pattern[0] === '*' && self::firstWildcardInPattern(StringHelper::byteSubstr($pattern, 1, StringHelper::byteLength($pattern))) === false) {
$result['flags'] |= self::PATTERN_ENDSWITH;
}
$result['pattern'] = $pattern;
return $result;
}
/**
* Searches for the first wildcard character in the pattern.
* @param string $pattern the pattern to search in
* @return integer|boolean position of first wildcard character or false if not found
*/
private static function firstWildcardInPattern($pattern)
{
$wildcards = ['*', '?', '[', '\\'];
$wildcardSearch = function ($r, $c) use ($pattern) {
$p = strpos($pattern, $c);
return $r === false ? $p : ($p === false ? $r : min($r, $p));
};
return array_reduce($wildcards, $wildcardSearch, false);
}
/**
* @param array $options raw options
* @return array normalized options
*/
private static function normalizeOptions(array $options)
{
if (!array_key_exists('caseSensitive', $options)) {
$options['caseSensitive'] = true;
}
if (isset($options['except'])) {
foreach ($options['except'] as $key => $value) {
if (is_string($value)) {
$options['except'][$key] = self::parseExcludePattern($value, $options['caseSensitive']);
}
}
}
if (isset($options['only'])) {
foreach ($options['only'] as $key => $value) {
if (is_string($value)) {
$options['only'][$key] = self::parseExcludePattern($value, $options['caseSensitive']);
}
}
}
return $options;
}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package openpgp
import "hash"
// NewCanonicalTextHash reformats text written to it into the canonical
// form and then applies the hash h. See RFC 4880, section 5.2.1.
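//
// For example, writing "line1\nline2\n" to the returned hash feeds
// "line1\r\nline2\r\n" to h, while input that already uses "\r\n"
// line endings is passed through unchanged.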
func NewCanonicalTextHash(h hash.Hash) hash.Hash {
return &canonicalTextHash{h, 0}
}
type canonicalTextHash struct {
h hash.Hash
s int
}
var newline = []byte{'\r', '\n'}
func (cth *canonicalTextHash) Write(buf []byte) (int, error) {
start := 0
for i, c := range buf {
switch cth.s {
case 0:
if c == '\r' {
cth.s = 1
} else if c == '\n' {
cth.h.Write(buf[start:i])
cth.h.Write(newline)
start = i + 1
}
case 1:
cth.s = 0
}
}
cth.h.Write(buf[start:])
return len(buf), nil
}
func (cth *canonicalTextHash) Sum(in []byte) []byte {
return cth.h.Sum(in)
}
func (cth *canonicalTextHash) Reset() {
cth.h.Reset()
cth.s = 0
}
func (cth *canonicalTextHash) Size() int {
return cth.h.Size()
}
func (cth *canonicalTextHash) BlockSize() int {
return cth.h.BlockSize()
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*
*/
package com.microsoft.azure.management.network.v2017_10_01.implementation;
import com.microsoft.azure.arm.model.implementation.WrapperImpl;
import com.microsoft.azure.management.network.v2017_10_01.LoadBalancerProbes;
import rx.Observable;
import rx.functions.Func1;
import com.microsoft.azure.Page;
import com.microsoft.azure.management.network.v2017_10_01.Probe;
class LoadBalancerProbesImpl extends WrapperImpl<LoadBalancerProbesInner> implements LoadBalancerProbes {
private final NetworkManager manager;
LoadBalancerProbesImpl(NetworkManager manager) {
super(manager.inner().loadBalancerProbes());
this.manager = manager;
}
public NetworkManager manager() {
return this.manager;
}
private ProbeImpl wrapModel(ProbeInner inner) {
return new ProbeImpl(inner, manager());
}
@Override
public Observable<Probe> listAsync(final String resourceGroupName, final String loadBalancerName) {
LoadBalancerProbesInner client = this.inner();
return client.listAsync(resourceGroupName, loadBalancerName)
.flatMapIterable(new Func1<Page<ProbeInner>, Iterable<ProbeInner>>() {
@Override
public Iterable<ProbeInner> call(Page<ProbeInner> page) {
return page.items();
}
})
.map(new Func1<ProbeInner, Probe>() {
@Override
public Probe call(ProbeInner inner) {
return wrapModel(inner);
}
});
}
@Override
public Observable<Probe> getAsync(String resourceGroupName, String loadBalancerName, String probeName) {
LoadBalancerProbesInner client = this.inner();
return client.getAsync(resourceGroupName, loadBalancerName, probeName)
.map(new Func1<ProbeInner, Probe>() {
@Override
public Probe call(ProbeInner inner) {
return wrapModel(inner);
}
});
}
}
| {
"pile_set_name": "Github"
} |
import tensorflow as tf
from baselines.common.distributions import make_pdtype
from collections import OrderedDict
from gym import spaces
def canonical_dtype(orig_dt):
if orig_dt.kind == 'f':
return tf.float32
elif orig_dt.kind in 'iu':
return tf.int32
else:
raise NotImplementedError
class StochasticPolicy(object):
def __init__(self, scope, ob_space, ac_space):
self.abs_scope = (tf.get_variable_scope().name + '/' + scope).lstrip('/')
self.ob_space = ob_space
self.ac_space = ac_space
self.pdtype = make_pdtype(ac_space)
self.ph_new = tf.placeholder(dtype=tf.float32, shape=(None, None), name='new')
self.ph_ob_keys = []
self.ph_ob_dtypes = {}
shapes = {}
if isinstance(ob_space, spaces.Dict):
assert isinstance(ob_space.spaces, OrderedDict)
for key, box in ob_space.spaces.items():
assert isinstance(box, spaces.Box)
self.ph_ob_keys.append(key)
                # Keys must be ordered, because tf.concat(ph) depends on order. We don't keep the
                # OrderedDict order and sort the keys instead; this gives freedom to modify the environment.
self.ph_ob_keys.sort()
for k in self.ph_ob_keys:
self.ph_ob_dtypes[k] = ob_space.spaces[k].dtype
shapes[k] = ob_space.spaces[k].shape
else:
print(ob_space)
box = ob_space
assert isinstance(box, spaces.Box)
self.ph_ob_keys = [None]
self.ph_ob_dtypes = { None: box.dtype }
shapes = { None: box.shape }
self.ph_ob = OrderedDict([(k, tf.placeholder(
canonical_dtype(self.ph_ob_dtypes[k]),
(None, None,) + tuple(shapes[k]),
name=(('obs/%s'%k) if k is not None else 'obs')
)) for k in self.ph_ob_keys ])
assert list(self.ph_ob.keys())==self.ph_ob_keys, "\n%s\n%s\n" % (list(self.ph_ob.keys()), self.ph_ob_keys)
ob_shape = tf.shape(next(iter(self.ph_ob.values())))
self.sy_nenvs = ob_shape[0]
self.sy_nsteps = ob_shape[1]
self.ph_ac = self.pdtype.sample_placeholder([None, None], name='ac')
self.pd = self.vpred = self.ph_istate = None
def finalize(self, pd, vpred, ph_istate=None): #pylint: disable=W0221
self.pd = pd
self.vpred = vpred
self.ph_istate = ph_istate
def ensure_observation_is_dict(self, ob):
if self.ph_ob_keys==[None]:
return { None: ob }
else:
return ob
def call(self, ob, new, istate):
"""
Return acs, vpred, neglogprob, nextstate
"""
raise NotImplementedError
def initial_state(self, n):
raise NotImplementedError
def update_normalization(self, ob):
pass
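# Illustrative sketch (hypothetical subclass; build_network is an assumed
# helper, not part of this file): a concrete policy builds its graph from
# self.ph_ob and hands the results to finalize().
#
#   class MyPolicy(StochasticPolicy):
#       def __init__(self, scope, ob_space, ac_space):
#           super().__init__(scope, ob_space, ac_space)
#           pdparam, vpred = build_network(self.ph_ob)
#           self.finalize(self.pdtype.pdfromflat(pdparam), vpred)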
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
import io
from pathlib import Path
from setuptools import setup, find_packages
import umap
def is_pkg(line):
return line and not line.startswith(('--', 'git', '#'))
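# For example (hypothetical requirements.txt lines), is_pkg keeps "Django>=1.8"
# but drops empty lines and lines starting with "--", "git", or "#", such as
# "--index-url https://example.invalid/simple" or "git+https://github.com/...".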
with io.open('requirements.txt', encoding='utf-8') as reqs:
install_requires = [l for l in reqs.read().split('\n') if is_pkg(l)]
setup(
name="umap-project",
version=umap.__version__,
author=umap.__author__,
author_email=umap.__contact__,
description=umap.__doc__,
keywords="django leaflet geodjango openstreetmap map",
url=umap.__homepage__,
packages=find_packages(),
include_package_data=True,
platforms=["any"],
zip_safe=True,
long_description=Path('README.md').read_text(),
long_description_content_type='text/markdown',
install_requires=install_requires,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
entry_points={
'console_scripts': ['umap=umap.bin:main'],
},
)
| {
"pile_set_name": "Github"
} |
---
layout: post
title: Ground knowledge
order: 20
---
## Web
It is a shame that many developers do not understand the fundamentals of web development: we build web apps without even knowing how the Internet (or the web) works. That is why junior developers often do not understand what params are or what triggers the different types of HTTP requests. The result is a weak foundation in basic web principles, where programming looks like some sort of magic. Unfortunately, with a Rails-centric education, a junior developer relies more on the "magic" of Rails than on an understanding of programming.
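For example, submitting a search form typically triggers a plain GET request whose params travel in the query string (a minimal, hypothetical request), while a form that creates a record triggers a POST with the params in the request body:

```
GET /search?query=ruby&page=2 HTTP/1.1
Host: example.com
```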
* [Mozilla: How the Web works](https://developer.mozilla.org/en-US/Learn/Getting_started_with_the_web/How_the_Web_works)
* [What really happens when you navigate to a URL](http://igoro.com/archive/what-really-happens-when-you-navigate-to-a-url/)
* [What is HTTP](http://www.jmarshall.com/easy/http/)
If you feel confident in your basic web knowledge and want to dive deeper into all aspects, read the free ebook [High-Performance Browser Networking](https://hpbn.co/).
> This book provides a hands-on overview of what every web developer needs to know about the various types of networks (WiFi, 3G/4G), transport protocols (UDP, TCP, and TLS), application protocols (HTTP/1.1, HTTP/2), and APIs available in the browser (XHR, WebSocket, WebRTC, and more) to deliver the best, fast, reliable, and resilient user experience.
* [High-Performance Browser Networking by Ilya Grigorik](https://hpbn.co/)
## Linux
A lot of us come to web development with Windows desktops. But even though Ruby can be installed on Windows, I recommend using a Linux/Unix based machine for Ruby development. Either a machine running a dedicated Linux/Unix OS or a virtual machine running Linux/Unix on Windows works just fine. The world of web servers (VPS) is all about Linux, so it is a good idea to be very familiar with it: performing basic tasks, tuning performance, and setting up an advanced web server for your application.
* [Ubuntu](http://www.ubuntu.com/)
* [DistroWatch TOP](http://distrowatch.com/dwres.php?resource=popularity)
## IDE
You can write Ruby code in any text editor, but using a more sophisticated IDE increases productivity.
Editors like SublimeText and Atom require some additional plugin setup.
The most full-featured IDE is RubyMine, but it is not free.
Due to the dynamic nature of Ruby, it is hard for IDEs to do correct autocompletion most of the time. This is why RubyMine is not as powerful as similar IDEs for other languages, but it still provides quite a lot of additional integrated tools.
* [Visual Studio Code (VSCode)](https://code.visualstudio.com/)
* [RubyMine](https://www.jetbrains.com/ruby/index.html)
* [SublimeText](https://www.sublimetext.com/)
* [Atom](https://atom.io/)
* [NetBeans](https://netbeans.org/features/ruby/index.html)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
</resources>
| {
"pile_set_name": "Github"
} |
var _ = require("lodash");
var d3 = require("d3");
var Renderer = require("dataproofer").Rendering;
var util = require("dataproofertest-js/util");
function HTMLRenderer(config) {
Renderer.call(this, config);
var rows = window.rows = config.rows;
this.rows = rows;
// console.log("render config", config, this);
this.sampleProgress = config.sampleProgress;
this.totalRows = config.totalRows;
this.columnHeads = config.columnHeads;
var resultList = [];
this.resultList = resultList;
d3.selectAll("#nav-buttons button").classed("rounded", false);
d3.select(".grid-footer").classed("hidden", false);
d3.selectAll(".test:not(.active)")
.classed("hidden", true);
d3.selectAll(".tests-wrapper").classed("hidden", function() {
return d3.select(this)
.selectAll(".test")
.classed("hidden");
});
d3.selectAll(".suite").classed("hidden", function() {
return d3.select(this)
.select(".tests-wrapper")
.classed("hidden");
});
d3.selectAll(".toggle").classed("hidden", true);
d3.selectAll(".test label").style("pointer-events", "none");
d3.selectAll(".suite-hed").classed("hidden", true);
d3.select(".column-3")
.classed("hidden", false)
.select("#grid")
.selectAll("*")
.remove();
var column2Height = d3.select(".column-2").node().getBoundingClientRect().height;
var gridFooterHeight = d3.select(".grid-footer").node().getBoundingClientRect().height;
var containerWidth = window.innerWidth - d3.select(".column-1").node().getBoundingClientRect().width - d3.select(".column-3").node().getBoundingClientRect().width;
var containerHeight = column2Height - gridFooterHeight; // heights of grid header and footer
d3.select("#grid").classed("hidden", false);
d3.select("#grid-container")
.style({
width: containerWidth + "px",
height: containerHeight + "px"
});
d3.select("#grid").style({
width: containerWidth + "px",
height: containerHeight + "px"
});
var data = [];
var headers = _.map(_.keys(rows[0]), function(header, idx) {
if (util.isEmpty(header)) return "Column " + idx;
return header;
});
_.forEach( rows , function(row) {
data.push( _.values(row) );
});
var handsOnTable = new Handsontable(document.getElementById("grid"),
{
data: data,
readOnly: true,
autoWrapRow: true,
autoWrapCol: true,
wordWrap: false,
width: containerWidth,
height: containerHeight,
colWidths: 100,
rowHeight: 24,
colHeaders: headers,
rowHeaders: true,
columnSorting: false,
manualRowResize: false,
manualColumnResize: true,
comments: true,
commentedCellClassName: "htCommentCell",
autoColumnSize: {
"samplingRatio": 23
},
search: {
callback: searchResultSelect
}
});
this.handsOnTable = handsOnTable;
window.handsOnTable = handsOnTable;
d3.select("#file-loader-button")
.classed("loaded", true)
.html("<i class='fa fa-search' aria-hidden='true'></i> Select New File");
// .on("click", function() {
// document.location.reload(true);
// });
function searchResultSelect(instance, row, col, value, result) {
Handsontable.Search.DEFAULT_CALLBACK.apply(this, arguments);
if (result) {
handsOnTable.selectCell(row, col);
}
}
var searchTimeout;
  this.searchHandler = function (event) {
    // debounce: cancel any pending query so only the last keystroke triggers a search
    if (searchTimeout) clearTimeout(searchTimeout);
    searchTimeout = setTimeout(function() {
      handsOnTable.search.query(event.target.value);
      handsOnTable.render();
    }, 500);
  };
var searchField = document.getElementById("search-field");
Handsontable.Dom.addEvent(searchField, "keyup", this.searchHandler);
}
HTMLRenderer.prototype = Object.create(Renderer.prototype, {});
HTMLRenderer.prototype.constructor = HTMLRenderer;
HTMLRenderer.prototype.addResult = function(suite, test, result) {
this.resultList.push({ suite: suite, test: test, result: result || {} });
};
HTMLRenderer.prototype.destroy = function() {
var searchField = document.getElementById("search-field");
  Handsontable.Dom.removeEvent(searchField, "keyup", this.searchHandler);
this.handsOnTable.destroy();
d3.select("#grid").selectAll("*").remove();
}
HTMLRenderer.prototype.done = function() {
var columnHeads = this.columnHeads;
var rows = this.rows;
var sampleProgress = this.sampleProgress;
var totalRows = d3.format(",")(this.totalRows);
var rowsTestedPct = d3.format(".0%")(sampleProgress);
var resultList = this.resultList;
var handsOnTable = this.handsOnTable;
var colorScale = d3.scaleThreshold()
.domain(_.range(0,1.1,0.1))
.range([
'#342c51',
'#5a314a',
'#7a3543',
'#99393b',
'#b73c32',
'#d63e26',
'#cf6824',
'#b18f27',
'#87b02c',
'#32cd32'
]);
this.comments = renderCellComments(rows, columnHeads, resultList, handsOnTable);
this.highlightGrid();
var that = this;
setTimeout(function() {
that.renderFingerPrint();
}, 100);
handsOnTable.addHook("afterColumnSort", function(columnIndex) {
that.renderFingerPrint({col: columnIndex });
});
handsOnTable.addHook("afterOnCellMouseDown", function(evt, coords) {
that.renderFingerPrint({col: coords.col, row: coords.row });
});
var progressWrapper = d3.select("div#progress-bar")
progressWrapper.classed("hidden", false)
.selectAll("*").remove();
progressWrapper.append("div")
.attr("id", "progress-info")
.text(function() { return `${rowsTestedPct} of ${totalRows} rows tested`; });
progressWrapper.append("div")
.attr("id", "progress-wrapper")
.append("div")
.attr("id", "progress")
.style("width", function() {
var widthInt = d3.select("#progress-wrapper").style("width").replace("px", "") * sampleProgress;
return widthInt + "px";
})
.style("background-color", function() {
return colorScale(sampleProgress);
});
if (sampleProgress >= 1) {
d3.select("#back-button").classed("rounded", true);
d3.select("#forward-button").classed("hidden", true);
}
// We want to separate out tests that failed and tests that passed here
// Summarize testsPassed.length, and then append all failed tests like normal
d3.selectAll(".header-info").remove();
d3.select(".test-sets")
.insert("div", ":first-child")
.html(function() {
var headersCheck = resultList[0];
var missingHeadersStr = "<div class='header-info'>";
if (headersCheck.result.testState === "failed") {
missingHeadersStr += "<i class='fa fa-times-circle'></i>";
missingHeadersStr += " Failed: Missing or duplicate column headers: ";
missingHeadersStr += headersCheck.result.badColumnHeads.join(", ");
missingHeadersStr += "</div>";
} else {
missingHeadersStr += "<i class='fa fa-check-circle'></i>";
missingHeadersStr += " Passed: No missing or duplicate column headers";
}
return missingHeadersStr;
});
var passedResults = _.filter(resultList, function(d){
return d.result.testState === "passed";
});
var numPassed = passedResults.length;
var numTests = resultList.length;
var finalRate = numPassed/numTests;
// console.log("finalRate",finalRate);
var finalGrade = d3.format('.0%')(finalRate);
var finalColor = colorScale(finalRate);
d3.select(".summary").remove();
d3.select(".test-sets")
.insert("div", ":first-child")
.attr("class", "summary")
.html(function() {
var scoreHtml = `<span id="final-grade">${finalGrade}</span><span class="block header-info">(${numPassed}/${numTests}) passed</span>`;
return scoreHtml;
})
.select("#final-grade")
.style("color", finalColor);
var tests = d3.selectAll(".test")
.data(resultList, function(d) { return d.suite + "-" + d.test.name(); });
tests.select("i.fa-question-circle")
.each(function(d) {
d3.select(this)
.attr("original-title", function(d) {
var tooltipStr = "";
if (d.result.passed !== "passed") {
tooltipStr += d.test.conclusion();
} else {
tooltipStr += d.test.description();
}
return tooltipStr;
});
});
var timeout;
var filterResults = function (d) {
clearTimeout(timeout);
that.renderFingerPrint({ test: d.test.name(), column: d.column });
that.highlightGrid({ highlightCells: d.result.highlightCells || [], testName: d.test.name() });
};
var clearFilteredResults = function(d) {
// debounce
timeout = setTimeout(function() {
that.renderFingerPrint();
that.highlightGrid();
}, 300);
};
that.clearFilteredResults = clearFilteredResults;
tests.classed("pass", function(d) {
return d.result.testState === "passed";
})
.classed("fail", function(d) {
return d.result.testState === "failed";
})
.classed("warn", function(d) {
return d.result.testState === "warn";
})
.classed("info", function(d) {
return d.result.testState === "info";
});
d3.selectAll(".active.test:not(.pass)")
.on("mouseover", filterResults)
.on("click", filterResults);
d3.selectAll(".result-icon").remove()
tests.insert("i", "label")
.attr("class", function(d) {
if (d.result.testState === "passed") return "fa-check-circle";
if (d.result.testState === "failed") return "fa-times-circle";
if (d.result.testState === "warn") return "fa-exclamation-circle";
if (d.result.testState === "info") return "fa-info-circle";
})
.classed("result-icon fa", true);
};
HTMLRenderer.prototype.highlightGrid = function(options) {
if(!options) options = {};
var highlightCells = options.highlightCells;
var testName = options.testName;
var comments = [];
  if (options.testName) {
    comments = this.comments;
  } else {
    // keep only cells flagged by something other than purely informational tests
    comments = this.comments.filter(function(comment) {
      return comment.array.filter(function(d) { return d.testState !== "info"; }).length > 0;
    });
  }
var handsOnTable = this.handsOnTable;
// var rowsToShow = [];
if (highlightCells && testName) {
var currentComments = _.filter(comments, function(comment) {
return comment.array
.map(function(d) { return d.name })
.indexOf(testName) > -1;
});
handsOnTable.updateSettings({
cell: currentComments,
commentedCellClassName: "htCommentCell filtered"
});
if (currentComments[0]) {
handsOnTable.selectCell(
currentComments[0].row,
currentComments[0].col,
currentComments[0].row,
currentComments[0].col,
true
);
}
} else {
handsOnTable.updateSettings({
cell: comments,
commentedCellClassName: "htCommentCell"
});
handsOnTable.deselectCell();
}
};
HTMLRenderer.prototype.renderFingerPrint = function(options) {
if(!options) options = {};
var columnIndex = options.col;
var rowIndex = options.row;
var test = options.test;
// var column = options.column;
var rows = this.rows;
// var columnHeads = this.columnHeads;
var comments = this.comments;
var handsOnTable = this.handsOnTable;
var clearFilteredResults = this.clearFilteredResults;
var width = 200;
var resultsBBOX = d3.select(".column-3").node().getBoundingClientRect();
var height = resultsBBOX.height;
var cellWidth = 2;
var cellHeight = 1;
var cols = Object.keys(rows[0]);
cellWidth = width / cols.length;
cellHeight = height / rows.length;
var canvas = d3.select("#fingerprint").node();
var context = canvas.getContext("2d");
canvas.width = width;
canvas.height = height;
function renderPrint() {
context.fillStyle = "#fff";
context.fillRect(0, 0, width, height);
comments.forEach(function(comment) {
var array = [];
if(test) {
array = comment.array.filter(function(d) { return d.name === test; });
} else {
array = comment.array.filter(function(d) { return d.testState === "failed" || d.testState === "warn" });
}
// only render this cell if its got items in the array
if(!array.length && !comment.array.length) return;
if(!array.length && comment.array.length) {
context.fillStyle = "#ddd"; // default state if info/pass
} else {
if(test) {
context.fillStyle = "#e03e22"; // if a test is highlighted we show it's cells as red
} else {
context.fillStyle = "#EFE7B8"; //default state if array has failed/warn elements
}
}
//transformRowIndex = Handsontable.hooks.run(handsOnTable, 'modifyRow', comment.row)
var transformRowIndex;
if(handsOnTable.sortIndex && handsOnTable.sortIndex.length) {
transformRowIndex = handsOnTable.sortIndex[comment.row][0];
} else {
transformRowIndex = comment.row;
}
context.fillRect(comment.col * cellWidth, transformRowIndex * cellHeight, cellWidth, cellHeight);
});
}
renderPrint();
function renderCol(col) {
context.strokeStyle = "#444";
context.strokeRect(col * cellWidth, 0, cellWidth, height);
}
function renderRow(row) {
context.strokeStyle = "#444";
context.strokeRect(0, row * cellHeight, width, cellHeight);
}
if(columnIndex || columnIndex === 0) {
renderCol(columnIndex);
}
if(rowIndex || rowIndex === 0) {
renderRow(rowIndex);
}
function selectGridCell (d,i) {
var selectFiltered = d3.selectAll(".filtered");
    // true when some cells are currently highlighted by a test filter
    var isFiltered = !selectFiltered.empty();
if (isFiltered) {
d3.selectAll(".test").classed("filtered", false);
clearFilteredResults();
}
var mouse = d3.mouse(canvas);
var x = mouse[0];
var y = mouse[1];
if (y < 0) y = 0;
var row = Math.floor(y / height * rows.length); // for now our cells are 1 pixel high so this works
var col = Math.floor(x / width * cols.length);
handsOnTable.selectCell(row, col, row, col, true);
renderPrint();
renderCol(col);
renderRow(row);
}
var drag = d3.drag()
.on("drag.fp", selectGridCell);
d3.select(canvas)
.call(drag)
.on("click.fp", selectGridCell);
};
function renderCellComments(rows, columnHeads, resultList, handsOnTable) {
// setup/update the comments
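  // Each returned entry ties one grid cell to the tests that flagged it, e.g.
  // (illustrative values):
  //   { row: 3, col: 1, comment: "checkDuplicates",
  //     array: [{ name: "checkDuplicates", testState: "warn" }] }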
var comments = [];
var commentCollector = [];
_.each(rows, function(row, rowIndex) {
commentCollector[rowIndex] = {};
_.each(columnHeads, function(columnHead) {
// keep an object with each key
commentCollector[rowIndex][columnHead] = [];
});
});
// loop over resultList
resultList.forEach(function(d){
if(d.result && d.result.highlightCells && d.result.highlightCells.length) {
_.each(rows, function(row, rowIndex) {
_.each(columnHeads, function(columnHead) {
var value = d.result.highlightCells[rowIndex][columnHead];
if(value) {
commentCollector[rowIndex][columnHead].push({name: d.test.name(), testState: d.result.testState});
}
});
});
}
});
_.each(rows, function(row, rowIndex) {
_.each(columnHeads, function(columnHead, columnIndex) {
var array = commentCollector[rowIndex][columnHead];
if(array && array.length && array.length > 0) {
var names = array.map(function(d) { return d.name })
var string = names.join("\n");
comments.push({row: rowIndex, col: columnIndex, comment: string, array: array});
}
});
});
return comments;
}
| {
"pile_set_name": "Github"
} |
{
"images": [
{
"filename": "ic_shopping_basket_white_48pt.png",
"idiom": "universal",
"scale": "1x"
},
{
"filename": "ic_shopping_basket_white_48pt_2x.png",
"idiom": "universal",
"scale": "2x"
},
{
"filename": "ic_shopping_basket_white_48pt_3x.png",
"idiom": "universal",
"scale": "3x"
}
],
"info": {
"author": "xcode",
"version": 1
}
}
| {
"pile_set_name": "Github"
} |
X-Account-Key: account5
X-UIDL: GmailId128181933863ffb0
X-Mozilla-Status: 0000
X-Mozilla-Status2: 00000000
X-Mozilla-Keys:
Delivered-To: [email protected]
Received: by 10.143.34.8 with SMTP id m8cs43295wfj;
Mon, 19 Apr 2010 15:05:34 -0700 (PDT)
Received: by 10.223.17.155 with SMTP id s27mr470391faa.13.1271714624502;
Mon, 19 Apr 2010 15:03:44 -0700 (PDT)
Return-Path: <bounce-debian-laptop=mlsubscriber.tech=csmining.org@lists.debian.org>
Received: from liszt.debian.org (liszt.debian.org [82.195.75.100])
by mx.google.com with ESMTP id k29si1307221fkk.15.2010.04.19.15.03.43;
Mon, 19 Apr 2010 15:03:44 -0700 (PDT)
Received-SPF: pass (google.com: manual fallback record for domain of bounce-debian-laptop=mlsubscriber.tech=csmining.org@lists.debian.org designates 82.195.75.100 as permitted sender) client-ip=82.195.75.100;
Authentication-Results: mx.google.com; spf=pass (google.com: manual fallback record for domain of bounce-debian-laptop=mlsubscriber.tech=csmining.org@lists.debian.org designates 82.195.75.100 as permitted sender) smtp.mail=bounce-debian-laptop=mlsubscriber.tech=csmining.org@lists.debian.org
Received: from localhost (localhost [127.0.0.1])
by liszt.debian.org (Postfix) with QMQP
id 669E813A4E3D; Mon, 19 Apr 2010 22:03:34 +0000 (UTC)
Old-Return-Path: <[email protected]>
XChecker-Version: SpamAssassin 3.2.5 (2008-06-10) on liszt.debian.org
XLevel:
XStatus: No, score=-9.5 required=4.0 tests=FOURLA,LDOSUBSCRIBER,
LDO_WHITELIST,RATWARE_GECKO_BUILD autolearn=failed version=3.2.5
X-Original-To: [email protected]
Delivered-To: [email protected]
Received: from localhost (localhost [127.0.0.1])
by liszt.debian.org (Postfix) with ESMTP id 9187313A4D5F
for <[email protected]>; Mon, 19 Apr 2010 22:03:28 +0000 (UTC)
X-Virus-Scanned: at lists.debian.org with policy bank en-lt
X-AmavisStatus: No, score=-6.9 tagged_above=-10000 required=5.3
tests=[BAYES_00=-2, FOURLA=0.1, LDO_WHITELIST=-5] autolearn=ham
Received: from liszt.debian.org ([127.0.0.1])
by localhost (lists.debian.org [127.0.0.1]) (amavisd-new, port 2525)
with ESMTP id Ad9FfMQMAhgE for <[email protected]>;
Mon, 19 Apr 2010 22:03:21 +0000 (UTC)
X-policyd-weight: using cached result; rate: -3.66
Received: from scully.cob.calpoly.edu (videoserver8.cob.calpoly.edu [129.65.185.9])
(using TLSv1 with cipher AES256-SHA (256/256 bits))
(Client did not present a certificate)
by liszt.debian.org (Postfix) with ESMTPS id DAFF213A4DB3
for <[email protected]>; Mon, 19 Apr 2010 22:03:20 +0000 (UTC)
Received: from damien.cob.calpoly.edu ([129.65.91.128] helo=[127.0.0.1])
by scully.cob.calpoly.edu with esmtpsa (TLS1.0:DHE_RSA_AES_256_CBC_SHA1:32)
(Exim 4.69)
(envelope-from <[email protected]>)
id 1O3z3l-00030T-Es
for [email protected]; Mon, 19 Apr 2010 15:03:17 -0700
Message-ID: <[email protected]>
Date: Mon, 19 Apr 2010 15:03:11 -0700
From: Joe Emenaker <[email protected]>
User-Agent: Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.1.9) Gecko/20100317 Lightning/1.0b1 Thunderbird/3.0.4
MIME-Version: 1.0
To: [email protected]
Subject: Re: Battery monitor tools?
References: <[email protected]> <[email protected]>
In-Reply-To: <[email protected]>
Content-Type: multipart/signed; protocol="application/pkcs7-signature"; micalg=sha1; boundary="------------ms090808050700030703080806"
X-Rc-Virus: 2007-09-13_01
X-Rc-Spam: 2008-11-04_01
Resent-Message-ID: <8veJ8RF8KNB.A.S-.2MNzLB@liszt>
Resent-From: [email protected]
X-Mailing-List: <[email protected]> archive/latest/34112
X-Loop: [email protected]
List-Id: <debian-laptop.lists.debian.org>
List-Post: <mailto:[email protected]>
List-Help: <mailto:[email protected]?subject=help>
List-Subscribe: <mailto:[email protected]?subject=subscribe>
List-Unsubscribe: <mailto:[email protected]?subject=unsubscribe>
Precedence: list
Resent-Sender: [email protected]
Resent-Date: Mon, 19 Apr 2010 22:03:34 +0000 (UTC)
This is a cryptographically signed message in MIME format.
--------------ms090808050700030703080806
Content-Type: text/plain; charset=ISO-8859-1; format=flowed
Content-Transfer-Encoding: quoted-printable
On 4/19/2010 2:29 PM, Bob Proulx wrote:
> Joe Emenaker wrote:
> =20
>> I'm looking for a good battery charge/discharge profiler tool for my
>> Ubuntu laptop.
>> =20
> Of course I am sure that you know you are posting about Ubuntu to the
> Debian laptop mailing list? The least you could do is to lie to us
> and say you are running Debian when posting to a Debian list. :-)
> =20
Well, I maintain about 4 Debian servers... and I've been using Debian=20
almost exclusively ever since the .deb packaging system got rolled out=20
(yeah... waaaayyy back). Because Ubuntu is a derivative of Debian (in=20
the sense that just about anything available for Debian is either also=20
packaged for, or the Debian package is installable on, Ubuntu), and also =
because we in the Debian crowd have better kung-fu than those lame=20
GUI-installer Ubuntu-ites...... *and* because I've already been=20
subscribed on this list for several years.... I figured I'd ask here.
> There are some useful tools available though. You might look at 'ibam'=
=2E
> =20
Yeah, I saw IBAM, but it looked like it just generated plain ol' .png=20
files or something. I was hoping for something a little more snazzy.
- Joe
--------------ms090808050700030703080806
Content-Type: application/pkcs7-signature; name="smime.p7s"
Content-Transfer-Encoding: base64
Content-Disposition: attachment; filename="smime.p7s"
Content-Description: S/MIME Cryptographic Signature
MIAGCSqGSIb3DQEHAqCAMIACAQExCzAJBgUrDgMCGgUAMIAGCSqGSIb3DQEHAQAAoIIO5DCC
BEYwggOvoAMCAQICEGb9R+PCGeToms2Z3fU6yyQwDQYJKoZIhvcNAQEFBQAwXzELMAkGA1UE
BhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAxIFB1Ymxp
YyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA1MTAyODAwMDAwMFoXDTE1
MTAyNzIzNTk1OVowgd0xCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEf
MB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazE7MDkGA1UECxMyVGVybXMgb2YgdXNl
IGF0IGh0dHBzOi8vd3d3LnZlcmlzaWduLmNvbS9ycGEgKGMpMDUxHjAcBgNVBAsTFVBlcnNv
bmEgTm90IFZhbGlkYXRlZDE3MDUGA1UEAxMuVmVyaVNpZ24gQ2xhc3MgMSBJbmRpdmlkdWFs
IFN1YnNjcmliZXIgQ0EgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMnf
rOfq+PgDFMQAktXBfjbCPO98chXLwKuMPRyVzm8eECw/AO2XJua2x+atQx0/pIdHR0w+VPhs
+Mf8sZ69MHC8l7EDBeqV8a1AxUR6SwWi8mD81zplYu//EHuiVrvFTnAt1qIfPO2wQuhejVch
rKaZ2RHp0hoHwHRHQgv8xTTq/ea6JNEdCBU3otdzzwFBL2OyOj++pRpu9MlKWz2VphW7NQIZ
+dTvvI8OcXZZu0u2Ptb8Whb01g6J8kn+bAztFenZiHWcec5gJ925rXXOL3OVekA6hXVJsLjf
aLyrzROChRFQo+A8C67AClPN1zBvhTJGG+RJEMJs4q8fef/btLUCAwEAAaOB/zCB/DASBgNV
HRMBAf8ECDAGAQH/AgEAMEQGA1UdIAQ9MDswOQYLYIZIAYb4RQEHFwEwKjAoBggrBgEFBQcC
ARYcaHR0cHM6Ly93d3cudmVyaXNpZ24uY29tL3JwYTALBgNVHQ8EBAMCAQYwEQYJYIZIAYb4
QgEBBAQDAgEGMC4GA1UdEQQnMCWkIzAhMR8wHQYDVQQDExZQcml2YXRlTGFiZWwzLTIwNDgt
MTU1MB0GA1UdDgQWBBQRfV4ZfTwE32ps1qKKGj8x2DuUUjAxBgNVHR8EKjAoMCagJKAihiBo
dHRwOi8vY3JsLnZlcmlzaWduLmNvbS9wY2ExLmNybDANBgkqhkiG9w0BAQUFAAOBgQA8o9oC
YzrEk6qrctPcrVA4HgyeFkqIt+7r2f8PjZWg1rv6aguuYYTYaEeJ70+ssh9JQZtJM3aTi55u
uUMcYL3C3Ioth8FFwBFyBBprJCpsb+f8BxMp0Hc6I+f1wYVoGb/GAVQgGa41gsxiPGEJxvTV
67APpp8zhZrTcY5Qj5ndYjCCBUkwggQxoAMCAQICEEgTlfTkCEE6iKvXTkEr69IwDQYJKoZI
hvcNAQEFBQAwgd0xCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0G
A1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazE7MDkGA1UECxMyVGVybXMgb2YgdXNlIGF0
IGh0dHBzOi8vd3d3LnZlcmlzaWduLmNvbS9ycGEgKGMpMDUxHjAcBgNVBAsTFVBlcnNvbmEg
Tm90IFZhbGlkYXRlZDE3MDUGA1UEAxMuVmVyaVNpZ24gQ2xhc3MgMSBJbmRpdmlkdWFsIFN1
YnNjcmliZXIgQ0EgLSBHMjAeFw0xMDAxMjEwMDAwMDBaFw0xMTAxMjEyMzU5NTlaMIIBEjEX
MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdv
cmsxRjBEBgNVBAsTPXd3dy52ZXJpc2lnbi5jb20vcmVwb3NpdG9yeS9SUEEgSW5jb3JwLiBi
eSBSZWYuLExJQUIuTFREKGMpOTgxHjAcBgNVBAsTFVBlcnNvbmEgTm90IFZhbGlkYXRlZDEz
MDEGA1UECxMqRGlnaXRhbCBJRCBDbGFzcyAxIC0gTmV0c2NhcGUgRnVsbCBTZXJ2aWNlMRgw
FgYDVQQDFA9Kb3NlcGggRW1lbmFrZXIxHzAdBgkqhkiG9w0BCQEWEGpvZUBlbWVuYWtlci5j
b20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCXKMSWuWpcuKn69/GTa2PmMcf2
0kJxj6BRS9/F4cFAzapQcFVNFMET69Y1y/awSGRw6C0cYOnGUosOTSTHB1lD547VoomZsOhw
WZhzMyVolyJPEkOoJF9eqE3Ynpzd0ick9AzpBrojKvaR1Bvcujx1Z71JzYldrpi61S+ZhYLo
q17DqncOQE9Z1Clt3TVIaiYiBhLSFh+c7ZJNE/4VnNiTyr8+YVVDFs8bCzv2V9+yr2Er5dKw
0bJvj3a7BpndjfALcuS9zILVbqHxjVa2f8KT1LwSd4DjKlOpeJobABJlineUCvuH5ZPneJWd
pC9CVK3VHKtBjw8pq6k9YGh4iIK/AgMBAAGjgcwwgckwCQYDVR0TBAIwADBEBgNVHSAEPTA7
MDkGC2CGSAGG+EUBBxcBMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8vd3d3LnZlcmlzaWduLmNv
bS9ycGEwCwYDVR0PBAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMEBggrBgEFBQcDAjBKBgNV
HR8EQzBBMD+gPaA7hjlodHRwOi8vSW5kQzFEaWdpdGFsSUQtY3JsLnZlcmlzaWduLmNvbS9J
bmRDMURpZ2l0YWxJRC5jcmwwDQYJKoZIhvcNAQEFBQADggEBAJq7J+S3fCnlQR5o5hqYOjsF
YNfwuoN6QuTK58Ji43kF/36UJFNR4DcSU5PueWiEYr/5316Gorcc0r3Xtf1LJJJMYdUFA4zD
J1IV1Onf4vXpQU7Jb4yNixJzbRbmebWzcKjo5gruQjW0ivSjBXcB4UJLRumL6wk8uS5y2t1l
d+XMOaCmO5BUFXWymSa5BNNMeXQ3YMwxqQobqAv/H5cncOfD4M+lZ4JpVrjQMduKYS8L2wII
99QjmHMyl2cswq4NppTX0+vyaf75VQosuNptQwReyhmyfbC4sZTvGcRReDLsZKV6etA8gQ+g
vOCQjyvkW360fNgdDXsCP7UaUbnaTtkwggVJMIIEMaADAgECAhBIE5X05AhBOoir105BK+vS
MA0GCSqGSIb3DQEBBQUAMIHdMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIElu
Yy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOzA5BgNVBAsTMlRlcm1zIG9m
IHVzZSBhdCBodHRwczovL3d3dy52ZXJpc2lnbi5jb20vcnBhIChjKTA1MR4wHAYDVQQLExVQ
ZXJzb25hIE5vdCBWYWxpZGF0ZWQxNzA1BgNVBAMTLlZlcmlTaWduIENsYXNzIDEgSW5kaXZp
ZHVhbCBTdWJzY3JpYmVyIENBIC0gRzIwHhcNMTAwMTIxMDAwMDAwWhcNMTEwMTIxMjM1OTU5
WjCCARIxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJpU2lnbiBUcnVz
dCBOZXR3b3JrMUYwRAYDVQQLEz13d3cudmVyaXNpZ24uY29tL3JlcG9zaXRvcnkvUlBBIElu
Y29ycC4gYnkgUmVmLixMSUFCLkxURChjKTk4MR4wHAYDVQQLExVQZXJzb25hIE5vdCBWYWxp
ZGF0ZWQxMzAxBgNVBAsTKkRpZ2l0YWwgSUQgQ2xhc3MgMSAtIE5ldHNjYXBlIEZ1bGwgU2Vy
dmljZTEYMBYGA1UEAxQPSm9zZXBoIEVtZW5ha2VyMR8wHQYJKoZIhvcNAQkBFhBqb2VAZW1l
bmFrZXIuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlyjElrlqXLip+vfx
k2tj5jHH9tJCcY+gUUvfxeHBQM2qUHBVTRTBE+vWNcv2sEhkcOgtHGDpxlKLDk0kxwdZQ+eO
1aKJmbDocFmYczMlaJciTxJDqCRfXqhN2J6c3dInJPQM6Qa6Iyr2kdQb3Lo8dWe9Sc2JXa6Y
utUvmYWC6Ktew6p3DkBPWdQpbd01SGomIgYS0hYfnO2STRP+FZzYk8q/PmFVQxbPGws79lff
sq9hK+XSsNGyb492uwaZ3Y3wC3LkvcyC1W6h8Y1Wtn/Ck9S8EneA4ypTqXiaGwASZYp3lAr7
h+WT53iVnaQvQlSt1RyrQY8PKaupPWBoeIiCvwIDAQABo4HMMIHJMAkGA1UdEwQCMAAwRAYD
VR0gBD0wOzA5BgtghkgBhvhFAQcXATAqMCgGCCsGAQUFBwIBFhxodHRwczovL3d3dy52ZXJp
c2lnbi5jb20vcnBhMAsGA1UdDwQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDBAYIKwYBBQUH
AwIwSgYDVR0fBEMwQTA/oD2gO4Y5aHR0cDovL0luZEMxRGlnaXRhbElELWNybC52ZXJpc2ln
bi5jb20vSW5kQzFEaWdpdGFsSUQuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCauyfkt3wp5UEe
aOYamDo7BWDX8LqDekLkyufCYuN5Bf9+lCRTUeA3ElOT7nlohGK/+d9ehqK3HNK917X9SySS
TGHVBQOMwydSFdTp3+L16UFOyW+MjYsSc20W5nm1s3Co6OYK7kI1tIr0owV3AeFCS0bpi+sJ
PLkuctrdZXflzDmgpjuQVBV1spkmuQTTTHl0N2DMMakKG6gL/x+XJ3Dnw+DPpWeCaVa40DHb
imEvC9sCCPfUI5hzMpdnLMKuDaaU19Pr8mn++VUKLLjabUMEXsoZsn2wuLGU7xnEUXgy7GSl
enrQPIEPoLzgkI8r5Ft+tHzYHQ17Aj+1GlG52k7ZMYIE7DCCBOgCAQEwgfIwgd0xCzAJBgNV
BAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1
c3QgTmV0d29yazE7MDkGA1UECxMyVGVybXMgb2YgdXNlIGF0IGh0dHBzOi8vd3d3LnZlcmlz
aWduLmNvbS9ycGEgKGMpMDUxHjAcBgNVBAsTFVBlcnNvbmEgTm90IFZhbGlkYXRlZDE3MDUG
A1UEAxMuVmVyaVNpZ24gQ2xhc3MgMSBJbmRpdmlkdWFsIFN1YnNjcmliZXIgQ0EgLSBHMgIQ
SBOV9OQIQTqIq9dOQSvr0jAJBgUrDgMCGgUAoIICzjAYBgkqhkiG9w0BCQMxCwYJKoZIhvcN
AQcBMBwGCSqGSIb3DQEJBTEPFw0xMDA0MTkyMjAzMTFaMCMGCSqGSIb3DQEJBDEWBBSs5qSs
J3BRqPiOd8J2JFLHzcEwezBfBgkqhkiG9w0BCQ8xUjBQMAsGCWCGSAFlAwQBAjAKBggqhkiG
9w0DBzAOBggqhkiG9w0DAgICAIAwDQYIKoZIhvcNAwICAUAwBwYFKw4DAgcwDQYIKoZIhvcN
AwICASgwggEDBgkrBgEEAYI3EAQxgfUwgfIwgd0xCzAJBgNVBAYTAlVTMRcwFQYDVQQKEw5W
ZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVyaVNpZ24gVHJ1c3QgTmV0d29yazE7MDkGA1UE
CxMyVGVybXMgb2YgdXNlIGF0IGh0dHBzOi8vd3d3LnZlcmlzaWduLmNvbS9ycGEgKGMpMDUx
HjAcBgNVBAsTFVBlcnNvbmEgTm90IFZhbGlkYXRlZDE3MDUGA1UEAxMuVmVyaVNpZ24gQ2xh
c3MgMSBJbmRpdmlkdWFsIFN1YnNjcmliZXIgQ0EgLSBHMgIQSBOV9OQIQTqIq9dOQSvr0jCC
AQUGCyqGSIb3DQEJEAILMYH1oIHyMIHdMQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNp
Z24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOzA5BgNVBAsTMlRl
cm1zIG9mIHVzZSBhdCBodHRwczovL3d3dy52ZXJpc2lnbi5jb20vcnBhIChjKTA1MR4wHAYD
VQQLExVQZXJzb25hIE5vdCBWYWxpZGF0ZWQxNzA1BgNVBAMTLlZlcmlTaWduIENsYXNzIDEg
SW5kaXZpZHVhbCBTdWJzY3JpYmVyIENBIC0gRzICEEgTlfTkCEE6iKvXTkEr69IwDQYJKoZI
hvcNAQEBBQAEggEASpcHKFjqQHqQ091Ztlwg9PzvGBL1Wv7zI2LyrdsitsamWTAk9JGQDGEc
XcCmhYlzp5RP8yNBEgpGNA55Wj2YB6OvBOOyQbFHuio+v9JR2NWv0fDaCUkrOkTvMRnvW9u8
W/38H0Yfsoc8JqyQbSiOxUFXIIFFiDvQCIdWAlhN5amrQH8OysBab7QQDuv8Hjp68U74pd6y
gGdQmTgeob1x1MXLUpHAclr+0K8GWgdPyC4iiR5NowzIzs9KTNpwR3gnY9Q1Si9R+arC/KFJ
1428bFBqWld8ELa84vVywAvSicnQNHzNBnyKojUvjZLoGj1jvh0EaOrtWqKKip4ZhZLiwQAA
AAAAAA==
--------------ms090808050700030703080806--
--
To UNSUBSCRIBE, email to [email protected]
with a subject of "unsubscribe". Trouble? Contact [email protected]
Archive: http://lists.debian.org/[email protected]
| {
"pile_set_name": "Github"
} |
{
"name": "symfony/workflow",
"type": "library",
"description": "Symfony Workflow Component",
"keywords": ["workflow", "petrinet", "place", "transition", "statemachine", "state"],
"homepage": "https://symfony.com",
"license": "MIT",
"authors": [
{
"name": "Fabien Potencier",
"email": "[email protected]"
},
{
"name": "Grégoire Pineau",
"email": "[email protected]"
},
{
"name": "Symfony Community",
"homepage": "https://symfony.com/contributors"
}
],
"require": {
"php": ">=7.2.5",
"symfony/polyfill-php80": "^1.15"
},
"require-dev": {
"psr/log": "~1.0",
"symfony/dependency-injection": "^4.4|^5.0",
"symfony/event-dispatcher": "^4.4|^5.0",
"symfony/expression-language": "^4.4|^5.0",
"symfony/security-core": "^4.4|^5.0",
"symfony/validator": "^4.4|^5.0"
},
"conflict": {
"symfony/event-dispatcher": "<4.4"
},
"autoload": {
"psr-4": { "Symfony\\Component\\Workflow\\": "" }
},
"minimum-stability": "dev",
"extra": {
"branch-alias": {
"dev-master": "5.2-dev"
}
}
}
| {
"pile_set_name": "Github"
} |
<?php
/*
* This file is part of PHP CS Fixer.
*
* (c) Fabien Potencier <[email protected]>
* Dariusz Rumiński <[email protected]>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
namespace PhpCsFixer\Tests\Fixer\StringNotation;
use PhpCsFixer\Tests\Test\AbstractFixerTestCase;
use PhpCsFixer\WhitespacesFixerConfig;
/**
* @author Ilija Tovilo <[email protected]>
*
* @internal
*
* @covers \PhpCsFixer\Fixer\StringNotation\StringLineEndingFixer
*/
final class StringLineEndingFixerTest extends AbstractFixerTestCase
{
/**
* @param string $expected
* @param null|string $input
*
* @dataProvider provideFixCases
*/
public function testFix($expected, $input = null)
{
$this->doTest($expected, $input);
}
public function provideFixCases()
{
$heredocTemplate = "<?php\n\$a=\n<<<EOT\n%s\n\nEOT;\n";
$nowdocTemplate = "<?php\n\$a=\n<<<'EOT'\n%s\n\nEOT;\n";
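        // e.g. (illustrative): sprintf($heredocTemplate, "a\r\nb") builds a PHP
        // snippet whose heredoc body contains a CRLF; the fixer is expected to
        // rewrite it to a bare LF, matching the expected/input pairs below.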
$input = '/**
* @SWG\Get(
* path="/api/v0/cards",
* operationId="listCards",
* tags={"Банковские карты"},
* summary="Возвращает список банковских карт."
* )
*/';
return [
[
"<?php \$a = 'my\nmulti\nline\nstring';\r\n",
"<?php \$a = 'my\r\nmulti\nline\r\nstring';\r\n",
],
[
"<?php \$a = \"my\nmulti\nline\nstring\";\r\n",
"<?php \$a = \"my\r\nmulti\nline\r\nstring\";\r\n",
],
[
"<?php \$a = \"my\nmulti\nline\nstring\nwith\n\$b\ninterpolation\";\r\n",
"<?php \$a = \"my\r\nmulti\nline\r\nstring\nwith\r\n\$b\ninterpolation\";\r\n",
],
[
sprintf($heredocTemplate, $input),
sprintf($heredocTemplate, str_replace("\n", "\r", $input)),
],
[
sprintf($heredocTemplate, $input),
sprintf($heredocTemplate, str_replace("\n", "\r\n", $input)),
],
[
sprintf($nowdocTemplate, $input),
sprintf($nowdocTemplate, str_replace("\n", "\r", $input)),
],
[
sprintf($nowdocTemplate, $input),
sprintf($nowdocTemplate, str_replace("\n", "\r\n", $input)),
],
[
sprintf(str_replace('<<<', 'b<<<', $nowdocTemplate), $input),
sprintf(str_replace('<<<', 'b<<<', $nowdocTemplate), str_replace("\n", "\r\n", $input)),
],
[
sprintf(str_replace('<<<', 'B<<<', $nowdocTemplate), $input),
sprintf(str_replace('<<<', 'B<<<', $nowdocTemplate), str_replace("\n", "\r\n", $input)),
],
[
sprintf(str_replace('<<<', 'b<<<', $heredocTemplate), $input),
sprintf(str_replace('<<<', 'b<<<', $heredocTemplate), str_replace("\n", "\r\n", $input)),
],
[
sprintf(str_replace('<<<', 'B<<<', $heredocTemplate), $input),
sprintf(str_replace('<<<', 'B<<<', $heredocTemplate), str_replace("\n", "\r\n", $input)),
],
'not T_CLOSE_TAG, do T_INLINE_HTML' => [
"<?php foo(); ?>\r\nA\n\n",
"<?php foo(); ?>\r\nA\r\n\r\n",
],
];
}
public function testWithDifferentLineEndingConfiguration()
{
$this->fixer->setWhitespacesConfig(new WhitespacesFixerConfig("\t", "\r\n"));
$this->doTest(
"<?php \$a = 'my\r\nmulti\r\nline\r\nstring';",
"<?php \$a = 'my\nmulti\nline\nstring';"
);
}
}
| {
"pile_set_name": "Github"
} |
/*global Buffer require exports console setTimeout */
var net = require("net"),
util = require("./lib/util"),
Queue = require("./lib/queue"),
to_array = require("./lib/to_array"),
events = require("events"),
crypto = require("crypto"),
parsers = [], commands,
connection_id = 0,
default_port = 6379,
default_host = "127.0.0.1";
// can set this to true to enable for all connections
exports.debug_mode = false;
// hiredis might not be installed
try {
require("./lib/parser/hiredis");
parsers.push(require("./lib/parser/hiredis"));
} catch (err) {
if (exports.debug_mode) {
console.warn("hiredis parser not installed.");
}
}
parsers.push(require("./lib/parser/javascript"));
function RedisClient(stream, options) {
this.stream = stream;
this.options = options = options || {};
this.connection_id = ++connection_id;
this.connected = false;
this.ready = false;
this.connections = 0;
if (this.options.socket_nodelay === undefined) {
this.options.socket_nodelay = true;
}
this.should_buffer = false;
this.command_queue_high_water = this.options.command_queue_high_water || 1000;
this.command_queue_low_water = this.options.command_queue_low_water || 0;
this.max_attempts = null;
if (options.max_attempts && !isNaN(options.max_attempts) && options.max_attempts > 0) {
this.max_attempts = +options.max_attempts;
}
this.command_queue = new Queue(); // holds sent commands to de-pipeline them
this.offline_queue = new Queue(); // holds commands issued but not able to be sent
this.commands_sent = 0;
this.connect_timeout = false;
if (options.connect_timeout && !isNaN(options.connect_timeout) && options.connect_timeout > 0) {
this.connect_timeout = +options.connect_timeout;
}
this.enable_offline_queue = true;
if (typeof this.options.enable_offline_queue === "boolean") {
this.enable_offline_queue = this.options.enable_offline_queue;
}
this.initialize_retry_vars();
this.pub_sub_mode = false;
this.subscription_set = {};
this.monitoring = false;
this.closing = false;
this.server_info = {};
this.auth_pass = null;
this.parser_module = null;
this.selected_db = null; // save the selected db here, used when reconnecting
this.old_state = null;
var self = this;
this.stream.on("connect", function () {
self.on_connect();
});
this.stream.on("data", function (buffer_from_socket) {
self.on_data(buffer_from_socket);
});
this.stream.on("error", function (msg) {
self.on_error(msg.message);
});
this.stream.on("close", function () {
self.connection_gone("close");
});
this.stream.on("end", function () {
self.connection_gone("end");
});
this.stream.on("drain", function () {
self.should_buffer = false;
self.emit("drain");
});
events.EventEmitter.call(this);
}
util.inherits(RedisClient, events.EventEmitter);
exports.RedisClient = RedisClient;
RedisClient.prototype.initialize_retry_vars = function () {
this.retry_timer = null;
this.retry_totaltime = 0;
this.retry_delay = 150;
this.retry_backoff = 1.7;
this.attempts = 1;
};
// flush offline_queue and command_queue, erroring any items with a callback first
RedisClient.prototype.flush_and_error = function (message) {
var command_obj;
while (this.offline_queue.length > 0) {
command_obj = this.offline_queue.shift();
if (typeof command_obj.callback === "function") {
command_obj.callback(message);
}
}
this.offline_queue = new Queue();
while (this.command_queue.length > 0) {
command_obj = this.command_queue.shift();
if (typeof command_obj.callback === "function") {
command_obj.callback(message);
}
}
this.command_queue = new Queue();
};
RedisClient.prototype.on_error = function (msg) {
var message = "Redis connection to " + this.host + ":" + this.port + " failed - " + msg,
self = this, command_obj;
if (this.closing) {
return;
}
if (exports.debug_mode) {
console.warn(message);
}
this.flush_and_error(message);
this.connected = false;
this.ready = false;
this.emit("error", new Error(message));
// "error" events get turned into exceptions if they aren't listened for. If the user handled this error
// then we should try to reconnect.
this.connection_gone("error");
};
RedisClient.prototype.do_auth = function () {
var self = this;
if (exports.debug_mode) {
console.log("Sending auth to " + self.host + ":" + self.port + " id " + self.connection_id);
}
self.send_anyway = true;
self.send_command("auth", [this.auth_pass], function (err, res) {
if (err) {
if (err.toString().match("LOADING")) {
// if redis is still loading the db, it will not authenticate and everything else will fail
console.log("Redis still loading, trying to authenticate later");
setTimeout(function () {
self.do_auth();
}, 2000); // TODO - magic number alert
return;
} else {
return self.emit("error", new Error("Auth error: " + err.message));
}
}
if (res.toString() !== "OK") {
return self.emit("error", new Error("Auth failed: " + res.toString()));
}
if (exports.debug_mode) {
console.log("Auth succeeded " + self.host + ":" + self.port + " id " + self.connection_id);
}
if (self.auth_callback) {
self.auth_callback(err, res);
self.auth_callback = null;
}
// now we are really connected
self.emit("connect");
if (self.options.no_ready_check) {
self.on_ready();
} else {
self.ready_check();
}
});
self.send_anyway = false;
};
RedisClient.prototype.on_connect = function () {
if (exports.debug_mode) {
console.log("Stream connected " + this.host + ":" + this.port + " id " + this.connection_id);
}
var self = this;
this.connected = true;
this.ready = false;
this.attempts = 0;
this.connections += 1;
this.command_queue = new Queue();
this.emitted_end = false;
this.initialize_retry_vars();
if (this.options.socket_nodelay) {
this.stream.setNoDelay();
}
this.stream.setTimeout(0);
this.init_parser();
if (this.auth_pass) {
this.do_auth();
} else {
this.emit("connect");
if (this.options.no_ready_check) {
this.on_ready();
} else {
this.ready_check();
}
}
};
RedisClient.prototype.init_parser = function () {
var self = this;
if (this.options.parser) {
if (! parsers.some(function (parser) {
if (parser.name === self.options.parser) {
self.parser_module = parser;
if (exports.debug_mode) {
console.log("Using parser module: " + self.parser_module.name);
}
return true;
}
})) {
throw new Error("Couldn't find named parser " + self.options.parser + " on this system");
}
} else {
if (exports.debug_mode) {
console.log("Using default parser module: " + parsers[0].name);
}
this.parser_module = parsers[0];
}
this.parser_module.debug_mode = exports.debug_mode;
// return_buffers sends back Buffers from parser to callback. detect_buffers sends back Buffers from parser, but
// converts to Strings if the input arguments are not Buffers.
this.reply_parser = new this.parser_module.Parser({
return_buffers: self.options.return_buffers || self.options.detect_buffers || false
});
// "reply error" is an error sent back by Redis
this.reply_parser.on("reply error", function (reply) {
self.return_error(new Error(reply));
});
this.reply_parser.on("reply", function (reply) {
self.return_reply(reply);
});
// "error" is bad. Somehow the parser got confused. It'll try to reset and continue.
this.reply_parser.on("error", function (err) {
self.emit("error", new Error("Redis reply parser error: " + err.stack));
});
};
RedisClient.prototype.on_ready = function () {
var self = this;
this.ready = true;
if (this.old_state !== null) {
this.monitoring = this.old_state.monitoring;
this.pub_sub_mode = this.old_state.pub_sub_mode;
this.selected_db = this.old_state.selected_db;
this.old_state = null;
}
// magically restore any modal commands from a previous connection
if (this.selected_db !== null) {
this.send_command('select', [this.selected_db]);
}
if (this.pub_sub_mode === true) {
// only emit "ready" when all subscriptions were made again
var callback_count = 0;
        var callback = function () {
            callback_count--;
            if (callback_count === 0) {
                self.emit("ready");
            }
        };
Object.keys(this.subscription_set).forEach(function (key) {
var parts = key.split(" ");
if (exports.debug_mode) {
console.warn("sending pub/sub on_ready " + parts[0] + ", " + parts[1]);
}
callback_count++;
self.send_command(parts[0] + "scribe", [parts[1]], callback);
});
return;
} else if (this.monitoring) {
this.send_command("monitor");
} else {
this.send_offline_queue();
}
this.emit("ready");
};
RedisClient.prototype.on_info_cmd = function (err, res) {
var self = this, obj = {}, lines, retry_time;
if (err) {
return self.emit("error", new Error("Ready check failed: " + err.message));
}
lines = res.toString().split("\r\n");
lines.forEach(function (line) {
var parts = line.split(':');
if (parts[1]) {
obj[parts[0]] = parts[1];
}
});
obj.versions = [];
obj.redis_version.split('.').forEach(function (num) {
obj.versions.push(+num);
});
// expose info key/vals to users
this.server_info = obj;
if (!obj.loading || (obj.loading && obj.loading === "0")) {
if (exports.debug_mode) {
console.log("Redis server ready.");
}
this.on_ready();
} else {
retry_time = obj.loading_eta_seconds * 1000;
if (retry_time > 1000) {
retry_time = 1000;
}
if (exports.debug_mode) {
console.log("Redis server still loading, trying again in " + retry_time);
}
setTimeout(function () {
self.ready_check();
}, retry_time);
}
};
RedisClient.prototype.ready_check = function () {
var self = this;
if (exports.debug_mode) {
console.log("checking server ready state...");
}
this.send_anyway = true; // secret flag to send_command to send something even if not "ready"
this.info(function (err, res) {
self.on_info_cmd(err, res);
});
this.send_anyway = false;
};
RedisClient.prototype.send_offline_queue = function () {
var command_obj, buffered_writes = 0;
while (this.offline_queue.length > 0) {
command_obj = this.offline_queue.shift();
if (exports.debug_mode) {
console.log("Sending offline command: " + command_obj.command);
}
buffered_writes += !this.send_command(command_obj.command, command_obj.args, command_obj.callback);
}
this.offline_queue = new Queue();
// Even though items were shifted off, Queue backing store still uses memory until next add, so just get a new Queue
if (!buffered_writes) {
this.should_buffer = false;
this.emit("drain");
}
};
RedisClient.prototype.connection_gone = function (why) {
var self = this, message;
// If a retry is already in progress, just let that happen
if (this.retry_timer) {
return;
}
if (exports.debug_mode) {
console.warn("Redis connection is gone from " + why + " event.");
}
this.connected = false;
this.ready = false;
if (this.old_state === null) {
var state = {
monitoring: this.monitoring,
pub_sub_mode: this.pub_sub_mode,
selected_db: this.selected_db
};
this.old_state = state;
this.monitoring = false;
this.pub_sub_mode = false;
this.selected_db = null;
}
// since we are collapsing end and close, users don't expect to be called twice
if (! this.emitted_end) {
this.emit("end");
this.emitted_end = true;
}
this.flush_and_error("Redis connection gone from " + why + " event.");
// If this is a requested shutdown, then don't retry
if (this.closing) {
this.retry_timer = null;
if (exports.debug_mode) {
console.warn("connection ended from quit command, not retrying.");
}
return;
}
this.retry_delay = Math.floor(this.retry_delay * this.retry_backoff);
if (exports.debug_mode) {
console.log("Retry connection in " + this.current_retry_delay + " ms");
}
if (this.max_attempts && this.attempts >= this.max_attempts) {
this.retry_timer = null;
// TODO - some people need a "Redis is Broken mode" for future commands that errors immediately, and others
// want the program to exit. Right now, we just log, which doesn't really help in either case.
console.error("node_redis: Couldn't get Redis connection after " + this.max_attempts + " attempts.");
return;
}
this.attempts += 1;
this.emit("reconnecting", {
delay: self.retry_delay,
attempt: self.attempts
});
this.retry_timer = setTimeout(function () {
if (exports.debug_mode) {
console.log("Retrying connection...");
}
        self.retry_totaltime += self.retry_delay;
if (self.connect_timeout && self.retry_totaltime >= self.connect_timeout) {
self.retry_timer = null;
// TODO - engage Redis is Broken mode for future commands, or whatever
console.error("node_redis: Couldn't get Redis connection after " + self.retry_totaltime + "ms.");
return;
}
self.stream.connect(self.port, self.host);
self.retry_timer = null;
}, this.retry_delay);
};
RedisClient.prototype.on_data = function (data) {
if (exports.debug_mode) {
console.log("net read " + this.host + ":" + this.port + " id " + this.connection_id + ": " + data.toString());
}
try {
this.reply_parser.execute(data);
} catch (err) {
// This is an unexpected parser problem, an exception that came from the parser code itself.
// Parser should emit "error" events if it notices things are out of whack.
// Callbacks that throw exceptions will land in return_reply(), below.
// TODO - it might be nice to have a different "error" event for different types of errors
this.emit("error", err);
}
};
RedisClient.prototype.return_error = function (err) {
var command_obj = this.command_queue.shift(), queue_len = this.command_queue.getLength();
if (this.pub_sub_mode === false && queue_len === 0) {
this.emit("idle");
this.command_queue = new Queue();
}
if (this.should_buffer && queue_len <= this.command_queue_low_water) {
this.emit("drain");
this.should_buffer = false;
}
if (command_obj && typeof command_obj.callback === "function") {
try {
command_obj.callback(err);
} catch (callback_err) {
// if a callback throws an exception, re-throw it on a new stack so the parser can keep going
process.nextTick(function () {
throw callback_err;
});
}
} else {
console.log("node_redis: no callback to send error: " + err.message);
// this will probably not make it anywhere useful, but we might as well throw
process.nextTick(function () {
throw err;
});
}
};
// if a callback throws an exception, re-throw it on a new stack so the parser can keep going.
// put this try/catch in its own function because V8 doesn't optimize this well yet.
function try_callback(callback, reply) {
try {
callback(null, reply);
} catch (err) {
process.nextTick(function () {
throw err;
});
}
}
// hgetall converts its replies to an Object. If the reply is empty, null is returned.
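// e.g. (illustrative): ["name", "Bob", "age", "30"] becomes { name: "Bob", age: "30" }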
function reply_to_object(reply) {
var obj = {}, j, jl, key, val;
if (reply.length === 0) {
return null;
}
for (j = 0, jl = reply.length; j < jl; j += 2) {
key = reply[j].toString();
val = reply[j + 1];
obj[key] = val;
}
return obj;
}
function reply_to_strings(reply) {
var i;
if (Buffer.isBuffer(reply)) {
return reply.toString();
}
if (Array.isArray(reply)) {
for (i = 0; i < reply.length; i++) {
reply[i] = reply[i].toString();
}
return reply;
}
return reply;
}
RedisClient.prototype.return_reply = function (reply) {
var command_obj, obj, i, len, type, timestamp, argindex, args, queue_len;
    command_obj = this.command_queue.shift();
    queue_len = this.command_queue.getLength();
if (this.pub_sub_mode === false && queue_len === 0) {
this.emit("idle");
this.command_queue = new Queue(); // explicitly reclaim storage from old Queue
}
if (this.should_buffer && queue_len <= this.command_queue_low_water) {
this.emit("drain");
this.should_buffer = false;
}
if (command_obj && !command_obj.sub_command) {
if (typeof command_obj.callback === "function") {
if (this.options.detect_buffers && command_obj.buffer_args === false) {
// If detect_buffers option was specified, then the reply from the parser will be Buffers.
// If this command did not use Buffer arguments, then convert the reply to Strings here.
reply = reply_to_strings(reply);
}
// TODO - confusing and error-prone that hgetall is special cased in two places
if (reply && 'hgetall' === command_obj.command.toLowerCase()) {
reply = reply_to_object(reply);
}
try_callback(command_obj.callback, reply);
} else if (exports.debug_mode) {
console.log("no callback for reply: " + (reply && reply.toString && reply.toString()));
}
} else if (this.pub_sub_mode || (command_obj && command_obj.sub_command)) {
if (Array.isArray(reply)) {
type = reply[0].toString();
if (type === "message") {
this.emit("message", reply[1].toString(), reply[2]); // channel, message
} else if (type === "pmessage") {
this.emit("pmessage", reply[1].toString(), reply[2].toString(), reply[3]); // pattern, channel, message
} else if (type === "subscribe" || type === "unsubscribe" || type === "psubscribe" || type === "punsubscribe") {
if (reply[2] === 0) {
this.pub_sub_mode = false;
                if (exports.debug_mode) {
console.log("All subscriptions removed, exiting pub/sub mode");
}
} else {
this.pub_sub_mode = true;
}
// subscribe commands take an optional callback and also emit an event, but only the first response is included in the callback
// TODO - document this or fix it so it works in a more obvious way
if (command_obj && typeof command_obj.callback === "function") {
try_callback(command_obj.callback, reply[1].toString());
}
this.emit(type, reply[1].toString(), reply[2]); // channel, count
} else {
throw new Error("subscriptions are active but got unknown reply type " + type);
}
} else if (! this.closing) {
throw new Error("subscriptions are active but got an invalid reply: " + reply);
}
} else if (this.monitoring) {
len = reply.indexOf(" ");
timestamp = reply.slice(0, len);
argindex = reply.indexOf('"');
args = reply.slice(argindex + 1, -1).split('" "').map(function (elem) {
return elem.replace(/\\"/g, '"');
});
this.emit("monitor", timestamp, args);
} else {
throw new Error("node_redis command queue state error. If you can reproduce this, please report it.");
}
};
// This Command constructor is ever so slightly faster than using an object literal, but more importantly, using
// a named constructor helps it show up meaningfully in the V8 CPU profiler and in heap snapshots.
function Command(command, args, sub_command, buffer_args, callback) {
this.command = command;
this.args = args;
this.sub_command = sub_command;
this.buffer_args = buffer_args;
this.callback = callback;
}
RedisClient.prototype.send_command = function (command, args, callback) {
var arg, this_args, command_obj, i, il, elem_count, buffer_args, stream = this.stream, command_str = "", buffered_writes = 0, last_arg_type;
if (typeof command !== "string") {
throw new Error("First argument to send_command must be the command name string, not " + typeof command);
}
if (Array.isArray(args)) {
if (typeof callback === "function") {
// probably the fastest way:
// client.command([arg1, arg2], cb); (straight passthrough)
// send_command(command, [arg1, arg2], cb);
} else if (! callback) {
// most people find this variable argument length form more convenient, but it uses arguments, which is slower
// client.command(arg1, arg2, cb); (wraps up arguments into an array)
// send_command(command, [arg1, arg2, cb]);
// client.command(arg1, arg2); (callback is optional)
// send_command(command, [arg1, arg2]);
// client.command(arg1, arg2, undefined); (callback is undefined)
// send_command(command, [arg1, arg2, undefined]);
last_arg_type = typeof args[args.length - 1];
if (last_arg_type === "function" || last_arg_type === "undefined") {
callback = args.pop();
}
} else {
throw new Error("send_command: last argument must be a callback or undefined");
}
} else {
throw new Error("send_command: second argument must be an array");
}
// if the last argument is an array and command is sadd, expand it out:
// client.sadd(arg1, [arg2, arg3, arg4], cb);
// converts to:
// client.sadd(arg1, arg2, arg3, arg4, cb);
if ((command === 'sadd' || command === 'SADD') && args.length > 0 && Array.isArray(args[args.length - 1])) {
args = args.slice(0, -1).concat(args[args.length - 1]);
}
buffer_args = false;
for (i = 0, il = args.length, arg; i < il; i += 1) {
if (Buffer.isBuffer(args[i])) {
buffer_args = true;
}
}
command_obj = new Command(command, args, false, buffer_args, callback);
if ((!this.ready && !this.send_anyway) || !stream.writable) {
if (exports.debug_mode) {
if (!stream.writable) {
console.log("send command: stream is not writeable.");
}
}
if (this.enable_offline_queue) {
if (exports.debug_mode) {
console.log("Queueing " + command + " for next server connection.");
}
this.offline_queue.push(command_obj);
this.should_buffer = true;
} else {
var not_writeable_error = new Error('send_command: stream not writeable. enable_offline_queue is false');
if (command_obj.callback) {
command_obj.callback(not_writeable_error);
} else {
throw not_writeable_error;
}
}
return false;
}
if (command === "subscribe" || command === "psubscribe" || command === "unsubscribe" || command === "punsubscribe") {
this.pub_sub_command(command_obj);
} else if (command === "monitor") {
this.monitoring = true;
} else if (command === "quit") {
this.closing = true;
} else if (this.pub_sub_mode === true) {
throw new Error("Connection in pub/sub mode, only pub/sub commands may be used");
}
this.command_queue.push(command_obj);
this.commands_sent += 1;
elem_count = args.length + 1;
// Always use "Multi bulk commands", but if passed any Buffer args, then do multiple writes, one for each arg.
// This means that using Buffers in commands is going to be slower, so use Strings if you don't already have a Buffer.
command_str = "*" + elem_count + "\r\n$" + command.length + "\r\n" + command + "\r\n";
if (! buffer_args) { // Build up a string and send entire command in one write
for (i = 0, il = args.length, arg; i < il; i += 1) {
arg = args[i];
if (typeof arg !== "string") {
arg = String(arg);
}
command_str += "$" + Buffer.byteLength(arg) + "\r\n" + arg + "\r\n";
}
if (exports.debug_mode) {
console.log("send " + this.host + ":" + this.port + " id " + this.connection_id + ": " + command_str);
}
buffered_writes += !stream.write(command_str);
} else {
if (exports.debug_mode) {
console.log("send command (" + command_str + ") has Buffer arguments");
}
buffered_writes += !stream.write(command_str);
for (i = 0, il = args.length, arg; i < il; i += 1) {
arg = args[i];
if (!(Buffer.isBuffer(arg) || arg instanceof String)) {
arg = String(arg);
}
if (Buffer.isBuffer(arg)) {
if (arg.length === 0) {
if (exports.debug_mode) {
console.log("send_command: using empty string for 0 length buffer");
}
buffered_writes += !stream.write("$0\r\n\r\n");
} else {
buffered_writes += !stream.write("$" + arg.length + "\r\n");
buffered_writes += !stream.write(arg);
buffered_writes += !stream.write("\r\n");
if (exports.debug_mode) {
console.log("send_command: buffer send " + arg.length + " bytes");
}
}
} else {
if (exports.debug_mode) {
console.log("send_command: string send " + Buffer.byteLength(arg) + " bytes: " + arg);
}
buffered_writes += !stream.write("$" + Buffer.byteLength(arg) + "\r\n" + arg + "\r\n");
}
}
}
if (exports.debug_mode) {
console.log("send_command buffered_writes: " + buffered_writes, " should_buffer: " + this.should_buffer);
}
if (buffered_writes || this.command_queue.getLength() >= this.command_queue_high_water) {
this.should_buffer = true;
}
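// Mirror stream.write() semantics: a false return asks the caller to back off
// until this client emits "drain".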
return !this.should_buffer;
};
RedisClient.prototype.pub_sub_command = function (command_obj) {
var i, key, command, args;
if (this.pub_sub_mode === false && exports.debug_mode) {
console.log("Entering pub/sub mode from " + command_obj.command);
}
this.pub_sub_mode = true;
command_obj.sub_command = true;
command = command_obj.command;
args = command_obj.args;
if (command === "subscribe" || command === "psubscribe") {
if (command === "subscribe") {
key = "sub";
} else {
key = "psub";
}
for (i = 0; i < args.length; i++) {
this.subscription_set[key + " " + args[i]] = true;
}
} else {
if (command === "unsubscribe") {
key = "sub";
} else {
key = "psub";
}
for (i = 0; i < args.length; i++) {
delete this.subscription_set[key + " " + args[i]];
}
}
};
RedisClient.prototype.end = function () {
this.stream._events = {};
this.connected = false;
this.ready = false;
return this.stream.end();
};
function Multi(client, args) {
this.client = client;
this.queue = [["MULTI"]];
if (Array.isArray(args)) {
this.queue = this.queue.concat(args);
}
}
exports.Multi = Multi;
// take 2 arrays and return the union of their elements
function set_union(seta, setb) {
var obj = {};
seta.forEach(function (val) {
obj[val] = true;
});
setb.forEach(function (val) {
obj[val] = true;
});
return Object.keys(obj);
}
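// For example, set_union(["get", "set"], ["set", "del"]) returns ["get", "set", "del"]
// (order follows first insertion into the object used as a set).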
// This static list of commands is updated from time to time. ./lib/commands.js can be updated with generate_commands.js
commands = set_union(["get", "set", "setnx", "setex", "append", "strlen", "del", "exists", "setbit", "getbit", "setrange", "getrange", "substr",
"incr", "decr", "mget", "rpush", "lpush", "rpushx", "lpushx", "linsert", "rpop", "lpop", "brpop", "brpoplpush", "blpop", "llen", "lindex",
"lset", "lrange", "ltrim", "lrem", "rpoplpush", "sadd", "srem", "smove", "sismember", "scard", "spop", "srandmember", "sinter", "sinterstore",
"sunion", "sunionstore", "sdiff", "sdiffstore", "smembers", "zadd", "zincrby", "zrem", "zremrangebyscore", "zremrangebyrank", "zunionstore",
"zinterstore", "zrange", "zrangebyscore", "zrevrangebyscore", "zcount", "zrevrange", "zcard", "zscore", "zrank", "zrevrank", "hset", "hsetnx",
"hget", "hmset", "hmget", "hincrby", "hdel", "hlen", "hkeys", "hvals", "hgetall", "hexists", "incrby", "decrby", "getset", "mset", "msetnx",
"randomkey", "select", "move", "rename", "renamenx", "expire", "expireat", "keys", "dbsize", "auth", "ping", "echo", "save", "bgsave",
"bgrewriteaof", "shutdown", "lastsave", "type", "multi", "exec", "discard", "sync", "flushdb", "flushall", "sort", "info", "monitor", "ttl",
"persist", "slaveof", "debug", "config", "subscribe", "unsubscribe", "psubscribe", "punsubscribe", "publish", "watch", "unwatch", "cluster",
"restore", "migrate", "dump", "object", "client", "eval", "evalsha"], require("./lib/commands"));
commands.forEach(function (command) {
RedisClient.prototype[command] = function (args, callback) {
if (Array.isArray(args) && typeof callback === "function") {
return this.send_command(command, args, callback);
} else {
return this.send_command(command, to_array(arguments));
}
};
RedisClient.prototype[command.toUpperCase()] = RedisClient.prototype[command];
Multi.prototype[command] = function () {
this.queue.push([command].concat(to_array(arguments)));
return this;
};
Multi.prototype[command.toUpperCase()] = Multi.prototype[command];
});
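// For illustration, both generated call styles reach send_command the same way:
//   client.get(["key"], cb);   // array form, the fastest path
//   client.get("key", cb);     // variadic form, wrapped via to_array(arguments)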
// store db in this.select_db to restore it on reconnect
RedisClient.prototype.select = function (db, callback) {
var self = this;
this.send_command('select', [db], function (err, res) {
if (err === null) {
self.selected_db = db;
}
if (typeof(callback) === 'function') {
callback(err, res);
}
});
};
RedisClient.prototype.SELECT = RedisClient.prototype.select;
// Stash auth for connect and reconnect. Send immediately if already connected.
RedisClient.prototype.auth = function () {
var args = to_array(arguments);
this.auth_pass = args[0];
this.auth_callback = args[1];
if (exports.debug_mode) {
console.log("Saving auth as " + this.auth_pass);
}
if (this.connected) {
this.send_command("auth", args);
}
};
RedisClient.prototype.AUTH = RedisClient.prototype.auth;
RedisClient.prototype.hmget = function (arg1, arg2, arg3) {
if (Array.isArray(arg2) && typeof arg3 === "function") {
return this.send_command("hmget", [arg1].concat(arg2), arg3);
} else if (Array.isArray(arg1) && typeof arg2 === "function") {
return this.send_command("hmget", arg1, arg2);
} else {
return this.send_command("hmget", to_array(arguments));
}
};
RedisClient.prototype.HMGET = RedisClient.prototype.hmget;
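// Supported call forms, for illustration:
//   client.hmget("hash", "f1", "f2", cb);     // variadic
//   client.hmget("hash", ["f1", "f2"], cb);   // key plus array of fields
//   client.hmget(["hash", "f1", "f2"], cb);   // single array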
RedisClient.prototype.hmset = function (args, callback) {
var tmp_args, tmp_keys, i, il, key;
if (Array.isArray(args) && typeof callback === "function") {
return this.send_command("hmset", args, callback);
}
args = to_array(arguments);
if (typeof args[args.length - 1] === "function") {
callback = args[args.length - 1];
args.length -= 1;
} else {
callback = null;
}
if (args.length === 2 && typeof args[0] === "string" && typeof args[1] === "object") {
// User does: client.hmset(key, {key1: val1, key2: val2})
tmp_args = [ args[0] ];
tmp_keys = Object.keys(args[1]);
for (i = 0, il = tmp_keys.length; i < il ; i++) {
key = tmp_keys[i];
tmp_args.push(key);
if (typeof args[1][key] !== "string") {
var err = new Error("hmset expected value to be a string for key '" + key + "': " + args[1][key]);
if (callback) return callback(err);
else throw err;
}
tmp_args.push(args[1][key]);
}
args = tmp_args;
}
return this.send_command("hmset", args, callback);
};
RedisClient.prototype.HMSET = RedisClient.prototype.hmset;
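// For illustration, the object form
//   client.hmset("user:1", {name: "Ann", age: "30"}, cb)
// is flattened to ["user:1", "name", "Ann", "age", "30"] before being sent.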
Multi.prototype.hmset = function () {
var args = to_array(arguments), tmp_args;
if (args.length >= 2 && typeof args[0] === "string" && typeof args[1] === "object") {
tmp_args = [ "hmset", args[0] ];
Object.keys(args[1]).map(function (key) {
tmp_args.push(key);
tmp_args.push(args[1][key]);
});
if (args[2]) {
tmp_args.push(args[2]);
}
args = tmp_args;
} else {
args.unshift("hmset");
}
this.queue.push(args);
return this;
};
Multi.prototype.HMSET = Multi.prototype.hmset;
Multi.prototype.exec = function (callback) {
var self = this;
// drain queue, callback will catch "QUEUED" or error
// TODO - get rid of all of these anonymous functions which are elegant but slow
this.queue.forEach(function (args, index) {
var command = args[0], obj;
if (typeof args[args.length - 1] === "function") {
args = args.slice(1, -1);
} else {
args = args.slice(1);
}
if (args.length === 1 && Array.isArray(args[0])) {
args = args[0];
}
if (command.toLowerCase() === 'hmset' && typeof args[1] === 'object') {
obj = args.pop();
Object.keys(obj).forEach(function (key) {
args.push(key);
args.push(obj[key]);
});
}
this.client.send_command(command, args, function (err, reply) {
if (err) {
var cur = self.queue[index];
if (typeof cur[cur.length - 1] === "function") {
cur[cur.length - 1](err);
} else {
throw err;
}
self.queue.splice(index, 1);
}
});
}, this);
// TODO - make this callback part of Multi.prototype instead of creating it each time
return this.client.send_command("EXEC", [], function (err, replies) {
if (err) {
if (callback) {
callback(err);
return;
} else {
throw err;
}
}
var i, il, j, jl, reply, args;
if (replies) {
for (i = 1, il = self.queue.length; i < il; i += 1) {
reply = replies[i - 1];
args = self.queue[i];
// TODO - confusing and error-prone that hgetall is special cased in two places
if (reply && args[0].toLowerCase() === "hgetall") {
replies[i - 1] = reply = reply_to_object(reply);
}
if (typeof args[args.length - 1] === "function") {
args[args.length - 1](null, reply);
}
}
}
if (callback) {
callback(null, replies);
}
});
};
Multi.prototype.EXEC = Multi.prototype.exec;
RedisClient.prototype.multi = function (args) {
return new Multi(this, args);
};
RedisClient.prototype.MULTI = function (args) {
return new Multi(this, args);
};
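// Typical usage, for illustration:
//   client.multi()
//     .set("key", "value")
//     .get("key")
//     .exec(function (err, replies) { /* replies[0] === "OK", replies[1] === "value" */ });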
// stash original eval method
var eval = RedisClient.prototype.eval;
// hook eval with an attempt to evalsha for cached scripts
RedisClient.prototype.eval =
RedisClient.prototype.EVAL = function () {
var self = this,
args = to_array(arguments),
callback;
if (typeof args[args.length - 1] === "function") {
callback = args.pop();
}
// replace script source with sha value
var source = args[0];
args[0] = crypto.createHash("sha1").update(source).digest("hex");
self.evalsha(args, function (err, reply) {
if (err && /NOSCRIPT/.test(err.message)) {
args[0] = source;
eval.call(self, args, callback);
} else if (callback) {
callback(err, reply);
}
});
};
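// For illustration: the first client.eval(script, 0, cb) for a given source falls
// back to EVAL after the server replies NOSCRIPT; once the script is cached,
// later calls succeed via EVALSHA without resending the source.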
exports.createClient = function (port_arg, host_arg, options) {
var port = port_arg || default_port,
host = host_arg || default_host,
redis_client, net_client;
net_client = net.createConnection(port, host);
redis_client = new RedisClient(net_client, options);
redis_client.port = port;
redis_client.host = host;
return redis_client;
};
exports.print = function (err, reply) {
if (err) {
console.log("Error: " + err);
} else {
console.log("Reply: " + reply);
}
};
<?xml version="1.0" encoding="utf-8" ?>
<resources>
<style name="MainTheme" parent="MainTheme.Base">
</style>
<!-- Base theme applied no matter what API -->
<style name="MainTheme.Base" parent="Theme.AppCompat.Light.DarkActionBar">
<!--If you are using revision 22.1 please use just windowNoTitle. Without android:-->
<item name="windowNoTitle">true</item>
<!--We will be using the toolbar so no need to show ActionBar-->
<item name="windowActionBar">false</item>
<!-- Set theme colors from http://www.google.com/design/spec/style/color.html#color-color-palette -->
<!-- colorPrimary is used for the default action bar background -->
<item name="colorPrimary">#2196F3</item>
<!-- colorPrimaryDark is used for the status bar -->
<item name="colorPrimaryDark">#1976D2</item>
<!-- colorAccent is used as the default value for colorControlActivated
which is used to tint widgets -->
<item name="colorAccent">#FF4081</item>
<!-- You can also set colorControlNormal, colorControlActivated
colorControlHighlight and colorSwitchThumbNormal. -->
<item name="windowActionModeOverlay">true</item>
<item name="android:datePickerDialogTheme">@style/AppCompatDialogStyle</item>
</style>
<style name="AppCompatDialogStyle" parent="Theme.AppCompat.Light.Dialog">
<item name="colorAccent">#FF4081</item>
</style>
</resources>
/****************************************************************************
* Driver for Solarflare network controllers and boards
* Copyright 2005-2006 Fen Systems Ltd.
* Copyright 2006-2013 Solarflare Communications Inc.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation, incorporated herein by reference.
*/
#ifndef EFX_IO_H
#define EFX_IO_H
#include <linux/io.h>
#include <linux/spinlock.h>
/**************************************************************************
*
* NIC register I/O
*
**************************************************************************
*
* Notes on locking strategy for the Falcon architecture:
*
* Many CSRs are very wide and cannot be read or written atomically.
* Writes from the host are buffered by the Bus Interface Unit (BIU)
* up to 128 bits. Whenever the host writes part of such a register,
* the BIU collects the written value and does not write to the
* underlying register until all 4 dwords have been written. A
* similar buffering scheme applies to host access to the NIC's 64-bit
* SRAM.
*
* Writes to different CSRs and 64-bit SRAM words must be serialised,
* since interleaved access can result in lost writes. We use
* efx_nic::biu_lock for this.
*
* We also serialise reads from 128-bit CSRs and SRAM with the same
* spinlock. This may not be necessary, but it doesn't really matter
* as there are no such reads on the fast path.
*
* The DMA descriptor pointers (RX_DESC_UPD and TX_DESC_UPD) are
* 128-bit but are special-cased in the BIU to avoid the need for
* locking in the host:
*
* - They are write-only.
* - The semantics of writing to these registers are such that
* replacing the low 96 bits with zero does not affect functionality.
* - If the host writes to the last dword address of such a register
* (i.e. the high 32 bits) the underlying register will always be
* written. If the collector and the current write together do not
* provide values for all 128 bits of the register, the low 96 bits
* will be written as zero.
* - If the host writes to the address of any other part of such a
* register while the collector already holds values for some other
* register, the write is discarded and the collector maintains its
* current state.
*
* The EF10 architecture exposes very few registers to the host and
* most of them are only 32 bits wide. The only exceptions are the MC
* doorbell register pair, which has its own latching, and
* TX_DESC_UPD, which works in a similar way to the Falcon
* architecture.
*/
#if BITS_PER_LONG == 64
#define EFX_USE_QWORD_IO 1
#endif
/* A hardware issue requires that only 64-bit, naturally aligned writes
* are seen by the hardware. It's not strictly necessary to restrict this to
* the x86_64 arch, but it is done for safety, since unusual write-combining
* behaviour can break PIO.
*/
#ifdef CONFIG_X86_64
/* PIO is a win only if write-combining is possible */
#ifdef ARCH_HAS_IOREMAP_WC
#define EFX_USE_PIO 1
#endif
#endif
#ifdef EFX_USE_QWORD_IO
static inline void _efx_writeq(struct efx_nic *efx, __le64 value,
unsigned int reg)
{
__raw_writeq((__force u64)value, efx->membase + reg);
}
static inline __le64 _efx_readq(struct efx_nic *efx, unsigned int reg)
{
return (__force __le64)__raw_readq(efx->membase + reg);
}
#endif
static inline void _efx_writed(struct efx_nic *efx, __le32 value,
unsigned int reg)
{
__raw_writel((__force u32)value, efx->membase + reg);
}
static inline __le32 _efx_readd(struct efx_nic *efx, unsigned int reg)
{
return (__force __le32)__raw_readl(efx->membase + reg);
}
/* Write a normal 128-bit CSR, locking as appropriate. */
static inline void efx_writeo(struct efx_nic *efx, const efx_oword_t *value,
unsigned int reg)
{
unsigned long flags __attribute__ ((unused));
netif_vdbg(efx, hw, efx->net_dev,
"writing register %x with " EFX_OWORD_FMT "\n", reg,
EFX_OWORD_VAL(*value));
spin_lock_irqsave(&efx->biu_lock, flags);
#ifdef EFX_USE_QWORD_IO
_efx_writeq(efx, value->u64[0], reg + 0);
_efx_writeq(efx, value->u64[1], reg + 8);
#else
_efx_writed(efx, value->u32[0], reg + 0);
_efx_writed(efx, value->u32[1], reg + 4);
_efx_writed(efx, value->u32[2], reg + 8);
_efx_writed(efx, value->u32[3], reg + 12);
#endif
mmiowb();
spin_unlock_irqrestore(&efx->biu_lock, flags);
}
/* Write 64-bit SRAM through the supplied mapping, locking as appropriate. */
static inline void efx_sram_writeq(struct efx_nic *efx, void __iomem *membase,
const efx_qword_t *value, unsigned int index)
{
unsigned int addr = index * sizeof(*value);
unsigned long flags __attribute__ ((unused));
netif_vdbg(efx, hw, efx->net_dev,
"writing SRAM address %x with " EFX_QWORD_FMT "\n",
addr, EFX_QWORD_VAL(*value));
spin_lock_irqsave(&efx->biu_lock, flags);
#ifdef EFX_USE_QWORD_IO
__raw_writeq((__force u64)value->u64[0], membase + addr);
#else
__raw_writel((__force u32)value->u32[0], membase + addr);
__raw_writel((__force u32)value->u32[1], membase + addr + 4);
#endif
mmiowb();
spin_unlock_irqrestore(&efx->biu_lock, flags);
}
/* Write a 32-bit CSR or the last dword of a special 128-bit CSR */
static inline void efx_writed(struct efx_nic *efx, const efx_dword_t *value,
unsigned int reg)
{
netif_vdbg(efx, hw, efx->net_dev,
"writing register %x with "EFX_DWORD_FMT"\n",
reg, EFX_DWORD_VAL(*value));
/* No lock required */
_efx_writed(efx, value->u32[0], reg);
}
/* Read a 128-bit CSR, locking as appropriate. */
static inline void efx_reado(struct efx_nic *efx, efx_oword_t *value,
unsigned int reg)
{
unsigned long flags __attribute__ ((unused));
spin_lock_irqsave(&efx->biu_lock, flags);
value->u32[0] = _efx_readd(efx, reg + 0);
value->u32[1] = _efx_readd(efx, reg + 4);
value->u32[2] = _efx_readd(efx, reg + 8);
value->u32[3] = _efx_readd(efx, reg + 12);
spin_unlock_irqrestore(&efx->biu_lock, flags);
netif_vdbg(efx, hw, efx->net_dev,
"read from register %x, got " EFX_OWORD_FMT "\n", reg,
EFX_OWORD_VAL(*value));
}
/* Read 64-bit SRAM through the supplied mapping, locking as appropriate. */
static inline void efx_sram_readq(struct efx_nic *efx, void __iomem *membase,
efx_qword_t *value, unsigned int index)
{
unsigned int addr = index * sizeof(*value);
unsigned long flags __attribute__ ((unused));
spin_lock_irqsave(&efx->biu_lock, flags);
#ifdef EFX_USE_QWORD_IO
value->u64[0] = (__force __le64)__raw_readq(membase + addr);
#else
value->u32[0] = (__force __le32)__raw_readl(membase + addr);
value->u32[1] = (__force __le32)__raw_readl(membase + addr + 4);
#endif
spin_unlock_irqrestore(&efx->biu_lock, flags);
netif_vdbg(efx, hw, efx->net_dev,
"read from SRAM address %x, got "EFX_QWORD_FMT"\n",
addr, EFX_QWORD_VAL(*value));
}
/* Read a 32-bit CSR or SRAM */
static inline void efx_readd(struct efx_nic *efx, efx_dword_t *value,
unsigned int reg)
{
value->u32[0] = _efx_readd(efx, reg);
netif_vdbg(efx, hw, efx->net_dev,
"read from register %x, got "EFX_DWORD_FMT"\n",
reg, EFX_DWORD_VAL(*value));
}
/* Write a 128-bit CSR forming part of a table */
static inline void
efx_writeo_table(struct efx_nic *efx, const efx_oword_t *value,
unsigned int reg, unsigned int index)
{
efx_writeo(efx, value, reg + index * sizeof(efx_oword_t));
}
/* Read a 128-bit CSR forming part of a table */
static inline void efx_reado_table(struct efx_nic *efx, efx_oword_t *value,
unsigned int reg, unsigned int index)
{
efx_reado(efx, value, reg + index * sizeof(efx_oword_t));
}
/* Page size used as step between per-VI registers */
#define EFX_VI_PAGE_SIZE 0x2000
/* Calculate offset to page-mapped register */
#define EFX_PAGED_REG(page, reg) \
((page) * EFX_VI_PAGE_SIZE + (reg))
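/* For example, EFX_PAGED_REG(2, 0x400) == 2 * 0x2000 + 0x400 == 0x4400. */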
/* Write the whole of RX_DESC_UPD or TX_DESC_UPD */
static inline void _efx_writeo_page(struct efx_nic *efx, efx_oword_t *value,
unsigned int reg, unsigned int page)
{
reg = EFX_PAGED_REG(page, reg);
netif_vdbg(efx, hw, efx->net_dev,
"writing register %x with " EFX_OWORD_FMT "\n", reg,
EFX_OWORD_VAL(*value));
#ifdef EFX_USE_QWORD_IO
_efx_writeq(efx, value->u64[0], reg + 0);
_efx_writeq(efx, value->u64[1], reg + 8);
#else
_efx_writed(efx, value->u32[0], reg + 0);
_efx_writed(efx, value->u32[1], reg + 4);
_efx_writed(efx, value->u32[2], reg + 8);
_efx_writed(efx, value->u32[3], reg + 12);
#endif
}
#define efx_writeo_page(efx, value, reg, page) \
_efx_writeo_page(efx, value, \
reg + \
BUILD_BUG_ON_ZERO((reg) != 0x830 && (reg) != 0xa10), \
page)
/* Write a page-mapped 32-bit CSR (EVQ_RPTR, EVQ_TMR (EF10), or the
* high bits of RX_DESC_UPD or TX_DESC_UPD)
*/
static inline void
_efx_writed_page(struct efx_nic *efx, const efx_dword_t *value,
unsigned int reg, unsigned int page)
{
efx_writed(efx, value, EFX_PAGED_REG(page, reg));
}
#define efx_writed_page(efx, value, reg, page) \
_efx_writed_page(efx, value, \
reg + \
BUILD_BUG_ON_ZERO((reg) != 0x400 && \
(reg) != 0x420 && \
(reg) != 0x830 && \
(reg) != 0x83c && \
(reg) != 0xa18 && \
(reg) != 0xa1c), \
page)
/* Write TIMER_COMMAND. This is a page-mapped 32-bit CSR, but a bug
* in the BIU means that writes to TIMER_COMMAND[0] invalidate the
* collector register.
*/
static inline void _efx_writed_page_locked(struct efx_nic *efx,
const efx_dword_t *value,
unsigned int reg,
unsigned int page)
{
unsigned long flags __attribute__ ((unused));
if (page == 0) {
spin_lock_irqsave(&efx->biu_lock, flags);
efx_writed(efx, value, EFX_PAGED_REG(page, reg));
spin_unlock_irqrestore(&efx->biu_lock, flags);
} else {
efx_writed(efx, value, EFX_PAGED_REG(page, reg));
}
}
#define efx_writed_page_locked(efx, value, reg, page) \
_efx_writed_page_locked(efx, value, \
reg + BUILD_BUG_ON_ZERO((reg) != 0x420), \
page)
#endif /* EFX_IO_H */
# Docker Machine Experimental Features
Docker Machine’s experimental features give you access to bleeding-edge functionality. By identifying experimental features, you can try them out early and give feedback to the Docker Machine maintainers. In this way, we hope to refine our feature designs by exposing them earlier to real-world usage.
This page describes the experimental features in Docker Machine. Docker Machine is currently in beta. Neither it nor its experimental features are ready for production use.
The information below describes each feature and the GitHub pull requests and
issues associated with it. If necessary, links are provided to additional
documentation on an issue. As an active Docker user and community member,
please feel free to provide any feedback you wish on these features.
## Current experimental features
Currently, you can experiment with [migrating a Boot2Docker created VM to Docker Machine](b2d_migration.md). Also, consider reviewing our [rough plan for the migration](b2d_migration_tasks.md).
Additional experimental features include support for Red Hat, Debian, CentOS, Fedora, and RancherOS as base OSes. These features have no separate feature documentation. We simply encourage you to try them.
## How to comment on an experimental feature
Each feature's documentation includes a list of proposal pull requests or PRs associated with the feature. If you want to comment on or suggest a change to a feature, please add it to the existing feature PR.
Issues or problems with a feature? Ask for help on the `#docker-machine` IRC channel or on the [Docker Google group](https://groups.google.com/forum/#!forum/docker-user).
package com.ofg.infrastructure.web.resttemplate.fluent;
import java.net.URI;
import java.util.concurrent.Callable;
import org.springframework.cloud.sleuth.Tracer;
import org.springframework.cloud.zookeeper.discovery.dependency.ZookeeperDependencies;
import org.springframework.web.client.RestOperations;
import com.google.common.annotations.VisibleForTesting;
import com.ofg.infrastructure.discovery.MicroserviceConfiguration;
import com.ofg.infrastructure.discovery.ServiceAlias;
import com.ofg.infrastructure.discovery.ServiceConfigurationResolver;
import com.ofg.infrastructure.discovery.ServiceResolver;
import com.ofg.infrastructure.web.resttemplate.fluent.common.response.receive.PredefinedHttpHeaders;
/**
* Abstraction over {@link RestOperations} that provides a fluent API for accessing HTTP resources.
* It's bound with {@link ServiceResolver} that allows to easily access the microservice collaborators.
* <p/>
* You can call a collaborator 'users', defined in a microservice descriptor (for example 'microservice.json'), as follows
* <p/>
* <pre>
* {
* "prod": {
* "this": "foo/bar/registration",
* "dependencies": {
* "users": "foo/bar/users",
* "newsletter": "foo/bar/comms/newsletter",
* "confirmation": "foo/bar/security/confirmation"
* }
* }
* }
* </pre>
* <p/>
* in the following manner (example for POST):
* <p/>
* <pre>
* serviceRestClient.forService('users').post()
* .onUrl('/some/url/to/service')
* .body('<loan><id>100</id><name>Smith</name></loan>')
* .withHeaders()
* .contentTypeXml()
* .andExecuteFor()
* .aResponseEntity()
* .ofType(String)
* </pre>
* <p/>
* If you want to send a request to the outside world, you can also use this component, as follows (example for google.com):
* <p/>
* <pre>
* serviceRestClient.forExternalService().get()
* .onUrl('http://google.com')
* .andExecuteFor()
* .aResponseEntity()
* .ofType(String)
* </pre>
* <p/>
* This client also has a built-in retry mechanism:
* <p/>
* <pre>
*
* @@Autowired AsyncRetryExecutor executor
* <p/>
* serviceRestClient
* .forExternalService()
* .retryUsing(
* executor
* .withMaxRetries(5)
* .withFixedBackoff(2_000)
* .withUniformJitter())
* .delete()
* .onUrl(SOME_SERVICE_URL)
* .ignoringResponseAsync()
* </pre>
* <p/>
* If you are using the retry mechanism, another feature is enabled: asynchronous invocation. By appending {@code Async}
* to the last method you will get a {@code ListenableFuture} instance. This way you can easily run multiple requests
* concurrently, combine them, etc.
* @see <a href="https://github.com/nurkiewicz/async-retry">async-retry</a>
*/
public class ServiceRestClient {
private final RestOperations restOperations;
private final ServiceResolver serviceResolver;
private final ServiceConfigurationResolver configurationResolver;
private final ZookeeperDependencies zookeeperDependencies;
private final TracingInfo tracingInfo;
@Deprecated
public ServiceRestClient(RestOperations restOperations, ServiceResolver serviceResolver,
ServiceConfigurationResolver configurationResolver, TracingInfo tracingInfo) {
this.configurationResolver = configurationResolver;
this.restOperations = restOperations;
this.serviceResolver = serviceResolver;
this.zookeeperDependencies = null;
this.tracingInfo = tracingInfo;
}
public ServiceRestClient(RestOperations restOperations, ServiceResolver serviceResolver,
ZookeeperDependencies zookeeperDependencies, TracingInfo tracingInfo) {
this.restOperations = restOperations;
this.serviceResolver = serviceResolver;
this.zookeeperDependencies = zookeeperDependencies;
this.configurationResolver = null;
this.tracingInfo = tracingInfo;
}
/**
* Returns fluent api to send requests to given collaborating service
*
* @deprecated since 0.9.1, use {@link #forService(ServiceAlias serviceAlias)} instead
*
* @param serviceName - name of collaborating service from microservice configuration file
* @return builder for the specified HttpMethod
*/
@Deprecated
public HttpMethodBuilder forService(String serviceName) {
return forService(new ServiceAlias(serviceName));
}
/**
* Returns fluent api to send requests to given collaborating service
*
* @param serviceAlias - collaborating service alias as defined in microservice configuration file
* @return builder for the specified HttpMethod
*/
public HttpMethodBuilder forService(ServiceAlias serviceAlias) {
if (configurationResolver != null) {
return getMethodBuilderUsingConfigurationResolver(serviceAlias);
}
return getMethodBuilderUsingZookeeperDeps(serviceAlias);
}
@Deprecated
private HttpMethodBuilder getMethodBuilderUsingConfigurationResolver(ServiceAlias serviceAlias) {
final MicroserviceConfiguration.Dependency dependency = configurationResolver.getDependency(serviceAlias);
final PredefinedHttpHeaders predefinedHeaders = new PredefinedHttpHeaders(dependency);
return new HttpMethodBuilder(getServiceUri(serviceAlias), restOperations, predefinedHeaders, tracingInfo);
}
private HttpMethodBuilder getMethodBuilderUsingZookeeperDeps(ServiceAlias serviceAlias) {
final PredefinedHttpHeaders predefinedHeaders = new PredefinedHttpHeaders(zookeeperDependencies.getDependencyForAlias(serviceAlias.getName()));
return new HttpMethodBuilder(getServiceUri(serviceAlias), restOperations, predefinedHeaders, tracingInfo);
}
/**
* Lazy evaluation of the service's URI
*/
private Callable<String> getServiceUri(final ServiceAlias serviceAlias) {
return new Callable<String>() {
@Override
public String call() throws Exception {
final URI uri = serviceResolver.fetchUri(serviceAlias);
if (uri == null) {
throw new DependencyMissingException(serviceAlias);
}
return uri.toString();
}
};
}
class DependencyMissingException extends RuntimeException {
public DependencyMissingException(ServiceAlias serviceAlias) {
super("No running instance of [" + serviceAlias.getName() + "] is available.");
}
}
/**
* Returns fluent api to send requests to external service
*
* @return builder for the specified HttpMethod
*/
public HttpMethodBuilder forExternalService() {
return new HttpMethodBuilder(restOperations, tracingInfo);
}
}
// The UnixCredentials system call is currently only implemented on Linux
// http://golang.org/src/pkg/syscall/sockcmsg_linux.go
// https://golang.org/s/go1.4-syscall
// http://code.google.com/p/go/source/browse/unix/sockcmsg_linux.go?repo=sys
// Local implementation of the UnixCredentials system call for FreeBSD
package dbus
/*
const int sizeofPtr = sizeof(void*);
#define _WANT_UCRED
#include <sys/ucred.h>
*/
import "C"
import (
"io"
"os"
"syscall"
"unsafe"
)
// http://golang.org/src/pkg/syscall/ztypes_linux_amd64.go
// https://golang.org/src/syscall/ztypes_freebsd_amd64.go
type Ucred struct {
Pid int32
Uid uint32
Gid uint32
}
// http://golang.org/src/pkg/syscall/types_linux.go
// https://golang.org/src/syscall/types_freebsd.go
// https://github.com/freebsd/freebsd/blob/master/sys/sys/ucred.h
const (
SizeofUcred = C.sizeof_struct_ucred
)
// http://golang.org/src/pkg/syscall/sockcmsg_unix.go
func cmsgAlignOf(salen int) int {
salign := int(C.sizeofPtr)
return (salen + salign - 1) & ^(salign - 1)
}
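// For illustration, with an 8-byte pointer size cmsgAlignOf(12) rounds up to 16,
// while cmsgAlignOf(16) is already aligned and stays 16.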
// http://golang.org/src/pkg/syscall/sockcmsg_unix.go
func cmsgData(h *syscall.Cmsghdr) unsafe.Pointer {
return unsafe.Pointer(uintptr(unsafe.Pointer(h)) + uintptr(cmsgAlignOf(syscall.SizeofCmsghdr)))
}
// http://golang.org/src/pkg/syscall/sockcmsg_linux.go
// UnixCredentials encodes credentials into a socket control message
// for sending to another process. This can be used for
// authentication.
func UnixCredentials(ucred *Ucred) []byte {
b := make([]byte, syscall.CmsgSpace(SizeofUcred))
h := (*syscall.Cmsghdr)(unsafe.Pointer(&b[0]))
h.Level = syscall.SOL_SOCKET
h.Type = syscall.SCM_CREDS
h.SetLen(syscall.CmsgLen(SizeofUcred))
*((*Ucred)(cmsgData(h))) = *ucred
return b
}
// http://golang.org/src/pkg/syscall/sockcmsg_linux.go
// ParseUnixCredentials decodes a socket control message that contains
// credentials in a Ucred structure. To receive such a message, the
// SO_PASSCRED option must be enabled on the socket.
func ParseUnixCredentials(m *syscall.SocketControlMessage) (*Ucred, error) {
if m.Header.Level != syscall.SOL_SOCKET {
return nil, syscall.EINVAL
}
if m.Header.Type != syscall.SCM_CREDS {
return nil, syscall.EINVAL
}
ucred := *(*Ucred)(unsafe.Pointer(&m.Data[0]))
return &ucred, nil
}
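// For illustration (a sketch, not part of this file's API), a receiver would
// parse the credentials roughly like:
//	scms, _ := syscall.ParseSocketControlMessage(oob[:oobn])
//	ucred, err := ParseUnixCredentials(&scms[0])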
func (t *unixTransport) SendNullByte() error {
ucred := &Ucred{Pid: int32(os.Getpid()), Uid: uint32(os.Getuid()), Gid: uint32(os.Getgid())}
b := UnixCredentials(ucred)
_, oobn, err := t.UnixConn.WriteMsgUnix([]byte{0}, b, nil)
if err != nil {
return err
}
if oobn != len(b) {
return io.ErrShortWrite
}
return nil
}
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.core.preprocessor."""
import numpy as np
import six
import tensorflow as tf
from object_detection.core import preprocessor
from object_detection.core import standard_fields as fields
if six.PY2:
import mock # pylint: disable=g-import-not-at-top
else:
from unittest import mock # pylint: disable=g-import-not-at-top
class PreprocessorTest(tf.test.TestCase):
def createColorfulTestImage(self):
ch255 = tf.fill([1, 100, 200, 1], tf.constant(255, dtype=tf.uint8))
ch128 = tf.fill([1, 100, 200, 1], tf.constant(128, dtype=tf.uint8))
ch0 = tf.fill([1, 100, 200, 1], tf.constant(0, dtype=tf.uint8))
imr = tf.concat([ch255, ch0, ch0], 3)
img = tf.concat([ch255, ch255, ch0], 3)
imb = tf.concat([ch255, ch0, ch255], 3)
imw = tf.concat([ch128, ch128, ch128], 3)
imu = tf.concat([imr, img], 2)
imd = tf.concat([imb, imw], 2)
im = tf.concat([imu, imd], 1)
return im
def createTestImages(self):
images_r = tf.constant([[[128, 128, 128, 128], [0, 0, 128, 128],
[0, 128, 128, 128], [192, 192, 128, 128]]],
dtype=tf.uint8)
images_r = tf.expand_dims(images_r, 3)
images_g = tf.constant([[[0, 0, 128, 128], [0, 0, 128, 128],
[0, 128, 192, 192], [192, 192, 128, 192]]],
dtype=tf.uint8)
images_g = tf.expand_dims(images_g, 3)
images_b = tf.constant([[[128, 128, 192, 0], [0, 0, 128, 192],
[0, 128, 128, 0], [192, 192, 192, 128]]],
dtype=tf.uint8)
images_b = tf.expand_dims(images_b, 3)
images = tf.concat([images_r, images_g, images_b], 3)
return images
def createTestBoxes(self):
boxes = tf.constant(
[[0.0, 0.25, 0.75, 1.0], [0.25, 0.5, 0.75, 1.0]], dtype=tf.float32)
return boxes
def createTestLabelScores(self):
return tf.constant([1.0, 0.5], dtype=tf.float32)
def createTestLabelScoresWithMissingScore(self):
return tf.constant([0.5, np.nan], dtype=tf.float32)
def createTestMasks(self):
mask = np.array([
[[255.0, 0.0, 0.0],
[255.0, 0.0, 0.0],
[255.0, 0.0, 0.0]],
[[255.0, 255.0, 0.0],
[255.0, 255.0, 0.0],
[255.0, 255.0, 0.0]]])
return tf.constant(mask, dtype=tf.float32)
def createTestKeypoints(self):
keypoints = np.array([
[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
[[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]],
])
return tf.constant(keypoints, dtype=tf.float32)
def createTestKeypointsInsideCrop(self):
keypoints = np.array([
[[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]],
[[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]],
])
return tf.constant(keypoints, dtype=tf.float32)
def createTestKeypointsOutsideCrop(self):
keypoints = np.array([
[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
])
return tf.constant(keypoints, dtype=tf.float32)
def createKeypointFlipPermutation(self):
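# For illustration: position i holds the index that keypoint i maps to after a
# horizontal flip, so keypoints 1 and 2 swap while keypoint 0 stays fixed.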
return np.array([0, 2, 1], dtype=np.int32)
def createTestLabels(self):
labels = tf.constant([1, 2], dtype=tf.int32)
return labels
def createTestBoxesOutOfImage(self):
boxes = tf.constant(
[[-0.1, 0.25, 0.75, 1], [0.25, 0.5, 0.75, 1.1]], dtype=tf.float32)
return boxes
def expectedImagesAfterNormalization(self):
images_r = tf.constant([[[0, 0, 0, 0], [-1, -1, 0, 0],
[-1, 0, 0, 0], [0.5, 0.5, 0, 0]]],
dtype=tf.float32)
images_r = tf.expand_dims(images_r, 3)
images_g = tf.constant([[[-1, -1, 0, 0], [-1, -1, 0, 0],
[-1, 0, 0.5, 0.5], [0.5, 0.5, 0, 0.5]]],
dtype=tf.float32)
images_g = tf.expand_dims(images_g, 3)
images_b = tf.constant([[[0, 0, 0.5, -1], [-1, -1, 0, 0.5],
[-1, 0, 0, -1], [0.5, 0.5, 0.5, 0]]],
dtype=tf.float32)
images_b = tf.expand_dims(images_b, 3)
images = tf.concat([images_r, images_g, images_b], 3)
return images
def expectedMaxImageAfterColorScale(self):
images_r = tf.constant([[[0.1, 0.1, 0.1, 0.1], [-0.9, -0.9, 0.1, 0.1],
[-0.9, 0.1, 0.1, 0.1], [0.6, 0.6, 0.1, 0.1]]],
dtype=tf.float32)
images_r = tf.expand_dims(images_r, 3)
images_g = tf.constant([[[-0.9, -0.9, 0.1, 0.1], [-0.9, -0.9, 0.1, 0.1],
[-0.9, 0.1, 0.6, 0.6], [0.6, 0.6, 0.1, 0.6]]],
dtype=tf.float32)
images_g = tf.expand_dims(images_g, 3)
images_b = tf.constant([[[0.1, 0.1, 0.6, -0.9], [-0.9, -0.9, 0.1, 0.6],
[-0.9, 0.1, 0.1, -0.9], [0.6, 0.6, 0.6, 0.1]]],
dtype=tf.float32)
images_b = tf.expand_dims(images_b, 3)
images = tf.concat([images_r, images_g, images_b], 3)
return images
def expectedMinImageAfterColorScale(self):
images_r = tf.constant([[[-0.1, -0.1, -0.1, -0.1], [-1, -1, -0.1, -0.1],
[-1, -0.1, -0.1, -0.1], [0.4, 0.4, -0.1, -0.1]]],
dtype=tf.float32)
images_r = tf.expand_dims(images_r, 3)
images_g = tf.constant([[[-1, -1, -0.1, -0.1], [-1, -1, -0.1, -0.1],
[-1, -0.1, 0.4, 0.4], [0.4, 0.4, -0.1, 0.4]]],
dtype=tf.float32)
images_g = tf.expand_dims(images_g, 3)
images_b = tf.constant([[[-0.1, -0.1, 0.4, -1], [-1, -1, -0.1, 0.4],
[-1, -0.1, -0.1, -1], [0.4, 0.4, 0.4, -0.1]]],
dtype=tf.float32)
images_b = tf.expand_dims(images_b, 3)
images = tf.concat([images_r, images_g, images_b], 3)
return images
def expectedImagesAfterMirroring(self):
images_r = tf.constant([[[0, 0, 0, 0], [0, 0, -1, -1],
[0, 0, 0, -1], [0, 0, 0.5, 0.5]]],
dtype=tf.float32)
images_r = tf.expand_dims(images_r, 3)
images_g = tf.constant([[[0, 0, -1, -1], [0, 0, -1, -1],
[0.5, 0.5, 0, -1], [0.5, 0, 0.5, 0.5]]],
dtype=tf.float32)
images_g = tf.expand_dims(images_g, 3)
images_b = tf.constant([[[-1, 0.5, 0, 0], [0.5, 0, -1, -1],
[-1, 0, 0, -1], [0, 0.5, 0.5, 0.5]]],
dtype=tf.float32)
images_b = tf.expand_dims(images_b, 3)
images = tf.concat([images_r, images_g, images_b], 3)
return images
def expectedBoxesAfterMirroring(self):
boxes = tf.constant([[0.0, 0.0, 0.75, 0.75], [0.25, 0.0, 0.75, 0.5]],
dtype=tf.float32)
return boxes
def expectedBoxesAfterXY(self):
boxes = tf.constant([[0.25, 0.0, 1.0, 0.75], [0.5, 0.25, 1, 0.75]],
dtype=tf.float32)
return boxes
def expectedMasksAfterMirroring(self):
mask = np.array([
[[0.0, 0.0, 255.0],
[0.0, 0.0, 255.0],
[0.0, 0.0, 255.0]],
[[0.0, 255.0, 255.0],
[0.0, 255.0, 255.0],
[0.0, 255.0, 255.0]]])
return tf.constant(mask, dtype=tf.float32)
def expectedLabelScoresAfterThresholding(self):
return tf.constant([1.0], dtype=tf.float32)
def expectedBoxesAfterThresholding(self):
return tf.constant([[0.0, 0.25, 0.75, 1.0]], dtype=tf.float32)
def expectedLabelsAfterThresholding(self):
return tf.constant([1], dtype=tf.float32)
def expectedMasksAfterThresholding(self):
mask = np.array([
[[255.0, 0.0, 0.0],
[255.0, 0.0, 0.0],
[255.0, 0.0, 0.0]]])
return tf.constant(mask, dtype=tf.float32)
def expectedKeypointsAfterThresholding(self):
keypoints = np.array([
[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]]
])
return tf.constant(keypoints, dtype=tf.float32)
def expectedLabelScoresAfterThresholdingWithMissingScore(self):
return tf.constant([np.nan], dtype=tf.float32)
def expectedBoxesAfterThresholdingWithMissingScore(self):
return tf.constant([[0.25, 0.5, 0.75, 1]], dtype=tf.float32)
def expectedLabelsAfterThresholdingWithMissingScore(self):
return tf.constant([2], dtype=tf.float32)
def testNormalizeImage(self):
preprocess_options = [(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 256,
'target_minval': -1,
'target_maxval': 1
})]
images = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
images = tensor_dict[fields.InputDataFields.image]
images_expected = self.expectedImagesAfterNormalization()
with self.test_session() as sess:
(images_, images_expected_) = sess.run(
[images, images_expected])
images_shape_ = images_.shape
images_expected_shape_ = images_expected_.shape
expected_shape = [1, 4, 4, 3]
self.assertAllEqual(images_expected_shape_, images_shape_)
self.assertAllEqual(images_shape_, expected_shape)
self.assertAllClose(images_, images_expected_)
def testRetainBoxesAboveThreshold(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScores()
(retained_boxes, retained_labels,
retained_label_scores) = preprocessor.retain_boxes_above_threshold(
boxes, labels, label_scores, threshold=0.6)
with self.test_session() as sess:
(retained_boxes_, retained_labels_, retained_label_scores_,
expected_retained_boxes_, expected_retained_labels_,
expected_retained_label_scores_) = sess.run([
retained_boxes, retained_labels, retained_label_scores,
self.expectedBoxesAfterThresholding(),
self.expectedLabelsAfterThresholding(),
self.expectedLabelScoresAfterThresholding()])
self.assertAllClose(
retained_boxes_, expected_retained_boxes_)
self.assertAllClose(
retained_labels_, expected_retained_labels_)
self.assertAllClose(
retained_label_scores_, expected_retained_label_scores_)
def testRetainBoxesAboveThresholdWithMasks(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScores()
masks = self.createTestMasks()
_, _, _, retained_masks = preprocessor.retain_boxes_above_threshold(
boxes, labels, label_scores, masks, threshold=0.6)
with self.test_session() as sess:
retained_masks_, expected_retained_masks_ = sess.run([
retained_masks, self.expectedMasksAfterThresholding()])
self.assertAllClose(
retained_masks_, expected_retained_masks_)
def testRetainBoxesAboveThresholdWithKeypoints(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScores()
keypoints = self.createTestKeypoints()
(_, _, _, retained_keypoints) = preprocessor.retain_boxes_above_threshold(
boxes, labels, label_scores, keypoints=keypoints, threshold=0.6)
with self.test_session() as sess:
(retained_keypoints_,
expected_retained_keypoints_) = sess.run([
retained_keypoints,
self.expectedKeypointsAfterThresholding()])
self.assertAllClose(
retained_keypoints_, expected_retained_keypoints_)
def testRetainBoxesAboveThresholdWithMissingScore(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScoresWithMissingScore()
(retained_boxes, retained_labels,
retained_label_scores) = preprocessor.retain_boxes_above_threshold(
boxes, labels, label_scores, threshold=0.6)
with self.test_session() as sess:
(retained_boxes_, retained_labels_, retained_label_scores_,
expected_retained_boxes_, expected_retained_labels_,
expected_retained_label_scores_) = sess.run([
retained_boxes, retained_labels, retained_label_scores,
self.expectedBoxesAfterThresholdingWithMissingScore(),
self.expectedLabelsAfterThresholdingWithMissingScore(),
self.expectedLabelScoresAfterThresholdingWithMissingScore()])
self.assertAllClose(
retained_boxes_, expected_retained_boxes_)
self.assertAllClose(
retained_labels_, expected_retained_labels_)
self.assertAllClose(
retained_label_scores_, expected_retained_label_scores_)
def testRandomFlipBoxes(self):
boxes = self.createTestBoxes()
# Case where the boxes are flipped.
boxes_expected1 = self.expectedBoxesAfterMirroring()
# Case where the boxes are not flipped.
boxes_expected2 = boxes
# After elementwise multiplication, the result should be all-zero since one
# of them is all-zero.
boxes_diff = tf.multiply(
tf.squared_difference(boxes, boxes_expected1),
tf.squared_difference(boxes, boxes_expected2))
expected_result = tf.zeros_like(boxes_diff)
with self.test_session() as sess:
(boxes_diff, expected_result) = sess.run([boxes_diff, expected_result])
self.assertAllEqual(boxes_diff, expected_result)
def testFlipMasks(self):
test_mask = self.createTestMasks()
flipped_mask = preprocessor._flip_masks(test_mask)
expected_mask = self.expectedMasksAfterMirroring()
with self.test_session() as sess:
flipped_mask, expected_mask = sess.run([flipped_mask, expected_mask])
self.assertAllEqual(flipped_mask.flatten(), expected_mask.flatten())
def testRandomHorizontalFlip(self):
preprocess_options = [(preprocessor.random_horizontal_flip, {})]
images = self.expectedImagesAfterNormalization()
boxes = self.createTestBoxes()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes}
images_expected1 = self.expectedImagesAfterMirroring()
boxes_expected1 = self.expectedBoxesAfterMirroring()
images_expected2 = images
boxes_expected2 = boxes
tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
images = tensor_dict[fields.InputDataFields.image]
boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
boxes_diff1 = tf.squared_difference(boxes, boxes_expected1)
boxes_diff2 = tf.squared_difference(boxes, boxes_expected2)
boxes_diff = tf.multiply(boxes_diff1, boxes_diff2)
boxes_diff_expected = tf.zeros_like(boxes_diff)
images_diff1 = tf.squared_difference(images, images_expected1)
images_diff2 = tf.squared_difference(images, images_expected2)
images_diff = tf.multiply(images_diff1, images_diff2)
images_diff_expected = tf.zeros_like(images_diff)
with self.test_session() as sess:
(images_diff_, images_diff_expected_, boxes_diff_,
boxes_diff_expected_) = sess.run([images_diff, images_diff_expected,
boxes_diff, boxes_diff_expected])
self.assertAllClose(boxes_diff_, boxes_diff_expected_)
self.assertAllClose(images_diff_, images_diff_expected_)
def testRunRandomHorizontalFlipWithMaskAndKeypoints(self):
image_height = 3
image_width = 3
images = tf.random_uniform([1, image_height, image_width, 3])
boxes = self.createTestBoxes()
masks = self.createTestMasks()
keypoints = self.createTestKeypoints()
keypoint_flip_permutation = self.createKeypointFlipPermutation()
tensor_dict = {
fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_instance_masks: masks,
fields.InputDataFields.groundtruth_keypoints: keypoints
}
preprocess_options = [
(preprocessor.random_horizontal_flip,
{'keypoint_flip_permutation': keypoint_flip_permutation})]
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_instance_masks=True, include_keypoints=True)
tensor_dict = preprocessor.preprocess(
tensor_dict, preprocess_options, func_arg_map=preprocessor_arg_map)
boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
masks = tensor_dict[fields.InputDataFields.groundtruth_instance_masks]
keypoints = tensor_dict[fields.InputDataFields.groundtruth_keypoints]
with self.test_session() as sess:
boxes, masks, keypoints = sess.run([boxes, masks, keypoints])
self.assertTrue(boxes is not None)
self.assertTrue(masks is not None)
self.assertTrue(keypoints is not None)
def testRandomPixelValueScale(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_pixel_value_scale, {}))
images = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images_min = tf.to_float(images) * 0.9 / 255.0
images_max = tf.to_float(images) * 1.1 / 255.0
images = tensor_dict[fields.InputDataFields.image]
values_greater = tf.greater_equal(images, images_min)
values_less = tf.less_equal(images, images_max)
values_true = tf.fill([1, 4, 4, 3], True)
with self.test_session() as sess:
(values_greater_, values_less_, values_true_) = sess.run(
[values_greater, values_less, values_true])
self.assertAllClose(values_greater_, values_true_)
self.assertAllClose(values_less_, values_true_)
def testRandomImageScale(self):
preprocess_options = [(preprocessor.random_image_scale, {})]
images_original = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images_original}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
images_scaled = tensor_dict[fields.InputDataFields.image]
images_original_shape = tf.shape(images_original)
images_scaled_shape = tf.shape(images_scaled)
with self.test_session() as sess:
(images_original_shape_, images_scaled_shape_) = sess.run(
[images_original_shape, images_scaled_shape])
self.assertTrue(
images_original_shape_[1] * 0.5 <= images_scaled_shape_[1])
self.assertTrue(
images_original_shape_[1] * 2.0 >= images_scaled_shape_[1])
self.assertTrue(
images_original_shape_[2] * 0.5 <= images_scaled_shape_[2])
self.assertTrue(
images_original_shape_[2] * 2.0 >= images_scaled_shape_[2])
def testRandomRGBtoGray(self):
preprocess_options = [(preprocessor.random_rgb_to_gray, {})]
images_original = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images_original}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocess_options)
images_gray = tensor_dict[fields.InputDataFields.image]
images_gray_r, images_gray_g, images_gray_b = tf.split(
value=images_gray, num_or_size_splits=3, axis=3)
images_r, images_g, images_b = tf.split(
value=images_original, num_or_size_splits=3, axis=3)
images_r_diff1 = tf.squared_difference(tf.to_float(images_r),
tf.to_float(images_gray_r))
images_r_diff2 = tf.squared_difference(tf.to_float(images_gray_r),
tf.to_float(images_gray_g))
images_r_diff = tf.multiply(images_r_diff1, images_r_diff2)
images_g_diff1 = tf.squared_difference(tf.to_float(images_g),
tf.to_float(images_gray_g))
images_g_diff2 = tf.squared_difference(tf.to_float(images_gray_g),
tf.to_float(images_gray_b))
images_g_diff = tf.multiply(images_g_diff1, images_g_diff2)
images_b_diff1 = tf.squared_difference(tf.to_float(images_b),
tf.to_float(images_gray_b))
images_b_diff2 = tf.squared_difference(tf.to_float(images_gray_b),
tf.to_float(images_gray_r))
images_b_diff = tf.multiply(images_b_diff1, images_b_diff2)
image_zero1 = tf.constant(0, dtype=tf.float32, shape=[1, 4, 4, 1])
with self.test_session() as sess:
(images_r_diff_, images_g_diff_, images_b_diff_, image_zero1_) = sess.run(
[images_r_diff, images_g_diff, images_b_diff, image_zero1])
self.assertAllClose(images_r_diff_, image_zero1_)
self.assertAllClose(images_g_diff_, image_zero1_)
self.assertAllClose(images_b_diff_, image_zero1_)
def testRandomAdjustBrightness(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_adjust_brightness, {}))
images_original = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images_original}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images_bright = tensor_dict[fields.InputDataFields.image]
image_original_shape = tf.shape(images_original)
image_bright_shape = tf.shape(images_bright)
with self.test_session() as sess:
(image_original_shape_, image_bright_shape_) = sess.run(
[image_original_shape, image_bright_shape])
self.assertAllEqual(image_original_shape_, image_bright_shape_)
def testRandomAdjustContrast(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_adjust_contrast, {}))
images_original = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images_original}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images_contrast = tensor_dict[fields.InputDataFields.image]
image_original_shape = tf.shape(images_original)
image_contrast_shape = tf.shape(images_contrast)
with self.test_session() as sess:
(image_original_shape_, image_contrast_shape_) = sess.run(
[image_original_shape, image_contrast_shape])
self.assertAllEqual(image_original_shape_, image_contrast_shape_)
def testRandomAdjustHue(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_adjust_hue, {}))
images_original = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images_original}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images_hue = tensor_dict[fields.InputDataFields.image]
image_original_shape = tf.shape(images_original)
image_hue_shape = tf.shape(images_hue)
with self.test_session() as sess:
(image_original_shape_, image_hue_shape_) = sess.run(
[image_original_shape, image_hue_shape])
self.assertAllEqual(image_original_shape_, image_hue_shape_)
def testRandomDistortColor(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_distort_color, {}))
images_original = self.createTestImages()
images_original_shape = tf.shape(images_original)
tensor_dict = {fields.InputDataFields.image: images_original}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images_distorted_color = tensor_dict[fields.InputDataFields.image]
images_distorted_color_shape = tf.shape(images_distorted_color)
with self.test_session() as sess:
(images_original_shape_, images_distorted_color_shape_) = sess.run(
[images_original_shape, images_distorted_color_shape])
self.assertAllEqual(images_original_shape_, images_distorted_color_shape_)
def testRandomJitterBoxes(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.random_jitter_boxes, {}))
boxes = self.createTestBoxes()
boxes_shape = tf.shape(boxes)
tensor_dict = {fields.InputDataFields.groundtruth_boxes: boxes}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
distorted_boxes = tensor_dict[fields.InputDataFields.groundtruth_boxes]
distorted_boxes_shape = tf.shape(distorted_boxes)
with self.test_session() as sess:
(boxes_shape_, distorted_boxes_shape_) = sess.run(
[boxes_shape, distorted_boxes_shape])
self.assertAllEqual(boxes_shape_, distorted_boxes_shape_)
def testRandomCropImage(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_crop_image, {}))
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
self.assertEqual(3, distorted_images.get_shape()[3])
with self.test_session() as sess:
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = sess.run([
boxes_rank, distorted_boxes_rank, images_rank, distorted_images_rank
])
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
def testRandomCropImageGrayscale(self):
preprocessing_options = [(preprocessor.rgb_to_gray, {}),
(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1,
}),
(preprocessor.random_crop_image, {})]
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {
fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels
}
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
self.assertEqual(1, distorted_images.get_shape()[3])
with self.test_session() as sess:
session_results = sess.run([
boxes_rank, distorted_boxes_rank, images_rank, distorted_images_rank
])
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = session_results
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
def testRandomCropImageWithBoxOutOfImage(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_crop_image, {}))
images = self.createTestImages()
boxes = self.createTestBoxesOutOfImage()
labels = self.createTestLabels()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
with self.test_session() as sess:
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = sess.run(
[boxes_rank, distorted_boxes_rank, images_rank,
distorted_images_rank])
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
def testRandomCropImageWithRandomCoefOne(self):
preprocessing_options = [(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
})]
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images = tensor_dict[fields.InputDataFields.image]
preprocessing_options = [(preprocessor.random_crop_image, {
'random_coef': 1.0
})]
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
boxes_shape = tf.shape(boxes)
distorted_boxes_shape = tf.shape(distorted_boxes)
images_shape = tf.shape(images)
distorted_images_shape = tf.shape(distorted_images)
with self.test_session() as sess:
(boxes_shape_, distorted_boxes_shape_, images_shape_,
distorted_images_shape_, images_, distorted_images_,
boxes_, distorted_boxes_, labels_, distorted_labels_) = sess.run(
[boxes_shape, distorted_boxes_shape, images_shape,
distorted_images_shape, images, distorted_images,
boxes, distorted_boxes, labels, distorted_labels])
self.assertAllEqual(boxes_shape_, distorted_boxes_shape_)
self.assertAllEqual(images_shape_, distorted_images_shape_)
self.assertAllClose(images_, distorted_images_)
self.assertAllClose(boxes_, distorted_boxes_)
self.assertAllEqual(labels_, distorted_labels_)
def testRandomCropWithMockSampleDistortedBoundingBox(self):
preprocessing_options = [(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
})]
images = self.createColorfulTestImage()
boxes = tf.constant([[0.1, 0.1, 0.8, 0.3],
[0.2, 0.4, 0.75, 0.75],
[0.3, 0.1, 0.4, 0.7]], dtype=tf.float32)
labels = tf.constant([1, 7, 11], dtype=tf.int32)
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images = tensor_dict[fields.InputDataFields.image]
preprocessing_options = [(preprocessor.random_crop_image, {})]
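    # tf.image.sample_distorted_bounding_box returns (begin, size, bboxes);
    # mocking it pins the crop window so the expected boxes/labels below are
    # deterministic (a size of -1 keeps the full extent of that dimension).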
with mock.patch.object(
tf.image,
'sample_distorted_bounding_box') as mock_sample_distorted_bounding_box:
mock_sample_distorted_bounding_box.return_value = (tf.constant(
[6, 143, 0], dtype=tf.int32), tf.constant(
[190, 237, -1], dtype=tf.int32), tf.constant(
[[[0.03, 0.3575, 0.98, 0.95]]], dtype=tf.float32))
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
expected_boxes = tf.constant([[0.178947, 0.07173, 0.75789469, 0.66244733],
[0.28421, 0.0, 0.38947365, 0.57805908]],
dtype=tf.float32)
expected_labels = tf.constant([7, 11], dtype=tf.int32)
with self.test_session() as sess:
(distorted_boxes_, distorted_labels_,
expected_boxes_, expected_labels_) = sess.run(
[distorted_boxes, distorted_labels,
expected_boxes, expected_labels])
self.assertAllClose(distorted_boxes_, expected_boxes_)
self.assertAllEqual(distorted_labels_, expected_labels_)
def testStrictRandomCropImageWithMasks(self):
image = self.createColorfulTestImage()[0]
boxes = self.createTestBoxes()
labels = self.createTestLabels()
masks = tf.random_uniform([2, 200, 400], dtype=tf.float32)
with mock.patch.object(
tf.image,
'sample_distorted_bounding_box'
) as mock_sample_distorted_bounding_box:
mock_sample_distorted_bounding_box.return_value = (
tf.constant([6, 143, 0], dtype=tf.int32),
tf.constant([190, 237, -1], dtype=tf.int32),
tf.constant([[[0.03, 0.3575, 0.98, 0.95]]], dtype=tf.float32))
(new_image, new_boxes, new_labels,
new_masks) = preprocessor._strict_random_crop_image(
image, boxes, labels, masks=masks)
with self.test_session() as sess:
new_image, new_boxes, new_labels, new_masks = sess.run([
new_image, new_boxes, new_labels, new_masks])
expected_boxes = np.array([
[0.0, 0.0, 0.75789469, 1.0],
[0.23157893, 0.24050637, 0.75789469, 1.0],
], dtype=np.float32)
self.assertAllEqual(new_image.shape, [190, 237, 3])
self.assertAllEqual(new_masks.shape, [2, 190, 237])
self.assertAllClose(
new_boxes.flatten(), expected_boxes.flatten())
def testStrictRandomCropImageWithKeypoints(self):
image = self.createColorfulTestImage()[0]
boxes = self.createTestBoxes()
labels = self.createTestLabels()
keypoints = self.createTestKeypoints()
with mock.patch.object(
tf.image,
'sample_distorted_bounding_box'
) as mock_sample_distorted_bounding_box:
mock_sample_distorted_bounding_box.return_value = (
tf.constant([6, 143, 0], dtype=tf.int32),
tf.constant([190, 237, -1], dtype=tf.int32),
tf.constant([[[0.03, 0.3575, 0.98, 0.95]]], dtype=tf.float32))
(new_image, new_boxes, new_labels,
new_keypoints) = preprocessor._strict_random_crop_image(
image, boxes, labels, keypoints=keypoints)
with self.test_session() as sess:
new_image, new_boxes, new_labels, new_keypoints = sess.run([
new_image, new_boxes, new_labels, new_keypoints])
expected_boxes = np.array([
[0.0, 0.0, 0.75789469, 1.0],
[0.23157893, 0.24050637, 0.75789469, 1.0],
], dtype=np.float32)
expected_keypoints = np.array([
[[np.nan, np.nan],
[np.nan, np.nan],
[np.nan, np.nan]],
[[0.38947368, 0.07173],
[0.49473682, 0.24050637],
[0.60000002, 0.40928277]]
], dtype=np.float32)
self.assertAllEqual(new_image.shape, [190, 237, 3])
self.assertAllClose(
new_boxes.flatten(), expected_boxes.flatten())
self.assertAllClose(
new_keypoints.flatten(), expected_keypoints.flatten())
def testRunRandomCropImageWithMasks(self):
image = self.createColorfulTestImage()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
masks = tf.random_uniform([2, 200, 400], dtype=tf.float32)
tensor_dict = {
fields.InputDataFields.image: image,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_instance_masks: masks,
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_instance_masks=True)
preprocessing_options = [(preprocessor.random_crop_image, {})]
with mock.patch.object(
tf.image,
'sample_distorted_bounding_box'
) as mock_sample_distorted_bounding_box:
mock_sample_distorted_bounding_box.return_value = (
tf.constant([6, 143, 0], dtype=tf.int32),
tf.constant([190, 237, -1], dtype=tf.int32),
tf.constant([[[0.03, 0.3575, 0.98, 0.95]]], dtype=tf.float32))
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
distorted_image = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
distorted_masks = distorted_tensor_dict[
fields.InputDataFields.groundtruth_instance_masks]
with self.test_session() as sess:
(distorted_image_, distorted_boxes_, distorted_labels_,
distorted_masks_) = sess.run(
[distorted_image, distorted_boxes, distorted_labels,
distorted_masks])
expected_boxes = np.array([
[0.0, 0.0, 0.75789469, 1.0],
[0.23157893, 0.24050637, 0.75789469, 1.0],
], dtype=np.float32)
self.assertAllEqual(distorted_image_.shape, [1, 190, 237, 3])
self.assertAllEqual(distorted_masks_.shape, [2, 190, 237])
self.assertAllEqual(distorted_labels_, [1, 2])
self.assertAllClose(
distorted_boxes_.flatten(), expected_boxes.flatten())
def testRunRandomCropImageWithKeypointsInsideCrop(self):
image = self.createColorfulTestImage()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
keypoints = self.createTestKeypointsInsideCrop()
tensor_dict = {
fields.InputDataFields.image: image,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_keypoints: keypoints
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_keypoints=True)
preprocessing_options = [(preprocessor.random_crop_image, {})]
with mock.patch.object(
tf.image,
'sample_distorted_bounding_box'
) as mock_sample_distorted_bounding_box:
mock_sample_distorted_bounding_box.return_value = (
tf.constant([6, 143, 0], dtype=tf.int32),
tf.constant([190, 237, -1], dtype=tf.int32),
tf.constant([[[0.03, 0.3575, 0.98, 0.95]]], dtype=tf.float32))
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
distorted_image = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
distorted_keypoints = distorted_tensor_dict[
fields.InputDataFields.groundtruth_keypoints]
with self.test_session() as sess:
(distorted_image_, distorted_boxes_, distorted_labels_,
distorted_keypoints_) = sess.run(
[distorted_image, distorted_boxes, distorted_labels,
distorted_keypoints])
expected_boxes = np.array([
[0.0, 0.0, 0.75789469, 1.0],
[0.23157893, 0.24050637, 0.75789469, 1.0],
], dtype=np.float32)
expected_keypoints = np.array([
[[0.38947368, 0.07173],
[0.49473682, 0.24050637],
[0.60000002, 0.40928277]],
[[0.38947368, 0.07173],
[0.49473682, 0.24050637],
[0.60000002, 0.40928277]]
])
self.assertAllEqual(distorted_image_.shape, [1, 190, 237, 3])
self.assertAllEqual(distorted_labels_, [1, 2])
self.assertAllClose(
distorted_boxes_.flatten(), expected_boxes.flatten())
self.assertAllClose(
distorted_keypoints_.flatten(), expected_keypoints.flatten())
def testRunRandomCropImageWithKeypointsOutsideCrop(self):
image = self.createColorfulTestImage()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
keypoints = self.createTestKeypointsOutsideCrop()
tensor_dict = {
fields.InputDataFields.image: image,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_keypoints: keypoints
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_keypoints=True)
preprocessing_options = [(preprocessor.random_crop_image, {})]
with mock.patch.object(
tf.image,
'sample_distorted_bounding_box'
) as mock_sample_distorted_bounding_box:
mock_sample_distorted_bounding_box.return_value = (
tf.constant([6, 143, 0], dtype=tf.int32),
tf.constant([190, 237, -1], dtype=tf.int32),
tf.constant([[[0.03, 0.3575, 0.98, 0.95]]], dtype=tf.float32))
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
distorted_image = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
distorted_keypoints = distorted_tensor_dict[
fields.InputDataFields.groundtruth_keypoints]
with self.test_session() as sess:
(distorted_image_, distorted_boxes_, distorted_labels_,
distorted_keypoints_) = sess.run(
[distorted_image, distorted_boxes, distorted_labels,
distorted_keypoints])
expected_boxes = np.array([
[0.0, 0.0, 0.75789469, 1.0],
[0.23157893, 0.24050637, 0.75789469, 1.0],
], dtype=np.float32)
expected_keypoints = np.array([
[[np.nan, np.nan],
[np.nan, np.nan],
[np.nan, np.nan]],
[[np.nan, np.nan],
[np.nan, np.nan],
[np.nan, np.nan]],
])
self.assertAllEqual(distorted_image_.shape, [1, 190, 237, 3])
self.assertAllEqual(distorted_labels_, [1, 2])
self.assertAllClose(
distorted_boxes_.flatten(), expected_boxes.flatten())
self.assertAllClose(
distorted_keypoints_.flatten(), expected_keypoints.flatten())
def testRunRetainBoxesAboveThreshold(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScores()
tensor_dict = {
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_label_scores: label_scores
}
preprocessing_options = [
(preprocessor.retain_boxes_above_threshold, {'threshold': 0.6})
]
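    # Only boxes whose label score clears the 0.6 threshold survive; boxes,
    # classes and scores are filtered together so they stay aligned.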
retained_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options)
retained_boxes = retained_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
retained_labels = retained_tensor_dict[
fields.InputDataFields.groundtruth_classes]
retained_label_scores = retained_tensor_dict[
fields.InputDataFields.groundtruth_label_scores]
with self.test_session() as sess:
(retained_boxes_, retained_labels_,
retained_label_scores_, expected_retained_boxes_,
expected_retained_labels_, expected_retained_label_scores_) = sess.run(
[retained_boxes, retained_labels, retained_label_scores,
self.expectedBoxesAfterThresholding(),
self.expectedLabelsAfterThresholding(),
self.expectedLabelScoresAfterThresholding()])
self.assertAllClose(retained_boxes_, expected_retained_boxes_)
self.assertAllClose(retained_labels_, expected_retained_labels_)
self.assertAllClose(
retained_label_scores_, expected_retained_label_scores_)
def testRunRetainBoxesAboveThresholdWithMasks(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScores()
masks = self.createTestMasks()
tensor_dict = {
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_label_scores: label_scores,
fields.InputDataFields.groundtruth_instance_masks: masks
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_instance_masks=True)
preprocessing_options = [
(preprocessor.retain_boxes_above_threshold, {'threshold': 0.6})
]
retained_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
retained_masks = retained_tensor_dict[
fields.InputDataFields.groundtruth_instance_masks]
with self.test_session() as sess:
(retained_masks_, expected_masks_) = sess.run(
[retained_masks,
self.expectedMasksAfterThresholding()])
self.assertAllClose(retained_masks_, expected_masks_)
def testRunRetainBoxesAboveThresholdWithKeypoints(self):
boxes = self.createTestBoxes()
labels = self.createTestLabels()
label_scores = self.createTestLabelScores()
keypoints = self.createTestKeypoints()
tensor_dict = {
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_label_scores: label_scores,
fields.InputDataFields.groundtruth_keypoints: keypoints
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_keypoints=True)
preprocessing_options = [
(preprocessor.retain_boxes_above_threshold, {'threshold': 0.6})
]
retained_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
retained_keypoints = retained_tensor_dict[
fields.InputDataFields.groundtruth_keypoints]
with self.test_session() as sess:
(retained_keypoints_, expected_keypoints_) = sess.run(
[retained_keypoints,
self.expectedKeypointsAfterThresholding()])
self.assertAllClose(retained_keypoints_, expected_keypoints_)
def testRunRandomCropToAspectRatioWithMasks(self):
image = self.createColorfulTestImage()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
masks = tf.random_uniform([2, 200, 400], dtype=tf.float32)
tensor_dict = {
fields.InputDataFields.image: image,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_instance_masks: masks
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_instance_masks=True)
preprocessing_options = [(preprocessor.random_crop_to_aspect_ratio, {})]
with mock.patch.object(preprocessor,
'_random_integer') as mock_random_integer:
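      # Pin the random crop offset to zero so the aspect-ratio crop is always
      # taken from the same corner, making the expected values deterministic.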
mock_random_integer.return_value = tf.constant(0, dtype=tf.int32)
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
distorted_image = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
distorted_masks = distorted_tensor_dict[
fields.InputDataFields.groundtruth_instance_masks]
with self.test_session() as sess:
(distorted_image_, distorted_boxes_, distorted_labels_,
distorted_masks_) = sess.run([
distorted_image, distorted_boxes, distorted_labels, distorted_masks
])
expected_boxes = np.array([0.0, 0.5, 0.75, 1.0], dtype=np.float32)
self.assertAllEqual(distorted_image_.shape, [1, 200, 200, 3])
self.assertAllEqual(distorted_labels_, [1])
self.assertAllClose(distorted_boxes_.flatten(),
expected_boxes.flatten())
self.assertAllEqual(distorted_masks_.shape, [1, 200, 200])
def testRunRandomCropToAspectRatioWithKeypoints(self):
image = self.createColorfulTestImage()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
keypoints = self.createTestKeypoints()
tensor_dict = {
fields.InputDataFields.image: image,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_keypoints: keypoints
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_keypoints=True)
preprocessing_options = [(preprocessor.random_crop_to_aspect_ratio, {})]
with mock.patch.object(preprocessor,
'_random_integer') as mock_random_integer:
mock_random_integer.return_value = tf.constant(0, dtype=tf.int32)
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
distorted_image = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
distorted_labels = distorted_tensor_dict[
fields.InputDataFields.groundtruth_classes]
distorted_keypoints = distorted_tensor_dict[
fields.InputDataFields.groundtruth_keypoints]
with self.test_session() as sess:
(distorted_image_, distorted_boxes_, distorted_labels_,
distorted_keypoints_) = sess.run([
distorted_image, distorted_boxes, distorted_labels,
distorted_keypoints
])
expected_boxes = np.array([0.0, 0.5, 0.75, 1.0], dtype=np.float32)
expected_keypoints = np.array(
[[0.1, 0.2], [0.2, 0.4], [0.3, 0.6]], dtype=np.float32)
self.assertAllEqual(distorted_image_.shape, [1, 200, 200, 3])
self.assertAllEqual(distorted_labels_, [1])
self.assertAllClose(distorted_boxes_.flatten(),
expected_boxes.flatten())
self.assertAllClose(distorted_keypoints_.flatten(),
expected_keypoints.flatten())
def testRandomPadImage(self):
preprocessing_options = [(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
})]
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images = tensor_dict[fields.InputDataFields.image]
preprocessing_options = [(preprocessor.random_pad_image, {})]
padded_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
padded_images = padded_tensor_dict[fields.InputDataFields.image]
padded_boxes = padded_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
boxes_shape = tf.shape(boxes)
padded_boxes_shape = tf.shape(padded_boxes)
images_shape = tf.shape(images)
padded_images_shape = tf.shape(padded_images)
with self.test_session() as sess:
(boxes_shape_, padded_boxes_shape_, images_shape_,
padded_images_shape_, boxes_, padded_boxes_) = sess.run(
[boxes_shape, padded_boxes_shape, images_shape,
padded_images_shape, boxes, padded_boxes])
self.assertAllEqual(boxes_shape_, padded_boxes_shape_)
      self.assertTrue((images_shape_[1] >= padded_images_shape_[1] * 0.5).all())
      self.assertTrue((images_shape_[2] >= padded_images_shape_[2] * 0.5).all())
      self.assertTrue((images_shape_[1] <= padded_images_shape_[1]).all())
      self.assertTrue((images_shape_[2] <= padded_images_shape_[2]).all())
self.assertTrue(np.all((boxes_[:, 2] - boxes_[:, 0]) >= (
padded_boxes_[:, 2] - padded_boxes_[:, 0])))
self.assertTrue(np.all((boxes_[:, 3] - boxes_[:, 1]) >= (
padded_boxes_[:, 3] - padded_boxes_[:, 1])))
def testRandomCropPadImageWithRandomCoefOne(self):
preprocessing_options = [(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
})]
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images = tensor_dict[fields.InputDataFields.image]
preprocessing_options = [(preprocessor.random_crop_pad_image, {
'random_coef': 1.0
})]
padded_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
padded_images = padded_tensor_dict[fields.InputDataFields.image]
padded_boxes = padded_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
boxes_shape = tf.shape(boxes)
padded_boxes_shape = tf.shape(padded_boxes)
images_shape = tf.shape(images)
padded_images_shape = tf.shape(padded_images)
with self.test_session() as sess:
(boxes_shape_, padded_boxes_shape_, images_shape_,
padded_images_shape_, boxes_, padded_boxes_) = sess.run(
[boxes_shape, padded_boxes_shape, images_shape,
padded_images_shape, boxes, padded_boxes])
self.assertAllEqual(boxes_shape_, padded_boxes_shape_)
      self.assertTrue((images_shape_[1] >= padded_images_shape_[1] * 0.5).all())
      self.assertTrue((images_shape_[2] >= padded_images_shape_[2] * 0.5).all())
      self.assertTrue((images_shape_[1] <= padded_images_shape_[1]).all())
      self.assertTrue((images_shape_[2] <= padded_images_shape_[2]).all())
self.assertTrue(np.all((boxes_[:, 2] - boxes_[:, 0]) >= (
padded_boxes_[:, 2] - padded_boxes_[:, 0])))
self.assertTrue(np.all((boxes_[:, 3] - boxes_[:, 1]) >= (
padded_boxes_[:, 3] - padded_boxes_[:, 1])))
def testRandomCropToAspectRatio(self):
preprocessing_options = [(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
})]
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {
fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels
}
tensor_dict = preprocessor.preprocess(tensor_dict, preprocessing_options)
images = tensor_dict[fields.InputDataFields.image]
preprocessing_options = [(preprocessor.random_crop_to_aspect_ratio, {
'aspect_ratio': 2.0
})]
cropped_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
cropped_images = cropped_tensor_dict[fields.InputDataFields.image]
cropped_boxes = cropped_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
boxes_shape = tf.shape(boxes)
cropped_boxes_shape = tf.shape(cropped_boxes)
images_shape = tf.shape(images)
cropped_images_shape = tf.shape(cropped_images)
with self.test_session() as sess:
(boxes_shape_, cropped_boxes_shape_, images_shape_,
cropped_images_shape_) = sess.run([
boxes_shape, cropped_boxes_shape, images_shape, cropped_images_shape
])
self.assertAllEqual(boxes_shape_, cropped_boxes_shape_)
self.assertEqual(images_shape_[1], cropped_images_shape_[1] * 2)
self.assertEqual(images_shape_[2], cropped_images_shape_[2])
def testRandomBlackPatches(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_black_patches, {
'size_to_image_ratio': 0.5
}))
images = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images}
blacked_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
blacked_images = blacked_tensor_dict[fields.InputDataFields.image]
images_shape = tf.shape(images)
blacked_images_shape = tf.shape(blacked_images)
with self.test_session() as sess:
(images_shape_, blacked_images_shape_) = sess.run(
[images_shape, blacked_images_shape])
self.assertAllEqual(images_shape_, blacked_images_shape_)
def testRandomResizeMethod(self):
preprocessing_options = []
preprocessing_options.append((preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}))
preprocessing_options.append((preprocessor.random_resize_method, {
'target_size': (75, 150)
}))
images = self.createTestImages()
tensor_dict = {fields.InputDataFields.image: images}
resized_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
resized_images = resized_tensor_dict[fields.InputDataFields.image]
resized_images_shape = tf.shape(resized_images)
expected_images_shape = tf.constant([1, 75, 150, 3], dtype=tf.int32)
with self.test_session() as sess:
(expected_images_shape_, resized_images_shape_) = sess.run(
[expected_images_shape, resized_images_shape])
self.assertAllEqual(expected_images_shape_,
resized_images_shape_)
def testResizeToRangePreservesStaticSpatialShape(self):
"""Tests image resizing, checking output sizes."""
in_shape_list = [[60, 40, 3], [15, 30, 3], [15, 50, 3]]
min_dim = 50
max_dim = 100
expected_shape_list = [[75, 50, 3], [50, 100, 3], [30, 100, 3]]
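    # e.g. [60, 40] scales by 50/40 = 1.25 to [75, 50]; [15, 50] would
    # overshoot max_dim, so it is scaled by 100/50 = 2 to [30, 100] instead.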
for in_shape, expected_shape in zip(in_shape_list, expected_shape_list):
in_image = tf.random_uniform(in_shape)
out_image = preprocessor.resize_to_range(
in_image, min_dimension=min_dim, max_dimension=max_dim)
self.assertAllEqual(out_image.get_shape().as_list(), expected_shape)
def testResizeToRangeWithDynamicSpatialShape(self):
"""Tests image resizing, checking output sizes."""
in_shape_list = [[60, 40, 3], [15, 30, 3], [15, 50, 3]]
min_dim = 50
max_dim = 100
expected_shape_list = [[75, 50, 3], [50, 100, 3], [30, 100, 3]]
for in_shape, expected_shape in zip(in_shape_list, expected_shape_list):
in_image = tf.placeholder(tf.float32, shape=(None, None, 3))
out_image = preprocessor.resize_to_range(
in_image, min_dimension=min_dim, max_dimension=max_dim)
out_image_shape = tf.shape(out_image)
with self.test_session() as sess:
out_image_shape = sess.run(out_image_shape,
feed_dict={in_image:
np.random.randn(*in_shape)})
self.assertAllEqual(out_image_shape, expected_shape)
def testResizeToRangeWithMasksPreservesStaticSpatialShape(self):
"""Tests image resizing, checking output sizes."""
in_image_shape_list = [[60, 40, 3], [15, 30, 3]]
in_masks_shape_list = [[15, 60, 40], [10, 15, 30]]
min_dim = 50
max_dim = 100
expected_image_shape_list = [[75, 50, 3], [50, 100, 3]]
expected_masks_shape_list = [[15, 75, 50], [10, 50, 100]]
for (in_image_shape, expected_image_shape, in_masks_shape,
expected_mask_shape) in zip(in_image_shape_list,
expected_image_shape_list,
in_masks_shape_list,
expected_masks_shape_list):
in_image = tf.random_uniform(in_image_shape)
in_masks = tf.random_uniform(in_masks_shape)
out_image, out_masks = preprocessor.resize_to_range(
in_image, in_masks, min_dimension=min_dim, max_dimension=max_dim)
self.assertAllEqual(out_masks.get_shape().as_list(), expected_mask_shape)
self.assertAllEqual(out_image.get_shape().as_list(), expected_image_shape)
def testResizeToRangeWithMasksAndDynamicSpatialShape(self):
"""Tests image resizing, checking output sizes."""
in_image_shape_list = [[60, 40, 3], [15, 30, 3]]
in_masks_shape_list = [[15, 60, 40], [10, 15, 30]]
min_dim = 50
max_dim = 100
expected_image_shape_list = [[75, 50, 3], [50, 100, 3]]
expected_masks_shape_list = [[15, 75, 50], [10, 50, 100]]
for (in_image_shape, expected_image_shape, in_masks_shape,
expected_mask_shape) in zip(in_image_shape_list,
expected_image_shape_list,
in_masks_shape_list,
expected_masks_shape_list):
in_image = tf.placeholder(tf.float32, shape=(None, None, 3))
in_masks = tf.placeholder(tf.float32, shape=(None, None, None))
out_image, out_masks = preprocessor.resize_to_range(
in_image, in_masks, min_dimension=min_dim, max_dimension=max_dim)
out_image_shape = tf.shape(out_image)
out_masks_shape = tf.shape(out_masks)
with self.test_session() as sess:
out_image_shape, out_masks_shape = sess.run(
[out_image_shape, out_masks_shape],
feed_dict={
in_image: np.random.randn(*in_image_shape),
in_masks: np.random.randn(*in_masks_shape)
})
self.assertAllEqual(out_image_shape, expected_image_shape)
self.assertAllEqual(out_masks_shape, expected_mask_shape)
  def testResizeImageWithMasksTensorOfSizeZero(self):
"""Tests image resizing, checking output sizes."""
in_image_shape_list = [[60, 40, 3], [15, 30, 3]]
in_masks_shape_list = [[0, 60, 40], [0, 15, 30]]
height = 50
width = 100
expected_image_shape_list = [[50, 100, 3], [50, 100, 3]]
expected_masks_shape_list = [[0, 50, 100], [0, 50, 100]]
for (in_image_shape, expected_image_shape, in_masks_shape,
expected_mask_shape) in zip(in_image_shape_list,
expected_image_shape_list,
in_masks_shape_list,
expected_masks_shape_list):
in_image = tf.random_uniform(in_image_shape)
in_masks = tf.random_uniform(in_masks_shape)
out_image, out_masks = preprocessor.resize_image(
in_image, in_masks, new_height=height, new_width=width)
out_image_shape = tf.shape(out_image)
out_masks_shape = tf.shape(out_masks)
with self.test_session() as sess:
out_image_shape, out_masks_shape = sess.run(
[out_image_shape, out_masks_shape])
self.assertAllEqual(out_image_shape, expected_image_shape)
self.assertAllEqual(out_masks_shape, expected_mask_shape)
def testResizeToRange4DImageTensor(self):
image = tf.random_uniform([1, 200, 300, 3])
with self.assertRaises(ValueError):
preprocessor.resize_to_range(image, 500, 600)
def testResizeToRangeSameMinMax(self):
"""Tests image resizing, checking output sizes."""
in_shape_list = [[312, 312, 3], [299, 299, 3]]
min_dim = 320
max_dim = 320
expected_shape_list = [[320, 320, 3], [320, 320, 3]]
for in_shape, expected_shape in zip(in_shape_list, expected_shape_list):
in_image = tf.random_uniform(in_shape)
out_image = preprocessor.resize_to_range(
in_image, min_dimension=min_dim, max_dimension=max_dim)
out_image_shape = tf.shape(out_image)
with self.test_session() as sess:
out_image_shape = sess.run(out_image_shape)
self.assertAllEqual(out_image_shape, expected_shape)
def testScaleBoxesToPixelCoordinates(self):
"""Tests box scaling, checking scaled values."""
in_shape = [60, 40, 3]
in_boxes = [[0.1, 0.2, 0.4, 0.6],
[0.5, 0.3, 0.9, 0.7]]
expected_boxes = [[6., 8., 24., 24.],
[30., 12., 54., 28.]]
in_image = tf.random_uniform(in_shape)
in_boxes = tf.constant(in_boxes)
_, out_boxes = preprocessor.scale_boxes_to_pixel_coordinates(
in_image, boxes=in_boxes)
with self.test_session() as sess:
out_boxes = sess.run(out_boxes)
self.assertAllClose(out_boxes, expected_boxes)
def testScaleBoxesToPixelCoordinatesWithKeypoints(self):
"""Tests box and keypoint scaling, checking scaled values."""
in_shape = [60, 40, 3]
in_boxes = self.createTestBoxes()
in_keypoints = self.createTestKeypoints()
expected_boxes = [[0., 10., 45., 40.],
[15., 20., 45., 40.]]
expected_keypoints = [
[[6., 4.], [12., 8.], [18., 12.]],
[[24., 16.], [30., 20.], [36., 24.]],
]
in_image = tf.random_uniform(in_shape)
_, out_boxes, out_keypoints = preprocessor.scale_boxes_to_pixel_coordinates(
in_image, boxes=in_boxes, keypoints=in_keypoints)
with self.test_session() as sess:
out_boxes_, out_keypoints_ = sess.run([out_boxes, out_keypoints])
self.assertAllClose(out_boxes_, expected_boxes)
self.assertAllClose(out_keypoints_, expected_keypoints)
def testSubtractChannelMean(self):
"""Tests whether channel means have been subtracted."""
with self.test_session():
image = tf.zeros((240, 320, 3))
means = [1, 2, 3]
actual = preprocessor.subtract_channel_mean(image, means=means)
actual = actual.eval()
self.assertTrue((actual[:, :, 0] == -1).all())
self.assertTrue((actual[:, :, 1] == -2).all())
self.assertTrue((actual[:, :, 2] == -3).all())
def testOneHotEncoding(self):
"""Tests one hot encoding of multiclass labels."""
with self.test_session():
labels = tf.constant([1, 4, 2], dtype=tf.int32)
one_hot = preprocessor.one_hot_encoding(labels, num_classes=5)
one_hot = one_hot.eval()
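      # one_hot_encoding merges the per-label one-hot rows into a single
      # multi-hot vector, hence the expected [0, 1, 1, 0, 1] below.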
self.assertAllEqual([0, 1, 1, 0, 1], one_hot)
def testSSDRandomCrop(self):
preprocessing_options = [
(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}),
(preprocessor.ssd_random_crop, {})]
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
with self.test_session() as sess:
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = sess.run(
[boxes_rank, distorted_boxes_rank, images_rank,
distorted_images_rank])
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
def testSSDRandomCropPad(self):
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
preprocessing_options = [
(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}),
(preprocessor.ssd_random_crop_pad, {})]
tensor_dict = {fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels}
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
with self.test_session() as sess:
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = sess.run([
boxes_rank, distorted_boxes_rank, images_rank, distorted_images_rank
])
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
def testSSDRandomCropFixedAspectRatio(self):
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
preprocessing_options = [
(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}),
(preprocessor.ssd_random_crop_fixed_aspect_ratio, {})]
tensor_dict = {
fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels
}
distorted_tensor_dict = preprocessor.preprocess(tensor_dict,
preprocessing_options)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
with self.test_session() as sess:
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = sess.run(
[boxes_rank, distorted_boxes_rank, images_rank,
distorted_images_rank])
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
def testSSDRandomCropFixedAspectRatioWithMasksAndKeypoints(self):
images = self.createTestImages()
boxes = self.createTestBoxes()
labels = self.createTestLabels()
masks = self.createTestMasks()
keypoints = self.createTestKeypoints()
preprocessing_options = [
(preprocessor.normalize_image, {
'original_minval': 0,
'original_maxval': 255,
'target_minval': 0,
'target_maxval': 1
}),
(preprocessor.ssd_random_crop_fixed_aspect_ratio, {})]
tensor_dict = {
fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_classes: labels,
fields.InputDataFields.groundtruth_instance_masks: masks,
fields.InputDataFields.groundtruth_keypoints: keypoints,
}
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_instance_masks=True, include_keypoints=True)
distorted_tensor_dict = preprocessor.preprocess(
tensor_dict, preprocessing_options, func_arg_map=preprocessor_arg_map)
distorted_images = distorted_tensor_dict[fields.InputDataFields.image]
distorted_boxes = distorted_tensor_dict[
fields.InputDataFields.groundtruth_boxes]
images_rank = tf.rank(images)
distorted_images_rank = tf.rank(distorted_images)
boxes_rank = tf.rank(boxes)
distorted_boxes_rank = tf.rank(distorted_boxes)
with self.test_session() as sess:
(boxes_rank_, distorted_boxes_rank_, images_rank_,
distorted_images_rank_) = sess.run(
[boxes_rank, distorted_boxes_rank, images_rank,
distorted_images_rank])
self.assertAllEqual(boxes_rank_, distorted_boxes_rank_)
self.assertAllEqual(images_rank_, distorted_images_rank_)
if __name__ == '__main__':
tf.test.main()
| {
"pile_set_name": "Github"
} |
{ stdenv, fetchurl, fetchFromGitHub
, autoreconfHook
, glib
, db
, pkgconfig
}:
let
modelData = fetchurl {
url = "mirror://sourceforge/libpinyin/models/model17.text.tar.gz";
sha256 = "1kb2nswpsqlk2qm5jr7vqcp97f2dx7nvpk24lxjs1g12n252f5z0";
};
in
stdenv.mkDerivation rec {
pname = "libpinyin";
version = "2.3.0";
src = fetchFromGitHub {
owner = "libpinyin";
repo = "libpinyin";
rev = version;
sha256 = "14fkpp16s5k0pbw5wwd24pqr0qbdjgbl90n9aqwx72m03n7an40l";
};
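  # The pinyin language model is distributed separately from the source
  # tarball, so unpack it into data/ before the build configures.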
postUnpack = ''
tar -xzf ${modelData} -C $sourceRoot/data
'';
nativeBuildInputs = [ autoreconfHook glib db pkgconfig ];
meta = with stdenv.lib; {
description = "Library for intelligent sentence-based Chinese pinyin input method";
homepage = "https://sourceforge.net/projects/libpinyin";
license = licenses.gpl2;
maintainers = with maintainers; [ ericsagnes ];
platforms = platforms.linux;
};
}
| {
"pile_set_name": "Github"
} |
package main
import (
	"io"
	"net/http"
	"os"
	"path/filepath"
)
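// Minimal multipart upload server: POST a form with file field "my-file";
// io.LimitReader below caps the stored payload at 400 bytes.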
func main() {
http.ListenAndServe(":9000", http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
if req.Method == "POST" {
file, _, err := req.FormFile("my-file")
if err != nil {
http.Error(res, err.Error(), 500)
return
}
defer file.Close()
src := io.LimitReader(file, 400)
			// The original snippet is truncated at this point; the rest is a
			// hedged reconstruction of the usual copy-to-disk pattern (the
			// destination path below is an assumption).
			dst, err := os.Create(filepath.Join(os.TempDir(), "my-file"))
			if err != nil {
				http.Error(res, err.Error(), 500)
				return
			}
			defer dst.Close()
			if _, err := io.Copy(dst, src); err != nil {
				http.Error(res, err.Error(), 500)
				return
			}
		}
	}))
}
| {
"pile_set_name": "Github"
} |
//
// Podcast.swift
// Podcasts
//
// Created by Eugene Karambirov on 21/09/2018.
// Copyright © 2018 Eugene Karambirov. All rights reserved.
//
import Foundation
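/// A podcast search result. Conforms to `NSCoding` so instances can be turned
/// into `Data` and persisted (e.g. via `NSKeyedArchiver`), which is what the
/// encode/decode methods below support.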
final class Podcast: NSObject, Decodable, NSCoding {
var trackName: String?
var artistName: String?
var artworkUrl600: String?
var trackCount: Int?
    var feedUrlString: String?
func encode(with aCoder: NSCoder) {
print("Trying to transform Podcast into Data")
aCoder.encode(trackName ?? "", forKey: Keys.trackNameKey)
aCoder.encode(artistName ?? "", forKey: Keys.artistNameKey)
aCoder.encode(artworkUrl600 ?? "", forKey: Keys.artworkKey)
aCoder.encode(feedUrlSting ?? "", forKey: Keys.feedKey)
}
init?(coder aDecoder: NSCoder) {
print("Trying to turn Data into Podcast")
trackName = aDecoder.decodeObject(forKey: Keys.trackNameKey) as? String
artistName = aDecoder.decodeObject(forKey: Keys.artistNameKey) as? String
artworkUrl600 = aDecoder.decodeObject(forKey: Keys.artworkKey) as? String
        feedUrlString = aDecoder.decodeObject(forKey: Keys.feedKey) as? String
}
}
private extension Podcast {
enum Keys {
static let trackNameKey = "trackNameKey"
static let artistNameKey = "artistNameKey"
static let artworkKey = "artworkKey"
static let feedKey = "feedKey"
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goals of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<metadata name="$this.Locked" type="System.Boolean, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089">
<value>True</value>
</metadata>
</root> | {
"pile_set_name": "Github"
} |
// Mantid Repository : https://github.com/mantidproject/mantid
//
// Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
// NScD Oak Ridge National Laboratory, European Spallation Source,
// Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
// SPDX - License - Identifier: GPL - 3.0 +
#pragma once
#include <QList>
#include <QMap>
#include <QStringList>
#include <qwt_plot.h>
#include <qwt_plot_item.h>
class QwtPlotCurve;
class QwtPlotZoomer;
namespace MantidQt {
namespace MantidWidgets {
class PeakLabel;
class PeakMarker2D;
/**
* Implements a simple widget for plotting a single curve.
* Allows to keep more than one curve.
*/
class MiniPlotQwt : public QwtPlot {
Q_OBJECT
public:
explicit MiniPlotQwt(QWidget *parent);
~MiniPlotQwt() override;
void setXLabel(QString xunit);
void setData(std::vector<double> x, std::vector<double> y, QString xunit,
QString curveLabel);
QString label() const { return m_label; }
void setYAxisLabelRotation(double degrees);
void addPeakLabel(const PeakMarker2D * /*marker*/);
void clearPeakLabels();
bool hasCurve() const;
void store();
bool hasStored() const;
QStringList getLabels() const;
void removeCurve(const QString &label);
QColor getCurveColor(const QString &label) const;
void recalcXAxisDivs();
void recalcYAxisDivs();
bool isYLogScale() const;
QString getXUnits() const { return m_xUnits; }
public slots:
void setXScale(double from, double to);
void setYScale(double from, double to);
void clearCurve();
void recalcAxisDivs();
void setYLogScale();
void setYLinearScale();
void clearAll();
signals:
void showContextMenu();
void clickedAt(double /*_t1*/, double /*_t2*/);
protected:
void resizeEvent(QResizeEvent *e) override;
void contextMenuEvent(QContextMenuEvent *e) override;
void mousePressEvent(QMouseEvent * /*unused*/) override;
void mouseReleaseEvent(QMouseEvent * /*unused*/) override;
private:
QwtPlotCurve *m_curve;
QString m_label; ///< label to identify stored curve
QwtPlotZoomer *m_zoomer; ///< does zooming
int m_x0; ///< save x coord of last left mouse click
int m_y0; ///< save y coord of last left mouse click
QList<PeakLabel *> m_peakLabels;
QMap<QString, QwtPlotCurve *> m_stored; ///< stored curves
QList<QColor> m_colors; ///< colors for stored curves
int m_colorIndex;
QString m_xUnits;
};
class PeakLabel : public QwtPlotItem {
public:
PeakLabel(const PeakMarker2D *m, const MiniPlotQwt *plot)
: m_marker(m), m_plot(plot) {}
void draw(QPainter *painter, const QwtScaleMap &xMap, const QwtScaleMap &yMap,
const QRect &canvasRect) const override;
private:
const PeakMarker2D *m_marker;
const MiniPlotQwt *m_plot;
};
} // namespace MantidWidgets
} // namespace MantidQt | {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
"""
This source file is part of the Swift.org open source project
Copyright (c) 2014 - 2020 Apple Inc. and the Swift project authors
Licensed under Apache License v2.0 with Runtime Library Exception
See https://swift.org/LICENSE.txt for license information
See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
------------------------------------------------------------------------------
This is a helper script for the main swift repository's build-script.py that
knows how to build and install swift-format given a Swift
workspace.
"""
from __future__ import print_function
import argparse
import sys
import os, platform
import subprocess
def printerr(message):
print(message, file=sys.stderr)
def main(argv_prefix = []):
args = parse_args(argv_prefix + sys.argv[1:])
run(args)
def parse_args(args):
parser = argparse.ArgumentParser(prog='build-script-helper.py')
parser.add_argument('--package-path', default='')
parser.add_argument('-v', '--verbose', action='store_true', help='log executed commands')
parser.add_argument('--prefix', help='install path')
parser.add_argument('--configuration', default='debug')
parser.add_argument('--build-path', default=None)
parser.add_argument('--multiroot-data-file', help='Path to an Xcode workspace to create a unified build of SwiftSyntax with other projects.')
parser.add_argument('--toolchain', required=True, help='the toolchain to use when building this package')
parser.add_argument('--update', action='store_true', help='update all SwiftPM dependencies')
parser.add_argument('--no-local-deps', action='store_true', help='use normal remote dependencies when building')
parser.add_argument('build_actions', help="Extra actions to perform. Can be any number of the following", choices=['all', 'build', 'test', 'generate-xcodeproj'], nargs="*", default=['build'])
parsed = parser.parse_args(args)
parsed.swift_exec = os.path.join(parsed.toolchain, 'bin', 'swift')
# Convert package_path to absolute path, relative to root of repo.
repo_path = os.path.dirname(__file__)
parsed.package_path = os.path.realpath(
os.path.join(repo_path, parsed.package_path))
if not parsed.build_path:
parsed.build_path = os.path.join(parsed.package_path, '.build')
return parsed
def run(args):
package_name = os.path.basename(args.package_path)
env = dict(os.environ)
# Use local dependencies (i.e. checked out next swift-format).
if not args.no_local_deps:
env['SWIFTCI_USE_LOCAL_DEPS'] = "1"
if args.update:
print("** Updating dependencies of %s **" % package_name)
try:
update_swiftpm_dependencies(package_path=args.package_path,
swift_exec=args.swift_exec,
build_path=args.build_path,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Updating dependencies of %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
# The test action creates its own build. No need to build if we are just testing.
if should_run_action('build', args.build_actions):
print("** Building %s **" % package_name)
try:
invoke_swift(package_path=args.package_path,
swift_exec=args.swift_exec,
action='build',
products=['swift-format'],
build_path=args.build_path,
multiroot_data_file=args.multiroot_data_file,
configuration=args.configuration,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Building %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
output_dir = os.path.realpath(os.path.join(args.build_path, args.configuration))
if should_run_action("generate-xcodeproj", args.build_actions):
print("** Generating Xcode project for %s **" % package_name)
try:
generate_xcodeproj(args.package_path,
swift_exec=args.swift_exec,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Generating the Xcode project failed')
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
if should_run_action("test", args.build_actions):
print("** Testing %s **" % package_name)
try:
invoke_swift(package_path=args.package_path,
swift_exec=args.swift_exec,
action='test',
products=['%sPackageTests' % package_name],
build_path=args.build_path,
multiroot_data_file=args.multiroot_data_file,
configuration=args.configuration,
env=env,
verbose=args.verbose)
except subprocess.CalledProcessError as e:
printerr('FAIL: Testing %s failed' % package_name)
printerr('Executing: %s' % ' '.join(e.cmd))
sys.exit(1)
def should_run_action(action_name, selected_actions):
if action_name in selected_actions:
return True
elif "all" in selected_actions:
return True
else:
return False
def update_swiftpm_dependencies(package_path, swift_exec, build_path, env, verbose):
args = [swift_exec, 'package', '--package-path', package_path, '--build-path', build_path, 'update']
check_call(args, env=env, verbose=verbose)
def invoke_swift(package_path, swift_exec, action, products, build_path, multiroot_data_file, configuration, env, verbose):
# Until rdar://53881101 is implemented, we cannot request a build of multiple
# targets simultaneously. For now, just build one product after the other.
for product in products:
invoke_swift_single_product(package_path, swift_exec, action, product, build_path, multiroot_data_file, configuration, env, verbose)
def invoke_swift_single_product(package_path, swift_exec, action, product, build_path, multiroot_data_file, configuration, env, verbose):
args = [swift_exec, action, '--package-path', package_path, '-c', configuration, '--build-path', build_path]
if platform.system() != "Darwin":
args.extend(["--enable-test-discovery"])
if multiroot_data_file:
args.extend(['--multiroot-data-file', multiroot_data_file])
if action == 'test':
args.extend(['--test-product', product])
else:
args.extend(['--product', product])
# Tell SwiftSyntax that we are building in a build-script environment so that
# it does not need to be rebuilt if it has already been built before.
env['SWIFT_BUILD_SCRIPT_ENVIRONMENT'] = '1'
check_call(args, env=env, verbose=verbose)
def generate_xcodeproj(package_path, swift_exec, env, verbose):
package_name = os.path.basename(package_path)
xcodeproj_path = os.path.join(package_path, '%s.xcodeproj' % package_name)
args = [swift_exec, 'package', '--package-path', package_path, 'generate-xcodeproj', '--output', xcodeproj_path]
check_call(args, env=env, verbose=verbose)
def check_call(cmd, verbose, env=os.environ, **kwargs):
if verbose:
print(' '.join([escape_cmd_arg(arg) for arg in cmd]))
return subprocess.check_call(cmd, env=env, stderr=subprocess.STDOUT, **kwargs)
def escape_cmd_arg(arg):
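    # Quote arguments containing spaces or quotes so the verbose log line can be
    # copy-pasted into a shell; used only for logging, not for execution.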
if '"' in arg or ' ' in arg:
return '"%s"' % arg.replace('"', '\\"')
else:
return arg
if __name__ == '__main__':
main()
| {
"pile_set_name": "Github"
} |
use_frameworks!
platform :ios, '10.0'
target 'ISEmojiViewExample' do
pod 'ISEmojiView', :path => '../'
end
| {
"pile_set_name": "Github"
} |
attribute vec4 aPosition;
attribute vec4 aTextureCoord;
uniform mat4 uMVPMatrix;
uniform mat4 uSTMatrix;
varying vec2 vTextureCoord;
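// Pass-through vertex shader: uMVPMatrix positions the quad and uSTMatrix remaps
// the texture coordinates (a setup typical of Android SurfaceTexture/OES rendering).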
void main() {
gl_Position = uMVPMatrix * aPosition;
vTextureCoord = (uSTMatrix * aTextureCoord).xy;
} | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Product Revision="2"
xmlns="https://github.com/OpenZWave/open-zwave">
<!-- Please refer to https://github.com/OpenZWave/open-zwave/wiki/Adding-Devices for instructions -->
<MetaData>
<MetaDataItem name="Name">Duco CO2/RH sensor</MetaDataItem>
<MetaDataItem name="Description">The DUCO CO2 sensor measures the indoor CO2 levels and temperature. This is a good indication for the indoor air quality.Duco Tronic System
The Duco Tronic System is a demand-driven natural ventilation system that always provides a balance between controlled natural ventilation and a central mechanical extraction.
The demand for ventilation is determined based on air quality (CO 2 ) in the living spaces, the relative humidity (RH) in the 'wet' rooms and outdoor temperature readings.
These values are measured by means of various sensors in different rooms. The sensors communicate wirelessly via the Z-wave protocol with the electronically controlled by the air supply
grilles and DucoBox the mechanical removal of the polluted and humid air controls. This electronic control ensures that only where a ventilation when needed, so the energy loss is limited.
Moreover, the Duco Tronic System takes into account the temperature and adjusts to the weather conditions. If it gets colder outside, it will automatically schedule a higher CO 2 target gain. T
his opens it progresses less rapidly and less far. When the weather warms up, the opposite is true. In extreme heat is ventilated only by those grids where the lowest temperature measured, for example,
on the shady side of the building. The measurement of the outside temperature is done by means of a sensor located in the control unit of the air vents situated. Thanks to the electronically controlled grids
within the 'Duco Tronic System requires the user to do anything. The system ventilates automatically . Still, the resident boss and he may, if necessary or desired, the air supply and exhaust manually.
Over time, the system automatically switches back to the most optimal position. All components, sensors, DucoBox and grills are aesthetically designed, fast and easy to install, use and maintenance.</MetaDataItem>
<MetaDataItem name="OzwInfoPage">http://www.openzwave.com/device-database/XXXX:XXXX:XXXX</MetaDataItem>
<MetaDataItem name="ProductPage">https://www.duco.eu/nl-nl-gearchiveerde-systemen</MetaDataItem>
<MetaDataItem name="ProductSupport">https://www.duco.eu/nl-nl-gearchiveerde-systemen</MetaDataItem>
<MetaDataItem name="ProductPic">images/DefaultProductImage.png</MetaDataItem>
<MetaDataItem name="ProductManual">https://www.duco.eu/Wes/CDN/1/Attachments/GH_DucoTronic%20System%20Z-wave_(nl)_online%20garantie_LowRes_635500206155264045.pdf</MetaDataItem>
<MetaDataItem name="WakeupDescription"> <!-- Short description on how to wake up the device if its battery powered --></MetaDataItem>
<MetaDataItem name="InclusionDescription">Press all 4 buttons to enter installer mode. Next pres button 1 to add the device to a z-wave network.</MetaDataItem>
<MetaDataItem name="ExclusionDescription"> <!-- Short Description on how to exclude the device --></MetaDataItem>
<MetaDataItem name="ResetDescription">Press all 4 buttons to enter installer mode. Next press and hold button 1 and 2 to remove the device from a z-wave network.</MetaDataItem>
<MetaDataItem id="0001" name="ZWProductPage" type="0004">https://products.z-wavealliance.org/products/49</MetaDataItem>
<MetaDataItem id="0001" name="FrequencyName" type="0004">CEPT (Europe)</MetaDataItem>
<MetaDataItem id="0001" name="Identifier" type="0004">00001801/G12809</MetaDataItem>
<ChangeLog>
<Entry author="Mark van Kemenade - [email protected]" date="29 Januari 2020" revision="1">Created the config file from scratch based on information from https://products.z-wavealliance.org/products/49 and http://devel.pepper1.net/zwavedb/device/156</Entry>
<Entry author="Jean-Francois Auger" date="23 Sept 2020" revision="2">Remove auto on group 1, cleanup MetaDataItem</Entry>
</ChangeLog>
</MetaData>
<CommandClass id="133"> <!-- Association -->
<Associations num_groups="7">
<Group index="1" label="Duco control" max_associations="20"/>
<!-- This is the control group used by the DUCO products to talk to each other. Only DUCO products should be added to this group. -->
<Group auto="true" index="2" label="Temp Basic set (on)" max_associations="20"/>
<!-- This is the temperature ON group. When the temperature goes above the temperature high set point, then the sensor will send BASIC SET( ON ) to all the nodes in the group. And when the temperature goes below the temperature low set point, then the sensor will send BASIC SET( OFF ) to all the nodes in the group. -->
<Group auto="true" index="3" label="Temp Basic set (off)" max_associations="20"/>
<!-- This is the temperature OFF group. When the temperature goes above the temperature high set point, then the sensor will send BASIC SET( OFF ) to all the nodes in the group. And when the temperature goes below the temperature low set point, then the sensor will send BASIC SET( ON ) to all the nodes in the group. -->
<Group auto="true" index="4" label="Temperature delta" max_associations="20"/>
<!-- This is the temperature DELTA group. When the temperature has changed more than the Delta temperature value, then the sensor will send a MULTI SENSOR REPORT with the temperature value to all the nodes in the group. -->
<Group auto="true" index="5" label="RH Basic set (on)" max_associations="20"/>
<!-- This is the RH ON group. When the RH goes above the RH high set point, then the sensor will send BASIC SET( ON ) to all the nodes in the group. And when the RH goes below the RH low set point, then the sensor will send BASIC SET( OFF ) to all the nodes in the group. -->
<Group auto="true" index="6" label="RH Basic set (off)" max_associations="20"/>
<!-- This is the RH OFF group. When the RH goes above the RH high set point, then the sensor will send BASIC SET( OFF ) to all the nodes in the group. And when the RH goes below the RH low set point, then the sensor will send BASIC SET( ON ) to all the nodes in the group. -->
<Group auto="true" index="7" label="RH delta" max_associations="20"/>
<!-- This is the RH DELTA group. When the RH has changed more than the Delta RH value, then the sensor will send a MULTI SENSOR REPORT with the RH value to all the nodes in the group. -->
</Associations>
</CommandClass>
<CommandClass id="112"> <!-- Configuration -->
<!-- Measure Level -->
<Value genre="config" index="1" label="Measure Level" size="1" type="byte" value="60" min="1" max="127" units="sec">
<Help>How often the value is measured(example: if value=60, the sensor value is measured every 60 seconds)</Help>
</Value>
<!-- CO² set point -->
<Value genre="config" index="2" label="CO² set point" size="2" type="short" value="1000" min="1" max="32767" units="PPM">
<Help>Set point CO2: Used in ‘automatic mode’. (button 4) if the device is a CO2 sensor. The CO2 level in a room is a good indication of the air quality in this room.
When the CO2 level is above this set point, the room needs more ventilation. Consequently, the CO2 sensor will open the associated grating unit.</Help>
</Value>
<!-- Humidity set point -->
<Value genre="config" index="3" label="CO² set point" size="1" type="byte" value="70" min="1" max="127" units="%">
<Help>Set point RH: Used in ‘automatic mode’. (button 4) if the device is a humidity sensor. The humidity level in a room is a good indication of the air quality in this room.
When the RH level is above this set point, the room needs more ventilation. Consequently, the RH sensor will open the associated grating unit.</Help>
</Value>
<!-- Button 1 -->
<Value genre="config" index="4" label="Button 1" size="1" type="byte" value="15" min="1" max="127" units="%">
      <Help>The value when button 1 is pressed. Values above 99 will be capped to 99 (maximum). Used in association group 1.</Help>
</Value>
<!-- Button 2 -->
<Value genre="config" index="5" label="Button 2" size="1" type="byte" value="50" min="1" max="127" units="%">
      <Help>The value when button 2 is pressed. Values above 99 will be capped to 99 (maximum). Used in association group 1.</Help>
</Value>
<!-- Button 3 -->
<Value genre="config" index="6" label="Button 3" size="1" type="byte" value="99" min="1" max="127" units="%">
      <Help>The value when button 3 is pressed. Values above 99 will be capped to 99 (maximum). Used in association group 1.</Help>
</Value>
<!-- Min automatic level -->
<Value genre="config" index="7" label="Min automatic level" size="1" type="byte" value="15" min="1" max="127" units="%">
      <Help>The minimum value that can be sent by the CO2 sensor when it is in automatic mode.</Help>
</Value>
<!-- Temperature high setpoint -->
<Value genre="config" index="8" label="Temperature high setpoint" size="2" type="short" value="0" min="1" max="32767" units="°C">
<Help>Temperature high set point: used in association group 2 or 3</Help>
</Value>
<!-- Temperature low setpoint -->
<Value genre="config" index="9" label="Temperature low setpoint" size="2" type="short" value="0" min="1" max="32767" units="°C">
<Help>Temperature low set point: used in association group 2 or 3</Help>
</Value>
<!-- Temperature delta setpoint -->
<Value genre="config" index="10" label="Temperature delta setpoint" size="2" type="short" value="2" min="1" max="32767" units="°C">
<Help>Temperature delta set point: used in association group 4</Help>
</Value>
<!-- RH high setpoint -->
<Value genre="config" index="11" label="RH high setpoint" size="2" type="short" value="0" min="1" max="32767" units="%">
<Help>RH high set point: used in association group 5 or 6</Help>
</Value>
<!-- RH low setpoint -->
<Value genre="config" index="12" label="RH low setpoint" size="2" type="short" value="0" min="1" max="32767" units="%">
      <Help>RH low set point: used in association group 5 or 6</Help>
</Value>
<!-- RH delta setpoint -->
<Value genre="config" index="13" label="RH delta setpoint" size="2" type="short" value="0" min="1" max="32767" units="%">
      <Help>RH delta set point: used in association group 7</Help>
</Value>
<!-- CO2 high setpoint -->
<Value genre="config" index="14" label="CO2 high setpoint" size="2" type="short" value="0" min="1" max="32767" units="PPM">
<Help>CO2 high set point: not available in RH sensor</Help>
</Value>
<!-- CO2 low setpoint -->
<Value genre="config" index="15" label="CO2 low setpoint" size="2" type="short" value="0" min="1" max="32767" units="PPM">
<Help>CO2 low set point: not available in RH sensor</Help>
</Value>
<!-- CO2 delta setpoint -->
<Value genre="config" index="16" label="CO2 delta setpoint" size="2" type="short" value="500" min="1" max="32767" units="PPM">
      <Help>CO2 delta set point: not available in RH sensor</Help>
</Value>
</CommandClass>
</Product>
| {
"pile_set_name": "Github"
} |
#include <stdio.h>
#ifdef _MSC_VER
#pragma warning(disable:4996)
#endif // _MSC_VER
// This convoluted function returns 0
// but will hopefully not be optimized away in release builds...
int return_0_non_optimizable()
{
char buffer[100];
long value = 62831853;
char *c;
int result;
sprintf(buffer, "%ld", value);
c = buffer;
result = 0;
while (*c) {
int digit = (int)(c[0] - '0');
result = result + digit;
c++;
}
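    /* The digits of 62831853 sum to 6+2+8+3+1+8+5+3 = 36, so this returns 0. */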
return result - 36;
}
int main()
{
int p, q, r;
p = 42;
q = return_0_non_optimizable();
r = p / q; // this is an integer division by zero
printf("r=%d\n", r);
return 0;
}
| {
"pile_set_name": "Github"
} |
package org.elixir_lang.beam.chunk.debug_info.v1.erl_abstract_code.abstract_code_compiler_options.abstract_code
data class Scope(val varNameSet: Set<String>, val pinning: Boolean = false) {
fun union(other: Scope): Scope {
val otherVarNameSet = other.varNameSet
val otherPinning = other.pinning
val pinningUnion = pinning || otherPinning
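        // Reuse an existing instance when one scope already subsumes the other
        // (same pinning result and a superset of the names) instead of allocating a new set.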
return when {
pinning == pinningUnion && varNameSet.containsAll(otherVarNameSet) -> this
otherPinning == pinningUnion && otherVarNameSet.containsAll(varNameSet) -> other
else ->
Scope(varNameSet.union(otherVarNameSet), pinningUnion)
}
}
companion object {
val EMPTY = Scope(emptySet())
}
}
| {
"pile_set_name": "Github"
} |
/*
Velociraptor - Hunting Evil
Copyright (C) 2019 Velocidex Innovations.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package main
import (
"fmt"
"log"
"regexp"
"strings"
"time"
"github.com/Velocidex/ordereddict"
"github.com/Velocidex/yaml/v2"
kingpin "gopkg.in/alecthomas/kingpin.v2"
"www.velocidex.com/golang/velociraptor/config"
config_proto "www.velocidex.com/golang/velociraptor/config/proto"
flows_proto "www.velocidex.com/golang/velociraptor/flows/proto"
logging "www.velocidex.com/golang/velociraptor/logging"
"www.velocidex.com/golang/velociraptor/services"
vql_subsystem "www.velocidex.com/golang/velociraptor/vql"
)
var (
artifact_command = app.Command(
"artifacts", "Process artifact definitions.")
artifact_command_list = artifact_command.Command(
"list", "Print all artifacts")
artifact_command_show = artifact_command.Command(
"show", "Show an artifact")
artifact_command_show_name = artifact_command_show.Arg(
"name", "Name to show.").Required().String()
artifact_command_list_name = artifact_command_list.Arg(
"regex", "Regex of names to match.").
HintAction(listArtifactsHint).String()
artifact_command_list_verbose_count = artifact_command_list.Flag(
"details", "Show more details (Use -d -dd for even more)").
Short('d').Counter()
artifact_command_collect = artifact_command.Command(
"collect", "Collect all artifacts")
artifact_command_collect_output = artifact_command_collect.Flag(
"output", "When specified we create a zip file and "+
"store all output in it.").
Default("").String()
artifact_command_collect_report = artifact_command_collect.Flag(
"report", "When specified we create a report html file.").
Default("").String()
artificat_command_collect_admin_flag = artifact_command_collect.Flag(
"require_admin", "Ensure the user is an admin").Bool()
artifact_command_collect_output_password = artifact_command_collect.Flag(
"password", "When specified we encrypt zip file with this password.").
Default("").String()
artifact_command_collect_format = artifact_command_collect.Flag(
"format", "Output format to use (text,json,csv,jsonl).").
Default("json").Enum("text", "json", "csv", "jsonl")
artifact_command_collect_name = artifact_command_collect.Arg(
"artifact_name", "The artifact name to collect.").
Required().HintAction(listArtifactsHint).Strings()
artifact_command_collect_args = artifact_command_collect.Flag(
"args", "Artifact args.").Strings()
)
func listArtifactsHint() []string {
config_obj := config.GetDefaultConfig()
result := []string{}
manager, err := services.GetRepositoryManager()
if err != nil {
return nil
}
repository, err := manager.GetGlobalRepository(config_obj)
if err != nil {
return result
}
result = append(result, repository.List()...)
return result
}
func getRepository(config_obj *config_proto.Config) (services.Repository, error) {
manager, err := services.GetRepositoryManager()
kingpin.FatalIfError(err, "Artifact GetGlobalRepository ")
repository, err := manager.GetGlobalRepository(config_obj)
kingpin.FatalIfError(err, "Artifact GetGlobalRepository ")
if *artifact_definitions_dir != "" {
logging.GetLogger(config_obj, &logging.ToolComponent).
Info("Loading artifacts from %s",
*artifact_definitions_dir)
_, err := repository.LoadDirectory(*artifact_definitions_dir)
if err != nil {
logging.GetLogger(config_obj, &logging.ToolComponent).
Error("Artifact LoadDirectory: %v ", err)
return nil, err
}
}
return repository, nil
}
func doArtifactCollect() {
checkAdmin()
config_obj, err := DefaultConfigLoader.WithNullLoader().LoadAndValidate()
kingpin.FatalIfError(err, "Load Config ")
sm, err := startEssentialServices(config_obj)
kingpin.FatalIfError(err, "Load Config ")
defer sm.Close()
collect_args := ordereddict.NewDict()
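	// Parse "key=value" artifact arguments; a bare key is treated as a flag and set to "Y".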
for _, item := range *artifact_command_collect_args {
parts := strings.SplitN(item, "=", 2)
arg_name := parts[0]
if len(parts) < 2 {
collect_args.Set(arg_name, "Y")
} else {
collect_args.Set(arg_name, parts[1])
}
}
manager, err := services.GetRepositoryManager()
kingpin.FatalIfError(err, "GetRepositoryManager")
scope := manager.BuildScope(services.ScopeBuilder{
Config: config_obj,
ACLManager: vql_subsystem.NullACLManager{},
Logger: log.New(&LogWriter{config_obj}, " ", 0),
Env: ordereddict.NewDict().
Set("Artifacts", *artifact_command_collect_name).
Set("Output", *artifact_command_collect_output).
Set("Password", *artifact_command_collect_output_password).
Set("Report", *artifact_command_collect_report).
Set("Args", collect_args).
Set("Format", *artifact_command_collect_format),
})
defer scope.Close()
_, err = getRepository(config_obj)
kingpin.FatalIfError(err, "Loading extra artifacts")
now := time.Now()
defer func() {
logging.GetLogger(config_obj, &logging.ToolComponent).
Info("Collection completed in %v Seconds",
time.Now().Unix()-now.Unix())
}()
if *trace_vql_flag {
scope.Tracer = logging.NewPlainLogger(config_obj,
&logging.ToolComponent)
}
query := `
SELECT * FROM collect(artifacts=Artifacts, output=Output, report=Report,
password=Password, args=Args, format=Format)`
eval_local_query(config_obj, *artifact_command_collect_format, query, scope)
}
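// getFilterRegEx turns a simple wildcard pattern into an anchored regex: each "*"
// becomes ".*" and the result is wrapped in "^...$". Note that other regex
// metacharacters (including ".") pass through unescaped.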
func getFilterRegEx(pattern string) (*regexp.Regexp, error) {
pattern = strings.Replace(pattern, "*", ".*", -1)
pattern = "^" + pattern + "$"
return regexp.Compile(pattern)
}
func doArtifactShow() {
config_obj, err := DefaultConfigLoader.WithNullLoader().LoadAndValidate()
kingpin.FatalIfError(err, "Load Config ")
sm, err := startEssentialServices(config_obj)
kingpin.FatalIfError(err, "Starting services.")
defer sm.Close()
kingpin.FatalIfError(err, "Load Config ")
repository, err := getRepository(config_obj)
kingpin.FatalIfError(err, "Loading extra artifacts")
artifact, pres := repository.Get(config_obj, *artifact_command_show_name)
if !pres {
kingpin.Fatalf("Artifact %s not found",
*artifact_command_show_name)
}
fmt.Println(artifact.Raw)
}
func doArtifactList() {
config_obj, err := DefaultConfigLoader.WithNullLoader().LoadAndValidate()
kingpin.FatalIfError(err, "Load Config ")
sm, err := startEssentialServices(config_obj)
kingpin.FatalIfError(err, "Starting services.")
defer sm.Close()
ctx, cancel := install_sig_handler()
defer cancel()
repository, err := getRepository(config_obj)
kingpin.FatalIfError(err, "Loading extra artifacts")
var name_regex *regexp.Regexp
if *artifact_command_list_name != "" {
re, err := getFilterRegEx(*artifact_command_list_name)
kingpin.FatalIfError(err, "Artifact name regex not valid")
name_regex = re
}
for _, name := range repository.List() {
// Skip artifacts that do not match.
if name_regex != nil && name_regex.FindString(name) == "" {
continue
}
if *artifact_command_list_verbose_count == 0 {
fmt.Println(name)
continue
}
artifact, pres := repository.Get(config_obj, name)
if !pres {
kingpin.Fatalf("Artifact %s not found", name)
}
fmt.Println(artifact.Raw)
if *artifact_command_list_verbose_count <= 1 {
continue
}
launcher, err := services.GetLauncher()
kingpin.FatalIfError(err, "GetLauncher")
request, err := launcher.CompileCollectorArgs(
ctx, config_obj, vql_subsystem.NullACLManager{}, repository,
&flows_proto.ArtifactCollectorArgs{
Artifacts: []string{artifact.Name},
})
kingpin.FatalIfError(err, "Unable to compile artifact.")
res, err := yaml.Marshal(request)
kingpin.FatalIfError(err, "Unable to encode artifact.")
fmt.Printf("VQLCollectorArgs %s:\n***********\n%v\n",
artifact.Name, string(res))
}
}
// Load any artifacts defined inside the config file.
func load_config_artifacts(config_obj *config_proto.Config) error {
if config_obj.Autoexec == nil {
return nil
}
repository, err := getRepository(config_obj)
if err != nil {
return err
}
for _, definition := range config_obj.Autoexec.ArtifactDefinitions {
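		// Clear Raw before marshalling so the serialized definition does not
		// embed a stale copy of itself.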
definition.Raw = ""
serialized, err := yaml.Marshal(definition)
if err != nil {
return err
}
// Add the raw definition for inspection.
definition.Raw = string(serialized)
_, err = repository.LoadProto(definition, true /* validate */)
if err != nil {
return err
}
}
return nil
}
func init() {
command_handlers = append(command_handlers, func(command string) bool {
switch command {
case artifact_command_list.FullCommand():
doArtifactList()
case artifact_command_show.FullCommand():
doArtifactShow()
case artifact_command_collect.FullCommand():
doArtifactCollect()
default:
return false
}
return true
})
}
| {
"pile_set_name": "Github"
} |
<!doctype html>
<title>CodeMirror: Julia mode</title>
<meta charset="utf-8"/>
<link rel=stylesheet href="../../doc/docs.css">
<link rel="stylesheet" href="../../lib/codemirror.css">
<script src="../../lib/codemirror.js"></script>
<script src="julia.js"></script>
<style type="text/css">.CodeMirror {border-top: 1px solid black; border-bottom: 1px solid black;}</style>
<div id=nav>
<a href="http://codemirror.net"><h1>CodeMirror</h1><img id=logo src="../../doc/logo.png"></a>
<ul>
<li><a href="../../index.html">Home</a>
<li><a href="../../doc/manual.html">Manual</a>
<li><a href="https://github.com/codemirror/codemirror">Code</a>
</ul>
<ul>
<li><a href="../index.html">Language modes</a>
<li><a class=active href="#">Julia</a>
</ul>
</div>
<article>
<h2>Julia mode</h2>
<div><textarea id="code" name="code">
#numbers
1234
1234im
.234
.234im
2.23im
2.3f3
23e2
0x234
#strings
'a'
"asdf"
r"regex"
b"bytestring"
"""
multiline string
"""
#identifiers
a
as123
function_name!
#unicode identifiers
# a = x\ddot
a⃗ = ẍ
# a = v\dot
a⃗ = v̇
#F\vec = m \cdotp a\vec
F⃗ = m·a⃗
#literal identifier multiples
3x
4[1, 2, 3]
#dicts and indexing
x=[1, 2, 3]
x[end-1]
x={"julia"=>"language of technical computing"}
#exception handling
try
f()
catch
@printf "Error"
finally
g()
end
#types
immutable Color{T<:Number}
r::T
g::T
b::T
end
#functions
function change!(x::Vector{Float64})
for i = 1:length(x)
x[i] *= 2
end
end
#function invocation
f('b', (2, 3)...)
#operators
|=
&=
^=
\-
%=
*=
+=
-=
<=
>=
!=
==
%
*
+
-
<
>
!
=
|
&
^
\
?
~
:
$
<:
.<
.>
<<
<<=
>>
>>>>
>>=
>>>=
<<=
<<<=
.<=
.>=
.==
->
//
in
...
//
:=
.//=
.*=
./=
.^=
.%=
.+=
.-=
\=
\\=
||
===
&&
|=
.|=
<:
>:
|>
<|
::
x ? y : z
#macros
@spawnat 2 1+1
@eval(:x)
#keywords and operators
if else elseif while for
begin let end do
try catch finally return break continue
global local const
export import importall using
function macro module baremodule
type immutable quote
true false enumerate
</textarea></div>
<script>
var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
mode: {name: "julia",
},
lineNumbers: true,
indentUnit: 4,
matchBrackets: true
});
</script>
<p><strong>MIME types defined:</strong> <code>text/x-julia</code>.</p>
</article>
| {
"pile_set_name": "Github"
} |
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>javax.measure</groupId>
<artifactId>unit-api</artifactId>
<version>2.1-SNAPSHOT</version>
<name>Units of Measurement API</name>
<packaging>bundle</packaging>
<url>http://unitsofmeasurement.github.io/unit-api/</url>
<description>Units of Measurement Standard - This JSR specifies Java packages for modeling and working with measurement values, quantities and their corresponding units.</description>
<organization>
<name>Jean-Marie Dautelle, Werner Keil, Otavio Santana</name>
<url>http://unitsofmeasurement.github.io</url>
</organization>
<inceptionYear>2014</inceptionYear>
<licenses>
<license>
<name>BSD 3-Clause</name>
<url>LICENSE</url>
<distribution>manual</distribution>
</license>
</licenses>
<parent>
<groupId>org.sonatype.oss</groupId>
<artifactId>oss-parent</artifactId>
<version>9</version>
</parent>
<!-- Issue managements and mailing lists. -->
<issueManagement>
<system>GitHub</system>
<url>https://github.com/unitsofmeasurement/unit-api/issues</url>
</issueManagement>
<ciManagement>
<system>CircleCI</system>
<url>https://circleci.com/gh/unitsofmeasurement/unit-api</url>
</ciManagement>
<mailingLists>
<mailingList>
<name>Units-Dev</name>
<subscribe>https://groups.google.com/group/units-dev/subscribe</subscribe>
<post>[email protected]</post>
</mailingList>
<mailingList>
<name>Units-Users</name>
<subscribe>https://groups.google.com/group/units-users/subscribe</subscribe>
<post>[email protected]</post>
</mailingList>
</mailingLists>
<scm>
<connection>scm:git:[email protected]:unitsofmeasurement/unit-api.git</connection>
<developerConnection>scm:git:[email protected]:unitsofmeasurement/unit-api.git</developerConnection>
<url>https://github.com/unitsofmeasurement/unit-api</url>
</scm>
<!-- Build Settings -->
<properties>
<basedir>.</basedir>
<sourceEncoding>UTF-8</sourceEncoding> <!-- in Maven 3. -->
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>${sourceEncoding}</project.reporting.outputEncoding>
<jdkVersion>8</jdkVersion>
<jdkOptionalVersion>9</jdkOptionalVersion>
<project.build.javaVersion>${jdkVersion}</project.build.javaVersion>
<maven.compile.targetLevel>${jdkVersion}</maven.compile.targetLevel>
<maven.compile.sourceLevel>${jdkVersion}</maven.compile.sourceLevel>
<additionalparam>-Xdoclint:none</additionalparam>
<thisYear>2020</thisYear>
<!-- Plugins -->
<felix.version>5.1.1</felix.version>
<github.maven.version>0.12</github.maven.version>
<github.global.server>github</github.global.server>
<maven.resources.version>3.1.0</maven.resources.version>
<maven.compile.version>3.8.1</maven.compile.version>
<maven.site.version>3.9.1</maven.site.version>
<maven.jar.version>3.2.0</maven.jar.version>
<maven.javadoc.version>3.2.0</maven.javadoc.version>
<maven.surefire.version>2.22.2</maven.surefire.version>
<maven.surefire-report.version>2.22.2</maven.surefire-report.version>
<jacoco.plugin.version>0.8.6</jacoco.plugin.version>
<junit.jupiter.version>5.7.0</junit.jupiter.version>
<!--Exclude the files Here -->
<sonar.exclusions>src/main/java/javax/measure/BinaryPrefix.java,src/main/java/javax/measure/MetricPrefix.java</sonar.exclusions>
</properties>
<!-- Developers and Contributors -->
<developers>
<developer>
<id>dautelle</id>
<name>Jean-Marie Dautelle</name>
<email>[email protected]</email>
<organization>Airbus</organization>
<organizationUrl>http://www.airbus.com</organizationUrl>
<timezone>+1</timezone>
<roles>
<role>Architect</role>
<role>Java Developer</role>
<role>Spec Lead</role>
</roles>
</developer>
<developer>
<id>keilw</id>
<name>Werner Keil</name>
<email>[email protected]</email>
      <organization>Creative Arts &amp; Technologies</organization>
<organizationUrl>http://www.catmedia.us</organizationUrl>
<timezone>+1</timezone>
<roles>
<role>Architect</role>
<role>Java Developer</role>
<role>Spec Lead</role>
</roles>
</developer>
<developer>
<id>otaviojava</id>
<name>Otávio Gonçalves de Santana</name>
<email>[email protected]</email>
<organization>Individual / SouJava</organization>
<timezone>-5</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
<role>Spec Lead</role>
</roles>
</developer>
<developer>
<id>desruisseaux</id>
<name>Martin Desruisseaux</name>
<email>[email protected]</email>
<organization>Geomatys</organization>
<organizationUrl>http://www.geomatys.com</organizationUrl>
<timezone>+1</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
<role>Architect</role>
</roles>
</developer>
<developer>
<id>thodorisbais</id>
<name>Thodoris Bais</name>
<email>[email protected]</email>
<organization>Individual / Utrecht JUG</organization>
<timezone>+1</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
</roles>
</developer>
<developer>
<id>Daniel-Dos</id>
<name>Daniel Dias</name>
<email>[email protected]</email>
<organization>Individual / SouJava</organization>
<timezone>-5</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
</roles>
</developer>
<developer>
<id>jhg023</id>
<name>Jacob Glickman</name>
<organization>Individual</organization>
<timezone>-4</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
</roles>
</developer>
<developer>
<id>magesh678</id>
<name>Magesh Kasthuri</name>
<organization>Individual</organization>
<timezone>+4</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
</roles>
</developer>
<developer>
<id>mohalmo</id>
<name>Mohammed Al-Moayed</name>
<organization>Individual</organization>
<timezone>+2</timezone>
<roles>
<role>Expert</role>
<role>Java Developer</role>
</roles>
</developer>
</developers>
<contributors>
<contributor>
<name>Andi Huber</name>
<organization>Individual</organization>
<timezone>+1</timezone>
<roles>
<role>Contributor</role>
</roles>
</contributor>
<contributor>
<name>Filip Van Laenen</name>
<email>[email protected]</email>
<organization>Computas</organization>
<timezone>+1</timezone>
<roles>
<role>Contributor</role>
</roles>
</contributor>
<contributor>
<name>Mads Opheim</name>
<organization>Computas</organization>
<timezone>+1</timezone>
<roles>
<role>Contributor</role>
</roles>
</contributor>
<contributor>
<name>Matthijs Thoolen</name>
<organization>Utrecht Java User Group</organization>
<timezone>+1</timezone>
<roles>
<role>Contributor</role>
</roles>
</contributor>
<contributor>
<name>Anakar Parida</name>
<organization>Individual</organization>
<timezone>+5.5</timezone>
<roles>
<role>Contributor</role>
</roles>
</contributor>
<contributor>
<name>Rustam Mehmandarov</name>
<organization>Individual</organization>
<timezone>+3</timezone>
<roles>
<role>Contributor</role>
</roles>
</contributor>
<contributor>
<name>Nathan Scott</name>
<email>[email protected]</email>
<organization>Red Hat</organization>
<timezone>+10</timezone>
<roles>
<role>Contributor Emeritus</role>
</roles>
</contributor>
<contributor>
<!-- id>duckasteroid</id -->
<name>Chris Senior</name>
<email>[email protected]</email>
<organization>Snap-on Inc.</organization>
<roles>
<role>Expert Emeritus</role>
</roles>
</contributor>
<contributor>
<!-- id>leomrlima</id -->
<name>Leonardo de Moura Rocha Lima</name>
<email>[email protected]</email>
<organization>V2COM</organization>
<organizationUrl>http://www.v2com.mobi/</organizationUrl>
<timezone>-5</timezone>
<roles>
<role>Expert Emeritus</role>
<role>Java Developer</role>
</roles>
</contributor>
<contributor>
<!-- id>eralmas7</id -->
<name>Almas Shaikh</name>
<email>[email protected]</email>
<organization>Individual / JP Morgan</organization>
<timezone>+5.5</timezone>
<roles>
<role>Test Engineer</role>
</roles>
</contributor>
<contributor>
<!-- id>rajmahendra</id -->
<name>Rajmahendra Hegde</name>
<email>[email protected]</email>
<organization>JUG Chennai</organization>
<timezone>+5.5</timezone>
<roles>
<role>Expert Emeritus</role>
</roles>
</contributor>
<contributor>
<!-- id>karen_legrand</id -->
<name>Karen Legrand</name>
<email>[email protected]</email>
<organization>Innovation Emergency Management (IEM)</organization>
<organizationUrl>http://www.iem.com</organizationUrl>
<timezone>-5</timezone>
<roles>
<role>Expert Emeritus</role>
</roles>
</contributor>
<contributor>
<!-- id>mohamed-taman</id -->
<name>Mohamed Mahmoud Taman</name>
<email>[email protected]</email>
<organization>Individual / Morocco JUG</organization>
<timezone>+2</timezone>
<roles>
<role>Expert Emeritus</role>
</roles>
</contributor>
<contributor>
<name>Daniel Leuck</name>
<email>[email protected]</email>
<organization>Ikayzo</organization>
<timezone>-9</timezone>
<roles>
<role>Supporter</role>
</roles>
</contributor>
<contributor>
<name>Eric Russell</name>
<email>[email protected]</email>
<timezone>-5</timezone>
<roles>
<role>Supporter</role>
</roles>
</contributor>
<contributor>
<name>John Paul Morrison</name>
<organization>J.P. Morrison Enterprises, Ltd.</organization>
<timezone>-5</timezone>
<roles>
<role>Supporter</role>
</roles>
</contributor>
<contributor>
<name>Michael Gruebsch</name>
<email>[email protected]</email>
<roles>
<role>Supporter</role>
</roles>
<timezone>+1</timezone>
</contributor>
</contributors>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven.compile.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.8</version>
</plugin>
<!-- License -->
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<version>3.0</version>
<executions>
<execution>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Format -->
<plugin>
<groupId>net.revelc.code</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<version>0.5.2</version>
</plugin>
<!--This plugin's configuration is used to store Eclipse m2e settings
only. It has no influence on the Maven build itself. -->
<plugin>
<groupId>org.eclipse.m2e</groupId>
<artifactId>lifecycle-mapping</artifactId>
<version>1.0.0</version>
<configuration>
<lifecycleMappingMetadata>
<pluginExecutions>
<pluginExecution>
<pluginExecutionFilter>
<groupId>org.jacoco</groupId>
<artifactId>
jacoco-maven-plugin
</artifactId>
<versionRange>
[0.7.1.201405082137,)
</versionRange>
<goals>
<goal>prepare-agent</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
<pluginExecution>
<pluginExecutionFilter>
<groupId>
net.revelc.code
</groupId>
<artifactId>
formatter-maven-plugin
</artifactId>
<versionRange>
[0.5.2,)
</versionRange>
<goals>
<goal>format</goal>
</goals>
</pluginExecutionFilter>
<action>
<ignore></ignore>
</action>
</pluginExecution>
</pluginExecutions>
</lifecycleMappingMetadata>
</configuration>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<!-- Compilation -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<release>${maven.compile.targetLevel}</release>
</configuration>
</plugin>
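      <!-- Compile the optional JDK 9 sources into META-INF/versions/9, producing a
           multi-release JAR that still runs on Java 8. -->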
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir
dir="${project.build.directory}/classes/META-INF/versions/${jdkOptionalVersion}" />
<javac
destdir="${project.build.directory}/classes/META-INF/versions/${jdkOptionalVersion}"
srcdir="${project.basedir}/src/main/jdk${jdkOptionalVersion}"
includeAntRuntime="false">
<compilerarg
line="--release=${jdkOptionalVersion} --patch-module java.measure=${project.build.directory}/classes" />
</javac>
</target>
</configuration>
</execution>
</executions>
</plugin>
      <!-- Need to specify at least 2.22.0 for JUnit 5 -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven.surefire.version}</version>
</plugin>
<!-- Coverage -->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.plugin.version}</version>
<configuration>
<excludes>
<exclude>META-INF/versions/**</exclude>
</excludes>
</configuration>
<executions>
<execution>
<id>pre-unit-test</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>post-unit-test</id>
<phase>test</phase>
<goals>
<goal>report</goal>
<goal>check</goal>
</goals>
<configuration>
<rules>
<rule>
<element>BUNDLE</element>
<limits>
<limit implementation="org.jacoco.report.check.Limit">
<counter>INSTRUCTION</counter>
<value>COVEREDRATIO</value>
<minimum>0.5</minimum>
</limit>
<limit>
<counter>COMPLEXITY</counter>
<value>COVEREDRATIO</value>
<minimum>0.5</minimum>
</limit>
</limits>
</rule>
</rules>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.eluder.coveralls</groupId>
<artifactId>coveralls-maven-plugin</artifactId>
<version>4.3.0</version>
<dependencies>
<dependency>
<groupId>javax.xml.bind</groupId>
<artifactId>jaxb-api</artifactId>
<version>2.2.3</version>
</dependency>
</dependencies>
<configuration>
<repoToken>${env.COVERALLS_REPO_TOKEN}</repoToken>
</configuration>
</plugin>
<!-- Attach Sources -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>3.2.0</version>
<executions>
<execution>
<id>attach-sources</id>
<phase>package</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven.javadoc.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<detectLinks>true</detectLinks>
<keywords>true</keywords>
<linksource>true</linksource>
<failOnError>false</failOnError>
<verbose>true</verbose>
<tags>
<tag>
<name>apiNote</name>
<placement>a</placement>
<head>API Note:</head>
</tag>
<tag>
<name>implSpec</name>
<placement>a</placement>
<head>Implementation Requirements:</head>
</tag>
<tag>
<name>implNote</name>
<placement>a</placement>
<head>Implementation Note:</head>
</tag>
<tag><name>param</name></tag>
<tag><name>return</name></tag>
<tag><name>throws</name></tag>
<tag><name>since</name></tag>
<tag><name>version</name></tag>
<tag><name>serialData</name></tag>
<tag><name>see</name></tag>
</tags>
</configuration>
</plugin>
<!-- JAR packaging -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven.jar.version}</version>
<configuration>
<archive>
<manifest>
<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
</manifest>
<manifestEntries>
<Specification-Title>${project.name}</Specification-Title>
<Specification-Version>${project.version}</Specification-Version>
<Specification-Vendor>${project.organization.name}</Specification-Vendor>
<Implementation-Vendor>Unit-API contributors</Implementation-Vendor>
<Implementation-URL>http://unitsofmeasurement.github.io</Implementation-URL>
<Automatic-Module-Name>java.measure</Automatic-Module-Name>
<Multi-Release>true</Multi-Release>
</manifestEntries>
<addMavenDescriptor>false</addMavenDescriptor>
</archive>
</configuration>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Packaging (OSGi bundle) -->
<plugin>
<groupId>org.apache.felix</groupId>
<artifactId>maven-bundle-plugin</artifactId>
<version>${felix.version}</version>
<extensions>true</extensions>
</plugin>
<!-- Resources -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>${maven.resources.version}</version>
<configuration>
<encoding>${project.build.sourceEncoding}</encoding>
</configuration>
</plugin>
<!-- ======================================================= -->
<!-- Maven License Plugin -->
<!-- ======================================================= -->
<plugin>
<groupId>com.mycila</groupId>
<artifactId>license-maven-plugin</artifactId>
<configuration>
<header>src/main/config/header.txt</header>
<properties>
<owner>${project.organization.name}</owner>
<currentYear>${thisYear}</currentYear>
</properties>
<excludes>
<exclude>.editorconfig</exclude>
<exclude>.gitattributes</exclude>
<exclude>.github/**</exclude>
<exclude>.circleci/**</exclude>
<exclude>**/LICENSE</exclude>
<exclude>**/README</exclude>
<exclude>**/pom.xml</exclude>
<exclude>**/settings.xml</exclude>
<exclude>docs/**</exclude>
<exclude>src/test/resources/**</exclude>
<exclude>src/main/resources/**</exclude>
<exclude>src/main/config/**</exclude>
<exclude>src/main/emf/**</exclude>
<exclude>src/site/**</exclude>
<exclude>src/etc/**</exclude>
<exclude>*.css</exclude>
<exclude>*.jpg</exclude>
<exclude>*.png</exclude>
<exclude>*.yml</exclude>
</excludes>
<headerDefinitions>
<headerDefinition>src/main/config/headers.xml</headerDefinition>
</headerDefinitions>
<mapping>
<java>JAVA_STYLE</java>
</mapping>
</configuration>
</plugin>
<!-- ======================================================= -->
<!-- Maven Code Formatter -->
<!-- ======================================================= -->
<plugin>
<groupId>net.revelc.code</groupId>
<artifactId>formatter-maven-plugin</artifactId>
<configuration>
<configFile>${project.basedir}/src/main/config/eclipse-formatter-config.xml</configFile>
</configuration>
<executions>
<execution>
<goals>
<goal>format</goal>
</goals>
</execution>
</executions>
</plugin>
<!-- Maven web site -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<version>${maven.site.version}</version>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>3.1.0</version>
<reportSets>
<reportSet>
<reports>
<report>index</report>
<report>summary</report>
<report>licenses</report>
<report>scm</report>
<report>ci-management</report>
<report>team</report>
<report>mailing-lists</report>
<report>issue-management</report>
</reports>
</reportSet>
</reportSets>
</plugin>
<!-- Javadoc generation -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven.javadoc.version}</version>
<configuration>
<source>${maven.compile.sourceLevel}</source>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.reporting.outputEncoding}</docencoding>
<charset>${project.reporting.outputEncoding}</charset>
<locale>en</locale>
<detectJavaApiLink>false</detectJavaApiLink>
<noqualifier>all</noqualifier>
<quiet>true</quiet>
<keywords>true</keywords>
<links>
<link>http://docs.oracle.com/javase/9/docs/api/</link>
</links>
</configuration>
</plugin>
<!-- Code analysis -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-pmd-plugin</artifactId>
<version>3.13.0</version>
<configuration>
<sourceEncoding>${project.build.sourceEncoding}</sourceEncoding>
</configuration>
</plugin>
<!-- Report on test results -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-report-plugin</artifactId>
<version>${maven.surefire-report.version}</version>
</plugin>
<!-- Static analysis for occurrences of bug patterns -->
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>4.0.4</version>
</plugin>
</plugins>
</reporting>
<!-- Deployment to public servers -->
<distributionManagement>
<repository>
<id>bintray-release</id>
<name>oss-jfrog-artifactory-release</name>
<url>https://oss.jfrog.org/artifactory/oss-release-local</url>
</repository>
<snapshotRepository>
<id>bintray-snapshot</id>
<name>oss-jfrog-artifactory-snapshot</name>
<url>https://oss.jfrog.org/artifactory/oss-snapshot-local</url>
</snapshotRepository>
<site>
<id>JSR-385</id>
<name>JSR-385 Maven reports</name>
<url>file:///var/www/www.unitsofmeasurement.github.io/jsr-385</url>
<!-- No longer active, just a placeholder! -->
</site>
</distributionManagement>
<dependencies>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<profiles>
<!-- Individual JARs -->
<profile>
<id>base-jar</id>
<!-- This profile builds only the base (root level) elements into a separate
-base.jar file -->
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<execution>
<phase>prepare-package</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<mkdir
dir="${project.build.directory}/classes/META-INF/versions/${jdkOptionalVersion}" />
<javac
destdir="${project.build.directory}/classes/META-INF/versions/${jdkOptionalVersion}"
srcdir="${project.basedir}/src/etc/modules/core/jdk${jdkOptionalVersion}"
includeAntRuntime="false">
<compilerarg
line="--release=${jdkOptionalVersion} --patch-module java.measure.base=${project.build.directory}/classes" />
</javac>
</target>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifest>
<addDefaultImplementationEntries>true</addDefaultImplementationEntries>
</manifest>
<manifestEntries>
<Specification-Title>${project.name}</Specification-Title>
<Specification-Version>${project.version}</Specification-Version>
<Specification-Vendor>${project.organization.name}</Specification-Vendor>
<Implementation-Vendor>Unit-API contributors</Implementation-Vendor>
<Implementation-URL>http://unitsofmeasurement.github.io</Implementation-URL>
<Automatic-Module-Name>java.measure.base</Automatic-Module-Name>
<Multi-Release>true</Multi-Release>
</manifestEntries>
<addMavenDescriptor>false</addMavenDescriptor>
</archive>
</configuration>
<executions>
<execution>
<id>base-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>base</classifier>
<excludes>
<exclude>javax/measure/format/**</exclude>
<exclude>javax/measure/quantity/**</exclude>
<exclude>javax/measure/spi/**</exclude>
</excludes>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>format-jar</id>
<!-- This profile builds (optional) format elements into separate JAR
files -->
<build>
<plugins>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven.jar.version}</version>
<executions>
<execution>
<id>format-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>format</classifier>
<includes>
<include>javax/measure/format/**</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>quantity-jar</id>
<!-- This profile builds (optional) quantities into separate JAR files -->
<build>
<plugins>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven.jar.version}</version>
<executions>
<execution>
            <id>quantity-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>quantity</classifier>
<includes>
<include>javax/measure/quantity/**</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>spi-jar</id>
<!-- This profile builds (optional) SPI into separate JAR files -->
<build>
<plugins>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven.jar.version}</version>
<executions>
<execution>
<id>spi-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>spi</classifier>
<includes>
<include>javax/measure/spi/**</include>
</includes>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<!-- Profile JARs -->
<profile>
<id>format-profile</id>
<!-- This profile builds the Format Profile (base+format) into a separate
jar file -->
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven.jar.version}</version>
<executions>
<execution>
<id>format-profile-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>format-profile</classifier>
<excludes>
<exclude>javax/measure/quantity/**</exclude>
<exclude>javax/measure/spi/**</exclude>
</excludes>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>spi-profile</id>
<!-- This profile builds the SPI Profile (base+format+spi) into a separate
jar file -->
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<build>
<plugins>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven.jar.version}</version>
<executions>
<execution>
<id>base-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>spi-profile</classifier>
<excludes>
<exclude>javax/measure/quantity/**</exclude>
</excludes>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>documentation</id>
<build>
<plugins>
<plugin>
<groupId>org.asciidoctor</groupId>
<artifactId>asciidoctor-maven-plugin</artifactId>
<executions>
<execution>
<id>output-html</id>
<phase>generate-resources</phase>
<goals>
<goal>process-asciidoc</goal>
</goals>
<configuration>
<outputDirectory>target/docs</outputDirectory>
<sourceHighlighter>highlightjs</sourceHighlighter> <!-- coderay -->
<backend>html</backend>
<embedAssets>true</embedAssets>
<imagesDir>src/main/asciidoc/images</imagesDir>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>brokenGitHubSite</id>
<build>
<plugins>
<!-- ======================================================= -->
<!-- GitHub Plugin -->
<!-- ======================================================= -->
        <!-- This plugin seems permanently broken, so it has been moved into this profile -->
<!-- TODO remove this profile soon -->
<plugin>
<groupId>com.github.github</groupId>
<artifactId>site-maven-plugin</artifactId>
<version>${github.maven.version}</version>
<configuration>
<!-- must match the server's id -->
<server>github</server>
<message>Building site for ${project.version}</message>
</configuration>
<executions>
<execution>
<goals>
<goal>site</goal>
</goals>
<phase>site-deploy</phase>
<configuration>
<path>site</path>
<merge>true</merge>
<!-- <outputDirectory>${project.build.directory}/mvn-repo</outputDirectory> -->
<repositoryName>unit-api</repositoryName>
<repositoryOwner>unitsofmeasurement</repositoryOwner>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>localCopySite</id>
<!-- ======================================================= -->
<!-- Locally copies the "site" into the GitHub pages folder -->
<!-- ======================================================= -->
<build>
<plugins>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>copy-resources</id>
<phase>site</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>${basedir}/docs/site</outputDirectory>
<resources>
<resource>
<directory>target/site</directory>
<filtering>false</filtering>
</resource>
</resources>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>sonar</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
<properties>
<sonar.host.url>https://sonarcloud.io</sonar.host.url>
<sonar.projectKey>${env.SONARCLOUD_PROJECT_KEY}</sonar.projectKey>
<sonar.login>${env.SONARCLOUD_LOGIN}</sonar.login>
<sonar.organization>${env.SONARCLOUD_ORG}</sonar.organization>
</properties>
</profile>
</profiles>
</project>
| {
"pile_set_name": "Github"
} |
//
// TGMessagesNavigationController.m
// Telegram
//
// Created by keepcoder on 06/04/16.
// Copyright © 2016 keepcoder. All rights reserved.
//
#import "TGMessagesNavigationController.h"
#import "TGViewMessagesDragging.h"
#import "TGInlineAudioPlayer.h"
#import "TGAudioPlayerWindow.h"
@interface TGMessagesNavigationController ()
@property (nonatomic,strong) TGInlineAudioPlayer *inlineAudioPlayer;
@end
@implementation TGMessagesNavigationController
@synthesize view = _view;
-(void)loadView {
TGViewMessagesDragging *view = [[TGViewMessagesDragging alloc] initWithFrame:self.frameInit];
view.navigationViewController = self;
[view registerForDraggedTypes:[NSArray arrayWithObjects:NSFilenamesPboardType,NSStringPboardType,NSTIFFPboardType,NSURLPboardType, nil]];
_view = view;
[super loadView];
}
-(void)pushViewController:(TMViewController *)viewController animated:(BOOL)animated {
[self updateInlinePlayer:viewController];
[super pushViewController:viewController animated:animated];
}
-(void)goBackWithAnimation:(BOOL)animated {
[super goBackWithAnimation:animated];
[self updateInlinePlayer:self.currentController];
}
-(void)updateInlinePlayer:(TMViewController *)viewController {
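    // Collapse to the mini style and reposition the player, accounting for the
    // navigation bar when it is visible.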
[_inlineAudioPlayer setStyle:TGAudioPlayerGlobalStyleMini animated:NO];
[_inlineAudioPlayer setFrameOrigin:NSMakePoint(0, NSHeight(self.view.frame) - (viewController.isNavigationBarHidden ? 0 : self.navigationOffset) - 50)];
}
-(void)showInlinePlayer:(TGAudioGlobalController *)controller {
if(!_inlineAudioPlayer) {
_inlineAudioPlayer = [[TGInlineAudioPlayer alloc] initWithFrame:NSMakeRect(0, NSHeight(self.view.frame) - self.navigationOffset - 50, NSWidth(self.view.frame), 50) globalController:controller];
[self.view addSubview:_inlineAudioPlayer];
}
[self.inlineAudioPlayer show:controller ? controller.conversation : self.messagesViewController.conversation navigation:self];
[self.currentController.view setFrameSize:NSMakeSize(NSWidth(self.currentController.view.frame), self.view.bounds.size.height - self.navigationOffset - self.viewControllerTopOffset)];
}
-(TGAudioGlobalController *)inlineController {
return _inlineAudioPlayer.audioController;
}
-(void)hideInlinePlayer:(TGAudioGlobalController *)controller {
[_inlineAudioPlayer removeFromSuperview];
_inlineAudioPlayer = nil;
[self.currentController.view setFrameSize:NSMakeSize(NSWidth(self.currentController.view.frame), self.view.bounds.size.height - self.navigationOffset)];
if(controller) {
[TGAudioPlayerWindow showWithController:controller];
}
}
-(int)viewControllerTopOffset {
return NSHeight(_inlineAudioPlayer.frame);
}
- (TGView *)view {
if(!_view)
[self loadView];
return (TGView *) _view;
}
@end
| {
"pile_set_name": "Github"
} |
# coding: utf-8
require "#{File.dirname(__FILE__)}/../spec_helper"
describe Bonsai::Page do
describe "class methods" do
it "should respond to all" do
Bonsai::Page.should respond_to :all
end
it "should contain pages" do
Bonsai::Page.all.first.should be_an_instance_of(Bonsai::Page)
end
it "should remove numbers over 10 from the permalink" do
Bonsai::Page.find("many-pages").permalink.should == "/many-pages/"
end
it "should be equal" do
Bonsai::Page.find("about-us").should == Bonsai::Page.find("about-us")
end
end
describe "relationships" do
before :all do
@index = Bonsai::Page.find("index")
@about = Bonsai::Page.find("about-us")
@history = Bonsai::Page.find("about-us/history")
@contact = Bonsai::Page.find("about-us/contact")
@child = Bonsai::Page.find("about-us/history/child")
end
it "should have siblings" do
@history.siblings.should be_an_instance_of(Array)
@history.siblings.size.should == 1
@history.siblings.should include(@contact)
@history.siblings.should_not include(@history)
end
it "should have a parent" do
@history.parent.should == @about
end
it "should not have a parent" do
@about.parent.should == nil
end
it "should not have a parent for index" do
@index.parent.should == nil
end
it "should have children" do
@about.children.should be_an_instance_of(Array)
@about.children.size.should == 1
@about.children.should include(@contact)
end
it "should not have floating pages in the children array" do
@about.children.should_not include(@history)
end
it "should have ancestors" do
@child.ancestors.should be_an_instance_of(Array)
@child.ancestors.size.should == 2
@child.ancestors.should include(@history)
@child.ancestors.should include(@about)
end
it "should have the ancestors in a top down order" do
@child.ancestors.first.should == @about
@child.ancestors.last.should == @history
end
it "index should be a floating page" do
@index.floating?.should be_true
end
it "about should not be a floating page" do
@about.floating?.should be_false
end
end
describe "instance" do
let(:page) { Bonsai::Page.find("about-us/history") }
subject { page }
its(:slug) { should == "history" }
its(:name) { should == "History" }
its(:permalink) { should == "/about-us/history/" }
its(:ctime) { should be_an_instance_of Time }
its(:mtime) { should be_an_instance_of Time }
its(:write_path) { should == "/about-us/history/index.html" }
its(:disk_path) { should == "#{Bonsai.root_dir}/content/1.about-us/history/demo-template.yml" }
its(:template) { should be_an_instance_of(Bonsai::Template) }
it "should to_hash to its variables" do
page.content[:page_title].should == "About our history"
page.content[:page_title].should_not be_nil
end
describe "assets" do
subject { page.assets }
it { should be_an_instance_of Array }
its(:length) { should be 6 }
describe "asset properties" do
it "should have the correct name" do
page.assets.first['name'].should == "001"
end
it "should have the correct path" do
page.assets.first['path'].should == "/about-us/history/001.jpg"
end
it "should have the correct disk_path" do
page.assets.first['disk_path'].should == File.join(Dir.pwd, "spec/support/content/1.about-us/history/001.jpg")
end
it "should titleize the name attribute and remove the file extension" do
page.assets[2]['name'].should == "A File Asset"
end
end
end
end
describe "render" do
describe "general" do
let(:page) { Bonsai::Page.find("about-us/contact").render }
subject { page }
it { should_not be_nil }
it "should replace liquid variables with properties from the content file" do
page.should == "Hello from our template, named Contact\n\nGet in touch\n<p>“A designer knows he has achieved perfection not when there is nothing left to add, but when there is nothing left to take away.”</p>\n\n<p>– Antoine de Saint-Exupery</p>\n\nThis content should be inserted!"
end
describe "markdown" do
it "should not use markdown for single line content" do
page.should =~ /\nGet in touch\n/
end
it "should use markdown for multiple line content" do
page.should =~ /<p>“A designer knows he/
end
it "should use smartypants" do
page.should =~ /“/
end
end
end
describe "images" do
let(:page) { Bonsai::Page.find("render/image-spec").render }
it "should render successfully" do
page.should == "\n /render/image-spec/images/image001.jpg\n"
end
it "should write in images" do
page.should include "image001.jpg"
end
end
# Pages that use a structure yet have no parent page should still render
describe "page without parent" do
it "should render successfully" do
lambda { Bonsai::Page.find("legals/terms-and-conditions").render }.should_not raise_error
end
end
end
describe "to hash" do
before :all do
@page = Bonsai::Page.find("about-us/history")
end
it "should respond to to_hash" do
@page.should respond_to(:to_hash)
end
%w(slug permalink name page_title children siblings parent ancestors magic navigation updated_at created_at).each do |key|
it "should have a to_hash key for #{key}" do
@page.to_hash.keys.should include(key)
end
end
it "should include global site variables from site.yml" do
@page.to_hash['site_name'].should == "Bonsai"
@page.to_hash['url'].should == "http://tinytree.info"
@page.to_hash['copyright'].should == 2010
end
describe "disk_assets" do
before :all do
@vars = @page.to_hash
end
describe "enum" do
it "should not have a child" do
@vars.should_not have_key('child')
end
it "should have magic" do
@vars.should have_key('magic')
end
it "it should be a an array of hashes" do
@vars['magic'].should be_an_instance_of(Array)
@vars['magic'].first.should be_an_instance_of(Hash)
@vars['magic'].size.should == 2
end
end
end
end
describe "broken page" do
before :all do
Bonsai::Page.path = "spec/support/broken/content"
end
after :all do
Bonsai::Page.path = "spec/support/content"
end
it "should exist" do
Bonsai::Page.find("page").should be_an_instance_of(Bonsai::Page)
end
it "should error gracefully" do
lambda { Bonsai::Page.find("page").render }.should_not raise_error(ArgumentError)
end
end
end
| {
"pile_set_name": "Github"
} |
*******************************************************
* Run Autoruns, Parse the CSV, and copy all of the    *
* executables                                         *
*******************************************************
SET:CopyPath=Part
ACQ:\ARN
SAY:[+] Executing Autoruns (Ignoring MS Executables)...
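* Note on the Autorunsc switches below (per the stock Sysinternals  *
* documentation): -a * collects all entry types, -c emits CSV,      *
* -h records file hashes, -m hides signed Microsoft entries; the    *
* output is redirected into the acquisition directory for the       *
* LST/NCP parsing and copy steps that follow.                       *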
CMD:\SYS\Autorunsc.exe /accepteula -a * -c -h -m > &Acq\AutoRun.dat
SAY:[+] Copying Executables...
LST:&ACN\ARN\AutoRun.dat
NCP:"&LS8" "&ACQ"
SAY:[+] Processing Complete...
SAY:
SAY:[+] Now Hashing Artifacts...
HSH:ACQ
SAY:
PZZ: All Done! Press Any Key to Finish.
| {
"pile_set_name": "Github"
} |
# Bind Dubbo's QoS (online operations) endpoint to port 33333 instead of the default 22222.
dubbo.application.qos.port=33333
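# A minimal sketch of the related QoS keys, assuming Apache Dubbo's documented
# qos settings; values shown are the stock defaults and are left commented out
# so that only the port override above takes effect:
# dubbo.application.qos.enable=true
# dubbo.application.qos.accept.foreign.ip=false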
| {
"pile_set_name": "Github"
} |